dataset,version,metric,mode,XinYuan-Qwen2-1_5B-0822
lukaemon_mmlu_college_biology,caec7d,accuracy,gen,53.47
lukaemon_mmlu_college_chemistry,520aa6,accuracy,gen,40.00
lukaemon_mmlu_college_computer_science,99c216,accuracy,gen,45.00
lukaemon_mmlu_college_mathematics,678751,accuracy,gen,34.00
lukaemon_mmlu_college_physics,4f382c,accuracy,gen,29.41
lukaemon_mmlu_electrical_engineering,770ce3,accuracy,gen,55.17
lukaemon_mmlu_astronomy,d3ee01,accuracy,gen,50.66
lukaemon_mmlu_anatomy,72183b,accuracy,gen,54.07
lukaemon_mmlu_abstract_algebra,2db373,accuracy,gen,38.00
lukaemon_mmlu_machine_learning,0283bb,accuracy,gen,39.29
lukaemon_mmlu_clinical_knowledge,cb3218,accuracy,gen,57.74
lukaemon_mmlu_global_facts,ab07b6,accuracy,gen,33.00
lukaemon_mmlu_management,80876d,accuracy,gen,80.58
lukaemon_mmlu_nutrition,4543bd,accuracy,gen,65.03
lukaemon_mmlu_marketing,7394e3,accuracy,gen,82.05
lukaemon_mmlu_professional_accounting,444b7f,accuracy,gen,43.97
lukaemon_mmlu_high_school_geography,0780e6,accuracy,gen,69.19
lukaemon_mmlu_international_law,cf3179,accuracy,gen,72.73
lukaemon_mmlu_moral_scenarios,f6dbe2,accuracy,gen,26.37
lukaemon_mmlu_computer_security,ce7550,accuracy,gen,67.00
lukaemon_mmlu_high_school_microeconomics,04d21a,accuracy,gen,56.30
lukaemon_mmlu_professional_law,5f7e6c,accuracy,gen,42.37
lukaemon_mmlu_medical_genetics,881ef5,accuracy,gen,63.00
lukaemon_mmlu_professional_psychology,221a16,accuracy,gen,53.43
lukaemon_mmlu_jurisprudence,001f24,accuracy,gen,69.44
lukaemon_mmlu_world_religions,232c09,accuracy,gen,69.01
lukaemon_mmlu_philosophy,08042b,accuracy,gen,60.45
lukaemon_mmlu_virology,12e270,accuracy,gen,43.98
lukaemon_mmlu_high_school_chemistry,ae8820,accuracy,gen,49.75
lukaemon_mmlu_public_relations,e7d39b,accuracy,gen,58.18
lukaemon_mmlu_high_school_macroeconomics,a01685,accuracy,gen,56.15
lukaemon_mmlu_human_sexuality,42407c,accuracy,gen,67.18
lukaemon_mmlu_elementary_mathematics,269926,accuracy,gen,38.10
lukaemon_mmlu_high_school_physics,93278f,accuracy,gen,33.77
lukaemon_mmlu_high_school_computer_science,9965a5,accuracy,gen,56.00
lukaemon_mmlu_high_school_european_history,eefc90,accuracy,gen,65.45
lukaemon_mmlu_business_ethics,1dec08,accuracy,gen,66.00
lukaemon_mmlu_moral_disputes,a2173e,accuracy,gen,58.09
lukaemon_mmlu_high_school_statistics,8f3f3a,accuracy,gen,42.59
lukaemon_mmlu_miscellaneous,935647,accuracy,gen,70.50
lukaemon_mmlu_formal_logic,cfcb0c,accuracy,gen,37.30
lukaemon_mmlu_high_school_government_and_politics,3c52f9,accuracy,gen,75.65
lukaemon_mmlu_prehistory,bbb197,accuracy,gen,57.72
lukaemon_mmlu_security_studies,9b1743,accuracy,gen,70.61
lukaemon_mmlu_high_school_biology,37b125,accuracy,gen,63.55
lukaemon_mmlu_logical_fallacies,9cebb0,accuracy,gen,68.10
lukaemon_mmlu_high_school_world_history,048e7e,accuracy,gen,71.31
lukaemon_mmlu_professional_medicine,857144,accuracy,gen,49.26
lukaemon_mmlu_high_school_mathematics,ed4dc0,accuracy,gen,33.33
lukaemon_mmlu_college_medicine,38709e,accuracy,gen,49.71
lukaemon_mmlu_high_school_us_history,8932df,accuracy,gen,69.12
lukaemon_mmlu_sociology,c266a2,accuracy,gen,71.14
lukaemon_mmlu_econometrics,d1134d,accuracy,gen,35.09
lukaemon_mmlu_high_school_psychology,7db114,accuracy,gen,73.39
lukaemon_mmlu_human_aging,82a410,accuracy,gen,60.09
lukaemon_mmlu_us_foreign_policy,528cfe,accuracy,gen,78.00
lukaemon_mmlu_conceptual_physics,63588e,accuracy,gen,45.11
mmlu_pro_math,736233,accuracy,gen,17.25
mmlu_pro_physics,736233,accuracy,gen,13.16
mmlu_pro_chemistry,736233,accuracy,gen,9.54
mmlu_pro_law,736233,accuracy,gen,17.44
mmlu_pro_engineering,736233,accuracy,gen,11.35
mmlu_pro_other,736233,accuracy,gen,25.11
mmlu_pro_economics,736233,accuracy,gen,29.50
mmlu_pro_health,736233,accuracy,gen,21.64
mmlu_pro_psychology,736233,accuracy,gen,30.83
mmlu_pro_business,736233,accuracy,gen,17.11
mmlu_pro_biology,736233,accuracy,gen,31.52
mmlu_pro_philosophy,736233,accuracy,gen,20.84
mmlu_pro_computer_science,736233,accuracy,gen,23.90
mmlu_pro_history,736233,accuracy,gen,22.31
gsm8k,1d7fe4,accuracy,gen,57.62
ceval-computer_network,db9ce2,accuracy,gen,52.63
ceval-operating_system,1c2571,accuracy,gen,52.63
ceval-computer_architecture,a74dad,accuracy,gen,61.90
ceval-college_programming,4ca32a,accuracy,gen,67.57
ceval-college_physics,963fa8,accuracy,gen,36.84
ceval-college_chemistry,e78857,accuracy,gen,50.00
ceval-advanced_mathematics,ce03e2,accuracy,gen,57.89
ceval-probability_and_statistics,65e812,accuracy,gen,27.78
ceval-discrete_mathematics,e894ae,accuracy,gen,31.25
ceval-electrical_engineer,ae42b9,accuracy,gen,48.65
ceval-metrology_engineer,ee34ea,accuracy,gen,83.33
ceval-high_school_mathematics,1dc5bf,accuracy,gen,16.67
ceval-high_school_physics,adf25f,accuracy,gen,63.16
ceval-high_school_chemistry,2ed27f,accuracy,gen,68.42
ceval-high_school_biology,8e2b9a,accuracy,gen,84.21
ceval-middle_school_mathematics,bee8d5,accuracy,gen,63.16
ceval-middle_school_biology,86817c,accuracy,gen,85.71
ceval-middle_school_physics,8accf6,accuracy,gen,78.95
ceval-middle_school_chemistry,167a15,accuracy,gen,85.00
ceval-veterinary_medicine,b4e08d,accuracy,gen,78.26
ceval-college_economics,f3f4e6,accuracy,gen,72.73
ceval-business_administration,c1614e,accuracy,gen,72.73
ceval-marxism,cf874c,accuracy,gen,89.47
ceval-mao_zedong_thought,51c7a4,accuracy,gen,87.50
ceval-education_science,591fee,accuracy,gen,82.76
ceval-teacher_qualification,4e4ced,accuracy,gen,86.36
ceval-high_school_politics,5c0de2,accuracy,gen,78.95
ceval-high_school_geography,865461,accuracy,gen,73.68
ceval-middle_school_politics,5be3e7,accuracy,gen,85.71
ceval-middle_school_geography,8a63be,accuracy,gen,91.67
ceval-modern_chinese_history,fc01af,accuracy,gen,86.96
ceval-ideological_and_moral_cultivation,a2aa4a,accuracy,gen,100.00
ceval-logic,f5b022,accuracy,gen,63.64
ceval-law,a110a1,accuracy,gen,70.83
ceval-chinese_language_and_literature,0f8b68,accuracy,gen,52.17
ceval-art_studies,2a1300,accuracy,gen,69.70
ceval-professional_tour_guide,4e673e,accuracy,gen,82.76
ceval-legal_professional,ce8787,accuracy,gen,60.87
ceval-high_school_chinese,315705,accuracy,gen,68.42
ceval-high_school_history,7eb30a,accuracy,gen,80.00
ceval-middle_school_history,48ab4a,accuracy,gen,95.45
ceval-civil_servant,87d061,accuracy,gen,59.57
ceval-sports_science,70f27b,accuracy,gen,73.68
ceval-plant_protection,8941f9,accuracy,gen,63.64
ceval-basic_medicine,c409d6,accuracy,gen,84.21
ceval-clinical_medicine,49e82d,accuracy,gen,72.73
ceval-urban_and_rural_planner,95b885,accuracy,gen,67.39
ceval-accountant,002837,accuracy,gen,67.35
ceval-fire_engineer,bc23f5,accuracy,gen,61.29
ceval-environmental_impact_assessment_engineer,c64e2d,accuracy,gen,67.74
ceval-tax_accountant,3a5e3c,accuracy,gen,71.43
ceval-physician,6e277d,accuracy,gen,75.51
cmmlu-agronomy,4c7f2c,accuracy,gen,62.13
cmmlu-anatomy,ea09bf,accuracy,gen,75.00
cmmlu-ancient_chinese,f7c97f,accuracy,gen,32.93
cmmlu-arts,dd77b8,accuracy,gen,86.25
cmmlu-astronomy,1e49db,accuracy,gen,34.55
cmmlu-business_ethics,dc78cb,accuracy,gen,62.68
cmmlu-chinese_civil_service_exam,1de82c,accuracy,gen,66.25
cmmlu-chinese_driving_rule,b8a42b,accuracy,gen,96.18
cmmlu-chinese_food_culture,2d568a,accuracy,gen,59.56
cmmlu-chinese_foreign_policy,dc2427,accuracy,gen,68.22
cmmlu-chinese_history,4cc7ed,accuracy,gen,82.97
cmmlu-chinese_literature,af3c41,accuracy,gen,54.90
cmmlu-chinese_teacher_qualification,87de11,accuracy,gen,86.03
cmmlu-clinical_knowledge,c55b1d,accuracy,gen,62.03
cmmlu-college_actuarial_science,d3c360,accuracy,gen,37.74
cmmlu-college_education,df8790,accuracy,gen,80.37
cmmlu-college_engineering_hydrology,673f23,accuracy,gen,65.09
cmmlu-college_law,524c3a,accuracy,gen,62.04
cmmlu-college_mathematics,e4ebad,accuracy,gen,25.71
cmmlu-college_medical_statistics,55af35,accuracy,gen,58.49
cmmlu-college_medicine,702f48,accuracy,gen,70.70
cmmlu-computer_science,637007,accuracy,gen,69.12
cmmlu-computer_security,932b6b,accuracy,gen,87.72
cmmlu-conceptual_physics,cfc077,accuracy,gen,78.23
cmmlu-construction_project_management,968a4a,accuracy,gen,55.40
cmmlu-economics,ddaf7c,accuracy,gen,74.21
cmmlu-education,c35963,accuracy,gen,74.85
cmmlu-electrical_engineering,70e98a,accuracy,gen,77.91
cmmlu-elementary_chinese,cbcd6a,accuracy,gen,69.05
cmmlu-elementary_commonsense,a67f37,accuracy,gen,74.75
cmmlu-elementary_information_and_technology,d34d2a,accuracy,gen,86.97
cmmlu-elementary_mathematics,a9d403,accuracy,gen,43.91
cmmlu-ethnology,31955f,accuracy,gen,65.19
cmmlu-food_science,741d8e,accuracy,gen,64.34
cmmlu-genetics,c326f7,accuracy,gen,51.14
cmmlu-global_facts,0a1236,accuracy,gen,63.09
cmmlu-high_school_biology,2be811,accuracy,gen,75.15
cmmlu-high_school_chemistry,d63c05,accuracy,gen,54.55
cmmlu-high_school_geography,5cd489,accuracy,gen,74.58
cmmlu-high_school_mathematics,6b2087,accuracy,gen,37.80
cmmlu-high_school_physics,3df353,accuracy,gen,52.73
cmmlu-high_school_politics,7a88d8,accuracy,gen,67.83
cmmlu-human_sexuality,54ac98,accuracy,gen,63.49
cmmlu-international_law,0f5d40,accuracy,gen,54.59
cmmlu-journalism,a4f6a0,accuracy,gen,65.70
cmmlu-jurisprudence,7843da,accuracy,gen,76.40
cmmlu-legal_and_moral_basis,f906b0,accuracy,gen,96.26
cmmlu-logical,15a71b,accuracy,gen,62.60
cmmlu-machine_learning,bc6ad4,accuracy,gen,61.48
cmmlu-management,e5e8db,accuracy,gen,78.57
cmmlu-marketing,8b4c18,accuracy,gen,78.89
cmmlu-marxist_theory,75eb79,accuracy,gen,95.24
cmmlu-modern_chinese,83a9b7,accuracy,gen,47.41
cmmlu-nutrition,adfff7,accuracy,gen,71.03
cmmlu-philosophy,75e22d,accuracy,gen,74.29
cmmlu-professional_accounting,0edc91,accuracy,gen,84.57
cmmlu-professional_law,d24af5,accuracy,gen,63.51
cmmlu-professional_medicine,134139,accuracy,gen,63.30
cmmlu-professional_psychology,ec920e,accuracy,gen,79.31
cmmlu-public_relations,70ee06,accuracy,gen,67.82
cmmlu-security_study,45f96f,accuracy,gen,82.96
cmmlu-sociology,485285,accuracy,gen,66.81
cmmlu-sports_science,838cfe,accuracy,gen,64.85
cmmlu-traditional_chinese_medicine,3bbf64,accuracy,gen,76.76
cmmlu-virology,8925bf,accuracy,gen,72.19
cmmlu-world_history,57c97c,accuracy,gen,76.40
cmmlu-world_religions,1d0f4b,accuracy,gen,65.00
math,265cce,accuracy,gen,22.70
mbpp,830460,score,gen,33.80
mbpp,830460,pass,gen,169.00
mbpp,830460,timeout,gen,0.00
mbpp,830460,failed,gen,70.00
mbpp,830460,wrong_answer,gen,261.00
GPQA_extended,4baadb,accuracy,gen,29.30
GPQA_main,4baadb,accuracy,gen,28.35
GPQA_diamond,4baadb,accuracy,gen,26.26
bbh-temporal_sequences,e43931,score,gen,16.40
bbh-disambiguation_qa,d52c61,score,gen,43.20
bbh-date_understanding,a8000b,score,gen,46.80
bbh-tracking_shuffled_objects_three_objects,7964c0,score,gen,32.40
bbh-penguins_in_a_table,fceb27,score,gen,36.99
bbh-geometric_shapes,503c8f,score,gen,32.00
bbh-snarks,42d6ca,score,gen,55.06
bbh-ruin_names,408de8,score,gen,26.80
bbh-tracking_shuffled_objects_seven_objects,7964c0,score,gen,11.20
bbh-tracking_shuffled_objects_five_objects,7964c0,score,gen,16.40
bbh-logical_deduction_three_objects,45ebc5,score,gen,43.60
bbh-hyperbaton,5e5016,score,gen,64.80
bbh-logical_deduction_five_objects,45ebc5,score,gen,26.00
bbh-logical_deduction_seven_objects,45ebc5,score,gen,22.80
bbh-movie_recommendation,cc2fde,score,gen,47.60
bbh-salient_translation_error_detection,5b5f35,score,gen,30.00
bbh-reasoning_about_colored_objects,1cb761,score,gen,39.60
bbh-multistep_arithmetic_two,30f91e,score,gen,19.60
bbh-navigate,1576d9,score,gen,64.80
bbh-dyck_languages,805bea,score,gen,0.80
bbh-word_sorting,9a3f78,score,gen,3.60
bbh-sports_understanding,d3fa77,score,gen,57.20
bbh-boolean_expressions,612c92,score,gen,73.20
bbh-object_counting,781e5c,score,gen,56.40
bbh-formal_fallacies,eada96,score,gen,51.60
bbh-causal_judgement,89eaa4,score,gen,51.87
bbh-web_of_lies,0c0441,score,gen,65.20
IFEval,3321a3,Prompt-level-strict-accuracy,gen,24.03
IFEval,3321a3,Inst-level-strict-accuracy,gen,37.05
IFEval,3321a3,Prompt-level-loose-accuracy,gen,25.14
IFEval,3321a3,Inst-level-loose-accuracy,gen,38.49
ARC-c,1e0de5,accuracy,gen,71.19
hellaswag,6faab5,accuracy,gen,48.75
openai_humaneval,8e312c,humaneval_pass@1,gen,41.46
mmlu-humanities,-,naive_average,gen,59.04
mmlu-stem,-,naive_average,gen,45.70
mmlu-social-science,-,naive_average,gen,63.69
mmlu-other,-,naive_average,gen,58.84
mmlu,-,naive_average,gen,55.53
mmlu-weighted,-,weighted_average,gen,54.14
cmmlu-humanities,-,naive_average,gen,70.56
cmmlu-stem,-,naive_average,gen,57.10
cmmlu-social-science,-,naive_average,gen,69.72
cmmlu-other,-,naive_average,gen,73.06
cmmlu-china-specific,-,naive_average,gen,66.90
cmmlu,-,naive_average,gen,67.43
ceval-stem,-,naive_average,gen,59.70
ceval-social-science,-,naive_average,gen,82.16
ceval-humanities,-,naive_average,gen,75.53
ceval-other,-,naive_average,gen,69.50
ceval-hard,-,naive_average,gen,44.00
ceval,-,naive_average,gen,69.44
bbh,-,naive_average,gen,38.37