
# MaziyarPanahi/TheTop-5x7B-Instruct-S2-v0.1

A merge of top 7B models using the SLERP method. The merged model has 7.24B parameters, stored as BF16 Safetensors.
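
A minimal usage sketch with the Transformers library, assuming the model loads like any standard Hugging Face causal LM (the exact prompt/chat format is not specified in this card and may follow any of the merged source models):

```python
# Minimal usage sketch (assumption: standard causal-LM loading; prompt format may vary).
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "MaziyarPanahi/TheTop-5x7B-Instruct-S2-v0.1"
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    torch_dtype=torch.bfloat16,  # weights are stored in BF16
    device_map="auto",
)

prompt = "Explain what a SLERP model merge is in one short paragraph."
inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
outputs = model.generate(**inputs, max_new_tokens=200, do_sample=False)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```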

This model was built with mergekit, a toolkit for merging pre-trained language models. mergekit uses an out-of-core approach to perform unreasonably elaborate merges in resource-constrained situations: merges can be run entirely on CPU or accelerated with as little as 8 GB of VRAM. Many merging algorithms, including SLERP, are supported.
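
For intuition, here is a minimal sketch of what spherical linear interpolation (SLERP) does to a pair of same-shaped weight tensors. It is an illustration only, not mergekit's implementation; in practice the interpolation factor `t` is configured per tensor group in a mergekit YAML config, and the source models for this merge are not restated here.

```python
# Illustrative SLERP between two same-shaped weight tensors (not mergekit's code).
import torch

def slerp(t: float, v0: torch.Tensor, v1: torch.Tensor, eps: float = 1e-8) -> torch.Tensor:
    """Spherically interpolate from v0 (t=0) to v1 (t=1)."""
    # Angle between the two tensors, treated as flattened vectors.
    v0_u = v0.flatten() / (v0.norm() + eps)
    v1_u = v1.flatten() / (v1.norm() + eps)
    dot = torch.clamp(torch.dot(v0_u, v1_u), -1.0, 1.0)
    theta = torch.arccos(dot)
    if theta < 1e-4:  # nearly colinear: plain linear interpolation is stable
        return (1 - t) * v0 + t * v1
    sin_theta = torch.sin(theta)
    # Interpolation weights follow the great circle between the two points.
    s0 = torch.sin((1 - t) * theta) / sin_theta
    s1 = torch.sin(t * theta) / sin_theta
    return s0 * v0 + s1 * v1

# Blend two hypothetical weight matrices halfway between the parents.
a, b = torch.randn(1024, 1024), torch.randn(1024, 1024)
merged = slerp(0.5, a, b)
```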

# Eval

```json
{
    "all": {
        "acc": 0.6545868511485138,
        "acc_stderr": 0.031980293841566164,
        "acc_norm": 0.6542757501692061,
        "acc_norm_stderr": 0.03263807517879597,
        "mc1": 0.45165238678090575,
        "mc1_stderr": 0.017421480300277643,
        "mc2": 0.6217500644350165,
        "mc2_stderr": 0.015583825644663436
    },
    "harness|arc:challenge|25": {
        "acc": 0.6723549488054608,
        "acc_stderr": 0.01371584794071934,
        "acc_norm": 0.6945392491467577,
        "acc_norm_stderr": 0.01346008047800251
    },
    "harness|hellaswag|10": {
        "acc": 0.7046405098585939,
        "acc_stderr": 0.0045527183605131,
        "acc_norm": 0.871539533957379,
        "acc_norm_stderr": 0.0033391798350182853
    },
    "harness|hendrycksTest-abstract_algebra|5": {
        "acc": 0.37,
        "acc_stderr": 0.048523658709391,
        "acc_norm": 0.37,
        "acc_norm_stderr": 0.048523658709391
    },
    "harness|hendrycksTest-anatomy|5": {
        "acc": 0.6148148148148148,
        "acc_stderr": 0.04203921040156279,
        "acc_norm": 0.6148148148148148,
        "acc_norm_stderr": 0.04203921040156279
    },
    "harness|hendrycksTest-astronomy|5": {
        "acc": 0.6907894736842105,
        "acc_stderr": 0.037610708698674805,
        "acc_norm": 0.6907894736842105,
        "acc_norm_stderr": 0.037610708698674805
    },
    "harness|hendrycksTest-business_ethics|5": {
        "acc": 0.63,
        "acc_stderr": 0.04852365870939099,
        "acc_norm": 0.63,
        "acc_norm_stderr": 0.04852365870939099
    },
    "harness|hendrycksTest-clinical_knowledge|5": {
        "acc": 0.7132075471698113,
        "acc_stderr": 0.02783491252754407,
        "acc_norm": 0.7132075471698113,
        "acc_norm_stderr": 0.02783491252754407
    },
    "harness|hendrycksTest-college_biology|5": {
        "acc": 0.7638888888888888,
        "acc_stderr": 0.03551446610810826,
        "acc_norm": 0.7638888888888888,
        "acc_norm_stderr": 0.03551446610810826
    },
    "harness|hendrycksTest-college_chemistry|5": {
        "acc": 0.47,
        "acc_stderr": 0.050161355804659205,
        "acc_norm": 0.47,
        "acc_norm_stderr": 0.050161355804659205
    },
    "harness|hendrycksTest-college_computer_science|5": {
        "acc": 0.57,
        "acc_stderr": 0.04975698519562428,
        "acc_norm": 0.57,
        "acc_norm_stderr": 0.04975698519562428
    },
    "harness|hendrycksTest-college_mathematics|5": {
        "acc": 0.27,
        "acc_stderr": 0.0446196043338474,
        "acc_norm": 0.27,
        "acc_norm_stderr": 0.0446196043338474
    },
    "harness|hendrycksTest-college_medicine|5": {
        "acc": 0.6820809248554913,
        "acc_stderr": 0.0355068398916558,
        "acc_norm": 0.6820809248554913,
        "acc_norm_stderr": 0.0355068398916558
    },
    "harness|hendrycksTest-college_physics|5": {
        "acc": 0.4019607843137255,
        "acc_stderr": 0.04878608714466996,
        "acc_norm": 0.4019607843137255,
        "acc_norm_stderr": 0.04878608714466996
    },
    "harness|hendrycksTest-computer_security|5": {
        "acc": 0.77,
        "acc_stderr": 0.04229525846816506,
        "acc_norm": 0.77,
        "acc_norm_stderr": 0.04229525846816506
    },
    "harness|hendrycksTest-conceptual_physics|5": {
        "acc": 0.5829787234042553,
        "acc_stderr": 0.03223276266711712,
        "acc_norm": 0.5829787234042553,
        "acc_norm_stderr": 0.03223276266711712
    },
    "harness|hendrycksTest-econometrics|5": {
        "acc": 0.5175438596491229,
        "acc_stderr": 0.04700708033551038,
        "acc_norm": 0.5175438596491229,
        "acc_norm_stderr": 0.04700708033551038
    },
    "harness|hendrycksTest-electrical_engineering|5": {
        "acc": 0.5586206896551724,
        "acc_stderr": 0.04137931034482757,
        "acc_norm": 0.5586206896551724,
        "acc_norm_stderr": 0.04137931034482757
    },
    "harness|hendrycksTest-elementary_mathematics|5": {
        "acc": 0.41798941798941797,
        "acc_stderr": 0.025402555503260912,
        "acc_norm": 0.41798941798941797,
        "acc_norm_stderr": 0.025402555503260912
    },
    "harness|hendrycksTest-formal_logic|5": {
        "acc": 0.47619047619047616,
        "acc_stderr": 0.04467062628403273,
        "acc_norm": 0.47619047619047616,
        "acc_norm_stderr": 0.04467062628403273
    },
    "harness|hendrycksTest-global_facts|5": {
        "acc": 0.34,
        "acc_stderr": 0.04760952285695235,
        "acc_norm": 0.34,
        "acc_norm_stderr": 0.04760952285695235
    },
    "harness|hendrycksTest-high_school_biology|5": {
        "acc": 0.7903225806451613,
        "acc_stderr": 0.023157879349083522,
        "acc_norm": 0.7903225806451613,
        "acc_norm_stderr": 0.023157879349083522
    },
    "harness|hendrycksTest-high_school_chemistry|5": {
        "acc": 0.4975369458128079,
        "acc_stderr": 0.03517945038691063,
        "acc_norm": 0.4975369458128079,
        "acc_norm_stderr": 0.03517945038691063
    },
    "harness|hendrycksTest-high_school_computer_science|5": {
        "acc": 0.68,
        "acc_stderr": 0.04688261722621505,
        "acc_norm": 0.68,
        "acc_norm_stderr": 0.04688261722621505
    },
    "harness|hendrycksTest-high_school_european_history|5": {
        "acc": 0.7696969696969697,
        "acc_stderr": 0.0328766675860349,
        "acc_norm": 0.7696969696969697,
        "acc_norm_stderr": 0.0328766675860349
    },
    "harness|hendrycksTest-high_school_geography|5": {
        "acc": 0.7828282828282829,
        "acc_stderr": 0.029376616484945633,
        "acc_norm": 0.7828282828282829,
        "acc_norm_stderr": 0.029376616484945633
    },
    "harness|hendrycksTest-high_school_government_and_politics|5": {
        "acc": 0.9015544041450777,
        "acc_stderr": 0.021500249576033456,
        "acc_norm": 0.9015544041450777,
        "acc_norm_stderr": 0.021500249576033456
    },
    "harness|hendrycksTest-high_school_macroeconomics|5": {
        "acc": 0.6717948717948717,
        "acc_stderr": 0.023807633198657266,
        "acc_norm": 0.6717948717948717,
        "acc_norm_stderr": 0.023807633198657266
    },
    "harness|hendrycksTest-high_school_mathematics|5": {
        "acc": 0.34444444444444444,
        "acc_stderr": 0.02897264888484427,
        "acc_norm": 0.34444444444444444,
        "acc_norm_stderr": 0.02897264888484427
    },
    "harness|hendrycksTest-high_school_microeconomics|5": {
        "acc": 0.6638655462184874,
        "acc_stderr": 0.030684737115135363,
        "acc_norm": 0.6638655462184874,
        "acc_norm_stderr": 0.030684737115135363
    },
    "harness|hendrycksTest-high_school_physics|5": {
        "acc": 0.304635761589404,
        "acc_stderr": 0.03757949922943343,
        "acc_norm": 0.304635761589404,
        "acc_norm_stderr": 0.03757949922943343
    },
    "harness|hendrycksTest-high_school_psychology|5": {
        "acc": 0.8458715596330275,
        "acc_stderr": 0.015480826865374303,
        "acc_norm": 0.8458715596330275,
        "acc_norm_stderr": 0.015480826865374303
    },
    "harness|hendrycksTest-high_school_statistics|5": {
        "acc": 0.5185185185185185,
        "acc_stderr": 0.03407632093854051,
        "acc_norm": 0.5185185185185185,
        "acc_norm_stderr": 0.03407632093854051
    },
    "harness|hendrycksTest-high_school_us_history|5": {
        "acc": 0.8382352941176471,
        "acc_stderr": 0.025845017986926917,
        "acc_norm": 0.8382352941176471,
        "acc_norm_stderr": 0.025845017986926917
    },
    "harness|hendrycksTest-high_school_world_history|5": {
        "acc": 0.810126582278481,
        "acc_stderr": 0.02553010046023349,
        "acc_norm": 0.810126582278481,
        "acc_norm_stderr": 0.02553010046023349
    },
    "harness|hendrycksTest-human_aging|5": {
        "acc": 0.6905829596412556,
        "acc_stderr": 0.03102441174057221,
        "acc_norm": 0.6905829596412556,
        "acc_norm_stderr": 0.03102441174057221
    },
    "harness|hendrycksTest-human_sexuality|5": {
        "acc": 0.7786259541984732,
        "acc_stderr": 0.036412970813137296,
        "acc_norm": 0.7786259541984732,
        "acc_norm_stderr": 0.036412970813137296
    },
    "harness|hendrycksTest-international_law|5": {
        "acc": 0.8099173553719008,
        "acc_stderr": 0.03581796951709282,
        "acc_norm": 0.8099173553719008,
        "acc_norm_stderr": 0.03581796951709282
    },
    "harness|hendrycksTest-jurisprudence|5": {
        "acc": 0.7685185185185185,
        "acc_stderr": 0.04077494709252627,
        "acc_norm": 0.7685185185185185,
        "acc_norm_stderr": 0.04077494709252627
    },
    "harness|hendrycksTest-logical_fallacies|5": {
        "acc": 0.7607361963190185,
        "acc_stderr": 0.0335195387952127,
        "acc_norm": 0.7607361963190185,
        "acc_norm_stderr": 0.0335195387952127
    },
    "harness|hendrycksTest-machine_learning|5": {
        "acc": 0.45535714285714285,
        "acc_stderr": 0.047268355537191,
        "acc_norm": 0.45535714285714285,
        "acc_norm_stderr": 0.047268355537191
    },
    "harness|hendrycksTest-management|5": {
        "acc": 0.8058252427184466,
        "acc_stderr": 0.03916667762822584,
        "acc_norm": 0.8058252427184466,
        "acc_norm_stderr": 0.03916667762822584
    },
    "harness|hendrycksTest-marketing|5": {
        "acc": 0.8675213675213675,
        "acc_stderr": 0.022209309073165612,
        "acc_norm": 0.8675213675213675,
        "acc_norm_stderr": 0.022209309073165612
    },
    "harness|hendrycksTest-medical_genetics|5": {
        "acc": 0.71,
        "acc_stderr": 0.045604802157206845,
        "acc_norm": 0.71,
        "acc_norm_stderr": 0.045604802157206845
    },
    "harness|hendrycksTest-miscellaneous|5": {
        "acc": 0.8352490421455939,
        "acc_stderr": 0.013265346261323788,
        "acc_norm": 0.8352490421455939,
        "acc_norm_stderr": 0.013265346261323788
    },
    "harness|hendrycksTest-moral_disputes|5": {
        "acc": 0.7543352601156069,
        "acc_stderr": 0.023176298203992005,
        "acc_norm": 0.7543352601156069,
        "acc_norm_stderr": 0.023176298203992005
    },
    "harness|hendrycksTest-moral_scenarios|5": {
        "acc": 0.4547486033519553,
        "acc_stderr": 0.016653875777524006,
        "acc_norm": 0.4547486033519553,
        "acc_norm_stderr": 0.016653875777524006
    },
    "harness|hendrycksTest-nutrition|5": {
        "acc": 0.7483660130718954,
        "acc_stderr": 0.0248480182638752,
        "acc_norm": 0.7483660130718954,
        "acc_norm_stderr": 0.0248480182638752
    },
    "harness|hendrycksTest-philosophy|5": {
        "acc": 0.7202572347266881,
        "acc_stderr": 0.02549425935069491,
        "acc_norm": 0.7202572347266881,
        "acc_norm_stderr": 0.02549425935069491
    },
    "harness|hendrycksTest-prehistory|5": {
        "acc": 0.7592592592592593,
        "acc_stderr": 0.02378858355165854,
        "acc_norm": 0.7592592592592593,
        "acc_norm_stderr": 0.02378858355165854
    },
    "harness|hendrycksTest-professional_accounting|5": {
        "acc": 0.4787234042553192,
        "acc_stderr": 0.029800481645628693,
        "acc_norm": 0.4787234042553192,
        "acc_norm_stderr": 0.029800481645628693
    },
    "harness|hendrycksTest-professional_law|5": {
        "acc": 0.4745762711864407,
        "acc_stderr": 0.012753716929101008,
        "acc_norm": 0.4745762711864407,
        "acc_norm_stderr": 0.012753716929101008
    },
    "harness|hendrycksTest-professional_medicine|5": {
        "acc": 0.7095588235294118,
        "acc_stderr": 0.027576468622740536,
        "acc_norm": 0.7095588235294118,
        "acc_norm_stderr": 0.027576468622740536
    },
    "harness|hendrycksTest-professional_psychology|5": {
        "acc": 0.6928104575163399,
        "acc_stderr": 0.01866335967146367,
        "acc_norm": 0.6928104575163399,
        "acc_norm_stderr": 0.01866335967146367
    },
    "harness|hendrycksTest-public_relations|5": {
        "acc": 0.6727272727272727,
        "acc_stderr": 0.0449429086625209,
        "acc_norm": 0.6727272727272727,
        "acc_norm_stderr": 0.0449429086625209
    },
    "harness|hendrycksTest-security_studies|5": {
        "acc": 0.7387755102040816,
        "acc_stderr": 0.02812342933514278,
        "acc_norm": 0.7387755102040816,
        "acc_norm_stderr": 0.02812342933514278
    },
    "harness|hendrycksTest-sociology|5": {
        "acc": 0.845771144278607,
        "acc_stderr": 0.025538433368578337,
        "acc_norm": 0.845771144278607,
        "acc_norm_stderr": 0.025538433368578337
    },
    "harness|hendrycksTest-us_foreign_policy|5": {
        "acc": 0.86,
        "acc_stderr": 0.0348735088019777,
        "acc_norm": 0.86,
        "acc_norm_stderr": 0.0348735088019777
    },
    "harness|hendrycksTest-virology|5": {
        "acc": 0.5481927710843374,
        "acc_stderr": 0.03874371556587953,
        "acc_norm": 0.5481927710843374,
        "acc_norm_stderr": 0.03874371556587953
    },
    "harness|hendrycksTest-world_religions|5": {
        "acc": 0.8362573099415205,
        "acc_stderr": 0.028380919596145866,
        "acc_norm": 0.8362573099415205,
        "acc_norm_stderr": 0.028380919596145866
    },
    "harness|truthfulqa:mc|0": {
        "mc1": 0.45165238678090575,
        "mc1_stderr": 0.017421480300277643,
        "mc2": 0.6217500644350165,
        "mc2_stderr": 0.015583825644663436
    },
    "harness|winogrande|5": {
        "acc": 0.7963693764798737,
        "acc_stderr": 0.011317798781626913
    },
    "harness|gsm8k|5": {
        "acc": 0.7202426080363912,
        "acc_stderr": 0.01236438401673532
    }
}
```

# [Open LLM Leaderboard Evaluation Results](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)
Detailed results can be found [here](https://huggingface.co/datasets/open-llm-leaderboard/details_MaziyarPanahi__TheTop-5x7B-Instruct-S2-v0.1).

|             Metric              |Value|
|---------------------------------|----:|
|Avg.                             |72.57|
|AI2 Reasoning Challenge (25-Shot)|69.45|
|HellaSwag (10-Shot)              |87.15|
|MMLU (5-Shot)                    |64.98|
|TruthfulQA (0-shot)              |62.18|
|Winogrande (5-shot)              |79.64|
|GSM8k (5-shot)                   |72.02|
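
The table values map directly onto the raw harness output above: ARC and HellaSwag report `acc_norm`, TruthfulQA reports `mc2`, Winogrande and GSM8k report `acc`, and the MMLU figure is the mean accuracy over the 57 `hendrycksTest-*` subtasks. The `Avg.` row is the arithmetic mean of the six benchmark scores, as the small check below confirms:

```python
# Sanity check: the leaderboard "Avg." is the mean of the six benchmark scores.
scores = {
    "ARC (25-shot, acc_norm)":       69.45,
    "HellaSwag (10-shot, acc_norm)": 87.15,
    "MMLU (5-shot, acc)":            64.98,
    "TruthfulQA (0-shot, mc2)":      62.18,
    "Winogrande (5-shot, acc)":      79.64,
    "GSM8k (5-shot, acc)":           72.02,
}
average = sum(scores.values()) / len(scores)
print(f"Avg. = {average:.2f}")  # -> Avg. = 72.57, matching the table above
```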