{
    "herbert-base-retrieval-v2": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/ipipan/herbert-base-retrieval-v2\">herbert-base-retrieval-v2</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/ipipan/herbert-base-retrieval-v2\">herbert-base-retrieval-v2</a>",
                    "AllegroReviews": 34.11,
                    "CBD": 68.35,
                    "MassiveIntentClassification (pl)": 65.53,
                    "MassiveScenarioClassification (pl)": 68.51,
                    "PAC": 68.4,
                    "PolEmo2.0-IN": 64.18,
                    "PolEmo2.0-OUT": 45.73
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/ipipan/herbert-base-retrieval-v2\">herbert-base-retrieval-v2</a>",
                    "8TagsClustering": 28.15
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/ipipan/herbert-base-retrieval-v2\">herbert-base-retrieval-v2</a>",
                    "CDSC-E": 63.31,
                    "PPC": 84.18,
                    "PSC": 98.87,
                    "SICK-E-PL": 54.93
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/ipipan/herbert-base-retrieval-v2\">herbert-base-retrieval-v2</a>"
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/ipipan/herbert-base-retrieval-v2\">herbert-base-retrieval-v2</a>",
                    "ArguAna-PL": 41.97,
                    "DBPedia-PL": 24.07,
                    "FiQA-PL": 24.25,
                    "HotpotQA-PL": 43.41,
                    "MSMARCO-PL": 51.56,
                    "NFCorpus-PL": 25.95,
                    "NQ-PL": 35.09,
                    "Quora-PL": 78.86,
                    "SCIDOCS-PL": 11.0,
                    "SciFact-PL": 51.92,
                    "TRECCOVID-PL": 42.64
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/ipipan/herbert-base-retrieval-v2\">herbert-base-retrieval-v2</a>",
                    "CDSC-R": 86.18,
                    "SICK-R-PL": 64.67,
                    "STS22 (pl)": 39.73
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/ipipan/herbert-base-retrieval-v2\">herbert-base-retrieval-v2</a>"
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/ipipan/herbert-base-retrieval-v2\">herbert-base-retrieval-v2</a>"
                }
            ]
        }
    },
    "multilingual-e5-base": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/multilingual-e5-base\">multilingual-e5-base</a>",
                    "BornholmBitextMining": 46.4
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/multilingual-e5-base\">multilingual-e5-base</a>",
                    "AllegroReviews": 40.85,
                    "AmazonReviewsClassification (fr)": 40.94,
                    "AngryTweetsClassification": 54.65,
                    "CBD": 62.66,
                    "DKHateClassification": 63.53,
                    "DanishPoliticalCommentsClassification": 36.69,
                    "IFlyTek": 44.93,
                    "JDReview": 76.21,
                    "LccSentimentClassification": 59.67,
                    "MTOPDomainClassification (fr)": 84.79,
                    "MTOPIntentClassification (fr)": 55.51,
                    "MasakhaNEWSClassification (fra)": 79.69,
                    "MassiveIntentClassification (da)": 60.16,
                    "MassiveIntentClassification (nb)": 59.83,
                    "MassiveIntentClassification (sv)": 61.78,
                    "MassiveIntentClassification (pl)": 61.04,
                    "MassiveScenarioClassification (da)": 67.46,
                    "MassiveScenarioClassification (nb)": 66.18,
                    "MassiveScenarioClassification (sv)": 69.15,
                    "MassiveScenarioClassification (pl)": 66.11,
                    "MultilingualSentiment": 65.28,
                    "NoRecClassification": 57.58,
                    "NordicLangClassification": 75.94,
                    "NorwegianParliament": 59.94,
                    "OnlineShopping": 88.4,
                    "PAC": 70.87,
                    "PolEmo2.0-IN": 67.66,
                    "PolEmo2.0-OUT": 43.91,
                    "ScalaDaClassification": 50.79,
                    "ScalaNbClassification": 50.32,
                    "TNews": 47.06,
                    "Waimai": 84.42
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/multilingual-e5-base\">multilingual-e5-base</a>",
                    "8TagsClustering": 24.97,
                    "AlloProfClusteringP2P": 62.09,
                    "AlloProfClusteringS2S": 32.98,
                    "CLSClusteringP2P": 32.41,
                    "CLSClusteringS2S": 36.99,
                    "HALClusteringS2S": 22.48,
                    "MLSUMClusteringP2P": 43.48,
                    "MLSUMClusteringS2S": 38.53,
                    "MasakhaNEWSClusteringP2P (fra)": 47.91,
                    "MasakhaNEWSClusteringS2S (fra)": 51.16,
                    "ThuNewsClusteringP2P": 40.98,
                    "ThuNewsClusteringS2S": 52.36
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/multilingual-e5-base\">multilingual-e5-base</a>",
                    "CDSC-E": 72.67,
                    "Cmnli": 74.51,
                    "Ocnli": 59.63,
                    "OpusparcusPC (fr)": 92.72,
                    "PPC": 88.01,
                    "PSC": 99.14,
                    "PawsX (fr)": 56.93,
                    "SICK-E-PL": 68.77
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/multilingual-e5-base\">multilingual-e5-base</a>",
                    "AlloprofReranking": 58.1,
                    "CMedQAv1": 65.21,
                    "CMedQAv2": 66.06,
                    "MMarcoReranking": 21.76,
                    "SyntecReranking": 85.43,
                    "T2Reranking": 64.39
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/multilingual-e5-base\">multilingual-e5-base</a>",
                    "AlloprofRetrieval": 36.21,
                    "ArguAna-PL": 42.81,
                    "BSARDRetrieval": 0.0,
                    "CmedqaRetrieval": 27.2,
                    "CovidRetrieval": 73.45,
                    "DBPedia-PL": 30.23,
                    "DuRetrieval": 81.64,
                    "EcomRetrieval": 54.17,
                    "FiQA-PL": 25.52,
                    "HotpotQA-PL": 63.52,
                    "MMarcoRetrieval": 76.04,
                    "MSMARCO-PL": 29.52,
                    "MedicalRetrieval": 48.35,
                    "MintakaRetrieval (fr)": 23.46,
                    "NFCorpus-PL": 25.98,
                    "NQ-PL": 44.8,
                    "Quora-PL": 81.22,
                    "SCIDOCS-PL": 12.35,
                    "SciFact-PL": 62.11,
                    "SyntecRetrieval": 80.49,
                    "T2Retrieval": 70.86,
                    "TRECCOVID-PL": 66.06,
                    "VideoRetrieval": 61.3,
                    "XPQARetrieval (fr)": 65.81
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/multilingual-e5-base\">multilingual-e5-base</a>",
                    "AFQMC": 29.67,
                    "ATEC": 37.01,
                    "BQ": 45.45,
                    "CDSC-R": 90.08,
                    "LCQMC": 74.15,
                    "PAWSX": 12.14,
                    "QBQTC": 28.81,
                    "SICK-R-PL": 71.23,
                    "SICKFr": 76.23,
                    "STS22 (zh)": 65.64,
                    "STS22 (pl)": 34.07,
                    "STSB": 79.05,
                    "STSBenchmarkMultilingualSTS (fr)": 80.62
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/multilingual-e5-base\">multilingual-e5-base</a>",
                    "SummEvalFr": 30.76
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/multilingual-e5-base\">multilingual-e5-base</a>"
                }
            ]
        }
    },
    "nomic-embed-text-v1.5-512": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/nomic-ai/nomic-embed-text-v1.5\">nomic-embed-text-v1.5-512</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/nomic-ai/nomic-embed-text-v1.5\">nomic-embed-text-v1.5-512</a>",
                    "AmazonCounterfactualClassification (en)": 74.27,
                    "AmazonPolarityClassification": 91.89,
                    "AmazonReviewsClassification (en)": 46.97,
                    "Banking77Classification": 84.15,
                    "EmotionClassification": 47.73,
                    "ImdbClassification": 85.47,
                    "MTOPDomainClassification (en)": 92.62,
                    "MTOPIntentClassification (en)": 74.27,
                    "MassiveIntentClassification (en)": 73.07,
                    "MassiveScenarioClassification (en)": 76.82,
                    "ToxicConversationsClassification": 71.25,
                    "TweetSentimentExtractionClassification": 60.4
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/nomic-ai/nomic-embed-text-v1.5\">nomic-embed-text-v1.5-512</a>",
                    "ArxivClusteringP2P": 45.45,
                    "ArxivClusteringS2S": 36.19,
                    "BiorxivClusteringP2P": 38.41,
                    "BiorxivClusteringS2S": 32.28,
                    "MedrxivClusteringP2P": 34.47,
                    "MedrxivClusteringS2S": 31.43,
                    "RedditClustering": 55.9,
                    "RedditClusteringP2P": 60.58,
                    "StackExchangeClustering": 62.94,
                    "StackExchangeClusteringP2P": 33.81,
                    "TwentyNewsgroupsClustering": 49.36
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/nomic-ai/nomic-embed-text-v1.5\">nomic-embed-text-v1.5-512</a>",
                    "SprintDuplicateQuestions": 92.91,
                    "TwitterSemEval2015": 74.3,
                    "TwitterURLCorpus": 86.57
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/nomic-ai/nomic-embed-text-v1.5\">nomic-embed-text-v1.5-512</a>",
                    "AskUbuntuDupQuestions": 61.6,
                    "MindSmallReranking": 30.34,
                    "SciDocsRR": 80.33,
                    "StackOverflowDupQuestions": 50.32
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/nomic-ai/nomic-embed-text-v1.5\">nomic-embed-text-v1.5-512</a>",
                    "ArguAna": 47.45,
                    "CQADupstackRetrieval": 39.06,
                    "ClimateFEVER": 40.7,
                    "DBPedia": 42.96,
                    "FEVER": 85.7,
                    "FiQA2018": 36.92,
                    "HotpotQA": 71.48,
                    "MSMARCO": 42.29,
                    "NFCorpus": 33.31,
                    "NQ": 58.83,
                    "QuoraRetrieval": 87.87,
                    "SCIDOCS": 17.88,
                    "SciFact": 70.12,
                    "TRECCOVID": 82.12,
                    "Touche2020": 29.24
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/nomic-ai/nomic-embed-text-v1.5\">nomic-embed-text-v1.5-512</a>",
                    "BIOSSES": 83.3,
                    "SICK-R": 79.27,
                    "STS12": 78.3,
                    "STS13": 85.81,
                    "STS14": 81.38,
                    "STS15": 86.79,
                    "STS16": 84.56,
                    "STS17 (en-en)": 87.25,
                    "STS22 (en)": 65.24,
                    "STSBenchmark": 85.14
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/nomic-ai/nomic-embed-text-v1.5\">nomic-embed-text-v1.5-512</a>",
                    "SummEval": 30.47
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/nomic-ai/nomic-embed-text-v1.5\">nomic-embed-text-v1.5-512</a>"
                }
            ]
        }
    },
    "bge-base-zh-v1.5": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/BAAI/bge-base-zh-v1.5\">bge-base-zh-v1.5</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/BAAI/bge-base-zh-v1.5\">bge-base-zh-v1.5</a>",
                    "AmazonReviewsClassification (zh)": 40.15,
                    "IFlyTek": 48.62,
                    "JDReview": 83.62,
                    "MassiveIntentClassification (zh-CN)": 67.93,
                    "MassiveScenarioClassification (zh-CN)": 73.98,
                    "MultilingualSentiment": 70.67,
                    "OnlineShopping": 91.26,
                    "TNews": 51.08,
                    "Waimai": 85.36
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/BAAI/bge-base-zh-v1.5\">bge-base-zh-v1.5</a>",
                    "CLSClusteringP2P": 39.91,
                    "CLSClusteringS2S": 37.63,
                    "ThuNewsClusteringP2P": 58.45,
                    "ThuNewsClusteringS2S": 54.12
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/BAAI/bge-base-zh-v1.5\">bge-base-zh-v1.5</a>",
                    "Cmnli": 84.1,
                    "Ocnli": 75.41
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/BAAI/bge-base-zh-v1.5\">bge-base-zh-v1.5</a>",
                    "CMedQAv1": 80.47,
                    "CMedQAv2": 84.88,
                    "MMarcoReranking": 29.74,
                    "T2Reranking": 66.49
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/BAAI/bge-base-zh-v1.5\">bge-base-zh-v1.5</a>",
                    "CmedqaRetrieval": 41.61,
                    "CovidRetrieval": 74.7,
                    "DuRetrieval": 85.07,
                    "EcomRetrieval": 64.25,
                    "MMarcoRetrieval": 77.69,
                    "MedicalRetrieval": 56.51,
                    "T2Retrieval": 83.71,
                    "VideoRetrieval": 72.35
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/BAAI/bge-base-zh-v1.5\">bge-base-zh-v1.5</a>",
                    "AFQMC": 42.4,
                    "ATEC": 48.17,
                    "BQ": 61.78,
                    "LCQMC": 74.45,
                    "PAWSX": 20.4,
                    "QBQTC": 36.22,
                    "STS22 (zh)": 68.01,
                    "STSB": 78.31
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/BAAI/bge-base-zh-v1.5\">bge-base-zh-v1.5</a>"
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/BAAI/bge-base-zh-v1.5\">bge-base-zh-v1.5</a>"
                }
            ]
        }
    },
    "bert-base-15lang-cased": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/Geotrend/bert-base-15lang-cased\">bert-base-15lang-cased</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/Geotrend/bert-base-15lang-cased\">bert-base-15lang-cased</a>",
                    "AmazonReviewsClassification (fr)": 29.35,
                    "MTOPDomainClassification (fr)": 63.7,
                    "MTOPIntentClassification (fr)": 37.85,
                    "MasakhaNEWSClassification (fra)": 63.89,
                    "MassiveIntentClassification (fr)": 37.28,
                    "MassiveScenarioClassification (fr)": 44.47
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/Geotrend/bert-base-15lang-cased\">bert-base-15lang-cased</a>",
                    "AlloProfClusteringP2P": 53.16,
                    "AlloProfClusteringS2S": 43.43,
                    "HALClusteringS2S": 20.26,
                    "MLSUMClusteringP2P": 41.22,
                    "MLSUMClusteringS2S": 31.88,
                    "MasakhaNEWSClusteringP2P (fra)": 24.23,
                    "MasakhaNEWSClusteringS2S (fra)": 24.46
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/Geotrend/bert-base-15lang-cased\">bert-base-15lang-cased</a>",
                    "OpusparcusPC (fr)": 86.78,
                    "PawsX (fr)": 53.38
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/Geotrend/bert-base-15lang-cased\">bert-base-15lang-cased</a>",
                    "AlloprofReranking": 36.21,
                    "SyntecReranking": 53.25
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/Geotrend/bert-base-15lang-cased\">bert-base-15lang-cased</a>",
                    "AlloprofRetrieval": 1.61,
                    "BSARDRetrieval": 0.0,
                    "MintakaRetrieval (fr)": 3.55,
                    "SyntecRetrieval": 18.95,
                    "XPQARetrieval (fr)": 18.35
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/Geotrend/bert-base-15lang-cased\">bert-base-15lang-cased</a>",
                    "SICKFr": 58.77,
                    "STS22 (fr)": 40.4,
                    "STSBenchmarkMultilingualSTS (fr)": 52.25
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/Geotrend/bert-base-15lang-cased\">bert-base-15lang-cased</a>",
                    "SummEvalFr": 29.13
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/Geotrend/bert-base-15lang-cased\">bert-base-15lang-cased</a>"
                }
            ]
        }
    },
    "paraphrase-multilingual-MiniLM-L12-v2": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2\">paraphrase-multilingual-MiniLM-L12-v2</a>",
                    "BUCC (de-en)": 97.11,
                    "BUCC (fr-en)": 94.99,
                    "BUCC (ru-en)": 95.06,
                    "BUCC (zh-en)": 95.63,
                    "Tatoeba (afr-eng)": 58.22,
                    "Tatoeba (amh-eng)": 36.21,
                    "Tatoeba (ang-eng)": 10.24,
                    "Tatoeba (ara-eng)": 87.93,
                    "Tatoeba (arq-eng)": 18.6,
                    "Tatoeba (arz-eng)": 51.26,
                    "Tatoeba (ast-eng)": 62.17,
                    "Tatoeba (awa-eng)": 33.43,
                    "Tatoeba (aze-eng)": 62.1,
                    "Tatoeba (bel-eng)": 67.73,
                    "Tatoeba (ben-eng)": 36.48,
                    "Tatoeba (ber-eng)": 4.43,
                    "Tatoeba (bos-eng)": 93.27,
                    "Tatoeba (bre-eng)": 5.56,
                    "Tatoeba (bul-eng)": 92.65,
                    "Tatoeba (cat-eng)": 94.42,
                    "Tatoeba (cbk-eng)": 55.37,
                    "Tatoeba (ceb-eng)": 8.05,
                    "Tatoeba (ces-eng)": 95.12,
                    "Tatoeba (cha-eng)": 15.98,
                    "Tatoeba (cmn-eng)": 94.93,
                    "Tatoeba (cor-eng)": 3.42,
                    "Tatoeba (csb-eng)": 21.56,
                    "Tatoeba (cym-eng)": 13.25,
                    "Tatoeba (dan-eng)": 94.8,
                    "Tatoeba (deu-eng)": 97.02,
                    "Tatoeba (dsb-eng)": 33.43,
                    "Tatoeba (dtp-eng)": 5.69,
                    "Tatoeba (ell-eng)": 95.43,
                    "Tatoeba (epo-eng)": 41.73,
                    "Tatoeba (est-eng)": 97.33,
                    "Tatoeba (eus-eng)": 23.18,
                    "Tatoeba (fao-eng)": 27.51,
                    "Tatoeba (fin-eng)": 93.1,
                    "Tatoeba (fra-eng)": 91.72,
                    "Tatoeba (fry-eng)": 31.13,
                    "Tatoeba (gla-eng)": 3.61,
                    "Tatoeba (gle-eng)": 11.62,
                    "Tatoeba (glg-eng)": 94.0,
                    "Tatoeba (gsw-eng)": 25.74,
                    "Tatoeba (heb-eng)": 86.88,
                    "Tatoeba (hin-eng)": 97.62,
                    "Tatoeba (hrv-eng)": 95.98,
                    "Tatoeba (hsb-eng)": 36.1,
                    "Tatoeba (hun-eng)": 91.58,
                    "Tatoeba (hye-eng)": 93.28,
                    "Tatoeba (ido-eng)": 40.25,
                    "Tatoeba (ile-eng)": 57.71,
                    "Tatoeba (ina-eng)": 79.13,
                    "Tatoeba (ind-eng)": 92.74,
                    "Tatoeba (isl-eng)": 24.07,
                    "Tatoeba (ita-eng)": 93.05,
                    "Tatoeba (jav-eng)": 17.04,
                    "Tatoeba (jpn-eng)": 90.41,
                    "Tatoeba (kab-eng)": 1.16,
                    "Tatoeba (kat-eng)": 95.44,
                    "Tatoeba (kaz-eng)": 34.89,
                    "Tatoeba (khm-eng)": 32.11,
                    "Tatoeba (kor-eng)": 92.52,
                    "Tatoeba (kur-eng)": 46.94,
                    "Tatoeba (kzj-eng)": 6.24,
                    "Tatoeba (lat-eng)": 19.47,
                    "Tatoeba (lfn-eng)": 47.02,
                    "Tatoeba (lit-eng)": 93.16,
                    "Tatoeba (lvs-eng)": 97.87,
                    "Tatoeba (mal-eng)": 32.2,
                    "Tatoeba (mar-eng)": 92.38,
                    "Tatoeba (max-eng)": 45.25,
                    "Tatoeba (mhr-eng)": 6.89,
                    "Tatoeba (mkd-eng)": 91.0,
                    "Tatoeba (mon-eng)": 95.04,
                    "Tatoeba (nds-eng)": 32.16,
                    "Tatoeba (nld-eng)": 94.58,
                    "Tatoeba (nno-eng)": 76.34,
                    "Tatoeba (nob-eng)": 97.73,
                    "Tatoeba (nov-eng)": 47.99,
                    "Tatoeba (oci-eng)": 38.57,
                    "Tatoeba (orv-eng)": 15.1,
                    "Tatoeba (pam-eng)": 5.41,
                    "Tatoeba (pes-eng)": 92.59,
                    "Tatoeba (pms-eng)": 30.7,
                    "Tatoeba (pol-eng)": 94.28,
                    "Tatoeba (por-eng)": 92.13,
                    "Tatoeba (ron-eng)": 95.3,
                    "Tatoeba (rus-eng)": 91.87,
                    "Tatoeba (slk-eng)": 95.15,
                    "Tatoeba (slv-eng)": 96.92,
                    "Tatoeba (spa-eng)": 95.42,
                    "Tatoeba (sqi-eng)": 98.17,
                    "Tatoeba (srp-eng)": 92.24,
                    "Tatoeba (swe-eng)": 94.42,
                    "Tatoeba (swg-eng)": 26.31,
                    "Tatoeba (swh-eng)": 14.48,
                    "Tatoeba (tam-eng)": 24.64,
                    "Tatoeba (tat-eng)": 10.25,
                    "Tatoeba (tel-eng)": 36.4,
                    "Tatoeba (tgl-eng)": 13.09,
                    "Tatoeba (tha-eng)": 96.72,
                    "Tatoeba (tuk-eng)": 15.16,
                    "Tatoeba (tur-eng)": 95.08,
                    "Tatoeba (tzl-eng)": 25.46,
                    "Tatoeba (uig-eng)": 24.39,
                    "Tatoeba (ukr-eng)": 92.82,
                    "Tatoeba (urd-eng)": 94.57,
                    "Tatoeba (uzb-eng)": 17.14,
                    "Tatoeba (vie-eng)": 95.12,
                    "Tatoeba (war-eng)": 7.25,
                    "Tatoeba (wuu-eng)": 76.0,
                    "Tatoeba (xho-eng)": 4.52,
                    "Tatoeba (yid-eng)": 14.38,
                    "Tatoeba (yue-eng)": 71.45,
                    "Tatoeba (zsm-eng)": 95.31
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2\">paraphrase-multilingual-MiniLM-L12-v2</a>",
                    "AllegroReviews": 30.88,
                    "AmazonCounterfactualClassification (de)": 68.35,
                    "AmazonCounterfactualClassification (en)": 71.57,
                    "AmazonCounterfactualClassification (en-ext)": 69.99,
                    "AmazonCounterfactualClassification (ja)": 63.45,
                    "AmazonPolarityClassification": 69.21,
                    "AmazonReviewsClassification (de)": 35.91,
                    "AmazonReviewsClassification (en)": 35.11,
                    "AmazonReviewsClassification (es)": 37.49,
                    "AmazonReviewsClassification (fr)": 35.3,
                    "AmazonReviewsClassification (ja)": 33.24,
                    "AmazonReviewsClassification (zh)": 35.26,
                    "Banking77Classification": 79.77,
                    "CBD": 57.68,
                    "EmotionClassification": 42.37,
                    "ImdbClassification": 60.46,
                    "MTOPDomainClassification (de)": 79.2,
                    "MTOPDomainClassification (en)": 87.06,
                    "MTOPDomainClassification (es)": 83.04,
                    "MTOPDomainClassification (fr)": 78.63,
                    "MTOPDomainClassification (hi)": 81.36,
                    "MTOPDomainClassification (th)": 79.99,
                    "MTOPIntentClassification (de)": 54.23,
                    "MTOPIntentClassification (en)": 65.52,
                    "MTOPIntentClassification (es)": 60.28,
                    "MTOPIntentClassification (fr)": 54.05,
                    "MTOPIntentClassification (hi)": 59.9,
                    "MTOPIntentClassification (th)": 61.96,
                    "MasakhaNEWSClassification (fra)": 76.09,
                    "MassiveIntentClassification (pl)": 59.43,
                    "MassiveIntentClassification (fr)": 57.52,
                    "MassiveScenarioClassification (pl)": 65.04,
                    "MassiveScenarioClassification (fr)": 64.52,
                    "PAC": 65.76,
                    "PolEmo2.0-IN": 57.76,
                    "PolEmo2.0-OUT": 28.7,
                    "ToxicConversationsClassification": 66.07,
                    "TweetSentimentExtractionClassification": 56.12
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2\">paraphrase-multilingual-MiniLM-L12-v2</a>",
                    "8TagsClustering": 23.24,
                    "AlloProfClusteringP2P": 56.06,
                    "AlloProfClusteringS2S": 42.16,
                    "ArxivClusteringP2P": 38.33,
                    "ArxivClusteringS2S": 31.55,
                    "BiorxivClusteringP2P": 33.49,
                    "BiorxivClusteringS2S": 29.44,
                    "BlurbsClusteringP2P": 32.46,
                    "BlurbsClusteringS2S": 14.33,
                    "HALClusteringS2S": 23.21,
                    "MLSUMClusteringP2P": 39.97,
                    "MLSUMClusteringS2S": 36.55,
                    "MasakhaNEWSClusteringP2P (fra)": 36.58,
                    "MasakhaNEWSClusteringS2S (fra)": 33.9,
                    "MedrxivClusteringP2P": 31.52,
                    "MedrxivClusteringS2S": 30.87,
                    "RedditClustering": 42.02,
                    "RedditClusteringP2P": 50.73,
                    "StackExchangeClustering": 49.6,
                    "StackExchangeClusteringP2P": 31.69,
                    "TenKGnadClusteringP2P": 36.13,
                    "TenKGnadClusteringS2S": 22.26,
                    "TwentyNewsgroupsClustering": 39.28
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2\">paraphrase-multilingual-MiniLM-L12-v2</a>",
                    "CDSC-E": 72.22,
                    "OpusparcusPC (fr)": 92.01,
                    "PPC": 91.8,
                    "PSC": 97.14,
                    "PawsX (fr)": 56.94,
                    "SICK-E-PL": 71.94,
                    "SprintDuplicateQuestions": 89.46,
                    "TwitterSemEval2015": 62.06,
                    "TwitterURLCorpus": 83.83
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2\">paraphrase-multilingual-MiniLM-L12-v2</a>",
                    "AlloprofReranking": 49.01,
                    "AskUbuntuDupQuestions": 60.49,
                    "MindSmallReranking": 30.37,
                    "SciDocsRR": 77.78,
                    "StackOverflowDupQuestions": 45.85,
                    "SyntecReranking": 75.03
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2\">paraphrase-multilingual-MiniLM-L12-v2</a>",
                    "AlloprofRetrieval": 26.63,
                    "ArguAna": 44.88,
                    "ArguAna-PL": 37.83,
                    "BSARDRetrieval": 0.0,
                    "CQADupstackRetrieval": 30.7,
                    "ClimateFEVER": 18.49,
                    "DBPedia": 22.63,
                    "DBPedia-PL": 18.0,
                    "FEVER": 52.66,
                    "FiQA-PL": 12.49,
                    "FiQA2018": 20.33,
                    "HotpotQA": 30.01,
                    "HotpotQA-PL": 22.76,
                    "MSMARCO": 23.72,
                    "MSMARCO-PL": 10.39,
                    "MintakaRetrieval (fr)": 21.53,
                    "NFCorpus": 23.45,
                    "NFCorpus-PL": 17.16,
                    "NQ": 29.8,
                    "NQ-PL": 12.56,
                    "Quora-PL": 77.18,
                    "QuoraRetrieval": 86.55,
                    "SCIDOCS": 0.03,
                    "SCIDOCS-PL": 10.26,
                    "SciFact": 48.37,
                    "SciFact-PL": 40.24,
                    "SyntecRetrieval": 65.54,
                    "TRECCOVID": 39.12,
                    "TRECCOVID-PL": 34.38,
                    "Touche2020": 16.06,
                    "XPQARetrieval (fr)": 42.51
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2\">paraphrase-multilingual-MiniLM-L12-v2</a>",
                    "BIOSSES": 74.18,
                    "CDSC-R": 88.98,
                    "SICK-R": 79.61,
                    "SICK-R-PL": 68.77,
                    "SICKFr": 75.1,
                    "STS12": 76.02,
                    "STS13": 80.7,
                    "STS14": 78.85,
                    "STS15": 85.84,
                    "STS16": 81.05,
                    "STS17 (ar-ar)": 79.16,
                    "STS17 (en-ar)": 81.22,
                    "STS17 (en-de)": 84.22,
                    "STS17 (en-en)": 86.87,
                    "STS17 (en-tr)": 76.74,
                    "STS17 (es-en)": 84.44,
                    "STS17 (es-es)": 85.56,
                    "STS17 (fr-en)": 76.59,
                    "STS17 (it-en)": 82.35,
                    "STS17 (ko-ko)": 77.03,
                    "STS17 (nl-en)": 81.71,
                    "STS22 (pl)": 33.73,
                    "STS22 (fr)": 70.55,
                    "STSBenchmark": 84.42,
                    "STSBenchmarkMultilingualSTS (fr)": 79.9
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2\">paraphrase-multilingual-MiniLM-L12-v2</a>",
                    "SummEval": 30.67,
                    "SummEvalFr": 29.2
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2\">paraphrase-multilingual-MiniLM-L12-v2</a>"
                }
            ]
        }
    },
    "all-mpnet-base-v2": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/all-mpnet-base-v2\">all-mpnet-base-v2</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/all-mpnet-base-v2\">all-mpnet-base-v2</a>",
                    "AmazonCounterfactualClassification (en)": 65.27,
                    "AmazonPolarityClassification": 67.13,
                    "AmazonReviewsClassification (en)": 31.92,
                    "Banking77Classification": 81.86,
                    "EmotionClassification": 39.72,
                    "ImdbClassification": 70.72,
                    "MTOPDomainClassification (en)": 92.08,
                    "MTOPIntentClassification (en)": 70.21,
                    "MassiveIntentClassification (en)": 69.57,
                    "MassiveScenarioClassification (en)": 76.01,
                    "ToxicConversationsClassification": 60.86,
                    "TweetSentimentExtractionClassification": 55.46
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/all-mpnet-base-v2\">all-mpnet-base-v2</a>",
                    "ArxivClusteringP2P": 48.38,
                    "ArxivClusteringS2S": 39.72,
                    "BiorxivClusteringP2P": 39.62,
                    "BiorxivClusteringS2S": 35.02,
                    "MedrxivClusteringP2P": 35.58,
                    "MedrxivClusteringS2S": 32.87,
                    "RedditClustering": 54.82,
                    "RedditClusteringP2P": 56.77,
                    "StackExchangeClustering": 53.8,
                    "StackExchangeClusteringP2P": 34.28,
                    "TwentyNewsgroupsClustering": 49.74
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/all-mpnet-base-v2\">all-mpnet-base-v2</a>",
                    "SprintDuplicateQuestions": 90.15,
                    "TwitterSemEval2015": 73.85,
                    "TwitterURLCorpus": 85.11
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/all-mpnet-base-v2\">all-mpnet-base-v2</a>",
                    "AskUbuntuDupQuestions": 65.85,
                    "MindSmallReranking": 30.97,
                    "SciDocsRR": 88.65,
                    "StackOverflowDupQuestions": 51.98
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/all-mpnet-base-v2\">all-mpnet-base-v2</a>",
                    "ArguAna": 46.52,
                    "CQADupstackRetrieval": 44.96,
                    "ClimateFEVER": 21.97,
                    "DBPedia": 32.09,
                    "FEVER": 50.86,
                    "FiQA2018": 49.96,
                    "HotpotQA": 39.29,
                    "MSMARCO": 39.75,
                    "NFCorpus": 33.29,
                    "NQ": 50.45,
                    "QuoraRetrieval": 87.46,
                    "SCIDOCS": 23.76,
                    "SciFact": 65.57,
                    "TRECCOVID": 51.33,
                    "Touche2020": 19.93
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/all-mpnet-base-v2\">all-mpnet-base-v2</a>",
                    "BIOSSES": 80.43,
                    "SICK-R": 80.59,
                    "STS12": 72.63,
                    "STS13": 83.48,
                    "STS14": 78.0,
                    "STS15": 85.66,
                    "STS16": 80.03,
                    "STS17 (en-en)": 90.6,
                    "STS22 (en)": 67.95,
                    "STSBenchmark": 83.42
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/all-mpnet-base-v2\">all-mpnet-base-v2</a>",
                    "SummEval": 27.49
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/all-mpnet-base-v2\">all-mpnet-base-v2</a>"
                }
            ]
        }
    },
    "nb-bert-large": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/NbAiLab/nb-bert-large\">nb-bert-large</a>",
                    "BornholmBitextMining": 4.53
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/NbAiLab/nb-bert-large\">nb-bert-large</a>",
                    "AngryTweetsClassification": 52.14,
                    "DKHateClassification": 62.13,
                    "DanishPoliticalCommentsClassification": 35.04,
                    "LccSentimentClassification": 56.27,
                    "MassiveIntentClassification (da)": 57.03,
                    "MassiveIntentClassification (nb)": 62.68,
                    "MassiveIntentClassification (sv)": 55.02,
                    "MassiveScenarioClassification (da)": 60.43,
                    "MassiveScenarioClassification (nb)": 67.44,
                    "MassiveScenarioClassification (sv)": 57.12,
                    "NoRecClassification": 55.46,
                    "NordicLangClassification": 85.27,
                    "NorwegianParliament": 62.58,
                    "ScalaDaClassification": 62.85,
                    "ScalaNbClassification": 66.97
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/NbAiLab/nb-bert-large\">nb-bert-large</a>"
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/NbAiLab/nb-bert-large\">nb-bert-large</a>"
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/NbAiLab/nb-bert-large\">nb-bert-large</a>"
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/NbAiLab/nb-bert-large\">nb-bert-large</a>"
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/NbAiLab/nb-bert-large\">nb-bert-large</a>"
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/NbAiLab/nb-bert-large\">nb-bert-large</a>"
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/NbAiLab/nb-bert-large\">nb-bert-large</a>"
                }
            ]
        }
    },
    "all-MiniLM-L6-v2": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/all-MiniLM-L6-v2\">all-MiniLM-L6-v2</a>",
                    "BornholmBitextMining": 29.68
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/all-MiniLM-L6-v2\">all-MiniLM-L6-v2</a>",
                    "AmazonCounterfactualClassification (en)": 64.15,
                    "AmazonPolarityClassification": 62.58,
                    "AmazonReviewsClassification (en)": 31.79,
                    "AngryTweetsClassification": 42.49,
                    "Banking77Classification": 79.75,
                    "DKHateClassification": 55.05,
                    "DanishPoliticalCommentsClassification": 26.96,
                    "EmotionClassification": 38.43,
                    "ImdbClassification": 60.66,
                    "LccSentimentClassification": 38.47,
                    "MTOPDomainClassification (en)": 91.56,
                    "MTOPIntentClassification (en)": 62.18,
                    "MasakhaNEWSClassification (fra)": 74.05,
                    "MassiveIntentClassification (en)": 67.4,
                    "MassiveIntentClassification (da)": 40.99,
                    "MassiveIntentClassification (nb)": 39.34,
                    "MassiveIntentClassification (sv)": 38.1,
                    "MassiveScenarioClassification (en)": 75.76,
                    "MassiveScenarioClassification (da)": 47.01,
                    "MassiveScenarioClassification (nb)": 44.67,
                    "MassiveScenarioClassification (sv)": 42.93,
                    "NoRecClassification": 40.02,
                    "NordicLangClassification": 54.71,
                    "NorwegianParliament": 54.8,
                    "ScalaDaClassification": 50.03,
                    "ScalaNbClassification": 50.17,
                    "ToxicConversationsClassification": 66.99,
                    "TweetSentimentExtractionClassification": 55.41
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/all-MiniLM-L6-v2\">all-MiniLM-L6-v2</a>",
                    "AlloProfClusteringP2P": 51.83,
                    "AlloProfClusteringS2S": 32.07,
                    "ArxivClusteringP2P": 46.55,
                    "ArxivClusteringS2S": 37.86,
                    "BiorxivClusteringP2P": 38.48,
                    "BiorxivClusteringS2S": 33.17,
                    "HALClusteringS2S": 18.84,
                    "MLSUMClusteringP2P": 36.74,
                    "MLSUMClusteringS2S": 28.12,
                    "MasakhaNEWSClusteringP2P (fra)": 34.92,
                    "MasakhaNEWSClusteringS2S (fra)": 40.58,
                    "MedrxivClusteringP2P": 34.41,
                    "MedrxivClusteringS2S": 32.29,
                    "RedditClustering": 50.67,
                    "RedditClusteringP2P": 54.15,
                    "StackExchangeClustering": 53.36,
                    "StackExchangeClusteringP2P": 38.0,
                    "TwentyNewsgroupsClustering": 46.86
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/all-MiniLM-L6-v2\">all-MiniLM-L6-v2</a>",
                    "OpusparcusPC (fr)": 86.53,
                    "PawsX (fr)": 55.4,
                    "SprintDuplicateQuestions": 94.55,
                    "TwitterSemEval2015": 67.86,
                    "TwitterURLCorpus": 84.7
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/all-MiniLM-L6-v2\">all-MiniLM-L6-v2</a>",
                    "AlloprofReranking": 31.69,
                    "AskUbuntuDupQuestions": 63.48,
                    "MindSmallReranking": 30.8,
                    "SciDocsRR": 87.12,
                    "StackOverflowDupQuestions": 50.76,
                    "SyntecReranking": 59.57
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/all-MiniLM-L6-v2\">all-MiniLM-L6-v2</a>",
                    "AlloprofRetrieval": 28.41,
                    "ArguAna": 50.17,
                    "BSARDRetrieval": 0.0,
                    "CQADupstackRetrieval": 41.32,
                    "ClimateFEVER": 20.27,
                    "DBPedia": 32.33,
                    "FEVER": 51.93,
                    "FiQA2018": 36.87,
                    "HotpotQA": 46.51,
                    "MSMARCO": 36.54,
                    "MintakaRetrieval (fr)": 9.19,
                    "NFCorpus": 31.59,
                    "NQ": 43.87,
                    "QuoraRetrieval": 87.56,
                    "SCIDOCS": 21.64,
                    "SciFact": 64.51,
                    "SyntecRetrieval": 60.15,
                    "TRECCOVID": 47.25,
                    "Touche2020": 16.9,
                    "XPQARetrieval (fr)": 51.79
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/all-MiniLM-L6-v2\">all-MiniLM-L6-v2</a>",
                    "BIOSSES": 81.64,
                    "SICK-R": 77.58,
                    "SICKFr": 62.48,
                    "STS12": 72.37,
                    "STS13": 80.6,
                    "STS14": 75.59,
                    "STS15": 85.39,
                    "STS16": 78.99,
                    "STS17 (ar-ar)": 50.89,
                    "STS17 (en-ar)": -4.28,
                    "STS17 (en-de)": 35.82,
                    "STS17 (en-en)": 87.59,
                    "STS17 (en-tr)": 4.5,
                    "STS17 (es-en)": 16.31,
                    "STS17 (es-es)": 76.12,
                    "STS17 (fr-en)": 37.09,
                    "STS17 (it-en)": 24.45,
                    "STS17 (ko-ko)": 43.39,
                    "STS17 (nl-en)": 29.0,
                    "STS22 (ar)": 22.64,
                    "STS22 (de)": 31.04,
                    "STS22 (de-en)": 44.04,
                    "STS22 (de-fr)": 30.07,
                    "STS22 (de-pl)": 4.93,
                    "STS22 (en)": 67.21,
                    "STS22 (es)": 54.78,
                    "STS22 (es-en)": 53.42,
                    "STS22 (es-it)": 44.27,
                    "STS22 (fr)": 77.0,
                    "STS22 (fr-pl)": 50.71,
                    "STS22 (it)": 60.4,
                    "STS22 (pl)": 26.77,
                    "STS22 (pl-en)": 32.8,
                    "STS22 (ru)": 14.72,
                    "STS22 (tr)": 33.69,
                    "STS22 (zh)": 44.93,
                    "STS22 (zh-en)": 41.64,
                    "STSBenchmark": 82.03,
                    "STSBenchmarkMultilingualSTS (fr)": 64.93
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/all-MiniLM-L6-v2\">all-MiniLM-L6-v2</a>",
                    "SummEval": 30.81,
                    "SummEvalFr": 28.28
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/all-MiniLM-L6-v2\">all-MiniLM-L6-v2</a>"
                }
            ]
        }
    },
    "bge-small-zh-v1.5": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/BAAI/bge-small-zh-v1.5\">bge-small-zh-v1.5</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/BAAI/bge-small-zh-v1.5\">bge-small-zh-v1.5</a>",
                    "AmazonReviewsClassification (zh)": 35.91,
                    "IFlyTek": 45.49,
                    "JDReview": 80.04,
                    "MassiveIntentClassification (zh-CN)": 63.95,
                    "MassiveScenarioClassification (zh-CN)": 70.8,
                    "MultilingualSentiment": 63.06,
                    "OnlineShopping": 85.05,
                    "TNews": 48.15,
                    "Waimai": 83.18
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/BAAI/bge-small-zh-v1.5\">bge-small-zh-v1.5</a>",
                    "CLSClusteringP2P": 38.14,
                    "CLSClusteringS2S": 35.14,
                    "ThuNewsClusteringP2P": 54.22,
                    "ThuNewsClusteringS2S": 49.22
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/BAAI/bge-small-zh-v1.5\">bge-small-zh-v1.5</a>",
                    "Cmnli": 76.24,
                    "Ocnli": 64.57
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/BAAI/bge-small-zh-v1.5\">bge-small-zh-v1.5</a>",
                    "CMedQAv1": 77.4,
                    "CMedQAv2": 79.86,
                    "MMarcoReranking": 20.5,
                    "T2Reranking": 65.9
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/BAAI/bge-small-zh-v1.5\">bge-small-zh-v1.5</a>",
                    "CmedqaRetrieval": 35.11,
                    "CovidRetrieval": 70.14,
                    "DuRetrieval": 77.28,
                    "EcomRetrieval": 55.71,
                    "MMarcoRetrieval": 63.48,
                    "MedicalRetrieval": 49.8,
                    "T2Retrieval": 76.43,
                    "VideoRetrieval": 66.19
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/BAAI/bge-small-zh-v1.5\">bge-small-zh-v1.5</a>",
                    "AFQMC": 33.42,
                    "ATEC": 43.01,
                    "BQ": 55.22,
                    "LCQMC": 72.19,
                    "PAWSX": 9.26,
                    "QBQTC": 35.29,
                    "STS22 (zh)": 67.72,
                    "STSB": 76.73
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/BAAI/bge-small-zh-v1.5\">bge-small-zh-v1.5</a>"
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/BAAI/bge-small-zh-v1.5\">bge-small-zh-v1.5</a>"
                }
            ]
        }
    },
    "Cohere-embed-english-v3.0": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/Cohere/Cohere-embed-english-v3.0\">Cohere-embed-english-v3.0</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/Cohere/Cohere-embed-english-v3.0\">Cohere-embed-english-v3.0</a>"
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/Cohere/Cohere-embed-english-v3.0\">Cohere-embed-english-v3.0</a>"
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/Cohere/Cohere-embed-english-v3.0\">Cohere-embed-english-v3.0</a>"
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/Cohere/Cohere-embed-english-v3.0\">Cohere-embed-english-v3.0</a>"
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/Cohere/Cohere-embed-english-v3.0\">Cohere-embed-english-v3.0</a>",
                    "AILACasedocs": 31.54,
                    "AILAStatutes": 27.15,
                    "GerDaLIRSmall": 6.05,
                    "LeCaRDv2": 21.02,
                    "LegalBenchConsumerContractsQA": 77.12,
                    "LegalBenchCorporateLobbying": 93.68,
                    "LegalQuAD": 26.08,
                    "LegalSummarization": 61.7
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/Cohere/Cohere-embed-english-v3.0\">Cohere-embed-english-v3.0</a>"
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/Cohere/Cohere-embed-english-v3.0\">Cohere-embed-english-v3.0</a>"
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/Cohere/Cohere-embed-english-v3.0\">Cohere-embed-english-v3.0</a>",
                    "Core17InstructionRetrieval": 2.8,
                    "News21InstructionRetrieval": 0.2,
                    "Robust04InstructionRetrieval": -3.63
                }
            ]
        }
    },
    "LLM2Vec-Meta-Llama-3-supervised": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/McGill-NLP/LLM2Vec-Meta-Llama-3-8B-Instruct-mntp-supervised\">LLM2Vec-Meta-Llama-3-supervised</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/McGill-NLP/LLM2Vec-Meta-Llama-3-8B-Instruct-mntp-supervised\">LLM2Vec-Meta-Llama-3-supervised</a>",
                    "AmazonCounterfactualClassification (en)": 79.94,
                    "AmazonPolarityClassification": 86.07,
                    "AmazonReviewsClassification (en)": 46.84,
                    "Banking77Classification": 88.05,
                    "EmotionClassification": 51.2,
                    "ImdbClassification": 82.94,
                    "MTOPDomainClassification (en)": 96.14,
                    "MTOPIntentClassification (en)": 86.11,
                    "MassiveIntentClassification (en)": 79.8,
                    "MassiveScenarioClassification (en)": 81.52,
                    "ToxicConversationsClassification": 70.59,
                    "TweetSentimentExtractionClassification": 61.9
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/McGill-NLP/LLM2Vec-Meta-Llama-3-8B-Instruct-mntp-supervised\">LLM2Vec-Meta-Llama-3-supervised</a>",
                    "ArxivClusteringP2P": 44.27,
                    "ArxivClusteringS2S": 46.85,
                    "BiorxivClusteringP2P": 32.35,
                    "BiorxivClusteringS2S": 36.7,
                    "MedrxivClusteringP2P": 30.71,
                    "MedrxivClusteringS2S": 32.96,
                    "RedditClustering": 61.72,
                    "RedditClusteringP2P": 63.98,
                    "StackExchangeClustering": 72.74,
                    "StackExchangeClusteringP2P": 32.26,
                    "TwentyNewsgroupsClustering": 56.41
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/McGill-NLP/LLM2Vec-Meta-Llama-3-8B-Instruct-mntp-supervised\">LLM2Vec-Meta-Llama-3-supervised</a>",
                    "SprintDuplicateQuestions": 95.09,
                    "TwitterSemEval2015": 81.73,
                    "TwitterURLCorpus": 86.56
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/McGill-NLP/LLM2Vec-Meta-Llama-3-8B-Instruct-mntp-supervised\">LLM2Vec-Meta-Llama-3-supervised</a>",
                    "AskUbuntuDupQuestions": 65.19,
                    "MindSmallReranking": 32.67,
                    "SciDocsRR": 86.05,
                    "StackOverflowDupQuestions": 54.82
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/McGill-NLP/LLM2Vec-Meta-Llama-3-8B-Instruct-mntp-supervised\">LLM2Vec-Meta-Llama-3-supervised</a>",
                    "ArguAna": 62.78,
                    "CQADupstackRetrieval": 48.25,
                    "ClimateFEVER": 34.27,
                    "DBPedia": 48.34,
                    "FEVER": 90.2,
                    "FiQA2018": 55.33,
                    "HotpotQA": 71.76,
                    "MSMARCO": 43.24,
                    "NFCorpus": 41.83,
                    "NQ": 64.21,
                    "QuoraRetrieval": 87.16,
                    "SCIDOCS": 22.96,
                    "SciFact": 78.22,
                    "TRECCOVID": 80.34,
                    "Touche2020": 20.5
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/McGill-NLP/LLM2Vec-Meta-Llama-3-8B-Instruct-mntp-supervised\">LLM2Vec-Meta-Llama-3-supervised</a>",
                    "BIOSSES": 84.92,
                    "SICK-R": 83.94,
                    "STS12": 79.27,
                    "STS13": 84.83,
                    "STS14": 82.94,
                    "STS15": 88.09,
                    "STS16": 86.54,
                    "STS17 (en-en)": 89.58,
                    "STS22 (en)": 67.67,
                    "STSBenchmark": 88.05
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/McGill-NLP/LLM2Vec-Meta-Llama-3-8B-Instruct-mntp-supervised\">LLM2Vec-Meta-Llama-3-supervised</a>",
                    "SummEval": 30.94
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/McGill-NLP/LLM2Vec-Meta-Llama-3-8B-Instruct-mntp-supervised\">LLM2Vec-Meta-Llama-3-supervised</a>"
                }
            ]
        }
    },
    "bert-base-swedish-cased": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/KB/bert-base-swedish-cased\">bert-base-swedish-cased</a>",
                    "BornholmBitextMining": 6.6
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/KB/bert-base-swedish-cased\">bert-base-swedish-cased</a>",
                    "AngryTweetsClassification": 44.58,
                    "DKHateClassification": 55.53,
                    "DanishPoliticalCommentsClassification": 28.97,
                    "LccSentimentClassification": 41.2,
                    "MassiveIntentClassification (da)": 37.98,
                    "MassiveIntentClassification (nb)": 35.75,
                    "MassiveIntentClassification (sv)": 52.75,
                    "MassiveScenarioClassification (da)": 40.44,
                    "MassiveScenarioClassification (nb)": 35.76,
                    "MassiveScenarioClassification (sv)": 56.09,
                    "NoRecClassification": 43.91,
                    "NordicLangClassification": 62.45,
                    "NorwegianParliament": 57.56,
                    "ScalaDaClassification": 53.53,
                    "ScalaNbClassification": 53.63
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/KB/bert-base-swedish-cased\">bert-base-swedish-cased</a>"
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/KB/bert-base-swedish-cased\">bert-base-swedish-cased</a>"
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/KB/bert-base-swedish-cased\">bert-base-swedish-cased</a>"
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/KB/bert-base-swedish-cased\">bert-base-swedish-cased</a>"
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/KB/bert-base-swedish-cased\">bert-base-swedish-cased</a>"
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/KB/bert-base-swedish-cased\">bert-base-swedish-cased</a>"
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/KB/bert-base-swedish-cased\">bert-base-swedish-cased</a>"
                }
            ]
        }
    },
    "tart-dual-contriever-msmarco": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/orionweller/tart-dual-contriever-msmarco\">tart-dual-contriever-msmarco</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/orionweller/tart-dual-contriever-msmarco\">tart-dual-contriever-msmarco</a>"
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/orionweller/tart-dual-contriever-msmarco\">tart-dual-contriever-msmarco</a>"
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/orionweller/tart-dual-contriever-msmarco\">tart-dual-contriever-msmarco</a>"
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/orionweller/tart-dual-contriever-msmarco\">tart-dual-contriever-msmarco</a>"
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/orionweller/tart-dual-contriever-msmarco\">tart-dual-contriever-msmarco</a>"
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/orionweller/tart-dual-contriever-msmarco\">tart-dual-contriever-msmarco</a>"
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/orionweller/tart-dual-contriever-msmarco\">tart-dual-contriever-msmarco</a>"
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/orionweller/tart-dual-contriever-msmarco\">tart-dual-contriever-msmarco</a>",
                    "Core17InstructionRetrieval": -3.04,
                    "News21InstructionRetrieval": -2.98,
                    "Robust04InstructionRetrieval": -8.98
                }
            ]
        }
    },
    "electra-small-swedish-cased-discriminator": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/KBLab/electra-small-swedish-cased-discriminator\">electra-small-swedish-cased-discriminator</a>",
                    "BornholmBitextMining": 0.85
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/KBLab/electra-small-swedish-cased-discriminator\">electra-small-swedish-cased-discriminator</a>",
                    "AngryTweetsClassification": 40.52,
                    "DKHateClassification": 52.28,
                    "DanishPoliticalCommentsClassification": 25.17,
                    "LccSentimentClassification": 36.67,
                    "MassiveIntentClassification (da)": 6.51,
                    "MassiveIntentClassification (nb)": 5.66,
                    "MassiveIntentClassification (sv)": 6.6,
                    "MassiveScenarioClassification (da)": 11.5,
                    "MassiveScenarioClassification (nb)": 11.26,
                    "MassiveScenarioClassification (sv)": 12.16,
                    "NoRecClassification": 39.72,
                    "NordicLangClassification": 44.53,
                    "NorwegianParliament": 52.44,
                    "ScalaDaClassification": 51.66,
                    "ScalaNbClassification": 52.41
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/KBLab/electra-small-swedish-cased-discriminator\">electra-small-swedish-cased-discriminator</a>"
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/KBLab/electra-small-swedish-cased-discriminator\">electra-small-swedish-cased-discriminator</a>"
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/KBLab/electra-small-swedish-cased-discriminator\">electra-small-swedish-cased-discriminator</a>"
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/KBLab/electra-small-swedish-cased-discriminator\">electra-small-swedish-cased-discriminator</a>"
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/KBLab/electra-small-swedish-cased-discriminator\">electra-small-swedish-cased-discriminator</a>"
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/KBLab/electra-small-swedish-cased-discriminator\">electra-small-swedish-cased-discriminator</a>"
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/KBLab/electra-small-swedish-cased-discriminator\">electra-small-swedish-cased-discriminator</a>"
                }
            ]
        }
    },
    "e5-large-v2": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/e5-large-v2\">e5-large-v2</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/e5-large-v2\">e5-large-v2</a>"
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/e5-large-v2\">e5-large-v2</a>"
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/e5-large-v2\">e5-large-v2</a>"
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/e5-large-v2\">e5-large-v2</a>"
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/e5-large-v2\">e5-large-v2</a>"
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/e5-large-v2\">e5-large-v2</a>"
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/e5-large-v2\">e5-large-v2</a>"
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/e5-large-v2\">e5-large-v2</a>",
                    "Core17InstructionRetrieval": 0.12,
                    "News21InstructionRetrieval": 0.87,
                    "Robust04InstructionRetrieval": -4.16
                }
            ]
        }
    },
    "Cohere-embed-multilingual-light-v3.0": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/Cohere/Cohere-embed-multilingual-light-v3.0\">Cohere-embed-multilingual-light-v3.0</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/Cohere/Cohere-embed-multilingual-light-v3.0\">Cohere-embed-multilingual-light-v3.0</a>",
                    "AmazonReviewsClassification (fr)": 38.6,
                    "MTOPDomainClassification (fr)": 80.79,
                    "MTOPIntentClassification (fr)": 50.01,
                    "MasakhaNEWSClassification (fra)": 82.58,
                    "MassiveIntentClassification (fr)": 56.31,
                    "MassiveScenarioClassification (fr)": 59.5
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/Cohere/Cohere-embed-multilingual-light-v3.0\">Cohere-embed-multilingual-light-v3.0</a>",
                    "AlloProfClusteringP2P": 61.96,
                    "AlloProfClusteringS2S": 31.36,
                    "HALClusteringS2S": 17.31,
                    "MLSUMClusteringP2P": 42.8,
                    "MLSUMClusteringS2S": 32.72,
                    "MasakhaNEWSClusteringP2P (fra)": 56.81,
                    "MasakhaNEWSClusteringS2S (fra)": 29.41
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/Cohere/Cohere-embed-multilingual-light-v3.0\">Cohere-embed-multilingual-light-v3.0</a>",
                    "OpusparcusPC (fr)": 90.92,
                    "PawsX (fr)": 57.32
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/Cohere/Cohere-embed-multilingual-light-v3.0\">Cohere-embed-multilingual-light-v3.0</a>",
                    "AlloprofReranking": 51.6,
                    "SyntecReranking": 88.03
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/Cohere/Cohere-embed-multilingual-light-v3.0\">Cohere-embed-multilingual-light-v3.0</a>",
                    "AlloprofRetrieval": 35.39,
                    "BSARDRetrieval": 0.0,
                    "MintakaRetrieval (fr)": 23.0,
                    "SyntecRetrieval": 76.88,
                    "XPQARetrieval (fr)": 45.23
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/Cohere/Cohere-embed-multilingual-light-v3.0\">Cohere-embed-multilingual-light-v3.0</a>",
                    "SICKFr": 75.5,
                    "STS22 (fr)": 82.8,
                    "STSBenchmarkMultilingualSTS (fr)": 76.48
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/Cohere/Cohere-embed-multilingual-light-v3.0\">Cohere-embed-multilingual-light-v3.0</a>",
                    "SummEvalFr": 31.4
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/Cohere/Cohere-embed-multilingual-light-v3.0\">Cohere-embed-multilingual-light-v3.0</a>"
                }
            ]
        }
    },
    "mistral-7b-instruct-v0.2": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/mistralai/Mistral-7B-Instruct-v0.2\">mistral-7b-instruct-v0.2</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/mistralai/Mistral-7B-Instruct-v0.2\">mistral-7b-instruct-v0.2</a>"
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/mistralai/Mistral-7B-Instruct-v0.2\">mistral-7b-instruct-v0.2</a>"
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/mistralai/Mistral-7B-Instruct-v0.2\">mistral-7b-instruct-v0.2</a>"
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/mistralai/Mistral-7B-Instruct-v0.2\">mistral-7b-instruct-v0.2</a>"
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/mistralai/Mistral-7B-Instruct-v0.2\">mistral-7b-instruct-v0.2</a>"
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/mistralai/Mistral-7B-Instruct-v0.2\">mistral-7b-instruct-v0.2</a>"
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/mistralai/Mistral-7B-Instruct-v0.2\">mistral-7b-instruct-v0.2</a>"
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/mistralai/Mistral-7B-Instruct-v0.2\">mistral-7b-instruct-v0.2</a>",
                    "Core17InstructionRetrieval": 13.03,
                    "News21InstructionRetrieval": 4.81,
                    "Robust04InstructionRetrieval": 12.61
                }
            ]
        }
    },
    "voyage-lite-02-instruct": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://docs.voyageai.com/embeddings/\">voyage-lite-02-instruct</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://docs.voyageai.com/embeddings/\">voyage-lite-02-instruct</a>",
                    "AmazonCounterfactualClassification (en)": 88.31,
                    "AmazonPolarityClassification": 96.32,
                    "AmazonReviewsClassification (en)": 56.25,
                    "Banking77Classification": 88.59,
                    "EmotionClassification": 50.28,
                    "ImdbClassification": 95.75,
                    "MTOPDomainClassification (en)": 97.65,
                    "MTOPIntentClassification (en)": 75.16,
                    "MassiveIntentClassification (en)": 73.97,
                    "MassiveScenarioClassification (en)": 83.99,
                    "ToxicConversationsClassification": 81.75,
                    "TweetSentimentExtractionClassification": 62.98
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://docs.voyageai.com/embeddings/\">voyage-lite-02-instruct</a>",
                    "ArxivClusteringP2P": 51.95,
                    "ArxivClusteringS2S": 42.48,
                    "BiorxivClusteringP2P": 50.15,
                    "BiorxivClusteringS2S": 42.84,
                    "MedrxivClusteringP2P": 47.24,
                    "MedrxivClusteringS2S": 43.48,
                    "RedditClustering": 63.73,
                    "RedditClusteringP2P": 64.09,
                    "StackExchangeClustering": 70.71,
                    "StackExchangeClusteringP2P": 40.34,
                    "TwentyNewsgroupsClustering": 59.56
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://docs.voyageai.com/embeddings/\">voyage-lite-02-instruct</a>",
                    "SprintDuplicateQuestions": 98.07,
                    "TwitterSemEval2015": 74.44,
                    "TwitterURLCorpus": 88.11
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://docs.voyageai.com/embeddings/\">voyage-lite-02-instruct</a>",
                    "AskUbuntuDupQuestions": 63.24,
                    "MindSmallReranking": 31.48,
                    "SciDocsRR": 84.68,
                    "StackOverflowDupQuestions": 53.56
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://docs.voyageai.com/embeddings/\">voyage-lite-02-instruct</a>",
                    "ArguAna": 70.28,
                    "CQADupstackRetrieval": 46.2,
                    "ClimateFEVER": 31.95,
                    "DBPedia": 39.79,
                    "FEVER": 91.35,
                    "FiQA2018": 52.51,
                    "HotpotQA": 75.51,
                    "MSMARCO": 37.93,
                    "NFCorpus": 43.7,
                    "NQ": 64.26,
                    "QuoraRetrieval": 87.62,
                    "SCIDOCS": 20.24,
                    "SciFact": 79.91,
                    "TRECCOVID": 81.02,
                    "Touche2020": 26.8
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://docs.voyageai.com/embeddings/\">voyage-lite-02-instruct</a>",
                    "BIOSSES": 89.7,
                    "SICK-R": 78.44,
                    "STS12": 86.46,
                    "STS13": 87.76,
                    "STS14": 86.6,
                    "STS15": 90.1,
                    "STS16": 86.39,
                    "STS17 (en-en)": 86.98,
                    "STS22 (en)": 76.89,
                    "STSBenchmark": 88.56
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://docs.voyageai.com/embeddings/\">voyage-lite-02-instruct</a>",
                    "SummEval": 31.01
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://docs.voyageai.com/embeddings/\">voyage-lite-02-instruct</a>"
                }
            ]
        }
    },
    "text-search-babbage-001": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/introducing-text-and-code-embeddings\">text-search-babbage-001</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/introducing-text-and-code-embeddings\">text-search-babbage-001</a>"
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/introducing-text-and-code-embeddings\">text-search-babbage-001</a>"
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/introducing-text-and-code-embeddings\">text-search-babbage-001</a>"
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/introducing-text-and-code-embeddings\">text-search-babbage-001</a>"
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/introducing-text-and-code-embeddings\">text-search-babbage-001</a>",
                    "ArguAna": 49.2,
                    "ClimateFEVER": 19.9,
                    "FEVER": 77.0,
                    "FiQA2018": 42.2,
                    "HotpotQA": 63.1,
                    "NFCorpus": 36.7,
                    "QuoraRetrieval": 69.7,
                    "SciFact": 70.4,
                    "TRECCOVID": 58.5,
                    "Touche2020": 29.7
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/introducing-text-and-code-embeddings\">text-search-babbage-001</a>"
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/introducing-text-and-code-embeddings\">text-search-babbage-001</a>"
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/introducing-text-and-code-embeddings\">text-search-babbage-001</a>"
                }
            ]
        }
    },
    "gottbert-base": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/uklfr/gottbert-base\">gottbert-base</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/uklfr/gottbert-base\">gottbert-base</a>"
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/uklfr/gottbert-base\">gottbert-base</a>",
                    "BlurbsClusteringP2P": 34.49,
                    "BlurbsClusteringS2S": 8.37,
                    "TenKGnadClusteringP2P": 33.66,
                    "TenKGnadClusteringS2S": 9.34
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/uklfr/gottbert-base\">gottbert-base</a>"
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/uklfr/gottbert-base\">gottbert-base</a>"
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/uklfr/gottbert-base\">gottbert-base</a>"
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/uklfr/gottbert-base\">gottbert-base</a>"
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/uklfr/gottbert-base\">gottbert-base</a>"
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/uklfr/gottbert-base\">gottbert-base</a>"
                }
            ]
        }
    },
    "OpenSearch-text-hybrid": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://help.aliyun.com/zh/open-search/vector-search-edition/hybrid-retrieval\">OpenSearch-text-hybrid</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://help.aliyun.com/zh/open-search/vector-search-edition/hybrid-retrieval\">OpenSearch-text-hybrid</a>",
                    "AmazonReviewsClassification (zh)": 46.18,
                    "IFlyTek": 51.8,
                    "JDReview": 86.02,
                    "MassiveIntentClassification (zh-CN)": 73.85,
                    "MassiveScenarioClassification (zh-CN)": 77.13,
                    "MultilingualSentiment": 76.35,
                    "OnlineShopping": 93.2,
                    "TNews": 53.06,
                    "Waimai": 88.1
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://help.aliyun.com/zh/open-search/vector-search-edition/hybrid-retrieval\">OpenSearch-text-hybrid</a>",
                    "CLSClusteringP2P": 41.64,
                    "CLSClusteringS2S": 40.33,
                    "ThuNewsClusteringP2P": 69.28,
                    "ThuNewsClusteringS2S": 63.75
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://help.aliyun.com/zh/open-search/vector-search-edition/hybrid-retrieval\">OpenSearch-text-hybrid</a>",
                    "Cmnli": 90.77,
                    "Ocnli": 85.44
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://help.aliyun.com/zh/open-search/vector-search-edition/hybrid-retrieval\">OpenSearch-text-hybrid</a>",
                    "CMedQAv1": 88.99,
                    "CMedQAv2": 89.6,
                    "MMarcoReranking": 28.12,
                    "T2Reranking": 66.38
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://help.aliyun.com/zh/open-search/vector-search-edition/hybrid-retrieval\">OpenSearch-text-hybrid</a>",
                    "CmedqaRetrieval": 46.56,
                    "CovidRetrieval": 84.03,
                    "DuRetrieval": 87.85,
                    "EcomRetrieval": 68.79,
                    "MMarcoRetrieval": 79.93,
                    "MedicalRetrieval": 65.92,
                    "T2Retrieval": 86.76,
                    "VideoRetrieval": 75.43
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://help.aliyun.com/zh/open-search/vector-search-edition/hybrid-retrieval\">OpenSearch-text-hybrid</a>",
                    "AFQMC": 59.11,
                    "ATEC": 58.19,
                    "BQ": 71.07,
                    "LCQMC": 78.27,
                    "PAWSX": 44.98,
                    "QBQTC": 38.69,
                    "STS22 (zh)": 66.53,
                    "STSB": 82.8
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://help.aliyun.com/zh/open-search/vector-search-edition/hybrid-retrieval\">OpenSearch-text-hybrid</a>"
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://help.aliyun.com/zh/open-search/vector-search-edition/hybrid-retrieval\">OpenSearch-text-hybrid</a>"
                }
            ]
        }
    },
    "glove.6B.300d": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/average_word_embeddings_glove.6B.300d\">glove.6B.300d</a>",
                    "BUCC (de-en)": 0.18,
                    "BUCC (fr-en)": 0.19,
                    "BUCC (ru-en)": 0.1,
                    "BUCC (zh-en)": 0.0
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/average_word_embeddings_glove.6B.300d\">glove.6B.300d</a>",
                    "AmazonCounterfactualClassification (en)": 56.91,
                    "AmazonPolarityClassification": 60.32,
                    "AmazonReviewsClassification (en)": 29.67,
                    "Banking77Classification": 67.69,
                    "EmotionClassification": 36.93,
                    "ImdbClassification": 62.57,
                    "MTOPDomainClassification (en)": 79.11,
                    "MTOPIntentClassification (en)": 55.85,
                    "MassiveIntentClassification (en)": 56.19,
                    "MassiveScenarioClassification (en)": 66.03,
                    "ToxicConversationsClassification": 65.4,
                    "TweetSentimentExtractionClassification": 50.8
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/average_word_embeddings_glove.6B.300d\">glove.6B.300d</a>",
                    "ArxivClusteringP2P": 32.56,
                    "ArxivClusteringS2S": 23.14,
                    "BiorxivClusteringP2P": 29.27,
                    "BiorxivClusteringS2S": 19.18,
                    "MedrxivClusteringP2P": 26.12,
                    "MedrxivClusteringS2S": 20.38,
                    "RedditClustering": 28.46,
                    "RedditClusteringP2P": 35.82,
                    "StackExchangeClustering": 35.8,
                    "StackExchangeClusteringP2P": 28.51,
                    "TwentyNewsgroupsClustering": 25.83
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/average_word_embeddings_glove.6B.300d\">glove.6B.300d</a>",
                    "SprintDuplicateQuestions": 86.96,
                    "TwitterSemEval2015": 48.45,
                    "TwitterURLCorpus": 77.35
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/average_word_embeddings_glove.6B.300d\">glove.6B.300d</a>",
                    "AskUbuntuDupQuestions": 49.57,
                    "MindSmallReranking": 27.01,
                    "SciDocsRR": 62.56,
                    "StackOverflowDupQuestions": 34.03
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/average_word_embeddings_glove.6B.300d\">glove.6B.300d</a>",
                    "ArguAna": 36.3,
                    "CQADupstackRetrieval": 15.47,
                    "ClimateFEVER": 14.44,
                    "DBPedia": 18.28,
                    "FEVER": 14.99,
                    "FiQA2018": 10.09,
                    "HotpotQA": 19.18,
                    "MSMARCO": 9.6,
                    "NFCorpus": 13.87,
                    "NQ": 12.87,
                    "QuoraRetrieval": 71.32,
                    "SCIDOCS": 8.04,
                    "SciFact": 29.58,
                    "TRECCOVID": 36.22,
                    "Touche2020": 13.99
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/average_word_embeddings_glove.6B.300d\">glove.6B.300d</a>",
                    "BIOSSES": 44.93,
                    "SICK-R": 55.43,
                    "STS12": 54.64,
                    "STS13": 69.16,
                    "STS14": 60.81,
                    "STS15": 72.31,
                    "STS16": 65.34,
                    "STS17 (en-en)": 77.95,
                    "STS22 (en)": 56.35,
                    "STSBenchmark": 61.54
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/average_word_embeddings_glove.6B.300d\">glove.6B.300d</a>",
                    "SummEval": 28.87
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/average_word_embeddings_glove.6B.300d\">glove.6B.300d</a>"
                }
            ]
        }
    },
    "komninos": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/average_word_embeddings_komninos\">komninos</a>",
                    "BUCC (de-en)": 0.18,
                    "BUCC (fr-en)": 0.08,
                    "BUCC (ru-en)": 0.15,
                    "BUCC (zh-en)": 0.05,
                    "Tatoeba (afr-eng)": 4.82,
                    "Tatoeba (amh-eng)": 1.18,
                    "Tatoeba (ang-eng)": 8.54,
                    "Tatoeba (ara-eng)": 0.63,
                    "Tatoeba (arq-eng)": 0.4,
                    "Tatoeba (arz-eng)": 0.63,
                    "Tatoeba (ast-eng)": 11.69,
                    "Tatoeba (awa-eng)": 0.0,
                    "Tatoeba (aze-eng)": 3.22,
                    "Tatoeba (bel-eng)": 1.75,
                    "Tatoeba (ben-eng)": 0.2,
                    "Tatoeba (ber-eng)": 7.0,
                    "Tatoeba (bos-eng)": 9.31,
                    "Tatoeba (bre-eng)": 4.17,
                    "Tatoeba (bul-eng)": 1.29,
                    "Tatoeba (cat-eng)": 7.73,
                    "Tatoeba (cbk-eng)": 5.61,
                    "Tatoeba (ceb-eng)": 4.88,
                    "Tatoeba (ces-eng)": 3.55,
                    "Tatoeba (cha-eng)": 19.29,
                    "Tatoeba (cmn-eng)": 0.5,
                    "Tatoeba (cor-eng)": 4.15,
                    "Tatoeba (csb-eng)": 5.69,
                    "Tatoeba (cym-eng)": 8.4,
                    "Tatoeba (dan-eng)": 6.99,
                    "Tatoeba (deu-eng)": 3.67,
                    "Tatoeba (dsb-eng)": 5.33,
                    "Tatoeba (dtp-eng)": 4.25,
                    "Tatoeba (ell-eng)": 0.63,
                    "Tatoeba (epo-eng)": 2.45,
                    "Tatoeba (est-eng)": 2.69,
                    "Tatoeba (eus-eng)": 4.69,
                    "Tatoeba (fao-eng)": 7.61,
                    "Tatoeba (fin-eng)": 3.36,
                    "Tatoeba (fra-eng)": 7.0,
                    "Tatoeba (fry-eng)": 12.36,
                    "Tatoeba (gla-eng)": 3.07,
                    "Tatoeba (gle-eng)": 4.81,
                    "Tatoeba (glg-eng)": 8.12,
                    "Tatoeba (gsw-eng)": 18.87,
                    "Tatoeba (heb-eng)": 0.68,
                    "Tatoeba (hin-eng)": 0.1,
                    "Tatoeba (hrv-eng)": 5.41,
                    "Tatoeba (hsb-eng)": 6.32,
                    "Tatoeba (hun-eng)": 3.42,
                    "Tatoeba (hye-eng)": 0.97,
                    "Tatoeba (ido-eng)": 7.1,
                    "Tatoeba (ile-eng)": 13.61,
                    "Tatoeba (ina-eng)": 8.57,
                    "Tatoeba (ind-eng)": 7.26,
                    "Tatoeba (isl-eng)": 4.09,
                    "Tatoeba (ita-eng)": 5.54,
                    "Tatoeba (jav-eng)": 11.43,
                    "Tatoeba (jpn-eng)": 0.2,
                    "Tatoeba (kab-eng)": 2.71,
                    "Tatoeba (kat-eng)": 1.11,
                    "Tatoeba (kaz-eng)": 1.17,
                    "Tatoeba (khm-eng)": 0.55,
                    "Tatoeba (kor-eng)": 0.5,
                    "Tatoeba (kur-eng)": 8.55,
                    "Tatoeba (kzj-eng)": 4.61,
                    "Tatoeba (lat-eng)": 4.07,
                    "Tatoeba (lfn-eng)": 2.83,
                    "Tatoeba (lit-eng)": 0.95,
                    "Tatoeba (lvs-eng)": 3.25,
                    "Tatoeba (mal-eng)": 0.29,
                    "Tatoeba (mar-eng)": 0.2,
                    "Tatoeba (max-eng)": 14.53,
                    "Tatoeba (mhr-eng)": 0.2,
                    "Tatoeba (mkd-eng)": 0.2,
                    "Tatoeba (mon-eng)": 1.1,
                    "Tatoeba (nds-eng)": 10.37,
                    "Tatoeba (nld-eng)": 9.5,
                    "Tatoeba (nno-eng)": 4.49,
                    "Tatoeba (nob-eng)": 4.95,
                    "Tatoeba (nov-eng)": 14.53,
                    "Tatoeba (oci-eng)": 5.8,
                    "Tatoeba (orv-eng)": 0.24,
                    "Tatoeba (pam-eng)": 6.65,
                    "Tatoeba (pes-eng)": 0.5,
                    "Tatoeba (pms-eng)": 8.05,
                    "Tatoeba (pol-eng)": 5.13,
                    "Tatoeba (por-eng)": 5.87,
                    "Tatoeba (ron-eng)": 6.76,
                    "Tatoeba (rus-eng)": 0.2,
                    "Tatoeba (slk-eng)": 4.23,
                    "Tatoeba (slv-eng)": 6.05,
                    "Tatoeba (spa-eng)": 5.03,
                    "Tatoeba (sqi-eng)": 4.36,
                    "Tatoeba (srp-eng)": 1.77,
                    "Tatoeba (swe-eng)": 6.72,
                    "Tatoeba (swg-eng)": 8.54,
                    "Tatoeba (swh-eng)": 11.49,
                    "Tatoeba (tam-eng)": 1.3,
                    "Tatoeba (tat-eng)": 0.77,
                    "Tatoeba (tel-eng)": 0.85,
                    "Tatoeba (tgl-eng)": 2.61,
                    "Tatoeba (tha-eng)": 0.69,
                    "Tatoeba (tuk-eng)": 5.76,
                    "Tatoeba (tur-eng)": 5.24,
                    "Tatoeba (tzl-eng)": 15.51,
                    "Tatoeba (uig-eng)": 0.6,
                    "Tatoeba (ukr-eng)": 1.23,
                    "Tatoeba (urd-eng)": 0.4,
                    "Tatoeba (uzb-eng)": 4.73,
                    "Tatoeba (vie-eng)": 6.55,
                    "Tatoeba (war-eng)": 4.12,
                    "Tatoeba (wuu-eng)": 0.2,
                    "Tatoeba (xho-eng)": 4.33,
                    "Tatoeba (yid-eng)": 0.59,
                    "Tatoeba (yue-eng)": 0.5,
                    "Tatoeba (zsm-eng)": 7.27
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/average_word_embeddings_komninos\">komninos</a>",
                    "AmazonCounterfactualClassification (en)": 60.54,
                    "AmazonPolarityClassification": 59.59,
                    "AmazonReviewsClassification (en)": 31.01,
                    "Banking77Classification": 67.05,
                    "EmotionClassification": 33.18,
                    "ImdbClassification": 63.98,
                    "MTOPDomainClassification (en)": 78.57,
                    "MTOPIntentClassification (en)": 57.07,
                    "MassiveIntentClassification (en)": 57.21,
                    "MassiveScenarioClassification (en)": 66.11,
                    "ToxicConversationsClassification": 67.76,
                    "TweetSentimentExtractionClassification": 49.68
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/average_word_embeddings_komninos\">komninos</a>",
                    "ArxivClusteringP2P": 34.73,
                    "ArxivClusteringS2S": 26.01,
                    "BiorxivClusteringP2P": 29.76,
                    "BiorxivClusteringS2S": 20.71,
                    "BlurbsClusteringP2P": 11.37,
                    "BlurbsClusteringS2S": 8.01,
                    "MedrxivClusteringP2P": 26.65,
                    "MedrxivClusteringS2S": 21.5,
                    "RedditClustering": 28.84,
                    "RedditClusteringP2P": 7.37,
                    "StackExchangeClustering": 39.04,
                    "StackExchangeClusteringP2P": 30.23,
                    "TenKGnadClusteringP2P": 15.89,
                    "TenKGnadClusteringS2S": 4.84,
                    "TwentyNewsgroupsClustering": 27.42
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/average_word_embeddings_komninos\">komninos</a>",
                    "SprintDuplicateQuestions": 85.55,
                    "TwitterSemEval2015": 53.85,
                    "TwitterURLCorpus": 79.41
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/average_word_embeddings_komninos\">komninos</a>",
                    "AskUbuntuDupQuestions": 50.88,
                    "MindSmallReranking": 28.92,
                    "SciDocsRR": 63.55,
                    "StackOverflowDupQuestions": 35.65
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/average_word_embeddings_komninos\">komninos</a>",
                    "ArguAna": 30.96,
                    "CQADupstackRetrieval": 16.79,
                    "ClimateFEVER": 14.87,
                    "DBPedia": 15.88,
                    "FEVER": 15.56,
                    "FiQA2018": 10.49,
                    "HotpotQA": 20.77,
                    "MSMARCO": 9.75,
                    "NFCorpus": 11.79,
                    "NQ": 12.75,
                    "QuoraRetrieval": 71.57,
                    "SCIDOCS": 8.47,
                    "SciFact": 29.53,
                    "TRECCOVID": 35.92,
                    "Touche2020": 13.17
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/average_word_embeddings_komninos\">komninos</a>",
                    "BIOSSES": 50.25,
                    "SICK-R": 55.49,
                    "STS12": 53.51,
                    "STS13": 70.8,
                    "STS14": 63.56,
                    "STS15": 74.08,
                    "STS16": 64.6,
                    "STS17 (ar-ar)": 13.78,
                    "STS17 (en-ar)": 9.08,
                    "STS17 (en-de)": -3.11,
                    "STS17 (en-en)": 76.91,
                    "STS17 (en-tr)": -0.45,
                    "STS17 (es-en)": -8.18,
                    "STS17 (es-es)": 48.23,
                    "STS17 (fr-en)": 5.81,
                    "STS17 (it-en)": 3.64,
                    "STS17 (ko-ko)": 2.54,
                    "STS17 (nl-en)": 0.44,
                    "STS22 (ar)": 32.42,
                    "STS22 (de)": 33.04,
                    "STS22 (de-en)": 28.65,
                    "STS22 (de-fr)": 14.77,
                    "STS22 (de-pl)": 11.21,
                    "STS22 (en)": 53.89,
                    "STS22 (es)": 48.53,
                    "STS22 (es-en)": 26.97,
                    "STS22 (es-it)": 41.1,
                    "STS22 (fr)": 49.43,
                    "STS22 (fr-pl)": 39.44,
                    "STS22 (it)": 57.77,
                    "STS22 (pl)": 12.47,
                    "STS22 (pl-en)": 45.55,
                    "STS22 (ru)": 19.44,
                    "STS22 (tr)": 47.38,
                    "STS22 (zh)": 4.78,
                    "STS22 (zh-en)": 14.05,
                    "STSBenchmark": 61.55
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/average_word_embeddings_komninos\">komninos</a>",
                    "SummEval": 30.49
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/average_word_embeddings_komninos\">komninos</a>"
                }
            ]
        }
    },
    "bge-large-en-v1.5": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/BAAI/bge-large-en-v1.5\">bge-large-en-v1.5</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/BAAI/bge-large-en-v1.5\">bge-large-en-v1.5</a>"
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/BAAI/bge-large-en-v1.5\">bge-large-en-v1.5</a>"
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/BAAI/bge-large-en-v1.5\">bge-large-en-v1.5</a>"
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/BAAI/bge-large-en-v1.5\">bge-large-en-v1.5</a>"
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/BAAI/bge-large-en-v1.5\">bge-large-en-v1.5</a>",
                    "AILACasedocs": 25.15,
                    "AILAStatutes": 20.74,
                    "GerDaLIRSmall": 3.96,
                    "LeCaRDv2": 22.68,
                    "LegalBenchConsumerContractsQA": 73.52,
                    "LegalBenchCorporateLobbying": 91.51,
                    "LegalQuAD": 16.22,
                    "LegalSummarization": 59.99
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/BAAI/bge-large-en-v1.5\">bge-large-en-v1.5</a>"
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/BAAI/bge-large-en-v1.5\">bge-large-en-v1.5</a>"
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/BAAI/bge-large-en-v1.5\">bge-large-en-v1.5</a>"
                }
            ]
        }
    },
    "tart-full-flan-t5-xl": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/facebook/tart-full-flan-t5-xl\">tart-full-flan-t5-xl</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/facebook/tart-full-flan-t5-xl\">tart-full-flan-t5-xl</a>"
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/facebook/tart-full-flan-t5-xl\">tart-full-flan-t5-xl</a>"
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/facebook/tart-full-flan-t5-xl\">tart-full-flan-t5-xl</a>"
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/facebook/tart-full-flan-t5-xl\">tart-full-flan-t5-xl</a>"
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/facebook/tart-full-flan-t5-xl\">tart-full-flan-t5-xl</a>"
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/facebook/tart-full-flan-t5-xl\">tart-full-flan-t5-xl</a>"
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/facebook/tart-full-flan-t5-xl\">tart-full-flan-t5-xl</a>"
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/facebook/tart-full-flan-t5-xl\">tart-full-flan-t5-xl</a>",
                    "Core17InstructionRetrieval": 2.82,
                    "News21InstructionRetrieval": 1.99,
                    "Robust04InstructionRetrieval": -0.72
                }
            ]
        }
    },
    "gtr-t5-base": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/gtr-t5-base\">gtr-t5-base</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/gtr-t5-base\">gtr-t5-base</a>",
                    "AmazonCounterfactualClassification (en)": 69.33,
                    "AmazonPolarityClassification": 67.82,
                    "AmazonReviewsClassification (en)": 38.48,
                    "Banking77Classification": 79.26,
                    "EmotionClassification": 42.2,
                    "ImdbClassification": 65.99,
                    "MTOPDomainClassification (en)": 92.42,
                    "MTOPIntentClassification (en)": 62.44,
                    "MassiveIntentClassification (en)": 67.05,
                    "MassiveScenarioClassification (en)": 75.4,
                    "ToxicConversationsClassification": 66.6,
                    "TweetSentimentExtractionClassification": 56.02
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/gtr-t5-base\">gtr-t5-base</a>",
                    "ArxivClusteringP2P": 35.49,
                    "ArxivClusteringS2S": 27.18,
                    "BiorxivClusteringP2P": 27.66,
                    "BiorxivClusteringS2S": 23.25,
                    "MedrxivClusteringP2P": 27.57,
                    "MedrxivClusteringS2S": 25.13,
                    "RedditClustering": 56.13,
                    "RedditClusteringP2P": 58.53,
                    "StackExchangeClustering": 64.21,
                    "StackExchangeClusteringP2P": 33.01,
                    "TwentyNewsgroupsClustering": 46.72
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/gtr-t5-base\">gtr-t5-base</a>",
                    "SprintDuplicateQuestions": 94.55,
                    "TwitterSemEval2015": 72.23,
                    "TwitterURLCorpus": 84.77
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/gtr-t5-base\">gtr-t5-base</a>",
                    "AskUbuntuDupQuestions": 60.86,
                    "MindSmallReranking": 31.33,
                    "SciDocsRR": 73.71,
                    "StackOverflowDupQuestions": 51.01
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/gtr-t5-base\">gtr-t5-base</a>",
                    "ArguAna": 50.83,
                    "CQADupstackRetrieval": 34.55,
                    "ClimateFEVER": 24.88,
                    "DBPedia": 35.24,
                    "FEVER": 68.93,
                    "FiQA2018": 35.15,
                    "HotpotQA": 54.93,
                    "MSMARCO": 41.16,
                    "NFCorpus": 30.22,
                    "NQ": 50.47,
                    "QuoraRetrieval": 87.98,
                    "SCIDOCS": 14.0,
                    "SciFact": 59.74,
                    "TRECCOVID": 56.05,
                    "Touche2020": 25.89
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/gtr-t5-base\">gtr-t5-base</a>",
                    "BIOSSES": 79.0,
                    "SICK-R": 71.45,
                    "STS12": 68.59,
                    "STS13": 79.09,
                    "STS14": 74.64,
                    "STS15": 84.85,
                    "STS16": 81.57,
                    "STS17 (en-en)": 85.8,
                    "STS22 (en)": 66.17,
                    "STSBenchmark": 79.58
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/gtr-t5-base\">gtr-t5-base</a>",
                    "SummEval": 29.67
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/gtr-t5-base\">gtr-t5-base</a>"
                }
            ]
        }
    },
    "nb-bert-base": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/NbAiLab/nb-bert-base\">nb-bert-base</a>",
                    "BornholmBitextMining": 9.88
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/NbAiLab/nb-bert-base\">nb-bert-base</a>",
                    "AngryTweetsClassification": 52.14,
                    "DKHateClassification": 61.73,
                    "DanishPoliticalCommentsClassification": 34.84,
                    "LccSentimentClassification": 51.4,
                    "MassiveIntentClassification (da)": 56.69,
                    "MassiveIntentClassification (nb)": 60.67,
                    "MassiveIntentClassification (sv)": 53.89,
                    "MassiveScenarioClassification (da)": 61.93,
                    "MassiveScenarioClassification (nb)": 67.31,
                    "MassiveScenarioClassification (sv)": 55.37,
                    "NoRecClassification": 51.32,
                    "NordicLangClassification": 84.69,
                    "NorwegianParliament": 57.41,
                    "ScalaDaClassification": 57.99,
                    "ScalaNbClassification": 62.25
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/NbAiLab/nb-bert-base\">nb-bert-base</a>"
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/NbAiLab/nb-bert-base\">nb-bert-base</a>"
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/NbAiLab/nb-bert-base\">nb-bert-base</a>"
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/NbAiLab/nb-bert-base\">nb-bert-base</a>"
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/NbAiLab/nb-bert-base\">nb-bert-base</a>"
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/NbAiLab/nb-bert-base\">nb-bert-base</a>"
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/NbAiLab/nb-bert-base\">nb-bert-base</a>"
                }
            ]
        }
    },
    "voyage-2": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://docs.voyageai.com/embeddings/\">voyage-2</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://docs.voyageai.com/embeddings/\">voyage-2</a>",
                    "AmazonReviewsClassification (fr)": 37.26,
                    "MTOPDomainClassification (fr)": 79.79,
                    "MTOPIntentClassification (fr)": 45.62,
                    "MasakhaNEWSClassification (fra)": 80.19,
                    "MassiveIntentClassification (fr)": 53.7,
                    "MassiveScenarioClassification (fr)": 62.46
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://docs.voyageai.com/embeddings/\">voyage-2</a>",
                    "AlloProfClusteringP2P": 57.96,
                    "AlloProfClusteringS2S": 41.65,
                    "HALClusteringS2S": 24.84,
                    "MLSUMClusteringP2P": 45.08,
                    "MLSUMClusteringS2S": 38.77,
                    "MasakhaNEWSClusteringP2P (fra)": 48.54,
                    "MasakhaNEWSClusteringS2S (fra)": 36.33
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://docs.voyageai.com/embeddings/\">voyage-2</a>",
                    "OpusparcusPC (fr)": 89.76,
                    "PawsX (fr)": 58.96
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://docs.voyageai.com/embeddings/\">voyage-2</a>",
                    "AlloprofReranking": 63.54,
                    "SyntecReranking": 82.65
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://docs.voyageai.com/embeddings/\">voyage-2</a>",
                    "AlloprofRetrieval": 45.5,
                    "BSARDRetrieval": 0.15,
                    "MintakaRetrieval (fr)": 15.51,
                    "SyntecRetrieval": 75.83,
                    "XPQARetrieval (fr)": 67.07
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://docs.voyageai.com/embeddings/\">voyage-2</a>",
                    "SICKFr": 68.51,
                    "STS22 (fr)": 70.51,
                    "STSBenchmarkMultilingualSTS (fr)": 76.43
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://docs.voyageai.com/embeddings/\">voyage-2</a>",
                    "SummEvalFr": 30.88
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://docs.voyageai.com/embeddings/\">voyage-2</a>"
                }
            ]
        }
    },
    "text-search-ada-001": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/introducing-text-and-code-embeddings\">text-search-ada-001</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/introducing-text-and-code-embeddings\">text-search-ada-001</a>"
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/introducing-text-and-code-embeddings\">text-search-ada-001</a>",
                    "BiorxivClusteringS2S": 26.05,
                    "MedrxivClusteringS2S": 25.67,
                    "TwentyNewsgroupsClustering": 44.92
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/introducing-text-and-code-embeddings\">text-search-ada-001</a>"
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/introducing-text-and-code-embeddings\">text-search-ada-001</a>"
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/introducing-text-and-code-embeddings\">text-search-ada-001</a>",
                    "ArguAna": 46.91,
                    "ClimateFEVER": 18.5,
                    "DBPedia": 36.2,
                    "FEVER": 72.1,
                    "FiQA2018": 38.41,
                    "HotpotQA": 59.39,
                    "MSMARCO": 37.94,
                    "NFCorpus": 33.17,
                    "NQ": 42.81,
                    "QuoraRetrieval": 70.57,
                    "SCIDOCS": 14.83,
                    "SciFact": 67.25,
                    "TRECCOVID": 72.43,
                    "Touche2020": 28.68
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/introducing-text-and-code-embeddings\">text-search-ada-001</a>"
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/introducing-text-and-code-embeddings\">text-search-ada-001</a>"
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/introducing-text-and-code-embeddings\">text-search-ada-001</a>"
                }
            ]
        }
    },
    "gtr-t5-xl": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/gtr-t5-xl\">gtr-t5-xl</a>",
                    "BUCC (de-en)": 90.99,
                    "BUCC (fr-en)": 88.55,
                    "BUCC (ru-en)": 2.07,
                    "BUCC (zh-en)": 1.49,
                    "Tatoeba (afr-eng)": 33.47,
                    "Tatoeba (amh-eng)": 0.01,
                    "Tatoeba (ang-eng)": 30.74,
                    "Tatoeba (ara-eng)": 0.47,
                    "Tatoeba (arq-eng)": 0.34,
                    "Tatoeba (arz-eng)": 0.14,
                    "Tatoeba (ast-eng)": 51.74,
                    "Tatoeba (awa-eng)": 0.49,
                    "Tatoeba (aze-eng)": 7.43,
                    "Tatoeba (bel-eng)": 3.45,
                    "Tatoeba (ben-eng)": 0.06,
                    "Tatoeba (ber-eng)": 5.79,
                    "Tatoeba (bos-eng)": 17.43,
                    "Tatoeba (bre-eng)": 5.69,
                    "Tatoeba (bul-eng)": 7.55,
                    "Tatoeba (cat-eng)": 48.06,
                    "Tatoeba (cbk-eng)": 54.56,
                    "Tatoeba (ceb-eng)": 8.72,
                    "Tatoeba (ces-eng)": 8.76,
                    "Tatoeba (cha-eng)": 27.56,
                    "Tatoeba (cmn-eng)": 2.26,
                    "Tatoeba (cor-eng)": 3.69,
                    "Tatoeba (csb-eng)": 13.18,
                    "Tatoeba (cym-eng)": 6.97,
                    "Tatoeba (dan-eng)": 47.36,
                    "Tatoeba (deu-eng)": 91.54,
                    "Tatoeba (dsb-eng)": 13.2,
                    "Tatoeba (dtp-eng)": 4.54,
                    "Tatoeba (ell-eng)": 0.55,
                    "Tatoeba (epo-eng)": 27.86,
                    "Tatoeba (est-eng)": 5.13,
                    "Tatoeba (eus-eng)": 10.23,
                    "Tatoeba (fao-eng)": 21.44,
                    "Tatoeba (fin-eng)": 6.62,
                    "Tatoeba (fra-eng)": 79.66,
                    "Tatoeba (fry-eng)": 32.92,
                    "Tatoeba (gla-eng)": 2.87,
                    "Tatoeba (gle-eng)": 3.26,
                    "Tatoeba (glg-eng)": 63.81,
                    "Tatoeba (gsw-eng)": 29.71,
                    "Tatoeba (heb-eng)": 0.33,
                    "Tatoeba (hin-eng)": 0.25,
                    "Tatoeba (hrv-eng)": 17.16,
                    "Tatoeba (hsb-eng)": 12.02,
                    "Tatoeba (hun-eng)": 7.21,
                    "Tatoeba (hye-eng)": 0.78,
                    "Tatoeba (ido-eng)": 40.83,
                    "Tatoeba (ile-eng)": 54.95,
                    "Tatoeba (ina-eng)": 72.28,
                    "Tatoeba (ind-eng)": 30.95,
                    "Tatoeba (isl-eng)": 11.29,
                    "Tatoeba (ita-eng)": 73.83,
                    "Tatoeba (jav-eng)": 8.66,
                    "Tatoeba (jpn-eng)": 0.61,
                    "Tatoeba (kab-eng)": 1.78,
                    "Tatoeba (kat-eng)": 0.79,
                    "Tatoeba (kaz-eng)": 0.95,
                    "Tatoeba (khm-eng)": 0.49,
                    "Tatoeba (kor-eng)": 1.87,
                    "Tatoeba (kur-eng)": 10.91,
                    "Tatoeba (kzj-eng)": 5.72,
                    "Tatoeba (lat-eng)": 18.24,
                    "Tatoeba (lfn-eng)": 43.49,
                    "Tatoeba (lit-eng)": 7.13,
                    "Tatoeba (lvs-eng)": 7.04,
                    "Tatoeba (mal-eng)": 0.44,
                    "Tatoeba (mar-eng)": 0.03,
                    "Tatoeba (max-eng)": 18.99,
                    "Tatoeba (mhr-eng)": 1.11,
                    "Tatoeba (mkd-eng)": 2.49,
                    "Tatoeba (mon-eng)": 2.01,
                    "Tatoeba (nds-eng)": 39.96,
                    "Tatoeba (nld-eng)": 58.86,
                    "Tatoeba (nno-eng)": 29.07,
                    "Tatoeba (nob-eng)": 40.25,
                    "Tatoeba (nov-eng)": 50.19,
                    "Tatoeba (oci-eng)": 30.72,
                    "Tatoeba (orv-eng)": 0.85,
                    "Tatoeba (pam-eng)": 7.21,
                    "Tatoeba (pes-eng)": 0.53,
                    "Tatoeba (pms-eng)": 31.07,
                    "Tatoeba (pol-eng)": 18.06,
                    "Tatoeba (por-eng)": 81.92,
                    "Tatoeba (ron-eng)": 62.6,
                    "Tatoeba (rus-eng)": 22.24,
                    "Tatoeba (slk-eng)": 10.59,
                    "Tatoeba (slv-eng)": 11.4,
                    "Tatoeba (spa-eng)": 85.78,
                    "Tatoeba (sqi-eng)": 14.92,
                    "Tatoeba (srp-eng)": 9.87,
                    "Tatoeba (swe-eng)": 55.08,
                    "Tatoeba (swg-eng)": 32.66,
                    "Tatoeba (swh-eng)": 7.64,
                    "Tatoeba (tam-eng)": 0.49,
                    "Tatoeba (tat-eng)": 1.28,
                    "Tatoeba (tel-eng)": 0.45,
                    "Tatoeba (tgl-eng)": 23.63,
                    "Tatoeba (tha-eng)": 0.61,
                    "Tatoeba (tuk-eng)": 5.71,
                    "Tatoeba (tur-eng)": 8.25,
                    "Tatoeba (tzl-eng)": 28.4,
                    "Tatoeba (uig-eng)": 0.57,
                    "Tatoeba (ukr-eng)": 5.69,
                    "Tatoeba (urd-eng)": 0.0,
                    "Tatoeba (uzb-eng)": 4.19,
                    "Tatoeba (vie-eng)": 9.07,
                    "Tatoeba (war-eng)": 12.31,
                    "Tatoeba (wuu-eng)": 1.38,
                    "Tatoeba (xho-eng)": 7.6,
                    "Tatoeba (yid-eng)": 0.41,
                    "Tatoeba (yue-eng)": 1.31,
                    "Tatoeba (zsm-eng)": 29.74
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/gtr-t5-xl\">gtr-t5-xl</a>",
                    "AmazonCounterfactualClassification (de)": 59.79,
                    "AmazonCounterfactualClassification (en)": 68.6,
                    "AmazonCounterfactualClassification (en-ext)": 69.03,
                    "AmazonCounterfactualClassification (ja)": 50.59,
                    "AmazonPolarityClassification": 74.58,
                    "AmazonReviewsClassification (de)": 35.06,
                    "AmazonReviewsClassification (en)": 38.2,
                    "AmazonReviewsClassification (es)": 37.18,
                    "AmazonReviewsClassification (fr)": 35.48,
                    "AmazonReviewsClassification (ja)": 22.24,
                    "AmazonReviewsClassification (zh)": 21.89,
                    "Banking77Classification": 82.22,
                    "EmotionClassification": 45.54,
                    "ImdbClassification": 68.15,
                    "MTOPDomainClassification (de)": 85.42,
                    "MTOPDomainClassification (en)": 93.6,
                    "MTOPDomainClassification (es)": 88.2,
                    "MTOPDomainClassification (fr)": 85.05,
                    "MTOPDomainClassification (hi)": 21.74,
                    "MTOPDomainClassification (th)": 15.87,
                    "MTOPIntentClassification (de)": 55.75,
                    "MTOPIntentClassification (en)": 65.93,
                    "MTOPIntentClassification (es)": 57.73,
                    "MTOPIntentClassification (fr)": 51.07,
                    "MTOPIntentClassification (hi)": 3.19,
                    "MTOPIntentClassification (th)": 5.55,
                    "MassiveIntentClassification (af)": 42.6,
                    "MassiveIntentClassification (am)": 2.12,
                    "MassiveIntentClassification (ar)": 4.64,
                    "MassiveIntentClassification (az)": 35.05,
                    "MassiveIntentClassification (bn)": 2.84,
                    "MassiveIntentClassification (cy)": 36.19,
                    "MassiveIntentClassification (da)": 48.42,
                    "MassiveIntentClassification (de)": 55.49,
                    "MassiveIntentClassification (el)": 10.14,
                    "MassiveIntentClassification (en)": 70.23,
                    "MassiveIntentClassification (es)": 56.72,
                    "MassiveIntentClassification (fa)": 3.54,
                    "MassiveIntentClassification (fi)": 37.13,
                    "MassiveIntentClassification (fr)": 57.67,
                    "MassiveIntentClassification (he)": 2.56,
                    "MassiveIntentClassification (hi)": 3.24,
                    "MassiveIntentClassification (hu)": 34.22,
                    "MassiveIntentClassification (hy)": 3.01,
                    "MassiveIntentClassification (id)": 46.54,
                    "MassiveIntentClassification (is)": 34.77,
                    "MassiveIntentClassification (it)": 54.13,
                    "MassiveIntentClassification (ja)": 4.27,
                    "MassiveIntentClassification (jv)": 36.97,
                    "MassiveIntentClassification (ka)": 2.72,
                    "MassiveIntentClassification (km)": 5.35,
                    "MassiveIntentClassification (kn)": 3.17,
                    "MassiveIntentClassification (ko)": 2.64,
                    "MassiveIntentClassification (lv)": 36.32,
                    "MassiveIntentClassification (ml)": 3.18,
                    "MassiveIntentClassification (mn)": 22.85,
                    "MassiveIntentClassification (ms)": 42.87,
                    "MassiveIntentClassification (my)": 4.04,
                    "MassiveIntentClassification (nb)": 45.87,
                    "MassiveIntentClassification (nl)": 49.53,
                    "MassiveIntentClassification (pl)": 42.64,
                    "MassiveIntentClassification (pt)": 57.03,
                    "MassiveIntentClassification (ro)": 49.95,
                    "MassiveIntentClassification (ru)": 36.58,
                    "MassiveIntentClassification (sl)": 39.44,
                    "MassiveIntentClassification (sq)": 41.78,
                    "MassiveIntentClassification (sv)": 47.95,
                    "MassiveIntentClassification (sw)": 35.85,
                    "MassiveIntentClassification (ta)": 2.32,
                    "MassiveIntentClassification (te)": 2.2,
                    "MassiveIntentClassification (th)": 3.74,
                    "MassiveIntentClassification (tl)": 43.12,
                    "MassiveIntentClassification (tr)": 35.24,
                    "MassiveIntentClassification (ur)": 3.0,
                    "MassiveIntentClassification (vi)": 30.01,
                    "MassiveIntentClassification (zh-CN)": 1.72,
                    "MassiveIntentClassification (zh-TW)": 3.35,
                    "MassiveScenarioClassification (af)": 52.54,
                    "MassiveScenarioClassification (am)": 6.3,
                    "MassiveScenarioClassification (ar)": 11.96,
                    "MassiveScenarioClassification (az)": 40.17,
                    "MassiveScenarioClassification (bn)": 8.29,
                    "MassiveScenarioClassification (cy)": 42.24,
                    "MassiveScenarioClassification (da)": 57.28,
                    "MassiveScenarioClassification (de)": 68.09,
                    "MassiveScenarioClassification (el)": 16.66,
                    "MassiveScenarioClassification (en)": 75.94,
                    "MassiveScenarioClassification (es)": 64.32,
                    "MassiveScenarioClassification (fa)": 6.9,
                    "MassiveScenarioClassification (fi)": 43.96,
                    "MassiveScenarioClassification (fr)": 66.72,
                    "MassiveScenarioClassification (he)": 7.51,
                    "MassiveScenarioClassification (hi)": 7.82,
                    "MassiveScenarioClassification (hu)": 42.16,
                    "MassiveScenarioClassification (hy)": 9.33,
                    "MassiveScenarioClassification (id)": 53.54,
                    "MassiveScenarioClassification (is)": 42.84,
                    "MassiveScenarioClassification (it)": 62.44,
                    "MassiveScenarioClassification (ja)": 7.29,
                    "MassiveScenarioClassification (jv)": 43.13,
                    "MassiveScenarioClassification (ka)": 7.63,
                    "MassiveScenarioClassification (km)": 9.08,
                    "MassiveScenarioClassification (kn)": 8.1,
                    "MassiveScenarioClassification (ko)": 6.35,
                    "MassiveScenarioClassification (lv)": 40.24,
                    "MassiveScenarioClassification (ml)": 7.65,
                    "MassiveScenarioClassification (mn)": 27.98,
                    "MassiveScenarioClassification (ms)": 52.41,
                    "MassiveScenarioClassification (my)": 9.21,
                    "MassiveScenarioClassification (nb)": 54.44,
                    "MassiveScenarioClassification (nl)": 60.35,
                    "MassiveScenarioClassification (pl)": 49.97,
                    "MassiveScenarioClassification (pt)": 62.78,
                    "MassiveScenarioClassification (ro)": 59.62,
                    "MassiveScenarioClassification (ru)": 43.44,
                    "MassiveScenarioClassification (sl)": 44.79,
                    "MassiveScenarioClassification (sq)": 50.84,
                    "MassiveScenarioClassification (sv)": 58.21,
                    "MassiveScenarioClassification (sw)": 44.63,
                    "MassiveScenarioClassification (ta)": 7.95,
                    "MassiveScenarioClassification (te)": 7.5,
                    "MassiveScenarioClassification (th)": 8.79,
                    "MassiveScenarioClassification (tl)": 53.54,
                    "MassiveScenarioClassification (tr)": 42.47,
                    "MassiveScenarioClassification (ur)": 9.58,
                    "MassiveScenarioClassification (vi)": 34.68,
                    "MassiveScenarioClassification (zh-CN)": 5.21,
                    "MassiveScenarioClassification (zh-TW)": 8.77,
                    "ToxicConversationsClassification": 67.56,
                    "TweetSentimentExtractionClassification": 54.77
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/gtr-t5-xl\">gtr-t5-xl</a>",
                    "ArxivClusteringP2P": 37.9,
                    "ArxivClusteringS2S": 30.45,
                    "BiorxivClusteringP2P": 30.52,
                    "BiorxivClusteringS2S": 26.06,
                    "MedrxivClusteringP2P": 28.69,
                    "MedrxivClusteringS2S": 26.69,
                    "RedditClustering": 61.34,
                    "RedditClusteringP2P": 61.11,
                    "StackExchangeClustering": 69.95,
                    "StackExchangeClusteringP2P": 32.73,
                    "TwentyNewsgroupsClustering": 51.15
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/gtr-t5-xl\">gtr-t5-xl</a>",
                    "SprintDuplicateQuestions": 95.45,
                    "TwitterSemEval2015": 77.81,
                    "TwitterURLCorpus": 85.14
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/gtr-t5-xl\">gtr-t5-xl</a>",
                    "AskUbuntuDupQuestions": 63.08,
                    "MindSmallReranking": 31.5,
                    "SciDocsRR": 76.49,
                    "StackOverflowDupQuestions": 52.79
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/gtr-t5-xl\">gtr-t5-xl</a>",
                    "ArguAna": 52.81,
                    "CQADupstackRetrieval": 37.35,
                    "ClimateFEVER": 27.01,
                    "DBPedia": 39.74,
                    "FEVER": 72.18,
                    "FiQA2018": 44.19,
                    "HotpotQA": 58.91,
                    "MSMARCO": 43.52,
                    "NFCorpus": 33.34,
                    "NQ": 56.16,
                    "QuoraRetrieval": 88.91,
                    "SCIDOCS": 15.71,
                    "SciFact": 64.2,
                    "TRECCOVID": 60.09,
                    "Touche2020": 25.26
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/gtr-t5-xl\">gtr-t5-xl</a>",
                    "BIOSSES": 78.94,
                    "SICK-R": 73.63,
                    "STS12": 69.11,
                    "STS13": 81.82,
                    "STS14": 77.07,
                    "STS15": 86.01,
                    "STS16": 82.23,
                    "STS17 (ar-ar)": 9.06,
                    "STS17 (en-ar)": -3.22,
                    "STS17 (en-de)": 70.38,
                    "STS17 (en-en)": 84.9,
                    "STS17 (en-tr)": 17.17,
                    "STS17 (es-en)": 60.24,
                    "STS17 (es-es)": 81.93,
                    "STS17 (fr-en)": 62.17,
                    "STS17 (it-en)": 59.11,
                    "STS17 (ko-ko)": 8.9,
                    "STS17 (nl-en)": 56.91,
                    "STS22 (ar)": 37.66,
                    "STS22 (de)": 50.58,
                    "STS22 (de-en)": 53.63,
                    "STS22 (de-fr)": 55.72,
                    "STS22 (de-pl)": 27.99,
                    "STS22 (en)": 66.61,
                    "STS22 (es)": 59.14,
                    "STS22 (es-en)": 69.99,
                    "STS22 (es-it)": 60.94,
                    "STS22 (fr)": 79.43,
                    "STS22 (fr-pl)": 61.98,
                    "STS22 (it)": 67.14,
                    "STS22 (pl)": 33.74,
                    "STS22 (pl-en)": 60.18,
                    "STS22 (ru)": 32.69,
                    "STS22 (tr)": 55.79,
                    "STS22 (zh)": 31.16,
                    "STS22 (zh-en)": 28.85,
                    "STSBenchmark": 77.65
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/gtr-t5-xl\">gtr-t5-xl</a>",
                    "SummEval": 30.21
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/gtr-t5-xl\">gtr-t5-xl</a>"
                }
            ]
        }
    },
    "LLM2Vec-Meta-Llama-3-unsupervised": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/McGill-NLP/LLM2Vec-Meta-Llama-3-8B-Instruct-mntp-unsup-simcse\">LLM2Vec-Meta-Llama-3-unsupervised</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/McGill-NLP/LLM2Vec-Meta-Llama-3-8B-Instruct-mntp-unsup-simcse\">LLM2Vec-Meta-Llama-3-unsupervised</a>",
                    "AmazonCounterfactualClassification (en)": 75.7,
                    "AmazonPolarityClassification": 80.68,
                    "AmazonReviewsClassification (en)": 40.0,
                    "Banking77Classification": 84.77,
                    "EmotionClassification": 47.08,
                    "ImdbClassification": 75.19,
                    "MTOPDomainClassification (en)": 94.47,
                    "MTOPIntentClassification (en)": 81.09,
                    "MassiveIntentClassification (en)": 75.01,
                    "MassiveScenarioClassification (en)": 79.16,
                    "ToxicConversationsClassification": 71.85,
                    "TweetSentimentExtractionClassification": 57.61
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/McGill-NLP/LLM2Vec-Meta-Llama-3-8B-Instruct-mntp-unsup-simcse\">LLM2Vec-Meta-Llama-3-unsupervised</a>",
                    "ArxivClusteringP2P": 49.22,
                    "ArxivClusteringS2S": 41.71,
                    "BiorxivClusteringP2P": 38.39,
                    "BiorxivClusteringS2S": 31.31,
                    "MedrxivClusteringP2P": 31.47,
                    "MedrxivClusteringS2S": 27.87,
                    "RedditClustering": 43.67,
                    "RedditClusteringP2P": 61.67,
                    "StackExchangeClustering": 68.2,
                    "StackExchangeClusteringP2P": 36.36,
                    "TwentyNewsgroupsClustering": 32.01
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/McGill-NLP/LLM2Vec-Meta-Llama-3-8B-Instruct-mntp-unsup-simcse\">LLM2Vec-Meta-Llama-3-unsupervised</a>",
                    "SprintDuplicateQuestions": 88.14,
                    "TwitterSemEval2015": 66.6,
                    "TwitterURLCorpus": 79.3
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/McGill-NLP/LLM2Vec-Meta-Llama-3-8B-Instruct-mntp-unsup-simcse\">LLM2Vec-Meta-Llama-3-unsupervised</a>",
                    "AskUbuntuDupQuestions": 57.16,
                    "MindSmallReranking": 30.1,
                    "SciDocsRR": 76.28,
                    "StackOverflowDupQuestions": 48.82
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/McGill-NLP/LLM2Vec-Meta-Llama-3-8B-Instruct-mntp-unsup-simcse\">LLM2Vec-Meta-Llama-3-unsupervised</a>",
                    "ArguAna": 51.73,
                    "CQADupstackRetrieval": 32.4,
                    "ClimateFEVER": 23.58,
                    "DBPedia": 26.78,
                    "FEVER": 53.42,
                    "FiQA2018": 28.56,
                    "HotpotQA": 52.37,
                    "MSMARCO": 17.47,
                    "NFCorpus": 26.28,
                    "NQ": 37.65,
                    "QuoraRetrieval": 84.64,
                    "SCIDOCS": 10.39,
                    "SciFact": 66.36,
                    "TRECCOVID": 63.34,
                    "Touche2020": 12.82
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/McGill-NLP/LLM2Vec-Meta-Llama-3-8B-Instruct-mntp-unsup-simcse\">LLM2Vec-Meta-Llama-3-unsupervised</a>",
                    "BIOSSES": 84.67,
                    "SICK-R": 72.16,
                    "STS12": 61.6,
                    "STS13": 79.71,
                    "STS14": 72.11,
                    "STS15": 82.18,
                    "STS16": 79.41,
                    "STS17 (en-en)": 85.44,
                    "STS22 (en)": 63.9,
                    "STSBenchmark": 77.44
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/McGill-NLP/LLM2Vec-Meta-Llama-3-8B-Instruct-mntp-unsup-simcse\">LLM2Vec-Meta-Llama-3-unsupervised</a>",
                    "SummEval": 31.45
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/McGill-NLP/LLM2Vec-Meta-Llama-3-8B-Instruct-mntp-unsup-simcse\">LLM2Vec-Meta-Llama-3-unsupervised</a>"
                }
            ]
        }
    },
    "sentence-bert-swedish-cased": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/KBLab/sentence-bert-swedish-cased\">sentence-bert-swedish-cased</a>",
                    "BornholmBitextMining": 14.08
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/KBLab/sentence-bert-swedish-cased\">sentence-bert-swedish-cased</a>",
                    "AngryTweetsClassification": 44.46,
                    "DKHateClassification": 59.36,
                    "DanishPoliticalCommentsClassification": 28.32,
                    "LccSentimentClassification": 47.2,
                    "MassiveIntentClassification (da)": 42.84,
                    "MassiveIntentClassification (nb)": 42.74,
                    "MassiveIntentClassification (sv)": 69.11,
                    "MassiveScenarioClassification (da)": 49.64,
                    "MassiveScenarioClassification (nb)": 49.49,
                    "MassiveScenarioClassification (sv)": 75.96,
                    "NoRecClassification": 43.53,
                    "NordicLangClassification": 51.45,
                    "NorwegianParliament": 55.74,
                    "ScalaDaClassification": 50.12,
                    "ScalaNbClassification": 50.34
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/KBLab/sentence-bert-swedish-cased\">sentence-bert-swedish-cased</a>"
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/KBLab/sentence-bert-swedish-cased\">sentence-bert-swedish-cased</a>"
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/KBLab/sentence-bert-swedish-cased\">sentence-bert-swedish-cased</a>"
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/KBLab/sentence-bert-swedish-cased\">sentence-bert-swedish-cased</a>"
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/KBLab/sentence-bert-swedish-cased\">sentence-bert-swedish-cased</a>"
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/KBLab/sentence-bert-swedish-cased\">sentence-bert-swedish-cased</a>"
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/KBLab/sentence-bert-swedish-cased\">sentence-bert-swedish-cased</a>"
                }
            ]
        }
    },
    "allenai-specter": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/allenai-specter\">allenai-specter</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/allenai-specter\">allenai-specter</a>",
                    "AmazonCounterfactualClassification (de)": 54.46,
                    "AmazonCounterfactualClassification (en)": 58.7,
                    "AmazonCounterfactualClassification (en-ext)": 59.28,
                    "AmazonCounterfactualClassification (ja)": 43.87,
                    "AmazonPolarityClassification": 57.77,
                    "AmazonReviewsClassification (de)": 24.08,
                    "AmazonReviewsClassification (en)": 26.26,
                    "AmazonReviewsClassification (es)": 23.88,
                    "AmazonReviewsClassification (fr)": 23.31,
                    "AmazonReviewsClassification (ja)": 20.25,
                    "AmazonReviewsClassification (zh)": 20.49,
                    "Banking77Classification": 66.66,
                    "EmotionClassification": 24.82,
                    "ImdbClassification": 56.35,
                    "MTOPDomainClassification (de)": 48.55,
                    "MTOPDomainClassification (en)": 74.53,
                    "MTOPDomainClassification (es)": 58.39,
                    "MTOPDomainClassification (fr)": 54.61,
                    "MTOPDomainClassification (hi)": 21.22,
                    "MTOPDomainClassification (th)": 14.98,
                    "MTOPIntentClassification (de)": 35.55,
                    "MTOPIntentClassification (en)": 50.05,
                    "MTOPIntentClassification (es)": 36.72,
                    "MTOPIntentClassification (fr)": 34.71,
                    "MTOPIntentClassification (hi)": 4.44,
                    "MTOPIntentClassification (th)": 4.67,
                    "MassiveIntentClassification (af)": 33.68,
                    "MassiveIntentClassification (am)": 2.94,
                    "MassiveIntentClassification (ar)": 10.04,
                    "MassiveIntentClassification (az)": 30.74,
                    "MassiveIntentClassification (bn)": 3.02,
                    "MassiveIntentClassification (cy)": 33.94,
                    "MassiveIntentClassification (da)": 38.47,
                    "MassiveIntentClassification (de)": 36.06,
                    "MassiveIntentClassification (el)": 27.7,
                    "MassiveIntentClassification (en)": 51.73,
                    "MassiveIntentClassification (es)": 35.6,
                    "MassiveIntentClassification (fa)": 17.97,
                    "MassiveIntentClassification (fi)": 35.53,
                    "MassiveIntentClassification (fr)": 38.41,
                    "MassiveIntentClassification (he)": 2.69,
                    "MassiveIntentClassification (hi)": 3.43,
                    "MassiveIntentClassification (hu)": 34.05,
                    "MassiveIntentClassification (hy)": 3.11,
                    "MassiveIntentClassification (id)": 40.02,
                    "MassiveIntentClassification (is)": 32.63,
                    "MassiveIntentClassification (it)": 39.28,
                    "MassiveIntentClassification (ja)": 4.95,
                    "MassiveIntentClassification (jv)": 34.95,
                    "MassiveIntentClassification (ka)": 2.57,
                    "MassiveIntentClassification (km)": 4.73,
                    "MassiveIntentClassification (kn)": 3.54,
                    "MassiveIntentClassification (ko)": 2.68,
                    "MassiveIntentClassification (lv)": 37.91,
                    "MassiveIntentClassification (ml)": 2.88,
                    "MassiveIntentClassification (mn)": 16.94,
                    "MassiveIntentClassification (ms)": 36.6,
                    "MassiveIntentClassification (my)": 3.96,
                    "MassiveIntentClassification (nb)": 34.75,
                    "MassiveIntentClassification (nl)": 33.95,
                    "MassiveIntentClassification (pl)": 35.77,
                    "MassiveIntentClassification (pt)": 43.05,
                    "MassiveIntentClassification (ro)": 36.2,
                    "MassiveIntentClassification (ru)": 25.3,
                    "MassiveIntentClassification (sl)": 35.9,
                    "MassiveIntentClassification (sq)": 36.6,
                    "MassiveIntentClassification (sv)": 36.0,
                    "MassiveIntentClassification (sw)": 34.81,
                    "MassiveIntentClassification (ta)": 3.11,
                    "MassiveIntentClassification (te)": 2.53,
                    "MassiveIntentClassification (th)": 4.38,
                    "MassiveIntentClassification (tl)": 35.51,
                    "MassiveIntentClassification (tr)": 32.02,
                    "MassiveIntentClassification (ur)": 9.61,
                    "MassiveIntentClassification (vi)": 37.07,
                    "MassiveIntentClassification (zh-CN)": 2.81,
                    "MassiveIntentClassification (zh-TW)": 4.79,
                    "MassiveScenarioClassification (af)": 36.17,
                    "MassiveScenarioClassification (am)": 7.64,
                    "MassiveScenarioClassification (ar)": 15.26,
                    "MassiveScenarioClassification (az)": 30.73,
                    "MassiveScenarioClassification (bn)": 7.15,
                    "MassiveScenarioClassification (cy)": 34.73,
                    "MassiveScenarioClassification (da)": 39.93,
                    "MassiveScenarioClassification (de)": 38.62,
                    "MassiveScenarioClassification (el)": 27.18,
                    "MassiveScenarioClassification (en)": 58.58,
                    "MassiveScenarioClassification (es)": 39.44,
                    "MassiveScenarioClassification (fa)": 21.43,
                    "MassiveScenarioClassification (fi)": 33.21,
                    "MassiveScenarioClassification (fr)": 40.26,
                    "MassiveScenarioClassification (he)": 7.42,
                    "MassiveScenarioClassification (hi)": 8.06,
                    "MassiveScenarioClassification (hu)": 34.54,
                    "MassiveScenarioClassification (hy)": 8.61,
                    "MassiveScenarioClassification (id)": 40.04,
                    "MassiveScenarioClassification (is)": 33.57,
                    "MassiveScenarioClassification (it)": 40.1,
                    "MassiveScenarioClassification (ja)": 9.96,
                    "MassiveScenarioClassification (jv)": 36.11,
                    "MassiveScenarioClassification (ka)": 7.13,
                    "MassiveScenarioClassification (km)": 9.66,
                    "MassiveScenarioClassification (kn)": 7.55,
                    "MassiveScenarioClassification (ko)": 7.27,
                    "MassiveScenarioClassification (lv)": 37.03,
                    "MassiveScenarioClassification (ml)": 7.22,
                    "MassiveScenarioClassification (mn)": 21.53,
                    "MassiveScenarioClassification (ms)": 37.57,
                    "MassiveScenarioClassification (my)": 9.54,
                    "MassiveScenarioClassification (nb)": 35.71,
                    "MassiveScenarioClassification (nl)": 34.62,
                    "MassiveScenarioClassification (pl)": 36.87,
                    "MassiveScenarioClassification (pt)": 44.68,
                    "MassiveScenarioClassification (ro)": 37.29,
                    "MassiveScenarioClassification (ru)": 28.16,
                    "MassiveScenarioClassification (sl)": 37.95,
                    "MassiveScenarioClassification (sq)": 37.82,
                    "MassiveScenarioClassification (sv)": 35.35,
                    "MassiveScenarioClassification (sw)": 35.37,
                    "MassiveScenarioClassification (ta)": 7.19,
                    "MassiveScenarioClassification (te)": 7.29,
                    "MassiveScenarioClassification (th)": 9.47,
                    "MassiveScenarioClassification (tl)": 37.31,
                    "MassiveScenarioClassification (tr)": 34.57,
                    "MassiveScenarioClassification (ur)": 16.17,
                    "MassiveScenarioClassification (vi)": 35.91,
                    "MassiveScenarioClassification (zh-CN)": 9.19,
                    "MassiveScenarioClassification (zh-TW)": 10.19,
                    "ToxicConversationsClassification": 57.44,
                    "TweetSentimentExtractionClassification": 45.52
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/allenai-specter\">allenai-specter</a>",
                    "ArxivClusteringP2P": 44.75,
                    "ArxivClusteringS2S": 35.27,
                    "BiorxivClusteringP2P": 39.52,
                    "BiorxivClusteringS2S": 34.53,
                    "MedrxivClusteringP2P": 35.04,
                    "MedrxivClusteringS2S": 31.66,
                    "RedditClustering": 24.13,
                    "RedditClusteringP2P": 35.06,
                    "StackExchangeClustering": 39.01,
                    "StackExchangeClusteringP2P": 31.46,
                    "TwentyNewsgroupsClustering": 24.22
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/allenai-specter\">allenai-specter</a>",
                    "SprintDuplicateQuestions": 71.63,
                    "TwitterSemEval2015": 43.25,
                    "TwitterURLCorpus": 69.22
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/allenai-specter\">allenai-specter</a>",
                    "AskUbuntuDupQuestions": 50.07,
                    "MindSmallReranking": 24.8,
                    "SciDocsRR": 81.31,
                    "StackOverflowDupQuestions": 36.22
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/allenai-specter\">allenai-specter</a>",
                    "ArguAna": 32.67,
                    "CQADupstackRetrieval": 14.6,
                    "ClimateFEVER": 6.86,
                    "DBPedia": 4.14,
                    "FEVER": 5.45,
                    "FiQA2018": 5.64,
                    "HotpotQA": 5.46,
                    "MSMARCO": 5.59,
                    "NFCorpus": 0.85,
                    "NQ": 5.99,
                    "QuoraRetrieval": 64.65,
                    "SCIDOCS": 0.0,
                    "SciFact": 47.88,
                    "TRECCOVID": 29.91,
                    "Touche2020": 8.46
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/allenai-specter\">allenai-specter</a>",
                    "BIOSSES": 64.95,
                    "SICK-R": 56.39,
                    "STS12": 62.49,
                    "STS13": 58.7,
                    "STS14": 54.87,
                    "STS15": 62.54,
                    "STS16": 64.27,
                    "STS17 (ar-ar)": 27.14,
                    "STS17 (en-ar)": 6.9,
                    "STS17 (en-de)": 11.59,
                    "STS17 (en-en)": 69.63,
                    "STS17 (en-tr)": 6.46,
                    "STS17 (es-en)": 10.86,
                    "STS17 (es-es)": 55.45,
                    "STS17 (fr-en)": 16.02,
                    "STS17 (it-en)": 19.87,
                    "STS17 (ko-ko)": 8.08,
                    "STS17 (nl-en)": 24.92,
                    "STS22 (ar)": 19.57,
                    "STS22 (de)": 17.31,
                    "STS22 (de-en)": 26.03,
                    "STS22 (de-fr)": 10.26,
                    "STS22 (de-pl)": 16.94,
                    "STS22 (en)": 55.06,
                    "STS22 (es)": 48.89,
                    "STS22 (es-en)": 51.79,
                    "STS22 (es-it)": 25.24,
                    "STS22 (fr)": 53.92,
                    "STS22 (fr-pl)": 39.44,
                    "STS22 (it)": 39.43,
                    "STS22 (pl)": 13.56,
                    "STS22 (pl-en)": 25.36,
                    "STS22 (ru)": 1.11,
                    "STS22 (tr)": 31.73,
                    "STS22 (zh)": 16.35,
                    "STS22 (zh-en)": 8.44,
                    "STSBenchmark": 61.26
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/allenai-specter\">allenai-specter</a>",
                    "SummEval": 27.66
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/allenai-specter\">allenai-specter</a>"
                }
            ]
        }
    },
    "m3e-large": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/moka-ai/m3e-large\">m3e-large</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/moka-ai/m3e-large\">m3e-large</a>",
                    "AmazonReviewsClassification (zh)": 44.44,
                    "IFlyTek": 43.96,
                    "JDReview": 86.92,
                    "MassiveIntentClassification (zh-CN)": 67.23,
                    "MassiveScenarioClassification (zh-CN)": 74.88,
                    "MultilingualSentiment": 72.47,
                    "OnlineShopping": 89.59,
                    "TNews": 48.26,
                    "Waimai": 86.08
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/moka-ai/m3e-large\">m3e-large</a>",
                    "CLSClusteringP2P": 38.6,
                    "CLSClusteringS2S": 38.02,
                    "ThuNewsClusteringP2P": 60.39,
                    "ThuNewsClusteringS2S": 58.51
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/moka-ai/m3e-large\">m3e-large</a>",
                    "Cmnli": 69.27,
                    "Ocnli": 59.33
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/moka-ai/m3e-large\">m3e-large</a>",
                    "CMedQAv1": 77.76,
                    "CMedQAv2": 78.27,
                    "MMarcoReranking": 16.46,
                    "T2Reranking": 66.13
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/moka-ai/m3e-large\">m3e-large</a>",
                    "CmedqaRetrieval": 30.73,
                    "CovidRetrieval": 61.33,
                    "DuRetrieval": 74.69,
                    "EcomRetrieval": 45.18,
                    "MMarcoRetrieval": 61.06,
                    "MedicalRetrieval": 48.66,
                    "T2Retrieval": 72.36,
                    "VideoRetrieval": 44.02
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/moka-ai/m3e-large\">m3e-large</a>",
                    "AFQMC": 36.53,
                    "ATEC": 41.8,
                    "BQ": 65.2,
                    "LCQMC": 74.2,
                    "PAWSX": 15.95,
                    "QBQTC": 32.65,
                    "STS22 (zh)": 62.91,
                    "STSB": 74.16
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/moka-ai/m3e-large\">m3e-large</a>"
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/moka-ai/m3e-large\">m3e-large</a>"
                }
            ]
        }
    },
    "LLM2Vec-Llama-2-unsupervised": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/McGill-NLP/LLM2Vec-Llama-2-7b-chat-hf-mntp-unsup-simcse\">LLM2Vec-Llama-2-unsupervised</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/McGill-NLP/LLM2Vec-Llama-2-7b-chat-hf-mntp-unsup-simcse\">LLM2Vec-Llama-2-unsupervised</a>",
                    "AmazonCounterfactualClassification (en)": 76.91,
                    "AmazonPolarityClassification": 79.05,
                    "AmazonReviewsClassification (en)": 40.08,
                    "Banking77Classification": 84.65,
                    "EmotionClassification": 46.58,
                    "ImdbClassification": 75.68,
                    "MTOPDomainClassification (en)": 94.33,
                    "MTOPIntentClassification (en)": 79.54,
                    "MassiveIntentClassification (en)": 73.84,
                    "MassiveScenarioClassification (en)": 79.17,
                    "ToxicConversationsClassification": 71.81,
                    "TweetSentimentExtractionClassification": 57.17
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/McGill-NLP/LLM2Vec-Llama-2-7b-chat-hf-mntp-unsup-simcse\">LLM2Vec-Llama-2-unsupervised</a>",
                    "ArxivClusteringP2P": 47.81,
                    "ArxivClusteringS2S": 40.53,
                    "BiorxivClusteringP2P": 38.12,
                    "BiorxivClusteringS2S": 31.25,
                    "MedrxivClusteringP2P": 30.94,
                    "MedrxivClusteringS2S": 28.04,
                    "RedditClustering": 42.84,
                    "RedditClusteringP2P": 60.1,
                    "StackExchangeClustering": 65.12,
                    "StackExchangeClusteringP2P": 33.61,
                    "TwentyNewsgroupsClustering": 30.76
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/McGill-NLP/LLM2Vec-Llama-2-7b-chat-hf-mntp-unsup-simcse\">LLM2Vec-Llama-2-unsupervised</a>",
                    "SprintDuplicateQuestions": 87.57,
                    "TwitterSemEval2015": 65.14,
                    "TwitterURLCorpus": 80.94
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/McGill-NLP/LLM2Vec-Llama-2-7b-chat-hf-mntp-unsup-simcse\">LLM2Vec-Llama-2-unsupervised</a>",
                    "AskUbuntuDupQuestions": 55.56,
                    "MindSmallReranking": 30.86,
                    "SciDocsRR": 77.62,
                    "StackOverflowDupQuestions": 47.77
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/McGill-NLP/LLM2Vec-Llama-2-7b-chat-hf-mntp-unsup-simcse\">LLM2Vec-Llama-2-unsupervised</a>",
                    "ArguAna": 47.09,
                    "CQADupstackRetrieval": 30.78,
                    "ClimateFEVER": 20.67,
                    "DBPedia": 25.81,
                    "FEVER": 43.48,
                    "FiQA2018": 24.62,
                    "HotpotQA": 48.46,
                    "MSMARCO": 18.81,
                    "NFCorpus": 26.81,
                    "NQ": 33.21,
                    "QuoraRetrieval": 86.15,
                    "SCIDOCS": 10.0,
                    "SciFact": 64.48,
                    "TRECCOVID": 60.67,
                    "Touche2020": 10.18
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/McGill-NLP/LLM2Vec-Llama-2-7b-chat-hf-mntp-unsup-simcse\">LLM2Vec-Llama-2-unsupervised</a>",
                    "BIOSSES": 82.41,
                    "SICK-R": 71.77,
                    "STS12": 65.39,
                    "STS13": 79.26,
                    "STS14": 72.98,
                    "STS15": 82.72,
                    "STS16": 81.02,
                    "STS17 (en-en)": 86.7,
                    "STS22 (en)": 63.47,
                    "STSBenchmark": 78.32
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/McGill-NLP/LLM2Vec-Llama-2-7b-chat-hf-mntp-unsup-simcse\">LLM2Vec-Llama-2-unsupervised</a>",
                    "SummEval": 31.38
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/McGill-NLP/LLM2Vec-Llama-2-7b-chat-hf-mntp-unsup-simcse\">LLM2Vec-Llama-2-unsupervised</a>"
                }
            ]
        }
    },
    "nomic-embed-text-v1.5-128": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/nomic-ai/nomic-embed-text-v1.5\">nomic-embed-text-v1.5-128</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/nomic-ai/nomic-embed-text-v1.5\">nomic-embed-text-v1.5-128</a>",
                    "AmazonCounterfactualClassification (en)": 69.78,
                    "AmazonPolarityClassification": 88.74,
                    "AmazonReviewsClassification (en)": 43.11,
                    "Banking77Classification": 82.78,
                    "EmotionClassification": 42.92,
                    "ImdbClassification": 80.87,
                    "MTOPDomainClassification (en)": 89.61,
                    "MTOPIntentClassification (en)": 68.9,
                    "MassiveIntentClassification (en)": 69.34,
                    "MassiveScenarioClassification (en)": 74.21,
                    "ToxicConversationsClassification": 68.16,
                    "TweetSentimentExtractionClassification": 57.99
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/nomic-ai/nomic-embed-text-v1.5\">nomic-embed-text-v1.5-128</a>",
                    "ArxivClusteringP2P": 43.87,
                    "ArxivClusteringS2S": 34.57,
                    "BiorxivClusteringP2P": 36.79,
                    "BiorxivClusteringS2S": 30.68,
                    "MedrxivClusteringP2P": 34.09,
                    "MedrxivClusteringS2S": 31.3,
                    "RedditClustering": 53.31,
                    "RedditClusteringP2P": 58.96,
                    "StackExchangeClustering": 59.92,
                    "StackExchangeClusteringP2P": 33.88,
                    "TwentyNewsgroupsClustering": 47.29
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/nomic-ai/nomic-embed-text-v1.5\">nomic-embed-text-v1.5-128</a>",
                    "SprintDuplicateQuestions": 91.45,
                    "TwitterSemEval2015": 73.23,
                    "TwitterURLCorpus": 85.93
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/nomic-ai/nomic-embed-text-v1.5\">nomic-embed-text-v1.5-128</a>",
                    "AskUbuntuDupQuestions": 61.16,
                    "MindSmallReranking": 30.02,
                    "SciDocsRR": 78.05,
                    "StackOverflowDupQuestions": 49.0
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/nomic-ai/nomic-embed-text-v1.5\">nomic-embed-text-v1.5-128</a>",
                    "ArguAna": 43.4,
                    "CQADupstackRetrieval": 34.67,
                    "ClimateFEVER": 36.52,
                    "DBPedia": 36.22,
                    "FEVER": 80.48,
                    "FiQA2018": 32.08,
                    "HotpotQA": 60.09,
                    "MSMARCO": 39.99,
                    "NFCorpus": 30.72,
                    "NQ": 53.62,
                    "QuoraRetrieval": 87.07,
                    "SCIDOCS": 15.56,
                    "SciFact": 64.28,
                    "TRECCOVID": 74.58,
                    "Touche2020": 26.99
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/nomic-ai/nomic-embed-text-v1.5\">nomic-embed-text-v1.5-128</a>",
                    "BIOSSES": 80.19,
                    "SICK-R": 79.09,
                    "STS12": 77.49,
                    "STS13": 85.62,
                    "STS14": 80.5,
                    "STS15": 85.84,
                    "STS16": 83.9,
                    "STS17 (en-en)": 86.27,
                    "STS22 (en)": 64.24,
                    "STSBenchmark": 84.28
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/nomic-ai/nomic-embed-text-v1.5\">nomic-embed-text-v1.5-128</a>",
                    "SummEval": 29.59
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/nomic-ai/nomic-embed-text-v1.5\">nomic-embed-text-v1.5-128</a>"
                }
            ]
        }
    },
    "bert-base-multilingual-uncased": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/google-bert/bert-base-multilingual-uncased\">bert-base-multilingual-uncased</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/google-bert/bert-base-multilingual-uncased\">bert-base-multilingual-uncased</a>",
                    "AmazonReviewsClassification (fr)": 29.02,
                    "MTOPDomainClassification (fr)": 64.49,
                    "MTOPIntentClassification (fr)": 39.4,
                    "MasakhaNEWSClassification (fra)": 75.69,
                    "MassiveIntentClassification (fr)": 38.01,
                    "MassiveScenarioClassification (fr)": 43.63
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/google-bert/bert-base-multilingual-uncased\">bert-base-multilingual-uncased</a>",
                    "AlloProfClusteringP2P": 60.66,
                    "AlloProfClusteringS2S": 35.05,
                    "HALClusteringS2S": 20.9,
                    "MLSUMClusteringP2P": 43.5,
                    "MLSUMClusteringS2S": 30.99,
                    "MasakhaNEWSClusteringP2P (fra)": 49.71,
                    "MasakhaNEWSClusteringS2S (fra)": 42.23
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/google-bert/bert-base-multilingual-uncased\">bert-base-multilingual-uncased</a>",
                    "OpusparcusPC (fr)": 87.43,
                    "PawsX (fr)": 53.22
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/google-bert/bert-base-multilingual-uncased\">bert-base-multilingual-uncased</a>",
                    "AlloprofReranking": 38.85,
                    "SyntecReranking": 66.4
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/google-bert/bert-base-multilingual-uncased\">bert-base-multilingual-uncased</a>",
                    "AlloprofRetrieval": 5.51,
                    "BSARDRetrieval": 0.0,
                    "MintakaRetrieval (fr)": 2.87,
                    "SyntecRetrieval": 34.95,
                    "XPQARetrieval (fr)": 26.12
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/google-bert/bert-base-multilingual-uncased\">bert-base-multilingual-uncased</a>",
                    "SICKFr": 58.26,
                    "STS22 (fr)": 56.47,
                    "STSBenchmarkMultilingualSTS (fr)": 54.97
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/google-bert/bert-base-multilingual-uncased\">bert-base-multilingual-uncased</a>",
                    "SummEvalFr": 30.72
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/google-bert/bert-base-multilingual-uncased\">bert-base-multilingual-uncased</a>"
                }
            ]
        }
    },
    "distiluse-base-multilingual-cased-v2": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/distiluse-base-multilingual-cased-v2\">distiluse-base-multilingual-cased-v2</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/distiluse-base-multilingual-cased-v2\">distiluse-base-multilingual-cased-v2</a>",
                    "AllegroReviews": 28.03,
                    "AmazonCounterfactualClassification (de)": 68.14,
                    "AmazonCounterfactualClassification (en)": 71.81,
                    "AmazonCounterfactualClassification (en-ext)": 72.96,
                    "AmazonCounterfactualClassification (ja)": 65.39,
                    "AmazonPolarityClassification": 68.0,
                    "AmazonReviewsClassification (de)": 35.03,
                    "AmazonReviewsClassification (en)": 35.45,
                    "AmazonReviewsClassification (es)": 36.24,
                    "AmazonReviewsClassification (fr)": 35.7,
                    "AmazonReviewsClassification (ja)": 31.08,
                    "AmazonReviewsClassification (zh)": 33.89,
                    "Banking77Classification": 71.48,
                    "CBD": 60.0,
                    "EmotionClassification": 40.04,
                    "ImdbClassification": 61.52,
                    "MTOPDomainClassification (de)": 86.19,
                    "MTOPDomainClassification (en)": 91.59,
                    "MTOPDomainClassification (es)": 87.75,
                    "MTOPDomainClassification (fr)": 84.61,
                    "MTOPDomainClassification (hi)": 76.41,
                    "MTOPDomainClassification (th)": 73.62,
                    "MTOPIntentClassification (de)": 59.21,
                    "MTOPIntentClassification (en)": 66.4,
                    "MTOPIntentClassification (es)": 57.21,
                    "MTOPIntentClassification (fr)": 53.41,
                    "MTOPIntentClassification (hi)": 45.54,
                    "MTOPIntentClassification (th)": 47.73,
                    "MasakhaNEWSClassification (fra)": 76.87,
                    "MassiveIntentClassification (af)": 40.02,
                    "MassiveIntentClassification (am)": 2.35,
                    "MassiveIntentClassification (ar)": 43.14,
                    "MassiveIntentClassification (az)": 25.6,
                    "MassiveIntentClassification (bn)": 4.84,
                    "MassiveIntentClassification (cy)": 15.43,
                    "MassiveIntentClassification (da)": 52.33,
                    "MassiveIntentClassification (de)": 51.57,
                    "MassiveIntentClassification (el)": 49.65,
                    "MassiveIntentClassification (en)": 66.71,
                    "MassiveIntentClassification (es)": 56.57,
                    "MassiveIntentClassification (fa)": 55.36,
                    "MassiveIntentClassification (fi)": 45.72,
                    "MassiveIntentClassification (fr)": 57.02,
                    "MassiveIntentClassification (he)": 46.74,
                    "MassiveIntentClassification (hi)": 48.55,
                    "MassiveIntentClassification (hu)": 50.65,
                    "MassiveIntentClassification (hy)": 40.79,
                    "MassiveIntentClassification (id)": 56.0,
                    "MassiveIntentClassification (is)": 16.08,
                    "MassiveIntentClassification (it)": 57.65,
                    "MassiveIntentClassification (ja)": 55.33,
                    "MassiveIntentClassification (jv)": 28.16,
                    "MassiveIntentClassification (ka)": 29.41,
                    "MassiveIntentClassification (km)": 4.79,
                    "MassiveIntentClassification (kn)": 3.37,
                    "MassiveIntentClassification (ko)": 49.97,
                    "MassiveIntentClassification (lv)": 44.31,
                    "MassiveIntentClassification (ml)": 3.24,
                    "MassiveIntentClassification (mn)": 40.37,
                    "MassiveIntentClassification (ms)": 47.97,
                    "MassiveIntentClassification (my)": 38.48,
                    "MassiveIntentClassification (nb)": 46.01,
                    "MassiveIntentClassification (nl)": 58.29,
                    "MassiveIntentClassification (pl)": 53.1,
                    "MassiveIntentClassification (pt)": 58.63,
                    "MassiveIntentClassification (ro)": 50.63,
                    "MassiveIntentClassification (ru)": 57.96,
                    "MassiveIntentClassification (sl)": 50.66,
                    "MassiveIntentClassification (sq)": 50.25,
                    "MassiveIntentClassification (sv)": 52.41,
                    "MassiveIntentClassification (sw)": 19.29,
                    "MassiveIntentClassification (ta)": 3.79,
                    "MassiveIntentClassification (te)": 3.36,
                    "MassiveIntentClassification (th)": 45.28,
                    "MassiveIntentClassification (tl)": 28.44,
                    "MassiveIntentClassification (tr)": 50.47,
                    "MassiveIntentClassification (ur)": 46.03,
                    "MassiveIntentClassification (vi)": 45.25,
                    "MassiveIntentClassification (zh-CN)": 59.22,
                    "MassiveIntentClassification (zh-TW)": 54.96,
                    "MassiveScenarioClassification (af)": 53.67,
                    "MassiveScenarioClassification (am)": 7.72,
                    "MassiveScenarioClassification (ar)": 52.19,
                    "MassiveScenarioClassification (az)": 34.75,
                    "MassiveScenarioClassification (bn)": 10.65,
                    "MassiveScenarioClassification (cy)": 21.24,
                    "MassiveScenarioClassification (da)": 62.55,
                    "MassiveScenarioClassification (de)": 61.4,
                    "MassiveScenarioClassification (el)": 60.68,
                    "MassiveScenarioClassification (en)": 74.0,
                    "MassiveScenarioClassification (es)": 64.61,
                    "MassiveScenarioClassification (fa)": 59.24,
                    "MassiveScenarioClassification (fi)": 54.66,
                    "MassiveScenarioClassification (fr)": 65.2,
                    "MassiveScenarioClassification (he)": 54.74,
                    "MassiveScenarioClassification (hi)": 55.99,
                    "MassiveScenarioClassification (hu)": 61.2,
                    "MassiveScenarioClassification (hy)": 49.63,
                    "MassiveScenarioClassification (id)": 65.25,
                    "MassiveScenarioClassification (is)": 22.6,
                    "MassiveScenarioClassification (it)": 64.63,
                    "MassiveScenarioClassification (ja)": 62.32,
                    "MassiveScenarioClassification (jv)": 35.77,
                    "MassiveScenarioClassification (ka)": 39.08,
                    "MassiveScenarioClassification (km)": 9.24,
                    "MassiveScenarioClassification (kn)": 8.28,
                    "MassiveScenarioClassification (ko)": 57.6,
                    "MassiveScenarioClassification (lv)": 51.72,
                    "MassiveScenarioClassification (ml)": 8.25,
                    "MassiveScenarioClassification (mn)": 47.21,
                    "MassiveScenarioClassification (ms)": 55.65,
                    "MassiveScenarioClassification (my)": 43.31,
                    "MassiveScenarioClassification (nb)": 54.98,
                    "MassiveScenarioClassification (nl)": 67.49,
                    "MassiveScenarioClassification (pl)": 61.29,
                    "MassiveScenarioClassification (pt)": 64.26,
                    "MassiveScenarioClassification (ro)": 58.03,
                    "MassiveScenarioClassification (ru)": 65.41,
                    "MassiveScenarioClassification (sl)": 59.36,
                    "MassiveScenarioClassification (sq)": 62.69,
                    "MassiveScenarioClassification (sv)": 64.35,
                    "MassiveScenarioClassification (sw)": 25.12,
                    "MassiveScenarioClassification (ta)": 8.67,
                    "MassiveScenarioClassification (te)": 7.82,
                    "MassiveScenarioClassification (th)": 54.65,
                    "MassiveScenarioClassification (tl)": 36.09,
                    "MassiveScenarioClassification (tr)": 60.89,
                    "MassiveScenarioClassification (ur)": 54.71,
                    "MassiveScenarioClassification (vi)": 55.15,
                    "MassiveScenarioClassification (zh-CN)": 66.44,
                    "MassiveScenarioClassification (zh-TW)": 62.89,
                    "PAC": 68.17,
                    "PolEmo2.0-IN": 48.84,
                    "PolEmo2.0-OUT": 30.0,
                    "ToxicConversationsClassification": 69.09,
                    "TweetSentimentExtractionClassification": 59.97
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/distiluse-base-multilingual-cased-v2\">distiluse-base-multilingual-cased-v2</a>",
                    "8TagsClustering": 12.51,
                    "AlloProfClusteringP2P": 55.95,
                    "AlloProfClusteringS2S": 35.39,
                    "ArxivClusteringP2P": 33.59,
                    "HALClusteringS2S": 18.2,
                    "MLSUMClusteringP2P": 40.17,
                    "MLSUMClusteringS2S": 34.65,
                    "MasakhaNEWSClusteringP2P (fra)": 53.76,
                    "MasakhaNEWSClusteringS2S (fra)": 32.76
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/distiluse-base-multilingual-cased-v2\">distiluse-base-multilingual-cased-v2</a>",
                    "CDSC-E": 71.83,
                    "OpusparcusPC (fr)": 92.07,
                    "PPC": 86.83,
                    "PSC": 96.35,
                    "PawsX (fr)": 51.08,
                    "SICK-E-PL": 62.05,
                    "SprintDuplicateQuestions": 87.15,
                    "TwitterSemEval2015": 61.67,
                    "TwitterURLCorpus": 84.02
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/distiluse-base-multilingual-cased-v2\">distiluse-base-multilingual-cased-v2</a>",
                    "AlloprofReranking": 51.77,
                    "AskUbuntuDupQuestions": 53.75,
                    "MindSmallReranking": 30.39,
                    "SciDocsRR": 69.22,
                    "StackOverflowDupQuestions": 41.92,
                    "SyntecReranking": 74.78
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/distiluse-base-multilingual-cased-v2\">distiluse-base-multilingual-cased-v2</a>",
                    "AlloprofRetrieval": 26.99,
                    "ArguAna-PL": 36.7,
                    "BSARDRetrieval": 0.0,
                    "DBPedia-PL": 12.36,
                    "FiQA-PL": 8.02,
                    "HotpotQA-PL": 20.83,
                    "MSMARCO-PL": 4.57,
                    "MintakaRetrieval (fr)": 22.55,
                    "NFCorpus-PL": 16.28,
                    "NQ-PL": 5.85,
                    "Quora-PL": 71.95,
                    "SCIDOCS-PL": 6.5,
                    "SciFact-PL": 33.03,
                    "SyntecRetrieval": 65.34,
                    "TRECCOVID-PL": 16.91,
                    "XPQARetrieval (fr)": 51.2
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/distiluse-base-multilingual-cased-v2\">distiluse-base-multilingual-cased-v2</a>",
                    "BIOSSES": 78.34,
                    "CDSC-R": 87.67,
                    "SICK-R": 75.25,
                    "SICK-R-PL": 65.53,
                    "SICKFr": 72.49,
                    "STS12": 72.96,
                    "STS13": 70.58,
                    "STS14": 70.29,
                    "STS15": 81.94,
                    "STS16": 76.8,
                    "STS17 (ar-ar)": 77.34,
                    "STS17 (en-ar)": 77.46,
                    "STS17 (en-de)": 80.24,
                    "STS17 (en-en)": 86.19,
                    "STS17 (en-tr)": 74.34,
                    "STS17 (es-en)": 77.4,
                    "STS17 (es-es)": 83.71,
                    "STS17 (fr-en)": 79.28,
                    "STS17 (it-en)": 80.82,
                    "STS17 (ko-ko)": 76.4,
                    "STS17 (nl-en)": 80.51,
                    "STS22 (ar)": 49.04,
                    "STS22 (de)": 35.73,
                    "STS22 (de-en)": 47.51,
                    "STS22 (de-fr)": 60.76,
                    "STS22 (de-pl)": 36.09,
                    "STS22 (en)": 62.88,
                    "STS22 (es)": 59.34,
                    "STS22 (es-en)": 68.96,
                    "STS22 (es-it)": 63.28,
                    "STS22 (fr)": 76.41,
                    "STS22 (fr-pl)": 61.98,
                    "STS22 (it)": 65.1,
                    "STS22 (pl)": 34.58,
                    "STS22 (pl-en)": 71.33,
                    "STS22 (ru)": 52.4,
                    "STS22 (tr)": 54.07,
                    "STS22 (zh)": 54.32,
                    "STS22 (zh-en)": 61.75,
                    "STSBenchmark": 80.75,
                    "STSBenchmarkMultilingualSTS (fr)": 77.49
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/distiluse-base-multilingual-cased-v2\">distiluse-base-multilingual-cased-v2</a>",
                    "SummEvalFr": 28.12
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/distiluse-base-multilingual-cased-v2\">distiluse-base-multilingual-cased-v2</a>"
                }
            ]
        }
    },
    "e5-small": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/e5-small\">e5-small</a>",
                    "BornholmBitextMining": 40.27
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/e5-small\">e5-small</a>",
                    "AngryTweetsClassification": 43.6,
                    "DKHateClassification": 57.57,
                    "DanishPoliticalCommentsClassification": 28.37,
                    "LccSentimentClassification": 40.27,
                    "MassiveIntentClassification (da)": 41.89,
                    "MassiveIntentClassification (nb)": 40.25,
                    "MassiveIntentClassification (sv)": 40.07,
                    "MassiveScenarioClassification (da)": 49.93,
                    "MassiveScenarioClassification (nb)": 48.58,
                    "MassiveScenarioClassification (sv)": 47.06,
                    "NoRecClassification": 41.84,
                    "NordicLangClassification": 53.47,
                    "NorwegianParliament": 56.57,
                    "ScalaDaClassification": 50.15,
                    "ScalaNbClassification": 50.03
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/e5-small\">e5-small</a>"
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/e5-small\">e5-small</a>"
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/e5-small\">e5-small</a>"
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/e5-small\">e5-small</a>"
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/e5-small\">e5-small</a>"
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/e5-small\">e5-small</a>"
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/e5-small\">e5-small</a>"
                }
            ]
        }
    },
    "flaubert_large_cased": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/flaubert/flaubert_large_cased\">flaubert_large_cased</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/flaubert/flaubert_large_cased\">flaubert_large_cased</a>",
                    "AmazonReviewsClassification (fr)": 22.45,
                    "MTOPDomainClassification (fr)": 24.27,
                    "MTOPIntentClassification (fr)": 9.79,
                    "MasakhaNEWSClassification (fra)": 55.64,
                    "MassiveIntentClassification (fr)": 16.41,
                    "MassiveScenarioClassification (fr)": 22.72
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/flaubert/flaubert_large_cased\">flaubert_large_cased</a>",
                    "AlloProfClusteringP2P": 40.85,
                    "AlloProfClusteringS2S": 21.76,
                    "HALClusteringS2S": 5.26,
                    "MLSUMClusteringP2P": 38.09,
                    "MLSUMClusteringS2S": 18.71,
                    "MasakhaNEWSClusteringP2P (fra)": 26.43,
                    "MasakhaNEWSClusteringS2S (fra)": 24.68
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/flaubert/flaubert_large_cased\">flaubert_large_cased</a>",
                    "OpusparcusPC (fr)": 74.78,
                    "PawsX (fr)": 54.14
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/flaubert/flaubert_large_cased\">flaubert_large_cased</a>",
                    "AlloprofReranking": 26.29,
                    "SyntecReranking": 42.8
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/flaubert/flaubert_large_cased\">flaubert_large_cased</a>",
                    "AlloprofRetrieval": 0.58,
                    "BSARDRetrieval": 0.0,
                    "MintakaRetrieval (fr)": 0.26,
                    "SyntecRetrieval": 1.58,
                    "XPQARetrieval (fr)": 3.69
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/flaubert/flaubert_large_cased\">flaubert_large_cased</a>",
                    "SICKFr": 34.6,
                    "STS22 (fr)": 48.52,
                    "STSBenchmarkMultilingualSTS (fr)": 15.66
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/flaubert/flaubert_large_cased\">flaubert_large_cased</a>",
                    "SummEvalFr": 29.25
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/flaubert/flaubert_large_cased\">flaubert_large_cased</a>"
                }
            ]
        }
    },
    "xlm-roberta-base": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/xlm-roberta-base\">xlm-roberta-base</a>",
                    "BornholmBitextMining": 4.42
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/xlm-roberta-base\">xlm-roberta-base</a>",
                    "AmazonReviewsClassification (fr)": 26.75,
                    "AngryTweetsClassification": 52.41,
                    "DKHateClassification": 56.78,
                    "DanishPoliticalCommentsClassification": 34.03,
                    "LccSentimentClassification": 52.27,
                    "MTOPDomainClassification (fr)": 43.83,
                    "MTOPIntentClassification (fr)": 19.38,
                    "MasakhaNEWSClassification (fra)": 60.5,
                    "MassiveIntentClassification (da)": 41.06,
                    "MassiveIntentClassification (nb)": 40.46,
                    "MassiveIntentClassification (sv)": 45.12,
                    "MassiveIntentClassification (fr)": 13.58,
                    "MassiveScenarioClassification (da)": 43.91,
                    "MassiveScenarioClassification (nb)": 44.83,
                    "MassiveScenarioClassification (sv)": 47.35,
                    "MassiveScenarioClassification (fr)": 23.21,
                    "NoRecClassification": 46.28,
                    "NordicLangClassification": 79.39,
                    "NorwegianParliament": 56.75,
                    "ScalaDaClassification": 57.3,
                    "ScalaNbClassification": 58.33
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/xlm-roberta-base\">xlm-roberta-base</a>",
                    "AlloProfClusteringP2P": 52.24,
                    "AlloProfClusteringS2S": 20.37,
                    "HALClusteringS2S": 8.68,
                    "MLSUMClusteringP2P": 40.44,
                    "MLSUMClusteringS2S": 24.14,
                    "MasakhaNEWSClusteringP2P (fra)": 29.29,
                    "MasakhaNEWSClusteringS2S (fra)": 23.76
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/xlm-roberta-base\">xlm-roberta-base</a>",
                    "OpusparcusPC (fr)": 85.45,
                    "PawsX (fr)": 51.35
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/xlm-roberta-base\">xlm-roberta-base</a>",
                    "AlloprofReranking": 25.58,
                    "SyntecReranking": 43.75
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/xlm-roberta-base\">xlm-roberta-base</a>",
                    "AlloprofRetrieval": 0.16,
                    "BSARDRetrieval": 0.0,
                    "MintakaRetrieval (fr)": 0.88,
                    "SyntecRetrieval": 3.33,
                    "XPQARetrieval (fr)": 11.65
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/xlm-roberta-base\">xlm-roberta-base</a>",
                    "SICKFr": 48.62,
                    "STS22 (fr)": 56.72,
                    "STSBenchmarkMultilingualSTS (fr)": 46.23
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/xlm-roberta-base\">xlm-roberta-base</a>",
                    "SummEvalFr": 29.14
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/xlm-roberta-base\">xlm-roberta-base</a>"
                }
            ]
        }
    },
    "sentence-croissant-llm-base": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/Wissam42/sentence-croissant-llm-base\">sentence-croissant-llm-base</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/Wissam42/sentence-croissant-llm-base\">sentence-croissant-llm-base</a>",
                    "AmazonReviewsClassification (fr)": 34.79,
                    "MTOPDomainClassification (fr)": 85.52,
                    "MTOPIntentClassification (fr)": 63.12,
                    "MasakhaNEWSClassification (fra)": 79.29,
                    "MassiveIntentClassification (fr)": 59.41,
                    "MassiveScenarioClassification (fr)": 65.29
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/Wissam42/sentence-croissant-llm-base\">sentence-croissant-llm-base</a>",
                    "AlloProfClusteringP2P": 64.12,
                    "AlloProfClusteringS2S": 32.52,
                    "HALClusteringS2S": 23.4,
                    "MLSUMClusteringP2P": 42.94,
                    "MLSUMClusteringS2S": 33.91,
                    "MasakhaNEWSClusteringP2P (fra)": 53.94,
                    "MasakhaNEWSClusteringS2S (fra)": 41.05
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/Wissam42/sentence-croissant-llm-base\">sentence-croissant-llm-base</a>",
                    "OpusparcusPC (fr)": 91.42,
                    "PawsX (fr)": 63.13
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/Wissam42/sentence-croissant-llm-base\">sentence-croissant-llm-base</a>",
                    "AlloprofReranking": 53.0,
                    "SyntecReranking": 82.9
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/Wissam42/sentence-croissant-llm-base\">sentence-croissant-llm-base</a>",
                    "AlloprofRetrieval": 29.97,
                    "BSARDRetrieval": 0.0,
                    "MintakaRetrieval (fr)": 21.31,
                    "SyntecRetrieval": 74.2,
                    "XPQARetrieval (fr)": 58.57
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/Wissam42/sentence-croissant-llm-base\">sentence-croissant-llm-base</a>",
                    "SICKFr": 69.6,
                    "STS22 (fr)": 78.77,
                    "STSBenchmarkMultilingualSTS (fr)": 79.23
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/Wissam42/sentence-croissant-llm-base\">sentence-croissant-llm-base</a>",
                    "SummEvalFr": 29.04
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/Wissam42/sentence-croissant-llm-base\">sentence-croissant-llm-base</a>"
                }
            ]
        }
    },
    "dfm-sentence-encoder-large-1": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/chcaa/dfm-encoder-large-v1\">dfm-sentence-encoder-large-1</a>",
                    "BornholmBitextMining": 15.93
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/chcaa/dfm-encoder-large-v1\">dfm-sentence-encoder-large-1</a>",
                    "AngryTweetsClassification": 54.42,
                    "DKHateClassification": 63.19,
                    "DanishPoliticalCommentsClassification": 37.76,
                    "LccSentimentClassification": 58.07,
                    "MassiveIntentClassification (da)": 65.83,
                    "MassiveIntentClassification (nb)": 57.57,
                    "MassiveIntentClassification (sv)": 55.39,
                    "MassiveScenarioClassification (da)": 71.61,
                    "MassiveScenarioClassification (nb)": 63.66,
                    "MassiveScenarioClassification (sv)": 57.16,
                    "NoRecClassification": 50.46,
                    "NordicLangClassification": 75.98,
                    "NorwegianParliament": 57.66,
                    "ScalaDaClassification": 66.09,
                    "ScalaNbClassification": 62.69
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/chcaa/dfm-encoder-large-v1\">dfm-sentence-encoder-large-1</a>"
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/chcaa/dfm-encoder-large-v1\">dfm-sentence-encoder-large-1</a>"
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/chcaa/dfm-encoder-large-v1\">dfm-sentence-encoder-large-1</a>"
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/chcaa/dfm-encoder-large-v1\">dfm-sentence-encoder-large-1</a>"
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/chcaa/dfm-encoder-large-v1\">dfm-sentence-encoder-large-1</a>"
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/chcaa/dfm-encoder-large-v1\">dfm-sentence-encoder-large-1</a>"
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/chcaa/dfm-encoder-large-v1\">dfm-sentence-encoder-large-1</a>"
                }
            ]
        }
    },
    "bge-large-zh-v1.5": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/BAAI/bge-large-zh-v1.5\">bge-large-zh-v1.5</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/BAAI/bge-large-zh-v1.5\">bge-large-zh-v1.5</a>",
                    "AmazonReviewsClassification (zh)": 41.38,
                    "IFlyTek": 48.74,
                    "JDReview": 85.14,
                    "MassiveIntentClassification (zh-CN)": 68.84,
                    "MassiveScenarioClassification (zh-CN)": 74.7,
                    "MultilingualSentiment": 72.97,
                    "OnlineShopping": 91.43,
                    "TNews": 52.1,
                    "Waimai": 86.9
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/BAAI/bge-large-zh-v1.5\">bge-large-zh-v1.5</a>",
                    "CLSClusteringP2P": 41.44,
                    "CLSClusteringS2S": 38.33,
                    "ThuNewsClusteringP2P": 59.61,
                    "ThuNewsClusteringS2S": 56.58
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/BAAI/bge-large-zh-v1.5\">bge-large-zh-v1.5</a>",
                    "Cmnli": 85.27,
                    "Ocnli": 77.94
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/BAAI/bge-large-zh-v1.5\">bge-large-zh-v1.5</a>",
                    "CMedQAv1": 83.45,
                    "CMedQAv2": 85.44,
                    "MMarcoReranking": 28.74,
                    "T2Reranking": 65.74
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/BAAI/bge-large-zh-v1.5\">bge-large-zh-v1.5</a>",
                    "CmedqaRetrieval": 42.57,
                    "CovidRetrieval": 73.35,
                    "DuRetrieval": 86.32,
                    "EcomRetrieval": 65.33,
                    "MMarcoRetrieval": 79.23,
                    "MedicalRetrieval": 59.59,
                    "T2Retrieval": 83.99,
                    "VideoRetrieval": 73.32
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/BAAI/bge-large-zh-v1.5\">bge-large-zh-v1.5</a>",
                    "AFQMC": 44.36,
                    "ATEC": 49.54,
                    "BQ": 62.94,
                    "LCQMC": 74.33,
                    "PAWSX": 33.92,
                    "QBQTC": 37.29,
                    "STS22 (zh)": 68.94,
                    "STSB": 78.7
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/BAAI/bge-large-zh-v1.5\">bge-large-zh-v1.5</a>"
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/BAAI/bge-large-zh-v1.5\">bge-large-zh-v1.5</a>"
                }
            ]
        }
    },
    "use-cmlm-multilingual": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/use-cmlm-multilingual\">use-cmlm-multilingual</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/use-cmlm-multilingual\">use-cmlm-multilingual</a>"
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/use-cmlm-multilingual\">use-cmlm-multilingual</a>",
                    "BlurbsClusteringP2P": 29.63,
                    "BlurbsClusteringS2S": 15.24,
                    "TenKGnadClusteringP2P": 37.1,
                    "TenKGnadClusteringS2S": 25.64
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/use-cmlm-multilingual\">use-cmlm-multilingual</a>"
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/use-cmlm-multilingual\">use-cmlm-multilingual</a>"
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/use-cmlm-multilingual\">use-cmlm-multilingual</a>"
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/use-cmlm-multilingual\">use-cmlm-multilingual</a>"
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/use-cmlm-multilingual\">use-cmlm-multilingual</a>"
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/use-cmlm-multilingual\">use-cmlm-multilingual</a>"
                }
            ]
        }
    },
    "m3e-base": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/moka-ai/m3e-base\">m3e-base</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/moka-ai/m3e-base\">m3e-base</a>",
                    "AmazonReviewsClassification (zh)": 43.02,
                    "IFlyTek": 44.42,
                    "JDReview": 85.33,
                    "MassiveIntentClassification (zh-CN)": 68.4,
                    "MassiveScenarioClassification (zh-CN)": 74.6,
                    "MultilingualSentiment": 71.9,
                    "OnlineShopping": 87.77,
                    "TNews": 48.28,
                    "Waimai": 83.99
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/moka-ai/m3e-base\">m3e-base</a>",
                    "CLSClusteringP2P": 39.81,
                    "CLSClusteringS2S": 37.34,
                    "ThuNewsClusteringP2P": 59.77,
                    "ThuNewsClusteringS2S": 53.78
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/moka-ai/m3e-base\">m3e-base</a>",
                    "Cmnli": 69.98,
                    "Ocnli": 58.0
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/moka-ai/m3e-base\">m3e-base</a>",
                    "CMedQAv1": 77.05,
                    "CMedQAv2": 76.76,
                    "MMarcoReranking": 17.51,
                    "T2Reranking": 66.03
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/moka-ai/m3e-base\">m3e-base</a>",
                    "CmedqaRetrieval": 30.33,
                    "CovidRetrieval": 66.42,
                    "DuRetrieval": 75.76,
                    "EcomRetrieval": 50.27,
                    "MMarcoRetrieval": 65.46,
                    "MedicalRetrieval": 42.79,
                    "T2Retrieval": 73.14,
                    "VideoRetrieval": 51.11
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/moka-ai/m3e-base\">m3e-base</a>",
                    "AFQMC": 35.87,
                    "ATEC": 41.27,
                    "BQ": 63.81,
                    "LCQMC": 74.88,
                    "PAWSX": 12.19,
                    "QBQTC": 32.07,
                    "STS22 (zh)": 66.73,
                    "STSB": 76.97
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/moka-ai/m3e-base\">m3e-base</a>"
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/moka-ai/m3e-base\">m3e-base</a>"
                }
            ]
        }
    },
    "bert-base-10lang-cased": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/Geotrend/bert-base-10lang-cased\">bert-base-10lang-cased</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/Geotrend/bert-base-10lang-cased\">bert-base-10lang-cased</a>",
                    "AmazonReviewsClassification (fr)": 29.38,
                    "MTOPDomainClassification (fr)": 63.65,
                    "MTOPIntentClassification (fr)": 37.87,
                    "MasakhaNEWSClassification (fra)": 63.93,
                    "MassiveIntentClassification (fr)": 37.28,
                    "MassiveScenarioClassification (fr)": 44.5
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/Geotrend/bert-base-10lang-cased\">bert-base-10lang-cased</a>",
                    "AlloProfClusteringP2P": 53.22,
                    "AlloProfClusteringS2S": 42.92,
                    "HALClusteringS2S": 19.94,
                    "MLSUMClusteringP2P": 40.96,
                    "MLSUMClusteringS2S": 31.87,
                    "MasakhaNEWSClusteringP2P (fra)": 24.23,
                    "MasakhaNEWSClusteringS2S (fra)": 24.46
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/Geotrend/bert-base-10lang-cased\">bert-base-10lang-cased</a>",
                    "OpusparcusPC (fr)": 86.79,
                    "PawsX (fr)": 53.4
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/Geotrend/bert-base-10lang-cased\">bert-base-10lang-cased</a>",
                    "AlloprofReranking": 36.21,
                    "SyntecReranking": 53.25
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/Geotrend/bert-base-10lang-cased\">bert-base-10lang-cased</a>",
                    "AlloprofRetrieval": 1.6,
                    "BSARDRetrieval": 0.0,
                    "MintakaRetrieval (fr)": 3.55,
                    "SyntecRetrieval": 18.95,
                    "XPQARetrieval (fr)": 18.39
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/Geotrend/bert-base-10lang-cased\">bert-base-10lang-cased</a>",
                    "SICKFr": 58.76,
                    "STS22 (fr)": 40.31,
                    "STSBenchmarkMultilingualSTS (fr)": 52.25
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/Geotrend/bert-base-10lang-cased\">bert-base-10lang-cased</a>",
                    "SummEvalFr": 29.06
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/Geotrend/bert-base-10lang-cased\">bert-base-10lang-cased</a>"
                }
            ]
        }
    },
    "text-embedding-3-small": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/new-embedding-models-and-api-updates\">text-embedding-3-small</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/new-embedding-models-and-api-updates\">text-embedding-3-small</a>",
                    "AmazonCounterfactualClassification (en)": 76.42,
                    "AmazonPolarityClassification": 90.84,
                    "AmazonReviewsClassification (en)": 45.73,
                    "Banking77Classification": 83.01,
                    "EmotionClassification": 50.63,
                    "ImdbClassification": 83.66,
                    "MTOPDomainClassification (en)": 93.91,
                    "MTOPIntentClassification (en)": 70.98,
                    "MassiveIntentClassification (en)": 72.86,
                    "MassiveScenarioClassification (en)": 76.84,
                    "ToxicConversationsClassification": 71.91,
                    "TweetSentimentExtractionClassification": 61.72
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/new-embedding-models-and-api-updates\">text-embedding-3-small</a>",
                    "ArxivClusteringP2P": 46.57,
                    "ArxivClusteringS2S": 39.35,
                    "BiorxivClusteringP2P": 37.77,
                    "BiorxivClusteringS2S": 34.68,
                    "MedrxivClusteringP2P": 32.77,
                    "MedrxivClusteringS2S": 31.85,
                    "RedditClustering": 64.09,
                    "RedditClusteringP2P": 65.12,
                    "StackExchangeClustering": 72.05,
                    "StackExchangeClusteringP2P": 34.04,
                    "TwentyNewsgroupsClustering": 54.81
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/new-embedding-models-and-api-updates\">text-embedding-3-small</a>",
                    "OpusparcusPC (fr)": 94.45,
                    "SprintDuplicateQuestions": 94.58,
                    "TwitterSemEval2015": 73.33,
                    "TwitterURLCorpus": 87.21
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/new-embedding-models-and-api-updates\">text-embedding-3-small</a>",
                    "AskUbuntuDupQuestions": 62.18,
                    "MindSmallReranking": 29.93,
                    "SciDocsRR": 83.25,
                    "StackOverflowDupQuestions": 51.53
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/new-embedding-models-and-api-updates\">text-embedding-3-small</a>",
                    "ArguAna": 55.49,
                    "CQADupstackRetrieval": 42.58,
                    "ClimateFEVER": 26.86,
                    "DBPedia": 39.97,
                    "FEVER": 79.42,
                    "FiQA2018": 44.91,
                    "HotpotQA": 63.63,
                    "MSMARCO": 37.02,
                    "NFCorpus": 38.33,
                    "NQ": 52.86,
                    "QuoraRetrieval": 88.83,
                    "SCIDOCS": 20.8,
                    "SciFact": 73.37,
                    "TRECCOVID": 77.9,
                    "Touche2020": 24.28
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/new-embedding-models-and-api-updates\">text-embedding-3-small</a>",
                    "BIOSSES": 88.72,
                    "SICK-R": 76.73,
                    "STS12": 73.09,
                    "STS13": 84.92,
                    "STS14": 79.81,
                    "STS15": 88.01,
                    "STS16": 84.41,
                    "STS17 (en-en)": 90.94,
                    "STS22 (en)": 64.96,
                    "STSBenchmark": 84.24
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/new-embedding-models-and-api-updates\">text-embedding-3-small</a>",
                    "SummEval": 31.12
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/new-embedding-models-and-api-updates\">text-embedding-3-small</a>"
                }
            ]
        }
    },
    "DanskBERT": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/vesteinn/DanskBERT\">DanskBERT</a>",
                    "BornholmBitextMining": 6.34
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/vesteinn/DanskBERT\">DanskBERT</a>",
                    "AngryTweetsClassification": 54.28,
                    "DKHateClassification": 59.3,
                    "DanishPoliticalCommentsClassification": 39.81,
                    "LccSentimentClassification": 58.0,
                    "MassiveIntentClassification (da)": 54.68,
                    "MassiveIntentClassification (nb)": 45.38,
                    "MassiveIntentClassification (sv)": 40.82,
                    "MassiveScenarioClassification (da)": 59.56,
                    "MassiveScenarioClassification (nb)": 47.55,
                    "MassiveScenarioClassification (sv)": 40.14,
                    "NoRecClassification": 46.06,
                    "NordicLangClassification": 74.25,
                    "NorwegianParliament": 56.79,
                    "ScalaDaClassification": 66.59,
                    "ScalaNbClassification": 59.99
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/vesteinn/DanskBERT\">DanskBERT</a>"
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/vesteinn/DanskBERT\">DanskBERT</a>"
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/vesteinn/DanskBERT\">DanskBERT</a>"
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/vesteinn/DanskBERT\">DanskBERT</a>"
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/vesteinn/DanskBERT\">DanskBERT</a>"
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/vesteinn/DanskBERT\">DanskBERT</a>"
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/vesteinn/DanskBERT\">DanskBERT</a>"
                }
            ]
        }
    },
    "e5-large": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/e5-large\">e5-large</a>",
                    "BornholmBitextMining": 40.15
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/e5-large\">e5-large</a>",
                    "AngryTweetsClassification": 46.14,
                    "DKHateClassification": 58.72,
                    "DanishPoliticalCommentsClassification": 28.67,
                    "LccSentimentClassification": 42.13,
                    "MassiveIntentClassification (da)": 42.29,
                    "MassiveIntentClassification (nb)": 40.63,
                    "MassiveIntentClassification (sv)": 40.69,
                    "MassiveScenarioClassification (da)": 52.95,
                    "MassiveScenarioClassification (nb)": 51.91,
                    "MassiveScenarioClassification (sv)": 50.97,
                    "NoRecClassification": 41.83,
                    "NordicLangClassification": 58.3,
                    "NorwegianParliament": 57.26,
                    "ScalaDaClassification": 49.9,
                    "ScalaNbClassification": 50.13
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/e5-large\">e5-large</a>"
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/e5-large\">e5-large</a>"
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/e5-large\">e5-large</a>"
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/e5-large\">e5-large</a>"
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/e5-large\">e5-large</a>"
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/e5-large\">e5-large</a>"
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/e5-large\">e5-large</a>"
                }
            ]
        }
    },
    "voyage-lite-01-instruct": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://docs.voyageai.com/embeddings/\">voyage-lite-01-instruct</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://docs.voyageai.com/embeddings/\">voyage-lite-01-instruct</a>",
                    "AmazonCounterfactualClassification (en)": 71.43,
                    "AmazonPolarityClassification": 96.41,
                    "AmazonReviewsClassification (en)": 57.06,
                    "Banking77Classification": 81.64,
                    "EmotionClassification": 48.29,
                    "ImdbClassification": 95.49,
                    "MTOPDomainClassification (en)": 96.3,
                    "MTOPIntentClassification (en)": 67.93,
                    "MassiveIntentClassification (en)": 71.29,
                    "MassiveScenarioClassification (en)": 76.74,
                    "ToxicConversationsClassification": 75.45,
                    "TweetSentimentExtractionClassification": 59.44
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://docs.voyageai.com/embeddings/\">voyage-lite-01-instruct</a>",
                    "ArxivClusteringP2P": 47.92,
                    "ArxivClusteringS2S": 42.42,
                    "BiorxivClusteringP2P": 38.72,
                    "BiorxivClusteringS2S": 36.6,
                    "MedrxivClusteringP2P": 34.04,
                    "MedrxivClusteringS2S": 32.81,
                    "RedditClustering": 61.56,
                    "RedditClusteringP2P": 65.35,
                    "StackExchangeClustering": 70.16,
                    "StackExchangeClusteringP2P": 38.23,
                    "TwentyNewsgroupsClustering": 53.56
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://docs.voyageai.com/embeddings/\">voyage-lite-01-instruct</a>",
                    "SprintDuplicateQuestions": 96.01,
                    "TwitterSemEval2015": 76.87,
                    "TwitterURLCorpus": 86.84
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://docs.voyageai.com/embeddings/\">voyage-lite-01-instruct</a>",
                    "AskUbuntuDupQuestions": 65.77,
                    "MindSmallReranking": 31.69,
                    "SciDocsRR": 87.03,
                    "StackOverflowDupQuestions": 54.49
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://docs.voyageai.com/embeddings/\">voyage-lite-01-instruct</a>",
                    "ArguAna": 58.73,
                    "CQADupstackRetrieval": 45.11,
                    "ClimateFEVER": 37.47,
                    "DBPedia": 43.42,
                    "FEVER": 89.71,
                    "FiQA2018": 44.79,
                    "HotpotQA": 70.46,
                    "MSMARCO": 39.66,
                    "NFCorpus": 43.33,
                    "NQ": 60.65,
                    "QuoraRetrieval": 87.83,
                    "SCIDOCS": 23.19,
                    "SciFact": 73.64,
                    "TRECCOVID": 78.92,
                    "Touche2020": 36.83
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://docs.voyageai.com/embeddings/\">voyage-lite-01-instruct</a>",
                    "BIOSSES": 84.85,
                    "SICK-R": 79.71,
                    "STS12": 77.09,
                    "STS13": 88.91,
                    "STS14": 82.08,
                    "STS15": 89.21,
                    "STS16": 84.74,
                    "STS17 (en-en)": 90.73,
                    "STS22 (en)": 62.1,
                    "STSBenchmark": 89.86
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://docs.voyageai.com/embeddings/\">voyage-lite-01-instruct</a>",
                    "SummEval": 30.97
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://docs.voyageai.com/embeddings/\">voyage-lite-01-instruct</a>"
                }
            ]
        }
    },
    "LLM2Vec-Sheared-Llama-supervised": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/McGill-NLP/LLM2Vec-Sheared-LLaMA-mntp-supervised\">LLM2Vec-Sheared-Llama-supervised</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/McGill-NLP/LLM2Vec-Sheared-LLaMA-mntp-supervised\">LLM2Vec-Sheared-Llama-supervised</a>",
                    "AmazonCounterfactualClassification (en)": 77.42,
                    "AmazonPolarityClassification": 82.05,
                    "AmazonReviewsClassification (en)": 40.81,
                    "Banking77Classification": 86.01,
                    "EmotionClassification": 48.38,
                    "ImdbClassification": 75.33,
                    "MTOPDomainClassification (en)": 94.09,
                    "MTOPIntentClassification (en)": 77.05,
                    "MassiveIntentClassification (en)": 75.58,
                    "MassiveScenarioClassification (en)": 79.16,
                    "ToxicConversationsClassification": 69.92,
                    "TweetSentimentExtractionClassification": 60.76
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/McGill-NLP/LLM2Vec-Sheared-LLaMA-mntp-supervised\">LLM2Vec-Sheared-Llama-supervised</a>",
                    "ArxivClusteringP2P": 43.47,
                    "ArxivClusteringS2S": 39.85,
                    "BiorxivClusteringP2P": 37.1,
                    "BiorxivClusteringS2S": 34.28,
                    "MedrxivClusteringP2P": 33.55,
                    "MedrxivClusteringS2S": 31.11,
                    "RedditClustering": 53.02,
                    "RedditClusteringP2P": 60.47,
                    "StackExchangeClustering": 63.04,
                    "StackExchangeClusteringP2P": 34.01,
                    "TwentyNewsgroupsClustering": 49.37
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/McGill-NLP/LLM2Vec-Sheared-LLaMA-mntp-supervised\">LLM2Vec-Sheared-Llama-supervised</a>",
                    "SprintDuplicateQuestions": 96.25,
                    "TwitterSemEval2015": 76.14,
                    "TwitterURLCorpus": 86.23
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/McGill-NLP/LLM2Vec-Sheared-LLaMA-mntp-supervised\">LLM2Vec-Sheared-Llama-supervised</a>",
                    "AskUbuntuDupQuestions": 60.71,
                    "MindSmallReranking": 31.96,
                    "SciDocsRR": 79.23,
                    "StackOverflowDupQuestions": 49.61
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/McGill-NLP/LLM2Vec-Sheared-LLaMA-mntp-supervised\">LLM2Vec-Sheared-Llama-supervised</a>",
                    "ArguAna": 51.66,
                    "CQADupstackRetrieval": 41.73,
                    "ClimateFEVER": 33.49,
                    "DBPedia": 43.58,
                    "FEVER": 86.81,
                    "FiQA2018": 41.0,
                    "HotpotQA": 63.85,
                    "MSMARCO": 38.32,
                    "NFCorpus": 37.12,
                    "NQ": 53.89,
                    "QuoraRetrieval": 87.37,
                    "SCIDOCS": 17.96,
                    "SciFact": 72.08,
                    "TRECCOVID": 80.41,
                    "Touche2020": 22.31
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/McGill-NLP/LLM2Vec-Sheared-LLaMA-mntp-supervised\">LLM2Vec-Sheared-Llama-supervised</a>",
                    "BIOSSES": 85.88,
                    "SICK-R": 82.25,
                    "STS12": 78.28,
                    "STS13": 85.52,
                    "STS14": 82.49,
                    "STS15": 88.76,
                    "STS16": 87.11,
                    "STS17 (en-en)": 90.1,
                    "STS22 (en)": 68.25,
                    "STSBenchmark": 87.16
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/McGill-NLP/LLM2Vec-Sheared-LLaMA-mntp-supervised\">LLM2Vec-Sheared-Llama-supervised</a>",
                    "SummEval": 30.01
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/McGill-NLP/LLM2Vec-Sheared-LLaMA-mntp-supervised\">LLM2Vec-Sheared-Llama-supervised</a>"
                }
            ]
        }
    },
    "electra-small-nordic": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/jonfd/electra-small-nordic\">electra-small-nordic</a>",
                    "BornholmBitextMining": 1.44
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/jonfd/electra-small-nordic\">electra-small-nordic</a>",
                    "AngryTweetsClassification": 47.91,
                    "DKHateClassification": 59.45,
                    "DanishPoliticalCommentsClassification": 31.89,
                    "LccSentimentClassification": 47.93,
                    "MassiveIntentClassification (da)": 26.3,
                    "MassiveIntentClassification (nb)": 24.6,
                    "MassiveIntentClassification (sv)": 27.58,
                    "MassiveScenarioClassification (da)": 28.93,
                    "MassiveScenarioClassification (nb)": 27.3,
                    "MassiveScenarioClassification (sv)": 29.93,
                    "NoRecClassification": 45.44,
                    "NordicLangClassification": 57.82,
                    "NorwegianParliament": 53.25,
                    "ScalaDaClassification": 70.41,
                    "ScalaNbClassification": 75.28
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/jonfd/electra-small-nordic\">electra-small-nordic</a>"
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/jonfd/electra-small-nordic\">electra-small-nordic</a>"
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/jonfd/electra-small-nordic\">electra-small-nordic</a>"
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/jonfd/electra-small-nordic\">electra-small-nordic</a>"
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/jonfd/electra-small-nordic\">electra-small-nordic</a>"
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/jonfd/electra-small-nordic\">electra-small-nordic</a>"
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/jonfd/electra-small-nordic\">electra-small-nordic</a>"
                }
            ]
        }
    },
    "text-search-ada-doc-001": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/introducing-text-and-code-embeddings\">text-search-ada-doc-001</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/introducing-text-and-code-embeddings\">text-search-ada-doc-001</a>"
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/introducing-text-and-code-embeddings\">text-search-ada-doc-001</a>",
                    "TwentyNewsgroupsClustering": 32.92
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/introducing-text-and-code-embeddings\">text-search-ada-doc-001</a>"
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/introducing-text-and-code-embeddings\">text-search-ada-doc-001</a>"
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/introducing-text-and-code-embeddings\">text-search-ada-doc-001</a>"
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/introducing-text-and-code-embeddings\">text-search-ada-doc-001</a>"
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/introducing-text-and-code-embeddings\">text-search-ada-doc-001</a>"
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/introducing-text-and-code-embeddings\">text-search-ada-doc-001</a>"
                }
            ]
        }
    },
    "flaubert_base_uncased": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/flaubert/flaubert_base_uncased\">flaubert_base_uncased</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/flaubert/flaubert_base_uncased\">flaubert_base_uncased</a>",
                    "AmazonReviewsClassification (fr)": 23.52,
                    "MTOPDomainClassification (fr)": 27.74,
                    "MTOPIntentClassification (fr)": 8.61,
                    "MasakhaNEWSClassification (fra)": 62.61,
                    "MassiveIntentClassification (fr)": 6.24,
                    "MassiveScenarioClassification (fr)": 10.98
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/flaubert/flaubert_base_uncased\">flaubert_base_uncased</a>",
                    "AlloProfClusteringP2P": 43.2,
                    "AlloProfClusteringS2S": 12.94,
                    "HALClusteringS2S": 1.8,
                    "MLSUMClusteringP2P": 33.22,
                    "MLSUMClusteringS2S": 14.9,
                    "MasakhaNEWSClusteringP2P (fra)": 28.49,
                    "MasakhaNEWSClusteringS2S (fra)": 22.58
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/flaubert/flaubert_base_uncased\">flaubert_base_uncased</a>",
                    "OpusparcusPC (fr)": 82.0,
                    "PawsX (fr)": 52.78
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/flaubert/flaubert_base_uncased\">flaubert_base_uncased</a>",
                    "AlloprofReranking": 34.55,
                    "SyntecReranking": 57.18
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/flaubert/flaubert_base_uncased\">flaubert_base_uncased</a>",
                    "AlloprofRetrieval": 1.72,
                    "BSARDRetrieval": 0.0,
                    "MintakaRetrieval (fr)": 0.51,
                    "SyntecRetrieval": 22.33,
                    "XPQARetrieval (fr)": 9.09
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/flaubert/flaubert_base_uncased\">flaubert_base_uncased</a>",
                    "SICKFr": 41.9,
                    "STS22 (fr)": 55.15,
                    "STSBenchmarkMultilingualSTS (fr)": 33.41
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/flaubert/flaubert_base_uncased\">flaubert_base_uncased</a>",
                    "SummEvalFr": 29.43
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/flaubert/flaubert_base_uncased\">flaubert_base_uncased</a>"
                }
            ]
        }
    },
    "gbert-large": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/deepset/gbert-large\">gbert-large</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/deepset/gbert-large\">gbert-large</a>"
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/deepset/gbert-large\">gbert-large</a>",
                    "BlurbsClusteringP2P": 39.3,
                    "BlurbsClusteringS2S": 13.38,
                    "TenKGnadClusteringP2P": 41.69,
                    "TenKGnadClusteringS2S": 34.97
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/deepset/gbert-large\">gbert-large</a>"
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/deepset/gbert-large\">gbert-large</a>"
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/deepset/gbert-large\">gbert-large</a>"
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/deepset/gbert-large\">gbert-large</a>"
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/deepset/gbert-large\">gbert-large</a>"
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/deepset/gbert-large\">gbert-large</a>"
                }
            ]
        }
    },
    "norbert3-large": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/ltg/norbert3-large\">norbert3-large</a>",
                    "BornholmBitextMining": 2.9
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/ltg/norbert3-large\">norbert3-large</a>",
                    "AngryTweetsClassification": 49.04,
                    "DKHateClassification": 62.71,
                    "DanishPoliticalCommentsClassification": 33.53,
                    "LccSentimentClassification": 46.93,
                    "MassiveIntentClassification (da)": 45.98,
                    "MassiveIntentClassification (nb)": 47.42,
                    "MassiveIntentClassification (sv)": 48.47,
                    "MassiveScenarioClassification (da)": 50.51,
                    "MassiveScenarioClassification (nb)": 54.25,
                    "MassiveScenarioClassification (sv)": 50.6,
                    "NoRecClassification": 50.46,
                    "NordicLangClassification": 84.25,
                    "NorwegianParliament": 58.85,
                    "ScalaDaClassification": 60.72,
                    "ScalaNbClassification": 66.79
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/ltg/norbert3-large\">norbert3-large</a>"
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/ltg/norbert3-large\">norbert3-large</a>"
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/ltg/norbert3-large\">norbert3-large</a>"
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/ltg/norbert3-large\">norbert3-large</a>"
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/ltg/norbert3-large\">norbert3-large</a>"
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/ltg/norbert3-large\">norbert3-large</a>"
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/ltg/norbert3-large\">norbert3-large</a>"
                }
            ]
        }
    },
    "monot5-base-msmarco-10k": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/castorini/monot5-base-msmarco-10k\">monot5-base-msmarco-10k</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/castorini/monot5-base-msmarco-10k\">monot5-base-msmarco-10k</a>"
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/castorini/monot5-base-msmarco-10k\">monot5-base-msmarco-10k</a>"
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/castorini/monot5-base-msmarco-10k\">monot5-base-msmarco-10k</a>"
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/castorini/monot5-base-msmarco-10k\">monot5-base-msmarco-10k</a>"
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/castorini/monot5-base-msmarco-10k\">monot5-base-msmarco-10k</a>"
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/castorini/monot5-base-msmarco-10k\">monot5-base-msmarco-10k</a>"
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/castorini/monot5-base-msmarco-10k\">monot5-base-msmarco-10k</a>"
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/castorini/monot5-base-msmarco-10k\">monot5-base-msmarco-10k</a>",
                    "Core17InstructionRetrieval": -4.06,
                    "News21InstructionRetrieval": 5.02,
                    "Robust04InstructionRetrieval": -6.2
                }
            ]
        }
    },
    "sentence-camembert-large": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/dangvantuan/sentence-camembert-large\">sentence-camembert-large</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/dangvantuan/sentence-camembert-large\">sentence-camembert-large</a>",
                    "AmazonReviewsClassification (fr)": 37.97,
                    "MTOPDomainClassification (fr)": 85.74,
                    "MTOPIntentClassification (fr)": 58.62,
                    "MasakhaNEWSClassification (fra)": 80.62,
                    "MassiveIntentClassification (fr)": 62.65,
                    "MassiveScenarioClassification (fr)": 69.29
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/dangvantuan/sentence-camembert-large\">sentence-camembert-large</a>",
                    "AlloProfClusteringP2P": 62.69,
                    "AlloProfClusteringS2S": 42.06,
                    "HALClusteringS2S": 23.9,
                    "MLSUMClusteringP2P": 42.04,
                    "MLSUMClusteringS2S": 32.29,
                    "MasakhaNEWSClusteringP2P (fra)": 54.51,
                    "MasakhaNEWSClusteringS2S (fra)": 44.73
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/dangvantuan/sentence-camembert-large\">sentence-camembert-large</a>",
                    "OpusparcusPC (fr)": 94.63,
                    "PawsX (fr)": 59.59
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/dangvantuan/sentence-camembert-large\">sentence-camembert-large</a>",
                    "AlloprofReranking": 57.62,
                    "SyntecReranking": 88.15
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/dangvantuan/sentence-camembert-large\">sentence-camembert-large</a>",
                    "AlloprofRetrieval": 31.62,
                    "BSARDRetrieval": 0.0,
                    "MintakaRetrieval (fr)": 21.87,
                    "SyntecRetrieval": 81.11,
                    "XPQARetrieval (fr)": 65.62
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/dangvantuan/sentence-camembert-large\">sentence-camembert-large</a>",
                    "SICKFr": 77.7,
                    "STS22 (fr)": 81.73,
                    "STSBenchmarkMultilingualSTS (fr)": 85.79
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/dangvantuan/sentence-camembert-large\">sentence-camembert-large</a>",
                    "SummEvalFr": 30.88
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/dangvantuan/sentence-camembert-large\">sentence-camembert-large</a>"
                }
            ]
        }
    },
    "FollowIR-7B": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/jhu-clsp/FollowIR-7B\">FollowIR-7B</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/jhu-clsp/FollowIR-7B\">FollowIR-7B</a>"
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/jhu-clsp/FollowIR-7B\">FollowIR-7B</a>"
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/jhu-clsp/FollowIR-7B\">FollowIR-7B</a>"
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/jhu-clsp/FollowIR-7B\">FollowIR-7B</a>"
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/jhu-clsp/FollowIR-7B\">FollowIR-7B</a>"
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/jhu-clsp/FollowIR-7B\">FollowIR-7B</a>"
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/jhu-clsp/FollowIR-7B\">FollowIR-7B</a>"
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/jhu-clsp/FollowIR-7B\">FollowIR-7B</a>",
                    "Core17InstructionRetrieval": 16.48,
                    "News21InstructionRetrieval": 6.26,
                    "Robust04InstructionRetrieval": 13.72
                }
            ]
        }
    },
    "voyage-large-2-instruct": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://docs.voyageai.com/embeddings/\">voyage-large-2-instruct</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://docs.voyageai.com/embeddings/\">voyage-large-2-instruct</a>",
                    "AmazonCounterfactualClassification (en)": 77.6,
                    "AmazonPolarityClassification": 96.58,
                    "AmazonReviewsClassification (en)": 50.77,
                    "Banking77Classification": 86.96,
                    "EmotionClassification": 59.81,
                    "ImdbClassification": 96.13,
                    "MTOPDomainClassification (en)": 98.86,
                    "MTOPIntentClassification (en)": 86.97,
                    "MassiveIntentClassification (en)": 81.08,
                    "MassiveScenarioClassification (en)": 87.95,
                    "ToxicConversationsClassification": 83.58,
                    "TweetSentimentExtractionClassification": 71.55
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://docs.voyageai.com/embeddings/\">voyage-large-2-instruct</a>",
                    "ArxivClusteringP2P": 51.81,
                    "ArxivClusteringS2S": 44.73,
                    "BiorxivClusteringP2P": 46.07,
                    "BiorxivClusteringS2S": 40.64,
                    "MedrxivClusteringP2P": 42.94,
                    "MedrxivClusteringS2S": 41.44,
                    "RedditClustering": 68.5,
                    "RedditClusteringP2P": 64.86,
                    "StackExchangeClustering": 74.16,
                    "StackExchangeClusteringP2P": 45.1,
                    "TwentyNewsgroupsClustering": 66.62
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://docs.voyageai.com/embeddings/\">voyage-large-2-instruct</a>",
                    "SprintDuplicateQuestions": 94.5,
                    "TwitterSemEval2015": 86.32,
                    "TwitterURLCorpus": 86.9
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://docs.voyageai.com/embeddings/\">voyage-large-2-instruct</a>",
                    "AskUbuntuDupQuestions": 64.92,
                    "MindSmallReranking": 30.97,
                    "SciDocsRR": 89.34,
                    "StackOverflowDupQuestions": 55.11
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://docs.voyageai.com/embeddings/\">voyage-large-2-instruct</a>",
                    "ArguAna": 64.06,
                    "CQADupstackRetrieval": 46.6,
                    "ClimateFEVER": 32.65,
                    "DBPedia": 46.03,
                    "FEVER": 91.47,
                    "FiQA2018": 59.76,
                    "HotpotQA": 70.86,
                    "MSMARCO": 40.6,
                    "NFCorpus": 40.32,
                    "NQ": 65.92,
                    "QuoraRetrieval": 87.4,
                    "SCIDOCS": 24.32,
                    "SciFact": 79.99,
                    "TRECCOVID": 85.07,
                    "Touche2020": 39.16
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://docs.voyageai.com/embeddings/\">voyage-large-2-instruct</a>",
                    "BIOSSES": 89.12,
                    "SICK-R": 83.16,
                    "STS12": 76.15,
                    "STS13": 88.49,
                    "STS14": 86.49,
                    "STS15": 91.13,
                    "STS16": 85.68,
                    "STS17 (en-en)": 90.06,
                    "STS22 (en)": 66.32,
                    "STSBenchmark": 89.22
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://docs.voyageai.com/embeddings/\">voyage-large-2-instruct</a>",
                    "SummEval": 30.84
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://docs.voyageai.com/embeddings/\">voyage-large-2-instruct</a>"
                }
            ]
        }
    },
    "udever-bloom-560m": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/izhx/udever-bloom-560m\">udever-bloom-560m</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/izhx/udever-bloom-560m\">udever-bloom-560m</a>",
                    "AmazonReviewsClassification (fr)": 26.85,
                    "MTOPDomainClassification (fr)": 34.99,
                    "MTOPIntentClassification (fr)": 15.76,
                    "MasakhaNEWSClassification (fra)": 67.94,
                    "MassiveIntentClassification (fr)": 15.09,
                    "MassiveScenarioClassification (fr)": 21.67
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/izhx/udever-bloom-560m\">udever-bloom-560m</a>",
                    "AlloProfClusteringP2P": 53.57,
                    "AlloProfClusteringS2S": 22.13,
                    "HALClusteringS2S": 7.68,
                    "MLSUMClusteringP2P": 36.43,
                    "MLSUMClusteringS2S": 25.26,
                    "MasakhaNEWSClusteringP2P (fra)": 37.57,
                    "MasakhaNEWSClusteringS2S (fra)": 20.58
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/izhx/udever-bloom-560m\">udever-bloom-560m</a>",
                    "OpusparcusPC (fr)": 82.1,
                    "PawsX (fr)": 59.69
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/izhx/udever-bloom-560m\">udever-bloom-560m</a>",
                    "AlloprofReranking": 28.75,
                    "SyntecReranking": 50.88
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/izhx/udever-bloom-560m\">udever-bloom-560m</a>",
                    "AlloprofRetrieval": 1.98,
                    "BSARDRetrieval": 0.0,
                    "MintakaRetrieval (fr)": 0.48,
                    "SyntecRetrieval": 24.45,
                    "XPQARetrieval (fr)": 12.98
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/izhx/udever-bloom-560m\">udever-bloom-560m</a>",
                    "SICKFr": 54.54,
                    "STS22 (fr)": 61.35,
                    "STSBenchmarkMultilingualSTS (fr)": 36.78
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/izhx/udever-bloom-560m\">udever-bloom-560m</a>",
                    "SummEvalFr": 23.63
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/izhx/udever-bloom-560m\">udever-bloom-560m</a>"
                }
            ]
        }
    },
    "multilingual-e5-small": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/multilingual-e5-small\">multilingual-e5-small</a>",
                    "BornholmBitextMining": 43.89
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/multilingual-e5-small\">multilingual-e5-small</a>",
                    "AllegroReviews": 37.42,
                    "AmazonReviewsClassification (fr)": 39.68,
                    "AngryTweetsClassification": 53.57,
                    "CBD": 63.25,
                    "DKHateClassification": 60.73,
                    "DanishPoliticalCommentsClassification": 34.38,
                    "IFlyTek": 47.35,
                    "JDReview": 79.34,
                    "LccSentimentClassification": 57.87,
                    "MTOPDomainClassification (fr)": 81.2,
                    "MTOPIntentClassification (fr)": 46.01,
                    "MasakhaNEWSClassification (fra)": 77.65,
                    "MassiveIntentClassification (da)": 54.63,
                    "MassiveIntentClassification (nb)": 53.96,
                    "MassiveIntentClassification (sv)": 56.6,
                    "MassiveIntentClassification (pl)": 57.4,
                    "MassiveScenarioClassification (da)": 62.34,
                    "MassiveScenarioClassification (nb)": 59.9,
                    "MassiveScenarioClassification (sv)": 65.54,
                    "MassiveScenarioClassification (pl)": 64.25,
                    "MultilingualSentiment": 64.74,
                    "NoRecClassification": 53.96,
                    "NordicLangClassification": 75.15,
                    "NorwegianParliament": 60.15,
                    "OnlineShopping": 88.73,
                    "PAC": 70.55,
                    "PolEmo2.0-IN": 67.35,
                    "PolEmo2.0-OUT": 39.13,
                    "ScalaDaClassification": 50.3,
                    "ScalaNbClassification": 50.06,
                    "TNews": 48.38,
                    "Waimai": 83.9
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/multilingual-e5-small\">multilingual-e5-small</a>",
                    "8TagsClustering": 23.92,
                    "AlloProfClusteringP2P": 60.89,
                    "AlloProfClusteringS2S": 32.52,
                    "CLSClusteringP2P": 39.14,
                    "CLSClusteringS2S": 37.79,
                    "HALClusteringS2S": 18.95,
                    "MLSUMClusteringP2P": 43.2,
                    "MLSUMClusteringS2S": 37.61,
                    "MasakhaNEWSClusteringP2P (fra)": 40.12,
                    "MasakhaNEWSClusteringS2S (fra)": 39.22,
                    "ThuNewsClusteringP2P": 55.18,
                    "ThuNewsClusteringS2S": 48.93
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/multilingual-e5-small\">multilingual-e5-small</a>",
                    "CDSC-E": 69.7,
                    "Cmnli": 72.12,
                    "Ocnli": 60.77,
                    "OpusparcusPC (fr)": 92.52,
                    "PPC": 86.72,
                    "PSC": 99.24,
                    "PawsX (fr)": 55.68,
                    "SICK-E-PL": 66.34
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/multilingual-e5-small\">multilingual-e5-small</a>",
                    "AlloprofReranking": 56.17,
                    "CMedQAv1": 63.44,
                    "CMedQAv2": 62.41,
                    "MMarcoReranking": 24.33,
                    "SyntecReranking": 86.7,
                    "T2Reranking": 65.24
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/multilingual-e5-small\">multilingual-e5-small</a>",
                    "AlloprofRetrieval": 27.01,
                    "ArguAna-PL": 37.43,
                    "BSARDRetrieval": 0.0,
                    "CmedqaRetrieval": 24.38,
                    "CovidRetrieval": 72.82,
                    "DBPedia-PL": 29.27,
                    "DuRetrieval": 81.35,
                    "EcomRetrieval": 53.56,
                    "FiQA-PL": 22.03,
                    "HotpotQA-PL": 60.15,
                    "MMarcoRetrieval": 73.17,
                    "MSMARCO-PL": 26.94,
                    "MedicalRetrieval": 44.84,
                    "MintakaRetrieval (fr)": 22.53,
                    "NFCorpus-PL": 26.48,
                    "NQ-PL": 40.46,
                    "Quora-PL": 78.7,
                    "SCIDOCS-PL": 11.6,
                    "SciFact-PL": 62.76,
                    "SyntecRetrieval": 75.76,
                    "T2Retrieval": 71.39,
                    "TRECCOVID-PL": 70.92,
                    "VideoRetrieval": 58.09,
                    "XPQARetrieval (fr)": 57.47
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/multilingual-e5-small\">multilingual-e5-small</a>",
                    "AFQMC": 25.21,
                    "ATEC": 35.14,
                    "BQ": 43.27,
                    "CDSC-R": 90.27,
                    "LCQMC": 72.7,
                    "PAWSX": 11.01,
                    "QBQTC": 30.25,
                    "SICK-R-PL": 69.46,
                    "SICKFr": 75.62,
                    "STS22 (pl)": 35.8,
                    "STSB": 77.73,
                    "STSBenchmarkMultilingualSTS (fr)": 79.32
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/multilingual-e5-small\">multilingual-e5-small</a>",
                    "SummEvalFr": 31.85
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/multilingual-e5-small\">multilingual-e5-small</a>"
                }
            ]
        }
    },
    "Cohere-embed-multilingual-v3.0": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/Cohere/Cohere-embed-multilingual-v3.0\">Cohere-embed-multilingual-v3.0</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/Cohere/Cohere-embed-multilingual-v3.0\">Cohere-embed-multilingual-v3.0</a>",
                    "AmazonReviewsClassification (fr)": 41.89,
                    "MTOPDomainClassification (fr)": 86.23,
                    "MTOPIntentClassification (fr)": 61.07,
                    "MasakhaNEWSClassification (fra)": 83.06,
                    "MassiveIntentClassification (fr)": 62.94,
                    "MassiveScenarioClassification (fr)": 67.29
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/Cohere/Cohere-embed-multilingual-v3.0\">Cohere-embed-multilingual-v3.0</a>",
                    "AlloProfClusteringP2P": 63.53,
                    "AlloProfClusteringS2S": 36.18,
                    "HALClusteringS2S": 19.9,
                    "MLSUMClusteringP2P": 45.08,
                    "MLSUMClusteringS2S": 34.75,
                    "MasakhaNEWSClusteringP2P (fra)": 53.18,
                    "MasakhaNEWSClusteringS2S (fra)": 32.31
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/Cohere/Cohere-embed-multilingual-v3.0\">Cohere-embed-multilingual-v3.0</a>",
                    "OpusparcusPC (fr)": 94.08,
                    "PawsX (fr)": 61.26
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/Cohere/Cohere-embed-multilingual-v3.0\">Cohere-embed-multilingual-v3.0</a>",
                    "AlloprofReranking": 51.01,
                    "SyntecReranking": 85.72
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/Cohere/Cohere-embed-multilingual-v3.0\">Cohere-embed-multilingual-v3.0</a>",
                    "AlloprofRetrieval": 38.36,
                    "BSARDRetrieval": 0.14,
                    "MintakaRetrieval (fr)": 25.44,
                    "SyntecRetrieval": 79.27,
                    "XPQARetrieval (fr)": 58.87
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/Cohere/Cohere-embed-multilingual-v3.0\">Cohere-embed-multilingual-v3.0</a>",
                    "SICKFr": 79.23,
                    "STS22 (fr)": 82.76,
                    "STSBenchmarkMultilingualSTS (fr)": 81.84
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/Cohere/Cohere-embed-multilingual-v3.0\">Cohere-embed-multilingual-v3.0</a>",
                    "SummEvalFr": 31.26
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/Cohere/Cohere-embed-multilingual-v3.0\">Cohere-embed-multilingual-v3.0</a>"
                }
            ]
        }
    },
    "sentence-t5-base": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/sentence-t5-base\">sentence-t5-base</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/sentence-t5-base\">sentence-t5-base</a>",
                    "AmazonCounterfactualClassification (de)": 69.98,
                    "AmazonCounterfactualClassification (en)": 75.82,
                    "AmazonCounterfactualClassification (en-ext)": 76.81,
                    "AmazonCounterfactualClassification (ja)": 46.05,
                    "AmazonPolarityClassification": 85.12,
                    "AmazonReviewsClassification (de)": 37.9,
                    "AmazonReviewsClassification (en)": 44.94,
                    "AmazonReviewsClassification (es)": 37.33,
                    "AmazonReviewsClassification (fr)": 37.35,
                    "AmazonReviewsClassification (ja)": 22.29,
                    "AmazonReviewsClassification (zh)": 21.53,
                    "Banking77Classification": 76.48,
                    "EmotionClassification": 51.35,
                    "ImdbClassification": 77.34,
                    "MTOPDomainClassification (de)": 76.98,
                    "MTOPDomainClassification (en)": 90.34,
                    "MTOPDomainClassification (es)": 73.61,
                    "MTOPDomainClassification (fr)": 75.03,
                    "MTOPDomainClassification (hi)": 21.4,
                    "MTOPDomainClassification (th)": 16.21,
                    "MTOPIntentClassification (de)": 44.43,
                    "MTOPIntentClassification (en)": 63.32,
                    "MTOPIntentClassification (es)": 42.03,
                    "MTOPIntentClassification (fr)": 43.85,
                    "MTOPIntentClassification (hi)": 3.8,
                    "MTOPIntentClassification (th)": 5.21,
                    "MasakhaNEWSClassification (fra)": 81.21,
                    "MassiveIntentClassification (af)": 34.32,
                    "MassiveIntentClassification (am)": 2.38,
                    "MassiveIntentClassification (ar)": 4.53,
                    "MassiveIntentClassification (az)": 31.76,
                    "MassiveIntentClassification (bn)": 2.58,
                    "MassiveIntentClassification (cy)": 28.94,
                    "MassiveIntentClassification (da)": 38.82,
                    "MassiveIntentClassification (de)": 45.23,
                    "MassiveIntentClassification (el)": 10.05,
                    "MassiveIntentClassification (en)": 69.74,
                    "MassiveIntentClassification (es)": 45.32,
                    "MassiveIntentClassification (fa)": 3.58,
                    "MassiveIntentClassification (fi)": 33.52,
                    "MassiveIntentClassification (fr)": 51.13,
                    "MassiveIntentClassification (he)": 2.63,
                    "MassiveIntentClassification (hi)": 2.68,
                    "MassiveIntentClassification (hu)": 32.31,
                    "MassiveIntentClassification (hy)": 3.33,
                    "MassiveIntentClassification (id)": 35.5,
                    "MassiveIntentClassification (is)": 29.82,
                    "MassiveIntentClassification (it)": 45.59,
                    "MassiveIntentClassification (ja)": 3.67,
                    "MassiveIntentClassification (jv)": 31.15,
                    "MassiveIntentClassification (ka)": 2.77,
                    "MassiveIntentClassification (km)": 5.66,
                    "MassiveIntentClassification (kn)": 2.59,
                    "MassiveIntentClassification (ko)": 2.34,
                    "MassiveIntentClassification (lv)": 33.97,
                    "MassiveIntentClassification (ml)": 2.55,
                    "MassiveIntentClassification (mn)": 14.7,
                    "MassiveIntentClassification (ms)": 33.12,
                    "MassiveIntentClassification (my)": 4.42,
                    "MassiveIntentClassification (nb)": 38.53,
                    "MassiveIntentClassification (nl)": 37.96,
                    "MassiveIntentClassification (pl)": 34.41,
                    "MassiveIntentClassification (pt)": 43.35,
                    "MassiveIntentClassification (ro)": 42.69,
                    "MassiveIntentClassification (ru)": 14.82,
                    "MassiveIntentClassification (sl)": 34.54,
                    "MassiveIntentClassification (sq)": 38.54,
                    "MassiveIntentClassification (sv)": 35.98,
                    "MassiveIntentClassification (sw)": 32.14,
                    "MassiveIntentClassification (ta)": 1.41,
                    "MassiveIntentClassification (te)": 2.5,
                    "MassiveIntentClassification (th)": 3.71,
                    "MassiveIntentClassification (tl)": 36.04,
                    "MassiveIntentClassification (tr)": 33.77,
                    "MassiveIntentClassification (ur)": 2.99,
                    "MassiveIntentClassification (vi)": 22.62,
                    "MassiveIntentClassification (zh-CN)": 1.12,
                    "MassiveIntentClassification (zh-TW)": 4.63,
                    "MassiveScenarioClassification (af)": 44.45,
                    "MassiveScenarioClassification (am)": 7.51,
                    "MassiveScenarioClassification (ar)": 12.32,
                    "MassiveScenarioClassification (az)": 38.41,
                    "MassiveScenarioClassification (bn)": 8.45,
                    "MassiveScenarioClassification (cy)": 35.04,
                    "MassiveScenarioClassification (da)": 48.36,
                    "MassiveScenarioClassification (de)": 59.12,
                    "MassiveScenarioClassification (el)": 17.68,
                    "MassiveScenarioClassification (en)": 72.32,
                    "MassiveScenarioClassification (es)": 55.61,
                    "MassiveScenarioClassification (fa)": 6.86,
                    "MassiveScenarioClassification (fi)": 41.34,
                    "MassiveScenarioClassification (fr)": 59.92,
                    "MassiveScenarioClassification (he)": 7.86,
                    "MassiveScenarioClassification (hi)": 7.63,
                    "MassiveScenarioClassification (hu)": 41.31,
                    "MassiveScenarioClassification (hy)": 9.23,
                    "MassiveScenarioClassification (id)": 44.64,
                    "MassiveScenarioClassification (is)": 39.63,
                    "MassiveScenarioClassification (it)": 54.58,
                    "MassiveScenarioClassification (ja)": 4.96,
                    "MassiveScenarioClassification (jv)": 40.73,
                    "MassiveScenarioClassification (ka)": 7.51,
                    "MassiveScenarioClassification (km)": 8.73,
                    "MassiveScenarioClassification (kn)": 7.99,
                    "MassiveScenarioClassification (ko)": 6.03,
                    "MassiveScenarioClassification (lv)": 36.42,
                    "MassiveScenarioClassification (ml)": 6.96,
                    "MassiveScenarioClassification (mn)": 19.85,
                    "MassiveScenarioClassification (ms)": 43.18,
                    "MassiveScenarioClassification (my)": 9.46,
                    "MassiveScenarioClassification (nb)": 46.6,
                    "MassiveScenarioClassification (nl)": 50.0,
                    "MassiveScenarioClassification (pl)": 42.3,
                    "MassiveScenarioClassification (pt)": 52.24,
                    "MassiveScenarioClassification (ro)": 53.7,
                    "MassiveScenarioClassification (ru)": 20.69,
                    "MassiveScenarioClassification (sl)": 39.79,
                    "MassiveScenarioClassification (sq)": 50.16,
                    "MassiveScenarioClassification (sv)": 46.69,
                    "MassiveScenarioClassification (sw)": 40.48,
                    "MassiveScenarioClassification (ta)": 7.47,
                    "MassiveScenarioClassification (te)": 6.87,
                    "MassiveScenarioClassification (th)": 8.26,
                    "MassiveScenarioClassification (tl)": 48.94,
                    "MassiveScenarioClassification (tr)": 41.83,
                    "MassiveScenarioClassification (ur)": 9.77,
                    "MassiveScenarioClassification (vi)": 30.01,
                    "MassiveScenarioClassification (zh-CN)": 4.17,
                    "MassiveScenarioClassification (zh-TW)": 7.91,
                    "ToxicConversationsClassification": 68.2,
                    "TweetSentimentExtractionClassification": 62.71
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/sentence-t5-base\">sentence-t5-base</a>",
                    "AlloProfClusteringP2P": 58.44,
                    "AlloProfClusteringS2S": 35.93,
                    "ArxivClusteringP2P": 39.28,
                    "ArxivClusteringS2S": 27.26,
                    "BiorxivClusteringP2P": 33.99,
                    "BiorxivClusteringS2S": 22.92,
                    "BlurbsClusteringP2P": 30.59,
                    "BlurbsClusteringS2S": 11.57,
                    "HALClusteringS2S": 17.72,
                    "MLSUMClusteringP2P": 40.77,
                    "MLSUMClusteringS2S": 30.06,
                    "MasakhaNEWSClusteringP2P (fra)": 61.9,
                    "MasakhaNEWSClusteringS2S (fra)": 35.64,
                    "MedrxivClusteringP2P": 33.2,
                    "MedrxivClusteringS2S": 26.13,
                    "RedditClustering": 52.93,
                    "RedditClusteringP2P": 59.67,
                    "StackExchangeClustering": 63.13,
                    "StackExchangeClusteringP2P": 35.68,
                    "TenKGnadClusteringP2P": 44.88,
                    "TenKGnadClusteringS2S": 18.11,
                    "TwentyNewsgroupsClustering": 48.1
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/sentence-t5-base\">sentence-t5-base</a>",
                    "OpusparcusPC (fr)": 89.4,
                    "PawsX (fr)": 55.35,
                    "SprintDuplicateQuestions": 91.23,
                    "TwitterSemEval2015": 78.25,
                    "TwitterURLCorpus": 86.05
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/sentence-t5-base\">sentence-t5-base</a>",
                    "AlloprofReranking": 50.12,
                    "AskUbuntuDupQuestions": 59.73,
                    "MindSmallReranking": 30.2,
                    "SciDocsRR": 73.96,
                    "StackOverflowDupQuestions": 48.46,
                    "SyntecReranking": 78.05
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/sentence-t5-base\">sentence-t5-base</a>",
                    "AlloprofRetrieval": 27.52,
                    "ArguAna": 44.85,
                    "BSARDRetrieval": 0.16,
                    "CQADupstackRetrieval": 35.23,
                    "ClimateFEVER": 10.37,
                    "DBPedia": 27.77,
                    "FEVER": 26.17,
                    "FiQA2018": 34.83,
                    "HotpotQA": 33.2,
                    "MSMARCO": 20.7,
                    "MintakaRetrieval (fr)": 21.04,
                    "NFCorpus": 28.65,
                    "NQ": 36.32,
                    "QuoraRetrieval": 85.49,
                    "SCIDOCS": 14.15,
                    "SciFact": 45.76,
                    "SyntecRetrieval": 67.0,
                    "TRECCOVID": 40.7,
                    "Touche2020": 20.3,
                    "XPQARetrieval (fr)": 45.19
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/sentence-t5-base\">sentence-t5-base</a>",
                    "BIOSSES": 75.89,
                    "SICK-R": 80.18,
                    "SICKFr": 71.74,
                    "STS12": 78.05,
                    "STS13": 85.85,
                    "STS14": 82.19,
                    "STS15": 87.46,
                    "STS16": 84.03,
                    "STS17 (ar-ar)": 13.36,
                    "STS17 (en-ar)": -5.65,
                    "STS17 (en-de)": 67.11,
                    "STS17 (en-en)": 89.57,
                    "STS17 (en-tr)": -0.02,
                    "STS17 (es-en)": 47.72,
                    "STS17 (es-es)": 79.94,
                    "STS17 (fr-en)": 56.61,
                    "STS17 (it-en)": 30.46,
                    "STS17 (ko-ko)": 10.06,
                    "STS17 (nl-en)": 36.46,
                    "STS22 (ar)": 31.2,
                    "STS22 (de)": 42.08,
                    "STS22 (de-en)": 46.9,
                    "STS22 (de-fr)": 55.04,
                    "STS22 (de-pl)": 33.94,
                    "STS22 (en)": 62.66,
                    "STS22 (es)": 53.81,
                    "STS22 (es-en)": 65.19,
                    "STS22 (es-it)": 55.29,
                    "STS22 (fr)": 77.69,
                    "STS22 (fr-pl)": 28.17,
                    "STS22 (it)": 60.65,
                    "STS22 (pl)": 24.42,
                    "STS22 (pl-en)": 42.97,
                    "STS22 (ru)": 12.13,
                    "STS22 (tr)": 40.45,
                    "STS22 (zh)": 32.9,
                    "STS22 (zh-en)": 20.15,
                    "STSBenchmark": 85.52,
                    "STSBenchmarkMultilingualSTS (fr)": 74.04
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/sentence-t5-base\">sentence-t5-base</a>",
                    "SummEval": 31.39,
                    "SummEvalFr": 30.01
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/sentence-t5-base\">sentence-t5-base</a>"
                }
            ]
        }
    },
    "gelectra-large": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/deepset/gelectra-large\">gelectra-large</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/deepset/gelectra-large\">gelectra-large</a>"
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/deepset/gelectra-large\">gelectra-large</a>",
                    "BlurbsClusteringP2P": 13.96,
                    "BlurbsClusteringS2S": 7.57,
                    "TenKGnadClusteringP2P": 11.49,
                    "TenKGnadClusteringS2S": 3.91
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/deepset/gelectra-large\">gelectra-large</a>"
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/deepset/gelectra-large\">gelectra-large</a>"
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/deepset/gelectra-large\">gelectra-large</a>"
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/deepset/gelectra-large\">gelectra-large</a>"
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/deepset/gelectra-large\">gelectra-large</a>"
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/deepset/gelectra-large\">gelectra-large</a>"
                }
            ]
        }
    },
    "monobert-large-msmarco": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/castorini/monobert-large-msmarco\">monobert-large-msmarco</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/castorini/monobert-large-msmarco\">monobert-large-msmarco</a>"
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/castorini/monobert-large-msmarco\">monobert-large-msmarco</a>"
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/castorini/monobert-large-msmarco\">monobert-large-msmarco</a>"
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/castorini/monobert-large-msmarco\">monobert-large-msmarco</a>"
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/castorini/monobert-large-msmarco\">monobert-large-msmarco</a>"
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/castorini/monobert-large-msmarco\">monobert-large-msmarco</a>"
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/castorini/monobert-large-msmarco\">monobert-large-msmarco</a>"
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/castorini/monobert-large-msmarco\">monobert-large-msmarco</a>",
                    "Core17InstructionRetrieval": -0.24,
                    "News21InstructionRetrieval": -0.8,
                    "Robust04InstructionRetrieval": -9.36
                }
            ]
        }
    },
    "sup-simcse-bert-base-uncased": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/princeton-nlp/sup-simcse-bert-base-uncased\">sup-simcse-bert-base-uncased</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/princeton-nlp/sup-simcse-bert-base-uncased\">sup-simcse-bert-base-uncased</a>",
                    "AmazonCounterfactualClassification (en)": 75.75,
                    "AmazonPolarityClassification": 82.47,
                    "AmazonReviewsClassification (en)": 39.6,
                    "Banking77Classification": 75.76,
                    "EmotionClassification": 44.81,
                    "ImdbClassification": 73.53,
                    "MTOPDomainClassification (en)": 84.29,
                    "MTOPIntentClassification (en)": 63.14,
                    "MassiveIntentClassification (en)": 65.95,
                    "MassiveScenarioClassification (en)": 70.78,
                    "ToxicConversationsClassification": 72.04,
                    "TweetSentimentExtractionClassification": 59.73
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/princeton-nlp/sup-simcse-bert-base-uncased\">sup-simcse-bert-base-uncased</a>",
                    "ArxivClusteringP2P": 35.18,
                    "ArxivClusteringS2S": 27.54,
                    "BiorxivClusteringP2P": 30.15,
                    "BiorxivClusteringS2S": 24.67,
                    "MedrxivClusteringP2P": 26.25,
                    "MedrxivClusteringS2S": 24.12,
                    "RedditClustering": 40.23,
                    "RedditClusteringP2P": 47.74,
                    "StackExchangeClustering": 47.55,
                    "StackExchangeClusteringP2P": 29.45,
                    "TwentyNewsgroupsClustering": 34.86
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/princeton-nlp/sup-simcse-bert-base-uncased\">sup-simcse-bert-base-uncased</a>",
                    "SprintDuplicateQuestions": 69.39,
                    "TwitterSemEval2015": 67.75,
                    "TwitterURLCorpus": 83.89
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/princeton-nlp/sup-simcse-bert-base-uncased\">sup-simcse-bert-base-uncased</a>",
                    "AskUbuntuDupQuestions": 51.8,
                    "MindSmallReranking": 29.3,
                    "SciDocsRR": 70.14,
                    "StackOverflowDupQuestions": 38.9
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/princeton-nlp/sup-simcse-bert-base-uncased\">sup-simcse-bert-base-uncased</a>",
                    "ArguAna": 38.33,
                    "CQADupstackRetrieval": 14.5,
                    "ClimateFEVER": 11.98,
                    "DBPedia": 19.73,
                    "FEVER": 20.41,
                    "FiQA2018": 10.41,
                    "HotpotQA": 22.9,
                    "MSMARCO": 11.0,
                    "NFCorpus": 12.42,
                    "NQ": 16.08,
                    "QuoraRetrieval": 79.62,
                    "SCIDOCS": 7.53,
                    "SciFact": 29.59,
                    "TRECCOVID": 22.93,
                    "Touche2020": 9.9
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/princeton-nlp/sup-simcse-bert-base-uncased\">sup-simcse-bert-base-uncased</a>",
                    "BIOSSES": 68.38,
                    "SICK-R": 80.77,
                    "STS12": 75.3,
                    "STS13": 84.67,
                    "STS14": 80.19,
                    "STS15": 85.4,
                    "STS16": 80.82,
                    "STS17 (en-en)": 89.44,
                    "STS22 (en)": 61.96,
                    "STSBenchmark": 84.25
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/princeton-nlp/sup-simcse-bert-base-uncased\">sup-simcse-bert-base-uncased</a>",
                    "SummEval": 31.17
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/princeton-nlp/sup-simcse-bert-base-uncased\">sup-simcse-bert-base-uncased</a>"
                }
            ]
        }
    },
    "sentence-t5-xl": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/sentence-t5-xl\">sentence-t5-xl</a>",
                    "BUCC (de-en)": 95.04,
                    "BUCC (fr-en)": 94.96,
                    "BUCC (ru-en)": 8.33,
                    "BUCC (zh-en)": 1.3,
                    "Tatoeba (afr-eng)": 41.84,
                    "Tatoeba (amh-eng)": 0.03,
                    "Tatoeba (ang-eng)": 37.87,
                    "Tatoeba (ara-eng)": 0.61,
                    "Tatoeba (arq-eng)": 0.74,
                    "Tatoeba (arz-eng)": 0.42,
                    "Tatoeba (ast-eng)": 65.41,
                    "Tatoeba (awa-eng)": 1.46,
                    "Tatoeba (aze-eng)": 8.79,
                    "Tatoeba (bel-eng)": 5.76,
                    "Tatoeba (ben-eng)": 0.01,
                    "Tatoeba (ber-eng)": 5.92,
                    "Tatoeba (bos-eng)": 16.12,
                    "Tatoeba (bre-eng)": 6.12,
                    "Tatoeba (bul-eng)": 9.06,
                    "Tatoeba (cat-eng)": 57.4,
                    "Tatoeba (cbk-eng)": 57.68,
                    "Tatoeba (ceb-eng)": 12.56,
                    "Tatoeba (ces-eng)": 9.47,
                    "Tatoeba (cha-eng)": 27.13,
                    "Tatoeba (cmn-eng)": 1.82,
                    "Tatoeba (cor-eng)": 3.87,
                    "Tatoeba (csb-eng)": 14.41,
                    "Tatoeba (cym-eng)": 6.69,
                    "Tatoeba (dan-eng)": 54.87,
                    "Tatoeba (deu-eng)": 93.72,
                    "Tatoeba (dsb-eng)": 14.74,
                    "Tatoeba (dtp-eng)": 5.84,
                    "Tatoeba (ell-eng)": 0.6,
                    "Tatoeba (epo-eng)": 30.8,
                    "Tatoeba (est-eng)": 5.39,
                    "Tatoeba (eus-eng)": 11.9,
                    "Tatoeba (fao-eng)": 28.08,
                    "Tatoeba (fin-eng)": 6.81,
                    "Tatoeba (fra-eng)": 85.29,
                    "Tatoeba (fry-eng)": 38.68,
                    "Tatoeba (gla-eng)": 2.96,
                    "Tatoeba (gle-eng)": 3.74,
                    "Tatoeba (glg-eng)": 70.0,
                    "Tatoeba (gsw-eng)": 30.49,
                    "Tatoeba (heb-eng)": 0.87,
                    "Tatoeba (hin-eng)": 0.1,
                    "Tatoeba (hrv-eng)": 17.43,
                    "Tatoeba (hsb-eng)": 14.69,
                    "Tatoeba (hun-eng)": 7.28,
                    "Tatoeba (hye-eng)": 0.77,
                    "Tatoeba (ido-eng)": 46.65,
                    "Tatoeba (ile-eng)": 59.43,
                    "Tatoeba (ina-eng)": 82.71,
                    "Tatoeba (ind-eng)": 37.26,
                    "Tatoeba (isl-eng)": 11.21,
                    "Tatoeba (ita-eng)": 79.77,
                    "Tatoeba (jav-eng)": 7.81,
                    "Tatoeba (jpn-eng)": 0.91,
                    "Tatoeba (kab-eng)": 2.23,
                    "Tatoeba (kat-eng)": 1.48,
                    "Tatoeba (kaz-eng)": 1.77,
                    "Tatoeba (khm-eng)": 0.38,
                    "Tatoeba (kor-eng)": 1.96,
                    "Tatoeba (kur-eng)": 12.11,
                    "Tatoeba (kzj-eng)": 6.13,
                    "Tatoeba (lat-eng)": 27.84,
                    "Tatoeba (lfn-eng)": 45.89,
                    "Tatoeba (lit-eng)": 5.94,
                    "Tatoeba (lvs-eng)": 8.11,
                    "Tatoeba (mal-eng)": 0.59,
                    "Tatoeba (mar-eng)": 0.03,
                    "Tatoeba (max-eng)": 21.7,
                    "Tatoeba (mhr-eng)": 0.68,
                    "Tatoeba (mkd-eng)": 5.92,
                    "Tatoeba (mon-eng)": 2.39,
                    "Tatoeba (nds-eng)": 45.04,
                    "Tatoeba (nld-eng)": 64.75,
                    "Tatoeba (nno-eng)": 36.74,
                    "Tatoeba (nob-eng)": 54.77,
                    "Tatoeba (nov-eng)": 57.12,
                    "Tatoeba (oci-eng)": 34.39,
                    "Tatoeba (orv-eng)": 2.04,
                    "Tatoeba (pam-eng)": 8.34,
                    "Tatoeba (pes-eng)": 0.87,
                    "Tatoeba (pms-eng)": 38.06,
                    "Tatoeba (pol-eng)": 28.35,
                    "Tatoeba (por-eng)": 83.61,
                    "Tatoeba (ron-eng)": 65.27,
                    "Tatoeba (rus-eng)": 30.42,
                    "Tatoeba (slk-eng)": 13.19,
                    "Tatoeba (slv-eng)": 13.49,
                    "Tatoeba (spa-eng)": 89.18,
                    "Tatoeba (sqi-eng)": 14.66,
                    "Tatoeba (srp-eng)": 13.24,
                    "Tatoeba (swe-eng)": 60.67,
                    "Tatoeba (swg-eng)": 34.76,
                    "Tatoeba (swh-eng)": 8.07,
                    "Tatoeba (tam-eng)": 0.36,
                    "Tatoeba (tat-eng)": 1.46,
                    "Tatoeba (tel-eng)": 0.67,
                    "Tatoeba (tgl-eng)": 25.22,
                    "Tatoeba (tha-eng)": 1.58,
                    "Tatoeba (tuk-eng)": 4.99,
                    "Tatoeba (tur-eng)": 7.72,
                    "Tatoeba (tzl-eng)": 38.49,
                    "Tatoeba (uig-eng)": 0.87,
                    "Tatoeba (ukr-eng)": 9.12,
                    "Tatoeba (urd-eng)": 0.0,
                    "Tatoeba (uzb-eng)": 5.48,
                    "Tatoeba (vie-eng)": 8.45,
                    "Tatoeba (war-eng)": 13.75,
                    "Tatoeba (wuu-eng)": 1.44,
                    "Tatoeba (xho-eng)": 9.15,
                    "Tatoeba (yid-eng)": 0.28,
                    "Tatoeba (yue-eng)": 0.98,
                    "Tatoeba (zsm-eng)": 35.71
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/sentence-t5-xl\">sentence-t5-xl</a>",
                    "AmazonCounterfactualClassification (de)": 67.01,
                    "AmazonCounterfactualClassification (en)": 76.01,
                    "AmazonCounterfactualClassification (en-ext)": 77.29,
                    "AmazonCounterfactualClassification (ja)": 45.61,
                    "AmazonPolarityClassification": 93.17,
                    "AmazonReviewsClassification (de)": 44.05,
                    "AmazonReviewsClassification (en)": 48.18,
                    "AmazonReviewsClassification (es)": 45.01,
                    "AmazonReviewsClassification (fr)": 43.52,
                    "AmazonReviewsClassification (ja)": 22.23,
                    "AmazonReviewsClassification (zh)": 21.88,
                    "Banking77Classification": 80.88,
                    "EmotionClassification": 51.95,
                    "ImdbClassification": 87.54,
                    "MTOPDomainClassification (de)": 83.28,
                    "MTOPDomainClassification (en)": 90.73,
                    "MTOPDomainClassification (es)": 85.32,
                    "MTOPDomainClassification (fr)": 85.14,
                    "MTOPDomainClassification (hi)": 20.85,
                    "MTOPDomainClassification (th)": 15.62,
                    "MTOPIntentClassification (de)": 54.65,
                    "MTOPIntentClassification (en)": 68.15,
                    "MTOPIntentClassification (es)": 57.38,
                    "MTOPIntentClassification (fr)": 54.39,
                    "MTOPIntentClassification (hi)": 3.28,
                    "MTOPIntentClassification (th)": 5.08,
                    "MasakhaNEWSClassification (fra)": 80.09,
                    "MassiveIntentClassification (af)": 40.17,
                    "MassiveIntentClassification (am)": 2.18,
                    "MassiveIntentClassification (ar)": 4.18,
                    "MassiveIntentClassification (az)": 30.02,
                    "MassiveIntentClassification (bn)": 2.6,
                    "MassiveIntentClassification (cy)": 29.15,
                    "MassiveIntentClassification (da)": 47.69,
                    "MassiveIntentClassification (de)": 57.43,
                    "MassiveIntentClassification (el)": 9.96,
                    "MassiveIntentClassification (en)": 72.09,
                    "MassiveIntentClassification (es)": 57.97,
                    "MassiveIntentClassification (fa)": 3.6,
                    "MassiveIntentClassification (fi)": 34.02,
                    "MassiveIntentClassification (fr)": 60.99,
                    "MassiveIntentClassification (he)": 2.51,
                    "MassiveIntentClassification (hi)": 3.02,
                    "MassiveIntentClassification (hu)": 31.66,
                    "MassiveIntentClassification (hy)": 3.32,
                    "MassiveIntentClassification (id)": 41.53,
                    "MassiveIntentClassification (is)": 30.25,
                    "MassiveIntentClassification (it)": 56.57,
                    "MassiveIntentClassification (ja)": 3.5,
                    "MassiveIntentClassification (jv)": 31.67,
                    "MassiveIntentClassification (ka)": 2.79,
                    "MassiveIntentClassification (km)": 5.43,
                    "MassiveIntentClassification (kn)": 2.79,
                    "MassiveIntentClassification (ko)": 2.67,
                    "MassiveIntentClassification (lv)": 34.25,
                    "MassiveIntentClassification (ml)": 2.98,
                    "MassiveIntentClassification (mn)": 20.99,
                    "MassiveIntentClassification (ms)": 37.43,
                    "MassiveIntentClassification (my)": 4.02,
                    "MassiveIntentClassification (nb)": 45.91,
                    "MassiveIntentClassification (nl)": 50.51,
                    "MassiveIntentClassification (pl)": 43.95,
                    "MassiveIntentClassification (pt)": 57.95,
                    "MassiveIntentClassification (ro)": 49.37,
                    "MassiveIntentClassification (ru)": 33.46,
                    "MassiveIntentClassification (sl)": 36.33,
                    "MassiveIntentClassification (sq)": 37.65,
                    "MassiveIntentClassification (sv)": 46.35,
                    "MassiveIntentClassification (sw)": 30.6,
                    "MassiveIntentClassification (ta)": 1.79,
                    "MassiveIntentClassification (te)": 2.26,
                    "MassiveIntentClassification (th)": 4.02,
                    "MassiveIntentClassification (tl)": 38.92,
                    "MassiveIntentClassification (tr)": 32.05,
                    "MassiveIntentClassification (ur)": 2.7,
                    "MassiveIntentClassification (vi)": 21.47,
                    "MassiveIntentClassification (zh-CN)": 0.59,
                    "MassiveIntentClassification (zh-TW)": 3.24,
                    "MassiveScenarioClassification (af)": 50.81,
                    "MassiveScenarioClassification (am)": 6.95,
                    "MassiveScenarioClassification (ar)": 12.32,
                    "MassiveScenarioClassification (az)": 38.79,
                    "MassiveScenarioClassification (bn)": 8.0,
                    "MassiveScenarioClassification (cy)": 33.91,
                    "MassiveScenarioClassification (da)": 55.79,
                    "MassiveScenarioClassification (de)": 65.33,
                    "MassiveScenarioClassification (el)": 16.89,
                    "MassiveScenarioClassification (en)": 73.26,
                    "MassiveScenarioClassification (es)": 62.52,
                    "MassiveScenarioClassification (fa)": 6.08,
                    "MassiveScenarioClassification (fi)": 43.34,
                    "MassiveScenarioClassification (fr)": 66.42,
                    "MassiveScenarioClassification (he)": 7.55,
                    "MassiveScenarioClassification (hi)": 7.44,
                    "MassiveScenarioClassification (hu)": 40.85,
                    "MassiveScenarioClassification (hy)": 9.25,
                    "MassiveScenarioClassification (id)": 51.92,
                    "MassiveScenarioClassification (is)": 40.09,
                    "MassiveScenarioClassification (it)": 62.94,
                    "MassiveScenarioClassification (ja)": 7.9,
                    "MassiveScenarioClassification (jv)": 41.33,
                    "MassiveScenarioClassification (ka)": 7.76,
                    "MassiveScenarioClassification (km)": 9.19,
                    "MassiveScenarioClassification (kn)": 8.36,
                    "MassiveScenarioClassification (ko)": 6.13,
                    "MassiveScenarioClassification (lv)": 40.7,
                    "MassiveScenarioClassification (ml)": 6.98,
                    "MassiveScenarioClassification (mn)": 27.0,
                    "MassiveScenarioClassification (ms)": 46.9,
                    "MassiveScenarioClassification (my)": 9.55,
                    "MassiveScenarioClassification (nb)": 53.43,
                    "MassiveScenarioClassification (nl)": 59.65,
                    "MassiveScenarioClassification (pl)": 49.87,
                    "MassiveScenarioClassification (pt)": 62.18,
                    "MassiveScenarioClassification (ro)": 58.22,
                    "MassiveScenarioClassification (ru)": 40.73,
                    "MassiveScenarioClassification (sl)": 43.66,
                    "MassiveScenarioClassification (sq)": 49.25,
                    "MassiveScenarioClassification (sv)": 57.17,
                    "MassiveScenarioClassification (sw)": 40.55,
                    "MassiveScenarioClassification (ta)": 7.46,
                    "MassiveScenarioClassification (te)": 7.03,
                    "MassiveScenarioClassification (th)": 8.52,
                    "MassiveScenarioClassification (tl)": 51.74,
                    "MassiveScenarioClassification (tr)": 43.01,
                    "MassiveScenarioClassification (ur)": 9.61,
                    "MassiveScenarioClassification (vi)": 28.91,
                    "MassiveScenarioClassification (zh-CN)": 5.86,
                    "MassiveScenarioClassification (zh-TW)": 7.14,
                    "ToxicConversationsClassification": 70.95,
                    "TweetSentimentExtractionClassification": 61.21
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/sentence-t5-xl\">sentence-t5-xl</a>",
                    "AlloProfClusteringP2P": 60.37,
                    "AlloProfClusteringS2S": 40.76,
                    "ArxivClusteringP2P": 41.62,
                    "ArxivClusteringS2S": 31.17,
                    "BiorxivClusteringP2P": 36.43,
                    "BiorxivClusteringS2S": 26.47,
                    "HALClusteringS2S": 20.28,
                    "MLSUMClusteringP2P": 41.61,
                    "MLSUMClusteringS2S": 33.6,
                    "MasakhaNEWSClusteringP2P (fra)": 62.82,
                    "MasakhaNEWSClusteringS2S (fra)": 31.74,
                    "MedrxivClusteringP2P": 32.3,
                    "MedrxivClusteringS2S": 26.93,
                    "RedditClustering": 57.03,
                    "RedditClusteringP2P": 62.34,
                    "StackExchangeClustering": 67.13,
                    "StackExchangeClusteringP2P": 34.79,
                    "TwentyNewsgroupsClustering": 49.53
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/sentence-t5-xl\">sentence-t5-xl</a>",
                    "OpusparcusPC (fr)": 92.48,
                    "PawsX (fr)": 62.52,
                    "SprintDuplicateQuestions": 91.44,
                    "TwitterSemEval2015": 80.89,
                    "TwitterURLCorpus": 85.86
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/sentence-t5-xl\">sentence-t5-xl</a>",
                    "AlloprofReranking": 63.3,
                    "AskUbuntuDupQuestions": 62.86,
                    "MindSmallReranking": 29.77,
                    "SciDocsRR": 75.16,
                    "StackOverflowDupQuestions": 51.05,
                    "SyntecReranking": 83.07
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/sentence-t5-xl\">sentence-t5-xl</a>",
                    "AlloprofRetrieval": 40.38,
                    "ArguAna": 39.4,
                    "BSARDRetrieval": 0.14,
                    "CQADupstackRetrieval": 40.78,
                    "ClimateFEVER": 10.61,
                    "DBPedia": 33.65,
                    "FEVER": 36.12,
                    "FiQA2018": 44.71,
                    "HotpotQA": 37.17,
                    "MSMARCO": 25.17,
                    "MintakaRetrieval (fr)": 31.54,
                    "NFCorpus": 33.18,
                    "NQ": 46.29,
                    "QuoraRetrieval": 85.85,
                    "SCIDOCS": 15.97,
                    "SciFact": 50.91,
                    "SyntecRetrieval": 74.24,
                    "TRECCOVID": 54.77,
                    "Touche2020": 22.51,
                    "XPQARetrieval (fr)": 52.14
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/sentence-t5-xl\">sentence-t5-xl</a>",
                    "BIOSSES": 73.12,
                    "SICK-R": 79.98,
                    "SICKFr": 75.08,
                    "STS12": 79.02,
                    "STS13": 88.8,
                    "STS14": 84.33,
                    "STS15": 88.89,
                    "STS16": 85.31,
                    "STS17 (ar-ar)": 11.13,
                    "STS17 (en-ar)": -3.93,
                    "STS17 (en-de)": 79.04,
                    "STS17 (en-en)": 88.91,
                    "STS17 (en-tr)": 13.61,
                    "STS17 (es-en)": 71.72,
                    "STS17 (es-es)": 83.42,
                    "STS17 (fr-en)": 71.38,
                    "STS17 (it-en)": 69.5,
                    "STS17 (ko-ko)": 9.61,
                    "STS17 (nl-en)": 66.12,
                    "STS22 (ar)": 29.6,
                    "STS22 (de)": 47.72,
                    "STS22 (de-en)": 49.64,
                    "STS22 (de-fr)": 62.21,
                    "STS22 (de-pl)": 34.34,
                    "STS22 (en)": 64.32,
                    "STS22 (es)": 58.16,
                    "STS22 (es-en)": 69.15,
                    "STS22 (es-it)": 65.26,
                    "STS22 (fr)": 77.49,
                    "STS22 (fr-pl)": 50.71,
                    "STS22 (it)": 66.91,
                    "STS22 (pl)": 27.04,
                    "STS22 (pl-en)": 58.85,
                    "STS22 (ru)": 26.63,
                    "STS22 (tr)": 43.36,
                    "STS22 (zh)": 33.55,
                    "STS22 (zh-en)": 29.0,
                    "STSBenchmark": 83.93,
                    "STSBenchmarkMultilingualSTS (fr)": 79.42
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/sentence-t5-xl\">sentence-t5-xl</a>",
                    "SummEval": 29.91,
                    "SummEvalFr": 31.59
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/sentence-t5-xl\">sentence-t5-xl</a>"
                }
            ]
        }
    },
    "google-gecko.text-embedding-preview-0409": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://cloud.google.com/vertex-ai/generative-ai/docs/embeddings/get-text-embeddings#latest_models\">google-gecko.text-embedding-preview-0409</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://cloud.google.com/vertex-ai/generative-ai/docs/embeddings/get-text-embeddings#latest_models\">google-gecko.text-embedding-preview-0409</a>",
                    "AmazonCounterfactualClassification (en)": 75.34,
                    "AmazonPolarityClassification": 97.34,
                    "AmazonReviewsClassification (en)": 51.17,
                    "Banking77Classification": 88.62,
                    "EmotionClassification": 52.51,
                    "ImdbClassification": 95.65,
                    "MTOPDomainClassification (en)": 98.35,
                    "MTOPIntentClassification (en)": 83.43,
                    "MassiveIntentClassification (en)": 80.22,
                    "MassiveScenarioClassification (en)": 87.19,
                    "ToxicConversationsClassification": 89.67,
                    "TweetSentimentExtractionClassification": 74.52
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://cloud.google.com/vertex-ai/generative-ai/docs/embeddings/get-text-embeddings#latest_models\">google-gecko.text-embedding-preview-0409</a>",
                    "ArxivClusteringP2P": 46.27,
                    "ArxivClusteringS2S": 38.36,
                    "BiorxivClusteringP2P": 37.87,
                    "BiorxivClusteringS2S": 35.67,
                    "MedrxivClusteringP2P": 33.11,
                    "MedrxivClusteringS2S": 31.54,
                    "RedditClustering": 65.81,
                    "RedditClusteringP2P": 66.62,
                    "StackExchangeClustering": 74.52,
                    "StackExchangeClusteringP2P": 37.63,
                    "TwentyNewsgroupsClustering": 54.87
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://cloud.google.com/vertex-ai/generative-ai/docs/embeddings/get-text-embeddings#latest_models\">google-gecko.text-embedding-preview-0409</a>",
                    "SprintDuplicateQuestions": 96.26,
                    "TwitterSemEval2015": 79.04,
                    "TwitterURLCorpus": 87.53
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://cloud.google.com/vertex-ai/generative-ai/docs/embeddings/get-text-embeddings#latest_models\">google-gecko.text-embedding-preview-0409</a>",
                    "AskUbuntuDupQuestions": 64.4,
                    "MindSmallReranking": 33.07,
                    "SciDocsRR": 83.59,
                    "StackOverflowDupQuestions": 54.56
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://cloud.google.com/vertex-ai/generative-ai/docs/embeddings/get-text-embeddings#latest_models\">google-gecko.text-embedding-preview-0409</a>",
                    "ArguAna": 62.18,
                    "CQADupstackRetrieval": 48.89,
                    "ClimateFEVER": 33.21,
                    "DBPedia": 47.12,
                    "FEVER": 86.96,
                    "FiQA2018": 59.24,
                    "HotpotQA": 71.33,
                    "MSMARCO": 32.58,
                    "NFCorpus": 40.33,
                    "NQ": 61.28,
                    "QuoraRetrieval": 88.18,
                    "SCIDOCS": 20.34,
                    "SciFact": 75.42,
                    "TRECCOVID": 82.62,
                    "Touche2020": 25.86
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://cloud.google.com/vertex-ai/generative-ai/docs/embeddings/get-text-embeddings#latest_models\">google-gecko.text-embedding-preview-0409</a>",
                    "BIOSSES": 89.46,
                    "SICK-R": 81.93,
                    "STS12": 77.59,
                    "STS13": 90.36,
                    "STS14": 85.25,
                    "STS15": 89.66,
                    "STS16": 87.34,
                    "STS17 (en-en)": 92.06,
                    "STS22 (en)": 68.02,
                    "STSBenchmark": 88.99
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://cloud.google.com/vertex-ai/generative-ai/docs/embeddings/get-text-embeddings#latest_models\">google-gecko.text-embedding-preview-0409</a>",
                    "SummEval": 32.63
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://cloud.google.com/vertex-ai/generative-ai/docs/embeddings/get-text-embeddings#latest_models\">google-gecko.text-embedding-preview-0409</a>",
                    "Core17InstructionRetrieval": 5.44,
                    "News21InstructionRetrieval": 3.94,
                    "Robust04InstructionRetrieval": -2.4
                }
            ]
        }
    },
    "voyage-code-2": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://docs.voyageai.com/embeddings/\">voyage-code-2</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://docs.voyageai.com/embeddings/\">voyage-code-2</a>",
                    "AmazonReviewsClassification (fr)": 42.15,
                    "MTOPDomainClassification (fr)": 87.68,
                    "MTOPIntentClassification (fr)": 59.44,
                    "MasakhaNEWSClassification (fra)": 82.13,
                    "MassiveIntentClassification (fr)": 63.08,
                    "MassiveScenarioClassification (fr)": 70.15
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://docs.voyageai.com/embeddings/\">voyage-code-2</a>",
                    "AlloProfClusteringP2P": 61.63,
                    "AlloProfClusteringS2S": 50.67,
                    "HALClusteringS2S": 27.44,
                    "MLSUMClusteringP2P": 45.23,
                    "MLSUMClusteringS2S": 41.48,
                    "MasakhaNEWSClusteringP2P (fra)": 56.59,
                    "MasakhaNEWSClusteringS2S (fra)": 35.18
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://docs.voyageai.com/embeddings/\">voyage-code-2</a>",
                    "OpusparcusPC (fr)": 92.87,
                    "PawsX (fr)": 60.83
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://docs.voyageai.com/embeddings/\">voyage-code-2</a>",
                    "AlloprofReranking": 70.79,
                    "SyntecReranking": 86.77
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://docs.voyageai.com/embeddings/\">voyage-code-2</a>",
                    "AlloprofRetrieval": 52.61,
                    "BSARDRetrieval": 0.29,
                    "MintakaRetrieval (fr)": 19.05,
                    "SyntecRetrieval": 82.77,
                    "XPQARetrieval (fr)": 71.95
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://docs.voyageai.com/embeddings/\">voyage-code-2</a>",
                    "SICKFr": 73.56,
                    "STS22 (fr)": 79.99,
                    "STSBenchmarkMultilingualSTS (fr)": 79.02
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://docs.voyageai.com/embeddings/\">voyage-code-2</a>",
                    "SummEvalFr": 28.34
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://docs.voyageai.com/embeddings/\">voyage-code-2</a>"
                }
            ]
        }
    },
    "LLM2Vec-Sheared-Llama-unsupervised": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/McGill-NLP/LLM2Vec-Sheared-LLaMA-mntp-unsup-simcse\">LLM2Vec-Sheared-Llama-unsupervised</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/McGill-NLP/LLM2Vec-Sheared-LLaMA-mntp-unsup-simcse\">LLM2Vec-Sheared-Llama-unsupervised</a>",
                    "AmazonCounterfactualClassification (en)": 72.93,
                    "AmazonPolarityClassification": 74.28,
                    "AmazonReviewsClassification (en)": 36.14,
                    "Banking77Classification": 79.0,
                    "EmotionClassification": 42.85,
                    "ImdbClassification": 71.92,
                    "MTOPDomainClassification (en)": 91.24,
                    "MTOPIntentClassification (en)": 74.08,
                    "MassiveIntentClassification (en)": 69.99,
                    "MassiveScenarioClassification (en)": 75.15,
                    "ToxicConversationsClassification": 68.4,
                    "TweetSentimentExtractionClassification": 56.08
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/McGill-NLP/LLM2Vec-Sheared-LLaMA-mntp-unsup-simcse\">LLM2Vec-Sheared-Llama-unsupervised</a>",
                    "ArxivClusteringP2P": 42.92,
                    "ArxivClusteringS2S": 35.2,
                    "BiorxivClusteringP2P": 35.02,
                    "BiorxivClusteringS2S": 27.21,
                    "MedrxivClusteringP2P": 30.15,
                    "MedrxivClusteringS2S": 26.96,
                    "RedditClustering": 38.67,
                    "RedditClusteringP2P": 53.42,
                    "StackExchangeClustering": 59.35,
                    "StackExchangeClusteringP2P": 31.47,
                    "TwentyNewsgroupsClustering": 31.54
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/McGill-NLP/LLM2Vec-Sheared-LLaMA-mntp-unsup-simcse\">LLM2Vec-Sheared-Llama-unsupervised</a>",
                    "SprintDuplicateQuestions": 77.36,
                    "TwitterSemEval2015": 61.54,
                    "TwitterURLCorpus": 77.73
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/McGill-NLP/LLM2Vec-Sheared-LLaMA-mntp-unsup-simcse\">LLM2Vec-Sheared-Llama-unsupervised</a>",
                    "AskUbuntuDupQuestions": 52.7,
                    "MindSmallReranking": 29.52,
                    "SciDocsRR": 67.76,
                    "StackOverflowDupQuestions": 40.82
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/McGill-NLP/LLM2Vec-Sheared-LLaMA-mntp-unsup-simcse\">LLM2Vec-Sheared-Llama-unsupervised</a>",
                    "ArguAna": 43.64,
                    "CQADupstackRetrieval": 18.5,
                    "ClimateFEVER": 18.95,
                    "DBPedia": 13.21,
                    "FEVER": 16.96,
                    "FiQA2018": 16.99,
                    "HotpotQA": 22.64,
                    "MSMARCO": 7.03,
                    "NFCorpus": 15.73,
                    "NQ": 17.96,
                    "QuoraRetrieval": 78.23,
                    "SCIDOCS": 5.53,
                    "SciFact": 38.31,
                    "TRECCOVID": 56.04,
                    "Touche2020": 19.17
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/McGill-NLP/LLM2Vec-Sheared-LLaMA-mntp-unsup-simcse\">LLM2Vec-Sheared-Llama-unsupervised</a>",
                    "BIOSSES": 75.12,
                    "SICK-R": 69.34,
                    "STS12": 60.09,
                    "STS13": 72.52,
                    "STS14": 66.7,
                    "STS15": 77.69,
                    "STS16": 75.94,
                    "STS17 (en-en)": 81.67,
                    "STS22 (en)": 63.7,
                    "STSBenchmark": 73.36
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/McGill-NLP/LLM2Vec-Sheared-LLaMA-mntp-unsup-simcse\">LLM2Vec-Sheared-Llama-unsupervised</a>",
                    "SummEval": 31.23
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/McGill-NLP/LLM2Vec-Sheared-LLaMA-mntp-unsup-simcse\">LLM2Vec-Sheared-Llama-unsupervised</a>"
                }
            ]
        }
    },
    "e5-base-v2": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/e5-base-v2\">e5-base-v2</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/e5-base-v2\">e5-base-v2</a>"
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/e5-base-v2\">e5-base-v2</a>"
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/e5-base-v2\">e5-base-v2</a>"
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/e5-base-v2\">e5-base-v2</a>"
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/e5-base-v2\">e5-base-v2</a>"
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/e5-base-v2\">e5-base-v2</a>"
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/e5-base-v2\">e5-base-v2</a>"
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/e5-base-v2\">e5-base-v2</a>",
                    "Core17InstructionRetrieval": -2.9,
                    "News21InstructionRetrieval": -2.0,
                    "Robust04InstructionRetrieval": -6.73
                }
            ]
        }
    },
    "xlm-roberta-large": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/xlm-roberta-large\">xlm-roberta-large</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/xlm-roberta-large\">xlm-roberta-large</a>",
                    "AmazonReviewsClassification (fr)": 26.62,
                    "MTOPDomainClassification (fr)": 36.77,
                    "MTOPIntentClassification (fr)": 15.37,
                    "MasakhaNEWSClassification (fra)": 65.76,
                    "MassiveIntentClassification (fr)": 15.82,
                    "MassiveScenarioClassification (fr)": 23.92
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/xlm-roberta-large\">xlm-roberta-large</a>",
                    "AlloProfClusteringP2P": 56.54,
                    "AlloProfClusteringS2S": 21.18,
                    "BlurbsClusteringP2P": 29.84,
                    "BlurbsClusteringS2S": 7.29,
                    "HALClusteringS2S": 5.94,
                    "MLSUMClusteringP2P": 42.67,
                    "MLSUMClusteringS2S": 18.5,
                    "MasakhaNEWSClusteringP2P (fra)": 34.02,
                    "MasakhaNEWSClusteringS2S (fra)": 21.52,
                    "TenKGnadClusteringP2P": 32.46,
                    "TenKGnadClusteringS2S": 6.16
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/xlm-roberta-large\">xlm-roberta-large</a>",
                    "OpusparcusPC (fr)": 83.73,
                    "PawsX (fr)": 53.38
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/xlm-roberta-large\">xlm-roberta-large</a>",
                    "AlloprofReranking": 28.62,
                    "SyntecReranking": 49.4
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/xlm-roberta-large\">xlm-roberta-large</a>",
                    "AlloprofRetrieval": 0.52,
                    "BSARDRetrieval": 0.0,
                    "MintakaRetrieval (fr)": 0.9,
                    "SyntecRetrieval": 6.6,
                    "XPQARetrieval (fr)": 12.7
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/xlm-roberta-large\">xlm-roberta-large</a>",
                    "SICKFr": 50.01,
                    "STS22 (fr)": 55.49,
                    "STSBenchmarkMultilingualSTS (fr)": 42.32
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/xlm-roberta-large\">xlm-roberta-large</a>",
                    "SummEvalFr": 28.89
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/xlm-roberta-large\">xlm-roberta-large</a>"
                }
            ]
        }
    },
    "Baichuan-text-embedding": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://platform.baichuan-ai.com/docs/text-Embedding\">Baichuan-text-embedding</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://platform.baichuan-ai.com/docs/text-Embedding\">Baichuan-text-embedding</a>",
                    "AmazonReviewsClassification (zh)": 48.3,
                    "IFlyTek": 50.75,
                    "JDReview": 87.69,
                    "MassiveIntentClassification (zh-CN)": 74.91,
                    "MassiveScenarioClassification (zh-CN)": 81.28,
                    "MultilingualSentiment": 76.83,
                    "OnlineShopping": 94.42,
                    "TNews": 52.62,
                    "Waimai": 88.77
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://platform.baichuan-ai.com/docs/text-Embedding\">Baichuan-text-embedding</a>",
                    "CLSClusteringP2P": 60.37,
                    "CLSClusteringS2S": 51.09,
                    "ThuNewsClusteringP2P": 58.23,
                    "ThuNewsClusteringS2S": 57.83
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://platform.baichuan-ai.com/docs/text-Embedding\">Baichuan-text-embedding</a>",
                    "Cmnli": 85.31,
                    "Ocnli": 79.33
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://platform.baichuan-ai.com/docs/text-Embedding\">Baichuan-text-embedding</a>",
                    "CMedQAv1": 88.06,
                    "CMedQAv2": 88.46,
                    "MMarcoReranking": 34.3,
                    "T2Reranking": 67.85
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://platform.baichuan-ai.com/docs/text-Embedding\">Baichuan-text-embedding</a>",
                    "CmedqaRetrieval": 47.64,
                    "CovidRetrieval": 86.86,
                    "DuRetrieval": 88.43,
                    "EcomRetrieval": 66.39,
                    "MMarcoRetrieval": 80.17,
                    "MedicalRetrieval": 61.1,
                    "T2Retrieval": 80.11,
                    "VideoRetrieval": 74.28
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://platform.baichuan-ai.com/docs/text-Embedding\">Baichuan-text-embedding</a>",
                    "AFQMC": 50.8,
                    "ATEC": 53.23,
                    "BQ": 66.49,
                    "LCQMC": 76.6,
                    "PAWSX": 47.56,
                    "QBQTC": 39.96,
                    "STS22 (zh)": 65.78,
                    "STSB": 80.14
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://platform.baichuan-ai.com/docs/text-Embedding\">Baichuan-text-embedding</a>"
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://platform.baichuan-ai.com/docs/text-Embedding\">Baichuan-text-embedding</a>"
                }
            ]
        }
    },
    "bert-base-multilingual-cased": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/google-bert/bert-base-multilingual-cased\">bert-base-multilingual-cased</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/google-bert/bert-base-multilingual-cased\">bert-base-multilingual-cased</a>",
                    "AmazonReviewsClassification (fr)": 29.39,
                    "MTOPDomainClassification (fr)": 63.61,
                    "MTOPIntentClassification (fr)": 37.84,
                    "MasakhaNEWSClassification (fra)": 64.0,
                    "MassiveIntentClassification (fr)": 37.3,
                    "MassiveScenarioClassification (fr)": 44.47
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/google-bert/bert-base-multilingual-cased\">bert-base-multilingual-cased</a>",
                    "AlloProfClusteringP2P": 51.5,
                    "AlloProfClusteringS2S": 43.06,
                    "HALClusteringS2S": 20.81,
                    "MLSUMClusteringP2P": 40.9,
                    "MLSUMClusteringS2S": 31.8,
                    "MasakhaNEWSClusteringP2P (fra)": 24.23,
                    "MasakhaNEWSClusteringS2S (fra)": 24.46
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/google-bert/bert-base-multilingual-cased\">bert-base-multilingual-cased</a>",
                    "OpusparcusPC (fr)": 86.77,
                    "PawsX (fr)": 53.39
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/google-bert/bert-base-multilingual-cased\">bert-base-multilingual-cased</a>",
                    "AlloprofReranking": 36.23,
                    "SyntecReranking": 53.25
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/google-bert/bert-base-multilingual-cased\">bert-base-multilingual-cased</a>",
                    "AlloprofRetrieval": 1.63,
                    "BSARDRetrieval": 0.0,
                    "MintakaRetrieval (fr)": 3.55,
                    "SyntecRetrieval": 18.95,
                    "XPQARetrieval (fr)": 18.49
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/google-bert/bert-base-multilingual-cased\">bert-base-multilingual-cased</a>",
                    "SICKFr": 58.75,
                    "STS22 (fr)": 39.05,
                    "STSBenchmarkMultilingualSTS (fr)": 52.25
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/google-bert/bert-base-multilingual-cased\">bert-base-multilingual-cased</a>",
                    "SummEvalFr": 28.81
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/google-bert/bert-base-multilingual-cased\">bert-base-multilingual-cased</a>"
                }
            ]
        }
    },
    "msmarco-bert-co-condensor": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/msmarco-bert-co-condensor\">msmarco-bert-co-condensor</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/msmarco-bert-co-condensor\">msmarco-bert-co-condensor</a>",
                    "AmazonCounterfactualClassification (en)": 64.06,
                    "AmazonPolarityClassification": 66.88,
                    "AmazonReviewsClassification (en)": 34.85,
                    "Banking77Classification": 82.35,
                    "EmotionClassification": 41.91,
                    "ImdbClassification": 60.17,
                    "MTOPDomainClassification (en)": 91.34,
                    "MTOPIntentClassification (en)": 71.07,
                    "MassiveIntentClassification (en)": 70.4,
                    "MassiveScenarioClassification (en)": 73.73,
                    "ToxicConversationsClassification": 64.01,
                    "TweetSentimentExtractionClassification": 55.74
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/msmarco-bert-co-condensor\">msmarco-bert-co-condensor</a>",
                    "ArxivClusteringP2P": 36.94,
                    "ArxivClusteringS2S": 29.03,
                    "BiorxivClusteringP2P": 32.35,
                    "BiorxivClusteringS2S": 28.16,
                    "MedrxivClusteringP2P": 30.23,
                    "MedrxivClusteringS2S": 27.01,
                    "RedditClustering": 48.04,
                    "RedditClusteringP2P": 53.53,
                    "StackExchangeClustering": 59.54,
                    "StackExchangeClusteringP2P": 30.48,
                    "TwentyNewsgroupsClustering": 38.68
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/msmarco-bert-co-condensor\">msmarco-bert-co-condensor</a>",
                    "SprintDuplicateQuestions": 96.09,
                    "TwitterSemEval2015": 65.95,
                    "TwitterURLCorpus": 83.17
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/msmarco-bert-co-condensor\">msmarco-bert-co-condensor</a>",
                    "AskUbuntuDupQuestions": 58.99,
                    "MindSmallReranking": 27.13,
                    "SciDocsRR": 72.78,
                    "StackOverflowDupQuestions": 48.48
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/msmarco-bert-co-condensor\">msmarco-bert-co-condensor</a>",
                    "ArguAna": 45.15,
                    "CQADupstackRetrieval": 27.72,
                    "ClimateFEVER": 16.96,
                    "DBPedia": 27.86,
                    "FEVER": 45.68,
                    "FiQA2018": 15.62,
                    "HotpotQA": 35.61,
                    "MSMARCO": 29.57,
                    "NFCorpus": 22.29,
                    "NQ": 29.85,
                    "QuoraRetrieval": 86.51,
                    "SCIDOCS": 10.13,
                    "SciFact": 52.31,
                    "TRECCOVID": 40.54,
                    "Touche2020": 8.57
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/msmarco-bert-co-condensor\">msmarco-bert-co-condensor</a>",
                    "BIOSSES": 77.32,
                    "SICK-R": 72.0,
                    "STS12": 68.19,
                    "STS13": 80.4,
                    "STS14": 74.02,
                    "STS15": 82.57,
                    "STS16": 79.78,
                    "STS17 (en-en)": 85.94,
                    "STS22 (en)": 67.54,
                    "STSBenchmark": 76.97
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/msmarco-bert-co-condensor\">msmarco-bert-co-condensor</a>",
                    "SummEval": 29.5
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/msmarco-bert-co-condensor\">msmarco-bert-co-condensor</a>"
                }
            ]
        }
    },
    "text-similarity-davinci-001": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/introducing-text-and-code-embeddings\">text-similarity-davinci-001</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/introducing-text-and-code-embeddings\">text-similarity-davinci-001</a>"
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/introducing-text-and-code-embeddings\">text-similarity-davinci-001</a>",
                    "RedditClustering": 31.78,
                    "StackExchangeClustering": 36.86,
                    "TwentyNewsgroupsClustering": 29.33
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/introducing-text-and-code-embeddings\">text-similarity-davinci-001</a>",
                    "SprintDuplicateQuestions": 69.52,
                    "TwitterSemEval2015": 74.42,
                    "TwitterURLCorpus": 83.75
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/introducing-text-and-code-embeddings\">text-similarity-davinci-001</a>",
                    "AskUbuntuDupQuestions": 53.56,
                    "SciDocsRR": 68.7,
                    "StackOverflowDupQuestions": 39.41
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/introducing-text-and-code-embeddings\">text-similarity-davinci-001</a>"
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/introducing-text-and-code-embeddings\">text-similarity-davinci-001</a>",
                    "BIOSSES": 68.95,
                    "SICK-R": 78.72,
                    "STSBenchmark": 84.08
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/introducing-text-and-code-embeddings\">text-similarity-davinci-001</a>"
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/introducing-text-and-code-embeddings\">text-similarity-davinci-001</a>"
                }
            ]
        }
    },
    "paraphrase-multilingual-mpnet-base-v2": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/paraphrase-multilingual-mpnet-base-v2\">paraphrase-multilingual-mpnet-base-v2</a>",
                    "BUCC (de-en)": 98.59,
                    "BUCC (fr-en)": 96.89,
                    "BUCC (ru-en)": 96.44,
                    "BUCC (zh-en)": 97.56,
                    "Tatoeba (afr-eng)": 72.96,
                    "Tatoeba (amh-eng)": 53.49,
                    "Tatoeba (ang-eng)": 16.72,
                    "Tatoeba (ara-eng)": 90.19,
                    "Tatoeba (arq-eng)": 19.84,
                    "Tatoeba (arz-eng)": 55.69,
                    "Tatoeba (ast-eng)": 70.08,
                    "Tatoeba (awa-eng)": 42.83,
                    "Tatoeba (aze-eng)": 76.36,
                    "Tatoeba (bel-eng)": 79.94,
                    "Tatoeba (ben-eng)": 64.9,
                    "Tatoeba (ber-eng)": 4.88,
                    "Tatoeba (bos-eng)": 94.02,
                    "Tatoeba (bre-eng)": 6.42,
                    "Tatoeba (bul-eng)": 93.52,
                    "Tatoeba (cat-eng)": 96.05,
                    "Tatoeba (cbk-eng)": 58.68,
                    "Tatoeba (ceb-eng)": 7.39,
                    "Tatoeba (ces-eng)": 95.73,
                    "Tatoeba (cha-eng)": 12.59,
                    "Tatoeba (cmn-eng)": 95.83,
                    "Tatoeba (cor-eng)": 3.53,
                    "Tatoeba (csb-eng)": 23.73,
                    "Tatoeba (cym-eng)": 22.31,
                    "Tatoeba (dan-eng)": 96.17,
                    "Tatoeba (deu-eng)": 97.73,
                    "Tatoeba (dsb-eng)": 36.85,
                    "Tatoeba (dtp-eng)": 5.03,
                    "Tatoeba (ell-eng)": 94.93,
                    "Tatoeba (epo-eng)": 55.12,
                    "Tatoeba (est-eng)": 98.4,
                    "Tatoeba (eus-eng)": 31.33,
                    "Tatoeba (fao-eng)": 38.24,
                    "Tatoeba (fin-eng)": 95.92,
                    "Tatoeba (fra-eng)": 93.12,
                    "Tatoeba (fry-eng)": 43.54,
                    "Tatoeba (gla-eng)": 4.72,
                    "Tatoeba (gle-eng)": 16.85,
                    "Tatoeba (glg-eng)": 95.32,
                    "Tatoeba (gsw-eng)": 25.12,
                    "Tatoeba (heb-eng)": 88.26,
                    "Tatoeba (hin-eng)": 97.75,
                    "Tatoeba (hrv-eng)": 97.0,
                    "Tatoeba (hsb-eng)": 44.32,
                    "Tatoeba (hun-eng)": 94.18,
                    "Tatoeba (hye-eng)": 94.38,
                    "Tatoeba (ido-eng)": 43.91,
                    "Tatoeba (ile-eng)": 60.36,
                    "Tatoeba (ina-eng)": 84.32,
                    "Tatoeba (ind-eng)": 93.5,
                    "Tatoeba (isl-eng)": 59.25,
                    "Tatoeba (ita-eng)": 93.76,
                    "Tatoeba (jav-eng)": 23.39,
                    "Tatoeba (jpn-eng)": 92.51,
                    "Tatoeba (kab-eng)": 1.41,
                    "Tatoeba (kat-eng)": 95.46,
                    "Tatoeba (kaz-eng)": 61.49,
                    "Tatoeba (khm-eng)": 58.8,
                    "Tatoeba (kor-eng)": 93.07,
                    "Tatoeba (kur-eng)": 61.44,
                    "Tatoeba (kzj-eng)": 5.88,
                    "Tatoeba (lat-eng)": 24.25,
                    "Tatoeba (lfn-eng)": 49.56,
                    "Tatoeba (lit-eng)": 95.37,
                    "Tatoeba (lvs-eng)": 97.53,
                    "Tatoeba (mal-eng)": 88.46,
                    "Tatoeba (mar-eng)": 93.83,
                    "Tatoeba (max-eng)": 48.77,
                    "Tatoeba (mhr-eng)": 7.57,
                    "Tatoeba (mkd-eng)": 93.02,
                    "Tatoeba (mon-eng)": 96.14,
                    "Tatoeba (nds-eng)": 38.88,
                    "Tatoeba (nld-eng)": 95.5,
                    "Tatoeba (nno-eng)": 81.41,
                    "Tatoeba (nob-eng)": 98.53,
                    "Tatoeba (nov-eng)": 50.23,
                    "Tatoeba (oci-eng)": 43.49,
                    "Tatoeba (orv-eng)": 23.77,
                    "Tatoeba (pam-eng)": 5.39,
                    "Tatoeba (pes-eng)": 93.47,
                    "Tatoeba (pms-eng)": 34.19,
                    "Tatoeba (pol-eng)": 96.95,
                    "Tatoeba (por-eng)": 93.02,
                    "Tatoeba (ron-eng)": 96.43,
                    "Tatoeba (rus-eng)": 92.92,
                    "Tatoeba (slk-eng)": 96.62,
                    "Tatoeba (slv-eng)": 97.08,
                    "Tatoeba (spa-eng)": 97.0,
                    "Tatoeba (sqi-eng)": 98.57,
                    "Tatoeba (srp-eng)": 94.12,
                    "Tatoeba (swe-eng)": 95.45,
                    "Tatoeba (swg-eng)": 22.8,
                    "Tatoeba (swh-eng)": 16.02,
                    "Tatoeba (tam-eng)": 73.6,
                    "Tatoeba (tat-eng)": 10.89,
                    "Tatoeba (tel-eng)": 79.73,
                    "Tatoeba (tgl-eng)": 17.67,
                    "Tatoeba (tha-eng)": 95.99,
                    "Tatoeba (tuk-eng)": 14.91,
                    "Tatoeba (tur-eng)": 96.17,
                    "Tatoeba (tzl-eng)": 34.21,
                    "Tatoeba (uig-eng)": 48.35,
                    "Tatoeba (ukr-eng)": 92.67,
                    "Tatoeba (urd-eng)": 95.12,
                    "Tatoeba (uzb-eng)": 23.19,
                    "Tatoeba (vie-eng)": 97.23,
                    "Tatoeba (war-eng)": 7.42,
                    "Tatoeba (wuu-eng)": 78.25,
                    "Tatoeba (xho-eng)": 6.53,
                    "Tatoeba (yid-eng)": 30.73,
                    "Tatoeba (yue-eng)": 77.58,
                    "Tatoeba (zsm-eng)": 95.8
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/paraphrase-multilingual-mpnet-base-v2\">paraphrase-multilingual-mpnet-base-v2</a>",
                    "AllegroReviews": 33.86,
                    "AmazonCounterfactualClassification (de)": 69.95,
                    "AmazonCounterfactualClassification (en)": 75.81,
                    "AmazonCounterfactualClassification (en-ext)": 76.23,
                    "AmazonCounterfactualClassification (ja)": 69.79,
                    "AmazonPolarityClassification": 76.41,
                    "AmazonReviewsClassification (de)": 39.52,
                    "AmazonReviewsClassification (en)": 38.51,
                    "AmazonReviewsClassification (es)": 39.99,
                    "AmazonReviewsClassification (fr)": 39.0,
                    "AmazonReviewsClassification (ja)": 36.64,
                    "AmazonReviewsClassification (zh)": 37.74,
                    "Banking77Classification": 81.07,
                    "CBD": 65.0,
                    "EmotionClassification": 45.83,
                    "ImdbClassification": 64.57,
                    "MTOPDomainClassification (de)": 85.73,
                    "MTOPDomainClassification (en)": 89.24,
                    "MTOPDomainClassification (es)": 86.96,
                    "MTOPDomainClassification (fr)": 81.21,
                    "MTOPDomainClassification (hi)": 84.76,
                    "MTOPDomainClassification (th)": 82.51,
                    "MTOPIntentClassification (de)": 61.27,
                    "MTOPIntentClassification (en)": 68.69,
                    "MTOPIntentClassification (es)": 66.59,
                    "MTOPIntentClassification (fr)": 59.76,
                    "MTOPIntentClassification (hi)": 62.37,
                    "MTOPIntentClassification (th)": 64.8,
                    "MasakhaNEWSClassification (fra)": 78.1,
                    "MassiveIntentClassification (pl)": 64.29,
                    "MassiveIntentClassification (fr)": 61.88,
                    "MassiveScenarioClassification (pl)": 68.98,
                    "MassiveScenarioClassification (fr)": 67.9,
                    "PAC": 63.76,
                    "PolEmo2.0-IN": 62.78,
                    "PolEmo2.0-OUT": 19.98,
                    "ToxicConversationsClassification": 71.02,
                    "TweetSentimentExtractionClassification": 59.03
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/paraphrase-multilingual-mpnet-base-v2\">paraphrase-multilingual-mpnet-base-v2</a>",
                    "8TagsClustering": 25.62,
                    "AlloProfClusteringP2P": 54.49,
                    "AlloProfClusteringS2S": 44.79,
                    "ArxivClusteringP2P": 37.78,
                    "ArxivClusteringS2S": 31.68,
                    "BiorxivClusteringP2P": 33.09,
                    "BiorxivClusteringS2S": 29.6,
                    "BlurbsClusteringP2P": 34.38,
                    "BlurbsClusteringS2S": 15.81,
                    "HALClusteringS2S": 23.97,
                    "MLSUMClusteringP2P": 40.55,
                    "MLSUMClusteringS2S": 37.53,
                    "MasakhaNEWSClusteringP2P (fra)": 41.57,
                    "MasakhaNEWSClusteringS2S (fra)": 30.88,
                    "MedrxivClusteringP2P": 31.96,
                    "MedrxivClusteringS2S": 31.7,
                    "RedditClustering": 45.24,
                    "RedditClusteringP2P": 51.31,
                    "StackExchangeClustering": 52.98,
                    "StackExchangeClusteringP2P": 32.94,
                    "TenKGnadClusteringP2P": 35.96,
                    "TenKGnadClusteringS2S": 22.0,
                    "TwentyNewsgroupsClustering": 44.1
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/paraphrase-multilingual-mpnet-base-v2\">paraphrase-multilingual-mpnet-base-v2</a>",
                    "CDSC-E": 75.76,
                    "OpusparcusPC (fr)": 93.45,
                    "PPC": 93.67,
                    "PSC": 98.26,
                    "PawsX (fr)": 58.14,
                    "SICK-E-PL": 77.22,
                    "SprintDuplicateQuestions": 90.55,
                    "TwitterSemEval2015": 66.75,
                    "TwitterURLCorpus": 85.14
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/paraphrase-multilingual-mpnet-base-v2\">paraphrase-multilingual-mpnet-base-v2</a>",
                    "AlloprofReranking": 54.34,
                    "AskUbuntuDupQuestions": 60.16,
                    "MindSmallReranking": 30.15,
                    "SciDocsRR": 78.09,
                    "StackOverflowDupQuestions": 46.79,
                    "SyntecReranking": 83.23
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/paraphrase-multilingual-mpnet-base-v2\">paraphrase-multilingual-mpnet-base-v2</a>",
                    "AlloprofRetrieval": 30.8,
                    "ArguAna": 48.91,
                    "ArguAna-PL": 42.62,
                    "BSARDRetrieval": 0.0,
                    "CQADupstackRetrieval": 31.32,
                    "ClimateFEVER": 15.27,
                    "DBPedia": 26.22,
                    "DBPedia-PL": 20.18,
                    "FEVER": 56.76,
                    "FiQA-PL": 14.68,
                    "FiQA2018": 22.96,
                    "HotpotQA": 37.03,
                    "HotpotQA-PL": 29.36,
                    "MSMARCO": 26.6,
                    "MSMARCO-PL": 12.45,
                    "MintakaRetrieval (fr)": 24.45,
                    "NFCorpus": 25.49,
                    "NFCorpus-PL": 18.53,
                    "NQ": 33.6,
                    "NQ-PL": 15.64,
                    "Quora-PL": 79.18,
                    "QuoraRetrieval": 86.4,
                    "SCIDOCS": 13.97,
                    "SCIDOCS-PL": 11.18,
                    "SciFact": 50.3,
                    "SciFact-PL": 41.53,
                    "SyntecRetrieval": 76.0,
                    "TRECCOVID": 37.87,
                    "TRECCOVID-PL": 35.38,
                    "Touche2020": 17.4,
                    "XPQARetrieval (fr)": 46.22
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/paraphrase-multilingual-mpnet-base-v2\">paraphrase-multilingual-mpnet-base-v2</a>",
                    "BIOSSES": 76.27,
                    "CDSC-R": 88.8,
                    "SICK-R": 79.62,
                    "SICK-R-PL": 73.13,
                    "SICKFr": 75.56,
                    "STS12": 77.9,
                    "STS13": 85.11,
                    "STS14": 80.81,
                    "STS15": 87.48,
                    "STS16": 83.2,
                    "STS17 (ar-ar)": 79.1,
                    "STS17 (en-ar)": 80.85,
                    "STS17 (en-de)": 83.28,
                    "STS17 (en-en)": 86.99,
                    "STS17 (en-tr)": 74.9,
                    "STS17 (es-en)": 86.11,
                    "STS17 (es-es)": 85.14,
                    "STS17 (fr-en)": 81.17,
                    "STS17 (it-en)": 84.24,
                    "STS17 (ko-ko)": 83.41,
                    "STS17 (nl-en)": 82.51,
                    "STS22 (pl)": 33.64,
                    "STS22 (fr)": 74.3,
                    "STSBenchmark": 86.82,
                    "STSBenchmarkMultilingualSTS (fr)": 84.69
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/paraphrase-multilingual-mpnet-base-v2\">paraphrase-multilingual-mpnet-base-v2</a>",
                    "SummEval": 31.57,
                    "SummEvalFr": 29.47
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/paraphrase-multilingual-mpnet-base-v2\">paraphrase-multilingual-mpnet-base-v2</a>"
                }
            ]
        }
    },
    "bert-base-25lang-cased": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/Geotrend/bert-base-25lang-cased\">bert-base-25lang-cased</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/Geotrend/bert-base-25lang-cased\">bert-base-25lang-cased</a>",
                    "AmazonReviewsClassification (fr)": 29.39,
                    "MTOPDomainClassification (fr)": 63.63,
                    "MTOPIntentClassification (fr)": 37.86,
                    "MasakhaNEWSClassification (fra)": 63.91,
                    "MassiveIntentClassification (fr)": 37.3,
                    "MassiveScenarioClassification (fr)": 44.47
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/Geotrend/bert-base-25lang-cased\">bert-base-25lang-cased</a>",
                    "AlloProfClusteringP2P": 53.49,
                    "AlloProfClusteringS2S": 43.1,
                    "HALClusteringS2S": 19.78,
                    "MLSUMClusteringP2P": 40.73,
                    "MLSUMClusteringS2S": 31.94,
                    "MasakhaNEWSClusteringP2P (fra)": 24.23,
                    "MasakhaNEWSClusteringS2S (fra)": 24.46
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/Geotrend/bert-base-25lang-cased\">bert-base-25lang-cased</a>",
                    "OpusparcusPC (fr)": 86.79,
                    "PawsX (fr)": 53.39
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/Geotrend/bert-base-25lang-cased\">bert-base-25lang-cased</a>",
                    "AlloprofReranking": 36.25,
                    "SyntecReranking": 53.25
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/Geotrend/bert-base-25lang-cased\">bert-base-25lang-cased</a>",
                    "AlloprofRetrieval": 1.6,
                    "BSARDRetrieval": 0.0,
                    "MintakaRetrieval (fr)": 3.55,
                    "SyntecRetrieval": 18.95,
                    "XPQARetrieval (fr)": 18.46
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/Geotrend/bert-base-25lang-cased\">bert-base-25lang-cased</a>",
                    "SICKFr": 58.76,
                    "STS22 (fr)": 38.77,
                    "STSBenchmarkMultilingualSTS (fr)": 52.25
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/Geotrend/bert-base-25lang-cased\">bert-base-25lang-cased</a>",
                    "SummEvalFr": 28.84
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/Geotrend/bert-base-25lang-cased\">bert-base-25lang-cased</a>"
                }
            ]
        }
    },
    "contriever-base-msmarco": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/nthakur/contriever-base-msmarco\">contriever-base-msmarco</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/nthakur/contriever-base-msmarco\">contriever-base-msmarco</a>",
                    "AmazonCounterfactualClassification (en)": 72.19,
                    "AmazonPolarityClassification": 68.63,
                    "AmazonReviewsClassification (en)": 37.42,
                    "Banking77Classification": 80.02,
                    "EmotionClassification": 44.77,
                    "ImdbClassification": 67.04,
                    "MTOPDomainClassification (en)": 93.18,
                    "MTOPIntentClassification (en)": 69.31,
                    "MassiveIntentClassification (en)": 67.78,
                    "MassiveScenarioClassification (en)": 76.0,
                    "ToxicConversationsClassification": 67.77,
                    "TweetSentimentExtractionClassification": 56.1
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/nthakur/contriever-base-msmarco\">contriever-base-msmarco</a>",
                    "ArxivClusteringP2P": 42.61,
                    "ArxivClusteringS2S": 32.32,
                    "BiorxivClusteringP2P": 34.97,
                    "BiorxivClusteringS2S": 29.08,
                    "MedrxivClusteringP2P": 31.19,
                    "MedrxivClusteringS2S": 27.27,
                    "RedditClustering": 54.89,
                    "RedditClusteringP2P": 57.58,
                    "StackExchangeClustering": 63.15,
                    "StackExchangeClusteringP2P": 32.25,
                    "TwentyNewsgroupsClustering": 46.82
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/nthakur/contriever-base-msmarco\">contriever-base-msmarco</a>",
                    "SprintDuplicateQuestions": 95.55,
                    "TwitterSemEval2015": 66.85,
                    "TwitterURLCorpus": 85.21
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/nthakur/contriever-base-msmarco\">contriever-base-msmarco</a>",
                    "AskUbuntuDupQuestions": 56.69,
                    "MindSmallReranking": 31.58,
                    "SciDocsRR": 76.51,
                    "StackOverflowDupQuestions": 47.78
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/nthakur/contriever-base-msmarco\">contriever-base-msmarco</a>",
                    "ArguAna": 48.32,
                    "CQADupstackRetrieval": 33.67,
                    "ClimateFEVER": 24.79,
                    "DBPedia": 38.1,
                    "FEVER": 59.29,
                    "FiQA2018": 27.42,
                    "HotpotQA": 56.81,
                    "MSMARCO": 36.77,
                    "NFCorpus": 31.32,
                    "NQ": 41.83,
                    "QuoraRetrieval": 86.72,
                    "SCIDOCS": 17.12,
                    "SciFact": 65.51,
                    "TRECCOVID": 44.77,
                    "Touche2020": 15.79
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/nthakur/contriever-base-msmarco\">contriever-base-msmarco</a>",
                    "BIOSSES": 83.32,
                    "SICK-R": 70.2,
                    "STS12": 64.34,
                    "STS13": 80.03,
                    "STS14": 74.51,
                    "STS15": 83.3,
                    "STS16": 79.67,
                    "STS17 (en-en)": 86.32,
                    "STS22 (en)": 64.64,
                    "STSBenchmark": 78.81
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/nthakur/contriever-base-msmarco\">contriever-base-msmarco</a>",
                    "SummEval": 30.36
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/nthakur/contriever-base-msmarco\">contriever-base-msmarco</a>",
                    "Core17InstructionRetrieval": -2.48,
                    "News21InstructionRetrieval": -2.83,
                    "Robust04InstructionRetrieval": -6.12
                }
            ]
        }
    },
    "titan-embed-text-v1": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://docs.aws.amazon.com/bedrock/latest/userguide/embeddings.html\">titan-embed-text-v1</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://docs.aws.amazon.com/bedrock/latest/userguide/embeddings.html\">titan-embed-text-v1</a>",
                    "AmazonCounterfactualClassification (en)": 61.85,
                    "Banking77Classification": 83.21
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://docs.aws.amazon.com/bedrock/latest/userguide/embeddings.html\">titan-embed-text-v1</a>"
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://docs.aws.amazon.com/bedrock/latest/userguide/embeddings.html\">titan-embed-text-v1</a>"
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://docs.aws.amazon.com/bedrock/latest/userguide/embeddings.html\">titan-embed-text-v1</a>",
                    "SciDocsRR": 88.87
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://docs.aws.amazon.com/bedrock/latest/userguide/embeddings.html\">titan-embed-text-v1</a>",
                    "ArguAna": 48.83,
                    "FiQA2018": 40.38,
                    "MSMARCO": 35.19,
                    "NQ": 51.08,
                    "SciFact": 73.5,
                    "TRECCOVID": 54.74
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://docs.aws.amazon.com/bedrock/latest/userguide/embeddings.html\">titan-embed-text-v1</a>",
                    "BIOSSES": 84.17,
                    "SICK-R": 73.05,
                    "STS12": 66.59,
                    "STS13": 83.24,
                    "STS14": 73.71,
                    "STS15": 82.4,
                    "STS16": NaN,
                    "STS17 (en-en)": 80.9,
                    "STSBenchmark": 74.85
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://docs.aws.amazon.com/bedrock/latest/userguide/embeddings.html\">titan-embed-text-v1</a>"
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://docs.aws.amazon.com/bedrock/latest/userguide/embeddings.html\">titan-embed-text-v1</a>"
                }
            ]
        }
    },
    "google-gecko-256.text-embedding-preview-0409": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://cloud.google.com/vertex-ai/generative-ai/docs/embeddings/get-text-embeddings#latest_models\">google-gecko-256.text-embedding-preview-0409</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://cloud.google.com/vertex-ai/generative-ai/docs/embeddings/get-text-embeddings#latest_models\">google-gecko-256.text-embedding-preview-0409</a>",
                    "AmazonCounterfactualClassification (en)": 70.93,
                    "AmazonPolarityClassification": 97.34,
                    "AmazonReviewsClassification (en)": 48.47,
                    "Banking77Classification": 86.01,
                    "EmotionClassification": 51.53,
                    "ImdbClassification": 95.7,
                    "MTOPDomainClassification (en)": 98.02,
                    "MTOPIntentClassification (en)": 77.82,
                    "MassiveIntentClassification (en)": 75.67,
                    "MassiveScenarioClassification (en)": 85.16,
                    "ToxicConversationsClassification": 88.33,
                    "TweetSentimentExtractionClassification": 72.97
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://cloud.google.com/vertex-ai/generative-ai/docs/embeddings/get-text-embeddings#latest_models\">google-gecko-256.text-embedding-preview-0409</a>",
                    "ArxivClusteringP2P": 44.12,
                    "ArxivClusteringS2S": 36.54,
                    "BiorxivClusteringP2P": 36.28,
                    "BiorxivClusteringS2S": 33.09,
                    "MedrxivClusteringP2P": 32.08,
                    "MedrxivClusteringS2S": 30.84,
                    "RedditClustering": 62.24,
                    "RedditClusteringP2P": 63.7,
                    "StackExchangeClustering": 70.19,
                    "StackExchangeClusteringP2P": 36.1,
                    "TwentyNewsgroupsClustering": 50.6
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://cloud.google.com/vertex-ai/generative-ai/docs/embeddings/get-text-embeddings#latest_models\">google-gecko-256.text-embedding-preview-0409</a>",
                    "SprintDuplicateQuestions": 96.49,
                    "TwitterSemEval2015": 78.23,
                    "TwitterURLCorpus": 87.04
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://cloud.google.com/vertex-ai/generative-ai/docs/embeddings/get-text-embeddings#latest_models\">google-gecko-256.text-embedding-preview-0409</a>",
                    "AskUbuntuDupQuestions": 63.84,
                    "MindSmallReranking": 31.89,
                    "SciDocsRR": 81.62,
                    "StackOverflowDupQuestions": 53.76
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://cloud.google.com/vertex-ai/generative-ai/docs/embeddings/get-text-embeddings#latest_models\">google-gecko-256.text-embedding-preview-0409</a>",
                    "ArguAna": 56.27,
                    "CQADupstackRetrieval": 45.41,
                    "ClimateFEVER": 29.35,
                    "DBPedia": 41.91,
                    "FEVER": 82.61,
                    "FiQA2018": 55.54,
                    "HotpotQA": 64.65,
                    "MSMARCO": 31.12,
                    "NFCorpus": 37.81,
                    "NQ": 57.37,
                    "QuoraRetrieval": 87.89,
                    "SCIDOCS": 18.21,
                    "SciFact": 70.86,
                    "TRECCOVID": 80.13,
                    "Touche2020": 27.4
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://cloud.google.com/vertex-ai/generative-ai/docs/embeddings/get-text-embeddings#latest_models\">google-gecko-256.text-embedding-preview-0409</a>",
                    "BIOSSES": 89.42,
                    "SICK-R": 81.67,
                    "STS12": 78.02,
                    "STS13": 90.1,
                    "STS14": 85.44,
                    "STS15": 89.64,
                    "STS16": 87.24,
                    "STS17 (en-en)": 90.46,
                    "STS22 (en)": 67.99,
                    "STSBenchmark": 89.33
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://cloud.google.com/vertex-ai/generative-ai/docs/embeddings/get-text-embeddings#latest_models\">google-gecko-256.text-embedding-preview-0409</a>",
                    "SummEval": 32.36
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://cloud.google.com/vertex-ai/generative-ai/docs/embeddings/get-text-embeddings#latest_models\">google-gecko-256.text-embedding-preview-0409</a>"
                }
            ]
        }
    },
    "gtr-t5-large": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/gtr-t5-large\">gtr-t5-large</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/gtr-t5-large\">gtr-t5-large</a>",
                    "AmazonCounterfactualClassification (de)": 59.38,
                    "AmazonCounterfactualClassification (en)": 70.03,
                    "AmazonCounterfactualClassification (en-ext)": 69.86,
                    "AmazonCounterfactualClassification (ja)": 45.87,
                    "AmazonPolarityClassification": 73.92,
                    "AmazonReviewsClassification (de)": 33.06,
                    "AmazonReviewsClassification (en)": 37.21,
                    "AmazonReviewsClassification (es)": 34.0,
                    "AmazonReviewsClassification (fr)": 33.48,
                    "AmazonReviewsClassification (ja)": 21.78,
                    "AmazonReviewsClassification (zh)": 21.83,
                    "Banking77Classification": 81.21,
                    "EmotionClassification": 46.33,
                    "ImdbClassification": 70.86,
                    "MTOPDomainClassification (de)": 81.91,
                    "MTOPDomainClassification (en)": 94.01,
                    "MTOPDomainClassification (es)": 84.7,
                    "MTOPDomainClassification (fr)": 82.48,
                    "MTOPDomainClassification (hi)": 22.11,
                    "MTOPDomainClassification (th)": 16.36,
                    "MTOPIntentClassification (de)": 52.13,
                    "MTOPIntentClassification (en)": 63.86,
                    "MTOPIntentClassification (es)": 52.62,
                    "MTOPIntentClassification (fr)": 46.39,
                    "MTOPIntentClassification (hi)": 3.9,
                    "MTOPIntentClassification (th)": 5.38,
                    "MassiveIntentClassification (af)": 41.02,
                    "MassiveIntentClassification (am)": 2.34,
                    "MassiveIntentClassification (ar)": 4.87,
                    "MassiveIntentClassification (az)": 34.92,
                    "MassiveIntentClassification (bn)": 2.52,
                    "MassiveIntentClassification (cy)": 35.87,
                    "MassiveIntentClassification (da)": 45.3,
                    "MassiveIntentClassification (de)": 51.48,
                    "MassiveIntentClassification (el)": 10.0,
                    "MassiveIntentClassification (en)": 70.06,
                    "MassiveIntentClassification (es)": 53.3,
                    "MassiveIntentClassification (fa)": 3.59,
                    "MassiveIntentClassification (fi)": 37.35,
                    "MassiveIntentClassification (fr)": 54.83,
                    "MassiveIntentClassification (he)": 2.52,
                    "MassiveIntentClassification (hi)": 2.88,
                    "MassiveIntentClassification (hu)": 33.52,
                    "MassiveIntentClassification (hy)": 3.13,
                    "MassiveIntentClassification (id)": 40.11,
                    "MassiveIntentClassification (is)": 34.77,
                    "MassiveIntentClassification (it)": 51.21,
                    "MassiveIntentClassification (ja)": 4.75,
                    "MassiveIntentClassification (jv)": 35.6,
                    "MassiveIntentClassification (ka)": 2.71,
                    "MassiveIntentClassification (km)": 5.48,
                    "MassiveIntentClassification (kn)": 2.44,
                    "MassiveIntentClassification (ko)": 2.59,
                    "MassiveIntentClassification (lv)": 38.15,
                    "MassiveIntentClassification (ml)": 2.67,
                    "MassiveIntentClassification (mn)": 18.47,
                    "MassiveIntentClassification (ms)": 35.58,
                    "MassiveIntentClassification (my)": 4.35,
                    "MassiveIntentClassification (nb)": 43.78,
                    "MassiveIntentClassification (nl)": 45.96,
                    "MassiveIntentClassification (pl)": 39.08,
                    "MassiveIntentClassification (pt)": 52.27,
                    "MassiveIntentClassification (ro)": 46.39,
                    "MassiveIntentClassification (ru)": 16.82,
                    "MassiveIntentClassification (sl)": 37.3,
                    "MassiveIntentClassification (sq)": 41.73,
                    "MassiveIntentClassification (sv)": 43.51,
                    "MassiveIntentClassification (sw)": 35.97,
                    "MassiveIntentClassification (ta)": 1.52,
                    "MassiveIntentClassification (te)": 2.57,
                    "MassiveIntentClassification (th)": 3.94,
                    "MassiveIntentClassification (tl)": 41.03,
                    "MassiveIntentClassification (tr)": 33.75,
                    "MassiveIntentClassification (ur)": 2.57,
                    "MassiveIntentClassification (vi)": 25.23,
                    "MassiveIntentClassification (zh-CN)": 2.41,
                    "MassiveIntentClassification (zh-TW)": 4.64,
                    "MassiveScenarioClassification (af)": 51.48,
                    "MassiveScenarioClassification (am)": 7.74,
                    "MassiveScenarioClassification (ar)": 12.03,
                    "MassiveScenarioClassification (az)": 41.77,
                    "MassiveScenarioClassification (bn)": 8.07,
                    "MassiveScenarioClassification (cy)": 43.67,
                    "MassiveScenarioClassification (da)": 54.88,
                    "MassiveScenarioClassification (de)": 63.63,
                    "MassiveScenarioClassification (el)": 16.83,
                    "MassiveScenarioClassification (en)": 75.49,
                    "MassiveScenarioClassification (es)": 61.48,
                    "MassiveScenarioClassification (fa)": 6.48,
                    "MassiveScenarioClassification (fi)": 43.54,
                    "MassiveScenarioClassification (fr)": 64.06,
                    "MassiveScenarioClassification (he)": 8.03,
                    "MassiveScenarioClassification (hi)": 7.5,
                    "MassiveScenarioClassification (hu)": 42.59,
                    "MassiveScenarioClassification (hy)": 9.22,
                    "MassiveScenarioClassification (id)": 48.67,
                    "MassiveScenarioClassification (is)": 43.87,
                    "MassiveScenarioClassification (it)": 59.83,
                    "MassiveScenarioClassification (ja)": 5.62,
                    "MassiveScenarioClassification (jv)": 42.18,
                    "MassiveScenarioClassification (ka)": 7.52,
                    "MassiveScenarioClassification (km)": 9.55,
                    "MassiveScenarioClassification (kn)": 8.34,
                    "MassiveScenarioClassification (ko)": 6.11,
                    "MassiveScenarioClassification (lv)": 43.35,
                    "MassiveScenarioClassification (ml)": 7.28,
                    "MassiveScenarioClassification (mn)": 23.94,
                    "MassiveScenarioClassification (ms)": 45.18,
                    "MassiveScenarioClassification (my)": 9.33,
                    "MassiveScenarioClassification (nb)": 52.71,
                    "MassiveScenarioClassification (nl)": 57.02,
                    "MassiveScenarioClassification (pl)": 46.79,
                    "MassiveScenarioClassification (pt)": 59.45,
                    "MassiveScenarioClassification (ro)": 56.8,
                    "MassiveScenarioClassification (ru)": 25.85,
                    "MassiveScenarioClassification (sl)": 42.51,
                    "MassiveScenarioClassification (sq)": 50.41,
                    "MassiveScenarioClassification (sv)": 54.16,
                    "MassiveScenarioClassification (sw)": 43.02,
                    "MassiveScenarioClassification (ta)": 7.21,
                    "MassiveScenarioClassification (te)": 6.9,
                    "MassiveScenarioClassification (th)": 8.7,
                    "MassiveScenarioClassification (tl)": 51.76,
                    "MassiveScenarioClassification (tr)": 42.54,
                    "MassiveScenarioClassification (ur)": 9.32,
                    "MassiveScenarioClassification (vi)": 31.51,
                    "MassiveScenarioClassification (zh-CN)": 3.84,
                    "MassiveScenarioClassification (zh-TW)": 8.16,
                    "ToxicConversationsClassification": 68.65,
                    "TweetSentimentExtractionClassification": 54.09
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/gtr-t5-large\">gtr-t5-large</a>",
                    "ArxivClusteringP2P": 37.5,
                    "ArxivClusteringS2S": 30.55,
                    "BiorxivClusteringP2P": 29.59,
                    "BiorxivClusteringS2S": 25.72,
                    "MedrxivClusteringP2P": 28.72,
                    "MedrxivClusteringS2S": 27.39,
                    "RedditClustering": 61.69,
                    "RedditClusteringP2P": 61.67,
                    "StackExchangeClustering": 69.93,
                    "StackExchangeClusteringP2P": 33.21,
                    "TwentyNewsgroupsClustering": 51.64
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/gtr-t5-large\">gtr-t5-large</a>",
                    "SprintDuplicateQuestions": 95.05,
                    "TwitterSemEval2015": 76.03,
                    "TwitterURLCorpus": 84.89
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/gtr-t5-large\">gtr-t5-large</a>",
                    "AskUbuntuDupQuestions": 61.64,
                    "MindSmallReranking": 31.84,
                    "SciDocsRR": 76.39,
                    "StackOverflowDupQuestions": 51.58
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/gtr-t5-large\">gtr-t5-large</a>",
                    "ArguAna": 52.09,
                    "CQADupstackRetrieval": 36.62,
                    "ClimateFEVER": 26.9,
                    "DBPedia": 39.55,
                    "FEVER": 72.66,
                    "FiQA2018": 42.79,
                    "HotpotQA": 57.85,
                    "MSMARCO": 42.73,
                    "NFCorpus": 32.63,
                    "NQ": 55.09,
                    "QuoraRetrieval": 88.47,
                    "SCIDOCS": 15.51,
                    "SciFact": 63.42,
                    "TRECCOVID": 56.68,
                    "Touche2020": 28.29
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/gtr-t5-large\">gtr-t5-large</a>",
                    "BIOSSES": 84.86,
                    "SICK-R": 73.39,
                    "STS12": 70.33,
                    "STS13": 82.19,
                    "STS14": 77.16,
                    "STS15": 86.31,
                    "STS16": 81.85,
                    "STS17 (ar-ar)": 10.19,
                    "STS17 (en-ar)": -5.77,
                    "STS17 (en-de)": 67.43,
                    "STS17 (en-en)": 83.93,
                    "STS17 (en-tr)": 8.75,
                    "STS17 (es-en)": 54.96,
                    "STS17 (es-es)": 82.74,
                    "STS17 (fr-en)": 60.5,
                    "STS17 (it-en)": 46.26,
                    "STS17 (ko-ko)": 8.96,
                    "STS17 (nl-en)": 47.48,
                    "STS22 (ar)": 34.97,
                    "STS22 (de)": 51.7,
                    "STS22 (de-en)": 48.76,
                    "STS22 (de-fr)": 57.5,
                    "STS22 (de-pl)": 32.76,
                    "STS22 (en)": 64.3,
                    "STS22 (es)": 57.49,
                    "STS22 (es-en)": 67.76,
                    "STS22 (es-it)": 57.18,
                    "STS22 (fr)": 78.7,
                    "STS22 (fr-pl)": 61.98,
                    "STS22 (it)": 67.67,
                    "STS22 (pl)": 30.68,
                    "STS22 (pl-en)": 54.17,
                    "STS22 (ru)": 15.36,
                    "STS22 (tr)": 58.12,
                    "STS22 (zh)": 27.32,
                    "STS22 (zh-en)": 29.42,
                    "STSBenchmark": 77.6
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/gtr-t5-large\">gtr-t5-large</a>",
                    "SummEval": 29.5
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/gtr-t5-large\">gtr-t5-large</a>"
                }
            ]
        }
    },
    "text-embedding-ada-002": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/new-and-improved-embedding-model\">text-embedding-ada-002</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/new-and-improved-embedding-model\">text-embedding-ada-002</a>",
                    "AmazonCounterfactualClassification (en)": 75.94,
                    "AmazonPolarityClassification": 86.72,
                    "AmazonReviewsClassification (zh)": 38.3,
                    "AmazonReviewsClassification (en)": 44.78,
                    "AmazonReviewsClassification (fr)": 43.76,
                    "Banking77Classification": 80.66,
                    "EmotionClassification": 48.74,
                    "IFlyTek": 44.62,
                    "ImdbClassification": 77.98,
                    "JDReview": 74.6,
                    "MTOPDomainClassification (en)": 92.13,
                    "MTOPDomainClassification (fr)": 89.38,
                    "MTOPIntentClassification (en)": 64.68,
                    "MTOPIntentClassification (fr)": 64.45,
                    "MasakhaNEWSClassification (fra)": 81.52,
                    "MassiveIntentClassification (zh-CN)": 64.81,
                    "MassiveIntentClassification (en)": 70.15,
                    "MassiveIntentClassification (fr)": 65.42,
                    "MassiveScenarioClassification (zh-CN)": 71.4,
                    "MassiveScenarioClassification (en)": 75.33,
                    "MassiveScenarioClassification (fr)": 71.11,
                    "MultilingualSentiment": 67.99,
                    "OnlineShopping": 88.94,
                    "TNews": 45.77,
                    "ToxicConversationsClassification": 72.29,
                    "TweetSentimentExtractionClassification": 61.81,
                    "Waimai": 82.37
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/new-and-improved-embedding-model\">text-embedding-ada-002</a>",
                    "AlloProfClusteringP2P": 64.83,
                    "AlloProfClusteringS2S": 53.52,
                    "ArxivClusteringP2P": 45.01,
                    "ArxivClusteringS2S": 36.85,
                    "BiorxivClusteringP2P": 36.66,
                    "BiorxivClusteringS2S": 34.21,
                    "CLSClusteringP2P": 38.26,
                    "CLSClusteringS2S": 35.91,
                    "HALClusteringS2S": 26.18,
                    "MLSUMClusteringP2P": 44.59,
                    "MLSUMClusteringS2S": 41.67,
                    "MasakhaNEWSClusteringP2P (fra)": 68.35,
                    "MasakhaNEWSClusteringS2S (fra)": 48.58,
                    "MedrxivClusteringP2P": 32.6,
                    "MedrxivClusteringS2S": 30.8,
                    "RedditClustering": 61.42,
                    "RedditClusteringP2P": 64.13,
                    "StackExchangeClustering": 72.22,
                    "StackExchangeClusteringP2P": 38.49,
                    "ThuNewsClusteringP2P": 58.71,
                    "ThuNewsClusteringS2S": 49.86,
                    "TwentyNewsgroupsClustering": 52.56
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/new-and-improved-embedding-model\">text-embedding-ada-002</a>",
                    "Cmnli": 76.03,
                    "Ocnli": 63.08,
                    "OpusparcusPC (fr)": 94.12,
                    "PawsX (fr)": 60.16,
                    "SprintDuplicateQuestions": 92.17,
                    "TwitterSemEval2015": 75.28,
                    "TwitterURLCorpus": 87.22
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/new-and-improved-embedding-model\">text-embedding-ada-002</a>",
                    "AskUbuntuDupQuestions": 62.05,
                    "CMedQAv1": 63.08,
                    "CMedQAv2": 64.02,
                    "MMarcoReranking": 23.39,
                    "MindSmallReranking": 31.45,
                    "SciDocsRR": 81.22,
                    "StackOverflowDupQuestions": 50.54,
                    "SyntecReranking": 89.87,
                    "T2Reranking": 66.65
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/new-and-improved-embedding-model\">text-embedding-ada-002</a>",
                    "AlloprofRetrieval": 51.64,
                    "ArguAna": 57.44,
                    "BSARDRetrieval": 0.61,
                    "CQADupstackRetrieval": 41.69,
                    "ClimateFEVER": 21.64,
                    "CmedqaRetrieval": 22.36,
                    "CovidRetrieval": 57.21,
                    "DBPedia": 39.39,
                    "DuRetrieval": 71.17,
                    "EcomRetrieval": 44.49,
                    "FEVER": 74.99,
                    "FiQA2018": 44.41,
                    "HotpotQA": 60.9,
                    "MMarcoRetrieval": 69.86,
                    "MSMARCO": 40.91,
                    "MedicalRetrieval": 37.92,
                    "MintakaRetrieval (fr)": 29.94,
                    "NFCorpus": 36.97,
                    "NQ": 51.58,
                    "QuoraRetrieval": 87.6,
                    "SCIDOCS": 18.36,
                    "SciFact": 72.75,
                    "SyntecRetrieval": 85.97,
                    "T2Retrieval": 69.14,
                    "TRECCOVID": 68.47,
                    "Touche2020": 21.61,
                    "VideoRetrieval": 43.85,
                    "XPQARetrieval (fr)": 73.0
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/new-and-improved-embedding-model\">text-embedding-ada-002</a>",
                    "AFQMC": 23.88,
                    "ATEC": 29.25,
                    "BIOSSES": 86.35,
                    "BQ": 45.33,
                    "LCQMC": 68.41,
                    "PAWSX": 16.55,
                    "QBQTC": 30.27,
                    "SICK-R": 80.6,
                    "SICKFr": 76.28,
                    "STS12": 69.8,
                    "STS13": 83.27,
                    "STS14": 76.09,
                    "STS15": 86.12,
                    "STS16": 85.96,
                    "STS17 (en-en)": 90.25,
                    "STS22 (zh)": 62.53,
                    "STS22 (en)": 68.12,
                    "STS22 (tr)": 64.5,
                    "STS22 (fr)": 81.09,
                    "STSB": 70.61,
                    "STSBenchmark": 83.17,
                    "STSBenchmarkMultilingualSTS (fr)": 77.55
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/new-and-improved-embedding-model\">text-embedding-ada-002</a>",
                    "SummEval": 30.8,
                    "SummEvalFr": 30.5
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/new-and-improved-embedding-model\">text-embedding-ada-002</a>"
                }
            ]
        }
    },
    "multilingual-e5-large": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/multilingual-e5-large\">multilingual-e5-large</a>",
                    "BornholmBitextMining": 44.16
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/multilingual-e5-large\">multilingual-e5-large</a>",
                    "AllegroReviews": 41.14,
                    "AmazonReviewsClassification (fr)": 41.91,
                    "AngryTweetsClassification": 54.95,
                    "CBD": 69.9,
                    "DKHateClassification": 66.02,
                    "DanishPoliticalCommentsClassification": 38.27,
                    "IFlyTek": 45.47,
                    "JDReview": 80.99,
                    "LccSentimentClassification": 59.6,
                    "MTOPDomainClassification (fr)": 86.41,
                    "MTOPIntentClassification (fr)": 59.43,
                    "MasakhaNEWSClassification (fra)": 79.38,
                    "MassiveIntentClassification (da)": 60.16,
                    "MassiveIntentClassification (nb)": 59.83,
                    "MassiveIntentClassification (sv)": 61.78,
                    "MassiveIntentClassification (pl)": 65.07,
                    "MassiveScenarioClassification (da)": 67.46,
                    "MassiveScenarioClassification (nb)": 66.18,
                    "MassiveScenarioClassification (sv)": 69.15,
                    "MassiveScenarioClassification (pl)": 69.82,
                    "MultilingualSentiment": 68.58,
                    "NoRecClassification": 62.76,
                    "NordicLangClassification": 82.29,
                    "NorwegianParliament": 60.36,
                    "OnlineShopping": 90.81,
                    "PAC": 70.37,
                    "PolEmo2.0-IN": 77.06,
                    "PolEmo2.0-OUT": 53.38,
                    "ScalaDaClassification": 50.77,
                    "ScalaNbClassification": 50.44,
                    "TNews": 48.38,
                    "Waimai": 85.02
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/multilingual-e5-large\">multilingual-e5-large</a>",
                    "8TagsClustering": 33.88,
                    "AlloProfClusteringP2P": 62.99,
                    "AlloProfClusteringS2S": 32.26,
                    "CLSClusteringP2P": 40.68,
                    "CLSClusteringS2S": 38.59,
                    "HALClusteringS2S": 22.44,
                    "MLSUMClusteringP2P": 44.04,
                    "MLSUMClusteringS2S": 37.65,
                    "MasakhaNEWSClusteringP2P (fra)": 40.94,
                    "MasakhaNEWSClusteringS2S (fra)": 30.56,
                    "ThuNewsClusteringP2P": 58.05,
                    "ThuNewsClusteringS2S": 55.59
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/multilingual-e5-large\">multilingual-e5-large</a>",
                    "CDSC-E": 74.47,
                    "Cmnli": 78.18,
                    "Ocnli": 61.6,
                    "OpusparcusPC (fr)": 93.89,
                    "PPC": 92.18,
                    "PSC": 99.39,
                    "PawsX (fr)": 58.5,
                    "SICK-E-PL": 75.96
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/multilingual-e5-large\">multilingual-e5-large</a>",
                    "AlloprofReranking": 57.37,
                    "CMedQAv1": 68.25,
                    "CMedQAv2": 68.56,
                    "MMarcoReranking": 21.34,
                    "SyntecReranking": 86.9,
                    "T2Reranking": 65.83
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/multilingual-e5-large\">multilingual-e5-large</a>",
                    "AlloprofRetrieval": 38.15,
                    "ArguAna-PL": 53.02,
                    "BSARDRetrieval": 0.27,
                    "CmedqaRetrieval": 28.67,
                    "CovidRetrieval": 75.51,
                    "DBPedia-PL": 35.82,
                    "DuRetrieval": 85.32,
                    "EcomRetrieval": 54.75,
                    "FiQA-PL": 33.0,
                    "HotpotQA-PL": 67.41,
                    "MMarcoRetrieval": 79.2,
                    "MSMARCO-PL": 33.38,
                    "MedicalRetrieval": 51.44,
                    "MintakaRetrieval (fr)": 25.2,
                    "NFCorpus-PL": 30.24,
                    "NQ-PL": 52.79,
                    "Quora-PL": 83.65,
                    "SCIDOCS-PL": 13.81,
                    "SciFact-PL": 65.66,
                    "SyntecRetrieval": 81.07,
                    "T2Retrieval": 76.11,
                    "TRECCOVID-PL": 70.03,
                    "VideoRetrieval": 58.25,
                    "XPQARetrieval (fr)": 66.15
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/multilingual-e5-large\">multilingual-e5-large</a>",
                    "AFQMC": 33.02,
                    "ATEC": 39.81,
                    "BQ": 46.44,
                    "CDSC-R": 91.0,
                    "LCQMC": 75.95,
                    "PAWSX": 14.63,
                    "QBQTC": 29.77,
                    "SICK-R-PL": 75.08,
                    "SICKFr": 78.78,
                    "STS22 (zh)": 65.64,
                    "STS22 (pl)": 34.66,
                    "STSB": 81.08,
                    "STSBenchmarkMultilingualSTS (fr)": 82.53
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/multilingual-e5-large\">multilingual-e5-large</a>",
                    "SummEvalFr": 30.92
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/multilingual-e5-large\">multilingual-e5-large</a>"
                }
            ]
        }
    },
    "text-similarity-ada-001": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/introducing-text-and-code-embeddings\">text-similarity-ada-001</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/introducing-text-and-code-embeddings\">text-similarity-ada-001</a>",
                    "AmazonCounterfactualClassification (en)": 76.4,
                    "AmazonPolarityClassification": 92.83,
                    "AmazonReviewsClassification (en)": 47.45,
                    "Banking77Classification": 68.04,
                    "EmotionClassification": 50.33,
                    "ImdbClassification": 89.38,
                    "MTOPDomainClassification (en)": 89.89,
                    "MTOPIntentClassification (en)": 64.8,
                    "MassiveIntentClassification (en)": 65.17,
                    "MassiveScenarioClassification (en)": 67.67,
                    "ToxicConversationsClassification": 70.0,
                    "TweetSentimentExtractionClassification": 63.35
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/introducing-text-and-code-embeddings\">text-similarity-ada-001</a>",
                    "ArxivClusteringP2P": 41.49,
                    "ArxivClusteringS2S": 28.47,
                    "BiorxivClusteringP2P": 36.86,
                    "BiorxivClusteringS2S": 27.55,
                    "MedrxivClusteringP2P": 31.09,
                    "MedrxivClusteringS2S": 26.5,
                    "RedditClustering": 42.47,
                    "RedditClusteringP2P": 58.1,
                    "StackExchangeClustering": 53.52,
                    "StackExchangeClusteringP2P": 30.43,
                    "TwentyNewsgroupsClustering": 36.26
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/introducing-text-and-code-embeddings\">text-similarity-ada-001</a>",
                    "SprintDuplicateQuestions": 77.85,
                    "TwitterSemEval2015": 69.04,
                    "TwitterURLCorpus": 83.69
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/introducing-text-and-code-embeddings\">text-similarity-ada-001</a>",
                    "AskUbuntuDupQuestions": 53.49,
                    "MindSmallReranking": 30.71,
                    "SciDocsRR": 71.04,
                    "StackOverflowDupQuestions": 40.85
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/introducing-text-and-code-embeddings\">text-similarity-ada-001</a>",
                    "ArguAna": 39.65,
                    "CQADupstackRetrieval": 10.17,
                    "ClimateFEVER": 2.83,
                    "DBPedia": 3.48,
                    "FEVER": 4.45,
                    "FiQA2018": 7.54,
                    "HotpotQA": 12.6,
                    "MSMARCO": 10.53,
                    "NFCorpus": 20.59,
                    "NQ": 2.02,
                    "QuoraRetrieval": 82.18,
                    "SCIDOCS": 6.28,
                    "SciFact": 45.46,
                    "TRECCOVID": 24.56,
                    "Touche2020": 3.1
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/introducing-text-and-code-embeddings\">text-similarity-ada-001</a>",
                    "BIOSSES": 78.04,
                    "SICK-R": 77.48,
                    "STS12": 72.3,
                    "STS13": 81.49,
                    "STS14": 74.74,
                    "STS15": 84.28,
                    "STS16": 82.06,
                    "STS17 (en-en)": 87.08,
                    "STS22 (en)": 64.71,
                    "STSBenchmark": 83.78
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/introducing-text-and-code-embeddings\">text-similarity-ada-001</a>",
                    "SummEval": 26.94
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/introducing-text-and-code-embeddings\">text-similarity-ada-001</a>"
                }
            ]
        }
    },
    "text2vec-base-chinese": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/shibing624/text2vec-base-chinese\">text2vec-base-chinese</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/shibing624/text2vec-base-chinese\">text2vec-base-chinese</a>",
                    "AmazonReviewsClassification (zh)": 34.12,
                    "IFlyTek": 42.05,
                    "JDReview": 82.14,
                    "MassiveIntentClassification (zh-CN)": 63.98,
                    "MassiveScenarioClassification (zh-CN)": 70.52,
                    "MultilingualSentiment": 60.98,
                    "OnlineShopping": 85.69,
                    "TNews": 43.01,
                    "Waimai": 77.22
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/shibing624/text2vec-base-chinese\">text2vec-base-chinese</a>",
                    "CLSClusteringP2P": 35.27,
                    "CLSClusteringS2S": 32.42,
                    "ThuNewsClusteringP2P": 42.92,
                    "ThuNewsClusteringS2S": 40.01
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/shibing624/text2vec-base-chinese\">text2vec-base-chinese</a>",
                    "Cmnli": 73.87,
                    "Ocnli": 60.95
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/shibing624/text2vec-base-chinese\">text2vec-base-chinese</a>",
                    "CMedQAv1": 59.26,
                    "CMedQAv2": 59.82,
                    "MMarcoReranking": 12.76,
                    "T2Reranking": 65.95
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/shibing624/text2vec-base-chinese\">text2vec-base-chinese</a>",
                    "CmedqaRetrieval": 15.91,
                    "CovidRetrieval": 44.81,
                    "DuRetrieval": 52.23,
                    "EcomRetrieval": 34.6,
                    "MMarcoRetrieval": 44.06,
                    "MedicalRetrieval": 27.56,
                    "T2Retrieval": 51.67,
                    "VideoRetrieval": 39.52
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/shibing624/text2vec-base-chinese\">text2vec-base-chinese</a>",
                    "AFQMC": 26.06,
                    "ATEC": 31.93,
                    "BQ": 42.67,
                    "LCQMC": 70.16,
                    "PAWSX": 17.21,
                    "QBQTC": 24.62,
                    "STS22 (zh)": 55.35,
                    "STSB": 79.3
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/shibing624/text2vec-base-chinese\">text2vec-base-chinese</a>"
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/shibing624/text2vec-base-chinese\">text2vec-base-chinese</a>"
                }
            ]
        }
    },
    "st-polish-paraphrase-from-mpnet": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sdadas/st-polish-paraphrase-from-mpnet\">st-polish-paraphrase-from-mpnet</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sdadas/st-polish-paraphrase-from-mpnet\">st-polish-paraphrase-from-mpnet</a>",
                    "AllegroReviews": 34.55,
                    "CBD": 67.48,
                    "MassiveIntentClassification (pl)": 65.93,
                    "MassiveScenarioClassification (pl)": 71.85,
                    "PAC": 63.25,
                    "PolEmo2.0-IN": 68.37,
                    "PolEmo2.0-OUT": 30.99
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sdadas/st-polish-paraphrase-from-mpnet\">st-polish-paraphrase-from-mpnet</a>",
                    "8TagsClustering": 33.15
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sdadas/st-polish-paraphrase-from-mpnet\">st-polish-paraphrase-from-mpnet</a>",
                    "CDSC-E": 75.06,
                    "PPC": 93.49,
                    "PSC": 99.05,
                    "SICK-E-PL": 80.56
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sdadas/st-polish-paraphrase-from-mpnet\">st-polish-paraphrase-from-mpnet</a>"
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sdadas/st-polish-paraphrase-from-mpnet\">st-polish-paraphrase-from-mpnet</a>",
                    "ArguAna-PL": 51.87,
                    "DBPedia-PL": 24.59,
                    "FiQA-PL": 22.27,
                    "HotpotQA-PL": 32.11,
                    "MSMARCO-PL": 17.91,
                    "NFCorpus-PL": 24.05,
                    "NQ-PL": 23.54,
                    "Quora-PL": 81.49,
                    "SCIDOCS-PL": 13.23,
                    "SciFact-PL": 52.51,
                    "TRECCOVID-PL": 35.23
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sdadas/st-polish-paraphrase-from-mpnet\">st-polish-paraphrase-from-mpnet</a>",
                    "CDSC-R": 88.55,
                    "SICK-R-PL": 76.18,
                    "STS22 (pl)": 37.34
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sdadas/st-polish-paraphrase-from-mpnet\">st-polish-paraphrase-from-mpnet</a>"
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sdadas/st-polish-paraphrase-from-mpnet\">st-polish-paraphrase-from-mpnet</a>"
                }
            ]
        }
    },
    "mistral-embed": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://docs.mistral.ai/guides/embeddings\">mistral-embed</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://docs.mistral.ai/guides/embeddings\">mistral-embed</a>",
                    "AmazonReviewsClassification (fr)": 41.59,
                    "MTOPDomainClassification (fr)": 90.05,
                    "MTOPIntentClassification (fr)": 66.09,
                    "MasakhaNEWSClassification (fra)": 81.4,
                    "MassiveIntentClassification (fr)": 62.83,
                    "MassiveScenarioClassification (fr)": 69.71
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://docs.mistral.ai/guides/embeddings\">mistral-embed</a>",
                    "AlloProfClusteringP2P": 62.01,
                    "AlloProfClusteringS2S": 49.2,
                    "HALClusteringS2S": 26.17,
                    "MLSUMClusteringP2P": 45.28,
                    "MLSUMClusteringS2S": 42.74,
                    "MasakhaNEWSClusteringP2P (fra)": 48.13,
                    "MasakhaNEWSClusteringS2S (fra)": 39.62
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://docs.mistral.ai/guides/embeddings\">mistral-embed</a>",
                    "OpusparcusPC (fr)": 92.61,
                    "PawsX (fr)": 62.02
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://docs.mistral.ai/guides/embeddings\">mistral-embed</a>",
                    "AlloprofReranking": 72.36,
                    "SyntecReranking": 88.57
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://docs.mistral.ai/guides/embeddings\">mistral-embed</a>",
                    "AILACasedocs": 38.2,
                    "AILAStatutes": 44.81,
                    "AlloprofRetrieval": 56.84,
                    "BSARDRetrieval": 2.48,
                    "GerDaLIRSmall": 17.85,
                    "LeCaRDv2": 61.12,
                    "LegalBenchConsumerContractsQA": 80.8,
                    "LegalBenchCorporateLobbying": 94.11,
                    "LegalQuAD": 47.17,
                    "LegalSummarization": 67.39,
                    "MintakaRetrieval (fr)": 21.73,
                    "SyntecRetrieval": 78.77,
                    "XPQARetrieval (fr)": 74.24
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://docs.mistral.ai/guides/embeddings\">mistral-embed</a>",
                    "SICKFr": 76.21,
                    "STS22 (fr)": 82.74,
                    "STSBenchmarkMultilingualSTS (fr)": 79.72
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://docs.mistral.ai/guides/embeddings\">mistral-embed</a>",
                    "SummEvalFr": 31.47
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://docs.mistral.ai/guides/embeddings\">mistral-embed</a>"
                }
            ]
        }
    },
    "monot5-3b-msmarco-10k": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/castorini/monot5-3b-msmarco-10k\">monot5-3b-msmarco-10k</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/castorini/monot5-3b-msmarco-10k\">monot5-3b-msmarco-10k</a>"
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/castorini/monot5-3b-msmarco-10k\">monot5-3b-msmarco-10k</a>"
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/castorini/monot5-3b-msmarco-10k\">monot5-3b-msmarco-10k</a>"
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/castorini/monot5-3b-msmarco-10k\">monot5-3b-msmarco-10k</a>"
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/castorini/monot5-3b-msmarco-10k\">monot5-3b-msmarco-10k</a>"
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/castorini/monot5-3b-msmarco-10k\">monot5-3b-msmarco-10k</a>"
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/castorini/monot5-3b-msmarco-10k\">monot5-3b-msmarco-10k</a>"
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/castorini/monot5-3b-msmarco-10k\">monot5-3b-msmarco-10k</a>",
                    "Core17InstructionRetrieval": 1.84,
                    "News21InstructionRetrieval": 1.78,
                    "Robust04InstructionRetrieval": 3.96
                }
            ]
        }
    },
    "all-MiniLM-L12-v2": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/all-MiniLM-L12-v2\">all-MiniLM-L12-v2</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/all-MiniLM-L12-v2\">all-MiniLM-L12-v2</a>",
                    "AmazonCounterfactualClassification (de)": 57.1,
                    "AmazonCounterfactualClassification (en)": 65.28,
                    "AmazonCounterfactualClassification (en-ext)": 67.24,
                    "AmazonCounterfactualClassification (ja)": 59.91,
                    "AmazonPolarityClassification": 62.98,
                    "AmazonReviewsClassification (de)": 25.91,
                    "AmazonReviewsClassification (en)": 30.79,
                    "AmazonReviewsClassification (es)": 27.63,
                    "AmazonReviewsClassification (fr)": 27.54,
                    "AmazonReviewsClassification (ja)": 23.57,
                    "AmazonReviewsClassification (zh)": 22.99,
                    "Banking77Classification": 80.4,
                    "EmotionClassification": 41.17,
                    "ImdbClassification": 59.76,
                    "MTOPDomainClassification (de)": 72.04,
                    "MTOPDomainClassification (en)": 91.9,
                    "MTOPDomainClassification (es)": 72.99,
                    "MTOPDomainClassification (fr)": 75.59,
                    "MTOPDomainClassification (hi)": 40.36,
                    "MTOPDomainClassification (th)": 17.1,
                    "MTOPIntentClassification (de)": 43.41,
                    "MTOPIntentClassification (en)": 62.84,
                    "MTOPIntentClassification (es)": 41.88,
                    "MTOPIntentClassification (fr)": 38.94,
                    "MTOPIntentClassification (hi)": 17.75,
                    "MTOPIntentClassification (th)": 5.63,
                    "MasakhaNEWSClassification (fra)": 72.2,
                    "MassiveIntentClassification (af)": 38.94,
                    "MassiveIntentClassification (am)": 2.45,
                    "MassiveIntentClassification (ar)": 20.94,
                    "MassiveIntentClassification (az)": 34.25,
                    "MassiveIntentClassification (bn)": 13.67,
                    "MassiveIntentClassification (cy)": 35.71,
                    "MassiveIntentClassification (da)": 44.43,
                    "MassiveIntentClassification (de)": 44.17,
                    "MassiveIntentClassification (el)": 28.7,
                    "MassiveIntentClassification (en)": 67.15,
                    "MassiveIntentClassification (es)": 40.91,
                    "MassiveIntentClassification (fa)": 23.52,
                    "MassiveIntentClassification (fi)": 39.27,
                    "MassiveIntentClassification (fr)": 44.82,
                    "MassiveIntentClassification (he)": 23.65,
                    "MassiveIntentClassification (hi)": 17.98,
                    "MassiveIntentClassification (hu)": 38.0,
                    "MassiveIntentClassification (hy)": 8.69,
                    "MassiveIntentClassification (id)": 39.66,
                    "MassiveIntentClassification (is)": 35.14,
                    "MassiveIntentClassification (it)": 43.17,
                    "MassiveIntentClassification (ja)": 30.94,
                    "MassiveIntentClassification (jv)": 36.69,
                    "MassiveIntentClassification (ka)": 9.17,
                    "MassiveIntentClassification (km)": 4.99,
                    "MassiveIntentClassification (kn)": 3.08,
                    "MassiveIntentClassification (ko)": 19.97,
                    "MassiveIntentClassification (lv)": 38.61,
                    "MassiveIntentClassification (ml)": 2.85,
                    "MassiveIntentClassification (mn)": 23.25,
                    "MassiveIntentClassification (ms)": 36.21,
                    "MassiveIntentClassification (my)": 4.38,
                    "MassiveIntentClassification (nb)": 41.91,
                    "MassiveIntentClassification (nl)": 41.85,
                    "MassiveIntentClassification (pl)": 37.63,
                    "MassiveIntentClassification (pt)": 45.12,
                    "MassiveIntentClassification (ro)": 41.71,
                    "MassiveIntentClassification (ru)": 26.33,
                    "MassiveIntentClassification (sl)": 38.52,
                    "MassiveIntentClassification (sq)": 41.62,
                    "MassiveIntentClassification (sv)": 40.42,
                    "MassiveIntentClassification (sw)": 35.28,
                    "MassiveIntentClassification (ta)": 13.1,
                    "MassiveIntentClassification (te)": 2.56,
                    "MassiveIntentClassification (th)": 10.54,
                    "MassiveIntentClassification (tl)": 38.56,
                    "MassiveIntentClassification (tr)": 35.9,
                    "MassiveIntentClassification (ur)": 16.18,
                    "MassiveIntentClassification (vi)": 37.38,
                    "MassiveIntentClassification (zh-CN)": 23.74,
                    "MassiveIntentClassification (zh-TW)": 22.39,
                    "MassiveScenarioClassification (af)": 45.71,
                    "MassiveScenarioClassification (am)": 7.41,
                    "MassiveScenarioClassification (ar)": 27.62,
                    "MassiveScenarioClassification (az)": 39.58,
                    "MassiveScenarioClassification (bn)": 18.98,
                    "MassiveScenarioClassification (cy)": 41.4,
                    "MassiveScenarioClassification (da)": 49.47,
                    "MassiveScenarioClassification (de)": 52.07,
                    "MassiveScenarioClassification (el)": 35.51,
                    "MassiveScenarioClassification (en)": 74.58,
                    "MassiveScenarioClassification (es)": 50.74,
                    "MassiveScenarioClassification (fa)": 29.0,
                    "MassiveScenarioClassification (fi)": 45.8,
                    "MassiveScenarioClassification (fr)": 53.76,
                    "MassiveScenarioClassification (he)": 25.68,
                    "MassiveScenarioClassification (hi)": 23.02,
                    "MassiveScenarioClassification (hu)": 44.09,
                    "MassiveScenarioClassification (hy)": 14.83,
                    "MassiveScenarioClassification (id)": 44.35,
                    "MassiveScenarioClassification (is)": 43.08,
                    "MassiveScenarioClassification (it)": 51.71,
                    "MassiveScenarioClassification (ja)": 36.75,
                    "MassiveScenarioClassification (jv)": 44.57,
                    "MassiveScenarioClassification (ka)": 14.84,
                    "MassiveScenarioClassification (km)": 9.75,
                    "MassiveScenarioClassification (kn)": 8.32,
                    "MassiveScenarioClassification (ko)": 25.72,
                    "MassiveScenarioClassification (lv)": 42.75,
                    "MassiveScenarioClassification (ml)": 7.25,
                    "MassiveScenarioClassification (mn)": 29.03,
                    "MassiveScenarioClassification (ms)": 44.65,
                    "MassiveScenarioClassification (my)": 10.07,
                    "MassiveScenarioClassification (nb)": 47.36,
                    "MassiveScenarioClassification (nl)": 49.15,
                    "MassiveScenarioClassification (pl)": 44.72,
                    "MassiveScenarioClassification (pt)": 53.0,
                    "MassiveScenarioClassification (ro)": 49.97,
                    "MassiveScenarioClassification (ru)": 28.75,
                    "MassiveScenarioClassification (sl)": 42.26,
                    "MassiveScenarioClassification (sq)": 49.14,
                    "MassiveScenarioClassification (sv)": 46.83,
                    "MassiveScenarioClassification (sw)": 43.18,
                    "MassiveScenarioClassification (ta)": 19.38,
                    "MassiveScenarioClassification (te)": 7.74,
                    "MassiveScenarioClassification (th)": 18.32,
                    "MassiveScenarioClassification (tl)": 48.31,
                    "MassiveScenarioClassification (tr)": 41.79,
                    "MassiveScenarioClassification (ur)": 24.46,
                    "MassiveScenarioClassification (vi)": 40.94,
                    "MassiveScenarioClassification (zh-CN)": 33.18,
                    "MassiveScenarioClassification (zh-TW)": 31.16,
                    "ToxicConversationsClassification": 67.47,
                    "TweetSentimentExtractionClassification": 54.25
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/all-MiniLM-L12-v2\">all-MiniLM-L12-v2</a>",
                    "AlloProfClusteringP2P": 46.03,
                    "AlloProfClusteringS2S": 31.83,
                    "ArxivClusteringP2P": 46.07,
                    "ArxivClusteringS2S": 37.5,
                    "BiorxivClusteringP2P": 36.99,
                    "BiorxivClusteringS2S": 33.21,
                    "HALClusteringS2S": 19.58,
                    "MLSUMClusteringP2P": 34.35,
                    "MLSUMClusteringS2S": 29.3,
                    "MasakhaNEWSClusteringP2P (fra)": 42.72,
                    "MasakhaNEWSClusteringS2S (fra)": 32.47,
                    "MedrxivClusteringP2P": 34.25,
                    "MedrxivClusteringS2S": 32.24,
                    "RedditClustering": 51.18,
                    "RedditClusteringP2P": 54.8,
                    "StackExchangeClustering": 53.05,
                    "StackExchangeClusteringP2P": 33.13,
                    "TwentyNewsgroupsClustering": 47.47
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/all-MiniLM-L12-v2\">all-MiniLM-L12-v2</a>",
                    "OpusparcusPC (fr)": 87.35,
                    "PawsX (fr)": 55.53,
                    "SprintDuplicateQuestions": 92.45,
                    "TwitterSemEval2015": 70.02,
                    "TwitterURLCorpus": 84.77
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/all-MiniLM-L12-v2\">all-MiniLM-L12-v2</a>",
                    "AlloprofReranking": 45.73,
                    "AskUbuntuDupQuestions": 64.06,
                    "MindSmallReranking": 31.02,
                    "SciDocsRR": 87.2,
                    "StackOverflowDupQuestions": 51.47,
                    "SyntecReranking": 68.33
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/all-MiniLM-L12-v2\">all-MiniLM-L12-v2</a>",
                    "AlloprofRetrieval": 33.2,
                    "ArguAna": 47.13,
                    "BSARDRetrieval": 0.0,
                    "CQADupstackRetrieval": 42.53,
                    "ClimateFEVER": 21.57,
                    "DBPedia": 33.35,
                    "FEVER": 55.9,
                    "FiQA2018": 37.27,
                    "HotpotQA": 44.59,
                    "MSMARCO": 39.03,
                    "MintakaRetrieval (fr)": 16.08,
                    "NFCorpus": 32.25,
                    "NQ": 46.47,
                    "QuoraRetrieval": 87.75,
                    "SCIDOCS": 21.82,
                    "SciFact": 62.64,
                    "SyntecRetrieval": 60.8,
                    "TRECCOVID": 50.82,
                    "Touche2020": 17.22,
                    "XPQARetrieval (fr)": 55.9
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/all-MiniLM-L12-v2\">all-MiniLM-L12-v2</a>",
                    "BIOSSES": 83.57,
                    "SICK-R": 79.32,
                    "SICKFr": 63.16,
                    "STS12": 73.08,
                    "STS13": 82.13,
                    "STS14": 76.73,
                    "STS15": 85.58,
                    "STS16": 80.23,
                    "STS17 (ar-ar)": 58.71,
                    "STS17 (en-ar)": 0.54,
                    "STS17 (en-de)": 27.54,
                    "STS17 (en-en)": 88.63,
                    "STS17 (en-tr)": 0.43,
                    "STS17 (es-en)": 22.01,
                    "STS17 (es-es)": 78.37,
                    "STS17 (fr-en)": 30.7,
                    "STS17 (it-en)": 24.28,
                    "STS17 (ko-ko)": 43.37,
                    "STS17 (nl-en)": 24.51,
                    "STS22 (ar)": 17.54,
                    "STS22 (de)": 22.53,
                    "STS22 (de-en)": 42.86,
                    "STS22 (de-fr)": 43.52,
                    "STS22 (de-pl)": 1.63,
                    "STS22 (en)": 65.67,
                    "STS22 (es)": 43.98,
                    "STS22 (es-en)": 53.99,
                    "STS22 (es-it)": 40.71,
                    "STS22 (fr)": 69.51,
                    "STS22 (fr-pl)": 16.9,
                    "STS22 (it)": 47.48,
                    "STS22 (pl)": 19.22,
                    "STS22 (pl-en)": 42.67,
                    "STS22 (ru)": 11.19,
                    "STS22 (tr)": 21.6,
                    "STS22 (zh)": 33.15,
                    "STS22 (zh-en)": 44.39,
                    "STSBenchmark": 83.09,
                    "STSBenchmarkMultilingualSTS (fr)": 66.68
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/all-MiniLM-L12-v2\">all-MiniLM-L12-v2</a>",
                    "SummEval": 27.9,
                    "SummEvalFr": 26.63
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/all-MiniLM-L12-v2\">all-MiniLM-L12-v2</a>"
                }
            ]
        }
    },
    "sentence-camembert-base": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/dangvantuan/sentence-camembert-base\">sentence-camembert-base</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/dangvantuan/sentence-camembert-base\">sentence-camembert-base</a>",
                    "AmazonReviewsClassification (fr)": 36.03,
                    "MTOPDomainClassification (fr)": 77.1,
                    "MTOPIntentClassification (fr)": 43.44,
                    "MasakhaNEWSClassification (fra)": 70.36,
                    "MassiveIntentClassification (fr)": 51.59,
                    "MassiveScenarioClassification (fr)": 61.28
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/dangvantuan/sentence-camembert-base\">sentence-camembert-base</a>",
                    "AlloProfClusteringP2P": 59.09,
                    "AlloProfClusteringS2S": 38.92,
                    "HALClusteringS2S": 20.22,
                    "MLSUMClusteringP2P": 35.98,
                    "MLSUMClusteringS2S": 27.05,
                    "MasakhaNEWSClusteringP2P (fra)": 36.03,
                    "MasakhaNEWSClusteringS2S (fra)": 30.77
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/dangvantuan/sentence-camembert-base\">sentence-camembert-base</a>",
                    "OpusparcusPC (fr)": 92.05,
                    "PawsX (fr)": 57.44
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/dangvantuan/sentence-camembert-base\">sentence-camembert-base</a>",
                    "AlloprofReranking": 48.68,
                    "SyntecReranking": 79.75
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/dangvantuan/sentence-camembert-base\">sentence-camembert-base</a>",
                    "AlloprofRetrieval": 21.94,
                    "BSARDRetrieval": 0.0,
                    "MintakaRetrieval (fr)": 13.36,
                    "SyntecRetrieval": 68.62,
                    "XPQARetrieval (fr)": 57.92
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/dangvantuan/sentence-camembert-base\">sentence-camembert-base</a>",
                    "SICKFr": 74.18,
                    "STS22 (fr)": 77.54,
                    "STSBenchmarkMultilingualSTS (fr)": 81.64
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/dangvantuan/sentence-camembert-base\">sentence-camembert-base</a>",
                    "SummEvalFr": 28.77
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/dangvantuan/sentence-camembert-base\">sentence-camembert-base</a>"
                }
            ]
        }
    },
    "GritLM-7B": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/GritLM/GritLM-7B\">GritLM-7B</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/GritLM/GritLM-7B\">GritLM-7B</a>"
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/GritLM/GritLM-7B\">GritLM-7B</a>"
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/GritLM/GritLM-7B\">GritLM-7B</a>"
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/GritLM/GritLM-7B\">GritLM-7B</a>"
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/GritLM/GritLM-7B\">GritLM-7B</a>"
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/GritLM/GritLM-7B\">GritLM-7B</a>"
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/GritLM/GritLM-7B\">GritLM-7B</a>"
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/GritLM/GritLM-7B\">GritLM-7B</a>",
                    "Core17InstructionRetrieval": 2.62,
                    "News21InstructionRetrieval": -1.01,
                    "Robust04InstructionRetrieval": -1.68
                }
            ]
        }
    },
    "text2vec-base-multilingual": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/spaces/mteb/leaderboard\">text2vec-base-multilingual</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/spaces/mteb/leaderboard\">text2vec-base-multilingual</a>",
                    "AmazonReviewsClassification (fr)": 34.25,
                    "MTOPDomainClassification (fr)": 71.83,
                    "MTOPIntentClassification (fr)": 44.53,
                    "MasakhaNEWSClassification (fra)": 73.84,
                    "MassiveIntentClassification (fr)": 51.93,
                    "MassiveScenarioClassification (fr)": 58.31
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/spaces/mteb/leaderboard\">text2vec-base-multilingual</a>",
                    "AlloProfClusteringP2P": 49.11,
                    "AlloProfClusteringS2S": 32.72,
                    "HALClusteringS2S": 16.19,
                    "MLSUMClusteringP2P": 36.19,
                    "MLSUMClusteringS2S": 30.39,
                    "MasakhaNEWSClusteringP2P (fra)": 38.51,
                    "MasakhaNEWSClusteringS2S (fra)": 32.51
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/spaces/mteb/leaderboard\">text2vec-base-multilingual</a>",
                    "OpusparcusPC (fr)": 92.04,
                    "PawsX (fr)": 65.57
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/spaces/mteb/leaderboard\">text2vec-base-multilingual</a>",
                    "AlloprofReranking": 51.48,
                    "SyntecReranking": 70.28
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/spaces/mteb/leaderboard\">text2vec-base-multilingual</a>",
                    "AlloprofRetrieval": 18.9,
                    "BSARDRetrieval": 0.0,
                    "MintakaRetrieval (fr)": 14.81,
                    "SyntecRetrieval": 49.69,
                    "XPQARetrieval (fr)": 40.4
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/spaces/mteb/leaderboard\">text2vec-base-multilingual</a>",
                    "SICKFr": 77.25,
                    "STS22 (fr)": 74.1,
                    "STSBenchmarkMultilingualSTS (fr)": 83.48
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/spaces/mteb/leaderboard\">text2vec-base-multilingual</a>",
                    "SummEvalFr": 29.33
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/spaces/mteb/leaderboard\">text2vec-base-multilingual</a>"
                }
            ]
        }
    },
    "LASER2": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://github.com/facebookresearch/LASER\">LASER2</a>",
                    "BUCC (de-en)": 99.21,
                    "BUCC (fr-en)": 98.39,
                    "BUCC (ru-en)": 97.62,
                    "BUCC (zh-en)": 97.7,
                    "Tatoeba (afr-eng)": 92.59,
                    "Tatoeba (amh-eng)": 80.82,
                    "Tatoeba (ang-eng)": 25.22,
                    "Tatoeba (ara-eng)": 90.14,
                    "Tatoeba (arq-eng)": 26.63,
                    "Tatoeba (arz-eng)": 66.16,
                    "Tatoeba (ast-eng)": 76.35,
                    "Tatoeba (awa-eng)": 33.74,
                    "Tatoeba (aze-eng)": 82.41,
                    "Tatoeba (bel-eng)": 79.54,
                    "Tatoeba (ben-eng)": 89.43,
                    "Tatoeba (ber-eng)": 77.63,
                    "Tatoeba (bos-eng)": 95.86,
                    "Tatoeba (bre-eng)": 31.2,
                    "Tatoeba (bul-eng)": 93.57,
                    "Tatoeba (cat-eng)": 95.8,
                    "Tatoeba (cbk-eng)": 77.17,
                    "Tatoeba (ceb-eng)": 9.93,
                    "Tatoeba (ces-eng)": 95.52,
                    "Tatoeba (cha-eng)": 14.86,
                    "Tatoeba (cmn-eng)": 85.62,
                    "Tatoeba (cor-eng)": 4.45,
                    "Tatoeba (csb-eng)": 27.03,
                    "Tatoeba (cym-eng)": 5.85,
                    "Tatoeba (dan-eng)": 95.22,
                    "Tatoeba (deu-eng)": 99.07,
                    "Tatoeba (dsb-eng)": 42.34,
                    "Tatoeba (dtp-eng)": 7.39,
                    "Tatoeba (ell-eng)": 96.2,
                    "Tatoeba (epo-eng)": 96.61,
                    "Tatoeba (est-eng)": 96.43,
                    "Tatoeba (eus-eng)": 93.32,
                    "Tatoeba (fao-eng)": 57.04,
                    "Tatoeba (fin-eng)": 96.98,
                    "Tatoeba (fra-eng)": 94.28,
                    "Tatoeba (fry-eng)": 42.07,
                    "Tatoeba (gla-eng)": 1.52,
                    "Tatoeba (gle-eng)": 4.2,
                    "Tatoeba (glg-eng)": 96.14,
                    "Tatoeba (gsw-eng)": 27.52,
                    "Tatoeba (heb-eng)": 0.0,
                    "Tatoeba (hin-eng)": 95.32,
                    "Tatoeba (hrv-eng)": 96.72,
                    "Tatoeba (hsb-eng)": 45.75,
                    "Tatoeba (hun-eng)": 95.2,
                    "Tatoeba (hye-eng)": 88.72,
                    "Tatoeba (ido-eng)": 80.86,
                    "Tatoeba (ile-eng)": 87.88,
                    "Tatoeba (ina-eng)": 93.93,
                    "Tatoeba (ind-eng)": 92.98,
                    "Tatoeba (isl-eng)": 94.32,
                    "Tatoeba (ita-eng)": 94.32,
                    "Tatoeba (jav-eng)": 9.95,
                    "Tatoeba (jpn-eng)": 93.78,
                    "Tatoeba (kab-eng)": 65.88,
                    "Tatoeba (kat-eng)": 81.16,
                    "Tatoeba (kaz-eng)": 53.3,
                    "Tatoeba (khm-eng)": 74.19,
                    "Tatoeba (kor-eng)": 87.97,
                    "Tatoeba (kur-eng)": 19.09,
                    "Tatoeba (kzj-eng)": 4.46,
                    "Tatoeba (lat-eng)": 64.81,
                    "Tatoeba (lfn-eng)": 63.39,
                    "Tatoeba (lit-eng)": 96.2,
                    "Tatoeba (lvs-eng)": 95.33,
                    "Tatoeba (mal-eng)": 98.16,
                    "Tatoeba (mar-eng)": 92.93,
                    "Tatoeba (max-eng)": 36.96,
                    "Tatoeba (mhr-eng)": 6.86,
                    "Tatoeba (mkd-eng)": 93.63,
                    "Tatoeba (mon-eng)": 3.42,
                    "Tatoeba (nds-eng)": 77.13,
                    "Tatoeba (nld-eng)": 95.35,
                    "Tatoeba (nno-eng)": 72.75,
                    "Tatoeba (nob-eng)": 95.77,
                    "Tatoeba (nov-eng)": 60.02,
                    "Tatoeba (oci-eng)": 58.13,
                    "Tatoeba (orv-eng)": 23.24,
                    "Tatoeba (pam-eng)": 3.24,
                    "Tatoeba (pes-eng)": 93.13,
                    "Tatoeba (pms-eng)": 36.23,
                    "Tatoeba (pol-eng)": 97.32,
                    "Tatoeba (por-eng)": 94.54,
                    "Tatoeba (ron-eng)": 96.52,
                    "Tatoeba (rus-eng)": 92.58,
                    "Tatoeba (slk-eng)": 95.82,
                    "Tatoeba (slv-eng)": 95.4,
                    "Tatoeba (spa-eng)": 97.33,
                    "Tatoeba (sqi-eng)": 97.22,
                    "Tatoeba (srp-eng)": 93.64,
                    "Tatoeba (swe-eng)": 95.31,
                    "Tatoeba (swg-eng)": 33.1,
                    "Tatoeba (swh-eng)": 55.66,
                    "Tatoeba (tam-eng)": 87.32,
                    "Tatoeba (tat-eng)": 34.74,
                    "Tatoeba (tel-eng)": 96.72,
                    "Tatoeba (tgl-eng)": 63.19,
                    "Tatoeba (tha-eng)": 96.38,
                    "Tatoeba (tuk-eng)": 16.35,
                    "Tatoeba (tur-eng)": 98.03,
                    "Tatoeba (tzl-eng)": 36.56,
                    "Tatoeba (uig-eng)": 56.49,
                    "Tatoeba (ukr-eng)": 93.52,
                    "Tatoeba (urd-eng)": 84.23,
                    "Tatoeba (uzb-eng)": 23.2,
                    "Tatoeba (vie-eng)": 96.73,
                    "Tatoeba (war-eng)": 8.25,
                    "Tatoeba (wuu-eng)": 75.09,
                    "Tatoeba (xho-eng)": 4.68,
                    "Tatoeba (yid-eng)": 2.49,
                    "Tatoeba (yue-eng)": 87.75,
                    "Tatoeba (zsm-eng)": 95.41
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://github.com/facebookresearch/LASER\">LASER2</a>",
                    "AmazonCounterfactualClassification (de)": 67.82,
                    "AmazonCounterfactualClassification (en)": 76.84,
                    "AmazonCounterfactualClassification (en-ext)": 76.17,
                    "AmazonCounterfactualClassification (ja)": 68.76,
                    "AmazonPolarityClassification": 61.01,
                    "AmazonReviewsClassification (de)": 31.07,
                    "AmazonReviewsClassification (en)": 28.71,
                    "AmazonReviewsClassification (es)": 32.72,
                    "AmazonReviewsClassification (fr)": 31.12,
                    "AmazonReviewsClassification (ja)": 28.94,
                    "AmazonReviewsClassification (zh)": 30.89,
                    "Banking77Classification": 57.76,
                    "EmotionClassification": 24.83,
                    "ImdbClassification": 57.58,
                    "MTOPDomainClassification (de)": 74.08,
                    "MTOPDomainClassification (en)": 75.36,
                    "MTOPDomainClassification (es)": 73.47,
                    "MTOPDomainClassification (fr)": 72.26,
                    "MTOPDomainClassification (hi)": 72.95,
                    "MTOPDomainClassification (th)": 72.68,
                    "MTOPIntentClassification (de)": 51.62,
                    "MTOPIntentClassification (en)": 49.47,
                    "MTOPIntentClassification (es)": 52.75,
                    "MTOPIntentClassification (fr)": 50.12,
                    "MTOPIntentClassification (hi)": 45.55,
                    "MTOPIntentClassification (th)": 50.07,
                    "MasakhaNEWSClassification (fra)": 65.9,
                    "MassiveIntentClassification (af)": 38.01,
                    "MassiveIntentClassification (am)": 12.7,
                    "MassiveIntentClassification (ar)": 37.16,
                    "MassiveIntentClassification (az)": 19.98,
                    "MassiveIntentClassification (bn)": 42.51,
                    "MassiveIntentClassification (cy)": 17.33,
                    "MassiveIntentClassification (da)": 45.61,
                    "MassiveIntentClassification (de)": 44.79,
                    "MassiveIntentClassification (el)": 46.71,
                    "MassiveIntentClassification (en)": 47.91,
                    "MassiveIntentClassification (es)": 45.44,
                    "MassiveIntentClassification (fa)": 45.01,
                    "MassiveIntentClassification (fi)": 45.94,
                    "MassiveIntentClassification (fr)": 46.13,
                    "MassiveIntentClassification (he)": 42.55,
                    "MassiveIntentClassification (hi)": 40.2,
                    "MassiveIntentClassification (hu)": 42.77,
                    "MassiveIntentClassification (hy)": 28.07,
                    "MassiveIntentClassification (id)": 45.81,
                    "MassiveIntentClassification (is)": 39.86,
                    "MassiveIntentClassification (it)": 48.25,
                    "MassiveIntentClassification (ja)": 45.3,
                    "MassiveIntentClassification (jv)": 24.3,
                    "MassiveIntentClassification (ka)": 22.7,
                    "MassiveIntentClassification (km)": 22.48,
                    "MassiveIntentClassification (kn)": 4.32,
                    "MassiveIntentClassification (ko)": 44.26,
                    "MassiveIntentClassification (lv)": 39.75,
                    "MassiveIntentClassification (ml)": 41.33,
                    "MassiveIntentClassification (mn)": 16.2,
                    "MassiveIntentClassification (ms)": 43.23,
                    "MassiveIntentClassification (my)": 25.37,
                    "MassiveIntentClassification (nb)": 37.74,
                    "MassiveIntentClassification (nl)": 45.0,
                    "MassiveIntentClassification (pl)": 44.99,
                    "MassiveIntentClassification (pt)": 48.55,
                    "MassiveIntentClassification (ro)": 44.3,
                    "MassiveIntentClassification (ru)": 44.29,
                    "MassiveIntentClassification (sl)": 44.72,
                    "MassiveIntentClassification (sq)": 46.12,
                    "MassiveIntentClassification (sv)": 45.95,
                    "MassiveIntentClassification (sw)": 31.89,
                    "MassiveIntentClassification (ta)": 29.63,
                    "MassiveIntentClassification (te)": 36.03,
                    "MassiveIntentClassification (th)": 43.39,
                    "MassiveIntentClassification (tl)": 29.73,
                    "MassiveIntentClassification (tr)": 43.93,
                    "MassiveIntentClassification (ur)": 26.11,
                    "MassiveIntentClassification (vi)": 44.33,
                    "MassiveIntentClassification (zh-CN)": 40.62,
                    "MassiveIntentClassification (zh-TW)": 32.93,
                    "MassiveScenarioClassification (af)": 47.1,
                    "MassiveScenarioClassification (am)": 17.7,
                    "MassiveScenarioClassification (ar)": 45.21,
                    "MassiveScenarioClassification (az)": 28.21,
                    "MassiveScenarioClassification (bn)": 50.52,
                    "MassiveScenarioClassification (cy)": 22.58,
                    "MassiveScenarioClassification (da)": 54.87,
                    "MassiveScenarioClassification (de)": 54.34,
                    "MassiveScenarioClassification (el)": 55.47,
                    "MassiveScenarioClassification (en)": 55.92,
                    "MassiveScenarioClassification (es)": 52.77,
                    "MassiveScenarioClassification (fa)": 52.5,
                    "MassiveScenarioClassification (fi)": 52.63,
                    "MassiveScenarioClassification (fr)": 54.32,
                    "MassiveScenarioClassification (he)": 52.41,
                    "MassiveScenarioClassification (hi)": 47.37,
                    "MassiveScenarioClassification (hu)": 53.43,
                    "MassiveScenarioClassification (hy)": 33.57,
                    "MassiveScenarioClassification (id)": 54.38,
                    "MassiveScenarioClassification (is)": 49.78,
                    "MassiveScenarioClassification (it)": 54.84,
                    "MassiveScenarioClassification (ja)": 54.12,
                    "MassiveScenarioClassification (jv)": 32.71,
                    "MassiveScenarioClassification (ka)": 26.92,
                    "MassiveScenarioClassification (km)": 27.23,
                    "MassiveScenarioClassification (kn)": 10.06,
                    "MassiveScenarioClassification (ko)": 52.01,
                    "MassiveScenarioClassification (lv)": 44.82,
                    "MassiveScenarioClassification (ml)": 49.1,
                    "MassiveScenarioClassification (mn)": 21.51,
                    "MassiveScenarioClassification (ms)": 53.6,
                    "MassiveScenarioClassification (my)": 29.72,
                    "MassiveScenarioClassification (nb)": 43.9,
                    "MassiveScenarioClassification (nl)": 53.33,
                    "MassiveScenarioClassification (pl)": 52.92,
                    "MassiveScenarioClassification (pt)": 53.41,
                    "MassiveScenarioClassification (ro)": 50.48,
                    "MassiveScenarioClassification (ru)": 51.84,
                    "MassiveScenarioClassification (sl)": 51.29,
                    "MassiveScenarioClassification (sq)": 55.65,
                    "MassiveScenarioClassification (sv)": 54.64,
                    "MassiveScenarioClassification (sw)": 42.04,
                    "MassiveScenarioClassification (ta)": 36.72,
                    "MassiveScenarioClassification (te)": 42.08,
                    "MassiveScenarioClassification (th)": 52.15,
                    "MassiveScenarioClassification (tl)": 37.34,
                    "MassiveScenarioClassification (tr)": 52.56,
                    "MassiveScenarioClassification (ur)": 32.6,
                    "MassiveScenarioClassification (vi)": 50.97,
                    "MassiveScenarioClassification (zh-CN)": 50.22,
                    "MassiveScenarioClassification (zh-TW)": 42.32,
                    "ToxicConversationsClassification": 54.05,
                    "TweetSentimentExtractionClassification": 48.73
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://github.com/facebookresearch/LASER\">LASER2</a>",
                    "AlloProfClusteringP2P": 48.45,
                    "AlloProfClusteringS2S": 25.81,
                    "ArxivClusteringP2P": 17.77,
                    "ArxivClusteringS2S": 12.39,
                    "BiorxivClusteringP2P": 12.4,
                    "BiorxivClusteringS2S": 8.83,
                    "HALClusteringS2S": 11.52,
                    "MLSUMClusteringP2P": 34.53,
                    "MLSUMClusteringS2S": 27.35,
                    "MasakhaNEWSClusteringP2P (fra)": 32.04,
                    "MasakhaNEWSClusteringS2S (fra)": 29.77,
                    "MedrxivClusteringP2P": 17.91,
                    "MedrxivClusteringS2S": 16.63,
                    "RedditClustering": 9.96,
                    "RedditClusteringP2P": 26.42,
                    "StackExchangeClustering": 15.79,
                    "StackExchangeClusteringP2P": 18.63,
                    "TwentyNewsgroupsClustering": 11.38
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://github.com/facebookresearch/LASER\">LASER2</a>",
                    "OpusparcusPC (fr)": 93.77,
                    "PawsX (fr)": 69.53,
                    "SprintDuplicateQuestions": 65.54,
                    "TwitterSemEval2015": 59.57,
                    "TwitterURLCorpus": 81.47
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://github.com/facebookresearch/LASER\">LASER2</a>",
                    "AlloprofReranking": 35.29,
                    "AskUbuntuDupQuestions": 48.99,
                    "MindSmallReranking": 24.79,
                    "SciDocsRR": 54.99,
                    "StackOverflowDupQuestions": 36.98,
                    "SyntecReranking": 55.93
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://github.com/facebookresearch/LASER\">LASER2</a>",
                    "AlloprofRetrieval": 3.1,
                    "ArguAna": 12.86,
                    "BSARDRetrieval": 0.36,
                    "CQADupstackRetrieval": 4.12,
                    "ClimateFEVER": 0.36,
                    "DBPedia": 1.53,
                    "FEVER": 0.77,
                    "FiQA2018": 1.73,
                    "HotpotQA": 5.5,
                    "MSMARCO": 1.09,
                    "MintakaRetrieval (fr)": 6.31,
                    "NFCorpus": 2.44,
                    "NQ": 0.64,
                    "QuoraRetrieval": 71.14,
                    "SCIDOCS": 0.78,
                    "SciFact": 4.04,
                    "SyntecRetrieval": 28.58,
                    "TRECCOVID": 10.97,
                    "Touche2020": 1.06,
                    "XPQARetrieval (fr)": 42.59
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://github.com/facebookresearch/LASER\">LASER2</a>",
                    "BIOSSES": 62.01,
                    "SICK-R": 62.86,
                    "SICKFr": 64.95,
                    "STS12": 62.6,
                    "STS13": 59.62,
                    "STS14": 57.03,
                    "STS15": 71.57,
                    "STS16": 70.75,
                    "STS17 (ar-ar)": 67.47,
                    "STS17 (en-ar)": 65.05,
                    "STS17 (en-de)": 66.66,
                    "STS17 (en-en)": 76.73,
                    "STS17 (en-tr)": 70.05,
                    "STS17 (es-en)": 55.3,
                    "STS17 (es-es)": 79.67,
                    "STS17 (fr-en)": 70.82,
                    "STS17 (it-en)": 70.98,
                    "STS17 (ko-ko)": 70.52,
                    "STS17 (nl-en)": 68.12,
                    "STS22 (ar)": 42.57,
                    "STS22 (de)": 25.69,
                    "STS22 (de-en)": 32.35,
                    "STS22 (de-fr)": 37.41,
                    "STS22 (de-pl)": 15.67,
                    "STS22 (en)": 39.76,
                    "STS22 (es)": 54.92,
                    "STS22 (es-en)": 54.34,
                    "STS22 (es-it)": 42.21,
                    "STS22 (fr)": 58.61,
                    "STS22 (fr-pl)": 39.44,
                    "STS22 (it)": 60.31,
                    "STS22 (pl)": 18.34,
                    "STS22 (pl-en)": 53.63,
                    "STS22 (ru)": 39.24,
                    "STS22 (tr)": 36.97,
                    "STS22 (zh)": 49.41,
                    "STS22 (zh-en)": 46.19,
                    "STSBenchmark": 69.77,
                    "STSBenchmarkMultilingualSTS (fr)": 69.82
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://github.com/facebookresearch/LASER\">LASER2</a>",
                    "SummEval": 26.8,
                    "SummEvalFr": 31.56
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://github.com/facebookresearch/LASER\">LASER2</a>"
                }
            ]
        }
    },
    "luotuo-bert-medium": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/silk-road/luotuo-bert-medium\">luotuo-bert-medium</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/silk-road/luotuo-bert-medium\">luotuo-bert-medium</a>",
                    "AmazonReviewsClassification (zh)": 34.46,
                    "IFlyTek": 41.75,
                    "JDReview": 79.68,
                    "MassiveIntentClassification (zh-CN)": 57.47,
                    "MassiveScenarioClassification (zh-CN)": 65.32,
                    "MultilingualSentiment": 61.21,
                    "OnlineShopping": 84.3,
                    "TNews": 45.22,
                    "Waimai": 79.57
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/silk-road/luotuo-bert-medium\">luotuo-bert-medium</a>",
                    "CLSClusteringP2P": 37.01,
                    "CLSClusteringS2S": 33.46,
                    "ThuNewsClusteringP2P": 58.83,
                    "ThuNewsClusteringS2S": 48.26
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/silk-road/luotuo-bert-medium\">luotuo-bert-medium</a>",
                    "Cmnli": 72.55,
                    "Ocnli": 60.7
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/silk-road/luotuo-bert-medium\">luotuo-bert-medium</a>",
                    "CMedQAv1": 57.82,
                    "CMedQAv2": 58.88,
                    "MMarcoReranking": 14.55,
                    "T2Reranking": 65.76
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/silk-road/luotuo-bert-medium\">luotuo-bert-medium</a>",
                    "CmedqaRetrieval": 18.04,
                    "CovidRetrieval": 55.48,
                    "DuRetrieval": 59.36,
                    "EcomRetrieval": 40.48,
                    "MMarcoRetrieval": 55.31,
                    "MedicalRetrieval": 29.8,
                    "T2Retrieval": 58.67,
                    "VideoRetrieval": 38.04
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/silk-road/luotuo-bert-medium\">luotuo-bert-medium</a>",
                    "AFQMC": 22.24,
                    "ATEC": 30.84,
                    "BQ": 43.33,
                    "LCQMC": 66.74,
                    "PAWSX": 12.31,
                    "QBQTC": 27.2,
                    "STS22 (zh)": 66.4,
                    "STSB": 73.22
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/silk-road/luotuo-bert-medium\">luotuo-bert-medium</a>"
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/silk-road/luotuo-bert-medium\">luotuo-bert-medium</a>"
                }
            ]
        }
    },
    "LLM2Vec-Mistral-unsupervised": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/McGill-NLP/LLM2Vec-Mistral-7B-Instruct-v2-mntp-unsup-simcse\">LLM2Vec-Mistral-unsupervised</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/McGill-NLP/LLM2Vec-Mistral-7B-Instruct-v2-mntp-unsup-simcse\">LLM2Vec-Mistral-unsupervised</a>",
                    "AmazonCounterfactualClassification (en)": 76.94,
                    "AmazonPolarityClassification": 85.29,
                    "AmazonReviewsClassification (en)": 47.09,
                    "Banking77Classification": 86.16,
                    "EmotionClassification": 48.88,
                    "ImdbClassification": 77.95,
                    "MTOPDomainClassification (en)": 95.48,
                    "MTOPIntentClassification (en)": 82.84,
                    "MassiveIntentClassification (en)": 76.65,
                    "MassiveScenarioClassification (en)": 79.99,
                    "ToxicConversationsClassification": 70.71,
                    "TweetSentimentExtractionClassification": 60.9
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/McGill-NLP/LLM2Vec-Mistral-7B-Instruct-v2-mntp-unsup-simcse\">LLM2Vec-Mistral-unsupervised</a>",
                    "ArxivClusteringP2P": 47.56,
                    "ArxivClusteringS2S": 39.92,
                    "BiorxivClusteringP2P": 36.14,
                    "BiorxivClusteringS2S": 30.26,
                    "MedrxivClusteringP2P": 30.11,
                    "MedrxivClusteringS2S": 26.93,
                    "RedditClustering": 41.83,
                    "RedditClusteringP2P": 62.08,
                    "StackExchangeClustering": 67.34,
                    "StackExchangeClusteringP2P": 34.5,
                    "TwentyNewsgroupsClustering": 30.26
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/McGill-NLP/LLM2Vec-Mistral-7B-Instruct-v2-mntp-unsup-simcse\">LLM2Vec-Mistral-unsupervised</a>",
                    "SprintDuplicateQuestions": 91.3,
                    "TwitterSemEval2015": 68.76,
                    "TwitterURLCorpus": 82.76
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/McGill-NLP/LLM2Vec-Mistral-7B-Instruct-v2-mntp-unsup-simcse\">LLM2Vec-Mistral-unsupervised</a>",
                    "AskUbuntuDupQuestions": 58.6,
                    "MindSmallReranking": 29.73,
                    "SciDocsRR": 77.81,
                    "StackOverflowDupQuestions": 49.8
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/McGill-NLP/LLM2Vec-Mistral-7B-Instruct-v2-mntp-unsup-simcse\">LLM2Vec-Mistral-unsupervised</a>",
                    "ArguAna": 51.0,
                    "CQADupstackRetrieval": 33.37,
                    "ClimateFEVER": 22.97,
                    "DBPedia": 25.48,
                    "FEVER": 45.11,
                    "FiQA2018": 27.24,
                    "HotpotQA": 54.54,
                    "MSMARCO": 19.13,
                    "NFCorpus": 27.16,
                    "NQ": 34.16,
                    "QuoraRetrieval": 84.4,
                    "SCIDOCS": 15.35,
                    "SciFact": 68.68,
                    "TRECCOVID": 55.67,
                    "Touche2020": 6.54
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/McGill-NLP/LLM2Vec-Mistral-7B-Instruct-v2-mntp-unsup-simcse\">LLM2Vec-Mistral-unsupervised</a>",
                    "BIOSSES": 83.29,
                    "SICK-R": 75.55,
                    "STS12": 67.65,
                    "STS13": 83.9,
                    "STS14": 76.97,
                    "STS15": 83.8,
                    "STS16": 81.91,
                    "STS17 (en-en)": 85.58,
                    "STS22 (en)": 65.93,
                    "STSBenchmark": 80.42
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/McGill-NLP/LLM2Vec-Mistral-7B-Instruct-v2-mntp-unsup-simcse\">LLM2Vec-Mistral-unsupervised</a>",
                    "SummEval": 30.19
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/McGill-NLP/LLM2Vec-Mistral-7B-Instruct-v2-mntp-unsup-simcse\">LLM2Vec-Mistral-unsupervised</a>"
                }
            ]
        }
    },
    "dfm-encoder-large-v1": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/chcaa/dfm-encoder-large-v1\">dfm-encoder-large-v1</a>",
                    "BornholmBitextMining": 11.65
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/chcaa/dfm-encoder-large-v1\">dfm-encoder-large-v1</a>",
                    "AngryTweetsClassification": 53.8,
                    "DKHateClassification": 60.09,
                    "DanishPoliticalCommentsClassification": 36.6,
                    "LccSentimentClassification": 57.33,
                    "MassiveIntentClassification (da)": 60.55,
                    "MassiveIntentClassification (nb)": 52.49,
                    "MassiveIntentClassification (sv)": 49.74,
                    "MassiveScenarioClassification (da)": 64.16,
                    "MassiveScenarioClassification (nb)": 54.59,
                    "MassiveScenarioClassification (sv)": 50.1,
                    "NoRecClassification": 48.3,
                    "NordicLangClassification": 77.68,
                    "NorwegianParliament": 58.78,
                    "ScalaDaClassification": 63.08,
                    "ScalaNbClassification": 58.95
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/chcaa/dfm-encoder-large-v1\">dfm-encoder-large-v1</a>"
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/chcaa/dfm-encoder-large-v1\">dfm-encoder-large-v1</a>"
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/chcaa/dfm-encoder-large-v1\">dfm-encoder-large-v1</a>"
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/chcaa/dfm-encoder-large-v1\">dfm-encoder-large-v1</a>"
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/chcaa/dfm-encoder-large-v1\">dfm-encoder-large-v1</a>"
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/chcaa/dfm-encoder-large-v1\">dfm-encoder-large-v1</a>"
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/chcaa/dfm-encoder-large-v1\">dfm-encoder-large-v1</a>"
                }
            ]
        }
    },
    "text-search-curie-001": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/introducing-text-and-code-embeddings\">text-search-curie-001</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/introducing-text-and-code-embeddings\">text-search-curie-001</a>"
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/introducing-text-and-code-embeddings\">text-search-curie-001</a>"
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/introducing-text-and-code-embeddings\">text-search-curie-001</a>"
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/introducing-text-and-code-embeddings\">text-search-curie-001</a>"
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/introducing-text-and-code-embeddings\">text-search-curie-001</a>",
                    "ArguAna": 46.98,
                    "ClimateFEVER": 19.4,
                    "FEVER": 75.6,
                    "FiQA2018": 45.21,
                    "HotpotQA": 64.8,
                    "NFCorpus": 38.01,
                    "QuoraRetrieval": 67.7,
                    "SCIDOCS": 17.74,
                    "SciFact": 74.35,
                    "TRECCOVID": 56.14,
                    "Touche2020": 30.9
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/introducing-text-and-code-embeddings\">text-search-curie-001</a>"
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/introducing-text-and-code-embeddings\">text-search-curie-001</a>"
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/introducing-text-and-code-embeddings\">text-search-curie-001</a>"
                }
            ]
        }
    },
    "silver-retriever-base-v1": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/ipipan/silver-retriever-base-v1\">silver-retriever-base-v1</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/ipipan/silver-retriever-base-v1\">silver-retriever-base-v1</a>",
                    "AllegroReviews": 33.35,
                    "CBD": 68.51,
                    "MassiveIntentClassification (pl)": 66.63,
                    "MassiveScenarioClassification (pl)": 69.97,
                    "PAC": 66.26,
                    "PolEmo2.0-IN": 63.52,
                    "PolEmo2.0-OUT": 44.7
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/ipipan/silver-retriever-base-v1\">silver-retriever-base-v1</a>",
                    "8TagsClustering": 31.49
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/ipipan/silver-retriever-base-v1\">silver-retriever-base-v1</a>",
                    "CDSC-E": 67.35,
                    "PPC": 85.33,
                    "PSC": 98.46,
                    "SICK-E-PL": 58.19
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/ipipan/silver-retriever-base-v1\">silver-retriever-base-v1</a>"
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/ipipan/silver-retriever-base-v1\">silver-retriever-base-v1</a>",
                    "ArguAna-PL": 44.12,
                    "DBPedia-PL": 26.32,
                    "FiQA-PL": 24.95,
                    "HotpotQA-PL": 45.13,
                    "MSMARCO-PL": 25.47,
                    "NFCorpus-PL": 28.55,
                    "NQ-PL": 37.9,
                    "Quora-PL": 77.98,
                    "SCIDOCS-PL": 10.9,
                    "SciFact-PL": 54.44,
                    "TRECCOVID-PL": 46.98
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/ipipan/silver-retriever-base-v1\">silver-retriever-base-v1</a>",
                    "CDSC-R": 89.09,
                    "SICK-R-PL": 67.26,
                    "STS22 (pl)": 38.69
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/ipipan/silver-retriever-base-v1\">silver-retriever-base-v1</a>"
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/ipipan/silver-retriever-base-v1\">silver-retriever-base-v1</a>"
                }
            ]
        }
    },
    "bert-base-uncased": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/bert-base-uncased\">bert-base-uncased</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/bert-base-uncased\">bert-base-uncased</a>",
                    "AmazonCounterfactualClassification (en)": 74.25,
                    "AmazonPolarityClassification": 71.33,
                    "AmazonReviewsClassification (en)": 33.56,
                    "Banking77Classification": 63.41,
                    "EmotionClassification": 35.28,
                    "ImdbClassification": 65.35,
                    "MTOPDomainClassification (en)": 82.63,
                    "MTOPIntentClassification (en)": 68.14,
                    "MassiveIntentClassification (en)": 59.88,
                    "MassiveScenarioClassification (en)": 64.28,
                    "ToxicConversationsClassification": 70.0,
                    "TweetSentimentExtractionClassification": 51.81
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/bert-base-uncased\">bert-base-uncased</a>",
                    "ArxivClusteringP2P": 35.19,
                    "ArxivClusteringS2S": 27.51,
                    "BiorxivClusteringP2P": 30.12,
                    "BiorxivClusteringS2S": 24.77,
                    "MedrxivClusteringP2P": 26.09,
                    "MedrxivClusteringS2S": 23.6,
                    "RedditClustering": 27.24,
                    "RedditClusteringP2P": 43.32,
                    "StackExchangeClustering": 43.58,
                    "StackExchangeClusteringP2P": 26.55,
                    "TwentyNewsgroupsClustering": 23.35
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/bert-base-uncased\">bert-base-uncased</a>",
                    "SprintDuplicateQuestions": 36.81,
                    "TwitterSemEval2015": 55.9,
                    "TwitterURLCorpus": 76.29
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/bert-base-uncased\">bert-base-uncased</a>",
                    "AskUbuntuDupQuestions": 45.84,
                    "MindSmallReranking": 28.37,
                    "SciDocsRR": 64.94,
                    "StackOverflowDupQuestions": 34.62
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/bert-base-uncased\">bert-base-uncased</a>",
                    "ArguAna": 28.29,
                    "CQADupstackRetrieval": 5.51,
                    "ClimateFEVER": 5.41,
                    "DBPedia": 4.13,
                    "FEVER": 3.3,
                    "FiQA2018": 2.19,
                    "HotpotQA": 8.26,
                    "MSMARCO": 1.91,
                    "NFCorpus": 4.3,
                    "NQ": 2.62,
                    "QuoraRetrieval": 61.03,
                    "SCIDOCS": 2.82,
                    "SciFact": 13.34,
                    "TRECCOVID": 14.74,
                    "Touche2020": 0.97
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/bert-base-uncased\">bert-base-uncased</a>",
                    "BIOSSES": 54.7,
                    "SICK-R": 58.65,
                    "STS12": 30.87,
                    "STS13": 59.89,
                    "STS14": 47.73,
                    "STS15": 60.29,
                    "STS16": 63.73,
                    "STS17 (en-en)": 64.1,
                    "STS22 (en)": 56.37,
                    "STSBenchmark": 47.29
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/bert-base-uncased\">bert-base-uncased</a>",
                    "SummEval": 29.82
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/bert-base-uncased\">bert-base-uncased</a>"
                }
            ]
        }
    },
    "text-search-davinci-001": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/introducing-text-and-code-embeddings\">text-search-davinci-001</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/introducing-text-and-code-embeddings\">text-search-davinci-001</a>"
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/introducing-text-and-code-embeddings\">text-search-davinci-001</a>"
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/introducing-text-and-code-embeddings\">text-search-davinci-001</a>"
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/introducing-text-and-code-embeddings\">text-search-davinci-001</a>"
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/introducing-text-and-code-embeddings\">text-search-davinci-001</a>",
                    "ArguAna": 43.5,
                    "ClimateFEVER": 22.3,
                    "FEVER": 77.5,
                    "FiQA2018": 51.2,
                    "HotpotQA": 68.8,
                    "NFCorpus": 40.7,
                    "QuoraRetrieval": 63.8,
                    "SciFact": 75.4,
                    "TRECCOVID": 64.9,
                    "Touche2020": 29.1
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/introducing-text-and-code-embeddings\">text-search-davinci-001</a>"
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/introducing-text-and-code-embeddings\">text-search-davinci-001</a>"
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/introducing-text-and-code-embeddings\">text-search-davinci-001</a>"
                }
            ]
        }
    },
    "text-similarity-babbage-001": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/introducing-text-and-code-embeddings\">text-similarity-babbage-001</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/introducing-text-and-code-embeddings\">text-similarity-babbage-001</a>"
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/introducing-text-and-code-embeddings\">text-similarity-babbage-001</a>",
                    "RedditClustering": 45.64,
                    "StackExchangeClustering": 53.01,
                    "TwentyNewsgroupsClustering": 42.01
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/introducing-text-and-code-embeddings\">text-similarity-babbage-001</a>",
                    "SprintDuplicateQuestions": 76.46,
                    "TwitterSemEval2015": 70.85,
                    "TwitterURLCorpus": 85.08
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/introducing-text-and-code-embeddings\">text-similarity-babbage-001</a>",
                    "AskUbuntuDupQuestions": 54.68,
                    "SciDocsRR": 72.78,
                    "StackOverflowDupQuestions": 40.65
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/introducing-text-and-code-embeddings\">text-similarity-babbage-001</a>"
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/introducing-text-and-code-embeddings\">text-similarity-babbage-001</a>",
                    "BIOSSES": 78.12,
                    "SICK-R": 77.02,
                    "STSBenchmark": 84.32
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/introducing-text-and-code-embeddings\">text-similarity-babbage-001</a>"
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/introducing-text-and-code-embeddings\">text-similarity-babbage-001</a>"
                }
            ]
        }
    },
    "instructor-xl": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/hkunlp/instructor-xl\">instructor-xl</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/hkunlp/instructor-xl\">instructor-xl</a>"
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/hkunlp/instructor-xl\">instructor-xl</a>"
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/hkunlp/instructor-xl\">instructor-xl</a>"
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/hkunlp/instructor-xl\">instructor-xl</a>"
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/hkunlp/instructor-xl\">instructor-xl</a>"
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/hkunlp/instructor-xl\">instructor-xl</a>"
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/hkunlp/instructor-xl\">instructor-xl</a>"
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/hkunlp/instructor-xl\">instructor-xl</a>",
                    "Core17InstructionRetrieval": 0.69,
                    "News21InstructionRetrieval": -0.9,
                    "Robust04InstructionRetrieval": -8.08
                }
            ]
        }
    },
    "instructor-base": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/hkunlp/instructor-base\">instructor-base</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/hkunlp/instructor-base\">instructor-base</a>"
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/hkunlp/instructor-base\">instructor-base</a>"
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/hkunlp/instructor-base\">instructor-base</a>"
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/hkunlp/instructor-base\">instructor-base</a>"
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/hkunlp/instructor-base\">instructor-base</a>"
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/hkunlp/instructor-base\">instructor-base</a>"
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/hkunlp/instructor-base\">instructor-base</a>"
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/hkunlp/instructor-base\">instructor-base</a>",
                    "Core17InstructionRetrieval": -1.09,
                    "News21InstructionRetrieval": -1.78,
                    "Robust04InstructionRetrieval": -10.42
                }
            ]
        }
    },
    "llama-2-7b-chat": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/meta-llama/Llama-2-7b-chat-hf\">llama-2-7b-chat</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/meta-llama/Llama-2-7b-chat-hf\">llama-2-7b-chat</a>"
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/meta-llama/Llama-2-7b-chat-hf\">llama-2-7b-chat</a>"
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/meta-llama/Llama-2-7b-chat-hf\">llama-2-7b-chat</a>"
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/meta-llama/Llama-2-7b-chat-hf\">llama-2-7b-chat</a>"
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/meta-llama/Llama-2-7b-chat-hf\">llama-2-7b-chat</a>"
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/meta-llama/Llama-2-7b-chat-hf\">llama-2-7b-chat</a>"
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/meta-llama/Llama-2-7b-chat-hf\">llama-2-7b-chat</a>"
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/meta-llama/Llama-2-7b-chat-hf\">llama-2-7b-chat</a>",
                    "Core17InstructionRetrieval": 2.84,
                    "News21InstructionRetrieval": 0.23,
                    "Robust04InstructionRetrieval": 2.0
                }
            ]
        }
    },
    "st-polish-paraphrase-from-distilroberta": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sdadas/st-polish-paraphrase-from-distilroberta\">st-polish-paraphrase-from-distilroberta</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sdadas/st-polish-paraphrase-from-distilroberta\">st-polish-paraphrase-from-distilroberta</a>",
                    "AllegroReviews": 34.5,
                    "CBD": 70.27,
                    "MassiveIntentClassification (pl)": 64.81,
                    "MassiveScenarioClassification (pl)": 70.01,
                    "PAC": 64.6,
                    "PolEmo2.0-IN": 67.06,
                    "PolEmo2.0-OUT": 38.58
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sdadas/st-polish-paraphrase-from-distilroberta\">st-polish-paraphrase-from-distilroberta</a>",
                    "8TagsClustering": 31.68
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sdadas/st-polish-paraphrase-from-distilroberta\">st-polish-paraphrase-from-distilroberta</a>",
                    "CDSC-E": 75.99,
                    "PPC": 93.29,
                    "PSC": 99.1,
                    "SICK-E-PL": 79.63
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sdadas/st-polish-paraphrase-from-distilroberta\">st-polish-paraphrase-from-distilroberta</a>"
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sdadas/st-polish-paraphrase-from-distilroberta\">st-polish-paraphrase-from-distilroberta</a>",
                    "ArguAna-PL": 49.42,
                    "DBPedia-PL": 19.82,
                    "FiQA-PL": 19.58,
                    "HotpotQA-PL": 23.47,
                    "MSMARCO-PL": 16.51,
                    "NFCorpus-PL": 22.49,
                    "NQ-PL": 19.83,
                    "Quora-PL": 81.17,
                    "SCIDOCS-PL": 12.15,
                    "SciFact-PL": 49.49,
                    "TRECCOVID-PL": 38.97
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sdadas/st-polish-paraphrase-from-distilroberta\">st-polish-paraphrase-from-distilroberta</a>",
                    "CDSC-R": 89.62,
                    "SICK-R-PL": 76.37,
                    "STS22 (pl)": 40.36
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sdadas/st-polish-paraphrase-from-distilroberta\">st-polish-paraphrase-from-distilroberta</a>"
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sdadas/st-polish-paraphrase-from-distilroberta\">st-polish-paraphrase-from-distilroberta</a>"
                }
            ]
        }
    },
    "universal-sentence-encoder-multilingual-3": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/vprelovac/universal-sentence-encoder-multilingual-3\">universal-sentence-encoder-multilingual-3</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/vprelovac/universal-sentence-encoder-multilingual-3\">universal-sentence-encoder-multilingual-3</a>",
                    "AmazonReviewsClassification (fr)": 33.51,
                    "MTOPDomainClassification (fr)": 85.5,
                    "MTOPIntentClassification (fr)": 53.98,
                    "MasakhaNEWSClassification (fra)": 82.06,
                    "MassiveIntentClassification (fr)": 61.19,
                    "MassiveScenarioClassification (fr)": 70.22
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/vprelovac/universal-sentence-encoder-multilingual-3\">universal-sentence-encoder-multilingual-3</a>",
                    "AlloProfClusteringP2P": 56.9,
                    "AlloProfClusteringS2S": 37.84,
                    "HALClusteringS2S": 18.95,
                    "MLSUMClusteringP2P": 43.9,
                    "MLSUMClusteringS2S": 35.5,
                    "MasakhaNEWSClusteringP2P (fra)": 60.57,
                    "MasakhaNEWSClusteringS2S (fra)": 40.31
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/vprelovac/universal-sentence-encoder-multilingual-3\">universal-sentence-encoder-multilingual-3</a>",
                    "OpusparcusPC (fr)": 91.46,
                    "PawsX (fr)": 52.39
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/vprelovac/universal-sentence-encoder-multilingual-3\">universal-sentence-encoder-multilingual-3</a>",
                    "AlloprofReranking": 56.23,
                    "SyntecReranking": 73.85
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/vprelovac/universal-sentence-encoder-multilingual-3\">universal-sentence-encoder-multilingual-3</a>",
                    "AlloprofRetrieval": 35.27,
                    "BSARDRetrieval": 0.0,
                    "MintakaRetrieval (fr)": 26.12,
                    "SyntecRetrieval": 69.82,
                    "XPQARetrieval (fr)": 59.59
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/vprelovac/universal-sentence-encoder-multilingual-3\">universal-sentence-encoder-multilingual-3</a>",
                    "SICKFr": 71.37,
                    "STS22 (fr)": 77.91,
                    "STSBenchmarkMultilingualSTS (fr)": 75.48
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/vprelovac/universal-sentence-encoder-multilingual-3\">universal-sentence-encoder-multilingual-3</a>",
                    "SummEvalFr": 28.21
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/vprelovac/universal-sentence-encoder-multilingual-3\">universal-sentence-encoder-multilingual-3</a>"
                }
            ]
        }
    },
    "flan-t5-large": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/google/flan-t5-large\">flan-t5-large</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/google/flan-t5-large\">flan-t5-large</a>"
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/google/flan-t5-large\">flan-t5-large</a>"
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/google/flan-t5-large\">flan-t5-large</a>"
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/google/flan-t5-large\">flan-t5-large</a>"
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/google/flan-t5-large\">flan-t5-large</a>"
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/google/flan-t5-large\">flan-t5-large</a>"
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/google/flan-t5-large\">flan-t5-large</a>"
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/google/flan-t5-large\">flan-t5-large</a>",
                    "Core17InstructionRetrieval": 1.32,
                    "News21InstructionRetrieval": 8.95,
                    "Robust04InstructionRetrieval": 3.9
                }
            ]
        }
    },
    "text-similarity-curie-001": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/introducing-text-and-code-embeddings\">text-similarity-curie-001</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/introducing-text-and-code-embeddings\">text-similarity-curie-001</a>"
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/introducing-text-and-code-embeddings\">text-similarity-curie-001</a>",
                    "RedditClustering": 40.79,
                    "StackExchangeClustering": 55.14,
                    "TwentyNewsgroupsClustering": 37.64
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/introducing-text-and-code-embeddings\">text-similarity-curie-001</a>",
                    "SprintDuplicateQuestions": 79.85,
                    "TwitterSemEval2015": 69.45,
                    "TwitterURLCorpus": 84.06
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/introducing-text-and-code-embeddings\">text-similarity-curie-001</a>",
                    "AskUbuntuDupQuestions": 55.09,
                    "SciDocsRR": 70.93,
                    "StackOverflowDupQuestions": 42.42
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/introducing-text-and-code-embeddings\">text-similarity-curie-001</a>",
                    "FiQA2018": 5.14,
                    "NFCorpus": 19.96,
                    "QuoraRetrieval": 83.11,
                    "SciFact": 46.68,
                    "TRECCOVID": 7.61
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/introducing-text-and-code-embeddings\">text-similarity-curie-001</a>",
                    "BIOSSES": 77.46,
                    "SICK-R": 77.26,
                    "STSBenchmark": 83.02
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/introducing-text-and-code-embeddings\">text-similarity-curie-001</a>"
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/introducing-text-and-code-embeddings\">text-similarity-curie-001</a>"
                }
            ]
        }
    },
    "udever-bloom-1b1": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/izhx/udever-bloom-1b1\">udever-bloom-1b1</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/izhx/udever-bloom-1b1\">udever-bloom-1b1</a>",
                    "AmazonReviewsClassification (fr)": 35.12,
                    "MTOPDomainClassification (fr)": 69.24,
                    "MTOPIntentClassification (fr)": 51.25,
                    "MasakhaNEWSClassification (fra)": 80.83,
                    "MassiveIntentClassification (fr)": 43.21,
                    "MassiveScenarioClassification (fr)": 49.78
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/izhx/udever-bloom-1b1\">udever-bloom-1b1</a>",
                    "AlloProfClusteringP2P": 62.22,
                    "AlloProfClusteringS2S": 27.06,
                    "HALClusteringS2S": 13.86,
                    "MLSUMClusteringP2P": 44.11,
                    "MLSUMClusteringS2S": 30.47,
                    "MasakhaNEWSClusteringP2P (fra)": 40.2,
                    "MasakhaNEWSClusteringS2S (fra)": 27.35
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/izhx/udever-bloom-1b1\">udever-bloom-1b1</a>",
                    "OpusparcusPC (fr)": 85.54,
                    "PawsX (fr)": 61.99
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/izhx/udever-bloom-1b1\">udever-bloom-1b1</a>",
                    "AlloprofReranking": 39.13,
                    "SyntecReranking": 62.58
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/izhx/udever-bloom-1b1\">udever-bloom-1b1</a>",
                    "AlloprofRetrieval": 12.37,
                    "BSARDRetrieval": 0.0,
                    "MintakaRetrieval (fr)": 2.78,
                    "SyntecRetrieval": 40.57,
                    "XPQARetrieval (fr)": 33.82
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/izhx/udever-bloom-1b1\">udever-bloom-1b1</a>",
                    "SICKFr": 59.94,
                    "STS22 (fr)": 77.1,
                    "STSBenchmarkMultilingualSTS (fr)": 49.97
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/izhx/udever-bloom-1b1\">udever-bloom-1b1</a>",
                    "SummEvalFr": 29.48
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/izhx/udever-bloom-1b1\">udever-bloom-1b1</a>"
                }
            ]
        }
    },
    "text2vec-large-chinese": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/GanymedeNil/text2vec-large-chinese\">text2vec-large-chinese</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/GanymedeNil/text2vec-large-chinese\">text2vec-large-chinese</a>",
                    "AmazonReviewsClassification (zh)": 33.77,
                    "IFlyTek": 41.54,
                    "JDReview": 81.56,
                    "MassiveIntentClassification (zh-CN)": 63.23,
                    "MassiveScenarioClassification (zh-CN)": 68.45,
                    "MultilingualSentiment": 58.97,
                    "OnlineShopping": 83.51,
                    "TNews": 38.92,
                    "Waimai": 76.01
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/GanymedeNil/text2vec-large-chinese\">text2vec-large-chinese</a>",
                    "CLSClusteringP2P": 30.13,
                    "CLSClusteringS2S": 28.77,
                    "ThuNewsClusteringP2P": 35.05,
                    "ThuNewsClusteringS2S": 26.14
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/GanymedeNil/text2vec-large-chinese\">text2vec-large-chinese</a>",
                    "Cmnli": 77.67,
                    "Ocnli": 64.04
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/GanymedeNil/text2vec-large-chinese\">text2vec-large-chinese</a>",
                    "CMedQAv1": 58.92,
                    "CMedQAv2": 60.41,
                    "MMarcoReranking": 12.48,
                    "T2Reranking": 64.82
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/GanymedeNil/text2vec-large-chinese\">text2vec-large-chinese</a>",
                    "CmedqaRetrieval": 15.53,
                    "CovidRetrieval": 60.48,
                    "DuRetrieval": 51.87,
                    "EcomRetrieval": 37.58,
                    "MMarcoRetrieval": 45.96,
                    "MedicalRetrieval": 30.93,
                    "T2Retrieval": 50.52,
                    "VideoRetrieval": 42.65
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/GanymedeNil/text2vec-large-chinese\">text2vec-large-chinese</a>",
                    "AFQMC": 24.51,
                    "ATEC": 32.45,
                    "BQ": 44.22,
                    "LCQMC": 69.16,
                    "PAWSX": 14.55,
                    "QBQTC": 29.51,
                    "STS22 (zh)": 65.94,
                    "STSB": 79.45
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/GanymedeNil/text2vec-large-chinese\">text2vec-large-chinese</a>"
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/GanymedeNil/text2vec-large-chinese\">text2vec-large-chinese</a>"
                }
            ]
        }
    },
    "LLM2Vec-Llama-2-supervised": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/McGill-NLP/LLM2Vec-Llama-2-7b-chat-hf-mntp-supervised\">LLM2Vec-Llama-2-supervised</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/McGill-NLP/LLM2Vec-Llama-2-7b-chat-hf-mntp-supervised\">LLM2Vec-Llama-2-supervised</a>",
                    "AmazonCounterfactualClassification (en)": 82.22,
                    "AmazonPolarityClassification": 89.69,
                    "AmazonReviewsClassification (en)": 48.47,
                    "Banking77Classification": 88.17,
                    "EmotionClassification": 51.71,
                    "ImdbClassification": 85.78,
                    "MTOPDomainClassification (en)": 95.57,
                    "MTOPIntentClassification (en)": 82.81,
                    "MassiveIntentClassification (en)": 78.06,
                    "MassiveScenarioClassification (en)": 81.35,
                    "ToxicConversationsClassification": 71.01,
                    "TweetSentimentExtractionClassification": 61.11
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/McGill-NLP/LLM2Vec-Llama-2-7b-chat-hf-mntp-supervised\">LLM2Vec-Llama-2-supervised</a>",
                    "ArxivClusteringP2P": 43.14,
                    "ArxivClusteringS2S": 42.38,
                    "BiorxivClusteringP2P": 35.88,
                    "BiorxivClusteringS2S": 34.81,
                    "MedrxivClusteringP2P": 32.23,
                    "MedrxivClusteringS2S": 31.37,
                    "RedditClustering": 61.1,
                    "RedditClusteringP2P": 64.52,
                    "StackExchangeClustering": 67.98,
                    "StackExchangeClusteringP2P": 33.2,
                    "TwentyNewsgroupsClustering": 51.04
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/McGill-NLP/LLM2Vec-Llama-2-7b-chat-hf-mntp-supervised\">LLM2Vec-Llama-2-supervised</a>",
                    "SprintDuplicateQuestions": 96.83,
                    "TwitterSemEval2015": 80.7,
                    "TwitterURLCorpus": 86.56
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/McGill-NLP/LLM2Vec-Llama-2-7b-chat-hf-mntp-supervised\">LLM2Vec-Llama-2-supervised</a>",
                    "AskUbuntuDupQuestions": 63.13,
                    "MindSmallReranking": 31.34,
                    "SciDocsRR": 84.03,
                    "StackOverflowDupQuestions": 51.02
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/McGill-NLP/LLM2Vec-Llama-2-7b-chat-hf-mntp-supervised\">LLM2Vec-Llama-2-supervised</a>",
                    "ArguAna": 56.53,
                    "CQADupstackRetrieval": 45.94,
                    "ClimateFEVER": 30.7,
                    "DBPedia": 48.42,
                    "FEVER": 89.93,
                    "FiQA2018": 51.28,
                    "HotpotQA": 72.99,
                    "MSMARCO": 41.46,
                    "NFCorpus": 40.33,
                    "NQ": 61.24,
                    "QuoraRetrieval": 85.59,
                    "SCIDOCS": 21.05,
                    "SciFact": 77.3,
                    "TRECCOVID": 79.25,
                    "Touche2020": 16.92
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/McGill-NLP/LLM2Vec-Llama-2-7b-chat-hf-mntp-supervised\">LLM2Vec-Llama-2-supervised</a>",
                    "BIOSSES": 82.13,
                    "SICK-R": 83.01,
                    "STS12": 78.85,
                    "STS13": 86.84,
                    "STS14": 84.04,
                    "STS15": 88.72,
                    "STS16": 86.79,
                    "STS17 (en-en)": 90.63,
                    "STS22 (en)": 67.55,
                    "STSBenchmark": 88.72
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/McGill-NLP/LLM2Vec-Llama-2-7b-chat-hf-mntp-supervised\">LLM2Vec-Llama-2-supervised</a>",
                    "SummEval": 28.49
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/McGill-NLP/LLM2Vec-Llama-2-7b-chat-hf-mntp-supervised\">LLM2Vec-Llama-2-supervised</a>"
                }
            ]
        }
    },
    "bm25": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://en.wikipedia.org/wiki/Okapi_BM25\">bm25</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://en.wikipedia.org/wiki/Okapi_BM25\">bm25</a>"
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://en.wikipedia.org/wiki/Okapi_BM25\">bm25</a>"
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://en.wikipedia.org/wiki/Okapi_BM25\">bm25</a>"
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://en.wikipedia.org/wiki/Okapi_BM25\">bm25</a>"
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://en.wikipedia.org/wiki/Okapi_BM25\">bm25</a>"
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://en.wikipedia.org/wiki/Okapi_BM25\">bm25</a>"
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://en.wikipedia.org/wiki/Okapi_BM25\">bm25</a>"
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://en.wikipedia.org/wiki/Okapi_BM25\">bm25</a>",
                    "Core17InstructionRetrieval": -1.06,
                    "News21InstructionRetrieval": -2.15,
                    "Robust04InstructionRetrieval": -3.06
                }
            ]
        }
    },
    "nomic-embed-text-v1.5-256": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/nomic-ai/nomic-embed-text-v1.5\">nomic-embed-text-v1.5-256</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/nomic-ai/nomic-embed-text-v1.5\">nomic-embed-text-v1.5-256</a>",
                    "AmazonCounterfactualClassification (en)": 72.94,
                    "AmazonPolarityClassification": 91.35,
                    "AmazonReviewsClassification (en)": 45.73,
                    "Banking77Classification": 83.69,
                    "EmotionClassification": 45.88,
                    "ImdbClassification": 83.99,
                    "MTOPDomainClassification (en)": 91.68,
                    "MTOPIntentClassification (en)": 72.47,
                    "MassiveIntentClassification (en)": 71.76,
                    "MassiveScenarioClassification (en)": 75.67,
                    "ToxicConversationsClassification": 70.87,
                    "TweetSentimentExtractionClassification": 59.2
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/nomic-ai/nomic-embed-text-v1.5\">nomic-embed-text-v1.5-256</a>",
                    "ArxivClusteringP2P": 44.82,
                    "ArxivClusteringS2S": 35.32,
                    "BiorxivClusteringP2P": 38.19,
                    "BiorxivClusteringS2S": 31.83,
                    "MedrxivClusteringP2P": 34.08,
                    "MedrxivClusteringS2S": 30.98,
                    "RedditClustering": 54.92,
                    "RedditClusteringP2P": 60.23,
                    "StackExchangeClustering": 61.81,
                    "StackExchangeClusteringP2P": 34.03,
                    "TwentyNewsgroupsClustering": 48.56
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/nomic-ai/nomic-embed-text-v1.5\">nomic-embed-text-v1.5-256</a>",
                    "SprintDuplicateQuestions": 92.31,
                    "TwitterSemEval2015": 73.61,
                    "TwitterURLCorpus": 86.34
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/nomic-ai/nomic-embed-text-v1.5\">nomic-embed-text-v1.5-256</a>",
                    "AskUbuntuDupQuestions": 61.34,
                    "MindSmallReranking": 30.04,
                    "SciDocsRR": 79.4,
                    "StackOverflowDupQuestions": 49.95
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/nomic-ai/nomic-embed-text-v1.5\">nomic-embed-text-v1.5-256</a>",
                    "ArguAna": 45.44,
                    "CQADupstackRetrieval": 37.61,
                    "ClimateFEVER": 39.63,
                    "DBPedia": 39.42,
                    "FEVER": 84.4,
                    "FiQA2018": 35.0,
                    "HotpotQA": 67.78,
                    "MSMARCO": 41.38,
                    "NFCorpus": 32.54,
                    "NQ": 57.1,
                    "QuoraRetrieval": 87.65,
                    "SCIDOCS": 16.76,
                    "SciFact": 68.24,
                    "TRECCOVID": 80.65,
                    "Touche2020": 28.49
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/nomic-ai/nomic-embed-text-v1.5\">nomic-embed-text-v1.5-256</a>",
                    "BIOSSES": 81.58,
                    "SICK-R": 79.24,
                    "STS12": 78.16,
                    "STS13": 86.01,
                    "STS14": 81.25,
                    "STS15": 86.51,
                    "STS16": 84.24,
                    "STS17 (en-en)": 86.44,
                    "STS22 (en)": 65.14,
                    "STSBenchmark": 84.8
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/nomic-ai/nomic-embed-text-v1.5\">nomic-embed-text-v1.5-256</a>",
                    "SummEval": 30.05
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/nomic-ai/nomic-embed-text-v1.5\">nomic-embed-text-v1.5-256</a>"
                }
            ]
        }
    },
    "nomic-embed-text-v1.5-64": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/nomic-ai/nomic-embed-text-v1.5\">nomic-embed-text-v1.5-64</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/nomic-ai/nomic-embed-text-v1.5\">nomic-embed-text-v1.5-64</a>",
                    "AmazonCounterfactualClassification (en)": 66.85,
                    "AmazonPolarityClassification": 85.92,
                    "AmazonReviewsClassification (en)": 41.02,
                    "Banking77Classification": 80.63,
                    "EmotionClassification": 40.55,
                    "ImdbClassification": 76.6,
                    "MTOPDomainClassification (en)": 86.31,
                    "MTOPIntentClassification (en)": 62.77,
                    "MassiveIntentClassification (en)": 64.95,
                    "MassiveScenarioClassification (en)": 70.38,
                    "ToxicConversationsClassification": 66.53,
                    "TweetSentimentExtractionClassification": 55.23
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/nomic-ai/nomic-embed-text-v1.5\">nomic-embed-text-v1.5-64</a>",
                    "ArxivClusteringP2P": 41.8,
                    "ArxivClusteringS2S": 32.41,
                    "BiorxivClusteringP2P": 34.81,
                    "BiorxivClusteringS2S": 28.59,
                    "MedrxivClusteringP2P": 32.73,
                    "MedrxivClusteringS2S": 29.91,
                    "RedditClustering": 50.31,
                    "RedditClusteringP2P": 56.57,
                    "StackExchangeClustering": 57.99,
                    "StackExchangeClusteringP2P": 33.64,
                    "TwentyNewsgroupsClustering": 44.61
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/nomic-ai/nomic-embed-text-v1.5\">nomic-embed-text-v1.5-64</a>",
                    "SprintDuplicateQuestions": 90.06,
                    "TwitterSemEval2015": 71.68,
                    "TwitterURLCorpus": 85.03
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/nomic-ai/nomic-embed-text-v1.5\">nomic-embed-text-v1.5-64</a>",
                    "AskUbuntuDupQuestions": 60.79,
                    "MindSmallReranking": 29.7,
                    "SciDocsRR": 75.79,
                    "StackOverflowDupQuestions": 47.42
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/nomic-ai/nomic-embed-text-v1.5\">nomic-embed-text-v1.5-64</a>",
                    "ArguAna": 37.16,
                    "CQADupstackRetrieval": 28.72,
                    "ClimateFEVER": 31.48,
                    "DBPedia": 28.19,
                    "FEVER": 70.24,
                    "FiQA2018": 25.78,
                    "HotpotQA": 43.07,
                    "MSMARCO": 35.95,
                    "NFCorpus": 26.03,
                    "NQ": 45.54,
                    "QuoraRetrieval": 85.83,
                    "SCIDOCS": 12.09,
                    "SciFact": 52.71,
                    "TRECCOVID": 67.83,
                    "Touche2020": 23.13
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/nomic-ai/nomic-embed-text-v1.5\">nomic-embed-text-v1.5-64</a>",
                    "BIOSSES": 77.18,
                    "SICK-R": 78.76,
                    "STS12": 77.3,
                    "STS13": 84.18,
                    "STS14": 79.37,
                    "STS15": 84.69,
                    "STS16": 83.36,
                    "STS17 (en-en)": 85.73,
                    "STS22 (en)": 63.83,
                    "STSBenchmark": 83.46
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/nomic-ai/nomic-embed-text-v1.5\">nomic-embed-text-v1.5-64</a>",
                    "SummEval": 28.41
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/nomic-ai/nomic-embed-text-v1.5\">nomic-embed-text-v1.5-64</a>"
                }
            ]
        }
    },
    "flan-t5-base": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/google/flan-t5-base\">flan-t5-base</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/google/flan-t5-base\">flan-t5-base</a>"
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/google/flan-t5-base\">flan-t5-base</a>"
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/google/flan-t5-base\">flan-t5-base</a>"
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/google/flan-t5-base\">flan-t5-base</a>"
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/google/flan-t5-base\">flan-t5-base</a>"
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/google/flan-t5-base\">flan-t5-base</a>"
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/google/flan-t5-base\">flan-t5-base</a>"
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/google/flan-t5-base\">flan-t5-base</a>",
                    "Core17InstructionRetrieval": -3.31,
                    "News21InstructionRetrieval": -0.12,
                    "Robust04InstructionRetrieval": 5.35
                }
            ]
        }
    },
    "cross-en-de-roberta-sentence-transformer": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/T-Systems-onsite/cross-en-de-roberta-sentence-transformer\">cross-en-de-roberta-sentence-transformer</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/T-Systems-onsite/cross-en-de-roberta-sentence-transformer\">cross-en-de-roberta-sentence-transformer</a>"
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/T-Systems-onsite/cross-en-de-roberta-sentence-transformer\">cross-en-de-roberta-sentence-transformer</a>",
                    "BlurbsClusteringP2P": 30.82,
                    "BlurbsClusteringS2S": 12.69,
                    "TenKGnadClusteringP2P": 23.5,
                    "TenKGnadClusteringS2S": 10.94
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/T-Systems-onsite/cross-en-de-roberta-sentence-transformer\">cross-en-de-roberta-sentence-transformer</a>"
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/T-Systems-onsite/cross-en-de-roberta-sentence-transformer\">cross-en-de-roberta-sentence-transformer</a>"
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/T-Systems-onsite/cross-en-de-roberta-sentence-transformer\">cross-en-de-roberta-sentence-transformer</a>"
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/T-Systems-onsite/cross-en-de-roberta-sentence-transformer\">cross-en-de-roberta-sentence-transformer</a>"
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/T-Systems-onsite/cross-en-de-roberta-sentence-transformer\">cross-en-de-roberta-sentence-transformer</a>"
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/T-Systems-onsite/cross-en-de-roberta-sentence-transformer\">cross-en-de-roberta-sentence-transformer</a>"
                }
            ]
        }
    },
    "sentence-t5-large": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/sentence-t5-large\">sentence-t5-large</a>",
                    "BUCC (de-en)": 87.0,
                    "BUCC (fr-en)": 88.91,
                    "BUCC (ru-en)": 0.44,
                    "BUCC (zh-en)": 0.95,
                    "Tatoeba (afr-eng)": 23.7,
                    "Tatoeba (amh-eng)": 0.65,
                    "Tatoeba (ang-eng)": 30.98,
                    "Tatoeba (ara-eng)": 0.48,
                    "Tatoeba (arq-eng)": 0.68,
                    "Tatoeba (arz-eng)": 0.22,
                    "Tatoeba (ast-eng)": 55.3,
                    "Tatoeba (awa-eng)": 1.03,
                    "Tatoeba (aze-eng)": 5.83,
                    "Tatoeba (bel-eng)": 1.66,
                    "Tatoeba (ben-eng)": 0.0,
                    "Tatoeba (ber-eng)": 5.62,
                    "Tatoeba (bos-eng)": 12.23,
                    "Tatoeba (bre-eng)": 5.84,
                    "Tatoeba (bul-eng)": 1.35,
                    "Tatoeba (cat-eng)": 48.56,
                    "Tatoeba (cbk-eng)": 46.97,
                    "Tatoeba (ceb-eng)": 9.79,
                    "Tatoeba (ces-eng)": 6.0,
                    "Tatoeba (cha-eng)": 24.21,
                    "Tatoeba (cmn-eng)": 2.26,
                    "Tatoeba (cor-eng)": 4.03,
                    "Tatoeba (csb-eng)": 9.53,
                    "Tatoeba (cym-eng)": 9.17,
                    "Tatoeba (dan-eng)": 34.63,
                    "Tatoeba (deu-eng)": 89.31,
                    "Tatoeba (dsb-eng)": 9.68,
                    "Tatoeba (dtp-eng)": 4.66,
                    "Tatoeba (ell-eng)": 0.77,
                    "Tatoeba (epo-eng)": 26.88,
                    "Tatoeba (est-eng)": 5.19,
                    "Tatoeba (eus-eng)": 9.46,
                    "Tatoeba (fao-eng)": 21.59,
                    "Tatoeba (fin-eng)": 5.66,
                    "Tatoeba (fra-eng)": 79.71,
                    "Tatoeba (fry-eng)": 28.29,
                    "Tatoeba (gla-eng)": 2.34,
                    "Tatoeba (gle-eng)": 3.55,
                    "Tatoeba (glg-eng)": 56.25,
                    "Tatoeba (gsw-eng)": 24.25,
                    "Tatoeba (heb-eng)": 0.57,
                    "Tatoeba (hin-eng)": 0.12,
                    "Tatoeba (hrv-eng)": 10.29,
                    "Tatoeba (hsb-eng)": 9.52,
                    "Tatoeba (hun-eng)": 6.22,
                    "Tatoeba (hye-eng)": 0.81,
                    "Tatoeba (ido-eng)": 41.11,
                    "Tatoeba (ile-eng)": 54.0,
                    "Tatoeba (ina-eng)": 75.47,
                    "Tatoeba (ind-eng)": 13.02,
                    "Tatoeba (isl-eng)": 8.98,
                    "Tatoeba (ita-eng)": 67.23,
                    "Tatoeba (jav-eng)": 8.54,
                    "Tatoeba (jpn-eng)": 0.99,
                    "Tatoeba (kab-eng)": 1.85,
                    "Tatoeba (kat-eng)": 1.37,
                    "Tatoeba (kaz-eng)": 0.67,
                    "Tatoeba (khm-eng)": 0.56,
                    "Tatoeba (kor-eng)": 1.73,
                    "Tatoeba (kur-eng)": 9.23,
                    "Tatoeba (kzj-eng)": 5.38,
                    "Tatoeba (lat-eng)": 21.3,
                    "Tatoeba (lfn-eng)": 40.48,
                    "Tatoeba (lit-eng)": 5.38,
                    "Tatoeba (lvs-eng)": 6.83,
                    "Tatoeba (mal-eng)": 0.45,
                    "Tatoeba (mar-eng)": 0.01,
                    "Tatoeba (max-eng)": 16.44,
                    "Tatoeba (mhr-eng)": 0.33,
                    "Tatoeba (mkd-eng)": 0.4,
                    "Tatoeba (mon-eng)": 2.48,
                    "Tatoeba (nds-eng)": 34.66,
                    "Tatoeba (nld-eng)": 42.72,
                    "Tatoeba (nno-eng)": 24.08,
                    "Tatoeba (nob-eng)": 34.17,
                    "Tatoeba (nov-eng)": 55.01,
                    "Tatoeba (oci-eng)": 29.15,
                    "Tatoeba (orv-eng)": 0.2,
                    "Tatoeba (pam-eng)": 6.99,
                    "Tatoeba (pes-eng)": 0.9,
                    "Tatoeba (pms-eng)": 30.8,
                    "Tatoeba (pol-eng)": 12.81,
                    "Tatoeba (por-eng)": 73.45,
                    "Tatoeba (ron-eng)": 54.86,
                    "Tatoeba (rus-eng)": 2.43,
                    "Tatoeba (slk-eng)": 8.35,
                    "Tatoeba (slv-eng)": 9.3,
                    "Tatoeba (spa-eng)": 78.87,
                    "Tatoeba (sqi-eng)": 11.74,
                    "Tatoeba (srp-eng)": 5.83,
                    "Tatoeba (swe-eng)": 35.41,
                    "Tatoeba (swg-eng)": 28.18,
                    "Tatoeba (swh-eng)": 7.53,
                    "Tatoeba (tam-eng)": 0.36,
                    "Tatoeba (tat-eng)": 1.01,
                    "Tatoeba (tel-eng)": 1.1,
                    "Tatoeba (tgl-eng)": 12.4,
                    "Tatoeba (tha-eng)": 1.58,
                    "Tatoeba (tuk-eng)": 4.95,
                    "Tatoeba (tur-eng)": 6.45,
                    "Tatoeba (tzl-eng)": 37.82,
                    "Tatoeba (uig-eng)": 0.67,
                    "Tatoeba (ukr-eng)": 1.88,
                    "Tatoeba (urd-eng)": 0.0,
                    "Tatoeba (uzb-eng)": 4.79,
                    "Tatoeba (vie-eng)": 7.03,
                    "Tatoeba (war-eng)": 9.68,
                    "Tatoeba (wuu-eng)": 1.28,
                    "Tatoeba (xho-eng)": 10.64,
                    "Tatoeba (yid-eng)": 0.57,
                    "Tatoeba (yue-eng)": 0.88,
                    "Tatoeba (zsm-eng)": 14.67
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/sentence-t5-large\">sentence-t5-large</a>",
                    "AmazonCounterfactualClassification (de)": 67.97,
                    "AmazonCounterfactualClassification (en)": 75.51,
                    "AmazonCounterfactualClassification (en-ext)": 75.44,
                    "AmazonCounterfactualClassification (ja)": 45.72,
                    "AmazonPolarityClassification": 92.87,
                    "AmazonReviewsClassification (de)": 43.16,
                    "AmazonReviewsClassification (en)": 47.12,
                    "AmazonReviewsClassification (es)": 42.89,
                    "AmazonReviewsClassification (fr)": 41.48,
                    "AmazonReviewsClassification (ja)": 22.49,
                    "AmazonReviewsClassification (zh)": 22.12,
                    "Banking77Classification": 78.46,
                    "EmotionClassification": 51.74,
                    "ImdbClassification": 87.01,
                    "MTOPDomainClassification (de)": 80.56,
                    "MTOPDomainClassification (en)": 90.99,
                    "MTOPDomainClassification (es)": 80.78,
                    "MTOPDomainClassification (fr)": 79.6,
                    "MTOPDomainClassification (hi)": 21.22,
                    "MTOPDomainClassification (th)": 15.82,
                    "MTOPIntentClassification (de)": 52.5,
                    "MTOPIntentClassification (en)": 64.98,
                    "MTOPIntentClassification (es)": 52.07,
                    "MTOPIntentClassification (fr)": 47.73,
                    "MTOPIntentClassification (hi)": 3.74,
                    "MTOPIntentClassification (th)": 4.96,
                    "MasakhaNEWSClassification (fra)": 80.43,
                    "MassiveIntentClassification (af)": 38.41,
                    "MassiveIntentClassification (am)": 2.49,
                    "MassiveIntentClassification (ar)": 4.7,
                    "MassiveIntentClassification (az)": 31.77,
                    "MassiveIntentClassification (bn)": 2.77,
                    "MassiveIntentClassification (cy)": 31.69,
                    "MassiveIntentClassification (da)": 41.76,
                    "MassiveIntentClassification (de)": 52.01,
                    "MassiveIntentClassification (el)": 9.74,
                    "MassiveIntentClassification (en)": 71.78,
                    "MassiveIntentClassification (es)": 54.1,
                    "MassiveIntentClassification (fa)": 3.86,
                    "MassiveIntentClassification (fi)": 34.07,
                    "MassiveIntentClassification (fr)": 57.01,
                    "MassiveIntentClassification (he)": 2.14,
                    "MassiveIntentClassification (hi)": 2.97,
                    "MassiveIntentClassification (hu)": 32.01,
                    "MassiveIntentClassification (hy)": 3.17,
                    "MassiveIntentClassification (id)": 34.55,
                    "MassiveIntentClassification (is)": 32.0,
                    "MassiveIntentClassification (it)": 52.94,
                    "MassiveIntentClassification (ja)": 2.9,
                    "MassiveIntentClassification (jv)": 32.42,
                    "MassiveIntentClassification (ka)": 2.71,
                    "MassiveIntentClassification (km)": 5.5,
                    "MassiveIntentClassification (kn)": 2.41,
                    "MassiveIntentClassification (ko)": 2.57,
                    "MassiveIntentClassification (lv)": 35.09,
                    "MassiveIntentClassification (ml)": 2.95,
                    "MassiveIntentClassification (mn)": 18.33,
                    "MassiveIntentClassification (ms)": 29.69,
                    "MassiveIntentClassification (my)": 3.99,
                    "MassiveIntentClassification (nb)": 41.29,
                    "MassiveIntentClassification (nl)": 44.95,
                    "MassiveIntentClassification (pl)": 37.67,
                    "MassiveIntentClassification (pt)": 51.96,
                    "MassiveIntentClassification (ro)": 43.83,
                    "MassiveIntentClassification (ru)": 17.32,
                    "MassiveIntentClassification (sl)": 33.71,
                    "MassiveIntentClassification (sq)": 37.62,
                    "MassiveIntentClassification (sv)": 40.67,
                    "MassiveIntentClassification (sw)": 31.9,
                    "MassiveIntentClassification (ta)": 1.91,
                    "MassiveIntentClassification (te)": 2.54,
                    "MassiveIntentClassification (th)": 3.85,
                    "MassiveIntentClassification (tl)": 36.83,
                    "MassiveIntentClassification (tr)": 33.0,
                    "MassiveIntentClassification (ur)": 2.62,
                    "MassiveIntentClassification (vi)": 22.81,
                    "MassiveIntentClassification (zh-CN)": 1.09,
                    "MassiveIntentClassification (zh-TW)": 3.49,
                    "MassiveScenarioClassification (af)": 50.28,
                    "MassiveScenarioClassification (am)": 7.15,
                    "MassiveScenarioClassification (ar)": 12.12,
                    "MassiveScenarioClassification (az)": 39.68,
                    "MassiveScenarioClassification (bn)": 8.06,
                    "MassiveScenarioClassification (cy)": 38.01,
                    "MassiveScenarioClassification (da)": 51.44,
                    "MassiveScenarioClassification (de)": 62.71,
                    "MassiveScenarioClassification (el)": 17.19,
                    "MassiveScenarioClassification (en)": 73.16,
                    "MassiveScenarioClassification (es)": 59.56,
                    "MassiveScenarioClassification (fa)": 6.5,
                    "MassiveScenarioClassification (fi)": 41.72,
                    "MassiveScenarioClassification (fr)": 63.6,
                    "MassiveScenarioClassification (he)": 7.93,
                    "MassiveScenarioClassification (hi)": 7.85,
                    "MassiveScenarioClassification (hu)": 41.37,
                    "MassiveScenarioClassification (hy)": 9.42,
                    "MassiveScenarioClassification (id)": 44.88,
                    "MassiveScenarioClassification (is)": 40.86,
                    "MassiveScenarioClassification (it)": 60.09,
                    "MassiveScenarioClassification (ja)": 6.56,
                    "MassiveScenarioClassification (jv)": 40.18,
                    "MassiveScenarioClassification (ka)": 7.37,
                    "MassiveScenarioClassification (km)": 9.56,
                    "MassiveScenarioClassification (kn)": 8.4,
                    "MassiveScenarioClassification (ko)": 5.96,
                    "MassiveScenarioClassification (lv)": 41.44,
                    "MassiveScenarioClassification (ml)": 7.47,
                    "MassiveScenarioClassification (mn)": 25.36,
                    "MassiveScenarioClassification (ms)": 39.69,
                    "MassiveScenarioClassification (my)": 9.68,
                    "MassiveScenarioClassification (nb)": 49.92,
                    "MassiveScenarioClassification (nl)": 56.09,
                    "MassiveScenarioClassification (pl)": 45.2,
                    "MassiveScenarioClassification (pt)": 57.99,
                    "MassiveScenarioClassification (ro)": 56.0,
                    "MassiveScenarioClassification (ru)": 27.47,
                    "MassiveScenarioClassification (sl)": 41.04,
                    "MassiveScenarioClassification (sq)": 49.38,
                    "MassiveScenarioClassification (sv)": 50.97,
                    "MassiveScenarioClassification (sw)": 40.62,
                    "MassiveScenarioClassification (ta)": 7.59,
                    "MassiveScenarioClassification (te)": 7.07,
                    "MassiveScenarioClassification (th)": 8.52,
                    "MassiveScenarioClassification (tl)": 49.89,
                    "MassiveScenarioClassification (tr)": 43.08,
                    "MassiveScenarioClassification (ur)": 9.31,
                    "MassiveScenarioClassification (vi)": 27.46,
                    "MassiveScenarioClassification (zh-CN)": 4.7,
                    "MassiveScenarioClassification (zh-TW)": 7.24,
                    "ToxicConversationsClassification": 71.73,
                    "TweetSentimentExtractionClassification": 62.33
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/sentence-t5-large\">sentence-t5-large</a>",
                    "AlloProfClusteringP2P": 61.82,
                    "AlloProfClusteringS2S": 39.78,
                    "ArxivClusteringP2P": 41.62,
                    "ArxivClusteringS2S": 29.44,
                    "BiorxivClusteringP2P": 35.99,
                    "BiorxivClusteringS2S": 24.02,
                    "BlurbsClusteringP2P": 35.33,
                    "BlurbsClusteringS2S": 13.27,
                    "HALClusteringS2S": 18.73,
                    "MLSUMClusteringP2P": 42.07,
                    "MLSUMClusteringS2S": 31.87,
                    "MasakhaNEWSClusteringP2P (fra)": 58.6,
                    "MasakhaNEWSClusteringS2S (fra)": 31.33,
                    "MedrxivClusteringP2P": 32.4,
                    "MedrxivClusteringS2S": 26.33,
                    "RedditClustering": 54.53,
                    "RedditClusteringP2P": 62.5,
                    "StackExchangeClustering": 65.11,
                    "StackExchangeClusteringP2P": 36.86,
                    "TenKGnadClusteringP2P": 44.11,
                    "TenKGnadClusteringS2S": 17.26,
                    "TwentyNewsgroupsClustering": 49.33
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/sentence-t5-large\">sentence-t5-large</a>",
                    "OpusparcusPC (fr)": 91.19,
                    "PawsX (fr)": 59.59,
                    "SprintDuplicateQuestions": 89.01,
                    "TwitterSemEval2015": 79.75,
                    "TwitterURLCorpus": 86.14
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/sentence-t5-large\">sentence-t5-large</a>",
                    "AlloprofReranking": 57.99,
                    "AskUbuntuDupQuestions": 61.51,
                    "MindSmallReranking": 30.27,
                    "SciDocsRR": 74.88,
                    "StackOverflowDupQuestions": 49.34,
                    "SyntecReranking": 79.77
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/sentence-t5-large\">sentence-t5-large</a>",
                    "AlloprofRetrieval": 34.52,
                    "ArguAna": 39.27,
                    "BSARDRetrieval": 0.0,
                    "CQADupstackRetrieval": 38.96,
                    "ClimateFEVER": 11.36,
                    "DBPedia": 31.55,
                    "FEVER": 36.21,
                    "FiQA2018": 43.55,
                    "HotpotQA": 33.95,
                    "MSMARCO": 23.96,
                    "MintakaRetrieval (fr)": 23.92,
                    "NFCorpus": 31.1,
                    "NQ": 42.02,
                    "QuoraRetrieval": 85.73,
                    "SCIDOCS": 15.38,
                    "SciFact": 49.91,
                    "SyntecRetrieval": 71.05,
                    "TRECCOVID": 46.11,
                    "Touche2020": 21.63,
                    "XPQARetrieval (fr)": 48.79
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/sentence-t5-large\">sentence-t5-large</a>",
                    "BIOSSES": 78.93,
                    "SICK-R": 80.34,
                    "SICKFr": 72.83,
                    "STS12": 79.11,
                    "STS13": 87.33,
                    "STS14": 83.17,
                    "STS15": 88.28,
                    "STS16": 84.36,
                    "STS17 (ar-ar)": 10.75,
                    "STS17 (en-ar)": -4.71,
                    "STS17 (en-de)": 73.62,
                    "STS17 (en-en)": 88.99,
                    "STS17 (en-tr)": -0.42,
                    "STS17 (es-en)": 62.62,
                    "STS17 (es-es)": 82.74,
                    "STS17 (fr-en)": 67.86,
                    "STS17 (it-en)": 51.86,
                    "STS17 (ko-ko)": 9.44,
                    "STS17 (nl-en)": 45.95,
                    "STS22 (ar)": 27.01,
                    "STS22 (de)": 43.73,
                    "STS22 (de-en)": 49.93,
                    "STS22 (de-fr)": 61.58,
                    "STS22 (de-pl)": 38.83,
                    "STS22 (en)": 62.39,
                    "STS22 (es)": 57.68,
                    "STS22 (es-en)": 68.09,
                    "STS22 (es-it)": 61.58,
                    "STS22 (fr)": 75.01,
                    "STS22 (fr-pl)": 5.63,
                    "STS22 (it)": 62.01,
                    "STS22 (pl)": 25.0,
                    "STS22 (pl-en)": 51.72,
                    "STS22 (ru)": 14.21,
                    "STS22 (tr)": 47.3,
                    "STS22 (zh)": 30.47,
                    "STS22 (zh-en)": 23.1,
                    "STSBenchmark": 85.36,
                    "STSBenchmarkMultilingualSTS (fr)": 77.59
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/sentence-t5-large\">sentence-t5-large</a>",
                    "SummEval": 29.64,
                    "SummEvalFr": 30.23
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/sentence-t5-large\">sentence-t5-large</a>"
                }
            ]
        }
    },
    "universal-sentence-encoder-multilingual-large-3": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/vprelovac/universal-sentence-encoder-multilingual-large-3\">universal-sentence-encoder-multilingual-large-3</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/vprelovac/universal-sentence-encoder-multilingual-large-3\">universal-sentence-encoder-multilingual-large-3</a>",
                    "AmazonReviewsClassification (fr)": 35.09,
                    "MTOPDomainClassification (fr)": 88.19,
                    "MTOPIntentClassification (fr)": 63.64,
                    "MasakhaNEWSClassification (fra)": 72.04,
                    "MassiveIntentClassification (fr)": 65.8,
                    "MassiveScenarioClassification (fr)": 73.47
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/vprelovac/universal-sentence-encoder-multilingual-large-3\">universal-sentence-encoder-multilingual-large-3</a>",
                    "AlloProfClusteringP2P": 54.21,
                    "AlloProfClusteringS2S": 37.95,
                    "HALClusteringS2S": 18.94,
                    "MLSUMClusteringP2P": 41.02,
                    "MLSUMClusteringS2S": 37.97,
                    "MasakhaNEWSClusteringP2P (fra)": 24.09,
                    "MasakhaNEWSClusteringS2S (fra)": 40.24
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/vprelovac/universal-sentence-encoder-multilingual-large-3\">universal-sentence-encoder-multilingual-large-3</a>",
                    "OpusparcusPC (fr)": 93.38,
                    "PawsX (fr)": 53.62
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/vprelovac/universal-sentence-encoder-multilingual-large-3\">universal-sentence-encoder-multilingual-large-3</a>",
                    "AlloprofReranking": 55.39,
                    "SyntecReranking": 77.13
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/vprelovac/universal-sentence-encoder-multilingual-large-3\">universal-sentence-encoder-multilingual-large-3</a>",
                    "AlloprofRetrieval": 33.78,
                    "BSARDRetrieval": 0.0,
                    "MintakaRetrieval (fr)": 26.21,
                    "SyntecRetrieval": 63.69,
                    "XPQARetrieval (fr)": 65.21
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/vprelovac/universal-sentence-encoder-multilingual-large-3\">universal-sentence-encoder-multilingual-large-3</a>",
                    "SICKFr": 74.39,
                    "STS22 (fr)": 71.11,
                    "STSBenchmarkMultilingualSTS (fr)": 78.16
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/vprelovac/universal-sentence-encoder-multilingual-large-3\">universal-sentence-encoder-multilingual-large-3</a>",
                    "SummEvalFr": 28.56
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/vprelovac/universal-sentence-encoder-multilingual-large-3\">universal-sentence-encoder-multilingual-large-3</a>"
                }
            ]
        }
    },
    "sentence-t5-xxl": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/sentence-t5-xxl\">sentence-t5-xxl</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/sentence-t5-xxl\">sentence-t5-xxl</a>",
                    "AmazonCounterfactualClassification (en)": 77.07,
                    "AmazonPolarityClassification": 92.79,
                    "AmazonReviewsClassification (en)": 48.93,
                    "AmazonReviewsClassification (fr)": 46.09,
                    "Banking77Classification": 82.31,
                    "EmotionClassification": 48.57,
                    "ImdbClassification": 90.23,
                    "MTOPDomainClassification (en)": 92.49,
                    "MTOPDomainClassification (fr)": 86.2,
                    "MTOPIntentClassification (en)": 68.33,
                    "MTOPIntentClassification (fr)": 58.33,
                    "MasakhaNEWSClassification (fra)": 79.1,
                    "MassiveIntentClassification (en)": 73.44,
                    "MassiveIntentClassification (fr)": 65.91,
                    "MassiveScenarioClassification (en)": 74.82,
                    "MassiveScenarioClassification (fr)": 68.53,
                    "ToxicConversationsClassification": 70.04,
                    "TweetSentimentExtractionClassification": 62.01
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/sentence-t5-xxl\">sentence-t5-xxl</a>",
                    "AlloProfClusteringP2P": 60.98,
                    "AlloProfClusteringS2S": 43.5,
                    "ArxivClusteringP2P": 42.89,
                    "ArxivClusteringS2S": 33.47,
                    "BiorxivClusteringP2P": 36.53,
                    "BiorxivClusteringS2S": 28.66,
                    "BlurbsClusteringP2P": 39.91,
                    "BlurbsClusteringS2S": 15.94,
                    "HALClusteringS2S": 21.4,
                    "MLSUMClusteringP2P": 42.24,
                    "MLSUMClusteringS2S": 35.25,
                    "MasakhaNEWSClusteringP2P (fra)": 61.15,
                    "MasakhaNEWSClusteringS2S (fra)": 38.24,
                    "MedrxivClusteringP2P": 32.09,
                    "MedrxivClusteringS2S": 26.82,
                    "RedditClustering": 58.99,
                    "RedditClusteringP2P": 64.46,
                    "StackExchangeClustering": 70.78,
                    "StackExchangeClusteringP2P": 35.25,
                    "TenKGnadClusteringP2P": 43.43,
                    "TenKGnadClusteringS2S": 19.69,
                    "TwentyNewsgroupsClustering": 50.93
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/sentence-t5-xxl\">sentence-t5-xxl</a>",
                    "OpusparcusPC (fr)": 93.94,
                    "PawsX (fr)": 63.98,
                    "SprintDuplicateQuestions": 88.89,
                    "TwitterSemEval2015": 80.28,
                    "TwitterURLCorpus": 86.01
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/sentence-t5-xxl\">sentence-t5-xxl</a>",
                    "AlloprofReranking": 68.36,
                    "AskUbuntuDupQuestions": 66.16,
                    "MindSmallReranking": 30.6,
                    "SciDocsRR": 76.09,
                    "StackOverflowDupQuestions": 52.85,
                    "SyntecReranking": 85.15
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/sentence-t5-xxl\">sentence-t5-xxl</a>",
                    "AlloprofRetrieval": 45.75,
                    "ArguAna": 39.85,
                    "BSARDRetrieval": 3.33,
                    "CQADupstackRetrieval": 44.65,
                    "ClimateFEVER": 14.63,
                    "DBPedia": 39.19,
                    "FEVER": 51.2,
                    "FiQA2018": 46.68,
                    "HotpotQA": 42.14,
                    "MSMARCO": 27.67,
                    "MintakaRetrieval (fr)": 34.93,
                    "NFCorpus": 35.08,
                    "NQ": 52.87,
                    "QuoraRetrieval": 85.96,
                    "SCIDOCS": 17.17,
                    "SciFact": 55.38,
                    "SyntecRetrieval": 78.97,
                    "TRECCOVID": 59.48,
                    "Touche2020": 21.65,
                    "XPQARetrieval (fr)": 56.2
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/sentence-t5-xxl\">sentence-t5-xxl</a>",
                    "BIOSSES": 80.43,
                    "SICK-R": 80.47,
                    "SICKFr": 77.07,
                    "STS12": 78.85,
                    "STS13": 88.94,
                    "STS14": 84.86,
                    "STS15": 89.32,
                    "STS16": 84.67,
                    "STS17 (en-en)": 89.46,
                    "STS22 (en)": 65.33,
                    "STS22 (fr)": 76.8,
                    "STSBenchmark": 84.01,
                    "STSBenchmarkMultilingualSTS (fr)": 81.24
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/sentence-t5-xxl\">sentence-t5-xxl</a>",
                    "SummEval": 30.08,
                    "SummEvalFr": 30.39
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/sentence-t5-xxl\">sentence-t5-xxl</a>"
                }
            ]
        }
    },
    "LaBSE": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/LaBSE\">LaBSE</a>",
                    "BUCC (de-en)": 99.35,
                    "BUCC (fr-en)": 98.72,
                    "BUCC (ru-en)": 97.78,
                    "BUCC (zh-en)": 99.16,
                    "Tatoeba (afr-eng)": 96.18,
                    "Tatoeba (amh-eng)": 91.47,
                    "Tatoeba (ang-eng)": 59.28,
                    "Tatoeba (ara-eng)": 88.8,
                    "Tatoeba (arq-eng)": 42.69,
                    "Tatoeba (arz-eng)": 76.0,
                    "Tatoeba (ast-eng)": 90.68,
                    "Tatoeba (awa-eng)": 71.7,
                    "Tatoeba (aze-eng)": 94.93,
                    "Tatoeba (bel-eng)": 95.0,
                    "Tatoeba (ben-eng)": 88.55,
                    "Tatoeba (ber-eng)": 8.4,
                    "Tatoeba (bos-eng)": 94.92,
                    "Tatoeba (bre-eng)": 15.07,
                    "Tatoeba (bul-eng)": 94.58,
                    "Tatoeba (cat-eng)": 95.38,
                    "Tatoeba (cbk-eng)": 79.44,
                    "Tatoeba (ceb-eng)": 64.42,
                    "Tatoeba (ces-eng)": 96.68,
                    "Tatoeba (cha-eng)": 31.77,
                    "Tatoeba (cmn-eng)": 95.1,
                    "Tatoeba (cor-eng)": 10.11,
                    "Tatoeba (csb-eng)": 52.57,
                    "Tatoeba (cym-eng)": 92.0,
                    "Tatoeba (dan-eng)": 95.71,
                    "Tatoeba (deu-eng)": 99.2,
                    "Tatoeba (dsb-eng)": 64.81,
                    "Tatoeba (dtp-eng)": 10.85,
                    "Tatoeba (ell-eng)": 95.35,
                    "Tatoeba (epo-eng)": 98.2,
                    "Tatoeba (est-eng)": 96.55,
                    "Tatoeba (eus-eng)": 95.01,
                    "Tatoeba (fao-eng)": 87.4,
                    "Tatoeba (fin-eng)": 96.37,
                    "Tatoeba (fra-eng)": 94.86,
                    "Tatoeba (fry-eng)": 89.31,
                    "Tatoeba (gla-eng)": 85.66,
                    "Tatoeba (gle-eng)": 93.8,
                    "Tatoeba (glg-eng)": 96.82,
                    "Tatoeba (gsw-eng)": 46.5,
                    "Tatoeba (heb-eng)": 91.53,
                    "Tatoeba (hin-eng)": 96.87,
                    "Tatoeba (hrv-eng)": 96.95,
                    "Tatoeba (hsb-eng)": 67.11,
                    "Tatoeba (hun-eng)": 96.55,
                    "Tatoeba (hye-eng)": 94.09,
                    "Tatoeba (ido-eng)": 89.42,
                    "Tatoeba (ile-eng)": 85.58,
                    "Tatoeba (ina-eng)": 95.37,
                    "Tatoeba (ind-eng)": 93.66,
                    "Tatoeba (isl-eng)": 94.75,
                    "Tatoeba (ita-eng)": 92.72,
                    "Tatoeba (jav-eng)": 79.77,
                    "Tatoeba (jpn-eng)": 95.38,
                    "Tatoeba (kab-eng)": 4.31,
                    "Tatoeba (kat-eng)": 95.02,
                    "Tatoeba (kaz-eng)": 87.49,
                    "Tatoeba (khm-eng)": 78.37,
                    "Tatoeba (kor-eng)": 90.95,
                    "Tatoeba (kur-eng)": 83.59,
                    "Tatoeba (kzj-eng)": 11.33,
                    "Tatoeba (lat-eng)": 80.07,
                    "Tatoeba (lfn-eng)": 67.54,
                    "Tatoeba (lit-eng)": 96.47,
                    "Tatoeba (lvs-eng)": 95.88,
                    "Tatoeba (mal-eng)": 98.45,
                    "Tatoeba (mar-eng)": 92.65,
                    "Tatoeba (max-eng)": 63.26,
                    "Tatoeba (mhr-eng)": 15.74,
                    "Tatoeba (mkd-eng)": 93.6,
                    "Tatoeba (mon-eng)": 95.91,
                    "Tatoeba (nds-eng)": 79.42,
                    "Tatoeba (nld-eng)": 96.07,
                    "Tatoeba (nno-eng)": 94.48,
                    "Tatoeba (nob-eng)": 98.4,
                    "Tatoeba (nov-eng)": 74.38,
                    "Tatoeba (oci-eng)": 65.81,
                    "Tatoeba (orv-eng)": 38.93,
                    "Tatoeba (pam-eng)": 10.73,
                    "Tatoeba (pes-eng)": 94.7,
                    "Tatoeba (pms-eng)": 64.57,
                    "Tatoeba (pol-eng)": 97.22,
                    "Tatoeba (por-eng)": 94.14,
                    "Tatoeba (ron-eng)": 96.92,
                    "Tatoeba (rus-eng)": 93.75,
                    "Tatoeba (slk-eng)": 96.5,
                    "Tatoeba (slv-eng)": 96.03,
                    "Tatoeba (spa-eng)": 98.4,
                    "Tatoeba (sqi-eng)": 96.76,
                    "Tatoeba (srp-eng)": 94.43,
                    "Tatoeba (swe-eng)": 95.63,
                    "Tatoeba (swg-eng)": 59.36,
                    "Tatoeba (swh-eng)": 84.5,
                    "Tatoeba (tam-eng)": 89.0,
                    "Tatoeba (tat-eng)": 85.92,
                    "Tatoeba (tel-eng)": 97.86,
                    "Tatoeba (tgl-eng)": 96.02,
                    "Tatoeba (tha-eng)": 96.14,
                    "Tatoeba (tuk-eng)": 75.27,
                    "Tatoeba (tur-eng)": 98.0,
                    "Tatoeba (tzl-eng)": 58.88,
                    "Tatoeba (uig-eng)": 92.4,
                    "Tatoeba (ukr-eng)": 93.97,
                    "Tatoeba (urd-eng)": 93.22,
                    "Tatoeba (uzb-eng)": 84.23,
                    "Tatoeba (vie-eng)": 97.2,
                    "Tatoeba (war-eng)": 60.29,
                    "Tatoeba (wuu-eng)": 90.18,
                    "Tatoeba (xho-eng)": 91.55,
                    "Tatoeba (yid-eng)": 88.79,
                    "Tatoeba (yue-eng)": 89.58,
                    "Tatoeba (zsm-eng)": 95.62
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/LaBSE\">LaBSE</a>",
                    "AllegroReviews": 34.89,
                    "AmazonCounterfactualClassification (de)": 73.17,
                    "AmazonCounterfactualClassification (en)": 75.93,
                    "AmazonCounterfactualClassification (en-ext)": 76.09,
                    "AmazonCounterfactualClassification (ja)": 76.42,
                    "AmazonPolarityClassification": 68.95,
                    "AmazonReviewsClassification (de)": 39.92,
                    "AmazonReviewsClassification (en)": 35.8,
                    "AmazonReviewsClassification (es)": 39.39,
                    "AmazonReviewsClassification (fr)": 38.52,
                    "AmazonReviewsClassification (ja)": 36.44,
                    "AmazonReviewsClassification (zh)": 36.45,
                    "Banking77Classification": 69.85,
                    "CBD": 65.71,
                    "EmotionClassification": 37.22,
                    "ImdbClassification": 62.04,
                    "MTOPDomainClassification (de)": 86.95,
                    "MTOPDomainClassification (en)": 86.06,
                    "MTOPDomainClassification (es)": 84.07,
                    "MTOPDomainClassification (fr)": 84.14,
                    "MTOPDomainClassification (hi)": 85.11,
                    "MTOPDomainClassification (th)": 81.24,
                    "MTOPIntentClassification (de)": 63.42,
                    "MTOPIntentClassification (en)": 63.03,
                    "MTOPIntentClassification (es)": 64.44,
                    "MTOPIntentClassification (fr)": 62.01,
                    "MTOPIntentClassification (hi)": 62.58,
                    "MTOPIntentClassification (th)": 64.61,
                    "MasakhaNEWSClassification (fra)": 77.39,
                    "MassiveIntentClassification (af)": 56.12,
                    "MassiveIntentClassification (am)": 55.71,
                    "MassiveIntentClassification (ar)": 50.86,
                    "MassiveIntentClassification (az)": 58.97,
                    "MassiveIntentClassification (bn)": 58.22,
                    "MassiveIntentClassification (cy)": 50.16,
                    "MassiveIntentClassification (da)": 58.25,
                    "MassiveIntentClassification (de)": 56.21,
                    "MassiveIntentClassification (el)": 57.03,
                    "MassiveIntentClassification (en)": 61.46,
                    "MassiveIntentClassification (es)": 58.32,
                    "MassiveIntentClassification (fa)": 62.33,
                    "MassiveIntentClassification (fi)": 60.12,
                    "MassiveIntentClassification (fr)": 60.47,
                    "MassiveIntentClassification (he)": 56.55,
                    "MassiveIntentClassification (hi)": 59.4,
                    "MassiveIntentClassification (hu)": 59.52,
                    "MassiveIntentClassification (hy)": 56.2,
                    "MassiveIntentClassification (id)": 61.12,
                    "MassiveIntentClassification (is)": 54.9,
                    "MassiveIntentClassification (it)": 59.83,
                    "MassiveIntentClassification (ja)": 63.11,
                    "MassiveIntentClassification (jv)": 50.98,
                    "MassiveIntentClassification (ka)": 48.35,
                    "MassiveIntentClassification (km)": 48.55,
                    "MassiveIntentClassification (kn)": 56.24,
                    "MassiveIntentClassification (ko)": 60.99,
                    "MassiveIntentClassification (lv)": 57.1,
                    "MassiveIntentClassification (ml)": 57.91,
                    "MassiveIntentClassification (mn)": 58.5,
                    "MassiveIntentClassification (ms)": 58.6,
                    "MassiveIntentClassification (my)": 57.35,
                    "MassiveIntentClassification (nb)": 57.91,
                    "MassiveIntentClassification (nl)": 59.37,
                    "MassiveIntentClassification (pl)": 59.71,
                    "MassiveIntentClassification (pt)": 60.16,
                    "MassiveIntentClassification (ro)": 57.92,
                    "MassiveIntentClassification (ru)": 60.67,
                    "MassiveIntentClassification (sl)": 59.37,
                    "MassiveIntentClassification (sq)": 58.03,
                    "MassiveIntentClassification (sv)": 59.66,
                    "MassiveIntentClassification (sw)": 51.62,
                    "MassiveIntentClassification (ta)": 55.04,
                    "MassiveIntentClassification (te)": 58.32,
                    "MassiveIntentClassification (th)": 56.58,
                    "MassiveIntentClassification (tl)": 55.28,
                    "MassiveIntentClassification (tr)": 60.91,
                    "MassiveIntentClassification (ur)": 56.7,
                    "MassiveIntentClassification (vi)": 56.67,
                    "MassiveIntentClassification (zh-CN)": 63.86,
                    "MassiveIntentClassification (zh-TW)": 59.51,
                    "MassiveScenarioClassification (af)": 63.39,
                    "MassiveScenarioClassification (am)": 62.02,
                    "MassiveScenarioClassification (ar)": 57.72,
                    "MassiveScenarioClassification (az)": 63.48,
                    "MassiveScenarioClassification (bn)": 61.84,
                    "MassiveScenarioClassification (cy)": 56.13,
                    "MassiveScenarioClassification (da)": 65.24,
                    "MassiveScenarioClassification (de)": 62.39,
                    "MassiveScenarioClassification (el)": 64.58,
                    "MassiveScenarioClassification (en)": 66.41,
                    "MassiveScenarioClassification (es)": 63.61,
                    "MassiveScenarioClassification (fa)": 67.46,
                    "MassiveScenarioClassification (fi)": 64.58,
                    "MassiveScenarioClassification (fr)": 65.1,
                    "MassiveScenarioClassification (he)": 63.53,
                    "MassiveScenarioClassification (hi)": 64.4,
                    "MassiveScenarioClassification (hu)": 65.82,
                    "MassiveScenarioClassification (hy)": 61.25,
                    "MassiveScenarioClassification (id)": 65.84,
                    "MassiveScenarioClassification (is)": 61.94,
                    "MassiveScenarioClassification (it)": 64.09,
                    "MassiveScenarioClassification (ja)": 67.72,
                    "MassiveScenarioClassification (jv)": 58.29,
                    "MassiveScenarioClassification (ka)": 53.38,
                    "MassiveScenarioClassification (km)": 56.18,
                    "MassiveScenarioClassification (kn)": 61.74,
                    "MassiveScenarioClassification (ko)": 67.26,
                    "MassiveScenarioClassification (lv)": 61.87,
                    "MassiveScenarioClassification (ml)": 62.26,
                    "MassiveScenarioClassification (mn)": 62.6,
                    "MassiveScenarioClassification (ms)": 65.63,
                    "MassiveScenarioClassification (my)": 62.94,
                    "MassiveScenarioClassification (nb)": 64.29,
                    "MassiveScenarioClassification (nl)": 65.16,
                    "MassiveScenarioClassification (pl)": 64.58,
                    "MassiveScenarioClassification (pt)": 63.28,
                    "MassiveScenarioClassification (ro)": 62.41,
                    "MassiveScenarioClassification (ru)": 65.25,
                    "MassiveScenarioClassification (sl)": 64.25,
                    "MassiveScenarioClassification (sq)": 64.54,
                    "MassiveScenarioClassification (sv)": 66.01,
                    "MassiveScenarioClassification (sw)": 58.36,
                    "MassiveScenarioClassification (ta)": 59.08,
                    "MassiveScenarioClassification (te)": 64.13,
                    "MassiveScenarioClassification (th)": 64.34,
                    "MassiveScenarioClassification (tl)": 60.23,
                    "MassiveScenarioClassification (tr)": 65.43,
                    "MassiveScenarioClassification (ur)": 61.52,
                    "MassiveScenarioClassification (vi)": 61.05,
                    "MassiveScenarioClassification (zh-CN)": 70.85,
                    "MassiveScenarioClassification (zh-TW)": 67.08,
                    "PAC": 68.11,
                    "PolEmo2.0-IN": 64.0,
                    "PolEmo2.0-OUT": 44.72,
                    "ToxicConversationsClassification": 66.9,
                    "TweetSentimentExtractionClassification": 58.82
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/LaBSE\">LaBSE</a>",
                    "8TagsClustering": 12.96,
                    "AlloProfClusteringP2P": 54.78,
                    "AlloProfClusteringS2S": 31.6,
                    "ArxivClusteringP2P": 32.13,
                    "ArxivClusteringS2S": 22.05,
                    "BiorxivClusteringP2P": 29.84,
                    "BiorxivClusteringS2S": 20.57,
                    "HALClusteringS2S": 20.62,
                    "MLSUMClusteringP2P": 42.09,
                    "MLSUMClusteringS2S": 34.84,
                    "MasakhaNEWSClusteringP2P (fra)": 46.16,
                    "MasakhaNEWSClusteringS2S (fra)": 38.13,
                    "MedrxivClusteringP2P": 30.13,
                    "MedrxivClusteringS2S": 24.82,
                    "RedditClustering": 28.79,
                    "RedditClusteringP2P": 49.14,
                    "StackExchangeClustering": 35.43,
                    "StackExchangeClusteringP2P": 28.83,
                    "TwentyNewsgroupsClustering": 23.28
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/LaBSE\">LaBSE</a>",
                    "CDSC-E": 68.91,
                    "OpusparcusPC (fr)": 93.96,
                    "PPC": 86.97,
                    "PSC": 97.42,
                    "PawsX (fr)": 54.63,
                    "SICK-E-PL": 63.77,
                    "SprintDuplicateQuestions": 89.26,
                    "TwitterSemEval2015": 62.78,
                    "TwitterURLCorpus": 84.58
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/LaBSE\">LaBSE</a>",
                    "AlloprofReranking": 49.51,
                    "AskUbuntuDupQuestions": 52.75,
                    "MindSmallReranking": 29.81,
                    "SciDocsRR": 68.72,
                    "StackOverflowDupQuestions": 42.42,
                    "SyntecReranking": 73.28
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/LaBSE\">LaBSE</a>",
                    "AlloprofRetrieval": 19.77,
                    "ArguAna": 34.18,
                    "ArguAna-PL": 38.52,
                    "BSARDRetrieval": 0.0,
                    "CQADupstackRetrieval": 18.75,
                    "ClimateFEVER": 3.83,
                    "DBPedia": 15.57,
                    "DBPedia-PL": 16.1,
                    "FEVER": 12.18,
                    "FiQA-PL": 7.63,
                    "FiQA2018": 7.0,
                    "HotpotQA": 18.75,
                    "HotpotQA-PL": 19.72,
                    "MSMARCO": 7.6,
                    "MSMARCO-PL": 7.22,
                    "MintakaRetrieval (fr)": 15.53,
                    "NFCorpus": 16.54,
                    "NFCorpus-PL": 17.45,
                    "NQ": 8.42,
                    "NQ-PL": 9.65,
                    "Quora-PL": 74.96,
                    "QuoraRetrieval": 77.03,
                    "SCIDOCS": 5.63,
                    "SCIDOCS-PL": 7.48,
                    "SciFact": 38.2,
                    "SciFact-PL": 39.79,
                    "SyntecRetrieval": 55.31,
                    "TRECCOVID": 16.34,
                    "TRECCOVID-PL": 18.45,
                    "Touche2020": 4.88,
                    "XPQARetrieval (fr)": 51.74
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/LaBSE\">LaBSE</a>",
                    "BIOSSES": 78.7,
                    "CDSC-R": 85.53,
                    "SICK-R": 69.99,
                    "SICK-R-PL": 65.9,
                    "SICKFr": 69.94,
                    "STS12": 65.08,
                    "STS13": 67.98,
                    "STS14": 64.03,
                    "STS15": 76.59,
                    "STS16": 72.98,
                    "STS17 (ar-ar)": 69.07,
                    "STS17 (en-ar)": 74.51,
                    "STS17 (en-de)": 73.85,
                    "STS17 (en-en)": 79.45,
                    "STS17 (en-tr)": 72.07,
                    "STS17 (es-en)": 65.71,
                    "STS17 (es-es)": 80.83,
                    "STS17 (fr-en)": 76.98,
                    "STS17 (it-en)": 76.99,
                    "STS17 (ko-ko)": 71.32,
                    "STS17 (nl-en)": 75.22,
                    "STS22 (ar)": 57.67,
                    "STS22 (de)": 48.58,
                    "STS22 (de-en)": 50.14,
                    "STS22 (de-fr)": 53.28,
                    "STS22 (de-pl)": 58.69,
                    "STS22 (en)": 60.97,
                    "STS22 (es)": 63.18,
                    "STS22 (es-en)": 71.86,
                    "STS22 (es-it)": 69.69,
                    "STS22 (fr)": 77.95,
                    "STS22 (fr-pl)": 61.98,
                    "STS22 (it)": 72.22,
                    "STS22 (pl)": 39.28,
                    "STS22 (pl-en)": 69.41,
                    "STS22 (ru)": 57.49,
                    "STS22 (tr)": 58.15,
                    "STS22 (zh)": 63.02,
                    "STS22 (zh-en)": 64.02,
                    "STSBenchmark": 72.25,
                    "STSBenchmarkMultilingualSTS (fr)": 75.1
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/LaBSE\">LaBSE</a>",
                    "SummEval": 31.05,
                    "SummEvalFr": 30.16
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/LaBSE\">LaBSE</a>"
                }
            ]
        }
    },
    "multi-qa-MiniLM-L6-cos-v1": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/multi-qa-MiniLM-L6-cos-v1\">multi-qa-MiniLM-L6-cos-v1</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/multi-qa-MiniLM-L6-cos-v1\">multi-qa-MiniLM-L6-cos-v1</a>",
                    "AmazonReviewsClassification (fr)": 27.05,
                    "MTOPDomainClassification (fr)": 72.97,
                    "MTOPIntentClassification (fr)": 37.18,
                    "MasakhaNEWSClassification (fra)": 75.62,
                    "MassiveIntentClassification (fr)": 42.64,
                    "MassiveScenarioClassification (fr)": 49.92
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/multi-qa-MiniLM-L6-cos-v1\">multi-qa-MiniLM-L6-cos-v1</a>",
                    "AlloProfClusteringP2P": 49.13,
                    "AlloProfClusteringS2S": 26.16,
                    "HALClusteringS2S": 12.49,
                    "MLSUMClusteringP2P": 35.15,
                    "MLSUMClusteringS2S": 25.95,
                    "MasakhaNEWSClusteringP2P (fra)": 53.73,
                    "MasakhaNEWSClusteringS2S (fra)": 27.27
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/multi-qa-MiniLM-L6-cos-v1\">multi-qa-MiniLM-L6-cos-v1</a>",
                    "OpusparcusPC (fr)": 88.07,
                    "PawsX (fr)": 57.36
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/multi-qa-MiniLM-L6-cos-v1\">multi-qa-MiniLM-L6-cos-v1</a>",
                    "AlloprofReranking": 40.28,
                    "SyntecReranking": 65.08
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/multi-qa-MiniLM-L6-cos-v1\">multi-qa-MiniLM-L6-cos-v1</a>",
                    "AlloprofRetrieval": 30.23,
                    "BSARDRetrieval": 0.0,
                    "MintakaRetrieval (fr)": 16.31,
                    "SyntecRetrieval": 58.07,
                    "XPQARetrieval (fr)": 48.83
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/multi-qa-MiniLM-L6-cos-v1\">multi-qa-MiniLM-L6-cos-v1</a>",
                    "SICKFr": 62.11,
                    "STS22 (fr)": 74.62,
                    "STSBenchmarkMultilingualSTS (fr)": 63.85
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/multi-qa-MiniLM-L6-cos-v1\">multi-qa-MiniLM-L6-cos-v1</a>",
                    "SummEvalFr": 27.59
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/multi-qa-MiniLM-L6-cos-v1\">multi-qa-MiniLM-L6-cos-v1</a>"
                }
            ]
        }
    },
    "bge-large-zh-noinstruct": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/BAAI/bge-large-zh-noinstruct\">bge-large-zh-noinstruct</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/BAAI/bge-large-zh-noinstruct\">bge-large-zh-noinstruct</a>",
                    "AmazonReviewsClassification (zh)": 41.94,
                    "IFlyTek": 45.32,
                    "JDReview": 85.38,
                    "MassiveIntentClassification (zh-CN)": 66.96,
                    "MassiveScenarioClassification (zh-CN)": 73.39,
                    "MultilingualSentiment": 73.7,
                    "OnlineShopping": 91.66,
                    "TNews": 52.05,
                    "Waimai": 86.83
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/BAAI/bge-large-zh-noinstruct\">bge-large-zh-noinstruct</a>",
                    "CLSClusteringP2P": 41.23,
                    "CLSClusteringS2S": 40.04,
                    "ThuNewsClusteringP2P": 62.03,
                    "ThuNewsClusteringS2S": 56.75
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/BAAI/bge-large-zh-noinstruct\">bge-large-zh-noinstruct</a>",
                    "Cmnli": 82.17,
                    "Ocnli": 71.37
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/BAAI/bge-large-zh-noinstruct\">bge-large-zh-noinstruct</a>",
                    "CMedQAv1": 81.72,
                    "CMedQAv2": 84.64,
                    "MMarcoReranking": 27.1,
                    "T2Reranking": 66.16
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/BAAI/bge-large-zh-noinstruct\">bge-large-zh-noinstruct</a>",
                    "CmedqaRetrieval": 41.03,
                    "CovidRetrieval": 75.07,
                    "DuRetrieval": 84.68,
                    "EcomRetrieval": 65.6,
                    "MMarcoRetrieval": 81.38,
                    "MedicalRetrieval": 58.28,
                    "T2Retrieval": 84.39,
                    "VideoRetrieval": 73.93
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/BAAI/bge-large-zh-noinstruct\">bge-large-zh-noinstruct</a>",
                    "AFQMC": 43.06,
                    "ATEC": 48.29,
                    "BQ": 60.53,
                    "LCQMC": 74.71,
                    "PAWSX": 16.64,
                    "QBQTC": 35.2,
                    "STS22 (zh)": 67.19,
                    "STSB": 78.41
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/BAAI/bge-large-zh-noinstruct\">bge-large-zh-noinstruct</a>"
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/BAAI/bge-large-zh-noinstruct\">bge-large-zh-noinstruct</a>"
                }
            ]
        }
    },
    "gelectra-base": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/deepset/gelectra-base\">gelectra-base</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/deepset/gelectra-base\">gelectra-base</a>"
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/deepset/gelectra-base\">gelectra-base</a>",
                    "BlurbsClusteringP2P": 10.06,
                    "BlurbsClusteringS2S": 7.74,
                    "TenKGnadClusteringP2P": 9.02,
                    "TenKGnadClusteringS2S": 4.11
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/deepset/gelectra-base\">gelectra-base</a>"
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/deepset/gelectra-base\">gelectra-base</a>"
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/deepset/gelectra-base\">gelectra-base</a>"
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/deepset/gelectra-base\">gelectra-base</a>"
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/deepset/gelectra-base\">gelectra-base</a>"
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/deepset/gelectra-base\">gelectra-base</a>"
                }
            ]
        }
    },
    "norbert3-base": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/ltg/norbert3-base\">norbert3-base</a>",
                    "BornholmBitextMining": 6.08
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/ltg/norbert3-base\">norbert3-base</a>",
                    "AngryTweetsClassification": 52.48,
                    "DKHateClassification": 58.78,
                    "DanishPoliticalCommentsClassification": 34.14,
                    "LccSentimentClassification": 54.07,
                    "MassiveIntentClassification (da)": 53.16,
                    "MassiveIntentClassification (nb)": 54.2,
                    "MassiveIntentClassification (sv)": 52.08,
                    "MassiveScenarioClassification (da)": 57.17,
                    "MassiveScenarioClassification (nb)": 60.69,
                    "MassiveScenarioClassification (sv)": 53.53,
                    "NoRecClassification": 53.4,
                    "NordicLangClassification": 82.67,
                    "NorwegianParliament": 59.33,
                    "ScalaDaClassification": 58.25,
                    "ScalaNbClassification": 60.19
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/ltg/norbert3-base\">norbert3-base</a>"
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/ltg/norbert3-base\">norbert3-base</a>"
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/ltg/norbert3-base\">norbert3-base</a>"
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/ltg/norbert3-base\">norbert3-base</a>"
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/ltg/norbert3-base\">norbert3-base</a>"
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/ltg/norbert3-base\">norbert3-base</a>"
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/ltg/norbert3-base\">norbert3-base</a>"
                }
            ]
        }
    },
    "flaubert_base_cased": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/flaubert/flaubert_base_cased\">flaubert_base_cased</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/flaubert/flaubert_base_cased\">flaubert_base_cased</a>",
                    "AmazonReviewsClassification (fr)": 24.9,
                    "MTOPDomainClassification (fr)": 25.55,
                    "MTOPIntentClassification (fr)": 9.49,
                    "MasakhaNEWSClassification (fra)": 71.14,
                    "MassiveIntentClassification (fr)": 6.98,
                    "MassiveScenarioClassification (fr)": 11.41
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/flaubert/flaubert_base_cased\">flaubert_base_cased</a>",
                    "AlloProfClusteringP2P": 52.86,
                    "AlloProfClusteringS2S": 14.46,
                    "HALClusteringS2S": 3.85,
                    "MLSUMClusteringP2P": 39.06,
                    "MLSUMClusteringS2S": 17.13,
                    "MasakhaNEWSClusteringP2P (fra)": 41.61,
                    "MasakhaNEWSClusteringS2S (fra)": 21.26
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/flaubert/flaubert_base_cased\">flaubert_base_cased</a>",
                    "OpusparcusPC (fr)": 82.15,
                    "PawsX (fr)": 51.89
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/flaubert/flaubert_base_cased\">flaubert_base_cased</a>",
                    "AlloprofReranking": 34.81,
                    "SyntecReranking": 55.88
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/flaubert/flaubert_base_cased\">flaubert_base_cased</a>",
                    "AlloprofRetrieval": 1.63,
                    "BSARDRetrieval": 0.0,
                    "MintakaRetrieval (fr)": 0.58,
                    "SyntecRetrieval": 20.56,
                    "XPQARetrieval (fr)": 6.59
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/flaubert/flaubert_base_cased\">flaubert_base_cased</a>",
                    "SICKFr": 53.86,
                    "STS22 (fr)": 65.37,
                    "STSBenchmarkMultilingualSTS (fr)": 37.14
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/flaubert/flaubert_base_cased\">flaubert_base_cased</a>",
                    "SummEvalFr": 31.26
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/flaubert/flaubert_base_cased\">flaubert_base_cased</a>"
                }
            ]
        }
    },
    "gbert-base": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/deepset/gbert-base\">gbert-base</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/deepset/gbert-base\">gbert-base</a>"
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/deepset/gbert-base\">gbert-base</a>",
                    "BlurbsClusteringP2P": 35.36,
                    "BlurbsClusteringS2S": 11.27,
                    "TenKGnadClusteringP2P": 37.16,
                    "TenKGnadClusteringS2S": 24.23
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/deepset/gbert-base\">gbert-base</a>"
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/deepset/gbert-base\">gbert-base</a>"
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/deepset/gbert-base\">gbert-base</a>"
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/deepset/gbert-base\">gbert-base</a>"
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/deepset/gbert-base\">gbert-base</a>"
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/deepset/gbert-base\">gbert-base</a>"
                }
            ]
        }
    },
    "LLM2Vec-Mistral-supervised": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/McGill-NLP/LLM2Vec-Mistral-7B-Instruct-v2-mntp-supervised\">LLM2Vec-Mistral-supervised</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/McGill-NLP/LLM2Vec-Mistral-7B-Instruct-v2-mntp-supervised\">LLM2Vec-Mistral-supervised</a>",
                    "AmazonCounterfactualClassification (en)": 77.58,
                    "AmazonPolarityClassification": 91.12,
                    "AmazonReviewsClassification (en)": 49.97,
                    "Banking77Classification": 88.31,
                    "EmotionClassification": 52.04,
                    "ImdbClassification": 87.42,
                    "MTOPDomainClassification (en)": 96.04,
                    "MTOPIntentClassification (en)": 84.77,
                    "MassiveIntentClassification (en)": 79.29,
                    "MassiveScenarioClassification (en)": 81.64,
                    "ToxicConversationsClassification": 69.26,
                    "TweetSentimentExtractionClassification": 62.14
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/McGill-NLP/LLM2Vec-Mistral-7B-Instruct-v2-mntp-supervised\">LLM2Vec-Mistral-supervised</a>",
                    "ArxivClusteringP2P": 42.81,
                    "ArxivClusteringS2S": 44.24,
                    "BiorxivClusteringP2P": 34.27,
                    "BiorxivClusteringS2S": 35.53,
                    "MedrxivClusteringP2P": 31.07,
                    "MedrxivClusteringS2S": 31.27,
                    "RedditClustering": 60.24,
                    "RedditClusteringP2P": 64.12,
                    "StackExchangeClustering": 70.73,
                    "StackExchangeClusteringP2P": 34.5,
                    "TwentyNewsgroupsClustering": 52.18
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/McGill-NLP/LLM2Vec-Mistral-7B-Instruct-v2-mntp-supervised\">LLM2Vec-Mistral-supervised</a>",
                    "SprintDuplicateQuestions": 96.82,
                    "TwitterSemEval2015": 80.6,
                    "TwitterURLCorpus": 86.56
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/McGill-NLP/LLM2Vec-Mistral-7B-Instruct-v2-mntp-supervised\">LLM2Vec-Mistral-supervised</a>",
                    "AskUbuntuDupQuestions": 63.98,
                    "MindSmallReranking": 31.5,
                    "SciDocsRR": 83.8,
                    "StackOverflowDupQuestions": 54.41
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/McGill-NLP/LLM2Vec-Mistral-7B-Instruct-v2-mntp-supervised\">LLM2Vec-Mistral-supervised</a>",
                    "ArguAna": 57.48,
                    "CQADupstackRetrieval": 48.84,
                    "ClimateFEVER": 35.19,
                    "DBPedia": 49.58,
                    "FEVER": 89.4,
                    "FiQA2018": 53.11,
                    "HotpotQA": 74.07,
                    "MSMARCO": 42.17,
                    "NFCorpus": 39.33,
                    "NQ": 61.7,
                    "QuoraRetrieval": 87.75,
                    "SCIDOCS": 22.5,
                    "SciFact": 78.86,
                    "TRECCOVID": 77.69,
                    "Touche2020": 22.18
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/McGill-NLP/LLM2Vec-Mistral-7B-Instruct-v2-mntp-supervised\">LLM2Vec-Mistral-supervised</a>",
                    "BIOSSES": 85.24,
                    "SICK-R": 83.7,
                    "STS12": 78.8,
                    "STS13": 86.37,
                    "STS14": 84.04,
                    "STS15": 88.99,
                    "STS16": 87.22,
                    "STS17 (en-en)": 90.19,
                    "STS22 (en)": 67.68,
                    "STSBenchmark": 88.65
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/McGill-NLP/LLM2Vec-Mistral-7B-Instruct-v2-mntp-supervised\">LLM2Vec-Mistral-supervised</a>",
                    "SummEval": 29.96
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/McGill-NLP/LLM2Vec-Mistral-7B-Instruct-v2-mntp-supervised\">LLM2Vec-Mistral-supervised</a>"
                }
            ]
        }
    },
    "unsup-simcse-bert-base-uncased": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/princeton-nlp/unsup-simcse-bert-base-uncased\">unsup-simcse-bert-base-uncased</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/princeton-nlp/unsup-simcse-bert-base-uncased\">unsup-simcse-bert-base-uncased</a>",
                    "AmazonCounterfactualClassification (en)": 67.09,
                    "AmazonPolarityClassification": 74.48,
                    "AmazonReviewsClassification (en)": 33.85,
                    "Banking77Classification": 73.55,
                    "EmotionClassification": 42.22,
                    "ImdbClassification": 69.63,
                    "MTOPDomainClassification (en)": 81.71,
                    "MTOPIntentClassification (en)": 59.23,
                    "MassiveIntentClassification (en)": 59.84,
                    "MassiveScenarioClassification (en)": 66.25,
                    "ToxicConversationsClassification": 68.82,
                    "TweetSentimentExtractionClassification": 53.36
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/princeton-nlp/unsup-simcse-bert-base-uncased\">unsup-simcse-bert-base-uncased</a>",
                    "ArxivClusteringP2P": 32.61,
                    "ArxivClusteringS2S": 24.68,
                    "BiorxivClusteringP2P": 24.9,
                    "BiorxivClusteringS2S": 19.55,
                    "MedrxivClusteringP2P": 23.6,
                    "MedrxivClusteringS2S": 21.97,
                    "RedditClustering": 32.18,
                    "RedditClusteringP2P": 45.14,
                    "StackExchangeClustering": 43.07,
                    "StackExchangeClusteringP2P": 28.5,
                    "TwentyNewsgroupsClustering": 23.21
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/princeton-nlp/unsup-simcse-bert-base-uncased\">unsup-simcse-bert-base-uncased</a>",
                    "SprintDuplicateQuestions": 69.41,
                    "TwitterSemEval2015": 60.21,
                    "TwitterURLCorpus": 81.37
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/princeton-nlp/unsup-simcse-bert-base-uncased\">unsup-simcse-bert-base-uncased</a>",
                    "AskUbuntuDupQuestions": 51.57,
                    "MindSmallReranking": 28.62,
                    "SciDocsRR": 66.33,
                    "StackOverflowDupQuestions": 39.35
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/princeton-nlp/unsup-simcse-bert-base-uncased\">unsup-simcse-bert-base-uncased</a>",
                    "ArguAna": 38.34,
                    "CQADupstackRetrieval": 13.22,
                    "ClimateFEVER": 11.8,
                    "DBPedia": 15.04,
                    "FEVER": 21.06,
                    "FiQA2018": 9.84,
                    "HotpotQA": 19.75,
                    "MSMARCO": 9.35,
                    "NFCorpus": 9.88,
                    "NQ": 11.69,
                    "QuoraRetrieval": 78.03,
                    "SCIDOCS": 5.5,
                    "SciFact": 25.72,
                    "TRECCOVID": 26.2,
                    "Touche2020": 8.9
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/princeton-nlp/unsup-simcse-bert-base-uncased\">unsup-simcse-bert-base-uncased</a>",
                    "BIOSSES": 72.31,
                    "SICK-R": 72.24,
                    "STS12": 66.05,
                    "STS13": 81.49,
                    "STS14": 73.61,
                    "STS15": 79.72,
                    "STS16": 78.12,
                    "STS17 (en-en)": 83.58,
                    "STS22 (en)": 59.65,
                    "STSBenchmark": 76.52
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/princeton-nlp/unsup-simcse-bert-base-uncased\">unsup-simcse-bert-base-uncased</a>",
                    "SummEval": 31.15
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/princeton-nlp/unsup-simcse-bert-base-uncased\">unsup-simcse-bert-base-uncased</a>"
                }
            ]
        }
    },
    "text-embedding-3-large-256": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/new-embedding-models-and-api-updates\">text-embedding-3-large-256</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/new-embedding-models-and-api-updates\">text-embedding-3-large-256</a>",
                    "AmazonCounterfactualClassification (en)": 73.96,
                    "AmazonPolarityClassification": 91.32,
                    "AmazonReviewsClassification (en)": 46.03,
                    "Banking77Classification": 83.19,
                    "EmotionClassification": 45.8,
                    "ImdbClassification": 85.93,
                    "MTOPDomainClassification (en)": 92.76,
                    "MTOPIntentClassification (en)": 70.45,
                    "MassiveIntentClassification (en)": 71.12,
                    "MassiveScenarioClassification (en)": 75.56,
                    "ToxicConversationsClassification": 68.52,
                    "TweetSentimentExtractionClassification": 58.98
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/new-embedding-models-and-api-updates\">text-embedding-3-large-256</a>",
                    "ArxivClusteringP2P": 47.05,
                    "ArxivClusteringS2S": 42.59,
                    "BiorxivClusteringP2P": 35.43,
                    "BiorxivClusteringS2S": 33.86,
                    "MedrxivClusteringP2P": 32.1,
                    "MedrxivClusteringS2S": 31.15,
                    "RedditClustering": 60.18,
                    "RedditClusteringP2P": 64.71,
                    "StackExchangeClustering": 71.23,
                    "StackExchangeClusteringP2P": 35.95,
                    "TwentyNewsgroupsClustering": 54.24
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/new-embedding-models-and-api-updates\">text-embedding-3-large-256</a>",
                    "SprintDuplicateQuestions": 89.02,
                    "TwitterSemEval2015": 76.56,
                    "TwitterURLCorpus": 87.09
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/new-embedding-models-and-api-updates\">text-embedding-3-large-256</a>",
                    "AskUbuntuDupQuestions": 64.61,
                    "MindSmallReranking": 29.63,
                    "SciDocsRR": 84.25,
                    "StackOverflowDupQuestions": 53.46
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/new-embedding-models-and-api-updates\">text-embedding-3-large-256</a>",
                    "ArguAna": 55.6,
                    "CQADupstackRetrieval": 42.28,
                    "ClimateFEVER": 25.8,
                    "DBPedia": 40.8,
                    "FEVER": 84.57,
                    "FiQA2018": 50.33,
                    "HotpotQA": 62.69,
                    "MSMARCO": 37.93,
                    "NFCorpus": 37.94,
                    "NQ": 56.64,
                    "QuoraRetrieval": 88.22,
                    "SCIDOCS": 20.44,
                    "SciFact": 73.1,
                    "TRECCOVID": 76.24,
                    "Touche2020": 22.31
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/new-embedding-models-and-api-updates\">text-embedding-3-large-256</a>",
                    "BIOSSES": 84.87,
                    "SICK-R": 79.18,
                    "STS12": 71.98,
                    "STS13": 85.52,
                    "STS14": 80.5,
                    "STS15": 87.51,
                    "STS16": 84.48,
                    "STS17 (en-en)": 88.11,
                    "STS22 (en)": 65.92,
                    "STSBenchmark": 82.34
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/new-embedding-models-and-api-updates\">text-embedding-3-large-256</a>",
                    "SummEval": 29.92
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/new-embedding-models-and-api-updates\">text-embedding-3-large-256</a>"
                }
            ]
        }
    },
    "gtr-t5-xxl": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/gtr-t5-xxl\">gtr-t5-xxl</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/gtr-t5-xxl\">gtr-t5-xxl</a>",
                    "AmazonCounterfactualClassification (en)": 67.3,
                    "AmazonPolarityClassification": 75.05,
                    "AmazonReviewsClassification (en)": 37.3,
                    "Banking77Classification": 82.32,
                    "EmotionClassification": 43.19,
                    "ImdbClassification": 70.8,
                    "MTOPDomainClassification (en)": 93.84,
                    "MTOPIntentClassification (en)": 67.71,
                    "MassiveIntentClassification (en)": 70.61,
                    "MassiveScenarioClassification (en)": 77.77,
                    "ToxicConversationsClassification": 68.48,
                    "TweetSentimentExtractionClassification": 54.54
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/gtr-t5-xxl\">gtr-t5-xxl</a>",
                    "ArxivClusteringP2P": 37.9,
                    "ArxivClusteringS2S": 32.39,
                    "BiorxivClusteringP2P": 30.48,
                    "BiorxivClusteringS2S": 27.5,
                    "MedrxivClusteringP2P": 29.12,
                    "MedrxivClusteringS2S": 27.56,
                    "RedditClustering": 64.13,
                    "RedditClusteringP2P": 62.84,
                    "StackExchangeClustering": 71.43,
                    "StackExchangeClusteringP2P": 32.85,
                    "TwentyNewsgroupsClustering": 50.44
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/gtr-t5-xxl\">gtr-t5-xxl</a>",
                    "SprintDuplicateQuestions": 95.68,
                    "TwitterSemEval2015": 77.54,
                    "TwitterURLCorpus": 85.13
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/gtr-t5-xxl\">gtr-t5-xxl</a>",
                    "AskUbuntuDupQuestions": 63.23,
                    "MindSmallReranking": 31.93,
                    "SciDocsRR": 77.96,
                    "StackOverflowDupQuestions": 53.5
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/gtr-t5-xxl\">gtr-t5-xxl</a>",
                    "ArguAna": 53.77,
                    "CQADupstackRetrieval": 38.56,
                    "ClimateFEVER": 27.21,
                    "DBPedia": 41.28,
                    "FEVER": 74.08,
                    "FiQA2018": 46.78,
                    "HotpotQA": 59.67,
                    "MSMARCO": 44.05,
                    "NFCorpus": 34.18,
                    "NQ": 57.24,
                    "QuoraRetrieval": 89.09,
                    "SCIDOCS": 15.88,
                    "SciFact": 66.77,
                    "TRECCOVID": 51.9,
                    "Touche2020": 26.76
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/gtr-t5-xxl\">gtr-t5-xxl</a>",
                    "BIOSSES": 81.91,
                    "SICK-R": 74.29,
                    "STS12": 70.12,
                    "STS13": 82.72,
                    "STS14": 78.24,
                    "STS15": 86.26,
                    "STS16": 81.61,
                    "STS17 (en-en)": 85.18,
                    "STS22 (en)": 65.76,
                    "STSBenchmark": 77.73
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/gtr-t5-xxl\">gtr-t5-xxl</a>",
                    "SummEval": 30.64
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/sentence-transformers/gtr-t5-xxl\">gtr-t5-xxl</a>"
                }
            ]
        }
    },
    "gte-Qwen1.5-7B-instruct": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/Alibaba-NLP/gte-Qwen1.5-7B-instruct\">gte-Qwen1.5-7B-instruct</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/Alibaba-NLP/gte-Qwen1.5-7B-instruct\">gte-Qwen1.5-7B-instruct</a>",
                    "AmazonCounterfactualClassification (en)": 83.16,
                    "AmazonPolarityClassification": 96.7,
                    "AmazonReviewsClassification (en)": 62.17,
                    "AmazonReviewsClassification (zh)": 52.95,
                    "Banking77Classification": 81.68,
                    "EmotionClassification": 54.53,
                    "IFlyTek": 53.77,
                    "ImdbClassification": 95.58,
                    "JDReview": 88.2,
                    "MTOPDomainClassification (en)": 95.75,
                    "MTOPIntentClassification (en)": 84.26,
                    "MassiveIntentClassification (zh-CN)": 76.25,
                    "MassiveIntentClassification (en)": 78.47,
                    "MassiveScenarioClassification (en)": 78.19,
                    "MassiveScenarioClassification (zh-CN)": 77.26,
                    "MultilingualSentiment": 77.42,
                    "OnlineShopping": 94.48,
                    "TNews": 51.24,
                    "ToxicConversationsClassification": 78.75,
                    "TweetSentimentExtractionClassification": 66.0,
                    "Waimai": 88.63
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/Alibaba-NLP/gte-Qwen1.5-7B-instruct\">gte-Qwen1.5-7B-instruct</a>",
                    "ArxivClusteringP2P": 56.4,
                    "ArxivClusteringS2S": 51.45,
                    "BiorxivClusteringP2P": 49.01,
                    "BiorxivClusteringS2S": 45.06,
                    "CLSClusteringP2P": 47.21,
                    "CLSClusteringS2S": 45.79,
                    "MedrxivClusteringP2P": 44.37,
                    "MedrxivClusteringS2S": 42.0,
                    "RedditClustering": 73.37,
                    "RedditClusteringP2P": 72.51,
                    "StackExchangeClustering": 79.07,
                    "StackExchangeClusteringP2P": 49.57,
                    "ThuNewsClusteringP2P": 87.43,
                    "ThuNewsClusteringS2S": 87.9,
                    "TwentyNewsgroupsClustering": 51.31
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/Alibaba-NLP/gte-Qwen1.5-7B-instruct\">gte-Qwen1.5-7B-instruct</a>",
                    "Cmnli": 91.81,
                    "Ocnli": 85.22,
                    "SprintDuplicateQuestions": 95.99,
                    "TwitterSemEval2015": 79.36,
                    "TwitterURLCorpus": 86.79
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/Alibaba-NLP/gte-Qwen1.5-7B-instruct\">gte-Qwen1.5-7B-instruct</a>",
                    "AskUbuntuDupQuestions": 66.0,
                    "CMedQAv1": 86.37,
                    "CMedQAv2": 87.41,
                    "MindSmallReranking": 32.71,
                    "SciDocsRR": 87.89,
                    "StackOverflowDupQuestions": 53.93,
                    "T2Reranking": 68.11
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/Alibaba-NLP/gte-Qwen1.5-7B-instruct\">gte-Qwen1.5-7B-instruct</a>",
                    "ArguAna": 62.65,
                    "CQADupstackRetrieval": 40.64,
                    "ClimateFEVER": 44.0,
                    "CmedqaRetrieval": 43.47,
                    "CovidRetrieval": 80.87,
                    "DBPedia": 48.04,
                    "DuRetrieval": 86.01,
                    "EcomRetrieval": 66.46,
                    "FEVER": 93.35,
                    "FiQA2018": 55.31,
                    "HotpotQA": 72.25,
                    "MMarcoRetrieval": 73.83,
                    "MSMARCO": 41.68,
                    "MedicalRetrieval": 61.33,
                    "NFCorpus": 38.25,
                    "NQ": 61.79,
                    "QuoraRetrieval": 89.61,
                    "SCIDOCS": 27.69,
                    "SciFact": 75.31,
                    "T2Retrieval": 83.58,
                    "TRECCOVID": 72.72,
                    "Touche2020": 20.3,
                    "VideoRetrieval": 69.41
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/Alibaba-NLP/gte-Qwen1.5-7B-instruct\">gte-Qwen1.5-7B-instruct</a>",
                    "AFQMC": 58.47,
                    "ATEC": 55.46,
                    "BIOSSES": 81.12,
                    "BQ": 77.59,
                    "LCQMC": 76.29,
                    "PAWSX": 50.22,
                    "QBQTC": 31.82,
                    "SICK-R": 79.15,
                    "STS12": 76.52,
                    "STS13": 88.63,
                    "STS14": 83.32,
                    "STS15": 87.5,
                    "STS16": 86.39,
                    "STS17 (en-en)": 87.79,
                    "STS22 (en)": 66.4,
                    "STS22 (zh)": 67.36,
                    "STSB": 81.37,
                    "STSBenchmark": 87.35
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/Alibaba-NLP/gte-Qwen1.5-7B-instruct\">gte-Qwen1.5-7B-instruct</a>",
                    "SummEval": 31.46
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/Alibaba-NLP/gte-Qwen1.5-7B-instruct\">gte-Qwen1.5-7B-instruct</a>"
                }
            ]
        }
    },
    "elser-v2": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://www.elastic.co/guide/en/machine-learning/current/ml-nlp-elser.html\">elser-v2</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://www.elastic.co/guide/en/machine-learning/current/ml-nlp-elser.html\">elser-v2</a>",
                    "AmazonCounterfactualClassification (en)": 74.16,
                    "AmazonPolarityClassification": 61.91,
                    "AmazonReviewsClassification (en)": 32.06,
                    "Banking77Classification": 82.05,
                    "EmotionClassification": 46.65,
                    "ImdbClassification": 65.02,
                    "MTOPDomainClassification (en)": 93.17,
                    "MTOPIntentClassification (en)": 71.1,
                    "MassiveIntentClassification (en)": 68.48,
                    "MassiveScenarioClassification (en)": 74.98,
                    "ToxicConversationsClassification": 68.15,
                    "TweetSentimentExtractionClassification": 53.57
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://www.elastic.co/guide/en/machine-learning/current/ml-nlp-elser.html\">elser-v2</a>",
                    "ArxivClusteringP2P": 35.27,
                    "ArxivClusteringS2S": 23.18,
                    "BiorxivClusteringP2P": 31.13,
                    "BiorxivClusteringS2S": 26.78,
                    "MedrxivClusteringP2P": 24.65,
                    "MedrxivClusteringS2S": 24.21,
                    "RedditClustering": 38.74,
                    "RedditClusteringP2P": 51.92,
                    "StackExchangeClustering": 42.7,
                    "StackExchangeClusteringP2P": 28.7,
                    "TwentyNewsgroupsClustering": 27.82
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://www.elastic.co/guide/en/machine-learning/current/ml-nlp-elser.html\">elser-v2</a>",
                    "SprintDuplicateQuestions": 94.53,
                    "TwitterSemEval2015": 64.41,
                    "TwitterURLCorpus": 85.01
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://www.elastic.co/guide/en/machine-learning/current/ml-nlp-elser.html\">elser-v2</a>",
                    "AskUbuntuDupQuestions": 58.31,
                    "MindSmallReranking": 30.75,
                    "SciDocsRR": 75.62,
                    "StackOverflowDupQuestions": 48.4
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://www.elastic.co/guide/en/machine-learning/current/ml-nlp-elser.html\">elser-v2</a>",
                    "ArguAna": 55.98,
                    "CQADupstackRetrieval": 34.27,
                    "ClimateFEVER": 27.08,
                    "DBPedia": 42.7,
                    "FEVER": 78.55,
                    "FiQA2018": 41.57,
                    "HotpotQA": 67.01,
                    "MSMARCO": 38.9,
                    "NFCorpus": 36.66,
                    "NQ": 55.84,
                    "QuoraRetrieval": 84.69,
                    "SCIDOCS": 16.24,
                    "SciFact": 71.8,
                    "TRECCOVID": 72.72,
                    "Touche2020": 26.27
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://www.elastic.co/guide/en/machine-learning/current/ml-nlp-elser.html\">elser-v2</a>",
                    "BIOSSES": 83.79,
                    "SICK-R": 68.78,
                    "STS12": 64.81,
                    "STS13": 80.1,
                    "STS14": 74.96,
                    "STS15": 83.7,
                    "STS16": 80.55,
                    "STS17 (en-en)": 85.74,
                    "STS22 (en)": 67.5,
                    "STSBenchmark": 79.54
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://www.elastic.co/guide/en/machine-learning/current/ml-nlp-elser.html\">elser-v2</a>",
                    "SummEval": 31.03
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://www.elastic.co/guide/en/machine-learning/current/ml-nlp-elser.html\">elser-v2</a>"
                }
            ]
        }
    },
    "e5-mistral-7b-instruct": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/e5-mistral-7b-instruct\">e5-mistral-7b-instruct</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/e5-mistral-7b-instruct\">e5-mistral-7b-instruct</a>",
                    "AmazonReviewsClassification (fr)": 36.71,
                    "MTOPDomainClassification (fr)": 74.8,
                    "MTOPIntentClassification (fr)": 53.97,
                    "MasakhaNEWSClassification (fra)": 80.59,
                    "MassiveIntentClassification (fr)": 46.39,
                    "MassiveScenarioClassification (fr)": 53.86
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/e5-mistral-7b-instruct\">e5-mistral-7b-instruct</a>",
                    "AlloProfClusteringP2P": 61.06,
                    "AlloProfClusteringS2S": 28.12,
                    "HALClusteringS2S": 19.69,
                    "MLSUMClusteringP2P": 45.59,
                    "MLSUMClusteringS2S": 32.0,
                    "MasakhaNEWSClusteringP2P (fra)": 52.47,
                    "MasakhaNEWSClusteringS2S (fra)": 49.2
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/e5-mistral-7b-instruct\">e5-mistral-7b-instruct</a>",
                    "OpusparcusPC (fr)": 88.5,
                    "PawsX (fr)": 63.65
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/e5-mistral-7b-instruct\">e5-mistral-7b-instruct</a>",
                    "AlloprofReranking": 47.36,
                    "SyntecReranking": 77.05
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/e5-mistral-7b-instruct\">e5-mistral-7b-instruct</a>",
                    "AILACasedocs": 38.76,
                    "AILAStatutes": 38.07,
                    "AlloprofRetrieval": 16.46,
                    "BSARDRetrieval": 0.0,
                    "GerDaLIRSmall": 37.18,
                    "LEMBNarrativeQARetrieval": 44.62,
                    "LEMBNeedleRetrieval": 48.25,
                    "LEMBPasskeyRetrieval": 71.0,
                    "LEMBQMSumRetrieval": 43.63,
                    "LEMBSummScreenFDRetrieval": 96.82,
                    "LEMBWikimQARetrieval": 82.11,
                    "LeCaRDv2": 68.56,
                    "LegalBenchConsumerContractsQA": 75.46,
                    "LegalBenchCorporateLobbying": 94.01,
                    "LegalQuAD": 59.64,
                    "LegalSummarization": 66.51,
                    "MintakaRetrieval (fr)": 3.57,
                    "SyntecRetrieval": 55.9,
                    "XPQARetrieval (fr)": 41.29
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/e5-mistral-7b-instruct\">e5-mistral-7b-instruct</a>",
                    "SICKFr": 64.39,
                    "STS22 (fr)": 69.82,
                    "STSBenchmarkMultilingualSTS (fr)": 61.87
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/e5-mistral-7b-instruct\">e5-mistral-7b-instruct</a>",
                    "SummEvalFr": 32.22
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/e5-mistral-7b-instruct\">e5-mistral-7b-instruct</a>",
                    "Core17InstructionRetrieval": 0.09,
                    "News21InstructionRetrieval": -0.86,
                    "Robust04InstructionRetrieval": -9.59
                }
            ]
        }
    },
    "voyage-multilingual-2": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://docs.voyageai.com/embeddings/\">voyage-multilingual-2</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://docs.voyageai.com/embeddings/\">voyage-multilingual-2</a>",
                    "AmazonReviewsClassification (fr)": 43.36,
                    "MTOPDomainClassification (fr)": 90.33,
                    "MTOPIntentClassification (fr)": 60.52,
                    "MasakhaNEWSClassification (fra)": 74.81,
                    "MassiveIntentClassification (fr)": 68.06,
                    "MassiveScenarioClassification (fr)": 74.29
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://docs.voyageai.com/embeddings/\">voyage-multilingual-2</a>",
                    "AlloProfClusteringP2P": 65.37,
                    "AlloProfClusteringS2S": 47.03,
                    "HALClusteringS2S": 27.67,
                    "MLSUMClusteringP2P (fr)": 45.99,
                    "MLSUMClusteringS2S (fr)": 45.57,
                    "MasakhaNEWSClusteringP2P (fra)": 44.53,
                    "MasakhaNEWSClusteringS2S (fra)": 49.8
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://docs.voyageai.com/embeddings/\">voyage-multilingual-2</a>",
                    "OpusparcusPC (fr)": 93.68,
                    "PawsX (fr)": 63.64
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://docs.voyageai.com/embeddings/\">voyage-multilingual-2</a>",
                    "AlloprofReranking": 74.78,
                    "SyntecReranking": 90.4
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://docs.voyageai.com/embeddings/\">voyage-multilingual-2</a>",
                    "AlloprofRetrieval": 58.27,
                    "BSARDRetrieval": 5.14,
                    "LEMBNarrativeQARetrieval": 64.69,
                    "LEMBNeedleRetrieval": 75.25,
                    "LEMBPasskeyRetrieval": 97.0,
                    "LEMBQMSumRetrieval": 51.49,
                    "LEMBSummScreenFDRetrieval": 99.11,
                    "LEMBWikimQARetrieval": 87.49,
                    "MintakaRetrieval (fr)": 49.19,
                    "SyntecRetrieval": 87.28,
                    "XPQARetrieval (fr)": 72.92
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://docs.voyageai.com/embeddings/\">voyage-multilingual-2</a>",
                    "SICKFr": 74.9,
                    "STS22 (fr)": 82.76,
                    "STSBenchmarkMultilingualSTS (fr)": 82.72
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://docs.voyageai.com/embeddings/\">voyage-multilingual-2</a>",
                    "SummEvalFr": 29.96
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://docs.voyageai.com/embeddings/\">voyage-multilingual-2</a>"
                }
            ]
        }
    },
    "bge-m3": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/BAAI/bge-m3\">bge-m3</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/BAAI/bge-m3\">bge-m3</a>"
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/BAAI/bge-m3\">bge-m3</a>"
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/BAAI/bge-m3\">bge-m3</a>"
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/BAAI/bge-m3\">bge-m3</a>"
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/BAAI/bge-m3\">bge-m3</a>",
                    "LEMBNarrativeQARetrieval": 45.76,
                    "LEMBNeedleRetrieval": 40.25,
                    "LEMBPasskeyRetrieval": 46.0,
                    "LEMBQMSumRetrieval": 35.54,
                    "LEMBSummScreenFDRetrieval": 94.09,
                    "LEMBWikimQARetrieval": 77.73
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/BAAI/bge-m3\">bge-m3</a>"
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/BAAI/bge-m3\">bge-m3</a>"
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/BAAI/bge-m3\">bge-m3</a>"
                }
            ]
        }
    },
    "jina-embeddings-v2-base-en": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/jinaai/jina-embeddings-v2-base-en\">jina-embeddings-v2-base-en</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/jinaai/jina-embeddings-v2-base-en\">jina-embeddings-v2-base-en</a>"
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/jinaai/jina-embeddings-v2-base-en\">jina-embeddings-v2-base-en</a>"
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/jinaai/jina-embeddings-v2-base-en\">jina-embeddings-v2-base-en</a>"
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/jinaai/jina-embeddings-v2-base-en\">jina-embeddings-v2-base-en</a>"
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/jinaai/jina-embeddings-v2-base-en\">jina-embeddings-v2-base-en</a>",
                    "LEMBNarrativeQARetrieval": 37.89,
                    "LEMBNeedleRetrieval": 54.25,
                    "LEMBPasskeyRetrieval": 50.25,
                    "LEMBQMSumRetrieval": 38.87,
                    "LEMBSummScreenFDRetrieval": 93.48,
                    "LEMBWikimQARetrieval": 73.99
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/jinaai/jina-embeddings-v2-base-en\">jina-embeddings-v2-base-en</a>"
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/jinaai/jina-embeddings-v2-base-en\">jina-embeddings-v2-base-en</a>"
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/jinaai/jina-embeddings-v2-base-en\">jina-embeddings-v2-base-en</a>"
                }
            ]
        }
    },
    "nomic-embed-text-v1": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/nomic-ai/nomic-embed-text-v1\">nomic-embed-text-v1</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/nomic-ai/nomic-embed-text-v1\">nomic-embed-text-v1</a>"
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/nomic-ai/nomic-embed-text-v1\">nomic-embed-text-v1</a>"
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/nomic-ai/nomic-embed-text-v1\">nomic-embed-text-v1</a>"
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/nomic-ai/nomic-embed-text-v1\">nomic-embed-text-v1</a>"
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/nomic-ai/nomic-embed-text-v1\">nomic-embed-text-v1</a>",
                    "LEMBNarrativeQARetrieval": 41.23,
                    "LEMBNeedleRetrieval": 39.5,
                    "LEMBPasskeyRetrieval": 44.75,
                    "LEMBQMSumRetrieval": 36.65,
                    "LEMBSummScreenFDRetrieval": 92.97,
                    "LEMBWikimQARetrieval": 73.75
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/nomic-ai/nomic-embed-text-v1\">nomic-embed-text-v1</a>"
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/nomic-ai/nomic-embed-text-v1\">nomic-embed-text-v1</a>"
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/nomic-ai/nomic-embed-text-v1\">nomic-embed-text-v1</a>"
                }
            ]
        }
    },
    "text-embedding-3-large": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/new-embedding-models-and-api-updates\">text-embedding-3-large</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/new-embedding-models-and-api-updates\">text-embedding-3-large</a>",
                    "AmazonCounterfactualClassification (en)": 78.93,
                    "AmazonPolarityClassification": 92.85,
                    "AmazonReviewsClassification (en)": 48.7,
                    "Banking77Classification": 85.69,
                    "EmotionClassification": 51.58,
                    "ImdbClassification": 87.67,
                    "MTOPDomainClassification (en)": 95.36,
                    "MTOPIntentClassification (en)": 75.07,
                    "MassiveIntentClassification (en)": 74.64,
                    "MassiveScenarioClassification (en)": 79.79,
                    "ToxicConversationsClassification": 72.92,
                    "TweetSentimentExtractionClassification": 62.22
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/new-embedding-models-and-api-updates\">text-embedding-3-large</a>",
                    "ArxivClusteringP2P": 49.01,
                    "ArxivClusteringS2S": 44.45,
                    "BiorxivClusteringP2P": 38.03,
                    "BiorxivClusteringS2S": 36.53,
                    "MedrxivClusteringP2P": 32.7,
                    "MedrxivClusteringS2S": 31.27,
                    "RedditClustering": 67.84,
                    "RedditClusteringP2P": 67.96,
                    "StackExchangeClustering": 76.26,
                    "StackExchangeClusteringP2P": 36.88,
                    "TwentyNewsgroupsClustering": 58.14
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/new-embedding-models-and-api-updates\">text-embedding-3-large</a>",
                    "SprintDuplicateQuestions": 92.25,
                    "TwitterSemEval2015": 77.13,
                    "TwitterURLCorpus": 87.78
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/new-embedding-models-and-api-updates\">text-embedding-3-large</a>",
                    "AskUbuntuDupQuestions": 65.03,
                    "MindSmallReranking": 29.86,
                    "SciDocsRR": 86.66,
                    "StackOverflowDupQuestions": 55.08
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/new-embedding-models-and-api-updates\">text-embedding-3-large</a>",
                    "AILACasedocs": 39.0,
                    "AILAStatutes": 41.31,
                    "ArguAna": 58.05,
                    "CQADupstackRetrieval": 47.54,
                    "ClimateFEVER": 30.27,
                    "DBPedia": 44.76,
                    "FEVER": 87.94,
                    "FiQA2018": 55.0,
                    "GerDaLIRSmall": 32.77,
                    "HotpotQA": 71.58,
                    "LEMBNarrativeQARetrieval": 44.09,
                    "LEMBNeedleRetrieval": 29.25,
                    "LEMBPasskeyRetrieval": 63.0,
                    "LEMBQMSumRetrieval": 32.49,
                    "LEMBSummScreenFDRetrieval": 84.8,
                    "LEMBWikimQARetrieval": 54.16,
                    "LeCaRDv2": 57.2,
                    "LegalBenchConsumerContractsQA": 79.39,
                    "LegalBenchCorporateLobbying": 95.09,
                    "LegalQuAD": 57.47,
                    "LegalSummarization": 71.55,
                    "MSMARCO": 40.24,
                    "NFCorpus": 42.07,
                    "NQ": 61.27,
                    "QuoraRetrieval": 89.05,
                    "SCIDOCS": 23.11,
                    "SciFact": 77.77,
                    "TRECCOVID": 79.56,
                    "Touche2020": 23.35
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/new-embedding-models-and-api-updates\">text-embedding-3-large</a>",
                    "BIOSSES": 84.68,
                    "SICK-R": 79.0,
                    "STS12": 72.84,
                    "STS13": 86.1,
                    "STS14": 81.15,
                    "STS15": 88.49,
                    "STS16": 85.08,
                    "STS17 (en-en)": 90.22,
                    "STS22 (en)": 66.14,
                    "STSBenchmark": 83.56
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/new-embedding-models-and-api-updates\">text-embedding-3-large</a>",
                    "SummEval": 29.92
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://openai.com/blog/new-embedding-models-and-api-updates\">text-embedding-3-large</a>",
                    "Core17InstructionRetrieval": -0.2,
                    "News21InstructionRetrieval": -2.03,
                    "Robust04InstructionRetrieval": -5.81
                }
            ]
        }
    },
    "e5-base": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/e5-base\">e5-base</a>",
                    "BornholmBitextMining": 40.09
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/e5-base\">e5-base</a>",
                    "AngryTweetsClassification": 45.06,
                    "DKHateClassification": 58.51,
                    "DanishPoliticalCommentsClassification": 28.43,
                    "LccSentimentClassification": 37.47,
                    "MassiveIntentClassification (da)": 44.25,
                    "MassiveIntentClassification (nb)": 41.57,
                    "MassiveIntentClassification (sv)": 41.34,
                    "MassiveScenarioClassification (da)": 52.99,
                    "MassiveScenarioClassification (nb)": 50.33,
                    "MassiveScenarioClassification (sv)": 50.0,
                    "NoRecClassification": 42.0,
                    "NordicLangClassification": 59.34,
                    "NorwegianParliament": 57.42,
                    "ScalaDaClassification": 50.08,
                    "ScalaNbClassification": 50.18
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/e5-base\">e5-base</a>"
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/e5-base\">e5-base</a>"
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/e5-base\">e5-base</a>"
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/e5-base\">e5-base</a>",
                    "LEMBNarrativeQARetrieval": 25.31,
                    "LEMBNeedleRetrieval": 28.5,
                    "LEMBPasskeyRetrieval": 33.25,
                    "LEMBQMSumRetrieval": 23.83,
                    "LEMBSummScreenFDRetrieval": 74.67,
                    "LEMBWikimQARetrieval": 55.85
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/e5-base\">e5-base</a>"
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/e5-base\">e5-base</a>"
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/intfloat/e5-base\">e5-base</a>"
                }
            ]
        }
    },
    "e5-base-4k": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/dwzhu/e5-base-4k\">e5-base-4k</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/dwzhu/e5-base-4k\">e5-base-4k</a>"
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/dwzhu/e5-base-4k\">e5-base-4k</a>"
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/dwzhu/e5-base-4k\">e5-base-4k</a>"
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/dwzhu/e5-base-4k\">e5-base-4k</a>"
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/dwzhu/e5-base-4k\">e5-base-4k</a>",
                    "LEMBNarrativeQARetrieval": 30.35,
                    "LEMBNeedleRetrieval": 41.5,
                    "LEMBPasskeyRetrieval": 67.25,
                    "LEMBQMSumRetrieval": 35.6,
                    "LEMBSummScreenFDRetrieval": 95.23,
                    "LEMBWikimQARetrieval": 69.19
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/dwzhu/e5-base-4k\">e5-base-4k</a>"
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/dwzhu/e5-base-4k\">e5-base-4k</a>"
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://huggingface.co/dwzhu/e5-base-4k\">e5-base-4k</a>"
                }
            ]
        }
    },
    "voyage-law-2": {
        "BitextMining": {
            "f1": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://docs.voyageai.com/embeddings/\">voyage-law-2</a>"
                }
            ]
        },
        "Classification": {
            "accuracy": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://docs.voyageai.com/embeddings/\">voyage-law-2</a>",
                    "AmazonReviewsClassification (fr)": 41.98,
                    "MTOPDomainClassification (fr)": 90.12,
                    "MTOPIntentClassification (fr)": 62.44,
                    "MasakhaNEWSClassification (fra)": 76.42,
                    "MassiveIntentClassification (fr)": 66.94,
                    "MassiveScenarioClassification (fr)": 72.78
                }
            ]
        },
        "Clustering": {
            "v_measure": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://docs.voyageai.com/embeddings/\">voyage-law-2</a>",
                    "AlloProfClusteringP2P": 62.5,
                    "AlloProfClusteringS2S": 44.28,
                    "HALClusteringS2S": 26.36,
                    "MLSUMClusteringP2P (fr)": 44.03,
                    "MLSUMClusteringS2S (fr)": 42.95,
                    "MasakhaNEWSClusteringP2P (fra)": 50.68,
                    "MasakhaNEWSClusteringS2S (fra)": 38.79
                }
            ]
        },
        "PairClassification": {
            "ap": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://docs.voyageai.com/embeddings/\">voyage-law-2</a>",
                    "OpusparcusPC (fr)": 93.06,
                    "PawsX (fr)": 61.54
                }
            ]
        },
        "Reranking": {
            "map": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://docs.voyageai.com/embeddings/\">voyage-law-2</a>",
                    "AlloprofReranking": 72.92,
                    "SyntecReranking": 91.2
                }
            ]
        },
        "Retrieval": {
            "ndcg_at_10": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://docs.voyageai.com/embeddings/\">voyage-law-2</a>",
                    "AILACasedocs": 44.56,
                    "AILAStatutes": 45.51,
                    "AlloprofRetrieval": 57.28,
                    "BSARDRetrieval": 11.83,
                    "GerDaLIRSmall": 44.91,
                    "LEMBNarrativeQARetrieval": 55.78,
                    "LEMBNeedleRetrieval": 80.5,
                    "LEMBPasskeyRetrieval": 93.75,
                    "LEMBQMSumRetrieval": 57.26,
                    "LEMBSummScreenFDRetrieval": 98.72,
                    "LEMBWikimQARetrieval": 87.08,
                    "LeCaRDv2": 72.75,
                    "LegalBenchConsumerContractsQA": 83.27,
                    "LegalBenchCorporateLobbying": 95.66,
                    "LegalQuAD": 67.47,
                    "LegalSummarization": 68.96,
                    "MintakaRetrieval (fr)": 34.92,
                    "SyntecRetrieval": 87.33,
                    "XPQARetrieval (fr)": 73.56
                }
            ]
        },
        "STS": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://docs.voyageai.com/embeddings/\">voyage-law-2</a>",
                    "SICKFr": 74.09,
                    "STS22 (fr)": 83.75,
                    "STSBenchmarkMultilingualSTS (fr)": 83.02
                }
            ]
        },
        "Summarization": {
            "spearman": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://docs.voyageai.com/embeddings/\">voyage-law-2</a>",
                    "SummEvalFr": 30.34
                }
            ]
        },
        "InstructionRetrieval": {
            "p-MRR": [
                {
                    "Model": "<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https://docs.voyageai.com/embeddings/\">voyage-law-2</a>"
                }
            ]
        }
    }
}