Update README.md
README.md (CHANGED)
@@ -4,6 +4,400 @@ language:
 - en
 tags:
 - sparse sparsity quantized onnx embeddings int8
+- mteb
+model-index:
+- name: bge-large-en-v1.5-quant
+  results:
+  - task:
+      type: Classification
+    dataset:
+      type: mteb/amazon_counterfactual
+      name: MTEB AmazonCounterfactualClassification (en)
+      config: en
+      split: test
+      revision: e8379541af4e31359cca9fbcf4b00f2671dba205
+    metrics:
+    - type: accuracy
+      value: 75.53731343283583
+    - type: ap
+      value: 38.30609312253564
+    - type: f1
+      value: 69.42802757893695
+  - task:
+      type: STS
+    dataset:
+      type: mteb/biosses-sts
+      name: MTEB BIOSSES
+      config: default
+      split: test
+      revision: d3fb88f8f02e40887cd149695127462bbcf29b4a
+    metrics:
+    - type: cos_sim_pearson
+      value: 89.27346145216443
+    - type: cos_sim_spearman
+      value: 88.36526647458979
+    - type: euclidean_pearson
+      value: 86.83053354694746
+    - type: euclidean_spearman
+      value: 87.56223612880584
+    - type: manhattan_pearson
+      value: 86.59250609226758
+    - type: manhattan_spearman
+      value: 87.70681773644885
+  - task:
+      type: STS
+    dataset:
+      type: mteb/sickr-sts
+      name: MTEB SICK-R
+      config: default
+      split: test
+      revision: a6ea5a8cab320b040a23452cc28066d9beae2cee
+    metrics:
+    - type: cos_sim_pearson
+      value: 86.18998669716373
+    - type: cos_sim_spearman
+      value: 82.06129973984048
+    - type: euclidean_pearson
+      value: 83.65969509485801
+    - type: euclidean_spearman
+      value: 81.91666612708826
+    - type: manhattan_pearson
+      value: 83.6906794731384
+    - type: manhattan_spearman
+      value: 81.91752705367436
+  - task:
+      type: STS
+    dataset:
+      type: mteb/sts12-sts
+      name: MTEB STS12
+      config: default
+      split: test
+      revision: a0d554a64d88156834ff5ae9920b964011b16384
+    metrics:
+    - type: cos_sim_pearson
+      value: 86.93407086985752
+    - type: cos_sim_spearman
+      value: 78.82992283957066
+    - type: euclidean_pearson
+      value: 83.39733473832982
+    - type: euclidean_spearman
+      value: 78.86999229850214
+    - type: manhattan_pearson
+      value: 83.39397058098533
+    - type: manhattan_spearman
+      value: 78.85397971200753
+  - task:
+      type: STS
+    dataset:
+      type: mteb/sts13-sts
+      name: MTEB STS13
+      config: default
+      split: test
+      revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca
+    metrics:
+    - type: cos_sim_pearson
+      value: 87.2586009863056
+    - type: cos_sim_spearman
+      value: 87.99415514558852
+    - type: euclidean_pearson
+      value: 86.98993652364359
+    - type: euclidean_spearman
+      value: 87.72725335668807
+    - type: manhattan_pearson
+      value: 86.897205761048
+    - type: manhattan_spearman
+      value: 87.65231103509018
+  - task:
+      type: STS
+    dataset:
+      type: mteb/sts14-sts
+      name: MTEB STS14
+      config: default
+      split: test
+      revision: 6031580fec1f6af667f0bd2da0a551cf4f0b2375
+    metrics:
+    - type: cos_sim_pearson
+      value: 85.41417660460755
+    - type: cos_sim_spearman
+      value: 83.50291886604928
+    - type: euclidean_pearson
+      value: 84.67758839660924
+    - type: euclidean_spearman
+      value: 83.4368059512681
+    - type: manhattan_pearson
+      value: 84.66027228213025
+    - type: manhattan_spearman
+      value: 83.43472054456252
+  - task:
+      type: STS
+    dataset:
+      type: mteb/sts15-sts
+      name: MTEB STS15
+      config: default
+      split: test
+      revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3
+    metrics:
+    - type: cos_sim_pearson
+      value: 88.02513262365703
+    - type: cos_sim_spearman
+      value: 89.00430907638267
+    - type: euclidean_pearson
+      value: 88.16290361497319
+    - type: euclidean_spearman
+      value: 88.6645154822661
+    - type: manhattan_pearson
+      value: 88.15337528825458
+    - type: manhattan_spearman
+      value: 88.66202950081507
+  - task:
+      type: STS
+    dataset:
+      type: mteb/sts16-sts
+      name: MTEB STS16
+      config: default
+      split: test
+      revision: 4d8694f8f0e0100860b497b999b3dbed754a0513
+    metrics:
+    - type: cos_sim_pearson
+      value: 85.10194022827035
+    - type: cos_sim_spearman
+      value: 86.45367112223394
+    - type: euclidean_pearson
+      value: 85.45292931769094
+    - type: euclidean_spearman
+      value: 86.06607589083283
+    - type: manhattan_pearson
+      value: 85.4111233047049
+    - type: manhattan_spearman
+      value: 86.04379654118996
+  - task:
+      type: STS
+    dataset:
+      type: mteb/sts17-crosslingual-sts
+      name: MTEB STS17 (en-en)
+      config: en-en
+      split: test
+      revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d
+    metrics:
+    - type: cos_sim_pearson
+      value: 89.86966589113663
+    - type: cos_sim_spearman
+      value: 89.5617056243649
+    - type: euclidean_pearson
+      value: 89.018495917952
+    - type: euclidean_spearman
+      value: 88.387335721179
+    - type: manhattan_pearson
+      value: 89.07568042943448
+    - type: manhattan_spearman
+      value: 88.51733863475219
+  - task:
+      type: STS
+    dataset:
+      type: mteb/sts22-crosslingual-sts
+      name: MTEB STS22 (en)
+      config: en
+      split: test
+      revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80
+    metrics:
+    - type: cos_sim_pearson
+      value: 68.38465344518238
+    - type: cos_sim_spearman
+      value: 68.15219488291783
+    - type: euclidean_pearson
+      value: 68.99169681132668
+    - type: euclidean_spearman
+      value: 68.01334641045888
+    - type: manhattan_pearson
+      value: 68.84952679202642
+    - type: manhattan_spearman
+      value: 67.85430179655137
+  - task:
+      type: STS
+    dataset:
+      type: mteb/stsbenchmark-sts
+      name: MTEB STSBenchmark
+      config: default
+      split: test
+      revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831
+    metrics:
+    - type: cos_sim_pearson
+      value: 86.60574360222778
+    - type: cos_sim_spearman
+      value: 87.8878986593873
+    - type: euclidean_pearson
+      value: 87.11557232168404
+    - type: euclidean_spearman
+      value: 87.40944677043365
+    - type: manhattan_pearson
+      value: 87.10395398212532
+    - type: manhattan_spearman
+      value: 87.35977283466168
+  - task:
+      type: PairClassification
+    dataset:
+      type: mteb/sprintduplicatequestions-pairclassification
+      name: MTEB SprintDuplicateQuestions
+      config: default
+      split: test
+      revision: d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46
+    metrics:
+    - type: cos_sim_accuracy
+      value: 99.84752475247525
+    - type: cos_sim_ap
+      value: 96.49316696572335
+    - type: cos_sim_f1
+      value: 92.35352532274081
+    - type: cos_sim_precision
+      value: 91.71597633136095
+    - type: cos_sim_recall
+      value: 93.0
+    - type: dot_accuracy
+      value: 99.77326732673268
+    - type: dot_ap
+      value: 93.5497681978726
+    - type: dot_f1
+      value: 88.35582208895552
+    - type: dot_precision
+      value: 88.31168831168831
+    - type: dot_recall
+      value: 88.4
+    - type: euclidean_accuracy
+      value: 99.84653465346534
+    - type: euclidean_ap
+      value: 96.36378999360083
+    - type: euclidean_f1
+      value: 92.33052944087086
+    - type: euclidean_precision
+      value: 91.38099902056807
+    - type: euclidean_recall
+      value: 93.30000000000001
+    - type: manhattan_accuracy
+      value: 99.84455445544555
+    - type: manhattan_ap
+      value: 96.36035171233175
+    - type: manhattan_f1
+      value: 92.13260761999011
+    - type: manhattan_precision
+      value: 91.1851126346719
+    - type: manhattan_recall
+      value: 93.10000000000001
+    - type: max_accuracy
+      value: 99.84752475247525
+    - type: max_ap
+      value: 96.49316696572335
+    - type: max_f1
+      value: 92.35352532274081
+  - task:
+      type: PairClassification
+    dataset:
+      type: mteb/twittersemeval2015-pairclassification
+      name: MTEB TwitterSemEval2015
+      config: default
+      split: test
+      revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1
+    metrics:
+    - type: cos_sim_accuracy
+      value: 87.26828396018358
+    - type: cos_sim_ap
+      value: 77.79878217023162
+    - type: cos_sim_f1
+      value: 71.0425694621463
+    - type: cos_sim_precision
+      value: 68.71301775147928
+    - type: cos_sim_recall
+      value: 73.53562005277044
+    - type: dot_accuracy
+      value: 84.01978899684092
+    - type: dot_ap
+      value: 66.12134149171163
+    - type: dot_f1
+      value: 63.283507097098365
+    - type: dot_precision
+      value: 60.393191081275475
+    - type: dot_recall
+      value: 66.46437994722955
+    - type: euclidean_accuracy
+      value: 87.24444179531503
+    - type: euclidean_ap
+      value: 77.84821131946212
+    - type: euclidean_f1
+      value: 71.30456661215247
+    - type: euclidean_precision
+      value: 68.1413801394566
+    - type: euclidean_recall
+      value: 74.77572559366754
+    - type: manhattan_accuracy
+      value: 87.19079692436074
+    - type: manhattan_ap
+      value: 77.78054941055291
+    - type: manhattan_f1
+      value: 71.13002127393318
+    - type: manhattan_precision
+      value: 67.65055939062128
+    - type: manhattan_recall
+      value: 74.9868073878628
+    - type: max_accuracy
+      value: 87.26828396018358
+    - type: max_ap
+      value: 77.84821131946212
+    - type: max_f1
+      value: 71.30456661215247
+  - task:
+      type: PairClassification
+    dataset:
+      type: mteb/twitterurlcorpus-pairclassification
+      name: MTEB TwitterURLCorpus
+      config: default
+      split: test
+      revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf
+    metrics:
+    - type: cos_sim_accuracy
+      value: 88.91023402025847
+    - type: cos_sim_ap
+      value: 85.94088151184411
+    - type: cos_sim_f1
+      value: 78.25673997223645
+    - type: cos_sim_precision
+      value: 74.45433059919367
+    - type: cos_sim_recall
+      value: 82.46843239913767
+    - type: dot_accuracy
+      value: 87.91865564481701
+    - type: dot_ap
+      value: 82.75373957440969
+    - type: dot_f1
+      value: 75.97383507276201
+    - type: dot_precision
+      value: 72.67294713160854
+    - type: dot_recall
+      value: 79.5888512473052
+    - type: euclidean_accuracy
+      value: 88.8539604921023
+    - type: euclidean_ap
+      value: 85.71590936389937
+    - type: euclidean_f1
+      value: 77.82902261742242
+    - type: euclidean_precision
+      value: 74.7219270279844
+    - type: euclidean_recall
+      value: 81.20572836464429
+    - type: manhattan_accuracy
+      value: 88.78992509799356
+    - type: manhattan_ap
+      value: 85.70200619366904
+    - type: manhattan_f1
+      value: 77.85875848203065
+    - type: manhattan_precision
+      value: 72.94315506222671
+    - type: manhattan_recall
+      value: 83.48475515860795
+    - type: max_accuracy
+      value: 88.91023402025847
+    - type: max_ap
+      value: 85.94088151184411
+    - type: max_f1
+      value: 78.25673997223645
 ---
 
 # bge-large-en-v1.5-quant
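
The tags above describe an int8-quantized ONNX export of BGE-large used for sentence embeddings. Below is a minimal sketch of producing embeddings from such an export with `onnxruntime`, assuming the repository ships a `model.onnx`, a standard tokenizer config, and that output 0 of the graph is the last hidden state; the repo id and file path are hypothetical placeholders, not confirmed by this change.

```python
import numpy as np
import onnxruntime as ort
from transformers import AutoTokenizer

# Hypothetical identifiers; adjust to the actual artifacts published with the model.
repo_id = "bge-large-en-v1.5-quant"
tokenizer = AutoTokenizer.from_pretrained(repo_id)
session = ort.InferenceSession("model.onnx", providers=["CPUExecutionProvider"])

def embed(texts):
    enc = tokenizer(texts, padding=True, truncation=True, return_tensors="np")
    # Feed only the inputs the exported graph declares (some exports drop token_type_ids).
    input_names = {i.name for i in session.get_inputs()}
    feed = {k: v for k, v in enc.items() if k in input_names}
    last_hidden = session.run(None, feed)[0]  # assumed shape: (batch, seq, hidden)
    cls = last_hidden[:, 0]                   # BGE models pool with the [CLS] token
    return cls / np.linalg.norm(cls, axis=1, keepdims=True)

a, b = embed(["quantized onnx embeddings", "int8 sentence embeddings"])
print(float(a @ b))  # cosine similarity of the two normalized embeddings
```

Because the vectors are L2-normalized, the dot product above equals the cosine similarity reported in the `cos_sim_*` MTEB metrics in the frontmatter.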