Youssefbou62 commited on
Commit
578ea8a
·
verified ·
1 Parent(s): cf34239

Delete README.md

Browse files
Files changed (1) hide show
  1. README.md +0 -2700
README.md DELETED
@@ -1,2700 +0,0 @@
1
- ---
2
- tags:
3
- - mteb
4
- - sentence-similarity
5
- - sentence-transformers
6
- - Sentence Transformers
7
- model-index:
8
- - name: gte-large
9
- results:
10
- - task:
11
- type: Classification
12
- dataset:
13
- type: mteb/amazon_counterfactual
14
- name: MTEB AmazonCounterfactualClassification (en)
15
- config: en
16
- split: test
17
- revision: e8379541af4e31359cca9fbcf4b00f2671dba205
18
- metrics:
19
- - type: accuracy
20
- value: 72.62686567164178
21
- - type: ap
22
- value: 34.46944126809772
23
- - type: f1
24
- value: 66.23684353950857
25
- - task:
26
- type: Classification
27
- dataset:
28
- type: mteb/amazon_polarity
29
- name: MTEB AmazonPolarityClassification
30
- config: default
31
- split: test
32
- revision: e2d317d38cd51312af73b3d32a06d1a08b442046
33
- metrics:
34
- - type: accuracy
35
- value: 92.51805
36
- - type: ap
37
- value: 89.49842783330848
38
- - type: f1
39
- value: 92.51112169431808
40
- - task:
41
- type: Classification
42
- dataset:
43
- type: mteb/amazon_reviews_multi
44
- name: MTEB AmazonReviewsClassification (en)
45
- config: en
46
- split: test
47
- revision: 1399c76144fd37290681b995c656ef9b2e06e26d
48
- metrics:
49
- - type: accuracy
50
- value: 49.074
51
- - type: f1
52
- value: 48.44785682572955
53
- - task:
54
- type: Retrieval
55
- dataset:
56
- type: arguana
57
- name: MTEB ArguAna
58
- config: default
59
- split: test
60
- revision: None
61
- metrics:
62
- - type: map_at_1
63
- value: 32.077
64
- - type: map_at_10
65
- value: 48.153
66
- - type: map_at_100
67
- value: 48.963
68
- - type: map_at_1000
69
- value: 48.966
70
- - type: map_at_3
71
- value: 43.184
72
- - type: map_at_5
73
- value: 46.072
74
- - type: mrr_at_1
75
- value: 33.073
76
- - type: mrr_at_10
77
- value: 48.54
78
- - type: mrr_at_100
79
- value: 49.335
80
- - type: mrr_at_1000
81
- value: 49.338
82
- - type: mrr_at_3
83
- value: 43.563
84
- - type: mrr_at_5
85
- value: 46.383
86
- - type: ndcg_at_1
87
- value: 32.077
88
- - type: ndcg_at_10
89
- value: 57.158
90
- - type: ndcg_at_100
91
- value: 60.324999999999996
92
- - type: ndcg_at_1000
93
- value: 60.402
94
- - type: ndcg_at_3
95
- value: 46.934
96
- - type: ndcg_at_5
97
- value: 52.158
98
- - type: precision_at_1
99
- value: 32.077
100
- - type: precision_at_10
101
- value: 8.591999999999999
102
- - type: precision_at_100
103
- value: 0.991
104
- - type: precision_at_1000
105
- value: 0.1
106
- - type: precision_at_3
107
- value: 19.275000000000002
108
- - type: precision_at_5
109
- value: 14.111
110
- - type: recall_at_1
111
- value: 32.077
112
- - type: recall_at_10
113
- value: 85.917
114
- - type: recall_at_100
115
- value: 99.075
116
- - type: recall_at_1000
117
- value: 99.644
118
- - type: recall_at_3
119
- value: 57.824
120
- - type: recall_at_5
121
- value: 70.555
122
- - task:
123
- type: Clustering
124
- dataset:
125
- type: mteb/arxiv-clustering-p2p
126
- name: MTEB ArxivClusteringP2P
127
- config: default
128
- split: test
129
- revision: a122ad7f3f0291bf49cc6f4d32aa80929df69d5d
130
- metrics:
131
- - type: v_measure
132
- value: 48.619246083417295
133
- - task:
134
- type: Clustering
135
- dataset:
136
- type: mteb/arxiv-clustering-s2s
137
- name: MTEB ArxivClusteringS2S
138
- config: default
139
- split: test
140
- revision: f910caf1a6075f7329cdf8c1a6135696f37dbd53
141
- metrics:
142
- - type: v_measure
143
- value: 43.3574067664688
144
- - task:
145
- type: Reranking
146
- dataset:
147
- type: mteb/askubuntudupquestions-reranking
148
- name: MTEB AskUbuntuDupQuestions
149
- config: default
150
- split: test
151
- revision: 2000358ca161889fa9c082cb41daa8dcfb161a54
152
- metrics:
153
- - type: map
154
- value: 63.06359661829253
155
- - type: mrr
156
- value: 76.15596007562766
157
- - task:
158
- type: STS
159
- dataset:
160
- type: mteb/biosses-sts
161
- name: MTEB BIOSSES
162
- config: default
163
- split: test
164
- revision: d3fb88f8f02e40887cd149695127462bbcf29b4a
165
- metrics:
166
- - type: cos_sim_pearson
167
- value: 90.25407547368691
168
- - type: cos_sim_spearman
169
- value: 88.65081514968477
170
- - type: euclidean_pearson
171
- value: 88.14857116664494
172
- - type: euclidean_spearman
173
- value: 88.50683596540692
174
- - type: manhattan_pearson
175
- value: 87.9654797992225
176
- - type: manhattan_spearman
177
- value: 88.21164851646908
178
- - task:
179
- type: Classification
180
- dataset:
181
- type: mteb/banking77
182
- name: MTEB Banking77Classification
183
- config: default
184
- split: test
185
- revision: 0fd18e25b25c072e09e0d92ab615fda904d66300
186
- metrics:
187
- - type: accuracy
188
- value: 86.05844155844157
189
- - type: f1
190
- value: 86.01555597681825
191
- - task:
192
- type: Clustering
193
- dataset:
194
- type: mteb/biorxiv-clustering-p2p
195
- name: MTEB BiorxivClusteringP2P
196
- config: default
197
- split: test
198
- revision: 65b79d1d13f80053f67aca9498d9402c2d9f1f40
199
- metrics:
200
- - type: v_measure
201
- value: 39.10510519739522
202
- - task:
203
- type: Clustering
204
- dataset:
205
- type: mteb/biorxiv-clustering-s2s
206
- name: MTEB BiorxivClusteringS2S
207
- config: default
208
- split: test
209
- revision: 258694dd0231531bc1fd9de6ceb52a0853c6d908
210
- metrics:
211
- - type: v_measure
212
- value: 36.84689960264385
213
- - task:
214
- type: Retrieval
215
- dataset:
216
- type: BeIR/cqadupstack
217
- name: MTEB CQADupstackAndroidRetrieval
218
- config: default
219
- split: test
220
- revision: None
221
- metrics:
222
- - type: map_at_1
223
- value: 32.800000000000004
224
- - type: map_at_10
225
- value: 44.857
226
- - type: map_at_100
227
- value: 46.512
228
- - type: map_at_1000
229
- value: 46.635
230
- - type: map_at_3
231
- value: 41.062
232
- - type: map_at_5
233
- value: 43.126
234
- - type: mrr_at_1
235
- value: 39.628
236
- - type: mrr_at_10
237
- value: 50.879
238
- - type: mrr_at_100
239
- value: 51.605000000000004
240
- - type: mrr_at_1000
241
- value: 51.641000000000005
242
- - type: mrr_at_3
243
- value: 48.14
244
- - type: mrr_at_5
245
- value: 49.835
246
- - type: ndcg_at_1
247
- value: 39.628
248
- - type: ndcg_at_10
249
- value: 51.819
250
- - type: ndcg_at_100
251
- value: 57.318999999999996
252
- - type: ndcg_at_1000
253
- value: 58.955999999999996
254
- - type: ndcg_at_3
255
- value: 46.409
256
- - type: ndcg_at_5
257
- value: 48.825
258
- - type: precision_at_1
259
- value: 39.628
260
- - type: precision_at_10
261
- value: 10.072000000000001
262
- - type: precision_at_100
263
- value: 1.625
264
- - type: precision_at_1000
265
- value: 0.21
266
- - type: precision_at_3
267
- value: 22.556
268
- - type: precision_at_5
269
- value: 16.309
270
- - type: recall_at_1
271
- value: 32.800000000000004
272
- - type: recall_at_10
273
- value: 65.078
274
- - type: recall_at_100
275
- value: 87.491
276
- - type: recall_at_1000
277
- value: 97.514
278
- - type: recall_at_3
279
- value: 49.561
280
- - type: recall_at_5
281
- value: 56.135999999999996
282
- - task:
283
- type: Retrieval
284
- dataset:
285
- type: BeIR/cqadupstack
286
- name: MTEB CQADupstackEnglishRetrieval
287
- config: default
288
- split: test
289
- revision: None
290
- metrics:
291
- - type: map_at_1
292
- value: 32.614
293
- - type: map_at_10
294
- value: 43.578
295
- - type: map_at_100
296
- value: 44.897
297
- - type: map_at_1000
298
- value: 45.023
299
- - type: map_at_3
300
- value: 40.282000000000004
301
- - type: map_at_5
302
- value: 42.117
303
- - type: mrr_at_1
304
- value: 40.510000000000005
305
- - type: mrr_at_10
306
- value: 49.428
307
- - type: mrr_at_100
308
- value: 50.068999999999996
309
- - type: mrr_at_1000
310
- value: 50.111000000000004
311
- - type: mrr_at_3
312
- value: 47.176
313
- - type: mrr_at_5
314
- value: 48.583999999999996
315
- - type: ndcg_at_1
316
- value: 40.510000000000005
317
- - type: ndcg_at_10
318
- value: 49.478
319
- - type: ndcg_at_100
320
- value: 53.852
321
- - type: ndcg_at_1000
322
- value: 55.782
323
- - type: ndcg_at_3
324
- value: 45.091
325
- - type: ndcg_at_5
326
- value: 47.19
327
- - type: precision_at_1
328
- value: 40.510000000000005
329
- - type: precision_at_10
330
- value: 9.363000000000001
331
- - type: precision_at_100
332
- value: 1.51
333
- - type: precision_at_1000
334
- value: 0.196
335
- - type: precision_at_3
336
- value: 21.741
337
- - type: precision_at_5
338
- value: 15.465000000000002
339
- - type: recall_at_1
340
- value: 32.614
341
- - type: recall_at_10
342
- value: 59.782000000000004
343
- - type: recall_at_100
344
- value: 78.012
345
- - type: recall_at_1000
346
- value: 90.319
347
- - type: recall_at_3
348
- value: 46.825
349
- - type: recall_at_5
350
- value: 52.688
351
- - task:
352
- type: Retrieval
353
- dataset:
354
- type: BeIR/cqadupstack
355
- name: MTEB CQADupstackGamingRetrieval
356
- config: default
357
- split: test
358
- revision: None
359
- metrics:
360
- - type: map_at_1
361
- value: 40.266000000000005
362
- - type: map_at_10
363
- value: 53.756
364
- - type: map_at_100
365
- value: 54.809
366
- - type: map_at_1000
367
- value: 54.855
368
- - type: map_at_3
369
- value: 50.073
370
- - type: map_at_5
371
- value: 52.293
372
- - type: mrr_at_1
373
- value: 46.332
374
- - type: mrr_at_10
375
- value: 57.116
376
- - type: mrr_at_100
377
- value: 57.767
378
- - type: mrr_at_1000
379
- value: 57.791000000000004
380
- - type: mrr_at_3
381
- value: 54.461999999999996
382
- - type: mrr_at_5
383
- value: 56.092
384
- - type: ndcg_at_1
385
- value: 46.332
386
- - type: ndcg_at_10
387
- value: 60.092
388
- - type: ndcg_at_100
389
- value: 64.034
390
- - type: ndcg_at_1000
391
- value: 64.937
392
- - type: ndcg_at_3
393
- value: 54.071000000000005
394
- - type: ndcg_at_5
395
- value: 57.254000000000005
396
- - type: precision_at_1
397
- value: 46.332
398
- - type: precision_at_10
399
- value: 9.799
400
- - type: precision_at_100
401
- value: 1.278
402
- - type: precision_at_1000
403
- value: 0.13899999999999998
404
- - type: precision_at_3
405
- value: 24.368000000000002
406
- - type: precision_at_5
407
- value: 16.89
408
- - type: recall_at_1
409
- value: 40.266000000000005
410
- - type: recall_at_10
411
- value: 75.41499999999999
412
- - type: recall_at_100
413
- value: 92.01700000000001
414
- - type: recall_at_1000
415
- value: 98.379
416
- - type: recall_at_3
417
- value: 59.476
418
- - type: recall_at_5
419
- value: 67.297
420
- - task:
421
- type: Retrieval
422
- dataset:
423
- type: BeIR/cqadupstack
424
- name: MTEB CQADupstackGisRetrieval
425
- config: default
426
- split: test
427
- revision: None
428
- metrics:
429
- - type: map_at_1
430
- value: 28.589
431
- - type: map_at_10
432
- value: 37.755
433
- - type: map_at_100
434
- value: 38.881
435
- - type: map_at_1000
436
- value: 38.954
437
- - type: map_at_3
438
- value: 34.759
439
- - type: map_at_5
440
- value: 36.544
441
- - type: mrr_at_1
442
- value: 30.734
443
- - type: mrr_at_10
444
- value: 39.742
445
- - type: mrr_at_100
446
- value: 40.774
447
- - type: mrr_at_1000
448
- value: 40.824
449
- - type: mrr_at_3
450
- value: 37.137
451
- - type: mrr_at_5
452
- value: 38.719
453
- - type: ndcg_at_1
454
- value: 30.734
455
- - type: ndcg_at_10
456
- value: 42.978
457
- - type: ndcg_at_100
458
- value: 48.309000000000005
459
- - type: ndcg_at_1000
460
- value: 50.068
461
- - type: ndcg_at_3
462
- value: 37.361
463
- - type: ndcg_at_5
464
- value: 40.268
465
- - type: precision_at_1
466
- value: 30.734
467
- - type: precision_at_10
468
- value: 6.565
469
- - type: precision_at_100
470
- value: 0.964
471
- - type: precision_at_1000
472
- value: 0.11499999999999999
473
- - type: precision_at_3
474
- value: 15.744
475
- - type: precision_at_5
476
- value: 11.096
477
- - type: recall_at_1
478
- value: 28.589
479
- - type: recall_at_10
480
- value: 57.126999999999995
481
- - type: recall_at_100
482
- value: 81.051
483
- - type: recall_at_1000
484
- value: 94.027
485
- - type: recall_at_3
486
- value: 42.045
487
- - type: recall_at_5
488
- value: 49.019
489
- - task:
490
- type: Retrieval
491
- dataset:
492
- type: BeIR/cqadupstack
493
- name: MTEB CQADupstackMathematicaRetrieval
494
- config: default
495
- split: test
496
- revision: None
497
- metrics:
498
- - type: map_at_1
499
- value: 18.5
500
- - type: map_at_10
501
- value: 27.950999999999997
502
- - type: map_at_100
503
- value: 29.186
504
- - type: map_at_1000
505
- value: 29.298000000000002
506
- - type: map_at_3
507
- value: 25.141000000000002
508
- - type: map_at_5
509
- value: 26.848
510
- - type: mrr_at_1
511
- value: 22.637
512
- - type: mrr_at_10
513
- value: 32.572
514
- - type: mrr_at_100
515
- value: 33.472
516
- - type: mrr_at_1000
517
- value: 33.533
518
- - type: mrr_at_3
519
- value: 29.747
520
- - type: mrr_at_5
521
- value: 31.482
522
- - type: ndcg_at_1
523
- value: 22.637
524
- - type: ndcg_at_10
525
- value: 33.73
526
- - type: ndcg_at_100
527
- value: 39.568
528
- - type: ndcg_at_1000
529
- value: 42.201
530
- - type: ndcg_at_3
531
- value: 28.505999999999997
532
- - type: ndcg_at_5
533
- value: 31.255
534
- - type: precision_at_1
535
- value: 22.637
536
- - type: precision_at_10
537
- value: 6.281000000000001
538
- - type: precision_at_100
539
- value: 1.073
540
- - type: precision_at_1000
541
- value: 0.14300000000000002
542
- - type: precision_at_3
543
- value: 13.847000000000001
544
- - type: precision_at_5
545
- value: 10.224
546
- - type: recall_at_1
547
- value: 18.5
548
- - type: recall_at_10
549
- value: 46.744
550
- - type: recall_at_100
551
- value: 72.072
552
- - type: recall_at_1000
553
- value: 91.03999999999999
554
- - type: recall_at_3
555
- value: 32.551
556
- - type: recall_at_5
557
- value: 39.533
558
- - task:
559
- type: Retrieval
560
- dataset:
561
- type: BeIR/cqadupstack
562
- name: MTEB CQADupstackPhysicsRetrieval
563
- config: default
564
- split: test
565
- revision: None
566
- metrics:
567
- - type: map_at_1
568
- value: 30.602
569
- - type: map_at_10
570
- value: 42.18
571
- - type: map_at_100
572
- value: 43.6
573
- - type: map_at_1000
574
- value: 43.704
575
- - type: map_at_3
576
- value: 38.413000000000004
577
- - type: map_at_5
578
- value: 40.626
579
- - type: mrr_at_1
580
- value: 37.344
581
- - type: mrr_at_10
582
- value: 47.638000000000005
583
- - type: mrr_at_100
584
- value: 48.485
585
- - type: mrr_at_1000
586
- value: 48.52
587
- - type: mrr_at_3
588
- value: 44.867000000000004
589
- - type: mrr_at_5
590
- value: 46.566
591
- - type: ndcg_at_1
592
- value: 37.344
593
- - type: ndcg_at_10
594
- value: 48.632
595
- - type: ndcg_at_100
596
- value: 54.215
597
- - type: ndcg_at_1000
598
- value: 55.981
599
- - type: ndcg_at_3
600
- value: 42.681999999999995
601
- - type: ndcg_at_5
602
- value: 45.732
603
- - type: precision_at_1
604
- value: 37.344
605
- - type: precision_at_10
606
- value: 8.932
607
- - type: precision_at_100
608
- value: 1.376
609
- - type: precision_at_1000
610
- value: 0.17099999999999999
611
- - type: precision_at_3
612
- value: 20.276
613
- - type: precision_at_5
614
- value: 14.726
615
- - type: recall_at_1
616
- value: 30.602
617
- - type: recall_at_10
618
- value: 62.273
619
- - type: recall_at_100
620
- value: 85.12100000000001
621
- - type: recall_at_1000
622
- value: 96.439
623
- - type: recall_at_3
624
- value: 45.848
625
- - type: recall_at_5
626
- value: 53.615
627
- - task:
628
- type: Retrieval
629
- dataset:
630
- type: BeIR/cqadupstack
631
- name: MTEB CQADupstackProgrammersRetrieval
632
- config: default
633
- split: test
634
- revision: None
635
- metrics:
636
- - type: map_at_1
637
- value: 23.952
638
- - type: map_at_10
639
- value: 35.177
640
- - type: map_at_100
641
- value: 36.59
642
- - type: map_at_1000
643
- value: 36.703
644
- - type: map_at_3
645
- value: 31.261
646
- - type: map_at_5
647
- value: 33.222
648
- - type: mrr_at_1
649
- value: 29.337999999999997
650
- - type: mrr_at_10
651
- value: 40.152
652
- - type: mrr_at_100
653
- value: 40.963
654
- - type: mrr_at_1000
655
- value: 41.016999999999996
656
- - type: mrr_at_3
657
- value: 36.91
658
- - type: mrr_at_5
659
- value: 38.685
660
- - type: ndcg_at_1
661
- value: 29.337999999999997
662
- - type: ndcg_at_10
663
- value: 41.994
664
- - type: ndcg_at_100
665
- value: 47.587
666
- - type: ndcg_at_1000
667
- value: 49.791000000000004
668
- - type: ndcg_at_3
669
- value: 35.27
670
- - type: ndcg_at_5
671
- value: 38.042
672
- - type: precision_at_1
673
- value: 29.337999999999997
674
- - type: precision_at_10
675
- value: 8.276
676
- - type: precision_at_100
677
- value: 1.276
678
- - type: precision_at_1000
679
- value: 0.164
680
- - type: precision_at_3
681
- value: 17.161
682
- - type: precision_at_5
683
- value: 12.671
684
- - type: recall_at_1
685
- value: 23.952
686
- - type: recall_at_10
687
- value: 57.267
688
- - type: recall_at_100
689
- value: 80.886
690
- - type: recall_at_1000
691
- value: 95.611
692
- - type: recall_at_3
693
- value: 38.622
694
- - type: recall_at_5
695
- value: 45.811
696
- - task:
697
- type: Retrieval
698
- dataset:
699
- type: BeIR/cqadupstack
700
- name: MTEB CQADupstackRetrieval
701
- config: default
702
- split: test
703
- revision: None
704
- metrics:
705
- - type: map_at_1
706
- value: 27.092083333333335
707
- - type: map_at_10
708
- value: 37.2925
709
- - type: map_at_100
710
- value: 38.57041666666666
711
- - type: map_at_1000
712
- value: 38.68141666666667
713
- - type: map_at_3
714
- value: 34.080000000000005
715
- - type: map_at_5
716
- value: 35.89958333333333
717
- - type: mrr_at_1
718
- value: 31.94758333333333
719
- - type: mrr_at_10
720
- value: 41.51049999999999
721
- - type: mrr_at_100
722
- value: 42.36099999999999
723
- - type: mrr_at_1000
724
- value: 42.4125
725
- - type: mrr_at_3
726
- value: 38.849583333333335
727
- - type: mrr_at_5
728
- value: 40.448249999999994
729
- - type: ndcg_at_1
730
- value: 31.94758333333333
731
- - type: ndcg_at_10
732
- value: 43.17633333333333
733
- - type: ndcg_at_100
734
- value: 48.45241666666668
735
- - type: ndcg_at_1000
736
- value: 50.513999999999996
737
- - type: ndcg_at_3
738
- value: 37.75216666666667
739
- - type: ndcg_at_5
740
- value: 40.393833333333326
741
- - type: precision_at_1
742
- value: 31.94758333333333
743
- - type: precision_at_10
744
- value: 7.688916666666666
745
- - type: precision_at_100
746
- value: 1.2250833333333333
747
- - type: precision_at_1000
748
- value: 0.1595
749
- - type: precision_at_3
750
- value: 17.465999999999998
751
- - type: precision_at_5
752
- value: 12.548083333333333
753
- - type: recall_at_1
754
- value: 27.092083333333335
755
- - type: recall_at_10
756
- value: 56.286583333333326
757
- - type: recall_at_100
758
- value: 79.09033333333333
759
- - type: recall_at_1000
760
- value: 93.27483333333335
761
- - type: recall_at_3
762
- value: 41.35325
763
- - type: recall_at_5
764
- value: 48.072750000000006
765
- - task:
766
- type: Retrieval
767
- dataset:
768
- type: BeIR/cqadupstack
769
- name: MTEB CQADupstackStatsRetrieval
770
- config: default
771
- split: test
772
- revision: None
773
- metrics:
774
- - type: map_at_1
775
- value: 25.825
776
- - type: map_at_10
777
- value: 33.723
778
- - type: map_at_100
779
- value: 34.74
780
- - type: map_at_1000
781
- value: 34.824
782
- - type: map_at_3
783
- value: 31.369000000000003
784
- - type: map_at_5
785
- value: 32.533
786
- - type: mrr_at_1
787
- value: 29.293999999999997
788
- - type: mrr_at_10
789
- value: 36.84
790
- - type: mrr_at_100
791
- value: 37.681
792
- - type: mrr_at_1000
793
- value: 37.742
794
- - type: mrr_at_3
795
- value: 34.79
796
- - type: mrr_at_5
797
- value: 35.872
798
- - type: ndcg_at_1
799
- value: 29.293999999999997
800
- - type: ndcg_at_10
801
- value: 38.385999999999996
802
- - type: ndcg_at_100
803
- value: 43.327
804
- - type: ndcg_at_1000
805
- value: 45.53
806
- - type: ndcg_at_3
807
- value: 33.985
808
- - type: ndcg_at_5
809
- value: 35.817
810
- - type: precision_at_1
811
- value: 29.293999999999997
812
- - type: precision_at_10
813
- value: 6.12
814
- - type: precision_at_100
815
- value: 0.9329999999999999
816
- - type: precision_at_1000
817
- value: 0.11900000000000001
818
- - type: precision_at_3
819
- value: 14.621999999999998
820
- - type: precision_at_5
821
- value: 10.030999999999999
822
- - type: recall_at_1
823
- value: 25.825
824
- - type: recall_at_10
825
- value: 49.647000000000006
826
- - type: recall_at_100
827
- value: 72.32300000000001
828
- - type: recall_at_1000
829
- value: 88.62400000000001
830
- - type: recall_at_3
831
- value: 37.366
832
- - type: recall_at_5
833
- value: 41.957
834
- - task:
835
- type: Retrieval
836
- dataset:
837
- type: BeIR/cqadupstack
838
- name: MTEB CQADupstackTexRetrieval
839
- config: default
840
- split: test
841
- revision: None
842
- metrics:
843
- - type: map_at_1
844
- value: 18.139
845
- - type: map_at_10
846
- value: 26.107000000000003
847
- - type: map_at_100
848
- value: 27.406999999999996
849
- - type: map_at_1000
850
- value: 27.535999999999998
851
- - type: map_at_3
852
- value: 23.445
853
- - type: map_at_5
854
- value: 24.916
855
- - type: mrr_at_1
856
- value: 21.817
857
- - type: mrr_at_10
858
- value: 29.99
859
- - type: mrr_at_100
860
- value: 31.052000000000003
861
- - type: mrr_at_1000
862
- value: 31.128
863
- - type: mrr_at_3
864
- value: 27.627000000000002
865
- - type: mrr_at_5
866
- value: 29.005
867
- - type: ndcg_at_1
868
- value: 21.817
869
- - type: ndcg_at_10
870
- value: 31.135
871
- - type: ndcg_at_100
872
- value: 37.108000000000004
873
- - type: ndcg_at_1000
874
- value: 39.965
875
- - type: ndcg_at_3
876
- value: 26.439
877
- - type: ndcg_at_5
878
- value: 28.655
879
- - type: precision_at_1
880
- value: 21.817
881
- - type: precision_at_10
882
- value: 5.757000000000001
883
- - type: precision_at_100
884
- value: 1.036
885
- - type: precision_at_1000
886
- value: 0.147
887
- - type: precision_at_3
888
- value: 12.537
889
- - type: precision_at_5
890
- value: 9.229
891
- - type: recall_at_1
892
- value: 18.139
893
- - type: recall_at_10
894
- value: 42.272999999999996
895
- - type: recall_at_100
896
- value: 68.657
897
- - type: recall_at_1000
898
- value: 88.93799999999999
899
- - type: recall_at_3
900
- value: 29.266
901
- - type: recall_at_5
902
- value: 34.892
903
- - task:
904
- type: Retrieval
905
- dataset:
906
- type: BeIR/cqadupstack
907
- name: MTEB CQADupstackUnixRetrieval
908
- config: default
909
- split: test
910
- revision: None
911
- metrics:
912
- - type: map_at_1
913
- value: 27.755000000000003
914
- - type: map_at_10
915
- value: 37.384
916
- - type: map_at_100
917
- value: 38.56
918
- - type: map_at_1000
919
- value: 38.655
920
- - type: map_at_3
921
- value: 34.214
922
- - type: map_at_5
923
- value: 35.96
924
- - type: mrr_at_1
925
- value: 32.369
926
- - type: mrr_at_10
927
- value: 41.625
928
- - type: mrr_at_100
929
- value: 42.449
930
- - type: mrr_at_1000
931
- value: 42.502
932
- - type: mrr_at_3
933
- value: 38.899
934
- - type: mrr_at_5
935
- value: 40.489999999999995
936
- - type: ndcg_at_1
937
- value: 32.369
938
- - type: ndcg_at_10
939
- value: 43.287
940
- - type: ndcg_at_100
941
- value: 48.504999999999995
942
- - type: ndcg_at_1000
943
- value: 50.552
944
- - type: ndcg_at_3
945
- value: 37.549
946
- - type: ndcg_at_5
947
- value: 40.204
948
- - type: precision_at_1
949
- value: 32.369
950
- - type: precision_at_10
951
- value: 7.425
952
- - type: precision_at_100
953
- value: 1.134
954
- - type: precision_at_1000
955
- value: 0.14200000000000002
956
- - type: precision_at_3
957
- value: 17.102
958
- - type: precision_at_5
959
- value: 12.107999999999999
960
- - type: recall_at_1
961
- value: 27.755000000000003
962
- - type: recall_at_10
963
- value: 57.071000000000005
964
- - type: recall_at_100
965
- value: 79.456
966
- - type: recall_at_1000
967
- value: 93.54299999999999
968
- - type: recall_at_3
969
- value: 41.298
970
- - type: recall_at_5
971
- value: 48.037
972
- - task:
973
- type: Retrieval
974
- dataset:
975
- type: BeIR/cqadupstack
976
- name: MTEB CQADupstackWebmastersRetrieval
977
- config: default
978
- split: test
979
- revision: None
980
- metrics:
981
- - type: map_at_1
982
- value: 24.855
983
- - type: map_at_10
984
- value: 34.53
985
- - type: map_at_100
986
- value: 36.167
987
- - type: map_at_1000
988
- value: 36.394999999999996
989
- - type: map_at_3
990
- value: 31.037
991
- - type: map_at_5
992
- value: 33.119
993
- - type: mrr_at_1
994
- value: 30.631999999999998
995
- - type: mrr_at_10
996
- value: 39.763999999999996
997
- - type: mrr_at_100
998
- value: 40.77
999
- - type: mrr_at_1000
1000
- value: 40.826
1001
- - type: mrr_at_3
1002
- value: 36.495
1003
- - type: mrr_at_5
1004
- value: 38.561
1005
- - type: ndcg_at_1
1006
- value: 30.631999999999998
1007
- - type: ndcg_at_10
1008
- value: 40.942
1009
- - type: ndcg_at_100
1010
- value: 47.07
1011
- - type: ndcg_at_1000
1012
- value: 49.363
1013
- - type: ndcg_at_3
1014
- value: 35.038000000000004
1015
- - type: ndcg_at_5
1016
- value: 38.161
1017
- - type: precision_at_1
1018
- value: 30.631999999999998
1019
- - type: precision_at_10
1020
- value: 7.983999999999999
1021
- - type: precision_at_100
1022
- value: 1.6070000000000002
1023
- - type: precision_at_1000
1024
- value: 0.246
1025
- - type: precision_at_3
1026
- value: 16.206
1027
- - type: precision_at_5
1028
- value: 12.253
1029
- - type: recall_at_1
1030
- value: 24.855
1031
- - type: recall_at_10
1032
- value: 53.291999999999994
1033
- - type: recall_at_100
1034
- value: 80.283
1035
- - type: recall_at_1000
1036
- value: 94.309
1037
- - type: recall_at_3
1038
- value: 37.257
1039
- - type: recall_at_5
1040
- value: 45.282
1041
- - task:
1042
- type: Retrieval
1043
- dataset:
1044
- type: BeIR/cqadupstack
1045
- name: MTEB CQADupstackWordpressRetrieval
1046
- config: default
1047
- split: test
1048
- revision: None
1049
- metrics:
1050
- - type: map_at_1
1051
- value: 21.208
1052
- - type: map_at_10
1053
- value: 30.512
1054
- - type: map_at_100
1055
- value: 31.496000000000002
1056
- - type: map_at_1000
1057
- value: 31.595000000000002
1058
- - type: map_at_3
1059
- value: 27.904
1060
- - type: map_at_5
1061
- value: 29.491
1062
- - type: mrr_at_1
1063
- value: 22.736
1064
- - type: mrr_at_10
1065
- value: 32.379999999999995
1066
- - type: mrr_at_100
1067
- value: 33.245000000000005
1068
- - type: mrr_at_1000
1069
- value: 33.315
1070
- - type: mrr_at_3
1071
- value: 29.945
1072
- - type: mrr_at_5
1073
- value: 31.488
1074
- - type: ndcg_at_1
1075
- value: 22.736
1076
- - type: ndcg_at_10
1077
- value: 35.643
1078
- - type: ndcg_at_100
1079
- value: 40.535
1080
- - type: ndcg_at_1000
1081
- value: 43.042
1082
- - type: ndcg_at_3
1083
- value: 30.625000000000004
1084
- - type: ndcg_at_5
1085
- value: 33.323
1086
- - type: precision_at_1
1087
- value: 22.736
1088
- - type: precision_at_10
1089
- value: 5.6930000000000005
1090
- - type: precision_at_100
1091
- value: 0.889
1092
- - type: precision_at_1000
1093
- value: 0.122
1094
- - type: precision_at_3
1095
- value: 13.431999999999999
1096
- - type: precision_at_5
1097
- value: 9.575
1098
- - type: recall_at_1
1099
- value: 21.208
1100
- - type: recall_at_10
1101
- value: 49.47
1102
- - type: recall_at_100
1103
- value: 71.71499999999999
1104
- - type: recall_at_1000
1105
- value: 90.55499999999999
1106
- - type: recall_at_3
1107
- value: 36.124
1108
- - type: recall_at_5
1109
- value: 42.606
1110
- - task:
1111
- type: Retrieval
1112
- dataset:
1113
- type: climate-fever
1114
- name: MTEB ClimateFEVER
1115
- config: default
1116
- split: test
1117
- revision: None
1118
- metrics:
1119
- - type: map_at_1
1120
- value: 11.363
1121
- - type: map_at_10
1122
- value: 20.312
1123
- - type: map_at_100
1124
- value: 22.225
1125
- - type: map_at_1000
1126
- value: 22.411
1127
- - type: map_at_3
1128
- value: 16.68
1129
- - type: map_at_5
1130
- value: 18.608
1131
- - type: mrr_at_1
1132
- value: 25.537
1133
- - type: mrr_at_10
1134
- value: 37.933
1135
- - type: mrr_at_100
1136
- value: 38.875
1137
- - type: mrr_at_1000
1138
- value: 38.911
1139
- - type: mrr_at_3
1140
- value: 34.387
1141
- - type: mrr_at_5
1142
- value: 36.51
1143
- - type: ndcg_at_1
1144
- value: 25.537
1145
- - type: ndcg_at_10
1146
- value: 28.82
1147
- - type: ndcg_at_100
1148
- value: 36.341
1149
- - type: ndcg_at_1000
1150
- value: 39.615
1151
- - type: ndcg_at_3
1152
- value: 23.01
1153
- - type: ndcg_at_5
1154
- value: 25.269000000000002
1155
- - type: precision_at_1
1156
- value: 25.537
1157
- - type: precision_at_10
1158
- value: 9.153
1159
- - type: precision_at_100
1160
- value: 1.7319999999999998
1161
- - type: precision_at_1000
1162
- value: 0.234
1163
- - type: precision_at_3
1164
- value: 17.22
1165
- - type: precision_at_5
1166
- value: 13.629
1167
- - type: recall_at_1
1168
- value: 11.363
1169
- - type: recall_at_10
1170
- value: 35.382999999999996
1171
- - type: recall_at_100
1172
- value: 61.367000000000004
1173
- - type: recall_at_1000
1174
- value: 79.699
1175
- - type: recall_at_3
1176
- value: 21.495
1177
- - type: recall_at_5
1178
- value: 27.42
1179
- - task:
1180
- type: Retrieval
1181
- dataset:
1182
- type: dbpedia-entity
1183
- name: MTEB DBPedia
1184
- config: default
1185
- split: test
1186
- revision: None
1187
- metrics:
1188
- - type: map_at_1
1189
- value: 9.65
1190
- - type: map_at_10
1191
- value: 20.742
1192
- - type: map_at_100
1193
- value: 29.614
1194
- - type: map_at_1000
1195
- value: 31.373
1196
- - type: map_at_3
1197
- value: 14.667
1198
- - type: map_at_5
1199
- value: 17.186
1200
- - type: mrr_at_1
1201
- value: 69.75
1202
- - type: mrr_at_10
1203
- value: 76.762
1204
- - type: mrr_at_100
1205
- value: 77.171
1206
- - type: mrr_at_1000
1207
- value: 77.179
1208
- - type: mrr_at_3
1209
- value: 75.125
1210
- - type: mrr_at_5
1211
- value: 76.287
1212
- - type: ndcg_at_1
1213
- value: 57.62500000000001
1214
- - type: ndcg_at_10
1215
- value: 42.370999999999995
1216
- - type: ndcg_at_100
1217
- value: 47.897
1218
- - type: ndcg_at_1000
1219
- value: 55.393
1220
- - type: ndcg_at_3
1221
- value: 46.317
1222
- - type: ndcg_at_5
1223
- value: 43.906
1224
- - type: precision_at_1
1225
- value: 69.75
1226
- - type: precision_at_10
1227
- value: 33.95
1228
- - type: precision_at_100
1229
- value: 10.885
1230
- - type: precision_at_1000
1231
- value: 2.2239999999999998
1232
- - type: precision_at_3
1233
- value: 49.75
1234
- - type: precision_at_5
1235
- value: 42.3
1236
- - type: recall_at_1
1237
- value: 9.65
1238
- - type: recall_at_10
1239
- value: 26.117
1240
- - type: recall_at_100
1241
- value: 55.084
1242
- - type: recall_at_1000
1243
- value: 78.62400000000001
1244
- - type: recall_at_3
1245
- value: 15.823
1246
- - type: recall_at_5
1247
- value: 19.652
1248
- - task:
1249
- type: Classification
1250
- dataset:
1251
- type: mteb/emotion
1252
- name: MTEB EmotionClassification
1253
- config: default
1254
- split: test
1255
- revision: 4f58c6b202a23cf9a4da393831edf4f9183cad37
1256
- metrics:
1257
- - type: accuracy
1258
- value: 47.885
1259
- - type: f1
1260
- value: 42.99567641346983
1261
- - task:
1262
- type: Retrieval
1263
- dataset:
1264
- type: fever
1265
- name: MTEB FEVER
1266
- config: default
1267
- split: test
1268
- revision: None
1269
- metrics:
1270
- - type: map_at_1
1271
- value: 70.97
1272
- - type: map_at_10
1273
- value: 80.34599999999999
1274
- - type: map_at_100
1275
- value: 80.571
1276
- - type: map_at_1000
1277
- value: 80.584
1278
- - type: map_at_3
1279
- value: 79.279
1280
- - type: map_at_5
1281
- value: 79.94
1282
- - type: mrr_at_1
1283
- value: 76.613
1284
- - type: mrr_at_10
1285
- value: 85.15700000000001
1286
- - type: mrr_at_100
1287
- value: 85.249
1288
- - type: mrr_at_1000
1289
- value: 85.252
1290
- - type: mrr_at_3
1291
- value: 84.33800000000001
1292
- - type: mrr_at_5
1293
- value: 84.89
1294
- - type: ndcg_at_1
1295
- value: 76.613
1296
- - type: ndcg_at_10
1297
- value: 84.53399999999999
1298
- - type: ndcg_at_100
1299
- value: 85.359
1300
- - type: ndcg_at_1000
1301
- value: 85.607
1302
- - type: ndcg_at_3
1303
- value: 82.76599999999999
1304
- - type: ndcg_at_5
1305
- value: 83.736
1306
- - type: precision_at_1
1307
- value: 76.613
1308
- - type: precision_at_10
1309
- value: 10.206
1310
- - type: precision_at_100
1311
- value: 1.083
1312
- - type: precision_at_1000
1313
- value: 0.11199999999999999
1314
- - type: precision_at_3
1315
- value: 31.913000000000004
1316
- - type: precision_at_5
1317
- value: 19.769000000000002
1318
- - type: recall_at_1
1319
- value: 70.97
1320
- - type: recall_at_10
1321
- value: 92.674
1322
- - type: recall_at_100
1323
- value: 95.985
1324
- - type: recall_at_1000
1325
- value: 97.57000000000001
1326
- - type: recall_at_3
1327
- value: 87.742
1328
- - type: recall_at_5
1329
- value: 90.28
1330
- - task:
1331
- type: Retrieval
1332
- dataset:
1333
- type: fiqa
1334
- name: MTEB FiQA2018
1335
- config: default
1336
- split: test
1337
- revision: None
1338
- metrics:
1339
- - type: map_at_1
1340
- value: 22.494
1341
- - type: map_at_10
1342
- value: 36.491
1343
- - type: map_at_100
1344
- value: 38.550000000000004
1345
- - type: map_at_1000
1346
- value: 38.726
1347
- - type: map_at_3
1348
- value: 31.807000000000002
1349
- - type: map_at_5
1350
- value: 34.299
1351
- - type: mrr_at_1
1352
- value: 44.907000000000004
1353
- - type: mrr_at_10
1354
- value: 53.146
1355
- - type: mrr_at_100
1356
- value: 54.013999999999996
1357
- - type: mrr_at_1000
1358
- value: 54.044000000000004
1359
- - type: mrr_at_3
1360
- value: 50.952
1361
- - type: mrr_at_5
1362
- value: 52.124
1363
- - type: ndcg_at_1
1364
- value: 44.907000000000004
1365
- - type: ndcg_at_10
1366
- value: 44.499
1367
- - type: ndcg_at_100
1368
- value: 51.629000000000005
1369
- - type: ndcg_at_1000
1370
- value: 54.367
1371
- - type: ndcg_at_3
1372
- value: 40.900999999999996
1373
- - type: ndcg_at_5
1374
- value: 41.737
1375
- - type: precision_at_1
1376
- value: 44.907000000000004
1377
- - type: precision_at_10
1378
- value: 12.346
1379
- - type: precision_at_100
1380
- value: 1.974
1381
- - type: precision_at_1000
1382
- value: 0.246
1383
- - type: precision_at_3
1384
- value: 27.366
1385
- - type: precision_at_5
1386
- value: 19.846
1387
- - type: recall_at_1
1388
- value: 22.494
1389
- - type: recall_at_10
1390
- value: 51.156
1391
- - type: recall_at_100
1392
- value: 77.11200000000001
1393
- - type: recall_at_1000
1394
- value: 93.44
1395
- - type: recall_at_3
1396
- value: 36.574
1397
- - type: recall_at_5
1398
- value: 42.361
1399
- - task:
1400
- type: Retrieval
1401
- dataset:
1402
- type: hotpotqa
1403
- name: MTEB HotpotQA
1404
- config: default
1405
- split: test
1406
- revision: None
1407
- metrics:
1408
- - type: map_at_1
1409
- value: 38.568999999999996
1410
- - type: map_at_10
1411
- value: 58.485
1412
- - type: map_at_100
1413
- value: 59.358999999999995
1414
- - type: map_at_1000
1415
- value: 59.429
1416
- - type: map_at_3
1417
- value: 55.217000000000006
1418
- - type: map_at_5
1419
- value: 57.236
1420
- - type: mrr_at_1
1421
- value: 77.137
1422
- - type: mrr_at_10
1423
- value: 82.829
1424
- - type: mrr_at_100
1425
- value: 83.04599999999999
1426
- - type: mrr_at_1000
1427
- value: 83.05399999999999
1428
- - type: mrr_at_3
1429
- value: 81.904
1430
- - type: mrr_at_5
1431
- value: 82.50800000000001
1432
- - type: ndcg_at_1
1433
- value: 77.137
1434
- - type: ndcg_at_10
1435
- value: 67.156
1436
- - type: ndcg_at_100
1437
- value: 70.298
1438
- - type: ndcg_at_1000
1439
- value: 71.65700000000001
1440
- - type: ndcg_at_3
1441
- value: 62.535
1442
- - type: ndcg_at_5
1443
- value: 65.095
1444
- - type: precision_at_1
1445
- value: 77.137
1446
- - type: precision_at_10
1447
- value: 13.911999999999999
1448
- - type: precision_at_100
1449
- value: 1.6389999999999998
1450
- - type: precision_at_1000
1451
- value: 0.182
1452
- - type: precision_at_3
1453
- value: 39.572
1454
- - type: precision_at_5
1455
- value: 25.766
1456
- - type: recall_at_1
1457
- value: 38.568999999999996
1458
- - type: recall_at_10
1459
- value: 69.56099999999999
1460
- - type: recall_at_100
1461
- value: 81.931
1462
- - type: recall_at_1000
1463
- value: 90.91799999999999
1464
- - type: recall_at_3
1465
- value: 59.358999999999995
1466
- - type: recall_at_5
1467
- value: 64.416
1468
- - task:
1469
- type: Classification
1470
- dataset:
1471
- type: mteb/imdb
1472
- name: MTEB ImdbClassification
1473
- config: default
1474
- split: test
1475
- revision: 3d86128a09e091d6018b6d26cad27f2739fc2db7
1476
- metrics:
1477
- - type: accuracy
1478
- value: 88.45600000000002
1479
- - type: ap
1480
- value: 84.09725115338568
1481
- - type: f1
1482
- value: 88.41874909080512
1483
- - task:
1484
- type: Retrieval
1485
- dataset:
1486
- type: msmarco
1487
- name: MTEB MSMARCO
1488
- config: default
1489
- split: dev
1490
- revision: None
1491
- metrics:
1492
- - type: map_at_1
1493
- value: 21.404999999999998
1494
- - type: map_at_10
1495
- value: 33.921
1496
- - type: map_at_100
1497
- value: 35.116
1498
- - type: map_at_1000
1499
- value: 35.164
1500
- - type: map_at_3
1501
- value: 30.043999999999997
1502
- - type: map_at_5
1503
- value: 32.327
1504
- - type: mrr_at_1
1505
- value: 21.977
1506
- - type: mrr_at_10
1507
- value: 34.505
1508
- - type: mrr_at_100
1509
- value: 35.638999999999996
1510
- - type: mrr_at_1000
1511
- value: 35.68
1512
- - type: mrr_at_3
1513
- value: 30.703999999999997
1514
- - type: mrr_at_5
1515
- value: 32.96
1516
- - type: ndcg_at_1
1517
- value: 21.963
1518
- - type: ndcg_at_10
1519
- value: 40.859
1520
- - type: ndcg_at_100
1521
- value: 46.614
1522
- - type: ndcg_at_1000
1523
- value: 47.789
1524
- - type: ndcg_at_3
1525
- value: 33.007999999999996
1526
- - type: ndcg_at_5
1527
- value: 37.084
1528
- - type: precision_at_1
1529
- value: 21.963
1530
- - type: precision_at_10
1531
- value: 6.493
1532
- - type: precision_at_100
1533
- value: 0.938
1534
- - type: precision_at_1000
1535
- value: 0.104
1536
- - type: precision_at_3
1537
- value: 14.155000000000001
1538
- - type: precision_at_5
1539
- value: 10.544
1540
- - type: recall_at_1
1541
- value: 21.404999999999998
1542
- - type: recall_at_10
1543
- value: 62.175000000000004
1544
- - type: recall_at_100
1545
- value: 88.786
1546
- - type: recall_at_1000
1547
- value: 97.738
1548
- - type: recall_at_3
1549
- value: 40.925
1550
- - type: recall_at_5
1551
- value: 50.722
1552
- - task:
1553
- type: Classification
1554
- dataset:
1555
- type: mteb/mtop_domain
1556
- name: MTEB MTOPDomainClassification (en)
1557
- config: en
1558
- split: test
1559
- revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf
1560
- metrics:
1561
- - type: accuracy
1562
- value: 93.50661194710442
1563
- - type: f1
1564
- value: 93.30311193153668
1565
- - task:
1566
- type: Classification
1567
- dataset:
1568
- type: mteb/mtop_intent
1569
- name: MTEB MTOPIntentClassification (en)
1570
- config: en
1571
- split: test
1572
- revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba
1573
- metrics:
1574
- - type: accuracy
1575
- value: 73.24669402644778
1576
- - type: f1
1577
- value: 54.23122108002977
1578
- - task:
1579
- type: Classification
1580
- dataset:
1581
- type: mteb/amazon_massive_intent
1582
- name: MTEB MassiveIntentClassification (en)
1583
- config: en
1584
- split: test
1585
- revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7
1586
- metrics:
1587
- - type: accuracy
1588
- value: 72.61936785474109
1589
- - type: f1
1590
- value: 70.52644941025565
1591
- - task:
1592
- type: Classification
1593
- dataset:
1594
- type: mteb/amazon_massive_scenario
1595
- name: MTEB MassiveScenarioClassification (en)
1596
- config: en
1597
- split: test
1598
- revision: 7d571f92784cd94a019292a1f45445077d0ef634
1599
- metrics:
1600
- - type: accuracy
1601
- value: 76.76529926025555
1602
- - type: f1
1603
- value: 77.26872729322514
1604
- - task:
1605
- type: Clustering
1606
- dataset:
1607
- type: mteb/medrxiv-clustering-p2p
1608
- name: MTEB MedrxivClusteringP2P
1609
- config: default
1610
- split: test
1611
- revision: e7a26af6f3ae46b30dde8737f02c07b1505bcc73
1612
- metrics:
1613
- - type: v_measure
1614
- value: 33.39450293021839
1615
- - task:
1616
- type: Clustering
1617
- dataset:
1618
- type: mteb/medrxiv-clustering-s2s
1619
- name: MTEB MedrxivClusteringS2S
1620
- config: default
1621
- split: test
1622
- revision: 35191c8c0dca72d8ff3efcd72aa802307d469663
1623
- metrics:
1624
- - type: v_measure
1625
- value: 31.757796879839294
1626
- - task:
1627
- type: Reranking
1628
- dataset:
1629
- type: mteb/mind_small
1630
- name: MTEB MindSmallReranking
1631
- config: default
1632
- split: test
1633
- revision: 3bdac13927fdc888b903db93b2ffdbd90b295a69
1634
- metrics:
1635
- - type: map
1636
- value: 32.62512146657428
1637
- - type: mrr
1638
- value: 33.84624322066173
1639
- - task:
1640
- type: Retrieval
1641
- dataset:
1642
- type: nfcorpus
1643
- name: MTEB NFCorpus
1644
- config: default
1645
- split: test
1646
- revision: None
1647
- metrics:
1648
- - type: map_at_1
1649
- value: 6.462
1650
- - type: map_at_10
1651
- value: 14.947
1652
- - type: map_at_100
1653
- value: 19.344
1654
- - type: map_at_1000
1655
- value: 20.933
1656
- - type: map_at_3
1657
- value: 10.761999999999999
1658
- - type: map_at_5
1659
- value: 12.744
1660
- - type: mrr_at_1
1661
- value: 47.988
1662
- - type: mrr_at_10
1663
- value: 57.365
1664
- - type: mrr_at_100
1665
- value: 57.931
1666
- - type: mrr_at_1000
1667
- value: 57.96
1668
- - type: mrr_at_3
1669
- value: 54.85
1670
- - type: mrr_at_5
1671
- value: 56.569
1672
- - type: ndcg_at_1
1673
- value: 46.129999999999995
1674
- - type: ndcg_at_10
1675
- value: 38.173
1676
- - type: ndcg_at_100
1677
- value: 35.983
1678
- - type: ndcg_at_1000
1679
- value: 44.507000000000005
1680
- - type: ndcg_at_3
1681
- value: 42.495
1682
- - type: ndcg_at_5
1683
- value: 41.019
1684
- - type: precision_at_1
1685
- value: 47.678
1686
- - type: precision_at_10
1687
- value: 28.731
1688
- - type: precision_at_100
1689
- value: 9.232
1690
- - type: precision_at_1000
1691
- value: 2.202
1692
- - type: precision_at_3
1693
- value: 39.628
1694
- - type: precision_at_5
1695
- value: 35.851
1696
- - type: recall_at_1
1697
- value: 6.462
1698
- - type: recall_at_10
1699
- value: 18.968
1700
- - type: recall_at_100
1701
- value: 37.131
1702
- - type: recall_at_1000
1703
- value: 67.956
1704
- - type: recall_at_3
1705
- value: 11.905000000000001
1706
- - type: recall_at_5
1707
- value: 15.097
1708
- - task:
1709
- type: Retrieval
1710
- dataset:
1711
- type: nq
1712
- name: MTEB NQ
1713
- config: default
1714
- split: test
1715
- revision: None
1716
- metrics:
1717
- - type: map_at_1
1718
- value: 30.335
1719
- - type: map_at_10
1720
- value: 46.611999999999995
1721
- - type: map_at_100
1722
- value: 47.632000000000005
1723
- - type: map_at_1000
1724
- value: 47.661
1725
- - type: map_at_3
1726
- value: 41.876999999999995
1727
- - type: map_at_5
1728
- value: 44.799
1729
- - type: mrr_at_1
1730
- value: 34.125
1731
- - type: mrr_at_10
1732
- value: 49.01
1733
- - type: mrr_at_100
1734
- value: 49.75
1735
- - type: mrr_at_1000
1736
- value: 49.768
1737
- - type: mrr_at_3
1738
- value: 45.153
1739
- - type: mrr_at_5
1740
- value: 47.589999999999996
1741
- - type: ndcg_at_1
1742
- value: 34.125
1743
- - type: ndcg_at_10
1744
- value: 54.777
1745
- - type: ndcg_at_100
1746
- value: 58.914
1747
- - type: ndcg_at_1000
1748
- value: 59.521
1749
- - type: ndcg_at_3
1750
- value: 46.015
1751
- - type: ndcg_at_5
1752
- value: 50.861000000000004
1753
- - type: precision_at_1
1754
- value: 34.125
1755
- - type: precision_at_10
1756
- value: 9.166
1757
- - type: precision_at_100
1758
- value: 1.149
1759
- - type: precision_at_1000
1760
- value: 0.121
1761
- - type: precision_at_3
1762
- value: 21.147
1763
- - type: precision_at_5
1764
- value: 15.469
1765
- - type: recall_at_1
1766
- value: 30.335
1767
- - type: recall_at_10
1768
- value: 77.194
1769
- - type: recall_at_100
1770
- value: 94.812
1771
- - type: recall_at_1000
1772
- value: 99.247
1773
- - type: recall_at_3
1774
- value: 54.681000000000004
1775
- - type: recall_at_5
1776
- value: 65.86800000000001
1777
- - task:
1778
- type: Retrieval
1779
- dataset:
1780
- type: quora
1781
- name: MTEB QuoraRetrieval
1782
- config: default
1783
- split: test
1784
- revision: None
1785
- metrics:
1786
- - type: map_at_1
1787
- value: 70.62
1788
- - type: map_at_10
1789
- value: 84.536
1790
- - type: map_at_100
1791
- value: 85.167
1792
- - type: map_at_1000
1793
- value: 85.184
1794
- - type: map_at_3
1795
- value: 81.607
1796
- - type: map_at_5
1797
- value: 83.423
1798
- - type: mrr_at_1
1799
- value: 81.36
1800
- - type: mrr_at_10
1801
- value: 87.506
1802
- - type: mrr_at_100
1803
- value: 87.601
1804
- - type: mrr_at_1000
1805
- value: 87.601
1806
- - type: mrr_at_3
1807
- value: 86.503
1808
- - type: mrr_at_5
1809
- value: 87.179
1810
- - type: ndcg_at_1
1811
- value: 81.36
1812
- - type: ndcg_at_10
1813
- value: 88.319
1814
- - type: ndcg_at_100
1815
- value: 89.517
1816
- - type: ndcg_at_1000
1817
- value: 89.60900000000001
1818
- - type: ndcg_at_3
1819
- value: 85.423
1820
- - type: ndcg_at_5
1821
- value: 86.976
1822
- - type: precision_at_1
1823
- value: 81.36
1824
- - type: precision_at_10
1825
- value: 13.415
1826
- - type: precision_at_100
1827
- value: 1.529
1828
- - type: precision_at_1000
1829
- value: 0.157
1830
- - type: precision_at_3
1831
- value: 37.342999999999996
1832
- - type: precision_at_5
1833
- value: 24.534
1834
- - type: recall_at_1
1835
- value: 70.62
1836
- - type: recall_at_10
1837
- value: 95.57600000000001
1838
- - type: recall_at_100
1839
- value: 99.624
1840
- - type: recall_at_1000
1841
- value: 99.991
1842
- - type: recall_at_3
1843
- value: 87.22
1844
- - type: recall_at_5
1845
- value: 91.654
1846
- - task:
1847
- type: Clustering
1848
- dataset:
1849
- type: mteb/reddit-clustering
1850
- name: MTEB RedditClustering
1851
- config: default
1852
- split: test
1853
- revision: 24640382cdbf8abc73003fb0fa6d111a705499eb
1854
- metrics:
1855
- - type: v_measure
1856
- value: 60.826438478212744
1857
- - task:
1858
- type: Clustering
1859
- dataset:
1860
- type: mteb/reddit-clustering-p2p
1861
- name: MTEB RedditClusteringP2P
1862
- config: default
1863
- split: test
1864
- revision: 282350215ef01743dc01b456c7f5241fa8937f16
1865
- metrics:
1866
- - type: v_measure
1867
- value: 64.24027467551447
1868
- - task:
1869
- type: Retrieval
1870
- dataset:
1871
- type: scidocs
1872
- name: MTEB SCIDOCS
1873
- config: default
1874
- split: test
1875
- revision: None
1876
- metrics:
1877
- - type: map_at_1
1878
- value: 4.997999999999999
1879
- - type: map_at_10
1880
- value: 14.267
1881
- - type: map_at_100
1882
- value: 16.843
1883
- - type: map_at_1000
1884
- value: 17.229
1885
- - type: map_at_3
1886
- value: 9.834
1887
- - type: map_at_5
1888
- value: 11.92
1889
- - type: mrr_at_1
1890
- value: 24.7
1891
- - type: mrr_at_10
1892
- value: 37.685
1893
- - type: mrr_at_100
1894
- value: 38.704
1895
- - type: mrr_at_1000
1896
- value: 38.747
1897
- - type: mrr_at_3
1898
- value: 34.150000000000006
1899
- - type: mrr_at_5
1900
- value: 36.075
1901
- - type: ndcg_at_1
1902
- value: 24.7
1903
- - type: ndcg_at_10
1904
- value: 23.44
1905
- - type: ndcg_at_100
1906
- value: 32.617000000000004
1907
- - type: ndcg_at_1000
1908
- value: 38.628
1909
- - type: ndcg_at_3
1910
- value: 21.747
1911
- - type: ndcg_at_5
1912
- value: 19.076
1913
- - type: precision_at_1
1914
- value: 24.7
1915
- - type: precision_at_10
1916
- value: 12.47
1917
- - type: precision_at_100
1918
- value: 2.564
1919
- - type: precision_at_1000
1920
- value: 0.4
1921
- - type: precision_at_3
1922
- value: 20.767
1923
- - type: precision_at_5
1924
- value: 17.06
1925
- - type: recall_at_1
1926
- value: 4.997999999999999
1927
- - type: recall_at_10
1928
- value: 25.3
1929
- - type: recall_at_100
1930
- value: 52.048
1931
- - type: recall_at_1000
1932
- value: 81.093
1933
- - type: recall_at_3
1934
- value: 12.642999999999999
1935
- - type: recall_at_5
1936
- value: 17.312
1937
- - task:
1938
- type: STS
1939
- dataset:
1940
- type: mteb/sickr-sts
1941
- name: MTEB SICK-R
1942
- config: default
1943
- split: test
1944
- revision: a6ea5a8cab320b040a23452cc28066d9beae2cee
1945
- metrics:
1946
- - type: cos_sim_pearson
1947
- value: 85.44942006292234
1948
- - type: cos_sim_spearman
1949
- value: 79.80930790660699
1950
- - type: euclidean_pearson
1951
- value: 82.93400777494863
1952
- - type: euclidean_spearman
1953
- value: 80.04664991110705
1954
- - type: manhattan_pearson
1955
- value: 82.93551681854949
1956
- - type: manhattan_spearman
1957
- value: 80.03156736837379
1958
- - task:
1959
- type: STS
1960
- dataset:
1961
- type: mteb/sts12-sts
1962
- name: MTEB STS12
1963
- config: default
1964
- split: test
1965
- revision: a0d554a64d88156834ff5ae9920b964011b16384
1966
- metrics:
1967
- - type: cos_sim_pearson
1968
- value: 85.63574059135726
1969
- - type: cos_sim_spearman
1970
- value: 76.80552915288186
1971
- - type: euclidean_pearson
1972
- value: 82.46368529820518
1973
- - type: euclidean_spearman
1974
- value: 76.60338474719275
1975
- - type: manhattan_pearson
1976
- value: 82.4558617035968
1977
- - type: manhattan_spearman
1978
- value: 76.57936082895705
1979
- - task:
1980
- type: STS
1981
- dataset:
1982
- type: mteb/sts13-sts
1983
- name: MTEB STS13
1984
- config: default
1985
- split: test
1986
- revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca
1987
- metrics:
1988
- - type: cos_sim_pearson
1989
- value: 86.24116811084211
1990
- - type: cos_sim_spearman
1991
- value: 88.10998662068769
1992
- - type: euclidean_pearson
1993
- value: 87.04961732352689
1994
- - type: euclidean_spearman
1995
- value: 88.12543945864087
1996
- - type: manhattan_pearson
1997
- value: 86.9905224528854
1998
- - type: manhattan_spearman
1999
- value: 88.07827944705546
2000
- - task:
2001
- type: STS
2002
- dataset:
2003
- type: mteb/sts14-sts
2004
- name: MTEB STS14
2005
- config: default
2006
- split: test
2007
- revision: 6031580fec1f6af667f0bd2da0a551cf4f0b2375
2008
- metrics:
2009
- - type: cos_sim_pearson
2010
- value: 84.74847296555048
2011
- - type: cos_sim_spearman
2012
- value: 82.66200957916445
2013
- - type: euclidean_pearson
2014
- value: 84.48132256004965
2015
- - type: euclidean_spearman
2016
- value: 82.67915286000596
2017
- - type: manhattan_pearson
2018
- value: 84.44950477268334
2019
- - type: manhattan_spearman
2020
- value: 82.63327639173352
2021
- - task:
2022
- type: STS
2023
- dataset:
2024
- type: mteb/sts15-sts
2025
- name: MTEB STS15
2026
- config: default
2027
- split: test
2028
- revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3
2029
- metrics:
2030
- - type: cos_sim_pearson
2031
- value: 87.23056258027053
2032
- - type: cos_sim_spearman
2033
- value: 88.92791680286955
2034
- - type: euclidean_pearson
2035
- value: 88.13819235461933
2036
- - type: euclidean_spearman
2037
- value: 88.87294661361716
2038
- - type: manhattan_pearson
2039
- value: 88.14212133687899
2040
- - type: manhattan_spearman
2041
- value: 88.88551854529777
2042
- - task:
2043
- type: STS
2044
- dataset:
2045
- type: mteb/sts16-sts
2046
- name: MTEB STS16
2047
- config: default
2048
- split: test
2049
- revision: 4d8694f8f0e0100860b497b999b3dbed754a0513
2050
- metrics:
2051
- - type: cos_sim_pearson
2052
- value: 82.64179522732887
2053
- - type: cos_sim_spearman
2054
- value: 84.25028809903114
2055
- - type: euclidean_pearson
2056
- value: 83.40175015236979
2057
- - type: euclidean_spearman
2058
- value: 84.23369296429406
2059
- - type: manhattan_pearson
2060
- value: 83.43768174261321
2061
- - type: manhattan_spearman
2062
- value: 84.27855229214734
2063
- - task:
2064
- type: STS
2065
- dataset:
2066
- type: mteb/sts17-crosslingual-sts
2067
- name: MTEB STS17 (en-en)
2068
- config: en-en
2069
- split: test
2070
- revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d
2071
- metrics:
2072
- - type: cos_sim_pearson
2073
- value: 88.20378955494732
2074
- - type: cos_sim_spearman
2075
- value: 88.46863559173111
2076
- - type: euclidean_pearson
2077
- value: 88.8249295811663
2078
- - type: euclidean_spearman
2079
- value: 88.6312737724905
2080
- - type: manhattan_pearson
2081
- value: 88.87744466378827
2082
- - type: manhattan_spearman
2083
- value: 88.82908423767314
2084
- - task:
2085
- type: STS
2086
- dataset:
2087
- type: mteb/sts22-crosslingual-sts
2088
- name: MTEB STS22 (en)
2089
- config: en
2090
- split: test
2091
- revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80
2092
- metrics:
2093
- - type: cos_sim_pearson
2094
- value: 69.91342028796086
2095
- - type: cos_sim_spearman
2096
- value: 69.71495021867864
2097
- - type: euclidean_pearson
2098
- value: 70.65334330405646
2099
- - type: euclidean_spearman
2100
- value: 69.4321253472211
2101
- - type: manhattan_pearson
2102
- value: 70.59743494727465
2103
- - type: manhattan_spearman
2104
- value: 69.11695509297482
2105
- - task:
2106
- type: STS
2107
- dataset:
2108
- type: mteb/stsbenchmark-sts
2109
- name: MTEB STSBenchmark
2110
- config: default
2111
- split: test
2112
- revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831
2113
- metrics:
2114
- - type: cos_sim_pearson
2115
- value: 85.42451709766952
2116
- - type: cos_sim_spearman
2117
- value: 86.07166710670508
2118
- - type: euclidean_pearson
2119
- value: 86.12711421258899
2120
- - type: euclidean_spearman
2121
- value: 86.05232086925126
2122
- - type: manhattan_pearson
2123
- value: 86.15591089932126
2124
- - type: manhattan_spearman
2125
- value: 86.0890128623439
2126
- - task:
2127
- type: Reranking
2128
- dataset:
2129
- type: mteb/scidocs-reranking
2130
- name: MTEB SciDocsRR
2131
- config: default
2132
- split: test
2133
- revision: d3c5e1fc0b855ab6097bf1cda04dd73947d7caab
2134
- metrics:
2135
- - type: map
2136
- value: 87.1976344717285
2137
- - type: mrr
2138
- value: 96.3703145075694
2139
- - task:
2140
- type: Retrieval
2141
- dataset:
2142
- type: scifact
2143
- name: MTEB SciFact
2144
- config: default
2145
- split: test
2146
- revision: None
2147
- metrics:
2148
- - type: map_at_1
2149
- value: 59.511
2150
- - type: map_at_10
2151
- value: 69.724
2152
- - type: map_at_100
2153
- value: 70.208
2154
- - type: map_at_1000
2155
- value: 70.22800000000001
2156
- - type: map_at_3
2157
- value: 66.986
2158
- - type: map_at_5
2159
- value: 68.529
2160
- - type: mrr_at_1
2161
- value: 62.333000000000006
2162
- - type: mrr_at_10
2163
- value: 70.55
2164
- - type: mrr_at_100
2165
- value: 70.985
2166
- - type: mrr_at_1000
2167
- value: 71.004
2168
- - type: mrr_at_3
2169
- value: 68.611
2170
- - type: mrr_at_5
2171
- value: 69.728
2172
- - type: ndcg_at_1
2173
- value: 62.333000000000006
2174
- - type: ndcg_at_10
2175
- value: 74.265
2176
- - type: ndcg_at_100
2177
- value: 76.361
2178
- - type: ndcg_at_1000
2179
- value: 76.82900000000001
2180
- - type: ndcg_at_3
2181
- value: 69.772
2182
- - type: ndcg_at_5
2183
- value: 71.94800000000001
2184
- - type: precision_at_1
2185
- value: 62.333000000000006
2186
- - type: precision_at_10
2187
- value: 9.9
2188
- - type: precision_at_100
2189
- value: 1.093
2190
- - type: precision_at_1000
2191
- value: 0.11299999999999999
2192
- - type: precision_at_3
2193
- value: 27.444000000000003
2194
- - type: precision_at_5
2195
- value: 18
2196
- - type: recall_at_1
2197
- value: 59.511
2198
- - type: recall_at_10
2199
- value: 87.156
2200
- - type: recall_at_100
2201
- value: 96.5
2202
- - type: recall_at_1000
2203
- value: 100
2204
- - type: recall_at_3
2205
- value: 75.2
2206
- - type: recall_at_5
2207
- value: 80.661
2208
- - task:
2209
- type: PairClassification
2210
- dataset:
2211
- type: mteb/sprintduplicatequestions-pairclassification
2212
- name: MTEB SprintDuplicateQuestions
2213
- config: default
2214
- split: test
2215
- revision: d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46
2216
- metrics:
2217
- - type: cos_sim_accuracy
2218
- value: 99.81683168316832
2219
- - type: cos_sim_ap
2220
- value: 95.74716566563774
2221
- - type: cos_sim_f1
2222
- value: 90.64238745574103
2223
- - type: cos_sim_precision
2224
- value: 91.7093142272262
2225
- - type: cos_sim_recall
2226
- value: 89.60000000000001
2227
- - type: dot_accuracy
2228
- value: 99.69405940594059
2229
- - type: dot_ap
2230
- value: 91.09013507754594
2231
- - type: dot_f1
2232
- value: 84.54227113556779
2233
- - type: dot_precision
2234
- value: 84.58458458458459
2235
- - type: dot_recall
2236
- value: 84.5
2237
- - type: euclidean_accuracy
2238
- value: 99.81782178217821
2239
- - type: euclidean_ap
2240
- value: 95.6324301072609
2241
- - type: euclidean_f1
2242
- value: 90.58341862845445
2243
- - type: euclidean_precision
2244
- value: 92.76729559748428
2245
- - type: euclidean_recall
2246
- value: 88.5
2247
- - type: manhattan_accuracy
2248
- value: 99.81980198019802
2249
- - type: manhattan_ap
2250
- value: 95.68510494437183
2251
- - type: manhattan_f1
2252
- value: 90.58945191313342
2253
- - type: manhattan_precision
2254
- value: 93.79014989293361
2255
- - type: manhattan_recall
2256
- value: 87.6
2257
- - type: max_accuracy
2258
- value: 99.81980198019802
2259
- - type: max_ap
2260
- value: 95.74716566563774
2261
- - type: max_f1
2262
- value: 90.64238745574103
2263
- - task:
2264
- type: Clustering
2265
- dataset:
2266
- type: mteb/stackexchange-clustering
2267
- name: MTEB StackExchangeClustering
2268
- config: default
2269
- split: test
2270
- revision: 6cbc1f7b2bc0622f2e39d2c77fa502909748c259
2271
- metrics:
2272
- - type: v_measure
2273
- value: 67.63761899427078
2274
- - task:
2275
- type: Clustering
2276
- dataset:
2277
- type: mteb/stackexchange-clustering-p2p
2278
- name: MTEB StackExchangeClusteringP2P
2279
- config: default
2280
- split: test
2281
- revision: 815ca46b2622cec33ccafc3735d572c266efdb44
2282
- metrics:
2283
- - type: v_measure
2284
- value: 36.572473369697235
2285
- - task:
2286
- type: Reranking
2287
- dataset:
2288
- type: mteb/stackoverflowdupquestions-reranking
2289
- name: MTEB StackOverflowDupQuestions
2290
- config: default
2291
- split: test
2292
- revision: e185fbe320c72810689fc5848eb6114e1ef5ec69
2293
- metrics:
2294
- - type: map
2295
- value: 53.63000245208579
2296
- - type: mrr
2297
- value: 54.504193722943725
2298
- - task:
2299
- type: Summarization
2300
- dataset:
2301
- type: mteb/summeval
2302
- name: MTEB SummEval
2303
- config: default
2304
- split: test
2305
- revision: cda12ad7615edc362dbf25a00fdd61d3b1eaf93c
2306
- metrics:
2307
- - type: cos_sim_pearson
2308
- value: 30.300791939416545
2309
- - type: cos_sim_spearman
2310
- value: 31.662904057924123
2311
- - type: dot_pearson
2312
- value: 26.21198530758316
2313
- - type: dot_spearman
2314
- value: 27.006921548904263
2315
- - task:
2316
- type: Retrieval
2317
- dataset:
2318
- type: trec-covid
2319
- name: MTEB TRECCOVID
2320
- config: default
2321
- split: test
2322
- revision: None
2323
- metrics:
2324
- - type: map_at_1
2325
- value: 0.197
2326
- - type: map_at_10
2327
- value: 1.752
2328
- - type: map_at_100
2329
- value: 10.795
2330
- - type: map_at_1000
2331
- value: 27.18
2332
- - type: map_at_3
2333
- value: 0.5890000000000001
2334
- - type: map_at_5
2335
- value: 0.938
2336
- - type: mrr_at_1
2337
- value: 74
2338
- - type: mrr_at_10
2339
- value: 85.833
2340
- - type: mrr_at_100
2341
- value: 85.833
2342
- - type: mrr_at_1000
2343
- value: 85.833
2344
- - type: mrr_at_3
2345
- value: 85.333
2346
- - type: mrr_at_5
2347
- value: 85.833
2348
- - type: ndcg_at_1
2349
- value: 69
2350
- - type: ndcg_at_10
2351
- value: 70.22
2352
- - type: ndcg_at_100
2353
- value: 55.785
2354
- - type: ndcg_at_1000
2355
- value: 52.93600000000001
2356
- - type: ndcg_at_3
2357
- value: 72.084
2358
- - type: ndcg_at_5
2359
- value: 71.184
2360
- - type: precision_at_1
2361
- value: 74
2362
- - type: precision_at_10
2363
- value: 75.2
2364
- - type: precision_at_100
2365
- value: 57.3
2366
- - type: precision_at_1000
2367
- value: 23.302
2368
- - type: precision_at_3
2369
- value: 77.333
2370
- - type: precision_at_5
2371
- value: 75.6
2372
- - type: recall_at_1
2373
- value: 0.197
2374
- - type: recall_at_10
2375
- value: 2.019
2376
- - type: recall_at_100
2377
- value: 14.257
2378
- - type: recall_at_1000
2379
- value: 50.922
2380
- - type: recall_at_3
2381
- value: 0.642
2382
- - type: recall_at_5
2383
- value: 1.043
2384
- - task:
2385
- type: Retrieval
2386
- dataset:
2387
- type: webis-touche2020
2388
- name: MTEB Touche2020
2389
- config: default
2390
- split: test
2391
- revision: None
2392
- metrics:
2393
- - type: map_at_1
2394
- value: 2.803
2395
- - type: map_at_10
2396
- value: 10.407
2397
- - type: map_at_100
2398
- value: 16.948
2399
- - type: map_at_1000
2400
- value: 18.424
2401
- - type: map_at_3
2402
- value: 5.405
2403
- - type: map_at_5
2404
- value: 6.908
2405
- - type: mrr_at_1
2406
- value: 36.735
2407
- - type: mrr_at_10
2408
- value: 50.221000000000004
2409
- - type: mrr_at_100
2410
- value: 51.388
2411
- - type: mrr_at_1000
2412
- value: 51.402
2413
- - type: mrr_at_3
2414
- value: 47.278999999999996
2415
- - type: mrr_at_5
2416
- value: 49.626
2417
- - type: ndcg_at_1
2418
- value: 34.694
2419
- - type: ndcg_at_10
2420
- value: 25.507
2421
- - type: ndcg_at_100
2422
- value: 38.296
2423
- - type: ndcg_at_1000
2424
- value: 49.492000000000004
2425
- - type: ndcg_at_3
2426
- value: 29.006999999999998
2427
- - type: ndcg_at_5
2428
- value: 25.979000000000003
2429
- - type: precision_at_1
2430
- value: 36.735
2431
- - type: precision_at_10
2432
- value: 22.041
2433
- - type: precision_at_100
2434
- value: 8.02
2435
- - type: precision_at_1000
2436
- value: 1.567
2437
- - type: precision_at_3
2438
- value: 28.571
2439
- - type: precision_at_5
2440
- value: 24.490000000000002
2441
- - type: recall_at_1
2442
- value: 2.803
2443
- - type: recall_at_10
2444
- value: 16.378
2445
- - type: recall_at_100
2446
- value: 50.489
2447
- - type: recall_at_1000
2448
- value: 85.013
2449
- - type: recall_at_3
2450
- value: 6.505
2451
- - type: recall_at_5
2452
- value: 9.243
2453
- - task:
2454
- type: Classification
2455
- dataset:
2456
- type: mteb/toxic_conversations_50k
2457
- name: MTEB ToxicConversationsClassification
2458
- config: default
2459
- split: test
2460
- revision: d7c0de2777da35d6aae2200a62c6e0e5af397c4c
2461
- metrics:
2462
- - type: accuracy
2463
- value: 70.55579999999999
2464
- - type: ap
2465
- value: 14.206982753316227
2466
- - type: f1
2467
- value: 54.372142814964285
2468
- - task:
2469
- type: Classification
2470
- dataset:
2471
- type: mteb/tweet_sentiment_extraction
2472
- name: MTEB TweetSentimentExtractionClassification
2473
- config: default
2474
- split: test
2475
- revision: d604517c81ca91fe16a244d1248fc021f9ecee7a
2476
- metrics:
2477
- - type: accuracy
2478
- value: 56.57611771363893
2479
- - type: f1
2480
- value: 56.924172639063144
2481
- - task:
2482
- type: Clustering
2483
- dataset:
2484
- type: mteb/twentynewsgroups-clustering
2485
- name: MTEB TwentyNewsgroupsClustering
2486
- config: default
2487
- split: test
2488
- revision: 6125ec4e24fa026cec8a478383ee943acfbd5449
2489
- metrics:
2490
- - type: v_measure
2491
- value: 52.82304915719759
2492
- - task:
2493
- type: PairClassification
2494
- dataset:
2495
- type: mteb/twittersemeval2015-pairclassification
2496
- name: MTEB TwitterSemEval2015
2497
- config: default
2498
- split: test
2499
- revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1
2500
- metrics:
2501
- - type: cos_sim_accuracy
2502
- value: 85.92716218632653
2503
- - type: cos_sim_ap
2504
- value: 73.73359122546046
2505
- - type: cos_sim_f1
2506
- value: 68.42559487116262
2507
- - type: cos_sim_precision
2508
- value: 64.22124508215691
2509
- - type: cos_sim_recall
2510
- value: 73.21899736147758
2511
- - type: dot_accuracy
2512
- value: 80.38981939560112
2513
- - type: dot_ap
2514
- value: 54.61060862444974
2515
- - type: dot_f1
2516
- value: 53.45710627400769
2517
- - type: dot_precision
2518
- value: 44.87638839125761
2519
- - type: dot_recall
2520
- value: 66.09498680738787
2521
- - type: euclidean_accuracy
2522
- value: 86.02849138701794
2523
- - type: euclidean_ap
2524
- value: 73.95673761922404
2525
- - type: euclidean_f1
2526
- value: 68.6783042394015
2527
- - type: euclidean_precision
2528
- value: 65.1063829787234
2529
- - type: euclidean_recall
2530
- value: 72.66490765171504
2531
- - type: manhattan_accuracy
2532
- value: 85.9808070572808
2533
- - type: manhattan_ap
2534
- value: 73.9050720058029
2535
- - type: manhattan_f1
2536
- value: 68.57560618983794
2537
- - type: manhattan_precision
2538
- value: 63.70839936608558
2539
- - type: manhattan_recall
2540
- value: 74.24802110817942
2541
- - type: max_accuracy
2542
- value: 86.02849138701794
2543
- - type: max_ap
2544
- value: 73.95673761922404
2545
- - type: max_f1
2546
- value: 68.6783042394015
2547
- - task:
2548
- type: PairClassification
2549
- dataset:
2550
- type: mteb/twitterurlcorpus-pairclassification
2551
- name: MTEB TwitterURLCorpus
2552
- config: default
2553
- split: test
2554
- revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf
2555
- metrics:
2556
- - type: cos_sim_accuracy
2557
- value: 88.72783017037295
2558
- - type: cos_sim_ap
2559
- value: 85.52705223340233
2560
- - type: cos_sim_f1
2561
- value: 77.91659078492079
2562
- - type: cos_sim_precision
2563
- value: 73.93378032764221
2564
- - type: cos_sim_recall
2565
- value: 82.35294117647058
2566
- - type: dot_accuracy
2567
- value: 85.41739434159972
2568
- - type: dot_ap
2569
- value: 77.17734818118443
2570
- - type: dot_f1
2571
- value: 71.63473589973144
2572
- - type: dot_precision
2573
- value: 66.96123719622415
2574
- - type: dot_recall
2575
- value: 77.00954727440714
2576
- - type: euclidean_accuracy
2577
- value: 88.68125897465751
2578
- - type: euclidean_ap
2579
- value: 85.47712213906692
2580
- - type: euclidean_f1
2581
- value: 77.81419950830664
2582
- - type: euclidean_precision
2583
- value: 75.37162649733006
2584
- - type: euclidean_recall
2585
- value: 80.42038805050817
2586
- - type: manhattan_accuracy
2587
- value: 88.67349710870494
2588
- - type: manhattan_ap
2589
- value: 85.46506475241955
2590
- - type: manhattan_f1
2591
- value: 77.87259084890393
2592
- - type: manhattan_precision
2593
- value: 74.54929577464789
2594
- - type: manhattan_recall
2595
- value: 81.50600554357868
2596
- - type: max_accuracy
2597
- value: 88.72783017037295
2598
- - type: max_ap
2599
- value: 85.52705223340233
2600
- - type: max_f1
2601
- value: 77.91659078492079
2602
- language:
2603
- - en
2604
- license: mit
2605
- ---
2606
-
2607
- # gte-large
2608
-
2609
- General Text Embeddings (GTE) model. [Towards General Text Embeddings with Multi-stage Contrastive Learning](https://arxiv.org/abs/2308.03281)
2610
-
2611
- The GTE models are trained by Alibaba DAMO Academy. They are mainly based on the BERT framework and currently offer three different sizes of models, including [GTE-large](https://huggingface.co/thenlper/gte-large), [GTE-base](https://huggingface.co/thenlper/gte-base), and [GTE-small](https://huggingface.co/thenlper/gte-small). The GTE models are trained on a large-scale corpus of relevance text pairs, covering a wide range of domains and scenarios. This enables the GTE models to be applied to various downstream tasks of text embeddings, including **information retrieval**, **semantic textual similarity**, **text reranking**, etc.
2612
-
2613
- ## Metrics
2614
-
2615
- We compared the performance of the GTE models with other popular text embedding models on the MTEB benchmark. For more detailed comparison results, please refer to the [MTEB leaderboard](https://huggingface.co/spaces/mteb/leaderboard).
2616
-
2617
-
2618
-
2619
- | Model Name | Model Size (GB) | Dimension | Sequence Length | Average (56) | Clustering (11) | Pair Classification (3) | Reranking (4) | Retrieval (15) | STS (10) | Summarization (1) | Classification (12) |
2620
- |:----:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|
2621
- | [**gte-large**](https://huggingface.co/thenlper/gte-large) | 0.67 | 1024 | 512 | **63.13** | 46.84 | 85.00 | 59.13 | 52.22 | 83.35 | 31.66 | 73.33 |
2622
- | [**gte-base**](https://huggingface.co/thenlper/gte-base) | 0.22 | 768 | 512 | **62.39** | 46.2 | 84.57 | 58.61 | 51.14 | 82.3 | 31.17 | 73.01 |
2623
- | [e5-large-v2](https://huggingface.co/intfloat/e5-large-v2) | 1.34 | 1024| 512 | 62.25 | 44.49 | 86.03 | 56.61 | 50.56 | 82.05 | 30.19 | 75.24 |
2624
- | [e5-base-v2](https://huggingface.co/intfloat/e5-base-v2) | 0.44 | 768 | 512 | 61.5 | 43.80 | 85.73 | 55.91 | 50.29 | 81.05 | 30.28 | 73.84 |
2625
- | [**gte-small**](https://huggingface.co/thenlper/gte-small) | 0.07 | 384 | 512 | **61.36** | 44.89 | 83.54 | 57.7 | 49.46 | 82.07 | 30.42 | 72.31 |
2626
- | [text-embedding-ada-002](https://platform.openai.com/docs/guides/embeddings) | - | 1536 | 8192 | 60.99 | 45.9 | 84.89 | 56.32 | 49.25 | 80.97 | 30.8 | 70.93 |
2627
- | [e5-small-v2](https://huggingface.co/intfloat/e5-small-v2) | 0.13 | 384 | 512 | 59.93 | 39.92 | 84.67 | 54.32 | 49.04 | 80.39 | 31.16 | 72.94 |
2628
- | [sentence-t5-xxl](https://huggingface.co/sentence-transformers/sentence-t5-xxl) | 9.73 | 768 | 512 | 59.51 | 43.72 | 85.06 | 56.42 | 42.24 | 82.63 | 30.08 | 73.42 |
2629
- | [all-mpnet-base-v2](https://huggingface.co/sentence-transformers/all-mpnet-base-v2) | 0.44 | 768 | 514 | 57.78 | 43.69 | 83.04 | 59.36 | 43.81 | 80.28 | 27.49 | 65.07 |
2630
- | [sgpt-bloom-7b1-msmarco](https://huggingface.co/bigscience/sgpt-bloom-7b1-msmarco) | 28.27 | 4096 | 2048 | 57.59 | 38.93 | 81.9 | 55.65 | 48.22 | 77.74 | 33.6 | 66.19 |
2631
- | [all-MiniLM-L12-v2](https://huggingface.co/sentence-transformers/all-MiniLM-L12-v2) | 0.13 | 384 | 512 | 56.53 | 41.81 | 82.41 | 58.44 | 42.69 | 79.8 | 27.9 | 63.21 |
2632
- | [all-MiniLM-L6-v2](https://huggingface.co/sentence-transformers/all-MiniLM-L6-v2) | 0.09 | 384 | 512 | 56.26 | 42.35 | 82.37 | 58.04 | 41.95 | 78.9 | 30.81 | 63.05 |
2633
- | [contriever-base-msmarco](https://huggingface.co/nthakur/contriever-base-msmarco) | 0.44 | 768 | 512 | 56.00 | 41.1 | 82.54 | 53.14 | 41.88 | 76.51 | 30.36 | 66.68 |
2634
- | [sentence-t5-base](https://huggingface.co/sentence-transformers/sentence-t5-base) | 0.22 | 768 | 512 | 55.27 | 40.21 | 85.18 | 53.09 | 33.63 | 81.14 | 31.39 | 69.81 |
2635
-
2636
-
2637
- ## Usage
2638
-
2639
- Use with the Hugging Face `transformers` library:
2640
-
2641
- ```python
2642
- import torch.nn.functional as F
2643
- from torch import Tensor
2644
- from transformers import AutoTokenizer, AutoModel
2645
-
2646
- def average_pool(last_hidden_states: Tensor,
2647
- attention_mask: Tensor) -> Tensor:
2648
- last_hidden = last_hidden_states.masked_fill(~attention_mask[..., None].bool(), 0.0)
2649
- return last_hidden.sum(dim=1) / attention_mask.sum(dim=1)[..., None]
2650
-
2651
- input_texts = [
2652
- "what is the capital of China?",
2653
- "how to implement quick sort in python?",
2654
- "Beijing",
2655
- "sorting algorithms"
2656
- ]
2657
-
2658
- tokenizer = AutoTokenizer.from_pretrained("thenlper/gte-large")
2659
- model = AutoModel.from_pretrained("thenlper/gte-large")
2660
-
2661
- # Tokenize the input texts
2662
- batch_dict = tokenizer(input_texts, max_length=512, padding=True, truncation=True, return_tensors='pt')
2663
-
2664
- outputs = model(**batch_dict)
2665
- embeddings = average_pool(outputs.last_hidden_state, batch_dict['attention_mask'])
2666
-
2667
- # (Optionally) normalize embeddings
2668
- embeddings = F.normalize(embeddings, p=2, dim=1)
2669
- scores = (embeddings[:1] @ embeddings[1:].T) * 100
2670
- print(scores.tolist())
2671
- ```
2672
-
2673
- Use with sentence-transformers:
2674
- ```python
2675
- from sentence_transformers import SentenceTransformer
2676
- from sentence_transformers.util import cos_sim
2677
-
2678
- sentences = ['That is a happy person', 'That is a very happy person']
2679
-
2680
- model = SentenceTransformer('thenlper/gte-large')
2681
- embeddings = model.encode(sentences)
2682
- print(cos_sim(embeddings[0], embeddings[1]))
2683
- ```
2684
-
2685
- ### Limitation
2686
-
2687
- This model supports English text only; any input longer than 512 tokens is truncated to the first 512 tokens.
2688
-
2689
- ### Citation
2690
-
2691
- If you find our paper or models helpful, please consider citing them as follows:
2692
-
2693
- ```
2694
- @article{li2023towards,
2695
- title={Towards general text embeddings with multi-stage contrastive learning},
2696
- author={Li, Zehan and Zhang, Xin and Zhang, Yanzhao and Long, Dingkun and Xie, Pengjun and Zhang, Meishan},
2697
- journal={arXiv preprint arXiv:2308.03281},
2698
- year={2023}
2699
- }
2700
- ```