Datasets:
Tasks:
Text Retrieval
Modalities:
Text
Formats:
parquet
Sub-tasks:
document-retrieval
Languages:
English
Size:
10M - 100M
ArXiv:
License:
| annotations_creators: | |
| - derived | |
| language: | |
| - eng | |
| license: mit | |
| multilinguality: monolingual | |
| task_categories: | |
| - text-retrieval | |
| task_ids: | |
| - document-retrieval | |
| dataset_info: | |
| - config_name: lifestyle_forum-corpus | |
| features: | |
| - name: _id | |
| dtype: string | |
| - name: text | |
| dtype: string | |
| - name: title | |
| dtype: string | |
| splits: | |
| - name: test | |
| num_bytes: 105280964 | |
| num_examples: 119461 | |
| - name: dev | |
| num_bytes: 262310604 | |
| num_examples: 268893 | |
| download_size: 223184916 | |
| dataset_size: 367591568 | |
| - config_name: lifestyle_forum-qrels | |
| features: | |
| - name: query-id | |
| dtype: string | |
| - name: corpus-id | |
| dtype: string | |
| - name: score | |
| dtype: int64 | |
| splits: | |
| - name: test | |
| num_bytes: 250245 | |
| num_examples: 10278 | |
| - name: dev | |
| num_bytes: 320043 | |
| num_examples: 12823 | |
| download_size: 193771 | |
| dataset_size: 570288 | |
| - config_name: lifestyle_forum-queries | |
| features: | |
| - name: _id | |
| dtype: string | |
| - name: text | |
| dtype: string | |
| splits: | |
| - name: test | |
| num_bytes: 143910 | |
| num_examples: 2002 | |
| - name: dev | |
| num_bytes: 138708 | |
| num_examples: 2076 | |
| download_size: 189569 | |
| dataset_size: 282618 | |
| - config_name: lifestyle_search-corpus | |
| features: | |
| - name: _id | |
| dtype: string | |
| - name: text | |
| dtype: string | |
| - name: title | |
| dtype: string | |
| splits: | |
| - name: test | |
| num_bytes: 105280964 | |
| num_examples: 119461 | |
| - name: dev | |
| num_bytes: 262310604 | |
| num_examples: 268893 | |
| download_size: 223184916 | |
| dataset_size: 367591568 | |
| - config_name: lifestyle_search-qrels | |
| features: | |
| - name: query-id | |
| dtype: string | |
| - name: corpus-id | |
| dtype: string | |
| - name: score | |
| dtype: int64 | |
| splits: | |
| - name: test | |
| num_bytes: 42801 | |
| num_examples: 1804 | |
| - name: dev | |
| num_bytes: 33522 | |
| num_examples: 1376 | |
| download_size: 29296 | |
| dataset_size: 76323 | |
| - config_name: lifestyle_search-queries | |
| features: | |
| - name: _id | |
| dtype: string | |
| - name: text | |
| dtype: string | |
| splits: | |
| - name: test | |
| num_bytes: 35216 | |
| num_examples: 661 | |
| - name: dev | |
| num_bytes: 20139 | |
| num_examples: 417 | |
| download_size: 33950 | |
| dataset_size: 55355 | |
| - config_name: recreation_forum-corpus | |
| features: | |
| - name: _id | |
| dtype: string | |
| - name: text | |
| dtype: string | |
| - name: title | |
| dtype: string | |
| splits: | |
| - name: test | |
| num_bytes: 133380077 | |
| num_examples: 166975 | |
| - name: dev | |
| num_bytes: 347645460 | |
| num_examples: 263025 | |
| download_size: 292259896 | |
| dataset_size: 481025537 | |
| - config_name: recreation_forum-qrels | |
| features: | |
| - name: query-id | |
| dtype: string | |
| - name: corpus-id | |
| dtype: string | |
| - name: score | |
| dtype: int64 | |
| splits: | |
| - name: test | |
| num_bytes: 170568 | |
| num_examples: 6947 | |
| - name: dev | |
| num_bytes: 318285 | |
| num_examples: 12752 | |
| download_size: 172706 | |
| dataset_size: 488853 | |
| - config_name: recreation_forum-queries | |
| features: | |
| - name: _id | |
| dtype: string | |
| - name: text | |
| dtype: string | |
| splits: | |
| - name: test | |
| num_bytes: 126077 | |
| num_examples: 2002 | |
| - name: dev | |
| num_bytes: 139767 | |
| num_examples: 2002 | |
| download_size: 180194 | |
| dataset_size: 265844 | |
| - config_name: recreation_search-corpus | |
| features: | |
| - name: _id | |
| dtype: string | |
| - name: text | |
| dtype: string | |
| - name: title | |
| dtype: string | |
| splits: | |
| - name: test | |
| num_bytes: 133380077 | |
| num_examples: 166975 | |
| - name: dev | |
| num_bytes: 347645460 | |
| num_examples: 263025 | |
| download_size: 292259896 | |
| dataset_size: 481025537 | |
| - config_name: recreation_search-qrels | |
| features: | |
| - name: query-id | |
| dtype: string | |
| - name: corpus-id | |
| dtype: string | |
| - name: score | |
| dtype: int64 | |
| splits: | |
| - name: test | |
| num_bytes: 48337 | |
| num_examples: 1991 | |
| - name: dev | |
| num_bytes: 43206 | |
| num_examples: 1754 | |
| download_size: 36662 | |
| dataset_size: 91543 | |
| - config_name: recreation_search-queries | |
| features: | |
| - name: _id | |
| dtype: string | |
| - name: text | |
| dtype: string | |
| splits: | |
| - name: test | |
| num_bytes: 44413 | |
| num_examples: 924 | |
| - name: dev | |
| num_bytes: 25527 | |
| num_examples: 563 | |
| download_size: 45567 | |
| dataset_size: 69940 | |
| - config_name: science_forum-corpus | |
| features: | |
| - name: _id | |
| dtype: string | |
| - name: text | |
| dtype: string | |
| - name: title | |
| dtype: string | |
| splits: | |
| - name: test | |
| num_bytes: 1441804833 | |
| num_examples: 1694164 | |
| - name: dev | |
| num_bytes: 424442207 | |
| num_examples: 343642 | |
| download_size: 1061154405 | |
| dataset_size: 1866247040 | |
| - config_name: science_forum-qrels | |
| features: | |
| - name: query-id | |
| dtype: string | |
| - name: corpus-id | |
| dtype: string | |
| - name: score | |
| dtype: int64 | |
| splits: | |
| - name: test | |
| num_bytes: 391310 | |
| num_examples: 15515 | |
| - name: dev | |
| num_bytes: 306747 | |
| num_examples: 12271 | |
| download_size: 239314 | |
| dataset_size: 698057 | |
| - config_name: science_forum-queries | |
| features: | |
| - name: _id | |
| dtype: string | |
| - name: text | |
| dtype: string | |
| splits: | |
| - name: test | |
| num_bytes: 139089 | |
| num_examples: 2017 | |
| - name: dev | |
| num_bytes: 144667 | |
| num_examples: 2013 | |
| download_size: 185822 | |
| dataset_size: 283756 | |
| - config_name: science_search-corpus | |
| features: | |
| - name: _id | |
| dtype: string | |
| - name: text | |
| dtype: string | |
| - name: title | |
| dtype: string | |
| splits: | |
| - name: test | |
| num_bytes: 1441804833 | |
| num_examples: 1694164 | |
| - name: dev | |
| num_bytes: 424442207 | |
| num_examples: 343642 | |
| download_size: 1061154405 | |
| dataset_size: 1866247040 | |
| - config_name: science_search-qrels | |
| features: | |
| - name: query-id | |
| dtype: string | |
| - name: corpus-id | |
| dtype: string | |
| - name: score | |
| dtype: int64 | |
| splits: | |
| - name: test | |
| num_bytes: 43745 | |
| num_examples: 1738 | |
| - name: dev | |
| num_bytes: 35821 | |
| num_examples: 1480 | |
| download_size: 30546 | |
| dataset_size: 79566 | |
| - config_name: science_search-queries | |
| features: | |
| - name: _id | |
| dtype: string | |
| - name: text | |
| dtype: string | |
| splits: | |
| - name: test | |
| num_bytes: 35415 | |
| num_examples: 617 | |
| - name: dev | |
| num_bytes: 30276 | |
| num_examples: 538 | |
| download_size: 36961 | |
| dataset_size: 65691 | |
| - config_name: technology_forum-corpus | |
| features: | |
| - name: _id | |
| dtype: string | |
| - name: text | |
| dtype: string | |
| - name: title | |
| dtype: string | |
| splits: | |
| - name: test | |
| num_bytes: 563417377 | |
| num_examples: 638509 | |
| - name: dev | |
| num_bytes: 906269308 | |
| num_examples: 1276222 | |
| download_size: 871612102 | |
| dataset_size: 1469686685 | |
| - config_name: technology_forum-qrels | |
| features: | |
| - name: query-id | |
| dtype: string | |
| - name: corpus-id | |
| dtype: string | |
| - name: score | |
| dtype: int64 | |
| splits: | |
| - name: test | |
| num_bytes: 399380 | |
| num_examples: 15890 | |
| - name: dev | |
| num_bytes: 402010 | |
| num_examples: 15741 | |
| download_size: 269289 | |
| dataset_size: 801390 | |
| - config_name: technology_forum-queries | |
| features: | |
| - name: _id | |
| dtype: string | |
| - name: text | |
| dtype: string | |
| splits: | |
| - name: test | |
| num_bytes: 130210 | |
| num_examples: 2004 | |
| - name: dev | |
| num_bytes: 128093 | |
| num_examples: 2003 | |
| download_size: 169025 | |
| dataset_size: 258303 | |
| - config_name: technology_search-corpus | |
| features: | |
| - name: _id | |
| dtype: string | |
| - name: text | |
| dtype: string | |
| - name: title | |
| dtype: string | |
| splits: | |
| - name: test | |
| num_bytes: 563417377 | |
| num_examples: 638509 | |
| - name: dev | |
| num_bytes: 906269308 | |
| num_examples: 1276222 | |
| download_size: 871612102 | |
| dataset_size: 1469686685 | |
| - config_name: technology_search-qrels | |
| features: | |
| - name: query-id | |
| dtype: string | |
| - name: corpus-id | |
| dtype: string | |
| - name: score | |
| dtype: int64 | |
| splits: | |
| - name: test | |
| num_bytes: 50435 | |
| num_examples: 2045 | |
| - name: dev | |
| num_bytes: 66585 | |
| num_examples: 2676 | |
| download_size: 43704 | |
| dataset_size: 117020 | |
| - config_name: technology_search-queries | |
| features: | |
| - name: _id | |
| dtype: string | |
| - name: text | |
| dtype: string | |
| splits: | |
| - name: test | |
| num_bytes: 31347 | |
| num_examples: 596 | |
| - name: dev | |
| num_bytes: 47812 | |
| num_examples: 916 | |
| download_size: 45287 | |
| dataset_size: 79159 | |
| - config_name: writing_forum-corpus | |
| features: | |
| - name: _id | |
| dtype: string | |
| - name: text | |
| dtype: string | |
| - name: title | |
| dtype: string | |
| splits: | |
| - name: test | |
| num_bytes: 145608880 | |
| num_examples: 199994 | |
| - name: dev | |
| num_bytes: 339346075 | |
| num_examples: 277072 | |
| download_size: 295248208 | |
| dataset_size: 484954955 | |
| - config_name: writing_forum-qrels | |
| features: | |
| - name: query-id | |
| dtype: string | |
| - name: corpus-id | |
| dtype: string | |
| - name: score | |
| dtype: int64 | |
| splits: | |
| - name: test | |
| num_bytes: 318255 | |
| num_examples: 12906 | |
| - name: dev | |
| num_bytes: 374196 | |
| num_examples: 15098 | |
| download_size: 227212 | |
| dataset_size: 692451 | |
| - config_name: writing_forum-queries | |
| features: | |
| - name: _id | |
| dtype: string | |
| - name: text | |
| dtype: string | |
| splits: | |
| - name: test | |
| num_bytes: 128854 | |
| num_examples: 2000 | |
| - name: dev | |
| num_bytes: 133197 | |
| num_examples: 2003 | |
| download_size: 175913 | |
| dataset_size: 262051 | |
| - config_name: writing_search-corpus | |
| features: | |
| - name: _id | |
| dtype: string | |
| - name: text | |
| dtype: string | |
| - name: title | |
| dtype: string | |
| splits: | |
| - name: test | |
| num_bytes: 145608880 | |
| num_examples: 199994 | |
| - name: dev | |
| num_bytes: 339346075 | |
| num_examples: 277072 | |
| download_size: 295248208 | |
| dataset_size: 484954955 | |
| - config_name: writing_search-qrels | |
| features: | |
| - name: query-id | |
| dtype: string | |
| - name: corpus-id | |
| dtype: string | |
| - name: score | |
| dtype: int64 | |
| splits: | |
| - name: test | |
| num_bytes: 85273 | |
| num_examples: 3546 | |
| - name: dev | |
| num_bytes: 30570 | |
| num_examples: 1287 | |
| download_size: 46495 | |
| dataset_size: 115843 | |
| - config_name: writing_search-queries | |
| features: | |
| - name: _id | |
| dtype: string | |
| - name: text | |
| dtype: string | |
| splits: | |
| - name: test | |
| num_bytes: 54024 | |
| num_examples: 1071 | |
| - name: dev | |
| num_bytes: 25584 | |
| num_examples: 497 | |
| download_size: 44074 | |
| dataset_size: 79608 | |
| configs: | |
| - config_name: lifestyle_forum-corpus | |
| data_files: | |
| - split: test | |
| path: lifestyle_forum-corpus/test-* | |
| - split: dev | |
| path: lifestyle_forum-corpus/dev-* | |
| - config_name: lifestyle_forum-qrels | |
| data_files: | |
| - split: test | |
| path: lifestyle_forum-qrels/test-* | |
| - split: dev | |
| path: lifestyle_forum-qrels/dev-* | |
| - config_name: lifestyle_forum-queries | |
| data_files: | |
| - split: test | |
| path: lifestyle_forum-queries/test-* | |
| - split: dev | |
| path: lifestyle_forum-queries/dev-* | |
| - config_name: lifestyle_search-corpus | |
| data_files: | |
| - split: test | |
| path: lifestyle_search-corpus/test-* | |
| - split: dev | |
| path: lifestyle_search-corpus/dev-* | |
| - config_name: lifestyle_search-qrels | |
| data_files: | |
| - split: test | |
| path: lifestyle_search-qrels/test-* | |
| - split: dev | |
| path: lifestyle_search-qrels/dev-* | |
| - config_name: lifestyle_search-queries | |
| data_files: | |
| - split: test | |
| path: lifestyle_search-queries/test-* | |
| - split: dev | |
| path: lifestyle_search-queries/dev-* | |
| - config_name: recreation_forum-corpus | |
| data_files: | |
| - split: test | |
| path: recreation_forum-corpus/test-* | |
| - split: dev | |
| path: recreation_forum-corpus/dev-* | |
| - config_name: recreation_forum-qrels | |
| data_files: | |
| - split: test | |
| path: recreation_forum-qrels/test-* | |
| - split: dev | |
| path: recreation_forum-qrels/dev-* | |
| - config_name: recreation_forum-queries | |
| data_files: | |
| - split: test | |
| path: recreation_forum-queries/test-* | |
| - split: dev | |
| path: recreation_forum-queries/dev-* | |
| - config_name: recreation_search-corpus | |
| data_files: | |
| - split: test | |
| path: recreation_search-corpus/test-* | |
| - split: dev | |
| path: recreation_search-corpus/dev-* | |
| - config_name: recreation_search-qrels | |
| data_files: | |
| - split: test | |
| path: recreation_search-qrels/test-* | |
| - split: dev | |
| path: recreation_search-qrels/dev-* | |
| - config_name: recreation_search-queries | |
| data_files: | |
| - split: test | |
| path: recreation_search-queries/test-* | |
| - split: dev | |
| path: recreation_search-queries/dev-* | |
| - config_name: science_forum-corpus | |
| data_files: | |
| - split: test | |
| path: science_forum-corpus/test-* | |
| - split: dev | |
| path: science_forum-corpus/dev-* | |
| - config_name: science_forum-qrels | |
| data_files: | |
| - split: test | |
| path: science_forum-qrels/test-* | |
| - split: dev | |
| path: science_forum-qrels/dev-* | |
| - config_name: science_forum-queries | |
| data_files: | |
| - split: test | |
| path: science_forum-queries/test-* | |
| - split: dev | |
| path: science_forum-queries/dev-* | |
| - config_name: science_search-corpus | |
| data_files: | |
| - split: test | |
| path: science_search-corpus/test-* | |
| - split: dev | |
| path: science_search-corpus/dev-* | |
| - config_name: science_search-qrels | |
| data_files: | |
| - split: test | |
| path: science_search-qrels/test-* | |
| - split: dev | |
| path: science_search-qrels/dev-* | |
| - config_name: science_search-queries | |
| data_files: | |
| - split: test | |
| path: science_search-queries/test-* | |
| - split: dev | |
| path: science_search-queries/dev-* | |
| - config_name: technology_forum-corpus | |
| data_files: | |
| - split: test | |
| path: technology_forum-corpus/test-* | |
| - split: dev | |
| path: technology_forum-corpus/dev-* | |
| - config_name: technology_forum-qrels | |
| data_files: | |
| - split: test | |
| path: technology_forum-qrels/test-* | |
| - split: dev | |
| path: technology_forum-qrels/dev-* | |
| - config_name: technology_forum-queries | |
| data_files: | |
| - split: test | |
| path: technology_forum-queries/test-* | |
| - split: dev | |
| path: technology_forum-queries/dev-* | |
| - config_name: technology_search-corpus | |
| data_files: | |
| - split: test | |
| path: technology_search-corpus/test-* | |
| - split: dev | |
| path: technology_search-corpus/dev-* | |
| - config_name: technology_search-qrels | |
| data_files: | |
| - split: test | |
| path: technology_search-qrels/test-* | |
| - split: dev | |
| path: technology_search-qrels/dev-* | |
| - config_name: technology_search-queries | |
| data_files: | |
| - split: test | |
| path: technology_search-queries/test-* | |
| - split: dev | |
| path: technology_search-queries/dev-* | |
| - config_name: writing_forum-corpus | |
| data_files: | |
| - split: test | |
| path: writing_forum-corpus/test-* | |
| - split: dev | |
| path: writing_forum-corpus/dev-* | |
| - config_name: writing_forum-qrels | |
| data_files: | |
| - split: test | |
| path: writing_forum-qrels/test-* | |
| - split: dev | |
| path: writing_forum-qrels/dev-* | |
| - config_name: writing_forum-queries | |
| data_files: | |
| - split: test | |
| path: writing_forum-queries/test-* | |
| - split: dev | |
| path: writing_forum-queries/dev-* | |
| - config_name: writing_search-corpus | |
| data_files: | |
| - split: test | |
| path: writing_search-corpus/test-* | |
| - split: dev | |
| path: writing_search-corpus/dev-* | |
| - config_name: writing_search-qrels | |
| data_files: | |
| - split: test | |
| path: writing_search-qrels/test-* | |
| - split: dev | |
| path: writing_search-qrels/dev-* | |
| - config_name: writing_search-queries | |
| data_files: | |
| - split: test | |
| path: writing_search-queries/test-* | |
| - split: dev | |
| path: writing_search-queries/dev-* | |
| tags: | |
| - mteb | |
| - text | |
| <!-- adapted from https://github.com/huggingface/huggingface_hub/blob/v0.30.2/src/huggingface_hub/templates/datasetcard_template.md --> | |
| <div align="center" style="padding: 40px 20px; background-color: white; border-radius: 12px; box-shadow: 0 2px 10px rgba(0, 0, 0, 0.05); max-width: 600px; margin: 0 auto;"> | |
| <h1 style="font-size: 3.5rem; color: #1a1a1a; margin: 0 0 20px 0; letter-spacing: 2px; font-weight: 700;">LoTTE</h1> | |
| <div style="font-size: 1.5rem; color: #4a4a4a; margin-bottom: 5px; font-weight: 300;">An <a href="https://github.com/embeddings-benchmark/mteb" style="color: #2c5282; font-weight: 600; text-decoration: none;" onmouseover="this.style.textDecoration='underline'" onmouseout="this.style.textDecoration='none'">MTEB</a> dataset</div> | |
| <div style="font-size: 0.9rem; color: #2c5282; margin-top: 10px;">Massive Text Embedding Benchmark</div> | |
| </div> | |
| LoTTE (Long-Tail Topic-stratified Evaluation for IR) is designed to evaluate retrieval models on underrepresented, long-tail topics. Unlike MSMARCO or BEIR, LoTTE features domain-specific queries and passages from StackExchange (covering writing, recreation, science, technology, and lifestyle), providing a challenging out-of-domain generalization benchmark. | |
| | | | | |
| |---------------|---------------------------------------------| | |
| | Task category | t2t | | |
| | Domains | Academic, Web, Social | | |
| | Reference | https://github.com/stanford-futuredata/ColBERT/blob/main/LoTTE.md | | |
| ## How to evaluate on this task | |
| You can evaluate an embedding model on this dataset using the following code: | |
| ```python | |
| import mteb | |
| task = mteb.get_tasks(["LoTTE"]) | |
| evaluator = mteb.MTEB(task) | |
| model = mteb.get_model(YOUR_MODEL) | |
| evaluator.run(model) | |
| ``` | |
| <!-- Datasets want link to arxiv in readme to autolink dataset with paper --> | |
| To learn more about how to run models on `mteb` task check out the [GitHub repository](https://github.com/embeddings-benchmark/mteb). | |
| ## Citation | |
| If you use this dataset, please cite the dataset as well as [mteb](https://github.com/embeddings-benchmark/mteb), as this dataset likely includes additional processing as a part of the [MMTEB Contribution](https://github.com/embeddings-benchmark/mteb/tree/main/docs/mmteb). | |
| ```bibtex | |
| @inproceedings{santhanam-etal-2022-colbertv2, | |
| address = {Seattle, United States}, | |
| author = {Santhanam, Keshav and | |
| Khattab, Omar and | |
| Saad-Falcon, Jon and | |
| Potts, Christopher and | |
| Zaharia, Matei}, | |
| booktitle = {Proceedings of the 2022 Conference of the North American Chapter of the Association for Computational Linguistics: Human Language Technologies}, | |
| doi = {10.18653/v1/2022.naacl-main.272}, | |
| editor = {Carpuat, Marine and | |
| de Marneffe, Marie-Catherine and | |
| Meza Ruiz, Ivan Vladimir}, | |
| month = jul, | |
| pages = {3715--3734}, | |
| publisher = {Association for Computational Linguistics}, | |
| title = {{C}ol{BERT}v2: Effective and Efficient Retrieval via Lightweight Late Interaction}, | |
| url = {https://aclanthology.org/2022.naacl-main.272/}, | |
| year = {2022}, | |
| } | |
| @article{enevoldsen2025mmtebmassivemultilingualtext, | |
| title={MMTEB: Massive Multilingual Text Embedding Benchmark}, | |
| author={Kenneth Enevoldsen and Isaac Chung and Imene Kerboua and Márton Kardos and Ashwin Mathur and David Stap and Jay Gala and Wissam Siblini and Dominik Krzemiński and Genta Indra Winata and Saba Sturua and Saiteja Utpala and Mathieu Ciancone and Marion Schaeffer and Gabriel Sequeira and Diganta Misra and Shreeya Dhakal and Jonathan Rystrøm and Roman Solomatin and Ömer Çağatan and Akash Kundu and Martin Bernstorff and Shitao Xiao and Akshita Sukhlecha and Bhavish Pahwa and Rafał Poświata and Kranthi Kiran GV and Shawon Ashraf and Daniel Auras and Björn Plüster and Jan Philipp Harries and Loïc Magne and Isabelle Mohr and Mariya Hendriksen and Dawei Zhu and Hippolyte Gisserot-Boukhlef and Tom Aarsen and Jan Kostkan and Konrad Wojtasik and Taemin Lee and Marek Šuppa and Crystina Zhang and Roberta Rocca and Mohammed Hamdy and Andrianos Michail and John Yang and Manuel Faysse and Aleksei Vatolin and Nandan Thakur and Manan Dey and Dipam Vasani and Pranjal Chitale and Simone Tedeschi and Nguyen Tai and Artem Snegirev and Michael Günther and Mengzhou Xia and Weijia Shi and Xing Han Lù and Jordan Clive and Gayatri Krishnakumar and Anna Maksimova and Silvan Wehrli and Maria Tikhonova and Henil Panchal and Aleksandr Abramov and Malte Ostendorff and Zheng Liu and Simon Clematide and Lester James Miranda and Alena Fenogenova and Guangyu Song and Ruqiya Bin Safi and Wen-Ding Li and Alessia Borghini and Federico Cassano and Hongjin Su and Jimmy Lin and Howard Yen and Lasse Hansen and Sara Hooker and Chenghao Xiao and Vaibhav Adlakha and Orion Weller and Siva Reddy and Niklas Muennighoff}, | |
| publisher = {arXiv}, | |
| journal={arXiv preprint arXiv:2502.13595}, | |
| year={2025}, | |
| url={https://arxiv.org/abs/2502.13595}, | |
| doi = {10.48550/arXiv.2502.13595}, | |
| } | |
| @article{muennighoff2022mteb, | |
| author = {Muennighoff, Niklas and Tazi, Nouamane and Magne, Lo{\"\i}c and Reimers, Nils}, | |
| title = {MTEB: Massive Text Embedding Benchmark}, | |
| publisher = {arXiv}, | |
| journal={arXiv preprint arXiv:2210.07316}, | |
| year = {2022}, | |
| url = {https://arxiv.org/abs/2210.07316}, | |
| doi = {10.48550/ARXIV.2210.07316}, | |
| } | |
| ``` | |
| # Dataset Statistics | |
| <details> | |
| <summary> Dataset Statistics</summary> | |
| The following code contains the descriptive statistics from the task. These can also be obtained using: | |
| ```python | |
| import mteb | |
| task = mteb.get_task("LoTTE") | |
| desc_stats = task.metadata.descriptive_stats | |
| ``` | |
| ```json | |
| { | |
| "test": { | |
| "num_samples": 5652100, | |
| "number_of_characters": 4673709006, | |
| "num_documents": 5638206, | |
| "min_document_length": 3, | |
| "average_document_length": 828.8094390307839, | |
| "max_document_length": 29665, | |
| "unique_documents": 5638206, | |
| "num_queries": 13894, | |
| "min_query_length": 8, | |
| "average_query_length": 51.148265438318695, | |
| "max_query_length": 150, | |
| "unique_queries": 13894, | |
| "none_queries": 0, | |
| "num_relevant_docs": 72660, | |
| "min_relevant_docs_per_query": 1, | |
| "average_relevant_docs_per_query": 5.229595508852742, | |
| "max_relevant_docs_per_query": 292, | |
| "unique_relevant_docs": 69957, | |
| "num_instructions": null, | |
| "min_instruction_length": null, | |
| "average_instruction_length": null, | |
| "max_instruction_length": null, | |
| "unique_instructions": null, | |
| "num_top_ranked": null, | |
| "min_top_ranked_per_query": null, | |
| "average_top_ranked_per_query": null, | |
| "max_top_ranked_per_query": null, | |
| "hf_subset_descriptive_stats": { | |
| "writing_search": { | |
| "num_samples": 201065, | |
| "number_of_characters": 141634344, | |
| "num_documents": 199994, | |
| "min_document_length": 3, | |
| "average_document_length": 707.9815594467834, | |
| "max_document_length": 27915, | |
| "unique_documents": 199994, | |
| "num_queries": 1071, | |
| "min_query_length": 16, | |
| "average_query_length": 39.47712418300654, | |
| "max_query_length": 82, | |
| "unique_queries": 1071, | |
| "none_queries": 0, | |
| "num_relevant_docs": 3546, | |
| "min_relevant_docs_per_query": 1, | |
| "average_relevant_docs_per_query": 3.310924369747899, | |
| "max_relevant_docs_per_query": 20, | |
| "unique_relevant_docs": 2703, | |
| "num_instructions": null, | |
| "min_instruction_length": null, | |
| "average_instruction_length": null, | |
| "max_instruction_length": null, | |
| "unique_instructions": null, | |
| "num_top_ranked": null, | |
| "min_top_ranked_per_query": null, | |
| "average_top_ranked_per_query": null, | |
| "max_top_ranked_per_query": null | |
| }, | |
| "writing_forum": { | |
| "num_samples": 201994, | |
| "number_of_characters": 141697198, | |
| "num_documents": 199994, | |
| "min_document_length": 3, | |
| "average_document_length": 707.9815594467834, | |
| "max_document_length": 27915, | |
| "unique_documents": 199994, | |
| "num_queries": 2000, | |
| "min_query_length": 8, | |
| "average_query_length": 52.567, | |
| "max_query_length": 147, | |
| "unique_queries": 2000, | |
| "none_queries": 0, | |
| "num_relevant_docs": 12906, | |
| "min_relevant_docs_per_query": 1, | |
| "average_relevant_docs_per_query": 6.453, | |
| "max_relevant_docs_per_query": 110, | |
| "unique_relevant_docs": 12906, | |
| "num_instructions": null, | |
| "min_instruction_length": null, | |
| "average_instruction_length": null, | |
| "max_instruction_length": null, | |
| "unique_instructions": null, | |
| "num_top_ranked": null, | |
| "min_top_ranked_per_query": null, | |
| "average_top_ranked_per_query": null, | |
| "max_top_ranked_per_query": null | |
| }, | |
| "recreation_search": { | |
| "num_samples": 167899, | |
| "number_of_characters": 130282739, | |
| "num_documents": 166975, | |
| "min_document_length": 4, | |
| "average_document_length": 780.047192693517, | |
| "max_document_length": 25719, | |
| "unique_documents": 166975, | |
| "num_queries": 924, | |
| "min_query_length": 17, | |
| "average_query_length": 37.185064935064936, | |
| "max_query_length": 79, | |
| "unique_queries": 924, | |
| "none_queries": 0, | |
| "num_relevant_docs": 1991, | |
| "min_relevant_docs_per_query": 1, | |
| "average_relevant_docs_per_query": 2.1547619047619047, | |
| "max_relevant_docs_per_query": 14, | |
| "unique_relevant_docs": 1563, | |
| "num_instructions": null, | |
| "min_instruction_length": null, | |
| "average_instruction_length": null, | |
| "max_instruction_length": null, | |
| "unique_instructions": null, | |
| "num_top_ranked": null, | |
| "min_top_ranked_per_query": null, | |
| "average_top_ranked_per_query": null, | |
| "max_top_ranked_per_query": null | |
| }, | |
| "recreation_forum": { | |
| "num_samples": 168977, | |
| "number_of_characters": 130351480, | |
| "num_documents": 166975, | |
| "min_document_length": 4, | |
| "average_document_length": 780.047192693517, | |
| "max_document_length": 25719, | |
| "unique_documents": 166975, | |
| "num_queries": 2002, | |
| "min_query_length": 15, | |
| "average_query_length": 51.4985014985015, | |
| "max_query_length": 140, | |
| "unique_queries": 2002, | |
| "none_queries": 0, | |
| "num_relevant_docs": 6947, | |
| "min_relevant_docs_per_query": 1, | |
| "average_relevant_docs_per_query": 3.47002997002997, | |
| "max_relevant_docs_per_query": 26, | |
| "unique_relevant_docs": 6947, | |
| "num_instructions": null, | |
| "min_instruction_length": null, | |
| "average_instruction_length": null, | |
| "max_instruction_length": null, | |
| "unique_instructions": null, | |
| "num_top_ranked": null, | |
| "min_top_ranked_per_query": null, | |
| "average_top_ranked_per_query": null, | |
| "max_top_ranked_per_query": null | |
| }, | |
| "science_search": { | |
| "num_samples": 1694781, | |
| "number_of_characters": 1410010994, | |
| "num_documents": 1694164, | |
| "min_document_length": 3, | |
| "average_document_length": 832.2584212626405, | |
| "max_document_length": 29665, | |
| "unique_documents": 1694164, | |
| "num_queries": 617, | |
| "min_query_length": 17, | |
| "average_query_length": 46.57698541329011, | |
| "max_query_length": 100, | |
| "unique_queries": 617, | |
| "none_queries": 0, | |
| "num_relevant_docs": 1738, | |
| "min_relevant_docs_per_query": 1, | |
| "average_relevant_docs_per_query": 2.8168557536466774, | |
| "max_relevant_docs_per_query": 18, | |
| "unique_relevant_docs": 1327, | |
| "num_instructions": null, | |
| "min_instruction_length": null, | |
| "average_instruction_length": null, | |
| "max_instruction_length": null, | |
| "unique_instructions": null, | |
| "num_top_ranked": null, | |
| "min_top_ranked_per_query": null, | |
| "average_top_ranked_per_query": null, | |
| "max_top_ranked_per_query": null | |
| }, | |
| "science_forum": { | |
| "num_samples": 1696181, | |
| "number_of_characters": 1410098185, | |
| "num_documents": 1694164, | |
| "min_document_length": 3, | |
| "average_document_length": 832.2584212626405, | |
| "max_document_length": 29665, | |
| "unique_documents": 1694164, | |
| "num_queries": 2017, | |
| "min_query_length": 15, | |
| "average_query_length": 57.47595438770451, | |
| "max_query_length": 150, | |
| "unique_queries": 2017, | |
| "none_queries": 0, | |
| "num_relevant_docs": 15515, | |
| "min_relevant_docs_per_query": 1, | |
| "average_relevant_docs_per_query": 7.692117005453644, | |
| "max_relevant_docs_per_query": 141, | |
| "unique_relevant_docs": 15515, | |
| "num_instructions": null, | |
| "min_instruction_length": null, | |
| "average_instruction_length": null, | |
| "max_instruction_length": null, | |
| "unique_instructions": null, | |
| "num_top_ranked": null, | |
| "min_top_ranked_per_query": null, | |
| "average_top_ranked_per_query": null, | |
| "max_top_ranked_per_query": null | |
| }, | |
| "technology_search": { | |
| "num_samples": 639105, | |
| "number_of_characters": 551593366, | |
| "num_documents": 638509, | |
| "min_document_length": 4, | |
| "average_document_length": 863.838199618173, | |
| "max_document_length": 27784, | |
| "unique_documents": 638509, | |
| "num_queries": 596, | |
| "min_query_length": 18, | |
| "average_query_length": 41.78020134228188, | |
| "max_query_length": 97, | |
| "unique_queries": 596, | |
| "none_queries": 0, | |
| "num_relevant_docs": 2045, | |
| "min_relevant_docs_per_query": 1, | |
| "average_relevant_docs_per_query": 3.4312080536912752, | |
| "max_relevant_docs_per_query": 55, | |
| "unique_relevant_docs": 1578, | |
| "num_instructions": null, | |
| "min_instruction_length": null, | |
| "average_instruction_length": null, | |
| "max_instruction_length": null, | |
| "unique_instructions": null, | |
| "num_top_ranked": null, | |
| "min_top_ranked_per_query": null, | |
| "average_top_ranked_per_query": null, | |
| "max_top_ranked_per_query": null | |
| }, | |
| "technology_forum": { | |
| "num_samples": 640513, | |
| "number_of_characters": 551675684, | |
| "num_documents": 638509, | |
| "min_document_length": 4, | |
| "average_document_length": 863.838199618173, | |
| "max_document_length": 27784, | |
| "unique_documents": 638509, | |
| "num_queries": 2004, | |
| "min_query_length": 15, | |
| "average_query_length": 53.50249500998004, | |
| "max_query_length": 147, | |
| "unique_queries": 2004, | |
| "none_queries": 0, | |
| "num_relevant_docs": 15890, | |
| "min_relevant_docs_per_query": 1, | |
| "average_relevant_docs_per_query": 7.929141716566866, | |
| "max_relevant_docs_per_query": 292, | |
| "unique_relevant_docs": 15890, | |
| "num_instructions": null, | |
| "min_instruction_length": null, | |
| "average_instruction_length": null, | |
| "max_instruction_length": null, | |
| "unique_instructions": null, | |
| "num_top_ranked": null, | |
| "min_top_ranked_per_query": null, | |
| "average_top_ranked_per_query": null, | |
| "max_top_ranked_per_query": null | |
| }, | |
| "lifestyle_search": { | |
| "num_samples": 120122, | |
| "number_of_characters": 103136066, | |
| "num_documents": 119461, | |
| "min_document_length": 13, | |
| "average_document_length": 863.1102284427553, | |
| "max_document_length": 25158, | |
| "unique_documents": 119461, | |
| "num_queries": 661, | |
| "min_query_length": 20, | |
| "average_query_length": 42.443267776096825, | |
| "max_query_length": 88, | |
| "unique_queries": 661, | |
| "none_queries": 0, | |
| "num_relevant_docs": 1804, | |
| "min_relevant_docs_per_query": 1, | |
| "average_relevant_docs_per_query": 2.729198184568835, | |
| "max_relevant_docs_per_query": 13, | |
| "unique_relevant_docs": 1250, | |
| "num_instructions": null, | |
| "min_instruction_length": null, | |
| "average_instruction_length": null, | |
| "max_instruction_length": null, | |
| "unique_instructions": null, | |
| "num_top_ranked": null, | |
| "min_top_ranked_per_query": null, | |
| "average_top_ranked_per_query": null, | |
| "max_top_ranked_per_query": null | |
| }, | |
| "lifestyle_forum": { | |
| "num_samples": 121463, | |
| "number_of_characters": 103228950, | |
| "num_documents": 119461, | |
| "min_document_length": 13, | |
| "average_document_length": 863.1102284427553, | |
| "max_document_length": 25158, | |
| "unique_documents": 119461, | |
| "num_queries": 2002, | |
| "min_query_length": 16, | |
| "average_query_length": 60.40909090909091, | |
| "max_query_length": 150, | |
| "unique_queries": 2002, | |
| "none_queries": 0, | |
| "num_relevant_docs": 10278, | |
| "min_relevant_docs_per_query": 1, | |
| "average_relevant_docs_per_query": 5.1338661338661336, | |
| "max_relevant_docs_per_query": 56, | |
| "unique_relevant_docs": 10278, | |
| "num_instructions": null, | |
| "min_instruction_length": null, | |
| "average_instruction_length": null, | |
| "max_instruction_length": null, | |
| "unique_instructions": null, | |
| "num_top_ranked": null, | |
| "min_top_ranked_per_query": null, | |
| "average_top_ranked_per_query": null, | |
| "max_top_ranked_per_query": null | |
| } | |
| } | |
| }, | |
| "dev": { | |
| "num_samples": 4870736, | |
| "number_of_characters": 4468953764, | |
| "num_documents": 4857708, | |
| "min_document_length": 1, | |
| "average_document_length": 919.8305015451732, | |
| "max_document_length": 29562, | |
| "unique_documents": 4857708, | |
| "num_queries": 13028, | |
| "min_query_length": 13, | |
| "average_query_length": 52.63877801657967, | |
| "max_query_length": 148, | |
| "unique_queries": 13028, | |
| "none_queries": 0, | |
| "num_relevant_docs": 77258, | |
| "min_relevant_docs_per_query": 1, | |
| "average_relevant_docs_per_query": 5.930150445194965, | |
| "max_relevant_docs_per_query": 150, | |
| "unique_relevant_docs": 75561, | |
| "num_instructions": null, | |
| "min_instruction_length": null, | |
| "average_instruction_length": null, | |
| "max_instruction_length": null, | |
| "unique_instructions": null, | |
| "num_top_ranked": null, | |
| "min_top_ranked_per_query": null, | |
| "average_top_ranked_per_query": null, | |
| "max_top_ranked_per_query": null, | |
| "hf_subset_descriptive_stats": { | |
| "writing_search": { | |
| "num_samples": 277569, | |
| "number_of_characters": 333970647, | |
| "num_documents": 277072, | |
| "min_document_length": 6, | |
| "average_document_length": 1205.2838973263267, | |
| "max_document_length": 29562, | |
| "unique_documents": 277072, | |
| "num_queries": 497, | |
| "min_query_length": 16, | |
| "average_query_length": 40.69818913480885, | |
| "max_query_length": 79, | |
| "unique_queries": 497, | |
| "none_queries": 0, | |
| "num_relevant_docs": 1287, | |
| "min_relevant_docs_per_query": 1, | |
| "average_relevant_docs_per_query": 2.58953722334004, | |
| "max_relevant_docs_per_query": 14, | |
| "unique_relevant_docs": 1027, | |
| "num_instructions": null, | |
| "min_instruction_length": null, | |
| "average_instruction_length": null, | |
| "max_instruction_length": null, | |
| "unique_instructions": null, | |
| "num_top_ranked": null, | |
| "min_top_ranked_per_query": null, | |
| "average_top_ranked_per_query": null, | |
| "max_top_ranked_per_query": null | |
| }, | |
| "writing_forum": { | |
| "num_samples": 279075, | |
| "number_of_characters": 334060346, | |
| "num_documents": 277072, | |
| "min_document_length": 6, | |
| "average_document_length": 1205.2838973263267, | |
| "max_document_length": 29562, | |
| "unique_documents": 277072, | |
| "num_queries": 2003, | |
| "min_query_length": 13, | |
| "average_query_length": 54.88067898152771, | |
| "max_query_length": 146, | |
| "unique_queries": 2003, | |
| "none_queries": 0, | |
| "num_relevant_docs": 15098, | |
| "min_relevant_docs_per_query": 1, | |
| "average_relevant_docs_per_query": 7.537693459810285, | |
| "max_relevant_docs_per_query": 38, | |
| "unique_relevant_docs": 15098, | |
| "num_instructions": null, | |
| "min_instruction_length": null, | |
| "average_instruction_length": null, | |
| "max_instruction_length": null, | |
| "unique_instructions": null, | |
| "num_top_ranked": null, | |
| "min_top_ranked_per_query": null, | |
| "average_top_ranked_per_query": null, | |
| "max_top_ranked_per_query": null | |
| }, | |
| "recreation_search": { | |
| "num_samples": 263588, | |
| "number_of_characters": 342314339, | |
| "num_documents": 263025, | |
| "min_document_length": 9, | |
| "average_document_length": 1301.3777967873775, | |
| "max_document_length": 29372, | |
| "unique_documents": 263025, | |
| "num_queries": 563, | |
| "min_query_length": 18, | |
| "average_query_length": 34.53641207815275, | |
| "max_query_length": 75, | |
| "unique_queries": 563, | |
| "none_queries": 0, | |
| "num_relevant_docs": 1754, | |
| "min_relevant_docs_per_query": 1, | |
| "average_relevant_docs_per_query": 3.1154529307282415, | |
| "max_relevant_docs_per_query": 14, | |
| "unique_relevant_docs": 1414, | |
| "num_instructions": null, | |
| "min_instruction_length": null, | |
| "average_instruction_length": null, | |
| "max_instruction_length": null, | |
| "unique_instructions": null, | |
| "num_top_ranked": null, | |
| "min_top_ranked_per_query": null, | |
| "average_top_ranked_per_query": null, | |
| "max_top_ranked_per_query": null | |
| }, | |
| "recreation_forum": { | |
| "num_samples": 265027, | |
| "number_of_characters": 342411603, | |
| "num_documents": 263025, | |
| "min_document_length": 9, | |
| "average_document_length": 1301.3777967873775, | |
| "max_document_length": 29372, | |
| "unique_documents": 263025, | |
| "num_queries": 2002, | |
| "min_query_length": 16, | |
| "average_query_length": 58.2957042957043, | |
| "max_query_length": 148, | |
| "unique_queries": 2002, | |
| "none_queries": 0, | |
| "num_relevant_docs": 12752, | |
| "min_relevant_docs_per_query": 1, | |
| "average_relevant_docs_per_query": 6.36963036963037, | |
| "max_relevant_docs_per_query": 41, | |
| "unique_relevant_docs": 12752, | |
| "num_instructions": null, | |
| "min_instruction_length": null, | |
| "average_instruction_length": null, | |
| "max_instruction_length": null, | |
| "unique_instructions": null, | |
| "num_top_ranked": null, | |
| "min_top_ranked_per_query": null, | |
| "average_top_ranked_per_query": null, | |
| "max_top_ranked_per_query": null | |
| }, | |
| "science_search": { | |
| "num_samples": 344180, | |
| "number_of_characters": 418054189, | |
| "num_documents": 343642, | |
| "min_document_length": 2, | |
| "average_document_length": 1216.468653424203, | |
| "max_document_length": 29033, | |
| "unique_documents": 343642, | |
| "num_queries": 538, | |
| "min_query_length": 17, | |
| "average_query_length": 45.479553903345725, | |
| "max_query_length": 93, | |
| "unique_queries": 538, | |
| "none_queries": 0, | |
| "num_relevant_docs": 1480, | |
| "min_relevant_docs_per_query": 1, | |
| "average_relevant_docs_per_query": 2.7509293680297398, | |
| "max_relevant_docs_per_query": 20, | |
| "unique_relevant_docs": 1140, | |
| "num_instructions": null, | |
| "min_instruction_length": null, | |
| "average_instruction_length": null, | |
| "max_instruction_length": null, | |
| "unique_instructions": null, | |
| "num_top_ranked": null, | |
| "min_top_ranked_per_query": null, | |
| "average_top_ranked_per_query": null, | |
| "max_top_ranked_per_query": null | |
| }, | |
| "science_forum": { | |
| "num_samples": 345655, | |
| "number_of_characters": 418151272, | |
| "num_documents": 343642, | |
| "min_document_length": 2, | |
| "average_document_length": 1216.468653424203, | |
| "max_document_length": 29033, | |
| "unique_documents": 343642, | |
| "num_queries": 2013, | |
| "min_query_length": 15, | |
| "average_query_length": 60.38301043219076, | |
| "max_query_length": 146, | |
| "unique_queries": 2013, | |
| "none_queries": 0, | |
| "num_relevant_docs": 12271, | |
| "min_relevant_docs_per_query": 1, | |
| "average_relevant_docs_per_query": 6.095876800794834, | |
| "max_relevant_docs_per_query": 150, | |
| "unique_relevant_docs": 12271, | |
| "num_instructions": null, | |
| "min_instruction_length": null, | |
| "average_instruction_length": null, | |
| "max_instruction_length": null, | |
| "unique_instructions": null, | |
| "num_top_ranked": null, | |
| "min_top_ranked_per_query": null, | |
| "average_top_ranked_per_query": null, | |
| "max_top_ranked_per_query": null | |
| }, | |
| "technology_search": { | |
| "num_samples": 1277138, | |
| "number_of_characters": 882493509, | |
| "num_documents": 1276222, | |
| "min_document_length": 1, | |
| "average_document_length": 691.4593738393477, | |
| "max_document_length": 29201, | |
| "unique_documents": 1276222, | |
| "num_queries": 916, | |
| "min_query_length": 16, | |
| "average_query_length": 41.314410480349345, | |
| "max_query_length": 95, | |
| "unique_queries": 916, | |
| "none_queries": 0, | |
| "num_relevant_docs": 2676, | |
| "min_relevant_docs_per_query": 1, | |
| "average_relevant_docs_per_query": 2.921397379912664, | |
| "max_relevant_docs_per_query": 17, | |
| "unique_relevant_docs": 2173, | |
| "num_instructions": null, | |
| "min_instruction_length": null, | |
| "average_instruction_length": null, | |
| "max_instruction_length": null, | |
| "unique_instructions": null, | |
| "num_top_ranked": null, | |
| "min_top_ranked_per_query": null, | |
| "average_top_ranked_per_query": null, | |
| "max_top_ranked_per_query": null | |
| }, | |
| "technology_forum": { | |
| "num_samples": 1278225, | |
| "number_of_characters": 882560768, | |
| "num_documents": 1276222, | |
| "min_document_length": 1, | |
| "average_document_length": 691.4593738393477, | |
| "max_document_length": 29201, | |
| "unique_documents": 1276222, | |
| "num_queries": 2003, | |
| "min_query_length": 15, | |
| "average_query_length": 52.47279081377933, | |
| "max_query_length": 148, | |
| "unique_queries": 2003, | |
| "none_queries": 0, | |
| "num_relevant_docs": 15741, | |
| "min_relevant_docs_per_query": 1, | |
| "average_relevant_docs_per_query": 7.858711932101847, | |
| "max_relevant_docs_per_query": 149, | |
| "unique_relevant_docs": 15741, | |
| "num_instructions": null, | |
| "min_instruction_length": null, | |
| "average_instruction_length": null, | |
| "max_instruction_length": null, | |
| "unique_instructions": null, | |
| "num_top_ranked": null, | |
| "min_top_ranked_per_query": null, | |
| "average_top_ranked_per_query": null, | |
| "max_top_ranked_per_query": null | |
| }, | |
| "lifestyle_search": { | |
| "num_samples": 269310, | |
| "number_of_characters": 257418954, | |
| "num_documents": 268893, | |
| "min_document_length": 4, | |
| "average_document_length": 957.2703342965417, | |
| "max_document_length": 28879, | |
| "unique_documents": 268893, | |
| "num_queries": 417, | |
| "min_query_length": 16, | |
| "average_query_length": 37.55875299760192, | |
| "max_query_length": 70, | |
| "unique_queries": 417, | |
| "none_queries": 0, | |
| "num_relevant_docs": 1376, | |
| "min_relevant_docs_per_query": 1, | |
| "average_relevant_docs_per_query": 3.2997601918465227, | |
| "max_relevant_docs_per_query": 18, | |
| "unique_relevant_docs": 1122, | |
| "num_instructions": null, | |
| "min_instruction_length": null, | |
| "average_instruction_length": null, | |
| "max_instruction_length": null, | |
| "unique_instructions": null, | |
| "num_top_ranked": null, | |
| "min_top_ranked_per_query": null, | |
| "average_top_ranked_per_query": null, | |
| "max_top_ranked_per_query": null | |
| }, | |
| "lifestyle_forum": { | |
| "num_samples": 270969, | |
| "number_of_characters": 257518137, | |
| "num_documents": 268893, | |
| "min_document_length": 4, | |
| "average_document_length": 957.2703342965417, | |
| "max_document_length": 28879, | |
| "unique_documents": 268893, | |
| "num_queries": 2076, | |
| "min_query_length": 15, | |
| "average_query_length": 55.32032755298651, | |
| "max_query_length": 141, | |
| "unique_queries": 2076, | |
| "none_queries": 0, | |
| "num_relevant_docs": 12823, | |
| "min_relevant_docs_per_query": 1, | |
| "average_relevant_docs_per_query": 6.176782273603083, | |
| "max_relevant_docs_per_query": 77, | |
| "unique_relevant_docs": 12823, | |
| "num_instructions": null, | |
| "min_instruction_length": null, | |
| "average_instruction_length": null, | |
| "max_instruction_length": null, | |
| "unique_instructions": null, | |
| "num_top_ranked": null, | |
| "min_top_ranked_per_query": null, | |
| "average_top_ranked_per_query": null, | |
| "max_top_ranked_per_query": null | |
| } | |
| } | |
| } | |
| } | |
| ``` | |
| </details> | |
| --- | |
| *This dataset card was automatically generated using [MTEB](https://github.com/embeddings-benchmark/mteb)* |