Add dataset card
README.md
CHANGED
@@ -54,25 +54,25 @@ If you use this dataset, please cite the dataset as well as [mteb](https://githu
 ```bibtex
 
 @inproceedings{cohan-etal-2020-specter,
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    abstract = {Representation learning is a critical ingredient for natural language processing systems. Recent Transformer language models like BERT learn powerful textual representations, but these models are targeted towards token- and sentence-level training objectives and do not leverage information on inter-document relatedness, which limits their document-level representation power. For applications on scientific documents, such as classification and recommendation, accurate embeddings of documents are a necessity. We propose SPECTER, a new method to generate document-level embedding of scientific papers based on pretraining a Transformer language model on a powerful signal of document-level relatedness: the citation graph. Unlike existing pretrained language models, Specter can be easily applied to downstream applications without task-specific fine-tuning. Additionally, to encourage further research on document-level models, we introduce SciDocs, a new evaluation benchmark consisting of seven document-level tasks ranging from citation prediction, to document classification and recommendation. We show that Specter outperforms a variety of competitive baselines on the benchmark.},
+    address = {Online},
+    author = {Cohan, Arman and
+      Feldman, Sergey and
+      Beltagy, Iz and
+      Downey, Doug and
+      Weld, Daniel},
+    booktitle = {Proceedings of the 58th Annual Meeting of the Association for Computational Linguistics},
+    doi = {10.18653/v1/2020.acl-main.207},
+    editor = {Jurafsky, Dan and
+      Chai, Joyce and
+      Schluter, Natalie and
+      Tetreault, Joel},
+    month = jul,
+    pages = {2270--2282},
+    publisher = {Association for Computational Linguistics},
+    title = {{SPECTER}: Document-level Representation Learning using Citation-informed Transformers},
+    url = {https://aclanthology.org/2020.acl-main.207},
+    year = {2020},
 }
 
 
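For context, here is a minimal LaTeX sketch of how the entry added above would be used; the file name `references.bib` and the `plain` bibliography style are illustrative assumptions, not part of the dataset card.

```latex
% Minimal sketch: citing the SPECTER paper from a LaTeX document.
% Assumes the BibTeX entry above has been saved to references.bib
% (the file name and bibliography style are illustrative choices).
\documentclass{article}
\begin{document}
We use document embeddings from SPECTER \cite{cohan-etal-2020-specter}.
\bibliographystyle{plain}
\bibliography{references}
\end{document}
```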