Commit
·
070d706
1
Parent(s):
fccd128
Update README.md
Browse files
README.md
CHANGED
|
@@ -19,14 +19,16 @@ HingBERT is a Hindi-English code-mixed BERT model trained on roman text. It is a
|
|
| 19 |
More details on the dataset, models, and baseline results can be found in our [paper](https://arxiv.org/abs/2204.08398)
|
| 20 |
|
| 21 |
```
|
| 22 |
-
@
|
| 23 |
-
|
| 24 |
-
|
| 25 |
-
|
| 26 |
-
|
| 27 |
-
|
| 28 |
-
|
| 29 |
-
|
| 30 |
-
|
|
|
|
|
|
|
| 31 |
}
|
| 32 |
```
|
|
|
|
| 19 |
More details on the dataset, models, and baseline results can be found in our [paper](https://arxiv.org/abs/2204.08398)
|
| 20 |
|
| 21 |
```
|
| 22 |
+
@inproceedings{nayak-joshi-2022-l3cube,
|
| 23 |
+
title = "{L}3{C}ube-{H}ing{C}orpus and {H}ing{BERT}: A Code Mixed {H}indi-{E}nglish Dataset and {BERT} Language Models",
|
| 24 |
+
author = "Nayak, Ravindra and
|
| 25 |
+
Joshi, Raviraj",
|
| 26 |
+
booktitle = "Proceedings of the WILDRE-6 Workshop within the 13th Language Resources and Evaluation Conference",
|
| 27 |
+
month = jun,
|
| 28 |
+
year = "2022",
|
| 29 |
+
address = "Marseille, France",
|
| 30 |
+
publisher = "European Language Resources Association",
|
| 31 |
+
url = "https://aclanthology.org/2022.wildre-1.2",
|
| 32 |
+
pages = "7--12",
|
| 33 |
}
|
| 34 |
```
|