Update README.md
README.md (CHANGED)
@@ -9,7 +9,9 @@ Trained Strategy :
 - In-batch Negatives : trained for 12 epochs on the KLUE MRC dataset, with negatives sampled at random from the Sparse Retrieval (TF-IDF) top 100 passages for each query

 I'm not confident that this model will work on other datasets or corpora.
-
+
+<pre>
+<code>
 from transformers import AutoTokenizer, BertPreTrainedModel, BertModel

 class BertEncoder(BertPreTrainedModel):
@@ -31,4 +33,5 @@ tokenizer = AutoTokenizer.from_pretrained(model_name)
 q_encoder = BertEncoder.from_pretrained("thingsu/koDPR_question")
 p_encoder = BertEncoder.from_pretrained("thingsu/koDPR_context")

-
+</code>
+</pre>
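
The diff elides the body of `BertEncoder` and the value of `model_name`, so the snippet below is only a sketch of how the two released encoders are typically used for dense retrieval. The encoder definition (pooled [CLS] output as the embedding), the `klue/bert-base` tokenizer stand-in, and the example query and passages are assumptions rather than details taken from the model card; the dot-product ranking is the standard DPR scoring rule.

```python
import torch
from transformers import AutoTokenizer, BertPreTrainedModel, BertModel

class BertEncoder(BertPreTrainedModel):
    # Assumed definition: the model card elides the class body. A typical
    # DPR-style encoder wraps BertModel and uses the pooled [CLS] vector
    # as the dense embedding.
    def __init__(self, config):
        super().__init__(config)
        self.bert = BertModel(config)
        self.init_weights()

    def forward(self, input_ids, attention_mask=None, token_type_ids=None):
        outputs = self.bert(input_ids,
                            attention_mask=attention_mask,
                            token_type_ids=token_type_ids)
        return outputs[1]  # pooled output, shape (batch_size, hidden_size)

# Assumption: the checkpoint behind `model_name` is not shown in this diff;
# any Korean BERT tokenizer matching the encoders' vocabulary goes here.
model_name = "klue/bert-base"
tokenizer = AutoTokenizer.from_pretrained(model_name)

q_encoder = BertEncoder.from_pretrained("thingsu/koDPR_question").eval()
p_encoder = BertEncoder.from_pretrained("thingsu/koDPR_context").eval()

# Hypothetical query and passages, just to show the scoring step.
query = "대한민국의 수도는 어디인가?"
passages = [
    "서울특별시는 대한민국의 수도이자 최대 도시이다.",
    "부산광역시는 대한민국 동남부에 위치한 항구 도시이다.",
]

with torch.no_grad():
    q_inputs = tokenizer(query, return_tensors="pt", padding=True, truncation=True)
    p_inputs = tokenizer(passages, return_tensors="pt", padding=True, truncation=True)
    q_emb = q_encoder(**q_inputs)   # (1, hidden_size)
    p_emb = p_encoder(**p_inputs)   # (num_passages, hidden_size)

# DPR ranks passages by the dot product between query and passage embeddings.
scores = torch.matmul(q_emb, p_emb.T).squeeze(0)
for idx in torch.argsort(scores, descending=True).tolist():
    print(f"{scores[idx].item():.3f}  {passages[idx]}")
```

For a real corpus the passage embeddings would be computed once offline and indexed (e.g. with FAISS), so that only the query has to be encoded at search time.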
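
The "In-batch Negatives" line above refers to the standard DPR training objective: within a batch, each question's gold passage is the positive and every other passage acts as a negative, optionally joined by hard negatives drawn from the TF-IDF top 100 for that query. The model card does not include the training code, so the following is only a sketch of that usual formulation.

```python
import torch
import torch.nn.functional as F

def in_batch_negative_loss(q_emb, p_emb, hard_neg_emb=None):
    """Sketch of the standard DPR in-batch negative loss (not the author's
    actual training code). q_emb: (B, H) question embeddings; p_emb: (B, H)
    embeddings of each question's gold passage, so gold pairs sit on the
    diagonal of the score matrix."""
    if hard_neg_emb is not None:
        # Hard negatives (e.g. sampled from the TF-IDF top-100 passages per
        # query, as described above) add extra candidate columns but are
        # never a target.
        p_emb = torch.cat([p_emb, hard_neg_emb], dim=0)
    scores = torch.matmul(q_emb, p_emb.T)                 # (B, B [+ num_hard])
    targets = torch.arange(q_emb.size(0), device=q_emb.device)
    return F.cross_entropy(scores, targets)
```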