Update README.md
Browse files
README.md
CHANGED
|
@@ -57,13 +57,13 @@ The easiest way is to load the Inference API from Hugging Face; the second method
|
|
| 57 |
```python
|
| 58 |
# Use a pipeline as a high-level helper
|
| 59 |
from transformers import pipeline
|
| 60 |
-
pipe = pipeline("token-classification", model="
|
| 61 |
|
| 62 |
# Load model directly
|
| 63 |
from transformers import AutoTokenizer, AutoModelForTokenClassification
|
| 64 |
|
| 65 |
-
tokenizer = AutoTokenizer.from_pretrained("
|
| 66 |
-
model = AutoModelForTokenClassification.from_pretrained("
|
| 67 |
```
|
| 68 |
|
| 69 |
|
|
|
|
| 57 |
```python
|
| 58 |
# Use a pipeline as a high-level helper
|
| 59 |
from transformers import pipeline
|
| 60 |
+
pipe = pipeline("token-classification", model="Clinical-AI-Apollo/Medical-NER", aggregation_strategy='simple')
|
| 61 |
|
| 62 |
# Load model directly
|
| 63 |
from transformers import AutoTokenizer, AutoModelForTokenClassification
|
| 64 |
|
| 65 |
+
tokenizer = AutoTokenizer.from_pretrained("Clinical-AI-Apollo/Medical-NER")
|
| 66 |
+
model = AutoModelForTokenClassification.from_pretrained("Clinical-AI-Apollo/Medical-NER")
|
| 67 |
```
|
| 68 |
|
| 69 |
|