Update README.md
Browse files
README.md
CHANGED
|
@@ -6,7 +6,7 @@ Fine-tuned Turkish instruct model (law domain) based on `nurcunal/BEDAI-2B`, mer
|
|
| 6 |
```python
|
| 7 |
from transformers import AutoTokenizer, AutoModelForCausalLM
|
| 8 |
import torch
|
| 9 |
-
m = "nurcunal/BEDAI-2B"
|
| 10 |
tok = AutoTokenizer.from_pretrained(m, use_fast=True, trust_remote_code=True)
|
| 11 |
mdl = AutoModelForCausalLM.from_pretrained(m, torch_dtype=torch.bfloat16, device_map="auto", trust_remote_code=True)
|
| 12 |
if tok.pad_token_id is None and tok.eos_token_id is not None:
|
|
@@ -180,4 +180,4 @@ model-index:
|
|
| 180 |
|
| 181 |
> **Notes**
|
| 182 |
> • QA = mean F1 over **TQuAD (TR)** and **XQuAD (TR)** for this run.
|
| 183 |
-
|
|
|
|
| 6 |
```python
|
| 7 |
from transformers import AutoTokenizer, AutoModelForCausalLM
|
| 8 |
import torch
|
| 9 |
+
m = "nurcunal/BEDAI-2.4B"
|
| 10 |
tok = AutoTokenizer.from_pretrained(m, use_fast=True, trust_remote_code=True)
|
| 11 |
mdl = AutoModelForCausalLM.from_pretrained(m, torch_dtype=torch.bfloat16, device_map="auto", trust_remote_code=True)
|
| 12 |
if tok.pad_token_id is None and tok.eos_token_id is not None:
|
|
|
|
| 180 |
|
| 181 |
> **Notes**
|
| 182 |
> • QA = mean F1 over **TQuAD (TR)** and **XQuAD (TR)** for this run.
|
| 183 |
+
|