Update usage example with trust_remote_code parameter
Browse files

README.md CHANGED

@@ -98,9 +98,12 @@ Training and inference on M2 Pro (measured at checkpoint 20000):
 98    ```python
 99    from transformers import AutoTokenizer, AutoModelForCausalLM
100
101  - # Load model and tokenizer
102    tokenizer = AutoTokenizer.from_pretrained("jacksuuuu/nanogpt-mlx-53m-finewebedu")
103  - model = AutoModelForCausalLM.from_pretrained(
104
105    # Generate text
106    prompt = "Once upon a time"
 98    ```python
 99    from transformers import AutoTokenizer, AutoModelForCausalLM
100
101  + # Load model and tokenizer (requires trust_remote_code for custom architecture)
102    tokenizer = AutoTokenizer.from_pretrained("jacksuuuu/nanogpt-mlx-53m-finewebedu")
103  + model = AutoModelForCausalLM.from_pretrained(
104  +     "jacksuuuu/nanogpt-mlx-53m-finewebedu",
105  +     trust_remote_code=True
106  + )
107
108    # Generate text
109    prompt = "Once upon a time"