fix AutoModelForCausalLM import
Browse files
README.md
CHANGED
|
@@ -49,10 +49,10 @@ July 25, 2023
|
|
| 49 |
| 50 | ```python
| 51 | import torch
| 52 | - from transformers import AutoTokenizer,
| 53 |
| 54 | # load base LLM model and tokenizer
| 55 | - model =
| 56 |   "philschmid/llama-2-7b-instruction-generator",
| 57 |   low_cpu_mem_usage=True,
| 58 |   torch_dtype=torch.float16,
| 49 |
| 50 | ```python
| 51 | import torch
| 52 | + from transformers import AutoTokenizer, AutoModelForCausalLM
| 53 |
| 54 | # load base LLM model and tokenizer
| 55 | + model = AutoModelForCausalLM.from_pretrained(
| 56 |   "philschmid/llama-2-7b-instruction-generator",
| 57 |   low_cpu_mem_usage=True,
| 58 |   torch_dtype=torch.float16,