Commit: truncation=True added to pipeline
File changed: README.md
@@ -38,7 +38,7 @@ tokenizer = AutoTokenizer.from_pretrained("nirajandhakal/LLaMA3-Reasoning")
 model = AutoModelForCausalLM.from_pretrained("nirajandhakal/LLaMA3-Reasoning")

-pipe = pipeline("text-generation", model="nirajandhakal/LLaMA3-Reasoning")
+pipe = pipeline("text-generation", model="nirajandhakal/LLaMA3-Reasoning", truncation=True)

 # Define a prompt for the model
 prompt = "What are the benefits of using artificial intelligence in healthcare?"