Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
|
@@ -14,7 +14,7 @@ model = AutoModelForCausalLM.from_pretrained(
|
|
| 14 |
|
| 15 |
def generate_text(prompt):
|
| 16 |
inputs = tokenizer(prompt, return_tensors="pt")
|
| 17 |
-
output = model.generate(**inputs, max_new_tokens=
|
| 18 |
response = tokenizer.decode(output[0], skip_special_tokens=True)
|
| 19 |
return response
|
| 20 |
|
|
|
|
| 14 |
|
| 15 |
def generate_text(prompt):
    """Generate a text completion for *prompt*.

    Uses the module-level ``tokenizer`` and ``model`` (loaded via
    ``AutoModelForCausalLM.from_pretrained`` above — see the file header).

    Args:
        prompt: Input text to condition generation on.

    Returns:
        str: The decoded output sequence. Note this includes the prompt
        itself, since the whole generated tensor is decoded.
    """
    # Local import so this fix does not depend on file-level imports we
    # cannot see from this chunk.
    import torch

    inputs = tokenizer(prompt, return_tensors="pt")
    # Inference only: disable autograd so generation does not track
    # gradients or retain activation memory.
    with torch.no_grad():
        output = model.generate(
            **inputs,
            max_new_tokens=50,
            do_sample=True,
            temperature=0.1,  # near-greedy: very low sampling randomness
        )
    return tokenizer.decode(output[0], skip_special_tokens=True)
|
| 20 |
|