Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
|
@@ -10,7 +10,7 @@ tokenizer = AutoTokenizer.from_pretrained(model_name)
|
|
| 10 |
def generate_text(prompt):
|
| 11 |
inputs = tokenizer(prompt, return_tensors="pt")
|
| 12 |
outputs = model.generate(inputs['input_ids'], max_length=100, num_return_sequences=1)
|
| 13 |
-
generated_text = tokenizer.decode(outputs[0], skip_special_tokens=
|
| 14 |
return generated_text
|
| 15 |
|
| 16 |
# Set up the Gradio interface
|
|
|
|
def generate_text(prompt):
    """Generate a text continuation for *prompt* and return it as a string.

    Encodes the prompt with the module-level ``tokenizer``, asks the
    module-level ``model`` for a single sequence of at most 100 tokens,
    and decodes the first (only) returned sequence. Special tokens are
    deliberately kept in the decoded output (``skip_special_tokens=False``).

    NOTE(review): ``max_length`` counts prompt + generated tokens — TODO
    confirm that is the intent rather than ``max_new_tokens``.
    """
    encoded = tokenizer(prompt, return_tensors="pt")
    sequences = model.generate(
        encoded["input_ids"],
        max_length=100,
        num_return_sequences=1,
    )
    return tokenizer.decode(sequences[0], skip_special_tokens=False)
|