Hugging Face Spaces — commit: "Update app.py"
File: app.py (CHANGED)
|
@@ -3,13 +3,13 @@ from transformers import pipeline
|
|
| 3 |
from transformers import TextDataset, DataCollatorForLanguageModeling
|
| 4 |
from transformers import Trainer, TrainingArguments,AutoModelWithLMHead
|
| 5 |
|
| 6 |
-
chef = pipeline('text-generation', model="./en_gpt2-medium_rachel_replics/en_gpt2-medium_rachel_replics", tokenizer=
|
| 7 |
|
| 8 |
|
| 9 |
# gradio part
|
| 10 |
def echo(message, history, model):
|
| 11 |
|
| 12 |
-
chef = pipeline('text-generation', model="./models/en_gpt2-medium_rachel_replics", tokenizer=model_type)
|
| 13 |
|
| 14 |
if model=="gpt2-medium":
|
| 15 |
answer = chef(f"<s>NOTFRIEND: {message}\nRACHEL:")[0]['generated_text']
|
|
|
|
| 3 |
from transformers import TextDataset, DataCollatorForLanguageModeling
|
| 4 |
from transformers import Trainer, TrainingArguments,AutoModelWithLMHead
|
| 5 |
|
| 6 |
+
chef = pipeline('text-generation', model="./en_gpt2-medium_rachel_replics/en_gpt2-medium_rachel_replics", tokenizer="gpt2-medium")
|
| 7 |
|
| 8 |
|
| 9 |
# gradio part
|
| 10 |
def echo(message, history, model):
|
| 11 |
|
| 12 |
+
#chef = pipeline('text-generation', model="./models/en_gpt2-medium_rachel_replics", tokenizer=model_type)
|
| 13 |
|
| 14 |
if model=="gpt2-medium":
|
| 15 |
answer = chef(f"<s>NOTFRIEND: {message}\nRACHEL:")[0]['generated_text']
|