Hugging Face Spaces (Space status: Sleeping)
Commit: "Update app.py" — Browse files
File changed: app.py

@@ -15,7 +15,8 @@ torch_dtype = torch.float16 if torch.cuda.is_available() else torch.float32
# --- diff artifact: app.py lines 15-21 BEFORE this commit ---
# The "| N |" tokens and the lone "-" below are rendered diff scaffolding
# from the Spaces web page, not Python. The "-" marks the line this commit
# removes: the fixed en->fr Helsinki-NLP translation pipeline, replaced in
# the AFTER snapshot by facebook/m2m100_418M under the generic
# "translation" task.
| 15 |
question_answering = pipeline("question-answering", model="deepset/roberta-base-squad2", device=device)
|
| 16 |
code_generation = pipeline("text-generation", model="Salesforce/codegen-350M-mono", device=device)
|
| 17 |
summarization = pipeline("summarization", model="facebook/bart-large-cnn", device=device)
|
| 18 |
-
translation = pipeline("translation_en_to_fr", model="Helsinki-NLP/opus-mt-en-fr", device=device)
|
|
|
|
| 19 |
text_generation = pipeline("text-generation", model="gpt2", device=device)
|
| 20 |
text_classification = pipeline("text-classification", model="distilbert-base-uncased-finetuned-sst-2-english", device=device)
|
| 21 |
|
|
# Hugging Face inference pipelines for app.py, all placed on the shared
# `device` defined earlier in the file.
# NOTE(review): `torch_dtype` is computed just above this hunk but never
# passed to any pipeline — confirm whether fp16 on GPU was intended.

# Extractive question answering over a caller-supplied context.
question_answering = pipeline("question-answering", model="deepset/roberta-base-squad2", device=device)

# Code completion (CodeGen 350M mono — Python-centric checkpoint).
code_generation = pipeline("text-generation", model="Salesforce/codegen-350M-mono", device=device)

# Abstractive summarization.
summarization = pipeline("summarization", model="facebook/bart-large-cnn", device=device)

# M2M100 is multilingual, so TranslationPipeline requires src_lang/tgt_lang
# at call time if they are not set here. Default to the en->fr pair the
# previous Helsinki-NLP/opus-mt-en-fr model provided, so existing call
# sites keep working unchanged; callers may still override per call.
# (Removed the commented-out Helsinki-NLP line that was left behind.)
translation = pipeline("translation", model="facebook/m2m100_418M", src_lang="en", tgt_lang="fr", device=device)

# Free-form text generation.
text_generation = pipeline("text-generation", model="gpt2", device=device)

# Binary sentiment classification (SST-2 fine-tuned DistilBERT).
text_classification = pipeline("text-classification", model="distilbert-base-uncased-finetuned-sst-2-english", device=device)