aliabd committed on
Commit
6c5635d
·
1 Parent(s): a59f6f2

Upload with huggingface_hub

Browse files
Files changed (2) hide show
  1. README.md +2 -1
  2. app.py +33 -0
README.md CHANGED
@@ -6,6 +6,7 @@ colorFrom: indigo
6
  colorTo: indigo
7
  sdk: gradio
8
  sdk_version: 3.4.1
9
- app_file: run.py
 
10
  pinned: false
11
  ---
 
6
  colorTo: indigo
7
  sdk: gradio
8
  sdk_version: 3.4.1
9
+
10
+ app_file: app.py
11
  pinned: false
12
  ---
app.py ADDED
@@ -0,0 +1,33 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import gradio as gr
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM, pipeline
import torch

# NLLB-200 distilled checkpoint, loaded from https://hf.co/models
MODEL_NAME = "facebook/nllb-200-distilled-600M"
model = AutoModelForSeq2SeqLM.from_pretrained(MODEL_NAME)
tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)

# Run on the first GPU when one is available, otherwise fall back to CPU.
device = 0 if torch.cuda.is_available() else -1

# FLORES-200 language codes offered in the UI dropdowns.
LANGS = ["ace_Arab", "eng_Latn", "fra_Latn", "spa_Latn"]
11
def translate(text, src_lang, tgt_lang):
    """Translate *text* from ``src_lang`` to ``tgt_lang``.

    A translation pipeline is built per call because the source and
    target languages are chosen by the user at request time.
    Returns the translated string.
    """
    translator = pipeline(
        "translation",
        model=model,
        tokenizer=tokenizer,
        src_lang=src_lang,
        tgt_lang=tgt_lang,
        max_length=400,
        device=device,
    )
    outputs = translator(text)
    return outputs[0]["translation_text"]
18
+
19
# Wire the translate function into a simple three-input Gradio UI.
demo = gr.Interface(
    fn=translate,
    inputs=[
        gr.components.Textbox(label="Text"),
        gr.components.Dropdown(label="Source Language", choices=LANGS),
        gr.components.Dropdown(label="Target Language", choices=LANGS),
    ],
    outputs=["text"],
    examples=[
        ["Building a translation demo with Gradio is so easy!", "eng_Latn", "spa_Latn"],
    ],
    # Example output would require a model run at build time; skip it.
    cache_examples=False,
    title="Translation Demo",
    description="This demo is a simplified version of the original [NLLB-Translator](https://huggingface.co/spaces/Narrativaai/NLLB-Translator) space",
)

demo.launch()