Spaces:
Running
Running
Commit
·
bd6cbf6
1
Parent(s):
1387c21
Update app.py
Browse files
app.py
CHANGED
|
@@ -107,69 +107,89 @@
|
|
| 107 |
|
| 108 |
# love
|
| 109 |
|
| 110 |
-
import os
|
| 111 |
-
from huggingface_hub import InferenceClient
|
| 112 |
-
import gradio as gr
|
| 113 |
-
from dotenv import load_dotenv
|
| 114 |
|
| 115 |
-
# Load env
|
| 116 |
-
load_dotenv()
|
| 117 |
-
HF_TOKEN = os.getenv("HF_TOKEN")
|
| 118 |
-
|
| 119 |
-
# Init client
|
| 120 |
-
client = InferenceClient(token=HF_TOKEN)
|
| 121 |
-
|
| 122 |
-
# Languages supported
|
| 123 |
-
LANGUAGES = {
|
| 124 |
-
|
| 125 |
-
|
| 126 |
-
|
| 127 |
-
|
| 128 |
-
|
| 129 |
-
|
| 130 |
-
|
| 131 |
-
|
| 132 |
-
|
| 133 |
-
}
|
| 134 |
-
|
| 135 |
-
MODEL_NAME = "facebook/nllb-200-distilled-600M"
|
| 136 |
-
|
| 137 |
-
|
| 138 |
-
def translate(input_text: str, language_label: str) -> str:
|
| 139 |
-
|
| 140 |
-
|
| 141 |
-
|
| 142 |
-
|
| 143 |
-
|
| 144 |
-
|
| 145 |
-
|
| 146 |
-
|
| 147 |
-
|
| 148 |
-
|
| 149 |
-
|
| 150 |
-
|
| 151 |
-
|
| 152 |
-
|
| 153 |
-
|
| 154 |
-
|
| 155 |
-
|
| 156 |
-
# Gradio UI
|
| 157 |
-
translator = gr.Interface(
|
| 158 |
-
|
| 159 |
-
|
| 160 |
-
|
| 161 |
-
|
| 162 |
-
|
| 163 |
-
|
| 164 |
-
|
| 165 |
-
|
| 166 |
-
)
|
| 167 |
-
|
| 168 |
-
translator.launch()
|
| 169 |
|
| 170 |
|
| 171 |
# hate
|
| 172 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 173 |
|
| 174 |
# import requests
|
| 175 |
# import gradio as gr
|
|
|
|
| 107 |
|
| 108 |
# love
|
| 109 |
|
| 110 |
+
# import os
|
| 111 |
+
# from huggingface_hub import InferenceClient
|
| 112 |
+
# import gradio as gr
|
| 113 |
+
# from dotenv import load_dotenv
|
| 114 |
|
| 115 |
+
# # Load env
|
| 116 |
+
# load_dotenv()
|
| 117 |
+
# HF_TOKEN = os.getenv("HF_TOKEN")
|
| 118 |
+
|
| 119 |
+
# # Init client
|
| 120 |
+
# client = InferenceClient(token=HF_TOKEN)
|
| 121 |
+
|
| 122 |
+
# # Languages supported
|
| 123 |
+
# LANGUAGES = {
|
| 124 |
+
# "English β Afrikaans": "afr",
|
| 125 |
+
# "English β Xhosa": "xho",
|
| 126 |
+
# "English β Zulu": "zul",
|
| 127 |
+
# "English β Sesotho": "sot",
|
| 128 |
+
# "English β Tswana": "tsn",
|
| 129 |
+
# "English β Northern Sotho": "nso",
|
| 130 |
+
# "English β Swati": "ssw",
|
| 131 |
+
# "English β Tsonga": "tso",
|
| 132 |
+
# "English β Venda": "ven",
|
| 133 |
+
# }
|
| 134 |
+
|
| 135 |
+
# MODEL_NAME = "facebook/nllb-200-distilled-600M"
|
| 136 |
+
|
| 137 |
+
|
| 138 |
+
# def translate(input_text: str, language_label: str) -> str:
|
| 139 |
+
# if not input_text.strip():
|
| 140 |
+
# return "Error: Please enter text to translate."
|
| 141 |
+
|
| 142 |
+
# lang_code = LANGUAGES[language_label]
|
| 143 |
+
# formatted_input = f">>{lang_code}<< {input_text}"
|
| 144 |
+
|
| 145 |
+
# try:
|
| 146 |
+
# response = client.text_generation(
|
| 147 |
+
# prompt=formatted_input,
|
| 148 |
+
# model=MODEL_NAME,
|
| 149 |
+
# max_new_tokens=200,
|
| 150 |
+
# )
|
| 151 |
+
# return response.strip()
|
| 152 |
+
# except Exception as e:
|
| 153 |
+
# return f"Error: {str(e)}"
|
| 154 |
+
|
| 155 |
+
|
| 156 |
+
# # Gradio UI
|
| 157 |
+
# translator = gr.Interface(
|
| 158 |
+
# fn=translate,
|
| 159 |
+
# inputs=[
|
| 160 |
+
# gr.Textbox(label="Input Text", placeholder="Type English text here..."),
|
| 161 |
+
# gr.Dropdown(list(LANGUAGES.keys()), label="Target Language"),
|
| 162 |
+
# ],
|
| 163 |
+
# outputs=gr.Textbox(label="Translation"),
|
| 164 |
+
# title="NLLB-200 Translator",
|
| 165 |
+
# description="Translate English to South African languages using Meta's NLLB model",
|
| 166 |
+
# )
|
| 167 |
+
|
| 168 |
+
# translator.launch()
|
| 169 |
|
| 170 |
|
| 171 |
# hate
|
| 172 |
|
| 173 |
+
import os
|
| 174 |
+
from dotenv import load_dotenv
|
| 175 |
+
from huggingface_hub import InferenceClient
|
| 176 |
+
|
| 177 |
+
load_dotenv()
|
| 178 |
+
token = os.getenv("HF_TOKEN")
|
| 179 |
+
client = InferenceClient(token=token)
|
| 180 |
+
|
| 181 |
+
try:
|
| 182 |
+
prompt = ">>zul<< Hello, how are you?"
|
| 183 |
+
response = client.text_generation(
|
| 184 |
+
prompt=prompt, model="facebook/nllb-200-distilled-600M", max_new_tokens=100
|
| 185 |
+
)
|
| 186 |
+
print("Result:", response.strip())
|
| 187 |
+
except Exception as e:
|
| 188 |
+
import traceback
|
| 189 |
+
|
| 190 |
+
print("Error occurred:", str(e))
|
| 191 |
+
print(traceback.format_exc())
|
| 192 |
+
|
| 193 |
|
| 194 |
# import requests
|
| 195 |
# import gradio as gr
|