LouisMonawe commited on
Commit
23ef7b2
Β·
1 Parent(s): bd6cbf6
Files changed (2) hide show
  1. app.py +55 -314
  2. main.py +335 -7
app.py CHANGED
@@ -1,346 +1,87 @@
1
- # import requests
2
  # import gradio as gr
3
- # from dotenv import load_dotenv
4
- # import os
5
 
6
- # # Load environment variables from .env file
7
- # load_dotenv()
8
- # HF_TOKEN = os.getenv("HF_TOKEN")
 
9
 
10
- # model_name = "Helsinki-NLP/opus-mt-en-nso"
11
- # API_URL = f"https://api-inference.huggingface.co/models/{model_name}"
12
- # headers = {"Authorization": f"Bearer {HF_TOKEN}"}
13
-
14
-
15
- # def query(payload):
16
- # # HTTP POST Request
17
- # response = requests.post(API_URL, headers=headers, json=payload)
18
- # return response.json()
19
-
20
-
21
- # def translate(input_text):
22
- # # API Request:
23
- # response = query({"inputs": input_text, "options": {"wait_for_model": True}})
24
-
25
- # translation = response[0]["translation_text"]
26
-
27
- # return translation
28
-
29
-
30
- # translator = gr.Interface(
31
- # fn=translate,
32
- # inputs=[gr.Textbox(label="Input Text", placeholder="Input Text To Be Translated")],
33
- # outputs=gr.Textbox(label="Translation"),
34
- # title="Translademia",
35
- # )
36
-
37
- # translator.launch()
38
-
39
-
40
- # # The one we are going with
41
- # import requests
42
- # import gradio as gr
43
- # from dotenv import load_dotenv
44
- # import os
45
-
46
- # # Load environment variables
47
- # load_dotenv()
48
- # HF_TOKEN = os.getenv("HF_TOKEN")
49
- # headers = {"Authorization": f"Bearer {HF_TOKEN}"}
50
-
51
- # # Language to ISO 639-3 codes (used for NLLB-200)
52
  # LANGUAGES = {
53
- # "English β†’ Afrikaans": "afr",
54
- # "English β†’ Xhosa": "xho",
55
- # "English β†’ Zulu": "zul",
56
- # "English β†’ Sesotho": "sot",
57
- # "English β†’ Tswana": "tsn",
58
- # "English β†’ Northern Sotho": "nso",
59
- # "English β†’ Swati": "ssw",
60
- # "English β†’ Tsonga": "tso",
61
- # "English β†’ Venda": "ven",
62
  # }
63
 
64
- # MODEL_NAME = "facebook/nllb-200-distilled-600M"
65
- # API_URL = f"https://api-inference.huggingface.co/models/{MODEL_NAME}"
66
-
67
-
68
- # def query(payload):
69
- # response = requests.post(API_URL, headers=headers, json=payload)
70
-
71
- # if response.status_code != 200:
72
- # print(f"[ERROR] API failed: {response.status_code} - {response.text}")
73
- # return {"error": f"Request failed with {response.status_code}"}
74
-
75
- # try:
76
- # return response.json()
77
- # except requests.exceptions.JSONDecodeError:
78
- # print(f"[ERROR] Failed to parse JSON: {response.text}")
79
- # return {"error": "Invalid JSON from API"}
80
-
81
 
82
- # def translate(input_text, language_label):
83
- # language_code = LANGUAGES[language_label]
84
- # formatted_input = f">>{language_code}<< {input_text}"
85
 
86
- # response = query({"inputs": formatted_input, "options": {"wait_for_model": True}})
 
 
87
 
88
- # if "error" in response:
89
- # return f"Error: {response['error']}"
 
 
90
 
91
- # return response[0]["translation_text"]
92
 
93
-
94
- # translator = gr.Interface(
95
  # fn=translate,
96
  # inputs=[
97
- # gr.Textbox(label="Input Text", placeholder="Type text here..."),
98
- # gr.Dropdown(list(LANGUAGES.keys()), label="Select Language Target"),
99
- # ],
100
- # outputs=gr.Textbox(label="Translation"),
101
- # title="Translademia",
102
- # description="Translate English text to South African languages using Meta's NLLB-200 model.",
103
- # )
104
-
105
- # translator.launch()
106
-
107
-
108
- # love
109
-
110
- # import os
111
- # from huggingface_hub import InferenceClient
112
- # import gradio as gr
113
- # from dotenv import load_dotenv
114
-
115
- # # Load env
116
- # load_dotenv()
117
- # HF_TOKEN = os.getenv("HF_TOKEN")
118
-
119
- # # Init client
120
- # client = InferenceClient(token=HF_TOKEN)
121
-
122
- # # Languages supported
123
- # LANGUAGES = {
124
- # "English β†’ Afrikaans": "afr",
125
- # "English β†’ Xhosa": "xho",
126
- # "English β†’ Zulu": "zul",
127
- # "English β†’ Sesotho": "sot",
128
- # "English β†’ Tswana": "tsn",
129
- # "English β†’ Northern Sotho": "nso",
130
- # "English β†’ Swati": "ssw",
131
- # "English β†’ Tsonga": "tso",
132
- # "English β†’ Venda": "ven",
133
- # }
134
-
135
- # MODEL_NAME = "facebook/nllb-200-distilled-600M"
136
-
137
-
138
- # def translate(input_text: str, language_label: str) -> str:
139
- # if not input_text.strip():
140
- # return "Error: Please enter text to translate."
141
-
142
- # lang_code = LANGUAGES[language_label]
143
- # formatted_input = f">>{lang_code}<< {input_text}"
144
-
145
- # try:
146
- # response = client.text_generation(
147
- # prompt=formatted_input,
148
- # model=MODEL_NAME,
149
- # max_new_tokens=200,
150
- # )
151
- # return response.strip()
152
- # except Exception as e:
153
- # return f"Error: {str(e)}"
154
-
155
-
156
- # # Gradio UI
157
- # translator = gr.Interface(
158
- # fn=translate,
159
- # inputs=[
160
- # gr.Textbox(label="Input Text", placeholder="Type English text here..."),
161
  # gr.Dropdown(list(LANGUAGES.keys()), label="Target Language"),
162
  # ],
163
- # outputs=gr.Textbox(label="Translation"),
164
- # title="NLLB-200 Translator",
165
- # description="Translate English to South African languages using Meta's NLLB model",
166
  # )
167
 
168
- # translator.launch()
169
-
170
 
171
- # hate
172
 
173
- import os
 
174
  from dotenv import load_dotenv
175
- from huggingface_hub import InferenceClient
176
 
 
177
  load_dotenv()
178
- token = os.getenv("HF_TOKEN")
179
- client = InferenceClient(token=token)
180
-
181
- try:
182
- prompt = ">>zul<< Hello, how are you?"
183
- response = client.text_generation(
184
- prompt=prompt, model="facebook/nllb-200-distilled-600M", max_new_tokens=100
185
- )
186
- print("Result:", response.strip())
187
- except Exception as e:
188
- import traceback
189
-
190
- print("Error occurred:", str(e))
191
- print(traceback.format_exc())
192
-
193
-
194
- # import requests
195
- # import gradio as gr
196
- # from dotenv import load_dotenv
197
- # import os
198
-
199
- # # Load Hugging Face token from .env
200
- # load_dotenv()
201
- # HF_TOKEN = os.getenv("HF_TOKEN")
202
- # headers = {"Authorization": f"Bearer {HF_TOKEN}"}
203
-
204
- # # NLLB model name
205
- # MODEL_NAME = "facebook/nllb-200-3.3B"
206
- # API_URL = f"https://api-inference.huggingface.co/models/{MODEL_NAME}"
207
-
208
- # # Define supported language pairs and NLLB codes
209
- # LANGUAGE_PAIRS = {
210
- # "English β†’ Afrikaans": ("eng_Latn", "afr_Latn"),
211
- # "English β†’ Xhosa": ("eng_Latn", "xho_Latn"),
212
- # "English β†’ Zulu": ("eng_Latn", "zul_Latn"),
213
- # "English β†’ Sesotho": ("eng_Latn", "sot_Latn"),
214
- # "English β†’ Tswana": ("eng_Latn", "tsn_Latn"),
215
- # "English β†’ Northern Sotho": ("eng_Latn", "nso_Latn"),
216
- # "English β†’ Swati": ("eng_Latn", "ssw_Latn"),
217
- # "English β†’ Tsonga": ("eng_Latn", "tso_Latn"),
218
- # "Afrikaans β†’ English": ("afr_Latn", "eng_Latn"),
219
- # "Xhosa β†’ English": ("xho_Latn", "eng_Latn"),
220
- # "Zulu β†’ English": ("zul_Latn", "eng_Latn"),
221
- # "Sesotho β†’ English": ("sot_Latn", "eng_Latn"),
222
- # "Tswana β†’ English": ("tsn_Latn", "eng_Latn"),
223
- # "Northern Sotho β†’ English": ("nso_Latn", "eng_Latn"),
224
- # "Swati β†’ English": ("ssw_Latn", "eng_Latn"),
225
- # "Tsonga β†’ English": ("tso_Latn", "eng_Latn"),
226
- # }
227
-
228
-
229
- # def translate(input_text, language_pair):
230
- # src_lang, tgt_lang = LANGUAGE_PAIRS[language_pair]
231
-
232
- # payload = {
233
- # "inputs": input_text,
234
- # "parameters": {
235
- # "src_lang": src_lang,
236
- # "tgt_lang": tgt_lang,
237
- # },
238
- # "options": {"wait_for_model": True},
239
- # }
240
-
241
- # response = requests.post(API_URL, headers=headers, json=payload)
242
-
243
- # if response.status_code != 200:
244
- # return f"[ERROR] {response.status_code}: {response.text}"
245
-
246
- # try:
247
- # output = response.json()
248
- # return output[0]["translation_text"]
249
- # except Exception as e:
250
- # return f"[ERROR] Failed to parse response: {e}"
251
 
 
 
 
252
 
253
- # # Gradio UI
254
- # translator = gr.Interface(
255
- # fn=translate,
256
- # inputs=[
257
- # gr.Textbox(label="Input Text", placeholder="Type text here..."),
258
- # gr.Dropdown(choices=list(LANGUAGE_PAIRS.keys()), label="Select Language Pair"),
259
- # ],
260
- # outputs=gr.Textbox(label="Translation"),
261
- # title="Translademia (NLLB Edition)",
262
- # description="Translate between English and official South African languages using Meta's NLLB-200 model.",
263
- # )
264
-
265
- # translator.launch(share=True)
266
-
267
-
268
- # ///////////////////////////////////////////////////////////////////////////////////////////////////////////////
269
- # Using Unesco API
270
-
271
- # import requests
272
- # import gradio as gr
273
- # from dotenv import load_dotenv
274
- # import os
275
-
276
- # # Load Hugging Face token from .env
277
- # load_dotenv()
278
- # HF_TOKEN = os.getenv("HF_TOKEN")
279
- # headers = {"Authorization": f"Bearer {HF_TOKEN}"}
280
 
281
- # # NLLB model endpoint
282
- # MODEL_NAME = "facebook/nllb-200-3.3B"
283
- # API_URL = f"https://api-inference.huggingface.co/models/{MODEL_NAME}"
 
284
 
285
- # # Define supported language pairs and NLLB codes
286
- # LANGUAGE_PAIRS = {
287
- # "English β†’ Afrikaans": ("eng_Latn", "afr_Latn"),
288
- # "English β†’ Xhosa": ("eng_Latn", "xho_Latn"),
289
- # "English β†’ Zulu": ("eng_Latn", "zul_Latn"),
290
- # "English β†’ Sesotho": ("eng_Latn", "sot_Latn"),
291
- # "English β†’ Tswana": ("eng_Latn", "tsn_Latn"),
292
- # "English β†’ Northern Sotho": ("eng_Latn", "nso_Latn"),
293
- # "English β†’ Swati": ("eng_Latn", "ssw_Latn"),
294
- # "English β†’ Tsonga": ("eng_Latn", "tso_Latn"),
295
- # "Afrikaans β†’ English": ("afr_Latn", "eng_Latn"),
296
- # "Xhosa β†’ English": ("xho_Latn", "eng_Latn"),
297
- # "Zulu β†’ English": ("zul_Latn", "eng_Latn"),
298
- # "Sesotho β†’ English": ("sot_Latn", "eng_Latn"),
299
- # "Tswana β†’ English": ("tsn_Latn", "eng_Latn"),
300
- # "Northern Sotho β†’ English": ("nso_Latn", "eng_Latn"),
301
- # "Swati β†’ English": ("ssw_Latn", "eng_Latn"),
302
- # "Tsonga β†’ English": ("tso_Latn", "eng_Latn"),
303
- # }
304
-
305
-
306
- # def translate(input_text, language_pair):
307
- # if not input_text.strip():
308
- # return "[ERROR] Please enter some text to translate."
309
-
310
- # # Get source and target language codes
311
- # src_lang, tgt_lang = LANGUAGE_PAIRS[language_pair]
312
-
313
- # # Prepend target language token to the input
314
- # formatted_input = f">>{tgt_lang}<< {input_text.strip()}"
315
 
316
- # # Send request to Hugging Face Inference API
317
- # payload = {
318
- # "inputs": formatted_input,
319
- # "options": {"wait_for_model": True},
320
- # }
321
 
322
- # response = requests.post(API_URL, headers=headers, json=payload)
323
 
324
- # if response.status_code != 200:
325
- # return f"[ERROR] {response.status_code}: {response.text}"
326
 
327
- # try:
328
- # output = response.json()
329
- # return output[0]["translation_text"]
330
- # except Exception as e:
331
- # return f"[ERROR] Failed to parse response: {e}"
332
 
 
 
 
 
 
 
333
 
334
- # # Gradio UI
335
- # translator = gr.Interface(
336
- # fn=translate,
337
- # inputs=[
338
- # gr.Textbox(label="Input Text", placeholder="Type text here..."),
339
- # gr.Dropdown(choices=list(LANGUAGE_PAIRS.keys()), label="Select Language Pair"),
340
- # ],
341
- # outputs=gr.Textbox(label="Translation"),
342
- # title="Translademia (NLLB Edition)",
343
- # description="Translate between English and South African languages using Meta's NLLB-200 multilingual model.",
344
- # )
345
-
346
- # translator.launch(share=True)
 
 
1
  # import gradio as gr
2
+ # from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
 
3
 
4
+ # # Load tokenizer and model (this will download ~3.5GB)
5
+ # model_name = "facebook/nllb-200-distilled-600M"
6
+ # tokenizer = AutoTokenizer.from_pretrained(model_name)
7
+ # model = AutoModelForSeq2SeqLM.from_pretrained(model_name)
8
 
9
+ # # Supported South African languages codes for NLLB
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
10
  # LANGUAGES = {
11
+ # "English β†’ Afrikaans": "afr_Latn",
12
+ # "English β†’ Xhosa": "xho_Latn",
13
+ # "English β†’ Zulu": "zul_Latn",
14
+ # "English β†’ Sesotho": "sot_Latn",
15
+ # "English β†’ Tswana": "tsn_Latn",
16
+ # "English β†’ Northern Sotho": "nso_Latn",
17
+ # "English β†’ Swati": "ssw_Latn",
18
+ # "English β†’ Tsonga": "tso_Latn",
19
+ # "English β†’ Venda": "ven_Latn",
20
  # }
21
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
22
 
23
+ # def translate(text, lang_label):
24
+ # if not text.strip():
25
+ # return "Please enter some text to translate."
26
 
27
+ # target_lang = LANGUAGES[lang_label]
28
+ # # Format input for NLLB: prefix target language token
29
+ # input_text = f">>{target_lang}<< {text}"
30
 
31
+ # inputs = tokenizer(input_text, return_tensors="pt", max_length=512, truncation=True)
32
+ # outputs = model.generate(**inputs, max_length=512)
33
+ # translated_text = tokenizer.decode(outputs[0], skip_special_tokens=True)
34
+ # return translated_text
35
 
 
36
 
37
+ # iface = gr.Interface(
 
38
  # fn=translate,
39
  # inputs=[
40
+ # gr.Textbox(label="English Text"),
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
41
  # gr.Dropdown(list(LANGUAGES.keys()), label="Target Language"),
42
  # ],
43
+ # outputs="text",
44
+ # title="NLLB-200 English to South African Languages",
45
+ # description="Translate English text to South African languages using Meta's NLLB-200 model locally.",
46
  # )
47
 
48
+ # iface.launch()
 
49
 
 
50
 
51
import os

import requests
import gradio as gr
from dotenv import load_dotenv

# Load HF_TOKEN from a local .env file so the API key stays out of source control.
load_dotenv()
HF_TOKEN = os.getenv("HF_TOKEN")

# English -> Northern Sotho model, served through the HF serverless Inference API.
model_name = "Helsinki-NLP/opus-mt-en-nso"
API_URL = f"https://api-inference.huggingface.co/models/{model_name}"
headers = {"Authorization": f"Bearer {HF_TOKEN}"}
63
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
64
 
65
def query(payload):
    """POST *payload* to the HF Inference API and return the decoded JSON.

    Returns a ``{"error": ...}`` dict on network failure, a non-200 status,
    or an unparseable body, so callers can surface the problem instead of
    crashing on an unexpected response.
    """
    try:
        # timeout guards against the serverless endpoint hanging indefinitely
        response = requests.post(API_URL, headers=headers, json=payload, timeout=60)
    except requests.RequestException as exc:
        return {"error": f"Request failed: {exc}"}

    if response.status_code != 200:
        return {"error": f"Request failed with {response.status_code}"}

    try:
        return response.json()
    except ValueError:
        # e.g. an HTML error page came back instead of JSON
        return {"error": "Invalid JSON from API"}
69
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
70
 
71
def translate(input_text):
    """Translate English *input_text* to Northern Sotho via the Inference API.

    Returns the translated string, or a human-readable ``"Error: ..."``
    message when the input is empty or the API call fails.
    """
    if not input_text or not input_text.strip():
        return "Error: Please enter text to translate."

    response = query({"inputs": input_text, "options": {"wait_for_model": True}})

    # The API returns {"error": ...} on failure (including while the model is
    # still loading) and [{"translation_text": ...}] on success.
    if isinstance(response, dict) and "error" in response:
        return f"Error: {response['error']}"

    try:
        return response[0]["translation_text"]
    except (KeyError, IndexError, TypeError):
        return f"Error: Unexpected API response: {response!r}"
 
78
 
 
 
 
 
 
79
 
80
# Minimal Gradio front end: one textbox in, one textbox out.
input_box = gr.Textbox(label="Input Text", placeholder="Input Text To Be Translated")
output_box = gr.Textbox(label="Translation")

translator = gr.Interface(
    fn=translate,
    inputs=[input_box],
    outputs=output_box,
    title="Translademia",
)

translator.launch()
 
 
 
 
 
 
 
 
 
 
 
 
main.py CHANGED
@@ -1,10 +1,338 @@
1
- # from transformers import pipeline
2
 
3
- # # Create translation pipeline
4
- # translator = pipeline("translation", model="facebook/nllb-200-3.3B")
5
 
6
- # # Translate English to Zulu (you prepend the target language code in input)
7
- # input_text = ">>zul_Latn<< Hello, how are you?"
8
 
9
- # result = translator(input_text)
10
- # print(result[0]["translation_text"])
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
from transformers import pipeline

# NLLB-200 takes explicit src_lang/tgt_lang FLORES-200 codes via the
# translation pipeline. The ">>code<<" prefix convention belongs to
# Marian/OPUS-MT checkpoints; NLLB would treat it as literal text and
# translate it along with the sentence.
translator = pipeline(
    "translation",
    model="facebook/nllb-200-3.3B",
    src_lang="eng_Latn",
    tgt_lang="zul_Latn",
)

# Translate English to Zulu.
input_text = "Hello, how are you?"

result = translator(input_text)
print(result[0]["translation_text"])
11
+
12
+
13
+ # import requests
14
+ # import gradio as gr
15
+ # from dotenv import load_dotenv
16
+ # import os
17
+
18
+ # # Load environment variables from .env file
19
+ # load_dotenv()
20
+ # HF_TOKEN = os.getenv("HF_TOKEN")
21
+
22
+ # model_name = "Helsinki-NLP/opus-mt-en-nso"
23
+ # API_URL = f"https://api-inference.huggingface.co/models/{model_name}"
24
+ # headers = {"Authorization": f"Bearer {HF_TOKEN}"}
25
+
26
+
27
+ # def query(payload):
28
+ # # HTTP POST Request
29
+ # response = requests.post(API_URL, headers=headers, json=payload)
30
+ # return response.json()
31
+
32
+
33
+ # def translate(input_text):
34
+ # # API Request:
35
+ # response = query({"inputs": input_text, "options": {"wait_for_model": True}})
36
+
37
+ # translation = response[0]["translation_text"]
38
+
39
+ # return translation
40
+
41
+
42
+ # translator = gr.Interface(
43
+ # fn=translate,
44
+ # inputs=[gr.Textbox(label="Input Text", placeholder="Input Text To Be Translated")],
45
+ # outputs=gr.Textbox(label="Translation"),
46
+ # title="Translademia",
47
+ # )
48
+
49
+ # translator.launch()
50
+
51
+
52
+ # # The one we are going with
53
+ # import requests
54
+ # import gradio as gr
55
+ # from dotenv import load_dotenv
56
+ # import os
57
+
58
+ # # Load environment variables
59
+ # load_dotenv()
60
+ # HF_TOKEN = os.getenv("HF_TOKEN")
61
+ # headers = {"Authorization": f"Bearer {HF_TOKEN}"}
62
+
63
+ # # Language to ISO 639-3 codes (used for NLLB-200)
64
+ # LANGUAGES = {
65
+ # "English β†’ Afrikaans": "afr",
66
+ # "English β†’ Xhosa": "xho",
67
+ # "English β†’ Zulu": "zul",
68
+ # "English β†’ Sesotho": "sot",
69
+ # "English β†’ Tswana": "tsn",
70
+ # "English β†’ Northern Sotho": "nso",
71
+ # "English β†’ Swati": "ssw",
72
+ # "English β†’ Tsonga": "tso",
73
+ # "English β†’ Venda": "ven",
74
+ # }
75
+
76
+ # MODEL_NAME = "facebook/nllb-200-distilled-600M"
77
+ # API_URL = f"https://api-inference.huggingface.co/models/{MODEL_NAME}"
78
+
79
+
80
+ # def query(payload):
81
+ # response = requests.post(API_URL, headers=headers, json=payload)
82
+
83
+ # if response.status_code != 200:
84
+ # print(f"[ERROR] API failed: {response.status_code} - {response.text}")
85
+ # return {"error": f"Request failed with {response.status_code}"}
86
+
87
+ # try:
88
+ # return response.json()
89
+ # except requests.exceptions.JSONDecodeError:
90
+ # print(f"[ERROR] Failed to parse JSON: {response.text}")
91
+ # return {"error": "Invalid JSON from API"}
92
+
93
+
94
+ # def translate(input_text, language_label):
95
+ # language_code = LANGUAGES[language_label]
96
+ # formatted_input = f">>{language_code}<< {input_text}"
97
+
98
+ # response = query({"inputs": formatted_input, "options": {"wait_for_model": True}})
99
+
100
+ # if "error" in response:
101
+ # return f"Error: {response['error']}"
102
+
103
+ # return response[0]["translation_text"]
104
+
105
+
106
+ # translator = gr.Interface(
107
+ # fn=translate,
108
+ # inputs=[
109
+ # gr.Textbox(label="Input Text", placeholder="Type text here..."),
110
+ # gr.Dropdown(list(LANGUAGES.keys()), label="Select Language Target"),
111
+ # ],
112
+ # outputs=gr.Textbox(label="Translation"),
113
+ # title="Translademia",
114
+ # description="Translate English text to South African languages using Meta's NLLB-200 model.",
115
+ # )
116
+
117
+ # translator.launch()
118
+
119
+
120
+ # love
121
+
122
+ # import os
123
+ # from huggingface_hub import InferenceClient
124
+ # import gradio as gr
125
+ # from dotenv import load_dotenv
126
+
127
+ # # Load env
128
+ # load_dotenv()
129
+ # HF_TOKEN = os.getenv("HF_TOKEN")
130
+
131
+ # # Init client
132
+ # client = InferenceClient(token=HF_TOKEN)
133
+
134
+ # # Languages supported
135
+ # LANGUAGES = {
136
+ # "English β†’ Afrikaans": "afr",
137
+ # "English β†’ Xhosa": "xho",
138
+ # "English β†’ Zulu": "zul",
139
+ # "English β†’ Sesotho": "sot",
140
+ # "English β†’ Tswana": "tsn",
141
+ # "English β†’ Northern Sotho": "nso",
142
+ # "English β†’ Swati": "ssw",
143
+ # "English β†’ Tsonga": "tso",
144
+ # "English β†’ Venda": "ven",
145
+ # }
146
+
147
+ # MODEL_NAME = "facebook/nllb-200-distilled-600M"
148
+
149
+
150
+ # def translate(input_text: str, language_label: str) -> str:
151
+ # if not input_text.strip():
152
+ # return "Error: Please enter text to translate."
153
+
154
+ # lang_code = LANGUAGES[language_label]
155
+ # formatted_input = f">>{lang_code}<< {input_text}"
156
+
157
+ # try:
158
+ # response = client.text_generation(
159
+ # prompt=formatted_input,
160
+ # model=MODEL_NAME,
161
+ # max_new_tokens=200,
162
+ # )
163
+ # return response.strip()
164
+ # except Exception as e:
165
+ # return f"Error: {str(e)}"
166
+
167
+
168
+ # # Gradio UI
169
+ # translator = gr.Interface(
170
+ # fn=translate,
171
+ # inputs=[
172
+ # gr.Textbox(label="Input Text", placeholder="Type English text here..."),
173
+ # gr.Dropdown(list(LANGUAGES.keys()), label="Target Language"),
174
+ # ],
175
+ # outputs=gr.Textbox(label="Translation"),
176
+ # title="NLLB-200 Translator",
177
+ # description="Translate English to South African languages using Meta's NLLB model",
178
+ # )
179
+
180
+ # translator.launch()
181
+
182
+
183
+ # hate
184
+
185
+
186
+ # import requests
187
+ # import gradio as gr
188
+ # from dotenv import load_dotenv
189
+ # import os
190
+
191
+ # # Load Hugging Face token from .env
192
+ # load_dotenv()
193
+ # HF_TOKEN = os.getenv("HF_TOKEN")
194
+ # headers = {"Authorization": f"Bearer {HF_TOKEN}"}
195
+
196
+ # # NLLB model name
197
+ # MODEL_NAME = "facebook/nllb-200-3.3B"
198
+ # API_URL = f"https://api-inference.huggingface.co/models/{MODEL_NAME}"
199
+
200
+ # # Define supported language pairs and NLLB codes
201
+ # LANGUAGE_PAIRS = {
202
+ # "English β†’ Afrikaans": ("eng_Latn", "afr_Latn"),
203
+ # "English β†’ Xhosa": ("eng_Latn", "xho_Latn"),
204
+ # "English β†’ Zulu": ("eng_Latn", "zul_Latn"),
205
+ # "English β†’ Sesotho": ("eng_Latn", "sot_Latn"),
206
+ # "English β†’ Tswana": ("eng_Latn", "tsn_Latn"),
207
+ # "English β†’ Northern Sotho": ("eng_Latn", "nso_Latn"),
208
+ # "English β†’ Swati": ("eng_Latn", "ssw_Latn"),
209
+ # "English β†’ Tsonga": ("eng_Latn", "tso_Latn"),
210
+ # "Afrikaans β†’ English": ("afr_Latn", "eng_Latn"),
211
+ # "Xhosa β†’ English": ("xho_Latn", "eng_Latn"),
212
+ # "Zulu β†’ English": ("zul_Latn", "eng_Latn"),
213
+ # "Sesotho β†’ English": ("sot_Latn", "eng_Latn"),
214
+ # "Tswana β†’ English": ("tsn_Latn", "eng_Latn"),
215
+ # "Northern Sotho β†’ English": ("nso_Latn", "eng_Latn"),
216
+ # "Swati β†’ English": ("ssw_Latn", "eng_Latn"),
217
+ # "Tsonga β†’ English": ("tso_Latn", "eng_Latn"),
218
+ # }
219
+
220
+
221
+ # def translate(input_text, language_pair):
222
+ # src_lang, tgt_lang = LANGUAGE_PAIRS[language_pair]
223
+
224
+ # payload = {
225
+ # "inputs": input_text,
226
+ # "parameters": {
227
+ # "src_lang": src_lang,
228
+ # "tgt_lang": tgt_lang,
229
+ # },
230
+ # "options": {"wait_for_model": True},
231
+ # }
232
+
233
+ # response = requests.post(API_URL, headers=headers, json=payload)
234
+
235
+ # if response.status_code != 200:
236
+ # return f"[ERROR] {response.status_code}: {response.text}"
237
+
238
+ # try:
239
+ # output = response.json()
240
+ # return output[0]["translation_text"]
241
+ # except Exception as e:
242
+ # return f"[ERROR] Failed to parse response: {e}"
243
+
244
+
245
+ # # Gradio UI
246
+ # translator = gr.Interface(
247
+ # fn=translate,
248
+ # inputs=[
249
+ # gr.Textbox(label="Input Text", placeholder="Type text here..."),
250
+ # gr.Dropdown(choices=list(LANGUAGE_PAIRS.keys()), label="Select Language Pair"),
251
+ # ],
252
+ # outputs=gr.Textbox(label="Translation"),
253
+ # title="Translademia (NLLB Edition)",
254
+ # description="Translate between English and official South African languages using Meta's NLLB-200 model.",
255
+ # )
256
+
257
+ # translator.launch(share=True)
258
+
259
+
260
+ # ///////////////////////////////////////////////////////////////////////////////////////////////////////////////
261
+ # Using Unesco API
262
+
263
+ # import requests
264
+ # import gradio as gr
265
+ # from dotenv import load_dotenv
266
+ # import os
267
+
268
+ # # Load Hugging Face token from .env
269
+ # load_dotenv()
270
+ # HF_TOKEN = os.getenv("HF_TOKEN")
271
+ # headers = {"Authorization": f"Bearer {HF_TOKEN}"}
272
+
273
+ # # NLLB model endpoint
274
+ # MODEL_NAME = "facebook/nllb-200-3.3B"
275
+ # API_URL = f"https://api-inference.huggingface.co/models/{MODEL_NAME}"
276
+
277
+ # # Define supported language pairs and NLLB codes
278
+ # LANGUAGE_PAIRS = {
279
+ # "English β†’ Afrikaans": ("eng_Latn", "afr_Latn"),
280
+ # "English β†’ Xhosa": ("eng_Latn", "xho_Latn"),
281
+ # "English β†’ Zulu": ("eng_Latn", "zul_Latn"),
282
+ # "English β†’ Sesotho": ("eng_Latn", "sot_Latn"),
283
+ # "English β†’ Tswana": ("eng_Latn", "tsn_Latn"),
284
+ # "English β†’ Northern Sotho": ("eng_Latn", "nso_Latn"),
285
+ # "English β†’ Swati": ("eng_Latn", "ssw_Latn"),
286
+ # "English β†’ Tsonga": ("eng_Latn", "tso_Latn"),
287
+ # "Afrikaans β†’ English": ("afr_Latn", "eng_Latn"),
288
+ # "Xhosa β†’ English": ("xho_Latn", "eng_Latn"),
289
+ # "Zulu β†’ English": ("zul_Latn", "eng_Latn"),
290
+ # "Sesotho β†’ English": ("sot_Latn", "eng_Latn"),
291
+ # "Tswana β†’ English": ("tsn_Latn", "eng_Latn"),
292
+ # "Northern Sotho β†’ English": ("nso_Latn", "eng_Latn"),
293
+ # "Swati β†’ English": ("ssw_Latn", "eng_Latn"),
294
+ # "Tsonga β†’ English": ("tso_Latn", "eng_Latn"),
295
+ # }
296
+
297
+
298
+ # def translate(input_text, language_pair):
299
+ # if not input_text.strip():
300
+ # return "[ERROR] Please enter some text to translate."
301
+
302
+ # # Get source and target language codes
303
+ # src_lang, tgt_lang = LANGUAGE_PAIRS[language_pair]
304
+
305
+ # # Prepend target language token to the input
306
+ # formatted_input = f">>{tgt_lang}<< {input_text.strip()}"
307
+
308
+ # # Send request to Hugging Face Inference API
309
+ # payload = {
310
+ # "inputs": formatted_input,
311
+ # "options": {"wait_for_model": True},
312
+ # }
313
+
314
+ # response = requests.post(API_URL, headers=headers, json=payload)
315
+
316
+ # if response.status_code != 200:
317
+ # return f"[ERROR] {response.status_code}: {response.text}"
318
+
319
+ # try:
320
+ # output = response.json()
321
+ # return output[0]["translation_text"]
322
+ # except Exception as e:
323
+ # return f"[ERROR] Failed to parse response: {e}"
324
+
325
+
326
+ # # Gradio UI
327
+ # translator = gr.Interface(
328
+ # fn=translate,
329
+ # inputs=[
330
+ # gr.Textbox(label="Input Text", placeholder="Type text here..."),
331
+ # gr.Dropdown(choices=list(LANGUAGE_PAIRS.keys()), label="Select Language Pair"),
332
+ # ],
333
+ # outputs=gr.Textbox(label="Translation"),
334
+ # title="Translademia (NLLB Edition)",
335
+ # description="Translate between English and South African languages using Meta's NLLB-200 multilingual model.",
336
+ # )
337
+
338
+ # translator.launch(share=True)