Spaces:
Running
Running
# Translate English to Zulu with Meta's NLLB-200 model.
from transformers import pipeline

# NLLB-200 selects languages via the src_lang/tgt_lang pipeline arguments
# using FLORES-200 codes (e.g. "eng_Latn", "zul_Latn"). The Marian-style
# ">>code<<" prefix in the input text is an OPUS-MT convention and has no
# effect on NLLB — with the prefix, the model would translate into its
# default target (and keep the prefix as literal text) instead of Zulu.
translator = pipeline(
    "translation",
    model="facebook/nllb-200-3.3B",
    src_lang="eng_Latn",
    tgt_lang="zul_Latn",
)

input_text = "Hello, how are you?"
result = translator(input_text)
# The pipeline returns a list of dicts: [{"translation_text": ...}].
print(result[0]["translation_text"])
| # import requests | |
| # import gradio as gr | |
| # from dotenv import load_dotenv | |
| # import os | |
| # # Load environment variables from .env file | |
| # load_dotenv() | |
| # HF_TOKEN = os.getenv("HF_TOKEN") | |
| # model_name = "Helsinki-NLP/opus-mt-en-nso" | |
| # API_URL = f"https://api-inference.huggingface.co/models/{model_name}" | |
| # headers = {"Authorization": f"Bearer {HF_TOKEN}"} | |
| # def query(payload): | |
| # # HTTP POST Request | |
| # response = requests.post(API_URL, headers=headers, json=payload) | |
| # return response.json() | |
| # def translate(input_text): | |
| # # API Request: | |
| # response = query({"inputs": input_text, "options": {"wait_for_model": True}}) | |
| # translation = response[0]["translation_text"] | |
| # return translation | |
| # translator = gr.Interface( | |
| # fn=translate, | |
| # inputs=[gr.Textbox(label="Input Text", placeholder="Input Text To Be Translated")], | |
| # outputs=gr.Textbox(label="Translation"), | |
| # title="Translademia", | |
| # ) | |
| # translator.launch() | |
| # # The one we are going with | |
| # import requests | |
| # import gradio as gr | |
| # from dotenv import load_dotenv | |
| # import os | |
| # # Load environment variables | |
| # load_dotenv() | |
| # HF_TOKEN = os.getenv("HF_TOKEN") | |
| # headers = {"Authorization": f"Bearer {HF_TOKEN}"} | |
| # # Language to ISO 639-3 codes (used for NLLB-200) | |
| # LANGUAGES = { | |
| #     "English → Afrikaans": "afr", | |
| #     "English → Xhosa": "xho", | |
| #     "English → Zulu": "zul", | |
| #     "English → Sesotho": "sot", | |
| #     "English → Tswana": "tsn", | |
| #     "English → Northern Sotho": "nso", | |
| #     "English → Swati": "ssw", | |
| #     "English → Tsonga": "tso", | |
| #     "English → Venda": "ven", | |
| # } | |
| # MODEL_NAME = "facebook/nllb-200-distilled-600M" | |
| # API_URL = f"https://api-inference.huggingface.co/models/{MODEL_NAME}" | |
| # def query(payload): | |
| # response = requests.post(API_URL, headers=headers, json=payload) | |
| # if response.status_code != 200: | |
| # print(f"[ERROR] API failed: {response.status_code} - {response.text}") | |
| # return {"error": f"Request failed with {response.status_code}"} | |
| # try: | |
| # return response.json() | |
| # except requests.exceptions.JSONDecodeError: | |
| # print(f"[ERROR] Failed to parse JSON: {response.text}") | |
| # return {"error": "Invalid JSON from API"} | |
| # def translate(input_text, language_label): | |
| # language_code = LANGUAGES[language_label] | |
| # formatted_input = f">>{language_code}<< {input_text}" | |
| # response = query({"inputs": formatted_input, "options": {"wait_for_model": True}}) | |
| # if "error" in response: | |
| # return f"Error: {response['error']}" | |
| # return response[0]["translation_text"] | |
| # translator = gr.Interface( | |
| # fn=translate, | |
| # inputs=[ | |
| # gr.Textbox(label="Input Text", placeholder="Type text here..."), | |
| # gr.Dropdown(list(LANGUAGES.keys()), label="Select Language Target"), | |
| # ], | |
| # outputs=gr.Textbox(label="Translation"), | |
| # title="Translademia", | |
| # description="Translate English text to South African languages using Meta's NLLB-200 model.", | |
| # ) | |
| # translator.launch() | |
| # love | |
| # import os | |
| # from huggingface_hub import InferenceClient | |
| # import gradio as gr | |
| # from dotenv import load_dotenv | |
| # # Load env | |
| # load_dotenv() | |
| # HF_TOKEN = os.getenv("HF_TOKEN") | |
| # # Init client | |
| # client = InferenceClient(token=HF_TOKEN) | |
| # # Languages supported | |
| # LANGUAGES = { | |
| #     "English → Afrikaans": "afr", | |
| #     "English → Xhosa": "xho", | |
| #     "English → Zulu": "zul", | |
| #     "English → Sesotho": "sot", | |
| #     "English → Tswana": "tsn", | |
| #     "English → Northern Sotho": "nso", | |
| #     "English → Swati": "ssw", | |
| #     "English → Tsonga": "tso", | |
| #     "English → Venda": "ven", | |
| # } | |
| # MODEL_NAME = "facebook/nllb-200-distilled-600M" | |
| # def translate(input_text: str, language_label: str) -> str: | |
| # if not input_text.strip(): | |
| # return "Error: Please enter text to translate." | |
| # lang_code = LANGUAGES[language_label] | |
| # formatted_input = f">>{lang_code}<< {input_text}" | |
| # try: | |
| # response = client.text_generation( | |
| # prompt=formatted_input, | |
| # model=MODEL_NAME, | |
| # max_new_tokens=200, | |
| # ) | |
| # return response.strip() | |
| # except Exception as e: | |
| # return f"Error: {str(e)}" | |
| # # Gradio UI | |
| # translator = gr.Interface( | |
| # fn=translate, | |
| # inputs=[ | |
| # gr.Textbox(label="Input Text", placeholder="Type English text here..."), | |
| # gr.Dropdown(list(LANGUAGES.keys()), label="Target Language"), | |
| # ], | |
| # outputs=gr.Textbox(label="Translation"), | |
| # title="NLLB-200 Translator", | |
| # description="Translate English to South African languages using Meta's NLLB model", | |
| # ) | |
| # translator.launch() | |
| # hate | |
| # import requests | |
| # import gradio as gr | |
| # from dotenv import load_dotenv | |
| # import os | |
| # # Load Hugging Face token from .env | |
| # load_dotenv() | |
| # HF_TOKEN = os.getenv("HF_TOKEN") | |
| # headers = {"Authorization": f"Bearer {HF_TOKEN}"} | |
| # # NLLB model name | |
| # MODEL_NAME = "facebook/nllb-200-3.3B" | |
| # API_URL = f"https://api-inference.huggingface.co/models/{MODEL_NAME}" | |
| # # Define supported language pairs and NLLB codes | |
| # LANGUAGE_PAIRS = { | |
| #     "English → Afrikaans": ("eng_Latn", "afr_Latn"), | |
| #     "English → Xhosa": ("eng_Latn", "xho_Latn"), | |
| #     "English → Zulu": ("eng_Latn", "zul_Latn"), | |
| #     "English → Sesotho": ("eng_Latn", "sot_Latn"), | |
| #     "English → Tswana": ("eng_Latn", "tsn_Latn"), | |
| #     "English → Northern Sotho": ("eng_Latn", "nso_Latn"), | |
| #     "English → Swati": ("eng_Latn", "ssw_Latn"), | |
| #     "English → Tsonga": ("eng_Latn", "tso_Latn"), | |
| #     "Afrikaans → English": ("afr_Latn", "eng_Latn"), | |
| #     "Xhosa → English": ("xho_Latn", "eng_Latn"), | |
| #     "Zulu → English": ("zul_Latn", "eng_Latn"), | |
| #     "Sesotho → English": ("sot_Latn", "eng_Latn"), | |
| #     "Tswana → English": ("tsn_Latn", "eng_Latn"), | |
| #     "Northern Sotho → English": ("nso_Latn", "eng_Latn"), | |
| #     "Swati → English": ("ssw_Latn", "eng_Latn"), | |
| #     "Tsonga → English": ("tso_Latn", "eng_Latn"), | |
| # } | |
| # def translate(input_text, language_pair): | |
| # src_lang, tgt_lang = LANGUAGE_PAIRS[language_pair] | |
| # payload = { | |
| # "inputs": input_text, | |
| # "parameters": { | |
| # "src_lang": src_lang, | |
| # "tgt_lang": tgt_lang, | |
| # }, | |
| # "options": {"wait_for_model": True}, | |
| # } | |
| # response = requests.post(API_URL, headers=headers, json=payload) | |
| # if response.status_code != 200: | |
| # return f"[ERROR] {response.status_code}: {response.text}" | |
| # try: | |
| # output = response.json() | |
| # return output[0]["translation_text"] | |
| # except Exception as e: | |
| # return f"[ERROR] Failed to parse response: {e}" | |
| # # Gradio UI | |
| # translator = gr.Interface( | |
| # fn=translate, | |
| # inputs=[ | |
| # gr.Textbox(label="Input Text", placeholder="Type text here..."), | |
| # gr.Dropdown(choices=list(LANGUAGE_PAIRS.keys()), label="Select Language Pair"), | |
| # ], | |
| # outputs=gr.Textbox(label="Translation"), | |
| # title="Translademia (NLLB Edition)", | |
| # description="Translate between English and official South African languages using Meta's NLLB-200 model.", | |
| # ) | |
| # translator.launch(share=True) | |
| # /////////////////////////////////////////////////////////////////////////////////////////////////////////////// | |
| # Using Unesco API | |
| # import requests | |
| # import gradio as gr | |
| # from dotenv import load_dotenv | |
| # import os | |
| # # Load Hugging Face token from .env | |
| # load_dotenv() | |
| # HF_TOKEN = os.getenv("HF_TOKEN") | |
| # headers = {"Authorization": f"Bearer {HF_TOKEN}"} | |
| # # NLLB model endpoint | |
| # MODEL_NAME = "facebook/nllb-200-3.3B" | |
| # API_URL = f"https://api-inference.huggingface.co/models/{MODEL_NAME}" | |
| # # Define supported language pairs and NLLB codes | |
| # LANGUAGE_PAIRS = { | |
| #     "English → Afrikaans": ("eng_Latn", "afr_Latn"), | |
| #     "English → Xhosa": ("eng_Latn", "xho_Latn"), | |
| #     "English → Zulu": ("eng_Latn", "zul_Latn"), | |
| #     "English → Sesotho": ("eng_Latn", "sot_Latn"), | |
| #     "English → Tswana": ("eng_Latn", "tsn_Latn"), | |
| #     "English → Northern Sotho": ("eng_Latn", "nso_Latn"), | |
| #     "English → Swati": ("eng_Latn", "ssw_Latn"), | |
| #     "English → Tsonga": ("eng_Latn", "tso_Latn"), | |
| #     "Afrikaans → English": ("afr_Latn", "eng_Latn"), | |
| #     "Xhosa → English": ("xho_Latn", "eng_Latn"), | |
| #     "Zulu → English": ("zul_Latn", "eng_Latn"), | |
| #     "Sesotho → English": ("sot_Latn", "eng_Latn"), | |
| #     "Tswana → English": ("tsn_Latn", "eng_Latn"), | |
| #     "Northern Sotho → English": ("nso_Latn", "eng_Latn"), | |
| #     "Swati → English": ("ssw_Latn", "eng_Latn"), | |
| #     "Tsonga → English": ("tso_Latn", "eng_Latn"), | |
| # } | |
| # def translate(input_text, language_pair): | |
| # if not input_text.strip(): | |
| # return "[ERROR] Please enter some text to translate." | |
| # # Get source and target language codes | |
| # src_lang, tgt_lang = LANGUAGE_PAIRS[language_pair] | |
| # # Prepend target language token to the input | |
| # formatted_input = f">>{tgt_lang}<< {input_text.strip()}" | |
| # # Send request to Hugging Face Inference API | |
| # payload = { | |
| # "inputs": formatted_input, | |
| # "options": {"wait_for_model": True}, | |
| # } | |
| # response = requests.post(API_URL, headers=headers, json=payload) | |
| # if response.status_code != 200: | |
| # return f"[ERROR] {response.status_code}: {response.text}" | |
| # try: | |
| # output = response.json() | |
| # return output[0]["translation_text"] | |
| # except Exception as e: | |
| # return f"[ERROR] Failed to parse response: {e}" | |
| # # Gradio UI | |
| # translator = gr.Interface( | |
| # fn=translate, | |
| # inputs=[ | |
| # gr.Textbox(label="Input Text", placeholder="Type text here..."), | |
| # gr.Dropdown(choices=list(LANGUAGE_PAIRS.keys()), label="Select Language Pair"), | |
| # ], | |
| # outputs=gr.Textbox(label="Translation"), | |
| # title="Translademia (NLLB Edition)", | |
| # description="Translate between English and South African languages using Meta's NLLB-200 multilingual model.", | |
| # ) | |
| # translator.launch(share=True) | |