# Hugging Face Space: "Instant Translator" — Gradio app using the HF Inference API.
import os
import logging

import gradio as gr
from huggingface_hub import InferenceClient

# Logging: timestamped INFO-level output for Space diagnostics.
logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s | %(levelname)s | %(message)s",
    datefmt="%Y-%m-%d %H:%M:%S",
)
logger = logging.getLogger(__name__)

# Optional HF API token; an anonymous client is used when it is unset.
HF_TOKEN = os.environ.get("HF_TOKEN", "")
logger.info(f"HF_TOKEN configured: {bool(HF_TOKEN)}")

if HF_TOKEN:
    client = InferenceClient(token=HF_TOKEN)
else:
    client = InferenceClient()
logger.info("InferenceClient initialized")

# (display label, env var override, default MarianMT model id)
_PAIR_SPECS = (
    ("English β French", "MODEL_EN_FR", "Helsinki-NLP/opus-mt-en-fr"),
    ("English β Spanish", "MODEL_EN_ES", "Helsinki-NLP/opus-mt-en-es"),
    ("English β German", "MODEL_EN_DE", "Helsinki-NLP/opus-mt-en-de"),
    ("French β English", "MODEL_FR_EN", "Helsinki-NLP/opus-mt-fr-en"),
    ("Spanish β English", "MODEL_ES_EN", "Helsinki-NLP/opus-mt-es-en"),
    ("German β English", "MODEL_DE_EN", "Helsinki-NLP/opus-mt-de-en"),
)
# Language pairs mapped to their MarianMT models (configurable via env vars).
LANGUAGE_PAIRS = {
    label: os.environ.get(env_var, default_model)
    for label, env_var, default_model in _PAIR_SPECS
}
logger.info(f"Loaded {len(LANGUAGE_PAIRS)} language pairs")
def translate(text: str, language_pair: str) -> str:
    """Translate text via the Inference API using the selected language pair.

    Args:
        text: Source text to translate; empty/whitespace-only input is rejected.
        language_pair: A key of ``LANGUAGE_PAIRS`` selecting the MarianMT model.

    Returns:
        The translated text, or a user-facing message string (a prompt for
        empty input, or an error description when the API call fails /
        the pair is unknown).
    """
    # Lazy %-style args: no string formatting cost when INFO is disabled.
    logger.info("translate() called | text_len=%d | pair=%s", len(text), language_pair)
    if not text.strip():
        logger.warning("Empty text received")
        return "π Enter text to translate!"
    try:
        # Unknown pairs raise KeyError here and fall into the error path below.
        model = LANGUAGE_PAIRS[language_pair]
        logger.info("Calling translation | model=%s", model)
        result = client.translation(text, model=model)
        logger.info("Translation: %s...", result.translation_text[:50])
        return result.translation_text
    except Exception as e:
        # logger.exception records the full traceback, not just the message.
        logger.exception("API error: %s", e)
        return f"β Error: {e}"
| logger.info("Building Gradio interface...") | |
| with gr.Blocks(title="Instant Translator") as demo: | |
| gr.Markdown("# π Instant Translator\nTranslate text between languages instantly!") | |
| with gr.Row(equal_height=True): | |
| with gr.Column(): | |
| input_text = gr.Textbox( | |
| label="Source text", | |
| placeholder="Hello, how are you today?", | |
| lines=4, | |
| autofocus=True, | |
| ) | |
| language_pair = gr.Dropdown( | |
| choices=list(LANGUAGE_PAIRS.keys()), | |
| value="English β French", | |
| label="Language pair", | |
| ) | |
| btn = gr.Button("Translate π", variant="primary") | |
| with gr.Column(): | |
| output_text = gr.Textbox( | |
| label="Translation", | |
| lines=4, | |
| interactive=False, | |
| ) | |
| btn.click(translate, inputs=[input_text, language_pair], outputs=output_text) | |
| input_text.submit(translate, inputs=[input_text, language_pair], outputs=output_text) | |
| gr.Examples( | |
| examples=[ | |
| ["Hello, how are you today?", "English β French"], | |
| ["Machine learning is fascinating.", "English β Spanish"], | |
| ["I love programming with Python.", "English β German"], | |
| ], | |
| inputs=[input_text, language_pair], | |
| ) | |
| demo.queue() | |
| logger.info("Starting Gradio server...") | |
| demo.launch() | |