# Hugging Face Space app — Quran Word Explorer (Space status header removed; it was page-capture residue, not code).
# Standard library
import os
import json

# Third-party
import gradio as gr
from huggingface_hub import InferenceClient

# Hugging Face API token, supplied via the environment (Space secret).
HF_TOKEN = os.getenv("HF_API_TOKEN")

# Serverless inference client pinned to the DeepSeek-V3 chat model.
client = InferenceClient(
    model="deepseek-ai/DeepSeek-V3",
    token=HF_TOKEN,
)
# Load the word and language catalogues shipped alongside the app.
with open("words.json", encoding="utf-8") as f:
    word_list = json.load(f)

with open("language_list.json", encoding="utf-8") as f:
    language_list = json.load(f)

# Dropdown labels: "ARABIC_TEXT (english gloss)" and "Language Name (code)".
word_options = [f"{entry['text']} ({entry['english']})" for entry in word_list]
language_options = [f"{entry['name']} ({entry['code']})" for entry in language_list]
def create_messages(word_entry, language_name):
    """Build the two-message chat payload for explaining one Quranic word.

    Args:
        word_entry: dict with at least ``text`` (Arabic script) and
            ``english`` (gloss) keys — one item from ``words.json``.
        language_name: the language the model should answer in.

    Returns:
        A list of two chat messages (system persona + user request)
        in the OpenAI-compatible ``{"role": ..., "content": ...}`` shape.
    """
    system_message = {
        "role": "system",
        "content": "You are a helpful and friendly assistant that explains Quranic words in a simple way.",
    }
    user_message = {
        "role": "user",
        "content": f"""
Explain the Quranic word "{word_entry['text']}" (which means "{word_entry['english']}") in {language_name}.
Please include:
1. Translation in {language_name}
2. Root word and derivatives
3. Occurrences in the Qur'an (Surah & Verse)
4. Explanation of each occurrence using easy-to-understand {language_name}
""",
    }
    return [system_message, user_message]
# Module-level cache of finished answers, keyed by the exact
# (word label, language label) pair the user picked in the UI.
response_cache = {}

def process(word_label, lang_label):
    """Stream the explanation for the selected word/language pair.

    Generator used by the Gradio click handler: yields the growing output
    string as chunks arrive so the textbox updates live. A fully streamed
    answer is cached and replayed instantly on repeat requests.

    Args:
        word_label: dropdown label shaped like "TEXT (english gloss)".
        lang_label: dropdown label shaped like "Language Name (code)".
    """
    cache_key = (word_label, lang_label)
    # Serve a previously completed answer without touching the API again.
    if cache_key in response_cache:
        yield response_cache[cache_key]
        return

    # Recover the Arabic text from the "TEXT (english)" label and match it
    # exactly; fall back to the original substring match for any label
    # that is not in that format (backward compatibility).
    label_text = word_label.split(" (")[0].strip()
    selected_word = next((w for w in word_list if w['text'] == label_text), None)
    if selected_word is None:
        selected_word = next((w for w in word_list if w['text'] in word_label), None)

    # BUG FIX: the original used split("(")[1], which produced the language
    # CODE with a trailing ")" (e.g. "en)") instead of the language NAME,
    # so the prompt asked for an explanation "in en)". Take the part
    # before the parenthesis — the human-readable name — instead.
    language_name = lang_label.split("(")[0].strip() if "(" in lang_label else lang_label.strip()

    if not selected_word:
        yield "β Word not found."
        return

    messages = create_messages(selected_word, language_name)
    try:
        stream = client.chat.completions.create(
            messages=messages,
            temperature=0.7,
            top_p=0.9,
            max_tokens=1024,
            stream=True,
        )
        output = ""
        for chunk in stream:
            # Some chunks carry no content delta (e.g. role headers); skip them.
            if chunk.choices and chunk.choices[0].delta.content:
                output += chunk.choices[0].delta.content
                yield output
        # Cache only fully streamed answers so a failed stream is retried.
        response_cache[cache_key] = output
    except Exception as e:
        # Surface API/stream failures in the UI instead of crashing the app.
        yield f"β Error: {e}"
# ---- Gradio UI ----
# Two dropdowns feed the streaming `process` generator; the output textbox
# re-renders on every yielded partial string.
with gr.Blocks() as demo:
    gr.Markdown("## π Quran Word Explorer (with DeepSeek-V3) β Streaming Enabled")

    with gr.Row():
        word_dropdown = gr.Dropdown(choices=word_options, label="π€ Select Quranic Word")
        lang_dropdown = gr.Dropdown(choices=language_options, label="π Select Language")

    explain_button = gr.Button("π Get Explanation")
    answer_box = gr.Textbox(label="π Output", lines=20)

    explain_button.click(
        fn=process,
        inputs=[word_dropdown, lang_dropdown],
        outputs=answer_box,
    )

demo.launch()