|
|
import os |
|
|
import gradio as gr |
|
|
import requests |
|
|
|
|
|
API_KEY = os.getenv("HF_TOKEN") |
|
|
MODEL_NAME = "openai/gpt-oss-120b" |
|
|
|
|
|
def generate_main_question_gemini(paragraph: str) -> str:
    """Generate one main (key) question for an Arabic paragraph.

    Sends the paragraph to the Hugging Face inference router for
    ``MODEL_NAME`` with an Arabic prompt asking for exactly one key
    question that can anchor a student's recitation of the paragraph.

    NOTE(review): despite the ``_gemini`` suffix, this calls the Hugging
    Face API, not Google Gemini. The name is kept for caller
    compatibility.

    Args:
        paragraph: Source text to generate a question for.

    Returns:
        The generated question, or an Arabic/English error message when
        the input is empty or the API call fails.
    """
    # Guard clause: reject empty or whitespace-only input up front.
    if not paragraph or paragraph.strip() == "":
        return "الرجاء إدخال فقرة أولاً."

    prompt = f"""
الفقرة التالية:
{paragraph}
المطلوب:
اقرأ الفقرة السابقة ،
اعتمادا عليها قم بتوليد سؤال اساسي للفقرة ليكون مفتاحا لعملية تسميع الطالب للفقرة
اكتب السؤال المولد فقط بدون شرح ولا اي تفسير
"""

    try:
        response = requests.post(
            f"https://router.huggingface.co/hf-inference/models/{MODEL_NAME}",
            headers={"Authorization": f"Bearer {API_KEY}"},
            json={"inputs": prompt},
            # requests has NO default timeout; without this a stalled
            # API call would hang the Gradio handler indefinitely.
            timeout=60,
        )

        if response.status_code != 200:
            return f"Error while connecting to API: {response.text}"

        result = response.json()

        # Successful text-generation responses come back as a list of
        # {"generated_text": ...} objects.
        if isinstance(result, list):
            return result[0].get("generated_text", "").strip()

        # The HF router also returns dict payloads like {"error": ...}
        # with a 200 status in some failure modes — surface them rather
        # than hiding them behind the generic message below.
        if isinstance(result, dict) and "error" in result:
            return f"Error while connecting to API: {result['error']}"

        return "Unexpected response format."

    except Exception as e:
        # Broad catch is deliberate at this UI boundary: any failure
        # (network, JSON decode, timeout) becomes a user-visible message
        # instead of a traceback in the Gradio app.
        return f"Error while connecting to API: {e}"
|
|
|
|
|
|
|
|
# Gradio UI: one input textbox, one output textbox, and a submit button
# wired to the question generator.
with gr.Blocks() as demo:
    gr.Markdown("## MainQuestion — Basic Question Generator (Arabic Output)")

    with gr.Row():
        # Multi-line input area for the source paragraph.
        paragraph = gr.Textbox(
            label="Paragraph (Input text)",
            lines=8,
            placeholder="Paste the paragraph here..."
        )

    # Read-only result box showing the generated Arabic question.
    output = gr.Textbox(label="Generated Question (Arabic)", lines=3)

    submit_btn = gr.Button("Submit")

    # On click, pass the paragraph text to the generator and display
    # its return value (question or error message) in `output`.
    submit_btn.click(fn=generate_main_question_gemini, inputs=paragraph, outputs=output)
|
|
|
|
|
if __name__ == "__main__":
    # share=True exposes the app via a public gradio.live tunnel;
    # show_error=True surfaces handler exceptions in the browser UI.
    demo.launch(share=True, show_error=True)
|
|
|