Spaces:
Sleeping
Sleeping
| import gradio as gr | |
| import torch | |
| from transformers import AutoTokenizer, AutoModelForCausalLM | |
| from gtts import gTTS | |
| import re | |
| import os | |
# Branding shown in the UI header.
TITLE = "🇧🇩 Polymath-Bengali-Tutor"
DESC = "A Neuro-Symbolic AI Tutor for Rural Education in Bangladesh"

# Load the LLM; try fp16 first, fall back to full precision if that fails
# (e.g. hardware without half-precision support). The tokenizer load is
# hoisted out of the retry — it does not depend on the dtype choice.
model_id = "Qwen/Qwen2.5-1.5B-Instruct"
tokenizer = AutoTokenizer.from_pretrained(model_id)
try:
    model = AutoModelForCausalLM.from_pretrained(
        model_id, torch_dtype=torch.float16, device_map="auto"
    )
except Exception:
    # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
    # still propagate instead of being swallowed by the fallback.
    model = AutoModelForCausalLM.from_pretrained(model_id, device_map="auto")
| # Logic | |
def safe_calculator(text):
    """Deterministic arithmetic solver for Bengali word problems.

    Normalizes Bangla digits to ASCII, extracts the integers, and picks an
    operation from keyword cues found in the text.

    Args:
        text: Question text, possibly mixing Bangla and ASCII digits.

    Returns:
        ``(value, op_label, nums)`` — the computed value, the Bengali
        operation label, and the extracted numbers; ``(None, None, [])``
        when no rule applies (including division by zero, which previously
        raised ``ZeroDivisionError``).
    """
    bangla_digits = "০১২৩৪৫৬৭৮৯"
    english_digits = "0123456789"
    # Single C-level pass converts every Bangla digit to its ASCII twin.
    text = text.translate(str.maketrans(bangla_digits, english_digits))
    nums = [int(n) for n in re.findall(r'\d+', text)]
    if not nums:
        return None, None, []
    # Keyword order matters: subtraction cues (including '-') win first.
    if any(x in text for x in ["বাকি", "বাদ", "বিয়োগ", "-", "খরচ"]):
        if len(nums) >= 2:
            return nums[0] - nums[1], "বিয়োগ", nums
    elif any(x in text for x in ["ভাগ", "অংশ"]):
        # Bug fix: guard the divisor so "… ভাগ ০" cannot crash the tutor.
        if len(nums) >= 2 and nums[1] != 0:
            return nums[0] / nums[1], "ভাগ", nums
    elif any(x in text for x in ["গুণ", "দাম"]):
        if len(nums) >= 2:
            return nums[0] * nums[1], "গুণ", nums
    elif any(x in text for x in ["যোগ", "মোট", "পেল"]):
        return sum(nums), "যোগ", nums
    return None, None, []
def ai_tutor(user_input):
    """Answer a learner's question: symbolic math first, LLM fallback.

    Tries the deterministic calculator; when no arithmetic rule matches,
    generates an answer with the LLM, then narrates the text with gTTS.

    Args:
        user_input: Question text (Bengali or English).

    Returns:
        ``(answer_text, audio_path)`` where ``audio_path`` is "voice.mp3"
        or ``None`` when speech synthesis fails (e.g. no network).
    """
    val, op, nums = safe_calculator(user_input)
    if val is not None:
        # Templated explanations guarantee a correct worked answer.
        # Bug fix: the addition template indexes nums[1], but the "যোগ"
        # rule can fire with a single number — guard and fall through
        # to the generic template in that case.
        if op == "যোগ" and len(nums) >= 2:
            ans = f"সঠিক উত্তর {val}। কারণ {nums[0]} আর {nums[1]} যোগ করলে {val} হয়।"
        elif op == "বিয়োগ":
            ans = f"উত্তর {val}। {nums[0]} থেকে {nums[1]} বাদ দিলে {val} থাকে।"
        elif op == "গুণ":
            ans = f"উত্তর {val} টাকা।"
        elif op == "ভাগ":
            ans = f"উত্তর {val}।"
        else:
            ans = f"উত্তর হলো {val}।"
    else:
        # LLM fallback using Qwen's ChatML prompt format.
        inp = f"<|im_start|>user\n{user_input}<|im_end|>\n<|im_start|>assistant\n"
        # Bug fix: send tensors to wherever the model actually lives;
        # the original hard-coded "cuda" and crashed on CPU-only hosts.
        inputs = tokenizer([inp], return_tensors="pt").to(model.device)
        out = model.generate(inputs.input_ids, max_new_tokens=100)
        ans = tokenizer.decode(out[0], skip_special_tokens=True).split("assistant")[-1].strip()
    try:
        tts = gTTS(ans, lang='bn')
        tts.save("voice.mp3")
        return ans, "voice.mp3"
    except Exception:
        # Best-effort TTS: still return the text answer if audio fails.
        return ans, None
# ---- Gradio interface ----
# (The 'theme' argument was removed previously because it raised an error.)
with gr.Blocks() as demo:
    gr.Markdown(f"# {TITLE}")
    gr.Markdown(f"### {DESC}")
    # Top row: the learner's question and the submit button.
    with gr.Row():
        question_box = gr.Textbox(label="Question")
        ask_button = gr.Button("Ask Polymath 🧠", variant="primary")
    # Bottom row: written answer plus spoken audio (auto-played).
    with gr.Row():
        answer_box = gr.Textbox(label="Answer")
        voice_player = gr.Audio(label="Voice", autoplay=True)
    ask_button.click(ai_tutor, inputs=[question_box], outputs=[answer_box, voice_player])
demo.launch()