# Hugging Face Space app: multilingual (English / Malayalam) chatbot built on Phi-2.
import gradio as gr
import torch
from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
import time

# Load Phi-2 (a small, fast 2.7B-parameter LLM) at import time so the
# Gradio handlers below can use it.
model_name = "microsoft/phi-2"

# Default so chat_response() can always read error_msg, even on success.
error_msg = ""
try:
    tokenizer = AutoTokenizer.from_pretrained(model_name, trust_remote_code=True)
    model = AutoModelForCausalLM.from_pretrained(
        model_name,
        torch_dtype=torch.float16,  # half precision to fit smaller GPUs
        device_map="auto",          # place weights on GPU when available
        trust_remote_code=True,
    )
    model_loaded = True
except Exception as e:
    # Keep the app importable even if the download/load fails; the UI
    # surfaces the error text instead of crashing at startup.
    model_loaded = False
    error_msg = str(e)
def chat_response(message, language="English"):
    """Generate a short chat reply to *message* using the Phi-2 model.

    Args:
        message: The user's input text.
        language: "English" or "Malayalam"; selects the system prompt.

    Returns:
        The assistant's reply truncated to 300 characters, or a
        human-readable error/demo-mode string if generation fails or
        the model never loaded.
    """
    if not model_loaded:
        return f"⚠️ Model not loaded: {error_msg}. Using demo mode."
    try:
        if language == "Malayalam":
            prompt = f"നിങ്ങൾ ഒരു സഹായകമായ എഐ അസിസ്റ്റന്റാണ്. ചോദ്യത്തിന് ചെറിയ, സ്പഷ്ടമായ ഉത്തരം നൽകുക.\n\nUser: {message}\nAssistant:"
        else:
            prompt = f"You are a helpful AI assistant. Provide short, clear answers.\n\nUser: {message}\nAssistant:"
        inputs = tokenizer(prompt, return_tensors="pt", max_length=512, truncation=True)
        # device_map="auto" may have dispatched the model to GPU; the
        # tokenizer returns CPU tensors, so move them to the model's device.
        inputs = {k: v.to(model.device) for k, v in inputs.items()}
        outputs = model.generate(
            **inputs,
            max_new_tokens=100,
            temperature=0.7,
            top_p=0.9,
            do_sample=True,
            pad_token_id=tokenizer.eos_token_id,  # Phi-2 defines no pad token
        )
        response = tokenizer.decode(outputs[0], skip_special_tokens=True)
        # decode() echoes the prompt; keep only the text after the last marker.
        response = response.split("Assistant:")[-1].strip()
        return response[:300]
    except Exception as e:
        return f"Error: {str(e)}. Try a simpler question."
# ---------------------------------------------------------------------------
# Gradio interface
# ---------------------------------------------------------------------------
with gr.Blocks(theme=gr.themes.Soft()) as demo:
    gr.Markdown(
        """
# 🌍 Multilingual Voice Chatbot
**AI Chatbot supporting English & Malayalam** | Built with Phi-2 LLM
Ask me anything! Currently text-based (voice features coming soon)
"""
    )

    with gr.Row():
        # Main chat column.
        with gr.Column(scale=3):
            language = gr.Radio(
                ["English", "Malayalam"],
                value="English",
                label="🌐 Select Language",
            )
            chatbot = gr.Chatbot(label="💬 Chat History", height=400)
            msg = gr.Textbox(
                label="Your Message",
                placeholder="Type your message here...",
                lines=2,
            )
            with gr.Row():
                submit = gr.Button("Send 📤", variant="primary")
                clear = gr.Button("Clear 🗑️")
        # Info sidebar.
        with gr.Column(scale=1):
            gr.Markdown(
                """
### Features
✅ English & Malayalam
✅ Phi-2 LLM (2.7B)
✅ Fast responses
✅ Web-based UI
### Coming Soon
🎤 Speech input
🔊 Voice output
🌏 More languages
### Tech Stack
- **LLM**: Phi-2
- **Framework**: Gradio
- **Platform**: HF Spaces
"""
            )

    def respond(message, chat_history, lang):
        """Append (user, bot) to the history and clear the input box."""
        if not message.strip():
            # Ignore empty / whitespace-only submissions.
            return chat_history, ""
        bot_response = chat_response(message, lang)
        chat_history.append((message, bot_response))
        return chat_history, ""

    # Both the Send button and pressing Enter in the textbox submit.
    submit.click(
        respond,
        inputs=[msg, chatbot, language],
        outputs=[chatbot, msg],
    )
    msg.submit(
        respond,
        inputs=[msg, chatbot, language],
        outputs=[chatbot, msg],
    )
    # Clear resets only the chat history (the textbox is left untouched).
    clear.click(lambda: [], outputs=[chatbot])

    gr.Markdown(
        """
---
Built with ❤️ by **Amal SP** | Data Science & AI Enthusiast | March 2026
**Project Goal**: Demonstrate multilingual AI capabilities with production-ready deployment
"""
    )
if __name__ == "__main__":
    # Launch the Gradio server when run as a script (HF Spaces entry point).
    demo.launch()