import subprocess

import gradio as gr


def chat_with_ollama(user_message, chat_history):
    """
    Sends the full chat history plus the current user message to the Ollama
    model, captures the response, and appends the new turn to the history.
    """
    # Build the prompt from past conversation turns plus the new message.
    prompt = ""
    for user, bot in chat_history:
        prompt += f"User: {user}\nAssistant: {bot}\n"
    prompt += f"User: {user_message}\nAssistant:"

    # Call the Ollama CLI to get the model's response (30-second cap).
    try:
        result = subprocess.run(
            ["ollama", "run", "township_business_growth_coach", prompt],
            capture_output=True,
            text=True,
            timeout=30,
        )
        if result.returncode == 0:
            bot_reply = result.stdout.strip()
        else:
            bot_reply = f"Error from Ollama: {result.stderr.strip()}"
    except Exception as e:
        bot_reply = f"Exception: {e}"

    # Update the chat history. gr.Chatbot renders a list of (user, bot)
    # tuples, so the history itself is returned for both the chatbox and
    # the state (not a list of {"User": ..., "Bot": ...} dicts).
    chat_history.append((user_message, bot_reply))
    return chat_history, chat_history
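
# Quick sanity check from a Python shell (a sketch; assumes the
# township_business_growth_coach model has already been created in Ollama):
#   history, _ = chat_with_ollama("How can I grow my spaza shop?", [])
#   print(history[-1][1])  # prints the bot's reply for the last turn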

with gr.Blocks(title="Township Business Growth Coach Chatbot") as demo:
    gr.Markdown("## 💬 Township Business Growth Coach Chatbot")
    chatbox = gr.Chatbot(label="Chat History")
    msg = gr.Textbox(placeholder="Ask your township business growth question here...")
    state = gr.State([])  # stores history as a list of (user, bot) tuples
    submit = gr.Button("Send")

    # A message can be sent with the button or by pressing Enter in the textbox.
    submit.click(chat_with_ollama, inputs=[msg, state], outputs=[chatbox, state])
    msg.submit(chat_with_ollama, inputs=[msg, state], outputs=[chatbox, state])

demo.launch(server_name="0.0.0.0", server_port=7860)
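
# To run this demo locally (a sketch; assumes Ollama is installed and the
# township_business_growth_coach model has been built, e.g. via
# `ollama create township_business_growth_coach -f Modelfile`):
#   python app.py   # or whatever this file is saved as
# Gradio then serves the UI at http://0.0.0.0:7860.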