"""Streamlit chat front-end for an Ollama model served via a FastAPI backend."""

import json

import requests
import streamlit as st

# FastAPI endpoint that proxies prompts to the Ollama model.
# NOTE(review): hard-coded to localhost — consider reading from an env var.
FASTAPI_URL = "http://localhost:7860/ask"

# Must be the first Streamlit call in the script.
# NOTE(review): the original page_icon was mojibake ("π€"); restored to 🤖.
st.set_page_config(page_title="Ollama AI Assistant", page_icon="🤖", layout="wide")
# --- Session state for chat history ---
# Seed the conversation with an assistant greeting on the first page load;
# later reruns keep whatever history has accumulated.
if "chat_history" not in st.session_state:
    greeting = {"role": "assistant", "message": "Hello! How can I assist you today?"}
    st.session_state.chat_history = [greeting]
# --- App Header ---
# NOTE(review): the original title emoji was mojibake ("π€"); restored to 🤖.
st.title("🤖 Ollama AI Assistant")
st.caption("Start chatting with our AI assistant. Type your message below and press send.")

# --- Chat Display ---
st.markdown("---")
# Render the conversation so far, one chat bubble per stored message.
for chat in st.session_state.chat_history:
    if chat["role"] == "assistant":
        # NOTE(review): the avatar was mojibake ("π€"); restored to 🤖.
        with st.chat_message("assistant", avatar="🤖"):
            st.write(chat["message"])
    else:
        with st.chat_message("user"):
            st.write(chat["message"])
# --- Input Area ---
with st.form("chat_form", clear_on_submit=True):
    user_prompt = st.text_area(
        "Type your message here...",
        height=100,
        placeholder="e.g., Explain quantum computing in simple terms.",
        label_visibility="collapsed",
        key="user_input_text_area",
    )
    submitted = st.form_submit_button("Send")

if submitted and user_prompt:
    st.session_state.chat_history.append({"role": "user", "message": user_prompt})

    # Transient placeholder while the backend works; the real reply is drawn
    # by the history loop after st.rerun().
    with st.chat_message("assistant", avatar="🤖"):
        st.write("Thinking...")

    try:
        # `json=` serializes the payload and sets Content-Type for us
        # (replaces the manual json.dumps + headers construction).
        # A timeout prevents the app from hanging forever on a stuck backend.
        response = requests.post(FASTAPI_URL, json={"text": user_prompt}, timeout=120)
        if response.status_code == 200:
            llm_response = response.json().get("response", "No response received.")
        else:
            llm_response = (
                f"Error: FastAPI server returned {response.status_code}. "
                f"Details: {response.text}"
            )
    except requests.exceptions.ConnectionError:
        llm_response = f"Error: Cannot connect to the FastAPI server at {FASTAPI_URL}."
    except requests.exceptions.Timeout:
        llm_response = f"Error: Request to {FASTAPI_URL} timed out."
    except Exception as e:
        llm_response = f"Unexpected error: {e}"

    st.session_state.chat_history.append({"role": "assistant", "message": llm_response})
    st.rerun()
elif submitted and not user_prompt:
    st.warning("Please enter a prompt before clicking 'Send'.")
# --- Footer ---
# Closing divider plus attribution line.
st.markdown("---")
st.caption("Powered by Ollama, FastAPI, and Streamlit.")