File size: 2,460 Bytes
7dfedd8
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
import streamlit as st
import requests
import json

# --- Backend configuration ---
# Endpoint of the FastAPI service that proxies prompts to Ollama.
FASTAPI_URL = "http://localhost:7860/ask"

st.set_page_config(
    page_title="Ollama AI Assistant",
    page_icon="🤖",
    layout="wide",
)

# --- Session state for chat history ---
# Streamlit re-executes this whole file on every interaction, so the
# greeting is seeded only on the very first run of the session.
if "chat_history" not in st.session_state:
    st.session_state["chat_history"] = [
        {"role": "assistant", "message": "Hello! How can I assist you today?"}
    ]

# --- App Header ---
st.title("🤖 Ollama AI Assistant")
st.caption("Start chatting with our AI assistant. Type your message below and press send.")

# --- Chat Display ---
# Replay the whole conversation from session state on every rerun.
st.markdown("---")
for entry in st.session_state.chat_history:
    role = entry["role"]
    # Only the assistant gets a custom avatar; users keep the default (None).
    avatar = "🤖" if role == "assistant" else None
    with st.chat_message(role, avatar=avatar):
        st.write(entry["message"])

# --- Input Area ---
# Collects a prompt, posts it to the FastAPI backend, and appends both the
# user message and the model's reply to chat_history before rerunning.
with st.form("chat_form", clear_on_submit=True):
    user_prompt = st.text_area(
        "Type your message here...",
        height=100,
        placeholder="e.g., Explain quantum computing in simple terms.",
        label_visibility="collapsed",
        key="user_input_text_area"
    )
    submitted = st.form_submit_button("Send")

    if submitted and user_prompt:
        st.session_state.chat_history.append({"role": "user", "message": user_prompt})
        # Transient placeholder shown while the request is in flight;
        # st.rerun() below redraws the page from chat_history, which by
        # then contains the real response.
        with st.chat_message("assistant", avatar="🤖"):
            st.write("Thinking...")

        try:
            # json= lets requests serialize the payload and set the JSON
            # Content-Type header itself; timeout= prevents a hung backend
            # from blocking the Streamlit script forever.
            response = requests.post(FASTAPI_URL, json={"text": user_prompt}, timeout=120)

            if response.status_code == 200:
                llm_response = response.json().get("response", "No response received.")
            else:
                llm_response = f"Error: FastAPI server returned {response.status_code}. Details: {response.text}"

        except requests.exceptions.Timeout:
            llm_response = f"Error: Request to {FASTAPI_URL} timed out."
        except requests.exceptions.ConnectionError:
            llm_response = f"Error: Cannot connect to the FastAPI server at {FASTAPI_URL}."
        except Exception as e:
            llm_response = f"Unexpected error: {e}"

        st.session_state.chat_history.append({"role": "assistant", "message": llm_response})
        st.rerun()

    elif submitted and not user_prompt:
        st.warning("Please enter a prompt before clicking 'Send'.")

# --- Footer ---
st.markdown("---")  # horizontal rule separating the chat area from the footer
st.caption("Powered by Ollama, FastAPI, and Streamlit.")