# sp/src/streamlit_app.py — Hugging Face Spaces Streamlit app
# Provenance (from the Spaces page this file was copied from):
#   author: nnitiwe — commit 04dec8c (verified) "Update src/streamlit_app.py"
import html
import json
import uuid

import requests
import streamlit as st
# One session ID per app process. Stringify the UUID so the backend's
# `session_id` field receives a plain string rather than a uuid.UUID object.
cookies = str(uuid.uuid4())
def ask_chat_bot(message, session_id, uri, database, username, password, model="openai_gpt_4o", timeout=60):
    """
    Send a message to the chatbot backend and return the raw response text.

    Parameters:
    - message (str): The question or message to send to the chatbot.
    - session_id (str): The session ID to maintain context.
    - uri (str): The Neo4j database URI.
    - database (str): The Neo4j database name.
    - username (str): The database username.
    - password (str): The database password.
    - model (str): The model to use for the chatbot. Default is 'openai_gpt_4o'.
    - timeout (float): Seconds to wait for the backend before giving up.
      Default 60 (new keyword-with-default; backward compatible).

    Returns:
    - str: The backend's response body on HTTP 200, otherwise an
      "Error: ..." string describing the failure.
    """
    url = "https://dev-backend-967196130891.us-central1.run.app/chat_bot"
    # Form-encoded payload consumed by the /chat_bot endpoint.
    payload = {
        'question': message,
        'session_id': session_id,
        'model': model,
        'mode': 'graph_vector_fulltext',  # alternative mode: 'entity_vector'
        'document_names': '[]',
        'uri': uri,
        'database': database,
        'userName': username,
        'password': password
    }
    # Browser-like headers mimicking the dev frontend's CORS request.
    # NOTE(review): unclear which of these the backend actually requires —
    # worth trimming once confirmed.
    headers = {
        'accept': 'application/json, text/plain, */*',
        'accept-language': 'en-US,en;q=0.9',
        'origin': 'https://dev-frontend-dcavk67s4a-uc.a.run.app',
        'priority': 'u=1, i',
        'referer': 'https://dev-frontend-dcavk67s4a-uc.a.run.app/',
        'sec-ch-ua': '"Chromium";v="136", "Google Chrome";v="136", "Not.A/Brand";v="99"',
        'sec-ch-ua-mobile': '?1',
        'sec-ch-ua-platform': '"Android"',
        'sec-fetch-dest': 'empty',
        'sec-fetch-mode': 'cors',
        'sec-fetch-site': 'cross-site',
        'user-agent': 'Mozilla/5.0 (Linux; Android 6.0; Nexus 5 Build/MRA58N) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/136.0.0.0 Mobile Safari/537.36'
    }
    try:
        # `timeout` prevents the Streamlit worker from hanging indefinitely:
        # requests has NO default timeout, so a stalled backend would block forever.
        response = requests.post(url, headers=headers, data=payload, timeout=timeout)
    except requests.exceptions.RequestException as exc:
        # Preserve the function's "return an error string" contract instead
        # of letting connection/timeout errors crash the Streamlit callback.
        return f"Error: request failed - {exc}"
    # Check for errors
    if response.status_code == 200:
        return response.text
    return f"Error: {response.status_code} - {response.text}"
# App title / page setup
st.set_page_config(page_title="💬 Specter AI")
st.markdown("# 🧊 Chat with Specter AI")

# Seed the conversation with a greeting on first load.
# Idiomatic Streamlit membership test: `in st.session_state`
# (no need for the redundant `.keys()` call).
if "messages" not in st.session_state:
    st.session_state.messages = [{"role": "assistant", "content": "Hello, This is Specter AI! How may I help you?"}]

# Replay the stored conversation so it persists across Streamlit reruns.
for message in st.session_state.messages:
    with st.chat_message(message["role"]):
        st.write(message["content"])
# Function for generating the LLM response
def generate_response(prompt_input):
    """
    Query the chatbot backend and format the answer plus source links as HTML.

    Parameters:
    - prompt_input (str): The user's question.

    Returns:
    - str: HTML with the assistant's message followed by an ordered list of
      (escaped) source links; on a malformed/error backend response, the raw
      backend text is returned verbatim.
    """
    # SECURITY NOTE(review): database credentials are hard-coded in source.
    # Move them to st.secrets / environment variables and rotate this password.
    raw = ask_chat_bot(
        message=prompt_input,
        session_id=cookies,
        uri='neo4j+s://60701806.databases.neo4j.io:7687',
        database='neo4j',
        username='neo4j',
        password='0Vo7ni1a6nfxvi3gB42Y2rX8hhL7AzIIaGxUBTb2CEM'
    )
    try:
        response = json.loads(raw)
        answer = response["data"]["message"]
        sources = response["data"]["info"].get("sources", [])
    except (json.JSONDecodeError, KeyError, TypeError):
        # ask_chat_bot returns an "Error: ..." plain string on failure, which
        # is not JSON; surface it to the user instead of raising here.
        return raw
    if not sources:
        return answer
    # Escape the backend-supplied URLs before embedding them in HTML that is
    # rendered with unsafe_allow_html=True, so an odd/malicious URL cannot
    # inject markup into the page.
    items = "<br>".join(
        f'<li><a href="{html.escape(source, quote=True)}" target="_blank">{html.escape(source)}</a></li>'
        for source in sources
    )
    return f"{answer}<br><br><strong>Sources:</strong><br><ol>{items}</ol>"
# --- User input handling --------------------------------------------------
# chat_input returns None until the user submits, so the walrus guard only
# fires when there is a fresh message to record and display.
if prompt := st.chat_input("Type your message here..."):
    st.session_state.messages.append({"role": "user", "content": prompt})
    with st.chat_message("user"):
        st.write(prompt)

# Produce a reply only when the most recent message came from the user,
# i.e. the assistant has not answered it yet.
if st.session_state.messages[-1]["role"] != "assistant":
    with st.chat_message("assistant"), st.spinner("Thinking..."):
        reply = generate_response(prompt)
        st.markdown(reply, unsafe_allow_html=True)
        st.session_state.messages.append({"role": "assistant", "content": reply})