File size: 4,003 Bytes
baedd0b
33da397
 
 
baedd0b
33da397
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
04dec8c
33da397
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
9014a54
33da397
 
9014a54
33da397
 
73f147b
33da397
 
 
 
 
 
 
 
 
 
 
 
9014a54
33da397
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
import streamlit as st
import uuid
import requests
import json

# Session identifier sent to the chatbot backend. Streamlit re-runs this
# script top-to-bottom on EVERY user interaction, so a bare uuid.uuid4()
# here would mint a new id on each rerun and the backend would lose the
# conversation context. Persist the id in session_state so it stays
# stable for the lifetime of the browser session.
if "session_id" not in st.session_state:
    st.session_state.session_id = uuid.uuid4()
cookies = st.session_state.session_id
def ask_chat_bot(message, session_id, uri, database, username, password,
                 model="openai_gpt_4o", timeout=60):
    """
    Send a message to the chatbot backend and return the raw response body.

    Parameters:
    - message (str): The question or message to send to the chatbot.
    - session_id: Opaque session identifier the backend uses to keep
      conversation context (form-encoded into the request payload).
    - uri (str): The Neo4j database URI.
    - database (str): The Neo4j database name.
    - username (str): The database username.
    - password (str): The database password.
    - model (str): The model to use for the chatbot. Default is 'openai_gpt_4o'.
    - timeout (float): Seconds to wait for the HTTP request. Without a
      timeout, requests.post can block forever on a stalled connection.

    Returns:
    - str: The backend's response body on HTTP 200; otherwise an
      "Error: ..." string. Errors are reported as strings, never raised,
      so callers can surface them directly in the UI.
    """
    url = "https://dev-backend-967196130891.us-central1.run.app/chat_bot"

    # Payload configuration. The backend expects form fields (hence
    # data=, not json=); 'document_names' is a JSON list *as a string*.
    payload = {
        'question': message,
        'session_id': session_id,
        'model': model,
        'mode': 'graph_vector_fulltext', #entity_vector
        'document_names': '[]',
        'uri': uri,
        'database': database,
        'userName': username,
        'password': password
    }

    # Browser-like headers copied from the dev frontend's requests; the
    # backend appears to expect a matching origin/referer pair.
    headers = {
        'accept': 'application/json, text/plain, */*',
        'accept-language': 'en-US,en;q=0.9',
        'origin': 'https://dev-frontend-dcavk67s4a-uc.a.run.app',
        'priority': 'u=1, i',
        'referer': 'https://dev-frontend-dcavk67s4a-uc.a.run.app/',
        'sec-ch-ua': '"Chromium";v="136", "Google Chrome";v="136", "Not.A/Brand";v="99"',
        'sec-ch-ua-mobile': '?1',
        'sec-ch-ua-platform': '"Android"',
        'sec-fetch-dest': 'empty',
        'sec-fetch-mode': 'cors',
        'sec-fetch-site': 'cross-site',
        'user-agent': 'Mozilla/5.0 (Linux; Android 6.0; Nexus 5 Build/MRA58N) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/136.0.0.0 Mobile Safari/537.36'
    }

    # Send the request; fold transport failures (DNS, refused connection,
    # timeout) into the same error-as-string contract as HTTP errors.
    try:
        response = requests.post(url, headers=headers, data=payload,
                                 timeout=timeout)
    except requests.RequestException as exc:
        return f"Error: request failed - {exc}"

    # Check for errors
    if response.status_code == 200:
        return response.text
    return f"Error: {response.status_code} - {response.text}"

# Page configuration and title.
st.set_page_config(page_title="💬 Specter AI")

st.markdown("# 🧊 Chat with Specter AI")

# Seed the conversation with a greeting on the first run of a session.
if "messages" not in st.session_state:
    st.session_state.messages = [
        {
            "role": "assistant",
            "content": "Hello, This is Specter AI! How may I help you?",
        }
    ]

# Replay the full conversation so far (the script reruns on every
# interaction, so history must be redrawn each time).
for entry in st.session_state.messages:
    with st.chat_message(entry["role"]):
        st.write(entry["content"])

# Function for generating LLM response
def generate_response(prompt_input):
    """
    Ask the backend chatbot and format its answer as an HTML fragment.

    Parameters:
    - prompt_input (str): The user's question.

    Returns:
    - str: The assistant's message followed by an ordered list of source
      links, suitable for st.markdown(..., unsafe_allow_html=True).
      If the backend call fails, its "Error: ..." string is returned as-is.
    """
    # SECURITY(review): database credentials are hard-coded in source;
    # they belong in st.secrets / environment variables.
    raw = ask_chat_bot(
        message=prompt_input,
        session_id=cookies,
        uri='neo4j+s://60701806.databases.neo4j.io:7687',
        database='neo4j',
        username='neo4j',
        password='0Vo7ni1a6nfxvi3gB42Y2rX8hhL7AzIIaGxUBTb2CEM'
    )
    # ask_chat_bot reports failures as plain "Error: ..." strings, which
    # are not JSON — surface them instead of crashing in json.loads.
    try:
        parsed = json.loads(raw)
    except ValueError:
        return raw
    data = parsed["data"]
    # Some responses carry no source list; fall back to an empty one
    # rather than raising KeyError.
    sources = data.get("info", {}).get("sources") or []
    links = "<br>".join(
        f'<li><a href="{source}" target="_blank">{source}</a></li>'
        for source in sources
    )
    return (
        data["message"]
        + "<br><br><strong>Sources:</strong><br><ol>"
        + links
        + "</ol>"
    )

# User-provided prompt.
# st.chat_input returns None until the user submits text; the walrus
# binds the submitted string for the remainder of this script run.
if prompt := st.chat_input("Type your message here..."):
    st.session_state.messages.append({"role": "user", "content": prompt})
    with st.chat_message("user"):
        st.write(prompt)

# Generate a new response if last message is not from assistant.
# Streamlit reruns the whole script per interaction: right after the user
# submits, the last message is theirs, so this branch fires; once the
# assistant reply is appended, the next rerun skips it.
# NOTE(review): this relies on `prompt` having been bound by the if-block
# above in the same run; if the last stored message were ever a user
# message at startup, `prompt` would be None here — confirm that cannot
# happen.
if st.session_state.messages[-1]["role"] != "assistant":
    with st.chat_message("assistant"):
        with st.spinner("Thinking..."):
            response = generate_response(prompt) 
            # Response embeds raw HTML (source links), hence unsafe_allow_html.
            st.markdown(response,unsafe_allow_html=True) 
    message = {"role": "assistant", "content": response}
    st.session_state.messages.append(message)