Spaces:
Build error
Build error
T-K-O-H
committed on
Commit
·
b61785d
1
Parent(s):
275af38
Update sessions and stuff
Browse files
app.py
CHANGED
|
@@ -1,16 +1,17 @@
|
|
| 1 |
import os
|
| 2 |
import chainlit as cl
|
| 3 |
from agent_graph import agent_node
|
| 4 |
-
from dotenv import load_dotenv
|
| 5 |
from typing import List, Dict
|
| 6 |
import time
|
|
|
|
|
|
|
| 7 |
|
| 8 |
# Load environment variables from .env file
|
| 9 |
load_dotenv()
|
| 10 |
|
| 11 |
# Ensure your OpenAI API key is set up in environment variables
|
| 12 |
openai_api_key = os.getenv("OPENAI_API_KEY")
|
| 13 |
-
|
| 14 |
if not openai_api_key:
|
| 15 |
raise ValueError("OpenAI API key is missing in the .env file")
|
| 16 |
|
|
@@ -18,22 +19,17 @@ if not openai_api_key:
|
|
| 18 |
chat_histories: Dict[str, List[Dict[str, str]]] = {}
|
| 19 |
|
| 20 |
def get_unique_session_id():
|
| 21 |
-
"""Generate a unique session ID
|
| 22 |
-
|
| 23 |
-
timestamp = int(time.time() * 1000) # Current time in milliseconds
|
| 24 |
-
return f"{user_id}_{timestamp}"
|
| 25 |
|
| 26 |
@cl.on_chat_start
|
| 27 |
async def start_chat():
|
| 28 |
-
|
| 29 |
-
|
| 30 |
-
|
| 31 |
-
|
| 32 |
-
|
| 33 |
-
|
| 34 |
-
chat_histories[session_id] = []
|
| 35 |
-
|
| 36 |
-
welcome_message = """📈 Welcome to the AI Stock Assistant!
|
| 37 |
|
| 38 |
I'm your intelligent stock market companion. Here's what I can do:
|
| 39 |
|
|
@@ -56,60 +52,51 @@ Popular stocks to try:
|
|
| 56 |
• Retail: WMT (Walmart), COST (Costco)
|
| 57 |
|
| 58 |
What would you like to know about the stock market?"""
|
| 59 |
-
|
| 60 |
-
|
|
|
|
|
|
|
|
|
|
| 61 |
|
| 62 |
@cl.on_message
|
| 63 |
async def handle_message(message: cl.Message):
|
| 64 |
try:
|
| 65 |
-
# Get the unique session ID
|
| 66 |
session_id = cl.user_session.get("session_id")
|
| 67 |
if not session_id:
|
| 68 |
-
# If session ID is missing, generate a new one
|
| 69 |
session_id = get_unique_session_id()
|
| 70 |
cl.user_session.set("session_id", session_id)
|
| 71 |
chat_histories[session_id] = []
|
| 72 |
-
|
| 73 |
-
# Get chat history for this session
|
| 74 |
history = chat_histories.get(session_id, [])
|
| 75 |
-
|
| 76 |
-
# Add current message to history
|
| 77 |
history.append({"role": "user", "content": message.content})
|
| 78 |
-
|
| 79 |
-
# Create state dictionary with history
|
| 80 |
state = {
|
| 81 |
"input": message.content,
|
| 82 |
"chat_history": history
|
| 83 |
}
|
| 84 |
-
|
| 85 |
-
# Process the message with agent_node
|
| 86 |
print(f"[Debug] Processing message: {message.content}")
|
| 87 |
-
response = agent_node
|
| 88 |
print(f"[Debug] Agent response: {response}")
|
| 89 |
-
|
| 90 |
-
# Send the response back to the user
|
| 91 |
if isinstance(response, dict) and "output" in response:
|
| 92 |
-
# Add response to history
|
| 93 |
history.append({"role": "assistant", "content": response["output"]})
|
| 94 |
-
print(f"[Debug] Sending response to user: {response['output']}")
|
| 95 |
await cl.Message(content=response["output"]).send()
|
| 96 |
-
print("[Debug] Response sent successfully")
|
| 97 |
else:
|
| 98 |
-
|
| 99 |
-
|
| 100 |
-
await cl.Message(content=error_msg).send()
|
| 101 |
-
|
| 102 |
-
# Update history in storage
|
| 103 |
chat_histories[session_id] = history
|
| 104 |
-
|
| 105 |
except Exception as e:
|
| 106 |
-
print(f"[
|
| 107 |
-
|
| 108 |
-
await cl.Message(content=error_msg).send()
|
| 109 |
|
| 110 |
@cl.on_chat_end
|
| 111 |
async def end_chat():
|
| 112 |
-
|
| 113 |
-
|
| 114 |
-
|
| 115 |
-
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
import os
|
| 2 |
import chainlit as cl
|
| 3 |
from agent_graph import agent_node
|
| 4 |
+
from dotenv import load_dotenv
|
| 5 |
from typing import List, Dict
|
| 6 |
import time
|
| 7 |
+
import uuid
|
| 8 |
+
import asyncio
|
| 9 |
|
| 10 |
# Pull configuration from the local .env file into the process environment.
load_dotenv()

# Fail fast at startup if the OpenAI credential is not configured.
openai_api_key = os.getenv("OPENAI_API_KEY")
if not openai_api_key:
    raise ValueError("OpenAI API key is missing in the .env file")

# In-memory transcript store: session ID -> ordered list of
# {"role": ..., "content": ...} message dicts, one list per chat session.
chat_histories: Dict[str, List[Dict[str, str]]] = {}
|
| 20 |
|
| 21 |
def get_unique_session_id():
    """Return a fresh, collision-resistant session identifier string."""
    new_id = uuid.uuid4()
    return str(new_id)
|
|
|
|
|
|
|
| 24 |
|
| 25 |
@cl.on_chat_start
|
| 26 |
async def start_chat():
|
| 27 |
+
try:
|
| 28 |
+
session_id = get_unique_session_id()
|
| 29 |
+
cl.user_session.set("session_id", session_id)
|
| 30 |
+
chat_histories[session_id] = []
|
| 31 |
+
|
| 32 |
+
welcome_message = """📈 Welcome to the AI Stock Assistant!
|
|
|
|
|
|
|
|
|
|
| 33 |
|
| 34 |
I'm your intelligent stock market companion. Here's what I can do:
|
| 35 |
|
|
|
|
| 52 |
• Retail: WMT (Walmart), COST (Costco)
|
| 53 |
|
| 54 |
What would you like to know about the stock market?"""
|
| 55 |
+
|
| 56 |
+
await cl.Message(content=welcome_message).send()
|
| 57 |
+
except Exception as e:
|
| 58 |
+
print(f"[Error] Failed to start chat: {e}")
|
| 59 |
+
await cl.Message(content=f"⚠️ Error starting chat: {str(e)}").send()
|
| 60 |
|
| 61 |
@cl.on_message
async def handle_message(message: cl.Message):
    """Route one incoming user message through the agent and send the reply.

    Looks up (or lazily creates) the session's chat history, appends the
    user turn, runs `agent_node` off the event loop via a worker thread,
    and sends the agent's output back to the Chainlit UI. Any failure is
    reported to the user instead of crashing the handler.
    """
    try:
        # Resolve the session; create one on the fly if the start hook
        # did not run (e.g. after a reconnect).
        sid = cl.user_session.get("session_id")
        if not sid:
            sid = get_unique_session_id()
            cl.user_session.set("session_id", sid)
            chat_histories[sid] = []

        # Record the user turn in this session's transcript.
        history = chat_histories.get(sid, [])
        history.append({"role": "user", "content": message.content})

        state = {
            "input": message.content,
            "chat_history": history,
        }

        print(f"[Debug] Processing message: {message.content}")
        # agent_node is synchronous; run it in a thread so the event
        # loop stays responsive while the agent works.
        response = await asyncio.to_thread(agent_node, state)
        print(f"[Debug] Agent response: {response}")

        if isinstance(response, dict) and "output" in response:
            # Record the assistant turn, then deliver it to the user.
            history.append({"role": "assistant", "content": response["output"]})
            await cl.Message(content=response["output"]).send()
        else:
            await cl.Message(content="❌ Received an invalid response format from the agent.").send()

        # Persist the updated transcript for this session.
        chat_histories[sid] = history
    except Exception as e:
        print(f"[Error] Error in handle_message: {e}")
        await cl.Message(content=f"⚠️ Error: {str(e)}").send()
|
|
|
|
| 93 |
|
| 94 |
@cl.on_chat_end
async def end_chat():
    """Release per-session state when a Chainlit chat session ends.

    Drops the session's transcript from the in-memory store (if present)
    and clears the Chainlit user session. Cleanup failures are logged,
    never raised.
    """
    try:
        sid = cl.user_session.get("session_id")
        if sid:
            # pop with a default is a no-op when the key is absent,
            # matching the original `in`-guarded delete.
            chat_histories.pop(sid, None)
        cl.user_session.clear()
    except Exception as e:
        print(f"[Error] Failed to clean up chat history: {e}")