Spaces:
Build error
Build error
Upload folder using huggingface_hub
Browse files- appagents/research_agent.py +14 -5
- prompts/market_sentiment.txt +1 -2
- prompts/news_headlines.txt +3 -0
- prompts/trade_recommendation.txt +2 -0
- prompts/upcoming_earnings.txt +1 -0
- tools/time_tools.py +22 -0
- ui/app.py +124 -52
appagents/research_agent.py
CHANGED
|
@@ -1,6 +1,7 @@
|
|
| 1 |
from tools.google_tools import GoogleTools
|
| 2 |
from tools.news_tools import NewsTools
|
| 3 |
from tools.yahoo_tools import FinanceTools
|
|
|
|
| 4 |
from agents import Agent
|
| 5 |
|
| 6 |
class MarketResearchAgent:
|
|
@@ -14,21 +15,29 @@ class MarketResearchAgent:
|
|
| 14 |
Returns a configured Agent instance ready for use.
|
| 15 |
"""
|
| 16 |
tools = [
|
|
|
|
| 17 |
GoogleTools.search,
|
| 18 |
FinanceTools.get_summary,
|
| 19 |
FinanceTools.get_history,
|
| 20 |
-
NewsTools.top_headlines,
|
| 21 |
-
NewsTools.search_news,
|
| 22 |
]
|
| 23 |
|
| 24 |
instructions = """
|
| 25 |
-
|
| 26 |
|
| 27 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 28 |
|
| 29 |
-
|
| 30 |
"""
|
| 31 |
|
|
|
|
| 32 |
agent = Agent(
|
| 33 |
name="AI Assistant",
|
| 34 |
tools=tools,
|
|
|
|
| 1 |
from tools.google_tools import GoogleTools
|
| 2 |
from tools.news_tools import NewsTools
|
| 3 |
from tools.yahoo_tools import FinanceTools
|
| 4 |
+
from tools.time_tools import TimeTools
|
| 5 |
from agents import Agent
|
| 6 |
|
| 7 |
class MarketResearchAgent:
|
|
|
|
| 15 |
Returns a configured Agent instance ready for use.
|
| 16 |
"""
|
| 17 |
tools = [
|
| 18 |
+
TimeTools.current_datetime,
|
| 19 |
GoogleTools.search,
|
| 20 |
FinanceTools.get_summary,
|
| 21 |
FinanceTools.get_history,
|
| 22 |
+
# NewsTools.top_headlines,
|
| 23 |
+
# NewsTools.search_news,
|
| 24 |
]
|
| 25 |
|
| 26 |
instructions = """
|
| 27 |
+
You are an AI research assistant designed to deliver accurate, concise, and verifiable answers.
|
| 28 |
|
| 29 |
+
Your priorities are:
|
| 30 |
+
0. **Current Date and Time:** Always be aware of the current date and time using the Time Tool. This is crucial for context in your responses.
|
| 31 |
+
1. **Accuracy and Currency:** Always ensure your responses are based on the most recent and reliable information available.
|
| 32 |
+
2. **Tool Usage:** For any query involving current events, factual data, statistics, or evolving topics, you must invoke the appropriate tools (e.g., Google Search, News, or other connected APIs) to confirm or update information before responding.
|
| 33 |
+
3. **Synthesis:** When multiple tools provide information, synthesize and summarize results into a coherent, easy-to-understand response.
|
| 34 |
+
4. **Source Transparency:** Always cite your information sources clearly, preferably with links or named publications.
|
| 35 |
+
5. **Clarity and Brevity:** Use plain, professional language. Avoid speculation, filler text, or unnecessary verbosity.
|
| 36 |
|
| 37 |
+
If you cannot verify an answer, explicitly state that the data could not be confirmed or is unavailable.
|
| 38 |
"""
|
| 39 |
|
| 40 |
+
|
| 41 |
agent = Agent(
|
| 42 |
name="AI Assistant",
|
| 43 |
tools=tools,
|
prompts/market_sentiment.txt
CHANGED
|
@@ -1,2 +1 @@
|
|
| 1 |
-
What is the current market sentiment based on news and
|
| 2 |
-
Provide a brief analysis of investor confidence and market outlook.
|
|
|
|
| 1 |
+
What is the current market sentiment based on the news and market index?
|
|
|
prompts/news_headlines.txt
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Read me the top 3 news headlines today.
|
| 2 |
+
Present the data in markdown format and read me 3 lines for each headline.
|
| 3 |
+
Place the date and time the headline was published to the right of the headline title.
|
prompts/trade_recommendation.txt
ADDED
|
@@ -0,0 +1,2 @@
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Recommend 3 option spreads which have more than an 80% likelihood of profiting me.
|
| 2 |
+
You must do a thorough analysis of the stock's 3-month trend and study the market sentiment before concluding your answer.
|
prompts/upcoming_earnings.txt
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
Provide me the upcoming critical earnings in the next 2 weeks.
|
tools/time_tools.py
ADDED
|
@@ -0,0 +1,22 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from datetime import datetime
from agents import function_tool
from core.logger import log_call

class TimeTools:
    """Tool collection exposing the current date and time to the agent."""

    @staticmethod
    @function_tool
    @log_call
    def current_datetime(format: str = "%Y-%m-%d %H:%M:%S") -> str:
        """
        Return the current local date and time as a formatted string.

        Args:
            format (str): Optional strftime format string
                (default: "%Y-%m-%d %H:%M:%S").

        Returns:
            str: Current date and time rendered with the given format.
        """
        # NOTE(review): naive local time (no timezone) — confirm local time is
        # intended for agent context. The parameter is named `format` to match
        # the tool's published schema, even though it shadows the builtin.
        return datetime.now().strftime(format)
|
ui/app.py
CHANGED
|
@@ -15,19 +15,105 @@ from agents import Runner, trace
|
|
| 15 |
# -----------------------------
|
| 16 |
def load_prompts(folder="prompts"):
|
| 17 |
prompts = []
|
|
|
|
| 18 |
for file_path in glob.glob(os.path.join(folder, "*.txt")):
|
| 19 |
with open(file_path, "r") as f:
|
| 20 |
content = f.read().strip()
|
| 21 |
if content:
|
| 22 |
prompts.append(content)
|
| 23 |
-
|
|
|
|
| 24 |
|
| 25 |
-
prompts = load_prompts()
|
| 26 |
|
| 27 |
# -----------------------------
|
| 28 |
# Streamlit page config
|
| 29 |
# -----------------------------
|
| 30 |
-
st.set_page_config(page_title="
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 31 |
|
| 32 |
# -----------------------------
|
| 33 |
# Session state
|
|
@@ -38,6 +124,9 @@ if "chat_history" not in st.session_state:
|
|
| 38 |
if "input_value" not in st.session_state:
|
| 39 |
st.session_state.input_value = ""
|
| 40 |
|
|
|
|
|
|
|
|
|
|
| 41 |
# -----------------------------
|
| 42 |
# Function to fetch AI response
|
| 43 |
# -----------------------------
|
|
@@ -48,35 +137,32 @@ async def get_ai_response(prompt):
|
|
| 48 |
return result.final_output
|
| 49 |
|
| 50 |
# -----------------------------
|
| 51 |
-
# Sidebar
|
| 52 |
# -----------------------------
|
| 53 |
-
st.sidebar.title("
|
| 54 |
|
| 55 |
for idx, prompt_text in enumerate(prompts):
|
| 56 |
-
|
| 57 |
-
|
| 58 |
-
|
| 59 |
-
|
| 60 |
-
|
| 61 |
-
with st.sidebar.container():
|
| 62 |
-
# Show the truncated prompt as a button
|
| 63 |
-
if st.button(truncated, key=f"prompt_{idx}"):
|
| 64 |
-
# Add user message
|
| 65 |
-
st.session_state.chat_history.insert(0, {"role": "user", "message": prompt_text})
|
| 66 |
-
# Fetch AI response
|
| 67 |
-
response = asyncio.run(get_ai_response(prompt_text))
|
| 68 |
-
st.session_state.chat_history.insert(0, {"role": "assistant", "message": response})
|
| 69 |
-
# Show tooltip below (hover shows full prompt)
|
| 70 |
-
st.markdown(f"<span title='{prompt_text}' style='font-size:10px;color:gray;'>Hover to see full prompt</span>", unsafe_allow_html=True)
|
| 71 |
-
|
| 72 |
|
| 73 |
# -----------------------------
|
| 74 |
-
#
|
| 75 |
# -----------------------------
|
| 76 |
-
st.
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 77 |
|
| 78 |
# -----------------------------
|
| 79 |
-
# Chat input
|
| 80 |
# -----------------------------
|
| 81 |
with st.form(key="chat_form", clear_on_submit=True):
|
| 82 |
user_input = st.text_input(
|
|
@@ -87,49 +173,35 @@ with st.form(key="chat_form", clear_on_submit=True):
|
|
| 87 |
)
|
| 88 |
send_button = st.form_submit_button("Send")
|
| 89 |
|
| 90 |
-
|
| 91 |
-
|
| 92 |
-
|
| 93 |
st.session_state.chat_history.insert(0, {"role": "user", "message": message})
|
| 94 |
-
|
| 95 |
-
|
| 96 |
st.session_state.chat_history.insert(0, {"role": "assistant", "message": response})
|
| 97 |
-
st.session_state.input_value = "" #
|
|
|
|
| 98 |
|
| 99 |
# -----------------------------
|
| 100 |
-
#
|
| 101 |
# -----------------------------
|
| 102 |
if st.session_state.chat_history:
|
| 103 |
-
chat_style = """
|
| 104 |
-
<style>
|
| 105 |
-
.chat-container {
|
| 106 |
-
display: flex;
|
| 107 |
-
flex-direction: column; /* latest messages on top */
|
| 108 |
-
border: 1px solid #ccc;
|
| 109 |
-
padding: 10px;
|
| 110 |
-
border-radius: 8px;
|
| 111 |
-
background-color: #fafafa;
|
| 112 |
-
}
|
| 113 |
-
</style>
|
| 114 |
-
"""
|
| 115 |
-
st.markdown(chat_style, unsafe_allow_html=True)
|
| 116 |
-
|
| 117 |
chat_html = '<div class="chat-container">'
|
| 118 |
for chat in st.session_state.chat_history:
|
| 119 |
if chat["role"] == "user":
|
| 120 |
chat_html += (
|
| 121 |
-
f"<div style='display:flex;
|
| 122 |
-
f"<div
|
| 123 |
-
f"<span style='font-size:
|
| 124 |
f"</div>"
|
| 125 |
)
|
| 126 |
else:
|
| 127 |
chat_html += (
|
| 128 |
-
f"<div style='display:flex;
|
| 129 |
-
f"<span style='font-size:
|
| 130 |
-
f"<div
|
| 131 |
f"</div>"
|
| 132 |
)
|
| 133 |
chat_html += '</div>'
|
| 134 |
-
|
| 135 |
st.markdown(chat_html, unsafe_allow_html=True)
|
|
|
|
| 15 |
# -----------------------------
|
| 16 |
def load_prompts(folder="prompts"):
    """Load quick-prompt texts and display labels from *folder*.

    Each non-empty ``*.txt`` file contributes its stripped content as a
    prompt, plus a title-cased label derived from its filename
    (e.g. ``market_sentiment.txt`` -> ``"Market Sentiment"``).

    Args:
        folder (str): Directory to scan for prompt files.

    Returns:
        tuple[list[str], list[str]]: ``(prompts, prompt_labels)``,
        index-aligned lists.
    """
    prompts = []
    prompt_labels = []  # fixed spelling (was `prompt_lables`)
    # sorted(): glob order is OS-dependent; keep the sidebar deterministic.
    for file_path in sorted(glob.glob(os.path.join(folder, "*.txt"))):
        with open(file_path, "r", encoding="utf-8") as f:
            content = f.read().strip()
        if content:  # skip empty/whitespace-only files
            prompts.append(content)
            label = os.path.basename(file_path).replace("_", " ").replace(".txt", "").title()
            prompt_labels.append(label)
    return prompts, prompt_labels

prompts, prompt_labels = load_prompts()
|
| 28 |
|
| 29 |
# -----------------------------
|
| 30 |
# Streamlit page config
|
| 31 |
# -----------------------------
|
| 32 |
+
st.set_page_config(page_title="AI Chat", layout="wide")
|
| 33 |
+
|
| 34 |
+
# -----------------------------
|
| 35 |
+
# Custom CSS
|
| 36 |
+
# -----------------------------
|
| 37 |
+
st.markdown("""
|
| 38 |
+
<style>
|
| 39 |
+
.block-container {
|
| 40 |
+
padding-top: 0 !important;
|
| 41 |
+
margin-top: -3rem !important;
|
| 42 |
+
}
|
| 43 |
+
header[data-testid="stHeader"] {
|
| 44 |
+
display: none !important;
|
| 45 |
+
}
|
| 46 |
+
|
| 47 |
+
.hero-banner {
|
| 48 |
+
width: 100%;
|
| 49 |
+
background: linear-gradient(90deg, #1f1c2c 0%, #928DAB 100%);
|
| 50 |
+
color: white;
|
| 51 |
+
padding: 1.8rem 2rem;
|
| 52 |
+
border-radius: 0 0 12px 12px;
|
| 53 |
+
margin-bottom: 1rem;
|
| 54 |
+
display: flex;
|
| 55 |
+
align-items: center;
|
| 56 |
+
justify-content: flex-start;
|
| 57 |
+
box-shadow: 0 6px 16px rgba(0,0,0,0.3);
|
| 58 |
+
position: relative;
|
| 59 |
+
z-index: 5;
|
| 60 |
+
}
|
| 61 |
+
.hero-logo {
|
| 62 |
+
font-size: 3.2rem;
|
| 63 |
+
margin-right: 18px;
|
| 64 |
+
animation: pulse 2s infinite;
|
| 65 |
+
}
|
| 66 |
+
@keyframes pulse {
|
| 67 |
+
0% { transform: scale(1); opacity: 1; }
|
| 68 |
+
50% { transform: scale(1.1); opacity: 0.9; }
|
| 69 |
+
100% { transform: scale(1); opacity: 1; }
|
| 70 |
+
}
|
| 71 |
+
.hero-text {
|
| 72 |
+
font-size: 1.8rem;
|
| 73 |
+
font-weight: 700;
|
| 74 |
+
letter-spacing: 0.6px;
|
| 75 |
+
}
|
| 76 |
+
.hero-subtext {
|
| 77 |
+
font-size: 1rem;
|
| 78 |
+
opacity: 0.9;
|
| 79 |
+
margin-top: 0.25rem;
|
| 80 |
+
}
|
| 81 |
+
|
| 82 |
+
section[data-testid="stSidebar"] {
|
| 83 |
+
padding-top: 0.5rem !important;
|
| 84 |
+
}
|
| 85 |
+
|
| 86 |
+
.chat-container {
|
| 87 |
+
display: flex;
|
| 88 |
+
flex-direction: column;
|
| 89 |
+
border: 1px solid #ccc;
|
| 90 |
+
padding: 14px;
|
| 91 |
+
border-radius: 10px;
|
| 92 |
+
background-color: #fafafa;
|
| 93 |
+
height: 70vh;
|
| 94 |
+
overflow-y: auto;
|
| 95 |
+
}
|
| 96 |
+
|
| 97 |
+
.user-bubble {
|
| 98 |
+
background-color: #daf1fc;
|
| 99 |
+
border-radius: 12px;
|
| 100 |
+
padding: 10px 14px;
|
| 101 |
+
margin: 6px 0;
|
| 102 |
+
max-width: 70%;
|
| 103 |
+
word-wrap: break-word;
|
| 104 |
+
box-shadow: 0 1px 4px rgba(0,0,0,0.1);
|
| 105 |
+
}
|
| 106 |
+
.ai-bubble {
|
| 107 |
+
background-color: #f1f0f0;
|
| 108 |
+
border-radius: 12px;
|
| 109 |
+
padding: 10px 14px;
|
| 110 |
+
margin: 6px 0;
|
| 111 |
+
max-width: 70%;
|
| 112 |
+
word-wrap: break-word;
|
| 113 |
+
box-shadow: 0 1px 4px rgba(0,0,0,0.1);
|
| 114 |
+
}
|
| 115 |
+
</style>
|
| 116 |
+
""", unsafe_allow_html=True)
|
| 117 |
|
| 118 |
# -----------------------------
|
| 119 |
# Session state
|
|
|
|
| 124 |
if "input_value" not in st.session_state:
|
| 125 |
st.session_state.input_value = ""
|
| 126 |
|
| 127 |
+
if "send_triggered" not in st.session_state:
|
| 128 |
+
st.session_state.send_triggered = False # new flag
|
| 129 |
+
|
| 130 |
# -----------------------------
|
| 131 |
# Function to fetch AI response
|
| 132 |
# -----------------------------
|
|
|
|
| 137 |
return result.final_output
|
| 138 |
|
| 139 |
# -----------------------------
|
| 140 |
+
# Sidebar with prompts
|
| 141 |
# -----------------------------
|
| 142 |
+
st.sidebar.title("💡 Quick Prompts")
|
| 143 |
|
| 144 |
for idx, prompt_text in enumerate(prompts):
|
| 145 |
+
truncated = prompt_text[:157] + "..." if len(prompt_text) > 160 else prompt_text
|
| 146 |
+
if st.sidebar.button(prompt_labels[idx], key=f"prompt_{idx}", help=prompt_text):
|
| 147 |
+
# Instead of sending directly, load it into chatbox
|
| 148 |
+
st.session_state.input_value = prompt_text
|
| 149 |
+
st.session_state.send_triggered = True # simulate send button click
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 150 |
|
| 151 |
# -----------------------------
|
| 152 |
+
# Hero banner
|
| 153 |
# -----------------------------
|
| 154 |
+
st.markdown("""
|
| 155 |
+
<div class="hero-banner">
|
| 156 |
+
<span class="hero-logo">🤖</span>
|
| 157 |
+
<div>
|
| 158 |
+
<div class="hero-text">AI Chatbot</div>
|
| 159 |
+
<div class="hero-subtext">Your intelligent assistant for insights, trends, and strategy exploration.</div>
|
| 160 |
+
</div>
|
| 161 |
+
</div>
|
| 162 |
+
""", unsafe_allow_html=True)
|
| 163 |
|
| 164 |
# -----------------------------
|
| 165 |
+
# Chat input area
|
| 166 |
# -----------------------------
|
| 167 |
with st.form(key="chat_form", clear_on_submit=True):
|
| 168 |
user_input = st.text_input(
|
|
|
|
| 173 |
)
|
| 174 |
send_button = st.form_submit_button("Send")
|
| 175 |
|
| 176 |
+
# If either user manually clicks Send or a predefined prompt triggered it
|
| 177 |
+
if (send_button or st.session_state.send_triggered) and (user_input or st.session_state.input_value):
|
| 178 |
+
message = user_input.strip() if user_input else st.session_state.input_value.strip()
|
| 179 |
st.session_state.chat_history.insert(0, {"role": "user", "message": message})
|
| 180 |
+
with st.spinner("⏳ AI is thinking..."):
|
| 181 |
+
response = asyncio.run(get_ai_response(message))
|
| 182 |
st.session_state.chat_history.insert(0, {"role": "assistant", "message": response})
|
| 183 |
+
st.session_state.input_value = "" # clear after send
|
| 184 |
+
st.session_state.send_triggered = False
|
| 185 |
|
| 186 |
# -----------------------------
|
| 187 |
+
# Chat history display
|
| 188 |
# -----------------------------
|
| 189 |
if st.session_state.chat_history:
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 190 |
chat_html = '<div class="chat-container">'
|
| 191 |
for chat in st.session_state.chat_history:
|
| 192 |
if chat["role"] == "user":
|
| 193 |
chat_html += (
|
| 194 |
+
f"<div style='display:flex; justify-content:flex-end; align-items:center;'>"
|
| 195 |
+
f"<div class='user-bubble'>{chat['message']}</div>"
|
| 196 |
+
f"<span style='font-size:34px; margin-left:8px;'>👤</span>"
|
| 197 |
f"</div>"
|
| 198 |
)
|
| 199 |
else:
|
| 200 |
chat_html += (
|
| 201 |
+
f"<div style='display:flex; justify-content:flex-start; align-items:center;'>"
|
| 202 |
+
f"<span style='font-size:34px; margin-right:8px;'>🤖</span>"
|
| 203 |
+
f"<div class='ai-bubble'>{chat['message']}</div>"
|
| 204 |
f"</div>"
|
| 205 |
)
|
| 206 |
chat_html += '</div>'
|
|
|
|
| 207 |
st.markdown(chat_html, unsafe_allow_html=True)
|