Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
|
@@ -49,6 +49,18 @@ memory = MemorySaver()
|
|
| 49 |
def ai_assistance(state: State):
    """Run the code-generator chain on the conversation history.

    Returns the graph state update: the existing message list with the
    generator's reply message appended at the end.
    """
    reply = code_generator.invoke(state["messages"])
    return {"messages": state["messages"] + [reply]}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 52 |
|
| 53 |
import time
|
| 54 |
def generate_code(state: State):
|
|
@@ -184,7 +196,6 @@ def get_all_tools():
|
|
| 184 |
execute_python_code
|
| 185 |
]
|
| 186 |
|
| 187 |
-
|
| 188 |
def router(state):
|
| 189 |
user_input = state["input"].lower()
|
| 190 |
if "generate" in user_input:
|
|
@@ -304,16 +315,16 @@ col1, col2, col3 = st.columns(3)
|
|
| 304 |
user_prompt = st.session_state.get("latest_code", "") or user_input # fallback to user_input if needed
|
| 305 |
|
| 306 |
with st.container():
|
| 307 |
-
if col1.button("⚙️ Run Python Code"):
|
| 308 |
-
|
| 309 |
-
|
| 310 |
-
|
| 311 |
-
|
| 312 |
-
|
| 313 |
else:
|
| 314 |
st.warning("Please enter Python code in the input box.")
|
| 315 |
|
| 316 |
-
if
|
| 317 |
if user_prompt:
|
| 318 |
with st.spinner("Searching the web..."):
|
| 319 |
result = web_search.invoke({"query": user_prompt})
|
|
@@ -322,7 +333,7 @@ with st.container():
|
|
| 322 |
else:
|
| 323 |
st.warning("Please enter a search query.")
|
| 324 |
|
| 325 |
-
if
|
| 326 |
if user_prompt:
|
| 327 |
with st.spinner("Thinking deeply..."):
|
| 328 |
result = deep_think.invoke({"prompt": user_prompt})
|
|
|
|
| 49 |
def ai_assistance(state: State):
    """Run the code-generator chain on the conversation history.

    Returns the graph state update: the existing message list with the
    generator's reply message appended at the end.
    """
    reply = code_generator.invoke(state["messages"])
    return {"messages": state["messages"] + [reply]}
|
| 52 |
+
|
| 53 |
+
def agent_node(state: State):
    """Ask the LLM to break the latest user request into subtasks.

    Wraps the most recent message's text in a planning prompt, invokes a
    Gemini chat model with the module-level `tools` bound (so the model may
    emit tool calls), and appends the model's reply to the message list.

    Returns the graph state update: prior messages plus the model reply.
    """
    # Use your LLM here (e.g., Together, OpenAI, etc.)
    model = ChatGoogleGenerativeAI(model="gemini-2.0-flash-001").bind_tools(tools)

    follow_up_prompt = "Break down this task into subtasks and ask follow-up questions if needed:\n\n"
    # NOTE(review): assumes the last message exposes a string `.content`;
    # a tool/multimodal message may carry a list here — confirm upstream.
    last_user_msg = state["messages"][-1].content
    full_prompt = follow_up_prompt + last_user_msg

    response = model.invoke(full_prompt)

    # BUG FIX: `invoke()` on a chat model already returns an AIMessage.
    # The original wrapped it as AIMessage(content=response), which stuffs a
    # message OBJECT into the `content` field and breaks any downstream code
    # that expects `content` to be text. Append the message itself instead.
    return {"messages": state["messages"] + [response]}
|
| 63 |
+
|
| 64 |
|
| 65 |
import time
|
| 66 |
def generate_code(state: State):
|
|
|
|
| 196 |
execute_python_code
|
| 197 |
]
|
| 198 |
|
|
|
|
| 199 |
def router(state):
|
| 200 |
user_input = state["input"].lower()
|
| 201 |
if "generate" in user_input:
|
|
|
|
| 315 |
user_prompt = st.session_state.get("latest_code", "") or user_input # fallback to user_input if needed
|
| 316 |
|
| 317 |
with st.container():
|
| 318 |
+
# if col1.button("⚙️ Run Python Code"):
|
| 319 |
+
# if user_prompt:
|
| 320 |
+
# with st.spinner("Executing your Python code..."):
|
| 321 |
+
# result = execute_python_code.invoke({"code": user_prompt})
|
| 322 |
+
# st.success("✅ Output:")
|
| 323 |
+
# st.code(result, language="python")
|
| 324 |
else:
|
| 325 |
st.warning("Please enter Python code in the input box.")
|
| 326 |
|
| 327 |
+
if col1.button("🌐 Web Search"):
|
| 328 |
if user_prompt:
|
| 329 |
with st.spinner("Searching the web..."):
|
| 330 |
result = web_search.invoke({"query": user_prompt})
|
|
|
|
| 333 |
else:
|
| 334 |
st.warning("Please enter a search query.")
|
| 335 |
|
| 336 |
+
if col2.button("🧠 Deep Think"):
|
| 337 |
if user_prompt:
|
| 338 |
with st.spinner("Thinking deeply..."):
|
| 339 |
result = deep_think.invoke({"prompt": user_prompt})
|