Anshini committed on
Commit
882b438
Β·
verified Β·
1 Parent(s): 13cc31a

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +50 -25
app.py CHANGED
@@ -21,6 +21,7 @@ from langchain.prompts import PromptTemplate
21
  from langchain.chains import LLMChain
22
  from langchain_core.messages import AIMessage
23
  from typing import List
 
24
  # Load environment
25
  load_dotenv()
26
  # os.environ["TAVILY_API_KEY"] = os.getenv("TAVILY_API_KEY")
@@ -66,7 +67,10 @@ def generate_questions(state: State):
66
  "next": "wait_for_answers"
67
  }
68
 
69
-
 
 
 
70
  def handle_answers(state: State):
71
  full_input = state["input"] + "\n\n" + "\n".join(state["answers"])
72
  return {**state, "input": full_input}
@@ -291,26 +295,47 @@ def router(state):
291
  return "Generate_Code"
292
  else:
293
  return "AI_Assistance"
 
 
294
  builder = StateGraph(State)
295
 
296
- # Define All Nodes
297
  builder.add_node("LLM_Agent", agent_node)
 
298
  builder.add_node("Generate_Questions", generate_questions)
299
  builder.add_node("Handle_Answers", handle_answers)
 
300
  builder.add_node("Generate_Code", generate_code)
301
  builder.add_node("Code_Explainer", explain_code)
302
 
303
  # Set Entry Point
304
  builder.set_entry_point("LLM_Agent")
305
 
 
 
 
 
 
 
 
306
  # Define Flow
307
  builder.add_edge("LLM_Agent", "Generate_Questions")
308
- builder.add_edge("Generate_Questions", "Handle_Answers")
 
 
 
 
 
 
 
 
 
309
  builder.add_edge("Handle_Answers", "Generate_Code")
310
  builder.add_edge("Generate_Code", "Code_Explainer")
311
  builder.add_edge("Code_Explainer", END)
312
 
313
-
 
314
 
315
  graph = builder.compile(checkpointer=memory)
316
  # Streamlit UI setup
@@ -423,9 +448,9 @@ with st.container():
423
  # ================================
424
  # TOOL BUTTONS SECTION
425
  # ================================
426
- col1, col2, col3 = st.columns(3)
427
 
428
- user_prompt = st.session_state.get("latest_code", "") or user_input # fallback to user_input if needed
429
 
430
  with st.container():
431
  # if col1.button("βš™οΈ Run Python Code"):
@@ -435,23 +460,23 @@ with st.container():
435
  # st.success("βœ… Output:")
436
  # st.code(result, language="python")
437
  # else:
438
- # st.warning("Please enter Python code in the input box.")
439
-
440
- if col1.button("🌐 Web Search"):
441
- if user_prompt:
442
- with st.spinner("Searching the web..."):
443
- result = web_search.invoke({"query": user_prompt})
444
- st.success("πŸ”Ž Search Result:")
445
- st.write(result)
446
- else:
447
- st.warning("Please enter a search query.")
448
-
449
- if col2.button("🧠 Deep Think"):
450
- if user_prompt:
451
- with st.spinner("Thinking deeply..."):
452
- result = deep_think.invoke({"prompt": user_prompt})
453
- st.success("🧠 Reasoned Output:")
454
- st.write(result)
455
- else:
456
- st.warning("Please enter a prompt.")
457
 
 
21
  from langchain.chains import LLMChain
22
  from langchain_core.messages import AIMessage
23
  from typing import List
24
+ from langgraph.graph import StateGraph, END
25
  # Load environment
26
  load_dotenv()
27
  # os.environ["TAVILY_API_KEY"] = os.getenv("TAVILY_API_KEY")
 
67
  "next": "wait_for_answers"
68
  }
69
 
70
def wait_for_answers(state: State):
    """No-op graph node: hold position until the user has submitted answers.

    Returns the incoming state object unchanged; the surrounding graph's
    conditional edge decides when to leave this node.
    """
    return state
73
+
74
def handle_answers(state: "State"):
    """Merge the user's collected answers back into the working prompt.

    Appends each answer (newline-separated) after the original ``input``
    text, separated by a blank line, and returns a new state dict with the
    enriched ``input``. All other state keys are passed through unchanged.

    Robustness fix: the original indexed ``state["answers"]`` directly,
    raising ``KeyError`` when no answers were collected (e.g. the
    not-answered branch of the graph), and an empty answer list still
    appended a dangling blank line. Both cases now pass state through
    untouched.
    """
    # ``or []`` also normalizes an explicit ``answers=None``.
    answers = state.get("answers") or []
    if not answers:
        # Nothing to merge; keep the prompt exactly as it was.
        return state
    full_input = state["input"] + "\n\n" + "\n".join(answers)
    return {**state, "input": full_input}
 
295
  return "Generate_Code"
296
  else:
297
  return "AI_Assistance"
298
+
299
+ # Define your graph builder with the state schema
300
  builder = StateGraph(State)
301
 
302
+ # Add Nodes
303
  builder.add_node("LLM_Agent", agent_node)
304
+ builder.add_node("AI_Assistance", ai_assistance)
305
  builder.add_node("Generate_Questions", generate_questions)
306
  builder.add_node("Handle_Answers", handle_answers)
307
+ builder.add_node("Wait_For_Answers", wait_for_answers) # this must be defined
308
  builder.add_node("Generate_Code", generate_code)
309
  builder.add_node("Code_Explainer", explain_code)
310
 
311
  # Set Entry Point
312
  builder.set_entry_point("LLM_Agent")
313
 
314
+ # Define Conditional Function
315
def check_if_answered(state: "State") -> str:
    """Conditional-edge router run after question generation.

    Returns ``"answered"`` when the state carries a non-empty
    ``questions`` value, otherwise ``"not_answered"``; the graph maps
    these labels to the ``Handle_Answers`` / ``Wait_For_Answers`` nodes.

    Idiom fix: the original LBYL form
    ``"questions" in state and state["questions"]`` performs two lookups;
    ``dict.get`` expresses the same truthiness test in one.
    """
    return "answered" if state.get("questions") else "not_answered"
320
+
321
  # Define Flow
322
  builder.add_edge("LLM_Agent", "Generate_Questions")
323
+
324
+ builder.add_conditional_edges(
325
+ "Generate_Questions",
326
+ check_if_answered,
327
+ {
328
+ "answered": "Handle_Answers",
329
+ "not_answered": "Wait_For_Answers"
330
+ }
331
+ )
332
+
333
  builder.add_edge("Handle_Answers", "Generate_Code")
334
  builder.add_edge("Generate_Code", "Code_Explainer")
335
  builder.add_edge("Code_Explainer", END)
336
 
337
+ # Optionally: define what happens after waiting (if it's a loop)
338
+ builder.add_edge("Wait_For_Answers", "Generate_Questions") # retry loop
339
 
340
  graph = builder.compile(checkpointer=memory)
341
  # Streamlit UI setup
 
448
  # ================================
449
  # TOOL BUTTONS SECTION
450
  # ================================
451
+ # col1, col2, col3 = st.columns(3)
452
 
453
+ # user_prompt = st.session_state.get("latest_code", "") or user_input # fallback to user_input if needed
454
 
455
  with st.container():
456
  # if col1.button("βš™οΈ Run Python Code"):
 
460
  # st.success("βœ… Output:")
461
  # st.code(result, language="python")
462
  # else:
463
+ # # st.warning("Please enter Python code in the input box.")
464
+
465
+ # if col1.button("🌐 Web Search"):
466
+ # if user_prompt:
467
+ # with st.spinner("Searching the web..."):
468
+ # result = web_search.invoke({"query": user_prompt})
469
+ # st.success("πŸ”Ž Search Result:")
470
+ # st.write(result)
471
+ # else:
472
+ # st.warning("Please enter a search query.")
473
+
474
+ # if col2.button("🧠 Deep Think"):
475
+ # if user_prompt:
476
+ # with st.spinner("Thinking deeply..."):
477
+ # result = deep_think.invoke({"prompt": user_prompt})
478
+ # st.success("🧠 Reasoned Output:")
479
+ # st.write(result)
480
+ # else:
481
+ # st.warning("Please enter a prompt.")
482