Anshini committed on
Commit
8a1a0f0
·
verified ·
1 Parent(s): 2002f39

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +57 -13
app.py CHANGED
@@ -37,6 +37,8 @@ class State(TypedDict):
37
  answers:List[str]
38
  code : str
39
  explanation:str
 
 
40
  # LLM
41
  code_generator = Together(
42
  model="deepseek-ai/DeepSeek-R1-Distill-Llama-70B-free",
@@ -298,23 +300,24 @@ def router(state):
298
  builder = StateGraph(State)
299
 
300
  # Define All Nodes
301
- builder.add_node("AI_Assistance", ai_assistance)
302
  builder.add_node("Generate_Questions", generate_questions)
303
  builder.add_node("Handle_Answers", handle_answers)
304
  builder.add_node("Generate_Code", generate_code)
305
  builder.add_node("Code_Explainer", explain_code)
306
 
307
  # Set Entry Point
308
- builder.set_entry_point("AI_Assistance")
309
 
310
  # Define Flow
311
- builder.add_edge("AI_Assistance", "Generate_Questions")
312
  builder.add_edge("Generate_Questions", "Handle_Answers")
313
  builder.add_edge("Handle_Answers", "Generate_Code")
314
  builder.add_edge("Generate_Code", "Code_Explainer")
315
  builder.add_edge("Code_Explainer", END)
316
 
317
 
 
318
  graph = builder.compile(checkpointer=memory)
319
  # Streamlit UI setup
320
  st.set_page_config(page_title="MitraVerse", layout="wide")
@@ -374,36 +377,77 @@ for msg in st.session_state.chat_history:
374
  st.markdown(f"<div class='stChatMessage {role}'>{msg.content}</div>", unsafe_allow_html=True)
375
 
376
  with st.container():
 
377
  with st.form("chat_form", clear_on_submit=True):
378
- st.markdown('<div id="floating-container">', unsafe_allow_html=True)
379
- st.markdown('</div>', unsafe_allow_html=True)
380
-
381
  user_input = st.text_input("Ask me", label_visibility="collapsed", placeholder="Ask me Anything")
382
  submitted = st.form_submit_button(label="Send")
383
-
384
  if submitted and user_input:
385
  st.session_state.chat_history.append(HumanMessage(content=user_input))
386
-
387
  config = {"configurable": {"thread_id": st.session_state.thread_id}}
388
  state_input = {
389
  "messages": st.session_state.chat_history,
390
  "input": user_input,
391
  }
392
-
393
  result = graph.invoke(state_input, config=config)
394
-
 
 
 
 
 
 
395
  if result.get("code"):
396
  st.session_state.latest_code = result["code"]
397
  st.session_state.chat_history.append(
398
  AIMessage(content="**💻 Generated Code:**\n\n```python\n" + result["code"] + "\n```")
399
- )
 
400
  if result.get("explanation"):
401
  st.session_state.latest_explanation = result["explanation"]
402
  st.session_state.chat_history.append(
403
  AIMessage(content="**🔍 Code Explanation:**\n\n```\n" + result["explanation"] + "\n```")
404
- )
405
- elif result.get("messages"):
 
 
 
 
 
 
406
  st.session_state.chat_history.append(result["messages"][-1])
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
407
  st.rerun()
408
  st.markdown("<script>window.scrollTo(0, document.body.scrollHeight);</script>", unsafe_allow_html=True)
409
 
 
37
  answers:List[str]
38
  code : str
39
  explanation:str
40
+ subtasks: List[str]
41
+ follow_up_questions: List[str]
42
  # LLM
43
  code_generator = Together(
44
  model="deepseek-ai/DeepSeek-R1-Distill-Llama-70B-free",
 
300
  builder = StateGraph(State)
301
 
302
  # Define All Nodes
303
+ builder.add_node("LLM_Agent", agent_node)
304
  builder.add_node("Generate_Questions", generate_questions)
305
  builder.add_node("Handle_Answers", handle_answers)
306
  builder.add_node("Generate_Code", generate_code)
307
  builder.add_node("Code_Explainer", explain_code)
308
 
309
  # Set Entry Point
310
+ builder.set_entry_point("LLM_Agent")
311
 
312
  # Define Flow
313
+ builder.add_edge("LLM_Agent", "Generate_Questions")
314
  builder.add_edge("Generate_Questions", "Handle_Answers")
315
  builder.add_edge("Handle_Answers", "Generate_Code")
316
  builder.add_edge("Generate_Code", "Code_Explainer")
317
  builder.add_edge("Code_Explainer", END)
318
 
319
 
320
+
321
  graph = builder.compile(checkpointer=memory)
322
  # Streamlit UI setup
323
  st.set_page_config(page_title="MitraVerse", layout="wide")
 
377
  st.markdown(f"<div class='stChatMessage {role}'>{msg.content}</div>", unsafe_allow_html=True)
378
 
379
  with st.container():
380
+ # First Form: Ask Question
381
  with st.form("chat_form", clear_on_submit=True):
 
 
 
382
  user_input = st.text_input("Ask me", label_visibility="collapsed", placeholder="Ask me Anything")
383
  submitted = st.form_submit_button(label="Send")
384
+
385
  if submitted and user_input:
386
  st.session_state.chat_history.append(HumanMessage(content=user_input))
 
387
  config = {"configurable": {"thread_id": st.session_state.thread_id}}
388
  state_input = {
389
  "messages": st.session_state.chat_history,
390
  "input": user_input,
391
  }
392
+
393
  result = graph.invoke(state_input, config=config)
394
+ st.session_state.last_result = result # store temporarily
395
+
396
+ if result.get("questions"):
397
+ st.session_state.pending_questions = result["questions"]
398
+ else:
399
+ st.session_state.pending_questions = []
400
+
401
  if result.get("code"):
402
  st.session_state.latest_code = result["code"]
403
  st.session_state.chat_history.append(
404
  AIMessage(content="**💻 Generated Code:**\n\n```python\n" + result["code"] + "\n```")
405
+ )
406
+
407
  if result.get("explanation"):
408
  st.session_state.latest_explanation = result["explanation"]
409
  st.session_state.chat_history.append(
410
  AIMessage(content="**🔍 Code Explanation:**\n\n```\n" + result["explanation"] + "\n```")
411
+ )
412
+
413
+ if result.get("subtasks"):
414
+ st.markdown("### 🧩 Subtasks:")
415
+ for s in result["subtasks"]:
416
+ st.markdown(f"- {s}")
417
+
418
+ if result.get("messages"):
419
  st.session_state.chat_history.append(result["messages"][-1])
420
+
421
+ # Second Form: Answer Follow-up Questions
422
+ if "pending_questions" in st.session_state and st.session_state.pending_questions:
423
+ with st.form("answer_form"):
424
+ st.markdown("### 🤖 Please answer the following:")
425
+ for q in st.session_state.pending_questions:
426
+ st.text_input(q, key=q)
427
+ submit_answers = st.form_submit_button("Submit Answers")
428
+
429
+ if submit_answers:
430
+ answers = [st.session_state[q] for q in st.session_state.pending_questions]
431
+ new_input = {
432
+ "messages": st.session_state.chat_history,
433
+ "answers": answers,
434
+ "questions": [],
435
+ }
436
+ result = graph.invoke(new_input, config={"configurable": {"thread_id": st.session_state.thread_id}})
437
+ st.session_state.pending_questions = []
438
+
439
+ if result.get("code"):
440
+ st.session_state.latest_code = result["code"]
441
+ st.session_state.chat_history.append(
442
+ AIMessage(content="**πŸ’» Generated Code:**\n\n```python\n" + result["code"] + "\n```")
443
+ )
444
+
445
+ if result.get("explanation"):
446
+ st.session_state.latest_explanation = result["explanation"]
447
+ st.session_state.chat_history.append(
448
+ AIMessage(content="**πŸ” Code Explanation:**\n\n```\n" + result["explanation"] + "\n```")
449
+ )
450
+
451
  st.rerun()
452
  st.markdown("<script>window.scrollTo(0, document.body.scrollHeight);</script>", unsafe_allow_html=True)
453