nikhmr1235 committed on
Commit
cea9d6a
·
verified ·
1 Parent(s): 8177102

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +6 -30
app.py CHANGED
@@ -1,6 +1,5 @@
1
  import gradio as gr
2
  from langgraph.graph import StateGraph, END
3
- # We are using MemorySaver now
4
  from langgraph.checkpoint.memory import MemorySaver
5
  import operator
6
  from typing import TypedDict, Annotated, Optional
@@ -21,8 +20,6 @@ def greeting_node(state: GraphState) -> GraphState:
21
  print(greeting)
22
  return {"greeting_message": greeting, "current_node": "greeting"}
23
 
24
- # IMPORTANT: This node will now run *after* human input is in the state.
25
- # It does NOT ask for input.
26
  def human_input_node(state: GraphState) -> GraphState:
27
  """
28
  Node 2: This node processes the human input that was added to the state
@@ -68,30 +65,23 @@ def start_graph(thread_id_state):
68
  new_thread_id = str(uuid.uuid4())
69
  print(f"\n--- Starting New Graph Execution with thread_id: {new_thread_id} ---")
70
 
71
- # In-memory saver does not need explicit drop for new UUIDs, but good for resetting if reusing IDs
72
- # try:
73
- # global_memory_saver.drop(config={"configurable": {"thread_id": new_thread_id}})
74
- # except Exception as e:
75
- # print(f"Could not drop previous state for {new_thread_id}: {e}")
76
-
77
  try:
78
- # Initial run: Graph executes greeting_node, then interrupts BEFORE human_input_node.
79
- # We pass None as input, as the state is managed by the graph's execution up to the breakpoint.
80
- for s in global_graph.stream(None, {"configurable": {"thread_id": new_thread_id}}):
81
  if "__end__" in s:
82
  break
83
  elif "__interrupt__" in s:
84
  print(f"Graph interrupted BEFORE {s.get('__interrupt__', 'Unknown')} node.")
85
  break
86
  else:
87
- pass # You can print s here to see node outputs during initial run
88
 
89
  current_state_snapshot = global_graph.get_state({"configurable": {"thread_id": new_thread_id}})
90
 
91
  output_message = current_state_snapshot.values.get("greeting_message", "No greeting yet.")
92
  output_message += "\n\n" + "Please type your response in the 'Your Input' box and click 'Resume Graph'."
93
 
94
- # Enable human input textbox and resume button, disable start button
95
  return (output_message, gr.update(interactive=True), gr.update(interactive=True), gr.update(interactive=False), new_thread_id)
96
 
97
  except Exception as e:
@@ -103,38 +93,24 @@ def resume_graph(human_input_from_ui: str, thread_id_state):
103
  print(f"Human input received from UI: {human_input_from_ui}")
104
 
105
  try:
106
- # --- CRITICAL CHANGES FOR RESUMPTION WITH INTERRUPT_BEFORE ---
107
- # 1. Get the current state from the checkpointer.
108
  current_state_snapshot = global_graph.get_state({"configurable": {"thread_id": thread_id_state}})
109
-
110
- # 2. Extract the current values dictionary from the snapshot.
111
  current_state_values = current_state_snapshot.values
112
 
113
- # 3. Manually update the 'human_input' field in this dictionary.
114
- # The operator.add on Annotated[Optional[str], operator.add] will implicitly
115
- # handle combining if it was a list or similar, but for a simple string
116
- # it typically means setting the value.
117
  current_state_values["human_input"] = human_input_from_ui
118
 
119
- # 4. Save this modified state back to the checkpointer.
120
- # This is how the human's input is "injected" into the graph's memory.
121
  global_memory_saver.put_state(current_state_values, {"configurable": {"thread_id": thread_id_state}})
122
 
123
- # 5. Resume the graph with None as input.
124
- # The graph will now load the state (which includes the injected human_input)
125
- # and proceed to execute the 'human_input_interrupt' node (which it was paused before)
126
- # and subsequent nodes.
127
  for s in global_graph.stream(None, {"configurable": {"thread_id": thread_id_state}}):
128
  if "__end__" in s:
129
  break
130
  else:
131
- pass # You can print s here to see node outputs during resumption
132
 
133
  final_state_snapshot = global_graph.get_state({"configurable": {"thread_id": thread_id_state}})
134
  final_state_values = final_state_snapshot.values
135
 
136
  final_message = final_state_values.get("final_response", "Graph finished without final response.")
137
- # Disable input, resume button, enable start button for new conversation
138
  return (final_message, gr.update(interactive=False), gr.update(interactive=False), gr.update(interactive=True), thread_id_state)
139
 
140
  except Exception as e:
 
1
  import gradio as gr
2
  from langgraph.graph import StateGraph, END
 
3
  from langgraph.checkpoint.memory import MemorySaver
4
  import operator
5
  from typing import TypedDict, Annotated, Optional
 
20
  print(greeting)
21
  return {"greeting_message": greeting, "current_node": "greeting"}
22
 
 
 
23
  def human_input_node(state: GraphState) -> GraphState:
24
  """
25
  Node 2: This node processes the human input that was added to the state
 
65
  new_thread_id = str(uuid.uuid4())
66
  print(f"\n--- Starting New Graph Execution with thread_id: {new_thread_id} ---")
67
 
 
 
 
 
 
 
68
  try:
69
+ # CRITICAL FIX: Pass an empty dictionary {} as input for the first stream call.
70
+ # This tells LangGraph to start from the entry point with an initial empty state.
71
+ for s in global_graph.stream({}, {"configurable": {"thread_id": new_thread_id}}):
72
  if "__end__" in s:
73
  break
74
  elif "__interrupt__" in s:
75
  print(f"Graph interrupted BEFORE {s.get('__interrupt__', 'Unknown')} node.")
76
  break
77
  else:
78
+ pass
79
 
80
  current_state_snapshot = global_graph.get_state({"configurable": {"thread_id": new_thread_id}})
81
 
82
  output_message = current_state_snapshot.values.get("greeting_message", "No greeting yet.")
83
  output_message += "\n\n" + "Please type your response in the 'Your Input' box and click 'Resume Graph'."
84
 
 
85
  return (output_message, gr.update(interactive=True), gr.update(interactive=True), gr.update(interactive=False), new_thread_id)
86
 
87
  except Exception as e:
 
93
  print(f"Human input received from UI: {human_input_from_ui}")
94
 
95
  try:
 
 
96
  current_state_snapshot = global_graph.get_state({"configurable": {"thread_id": thread_id_state}})
 
 
97
  current_state_values = current_state_snapshot.values
98
 
 
 
 
 
99
  current_state_values["human_input"] = human_input_from_ui
100
 
 
 
101
  global_memory_saver.put_state(current_state_values, {"configurable": {"thread_id": thread_id_state}})
102
 
103
+ # Resume the graph with None as input after manually updating the state.
 
 
 
104
  for s in global_graph.stream(None, {"configurable": {"thread_id": thread_id_state}}):
105
  if "__end__" in s:
106
  break
107
  else:
108
+ pass
109
 
110
  final_state_snapshot = global_graph.get_state({"configurable": {"thread_id": thread_id_state}})
111
  final_state_values = final_state_snapshot.values
112
 
113
  final_message = final_state_values.get("final_response", "Graph finished without final response.")
 
114
  return (final_message, gr.update(interactive=False), gr.update(interactive=False), gr.update(interactive=True), thread_id_state)
115
 
116
  except Exception as e: