nikhmr1235 committed on
Commit
8177102
·
verified ·
1 Parent(s): f46b2b1

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +47 -34
app.py CHANGED
@@ -1,6 +1,6 @@
1
  import gradio as gr
2
  from langgraph.graph import StateGraph, END
3
- from langgraph.checkpoint.sqlite import SqliteSaver
4
  from langgraph.checkpoint.memory import MemorySaver
5
  import operator
6
  from typing import TypedDict, Annotated, Optional
@@ -21,22 +21,20 @@ def greeting_node(state: GraphState) -> GraphState:
21
  print(greeting)
22
  return {"greeting_message": greeting, "current_node": "greeting"}
23
 
24
- # IMPORTANT CHANGE HERE: Remove input() from this node!
 
25
  def human_input_node(state: GraphState) -> GraphState:
26
  """
27
- Node 2: This node now simply acts as the interruption point.
28
- The human input will be provided by the Gradio UI *after* this node runs and interrupts.
29
  """
30
- print("\n✋ Human Input Node Executed (This is where graph will interrupt AFTER running this node):")
31
- # This node returns its current_node state. The actual human_input will be added
32
- # to the state during the RESUME phase from the Gradio UI.
33
- # Since we are using 'interrupt_after' and human_input_node doesn't take input directly,
34
- # the 'human_input' will only appear in the state after 'resume_graph' sends it.
35
- return {"current_node": "human_input_interrupt"}
36
-
37
 
38
  def human_response_display_node(state: GraphState) -> GraphState:
39
- human_response = state.get("human_input", "No human input received.")
40
  final_message = f"You said: '{human_response}'. Thank you for your input!"
41
  print("\n✅ Human Response Display Node Executed:")
42
  print(final_message)
@@ -45,23 +43,22 @@ def human_response_display_node(state: GraphState) -> GraphState:
45
  # --- 3. Build the Graph ---
46
  builder = StateGraph(GraphState)
47
  builder.add_node("greeting", greeting_node)
48
- builder.add_node("human_input_interrupt", human_input_node)
49
  builder.add_node("human_response_display", human_response_display_node)
50
 
51
  builder.set_entry_point("greeting")
52
- builder.add_edge("greeting", "human_input_interrupt")
53
  builder.add_edge("human_response_display", END)
54
 
55
  # --- Checkpointer and Graph Compilation ---
56
- #SQLITE_DB_PATH = "langgraph_checkpoints.sqlite"
57
- #global_memory_saver = SqliteSaver.from_conn_string(SQLITE_DB_PATH)
58
  global_memory_saver = MemorySaver()
59
 
60
  global_graph = builder.compile(
61
  checkpointer=global_memory_saver,
62
- # Keep interrupt_after for human_input_interrupt.
63
- # This means human_input_node runs, then graph interrupts.
64
- interrupt_after=["human_input_interrupt"]
65
  )
66
 
67
  # --- Gradio UI Logic ---
@@ -71,25 +68,23 @@ def start_graph(thread_id_state):
71
  new_thread_id = str(uuid.uuid4())
72
  print(f"\n--- Starting New Graph Execution with thread_id: {new_thread_id} ---")
73
 
74
- # You might want to explicitly drop previous thread state if it exists for this ID
75
- # (Uncomment if you want to explicitly clear old data for a potentially re-used UUID,
76
- # though with uuid4 it's highly unlikely)
77
  # try:
78
  # global_memory_saver.drop(config={"configurable": {"thread_id": new_thread_id}})
79
  # except Exception as e:
80
  # print(f"Could not drop previous state for {new_thread_id}: {e}")
81
 
82
  try:
83
- # Pass an empty input for the first run.
84
- # The graph will run greeting_node, then human_input_node, then interrupt.
85
- for s in global_graph.stream({"greeting_message": ""}, {"configurable": {"thread_id": new_thread_id}}):
86
  if "__end__" in s:
87
  break
88
  elif "__interrupt__" in s:
89
- print(f"Graph interrupted at {s.get('__interrupt__', 'Unknown')}")
90
  break
91
  else:
92
- pass
93
 
94
  current_state_snapshot = global_graph.get_state({"configurable": {"thread_id": new_thread_id}})
95
 
@@ -108,13 +103,32 @@ def resume_graph(human_input_from_ui: str, thread_id_state):
108
  print(f"Human input received from UI: {human_input_from_ui}")
109
 
110
  try:
111
- # Pass the human_input_from_ui directly to the graph stream.
112
- # This input will be merged into the graph's state when it resumes.
113
- for s in global_graph.stream({"human_input": human_input_from_ui}, {"configurable": {"thread_id": thread_id_state}}):
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
114
  if "__end__" in s:
115
  break
116
  else:
117
- pass
118
 
119
  final_state_snapshot = global_graph.get_state({"configurable": {"thread_id": thread_id_state}})
120
  final_state_values = final_state_snapshot.values
@@ -129,11 +143,10 @@ def resume_graph(human_input_from_ui: str, thread_id_state):
129
 
130
  # Gradio Interface setup
131
  with gr.Blocks() as demo:
132
- gr.Markdown("# LangGraph Human Approval Demo (Persistent on Hugging Face)")
133
- gr.Markdown(f"Graph state will be saved persistently in MEMORY.")
134
 
135
  output_textbox = gr.Textbox(label="AI Assistant Output", lines=5, interactive=False)
136
- # The human input now comes ONLY from this textbox
137
  human_input_textbox = gr.Textbox(label="Your Input", placeholder="Type your response here...", interactive=False)
138
 
139
  thread_id_state = gr.State("")
 
1
  import gradio as gr
2
  from langgraph.graph import StateGraph, END
3
+ # We are using MemorySaver now
4
  from langgraph.checkpoint.memory import MemorySaver
5
  import operator
6
  from typing import TypedDict, Annotated, Optional
 
21
  print(greeting)
22
  return {"greeting_message": greeting, "current_node": "greeting"}
23
 
24
+ # IMPORTANT: This node will now run *after* human input is in the state.
25
+ # It does NOT ask for input.
26
def human_input_node(state: GraphState) -> GraphState:
    """
    Node 2: Runs after the graph resumes from the interrupt.

    Expects 'human_input' to already be present in the graph state
    (injected from the Gradio UI during the resume phase before this
    node executes); this node does NOT prompt for input itself.
    """
    # Fall back to a diagnostic message if the input was never injected.
    received = state.get(
        "human_input",
        "No human input found in state when human_input_node ran.",
    )
    print(f"\n✋ Human Input Node Executed (Processing input: '{received}'):")
    # Validation or further processing of the received input could go here.
    return {"current_node": "human_input_processed"}
 
 
 
35
 
36
  def human_response_display_node(state: GraphState) -> GraphState:
37
+ human_response = state.get("human_input", "No human input received for final display.")
38
  final_message = f"You said: '{human_response}'. Thank you for your input!"
39
  print("\n✅ Human Response Display Node Executed:")
40
  print(final_message)
 
43
  # --- 3. Build the Graph ---
44
  builder = StateGraph(GraphState)
45
  builder.add_node("greeting", greeting_node)
46
+ builder.add_node("human_input_interrupt", human_input_node) # Node name remains for clarity
47
  builder.add_node("human_response_display", human_response_display_node)
48
 
49
  builder.set_entry_point("greeting")
50
+ builder.add_edge("greeting", "human_input_interrupt") # Still connects normally
51
  builder.add_edge("human_response_display", END)
52
 
53
  # --- Checkpointer and Graph Compilation ---
54
+ # Using MemorySaver as requested
 
55
  global_memory_saver = MemorySaver()
56
 
57
  global_graph = builder.compile(
58
  checkpointer=global_memory_saver,
59
+ # CRITICAL CHANGE: Interrupt BEFORE human_input_interrupt
60
+ # This means the graph will pause *before* executing the human_input_node
61
+ interrupt_before=["human_input_interrupt"]
62
  )
63
 
64
  # --- Gradio UI Logic ---
 
68
  new_thread_id = str(uuid.uuid4())
69
  print(f"\n--- Starting New Graph Execution with thread_id: {new_thread_id} ---")
70
 
71
+ # In-memory saver does not need explicit drop for new UUIDs, but good for resetting if reusing IDs
 
 
72
  # try:
73
  # global_memory_saver.drop(config={"configurable": {"thread_id": new_thread_id}})
74
  # except Exception as e:
75
  # print(f"Could not drop previous state for {new_thread_id}: {e}")
76
 
77
  try:
78
+ # Initial run: Graph executes greeting_node, then interrupts BEFORE human_input_node.
79
+ # We pass None as input, as the state is managed by the graph's execution up to the breakpoint.
80
+ for s in global_graph.stream(None, {"configurable": {"thread_id": new_thread_id}}):
81
  if "__end__" in s:
82
  break
83
  elif "__interrupt__" in s:
84
+ print(f"Graph interrupted BEFORE {s.get('__interrupt__', 'Unknown')} node.")
85
  break
86
  else:
87
+ pass # You can print s here to see node outputs during initial run
88
 
89
  current_state_snapshot = global_graph.get_state({"configurable": {"thread_id": new_thread_id}})
90
 
 
103
  print(f"Human input received from UI: {human_input_from_ui}")
104
 
105
  try:
106
+ # --- CRITICAL CHANGES FOR RESUMPTION WITH INTERRUPT_BEFORE ---
107
+ # 1. Get the current state from the checkpointer.
108
+ current_state_snapshot = global_graph.get_state({"configurable": {"thread_id": thread_id_state}})
109
+
110
+ # 2. Extract the current values dictionary from the snapshot.
111
+ current_state_values = current_state_snapshot.values
112
+
113
+ # 3. Manually update the 'human_input' field in this dictionary.
114
+ # The operator.add on Annotated[Optional[str], operator.add] will implicitly
115
+ # handle combining if it was a list or similar, but for a simple string
116
+ # it typically means setting the value.
117
+ current_state_values["human_input"] = human_input_from_ui
118
+
119
+ # 4. Save this modified state back to the checkpointer.
120
+ # This is how the human's input is "injected" into the graph's memory.
121
+ global_memory_saver.put_state(current_state_values, {"configurable": {"thread_id": thread_id_state}})
122
+
123
+ # 5. Resume the graph with None as input.
124
+ # The graph will now load the state (which includes the injected human_input)
125
+ # and proceed to execute the 'human_input_interrupt' node (which it was paused before)
126
+ # and subsequent nodes.
127
+ for s in global_graph.stream(None, {"configurable": {"thread_id": thread_id_state}}):
128
  if "__end__" in s:
129
  break
130
  else:
131
+ pass # You can print s here to see node outputs during resumption
132
 
133
  final_state_snapshot = global_graph.get_state({"configurable": {"thread_id": thread_id_state}})
134
  final_state_values = final_state_snapshot.values
 
143
 
144
  # Gradio Interface setup
145
  with gr.Blocks() as demo:
146
+ gr.Markdown("# LangGraph Human-in-the-Loop Demo (MemorySaver)")
147
+ gr.Markdown("Graph state will be saved persistently in **memory** for the current session. Click 'Start Conversation' to begin. Input is taken via Gradio UI.")
148
 
149
  output_textbox = gr.Textbox(label="AI Assistant Output", lines=5, interactive=False)
 
150
  human_input_textbox = gr.Textbox(label="Your Input", placeholder="Type your response here...", interactive=False)
151
 
152
  thread_id_state = gr.State("")