vikramvasudevan committed on
Commit
7c08cff
·
verified ·
1 Parent(s): eec1daf

Upload folder using huggingface_hub

Browse files
Files changed (1) hide show
  1. app2.py +19 -17
app2.py CHANGED
@@ -10,21 +10,21 @@ class ChatState(TypedDict):
10
  messages: List[dict]
11
 
12
 
 
 
 
 
 
 
13
  # 2. Respond Node — stream word by word, then ⏳
14
  async def respond_node(state: ChatState) -> AsyncIterator[ChatState]:
15
  messages = state["messages"]
16
- user_msg = messages[-1]["content"]
17
- reply = f"respond function: {user_msg}"
18
- partial = ""
19
-
20
- for word in reply.split():
21
- partial += word + " "
22
- await asyncio.sleep(0.3)
23
- yield {"messages": messages + [{"role": "assistant", "content": partial.strip()}]}
24
-
25
- # Add ⏳ and yield
26
- final_msg = partial.strip()
27
- messages.append({"role": "assistant", "content": final_msg + " ⏳"})
28
  yield {"messages": messages}
29
 
30
 
@@ -34,17 +34,19 @@ async def post_process_node(state: ChatState) -> ChatState:
34
  await asyncio.sleep(1.5)
35
 
36
  if messages[-1]["role"] == "assistant" and "⏳" in messages[-1]["content"]:
37
- messages[-1]["content"] = messages[-1]["content"].replace("⏳", "✅")
38
 
39
  return {"messages": messages}
40
 
41
 
42
  # 4. Define the graph
43
  graph_builder = StateGraph(ChatState)
 
44
  graph_builder.add_node("respond", respond_node)
45
  graph_builder.add_node("post_process", post_process_node)
46
 
47
- graph_builder.set_entry_point("respond")
 
48
  graph_builder.add_edge("respond", "post_process")
49
  graph_builder.add_edge("post_process", END)
50
 
@@ -54,7 +56,7 @@ graph = graph_builder.compile()
54
  # 5. Gradio streaming handler
55
  async def bot_respond_streaming(message: str, history: List[dict]) -> AsyncIterator:
56
  state = {"messages": (history or []) + [{"role": "user", "content": message}]}
57
-
58
  async for step in graph.astream(state):
59
  # LangGraph yields steps like {"respond": ChatState, "post_process": ChatState}
60
  for node_output in step.values():
@@ -67,10 +69,10 @@ with gr.Blocks() as demo:
67
  with gr.Row():
68
  textbox = gr.Textbox(placeholder="Ask something...", scale=8, container=False)
69
  send_btn = gr.Button("Send", scale=1)
70
-
71
  def user_submit(message, history):
72
  return "", history + [{"role": "user", "content": message}]
73
-
74
  send_event = send_btn.click(user_submit, [textbox, chatbot], [textbox, chatbot])
75
  send_event.then(bot_respond_streaming, [textbox, chatbot], chatbot)
76
 
 
10
  messages: List[dict]
11
 
12
 
13
+ def start_node(state: ChatState):
14
+ messages = state["messages"] + [{"role": "assistant", "content": "start node: Thinking ...⏳"}]
15
+ return {"messages": messages}
16
+
17
+
18
+ # 2. Respond Node — stream word by word, then ⏳
19
  # 2. Respond Node — stream word by word, then ⏳
20
  async def respond_node(state: ChatState) -> AsyncIterator[ChatState]:
21
  messages = state["messages"]
22
+ print(messages)
23
+
24
+ await asyncio.sleep(1) # adjust timing as needed
25
+ thinking_msg = "respond_node: give me some more time. please"
26
+ await asyncio.sleep(0.2) # adjust timing as needed
27
+ messages[-1]["content"] = thinking_msg + " "
 
 
 
 
 
 
28
  yield {"messages": messages}
29
 
30
 
 
34
  await asyncio.sleep(1.5)
35
 
36
  if messages[-1]["role"] == "assistant" and "⏳" in messages[-1]["content"]:
37
+ messages[-1]["content"] = "post_process_node: here is your final result!" + "✅"
38
 
39
  return {"messages": messages}
40
 
41
 
42
  # 4. Define the graph
43
  graph_builder = StateGraph(ChatState)
44
+ graph_builder.add_node("start", start_node)
45
  graph_builder.add_node("respond", respond_node)
46
  graph_builder.add_node("post_process", post_process_node)
47
 
48
+ graph_builder.set_entry_point("start")
49
+ graph_builder.add_edge("start", "respond")
50
  graph_builder.add_edge("respond", "post_process")
51
  graph_builder.add_edge("post_process", END)
52
 
 
56
  # 5. Gradio streaming handler
57
  async def bot_respond_streaming(message: str, history: List[dict]) -> AsyncIterator:
58
  state = {"messages": (history or []) + [{"role": "user", "content": message}]}
59
+
60
  async for step in graph.astream(state):
61
  # LangGraph yields steps like {"respond": ChatState, "post_process": ChatState}
62
  for node_output in step.values():
 
69
  with gr.Row():
70
  textbox = gr.Textbox(placeholder="Ask something...", scale=8, container=False)
71
  send_btn = gr.Button("Send", scale=1)
72
+
73
  def user_submit(message, history):
74
  return "", history + [{"role": "user", "content": message}]
75
+
76
  send_event = send_btn.click(user_submit, [textbox, chatbot], [textbox, chatbot])
77
  send_event.then(bot_respond_streaming, [textbox, chatbot], chatbot)
78