KheemDH commited on
Commit
4c2d161
·
1 Parent(s): 178b9ef

Updated App

Browse files
Files changed (1) hide show
  1. app.py +51 -45
app.py CHANGED
@@ -9,14 +9,14 @@ from langchain_core.prompts import ChatPromptTemplate
9
  from langchain_openai import ChatOpenAI
10
  from browser_use import Agent
11
 
12
- # ──────────────────────────────────────────────────────────────────────────
13
  # 1) Load environment
14
- # ──────────────────────────────────────────────────────────────────────────
15
  load_dotenv()
16
 
17
- # ──────────────────────────────────────────────────────────────────────────
18
  # 2) Helper to get ChatOpenAI from environment
19
- # ──────────────────────────────────────────────────────────────────────────
20
  def get_llm():
21
  """Returns a ChatOpenAI instance using the OPENAI_API_KEY from environment."""
22
  return ChatOpenAI(
@@ -32,18 +32,18 @@ def get_llm_browser():
32
  openai_api_key=os.getenv("OPENAI_API_KEY")
33
  )
34
 
35
- # ──────────────────────────────────────────────────────────────────────────
36
- # 3) TypedDict for internal state
37
- # ──────────────────────────────────────────────────────────────────────────
38
  class State(TypedDict):
39
  query: str
40
  category: str
41
  sentiment: str
42
  response: str
43
 
44
- # ──────────────────────────────────────────────────────────────────────────
45
- # 4) Individual node-like functions
46
- # ──────────────────────────────────────────────────────────────────────────
47
 
48
  def categorize(state: State) -> State:
49
  prompt = ChatPromptTemplate.from_template(
@@ -85,20 +85,27 @@ def handle_billing(state: State) -> State:
85
  return state
86
 
87
  async def run_browser_agent(task: str) -> str:
88
- """Helper to run the browser-use Agent asynchronously."""
 
 
 
89
  agent = Agent(task=task, llm=get_llm_browser())
90
  result = await agent.run()
91
  return result
92
 
93
- def handle_general(state: State) -> State:
94
- """For general queries, we use the browser agent to consult online resources."""
 
 
 
95
  task = (
96
  "You are a customer support agent that consults online sources. "
97
  f"Provide a detailed, informed response to this customer query: {state['query']}"
98
  )
99
- result = asyncio.run(run_browser_agent(task))
100
- final_text = ""
101
 
 
102
  if isinstance(result, str):
103
  final_text = result.strip()
104
  elif hasattr(result, "all_results"):
@@ -131,71 +138,70 @@ def route_query(state: State) -> str:
131
  else:
132
  return "handle_general"
133
 
134
- # ──────────────────────────────────────────────────────────────────────────
135
- # 5) A simple "workflow" function (manual, no langgraph)
136
- # ──────────────────────────────────────────────────────────────────────────
137
-
138
- def run_workflow(state: State) -> State:
139
  """
140
- Manually steps through:
141
  1) categorize
142
- 2) sentiment
143
  3) route
144
- 4) handle_x or escalate
145
  """
146
  # Step 1
147
  state = categorize(state)
148
  # Step 2
149
  state = analyze_sentiment(state)
150
- # Step 3 - route
151
  next_step = route_query(state)
152
-
 
153
  if next_step == "handle_technical":
154
- state = handle_technical(state)
155
  elif next_step == "handle_billing":
156
- state = handle_billing(state)
157
  elif next_step == "handle_general":
158
- state = handle_general(state)
 
159
  else:
 
160
  state = escalate(state)
161
-
162
- return state
163
 
164
- # ──────────────────────────────────────────────────────────────────────────
165
- # 6) Gradio callback
166
- # ──────────────────────────────────────────────────────────────────────────
167
 
 
 
 
168
  async def run_customer_support(query: str, api_key: str = "") -> str:
169
  """
170
- Main function called by Gradio upon submit.
171
- - If user provided an API key, set it in the environment.
172
- - Then run the manual "workflow" on the user's query.
173
- - Return the final response from the final state.
 
174
  """
175
- # Check key
176
  if not api_key and not os.getenv("OPENAI_API_KEY"):
177
  return "Error: Please provide an OpenAI API key."
178
-
179
- # Set user-provided key
180
  if api_key:
181
  os.environ["OPENAI_API_KEY"] = api_key
182
 
183
  try:
184
- # Initialize the state
185
  state: State = {
186
  "query": query,
187
  "category": "",
188
  "sentiment": "",
189
  "response": ""
190
  }
191
- final_state = run_workflow(state) # Manually run the chain of steps
192
  return final_state["response"]
193
  except Exception as e:
194
  return f"Error: {str(e)}"
195
 
196
- # ──────────────────────────────────────────────────────────────────────────
197
  # 7) Build the Gradio UI
198
- # ──────────────────────────────────────────────────────────────────────────
199
  with gr.Blocks(title="Customer Support Agent with Browser Use") as demo:
200
  gr.Markdown("# Customer Support Agent with Browser Use")
201
  gr.Markdown(
@@ -223,7 +229,7 @@ with gr.Blocks(title="Customer Support Agent with Browser Use") as demo:
223
  interactive=False
224
  )
225
 
226
- # The order of inputs in submit_btn.click must match run_customer_support signature
227
  submit_btn.click(
228
  fn=run_customer_support,
229
  inputs=[query_input, api_key_input],
 
9
  from langchain_openai import ChatOpenAI
10
  from browser_use import Agent
11
 
12
+ # ─────────────────────────────────────────────────────────────────────
13
  # 1) Load environment
14
+ # ─────────────────────────────────────────────────────────────────────
15
  load_dotenv()
16
 
17
+ # ─────────────────────────────────────────────────────────────────────
18
  # 2) Helper to get ChatOpenAI from environment
19
+ # ─────────────────────────────────────────────────────────────────────
20
  def get_llm():
21
  """Returns a ChatOpenAI instance using the OPENAI_API_KEY from environment."""
22
  return ChatOpenAI(
 
32
  openai_api_key=os.getenv("OPENAI_API_KEY")
33
  )
34
 
35
+ # ─────────────────────────────────────────────────────────────────────
36
+ # 3) TypedDict for state
37
+ # ─────────────────────────────────────────────────────────────────────
38
  class State(TypedDict):
39
  query: str
40
  category: str
41
  sentiment: str
42
  response: str
43
 
44
+ # ─────────────────────────────────────────────────────────────────────
45
+ # 4) "Node" functions
46
+ # ─────────────────────────────────────────────────────────────────────
47
 
48
  def categorize(state: State) -> State:
49
  prompt = ChatPromptTemplate.from_template(
 
85
  return state
86
 
87
  async def run_browser_agent(task: str) -> str:
88
+ """
89
+ Helper to run the browser-use Agent asynchronously.
90
+ Because we're already in an event loop, we just 'await agent.run()'.
91
+ """
92
  agent = Agent(task=task, llm=get_llm_browser())
93
  result = await agent.run()
94
  return result
95
 
96
+ # Make 'handle_general' async so it can 'await run_browser_agent(...)'
97
+ async def handle_general(state: State) -> State:
98
+ """
99
+ For general queries, we use the browser agent to consult online resources.
100
+ """
101
  task = (
102
  "You are a customer support agent that consults online sources. "
103
  f"Provide a detailed, informed response to this customer query: {state['query']}"
104
  )
105
+ # Directly await run_browser_agent(...) with no asyncio.run()
106
+ result = await run_browser_agent(task)
107
 
108
+ final_text = ""
109
  if isinstance(result, str):
110
  final_text = result.strip()
111
  elif hasattr(result, "all_results"):
 
138
  else:
139
  return "handle_general"
140
 
141
+ # ─────────────────────────────────────────────────────────────────────
142
+ # 5) A manual workflow function in async
143
+ # ─────────────────────────────────────────────────────────────────────
144
+ async def run_workflow(state: State) -> State:
 
145
  """
146
+ Steps:
147
  1) categorize
148
+ 2) analyze_sentiment
149
  3) route
150
+ 4) run the appropriate function (some are sync, some are async)
151
  """
152
  # Step 1
153
  state = categorize(state)
154
  # Step 2
155
  state = analyze_sentiment(state)
156
+ # Step 3
157
  next_step = route_query(state)
158
+
159
+ # Step 4
160
  if next_step == "handle_technical":
161
+ state = handle_technical(state) # sync function
162
  elif next_step == "handle_billing":
163
+ state = handle_billing(state) # sync function
164
  elif next_step == "handle_general":
165
+ # handle_general is async, so we must 'await' it
166
+ state = await handle_general(state)
167
  else:
168
+ # escalate is sync
169
  state = escalate(state)
 
 
170
 
171
+ return state
 
 
172
 
173
+ # ─────────────────────────────────────────────────────────────────────
174
+ # 6) Gradio callback (async)
175
+ # ─────────────────────────────────────────────────────────────────────
176
  async def run_customer_support(query: str, api_key: str = "") -> str:
177
  """
178
+ Called by Gradio upon submit. We do:
179
+ - Possibly set OS env for OPENAI_API_KEY
180
+ - Create initial state
181
+ - 'await run_workflow(...)'
182
+ - Return final answer
183
  """
 
184
  if not api_key and not os.getenv("OPENAI_API_KEY"):
185
  return "Error: Please provide an OpenAI API key."
186
+
 
187
  if api_key:
188
  os.environ["OPENAI_API_KEY"] = api_key
189
 
190
  try:
 
191
  state: State = {
192
  "query": query,
193
  "category": "",
194
  "sentiment": "",
195
  "response": ""
196
  }
197
+ final_state = await run_workflow(state)
198
  return final_state["response"]
199
  except Exception as e:
200
  return f"Error: {str(e)}"
201
 
202
+ # ─────────────────────────────────────────────────────────────────────
203
  # 7) Build the Gradio UI
204
+ # ─────────────────────────────────────────────────────────────────────
205
  with gr.Blocks(title="Customer Support Agent with Browser Use") as demo:
206
  gr.Markdown("# Customer Support Agent with Browser Use")
207
  gr.Markdown(
 
229
  interactive=False
230
  )
231
 
232
+ # The callback is async; Gradio can handle async if the function is declared async.
233
  submit_btn.click(
234
  fn=run_customer_support,
235
  inputs=[query_input, api_key_input],