staedi committed on
Commit
e0d521f
·
verified ·
1 Parent(s): 5584281

Update agent.py

Browse files
Files changed (1) hide show
  1. agent.py +46 -8
agent.py CHANGED
@@ -1,7 +1,45 @@
 
1
  import tools
2
  from llama_index.core import Settings
3
  from llama_index.core.agent.workflow import AgentWorkflow
4
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
5
  # Initialize model (defaults to llama3.1)
6
  def init_model(model:str='llama3.1:8b-instruct-q4_0'):
7
  # from llama_index.llms.ollama import Ollama
@@ -64,12 +102,12 @@ def init_agent():
64
 
65
  return agent
66
 
67
- # # Run the agent
68
- # async def run_agent(agent,query:str):
69
- # response = await agent.run(query)
70
- # return response
71
 
72
- # # Await function for async
73
- # async def await_result(agent,query:str):
74
- # response = await agent.run_agent(query)
75
- # return response.response.blocks[0].text
 
1
+ import asyncio
2
  import tools
3
  from llama_index.core import Settings
4
  from llama_index.core.agent.workflow import AgentWorkflow
5
 
6
# Silence the torch error raised by file-watching tools that try to walk
# torch.classes' dynamic __path__ — blank it out so path inspection is a no-op.
def init_async():
    import torch
    setattr(torch.classes, "__path__", [])  # manually force it to be empty
10
+
11
+
12
def run_async_task(async_func, *args):
    """
    Run an asynchronous function to completion on a fresh event loop.

    Args:
        async_func (coroutine function): The asynchronous function to execute.
        *args: Arguments to pass to the asynchronous function.

    Returns:
        The value returned by the awaited coroutine. (Previously the result
        was discarded; returning it is backward compatible for callers that
        ignored the None return.)
    """
    loop = None
    try:
        loop = asyncio.new_event_loop()
        return loop.run_until_complete(async_func(*args))
    # Narrowed from a bare `except:` so SystemExit / KeyboardInterrupt
    # still propagate instead of triggering a silent retry.
    except Exception:
        # Close the loop from the failed attempt (if it was created) and
        # retry once on a brand-new loop; a second failure propagates.
        if loop is not None:
            loop.close()
        loop = asyncio.new_event_loop()
        return loop.run_until_complete(async_func(*args))
    finally:
        if loop is not None:
            loop.close()
41
+
42
+
43
  # Initialize model (defaults to llama3.1)
44
  def init_model(model:str='llama3.1:8b-instruct-q4_0'):
45
  # from llama_index.llms.ollama import Ollama
 
102
 
103
  return agent
104
 
105
# Run the agent: execute a single query against the given agent workflow
# and hand back its response object unchanged.
async def run_agent(agent, query:str):
    return await agent.run(query)
109
 
110
# Await function for async: run the agent and extract the plain-text answer.
async def await_result(agent, query:str):
    """
    Await the agent on *query* and return the first text block of its reply.

    Args:
        agent: An agent exposing an async ``run(query)`` method.
        query (str): The user query to execute.

    Returns:
        str: Text of the first block in the response
        (``response.response.blocks[0].text``).
    """
    # Bug fix: the agent object has no `run_agent` attribute — that name is
    # the module-level helper, which takes the agent as its first argument.
    # Call the agent's own async `run` method instead.
    response = await agent.run(query)
    return response.response.blocks[0].text