Psiska committed on
Commit
caf2882
·
1 Parent(s): 7e21f4d

Make the models work in parallel 5

Browse files
Files changed (2) hide show
  1. __pycache__/crew.cpython-310.pyc +0 -0
  2. crew.py +17 -14
__pycache__/crew.cpython-310.pyc CHANGED
Binary files a/__pycache__/crew.cpython-310.pyc and b/__pycache__/crew.cpython-310.pyc differ
 
crew.py CHANGED
@@ -146,6 +146,8 @@ class GAIACrew():
146
 
147
  import concurrent.futures
148
 
 
 
149
  def run_parallel_crew(question: str, file_path: str):
150
  """
151
  1) Prepares the prompt (including file data if any).
@@ -164,31 +166,30 @@ def run_parallel_crew(question: str, file_path: str):
164
  else:
165
  final_question = f"{question} File path: {file_path}."
166
 
167
- # 2) Instantiate your crew and split manager vs workers by zipping names → agents
168
  crew_instance = GAIACrew()
169
  names = list(crew_instance.agents_config.keys())
170
  agents = crew_instance.agents
 
171
 
172
- # Build (name, agent) pairs
173
- pairs = list(zip(names, agents))
174
- workers = [agent for name, agent in pairs if name != "manager_agent"]
175
- manager = next(agent for name, agent in pairs if name == "manager_agent")
176
 
177
- # 3) Run workers in parallel
178
  results = {}
179
  with concurrent.futures.ThreadPoolExecutor(max_workers=len(workers)) as pool:
180
- futures = {
181
- pool.submit(lambda ag: ag.kickoff({"question": final_question}), ag): name
182
- for name, ag in pairs if name != "manager_agent"
183
  }
184
- for fut in concurrent.futures.as_completed(futures):
185
- name = futures[fut]
186
  try:
187
  results[name] = fut.result()
188
  except Exception as e:
189
  results[name] = f"<error: {e}>"
190
 
191
- # 4) Compose a manager prompt that includes all worker outputs
192
  combined = "\n\n".join(f"--- {n} output ---\n{out}"
193
  for n, out in results.items())
194
  manager_prompt = (
@@ -198,8 +199,10 @@ def run_parallel_crew(question: str, file_path: str):
198
  f"Original question: {question}"
199
  )
200
 
201
- # 5) Run the manager for the final answer
202
- final = manager.kickoff({"question": manager_prompt})
 
 
203
  return get_final_answer(FINAL_ANSWER_MODEL, question, str(final))
204
 
205
 
 
146
 
147
  import concurrent.futures
148
 
149
+ import concurrent.futures
150
+
151
  def run_parallel_crew(question: str, file_path: str):
152
  """
153
  1) Prepares the prompt (including file data if any).
 
166
  else:
167
  final_question = f"{question} File path: {file_path}."
168
 
169
+ # 2) Instantiate your crew and split manager vs workers
170
  crew_instance = GAIACrew()
171
  names = list(crew_instance.agents_config.keys())
172
  agents = crew_instance.agents
173
+ pairs = list(zip(names, agents))
174
 
175
+ workers = [(n, a) for n, a in pairs if n != "manager_agent"]
176
+ manager_name, manager = next((n, a) for n, a in pairs if n == "manager_agent")
 
 
177
 
178
+ # 3) Run workers in parallel, giving each the plain-string prompt
179
  results = {}
180
  with concurrent.futures.ThreadPoolExecutor(max_workers=len(workers)) as pool:
181
+ future_to_name = {
182
+ pool.submit(agent.kickoff, final_question): name
183
+ for name, agent in workers
184
  }
185
+ for fut in concurrent.futures.as_completed(future_to_name):
186
+ name = future_to_name[fut]
187
  try:
188
  results[name] = fut.result()
189
  except Exception as e:
190
  results[name] = f"<error: {e}>"
191
 
192
+ # 4) Compose a manager prompt with all the raw outputs
193
  combined = "\n\n".join(f"--- {n} output ---\n{out}"
194
  for n, out in results.items())
195
  manager_prompt = (
 
199
  f"Original question: {question}"
200
  )
201
 
202
+ # 5) Run the manager agent on the combined prompt
203
+ final = manager.kickoff(manager_prompt)
204
+
205
+ # 6) Post-process via your final-answer model
206
  return get_final_answer(FINAL_ANSWER_MODEL, question, str(final))
207
 
208