ankush-003 committed on
Commit
dc827ef
·
1 Parent(s): 0e93b6c

updated search tool

Browse files
crew/config/agents.yaml CHANGED
@@ -10,7 +10,7 @@ researcher:
10
  4. PRIORITIZE accuracy and relevance over exhaustive coverage
11
 
12
  Smart research strategy:
13
- - Rapidly evolving domains (AI, crypto, politics, health): Search for 2024-2025 updates
14
  - Stable domains (physics, history, literature): Use knowledge base first
15
  - Mixed domains: Combine both approaches strategically
16
  Your goal: Provide the most accurate, current, and useful response to {question} using the optimal information strategy.
 
10
  4. PRIORITIZE accuracy and relevance over exhaustive coverage
11
 
12
  Smart research strategy:
13
+ - Rapidly evolving domains (AI, crypto, politics, health): Search only for 2025 updates (any other year only if relevant)
14
  - Stable domains (physics, history, literature): Use knowledge base first
15
  - Mixed domains: Combine both approaches strategically
16
  Your goal: Provide the most accurate, current, and useful response to {question} using the optimal information strategy.
crew/config/tasks.yaml CHANGED
@@ -4,7 +4,7 @@ research_task:
4
  Make sure you find any interesting and relevant information given
5
  the current year is 2025.
6
  expected_output: >
7
- A list with 10 bullet points of the most relevant information about {question}
8
  agent: researcher
9
 
10
  slack_report_task:
@@ -16,6 +16,7 @@ slack_report_task:
16
  Adopt a Skynet persona and include a humorous or robotic warning at the end.
17
  expected_output: >
18
  A message written in the style of Skynet, and ending with a witty warning.
 
19
  agent: slack_reporter
20
  context:
21
  - research_task
 
4
  Make sure you find any interesting and relevant information given
5
  the current year is 2025.
6
  expected_output: >
7
+ A list with 10 bullet points of the most relevant information about {question} with url links.
8
  agent: researcher
9
 
10
  slack_report_task:
 
16
  Adopt a Skynet persona and include a humorous or robotic warning at the end.
17
  expected_output: >
18
  A message written in the style of Skynet, and ending with a witty warning.
19
+ Also include the URLs that were used.
20
  agent: slack_reporter
21
  context:
22
  - research_task
crew/crew.py CHANGED
@@ -3,13 +3,40 @@ from crewai.project import CrewBase, agent, task, crew, before_kickoff, after_ki
3
  from crewai.agents.agent_builder.base_agent import BaseAgent
4
  from typing import List
5
  from crew.models import get_crew_llm
 
 
6
  from crewai.tools import tool
7
  from langchain_community.tools import DuckDuckGoSearchRun
8
 
9
  @tool("Website Search Tool")
10
- def website_search_tool(question: str) -> str:
11
  """Search the web for information on a given topic"""
12
- return DuckDuckGoSearchRun().invoke(question)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
13
 
14
  @CrewBase
15
  class SlackCrew:
 
3
  from crewai.agents.agent_builder.base_agent import BaseAgent
4
  from typing import List
5
  from crew.models import get_crew_llm
6
+ from crewai_tools import ScrapeWebsiteTool
7
+ from googlesearch import search
8
  from crewai.tools import tool
9
  from langchain_community.tools import DuckDuckGoSearchRun
10
 
11
@tool("Website Search Tool")
def website_search_tool(query: str) -> str:
    """Search the web for information on a given topic.

    Runs a Google search for *query*, scrapes each result page with
    CrewAI's ScrapeWebsiteTool, and returns a single formatted report
    string listing every result's URL followed by a truncated excerpt
    of its content.

    Args:
        query: The search phrase to look up.

    Returns:
        A human-readable report string. Failures scraping an individual
        URL are reported inline for that entry; a failure of the search
        call itself is returned as an error message rather than raised,
        so one network hiccup never crashes the whole crew run.
    """
    # Result count and excerpt size kept small to stay within the
    # LLM context budget.
    num_results = 2
    max_chars = 1000

    try:
        search_results = list(search(query, num_results=num_results))
    except Exception as search_error:
        # The googlesearch call hits the network; surface the failure to
        # the agent as text, consistent with the per-URL error handling
        # below, instead of letting the exception abort the task.
        return f"Error performing Google search for '{query}': {search_error}"

    results = [f"Google Search Results for: '{query}'", "=" * 50]

    for i, url in enumerate(search_results, 1):
        results.append(f"\n{i}. URL: {url}")

        try:
            # Use CrewAI's ScrapeWebsiteTool to fetch the page content.
            content = ScrapeWebsiteTool(website_url=url).run()

            # Limit content length for readability and context budget.
            if len(content) > max_chars:
                content = content[:max_chars] + "... [Content truncated]"

            results.append(f"Content: {content}")
        except Exception as scrape_error:
            # A single bad page must not abort the remaining results.
            results.append(f"Error scraping content: {scrape_error}")

        results.append("-" * 30)

    return "\n".join(results)
40
 
41
  @CrewBase
42
  class SlackCrew:
crew/models.py CHANGED
@@ -3,7 +3,7 @@ import logging
3
  from crewai import LLM
4
 
5
  # GOOGLE_API_KEY = os.getenv("GOOGLE_API_KEY", "")
6
- GEMINI_MODEL = os.getenv("GEMINI_MODEL", "gemma-3n-e4b-it")
7
 
8
  logger = logging.getLogger(__name__)
9
  logging.basicConfig(level=logging.INFO)
 
3
  from crewai import LLM
4
 
5
  # GOOGLE_API_KEY = os.getenv("GOOGLE_API_KEY", "")
6
+ GEMINI_MODEL = os.getenv("GEMINI_MODEL", "gemma-3-12b-it")
7
 
8
  logger = logging.getLogger(__name__)
9
  logging.basicConfig(level=logging.INFO)
requirements.txt CHANGED
@@ -1,9 +1,10 @@
1
  aiohttp
2
  fastapi
3
  uvicorn[standard]
 
 
4
  httpx
5
  pydantic
6
  python-multipart
7
- crewai
8
  langchain-community
9
  duckduckgo-search
 
1
  aiohttp
2
  fastapi
3
  uvicorn[standard]
4
+ crewai[tools]
5
+ googlesearch-python
6
  httpx
7
  pydantic
8
  python-multipart
 
9
  langchain-community
10
  duckduckgo-search