Hetfield08 committed on
Commit
69252a7
·
verified ·
1 Parent(s): 369f74e

Claude corrections

Browse files
Files changed (1) hide show
  1. app.py +17 -32
app.py CHANGED
@@ -24,52 +24,37 @@ def my_custom_tool(arg1:str, arg2:int)-> str: #it's import to specify the return
24
  #from datasets import load_dataset
25
  #ds = load_dataset("nfliu/decontextualization")
26
 
27
- @tool
28
- def answer_nfl_question_tool(question: str ) -> str:
29
-
30
  """
31
- A tool that answer the user's questions about the NFL rules, tactics and players.
32
 
33
  Args:
34
- question: The user's question that agent have to lookup the answer using the DuckDuckGo browser
35
  """
36
-
37
  # Prepare the duckduckgo query
38
- search_query = f"{question}"
39
- print(f"🔍 Searching DuckDuckGo with query: {search_query}") # Debug
 
 
40
  with DDGS() as ddgs:
41
- search_results = list(ddgs.text(search_query, max_results=3)) # Otteniamo i primi 5 risultati
42
 
43
- print(f"📊 Found {len(search_results)} results") # Debug
44
 
45
  if not search_results:
46
- return ["❌ We haven't founded answers to this question."]
47
 
48
- # SHow results
49
- for r in search_results:
50
- print(f"📌 {r['title']} - {r['href']}")
51
-
52
- #return [f"{r['title']} - {r['href']}" for r in search_results]
53
-
54
- # Extract URLs from the markdown-formatted search results
55
- import re
56
- urls = re.findall(r'\((https?://[^\)]+)\)', search_results)
57
-
58
- # Visit each webpage and collect content
59
  detailed_results = []
60
  detailed_results.append(f"Search Results for: {search_query}\n")
61
  detailed_results.append("=" * 50 + "\n")
62
 
63
- visit_webpage_tool = VisitWebpageTool()
64
-
65
- for i, url in enumerate(urls[:3], 1):
66
- try:
67
- page_content = visit_webpage_tool.forward(url)
68
- detailed_results.append(f"\nSource {i}: {url}\n")
69
- detailed_results.append("-" * 30 + "\n")
70
- detailed_results.append(page_content[:1000] + "...\n") # Truncate long pages
71
- except Exception as e:
72
- detailed_results.append(f"\nError accessing {url}: {str(e)}\n")
73
 
74
  return "\n".join(detailed_results)
75
 
 
24
  #from datasets import load_dataset
25
  #ds = load_dataset("nfliu/decontextualization")
26
 
27
+ def answer_nfl_question_tool(question: str) -> str:
 
 
28
  """
29
+ A tool that answers the user's questions about the NFL rules, tactics and players.
30
 
31
  Args:
32
+ question: The user's question that agent have to lookup the answer using the DuckDuckGo browser
33
  """
 
34
  # Prepare the duckduckgo query
35
+ search_query = f"NFL {question}"
36
+ print(f"🔍 Searching DuckDuckGo with query: {search_query}")
37
+
38
+ # Search using DuckDuckGo
39
  with DDGS() as ddgs:
40
+ search_results = list(ddgs.text(search_query, max_results=3))
41
 
42
+ print(f"📊 Found {len(search_results)} results")
43
 
44
  if not search_results:
45
+ return "❌ We haven't found answers to this question."
46
 
47
+ # Format results
 
 
 
 
 
 
 
 
 
 
48
  detailed_results = []
49
  detailed_results.append(f"Search Results for: {search_query}\n")
50
  detailed_results.append("=" * 50 + "\n")
51
 
52
+ # Process each search result
53
+ for i, result in enumerate(search_results, 1):
54
+ detailed_results.append(f"\nSource {i}: {result['link']}\n")
55
+ detailed_results.append("-" * 30 + "\n")
56
+ detailed_results.append(f"Title: {result['title']}\n")
57
+ detailed_results.append(f"Description: {result['body']}\n")
 
 
 
 
58
 
59
  return "\n".join(detailed_results)
60