snrspeaks committed on
Commit
75d6091
·
1 Parent(s): 6715806

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +56 -29
app.py CHANGED
@@ -17,47 +17,74 @@ from langchain.schema import SystemMessage
17
  import chainlit as cl
18
  from newsplease import NewsPlease
19
  import time
 
 
20
 
21
- serper_api_key = os.environ.get('SERPER_API_KEY')
22
 
23
  # 1. Tool for search
24
-
25
-
26
  def search(query, max_retries=5):
27
- url = "https://google.serper.dev/search"
28
-
29
- payload = json.dumps({
30
- "q": query
31
- })
32
-
33
- headers = {
34
- 'X-API-KEY': serper_api_key,
35
- 'Content-Type': 'application/json'
36
- }
37
-
38
  for attempt in range(max_retries):
39
  try:
40
- response = requests.request("POST", url, headers=headers, data=payload, verify=False)
41
-
42
- # Check if response is successful (e.g., HTTP 200 OK)
43
- if response.status_code == 200:
44
- print(response.text)
45
- return response.text
46
- else:
47
- print(f"Attempt {attempt + 1} failed with status code {response.status_code}. Retrying...")
48
- if attempt < max_retries - 1: # no need to sleep on the last attempt
49
- time.sleep(1)
50
- else:
51
- print("Max retries reached. Exiting...")
52
 
53
  except requests.RequestException as e:
54
  print(f"Attempt {attempt + 1} raised an error: {e}. Retrying...")
55
  if attempt < max_retries - 1: # no need to sleep on the last attempt
56
  time.sleep(1)
57
- else:
58
- print("Max retries reached. Exiting...")
59
 
60
- return None
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
61
 
62
 
63
  # 2. Tool for scraping
 
17
  import chainlit as cl
18
  from newsplease import NewsPlease
19
  import time
20
+ from duckduckgo_search import DDGS
21
+ from itertools import islice
22
 
23
+ # serper_api_key = os.environ.get('SERPER_API_KEY')
24
 
25
  # 1. Tool for search
 
 
26
def search(query, max_retries=5):
    """Search DuckDuckGo for *query* and return up to 20 results.

    Retries up to ``max_retries`` times, sleeping 1 second between
    attempts (but not after the last one).

    Args:
        query: Search string passed to DuckDuckGo.
        max_retries: Maximum number of attempts before giving up.

    Returns:
        A list of at most 20 ``{'title': ..., 'url': ...}`` dicts on
        success, or ``None`` when every attempt failed.
    """
    for attempt in range(max_retries):
        try:
            with DDGS() as ddgs:
                response = ddgs.text(query, region='wt-wt', safesearch='Off', timelimit='y')
                # Take at most 20 hits without materializing the whole iterator.
                return [{'title': r['title'], 'url': r['href']} for r in islice(response, 20)]
        except requests.RequestException as e:
            print(f"Attempt {attempt + 1} raised an error: {e}. Retrying...")
            if attempt < max_retries - 1:  # no need to sleep on the last attempt
                time.sleep(1)
            else:
                # Consistency fix: report exhaustion here too, matching the
                # generic handler below.
                print("Max retries reached. Exiting...")
        except Exception as e:  # DDGS raises its own (non-requests) exception types
            print(f"An unexpected error occurred on attempt {attempt + 1}: {e}. Retrying...")
            if attempt < max_retries - 1:
                time.sleep(1)
            else:
                print("Max retries reached. Exiting...")
    # All attempts failed.
    return None
50
+
51
+
52
+
53
+ # def search(query, max_retries=5):
54
+ # url = "https://google.serper.dev/search"
55
+
56
+ # payload = json.dumps({
57
+ # "q": query
58
+ # })
59
+
60
+ # headers = {
61
+ # 'X-API-KEY': serper_api_key,
62
+ # 'Content-Type': 'application/json'
63
+ # }
64
+
65
+ # for attempt in range(max_retries):
66
+ # try:
67
+ # response = requests.request("POST", url, headers=headers, data=payload, verify=False)
68
+
69
+ # # Check if response is successful (e.g., HTTP 200 OK)
70
+ # if response.status_code == 200:
71
+ # print(response.text)
72
+ # return response.text
73
+ # else:
74
+ # print(f"Attempt {attempt + 1} failed with status code {response.status_code}. Retrying...")
75
+ # if attempt < max_retries - 1: # no need to sleep on the last attempt
76
+ # time.sleep(1)
77
+ # else:
78
+ # print("Max retries reached. Exiting...")
79
+
80
+ # except requests.RequestException as e:
81
+ # print(f"Attempt {attempt + 1} raised an error: {e}. Retrying...")
82
+ # if attempt < max_retries - 1: # no need to sleep on the last attempt
83
+ # time.sleep(1)
84
+ # else:
85
+ # print("Max retries reached. Exiting...")
86
+
87
+ # return None
88
 
89
 
90
  # 2. Tool for scraping