peterpeter8585 commited on
Commit
eda3ee2
·
verified ·
1 Parent(s): 756c2f7

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +56 -1
app.py CHANGED
@@ -207,6 +207,60 @@ import requests
207
  from datetime import datetime as dt1
208
  import requests
209
  from datetime import datetime as dt1, timezone, timedelta
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
210
  '''
211
  tools = load_tools(["ddg-search", "arxiv"], llm=llm,allow_dangerous_tools=True)
212
  tools.append(Tool(name="python_repl", func=PythonREPLTool().run, description="Python ์ฝ”๋“œ ์‹คํ–‰ ๋„๊ตฌ"))
@@ -263,8 +317,9 @@ def chat(message, history):
263
  '''
264
  tools = load_tools(["arxiv"], llm=llm,allow_dangerous_tools=True)
265
  tools.append(Tool(name="python_repl", func=PythonREPLTool().run, description="Python ์ฝ”๋“œ ์‹คํ–‰ ๋„๊ตฌ"))
266
- tools.append(Tool(name="duckduckgo_search", func=DDGS().run, description="์ธํ„ฐ๋„ท ๊ฒ€์ƒ‰ ๋„๊ตฌ"))
267
  retriever = WikipediaRetriever(lang="ko")
 
268
  tools.append(Tool(name="wiki", func=retriever.get_relevant_documents, description="์œ„ํ‚ค๋ฐฑ๊ณผ ๊ฒ€์ƒ‰๋„๊ตฌ"))
269
  # โœ… ๋Œ€ํ™” ๊ธฐ์–ต ๋ฉ”๋ชจ๋ฆฌ
270
  from langchain_community.tools import ShellTool
 
207
  from datetime import datetime as dt1
208
  import requests
209
  from datetime import datetime as dt1, timezone, timedelta
210
+ import requests
211
+ from bs4 import BeautifulSoup
212
+ import random
213
+ import time
214
+
215
# Pool of realistic browser user agents; one is picked at random per request
# to reduce the chance of Bing blocking repeated identical requests.
USER_AGENTS = [
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/122.0.0.0 Safari/537.36",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/121.0.0.0 Safari/537.36",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:123.0) Gecko/20100101 Firefox/123.0",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:122.0) Gecko/20100101 Firefox/122.0",
]


def bing_search_top3(keyword: str, top_n: int = 3) -> list:
    """Scrape the top *top_n* organic Bing search results for *keyword*.

    Args:
        keyword: Search query string.
        top_n: Maximum number of results to return (default 3, preserving
            the original behavior the function name describes).

    Returns:
        A list of dicts, each with keys "title", "snippet", "link".
        Results missing a title or link are skipped.

    Raises:
        requests.HTTPError: On a non-2xx HTTP response.
        requests.RequestException: On connection errors or timeout.
    """
    url = "https://www.bing.com/search"
    params = {"q": keyword}

    headers = {
        # Fix: the original hard-coded USER_AGENTS[2], leaving the rest of the
        # list (and the `random` import) unused. Rotate agents as intended.
        "User-Agent": random.choice(USER_AGENTS),
        "Accept-Language": "ko-KR,ko;q=0.9",
        "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",
        "Connection": "keep-alive",
    }

    # Random delay between requests to avoid being rate-limited/blocked.
    time.sleep(random.uniform(1.0, 2.5))

    resp = requests.get(url, headers=headers, params=params, timeout=10)
    resp.raise_for_status()
    soup = BeautifulSoup(resp.text, "html.parser")

    results = []

    # Each organic Bing result is rendered as an <li class="b_algo"> block.
    for item in soup.select("li.b_algo")[:top_n]:
        title_tag = item.select_one("h2")
        link_tag = item.select_one("h2 a")
        snippet_tag = item.select_one("p")

        # Skip malformed result blocks that lack a title or link.
        if not title_tag or not link_tag:
            continue

        results.append({
            "title": title_tag.get_text(strip=True),
            "snippet": snippet_tag.get_text(" ", strip=True) if snippet_tag else "",
            "link": link_tag.get("href"),
        })

    return results
264
  '''
265
  tools = load_tools(["ddg-search", "arxiv"], llm=llm,allow_dangerous_tools=True)
266
  tools.append(Tool(name="python_repl", func=PythonREPLTool().run, description="Python ์ฝ”๋“œ ์‹คํ–‰ ๋„๊ตฌ"))
 
317
  '''
318
  tools = load_tools(["arxiv"], llm=llm,allow_dangerous_tools=True)
319
  tools.append(Tool(name="python_repl", func=PythonREPLTool().run, description="Python ์ฝ”๋“œ ์‹คํ–‰ ๋„๊ตฌ"))
320
+ #tools.append(Tool(name="duckduckgo_search", func=DDGS().run, description="์ธํ„ฐ๋„ท ๊ฒ€์ƒ‰ ๋„๊ตฌ"))
321
  retriever = WikipediaRetriever(lang="ko")
322
+ tools.append(Tool(name="search_bing",func=bing_search_top3,description="bing ๊ฒ€์ƒ‰ ๊ฒฐ๊ณผ ํƒ‘3"))
323
  tools.append(Tool(name="wiki", func=retriever.get_relevant_documents, description="์œ„ํ‚ค๋ฐฑ๊ณผ ๊ฒ€์ƒ‰๋„๊ตฌ"))
324
  # โœ… ๋Œ€ํ™” ๊ธฐ์–ต ๋ฉ”๋ชจ๋ฆฌ
325
  from langchain_community.tools import ShellTool