rkihacker committed on
Commit
2bdf25f
verified
1 Parent(s): 5119d9b

Update main.py

Browse files
Files changed (1) hide show
  1. main.py +157 -248
main.py CHANGED
@@ -1,47 +1,58 @@
1
- from fastapi import FastAPI, HTTPException, Query
2
- from typing import List, Optional
3
- from pydantic import BaseModel
4
- from time import sleep
5
- from curl_cffi.requests import Session
6
- from urllib.parse import urlencode, unquote, urlparse, parse_qs
7
  import base64
8
- from typing import Dict, Any
 
9
  from concurrent.futures import ThreadPoolExecutor
10
- from webscout.litagent import LitAgent
 
 
 
 
 
11
  from bs4 import BeautifulSoup
12
- import json
13
 
14
- app = FastAPI(
15
- title="Bing Search API",
16
- description="A FastAPI wrapper for the BingSearch library with advanced features.",
17
- version="1.0.0",
18
- )
19
 
20
- # --- BingSearch Library Code ---
21
- # The provided BingSearch code is integrated here directly.
22
 
23
  class BingSearchResult(BaseModel):
24
- url: str
25
- title: str
26
- description: str
27
- metadata: Dict[str, Any] = {}
28
 
29
  class BingImageResult(BaseModel):
30
- title: str
31
- image: str
32
- thumbnail: str
33
- url: str
34
- source: str
35
 
36
  class BingNewsResult(BaseModel):
37
- title: str
38
- url: str
39
- description: str
40
- source: str = ""
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
41
 
42
  class BingSearch:
43
- """Bing search implementation with configurable parameters and advanced features."""
44
- _executor: ThreadPoolExecutor = ThreadPoolExecutor()
45
 
46
  def __init__(
47
  self,
@@ -49,98 +60,66 @@ class BingSearch:
49
  proxies: Optional[Dict[str, str]] = None,
50
  verify: bool = True,
51
  lang: str = "en-US",
52
- sleep_interval: float = 0.0,
53
  impersonate: str = "chrome110"
54
  ):
55
  self.timeout = timeout
56
  self.proxies = proxies if proxies else {}
57
  self.verify = verify
58
  self.lang = lang
59
- self.sleep_interval = sleep_interval
60
  self._base_url = "https://www.bing.com"
61
- self.session = Session(
62
  proxies=self.proxies,
63
  verify=self.verify,
64
  timeout=self.timeout,
65
  impersonate=impersonate
66
  )
67
- # It's good practice to set a realistic User-Agent
68
  self.session.headers.update({
69
  "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/117.0.0.0 Safari/537.36"
70
  })
71
 
72
- # FIX: Updated selectors to be more robust against Bing UI changes.
73
- def _selectors(self, element):
74
- selectors = {
75
- 'links': 'ol#b_results > li', # More generic selector for any list item in results
76
- 'next': 'a.sb_pagN' # Selector for the "Next" page button
77
- }
78
- return selectors[element]
79
-
80
- def _first_page(self, query):
81
- url = f'{self._base_url}/search?q={query}&search=&form=QBLH'
82
- return {'url': url, 'data': None}
83
-
84
- def _next_page(self, soup):
85
- selector = self._selectors('next')
86
- next_page_tag = soup.select_one(selector)
87
- url = None
88
- if next_page_tag and next_page_tag.get('href'):
89
- url = self._base_url + next_page_tag['href']
90
- return {'url': url, 'data': None}
91
 
92
  def _get_url(self, tag):
93
  url = tag.get('href', '')
94
- resp = url
95
  try:
96
  parsed_url = urlparse(url)
97
  query_params = parse_qs(parsed_url.query)
98
  if "u" in query_params:
99
  encoded_url = query_params["u"][0][2:]
100
- try:
101
- decoded_bytes = base64.urlsafe_b64decode(encoded_url + '===')
102
- except base64.binascii.Error as e:
103
- print(f"Error decoding Base64 string: {e}")
104
- return url
105
- resp = decoded_bytes.decode('utf-8')
106
- except Exception as e:
107
- print(f"Error decoding Base64 string: {e}")
108
- return resp
109
 
110
- # FIX: The entire text parsing logic is updated to handle modern Bing HTML structure.
111
- def text(
112
  self,
113
  keywords: str,
114
- region: str = None,
115
  safesearch: str = "moderate",
116
  max_results: int = 10,
117
- unique: bool = True
118
  ) -> List[BingSearchResult]:
119
  if not keywords:
120
- raise ValueError("Search keywords cannot be empty")
121
 
122
  fetched_results = []
123
  fetched_links = set()
124
 
125
- def fetch_page(url):
126
- try:
127
- resp = self.session.get(url)
128
- resp.raise_for_status()
129
- return resp.text
130
- except Exception as e:
131
- raise Exception(f"Bing search failed: {str(e)}")
132
 
133
- current_url = self._first_page(keywords)['url']
134
-
135
- while current_url and len(fetched_results) < max_results:
136
- html = fetch_page(current_url)
137
  soup = BeautifulSoup(html, "html.parser")
138
 
139
- # Use the more generic selector for result blocks
140
- result_blocks = soup.select(self._selectors('links'))
141
-
142
- for result in result_blocks:
143
- # Find the title and link, which are usually in an <h2> tag
144
  title_tag = result.find('h2')
145
  if not title_tag:
146
  continue
@@ -152,255 +131,185 @@ class BingSearch:
152
  url_val = self._get_url(link_tag)
153
  title = title_tag.get_text(strip=True)
154
 
155
- # Find the description, often in a div with class 'b_caption'
156
  desc_container = result.find('div', class_='b_caption')
157
- description = ''
158
- if desc_container:
159
- # Find the paragraph within the caption, or use the whole caption text
160
- desc_p = desc_container.find('p')
161
- if desc_p:
162
- description = desc_p.get_text(strip=True)
163
- else:
164
- description = desc_container.get_text(strip=True)
165
 
166
- # Fallback if no 'b_caption' is found
167
- if not description:
168
- p_tag = result.find('p')
169
- if p_tag:
170
- description = p_tag.get_text(strip=True)
171
-
172
- if url_val and title:
173
- if unique and url_val in fetched_links:
174
- continue
175
-
176
  fetched_results.append(BingSearchResult(url=url_val, title=title, description=description))
177
  fetched_links.add(url_val)
178
-
179
  if len(fetched_results) >= max_results:
180
  break
181
 
182
  if len(fetched_results) >= max_results:
183
  break
184
-
185
- # Find the next page URL
186
- next_page_info = self._next_page(soup)
187
- current_url = next_page_info['url']
188
- if current_url:
189
- sleep(self.sleep_interval)
190
 
191
  return fetched_results[:max_results]
192
 
193
-
194
- def suggestions(self, query: str, region: str = None) -> List[str]:
195
  if not query:
196
- raise ValueError("Search query cannot be empty")
197
- params = {
198
- "query": query,
199
- "mkt": region if region else "en-US"
200
- }
201
  url = f"https://api.bing.com/osjson.aspx?{urlencode(params)}"
202
  try:
203
- resp = self.session.get(url)
204
  resp.raise_for_status()
205
  data = resp.json()
206
- if isinstance(data, list) and len(data) > 1 and isinstance(data[1], list):
207
- return data[1]
208
- return []
209
  except Exception as e:
210
- if hasattr(e, 'response') and e.response is not None:
211
- raise Exception(f"Bing suggestions failed with status {e.response.status_code}: {str(e)}")
212
- else:
213
- raise Exception(f"Bing suggestions failed: {str(e)}")
214
 
215
- def images(
216
  self,
217
  keywords: str,
218
- region: str = None,
219
  safesearch: str = "moderate",
220
  max_results: int = 10
221
  ) -> List[BingImageResult]:
222
  if not keywords:
223
- raise ValueError("Search keywords cannot be empty")
224
- safe_map = {
225
- "on": "Strict",
226
- "moderate": "Moderate",
227
- "off": "Off"
228
- }
229
- safe = safe_map.get(safesearch.lower(), "Moderate")
230
  params = {
231
- "q": keywords,
232
- "count": max_results,
233
- "setlang": self.lang,
234
- "safeSearch": safe,
235
  }
236
  if region:
237
  params["mkt"] = region
 
238
  url = f"{self._base_url}/images/search?{urlencode(params)}"
239
- try:
240
- resp = self.session.get(url)
241
- resp.raise_for_status()
242
- html = resp.text
243
- except Exception as e:
244
- if hasattr(e, 'response') and e.response is not None:
245
- raise Exception(f"Bing image search failed with status {e.response.status_code}: {str(e)}")
246
- else:
247
- raise Exception(f"Bing image search failed: {str(e)}")
248
  soup = BeautifulSoup(html, "html.parser")
249
  results = []
 
250
  for item in soup.select("a.iusc"):
251
  try:
252
- m = item.get("m")
253
- meta = json.loads(m) if m else {}
254
- image_url = meta.get("murl", "")
255
- thumb_url = meta.get("turl", "")
256
- title = meta.get("t", "")
257
- page_url = meta.get("purl", "")
258
- source = meta.get("surl", "")
259
- if image_url:
260
- results.append(BingImageResult(title=title, image=image_url, thumbnail=thumb_url, url=page_url, source=source))
 
 
261
  if len(results) >= max_results:
262
  break
263
- except Exception:
264
  continue
265
  return results[:max_results]
266
 
267
- def news(
268
  self,
269
  keywords: str,
270
- region: str = None,
271
  safesearch: str = "moderate",
272
  max_results: int = 10,
273
- ) -> List['BingNewsResult']:
274
  if not keywords:
275
- raise ValueError("Search keywords cannot be empty")
276
- safe_map = {
277
- "on": "Strict",
278
- "moderate": "Moderate",
279
- "off": "Off"
280
- }
281
- safe = safe_map.get(safesearch.lower(), "Moderate")
282
  params = {
283
- "q": keywords,
284
- "form": "QBNH",
285
- "safeSearch": safe,
286
  }
287
  if region:
288
  params["mkt"] = region
 
289
  url = f"{self._base_url}/news/search?{urlencode(params)}"
290
- try:
291
- resp = self.session.get(url)
292
- resp.raise_for_status()
293
- except Exception as e:
294
- if hasattr(e, 'response') and e.response is not None:
295
- raise Exception(f"Bing news search failed with status {e.response.status_code}: {str(e)}")
296
- else:
297
- raise Exception(f"Bing news search failed: {str(e)}")
298
- soup = BeautifulSoup(resp.text, "html.parser")
299
  results = []
300
- for item in soup.select("div.news-card, div.card, div.newsitem, div.card-content, div.t_s_main"):
301
- a_tag = item.find("a")
302
- title = a_tag.get_text(strip=True) if a_tag else ''
303
- url_val = a_tag['href'] if a_tag and a_tag.has_attr('href') else ''
304
- desc_tag = item.find("div", class_="snippet") or item.find("div", class_="news-card-snippet") or item.find("div", class_="snippetText")
305
- description = desc_tag.get_text(strip=True) if desc_tag else ''
306
- source_tag = item.find("div", class_="source")
307
- source = source_tag.get_text(strip=True) if source_tag else ''
308
- if url_val and title:
309
- results.append(BingNewsResult(title=title, url=url_val, description=description, source=source))
310
- if len(results) >= max_results:
311
- break
312
- if not results:
313
- for item in soup.select("a.title"):
314
- title = item.get_text(strip=True)
315
- url_val = item['href'] if item.has_attr('href') else ''
316
- description = ''
317
- source = ''
318
- if url_val and title:
319
- results.append(BingNewsResult(title=title, url=url_val, description=description, source=source))
320
- if len(results) >= max_results:
321
- break
322
  return results[:max_results]
323
 
324
 
325
- bing = BingSearch()
326
 
327
- @app.get("/search", response_model=List[BingSearchResult])
 
 
 
 
 
 
 
 
 
 
 
 
328
  async def text_search(
329
  query: str = Query(..., description="The search keywords."),
330
  region: Optional[str] = Query(None, description="The region for the search (e.g., 'us-US')."),
331
  safesearch: str = Query("moderate", description="Safe search level ('on', 'moderate', 'off')."),
332
  max_results: int = Query(10, description="Maximum number of results to return."),
333
  ):
334
- """
335
- Perform a text search on Bing.
336
- """
337
  try:
338
- results = bing.text(
339
- keywords=query,
340
- region=region,
341
- safesearch=safesearch,
342
- max_results=max_results,
343
  )
344
- return results
345
- except Exception as e:
346
- raise HTTPException(status_code=500, detail=str(e))
347
 
348
- @app.get("/suggestions", response_model=List[str])
349
  async def get_suggestions(
350
  query: str = Query(..., description="The search query for which to fetch suggestions."),
351
  region: Optional[str] = Query(None, description="The region for the suggestions (e.g., 'en-US')."),
352
  ):
353
- """
354
- Fetches search suggestions for a given query.
355
- """
356
  try:
357
- suggestions = bing.suggestions(query=query, region=region)
358
- return suggestions
359
- except Exception as e:
360
- raise HTTPException(status_code=500, detail=str(e))
361
 
362
- @app.get("/images", response_model=List[BingImageResult])
363
  async def image_search(
364
  query: str = Query(..., description="The search keywords for images."),
365
  region: Optional[str] = Query(None, description="The region for the image search (e.g., 'us-US')."),
366
  safesearch: str = Query("moderate", description="Safe search level ('on', 'moderate', 'off')."),
367
  max_results: int = Query(10, description="Maximum number of image results to return."),
368
  ):
369
- """
370
- Perform an image search on Bing.
371
- """
372
  try:
373
- results = bing.images(
374
- keywords=query,
375
- region=region,
376
- safesearch=safesearch,
377
- max_results=max_results,
378
  )
379
- return results
380
- except Exception as e:
381
- raise HTTPException(status_code=500, detail=str(e))
382
 
383
- @app.get("/news", response_model=List[BingNewsResult])
384
  async def news_search(
385
  query: str = Query(..., description="The search keywords for news."),
386
  region: Optional[str] = Query(None, description="The region for the news search (e.g., 'us-US')."),
387
  safesearch: str = Query("moderate", description="Safe search level ('on', 'moderate', 'off')."),
388
  max_results: int = Query(10, description="Maximum number of news results to return."),
389
  ):
390
- """
391
- Perform a news search on Bing.
392
- """
393
  try:
394
- results = bing.news(
395
- keywords=query,
396
- region=region,
397
- safesearch=safesearch,
398
- max_results=max_results,
399
  )
400
- return results
401
- except Exception as e:
402
- raise HTTPException(status_code=500, detail=str(e))
403
 
404
  if __name__ == "__main__":
405
- import uvicorn
406
  uvicorn.run(app, host="0.0.0.0", port=8000)
 
1
+ import time
2
+ import json
 
 
 
 
3
  import base64
4
+ from typing import List, Optional, Dict, Any
5
+ from urllib.parse import urlencode, urlparse, parse_qs
6
  from concurrent.futures import ThreadPoolExecutor
7
+
8
+ import uvicorn
9
+ from fastapi import FastAPI, HTTPException, Query, Request, Response
10
+ from pydantic import BaseModel, Field
11
+ from starlette.middleware.base import BaseHTTPMiddleware, RequestResponseEndpoint
12
+ from curl_cffi.requests import AsyncSession
13
  from bs4 import BeautifulSoup
 
14
 
 
 
 
 
 
15
 
16
+ # --- Pydantic Models for API Responses ---
 
17
 
18
class BingSearchResult(BaseModel):
    """A single organic web-search result returned by the /search endpoint."""
    url: str = Field(..., description="The URL of the search result.")
    title: str = Field(..., description="The title of the search result.")
    description: str = Field(..., description="A brief description of the search result.")
    metadata: Dict[str, Any] = Field({}, description="Additional metadata for the result.")
23
 
24
class BingImageResult(BaseModel):
    """A single image hit returned by the /images endpoint."""
    title: str = Field(..., description="The title of the image.")
    image_url: str = Field(..., description="The direct URL to the full-size image.")
    thumbnail_url: str = Field(..., description="The URL to the thumbnail of the image.")
    page_url: str = Field(..., description="The URL of the page where the image was found.")
    source: str = Field(..., description="The source or domain of the image.")
30
 
31
class BingNewsResult(BaseModel):
    """A single news article returned by the /news endpoint."""
    title: str = Field(..., description="The title of the news article.")
    url: str = Field(..., description="The URL to the news article.")
    description: str = Field(..., description="A snippet from the news article.")
    source: str = Field("", description="The source of the news article.")
36
+
37
+
38
+ # --- Custom Middleware for Response Headers ---
39
+
40
class CustomHeaderMiddleware(BaseHTTPMiddleware):
    """Starlette middleware that stamps timing and branding headers on every response."""

    async def dispatch(
        self, request: Request, call_next: RequestResponseEndpoint
    ) -> Response:
        # FIX: use perf_counter (monotonic) instead of time.time for the
        # duration measurement — wall-clock time can jump backwards/forwards
        # on NTP adjustment and produce negative or bogus values.
        start_time = time.perf_counter()
        response = await call_next(request)
        process_time = time.perf_counter() - start_time
        response.headers["X-Response-Time"] = f"{process_time:.4f}s"
        response.headers["X-Powered-By"] = "NiansuhAI"
        return response
50
+
51
+
52
+ # --- Bing Search Service ---
53
 
54
class BingSearch:
    """Asynchronous Bing search client: text search, suggestions, images and news.

    All network calls go through a single shared ``curl_cffi`` AsyncSession
    configured to impersonate a desktop Chrome browser.
    """

    def __init__(
        self,
        timeout: int = 10,  # NOTE(review): original default is hidden in the diff fold — confirm
        proxies: Optional[Dict[str, str]] = None,
        verify: bool = True,
        lang: str = "en-US",
        impersonate: str = "chrome110"
    ):
        self.timeout = timeout
        self.proxies = proxies if proxies else {}
        self.verify = verify
        self.lang = lang
        self._base_url = "https://www.bing.com"
        self.session = AsyncSession(
            proxies=self.proxies,
            verify=self.verify,
            timeout=self.timeout,
            impersonate=impersonate
        )
        # A realistic desktop UA reduces the chance of bot-detection pages.
        self.session.headers.update({
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/117.0.0.0 Safari/537.36"
        })

    async def _fetch_html(self, url: str) -> str:
        """GET *url* and return the response body; any failure becomes a 500 HTTPException."""
        try:
            resp = await self.session.get(url)
            resp.raise_for_status()
            return resp.text
        except Exception as e:
            raise HTTPException(status_code=500, detail=f"Failed to fetch Bing search results: {e}")

    def _get_url(self, tag):
        """Resolve a result anchor's href, decoding Bing's base64 redirect links.

        Bing wraps outbound links as ``.../ck/a?...&u=a1<base64>``; the real
        target is the urlsafe-base64 payload after the 2-character prefix.
        Returns the raw href unchanged when it is not a redirect or decoding fails.
        """
        url = tag.get('href', '')
        try:
            parsed_url = urlparse(url)
            query_params = parse_qs(parsed_url.query)
            if "u" in query_params:
                # Strip the 2-char "a1" prefix; '===' over-pads deliberately
                # (extra padding is ignored by the lenient decoder).
                encoded_url = query_params["u"][0][2:]
                decoded_bytes = base64.urlsafe_b64decode(encoded_url + '===')
                return decoded_bytes.decode('utf-8')
        except Exception:
            return url
        return url

    async def text(
        self,
        keywords: str,
        region: Optional[str] = None,
        safesearch: str = "moderate",
        max_results: int = 10,
    ) -> "List[BingSearchResult]":
        """Run a paginated Bing web search and return up to *max_results* results.

        Raises ValueError on an empty query. *safesearch* is accepted for
        interface symmetry but is not applied by this HTML endpoint.
        """
        if not keywords:
            raise ValueError("Search keywords cannot be empty.")

        fetched_results = []
        fetched_links = set()

        # FIX: urlencode the query string so spaces and special characters
        # in the keywords survive (the raw f-string interpolation broke them).
        query = {"q": keywords, "form": "QBLH"}
        if region:
            query["setmkt"] = region
        url = f"{self._base_url}/search?{urlencode(query)}"

        while url and len(fetched_results) < max_results:
            html = await self._fetch_html(url)
            soup = BeautifulSoup(html, "html.parser")

            for result in soup.select('ol#b_results > li.b_algo'):
                title_tag = result.find('h2')
                if not title_tag:
                    continue
                link_tag = title_tag.find('a')
                if not link_tag:
                    continue

                url_val = self._get_url(link_tag)
                title = title_tag.get_text(strip=True)

                desc_container = result.find('div', class_='b_caption')
                description = desc_container.get_text(strip=True) if desc_container else ''

                if url_val and title and url_val not in fetched_links:
                    fetched_results.append(BingSearchResult(url=url_val, title=title, description=description))
                    fetched_links.add(url_val)
                    if len(fetched_results) >= max_results:
                        break

            if len(fetched_results) >= max_results:
                break

            # Follow the "Next" pagination link, if any.
            next_page_tag = soup.select_one('a.sb_pagN')
            url = self._base_url + next_page_tag['href'] if next_page_tag and next_page_tag.get('href') else None

        return fetched_results[:max_results]

    async def suggestions(self, query: str, region: Optional[str] = None) -> List[str]:
        """Return autocomplete suggestions for *query* from Bing's osjson API."""
        if not query:
            raise ValueError("Search query cannot be empty.")
        params = {"query": query, "mkt": region if region else "en-US"}
        url = f"https://api.bing.com/osjson.aspx?{urlencode(params)}"
        try:
            resp = await self.session.get(url)
            resp.raise_for_status()
            data = resp.json()
            # FIX: also verify data[1] is a list before returning it — the
            # API contract is ["query", [suggestion, ...]]; the previous
            # version dropped this check.
            if isinstance(data, list) and len(data) > 1 and isinstance(data[1], list):
                return data[1]
            return []
        except Exception as e:
            raise HTTPException(status_code=500, detail=f"Failed to fetch suggestions: {e}")

    async def images(
        self,
        keywords: str,
        region: Optional[str] = None,
        safesearch: str = "moderate",
        max_results: int = 10
    ) -> "List[BingImageResult]":
        """Scrape Bing image search; metadata comes from the JSON blob in each a.iusc tag."""
        if not keywords:
            raise ValueError("Search keywords cannot be empty.")

        safe_map = {"on": "Strict", "moderate": "Moderate", "off": "Off"}
        params = {
            "q": keywords, "count": max_results, "setlang": self.lang,
            "safeSearch": safe_map.get(safesearch.lower(), "Moderate"),
        }
        if region:
            params["mkt"] = region

        url = f"{self._base_url}/images/search?{urlencode(params)}"
        html = await self._fetch_html(url)
        soup = BeautifulSoup(html, "html.parser")
        results = []

        for item in soup.select("a.iusc"):
            try:
                meta = json.loads(item.get("m", '{}'))
                if meta.get("murl"):
                    results.append(
                        BingImageResult(
                            title=meta.get("t", ""),
                            image_url=meta.get("murl", ""),
                            thumbnail_url=meta.get("turl", ""),
                            page_url=meta.get("purl", ""),
                            source=meta.get("surl", "")
                        )
                    )
                    if len(results) >= max_results:
                        break
            except (json.JSONDecodeError, KeyError):
                # Malformed metadata on one card should not abort the scrape.
                continue
        return results[:max_results]

    async def news(
        self,
        keywords: str,
        region: Optional[str] = None,
        safesearch: str = "moderate",
        max_results: int = 10,
    ) -> "List[BingNewsResult]":
        """Scrape Bing news results from div.news-card blocks."""
        if not keywords:
            raise ValueError("Search keywords cannot be empty.")

        safe_map = {"on": "Strict", "moderate": "Moderate", "off": "Off"}
        params = {
            "q": keywords, "form": "QBNH",
            "safeSearch": safe_map.get(safesearch.lower(), "Moderate"),
        }
        if region:
            params["mkt"] = region

        url = f"{self._base_url}/news/search?{urlencode(params)}"
        html = await self._fetch_html(url)
        soup = BeautifulSoup(html, "html.parser")
        results = []

        for item in soup.select("div.news-card"):
            a_tag = item.find("a", class_="title")
            if not a_tag:
                continue

            # FIX: news cards often carry site-relative hrefs; make them absolute.
            url_val = a_tag.get('href', '')
            if url_val.startswith('/'):
                url_val = self._base_url + url_val

            desc_tag = item.find("div", class_="snippet")
            source_tag = item.find("div", class_="source")
            # FIX: the source line looks like "Outlet · 2h"; split on the
            # middle dot U+00B7 — the previous code split on '路', which is
            # the UTF-8 mojibake of that character.
            source = source_tag.get_text(strip=True).split('·')[0].strip() if source_tag else ""

            results.append(
                BingNewsResult(
                    title=a_tag.get_text(strip=True),
                    url=url_val,
                    description=desc_tag.get_text(strip=True) if desc_tag else "",
                    source=source,
                )
            )
            if len(results) >= max_results:
                break
        return results[:max_results]
245
 
246
 
247
# --- FastAPI Application Setup ---

app = FastAPI(
    title="Bing Search API",
    description="A FastAPI wrapper for the BingSearch library with advanced features, powered by NiansuhAI.",
    version="2.0.0",
)

app.add_middleware(CustomHeaderMiddleware)
# Single shared client instance; its AsyncSession is reused across requests.
bing_search_service = BingSearch()
257
+
258
+
259
+ # --- API Endpoints ---
260
+
261
@app.get("/search", response_model=List[BingSearchResult], summary="Perform a text search")
async def text_search(
    query: str = Query(..., description="The search keywords."),
    region: Optional[str] = Query(None, description="The region for the search (e.g., 'us-US')."),
    safesearch: str = Query("moderate", description="Safe search level ('on', 'moderate', 'off')."),
    max_results: int = Query(10, description="Maximum number of results to return."),
):
    """Perform a Bing text search and return a list of organic web results."""
    try:
        return await bing_search_service.text(
            keywords=query, region=region, safesearch=safesearch, max_results=max_results
        )
    except ValueError as e:
        # Invalid client input (e.g. empty query) -> 400; network/scrape
        # failures surface as the HTTPException(500) raised by the service.
        raise HTTPException(status_code=400, detail=str(e))
 
274
 
275
@app.get("/suggestions", response_model=List[str], summary="Get search suggestions")
async def get_suggestions(
    query: str = Query(..., description="The search query for which to fetch suggestions."),
    region: Optional[str] = Query(None, description="The region for the suggestions (e.g., 'en-US')."),
):
    """Fetch autocomplete suggestions for a query from Bing."""
    try:
        return await bing_search_service.suggestions(query=query, region=region)
    except ValueError as e:
        # Invalid client input (e.g. empty query) -> 400.
        raise HTTPException(status_code=400, detail=str(e))
 
284
 
285
@app.get("/images", response_model=List[BingImageResult], summary="Perform an image search")
async def image_search(
    query: str = Query(..., description="The search keywords for images."),
    region: Optional[str] = Query(None, description="The region for the image search (e.g., 'us-US')."),
    safesearch: str = Query("moderate", description="Safe search level ('on', 'moderate', 'off')."),
    max_results: int = Query(10, description="Maximum number of image results to return."),
):
    """Perform a Bing image search and return image metadata."""
    try:
        return await bing_search_service.images(
            keywords=query, region=region, safesearch=safesearch, max_results=max_results
        )
    except ValueError as e:
        # Invalid client input (e.g. empty query) -> 400.
        raise HTTPException(status_code=400, detail=str(e))
 
298
 
299
@app.get("/news", response_model=List[BingNewsResult], summary="Perform a news search")
async def news_search(
    query: str = Query(..., description="The search keywords for news."),
    region: Optional[str] = Query(None, description="The region for the news search (e.g., 'us-US')."),
    safesearch: str = Query("moderate", description="Safe search level ('on', 'moderate', 'off')."),
    max_results: int = Query(10, description="Maximum number of news results to return."),
):
    """Perform a Bing news search and return article summaries."""
    try:
        return await bing_search_service.news(
            keywords=query, region=region, safesearch=safesearch, max_results=max_results
        )
    except ValueError as e:
        # Invalid client input (e.g. empty query) -> 400.
        raise HTTPException(status_code=400, detail=str(e))
312
+
313
 
314
if __name__ == "__main__":
    # Run a development server when the module is executed directly.
    uvicorn.run(app, host="0.0.0.0", port=8000)