Spaces:
Update app.py
app.py CHANGED
@@ -23,13 +23,57 @@ def scrape_startpage(query, n=10):
         results = []
         for c in soup.find_all('div', class_='result')[:n]:
             t = c.find('a', class_='result-title')
-            if not t:
+            if not t:
+                continue
             d = c.find('p', class_='result-description') or c.find('span', class_='result-description')
             results.append({'title': t.get_text(strip=True), 'url': t.get('href'), 'desc': d.get_text(strip=True) if d else ''})
         return results
     except Exception:
         return []
 
+def scrape_duckduckgo(query, n=10):
+    try:
+        time.sleep(1)
+        resp = requests.post("https://html.duckduckgo.com/html/", data={'q': query}, headers={'User-Agent': 'Mozilla/5.0'})
+        resp.raise_for_status()
+        soup = BeautifulSoup(resp.text, 'html.parser')
+        results = []
+        for a in soup.select('.result__a')[:n]:
+            title = a.get_text(strip=True)
+            href = a.get('href')
+            snippet_elem = a.find_parent('div', class_='result').select_one('.result__snippet')
+            snippet = snippet_elem.get_text(strip=True) if snippet_elem else ''
+            results.append({'title': title, 'url': href, 'desc': snippet})
+        return results
+    except Exception:
+        return []
+
+def scrape_qwant(query, n=10):
+    try:
+        time.sleep(1)
+        r = requests.get("https://api.qwant.com/api/search/web", params={"q": query, "count": n, "t": "web"})
+        r.raise_for_status()
+        data = r.json()
+        results = []
+        for item in data.get("data", {}).get("result", {}).get("items", []):
+            results.append({'title': item.get('title', ''), 'url': item.get('url', ''), 'desc': item.get('desc', '')})
+        return results
+    except Exception:
+        return []
+
+def scrape_wikipedia(query, n=10):
+    try:
+        r = requests.get("https://en.wikipedia.org/w/api.php", params={"action": "query", "list": "search", "format": "json", "srsearch": query, "srlimit": n})
+        r.raise_for_status()
+        data = r.json()
+        results = []
+        for item in data['query']['search']:
+            url = f"https://en.wikipedia.org/wiki/{item['title'].replace(' ', '_')}"
+            results.append({'title': item['title'], 'url': url, 'desc': item['snippet']})
+        return results
+    except Exception:
+        return []
+
 def get_weather_from_ip(ip_address):
     try:
         ip_data = requests.get(f'https://ipinfo.io/{ip_address}/json').json()
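Each of the new scraper helpers returns the same shape (a list of {'title', 'url', 'desc'} dicts) and swallows its own errors into an empty list, which is what lets the /search route below chain them as fallbacks. A minimal sketch of that contract, not part of the diff (the helper name search_with_fallback is illustrative):

# Illustrative only: mirrors the provider-fallback pattern used by /search below.
def search_with_fallback(query, n=10):
    providers = (scrape_startpage, scrape_duckduckgo, scrape_qwant, scrape_wikipedia)
    for provider in providers:
        results = provider(query, n)  # each helper returns [] on any failure
        if results:
            return results
    return []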
@@ -56,7 +100,16 @@ def search():
     if not q:
         return jsonify({'error': 'Missing query'}), 400
     n = request.args.get('n', default=10, type=int)
-
+    results = scrape_startpage(q, n)
+    if not results:
+        results = scrape_duckduckgo(q, n)
+    if not results:
+        results = scrape_qwant(q, n)
+    if not results:
+        results = scrape_wikipedia(q, n)
+    if not results:
+        return jsonify({'error': 'All search providers failed'}), 500
+    return jsonify({'query': q, 'results': results})
 
 @app.route('/weather')
 def weather():
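For a quick check of the new fallback behaviour, the endpoint can be exercised with a plain HTTP client. A hypothetical local smoke test (the host and port are assumptions about how the Space is run, not part of the diff):

import requests

# Assumes the Flask app is reachable locally, e.g. on port 7860.
r = requests.get("http://127.0.0.1:7860/search", params={"q": "flask streaming", "n": 5})
r.raise_for_status()
payload = r.json()
print(payload["query"])
for hit in payload["results"]:
    # Each hit follows the shared scraper schema: title, url, desc.
    print(hit["title"], hit["url"])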
@@ -74,8 +127,7 @@ def health():
 
 def morsify_key():
     prefix = "nvapi-"
-
-    return full_key
+    return prefix + "REDACTED_KEY"
 
 @app.route('/v1/chat/completions', methods=['POST'])
 def chat_completions():
@@ -84,7 +136,6 @@ def chat_completions():
         return jsonify({'error': 'Missing messages'}), 400
     stream = data.get('stream', True)
     api_key = morsify_key()
-    print(f"[DEBUG] Using API key: {api_key}")
     payload = {
         "model": data.get("model", "meta/llama-4-scout-17b-16e-instruct"),
         "messages": data["messages"],
@@ -101,7 +152,8 @@ def chat_completions():
     if stream:
         def generate():
             for line in r.iter_lines():
-                if line:
+                if line:
+                    yield f"data: {line.decode()}\n\n"
         return Response(stream_with_context(generate()), content_type='text/event-stream')
     else:
         return jsonify(r.json())
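Since generate() now yields Server-Sent-Events-style "data: ..." lines, a streaming client has to read the response incrementally. A hypothetical client sketch (the URL, the JSON chunk format, and the [DONE] sentinel are assumptions, not guaranteed by the diff):

import json
import requests

resp = requests.post(
    "http://127.0.0.1:7860/v1/chat/completions",
    json={
        "messages": [{"role": "user", "content": "Hello"}],
        "stream": True,
    },
    stream=True,
)
for raw in resp.iter_lines():
    if not raw:
        continue
    line = raw.decode()
    if line.startswith("data: "):
        chunk = line[len("data: "):]
        if chunk.strip() == "[DONE]":  # upstream APIs often end the stream this way
            break
        print(json.loads(chunk))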