# searchengine / app.py
# Source: Hugging Face Space "searchengine" by GamerC0der, commit c1df642 ("Update app.py").
import os, time, json, base64, requests
from bs4 import BeautifulSoup
from flask import Flask, request, jsonify, Response, stream_with_context
from flask_cors import CORS

# NOTE(review): os, json and base64 are not referenced anywhere in this file —
# possibly leftovers from an earlier revision; verify before removing.

# Flask app with permissive CORS so browser clients on any origin can call
# the search/weather/chat endpoints.
app = Flask(__name__)
CORS(app)
def scrape_startpage(query, n=10):
    """Scrape the first *n* web results for *query* from Startpage.

    Returns a list of ``{'title', 'url', 'desc'}`` dicts, or ``[]`` on any
    failure (network error, HTTP error, page-layout change) so the /search
    route can fall through to the next provider.
    """
    s = requests.Session()
    # Browser-like headers; Startpage serves a different/blocked page to
    # obvious bot user agents.
    s.headers.update({
        'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7)',
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
        'Accept-Language': 'en-US,en;q=0.5',
        'Accept-Encoding': 'gzip, deflate',
        'Connection': 'keep-alive'
    })
    try:
        time.sleep(1)  # crude rate-limit to reduce the chance of being blocked
        r = s.get(
            'https://www.startpage.com/sp/search',
            params={'query': query, 'cat': 'web', 'pl': 'opensearch'},
            # Fix: the original had no timeout, so a hung connection would
            # block the serving worker indefinitely.
            timeout=15,
        )
        r.raise_for_status()
        soup = BeautifulSoup(r.content, 'html.parser')
        results = []
        for c in soup.find_all('div', class_='result')[:n]:
            t = c.find('a', class_='result-title')
            if not t:
                continue
            d = c.find('p', class_='result-description') or c.find('span', class_='result-description')
            results.append({
                'title': t.get_text(strip=True),
                'url': t.get('href'),
                'desc': d.get_text(strip=True) if d else '',
            })
        return results
    except Exception:
        # Best-effort by design: an empty list tells /search to try the
        # next provider.
        return []
def scrape_duckduckgo(query, n=10):
    """Scrape the first *n* results for *query* from DuckDuckGo's HTML endpoint.

    Returns a list of ``{'title', 'url', 'desc'}`` dicts, or ``[]`` on any
    failure so the /search route can fall through to the next provider.
    """
    try:
        time.sleep(1)  # crude rate-limit
        resp = requests.post(
            "https://html.duckduckgo.com/html/",
            data={'q': query},
            headers={'User-Agent': 'Mozilla/5.0'},
            # Fix: no timeout in the original — a hung connection would
            # block the serving worker indefinitely.
            timeout=15,
        )
        resp.raise_for_status()
        soup = BeautifulSoup(resp.text, 'html.parser')
        results = []
        for a in soup.select('.result__a')[:n]:
            # Fix: find_parent() may return None; the original then raised
            # AttributeError, which the broad except turned into an empty
            # result set, silently discarding every good result.
            parent = a.find_parent('div', class_='result')
            snippet_elem = parent.select_one('.result__snippet') if parent else None
            results.append({
                'title': a.get_text(strip=True),
                'url': a.get('href'),
                'desc': snippet_elem.get_text(strip=True) if snippet_elem else '',
            })
        return results
    except Exception:
        # Best-effort: /search treats [] as "provider failed".
        return []
def scrape_qwant(query, n=10):
    """Query Qwant's JSON web-search API for up to *n* results.

    Returns a list of ``{'title', 'url', 'desc'}`` dicts, or ``[]`` on any
    failure so the /search route can fall through to the next provider.
    """
    try:
        time.sleep(1)  # crude rate-limit
        r = requests.get(
            "https://api.qwant.com/api/search/web",
            params={"q": query, "count": n, "t": "web"},
            # Fix: no timeout in the original — a hung connection would
            # block the serving worker indefinitely.
            timeout=15,
        )
        r.raise_for_status()
        data = r.json()
        items = data.get("data", {}).get("result", {}).get("items", [])
        return [
            {
                'title': item.get('title', ''),
                'url': item.get('url', ''),
                'desc': item.get('desc', ''),
            }
            for item in items
        ]
    except Exception:
        # Best-effort: /search treats [] as "provider failed".
        return []
def scrape_wikipedia(query, n=10):
    """Search English Wikipedia via the MediaWiki API.

    Returns up to *n* ``{'title', 'url', 'desc'}`` dicts; 'desc' is the raw
    API snippet (it may contain HTML highlight markup). Returns ``[]`` on
    any failure — this is the last provider in the /search fallback chain.
    """
    try:
        r = requests.get(
            "https://en.wikipedia.org/w/api.php",
            params={
                "action": "query",
                "list": "search",
                "format": "json",
                "srsearch": query,
                "srlimit": n,
            },
            # Fix: no timeout in the original — a hung connection would
            # block the serving worker indefinitely.
            timeout=15,
        )
        r.raise_for_status()
        data = r.json()
        results = []
        for item in data['query']['search']:
            # Canonical article URLs use underscores in place of spaces.
            url = f"https://en.wikipedia.org/wiki/{item['title'].replace(' ', '_')}"
            results.append({'title': item['title'], 'url': url, 'desc': item['snippet']})
        return results
    except Exception:
        return []
def get_weather_from_ip(ip_address):
    """Geolocate *ip_address* via ipinfo.io and fetch a US NWS forecast.

    Returns a dict with 'ip', 'location' and 'weather' keys on success,
    or ``{'error': <message>}`` on any failure (missing location, non-US
    IP, upstream API errors). The /weather route maps an 'error' key to
    HTTP 400.
    """
    try:
        # Fix: all three upstream calls lacked timeouts; any one hanging
        # would block the serving worker indefinitely.
        ip_data = requests.get(f'https://ipinfo.io/{ip_address}/json', timeout=10).json()
        loc = ip_data.get('loc')
        if not loc:
            return {'error': 'No location'}
        lat, lon = loc.split(',')
        # api.weather.gov only covers the United States.
        if ip_data.get('country') != 'US':
            return {'error': 'Weather only available in US'}
        points = requests.get(f'https://api.weather.gov/points/{lat},{lon}', timeout=10).json()
        forecast = requests.get(points['properties']['forecast'], timeout=10).json()
        # First forecast period = the current/nearest period.
        current = forecast['properties']['periods'][0]
        return {
            'ip': ip_address,
            'location': {
                'city': ip_data.get('city', 'Unknown'),
                'country': ip_data.get('country'),
                'latitude': lat,
                'longitude': lon,
            },
            'weather': {
                'temperature': current['temperature'],
                'description': current['detailedForecast'],
            },
        }
    except Exception as e:
        # Surface the failure reason to the caller as data, not an exception.
        return {'error': str(e)}
@app.route('/search')
def search():
    """Meta-search endpoint.

    Tries each provider in priority order and returns the first non-empty
    result set; responds 400 on a missing query and 500 when every
    provider comes back empty.
    """
    q = request.args.get('q')
    if not q:
        return jsonify({'error': 'Missing query'}), 400
    n = request.args.get('n', default=10, type=int)
    providers = (scrape_startpage, scrape_duckduckgo, scrape_qwant, scrape_wikipedia)
    for provider in providers:
        results = provider(q, n)
        if results:
            return jsonify({'query': q, 'results': results})
    return jsonify({'error': 'All search providers failed'}), 500
@app.route('/weather')
def weather():
    """Weather-by-IP endpoint: 400 on missing ip or lookup failure."""
    ip = request.args.get('ip')
    if not ip:
        return jsonify({'error': 'Missing ip'}), 400
    payload = get_weather_from_ip(ip)
    # get_weather_from_ip signals failure via an 'error' key.
    status = 400 if 'error' in payload else 200
    return jsonify(payload), status
@app.route('/')
def health():
    """Liveness probe for the API root."""
    payload = {'status': 'running', 'message': 'GridLock search API'}
    return jsonify(payload)
def morsify_key():
    """Assemble the NVIDIA API key (the secret part is redacted in this copy)."""
    parts = ("nvapi-", "REDACTED_KEY")
    return "".join(parts)
@app.route('/v1/chat/completions', methods=['POST'])
def chat_completions():
    """OpenAI-style chat-completions proxy to NVIDIA's integrate API.

    Fills in defaults for missing sampling parameters, then either relays
    the upstream SSE stream or returns the upstream JSON body. 400 when
    'messages' is absent; 500 on proxy-side failures.
    """
    data = request.get_json(force=True)
    if not data or 'messages' not in data:
        return jsonify({'error': 'Missing messages'}), 400
    stream = data.get('stream', True)  # streaming is the default
    api_key = morsify_key()
    payload = {
        "model": data.get("model", "meta/llama-4-scout-17b-16e-instruct"),
        "messages": data["messages"],
        "max_tokens": data.get("max_tokens", 512),
        "temperature": data.get("temperature", 1.0),
        "top_p": data.get("top_p", 1.0),
        "frequency_penalty": data.get("frequency_penalty", 0.0),
        "presence_penalty": data.get("presence_penalty", 0.0),
        "stream": stream
    }
    headers = {
        "Authorization": f"Bearer {api_key}",
        "Accept": "text/event-stream" if stream else "application/json",
    }
    try:
        r = requests.post(
            "https://integrate.api.nvidia.com/v1/chat/completions",
            headers=headers, json=payload, stream=stream, timeout=120,
        )
        if stream:
            def generate():
                for line in r.iter_lines():
                    if not line:
                        continue
                    decoded = line.decode()
                    # Fix: upstream SSE lines already begin with "data: ";
                    # the original prefixed "data: " unconditionally,
                    # emitting malformed "data: data: ..." events.
                    if not decoded.startswith('data:'):
                        decoded = f"data: {decoded}"
                    yield decoded + "\n\n"
            return Response(stream_with_context(generate()), content_type='text/event-stream')
        # Fix: propagate the upstream status code instead of always 200 so
        # NVIDIA-side errors (401/429/...) are visible to callers.
        return jsonify(r.json()), r.status_code
    except Exception as e:
        return jsonify({'error': str(e)}), 500
if __name__ == '__main__':
    # Port 7860 is the conventional Hugging Face Spaces port; 0.0.0.0 exposes
    # the dev server on all interfaces.
    # NOTE(review): debug=True enables the Werkzeug debugger/reloader — fine
    # locally, but it should be disabled on a publicly reachable deployment.
    app.run(host='0.0.0.0', port=7860, debug=True)