Spaces:
Running
Running
| import json | |
| import logging | |
| import os | |
| from datetime import datetime | |
| import httpx | |
| from fastapi import APIRouter, BackgroundTasks, FastAPI, HTTPException, Request | |
| from fastapi.middleware.gzip import GZipMiddleware | |
| from fastapi.responses import JSONResponse, RedirectResponse | |
| from fastapi.staticfiles import StaticFiles | |
| from config import NY | |
| from core.hf_manager import HFManager | |
| from core.scanner_service import ScannerService | |
| from core.stats_table_service import StatsTableService | |
| from core.utils.paths import FILTERS_FILE, WATCHLIST_FILE | |
# Module-level logger for this server module.
logger = logging.getLogger(__name__)
# FastAPI application; gzip-compress any response body over 1 KB.
app = FastAPI(title="Stock Scanner API")
app.add_middleware(GZipMiddleware, minimum_size=1000)
# Shared service singletons used by every handler below.
# NOTE(review): NY comes from config — presumably the market timezone/calendar; confirm.
scanner_service = ScannerService()
stats_table_service = StatsTableService(scanner_service.data_manager, NY)
# --- Models (Internal) ---
# --- API Router --- all /api endpoints are expected to hang off this router.
api_router = APIRouter(prefix="/api")
async def scan(filter: str = "", symbols: str = "", timeframe: str = "1D", start: str = "", end: str | None = None):
    """Run the scanner with a filter expression over optional symbols and date range.

    Returns a dict with the match count and result rows; any failure is logged
    and surfaced as a 500.
    """
    try:
        matches = scanner_service.get_scan_results(filter, symbols, timeframe, start, end)
        return {"count": len(matches), "results": matches}
    except Exception as e:
        logger.error(f"Error in scan: {e}")
        raise HTTPException(status_code=500, detail=str(e)) from e
async def scan_calendar(filter: str = "", year: int = 2026, month: int = 1, symbols: str = ""):
    """Return per-day scan hit data for one calendar month.

    Failures are logged and converted to a 500 response.
    """
    try:
        calendar_data = scanner_service.get_scan_calendar_data(filter, year, month, symbols)
        return {"data": calendar_data, "year": year, "month": month}
    except Exception as e:
        logger.error(f"Error in scan_calendar: {e}")
        raise HTTPException(status_code=500, detail=str(e)) from e
async def get_gainers(timeframe: str = "today"):
    """Return top gainers for one of the supported windows.

    Unknown timeframe values silently fall back to "today".
    """
    if timeframe not in ("today", "5d", "1m"):
        timeframe = "today"
    try:
        gainers = scanner_service.get_top_gainers(timeframe)
        return {"gainers": gainers, "count": len(gainers), "cachedAt": datetime.now().isoformat()}
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e)) from e
async def get_finviz_signals(signal: str = "Top Gainers"):
    """Return Finviz screener results for the named signal."""
    try:
        rows = scanner_service.get_finviz_signals(signal)
        return {"results": rows, "count": len(rows), "cachedAt": datetime.now().isoformat()}
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e)) from e
async def get_tradingview_signals(signal: str = "premarket_gainers"):
    """Return TradingView screener results for the named signal."""
    try:
        rows = scanner_service.get_tradingview_signals(signal)
        return {"results": rows, "count": len(rows), "cachedAt": datetime.now().isoformat()}
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e)) from e
async def get_watchlist(
    action: str = "load", symbol: str = "", data: str | None = None, background_tasks: BackgroundTasks = None
):
    """Load or mutate the watchlist persisted in WATCHLIST_FILE.

    Actions: "load" (default, read-only), "add"/"remove" a single symbol,
    "save" replace the list with the JSON in `data`, "clear" empty it.
    On any mutation the file is rewritten and an upload to the HF dataset
    repo is scheduled (when a BackgroundTasks instance is provided).
    Returns the resulting watchlist.
    """
    watchlist = []
    if WATCHLIST_FILE.exists():
        with open(WATCHLIST_FILE) as f:
            watchlist = json.load(f)
    symbol = symbol.upper()
    modified = False
    if action == "add":
        # Skip duplicates; only append when the symbol is non-empty and new.
        if symbol and not any(s["symbol"] == symbol for s in watchlist):
            watchlist.append({"symbol": symbol, "addedAt": datetime.now().isoformat()})
            modified = True
    elif action == "remove":
        watchlist = [s for s in watchlist if s["symbol"] != symbol]
        modified = True
    elif action == "save" and data:
        watchlist = json.loads(data)
        modified = True
    elif action == "clear":
        watchlist = []
        modified = True
    if modified:
        with open(WATCHLIST_FILE, "w") as f:
            json.dump(watchlist, f)
        # Bug fix: background_tasks defaults to None — the original called
        # .add_task unconditionally and crashed on direct calls without it.
        if background_tasks is not None:
            background_tasks.add_task(HFManager().upload_file, WATCHLIST_FILE)
    return {"watchlist": watchlist}
async def get_filters(
    action: str = "load", name: str = "", expression: str = "", background_tasks: BackgroundTasks = None
):
    """Load, seed, or save named scan filters persisted in FILTERS_FILE.

    action="load" returns the stored filters, seeding the file with defaults
    on first run. action="save" upserts the (name, expression) pair, rewrites
    the file, and schedules an upload to the HF dataset repo when a
    BackgroundTasks instance is provided. Returns the resulting filter list.
    """
    default_filters = [
        {
            "name": "Liquidity Trap",
            "expression": "range_pct > 0.6 and volume / 7 > volume[1] and rel_vol > 10 and volume > 10_000_000 sort date desc",
        },
        {"name": "52-Week Breakout", "expression": "close > max(close, 252) and volume > 1_000_000"},
    ]
    if FILTERS_FILE.exists():
        with open(FILTERS_FILE) as f:
            filters = json.load(f)
    else:
        # First run: seed the file with the default filters.
        filters = default_filters
        with open(FILTERS_FILE, "w") as f:
            json.dump(filters, f)
    if action == "save" and name and expression:
        # Upsert by filter name.
        for existing in filters:
            if existing["name"] == name:
                existing["expression"] = expression
                break
        else:
            filters.append({"name": name, "expression": expression})
        with open(FILTERS_FILE, "w") as f:
            json.dump(filters, f)
        # Bug fix: background_tasks defaults to None — the original called
        # .add_task unconditionally and crashed on direct calls without it.
        if background_tasks is not None:
            background_tasks.add_task(HFManager().upload_file, FILTERS_FILE)
    return {"filters": filters}
async def get_reverse_splits(symbol: str | None = None, symbols: str | None = None, days: int = 90):
    """Return reverse-split history for one symbol or a comma-separated batch.

    When `symbols` is supplied it wins over `symbol`; records from each ticker
    are concatenated. Upstream failures become 500 responses.
    """
    try:
        if not symbols:
            frame = scanner_service.data_manager.get_reverse_splits(symbol, days)
            return {"count": len(frame), "reverse_splits": frame.to_dict(orient="records")}
        tickers = [part.strip().upper() for part in symbols.split(",") if part.strip()]
        combined = []
        for ticker in tickers:
            frame = scanner_service.data_manager.get_reverse_splits(ticker, days)
            combined.extend(frame.to_dict(orient="records"))
        return {"count": len(combined), "reverse_splits": combined}
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e)) from e
async def get_stock_data(symbol: str, data_type: str = "summary"):
    """Fetch cached (or, for data_type="refresh", freshly fetched) symbol data.

    Raises 400 for an unknown data_type and 500 for upstream failures.
    """
    symbol = symbol.upper()
    try:
        if data_type in ("summary", "catalysts", "dilution", "filings", "insider", "fundamentals"):
            result = scanner_service.get_stock_data(symbol)
        elif data_type == "refresh":
            result = scanner_service.get_stock_data_no_cache(symbol)
        else:
            raise HTTPException(status_code=400, detail=f"Unknown data type: {data_type}")
        return result
    except HTTPException:
        # Bug fix: the deliberate 400 above was previously swallowed by the
        # generic handler below and re-raised as a 500. Let it propagate as-is.
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e)) from e
async def get_stock_data_batch(symbols: str):
    """Return summary data for up to 20 comma-separated symbols."""
    tickers = [part.strip().upper() for part in symbols.split(",") if part.strip()]
    try:
        payload = {ticker: scanner_service.get_stock_data(ticker) for ticker in tickers[:20]}
        return {"count": len(payload), "data": payload}
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e)) from e
async def get_stock_catalysts(symbol: str):
    """Aggregate catalyst data for one symbol from Finviz, SEC filings, and split history.

    Fans four independent lookups out over a small thread pool, then merges
    them into a single payload (catalysts, company profile, price/volume
    metrics, corporate actions). Any failure is surfaced as a 500.
    """
    # NOTE(review): this logic was lifted from server.py; it arguably belongs in
    # ScannerService, but is re-implemented here to avoid modifying that service.
    import concurrent.futures

    symbol = symbol.upper()
    try:
        with concurrent.futures.ThreadPoolExecutor(max_workers=4) as executor:
            # Independent network/DB lookups run concurrently.
            catalyst_future = executor.submit(scanner_service.finviz.fetch_catalyst, symbol)
            fundamentals_future = executor.submit(scanner_service.finviz.fetch_fundamentals, symbol)
            sec_future = executor.submit(lambda: scanner_service.sec.fetch_key_filings(symbol, days=365))

            def get_rs(s):
                # Most recent reverse split within the last two years, if any.
                df = scanner_service.data_manager.get_reverse_splits(s, days=730)
                if df is not None and not df.empty:
                    row = df.iloc[0]
                    return {
                        "ex_date": row.get("ex_date"),
                        "ratio": row.get("ratio", ""),
                        "symbol": row.get("symbol", s),
                    }
                return None

            split_future = executor.submit(get_rs, symbol)
            # Bound each lookup to 8s so one slow upstream cannot hang the request.
            finviz_catalyst = catalyst_future.result(timeout=8)
            finviz_overview = fundamentals_future.result(timeout=8)
            sec_filings = sec_future.result(timeout=8)
            reverse_split = split_future.result(timeout=8)
            # Trim filing lists: top 5 offerings, and warrant-related material
            # filings picked out of the 10 most recent.
            offerings = sec_filings.get("offerings", [])[:5]
            warrants = [f for f in sec_filings.get("material", [])[:10] if "warrant" in f.get("description", "").lower()]
            return {
                "catalysts": {"finviz": finviz_catalyst},
                "company": {
                    "sector": finviz_overview.get("sector"),
                    "industry": finviz_overview.get("industry"),
                    "country": finviz_overview.get("country"),
                    "employees": finviz_overview.get("employees"),
                },
                "metrics": {
                    "price": finviz_overview.get("price"),
                    "volume": finviz_overview.get("volume"),
                    "avg_volume": finviz_overview.get("avg_volume"),
                    "market_cap": finviz_overview.get("market_cap"),
                    "float_shares": finviz_overview.get("float_shares"),
                    "rel_volume": finviz_overview.get("rel_volume"),
                },
                "corporate_actions": {
                    "last_reverse_split": reverse_split,
                    "offerings": offerings,
                    "warrants": warrants[:5],
                },
            }
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e)) from e
async def backtest(
    symbols: str = "",
    candidates: str = "",
    entry: str = "time == '09:30'",
    exit: str = "time == '16:00'",
    offset: int = 0,
):
    """Run a backtest with the given entry/exit expressions and return its report.

    Upstream failures become 500 responses.
    """
    try:
        report = scanner_service.run_backtest(symbols, candidates, entry, exit, offset)
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e)) from e
    return report
async def calculate_stats(data: dict):
    """Run the stats-table analysis over a caller-supplied candidate list.

    Expects {"candidates": [...], "max_offset": int} (falls back to "offset",
    then 2). Returns 400 when no candidates are given, 500 on analysis errors.
    """
    candidates = data.get("candidates", [])
    max_offset = data.get("max_offset", data.get("offset", 2))
    if not candidates:
        raise HTTPException(status_code=400, detail="No candidates provided")
    try:
        return stats_table_service.analyze(candidates, max_offset=max_offset)
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e)) from e
| # ------------------------------------------------------------------ | |
async def test_connectivity():
    """Diagnostics endpoint: report key env vars and probe remote parquet access via DuckDB."""
    import sys

    import duckdb

    report = {}
    report["env"] = {
        "SPACE_ID": os.getenv("SPACE_ID"),
        "HF_TOKEN": "set" if os.getenv("HF_TOKEN") else "not set",
        "PYTHON_VERSION": sys.version,
    }
    url = "https://huggingface.co/datasets/Arrechenash/stock-scanner-data/resolve/main/alpaca_merged.parquet"
    try:
        # httpfs lets DuckDB read the parquet straight over HTTPS.
        duckdb.execute("INSTALL httpfs; LOAD httpfs;")
        first_row = duckdb.query(f"SELECT * FROM '{url}' LIMIT 1").fetchone()
        report["duckdb"] = {"status": "ok", "sample": str(first_row[0])}
    except Exception as e:
        report["duckdb"] = {"status": "error", "error": str(e)}
    return report
async def hf_status():
    """Report the HF dataset repo, token presence, and last local config sync time."""
    hf = HFManager()
    if FILTERS_FILE.exists():
        # Filters-file mtime is used as a proxy for the last sync time.
        last_sync = datetime.fromtimestamp(os.path.getmtime(FILTERS_FILE)).isoformat()
    else:
        last_sync = "Never"
    return {"repo": hf.repo_id, "token_set": hf.token is not None, "last_sync": last_sync}
async def hf_push_config():
    """Push the local config files to the HF dataset repo; report success."""
    pushed = HFManager().push_config()
    return {"success": pushed}
async def hf_sync(background_tasks: BackgroundTasks):
    """Kick off a full data sync (bars, enrichment, parquet upload) in the background.

    Responds immediately with {"status": "started"}; progress and failures are
    only visible in the logs.
    """

    def _full_sync():
        logger.info("Starting background full sync...")
        try:
            # Heavy project imports stay local so module import remains cheap.
            from core.data_enricher import DataEnricher
            from core.data_sync import DataSync

            DataSync().sync_bars(window_years=2)
            DataEnricher().enrich()
            merged_path = "data/alpaca_merged.parquet"
            if os.path.exists(merged_path):
                HFManager().upload_file(merged_path, "alpaca_merged.parquet")
            logger.info("Background full sync COMPLETED.")
        except Exception as e:
            logger.error(f"Background sync failed: {e}")

    background_tasks.add_task(_full_sync)
    return {"status": "started"}
| # SECURE PROXY REPLACEMENT | |
async def yahoo_proxy(url: str):
    """Restricted JSON proxy: only forwards GET requests to query1.finance.yahoo.com.

    Any other URL is rejected with 403; upstream/parse failures become 500s.
    """
    allowed_prefix = "https://query1.finance.yahoo.com/"
    if not url.startswith(allowed_prefix):
        raise HTTPException(status_code=403, detail="Proxy restricted to Yahoo Finance")
    async with httpx.AsyncClient() as client:
        try:
            upstream = await client.get(url, headers={"User-Agent": "Mozilla/5.0"}, timeout=10)
            return JSONResponse(content=upstream.json(), status_code=upstream.status_code)
        except Exception as e:
            raise HTTPException(status_code=500, detail=str(e)) from e
# Attach all /api routes to the application.
# NOTE(review): no @api_router route decorators are visible in this chunk —
# confirm the handlers above are registered on api_router elsewhere (or that
# the decorators were lost when this file was extracted).
app.include_router(api_router)
# --- Static Files & Redirects ---
async def root():
    """Send the bare root URL to the scan page."""
    target = "/scan"
    return RedirectResponse(url=target)
async def scan_page():
    """Redirect to the static scan page."""
    target = "/static/scan.html"
    return RedirectResponse(url=target)
async def gainers_page():
    """Redirect to the static gainers page."""
    target = "/static/gainers.html"
    return RedirectResponse(url=target)
async def analyze_page(request: Request):
    """Redirect to the analyze page, forwarding an optional ?symbol= query param."""
    symbol = request.query_params.get("symbol")
    target = f"/static/analyze.html?symbol={symbol}" if symbol else "/static/analyze.html"
    return RedirectResponse(url=target)
async def watchlist_page():
    """Redirect to the static watchlist page."""
    target = "/static/watchlist.html"
    return RedirectResponse(url=target)
async def settings_page():
    """Redirect to the static settings page."""
    target = "/static/settings.html"
    return RedirectResponse(url=target)
# Serve static files from the static directory
app.mount("/static", StaticFiles(directory="static"), name="static")
# Map /js and /css to static/js and static/css for compatibility with pages
# that reference assets by absolute /js and /css paths.
app.mount("/js", StaticFiles(directory="static/js"), name="js")
app.mount("/css", StaticFiles(directory="static/css"), name="css")
async def favicon():
    """Serve the favicon by redirecting into the static mount."""
    target = "/static/favicon.ico"
    return RedirectResponse(url=target)