Spaces:
Sleeping
Sleeping
| from datetime import timedelta | |
| import json | |
| from django.contrib.auth import authenticate, login | |
| from django.contrib.auth.models import User | |
| from django.contrib.auth.password_validation import validate_password | |
| from django.core.exceptions import ValidationError as DjangoValidationError | |
| from django.utils import timezone | |
| import httpx | |
| from rest_framework import status | |
| from rest_framework.decorators import api_view | |
| from rest_framework.response import Response | |
| from .models import Screener, ScreenerSymbolCache | |
| from apps.copilot.services import run_chat_completion | |
def health_check(request):
    """Liveness probe: report that the service is up, with its identifier."""
    payload = {"status": "ok", "service": "fpna-copilot-api"}
    return Response(payload)
def login_user(request):
    """Authenticate a username/password pair and open a Django session.

    Returns 400 on missing credentials, 401 on a failed check, and a small
    user summary on success.
    """
    supplied_username = str(request.data.get("username", "")).strip()
    supplied_password = str(request.data.get("password", ""))
    if not (supplied_username and supplied_password):
        return Response(
            {"detail": "Username and password are required."},
            status=status.HTTP_400_BAD_REQUEST,
        )
    account = authenticate(request, username=supplied_username, password=supplied_password)
    if account is None:
        return Response(
            {"detail": "Invalid username or password."},
            status=status.HTTP_401_UNAUTHORIZED,
        )
    login(request, account)
    return Response(
        {
            "status": "ok",
            "message": "Login successful.",
            "user": {"id": account.id, "username": account.username, "email": account.email},
        }
    )
def register_user(request):
    """Create a new user account and log it in.

    Validates presence of username/password, optional password confirmation,
    username/email uniqueness, and Django password strength rules. Returns
    201 with a user summary on success, 400 otherwise.
    """
    new_username = str(request.data.get("username", "")).strip()
    new_password = str(request.data.get("password", ""))
    confirmation = str(request.data.get("confirmPassword", ""))
    new_email = str(request.data.get("email", "")).strip()

    if not (new_username and new_password):
        return Response(
            {"detail": "Username and password are required."},
            status=status.HTTP_400_BAD_REQUEST,
        )
    # Confirmation is optional; only compared when the client sent one.
    if confirmation and new_password != confirmation:
        return Response(
            {"detail": "Passwords do not match."},
            status=status.HTTP_400_BAD_REQUEST,
        )
    if User.objects.filter(username=new_username).exists():
        return Response(
            {"detail": "Username is already taken."},
            status=status.HTTP_400_BAD_REQUEST,
        )
    if new_email and User.objects.filter(email=new_email).exists():
        return Response(
            {"detail": "Email is already in use."},
            status=status.HTTP_400_BAD_REQUEST,
        )
    try:
        validate_password(new_password)
    except DjangoValidationError as exc:
        return Response({"detail": " ".join(exc.messages)}, status=status.HTTP_400_BAD_REQUEST)

    account = User.objects.create_user(
        username=new_username,
        password=new_password,
        email=new_email,
    )
    login(request, account)
    return Response(
        {
            "status": "ok",
            "message": "Registration successful.",
            "user": {"id": account.id, "username": account.username, "email": account.email},
        },
        status=status.HTTP_201_CREATED,
    )
| def _serialize_screener(screener): | |
| return { | |
| "id": screener.id, | |
| "userId": screener.user_id, | |
| "name": screener.name, | |
| "filters": screener.filters or {}, | |
| "columns": screener.columns or [], | |
| "selectedSymbols": screener.selected_symbols or [], | |
| "createdAt": screener.created_at.isoformat(), | |
| "updatedAt": screener.updated_at.isoformat(), | |
| } | |
def _resolve_user(request):
    """Identify the acting user.

    Prefers the authenticated session user; otherwise accepts an explicit
    user id from the X-User-Id header, the userId query param, or the JSON
    body (in that priority order). Returns a User or None.
    """
    if request.user and request.user.is_authenticated:
        return request.user
    from_header = request.headers.get("X-User-Id")
    from_query = request.query_params.get("userId")
    body = request.data
    from_body = body.get("userId") if isinstance(body, dict) else None
    candidate = from_header or from_query or from_body
    if not candidate:
        return None
    try:
        user_pk = int(candidate)
    except (TypeError, ValueError):
        return None
    return User.objects.filter(id=user_pk).first()
def _require_user(request):
    """Return (user, None) when a user can be resolved, else (None, 401 Response)."""
    resolved = _resolve_user(request)
    if resolved is None:
        return None, Response(
            {"detail": "Authentication required. Pass a valid session or X-User-Id header."},
            status=status.HTTP_401_UNAUTHORIZED,
        )
    return resolved, None
def screeners_collection(request):
    """GET: list the caller's screeners. Otherwise: create one after validation."""
    user, error_response = _require_user(request)
    if error_response:
        return error_response

    if request.method == "GET":
        rows = [_serialize_screener(s) for s in Screener.objects.filter(user=user)]
        return Response(rows)

    name = str(request.data.get("name", "")).strip()
    if not name:
        return Response({"detail": "Screener name is required."}, status=status.HTTP_400_BAD_REQUEST)

    filters = request.data.get("filters", {})
    columns = request.data.get("columns", [])
    selected_symbols = request.data.get("selectedSymbols", [])
    # Explicit JSON nulls for filters/columns are treated as empty containers.
    if filters is None:
        filters = {}
    if columns is None:
        columns = []
    if not isinstance(filters, dict):
        return Response({"detail": "'filters' must be a JSON object."}, status=status.HTTP_400_BAD_REQUEST)
    if not isinstance(columns, list):
        return Response({"detail": "'columns' must be a JSON array."}, status=status.HTTP_400_BAD_REQUEST)
    if not isinstance(selected_symbols, list):
        return Response({"detail": "'selectedSymbols' must be a JSON array."}, status=status.HTTP_400_BAD_REQUEST)

    # Upper-case and deduplicate symbols while preserving first-seen order.
    cleaned_symbols = []
    for raw in selected_symbols:
        ticker = str(raw).strip().upper()
        if ticker and ticker not in cleaned_symbols:
            cleaned_symbols.append(ticker)

    created = Screener.objects.create(
        user=user,
        name=name,
        filters=filters,
        columns=columns,
        selected_symbols=cleaned_symbols,
    )
    return Response(_serialize_screener(created), status=status.HTTP_201_CREATED)
def screener_detail(request, screener_id):
    """GET, partial-update, or DELETE a single screener owned by the caller."""
    user, error_response = _require_user(request)
    if error_response:
        return error_response
    screener = Screener.objects.filter(user=user, id=screener_id).first()
    if screener is None:
        return Response({"detail": "Screener not found."}, status=status.HTTP_404_NOT_FOUND)

    if request.method == "GET":
        return Response(_serialize_screener(screener))
    if request.method == "DELETE":
        screener.delete()
        return Response(status=status.HTTP_204_NO_CONTENT)

    # Partial update: only keys present (non-null) in the body are applied.
    name = request.data.get("name")
    filters = request.data.get("filters")
    columns = request.data.get("columns")
    selected_symbols = request.data.get("selectedSymbols")

    if name is not None:
        cleaned_name = str(name).strip()
        if not cleaned_name:
            return Response({"detail": "Screener name cannot be empty."}, status=status.HTTP_400_BAD_REQUEST)
        screener.name = cleaned_name
    if filters is not None:
        if not isinstance(filters, dict):
            return Response({"detail": "'filters' must be a JSON object."}, status=status.HTTP_400_BAD_REQUEST)
        screener.filters = filters
    if columns is not None:
        if not isinstance(columns, list):
            return Response({"detail": "'columns' must be a JSON array."}, status=status.HTTP_400_BAD_REQUEST)
        screener.columns = columns
    if selected_symbols is not None:
        if not isinstance(selected_symbols, list):
            return Response({"detail": "'selectedSymbols' must be a JSON array."}, status=status.HTTP_400_BAD_REQUEST)
        cleaned_symbols = []
        for raw in selected_symbols:
            ticker = str(raw).strip().upper()
            if ticker and ticker not in cleaned_symbols:
                cleaned_symbols.append(ticker)
        screener.selected_symbols = cleaned_symbols

    # Changing filters or the symbol selection invalidates cached run results.
    if filters is not None or selected_symbols is not None:
        screener.cached_results = None
        screener.last_run_at = None
    screener.save()
    return Response(_serialize_screener(screener))
| def _fetch_fmp_json(endpoint, params=None): | |
| raise NotImplementedError("Legacy REST helper has been replaced by MCP helper.") | |
class FmpMcpError(Exception):
    """Base exception for FMP MCP helper failures."""
    pass
class FmpMcpAccessError(FmpMcpError):
    """FMP MCP failure that specifically indicates an access problem."""
    pass
# Static option lists served verbatim by `screener_filter_options`; no
# provider is queried for these. The FMP_* prefix is a holdover from the
# retired Financial Modeling Prep integration (see FmpMcpError above).
FMP_DEFAULT_SECTORS = [
    "Technology",
    "Financial Services",
    "Healthcare",
    "Consumer Cyclical",
    "Communication Services",
    "Industrials",
    "Consumer Defensive",
    "Energy",
    "Utilities",
    "Real Estate",
    "Basic Materials",
]
# ISO-style country codes offered in the filter UI.
FMP_DEFAULT_COUNTRIES = [
    "US",
    "CA",
    "GB",
    "DE",
    "FR",
    "JP",
    "IN",
    "AU",
    "SG",
]
# Exchange codes offered in the filter UI.
FMP_DEFAULT_EXCHANGES = [
    "NASDAQ",
    "NYSE",
    "AMEX",
    "TSX",
    "LSE",
    "EURONEXT",
]
# TradingView stock-screener scan endpoint ("america" market) plus the
# browser-like query params and headers sent with every request.
TRADINGVIEW_SCAN_URL = "https://scanner.tradingview.com/america/scan"
TRADINGVIEW_SCAN_PARAMS = {"label-product": "screener-stock"}
TRADINGVIEW_SCAN_HEADERS = {
    "Accept": "application/json",
    "Accept-Language": "en-US,en;q=0.9",
    "Cache-Control": "no-cache",
    "Content-Type": "application/json;charset=UTF-8",
    "Origin": "https://in.tradingview.com",
    "Pragma": "no-cache",
    "Referer": "https://in.tradingview.com/",
    # Impersonates a desktop Chrome browser; presumably required to avoid
    # bot filtering on the scanner endpoint — TODO confirm.
    "User-Agent": (
        "Mozilla/5.0 (Windows NT 10.0; Win64; x64) "
        "AppleWebKit/537.36 (KHTML, like Gecko) "
        "Chrome/145.0.0.0 Safari/537.36"
    ),
}
# Full column list requested on every TradingView scan; `_tv_value` looks up
# row cells by position via TRADINGVIEW_COLUMN_INDEX, so the order here is
# significant and must match the payload's "columns" array.
TRADINGVIEW_COLUMNS = ["ticker-view","close","type","typespecs","pricescale","minmov","fractional","minmove2","currency","change","Perf.W","Perf.1M","Perf.3M","Perf.6M","Perf.YTD","Perf.Y","Perf.5Y","Perf.10Y","Perf.All","Volatility.W","Volatility.M","premarket_close","premarket_change","premarket_gap","premarket_volume","gap","volume","volume_change","postmarket_close","postmarket_change","postmarket_volume","market_cap_basic","fundamental_currency_code","Perf.1Y.MarketCap","price_earnings_ttm","price_earnings_growth_ttm","price_sales_current","price_book_fq","price_to_cash_f_operating_activities_ttm","price_free_cash_flow_ttm","price_to_cash_ratio","enterprise_value_current","enterprise_value_to_revenue_ttm","enterprise_value_to_ebit_ttm","enterprise_value_ebitda_ttm","dps_common_stock_prim_issue_fy","dps_common_stock_prim_issue_fq","dividends_yield_current","dividends_yield","dividend_payout_ratio_ttm","dps_common_stock_prim_issue_yoy_growth_fy","continuous_dividend_payout","continuous_dividend_growth","gross_margin_ttm","operating_margin_ttm","pre_tax_margin_ttm","net_margin_ttm","free_cash_flow_margin_ttm","return_on_assets_fq","return_on_equity_fq","return_on_invested_capital_fq","research_and_dev_ratio_ttm","sell_gen_admin_exp_other_ratio_ttm","fiscal_period_current","fiscal_period_end_current","total_revenue_ttm","total_revenue_yoy_growth_ttm","gross_profit_ttm","oper_income_ttm","net_income_ttm","ebitda_ttm","earnings_per_share_diluted_ttm","earnings_per_share_diluted_yoy_growth_ttm","total_assets_fq","total_current_assets_fq","cash_n_short_term_invest_fq","total_liabilities_fq","total_debt_fq","net_debt_fq","total_equity_fq","current_ratio_fq","quick_ratio_fq","debt_to_equity_fq","cash_n_short_term_invest_to_total_debt_fq","cash_f_operating_activities_ttm","cash_f_investing_activities_ttm","cash_f_financing_activities_ttm","free_cash_flow_ttm","neg_capital_expenditures_ttm","revenue_per_share_ttm","earnings_per_share_basic_ttm","operating_cash_flow_per_share_ttm","free_cash_flow_per_share_ttm","ebit_per_share_ttm","ebitda_per_share_ttm","book_value_per_share_fq","total_debt_per_share_fq","cash_per_share_fq","TechRating_1D","TechRating_1D.tr","MARating_1D","MARating_1D.tr","OsRating_1D","OsRating_1D.tr","RSI","Mom","AO","CCI20","Stoch.K","Stoch.D","Candle.3BlackCrows","Candle.3WhiteSoldiers","Candle.AbandonedBaby.Bearish","Candle.AbandonedBaby.Bullish","Candle.Doji","Candle.Doji.Dragonfly","Candle.Doji.Gravestone","Candle.Engulfing.Bearish","Candle.Engulfing.Bullish","Candle.EveningStar","Candle.Hammer","Candle.HangingMan","Candle.Harami.Bearish","Candle.Harami.Bullish","Candle.InvertedHammer","Candle.Kicking.Bearish","Candle.Kicking.Bullish","Candle.LongShadow.Lower","Candle.LongShadow.Upper","Candle.Marubozu.Black","Candle.Marubozu.White","Candle.MorningStar","Candle.ShootingStar","Candle.SpinningTop.Black","Candle.SpinningTop.White","Candle.TriStar.Bearish","Candle.TriStar.Bullish"]
# Reverse lookup: column name -> position within a scan result row vector.
TRADINGVIEW_COLUMN_INDEX = {name: idx for idx, name in enumerate(TRADINGVIEW_COLUMNS)}
# Baseline "filter2" clause sent with every scan. Selects: common stocks,
# preferred stocks, depositary receipts ("dr"), and non-ETF funds — while
# excluding anything tagged "pre-ipo".
TRADINGVIEW_DEFAULT_FILTER2 = {
    "operator": "and",
    "operands": [
        {
            "operation": {
                "operator": "or",
                "operands": [
                    {
                        "operation": {
                            "operator": "and",
                            "operands": [
                                {"expression": {"left": "type", "operation": "equal", "right": "stock"}},
                                {"expression": {"left": "typespecs", "operation": "has", "right": ["common"]}},
                            ],
                        }
                    },
                    {
                        "operation": {
                            "operator": "and",
                            "operands": [
                                {"expression": {"left": "type", "operation": "equal", "right": "stock"}},
                                {"expression": {"left": "typespecs", "operation": "has", "right": ["preferred"]}},
                            ],
                        }
                    },
                    {"operation": {"operator": "and", "operands": [{"expression": {"left": "type", "operation": "equal", "right": "dr"}}]}},
                    {
                        "operation": {
                            "operator": "and",
                            "operands": [
                                {"expression": {"left": "type", "operation": "equal", "right": "fund"}},
                                {"expression": {"left": "typespecs", "operation": "has_none_of", "right": ["etf"]}},
                            ],
                        }
                    },
                ],
            }
        },
        {"expression": {"left": "typespecs", "operation": "has_none_of", "right": ["pre-ipo"]}},
    ],
}
# How long a per-symbol cache row counts as fresh (see _is_cache_fresh).
SYMBOL_CACHE_TTL = timedelta(hours=1)
class TradingViewError(Exception):
    """Raised when a TradingView scan request fails or returns invalid data."""
    pass
# Maps field aliases the LLM may emit onto the canonical camelCase keys used
# in cached symbol rows (see _normalize_ai_condition).
AI_FIELD_MAP = {
    "symbol": "symbol",
    "company": "companyName",
    "company_name": "companyName",
    "name": "companyName",
    "price": "price",
    "volume": "volume",
    "market_cap": "marketCap",
    "marketcap": "marketCap",
    "pe": "pe",
    "p_e": "pe",
    "eps": "eps",
    "change": "changePercentage",
    "change_pct": "changePercentage",
    "change_percentage": "changePercentage",
    "dividend_yield": "dividendYieldTTM",
    "sector": "sector",
    "country": "country",
    "analyst_rating": "analystRating",
}
# Comparison operators accepted from LLM-generated conditions.
AI_ALLOWED_OPERATORS = {"=", "==", "!=", ">", ">=", "<", "<=", "contains", "not_contains"}
| def _extract_json_payload(raw_text): | |
| text = str(raw_text or "").strip() | |
| if not text: | |
| return None | |
| if text.startswith("```"): | |
| text = text.strip("`") | |
| if "\n" in text: | |
| text = text.split("\n", 1)[1] | |
| text = text.rsplit("```", 1)[0].strip() | |
| try: | |
| return json.loads(text) | |
| except json.JSONDecodeError: | |
| pass | |
| start = text.find("{") | |
| end = text.rfind("}") | |
| if start >= 0 and end > start: | |
| candidate = text[start : end + 1] | |
| try: | |
| return json.loads(candidate) | |
| except json.JSONDecodeError: | |
| return None | |
| return None | |
| def _parse_human_number(value): | |
| if isinstance(value, (int, float)): | |
| return float(value) | |
| raw = str(value or "").strip().replace(",", "") | |
| if not raw: | |
| return None | |
| multiplier = 1.0 | |
| suffix = raw[-1].lower() | |
| if suffix == "k": | |
| multiplier = 1e3 | |
| raw = raw[:-1] | |
| elif suffix == "m": | |
| multiplier = 1e6 | |
| raw = raw[:-1] | |
| elif suffix == "b": | |
| multiplier = 1e9 | |
| raw = raw[:-1] | |
| elif suffix == "t": | |
| multiplier = 1e12 | |
| raw = raw[:-1] | |
| raw = raw.replace("%", "") | |
| try: | |
| return float(raw) * multiplier | |
| except ValueError: | |
| return None | |
def _normalize_ai_condition(raw_condition):
    """Validate one LLM-produced condition dict.

    Returns {"field", "operator", "value"} with the field canonicalized via
    AI_FIELD_MAP, or None when the condition is malformed or uses an
    unsupported field/operator.
    """
    if not isinstance(raw_condition, dict):
        return None
    field_key = str(raw_condition.get("field", "")).strip().lower()
    op = str(raw_condition.get("operator", "")).strip().lower()
    if not field_key or op not in AI_ALLOWED_OPERATORS:
        return None
    field = AI_FIELD_MAP.get(field_key, field_key)
    # Only canonical row keys are allowed through.
    if field not in AI_FIELD_MAP.values():
        return None
    return {"field": field, "operator": op, "value": raw_condition.get("value")}
def _compare_value(actual, operator, expected):
    """Apply one screener operator to (actual, expected).

    Substring operators compare case-insensitively; ordering/equality
    operators compare numerically when both sides parse as human-formatted
    numbers, otherwise as lower-cased strings. Unknown operators yield False.
    """
    if operator in {"contains", "not_contains"}:
        found = str(expected or "").lower() in str(actual or "").lower()
        return found if operator == "contains" else not found

    def _apply(lhs, rhs):
        # Shared comparison dispatch for both numeric and string operands.
        if operator in {"=", "=="}:
            return lhs == rhs
        if operator == "!=":
            return lhs != rhs
        if operator == ">":
            return lhs > rhs
        if operator == ">=":
            return lhs >= rhs
        if operator == "<":
            return lhs < rhs
        if operator == "<=":
            return lhs <= rhs
        return False

    lhs_num = _parse_human_number(actual)
    rhs_num = _parse_human_number(expected)
    if lhs_num is not None and rhs_num is not None:
        return _apply(lhs_num, rhs_num)
    return _apply(str(actual or "").lower(), str(expected or "").lower())
def _row_matches_conditions(row_data, conditions, mode):
    """True when the row satisfies the conditions under "and"/"or" *mode*.

    An empty condition list never matches.
    """
    if not conditions:
        return False
    outcomes = [
        _compare_value(row_data.get(cond["field"]), cond["operator"], cond["value"])
        for cond in conditions
    ]
    return any(outcomes) if mode == "or" else all(outcomes)
def _build_ai_conditions_from_prompt(user_prompt):
    """Ask the LLM to translate a free-text prompt into filter conditions.

    Returns (conditions, mode): a list of normalized condition dicts and
    "and"/"or". Falls back to ([], "and") when the reply is unparseable.
    """
    instructions = (
        "You are an API parser for stock screener filters.\n"
        "Convert the user prompt into JSON only, no markdown.\n"
        "Return strict object schema:\n"
        '{ "mode": "and" | "or", "conditions": [ {"field": string, "operator": string, "value": string|number} ] }\n'
        "Allowed fields: symbol, company_name, price, volume, market_cap, pe, eps, change_percentage, dividend_yield, sector, country, analyst_rating.\n"
        "Allowed operators: =, ==, !=, >, >=, <, <=, contains, not_contains.\n"
        f"User prompt: {user_prompt}"
    )
    reply = run_chat_completion(instructions)
    payload = _extract_json_payload(reply.get("content", ""))
    if not isinstance(payload, dict):
        return [], "and"
    mode = "or" if str(payload.get("mode", "and")).strip().lower() == "or" else "and"
    raw_conditions = payload.get("conditions", [])
    if not isinstance(raw_conditions, list):
        raw_conditions = []
    conditions = []
    for entry in raw_conditions:
        normalized = _normalize_ai_condition(entry)
        if normalized:
            conditions.append(normalized)
    return conditions, mode
| def _parse_int(value, default, minimum=None, maximum=None): | |
| try: | |
| parsed = int(value) | |
| except (TypeError, ValueError): | |
| parsed = default | |
| if minimum is not None: | |
| parsed = max(minimum, parsed) | |
| if maximum is not None: | |
| parsed = min(maximum, parsed) | |
| return parsed | |
def _resolve_range(request, default_limit=100, max_limit=200):
    """Derive an (offset, limit) pair from pagination query params.

    'pageSize' overrides 'limit' (defaulting to it); 'offset' overrides the
    page-derived offset computed from 'page'.
    """
    params = request.query_params
    base_limit = _parse_int(params.get("limit"), default_limit, minimum=1, maximum=max_limit)
    limit = _parse_int(params.get("pageSize"), base_limit, minimum=1, maximum=max_limit)
    page = _parse_int(params.get("page"), 1, minimum=1)
    page_offset = (page - 1) * limit
    offset = _parse_int(params.get("offset"), page_offset, minimum=0)
    return offset, limit
def _resolve_sort(request, default_sort_by="market_cap_basic", default_sort_order="desc"):
    """Read sortBy/sortOrder query params, falling back to safe defaults.

    sortBy must be a known TradingView column; sortOrder must be asc/desc.
    """
    sort_by = str(request.query_params.get("sortBy", default_sort_by)).strip()
    sort_order = str(request.query_params.get("sortOrder", default_sort_order)).strip().lower()
    if sort_by not in TRADINGVIEW_COLUMN_INDEX:
        sort_by = default_sort_by
    if sort_order not in ("asc", "desc"):
        sort_order = default_sort_order
    return sort_by, sort_order
def _build_tradingview_scan_payload(extra_filters, offset, limit, sort_by="market_cap_basic", sort_order="desc"):
    """Assemble the POST body for a TradingView /scan request."""
    window = [offset, offset + limit]
    payload = {
        "columns": TRADINGVIEW_COLUMNS,
        "filter": extra_filters,
        "ignore_unknown_fields": False,
        "options": {"lang": "en"},
        "range": window,
        "sort": {"sortBy": sort_by, "sortOrder": sort_order},
        "symbols": {},
        "markets": ["america"],
        "filter2": TRADINGVIEW_DEFAULT_FILTER2,
    }
    return payload
def _scan_tradingview(payload):
    """POST the scan payload to TradingView and return the parsed JSON dict.

    Raises TradingViewError on transport failures, HTTP error statuses, or
    a response body that is not a JSON object.
    """
    try:
        reply = httpx.post(
            TRADINGVIEW_SCAN_URL,
            params=TRADINGVIEW_SCAN_PARAMS,
            headers=TRADINGVIEW_SCAN_HEADERS,
            json=payload,
            timeout=30.0,
        )
        reply.raise_for_status()
        body = reply.json()
    except (httpx.HTTPError, ValueError) as exc:
        # ValueError covers JSON decoding failures from reply.json().
        raise TradingViewError(str(exc)) from exc
    if isinstance(body, dict):
        return body
    raise TradingViewError("TradingView returned an invalid response payload.")
| def _extract_symbol(raw_identifier, row_info): | |
| if isinstance(row_info, dict): | |
| info_symbol = str(row_info.get("name", "")).strip().upper() | |
| if info_symbol: | |
| return info_symbol | |
| raw = str(raw_identifier or "").strip() | |
| if ":" in raw: | |
| return raw.rsplit(":", 1)[-1].strip().upper() | |
| return raw.upper() | |
def _tv_value(row_values, field_name):
    """Pull one named column out of a TradingView row vector.

    Returns None for unknown columns, short rows, non-list rows, and cells
    holding the literal string "None".
    """
    if not isinstance(row_values, list):
        return None
    position = TRADINGVIEW_COLUMN_INDEX.get(field_name)
    if position is None or position >= len(row_values):
        return None
    cell = row_values[position]
    # TradingView encodes missing values as the string "None".
    return None if cell == "None" else cell
def _normalize_tradingview_row(raw_row):
    """Convert one raw TradingView scan row into the API's flat dict shape.

    Returns None when the row is not a dict or no ticker can be resolved.
    Sector/country/relativeVolume are not available from this scan and are
    always None.
    """
    if not isinstance(raw_row, dict):
        return None
    cells = raw_row.get("d") or []
    info = cells[0] if cells and isinstance(cells[0], dict) else {}
    ticker = _extract_symbol(raw_row.get("s"), info)
    if not ticker:
        return None
    logo_info = info.get("logo") if isinstance(info.get("logo"), dict) else {}
    logo_id = str(logo_info.get("logoid") or info.get("logoid") or "").strip()
    exchange_name = str(info.get("exchange", "")).strip()
    normalized = {
        "symbol": ticker,
        "companyName": str(info.get("description", "")).strip() or None,
        "logoId": logo_id or None,
        "logoUrl": f"https://s3-symbol-logo.tradingview.com/{logo_id}.svg" if logo_id else None,
        "price": _tv_value(cells, "close"),
        "changePercentage": _tv_value(cells, "change"),
        "volume": _tv_value(cells, "volume"),
        "relativeVolume": None,
        "marketCap": _tv_value(cells, "market_cap_basic"),
        "pe": _tv_value(cells, "price_earnings_ttm"),
        "eps": _tv_value(cells, "earnings_per_share_diluted_ttm"),
        "epsGrowthTTM": _tv_value(cells, "earnings_per_share_diluted_yoy_growth_ttm"),
        "dividendYieldTTM": _tv_value(cells, "dividends_yield_current"),
        "sector": None,
        "analystRating": _tv_value(cells, "OsRating_1D.tr"),
        "country": None,
        "exchange": exchange_name,
        "exchangeFullName": exchange_name,
        "currency": _tv_value(cells, "currency"),
    }
    return normalized
| def _first_row(payload): | |
| if isinstance(payload, list) and payload: | |
| first = payload[0] | |
| return first if isinstance(first, dict) else None | |
| if isinstance(payload, dict): | |
| return payload | |
| return None | |
def _build_rows_from_selected_symbols(symbols):
    """Resolve each selected symbol to its cached/live row, dropping misses."""
    resolved = []
    for candidate in symbols:
        row = _get_symbol_row_cached(str(candidate).strip().upper())
        if row:
            resolved.append(row)
    return resolved
| def _merge_symbol_rows(existing_row, new_row): | |
| merged = dict(existing_row or {}) | |
| for key, value in (new_row or {}).items(): | |
| if value not in (None, "", []): | |
| merged[key] = value | |
| return merged | |
def _write_symbol_cache(symbol, row):
    """Upsert the per-symbol cache entry, merging new data over existing data."""
    ticker = str(symbol).strip().upper()
    if not ticker:
        return
    existing = ScreenerSymbolCache.objects.filter(symbol=ticker).first()
    stamp = timezone.now()
    if existing is None:
        ScreenerSymbolCache.objects.create(
            symbol=ticker,
            data=dict(row or {}),
            last_fetched_at=stamp,
        )
        return
    existing.data = _merge_symbol_rows(existing.data or {}, row or {})
    existing.last_fetched_at = stamp
    existing.save(update_fields=["data", "last_fetched_at", "updated_at"])
def _is_cache_fresh(cache_row):
    """True when the cache row was fetched within SYMBOL_CACHE_TTL."""
    if not cache_row or not cache_row.last_fetched_at:
        return False
    age = timezone.now() - cache_row.last_fetched_at
    return age <= SYMBOL_CACHE_TTL
def _fetch_symbol_live_row(symbol):
    """Query TradingView for one ticker and return its normalized row.

    Prefers an exact symbol match; otherwise the first (fuzzy) result;
    otherwise a bare {"symbol": ...} stub. Propagates TradingViewError.
    """
    scan_payload = _build_tradingview_scan_payload(
        extra_filters=[
            {"left": "ticker-view-filter", "operation": "match", "right": symbol},
            {"left": "is_primary", "operation": "equal", "right": True},
        ],
        offset=0,
        limit=25,
        sort_by="market_cap_basic",
        sort_order="desc",
    )
    result = _scan_tradingview(scan_payload)
    raw_rows = result.get("data") or []
    for raw in raw_rows:
        candidate = _normalize_tradingview_row(raw)
        if candidate and candidate.get("symbol") == symbol:
            return candidate
    # TradingView can still return fuzzy symbol matches, so use the first if an exact symbol is not available.
    if raw_rows:
        fuzzy = _normalize_tradingview_row(raw_rows[0])
        if fuzzy:
            return fuzzy
    return {"symbol": symbol}
def _get_symbol_row_cached(symbol):
    """Return the best-known row for a symbol.

    Order of preference: fresh cache, then a live fetch (persisted back to
    the cache), then stale cache, then a bare {"symbol": ...} stub. Returns
    None only for an empty symbol.
    """
    ticker = str(symbol).strip().upper()
    if not ticker:
        return None
    cached = ScreenerSymbolCache.objects.filter(symbol=ticker).first()
    if cached and _is_cache_fresh(cached):
        return _merge_symbol_rows({"symbol": ticker}, cached.data or {})
    try:
        live = _fetch_symbol_live_row(ticker)
    except TradingViewError:
        live = None
    # Only persist live data that carries at least one real field beyond the symbol.
    has_substance = bool(live) and any(
        val not in (None, "", []) for key, val in live.items() if key != "symbol"
    )
    if has_substance:
        combined = _merge_symbol_rows(cached.data if cached else {}, live)
        _write_symbol_cache(ticker, combined)
        return combined
    if cached and cached.data:
        # Network or provider limitation fallback: return stale cache.
        return _merge_symbol_rows({"symbol": ticker}, cached.data)
    return {"symbol": ticker}
def screener_filter_options(request):
    """Return the static dropdown options for the screener filter UI."""
    _, error_response = _require_user(request)
    if error_response:
        return error_response
    options = {
        "sectors": FMP_DEFAULT_SECTORS,
        "industries": [],
        "countries": FMP_DEFAULT_COUNTRIES,
        "exchanges": FMP_DEFAULT_EXCHANGES,
        "warnings": [],
    }
    return Response(options)
def run_screener(request, screener_id):
    """Return cached rows for a screener's selected symbols.

    Only the symbol cache table is consulted — no live provider call is
    made here, so the response is always marked cached.
    """
    user, error_response = _require_user(request)
    if error_response:
        return error_response
    screener = Screener.objects.filter(user=user, id=screener_id).first()
    if screener is None:
        return Response({"detail": "Screener not found."}, status=status.HTTP_404_NOT_FOUND)

    selected = screener.selected_symbols or []
    if not selected:
        return Response(
            {
                "screener": _serialize_screener(screener),
                "results": [],
                "cached": True,
                "pagination": {"offset": 0, "limit": 0, "returned": 0, "totalCount": 0, "sortBy": "symbol", "sortOrder": "asc"},
            }
        )

    tickers = [str(raw).strip().upper() for raw in selected if str(raw).strip()]
    rows = []
    for entry in ScreenerSymbolCache.objects.filter(symbol__in=tickers):
        data = dict(entry.data or {})
        data["symbol"] = entry.symbol
        rows.append(data)
    count = len(rows)
    return Response(
        {
            "screener": _serialize_screener(screener),
            "results": rows,
            "cached": True,
            "pagination": {
                "offset": 0,
                "limit": count,
                "returned": count,
                "totalCount": count,
                "sortBy": "symbol",
                "sortOrder": "asc",
            },
        }
    )
def ai_generate_screener_symbols(request, screener_id):
    """Turn a free-text prompt into filters via the LLM and merge matching
    cached symbols into the screener's selection.

    Expects a JSON body {"prompt": "..."}. Returns the merged selection plus
    the parsed conditions; 502 when the LLM call fails, 400 when the prompt
    yields no usable conditions.
    """
    user, error_response = _require_user(request)
    if error_response:
        return error_response
    screener = Screener.objects.filter(user=user, id=screener_id).first()
    if not screener:
        return Response({"detail": "Screener not found."}, status=status.HTTP_404_NOT_FOUND)
    user_prompt = str(request.data.get("prompt", "")).strip()
    if not user_prompt:
        return Response({"detail": "Request must include non-empty 'prompt'."}, status=status.HTTP_400_BAD_REQUEST)
    try:
        conditions, mode = _build_ai_conditions_from_prompt(user_prompt)
    except Exception as exc:
        # The LLM call can fail for transport/provider reasons; surface as 502.
        return Response(
            {"detail": "Failed to parse screener prompt with LLM.", "error": str(exc)},
            status=status.HTTP_502_BAD_GATEWAY,
        )
    if not conditions:
        return Response(
            {
                "detail": "Could not derive valid filters from prompt. Try explicit fields like '@market_cap > 50M and @sector contains tech'."
            },
            status=status.HTTP_400_BAD_REQUEST,
        )
    # Evaluate the parsed conditions against every cached symbol row.
    matched_symbols = []
    for cache_row in ScreenerSymbolCache.objects.all().iterator():
        row_data = dict(cache_row.data or {})
        row_data["symbol"] = cache_row.symbol
        if _row_matches_conditions(row_data, conditions, mode):
            matched_symbols.append(cache_row.symbol)
    # Merge matches into the existing selection, preserving order and deduping.
    existing = [str(s).strip().upper() for s in (screener.selected_symbols or []) if str(s).strip()]
    merged = []
    seen = set()
    for symbol in existing + matched_symbols:
        s = str(symbol).strip().upper()
        if s and s not in seen:
            seen.add(s)
            merged.append(s)
    # Fix: build the membership set once instead of rebuilding it for every
    # element of `merged` (the original comprehension was accidentally O(n^2)).
    existing_set = set(existing)
    added_count = sum(1 for s in merged if s not in existing_set)
    screener.selected_symbols = merged
    # The selection changed, so any previously cached run results are stale.
    screener.cached_results = None
    screener.last_run_at = None
    screener.save(update_fields=["selected_symbols", "cached_results", "last_run_at", "updated_at"])
    return Response(
        {
            "status": "ok",
            "matchedCount": len(matched_symbols),
            "addedCount": added_count,
            "selectedSymbols": merged,
            "conditions": conditions,
            "mode": mode,
        }
    )
def stock_symbol_search(request):
    """Search cached symbols by ticker or company name, with pagination."""
    from django.db.models import Q

    _, error_response = _require_user(request)
    if error_response:
        return error_response
    query = str(request.query_params.get("query", "")).strip()
    if not query:
        return Response({"results": []})
    limit = _parse_int(request.query_params.get("limit"), 10, minimum=1, maximum=25)
    offset = _parse_int(request.query_params.get("offset"), 0, minimum=0)
    matcher = Q(symbol__icontains=query.upper()) | Q(data__companyName__icontains=query)
    page = ScreenerSymbolCache.objects.filter(matcher).order_by("symbol")[offset : offset + limit]
    results = []
    for entry in page:
        data = entry.data or {}
        results.append(
            {
                "symbol": entry.symbol,
                "name": data.get("companyName") or "",
                "logoId": data.get("logoId"),
                "logoUrl": data.get("logoUrl"),
                "exchange": data.get("exchange") or "",
                "exchangeFullName": data.get("exchangeFullName") or "",
                "currency": data.get("currency") or "",
                "price": data.get("price"),
                "changePercentage": data.get("changePercentage"),
                "marketCap": data.get("marketCap"),
                "pe": data.get("pe"),
            }
        )
    # Separate count query for the unsliced match set.
    total = ScreenerSymbolCache.objects.filter(matcher).count()
    return Response(
        {
            "results": results,
            "pagination": {
                "offset": offset,
                "limit": limit,
                "returned": len(results),
                "totalCount": total,
                "sortBy": "symbol",
                "sortOrder": "asc",
            },
        }
    )