# NOTE: the lines "Spaces:" / "Runtime error" that preceded this file were
# capture/log residue from the hosting environment, not part of the module.
| """ | |
| SDR Status Tracker - FastAPI Backend | |
| Fetches data from Google Sheets and serves the dashboard | |
| Supports Excel file fallback when Google Sheet access is unavailable | |
| """ | |
| import os | |
| import json | |
| import hmac | |
| import asyncio | |
| from datetime import datetime | |
| from fastapi import FastAPI, HTTPException, Request | |
| from fastapi.staticfiles import StaticFiles | |
| from fastapi.responses import FileResponse, JSONResponse, StreamingResponse | |
| from google.oauth2 import service_account | |
| from googleapiclient.discovery import build | |
| from googleapiclient.errors import HttpError | |
| # Excel file support (optional, for local testing only) | |
| try: | |
| from excel_parser import read_excel_file, get_excel_file_path | |
| EXCEL_SUPPORT = True | |
| except ImportError: | |
| EXCEL_SUPPORT = False | |
| read_excel_file = None | |
| get_excel_file_path = None | |
# FastAPI application instance.
# NOTE(review): no route decorators are visible in this chunk for the async
# endpoint functions below — confirm routes are registered elsewhere.
app = FastAPI(title="SDR Status Tracker")

# Configuration via environment variables (fallback for backward compatibility)
SHEET_ID = os.environ.get("GOOGLE_SHEET_ID", "1af6-2KsRqeTQxdw5KVRp2WCrM6RT7HIcl70m-GgGZB4")
SHEET_GID = os.environ.get("GOOGLE_SHEET_GID", "1864606926")  # Tab ID (configured via env var for current month)

# Month configuration file path (relative to this script's directory)
MONTHS_CONFIG_FILE = os.path.join(os.path.dirname(os.path.abspath(__file__)), "months_config.json")

# Month configuration cache; populated by load_months_config() at startup.
_months_config = {"months": [], "default_month": None, "loaded_at": None}
def _apply_months_fallback():
    """Populate _months_config with a single env-var-derived month entry.

    Shared by both error paths of load_months_config (previously the same
    nine lines were duplicated in each except handler).
    """
    _months_config["months"] = [{
        "id": "default",
        "label": "Current Month",
        "sheet_id": SHEET_ID,
        "tab_name": "DAILY - for SDR to add data🌟"
    }]
    _months_config["default_month"] = "default"
    _months_config["loaded_at"] = datetime.now().isoformat()


def load_months_config(force_reload=False):
    """
    Load month configuration from the JSON file into _months_config.

    Falls back to a single synthetic "default" month built from the
    SHEET_ID env var when the file is missing or unreadable, preserving
    backward compatibility.

    Args:
        force_reload: Re-read the file even if a config is already cached.

    Returns:
        The module-level _months_config dict.
    """
    global _months_config
    # Serve the cached config unless a reload is explicitly requested.
    if _months_config["months"] and not force_reload:
        return _months_config
    try:
        with open(MONTHS_CONFIG_FILE, "r") as f:
            config = json.load(f)
        _months_config["months"] = config.get("months", [])
        _months_config["default_month"] = config.get("default_month")
        _months_config["loaded_at"] = datetime.now().isoformat()
        print(f"Months config loaded from {MONTHS_CONFIG_FILE}: {len(_months_config['months'])} months")
    except FileNotFoundError:
        print(f"Months config file {MONTHS_CONFIG_FILE} not found, using env var fallback")
        _apply_months_fallback()
    except Exception as e:
        # Malformed JSON or unexpected structure: degrade rather than crash.
        print(f"Error loading months config: {e}, using env var fallback")
        _apply_months_fallback()
    return _months_config
def get_month_config(month_id: str = None):
    """Look up one month's config entry by id; returns None when absent.

    An empty/None month_id falls back to the configured default month.
    """
    cfg = load_months_config()
    wanted = month_id or cfg["default_month"]
    return next((entry for entry in cfg["months"] if entry["id"] == wanted), None)
# Load months config at startup so the first request doesn't pay the file read.
load_months_config()

# Per-source, per-month data cache with webhook invalidation.
# Keys are "{month}:{source}" (e.g. "2026-02:api" — see get_data), values are
# {"data": [...], "timestamp": datetime, "source": str}.
_cache = {}
CACHE_TTL = 3600  # 1 hour - webhook will invalidate on actual changes

# Webhook secret for cache invalidation (optional security)
WEBHOOK_SECRET = os.environ.get("WEBHOOK_SECRET", "")

# SSE clients - set of asyncio.Queue objects for connected browsers
_sse_clients: set[asyncio.Queue] = set()
def get_sheets_service():
    """Build a read-only Google Sheets API client from service-account creds.

    Reads JSON credentials from the GOOGLE_CREDENTIALS env var.

    Raises:
        HTTPException: 500 when the var is unset or client construction fails.
    """
    raw_creds = os.environ.get("GOOGLE_CREDENTIALS")
    if not raw_creds:
        raise HTTPException(status_code=500, detail="GOOGLE_CREDENTIALS not configured")
    try:
        info = json.loads(raw_creds)
        credentials = service_account.Credentials.from_service_account_info(
            info,
            scopes=["https://www.googleapis.com/auth/spreadsheets.readonly"],
        )
        return build("sheets", "v4", credentials=credentials)
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Failed to initialize Sheets API: {str(e)}")
# Known activity types for block detection
# Note: SQL variants (like "SQL (offer sent)") also terminate blocks
KNOWN_ACTIVITIES = {
    'calls', 'emails', 'linkedin', 'prospects',
    'prospects (activated)', 'discovery', 'sql', 'sql (offer sent)'
}

# Column configuration file path (relative to the working directory —
# unlike MONTHS_CONFIG_FILE, which is anchored to this script's directory).
CONFIG_FILE = "column_config.json"

# Default week configuration (fallback if config file not found)
# Format: (slot_index, daily_start, daily_end, target_col, percentage_col)
# slot_index is 0-based; actual week = start_week + slot_index
DEFAULT_WEEK_CONFIGS = [
    (0, 3, 7, 8, 9),      # Slot 0: daily D-H (3-7), target I (8), pct J (9)
    (1, 10, 14, 15, 16),  # Slot 1: daily K-O (10-14), target P (15), pct Q (16)
    (2, 17, 21, 22, 23),  # Slot 2: daily R-V (17-21), target W (22), pct X (23)
    (3, 24, 28, 30, 31),  # Slot 3: daily Y-AC (24-28), target AE (30), pct AF (31) - extra empty col
]

# Default monthly column configuration (fallback): AG=target, AH=actual, AI=pct
DEFAULT_MONTHLY_CONFIG = {"target_col": 32, "actual_col": 33, "pct_col": 34}

# Global column configuration (loaded at startup, can be reloaded)
_column_config = {"weeks": None, "monthly": None, "loaded_at": None}
def _apply_default_columns():
    """Populate _column_config with the hardcoded default column layout.

    Shared by both error paths of load_column_config (previously the same
    three lines were duplicated in each except handler).
    """
    _column_config["weeks"] = DEFAULT_WEEK_CONFIGS
    _column_config["monthly"] = DEFAULT_MONTHLY_CONFIG
    _column_config["loaded_at"] = datetime.now().isoformat()


def load_column_config(force_reload=False):
    """
    Load column configuration from the JSON file into _column_config.

    Falls back to the hardcoded defaults when the file is missing,
    invalid, or lists no weeks.

    Args:
        force_reload: Re-read the file even if a config is already cached.

    Returns:
        The module-level _column_config dict.
    """
    global _column_config
    # Serve the cached config unless a reload is explicitly requested.
    if _column_config["weeks"] is not None and not force_reload:
        return _column_config
    try:
        with open(CONFIG_FILE, "r") as f:
            config = json.load(f)
        # Convert weeks config to the tuple format used by the parser.
        weeks = [
            (w["week_num"], w["daily_start"], w["daily_end"], w["target_col"], w["pct_col"])
            for w in config.get("weeks", [])
        ]
        _column_config["weeks"] = weeks if weeks else DEFAULT_WEEK_CONFIGS
        _column_config["monthly"] = config.get("monthly", DEFAULT_MONTHLY_CONFIG)
        _column_config["loaded_at"] = datetime.now().isoformat()
        print(f"Column config loaded from {CONFIG_FILE}: {len(weeks)} weeks")
    except FileNotFoundError:
        print(f"Config file {CONFIG_FILE} not found, using defaults")
        _apply_default_columns()
    except Exception as e:
        print(f"Error loading config: {e}, using defaults")
        _apply_default_columns()
    return _column_config
def get_week_configs():
    """Return the currently loaded per-week column tuples."""
    return load_column_config()["weeks"]
def get_monthly_config():
    """Return the currently loaded monthly-totals column mapping."""
    return load_column_config()["monthly"]
# Load column config at module initialization so endpoints never see an empty config.
load_column_config()
def get_activity(row):
    """Return the trimmed activity label from column C (index 2), or None.

    None is returned when the row is too short or the cell is empty/None.
    """
    try:
        cell = row[2]
    except IndexError:
        return None
    return cell.strip() if cell else None
def scan_block_for_names(block):
    """
    Find the Case (col A) and GS (col B) names anywhere inside a block.

    Merged cells mean either name can sit on any row of the block; the
    last non-blank value encountered wins, matching the scan order.
    Returns a (case_name, gs_name) tuple; either may be None.
    """
    case_name = None
    gs_name = None
    for _row_idx, cells in block:
        first = cells[0].strip() if len(cells) > 0 and cells[0] else ""
        second = cells[1].strip() if len(cells) > 1 and cells[1] else ""
        if first:
            case_name = first
        if second:
            gs_name = second
    return case_name, gs_name
def group_rows_into_blocks(values):
    """
    Partition data rows into per-case blocks; an SQL row closes each block.

    Rows 1-4 are headers and skipped, as are rows too short to carry an
    activity or whose label is not a known activity (SQL variants such as
    "SQL (offer sent)" also count). Returns a list of blocks, each a list
    of (row_idx, row) tuples. A trailing block with no SQL row is kept
    rather than dropped.
    """
    blocks = []
    pending = []
    for idx, row in enumerate(values):
        # Skip header rows and rows without room for an activity cell.
        if idx < 4 or not row or len(row) < 3:
            continue
        label = get_activity(row)
        if not label:
            continue
        lowered = label.lower()
        # Accept only known activities or SQL variants.
        if lowered not in KNOWN_ACTIVITIES and not lowered.startswith('sql'):
            continue
        pending.append((idx, row))
        # Any SQL variant terminates the current block.
        if lowered.startswith('sql'):
            blocks.append(pending)
            pending = []
    if pending:
        blocks.append(pending)
    return blocks
def process_activity_row(row, case_name, gs_name, case_data, start_week=2):
    """Fold a single activity row into the case_data accumulator (mutated in place).

    Args:
        row: Raw sheet row (list of cell values).
        case_name: Case name attributed to this row's block.
        gs_name: GS/SDR name attributed to this row's block.
        case_data: Dict keyed by "case|gs"; created/updated here.
        start_week: First calendar week number of the month; week slot
            indexes from the column config are offsets from this.
    """
    activity = get_activity(row)
    if not activity:
        return
    # Map activity types to our data structure
    activity_lower = activity.lower()
    activity_map = {
        "calls": "calls",
        "emails": "emails",
        "linkedin": "linkedin",
        "prospects": "prospects",
        "prospects (activated)": "prospects",
        "discovery": "discovery",
        "sql": "sql",
        "sql (offer sent)": "sql"
    }
    activity_key = activity_map.get(activity_lower)
    # Handle any other SQL variants not in the map
    if not activity_key and activity_lower.startswith('sql'):
        activity_key = 'sql'
    if not activity_key:
        return
    # Create key for this case+gs combination
    key = f"{case_name}|{gs_name}"
    # Calculate actual week numbers based on start_week
    week_configs = get_week_configs()
    week_numbers = [start_week + slot for slot, _, _, _, _ in week_configs]
    if key not in case_data:
        case_data[key] = {
            "case": case_name,
            "gs": gs_name,
            "weeks": {w: {} for w in week_numbers},
            "monthlyTotal": {"sql": 0, "sqlTarget": 0, "activity": 0, "activityTarget": 0, "sqlPctList": [], "activityPctList": []}
        }
    # Extract weekly data: actual (sum of daily), target, and percentage from sheet
    for slot, daily_start, daily_end, target_col, pct_col in week_configs:
        week_num = start_week + slot
        actual = sum_daily(row, daily_start, daily_end)
        target = safe_int(row, target_col)
        percentage = extract_percentage(row, pct_col)
        if week_num not in case_data[key]["weeks"]:
            case_data[key]["weeks"][week_num] = {}
        case_data[key]["weeks"][week_num][activity_key] = actual
        case_data[key]["weeks"][week_num][f"{activity_key}Target"] = target
        # Store percentage for debugging/validation (frontend calculates its own)
        if percentage is not None:
            case_data[key]["weeks"][week_num][f"{activity_key}Pct"] = percentage
    # Get monthly target, actual, and percentage from config
    monthly_config = get_monthly_config()
    monthly_target = safe_int(row, monthly_config["target_col"])
    monthly_actual = safe_int(row, monthly_config["actual_col"])
    monthly_pct = extract_percentage(row, monthly_config.get("pct_col"))
    # Update monthly totals
    # Discovery is aggregated with SQL for monthly view (both are pipeline metrics)
    if activity_key == "sql":
        # Assignment, not += — NOTE(review): this assumes at most one SQL row
        # per block; if a discovery row was processed first, its += below is
        # overwritten when rows arrive in sql-last order only. Confirm row
        # ordering guarantees (group_rows_into_blocks makes SQL the last row).
        case_data[key]["monthlyTotal"]["sql"] = monthly_actual
        case_data[key]["monthlyTotal"]["sqlTarget"] = monthly_target
        if monthly_pct is not None:
            case_data[key]["monthlyTotal"]["sqlPctList"].append(monthly_pct)
    elif activity_key == "discovery":
        # Discovery adds to SQL totals for monthly (pipeline metric)
        case_data[key]["monthlyTotal"]["sql"] += monthly_actual
        case_data[key]["monthlyTotal"]["sqlTarget"] += monthly_target
        if monthly_pct is not None:
            case_data[key]["monthlyTotal"]["sqlPctList"].append(monthly_pct)
    elif activity_key in ["calls", "emails", "linkedin", "prospects"]:
        # Activity includes outreach activities only
        case_data[key]["monthlyTotal"]["activity"] += monthly_actual
        case_data[key]["monthlyTotal"]["activityTarget"] += monthly_target
        if monthly_pct is not None:
            case_data[key]["monthlyTotal"]["activityPctList"].append(monthly_pct)
def parse_sheet_data(values, start_week=2):
    """
    Transform raw DAILY-sheet rows into the dashboard's case list.

    Block-based parsing handles Google Sheets merged cells: each SDR/Case
    block has 2-6 activity rows, with SQL always last, and the Case/GS
    names (columns A/B) may appear on any row within the block.

    Column layout (0-indexed): A=case, B=GS, C=activity type; then, per
    week slot, 5 daily columns + target + percentage (see the week config),
    and monthly totals in AG (32, target), AH (33, actual), AI (34, pct).

    Args:
        values: Raw sheet data (list of row lists).
        start_week: First week number of this month (e.g. 2 for Jan).

    Returns:
        List of case dicts with id, case, gs, weeks, monthlyTotal keys;
        empty when there are fewer than 5 rows (headers only).
    """
    if not values or len(values) < 5:
        return []
    # Phase 1: split into SQL-terminated blocks.
    # Phase 2: attribute each block's rows to its Case/GS names.
    case_data = {}
    for block in group_rows_into_blocks(values):
        case_name, gs_name = scan_block_for_names(block)
        if not (case_name and gs_name):
            # Blocks without both names cannot be attributed; drop them.
            continue
        for _row_idx, row in block:
            process_activity_row(row, case_name, gs_name, case_data, start_week)
    # Flatten the accumulator into the list shape the dashboard expects.
    return [
        {
            "id": position + 1,
            "case": entry["case"],
            "gs": entry["gs"],
            "weeks": entry["weeks"],
            "monthlyTotal": entry["monthlyTotal"],
        }
        for position, entry in enumerate(case_data.values())
    ]
def safe_int(row, idx):
    """Best-effort integer at row[idx]: 0 for missing, blank, or non-numeric cells.

    Accepts numeric strings like "3.7" (truncated toward zero via float).
    """
    if idx is None or idx >= len(row):
        return 0
    try:
        cell = row[idx]
        if cell in ("", None):
            return 0
        return int(float(cell))
    except (ValueError, TypeError):
        return 0
def sum_daily(row, start_col, end_col):
    """Total the daily cells in columns start_col..end_col (inclusive)."""
    return sum(safe_int(row, col) for col in range(start_col, end_col + 1))
def extract_percentage(row, col):
    """Parse a percentage cell such as '267%' into the int 267.

    Returns None for a missing column, blank cell, or unparseable value.
    """
    if col is None or col >= len(row):
        return None
    try:
        raw = row[col]
        if raw is None or raw == "":
            return None
        # Strings may carry a '%' suffix and surrounding whitespace.
        if isinstance(raw, str):
            raw = raw.replace('%', '').strip()
        return int(float(raw))
    except (ValueError, TypeError):
        return None
async def get_data(month: str = None, source: str = None):
    """
    Fetch dashboard data from Google Sheets or a local Excel file.

    Args:
        month: Month ID (e.g., "2026-02"). Uses the configured default
            month when not specified.
        source: Data source override. "file" forces the local Excel file,
            "api" forces Google Sheets API. Auto-detects if not specified.

    Returns:
        JSONResponse with keys: cases, cached, month, month_label, source.

    Raises:
        HTTPException: 400 for an unknown month id; 500 when every
            attempted data source fails.
    """
    global _cache
    # Get month configuration
    months_config = load_months_config()
    if not month:
        month = months_config["default_month"]
    month_config = get_month_config(month)
    if not month_config:
        raise HTTPException(status_code=400, detail=f"Unknown month: {month}")
    # Check if month config specifies a file source
    config_source = month_config.get("source", "api")
    if source:
        config_source = source  # Override with query param
    # Check per-month cache; entries are keyed "{month}:{source}".
    now = datetime.now()
    cache_key = f"{month}:{config_source}"
    if cache_key in _cache:
        cache_entry = _cache[cache_key]
        if cache_entry["data"] and cache_entry["timestamp"]:
            age = (now - cache_entry["timestamp"]).total_seconds()
            if age < CACHE_TTL:
                return JSONResponse(content={
                    "cases": cache_entry["data"],
                    "cached": True,
                    "month": month,
                    "month_label": month_config["label"],
                    "source": cache_entry.get("source", "api")
                })
    # Try to load data from the specified source
    values = None
    actual_source = None
    error_messages = []
    # If source is "file" or config specifies file, try file first
    if config_source == "file":
        if not EXCEL_SUPPORT:
            error_messages.append("Excel support not available (excel_parser module not installed)")
        else:
            file_path = month_config.get("file_path") or get_excel_file_path(month)
            if file_path:
                try:
                    tab_name = month_config.get("tab_name", "DAILY - for SDR to add data🌟")
                    values = read_excel_file(file_path, tab_name)
                    actual_source = "file"
                    print(f"Loaded {len(values)} rows from Excel file: {file_path}")
                except Exception as e:
                    error_messages.append(f"Excel file error: {str(e)}")
            else:
                error_messages.append(f"No Excel file found for month {month}")
    # If source is "api" or file failed, try Google Sheets API.
    # NOTE(review): when config_source == "file" and the file fails, there is
    # no API retry (the condition excludes "file") — appears intentional.
    if values is None and config_source != "file":
        try:
            service = get_sheets_service()
            sheet_id = month_config["sheet_id"]
            sheet_name = month_config.get("tab_name", "DAILY - for SDR to add data🌟")
            result = service.spreadsheets().values().get(
                spreadsheetId=sheet_id,
                range=f"'{sheet_name}'!A:AI"
            ).execute()
            values = result.get("values", [])
            actual_source = "api"
        except HttpError as e:
            error_messages.append(f"Google Sheets API error: {str(e)}")
            # If API fails with permission error, try file fallback
            if EXCEL_SUPPORT and ("403" in str(e) or "permission" in str(e).lower()):
                file_path = month_config.get("file_path") or get_excel_file_path(month)
                if file_path:
                    try:
                        tab_name = month_config.get("tab_name", "DAILY - for SDR to add data🌟")
                        values = read_excel_file(file_path, tab_name)
                        actual_source = "file"
                        print(f"API permission denied, fallback to Excel: {file_path}")
                    except Exception as file_e:
                        error_messages.append(f"Fallback Excel error: {str(file_e)}")
        except Exception as e:
            error_messages.append(f"Error: {str(e)}")
    # If we still have no data, raise an error
    if values is None:
        raise HTTPException(
            status_code=500,
            detail=f"Failed to load data. Errors: {'; '.join(error_messages)}"
        )
    # Parse the data with month-specific start_week
    start_week = month_config.get("start_week", 2)
    cases = parse_sheet_data(values, start_week)
    # Update cache
    _cache[cache_key] = {"data": cases, "timestamp": now, "source": actual_source}
    return JSONResponse(content={
        "cases": cases,
        "cached": False,
        "month": month,
        "month_label": month_config["label"],
        "source": actual_source
    })
async def get_config():
    """Expose the sheet identifiers and cache TTL to the frontend."""
    payload = {
        "sheetId": SHEET_ID,
        "sheetGid": SHEET_GID,
        "cacheTtl": CACHE_TTL,
    }
    return JSONResponse(content=payload)
async def get_months():
    """List the configured months for the dashboard's month dropdown."""
    cfg = load_months_config()
    months = []
    for entry in cfg["months"]:
        months.append({
            "id": entry["id"],
            "label": entry["label"],
            "start_week": entry.get("start_week", 2),
        })
    return JSONResponse(content={"months": months, "default_month": cfg["default_month"]})
async def invalidate_cache(request: Request, month: str = None):
    """
    Webhook endpoint to invalidate the cache when sheet data changes.

    Called by a Google Apps Script onEdit trigger or the frontend refresh
    button. If month is given, only that month's cache entries (across all
    sources) are cleared; otherwise every cached entry is dropped. All
    connected SSE clients are then told to refresh.

    Raises:
        HTTPException: 401 when WEBHOOK_SECRET is set and the
            X-Webhook-Secret header does not match.
    """
    global _cache
    print(f"[WEBHOOK] Cache invalidation requested, month={month}")
    # Optional: verify webhook secret if configured
    if WEBHOOK_SECRET:
        auth_header = request.headers.get("X-Webhook-Secret", "")
        # compare_digest avoids timing side channels on the secret check.
        if not hmac.compare_digest(auth_header, WEBHOOK_SECRET):
            print(f"[WEBHOOK] Invalid webhook secret, rejecting request")
            raise HTTPException(status_code=401, detail="Invalid webhook secret")
    # Clear the cache (specific month or all)
    if month:
        # Clear ALL sources for this month (api, file, etc.)
        # Cache keys are stored as "{month}:{source}" (e.g., "2026-02:api")
        keys_to_clear = [k for k in _cache.keys() if k.startswith(f"{month}:")]
        for key in keys_to_clear:
            del _cache[key]
        cleared = keys_to_clear
    else:
        cleared = list(_cache.keys())
        _cache.clear()
    # Notify all connected SSE clients to refresh.
    # Iterate a copy: clients may register/deregister concurrently.
    for queue in _sse_clients.copy():
        try:
            queue.put_nowait("refresh")
        except asyncio.QueueFull:
            pass  # Skip if queue is full
    print(f"[WEBHOOK] Cleared cache keys: {cleared}, notified {len(_sse_clients)} SSE clients")
    return JSONResponse(content={
        "success": True,
        "message": "Cache invalidated",
        "months_cleared": cleared,
        "clients_notified": len(_sse_clients),
        "timestamp": datetime.now().isoformat()
    })
async def sse_event_generator(queue: asyncio.Queue):
    """Yield SSE-formatted events from *queue* for one connected client.

    Emits a ": keepalive" comment line after 30s of inactivity so idle
    connections are not dropped by intermediaries; swallows
    CancelledError so a client disconnect ends the stream quietly.
    """
    try:
        while True:
            # Wait for an event (with timeout to send keepalive)
            try:
                event = await asyncio.wait_for(queue.get(), timeout=30.0)
                yield f"data: {event}\n\n"
            except asyncio.TimeoutError:
                # Send keepalive comment to prevent connection timeout
                yield ": keepalive\n\n"
    except asyncio.CancelledError:
        pass
async def sse_events():
    """
    Server-Sent Events endpoint for real-time updates.

    Browsers connect here to receive instant refresh notifications.
    Registers a bounded per-client queue in _sse_clients so
    invalidate_cache can push "refresh" events; the queue is always
    deregistered when the stream ends, even on abrupt disconnect.
    """
    queue: asyncio.Queue = asyncio.Queue(maxsize=10)
    _sse_clients.add(queue)
    async def event_stream():
        try:
            async for event in sse_event_generator(queue):
                yield event
        finally:
            # Deregister on any exit path so _sse_clients doesn't leak queues.
            _sse_clients.discard(queue)
    return StreamingResponse(
        event_stream(),
        media_type="text/event-stream",
        headers={
            "Cache-Control": "no-cache",
            "Connection": "keep-alive",
            "X-Accel-Buffering": "no",  # Disable nginx buffering
        }
    )
async def cache_status(month: str = None):
    """
    Report cache freshness for one month or for every cached entry.

    Cache entries are keyed "{month}:{source}" (see get_data), so a
    month-specific query must prefix-match the keys. The previous
    implementation looked up the bare month id (`month in _cache`),
    which never matched a real key and always reported cached=False.

    Args:
        month: Optional month id; when given, the freshest entry across
            all sources for that month is reported.

    Returns:
        JSONResponse — either a single-month status or a per-key map.
    """
    now = datetime.now()
    if month:
        # Find the freshest entry for this month across all sources.
        newest_ts = None
        for key, entry in _cache.items():
            if key.startswith(f"{month}:") and entry.get("timestamp"):
                if newest_ts is None or entry["timestamp"] > newest_ts:
                    newest_ts = entry["timestamp"]
        if newest_ts is not None:
            age = (now - newest_ts).total_seconds()
            return JSONResponse(content={
                "cached": True,
                "month": month,
                "age_seconds": age,
                "ttl_seconds": CACHE_TTL,
                "expires_in": max(0, CACHE_TTL - age)
            })
        return JSONResponse(content={"cached": False, "month": month})
    # Return status for all cached entries (keys include the source suffix).
    status = {}
    for key, entry in _cache.items():
        if entry.get("timestamp"):
            age = (now - entry["timestamp"]).total_seconds()
            status[key] = {
                "cached": True,
                "age_seconds": age,
                "expires_in": max(0, CACHE_TTL - age)
            }
    return JSONResponse(content={"months": status, "ttl_seconds": CACHE_TTL})
async def reload_config(request: Request):
    """
    Force-reload both configuration files and drop every cached dataset.

    Requires a matching X-Webhook-Secret header when WEBHOOK_SECRET is set.

    Raises:
        HTTPException: 401 on a secret mismatch.
    """
    global _cache
    # Optional shared-secret check, constant-time to avoid timing leaks.
    if WEBHOOK_SECRET:
        supplied = request.headers.get("X-Webhook-Secret", "")
        if not hmac.compare_digest(supplied, WEBHOOK_SECRET):
            raise HTTPException(status_code=401, detail="Invalid webhook secret")
    # Re-read both config files, then invalidate all cached data so the
    # next fetch uses the new layout.
    cols = load_column_config(force_reload=True)
    months = load_months_config(force_reload=True)
    _cache.clear()
    return JSONResponse(content={
        "success": True,
        "message": "Configs reloaded, all caches invalidated",
        "column_config_loaded_at": cols["loaded_at"],
        "months_config_loaded_at": months["loaded_at"],
        "weeks_count": len(cols["weeks"]),
        "months_count": len(months["months"]),
        "timestamp": datetime.now().isoformat()
    })
async def get_column_config():
    """Expose the active column layout for inspection/debugging."""
    cfg = load_column_config()
    field_names = ("week_num", "daily_start", "daily_end", "target_col", "pct_col")
    weeks = [dict(zip(field_names, week_tuple)) for week_tuple in cfg["weeks"]]
    return JSONResponse(content={
        "weeks": weeks,
        "monthly": cfg["monthly"],
        "loaded_at": cfg["loaded_at"]
    })
async def debug_data(month: str = None):
    """Debug endpoint: raw sheet structure, parsed blocks, and attribution.

    Fetches up to 150 rows directly from the Sheets API (no cache), runs
    the block parser, and splits blocks into attributed vs. unattributed.
    Errors are returned in the JSON body (with traceback) rather than as
    an HTTP error status, except the 400 for an unknown month.
    """
    try:
        # Get month configuration
        months_config = load_months_config()
        if not month:
            month = months_config["default_month"]
        month_config = get_month_config(month)
        if not month_config:
            raise HTTPException(status_code=400, detail=f"Unknown month: {month}")
        service = get_sheets_service()
        sheet_id = month_config["sheet_id"]
        sheet_name = month_config.get("tab_name", "DAILY - for SDR to add data🌟")
        result = service.spreadsheets().values().get(
            spreadsheetId=sheet_id,
            range=f"'{sheet_name}'!A1:AI150"
        ).execute()
        values = result.get("values", [])
        # Parse blocks using the new algorithm
        blocks = group_rows_into_blocks(values)
        # Build block summary, separating blocks with/without Case+GS names.
        block_summaries = []
        unattributed_blocks = []
        for block_idx, block in enumerate(blocks):
            case_name, gs_name = scan_block_for_names(block)
            # Row numbers reported 1-based to match the spreadsheet UI.
            row_range = f"{block[0][0]+1}-{block[-1][0]+1}" if block else "empty"
            activities = [get_activity(row) for _, row in block]
            summary = {
                "block_num": block_idx + 1,
                "row_range": row_range,
                "case": case_name,
                "gs": gs_name,
                "activities": activities,
                "row_count": len(block)
            }
            if case_name and gs_name:
                block_summaries.append(summary)
            else:
                unattributed_blocks.append(summary)
        # Find Bannerflow/Ardit's rows for specific debugging
        ardit_rows = []
        for i, row in enumerate(values):
            if len(row) > 1 and ("ardit" in str(row).lower() or "bannerflow" in str(row).lower()):
                ardit_rows.append({"row_num": i+1, "data": row[:10]})  # Limit columns for readability
        return JSONResponse(content={
            "total_rows": len(values),
            "total_blocks": len(blocks),
            "attributed_blocks": len(block_summaries),
            "unattributed_blocks_count": len(unattributed_blocks),
            "headers_row_1": values[0] if values else [],
            "headers_row_4": values[3] if len(values) > 3 else [],
            "blocks": block_summaries,
            "unattributed_blocks": unattributed_blocks,
            "ardit_rows": ardit_rows
        })
    except Exception as e:
        # Debug endpoint deliberately surfaces the traceback in the response.
        import traceback
        return JSONResponse(content={"error": str(e), "traceback": traceback.format_exc()})
# Serve static assets (dashboard JS/CSS/HTML) from ./static under /static.
app.mount("/static", StaticFiles(directory="static"), name="static")
async def root():
    """Serve the dashboard's single-page entry HTML."""
    index_page = "static/index.html"
    return FileResponse(index_page)
# Run a local development server when executed directly.
if __name__ == "__main__":
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=7860)