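"""Read-only schema inspection endpoints: list tables and page through rows."""
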
from fastapi import APIRouter, Depends, HTTPException, Query
from sqlalchemy import func, select
from sqlalchemy.ext.asyncio import AsyncSession
import logging

from core.database import get_db, Base
# Import all models to ensure they are registered with Base.metadata
from core.models import User, ClientUser, AuditLog, GeminiJob, PaymentTransaction, Contact, RateLimit, ApiKeyUsage

router = APIRouter(prefix="/api/schema", tags=["schema"])
logger = logging.getLogger(__name__)
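
# Wiring sketch (illustrative; the real app module and mount point may
# differ in this project):
#   from fastapi import FastAPI
#   app = FastAPI()
#   app.include_router(router)  # exposes the /api/schema/* endpoints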

@router.get("/tables")
async def get_tables():
    """
    Get a list of all tables in the database.
    """
    # We can inspect the metadata from the Base class since all models inherit from it
    # and are imported above.
    return sorted(list(Base.metadata.tables.keys()))
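
# Example: GET /api/schema/tables returns a JSON array such as
# ["audit_logs", "contacts", "users"] (names here are illustrative; the
# real ones come from the models registered on Base.metadata).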

@router.get("/table/{table_name}")
async def get_table_data(
    table_name: str,
    page: int = Query(1, ge=1),
    per_page: int = Query(50, ge=1, le=1000),
    db: AsyncSession = Depends(get_db)
):
    """
    Get data for a specific table with pagination.
    """
    if table_name not in Base.metadata.tables:
        raise HTTPException(status_code=404, detail=f"Table {table_name} not found")

    table = Base.metadata.tables[table_name]
    
    # Get columns
    columns = [c.name for c in table.columns]
    
    # Calculate offset
    offset = (page - 1) * per_page
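    # e.g. page=3 with per_page=50 gives offset = 100 (rows 101-150)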
    
    # Build both queries with SQLAlchemy Core: table_name was validated
    # against the metadata above, and Core quotes identifiers correctly.
    try:
        # Total row count, for the pagination metadata
        count_query = select(func.count()).select_from(table)
        result = await db.execute(count_query)
        total = result.scalar()

        # Fetch the requested page of rows
        data_query = select(table).limit(per_page).offset(offset)
        result = await db.execute(data_query)

        # Convert each row to a plain dict keyed by column name; FastAPI's
        # default JSON encoder handles datetimes and other common types.
        rows = [dict(row) for row in result.mappings()]
            
        return {
            "table": table_name,
            "columns": columns,
            "total": total,
            "page": page,
            "per_page": per_page,
            "data": rows
        }
        
    except Exception as e:
        logger.error(f"Error fetching data for table {table_name}: {e}")
        raise HTTPException(status_code=500, detail=str(e)) from e
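
# Usage sketch (assumes the router is mounted on the app root; "users" is
# an illustrative table name, and any key of Base.metadata.tables works):
#   GET /api/schema/table/users?page=2&per_page=25
# responds with a body shaped like:
#   {"table": "users", "columns": [...], "total": 1234,
#    "page": 2, "per_page": 25, "data": [{...}, ...]}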