BOLO-KESARI committed on
Commit ·
ea750ab
1
Parent(s): 8b175eb
Add logs, auto-create tables, and serve frontend
Browse files- backend/app/main.py +93 -18
- backend/app/services/stock_service.py +17 -5
- check_db.py +44 -0
backend/app/main.py
CHANGED
|
@@ -1,14 +1,21 @@
|
|
| 1 |
-
|
| 2 |
-
|
| 3 |
-
|
| 4 |
-
from fastapi import FastAPI
|
| 5 |
from fastapi.middleware.cors import CORSMiddleware
|
| 6 |
-
from .core.database import engine, Base
|
| 7 |
from .routers import auth, stocks, portfolio
|
| 8 |
-
from .models import
|
|
|
|
|
|
|
|
|
|
| 9 |
|
| 10 |
-
#
|
| 11 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 12 |
|
| 13 |
app = FastAPI(
|
| 14 |
title="Stock Analysis API",
|
|
@@ -16,30 +23,98 @@ app = FastAPI(
|
|
| 16 |
version="1.0.0"
|
| 17 |
)
|
| 18 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 19 |
# CORS middleware
|
| 20 |
app.add_middleware(
|
| 21 |
CORSMiddleware,
|
| 22 |
-
allow_origins=["*"], #
|
| 23 |
allow_credentials=True,
|
| 24 |
allow_methods=["*"],
|
| 25 |
allow_headers=["*"],
|
| 26 |
)
|
| 27 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 28 |
# Include routers
|
| 29 |
app.include_router(auth.router)
|
| 30 |
app.include_router(stocks.router)
|
| 31 |
app.include_router(portfolio.router)
|
| 32 |
|
|
|
|
|
|
|
| 33 |
|
| 34 |
-
|
| 35 |
-
|
| 36 |
-
|
| 37 |
-
|
| 38 |
-
|
| 39 |
-
|
| 40 |
-
"
|
| 41 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 42 |
|
|
|
|
|
|
|
|
|
|
| 43 |
|
| 44 |
@app.get("/health")
|
| 45 |
async def health_check():
|
|
@@ -47,5 +122,5 @@ async def health_check():
|
|
| 47 |
return {
|
| 48 |
"status": "healthy",
|
| 49 |
"api": "operational",
|
| 50 |
-
"database": "
|
| 51 |
}
|
|
|
|
| 1 |
+
from fastapi import FastAPI, Request
|
| 2 |
+
from fastapi.staticfiles import StaticFiles
|
| 3 |
+
from fastapi.responses import FileResponse
|
|
|
|
| 4 |
from fastapi.middleware.cors import CORSMiddleware
|
| 5 |
+
from .core.database import init_supabase, engine, Base
|
| 6 |
from .routers import auth, stocks, portfolio
|
| 7 |
+
from .models import user, portfolio as portfolio_model
|
| 8 |
+
import os
|
| 9 |
+
import logging
|
| 10 |
+
import time
|
| 11 |
|
| 12 |
+
# Configure logging for the whole backend process.
# Timestamped "[LEVEL] name: message" lines, e.g. "2024-01-01 12:00:00 [INFO] api: ...".
_LOG_FORMAT = "%(asctime)s [%(levelname)s] %(name)s: %(message)s"
_LOG_DATEFMT = "%Y-%m-%d %H:%M:%S"

logging.basicConfig(level=logging.INFO, format=_LOG_FORMAT, datefmt=_LOG_DATEFMT)

# Module-level logger used by the request middleware below.
logger = logging.getLogger("api")
|
| 19 |
|
| 20 |
app = FastAPI(
|
| 21 |
title="Stock Analysis API",
|
|
|
|
| 23 |
version="1.0.0"
|
| 24 |
)
|
| 25 |
|
| 26 |
+
# Request Timing Middleware
@app.middleware("http")
async def add_process_time_header(request: Request, call_next):
    """Log request/response details and attach an ``X-Process-Time`` header.

    Health-check traffic (paths ending in ``/health``) is only logged when the
    response is non-200, to keep probe noise out of the logs.

    Args:
        request: The incoming HTTP request.
        call_next: Starlette continuation that runs the rest of the stack.

    Returns:
        The downstream response, with ``X-Process-Time`` set to the elapsed
        seconds as a string.

    Raises:
        Re-raises any exception from downstream after logging it.
    """
    # perf_counter is monotonic: durations are immune to wall-clock adjustments
    # (time.time() can jump under NTP corrections and skew the measurement).
    start_time = time.perf_counter()
    is_health = request.url.path.endswith("/health")

    if not is_health:
        # Lazy %-args: the message is only formatted if this level is enabled.
        logger.info("➡️ Request Details: %s %s", request.method, request.url.path)

    try:
        response = await call_next(request)
        process_time = time.perf_counter() - start_time

        if not is_health or response.status_code != 200:
            logger.info("⬅️ Response Details: %s (took %.4fs)", response.status_code, process_time)

        response.headers["X-Process-Time"] = str(process_time)
        return response
    except Exception as e:
        process_time = time.perf_counter() - start_time
        logger.error("❌ Request Failed: %s (took %.4fs)", e, process_time)
        raise
|
| 49 |
+
|
| 50 |
# CORS middleware
# NOTE(review): wildcard origins combined with allow_credentials=True is wide
# open — any site can make credentialed requests. Lock allow_origins down to
# the real frontend origin(s) before production. TODO confirm this is intended.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"], # Allows all origins
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
|
| 58 |
|
| 59 |
+
# Initialize Database on startup
|
| 60 |
+
@app.on_event("startup")
|
| 61 |
+
async def startup_event():
|
| 62 |
+
"""Initialize database and Create Tables if not exist."""
|
| 63 |
+
print("🔄 Initializing Database...")
|
| 64 |
+
|
| 65 |
+
# 1. Supabase Client (if used)
|
| 66 |
+
init_supabase()
|
| 67 |
+
|
| 68 |
+
# 2. SQLAlchemy (SQLite/Postgres) - Create Tables
|
| 69 |
+
try:
|
| 70 |
+
print("🔨 Creating database tables...")
|
| 71 |
+
# This will create tables for all models registered with Base (User, Portfolio)
|
| 72 |
+
Base.metadata.create_all(bind=engine)
|
| 73 |
+
print("✅ Database tables created successfully!")
|
| 74 |
+
except Exception as e:
|
| 75 |
+
# Don't crash if it fails (e.g. read-only fs + sqlite), but log it
|
| 76 |
+
print(f"❌ Error creating tables: {e}")
|
| 77 |
+
|
| 78 |
+
print("✅ App Startup Complete")
|
| 79 |
+
|
| 80 |
# Include routers: register the auth, stocks and portfolio API route modules
# on the main application.
app.include_router(auth.router)
app.include_router(stocks.router)
app.include_router(portfolio.router)
|
| 84 |
|
| 85 |
+
# Mount frontend files (path resolved relative to backend/app/main.py:
# backend/app -> backend -> repo root -> frontend).
frontend_path = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..", "frontend"))

if os.path.exists(frontend_path):
    # Mount the asset directories explicitly so the HTML's relative
    # "/js/..." and "/css/..." URLs resolve.
    for _asset in ("js", "css"):
        _asset_path = os.path.join(frontend_path, _asset)
        if os.path.exists(_asset_path):
            app.mount(f"/{_asset}", StaticFiles(directory=_asset_path), name=_asset)

    def _page(filename: str) -> FileResponse:
        """Return a FileResponse for one HTML page in the frontend directory."""
        return FileResponse(os.path.join(frontend_path, filename))

    # Serve HTML pages explicitly
    @app.get("/")
    async def read_root():
        return _page("index.html")

    @app.get("/index.html")
    async def read_index():
        return _page("index.html")

    @app.get("/login.html")
    async def read_login():
        # BUG FIX: this route previously served index.html. Serve login.html
        # when it exists; fall back to index.html to preserve the old behavior
        # for deployments that have no separate login page.
        login_file = os.path.join(frontend_path, "login.html")
        if os.path.exists(login_file):
            return FileResponse(login_file)
        return _page("index.html")

    @app.get("/dashboard.html")
    async def read_dashboard():
        return _page("dashboard.html")

    @app.get("/portfolio.html")
    async def read_portfolio():
        return _page("portfolio.html")
|
| 118 |
|
| 119 |
@app.get("/health")
|
| 120 |
async def health_check():
|
|
|
|
| 122 |
return {
|
| 123 |
"status": "healthy",
|
| 124 |
"api": "operational",
|
| 125 |
+
"database": "supabase"
|
| 126 |
}
|
backend/app/services/stock_service.py
CHANGED
|
@@ -42,7 +42,9 @@ class StockService:
|
|
| 42 |
return cached_data
|
| 43 |
|
| 44 |
try:
|
| 45 |
-
|
|
|
|
|
|
|
| 46 |
stock = yf.Ticker(symbol)
|
| 47 |
|
| 48 |
current_price = None
|
|
@@ -52,27 +54,33 @@ class StockService:
|
|
| 52 |
|
| 53 |
# Use fast_info for quicker response
|
| 54 |
try:
|
|
|
|
| 55 |
fast_info = stock.fast_info
|
| 56 |
current_price = fast_info.get('lastPrice') or fast_info.get('regularMarketPrice')
|
| 57 |
previous_close = fast_info.get('previousClose')
|
| 58 |
volume = fast_info.get('lastVolume') or fast_info.get('volume') or 0
|
| 59 |
market_cap = fast_info.get('marketCap') or 0
|
| 60 |
-
|
|
|
|
|
|
|
| 61 |
pass
|
| 62 |
|
| 63 |
# Fallback to regular info if fast_info failed or missing data
|
| 64 |
if not current_price:
|
| 65 |
try:
|
|
|
|
| 66 |
info = stock.info
|
| 67 |
current_price = info.get('currentPrice') or info.get('regularMarketPrice')
|
| 68 |
previous_close = info.get('previousClose') or info.get('regularMarketPreviousClose')
|
| 69 |
volume = info.get('volume') or info.get('regularMarketVolume') or 0
|
| 70 |
market_cap = info.get('marketCap') or 0
|
| 71 |
-
|
|
|
|
|
|
|
| 72 |
pass
|
| 73 |
|
| 74 |
if not current_price:
|
| 75 |
-
logger.warning(f"Missing price data for {symbol}")
|
| 76 |
return None
|
| 77 |
|
| 78 |
# Use previous close as current if missing (fallback)
|
|
@@ -103,11 +111,13 @@ class StockService:
|
|
| 103 |
|
| 104 |
if include_sparkline:
|
| 105 |
try:
|
|
|
|
| 106 |
# Fetch 1 month history for sparkline
|
| 107 |
hist = stock.history(period="1mo")
|
| 108 |
if not hist.empty:
|
| 109 |
# Normalize data for sparkline (list of floats)
|
| 110 |
result['sparkline'] = [round(float(p), 2) for p in hist['Close'].tolist()]
|
|
|
|
| 111 |
except Exception as e:
|
| 112 |
logger.error(f"Error fetching sparkline for {symbol}: {e}")
|
| 113 |
result['sparkline'] = []
|
|
@@ -115,10 +125,12 @@ class StockService:
|
|
| 115 |
# Cache the result
|
| 116 |
StockService._cache[cache_key] = (result, datetime.now())
|
| 117 |
|
|
|
|
|
|
|
| 118 |
return result
|
| 119 |
|
| 120 |
except Exception as e:
|
| 121 |
-
logger.error(f"Error fetching stock data for {symbol}: {e}")
|
| 122 |
return None
|
| 123 |
|
| 124 |
@staticmethod
|
|
|
|
| 42 |
return cached_data
|
| 43 |
|
| 44 |
try:
|
| 45 |
+
start_time = datetime.now()
|
| 46 |
+
logger.info(f"🔍 Fetching fresh data for {symbol}...")
|
| 47 |
+
|
| 48 |
stock = yf.Ticker(symbol)
|
| 49 |
|
| 50 |
current_price = None
|
|
|
|
| 54 |
|
| 55 |
# Use fast_info for quicker response
|
| 56 |
try:
|
| 57 |
+
t0 = datetime.now()
|
| 58 |
fast_info = stock.fast_info
|
| 59 |
current_price = fast_info.get('lastPrice') or fast_info.get('regularMarketPrice')
|
| 60 |
previous_close = fast_info.get('previousClose')
|
| 61 |
volume = fast_info.get('lastVolume') or fast_info.get('volume') or 0
|
| 62 |
market_cap = fast_info.get('marketCap') or 0
|
| 63 |
+
logger.debug(f"⏱️ Fast info fetch for {symbol} took {(datetime.now()-t0).total_seconds():.3f}s")
|
| 64 |
+
except Exception as e:
|
| 65 |
+
logger.warning(f"Fast info failed for {symbol}: {e}")
|
| 66 |
pass
|
| 67 |
|
| 68 |
# Fallback to regular info if fast_info failed or missing data
|
| 69 |
if not current_price:
|
| 70 |
try:
|
| 71 |
+
t1 = datetime.now()
|
| 72 |
info = stock.info
|
| 73 |
current_price = info.get('currentPrice') or info.get('regularMarketPrice')
|
| 74 |
previous_close = info.get('previousClose') or info.get('regularMarketPreviousClose')
|
| 75 |
volume = info.get('volume') or info.get('regularMarketVolume') or 0
|
| 76 |
market_cap = info.get('marketCap') or 0
|
| 77 |
+
logger.debug(f"⏱️ Regular info fetch for {symbol} took {(datetime.now()-t1).total_seconds():.3f}s")
|
| 78 |
+
except Exception as e:
|
| 79 |
+
logger.warning(f"Regular info failed for {symbol}: {e}")
|
| 80 |
pass
|
| 81 |
|
| 82 |
if not current_price:
|
| 83 |
+
logger.warning(f"❌ Missing price data for {symbol}")
|
| 84 |
return None
|
| 85 |
|
| 86 |
# Use previous close as current if missing (fallback)
|
|
|
|
| 111 |
|
| 112 |
if include_sparkline:
|
| 113 |
try:
|
| 114 |
+
t2 = datetime.now()
|
| 115 |
# Fetch 1 month history for sparkline
|
| 116 |
hist = stock.history(period="1mo")
|
| 117 |
if not hist.empty:
|
| 118 |
# Normalize data for sparkline (list of floats)
|
| 119 |
result['sparkline'] = [round(float(p), 2) for p in hist['Close'].tolist()]
|
| 120 |
+
logger.debug(f"⏱️ Sparkline fetch for {symbol} took {(datetime.now()-t2).total_seconds():.3f}s")
|
| 121 |
except Exception as e:
|
| 122 |
logger.error(f"Error fetching sparkline for {symbol}: {e}")
|
| 123 |
result['sparkline'] = []
|
|
|
|
| 125 |
# Cache the result
|
| 126 |
StockService._cache[cache_key] = (result, datetime.now())
|
| 127 |
|
| 128 |
+
elapsed = (datetime.now() - start_time).total_seconds()
|
| 129 |
+
logger.info(f"✅ Data for {symbol} ready in {elapsed:.3f}s")
|
| 130 |
return result
|
| 131 |
|
| 132 |
except Exception as e:
|
| 133 |
+
logger.error(f"❌ Error fetching stock data for {symbol}: {e}")
|
| 134 |
return None
|
| 135 |
|
| 136 |
@staticmethod
|
check_db.py
ADDED
|
@@ -0,0 +1,44 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
|
| 2 |
+
import os
|
| 3 |
+
import asyncio
|
| 4 |
+
from dotenv import load_dotenv
|
| 5 |
+
from supabase import create_client, Client
|
| 6 |
+
|
| 7 |
+
load_dotenv()
|
| 8 |
+
|
| 9 |
+
SUPABASE_URL = os.getenv("SUPABASE_URL")
|
| 10 |
+
SUPABASE_KEY = os.getenv("SUPABASE_KEY")
|
| 11 |
+
|
| 12 |
+
if not SUPABASE_URL or not SUPABASE_KEY:
|
| 13 |
+
print("❌ Error: SUPABASE_URL or SUPABASE_KEY not found in .env")
|
| 14 |
+
exit(1)
|
| 15 |
+
|
| 16 |
+
print(f"Connecting to Supabase at {SUPABASE_URL}...")
|
| 17 |
+
supabase: Client = create_client(SUPABASE_URL, SUPABASE_KEY)
|
| 18 |
+
|
| 19 |
+
def run_migration():
    """Verify the Supabase connection and that the 'users' table exists.

    Despite the name, no DDL is executed here: the Python client cannot run
    raw SQL without a stored procedure, so table creation must happen in the
    Supabase SQL editor (or a SQLAlchemy-based setup script). This function
    only issues a cheap count query to confirm connectivity and schema.
    """
    print("🔍 Checking database access...")

    try:
        # Cheap exact-count read: succeeds only if the DB is reachable AND the
        # 'users' table exists.
        supabase.table("users").select("count", count="exact").execute()
        print("✅ Connected! 'users' table exists.")
    except Exception as e:
        # The client does not let us distinguish "unreachable" from "table
        # missing" here, so report both possibilities rather than claiming the
        # connection succeeded.
        print(f"⚠️ Could not query 'users' table (connection or schema issue): {e}")
        print("You likely need to run the SQL initialization in the Supabase Dashboard.")

if __name__ == "__main__":
    run_migration()