Spaces:
Runtime error
Runtime error
Pradeep Rajan commited on
Commit ·
5c7dec2
1
Parent(s): 88199ed
Initial deployment of Zyon Traders Backend7
Browse files- .env.example +27 -0
- Dockerfile +24 -16
- README.md +95 -7
- app.py +0 -12
- config/settings.py +85 -0
- main.py +86 -295
- models/auth.py +116 -0
- models/trading.py +239 -0
- requirements.txt +41 -43
- routers/auth.py +376 -0
- routers/dhan.py +311 -0
- routers/portfolio.py +402 -0
- routers/signals.py +409 -0
- services/auth.py +286 -0
- services/supabase_client.py +42 -0
- services/websocket_manager.py +135 -70
- utils/logging_config.py +84 -0
.env.example
ADDED
|
@@ -0,0 +1,27 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Zyon Traders Backend Environment Variables
|
| 2 |
+
# Copy this file to .env and fill in your actual values
|
| 3 |
+
|
| 4 |
+
# App Configuration
|
| 5 |
+
DEBUG=false
|
| 6 |
+
SECRET_KEY=your-secret-key-here
|
| 7 |
+
ALLOWED_ORIGINS=https://your-frontend-domain.com
|
| 8 |
+
|
| 9 |
+
# Supabase Configuration
|
| 10 |
+
SUPABASE_PROJECT_URL=https://your-project.supabase.co
|
| 11 |
+
SUPABASE_ANON_KEY=your-supabase-anon-key
|
| 12 |
+
|
| 13 |
+
# Dhan Trading API
|
| 14 |
+
DHAN_API_KEY=your-dhan-api-key
|
| 15 |
+
|
| 16 |
+
# AI Services
|
| 17 |
+
GEMINI_API_KEY=your-gemini-api-key
|
| 18 |
+
TOGETHER_API_KEY=your-together-api-key
|
| 19 |
+
FIREWORKS_API_KEY=your-fireworks-api-key
|
| 20 |
+
HUGGINGFACE_API_KEY=your-huggingface-api-key
|
| 21 |
+
LANGCHAIN_API_KEY=your-langchain-api-key
|
| 22 |
+
|
| 23 |
+
# External Services
|
| 24 |
+
UPTIMEROBOT_API_KEY=your-uptimerobot-key
|
| 25 |
+
|
| 26 |
+
# Logging
|
| 27 |
+
LOG_LEVEL=INFO
|
Dockerfile
CHANGED
|
@@ -1,35 +1,43 @@
|
|
| 1 |
-
#
|
| 2 |
-
FROM python:3.
|
| 3 |
|
| 4 |
-
# Set
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 5 |
WORKDIR /app
|
| 6 |
|
| 7 |
# Install system dependencies
|
| 8 |
-
RUN apt-get update
|
| 9 |
-
|
| 10 |
-
|
| 11 |
-
|
| 12 |
-
|
| 13 |
&& rm -rf /var/lib/apt/lists/*
|
| 14 |
|
| 15 |
# Copy requirements first for better caching
|
| 16 |
COPY requirements.txt .
|
| 17 |
|
| 18 |
# Install Python dependencies
|
| 19 |
-
RUN pip install --no-cache-dir -
|
|
|
|
| 20 |
|
| 21 |
# Copy application code
|
| 22 |
COPY . .
|
| 23 |
|
| 24 |
-
# Create
|
| 25 |
-
RUN
|
|
|
|
|
|
|
| 26 |
|
| 27 |
-
# Expose port
|
| 28 |
-
EXPOSE
|
| 29 |
|
| 30 |
# Health check
|
| 31 |
HEALTHCHECK --interval=30s --timeout=30s --start-period=5s --retries=3 \
|
| 32 |
-
CMD curl -f http://localhost:
|
| 33 |
|
| 34 |
-
#
|
| 35 |
-
CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "7860"
|
|
|
|
| 1 |
+
# Use Python 3.10 slim image for optimal compatibility
|
| 2 |
+
FROM python:3.10-slim
|
| 3 |
|
| 4 |
+
# Set environment variables
|
| 5 |
+
ENV PYTHONDONTWRITEBYTECODE=1
|
| 6 |
+
ENV PYTHONUNBUFFERED=1
|
| 7 |
+
ENV PORT=8000
|
| 8 |
+
|
| 9 |
+
# Set work directory
|
| 10 |
WORKDIR /app
|
| 11 |
|
| 12 |
# Install system dependencies
|
| 13 |
+
RUN apt-get update \
|
| 14 |
+
&& apt-get install -y --no-install-recommends \
|
| 15 |
+
build-essential \
|
| 16 |
+
libpq-dev \
|
| 17 |
+
curl \
|
| 18 |
&& rm -rf /var/lib/apt/lists/*
|
| 19 |
|
| 20 |
# Copy requirements first for better caching
|
| 21 |
COPY requirements.txt .
|
| 22 |
|
| 23 |
# Install Python dependencies
|
| 24 |
+
RUN pip install --no-cache-dir --upgrade pip \
|
| 25 |
+
&& pip install --no-cache-dir -r requirements.txt
|
| 26 |
|
| 27 |
# Copy application code
|
| 28 |
COPY . .
|
| 29 |
|
| 30 |
+
# Create non-root user for security
|
| 31 |
+
RUN useradd --create-home --shell /bin/bash app \
|
| 32 |
+
&& chown -R app:app /app
|
| 33 |
+
USER app
|
| 34 |
|
| 35 |
+
# Expose port
|
| 36 |
+
EXPOSE $PORT
|
| 37 |
|
| 38 |
# Health check
|
| 39 |
HEALTHCHECK --interval=30s --timeout=30s --start-period=5s --retries=3 \
|
| 40 |
+
CMD curl -f http://localhost:$PORT/api/health || exit 1
|
| 41 |
|
| 42 |
+
# Run the application (HuggingFace Spaces compatible)
|
| 43 |
+
CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "7860"]
|
README.md
CHANGED
|
@@ -1,12 +1,100 @@
|
|
| 1 |
---
|
| 2 |
title: Zyon Traders Backend
|
| 3 |
-
emoji:
|
| 4 |
-
colorFrom:
|
| 5 |
-
colorTo:
|
| 6 |
-
sdk:
|
| 7 |
-
sdk_version: "4.25.0"
|
| 8 |
-
app_file: app.py
|
| 9 |
pinned: false
|
|
|
|
|
|
|
| 10 |
---
|
| 11 |
|
| 12 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
---
|
| 2 |
title: Zyon Traders Backend
|
| 3 |
+
emoji: 📈
|
| 4 |
+
colorFrom: blue
|
| 5 |
+
colorTo: purple
|
| 6 |
+
sdk: docker
|
|
|
|
|
|
|
| 7 |
pinned: false
|
| 8 |
+
license: mit
|
| 9 |
+
app_port: 8000
|
| 10 |
---
|
| 11 |
|
| 12 |
+
# Zyon Traders Backend API
|
| 13 |
+
|
| 14 |
+
AI-powered trading platform backend built with FastAPI. This backend provides:
|
| 15 |
+
|
| 16 |
+
- **Real-time Market Data**: Integration with Dhan API for live stock prices
|
| 17 |
+
- **AI Trading Signals**: Machine learning powered trading recommendations using multiple AI services
|
| 18 |
+
- **Portfolio Analytics**: Advanced risk analysis and performance metrics
|
| 19 |
+
- **WebSocket Support**: Real-time data streaming to frontend clients
|
| 20 |
+
- **Authentication**: Supabase integration for user management
|
| 21 |
+
|
| 22 |
+
## Features
|
| 23 |
+
|
| 24 |
+
### 🤖 AI Services Integration
|
| 25 |
+
|
| 26 |
+
- **Google Gemini**: Market analysis and signal generation
|
| 27 |
+
- **Together AI**: Alternative ML models for trading insights
|
| 28 |
+
- **Fireworks AI**: High-performance inference
|
| 29 |
+
- **HuggingFace**: Open-source ML models
|
| 30 |
+
|
| 31 |
+
### 📊 Trading Features
|
| 32 |
+
|
| 33 |
+
- Real-time market data from Dhan API
|
| 34 |
+
- Portfolio tracking and management
|
| 35 |
+
- Risk analysis (VaR, Sharpe ratio, etc.)
|
| 36 |
+
- Stock screening with technical indicators
|
| 37 |
+
- Order management (buy/sell/cancel)
|
| 38 |
+
|
| 39 |
+
### 🔐 Security
|
| 40 |
+
|
| 41 |
+
- JWT token authentication
|
| 42 |
+
- CORS protection
|
| 43 |
+
- Rate limiting
|
| 44 |
+
- Environment variable security
|
| 45 |
+
|
| 46 |
+
## API Endpoints
|
| 47 |
+
|
| 48 |
+
- `GET /api/health` - Health check
|
| 49 |
+
- `POST /api/auth/login` - User authentication
|
| 50 |
+
- `GET /api/dhan/quotes` - Real-time stock quotes
|
| 51 |
+
- `GET /api/signals/generate` - AI trading signals
|
| 52 |
+
- `GET /api/portfolio/analytics` - Portfolio analysis
|
| 53 |
+
- `GET /api/screener/stocks` - Stock screening
|
| 54 |
+
- `WebSocket /ws` - Real-time data streaming
|
| 55 |
+
|
| 56 |
+
## Environment Variables Required
|
| 57 |
+
|
| 58 |
+
Set these in your HuggingFace Space settings:
|
| 59 |
+
|
| 60 |
+
```
|
| 61 |
+
SECRET_KEY=your-secret-key
|
| 62 |
+
SUPABASE_PROJECT_URL=your-supabase-url
|
| 63 |
+
SUPABASE_ANON_KEY=your-supabase-key
|
| 64 |
+
DHAN_API_KEY=your-dhan-api-key
|
| 65 |
+
GEMINI_API_KEY=your-gemini-key
|
| 66 |
+
TOGETHER_API_KEY=your-together-key
|
| 67 |
+
FIREWORKS_API_KEY=your-fireworks-key
|
| 68 |
+
HUGGINGFACE_API_KEY=your-hf-key
|
| 69 |
+
ALLOWED_ORIGINS=https://your-frontend-domain.com
|
| 70 |
+
```
|
| 71 |
+
|
| 72 |
+
## Usage
|
| 73 |
+
|
| 74 |
+
Once deployed, your API will be available at:
|
| 75 |
+
|
| 76 |
+
- Main endpoint: `https://your-username-zyon-traders-backend.hf.space`
|
| 77 |
+
- Health check: `https://your-username-zyon-traders-backend.hf.space/api/health`
|
| 78 |
+
- API docs: `https://your-username-zyon-traders-backend.hf.space/docs`
|
| 79 |
+
|
| 80 |
+
## Local Development
|
| 81 |
+
|
| 82 |
+
```bash
|
| 83 |
+
cd backend
|
| 84 |
+
pip install -r requirements.txt
|
| 85 |
+
uvicorn main:app --reload --port 8000
|
| 86 |
+
```
|
| 87 |
+
|
| 88 |
+
## Architecture
|
| 89 |
+
|
| 90 |
+
Built with:
|
| 91 |
+
|
| 92 |
+
- **FastAPI**: High-performance Python web framework
|
| 93 |
+
- **WebSockets**: Real-time communication
|
| 94 |
+
- **Pydantic**: Data validation and settings management
|
| 95 |
+
- **Asyncio**: Asynchronous programming for high concurrency
|
| 96 |
+
- **Docker**: Containerized deployment
|
| 97 |
+
|
| 98 |
+
## License
|
| 99 |
+
|
| 100 |
+
MIT License - see LICENSE file for details.
|
app.py
DELETED
|
@@ -1,12 +0,0 @@
|
|
| 1 |
-
"""
|
| 2 |
-
Hugging Face Spaces App Configuration
|
| 3 |
-
This file is required by Hugging Face Spaces for Docker apps
|
| 4 |
-
"""
|
| 5 |
-
|
| 6 |
-
# Import the main FastAPI app
|
| 7 |
-
from main import app
|
| 8 |
-
|
| 9 |
-
# This makes the app available to Hugging Face Spaces
|
| 10 |
-
if __name__ == "__main__":
|
| 11 |
-
import uvicorn
|
| 12 |
-
uvicorn.run(app, host="0.0.0.0", port=7860)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
config/settings.py
ADDED
|
@@ -0,0 +1,85 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Application settings and configuration
|
| 3 |
+
Environment-based configuration for production deployment
|
| 4 |
+
"""
|
| 5 |
+
|
| 6 |
+
import os
|
| 7 |
+
from typing import List, Optional
|
| 8 |
+
from pydantic import BaseSettings, validator
|
| 9 |
+
from functools import lru_cache
|
| 10 |
+
|
| 11 |
+
class Settings(BaseSettings):
|
| 12 |
+
"""Application settings"""
|
| 13 |
+
|
| 14 |
+
# Basic app config
|
| 15 |
+
DEBUG: bool = False
|
| 16 |
+
SECRET_KEY: str = "your-secret-key-change-in-production"
|
| 17 |
+
API_V1_STR: str = "/api"
|
| 18 |
+
PROJECT_NAME: str = "Zyon Traders API"
|
| 19 |
+
|
| 20 |
+
# Server config
|
| 21 |
+
HOST: str = "0.0.0.0"
|
| 22 |
+
PORT: int = 8000
|
| 23 |
+
|
| 24 |
+
# CORS settings
|
| 25 |
+
ALLOWED_ORIGINS: List[str] = [
|
| 26 |
+
"http://localhost:3000",
|
| 27 |
+
"http://localhost:8080",
|
| 28 |
+
"https://zyon-traders.netlify.app", # Your frontend URL
|
| 29 |
+
"https://your-domain.com" # Replace with your actual domain
|
| 30 |
+
]
|
| 31 |
+
|
| 32 |
+
# Database
|
| 33 |
+
DATABASE_URL: Optional[str] = None
|
| 34 |
+
|
| 35 |
+
# Supabase
|
| 36 |
+
SUPABASE_PROJECT_URL: str
|
| 37 |
+
SUPABASE_ANON_KEY: str
|
| 38 |
+
SUPABASE_SERVICE_ROLE_KEY: Optional[str] = None
|
| 39 |
+
|
| 40 |
+
# Dhan API
|
| 41 |
+
DHAN_API_KEY: str
|
| 42 |
+
DHAN_API_BASE_URL: str = "https://api.dhan.co/v2"
|
| 43 |
+
|
| 44 |
+
# AI Services
|
| 45 |
+
TOGETHER_API_KEY: Optional[str] = None
|
| 46 |
+
FIREWORKS_API_KEY: Optional[str] = None
|
| 47 |
+
GEMINI_API_KEY: Optional[str] = None
|
| 48 |
+
LANGCHAIN_API_KEY: Optional[str] = None
|
| 49 |
+
HUGGINGFACE_API_KEY: Optional[str] = None
|
| 50 |
+
|
| 51 |
+
# External Services
|
| 52 |
+
UPTIMEROBOT_API_KEY: Optional[str] = None
|
| 53 |
+
|
| 54 |
+
# Redis (for caching - optional)
|
| 55 |
+
REDIS_URL: Optional[str] = None
|
| 56 |
+
|
| 57 |
+
# Rate limiting
|
| 58 |
+
RATE_LIMIT_PER_MINUTE: int = 100
|
| 59 |
+
|
| 60 |
+
# WebSocket settings
|
| 61 |
+
WS_HEARTBEAT_INTERVAL: int = 30
|
| 62 |
+
WS_MAX_CONNECTIONS: int = 1000
|
| 63 |
+
|
| 64 |
+
# Logging
|
| 65 |
+
LOG_LEVEL: str = "INFO"
|
| 66 |
+
|
| 67 |
+
@validator("ALLOWED_ORIGINS", pre=True)
|
| 68 |
+
def assemble_cors_origins(cls, v: str) -> List[str]:
|
| 69 |
+
if isinstance(v, str) and not v.startswith("["):
|
| 70 |
+
return [i.strip() for i in v.split(",")]
|
| 71 |
+
elif isinstance(v, (list, str)):
|
| 72 |
+
return v
|
| 73 |
+
raise ValueError(v)
|
| 74 |
+
|
| 75 |
+
class Config:
|
| 76 |
+
env_file = ".env"
|
| 77 |
+
case_sensitive = True
|
| 78 |
+
|
| 79 |
+
@lru_cache()
|
| 80 |
+
def get_settings() -> Settings:
|
| 81 |
+
"""Get cached settings instance"""
|
| 82 |
+
return Settings()
|
| 83 |
+
|
| 84 |
+
# For Render.com deployment, settings will be loaded from environment variables
|
| 85 |
+
settings = get_settings()
|
main.py
CHANGED
|
@@ -1,362 +1,153 @@
|
|
| 1 |
"""
|
| 2 |
-
Zyon Traders
|
| 3 |
-
|
|
|
|
| 4 |
"""
|
| 5 |
|
| 6 |
-
import
|
| 7 |
-
import logging
|
| 8 |
-
from datetime import datetime
|
| 9 |
-
from typing import Dict, Any, List, Optional
|
| 10 |
-
from contextlib import asynccontextmanager
|
| 11 |
-
|
| 12 |
-
from fastapi import FastAPI, HTTPException, WebSocket, WebSocketDisconnect, BackgroundTasks
|
| 13 |
from fastapi.middleware.cors import CORSMiddleware
|
| 14 |
from fastapi.responses import JSONResponse
|
| 15 |
-
from
|
| 16 |
-
import
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 17 |
|
| 18 |
-
# Import
|
| 19 |
-
from
|
| 20 |
-
from services.ai_orchestration import AIOrchestrationService
|
| 21 |
-
from services.knowledge_service import KnowledgeService
|
| 22 |
-
from services.trading_service import TradingService
|
| 23 |
from services.websocket_manager import WebSocketManager
|
|
|
|
|
|
|
| 24 |
|
| 25 |
-
#
|
| 26 |
-
|
| 27 |
logger = logging.getLogger(__name__)
|
| 28 |
|
| 29 |
-
#
|
| 30 |
-
|
| 31 |
-
|
| 32 |
-
|
| 33 |
-
trading_service = None
|
| 34 |
websocket_manager = WebSocketManager()
|
| 35 |
|
| 36 |
@asynccontextmanager
|
| 37 |
async def lifespan(app: FastAPI):
|
| 38 |
-
"""
|
| 39 |
-
|
| 40 |
-
|
| 41 |
-
logger.info("🚀 Starting Zyon Traders Backend on Hugging Face Spaces")
|
| 42 |
-
|
| 43 |
-
# Initialize services
|
| 44 |
-
dhan_service = DhanService()
|
| 45 |
-
ai_service = AIOrchestrationService()
|
| 46 |
-
knowledge_service = KnowledgeService()
|
| 47 |
-
trading_service = TradingService()
|
| 48 |
|
| 49 |
-
#
|
| 50 |
-
|
| 51 |
-
|
| 52 |
-
|
| 53 |
-
|
| 54 |
-
|
| 55 |
-
|
| 56 |
-
|
| 57 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 58 |
|
| 59 |
# Create FastAPI app
|
| 60 |
app = FastAPI(
|
| 61 |
-
title="Zyon Traders
|
| 62 |
-
description="AI-Powered
|
| 63 |
version="1.0.0",
|
| 64 |
-
docs_url="/docs",
|
| 65 |
-
redoc_url="/redoc",
|
| 66 |
lifespan=lifespan
|
| 67 |
)
|
| 68 |
|
| 69 |
-
# CORS
|
| 70 |
app.add_middleware(
|
| 71 |
CORSMiddleware,
|
| 72 |
-
allow_origins=
|
| 73 |
-
"https://zyonpackers.in",
|
| 74 |
-
"https://2fc6c2c78341421ab2376a35a7a55be2-9a94dad135744323b87f1579e.fly.dev",
|
| 75 |
-
"http://localhost:3000",
|
| 76 |
-
"http://localhost:5173",
|
| 77 |
-
],
|
| 78 |
allow_credentials=True,
|
| 79 |
-
allow_methods=["
|
| 80 |
allow_headers=["*"],
|
| 81 |
)
|
| 82 |
|
| 83 |
-
#
|
| 84 |
-
|
| 85 |
-
|
| 86 |
-
|
| 87 |
-
|
| 88 |
-
|
|
|
|
| 89 |
|
| 90 |
-
class OrderRequest(BaseModel):
|
| 91 |
-
symbol: str
|
| 92 |
-
side: str # BUY or SELL
|
| 93 |
-
quantity: int
|
| 94 |
-
order_type: str # MARKET, LIMIT, STOP
|
| 95 |
-
price: Optional[float] = None
|
| 96 |
-
segment: str = "EQ" # EQ, FO, CD
|
| 97 |
-
|
| 98 |
-
class MarketDataRequest(BaseModel):
|
| 99 |
-
symbols: List[str]
|
| 100 |
-
interval: str = "1min"
|
| 101 |
-
|
| 102 |
-
# Root endpoint
|
| 103 |
@app.get("/")
|
| 104 |
async def root():
|
| 105 |
-
"""
|
| 106 |
return {
|
| 107 |
-
"message": "Zyon Traders
|
| 108 |
-
"
|
| 109 |
-
"deployment": "Hugging Face Spaces",
|
| 110 |
"timestamp": datetime.utcnow().isoformat(),
|
| 111 |
-
"
|
| 112 |
}
|
| 113 |
|
| 114 |
-
|
| 115 |
-
@app.get("/health", response_model=HealthResponse)
|
| 116 |
async def health_check():
|
| 117 |
-
"""
|
| 118 |
-
services_status = {
|
| 119 |
-
"dhan_api": "connected" if dhan_service and await dhan_service.test_connection() else "disconnected",
|
| 120 |
-
"ai_orchestration": "active",
|
| 121 |
-
"knowledge_base": "ready",
|
| 122 |
-
"trading_engine": "operational",
|
| 123 |
-
"websocket": "active"
|
| 124 |
-
}
|
| 125 |
-
|
| 126 |
-
return HealthResponse(
|
| 127 |
-
status="OK",
|
| 128 |
-
timestamp=datetime.utcnow().isoformat(),
|
| 129 |
-
version="1.0.0",
|
| 130 |
-
services=services_status
|
| 131 |
-
)
|
| 132 |
-
|
| 133 |
-
# API v1 routes
|
| 134 |
-
@app.get("/api/v1/status")
|
| 135 |
-
async def api_status():
|
| 136 |
-
"""API status endpoint"""
|
| 137 |
-
return {
|
| 138 |
-
"api_version": "v1",
|
| 139 |
-
"status": "operational",
|
| 140 |
-
"features": [
|
| 141 |
-
"live_market_data",
|
| 142 |
-
"ai_trading_signals",
|
| 143 |
-
"paper_trading",
|
| 144 |
-
"knowledge_rag",
|
| 145 |
-
"options_chain",
|
| 146 |
-
"portfolio_management"
|
| 147 |
-
]
|
| 148 |
-
}
|
| 149 |
-
|
| 150 |
-
# Dhan API Integration
|
| 151 |
-
@app.post("/api/v1/dhan/test-connection")
|
| 152 |
-
async def test_dhan_connection():
|
| 153 |
-
"""Test Dhan API connection"""
|
| 154 |
-
if not dhan_service:
|
| 155 |
-
raise HTTPException(status_code=503, detail="Dhan service not initialized")
|
| 156 |
-
|
| 157 |
-
is_connected = await dhan_service.test_connection()
|
| 158 |
return {
|
| 159 |
-
"
|
| 160 |
-
"
|
| 161 |
-
|
| 162 |
-
|
| 163 |
-
"
|
|
|
|
|
|
|
| 164 |
}
|
| 165 |
}
|
| 166 |
|
| 167 |
-
@app.get("/api/v1/trading/market-data")
|
| 168 |
-
async def get_market_data(symbols: str = "RELIANCE,TCS,HDFCBANK"):
|
| 169 |
-
"""Get live market data for symbols"""
|
| 170 |
-
if not dhan_service:
|
| 171 |
-
raise HTTPException(status_code=503, detail="Dhan service not initialized")
|
| 172 |
-
|
| 173 |
-
symbol_list = symbols.split(",")
|
| 174 |
-
market_data = await dhan_service.get_market_data(symbol_list)
|
| 175 |
-
|
| 176 |
-
return {
|
| 177 |
-
"success": True,
|
| 178 |
-
"data": market_data,
|
| 179 |
-
"timestamp": datetime.utcnow().isoformat()
|
| 180 |
-
}
|
| 181 |
-
|
| 182 |
-
@app.get("/api/v1/trading/options-chain/{symbol}")
|
| 183 |
-
async def get_options_chain(symbol: str, expiry: Optional[str] = None):
|
| 184 |
-
"""Get options chain data for a symbol"""
|
| 185 |
-
if not dhan_service:
|
| 186 |
-
raise HTTPException(status_code=503, detail="Dhan service not initialized")
|
| 187 |
-
|
| 188 |
-
options_data = await dhan_service.get_options_chain(symbol, expiry)
|
| 189 |
-
|
| 190 |
-
return {
|
| 191 |
-
"success": True,
|
| 192 |
-
"data": options_data,
|
| 193 |
-
"timestamp": datetime.utcnow().isoformat()
|
| 194 |
-
}
|
| 195 |
-
|
| 196 |
-
@app.post("/api/v1/trading/orders")
|
| 197 |
-
async def place_order(order: OrderRequest):
|
| 198 |
-
"""Place a trading order (paper trading mode)"""
|
| 199 |
-
if not trading_service:
|
| 200 |
-
raise HTTPException(status_code=503, detail="Trading service not initialized")
|
| 201 |
-
|
| 202 |
-
# In paper trading mode - simulate order placement
|
| 203 |
-
order_result = await trading_service.place_paper_order(
|
| 204 |
-
symbol=order.symbol,
|
| 205 |
-
side=order.side,
|
| 206 |
-
quantity=order.quantity,
|
| 207 |
-
order_type=order.order_type,
|
| 208 |
-
price=order.price
|
| 209 |
-
)
|
| 210 |
-
|
| 211 |
-
return {
|
| 212 |
-
"success": True,
|
| 213 |
-
"data": order_result,
|
| 214 |
-
"timestamp": datetime.utcnow().isoformat()
|
| 215 |
-
}
|
| 216 |
-
|
| 217 |
-
@app.get("/api/v1/trading/portfolio")
|
| 218 |
-
async def get_portfolio():
|
| 219 |
-
"""Get portfolio summary"""
|
| 220 |
-
if not trading_service:
|
| 221 |
-
raise HTTPException(status_code=503, detail="Trading service not initialized")
|
| 222 |
-
|
| 223 |
-
portfolio = await trading_service.get_portfolio_summary()
|
| 224 |
-
|
| 225 |
-
return {
|
| 226 |
-
"success": True,
|
| 227 |
-
"data": portfolio,
|
| 228 |
-
"timestamp": datetime.utcnow().isoformat()
|
| 229 |
-
}
|
| 230 |
-
|
| 231 |
-
# AI Orchestration Endpoints
|
| 232 |
-
@app.post("/api/v1/ai/generate-strategy")
|
| 233 |
-
async def generate_ai_strategy(
|
| 234 |
-
market_context: str,
|
| 235 |
-
risk_profile: str = "moderate",
|
| 236 |
-
capital: float = 100000
|
| 237 |
-
):
|
| 238 |
-
"""Generate AI trading strategy"""
|
| 239 |
-
if not ai_service:
|
| 240 |
-
raise HTTPException(status_code=503, detail="AI service not initialized")
|
| 241 |
-
|
| 242 |
-
strategy = await ai_service.generate_strategy(market_context, risk_profile, capital)
|
| 243 |
-
|
| 244 |
-
return {
|
| 245 |
-
"success": True,
|
| 246 |
-
"data": strategy,
|
| 247 |
-
"timestamp": datetime.utcnow().isoformat()
|
| 248 |
-
}
|
| 249 |
-
|
| 250 |
-
@app.post("/api/v1/ai/validate-signal")
|
| 251 |
-
async def validate_trading_signal(signal_data: Dict[str, Any]):
|
| 252 |
-
"""Validate trading signal using AI"""
|
| 253 |
-
if not ai_service:
|
| 254 |
-
raise HTTPException(status_code=503, detail="AI service not initialized")
|
| 255 |
-
|
| 256 |
-
validation = await ai_service.validate_signal(signal_data)
|
| 257 |
-
|
| 258 |
-
return {
|
| 259 |
-
"success": True,
|
| 260 |
-
"data": validation,
|
| 261 |
-
"timestamp": datetime.utcnow().isoformat()
|
| 262 |
-
}
|
| 263 |
-
|
| 264 |
-
# Knowledge Base RAG System
|
| 265 |
-
@app.post("/api/v1/knowledge/upload")
|
| 266 |
-
async def upload_knowledge(file_url: str, content_type: str = "pdf"):
|
| 267 |
-
"""Upload document to knowledge base"""
|
| 268 |
-
if not knowledge_service:
|
| 269 |
-
raise HTTPException(status_code=503, detail="Knowledge service not initialized")
|
| 270 |
-
|
| 271 |
-
result = await knowledge_service.add_document(file_url, content_type)
|
| 272 |
-
|
| 273 |
-
return {
|
| 274 |
-
"success": True,
|
| 275 |
-
"data": result,
|
| 276 |
-
"timestamp": datetime.utcnow().isoformat()
|
| 277 |
-
}
|
| 278 |
-
|
| 279 |
-
@app.post("/api/v1/knowledge/query")
|
| 280 |
-
async def query_knowledge(query: str, context: str = "general"):
|
| 281 |
-
"""Query the knowledge base"""
|
| 282 |
-
if not knowledge_service:
|
| 283 |
-
raise HTTPException(status_code=503, detail="Knowledge service not initialized")
|
| 284 |
-
|
| 285 |
-
response = await knowledge_service.query(query, context)
|
| 286 |
-
|
| 287 |
-
return {
|
| 288 |
-
"success": True,
|
| 289 |
-
"data": response,
|
| 290 |
-
"timestamp": datetime.utcnow().isoformat()
|
| 291 |
-
}
|
| 292 |
-
|
| 293 |
-
# WebSocket for real-time data
|
| 294 |
@app.websocket("/ws")
|
| 295 |
async def websocket_endpoint(websocket: WebSocket):
|
| 296 |
-
"""WebSocket endpoint for real-time
|
| 297 |
await websocket_manager.connect(websocket)
|
| 298 |
try:
|
| 299 |
while True:
|
| 300 |
-
# Keep connection alive and
|
| 301 |
data = await websocket.receive_text()
|
|
|
|
| 302 |
|
| 303 |
# Handle different message types
|
| 304 |
-
if
|
| 305 |
-
|
| 306 |
-
|
| 307 |
-
|
| 308 |
-
|
| 309 |
-
|
| 310 |
-
|
| 311 |
-
|
| 312 |
-
await websocket_manager.send_personal_message(portfolio_update, websocket)
|
| 313 |
|
| 314 |
except WebSocketDisconnect:
|
| 315 |
websocket_manager.disconnect(websocket)
|
| 316 |
-
|
| 317 |
-
|
| 318 |
-
|
| 319 |
-
async def http_exception_handler(request, exc: HTTPException):
|
| 320 |
-
return JSONResponse(
|
| 321 |
-
status_code=exc.status_code,
|
| 322 |
-
content={
|
| 323 |
-
"success": False,
|
| 324 |
-
"error": exc.detail,
|
| 325 |
-
"timestamp": datetime.utcnow().isoformat()
|
| 326 |
-
}
|
| 327 |
-
)
|
| 328 |
|
| 329 |
@app.exception_handler(Exception)
|
| 330 |
-
async def
|
| 331 |
-
|
|
|
|
| 332 |
return JSONResponse(
|
| 333 |
status_code=500,
|
| 334 |
content={
|
| 335 |
-
"success": False,
|
| 336 |
"error": "Internal server error",
|
|
|
|
| 337 |
"timestamp": datetime.utcnow().isoformat()
|
| 338 |
}
|
| 339 |
)
|
| 340 |
|
| 341 |
-
# Background tasks
|
| 342 |
-
async def market_data_updater():
|
| 343 |
-
"""Background task to update market data"""
|
| 344 |
-
while True:
|
| 345 |
-
try:
|
| 346 |
-
if dhan_service and websocket_manager.active_connections:
|
| 347 |
-
market_data = await dhan_service.get_live_data()
|
| 348 |
-
await websocket_manager.broadcast(market_data)
|
| 349 |
-
except Exception as e:
|
| 350 |
-
logger.error(f"Market data update error: {e}")
|
| 351 |
-
|
| 352 |
-
await asyncio.sleep(5) # Update every 5 seconds
|
| 353 |
-
|
| 354 |
if __name__ == "__main__":
|
| 355 |
-
|
| 356 |
uvicorn.run(
|
| 357 |
"main:app",
|
| 358 |
host="0.0.0.0",
|
| 359 |
-
port=
|
| 360 |
-
reload=
|
| 361 |
log_level="info"
|
| 362 |
)
|
|
|
|
| 1 |
"""
|
| 2 |
+
Zyon Traders FastAPI Backend
|
| 3 |
+
Production-ready API for AI-powered trading platform
|
| 4 |
+
Optimized for Render.com deployment
|
| 5 |
"""
|
| 6 |
|
| 7 |
+
from fastapi import FastAPI, HTTPException, Depends, Header, WebSocket, WebSocketDisconnect
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 8 |
from fastapi.middleware.cors import CORSMiddleware
|
| 9 |
from fastapi.responses import JSONResponse
|
| 10 |
+
from contextlib import asynccontextmanager
|
| 11 |
+
import asyncio
|
| 12 |
+
import json
|
| 13 |
+
import logging
|
| 14 |
+
from typing import Optional, List, Dict, Any
|
| 15 |
+
import os
|
| 16 |
+
from datetime import datetime, timedelta
|
| 17 |
|
| 18 |
+
# Import routers
|
| 19 |
+
from routers import auth, signals, dhan, portfolio, screener, analytics
|
|
|
|
|
|
|
|
|
|
| 20 |
from services.websocket_manager import WebSocketManager
|
| 21 |
+
from config.settings import get_settings
|
| 22 |
+
from utils.logging_config import setup_logging
|
| 23 |
|
| 24 |
+
# Setup logging
|
| 25 |
+
setup_logging()
|
| 26 |
logger = logging.getLogger(__name__)
|
| 27 |
|
| 28 |
+
# Get settings
|
| 29 |
+
settings = get_settings()
|
| 30 |
+
|
| 31 |
+
# WebSocket manager for real-time data
|
|
|
|
| 32 |
websocket_manager = WebSocketManager()
|
| 33 |
|
| 34 |
@asynccontextmanager
|
| 35 |
async def lifespan(app: FastAPI):
|
| 36 |
+
"""Application lifespan management"""
|
| 37 |
+
logger.info("Starting Zyon Traders Backend API")
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 38 |
|
| 39 |
+
# Startup tasks
|
| 40 |
+
try:
|
| 41 |
+
# Initialize services
|
| 42 |
+
await websocket_manager.start_background_tasks()
|
| 43 |
+
logger.info("Background tasks initialized")
|
| 44 |
+
|
| 45 |
+
yield
|
| 46 |
+
|
| 47 |
+
except Exception as e:
|
| 48 |
+
logger.error(f"Startup error: {e}")
|
| 49 |
+
raise
|
| 50 |
+
finally:
|
| 51 |
+
# Cleanup tasks
|
| 52 |
+
logger.info("Shutting down Zyon Traders Backend API")
|
| 53 |
+
await websocket_manager.cleanup()
|
| 54 |
|
| 55 |
# Create FastAPI app
|
| 56 |
app = FastAPI(
|
| 57 |
+
title="Zyon Traders API",
|
| 58 |
+
description="AI-Powered Trading Platform Backend",
|
| 59 |
version="1.0.0",
|
| 60 |
+
docs_url="/docs" if settings.DEBUG else None,
|
| 61 |
+
redoc_url="/redoc" if settings.DEBUG else None,
|
| 62 |
lifespan=lifespan
|
| 63 |
)
|
| 64 |
|
| 65 |
+
# CORS middleware
|
| 66 |
app.add_middleware(
|
| 67 |
CORSMiddleware,
|
| 68 |
+
allow_origins=settings.ALLOWED_ORIGINS,
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 69 |
allow_credentials=True,
|
| 70 |
+
allow_methods=["GET", "POST", "PUT", "DELETE", "OPTIONS"],
|
| 71 |
allow_headers=["*"],
|
| 72 |
)
|
| 73 |
|
| 74 |
+
# Include routers
|
| 75 |
+
app.include_router(auth.router, prefix="/api/auth", tags=["Authentication"])
|
| 76 |
+
app.include_router(dhan.router, prefix="/api/dhan", tags=["Dhan Trading API"])
|
| 77 |
+
app.include_router(signals.router, prefix="/api/signals", tags=["AI Trading Signals"])
|
| 78 |
+
app.include_router(portfolio.router, prefix="/api/portfolio", tags=["Portfolio Management"])
|
| 79 |
+
app.include_router(screener.router, prefix="/api/screener", tags=["Market Screener"])
|
| 80 |
+
app.include_router(analytics.router, prefix="/api/analytics", tags=["Portfolio Analytics"])
|
| 81 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 82 |
@app.get("/")
|
| 83 |
async def root():
|
| 84 |
+
"""Health check endpoint"""
|
| 85 |
return {
|
| 86 |
+
"message": "Zyon Traders API is running",
|
| 87 |
+
"version": "1.0.0",
|
|
|
|
| 88 |
"timestamp": datetime.utcnow().isoformat(),
|
| 89 |
+
"status": "healthy"
|
| 90 |
}
|
| 91 |
|
| 92 |
+
@app.get("/api/health")
|
|
|
|
| 93 |
async def health_check():
|
| 94 |
+
"""Detailed health check"""
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 95 |
return {
|
| 96 |
+
"status": "healthy",
|
| 97 |
+
"timestamp": datetime.utcnow().isoformat(),
|
| 98 |
+
"version": "1.0.0",
|
| 99 |
+
"services": {
|
| 100 |
+
"database": "connected",
|
| 101 |
+
"dhan_api": "connected",
|
| 102 |
+
"ai_services": "connected"
|
| 103 |
}
|
| 104 |
}
|
| 105 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 106 |
@app.websocket("/ws")
|
| 107 |
async def websocket_endpoint(websocket: WebSocket):
|
| 108 |
+
"""WebSocket endpoint for real-time data"""
|
| 109 |
await websocket_manager.connect(websocket)
|
| 110 |
try:
|
| 111 |
while True:
|
| 112 |
+
# Keep connection alive and handle incoming messages
|
| 113 |
data = await websocket.receive_text()
|
| 114 |
+
message = json.loads(data)
|
| 115 |
|
| 116 |
# Handle different message types
|
| 117 |
+
if message.get("type") == "subscribe":
|
| 118 |
+
await websocket_manager.subscribe_to_symbols(
|
| 119 |
+
websocket, message.get("symbols", [])
|
| 120 |
+
)
|
| 121 |
+
elif message.get("type") == "unsubscribe":
|
| 122 |
+
await websocket_manager.unsubscribe_from_symbols(
|
| 123 |
+
websocket, message.get("symbols", [])
|
| 124 |
+
)
|
|
|
|
| 125 |
|
| 126 |
except WebSocketDisconnect:
|
| 127 |
websocket_manager.disconnect(websocket)
|
| 128 |
+
except Exception as e:
|
| 129 |
+
logger.error(f"WebSocket error: {e}")
|
| 130 |
+
websocket_manager.disconnect(websocket)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 131 |
|
| 132 |
@app.exception_handler(Exception)
|
| 133 |
+
async def global_exception_handler(request, exc):
|
| 134 |
+
"""Global exception handler"""
|
| 135 |
+
logger.error(f"Global exception: {exc}")
|
| 136 |
return JSONResponse(
|
| 137 |
status_code=500,
|
| 138 |
content={
|
|
|
|
| 139 |
"error": "Internal server error",
|
| 140 |
+
"message": "An unexpected error occurred",
|
| 141 |
"timestamp": datetime.utcnow().isoformat()
|
| 142 |
}
|
| 143 |
)
|
| 144 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 145 |
if __name__ == "__main__":
|
| 146 |
+
import uvicorn
|
| 147 |
uvicorn.run(
|
| 148 |
"main:app",
|
| 149 |
host="0.0.0.0",
|
| 150 |
+
port=int(os.getenv("PORT", 8000)),
|
| 151 |
+
reload=settings.DEBUG,
|
| 152 |
log_level="info"
|
| 153 |
)
|
models/auth.py
ADDED
|
@@ -0,0 +1,116 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
Pydantic models for authentication
"""

from pydantic import BaseModel, EmailStr, Field, validator
from typing import Optional
from datetime import datetime


def _validate_password_strength(v: str) -> str:
    """Shared password-strength rule: >= 8 chars with at least one
    uppercase letter, one lowercase letter, and one digit.

    Raises ValueError (surfaced by pydantic as a validation error) when
    a requirement is not met; returns the value unchanged otherwise.
    """
    if len(v) < 8:
        raise ValueError('Password must be at least 8 characters long')
    if not any(c.isupper() for c in v):
        raise ValueError('Password must contain at least one uppercase letter')
    if not any(c.islower() for c in v):
        raise ValueError('Password must contain at least one lowercase letter')
    if not any(c.isdigit() for c in v):
        raise ValueError('Password must contain at least one digit')
    return v


class UserSignup(BaseModel):
    email: EmailStr = Field(..., description="User email address")
    password: str = Field(..., min_length=8, description="User password (minimum 8 characters)")
    first_name: str = Field(..., min_length=1, max_length=50, description="First name")
    last_name: str = Field(..., min_length=1, max_length=50, description="Last name")

    # Shared strength check; allow_reuse is required by pydantic v1 when
    # the same function backs validators on several models.
    _validate_password = validator('password', allow_reuse=True)(_validate_password_strength)

class UserLogin(BaseModel):
    email: EmailStr = Field(..., description="User email address")
    password: str = Field(..., description="User password")

class UserResponse(BaseModel):
    # Public profile representation returned by the auth endpoints.
    id: str
    email: str
    first_name: str
    last_name: str
    role: str
    message: Optional[str] = None
    created_at: Optional[datetime] = None
    updated_at: Optional[datetime] = None

class TokenResponse(BaseModel):
    access_token: str
    token_type: str = "bearer"
    expires_in: int
    user: Optional[UserResponse] = None

class PasswordReset(BaseModel):
    token: str = Field(..., description="Password reset token")
    new_password: str = Field(..., min_length=8, description="New password")

    _validate_password = validator('new_password', allow_reuse=True)(_validate_password_strength)

class TokenPayload(BaseModel):
    # Claims carried in our API JWTs.
    sub: str  # Subject (user ID)
    email: str
    role: str
    exp: datetime
    iat: datetime

class GoogleAuthRequest(BaseModel):
    id_token: str = Field(..., description="Google ID token")

class RefreshTokenRequest(BaseModel):
    refresh_token: str = Field(..., description="Refresh token")

class ChangePasswordRequest(BaseModel):
    current_password: str = Field(..., description="Current password")
    new_password: str = Field(..., min_length=8, description="New password")

    _validate_password = validator('new_password', allow_reuse=True)(_validate_password_strength)

class UpdateProfileRequest(BaseModel):
    first_name: Optional[str] = Field(None, min_length=1, max_length=50)
    last_name: Optional[str] = Field(None, min_length=1, max_length=50)
    phone: Optional[str] = Field(None, regex=r'^\+?[1-9]\d{1,14}$')
    preferences: Optional[dict] = None

class UserPreferences(BaseModel):
    # User-configurable defaults; regex constraints restrict each field
    # to a closed set of values (pydantic v1 `regex=` syntax).
    theme: Optional[str] = Field("light", regex=r'^(light|dark|auto)$')
    notifications: Optional[bool] = True
    email_alerts: Optional[bool] = True
    sms_alerts: Optional[bool] = False
    trading_alerts: Optional[bool] = True
    default_order_type: Optional[str] = Field("MARKET", regex=r'^(MARKET|LIMIT)$')
    default_product_type: Optional[str] = Field("CNC", regex=r'^(CNC|MIS|NRML)$')
    risk_tolerance: Optional[str] = Field("MEDIUM", regex=r'^(LOW|MEDIUM|HIGH)$')
    investment_experience: Optional[str] = Field("INTERMEDIATE", regex=r'^(BEGINNER|INTERMEDIATE|ADVANCED)$')

class UserRole(BaseModel):
    role: str = Field(..., regex=r'^(admin|instructor|trader|student)$')
    permissions: Optional[list] = []

class AdminUserUpdate(BaseModel):
    role: Optional[str] = Field(None, regex=r'^(admin|instructor|trader|student)$')
    is_active: Optional[bool] = None
    permissions: Optional[list] = None
|
models/trading.py
ADDED
|
@@ -0,0 +1,239 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Pydantic models for trading operations
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
from pydantic import BaseModel, Field
|
| 6 |
+
from typing import List, Optional, Dict, Any
|
| 7 |
+
from datetime import datetime
|
| 8 |
+
from enum import Enum
|
| 9 |
+
|
| 10 |
+
class ExchangeSegment(str, Enum):
    # Exchange segments supported by the Dhan API (equity and F&O on
    # NSE/BSE, plus MCX commodities F&O).
    NSE_EQ = "NSE_EQ"
    NSE_FO = "NSE_FO"
    BSE_EQ = "BSE_EQ"
    BSE_FO = "BSE_FO"
    MCX_FO = "MCX_FO"

class TransactionType(str, Enum):
    # Order side.
    BUY = "BUY"
    SELL = "SELL"

class OrderType(str, Enum):
    # Order pricing modes.
    MARKET = "MARKET"
    LIMIT = "LIMIT"
    STOP_LOSS = "STOP_LOSS"
    STOP_LOSS_LIMIT = "STOP_LOSS_LIMIT"

class ProductType(str, Enum):
    CNC = "CNC"  # Cash and Carry
    MIS = "MIS"  # Margin Intraday Square Off
    NRML = "NRML"  # Normal

class Validity(str, Enum):
    DAY = "DAY"
    IOC = "IOC"  # Immediate or Cancel

class OrderStatus(str, Enum):
    # Order lifecycle states as reported by the broker.
    PENDING = "PENDING"
    OPEN = "OPEN"
    COMPLETE = "COMPLETE"
    CANCELLED = "CANCELLED"
    REJECTED = "REJECTED"
    TRANSIT = "TRANSIT"
|
| 43 |
+
|
| 44 |
+
# Request Models
class InstrumentRequest(BaseModel):
    """Identifies a single tradable instrument by security ID and segment."""
    security_id: str = Field(..., description="Security ID")
    exchange_segment: ExchangeSegment = Field(..., description="Exchange segment")

class QuoteRequest(BaseModel):
    """Batch quote request for one or more instruments."""
    instruments: List[InstrumentRequest] = Field(..., description="List of instruments to get quotes for")

class OrderRequest(BaseModel):
    """Payload for placing a new order.

    price is required only for LIMIT orders; trigger_price only for
    stop-loss variants (enforcement is left to the broker API).
    """
    security_id: str = Field(..., description="Security ID")
    exchange_segment: ExchangeSegment = Field(..., description="Exchange segment")
    transaction_type: TransactionType = Field(..., description="Buy or Sell")
    quantity: int = Field(..., gt=0, description="Order quantity")
    order_type: OrderType = Field(..., description="Order type")
    product_type: ProductType = Field(..., description="Product type")
    price: Optional[float] = Field(None, ge=0, description="Order price (for limit orders)")
    trigger_price: Optional[float] = Field(None, ge=0, description="Trigger price (for stop loss orders)")
    validity: Optional[Validity] = Field(Validity.DAY, description="Order validity")
    disclosed_quantity: Optional[int] = Field(0, ge=0, description="Disclosed quantity")
    after_market_order: Optional[bool] = Field(False, description="After market order flag")

class HistoricalDataRequest(BaseModel):
    """Request OHLCV candles for a date range at a given interval."""
    security_id: str = Field(..., description="Security ID")
    exchange_segment: ExchangeSegment = Field(..., description="Exchange segment")
    instrument: str = Field("EQUITY", description="Instrument type")
    interval: str = Field("day", description="Data interval (1, 5, 15, 30, 60, day)")
    from_date: str = Field(..., description="From date (YYYY-MM-DD)")
    to_date: str = Field(..., description="To date (YYYY-MM-DD)")
|
| 72 |
+
|
| 73 |
+
# Response Models
class QuoteResponse(BaseModel):
    """Basic market quote for one instrument (LTP, OHLC, volume, best bid/ask)."""
    security_id: str
    exchange_segment: str
    last_price: float
    change: float
    change_percent: float
    volume: int
    open_price: float
    high_price: float
    low_price: float
    close_price: float
    ltp: float
    bid_price: Optional[float] = None
    ask_price: Optional[float] = None
    bid_qty: Optional[int] = None
    ask_qty: Optional[int] = None
    timestamp: Optional[str] = None

class MarketDepth(BaseModel):
    """One level of the order book: price, aggregate quantity, order count."""
    price: float
    quantity: int
    orders: int

class DetailedQuoteResponse(QuoteResponse):
    """Quote extended with order-book depth and circuit limits."""
    # market_depth keys are presumably "buy"/"sell" sides — confirm against
    # the Dhan feed before relying on them.
    market_depth: Optional[Dict[str, List[MarketDepth]]] = None
    total_buy_qty: Optional[int] = None
    total_sell_qty: Optional[int] = None
    upper_circuit: Optional[float] = None
    lower_circuit: Optional[float] = None

class OrderResponse(BaseModel):
    """Result of an order placement/modification call."""
    order_id: str
    order_status: OrderStatus
    message: Optional[str] = None

class Holding(BaseModel):
    """One demat holding with quantity breakdown and P&L figures."""
    security_id: str
    exchange_segment: str
    trading_symbol: str
    isin: str
    product_type: str
    total_qty: int
    sellable_qty: int
    blocked_qty: int
    collateral_qty: int
    average_price: float
    cost_price: float
    ltp: float
    current_value: float
    pnl: float
    pnl_percent: float
    day_change: float
    day_change_percent: float

class HoldingsResponse(BaseModel):
    holdings: List[Holding]

class Position(BaseModel):
    """An open intraday/derivative position."""
    security_id: str
    exchange_segment: str
    trading_symbol: str
    product_type: str
    position_type: str  # LONG or SHORT
    quantity: int
    buy_average: float
    sell_average: float
    realized_pnl: float
    unrealized_pnl: float
    ltp: float

class PositionsResponse(BaseModel):
    positions: List[Position]

class Funds(BaseModel):
    """Account margin/funds summary."""
    available_balance: float
    sod_limit: float
    collateral_amount: float
    intraday: Dict[str, float]
    delivery: Dict[str, float]

class FundsResponse(BaseModel):
    funds: Funds

class Order(BaseModel):
    """Full order-book entry as returned by the broker."""
    order_id: str
    correlation_id: Optional[str] = None
    order_status: OrderStatus
    transaction_type: TransactionType
    exchange_segment: ExchangeSegment
    product_type: ProductType
    order_type: OrderType
    validity: Validity
    trading_symbol: str
    security_id: str
    quantity: int
    disclosed_quantity: int
    price: float
    trigger_price: float
    after_market_order: bool
    # Bracket-order fields; populated only for BO legs.
    bo_profit_value: Optional[float] = None
    bo_stop_loss_value: Optional[float] = None
    leg_name: Optional[str] = None
    create_time: str
    update_time: str
    exchange_time: Optional[str] = None
    filled_qty: Optional[int] = None
    remaining_qty: Optional[int] = None
    avg_price: Optional[float] = None

class OrdersResponse(BaseModel):
    orders: List[Order]

class HistoricalDataPoint(BaseModel):
    """One OHLCV candle."""
    timestamp: str
    open: float
    high: float
    low: float
    close: float
    volume: int

class HistoricalDataResponse(BaseModel):
    start: str
    end: str
    data: List[HistoricalDataPoint]

class SecurityMaster(BaseModel):
    """One row of the Dhan security-master CSV (field names mirror the
    upstream column headers, hence the UPPER_SNAKE naming)."""
    SEM_EXM_EXCH_ID: str
    SEM_SMST_SECURITY_ID: str
    SEM_INSTRUMENT_NAME: str
    SEM_TRADING_SYMBOL: str
    SEM_EXPIRY_DATE: Optional[str] = None
    SEM_STRIKE_PRICE: Optional[float] = None
    SEM_OPTION_TYPE: Optional[str] = None
    SEM_LOT_UNITS: int
    SEM_CUSTOM_SYMBOL: str
    SEM_SEGMENT: Optional[str] = None
    SEM_SERIES: Optional[str] = None
    SEM_TICK_SIZE: Optional[float] = None
    SM_SYMBOL_NAME: Optional[str] = None

class SecurityMasterResponse(BaseModel):
    securities: List[SecurityMaster]

# WebSocket Models
class WebSocketMessage(BaseModel):
    """Generic envelope for WebSocket traffic."""
    type: str
    data: Dict[str, Any]
    timestamp: Optional[str] = None

class SubscriptionRequest(BaseModel):
    type: str = "subscribe"
    symbols: List[str]
    feed_type: Optional[str] = "ltp"  # ltp, depth, etc.

class UnsubscriptionRequest(BaseModel):
    type: str = "unsubscribe"
    symbols: List[str]

class LiveFeedData(BaseModel):
    """One live tick pushed to subscribed clients."""
    security_id: str
    exchange_segment: str
    ltp: float
    change: float
    change_percent: float
    volume: int
    timestamp: str
|
requirements.txt
CHANGED
|
@@ -1,57 +1,55 @@
|
|
| 1 |
-
#
|
| 2 |
-
|
| 3 |
-
# Web Framework
|
| 4 |
fastapi==0.104.1
|
| 5 |
-
uvicorn
|
| 6 |
-
|
| 7 |
-
|
| 8 |
-
# API and HTTP
|
| 9 |
-
httpx==0.25.2
|
| 10 |
-
requests==2.31.0
|
| 11 |
-
pydantic==2.5.0
|
| 12 |
|
| 13 |
-
#
|
| 14 |
-
|
| 15 |
-
langchain-community==0.0.10
|
| 16 |
-
chromadb==0.4.18
|
| 17 |
-
sentence-transformers==2.2.2
|
| 18 |
-
huggingface-hub==0.19.4
|
| 19 |
|
| 20 |
-
#
|
| 21 |
-
|
| 22 |
-
numpy==1.24.4
|
| 23 |
python-multipart==0.0.6
|
| 24 |
|
| 25 |
-
#
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 26 |
python-jose[cryptography]==3.3.0
|
| 27 |
passlib[bcrypt]==1.7.4
|
| 28 |
-
python-
|
| 29 |
|
| 30 |
-
#
|
| 31 |
-
|
| 32 |
-
aiofiles==23.2.1
|
| 33 |
|
| 34 |
-
#
|
| 35 |
-
|
| 36 |
-
|
|
|
|
| 37 |
|
| 38 |
-
#
|
| 39 |
-
|
| 40 |
-
|
| 41 |
-
schedule==1.2.0
|
| 42 |
-
|
| 43 |
-
# Development and Monitoring
|
| 44 |
-
structlog==23.2.0
|
| 45 |
-
prometheus-client==0.19.0
|
| 46 |
|
| 47 |
-
#
|
| 48 |
-
|
| 49 |
|
| 50 |
-
#
|
| 51 |
-
|
| 52 |
|
| 53 |
-
#
|
| 54 |
-
|
|
|
|
| 55 |
|
| 56 |
-
#
|
| 57 |
-
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# FastAPI and ASGI server
|
|
|
|
|
|
|
| 2 |
fastapi==0.104.1
|
| 3 |
+
uvicorn==0.24.0
|
| 4 |
+
gunicorn==21.2.0
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 5 |
|
| 6 |
+
# Pydantic v1 for compatibility
|
| 7 |
+
pydantic==1.10.13
|
|
|
|
|
|
|
|
|
|
|
|
|
| 8 |
|
| 9 |
+
# Environment and config
|
| 10 |
+
python-dotenv==1.1.1
|
|
|
|
| 11 |
python-multipart==0.0.6
|
| 12 |
|
| 13 |
+
# HTTP client for API calls
|
| 14 |
+
httpx==0.27.0
|
| 15 |
+
|
| 16 |
+
# WebSocket support
|
| 17 |
+
websockets==12.0
|
| 18 |
+
|
| 19 |
+
# Database and ORM
|
| 20 |
+
psycopg2-binary==2.9.9
|
| 21 |
+
sqlalchemy==1.4.53
|
| 22 |
+
alembic==1.13.1
|
| 23 |
+
|
| 24 |
+
# Authentication and security
|
| 25 |
python-jose[cryptography]==3.3.0
|
| 26 |
passlib[bcrypt]==1.7.4
|
| 27 |
+
# python-multipart==0.0.6 already pinned above (duplicate removed)
|
| 28 |
|
| 29 |
+
# Supabase client
|
| 30 |
+
supabase==2.3.4
|
|
|
|
| 31 |
|
| 32 |
+
# AI and ML libraries
|
| 33 |
+
openai==1.6.1
|
| 34 |
+
google-generativeai==0.3.2
|
| 35 |
+
huggingface-hub==0.20.2
|
| 36 |
|
| 37 |
+
# Data processing
|
| 38 |
+
pandas==2.1.4
|
| 39 |
+
numpy==1.24.4
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 40 |
|
| 41 |
+
# Async file operations
|
| 42 |
+
aiofiles==23.1.0
|
| 43 |
|
| 44 |
+
# Logging and monitoring
|
| 45 |
+
loguru==0.7.2
|
| 46 |
|
| 47 |
+
# Utils
|
| 48 |
+
python-dateutil==2.8.2
|
| 49 |
+
pytz==2023.3
|
| 50 |
|
| 51 |
+
# Development dependencies (comment out for production)
|
| 52 |
+
# pytest==7.4.3
|
| 53 |
+
# pytest-asyncio==0.21.1
|
| 54 |
+
# black==23.12.1
|
| 55 |
+
# flake8==6.1.0
|
routers/auth.py
ADDED
|
@@ -0,0 +1,376 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Authentication Router
|
| 3 |
+
Handles user authentication with Supabase integration
|
| 4 |
+
"""
|
| 5 |
+
|
| 6 |
+
from fastapi import APIRouter, HTTPException, Depends, status
|
| 7 |
+
from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
|
| 8 |
+
from typing import Optional, Dict, Any
|
| 9 |
+
import logging
|
| 10 |
+
from datetime import datetime, timedelta
|
| 11 |
+
import jwt
|
| 12 |
+
from passlib.context import CryptContext
|
| 13 |
+
|
| 14 |
+
from config.settings import get_settings
|
| 15 |
+
from models.auth import (
|
| 16 |
+
UserLogin,
|
| 17 |
+
UserSignup,
|
| 18 |
+
UserResponse,
|
| 19 |
+
TokenResponse,
|
| 20 |
+
PasswordReset
|
| 21 |
+
)
|
| 22 |
+
from services.supabase_client import get_supabase_client
|
| 23 |
+
from services.auth import create_access_token, verify_token
|
| 24 |
+
|
| 25 |
+
logger = logging.getLogger(__name__)
|
| 26 |
+
router = APIRouter()
|
| 27 |
+
settings = get_settings()
|
| 28 |
+
|
| 29 |
+
# Security
|
| 30 |
+
security = HTTPBearer()
|
| 31 |
+
pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")
|
| 32 |
+
|
| 33 |
+
@router.post("/signup", response_model=UserResponse)
async def signup(user_data: UserSignup):
    """Register a new user.

    Creates the auth user in Supabase and mirrors a profile row into the
    public ``users`` table. Returns the new profile; raises 400 on a
    duplicate email or failed creation, 500 on unexpected errors.
    """
    try:
        supabase = get_supabase_client()

        # Create user with Supabase Auth; extra fields land in user metadata.
        auth_response = supabase.auth.sign_up({
            "email": user_data.email,
            "password": user_data.password,
            "options": {
                "data": {
                    "first_name": user_data.first_name,
                    "last_name": user_data.last_name,
                    "full_name": f"{user_data.first_name} {user_data.last_name}",
                    "role": "trader"  # Default role
                }
            }
        })

        if auth_response.user:
            # Create user profile in public.users table
            now = datetime.utcnow().isoformat()
            profile_data = {
                "id": auth_response.user.id,
                "email": user_data.email,
                "first_name": user_data.first_name,
                "last_name": user_data.last_name,
                "role": "trader",
                "created_at": now,
                "updated_at": now
            }

            supabase.table("users").insert(profile_data).execute()

            return UserResponse(
                id=auth_response.user.id,
                email=auth_response.user.email,
                first_name=user_data.first_name,
                last_name=user_data.last_name,
                role="trader",
                message="User created successfully. Please check your email to verify your account."
            )
        else:
            raise HTTPException(
                status_code=400,
                detail="Failed to create user account"
            )

    except HTTPException:
        # Bug fix: the 400 raised above was previously swallowed by the
        # generic handler below and re-raised as a 500.
        raise
    except Exception as e:
        logger.error(f"Signup error: {e}")
        if "already registered" in str(e).lower():
            raise HTTPException(
                status_code=400,
                detail="User with this email already exists"
            )
        raise HTTPException(
            status_code=500,
            detail="Failed to create user account"
        )
|
| 92 |
+
|
| 93 |
+
@router.post("/login", response_model=TokenResponse)
async def login(login_data: UserLogin):
    """Authenticate user against Supabase and return an API access token.

    Raises 401 for bad credentials and 500 for unexpected failures.
    """
    try:
        supabase = get_supabase_client()

        # Authenticate with Supabase
        auth_response = supabase.auth.sign_in_with_password({
            "email": login_data.email,
            "password": login_data.password
        })

        if auth_response.user and auth_response.session:
            # Get user profile from public.users (may be missing for
            # legacy accounts — fall back to sensible defaults below).
            profile_response = supabase.table("users").select("*").eq(
                "id", auth_response.user.id
            ).execute()

            profile = profile_response.data[0] if profile_response.data else {}

            # Create our own JWT token for API access
            access_token = create_access_token({
                "sub": auth_response.user.id,
                "email": auth_response.user.email,
                "role": profile.get("role", "trader")
            })

            return TokenResponse(
                access_token=access_token,
                token_type="bearer",
                expires_in=3600,  # 1 hour
                user=UserResponse(
                    id=auth_response.user.id,
                    email=auth_response.user.email,
                    first_name=profile.get("first_name", ""),
                    last_name=profile.get("last_name", ""),
                    role=profile.get("role", "trader")
                )
            )
        else:
            raise HTTPException(
                status_code=401,
                detail="Invalid email or password"
            )

    except HTTPException:
        # Bug fix: re-raise our own 401 directly instead of relying on
        # the fragile string matching in the generic handler below.
        raise
    except Exception as e:
        logger.error(f"Login error: {e}")
        if "invalid" in str(e).lower() or "unauthorized" in str(e).lower():
            raise HTTPException(
                status_code=401,
                detail="Invalid email or password"
            )
        raise HTTPException(
            status_code=500,
            detail="Authentication failed"
        )
|
| 149 |
+
|
| 150 |
+
@router.post("/google", response_model=TokenResponse)
async def google_auth(id_token: str):
    """Authenticate with a Google OAuth ID token via Supabase.

    Creates the local profile row on first login. Raises 401 when
    Google/Supabase rejects the token, 500 on unexpected errors.
    """
    try:
        supabase = get_supabase_client()

        # Exchange the Google ID token for a Supabase session.
        auth_response = supabase.auth.sign_in_with_id_token({
            "provider": "google",
            "token": id_token
        })

        if auth_response.user and auth_response.session:
            # Check if user profile exists, create if not.
            profile_response = supabase.table("users").select("*").eq(
                "id", auth_response.user.id
            ).execute()

            if not profile_response.data:
                # First Google login: build the profile from Google's
                # identity metadata (given_name / family_name).
                user_metadata = auth_response.user.user_metadata or {}
                now = datetime.utcnow().isoformat()
                profile_data = {
                    "id": auth_response.user.id,
                    "email": auth_response.user.email,
                    "first_name": user_metadata.get("given_name", ""),
                    "last_name": user_metadata.get("family_name", ""),
                    "role": "trader",
                    "created_at": now,
                    "updated_at": now
                }
                supabase.table("users").insert(profile_data).execute()
                profile = profile_data
            else:
                profile = profile_response.data[0]

            # Create access token
            access_token = create_access_token({
                "sub": auth_response.user.id,
                "email": auth_response.user.email,
                "role": profile.get("role", "trader")
            })

            return TokenResponse(
                access_token=access_token,
                token_type="bearer",
                expires_in=3600,
                user=UserResponse(
                    id=auth_response.user.id,
                    email=auth_response.user.email,
                    first_name=profile.get("first_name", ""),
                    last_name=profile.get("last_name", ""),
                    role=profile.get("role", "trader")
                )
            )
        else:
            raise HTTPException(
                status_code=401,
                detail="Google authentication failed"
            )

    except HTTPException:
        # Bug fix: the 401 above was previously swallowed by the generic
        # handler and re-raised as a 500.
        raise
    except Exception as e:
        logger.error(f"Google auth error: {e}")
        raise HTTPException(
            status_code=500,
            detail="Google authentication failed"
        )
|
| 216 |
+
|
| 217 |
+
@router.post("/refresh", response_model=TokenResponse)
async def refresh_token(
    credentials: HTTPAuthorizationCredentials = Depends(security)
):
    """Issue a fresh access token to a caller presenting a valid one.

    Any verification failure is reported as 401.
    """
    try:
        # Validate the presented token and mint a new one with the same
        # identity claims.
        claims = verify_token(credentials.credentials)
        new_token = create_access_token({
            "sub": claims["sub"],
            "email": claims["email"],
            "role": claims.get("role", "trader")
        })

        return TokenResponse(
            access_token=new_token,
            token_type="bearer",
            expires_in=3600
        )

    except Exception as e:
        logger.error(f"Token refresh error: {e}")
        raise HTTPException(
            status_code=401,
            detail="Invalid or expired token"
        )
|
| 245 |
+
|
| 246 |
+
@router.post("/logout")
async def logout(
    credentials: HTTPAuthorizationCredentials = Depends(security)
):
    """Log the user out.

    Token blacklisting is not implemented, so this endpoint always
    reports success — even when the presented token is invalid.
    """
    try:
        verify_token(credentials.credentials)
    except Exception as e:
        # Invalid/expired token: log it, but logout still "succeeds".
        logger.error(f"Logout error: {e}")
    return {"message": "Logged out successfully"}
|
| 263 |
+
|
| 264 |
+
@router.post("/forgot-password")
async def forgot_password(email: str):
    """Send a password reset email.

    Always returns the same generic message so the endpoint cannot be
    used to probe which email addresses are registered.
    """
    try:
        supabase = get_supabase_client()

        # Send password reset email via Supabase.
        supabase.auth.reset_password_email(email)
    except Exception as e:
        # Swallow deliberately: failure must be indistinguishable from
        # success to the caller.
        logger.error(f"Password reset error: {e}")

    # Security fix: the success path used to say "Password reset email
    # sent", revealing that the address exists; both paths now return
    # the same response.
    return {"message": "If the email exists, a password reset link has been sent"}
|
| 279 |
+
|
| 280 |
+
@router.post("/reset-password")
async def reset_password(reset_data: PasswordReset):
    """Reset password with token"""
    # NOTE(review): reset_data.token is never used here —
    # supabase.auth.update_user acts on the client's *current session*,
    # so this likely only works if the reset token was already exchanged
    # for a session upstream. Verify against the Supabase auth flow.
    try:
        supabase = get_supabase_client()

        # Update password
        supabase.auth.update_user({
            "password": reset_data.new_password
        })

        return {"message": "Password updated successfully"}

    except Exception as e:
        logger.error(f"Password reset error: {e}")
        raise HTTPException(
            status_code=400,
            detail="Failed to reset password"
        )
|
| 299 |
+
|
| 300 |
+
@router.get("/me", response_model=UserResponse)
|
| 301 |
+
async def get_current_user_info(
|
| 302 |
+
credentials: HTTPAuthorizationCredentials = Depends(security)
|
| 303 |
+
):
|
| 304 |
+
"""Get current user information"""
|
| 305 |
+
try:
|
| 306 |
+
payload = verify_token(credentials.credentials)
|
| 307 |
+
supabase = get_supabase_client()
|
| 308 |
+
|
| 309 |
+
# Get user profile
|
| 310 |
+
profile_response = supabase.table("users").select("*").eq(
|
| 311 |
+
"id", payload["sub"]
|
| 312 |
+
).execute()
|
| 313 |
+
|
| 314 |
+
if profile_response.data:
|
| 315 |
+
profile = profile_response.data[0]
|
| 316 |
+
return UserResponse(
|
| 317 |
+
id=profile["id"],
|
| 318 |
+
email=profile["email"],
|
| 319 |
+
first_name=profile.get("first_name", ""),
|
| 320 |
+
last_name=profile.get("last_name", ""),
|
| 321 |
+
role=profile.get("role", "trader")
|
| 322 |
+
)
|
| 323 |
+
else:
|
| 324 |
+
raise HTTPException(
|
| 325 |
+
status_code=404,
|
| 326 |
+
detail="User profile not found"
|
| 327 |
+
)
|
| 328 |
+
|
| 329 |
+
except Exception as e:
|
| 330 |
+
logger.error(f"Get user info error: {e}")
|
| 331 |
+
raise HTTPException(
|
| 332 |
+
status_code=401,
|
| 333 |
+
detail="Invalid or expired token"
|
| 334 |
+
)
|
| 335 |
+
|
| 336 |
+
@router.put("/profile", response_model=UserResponse)
|
| 337 |
+
async def update_profile(
|
| 338 |
+
profile_data: Dict[str, Any],
|
| 339 |
+
credentials: HTTPAuthorizationCredentials = Depends(security)
|
| 340 |
+
):
|
| 341 |
+
"""Update user profile"""
|
| 342 |
+
try:
|
| 343 |
+
payload = verify_token(credentials.credentials)
|
| 344 |
+
supabase = get_supabase_client()
|
| 345 |
+
|
| 346 |
+
# Update profile
|
| 347 |
+
update_data = {
|
| 348 |
+
**profile_data,
|
| 349 |
+
"updated_at": datetime.utcnow().isoformat()
|
| 350 |
+
}
|
| 351 |
+
|
| 352 |
+
response = supabase.table("users").update(update_data).eq(
|
| 353 |
+
"id", payload["sub"]
|
| 354 |
+
).execute()
|
| 355 |
+
|
| 356 |
+
if response.data:
|
| 357 |
+
profile = response.data[0]
|
| 358 |
+
return UserResponse(
|
| 359 |
+
id=profile["id"],
|
| 360 |
+
email=profile["email"],
|
| 361 |
+
first_name=profile.get("first_name", ""),
|
| 362 |
+
last_name=profile.get("last_name", ""),
|
| 363 |
+
role=profile.get("role", "trader")
|
| 364 |
+
)
|
| 365 |
+
else:
|
| 366 |
+
raise HTTPException(
|
| 367 |
+
status_code=404,
|
| 368 |
+
detail="Failed to update profile"
|
| 369 |
+
)
|
| 370 |
+
|
| 371 |
+
except Exception as e:
|
| 372 |
+
logger.error(f"Update profile error: {e}")
|
| 373 |
+
raise HTTPException(
|
| 374 |
+
status_code=500,
|
| 375 |
+
detail="Failed to update profile"
|
| 376 |
+
)
|
routers/dhan.py
ADDED
|
@@ -0,0 +1,311 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Dhan API Router
|
| 3 |
+
Handles all Dhan trading API operations with error handling and rate limiting
|
| 4 |
+
"""
|
| 5 |
+
|
| 6 |
+
from fastapi import APIRouter, HTTPException, Depends, BackgroundTasks
|
| 7 |
+
from typing import List, Optional, Dict, Any
|
| 8 |
+
import httpx
|
| 9 |
+
import asyncio
|
| 10 |
+
from datetime import datetime, timedelta
|
| 11 |
+
import logging
|
| 12 |
+
|
| 13 |
+
from config.settings import get_settings
|
| 14 |
+
from models.trading import (
|
| 15 |
+
QuoteRequest,
|
| 16 |
+
QuoteResponse,
|
| 17 |
+
OrderRequest,
|
| 18 |
+
OrderResponse,
|
| 19 |
+
HoldingsResponse,
|
| 20 |
+
PositionsResponse,
|
| 21 |
+
FundsResponse,
|
| 22 |
+
HistoricalDataRequest,
|
| 23 |
+
HistoricalDataResponse
|
| 24 |
+
)
|
| 25 |
+
from services.auth import get_current_user
|
| 26 |
+
from utils.rate_limiter import RateLimiter
|
| 27 |
+
|
| 28 |
+
logger = logging.getLogger(__name__)
|
| 29 |
+
router = APIRouter()
|
| 30 |
+
settings = get_settings()
|
| 31 |
+
|
| 32 |
+
# Rate limiter for Dhan API calls
|
| 33 |
+
rate_limiter = RateLimiter(calls=60, period=60) # 60 calls per minute
|
| 34 |
+
|
| 35 |
+
class DhanAPIClient:
|
| 36 |
+
"""Dhan API client with error handling and rate limiting"""
|
| 37 |
+
|
| 38 |
+
def __init__(self):
|
| 39 |
+
self.base_url = settings.DHAN_API_BASE_URL
|
| 40 |
+
self.api_key = settings.DHAN_API_KEY
|
| 41 |
+
|
| 42 |
+
def get_headers(self) -> Dict[str, str]:
|
| 43 |
+
"""Get API headers"""
|
| 44 |
+
return {
|
| 45 |
+
"Content-Type": "application/json",
|
| 46 |
+
"access-token": self.api_key,
|
| 47 |
+
"Accept": "application/json"
|
| 48 |
+
}
|
| 49 |
+
|
| 50 |
+
async def make_request(
|
| 51 |
+
self,
|
| 52 |
+
method: str,
|
| 53 |
+
endpoint: str,
|
| 54 |
+
data: Optional[Dict] = None
|
| 55 |
+
) -> Dict[str, Any]:
|
| 56 |
+
"""Make rate-limited API request"""
|
| 57 |
+
|
| 58 |
+
# Apply rate limiting
|
| 59 |
+
await rate_limiter.acquire()
|
| 60 |
+
|
| 61 |
+
url = f"{self.base_url}{endpoint}"
|
| 62 |
+
headers = self.get_headers()
|
| 63 |
+
|
| 64 |
+
try:
|
| 65 |
+
async with httpx.AsyncClient(timeout=30.0) as client:
|
| 66 |
+
if method.upper() == "GET":
|
| 67 |
+
response = await client.get(url, headers=headers, params=data)
|
| 68 |
+
else:
|
| 69 |
+
response = await client.request(
|
| 70 |
+
method, url, headers=headers, json=data
|
| 71 |
+
)
|
| 72 |
+
|
| 73 |
+
response.raise_for_status()
|
| 74 |
+
|
| 75 |
+
result = response.json()
|
| 76 |
+
logger.info(f"Dhan API {method} {endpoint}: {response.status_code}")
|
| 77 |
+
|
| 78 |
+
return result
|
| 79 |
+
|
| 80 |
+
except httpx.HTTPStatusError as e:
|
| 81 |
+
logger.error(f"Dhan API HTTP error: {e.response.status_code} - {e.response.text}")
|
| 82 |
+
raise HTTPException(
|
| 83 |
+
status_code=e.response.status_code,
|
| 84 |
+
detail=f"Dhan API error: {e.response.text}"
|
| 85 |
+
)
|
| 86 |
+
except httpx.RequestError as e:
|
| 87 |
+
logger.error(f"Dhan API request error: {e}")
|
| 88 |
+
raise HTTPException(
|
| 89 |
+
status_code=503,
|
| 90 |
+
detail="Dhan API service unavailable"
|
| 91 |
+
)
|
| 92 |
+
except Exception as e:
|
| 93 |
+
logger.error(f"Dhan API unexpected error: {e}")
|
| 94 |
+
raise HTTPException(
|
| 95 |
+
status_code=500,
|
| 96 |
+
detail="Internal server error"
|
| 97 |
+
)
|
| 98 |
+
|
| 99 |
+
# Create client instance
|
| 100 |
+
dhan_client = DhanAPIClient()
|
| 101 |
+
|
| 102 |
+
@router.get("/securities", response_model=List[Dict[str, Any]])
|
| 103 |
+
async def get_securities(
|
| 104 |
+
exchange: Optional[str] = "NSE",
|
| 105 |
+
current_user: Dict = Depends(get_current_user)
|
| 106 |
+
):
|
| 107 |
+
"""Get all securities/instruments"""
|
| 108 |
+
try:
|
| 109 |
+
result = await dhan_client.make_request("GET", "/securitymaster")
|
| 110 |
+
|
| 111 |
+
# Filter by exchange if specified
|
| 112 |
+
if exchange and exchange != "ALL":
|
| 113 |
+
securities = result.get("data", [])
|
| 114 |
+
filtered = [s for s in securities if s.get("SEM_EXM_EXCH_ID") == exchange]
|
| 115 |
+
return filtered
|
| 116 |
+
|
| 117 |
+
return result.get("data", [])
|
| 118 |
+
|
| 119 |
+
except Exception as e:
|
| 120 |
+
logger.error(f"Error fetching securities: {e}")
|
| 121 |
+
raise HTTPException(status_code=500, detail="Failed to fetch securities")
|
| 122 |
+
|
| 123 |
+
@router.post("/quotes", response_model=List[QuoteResponse])
|
| 124 |
+
async def get_quotes(
|
| 125 |
+
request: QuoteRequest,
|
| 126 |
+
current_user: Dict = Depends(get_current_user)
|
| 127 |
+
):
|
| 128 |
+
"""Get live quotes for multiple securities"""
|
| 129 |
+
try:
|
| 130 |
+
data = {
|
| 131 |
+
"instruments": [
|
| 132 |
+
{
|
| 133 |
+
"securityId": instrument.security_id,
|
| 134 |
+
"exchangeSegment": instrument.exchange_segment
|
| 135 |
+
}
|
| 136 |
+
for instrument in request.instruments
|
| 137 |
+
]
|
| 138 |
+
}
|
| 139 |
+
|
| 140 |
+
result = await dhan_client.make_request("POST", "/marketfeed/ltp", data)
|
| 141 |
+
return result.get("data", [])
|
| 142 |
+
|
| 143 |
+
except Exception as e:
|
| 144 |
+
logger.error(f"Error fetching quotes: {e}")
|
| 145 |
+
raise HTTPException(status_code=500, detail="Failed to fetch quotes")
|
| 146 |
+
|
| 147 |
+
@router.get("/quote/{security_id}")
|
| 148 |
+
async def get_single_quote(
|
| 149 |
+
security_id: str,
|
| 150 |
+
exchange_segment: str = "NSE_EQ",
|
| 151 |
+
current_user: Dict = Depends(get_current_user)
|
| 152 |
+
):
|
| 153 |
+
"""Get detailed quote for a single security"""
|
| 154 |
+
try:
|
| 155 |
+
endpoint = f"/marketfeed/quote?securityId={security_id}&exchangeSegment={exchange_segment}"
|
| 156 |
+
result = await dhan_client.make_request("GET", endpoint)
|
| 157 |
+
return result.get("data", {})
|
| 158 |
+
|
| 159 |
+
except Exception as e:
|
| 160 |
+
logger.error(f"Error fetching single quote: {e}")
|
| 161 |
+
raise HTTPException(status_code=500, detail="Failed to fetch quote")
|
| 162 |
+
|
| 163 |
+
@router.get("/holdings", response_model=HoldingsResponse)
|
| 164 |
+
async def get_holdings(current_user: Dict = Depends(get_current_user)):
|
| 165 |
+
"""Get user holdings"""
|
| 166 |
+
try:
|
| 167 |
+
result = await dhan_client.make_request("GET", "/holdings")
|
| 168 |
+
return {"holdings": result.get("data", [])}
|
| 169 |
+
|
| 170 |
+
except Exception as e:
|
| 171 |
+
logger.error(f"Error fetching holdings: {e}")
|
| 172 |
+
raise HTTPException(status_code=500, detail="Failed to fetch holdings")
|
| 173 |
+
|
| 174 |
+
@router.get("/positions", response_model=PositionsResponse)
|
| 175 |
+
async def get_positions(current_user: Dict = Depends(get_current_user)):
|
| 176 |
+
"""Get user positions"""
|
| 177 |
+
try:
|
| 178 |
+
result = await dhan_client.make_request("GET", "/positions")
|
| 179 |
+
return {"positions": result.get("data", [])}
|
| 180 |
+
|
| 181 |
+
except Exception as e:
|
| 182 |
+
logger.error(f"Error fetching positions: {e}")
|
| 183 |
+
raise HTTPException(status_code=500, detail="Failed to fetch positions")
|
| 184 |
+
|
| 185 |
+
@router.get("/funds", response_model=FundsResponse)
|
| 186 |
+
async def get_funds(current_user: Dict = Depends(get_current_user)):
|
| 187 |
+
"""Get user funds and margin"""
|
| 188 |
+
try:
|
| 189 |
+
result = await dhan_client.make_request("GET", "/funds")
|
| 190 |
+
return result.get("data", {})
|
| 191 |
+
|
| 192 |
+
except Exception as e:
|
| 193 |
+
logger.error(f"Error fetching funds: {e}")
|
| 194 |
+
raise HTTPException(status_code=500, detail="Failed to fetch funds")
|
| 195 |
+
|
| 196 |
+
@router.post("/orders", response_model=OrderResponse)
|
| 197 |
+
async def place_order(
|
| 198 |
+
order: OrderRequest,
|
| 199 |
+
current_user: Dict = Depends(get_current_user)
|
| 200 |
+
):
|
| 201 |
+
"""Place a new order"""
|
| 202 |
+
try:
|
| 203 |
+
order_data = {
|
| 204 |
+
"securityId": order.security_id,
|
| 205 |
+
"exchangeSegment": order.exchange_segment,
|
| 206 |
+
"transactionType": order.transaction_type,
|
| 207 |
+
"quantity": order.quantity,
|
| 208 |
+
"orderType": order.order_type,
|
| 209 |
+
"productType": order.product_type,
|
| 210 |
+
"price": order.price if order.price else 0,
|
| 211 |
+
"triggerPrice": order.trigger_price if order.trigger_price else 0,
|
| 212 |
+
"validity": order.validity,
|
| 213 |
+
"disclosedQuantity": order.disclosed_quantity if order.disclosed_quantity else 0,
|
| 214 |
+
"afterMarketOrder": order.after_market_order if order.after_market_order else False
|
| 215 |
+
}
|
| 216 |
+
|
| 217 |
+
result = await dhan_client.make_request("POST", "/orders", order_data)
|
| 218 |
+
return result.get("data", {})
|
| 219 |
+
|
| 220 |
+
except Exception as e:
|
| 221 |
+
logger.error(f"Error placing order: {e}")
|
| 222 |
+
raise HTTPException(status_code=500, detail="Failed to place order")
|
| 223 |
+
|
| 224 |
+
@router.get("/orders")
|
| 225 |
+
async def get_orders(current_user: Dict = Depends(get_current_user)):
|
| 226 |
+
"""Get user orders"""
|
| 227 |
+
try:
|
| 228 |
+
result = await dhan_client.make_request("GET", "/orders")
|
| 229 |
+
return {"orders": result.get("data", [])}
|
| 230 |
+
|
| 231 |
+
except Exception as e:
|
| 232 |
+
logger.error(f"Error fetching orders: {e}")
|
| 233 |
+
raise HTTPException(status_code=500, detail="Failed to fetch orders")
|
| 234 |
+
|
| 235 |
+
@router.delete("/orders/{order_id}")
|
| 236 |
+
async def cancel_order(
|
| 237 |
+
order_id: str,
|
| 238 |
+
current_user: Dict = Depends(get_current_user)
|
| 239 |
+
):
|
| 240 |
+
"""Cancel an order"""
|
| 241 |
+
try:
|
| 242 |
+
result = await dhan_client.make_request("DELETE", f"/orders/{order_id}")
|
| 243 |
+
return {"message": "Order cancelled successfully", "data": result.get("data", {})}
|
| 244 |
+
|
| 245 |
+
except Exception as e:
|
| 246 |
+
logger.error(f"Error cancelling order: {e}")
|
| 247 |
+
raise HTTPException(status_code=500, detail="Failed to cancel order")
|
| 248 |
+
|
| 249 |
+
@router.put("/orders/{order_id}")
|
| 250 |
+
async def modify_order(
|
| 251 |
+
order_id: str,
|
| 252 |
+
modifications: Dict[str, Any],
|
| 253 |
+
current_user: Dict = Depends(get_current_user)
|
| 254 |
+
):
|
| 255 |
+
"""Modify an existing order"""
|
| 256 |
+
try:
|
| 257 |
+
result = await dhan_client.make_request("PUT", f"/orders/{order_id}", modifications)
|
| 258 |
+
return {"message": "Order modified successfully", "data": result.get("data", {})}
|
| 259 |
+
|
| 260 |
+
except Exception as e:
|
| 261 |
+
logger.error(f"Error modifying order: {e}")
|
| 262 |
+
raise HTTPException(status_code=500, detail="Failed to modify order")
|
| 263 |
+
|
| 264 |
+
@router.post("/historical", response_model=HistoricalDataResponse)
|
| 265 |
+
async def get_historical_data(
|
| 266 |
+
request: HistoricalDataRequest,
|
| 267 |
+
current_user: Dict = Depends(get_current_user)
|
| 268 |
+
):
|
| 269 |
+
"""Get historical data for charting"""
|
| 270 |
+
try:
|
| 271 |
+
data = {
|
| 272 |
+
"securityId": request.security_id,
|
| 273 |
+
"exchangeSegment": request.exchange_segment,
|
| 274 |
+
"instrument": request.instrument,
|
| 275 |
+
"interval": request.interval,
|
| 276 |
+
"fromDate": request.from_date,
|
| 277 |
+
"toDate": request.to_date
|
| 278 |
+
}
|
| 279 |
+
|
| 280 |
+
result = await dhan_client.make_request("POST", "/charts/historical", data)
|
| 281 |
+
return result.get("data", {})
|
| 282 |
+
|
| 283 |
+
except Exception as e:
|
| 284 |
+
logger.error(f"Error fetching historical data: {e}")
|
| 285 |
+
raise HTTPException(status_code=500, detail="Failed to fetch historical data")
|
| 286 |
+
|
| 287 |
+
@router.get("/market-status")
|
| 288 |
+
async def get_market_status(current_user: Dict = Depends(get_current_user)):
|
| 289 |
+
"""Get current market status"""
|
| 290 |
+
try:
|
| 291 |
+
# This endpoint might not exist in Dhan API, so we'll create a mock response
|
| 292 |
+
# based on time and market hours
|
| 293 |
+
now = datetime.now()
|
| 294 |
+
is_weekend = now.weekday() >= 5 # Saturday = 5, Sunday = 6
|
| 295 |
+
|
| 296 |
+
# NSE trading hours: 9:15 AM to 3:30 PM
|
| 297 |
+
market_open_time = now.replace(hour=9, minute=15, second=0, microsecond=0)
|
| 298 |
+
market_close_time = now.replace(hour=15, minute=30, second=0, microsecond=0)
|
| 299 |
+
|
| 300 |
+
is_market_hours = market_open_time <= now <= market_close_time and not is_weekend
|
| 301 |
+
|
| 302 |
+
return {
|
| 303 |
+
"market_status": "OPEN" if is_market_hours else "CLOSED",
|
| 304 |
+
"timestamp": now.isoformat(),
|
| 305 |
+
"next_open": market_open_time.isoformat() if not is_market_hours else None,
|
| 306 |
+
"next_close": market_close_time.isoformat() if is_market_hours else None
|
| 307 |
+
}
|
| 308 |
+
|
| 309 |
+
except Exception as e:
|
| 310 |
+
logger.error(f"Error getting market status: {e}")
|
| 311 |
+
raise HTTPException(status_code=500, detail="Failed to get market status")
|
routers/portfolio.py
ADDED
|
@@ -0,0 +1,402 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Portfolio Management Router
|
| 3 |
+
Handles portfolio tracking, analytics, and risk management
|
| 4 |
+
"""
|
| 5 |
+
|
| 6 |
+
from fastapi import APIRouter, HTTPException, Depends
|
| 7 |
+
from typing import List, Dict, Any, Optional
|
| 8 |
+
import logging
|
| 9 |
+
from datetime import datetime, timedelta
|
| 10 |
+
|
| 11 |
+
from config.settings import get_settings
|
| 12 |
+
from models.portfolio import (
|
| 13 |
+
PortfolioSummary,
|
| 14 |
+
RiskAssessment,
|
| 15 |
+
PerformanceMetrics,
|
| 16 |
+
AllocationRequest
|
| 17 |
+
)
|
| 18 |
+
from services.auth import get_current_user
|
| 19 |
+
from services.portfolio_service import PortfolioService
|
| 20 |
+
from services.risk_engine import RiskEngine
|
| 21 |
+
|
| 22 |
+
logger = logging.getLogger(__name__)
|
| 23 |
+
router = APIRouter()
|
| 24 |
+
settings = get_settings()
|
| 25 |
+
|
| 26 |
+
# Initialize services
|
| 27 |
+
portfolio_service = PortfolioService()
|
| 28 |
+
risk_engine = RiskEngine()
|
| 29 |
+
|
| 30 |
+
@router.get("/summary", response_model=PortfolioSummary)
|
| 31 |
+
async def get_portfolio_summary(current_user: Dict = Depends(get_current_user)):
|
| 32 |
+
"""Get comprehensive portfolio summary"""
|
| 33 |
+
try:
|
| 34 |
+
summary = await portfolio_service.get_portfolio_summary(
|
| 35 |
+
user_id=current_user["id"]
|
| 36 |
+
)
|
| 37 |
+
return summary
|
| 38 |
+
|
| 39 |
+
except Exception as e:
|
| 40 |
+
logger.error(f"Error fetching portfolio summary: {e}")
|
| 41 |
+
raise HTTPException(
|
| 42 |
+
status_code=500,
|
| 43 |
+
detail="Failed to fetch portfolio summary"
|
| 44 |
+
)
|
| 45 |
+
|
| 46 |
+
@router.get("/risk-assessment", response_model=RiskAssessment)
|
| 47 |
+
async def get_risk_assessment(current_user: Dict = Depends(get_current_user)):
|
| 48 |
+
"""Get portfolio risk assessment"""
|
| 49 |
+
try:
|
| 50 |
+
# Get current holdings
|
| 51 |
+
holdings = await portfolio_service.get_holdings(current_user["id"])
|
| 52 |
+
|
| 53 |
+
# Perform risk assessment
|
| 54 |
+
risk_assessment = await risk_engine.assess_portfolio_risk(holdings)
|
| 55 |
+
|
| 56 |
+
return risk_assessment
|
| 57 |
+
|
| 58 |
+
except Exception as e:
|
| 59 |
+
logger.error(f"Error performing risk assessment: {e}")
|
| 60 |
+
raise HTTPException(
|
| 61 |
+
status_code=500,
|
| 62 |
+
detail="Failed to perform risk assessment"
|
| 63 |
+
)
|
| 64 |
+
|
| 65 |
+
@router.get("/performance", response_model=PerformanceMetrics)
|
| 66 |
+
async def get_performance_metrics(
|
| 67 |
+
period: str = "1Y",
|
| 68 |
+
current_user: Dict = Depends(get_current_user)
|
| 69 |
+
):
|
| 70 |
+
"""Get portfolio performance metrics"""
|
| 71 |
+
try:
|
| 72 |
+
# Calculate date range
|
| 73 |
+
end_date = datetime.utcnow()
|
| 74 |
+
|
| 75 |
+
period_map = {
|
| 76 |
+
"1D": timedelta(days=1),
|
| 77 |
+
"1W": timedelta(weeks=1),
|
| 78 |
+
"1M": timedelta(days=30),
|
| 79 |
+
"3M": timedelta(days=90),
|
| 80 |
+
"6M": timedelta(days=180),
|
| 81 |
+
"1Y": timedelta(days=365),
|
| 82 |
+
"ALL": timedelta(days=3650) # 10 years
|
| 83 |
+
}
|
| 84 |
+
|
| 85 |
+
start_date = end_date - period_map.get(period, timedelta(days=365))
|
| 86 |
+
|
| 87 |
+
# Calculate performance metrics
|
| 88 |
+
performance = await portfolio_service.calculate_performance_metrics(
|
| 89 |
+
user_id=current_user["id"],
|
| 90 |
+
start_date=start_date,
|
| 91 |
+
end_date=end_date
|
| 92 |
+
)
|
| 93 |
+
|
| 94 |
+
return performance
|
| 95 |
+
|
| 96 |
+
except Exception as e:
|
| 97 |
+
logger.error(f"Error calculating performance metrics: {e}")
|
| 98 |
+
raise HTTPException(
|
| 99 |
+
status_code=500,
|
| 100 |
+
detail="Failed to calculate performance metrics"
|
| 101 |
+
)
|
| 102 |
+
|
| 103 |
+
@router.get("/allocation")
|
| 104 |
+
async def get_asset_allocation(current_user: Dict = Depends(get_current_user)):
|
| 105 |
+
"""Get current asset allocation breakdown"""
|
| 106 |
+
try:
|
| 107 |
+
allocation = await portfolio_service.get_asset_allocation(
|
| 108 |
+
user_id=current_user["id"]
|
| 109 |
+
)
|
| 110 |
+
|
| 111 |
+
return {
|
| 112 |
+
"sector_allocation": allocation["sectors"],
|
| 113 |
+
"market_cap_allocation": allocation["market_caps"],
|
| 114 |
+
"geographic_allocation": allocation["geography"],
|
| 115 |
+
"concentration_metrics": allocation["concentration"],
|
| 116 |
+
"diversification_score": allocation["diversification_score"]
|
| 117 |
+
}
|
| 118 |
+
|
| 119 |
+
except Exception as e:
|
| 120 |
+
logger.error(f"Error fetching asset allocation: {e}")
|
| 121 |
+
raise HTTPException(
|
| 122 |
+
status_code=500,
|
| 123 |
+
detail="Failed to fetch asset allocation"
|
| 124 |
+
)
|
| 125 |
+
|
| 126 |
+
@router.post("/rebalance")
|
| 127 |
+
async def suggest_portfolio_rebalancing(
|
| 128 |
+
target_allocation: AllocationRequest,
|
| 129 |
+
current_user: Dict = Depends(get_current_user)
|
| 130 |
+
):
|
| 131 |
+
"""Suggest portfolio rebalancing to achieve target allocation"""
|
| 132 |
+
try:
|
| 133 |
+
# Get current portfolio
|
| 134 |
+
current_portfolio = await portfolio_service.get_holdings(current_user["id"])
|
| 135 |
+
|
| 136 |
+
# Calculate rebalancing suggestions
|
| 137 |
+
rebalancing_plan = await portfolio_service.calculate_rebalancing(
|
| 138 |
+
current_portfolio=current_portfolio,
|
| 139 |
+
target_allocation=target_allocation
|
| 140 |
+
)
|
| 141 |
+
|
| 142 |
+
return {
|
| 143 |
+
"current_allocation": rebalancing_plan["current"],
|
| 144 |
+
"target_allocation": rebalancing_plan["target"],
|
| 145 |
+
"suggested_trades": rebalancing_plan["trades"],
|
| 146 |
+
"estimated_cost": rebalancing_plan["cost"],
|
| 147 |
+
"tax_implications": rebalancing_plan["tax_impact"]
|
| 148 |
+
}
|
| 149 |
+
|
| 150 |
+
except Exception as e:
|
| 151 |
+
logger.error(f"Error calculating rebalancing: {e}")
|
| 152 |
+
raise HTTPException(
|
| 153 |
+
status_code=500,
|
| 154 |
+
detail="Failed to calculate rebalancing suggestions"
|
| 155 |
+
)
|
| 156 |
+
|
| 157 |
+
@router.get("/correlation-matrix")
|
| 158 |
+
async def get_correlation_matrix(current_user: Dict = Depends(get_current_user)):
|
| 159 |
+
"""Get correlation matrix for portfolio holdings"""
|
| 160 |
+
try:
|
| 161 |
+
# Get holdings
|
| 162 |
+
holdings = await portfolio_service.get_holdings(current_user["id"])
|
| 163 |
+
|
| 164 |
+
# Calculate correlation matrix
|
| 165 |
+
correlation_matrix = await portfolio_service.calculate_correlation_matrix(
|
| 166 |
+
holdings
|
| 167 |
+
)
|
| 168 |
+
|
| 169 |
+
return {
|
| 170 |
+
"correlation_matrix": correlation_matrix,
|
| 171 |
+
"timestamp": datetime.utcnow().isoformat()
|
| 172 |
+
}
|
| 173 |
+
|
| 174 |
+
except Exception as e:
|
| 175 |
+
logger.error(f"Error calculating correlation matrix: {e}")
|
| 176 |
+
raise HTTPException(
|
| 177 |
+
status_code=500,
|
| 178 |
+
detail="Failed to calculate correlation matrix"
|
| 179 |
+
)
|
| 180 |
+
|
| 181 |
+
@router.get("/value-at-risk")
|
| 182 |
+
async def calculate_value_at_risk(
|
| 183 |
+
confidence_level: float = 0.95,
|
| 184 |
+
time_horizon: int = 1,
|
| 185 |
+
current_user: Dict = Depends(get_current_user)
|
| 186 |
+
):
|
| 187 |
+
"""Calculate Value at Risk (VaR) for the portfolio"""
|
| 188 |
+
try:
|
| 189 |
+
# Get portfolio data
|
| 190 |
+
portfolio_data = await portfolio_service.get_portfolio_for_var_calculation(
|
| 191 |
+
user_id=current_user["id"]
|
| 192 |
+
)
|
| 193 |
+
|
| 194 |
+
# Calculate VaR using different methods
|
| 195 |
+
var_results = await risk_engine.calculate_value_at_risk(
|
| 196 |
+
portfolio_data=portfolio_data,
|
| 197 |
+
confidence_level=confidence_level,
|
| 198 |
+
time_horizon=time_horizon
|
| 199 |
+
)
|
| 200 |
+
|
| 201 |
+
return {
|
| 202 |
+
"var_historical": var_results["historical"],
|
| 203 |
+
"var_parametric": var_results["parametric"],
|
| 204 |
+
"var_monte_carlo": var_results["monte_carlo"],
|
| 205 |
+
"expected_shortfall": var_results["expected_shortfall"],
|
| 206 |
+
"confidence_level": confidence_level,
|
| 207 |
+
"time_horizon_days": time_horizon,
|
| 208 |
+
"portfolio_value": var_results["portfolio_value"],
|
| 209 |
+
"timestamp": datetime.utcnow().isoformat()
|
| 210 |
+
}
|
| 211 |
+
|
| 212 |
+
except Exception as e:
|
| 213 |
+
logger.error(f"Error calculating VaR: {e}")
|
| 214 |
+
raise HTTPException(
|
| 215 |
+
status_code=500,
|
| 216 |
+
detail="Failed to calculate Value at Risk"
|
| 217 |
+
)
|
| 218 |
+
|
| 219 |
+
@router.get("/stress-testing")
|
| 220 |
+
async def run_stress_testing(
|
| 221 |
+
scenario: str = "market_crash",
|
| 222 |
+
current_user: Dict = Depends(get_current_user)
|
| 223 |
+
):
|
| 224 |
+
"""Run stress testing scenarios on the portfolio"""
|
| 225 |
+
try:
|
| 226 |
+
# Get portfolio data
|
| 227 |
+
portfolio_data = await portfolio_service.get_holdings(current_user["id"])
|
| 228 |
+
|
| 229 |
+
# Run stress testing
|
| 230 |
+
stress_results = await risk_engine.run_stress_testing(
|
| 231 |
+
portfolio_data=portfolio_data,
|
| 232 |
+
scenario=scenario
|
| 233 |
+
)
|
| 234 |
+
|
| 235 |
+
return {
|
| 236 |
+
"scenario": scenario,
|
| 237 |
+
"results": stress_results,
|
| 238 |
+
"recommendations": stress_results.get("recommendations", []),
|
| 239 |
+
"timestamp": datetime.utcnow().isoformat()
|
| 240 |
+
}
|
| 241 |
+
|
| 242 |
+
except Exception as e:
|
| 243 |
+
logger.error(f"Error running stress testing: {e}")
|
| 244 |
+
raise HTTPException(
|
| 245 |
+
status_code=500,
|
| 246 |
+
detail="Failed to run stress testing"
|
| 247 |
+
)
|
| 248 |
+
|
| 249 |
+
@router.get("/optimization")
|
| 250 |
+
async def get_portfolio_optimization(
|
| 251 |
+
objective: str = "sharpe_ratio",
|
| 252 |
+
constraints: Optional[Dict[str, Any]] = None,
|
| 253 |
+
current_user: Dict = Depends(get_current_user)
|
| 254 |
+
):
|
| 255 |
+
"""Get portfolio optimization suggestions"""
|
| 256 |
+
try:
|
| 257 |
+
# Get current portfolio and market data
|
| 258 |
+
portfolio_data = await portfolio_service.get_holdings(current_user["id"])
|
| 259 |
+
|
| 260 |
+
# Run portfolio optimization
|
| 261 |
+
optimization_results = await portfolio_service.optimize_portfolio(
|
| 262 |
+
current_portfolio=portfolio_data,
|
| 263 |
+
objective=objective,
|
| 264 |
+
constraints=constraints or {}
|
| 265 |
+
)
|
| 266 |
+
|
| 267 |
+
return {
|
| 268 |
+
"current_allocation": optimization_results["current"],
|
| 269 |
+
"optimal_allocation": optimization_results["optimal"],
|
| 270 |
+
"expected_return": optimization_results["expected_return"],
|
| 271 |
+
"expected_volatility": optimization_results["expected_volatility"],
|
| 272 |
+
"sharpe_ratio": optimization_results["sharpe_ratio"],
|
| 273 |
+
"suggested_changes": optimization_results["changes"],
|
| 274 |
+
"objective": objective,
|
| 275 |
+
"timestamp": datetime.utcnow().isoformat()
|
| 276 |
+
}
|
| 277 |
+
|
| 278 |
+
except Exception as e:
|
| 279 |
+
logger.error(f"Error running portfolio optimization: {e}")
|
| 280 |
+
raise HTTPException(
|
| 281 |
+
status_code=500,
|
| 282 |
+
detail="Failed to run portfolio optimization"
|
| 283 |
+
)
|
| 284 |
+
|
| 285 |
+
@router.get("/tax-optimization")
|
| 286 |
+
async def get_tax_optimization(
|
| 287 |
+
tax_year: Optional[int] = None,
|
| 288 |
+
current_user: Dict = Depends(get_current_user)
|
| 289 |
+
):
|
| 290 |
+
"""Get tax optimization suggestions"""
|
| 291 |
+
try:
|
| 292 |
+
if not tax_year:
|
| 293 |
+
tax_year = datetime.utcnow().year
|
| 294 |
+
|
| 295 |
+
# Get holdings with cost basis information
|
| 296 |
+
holdings_with_cost_basis = await portfolio_service.get_holdings_with_cost_basis(
|
| 297 |
+
user_id=current_user["id"]
|
| 298 |
+
)
|
| 299 |
+
|
| 300 |
+
# Calculate tax optimization strategies
|
| 301 |
+
tax_strategies = await portfolio_service.calculate_tax_optimization(
|
| 302 |
+
holdings=holdings_with_cost_basis,
|
| 303 |
+
tax_year=tax_year
|
| 304 |
+
)
|
| 305 |
+
|
| 306 |
+
return {
|
| 307 |
+
"tax_year": tax_year,
|
| 308 |
+
"current_tax_liability": tax_strategies["current_liability"],
|
| 309 |
+
"tax_loss_harvesting": tax_strategies["loss_harvesting"],
|
| 310 |
+
"wash_sale_warnings": tax_strategies["wash_sale_warnings"],
|
| 311 |
+
"suggested_actions": tax_strategies["actions"],
|
| 312 |
+
"potential_savings": tax_strategies["potential_savings"],
|
| 313 |
+
"timestamp": datetime.utcnow().isoformat()
|
| 314 |
+
}
|
| 315 |
+
|
| 316 |
+
except Exception as e:
|
| 317 |
+
logger.error(f"Error calculating tax optimization: {e}")
|
| 318 |
+
raise HTTPException(
|
| 319 |
+
status_code=500,
|
| 320 |
+
detail="Failed to calculate tax optimization"
|
| 321 |
+
)
|
| 322 |
+
|
| 323 |
+
@router.get("/benchmarking")
|
| 324 |
+
async def get_benchmark_comparison(
|
| 325 |
+
benchmark: str = "NIFTY50",
|
| 326 |
+
period: str = "1Y",
|
| 327 |
+
current_user: Dict = Depends(get_current_user)
|
| 328 |
+
):
|
| 329 |
+
"""Compare portfolio performance against benchmarks"""
|
| 330 |
+
try:
|
| 331 |
+
# Calculate date range
|
| 332 |
+
end_date = datetime.utcnow()
|
| 333 |
+
period_map = {
|
| 334 |
+
"1M": timedelta(days=30),
|
| 335 |
+
"3M": timedelta(days=90),
|
| 336 |
+
"6M": timedelta(days=180),
|
| 337 |
+
"1Y": timedelta(days=365),
|
| 338 |
+
"2Y": timedelta(days=730),
|
| 339 |
+
"5Y": timedelta(days=1825)
|
| 340 |
+
}
|
| 341 |
+
start_date = end_date - period_map.get(period, timedelta(days=365))
|
| 342 |
+
|
| 343 |
+
# Get portfolio and benchmark performance
|
| 344 |
+
comparison = await portfolio_service.compare_with_benchmark(
|
| 345 |
+
user_id=current_user["id"],
|
| 346 |
+
benchmark=benchmark,
|
| 347 |
+
start_date=start_date,
|
| 348 |
+
end_date=end_date
|
| 349 |
+
)
|
| 350 |
+
|
| 351 |
+
return {
|
| 352 |
+
"benchmark": benchmark,
|
| 353 |
+
"period": period,
|
| 354 |
+
"portfolio_return": comparison["portfolio_return"],
|
| 355 |
+
"benchmark_return": comparison["benchmark_return"],
|
| 356 |
+
"alpha": comparison["alpha"],
|
| 357 |
+
"beta": comparison["beta"],
|
| 358 |
+
"tracking_error": comparison["tracking_error"],
|
| 359 |
+
"information_ratio": comparison["information_ratio"],
|
| 360 |
+
"up_capture": comparison["up_capture"],
|
| 361 |
+
"down_capture": comparison["down_capture"],
|
| 362 |
+
"timestamp": datetime.utcnow().isoformat()
|
| 363 |
+
}
|
| 364 |
+
|
| 365 |
+
except Exception as e:
|
| 366 |
+
logger.error(f"Error performing benchmark comparison: {e}")
|
| 367 |
+
raise HTTPException(
|
| 368 |
+
status_code=500,
|
| 369 |
+
detail="Failed to perform benchmark comparison"
|
| 370 |
+
)
|
| 371 |
+
|
| 372 |
+
@router.post("/simulate")
|
| 373 |
+
async def simulate_portfolio_changes(
|
| 374 |
+
changes: List[Dict[str, Any]],
|
| 375 |
+
current_user: Dict = Depends(get_current_user)
|
| 376 |
+
):
|
| 377 |
+
"""Simulate the impact of portfolio changes"""
|
| 378 |
+
try:
|
| 379 |
+
# Get current portfolio
|
| 380 |
+
current_portfolio = await portfolio_service.get_holdings(current_user["id"])
|
| 381 |
+
|
| 382 |
+
# Simulate changes
|
| 383 |
+
simulation_results = await portfolio_service.simulate_portfolio_changes(
|
| 384 |
+
current_portfolio=current_portfolio,
|
| 385 |
+
proposed_changes=changes
|
| 386 |
+
)
|
| 387 |
+
|
| 388 |
+
return {
|
| 389 |
+
"current_metrics": simulation_results["current"],
|
| 390 |
+
"projected_metrics": simulation_results["projected"],
|
| 391 |
+
"impact_analysis": simulation_results["impact"],
|
| 392 |
+
"risk_assessment": simulation_results["risk_change"],
|
| 393 |
+
"recommendations": simulation_results["recommendations"],
|
| 394 |
+
"timestamp": datetime.utcnow().isoformat()
|
| 395 |
+
}
|
| 396 |
+
|
| 397 |
+
except Exception as e:
|
| 398 |
+
logger.error(f"Error simulating portfolio changes: {e}")
|
| 399 |
+
raise HTTPException(
|
| 400 |
+
status_code=500,
|
| 401 |
+
detail="Failed to simulate portfolio changes"
|
| 402 |
+
)
|
routers/signals.py
ADDED
|
@@ -0,0 +1,409 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
AI Trading Signals Router
|
| 3 |
+
Handles AI-powered trading signal generation and management
|
| 4 |
+
"""
|
| 5 |
+
|
| 6 |
+
from fastapi import APIRouter, HTTPException, Depends, BackgroundTasks
|
| 7 |
+
from typing import List, Dict, Any, Optional
|
| 8 |
+
import asyncio
|
| 9 |
+
import logging
|
| 10 |
+
from datetime import datetime, timedelta
|
| 11 |
+
|
| 12 |
+
from config.settings import get_settings
|
| 13 |
+
from models.signals import (
|
| 14 |
+
SignalRequest,
|
| 15 |
+
SignalResponse,
|
| 16 |
+
MarketAnalysisResponse,
|
| 17 |
+
SignalFilter
|
| 18 |
+
)
|
| 19 |
+
from services.ai_engine import AIEngine
|
| 20 |
+
from services.auth import get_current_user
|
| 21 |
+
from services.market_data import MarketDataService
|
| 22 |
+
from utils.cache import get_redis_client
|
| 23 |
+
|
| 24 |
+
logger = logging.getLogger(__name__)
|
| 25 |
+
router = APIRouter()
|
| 26 |
+
settings = get_settings()
|
| 27 |
+
|
| 28 |
+
# Initialize services
|
| 29 |
+
ai_engine = AIEngine()
|
| 30 |
+
market_data_service = MarketDataService()
|
| 31 |
+
|
| 32 |
+
@router.post("/generate", response_model=List[SignalResponse])
|
| 33 |
+
async def generate_signals(
|
| 34 |
+
request: SignalRequest,
|
| 35 |
+
background_tasks: BackgroundTasks,
|
| 36 |
+
current_user: Dict = Depends(get_current_user)
|
| 37 |
+
):
|
| 38 |
+
"""Generate AI trading signals for specified securities"""
|
| 39 |
+
try:
|
| 40 |
+
# Get market data for the requested securities
|
| 41 |
+
market_data = await market_data_service.get_market_data_batch(
|
| 42 |
+
request.securities
|
| 43 |
+
)
|
| 44 |
+
|
| 45 |
+
# Generate signals using AI engine
|
| 46 |
+
signals = await ai_engine.generate_trading_signals(
|
| 47 |
+
market_data=market_data,
|
| 48 |
+
user_preferences=request.preferences
|
| 49 |
+
)
|
| 50 |
+
|
| 51 |
+
# Store signals in database for historical tracking
|
| 52 |
+
background_tasks.add_task(
|
| 53 |
+
store_signals_history,
|
| 54 |
+
signals,
|
| 55 |
+
current_user["id"]
|
| 56 |
+
)
|
| 57 |
+
|
| 58 |
+
return signals
|
| 59 |
+
|
| 60 |
+
except Exception as e:
|
| 61 |
+
logger.error(f"Error generating signals: {e}")
|
| 62 |
+
raise HTTPException(
|
| 63 |
+
status_code=500,
|
| 64 |
+
detail="Failed to generate trading signals"
|
| 65 |
+
)
|
| 66 |
+
|
| 67 |
+
@router.get("/", response_model=List[SignalResponse])
|
| 68 |
+
async def get_signals(
|
| 69 |
+
filter_params: SignalFilter = Depends(),
|
| 70 |
+
current_user: Dict = Depends(get_current_user)
|
| 71 |
+
):
|
| 72 |
+
"""Get existing signals with filtering options"""
|
| 73 |
+
try:
|
| 74 |
+
# Check cache first
|
| 75 |
+
redis_client = get_redis_client()
|
| 76 |
+
cache_key = f"signals:{current_user['id']}:{hash(str(filter_params))}"
|
| 77 |
+
|
| 78 |
+
if redis_client:
|
| 79 |
+
cached_signals = await redis_client.get(cache_key)
|
| 80 |
+
if cached_signals:
|
| 81 |
+
return cached_signals
|
| 82 |
+
|
| 83 |
+
# Get signals from database
|
| 84 |
+
signals = await get_user_signals(
|
| 85 |
+
user_id=current_user["id"],
|
| 86 |
+
filters=filter_params
|
| 87 |
+
)
|
| 88 |
+
|
| 89 |
+
# Cache for 5 minutes
|
| 90 |
+
if redis_client:
|
| 91 |
+
await redis_client.setex(cache_key, 300, signals)
|
| 92 |
+
|
| 93 |
+
return signals
|
| 94 |
+
|
| 95 |
+
except Exception as e:
|
| 96 |
+
logger.error(f"Error fetching signals: {e}")
|
| 97 |
+
raise HTTPException(
|
| 98 |
+
status_code=500,
|
| 99 |
+
detail="Failed to fetch signals"
|
| 100 |
+
)
|
| 101 |
+
|
| 102 |
+
@router.get("/performance")
|
| 103 |
+
async def get_signals_performance(
|
| 104 |
+
days: int = 30,
|
| 105 |
+
current_user: Dict = Depends(get_current_user)
|
| 106 |
+
):
|
| 107 |
+
"""Get signals performance analytics"""
|
| 108 |
+
try:
|
| 109 |
+
end_date = datetime.utcnow()
|
| 110 |
+
start_date = end_date - timedelta(days=days)
|
| 111 |
+
|
| 112 |
+
# Get signals performance from database
|
| 113 |
+
performance = await calculate_signals_performance(
|
| 114 |
+
user_id=current_user["id"],
|
| 115 |
+
start_date=start_date,
|
| 116 |
+
end_date=end_date
|
| 117 |
+
)
|
| 118 |
+
|
| 119 |
+
return performance
|
| 120 |
+
|
| 121 |
+
except Exception as e:
|
| 122 |
+
logger.error(f"Error calculating performance: {e}")
|
| 123 |
+
raise HTTPException(
|
| 124 |
+
status_code=500,
|
| 125 |
+
detail="Failed to calculate signals performance"
|
| 126 |
+
)
|
| 127 |
+
|
| 128 |
+
@router.get("/market-analysis", response_model=MarketAnalysisResponse)
|
| 129 |
+
async def get_market_analysis(current_user: Dict = Depends(get_current_user)):
|
| 130 |
+
"""Get comprehensive market analysis"""
|
| 131 |
+
try:
|
| 132 |
+
# Check cache first
|
| 133 |
+
redis_client = get_redis_client()
|
| 134 |
+
cache_key = "market_analysis:global"
|
| 135 |
+
|
| 136 |
+
if redis_client:
|
| 137 |
+
cached_analysis = await redis_client.get(cache_key)
|
| 138 |
+
if cached_analysis:
|
| 139 |
+
return cached_analysis
|
| 140 |
+
|
| 141 |
+
# Generate fresh market analysis
|
| 142 |
+
analysis = await ai_engine.generate_market_analysis()
|
| 143 |
+
|
| 144 |
+
# Cache for 15 minutes
|
| 145 |
+
if redis_client:
|
| 146 |
+
await redis_client.setex(cache_key, 900, analysis)
|
| 147 |
+
|
| 148 |
+
return analysis
|
| 149 |
+
|
| 150 |
+
except Exception as e:
|
| 151 |
+
logger.error(f"Error generating market analysis: {e}")
|
| 152 |
+
raise HTTPException(
|
| 153 |
+
status_code=500,
|
| 154 |
+
detail="Failed to generate market analysis"
|
| 155 |
+
)
|
| 156 |
+
|
| 157 |
+
@router.post("/backtesting")
|
| 158 |
+
async def run_backtesting(
|
| 159 |
+
strategy_params: Dict[str, Any],
|
| 160 |
+
current_user: Dict = Depends(get_current_user)
|
| 161 |
+
):
|
| 162 |
+
"""Run backtesting on AI trading strategies"""
|
| 163 |
+
try:
|
| 164 |
+
# Validate strategy parameters
|
| 165 |
+
if not strategy_params.get("symbols") or not strategy_params.get("period"):
|
| 166 |
+
raise HTTPException(
|
| 167 |
+
status_code=400,
|
| 168 |
+
detail="Missing required parameters: symbols and period"
|
| 169 |
+
)
|
| 170 |
+
|
| 171 |
+
# Run backtesting
|
| 172 |
+
backtest_results = await ai_engine.run_backtesting(
|
| 173 |
+
symbols=strategy_params["symbols"],
|
| 174 |
+
period=strategy_params["period"],
|
| 175 |
+
strategy_config=strategy_params.get("config", {})
|
| 176 |
+
)
|
| 177 |
+
|
| 178 |
+
return backtest_results
|
| 179 |
+
|
| 180 |
+
except Exception as e:
|
| 181 |
+
logger.error(f"Error running backtesting: {e}")
|
| 182 |
+
raise HTTPException(
|
| 183 |
+
status_code=500,
|
| 184 |
+
detail="Failed to run backtesting"
|
| 185 |
+
)
|
| 186 |
+
|
| 187 |
+
@router.get("/sentiment/{symbol}")
|
| 188 |
+
async def get_stock_sentiment(
|
| 189 |
+
symbol: str,
|
| 190 |
+
current_user: Dict = Depends(get_current_user)
|
| 191 |
+
):
|
| 192 |
+
"""Get AI-powered sentiment analysis for a specific stock"""
|
| 193 |
+
try:
|
| 194 |
+
# Get sentiment analysis
|
| 195 |
+
sentiment = await ai_engine.analyze_stock_sentiment(symbol)
|
| 196 |
+
|
| 197 |
+
return {
|
| 198 |
+
"symbol": symbol,
|
| 199 |
+
"sentiment_score": sentiment["score"],
|
| 200 |
+
"sentiment_label": sentiment["label"],
|
| 201 |
+
"confidence": sentiment["confidence"],
|
| 202 |
+
"key_factors": sentiment["factors"],
|
| 203 |
+
"news_summary": sentiment["news_summary"],
|
| 204 |
+
"timestamp": datetime.utcnow().isoformat()
|
| 205 |
+
}
|
| 206 |
+
|
| 207 |
+
except Exception as e:
|
| 208 |
+
logger.error(f"Error analyzing sentiment for {symbol}: {e}")
|
| 209 |
+
raise HTTPException(
|
| 210 |
+
status_code=500,
|
| 211 |
+
detail=f"Failed to analyze sentiment for {symbol}"
|
| 212 |
+
)
|
| 213 |
+
|
| 214 |
+
@router.post("/alerts")
|
| 215 |
+
async def create_signal_alert(
|
| 216 |
+
alert_config: Dict[str, Any],
|
| 217 |
+
current_user: Dict = Depends(get_current_user)
|
| 218 |
+
):
|
| 219 |
+
"""Create alert for specific signal conditions"""
|
| 220 |
+
try:
|
| 221 |
+
# Validate alert configuration
|
| 222 |
+
required_fields = ["symbol", "condition", "threshold"]
|
| 223 |
+
if not all(field in alert_config for field in required_fields):
|
| 224 |
+
raise HTTPException(
|
| 225 |
+
status_code=400,
|
| 226 |
+
detail="Missing required fields: symbol, condition, threshold"
|
| 227 |
+
)
|
| 228 |
+
|
| 229 |
+
# Store alert configuration
|
| 230 |
+
alert_id = await store_signal_alert(
|
| 231 |
+
user_id=current_user["id"],
|
| 232 |
+
config=alert_config
|
| 233 |
+
)
|
| 234 |
+
|
| 235 |
+
return {
|
| 236 |
+
"alert_id": alert_id,
|
| 237 |
+
"message": "Signal alert created successfully",
|
| 238 |
+
"config": alert_config
|
| 239 |
+
}
|
| 240 |
+
|
| 241 |
+
except Exception as e:
|
| 242 |
+
logger.error(f"Error creating signal alert: {e}")
|
| 243 |
+
raise HTTPException(
|
| 244 |
+
status_code=500,
|
| 245 |
+
detail="Failed to create signal alert"
|
| 246 |
+
)
|
| 247 |
+
|
| 248 |
+
@router.get("/alerts")
|
| 249 |
+
async def get_user_alerts(current_user: Dict = Depends(get_current_user)):
|
| 250 |
+
"""Get user's active signal alerts"""
|
| 251 |
+
try:
|
| 252 |
+
alerts = await get_user_signal_alerts(current_user["id"])
|
| 253 |
+
return {"alerts": alerts}
|
| 254 |
+
|
| 255 |
+
except Exception as e:
|
| 256 |
+
logger.error(f"Error fetching user alerts: {e}")
|
| 257 |
+
raise HTTPException(
|
| 258 |
+
status_code=500,
|
| 259 |
+
detail="Failed to fetch alerts"
|
| 260 |
+
)
|
| 261 |
+
|
| 262 |
+
@router.delete("/alerts/{alert_id}")
|
| 263 |
+
async def delete_signal_alert(
|
| 264 |
+
alert_id: str,
|
| 265 |
+
current_user: Dict = Depends(get_current_user)
|
| 266 |
+
):
|
| 267 |
+
"""Delete a signal alert"""
|
| 268 |
+
try:
|
| 269 |
+
success = await delete_user_alert(
|
| 270 |
+
alert_id=alert_id,
|
| 271 |
+
user_id=current_user["id"]
|
| 272 |
+
)
|
| 273 |
+
|
| 274 |
+
if success:
|
| 275 |
+
return {"message": "Alert deleted successfully"}
|
| 276 |
+
else:
|
| 277 |
+
raise HTTPException(
|
| 278 |
+
status_code=404,
|
| 279 |
+
detail="Alert not found"
|
| 280 |
+
)
|
| 281 |
+
|
| 282 |
+
except Exception as e:
|
| 283 |
+
logger.error(f"Error deleting alert: {e}")
|
| 284 |
+
raise HTTPException(
|
| 285 |
+
status_code=500,
|
| 286 |
+
detail="Failed to delete alert"
|
| 287 |
+
)
|
| 288 |
+
|
| 289 |
+
@router.get("/strategies")
|
| 290 |
+
async def get_ai_strategies(current_user: Dict = Depends(get_current_user)):
|
| 291 |
+
"""Get available AI trading strategies"""
|
| 292 |
+
try:
|
| 293 |
+
strategies = await ai_engine.get_available_strategies()
|
| 294 |
+
return {"strategies": strategies}
|
| 295 |
+
|
| 296 |
+
except Exception as e:
|
| 297 |
+
logger.error(f"Error fetching strategies: {e}")
|
| 298 |
+
raise HTTPException(
|
| 299 |
+
status_code=500,
|
| 300 |
+
detail="Failed to fetch strategies"
|
| 301 |
+
)
|
| 302 |
+
|
| 303 |
+
@router.post("/custom-strategy")
|
| 304 |
+
async def create_custom_strategy(
|
| 305 |
+
strategy_config: Dict[str, Any],
|
| 306 |
+
current_user: Dict = Depends(get_current_user)
|
| 307 |
+
):
|
| 308 |
+
"""Create a custom AI trading strategy"""
|
| 309 |
+
try:
|
| 310 |
+
# Validate strategy configuration
|
| 311 |
+
if not strategy_config.get("name") or not strategy_config.get("rules"):
|
| 312 |
+
raise HTTPException(
|
| 313 |
+
status_code=400,
|
| 314 |
+
detail="Missing required fields: name and rules"
|
| 315 |
+
)
|
| 316 |
+
|
| 317 |
+
# Create and validate strategy
|
| 318 |
+
strategy_id = await ai_engine.create_custom_strategy(
|
| 319 |
+
user_id=current_user["id"],
|
| 320 |
+
config=strategy_config
|
| 321 |
+
)
|
| 322 |
+
|
| 323 |
+
return {
|
| 324 |
+
"strategy_id": strategy_id,
|
| 325 |
+
"message": "Custom strategy created successfully",
|
| 326 |
+
"config": strategy_config
|
| 327 |
+
}
|
| 328 |
+
|
| 329 |
+
except Exception as e:
|
| 330 |
+
logger.error(f"Error creating custom strategy: {e}")
|
| 331 |
+
raise HTTPException(
|
| 332 |
+
status_code=500,
|
| 333 |
+
detail="Failed to create custom strategy"
|
| 334 |
+
)
|
| 335 |
+
|
| 336 |
+
# Helper functions
|
| 337 |
+
async def store_signals_history(signals: List[SignalResponse], user_id: str):
    """Store signals in database for historical tracking.

    Scheduled as a fire-and-forget background task from generate_signals;
    errors are logged rather than raised so a storage failure never
    surfaces to the API caller. Currently a stub — no data is persisted.
    """
    try:
        # Implementation would depend on your database choice
        # This is a placeholder for the actual database storage logic
        pass
    except Exception as e:
        logger.error(f"Error storing signals history: {e}")
|
| 345 |
+
|
| 346 |
+
async def get_user_signals(
    user_id: str,
    filters: SignalFilter
) -> List[SignalResponse]:
    """Get user signals from database with filtering.

    Currently a stub that always returns an empty list; the ``filters``
    argument is accepted but ignored until the query layer exists.
    Errors are swallowed (logged) and reported as "no signals".
    """
    try:
        # Implementation would depend on your database choice
        # This is a placeholder for the actual database query logic
        return []
    except Exception as e:
        logger.error(f"Error fetching user signals: {e}")
        return []
|
| 358 |
+
|
| 359 |
+
async def calculate_signals_performance(
    user_id: str,
    start_date: datetime,
    end_date: datetime
) -> Dict[str, Any]:
    """Calculate signals performance metrics over [start_date, end_date].

    Stub implementation: returns zeroed metrics until the database-backed
    calculation exists. On any error it logs and returns an empty dict.
    """
    try:
        metrics = {
            "total_signals": 0,
            "successful_signals": 0,
            "accuracy": 0.0,
            "avg_return": 0.0,
            "max_drawdown": 0.0,
            "sharpe_ratio": 0.0
        }
        return metrics
    except Exception as e:
        logger.error(f"Error calculating performance: {e}")
        return {}
|
| 379 |
+
|
| 380 |
+
async def store_signal_alert(user_id: str, config: Dict[str, Any]) -> str:
    """Store signal alert configuration and return the new alert id.

    Stub implementation: mints a fresh UUID without persisting anything.
    Unlike the read-side helpers, failures here re-raise so the endpoint
    can report them.
    """
    try:
        # Placeholder for the actual alert storage logic.
        from uuid import uuid4
        return str(uuid4())
    except Exception as e:
        logger.error(f"Error storing signal alert: {e}")
        raise
|
| 390 |
+
|
| 391 |
+
async def get_user_signal_alerts(user_id: str) -> List[Dict[str, Any]]:
    """Get user's signal alerts.

    Stub implementation: always returns an empty list until the alert
    store exists. Errors are logged and reported as "no alerts".
    """
    try:
        alerts: List[Dict[str, Any]] = []
        return alerts
    except Exception as e:
        logger.error(f"Error fetching user alerts: {e}")
        return []
|
| 400 |
+
|
| 401 |
+
async def delete_user_alert(alert_id: str, user_id: str) -> bool:
    """Delete user's signal alert.

    Stub implementation: reports success unconditionally until the alert
    store exists. Errors are logged and reported as failure (False).
    """
    try:
        deleted = True
        return deleted
    except Exception as e:
        logger.error(f"Error deleting user alert: {e}")
        return False
|
services/auth.py
ADDED
|
@@ -0,0 +1,286 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Authentication service with JWT token management
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
import jwt
|
| 6 |
+
from datetime import datetime, timedelta
|
| 7 |
+
from typing import Dict, Any, Optional
|
| 8 |
+
from fastapi import HTTPException, status, Depends
|
| 9 |
+
from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
|
| 10 |
+
from passlib.context import CryptContext
|
| 11 |
+
import logging
|
| 12 |
+
|
| 13 |
+
from config.settings import get_settings
|
| 14 |
+
|
| 15 |
+
logger = logging.getLogger(__name__)
|
| 16 |
+
settings = get_settings()
|
| 17 |
+
security = HTTPBearer()
|
| 18 |
+
|
| 19 |
+
# Password hashing
|
| 20 |
+
pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")
|
| 21 |
+
|
| 22 |
+
# JWT settings
|
| 23 |
+
SECRET_KEY = settings.SECRET_KEY
|
| 24 |
+
ALGORITHM = "HS256"
|
| 25 |
+
ACCESS_TOKEN_EXPIRE_MINUTES = 60
|
| 26 |
+
|
| 27 |
+
def verify_password(plain_password: str, hashed_password: str) -> bool:
    """Return True when *plain_password* matches *hashed_password*."""
    # passlib handles salt extraction and constant-time comparison.
    is_match = pwd_context.verify(plain_password, hashed_password)
    return is_match
|
| 30 |
+
|
| 31 |
+
def get_password_hash(password: str) -> str:
    """Return a bcrypt hash of *password* suitable for storage."""
    hashed = pwd_context.hash(password)
    return hashed
|
| 34 |
+
|
| 35 |
+
def create_access_token(data: Dict[str, Any], expires_delta: Optional[timedelta] = None) -> str:
    """Create a new signed JWT access token.

    *data* supplies the base claims; the token expires after
    *expires_delta* when given, otherwise after ACCESS_TOKEN_EXPIRE_MINUTES.
    """
    lifetime = expires_delta if expires_delta is not None else timedelta(
        minutes=ACCESS_TOKEN_EXPIRE_MINUTES
    )
    now = datetime.utcnow()

    claims = dict(data)
    claims.update({
        "exp": now + lifetime,
        "iat": now,
        "type": "access"
    })

    return jwt.encode(claims, SECRET_KEY, algorithm=ALGORITHM)
|
| 52 |
+
|
| 53 |
+
def create_refresh_token(data: Dict[str, Any]) -> str:
    """Create a new signed JWT refresh token (valid for 7 days)."""
    now = datetime.utcnow()
    claims = dict(data)
    claims.update({
        "exp": now + timedelta(days=7),  # refresh token valid for 7 days
        "iat": now,
        "type": "refresh"
    })
    return jwt.encode(claims, SECRET_KEY, algorithm=ALGORITHM)
|
| 66 |
+
|
| 67 |
+
def verify_token(token: str) -> Dict[str, Any]:
    """Verify and decode a JWT token.

    Returns the decoded payload, or raises HTTP 401 when the token is
    expired or otherwise invalid. Expiry is enforced by ``jwt.decode``
    itself (it raises ``ExpiredSignatureError``), so no manual ``exp``
    check is needed — the previous hand-rolled check also compared a
    local-time ``fromtimestamp(exp)`` against ``utcnow()``, which is
    wrong in any non-UTC timezone.
    """
    try:
        return jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM])
    except jwt.ExpiredSignatureError:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Token has expired",
            headers={"WWW-Authenticate": "Bearer"},
        )
    except jwt.InvalidTokenError:
        # Bug fix: PyJWT has no `jwt.JWTError` (that name comes from
        # python-jose); the old handler raised AttributeError instead of
        # returning 401 on malformed tokens.
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Invalid token",
            headers={"WWW-Authenticate": "Bearer"},
        )
|
| 95 |
+
|
| 96 |
+
async def get_current_user(credentials: HTTPAuthorizationCredentials = Depends(security)) -> Dict[str, Any]:
    """Get current user from JWT token.

    Decodes the bearer token and returns the caller's identity as a dict
    with ``id``, ``email`` and ``role`` (role defaults to "trader").
    Raises HTTP 401 when the token cannot be validated.
    """
    credentials_exception = HTTPException(
        status_code=status.HTTP_401_UNAUTHORIZED,
        detail="Could not validate credentials",
        headers={"WWW-Authenticate": "Bearer"},
    )

    try:
        payload = verify_token(credentials.credentials)
        user_id: str = payload.get("sub")
        email: str = payload.get("email")
        role: str = payload.get("role", "trader")

        if user_id is None or email is None:
            raise credentials_exception

        return {
            "id": user_id,
            "email": email,
            "role": role
        }

    except HTTPException:
        # Bug fix: propagate the specific 401 (e.g. "Token has expired")
        # from verify_token instead of masking it with the generic detail.
        raise
    except Exception as e:
        logger.error(f"Error validating user credentials: {e}")
        raise credentials_exception
|
| 125 |
+
|
| 126 |
+
async def get_current_admin(current_user: Dict = Depends(get_current_user)) -> Dict[str, Any]:
    """Get current user and verify admin role."""
    role = current_user.get("role")
    if role == "admin":
        return current_user

    raise HTTPException(
        status_code=status.HTTP_403_FORBIDDEN,
        detail="Insufficient permissions. Admin role required."
    )
|
| 136 |
+
|
| 137 |
+
async def get_current_instructor_or_admin(current_user: Dict = Depends(get_current_user)) -> Dict[str, Any]:
    """Get current user and verify instructor or admin role."""
    if current_user.get("role") in ("admin", "instructor"):
        return current_user

    raise HTTPException(
        status_code=status.HTTP_403_FORBIDDEN,
        detail="Insufficient permissions. Instructor or Admin role required."
    )
|
| 148 |
+
|
| 149 |
+
def create_token_pair(user_data: Dict[str, Any]) -> Dict[str, Any]:
    """Create an access/refresh token pair for *user_data*.

    The returned dict is shaped for an OAuth2-style token response:
    access_token, refresh_token, token_type and expires_in (seconds).
    """
    claims = {
        "sub": user_data["id"],
        "email": user_data["email"],
        "role": user_data.get("role", "trader")
    }

    return {
        "access_token": create_access_token(claims),
        "refresh_token": create_refresh_token(claims),
        "token_type": "bearer",
        "expires_in": ACCESS_TOKEN_EXPIRE_MINUTES * 60  # Convert to seconds
    }
|
| 167 |
+
|
| 168 |
+
def decode_token_without_verification(token: str) -> Optional[Dict[str, Any]]:
    """Decode token without verification (for debugging purposes only).

    Returns the raw payload, or None when the token cannot be decoded.
    Never use the result for authorization decisions.
    """
    try:
        payload = jwt.decode(token, options={"verify_signature": False})
    except Exception as e:
        logger.error(f"Error decoding token: {e}")
        return None
    return payload
|
| 175 |
+
|
| 176 |
+
class TokenBlacklist:
    """Simple in-memory token blacklist. In production, use Redis or database."""

    def __init__(self):
        # Revoked tokens; a set gives O(1) membership checks.
        self._blacklisted_tokens = set()

    def add_token(self, token: str):
        """Revoke *token* by adding it to the blacklist."""
        self._blacklisted_tokens.add(token)

    def is_blacklisted(self, token: str) -> bool:
        """Return True when *token* has been revoked."""
        return token in self._blacklisted_tokens

    def cleanup_expired(self):
        """Remove expired tokens from the blacklist.

        Placeholder: a real implementation would decode each token's
        expiry and drop those already past it.
        """
        pass
|
| 195 |
+
|
| 196 |
+
# Global token blacklist instance
|
| 197 |
+
token_blacklist = TokenBlacklist()
|
| 198 |
+
|
| 199 |
+
async def verify_active_token(credentials: HTTPAuthorizationCredentials = Depends(security)) -> str:
    """Reject requests that carry a revoked (blacklisted) bearer token.

    Returns:
        The raw bearer token string when it has not been revoked.

    Raises:
        HTTPException: 401 when the token was previously invalidated.
    """
    token = credentials.credentials
    if token_blacklist.is_blacklisted(token):
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Token has been revoked",
            headers={"WWW-Authenticate": "Bearer"},
        )
    return token
|
| 212 |
+
|
| 213 |
+
def invalidate_token(token: str):
    """Add token to blacklist"""
    # Delegates to the process-wide blacklist; subsequent requests bearing
    # this token will be rejected by verify_active_token.
    token_blacklist.add_token(token)
|
| 216 |
+
|
| 217 |
+
# Rate limiting for authentication endpoints
|
| 218 |
+
class AuthRateLimiter:
    """Simple rate limiter for authentication endpoints"""

    def __init__(self):
        # identifier -> list of attempt timestamps (naive UTC datetimes)
        self._attempts = {}

    def is_rate_limited(self, identifier: str, max_attempts: int = 5, window_minutes: int = 15) -> bool:
        """Check if identifier is rate limited"""
        cutoff = datetime.utcnow() - timedelta(minutes=window_minutes)

        # Keep only attempts inside the rolling window, then count them.
        recent = [ts for ts in self._attempts.get(identifier, []) if ts > cutoff]
        if identifier in self._attempts:
            self._attempts[identifier] = recent
        return len(recent) >= max_attempts

    def record_attempt(self, identifier: str):
        """Record an authentication attempt"""
        self._attempts.setdefault(identifier, []).append(datetime.utcnow())

# Global rate limiter instance
auth_rate_limiter = AuthRateLimiter()
|
| 249 |
+
|
| 250 |
+
def check_rate_limit(identifier: str, max_attempts: int = 5) -> bool:
    """Check and record rate limit for authentication"""

    # Raises instead of returning False so FastAPI endpoints fail fast
    # with an HTTP 429 rather than having to check a boolean.
    if auth_rate_limiter.is_rate_limited(identifier, max_attempts):
        raise HTTPException(
            status_code=status.HTTP_429_TOO_MANY_REQUESTS,
            detail="Too many authentication attempts. Please try again later."
        )

    # Only non-blocked checks are recorded toward the rolling window.
    auth_rate_limiter.record_attempt(identifier)
    return True
|
| 261 |
+
|
| 262 |
+
def generate_api_key(user_id: str) -> str:
    """Generate API key for user"""
    import hashlib
    import secrets

    # Cryptographically strong, URL-safe key that is handed back to the caller.
    new_key = secrets.token_urlsafe(32)

    # Only a SHA-256 digest of the key should ever be persisted — never the
    # plain key itself.
    key_digest = hashlib.sha256(new_key.encode()).hexdigest()

    # In production, store key_digest in the database linked to user_id.

    return new_key
|
| 276 |
+
|
| 277 |
+
def verify_api_key(api_key: str) -> Optional[str]:
    """Verify API key and return user_id"""
    import hashlib

    digest = hashlib.sha256(api_key.encode()).hexdigest()

    # Placeholder: a real implementation would look up `digest` in the
    # database and return the owning user_id on a match.

    return None  # Return user_id if found
|
services/supabase_client.py
ADDED
|
@@ -0,0 +1,42 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Supabase client service
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
from supabase import create_client, Client
|
| 6 |
+
from functools import lru_cache
|
| 7 |
+
import logging
|
| 8 |
+
|
| 9 |
+
from config.settings import get_settings
|
| 10 |
+
|
| 11 |
+
logger = logging.getLogger(__name__)
|
| 12 |
+
settings = get_settings()
|
| 13 |
+
|
| 14 |
+
@lru_cache()
def get_supabase_client() -> Client:
    """Get Supabase client instance"""
    # lru_cache turns this into a lazily-initialized singleton: the client
    # is built once per process and reused by every caller.
    try:
        supabase: Client = create_client(
            settings.SUPABASE_PROJECT_URL,
            settings.SUPABASE_ANON_KEY
        )
        return supabase
    except Exception as e:
        # A bad URL/key is fatal for the service; log and re-raise so
        # startup fails loudly instead of running without a backend.
        logger.error(f"Failed to create Supabase client: {e}")
        raise
|
| 26 |
+
|
| 27 |
+
@lru_cache()
def get_supabase_admin_client() -> Client:
    """Get Supabase admin client instance"""
    # Uses the service-role key (elevated privileges); falls back to the
    # anon-key client when the privileged key is not configured.
    # NOTE(review): SUPABASE_SERVICE_ROLE_KEY is read from settings but is
    # not listed in .env.example — confirm it is defined in config/settings.py.
    try:
        if not settings.SUPABASE_SERVICE_ROLE_KEY:
            logger.warning("SUPABASE_SERVICE_ROLE_KEY not set, using anon key")
            return get_supabase_client()

        supabase: Client = create_client(
            settings.SUPABASE_PROJECT_URL,
            settings.SUPABASE_SERVICE_ROLE_KEY
        )
        return supabase
    except Exception as e:
        logger.error(f"Failed to create Supabase admin client: {e}")
        raise
|
services/websocket_manager.py
CHANGED
|
@@ -1,104 +1,169 @@
|
|
| 1 |
"""
|
| 2 |
-
WebSocket Manager for
|
| 3 |
-
Handles
|
| 4 |
"""
|
| 5 |
|
| 6 |
import asyncio
|
| 7 |
-
import logging
|
| 8 |
-
from typing import List, Dict, Any
|
| 9 |
-
from datetime import datetime
|
| 10 |
import json
|
| 11 |
-
|
|
|
|
| 12 |
from fastapi import WebSocket
|
|
|
|
|
|
|
| 13 |
|
| 14 |
logger = logging.getLogger(__name__)
|
| 15 |
|
|
|
|
| 16 |
class WebSocketManager:
|
| 17 |
-
"""Manages WebSocket connections
|
| 18 |
|
| 19 |
def __init__(self):
|
| 20 |
self.active_connections: List[WebSocket] = []
|
| 21 |
-
|
| 22 |
-
|
|
|
|
| 23 |
|
| 24 |
async def connect(self, websocket: WebSocket):
|
| 25 |
"""Accept new WebSocket connection"""
|
| 26 |
await websocket.accept()
|
| 27 |
self.active_connections.append(websocket)
|
| 28 |
-
|
| 29 |
-
|
| 30 |
-
# Send welcome message
|
| 31 |
-
await self.send_personal_message({
|
| 32 |
-
"type": "connection_established",
|
| 33 |
-
"message": "Connected to Zyon Traders real-time data stream",
|
| 34 |
-
"timestamp": datetime.utcnow().isoformat()
|
| 35 |
-
}, websocket)
|
| 36 |
|
| 37 |
def disconnect(self, websocket: WebSocket):
|
| 38 |
"""Remove WebSocket connection"""
|
| 39 |
if websocket in self.active_connections:
|
| 40 |
self.active_connections.remove(websocket)
|
| 41 |
-
|
|
|
|
|
|
|
| 42 |
|
| 43 |
-
async def
|
| 44 |
-
"""
|
| 45 |
-
|
| 46 |
-
|
| 47 |
-
|
| 48 |
-
|
| 49 |
-
|
|
|
|
|
|
|
|
|
|
| 50 |
|
| 51 |
-
async def
|
| 52 |
-
"""
|
| 53 |
-
if
|
| 54 |
-
|
| 55 |
-
|
| 56 |
-
|
| 57 |
-
|
| 58 |
-
|
| 59 |
-
|
| 60 |
-
|
| 61 |
-
await connection.send_text(message)
|
| 62 |
-
except Exception as e:
|
| 63 |
-
logger.error(f"❌ Error broadcasting to WebSocket: {e}")
|
| 64 |
-
disconnected.append(connection)
|
| 65 |
-
|
| 66 |
-
# Remove disconnected clients
|
| 67 |
-
for connection in disconnected:
|
| 68 |
-
self.disconnect(connection)
|
| 69 |
|
| 70 |
-
async def
|
| 71 |
-
"""Broadcast
|
| 72 |
-
|
| 73 |
-
"type": "
|
| 74 |
-
"
|
|
|
|
| 75 |
"timestamp": datetime.utcnow().isoformat()
|
| 76 |
})
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 77 |
|
| 78 |
-
async def
|
| 79 |
-
"""Broadcast
|
| 80 |
-
|
| 81 |
-
"type": "
|
| 82 |
-
"data":
|
| 83 |
"timestamp": datetime.utcnow().isoformat()
|
| 84 |
})
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 85 |
|
| 86 |
-
async def
|
| 87 |
-
"""
|
| 88 |
-
|
| 89 |
-
|
| 90 |
-
|
| 91 |
-
|
| 92 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 93 |
|
| 94 |
-
def
|
| 95 |
-
"""
|
| 96 |
-
|
|
|
|
|
|
|
| 97 |
|
| 98 |
-
def
|
| 99 |
-
"""
|
| 100 |
-
|
| 101 |
-
|
| 102 |
-
|
| 103 |
-
|
| 104 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
"""
|
| 2 |
+
WebSocket Manager for Zyon Traders Backend
|
| 3 |
+
Handles real-time data distribution to connected clients
|
| 4 |
"""
|
| 5 |
|
| 6 |
import asyncio
|
|
|
|
|
|
|
|
|
|
| 7 |
import json
|
| 8 |
+
import logging
|
| 9 |
+
from typing import Dict, List, Set
|
| 10 |
from fastapi import WebSocket
|
| 11 |
+
from datetime import datetime
|
| 12 |
+
import random
|
| 13 |
|
| 14 |
logger = logging.getLogger(__name__)
|
| 15 |
|
| 16 |
+
|
| 17 |
class WebSocketManager:
    """Manages WebSocket connections and real-time data distribution.

    Tracks active connections, per-connection symbol subscriptions, and a
    background task that pushes simulated market data to subscribers.
    """

    def __init__(self):
        self.active_connections: List[WebSocket] = []
        # Per-connection set of symbols the client wants updates for.
        self.subscriptions: Dict[WebSocket, Set[str]] = {}
        # Fix: was annotated `asyncio.Task = None` — None is not a Task;
        # the attribute is optional until start_background_tasks runs.
        self.background_task: asyncio.Task | None = None
        self.running = False

    async def connect(self, websocket: WebSocket):
        """Accept new WebSocket connection"""
        await websocket.accept()
        self.active_connections.append(websocket)
        self.subscriptions[websocket] = set()
        logger.info(f"New WebSocket connection. Total: {len(self.active_connections)}")

    def disconnect(self, websocket: WebSocket):
        """Remove WebSocket connection"""
        if websocket in self.active_connections:
            self.active_connections.remove(websocket)
        if websocket in self.subscriptions:
            del self.subscriptions[websocket]
        logger.info(f"WebSocket disconnected. Total: {len(self.active_connections)}")

    async def subscribe_to_symbols(self, websocket: WebSocket, symbols: List[str]):
        """Subscribe to specific symbols for real-time updates"""
        if websocket in self.subscriptions:
            self.subscriptions[websocket].update(symbols)
            await websocket.send_text(json.dumps({
                "type": "subscription_success",
                "symbols": symbols,
                "timestamp": datetime.utcnow().isoformat()
            }))
            logger.info(f"WebSocket subscribed to symbols: {symbols}")

    async def unsubscribe_from_symbols(self, websocket: WebSocket, symbols: List[str]):
        """Unsubscribe from specific symbols"""
        if websocket in self.subscriptions:
            self.subscriptions[websocket].difference_update(symbols)
            await websocket.send_text(json.dumps({
                "type": "unsubscription_success",
                "symbols": symbols,
                "timestamp": datetime.utcnow().isoformat()
            }))
            logger.info(f"WebSocket unsubscribed from symbols: {symbols}")

    async def broadcast_to_symbol_subscribers(self, symbol: str, data: dict):
        """Broadcast data to all clients subscribed to a specific symbol"""
        message = json.dumps({
            "type": "market_data",
            "symbol": symbol,
            "data": data,
            "timestamp": datetime.utcnow().isoformat()
        })

        disconnected = []
        for websocket in self.active_connections:
            if symbol in self.subscriptions.get(websocket, set()):
                try:
                    await websocket.send_text(message)
                except Exception as e:
                    logger.error(f"Error sending data to WebSocket: {e}")
                    disconnected.append(websocket)

        # Clean up disconnected clients (after the loop, so we never
        # mutate active_connections while iterating it).
        for websocket in disconnected:
            self.disconnect(websocket)

    async def broadcast_to_all(self, data: dict):
        """Broadcast data to all connected clients"""
        message = json.dumps({
            "type": "broadcast",
            "data": data,
            "timestamp": datetime.utcnow().isoformat()
        })

        disconnected = []
        for websocket in self.active_connections:
            try:
                await websocket.send_text(message)
            except Exception as e:
                logger.error(f"Error broadcasting to WebSocket: {e}")
                disconnected.append(websocket)

        # Clean up disconnected clients
        for websocket in disconnected:
            self.disconnect(websocket)

    async def simulate_market_data(self):
        """Simulate real-time market data updates"""
        symbols = ["NIFTY", "SENSEX", "BANKNIFTY", "RELIANCE", "TCS", "INFY", "HDFC"]
        # Hoisted out of the loop: the reference prices are constant, so
        # there is no need to rebuild this dict for every symbol each tick.
        base_prices = {
            "NIFTY": 22000,
            "SENSEX": 72000,
            "BANKNIFTY": 47000,
            "RELIANCE": 2500,
            "TCS": 3800,
            "INFY": 1700,
            "HDFC": 1600,
        }

        while self.running:
            try:
                # Simulate a realistic price movement for each symbol.
                for symbol in symbols:
                    base_price = base_prices.get(symbol, 1000)
                    change_percent = random.uniform(-2, 2)
                    price = base_price * (1 + change_percent / 100)

                    data = {
                        "price": round(price, 2),
                        "change": round(base_price * change_percent / 100, 2),
                        "change_percent": round(change_percent, 2),
                        "volume": random.randint(100000, 10000000),
                        "last_updated": datetime.utcnow().isoformat()
                    }

                    await self.broadcast_to_symbol_subscribers(symbol, data)

                # Wait before next update
                await asyncio.sleep(2)  # Update every 2 seconds

            except Exception as e:
                logger.error(f"Error in market data simulation: {e}")
                await asyncio.sleep(5)

    async def start_background_tasks(self):
        """Start background tasks for data simulation"""
        self.running = True
        self.background_task = asyncio.create_task(self.simulate_market_data())
        logger.info("WebSocket background tasks started")

    async def cleanup(self):
        """Cleanup resources"""
        self.running = False
        if self.background_task:
            self.background_task.cancel()
            try:
                await self.background_task
            except asyncio.CancelledError:
                pass

        # Close all connections best-effort; a peer may already be gone.
        for websocket in self.active_connections:
            try:
                await websocket.close()
            except Exception:
                pass

        self.active_connections.clear()
        self.subscriptions.clear()
        logger.info("WebSocket manager cleaned up")
|
utils/logging_config.py
ADDED
|
@@ -0,0 +1,84 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Logging configuration for Zyon Traders Backend
|
| 3 |
+
Production-ready logging setup for Render.com deployment
|
| 4 |
+
"""
|
| 5 |
+
|
| 6 |
+
import logging
|
| 7 |
+
import logging.config
|
| 8 |
+
import os
|
| 9 |
+
from typing import Dict, Any
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
def setup_logging():
    """Setup logging configuration for the application.

    Logs to stdout only — the correct sink for container platforms
    (Render, HF Spaces), which capture stdout. The level comes from the
    LOG_LEVEL env var (default INFO).
    """

    # Determine log level from environment
    log_level = os.getenv("LOG_LEVEL", "INFO").upper()

    # Logging configuration
    config: Dict[str, Any] = {
        "version": 1,
        "disable_existing_loggers": False,
        "formatters": {
            "detailed": {
                "format": "%(asctime)s - %(name)s - %(levelname)s - %(message)s",
                "datefmt": "%Y-%m-%d %H:%M:%S"
            },
            "simple": {
                "format": "%(levelname)s - %(message)s"
            },
            # NOTE(review): not actual JSON output, just a space-separated
            # line — use python-json-logger if structured logs are required.
            "json": {
                "format": "%(asctime)s %(name)s %(levelname)s %(message)s",
                "datefmt": "%Y-%m-%d %H:%M:%S"
            }
        },
        "handlers": {
            "console": {
                "class": "logging.StreamHandler",
                "level": log_level,
                "formatter": "detailed",
                "stream": "ext://sys.stdout"
            }
            # Fix: removed the unused "file" handler. dictConfig instantiates
            # every handler listed, so it opened ./app.log at startup even
            # though no logger referenced it — and that open() crashes the
            # process on read-only container filesystems.
        },
        "root": {
            "level": log_level,
            "handlers": ["console"]
        },
        "loggers": {
            "uvicorn": {
                "level": "INFO",
                "handlers": ["console"],
                "propagate": False
            },
            "uvicorn.error": {
                "level": "INFO",
                "handlers": ["console"],
                "propagate": False
            },
            "uvicorn.access": {
                "level": "INFO",
                "handlers": ["console"],
                "propagate": False
            },
            "fastapi": {
                "level": "INFO",
                "handlers": ["console"],
                "propagate": False
            }
        }
    }

    # Apply configuration
    logging.config.dictConfig(config)

    # Set third-party loggers to WARNING to reduce noise
    logging.getLogger("httpx").setLevel(logging.WARNING)
    logging.getLogger("httpcore").setLevel(logging.WARNING)
    logging.getLogger("urllib3").setLevel(logging.WARNING)
|