Commit Β·
1cb2a65
1
Parent(s): 2c86277
refactor(ans): Restructure ANS microservice architecture and configuration
Browse files- Reorganize project structure with new directories for models, repositories, and services
- Add comprehensive README.md with service documentation and setup instructions
- Create .env.example with detailed environment configuration
- Update .gitignore to include comprehensive Python and development exclusions
- Modify app.py to include logging setup and lifespan context manager
- Remove deprecated service and route files
- Add client modules for inter-service communication
- Implement utility modules for database and health checks
- Enhance project configuration and dependency management
- .env.example +22 -0
- .gitignore +145 -0
- README.md +85 -10
- app/app.py +54 -14
- app/cache.py +23 -0
- app/clients/__init__.py +7 -0
- app/clients/crm_client.py +116 -0
- app/clients/mpms_client.py +104 -0
- app/clients/rms_client.py +109 -0
- app/clients/tms_client.py +130 -0
- app/dependencies/auth.py +49 -36
- app/insightfy_utils-0.1.0-py3-none-any.whl +0 -0
- app/main.py +142 -0
- app/model/appointment_analytics.py +0 -71
- app/model/customer_analytics.py +0 -56
- app/models/__init__.py +1 -0
- app/nosql.py +32 -52
- app/repositories/__init__.py +1 -0
- app/routers/__init__.py +1 -0
- app/routers/analytics_router.py +73 -121
- app/routers/auth_route.py +0 -30
- app/schemas/__init__.py +1 -0
- app/services/__init__.py +1 -0
- app/services/appointment_analytics_service.py +0 -76
- app/services/customer_analytics_service.py +0 -71
- app/sql.py +67 -0
- app/util/dbutil.py +0 -48
- app/utils/db_utils.py +126 -0
- app/utils/health_utils.py +64 -0
- app/{util β utils}/jwt.py +14 -6
- requirements.txt +30 -8
- settings.py +31 -80
.env.example
ADDED
|
@@ -0,0 +1,22 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# ANS Environment Variables
|
| 2 |
+
|
| 3 |
+
# Database Configuration
|
| 4 |
+
DATABASE_URL=postgresql+asyncpg://user:password@localhost:5432/ans_db
|
| 5 |
+
REDIS_URL=redis://localhost:6379/0
|
| 6 |
+
MONGODB_URL=mongodb://localhost:27017/ans_db
|
| 7 |
+
|
| 8 |
+
# Service URLs for Inter-Service Communication
|
| 9 |
+
MPMS_BASE_URL=http://localhost:8001
|
| 10 |
+
RMS_BASE_URL=http://localhost:8002
|
| 11 |
+
TMS_BASE_URL=http://localhost:8003
|
| 12 |
+
CRM_BASE_URL=http://localhost:8004
|
| 13 |
+
|
| 14 |
+
# Service Configuration
|
| 15 |
+
SERVICE_NAME=ans
|
| 16 |
+
SERVICE_PORT=8005
|
| 17 |
+
LOG_LEVEL=INFO
|
| 18 |
+
|
| 19 |
+
# JWT Configuration
|
| 20 |
+
JWT_SECRET_KEY=your-secret-key
|
| 21 |
+
JWT_ALGORITHM=HS256
|
| 22 |
+
JWT_EXPIRATION_MINUTES=1440
|
.gitignore
ADDED
|
@@ -0,0 +1,145 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Byte-compiled / optimized / DLL files
|
| 2 |
+
__pycache__/
|
| 3 |
+
*.py[cod]
|
| 4 |
+
*$py.class
|
| 5 |
+
|
| 6 |
+
# C extensions
|
| 7 |
+
*.so
|
| 8 |
+
|
| 9 |
+
# Distribution / packaging
|
| 10 |
+
.Python
|
| 11 |
+
build/
|
| 12 |
+
develop-eggs/
|
| 13 |
+
dist/
|
| 14 |
+
downloads/
|
| 15 |
+
eggs/
|
| 16 |
+
.eggs/
|
| 17 |
+
lib/
|
| 18 |
+
lib64/
|
| 19 |
+
parts/
|
| 20 |
+
sdist/
|
| 21 |
+
var/
|
| 22 |
+
wheels/
|
| 23 |
+
pip-wheel-metadata/
|
| 24 |
+
share/python-wheels/
|
| 25 |
+
*.egg-info/
|
| 26 |
+
.installed.cfg
|
| 27 |
+
*.egg
|
| 28 |
+
MANIFEST
|
| 29 |
+
|
| 30 |
+
# PyInstaller
|
| 31 |
+
# Usually these files are written by a python script from a template
|
| 32 |
+
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
| 33 |
+
*.manifest
|
| 34 |
+
*.spec
|
| 35 |
+
|
| 36 |
+
# Installer logs
|
| 37 |
+
pip-log.txt
|
| 38 |
+
pip-delete-this-directory.txt
|
| 39 |
+
|
| 40 |
+
# Unit test / coverage reports
|
| 41 |
+
htmlcov/
|
| 42 |
+
.tox/
|
| 43 |
+
.nox/
|
| 44 |
+
.coverage
|
| 45 |
+
.coverage.*
|
| 46 |
+
.cache
|
| 47 |
+
nosetests.xml
|
| 48 |
+
coverage.xml
|
| 49 |
+
*.cover
|
| 50 |
+
*.py,cover
|
| 51 |
+
.hypothesis/
|
| 52 |
+
.pytest_cache/
|
| 53 |
+
|
| 54 |
+
# Translations
|
| 55 |
+
*.mo
|
| 56 |
+
*.pot
|
| 57 |
+
|
| 58 |
+
# Django stuff:
|
| 59 |
+
*.log
|
| 60 |
+
local_settings.py
|
| 61 |
+
db.sqlite3
|
| 62 |
+
db.sqlite3-journal
|
| 63 |
+
|
| 64 |
+
# Flask stuff:
|
| 65 |
+
instance/
|
| 66 |
+
.webassets-cache
|
| 67 |
+
|
| 68 |
+
# Scrapy stuff:
|
| 69 |
+
.scrapy
|
| 70 |
+
|
| 71 |
+
# Sphinx documentation
|
| 72 |
+
docs/_build/
|
| 73 |
+
|
| 74 |
+
# PyBuilder
|
| 75 |
+
target/
|
| 76 |
+
|
| 77 |
+
# Jupyter Notebook
|
| 78 |
+
.ipynb_checkpoints
|
| 79 |
+
|
| 80 |
+
# IPython
|
| 81 |
+
profile_default/
|
| 82 |
+
ipython_config.py
|
| 83 |
+
|
| 84 |
+
# pyenv
|
| 85 |
+
.python-version
|
| 86 |
+
|
| 87 |
+
# pipenv
|
| 88 |
+
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
|
| 89 |
+
# However, in case of collaboration, if having platform-specific dependencies or dependencies
|
| 90 |
+
# having no cross-platform support, pipenv may install dependencies that don't work, or not
|
| 91 |
+
# install all needed dependencies.
|
| 92 |
+
#Pipfile.lock
|
| 93 |
+
|
| 94 |
+
# PEP 582; used by e.g. github.com/David-OConnor/pyflow
|
| 95 |
+
__pypackages__/
|
| 96 |
+
|
| 97 |
+
# Celery stuff
|
| 98 |
+
celerybeat-schedule
|
| 99 |
+
celerybeat.pid
|
| 100 |
+
|
| 101 |
+
# SageMath parsed files
|
| 102 |
+
*.sage.py
|
| 103 |
+
|
| 104 |
+
# Environments
|
| 105 |
+
.env
|
| 106 |
+
.venv
|
| 107 |
+
env/
|
| 108 |
+
venv/
|
| 109 |
+
ENV/
|
| 110 |
+
env.bak/
|
| 111 |
+
venv.bak/
|
| 112 |
+
|
| 113 |
+
# Spyder project settings
|
| 114 |
+
.spyderproject
|
| 115 |
+
.spyproject
|
| 116 |
+
|
| 117 |
+
# Rope project settings
|
| 118 |
+
.ropeproject
|
| 119 |
+
|
| 120 |
+
# mkdocs documentation
|
| 121 |
+
/site
|
| 122 |
+
|
| 123 |
+
# mypy
|
| 124 |
+
.mypy_cache/
|
| 125 |
+
.dmypy.json
|
| 126 |
+
dmypy.json
|
| 127 |
+
|
| 128 |
+
# Pyre type checker
|
| 129 |
+
.pyre/
|
| 130 |
+
|
| 131 |
+
# IDE
|
| 132 |
+
.vscode/
|
| 133 |
+
.idea/
|
| 134 |
+
*.swp
|
| 135 |
+
*.swo
|
| 136 |
+
*~
|
| 137 |
+
|
| 138 |
+
# OS
|
| 139 |
+
.DS_Store
|
| 140 |
+
.DS_Store?
|
| 141 |
+
._*
|
| 142 |
+
.Spotlight-V100
|
| 143 |
+
.Trashes
|
| 144 |
+
ehthumbs.db
|
| 145 |
+
Thumbs.db
|
README.md
CHANGED
|
@@ -1,10 +1,85 @@
|
|
| 1 |
-
|
| 2 |
-
|
| 3 |
-
|
| 4 |
-
|
| 5 |
-
|
| 6 |
-
|
| 7 |
-
|
| 8 |
-
-
|
| 9 |
-
|
| 10 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Analytics and Notification Service (ANS)
|
| 2 |
+
|
| 3 |
+
The Analytics and Notification Service (ANS) is a microservice that provides business intelligence, analytics, and notification capabilities for the Insightfy Bloom platform.
|
| 4 |
+
|
| 5 |
+
## Features
|
| 6 |
+
|
| 7 |
+
- **Analytics Dashboard**: Real-time business metrics and KPIs
|
| 8 |
+
- **Reporting**: Generate various business reports
|
| 9 |
+
- **Data Aggregation**: Collect and process data from other microservices
|
| 10 |
+
- **Notifications**: Send alerts and notifications based on business rules
|
| 11 |
+
- **Performance Monitoring**: Track business performance indicators
|
| 12 |
+
|
| 13 |
+
## Architecture
|
| 14 |
+
|
| 15 |
+
ANS integrates with other microservices:
|
| 16 |
+
- **MPMS**: Product and merchant data
|
| 17 |
+
- **RMS**: Resource and employee data
|
| 18 |
+
- **TMS**: Transaction and sales data
|
| 19 |
+
- **CRM**: Customer relationship data
|
| 20 |
+
|
| 21 |
+
## Database Connections
|
| 22 |
+
|
| 23 |
+
- **PostgreSQL**: Primary database for analytics data
|
| 24 |
+
- **MongoDB**: Document storage for flexible analytics
|
| 25 |
+
- **Redis**: Caching and session management
|
| 26 |
+
|
| 27 |
+
## API Endpoints
|
| 28 |
+
|
| 29 |
+
### Health Checks
|
| 30 |
+
- `GET /health` - Service health status
|
| 31 |
+
- `GET /ready` - Readiness check
|
| 32 |
+
- `GET /live` - Liveness check
|
| 33 |
+
|
| 34 |
+
### Analytics
|
| 35 |
+
- `GET /api/v1/analytics/dashboard` - Dashboard data
|
| 36 |
+
- `GET /api/v1/analytics/reports` - Generate reports
|
| 37 |
+
|
| 38 |
+
## Environment Variables
|
| 39 |
+
|
| 40 |
+
Copy `.env.example` to `.env` and configure:
|
| 41 |
+
|
| 42 |
+
```bash
|
| 43 |
+
# Database Configuration
|
| 44 |
+
DATABASE_URL=postgresql+asyncpg://user:password@localhost:5432/ans_db
|
| 45 |
+
REDIS_URL=redis://localhost:6379/0
|
| 46 |
+
MONGODB_URL=mongodb://localhost:27017/ans_db
|
| 47 |
+
|
| 48 |
+
# Service URLs
|
| 49 |
+
MPMS_BASE_URL=http://localhost:8001
|
| 50 |
+
RMS_BASE_URL=http://localhost:8002
|
| 51 |
+
TMS_BASE_URL=http://localhost:8003
|
| 52 |
+
CRM_BASE_URL=http://localhost:8004
|
| 53 |
+
|
| 54 |
+
# Service Configuration
|
| 55 |
+
SERVICE_NAME=ans
|
| 56 |
+
SERVICE_PORT=8005
|
| 57 |
+
LOG_LEVEL=INFO
|
| 58 |
+
|
| 59 |
+
# JWT Configuration
|
| 60 |
+
JWT_SECRET_KEY=your-secret-key
|
| 61 |
+
JWT_ALGORITHM=HS256
|
| 62 |
+
```
|
| 63 |
+
|
| 64 |
+
## Running the Service
|
| 65 |
+
|
| 66 |
+
```bash
|
| 67 |
+
# Install dependencies
|
| 68 |
+
pip install -r requirements.txt
|
| 69 |
+
|
| 70 |
+
# Run the service
|
| 71 |
+
uvicorn app.main:app --host 0.0.0.0 --port 8005 --reload
|
| 72 |
+
```
|
| 73 |
+
|
| 74 |
+
## Development
|
| 75 |
+
|
| 76 |
+
The service follows the standard microservice architecture:
|
| 77 |
+
|
| 78 |
+
- `app/routers/` - API route handlers
|
| 79 |
+
- `app/services/` - Business logic
|
| 80 |
+
- `app/repositories/` - Data access layer
|
| 81 |
+
- `app/models/` - Database models
|
| 82 |
+
- `app/schemas/` - Pydantic schemas
|
| 83 |
+
- `app/clients/` - Inter-service communication
|
| 84 |
+
- `app/dependencies/` - FastAPI dependencies (auth, etc.)
|
| 85 |
+
- `app/utils/` - Utility functions
|
app/app.py
CHANGED
|
@@ -1,33 +1,73 @@
|
|
|
|
|
| 1 |
from fastapi import FastAPI
|
| 2 |
from fastapi.middleware.cors import CORSMiddleware
|
|
|
|
| 3 |
|
| 4 |
from app.routers.analytics_router import router as analytics_router
|
| 5 |
#from app.routers.auth_route import router as auth_router
|
| 6 |
|
| 7 |
-
|
| 8 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 9 |
version="1.0",
|
| 10 |
-
description="API for
|
|
|
|
| 11 |
)
|
| 12 |
|
|
|
|
| 13 |
app.add_middleware(
|
| 14 |
CORSMiddleware,
|
| 15 |
-
allow_origins=["*"],
|
| 16 |
allow_credentials=True,
|
| 17 |
allow_methods=["*"],
|
| 18 |
allow_headers=["*"],
|
| 19 |
)
|
| 20 |
|
| 21 |
-
|
| 22 |
-
|
| 23 |
-
prefix="/api/v1/analytics",
|
| 24 |
-
tags=["Analytics"]
|
| 25 |
-
)
|
| 26 |
|
| 27 |
-
#
|
| 28 |
@app.get("/", tags=["Health"])
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 29 |
async def health_check():
|
| 30 |
-
"""
|
| 31 |
-
|
| 32 |
-
|
| 33 |
-
|
|
|
|
|
|
|
|
|
| 1 |
+
from contextlib import asynccontextmanager
|
| 2 |
from fastapi import FastAPI
|
| 3 |
from fastapi.middleware.cors import CORSMiddleware
|
| 4 |
+
from insightfy_utils.logging import setup_logging, get_logger
|
| 5 |
|
| 6 |
from app.routers.analytics_router import router as analytics_router
|
| 7 |
#from app.routers.auth_route import router as auth_router
|
| 8 |
|
| 9 |
+
# Setup logging at module level
|
| 10 |
+
setup_logging(level="INFO", format_type="json", app_name="insightfy-bloom-ms-ans")
|
| 11 |
+
logger = get_logger(__name__)
|
| 12 |
+
|
| 13 |
+
@asynccontextmanager
|
| 14 |
+
async def lifespan(app: FastAPI):
|
| 15 |
+
"""Application lifespan manager."""
|
| 16 |
+
# Startup
|
| 17 |
+
from app.sql import connect_to_database
|
| 18 |
+
from app.nosql import connect_stores
|
| 19 |
+
|
| 20 |
+
logger.info("Starting ANS application")
|
| 21 |
+
await connect_to_database()
|
| 22 |
+
await connect_stores()
|
| 23 |
+
logger.info("Database connections established")
|
| 24 |
+
|
| 25 |
+
yield
|
| 26 |
+
|
| 27 |
+
# Shutdown
|
| 28 |
+
from app.sql import disconnect_from_database
|
| 29 |
+
from app.nosql import disconnect_stores
|
| 30 |
+
|
| 31 |
+
logger.info("Shutting down ANS application")
|
| 32 |
+
await disconnect_from_database()
|
| 33 |
+
await disconnect_stores()
|
| 34 |
+
logger.info("Application shutdown complete")
|
| 35 |
+
|
| 36 |
+
# Initialize FastAPI application
|
| 37 |
+
app = FastAPI(
|
| 38 |
+
title="Analytics and Notification Service API",
|
| 39 |
version="1.0",
|
| 40 |
+
description="Analytics and Notification Service API for business intelligence and alerts",
|
| 41 |
+
lifespan=lifespan
|
| 42 |
)
|
| 43 |
|
| 44 |
+
# CORS configuration
|
| 45 |
app.add_middleware(
|
| 46 |
CORSMiddleware,
|
| 47 |
+
allow_origins=["*"],
|
| 48 |
allow_credentials=True,
|
| 49 |
allow_methods=["*"],
|
| 50 |
allow_headers=["*"],
|
| 51 |
)
|
| 52 |
|
| 53 |
+
# Register routers
|
| 54 |
+
app.include_router(analytics_router, prefix="/api/v1/analytics", tags=["Analytics"])
|
|
|
|
|
|
|
|
|
|
| 55 |
|
| 56 |
+
# Health check endpoints
|
| 57 |
@app.get("/", tags=["Health"])
|
| 58 |
+
async def root():
|
| 59 |
+
"""Root endpoint - API information."""
|
| 60 |
+
return {
|
| 61 |
+
"service": "Analytics and Notification Service API",
|
| 62 |
+
"version": "1.0",
|
| 63 |
+
"status": "running"
|
| 64 |
+
}
|
| 65 |
+
|
| 66 |
+
@app.get("/health", tags=["Health"])
|
| 67 |
async def health_check():
|
| 68 |
+
"""Health check endpoint."""
|
| 69 |
+
return {
|
| 70 |
+
"status": "healthy",
|
| 71 |
+
"service": "insightfy-bloom-ms-ans",
|
| 72 |
+
"version": "1.0"
|
| 73 |
+
}
|
app/cache.py
ADDED
|
@@ -0,0 +1,23 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from typing import Any
|
| 2 |
+
from app.nosql import redis_client
|
| 3 |
+
import json
|
| 4 |
+
from insightfy_utils.logging import get_logger
|
| 5 |
+
|
| 6 |
+
logger = get_logger(__name__)
|
| 7 |
+
|
| 8 |
+
CACHE_EXPIRY_SECONDS = 3600
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
async def get_or_set_cache(key: str, fetch_func, expiry: int = CACHE_EXPIRY_SECONDS) -> Any:
|
| 12 |
+
"""
|
| 13 |
+
Retrieve data from Redis cache or execute a function to fetch it.
|
| 14 |
+
"""
|
| 15 |
+
cached_data = await redis_client.get(key)
|
| 16 |
+
if cached_data:
|
| 17 |
+
logger.info("Cache hit", extra={"key": key})
|
| 18 |
+
return json.loads(cached_data)
|
| 19 |
+
|
| 20 |
+
logger.info("Cache miss", extra={"key": key})
|
| 21 |
+
data = await fetch_func()
|
| 22 |
+
await redis_client.set(key, json.dumps(data), ex=expiry)
|
| 23 |
+
return data
|
app/clients/__init__.py
ADDED
|
@@ -0,0 +1,7 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Service clients for inter-service communication."""
|
| 2 |
+
from app.clients.mpms_client import MPMSClient
|
| 3 |
+
from app.clients.rms_client import RMSClient
|
| 4 |
+
from app.clients.tms_client import TMSClient
|
| 5 |
+
from app.clients.crm_client import CRMClient
|
| 6 |
+
|
| 7 |
+
__all__ = ["MPMSClient", "RMSClient", "TMSClient", "CRMClient"]
|
app/clients/crm_client.py
ADDED
|
@@ -0,0 +1,116 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
CRM (Customer Relationship Management) client for ANS.
|
| 3 |
+
"""
|
| 4 |
+
from insightfy_utils.http.clients import ServiceClient
|
| 5 |
+
from insightfy_utils.logging import get_logger
|
| 6 |
+
from typing import Optional, Dict, Any, List
|
| 7 |
+
import os
|
| 8 |
+
|
| 9 |
+
logger = get_logger(__name__)
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
class CRMClient:
|
| 13 |
+
"""Client for communicating with CRM."""
|
| 14 |
+
|
| 15 |
+
def __init__(self):
|
| 16 |
+
base_url = os.getenv("CRM_BASE_URL", "http://localhost:8004")
|
| 17 |
+
self.client = ServiceClient(
|
| 18 |
+
base_url=base_url,
|
| 19 |
+
service_name="crm",
|
| 20 |
+
timeout=30.0,
|
| 21 |
+
max_retries=3
|
| 22 |
+
)
|
| 23 |
+
|
| 24 |
+
async def get_customer_data(
|
| 25 |
+
self,
|
| 26 |
+
merchant_id: str,
|
| 27 |
+
customer_id: Optional[str] = None,
|
| 28 |
+
correlation_id: Optional[str] = None
|
| 29 |
+
) -> List[Dict[str, Any]]:
|
| 30 |
+
"""Get customer data from CRM for analytics."""
|
| 31 |
+
try:
|
| 32 |
+
params = {"merchant_id": merchant_id}
|
| 33 |
+
if customer_id:
|
| 34 |
+
params["customer_id"] = customer_id
|
| 35 |
+
|
| 36 |
+
response = await self.client.get(
|
| 37 |
+
"/api/v1/customers",
|
| 38 |
+
params=params,
|
| 39 |
+
correlation_id=correlation_id
|
| 40 |
+
)
|
| 41 |
+
return response.json().get("data", [])
|
| 42 |
+
except Exception as e:
|
| 43 |
+
logger.error(
|
| 44 |
+
"Failed to get customer data from CRM",
|
| 45 |
+
extra={
|
| 46 |
+
"merchant_id": merchant_id,
|
| 47 |
+
"customer_id": customer_id,
|
| 48 |
+
"correlation_id": correlation_id
|
| 49 |
+
},
|
| 50 |
+
exc_info=e
|
| 51 |
+
)
|
| 52 |
+
raise
|
| 53 |
+
|
| 54 |
+
async def get_campaign_data(
|
| 55 |
+
self,
|
| 56 |
+
merchant_id: str,
|
| 57 |
+
start_date: str,
|
| 58 |
+
end_date: str,
|
| 59 |
+
correlation_id: Optional[str] = None
|
| 60 |
+
) -> List[Dict[str, Any]]:
|
| 61 |
+
"""Get campaign data from CRM for analytics."""
|
| 62 |
+
try:
|
| 63 |
+
params = {
|
| 64 |
+
"merchant_id": merchant_id,
|
| 65 |
+
"start_date": start_date,
|
| 66 |
+
"end_date": end_date
|
| 67 |
+
}
|
| 68 |
+
|
| 69 |
+
response = await self.client.get(
|
| 70 |
+
"/api/v1/campaigns",
|
| 71 |
+
params=params,
|
| 72 |
+
correlation_id=correlation_id
|
| 73 |
+
)
|
| 74 |
+
return response.json().get("data", [])
|
| 75 |
+
except Exception as e:
|
| 76 |
+
logger.error(
|
| 77 |
+
"Failed to get campaign data from CRM",
|
| 78 |
+
extra={
|
| 79 |
+
"merchant_id": merchant_id,
|
| 80 |
+
"start_date": start_date,
|
| 81 |
+
"end_date": end_date,
|
| 82 |
+
"correlation_id": correlation_id
|
| 83 |
+
},
|
| 84 |
+
exc_info=e
|
| 85 |
+
)
|
| 86 |
+
raise
|
| 87 |
+
|
| 88 |
+
async def get_loyalty_data(
|
| 89 |
+
self,
|
| 90 |
+
merchant_id: str,
|
| 91 |
+
correlation_id: Optional[str] = None
|
| 92 |
+
) -> List[Dict[str, Any]]:
|
| 93 |
+
"""Get loyalty program data from CRM for analytics."""
|
| 94 |
+
try:
|
| 95 |
+
params = {"merchant_id": merchant_id}
|
| 96 |
+
|
| 97 |
+
response = await self.client.get(
|
| 98 |
+
"/api/v1/loyalty",
|
| 99 |
+
params=params,
|
| 100 |
+
correlation_id=correlation_id
|
| 101 |
+
)
|
| 102 |
+
return response.json().get("data", [])
|
| 103 |
+
except Exception as e:
|
| 104 |
+
logger.error(
|
| 105 |
+
"Failed to get loyalty data from CRM",
|
| 106 |
+
extra={
|
| 107 |
+
"merchant_id": merchant_id,
|
| 108 |
+
"correlation_id": correlation_id
|
| 109 |
+
},
|
| 110 |
+
exc_info=e
|
| 111 |
+
)
|
| 112 |
+
raise
|
| 113 |
+
|
| 114 |
+
async def close(self):
|
| 115 |
+
"""Close the CRM client."""
|
| 116 |
+
await self.client.close()
|
app/clients/mpms_client.py
ADDED
|
@@ -0,0 +1,104 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
MPMS (Merchant/Product Management Service) client for ANS.
|
| 3 |
+
"""
|
| 4 |
+
from insightfy_utils.http.clients import ServiceClient
|
| 5 |
+
from insightfy_utils.logging import get_logger
|
| 6 |
+
from typing import Optional, Dict, Any, List
|
| 7 |
+
import os
|
| 8 |
+
|
| 9 |
+
logger = get_logger(__name__)
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
class MPMSClient:
|
| 13 |
+
"""Client for communicating with MPMS."""
|
| 14 |
+
|
| 15 |
+
def __init__(self):
|
| 16 |
+
base_url = os.getenv("MPMS_BASE_URL", "http://localhost:8001")
|
| 17 |
+
self.client = ServiceClient(
|
| 18 |
+
base_url=base_url,
|
| 19 |
+
service_name="mpms",
|
| 20 |
+
timeout=30.0,
|
| 21 |
+
max_retries=3
|
| 22 |
+
)
|
| 23 |
+
|
| 24 |
+
async def get_product(
|
| 25 |
+
self,
|
| 26 |
+
merchant_id: str,
|
| 27 |
+
catalogue_id: str,
|
| 28 |
+
correlation_id: Optional[str] = None
|
| 29 |
+
) -> Dict[str, Any]:
|
| 30 |
+
"""Get product details from MPMS."""
|
| 31 |
+
try:
|
| 32 |
+
response = await self.client.get(
|
| 33 |
+
f"/api/v1/products/{catalogue_id}",
|
| 34 |
+
params={"merchant_id": merchant_id},
|
| 35 |
+
correlation_id=correlation_id
|
| 36 |
+
)
|
| 37 |
+
return response.json()
|
| 38 |
+
except Exception as e:
|
| 39 |
+
logger.error(
|
| 40 |
+
"Failed to get product from MPMS",
|
| 41 |
+
extra={
|
| 42 |
+
"merchant_id": merchant_id,
|
| 43 |
+
"catalogue_id": catalogue_id,
|
| 44 |
+
"correlation_id": correlation_id
|
| 45 |
+
},
|
| 46 |
+
exc_info=e
|
| 47 |
+
)
|
| 48 |
+
raise
|
| 49 |
+
|
| 50 |
+
async def get_products_batch(
|
| 51 |
+
self,
|
| 52 |
+
merchant_id: str,
|
| 53 |
+
catalogue_ids: List[str],
|
| 54 |
+
correlation_id: Optional[str] = None
|
| 55 |
+
) -> List[Dict[str, Any]]:
|
| 56 |
+
"""Get multiple products in batch from MPMS."""
|
| 57 |
+
try:
|
| 58 |
+
response = await self.client.post(
|
| 59 |
+
"/api/v1/products/batch",
|
| 60 |
+
json={
|
| 61 |
+
"merchant_id": merchant_id,
|
| 62 |
+
"catalogue_ids": catalogue_ids
|
| 63 |
+
},
|
| 64 |
+
correlation_id=correlation_id
|
| 65 |
+
)
|
| 66 |
+
return response.json().get("data", [])
|
| 67 |
+
except Exception as e:
|
| 68 |
+
logger.error(
|
| 69 |
+
"Failed to get products batch from MPMS",
|
| 70 |
+
extra={
|
| 71 |
+
"merchant_id": merchant_id,
|
| 72 |
+
"count": len(catalogue_ids),
|
| 73 |
+
"correlation_id": correlation_id
|
| 74 |
+
},
|
| 75 |
+
exc_info=e
|
| 76 |
+
)
|
| 77 |
+
raise
|
| 78 |
+
|
| 79 |
+
async def get_merchant_info(
|
| 80 |
+
self,
|
| 81 |
+
merchant_id: str,
|
| 82 |
+
correlation_id: Optional[str] = None
|
| 83 |
+
) -> Dict[str, Any]:
|
| 84 |
+
"""Get merchant information from MPMS."""
|
| 85 |
+
try:
|
| 86 |
+
response = await self.client.get(
|
| 87 |
+
f"/api/v1/merchants/{merchant_id}",
|
| 88 |
+
correlation_id=correlation_id
|
| 89 |
+
)
|
| 90 |
+
return response.json()
|
| 91 |
+
except Exception as e:
|
| 92 |
+
logger.error(
|
| 93 |
+
"Failed to get merchant info from MPMS",
|
| 94 |
+
extra={
|
| 95 |
+
"merchant_id": merchant_id,
|
| 96 |
+
"correlation_id": correlation_id
|
| 97 |
+
},
|
| 98 |
+
exc_info=e
|
| 99 |
+
)
|
| 100 |
+
raise
|
| 101 |
+
|
| 102 |
+
async def close(self):
|
| 103 |
+
"""Close the MPMS client."""
|
| 104 |
+
await self.client.close()
|
app/clients/rms_client.py
ADDED
|
@@ -0,0 +1,109 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
RMS (Resource Management Service) client for ANS.
|
| 3 |
+
"""
|
| 4 |
+
from insightfy_utils.http.clients import ServiceClient
|
| 5 |
+
from insightfy_utils.logging import get_logger
|
| 6 |
+
from typing import Optional, Dict, Any
|
| 7 |
+
import os
|
| 8 |
+
|
| 9 |
+
logger = get_logger(__name__)
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
class RMSClient:
|
| 13 |
+
"""Client for communicating with RMS."""
|
| 14 |
+
|
| 15 |
+
def __init__(self):
|
| 16 |
+
base_url = os.getenv("RMS_BASE_URL", "http://localhost:8002")
|
| 17 |
+
self.client = ServiceClient(
|
| 18 |
+
base_url=base_url,
|
| 19 |
+
service_name="rms",
|
| 20 |
+
timeout=30.0,
|
| 21 |
+
max_retries=3
|
| 22 |
+
)
|
| 23 |
+
|
| 24 |
+
async def get_employee(
|
| 25 |
+
self,
|
| 26 |
+
merchant_id: str,
|
| 27 |
+
associate_id: str,
|
| 28 |
+
correlation_id: Optional[str] = None
|
| 29 |
+
) -> Dict[str, Any]:
|
| 30 |
+
"""Get employee details from RMS."""
|
| 31 |
+
try:
|
| 32 |
+
response = await self.client.get(
|
| 33 |
+
f"/api/v1/employees/{associate_id}",
|
| 34 |
+
params={"merchant_id": merchant_id},
|
| 35 |
+
correlation_id=correlation_id
|
| 36 |
+
)
|
| 37 |
+
return response.json()
|
| 38 |
+
except Exception as e:
|
| 39 |
+
logger.error(
|
| 40 |
+
"Failed to get employee from RMS",
|
| 41 |
+
extra={
|
| 42 |
+
"merchant_id": merchant_id,
|
| 43 |
+
"associate_id": associate_id,
|
| 44 |
+
"correlation_id": correlation_id
|
| 45 |
+
},
|
| 46 |
+
exc_info=e
|
| 47 |
+
)
|
| 48 |
+
raise
|
| 49 |
+
|
| 50 |
+
async def get_branch(
|
| 51 |
+
self,
|
| 52 |
+
merchant_id: str,
|
| 53 |
+
branch_id: str,
|
| 54 |
+
correlation_id: Optional[str] = None
|
| 55 |
+
) -> Dict[str, Any]:
|
| 56 |
+
"""Get branch details from RMS."""
|
| 57 |
+
try:
|
| 58 |
+
response = await self.client.get(
|
| 59 |
+
f"/api/v1/branches/{branch_id}",
|
| 60 |
+
params={"merchant_id": merchant_id},
|
| 61 |
+
correlation_id=correlation_id
|
| 62 |
+
)
|
| 63 |
+
return response.json()
|
| 64 |
+
except Exception as e:
|
| 65 |
+
logger.error(
|
| 66 |
+
"Failed to get branch from RMS",
|
| 67 |
+
extra={
|
| 68 |
+
"merchant_id": merchant_id,
|
| 69 |
+
"branch_id": branch_id,
|
| 70 |
+
"correlation_id": correlation_id
|
| 71 |
+
},
|
| 72 |
+
exc_info=e
|
| 73 |
+
)
|
| 74 |
+
raise
|
| 75 |
+
|
| 76 |
+
async def validate_permissions(
|
| 77 |
+
self,
|
| 78 |
+
merchant_id: str,
|
| 79 |
+
associate_id: str,
|
| 80 |
+
required_permissions: list,
|
| 81 |
+
correlation_id: Optional[str] = None
|
| 82 |
+
) -> bool:
|
| 83 |
+
"""Validate employee permissions in RMS."""
|
| 84 |
+
try:
|
| 85 |
+
response = await self.client.post(
|
| 86 |
+
"/api/v1/permissions/validate",
|
| 87 |
+
json={
|
| 88 |
+
"merchant_id": merchant_id,
|
| 89 |
+
"associate_id": associate_id,
|
| 90 |
+
"permissions": required_permissions
|
| 91 |
+
},
|
| 92 |
+
correlation_id=correlation_id
|
| 93 |
+
)
|
| 94 |
+
return response.json().get("valid", False)
|
| 95 |
+
except Exception as e:
|
| 96 |
+
logger.error(
|
| 97 |
+
"Failed to validate permissions in RMS",
|
| 98 |
+
extra={
|
| 99 |
+
"merchant_id": merchant_id,
|
| 100 |
+
"associate_id": associate_id,
|
| 101 |
+
"correlation_id": correlation_id
|
| 102 |
+
},
|
| 103 |
+
exc_info=e
|
| 104 |
+
)
|
| 105 |
+
raise
|
| 106 |
+
|
| 107 |
+
async def close(self):
|
| 108 |
+
"""Close the RMS client."""
|
| 109 |
+
await self.client.close()
|
app/clients/tms_client.py
ADDED
|
@@ -0,0 +1,130 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
TMS (Transaction Management Service) client for ANS.
|
| 3 |
+
"""
|
| 4 |
+
from insightfy_utils.http.clients import ServiceClient
|
| 5 |
+
from insightfy_utils.logging import get_logger
|
| 6 |
+
from typing import Optional, Dict, Any, List
|
| 7 |
+
import os
|
| 8 |
+
|
| 9 |
+
logger = get_logger(__name__)
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
class TMSClient:
|
| 13 |
+
"""Client for communicating with TMS."""
|
| 14 |
+
|
| 15 |
+
def __init__(self):
|
| 16 |
+
base_url = os.getenv("TMS_BASE_URL", "http://localhost:8003")
|
| 17 |
+
self.client = ServiceClient(
|
| 18 |
+
base_url=base_url,
|
| 19 |
+
service_name="tms",
|
| 20 |
+
timeout=30.0,
|
| 21 |
+
max_retries=3
|
| 22 |
+
)
|
| 23 |
+
|
| 24 |
+
async def get_sales_data(
|
| 25 |
+
self,
|
| 26 |
+
merchant_id: str,
|
| 27 |
+
start_date: str,
|
| 28 |
+
end_date: str,
|
| 29 |
+
branch_id: Optional[str] = None,
|
| 30 |
+
correlation_id: Optional[str] = None
|
| 31 |
+
) -> List[Dict[str, Any]]:
|
| 32 |
+
"""Get sales data from TMS for analytics."""
|
| 33 |
+
try:
|
| 34 |
+
params = {
|
| 35 |
+
"merchant_id": merchant_id,
|
| 36 |
+
"start_date": start_date,
|
| 37 |
+
"end_date": end_date
|
| 38 |
+
}
|
| 39 |
+
if branch_id:
|
| 40 |
+
params["branch_id"] = branch_id
|
| 41 |
+
|
| 42 |
+
response = await self.client.get(
|
| 43 |
+
"/api/v1/sales",
|
| 44 |
+
params=params,
|
| 45 |
+
correlation_id=correlation_id
|
| 46 |
+
)
|
| 47 |
+
return response.json().get("data", [])
|
| 48 |
+
except Exception as e:
|
| 49 |
+
logger.error(
|
| 50 |
+
"Failed to get sales data from TMS",
|
| 51 |
+
extra={
|
| 52 |
+
"merchant_id": merchant_id,
|
| 53 |
+
"start_date": start_date,
|
| 54 |
+
"end_date": end_date,
|
| 55 |
+
"correlation_id": correlation_id
|
| 56 |
+
},
|
| 57 |
+
exc_info=e
|
| 58 |
+
)
|
| 59 |
+
raise
|
| 60 |
+
|
| 61 |
+
async def get_stock_data(
|
| 62 |
+
self,
|
| 63 |
+
merchant_id: str,
|
| 64 |
+
branch_id: Optional[str] = None,
|
| 65 |
+
correlation_id: Optional[str] = None
|
| 66 |
+
) -> List[Dict[str, Any]]:
|
| 67 |
+
"""Get stock data from TMS for analytics."""
|
| 68 |
+
try:
|
| 69 |
+
params = {"merchant_id": merchant_id}
|
| 70 |
+
if branch_id:
|
| 71 |
+
params["branch_id"] = branch_id
|
| 72 |
+
|
| 73 |
+
response = await self.client.get(
|
| 74 |
+
"/api/v1/stock",
|
| 75 |
+
params=params,
|
| 76 |
+
correlation_id=correlation_id
|
| 77 |
+
)
|
| 78 |
+
return response.json().get("data", [])
|
| 79 |
+
except Exception as e:
|
| 80 |
+
logger.error(
|
| 81 |
+
"Failed to get stock data from TMS",
|
| 82 |
+
extra={
|
| 83 |
+
"merchant_id": merchant_id,
|
| 84 |
+
"branch_id": branch_id,
|
| 85 |
+
"correlation_id": correlation_id
|
| 86 |
+
},
|
| 87 |
+
exc_info=e
|
| 88 |
+
)
|
| 89 |
+
raise
|
| 90 |
+
|
| 91 |
+
async def get_appointment_data(
|
| 92 |
+
self,
|
| 93 |
+
merchant_id: str,
|
| 94 |
+
start_date: str,
|
| 95 |
+
end_date: str,
|
| 96 |
+
branch_id: Optional[str] = None,
|
| 97 |
+
correlation_id: Optional[str] = None
|
| 98 |
+
) -> List[Dict[str, Any]]:
|
| 99 |
+
"""Get appointment data from TMS for analytics."""
|
| 100 |
+
try:
|
| 101 |
+
params = {
|
| 102 |
+
"merchant_id": merchant_id,
|
| 103 |
+
"start_date": start_date,
|
| 104 |
+
"end_date": end_date
|
| 105 |
+
}
|
| 106 |
+
if branch_id:
|
| 107 |
+
params["branch_id"] = branch_id
|
| 108 |
+
|
| 109 |
+
response = await self.client.get(
|
| 110 |
+
"/api/v1/appointments",
|
| 111 |
+
params=params,
|
| 112 |
+
correlation_id=correlation_id
|
| 113 |
+
)
|
| 114 |
+
return response.json().get("data", [])
|
| 115 |
+
except Exception as e:
|
| 116 |
+
logger.error(
|
| 117 |
+
"Failed to get appointment data from TMS",
|
| 118 |
+
extra={
|
| 119 |
+
"merchant_id": merchant_id,
|
| 120 |
+
"start_date": start_date,
|
| 121 |
+
"end_date": end_date,
|
| 122 |
+
"correlation_id": correlation_id
|
| 123 |
+
},
|
| 124 |
+
exc_info=e
|
| 125 |
+
)
|
| 126 |
+
raise
|
| 127 |
+
|
| 128 |
+
async def close(self):
|
| 129 |
+
"""Close the TMS client."""
|
| 130 |
+
await self.client.close()
|
app/dependencies/auth.py
CHANGED
|
@@ -1,22 +1,26 @@
|
|
| 1 |
-
|
| 2 |
from fastapi import Depends, HTTPException, status
|
| 3 |
from enum import Enum
|
| 4 |
-
from app.
|
| 5 |
from settings import SECRET_KEY, ALGORITHM
|
| 6 |
from fastapi.security import APIKeyHeader
|
| 7 |
from app.nosql import mongo_db
|
| 8 |
|
| 9 |
-
|
| 10 |
-
logger = logging.getLogger(__name__)
|
| 11 |
|
| 12 |
-
|
| 13 |
-
|
| 14 |
-
|
| 15 |
-
|
| 16 |
-
|
| 17 |
-
|
| 18 |
-
|
| 19 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 20 |
|
| 21 |
oauth2_scheme = APIKeyHeader(name="Authorization")
|
| 22 |
|
|
@@ -25,35 +29,34 @@ def get_current_user(token: str = Depends(oauth2_scheme)) -> dict:
|
|
| 25 |
if token.startswith("Bearer "):
|
| 26 |
token = token.split(" ")[1]
|
| 27 |
payload = decode_jwt_token(token)
|
| 28 |
-
logger.info(
|
|
|
|
|
|
|
|
|
|
| 29 |
return {
|
| 30 |
"associate_id": payload["associate_id"],
|
| 31 |
"merchant_id": payload["merchant_id"],
|
| 32 |
-
"branch_id":payload["branch_id"],
|
| 33 |
-
"
|
| 34 |
}
|
| 35 |
except Exception as e:
|
| 36 |
-
logger.warning(
|
| 37 |
raise HTTPException(
|
| 38 |
status_code=status.HTTP_401_UNAUTHORIZED,
|
| 39 |
detail="Invalid authentication credentials",
|
| 40 |
)
|
| 41 |
-
|
| 42 |
|
| 43 |
async def user_has_permission(merchant_id: str, role_id: str, permission: str) -> bool:
|
| 44 |
-
"""
|
| 45 |
-
Check if the user has the required permission using MongoDB filtering.
|
| 46 |
-
permission format: RESOURCE:ACTION (e.g., appointments:create)
|
| 47 |
-
"""
|
| 48 |
try:
|
| 49 |
-
# Support both 'RESOURCE:ACTION' and 'ACTION_RESOURCE' formats
|
| 50 |
if "_" in permission:
|
| 51 |
parts = permission.split("_", 1)
|
| 52 |
if len(parts) == 2:
|
| 53 |
action, resource = parts
|
| 54 |
else:
|
| 55 |
-
logger.warning(
|
| 56 |
return False
|
|
|
|
| 57 |
query = {
|
| 58 |
"merchant_id": merchant_id,
|
| 59 |
"role_id": role_id,
|
|
@@ -61,30 +64,40 @@ async def user_has_permission(merchant_id: str, role_id: str, permission: str) -
|
|
| 61 |
}
|
| 62 |
role_doc = await mongo_db["access_roles"].find_one(query)
|
| 63 |
has_perm = role_doc is not None
|
| 64 |
-
logger.info(
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 65 |
return has_perm
|
| 66 |
except Exception as e:
|
| 67 |
-
logger.error(
|
| 68 |
return False
|
| 69 |
|
| 70 |
-
# Reusable FastAPI dependency for permission checks
|
| 71 |
async def require_permission(
|
| 72 |
access_id: str,
|
| 73 |
current_user: dict = Depends(get_current_user)
|
| 74 |
) -> dict:
|
| 75 |
-
"""
|
| 76 |
-
Dependency to check user permission for a given access_id (permission string).
|
| 77 |
-
Raises HTTPException(403) if not permitted.
|
| 78 |
-
Returns current_user if permitted.
|
| 79 |
-
"""
|
| 80 |
merchant_id = current_user.get("merchant_id")
|
| 81 |
user_id = current_user.get("associate_id")
|
| 82 |
-
role_id = current_user.get("
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 83 |
if not user_id or not merchant_id or not role_id:
|
| 84 |
-
logger.warning(
|
| 85 |
raise HTTPException(status_code=403, detail="Forbidden")
|
|
|
|
| 86 |
if not await user_has_permission(merchant_id, role_id, access_id):
|
| 87 |
-
logger.warning(
|
| 88 |
raise HTTPException(status_code=403, detail="Forbidden")
|
| 89 |
-
|
| 90 |
-
|
|
|
|
|
|
| 1 |
+
from insightfy_utils.logging import get_logger
|
| 2 |
from fastapi import Depends, HTTPException, status
|
| 3 |
from enum import Enum
|
| 4 |
+
from app.utils.jwt import decode_jwt_token
|
| 5 |
from settings import SECRET_KEY, ALGORITHM
|
| 6 |
from fastapi.security import APIKeyHeader
|
| 7 |
from app.nosql import mongo_db
|
| 8 |
|
| 9 |
+
logger = get_logger(__name__)
|
|
|
|
| 10 |
|
| 11 |
+
class AccessID(Enum):
|
| 12 |
+
VIEW_ANALYTICS = "view_analytics"
|
| 13 |
+
CREATE_ANALYTICS = "create_analytics"
|
| 14 |
+
UPDATE_ANALYTICS = "update_analytics"
|
| 15 |
+
DELETE_ANALYTICS = "delete_analytics"
|
| 16 |
+
VIEW_REPORTS = "view_reports"
|
| 17 |
+
CREATE_REPORTS = "create_reports"
|
| 18 |
+
UPDATE_REPORTS = "update_reports"
|
| 19 |
+
DELETE_REPORTS = "delete_reports"
|
| 20 |
+
VIEW_DASHBOARD = "view_dashboard"
|
| 21 |
+
CREATE_DASHBOARD = "create_dashboard"
|
| 22 |
+
UPDATE_DASHBOARD = "update_dashboard"
|
| 23 |
+
DELETE_DASHBOARD = "delete_dashboard"
|
| 24 |
|
| 25 |
oauth2_scheme = APIKeyHeader(name="Authorization")
|
| 26 |
|
|
|
|
| 29 |
if token.startswith("Bearer "):
|
| 30 |
token = token.split(" ")[1]
|
| 31 |
payload = decode_jwt_token(token)
|
| 32 |
+
logger.info("User authenticated", extra={
|
| 33 |
+
"associate_id": payload.get("associate_id"),
|
| 34 |
+
"merchant_id": payload.get("merchant_id")
|
| 35 |
+
})
|
| 36 |
return {
|
| 37 |
"associate_id": payload["associate_id"],
|
| 38 |
"merchant_id": payload["merchant_id"],
|
| 39 |
+
"branch_id": payload["branch_id"],
|
| 40 |
+
"role_id": payload.get("role_id", "user")
|
| 41 |
}
|
| 42 |
except Exception as e:
|
| 43 |
+
logger.warning("Authentication failed", exc_info=e)
|
| 44 |
raise HTTPException(
|
| 45 |
status_code=status.HTTP_401_UNAUTHORIZED,
|
| 46 |
detail="Invalid authentication credentials",
|
| 47 |
)
|
|
|
|
| 48 |
|
| 49 |
async def user_has_permission(merchant_id: str, role_id: str, permission: str) -> bool:
|
| 50 |
+
"""Check if the user has the required permission using MongoDB filtering."""
|
|
|
|
|
|
|
|
|
|
| 51 |
try:
|
|
|
|
| 52 |
if "_" in permission:
|
| 53 |
parts = permission.split("_", 1)
|
| 54 |
if len(parts) == 2:
|
| 55 |
action, resource = parts
|
| 56 |
else:
|
| 57 |
+
logger.warning("Invalid permission format", extra={"permission": permission})
|
| 58 |
return False
|
| 59 |
+
|
| 60 |
query = {
|
| 61 |
"merchant_id": merchant_id,
|
| 62 |
"role_id": role_id,
|
|
|
|
| 64 |
}
|
| 65 |
role_doc = await mongo_db["access_roles"].find_one(query)
|
| 66 |
has_perm = role_doc is not None
|
| 67 |
+
logger.info("Permission check", extra={
|
| 68 |
+
"role_id": role_id,
|
| 69 |
+
"merchant_id": merchant_id,
|
| 70 |
+
"permission": permission,
|
| 71 |
+
"granted": has_perm
|
| 72 |
+
})
|
| 73 |
return has_perm
|
| 74 |
except Exception as e:
|
| 75 |
+
logger.error("Permission check error", exc_info=e)
|
| 76 |
return False
|
| 77 |
|
|
|
|
| 78 |
async def require_permission(
|
| 79 |
access_id: str,
|
| 80 |
current_user: dict = Depends(get_current_user)
|
| 81 |
) -> dict:
|
| 82 |
+
"""Dependency to check user permission for a given access_id."""
|
|
|
|
|
|
|
|
|
|
|
|
|
| 83 |
merchant_id = current_user.get("merchant_id")
|
| 84 |
user_id = current_user.get("associate_id")
|
| 85 |
+
role_id = current_user.get("role_id")
|
| 86 |
+
|
| 87 |
+
logger.info("Checking permission", extra={
|
| 88 |
+
"user_id": user_id,
|
| 89 |
+
"merchant_id": merchant_id,
|
| 90 |
+
"role_id": role_id,
|
| 91 |
+
"access_id": access_id
|
| 92 |
+
})
|
| 93 |
+
|
| 94 |
if not user_id or not merchant_id or not role_id:
|
| 95 |
+
logger.warning("Permission denied: missing credentials", extra={"user_id": user_id})
|
| 96 |
raise HTTPException(status_code=403, detail="Forbidden")
|
| 97 |
+
|
| 98 |
if not await user_has_permission(merchant_id, role_id, access_id):
|
| 99 |
+
logger.warning("Permission denied", extra={"user_id": user_id, "access_id": access_id})
|
| 100 |
raise HTTPException(status_code=403, detail="Forbidden")
|
| 101 |
+
|
| 102 |
+
logger.info("Permission granted", extra={"user_id": user_id, "access_id": access_id})
|
| 103 |
+
return current_user
|
app/insightfy_utils-0.1.0-py3-none-any.whl
ADDED
|
Binary file (32.2 kB). View file
|
|
|
app/main.py
ADDED
|
@@ -0,0 +1,142 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from fastapi import FastAPI, Request
|
| 2 |
+
from fastapi.middleware.cors import CORSMiddleware
|
| 3 |
+
from fastapi.responses import JSONResponse
|
| 4 |
+
from contextlib import asynccontextmanager
|
| 5 |
+
|
| 6 |
+
from insightfy_utils.logging import get_logger
|
| 7 |
+
from insightfy_utils.telemetry.health import HealthCheck, HealthStatus
|
| 8 |
+
from insightfy_utils.telemetry.metrics import MetricsCollector, track_request_metrics
|
| 9 |
+
from insightfy_utils.models.shared_schemas import HealthCheckResponse, HealthCheckComponent
|
| 10 |
+
|
| 11 |
+
from app.sql import database
|
| 12 |
+
from app.nosql import redis_client
|
| 13 |
+
from app.clients import MPMSClient, RMSClient, TMSClient, CRMClient
|
| 14 |
+
|
| 15 |
+
logger = get_logger(__name__)
|
| 16 |
+
|
| 17 |
+
# Initialize health check and metrics
|
| 18 |
+
health_check = HealthCheck(service_name="ans")
|
| 19 |
+
metrics_collector = MetricsCollector(service_name="ans")
|
| 20 |
+
|
| 21 |
+
# Initialize service clients
|
| 22 |
+
mpms_client = MPMSClient()
|
| 23 |
+
rms_client = RMSClient()
|
| 24 |
+
tms_client = TMSClient()
|
| 25 |
+
crm_client = CRMClient()
|
| 26 |
+
|
| 27 |
+
@asynccontextmanager
|
| 28 |
+
async def lifespan(app: FastAPI):
|
| 29 |
+
"""Application lifespan events"""
|
| 30 |
+
# Startup
|
| 31 |
+
logger.info("Starting ANS service")
|
| 32 |
+
|
| 33 |
+
try:
|
| 34 |
+
# Test database connections
|
| 35 |
+
await database.connect()
|
| 36 |
+
health_check.add_component("postgresql", HealthStatus.HEALTHY)
|
| 37 |
+
logger.info("PostgreSQL connection established")
|
| 38 |
+
except Exception as e:
|
| 39 |
+
logger.error("Failed to connect to PostgreSQL", exc_info=e)
|
| 40 |
+
health_check.add_component("postgresql", HealthStatus.UNHEALTHY, str(e))
|
| 41 |
+
|
| 42 |
+
try:
|
| 43 |
+
# Test Redis connection
|
| 44 |
+
await redis_client.ping()
|
| 45 |
+
health_check.add_component("redis", HealthStatus.HEALTHY)
|
| 46 |
+
logger.info("Redis connection established")
|
| 47 |
+
except Exception as e:
|
| 48 |
+
logger.error("Failed to connect to Redis", exc_info=e)
|
| 49 |
+
health_check.add_component("redis", HealthStatus.UNHEALTHY, str(e))
|
| 50 |
+
|
| 51 |
+
yield
|
| 52 |
+
|
| 53 |
+
# Shutdown
|
| 54 |
+
logger.info("Shutting down ANS service")
|
| 55 |
+
await database.disconnect()
|
| 56 |
+
await mpms_client.close()
|
| 57 |
+
await rms_client.close()
|
| 58 |
+
await tms_client.close()
|
| 59 |
+
await crm_client.close()
|
| 60 |
+
logger.info("Service clients closed")
|
| 61 |
+
|
| 62 |
+
app = FastAPI(
|
| 63 |
+
title="Analytics and Notification Service",
|
| 64 |
+
description="ANS - Handles analytics, reporting, and notifications",
|
| 65 |
+
version="1.0.0",
|
| 66 |
+
lifespan=lifespan
|
| 67 |
+
)
|
| 68 |
+
|
| 69 |
+
# Add CORS middleware
|
| 70 |
+
app.add_middleware(
|
| 71 |
+
CORSMiddleware,
|
| 72 |
+
allow_origins=["*"],
|
| 73 |
+
allow_credentials=True,
|
| 74 |
+
allow_methods=["*"],
|
| 75 |
+
allow_headers=["*"],
|
| 76 |
+
)
|
| 77 |
+
|
| 78 |
+
# Add metrics middleware
|
| 79 |
+
@app.middleware("http")
|
| 80 |
+
async def metrics_middleware(request: Request, call_next):
|
| 81 |
+
"""Track request metrics"""
|
| 82 |
+
return await track_request_metrics(
|
| 83 |
+
request=request,
|
| 84 |
+
call_next=call_next,
|
| 85 |
+
metrics_collector=metrics_collector,
|
| 86 |
+
logger=logger
|
| 87 |
+
)
|
| 88 |
+
|
| 89 |
+
# Make clients available to the app
|
| 90 |
+
app.state.mpms_client = mpms_client
|
| 91 |
+
app.state.rms_client = rms_client
|
| 92 |
+
app.state.tms_client = tms_client
|
| 93 |
+
app.state.crm_client = crm_client
|
| 94 |
+
|
| 95 |
+
# Health check endpoint
|
| 96 |
+
@app.get("/health", response_model=HealthCheckResponse, tags=["Health"])
|
| 97 |
+
async def health():
|
| 98 |
+
"""Service health check endpoint"""
|
| 99 |
+
health_data = health_check.get_health()
|
| 100 |
+
|
| 101 |
+
# Convert to standardized response format
|
| 102 |
+
components = {}
|
| 103 |
+
for name, component in health_data.get("components", {}).items():
|
| 104 |
+
components[name] = HealthCheckComponent(
|
| 105 |
+
status=component.get("status"),
|
| 106 |
+
message=component.get("message")
|
| 107 |
+
)
|
| 108 |
+
|
| 109 |
+
return HealthCheckResponse(
|
| 110 |
+
status=health_data["status"],
|
| 111 |
+
service="ans",
|
| 112 |
+
version="1.0.0",
|
| 113 |
+
components=components
|
| 114 |
+
)
|
| 115 |
+
|
| 116 |
+
# Readiness check endpoint
|
| 117 |
+
@app.get("/ready", tags=["Health"])
|
| 118 |
+
async def ready():
|
| 119 |
+
"""Service readiness check endpoint"""
|
| 120 |
+
health_data = health_check.get_health()
|
| 121 |
+
|
| 122 |
+
# Service is ready if all critical components are healthy
|
| 123 |
+
is_ready = health_data["status"] == "healthy"
|
| 124 |
+
|
| 125 |
+
if is_ready:
|
| 126 |
+
return JSONResponse(content=health_data, status_code=200)
|
| 127 |
+
else:
|
| 128 |
+
return JSONResponse(content=health_data, status_code=503)
|
| 129 |
+
|
| 130 |
+
# Liveness check endpoint
|
| 131 |
+
@app.get("/live", tags=["Health"])
|
| 132 |
+
async def live():
|
| 133 |
+
"""Service liveness check endpoint"""
|
| 134 |
+
return {"status": "alive", "service": "ans"}
|
| 135 |
+
|
| 136 |
+
# Metrics endpoint
|
| 137 |
+
@app.get("/metrics", tags=["Metrics"])
|
| 138 |
+
async def metrics():
|
| 139 |
+
"""Prometheus-compatible metrics endpoint"""
|
| 140 |
+
return metrics_collector.get_metrics()
|
| 141 |
+
|
| 142 |
+
# ...existing code... (include routers)
|
app/model/appointment_analytics.py
DELETED
|
@@ -1,71 +0,0 @@
|
|
| 1 |
-
from sqlalchemy import (
|
| 2 |
-
Column,
|
| 3 |
-
Integer,
|
| 4 |
-
String,
|
| 5 |
-
Date,
|
| 6 |
-
Time,
|
| 7 |
-
ForeignKey,
|
| 8 |
-
DECIMAL,
|
| 9 |
-
TIMESTAMP,
|
| 10 |
-
text,
|
| 11 |
-
#Index,
|
| 12 |
-
)
|
| 13 |
-
from sqlalchemy.ext.declarative import declarative_base
|
| 14 |
-
from sqlalchemy.orm import relationship
|
| 15 |
-
|
| 16 |
-
# Create a base class for declarative models
|
| 17 |
-
Base = declarative_base()
|
| 18 |
-
|
| 19 |
-
class AppointmentAnalytics(Base):
|
| 20 |
-
"""
|
| 21 |
-
SQLAlchemy ORM model for the `appointment_analytics` table.
|
| 22 |
-
"""
|
| 23 |
-
__tablename__ = "appointment_analytics"
|
| 24 |
-
|
| 25 |
-
# Columns
|
| 26 |
-
appointment_id = Column(String(50), primary_key=True)
|
| 27 |
-
customer_id = Column(String(50), ForeignKey("customer_analytics.customer_id"), nullable=False)
|
| 28 |
-
merchant_id = Column(String(50), nullable=False)
|
| 29 |
-
location_id = Column(String(20), nullable=False)
|
| 30 |
-
staff_id = Column(String(50))
|
| 31 |
-
appointment_date = Column(Date, nullable=False)
|
| 32 |
-
appointment_time = Column(Time)
|
| 33 |
-
booking_date = Column(Date)
|
| 34 |
-
days_advance_booking = Column(Integer)
|
| 35 |
-
service_name = Column(String(200))
|
| 36 |
-
service_category = Column(String(100))
|
| 37 |
-
duration_minutes = Column(Integer)
|
| 38 |
-
service_price = Column(DECIMAL(10, 2))
|
| 39 |
-
appointment_status = Column(String(20))
|
| 40 |
-
cancellation_reason = Column(String(200))
|
| 41 |
-
no_show_reason = Column(String(200))
|
| 42 |
-
revenue = Column(DECIMAL(10, 2), default=0)
|
| 43 |
-
discount_applied = Column(DECIMAL(10, 2), default=0)
|
| 44 |
-
payment_method = Column(String(50))
|
| 45 |
-
wait_time_minutes = Column(Integer)
|
| 46 |
-
satisfaction_rating = Column(DECIMAL(3, 2))
|
| 47 |
-
created_at = Column(TIMESTAMP, server_default=text("CURRENT_TIMESTAMP"))
|
| 48 |
-
updated_at = Column(
|
| 49 |
-
TIMESTAMP,
|
| 50 |
-
server_default=text("CURRENT_TIMESTAMP"),
|
| 51 |
-
onupdate=text("CURRENT_TIMESTAMP"),
|
| 52 |
-
)
|
| 53 |
-
|
| 54 |
-
# Relationships (with `customer_analytics` model )
|
| 55 |
-
customer = relationship("CustomerAnalytics", back_populates="appointments")
|
| 56 |
-
'''
|
| 57 |
-
# Table arguments for indexes
|
| 58 |
-
__table_args__ = (
|
| 59 |
-
Index("idx_appointment_date", appointment_date),
|
| 60 |
-
Index("idx_appointment_customer", customer_id),
|
| 61 |
-
Index("idx_appointment_status", appointment_status),
|
| 62 |
-
Index("idx_appointment_service", service_category),
|
| 63 |
-
)
|
| 64 |
-
'''
|
| 65 |
-
|
| 66 |
-
def __repr__(self):
|
| 67 |
-
return (
|
| 68 |
-
f"<AppointmentAnalytics(appointment_id='{self.appointment_id}', "
|
| 69 |
-
f"customer_id='{self.customer_id}', "
|
| 70 |
-
f"appointment_date='{self.appointment_date}')>"
|
| 71 |
-
)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
app/model/customer_analytics.py
DELETED
|
@@ -1,56 +0,0 @@
|
|
| 1 |
-
from sqlalchemy import Column, Integer, String, Date, TIMESTAMP, Numeric, Text
|
| 2 |
-
from sqlalchemy.orm import declarative_base, relationship
|
| 3 |
-
|
| 4 |
-
# Create a base class for declarative models
|
| 5 |
-
Base = declarative_base()
|
| 6 |
-
|
| 7 |
-
class CustomerAnalytics(Base):
|
| 8 |
-
__tablename__ = 'customer_analytics'
|
| 9 |
-
|
| 10 |
-
customer_id = Column(String(50), primary_key=True, nullable=False)
|
| 11 |
-
merchant_id = Column(String(50), nullable=False)
|
| 12 |
-
location_id = Column(String(20), nullable=False)
|
| 13 |
-
customer_name = Column(String(200))
|
| 14 |
-
email = Column(String(200))
|
| 15 |
-
phone = Column(String(20))
|
| 16 |
-
city = Column(String(100))
|
| 17 |
-
state = Column(String(100))
|
| 18 |
-
country = Column(String(100))
|
| 19 |
-
acquisition_date = Column(Date, nullable=False)
|
| 20 |
-
acquisition_source = Column(String(100))
|
| 21 |
-
acquisition_channel = Column(String(50))
|
| 22 |
-
campaign_id = Column(String(50))
|
| 23 |
-
referral_customer_id = Column(String(50))
|
| 24 |
-
acquisition_cost = Column(Numeric(10, 2), default=0)
|
| 25 |
-
first_appointment_date = Column(Date)
|
| 26 |
-
days_to_first_appointment = Column(Integer)
|
| 27 |
-
total_revenue = Column(Numeric(10, 2), default=0)
|
| 28 |
-
total_visits = Column(Integer, default=0)
|
| 29 |
-
avg_spend_per_visit = Column(Numeric(10, 2), default=0)
|
| 30 |
-
customer_lifetime_value = Column(Numeric(12, 2), default=0)
|
| 31 |
-
last_visit_date = Column(Date)
|
| 32 |
-
last_interaction_date = Column(Date)
|
| 33 |
-
loyalty_points = Column(Integer, default=0)
|
| 34 |
-
membership_type = Column(String(50))
|
| 35 |
-
subscription_plan = Column(String(100))
|
| 36 |
-
visit_frequency_days = Column(Numeric(5, 1))
|
| 37 |
-
churn_risk_score = Column(Numeric(3, 2), default=0)
|
| 38 |
-
churn_risk_level = Column(String(20), default='Low')
|
| 39 |
-
customer_status = Column(String(20), default='Active')
|
| 40 |
-
avg_satisfaction_score = Column(Numeric(3, 2))
|
| 41 |
-
nps_score = Column(Integer)
|
| 42 |
-
total_surveys_completed = Column(Integer, default=0)
|
| 43 |
-
last_survey_date = Column(Date)
|
| 44 |
-
created_at = Column(TIMESTAMP(timezone=True), server_default='now()')
|
| 45 |
-
updated_at = Column(TIMESTAMP(timezone=True), server_default='now()', onupdate='now()')
|
| 46 |
-
|
| 47 |
-
# This creates the link from a customer to all their appointments
|
| 48 |
-
# 'appointments' is the name of this attribute on this model
|
| 49 |
-
appointments = relationship(
|
| 50 |
-
"AppointmentAnalytics",
|
| 51 |
-
back_populates="customer",
|
| 52 |
-
cascade="all, delete-orphan" #for managing related objects
|
| 53 |
-
)
|
| 54 |
-
|
| 55 |
-
def __repr__(self):
|
| 56 |
-
return f"<Customer(id={self.customer_id}, name='{self.customer_name}', email='{self.email}')>"
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
app/models/__init__.py
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
"""Database models for ANS service."""
|
app/nosql.py
CHANGED
|
@@ -1,14 +1,10 @@
|
|
| 1 |
# data_stores.py (or your current filename)
|
| 2 |
from __future__ import annotations
|
| 3 |
|
| 4 |
-
|
| 5 |
-
from
|
|
|
|
| 6 |
|
| 7 |
-
import motor.motor_asyncio
|
| 8 |
-
import redis.asyncio as redis
|
| 9 |
-
from redis.exceptions import RedisError
|
| 10 |
-
|
| 11 |
-
# Single source of truth
|
| 12 |
from settings import (
|
| 13 |
MONGO_URI,
|
| 14 |
MONGO_DB_NAME,
|
|
@@ -18,14 +14,7 @@ from settings import (
|
|
| 18 |
CACHE_DB,
|
| 19 |
)
|
| 20 |
|
| 21 |
-
|
| 22 |
-
# Logging
|
| 23 |
-
# -----------------------------------------------------------------------------
|
| 24 |
-
logging.basicConfig(
|
| 25 |
-
level=logging.INFO,
|
| 26 |
-
format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
|
| 27 |
-
)
|
| 28 |
-
logger = logging.getLogger(__name__)
|
| 29 |
|
| 30 |
# -----------------------------------------------------------------------------
|
| 31 |
# Validate configuration
|
|
@@ -41,45 +30,39 @@ if not CACHE_URL and not CACHE_URI:
|
|
| 41 |
)
|
| 42 |
|
| 43 |
# -----------------------------------------------------------------------------
|
| 44 |
-
# MongoDB client
|
| 45 |
# -----------------------------------------------------------------------------
|
| 46 |
try:
|
| 47 |
-
mongo_client =
|
| 48 |
MONGO_URI,
|
| 49 |
-
|
| 50 |
-
|
| 51 |
-
|
|
|
|
| 52 |
)
|
| 53 |
-
|
| 54 |
-
logger.info("MongoDB client initialized (db=%s).", MONGO_DB_NAME)
|
| 55 |
except Exception as e:
|
| 56 |
-
logger.
|
| 57 |
raise
|
| 58 |
|
| 59 |
# -----------------------------------------------------------------------------
|
| 60 |
-
# Redis client
|
| 61 |
# -----------------------------------------------------------------------------
|
| 62 |
|
| 63 |
|
| 64 |
-
def _redis_from_settings()
|
| 65 |
-
"""
|
| 66 |
-
Prefer a single URL (CACHE_URL / REDIS_URL). If not provided,
|
| 67 |
-
fall back to CACHE_URI ("host:port") + optional password.
|
| 68 |
-
"""
|
| 69 |
if CACHE_URL:
|
| 70 |
-
|
| 71 |
return redis.from_url(CACHE_URL, decode_responses=True)
|
| 72 |
|
| 73 |
-
# Fallback: host:port (required), password optional
|
| 74 |
try:
|
| 75 |
host, port_str = CACHE_URI.split(":")
|
| 76 |
port = int(port_str)
|
| 77 |
except Exception:
|
| 78 |
-
raise ValueError(
|
| 79 |
-
"Invalid CACHE_URI format. Expected 'host:port', e.g., 'localhost:6379'."
|
| 80 |
-
)
|
| 81 |
|
| 82 |
-
return
|
| 83 |
host=host,
|
| 84 |
port=port,
|
| 85 |
password=CACHE_PASSWORD or None,
|
|
@@ -90,31 +73,33 @@ def _redis_from_settings() -> redis.Redis:
|
|
| 90 |
|
| 91 |
try:
|
| 92 |
redis_client = _redis_from_settings()
|
| 93 |
-
logger.info("Redis client initialized
|
| 94 |
except Exception as e:
|
| 95 |
-
logger.
|
| 96 |
raise
|
| 97 |
|
| 98 |
# -----------------------------------------------------------------------------
|
| 99 |
-
#
|
| 100 |
# -----------------------------------------------------------------------------
|
| 101 |
|
| 102 |
|
| 103 |
async def ping_mongo() -> bool:
|
|
|
|
| 104 |
try:
|
| 105 |
await mongo_db.command("ping")
|
| 106 |
return True
|
| 107 |
except Exception:
|
| 108 |
-
logger.
|
| 109 |
return False
|
| 110 |
|
| 111 |
|
| 112 |
async def ping_redis() -> bool:
|
|
|
|
| 113 |
try:
|
| 114 |
pong = await redis_client.ping()
|
| 115 |
return bool(pong)
|
| 116 |
-
except
|
| 117 |
-
logger.
|
| 118 |
return False
|
| 119 |
|
| 120 |
# -----------------------------------------------------------------------------
|
|
@@ -123,30 +108,25 @@ async def ping_redis() -> bool:
|
|
| 123 |
|
| 124 |
|
| 125 |
async def connect_stores() -> None:
|
| 126 |
-
"""
|
| 127 |
-
No-op for motor/redis (lazy connections), but you can force a ping to fail fast.
|
| 128 |
-
"""
|
| 129 |
ok_mongo = await ping_mongo()
|
| 130 |
ok_redis = await ping_redis()
|
| 131 |
if not (ok_mongo and ok_redis):
|
| 132 |
raise RuntimeError("Store connectivity check failed (mongo or redis).")
|
| 133 |
-
logger.info("Store connectivity OK (Mongo & Redis)
|
| 134 |
|
| 135 |
|
| 136 |
async def disconnect_stores() -> None:
|
| 137 |
-
"""
|
| 138 |
-
Close drivers on shutdown.
|
| 139 |
-
"""
|
| 140 |
try:
|
| 141 |
mongo_client.close()
|
| 142 |
except Exception:
|
| 143 |
-
logger.
|
| 144 |
|
| 145 |
try:
|
| 146 |
await redis_client.close()
|
| 147 |
except Exception:
|
| 148 |
-
logger.
|
| 149 |
|
| 150 |
# Alias for backward compatibility
|
| 151 |
-
db = mongo_db
|
| 152 |
-
|
|
|
|
| 1 |
# data_stores.py (or your current filename)
|
| 2 |
from __future__ import annotations
|
| 3 |
|
| 4 |
+
from insightfy_utils.logging import get_logger
|
| 5 |
+
from insightfy_utils.db.mongo_connector import create_mongo_connection
|
| 6 |
+
from insightfy_utils.db.redis_connector import create_redis_connection
|
| 7 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 8 |
from settings import (
|
| 9 |
MONGO_URI,
|
| 10 |
MONGO_DB_NAME,
|
|
|
|
| 14 |
CACHE_DB,
|
| 15 |
)
|
| 16 |
|
| 17 |
+
logger = get_logger(__name__)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 18 |
|
| 19 |
# -----------------------------------------------------------------------------
|
| 20 |
# Validate configuration
|
|
|
|
| 30 |
)
|
| 31 |
|
| 32 |
# -----------------------------------------------------------------------------
|
| 33 |
+
# MongoDB client
|
| 34 |
# -----------------------------------------------------------------------------
|
| 35 |
try:
|
| 36 |
+
mongo_client, mongo_db = create_mongo_connection(
|
| 37 |
MONGO_URI,
|
| 38 |
+
MONGO_DB_NAME,
|
| 39 |
+
server_selection_timeout_ms=60_000,
|
| 40 |
+
socket_timeout_ms=60_000,
|
| 41 |
+
connect_timeout_ms=60_000,
|
| 42 |
)
|
| 43 |
+
logger.info("MongoDB client initialized", extra={"db_name": MONGO_DB_NAME})
|
|
|
|
| 44 |
except Exception as e:
|
| 45 |
+
logger.error("Failed to initialize MongoDB client", exc_info=e)
|
| 46 |
raise
|
| 47 |
|
| 48 |
# -----------------------------------------------------------------------------
|
| 49 |
+
# Redis client
|
| 50 |
# -----------------------------------------------------------------------------
|
| 51 |
|
| 52 |
|
| 53 |
+
def _redis_from_settings():
|
| 54 |
+
"""Create Redis client from settings."""
|
|
|
|
|
|
|
|
|
|
| 55 |
if CACHE_URL:
|
| 56 |
+
import redis.asyncio as redis
|
| 57 |
return redis.from_url(CACHE_URL, decode_responses=True)
|
| 58 |
|
|
|
|
| 59 |
try:
|
| 60 |
host, port_str = CACHE_URI.split(":")
|
| 61 |
port = int(port_str)
|
| 62 |
except Exception:
|
| 63 |
+
raise ValueError("Invalid CACHE_URI format. Expected 'host:port'")
|
|
|
|
|
|
|
| 64 |
|
| 65 |
+
return create_redis_connection(
|
| 66 |
host=host,
|
| 67 |
port=port,
|
| 68 |
password=CACHE_PASSWORD or None,
|
|
|
|
| 73 |
|
| 74 |
try:
|
| 75 |
redis_client = _redis_from_settings()
|
| 76 |
+
logger.info("Redis client initialized")
|
| 77 |
except Exception as e:
|
| 78 |
+
logger.error("Failed to initialize Redis client", exc_info=e)
|
| 79 |
raise
|
| 80 |
|
| 81 |
# -----------------------------------------------------------------------------
|
| 82 |
+
# Health check helpers
|
| 83 |
# -----------------------------------------------------------------------------
|
| 84 |
|
| 85 |
|
| 86 |
async def ping_mongo() -> bool:
|
| 87 |
+
"""Check MongoDB connectivity."""
|
| 88 |
try:
|
| 89 |
await mongo_db.command("ping")
|
| 90 |
return True
|
| 91 |
except Exception:
|
| 92 |
+
logger.error("Mongo ping failed", exc_info=True)
|
| 93 |
return False
|
| 94 |
|
| 95 |
|
| 96 |
async def ping_redis() -> bool:
|
| 97 |
+
"""Check Redis connectivity."""
|
| 98 |
try:
|
| 99 |
pong = await redis_client.ping()
|
| 100 |
return bool(pong)
|
| 101 |
+
except Exception:
|
| 102 |
+
logger.error("Redis ping failed", exc_info=True)
|
| 103 |
return False
|
| 104 |
|
| 105 |
# -----------------------------------------------------------------------------
|
|
|
|
| 108 |
|
| 109 |
|
| 110 |
async def connect_stores() -> None:
|
| 111 |
+
"""Verify store connectivity on startup."""
|
|
|
|
|
|
|
| 112 |
ok_mongo = await ping_mongo()
|
| 113 |
ok_redis = await ping_redis()
|
| 114 |
if not (ok_mongo and ok_redis):
|
| 115 |
raise RuntimeError("Store connectivity check failed (mongo or redis).")
|
| 116 |
+
logger.info("Store connectivity OK (Mongo & Redis)")
|
| 117 |
|
| 118 |
|
| 119 |
async def disconnect_stores() -> None:
|
| 120 |
+
"""Close connections on shutdown."""
|
|
|
|
|
|
|
| 121 |
try:
|
| 122 |
mongo_client.close()
|
| 123 |
except Exception:
|
| 124 |
+
logger.error("Error closing Mongo client", exc_info=True)
|
| 125 |
|
| 126 |
try:
|
| 127 |
await redis_client.close()
|
| 128 |
except Exception:
|
| 129 |
+
logger.error("Error closing Redis client", exc_info=True)
|
| 130 |
|
| 131 |
# Alias for backward compatibility
|
| 132 |
+
db = mongo_db
|
|
|
app/repositories/__init__.py
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
"""Repository layer for data access in ANS service."""
|
app/routers/__init__.py
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
"""API routers for ANS service."""
|
app/routers/analytics_router.py
CHANGED
|
@@ -1,125 +1,77 @@
|
|
| 1 |
-
|
| 2 |
-
|
| 3 |
-
|
| 4 |
-
from
|
| 5 |
-
from
|
| 6 |
-
from
|
| 7 |
-
|
| 8 |
-
|
| 9 |
-
|
| 10 |
-
|
| 11 |
-
|
| 12 |
-
|
| 13 |
-
|
| 14 |
-
|
| 15 |
-
|
| 16 |
-
|
| 17 |
-
|
| 18 |
-
return
|
| 19 |
-
|
| 20 |
-
|
| 21 |
-
|
| 22 |
-
|
| 23 |
-
|
| 24 |
-
|
| 25 |
-
|
| 26 |
-
|
| 27 |
-
|
| 28 |
-
merchant_id = current_user.get("merchant_id")
|
| 29 |
-
result = await CustomerAnalyticService.get_all_customer_analytics(merchant_id)
|
| 30 |
-
print("result ",result)
|
| 31 |
-
if(result is not None):
|
| 32 |
-
return {**result}
|
| 33 |
-
else:
|
| 34 |
-
return {"error": "Customers not found"}
|
| 35 |
-
|
| 36 |
-
@router.get("/{customer_id}", status_code=200)
|
| 37 |
-
async def get_by_customer_id(customer_id: str,
|
| 38 |
-
current_user: dict = Depends(require_view_customer_permission)):
|
| 39 |
-
print("get_by_customer_id")
|
| 40 |
-
|
| 41 |
-
merchant_id = current_user.get("merchant_id")
|
| 42 |
-
result = await CustomerAnalyticService.get_by_customer_id(customer_id, merchant_id)
|
| 43 |
-
print("result ",result)
|
| 44 |
-
|
| 45 |
-
if(result is not None):
|
| 46 |
-
return {**result}
|
| 47 |
-
else:
|
| 48 |
-
return {"error": "Customers not found"}
|
| 49 |
-
|
| 50 |
-
@router.get("/merchant_id/{merchant_id}", status_code=200)
|
| 51 |
-
async def get_customers_by_merchant_id(merchant_id: str,
|
| 52 |
-
current_user: dict = Depends(require_view_customer_permission)):
|
| 53 |
-
print("get_by_merchant_id")
|
| 54 |
-
|
| 55 |
-
curuser_merchant_id = current_user.get("merchant_id")
|
| 56 |
-
|
| 57 |
-
if(curuser_merchant_id==merchant_id):
|
| 58 |
-
result = await CustomerAnalyticService.get_by_merchant_id(merchant_id)
|
| 59 |
-
print("result ",result)
|
| 60 |
-
else:
|
| 61 |
-
return {"error": "Customers not found"}
|
| 62 |
-
#raise HTTPException(status_code=403, detail=ACCESS_DENIED_ERROR)
|
| 63 |
-
|
| 64 |
-
return {**result}
|
| 65 |
-
|
| 66 |
-
|
| 67 |
-
'''
|
| 68 |
-
------------------APPOINTMENT ANALYTCIS APIS--------------------
|
| 69 |
-
'''
|
| 70 |
-
@router.get("/appointment/list/{merchant_id}", status_code=200)
|
| 71 |
-
async def list_appointment_analytics(merchant_id: str,
|
| 72 |
-
current_user: dict = Depends(require_view_customer_permission)):
|
| 73 |
-
print("list_appointment_analytics")
|
| 74 |
-
|
| 75 |
-
curuser_merchant_id = current_user.get("merchant_id")
|
| 76 |
-
|
| 77 |
-
if(curuser_merchant_id==merchant_id):
|
| 78 |
-
result = await AppointmentAnalyticsService.get_all_appointment_analytics(merchant_id)
|
| 79 |
-
print("result ",result)
|
| 80 |
-
else:
|
| 81 |
-
return {"error": "Appointments not found"}
|
| 82 |
-
#raise HTTPException(status_code=403, detail=ACCESS_DENIED_ERROR)
|
| 83 |
-
|
| 84 |
-
return {**result}
|
| 85 |
-
|
| 86 |
-
@router.get("/appointment/customer/{customer_id}", status_code=200)
|
| 87 |
-
async def get_appointments_by_customerid(customer_id: str,
|
| 88 |
-
current_user: dict = Depends(require_view_customer_permission)):
|
| 89 |
-
print("get_appointments_by_customerid")
|
| 90 |
-
|
| 91 |
-
merchant_id = current_user.get("merchant_id")
|
| 92 |
-
|
| 93 |
-
result = await AppointmentAnalyticsService.get_appointments_by_customerid(customer_id,merchant_id)
|
| 94 |
|
| 95 |
-
|
| 96 |
-
|
| 97 |
-
|
| 98 |
-
|
| 99 |
-
|
| 100 |
-
#raise HTTPException(status_code=403, detail=ACCESS_DENIED_ERROR)
|
| 101 |
-
|
| 102 |
-
return {**result}
|
| 103 |
-
|
| 104 |
-
|
| 105 |
-
|
| 106 |
-
|
| 107 |
-
@router.get("/appointment/{appointment_id}", status_code=200)
|
| 108 |
-
async def get_appointments_by_id(appointment_id: str,
|
| 109 |
-
current_user: dict = Depends(require_view_customer_permission)):
|
| 110 |
-
print("get_appointments_by_id")
|
| 111 |
|
| 112 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 113 |
|
| 114 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 115 |
|
| 116 |
-
|
| 117 |
-
|
| 118 |
-
|
| 119 |
-
|
| 120 |
-
|
| 121 |
-
|
| 122 |
-
|
| 123 |
-
|
| 124 |
-
|
| 125 |
-
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Analytics router for ANS service.
|
| 3 |
+
"""
|
| 4 |
+
from fastapi import APIRouter, Depends, HTTPException
|
| 5 |
+
from typing import List, Optional
|
| 6 |
+
from datetime import datetime, date
|
| 7 |
+
|
| 8 |
+
from app.dependencies.auth import get_current_user, require_permission, AccessID
|
| 9 |
+
from insightfy_utils.logging import get_logger
|
| 10 |
+
|
| 11 |
+
logger = get_logger(__name__)
|
| 12 |
+
|
| 13 |
+
router = APIRouter()
|
| 14 |
+
|
| 15 |
+
@router.get("/health")
async def analytics_health():
    """Liveness endpoint for the analytics router; always reports healthy."""
    return {"status": "healthy", "service": "analytics"}
|
| 19 |
+
|
| 20 |
+
@router.get("/dashboard")
async def get_dashboard_data(
    start_date: Optional[date] = None,
    end_date: Optional[date] = None,
    current_user: dict = Depends(get_current_user)
):
    """Return dashboard analytics for the authenticated user's merchant.

    Currently a placeholder: the data sections are empty dicts until the
    real aggregation layer is wired in. The optional start/end dates are
    echoed back in the response period.
    """
    # NOTE(review): assumes the auth dependency always supplies
    # "merchant_id"; a missing key would surface as a 500 — confirm upstream.
    merchant_id = current_user["merchant_id"]
    period = {"start_date": start_date, "end_date": end_date}

    logger.info(
        "Getting dashboard data",
        extra={"merchant_id": merchant_id, "start_date": start_date, "end_date": end_date},
    )

    # Placeholder implementation
    placeholder_sections = {
        "sales_summary": {},
        "customer_metrics": {},
        "inventory_status": {},
        "performance_indicators": {},
    }
    return {
        "merchant_id": merchant_id,
        "data": placeholder_sections,
        "period": period,
    }
|
| 49 |
+
|
| 50 |
+
@router.get("/reports")
async def get_reports(
    report_type: str,
    start_date: Optional[date] = None,
    end_date: Optional[date] = None,
    current_user: dict = Depends(require_permission(AccessID.VIEW_REPORTS.value))
):
    """Return an analytics report of the requested type for the caller's merchant.

    Placeholder implementation: ``data`` is always empty until report
    generation is implemented. Requires the VIEW_REPORTS permission.

    Args:
        report_type: Identifier of the report to generate.
        start_date: Optional inclusive start of the reporting period.
        end_date: Optional inclusive end of the reporting period.
        current_user: Injected, permission-checked user context.
    """
    # Local import keeps the module's import surface unchanged.
    from datetime import timezone

    merchant_id = current_user["merchant_id"]

    logger.info("Getting reports", extra={
        "merchant_id": merchant_id,
        "report_type": report_type,
        "start_date": start_date,
        "end_date": end_date
    })

    # Placeholder implementation
    return {
        "merchant_id": merchant_id,
        "report_type": report_type,
        "data": [],
        # datetime.utcnow() is deprecated (3.12) and returns a *naive*
        # timestamp; emit an explicit UTC-aware timestamp instead.
        "generated_at": datetime.now(timezone.utc),
        "period": {
            "start_date": start_date,
            "end_date": end_date
        }
    }
|
app/routers/auth_route.py
DELETED
|
@@ -1,30 +0,0 @@
|
|
| 1 |
-
from fastapi import APIRouter, HTTPException, status, Depends, Form
|
| 2 |
-
from jose import jwt
|
| 3 |
-
from datetime import datetime, timedelta
|
| 4 |
-
from settings import SECRET_KEY, ALGORITHM
|
| 5 |
-
|
| 6 |
-
router = APIRouter(prefix="/auth", tags=["Authentication"])
|
| 7 |
-
|
| 8 |
-
# Hardcoded test user
|
| 9 |
-
USERS_DB = {
|
| 10 |
-
"admin@example.com": {
|
| 11 |
-
"password": "admin123",
|
| 12 |
-
"merchant_id": "MERCHANT001",
|
| 13 |
-
"associate_id": "admin"
|
| 14 |
-
}
|
| 15 |
-
}
|
| 16 |
-
|
| 17 |
-
@router.post("/login")
|
| 18 |
-
def login(username: str = Form(...), password: str = Form(...)):
|
| 19 |
-
user = USERS_DB.get(username)
|
| 20 |
-
if not user or user["password"] != password:
|
| 21 |
-
raise HTTPException(status_code=401, detail="Invalid credentials")
|
| 22 |
-
|
| 23 |
-
token_data = {
|
| 24 |
-
"sub": user["associate_id"],
|
| 25 |
-
"merchant_id": user["merchant_id"],
|
| 26 |
-
"exp": datetime.utcnow() + timedelta(hours=2)
|
| 27 |
-
}
|
| 28 |
-
|
| 29 |
-
access_token = jwt.encode(token_data, SECRET_KEY, algorithm=ALGORITHM)
|
| 30 |
-
return {"access_token": access_token, "token_type": "bearer"}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
app/schemas/__init__.py
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
"""Pydantic schemas for ANS service."""
|
app/services/__init__.py
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
"""Business logic services for ANS service."""
|
app/services/appointment_analytics_service.py
DELETED
|
@@ -1,76 +0,0 @@
|
|
| 1 |
-
from app.model.appointment_analytics import AppointmentAnalytics
|
| 2 |
-
from sqlalchemy import select, and_
|
| 3 |
-
from app.util.dbutil import get_db_session
|
| 4 |
-
import logging
|
| 5 |
-
|
| 6 |
-
logger=logging.getLogger(__name__)
|
| 7 |
-
|
| 8 |
-
class AppointmentAnalyticService:
|
| 9 |
-
@staticmethod
|
| 10 |
-
async def get_all_appointment_analytics(merchantid):
|
| 11 |
-
|
| 12 |
-
try:
|
| 13 |
-
# The 'with' statement handles session management
|
| 14 |
-
async with get_db_session() as session:
|
| 15 |
-
result = await session.execute(select(AppointmentAnalytics).filter_by(merchant_id=merchantid))
|
| 16 |
-
appointments = result.scalars().all()
|
| 17 |
-
|
| 18 |
-
result_dict = {"total": len(appointments),
|
| 19 |
-
"appointments":appointments}
|
| 20 |
-
return result_dict
|
| 21 |
-
|
| 22 |
-
except Exception as e:
|
| 23 |
-
print(f"An error occurred: {e}")
|
| 24 |
-
return None
|
| 25 |
-
|
| 26 |
-
@staticmethod
|
| 27 |
-
async def get_appointments_by_customerid(customerid,merchantid):
|
| 28 |
-
|
| 29 |
-
try:
|
| 30 |
-
# The 'with' statement handles session management
|
| 31 |
-
async with get_db_session() as session:
|
| 32 |
-
#customer = session.query(AppointmentAnalytics).filter_by(merchant_id=merchantid).first()
|
| 33 |
-
result = await session.execute(select(AppointmentAnalytics)
|
| 34 |
-
.filter(
|
| 35 |
-
and_(
|
| 36 |
-
AppointmentAnalytics.customer_id == customerid,
|
| 37 |
-
AppointmentAnalytics.merchant_id == merchantid
|
| 38 |
-
)
|
| 39 |
-
)
|
| 40 |
-
)
|
| 41 |
-
appointments = result.scalars().all()
|
| 42 |
-
|
| 43 |
-
result_dict = {"total": len(appointments),
|
| 44 |
-
"appointments":appointments}
|
| 45 |
-
return result_dict
|
| 46 |
-
|
| 47 |
-
except Exception as e:
|
| 48 |
-
print(f"An error occurred: {e}")
|
| 49 |
-
return None
|
| 50 |
-
|
| 51 |
-
@staticmethod
|
| 52 |
-
async def get_by_appointment_id(appointmentid, merchantid):
|
| 53 |
-
try:
|
| 54 |
-
# The 'with' statement handles session management
|
| 55 |
-
async with get_db_session() as session:
|
| 56 |
-
#customer = session.query(AppointmentAnalytics).filter_by(merchant_id=merchantid).first()
|
| 57 |
-
result = await session.execute(select(AppointmentAnalytics)
|
| 58 |
-
.filter(
|
| 59 |
-
and_(
|
| 60 |
-
AppointmentAnalytics.appointment_id == appointmentid,
|
| 61 |
-
AppointmentAnalytics.merchant_id == merchantid
|
| 62 |
-
)
|
| 63 |
-
)
|
| 64 |
-
)
|
| 65 |
-
|
| 66 |
-
appointment = result.scalars().first()
|
| 67 |
-
|
| 68 |
-
result_dict = {"appointment" : "Appointments not found"}
|
| 69 |
-
if appointment:
|
| 70 |
-
result_dict = {"appointment": appointment}
|
| 71 |
-
return result_dict
|
| 72 |
-
except Exception as e:
|
| 73 |
-
print(f"An error occurred: {e}")
|
| 74 |
-
return None
|
| 75 |
-
|
| 76 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
app/services/customer_analytics_service.py
DELETED
|
@@ -1,71 +0,0 @@
|
|
| 1 |
-
from app.model.customer_analytics import CustomerAnalytics
|
| 2 |
-
from sqlalchemy import select, and_
|
| 3 |
-
from app.util.dbutil import get_db_session
|
| 4 |
-
import logging
|
| 5 |
-
|
| 6 |
-
logger=logging.getLogger(__name__)
|
| 7 |
-
|
| 8 |
-
class CustomerAnalyticService:
|
| 9 |
-
|
| 10 |
-
@staticmethod
|
| 11 |
-
async def get_all_customer_analytics(merchantid):
|
| 12 |
-
|
| 13 |
-
try:
|
| 14 |
-
# The 'with' statement handles session management
|
| 15 |
-
async with get_db_session() as session:
|
| 16 |
-
#customer = session.query(CustomerAnalytics).all()
|
| 17 |
-
#result = await session.execute(select(CustomerAnalytics))
|
| 18 |
-
result = await session.execute(select(CustomerAnalytics).filter_by(merchant_id=merchantid))
|
| 19 |
-
customers = result.scalars().all()
|
| 20 |
-
|
| 21 |
-
result_dict = {"total": len(customers),
|
| 22 |
-
"customers":customers}
|
| 23 |
-
return result_dict
|
| 24 |
-
|
| 25 |
-
except Exception as e:
|
| 26 |
-
print(f"An error occurred: {e}")
|
| 27 |
-
return None
|
| 28 |
-
|
| 29 |
-
@staticmethod
|
| 30 |
-
async def get_by_customer_id(customerid, merchantid):
|
| 31 |
-
try:
|
| 32 |
-
# The 'with' statement handles session management
|
| 33 |
-
async with get_db_session() as session:
|
| 34 |
-
#customer = session.query(CustomerAnalytics).filter_by(customer_id=customerid).first()
|
| 35 |
-
#result = await session.execute(select(CustomerAnalytics).filter_by(customer_id=customerid))
|
| 36 |
-
result = await session.execute(select(CustomerAnalytics)
|
| 37 |
-
.filter(
|
| 38 |
-
and_(
|
| 39 |
-
CustomerAnalytics.customer_id == customerid,
|
| 40 |
-
CustomerAnalytics.merchant_id == merchantid
|
| 41 |
-
)
|
| 42 |
-
)
|
| 43 |
-
)
|
| 44 |
-
|
| 45 |
-
customer = result.scalars().first()
|
| 46 |
-
|
| 47 |
-
result_dict = {"customer" : "Customer not found"}
|
| 48 |
-
if customer:
|
| 49 |
-
result_dict = {"customer": customer}
|
| 50 |
-
return result_dict
|
| 51 |
-
except Exception as e:
|
| 52 |
-
print(f"An error occurred: {e}")
|
| 53 |
-
return None
|
| 54 |
-
|
| 55 |
-
@staticmethod
|
| 56 |
-
async def get_by_merchant_id(merchantid):
|
| 57 |
-
try:
|
| 58 |
-
# The 'with' statement handles session management
|
| 59 |
-
async with get_db_session() as session:
|
| 60 |
-
#customer = session.query(CustomerAnalytics).filter_by(customer_id=customerid).first()
|
| 61 |
-
result = await session.execute(select(CustomerAnalytics).filter_by(merchant_id=merchantid))
|
| 62 |
-
customers = result.scalars().all()
|
| 63 |
-
|
| 64 |
-
result_dict = {"customers" : "Customer not found"}
|
| 65 |
-
if customers:
|
| 66 |
-
result_dict = {"customers": customers}
|
| 67 |
-
return result_dict
|
| 68 |
-
except Exception as e:
|
| 69 |
-
print(f"An error occurred: {e}")
|
| 70 |
-
return None
|
| 71 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
app/sql.py
ADDED
|
@@ -0,0 +1,67 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# database.py
|
| 2 |
+
from __future__ import annotations
|
| 3 |
+
|
| 4 |
+
from insightfy_utils.logging import get_logger
|
| 5 |
+
from insightfy_utils.db.postgres_connector import create_postgres_connection, create_postgres_metadata
|
| 6 |
+
from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession
|
| 7 |
+
from sqlalchemy.orm import sessionmaker
|
| 8 |
+
|
| 9 |
+
from settings import DATABASE_URI
|
| 10 |
+
|
| 11 |
+
logger = get_logger(__name__)
|
| 12 |
+
|
| 13 |
+
logger.info("Using DATABASE_URL", extra={"url": DATABASE_URI})
|
| 14 |
+
|
| 15 |
+
if not DATABASE_URI:
|
| 16 |
+
logger.error("DATABASE_URI is empty or missing from settings")
|
| 17 |
+
raise ValueError("DATABASE_URI is not set. Check environment variables.")
|
| 18 |
+
|
| 19 |
+
# Use centralized database connection from insightfy-utils
|
| 20 |
+
database = create_postgres_connection(DATABASE_URI)
|
| 21 |
+
metadata = create_postgres_metadata()
|
| 22 |
+
|
| 23 |
+
logger.info("Database configuration loaded successfully")
|
| 24 |
+
|
| 25 |
+
# ββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ
|
| 26 |
+
# Lifecycle helpers
|
| 27 |
+
# ββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ
|
| 28 |
+
async def connect_to_database() -> None:
    """Connect to the database when the application starts.

    Raises:
        Exception: Re-raises whatever ``database.connect()`` raised after
            logging it with a traceback, so startup fails loudly.
    """
    try:
        await database.connect()
        logger.info("Successfully connected to the database")
    except Exception:
        # logger.exception records the active traceback; the unused
        # "as e" binding was removed.
        logger.exception("Error connecting to the database")
        raise
|
| 36 |
+
|
| 37 |
+
async def disconnect_from_database() -> None:
    """Disconnect from the database when the application shuts down.

    Raises:
        Exception: Re-raises whatever ``database.disconnect()`` raised after
            logging it, so shutdown problems are visible to the caller.
    """
    try:
        await database.disconnect()
        logger.info("Successfully disconnected from the database")
    except Exception:
        # logger.exception records the active traceback; the unused
        # "as e" binding was removed.
        logger.exception("Error disconnecting from the database")
        raise
|
| 45 |
+
|
| 46 |
+
# For advanced use cases with custom connection pool settings
|
| 47 |
+
async_engine = create_async_engine(
|
| 48 |
+
DATABASE_URI,
|
| 49 |
+
echo=False,
|
| 50 |
+
future=True,
|
| 51 |
+
pool_size=10,
|
| 52 |
+
max_overflow=20,
|
| 53 |
+
pool_timeout=30,
|
| 54 |
+
pool_recycle=3600,
|
| 55 |
+
pool_pre_ping=True,
|
| 56 |
+
connect_args={
|
| 57 |
+
"server_settings": {
|
| 58 |
+
"application_name": "insightfy-ans",
|
| 59 |
+
"jit": "off"
|
| 60 |
+
},
|
| 61 |
+
"command_timeout": 60,
|
| 62 |
+
"statement_cache_size": 0
|
| 63 |
+
}
|
| 64 |
+
)
|
| 65 |
+
async_session = sessionmaker(
|
| 66 |
+
async_engine, expire_on_commit=False, class_=AsyncSession
|
| 67 |
+
)
|
app/util/dbutil.py
DELETED
|
@@ -1,48 +0,0 @@
|
|
| 1 |
-
#from sqlalchemy import create_engine
|
| 2 |
-
from sqlalchemy.orm import sessionmaker
|
| 3 |
-
from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession
|
| 4 |
-
from contextlib import contextmanager, asynccontextmanager
|
| 5 |
-
#import asyncio
|
| 6 |
-
|
| 7 |
-
from settings import DB_PROTOCOL, DB_USER,DB_PASSWORD,DB_HOST,DB_PORT,DB_NAME,DB_SSLMODE
|
| 8 |
-
# 1. Use a single, global Session factory
|
| 9 |
-
# This factory should only be created once when your application starts.
|
| 10 |
-
|
| 11 |
-
|
| 12 |
-
#db_url = '{protocol}://{user}:{password}@{host}:{port}/{name}?sslmode={sslmode}'.format(
|
| 13 |
-
async_db_url = '{protocol}://{user}:{password}@{host}:{port}/{name}?ssl={sslmode}'.format(
|
| 14 |
-
protocol=DB_PROTOCOL,
|
| 15 |
-
user=DB_USER,
|
| 16 |
-
password=DB_PASSWORD,
|
| 17 |
-
host=DB_HOST,
|
| 18 |
-
port=DB_PORT,
|
| 19 |
-
name=DB_NAME,
|
| 20 |
-
sslmode = "require" if DB_SSLMODE=="require" else None
|
| 21 |
-
|
| 22 |
-
#sslmode=DB_SSLMODE
|
| 23 |
-
)
|
| 24 |
-
|
| 25 |
-
#print("async_db_url = ",async_db_url)
|
| 26 |
-
#engine = create_engine(db_url)
|
| 27 |
-
async_engine = create_async_engine(async_db_url)
|
| 28 |
-
#session = sessionmaker(autocommit=False, autoflush=False, bind=engine)
|
| 29 |
-
asyncsession = sessionmaker(
|
| 30 |
-
async_engine, expire_on_commit=False, class_=AsyncSession
|
| 31 |
-
)
|
| 32 |
-
|
| 33 |
-
# 2. Use a context manager to manage sessions
|
| 34 |
-
#@contextmanager
|
| 35 |
-
@asynccontextmanager
|
| 36 |
-
async def get_db_session():
|
| 37 |
-
"""
|
| 38 |
-
Provides a transactional scope around a series of operations.
|
| 39 |
-
A new session is created, used, and then automatically closed.
|
| 40 |
-
"""
|
| 41 |
-
db = asyncsession()
|
| 42 |
-
try:
|
| 43 |
-
yield db
|
| 44 |
-
finally:
|
| 45 |
-
#db.close()
|
| 46 |
-
await db.close()
|
| 47 |
-
|
| 48 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
app/utils/db_utils.py
ADDED
|
@@ -0,0 +1,126 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Database utility functions for connection management and retry logic.
|
| 3 |
+
"""
|
| 4 |
+
import asyncio
|
| 5 |
+
from typing import Any, Callable, TypeVar
|
| 6 |
+
from functools import wraps
|
| 7 |
+
from sqlalchemy.exc import DisconnectionError, InterfaceError, OperationalError
|
| 8 |
+
from app.sql import async_session, async_engine
|
| 9 |
+
from insightfy_utils.logging import get_logger
|
| 10 |
+
|
| 11 |
+
logger = get_logger(__name__)
|
| 12 |
+
|
| 13 |
+
# Type variable for return type
|
| 14 |
+
T = TypeVar('T')
|
| 15 |
+
|
| 16 |
+
class DatabaseConnectionError(Exception):
    """Raised when a database operation keeps failing due to connection problems."""
|
| 19 |
+
|
| 20 |
+
def retry_db_operation(max_retries: int = 3, delay: float = 1.0):
    """Decorator factory: retry an async DB call on transient connection errors.

    Only SQLAlchemy connection-class errors (DisconnectionError,
    InterfaceError, OperationalError) are retried, with exponential backoff
    (delay * 2**attempt). Any other exception propagates immediately.

    Args:
        max_retries: Maximum number of retry attempts after the first call.
        delay: Base delay between retries in seconds.

    Raises:
        DatabaseConnectionError: When every attempt fails with a
            connection-class error; chained from the last one.
    """
    def decorator(func: Callable[..., T]) -> Callable[..., T]:
        @wraps(func)
        async def wrapper(*args, **kwargs) -> T:
            last_exception = None

            for attempt in range(max_retries + 1):
                try:
                    return await func(*args, **kwargs)
                except (DisconnectionError, InterfaceError, OperationalError) as exc:
                    last_exception = exc
                    logger.warning(
                        f"Database operation failed (attempt {attempt + 1}/{max_retries + 1}): {exc}"
                    )

                    if attempt >= max_retries:
                        # Out of retries: surface a domain-specific error.
                        logger.error(f"Database operation failed after {max_retries + 1} attempts")
                        raise DatabaseConnectionError(
                            f"Database operation failed after {max_retries + 1} attempts. "
                            f"Last error: {exc}"
                        ) from exc

                    wait_time = delay * (2 ** attempt)
                    logger.info(f"Retrying in {wait_time} seconds...")
                    await asyncio.sleep(wait_time)
                except Exception as exc:
                    # Non-connection errors are not retried.
                    logger.error(f"Non-recoverable database error: {exc}")
                    raise

            # Unreachable in practice (the loop always returns or raises).
            raise last_exception

        return wrapper
    return decorator
|
| 64 |
+
|
| 65 |
+
async def test_connection() -> bool:
    """Test database connectivity.

    Returns:
        bool: True if connection is successful, False otherwise
    """
    # Local import keeps the module's import surface unchanged.
    from sqlalchemy import text

    try:
        async with async_session() as session:
            # SQLAlchemy 2.x rejects raw SQL strings passed to execute();
            # they must be wrapped in text(), otherwise ArgumentError is
            # raised and this check would always report failure.
            await session.execute(text("SELECT 1"))
            return True
    except Exception as e:
        logger.error(f"Connection test failed: {e}")
        return False
|
| 79 |
+
|
| 80 |
+
async def ensure_connection() -> None:
    """Ensure database connection is available, reconnecting if needed.

    When the probe query fails, the engine's connection pool is disposed and
    connectivity is probed again.

    Raises:
        DatabaseConnectionError: If connection cannot be established.
    """
    if await test_connection():
        return  # Connection is fine; nothing to do.

    logger.warning("Database connection lost, attempting to reconnect...")

    try:
        # Dispose existing (possibly stale) pooled connections so the next
        # probe opens fresh ones.
        await async_engine.dispose()

        if not await test_connection():
            raise DatabaseConnectionError("Failed to reconnect to database")
        logger.info("Database reconnection successful")
    except Exception as e:
        raise DatabaseConnectionError(f"Failed to ensure database connection: {e}") from e
|
| 102 |
+
|
| 103 |
+
async def safe_session_execute(query, *args, **kwargs):
    """Safely execute a database query with connection retry logic.

    Opens a fresh session per attempt and retries via retry_db_operation
    (up to 2 retries) on transient connection errors.

    Args:
        query: SQL query or statement to execute.
        *args, **kwargs: Additional arguments for session.execute.

    Returns:
        Query result.

    Raises:
        DatabaseConnectionError: If operation fails after retries.
    """
    @retry_db_operation(max_retries=2)
    async def _run():
        async with async_session() as session:
            try:
                return await session.execute(query, *args, **kwargs)
            finally:
                # Explicit close; harmless alongside the context manager.
                await session.close()

    return await _run()
|
app/utils/health_utils.py
ADDED
|
@@ -0,0 +1,64 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
ANS-specific health check utilities.
|
| 3 |
+
Uses centralized health check from insightfy_utils.telemetry.
|
| 4 |
+
"""
|
| 5 |
+
from insightfy_utils.logging import get_logger
|
| 6 |
+
from insightfy_utils.telemetry.health import HealthCheck, HealthStatus
|
| 7 |
+
from app.sql import database, async_session
|
| 8 |
+
from app.nosql import redis_client
|
| 9 |
+
|
| 10 |
+
logger = get_logger(__name__)
|
| 11 |
+
|
| 12 |
+
async def check_postgresql_health() -> tuple[HealthStatus, str]:
    """Check PostgreSQL database health via a trivial probe query.

    Returns:
        (HealthStatus, message) — HEALTHY when the probe succeeds,
        UNHEALTHY with the error text otherwise.
    """
    # Local import keeps the module's import surface unchanged.
    from sqlalchemy import text

    try:
        async with async_session() as session:
            # SQLAlchemy 2.x requires raw SQL strings to be wrapped in
            # text(); a bare string raises ArgumentError, which would make
            # this check report UNHEALTHY even when the DB is fine.
            await session.execute(text("SELECT 1"))
            return HealthStatus.HEALTHY, "PostgreSQL is responsive"
    except Exception as e:
        logger.error("PostgreSQL health check failed", exc_info=e)
        return HealthStatus.UNHEALTHY, f"PostgreSQL error: {str(e)}"
|
| 21 |
+
|
| 22 |
+
async def check_redis_health() -> tuple[HealthStatus, str]:
    """Check Redis health with a ping.

    Returns:
        (HealthStatus, message) — HEALTHY when Redis answers,
        UNHEALTHY with the error text otherwise.
    """
    try:
        await redis_client.ping()
    except Exception as e:
        logger.error("Redis health check failed", exc_info=e)
        return HealthStatus.UNHEALTHY, f"Redis error: {str(e)}"
    return HealthStatus.HEALTHY, "Redis is responsive"
|
| 30 |
+
|
| 31 |
+
async def check_mongodb_health() -> tuple[HealthStatus, str]:
    """Check MongoDB health (if used in ANS).

    Returns:
        (HealthStatus, message) — HEALTHY when Mongo answers a ping,
        UNHEALTHY with the error text otherwise.

    Raises:
        ImportError: When app.nosql (MongoDB support) is not available.
    """
    # The import sits OUTSIDE the try so that ImportError propagates:
    # perform_comprehensive_health_check treats ImportError as "MongoDB not
    # used in this service" and skips the component. With the import inside
    # the try, the broad except swallowed ImportError and Mongo was wrongly
    # reported UNHEALTHY instead of being skipped.
    from app.nosql import mongo_db

    try:
        await mongo_db.command("ping")
        return HealthStatus.HEALTHY, "MongoDB is responsive"
    except Exception as e:
        logger.error("MongoDB health check failed", exc_info=e)
        return HealthStatus.UNHEALTHY, f"MongoDB error: {str(e)}"
|
| 40 |
+
|
| 41 |
+
async def perform_comprehensive_health_check() -> dict:
    """Perform a comprehensive health check of all ANS components.

    Probes PostgreSQL, Redis and (when available) MongoDB, and aggregates
    the results into the service-level health payload.

    Returns:
        Health status dictionary produced by HealthCheck.get_health().
    """
    health = HealthCheck(service_name="ans")

    # PostgreSQL
    status, message = await check_postgresql_health()
    health.add_component("postgresql", status, message)

    # Redis
    status, message = await check_redis_health()
    health.add_component("redis", status, message)

    # MongoDB is optional; an ImportError means it is not used here.
    try:
        status, message = await check_mongodb_health()
    except ImportError:
        pass
    else:
        health.add_component("mongodb", status, message)

    return health.get_health()
|
app/{util β utils}/jwt.py
RENAMED
|
@@ -1,16 +1,24 @@
|
|
| 1 |
from jose import ExpiredSignatureError, jwt, JWTError
|
| 2 |
from settings import SECRET_KEY, ALGORITHM
|
| 3 |
|
| 4 |
-
|
| 5 |
def decode_jwt_token(token: str) -> dict:
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 6 |
try:
|
| 7 |
payload = jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM])
|
| 8 |
-
print("payload = ",payload)
|
| 9 |
return payload
|
| 10 |
-
|
| 11 |
except ExpiredSignatureError:
|
| 12 |
raise ValueError("Token has expired")
|
| 13 |
-
|
| 14 |
except JWTError as e:
|
| 15 |
-
|
| 16 |
-
raise ValueError("Invalid token")
|
|
|
|
| 1 |
from jose import ExpiredSignatureError, jwt, JWTError
|
| 2 |
from settings import SECRET_KEY, ALGORITHM
|
| 3 |
|
| 4 |
+
|
| 5 |
def decode_jwt_token(token: str) -> dict:
    """
    Decode and validate a JWT issued with the service's SECRET_KEY/ALGORITHM.

    Args:
        token: JWT token string

    Returns:
        Decoded payload dict

    Raises:
        ValueError: If token is expired or invalid
    """
    try:
        return jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM])
    except ExpiredSignatureError:
        raise ValueError("Token has expired")
    except JWTError as e:
        raise ValueError(f"Invalid token: {e}")
|
|
|
requirements.txt
CHANGED
|
@@ -1,8 +1,30 @@
|
|
| 1 |
-
|
| 2 |
-
|
| 3 |
-
|
| 4 |
-
|
| 5 |
-
|
| 6 |
-
|
| 7 |
-
|
| 8 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# FastAPI and ASGI server
|
| 2 |
+
fastapi>=0.104.0
|
| 3 |
+
uvicorn[standard]>=0.24.0
|
| 4 |
+
pydantic>=2.0.0
|
| 5 |
+
pydantic-settings>=2.0.0
|
| 6 |
+
|
| 7 |
+
# Database - Async PostgreSQL
|
| 8 |
+
databases>=0.8.0
|
| 9 |
+
sqlalchemy>=2.0.0
|
| 10 |
+
asyncpg>=0.29.0
|
| 11 |
+
|
| 12 |
+
# Database - MongoDB
|
| 13 |
+
motor>=3.3.0
|
| 14 |
+
|
| 15 |
+
# Database - Redis
|
| 16 |
+
redis>=5.0.0
|
| 17 |
+
|
| 18 |
+
# Authentication
|
| 19 |
+
python-jose[cryptography]>=3.3.0
|
| 20 |
+
passlib[bcrypt]>=1.7.4
|
| 21 |
+
python-multipart>=0.0.6
|
| 22 |
+
|
| 23 |
+
# Utilities
|
| 24 |
+
python-dotenv>=1.0.0
|
| 25 |
+
python-dateutil>=2.8.0
|
| 26 |
+
httpx>=0.24.0
|
| 27 |
+
pandas>=1.5.0
|
| 28 |
+
|
| 29 |
+
# Centralized utilities (our shared package)
|
| 30 |
+
# insightfy_utils>=0.1.0  # not yet published; vendored wheel used instead
|
settings.py
CHANGED
|
@@ -1,10 +1,9 @@
|
|
| 1 |
from __future__ import annotations
|
| 2 |
-
|
| 3 |
-
import os
|
| 4 |
from urllib.parse import quote_plus, urlencode
|
| 5 |
-
|
|
|
|
| 6 |
|
| 7 |
-
|
| 8 |
|
| 9 |
# ββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ
|
| 10 |
# Security
|
|
@@ -12,83 +11,35 @@ load_dotenv() # keep this so local .env is picked up
|
|
| 12 |
SECRET_KEY = os.getenv("SECRET_KEY")
|
| 13 |
ALGORITHM = os.getenv("ALGORITHM", "HS256")
|
| 14 |
|
| 15 |
-
#postgres keys
|
| 16 |
-
|
| 17 |
-
DB_PROTOCOL = os.getenv("DB_PROTOCOL")
|
| 18 |
-
DB_USER = os.getenv("DB_USER")
|
| 19 |
-
DB_PASSWORD = os.getenv("DB_PASSWORD")
|
| 20 |
-
DB_HOST = os.getenv("DB_HOST")
|
| 21 |
-
DB_PORT = os.getenv("DB_PORT")
|
| 22 |
-
DB_NAME = os.getenv("DB_NAME")
|
| 23 |
-
DB_SSLMODE = os.getenv("DB_SSLMODE")
|
| 24 |
-
|
| 25 |
-
# settings.py (append these near DATABASE_URI or at the end)
|
| 26 |
-
|
| 27 |
-
# MongoDB
|
| 28 |
-
MONGO_URI = os.getenv("MONGO_URI") # e.g., mongodb+srv://user:pass@cluster/db?retryWrites=true&w=majority
|
| 29 |
-
MONGO_DB_NAME = os.getenv("MONGO_DB_NAME", "insightfy-bloom")
|
| 30 |
-
|
| 31 |
-
# Redis/Cache
|
| 32 |
-
# Prefer a single URL if available (works with redis:// and rediss://)
|
| 33 |
-
CACHE_URL = os.getenv("CACHE_URL") or os.getenv("REDIS_URL")
|
| 34 |
-
|
| 35 |
-
# Backwards-compat: some setups provide host:port + password separately
|
| 36 |
-
CACHE_URI = os.getenv("CACHE_URI") # e.g., "localhost:6379"
|
| 37 |
-
CACHE_PASSWORD = os.getenv("CACHE_K") # password only (optional)
|
| 38 |
-
|
| 39 |
-
# Optional: default Redis DB index if using host:port form
|
| 40 |
-
CACHE_DB = int(os.getenv("CACHE_DB", "0"))
|
| 41 |
-
|
| 42 |
-
|
| 43 |
# ββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ
|
| 44 |
-
# Database
|
| 45 |
# ββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ
|
| 46 |
-
|
| 47 |
-
|
| 48 |
-
|
| 49 |
-
|
| 50 |
-
|
| 51 |
-
|
| 52 |
-
|
| 53 |
-
|
| 54 |
-
|
| 55 |
-
|
| 56 |
-
|
| 57 |
-
|
| 58 |
-
|
| 59 |
-
|
| 60 |
-
Supports:
|
| 61 |
-
- URL-encoding for user/password
|
| 62 |
-
- Default schema via ?options=-csearch_path=<schema>
|
| 63 |
-
- sslmode=require (e.g., Neon)
|
| 64 |
-
Honors DATABASE_URI if set (escape hatch/override).
|
| 65 |
-
"""
|
| 66 |
-
|
| 67 |
-
# If already provided as a single secret, honor it and return immediately.
|
| 68 |
-
direct_uri = env.get("DATABASE_URI") or env.get("DATABASE_URL")
|
| 69 |
-
if direct_uri:
|
| 70 |
-
return direct_uri
|
| 71 |
-
|
| 72 |
-
protocol = _getenv("DB_PROTOCOL", default="postgresql+asyncpg") or "postgresql+asyncpg"
|
| 73 |
-
user = _getenv("DB_USER", required=True)
|
| 74 |
-
password = _getenv("DB_PASSWORD", required=True)
|
| 75 |
-
host = _getenv("DB_HOST", default="localhost") or "localhost"
|
| 76 |
-
port = _getenv("DB_PORT", default="5432") or "5432"
|
| 77 |
-
dbname = _getenv("DB_NAME", required=True)
|
| 78 |
-
|
| 79 |
-
# Optional extras
|
| 80 |
-
schema = _getenv("DB_SCHEMA") # e.g., "trans"
|
| 81 |
-
# sslmode is not supported in asyncpg URI; SSL must be set programmatically if needed
|
| 82 |
-
|
| 83 |
-
# No query parameters needed for asyncpg URI
|
| 84 |
-
q = ""
|
| 85 |
-
|
| 86 |
-
# URL-encode credentials so special characters don't break the URI
|
| 87 |
-
user_enc = quote_plus(user)
|
| 88 |
-
password_enc = quote_plus(password)
|
| 89 |
-
|
| 90 |
-
return f"{protocol}://{user_enc}:{password_enc}@{host}:{port}/{dbname}{q}"
|
| 91 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 92 |
|
| 93 |
-
#
|
| 94 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
from __future__ import annotations
|
|
|
|
|
|
|
| 2 |
from urllib.parse import quote_plus, urlencode
|
| 3 |
+
import os
|
| 4 |
+
from insightfy_utils.config import load_env
|
| 5 |
|
| 6 |
+
load_env()
|
| 7 |
|
| 8 |
# ββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ
|
| 9 |
# Security
|
|
|
|
| 11 |
SECRET_KEY = os.getenv("SECRET_KEY")
|
| 12 |
ALGORITHM = os.getenv("ALGORITHM", "HS256")
|
| 13 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 14 |
# ββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ
|
| 15 |
+
# Database - PostgreSQL
|
| 16 |
# ββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ
|
| 17 |
+
DB_PROTOCOL = os.getenv("DB_PROTOCOL", "postgresql+asyncpg")
|
| 18 |
+
DB_USER = os.getenv("DB_USER")
|
| 19 |
+
DB_PASSWORD = os.getenv("DB_PASSWORD")
|
| 20 |
+
DB_HOST = os.getenv("DB_HOST")
|
| 21 |
+
DB_PORT = os.getenv("DB_PORT", "5432")
|
| 22 |
+
DB_NAME = os.getenv("DB_NAME")
|
| 23 |
+
DB_SCHEMA = os.getenv("DB_SCHEMA", "analytics")
|
| 24 |
+
DB_SSLMODE = os.getenv("DB_SSLMODE", "require")
|
| 25 |
+
|
| 26 |
+
# Build DATABASE_URI
|
| 27 |
+
if all([DB_USER, DB_PASSWORD, DB_HOST, DB_NAME]):
|
| 28 |
+
DATABASE_URI = f"{DB_PROTOCOL}://{DB_USER}:{quote_plus(DB_PASSWORD)}@{DB_HOST}:{DB_PORT}/{DB_NAME}"
|
| 29 |
+
else:
|
| 30 |
+
DATABASE_URI = None
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 31 |
|
| 32 |
+
# ββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ
|
| 33 |
+
# Database - MongoDB
|
| 34 |
+
# ββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ
|
| 35 |
+
MONGO_URI = os.getenv('MONGO_URI')
|
| 36 |
+
MONGO_DB_NAME = "insightfy-bloom"
|
| 37 |
|
| 38 |
+
# ββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ
|
| 39 |
+
# Database - Redis/Cache
|
| 40 |
+
# ββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ
|
| 41 |
+
CACHE_URI = os.getenv('CACHE_URI')
|
| 42 |
+
CACHE_K = os.getenv('CACHE_K')
|
| 43 |
+
CACHE_URL = os.getenv("CACHE_URL") or os.getenv("REDIS_URL")
|
| 44 |
+
CACHE_PASSWORD = CACHE_K
|
| 45 |
+
CACHE_DB = int(os.getenv("CACHE_DB", "0"))
|