Upload 54 files
This view is limited to 50 files because it contains too many changes.
- .env +15 -0
- alembic.ini +77 -0
- alembic/README +1 -0
- alembic/__pycache__/env.cpython-312.pyc +0 -0
- alembic/env.py +63 -0
- alembic/script.py.mako +26 -0
- app/__init__.py +0 -0
- app/__pycache__/__init__.cpython-312.pyc +0 -0
- app/__pycache__/main.cpython-312.pyc +0 -0
- app/api/__init__.py +0 -0
- app/api/analytics.py +175 -0
- app/api/auth.py +69 -0
- app/api/calendar.py +146 -0
- app/api/files.py +53 -0
- app/api/maintenance.py +133 -0
- app/api/notifications.py +84 -0
- app/api/orders.py +142 -0
- app/api/products.py +86 -0
- app/api/scheduler.py +203 -0
- app/api/users.py +120 -0
- app/core/__init__.py +0 -0
- app/core/__pycache__/__init__.cpython-312.pyc +0 -0
- app/core/__pycache__/config.cpython-312.pyc +0 -0
- app/core/config.py +34 -0
- app/core/dependencies.py +52 -0
- app/core/security.py +23 -0
- app/db/__init__.py +0 -0
- app/db/__pycache__/__init__.cpython-312.pyc +0 -0
- app/db/__pycache__/database.cpython-312.pyc +0 -0
- app/db/__pycache__/models.cpython-312.pyc +0 -0
- app/db/database.py +30 -0
- app/db/init_db.py +77 -0
- app/db/models.py +131 -0
- app/db/schemas.py +63 -0
- app/main.py +108 -0
- app/schemas/events.py +87 -0
- app/services/analytics.py +131 -0
- app/services/backup.py +179 -0
- app/services/calendar.py +215 -0
- app/services/maintenance.py +286 -0
- app/services/notifications.py +105 -0
- app/services/scheduler.py +243 -0
- app/templates/email/low_stock_alert.html +41 -0
- app/templates/email/order_confirmation.html +40 -0
- app/templates/email/password_reset.html +38 -0
- app/templates/email/welcome.html +45 -0
- app/utils/cache.py +55 -0
- app/utils/email.py +103 -0
- app/utils/file_storage.py +79 -0
- app/utils/logger.py +59 -0
.env
ADDED
@@ -0,0 +1,15 @@
+PROJECT_NAME=Admin Dashboard API
+VERSION=1.0.0
+API_V1_STR=/api/v1
+
+# Security
+SECRET_KEY=your-secret-key-here-change-in-production
+ACCESS_TOKEN_EXPIRE_MINUTES=30
+ALGORITHM=HS256
+
+# Database
+DATABASE_URL=postgresql+asyncpg://postgres:Lovyelias5584.@db.mqyrkmsdgugdhxiucukb.supabase.co:5432/postgres
+
+# Redis Cache
+REDIS_HOST=localhost
+REDIS_PORT=6379
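
These values are presumably read by app/core/config.py (listed above but not shown in this truncated view). As a rough illustration only, a settings class along the following lines, built on pydantic-settings with field names mirroring the .env keys, is the usual way such a file is loaded; the class and defaults below are assumptions, not the repository's actual code:

# Hypothetical sketch (not the repository's code): loading the .env above.
# Assumes pydantic v2 with the pydantic-settings package installed.
from pydantic_settings import BaseSettings, SettingsConfigDict

class Settings(BaseSettings):
    model_config = SettingsConfigDict(env_file=".env")

    PROJECT_NAME: str = "Admin Dashboard API"
    VERSION: str = "1.0.0"
    API_V1_STR: str = "/api/v1"
    SECRET_KEY: str = "change-me"
    ACCESS_TOKEN_EXPIRE_MINUTES: int = 30
    ALGORITHM: str = "HS256"
    DATABASE_URL: str = "postgresql+asyncpg://localhost/postgres"  # placeholder
    REDIS_HOST: str = "localhost"
    REDIS_PORT: int = 6379

settings = Settings()  # values from .env override the defaults above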
alembic.ini
ADDED
@@ -0,0 +1,77 @@
+# A generic, single database configuration.
+
+[alembic]
+# path to migration scripts
+script_location = alembic
+
+# template used to generate migration files
+file_template = %%(year)d%%(month).2d%%(day).2d_%%(hour).2d%%(minute).2d_%%(rev)s_%%(slug)s
+
+# timezone to use when rendering the date within the migration file
+# as well as the filename.
+timezone = UTC
+
+# max length of characters to apply to the "slug" field
+truncate_slug_length = 40
+
+# set to 'true' to run the environment during
+# the 'revision' command, regardless of autogenerate
+revision_environment = false
+
+# set to 'true' to allow .pyc and .pyo files without
+# a source .py file to be detected as revisions in the
+# versions/ directory
+sourceless = false
+
+# version location specification
+version_locations = alembic/versions
+
+# version path separator
+version_path_separator = os
+
+# the output encoding used when revision files
+# are written from script.py.mako
+output_encoding = utf-8
+
+sqlalchemy.url = postgresql+psycopg2://postgres:Lovyelias5584.@db.mqyrkmsdgugdhxiucukb.supabase.co:5432/postgres
+
+[post_write_hooks]
+# format using "black"
+hooks = black
+black.type = console_scripts
+black.entrypoint = black
+black.options = -l 79 REVISION_SCRIPT_FILENAME
+
+[loggers]
+keys = root,sqlalchemy,alembic
+
+[handlers]
+keys = console
+
+[formatters]
+keys = generic
+
+[logger_root]
+level = WARN
+handlers = console
+qualname =
+
+[logger_sqlalchemy]
+level = WARN
+handlers =
+qualname = sqlalchemy.engine
+
+[logger_alembic]
+level = INFO
+handlers =
+qualname = alembic
+
+[handler_console]
+class = StreamHandler
+args = (sys.stderr,)
+level = NOTSET
+formatter = generic
+
+[formatter_generic]
+format = %(levelname)-5.5s [%(name)s] %(message)s
+datefmt = %H:%M:%S
alembic/README
ADDED
@@ -0,0 +1 @@
+Generic single-database configuration.
alembic/__pycache__/env.cpython-312.pyc
ADDED
Binary file (2.93 kB).
alembic/env.py
ADDED
@@ -0,0 +1,63 @@
+from logging.config import fileConfig
+from sqlalchemy import engine_from_config
+from sqlalchemy import pool
+from alembic import context
+import os
+import sys
+from pathlib import Path
+
+# Add the parent directory to the Python path
+parent_dir = str(Path(__file__).resolve().parents[1])
+sys.path.append(parent_dir)
+
+from app.core.config import settings
+from app.db.models import Base
+
+config = context.config
+
+if config.config_file_name is not None:
+    fileConfig(config.config_file_name)
+
+def get_url():
+    return str(settings.DATABASE_URL).replace("+asyncpg", "+psycopg2")
+
+config.set_main_option("sqlalchemy.url", get_url())
+
+target_metadata = Base.metadata
+
+def run_migrations_offline() -> None:
+    """Run migrations in 'offline' mode."""
+    url = get_url()
+    context.configure(
+        url=url,
+        target_metadata=target_metadata,
+        literal_binds=True,
+        dialect_opts={"paramstyle": "named"},
+    )
+
+    with context.begin_transaction():
+        context.run_migrations()
+
+def run_migrations_online() -> None:
+    """Run migrations in 'online' mode."""
+    configuration = config.get_section(config.config_ini_section)
+    configuration["sqlalchemy.url"] = get_url()
+    connectable = engine_from_config(
+        configuration,
+        prefix="sqlalchemy.",
+        poolclass=pool.NullPool,
+    )
+
+    with connectable.connect() as connection:
+        context.configure(
+            connection=connection,
+            target_metadata=target_metadata
+        )
+
+        with context.begin_transaction():
+            context.run_migrations()
+
+if context.is_offline_mode():
+    run_migrations_offline()
+else:
+    run_migrations_online()
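
With env.py rewriting the async URL to a psycopg2 one, migrations would normally be generated and applied with the Alembic CLI (alembic revision --autogenerate -m "...", then alembic upgrade head). The same steps can be scripted through Alembic's command API — a minimal sketch, assuming alembic.ini sits in the working directory:

# Hypothetical sketch: generating and applying migrations programmatically.
from alembic import command
from alembic.config import Config

alembic_cfg = Config("alembic.ini")  # path is an assumption
command.revision(alembic_cfg, message="initial tables", autogenerate=True)
command.upgrade(alembic_cfg, "head")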
alembic/script.py.mako
ADDED
@@ -0,0 +1,26 @@
+"""${message}
+
+Revision ID: ${up_revision}
+Revises: ${down_revision | comma,n}
+Create Date: ${create_date}
+
+"""
+from typing import Sequence, Union
+
+from alembic import op
+import sqlalchemy as sa
+${imports if imports else ""}
+
+# revision identifiers, used by Alembic.
+revision: str = ${repr(up_revision)}
+down_revision: Union[str, None] = ${repr(down_revision)}
+branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
+depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
+
+
+def upgrade() -> None:
+    ${upgrades if upgrades else "pass"}
+
+
+def downgrade() -> None:
+    ${downgrades if downgrades else "pass"}
app/__init__.py
ADDED
File without changes
app/__pycache__/__init__.cpython-312.pyc
ADDED
Binary file (168 Bytes).
app/__pycache__/main.cpython-312.pyc
ADDED
Binary file (6.63 kB).
app/api/__init__.py
ADDED
File without changes
app/api/analytics.py
ADDED
@@ -0,0 +1,175 @@
+from fastapi import APIRouter, Depends, Query
+from sqlalchemy.ext.asyncio import AsyncSession
+from sqlalchemy import select, func, cast, Date
+from datetime import datetime, timedelta
+from typing import Dict, Any
+from ..core.dependencies import get_current_superuser
+from ..db.database import get_db
+from ..db.models import Order, Product, User
+
+router = APIRouter()
+
+@router.get("/sales")
+async def get_sales_analytics(
+    start_date: datetime = Query(default=None),
+    end_date: datetime = Query(default=None),
+    _=Depends(get_current_superuser),
+    db: AsyncSession = Depends(get_db)
+) -> Dict[str, Any]:
+    if not start_date:
+        start_date = datetime.now() - timedelta(days=30)
+    if not end_date:
+        end_date = datetime.now()
+
+    # Daily sales query
+    stmt = select(
+        cast(Order.created_at, Date).label('date'),
+        func.sum(Order.total_amount).label('total_sales'),
+        func.count().label('order_count')
+    ).where(
+        Order.created_at.between(start_date, end_date),
+        Order.status.in_(['completed', 'delivered'])
+    ).group_by(
+        cast(Order.created_at, Date)
+    ).order_by(
+        cast(Order.created_at, Date)
+    )
+
+    result = await db.execute(stmt)
+    daily_sales = result.all()
+
+    # Calculate totals
+    total_revenue = sum(day.total_sales for day in daily_sales)
+    total_orders = sum(day.order_count for day in daily_sales)
+    avg_order_value = total_revenue / total_orders if total_orders > 0 else 0
+
+    return {
+        "daily_sales": [
+            {"date": day.date, "total_sales": day.total_sales, "order_count": day.order_count}
+            for day in daily_sales
+        ],
+        "total_revenue": total_revenue,
+        "total_orders": total_orders,
+        "average_order_value": avg_order_value
+    }
+
+@router.get("/products")
+async def get_product_analytics(
+    _=Depends(get_current_superuser),
+    db: AsyncSession = Depends(get_db)
+) -> Dict[str, Any]:
+    # Top selling products
+    stmt = select(
+        Product,
+        func.sum(Order.total_amount).label('total_revenue'),
+        func.count().label('total_orders')
+    ).join(
+        Order, Product.id == Order.id
+    ).group_by(
+        Product.id
+    ).order_by(
+        func.sum(Order.total_amount).desc()
+    ).limit(10)
+
+    result = await db.execute(stmt)
+    top_products = result.all()
+
+    # Count total and low stock products
+    total_products = await db.scalar(select(func.count()).select_from(Product))
+    low_stock_count = await db.scalar(
+        select(func.count()).select_from(Product).where(Product.inventory_count < 10)
+    )
+
+    return {
+        "top_products": [
+            {
+                "id": product.id,
+                "name": product.name,
+                "total_revenue": revenue,
+                "total_orders": orders
+            }
+            for product, revenue, orders in top_products
+        ],
+        "total_products": total_products,
+        "low_stock_products": low_stock_count
+    }
+
+@router.get("/customers")
+async def get_customer_analytics(
+    _=Depends(get_current_superuser),
+    db: AsyncSession = Depends(get_db)
+) -> Dict[str, Any]:
+    # Customer statistics
+    stmt = select(
+        User,
+        func.sum(Order.total_amount).label('total_spent'),
+        func.count().label('total_orders')
+    ).join(
+        Order, User.id == Order.customer_id
+    ).group_by(
+        User.id
+    ).order_by(
+        func.sum(Order.total_amount).desc()
+    )
+
+    result = await db.execute(stmt)
+    customer_data = result.all()
+
+    total_customers = len(customer_data)
+    total_revenue = sum(spent for _, spent, _ in customer_data)
+    avg_customer_value = total_revenue / total_customers if total_customers > 0 else 0
+
+    # Customer segments
+    segments = {
+        "high_value": len([c for c, spent, _ in customer_data if spent > 1000]),
+        "medium_value": len([c for c, spent, _ in customer_data if 500 <= spent <= 1000]),
+        "low_value": len([c for c, spent, _ in customer_data if spent < 500])
+    }
+
+    return {
+        "total_customers": total_customers,
+        "average_customer_value": avg_customer_value,
+        "customer_segments": segments,
+        "top_customers": [
+            {
+                "id": customer.id,
+                "email": customer.email,
+                "total_spent": spent,
+                "total_orders": orders
+            }
+            for customer, spent, orders in customer_data[:10]  # Top 10 customers
+        ]
+    }
+
+@router.get("/dashboard")
+async def get_dashboard_analytics(
+    _=Depends(get_current_superuser),
+    db: AsyncSession = Depends(get_db)
+) -> Dict[str, Any]:
+    """Get a comprehensive dashboard with key metrics"""
+    # Get last 30 days of sales data
+    start_date = datetime.now() - timedelta(days=30)
+    end_date = datetime.now()
+
+    sales_data = await get_sales_analytics(start_date, end_date, _, db)
+    product_data = await get_product_analytics(_, db)
+    customer_data = await get_customer_analytics(_, db)
+
+    return {
+        "sales_summary": {
+            "total_revenue": sales_data["total_revenue"],
+            "total_orders": sales_data["total_orders"],
+            "average_order_value": sales_data["average_order_value"],
+            "daily_sales": sales_data["daily_sales"][-7:]  # Last 7 days
+        },
+        "product_summary": {
+            "total_products": product_data["total_products"],
+            "low_stock_products": product_data["low_stock_products"],
+            "top_selling_products": product_data["top_products"][:5]  # Top 5 products
+        },
+        "customer_summary": {
+            "total_customers": customer_data["total_customers"],
+            "average_customer_value": customer_data["average_customer_value"],
+            "customer_segments": customer_data["customer_segments"]
+        }
+    }
app/api/auth.py
ADDED
@@ -0,0 +1,69 @@
+from fastapi import APIRouter, Depends, HTTPException, status
+from fastapi.security import OAuth2PasswordBearer, OAuth2PasswordRequestForm
+from sqlalchemy.ext.asyncio import AsyncSession
+from sqlalchemy import select
+from ..core.security import create_access_token, verify_password, get_password_hash
+from ..db.database import get_db
+from ..db.models import User
+from datetime import timedelta
+from typing import Any
+
+router = APIRouter()
+oauth2_scheme = OAuth2PasswordBearer(tokenUrl="token")
+
+@router.post("/login")
+async def login(
+    form_data: OAuth2PasswordRequestForm = Depends(),
+    db: AsyncSession = Depends(get_db)
+) -> Any:
+    stmt = select(User).where(User.email == form_data.username)
+    result = await db.execute(stmt)
+    user = result.scalar_one_or_none()
+
+    if not user:
+        raise HTTPException(
+            status_code=status.HTTP_401_UNAUTHORIZED,
+            detail="Incorrect email or password",
+        )
+
+    if not verify_password(form_data.password, user.hashed_password):
+        raise HTTPException(
+            status_code=status.HTTP_401_UNAUTHORIZED,
+            detail="Incorrect email or password",
+        )
+
+    access_token = create_access_token(user.id)
+    return {"access_token": access_token, "token_type": "bearer"}
+
+@router.post("/register", response_model=User)
+async def register(
+    user_data: OAuth2PasswordRequestForm = Depends(),
+    db: AsyncSession = Depends(get_db)
+) -> Any:
+    # Check if user exists
+    stmt = select(User).where(User.email == user_data.username)
+    result = await db.execute(stmt)
+    existing_user = result.scalar_one_or_none()
+
+    if existing_user:
+        raise HTTPException(
+            status_code=status.HTTP_400_BAD_REQUEST,
+            detail="Email already registered",
+        )
+
+    # Create new user
+    user = User(
+        email=user_data.username,
+        hashed_password=get_password_hash(user_data.password),
+        full_name=user_data.username,  # You might want to add this as a separate field in the form
+        username=user_data.username,
+        is_active=True,
+        is_superuser=False,
+        roles=["user"]
+    )
+
+    db.add(user)
+    await db.commit()
+    await db.refresh(user)
+
+    return user
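
Because /login takes OAuth2PasswordRequestForm, clients must send form-encoded username and password fields rather than JSON. A minimal client sketch; the base URL and the /api/v1/auth prefix are assumptions, since the router mounting in app/main.py is not shown in this view:

# Hypothetical client call; URL and route prefix are assumptions.
import httpx

resp = httpx.post(
    "http://localhost:8000/api/v1/auth/login",
    data={"username": "admin@example.com", "password": "secret"},  # form-encoded body
)
token = resp.json()["access_token"]
auth_headers = {"Authorization": f"Bearer {token}"}  # reused for the protected routes below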
app/api/calendar.py
ADDED
@@ -0,0 +1,146 @@
+from fastapi import APIRouter, Depends, HTTPException, Query
+from sqlalchemy.ext.asyncio import AsyncSession
+from sqlalchemy import select, or_
+from typing import List, Dict, Any
+from datetime import datetime, timedelta
+from ..core.dependencies import get_current_active_user
+from ..db.database import get_db
+from ..db.models import Event, User
+
+router = APIRouter()
+
+@router.post("/events", response_model=Event)
+async def create_event(
+    event: Event,
+    current_user: User = Depends(get_current_active_user),
+    db: AsyncSession = Depends(get_db)
+) -> Event:
+    """Create a new calendar event"""
+    # Set the user_id from the authenticated user
+    event.user_id = current_user.id
+
+    # Add to database
+    db.add(event)
+    await db.commit()
+    await db.refresh(event)
+    return event
+
+@router.get("/events", response_model=List[Event])
+async def get_events(
+    start_date: datetime = Query(default=None),
+    end_date: datetime = Query(default=None),
+    include_attendee_events: bool = True,
+    current_user: User = Depends(get_current_active_user),
+    db: AsyncSession = Depends(get_db)
+) -> List[Event]:
+    """Get user's events within a date range"""
+    if not start_date:
+        start_date = datetime.now()
+    if not end_date:
+        end_date = start_date + timedelta(days=30)
+
+    query = select(Event).where(
+        Event.start_time >= start_date,
+        Event.end_time <= end_date
+    )
+
+    if include_attendee_events:
+        query = query.where(or_(
+            Event.user_id == current_user.id,
+            Event.attendees.contains([str(current_user.id)])
+        ))
+    else:
+        query = query.where(Event.user_id == current_user.id)
+
+    query = query.order_by(Event.start_time)
+    result = await db.execute(query)
+    return result.scalars().all()
+
+@router.put("/events/{event_id}", response_model=Event)
+async def update_event(
+    event_id: int,
+    event_update: Event,
+    current_user: User = Depends(get_current_active_user),
+    db: AsyncSession = Depends(get_db)
+) -> Event:
+    """Update an event"""
+    stmt = select(Event).where(
+        Event.id == event_id,
+        Event.user_id == current_user.id
+    )
+    result = await db.execute(stmt)
+    event = result.scalar_one_or_none()
+
+    if not event:
+        raise HTTPException(
+            status_code=404,
+            detail="Event not found or you don't have permission to update it"
+        )
+
+    # Update event fields
+    update_data = event_update.dict(exclude_unset=True)
+    for field, value in update_data.items():
+        setattr(event, field, value)
+
+    event.updated_at = datetime.utcnow()
+    await db.commit()
+    await db.refresh(event)
+    return event
+
+@router.delete("/events/{event_id}")
+async def delete_event(
+    event_id: int,
+    current_user: User = Depends(get_current_active_user),
+    db: AsyncSession = Depends(get_db)
+) -> Dict[str, bool]:
+    """Delete an event"""
+    stmt = select(Event).where(
+        Event.id == event_id,
+        Event.user_id == current_user.id
+    )
+    result = await db.execute(stmt)
+    event = result.scalar_one_or_none()
+
+    if not event:
+        raise HTTPException(
+            status_code=404,
+            detail="Event not found or you don't have permission to delete it"
+        )
+
+    await db.delete(event)
+    await db.commit()
+    return {"success": True}
+
+@router.post("/events/{event_id}/respond")
+async def respond_to_event(
+    event_id: int,
+    response: str,
+    current_user: User = Depends(get_current_active_user),
+    db: AsyncSession = Depends(get_db)
+) -> Dict[str, bool]:
+    """Respond to an event invitation"""
+    if response not in ["accepted", "declined", "maybe"]:
+        raise HTTPException(
+            status_code=400,
+            detail="Invalid response. Must be one of: accepted, declined, maybe"
+        )
+
+    stmt = select(Event).where(
+        Event.id == event_id,
+        Event.attendees.contains([str(current_user.id)])
+    )
+    result = await db.execute(stmt)
+    event = result.scalar_one_or_none()
+
+    if not event:
+        raise HTTPException(
+            status_code=404,
+            detail="Event not found or you are not invited to this event"
+        )
+
+    # Update the response in the attendee_responses dictionary
+    event.attendee_responses[str(current_user.id)] = response
+    event.updated_at = datetime.utcnow()
+
+    await db.commit()
+    return {"success": True}
app/api/files.py
ADDED
@@ -0,0 +1,53 @@
+from fastapi import APIRouter, UploadFile, File, Depends, HTTPException
+from fastapi.responses import FileResponse
+from typing import List
+from ..core.dependencies import get_current_active_user
+from ..utils.file_storage import file_storage
+from ..utils.logger import logger
+from pathlib import Path
+
+router = APIRouter()
+
+@router.post("/upload")
+async def upload_file(
+    file: UploadFile = File(...),
+    category: str = "documents",
+    current_user = Depends(get_current_active_user)
+) -> dict:
+    try:
+        file_path = await file_storage.save_file(file, category)
+        if not file_path:
+            raise HTTPException(status_code=400, detail="Failed to upload file")
+
+        return {
+            "filename": file.filename,
+            "stored_path": file_path,
+            "url": file_storage.get_file_url(file_path)
+        }
+    except ValueError as e:
+        raise HTTPException(status_code=400, detail=str(e))
+    except Exception as e:
+        logger.error(f"File upload error: {str(e)}")
+        raise HTTPException(status_code=500, detail="Internal server error")
+
+@router.delete("/{file_path:path}")
+async def delete_file(
+    file_path: str,
+    current_user = Depends(get_current_active_user)
+) -> dict:
+    success = await file_storage.delete_file(file_path)
+    if not success:
+        raise HTTPException(status_code=404, detail="File not found")
+
+    return {"status": "success", "message": "File deleted successfully"}
+
+@router.get("/{file_path:path}")
+async def get_file(
+    file_path: str,
+    current_user = Depends(get_current_active_user)
+):
+    full_path = Path("uploads") / file_path
+    if not full_path.exists():
+        raise HTTPException(status_code=404, detail="File not found")
+
+    return FileResponse(str(full_path))
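
The /upload route expects a multipart file plus a category value (a plain scalar parameter with a default, which FastAPI treats as a query parameter). A minimal client sketch under the same URL-prefix assumption as the login example earlier; the token value is a placeholder:

# Hypothetical client call; URL prefix and token are assumptions.
import httpx

with open("report.pdf", "rb") as fh:
    resp = httpx.post(
        "http://localhost:8000/api/v1/files/upload",
        params={"category": "documents"},
        files={"file": ("report.pdf", fh, "application/pdf")},
        headers={"Authorization": "Bearer <token>"},
    )
print(resp.json()["stored_path"])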
app/api/maintenance.py
ADDED
@@ -0,0 +1,133 @@
+from fastapi import APIRouter, Depends, HTTPException
+from sqlalchemy.ext.asyncio import AsyncSession
+from sqlalchemy import select, delete, func
+from typing import Dict, Any, List
+from datetime import datetime, timedelta
+from ..core.dependencies import get_current_active_user
+from ..db.database import get_db
+from ..db.models import User, Order, Notification, Event
+from ..utils.logger import logger
+
+router = APIRouter()
+
+@router.post("/sessions/cleanup")
+async def cleanup_sessions(
+    current_user: User = Depends(get_current_active_user),
+    db: AsyncSession = Depends(get_db)
+) -> Dict[str, int]:
+    """Manually trigger session cleanup"""
+    if "admin" not in current_user.roles:
+        raise HTTPException(
+            status_code=403,
+            detail="Only administrators can perform maintenance operations"
+        )
+
+    cutoff_date = datetime.utcnow() - timedelta(days=7)
+    stmt = delete(Event).where(Event.created_at < cutoff_date)
+    result = await db.execute(stmt)
+    await db.commit()
+
+    return {"deleted_sessions": result.rowcount}
+
+@router.post("/data/archive")
+async def archive_data(
+    current_user: User = Depends(get_current_active_user),
+    db: AsyncSession = Depends(get_db)
+) -> Dict[str, int]:
+    """Manually trigger data archiving"""
+    if "admin" not in current_user.roles:
+        raise HTTPException(
+            status_code=403,
+            detail="Only administrators can perform maintenance operations"
+        )
+
+    archive_date = datetime.utcnow() - timedelta(days=365)
+    archived = {}
+
+    # Archive old orders
+    orders_stmt = delete(Order).where(
+        Order.created_at < archive_date,
+        Order.status.in_(["delivered", "cancelled"])
+    )
+    orders_result = await db.execute(orders_stmt)
+    archived["orders"] = orders_result.rowcount
+
+    # Archive old notifications
+    notif_stmt = delete(Notification).where(
+        Notification.created_at < archive_date,
+        Notification.read == True
+    )
+    notif_result = await db.execute(notif_stmt)
+    archived["notifications"] = notif_result.rowcount
+
+    await db.commit()
+    return archived
+
+@router.get("/health")
+async def check_health(
+    current_user: User = Depends(get_current_active_user),
+    db: AsyncSession = Depends(get_db)
+) -> Dict[str, Any]:
+    """Check system health metrics"""
+    if "admin" not in current_user.roles:
+        raise HTTPException(
+            status_code=403,
+            detail="Only administrators can view system health"
+        )
+
+    try:
+        # Check database connection
+        await db.execute(select(1))
+
+        # Get database statistics
+        total_users = await db.scalar(select(func.count()).select_from(User))
+        total_orders = await db.scalar(select(func.count()).select_from(Order))
+        total_notifications = await db.scalar(select(func.count()).select_from(Notification))
+
+        return {
+            "status": "healthy",
+            "timestamp": datetime.utcnow(),
+            "database": {
+                "connected": True,
+                "total_users": total_users,
+                "total_orders": total_orders,
+                "total_notifications": total_notifications
+            }
+        }
+    except Exception as e:
+        logger.error(f"Health check error: {str(e)}")
+        return {
+            "status": "unhealthy",
+            "error": str(e),
+            "timestamp": datetime.utcnow()
+        }
+
+@router.post("/database/maintenance")
+async def perform_db_maintenance(
+    current_user: User = Depends(get_current_active_user),
+    db: AsyncSession = Depends(get_db)
+) -> Dict[str, Any]:
+    """Manually trigger database maintenance"""
+    if "admin" not in current_user.roles:
+        raise HTTPException(
+            status_code=403,
+            detail="Only administrators can perform maintenance operations"
+        )
+
+    try:
+        # Cleanup expired sessions
+        await cleanup_sessions(current_user, db)
+
+        # Run VACUUM ANALYZE (requires raw SQL)
+        await db.execute("VACUUM ANALYZE;")
+
+        return {
+            "status": "success",
+            "message": "Database maintenance completed successfully"
+        }
+    except Exception as e:
+        logger.error(f"Database maintenance error: {str(e)}")
+        raise HTTPException(
+            status_code=500,
+            detail=f"Database maintenance failed: {str(e)}"
+        )
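
Two caveats around the VACUUM call above: SQLAlchemy 2.x only executes raw SQL wrapped in text(), and PostgreSQL refuses to run VACUUM inside the transaction an AsyncSession keeps open. A common workaround runs it on an autocommit connection — a minimal sketch, assuming an async engine comparable to whatever app/db/database.py (not shown in this view) creates:

# Hypothetical sketch: VACUUM ANALYZE with SQLAlchemy 2.x asyncio, outside a transaction.
from sqlalchemy import text
from sqlalchemy.ext.asyncio import create_async_engine

engine = create_async_engine("postgresql+asyncpg://localhost/postgres")  # placeholder URL

async def vacuum_analyze() -> None:
    # Autocommit isolation keeps VACUUM out of an explicit transaction block.
    autocommit_engine = engine.execution_options(isolation_level="AUTOCOMMIT")
    async with autocommit_engine.connect() as conn:
        await conn.execute(text("VACUUM ANALYZE"))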
app/api/notifications.py
ADDED
@@ -0,0 +1,84 @@
+from fastapi import APIRouter, Depends, HTTPException, Query
+from sqlalchemy.ext.asyncio import AsyncSession
+from sqlalchemy import select, update
+from typing import List, Dict, Any, Optional
+from ..core.dependencies import get_current_active_user
+from ..db.database import get_db
+from ..db.models import Notification, User
+
+router = APIRouter()
+
+@router.get("/")
+async def get_notifications(
+    skip: int = Query(0, ge=0),
+    limit: int = Query(50, ge=1, le=100),
+    unread_only: bool = False,
+    current_user: User = Depends(get_current_active_user),
+    db: AsyncSession = Depends(get_db)
+) -> List[Notification]:
+    """Get user's notifications"""
+    query = select(Notification).where(Notification.user_id == current_user.id)
+
+    if unread_only:
+        query = query.where(Notification.read == False)
+
+    query = query.order_by(Notification.created_at.desc()).offset(skip).limit(limit)
+    result = await db.execute(query)
+    return result.scalars().all()
+
+@router.post("/mark-read/{notification_id}")
+async def mark_notification_read(
+    notification_id: int,
+    current_user: User = Depends(get_current_active_user),
+    db: AsyncSession = Depends(get_db)
+) -> Dict[str, bool]:
+    """Mark a notification as read"""
+    stmt = select(Notification).where(
+        Notification.id == notification_id,
+        Notification.user_id == current_user.id
+    )
+    result = await db.execute(stmt)
+    notification = result.scalar_one_or_none()
+
+    if not notification:
+        raise HTTPException(status_code=404, detail="Notification not found")
+
+    notification.read = True
+    await db.commit()
+    return {"success": True}
+
+@router.post("/mark-all-read")
+async def mark_all_notifications_read(
+    current_user: User = Depends(get_current_active_user),
+    db: AsyncSession = Depends(get_db)
+) -> Dict[str, int]:
+    """Mark all notifications as read"""
+    stmt = update(Notification).where(
+        Notification.user_id == current_user.id,
+        Notification.read == False
+    ).values(read=True)
+
+    result = await db.execute(stmt)
+    await db.commit()
+    return {"marked_count": result.rowcount}
+
+@router.delete("/{notification_id}")
+async def delete_notification(
+    notification_id: int,
+    current_user: User = Depends(get_current_active_user),
+    db: AsyncSession = Depends(get_db)
+) -> Dict[str, bool]:
+    """Delete a notification"""
+    stmt = select(Notification).where(
+        Notification.id == notification_id,
+        Notification.user_id == current_user.id
+    )
+    result = await db.execute(stmt)
+    notification = result.scalar_one_or_none()
+
+    if not notification:
+        raise HTTPException(status_code=404, detail="Notification not found")
+
+    await db.delete(notification)
+    await db.commit()
+    return {"success": True}
app/api/orders.py
ADDED
@@ -0,0 +1,142 @@
+from fastapi import APIRouter, HTTPException, status, Depends
+from sqlalchemy.ext.asyncio import AsyncSession
+from sqlalchemy import select
+from typing import List, Optional
+from ..db.database import get_db
+from ..db.models import Order, Product, OrderItem, User
+from datetime import datetime
+
+router = APIRouter()
+
+@router.post("/", response_model=Order)
+async def create_order(
+    order: Order,
+    db: AsyncSession = Depends(get_db)
+) -> Order:
+    # Calculate total and validate products
+    total = 0
+    order_items = []
+
+    for item in order.items:
+        # Get product
+        stmt = select(Product).where(Product.id == item.product_id)
+        result = await db.execute(stmt)
+        product = result.scalar_one_or_none()
+
+        if not product:
+            raise HTTPException(
+                status_code=404,
+                detail=f"Product {item.product_id} not found"
+            )
+
+        if product.inventory_count < item.quantity:
+            raise HTTPException(
+                status_code=400,
+                detail=f"Insufficient inventory for product {item.product_id}"
+            )
+
+        # Update inventory
+        product.inventory_count -= item.quantity
+        total += product.price * item.quantity
+
+        # Create order item
+        order_item = OrderItem(
+            product_id=item.product_id,
+            quantity=item.quantity,
+            price=product.price
+        )
+        order_items.append(order_item)
+
+    # Create order
+    db_order = Order(
+        customer_id=order.customer_id,
+        total_amount=total,
+        status="pending",
+        items=order_items,
+        created_at=datetime.utcnow(),
+        updated_at=datetime.utcnow()
+    )
+
+    db.add(db_order)
+    await db.commit()
+    await db.refresh(db_order)
+    return db_order
+
+@router.get("/", response_model=List[Order])
+async def list_orders(
+    skip: int = 0,
+    limit: int = 10,
+    status: Optional[str] = None,
+    db: AsyncSession = Depends(get_db)
+) -> List[Order]:
+    query = select(Order)
+    if status:
+        query = query.where(Order.status == status)
+
+    query = query.offset(skip).limit(limit)
+    result = await db.execute(query)
+    return result.scalars().all()
+
+@router.get("/{order_id}", response_model=Order)
+async def get_order(
+    order_id: int,
+    db: AsyncSession = Depends(get_db)
+) -> Order:
+    stmt = select(Order).where(Order.id == order_id)
+    result = await db.execute(stmt)
+    order = result.scalar_one_or_none()
+
+    if not order:
+        raise HTTPException(status_code=404, detail="Order not found")
+    return order
+
+@router.put("/{order_id}/status", response_model=Order)
+async def update_order_status(
+    order_id: int,
+    status: str,
+    db: AsyncSession = Depends(get_db)
+) -> Order:
+    valid_statuses = ["pending", "processing", "shipped", "delivered", "cancelled"]
+    if status not in valid_statuses:
+        raise HTTPException(status_code=400, detail="Invalid status")
+
+    stmt = select(Order).where(Order.id == order_id)
+    result = await db.execute(stmt)
+    order = result.scalar_one_or_none()
+
+    if not order:
+        raise HTTPException(status_code=404, detail="Order not found")
+
+    order.status = status
+    order.updated_at = datetime.utcnow()
+
+    await db.commit()
+    await db.refresh(order)
+    return order
+
+@router.delete("/{order_id}")
+async def delete_order(
+    order_id: int,
+    db: AsyncSession = Depends(get_db)
+):
+    # Get the order
+    stmt = select(Order).where(Order.id == order_id)
+    result = await db.execute(stmt)
+    order = result.scalar_one_or_none()
+
+    if not order:
+        raise HTTPException(status_code=404, detail="Order not found")
+
+    # Restore inventory for each product
+    for item in order.items:
+        product_stmt = select(Product).where(Product.id == item.product_id)
+        product_result = await db.execute(product_stmt)
+        product = product_result.scalar_one_or_none()
+
+        if product:
+            product.inventory_count += item.quantity
+
+    await db.delete(order)
+    await db.commit()
+
+    return {"status": "success", "message": "Order deleted and inventory restored"}
app/api/products.py
ADDED
@@ -0,0 +1,86 @@
+from fastapi import APIRouter, HTTPException, status, Depends
+from sqlalchemy.ext.asyncio import AsyncSession
+from sqlalchemy import select
+from typing import List, Optional
+from ..db.database import get_db
+from ..db.models import Product
+from datetime import datetime
+
+router = APIRouter()
+
+@router.post("/", response_model=Product)
+async def create_product(
+    product: Product,
+    db: AsyncSession = Depends(get_db)
+) -> Product:
+    db.add(product)
+    await db.commit()
+    await db.refresh(product)
+    return product
+
+@router.get("/", response_model=List[Product])
+async def list_products(
+    skip: int = 0,
+    limit: int = 10,
+    category: Optional[str] = None,
+    db: AsyncSession = Depends(get_db)
+) -> List[Product]:
+    query = select(Product)
+    if category:
+        query = query.where(Product.category == category)
+
+    query = query.offset(skip).limit(limit)
+    result = await db.execute(query)
+    return result.scalars().all()
+
+@router.get("/{product_id}", response_model=Product)
+async def get_product(
+    product_id: int,
+    db: AsyncSession = Depends(get_db)
+) -> Product:
+    stmt = select(Product).where(Product.id == product_id)
+    result = await db.execute(stmt)
+    product = result.scalar_one_or_none()
+
+    if not product:
+        raise HTTPException(status_code=404, detail="Product not found")
+    return product
+
+@router.put("/{product_id}", response_model=Product)
+async def update_product(
+    product_id: int,
+    product_update: Product,
+    db: AsyncSession = Depends(get_db)
+) -> Product:
+    stmt = select(Product).where(Product.id == product_id)
+    result = await db.execute(stmt)
+    product = result.scalar_one_or_none()
+
+    if not product:
+        raise HTTPException(status_code=404, detail="Product not found")
+
+    # Update product fields
+    update_data = product_update.dict(exclude_unset=True)
+    for field, value in update_data.items():
+        setattr(product, field, value)
+
+    product.updated_at = datetime.utcnow()
+    await db.commit()
+    await db.refresh(product)
+    return product
+
+@router.delete("/{product_id}")
+async def delete_product(
+    product_id: int,
+    db: AsyncSession = Depends(get_db)
+):
+    stmt = select(Product).where(Product.id == product_id)
+    result = await db.execute(stmt)
+    product = result.scalar_one_or_none()
+
+    if not product:
+        raise HTTPException(status_code=404, detail="Product not found")
+
+    await db.delete(product)
+    await db.commit()
+    return {"status": "success", "message": "Product deleted"}
app/api/scheduler.py
ADDED
|
@@ -0,0 +1,203 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from fastapi import APIRouter, Depends, HTTPException
|
| 2 |
+
from sqlalchemy.ext.asyncio import AsyncSession
|
| 3 |
+
from sqlalchemy import select, delete
|
| 4 |
+
from typing import List, Dict, Any, Optional
|
| 5 |
+
from datetime import datetime, timedelta
|
| 6 |
+
from ..core.dependencies import get_current_active_user
|
| 7 |
+
from ..db.database import get_db
|
| 8 |
+
from ..db.models import Event, User
|
| 9 |
+
from pydantic import BaseModel
|
| 10 |
+
|
| 11 |
+
router = APIRouter()
|
| 12 |
+
|
| 13 |
+
class RecurringEventCreate(BaseModel):
|
| 14 |
+
title: str
|
| 15 |
+
description: str
|
| 16 |
+
start_time: datetime
|
| 17 |
+
end_time: datetime
|
| 18 |
+
recurrence_pattern: str
|
| 19 |
+
recurrence_end_date: Optional[datetime] = None
|
| 20 |
+
attendees: List[str] = []
|
| 21 |
+
reminder_minutes: int = 30
|
| 22 |
+
|
| 23 |
+
class RecurringEventUpdate(BaseModel):
|
| 24 |
+
title: Optional[str] = None
|
| 25 |
+
description: Optional[str] = None
|
| 26 |
+
start_time: Optional[datetime] = None
|
| 27 |
+
end_time: Optional[datetime] = None
|
| 28 |
+
attendees: Optional[List[str]] = None
|
| 29 |
+
reminder_minutes: Optional[int] = None
|
| 30 |
+
|
| 31 |
+
@router.post("/recurring-events")
|
| 32 |
+
async def create_recurring_event(
|
| 33 |
+
event_data: RecurringEventCreate,
|
| 34 |
+
current_user: User = Depends(get_current_active_user),
|
| 35 |
+
db: AsyncSession = Depends(get_db)
|
| 36 |
+
) -> List[Dict[str, Any]]:
|
| 37 |
+
"""Create a new recurring event"""
|
| 38 |
+
if event_data.recurrence_pattern not in ["daily", "weekly", "monthly", "yearly"]:
|
| 39 |
+
raise HTTPException(
|
| 40 |
+
status_code=400,
|
| 41 |
+
detail="Invalid recurrence pattern. Must be one of: daily, weekly, monthly, yearly"
|
| 42 |
+
)
|
| 43 |
+
|
| 44 |
+
if event_data.start_time >= event_data.end_time:
|
| 45 |
+
raise HTTPException(
|
| 46 |
+
status_code=400,
|
| 47 |
+
detail="End time must be after start time"
|
| 48 |
+
)
|
| 49 |
+
|
| 50 |
+
events = []
|
| 51 |
+
current_start = event_data.start_time
|
| 52 |
+
current_end = event_data.end_time
|
| 53 |
+
duration = event_data.end_time - event_data.start_time
|
| 54 |
+
sequence_number = 0
|
| 55 |
+
|
| 56 |
+
while True:
|
| 57 |
+
if event_data.recurrence_end_date and current_start > event_data.recurrence_end_date:
|
| 58 |
+
break
|
| 59 |
+
|
| 60 |
+
event = Event(
|
| 61 |
+
user_id=current_user.id,
|
| 62 |
+
title=event_data.title,
|
| 63 |
+
description=event_data.description,
|
| 64 |
+
start_time=current_start,
|
| 65 |
+
end_time=current_end,
|
| 66 |
+
attendees=event_data.attendees,
|
| 67 |
+
reminder_minutes=event_data.reminder_minutes,
|
| 68 |
+
            is_recurring=True,
            recurrence_pattern=event_data.recurrence_pattern,
            sequence_number=sequence_number,
            status="scheduled"
        )
        db.add(event)
        events.append(event)

        # Calculate next occurrence
        sequence_number += 1
        if event_data.recurrence_pattern == "daily":
            current_start += timedelta(days=1)
        elif event_data.recurrence_pattern == "weekly":
            current_start += timedelta(weeks=1)
        elif event_data.recurrence_pattern == "monthly":
            # Add one month (approximately)
            if current_start.month == 12:
                current_start = current_start.replace(year=current_start.year + 1, month=1)
            else:
                current_start = current_start.replace(month=current_start.month + 1)
        elif event_data.recurrence_pattern == "yearly":
            current_start = current_start.replace(year=current_start.year + 1)

        current_end = current_start + duration

    await db.commit()

    # Refresh all events to get their IDs
    for event in events:
        await db.refresh(event)

    return events

@router.put("/recurring-events/{event_id}")
async def update_recurring_event(
    event_id: int,
    event_update: RecurringEventUpdate,
    update_future: bool = True,
    current_user: User = Depends(get_current_active_user),
    db: AsyncSession = Depends(get_db)
) -> List[Dict[str, Any]]:
    """Update a recurring event and optionally its future occurrences"""
    update_data = event_update.dict(exclude_unset=True)
    if not update_data:
        raise HTTPException(status_code=400, detail="No update data provided")

    # Get the original event
    stmt = select(Event).where(
        Event.id == event_id,
        Event.user_id == current_user.id
    )
    result = await db.execute(stmt)
    event = result.scalar_one_or_none()

    if not event:
        raise HTTPException(
            status_code=404,
            detail="Event not found or you don't have permission to update it"
        )

    updated_events = [event]

    # Update future occurrences if requested
    if update_future and event.is_recurring:
        future_stmt = select(Event).where(
            Event.recurrence_group == event.recurrence_group,
            Event.sequence_number > event.sequence_number,
            Event.user_id == current_user.id
        )
        future_result = await db.execute(future_stmt)
        future_events = future_result.scalars().all()

        for future_event in future_events:
            for field, value in update_data.items():
                setattr(future_event, field, value)
            updated_events.append(future_event)

    await db.commit()
    return updated_events

@router.delete("/recurring-events/{event_id}")
async def delete_recurring_event(
    event_id: int,
    delete_future: bool = True,
    current_user: User = Depends(get_current_active_user),
    db: AsyncSession = Depends(get_db)
) -> Dict[str, bool]:
    """Delete a recurring event and optionally its future occurrences"""
    stmt = select(Event).where(
        Event.id == event_id,
        Event.user_id == current_user.id
    )
    result = await db.execute(stmt)
    event = result.scalar_one_or_none()

    if not event:
        raise HTTPException(
            status_code=404,
            detail="Event not found or you don't have permission to delete it"
        )

    if delete_future and event.is_recurring:
        delete_stmt = delete(Event).where(
            Event.recurrence_group == event.recurrence_group,
            Event.sequence_number >= event.sequence_number,
            Event.user_id == current_user.id
        )
        await db.execute(delete_stmt)
    else:
        await db.delete(event)

    await db.commit()
    return {"success": True}

@router.get("/recurring-events/upcoming")
async def get_upcoming_recurring_events(
    days: int = 30,
    current_user: User = Depends(get_current_active_user),
    db: AsyncSession = Depends(get_db)
) -> List[Dict[str, Any]]:
    """Get upcoming recurring events for the next N days"""
    if days <= 0 or days > 365:
        raise HTTPException(
            status_code=400,
            detail="Days parameter must be between 1 and 365"
        )

    end_date = datetime.utcnow() + timedelta(days=days)
    stmt = select(Event).where(
        Event.user_id == current_user.id,
        Event.start_time <= end_date,
        Event.is_recurring == True
    ).order_by(Event.start_time)

    result = await db.execute(stmt)
    return result.scalars().all()

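A caveat on the "monthly" branch above: `datetime.replace(month=...)` raises `ValueError` when the source day does not exist in the target month (e.g. January 31 advancing to February). Below is a minimal sketch of a day-clamping increment; the `add_one_month` helper is illustrative and not part of this upload.

import calendar
from datetime import datetime

def add_one_month(dt: datetime) -> datetime:
    """Advance dt by one month, clamping the day to the target month's length."""
    year, month = (dt.year + 1, 1) if dt.month == 12 else (dt.year, dt.month + 1)
    day = min(dt.day, calendar.monthrange(year, month)[1])  # monthrange -> (first weekday, days in month)
    return dt.replace(year=year, month=month, day=day)

# Example: 2025-01-31 becomes 2025-02-28 instead of raising ValueError
print(add_one_month(datetime(2025, 1, 31)))
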
app/api/users.py
ADDED
@@ -0,0 +1,120 @@
from fastapi import APIRouter, HTTPException, status, Depends
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select
from typing import List, Optional
from ..db.database import get_db
from ..db.models import User
from ..core.dependencies import get_current_superuser, get_current_active_user
from ..core.security import get_password_hash

router = APIRouter()

@router.get("/me", response_model=User)
async def read_user_me(current_user: User = Depends(get_current_active_user)):
    return current_user

@router.get("/", response_model=List[User])
async def list_users(
    skip: int = 0,
    limit: int = 10,
    current_user: User = Depends(get_current_superuser),
    db: AsyncSession = Depends(get_db)
) -> List[User]:
    stmt = select(User).offset(skip).limit(limit)
    result = await db.execute(stmt)
    return result.scalars().all()

@router.post("/", response_model=User)
async def create_user(
    user: User,
    current_user: User = Depends(get_current_superuser),
    db: AsyncSession = Depends(get_db)
) -> User:
    # Check if email exists
    stmt = select(User).where(User.email == user.email)
    result = await db.execute(stmt)
    if result.scalar_one_or_none():
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="Email already registered"
        )

    # Hash password if provided
    if hasattr(user, "password"):
        user.hashed_password = get_password_hash(user.password)
        delattr(user, "password")

    db.add(user)
    await db.commit()
    await db.refresh(user)
    return user

@router.put("/{user_id}", response_model=User)
async def update_user(
    user_id: int,
    user_update: User,
    current_user: User = Depends(get_current_superuser),
    db: AsyncSession = Depends(get_db)
) -> User:
    stmt = select(User).where(User.id == user_id)
    result = await db.execute(stmt)
    db_user = result.scalar_one_or_none()

    if not db_user:
        raise HTTPException(status_code=404, detail="User not found")

    # Update user fields
    update_data = user_update.dict(exclude_unset=True)
    if "password" in update_data:
        update_data["hashed_password"] = get_password_hash(update_data.pop("password"))

    for field, value in update_data.items():
        setattr(db_user, field, value)

    await db.commit()
    await db.refresh(db_user)
    return db_user

@router.delete("/{user_id}")
async def delete_user(
    user_id: int,
    current_user: User = Depends(get_current_superuser),
    db: AsyncSession = Depends(get_db)
):
    stmt = select(User).where(User.id == user_id)
    result = await db.execute(stmt)
    user = result.scalar_one_or_none()

    if not user:
        raise HTTPException(status_code=404, detail="User not found")

    await db.delete(user)
    await db.commit()
    return {"status": "success", "message": "User deleted"}

@router.put("/{user_id}/roles", response_model=User)
async def update_user_roles(
    user_id: int,
    roles: List[str],
    current_user: User = Depends(get_current_superuser),
    db: AsyncSession = Depends(get_db)
) -> User:
    valid_roles = ["user", "admin", "manager", "support"]
    invalid_roles = [role for role in roles if role not in valid_roles]
    if invalid_roles:
        raise HTTPException(
            status_code=400,
            detail=f"Invalid roles: {', '.join(invalid_roles)}"
        )

    stmt = select(User).where(User.id == user_id)
    result = await db.execute(stmt)
    user = result.scalar_one_or_none()

    if not user:
        raise HTTPException(status_code=404, detail="User not found")

    user.roles = roles
    await db.commit()
    await db.refresh(user)
    return user

app/core/__init__.py
ADDED
File without changes

app/core/__pycache__/__init__.cpython-312.pyc
ADDED
Binary file (173 Bytes)

app/core/__pycache__/config.cpython-312.pyc
ADDED
Binary file (1.6 kB)

app/core/config.py
ADDED
@@ -0,0 +1,34 @@
from pydantic_settings import BaseSettings
from typing import Optional

class Settings(BaseSettings):
    API_V1_STR: str = "/api/v1"
    PROJECT_NAME: str = "Admin Dashboard"
    VERSION: str = "1.0.0"

    # PostgreSQL Database settings
    DATABASE_URL: str = "postgresql+asyncpg://postgres:Lovyelias5584.@db.mqyrkmsdgugdhxiucukb.supabase.co:5432/postgres"

    # JWT Settings
    SECRET_KEY: str = "your-secret-key-here"  # Change in production
    ALGORITHM: str = "HS256"
    ACCESS_TOKEN_EXPIRE_MINUTES: int = 30

    # Redis settings
    REDIS_HOST: str = "localhost"
    REDIS_PORT: int = 6379

    # Email settings
    MAIL_USERNAME: str = "yungdml31@gmail.com"
    MAIL_PASSWORD: str = ""
    MAIL_FROM: str = "admin@angelo.com"
    MAIL_PORT: int = 587
    MAIL_SERVER: str = "smtp.gmail.com"

    # Frontend URL for email links
    FRONTEND_URL: str = "http://localhost:3000"

    class Config:
        case_sensitive = True

settings = Settings()

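Because `Settings` extends `pydantic_settings.BaseSettings`, any field above can be overridden with an environment variable of the same name at startup. A minimal sketch, assuming the package is importable as `app`; the values are illustrative:

import os

# Environment variables take precedence over the hard-coded defaults above.
os.environ["SECRET_KEY"] = "a-real-secret"
os.environ["REDIS_PORT"] = "6380"

from app.core.config import Settings

settings = Settings()
assert settings.SECRET_KEY == "a-real-secret"
assert settings.REDIS_PORT == 6380  # coerced to int by pydantic
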
app/core/dependencies.py
ADDED
@@ -0,0 +1,52 @@
from fastapi import Depends, HTTPException, status
from fastapi.security import OAuth2PasswordBearer
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select
from jose import JWTError, jwt
from ..db.database import get_db
from ..db.models import User
from ..core.config import settings

oauth2_scheme = OAuth2PasswordBearer(tokenUrl=f"{settings.API_V1_STR}/auth/login")

async def get_current_user(
    token: str = Depends(oauth2_scheme),
    db: AsyncSession = Depends(get_db)
):
    credentials_exception = HTTPException(
        status_code=status.HTTP_401_UNAUTHORIZED,
        detail="Could not validate credentials",
        headers={"WWW-Authenticate": "Bearer"},
    )

    try:
        payload = jwt.decode(token, settings.SECRET_KEY, algorithms=[settings.ALGORITHM])
        user_id: str = payload.get("sub")
        if user_id is None:
            raise credentials_exception
    except JWTError:
        raise credentials_exception

    stmt = select(User).where(User.id == int(user_id))
    result = await db.execute(stmt)
    user = result.scalar_one_or_none()

    if user is None:
        raise credentials_exception
    return user

async def get_current_active_user(
    current_user: User = Depends(get_current_user)
):
    if not current_user.is_active:
        raise HTTPException(status_code=400, detail="Inactive user")
    return current_user

async def get_current_superuser(
    current_user: User = Depends(get_current_user)
):
    if not current_user.is_superuser:
        raise HTTPException(
            status_code=403, detail="The user doesn't have enough privileges"
        )
    return current_user

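A minimal sketch of how these dependencies are consumed by a router, in the style of the API modules in this upload; the `/admin-only` and `/profile` paths are illustrative, not endpoints defined elsewhere in the repo:

from fastapi import APIRouter, Depends

from app.core.dependencies import get_current_active_user, get_current_superuser
from app.db.models import User

router = APIRouter()

@router.get("/admin-only")
async def admin_only(current_user: User = Depends(get_current_superuser)):
    # Reached only with a valid Bearer token belonging to a superuser;
    # otherwise the dependency raises 401/403 before this body runs.
    return {"admin": current_user.email}

@router.get("/profile")
async def profile(current_user: User = Depends(get_current_active_user)):
    return {"email": current_user.email, "active": current_user.is_active}
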
app/core/security.py
ADDED
@@ -0,0 +1,23 @@
from datetime import datetime, timedelta
from typing import Any, Optional
from jose import jwt
from passlib.context import CryptContext
from .config import settings

pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")

def create_access_token(subject: Any, expires_delta: Optional[timedelta] = None) -> str:
    if expires_delta:
        expire = datetime.utcnow() + expires_delta
    else:
        expire = datetime.utcnow() + timedelta(minutes=settings.ACCESS_TOKEN_EXPIRE_MINUTES)

    to_encode = {"exp": expire, "sub": str(subject)}
    encoded_jwt = jwt.encode(to_encode, settings.SECRET_KEY, algorithm=settings.ALGORITHM)
    return encoded_jwt

def verify_password(plain_password: str, hashed_password: str) -> bool:
    return pwd_context.verify(plain_password, hashed_password)

def get_password_hash(password: str) -> str:
    return pwd_context.hash(password)

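A minimal round-trip sketch for the helpers above: hash and verify a password, then issue a token and decode it the same way `get_current_user` does. Assumes the package is importable as `app`; the subject value is illustrative.

from datetime import timedelta
from jose import jwt

from app.core.config import settings
from app.core.security import create_access_token, get_password_hash, verify_password

hashed = get_password_hash("admin123")
assert verify_password("admin123", hashed)

token = create_access_token(subject=42, expires_delta=timedelta(minutes=5))
payload = jwt.decode(token, settings.SECRET_KEY, algorithms=[settings.ALGORITHM])
assert payload["sub"] == "42"  # the subject is stored as a string
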
app/db/__init__.py
ADDED
File without changes

app/db/__pycache__/__init__.cpython-312.pyc
ADDED
Binary file (171 Bytes)

app/db/__pycache__/database.cpython-312.pyc
ADDED
Binary file (1.31 kB)

app/db/__pycache__/models.cpython-312.pyc
ADDED
Binary file (7.66 kB)

app/db/database.py
ADDED
@@ -0,0 +1,30 @@
from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession, async_sessionmaker
from sqlalchemy.orm import sessionmaker, declarative_base
from sqlalchemy import create_engine
from ..core.config import settings

# Create async engine for FastAPI
async_engine = create_async_engine(
    settings.DATABASE_URL,
    echo=True,
    future=True,
    pool_pre_ping=True
)

# Create async session factory
AsyncSessionLocal = async_sessionmaker(
    bind=async_engine,
    class_=AsyncSession,
    expire_on_commit=False
)

# Create declarative base for models
Base = declarative_base()

# Database dependency
async def get_db():
    async with AsyncSessionLocal() as session:
        try:
            yield session
        finally:
            await session.close()

app/db/init_db.py
ADDED
@@ -0,0 +1,77 @@
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from ..core.config import settings
from ..core.security import get_password_hash
from datetime import datetime
from .models import Base, User, Product
import asyncio

def init_db():
    # Create synchronous engine for initialization
    engine = create_engine(
        settings.DATABASE_URL.replace("+asyncpg", ""),
        echo=True
    )

    # Create all tables
    Base.metadata.create_all(bind=engine)

    # Create session
    SessionLocal = sessionmaker(bind=engine)
    session = SessionLocal()

    try:
        # Create default admin user if not exists
        admin_user = session.query(User).filter_by(email="admin@example.com").first()
        if not admin_user:
            admin_user = User(
                email="admin@example.com",
                username="admin",
                full_name="System Administrator",
                hashed_password=get_password_hash("admin123"),  # Change in production
                is_active=True,
                is_superuser=True,
                roles=["admin"],
                created_at=datetime.utcnow()
            )
            session.add(admin_user)
            print("Created default admin user.")

        # Create default product categories as products
        categories = [
            "Soups & Stews",
            "Rice Dishes",
            "Swallow & Fufu",
            "Snacks & Small Chops",
            "Protein & Meat",
            "Drinks"
        ]

        for category in categories:
            exists = session.query(Product).filter_by(name=category).first()
            if not exists:
                product = Product(
                    name=category,
                    description=f"Category: {category}",
                    price=0.0,  # Category products have zero price
                    category=category,
                    inventory_count=0,  # Categories don't have inventory
                    seller_id=admin_user.id if admin_user else 1,  # Link to admin user
                    created_at=datetime.utcnow()
                )
                session.add(product)

        print("Initialized product categories.")

        # Commit changes
        session.commit()

    except Exception as e:
        print(f"Error during initialization: {e}")
        session.rollback()
        raise
    finally:
        session.close()

if __name__ == "__main__":
    init_db()

app/db/models.py
ADDED
@@ -0,0 +1,131 @@
from sqlalchemy import Column, Integer, String, Boolean, DateTime, Float, ForeignKey, ARRAY, JSON, Table
from sqlalchemy.orm import relationship, mapped_column, Mapped
from sqlalchemy.dialects.postgresql import JSONB
from datetime import datetime
from typing import List, Optional
from .database import Base

# Association tables for many-to-many relationships
user_roles = Table(
    'user_roles',
    Base.metadata,
    Column('user_id', Integer, ForeignKey('users.id')),
    Column('role_id', Integer, ForeignKey('roles.id'))
)

class User(Base):
    __tablename__ = "users"

    id: Mapped[int] = mapped_column(primary_key=True)
    email: Mapped[str] = mapped_column(String, unique=True, index=True)
    username: Mapped[str] = mapped_column(String, unique=True, index=True)
    full_name: Mapped[str] = mapped_column(String)
    hashed_password: Mapped[str] = mapped_column(String)
    is_active: Mapped[bool] = mapped_column(Boolean, default=True)
    is_superuser: Mapped[bool] = mapped_column(Boolean, default=False)
    roles: Mapped[List[str]] = mapped_column(ARRAY(String), default=list)
    created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow)

    # Relationships
    products = relationship("Product", back_populates="seller")
    orders = relationship("Order", back_populates="customer")
    notifications = relationship("Notification", back_populates="user")

class Product(Base):
    __tablename__ = "products"

    id: Mapped[int] = mapped_column(primary_key=True)
    name: Mapped[str] = mapped_column(String, index=True)
    description: Mapped[str] = mapped_column(String)
    price: Mapped[float] = mapped_column(Float)
    category: Mapped[str] = mapped_column(String, index=True)
    inventory_count: Mapped[int] = mapped_column(Integer)
    seller_id: Mapped[int] = mapped_column(ForeignKey("users.id"))
    created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow)
    updated_at: Mapped[datetime] = mapped_column(
        DateTime,
        default=datetime.utcnow,
        onupdate=datetime.utcnow
    )

    # Relationships
    seller = relationship("User", back_populates="products")
    order_items = relationship("OrderItem", back_populates="product")

class Order(Base):
    __tablename__ = "orders"

    id: Mapped[int] = mapped_column(primary_key=True)
    customer_id: Mapped[int] = mapped_column(ForeignKey("users.id"))
    total_amount: Mapped[float] = mapped_column(Float)
    status: Mapped[str] = mapped_column(String)
    created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow)
    updated_at: Mapped[datetime] = mapped_column(
        DateTime,
        default=datetime.utcnow,
        onupdate=datetime.utcnow
    )

    # Relationships
    customer = relationship("User", back_populates="orders")
    items = relationship("OrderItem", back_populates="order", cascade="all, delete-orphan")

class OrderItem(Base):
    __tablename__ = "order_items"

    id: Mapped[int] = mapped_column(primary_key=True)
    order_id: Mapped[int] = mapped_column(ForeignKey("orders.id"))
    product_id: Mapped[int] = mapped_column(ForeignKey("products.id"))
    quantity: Mapped[int] = mapped_column(Integer)
    price: Mapped[float] = mapped_column(Float)

    # Relationships
    order = relationship("Order", back_populates="items")
    product = relationship("Product", back_populates="order_items")

class Notification(Base):
    __tablename__ = "notifications"

    id: Mapped[int] = mapped_column(primary_key=True)
    user_id: Mapped[int] = mapped_column(ForeignKey("users.id"))
    title: Mapped[str] = mapped_column(String)
    message: Mapped[str] = mapped_column(String)
    type: Mapped[str] = mapped_column(String)
    data: Mapped[Optional[dict]] = mapped_column(JSONB)
    created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow)
    read: Mapped[bool] = mapped_column(Boolean, default=False)

    # Relationship
    user = relationship("User", back_populates="notifications")

class Event(Base):
    __tablename__ = "events"

    id: Mapped[int] = mapped_column(primary_key=True)
    user_id: Mapped[int] = mapped_column(ForeignKey("users.id"))
    title: Mapped[str] = mapped_column(String)
    description: Mapped[str] = mapped_column(String)
    start_time: Mapped[datetime] = mapped_column(DateTime)
    end_time: Mapped[datetime] = mapped_column(DateTime)
    attendees: Mapped[List[str]] = mapped_column(ARRAY(String), default=list)
    is_all_day: Mapped[bool] = mapped_column(Boolean, default=False)
    reminder_minutes: Mapped[int] = mapped_column(Integer)
    status: Mapped[str] = mapped_column(String)
    attendee_responses: Mapped[dict] = mapped_column(JSONB, default=dict)
    created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow)
    updated_at: Mapped[datetime] = mapped_column(
        DateTime,
        default=datetime.utcnow,
        onupdate=datetime.utcnow
    )
    # Fields for recurring events
    is_recurring: Mapped[bool] = mapped_column(Boolean, default=False)
    recurrence_pattern: Mapped[Optional[str]] = mapped_column(String)
    recurrence_group: Mapped[Optional[str]] = mapped_column(String)
    recurrence_end_date: Mapped[Optional[datetime]] = mapped_column(DateTime)
    parent_event_id: Mapped[Optional[int]] = mapped_column(Integer)
    sequence_number: Mapped[Optional[int]] = mapped_column(Integer)
    reminder_sent: Mapped[bool] = mapped_column(Boolean, default=False)

    # Relationship
    user = relationship("User")

app/db/schemas.py
ADDED
@@ -0,0 +1,63 @@
from sqlalchemy.orm import validates
from sqlalchemy import event
from datetime import datetime
from .models import User, Product, Order, Event, Notification
import re

@validates('email')
def validate_email(self, key, email):
    if not re.match(r'^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}$', email):
        raise ValueError('Invalid email address')
    return email

@validates('username')
def validate_username(self, key, username):
    if len(username) < 3:
        raise ValueError('Username must be at least 3 characters long')
    return username

@validates('inventory_count')
def validate_inventory(self, key, count):
    if count < 0:
        raise ValueError('Inventory count cannot be negative')
    return count

@validates('price')
def validate_price(self, key, price):
    if price < 0:
        raise ValueError('Price cannot be negative')
    return price

# Event listeners for automatic timestamps
@event.listens_for(Product, 'before_insert')
def set_created_at(mapper, connection, target):
    target.created_at = datetime.utcnow()
    target.updated_at = datetime.utcnow()

@event.listens_for(Product, 'before_update')
def set_updated_at(mapper, connection, target):
    target.updated_at = datetime.utcnow()

@event.listens_for(Order, 'before_insert')
def set_order_created_at(mapper, connection, target):
    target.created_at = datetime.utcnow()
    target.updated_at = datetime.utcnow()

@event.listens_for(Order, 'before_update')
def set_order_updated_at(mapper, connection, target):
    target.updated_at = datetime.utcnow()

@event.listens_for(Event, 'before_insert')
def set_event_created_at(mapper, connection, target):
    target.created_at = datetime.utcnow()
    target.updated_at = datetime.utcnow()

@event.listens_for(Event, 'before_update')
def set_event_updated_at(mapper, connection, target):
    target.updated_at = datetime.utcnow()

# Add validators to models
User.validate_email = validate_email
User.validate_username = validate_username
Product.validate_inventory = validate_inventory
Product.validate_price = validate_price

app/main.py
ADDED
@@ -0,0 +1,108 @@
from fastapi import FastAPI, Request, WebSocket
from fastapi.middleware.cors import CORSMiddleware
from .core.config import settings
from .db.database import async_engine as engine, Base  # database.py exposes the async engine as `async_engine`
from .api import auth, products, orders, users, analytics, files, notifications, calendar, scheduler, maintenance
from .utils.rate_limiter import rate_limiter
from .utils.logger import log_api_request
from .utils.tasks import run_periodic_tasks
import time
import logging
import asyncio
from typing import List

# Configure logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

app = FastAPI(title=settings.PROJECT_NAME, version=settings.VERSION)

# Store active WebSocket connections and background tasks
active_connections: List[WebSocket] = []
background_tasks = set()

# Configure CORS
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],  # Configure appropriately for production
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# WebSocket connection manager
@app.websocket("/ws")
async def websocket_endpoint(websocket: WebSocket):
    await websocket.accept()
    active_connections.append(websocket)
    try:
        while True:
            data = await websocket.receive_text()
    except:
        active_connections.remove(websocket)

# Notification broadcaster
async def broadcast_notification(message: dict):
    for connection in active_connections:
        try:
            await connection.send_json(message)
        except:
            active_connections.remove(connection)

# Request logging and rate limiting middleware
@app.middleware("http")
async def middleware(request: Request, call_next):
    await rate_limiter.check_rate_limit(request)
    start_time = time.time()
    response = await call_next(request)
    end_time = time.time()
    duration = end_time - start_time
    log_api_request(
        method=request.method,
        path=request.url.path,
        status_code=response.status_code,
        duration=duration
    )
    return response

# Application startup and shutdown events
@app.on_event("startup")
async def startup_event():
    # Create all database tables
    async with engine.begin() as conn:
        await conn.run_sync(Base.metadata.create_all)

    # Start background tasks
    task = asyncio.create_task(run_periodic_tasks())
    background_tasks.add(task)
    task.add_done_callback(background_tasks.discard)

@app.on_event("shutdown")
async def shutdown_event():
    # Cancel background tasks
    for task in background_tasks:
        task.cancel()

    # Close WebSocket connections
    for connection in active_connections:
        await connection.close()

# Include routers
app.include_router(auth.router, prefix=f"{settings.API_V1_STR}/auth", tags=["auth"])
app.include_router(users.router, prefix=f"{settings.API_V1_STR}/users", tags=["users"])
app.include_router(products.router, prefix=f"{settings.API_V1_STR}/products", tags=["products"])
app.include_router(orders.router, prefix=f"{settings.API_V1_STR}/orders", tags=["orders"])
app.include_router(analytics.router, prefix=f"{settings.API_V1_STR}/analytics", tags=["analytics"])
app.include_router(files.router, prefix=f"{settings.API_V1_STR}/files", tags=["files"])
app.include_router(notifications.router, prefix=f"{settings.API_V1_STR}/notifications", tags=["notifications"])
app.include_router(calendar.router, prefix=f"{settings.API_V1_STR}/calendar", tags=["calendar"])
app.include_router(scheduler.router, prefix=f"{settings.API_V1_STR}/scheduler", tags=["scheduler"])
app.include_router(maintenance.router, prefix=f"{settings.API_V1_STR}/maintenance", tags=["maintenance"])

@app.get("/")
async def root():
    return {
        "message": f"Welcome to {settings.PROJECT_NAME} v{settings.VERSION}",
        "docs_url": "/docs",
        "openapi_url": "/openapi.json"
    }

app/schemas/events.py
ADDED
@@ -0,0 +1,87 @@
from pydantic import BaseModel, validator
from typing import List, Optional, Dict, Any
from datetime import datetime

class EventBase(BaseModel):
    title: str
    description: str
    start_time: datetime
    end_time: datetime
    is_all_day: bool = False
    reminder_minutes: int = 30

    @validator('end_time')
    def end_time_after_start_time(cls, v, values):
        if 'start_time' in values and v <= values['start_time']:
            raise ValueError('end_time must be after start_time')
        return v

    @validator('reminder_minutes')
    def valid_reminder_minutes(cls, v):
        if v < 0:
            raise ValueError('reminder_minutes cannot be negative')
        return v

class EventCreate(EventBase):
    attendees: List[str] = []

class EventUpdate(BaseModel):
    title: Optional[str] = None
    description: Optional[str] = None
    start_time: Optional[datetime] = None
    end_time: Optional[datetime] = None
    is_all_day: Optional[bool] = None
    reminder_minutes: Optional[int] = None
    attendees: Optional[List[str]] = None

    @validator('reminder_minutes')
    def valid_reminder_minutes(cls, v):
        if v is not None and v < 0:
            raise ValueError('reminder_minutes cannot be negative')
        return v

class EventInDB(EventBase):
    id: str
    user_id: str
    attendees: List[str]
    status: str
    attendee_responses: Dict[str, str]
    created_at: datetime
    updated_at: Optional[datetime] = None
    reminder_sent: bool = False
    is_recurring: bool = False
    recurrence_group: Optional[str] = None
    parent_event_id: Optional[str] = None
    sequence_number: Optional[int] = None

    class Config:
        orm_mode = True

class RecurringEventCreate(EventCreate):
    recurrence_pattern: str
    recurrence_end_date: Optional[datetime] = None

    @validator('recurrence_pattern')
    def valid_recurrence_pattern(cls, v):
        valid_patterns = ['daily', 'weekly', 'monthly', 'yearly']
        if v not in valid_patterns:
            raise ValueError(f'recurrence_pattern must be one of: {", ".join(valid_patterns)}')
        return v

    @validator('recurrence_end_date')
    def end_date_after_start_time(cls, v, values):
        if v is not None and 'start_time' in values and v <= values['start_time']:
            raise ValueError('recurrence_end_date must be after start_time')
        return v

class RecurringEventUpdate(EventUpdate):
    recurrence_pattern: Optional[str] = None
    recurrence_end_date: Optional[datetime] = None

    @validator('recurrence_pattern')
    def valid_recurrence_pattern(cls, v):
        if v is not None:
            valid_patterns = ['daily', 'weekly', 'monthly', 'yearly']
            if v not in valid_patterns:
                raise ValueError(f'recurrence_pattern must be one of: {", ".join(valid_patterns)}')
        return v

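A minimal sketch of a payload that satisfies the validators above, built directly with the Pydantic models; the titles, dates, and attendee address are illustrative values, and the package is assumed to be importable as `app`:

from datetime import datetime

from app.schemas.events import RecurringEventCreate

event = RecurringEventCreate(
    title="Weekly stock review",
    description="Review low-stock products",
    start_time=datetime(2025, 1, 6, 9, 0),
    end_time=datetime(2025, 1, 6, 10, 0),            # must be after start_time
    attendees=["manager@example.com"],
    recurrence_pattern="weekly",                      # one of daily/weekly/monthly/yearly
    recurrence_end_date=datetime(2025, 3, 31, 0, 0),  # must be after start_time
)
print(event.recurrence_pattern)
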
app/services/analytics.py
ADDED
@@ -0,0 +1,131 @@
from datetime import datetime, timedelta
from ..db.database import db
from ..utils.cache import cache
from typing import Dict, List, Any

class AnalyticsService:
    @staticmethod
    async def get_sales_analytics(start_date: datetime, end_date: datetime) -> Dict[str, Any]:
        cache_key = f"sales_analytics:{start_date.date()}:{end_date.date()}"
        cached_data = await cache.get_cache(cache_key)
        if cached_data:
            return cached_data

        pipeline = [
            {
                "$match": {
                    "created_at": {
                        "$gte": start_date,
                        "$lte": end_date
                    },
                    "status": {"$in": ["completed", "delivered"]}
                }
            },
            {
                "$group": {
                    "_id": {"$dateToString": {"format": "%Y-%m-%d", "date": "$created_at"}},
                    "total_sales": {"$sum": "$total_amount"},
                    "order_count": {"$sum": 1}
                }
            },
            {"$sort": {"_id": 1}}
        ]

        sales_data = await db.db["orders"].aggregate(pipeline).to_list(None)
        result = {
            "daily_sales": sales_data,
            "total_revenue": sum(day["total_sales"] for day in sales_data),
            "total_orders": sum(day["order_count"] for day in sales_data),
            "average_order_value": sum(day["total_sales"] for day in sales_data) /
                                   (sum(day["order_count"] for day in sales_data) or 1)
        }

        await cache.set_cache(cache_key, result, expire=3600)  # Cache for 1 hour
        return result

    @staticmethod
    async def get_product_analytics() -> Dict[str, Any]:
        cache_key = "product_analytics"
        cached_data = await cache.get_cache(cache_key)
        if cached_data:
            return cached_data

        pipeline = [
            {
                "$unwind": "$products"
            },
            {
                "$group": {
                    "_id": "$products.product_id",
                    "total_quantity": {"$sum": "$products.quantity"},
                    "total_revenue": {
                        "$sum": {
                            "$multiply": ["$products.price", "$products.quantity"]
                        }
                    }
                }
            },
            {
                "$sort": {"total_revenue": -1}
            },
            {
                "$limit": 10
            }
        ]

        top_products = await db.db["orders"].aggregate(pipeline).to_list(None)

        # Get product details
        for product in top_products:
            product_detail = await db.db["products"].find_one({"_id": product["_id"]})
            if product_detail:
                product["name"] = product_detail["name"]
                product["category"] = product_detail["category"]

        result = {
            "top_products": top_products,
            "total_products": await db.db["products"].count_documents({}),
            "low_stock_products": await db.db["products"].count_documents({"inventory_count": {"$lt": 10}})
        }

        await cache.set_cache(cache_key, result, expire=3600)  # Cache for 1 hour
        return result

    @staticmethod
    async def get_customer_analytics() -> Dict[str, Any]:
        cache_key = "customer_analytics"
        cached_data = await cache.get_cache(cache_key)
        if cached_data:
            return cached_data

        pipeline = [
            {
                "$group": {
                    "_id": "$customer_id",
                    "total_orders": {"$sum": 1},
                    "total_spent": {"$sum": "$total_amount"},
                    "last_order": {"$max": "$created_at"}
                }
            },
            {
                "$sort": {"total_spent": -1}
            }
        ]

        customer_data = await db.db["orders"].aggregate(pipeline).to_list(None)

        result = {
            "total_customers": len(customer_data),
            "top_customers": customer_data[:10],
            "average_customer_value": sum(c["total_spent"] for c in customer_data) / (len(customer_data) or 1),
            "customer_segments": {
                "high_value": len([c for c in customer_data if c["total_spent"] > 1000]),
                "medium_value": len([c for c in customer_data if 500 <= c["total_spent"] <= 1000]),
                "low_value": len([c for c in customer_data if c["total_spent"] < 500])
            }
        }

        await cache.set_cache(cache_key, result, expire=3600)  # Cache for 1 hour
        return result

analytics = AnalyticsService()

app/services/backup.py
ADDED
@@ -0,0 +1,179 @@
import os
import shutil
import json
import tarfile
from datetime import datetime
from typing import Dict, Any, List
from bson import ObjectId
from ..db.database import db
from ..utils.logger import logger

class BackupService:
    def __init__(self):
        self.backup_dir = "backups"
        self._ensure_backup_dir()

    def _ensure_backup_dir(self):
        """Ensure backup directory exists"""
        if not os.path.exists(self.backup_dir):
            os.makedirs(self.backup_dir)

    async def create_backup(self, include_files: bool = True) -> Dict[str, Any]:
        """Create a new system backup"""
        try:
            timestamp = datetime.utcnow().strftime("%Y%m%d_%H%M%S")
            backup_id = str(ObjectId())
            backup_name = f"backup_{timestamp}_{backup_id}"
            backup_path = os.path.join(self.backup_dir, backup_name)

            # Create backup directory
            os.makedirs(backup_path, exist_ok=True)

            # Backup database collections
            db_backup = {}
            for collection in await db.db.list_collection_names():
                docs = await db.db[collection].find().to_list(None)
                db_backup[collection] = [
                    {**doc, "_id": str(doc["_id"])}
                    for doc in docs
                ]

            # Save database backup
            with open(os.path.join(backup_path, "database.json"), "w") as f:
                json.dump(db_backup, f, default=str)

            # Backup files if requested
            if include_files:
                uploads_dir = "uploads"
                if os.path.exists(uploads_dir):
                    shutil.copytree(
                        uploads_dir,
                        os.path.join(backup_path, "uploads"),
                        dirs_exist_ok=True
                    )

            # Create archive
            archive_path = f"{backup_path}.tar.gz"
            with tarfile.open(archive_path, "w:gz") as tar:
                tar.add(backup_path, arcname=os.path.basename(backup_path))

            # Clean up temporary directory
            shutil.rmtree(backup_path)

            # Record backup in database
            backup_info = {
                "_id": backup_id,
                "filename": f"{backup_name}.tar.gz",
                "path": archive_path,
                "created_at": datetime.utcnow(),
                "size": os.path.getsize(archive_path),
                "includes_files": include_files
            }

            await db.db["backup_history"].insert_one(backup_info)

            return {
                "id": backup_id,
                "path": archive_path,
                "size": backup_info["size"],
                "created_at": backup_info["created_at"]
            }

        except Exception as e:
            logger.error(f"Backup creation failed: {str(e)}")
            raise

    async def restore_backup(self, backup_path: str) -> Dict[str, Any]:
        """Restore system from a backup"""
        try:
            if not os.path.exists(backup_path):
                raise FileNotFoundError("Backup file not found")

            # Create temporary restoration directory
            restore_dir = f"restore_{datetime.utcnow().strftime('%Y%m%d_%H%M%S')}"
            os.makedirs(restore_dir, exist_ok=True)

            # Extract archive
            with tarfile.open(backup_path, "r:gz") as tar:
                tar.extractall(restore_dir)

            backup_contents = os.listdir(restore_dir)[0]
            backup_root = os.path.join(restore_dir, backup_contents)

            # Restore database
            with open(os.path.join(backup_root, "database.json"), "r") as f:
                db_backup = json.load(f)

            # Clear existing collections
            for collection in await db.db.list_collection_names():
                await db.db[collection].delete_many({})

            # Restore collections
            for collection, docs in db_backup.items():
                if docs:
                    # Convert string IDs back to ObjectId
                    for doc in docs:
                        doc["_id"] = ObjectId(doc["_id"])
                    await db.db[collection].insert_many(docs)

            # Restore files if present
            uploads_source = os.path.join(backup_root, "uploads")
            if os.path.exists(uploads_source):
                if os.path.exists("uploads"):
                    shutil.rmtree("uploads")
                shutil.copytree(uploads_source, "uploads")

            # Clean up
            shutil.rmtree(restore_dir)

            return {
                "success": True,
                "collections_restored": len(db_backup),
                "files_restored": os.path.exists(uploads_source)
            }

        except Exception as e:
            logger.error(f"Backup restoration failed: {str(e)}")
            raise
        finally:
            if os.path.exists(restore_dir):
                shutil.rmtree(restore_dir)

    async def list_backups(self) -> List[Dict[str, Any]]:
        """List all available backups"""
        try:
            backups = await db.db["backup_history"].find().sort("created_at", -1).to_list(None)
            return [
                {
                    "id": str(backup["_id"]),
                    "filename": backup["filename"],
                    "created_at": backup["created_at"],
                    "size": backup["size"],
                    "includes_files": backup["includes_files"]
                }
                for backup in backups
            ]
        except Exception as e:
            logger.error(f"Failed to list backups: {str(e)}")
            raise

    async def delete_backup(self, backup_id: str) -> bool:
        """Delete a backup"""
        try:
            backup = await db.db["backup_history"].find_one({"_id": backup_id})
            if not backup:
                return False

            # Delete the physical backup file
            if os.path.exists(backup["path"]):
                os.remove(backup["path"])

            # Remove from database
            await db.db["backup_history"].delete_one({"_id": backup_id})
            return True

        except Exception as e:
            logger.error(f"Failed to delete backup: {str(e)}")
            raise

backup = BackupService()

app/services/calendar.py
ADDED
|
@@ -0,0 +1,215 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from datetime import datetime, timedelta
|
| 2 |
+
from typing import List, Dict, Any, Optional
|
| 3 |
+
from bson import ObjectId
|
| 4 |
+
from ..db.database import db
|
| 5 |
+
from ..utils.cache import cache
|
| 6 |
+
from ..services.notifications import notifications
|
| 7 |
+
|
| 8 |
+
class CalendarService:
|
| 9 |
+
async def create_event(
|
| 10 |
+
self,
|
| 11 |
+
user_id: str,
|
| 12 |
+
title: str,
|
| 13 |
+
description: str,
|
| 14 |
+
start_time: datetime,
|
| 15 |
+
end_time: datetime,
|
| 16 |
+
attendees: List[str] = None,
|
| 17 |
+
is_all_day: bool = False,
|
| 18 |
+
reminder_minutes: int = 30
|
| 19 |
+
) -> Dict[str, Any]:
|
| 20 |
+
"""Create a new calendar event"""
|
| 21 |
+
event = {
|
| 22 |
+
"user_id": user_id,
|
| 23 |
+
"title": title,
|
| 24 |
+
"description": description,
|
| 25 |
+
"start_time": start_time,
|
| 26 |
+
"end_time": end_time,
|
| 27 |
+
"attendees": attendees or [],
|
| 28 |
+
"is_all_day": is_all_day,
|
| 29 |
+
"reminder_minutes": reminder_minutes,
|
| 30 |
+
"status": "scheduled",
|
| 31 |
+
"created_at": datetime.utcnow()
|
| 32 |
+
}
|
| 33 |
+
|
| 34 |
+
result = await db.db["events"].insert_one(event)
|
| 35 |
+
event["_id"] = result.inserted_id
|
| 36 |
+
|
| 37 |
+
# Clear cache for affected users
|
| 38 |
+
cache_keys = [f"user_events:{user_id}"]
|
| 39 |
+
for attendee in attendees or []:
|
| 40 |
+
cache_keys.append(f"user_events:{attendee}")
|
| 41 |
+
|
| 42 |
+
for key in cache_keys:
|
| 43 |
+
await cache.delete_cache(key)
|
| 44 |
+
|
| 45 |
+
        # Notify attendees
        if attendees:
            for attendee in attendees:
                await notifications.create_notification(
                    user_id=attendee,
                    title=f"New Event Invitation: {title}",
                    message=f"You have been invited to an event: {title}",
                    notification_type="event_invitation",
                    data={"event_id": str(result.inserted_id)}
                )

        return event

    async def get_user_events(
        self,
        user_id: str,
        start_date: datetime,
        end_date: datetime,
        include_attendee_events: bool = True
    ) -> List[Dict[str, Any]]:
        """Get events for a user within a date range"""
        cache_key = f"user_events:{user_id}:{start_date.date()}:{end_date.date()}"
        cached = await cache.get_cache(cache_key)
        if cached:
            return cached

        query = {
            "$or": [
                {"user_id": user_id},  # Events created by user
                {"attendees": user_id} if include_attendee_events else {"_id": None}
            ],
            "start_time": {"$gte": start_date},
            "end_time": {"$lte": end_date}
        }

        cursor = db.db["events"].find(query).sort("start_time", 1)
        events = await cursor.to_list(None)

        await cache.set_cache(cache_key, events, expire=300)  # Cache for 5 minutes
        return events

    async def update_event(
        self,
        event_id: str,
        user_id: str,
        update_data: Dict[str, Any]
    ) -> Optional[Dict[str, Any]]:
        """Update an event"""
        if not ObjectId.is_valid(event_id):
            return None

        event = await db.db["events"].find_one({
            "_id": ObjectId(event_id),
            "user_id": user_id  # Only creator can update
        })

        if not event:
            return None

        update_data["updated_at"] = datetime.utcnow()

        await db.db["events"].update_one(
            {"_id": ObjectId(event_id)},
            {"$set": update_data}
        )

        # Clear cache for affected users
        cache_keys = [f"user_events:{user_id}"]
        for attendee in event.get("attendees", []):
            cache_keys.append(f"user_events:{attendee}")

        for key in cache_keys:
            await cache.delete_cache(key)

        # Notify attendees of changes
        if "start_time" in update_data or "end_time" in update_data:
            for attendee in event.get("attendees", []):
                await notifications.create_notification(
                    user_id=attendee,
                    title=f"Event Updated: {event['title']}",
                    message=f"An event you're attending has been updated",
                    notification_type="event_update",
                    data={"event_id": event_id}
                )

        return await db.db["events"].find_one({"_id": ObjectId(event_id)})

    async def delete_event(self, event_id: str, user_id: str) -> bool:
        """Delete an event"""
        if not ObjectId.is_valid(event_id):
            return False

        event = await db.db["events"].find_one({
            "_id": ObjectId(event_id),
            "user_id": user_id  # Only creator can delete
        })

        if not event:
            return False

        result = await db.db["events"].delete_one({"_id": ObjectId(event_id)})

        if result.deleted_count > 0:
            # Clear cache for affected users
            cache_keys = [f"user_events:{user_id}"]
            for attendee in event.get("attendees", []):
                cache_keys.append(f"user_events:{attendee}")
                # Notify attendees
                await notifications.create_notification(
                    user_id=attendee,
                    title=f"Event Cancelled: {event['title']}",
                    message=f"An event you were attending has been cancelled",
                    notification_type="event_cancellation",
                    data={"event_id": event_id}
                )

            for key in cache_keys:
                await cache.delete_cache(key)

            return True
        return False

    async def respond_to_event(
        self,
        event_id: str,
        user_id: str,
        response: str
    ) -> bool:
        """Respond to an event invitation"""
        if not ObjectId.is_valid(event_id):
            return False

        valid_responses = ["accepted", "declined", "maybe"]
        if response not in valid_responses:
            return False

        event = await db.db["events"].find_one({
            "_id": ObjectId(event_id),
            "attendees": user_id
        })

        if not event:
            return False

        # Update response in attendee list
        await db.db["events"].update_one(
            {"_id": ObjectId(event_id)},
            {
                "$set": {
                    f"attendee_responses.{user_id}": response,
                    "updated_at": datetime.utcnow()
                }
            }
        )

        # Notify event creator
        await notifications.create_notification(
            user_id=event["user_id"],
            title=f"Event Response: {event['title']}",
            message=f"An attendee has {response} your event",
            notification_type="event_response",
            data={
                "event_id": event_id,
                "responder": user_id,
                "response": response
            }
        )

        return True

calendar = CalendarService()
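Note on the caching above: get_user_events stores results under date-suffixed keys (user_events:{user_id}:{start}:{end}), while update_event and delete_event only delete the bare user_events:{user_id} key, so dated entries can outlive an update. A minimal sketch of pattern-based invalidation, assuming the clear_cache_pattern helper defined in app/utils/cache.py later in this commit (not what the diff above actually does):

# Sketch: clear every dated user_events entry for the affected users.
# Relies on cache.clear_cache_pattern from app/utils/cache.py; hypothetical helper, not part of the diff.
async def invalidate_user_event_cache(user_ids):
    for uid in user_ids:
        await cache.clear_cache_pattern(f"user_events:{uid}:*")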
app/services/maintenance.py
ADDED
@@ -0,0 +1,286 @@
import os
import psutil
from datetime import datetime, timedelta
from typing import Dict, Any, List

from ..db.database import db
from ..utils.logger import logger
from ..utils.cache import cache
from ..services.notifications import notifications
from .backup import backup
import gzip
import shutil

class MaintenanceService:
    async def cleanup_expired_sessions(self) -> int:
        """Clean up expired sessions from the database"""
        try:
            cutoff_date = datetime.utcnow() - timedelta(days=7)
            result = await db.db["sessions"].delete_many({
                "last_activity": {"$lt": cutoff_date}
            })
            logger.info(f"Cleaned up {result.deleted_count} expired sessions")
            return result.deleted_count
        except Exception as e:
            logger.error(f"Error cleaning up sessions: {str(e)}")
            return 0

    async def archive_old_data(self) -> Dict[str, int]:
        """Archive old data to maintain database performance"""
        try:
            archive_date = datetime.utcnow() - timedelta(days=365)  # Archive data older than 1 year
            archives = {}

            # Archive old orders
            old_orders = await db.db["orders"].find({
                "created_at": {"$lt": archive_date},
                "status": {"$in": ["delivered", "cancelled"]}
            }).to_list(None)

            if old_orders:
                await db.db["archived_orders"].insert_many(old_orders)
                result = await db.db["orders"].delete_many({
                    "_id": {"$in": [order["_id"] for order in old_orders]}
                })
                archives["orders"] = len(old_orders)

            # Archive old notifications
            old_notifications = await db.db["notifications"].find({
                "created_at": {"$lt": archive_date},
                "read": True
            }).to_list(None)

            if old_notifications:
                await db.db["archived_notifications"].insert_many(old_notifications)
                result = await db.db["notifications"].delete_many({
                    "_id": {"$in": [notif["_id"] for notif in old_notifications]}
                })
                archives["notifications"] = len(old_notifications)

            # Archive old metrics (keep last 90 days)
            cutoff_date = datetime.utcnow() - timedelta(days=90)
            result = await db.db.system_metrics.delete_many({
                "timestamp": {"$lt": cutoff_date}
            })
            logger.info(f"Archived {result.deleted_count} old metric records")
            archives["metrics"] = result.deleted_count

            # Archive old audit logs (keep last 180 days)
            audit_cutoff = datetime.utcnow() - timedelta(days=180)
            audit_result = await db.db.audit_logs.delete_many({
                "timestamp": {"$lt": audit_cutoff}
            })
            logger.info(f"Archived {audit_result.deleted_count} old audit logs")
            archives["audit_logs"] = audit_result.deleted_count

            return archives
        except Exception as e:
            logger.error(f"Error archiving old data: {str(e)}")
            return {}

    async def check_system_health(self) -> Dict[str, Any]:
        """Check various system health metrics"""
        try:
            health_data = {
                "timestamp": datetime.utcnow(),
                "database": {},
                "cache": {},
                "storage": {}
            }

            # Check database stats
            db_stats = await db.db.command("dbStats")
            health_data["database"] = {
                "size": db_stats["dataSize"],
                "collections": db_stats["collections"],
                "indexes": db_stats["indexes"]
            }

            # Check Redis cache
            try:
                cache_info = await cache.redis_client.info()
                health_data["cache"] = {
                    "connected": True,
                    "used_memory": cache_info["used_memory"],
                    "connected_clients": cache_info["connected_clients"]
                }
            except:
                health_data["cache"] = {"connected": False}

            # Check storage metrics
            storage_stats = {
                "uploads_size": await self._get_directory_size("uploads"),
                "logs_size": await self._get_directory_size("logs")
            }
            health_data["storage"] = storage_stats

            return health_data
        except Exception as e:
            logger.error(f"Error checking system health: {str(e)}")
            return {"error": str(e)}

    async def perform_database_maintenance(self):
        """Perform routine database maintenance tasks"""
        try:
            # Cleanup expired sessions
            await self.cleanup_expired_sessions()

            # Run database vacuum and analyze
            await db.db.command('analyze')
            logger.info("Database maintenance completed successfully")
        except Exception as e:
            logger.error(f"Database maintenance failed: {str(e)}")
            raise

    async def monitor_system_resources(self) -> Dict[str, Any]:
        """Monitor system resources and return metrics"""
        try:
            cpu_percent = psutil.cpu_percent()
            memory = psutil.virtual_memory()
            disk = psutil.disk_usage('/')

            metrics = {
                "cpu_usage": cpu_percent,
                "memory_usage": memory.percent,
                "disk_usage": disk.percent,
                "timestamp": datetime.utcnow()
            }

            await db.db.system_metrics.insert_one(metrics)

            # Alert if resources are critically low
            if any([cpu_percent > 90, memory.percent > 90, disk.percent > 90]):
                logger.warning("System resources critically low", extra=metrics)

            return metrics
        except Exception as e:
            logger.error(f"Resource monitoring failed: {str(e)}")
            raise

    async def perform_scheduled_backup(self):
        """Perform scheduled system backup"""
        try:
            result = await backup.create_backup(include_files=True)
            logger.info(f"Scheduled backup completed successfully: {result['id']}")

            # Cleanup old backups (keep last 7 days)
            await self.cleanup_old_backups(days_to_keep=7)
        except Exception as e:
            logger.error(f"Scheduled backup failed: {str(e)}")
            raise

    async def cleanup_old_backups(self, days_to_keep: int = 7):
        """Clean up backups older than specified days"""
        try:
            cutoff_date = datetime.utcnow() - timedelta(days=days_to_keep)
            old_backups = await db.db.backup_history.find({
                "created_at": {"$lt": cutoff_date}
            }).to_list(None)

            for old_backup in old_backups:
                await backup.delete_backup(str(old_backup["_id"]))

            logger.info(f"Cleaned up {len(old_backups)} old backups")
        except Exception as e:
            logger.error(f"Backup cleanup failed: {str(e)}")
            raise

    async def rotate_log_files(self):
        """Rotate and archive log files"""
        try:
            log_dir = "logs"
            if not os.path.exists(log_dir):
                return

            current_date = datetime.utcnow().strftime("%Y%m%d")
            for filename in os.listdir(log_dir):
                if filename.endswith(".log"):
                    src_path = os.path.join(log_dir, filename)
                    dst_path = os.path.join(log_dir, f"{filename}.{current_date}")

                    if os.path.exists(src_path):
                        os.rename(src_path, dst_path)

            logger.info("Log rotation completed successfully")
        except Exception as e:
            logger.error(f"Log rotation failed: {str(e)}")
            raise

    async def manage_storage_quotas(self):
        """Check and manage storage quotas"""
        try:
            results = {
                "status": "ok",
                "warnings": [],
                "cleaned": 0
            }

            # Check uploads directory size
            uploads_dir = "uploads"
            if os.path.exists(uploads_dir):
                total_size = sum(
                    os.path.getsize(os.path.join(dirpath, filename))
                    for dirpath, _, filenames in os.walk(uploads_dir)
                    for filename in filenames
                )

                # Alert if total size exceeds 90% of quota (e.g., 10GB)
                quota_limit = 10 * 1024 * 1024 * 1024  # 10GB in bytes
                if total_size > (quota_limit * 0.9):
                    warning_msg = f"Storage quota nearly reached: {total_size / quota_limit:.1%}"
                    results["warnings"].append(warning_msg)
                    await self._notify_resource_warning(warning_msg)

            # Check database size
            db_stats = await db.db.command("dbStats")
            db_size = db_stats["dataSize"] + db_stats["indexSize"]

            # Alert if database size exceeds 90% of quota (e.g., 5GB)
            db_quota = 5 * 1024 * 1024 * 1024  # 5GB in bytes
            if db_size > (db_quota * 0.9):
                warning_msg = f"Database quota nearly reached: {db_size / db_quota:.1%}"
                results["warnings"].append(warning_msg)
                await self._notify_resource_warning(warning_msg)

            # Clean up temporary uploads older than 24 hours
            temp_dir = os.path.join("uploads", "temp")
            if os.path.exists(temp_dir):
                current_time = datetime.utcnow()
                for file_name in os.listdir(temp_dir):
                    file_path = os.path.join(temp_dir, file_name)
                    file_age = datetime.fromtimestamp(os.path.getctime(file_path))

                    if current_time - file_age > timedelta(hours=24):
                        os.remove(file_path)
                        results["cleaned"] += 1

            return results
        except Exception as e:
            logger.error(f"Error managing storage quotas: {str(e)}")
            return {"error": str(e)}

    async def _notify_resource_warning(self, message: str):
        """Send notification for resource warnings"""
        try:
            # Get admin users
            admin_users = await db.db["users"].find(
                {"roles": "admin"}
            ).to_list(None)

            # Send notifications
            for admin in admin_users:
                await notifications.create_notification(
                    user_id=str(admin["_id"]),
                    title="System Resource Warning",
                    message=message,
                    notification_type="system_warning"
                )
        except Exception as e:
            logger.error(f"Error sending resource warning: {str(e)}")

    async def _get_directory_size(self, path: str) -> int:
        """Get the total size of a directory in bytes"""
        from pathlib import Path
        return sum(f.stat().st_size for f in Path(path).glob('**/*') if f.is_file())

maintenance = MaintenanceService()
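Two observations on the service above: perform_database_maintenance issues db.db.command('analyze'), which reads like a PostgreSQL ANALYZE; MongoDB (the Motor-style db.db[...] access used throughout this commit) has no such command, so that call will likely raise. The resource readings in monitor_system_resources come straight from psutil; the snippet below is a standalone sketch of those same three readings and the 90% threshold, handy for checking them outside the service:

# Standalone sketch of the readings used by monitor_system_resources (psutil required).
import psutil
from datetime import datetime

def read_metrics() -> dict:
    return {
        "cpu_usage": psutil.cpu_percent(interval=0.1),  # short sample instead of the instantaneous call
        "memory_usage": psutil.virtual_memory().percent,
        "disk_usage": psutil.disk_usage("/").percent,
        "timestamp": datetime.utcnow(),
    }

if __name__ == "__main__":
    m = read_metrics()
    critical = any(m[k] > 90 for k in ("cpu_usage", "memory_usage", "disk_usage"))
    print(m, "ALERT" if critical else "ok")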
app/services/notifications.py
ADDED
@@ -0,0 +1,105 @@
from typing import Dict, Any
from datetime import datetime
from ..db.database import db
from ..core.config import settings
from ..utils.cache import cache
from ..main import broadcast_notification

class NotificationService:
    async def create_notification(
        self,
        user_id: str,
        title: str,
        message: str,
        notification_type: str,
        data: Dict[str, Any] = None
    ):
        """Create and store a notification"""
        notification = {
            "user_id": user_id,
            "title": title,
            "message": message,
            "type": notification_type,
            "data": data,
            "created_at": datetime.utcnow(),
            "read": False
        }

        # Store in database
        await db.db["notifications"].insert_one(notification)

        # Broadcast to connected clients
        await broadcast_notification({
            "type": "notification",
            "data": notification
        })

        # Clear user's notification cache
        await cache.delete_cache(f"user_notifications:{user_id}")

        return notification

    async def get_user_notifications(
        self,
        user_id: str,
        skip: int = 0,
        limit: int = 50,
        unread_only: bool = False
    ):
        """Get notifications for a user"""
        cache_key = f"user_notifications:{user_id}"
        if not unread_only:
            cached = await cache.get_cache(cache_key)
            if cached:
                return cached

        query = {"user_id": user_id}
        if unread_only:
            query["read"] = False

        cursor = db.db["notifications"].find(query)\
            .sort("created_at", -1)\
            .skip(skip)\
            .limit(limit)

        notifications = await cursor.to_list(length=limit)

        if not unread_only:
            await cache.set_cache(cache_key, notifications, expire=300)  # Cache for 5 minutes

        return notifications

    async def mark_as_read(self, notification_id: str, user_id: str):
        """Mark a notification as read"""
        result = await db.db["notifications"].update_one(
            {"_id": notification_id, "user_id": user_id},
            {"$set": {"read": True}}
        )

        if result.modified_count > 0:
            await cache.delete_cache(f"user_notifications:{user_id}")
            return True
        return False

    async def mark_all_as_read(self, user_id: str):
        """Mark all notifications as read for a user"""
        result = await db.db["notifications"].update_many(
            {"user_id": user_id, "read": False},
            {"$set": {"read": True}}
        )

        await cache.delete_cache(f"user_notifications:{user_id}")
        return result.modified_count

    async def delete_notification(self, notification_id: str, user_id: str):
        """Delete a notification"""
        result = await db.db["notifications"].delete_one(
            {"_id": notification_id, "user_id": user_id}
        )

        if result.deleted_count > 0:
            await cache.delete_cache(f"user_notifications:{user_id}")
            return True
        return False

notifications = NotificationService()
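Caveat on the file above: mark_as_read and delete_notification filter on {"_id": notification_id} with the raw string, but documents inserted by create_notification receive BSON ObjectId _id values, so those string filters never match. A hedged sketch of the usual fix, mirroring the ObjectId handling the calendar service in this same commit already uses:

# Sketch only: convert the string id before filtering, as app/services/calendar.py does.
from bson import ObjectId

async def mark_as_read(self, notification_id: str, user_id: str):
    if not ObjectId.is_valid(notification_id):
        return False
    result = await db.db["notifications"].update_one(
        {"_id": ObjectId(notification_id), "user_id": user_id},
        {"$set": {"read": True}}
    )
    return result.modified_count > 0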
app/services/scheduler.py
ADDED
@@ -0,0 +1,243 @@
from datetime import datetime, timedelta
from typing import List, Dict, Any, Optional
from bson import ObjectId
from apscheduler.schedulers.asyncio import AsyncIOScheduler
from apscheduler.triggers.cron import CronTrigger
from ..db.database import db
from ..utils.cache import cache
from ..utils.logger import logger
from ..services.calendar import calendar
from ..services.maintenance import maintenance

class SchedulerService:
    def __init__(self):
        self.scheduler = AsyncIOScheduler()
        self._setup_maintenance_jobs()

    def _setup_maintenance_jobs(self):
        """Setup all maintenance related scheduled jobs"""
        # Daily database maintenance at 2 AM
        self.scheduler.add_job(
            maintenance.perform_database_maintenance,
            CronTrigger(hour=2),
            id="daily_db_maintenance",
            replace_existing=True
        )

        # Session cleanup every 6 hours
        self.scheduler.add_job(
            maintenance.cleanup_expired_sessions,
            CronTrigger(hour="*/6"),
            id="session_cleanup",
            replace_existing=True
        )

        # System health check every 15 minutes
        self.scheduler.add_job(
            maintenance.monitor_system_resources,
            CronTrigger(minute="*/15"),
            id="health_check",
            replace_existing=True
        )

        # Daily backup at 1 AM
        self.scheduler.add_job(
            maintenance.perform_scheduled_backup,
            CronTrigger(hour=1),
            id="daily_backup",
            replace_existing=True
        )

        # Daily log rotation at 3 AM
        self.scheduler.add_job(
            maintenance.rotate_log_files,
            CronTrigger(hour=3),
            id="log_rotation",
            replace_existing=True
        )

        # Storage quota check every 2 hours
        self.scheduler.add_job(
            maintenance.manage_storage_quotas,
            CronTrigger(hour="*/2"),
            id="storage_quota_check",
            replace_existing=True
        )

        # Monthly data archiving at 4 AM on the 1st of each month
        self.scheduler.add_job(
            maintenance.archive_old_data,
            CronTrigger(day=1, hour=4),
            id="monthly_archiving",
            replace_existing=True
        )

    def start(self):
        """Start the scheduler"""
        try:
            self.scheduler.start()
            logger.info("Scheduler started successfully")
        except Exception as e:
            logger.error(f"Failed to start scheduler: {str(e)}")
            raise

    def shutdown(self):
        """Shutdown the scheduler"""
        try:
            self.scheduler.shutdown()
            logger.info("Scheduler shutdown successfully")
        except Exception as e:
            logger.error(f"Error during scheduler shutdown: {str(e)}")
            raise

    def get_jobs(self):
        """Get all scheduled jobs"""
        return [
            {
                "id": job.id,
                "name": job.name,
                "next_run_time": job.next_run_time.isoformat() if job.next_run_time else None,
                "trigger": str(job.trigger)
            }
            for job in self.scheduler.get_jobs()
        ]

    async def create_recurring_event(
        self,
        user_id: str,
        title: str,
        description: str,
        start_time: datetime,
        end_time: datetime,
        recurrence_pattern: str,  # daily, weekly, monthly, yearly
        recurrence_end_date: Optional[datetime] = None,
        attendees: List[str] = None,
        reminder_minutes: int = 30
    ) -> List[Dict[str, Any]]:
        """Create recurring events based on pattern"""
        events = []
        current_start = start_time
        current_end = end_time
        duration = end_time - start_time

        while True:
            if recurrence_end_date and current_start > recurrence_end_date:
                break

            # Create individual event instance
            event = await calendar.create_event(
                user_id=user_id,
                title=title,
                description=description,
                start_time=current_start,
                end_time=current_end,
                attendees=attendees,
                reminder_minutes=reminder_minutes
            )
            events.append(event)

            # Calculate next occurrence
            if recurrence_pattern == "daily":
                current_start += timedelta(days=1)
            elif recurrence_pattern == "weekly":
                current_start += timedelta(weeks=1)
            elif recurrence_pattern == "monthly":
                # Add one month (approximately)
                if current_start.month == 12:
                    current_start = current_start.replace(year=current_start.year + 1, month=1)
                else:
                    current_start = current_start.replace(month=current_start.month + 1)
            elif recurrence_pattern == "yearly":
                current_start = current_start.replace(year=current_start.year + 1)

            current_end = current_start + duration

        return events

    async def update_recurring_event(
        self,
        event_id: str,
        user_id: str,
        update_data: Dict[str, Any],
        update_future: bool = True
    ) -> List[Dict[str, Any]]:
        """Update a recurring event and optionally its future occurrences"""
        # Get the original event
        event = await db.db["events"].find_one({
            "_id": ObjectId(event_id),
            "user_id": user_id
        })

        if not event:
            return []

        # Update the current event
        await calendar.update_event(event_id, user_id, update_data)

        updated_events = [event]

        # Update future occurrences if requested
        if update_future:
            future_events = await db.db["events"].find({
                "recurrence_group": event.get("recurrence_group"),
                "start_time": {"$gt": event["start_time"]},
                "user_id": user_id
            }).to_list(None)

            for future_event in future_events:
                await calendar.update_event(
                    str(future_event["_id"]),
                    user_id,
                    update_data
                )
                updated_events.append(future_event)

        return updated_events

    async def delete_recurring_event(
        self,
        event_id: str,
        user_id: str,
        delete_future: bool = True
    ) -> bool:
        """Delete a recurring event and optionally its future occurrences"""
        event = await db.db["events"].find_one({
            "_id": ObjectId(event_id),
            "user_id": user_id
        })

        if not event:
            return False

        # Delete the current event
        await calendar.delete_event(event_id, user_id)

        # Delete future occurrences if requested
        if delete_future and event.get("recurrence_group"):
            await db.db["events"].delete_many({
                "recurrence_group": event["recurrence_group"],
                "start_time": {"$gt": event["start_time"]},
                "user_id": user_id
            })

        return True

    async def get_upcoming_recurring_events(
        self,
        user_id: str,
        days: int = 30
    ) -> List[Dict[str, Any]]:
        """Get upcoming recurring events for a user"""
        start_date = datetime.utcnow()
        end_date = start_date + timedelta(days=days)

        events = await calendar.get_user_events(
            user_id=user_id,
            start_date=start_date,
            end_date=end_date,
            include_attendee_events=True
        )

        return sorted(events, key=lambda x: x["start_time"])

scheduler = SchedulerService()
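Two things to watch in create_recurring_event above: with recurrence_end_date left as None the while True loop never terminates, and the monthly branch's replace(month=month + 1) raises ValueError for dates such as January 31. A standalone sketch of a safer month step, using the stdlib calendar module (aliased here so it does not clash with the imported calendar service):

# Sketch: clamp the day when stepping one month forward (e.g. Jan 31 -> Feb 28/29).
import calendar as cal
from datetime import datetime

def add_one_month(dt: datetime) -> datetime:
    year, month = (dt.year + 1, 1) if dt.month == 12 else (dt.year, dt.month + 1)
    day = min(dt.day, cal.monthrange(year, month)[1])  # last valid day of the target month
    return dt.replace(year=year, month=month, day=day)

print(add_one_month(datetime(2025, 1, 31)))  # 2025-02-28 00:00:00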
app/templates/email/low_stock_alert.html
ADDED
@@ -0,0 +1,41 @@
<!DOCTYPE html>
<html>
<head>
    <style>
        body { font-family: Arial, sans-serif; line-height: 1.6; color: #333; }
        .container { max-width: 600px; margin: 0 auto; padding: 20px; }
        .header { background: #f8f9fa; padding: 20px; text-align: center; }
        .content { padding: 20px; }
        .footer { text-align: center; padding: 20px; font-size: 12px; color: #666; }
        .button { display: inline-block; padding: 10px 20px; background: #ffc107; color: #333; text-decoration: none; border-radius: 5px; }
        .alert { color: #856404; background: #fff3cd; padding: 10px; border-radius: 5px; margin: 20px 0; }
    </style>
</head>
<body>
    <div class="container">
        <div class="header">
            <h1>Low Stock Alert</h1>
        </div>
        <div class="content">
            <div class="alert">
                <h2>Product Stock Alert</h2>
                <p>The following product is running low on inventory:</p>
            </div>

            <ul style="list-style: none; padding: 0;">
                <li><strong>Product Name:</strong> {{ product_name }}</li>
                <li><strong>Current Stock:</strong> {{ current_stock }} units</li>
            </ul>

            <p style="text-align: center; margin: 30px 0;">
                <a href="{{ inventory_url }}/products/{{ product_id }}" class="button">View Product</a>
            </p>

            <p>Please review and restock this item as needed to maintain adequate inventory levels.</p>
        </div>
        <div class="footer">
            <p>This is an automated inventory alert. Please take appropriate action.</p>
        </div>
    </div>
</body>
</html>
app/templates/email/order_confirmation.html
ADDED
@@ -0,0 +1,40 @@
<!DOCTYPE html>
<html>
<head>
    <style>
        body { font-family: Arial, sans-serif; line-height: 1.6; color: #333; }
        .container { max-width: 600px; margin: 0 auto; padding: 20px; }
        .header { background: #f8f9fa; padding: 20px; text-align: center; }
        .content { padding: 20px; }
        .footer { text-align: center; padding: 20px; font-size: 12px; color: #666; }
        .button { display: inline-block; padding: 10px 20px; background: #007bff; color: white; text-decoration: none; border-radius: 5px; }
    </style>
</head>
<body>
    <div class="container">
        <div class="header">
            <h1>Order Confirmation</h1>
        </div>
        <div class="content">
            <p>Thank you for your order!</p>
            <h2>Order Details</h2>
            <p>Order ID: {{ order_id }}</p>
            <p>Total Amount: ${{ "%.2f"|format(total_amount) }}</p>
            <p>Status: {{ status|title }}</p>

            <h3>Products Ordered:</h3>
            <ul>
                {% for product in products %}
                <li>{{ product.name }} - Quantity: {{ product.quantity }} - Price: ${{ "%.2f"|format(product.price) }}</li>
                {% endfor %}
            </ul>

            <p>You can track your order status by clicking the button below:</p>
            <a href="{{ tracking_url }}" class="button">Track Order</a>
        </div>
        <div class="footer">
            <p>This is an automated message, please do not reply to this email.</p>
        </div>
    </div>
</body>
</html>
app/templates/email/password_reset.html
ADDED
@@ -0,0 +1,38 @@
<!DOCTYPE html>
<html>
<head>
    <style>
        body { font-family: Arial, sans-serif; line-height: 1.6; color: #333; }
        .container { max-width: 600px; margin: 0 auto; padding: 20px; }
        .header { background: #f8f9fa; padding: 20px; text-align: center; }
        .content { padding: 20px; }
        .footer { text-align: center; padding: 20px; font-size: 12px; color: #666; }
        .button { display: inline-block; padding: 10px 20px; background: #dc3545; color: white; text-decoration: none; border-radius: 5px; }
        .warning { color: #721c24; background: #f8d7da; padding: 10px; border-radius: 5px; margin: 20px 0; }
    </style>
</head>
<body>
    <div class="container">
        <div class="header">
            <h1>Password Reset Request</h1>
        </div>
        <div class="content">
            <p>We received a request to reset your password. Click the button below to create a new password:</p>

            <p style="text-align: center; margin: 30px 0;">
                <a href="{{ reset_url }}" class="button">Reset Password</a>
            </p>

            <div class="warning">
                <p>If you didn't request a password reset, please ignore this email or contact support if you have concerns.</p>
            </div>

            <p>This password reset link will expire in 30 minutes for security reasons.</p>
        </div>
        <div class="footer">
            <p>This is an automated message, please do not reply to this email.</p>
            <p>For security purposes, never share this email or the reset link with anyone.</p>
        </div>
    </div>
</body>
</html>
app/templates/email/welcome.html
ADDED
@@ -0,0 +1,45 @@
<!DOCTYPE html>
<html>
<head>
    <style>
        body { font-family: Arial, sans-serif; line-height: 1.6; color: #333; }
        .container { max-width: 600px; margin: 0 auto; padding: 20px; }
        .header { background: #f8f9fa; padding: 20px; text-align: center; }
        .content { padding: 20px; }
        .footer { text-align: center; padding: 20px; font-size: 12px; color: #666; }
        .button { display: inline-block; padding: 10px 20px; background: #28a745; color: white; text-decoration: none; border-radius: 5px; }
        .features { background: #f8f9fa; padding: 20px; border-radius: 5px; margin: 20px 0; }
    </style>
</head>
<body>
    <div class="container">
        <div class="header">
            <h1>Welcome to Admin Dashboard!</h1>
        </div>
        <div class="content">
            <p>Hello {{ username }},</p>
            <p>Welcome to our platform! We're excited to have you on board.</p>

            <div class="features">
                <h3>What you can do with your account:</h3>
                <ul>
                    <li>Manage products and inventory</li>
                    <li>Process and track orders</li>
                    <li>View analytics and reports</li>
                    <li>Manage customer relationships</li>
                </ul>
            </div>

            <p style="text-align: center; margin: 30px 0;">
                <a href="{{ login_url }}" class="button">Get Started</a>
            </p>

            <p>If you have any questions or need assistance, our support team is here to help.</p>
        </div>
        <div class="footer">
            <p>This is an automated message, please do not reply to this email.</p>
            <p>You're receiving this email because you registered for an account.</p>
        </div>
    </div>
</body>
</html>
app/utils/cache.py
ADDED
@@ -0,0 +1,55 @@
import redis
import json
from ..core.config import settings
from typing import Any, Optional

class RedisCache:
    def __init__(self):
        self.redis_client = redis.Redis(
            host=settings.REDIS_HOST,
            port=settings.REDIS_PORT,
            decode_responses=True
        )

    async def set_cache(self, key: str, value: Any, expire: int = 3600):
        """Set a cache entry with optional expiration time (default 1 hour)"""
        try:
            self.redis_client.setex(
                key,
                expire,
                json.dumps(value)
            )
            return True
        except Exception as e:
            print(f"Cache set error: {str(e)}")
            return False

    async def get_cache(self, key: str) -> Optional[Any]:
        """Get a cached value by key"""
        try:
            value = self.redis_client.get(key)
            return json.loads(value) if value else None
        except Exception as e:
            print(f"Cache get error: {str(e)}")
            return None

    async def delete_cache(self, key: str) -> bool:
        """Delete a cache entry by key"""
        try:
            return bool(self.redis_client.delete(key))
        except Exception as e:
            print(f"Cache delete error: {str(e)}")
            return False

    async def clear_cache_pattern(self, pattern: str) -> bool:
        """Clear all cache entries matching a pattern"""
        try:
            keys = self.redis_client.keys(pattern)
            if keys:
                return bool(self.redis_client.delete(*keys))
            return True
        except Exception as e:
            print(f"Cache clear error: {str(e)}")
            return False

cache = RedisCache()
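RedisCache above declares its methods async but drives the synchronous redis client, so every cache call blocks the event loop; json.dumps will also reject the datetime and ObjectId values the services pass in, quietly turning set_cache into a no-op. A minimal sketch of a non-blocking variant, assuming redis-py 4.2+ where redis.asyncio is available (the serialization issue is only flagged in a comment, not solved here):

# Sketch: same interface on top of redis.asyncio so awaits actually yield (redis-py >= 4.2).
import json
import redis.asyncio as aioredis

class AsyncRedisCache:
    def __init__(self, host: str = "localhost", port: int = 6379):
        self.redis_client = aioredis.Redis(host=host, port=port, decode_responses=True)

    async def set_cache(self, key: str, value, expire: int = 3600) -> bool:
        # NOTE: json.dumps still rejects datetime/ObjectId values; a default= hook would be needed.
        await self.redis_client.setex(key, expire, json.dumps(value))
        return True

    async def get_cache(self, key: str):
        value = await self.redis_client.get(key)
        return json.loads(value) if value else None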
app/utils/email.py
ADDED
@@ -0,0 +1,103 @@
from fastapi_mail import FastMail, MessageSchema, ConnectionConfig
from pydantic import EmailStr
from typing import List, Dict, Any
from ..core.config import settings
from pathlib import Path
import aiofiles
import jinja2

class EmailService:
    def __init__(self):
        self.conf = ConnectionConfig(
            MAIL_USERNAME=settings.MAIL_USERNAME,
            MAIL_PASSWORD=settings.MAIL_PASSWORD,
            MAIL_FROM=settings.MAIL_FROM,
            MAIL_PORT=settings.MAIL_PORT,
            MAIL_SERVER=settings.MAIL_SERVER,
            MAIL_TLS=True,
            MAIL_SSL=False,
            TEMPLATE_FOLDER=Path(__file__).parent.parent / 'templates' / 'email'
        )
        self.fast_mail = FastMail(self.conf)

        # Create templates directory if it doesn't exist
        template_dir = Path(__file__).parent.parent / 'templates' / 'email'
        template_dir.mkdir(parents=True, exist_ok=True)

    async def send_email(
        self,
        email_to: List[EmailStr],
        subject: str,
        template_name: str,
        template_data: Dict[str, Any]
    ):
        """Send an email using a template"""
        try:
            message = MessageSchema(
                subject=subject,
                recipients=email_to,
                template_body=template_data,
                subtype="html"
            )

            await self.fast_mail.send_message(
                message,
                template_name=template_name
            )
            return True
        except Exception as e:
            print(f"Failed to send email: {str(e)}")
            return False

    async def send_order_confirmation(self, email: EmailStr, order_data: Dict[str, Any]):
        """Send order confirmation email"""
        return await self.send_email(
            email_to=[email],
            subject="Order Confirmation",
            template_name="order_confirmation.html",
            template_data={
                "order_id": str(order_data["_id"]),
                "total_amount": order_data["total_amount"],
                "products": order_data["products"],
                "status": order_data["status"]
            }
        )

    async def send_password_reset(self, email: EmailStr, reset_token: str):
        """Send password reset email"""
        return await self.send_email(
            email_to=[email],
            subject="Password Reset Request",
            template_name="password_reset.html",
            template_data={
                "reset_token": reset_token,
                "reset_url": f"{settings.FRONTEND_URL}/reset-password?token={reset_token}"
            }
        )

    async def send_welcome_email(self, email: EmailStr, username: str):
        """Send welcome email to new users"""
        return await self.send_email(
            email_to=[email],
            subject="Welcome to Admin Dashboard",
            template_name="welcome.html",
            template_data={
                "username": username,
                "login_url": f"{settings.FRONTEND_URL}/login"
            }
        )

    async def send_low_stock_alert(self, email: EmailStr, product_data: Dict[str, Any]):
        """Send low stock alert to admins"""
        return await self.send_email(
            email_to=[email],
            subject="Low Stock Alert",
            template_name="low_stock_alert.html",
            template_data={
                "product_name": product_data["name"],
                "current_stock": product_data["inventory_count"],
                "product_id": str(product_data["_id"])
            }
        )

email_service = EmailService()
app/utils/file_storage.py
ADDED
@@ -0,0 +1,79 @@
import os
import shutil
from fastapi import UploadFile
from datetime import datetime
from pathlib import Path
from typing import Optional
from ..core.config import settings
from .logger import logger

class FileStorage:
    def __init__(self):
        self.upload_dir = Path("uploads")
        self.upload_dir.mkdir(exist_ok=True)

        # Create subdirectories for different file types
        self.image_dir = self.upload_dir / "images"
        self.document_dir = self.upload_dir / "documents"
        self.image_dir.mkdir(exist_ok=True)
        self.document_dir.mkdir(exist_ok=True)

    async def save_file(
        self,
        file: UploadFile,
        category: str = "documents",
        max_size: int = 10 * 1024 * 1024  # 10MB default
    ) -> Optional[str]:
        try:
            # Validate file size
            file.file.seek(0, os.SEEK_END)
            size = file.file.tell()
            file.file.seek(0)

            if size > max_size:
                raise ValueError(f"File size exceeds maximum limit of {max_size/1024/1024}MB")

            # Generate unique filename
            timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
            filename = f"{timestamp}_{file.filename}"

            # Determine storage directory based on category
            if category == "images":
                save_dir = self.image_dir
                allowed_types = {".jpg", ".jpeg", ".png", ".gif"}
            else:
                save_dir = self.document_dir
                allowed_types = {".pdf", ".doc", ".docx", ".txt"}

            # Validate file type
            file_ext = Path(file.filename).suffix.lower()
            if file_ext not in allowed_types:
                raise ValueError(f"File type {file_ext} not allowed")

            # Save file
            file_path = save_dir / filename
            with file_path.open("wb") as buffer:
                shutil.copyfileobj(file.file, buffer)

            return str(file_path.relative_to(self.upload_dir))

        except Exception as e:
            logger.error(f"File upload error: {str(e)}")
            return None

    async def delete_file(self, file_path: str) -> bool:
        try:
            full_path = self.upload_dir / file_path
            if full_path.exists():
                full_path.unlink()
                return True
            return False
        except Exception as e:
            logger.error(f"File deletion error: {str(e)}")
            return False

    def get_file_url(self, file_path: str) -> str:
        """Generate URL for accessing the file"""
        return f"/uploads/{file_path}"

file_storage = FileStorage()
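save_file above copies the upload with the blocking shutil.copyfileobj inside an async def, so a large upload stalls the event loop. A hedged sketch of a chunked, non-blocking write using aiofiles (already pulled in by app/utils/email.py in this commit) and UploadFile.read; the helper name and chunk size are illustrative only:

# Sketch: stream an UploadFile to disk in chunks without blocking the loop.
import aiofiles
from fastapi import UploadFile

async def write_upload(file: UploadFile, dest_path: str, chunk_size: int = 1024 * 1024) -> None:
    async with aiofiles.open(dest_path, "wb") as out:
        while chunk := await file.read(chunk_size):
            await out.write(chunk)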
app/utils/logger.py
ADDED
@@ -0,0 +1,59 @@
import logging
import sys
from datetime import datetime
from pathlib import Path
from logging.handlers import RotatingFileHandler
from ..core.config import settings

# Create logs directory if it doesn't exist
logs_dir = Path("logs")
logs_dir.mkdir(exist_ok=True)

# Configure logging format
log_format = logging.Formatter(
    "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
)

def setup_logger(name: str) -> logging.Logger:
    logger = logging.getLogger(name)
    logger.setLevel(logging.INFO)

    # Console handler
    console_handler = logging.StreamHandler(sys.stdout)
    console_handler.setFormatter(log_format)
    logger.addHandler(console_handler)

    # File handler with rotation
    file_handler = RotatingFileHandler(
        logs_dir / f"{name}.log",
        maxBytes=10485760,  # 10MB
        backupCount=5
    )
    file_handler.setFormatter(log_format)
    logger.addHandler(file_handler)

    return logger

# Create main application logger
logger = setup_logger("admin_dashboard")

def log_api_request(method: str, path: str, status_code: int, duration: float):
    """Log API request details"""
    logger.info(
        f"API Request - Method: {method}, Path: {path}, "
        f"Status: {status_code}, Duration: {duration:.3f}s"
    )

def log_error(error: Exception, context: dict = None):
    """Log error with context"""
    logger.error(
        f"Error: {str(error)}, Type: {type(error).__name__}, "
        f"Context: {context or {}}"
    )

def log_database_operation(operation: str, collection: str, success: bool):
    """Log database operations"""
    logger.info(
        f"Database Operation - Type: {operation}, Collection: {collection}, "
        f"Success: {success}"
    )
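One caveat in setup_logger above: it attaches a console and file handler on every call, so calling it more than once for the same name duplicates every log line. A small guard is the usual fix, sketched here against the same module-level log_format and logs_dir:

# Sketch: only attach handlers the first time a named logger is configured.
def setup_logger(name: str) -> logging.Logger:
    logger = logging.getLogger(name)
    logger.setLevel(logging.INFO)
    if logger.handlers:  # already configured elsewhere
        return logger
    console_handler = logging.StreamHandler(sys.stdout)
    console_handler.setFormatter(log_format)
    logger.addHandler(console_handler)
    file_handler = RotatingFileHandler(logs_dir / f"{name}.log", maxBytes=10485760, backupCount=5)
    file_handler.setFormatter(log_format)
    logger.addHandler(file_handler)
    return logger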