diff --git a/.coverage b/.coverage
new file mode 100644
index 0000000000000000000000000000000000000000..dddc9d4bcaa00c0461e80c0d9b218752848bab71
Binary files /dev/null and b/.coverage differ
diff --git a/.dockerignore b/.dockerignore
new file mode 100644
index 0000000000000000000000000000000000000000..52cfe2ed36af03fa5827508b3e17fb6d1726d2fa
--- /dev/null
+++ b/.dockerignore
@@ -0,0 +1,43 @@
+# Git
+.git
+.gitignore
+
+# Python
+__pycache__/
+*.py[cod]
+*$py.class
+*.so
+.Python
+env/
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+
+# Virtual Environment
+venv/
+ENV/
+
+# IDE
+.idea/
+.vscode/
+*.swp
+*.swo
+
+# Project specific
+logs/
+uploads/
+backups/
+.env
+*.log
\ No newline at end of file
diff --git a/.env b/.env
new file mode 100644
index 0000000000000000000000000000000000000000..4937dde0d8caabd43bf8a6523b16671b101c40ba
--- /dev/null
+++ b/.env
@@ -0,0 +1,14 @@
+PROJECT_NAME=Admin Dashboard API
+VERSION=1.0.0
+API_V1_STR=/api/v1
+
+# Security
+SECRET_KEY=your-secret-key-here-change-in-production
+ACCESS_TOKEN_EXPIRE_MINUTES=30
+ALGORITHM=HS256
+
+# Database
+DATABASE_URL=postgresql+asyncpg://postgres:CHANGE_ME_DO_NOT_COMMIT_REAL_PASSWORD@db.juycnkjuzylnbruwaqmp.supabase.co:5432/postgres
+# Redis Cache
+REDIS_HOST=localhost
+REDIS_PORT=6379
\ No newline at end of file
diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..c352ebe6bfcff93108607d9289554c8f783fa5fd
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,58 @@
+# Use Python 3.11 slim as base image
+FROM python:3.11-slim as builder
+
+# Set working directory
+WORKDIR /app
+
+# Install system dependencies
+RUN apt-get update && apt-get install -y \
+ gcc \
+ libpq-dev \
+ && rm -rf /var/lib/apt/lists/*
+
+# Install Python dependencies
+COPY requirements.txt .
+RUN pip wheel --no-cache-dir --no-deps --wheel-dir /app/wheels -r requirements.txt
+
+# Final stage
+FROM python:3.11-slim
+
+# Create non-root user
+RUN useradd -m appuser
+
+# Set working directory
+WORKDIR /app
+
+# Install system dependencies
+RUN apt-get update && apt-get install -y \
+ libpq5 \
+ && rm -rf /var/lib/apt/lists/*
+
+# Copy wheels from builder stage
+COPY --from=builder /app/wheels /wheels
+COPY --from=builder /app/requirements.txt .
+
+# Install Python packages
+RUN pip install --no-cache /wheels/*
+
+# Copy application code
+COPY ./app app/
+COPY ./alembic.ini .
+COPY ./alembic alembic/
+
+# Create necessary directories with proper permissions
+RUN mkdir -p /app/logs /app/uploads/images /app/uploads/documents /app/backups && \
+ chown -R appuser:appuser /app
+
+# Switch to non-root user
+USER appuser
+
+# Set environment variables
+ENV PYTHONPATH=/app \
+ PYTHONUNBUFFERED=1
+
+# Expose port
+EXPOSE 8000
+
+# Start the application with uvicorn
+CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"]
\ No newline at end of file
diff --git a/alembic.ini b/alembic.ini
new file mode 100644
index 0000000000000000000000000000000000000000..761e652c76b533b9982f8667524e320ae517bd0e
--- /dev/null
+++ b/alembic.ini
@@ -0,0 +1,77 @@
+# A generic, single database configuration.
+
+[alembic]
+# path to migration scripts
+script_location = alembic
+
+# template used to generate migration files
+file_template = %%(year)d%%(month).2d%%(day).2d_%%(hour).2d%%(minute).2d_%%(rev)s_%%(slug)s
+
+# timezone to use when rendering the date within the migration file
+# as well as the filename.
+timezone = UTC
+
+# max length of characters to apply to the "slug" field
+truncate_slug_length = 40
+
+# set to 'true' to run the environment during
+# the 'revision' command, regardless of autogenerate
+revision_environment = false
+
+# set to 'true' to allow .pyc and .pyo files without
+# a source .py file to be detected as revisions in the
+# versions/ directory
+sourceless = false
+
+# version location specification
+version_locations = alembic/versions
+
+# version path separator
+version_path_separator = os
+
+# the output encoding used when revision files
+# are written from script.py.mako
+output_encoding = utf-8
+
+sqlalchemy.url = postgresql+psycopg2://postgres:CHANGE_ME_DO_NOT_COMMIT_REAL_PASSWORD@db.mqyrkmsdgugdhxiucukb.supabase.co:5432/postgres
+
+[post_write_hooks]
+# format using "black"
+hooks = black
+black.type = console_scripts
+black.entrypoint = black
+black.options = -l 79 REVISION_SCRIPT_FILENAME
+
+[loggers]
+keys = root,sqlalchemy,alembic
+
+[handlers]
+keys = console
+
+[formatters]
+keys = generic
+
+[logger_root]
+level = WARN
+handlers = console
+qualname =
+
+[logger_sqlalchemy]
+level = WARN
+handlers =
+qualname = sqlalchemy.engine
+
+[logger_alembic]
+level = INFO
+handlers =
+qualname = alembic
+
+[handler_console]
+class = StreamHandler
+args = (sys.stderr,)
+level = NOTSET
+formatter = generic
+
+[formatter_generic]
+format = %(levelname)-5.5s [%(name)s] %(message)s
+datefmt = %H:%M:%S
diff --git a/alembic/README b/alembic/README
new file mode 100644
index 0000000000000000000000000000000000000000..98e4f9c44effe479ed38c66ba922e7bcc672916f
--- /dev/null
+++ b/alembic/README
@@ -0,0 +1 @@
+Generic single-database configuration.
\ No newline at end of file
diff --git a/alembic/__pycache__/env.cpython-312.pyc b/alembic/__pycache__/env.cpython-312.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..b114317a5007040593853113a5442bf4917b9f53
Binary files /dev/null and b/alembic/__pycache__/env.cpython-312.pyc differ
diff --git a/alembic/env.py b/alembic/env.py
new file mode 100644
index 0000000000000000000000000000000000000000..49188293be5c3fc2086ce6abd13b00df072f3a02
--- /dev/null
+++ b/alembic/env.py
@@ -0,0 +1,63 @@
+from logging.config import fileConfig
+from sqlalchemy import engine_from_config
+from sqlalchemy import pool
+from alembic import context
+import os
+import sys
+from pathlib import Path
+
+# Add the parent directory to the Python path
+parent_dir = str(Path(__file__).resolve().parents[1])
+sys.path.append(parent_dir)
+
+from app.core.config import settings
+from app.db.models import Base
+
+config = context.config
+
+if config.config_file_name is not None:
+ fileConfig(config.config_file_name)
+
+def get_url():
+ return str(settings.DATABASE_URL).replace("+asyncpg", "+psycopg2")
+
+config.set_main_option("sqlalchemy.url", get_url())
+
+target_metadata = Base.metadata
+
+def run_migrations_offline() -> None:
+ """Run migrations in 'offline' mode."""
+ url = get_url()
+ context.configure(
+ url=url,
+ target_metadata=target_metadata,
+ literal_binds=True,
+ dialect_opts={"paramstyle": "named"},
+ )
+
+ with context.begin_transaction():
+ context.run_migrations()
+
+def run_migrations_online() -> None:
+ """Run migrations in 'online' mode."""
+ configuration = config.get_section(config.config_ini_section)
+ configuration["sqlalchemy.url"] = get_url()
+ connectable = engine_from_config(
+ configuration,
+ prefix="sqlalchemy.",
+ poolclass=pool.NullPool,
+ )
+
+ with connectable.connect() as connection:
+ context.configure(
+ connection=connection,
+ target_metadata=target_metadata
+ )
+
+ with context.begin_transaction():
+ context.run_migrations()
+
+if context.is_offline_mode():
+ run_migrations_offline()
+else:
+ run_migrations_online()
diff --git a/alembic/script.py.mako b/alembic/script.py.mako
new file mode 100644
index 0000000000000000000000000000000000000000..fbc4b07dcef98b20c6f96b642097f35e8433258e
--- /dev/null
+++ b/alembic/script.py.mako
@@ -0,0 +1,26 @@
+"""${message}
+
+Revision ID: ${up_revision}
+Revises: ${down_revision | comma,n}
+Create Date: ${create_date}
+
+"""
+from typing import Sequence, Union
+
+from alembic import op
+import sqlalchemy as sa
+${imports if imports else ""}
+
+# revision identifiers, used by Alembic.
+revision: str = ${repr(up_revision)}
+down_revision: Union[str, None] = ${repr(down_revision)}
+branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
+depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
+
+
+def upgrade() -> None:
+ ${upgrades if upgrades else "pass"}
+
+
+def downgrade() -> None:
+ ${downgrades if downgrades else "pass"}
diff --git a/app/__init__.py b/app/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/app/__pycache__/__init__.cpython-312.pyc b/app/__pycache__/__init__.cpython-312.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..96195daa7444dc966e9a0873ad74c05b850fbf2e
Binary files /dev/null and b/app/__pycache__/__init__.cpython-312.pyc differ
diff --git a/app/__pycache__/main.cpython-312.pyc b/app/__pycache__/main.cpython-312.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..ed14fbab92213a708f7d7497d16fbb96d8f72e1f
Binary files /dev/null and b/app/__pycache__/main.cpython-312.pyc differ
diff --git a/app/api/__init__.py b/app/api/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/app/api/__pycache__/__init__.cpython-312.pyc b/app/api/__pycache__/__init__.cpython-312.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..82cd1dc07c9437b2c6dfc859cb1d42f87852919c
Binary files /dev/null and b/app/api/__pycache__/__init__.cpython-312.pyc differ
diff --git a/app/api/__pycache__/analytics.cpython-312.pyc b/app/api/__pycache__/analytics.cpython-312.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..0dd214dd87b6201fc73a01ebc8dc583e97abaed6
Binary files /dev/null and b/app/api/__pycache__/analytics.cpython-312.pyc differ
diff --git a/app/api/__pycache__/auth.cpython-312.pyc b/app/api/__pycache__/auth.cpython-312.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..69a5d85c86aa573a1a98534d9498e462d0d50fd4
Binary files /dev/null and b/app/api/__pycache__/auth.cpython-312.pyc differ
diff --git a/app/api/__pycache__/calendar.cpython-312.pyc b/app/api/__pycache__/calendar.cpython-312.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..2b70369a9d21f29d0e5200e1a9e5f3f6f9a4252e
Binary files /dev/null and b/app/api/__pycache__/calendar.cpython-312.pyc differ
diff --git a/app/api/__pycache__/files.cpython-312.pyc b/app/api/__pycache__/files.cpython-312.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..e376e01db570903610b4f2241944ed963fb2340e
Binary files /dev/null and b/app/api/__pycache__/files.cpython-312.pyc differ
diff --git a/app/api/__pycache__/maintenance.cpython-312.pyc b/app/api/__pycache__/maintenance.cpython-312.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..0107c4bbeb5bbc9e2f1b9fda83c9b019445db3d7
Binary files /dev/null and b/app/api/__pycache__/maintenance.cpython-312.pyc differ
diff --git a/app/api/__pycache__/notifications.cpython-312.pyc b/app/api/__pycache__/notifications.cpython-312.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..9b0e53a3836b22334ccf42fc9d9fbf8062de273c
Binary files /dev/null and b/app/api/__pycache__/notifications.cpython-312.pyc differ
diff --git a/app/api/__pycache__/orders.cpython-312.pyc b/app/api/__pycache__/orders.cpython-312.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..0c2bf7a57dbbfc8c10d09d37c9e2d0c826e442f3
Binary files /dev/null and b/app/api/__pycache__/orders.cpython-312.pyc differ
diff --git a/app/api/__pycache__/products.cpython-312.pyc b/app/api/__pycache__/products.cpython-312.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..f7cc45e9d03f571a3ed4e775a8fc675549d9e190
Binary files /dev/null and b/app/api/__pycache__/products.cpython-312.pyc differ
diff --git a/app/api/__pycache__/scheduler.cpython-312.pyc b/app/api/__pycache__/scheduler.cpython-312.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..40ba8748b57a76499a1abd9fc8c0f34721f7c5ae
Binary files /dev/null and b/app/api/__pycache__/scheduler.cpython-312.pyc differ
diff --git a/app/api/__pycache__/users.cpython-312.pyc b/app/api/__pycache__/users.cpython-312.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..0805c219879c14e7d11352b302b42276182ca711
Binary files /dev/null and b/app/api/__pycache__/users.cpython-312.pyc differ
diff --git a/app/api/analytics.py b/app/api/analytics.py
new file mode 100644
index 0000000000000000000000000000000000000000..8e2187fc02954896c76d8b787466a01cc5cfd4d0
--- /dev/null
+++ b/app/api/analytics.py
@@ -0,0 +1,232 @@
+from fastapi import APIRouter, Depends, Query, HTTPException
+from sqlalchemy.ext.asyncio import AsyncSession
+from sqlalchemy import select, func, cast, Date, and_
+from datetime import datetime, timedelta
+from typing import Dict, Any, Optional
+from ..core.dependencies import get_current_active_user
+from ..db.database import get_db
+from ..db.models import Order, Product, User
+
+router = APIRouter()
+
+@router.get("/sales")
+async def get_sales_analytics(
+ start_date: datetime = Query(default=None),
+ end_date: datetime = Query(default=None),
+ branch_id: Optional[int] = Query(None, description="Filter analytics by branch"),
+ current_user: User = Depends(get_current_active_user),
+ db: AsyncSession = Depends(get_db)
+) -> Dict[str, Any]:
+ if not start_date:
+ start_date = datetime.now() - timedelta(days=30)
+ if not end_date:
+ end_date = datetime.now()
+
+ # Build query conditions
+ conditions = [
+ Order.created_at.between(start_date, end_date),
+ Order.status.in_(['completed', 'delivered'])
+ ]
+
+ # Add branch filter
+ if branch_id:
+ if not current_user.is_superuser and branch_id != current_user.branch_id:
+ raise HTTPException(
+ status_code=403,
+ detail="You can only view analytics from your own branch"
+ )
+ conditions.append(Order.branch_id == branch_id)
+ elif not current_user.is_superuser:
+ # Non-superusers can only see their branch's analytics
+ conditions.append(Order.branch_id == current_user.branch_id)
+
+ # Daily sales query
+ stmt = select(
+ cast(Order.created_at, Date).label('date'),
+ func.sum(Order.total_amount).label('total_sales'),
+ func.count().label('order_count')
+ ).where(
+ and_(*conditions)
+ ).group_by(
+ cast(Order.created_at, Date)
+ ).order_by(
+ cast(Order.created_at, Date)
+ )
+
+ result = await db.execute(stmt)
+ daily_sales = result.all()
+
+ # Calculate totals
+ total_revenue = sum(day.total_sales for day in daily_sales)
+ total_orders = sum(day.order_count for day in daily_sales)
+ avg_order_value = total_revenue / total_orders if total_orders > 0 else 0
+
+ return {
+ "daily_sales": [
+ {"date": day.date, "total_sales": day.total_sales, "order_count": day.order_count}
+ for day in daily_sales
+ ],
+ "total_revenue": total_revenue,
+ "total_orders": total_orders,
+ "average_order_value": avg_order_value
+ }
+
+@router.get("/products")
+async def get_product_analytics(
+ branch_id: Optional[int] = Query(None, description="Filter analytics by branch"),
+ current_user: User = Depends(get_current_active_user),
+ db: AsyncSession = Depends(get_db)
+) -> Dict[str, Any]:
+ # Build base conditions
+ conditions = []
+
+ # Add branch filter
+ if branch_id:
+ if not current_user.is_superuser and branch_id != current_user.branch_id:
+ raise HTTPException(
+ status_code=403,
+ detail="You can only view analytics from your own branch"
+ )
+ conditions.append(Product.branch_id == branch_id)
+ elif not current_user.is_superuser:
+ conditions.append(Product.branch_id == current_user.branch_id)
+
+ # Top selling products
+ stmt = select(
+ Product,
+ func.sum(Order.total_amount).label('total_revenue'),
+ func.count().label('total_orders')
+ ).join(
+ Order, Product.id == Order.id
+ ).where(
+ and_(*conditions)
+ ).group_by(
+ Product.id
+ ).order_by(
+ func.sum(Order.total_amount).desc()
+ ).limit(10)
+
+ result = await db.execute(stmt)
+ top_products = result.all()
+
+ # Count total and low stock products
+ total_products = await db.scalar(
+ select(func.count()).select_from(Product).where(and_(*conditions))
+ )
+
+ low_stock_conditions = conditions + [Product.inventory_count < 10]
+ low_stock_count = await db.scalar(
+ select(func.count()).select_from(Product).where(and_(*low_stock_conditions))
+ )
+
+ return {
+ "top_products": [
+ {
+ "id": product.id,
+ "name": product.name,
+ "total_revenue": revenue,
+ "total_orders": orders
+ }
+ for product, revenue, orders in top_products
+ ],
+ "total_products": total_products,
+ "low_stock_products": low_stock_count
+ }
+
+@router.get("/customers")
+async def get_customer_analytics(
+ branch_id: Optional[int] = Query(None, description="Filter analytics by branch"),
+ current_user: User = Depends(get_current_active_user),
+ db: AsyncSession = Depends(get_db)
+) -> Dict[str, Any]:
+ # Build base conditions
+ conditions = []
+
+ # Add branch filter
+ if branch_id:
+ if not current_user.is_superuser and branch_id != current_user.branch_id:
+ raise HTTPException(
+ status_code=403,
+ detail="You can only view analytics from your own branch"
+ )
+ conditions.append(Order.branch_id == branch_id)
+ elif not current_user.is_superuser:
+ conditions.append(Order.branch_id == current_user.branch_id)
+
+ # Customer statistics
+ stmt = select(
+ User,
+ func.sum(Order.total_amount).label('total_spent'),
+ func.count().label('total_orders')
+ ).join(
+ Order, User.id == Order.customer_id
+ ).where(
+ and_(*conditions)
+ ).group_by(
+ User.id
+ ).order_by(
+ func.sum(Order.total_amount).desc()
+ )
+
+ result = await db.execute(stmt)
+ customer_data = result.all()
+
+ total_customers = len(customer_data)
+ total_revenue = sum(spent for _, spent, _ in customer_data)
+ avg_customer_value = total_revenue / total_customers if total_customers > 0 else 0
+
+ # Customer segments
+ segments = {
+ "high_value": len([c for c, spent, _ in customer_data if spent > 1000]),
+ "medium_value": len([c for c, spent, _ in customer_data if 500 <= spent <= 1000]),
+ "low_value": len([c for c, spent, _ in customer_data if spent < 500])
+ }
+
+ return {
+ "total_customers": total_customers,
+ "average_customer_value": avg_customer_value,
+ "customer_segments": segments,
+ "top_customers": [
+ {
+ "id": customer.id,
+ "email": customer.email,
+ "total_spent": spent,
+ "total_orders": orders
+ }
+ for customer, spent, orders in customer_data[:10] # Top 10 customers
+ ]
+ }
+
+@router.get("/dashboard")
+async def get_dashboard_analytics(
+ branch_id: Optional[int] = Query(None, description="Filter analytics by branch"),
+ current_user: User = Depends(get_current_active_user),
+ db: AsyncSession = Depends(get_db)
+) -> Dict[str, Any]:
+ """Get a comprehensive dashboard with key metrics"""
+ # Get last 30 days of sales data
+ start_date = datetime.now() - timedelta(days=30)
+ end_date = datetime.now()
+
+ sales_data = await get_sales_analytics(start_date, end_date, branch_id, current_user, db)
+ product_data = await get_product_analytics(branch_id, current_user, db)
+ customer_data = await get_customer_analytics(branch_id, current_user, db)
+
+ return {
+ "sales_summary": {
+ "total_revenue": sales_data["total_revenue"],
+ "total_orders": sales_data["total_orders"],
+ "average_order_value": sales_data["average_order_value"],
+ "daily_sales": sales_data["daily_sales"][-7:] # Last 7 days
+ },
+ "product_summary": {
+ "total_products": product_data["total_products"],
+ "low_stock_products": product_data["low_stock_products"],
+ "top_selling_products": product_data["top_products"][:5] # Top 5 products
+ },
+ "customer_summary": {
+ "total_customers": customer_data["total_customers"],
+ "average_customer_value": customer_data["average_customer_value"],
+ "customer_segments": customer_data["customer_segments"]
+ }
+ }
\ No newline at end of file
diff --git a/app/api/auth.py b/app/api/auth.py
new file mode 100644
index 0000000000000000000000000000000000000000..b6657e7ae4179c038107733ae809e8b3d429990a
--- /dev/null
+++ b/app/api/auth.py
@@ -0,0 +1,70 @@
+from fastapi import APIRouter, Depends, HTTPException, status
+from fastapi.security import OAuth2PasswordBearer, OAuth2PasswordRequestForm
+from sqlalchemy.ext.asyncio import AsyncSession
+from sqlalchemy import select
+from ..core.security import create_access_token, verify_password, get_password_hash
+from ..db.database import get_db
+from ..db.models import User
+from ..db.schemas import UserInDB
+from datetime import timedelta
+from typing import Any
+
+router = APIRouter()
+oauth2_scheme = OAuth2PasswordBearer(tokenUrl="token")
+
+@router.post("/login")
+async def login(
+ form_data: OAuth2PasswordRequestForm = Depends(),
+ db: AsyncSession = Depends(get_db)
+) -> Any:
+ stmt = select(User).where(User.email == form_data.username)
+ result = await db.execute(stmt)
+ user = result.scalar_one_or_none()
+
+ if not user:
+ raise HTTPException(
+ status_code=status.HTTP_401_UNAUTHORIZED,
+ detail="Incorrect email or password",
+ )
+
+ if not verify_password(form_data.password, user.hashed_password):
+ raise HTTPException(
+ status_code=status.HTTP_401_UNAUTHORIZED,
+ detail="Incorrect email or password",
+ )
+
+ access_token = create_access_token(user.id)
+ return {"access_token": access_token, "token_type": "bearer"}
+
+@router.post("/register", response_model=UserInDB)
+async def register(
+ user_data: OAuth2PasswordRequestForm = Depends(),
+ db: AsyncSession = Depends(get_db)
+) -> Any:
+ # Check if user exists
+ stmt = select(User).where(User.email == user_data.username)
+ result = await db.execute(stmt)
+ existing_user = result.scalar_one_or_none()
+
+ if existing_user:
+ raise HTTPException(
+ status_code=status.HTTP_400_BAD_REQUEST,
+ detail="Email already registered",
+ )
+
+ # Create new user
+ user = User(
+ email=user_data.username,
+ hashed_password=get_password_hash(user_data.password),
+ full_name=user_data.username, # You might want to add this as a separate field in the form
+ username=user_data.username,
+ is_active=True,
+ is_superuser=False,
+ roles=["user"]
+ )
+
+ db.add(user)
+ await db.commit()
+ await db.refresh(user)
+
+ return user
\ No newline at end of file
diff --git a/app/api/branches.py b/app/api/branches.py
new file mode 100644
index 0000000000000000000000000000000000000000..7fc62e3c77f65ddd26f9ad8a1f4f38c038ede43e
--- /dev/null
+++ b/app/api/branches.py
@@ -0,0 +1,89 @@
+from fastapi import APIRouter, HTTPException, Depends
+from sqlalchemy.ext.asyncio import AsyncSession
+from sqlalchemy import select
+from typing import List
+from ..core.dependencies import get_current_superuser
+from ..db.database import get_db
+from ..db.models import Branch
+from ..db.schemas import BranchCreate, BranchInDB
+
+router = APIRouter()
+
+@router.post("/", response_model=BranchInDB)
+async def create_branch(
+ branch: BranchCreate,
+ current_user = Depends(get_current_superuser),
+ db: AsyncSession = Depends(get_db)
+) -> BranchInDB:
+ """Create a new branch (superuser only)"""
+ db_branch = Branch(**branch.dict())
+ db.add(db_branch)
+ await db.commit()
+ await db.refresh(db_branch)
+ return db_branch
+
+@router.get("/", response_model=List[BranchInDB])
+async def list_branches(
+ skip: int = 0,
+ limit: int = 100,
+ db: AsyncSession = Depends(get_db)
+) -> List[BranchInDB]:
+ """List all branches"""
+ query = select(Branch).offset(skip).limit(limit)
+ result = await db.execute(query)
+ return result.scalars().all()
+
+@router.get("/{branch_id}", response_model=BranchInDB)
+async def get_branch(
+ branch_id: int,
+ db: AsyncSession = Depends(get_db)
+) -> BranchInDB:
+ """Get a specific branch"""
+ stmt = select(Branch).where(Branch.id == branch_id)
+ result = await db.execute(stmt)
+ branch = result.scalar_one_or_none()
+
+ if not branch:
+ raise HTTPException(status_code=404, detail="Branch not found")
+ return branch
+
+@router.put("/{branch_id}", response_model=BranchInDB)
+async def update_branch(
+ branch_id: int,
+ branch_update: BranchCreate,
+ current_user = Depends(get_current_superuser),
+ db: AsyncSession = Depends(get_db)
+) -> BranchInDB:
+ """Update a branch (superuser only)"""
+ stmt = select(Branch).where(Branch.id == branch_id)
+ result = await db.execute(stmt)
+ branch = result.scalar_one_or_none()
+
+ if not branch:
+ raise HTTPException(status_code=404, detail="Branch not found")
+
+ # Update branch fields
+ for field, value in branch_update.dict().items():
+ setattr(branch, field, value)
+
+ await db.commit()
+ await db.refresh(branch)
+ return branch
+
+@router.delete("/{branch_id}")
+async def delete_branch(
+ branch_id: int,
+ current_user = Depends(get_current_superuser),
+ db: AsyncSession = Depends(get_db)
+):
+ """Delete a branch (superuser only)"""
+ stmt = select(Branch).where(Branch.id == branch_id)
+ result = await db.execute(stmt)
+ branch = result.scalar_one_or_none()
+
+ if not branch:
+ raise HTTPException(status_code=404, detail="Branch not found")
+
+ await db.delete(branch)
+ await db.commit()
+ return {"status": "success", "message": "Branch deleted"}
\ No newline at end of file
diff --git a/app/api/calendar.py b/app/api/calendar.py
new file mode 100644
index 0000000000000000000000000000000000000000..f46c651aa71eea9c5c79b7b5d44d4a93b0746caf
--- /dev/null
+++ b/app/api/calendar.py
@@ -0,0 +1,156 @@
+from fastapi import APIRouter, Depends, HTTPException, Query
+from sqlalchemy.ext.asyncio import AsyncSession
+from sqlalchemy import select, or_
+from typing import List, Dict, Any
+from datetime import datetime, timedelta
+from ..core.dependencies import get_current_active_user
+from ..db.database import get_db
+from ..db.models import Event, User
+from ..db.schemas import EventCreate, EventUpdate, EventInDB, RecurringEventCreate
+
+router = APIRouter()
+
+@router.post("/events", response_model=EventInDB)
+async def create_event(
+ event: EventCreate,
+ current_user: User = Depends(get_current_active_user),
+ db: AsyncSession = Depends(get_db)
+) -> EventInDB:
+ """Create a new calendar event"""
+ db_event = Event(
+ user_id=current_user.id,
+ title=event.title,
+ description=event.description,
+ start_time=event.start_time,
+ end_time=event.end_time,
+ attendees=event.attendees,
+ is_all_day=event.is_all_day,
+ reminder_minutes=event.reminder_minutes,
+ status="scheduled",
+ attendee_responses={}
+ )
+
+ db.add(db_event)
+ await db.commit()
+ await db.refresh(db_event)
+ return db_event
+
+@router.get("/events", response_model=List[EventInDB])
+async def get_events(
+ start_date: datetime = Query(default=None),
+ end_date: datetime = Query(default=None),
+ include_attendee_events: bool = True,
+ current_user: User = Depends(get_current_active_user),
+ db: AsyncSession = Depends(get_db)
+) -> List[EventInDB]:
+ """Get user's events within a date range"""
+ if not start_date:
+ start_date = datetime.now()
+ if not end_date:
+ end_date = start_date + timedelta(days=30)
+
+ query = select(Event).where(
+ Event.start_time >= start_date,
+ Event.end_time <= end_date
+ )
+
+ if include_attendee_events:
+ query = query.where(or_(
+ Event.user_id == current_user.id,
+ Event.attendees.contains([str(current_user.id)])
+ ))
+ else:
+ query = query.where(Event.user_id == current_user.id)
+
+ query = query.order_by(Event.start_time)
+ result = await db.execute(query)
+ return result.scalars().all()
+
+@router.put("/events/{event_id}", response_model=EventInDB)
+async def update_event(
+ event_id: int,
+ event_update: EventUpdate,
+ current_user: User = Depends(get_current_active_user),
+ db: AsyncSession = Depends(get_db)
+) -> EventInDB:
+ """Update an event"""
+ stmt = select(Event).where(
+ Event.id == event_id,
+ Event.user_id == current_user.id
+ )
+ result = await db.execute(stmt)
+ event = result.scalar_one_or_none()
+
+ if not event:
+ raise HTTPException(
+ status_code=404,
+ detail="Event not found or you don't have permission to update it"
+ )
+
+ # Update event fields
+ update_data = event_update.dict(exclude_unset=True)
+ for field, value in update_data.items():
+ setattr(event, field, value)
+
+ event.updated_at = datetime.utcnow()
+ await db.commit()
+ await db.refresh(event)
+ return event
+
+@router.delete("/events/{event_id}")
+async def delete_event(
+ event_id: int,
+ current_user: User = Depends(get_current_active_user),
+ db: AsyncSession = Depends(get_db)
+) -> Dict[str, bool]:
+ """Delete an event"""
+ stmt = select(Event).where(
+ Event.id == event_id,
+ Event.user_id == current_user.id
+ )
+ result = await db.execute(stmt)
+ event = result.scalar_one_or_none()
+
+ if not event:
+ raise HTTPException(
+ status_code=404,
+ detail="Event not found or you don't have permission to delete it"
+ )
+
+ await db.delete(event)
+ await db.commit()
+ return {"success": True}
+
+@router.post("/events/{event_id}/respond")
+async def respond_to_event(
+ event_id: int,
+ response: str,
+ current_user: User = Depends(get_current_active_user),
+ db: AsyncSession = Depends(get_db)
+) -> Dict[str, bool]:
+ """Respond to an event invitation"""
+ if response not in ["accepted", "declined", "maybe"]:
+ raise HTTPException(
+ status_code=400,
+ detail="Invalid response. Must be one of: accepted, declined, maybe"
+ )
+
+ stmt = select(Event).where(
+ Event.id == event_id,
+ Event.attendees.contains([str(current_user.id)])
+ )
+ result = await db.execute(stmt)
+ event = result.scalar_one_or_none()
+
+ if not event:
+ raise HTTPException(
+ status_code=404,
+ detail="Event not found or you are not invited to this event"
+ )
+
+ # Update the response in the attendee_responses dictionary
+ event.attendee_responses[str(current_user.id)] = response
+ event.updated_at = datetime.utcnow()
+
+ await db.commit()
+ return {"success": True}
\ No newline at end of file
diff --git a/app/api/files.py b/app/api/files.py
new file mode 100644
index 0000000000000000000000000000000000000000..48b4b667409d213af64088b96aad9ece852c4972
--- /dev/null
+++ b/app/api/files.py
@@ -0,0 +1,53 @@
+from fastapi import APIRouter, UploadFile, File, Depends, HTTPException
+from fastapi.responses import FileResponse
+from typing import List
+from ..core.dependencies import get_current_active_user
+from ..utils.file_storage import file_storage
+from ..utils.logger import logger
+from pathlib import Path
+
+router = APIRouter()
+
+@router.post("/upload")
+async def upload_file(
+ file: UploadFile = File(...),
+ category: str = "documents",
+ current_user = Depends(get_current_active_user)
+) -> dict:
+ try:
+ file_path = await file_storage.save_file(file, category)
+ if not file_path:
+ raise HTTPException(status_code=400, detail="Failed to upload file")
+
+ return {
+ "filename": file.filename,
+ "stored_path": file_path,
+ "url": file_storage.get_file_url(file_path)
+ }
+ except ValueError as e:
+ raise HTTPException(status_code=400, detail=str(e))
+ except Exception as e:
+ logger.error(f"File upload error: {str(e)}")
+ raise HTTPException(status_code=500, detail="Internal server error")
+
+@router.delete("/{file_path:path}")
+async def delete_file(
+ file_path: str,
+ current_user = Depends(get_current_active_user)
+) -> dict:
+ success = await file_storage.delete_file(file_path)
+ if not success:
+ raise HTTPException(status_code=404, detail="File not found")
+
+ return {"status": "success", "message": "File deleted successfully"}
+
+@router.get("/{file_path:path}")
+async def get_file(
+ file_path: str,
+ current_user = Depends(get_current_active_user)
+):
+ full_path = Path("uploads") / file_path
+ if not full_path.exists():
+ raise HTTPException(status_code=404, detail="File not found")
+
+ return FileResponse(str(full_path))
\ No newline at end of file
diff --git a/app/api/maintenance.py b/app/api/maintenance.py
new file mode 100644
index 0000000000000000000000000000000000000000..54fa475abff30506d8b3c9cfe131327b88883e1a
--- /dev/null
+++ b/app/api/maintenance.py
@@ -0,0 +1,133 @@
+from fastapi import APIRouter, Depends, HTTPException
+from sqlalchemy.ext.asyncio import AsyncSession
+from sqlalchemy import select, delete, func
+from typing import Dict, Any, List
+from datetime import datetime, timedelta
+from ..core.dependencies import get_current_active_user
+from ..db.database import get_db
+from ..db.models import User, Order, Notification, Event
+from ..utils.logger import logger
+
+router = APIRouter()
+
@router.post("/sessions/cleanup")
async def cleanup_sessions(
    current_user: User = Depends(get_current_active_user),
    db: AsyncSession = Depends(get_db)
) -> Dict[str, int]:
    """Manually trigger session cleanup"""
    # Admin-only maintenance endpoint.
    if "admin" not in current_user.roles:
        raise HTTPException(
            status_code=403,
            detail="Only administrators can perform maintenance operations"
        )

    # Purge Event rows older than one week.
    # NOTE(review): despite the name, this deletes Event rows — confirm
    # events are what represents "sessions" here.
    week_ago = datetime.utcnow() - timedelta(days=7)
    result = await db.execute(delete(Event).where(Event.created_at < week_ago))
    await db.commit()

    return {"deleted_sessions": result.rowcount}
+
@router.post("/data/archive")
async def archive_data(
    current_user: User = Depends(get_current_active_user),
    db: AsyncSession = Depends(get_db)
) -> Dict[str, int]:
    """Manually trigger data archiving"""
    # Admin-only maintenance endpoint.
    if "admin" not in current_user.roles:
        raise HTTPException(
            status_code=403,
            detail="Only administrators can perform maintenance operations"
        )

    # "Archiving" here means deleting rows older than one year.
    # NOTE(review): no copy of the rows is retained anywhere — confirm this
    # is the intended behavior before relying on it as an archive.
    cutoff = datetime.utcnow() - timedelta(days=365)

    # Finished orders past the cutoff.
    old_orders = delete(Order).where(
        Order.created_at < cutoff,
        Order.status.in_(["delivered", "cancelled"])
    )
    # Read notifications past the cutoff.
    old_notifications = delete(Notification).where(
        Notification.created_at < cutoff,
        Notification.read == True
    )

    archived = {
        "orders": (await db.execute(old_orders)).rowcount,
        "notifications": (await db.execute(old_notifications)).rowcount,
    }
    await db.commit()
    return archived
+
@router.get("/health")
async def check_health(
    current_user: User = Depends(get_current_active_user),
    db: AsyncSession = Depends(get_db)
) -> Dict[str, Any]:
    """Check system health metrics"""
    if "admin" not in current_user.roles:
        raise HTTPException(
            status_code=403,
            detail="Only administrators can view system health"
        )

    try:
        # Simple round-trip to prove the database connection is alive.
        await db.execute(select(1))

        # Basic table counts for the dashboard.
        counts = {}
        for label, model in (("users", User), ("orders", Order), ("notifications", Notification)):
            counts[f"total_{label}"] = await db.scalar(select(func.count()).select_from(model))

        return {
            "status": "healthy",
            "timestamp": datetime.utcnow(),
            "database": {"connected": True, **counts},
        }
    except Exception as e:
        # Report the failure in the payload instead of raising, so the
        # endpoint stays usable for monitoring even when the DB is down.
        logger.error(f"Health check error: {str(e)}")
        return {
            "status": "unhealthy",
            "error": str(e),
            "timestamp": datetime.utcnow()
        }
+
@router.post("/database/maintenance")
async def perform_db_maintenance(
    current_user: User = Depends(get_current_active_user),
    db: AsyncSession = Depends(get_db)
) -> Dict[str, Any]:
    """Manually trigger database maintenance (admin only).

    Runs the session cleanup, then a PostgreSQL ``VACUUM ANALYZE``.
    Raises 500 with the underlying error message on failure.
    """
    if "admin" not in current_user.roles:
        raise HTTPException(
            status_code=403,
            detail="Only administrators can perform maintenance operations"
        )

    # Local import: text() is only needed by this handler.
    from sqlalchemy import text

    try:
        # Cleanup expired sessions
        await cleanup_sessions(current_user, db)

        # Bug fix: SQLAlchemy 2.x rejects raw SQL strings passed to
        # execute(); they must be wrapped in text().
        # NOTE(review): VACUUM cannot run inside a transaction block —
        # confirm the session/connection is in autocommit mode here.
        await db.execute(text("VACUUM ANALYZE;"))

        return {
            "status": "success",
            "message": "Database maintenance completed successfully"
        }
    except Exception as e:
        logger.error(f"Database maintenance error: {str(e)}")
        raise HTTPException(
            status_code=500,
            detail=f"Database maintenance failed: {str(e)}"
        )
\ No newline at end of file
diff --git a/app/api/notifications.py b/app/api/notifications.py
new file mode 100644
index 0000000000000000000000000000000000000000..889f4565eeb3dc405b980ac1a64726297570b54a
--- /dev/null
+++ b/app/api/notifications.py
@@ -0,0 +1,86 @@
+from fastapi import APIRouter, Depends, HTTPException, Query
+from sqlalchemy.ext.asyncio import AsyncSession
+from sqlalchemy import select, update
+from typing import List, Dict, Any, Optional
+from ..core.dependencies import get_current_active_user
+from ..db.database import get_db
+from ..db.models import Notification, User
+from ..db.schemas import NotificationCreate, NotificationInDB
+
+router = APIRouter()
+
@router.get("/", response_model=List[NotificationInDB])
async def get_notifications(
    skip: int = Query(0, ge=0),
    limit: int = Query(50, ge=1, le=100),
    unread_only: bool = False,
    current_user: User = Depends(get_current_active_user),
    db: AsyncSession = Depends(get_db)
) -> List[NotificationInDB]:
    """Get user's notifications"""
    # Always scoped to the caller's own notifications.
    stmt = select(Notification).where(Notification.user_id == current_user.id)
    if unread_only:
        stmt = stmt.where(Notification.read == False)

    stmt = (
        stmt.order_by(Notification.created_at.desc())
        .offset(skip)
        .limit(limit)
    )
    rows = await db.execute(stmt)
    return rows.scalars().all()
+
@router.post("/mark-read/{notification_id}", response_model=NotificationInDB)
async def mark_notification_read(
    notification_id: int,
    current_user: User = Depends(get_current_active_user),
    db: AsyncSession = Depends(get_db)
) -> NotificationInDB:
    """Mark a notification as read"""
    # Scoped to the caller so users cannot touch other users' rows.
    row = await db.execute(
        select(Notification).where(
            Notification.id == notification_id,
            Notification.user_id == current_user.id,
        )
    )
    notification = row.scalar_one_or_none()
    if notification is None:
        raise HTTPException(status_code=404, detail="Notification not found")

    notification.read = True
    await db.commit()
    await db.refresh(notification)
    return notification
+
@router.post("/mark-all-read")
async def mark_all_notifications_read(
    current_user: User = Depends(get_current_active_user),
    db: AsyncSession = Depends(get_db)
) -> Dict[str, int]:
    """Mark all notifications as read"""
    # Single bulk UPDATE over the caller's unread notifications.
    result = await db.execute(
        update(Notification)
        .where(
            Notification.user_id == current_user.id,
            Notification.read == False,
        )
        .values(read=True)
    )
    await db.commit()
    return {"marked_count": result.rowcount}
+
@router.delete("/{notification_id}")
async def delete_notification(
    notification_id: int,
    current_user: User = Depends(get_current_active_user),
    db: AsyncSession = Depends(get_db)
) -> Dict[str, bool]:
    """Delete a notification"""
    # Look up the row, scoped to the caller.
    row = await db.execute(
        select(Notification).where(
            Notification.id == notification_id,
            Notification.user_id == current_user.id,
        )
    )
    notification = row.scalar_one_or_none()
    if notification is None:
        raise HTTPException(status_code=404, detail="Notification not found")

    await db.delete(notification)
    await db.commit()
    return {"success": True}
\ No newline at end of file
diff --git a/app/api/orders.py b/app/api/orders.py
new file mode 100644
index 0000000000000000000000000000000000000000..092c4d3ed4392bebdc0c150bec3b738fde33ef5c
--- /dev/null
+++ b/app/api/orders.py
@@ -0,0 +1,186 @@
+from fastapi import APIRouter, HTTPException, status, Depends, Query
+from sqlalchemy.ext.asyncio import AsyncSession
+from sqlalchemy import select
+from typing import List, Optional
+from ..core.dependencies import get_current_active_user
+from ..db.database import get_db
+from ..db.models import Order, Product, OrderItem, User
+from ..db.schemas import OrderCreate, OrderInDB
+from datetime import datetime
+
+router = APIRouter()
+
@router.post("/", response_model=OrderInDB)
async def create_order(
    order: OrderCreate,
    current_user: User = Depends(get_current_active_user),
    db: AsyncSession = Depends(get_db)
) -> OrderInDB:
    """Create an order for the caller's branch.

    Validates each line item against the branch's products, decrements
    inventory, snapshots the current product price onto the order item,
    and persists everything in a single commit. Raises 403 when creating
    for another branch (non-superusers), 404 for an unknown product, and
    400 when inventory is insufficient.
    """
    # Ensure user belongs to the branch they're creating the order for
    if current_user.branch_id != order.branch_id and not current_user.is_superuser:
        raise HTTPException(
            status_code=403,
            detail="You can only create orders for your own branch"
        )

    # Calculate total and validate products
    total = 0
    order_items = []

    for item in order.items:
        # Get product
        stmt = select(Product).where(
            Product.id == item.product_id,
            Product.branch_id == order.branch_id  # Ensure product belongs to the same branch
        )
        result = await db.execute(stmt)
        product = result.scalar_one_or_none()

        if not product:
            raise HTTPException(
                status_code=404,
                detail=f"Product {item.product_id} not found in this branch"
            )

        if product.inventory_count < item.quantity:
            raise HTTPException(
                status_code=400,
                detail=f"Insufficient inventory for product {item.product_id}"
            )

        # Update inventory
        # NOTE(review): the decrement is only persisted by the commit below,
        # so a failed later item leaves the session uncommitted (no partial
        # writes) — but concurrent requests can still oversell; confirm a
        # DB-level constraint or row lock exists.
        product.inventory_count -= item.quantity
        total += product.price * item.quantity

        # Create order item (price is snapshotted at order time)
        order_item = OrderItem(
            product_id=item.product_id,
            quantity=item.quantity,
            price=product.price
        )
        order_items.append(order_item)

    # Create order
    db_order = Order(
        customer_id=order.customer_id,
        branch_id=order.branch_id,
        total_amount=total,
        status="pending",
        items=order_items
    )

    db.add(db_order)
    await db.commit()
    await db.refresh(db_order)
    return db_order
+
@router.get("/", response_model=List[OrderInDB])
async def list_orders(
    skip: int = 0,
    limit: int = 10,
    status: Optional[str] = None,
    branch_id: Optional[int] = Query(None, description="Filter orders by branch"),
    current_user: User = Depends(get_current_active_user),
    db: AsyncSession = Depends(get_db)
) -> List[OrderInDB]:
    """Page through orders, optionally filtered by status and branch.

    Non-superusers are always confined to their own branch.
    """
    stmt = select(Order)

    if status:
        stmt = stmt.where(Order.status == status)

    if branch_id:
        # An explicit branch filter is only honoured for the caller's own
        # branch unless they are a superuser.
        if not current_user.is_superuser and branch_id != current_user.branch_id:
            raise HTTPException(
                status_code=403,
                detail="You can only view orders from your own branch"
            )
        stmt = stmt.where(Order.branch_id == branch_id)
    elif not current_user.is_superuser:
        stmt = stmt.where(Order.branch_id == current_user.branch_id)

    rows = await db.execute(stmt.offset(skip).limit(limit))
    return rows.scalars().all()
+
@router.get("/{order_id}", response_model=OrderInDB)
async def get_order(
    order_id: int,
    current_user: User = Depends(get_current_active_user),
    db: AsyncSession = Depends(get_db)
) -> OrderInDB:
    """Fetch a single order; non-superusers may only see their own branch."""
    result = await db.execute(select(Order).where(Order.id == order_id))
    order = result.scalar_one_or_none()

    if order is None:
        raise HTTPException(status_code=404, detail="Order not found")

    if not current_user.is_superuser and order.branch_id != current_user.branch_id:
        raise HTTPException(status_code=403, detail="You cannot access orders from other branches")

    return order
+
@router.put("/{order_id}/status", response_model=OrderInDB)
async def update_order_status(
    order_id: int,
    status: str,
    current_user: User = Depends(get_current_active_user),
    db: AsyncSession = Depends(get_db)
) -> OrderInDB:
    """Transition an order to a new status (branch-scoped)."""
    # Whitelist of accepted states.
    if status not in {"pending", "processing", "shipped", "delivered", "cancelled"}:
        raise HTTPException(status_code=400, detail="Invalid status")

    result = await db.execute(select(Order).where(Order.id == order_id))
    order = result.scalar_one_or_none()
    if order is None:
        raise HTTPException(status_code=404, detail="Order not found")

    # Branch scoping: only superusers may touch other branches' orders.
    if not current_user.is_superuser and order.branch_id != current_user.branch_id:
        raise HTTPException(status_code=403, detail="You cannot modify orders from other branches")

    order.status = status
    order.updated_at = datetime.utcnow()

    await db.commit()
    await db.refresh(order)
    return order
+
@router.delete("/{order_id}")
async def delete_order(
    order_id: int,
    current_user: User = Depends(get_current_active_user),
    db: AsyncSession = Depends(get_db)
):
    """Delete an order and return its reserved quantities to inventory.

    Raises 404 for an unknown order and 403 when a non-superuser targets
    an order from another branch.
    """
    # Local import: eager-loading is only needed by this handler.
    from sqlalchemy.orm import selectinload

    # Bug fix: eagerly load order.items — with AsyncSession the lazy
    # relationship access in the loop below would raise MissingGreenlet.
    stmt = (
        select(Order)
        .options(selectinload(Order.items))
        .where(Order.id == order_id)
    )
    result = await db.execute(stmt)
    order = result.scalar_one_or_none()

    if not order:
        raise HTTPException(status_code=404, detail="Order not found")

    # Check if user has access to this order's branch
    if not current_user.is_superuser and order.branch_id != current_user.branch_id:
        raise HTTPException(status_code=403, detail="You cannot delete orders from other branches")

    # Restore inventory for each line item.
    for item in order.items:
        product_result = await db.execute(select(Product).where(Product.id == item.product_id))
        product = product_result.scalar_one_or_none()
        if product:
            product.inventory_count += item.quantity

    await db.delete(order)
    await db.commit()

    return {"status": "success", "message": "Order deleted and inventory restored"}
\ No newline at end of file
diff --git a/app/api/products.py b/app/api/products.py
new file mode 100644
index 0000000000000000000000000000000000000000..33afd989a7df1ff50b6abd7a601511775b98d0e0
--- /dev/null
+++ b/app/api/products.py
@@ -0,0 +1,131 @@
+from fastapi import APIRouter, HTTPException, status, Depends, Query
+from sqlalchemy.ext.asyncio import AsyncSession
+from sqlalchemy import select
+from typing import List, Optional
+from ..core.dependencies import get_current_active_user
+from ..db.database import get_db
+from ..db.models import Product, User
+from ..db.schemas import ProductCreate, ProductInDB
+
+router = APIRouter()
+
@router.post("/", response_model=ProductInDB)
async def create_product(
    product: ProductCreate,
    current_user: User = Depends(get_current_active_user),
    db: AsyncSession = Depends(get_db)
) -> ProductInDB:
    """Create a product in the caller's branch (superusers: any branch)."""
    if not current_user.is_superuser and current_user.branch_id != product.branch_id:
        raise HTTPException(
            status_code=403,
            detail="You can only create products for your own branch"
        )

    new_product = Product(**product.dict())
    db.add(new_product)
    await db.commit()
    await db.refresh(new_product)
    return new_product
+
@router.get("/", response_model=List[ProductInDB])
async def list_products(
    skip: int = 0,
    limit: int = 10,
    category: Optional[str] = None,
    branch_id: Optional[int] = Query(None, description="Filter products by branch"),
    current_user: User = Depends(get_current_active_user),
    db: AsyncSession = Depends(get_db)
) -> List[ProductInDB]:
    """Page through products, optionally filtered by category and branch.

    Non-superusers are always confined to their own branch.
    """
    stmt = select(Product)

    if category:
        stmt = stmt.where(Product.category == category)

    if branch_id:
        # An explicit branch filter is only honoured for the caller's own
        # branch unless they are a superuser.
        if not current_user.is_superuser and branch_id != current_user.branch_id:
            raise HTTPException(
                status_code=403,
                detail="You can only view products from your own branch"
            )
        stmt = stmt.where(Product.branch_id == branch_id)
    elif not current_user.is_superuser:
        stmt = stmt.where(Product.branch_id == current_user.branch_id)

    rows = await db.execute(stmt.offset(skip).limit(limit))
    return rows.scalars().all()
+
@router.get("/{product_id}", response_model=ProductInDB)
async def get_product(
    product_id: int,
    current_user: User = Depends(get_current_active_user),
    db: AsyncSession = Depends(get_db)
) -> ProductInDB:
    """Fetch a single product; non-superusers may only see their own branch."""
    result = await db.execute(select(Product).where(Product.id == product_id))
    product = result.scalar_one_or_none()

    if product is None:
        raise HTTPException(status_code=404, detail="Product not found")

    if not current_user.is_superuser and product.branch_id != current_user.branch_id:
        raise HTTPException(status_code=403, detail="You cannot access products from other branches")

    return product
+
@router.put("/{product_id}", response_model=ProductInDB)
async def update_product(
    product_id: int,
    product_update: ProductCreate,
    current_user: User = Depends(get_current_active_user),
    db: AsyncSession = Depends(get_db)
) -> ProductInDB:
    """Overwrite a product's fields; the branch itself may not change."""
    result = await db.execute(select(Product).where(Product.id == product_id))
    product = result.scalar_one_or_none()
    if product is None:
        raise HTTPException(status_code=404, detail="Product not found")

    # Branch scoping: only superusers may touch other branches' products.
    if not current_user.is_superuser and product.branch_id != current_user.branch_id:
        raise HTTPException(status_code=403, detail="You cannot modify products from other branches")

    # Moving a product between branches is not supported.
    if product_update.branch_id != product.branch_id:
        raise HTTPException(status_code=400, detail="Cannot change product's branch")

    # Apply the submitted fields.
    for field, value in product_update.dict(exclude_unset=True).items():
        setattr(product, field, value)

    await db.commit()
    await db.refresh(product)
    return product
+
@router.delete("/{product_id}")
async def delete_product(
    product_id: int,
    current_user: User = Depends(get_current_active_user),
    db: AsyncSession = Depends(get_db)
):
    """Delete a product; non-superusers may only delete within their branch."""
    result = await db.execute(select(Product).where(Product.id == product_id))
    product = result.scalar_one_or_none()
    if product is None:
        raise HTTPException(status_code=404, detail="Product not found")

    if not current_user.is_superuser and product.branch_id != current_user.branch_id:
        raise HTTPException(status_code=403, detail="You cannot delete products from other branches")

    await db.delete(product)
    await db.commit()
    return {"status": "success", "message": "Product deleted"}
\ No newline at end of file
diff --git a/app/api/scheduler.py b/app/api/scheduler.py
new file mode 100644
index 0000000000000000000000000000000000000000..ae7b90c82b60d0150c6f6111fb2fcb55ef79fa6e
--- /dev/null
+++ b/app/api/scheduler.py
@@ -0,0 +1,203 @@
+from fastapi import APIRouter, Depends, HTTPException
+from sqlalchemy.ext.asyncio import AsyncSession
+from sqlalchemy import select, delete
+from typing import List, Dict, Any, Optional
+from datetime import datetime, timedelta
+from ..core.dependencies import get_current_active_user
+from ..db.database import get_db
+from ..db.models import Event, User
+from pydantic import BaseModel
+
+router = APIRouter()
+
class RecurringEventCreate(BaseModel):
    """Payload for creating a recurring event series."""
    title: str
    description: str
    start_time: datetime  # start of the first occurrence
    end_time: datetime  # end of the first occurrence; must be after start_time
    recurrence_pattern: str  # one of: daily, weekly, monthly, yearly
    recurrence_end_date: Optional[datetime] = None  # no occurrence starts after this
    attendees: List[str] = []  # pydantic deep-copies defaults, so this is safe
    reminder_minutes: int = 30  # lead time for the reminder
+
class RecurringEventUpdate(BaseModel):
    """Partial update for a recurring event; unset fields are left unchanged."""
    title: Optional[str] = None
    description: Optional[str] = None
    start_time: Optional[datetime] = None
    end_time: Optional[datetime] = None
    attendees: Optional[List[str]] = None
    reminder_minutes: Optional[int] = None
+
@router.post("/recurring-events")
async def create_recurring_event(
    event_data: RecurringEventCreate,
    current_user: User = Depends(get_current_active_user),
    db: AsyncSession = Depends(get_db)
) -> List[Dict[str, Any]]:
    """Create a new recurring event series.

    Expands the recurrence pattern into individual Event rows up to
    ``recurrence_end_date``, or up to a safety cap when no end date is given.
    Raises 400 on an unknown pattern or a non-positive duration.
    """
    if event_data.recurrence_pattern not in ("daily", "weekly", "monthly", "yearly"):
        raise HTTPException(
            status_code=400,
            detail="Invalid recurrence pattern. Must be one of: daily, weekly, monthly, yearly"
        )

    if event_data.start_time >= event_data.end_time:
        raise HTTPException(
            status_code=400,
            detail="End time must be after start time"
        )

    # Bug fix: the original `while True` never terminated when
    # recurrence_end_date was None. Cap the expansion so an open-ended
    # series cannot hang the request or flood the table.
    MAX_OCCURRENCES = 366

    duration = event_data.end_time - event_data.start_time
    current_start = event_data.start_time
    events: List[Event] = []

    for sequence_number in range(MAX_OCCURRENCES):
        if event_data.recurrence_end_date and current_start > event_data.recurrence_end_date:
            break

        # NOTE(review): the update/delete endpoints filter on
        # Event.recurrence_group, but it is never set here — confirm the
        # model assigns a group id by default.
        event = Event(
            user_id=current_user.id,
            title=event_data.title,
            description=event_data.description,
            start_time=current_start,
            end_time=current_start + duration,
            attendees=event_data.attendees,
            reminder_minutes=event_data.reminder_minutes,
            is_recurring=True,
            recurrence_pattern=event_data.recurrence_pattern,
            sequence_number=sequence_number,
            status="scheduled"
        )
        db.add(event)
        events.append(event)

        current_start = _next_occurrence(current_start, event_data.recurrence_pattern)

    await db.commit()

    # Refresh all events to get their IDs
    for event in events:
        await db.refresh(event)

    return events


def _next_occurrence(start: datetime, pattern: str) -> datetime:
    """Return the start of the occurrence following *start* for *pattern*."""
    import calendar  # local: only this helper needs it

    if pattern == "daily":
        return start + timedelta(days=1)
    if pattern == "weekly":
        return start + timedelta(weeks=1)
    if pattern == "monthly":
        # Bug fix: the original replace(month=month + 1) raised ValueError
        # for dates like Jan 31; clamp the day to the target month's length.
        year, month = (start.year + 1, 1) if start.month == 12 else (start.year, start.month + 1)
        day = min(start.day, calendar.monthrange(year, month)[1])
        return start.replace(year=year, month=month, day=day)
    # yearly — clamp Feb 29 on non-leap years.
    year = start.year + 1
    day = min(start.day, calendar.monthrange(year, start.month)[1])
    return start.replace(year=year, day=day)
+
@router.put("/recurring-events/{event_id}")
async def update_recurring_event(
    event_id: int,
    event_update: RecurringEventUpdate,
    update_future: bool = True,
    current_user: User = Depends(get_current_active_user),
    db: AsyncSession = Depends(get_db)
) -> List[Dict[str, Any]]:
    """Update a recurring event and optionally its future occurrences.

    Raises 400 when the payload contains no changes and 404 when the event
    does not exist or does not belong to the caller.
    """
    update_data = event_update.dict(exclude_unset=True)
    if not update_data:
        raise HTTPException(status_code=400, detail="No update data provided")

    # Get the target event, scoped to the caller.
    stmt = select(Event).where(
        Event.id == event_id,
        Event.user_id == current_user.id
    )
    result = await db.execute(stmt)
    event = result.scalar_one_or_none()

    if not event:
        raise HTTPException(
            status_code=404,
            detail="Event not found or you don't have permission to update it"
        )

    # Bug fix: the original collected the target event in the result list
    # but never applied the requested changes to it — only future
    # occurrences were actually modified.
    for field, value in update_data.items():
        setattr(event, field, value)
    updated_events = [event]

    # Update future occurrences if requested
    if update_future and event.is_recurring:
        future_stmt = select(Event).where(
            Event.recurrence_group == event.recurrence_group,
            Event.sequence_number > event.sequence_number,
            Event.user_id == current_user.id
        )
        future_result = await db.execute(future_stmt)
        for future_event in future_result.scalars().all():
            for field, value in update_data.items():
                setattr(future_event, field, value)
            updated_events.append(future_event)

    await db.commit()
    return updated_events
+
@router.delete("/recurring-events/{event_id}")
async def delete_recurring_event(
    event_id: int,
    delete_future: bool = True,
    current_user: User = Depends(get_current_active_user),
    db: AsyncSession = Depends(get_db)
) -> Dict[str, bool]:
    """Delete a recurring event and optionally its future occurrences"""
    result = await db.execute(
        select(Event).where(
            Event.id == event_id,
            Event.user_id == current_user.id,
        )
    )
    event = result.scalar_one_or_none()
    if event is None:
        raise HTTPException(
            status_code=404,
            detail="Event not found or you don't have permission to delete it"
        )

    if delete_future and event.is_recurring:
        # Remove this occurrence and everything after it in the series.
        await db.execute(
            delete(Event).where(
                Event.recurrence_group == event.recurrence_group,
                Event.sequence_number >= event.sequence_number,
                Event.user_id == current_user.id,
            )
        )
    else:
        await db.delete(event)

    await db.commit()
    return {"success": True}
+
@router.get("/recurring-events/upcoming")
async def get_upcoming_recurring_events(
    days: int = 30,
    current_user: User = Depends(get_current_active_user),
    db: AsyncSession = Depends(get_db)
) -> List[Dict[str, Any]]:
    """Get upcoming recurring events for the next N days.

    Raises 400 when ``days`` is outside 1..365.
    """
    if days <= 0 or days > 365:
        raise HTTPException(
            status_code=400,
            detail="Days parameter must be between 1 and 365"
        )

    now = datetime.utcnow()
    stmt = select(Event).where(
        Event.user_id == current_user.id,
        # Bug fix: the original had no lower bound, so "upcoming" also
        # returned events that had already started in the past.
        Event.start_time >= now,
        Event.start_time <= now + timedelta(days=days),
        Event.is_recurring == True
    ).order_by(Event.start_time)

    result = await db.execute(stmt)
    return result.scalars().all()
\ No newline at end of file
diff --git a/app/api/users.py b/app/api/users.py
new file mode 100644
index 0000000000000000000000000000000000000000..9ce76f2928afacc222667de5cf18997c4a524eb5
--- /dev/null
+++ b/app/api/users.py
@@ -0,0 +1,127 @@
+from fastapi import APIRouter, HTTPException, status, Depends
+from sqlalchemy.ext.asyncio import AsyncSession
+from sqlalchemy import select
+from typing import List, Optional
+from ..db.database import get_db
+from ..db.models import User
+from ..db.schemas import UserCreate, UserInDB
+from ..core.dependencies import get_current_superuser, get_current_active_user
+from ..core.security import get_password_hash
+
+router = APIRouter()
+
@router.get("/me", response_model=UserInDB)
async def read_user_me(current_user: User = Depends(get_current_active_user)):
    """Return the profile of the currently authenticated (active) user."""
    return current_user
+
@router.get("/", response_model=List[UserInDB])
async def list_users(
    skip: int = 0,
    limit: int = 10,
    current_user: User = Depends(get_current_superuser),
    db: AsyncSession = Depends(get_db)
) -> List[UserInDB]:
    """Return a page of user accounts (superuser only)."""
    rows = await db.execute(select(User).offset(skip).limit(limit))
    return rows.scalars().all()
+
@router.post("/", response_model=UserInDB)
async def create_user(
    user: UserCreate,
    current_user: User = Depends(get_current_superuser),
    db: AsyncSession = Depends(get_db)
) -> UserInDB:
    """Register a new user account (superuser only); emails must be unique."""
    # Reject duplicate emails up front.
    existing = await db.execute(select(User).where(User.email == user.email))
    if existing.scalar_one_or_none() is not None:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="Email already registered"
        )

    # NOTE(review): username uniqueness is not checked here — confirm the
    # database schema enforces it.
    db_user = User(
        email=user.email,
        username=user.username,
        full_name=user.full_name,
        hashed_password=get_password_hash(user.password),  # never store plaintext
        is_active=user.is_active,
        is_superuser=user.is_superuser,
        roles=user.roles
    )

    db.add(db_user)
    await db.commit()
    await db.refresh(db_user)
    return db_user
+
@router.put("/{user_id}", response_model=UserInDB)
async def update_user(
    user_id: int,
    user_update: UserCreate,
    current_user: User = Depends(get_current_superuser),
    db: AsyncSession = Depends(get_db)
) -> UserInDB:
    """Update an existing user (superuser only).

    A supplied plaintext password is re-hashed before storage. Raises 404
    for an unknown user and 400 when the new email is already taken.
    """
    stmt = select(User).where(User.id == user_id)
    result = await db.execute(stmt)
    db_user = result.scalar_one_or_none()

    if not db_user:
        raise HTTPException(status_code=404, detail="User not found")

    update_data = user_update.dict(exclude_unset=True)

    # Robustness fix: reject an email that already belongs to another user
    # instead of failing later with a 500 on the unique constraint.
    new_email = update_data.get("email")
    if new_email and new_email != db_user.email:
        dup = await db.execute(select(User).where(User.email == new_email))
        if dup.scalar_one_or_none() is not None:
            raise HTTPException(status_code=400, detail="Email already registered")

    # Never store the plaintext password.
    if "password" in update_data:
        update_data["hashed_password"] = get_password_hash(update_data.pop("password"))

    for field, value in update_data.items():
        setattr(db_user, field, value)

    await db.commit()
    await db.refresh(db_user)
    return db_user
+
@router.delete("/{user_id}")
async def delete_user(
    user_id: int,
    current_user: User = Depends(get_current_superuser),
    db: AsyncSession = Depends(get_db)
):
    """Permanently remove a user account (superuser only)."""
    result = await db.execute(select(User).where(User.id == user_id))
    user = result.scalar_one_or_none()
    if user is None:
        raise HTTPException(status_code=404, detail="User not found")

    await db.delete(user)
    await db.commit()
    return {"status": "success", "message": "User deleted"}
+
@router.put("/{user_id}/roles", response_model=UserInDB)
async def update_user_roles(
    user_id: int,
    roles: List[str],
    current_user: User = Depends(get_current_superuser),
    db: AsyncSession = Depends(get_db)
) -> UserInDB:
    """Replace a user's role list (superuser only)."""
    # Validate every requested role against the known set.
    allowed = {"user", "admin", "manager", "support"}
    rejected = [role for role in roles if role not in allowed]
    if rejected:
        raise HTTPException(
            status_code=400,
            detail=f"Invalid roles: {', '.join(rejected)}"
        )

    result = await db.execute(select(User).where(User.id == user_id))
    user = result.scalar_one_or_none()
    if user is None:
        raise HTTPException(status_code=404, detail="User not found")

    user.roles = roles
    await db.commit()
    await db.refresh(user)
    return user
\ No newline at end of file
diff --git a/app/core/__init__.py b/app/core/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/app/core/__pycache__/__init__.cpython-312.pyc b/app/core/__pycache__/__init__.cpython-312.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..288a6d737963390170f67314e95e3c00c29e7f20
Binary files /dev/null and b/app/core/__pycache__/__init__.cpython-312.pyc differ
diff --git a/app/core/__pycache__/config.cpython-312.pyc b/app/core/__pycache__/config.cpython-312.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..186683e128a4293bb8e3227badc5302cab0db504
Binary files /dev/null and b/app/core/__pycache__/config.cpython-312.pyc differ
diff --git a/app/core/__pycache__/dependencies.cpython-312.pyc b/app/core/__pycache__/dependencies.cpython-312.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..bf2aa3c6722d10c76fd6e523b0b231b6e47fe854
Binary files /dev/null and b/app/core/__pycache__/dependencies.cpython-312.pyc differ
diff --git a/app/core/__pycache__/security.cpython-312.pyc b/app/core/__pycache__/security.cpython-312.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..3ce6af0f099fa5afbe745c8a5b04d3b5ea7e00e3
Binary files /dev/null and b/app/core/__pycache__/security.cpython-312.pyc differ
diff --git a/app/core/config.py b/app/core/config.py
new file mode 100644
index 0000000000000000000000000000000000000000..c4596d678990a9f8c49832930b099ba538d82e26
--- /dev/null
+++ b/app/core/config.py
@@ -0,0 +1,36 @@
+from pydantic_settings import BaseSettings
+from typing import ClassVar
+
+
class Settings(BaseSettings):
    """Application configuration, loaded from the environment / .env file."""

    API_V1_STR: str = "/api/v1"
    PROJECT_NAME: str = "Admin Dashboard"
    VERSION: str = "1.0.0"

    # PostgreSQL Database settings.
    # Fixes two defects in the original:
    #  * production credentials were hard-coded in source control;
    #  * ClassVar excluded the field from pydantic settings, so the
    #    DATABASE_URL defined in .env could never override it.
    DATABASE_URL: str = "postgresql+asyncpg://postgres:postgres@localhost:5432/postgres"

    # JWT Settings
    SECRET_KEY: str = "your-secret-key-here"  # must be overridden in production
    ALGORITHM: str = "HS256"
    ACCESS_TOKEN_EXPIRE_MINUTES: int = 30

    # Redis settings
    REDIS_HOST: str = "localhost"
    REDIS_PORT: int = 6379

    # Email settings
    MAIL_USERNAME: str = "yungdml31@gmail.com"
    MAIL_PASSWORD: str = ""
    MAIL_FROM: str = "admin@angelo.com"
    MAIL_PORT: int = 587
    MAIL_SERVER: str = "smtp.gmail.com"

    # Frontend URL
    FRONTEND_URL: str = "http://localhost:3000"

    class Config:
        case_sensitive = True


settings = Settings()
diff --git a/app/core/dependencies.py b/app/core/dependencies.py
new file mode 100644
index 0000000000000000000000000000000000000000..21f26817e11c293a9b64c010d016224b307eb28a
--- /dev/null
+++ b/app/core/dependencies.py
@@ -0,0 +1,52 @@
+from fastapi import Depends, HTTPException, status
+from fastapi.security import OAuth2PasswordBearer
+from sqlalchemy.ext.asyncio import AsyncSession
+from sqlalchemy import select
+from jose import JWTError, jwt
+from ..db.database import get_db
+from ..db.models import User
+from ..core.config import settings
+
+oauth2_scheme = OAuth2PasswordBearer(tokenUrl=f"{settings.API_V1_STR}/auth/login")
+
async def get_current_user(
    token: str = Depends(oauth2_scheme),
    db: AsyncSession = Depends(get_db)
):
    """Resolve the bearer token to a User row, or raise 401.

    The JWT "sub" claim is expected to carry the user's integer primary key.
    """
    credentials_exception = HTTPException(
        status_code=status.HTTP_401_UNAUTHORIZED,
        detail="Could not validate credentials",
        headers={"WWW-Authenticate": "Bearer"},
    )

    try:
        payload = jwt.decode(token, settings.SECRET_KEY, algorithms=[settings.ALGORITHM])
        user_id = payload.get("sub")
        if user_id is None:
            raise credentials_exception
        # Bug fix: a token whose "sub" is not an integer previously escaped
        # as an unhandled ValueError (HTTP 500); treat it as bad credentials.
        user_pk = int(user_id)
    except (JWTError, ValueError):
        raise credentials_exception

    stmt = select(User).where(User.id == user_pk)
    result = await db.execute(stmt)
    user = result.scalar_one_or_none()

    if user is None:
        raise credentials_exception
    return user
+
async def get_current_active_user(
    current_user: User = Depends(get_current_user)
):
    """Resolve the authenticated user and reject deactivated accounts."""
    if current_user.is_active:
        return current_user
    raise HTTPException(status_code=400, detail="Inactive user")
+
async def get_current_superuser(
    current_user: User = Depends(get_current_user)
):
    """Resolve the authenticated user and require superuser rights."""
    if current_user.is_superuser:
        return current_user
    raise HTTPException(
        status_code=403, detail="The user doesn't have enough privileges"
    )
\ No newline at end of file
diff --git a/app/core/security.py b/app/core/security.py
new file mode 100644
index 0000000000000000000000000000000000000000..46fc5e23ac4d66ef3ee48be79907c69d76de179e
--- /dev/null
+++ b/app/core/security.py
@@ -0,0 +1,23 @@
+from datetime import datetime, timedelta
+from typing import Any, Optional
+from jose import jwt
+from passlib.context import CryptContext
+from .config import settings
+
+pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")  # bcrypt hashing; "auto" marks any non-bcrypt hash as needing rehash
+
+def create_access_token(subject: Any, expires_delta: Optional[timedelta] = None) -> str:
+    if expires_delta:
+        expire = datetime.utcnow() + expires_delta  # naive UTC; NOTE(review): datetime.utcnow is deprecated in 3.12 — presumably jose treats the naive exp as UTC, verify
+    else:
+        expire = datetime.utcnow() + timedelta(minutes=settings.ACCESS_TOKEN_EXPIRE_MINUTES)
+
+    to_encode = {"exp": expire, "sub": str(subject)}  # subject is stringified, so callers may pass ids or emails
+    encoded_jwt = jwt.encode(to_encode, settings.SECRET_KEY, algorithm=settings.ALGORITHM)
+    return encoded_jwt
+
+def verify_password(plain_password: str, hashed_password: str) -> bool:
+    return pwd_context.verify(plain_password, hashed_password)  # constant-time check handled by passlib
+
+def get_password_hash(password: str) -> str:
+    return pwd_context.hash(password)
\ No newline at end of file
diff --git a/app/db/__init__.py b/app/db/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/app/db/__pycache__/__init__.cpython-312.pyc b/app/db/__pycache__/__init__.cpython-312.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..c796f745236bde10c1c2f824f6751277c5b7885b
Binary files /dev/null and b/app/db/__pycache__/__init__.cpython-312.pyc differ
diff --git a/app/db/__pycache__/database.cpython-312.pyc b/app/db/__pycache__/database.cpython-312.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..8815de981acaea17049e23ad80593748c01c6fab
Binary files /dev/null and b/app/db/__pycache__/database.cpython-312.pyc differ
diff --git a/app/db/__pycache__/models.cpython-312.pyc b/app/db/__pycache__/models.cpython-312.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..d3feb6b806a66805ad19d0a325399c7c890e996e
Binary files /dev/null and b/app/db/__pycache__/models.cpython-312.pyc differ
diff --git a/app/db/__pycache__/schemas.cpython-312.pyc b/app/db/__pycache__/schemas.cpython-312.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..8f0fd646d1d010bd993deb4b5cf68b223414b9f4
Binary files /dev/null and b/app/db/__pycache__/schemas.cpython-312.pyc differ
diff --git a/app/db/database.py b/app/db/database.py
new file mode 100644
index 0000000000000000000000000000000000000000..3689da79461dda70222cc960f862d132e604aacd
--- /dev/null
+++ b/app/db/database.py
@@ -0,0 +1,55 @@
+from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession, async_sessionmaker
+from sqlalchemy.orm import declarative_base
+from ..core.config import settings
+import contextlib
+
+# Create async engine for FastAPI
+async_engine = create_async_engine(
+    settings.DATABASE_URL,
+    echo=True,  # NOTE(review): echoes every SQL statement; likely too noisy for production
+    future=True,
+    pool_pre_ping=True  # probe pooled connections before use so stale ones are replaced
+)
+
+# Create async session factory
+AsyncSessionLocal = async_sessionmaker(
+    bind=async_engine,
+    class_=AsyncSession,
+    expire_on_commit=False  # keep ORM objects usable after commit (no implicit refresh round-trip)
+)
+
+# Create declarative base for models
+Base = declarative_base()
+
+# Database dependency for FastAPI routes
+async def get_db():
+    async with AsyncSessionLocal() as session:
+        try:
+            yield session
+        finally:
+            await session.close()  # redundant with the async-with context manager, but harmless
+
+# Database access for background tasks and services (outside FastAPI dependency injection)
+class Database:
+    def __init__(self):
+        self._session_factory = AsyncSessionLocal
+
+    @contextlib.asynccontextmanager
+    async def session(self):
+        """Yield a session; commit on success, roll back and re-raise on any failure."""
+        session = self._session_factory()
+        try:
+            yield session
+            await session.commit()
+        except BaseException:  # explicit BaseException: rollback must also run on task cancellation (bare except is lint-flagged)
+            await session.rollback()
+            raise
+        finally:
+            await session.close()
+
+    async def get_session(self):
+        """Get a session for manual management; caller owns commit/close."""
+        return self._session_factory()
+
+# Create singleton instance for database access
+db = Database()
diff --git a/app/db/init_db.py b/app/db/init_db.py
new file mode 100644
index 0000000000000000000000000000000000000000..f0adbde49333096ce2841cc1cf1172a322a7474c
--- /dev/null
+++ b/app/db/init_db.py
@@ -0,0 +1,105 @@
+from sqlalchemy import create_engine
+from sqlalchemy.orm import sessionmaker
+from ..core.config import settings
+from ..core.security import get_password_hash
+from datetime import datetime
+from .models import Base, User, Product, Role, Branch
+import asyncio
+
+def init_db():
+    """Create all tables and seed the default role, branch, admin user and category products."""
+    # Synchronous engine: initialization is a one-off script, async buys nothing here.
+    engine = create_engine(
+        settings.DATABASE_URL.replace("+asyncpg", ""),
+        echo=True
+    )
+
+    # Create all tables
+    Base.metadata.create_all(bind=engine)
+
+    # Create session
+    SessionLocal = sessionmaker(bind=engine)
+    session = SessionLocal()
+
+    try:
+        # User.roles is a many-to-many of Role objects, so the admin role must
+        # exist as a row — a bare string like "admin" would fail at flush time.
+        admin_role = session.query(Role).filter_by(name="admin").first()
+        if not admin_role:
+            admin_role = Role(
+                name="admin",
+                description="Full administrative access",
+                permissions=["*"],
+            )
+            session.add(admin_role)
+
+        # Product.branch_id is NOT NULL, so seeded products need a real branch.
+        branch = session.query(Branch).filter_by(name="Main Branch").first()
+        if not branch:
+            branch = Branch(
+                name="Main Branch",
+                address="Head Office",
+                phone="N/A",
+                email="admin@example.com",
+            )
+            session.add(branch)
+
+        # Create default admin user if not exists
+        admin_user = session.query(User).filter_by(email="admin@example.com").first()
+        if not admin_user:
+            admin_user = User(
+                email="admin@example.com",
+                username="admin",
+                full_name="System Administrator",
+                hashed_password=get_password_hash("admin123"),  # Change in production
+                is_active=True,
+                is_superuser=True,
+                roles=[admin_role],
+                created_at=datetime.utcnow()
+            )
+            session.add(admin_user)
+            print("Created default admin user.")
+
+        # Flush so admin_user.id / branch.id are assigned before being
+        # referenced below (they are None until the INSERT is issued).
+        session.flush()
+
+        # Create default product categories as products
+        categories = [
+            "Soups & Stews",
+            "Rice Dishes",
+            "Swallow & Fufu",
+            "Snacks & Small Chops",
+            "Protein & Meat",
+            "Drinks"
+        ]
+
+        for category in categories:
+            exists = session.query(Product).filter_by(name=category).first()
+            if not exists:
+                product = Product(
+                    name=category,
+                    description=f"Category: {category}",
+                    price=0.0,  # Category products have zero price
+                    category=category,
+                    inventory_count=0,  # Categories don't have inventory
+                    seller_id=admin_user.id,
+                    branch_id=branch.id,
+                    created_at=datetime.utcnow()
+                )
+                session.add(product)
+
+        print("Initialized product categories.")
+
+        # Commit changes
+        session.commit()
+
+    except Exception as e:
+        print(f"Error during initialization: {e}")
+        session.rollback()
+        raise
+    finally:
+        session.close()
+
+if __name__ == "__main__":
+    init_db()
\ No newline at end of file
diff --git a/app/db/models.py b/app/db/models.py
new file mode 100644
index 0000000000000000000000000000000000000000..5a7b71aff0119f4329b28a7815a37f93871724f4
--- /dev/null
+++ b/app/db/models.py
@@ -0,0 +1,186 @@
+from sqlalchemy import Column, Integer, String, Boolean, DateTime, Float, ForeignKey, ARRAY, JSON, Table
+from sqlalchemy.orm import relationship, mapped_column, Mapped
+from sqlalchemy.dialects.postgresql import JSONB
+from datetime import datetime
+from typing import List, Optional
+from .database import Base
+
+# Association tables for many-to-many relationships
+user_roles = Table(
+    'user_roles',
+    Base.metadata,
+    Column('user_id', Integer, ForeignKey('users.id')),  # NOTE(review): no composite primary key/unique pair — duplicate (user, role) rows are possible; confirm intended
+    Column('role_id', Integer, ForeignKey('roles.id'))
+)
+
+# Role model
+class Role(Base):
+    __tablename__ = "roles"
+
+    id: Mapped[int] = mapped_column(primary_key=True)
+    name: Mapped[str] = mapped_column(String, unique=True, index=True)
+    description: Mapped[str] = mapped_column(String)
+    permissions: Mapped[List[str]] = mapped_column(ARRAY(String), default=list)  # ARRAY is PostgreSQL-only
+    created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow)  # naive UTC; NOTE(review): datetime.utcnow is deprecated in Python 3.12+
+    updated_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow)
+
+    # Relationships
+    users = relationship("User", secondary=user_roles, back_populates="roles")
+
+# Branch model
+class Branch(Base):
+    __tablename__ = "branches"
+
+    id: Mapped[int] = mapped_column(primary_key=True)
+    name: Mapped[str] = mapped_column(String, unique=True, index=True)
+    address: Mapped[str] = mapped_column(String)
+    phone: Mapped[str] = mapped_column(String)
+    email: Mapped[str] = mapped_column(String)
+    is_active: Mapped[bool] = mapped_column(Boolean, default=True)
+    created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow)
+    updated_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow)
+
+    # Relationships
+    users = relationship("User", back_populates="branch")
+    products = relationship("Product", back_populates="branch")
+    orders = relationship("Order", back_populates="branch")
+
+class User(Base):
+    __tablename__ = "users"
+
+    id: Mapped[int] = mapped_column(primary_key=True)
+    email: Mapped[str] = mapped_column(String, unique=True, index=True)
+    username: Mapped[str] = mapped_column(String, unique=True, index=True)
+    full_name: Mapped[str] = mapped_column(String)
+    hashed_password: Mapped[str] = mapped_column(String)
+    is_active: Mapped[bool] = mapped_column(Boolean, default=True)
+    is_superuser: Mapped[bool] = mapped_column(Boolean, default=False)
+    branch_id: Mapped[Optional[int]] = mapped_column(ForeignKey("branches.id"))  # nullable: users need not belong to a branch
+    created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow)
+
+    # Update roles relationship to use many-to-many
+    roles = relationship("Role", secondary=user_roles, back_populates="users")  # expects Role objects, not role-name strings
+
+    # Other relationships
+    branch = relationship("Branch", back_populates="users")
+    products = relationship("Product", back_populates="seller")
+    orders = relationship("Order", back_populates="customer")
+    notifications = relationship("Notification", back_populates="user")
+    sessions = relationship("Session", back_populates="user", cascade="all, delete-orphan")  # sessions die with their user
+
+class Product(Base):
+    __tablename__ = "products"
+
+    id: Mapped[int] = mapped_column(primary_key=True)
+    name: Mapped[str] = mapped_column(String, index=True)
+    description: Mapped[str] = mapped_column(String)
+    price: Mapped[float] = mapped_column(Float)
+    category: Mapped[str] = mapped_column(String, index=True)
+    inventory_count: Mapped[int] = mapped_column(Integer)
+    seller_id: Mapped[int] = mapped_column(ForeignKey("users.id"))
+    branch_id: Mapped[int] = mapped_column(ForeignKey("branches.id"))  # non-Optional Mapped[int] → NOT NULL
+    created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow)
+    updated_at: Mapped[datetime] = mapped_column(
+        DateTime,
+        default=datetime.utcnow,
+        onupdate=datetime.utcnow
+    )
+
+    # Relationships
+    seller = relationship("User", back_populates="products")
+    branch = relationship("Branch", back_populates="products")
+    order_items = relationship("OrderItem", back_populates="product")
+
+class Order(Base):
+    __tablename__ = "orders"
+
+    id: Mapped[int] = mapped_column(primary_key=True)
+    customer_id: Mapped[int] = mapped_column(ForeignKey("users.id"))
+    branch_id: Mapped[int] = mapped_column(ForeignKey("branches.id"))
+    total_amount: Mapped[float] = mapped_column(Float)
+    status: Mapped[str] = mapped_column(String)  # free-form; NOTE(review): consider an Enum of allowed statuses
+    created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow)
+    updated_at: Mapped[datetime] = mapped_column(
+        DateTime,
+        default=datetime.utcnow,
+        onupdate=datetime.utcnow
+    )
+
+    # Relationships
+    customer = relationship("User", back_populates="orders")
+    branch = relationship("Branch", back_populates="orders")
+    items = relationship("OrderItem", back_populates="order", cascade="all, delete-orphan")  # line items die with the order
+
+class OrderItem(Base):
+    __tablename__ = "order_items"
+
+    id: Mapped[int] = mapped_column(primary_key=True)
+    order_id: Mapped[int] = mapped_column(ForeignKey("orders.id"))
+    product_id: Mapped[int] = mapped_column(ForeignKey("products.id"))
+    quantity: Mapped[int] = mapped_column(Integer)
+    price: Mapped[float] = mapped_column(Float)  # unit price at order time, independent of current Product.price
+
+    # Relationships
+    order = relationship("Order", back_populates="items")
+    product = relationship("Product", back_populates="order_items")
+
+class Notification(Base):
+    __tablename__ = "notifications"
+
+    id: Mapped[int] = mapped_column(primary_key=True)
+    user_id: Mapped[int] = mapped_column(ForeignKey("users.id"))
+    title: Mapped[str] = mapped_column(String)
+    message: Mapped[str] = mapped_column(String)
+    type: Mapped[str] = mapped_column(String)
+    data: Mapped[Optional[dict]] = mapped_column(JSONB)  # PostgreSQL-only JSONB payload
+    created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow)
+    read: Mapped[bool] = mapped_column(Boolean, default=False)
+
+    # Relationship
+    user = relationship("User", back_populates="notifications")
+
+class Event(Base):
+    __tablename__ = "events"
+
+    id: Mapped[int] = mapped_column(primary_key=True)
+    user_id: Mapped[int] = mapped_column(ForeignKey("users.id"))
+    title: Mapped[str] = mapped_column(String)
+    description: Mapped[str] = mapped_column(String)
+    start_time: Mapped[datetime] = mapped_column(DateTime)
+    end_time: Mapped[datetime] = mapped_column(DateTime)
+    attendees: Mapped[List[str]] = mapped_column(ARRAY(String), default=list)
+    is_all_day: Mapped[bool] = mapped_column(Boolean, default=False)
+    reminder_minutes: Mapped[int] = mapped_column(Integer)
+    status: Mapped[str] = mapped_column(String)
+    attendee_responses: Mapped[dict] = mapped_column(JSONB, default=dict)  # presumably attendee → response string; verify against calendar service
+    created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow)
+    updated_at: Mapped[datetime] = mapped_column(
+        DateTime,
+        default=datetime.utcnow,
+        onupdate=datetime.utcnow
+    )
+    # Fields for recurring events
+    is_recurring: Mapped[bool] = mapped_column(Boolean, default=False)
+    recurrence_pattern: Mapped[Optional[str]] = mapped_column(String)
+    recurrence_group: Mapped[Optional[str]] = mapped_column(String)
+    recurrence_end_date: Mapped[Optional[datetime]] = mapped_column(DateTime)
+    parent_event_id: Mapped[Optional[int]] = mapped_column(Integer)  # NOTE(review): plain Integer, not ForeignKey("events.id") — confirm intended
+    sequence_number: Mapped[Optional[int]] = mapped_column(Integer)
+    reminder_sent: Mapped[bool] = mapped_column(Boolean, default=False)
+
+    # Relationship
+    user = relationship("User")  # one-way: User has no events back-reference
+
+class Session(Base):
+    __tablename__ = "sessions"
+
+    id: Mapped[int] = mapped_column(primary_key=True)
+    user_id: Mapped[int] = mapped_column(ForeignKey("users.id"))
+    session_token: Mapped[str] = mapped_column(String, unique=True, index=True)
+    last_activity: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow)
+    created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow)
+    expires_at: Mapped[datetime] = mapped_column(DateTime)
+    is_active: Mapped[bool] = mapped_column(Boolean, default=True)
+
+    # Relationship
+    user = relationship("User", back_populates="sessions")
\ No newline at end of file
diff --git a/app/db/schemas.py b/app/db/schemas.py
new file mode 100644
index 0000000000000000000000000000000000000000..00cbcd27d1218a00f3eed34108e22da154d8b750
--- /dev/null
+++ b/app/db/schemas.py
@@ -0,0 +1,314 @@
+from sqlalchemy.orm import validates
+from sqlalchemy import event
+from datetime import datetime
+from typing import List, Optional, Dict
+from pydantic import BaseModel, EmailStr, validator
+from .models import User, Product, Order, Event, Notification, Session
+import re
+
+# Role schemas
+class RoleBase(BaseModel):
+    name: str
+    description: str
+    permissions: List[str] = []
+
+class RoleCreate(RoleBase):
+    pass
+
+class RoleUpdate(RoleBase):
+    name: Optional[str] = None
+    description: Optional[str] = None
+    permissions: Optional[List[str]] = None
+
+class RoleInDB(RoleBase):
+    id: int
+    created_at: datetime
+    updated_at: Optional[datetime] = None
+
+    class Config:
+        from_attributes = True  # pydantic v2 ORM mode (read attributes off SQLAlchemy objects)
+
+class BranchBase(BaseModel):
+    name: str
+    address: str
+    phone: str
+    email: EmailStr
+    is_active: bool = True
+
+class BranchCreate(BranchBase):
+    pass
+
+class BranchInDB(BranchBase):
+    id: int
+    created_at: datetime
+    updated_at: Optional[datetime] = None
+
+    class Config:
+        from_attributes = True
+
+# Update User schemas
+class UserBase(BaseModel):
+    email: EmailStr
+    username: str
+    full_name: str
+    is_active: bool = True
+    is_superuser: bool = False
+    branch_id: Optional[int] = None
+
+class UserCreate(UserBase):
+    password: str
+    role_ids: Optional[List[int]] = None  # IDs of roles to assign
+
+class UserUpdate(BaseModel):
+    email: Optional[EmailStr] = None
+    username: Optional[str] = None
+    full_name: Optional[str] = None
+    is_active: Optional[bool] = None
+    is_superuser: Optional[bool] = None
+    password: Optional[str] = None
+    branch_id: Optional[int] = None
+    role_ids: Optional[List[int]] = None
+
+class UserInDB(UserBase):
+    id: int
+    created_at: datetime
+    roles: List[RoleInDB]  # nested serialization of the many-to-many Role rows
+
+    class Config:
+        from_attributes = True
+
+class ProductBase(BaseModel):
+    name: str
+    description: str
+    price: float
+    category: str
+    inventory_count: int
+    seller_id: int
+    branch_id: int
+
+class ProductCreate(ProductBase):
+    pass
+
+class ProductInDB(ProductBase):
+    id: int
+    created_at: datetime
+    updated_at: Optional[datetime] = None
+
+    class Config:
+        from_attributes = True
+
+class OrderItemBase(BaseModel):
+    product_id: int
+    quantity: int
+    price: float
+
+class OrderItemCreate(OrderItemBase):
+    pass
+
+class OrderItemInDB(OrderItemBase):
+    id: int
+    order_id: int
+
+    class Config:
+        from_attributes = True
+
+class OrderBase(BaseModel):
+    customer_id: int
+    branch_id: int
+    total_amount: float
+    status: str = "pending"
+    items: List[OrderItemCreate]
+
+class OrderCreate(OrderBase):
+    pass
+
+class OrderInDB(OrderBase):
+    id: int
+    created_at: datetime
+    updated_at: Optional[datetime] = None
+    items: List[OrderItemInDB]  # overrides OrderBase.items with the persisted shape
+
+    class Config:
+        from_attributes = True
+
+class NotificationBase(BaseModel):
+    user_id: int
+    title: str
+    message: str
+    type: str
+    data: Optional[dict] = None
+    read: bool = False
+
+class NotificationCreate(NotificationBase):
+    pass
+
+class NotificationInDB(NotificationBase):
+    id: int
+    created_at: datetime
+
+    class Config:
+        from_attributes = True
+
+class EventBase(BaseModel):
+    title: str
+    description: str
+    start_time: datetime
+    end_time: datetime
+    is_all_day: bool = False
+    reminder_minutes: int = 30
+
+    @validator('end_time')  # NOTE(review): v1-style validator mixed with v2 from_attributes config — deprecated under pydantic v2; consider field_validator
+    def end_time_after_start_time(cls, v, values):
+        if 'start_time' in values and v <= values['start_time']:
+            raise ValueError('end_time must be after start_time')
+        return v
+
+    @validator('reminder_minutes')
+    def valid_reminder_minutes(cls, v):
+        if v < 0:
+            raise ValueError('reminder_minutes cannot be negative')
+        return v
+
+class EventCreate(EventBase):
+    attendees: List[str] = []
+
+class EventUpdate(BaseModel):
+    title: Optional[str] = None
+    description: Optional[str] = None
+    start_time: Optional[datetime] = None
+    end_time: Optional[datetime] = None
+    is_all_day: Optional[bool] = None
+    reminder_minutes: Optional[int] = None
+    attendees: Optional[List[str]] = None
+
+    @validator('reminder_minutes')
+    def valid_reminder_minutes(cls, v):
+        if v is not None and v < 0:
+            raise ValueError('reminder_minutes cannot be negative')
+        return v
+
+class EventInDB(EventBase):
+    id: int
+    user_id: int
+    attendees: List[str]
+    status: str
+    attendee_responses: Dict[str, str]
+    created_at: datetime
+    updated_at: Optional[datetime] = None
+    reminder_sent: bool = False
+    is_recurring: bool = False
+    recurrence_group: Optional[str] = None
+    parent_event_id: Optional[int] = None
+    sequence_number: Optional[int] = None
+
+    class Config:
+        from_attributes = True
+
+class RecurringEventCreate(EventCreate):
+    recurrence_pattern: str
+    recurrence_end_date: Optional[datetime] = None
+
+    @validator('recurrence_pattern')
+    def valid_recurrence_pattern(cls, v):
+        valid_patterns = ['daily', 'weekly', 'monthly', 'yearly']
+        if v not in valid_patterns:
+            raise ValueError(f'recurrence_pattern must be one of: {", ".join(valid_patterns)}')
+        return v
+
+    @validator('recurrence_end_date')
+    def end_date_after_start_time(cls, v, values):
+        if v is not None and 'start_time' in values and v <= values['start_time']:
+            raise ValueError('recurrence_end_date must be after start_time')
+        return v
+
+class RecurringEventUpdate(EventUpdate):
+    recurrence_pattern: Optional[str] = None
+    recurrence_end_date: Optional[datetime] = None
+
+    @validator('recurrence_pattern')
+    def valid_recurrence_pattern(cls, v):
+        if v is not None:
+            valid_patterns = ['daily', 'weekly', 'monthly', 'yearly']
+            if v not in valid_patterns:
+                raise ValueError(f'recurrence_pattern must be one of: {", ".join(valid_patterns)}')
+        return v
+
+class SessionBase(BaseModel):
+    user_id: int
+    session_token: str
+    expires_at: datetime
+    is_active: bool = True
+
+class SessionCreate(SessionBase):
+    pass
+
+class SessionUpdate(BaseModel):
+    last_activity: Optional[datetime] = None
+    expires_at: Optional[datetime] = None
+    is_active: Optional[bool] = None
+
+class SessionInDB(SessionBase):
+    id: int
+    created_at: datetime
+    last_activity: datetime
+
+    class Config:
+        from_attributes = True
+
+@validates('email')
+def validate_email(self, key, email):  # NOTE(review): sqlalchemy @validates is honored only when defined inside the mapped class body; attaching these afterwards (bottom of file) is likely never registered by the mapper — verify they actually run
+    if not re.match(r'^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}$', email):
+        raise ValueError('Invalid email address')
+    return email
+
+@validates('username')
+def validate_username(self, key, username):
+    if len(username) < 3:
+        raise ValueError('Username must be at least 3 characters long')
+    return username
+
+@validates('inventory_count')
+def validate_inventory(self, key, count):
+    if count < 0:
+        raise ValueError('Inventory count cannot be negative')
+    return count
+
+@validates('price')
+def validate_price(self, key, price):
+    if price < 0:
+        raise ValueError('Price cannot be negative')
+    return price
+
+# Event listeners for automatic timestamps
+@event.listens_for(Product, 'before_insert')  # NOTE(review): redundant with the column-level default/onupdate declared in models.py — harmless but duplicated
+def set_created_at(mapper, connection, target):
+    target.created_at = datetime.utcnow()
+    target.updated_at = datetime.utcnow()
+
+@event.listens_for(Product, 'before_update')
+def set_updated_at(mapper, connection, target):
+    target.updated_at = datetime.utcnow()
+
+@event.listens_for(Order, 'before_insert')
+def set_order_created_at(mapper, connection, target):
+    target.created_at = datetime.utcnow()
+    target.updated_at = datetime.utcnow()
+
+@event.listens_for(Order, 'before_update')
+def set_order_updated_at(mapper, connection, target):
+    target.updated_at = datetime.utcnow()
+
+@event.listens_for(Event, 'before_insert')
+def set_event_created_at(mapper, connection, target):
+    target.created_at = datetime.utcnow()
+    target.updated_at = datetime.utcnow()
+
+@event.listens_for(Event, 'before_update')
+def set_event_updated_at(mapper, connection, target):
+    target.updated_at = datetime.utcnow()
+
+# Add validators to models
+User.validate_email = validate_email  # NOTE(review): post-hoc attribute assignment — see the note on validate_email above
+User.validate_username = validate_username
+Product.validate_inventory = validate_inventory
+Product.validate_price = validate_price
\ No newline at end of file
diff --git a/app/main.py b/app/main.py
new file mode 100644
index 0000000000000000000000000000000000000000..f0f672edc279cb1e47b6e37cf33266c611115d4a
--- /dev/null
+++ b/app/main.py
@@ -0,0 +1,95 @@
+from fastapi import FastAPI, Request, WebSocket
+from fastapi.middleware.cors import CORSMiddleware
+from .core.config import settings
+from .db.database import async_engine as engine, Base
+from .api import auth, products, orders, users, analytics, files, notifications, calendar, scheduler, maintenance
+from .utils.rate_limiter import rate_limiter
+from .utils.logger import log_api_request
+from .utils.tasks import run_periodic_tasks
+from .services.websocket import connect, disconnect
+import time
+import logging
+import asyncio
+from typing import List
+
+# Configure logging
+logging.basicConfig(level=logging.INFO)
+logger = logging.getLogger(__name__)
+
+app = FastAPI(title=settings.PROJECT_NAME, version=settings.VERSION)
+
+# Store background tasks
+background_tasks = set()  # strong references keep periodic tasks from being garbage-collected mid-flight
+
+# Configure CORS
+app.add_middleware(
+    CORSMiddleware,
+    allow_origins=["*"],  # NOTE(review): wildcard origins with allow_credentials=True — configure appropriately (and restrictively) for production
+    allow_credentials=True,
+    allow_methods=["*"],
+    allow_headers=["*"],
+)
+
+# WebSocket endpoint
+@app.websocket("/ws")
+async def websocket_endpoint(websocket: WebSocket):
+    await connect(websocket)
+    try:
+        while True:  # keep the connection open; inbound frames are read and discarded
+            await websocket.receive_text()
+    except Exception:  # covers WebSocketDisconnect without masking CancelledError/KeyboardInterrupt like the bare except did
+        await disconnect(websocket)
+
+# Request logging and rate limiting middleware
+@app.middleware("http")
+async def middleware(request: Request, call_next):
+    await rate_limiter.check_rate_limit(request)  # presumably raises (e.g. 429) when over the limit — verify against rate_limiter
+    start_time = time.time()
+    response = await call_next(request)
+    end_time = time.time()
+    duration = end_time - start_time
+    log_api_request(
+        method=request.method,
+        path=request.url.path,
+        status_code=response.status_code,
+        duration=duration
+    )
+    return response
+
+# Application startup and shutdown events
+@app.on_event("startup")  # NOTE(review): on_event is deprecated in current FastAPI; consider the lifespan handler instead
+async def startup_event():
+    # Create all database tables
+    async with engine.begin() as conn:
+        await conn.run_sync(Base.metadata.create_all)
+
+    # Start background tasks
+    task = asyncio.create_task(run_periodic_tasks())
+    background_tasks.add(task)
+    task.add_done_callback(background_tasks.discard)  # drop the strong reference once the task finishes
+
+@app.on_event("shutdown")
+async def shutdown_event():
+    # Cancel background tasks
+    for task in background_tasks:
+        task.cancel()  # cancellation is requested but not awaited; tasks may still be unwinding at exit
+
+# Include routers
+app.include_router(auth.router, prefix=f"{settings.API_V1_STR}/auth", tags=["auth"])
+app.include_router(users.router, prefix=f"{settings.API_V1_STR}/users", tags=["users"])
+app.include_router(products.router, prefix=f"{settings.API_V1_STR}/products", tags=["products"])
+app.include_router(orders.router, prefix=f"{settings.API_V1_STR}/orders", tags=["orders"])
+app.include_router(analytics.router, prefix=f"{settings.API_V1_STR}/analytics", tags=["analytics"])
+app.include_router(files.router, prefix=f"{settings.API_V1_STR}/files", tags=["files"])
+app.include_router(notifications.router, prefix=f"{settings.API_V1_STR}/notifications", tags=["notifications"])
+app.include_router(calendar.router, prefix=f"{settings.API_V1_STR}/calendar", tags=["calendar"])
+app.include_router(scheduler.router, prefix=f"{settings.API_V1_STR}/scheduler", tags=["scheduler"])
+app.include_router(maintenance.router, prefix=f"{settings.API_V1_STR}/maintenance", tags=["maintenance"])
+
+@app.get("/")
+async def root():
+    return {
+        "message": f"Welcome to {settings.PROJECT_NAME} v{settings.VERSION}",
+        "docs_url": "/docs",
+        "openapi_url": "/openapi.json"
+    }
\ No newline at end of file
diff --git a/app/schemas/__pycache__/events.cpython-312.pyc b/app/schemas/__pycache__/events.cpython-312.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..047c5658135d82384997ebeb942faa5d4e469ad0
Binary files /dev/null and b/app/schemas/__pycache__/events.cpython-312.pyc differ
diff --git a/app/schemas/events.py b/app/schemas/events.py
new file mode 100644
index 0000000000000000000000000000000000000000..4225d12daace193ecec9bddcc6a1428b8f383830
--- /dev/null
+++ b/app/schemas/events.py
@@ -0,0 +1,87 @@
+from pydantic import BaseModel, validator
+from typing import List, Optional, Dict, Any
+from datetime import datetime
+
+class EventBase(BaseModel):
+    title: str
+    description: str
+    start_time: datetime
+    end_time: datetime
+    is_all_day: bool = False
+    reminder_minutes: int = 30
+
+    @validator('end_time')  # NOTE(review): pydantic v1-style validator — deprecated under v2; db/schemas.py duplicates these same schemas
+    def end_time_after_start_time(cls, v, values):
+        if 'start_time' in values and v <= values['start_time']:  # silently skipped if start_time itself failed validation
+            raise ValueError('end_time must be after start_time')
+        return v
+
+    @validator('reminder_minutes')
+    def valid_reminder_minutes(cls, v):
+        if v < 0:
+            raise ValueError('reminder_minutes cannot be negative')
+        return v
+
+class EventCreate(EventBase):
+    attendees: List[str] = []
+
+class EventUpdate(BaseModel):
+    title: Optional[str] = None
+    description: Optional[str] = None
+    start_time: Optional[datetime] = None
+    end_time: Optional[datetime] = None
+    is_all_day: Optional[bool] = None
+    reminder_minutes: Optional[int] = None
+    attendees: Optional[List[str]] = None
+
+    @validator('reminder_minutes')
+    def valid_reminder_minutes(cls, v):
+        if v is not None and v < 0:
+            raise ValueError('reminder_minutes cannot be negative')
+        return v
+
+class EventInDB(EventBase):
+    id: str  # NOTE(review): str ids look Mongo-era; the SQL Event model and db/schemas.py use int — confirm which is current
+    user_id: str
+    attendees: List[str]
+    status: str
+    attendee_responses: Dict[str, str]
+    created_at: datetime
+    updated_at: Optional[datetime] = None
+    reminder_sent: bool = False
+    is_recurring: bool = False
+    recurrence_group: Optional[str] = None
+    parent_event_id: Optional[str] = None
+    sequence_number: Optional[int] = None
+
+    class Config:
+        from_attributes = True  # pydantic v2 name; "orm_mode" is deprecated and db/schemas.py already uses this spelling
+
+class RecurringEventCreate(EventCreate):
+    recurrence_pattern: str
+    recurrence_end_date: Optional[datetime] = None
+
+    @validator('recurrence_pattern')
+    def valid_recurrence_pattern(cls, v):
+        valid_patterns = ['daily', 'weekly', 'monthly', 'yearly']  # closed vocabulary consumed by the scheduler
+        if v not in valid_patterns:
+            raise ValueError(f'recurrence_pattern must be one of: {", ".join(valid_patterns)}')
+        return v
+
+    @validator('recurrence_end_date')
+    def end_date_after_start_time(cls, v, values):
+        if v is not None and 'start_time' in values and v <= values['start_time']:
+            raise ValueError('recurrence_end_date must be after start_time')
+        return v
+
+class RecurringEventUpdate(EventUpdate):
+    recurrence_pattern: Optional[str] = None
+    recurrence_end_date: Optional[datetime] = None
+
+    @validator('recurrence_pattern')
+    def valid_recurrence_pattern(cls, v):
+        if v is not None:  # None means "leave the pattern unchanged" on partial update
+            valid_patterns = ['daily', 'weekly', 'monthly', 'yearly']
+            if v not in valid_patterns:
+                raise ValueError(f'recurrence_pattern must be one of: {", ".join(valid_patterns)}')
+        return v
\ No newline at end of file
diff --git a/app/services/__pycache__/backup.cpython-312.pyc b/app/services/__pycache__/backup.cpython-312.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..c3f25b0756d58c3daf9c752b0fd10dcadd8c6f0b
Binary files /dev/null and b/app/services/__pycache__/backup.cpython-312.pyc differ
diff --git a/app/services/__pycache__/maintenance.cpython-312.pyc b/app/services/__pycache__/maintenance.cpython-312.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..5e641a578d1eed73251248ae01e72422272e6049
Binary files /dev/null and b/app/services/__pycache__/maintenance.cpython-312.pyc differ
diff --git a/app/services/__pycache__/notifications.cpython-312.pyc b/app/services/__pycache__/notifications.cpython-312.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..e2154d707db63bae74e1c20de1d44e3768b4a656
Binary files /dev/null and b/app/services/__pycache__/notifications.cpython-312.pyc differ
diff --git a/app/services/__pycache__/websocket.cpython-312.pyc b/app/services/__pycache__/websocket.cpython-312.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..6cece0ec9a770c48115f79230f8c8621e58c9436
Binary files /dev/null and b/app/services/__pycache__/websocket.cpython-312.pyc differ
diff --git a/app/services/analytics.py b/app/services/analytics.py
new file mode 100644
index 0000000000000000000000000000000000000000..b6baba6e91aeca5da0efdd1560068dbeec0ae39a
--- /dev/null
+++ b/app/services/analytics.py
@@ -0,0 +1,131 @@
+from datetime import datetime, timedelta
+from ..db.database import db
+from ..utils.cache import cache
+from typing import Dict, List, Any
+
class AnalyticsService:
    """Read-side analytics over the Mongo ``orders``/``products`` collections.

    Every query result is cached through the shared ``cache`` helper and is a
    plain dict suitable for a JSON API response.
    """

    @staticmethod
    async def get_sales_analytics(start_date: datetime, end_date: datetime) -> Dict[str, Any]:
        """Aggregate per-day sales for completed/delivered orders in [start_date, end_date].

        Returns daily buckets plus overall revenue, order count and average
        order value. Cached for 1 hour per (start, end) date pair.
        """
        cache_key = f"sales_analytics:{start_date.date()}:{end_date.date()}"
        cached_data = await cache.get_cache(cache_key)
        if cached_data:
            return cached_data

        pipeline = [
            {
                "$match": {
                    "created_at": {
                        "$gte": start_date,
                        "$lte": end_date
                    },
                    # Only count orders that actually generated revenue.
                    "status": {"$in": ["completed", "delivered"]}
                }
            },
            {
                "$group": {
                    "_id": {"$dateToString": {"format": "%Y-%m-%d", "date": "$created_at"}},
                    "total_sales": {"$sum": "$total_amount"},
                    "order_count": {"$sum": 1}
                }
            },
            {"$sort": {"_id": 1}}
        ]

        sales_data = await db.db["orders"].aggregate(pipeline).to_list(None)

        # Compute the totals once instead of re-summing the same generator
        # three times as before.
        total_revenue = sum(day["total_sales"] for day in sales_data)
        total_orders = sum(day["order_count"] for day in sales_data)

        result = {
            "daily_sales": sales_data,
            "total_revenue": total_revenue,
            "total_orders": total_orders,
            # `or 1` guards the empty-range case against division by zero.
            "average_order_value": total_revenue / (total_orders or 1)
        }

        await cache.set_cache(cache_key, result, expire=3600)  # Cache for 1 hour
        return result

    @staticmethod
    async def get_product_analytics() -> Dict[str, Any]:
        """Return the top-10 products by revenue plus overall product/stock counts."""
        cache_key = "product_analytics"
        cached_data = await cache.get_cache(cache_key)
        if cached_data:
            return cached_data

        pipeline = [
            {
                "$unwind": "$products"
            },
            {
                "$group": {
                    "_id": "$products.product_id",
                    "total_quantity": {"$sum": "$products.quantity"},
                    "total_revenue": {
                        "$sum": {
                            "$multiply": ["$products.price", "$products.quantity"]
                        }
                    }
                }
            },
            {
                "$sort": {"total_revenue": -1}
            },
            {
                "$limit": 10
            }
        ]

        top_products = await db.db["orders"].aggregate(pipeline).to_list(None)

        # Fetch all product details with a single $in query instead of one
        # find_one round trip per product (the previous N+1 pattern).
        product_ids = [product["_id"] for product in top_products]
        detail_docs = await db.db["products"].find(
            {"_id": {"$in": product_ids}}
        ).to_list(None)
        details_by_id = {doc["_id"]: doc for doc in detail_docs}

        for product in top_products:
            product_detail = details_by_id.get(product["_id"])
            if product_detail:
                product["name"] = product_detail["name"]
                product["category"] = product_detail["category"]

        result = {
            "top_products": top_products,
            "total_products": await db.db["products"].count_documents({}),
            "low_stock_products": await db.db["products"].count_documents({"inventory_count": {"$lt": 10}})
        }

        await cache.set_cache(cache_key, result, expire=3600)  # Cache for 1 hour
        return result

    @staticmethod
    async def get_customer_analytics() -> Dict[str, Any]:
        """Return per-customer order totals, top spenders and value segments."""
        cache_key = "customer_analytics"
        cached_data = await cache.get_cache(cache_key)
        if cached_data:
            return cached_data

        pipeline = [
            {
                "$group": {
                    "_id": "$customer_id",
                    "total_orders": {"$sum": 1},
                    "total_spent": {"$sum": "$total_amount"},
                    "last_order": {"$max": "$created_at"}
                }
            },
            {
                "$sort": {"total_spent": -1}
            }
        ]

        customer_data = await db.db["orders"].aggregate(pipeline).to_list(None)

        # Hoisted so the grand total is summed once.
        total_spent_all = sum(c["total_spent"] for c in customer_data)

        result = {
            "total_customers": len(customer_data),
            # Already sorted descending by total_spent, so a slice is the top 10.
            "top_customers": customer_data[:10],
            "average_customer_value": total_spent_all / (len(customer_data) or 1),
            "customer_segments": {
                "high_value": len([c for c in customer_data if c["total_spent"] > 1000]),
                "medium_value": len([c for c in customer_data if 500 <= c["total_spent"] <= 1000]),
                "low_value": len([c for c in customer_data if c["total_spent"] < 500])
            }
        }

        await cache.set_cache(cache_key, result, expire=3600)  # Cache for 1 hour
        return result

# Module-level singleton used by the API routers.
analytics = AnalyticsService()
\ No newline at end of file
diff --git a/app/services/backup.py b/app/services/backup.py
new file mode 100644
index 0000000000000000000000000000000000000000..5417a540a3a4d67f8ecaee97c8b19f90d7c536d2
--- /dev/null
+++ b/app/services/backup.py
@@ -0,0 +1,179 @@
+import os
+import shutil
+import json
+import tarfile
+from datetime import datetime
+from typing import Dict, Any, List
+from bson import ObjectId
+from ..db.database import db
+from ..utils.logger import logger
+
class BackupService:
    """Create, restore, list and delete tar.gz snapshots of the database and uploads."""

    def __init__(self):
        # All archives live under ./backups relative to the working directory.
        self.backup_dir = "backups"
        self._ensure_backup_dir()

    def _ensure_backup_dir(self):
        """Ensure backup directory exists"""
        if not os.path.exists(self.backup_dir):
            os.makedirs(self.backup_dir)

    async def create_backup(self, include_files: bool = True) -> Dict[str, Any]:
        """Create a new system backup.

        Dumps every collection to JSON, optionally copies the uploads
        directory, packs everything into a .tar.gz archive and records the
        backup in the ``backup_history`` collection.

        Returns a summary dict (id, path, size, created_at); re-raises on failure.
        """
        try:
            timestamp = datetime.utcnow().strftime("%Y%m%d_%H%M%S")
            backup_id = str(ObjectId())
            backup_name = f"backup_{timestamp}_{backup_id}"
            backup_path = os.path.join(self.backup_dir, backup_name)

            # Create backup directory
            os.makedirs(backup_path, exist_ok=True)

            # Backup database collections; ObjectIds are stringified so the
            # dump is JSON-serializable (restore converts them back).
            db_backup = {}
            for collection in await db.db.list_collection_names():
                docs = await db.db[collection].find().to_list(None)
                db_backup[collection] = [
                    {**doc, "_id": str(doc["_id"])}
                    for doc in docs
                ]

            # Save database backup (default=str covers datetimes etc.)
            with open(os.path.join(backup_path, "database.json"), "w") as f:
                json.dump(db_backup, f, default=str)

            # Backup files if requested
            if include_files:
                uploads_dir = "uploads"
                if os.path.exists(uploads_dir):
                    shutil.copytree(
                        uploads_dir,
                        os.path.join(backup_path, "uploads"),
                        dirs_exist_ok=True
                    )

            # Create archive
            archive_path = f"{backup_path}.tar.gz"
            with tarfile.open(archive_path, "w:gz") as tar:
                tar.add(backup_path, arcname=os.path.basename(backup_path))

            # Clean up temporary directory
            shutil.rmtree(backup_path)

            # Record backup in database
            backup_info = {
                "_id": backup_id,
                "filename": f"{backup_name}.tar.gz",
                "path": archive_path,
                "created_at": datetime.utcnow(),
                "size": os.path.getsize(archive_path),
                "includes_files": include_files
            }

            await db.db["backup_history"].insert_one(backup_info)

            return {
                "id": backup_id,
                "path": archive_path,
                "size": backup_info["size"],
                "created_at": backup_info["created_at"]
            }

        except Exception as e:
            logger.error(f"Backup creation failed: {str(e)}")
            raise

    async def restore_backup(self, backup_path: str) -> Dict[str, Any]:
        """Restore system from a backup archive.

        WARNING: destructive — every existing collection is emptied before the
        dump is re-inserted, and the uploads directory is replaced wholesale.

        Raises FileNotFoundError when the archive does not exist; re-raises
        any other failure after logging.
        """
        # Bound before the try block so the finally-cleanup below can always
        # reference it (previously a missing archive raised before the name
        # was assigned, turning the finally clause into a NameError).
        restore_dir = None
        try:
            if not os.path.exists(backup_path):
                raise FileNotFoundError("Backup file not found")

            # Create temporary restoration directory
            restore_dir = f"restore_{datetime.utcnow().strftime('%Y%m%d_%H%M%S')}"
            os.makedirs(restore_dir, exist_ok=True)

            # NOTE(review): extractall on an untrusted archive is vulnerable
            # to path traversal; pass filter="data" once the runtime supports
            # the tarfile extraction-filter API — TODO confirm Python version.
            with tarfile.open(backup_path, "r:gz") as tar:
                tar.extractall(restore_dir)

            # The archive contains a single top-level backup directory.
            backup_contents = os.listdir(restore_dir)[0]
            backup_root = os.path.join(restore_dir, backup_contents)

            # Restore database
            with open(os.path.join(backup_root, "database.json"), "r") as f:
                db_backup = json.load(f)

            # Clear existing collections
            for collection in await db.db.list_collection_names():
                await db.db[collection].delete_many({})

            # Restore collections
            for collection, docs in db_backup.items():
                if docs:
                    # Convert string IDs back to ObjectId
                    for doc in docs:
                        doc["_id"] = ObjectId(doc["_id"])
                    await db.db[collection].insert_many(docs)

            # Restore files if present
            uploads_source = os.path.join(backup_root, "uploads")
            if os.path.exists(uploads_source):
                if os.path.exists("uploads"):
                    shutil.rmtree("uploads")
                shutil.copytree(uploads_source, "uploads")

            return {
                "success": True,
                "collections_restored": len(db_backup),
                "files_restored": os.path.exists(uploads_source)
            }

        except Exception as e:
            logger.error(f"Backup restoration failed: {str(e)}")
            raise
        finally:
            # Always remove the scratch directory, even on failure.
            if restore_dir and os.path.exists(restore_dir):
                shutil.rmtree(restore_dir)

    async def list_backups(self) -> List[Dict[str, Any]]:
        """List all recorded backups, newest first."""
        try:
            backups = await db.db["backup_history"].find().sort("created_at", -1).to_list(None)
            return [
                {
                    "id": str(backup["_id"]),
                    "filename": backup["filename"],
                    "created_at": backup["created_at"],
                    "size": backup["size"],
                    "includes_files": backup["includes_files"]
                }
                for backup in backups
            ]
        except Exception as e:
            logger.error(f"Failed to list backups: {str(e)}")
            raise

    async def delete_backup(self, backup_id: str) -> bool:
        """Delete a backup archive and its history record; False if unknown id."""
        try:
            backup = await db.db["backup_history"].find_one({"_id": backup_id})
            if not backup:
                return False

            # Delete the physical backup file
            if os.path.exists(backup["path"]):
                os.remove(backup["path"])

            # Remove from database
            await db.db["backup_history"].delete_one({"_id": backup_id})
            return True

        except Exception as e:
            logger.error(f"Failed to delete backup: {str(e)}")
            raise

# Module-level singleton; importing creates ./backups if missing.
backup = BackupService()
\ No newline at end of file
diff --git a/app/services/calendar.py b/app/services/calendar.py
new file mode 100644
index 0000000000000000000000000000000000000000..5adc359db51bb384a57f6f8d3e69087f10eedf03
--- /dev/null
+++ b/app/services/calendar.py
@@ -0,0 +1,215 @@
+from datetime import datetime, timedelta
+from typing import List, Dict, Any, Optional
+from bson import ObjectId
+from ..db.database import db
+from ..utils.cache import cache
+from ..services.notifications import notifications
+
class CalendarService:
    """Calendar CRUD on the Mongo ``events`` collection with cache invalidation
    and attendee notifications.

    NOTE(review): ``get_user_events`` caches under keys that include the date
    range, but the invalidation paths delete only ``user_events:{id}`` — stale
    range-scoped cache entries may persist until their TTL; confirm the cache
    key scheme.
    """

    async def create_event(
        self,
        user_id: str,
        title: str,
        description: str,
        start_time: datetime,
        end_time: datetime,
        attendees: List[str] = None,
        is_all_day: bool = False,
        reminder_minutes: int = 30
    ) -> Dict[str, Any]:
        """Create a new calendar event, invalidate caches and notify attendees.

        Returns the stored event dict (including the Mongo-assigned ``_id``).
        """
        event = {
            "user_id": user_id,
            "title": title,
            "description": description,
            "start_time": start_time,
            "end_time": end_time,
            "attendees": attendees or [],
            "is_all_day": is_all_day,
            "reminder_minutes": reminder_minutes,
            "status": "scheduled",
            "created_at": datetime.utcnow()
        }

        result = await db.db["events"].insert_one(event)
        event["_id"] = result.inserted_id

        # Clear cache for affected users (creator plus every attendee).
        cache_keys = [f"user_events:{user_id}"]
        for attendee in attendees or []:
            cache_keys.append(f"user_events:{attendee}")

        for key in cache_keys:
            await cache.delete_cache(key)

        # Notify attendees — one notification per invitee.
        if attendees:
            for attendee in attendees:
                await notifications.create_notification(
                    user_id=attendee,
                    title=f"New Event Invitation: {title}",
                    message=f"You have been invited to an event: {title}",
                    notification_type="event_invitation",
                    data={"event_id": str(result.inserted_id)}
                )

        return event

    async def get_user_events(
        self,
        user_id: str,
        start_date: datetime,
        end_date: datetime,
        include_attendee_events: bool = True
    ) -> List[Dict[str, Any]]:
        """Get events for a user within a date range.

        Includes events the user created and (optionally) events they are
        invited to. Only events fully inside [start_date, end_date] match,
        since both start_time and end_time are range-constrained.
        """
        cache_key = f"user_events:{user_id}:{start_date.date()}:{end_date.date()}"
        cached = await cache.get_cache(cache_key)
        if cached:
            return cached

        query = {
            "$or": [
                {"user_id": user_id},  # Events created by user
                # {"_id": None} is a deliberately never-matching branch used to
                # disable the attendee clause when the flag is off.
                {"attendees": user_id} if include_attendee_events else {"_id": None}
            ],
            "start_time": {"$gte": start_date},
            "end_time": {"$lte": end_date}
        }

        cursor = db.db["events"].find(query).sort("start_time", 1)
        events = await cursor.to_list(None)

        # NOTE(review): raw Mongo docs contain ObjectId/datetime values —
        # assumes the cache layer can serialize them; TODO confirm.
        await cache.set_cache(cache_key, events, expire=300)  # Cache for 5 minutes
        return events

    async def update_event(
        self,
        event_id: str,
        user_id: str,
        update_data: Dict[str, Any]
    ) -> Optional[Dict[str, Any]]:
        """Update an event; only its creator may do so.

        Returns the freshly re-read event document, or None when the id is
        invalid or the event does not belong to ``user_id``.
        """
        if not ObjectId.is_valid(event_id):
            return None

        event = await db.db["events"].find_one({
            "_id": ObjectId(event_id),
            "user_id": user_id  # Only creator can update
        })

        if not event:
            return None

        update_data["updated_at"] = datetime.utcnow()

        await db.db["events"].update_one(
            {"_id": ObjectId(event_id)},
            {"$set": update_data}
        )

        # Clear cache for affected users
        cache_keys = [f"user_events:{user_id}"]
        for attendee in event.get("attendees", []):
            cache_keys.append(f"user_events:{attendee}")

        for key in cache_keys:
            await cache.delete_cache(key)

        # Notify attendees of changes — only when the schedule itself moved.
        if "start_time" in update_data or "end_time" in update_data:
            for attendee in event.get("attendees", []):
                await notifications.create_notification(
                    user_id=attendee,
                    title=f"Event Updated: {event['title']}",
                    message=f"An event you're attending has been updated",
                    notification_type="event_update",
                    data={"event_id": event_id}
                )

        return await db.db["events"].find_one({"_id": ObjectId(event_id)})

    async def delete_event(self, event_id: str, user_id: str) -> bool:
        """Delete an event (creator only); notifies attendees of the cancellation."""
        if not ObjectId.is_valid(event_id):
            return False

        event = await db.db["events"].find_one({
            "_id": ObjectId(event_id),
            "user_id": user_id  # Only creator can delete
        })

        if not event:
            return False

        result = await db.db["events"].delete_one({"_id": ObjectId(event_id)})

        if result.deleted_count > 0:
            # Clear cache for affected users
            cache_keys = [f"user_events:{user_id}"]
            for attendee in event.get("attendees", []):
                cache_keys.append(f"user_events:{attendee}")
                # Notify attendees
                await notifications.create_notification(
                    user_id=attendee,
                    title=f"Event Cancelled: {event['title']}",
                    message=f"An event you were attending has been cancelled",
                    notification_type="event_cancellation",
                    data={"event_id": event_id}
                )

            for key in cache_keys:
                await cache.delete_cache(key)

            return True
        return False

    async def respond_to_event(
        self,
        event_id: str,
        user_id: str,
        response: str
    ) -> bool:
        """Record an attendee's RSVP ('accepted'/'declined'/'maybe') and notify the creator.

        Returns False for an invalid id, an unknown response value, or when
        ``user_id`` is not on the event's attendee list.
        """
        if not ObjectId.is_valid(event_id):
            return False

        valid_responses = ["accepted", "declined", "maybe"]
        if response not in valid_responses:
            return False

        event = await db.db["events"].find_one({
            "_id": ObjectId(event_id),
            "attendees": user_id
        })

        if not event:
            return False

        # Update response in attendee list — stored as a per-user key under
        # the attendee_responses sub-document.
        await db.db["events"].update_one(
            {"_id": ObjectId(event_id)},
            {
                "$set": {
                    f"attendee_responses.{user_id}": response,
                    "updated_at": datetime.utcnow()
                }
            }
        )

        # Notify event creator
        await notifications.create_notification(
            user_id=event["user_id"],
            title=f"Event Response: {event['title']}",
            message=f"An attendee has {response} your event",
            notification_type="event_response",
            data={
                "event_id": event_id,
                "responder": user_id,
                "response": response
            }
        )

        return True

# Module-level singleton used by routers and the scheduler service.
calendar = CalendarService()
\ No newline at end of file
diff --git a/app/services/maintenance.py b/app/services/maintenance.py
new file mode 100644
index 0000000000000000000000000000000000000000..b63d3aacaedf7e46be805f48919a6bb27a3cc45d
--- /dev/null
+++ b/app/services/maintenance.py
@@ -0,0 +1,217 @@
+import os
+import shutil
+import psutil
+from datetime import datetime, timedelta
+from typing import Dict, Any, Optional
+from sqlalchemy import select, delete, update, func
+from ..db.database import db
+from ..utils.logger import logger
+from ..core.config import settings
+from ..services.websocket import create_and_broadcast_notification
+from ..db.models import User, Order, Notification, Session
+
class MaintenanceService:
    """Housekeeping: session cleanup, archiving, health checks, backups, log
    rotation and storage-quota enforcement. Methods are registered as
    scheduled jobs by ``SchedulerService``."""

    async def cleanup_expired_sessions(self) -> int:
        """Delete sessions idle for more than 7 days; returns the number removed (0 on error)."""
        try:
            cutoff = datetime.utcnow() - timedelta(days=7)
            async with db.session() as session:
                stmt = delete(Session).where(Session.last_activity < cutoff)
                result = await session.execute(stmt)
                await session.commit()
                return result.rowcount
        except Exception as e:
            logger.error(f"Error cleaning up sessions: {str(e)}")
            return 0

    async def archive_old_data(self) -> Dict[str, int]:
        """Flag year-old finished orders and read notifications as archived.

        Returns per-entity counts; an empty dict on failure (returning None,
        as before, violated the annotated Dict[str, int] return type and
        forced callers to None-check).
        """
        try:
            cutoff = datetime.utcnow() - timedelta(days=365)
            archived = {}

            async with db.session() as session:
                # Archive old orders
                order_stmt = update(Order).where(
                    Order.created_at < cutoff,
                    Order.status.in_(["completed", "cancelled"])
                ).values(archived=True)
                order_result = await session.execute(order_stmt)
                archived["orders"] = order_result.rowcount

                # Archive old notifications
                notif_stmt = update(Notification).where(
                    Notification.created_at < cutoff,
                    Notification.read == True
                ).values(archived=True)
                notif_result = await session.execute(notif_stmt)
                archived["notifications"] = notif_result.rowcount

                await session.commit()
                return archived

        except Exception as e:
            logger.error(f"Error archiving old data: {str(e)}")
            return {}

    async def check_system_health(self) -> Dict[str, Any]:
        """Check DB connectivity and disk headroom; alerts admins when disk is low."""
        try:
            async with db.session() as session:
                # Check database connection by running a simple query
                await session.execute(select(func.now()))

                # Get disk stats via psutil
                disk = psutil.disk_usage('/')
                total_space = disk.total / (1024 * 1024 * 1024)  # GB
                free_space = disk.free / (1024 * 1024 * 1024)  # GB

                health_data = {
                    "status": "healthy",
                    "database": {
                        "connected": True
                    },
                    "disk": {
                        "total_gb": total_space,
                        "free_gb": free_space,
                        "usage_percent": disk.percent
                    },
                    "timestamp": datetime.utcnow()
                }

                # Send alert if disk space is low
                if free_space < 5:  # Less than 5GB free
                    await create_and_broadcast_notification(
                        user_id="admin",
                        title="Low Disk Space Alert",
                        message=f"Server is running low on disk space. Only {free_space:.2f}GB remaining.",
                        notification_type="system_alert",
                        data={"free_space_gb": free_space}
                    )

                return health_data

        except Exception as e:
            logger.error(f"Health check error: {str(e)}")
            return {"status": "unhealthy", "error": str(e)}

    async def monitor_system_resources(self) -> Dict[str, Any]:
        """Sample process/CPU/memory stats; alerts when DB connections near the cap."""
        try:
            async with db.session() as session:
                # NOTE(review): open_files() rarely reports sockets on most
                # platforms, so this connection count is likely always ~0 —
                # consider psutil.Process().connections(); TODO confirm intent.
                process = psutil.Process()
                open_files = process.open_files()
                connections = len([f for f in open_files if 'socket' in str(f.path)])

                resources = {
                    "database": {
                        "connections": connections,
                    },
                    "system": {
                        "cpu_percent": psutil.cpu_percent(),
                        "memory_percent": psutil.virtual_memory().percent
                    },
                    "timestamp": datetime.utcnow()
                }

                # Alert at 90% of the configured connection budget.
                if connections > settings.MAX_DB_CONNECTIONS * 0.9:
                    await create_and_broadcast_notification(
                        user_id="admin",
                        title="High Database Connections",
                        message=f"Database has {connections} active connections",
                        notification_type="system_alert",
                        data={"connections": connections}
                    )

                return resources

        except Exception as e:
            logger.error(f"Resource monitoring error: {str(e)}")
            return {"error": str(e)}

    async def perform_database_maintenance(self) -> Dict[str, Any]:
        """Run ANALYZE on the main tables and purge orphaned notifications."""
        try:
            # Local import: `text` is not in this module's import list and
            # Session.execute() requires an executable construct, not a raw
            # string, under SQLAlchemy 1.4+/2.0.
            from sqlalchemy import text

            async with db.session() as session:
                # Run ANALYZE on major tables (table names come from our own
                # models, not user input, so the f-string is safe here).
                for table in [User, Order, Notification]:
                    await session.execute(text(f"ANALYZE {table.__tablename__}"))

                # Clean up any orphaned records:
                # delete notifications whose user no longer exists.
                stmt = delete(Notification).where(
                    ~Notification.user_id.in_(
                        select(User.id)
                    )
                )
                await session.execute(stmt)
                await session.commit()

                return {"status": "success"}

        except Exception as e:
            logger.error(f"Database maintenance error: {str(e)}")
            return {"error": str(e)}

    async def perform_scheduled_backup(self) -> Dict[str, Any]:
        """Run the nightly full backup (registered by SchedulerService as 'daily_backup').

        This method was referenced by the scheduler but did not exist, which
        made scheduler registration fail with AttributeError at import time.
        """
        try:
            # Local import to avoid a circular import at module load time.
            from .backup import backup
            return await backup.create_backup(include_files=True)
        except Exception as e:
            logger.error(f"Scheduled backup failed: {str(e)}")
            return {"error": str(e)}

    async def rotate_log_files(self) -> None:
        """Archive any log file over 10MB and start a fresh empty one."""
        log_dir = "logs"
        max_log_size = 10 * 1024 * 1024  # 10MB

        try:
            # Nothing to rotate if the log directory has not been created yet
            # (os.listdir would otherwise raise FileNotFoundError).
            if not os.path.isdir(log_dir):
                return
            for filename in os.listdir(log_dir):
                filepath = os.path.join(log_dir, filename)
                if os.path.getsize(filepath) > max_log_size:
                    # Archive under "<name>.<YYYYMMDD>". The previous code used
                    # a hard-coded placeholder name, so every rotation moved to
                    # the same archive file and clobbered the prior one.
                    archive_name = f"{filename}.{datetime.now().strftime('%Y%m%d')}"
                    shutil.move(filepath, os.path.join(log_dir, archive_name))

                    # Create new, empty log file in place.
                    open(filepath, 'a').close()
                    logger.info(f"Rotated log file: {filename}")
        except Exception as e:
            logger.error(f"Log rotation error: {str(e)}")

    async def manage_storage_quotas(self) -> Dict[str, Any]:
        """Warn on oversized upload dirs and purge day-old temp files."""
        try:
            results = {
                "warnings": [],
                "cleaned": 0
            }

            # Check and warn on upload directories over the configured limit.
            upload_dirs = ["uploads/documents", "uploads/images"]
            for directory in upload_dirs:
                if os.path.exists(directory):
                    total_size = sum(
                        os.path.getsize(os.path.join(directory, f))
                        for f in os.listdir(directory)
                        if os.path.isfile(os.path.join(directory, f))
                    ) / (1024 * 1024)  # Convert to MB

                    if total_size > settings.MAX_UPLOAD_DIR_SIZE_MB:
                        results["warnings"].append(
                            f"Upload directory {directory} exceeds size limit"
                        )

            # Clean up temporary files older than 24 hours.
            # NOTE(review): getctime is metadata-change time on Unix, not
            # creation time — confirm this matches the intended retention.
            temp_dirs = ["uploads/temp", "backups/temp"]
            for directory in temp_dirs:
                if os.path.exists(directory):
                    cutoff = datetime.now() - timedelta(days=1)
                    for filename in os.listdir(directory):
                        filepath = os.path.join(directory, filename)
                        if os.path.getctime(filepath) < cutoff.timestamp():
                            os.remove(filepath)
                            results["cleaned"] += 1

            return results
        except Exception as e:
            logger.error(f"Storage quota management error: {str(e)}")
            return {"error": str(e)}

# Module-level singleton consumed by the scheduler and admin routes.
maintenance = MaintenanceService()
\ No newline at end of file
diff --git a/app/services/notifications.py b/app/services/notifications.py
new file mode 100644
index 0000000000000000000000000000000000000000..a82d3066cdba1a89e6cd6eba578562502190e4fb
--- /dev/null
+++ b/app/services/notifications.py
@@ -0,0 +1,89 @@
+from typing import Dict, Any
+from datetime import datetime
+from ..db.database import db
+from ..core.config import settings
+from ..utils.cache import cache
+from ..services.websocket import create_and_broadcast_notification
+
class NotificationService:
    """CRUD helpers for per-user notifications stored in the ``notifications`` collection."""

    async def create_notification(
        self,
        user_id: str,
        title: str,
        message: str,
        notification_type: str,
        data: Dict[str, Any] = None
    ):
        """Create, persist and broadcast a notification (delegates to the websocket service)."""
        return await create_and_broadcast_notification(
            user_id=user_id,
            title=title,
            message=message,
            notification_type=notification_type,
            data=data
        )

    async def get_user_notifications(
        self,
        user_id: str,
        skip: int = 0,
        limit: int = 50,
        unread_only: bool = False
    ):
        """Return one page of a user's notifications, newest first.

        Only the default page (skip=0, limit=50, all notifications) is cached:
        the cache key does not encode the pagination parameters, so caching
        every page under the same key (as before) returned the first cached
        page for *any* skip/limit combination.
        """
        cache_key = f"user_notifications:{user_id}"
        use_cache = not unread_only and skip == 0 and limit == 50
        if use_cache:
            cached = await cache.get_cache(cache_key)
            if cached:
                return cached

        query = {"user_id": user_id}
        if unread_only:
            query["read"] = False

        cursor = db.db["notifications"].find(query)\
            .sort("created_at", -1)\
            .skip(skip)\
            .limit(limit)

        notifications = await cursor.to_list(length=limit)

        if use_cache:
            await cache.set_cache(cache_key, notifications, expire=300)  # Cache for 5 minutes

        return notifications

    async def mark_as_read(self, notification_id: str, user_id: str):
        """Mark one notification as read; True when a document was updated.

        NOTE(review): queries by a string _id — confirm the id type actually
        stored for notifications (the websocket service persists them via
        SQLAlchemy, not Mongo).
        """
        result = await db.db["notifications"].update_one(
            {"_id": notification_id, "user_id": user_id},
            {"$set": {"read": True}}
        )

        if result.modified_count > 0:
            # Invalidate the cached default page so reads reflect the change.
            await cache.delete_cache(f"user_notifications:{user_id}")
            return True
        return False

    async def mark_all_as_read(self, user_id: str):
        """Mark all unread notifications as read; returns how many changed."""
        result = await db.db["notifications"].update_many(
            {"user_id": user_id, "read": False},
            {"$set": {"read": True}}
        )

        await cache.delete_cache(f"user_notifications:{user_id}")
        return result.modified_count

    async def delete_notification(self, notification_id: str, user_id: str):
        """Delete a single notification owned by ``user_id``; True on success."""
        result = await db.db["notifications"].delete_one(
            {"_id": notification_id, "user_id": user_id}
        )

        if result.deleted_count > 0:
            await cache.delete_cache(f"user_notifications:{user_id}")
            return True
        return False

# Module-level singleton used by routers and the calendar service.
notifications = NotificationService()
\ No newline at end of file
diff --git a/app/services/scheduler.py b/app/services/scheduler.py
new file mode 100644
index 0000000000000000000000000000000000000000..55cf4563120361a44f9acab0b4aa9c3a1218110b
--- /dev/null
+++ b/app/services/scheduler.py
@@ -0,0 +1,243 @@
+from datetime import datetime, timedelta
+from typing import List, Dict, Any, Optional
+from bson import ObjectId
+from apscheduler.schedulers.asyncio import AsyncIOScheduler
+from apscheduler.triggers.cron import CronTrigger
+from ..db.database import db
+from ..utils.cache import cache
+from ..utils.logger import logger
+from ..services.calendar import calendar
+from ..services.maintenance import maintenance
+
class SchedulerService:
    """Owns the APScheduler instance (maintenance cron jobs) and the
    recurring-event helpers built on top of CalendarService."""

    def __init__(self):
        self.scheduler = AsyncIOScheduler()
        self._setup_maintenance_jobs()

    def _setup_maintenance_jobs(self):
        """Register all maintenance cron jobs (idempotent via replace_existing)."""
        # Daily database maintenance at 2 AM
        self.scheduler.add_job(
            maintenance.perform_database_maintenance,
            CronTrigger(hour=2),
            id="daily_db_maintenance",
            replace_existing=True
        )

        # Session cleanup every 6 hours
        self.scheduler.add_job(
            maintenance.cleanup_expired_sessions,
            CronTrigger(hour="*/6"),
            id="session_cleanup",
            replace_existing=True
        )

        # System health check every 15 minutes
        self.scheduler.add_job(
            maintenance.monitor_system_resources,
            CronTrigger(minute="*/15"),
            id="health_check",
            replace_existing=True
        )

        # Daily backup at 1 AM
        self.scheduler.add_job(
            maintenance.perform_scheduled_backup,
            CronTrigger(hour=1),
            id="daily_backup",
            replace_existing=True
        )

        # Daily log rotation at 3 AM
        self.scheduler.add_job(
            maintenance.rotate_log_files,
            CronTrigger(hour=3),
            id="log_rotation",
            replace_existing=True
        )

        # Storage quota check every 2 hours
        self.scheduler.add_job(
            maintenance.manage_storage_quotas,
            CronTrigger(hour="*/2"),
            id="storage_quota_check",
            replace_existing=True
        )

        # Monthly data archiving at 4 AM on the 1st of each month
        self.scheduler.add_job(
            maintenance.archive_old_data,
            CronTrigger(day=1, hour=4),
            id="monthly_archiving",
            replace_existing=True
        )

    def start(self):
        """Start the scheduler; logs and re-raises on failure."""
        try:
            self.scheduler.start()
            logger.info("Scheduler started successfully")
        except Exception as e:
            logger.error(f"Failed to start scheduler: {str(e)}")
            raise

    def shutdown(self):
        """Shutdown the scheduler; logs and re-raises on failure."""
        try:
            self.scheduler.shutdown()
            logger.info("Scheduler shutdown successfully")
        except Exception as e:
            logger.error(f"Error during scheduler shutdown: {str(e)}")
            raise

    def get_jobs(self):
        """Return a JSON-friendly summary of every registered job."""
        return [
            {
                "id": job.id,
                "name": job.name,
                "next_run_time": job.next_run_time.isoformat() if job.next_run_time else None,
                "trigger": str(job.trigger)
            }
            for job in self.scheduler.get_jobs()
        ]

    @staticmethod
    def _days_in_month(year: int, month: int) -> int:
        """Number of days in the given month (leap-year aware)."""
        if month == 12:
            first_of_next = datetime(year + 1, 1, 1)
        else:
            first_of_next = datetime(year, month + 1, 1)
        return (first_of_next - timedelta(days=1)).day

    @staticmethod
    def _next_occurrence(start: datetime, pattern: str) -> datetime:
        """Advance ``start`` by one recurrence step.

        Month/year steps clamp the day-of-month so e.g. Jan 31 -> Feb 29/28
        instead of raising ValueError from datetime.replace.
        """
        if pattern == "daily":
            return start + timedelta(days=1)
        if pattern == "weekly":
            return start + timedelta(weeks=1)
        if pattern == "monthly":
            if start.month == 12:
                year, month = start.year + 1, 1
            else:
                year, month = start.year, start.month + 1
        else:  # yearly (patterns are validated by the caller)
            year, month = start.year + 1, start.month
        day = min(start.day, SchedulerService._days_in_month(year, month))
        return start.replace(year=year, month=month, day=day)

    async def create_recurring_event(
        self,
        user_id: str,
        title: str,
        description: str,
        start_time: datetime,
        end_time: datetime,
        recurrence_pattern: str,  # daily, weekly, monthly, yearly
        recurrence_end_date: Optional[datetime] = None,
        attendees: List[str] = None,
        reminder_minutes: int = 30,
        max_occurrences: int = 1000
    ) -> List[Dict[str, Any]]:
        """Create a series of events following ``recurrence_pattern``.

        All occurrences share a generated ``recurrence_group`` id so
        update/delete_recurring_event can find siblings (previously no group
        was stored, so the follow-up queries matched unrelated events).
        ``max_occurrences`` bounds the series; without it, a missing
        ``recurrence_end_date`` looped forever.

        Raises:
            ValueError: if ``recurrence_pattern`` is unsupported (previously an
                unknown pattern never advanced the start time -> infinite loop).
        """
        if recurrence_pattern not in ("daily", "weekly", "monthly", "yearly"):
            raise ValueError(f"Unsupported recurrence_pattern: {recurrence_pattern!r}")

        events: List[Dict[str, Any]] = []
        duration = end_time - start_time
        current_start = start_time
        recurrence_group = str(ObjectId())

        while len(events) < max_occurrences:
            if recurrence_end_date and current_start > recurrence_end_date:
                break

            # Create the individual occurrence via the calendar service.
            event = await calendar.create_event(
                user_id=user_id,
                title=title,
                description=description,
                start_time=current_start,
                end_time=current_start + duration,
                attendees=attendees,
                reminder_minutes=reminder_minutes
            )
            # Tag the stored document with the recurrence metadata.
            await db.db["events"].update_one(
                {"_id": event["_id"]},
                {"$set": {
                    "recurrence_group": recurrence_group,
                    "recurrence_pattern": recurrence_pattern
                }}
            )
            event["recurrence_group"] = recurrence_group
            event["recurrence_pattern"] = recurrence_pattern
            events.append(event)

            current_start = self._next_occurrence(current_start, recurrence_pattern)

        return events

    async def update_recurring_event(
        self,
        event_id: str,
        user_id: str,
        update_data: Dict[str, Any],
        update_future: bool = True
    ) -> List[Dict[str, Any]]:
        """Update a recurring event and optionally its future occurrences.

        Returns the list of affected event documents ([] when the event does
        not exist or is not owned by ``user_id``).
        """
        # Get the original event
        event = await db.db["events"].find_one({
            "_id": ObjectId(event_id),
            "user_id": user_id
        })

        if not event:
            return []

        # Update the current event
        await calendar.update_event(event_id, user_id, update_data)

        updated_events = [event]

        # Update future occurrences if requested. The recurrence_group guard
        # mirrors delete_recurring_event: without it, a missing group matched
        # {"recurrence_group": None}, i.e. every later event of the user.
        if update_future and event.get("recurrence_group"):
            future_events = await db.db["events"].find({
                "recurrence_group": event["recurrence_group"],
                "start_time": {"$gt": event["start_time"]},
                "user_id": user_id
            }).to_list(None)

            for future_event in future_events:
                await calendar.update_event(
                    str(future_event["_id"]),
                    user_id,
                    update_data
                )
                updated_events.append(future_event)

        return updated_events

    async def delete_recurring_event(
        self,
        event_id: str,
        user_id: str,
        delete_future: bool = True
    ) -> bool:
        """Delete a recurring event and optionally its future occurrences."""
        event = await db.db["events"].find_one({
            "_id": ObjectId(event_id),
            "user_id": user_id
        })

        if not event:
            return False

        # Delete the current event (creator-only check happens inside).
        await calendar.delete_event(event_id, user_id)

        # Delete future occurrences if requested
        if delete_future and event.get("recurrence_group"):
            await db.db["events"].delete_many({
                "recurrence_group": event["recurrence_group"],
                "start_time": {"$gt": event["start_time"]},
                "user_id": user_id
            })

        return True

    async def get_upcoming_recurring_events(
        self,
        user_id: str,
        days: int = 30
    ) -> List[Dict[str, Any]]:
        """Get the user's events for the next ``days`` days, sorted by start time."""
        start_date = datetime.utcnow()
        end_date = start_date + timedelta(days=days)

        events = await calendar.get_user_events(
            user_id=user_id,
            start_date=start_date,
            end_date=end_date,
            include_attendee_events=True
        )

        return sorted(events, key=lambda x: x["start_time"])
+
# Module-level singleton; instantiation registers the maintenance jobs
# (the scheduler itself does not run until start() is called).
scheduler = SchedulerService()
\ No newline at end of file
diff --git a/app/services/websocket.py b/app/services/websocket.py
new file mode 100644
index 0000000000000000000000000000000000000000..1484b5748c065a3d628621f3e18ff363cc4296fe
--- /dev/null
+++ b/app/services/websocket.py
@@ -0,0 +1,80 @@
+from typing import List, Dict, Any
+from fastapi import WebSocket
+from datetime import datetime
+from ..db.database import db
+from ..utils.cache import cache
+from ..db.models import Notification
+
# Store active WebSocket connections.
# NOTE(review): module-level mutable state — connections are tracked per
# process only and are not shared across workers.
active_connections: List[WebSocket] = []
+
async def connect(websocket: WebSocket):
    """Complete the WebSocket handshake and register the socket for broadcasts."""
    await websocket.accept()
    active_connections.append(websocket)
+
async def disconnect(websocket: WebSocket):
    """Forget a WebSocket connection; a no-op if it was never registered."""
    try:
        active_connections.remove(websocket)
    except ValueError:
        # Already removed (e.g. by a concurrent broadcast cleanup) — ignore.
        pass
+
async def broadcast_message(message: dict):
    """Send *message* as JSON to every connected client, pruning dead sockets.

    Clients whose send fails are collected first and removed afterwards so
    the list is never mutated while being iterated.
    """
    disconnected = []
    for connection in active_connections:
        try:
            await connection.send_json(message)
        except Exception:
            # Narrowed from a bare `except:` which also swallowed
            # BaseException subclasses such as asyncio.CancelledError.
            disconnected.append(connection)

    # Clean up disconnected clients
    for connection in disconnected:
        await disconnect(connection)
+
async def create_and_broadcast_notification(
    user_id: str,
    title: str,
    message: str,
    notification_type: str,
    data: Dict[str, Any] = None
) -> Dict[str, Any]:
    """Persist a notification, broadcast it to all connected websocket clients,
    and invalidate the user's notification cache.

    Returns a JSON-serializable dict of the stored notification.

    NOTE(review): this persists via the SQLAlchemy session API while other
    services read notifications Mongo-style (db.db["notifications"]) — confirm
    which storage backend is actually live for notifications.
    """
    async with db.session() as session:
        # Create notification ORM row (unread by default).
        notification = Notification(
            user_id=user_id,
            title=title,
            message=message,
            type=notification_type,
            data=data,
            created_at=datetime.utcnow(),
            read=False
        )

        # Store in database; refresh pulls the generated primary key.
        session.add(notification)
        await session.commit()
        await session.refresh(notification)

        # Convert to dict for broadcasting (datetimes ISO-formatted for JSON).
        notification_dict = {
            "id": str(notification.id),
            "user_id": notification.user_id,
            "title": notification.title,
            "message": notification.message,
            "type": notification.type,
            "data": notification.data,
            "created_at": notification.created_at.isoformat(),
            "read": notification.read
        }

        # Broadcast to connected clients
        await broadcast_message({
            "type": "notification",
            "data": notification_dict
        })

        # Clear user's notification cache
        await cache.delete_cache(f"user_notifications:{user_id}")

        return notification_dict
\ No newline at end of file
diff --git a/app/templates/email/low_stock_alert.html b/app/templates/email/low_stock_alert.html
new file mode 100644
index 0000000000000000000000000000000000000000..ec7c3794d68f498c834cdd1032627234bb457e85
--- /dev/null
+++ b/app/templates/email/low_stock_alert.html
@@ -0,0 +1,41 @@
+
+
+
+
+
+
+
+
+
+
+
Product Stock Alert
+
The following product is running low on inventory:
+
+
+
+ - Product Name: {{ product_name }}
+ - Current Stock: {{ current_stock }} units
+
+
+
+ View Product
+
+
+
Please review and restock this item as needed to maintain adequate inventory levels.
+
+
+
+
+
\ No newline at end of file
diff --git a/app/templates/email/order_confirmation.html b/app/templates/email/order_confirmation.html
new file mode 100644
index 0000000000000000000000000000000000000000..ffd4bd8a4c296b46e8cb5b5bae8365aca36c907d
--- /dev/null
+++ b/app/templates/email/order_confirmation.html
@@ -0,0 +1,40 @@
+
+
+
+
+
+
+
+
+
+
Thank you for your order!
+
Order Details
+
Order ID: {{ order_id }}
+
Total Amount: ${{ "%.2f"|format(total_amount) }}
+
Status: {{ status|title }}
+
+
Products Ordered:
+
+ {% for product in products %}
+ - {{ product.name }} - Quantity: {{ product.quantity }} - Price: ${{ "%.2f"|format(product.price) }}
+ {% endfor %}
+
+
+
You can track your order status by clicking the button below:
+
Track Order
+
+
+
+
+
\ No newline at end of file
diff --git a/app/templates/email/password_reset.html b/app/templates/email/password_reset.html
new file mode 100644
index 0000000000000000000000000000000000000000..c82dfbc987a7b23e264644eff39fe3176935f3b8
--- /dev/null
+++ b/app/templates/email/password_reset.html
@@ -0,0 +1,38 @@
+
+
+
+
+
+
+
+
+
+
We received a request to reset your password. Click the button below to create a new password:
+
+
+ Reset Password
+
+
+
+
If you didn't request a password reset, please ignore this email or contact support if you have concerns.
+
+
+
This password reset link will expire in 30 minutes for security reasons.
+
+
+
+
+
\ No newline at end of file
diff --git a/app/templates/email/welcome.html b/app/templates/email/welcome.html
new file mode 100644
index 0000000000000000000000000000000000000000..0a405b3c5e55f6da62f90a217ad1b5f7503dc3b6
--- /dev/null
+++ b/app/templates/email/welcome.html
@@ -0,0 +1,45 @@
+
+
+
+
+
+
+
+
+
+
Hello {{ username }},
+
Welcome to our platform! We're excited to have you on board.
+
+
+
What you can do with your account:
+
+ - Manage products and inventory
+ - Process and track orders
+ - View analytics and reports
+ - Manage customer relationships
+
+
+
+
+ Get Started
+
+
+
If you have any questions or need assistance, our support team is here to help.
+
+
+
+
+
\ No newline at end of file
diff --git a/app/utils/__pycache__/cache.cpython-312.pyc b/app/utils/__pycache__/cache.cpython-312.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..1d50154df424d38d3c07cec078067f5a1f04741a
Binary files /dev/null and b/app/utils/__pycache__/cache.cpython-312.pyc differ
diff --git a/app/utils/__pycache__/file_storage.cpython-312.pyc b/app/utils/__pycache__/file_storage.cpython-312.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..bfeb97fd839bf458f98966f0cf1958ac041aefda
Binary files /dev/null and b/app/utils/__pycache__/file_storage.cpython-312.pyc differ
diff --git a/app/utils/__pycache__/logger.cpython-312.pyc b/app/utils/__pycache__/logger.cpython-312.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..b52d9498dd1feb5ba3f542f558a7fcd208b8b6d8
Binary files /dev/null and b/app/utils/__pycache__/logger.cpython-312.pyc differ
diff --git a/app/utils/__pycache__/rate_limiter.cpython-312.pyc b/app/utils/__pycache__/rate_limiter.cpython-312.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..cc7bd113c647127759059ca6df1612a705551d60
Binary files /dev/null and b/app/utils/__pycache__/rate_limiter.cpython-312.pyc differ
diff --git a/app/utils/__pycache__/tasks.cpython-312.pyc b/app/utils/__pycache__/tasks.cpython-312.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..9f8c161d50c5808dea49b84c75fa42e7bfe0c3aa
Binary files /dev/null and b/app/utils/__pycache__/tasks.cpython-312.pyc differ
diff --git a/app/utils/cache.py b/app/utils/cache.py
new file mode 100644
index 0000000000000000000000000000000000000000..9971dce2906d2746fbf0719e9f7e62fd4ad16c39
--- /dev/null
+++ b/app/utils/cache.py
@@ -0,0 +1,101 @@
import json
import time
from typing import Any, Optional

import redis

from ..core.config import settings
from ..utils.logger import logger
+
class RedisCache:
    """Singleton JSON cache backed by Redis with an in-process fallback.

    If Redis is unreachable at start-up, the cache degrades to a plain
    dict so the application keeps working. Fallback entries now honour
    the TTL (previously ``expire`` was silently ignored, so stale values
    lived forever) and are JSON round-tripped for parity with Redis.
    """

    _instance = None

    def __new__(cls):
        if cls._instance is None:
            cls._instance = super(RedisCache, cls).__new__(cls)
            cls._instance.initialize()
        return cls._instance

    def initialize(self):
        """Initialize Redis connection with fallback to dummy cache"""
        # fallback maps key -> (json_payload, absolute_expiry_timestamp)
        self.fallback_cache = {}
        try:
            self.redis_client = redis.Redis(
                host=settings.REDIS_HOST,
                port=settings.REDIS_PORT,
                decode_responses=True,
                socket_timeout=1  # fail fast instead of stalling requests
            )
            self.redis_client.ping()  # Test connection
            self.is_connected = True
            logger.info("Redis cache initialized successfully")
        except Exception as e:
            self.is_connected = False
            logger.warning(f"Redis connection failed, using in-memory fallback: {str(e)}")

    async def set_cache(self, key: str, value: Any, expire: int = 3600):
        """Set a cache entry with optional expiration time (default 1 hour)"""
        try:
            if not self.is_connected:
                # Honour the TTL in the fallback too; JSON round-trip keeps
                # semantics identical to the Redis path (no mutable aliasing).
                self.fallback_cache[key] = (json.dumps(value), time.time() + expire)
                return True

            return bool(self.redis_client.setex(
                key,
                expire,
                json.dumps(value)
            ))
        except Exception as e:
            logger.error(f"Cache set error: {str(e)}")
            return False

    async def get_cache(self, key: str) -> Optional[Any]:
        """Get a cached value by key (None if missing or expired)"""
        try:
            if not self.is_connected:
                entry = self.fallback_cache.get(key)
                if entry is None:
                    return None
                payload, deadline = entry
                if time.time() >= deadline:
                    # Lazily evict expired fallback entries.
                    self.fallback_cache.pop(key, None)
                    return None
                return json.loads(payload)

            value = self.redis_client.get(key)
            return json.loads(value) if value else None
        except Exception as e:
            logger.error(f"Cache get error: {str(e)}")
            return None

    async def delete_cache(self, key: str) -> bool:
        """Delete a cache entry by key"""
        try:
            if not self.is_connected:
                self.fallback_cache.pop(key, None)
                return True

            return bool(self.redis_client.delete(key))
        except Exception as e:
            logger.error(f"Cache delete error: {str(e)}")
            return False

    async def clear_cache_pattern(self, pattern: str) -> bool:
        """Clear all cache entries matching a pattern"""
        try:
            if not self.is_connected:
                # Simple substring matching for the fallback cache.
                keys_to_delete = [k for k in self.fallback_cache if pattern in k]
                for k in keys_to_delete:
                    del self.fallback_cache[k]
                return True

            keys = self.redis_client.keys(pattern)
            if keys:
                return bool(self.redis_client.delete(*keys))
            return True
        except Exception as e:
            logger.error(f"Cache clear error: {str(e)}")
            return False

    def check_connection(self) -> bool:
        """Check if Redis connection is alive"""
        try:
            self.redis_client.ping()
            self.is_connected = True
            return True
        except Exception:
            # Narrowed from a bare `except:` (which also caught
            # KeyboardInterrupt/SystemExit).
            self.is_connected = False
            return False

cache = RedisCache()
\ No newline at end of file
diff --git a/app/utils/email.py b/app/utils/email.py
new file mode 100644
index 0000000000000000000000000000000000000000..e43fe2f0c4015dbd581c7445014e91b0907f756e
--- /dev/null
+++ b/app/utils/email.py
@@ -0,0 +1,103 @@
+from fastapi_mail import FastMail, MessageSchema, ConnectionConfig
+from pydantic import EmailStr
+from typing import List, Dict, Any
+from ..core.config import settings
+from pathlib import Path
+import aiofiles
+import jinja2
+
class EmailService:
    """Send templated HTML emails through FastMail.

    Each ``send_*`` helper maps a domain event onto a template under
    ``app/templates/email`` and delegates to :meth:`send_email`.
    """

    def __init__(self):
        template_dir = Path(__file__).parent.parent / 'templates' / 'email'
        self.conf = ConnectionConfig(
            MAIL_USERNAME=settings.MAIL_USERNAME,
            MAIL_PASSWORD=settings.MAIL_PASSWORD,
            MAIL_FROM=settings.MAIL_FROM,
            MAIL_PORT=settings.MAIL_PORT,
            MAIL_SERVER=settings.MAIL_SERVER,
            MAIL_TLS=True,
            MAIL_SSL=False,
            TEMPLATE_FOLDER=template_dir
        )
        self.fast_mail = FastMail(self.conf)

        # Make sure the template folder exists on disk.
        template_dir.mkdir(parents=True, exist_ok=True)

    async def send_email(
        self,
        email_to: List[EmailStr],
        subject: str,
        template_name: str,
        template_data: Dict[str, Any]
    ):
        """Render *template_name* with *template_data* and send it.

        Returns True on success, False on any delivery failure.
        """
        try:
            outgoing = MessageSchema(
                subject=subject,
                recipients=email_to,
                template_body=template_data,
                subtype="html"
            )
            await self.fast_mail.send_message(
                outgoing,
                template_name=template_name
            )
        except Exception as e:
            print(f"Failed to send email: {str(e)}")
            return False
        else:
            return True

    async def send_order_confirmation(self, email: EmailStr, order_data: Dict[str, Any]):
        """Send order confirmation email"""
        payload = {
            "order_id": str(order_data["_id"]),
            "total_amount": order_data["total_amount"],
            "products": order_data["products"],
            "status": order_data["status"]
        }
        return await self.send_email(
            email_to=[email],
            subject="Order Confirmation",
            template_name="order_confirmation.html",
            template_data=payload
        )

    async def send_password_reset(self, email: EmailStr, reset_token: str):
        """Send password reset email"""
        payload = {
            "reset_token": reset_token,
            "reset_url": f"{settings.FRONTEND_URL}/reset-password?token={reset_token}"
        }
        return await self.send_email(
            email_to=[email],
            subject="Password Reset Request",
            template_name="password_reset.html",
            template_data=payload
        )

    async def send_welcome_email(self, email: EmailStr, username: str):
        """Send welcome email to new users"""
        payload = {
            "username": username,
            "login_url": f"{settings.FRONTEND_URL}/login"
        }
        return await self.send_email(
            email_to=[email],
            subject="Welcome to Admin Dashboard",
            template_name="welcome.html",
            template_data=payload
        )

    async def send_low_stock_alert(self, email: EmailStr, product_data: Dict[str, Any]):
        """Send low stock alert to admins"""
        payload = {
            "product_name": product_data["name"],
            "current_stock": product_data["inventory_count"],
            "product_id": str(product_data["_id"])
        }
        return await self.send_email(
            email_to=[email],
            subject="Low Stock Alert",
            template_name="low_stock_alert.html",
            template_data=payload
        )

email_service = EmailService()
\ No newline at end of file
diff --git a/app/utils/file_storage.py b/app/utils/file_storage.py
new file mode 100644
index 0000000000000000000000000000000000000000..217a83162c5c631f030ca5277edb5589ee34d2eb
--- /dev/null
+++ b/app/utils/file_storage.py
@@ -0,0 +1,79 @@
+import os
+import shutil
+from fastapi import UploadFile
+from datetime import datetime
+from pathlib import Path
+from typing import Optional
+from ..core.config import settings
+from .logger import logger
+
class FileStorage:
    """Local-disk storage for uploaded files, rooted at ./uploads.

    Filenames and paths coming from clients are untrusted: the save path
    strips directory components from the client-supplied name and the
    delete path refuses anything that resolves outside the upload root
    (both were previously vulnerable to `../` path traversal).
    """

    def __init__(self):
        self.upload_dir = Path("uploads")
        self.upload_dir.mkdir(exist_ok=True)

        # Create subdirectories for different file types
        self.image_dir = self.upload_dir / "images"
        self.document_dir = self.upload_dir / "documents"
        self.image_dir.mkdir(exist_ok=True)
        self.document_dir.mkdir(exist_ok=True)

    async def save_file(
        self,
        file: UploadFile,
        category: str = "documents",
        max_size: int = 10 * 1024 * 1024  # 10MB default
    ) -> Optional[str]:
        """Validate and persist an upload.

        Returns the stored path relative to the upload root, or None on
        any validation or I/O failure (the error is logged).
        """
        try:
            # Validate file size by seeking to the end of the stream.
            file.file.seek(0, os.SEEK_END)
            size = file.file.tell()
            file.file.seek(0)

            if size > max_size:
                raise ValueError(f"File size exceeds maximum limit of {max_size/1024/1024}MB")

            # Strip any directory components from the client-supplied name
            # to prevent path traversal (e.g. "../../etc/passwd").
            safe_name = Path(file.filename).name

            # Generate unique filename
            timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
            filename = f"{timestamp}_{safe_name}"

            # Determine storage directory based on category
            if category == "images":
                save_dir = self.image_dir
                allowed_types = {".jpg", ".jpeg", ".png", ".gif"}
            else:
                save_dir = self.document_dir
                allowed_types = {".pdf", ".doc", ".docx", ".txt"}

            # Validate file type
            file_ext = Path(safe_name).suffix.lower()
            if file_ext not in allowed_types:
                raise ValueError(f"File type {file_ext} not allowed")

            # Save file
            file_path = save_dir / filename
            with file_path.open("wb") as buffer:
                shutil.copyfileobj(file.file, buffer)

            return str(file_path.relative_to(self.upload_dir))

        except Exception as e:
            logger.error(f"File upload error: {str(e)}")
            return None

    async def delete_file(self, file_path: str) -> bool:
        """Delete a previously stored file.

        Refuses paths that resolve outside the upload root, since
        *file_path* may originate from client input.
        """
        try:
            full_path = (self.upload_dir / file_path).resolve()
            if self.upload_dir.resolve() not in full_path.parents:
                logger.error(f"File deletion error: path escapes upload dir: {file_path}")
                return False
            if full_path.exists():
                full_path.unlink()
                return True
            return False
        except Exception as e:
            logger.error(f"File deletion error: {str(e)}")
            return False

    def get_file_url(self, file_path: str) -> str:
        """Generate URL for accessing the file"""
        return f"/uploads/{file_path}"

file_storage = FileStorage()
\ No newline at end of file
diff --git a/app/utils/logger.py b/app/utils/logger.py
new file mode 100644
index 0000000000000000000000000000000000000000..683420b2879a7f67121656edf446596e7599f608
--- /dev/null
+++ b/app/utils/logger.py
@@ -0,0 +1,59 @@
+import logging
+import sys
+from datetime import datetime
+from pathlib import Path
+from logging.handlers import RotatingFileHandler
+from ..core.config import settings
+
# Create logs directory if it doesn't exist
logs_dir = Path("logs")
logs_dir.mkdir(exist_ok=True)

# Shared formatter for both console and file output.
log_format = logging.Formatter(
    "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
)

def setup_logger(name: str) -> logging.Logger:
    """Return an INFO-level logger writing to stdout and a rotating file.

    Idempotent: calling it again with the same name returns the existing
    logger without attaching duplicate handlers (previously every call
    stacked two more handlers, duplicating each log line).
    """
    logger = logging.getLogger(name)
    logger.setLevel(logging.INFO)

    # Already configured — don't re-register handlers.
    if logger.handlers:
        return logger

    # Console handler
    console_handler = logging.StreamHandler(sys.stdout)
    console_handler.setFormatter(log_format)
    logger.addHandler(console_handler)

    # File handler with rotation
    file_handler = RotatingFileHandler(
        logs_dir / f"{name}.log",
        maxBytes=10485760,  # 10MB
        backupCount=5
    )
    file_handler.setFormatter(log_format)
    logger.addHandler(file_handler)

    return logger
+
# Create main application logger
logger = setup_logger("admin_dashboard")

def log_api_request(method: str, path: str, status_code: int, duration: float):
    """Log one API request at INFO: method, path, status and latency.

    Uses lazy %-style arguments so the message is only formatted when the
    INFO level is enabled (logging best practice); rendered output is
    identical to the previous f-string.
    """
    logger.info(
        "API Request - Method: %s, Path: %s, Status: %s, Duration: %.3fs",
        method, path, status_code, duration,
    )
+
def log_error(error: Exception, context: dict = None):
    """Log an exception with its type and optional context dict.

    ``context`` may be None (treated as an empty dict). Lazy %-args avoid
    formatting when ERROR is disabled; output matches the old f-string.
    """
    logger.error(
        "Error: %s, Type: %s, Context: %s",
        error, type(error).__name__, context or {},
    )
+
def log_database_operation(operation: str, collection: str, success: bool):
    """Log a database operation's type, target collection and outcome.

    Lazy %-args avoid formatting when INFO is disabled; rendered output
    is identical to the previous f-string.
    """
    logger.info(
        "Database Operation - Type: %s, Collection: %s, Success: %s",
        operation, collection, success,
    )
\ No newline at end of file
diff --git a/app/utils/rate_limiter.py b/app/utils/rate_limiter.py
new file mode 100644
index 0000000000000000000000000000000000000000..b532f7ea07a39d348c9864ca8be1a2f9d5cc1981
--- /dev/null
+++ b/app/utils/rate_limiter.py
@@ -0,0 +1,110 @@
+from fastapi import HTTPException, Request
+from redis import Redis
+from ..core.config import settings
+from ..utils.logger import logger
+import time
+from typing import Dict
+import asyncio
+
class RateLimiter:
    """Singleton sliding-window rate limiter (100 requests / 60 s per IP).

    Uses a Redis sorted set per client IP when Redis is reachable, and an
    in-process dict otherwise. Fixes: a Redis failure used to *return*
    the fallback count early, skipping the 429 check entirely (the
    limiter was silently disabled for that request); the memory-store
    annotation wrongly declared float keys; bare ``except:`` narrowed.
    """

    _instance = None
    # Maps client IP -> {str(timestamp): timestamp}; keys are the string
    # form of the float time at which each request was recorded.
    _memory_store: Dict[str, Dict[str, float]] = {}

    def __new__(cls):
        if cls._instance is None:
            cls._instance = super(RateLimiter, cls).__new__(cls)
            cls._instance.initialize()
        return cls._instance

    def initialize(self):
        """Initialize Redis connection with fallback to in-memory store"""
        self.rate_limit = 100  # max requests allowed per window
        self.time_window = 60  # window length in seconds

        try:
            self.redis = Redis(
                host=settings.REDIS_HOST,
                port=settings.REDIS_PORT,
                decode_responses=True,
                socket_timeout=1  # fail fast rather than stalling requests
            )
            self.redis.ping()
            self.is_connected = True
            logger.info("Rate limiter Redis connection initialized successfully")
        except Exception as e:
            self.is_connected = False
            logger.warning(f"Redis connection failed for rate limiter, using in-memory fallback: {str(e)}")

    async def check_rate_limit(self, request: Request):
        """Record this request and raise HTTP 429 if the IP is over limit."""
        client_ip = request.client.host
        current = time.time()
        window_start = current - self.time_window

        # Use Redis if available
        if self.is_connected:
            try:
                key = f"rate_limit:{client_ip}"
                # Atomic batch: drop out-of-window entries, record this hit,
                # count the window, refresh the key TTL.
                pipeline = self.redis.pipeline()
                pipeline.zremrangebyscore(key, 0, window_start)
                pipeline.zadd(key, {str(current): current})
                pipeline.zcard(key)
                pipeline.expire(key, self.time_window)
                _, _, request_count, _ = pipeline.execute()
            except Exception as e:
                logger.error(f"Redis rate limit error: {str(e)}")
                self.is_connected = False
                # Fall through to the limit check below instead of returning
                # early (the early return bypassed enforcement entirely).
                request_count = await self._check_memory_rate_limit(client_ip, current, window_start)
        else:
            request_count = await self._check_memory_rate_limit(client_ip, current, window_start)

        if request_count > self.rate_limit:
            raise HTTPException(
                status_code=429,
                detail="Too many requests. Please try again later."
            )

    async def _check_memory_rate_limit(self, client_ip: str, current: float, window_start: float) -> int:
        """Record a hit for *client_ip* and return its in-window count."""
        if client_ip not in self._memory_store:
            self._memory_store[client_ip] = {}

        # Drop entries that fell out of the sliding window.
        self._memory_store[client_ip] = {
            ts: score for ts, score in self._memory_store[client_ip].items()
            if score > window_start
        }

        # Add new request
        self._memory_store[client_ip][str(current)] = current

        # Bound the number of tracked IPs to prevent unbounded growth.
        if len(self._memory_store) > 10000:
            await self._cleanup_memory_store()

        return len(self._memory_store[client_ip])

    async def _cleanup_memory_store(self):
        """Drop IPs whose every recorded hit is outside the current window."""
        current = time.time()
        window_start = current - self.time_window

        # Remove old IP entries
        old_ips = [
            ip for ip, timestamps in self._memory_store.items()
            if all(score <= window_start for score in timestamps.values())
        ]
        for ip in old_ips:
            del self._memory_store[ip]

    def check_connection(self) -> bool:
        """Check if Redis connection is alive"""
        try:
            self.redis.ping()
            self.is_connected = True
            return True
        except Exception:
            # Narrowed from a bare `except:` (which also swallowed
            # KeyboardInterrupt/SystemExit).
            self.is_connected = False
            return False

rate_limiter = RateLimiter()
\ No newline at end of file
diff --git a/app/utils/tasks.py b/app/utils/tasks.py
new file mode 100644
index 0000000000000000000000000000000000000000..2053b0a07718dc04e10aeb46e85c8e9eca5c870c
--- /dev/null
+++ b/app/utils/tasks.py
@@ -0,0 +1,154 @@
+from datetime import datetime, timedelta
+from ..db.database import db
+from ..services.maintenance import maintenance
+from ..utils.logger import logger
+from ..services.websocket import create_and_broadcast_notification
+import asyncio
+from sqlalchemy import select, delete
+from ..db.models import Event, User, Notification
+
async def check_event_reminders():
    """Check and send event reminders.

    Finds events starting within the next 30 minutes whose reminder has
    not yet been sent, pushes a WebSocket notification to the owner and
    each attendee, then flags the event so the reminder fires only once.
    Any failure is logged and swallowed (the periodic loop keeps running).
    """
    try:
        now = datetime.utcnow()
        # Events starting in the next 30 minutes with no reminder sent yet.
        stmt = select(Event).where(
            Event.start_time > now,
            Event.start_time <= now + timedelta(minutes=30),
            Event.reminder_sent == False
        )

        async with db.session() as session:
            result = await session.execute(stmt)
            upcoming_events = result.scalars().all()

            for event in upcoming_events:
                # Get event owner
                user_stmt = select(User).where(User.id == event.user_id)
                user_result = await session.execute(user_stmt)
                user = user_result.scalar_one_or_none()

                if user:
                    # Notify the owner plus all attendees.
                    # NOTE(review): assumes event.attendees is a list of
                    # user-id strings — confirm against the Event model.
                    reminder_users = [str(user.id)] + event.attendees
                    for user_id in reminder_users:
                        # NOTE(review): the message quotes event.reminder_minutes
                        # while the query window is fixed at 30 min — confirm intent.
                        await create_and_broadcast_notification(
                            user_id=user_id,
                            title=f"Event Reminder: {event.title}",
                            message=f"Your event '{event.title}' starts in {event.reminder_minutes} minutes",
                            notification_type="event_reminder",
                            data={"event_id": str(event.id)}
                        )

                    # Mark reminder as sent
                    event.reminder_sent = True

            # Commit all flag updates in one transaction.
            await session.commit()

    except Exception as e:
        logger.error(f"Error in event reminder check: {str(e)}")
+
async def cleanup_old_notifications():
    """Delete read notifications older than 30 days and log the count."""
    try:
        cutoff = datetime.utcnow() - timedelta(days=30)
        # Only already-read notifications past the cutoff are purged.
        stmt = delete(Notification).where(
            Notification.created_at < cutoff,
            Notification.read == True
        )
        async with db.session() as session:
            result = await session.execute(stmt)
            await session.commit()
            deleted_count = result.rowcount
        logger.info(f"Cleaned up {deleted_count} old notifications")
    except Exception as e:
        logger.error(f"Error in cleanup_old_notifications: {str(e)}")
+
async def perform_daily_maintenance():
    """Run the daily maintenance pass: session cleanup, archival, health
    check and resource monitoring. Errors are logged, never raised."""
    try:
        # Expired-session cleanup.
        deleted_sessions = await maintenance.cleanup_expired_sessions()
        logger.info(f"Cleaned up {deleted_sessions} expired sessions")

        # Archive stale data; only log when something was archived.
        archived = await maintenance.archive_old_data()
        if archived:
            logger.info(f"Archived data: {archived}")

        # System health check — the result dict carries an "error" key on failure.
        health_data = await maintenance.check_system_health()
        if "error" in health_data:
            logger.error(f"System health check error: {health_data['error']}")
        else:
            logger.info("System health check completed successfully")

        # Resource monitoring follows the same error-key convention.
        resources = await maintenance.monitor_system_resources()
        if "error" in resources:
            logger.error(f"Resource monitoring error: {resources['error']}")
        else:
            logger.info("System resource monitoring completed successfully")

    except Exception as e:
        logger.error(f"Error in daily maintenance: {str(e)}")
+
async def perform_weekly_maintenance():
    """Run the weekly maintenance pass: database upkeep, log rotation and
    storage-quota enforcement. Errors are logged, never raised."""
    try:
        # Database-level maintenance.
        await maintenance.perform_database_maintenance()
        logger.info("Database maintenance completed successfully")

        # Log file rotation.
        await maintenance.rotate_log_files()
        logger.info("Log rotation completed successfully")

        # Storage quotas: surface each warning, then report cleanup totals.
        quota_results = await maintenance.manage_storage_quotas()
        for warning in quota_results.get("warnings") or []:
            logger.warning(warning)
        logger.info(f"Storage cleanup: removed {quota_results.get('cleaned', 0)} temporary files")

    except Exception as e:
        logger.error(f"Error in weekly maintenance: {str(e)}")
+
async def run_periodic_tasks():
    """Scheduler loop: per-minute reminder/cleanup work, daily maintenance
    during the 2 AM hour, weekly maintenance during Sunday's 3 AM hour.

    Each maintenance job fires at most once per window; the flag resets
    as soon as the clock leaves that window.
    """
    daily_done = False
    weekly_done = False

    while True:
        try:
            now = datetime.utcnow()

            # Every-minute work.
            await check_event_reminders()
            await cleanup_old_notifications()

            # Daily maintenance: once while the hour is 2.
            if now.hour == 2:
                if not daily_done:
                    await perform_daily_maintenance()
                    daily_done = True
            else:
                daily_done = False

            # Weekly maintenance: once during Sunday (weekday 6) at hour 3.
            in_weekly_window = now.weekday() == 6 and now.hour == 3
            if in_weekly_window and not weekly_done:
                await perform_weekly_maintenance()
                weekly_done = True
            elif not in_weekly_window:
                weekly_done = False

            await asyncio.sleep(60)  # 1 minute
        except Exception as e:
            logger.error(f"Error in periodic tasks: {str(e)}")
            await asyncio.sleep(60)  # Wait before retrying
diff --git a/logs/admin_dashboard.log b/logs/admin_dashboard.log
new file mode 100644
index 0000000000000000000000000000000000000000..18808edc1e94da84c201820f3b6bf02bd8463e38
--- /dev/null
+++ b/logs/admin_dashboard.log
@@ -0,0 +1,204 @@
+2025-04-19 01:32:12,312 - admin_dashboard - WARNING - Redis connection failed, using in-memory fallback: Timeout connecting to server
+2025-04-19 01:37:09,024 - admin_dashboard - WARNING - Redis connection failed, using in-memory fallback: Timeout connecting to server
+2025-04-19 01:37:53,673 - admin_dashboard - WARNING - Redis connection failed, using in-memory fallback: Timeout connecting to server
+2025-04-19 02:15:15,868 - admin_dashboard - WARNING - Redis connection failed for rate limiter, using in-memory fallback: Timeout connecting to server
+2025-04-19 02:15:17,913 - admin_dashboard - WARNING - Redis connection failed, using in-memory fallback: Timeout connecting to server
+2025-04-19 02:16:16,456 - admin_dashboard - WARNING - Redis connection failed for rate limiter, using in-memory fallback: Timeout connecting to server
+2025-04-19 02:16:18,515 - admin_dashboard - WARNING - Redis connection failed, using in-memory fallback: Timeout connecting to server
+2025-04-19 02:17:26,030 - admin_dashboard - WARNING - Redis connection failed for rate limiter, using in-memory fallback: Timeout connecting to server
+2025-04-19 02:17:28,065 - admin_dashboard - WARNING - Redis connection failed, using in-memory fallback: Timeout connecting to server
+2025-04-19 02:17:38,545 - admin_dashboard - ERROR - Error in cleanup_old_notifications: 'DatabaseSession' object has no attribute 'db'
+2025-04-19 02:18:39,604 - admin_dashboard - ERROR - Error in cleanup_old_notifications: 'DatabaseSession' object has no attribute 'db'
+2025-04-19 02:19:40,700 - admin_dashboard - ERROR - Error in cleanup_old_notifications: 'DatabaseSession' object has no attribute 'db'
+2025-04-19 02:20:41,953 - admin_dashboard - ERROR - Error in cleanup_old_notifications: 'DatabaseSession' object has no attribute 'db'
+2025-04-19 02:21:33,994 - admin_dashboard - WARNING - Redis connection failed for rate limiter, using in-memory fallback: Timeout connecting to server
+2025-04-19 02:21:36,083 - admin_dashboard - WARNING - Redis connection failed, using in-memory fallback: Timeout connecting to server
+2025-04-19 02:22:16,980 - admin_dashboard - WARNING - Redis connection failed for rate limiter, using in-memory fallback: Timeout connecting to server
+2025-04-19 02:22:19,075 - admin_dashboard - WARNING - Redis connection failed, using in-memory fallback: Timeout connecting to server
+2025-04-19 02:22:31,154 - admin_dashboard - ERROR - Error in cleanup_old_notifications: 'DatabaseSession' object has no attribute 'db'
+2025-04-19 02:23:07,704 - admin_dashboard - WARNING - Redis connection failed for rate limiter, using in-memory fallback: Timeout connecting to server
+2025-04-19 02:23:09,759 - admin_dashboard - WARNING - Redis connection failed, using in-memory fallback: Timeout connecting to server
+2025-04-19 02:23:17,547 - admin_dashboard - ERROR - Error in event reminder check: 'async_sessionmaker' object has no attribute 'session'
+2025-04-19 02:23:17,551 - admin_dashboard - ERROR - Error in cleanup_old_notifications: 'async_sessionmaker' object has no attribute 'db'
+2025-04-19 02:23:41,570 - admin_dashboard - WARNING - Redis connection failed for rate limiter, using in-memory fallback: Timeout connecting to server
+2025-04-19 02:23:43,636 - admin_dashboard - WARNING - Redis connection failed, using in-memory fallback: Timeout connecting to server
+2025-04-19 02:23:51,036 - admin_dashboard - ERROR - Error in event reminder check: 'async_sessionmaker' object has no attribute 'session'
+2025-04-19 02:23:51,039 - admin_dashboard - ERROR - Error in cleanup_old_notifications: 'async_sessionmaker' object has no attribute 'db'
+2025-04-19 02:24:51,034 - admin_dashboard - ERROR - Error in event reminder check: 'async_sessionmaker' object has no attribute 'session'
+2025-04-19 02:24:51,037 - admin_dashboard - ERROR - Error in cleanup_old_notifications: 'async_sessionmaker' object has no attribute 'db'
+2025-04-19 02:25:51,032 - admin_dashboard - ERROR - Error in event reminder check: 'async_sessionmaker' object has no attribute 'session'
+2025-04-19 02:25:51,036 - admin_dashboard - ERROR - Error in cleanup_old_notifications: 'async_sessionmaker' object has no attribute 'db'
+2025-04-19 02:26:51,035 - admin_dashboard - ERROR - Error in event reminder check: 'async_sessionmaker' object has no attribute 'session'
+2025-04-19 02:26:51,040 - admin_dashboard - ERROR - Error in cleanup_old_notifications: 'async_sessionmaker' object has no attribute 'db'
+2025-04-19 02:27:51,033 - admin_dashboard - ERROR - Error in event reminder check: 'async_sessionmaker' object has no attribute 'session'
+2025-04-19 02:27:51,036 - admin_dashboard - ERROR - Error in cleanup_old_notifications: 'async_sessionmaker' object has no attribute 'db'
+2025-04-19 02:28:51,034 - admin_dashboard - ERROR - Error in event reminder check: 'async_sessionmaker' object has no attribute 'session'
+2025-04-19 02:28:51,041 - admin_dashboard - ERROR - Error in cleanup_old_notifications: 'async_sessionmaker' object has no attribute 'db'
+2025-04-19 02:29:51,042 - admin_dashboard - ERROR - Error in event reminder check: 'async_sessionmaker' object has no attribute 'session'
+2025-04-19 02:29:51,049 - admin_dashboard - ERROR - Error in cleanup_old_notifications: 'async_sessionmaker' object has no attribute 'db'
+2025-04-19 02:33:32,962 - admin_dashboard - WARNING - Redis connection failed for rate limiter, using in-memory fallback: Timeout connecting to server
+2025-04-19 02:33:35,069 - admin_dashboard - WARNING - Redis connection failed, using in-memory fallback: Timeout connecting to server
+2025-04-19 02:33:43,539 - admin_dashboard - ERROR - Error in event reminder check: 'async_sessionmaker' object has no attribute 'session'
+2025-04-19 02:33:43,569 - admin_dashboard - ERROR - Error in cleanup_old_notifications: 'async_sessionmaker' object has no attribute 'session'
+2025-04-19 02:34:43,694 - admin_dashboard - ERROR - Error in event reminder check: 'async_sessionmaker' object has no attribute 'session'
+2025-04-19 02:34:43,700 - admin_dashboard - ERROR - Error in cleanup_old_notifications: 'async_sessionmaker' object has no attribute 'session'
+2025-04-19 02:35:43,712 - admin_dashboard - ERROR - Error in event reminder check: 'async_sessionmaker' object has no attribute 'session'
+2025-04-19 02:35:43,715 - admin_dashboard - ERROR - Error in cleanup_old_notifications: 'async_sessionmaker' object has no attribute 'session'
+2025-04-19 02:36:59,634 - admin_dashboard - WARNING - Redis connection failed for rate limiter, using in-memory fallback: Timeout connecting to server
+2025-04-19 02:37:01,677 - admin_dashboard - WARNING - Redis connection failed, using in-memory fallback: Timeout connecting to server
+2025-04-19 02:37:09,060 - admin_dashboard - ERROR - Error in event reminder check: 'async_sessionmaker' object has no attribute 'session'
+2025-04-19 02:37:09,063 - admin_dashboard - ERROR - Error in cleanup_old_notifications: 'async_sessionmaker' object has no attribute 'session'
+2025-04-19 02:37:34,289 - admin_dashboard - WARNING - Redis connection failed for rate limiter, using in-memory fallback: Timeout connecting to server
+2025-04-19 02:37:36,328 - admin_dashboard - WARNING - Redis connection failed, using in-memory fallback: Timeout connecting to server
+2025-04-19 02:37:43,630 - admin_dashboard - ERROR - Error in event reminder check: 'async_sessionmaker' object has no attribute 'session'
+2025-04-19 02:37:43,632 - admin_dashboard - ERROR - Error in cleanup_old_notifications: 'async_sessionmaker' object has no attribute 'session'
+2025-04-19 02:38:14,607 - admin_dashboard - WARNING - Redis connection failed for rate limiter, using in-memory fallback: Timeout connecting to server
+2025-04-19 02:38:16,665 - admin_dashboard - WARNING - Redis connection failed, using in-memory fallback: Timeout connecting to server
+2025-04-19 02:38:24,482 - admin_dashboard - ERROR - Error in event reminder check: 'async_sessionmaker' object has no attribute 'session'
+2025-04-19 02:38:24,489 - admin_dashboard - ERROR - Error in cleanup_old_notifications: 'async_sessionmaker' object has no attribute 'session'
+2025-04-19 02:39:26,255 - admin_dashboard - WARNING - Redis connection failed for rate limiter, using in-memory fallback: Timeout connecting to server
+2025-04-19 02:39:28,314 - admin_dashboard - WARNING - Redis connection failed, using in-memory fallback: Timeout connecting to server
+2025-04-19 02:39:35,733 - admin_dashboard - ERROR - Error in event reminder check: 'async_sessionmaker' object has no attribute 'session'
+2025-04-19 02:39:35,736 - admin_dashboard - ERROR - Error in cleanup_old_notifications: 'async_sessionmaker' object has no attribute 'session'
+2025-04-19 02:40:10,037 - admin_dashboard - WARNING - Redis connection failed for rate limiter, using in-memory fallback: Timeout connecting to server
+2025-04-19 02:40:12,094 - admin_dashboard - WARNING - Redis connection failed, using in-memory fallback: Timeout connecting to server
+2025-04-19 02:40:19,162 - admin_dashboard - ERROR - Error in event reminder check: 'async_sessionmaker' object has no attribute 'session'
+2025-04-19 02:40:19,164 - admin_dashboard - ERROR - Error in cleanup_old_notifications: 'async_sessionmaker' object has no attribute 'session'
+2025-04-19 02:40:54,819 - admin_dashboard - WARNING - Redis connection failed for rate limiter, using in-memory fallback: Timeout connecting to server
+2025-04-19 02:40:56,865 - admin_dashboard - WARNING - Redis connection failed, using in-memory fallback: Timeout connecting to server
+2025-04-19 02:41:03,667 - admin_dashboard - ERROR - Error in event reminder check: 'async_sessionmaker' object has no attribute 'session'
+2025-04-19 02:41:03,669 - admin_dashboard - ERROR - Error in cleanup_old_notifications: 'async_sessionmaker' object has no attribute 'session'
+2025-04-19 02:42:03,661 - admin_dashboard - ERROR - Error in event reminder check: 'async_sessionmaker' object has no attribute 'session'
+2025-04-19 02:42:03,664 - admin_dashboard - ERROR - Error in cleanup_old_notifications: 'async_sessionmaker' object has no attribute 'session'
+2025-04-19 02:43:03,683 - admin_dashboard - ERROR - Error in event reminder check: 'async_sessionmaker' object has no attribute 'session'
+2025-04-19 02:43:03,687 - admin_dashboard - ERROR - Error in cleanup_old_notifications: 'async_sessionmaker' object has no attribute 'session'
+2025-04-19 02:44:03,695 - admin_dashboard - ERROR - Error in event reminder check: 'async_sessionmaker' object has no attribute 'session'
+2025-04-19 02:44:03,699 - admin_dashboard - ERROR - Error in cleanup_old_notifications: 'async_sessionmaker' object has no attribute 'session'
+2025-04-19 02:44:21,191 - admin_dashboard - WARNING - Redis connection failed for rate limiter, using in-memory fallback: Timeout connecting to server
+2025-04-19 02:44:23,221 - admin_dashboard - WARNING - Redis connection failed, using in-memory fallback: Timeout connecting to server
+2025-04-19 02:44:30,424 - admin_dashboard - ERROR - Error in event reminder check: 'async_sessionmaker' object has no attribute 'session'
+2025-04-19 02:44:30,428 - admin_dashboard - ERROR - Error in cleanup_old_notifications: 'async_sessionmaker' object has no attribute 'session'
+2025-04-19 02:45:30,451 - admin_dashboard - ERROR - Error in event reminder check: 'async_sessionmaker' object has no attribute 'session'
+2025-04-19 02:45:30,454 - admin_dashboard - ERROR - Error in cleanup_old_notifications: 'async_sessionmaker' object has no attribute 'session'
+2025-04-19 02:46:30,465 - admin_dashboard - ERROR - Error in event reminder check: 'async_sessionmaker' object has no attribute 'session'
+2025-04-19 02:46:30,468 - admin_dashboard - ERROR - Error in cleanup_old_notifications: 'async_sessionmaker' object has no attribute 'session'
+2025-04-19 02:47:30,469 - admin_dashboard - ERROR - Error in event reminder check: 'async_sessionmaker' object has no attribute 'session'
+2025-04-19 02:47:30,472 - admin_dashboard - ERROR - Error in cleanup_old_notifications: 'async_sessionmaker' object has no attribute 'session'
+2025-04-19 02:48:30,500 - admin_dashboard - ERROR - Error in event reminder check: 'async_sessionmaker' object has no attribute 'session'
+2025-04-19 02:48:30,503 - admin_dashboard - ERROR - Error in cleanup_old_notifications: 'async_sessionmaker' object has no attribute 'session'
+2025-04-19 02:49:30,533 - admin_dashboard - ERROR - Error in event reminder check: 'async_sessionmaker' object has no attribute 'session'
+2025-04-19 02:49:30,536 - admin_dashboard - ERROR - Error in cleanup_old_notifications: 'async_sessionmaker' object has no attribute 'session'
+2025-04-19 02:50:12,411 - admin_dashboard - WARNING - Redis connection failed for rate limiter, using in-memory fallback: Timeout connecting to server
+2025-04-19 02:50:14,466 - admin_dashboard - WARNING - Redis connection failed, using in-memory fallback: Timeout connecting to server
+2025-04-19 02:50:22,188 - admin_dashboard - ERROR - Error in event reminder check: 'async_sessionmaker' object has no attribute 'session'
+2025-04-19 02:50:22,194 - admin_dashboard - ERROR - Error in cleanup_old_notifications: 'async_sessionmaker' object has no attribute 'session'
+2025-04-19 02:51:22,211 - admin_dashboard - ERROR - Error in event reminder check: 'async_sessionmaker' object has no attribute 'session'
+2025-04-19 02:51:22,213 - admin_dashboard - ERROR - Error in cleanup_old_notifications: 'async_sessionmaker' object has no attribute 'session'
+2025-04-19 02:52:22,235 - admin_dashboard - ERROR - Error in event reminder check: 'async_sessionmaker' object has no attribute 'session'
+2025-04-19 02:52:22,239 - admin_dashboard - ERROR - Error in cleanup_old_notifications: 'async_sessionmaker' object has no attribute 'session'
+2025-04-19 02:53:22,256 - admin_dashboard - ERROR - Error in event reminder check: 'async_sessionmaker' object has no attribute 'session'
+2025-04-19 02:53:22,262 - admin_dashboard - ERROR - Error in cleanup_old_notifications: 'async_sessionmaker' object has no attribute 'session'
+2025-04-19 02:54:22,276 - admin_dashboard - ERROR - Error in event reminder check: 'async_sessionmaker' object has no attribute 'session'
+2025-04-19 02:54:22,279 - admin_dashboard - ERROR - Error in cleanup_old_notifications: 'async_sessionmaker' object has no attribute 'session'
+2025-04-19 02:55:22,287 - admin_dashboard - ERROR - Error in event reminder check: 'async_sessionmaker' object has no attribute 'session'
+2025-04-19 02:55:22,290 - admin_dashboard - ERROR - Error in cleanup_old_notifications: 'async_sessionmaker' object has no attribute 'session'
+2025-04-19 02:56:22,302 - admin_dashboard - ERROR - Error in event reminder check: 'async_sessionmaker' object has no attribute 'session'
+2025-04-19 02:56:22,305 - admin_dashboard - ERROR - Error in cleanup_old_notifications: 'async_sessionmaker' object has no attribute 'session'
+2025-04-19 02:57:22,308 - admin_dashboard - ERROR - Error in event reminder check: 'async_sessionmaker' object has no attribute 'session'
+2025-04-19 02:57:22,310 - admin_dashboard - ERROR - Error in cleanup_old_notifications: 'async_sessionmaker' object has no attribute 'session'
+2025-04-19 02:58:22,320 - admin_dashboard - ERROR - Error in event reminder check: 'async_sessionmaker' object has no attribute 'session'
+2025-04-19 02:58:22,323 - admin_dashboard - ERROR - Error in cleanup_old_notifications: 'async_sessionmaker' object has no attribute 'session'
+2025-04-19 02:59:22,336 - admin_dashboard - ERROR - Error in event reminder check: 'async_sessionmaker' object has no attribute 'session'
+2025-04-19 02:59:22,339 - admin_dashboard - ERROR - Error in cleanup_old_notifications: 'async_sessionmaker' object has no attribute 'session'
+2025-04-19 03:00:22,357 - admin_dashboard - ERROR - Error in event reminder check: 'async_sessionmaker' object has no attribute 'session'
+2025-04-19 03:00:22,515 - admin_dashboard - ERROR - Error in cleanup_old_notifications: 'async_sessionmaker' object has no attribute 'session'
+2025-04-19 03:00:22,764 - admin_dashboard - ERROR - Error in daily maintenance: 'async_sessionmaker' object has no attribute 'db'
+2025-04-19 03:01:22,896 - admin_dashboard - ERROR - Error in event reminder check: 'async_sessionmaker' object has no attribute 'session'
+2025-04-19 03:01:22,930 - admin_dashboard - ERROR - Error in cleanup_old_notifications: 'async_sessionmaker' object has no attribute 'session'
+2025-04-19 03:02:12,508 - admin_dashboard - WARNING - Redis connection failed for rate limiter, using in-memory fallback: Timeout connecting to server
+2025-04-19 03:02:14,571 - admin_dashboard - WARNING - Redis connection failed, using in-memory fallback: Timeout connecting to server
+2025-04-19 03:02:52,842 - admin_dashboard - WARNING - Redis connection failed for rate limiter, using in-memory fallback: Timeout connecting to server
+2025-04-19 03:02:55,123 - admin_dashboard - WARNING - Redis connection failed, using in-memory fallback: Timeout connecting to server
+2025-04-19 03:03:07,454 - admin_dashboard - INFO - Cleaned up 0 old notifications
+2025-04-19 03:03:07,457 - admin_dashboard - ERROR - Error in daily maintenance: 'Database' object has no attribute 'db'
+2025-04-19 03:04:09,626 - admin_dashboard - INFO - Cleaned up 0 old notifications
+2025-04-19 03:05:11,787 - admin_dashboard - INFO - Cleaned up 0 old notifications
+2025-04-19 03:06:13,940 - admin_dashboard - INFO - Cleaned up 0 old notifications
+2025-04-19 03:06:53,983 - admin_dashboard - WARNING - Redis connection failed for rate limiter, using in-memory fallback: Timeout connecting to server
+2025-04-19 03:06:56,064 - admin_dashboard - WARNING - Redis connection failed, using in-memory fallback: Timeout connecting to server
+2025-04-19 03:07:07,052 - admin_dashboard - INFO - Cleaned up 0 old notifications
+2025-04-19 03:07:07,056 - admin_dashboard - ERROR - Error in daily maintenance: 'Database' object has no attribute 'db'
+2025-04-19 03:08:09,584 - admin_dashboard - INFO - Cleaned up 0 old notifications
+2025-04-19 03:09:43,769 - admin_dashboard - WARNING - Redis connection failed for rate limiter, using in-memory fallback: Timeout connecting to server
+2025-04-19 03:09:45,874 - admin_dashboard - WARNING - Redis connection failed, using in-memory fallback: Timeout connecting to server
+2025-04-19 03:10:31,311 - admin_dashboard - WARNING - Redis connection failed for rate limiter, using in-memory fallback: Timeout connecting to server
+2025-04-19 03:10:33,371 - admin_dashboard - WARNING - Redis connection failed, using in-memory fallback: Timeout connecting to server
+2025-04-19 03:10:44,271 - admin_dashboard - INFO - Cleaned up 0 old notifications
+2025-04-19 03:10:44,274 - admin_dashboard - ERROR - Error in daily maintenance: 'Database' object has no attribute 'db'
+2025-04-19 03:12:23,573 - admin_dashboard - WARNING - Redis connection failed for rate limiter, using in-memory fallback: Timeout connecting to server
+2025-04-19 03:12:26,432 - admin_dashboard - WARNING - Redis connection failed, using in-memory fallback: Timeout connecting to server
+2025-04-19 03:13:33,756 - admin_dashboard - WARNING - Redis connection failed for rate limiter, using in-memory fallback: Timeout connecting to server
+2025-04-19 03:13:37,236 - admin_dashboard - WARNING - Redis connection failed, using in-memory fallback: Timeout connecting to server
+2025-04-19 03:13:50,540 - admin_dashboard - INFO - Cleaned up 0 old notifications
+2025-04-19 03:13:51,786 - admin_dashboard - INFO - Cleaned up 0 expired sessions
+2025-04-19 03:13:52,328 - admin_dashboard - ERROR - Error archiving old data: Unconsumed column names: archived
+2025-04-19 03:13:53,601 - admin_dashboard - ERROR - Health check error: argument 1 (impossible)
+2025-04-19 03:13:53,604 - admin_dashboard - ERROR - System health check error: argument 1 (impossible)
+2025-04-19 03:13:54,738 - admin_dashboard - ERROR - Resource monitoring error: 'Settings' object has no attribute 'MAX_DB_CONNECTIONS'
+2025-04-19 03:13:54,742 - admin_dashboard - ERROR - Resource monitoring error: 'Settings' object has no attribute 'MAX_DB_CONNECTIONS'
+2025-04-19 03:14:57,209 - admin_dashboard - INFO - Cleaned up 0 old notifications
+2025-04-19 03:15:34,694 - admin_dashboard - WARNING - Redis connection failed for rate limiter, using in-memory fallback: Timeout connecting to server
+2025-04-19 03:15:37,360 - admin_dashboard - WARNING - Redis connection failed, using in-memory fallback: Timeout connecting to server
+2025-04-19 03:15:50,508 - admin_dashboard - INFO - Cleaned up 0 old notifications
+2025-04-19 03:15:51,795 - admin_dashboard - INFO - Cleaned up 0 expired sessions
+2025-04-19 03:15:52,376 - admin_dashboard - ERROR - Error archiving old data: Unconsumed column names: archived
+2025-04-19 03:15:53,702 - admin_dashboard - ERROR - Health check error: argument 1 (impossible)
+2025-04-19 03:15:53,708 - admin_dashboard - ERROR - System health check error: argument 1 (impossible)
+2025-04-19 03:16:01,607 - admin_dashboard - ERROR - Resource monitoring error: 'Settings' object has no attribute 'MAX_DB_CONNECTIONS'
+2025-04-19 03:16:01,612 - admin_dashboard - ERROR - Resource monitoring error: 'Settings' object has no attribute 'MAX_DB_CONNECTIONS'
+2025-04-19 03:17:03,801 - admin_dashboard - INFO - Cleaned up 0 old notifications
+2025-04-19 03:18:07,466 - admin_dashboard - INFO - Cleaned up 0 old notifications
+2025-04-19 03:19:09,683 - admin_dashboard - INFO - Cleaned up 0 old notifications
+2025-04-19 03:20:11,902 - admin_dashboard - INFO - Cleaned up 0 old notifications
+2025-04-19 03:21:14,761 - admin_dashboard - INFO - Cleaned up 0 old notifications
+2025-04-19 03:22:16,870 - admin_dashboard - INFO - Cleaned up 0 old notifications
+2025-04-19 03:23:18,969 - admin_dashboard - INFO - Cleaned up 0 old notifications
+2025-04-19 03:24:21,193 - admin_dashboard - INFO - Cleaned up 0 old notifications
+2025-04-19 03:25:23,336 - admin_dashboard - INFO - Cleaned up 0 old notifications
+2025-04-19 03:26:25,771 - admin_dashboard - INFO - Cleaned up 0 old notifications
+2025-04-19 03:27:27,862 - admin_dashboard - INFO - Cleaned up 0 old notifications
+2025-04-19 03:28:29,967 - admin_dashboard - INFO - Cleaned up 0 old notifications
+2025-04-19 03:29:32,062 - admin_dashboard - INFO - Cleaned up 0 old notifications
+2025-04-19 03:30:34,158 - admin_dashboard - INFO - Cleaned up 0 old notifications
+2025-04-19 03:31:36,585 - admin_dashboard - INFO - Cleaned up 0 old notifications
+2025-04-19 03:32:38,674 - admin_dashboard - INFO - Cleaned up 0 old notifications
+2025-04-19 03:33:40,823 - admin_dashboard - INFO - Cleaned up 0 old notifications
+2025-04-19 03:34:42,931 - admin_dashboard - INFO - Cleaned up 0 old notifications
+2025-04-19 03:46:01,817 - admin_dashboard - WARNING - Redis connection failed for rate limiter, using in-memory fallback: Timeout connecting to server
+2025-04-19 03:46:04,024 - admin_dashboard - WARNING - Redis connection failed, using in-memory fallback: Timeout connecting to server
+2025-04-19 03:48:13,305 - admin_dashboard - WARNING - Redis connection failed for rate limiter, using in-memory fallback: Timeout connecting to server
+2025-04-19 03:48:15,467 - admin_dashboard - WARNING - Redis connection failed, using in-memory fallback: Timeout connecting to server
+2025-04-19 03:49:39,627 - admin_dashboard - WARNING - Redis connection failed for rate limiter, using in-memory fallback: Timeout connecting to server
+2025-04-19 03:49:41,857 - admin_dashboard - WARNING - Redis connection failed, using in-memory fallback: Timeout connecting to server
+2025-04-19 03:49:49,446 - admin_dashboard - WARNING - Redis connection failed for rate limiter, using in-memory fallback: Timeout connecting to server
+2025-04-19 03:49:51,626 - admin_dashboard - WARNING - Redis connection failed, using in-memory fallback: Timeout connecting to server
+2025-04-19 03:51:07,134 - admin_dashboard - WARNING - Redis connection failed for rate limiter, using in-memory fallback: Timeout connecting to server
+2025-04-19 03:51:09,333 - admin_dashboard - WARNING - Redis connection failed, using in-memory fallback: Timeout connecting to server
+2025-04-19 03:53:08,741 - admin_dashboard - WARNING - Redis connection failed for rate limiter, using in-memory fallback: Timeout connecting to server
+2025-04-19 03:53:10,983 - admin_dashboard - WARNING - Redis connection failed, using in-memory fallback: Timeout connecting to server
+2025-04-19 03:53:57,413 - admin_dashboard - WARNING - Redis connection failed for rate limiter, using in-memory fallback: Timeout connecting to server
+2025-04-19 03:53:59,638 - admin_dashboard - WARNING - Redis connection failed, using in-memory fallback: Timeout connecting to server
+2025-04-19 03:54:32,999 - admin_dashboard - WARNING - Redis connection failed for rate limiter, using in-memory fallback: Timeout connecting to server
+2025-04-19 03:54:35,355 - admin_dashboard - WARNING - Redis connection failed, using in-memory fallback: Timeout connecting to server
+2025-04-19 03:55:49,903 - admin_dashboard - WARNING - Redis connection failed for rate limiter, using in-memory fallback: Timeout connecting to server
+2025-04-19 03:55:52,166 - admin_dashboard - WARNING - Redis connection failed, using in-memory fallback: Timeout connecting to server
+2025-04-19 03:58:23,923 - admin_dashboard - WARNING - Redis connection failed for rate limiter, using in-memory fallback: Timeout connecting to server
+2025-04-19 03:58:26,256 - admin_dashboard - WARNING - Redis connection failed, using in-memory fallback: Timeout connecting to server
+2025-04-19 03:58:42,115 - admin_dashboard - WARNING - Redis connection failed for rate limiter, using in-memory fallback: Timeout connecting to server
+2025-04-19 03:58:44,334 - admin_dashboard - WARNING - Redis connection failed, using in-memory fallback: Timeout connecting to server
+2025-04-19 04:00:09,589 - admin_dashboard - WARNING - Redis connection failed for rate limiter, using in-memory fallback: Timeout connecting to server
+2025-04-19 04:00:12,082 - admin_dashboard - WARNING - Redis connection failed, using in-memory fallback: Timeout connecting to server
+2025-04-19 04:01:03,575 - admin_dashboard - WARNING - Redis connection failed for rate limiter, using in-memory fallback: Timeout connecting to server
+2025-04-19 04:01:05,799 - admin_dashboard - WARNING - Redis connection failed, using in-memory fallback: Timeout connecting to server
+2025-04-19 04:01:44,779 - admin_dashboard - WARNING - Redis connection failed for rate limiter, using in-memory fallback: Timeout connecting to server
+2025-04-19 04:01:47,120 - admin_dashboard - WARNING - Redis connection failed, using in-memory fallback: Timeout connecting to server
diff --git a/package.json b/package.json
new file mode 100644
index 0000000000000000000000000000000000000000..ce13b46a6abd70e34bad18f01444f0c8114ebcac
--- /dev/null
+++ b/package.json
@@ -0,0 +1,12 @@
+{
+ "name": "admin_bknd2",
+ "version": "1.0.0",
+ "main": "index.js",
+ "scripts": {
+ "test": "echo \"Error: no test specified\"; exit 1"
+ },
+ "keywords": [],
+ "author": "",
+ "license": "ISC",
+ "description": ""
+}
diff --git a/pytest.ini b/pytest.ini
new file mode 100644
index 0000000000000000000000000000000000000000..b6ddf91f797b48dfa3bb6b3a64d0a5f0025ac74e
--- /dev/null
+++ b/pytest.ini
@@ -0,0 +1,7 @@
+[pytest]
+asyncio_mode = auto
+testpaths = tests
+python_files = test_*.py
+python_classes = Test
+python_functions = test_*
+addopts = -v --tb=short --cov=app --cov-report=term-missing
\ No newline at end of file
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 0000000000000000000000000000000000000000..2d69aa3563d73c5dfd09040c26093545e01cfa21
Binary files /dev/null and b/requirements.txt differ
diff --git a/run_prod_tests.py b/run_prod_tests.py
new file mode 100644
index 0000000000000000000000000000000000000000..0d09ceab772cf1f24f61eddc1a76fb0183785c84
--- /dev/null
+++ b/run_prod_tests.py
@@ -0,0 +1,47 @@
+import os
+import sys
+import subprocess
+import time
+
def run_tests():
    """Run the production API auth tests, retrying on transient 503 failures.

    Sets up the prod test environment, invokes pytest in a subprocess with
    the same interpreter that is running this script, and retries up to
    ``max_retries`` times when the remote service reports
    "503 Service Unavailable".

    Returns:
        int: pytest's exit code from the first non-503 run, or 1 if every
        attempt failed because the service was unavailable.
    """
    # Set up environment variables for the test run.
    os.environ["TEST_ENV"] = "prod"
    os.environ["PYTHONPATH"] = os.path.dirname(os.path.abspath(__file__))
    os.environ["PROD_API_URL"] = "https://fred808-admin-desk.hf.space"

    # Retry mechanism for API availability.
    max_retries = 3
    retry_delay = 5  # seconds between attempts

    # Build the command once; it is identical for every attempt.
    # sys.executable (not a bare "python") guarantees we use the same
    # interpreter/venv that launched this script.
    cmd = [
        sys.executable, "-m", "pytest",
        "-v",
        "--capture=no",
        "--asyncio-mode=auto",  # better async handling
        "tests/api/test_auth.py",
    ]

    for attempt in range(max_retries):
        process = subprocess.run(cmd, text=True, capture_output=True)

        # Print output
        print(f"\nAttempt {attempt + 1} of {max_retries}")
        print("\nTest Output:")
        print(process.stdout)

        if process.stderr:
            print("\nErrors:")
            print(process.stderr)

        # Only retry when the failure looks like a transient 503; check both
        # streams, since pytest/plugins may report errors on stderr.
        if "503 Service Unavailable" not in process.stdout + process.stderr:
            return process.returncode

        # No point sleeping after the final attempt — there is no retry left.
        if attempt < max_retries - 1:
            print(f"\nService unavailable, retrying in {retry_delay} seconds...")
            time.sleep(retry_delay)

    return 1  # all retries exhausted with the service still unavailable


if __name__ == "__main__":
    sys.exit(run_tests())
\ No newline at end of file