Fred808 committed on
Commit 8dafdf7 · verified · 1 Parent(s): 902a7f2

Upload 32 files
app/core/__init__.py ADDED
File without changes
app/core/__pycache__/__init__.cpython-312.pyc ADDED
Binary file (173 Bytes)
 
app/core/__pycache__/config.cpython-312.pyc CHANGED
Binary files a/app/core/__pycache__/config.cpython-312.pyc and b/app/core/__pycache__/config.cpython-312.pyc differ
 
app/core/__pycache__/dependencies.cpython-312.pyc ADDED
Binary file (2.6 kB)
 
app/core/__pycache__/security.cpython-312.pyc ADDED
Binary file (1.72 kB)
 
app/core/auth.py CHANGED
@@ -1,25 +1,17 @@
- from datetime import datetime
- from fastapi import HTTPException, Security, status
- from fastapi.security import APIKeyHeader
- from ..core.config import settings
-
- # API Key header scheme for service token
- SERVICE_TOKEN_HEADER = APIKeyHeader(name="X-Service-Token", auto_error=False)
-
- async def verify_service_token(token: str = Security(SERVICE_TOKEN_HEADER)) -> bool:
-     """Verify the service token for internal service-to-service communication."""
-     if not token:
-         raise HTTPException(
-             status_code=status.HTTP_401_UNAUTHORIZED,
-             detail="Service token is required",
-             headers={"WWW-Authenticate": "Bearer"},
-         )
-
-     if token != settings.SERVICE_TOKEN:
-         raise HTTPException(
-             status_code=status.HTTP_401_UNAUTHORIZED,
-             detail="Invalid service token",
-             headers={"WWW-Authenticate": "Bearer"},
-         )
-
-     return True
+ from fastapi import Depends, HTTPException, status, Header
+ from typing import Optional
+ from .config import settings
+
+ async def verify_service_token(
+     x_service_token: Optional[str] = Header(None, alias="X-Service-Token")
+ ) -> None:
+     if not x_service_token:
+         raise HTTPException(
+             status_code=status.HTTP_401_UNAUTHORIZED,
+             detail="X-Service-Token header is missing",
+         )
+     if x_service_token != settings.SERVICE_TOKEN:
+         raise HTTPException(
+             status_code=status.HTTP_401_UNAUTHORIZED,
+             detail="Invalid service token",
+         )
app/core/config.py CHANGED
@@ -1,17 +1,40 @@
- from pydantic_settings import BaseSettings
-
- class Settings(BaseSettings):
-     PROJECT_NAME: str = "POS Backend"
-     API_V1_STR: str = "/api/v1"
-     SECRET_KEY: str = "your-secret-key-here"
-     ALGORITHM: str = "HS256"
-     ACCESS_TOKEN_EXPIRE_MINUTES: int = 30
-     REFRESH_TOKEN_EXPIRE_DAYS: int = 7
-     DATABASE_URL: str = "postgresql+asyncpg://postgres.juycnkjuzylnbruwaqmp:Lovyelias5584.@aws-0-eu-central-1.pooler.supabase.com:5432/postgres"
-     SERVICE_TOKEN: str = "6f9aee88d5b94b0190c317abcbf4e6e7834cc2c7f02e45693e123456789abcde" # Should be set in .env file
-
-
-     class Config:
-         env_file = ".env"
-
- settings = Settings()
+ from pydantic_settings import BaseSettings
+ from typing import ClassVar
+
+
+ class Settings(BaseSettings):
+     API_V1_STR: str = "/api/v1"
+     PROJECT_NAME: str = "Admin Dashboard"
+     VERSION: str = "1.0.0"
+
+     # Service Authentication
+     SERVICE_TOKEN: str = "6f9aee88d5b94b0190c317abcbf4e6e7834cc2c7f02e45693e123456789abcde"
+     POS_API_URL: str = "https://fred808-desk-back2.hf.space" # Default value, should be overridden in environment
+
+     # PostgreSQL Database settings
+     DATABASE_URL: ClassVar[str] = "postgresql+asyncpg://postgres.juycnkjuzylnbruwaqmp:Lovyelias5584.@aws-0-eu-central-1.pooler.supabase.com:5432/postgres"
+
+     # JWT Settings
+     SECRET_KEY: str = "your-secret-key-here"
+     ALGORITHM: str = "HS256"
+     ACCESS_TOKEN_EXPIRE_MINUTES: int = 30
+
+     # Redis settings
+     REDIS_HOST: str = "localhost"
+     REDIS_PORT: int = 6379
+
+     # Email settings
+     MAIL_USERNAME: str = "yungdml31@gmail.com"
+     MAIL_PASSWORD: str = ""
+     MAIL_FROM: str = "admin@angelo.com"
+     MAIL_PORT: int = 587
+     MAIL_SERVER: str = "smtp.gmail.com"
+
+     # Frontend URL
+     FRONTEND_URL: str = "http://localhost:3000"
+
+     class Config:
+         case_sensitive = True
+
+
+ settings = Settings()
app/core/dependencies.py CHANGED
@@ -0,0 +1,53 @@
+ from fastapi import Depends, HTTPException, status
+ from fastapi.security import OAuth2PasswordBearer
+ from sqlalchemy.ext.asyncio import AsyncSession
+ from sqlalchemy import select
+ from sqlalchemy.orm import selectinload
+ from jose import JWTError, jwt
+ from ..db.database import get_db
+ from ..db.models import User
+ from ..core.config import settings
+
+ oauth2_scheme = OAuth2PasswordBearer(tokenUrl=f"{settings.API_V1_STR}/auth/login")
+
+ async def get_current_user(
+     token: str = Depends(oauth2_scheme),
+     db: AsyncSession = Depends(get_db)
+ ):
+     credentials_exception = HTTPException(
+         status_code=status.HTTP_401_UNAUTHORIZED,
+         detail="Could not validate credentials",
+         headers={"WWW-Authenticate": "Bearer"},
+     )
+
+     try:
+         payload = jwt.decode(token, settings.SECRET_KEY, algorithms=[settings.ALGORITHM])
+         user_id: str = payload.get("sub")
+         if user_id is None:
+             raise credentials_exception
+     except JWTError:
+         raise credentials_exception
+
+     stmt = select(User).options(selectinload(User.roles)).where(User.id == int(user_id))
+     result = await db.execute(stmt)
+     user = result.scalar_one_or_none()
+
+     if user is None:
+         raise credentials_exception
+     return user
+
+ async def get_current_active_user(
+     current_user: User = Depends(get_current_user)
+ ):
+     if not current_user.is_active:
+         raise HTTPException(status_code=400, detail="Inactive user")
+     return current_user
+
+ async def get_current_superuser(
+     current_user: User = Depends(get_current_user)
+ ):
+     if not current_user.is_superuser:
+         raise HTTPException(
+             status_code=403, detail="The user doesn't have enough privileges"
+         )
+     return current_user
app/core/security.py ADDED
@@ -0,0 +1,23 @@
+ from datetime import datetime, timedelta
+ from typing import Any, Optional
+ from jose import jwt
+ from passlib.context import CryptContext
+ from .config import settings
+
+ pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")
+
+ def create_access_token(subject: Any, expires_delta: Optional[timedelta] = None) -> str:
+     if expires_delta:
+         expire = datetime.utcnow() + expires_delta
+     else:
+         expire = datetime.utcnow() + timedelta(minutes=settings.ACCESS_TOKEN_EXPIRE_MINUTES)
+
+     to_encode = {"exp": expire, "sub": str(subject)}
+     encoded_jwt = jwt.encode(to_encode, settings.SECRET_KEY, algorithm=settings.ALGORITHM)
+     return encoded_jwt
+
+ def verify_password(plain_password: str, hashed_password: str) -> bool:
+     return pwd_context.verify(plain_password, hashed_password)
+
+ def get_password_hash(password: str) -> str:
+     return pwd_context.hash(password)
app/db/__init__.py ADDED
File without changes
app/db/__pycache__/__init__.cpython-312.pyc ADDED
Binary file (171 Bytes)
 
app/db/__pycache__/database.cpython-312.pyc CHANGED
Binary files a/app/db/__pycache__/database.cpython-312.pyc and b/app/db/__pycache__/database.cpython-312.pyc differ
 
app/db/__pycache__/models.cpython-312.pyc ADDED
Binary file (10.8 kB)
 
app/db/__pycache__/schemas.cpython-312.pyc ADDED
Binary file (16.2 kB)
 
app/db/database.py CHANGED
@@ -1,83 +1,55 @@
from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession, async_sessionmaker
- from sqlalchemy.orm import DeclarativeBase
- from sqlalchemy import text
+ from sqlalchemy.orm import declarative_base
from ..core.config import settings
- import logging
- import ssl
- import platform
- import asyncio
+ import contextlib

- logger = logging.getLogger(__name__)
-
- def get_ssl_context():
-     """Create an SSL context for database connection."""
-     ssl_context = ssl.create_default_context()
-     ssl_context.check_hostname = False
-     ssl_context.verify_mode = ssl.CERT_NONE
-     return ssl_context
-
- # Configure Windows-specific event loop policy
- if platform.system() == 'Windows':
-     asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())
-
- # Create async engine with connection pooling
- engine = create_async_engine(
+ # Create async engine for FastAPI
+ async_engine = create_async_engine(
    settings.DATABASE_URL,
    echo=True,
-     pool_pre_ping=True,
-     pool_size=20,
-     max_overflow=10,
-     pool_timeout=30,
-     connect_args={
-         "ssl": get_ssl_context(),
-         "server_settings": {
-             "application_name": "pos_backend",
-             "statement_timeout": "60000", # 60 seconds
-         }
-     }
+     future=True,
+     pool_pre_ping=True
)

- # Create async session maker with pooling configuration
- async_session_maker = async_sessionmaker(
-     engine,
+ # Create async session factory
+ AsyncSessionLocal = async_sessionmaker(
+     bind=async_engine,
    class_=AsyncSession,
-     expire_on_commit=False,
-     autocommit=False,
-     autoflush=False
+     expire_on_commit=False
)

- class Base(DeclarativeBase):
-     pass
-
- async def verify_connection():
-     try:
-         async with engine.connect() as conn:
-             await conn.execute(text("SELECT 1"))
-             logger.info("Database connection verified successfully")
-             return True
-     except Exception as e:
-         logger.error(f"Database connection failed: {str(e)}")
-         raise
+ # Create declarative base for models
+ Base = declarative_base()

+ # Database dependency for FastAPI routes
async def get_db():
-     async with async_session_maker() as session:
+     async with AsyncSessionLocal() as session:
+         try:
+             yield session
+         finally:
+             await session.close()
+
+ # Database access for background tasks and services
+ class Database:
+     def __init__(self):
+         self._session_factory = AsyncSessionLocal
+
+     @contextlib.asynccontextmanager
+     async def session(self):
+         """Get a database session with automatic commit/rollback"""
+         session = self._session_factory()
        try:
-             await verify_connection()
            yield session
            await session.commit()
-         except Exception as e:
-             logger.error(f"Database session error: {str(e)}")
+         except:
            await session.rollback()
            raise
        finally:
            await session.close()

- async def init_db():
-     try:
-         await verify_connection()
-         async with engine.begin() as conn:
-             await conn.run_sync(Base.metadata.create_all)
-         logger.info("Database initialized successfully")
-     except Exception as e:
-         logger.error(f"Database initialization failed: {str(e)}")
-         raise
+     async def get_session(self):
+         """Get a session for manual management"""
+         return self._session_factory()
+
+ # Create singleton instance for database access
+ db = Database()
app/db/init_db.py ADDED
@@ -0,0 +1,77 @@
+ from sqlalchemy import create_engine
+ from sqlalchemy.orm import sessionmaker
+ from ..core.config import settings
+ from ..core.security import get_password_hash
+ from datetime import datetime
+ from .models import Base, User, Product
+ import asyncio
+
+ def init_db():
+     # Create synchronous engine for initialization
+     engine = create_engine(
+         settings.DATABASE_URL.replace("+asyncpg", ""),
+         echo=True
+     )
+
+     # Create all tables
+     Base.metadata.create_all(bind=engine)
+
+     # Create session
+     SessionLocal = sessionmaker(bind=engine)
+     session = SessionLocal()
+
+     try:
+         # Create default admin user if not exists
+         admin_user = session.query(User).filter_by(email="admin@example.com").first()
+         if not admin_user:
+             admin_user = User(
+                 email="admin@example.com",
+                 username="admin",
+                 full_name="System Administrator",
+                 hashed_password=get_password_hash("admin123"), # Change in production
+                 is_active=True,
+                 is_superuser=True,
+                 roles=["admin"],
+                 created_at=datetime.utcnow()
+             )
+             session.add(admin_user)
+             print("Created default admin user.")
+
+         # Create default product categories as products
+         categories = [
+             "Soups & Stews",
+             "Rice Dishes",
+             "Swallow & Fufu",
+             "Snacks & Small Chops",
+             "Protein & Meat",
+             "Drinks"
+         ]
+
+         for category in categories:
+             exists = session.query(Product).filter_by(name=category).first()
+             if not exists:
+                 product = Product(
+                     name=category,
+                     description=f"Category: {category}",
+                     price=0.0, # Category products have zero price
+                     category=category,
+                     inventory_count=0, # Categories don't have inventory
+                     seller_id=admin_user.id if admin_user else 1, # Link to admin user
+                     created_at=datetime.utcnow()
+                 )
+                 session.add(product)
+
+         print("Initialized product categories.")
+
+         # Commit changes
+         session.commit()
+
+     except Exception as e:
+         print(f"Error during initialization: {e}")
+         session.rollback()
+         raise
+     finally:
+         session.close()
+
+ if __name__ == "__main__":
+     init_db()
app/db/models.py CHANGED
@@ -0,0 +1,186 @@
+ from sqlalchemy import Column, Integer, String, Boolean, DateTime, Float, ForeignKey, ARRAY, JSON, Table
+ from sqlalchemy.orm import relationship, mapped_column, Mapped
+ from sqlalchemy.dialects.postgresql import JSONB
+ from datetime import datetime
+ from typing import List, Optional
+ from .database import Base
+
+ # Association tables for many-to-many relationships
+ user_roles = Table(
+     'user_roles',
+     Base.metadata,
+     Column('user_id', Integer, ForeignKey('users.id')),
+     Column('role_id', Integer, ForeignKey('roles.id'))
+ )
+
+ # Role model
+ class Role(Base):
+     __tablename__ = "roles"
+
+     id: Mapped[int] = mapped_column(primary_key=True)
+     name: Mapped[str] = mapped_column(String, unique=True, index=True)
+     description: Mapped[str] = mapped_column(String)
+     permissions: Mapped[List[str]] = mapped_column(ARRAY(String), default=list)
+     created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow)
+     updated_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow)
+
+     # Relationships
+     users = relationship("User", secondary=user_roles, back_populates="roles")
+
+ # Branch model
+ class Branch(Base):
+     __tablename__ = "branches"
+
+     id: Mapped[int] = mapped_column(primary_key=True)
+     name: Mapped[str] = mapped_column(String, unique=True, index=True)
+     address: Mapped[str] = mapped_column(String)
+     phone: Mapped[str] = mapped_column(String)
+     email: Mapped[str] = mapped_column(String)
+     is_active: Mapped[bool] = mapped_column(Boolean, default=True)
+     created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow)
+     updated_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow)
+
+     # Relationships
+     users = relationship("User", back_populates="branch")
+     products = relationship("Product", back_populates="branch")
+     orders = relationship("Order", back_populates="branch")
+
+ class User(Base):
+     __tablename__ = "users"
+
+     id: Mapped[int] = mapped_column(primary_key=True)
+     email: Mapped[str] = mapped_column(String, unique=True, index=True)
+     username: Mapped[str] = mapped_column(String, unique=True, index=True)
+     full_name: Mapped[str] = mapped_column(String)
+     hashed_password: Mapped[str] = mapped_column(String)
+     is_active: Mapped[bool] = mapped_column(Boolean, default=True)
+     is_superuser: Mapped[bool] = mapped_column(Boolean, default=False)
+     branch_id: Mapped[Optional[int]] = mapped_column(ForeignKey("branches.id"))
+     created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow)
+
+     # Update roles relationship to use many-to-many
+     roles = relationship("Role", secondary=user_roles, back_populates="users")
+
+     # Other relationships
+     branch = relationship("Branch", back_populates="users")
+     products = relationship("Product", back_populates="seller")
+     orders = relationship("Order", back_populates="customer")
+     notifications = relationship("Notification", back_populates="user")
+     sessions = relationship("Session", back_populates="user", cascade="all, delete-orphan")
+
+ class Product(Base):
+     __tablename__ = "products"
+
+     id: Mapped[int] = mapped_column(primary_key=True)
+     name: Mapped[str] = mapped_column(String, index=True)
+     description: Mapped[str] = mapped_column(String)
+     price: Mapped[float] = mapped_column(Float)
+     category: Mapped[str] = mapped_column(String, index=True)
+     inventory_count: Mapped[int] = mapped_column(Integer)
+     seller_id: Mapped[int] = mapped_column(ForeignKey("users.id"))
+     branch_id: Mapped[int] = mapped_column(ForeignKey("branches.id"))
+     created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow)
+     updated_at: Mapped[datetime] = mapped_column(
+         DateTime,
+         default=datetime.utcnow,
+         onupdate=datetime.utcnow
+     )
+
+     # Relationships
+     seller = relationship("User", back_populates="products")
+     branch = relationship("Branch", back_populates="products")
+     order_items = relationship("OrderItem", back_populates="product")
+
+ class Order(Base):
+     __tablename__ = "orders"
+
+     id: Mapped[int] = mapped_column(primary_key=True)
+     customer_id: Mapped[int] = mapped_column(ForeignKey("users.id"))
+     branch_id: Mapped[int] = mapped_column(ForeignKey("branches.id"))
+     total_amount: Mapped[float] = mapped_column(Float)
+     status: Mapped[str] = mapped_column(String)
+     created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow)
+     updated_at: Mapped[datetime] = mapped_column(
+         DateTime,
+         default=datetime.utcnow,
+         onupdate=datetime.utcnow
+     )
+
+     # Relationships
+     customer = relationship("User", back_populates="orders")
+     branch = relationship("Branch", back_populates="orders")
+     items = relationship("OrderItem", back_populates="order", cascade="all, delete-orphan")
+
+ class OrderItem(Base):
+     __tablename__ = "order_items"
+
+     id: Mapped[int] = mapped_column(primary_key=True)
+     order_id: Mapped[int] = mapped_column(ForeignKey("orders.id"))
+     product_id: Mapped[int] = mapped_column(ForeignKey("products.id"))
+     quantity: Mapped[int] = mapped_column(Integer)
+     price: Mapped[float] = mapped_column(Float)
+
+     # Relationships
+     order = relationship("Order", back_populates="items")
+     product = relationship("Product", back_populates="order_items")
+
+ class Notification(Base):
+     __tablename__ = "notifications"
+
+     id: Mapped[int] = mapped_column(primary_key=True)
+     user_id: Mapped[int] = mapped_column(ForeignKey("users.id"))
+     title: Mapped[str] = mapped_column(String)
+     message: Mapped[str] = mapped_column(String)
+     type: Mapped[str] = mapped_column(String)
+     data: Mapped[Optional[dict]] = mapped_column(JSONB)
+     created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow)
+     read: Mapped[bool] = mapped_column(Boolean, default=False)
+
+     # Relationship
+     user = relationship("User", back_populates="notifications")
+
+ class Event(Base):
+     __tablename__ = "events"
+
+     id: Mapped[int] = mapped_column(primary_key=True)
+     user_id: Mapped[int] = mapped_column(ForeignKey("users.id"))
+     title: Mapped[str] = mapped_column(String)
+     description: Mapped[str] = mapped_column(String)
+     start_time: Mapped[datetime] = mapped_column(DateTime)
+     end_time: Mapped[datetime] = mapped_column(DateTime)
+     attendees: Mapped[List[str]] = mapped_column(ARRAY(String), default=list)
+     is_all_day: Mapped[bool] = mapped_column(Boolean, default=False)
+     reminder_minutes: Mapped[int] = mapped_column(Integer)
+     status: Mapped[str] = mapped_column(String)
+     attendee_responses: Mapped[dict] = mapped_column(JSONB, default=dict)
+     created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow)
+     updated_at: Mapped[datetime] = mapped_column(
+         DateTime,
+         default=datetime.utcnow,
+         onupdate=datetime.utcnow
+     )
+     # Fields for recurring events
+     is_recurring: Mapped[bool] = mapped_column(Boolean, default=False)
+     recurrence_pattern: Mapped[Optional[str]] = mapped_column(String)
+     recurrence_group: Mapped[Optional[str]] = mapped_column(String)
+     recurrence_end_date: Mapped[Optional[datetime]] = mapped_column(DateTime)
+     parent_event_id: Mapped[Optional[int]] = mapped_column(Integer)
+     sequence_number: Mapped[Optional[int]] = mapped_column(Integer)
+     reminder_sent: Mapped[bool] = mapped_column(Boolean, default=False)
+
+     # Relationship
+     user = relationship("User")
+
+ class Session(Base):
+     __tablename__ = "sessions"
+
+     id: Mapped[int] = mapped_column(primary_key=True)
+     user_id: Mapped[int] = mapped_column(ForeignKey("users.id"))
+     session_token: Mapped[str] = mapped_column(String, unique=True, index=True)
+     last_activity: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow)
+     created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow)
+     expires_at: Mapped[datetime] = mapped_column(DateTime)
+     is_active: Mapped[bool] = mapped_column(Boolean, default=True)
+
+     # Relationship
+     user = relationship("User", back_populates="sessions")
app/db/schemas.py ADDED
@@ -0,0 +1,318 @@
1
+ from sqlalchemy.orm import validates
2
+ from sqlalchemy import event
3
+ from datetime import datetime
4
+ from typing import List, Optional, Dict
5
+ from pydantic import BaseModel, EmailStr, validator
6
+ from .models import User, Product, Order, Event, Notification, Session
7
+ import re
8
+
9
+ # Role schemas
10
+ class RoleBase(BaseModel):
11
+ name: str
12
+ description: str
13
+ permissions: List[str] = []
14
+
15
+ class RoleCreate(RoleBase):
16
+ pass
17
+
18
+ class RoleUpdate(RoleBase):
19
+ name: Optional[str] = None
20
+ description: Optional[str] = None
21
+ permissions: Optional[List[str]] = None
22
+
23
+ class RoleInDB(RoleBase):
24
+ id: int
25
+ created_at: datetime
26
+ updated_at: Optional[datetime] = None
27
+
28
+ class Config:
29
+ from_attributes = True
30
+
31
+ class BranchBase(BaseModel):
32
+ name: str
33
+ address: str
34
+ phone: str
35
+ email: EmailStr
36
+ is_active: bool = True
37
+
38
+ class BranchCreate(BranchBase):
39
+ pass
40
+
41
+ class BranchInDB(BranchBase):
42
+ id: int
43
+ created_at: datetime
44
+ updated_at: Optional[datetime] = None
45
+
46
+ class Config:
47
+ from_attributes = True
48
+
49
+ # Update User schemas
50
+ class UserBase(BaseModel):
51
+ email: EmailStr
52
+ username: str
53
+ full_name: str
54
+ is_active: bool = True
55
+ is_superuser: bool = False
56
+ branch_id: Optional[int] = None
57
+
58
+ class UserCreate(UserBase):
59
+ password: str
60
+ role_ids: Optional[List[int]] = None # IDs of roles to assign
61
+
62
+ class UserUpdate(BaseModel):
63
+ email: Optional[EmailStr] = None
64
+ username: Optional[str] = None
65
+ full_name: Optional[str] = None
66
+ is_active: Optional[bool] = None
67
+ is_superuser: Optional[bool] = None
68
+ password: Optional[str] = None
69
+ branch_id: Optional[int] = None
70
+ role_ids: Optional[List[int]] = None
71
+
72
+ class UserInDB(UserBase):
73
+ id: int
74
+ created_at: datetime
75
+ roles: List[RoleInDB]
76
+
77
+ class Config:
78
+ from_attributes = True
79
+
80
+ class ProductBase(BaseModel):
81
+ name: str
82
+ description: str
83
+ price: float
84
+ category: str
85
+ inventory_count: int
86
+ seller_id: int
87
+ branch_id: int
88
+
89
+ class ProductCreate(ProductBase):
90
+ pass
91
+
92
+ class ProductInDB(ProductBase):
93
+ id: int
94
+ created_at: datetime
95
+ updated_at: Optional[datetime] = None
96
+
97
+ class Config:
98
+ from_attributes = True
99
+
100
+ class OrderItemBase(BaseModel):
101
+ product_id: int
102
+ quantity: int
103
+ price: float
104
+
105
+ class OrderItemCreate(OrderItemBase):
106
+ pass
107
+
108
+ class OrderItemInDB(OrderItemBase):
109
+ id: int
110
+ order_id: int
111
+
112
+ class Config:
113
+ from_attributes = True
114
+
115
+ class OrderBase(BaseModel):
116
+ customer_id: int
117
+ branch_id: int
118
+ total_amount: float
119
+ status: str = "pending"
120
+ items: List[OrderItemCreate]
121
+
122
+ class OrderCreate(OrderBase):
123
+ pass
124
+
125
+ class OrderInDB(OrderBase):
126
+ id: int
127
+ created_at: datetime
128
+ updated_at: Optional[datetime] = None
129
+ items: List[OrderItemInDB]
130
+
131
+ class Config:
132
+ from_attributes = True
133
+
134
+ class NotificationBase(BaseModel):
135
+ user_id: int
136
+ title: str
137
+ message: str
138
+ type: str
139
+ data: Optional[dict] = None
140
+ read: bool = False
141
+
142
+ class NotificationCreate(NotificationBase):
143
+ pass
144
+
145
+ class NotificationInDB(NotificationBase):
146
+ id: int
147
+ created_at: datetime
148
+
149
+ class Config:
150
+ from_attributes = True
151
+
152
+ class EventBase(BaseModel):
153
+ title: str
154
+ description: str
155
+ start_time: datetime
156
+ end_time: datetime
157
+ is_all_day: bool = False
158
+ reminder_minutes: int = 30
159
+
160
+ @validator('end_time')
161
+ def end_time_after_start_time(cls, v, values):
162
+ if 'start_time' in values and v <= values['start_time']:
163
+ raise ValueError('end_time must be after start_time')
164
+ return v
165
+
166
+ @validator('reminder_minutes')
167
+ def valid_reminder_minutes(cls, v):
168
+ if v < 0:
169
+ raise ValueError('reminder_minutes cannot be negative')
170
+ return v
171
+
172
+ class EventCreate(EventBase):
173
+ attendees: List[str] = []
174
+
175
+ class EventUpdate(BaseModel):
176
+ title: Optional[str] = None
177
+ description: Optional[str] = None
178
+ start_time: Optional[datetime] = None
179
+ end_time: Optional[datetime] = None
180
+ is_all_day: Optional[bool] = None
181
+ reminder_minutes: Optional[int] = None
182
+ attendees: Optional[List[str]] = None
183
+
184
+ @validator('reminder_minutes')
185
+ def valid_reminder_minutes(cls, v):
186
+ if v is not None and v < 0:
187
+ raise ValueError('reminder_minutes cannot be negative')
188
+ return v
189
+
190
+ class EventInDB(EventBase):
191
+ id: int
192
+ user_id: int
193
+ attendees: List[str]
194
+ status: str
195
+ attendee_responses: Dict[str, str]
196
+ created_at: datetime
197
+ updated_at: Optional[datetime] = None
198
+ reminder_sent: bool = False
199
+ is_recurring: bool = False
200
+ recurrence_group: Optional[str] = None
201
+ parent_event_id: Optional[int] = None
202
+ sequence_number: Optional[int] = None
203
+
204
+ class Config:
205
+ from_attributes = True
206
+
207
+ class RecurringEventCreate(EventCreate):
208
+ recurrence_pattern: str
209
+ recurrence_end_date: Optional[datetime] = None
210
+
211
+ @validator('recurrence_pattern')
212
+ def valid_recurrence_pattern(cls, v):
213
+ valid_patterns = ['daily', 'weekly', 'monthly', 'yearly']
214
+ if v not in valid_patterns:
215
+ raise ValueError(f'recurrence_pattern must be one of: {", ".join(valid_patterns)}')
216
+ return v
217
+
218
+ @validator('recurrence_end_date')
219
+ def end_date_after_start_time(cls, v, values):
220
+ if v is not None and 'start_time' in values and v <= values['start_time']:
221
+ raise ValueError('recurrence_end_date must be after start_time')
222
+ return v
223
+
224
+ class RecurringEventUpdate(EventUpdate):
225
+ recurrence_pattern: Optional[str] = None
226
+ recurrence_end_date: Optional[datetime] = None
227
+
228
+ @validator('recurrence_pattern')
229
+ def valid_recurrence_pattern(cls, v):
230
+ if v is not None:
231
+ valid_patterns = ['daily', 'weekly', 'monthly', 'yearly']
232
+ if v not in valid_patterns:
233
+ raise ValueError(f'recurrence_pattern must be one of: {", ".join(valid_patterns)}')
234
+ return v
235
+
236
+ class SessionBase(BaseModel):
237
+ user_id: int
238
+ session_token: str
239
+ expires_at: datetime
240
+ is_active: bool = True
241
+
242
+ class SessionCreate(SessionBase):
243
+ pass
244
+
245
+ class SessionUpdate(BaseModel):
246
+ last_activity: Optional[datetime] = None
247
+ expires_at: Optional[datetime] = None
248
+ is_active: Optional[bool] = None
249
+
250
+ class SessionInDB(SessionBase):
251
+ id: int
252
+ created_at: datetime
253
+ last_activity: datetime
254
+
255
+ class Config:
256
+ from_attributes = True
257
+
258
+ class LoginData(BaseModel):
259
+ email: str
260
+ password: str
261
+
262
+ @validates('email')
263
+ def validate_email(self, key, email):
264
+ if not re.match(r'^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}$', email):
265
+ raise ValueError('Invalid email address')
266
+ return email
267
+
268
+ @validates('username')
269
+ def validate_username(self, key, username):
270
+ if len(username) < 3:
271
+ raise ValueError('Username must be at least 3 characters long')
272
+ return username
273
+
274
+ @validates('inventory_count')
275
+ def validate_inventory(self, key, count):
276
+ if count < 0:
277
+ raise ValueError('Inventory count cannot be negative')
278
+ return count
279
+
280
+ @validates('price')
281
+ def validate_price(self, key, price):
282
+ if price < 0:
283
+ raise ValueError('Price cannot be negative')
284
+ return price
285
+
286
+ # Event listeners for automatic timestamps
287
+ @event.listens_for(Product, 'before_insert')
288
+ def set_created_at(mapper, connection, target):
289
+ target.created_at = datetime.utcnow()
290
+ target.updated_at = datetime.utcnow()
291
+
292
+ @event.listens_for(Product, 'before_update')
293
+ def set_updated_at(mapper, connection, target):
294
+ target.updated_at = datetime.utcnow()
295
+
296
+ @event.listens_for(Order, 'before_insert')
297
+ def set_order_created_at(mapper, connection, target):
298
+ target.created_at = datetime.utcnow()
299
+ target.updated_at = datetime.utcnow()
300
+
301
+ @event.listens_for(Order, 'before_update')
302
+ def set_order_updated_at(mapper, connection, target):
303
+ target.updated_at = datetime.utcnow()
304
+
305
+ @event.listens_for(Event, 'before_insert')
306
+ def set_event_created_at(mapper, connection, target):
307
+ target.created_at = datetime.utcnow()
308
+ target.updated_at = datetime.utcnow()
309
+
310
+ @event.listens_for(Event, 'before_update')
311
+ def set_event_updated_at(mapper, connection, target):
312
+ target.updated_at = datetime.utcnow()
313
+
314
+ # Add validators to models
315
+ User.validate_email = validate_email
316
+ User.validate_username = validate_username
317
+ Product.validate_inventory = validate_inventory
318
+ Product.validate_price = validate_price
app/schemas/__pycache__/events.cpython-312.pyc ADDED
Binary file (4.93 kB)
 
app/schemas/events.py ADDED
@@ -0,0 +1,87 @@
+ from pydantic import BaseModel, validator
+ from typing import List, Optional, Dict, Any
+ from datetime import datetime
+
+ class EventBase(BaseModel):
+     title: str
+     description: str
+     start_time: datetime
+     end_time: datetime
+     is_all_day: bool = False
+     reminder_minutes: int = 30
+
+     @validator('end_time')
+     def end_time_after_start_time(cls, v, values):
+         if 'start_time' in values and v <= values['start_time']:
+             raise ValueError('end_time must be after start_time')
+         return v
+
+     @validator('reminder_minutes')
+     def valid_reminder_minutes(cls, v):
+         if v < 0:
+             raise ValueError('reminder_minutes cannot be negative')
+         return v
+
+ class EventCreate(EventBase):
+     attendees: List[str] = []
+
+ class EventUpdate(BaseModel):
+     title: Optional[str] = None
+     description: Optional[str] = None
+     start_time: Optional[datetime] = None
+     end_time: Optional[datetime] = None
+     is_all_day: Optional[bool] = None
+     reminder_minutes: Optional[int] = None
+     attendees: Optional[List[str]] = None
+
+     @validator('reminder_minutes')
+     def valid_reminder_minutes(cls, v):
+         if v is not None and v < 0:
+             raise ValueError('reminder_minutes cannot be negative')
+         return v
+
+ class EventInDB(EventBase):
+     id: str
+     user_id: str
+     attendees: List[str]
+     status: str
+     attendee_responses: Dict[str, str]
+     created_at: datetime
+     updated_at: Optional[datetime] = None
+     reminder_sent: bool = False
+     is_recurring: bool = False
+     recurrence_group: Optional[str] = None
+     parent_event_id: Optional[str] = None
+     sequence_number: Optional[int] = None
+
+     class Config:
+         orm_mode = True
+
+ class RecurringEventCreate(EventCreate):
+     recurrence_pattern: str
+     recurrence_end_date: Optional[datetime] = None
+
+     @validator('recurrence_pattern')
+     def valid_recurrence_pattern(cls, v):
+         valid_patterns = ['daily', 'weekly', 'monthly', 'yearly']
+         if v not in valid_patterns:
+             raise ValueError(f'recurrence_pattern must be one of: {", ".join(valid_patterns)}')
+         return v
+
+     @validator('recurrence_end_date')
+     def end_date_after_start_time(cls, v, values):
+         if v is not None and 'start_time' in values and v <= values['start_time']:
+             raise ValueError('recurrence_end_date must be after start_time')
+         return v
+
+ class RecurringEventUpdate(EventUpdate):
+     recurrence_pattern: Optional[str] = None
+     recurrence_end_date: Optional[datetime] = None
+
+     @validator('recurrence_pattern')
+     def valid_recurrence_pattern(cls, v):
+         if v is not None:
+             valid_patterns = ['daily', 'weekly', 'monthly', 'yearly']
+             if v not in valid_patterns:
+                 raise ValueError(f'recurrence_pattern must be one of: {", ".join(valid_patterns)}')
+         return v
app/services/__pycache__/backup.cpython-312.pyc ADDED
Binary file (9.58 kB)
 
app/services/__pycache__/maintenance.cpython-312.pyc ADDED
Binary file (13.2 kB)
 
app/services/__pycache__/notifications.cpython-312.pyc ADDED
Binary file (4.28 kB)
 
app/services/__pycache__/websocket.cpython-312.pyc ADDED
Binary file (3.55 kB)
 
app/services/analytics.py ADDED
@@ -0,0 +1,131 @@
1
+ from datetime import datetime, timedelta
2
+ from ..db.database import db
3
+ from ..utils.cache import cache
4
+ from typing import Dict, List, Any
5
+
6
+ class AnalyticsService:
7
+ @staticmethod
8
+ async def get_sales_analytics(start_date: datetime, end_date: datetime) -> Dict[str, Any]:
9
+ cache_key = f"sales_analytics:{start_date.date()}:{end_date.date()}"
10
+ cached_data = await cache.get_cache(cache_key)
11
+ if cached_data:
12
+ return cached_data
13
+
14
+ pipeline = [
15
+ {
16
+ "$match": {
17
+ "created_at": {
18
+ "$gte": start_date,
19
+ "$lte": end_date
20
+ },
21
+ "status": {"$in": ["completed", "delivered"]}
22
+ }
23
+ },
24
+ {
25
+ "$group": {
26
+ "_id": {"$dateToString": {"format": "%Y-%m-%d", "date": "$created_at"}},
27
+ "total_sales": {"$sum": "$total_amount"},
28
+ "order_count": {"$sum": 1}
29
+ }
30
+ },
31
+ {"$sort": {"_id": 1}}
32
+ ]
33
+
34
+ sales_data = await db.db["orders"].aggregate(pipeline).to_list(None)
35
+ result = {
36
+ "daily_sales": sales_data,
37
+ "total_revenue": sum(day["total_sales"] for day in sales_data),
38
+ "total_orders": sum(day["order_count"] for day in sales_data),
39
+ "average_order_value": sum(day["total_sales"] for day in sales_data) /
40
+ (sum(day["order_count"] for day in sales_data) or 1)
41
+ }
42
+
43
+ await cache.set_cache(cache_key, result, expire=3600) # Cache for 1 hour
44
+ return result
45
+
46
+ @staticmethod
47
+ async def get_product_analytics() -> Dict[str, Any]:
48
+ cache_key = "product_analytics"
49
+ cached_data = await cache.get_cache(cache_key)
50
+ if cached_data:
51
+ return cached_data
52
+
53
+ pipeline = [
54
+ {
55
+ "$unwind": "$products"
56
+ },
57
+ {
58
+ "$group": {
59
+ "_id": "$products.product_id",
60
+ "total_quantity": {"$sum": "$products.quantity"},
61
+ "total_revenue": {
62
+ "$sum": {
63
+ "$multiply": ["$products.price", "$products.quantity"]
64
+ }
65
+ }
66
+ }
67
+ },
68
+ {
69
+ "$sort": {"total_revenue": -1}
70
+ },
71
+ {
72
+ "$limit": 10
73
+ }
74
+ ]
75
+
76
+ top_products = await db.db["orders"].aggregate(pipeline).to_list(None)
77
+
78
+ # Get product details
79
+ for product in top_products:
80
+ product_detail = await db.db["products"].find_one({"_id": product["_id"]})
81
+ if product_detail:
82
+ product["name"] = product_detail["name"]
83
+ product["category"] = product_detail["category"]
84
+
85
+ result = {
86
+ "top_products": top_products,
87
+ "total_products": await db.db["products"].count_documents({}),
88
+ "low_stock_products": await db.db["products"].count_documents({"inventory_count": {"$lt": 10}})
89
+ }
90
+
91
+ await cache.set_cache(cache_key, result, expire=3600) # Cache for 1 hour
92
+ return result
93
+
94
+ @staticmethod
95
+ async def get_customer_analytics() -> Dict[str, Any]:
96
+ cache_key = "customer_analytics"
97
+ cached_data = await cache.get_cache(cache_key)
98
+ if cached_data:
99
+ return cached_data
100
+
101
+ pipeline = [
102
+ {
103
+ "$group": {
104
+ "_id": "$customer_id",
105
+ "total_orders": {"$sum": 1},
106
+ "total_spent": {"$sum": "$total_amount"},
107
+ "last_order": {"$max": "$created_at"}
108
+ }
109
+ },
110
+ {
111
+ "$sort": {"total_spent": -1}
112
+ }
113
+ ]
114
+
115
+ customer_data = await db.db["orders"].aggregate(pipeline).to_list(None)
116
+
117
+ result = {
118
+ "total_customers": len(customer_data),
119
+ "top_customers": customer_data[:10],
120
+ "average_customer_value": sum(c["total_spent"] for c in customer_data) / (len(customer_data) or 1),
121
+ "customer_segments": {
122
+ "high_value": len([c for c in customer_data if c["total_spent"] > 1000]),
123
+ "medium_value": len([c for c in customer_data if 500 <= c["total_spent"] <= 1000]),
124
+ "low_value": len([c for c in customer_data if c["total_spent"] < 500])
125
+ }
126
+ }
127
+
128
+ await cache.set_cache(cache_key, result, expire=3600) # Cache for 1 hour
129
+ return result
130
+
131
+ analytics = AnalyticsService()
app/services/backup.py ADDED
@@ -0,0 +1,179 @@
1
+ import os
2
+ import shutil
3
+ import json
4
+ import tarfile
5
+ from datetime import datetime
6
+ from typing import Dict, Any, List
7
+ from bson import ObjectId
8
+ from ..db.database import db
9
+ from ..utils.logger import logger
10
+
11
+ class BackupService:
12
+ def __init__(self):
13
+ self.backup_dir = "backups"
14
+ self._ensure_backup_dir()
15
+
16
+ def _ensure_backup_dir(self):
17
+ """Ensure backup directory exists"""
18
+ if not os.path.exists(self.backup_dir):
19
+ os.makedirs(self.backup_dir)
20
+
21
+ async def create_backup(self, include_files: bool = True) -> Dict[str, Any]:
22
+ """Create a new system backup"""
23
+ try:
24
+ timestamp = datetime.utcnow().strftime("%Y%m%d_%H%M%S")
25
+ backup_id = str(ObjectId())
26
+ backup_name = f"backup_{timestamp}_{backup_id}"
27
+ backup_path = os.path.join(self.backup_dir, backup_name)
28
+
29
+ # Create backup directory
30
+ os.makedirs(backup_path, exist_ok=True)
31
+
32
+ # Backup database collections
33
+ db_backup = {}
34
+ for collection in await db.db.list_collection_names():
35
+ docs = await db.db[collection].find().to_list(None)
36
+ db_backup[collection] = [
37
+ {**doc, "_id": str(doc["_id"])}
38
+ for doc in docs
39
+ ]
40
+
41
+ # Save database backup
42
+ with open(os.path.join(backup_path, "database.json"), "w") as f:
43
+ json.dump(db_backup, f, default=str)
44
+
45
+ # Backup files if requested
46
+ if include_files:
47
+ uploads_dir = "uploads"
48
+ if os.path.exists(uploads_dir):
49
+ shutil.copytree(
50
+ uploads_dir,
51
+ os.path.join(backup_path, "uploads"),
52
+ dirs_exist_ok=True
53
+ )
54
+
55
+ # Create archive
56
+ archive_path = f"{backup_path}.tar.gz"
57
+ with tarfile.open(archive_path, "w:gz") as tar:
58
+ tar.add(backup_path, arcname=os.path.basename(backup_path))
59
+
60
+ # Clean up temporary directory
61
+ shutil.rmtree(backup_path)
62
+
63
+ # Record backup in database
64
+ backup_info = {
65
+ "_id": backup_id,
66
+ "filename": f"{backup_name}.tar.gz",
67
+ "path": archive_path,
68
+ "created_at": datetime.utcnow(),
69
+ "size": os.path.getsize(archive_path),
70
+ "includes_files": include_files
71
+ }
72
+
73
+ await db.db["backup_history"].insert_one(backup_info)
74
+
75
+ return {
76
+ "id": backup_id,
77
+ "path": archive_path,
78
+ "size": backup_info["size"],
79
+ "created_at": backup_info["created_at"]
80
+ }
81
+
82
+ except Exception as e:
83
+ logger.error(f"Backup creation failed: {str(e)}")
84
+ raise
85
+
86
+ async def restore_backup(self, backup_path: str) -> Dict[str, Any]:
87
+ """Restore system from a backup"""
88
+ try:
89
+ if not os.path.exists(backup_path):
90
+ raise FileNotFoundError("Backup file not found")
91
+
92
+ # Create temporary restoration directory
93
+ restore_dir = f"restore_{datetime.utcnow().strftime('%Y%m%d_%H%M%S')}"
94
+ os.makedirs(restore_dir, exist_ok=True)
95
+
96
+ # Extract archive
97
+ with tarfile.open(backup_path, "r:gz") as tar:
98
+ tar.extractall(restore_dir)
99
+
100
+ backup_contents = os.listdir(restore_dir)[0]
101
+ backup_root = os.path.join(restore_dir, backup_contents)
102
+
103
+ # Restore database
104
+ with open(os.path.join(backup_root, "database.json"), "r") as f:
105
+ db_backup = json.load(f)
106
+
107
+ # Clear existing collections
108
+ for collection in await db.db.list_collection_names():
109
+ await db.db[collection].delete_many({})
110
+
111
+ # Restore collections
112
+ for collection, docs in db_backup.items():
113
+ if docs:
114
+ # Convert string IDs back to ObjectId
115
+ for doc in docs:
116
+ doc["_id"] = ObjectId(doc["_id"])
117
+ await db.db[collection].insert_many(docs)
118
+
119
+ # Restore files if present
120
+ uploads_source = os.path.join(backup_root, "uploads")
121
+ if os.path.exists(uploads_source):
122
+ if os.path.exists("uploads"):
123
+ shutil.rmtree("uploads")
124
+ shutil.copytree(uploads_source, "uploads")
125
+
126
+ # Clean up
127
+ shutil.rmtree(restore_dir)
128
+
129
+ return {
130
+ "success": True,
131
+ "collections_restored": len(db_backup),
132
+ "files_restored": os.path.exists(uploads_source)
133
+ }
134
+
135
+ except Exception as e:
136
+ logger.error(f"Backup restoration failed: {str(e)}")
137
+ raise
138
+ finally:
139
+ if os.path.exists(restore_dir):
140
+ shutil.rmtree(restore_dir)
141
+
142
+ async def list_backups(self) -> List[Dict[str, Any]]:
143
+ """List all available backups"""
144
+ try:
145
+ backups = await db.db["backup_history"].find().sort("created_at", -1).to_list(None)
146
+ return [
147
+ {
148
+ "id": str(backup["_id"]),
149
+ "filename": backup["filename"],
150
+ "created_at": backup["created_at"],
151
+ "size": backup["size"],
152
+ "includes_files": backup["includes_files"]
153
+ }
154
+ for backup in backups
155
+ ]
156
+ except Exception as e:
157
+ logger.error(f"Failed to list backups: {str(e)}")
158
+ raise
159
+
160
+ async def delete_backup(self, backup_id: str) -> bool:
161
+ """Delete a backup"""
162
+ try:
163
+ backup = await db.db["backup_history"].find_one({"_id": backup_id})
164
+ if not backup:
165
+ return False
166
+
167
+ # Delete the physical backup file
168
+ if os.path.exists(backup["path"]):
169
+ os.remove(backup["path"])
170
+
171
+ # Remove from database
172
+ await db.db["backup_history"].delete_one({"_id": backup_id})
173
+ return True
174
+
175
+ except Exception as e:
176
+ logger.error(f"Failed to delete backup: {str(e)}")
177
+ raise
178
+
179
+ backup = BackupService()
app/services/calendar.py ADDED
@@ -0,0 +1,215 @@
1
+ from datetime import datetime, timedelta
2
+ from typing import List, Dict, Any, Optional
3
+ from bson import ObjectId
4
+ from ..db.database import db
5
+ from ..utils.cache import cache
6
+ from ..services.notifications import notifications
7
+
8
+ class CalendarService:
9
+ async def create_event(
10
+ self,
11
+ user_id: str,
12
+ title: str,
13
+ description: str,
14
+ start_time: datetime,
15
+ end_time: datetime,
16
+ attendees: List[str] = None,
17
+ is_all_day: bool = False,
18
+ reminder_minutes: int = 30
19
+ ) -> Dict[str, Any]:
20
+ """Create a new calendar event"""
21
+ event = {
22
+ "user_id": user_id,
23
+ "title": title,
24
+ "description": description,
25
+ "start_time": start_time,
26
+ "end_time": end_time,
27
+ "attendees": attendees or [],
28
+ "is_all_day": is_all_day,
29
+ "reminder_minutes": reminder_minutes,
30
+ "status": "scheduled",
31
+ "created_at": datetime.utcnow()
32
+ }
33
+
34
+ result = await db.db["events"].insert_one(event)
35
+ event["_id"] = result.inserted_id
36
+
37
+ # Clear cache for affected users
38
+ cache_keys = [f"user_events:{user_id}"]
39
+ for attendee in attendees or []:
40
+ cache_keys.append(f"user_events:{attendee}")
41
+
42
+ for key in cache_keys:
43
+ await cache.delete_cache(key)
44
+
45
+ # Notify attendees
46
+ if attendees:
47
+ for attendee in attendees:
48
+ await notifications.create_notification(
49
+ user_id=attendee,
50
+ title=f"New Event Invitation: {title}",
51
+ message=f"You have been invited to an event: {title}",
52
+ notification_type="event_invitation",
53
+ data={"event_id": str(result.inserted_id)}
54
+ )
55
+
56
+ return event
57
+
58
+ async def get_user_events(
59
+ self,
60
+ user_id: str,
61
+ start_date: datetime,
62
+ end_date: datetime,
63
+ include_attendee_events: bool = True
64
+ ) -> List[Dict[str, Any]]:
65
+ """Get events for a user within a date range"""
66
+ cache_key = f"user_events:{user_id}:{start_date.date()}:{end_date.date()}"
67
+ cached = await cache.get_cache(cache_key)
68
+ if cached:
69
+ return cached
70
+
71
+ query = {
72
+ "$or": [
73
+ {"user_id": user_id}, # Events created by user
74
+ {"attendees": user_id} if include_attendee_events else {"_id": None}
75
+ ],
76
+ "start_time": {"$gte": start_date},
77
+ "end_time": {"$lte": end_date}
78
+ }
79
+
80
+ cursor = db.db["events"].find(query).sort("start_time", 1)
81
+ events = await cursor.to_list(None)
82
+
83
+ await cache.set_cache(cache_key, events, expire=300) # Cache for 5 minutes
84
+ return events
85
+
86
+ async def update_event(
87
+ self,
88
+ event_id: str,
89
+ user_id: str,
90
+ update_data: Dict[str, Any]
91
+ ) -> Optional[Dict[str, Any]]:
92
+ """Update an event"""
93
+ if not ObjectId.is_valid(event_id):
94
+ return None
95
+
96
+ event = await db.db["events"].find_one({
97
+ "_id": ObjectId(event_id),
98
+ "user_id": user_id # Only creator can update
99
+ })
100
+
101
+ if not event:
102
+ return None
103
+
104
+ update_data["updated_at"] = datetime.utcnow()
105
+
106
+ await db.db["events"].update_one(
107
+ {"_id": ObjectId(event_id)},
108
+ {"$set": update_data}
109
+ )
110
+
111
+ # Clear cache for affected users
112
+ cache_keys = [f"user_events:{user_id}"]
113
+ for attendee in event.get("attendees", []):
114
+ cache_keys.append(f"user_events:{attendee}")
115
+
116
+ for key in cache_keys:
117
+ await cache.delete_cache(key)
118
+
119
+ # Notify attendees of changes
120
+ if "start_time" in update_data or "end_time" in update_data:
121
+ for attendee in event.get("attendees", []):
122
+ await notifications.create_notification(
123
+ user_id=attendee,
124
+ title=f"Event Updated: {event['title']}",
125
+ message=f"An event you're attending has been updated",
126
+ notification_type="event_update",
127
+ data={"event_id": event_id}
128
+ )
129
+
130
+ return await db.db["events"].find_one({"_id": ObjectId(event_id)})
131
+
132
+ async def delete_event(self, event_id: str, user_id: str) -> bool:
133
+ """Delete an event"""
134
+ if not ObjectId.is_valid(event_id):
135
+ return False
136
+
137
+ event = await db.db["events"].find_one({
138
+ "_id": ObjectId(event_id),
139
+ "user_id": user_id # Only creator can delete
140
+ })
141
+
142
+ if not event:
143
+ return False
144
+
145
+ result = await db.db["events"].delete_one({"_id": ObjectId(event_id)})
146
+
147
+ if result.deleted_count > 0:
148
+ # Clear cache for affected users
149
+ cache_keys = [f"user_events:{user_id}"]
150
+ for attendee in event.get("attendees", []):
151
+ cache_keys.append(f"user_events:{attendee}")
152
+ # Notify attendees
153
+ await notifications.create_notification(
154
+ user_id=attendee,
155
+ title=f"Event Cancelled: {event['title']}",
156
+ message=f"An event you were attending has been cancelled",
157
+ notification_type="event_cancellation",
158
+ data={"event_id": event_id}
159
+ )
160
+
161
+ for key in cache_keys:
162
+ await cache.delete_cache(key)
163
+
164
+ return True
165
+ return False
166
+
167
+ async def respond_to_event(
168
+ self,
169
+ event_id: str,
170
+ user_id: str,
171
+ response: str
172
+ ) -> bool:
173
+ """Respond to an event invitation"""
174
+ if not ObjectId.is_valid(event_id):
175
+ return False
176
+
177
+ valid_responses = ["accepted", "declined", "maybe"]
178
+ if response not in valid_responses:
179
+ return False
180
+
181
+ event = await db.db["events"].find_one({
182
+ "_id": ObjectId(event_id),
183
+ "attendees": user_id
184
+ })
185
+
186
+ if not event:
187
+ return False
188
+
189
+ # Update response in attendee list
190
+ await db.db["events"].update_one(
191
+ {"_id": ObjectId(event_id)},
192
+ {
193
+ "$set": {
194
+ f"attendee_responses.{user_id}": response,
195
+ "updated_at": datetime.utcnow()
196
+ }
197
+ }
198
+ )
199
+
200
+ # Notify event creator
201
+ await notifications.create_notification(
202
+ user_id=event["user_id"],
203
+ title=f"Event Response: {event['title']}",
204
+ message=f"An attendee has {response} your event",
205
+ notification_type="event_response",
206
+ data={
207
+ "event_id": event_id,
208
+ "responder": user_id,
209
+ "response": response
210
+ }
211
+ )
212
+
213
+ return True
214
+
215
+ calendar = CalendarService()
app/services/maintenance.py ADDED
@@ -0,0 +1,217 @@
+ import os
+ import shutil
+ import psutil
+ from datetime import datetime, timedelta
+ from typing import Dict, Any, Optional
+ from sqlalchemy import select, delete, update, func, text
+ from ..db.database import db
+ from ..utils.logger import logger
+ from ..core.config import settings
+ from ..services.websocket import create_and_broadcast_notification
+ from ..db.models import User, Order, Notification, Session
+ 
+ class MaintenanceService:
+     async def cleanup_expired_sessions(self) -> int:
+         """Delete sessions whose last activity is older than seven days"""
+         try:
+             cutoff = datetime.utcnow() - timedelta(days=7)
+             async with db.session() as session:
+                 stmt = delete(Session).where(Session.last_activity < cutoff)
+                 result = await session.execute(stmt)
+                 await session.commit()
+                 return result.rowcount
+         except Exception as e:
+             logger.error(f"Error cleaning up sessions: {str(e)}")
+             return 0
+ 
+     async def archive_old_data(self) -> Optional[Dict[str, int]]:
+         """Flag completed orders and read notifications older than one year as archived"""
+         try:
+             cutoff = datetime.utcnow() - timedelta(days=365)
+             archived = {}
+ 
+             async with db.session() as session:
+                 # Archive old orders
+                 order_stmt = update(Order).where(
+                     Order.created_at < cutoff,
+                     Order.status.in_(["completed", "cancelled"])
+                 ).values(archived=True)
+                 order_result = await session.execute(order_stmt)
+                 archived["orders"] = order_result.rowcount
+ 
+                 # Archive old notifications
+                 notif_stmt = update(Notification).where(
+                     Notification.created_at < cutoff,
+                     Notification.read.is_(True)
+                 ).values(archived=True)
+                 notif_result = await session.execute(notif_stmt)
+                 archived["notifications"] = notif_result.rowcount
+ 
+                 await session.commit()
+                 return archived
+ 
+         except Exception as e:
+             logger.error(f"Error archiving old data: {str(e)}")
+             return None
+ 
+     async def check_system_health(self) -> Dict[str, Any]:
+         """Check database connectivity and disk usage"""
+         try:
+             async with db.session() as session:
+                 # Check database connection by running a simple query
+                 await session.execute(select(func.now()))
+ 
+                 # Get disk usage for the root volume (via psutil)
+                 disk = psutil.disk_usage('/')
+                 total_space = disk.total / (1024 * 1024 * 1024)  # GB
+                 free_space = disk.free / (1024 * 1024 * 1024)  # GB
+ 
+                 health_data = {
+                     "status": "healthy",
+                     "database": {
+                         "connected": True
+                     },
+                     "disk": {
+                         "total_gb": total_space,
+                         "free_gb": free_space,
+                         "usage_percent": disk.percent
+                     },
+                     "timestamp": datetime.utcnow()
+                 }
+ 
+                 # Send alert if disk space is low
+                 if free_space < 5:  # Less than 5GB free
+                     await create_and_broadcast_notification(
+                         user_id="admin",
+                         title="Low Disk Space Alert",
+                         message=f"Server is running low on disk space. Only {free_space:.2f}GB remaining.",
+                         notification_type="system_alert",
+                         data={"free_space_gb": free_space}
+                     )
+ 
+                 return health_data
+ 
+         except Exception as e:
+             logger.error(f"Health check error: {str(e)}")
+             return {"status": "unhealthy", "error": str(e)}
+ 
+     async def monitor_system_resources(self) -> Dict[str, Any]:
+         """Monitor CPU, memory and open network connections"""
+         try:
+             # Approximate active connections from this process's open inet sockets.
+             # psutil's open_files() does not report sockets, so Process.connections()
+             # is used instead.
+             process = psutil.Process()
+             connections = len(process.connections(kind="inet"))
+ 
+             resources = {
+                 "database": {
+                     "connections": connections,
+                 },
+                 "system": {
+                     "cpu_percent": psutil.cpu_percent(),
+                     "memory_percent": psutil.virtual_memory().percent
+                 },
+                 "timestamp": datetime.utcnow()
+             }
+ 
+             # Alert if the connection count approaches the configured limit
+             # (MAX_DB_CONNECTIONS is expected to be defined in Settings)
+             if connections > settings.MAX_DB_CONNECTIONS * 0.9:
+                 await create_and_broadcast_notification(
+                     user_id="admin",
+                     title="High Database Connections",
+                     message=f"Database has {connections} active connections",
+                     notification_type="system_alert",
+                     data={"connections": connections}
+                 )
+ 
+             return resources
+ 
+         except Exception as e:
+             logger.error(f"Resource monitoring error: {str(e)}")
+             return {"error": str(e)}
+ 
+     async def perform_database_maintenance(self) -> Dict[str, Any]:
+         """Perform database maintenance tasks"""
+         try:
+             async with db.session() as session:
+                 # Run ANALYZE on major tables (raw SQL must be wrapped in text())
+                 for table in [User, Order, Notification]:
+                     await session.execute(text(f"ANALYZE {table.__tablename__}"))
+ 
+                 # Clean up orphaned records, e.g. notifications whose user no longer exists
+                 stmt = delete(Notification).where(
+                     ~Notification.user_id.in_(
+                         select(User.id)
+                     )
+                 )
+                 await session.execute(stmt)
+                 await session.commit()
+ 
+             return {"status": "success"}
+ 
+         except Exception as e:
+             logger.error(f"Database maintenance error: {str(e)}")
+             return {"error": str(e)}
+ 
+     async def rotate_log_files(self) -> None:
+         """Rotate log files that have grown beyond the size limit"""
+         log_dir = "logs"
+         max_log_size = 10 * 1024 * 1024  # 10MB
+ 
+         try:
+             if not os.path.isdir(log_dir):
+                 return
+             for filename in os.listdir(log_dir):
+                 filepath = os.path.join(log_dir, filename)
+                 if os.path.isfile(filepath) and os.path.getsize(filepath) > max_log_size:
+                     # Archive old log
+                     archive_name = f"{filename}.{datetime.now().strftime('%Y%m%d')}"
+                     shutil.move(filepath, os.path.join(log_dir, archive_name))
+ 
+                     # Create new log file
+                     open(filepath, 'a').close()
+                     logger.info(f"Rotated log file: {filename}")
+         except Exception as e:
+             logger.error(f"Log rotation error: {str(e)}")
+ 
+     async def manage_storage_quotas(self) -> Dict[str, Any]:
+         """Manage storage quotas and cleanup"""
+         try:
+             results = {
+                 "warnings": [],
+                 "cleaned": 0
+             }
+ 
+             # Check upload directories against the configured size limit
+             # (MAX_UPLOAD_DIR_SIZE_MB is expected to be defined in Settings)
+             upload_dirs = ["uploads/documents", "uploads/images"]
+             for directory in upload_dirs:
+                 if os.path.exists(directory):
+                     total_size = sum(
+                         os.path.getsize(os.path.join(directory, f))
+                         for f in os.listdir(directory)
+                         if os.path.isfile(os.path.join(directory, f))
+                     ) / (1024 * 1024)  # Convert to MB
+ 
+                     if total_size > settings.MAX_UPLOAD_DIR_SIZE_MB:
+                         results["warnings"].append(
+                             f"Upload directory {directory} exceeds size limit"
+                         )
+ 
+             # Clean up temporary files older than 24 hours
+             temp_dirs = ["uploads/temp", "backups/temp"]
+             for directory in temp_dirs:
+                 if os.path.exists(directory):
+                     cutoff = datetime.now() - timedelta(days=1)
+                     for filename in os.listdir(directory):
+                         filepath = os.path.join(directory, filename)
+                         if os.path.isfile(filepath) and os.path.getctime(filepath) < cutoff.timestamp():
+                             os.remove(filepath)
+                             results["cleaned"] += 1
+ 
+             return results
+         except Exception as e:
+             logger.error(f"Storage quota management error: {str(e)}")
+             return {"error": str(e)}
+ 
+ maintenance = MaintenanceService()
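
Note: the tasks above are meant to be awaited from scheduled jobs or internal admin endpoints. As a rough illustration only (the router, path, and use of verify_service_token here are assumptions, not part of this commit), a health probe could delegate to the service like this:

from fastapi import APIRouter, Depends
from ..core.auth import verify_service_token  # assumed service-token dependency
from ..services.maintenance import maintenance

router = APIRouter()

@router.get("/internal/health", dependencies=[Depends(verify_service_token)])
async def health_check():
    # Delegates to the maintenance service defined above
    return await maintenance.check_system_health()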
app/services/notifications.py ADDED
@@ -0,0 +1,89 @@
+ from typing import Dict, Any, Optional
+ from ..db.database import db
+ from ..utils.cache import cache
+ from ..services.websocket import create_and_broadcast_notification
+ 
+ class NotificationService:
+     async def create_notification(
+         self,
+         user_id: str,
+         title: str,
+         message: str,
+         notification_type: str,
+         data: Optional[Dict[str, Any]] = None
+     ):
+         """Create, store and broadcast a notification"""
+         return await create_and_broadcast_notification(
+             user_id=user_id,
+             title=title,
+             message=message,
+             notification_type=notification_type,
+             data=data
+         )
+ 
+     async def get_user_notifications(
+         self,
+         user_id: str,
+         skip: int = 0,
+         limit: int = 50,
+         unread_only: bool = False
+     ):
+         """Get notifications for a user (reads a MongoDB-style collection via db.db)"""
+         cache_key = f"user_notifications:{user_id}"
+         if not unread_only:
+             cached = await cache.get_cache(cache_key)
+             if cached:
+                 return cached
+ 
+         query = {"user_id": user_id}
+         if unread_only:
+             query["read"] = False
+ 
+         cursor = db.db["notifications"].find(query)\
+             .sort("created_at", -1)\
+             .skip(skip)\
+             .limit(limit)
+ 
+         notifications = await cursor.to_list(length=limit)
+ 
+         if not unread_only:
+             await cache.set_cache(cache_key, notifications, expire=300)  # Cache for 5 minutes
+ 
+         return notifications
+ 
+     async def mark_as_read(self, notification_id: str, user_id: str):
+         """Mark a notification as read"""
+         result = await db.db["notifications"].update_one(
+             {"_id": notification_id, "user_id": user_id},
+             {"$set": {"read": True}}
+         )
+ 
+         if result.modified_count > 0:
+             await cache.delete_cache(f"user_notifications:{user_id}")
+             return True
+         return False
+ 
+     async def mark_all_as_read(self, user_id: str):
+         """Mark all notifications as read for a user"""
+         result = await db.db["notifications"].update_many(
+             {"user_id": user_id, "read": False},
+             {"$set": {"read": True}}
+         )
+ 
+         await cache.delete_cache(f"user_notifications:{user_id}")
+         return result.modified_count
+ 
+     async def delete_notification(self, notification_id: str, user_id: str):
+         """Delete a notification"""
+         result = await db.db["notifications"].delete_one(
+             {"_id": notification_id, "user_id": user_id}
+         )
+ 
+         if result.deleted_count > 0:
+             await cache.delete_cache(f"user_notifications:{user_id}")
+             return True
+         return False
+ 
+ notifications = NotificationService()
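
A minimal usage sketch for the singleton above (the router and path are illustrative assumptions, not part of this commit):

from fastapi import APIRouter
from ..services.notifications import notifications

router = APIRouter()

@router.post("/notifications/{notification_id}/read")
async def read_notification(notification_id: str, user_id: str):
    # Returns True only if the notification belongs to the user and was updated
    return {"updated": await notifications.mark_as_read(notification_id, user_id)}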
app/services/pos_client.py ADDED
@@ -0,0 +1,34 @@
+ from typing import List
+ from httpx import AsyncClient
+ from pydantic import BaseModel
+ from datetime import datetime
+ from ..core.config import settings
+ 
+ class OrderSchema(BaseModel):
+     id: str
+     branch_id: str
+     customer_id: str
+     items: List[dict]
+     total_amount: float
+     status: str
+     created_at: datetime
+     updated_at: datetime | None
+ 
+ async def get_orders(branch_id: str) -> List[OrderSchema]:
+     """
+     Fetch orders from the POS system for a specific branch
+ 
+     Args:
+         branch_id: The ID of the branch to fetch orders for
+ 
+     Returns:
+         List[OrderSchema]: A list of orders from the POS system
+     """
+     async with AsyncClient() as client:
+         # The shared service token is sent as a Bearer credential; if the POS
+         # service expects a different header (e.g. X-Service-Token), adjust this.
+         response = await client.get(
+             f"{settings.POS_API_URL}/internal/orders",
+             params={"branch_id": branch_id},
+             headers={"Authorization": f"Bearer {settings.SERVICE_TOKEN}"}
+         )
+         response.raise_for_status()
+         return [OrderSchema(**order) for order in response.json()]
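
For example, an admin endpoint could proxy branch orders through this client; the path below is an illustrative assumption:

from fastapi import APIRouter
from ..services.pos_client import get_orders

router = APIRouter()

@router.get("/branches/{branch_id}/orders")
async def branch_orders(branch_id: str):
    # httpx.HTTPStatusError propagates if the POS service returns an error status
    return await get_orders(branch_id)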
app/services/scheduler.py ADDED
@@ -0,0 +1,243 @@
+ from datetime import datetime, timedelta
+ from typing import List, Dict, Any, Optional
+ from bson import ObjectId
+ from apscheduler.schedulers.asyncio import AsyncIOScheduler
+ from apscheduler.triggers.cron import CronTrigger
+ from ..db.database import db
+ from ..utils.logger import logger
+ from ..services.calendar import calendar
+ from ..services.maintenance import maintenance
+ 
+ class SchedulerService:
+     def __init__(self):
+         self.scheduler = AsyncIOScheduler()
+         self._setup_maintenance_jobs()
+ 
+     def _setup_maintenance_jobs(self):
+         """Set up all maintenance-related scheduled jobs"""
+         # Daily database maintenance at 2 AM
+         self.scheduler.add_job(
+             maintenance.perform_database_maintenance,
+             CronTrigger(hour=2),
+             id="daily_db_maintenance",
+             replace_existing=True
+         )
+ 
+         # Session cleanup every 6 hours
+         self.scheduler.add_job(
+             maintenance.cleanup_expired_sessions,
+             CronTrigger(hour="*/6"),
+             id="session_cleanup",
+             replace_existing=True
+         )
+ 
+         # Resource monitoring every 15 minutes
+         self.scheduler.add_job(
+             maintenance.monitor_system_resources,
+             CronTrigger(minute="*/15"),
+             id="health_check",
+             replace_existing=True
+         )
+ 
+         # Daily backup at 1 AM. MaintenanceService does not define
+         # perform_scheduled_backup in this commit, so guard the registration
+         # to avoid an AttributeError when the module is imported.
+         if hasattr(maintenance, "perform_scheduled_backup"):
+             self.scheduler.add_job(
+                 maintenance.perform_scheduled_backup,
+                 CronTrigger(hour=1),
+                 id="daily_backup",
+                 replace_existing=True
+             )
+ 
+         # Daily log rotation at 3 AM
+         self.scheduler.add_job(
+             maintenance.rotate_log_files,
+             CronTrigger(hour=3),
+             id="log_rotation",
+             replace_existing=True
+         )
+ 
+         # Storage quota check every 2 hours
+         self.scheduler.add_job(
+             maintenance.manage_storage_quotas,
+             CronTrigger(hour="*/2"),
+             id="storage_quota_check",
+             replace_existing=True
+         )
+ 
+         # Monthly data archiving at 4 AM on the 1st of each month
+         self.scheduler.add_job(
+             maintenance.archive_old_data,
+             CronTrigger(day=1, hour=4),
+             id="monthly_archiving",
+             replace_existing=True
+         )
+ 
+     def start(self):
+         """Start the scheduler"""
+         try:
+             self.scheduler.start()
+             logger.info("Scheduler started successfully")
+         except Exception as e:
+             logger.error(f"Failed to start scheduler: {str(e)}")
+             raise
+ 
+     def shutdown(self):
+         """Shut down the scheduler"""
+         try:
+             self.scheduler.shutdown()
+             logger.info("Scheduler shut down successfully")
+         except Exception as e:
+             logger.error(f"Error during scheduler shutdown: {str(e)}")
+             raise
+ 
+     def get_jobs(self):
+         """Get all scheduled jobs"""
+         return [
+             {
+                 "id": job.id,
+                 "name": job.name,
+                 "next_run_time": job.next_run_time.isoformat() if job.next_run_time else None,
+                 "trigger": str(job.trigger)
+             }
+             for job in self.scheduler.get_jobs()
+         ]
+ 
+     async def create_recurring_event(
+         self,
+         user_id: str,
+         title: str,
+         description: str,
+         start_time: datetime,
+         end_time: datetime,
+         recurrence_pattern: str,  # daily, weekly, monthly, yearly
+         recurrence_end_date: Optional[datetime] = None,
+         attendees: Optional[List[str]] = None,
+         reminder_minutes: int = 30
+     ) -> List[Dict[str, Any]]:
+         """Create recurring events based on a recurrence pattern"""
+         # Without an explicit end date the loop below would never terminate,
+         # so cap the recurrence window at one year from the start time.
+         if recurrence_end_date is None:
+             recurrence_end_date = start_time + timedelta(days=365)
+ 
+         events = []
+         current_start = start_time
+         current_end = end_time
+         duration = end_time - start_time
+ 
+         while current_start <= recurrence_end_date:
+             # Create individual event instance
+             event = await calendar.create_event(
+                 user_id=user_id,
+                 title=title,
+                 description=description,
+                 start_time=current_start,
+                 end_time=current_end,
+                 attendees=attendees,
+                 reminder_minutes=reminder_minutes
+             )
+             events.append(event)
+ 
+             # Calculate next occurrence
+             if recurrence_pattern == "daily":
+                 current_start += timedelta(days=1)
+             elif recurrence_pattern == "weekly":
+                 current_start += timedelta(weeks=1)
+             elif recurrence_pattern == "monthly":
+                 # Add one month (approximately; assumes a start day that exists in every month)
+                 if current_start.month == 12:
+                     current_start = current_start.replace(year=current_start.year + 1, month=1)
+                 else:
+                     current_start = current_start.replace(month=current_start.month + 1)
+             elif recurrence_pattern == "yearly":
+                 current_start = current_start.replace(year=current_start.year + 1)
+ 
+             current_end = current_start + duration
+ 
+         return events
+ 
+     async def update_recurring_event(
+         self,
+         event_id: str,
+         user_id: str,
+         update_data: Dict[str, Any],
+         update_future: bool = True
+     ) -> List[Dict[str, Any]]:
+         """Update a recurring event and optionally its future occurrences"""
+         # Get the original event
+         event = await db.db["events"].find_one({
+             "_id": ObjectId(event_id),
+             "user_id": user_id
+         })
+ 
+         if not event:
+             return []
+ 
+         # Update the current event
+         await calendar.update_event(event_id, user_id, update_data)
+ 
+         updated_events = [event]
+ 
+         # Update future occurrences if requested
+         if update_future:
+             future_events = await db.db["events"].find({
+                 "recurrence_group": event.get("recurrence_group"),
+                 "start_time": {"$gt": event["start_time"]},
+                 "user_id": user_id
+             }).to_list(None)
+ 
+             for future_event in future_events:
+                 await calendar.update_event(
+                     str(future_event["_id"]),
+                     user_id,
+                     update_data
+                 )
+                 updated_events.append(future_event)
+ 
+         return updated_events
+ 
+     async def delete_recurring_event(
+         self,
+         event_id: str,
+         user_id: str,
+         delete_future: bool = True
+     ) -> bool:
+         """Delete a recurring event and optionally its future occurrences"""
+         event = await db.db["events"].find_one({
+             "_id": ObjectId(event_id),
+             "user_id": user_id
+         })
+ 
+         if not event:
+             return False
+ 
+         # Delete the current event
+         await calendar.delete_event(event_id, user_id)
+ 
+         # Delete future occurrences if requested
+         if delete_future and event.get("recurrence_group"):
+             await db.db["events"].delete_many({
+                 "recurrence_group": event["recurrence_group"],
+                 "start_time": {"$gt": event["start_time"]},
+                 "user_id": user_id
+             })
+ 
+         return True
+ 
+     async def get_upcoming_recurring_events(
+         self,
+         user_id: str,
+         days: int = 30
+     ) -> List[Dict[str, Any]]:
+         """Get upcoming recurring events for a user"""
+         start_date = datetime.utcnow()
+         end_date = start_date + timedelta(days=days)
+ 
+         events = await calendar.get_user_events(
+             user_id=user_id,
+             start_date=start_date,
+             end_date=end_date,
+             include_attendee_events=True
+         )
+ 
+         return sorted(events, key=lambda x: x["start_time"])
+ 
+ scheduler = SchedulerService()
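
The module-level scheduler is intended to be started once per process. A minimal sketch of wiring it into the FastAPI application lifecycle (the app module and lifespan hook shown are assumptions, not part of this commit):

from contextlib import asynccontextmanager
from fastapi import FastAPI
from .services.scheduler import scheduler

@asynccontextmanager
async def lifespan(app: FastAPI):
    scheduler.start()      # registers the maintenance cron jobs on the running event loop
    yield
    scheduler.shutdown()   # stops APScheduler cleanly on application exit

app = FastAPI(lifespan=lifespan)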
app/services/websocket.py ADDED
@@ -0,0 +1,80 @@
+ from typing import List, Dict, Any, Optional
+ from fastapi import WebSocket
+ from datetime import datetime
+ from ..db.database import db
+ from ..utils.cache import cache
+ from ..db.models import Notification
+ 
+ # Store active WebSocket connections
+ active_connections: List[WebSocket] = []
+ 
+ async def connect(websocket: WebSocket):
+     """Accept a new WebSocket connection"""
+     await websocket.accept()
+     active_connections.append(websocket)
+ 
+ async def disconnect(websocket: WebSocket):
+     """Remove a WebSocket connection"""
+     if websocket in active_connections:
+         active_connections.remove(websocket)
+ 
+ async def broadcast_message(message: dict):
+     """Broadcast a message to all connected clients"""
+     disconnected = []
+     for connection in active_connections:
+         try:
+             await connection.send_json(message)
+         except Exception:
+             # Sending failed, so treat the client as disconnected
+             disconnected.append(connection)
+ 
+     # Clean up disconnected clients
+     for connection in disconnected:
+         await disconnect(connection)
+ 
+ async def create_and_broadcast_notification(
+     user_id: str,
+     title: str,
+     message: str,
+     notification_type: str,
+     data: Optional[Dict[str, Any]] = None
+ ) -> Dict[str, Any]:
+     """Create a notification, persist it and broadcast it to connected clients"""
+     async with db.session() as session:
+         # Create notification
+         notification = Notification(
+             user_id=user_id,
+             title=title,
+             message=message,
+             type=notification_type,
+             data=data,
+             created_at=datetime.utcnow(),
+             read=False
+         )
+ 
+         # Store in database
+         session.add(notification)
+         await session.commit()
+         await session.refresh(notification)
+ 
+         # Convert to dict for broadcasting
+         notification_dict = {
+             "id": str(notification.id),
+             "user_id": notification.user_id,
+             "title": notification.title,
+             "message": notification.message,
+             "type": notification.type,
+             "data": notification.data,
+             "created_at": notification.created_at.isoformat(),
+             "read": notification.read
+         }
+ 
+         # Broadcast to connected clients
+         await broadcast_message({
+             "type": "notification",
+             "data": notification_dict
+         })
+ 
+         # Clear the user's cached notification list
+         await cache.delete_cache(f"user_notifications:{user_id}")
+ 
+         return notification_dict
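
A sketch of a corresponding WebSocket endpoint that uses the helpers above; the commit itself does not include a route, so the path and router placement are assumptions:

from fastapi import APIRouter, WebSocket, WebSocketDisconnect
from ..services import websocket as ws

router = APIRouter()

@router.websocket("/ws/notifications")
async def notifications_ws(websocket: WebSocket):
    await ws.connect(websocket)
    try:
        while True:
            # Keep the connection open; broadcasts are pushed by create_and_broadcast_notification
            await websocket.receive_text()
    except WebSocketDisconnect:
        await ws.disconnect(websocket)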