Fred808 committed on
Commit
20adca1
·
verified ·
1 Parent(s): 7c36b02

Upload 82 files

Browse files
app/__pycache__/main.cpython-312.pyc CHANGED
Binary files a/app/__pycache__/main.cpython-312.pyc and b/app/__pycache__/main.cpython-312.pyc differ
 
app/api/analytics.py CHANGED
@@ -1,11 +1,11 @@
1
  from fastapi import APIRouter, Depends, Query, HTTPException
2
  from sqlalchemy.ext.asyncio import AsyncSession
3
- from sqlalchemy import select, func, cast, Date, and_
4
  from datetime import datetime, timedelta
5
  from typing import Dict, Any, Optional
6
  from ..core.dependencies import get_current_active_user
7
  from ..db.database import get_db
8
- from ..db.models import Order, Product, User
9
 
10
  router = APIRouter()
11
 
@@ -198,35 +198,153 @@ async def get_customer_analytics(
198
  }
199
 
200
  @router.get("/dashboard")
201
- async def get_dashboard_analytics(
202
- branch_id: Optional[int] = Query(None, description="Filter analytics by branch"),
 
203
  current_user: User = Depends(get_current_active_user),
204
  db: AsyncSession = Depends(get_db)
205
  ) -> Dict[str, Any]:
206
- """Get a comprehensive dashboard with key metrics"""
207
- # Get last 30 days of sales data
208
- start_date = datetime.now() - timedelta(days=30)
209
- end_date = datetime.now()
210
 
211
- sales_data = await get_sales_analytics(start_date, end_date, branch_id, current_user, db)
212
- product_data = await get_product_analytics(branch_id, current_user, db)
213
- customer_data = await get_customer_analytics(branch_id, current_user, db)
214
 
215
- return {
216
- "sales_summary": {
217
- "total_revenue": sales_data["total_revenue"],
218
- "total_orders": sales_data["total_orders"],
219
- "average_order_value": sales_data["average_order_value"],
220
- "daily_sales": sales_data["daily_sales"][-7:] # Last 7 days
 
 
 
 
 
221
  },
222
- "product_summary": {
223
- "total_products": product_data["total_products"],
224
- "low_stock_products": product_data["low_stock_products"],
225
- "top_selling_products": product_data["top_products"][:5] # Top 5 products
226
  },
227
- "customer_summary": {
228
- "total_customers": customer_data["total_customers"],
229
- "average_customer_value": customer_data["average_customer_value"],
230
- "customer_segments": customer_data["customer_segments"]
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
231
  }
232
- }
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
  from fastapi import APIRouter, Depends, Query, HTTPException
2
  from sqlalchemy.ext.asyncio import AsyncSession
3
+ from sqlalchemy import select, func, cast, Date, and_, distinct
4
  from datetime import datetime, timedelta
5
  from typing import Dict, Any, Optional
6
  from ..core.dependencies import get_current_active_user
7
  from ..db.database import get_db
8
+ from ..db.models import Order, Product, User, Brand
9
 
10
  router = APIRouter()
11
 
 
198
  }
199
 
200
@router.get("/dashboard")
async def get_dashboard_metrics(
    start_date: Optional[str] = None,
    end_date: Optional[str] = None,
    current_user: User = Depends(get_current_active_user),
    db: AsyncSession = Depends(get_db)
) -> Dict[str, Any]:
    """Return dashboard metrics for a date range (default: last 30 days).

    Includes revenue/order/void/stock metrics with percent deltas against
    the immediately preceding period of the same length, plus traffic-source
    and financial-overview sections.

    Raises:
        HTTPException 400: if start_date or end_date is not ISO-8601.
    """
    # Parse dates or use last 30 days as default; reject malformed input
    # with a 400 instead of letting ValueError surface as a 500.
    try:
        end = datetime.now() if not end_date else datetime.fromisoformat(end_date)
        start = (end - timedelta(days=30)) if not start_date else datetime.fromisoformat(start_date)
    except ValueError:
        raise HTTPException(
            status_code=400,
            detail="start_date and end_date must be ISO-8601 date strings"
        )

    # Current period metrics
    current_metrics = await get_period_metrics(db, start, end, current_user)

    # Previous period of the SAME length for comparison.  (The old fixed
    # 30-day shift produced overlapping windows whenever the caller passed
    # a custom range; for the default 30-day range this is unchanged.)
    period_length = end - start
    prev_start = start - period_length
    prev_end = start
    prev_metrics = await get_period_metrics(db, prev_start, prev_end, current_user)

    def _section(key: str) -> Dict[str, Any]:
        # One metric section: total, percent delta, and up/down trend.
        return {
            "total": current_metrics[key],
            "delta": calculate_delta(current_metrics[key], prev_metrics[key]),
            "trend": "up" if current_metrics[key] > prev_metrics[key] else "down"
        }

    metrics = {
        "revenue": _section("revenue"),
        "orders": _section("orders"),
        "voids": _section("voids"),
        "stock": {
            "total": current_metrics["stock_total"],
            "items_below_threshold": current_metrics["stock_low"]
        }
    }

    # TODO: traffic sources are hard-coded placeholders — replace with real data.
    traffic_sources = {
        "direct": {"value": 45, "trend": "up", "delta": 12.5},
        "social": {"value": 25, "trend": "down", "delta": -5.0},
        "marketing": {"value": 20, "trend": "up", "delta": 8.2},
        "affiliates": {"value": 10, "trend": "down", "delta": -2.1}
    }

    # Financial overview (investment/savings are placeholder values from
    # get_period_metrics).
    overview = {
        "revenue": {"value": current_metrics["revenue"], "trend": "up" if current_metrics["revenue"] > prev_metrics["revenue"] else "down"},
        "expenses": {"value": current_metrics["expenses"], "trend": "up" if current_metrics["expenses"] > prev_metrics["expenses"] else "down"},
        "investment": {"value": current_metrics["investment"]},
        "savings": {"value": current_metrics["savings"]}
    }

    return {
        "metrics": metrics,
        "traffic_sources": traffic_sources,
        "overview": overview
    }
263
+
264
@router.get("/brands")
async def get_brands_analytics(
    current_user: User = Depends(get_current_active_user),
    db: AsyncSession = Depends(get_db)
) -> Dict[str, Any]:
    """Summarize all brands: total count, active count, per-brand details."""
    # Single grouped query: every brand plus its product count and the number
    # of distinct branches stocking it (outer join keeps product-less brands).
    stmt = (
        select(
            Brand,
            func.count(Product.id).label("product_count"),
            func.count(distinct(Product.branch_id)).label("store_count"),
        )
        .outerjoin(Product)
        .group_by(Brand.id)
    )
    rows = (await db.execute(stmt)).all()

    details = []
    active_count = 0
    for entry in rows:
        brand = entry.Brand
        if brand.is_active:
            active_count += 1
        # NOTE(review): established_date.strftime assumes the column is
        # never NULL — confirm against the Brand model.
        details.append({
            "name": brand.name,
            "category": brand.category,
            "established": brand.established_date.strftime("%Y-%m-%d"),
            "stores": entry.store_count,
            "products": entry.product_count,
            "status": "Active" if brand.is_active else "Inactive"
        })

    return {
        "total": len(rows),
        "active": active_count,
        "brands": details
    }
301
+
302
async def get_period_metrics(
    db: AsyncSession,
    start_date: datetime,
    end_date: datetime,
    current_user: User
) -> Dict[str, Any]:
    """Aggregate order (revenue/count/voids/expenses) and stock metrics
    for one time period.

    Non-superusers are restricted to orders from their own branch.
    All values are returned as plain floats/ints with NULLs coalesced to 0.
    """
    # BUGFIX: `case` is used below but was never imported — the module-level
    # import only brings in select/func/cast/Date/and_/distinct, so this
    # function raised NameError at runtime.  Import it locally here.
    from sqlalchemy import case

    # Base query conditions
    conditions = [
        Order.created_at.between(start_date, end_date)
    ]

    # Branch scoping for non-superusers
    if not current_user.is_superuser:
        conditions.append(Order.branch_id == current_user.branch_id)

    # Revenue / order-count / void-count / expenses in one aggregate query
    metrics_query = select(
        func.sum(Order.total_amount).label("revenue"),
        func.count().label("orders"),
        func.sum(case((Order.status == "void", 1), else_=0)).label("voids"),
        func.sum(Order.expenses).label("expenses")
    ).where(and_(*conditions))

    result = await db.execute(metrics_query)
    row = result.first()

    # Stock metrics.
    # NOTE(review): this query is not scoped by branch or by the period —
    # confirm that global stock totals are intended here.
    stock_query = select(
        func.count(Product.id).label("total"),
        func.sum(case((Product.stock_level < Product.reorder_threshold, 1), else_=0)).label("low_stock")
    )

    stock_result = await db.execute(stock_query)
    stock_row = stock_result.first()

    return {
        "revenue": float(row.revenue or 0),
        "orders": int(row.orders or 0),
        "voids": int(row.voids or 0),
        "expenses": float(row.expenses or 0),
        "investment": 50000.00,  # Example fixed value, replace with actual calculation
        "savings": 25000.00,  # Example fixed value, replace with actual calculation
        "stock_total": int(stock_row.total or 0),
        "stock_low": int(stock_row.low_stock or 0)
    }
346
+
347
def calculate_delta(current: float, previous: float) -> float:
    """Return the percent change from *previous* to *current*.

    When the previous value is 0 a true percentage is undefined, so we
    report a signed saturation value: 100.0 for growth from nothing,
    -100.0 for a drop below zero (the old code misreported this as 0),
    and 0.0 when both are zero.
    """
    if previous == 0:
        # BUGFIX: a negative current value previously returned 0, hiding
        # a decline; also return floats consistently per the annotation.
        if current > 0:
            return 100.0
        return -100.0 if current < 0 else 0.0
    return ((current - previous) / previous) * 100
app/api/auth.py CHANGED
@@ -1,82 +1,82 @@
1
- from fastapi import APIRouter, Depends, HTTPException, status, Form, Body
2
- from fastapi.security import OAuth2PasswordBearer, OAuth2PasswordRequestForm
3
- from sqlalchemy.ext.asyncio import AsyncSession
4
- from sqlalchemy import select
5
- from sqlalchemy.orm import selectinload
6
- from ..core.security import create_access_token, verify_password, get_password_hash
7
- from ..db.database import get_db
8
- from ..db.models import User
9
- from ..db.schemas import UserCreate, UserInDB, LoginData
10
- from datetime import timedelta
11
- from typing import Any
12
-
13
- router = APIRouter()
14
- oauth2_scheme = OAuth2PasswordBearer(tokenUrl="token")
15
-
16
- @router.post("/login/form")
17
- async def login_form(
18
- form_data: OAuth2PasswordRequestForm = Depends(),
19
- db: AsyncSession = Depends(get_db)
20
- ) -> Any:
21
- return await authenticate_user(db, form_data.username, form_data.password)
22
-
23
- @router.post("/login")
24
- async def login_json(
25
- login_data: LoginData,
26
- db: AsyncSession = Depends(get_db)
27
- ) -> Any:
28
- return await authenticate_user(db, login_data.email, login_data.password)
29
-
30
- async def authenticate_user(db: AsyncSession, email: str, password: str) -> dict:
31
- stmt = select(User).where(User.email == email)
32
- result = await db.execute(stmt)
33
- user = result.scalar_one_or_none()
34
-
35
- if not user:
36
- raise HTTPException(
37
- status_code=status.HTTP_401_UNAUTHORIZED,
38
- detail="Incorrect email or password",
39
- )
40
-
41
- if not verify_password(password, user.hashed_password):
42
- raise HTTPException(
43
- status_code=status.HTTP_401_UNAUTHORIZED,
44
- detail="Incorrect email or password",
45
- )
46
-
47
- access_token = create_access_token(user.id)
48
- return {"access_token": access_token, "token_type": "bearer"}
49
-
50
- @router.post("/register", response_model=UserInDB)
51
- async def register(
52
- user_data: UserCreate,
53
- db: AsyncSession = Depends(get_db)
54
- ) -> Any:
55
- # Check if user exists by email
56
- stmt = select(User).where(User.email == user_data.email)
57
- result = await db.execute(stmt)
58
- if result.scalar_one_or_none():
59
- raise HTTPException(
60
- status_code=status.HTTP_400_BAD_REQUEST,
61
- detail="Email already registered",
62
- )
63
-
64
- # Create new user
65
- user = User(
66
- email=user_data.email,
67
- full_name=user_data.full_name,
68
- hashed_password=get_password_hash(user_data.password),
69
- is_active=user_data.is_active,
70
- is_superuser=user_data.is_superuser,
71
- branch_id=user_data.branch_id
72
- )
73
-
74
- db.add(user)
75
- await db.commit()
76
-
77
- # Refresh user with roles relationship loaded
78
- stmt = select(User).options(selectinload(User.roles)).where(User.id == user.id)
79
- result = await db.execute(stmt)
80
- user = result.scalar_one()
81
-
82
- return user
 
1
+ from fastapi import APIRouter, Depends, HTTPException, status, Form, Body
2
+ from fastapi.security import OAuth2PasswordBearer, OAuth2PasswordRequestForm
3
+ from sqlalchemy.ext.asyncio import AsyncSession
4
+ from sqlalchemy import select
5
+ from sqlalchemy.orm import selectinload
6
+ from ..core.security import create_access_token, verify_password, get_password_hash
7
+ from ..db.database import get_db
8
+ from ..db.models import User
9
+ from ..db.schemas import UserCreate, UserInDB, LoginData
10
+ from datetime import timedelta
11
+ from typing import Any
12
+
13
+ router = APIRouter()
14
+ oauth2_scheme = OAuth2PasswordBearer(tokenUrl="token")
15
+
16
+ @router.post("/login/form")
17
+ async def login_form(
18
+ form_data: OAuth2PasswordRequestForm = Depends(),
19
+ db: AsyncSession = Depends(get_db)
20
+ ) -> Any:
21
+ return await authenticate_user(db, form_data.username, form_data.password)
22
+
23
+ @router.post("/login")
24
+ async def login_json(
25
+ login_data: LoginData,
26
+ db: AsyncSession = Depends(get_db)
27
+ ) -> Any:
28
+ return await authenticate_user(db, login_data.email, login_data.password)
29
+
30
+ async def authenticate_user(db: AsyncSession, email: str, password: str) -> dict:
31
+ stmt = select(User).where(User.email == email)
32
+ result = await db.execute(stmt)
33
+ user = result.scalar_one_or_none()
34
+
35
+ if not user:
36
+ raise HTTPException(
37
+ status_code=status.HTTP_401_UNAUTHORIZED,
38
+ detail="Incorrect email or password",
39
+ )
40
+
41
+ if not verify_password(password, user.hashed_password):
42
+ raise HTTPException(
43
+ status_code=status.HTTP_401_UNAUTHORIZED,
44
+ detail="Incorrect email or password",
45
+ )
46
+
47
+ access_token = create_access_token(user.id)
48
+ return {"access_token": access_token, "token_type": "bearer"}
49
+
50
+ @router.post("/register", response_model=UserInDB)
51
+ async def register(
52
+ user_data: UserCreate,
53
+ db: AsyncSession = Depends(get_db)
54
+ ) -> Any:
55
+ # Check if user exists by email
56
+ stmt = select(User).where(User.email == user_data.email)
57
+ result = await db.execute(stmt)
58
+ if result.scalar_one_or_none():
59
+ raise HTTPException(
60
+ status_code=status.HTTP_400_BAD_REQUEST,
61
+ detail="Email already registered",
62
+ )
63
+
64
+ # Create new user
65
+ user = User(
66
+ email=user_data.email,
67
+ full_name=user_data.full_name,
68
+ hashed_password=get_password_hash(user_data.password),
69
+ is_active=user_data.is_active,
70
+ is_superuser=user_data.is_superuser,
71
+ branch_id=user_data.branch_id
72
+ )
73
+
74
+ db.add(user)
75
+ await db.commit()
76
+
77
+ # Refresh user with roles relationship loaded
78
+ stmt = select(User).options(selectinload(User.roles)).where(User.id == user.id)
79
+ result = await db.execute(stmt)
80
+ user = result.scalar_one()
81
+
82
+ return user
app/api/maintenance.py CHANGED
@@ -3,10 +3,13 @@ from sqlalchemy.ext.asyncio import AsyncSession
3
  from sqlalchemy import select, delete, func
4
  from typing import Dict, Any, List
5
  from datetime import datetime, timedelta
6
- from ..core.dependencies import get_current_active_user
7
  from ..db.database import get_db
8
  from ..db.models import User, Order, Notification, Event
9
  from ..utils.logger import logger
 
 
 
10
 
11
  router = APIRouter()
12
 
@@ -130,4 +133,91 @@ async def perform_db_maintenance(
130
  raise HTTPException(
131
  status_code=500,
132
  detail=f"Database maintenance failed: {str(e)}"
133
- )
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
3
# BUGFIX: get_db was imported from both ..core.dependencies and
# ..db.database; the second import silently shadowed the first.  Keep only
# the canonical ..db.database provider (the final binding is unchanged).
# Grouped per PEP 8: stdlib, third-party, local.
import asyncio
from datetime import datetime, timedelta
from typing import Any, Dict, List

from sqlalchemy import select, delete, func

from ..core.dependencies import get_current_active_user
from ..db.database import get_db
from ..db.models import User, Order, Notification, Event
from ..services.maintenance import maintenance_service
from ..services.pos_analytics import pos_analytics
from ..utils.logger import logger
13
 
14
  router = APIRouter()
15
 
 
133
  raise HTTPException(
134
  status_code=500,
135
  detail=f"Database maintenance failed: {str(e)}"
136
+ )
137
+
138
@router.get("/health")
async def health_check() -> Dict[str, Any]:
    """Unauthenticated liveness probe.

    Returns a static "healthy" status plus the service timestamp so
    external monitors can verify the API is up without credentials.
    """
    payload: Dict[str, Any] = {"status": "healthy"}
    payload["timestamp"] = maintenance_service.get_timestamp()
    return payload
145
+
146
@router.get("/system-status")
async def get_system_status(
    current_user: User = Depends(get_current_active_user),
    db: AsyncSession = Depends(get_db)
) -> Dict[str, Any]:
    """Get detailed system status including POS integration health"""
    # Guard: superusers only.
    if not current_user.is_superuser:
        raise HTTPException(
            status_code=403,
            detail="Only superusers can access system status"
        )

    # Probe each subsystem in turn (order kept: DB, POS, Redis, tasks).
    database_health = await maintenance_service.check_database()
    pos_health = await pos_analytics.health_check()
    redis_health = await maintenance_service.check_redis()
    task_health = await maintenance_service.check_background_tasks()

    components = {
        "database": database_health,
        "redis": redis_health,
        "background_tasks": task_health,
        "pos_integration": pos_health
    }
    return {
        "status": "healthy",
        "timestamp": maintenance_service.get_timestamp(),
        "components": components
    }
180
+
181
@router.post("/sync/pos-metrics")
async def trigger_pos_sync(
    current_user: User = Depends(get_current_active_user)
) -> Dict[str, Any]:
    """Manually trigger POS metrics synchronization"""
    # Guard: superusers only.
    if not current_user.is_superuser:
        raise HTTPException(
            status_code=403,
            detail="Only superusers can trigger manual sync"
        )

    try:
        all_synced = await pos_analytics.sync_all_metrics()
    except Exception as e:
        raise HTTPException(
            status_code=500,
            detail=f"Failed to sync POS metrics: {str(e)}"
        )

    if all_synced:
        return {
            "status": "success",
            "message": "POS metrics sync completed successfully"
        }
    return {
        "status": "partial_failure",
        "message": "Some metrics failed to sync"
    }
203
+
204
@router.get("/sync/status")
async def get_sync_status(
    current_user: User = Depends(get_current_active_user)
) -> Dict[str, Any]:
    """Get status of background sync tasks"""
    # Guard: superusers only.
    if not current_user.is_superuser:
        raise HTTPException(
            status_code=403,
            detail="Only superusers can view sync status"
        )

    # Gather status in the same order as before: tasks first, then POS.
    background = await maintenance_service.check_background_tasks()
    pos_state = await pos_analytics.health_check()

    report: Dict[str, Any] = {
        "background_tasks": background,
        "pos_integration": pos_state
    }
    report["last_sync_timestamp"] = maintenance_service.get_last_sync_time()
    report["next_sync_due"] = maintenance_service.get_next_sync_time()
    return report
app/api/orders.py CHANGED
@@ -1,209 +1,209 @@
1
- from fastapi import APIRouter, HTTPException, status, Depends, Query
2
- from sqlalchemy.ext.asyncio import AsyncSession
3
- from sqlalchemy import select
4
- from typing import List, Optional
5
- from ..core.dependencies import get_current_active_user
6
- from ..db.database import get_db
7
- from ..db.models import Order, Product, OrderItem, User
8
- from ..db.schemas import OrderCreate, OrderInDB
9
- from ..services.pos_client import get_orders, OrderSchema
10
- from datetime import datetime
11
-
12
- router = APIRouter()
13
-
14
- @router.post("/", response_model=OrderInDB)
15
- async def create_order(
16
- order: OrderCreate,
17
- current_user: User = Depends(get_current_active_user),
18
- db: AsyncSession = Depends(get_db)
19
- ) -> OrderInDB:
20
- # Ensure user belongs to the branch they're creating the order for
21
- if current_user.branch_id != order.branch_id and not current_user.is_superuser:
22
- raise HTTPException(
23
- status_code=403,
24
- detail="You can only create orders for your own branch"
25
- )
26
-
27
- # Calculate total and validate products
28
- total = 0
29
- order_items = []
30
-
31
- for item in order.items:
32
- # Get product
33
- stmt = select(Product).where(
34
- Product.id == item.product_id,
35
- Product.branch_id == order.branch_id # Ensure product belongs to the same branch
36
- )
37
- result = await db.execute(stmt)
38
- product = result.scalar_one_or_none()
39
-
40
- if not product:
41
- raise HTTPException(
42
- status_code=404,
43
- detail=f"Product {item.product_id} not found in this branch"
44
- )
45
-
46
- if product.inventory_count < item.quantity:
47
- raise HTTPException(
48
- status_code=400,
49
- detail=f"Insufficient inventory for product {item.product_id}"
50
- )
51
-
52
- # Update inventory
53
- product.inventory_count -= item.quantity
54
- total += product.price * item.quantity
55
-
56
- # Create order item
57
- order_item = OrderItem(
58
- product_id=item.product_id,
59
- quantity=item.quantity,
60
- price=product.price
61
- )
62
- order_items.append(order_item)
63
-
64
- # Create order
65
- db_order = Order(
66
- customer_id=order.customer_id,
67
- branch_id=order.branch_id,
68
- total_amount=total,
69
- status="pending",
70
- items=order_items
71
- )
72
-
73
- db.add(db_order)
74
- await db.commit()
75
- await db.refresh(db_order)
76
- return db_order
77
-
78
- @router.get("/", response_model=List[OrderInDB])
79
- async def list_orders(
80
- skip: int = 0,
81
- limit: int = 10,
82
- status: Optional[str] = None,
83
- branch_id: Optional[int] = Query(None, description="Filter orders by branch"),
84
- current_user: User = Depends(get_current_active_user),
85
- db: AsyncSession = Depends(get_db)
86
- ) -> List[OrderInDB]:
87
- query = select(Order)
88
-
89
- # Filter by status if provided
90
- if status:
91
- query = query.where(Order.status == status)
92
-
93
- # Filter by branch if provided, otherwise use user's branch
94
- if branch_id:
95
- if not current_user.is_superuser and branch_id != current_user.branch_id:
96
- raise HTTPException(
97
- status_code=403,
98
- detail="You can only view orders from your own branch"
99
- )
100
- query = query.where(Order.branch_id == branch_id)
101
- elif not current_user.is_superuser:
102
- # Non-superusers can only see orders from their branch
103
- query = query.where(Order.branch_id == current_user.branch_id)
104
-
105
- query = query.offset(skip).limit(limit)
106
- result = await db.execute(query)
107
- return result.scalars().all()
108
-
109
- @router.get("/{order_id}", response_model=OrderInDB)
110
- async def get_order(
111
- order_id: int,
112
- current_user: User = Depends(get_current_active_user),
113
- db: AsyncSession = Depends(get_db)
114
- ) -> OrderInDB:
115
- stmt = select(Order).where(Order.id == order_id)
116
- result = await db.execute(stmt)
117
- order = result.scalar_one_or_none()
118
-
119
- if not order:
120
- raise HTTPException(status_code=404, detail="Order not found")
121
-
122
- # Check if user has access to this order's branch
123
- if not current_user.is_superuser and order.branch_id != current_user.branch_id:
124
- raise HTTPException(status_code=403, detail="You cannot access orders from other branches")
125
-
126
- return order
127
-
128
- @router.put("/{order_id}/status", response_model=OrderInDB)
129
- async def update_order_status(
130
- order_id: int,
131
- status: str,
132
- current_user: User = Depends(get_current_active_user),
133
- db: AsyncSession = Depends(get_db)
134
- ) -> OrderInDB:
135
- valid_statuses = ["pending", "processing", "shipped", "delivered", "cancelled"]
136
- if status not in valid_statuses:
137
- raise HTTPException(status_code=400, detail="Invalid status")
138
-
139
- stmt = select(Order).where(Order.id == order_id)
140
- result = await db.execute(stmt)
141
- order = result.scalar_one_or_none()
142
-
143
- if not order:
144
- raise HTTPException(status_code=404, detail="Order not found")
145
-
146
- # Check if user has access to this order's branch
147
- if not current_user.is_superuser and order.branch_id != current_user.branch_id:
148
- raise HTTPException(status_code=403, detail="You cannot modify orders from other branches")
149
-
150
- order.status = status
151
- order.updated_at = datetime.utcnow()
152
-
153
- await db.commit()
154
- await db.refresh(order)
155
- return order
156
-
157
- @router.delete("/{order_id}")
158
- async def delete_order(
159
- order_id: int,
160
- current_user: User = Depends(get_current_active_user),
161
- db: AsyncSession = Depends(get_db)
162
- ):
163
- # Get the order
164
- stmt = select(Order).where(Order.id == order_id)
165
- result = await db.execute(stmt)
166
- order = result.scalar_one_or_none()
167
-
168
- if not order:
169
- raise HTTPException(status_code=404, detail="Order not found")
170
-
171
- # Check if user has access to this order's branch
172
- if not current_user.is_superuser and order.branch_id != current_user.branch_id:
173
- raise HTTPException(status_code=403, detail="You cannot delete orders from other branches")
174
-
175
- # Restore inventory for each product
176
- for item in order.items:
177
- product_stmt = select(Product).where(Product.id == item.product_id)
178
- product_result = await db.execute(product_stmt)
179
- product = product_result.scalar_one_or_none()
180
-
181
- if product:
182
- product.inventory_count += item.quantity
183
-
184
- await db.delete(order)
185
- await db.commit()
186
-
187
- return {"status": "success", "message": "Order deleted and inventory restored"}
188
-
189
- @router.get("/admin/orders", response_model=List[OrderSchema])
190
- async def list_admin_orders(
191
- current_user: User = Depends(get_current_active_user)
192
- ) -> List[OrderSchema]:
193
- """
194
- Get POS system orders for the current user's branch.
195
- Requires authenticated user.
196
- """
197
- if not current_user.branch_id:
198
- raise HTTPException(
199
- status_code=400,
200
- detail="User does not belong to any branch"
201
- )
202
-
203
- try:
204
- return await get_orders(str(current_user.branch_id))
205
- except Exception as e:
206
- raise HTTPException(
207
- status_code=500,
208
- detail=f"Failed to fetch POS orders: {str(e)}"
209
  )
 
1
+ from fastapi import APIRouter, HTTPException, status, Depends, Query
2
+ from sqlalchemy.ext.asyncio import AsyncSession
3
+ from sqlalchemy import select
4
+ from typing import List, Optional
5
+ from ..core.dependencies import get_current_active_user
6
+ from ..db.database import get_db
7
+ from ..db.models import Order, Product, OrderItem, User
8
+ from ..db.schemas import OrderCreate, OrderInDB
9
+ from ..services.pos_client import get_orders, OrderSchema
10
+ from datetime import datetime
11
+
12
+ router = APIRouter()
13
+
14
+ @router.post("/", response_model=OrderInDB)
15
+ async def create_order(
16
+ order: OrderCreate,
17
+ current_user: User = Depends(get_current_active_user),
18
+ db: AsyncSession = Depends(get_db)
19
+ ) -> OrderInDB:
20
+ # Ensure user belongs to the branch they're creating the order for
21
+ if current_user.branch_id != order.branch_id and not current_user.is_superuser:
22
+ raise HTTPException(
23
+ status_code=403,
24
+ detail="You can only create orders for your own branch"
25
+ )
26
+
27
+ # Calculate total and validate products
28
+ total = 0
29
+ order_items = []
30
+
31
+ for item in order.items:
32
+ # Get product
33
+ stmt = select(Product).where(
34
+ Product.id == item.product_id,
35
+ Product.branch_id == order.branch_id # Ensure product belongs to the same branch
36
+ )
37
+ result = await db.execute(stmt)
38
+ product = result.scalar_one_or_none()
39
+
40
+ if not product:
41
+ raise HTTPException(
42
+ status_code=404,
43
+ detail=f"Product {item.product_id} not found in this branch"
44
+ )
45
+
46
+ if product.inventory_count < item.quantity:
47
+ raise HTTPException(
48
+ status_code=400,
49
+ detail=f"Insufficient inventory for product {item.product_id}"
50
+ )
51
+
52
+ # Update inventory
53
+ product.inventory_count -= item.quantity
54
+ total += product.price * item.quantity
55
+
56
+ # Create order item
57
+ order_item = OrderItem(
58
+ product_id=item.product_id,
59
+ quantity=item.quantity,
60
+ price=product.price
61
+ )
62
+ order_items.append(order_item)
63
+
64
+ # Create order
65
+ db_order = Order(
66
+ customer_id=order.customer_id,
67
+ branch_id=order.branch_id,
68
+ total_amount=total,
69
+ status="pending",
70
+ items=order_items
71
+ )
72
+
73
+ db.add(db_order)
74
+ await db.commit()
75
+ await db.refresh(db_order)
76
+ return db_order
77
+
78
+ @router.get("/", response_model=List[OrderInDB])
79
+ async def list_orders(
80
+ skip: int = 0,
81
+ limit: int = 10,
82
+ status: Optional[str] = None,
83
+ branch_id: Optional[int] = Query(None, description="Filter orders by branch"),
84
+ current_user: User = Depends(get_current_active_user),
85
+ db: AsyncSession = Depends(get_db)
86
+ ) -> List[OrderInDB]:
87
+ query = select(Order)
88
+
89
+ # Filter by status if provided
90
+ if status:
91
+ query = query.where(Order.status == status)
92
+
93
+ # Filter by branch if provided, otherwise use user's branch
94
+ if branch_id:
95
+ if not current_user.is_superuser and branch_id != current_user.branch_id:
96
+ raise HTTPException(
97
+ status_code=403,
98
+ detail="You can only view orders from your own branch"
99
+ )
100
+ query = query.where(Order.branch_id == branch_id)
101
+ elif not current_user.is_superuser:
102
+ # Non-superusers can only see orders from their branch
103
+ query = query.where(Order.branch_id == current_user.branch_id)
104
+
105
+ query = query.offset(skip).limit(limit)
106
+ result = await db.execute(query)
107
+ return result.scalars().all()
108
+
109
+ @router.get("/{order_id}", response_model=OrderInDB)
110
+ async def get_order(
111
+ order_id: int,
112
+ current_user: User = Depends(get_current_active_user),
113
+ db: AsyncSession = Depends(get_db)
114
+ ) -> OrderInDB:
115
+ stmt = select(Order).where(Order.id == order_id)
116
+ result = await db.execute(stmt)
117
+ order = result.scalar_one_or_none()
118
+
119
+ if not order:
120
+ raise HTTPException(status_code=404, detail="Order not found")
121
+
122
+ # Check if user has access to this order's branch
123
+ if not current_user.is_superuser and order.branch_id != current_user.branch_id:
124
+ raise HTTPException(status_code=403, detail="You cannot access orders from other branches")
125
+
126
+ return order
127
+
128
+ @router.put("/{order_id}/status", response_model=OrderInDB)
129
+ async def update_order_status(
130
+ order_id: int,
131
+ status: str,
132
+ current_user: User = Depends(get_current_active_user),
133
+ db: AsyncSession = Depends(get_db)
134
+ ) -> OrderInDB:
135
+ valid_statuses = ["pending", "processing", "shipped", "delivered", "cancelled"]
136
+ if status not in valid_statuses:
137
+ raise HTTPException(status_code=400, detail="Invalid status")
138
+
139
+ stmt = select(Order).where(Order.id == order_id)
140
+ result = await db.execute(stmt)
141
+ order = result.scalar_one_or_none()
142
+
143
+ if not order:
144
+ raise HTTPException(status_code=404, detail="Order not found")
145
+
146
+ # Check if user has access to this order's branch
147
+ if not current_user.is_superuser and order.branch_id != current_user.branch_id:
148
+ raise HTTPException(status_code=403, detail="You cannot modify orders from other branches")
149
+
150
+ order.status = status
151
+ order.updated_at = datetime.utcnow()
152
+
153
+ await db.commit()
154
+ await db.refresh(order)
155
+ return order
156
+
157
+ @router.delete("/{order_id}")
158
+ async def delete_order(
159
+ order_id: int,
160
+ current_user: User = Depends(get_current_active_user),
161
+ db: AsyncSession = Depends(get_db)
162
+ ):
163
+ # Get the order
164
+ stmt = select(Order).where(Order.id == order_id)
165
+ result = await db.execute(stmt)
166
+ order = result.scalar_one_or_none()
167
+
168
+ if not order:
169
+ raise HTTPException(status_code=404, detail="Order not found")
170
+
171
+ # Check if user has access to this order's branch
172
+ if not current_user.is_superuser and order.branch_id != current_user.branch_id:
173
+ raise HTTPException(status_code=403, detail="You cannot delete orders from other branches")
174
+
175
+ # Restore inventory for each product
176
+ for item in order.items:
177
+ product_stmt = select(Product).where(Product.id == item.product_id)
178
+ product_result = await db.execute(product_stmt)
179
+ product = product_result.scalar_one_or_none()
180
+
181
+ if product:
182
+ product.inventory_count += item.quantity
183
+
184
+ await db.delete(order)
185
+ await db.commit()
186
+
187
+ return {"status": "success", "message": "Order deleted and inventory restored"}
188
+
189
+ @router.get("/admin/orders", response_model=List[OrderSchema])
190
+ async def list_admin_orders(
191
+ current_user: User = Depends(get_current_active_user)
192
+ ) -> List[OrderSchema]:
193
+ """
194
+ Get POS system orders for the current user's branch.
195
+ Requires authenticated user.
196
+ """
197
+ if not current_user.branch_id:
198
+ raise HTTPException(
199
+ status_code=400,
200
+ detail="User does not belong to any branch"
201
+ )
202
+
203
+ try:
204
+ return await get_orders(str(current_user.branch_id))
205
+ except Exception as e:
206
+ raise HTTPException(
207
+ status_code=500,
208
+ detail=f"Failed to fetch POS orders: {str(e)}"
209
  )
app/api/sessions.py ADDED
@@ -0,0 +1,31 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
from fastapi import APIRouter, Depends, HTTPException, status
from sqlalchemy.orm import Session
from typing import Any, Dict

from app.db.database import get_db
from app.db import models, schemas
from app.core.auth import get_current_active_user

router = APIRouter(
    prefix="/api/sessions",
    tags=["sessions"]
)


@router.put("/iteration-status")
def update_iteration_status(
    session_update: schemas.SessionUpdate,
    current_user: models.User = Depends(get_current_active_user),
    db: Session = Depends(get_db)
) -> Dict[str, bool]:
    """Update the iteration status of the current user's session.

    Returns the new ``iteration_active`` value; raises HTTP 404 when the
    user has no active session.
    """
    # Bug fix: only consider sessions still marked active — the 404
    # message below promises an *active* session, and a user may have
    # stale (is_active=False) rows that the old unfiltered query could
    # return nondeterministically.
    session = db.query(models.Session).filter(
        models.Session.user_id == current_user.id,
        models.Session.is_active == True,  # noqa: E712 — SQLAlchemy expression
    ).first()

    if not session:
        raise HTTPException(status_code=404, detail="Active session not found")

    session.iteration_active = session_update.iteration_active
    db.commit()

    return {"iteration_active": session.iteration_active}
app/api/staff_analytics.py ADDED
@@ -0,0 +1,191 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from fastapi import APIRouter, Depends, Query, HTTPException
2
+ from sqlalchemy.ext.asyncio import AsyncSession
3
+ from datetime import datetime
4
+ from typing import Dict, Any, Optional, List
5
+ from ..core.dependencies import get_current_active_user, get_db
6
+ from ..db.models import User, ActivityType
7
+ from ..services.analytics import staff_analytics
8
+ from ..services.staff_reports import staff_reports
9
+ from ..db.schemas import (
10
+ StaffActivityCreate,
11
+ StaffActivityInDB,
12
+ StaffPerformanceReport,
13
+ RealtimeStaffMetrics
14
+ )
15
+
16
+ router = APIRouter()
17
+
18
@router.post("/activity", response_model=Dict[str, Any])
async def record_staff_activity(
    activity: StaffActivityCreate,
    current_user: User = Depends(get_current_active_user),
    db: AsyncSession = Depends(get_db)
) -> Dict[str, Any]:
    """Record a staff activity for the caller's branch."""
    branch = current_user.branch_id
    if not branch:
        raise HTTPException(status_code=400, detail="User is not associated with a branch")

    # Delegate persistence and scoring to the analytics service.
    saved = await staff_analytics.record_activity(
        user_id=current_user.id,
        branch_id=branch,
        activity_type=activity.activity_type,
        details=activity.details,
        duration=activity.duration,
    )

    return {
        "status": "success",
        "activity_id": saved.id,
        "performance_score": saved.performance_score,
    }
41
+
42
@router.get("/performance", response_model=StaffPerformanceReport)
async def get_staff_performance(
    branch_id: Optional[int] = Query(None, description="Filter by branch ID"),
    user_id: Optional[int] = Query(None, description="Filter by user ID"),
    start_date: Optional[datetime] = Query(None, description="Start date for analysis"),
    end_date: Optional[datetime] = Query(None, description="End date for analysis"),
    current_user: User = Depends(get_current_active_user),
    db: AsyncSession = Depends(get_db)
) -> Dict[str, Any]:
    """Get staff performance metrics, scoped to the caller's branch for
    non-superusers."""
    if not current_user.is_superuser:
        # Regular users may not request another branch or another user.
        wants_foreign_branch = branch_id and branch_id != current_user.branch_id
        wants_foreign_user = user_id and user_id != current_user.id
        if wants_foreign_branch or wants_foreign_user:
            raise HTTPException(
                status_code=403,
                detail="You can only view performance metrics for your own branch"
            )
        # Non-superusers are always pinned to their own branch.
        branch_id = current_user.branch_id

    return await staff_analytics.get_staff_performance(
        branch_id=branch_id,
        user_id=user_id,
        start_date=start_date,
        end_date=end_date
    )
72
+
73
@router.get("/metrics/summary")
async def get_performance_summary(
    current_user: User = Depends(get_current_active_user),
    db: AsyncSession = Depends(get_db)
) -> Dict[str, Any]:
    """Get a summary of the current user's performance metrics, with a
    comparison against branch averages."""
    metrics = await staff_analytics.get_staff_performance(
        user_id=current_user.id,
        branch_id=current_user.branch_id
    )

    # Locate this user's entry in the per-staff breakdown.
    user_metrics = None
    for entry in metrics["staff_performance"]:
        if entry["user_id"] == current_user.id:
            user_metrics = entry
            break

    if not user_metrics:
        return {
            "message": "No performance metrics available",
            "metrics": None
        }

    # Compare the user's numbers against branch-wide averages.
    comparison = await staff_reports.generate_staff_comparison(
        user_id=current_user.id,
        branch_id=current_user.branch_id
    )

    return {
        "metrics": user_metrics["metrics"],
        "comparison": comparison,
        "period": metrics["date_range"],
    }
107
+
108
@router.get("/realtime", response_model=RealtimeStaffMetrics)
async def get_realtime_activity(
    branch_id: Optional[int] = Query(None, description="Filter by branch ID"),
    current_user: User = Depends(get_current_active_user),
    db: AsyncSession = Depends(get_db)
) -> Dict[str, Any]:
    """Get real-time staff activity for the current day.

    ``branch_id`` defaults to the caller's own branch when omitted.
    Raises HTTP 403 when a non-superuser asks for a different branch.
    """
    # Bug fix: branch_id is Optional and defaults to the caller's own
    # branch below, so a missing branch_id must NOT trip the permission
    # check. Previously ``None != current_user.branch_id`` rejected every
    # non-superuser request that omitted branch_id.
    if (
        not current_user.is_superuser
        and branch_id is not None
        and branch_id != current_user.branch_id
    ):
        raise HTTPException(
            status_code=403,
            detail="You can only view real-time activity for your own branch"
        )

    # Today's window: midnight UTC up to now.
    today = datetime.utcnow()
    today_start = today.replace(hour=0, minute=0, second=0, microsecond=0)

    metrics = await staff_analytics.get_staff_performance(
        branch_id=branch_id or current_user.branch_id,
        start_date=today_start,
        end_date=today
    )

    # Staff with any logged-in time today count as "active".
    active_staff = [
        staff for staff in metrics["staff_performance"]
        if staff["metrics"]["total_login_time"] > 0
    ]

    total_sales_today = sum(
        staff["metrics"]["total_sales"] for staff in metrics["staff_performance"]
    )
    total_transactions = sum(
        staff["metrics"]["transaction_count"] for staff in metrics["staff_performance"]
    )

    return {
        "active_staff_count": len(active_staff),
        "total_sales_today": total_sales_today,
        "total_transactions": total_transactions,
        "staff_metrics": active_staff,
        "last_updated": datetime.utcnow().isoformat()
    }
150
+
151
@router.get("/reports/performance")
async def get_performance_report(
    branch_id: Optional[int] = Query(None, description="Filter by branch ID"),
    start_date: Optional[datetime] = Query(None, description="Start date for report"),
    end_date: Optional[datetime] = Query(None, description="End date for report"),
    current_user: User = Depends(get_current_active_user),
    db: AsyncSession = Depends(get_db)
) -> Dict[str, Any]:
    """Generate a comprehensive performance report with insights.

    ``branch_id`` defaults to the caller's own branch when omitted.
    Raises HTTP 403 when a non-superuser asks for a different branch.
    """
    # Bug fix: branch_id is Optional and defaults to the caller's own
    # branch below, so omitting it must not trigger the 403. Previously
    # ``None != current_user.branch_id`` rejected every non-superuser
    # request without an explicit branch_id.
    if (
        not current_user.is_superuser
        and branch_id is not None
        and branch_id != current_user.branch_id
    ):
        raise HTTPException(
            status_code=403,
            detail="You can only view reports for your own branch"
        )

    return await staff_reports.generate_performance_report(
        branch_id=branch_id or current_user.branch_id,
        start_date=start_date,
        end_date=end_date
    )
172
+
173
@router.get("/reports/comparison")
async def get_staff_comparison(
    user_id: Optional[int] = Query(None, description="User ID to compare (defaults to current user)"),
    date: Optional[datetime] = Query(None, description="Date for comparison"),
    current_user: User = Depends(get_current_active_user),
    db: AsyncSession = Depends(get_db)
) -> Dict[str, Any]:
    """Get performance comparison against branch averages.

    ``user_id`` defaults to the current user when omitted. Raises HTTP
    403 when a non-superuser asks about someone else.
    """
    # Bug fix: user_id is documented as "defaults to current user", so a
    # missing user_id must not 403. Previously ``None != current_user.id``
    # rejected every non-superuser request that relied on the default.
    if (
        not current_user.is_superuser
        and user_id is not None
        and user_id != current_user.id
    ):
        raise HTTPException(
            status_code=403,
            detail="You can only view your own performance comparison"
        )

    return await staff_reports.generate_staff_comparison(
        user_id=user_id or current_user.id,
        branch_id=current_user.branch_id,
        date=date
    )
app/core/__pycache__/config.cpython-312.pyc CHANGED
Binary files a/app/core/__pycache__/config.cpython-312.pyc and b/app/core/__pycache__/config.cpython-312.pyc differ
 
app/core/config.py CHANGED
@@ -1,39 +1,41 @@
1
- from pydantic_settings import BaseSettings
2
- from typing import ClassVar
3
-
4
-
5
- class Settings(BaseSettings):
6
- API_V1_STR: str = "/api/v1"
7
- PROJECT_NAME: str = "Admin Dashboard"
8
- VERSION: str = "1.0.0"
9
-
10
- # Service Authentication
11
- SERVICE_TOKEN: str = "6f9aee88d5b94b0190c317abcbf4e6e7834cc2c7f02e45693e123456789abcde"
12
- POS_API_URL: str = "https://fred808-desk-back2.hf.space" # Default value, should be overridden in environment
13
-
14
- # PostgreSQL Database settings
15
- DATABASE_URL: ClassVar[str] = "postgresql+asyncpg://postgres.juycnkjuzylnbruwaqmp:Lovyelias5584.@aws-0-eu-central-1.pooler.supabase.com:5432/postgres"
16
-
17
- # JWT Settings
18
- SECRET_KEY: str = "your-secret-key-here"
19
- ALGORITHM: str = "HS256"
20
- ACCESS_TOKEN_EXPIRE_MINUTES: int = 30
21
-
22
- # Redis settings
23
- REDIS_URL: str = f"redis://redis:6379/0"
24
-
25
- # Email settings
26
- MAIL_USERNAME: str = "yungdml31@gmail.com"
27
- MAIL_PASSWORD: str = ""
28
- MAIL_FROM: str = "admin@angelo.com"
29
- MAIL_PORT: int = 587
30
- MAIL_SERVER: str = "smtp.gmail.com"
31
-
32
- # Frontend URL
33
- FRONTEND_URL: str = "http://localhost:3000"
34
-
35
- class Config:
36
- case_sensitive = True
37
-
38
-
39
- settings = Settings()
 
 
 
1
+ from pydantic_settings import BaseSettings
2
+ from typing import ClassVar
3
+
4
+
5
class Settings(BaseSettings):
    """Central application settings (pydantic-settings).

    The literals below are only defaults; matching environment variables
    (case-sensitive, per ``Config`` below) override them at load time.
    """

    API_V1_STR: str = "/api/v1"
    PROJECT_NAME: str = "Admin Dashboard"
    VERSION: str = "1.0.0"

    # Service Authentication
    # SECURITY NOTE(review): a real-looking secret is committed as the
    # default here — it should live only in the environment / a secret
    # manager, with no usable fallback.
    SERVICE_TOKEN: str = "6f9aee88d5b94b0190c317abcbf4e6e7834cc2c7f02e45693e123456789abcde"
    POS_API_URL: str = "https://fred808-desk-back2.hf.space"  # Default value, should be overridden in environment

    # PostgreSQL Database settings
    # NOTE(review): annotated ClassVar, so pydantic treats this as a plain
    # class attribute rather than a settings field — it presumably cannot
    # be overridden via the environment (confirm against pydantic-settings
    # docs). It also embeds live credentials; see security note above.
    DATABASE_URL: ClassVar[str] = "postgresql+asyncpg://postgres.juycnkjuzylnbruwaqmp:Lovyelias5584.@aws-0-eu-central-1.pooler.supabase.com:5432/postgres"

    # JWT Settings
    SECRET_KEY: str = "your-secret-key-here"  # placeholder — must be overridden in production
    ALGORITHM: str = "HS256"
    ACCESS_TOKEN_EXPIRE_MINUTES: int = 30

    # Redis settings
    REDIS_HOST: str = "localhost"
    REDIS_PORT: int = 6379
    # NOTE(review): this f-string is evaluated once, at class-definition
    # time, from the two defaults directly above — environment overrides
    # of REDIS_HOST / REDIS_PORT do NOT propagate into it; override
    # REDIS_URL itself instead.
    REDIS_URL: str = f"redis://{REDIS_HOST}:{REDIS_PORT}"

    # Email settings
    MAIL_USERNAME: str = "yungdml31@gmail.com"
    MAIL_PASSWORD: str = ""
    MAIL_FROM: str = "admin@angelo.com"
    MAIL_PORT: int = 587
    MAIL_SERVER: str = "smtp.gmail.com"

    # Frontend URL
    FRONTEND_URL: str = "http://localhost:3000"

    class Config:
        # Environment variable names must match field names exactly.
        case_sensitive = True


# Module-level singleton imported throughout the app.
settings = Settings()
app/core/dependencies.py CHANGED
@@ -1,53 +1,53 @@
1
- from fastapi import Depends, HTTPException, status
2
- from fastapi.security import OAuth2PasswordBearer
3
- from sqlalchemy.ext.asyncio import AsyncSession
4
- from sqlalchemy import select
5
- from sqlalchemy.orm import selectinload
6
- from jose import JWTError, jwt
7
- from ..db.database import get_db
8
- from ..db.models import User
9
- from ..core.config import settings
10
-
11
- oauth2_scheme = OAuth2PasswordBearer(tokenUrl=f"{settings.API_V1_STR}/auth/login")
12
-
13
- async def get_current_user(
14
- token: str = Depends(oauth2_scheme),
15
- db: AsyncSession = Depends(get_db)
16
- ):
17
- credentials_exception = HTTPException(
18
- status_code=status.HTTP_401_UNAUTHORIZED,
19
- detail="Could not validate credentials",
20
- headers={"WWW-Authenticate": "Bearer"},
21
- )
22
-
23
- try:
24
- payload = jwt.decode(token, settings.SECRET_KEY, algorithms=[settings.ALGORITHM])
25
- user_id: str = payload.get("sub")
26
- if user_id is None:
27
- raise credentials_exception
28
- except JWTError:
29
- raise credentials_exception
30
-
31
- stmt = select(User).options(selectinload(User.roles)).where(User.id == int(user_id))
32
- result = await db.execute(stmt)
33
- user = result.scalar_one_or_none()
34
-
35
- if user is None:
36
- raise credentials_exception
37
- return user
38
-
39
- async def get_current_active_user(
40
- current_user: User = Depends(get_current_user)
41
- ):
42
- if not current_user.is_active:
43
- raise HTTPException(status_code=400, detail="Inactive user")
44
- return current_user
45
-
46
- async def get_current_superuser(
47
- current_user: User = Depends(get_current_user)
48
- ):
49
- if not current_user.is_superuser:
50
- raise HTTPException(
51
- status_code=403, detail="The user doesn't have enough privileges"
52
- )
53
  return current_user
 
1
+ from fastapi import Depends, HTTPException, status
2
+ from fastapi.security import OAuth2PasswordBearer
3
+ from sqlalchemy.ext.asyncio import AsyncSession
4
+ from sqlalchemy import select
5
+ from sqlalchemy.orm import selectinload
6
+ from jose import JWTError, jwt
7
+ from ..db.database import get_db
8
+ from ..db.models import User
9
+ from ..core.config import settings
10
+
11
+ oauth2_scheme = OAuth2PasswordBearer(tokenUrl=f"{settings.API_V1_STR}/auth/login")
12
+
13
async def get_current_user(
    token: str = Depends(oauth2_scheme),
    db: AsyncSession = Depends(get_db)
):
    """Resolve the bearer token to a ``User`` row.

    Decodes the JWT with the app's secret key, reads the user id from the
    ``sub`` claim and loads that user with roles eagerly fetched.
    Raises HTTP 401 on any decode failure, missing claim, or unknown user.
    """
    credentials_exception = HTTPException(
        status_code=status.HTTP_401_UNAUTHORIZED,
        detail="Could not validate credentials",
        headers={"WWW-Authenticate": "Bearer"},
    )

    try:
        payload = jwt.decode(token, settings.SECRET_KEY, algorithms=[settings.ALGORITHM])
        user_id: str = payload.get("sub")
        if user_id is None:
            raise credentials_exception
    except JWTError:
        raise credentials_exception

    # selectinload fetches roles in the same request so later attribute
    # access does not trigger a lazy load (unsupported under AsyncSession).
    stmt = select(User).options(selectinload(User.roles)).where(User.id == int(user_id))
    result = await db.execute(stmt)
    user = result.scalar_one_or_none()

    if user is None:
        raise credentials_exception
    return user
38
+
39
async def get_current_active_user(
    current_user: User = Depends(get_current_user)
):
    """Return the authenticated user, rejecting deactivated accounts."""
    if current_user.is_active:
        return current_user
    raise HTTPException(status_code=400, detail="Inactive user")
45
+
46
async def get_current_superuser(
    current_user: User = Depends(get_current_user)
):
    """Return the authenticated user, rejecting non-superusers."""
    if current_user.is_superuser:
        return current_user
    raise HTTPException(
        status_code=403, detail="The user doesn't have enough privileges"
    )
app/db/__pycache__/models.cpython-312.pyc CHANGED
Binary files a/app/db/__pycache__/models.cpython-312.pyc and b/app/db/__pycache__/models.cpython-312.pyc differ
 
app/db/models.py CHANGED
@@ -1,9 +1,10 @@
1
- from sqlalchemy import Column, Integer, String, Boolean, DateTime, Float, ForeignKey, ARRAY, JSON, Table
2
  from sqlalchemy.orm import relationship, mapped_column, Mapped
3
  from sqlalchemy.dialects.postgresql import JSONB
4
  from datetime import datetime
5
  from typing import List, Optional
6
  from .database import Base
 
7
 
8
  # Association tables for many-to-many relationships
9
  user_roles = Table(
@@ -44,6 +45,8 @@ class Branch(Base):
44
  users = relationship("User", back_populates="branch")
45
  products = relationship("Product", back_populates="branch")
46
  orders = relationship("Order", back_populates="branch")
 
 
47
 
48
  class User(Base):
49
  __tablename__ = "users"
@@ -67,6 +70,8 @@ class User(Base):
67
  orders = relationship("Order", back_populates="customer")
68
  notifications = relationship("Notification", back_populates="user")
69
  sessions = relationship("Session", back_populates="user", cascade="all, delete-orphan")
 
 
70
 
71
  class Product(Base):
72
  __tablename__ = "products"
@@ -181,6 +186,51 @@ class Session(Base):
181
  created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow)
182
  expires_at: Mapped[datetime] = mapped_column(DateTime)
183
  is_active: Mapped[bool] = mapped_column(Boolean, default=True)
 
184
 
185
  # Relationship
186
- user = relationship("User", back_populates="sessions")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from sqlalchemy import Column, Integer, String, Boolean, DateTime, Float, ForeignKey, ARRAY, JSON, Table, Enum
2
  from sqlalchemy.orm import relationship, mapped_column, Mapped
3
  from sqlalchemy.dialects.postgresql import JSONB
4
  from datetime import datetime
5
  from typing import List, Optional
6
  from .database import Base
7
+ import enum
8
 
9
  # Association tables for many-to-many relationships
10
  user_roles = Table(
 
45
  users = relationship("User", back_populates="branch")
46
  products = relationship("Product", back_populates="branch")
47
  orders = relationship("Order", back_populates="branch")
48
+ staff_activities = relationship("StaffActivity", back_populates="branch")
49
+ staff_metrics = relationship("PerformanceMetric", back_populates="branch")
50
 
51
  class User(Base):
52
  __tablename__ = "users"
 
70
  orders = relationship("Order", back_populates="customer")
71
  notifications = relationship("Notification", back_populates="user")
72
  sessions = relationship("Session", back_populates="user", cascade="all, delete-orphan")
73
+ activities = relationship("StaffActivity", back_populates="user")
74
+ performance_metrics = relationship("PerformanceMetric", back_populates="user")
75
 
76
  class Product(Base):
77
  __tablename__ = "products"
 
186
  created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow)
187
  expires_at: Mapped[datetime] = mapped_column(DateTime)
188
  is_active: Mapped[bool] = mapped_column(Boolean, default=True)
189
+ iteration_active: Mapped[bool] = mapped_column(Boolean, default=True)
190
 
191
  # Relationship
192
+ user = relationship("User", back_populates="sessions")
193
+
194
class ActivityType(str, enum.Enum):
    """Kinds of staff activity tracked for performance analytics.

    Inherits ``str`` so members serialize naturally to their string
    values in JSON payloads and database columns.
    """
    LOGIN = "login"
    LOGOUT = "logout"
    SALE = "sale"
    VOID = "void"
    REFUND = "refund"
    INVENTORY = "inventory"
    CUSTOMER_SERVICE = "customer_service"
+
203
class StaffActivity(Base):
    """A single logged staff action (sale, login, refund, ...) tied to a
    user and a branch; the raw material for performance metrics."""

    __tablename__ = "staff_activities"

    id: Mapped[int] = mapped_column(primary_key=True)
    user_id: Mapped[int] = mapped_column(ForeignKey("users.id"))
    branch_id: Mapped[int] = mapped_column(ForeignKey("branches.id"))
    # Stored as a plain string; see ActivityType for the expected values.
    activity_type: Mapped[str] = mapped_column(String)
    details: Mapped[dict] = mapped_column(JSON)
    created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow)
    duration: Mapped[Optional[float]] = mapped_column(Float, nullable=True)  # Duration in minutes if applicable
    performance_score: Mapped[Optional[float]] = mapped_column(Float, nullable=True)  # Score based on activity type

    # Relationships
    user = relationship("User", back_populates="activities")
    branch = relationship("Branch", back_populates="staff_activities")
218
+
219
class PerformanceMetric(Base):
    """Per-user, per-branch rollup of staff performance figures for a
    given ``metric_date``."""

    __tablename__ = "performance_metrics"

    id: Mapped[int] = mapped_column(primary_key=True)
    user_id: Mapped[int] = mapped_column(ForeignKey("users.id"))
    branch_id: Mapped[int] = mapped_column(ForeignKey("branches.id"))
    metric_date: Mapped[datetime] = mapped_column(DateTime)
    total_sales: Mapped[float] = mapped_column(Float, default=0)
    transaction_count: Mapped[int] = mapped_column(Integer, default=0)
    average_transaction_value: Mapped[float] = mapped_column(Float, default=0)
    void_count: Mapped[int] = mapped_column(Integer, default=0)
    customer_interaction_count: Mapped[int] = mapped_column(Integer, default=0)
    login_time: Mapped[float] = mapped_column(Float, default=0)  # Total minutes logged in
    efficiency_score: Mapped[float] = mapped_column(Float, default=0)  # Calculated score based on metrics

    # Relationships
    user = relationship("User", back_populates="performance_metrics")
    branch = relationship("Branch", back_populates="staff_metrics")
app/db/schemas.py CHANGED
@@ -1,318 +1,387 @@
1
- from sqlalchemy.orm import validates
2
- from sqlalchemy import event
3
- from datetime import datetime
4
- from typing import List, Optional, Dict
5
- from pydantic import BaseModel, EmailStr, validator
6
- from .models import User, Product, Order, Event, Notification, Session
7
- import re
8
-
9
- # Role schemas
10
- class RoleBase(BaseModel):
11
- name: str
12
- description: str
13
- permissions: List[str] = []
14
-
15
- class RoleCreate(RoleBase):
16
- pass
17
-
18
- class RoleUpdate(RoleBase):
19
- name: Optional[str] = None
20
- description: Optional[str] = None
21
- permissions: Optional[List[str]] = None
22
-
23
- class RoleInDB(RoleBase):
24
- id: int
25
- created_at: datetime
26
- updated_at: Optional[datetime] = None
27
-
28
- class Config:
29
- from_attributes = True
30
-
31
- class BranchBase(BaseModel):
32
- name: str
33
- address: str
34
- phone: str
35
- email: EmailStr
36
- is_active: bool = True
37
-
38
- class BranchCreate(BranchBase):
39
- pass
40
-
41
- class BranchInDB(BranchBase):
42
- id: int
43
- created_at: datetime
44
- updated_at: Optional[datetime] = None
45
-
46
- class Config:
47
- from_attributes = True
48
-
49
- # Update User schemas
50
- class UserBase(BaseModel):
51
- email: EmailStr
52
- username: str
53
- full_name: str
54
- is_active: bool = True
55
- is_superuser: bool = False
56
- branch_id: Optional[int] = None
57
-
58
- class UserCreate(UserBase):
59
- password: str
60
- role_ids: Optional[List[int]] = None # IDs of roles to assign
61
-
62
- class UserUpdate(BaseModel):
63
- email: Optional[EmailStr] = None
64
- username: Optional[str] = None
65
- full_name: Optional[str] = None
66
- is_active: Optional[bool] = None
67
- is_superuser: Optional[bool] = None
68
- password: Optional[str] = None
69
- branch_id: Optional[int] = None
70
- role_ids: Optional[List[int]] = None
71
-
72
- class UserInDB(UserBase):
73
- id: int
74
- created_at: datetime
75
- roles: List[RoleInDB]
76
-
77
- class Config:
78
- from_attributes = True
79
-
80
- class ProductBase(BaseModel):
81
- name: str
82
- description: str
83
- price: float
84
- category: str
85
- inventory_count: int
86
- seller_id: int
87
- branch_id: int
88
-
89
- class ProductCreate(ProductBase):
90
- pass
91
-
92
- class ProductInDB(ProductBase):
93
- id: int
94
- created_at: datetime
95
- updated_at: Optional[datetime] = None
96
-
97
- class Config:
98
- from_attributes = True
99
-
100
- class OrderItemBase(BaseModel):
101
- product_id: int
102
- quantity: int
103
- price: float
104
-
105
- class OrderItemCreate(OrderItemBase):
106
- pass
107
-
108
- class OrderItemInDB(OrderItemBase):
109
- id: int
110
- order_id: int
111
-
112
- class Config:
113
- from_attributes = True
114
-
115
- class OrderBase(BaseModel):
116
- customer_id: int
117
- branch_id: int
118
- total_amount: float
119
- status: str = "pending"
120
- items: List[OrderItemCreate]
121
-
122
- class OrderCreate(OrderBase):
123
- pass
124
-
125
- class OrderInDB(OrderBase):
126
- id: int
127
- created_at: datetime
128
- updated_at: Optional[datetime] = None
129
- items: List[OrderItemInDB]
130
-
131
- class Config:
132
- from_attributes = True
133
-
134
- class NotificationBase(BaseModel):
135
- user_id: int
136
- title: str
137
- message: str
138
- type: str
139
- data: Optional[dict] = None
140
- read: bool = False
141
-
142
- class NotificationCreate(NotificationBase):
143
- pass
144
-
145
- class NotificationInDB(NotificationBase):
146
- id: int
147
- created_at: datetime
148
-
149
- class Config:
150
- from_attributes = True
151
-
152
- class EventBase(BaseModel):
153
- title: str
154
- description: str
155
- start_time: datetime
156
- end_time: datetime
157
- is_all_day: bool = False
158
- reminder_minutes: int = 30
159
-
160
- @validator('end_time')
161
- def end_time_after_start_time(cls, v, values):
162
- if 'start_time' in values and v <= values['start_time']:
163
- raise ValueError('end_time must be after start_time')
164
- return v
165
-
166
- @validator('reminder_minutes')
167
- def valid_reminder_minutes(cls, v):
168
- if v < 0:
169
- raise ValueError('reminder_minutes cannot be negative')
170
- return v
171
-
172
- class EventCreate(EventBase):
173
- attendees: List[str] = []
174
-
175
- class EventUpdate(BaseModel):
176
- title: Optional[str] = None
177
- description: Optional[str] = None
178
- start_time: Optional[datetime] = None
179
- end_time: Optional[datetime] = None
180
- is_all_day: Optional[bool] = None
181
- reminder_minutes: Optional[int] = None
182
- attendees: Optional[List[str]] = None
183
-
184
- @validator('reminder_minutes')
185
- def valid_reminder_minutes(cls, v):
186
- if v is not None and v < 0:
187
- raise ValueError('reminder_minutes cannot be negative')
188
- return v
189
-
190
- class EventInDB(EventBase):
191
- id: int
192
- user_id: int
193
- attendees: List[str]
194
- status: str
195
- attendee_responses: Dict[str, str]
196
- created_at: datetime
197
- updated_at: Optional[datetime] = None
198
- reminder_sent: bool = False
199
- is_recurring: bool = False
200
- recurrence_group: Optional[str] = None
201
- parent_event_id: Optional[int] = None
202
- sequence_number: Optional[int] = None
203
-
204
- class Config:
205
- from_attributes = True
206
-
207
- class RecurringEventCreate(EventCreate):
208
- recurrence_pattern: str
209
- recurrence_end_date: Optional[datetime] = None
210
-
211
- @validator('recurrence_pattern')
212
- def valid_recurrence_pattern(cls, v):
213
- valid_patterns = ['daily', 'weekly', 'monthly', 'yearly']
214
- if v not in valid_patterns:
215
- raise ValueError(f'recurrence_pattern must be one of: {", ".join(valid_patterns)}')
216
- return v
217
-
218
- @validator('recurrence_end_date')
219
- def end_date_after_start_time(cls, v, values):
220
- if v is not None and 'start_time' in values and v <= values['start_time']:
221
- raise ValueError('recurrence_end_date must be after start_time')
222
- return v
223
-
224
- class RecurringEventUpdate(EventUpdate):
225
- recurrence_pattern: Optional[str] = None
226
- recurrence_end_date: Optional[datetime] = None
227
-
228
- @validator('recurrence_pattern')
229
- def valid_recurrence_pattern(cls, v):
230
- if v is not None:
231
- valid_patterns = ['daily', 'weekly', 'monthly', 'yearly']
232
- if v not in valid_patterns:
233
- raise ValueError(f'recurrence_pattern must be one of: {", ".join(valid_patterns)}')
234
- return v
235
-
236
- class SessionBase(BaseModel):
237
- user_id: int
238
- session_token: str
239
- expires_at: datetime
240
- is_active: bool = True
241
-
242
- class SessionCreate(SessionBase):
243
- pass
244
-
245
- class SessionUpdate(BaseModel):
246
- last_activity: Optional[datetime] = None
247
- expires_at: Optional[datetime] = None
248
- is_active: Optional[bool] = None
249
-
250
- class SessionInDB(SessionBase):
251
- id: int
252
- created_at: datetime
253
- last_activity: datetime
254
-
255
- class Config:
256
- from_attributes = True
257
-
258
- class LoginData(BaseModel):
259
- email: str
260
- password: str
261
-
262
- @validates('email')
263
- def validate_email(self, key, email):
264
- if not re.match(r'^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}$', email):
265
- raise ValueError('Invalid email address')
266
- return email
267
-
268
- @validates('username')
269
- def validate_username(self, key, username):
270
- if len(username) < 3:
271
- raise ValueError('Username must be at least 3 characters long')
272
- return username
273
-
274
- @validates('inventory_count')
275
- def validate_inventory(self, key, count):
276
- if count < 0:
277
- raise ValueError('Inventory count cannot be negative')
278
- return count
279
-
280
- @validates('price')
281
- def validate_price(self, key, price):
282
- if price < 0:
283
- raise ValueError('Price cannot be negative')
284
- return price
285
-
286
- # Event listeners for automatic timestamps
287
- @event.listens_for(Product, 'before_insert')
288
- def set_created_at(mapper, connection, target):
289
- target.created_at = datetime.utcnow()
290
- target.updated_at = datetime.utcnow()
291
-
292
- @event.listens_for(Product, 'before_update')
293
- def set_updated_at(mapper, connection, target):
294
- target.updated_at = datetime.utcnow()
295
-
296
- @event.listens_for(Order, 'before_insert')
297
- def set_order_created_at(mapper, connection, target):
298
- target.created_at = datetime.utcnow()
299
- target.updated_at = datetime.utcnow()
300
-
301
- @event.listens_for(Order, 'before_update')
302
- def set_order_updated_at(mapper, connection, target):
303
- target.updated_at = datetime.utcnow()
304
-
305
- @event.listens_for(Event, 'before_insert')
306
- def set_event_created_at(mapper, connection, target):
307
- target.created_at = datetime.utcnow()
308
- target.updated_at = datetime.utcnow()
309
-
310
- @event.listens_for(Event, 'before_update')
311
- def set_event_updated_at(mapper, connection, target):
312
- target.updated_at = datetime.utcnow()
313
-
314
- # Add validators to models
315
- User.validate_email = validate_email
316
- User.validate_username = validate_username
317
- Product.validate_inventory = validate_inventory
318
- Product.validate_price = validate_price
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from sqlalchemy.orm import validates
2
+ from sqlalchemy import event
3
+ from datetime import datetime
4
+ from typing import List, Optional, Dict, Any
5
+ from pydantic import BaseModel, EmailStr, validator
6
+ from .models import User, Product, Order, Event, Notification, Session, ActivityType
7
+ import re
8
+
9
+ # Role schemas
10
+ class RoleBase(BaseModel):
11
+ name: str
12
+ description: str
13
+ permissions: List[str] = []
14
+
15
+ class RoleCreate(RoleBase):
16
+ pass
17
+
18
+ class RoleUpdate(RoleBase):
19
+ name: Optional[str] = None
20
+ description: Optional[str] = None
21
+ permissions: Optional[List[str]] = None
22
+
23
+ class RoleInDB(RoleBase):
24
+ id: int
25
+ created_at: datetime
26
+ updated_at: Optional[datetime] = None
27
+
28
+ class Config:
29
+ from_attributes = True
30
+
31
+ class BranchBase(BaseModel):
32
+ name: str
33
+ address: str
34
+ phone: str
35
+ email: EmailStr
36
+ is_active: bool = True
37
+
38
+ class BranchCreate(BranchBase):
39
+ pass
40
+
41
+ class BranchInDB(BranchBase):
42
+ id: int
43
+ created_at: datetime
44
+ updated_at: Optional[datetime] = None
45
+
46
+ class Config:
47
+ from_attributes = True
48
+
49
+ # Update User schemas
50
+ class UserBase(BaseModel):
51
+ email: EmailStr
52
+ username: str
53
+ full_name: str
54
+ is_active: bool = True
55
+ is_superuser: bool = False
56
+ branch_id: Optional[int] = None
57
+
58
+ class UserCreate(UserBase):
59
+ password: str
60
+ role_ids: Optional[List[int]] = None # IDs of roles to assign
61
+
62
+ class UserUpdate(BaseModel):
63
+ email: Optional[EmailStr] = None
64
+ username: Optional[str] = None
65
+ full_name: Optional[str] = None
66
+ is_active: Optional[bool] = None
67
+ is_superuser: Optional[bool] = None
68
+ password: Optional[str] = None
69
+ branch_id: Optional[int] = None
70
+ role_ids: Optional[List[int]] = None
71
+
72
+ class UserInDB(UserBase):
73
+ id: int
74
+ created_at: datetime
75
+ roles: List[RoleInDB]
76
+
77
+ class Config:
78
+ from_attributes = True
79
+
80
class ProductBase(BaseModel):
    """Shared product fields; seller and branch are referenced by id."""
    name: str
    description: str
    price: float
    category: str
    inventory_count: int
    seller_id: int
    branch_id: int

class ProductCreate(ProductBase):
    """Creation payload — exactly the base fields."""
    pass

class ProductInDB(ProductBase):
    """Product as persisted (adds primary key and timestamps)."""
    id: int
    created_at: datetime
    updated_at: Optional[datetime] = None

    class Config:
        from_attributes = True

class OrderItemBase(BaseModel):
    """One line of an order: product, quantity, and unit price at sale time."""
    product_id: int
    quantity: int
    price: float

class OrderItemCreate(OrderItemBase):
    """Creation payload — exactly the base fields."""
    pass

class OrderItemInDB(OrderItemBase):
    """Order line as persisted (adds its own id and owning order id)."""
    id: int
    order_id: int

    class Config:
        from_attributes = True

class OrderBase(BaseModel):
    """Shared order fields; status defaults to "pending" for new orders."""
    customer_id: int
    branch_id: int
    total_amount: float
    status: str = "pending"
    items: List[OrderItemCreate]

class OrderCreate(OrderBase):
    """Creation payload — exactly the base fields."""
    pass

class OrderInDB(OrderBase):
    """Order as persisted, with stored line items."""
    id: int
    created_at: datetime
    updated_at: Optional[datetime] = None
    items: List[OrderItemInDB]

    class Config:
        from_attributes = True

class NotificationBase(BaseModel):
    """Shared notification fields; `data` carries an optional type-specific payload."""
    user_id: int
    title: str
    message: str
    type: str
    data: Optional[dict] = None
    read: bool = False

class NotificationCreate(NotificationBase):
    """Creation payload — exactly the base fields."""
    pass

class NotificationInDB(NotificationBase):
    """Notification as persisted (adds id and creation timestamp)."""
    id: int
    created_at: datetime

    class Config:
        from_attributes = True
151
+
152
class EventBase(BaseModel):
    """Shared calendar-event fields with basic cross-field validation."""
    title: str
    description: str
    start_time: datetime
    end_time: datetime
    is_all_day: bool = False
    reminder_minutes: int = 30  # minutes before start_time to fire the reminder

    # pydantic-v1 style validators (the file imports `validator`, not v2's field_validator)
    @validator('end_time')
    def end_time_after_start_time(cls, v, values):
        # `values` only holds fields already validated, hence the membership guard
        if 'start_time' in values and v <= values['start_time']:
            raise ValueError('end_time must be after start_time')
        return v

    @validator('reminder_minutes')
    def valid_reminder_minutes(cls, v):
        if v < 0:
            raise ValueError('reminder_minutes cannot be negative')
        return v

class EventCreate(EventBase):
    """Creation payload; attendees are plain strings (presumably emails — TODO confirm)."""
    attendees: List[str] = []

class EventUpdate(BaseModel):
    """Patch payload: all fields optional; only reminder_minutes is re-validated."""
    title: Optional[str] = None
    description: Optional[str] = None
    start_time: Optional[datetime] = None
    end_time: Optional[datetime] = None
    is_all_day: Optional[bool] = None
    reminder_minutes: Optional[int] = None
    attendees: Optional[List[str]] = None

    @validator('reminder_minutes')
    def valid_reminder_minutes(cls, v):
        if v is not None and v < 0:
            raise ValueError('reminder_minutes cannot be negative')
        return v

class EventInDB(EventBase):
    """Event as persisted, including recurrence bookkeeping and RSVP state."""
    id: int
    user_id: int  # owner/creator of the event
    attendees: List[str]
    status: str
    attendee_responses: Dict[str, str]  # attendee -> response string
    created_at: datetime
    updated_at: Optional[datetime] = None
    reminder_sent: bool = False
    is_recurring: bool = False
    recurrence_group: Optional[str] = None  # groups all instances of one series
    parent_event_id: Optional[int] = None
    sequence_number: Optional[int] = None  # position within the recurrence series

    class Config:
        from_attributes = True

class RecurringEventCreate(EventCreate):
    """Creation payload for a recurring series."""
    recurrence_pattern: str  # one of: daily, weekly, monthly, yearly
    recurrence_end_date: Optional[datetime] = None  # None = repeats indefinitely

    @validator('recurrence_pattern')
    def valid_recurrence_pattern(cls, v):
        valid_patterns = ['daily', 'weekly', 'monthly', 'yearly']
        if v not in valid_patterns:
            raise ValueError(f'recurrence_pattern must be one of: {", ".join(valid_patterns)}')
        return v

    @validator('recurrence_end_date')
    def end_date_after_start_time(cls, v, values):
        if v is not None and 'start_time' in values and v <= values['start_time']:
            raise ValueError('recurrence_end_date must be after start_time')
        return v

class RecurringEventUpdate(EventUpdate):
    """Patch payload for a recurring series; pattern re-validated when supplied."""
    recurrence_pattern: Optional[str] = None
    recurrence_end_date: Optional[datetime] = None

    @validator('recurrence_pattern')
    def valid_recurrence_pattern(cls, v):
        if v is not None:
            valid_patterns = ['daily', 'weekly', 'monthly', 'yearly']
            if v not in valid_patterns:
                raise ValueError(f'recurrence_pattern must be one of: {", ".join(valid_patterns)}')
        return v

class SessionBase(BaseModel):
    """Shared server-side session fields."""
    user_id: int
    session_token: str
    expires_at: datetime
    is_active: bool = True
    iteration_active: bool = False  # NOTE(review): semantics unclear from here — confirm against Session model

class SessionCreate(SessionBase):
    """Creation payload — exactly the base fields."""
    pass

class SessionUpdate(BaseModel):
    """Patch payload for refreshing or expiring a session."""
    last_activity: Optional[datetime] = None
    expires_at: Optional[datetime] = None
    is_active: Optional[bool] = None
    iteration_active: Optional[bool] = None

class SessionInDB(SessionBase):
    """Session as persisted (adds id and activity timestamps)."""
    id: int
    created_at: datetime
    last_activity: datetime

    class Config:
        from_attributes = True
259
+
260
class LoginData(BaseModel):
    """Credentials payload accepted by the login endpoint."""
    email: str
    password: str

# --- SQLAlchemy model-level validators and listeners -----------------------
# fix: `validates` and `event` were used below without ever being imported,
# so this module raised NameError the moment it was imported.
from sqlalchemy import event
from sqlalchemy.orm import validates

@validates('email')
def validate_email(self, key, email):
    """ORM validator: reject addresses failing a basic email pattern."""
    if not re.match(r'^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}$', email):
        raise ValueError('Invalid email address')
    return email

@validates('username')
def validate_username(self, key, username):
    """ORM validator: usernames must be at least 3 characters long."""
    if len(username) < 3:
        raise ValueError('Username must be at least 3 characters long')
    return username

@validates('inventory_count')
def validate_inventory(self, key, count):
    """ORM validator: inventory can never go negative."""
    if count < 0:
        raise ValueError('Inventory count cannot be negative')
    return count

@validates('price')
def validate_price(self, key, price):
    """ORM validator: prices can never be negative."""
    if price < 0:
        raise ValueError('Price cannot be negative')
    return price

# Event listeners for automatic timestamps.
# datetime.utcnow() is kept to preserve the existing naive-UTC semantics.
@event.listens_for(Product, 'before_insert')
def set_created_at(mapper, connection, target):
    target.created_at = datetime.utcnow()
    target.updated_at = datetime.utcnow()

@event.listens_for(Product, 'before_update')
def set_updated_at(mapper, connection, target):
    target.updated_at = datetime.utcnow()

@event.listens_for(Order, 'before_insert')
def set_order_created_at(mapper, connection, target):
    target.created_at = datetime.utcnow()
    target.updated_at = datetime.utcnow()

@event.listens_for(Order, 'before_update')
def set_order_updated_at(mapper, connection, target):
    target.updated_at = datetime.utcnow()

@event.listens_for(Event, 'before_insert')
def set_event_created_at(mapper, connection, target):
    target.created_at = datetime.utcnow()
    target.updated_at = datetime.utcnow()

@event.listens_for(Event, 'before_update')
def set_event_updated_at(mapper, connection, target):
    target.updated_at = datetime.utcnow()

# Add validators to models
# NOTE(review): attaching @validates-decorated functions after mapper
# configuration may not register them with SQLAlchemy's instrumentation —
# confirm, and consider defining them inside the model classes instead.
User.validate_email = validate_email
User.validate_username = validate_username
Product.validate_inventory = validate_inventory
Product.validate_price = validate_price
321
+
322
# Staff analytics schemas
class StaffActivityBase(BaseModel):
    """One recorded staff action; `details` is a free-form per-activity payload."""
    activity_type: ActivityType
    details: Dict[str, Any]
    duration: Optional[float] = None  # activity duration, when applicable

class StaffActivityCreate(StaffActivityBase):
    """Creation payload: base fields plus the acting user and branch."""
    user_id: int
    branch_id: int

class StaffActivityInDB(StaffActivityBase):
    """Activity as persisted, with an optional computed performance score."""
    id: int
    user_id: int
    branch_id: int
    created_at: datetime
    performance_score: Optional[float] = None

    class Config:
        from_attributes = True

class PerformanceMetricBase(BaseModel):
    """Daily per-staff aggregates; all counters default to zero."""
    total_sales: float = 0
    transaction_count: int = 0
    average_transaction_value: float = 0
    void_count: int = 0
    customer_interaction_count: int = 0
    login_time: float = 0  # total logged-in time for the day
    efficiency_score: float = 0

class PerformanceMetricCreate(PerformanceMetricBase):
    """Creation payload: aggregates keyed by user, branch, and date."""
    user_id: int
    branch_id: int
    metric_date: datetime

class PerformanceMetricInDB(PerformanceMetricBase):
    """Metric row as persisted."""
    id: int
    user_id: int
    branch_id: int
    metric_date: datetime

    class Config:
        from_attributes = True

class StaffPerformanceResponse(BaseModel):
    """API response: one staff member's identity plus their metrics."""
    user_id: int
    username: str
    full_name: str
    metrics: PerformanceMetricBase

class BranchAverages(BaseModel):
    """Branch-wide daily averages used for comparison against individuals."""
    average_daily_sales: float
    average_daily_transactions: float
    average_transaction_value: float
    average_efficiency_score: float

class StaffPerformanceReport(BaseModel):
    """Full report: per-staff figures, optional branch averages, and the range."""
    staff_performance: List[StaffPerformanceResponse]
    branch_averages: Optional[BranchAverages]
    date_range: Dict[str, str]  # presumably start/end as strings — TODO confirm against producer

class RealtimeStaffMetrics(BaseModel):
    """Live snapshot pushed over WebSocket."""
    active_staff_count: int
    total_sales_today: float
    total_transactions: int
    staff_metrics: List[StaffPerformanceResponse]
    last_updated: str  # timestamp serialized as a string
app/main.py CHANGED
@@ -1,137 +1,146 @@
1
- from fastapi import FastAPI, Request, WebSocket
2
- from fastapi.middleware.cors import CORSMiddleware
3
- from .core.config import settings
4
- from .db.database import async_engine as engine, Base
5
- from .api import auth, products, orders, users, analytics, files, notifications, calendar, scheduler, maintenance
6
- from .utils.rate_limiter import rate_limiter
7
- from .utils.logger import log_api_request
8
- from .utils.tasks import run_periodic_tasks
9
- from .services.websocket import connect, disconnect
10
- from .realtime.subscriber import subscribe_order_events
11
- from .routes.websocket import websocket_endpoint, manager
12
- import socketio
13
- import time
14
- import logging
15
- import asyncio
16
- from typing import List, Dict
17
-
18
- # Configure logging
19
- logging.basicConfig(level=logging.INFO)
20
- logger = logging.getLogger(__name__)
21
-
22
- # Create Socket.IO server
23
- sio = socketio.AsyncServer(async_mode='asgi', cors_allowed_origins='*')
24
- socket_app = socketio.ASGIApp(sio)
25
-
26
- app = FastAPI(title=settings.PROJECT_NAME, version=settings.VERSION)
27
-
28
- # Store background tasks
29
- background_tasks = set()
30
-
31
- # Configure CORS
32
- app.add_middleware(
33
- CORSMiddleware,
34
- allow_origins=["*"], # Configure appropriately for production
35
- allow_credentials=True,
36
- allow_methods=["*"],
37
- allow_headers=["*"],
38
- )
39
-
40
- # Socket.IO event handlers
41
- @sio.event
42
- async def connect(sid, environ):
43
- logger.info(f"Client connected: {sid}")
44
-
45
- @sio.event
46
- async def disconnect(sid):
47
- logger.info(f"Client disconnected: {sid}")
48
-
49
- @sio.event
50
- async def message(sid, data):
51
- logger.info(f"Message from {sid}: {data}")
52
- await sio.emit('message', {'response': 'Message received'}, room=sid)
53
-
54
- # Mount Socket.IO app
55
- app.mount("/socket.io", socket_app)
56
-
57
- @app.get("/api/v1/health")
58
- async def health_check() -> Dict[str, str]:
59
- """Public health check endpoint that doesn't require authentication"""
60
- return {
61
- "status": "healthy",
62
- "version": settings.VERSION,
63
- "service": settings.PROJECT_NAME
64
- }
65
-
66
- # WebSocket endpoint
67
- @app.websocket("/ws")
68
- async def websocket_endpoint(websocket: WebSocket):
69
- await connect(websocket)
70
- try:
71
- while True:
72
- data = await websocket.receive_text()
73
- except:
74
- await disconnect(websocket)
75
-
76
- @app.websocket("/ws/orders")
77
- async def orders_websocket_endpoint(websocket: WebSocket):
78
- await websocket_endpoint(websocket)
79
-
80
- # Request logging and rate limiting middleware
81
- @app.middleware("http")
82
- async def middleware(request: Request, call_next):
83
- await rate_limiter.check_rate_limit(request)
84
- start_time = time.time()
85
- response = await call_next(request)
86
- end_time = time.time()
87
- duration = end_time - start_time
88
- log_api_request(
89
- method=request.method,
90
- path=request.url.path,
91
- status_code=response.status_code,
92
- duration=duration
93
- )
94
- return response
95
-
96
- # Application startup and shutdown events
97
- @app.on_event("startup")
98
- async def startup_event():
99
- # Create all database tables
100
- async with engine.begin() as conn:
101
- await conn.run_sync(Base.metadata.create_all)
102
-
103
- # Start Redis subscriber in background
104
- task = asyncio.create_task(subscribe_order_events(manager))
105
- background_tasks.add(task)
106
- task.add_done_callback(background_tasks.discard)
107
-
108
- # Start periodic tasks
109
- task = asyncio.create_task(run_periodic_tasks())
110
- background_tasks.add(task)
111
- task.add_done_callback(background_tasks.discard)
112
-
113
- @app.on_event("shutdown")
114
- async def shutdown_event():
115
- # Cancel background tasks
116
- for task in background_tasks:
117
- task.cancel()
118
-
119
- # Include routers
120
- app.include_router(auth.router, prefix=f"{settings.API_V1_STR}/auth", tags=["auth"])
121
- app.include_router(users.router, prefix=f"{settings.API_V1_STR}/users", tags=["users"])
122
- app.include_router(products.router, prefix=f"{settings.API_V1_STR}/products", tags=["products"])
123
- app.include_router(orders.router, prefix=f"{settings.API_V1_STR}/orders", tags=["orders"])
124
- app.include_router(analytics.router, prefix=f"{settings.API_V1_STR}/analytics", tags=["analytics"])
125
- app.include_router(files.router, prefix=f"{settings.API_V1_STR}/files", tags=["files"])
126
- app.include_router(notifications.router, prefix=f"{settings.API_V1_STR}/notifications", tags=["notifications"])
127
- app.include_router(calendar.router, prefix=f"{settings.API_V1_STR}/calendar", tags=["calendar"])
128
- app.include_router(scheduler.router, prefix=f"{settings.API_V1_STR}/scheduler", tags=["scheduler"])
129
- app.include_router(maintenance.router, prefix=f"{settings.API_V1_STR}/maintenance", tags=["maintenance"])
130
-
131
- @app.get("/")
132
- async def root():
133
- return {
134
- "message": f"Welcome to {settings.PROJECT_NAME} v{settings.VERSION}",
135
- "docs_url": "/docs",
136
- "openapi_url": "/openapi.json"
 
 
 
 
 
 
 
 
 
137
  }
 
1
+ from fastapi import FastAPI, Request, WebSocket
2
+ from fastapi.middleware.cors import CORSMiddleware
3
+ from .core.config import settings
4
+ from .db.database import async_engine as engine, Base
5
+ from .api import auth, products, orders, users, analytics, files, notifications, calendar, scheduler, maintenance, branches, staff_analytics, sessions
6
+ from .utils.rate_limiter import rate_limiter
7
+ from .utils.logger import log_api_request
8
+ from .utils.tasks import run_periodic_tasks, sync_pos_metrics_task
9
+ from .services.websocket import connect, disconnect
10
+ from .realtime.subscriber import subscribe_order_events
11
+ from .routes.websocket import websocket_endpoint, manager, router as websocket_router
12
+ import socketio
13
+ import time
14
+ import logging
15
+ import asyncio
16
+ from typing import List, Dict
17
+
18
+ # Configure logging
19
+ logging.basicConfig(level=logging.INFO)
20
+ logger = logging.getLogger(__name__)
21
+
22
+ # Create Socket.IO server
23
+ sio = socketio.AsyncServer(async_mode='asgi', cors_allowed_origins='*')
24
+ socket_app = socketio.ASGIApp(sio)
25
+
26
+ app = FastAPI(title=settings.PROJECT_NAME, version=settings.VERSION, openapi_url=f"{settings.API_V1_STR}/openapi.json")
27
+
28
+ # Store background tasks
29
+ background_tasks = set()
30
+
31
+ # Configure CORS
32
+ app.add_middleware(
33
+ CORSMiddleware,
34
+ allow_origins=["*"], # Configure appropriately for production
35
+ allow_credentials=True,
36
+ allow_methods=["*"],
37
+ allow_headers=["*"],
38
+ )
39
+
40
# Socket.IO event handlers.
# fix: the original used `@sio.event`, whose handler *name* is the event name,
# which redefined the module-level `connect`/`disconnect` imported from
# ..services.websocket and broke the plain /ws endpoint below. Registering via
# `@sio.on(...)` keeps the same Socket.IO event names while leaving the
# imported helpers intact.
@sio.on('connect')
async def sio_connect(sid, environ):
    """Log a new Socket.IO client connection."""
    logger.info(f"Client connected: {sid}")

@sio.on('disconnect')
async def sio_disconnect(sid):
    """Log a Socket.IO client disconnect."""
    logger.info(f"Client disconnected: {sid}")

@sio.on('message')
async def sio_message(sid, data):
    """Log an inbound message and send an acknowledgement back to its sender."""
    logger.info(f"Message from {sid}: {data}")
    await sio.emit('message', {'response': 'Message received'}, room=sid)
53
+
54
+ # Mount Socket.IO app
55
+ app.mount("/socket.io", socket_app)
56
+
57
@app.get("/api/v1/health")
async def health_check() -> Dict[str, str]:
    """Public health check endpoint that doesn't require authentication.

    Returns a static payload with service status, version, and name — used by
    load balancers / uptime monitors.
    """
    return {
        "status": "healthy",
        "version": settings.VERSION,
        "service": settings.PROJECT_NAME
    }
65
+
66
# WebSocket endpoint
# NOTE(review): this def shadows `websocket_endpoint` imported from
# .routes.websocket above — confirm which handler /ws/orders should delegate to.
@app.websocket("/ws")
async def websocket_endpoint(websocket: WebSocket):
    """Generic WebSocket endpoint: register the socket, then drain messages."""
    await connect(websocket)
    try:
        while True:
            # Inbound messages are currently ignored; receiving keeps the
            # connection alive and lets us detect the disconnect.
            await websocket.receive_text()
    except Exception:
        # fix: narrowed from a bare `except:` so task cancellation and
        # KeyboardInterrupt are not swallowed; client disconnects land here.
        await disconnect(websocket)

@app.websocket("/ws/orders")
async def orders_websocket_endpoint(websocket: WebSocket):
    """Alias endpoint for order events; delegates to the generic handler."""
    await websocket_endpoint(websocket)
79
+
80
# Request logging and rate limiting middleware
@app.middleware("http")
async def middleware(request: Request, call_next):
    """Rate-limit every HTTP request, then log method/path/status/duration."""
    # Rejects the request before the handler runs if the client is over limit.
    await rate_limiter.check_rate_limit(request)
    start_time = time.time()
    response = await call_next(request)
    end_time = time.time()
    duration = end_time - start_time  # wall-clock seconds spent in the handler
    log_api_request(
        method=request.method,
        path=request.url.path,
        status_code=response.status_code,
        duration=duration
    )
    return response
95
+
96
# Application startup and shutdown events
@app.on_event("startup")
async def startup_event():
    """Create DB tables and launch the long-running background tasks."""
    # Create all database tables
    async with engine.begin() as conn:
        await conn.run_sync(Base.metadata.create_all)

    # Start Redis subscriber in background; the set keeps a strong reference
    # so the task is not garbage-collected, and the done-callback drops it.
    task = asyncio.create_task(subscribe_order_events(manager))
    background_tasks.add(task)
    task.add_done_callback(background_tasks.discard)

    # Start periodic tasks
    task = asyncio.create_task(run_periodic_tasks())
    background_tasks.add(task)
    task.add_done_callback(background_tasks.discard)

    # Start POS metrics sync task
    task = asyncio.create_task(sync_pos_metrics_task())
    background_tasks.add(task)
    task.add_done_callback(background_tasks.discard)

@app.on_event("shutdown")
async def shutdown_event():
    """Cancel every background task so the event loop can exit cleanly."""
    # Cancel background tasks
    for task in background_tasks:
        task.cancel()
123
+
124
+ # Include routers
125
+ app.include_router(auth.router, prefix=f"{settings.API_V1_STR}/auth", tags=["auth"])
126
+ app.include_router(users.router, prefix=f"{settings.API_V1_STR}/users", tags=["users"])
127
+ app.include_router(products.router, prefix=f"{settings.API_V1_STR}/products", tags=["products"])
128
+ app.include_router(orders.router, prefix=f"{settings.API_V1_STR}/orders", tags=["orders"])
129
+ app.include_router(analytics.router, prefix=f"{settings.API_V1_STR}/analytics", tags=["analytics"])
130
+ app.include_router(files.router, prefix=f"{settings.API_V1_STR}/files", tags=["files"])
131
+ app.include_router(notifications.router, prefix=f"{settings.API_V1_STR}/notifications", tags=["notifications"])
132
+ app.include_router(calendar.router, prefix=f"{settings.API_V1_STR}/calendar", tags=["calendar"])
133
+ app.include_router(scheduler.router, prefix=f"{settings.API_V1_STR}/scheduler", tags=["scheduler"])
134
+ app.include_router(maintenance.router, prefix=f"{settings.API_V1_STR}/maintenance", tags=["maintenance"])
135
+ app.include_router(branches.router, prefix=f"{settings.API_V1_STR}/branches", tags=["branches"])
136
+ app.include_router(staff_analytics.router, prefix=f"{settings.API_V1_STR}/staff", tags=["staff"])
137
+ app.include_router(sessions.router, prefix=f"{settings.API_V1_STR}/sessions", tags=["sessions"])
138
+ app.include_router(websocket_router)
139
+
140
@app.get("/")
async def root():
    """Service landing endpoint: name/version plus documentation URLs."""
    # NOTE(review): the app is constructed with
    # openapi_url=f"{settings.API_V1_STR}/openapi.json", so the "/openapi.json"
    # advertised here may 404 — confirm which path is intended.
    return {
        "message": f"Welcome to {settings.PROJECT_NAME} v{settings.VERSION}",
        "docs_url": "/docs",
        "openapi_url": "/openapi.json"
    }
app/realtime/subscriber.py CHANGED
@@ -1,58 +0,0 @@
1
- import json
2
- import asyncio
3
- from redis.asyncio import Redis
4
- from typing import Optional, Dict
5
- from ..core.config import settings
6
- from ..utils.logger import logger
7
-
8
# Singleton Redis connection
_redis: Optional[Redis] = None

async def get_redis() -> Redis:
    """Get or create Redis connection (lazily-created module-level singleton)."""
    global _redis
    if _redis is None:
        _redis = Redis.from_url(settings.REDIS_URL, decode_responses=True)
    return _redis

async def _handle_message(channel: str, data: str, manager: "OrderWebSocketManager"):
    """Decode a Redis payload and fan it out to the message's branch.

    Errors (bad JSON, missing branch_id, send failures) are logged, never raised.
    """
    try:
        message = json.loads(data)
        await manager.broadcast_to_branch(message["branch_id"], {
            "type": "order.new",
            "data": message
        })
    except Exception as e:
        logger.error(f"Error handling Redis message: {str(e)}")

async def subscribe_order_events(manager: "OrderWebSocketManager"):
    """Subscribe to order events from Redis and pump them until cancelled.

    Polls the "order:new" pub/sub channel in a loop; per-message errors are
    logged and retried after a 1s back-off, while subscription-level failures
    re-raise after unsubscribing and closing the connection.
    """
    redis = await get_redis()
    pubsub = redis.pubsub()

    try:
        await pubsub.subscribe("order:new")
        logger.info("Subscribed to order events channel")

        while True:
            try:
                message = await pubsub.get_message(ignore_subscribe_messages=True)
                if message and message["type"] == "message":
                    await _handle_message(
                        message["channel"],
                        message["data"],
                        manager
                    )
            except Exception as e:
                logger.error(f"Error processing message: {str(e)}")
                await asyncio.sleep(1)  # Avoid tight loop on errors

            await asyncio.sleep(0.1)  # Avoid busy waiting

    except Exception as e:
        logger.error(f"Redis subscription error: {str(e)}")
        raise
    finally:
        await pubsub.unsubscribe("order:new")
        await redis.close()
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
app/routes/websocket.py CHANGED
@@ -1,82 +1,76 @@
1
- from fastapi import WebSocket, WebSocketDisconnect, HTTPException, Depends
2
- from typing import Dict, List, Optional
3
- from ..core.dependencies import get_current_user
4
- from ..db.models import User
 
5
  from ..utils.logger import logger
6
- import asyncio
7
 
8
- class OrderWebSocketManager:
 
 
9
  def __init__(self):
10
- # Map of branch_id -> List[WebSocket]
11
- self.active_connections: Dict[str, List[WebSocket]] = {}
 
 
 
12
 
13
- async def connect(self, websocket: WebSocket, branch_id: str):
14
- """Register new WebSocket connection for a branch"""
15
  await websocket.accept()
16
- if branch_id not in self.active_connections:
17
- self.active_connections[branch_id] = []
18
- self.active_connections[branch_id].append(websocket)
19
 
20
- async def disconnect(self, websocket: WebSocket, branch_id: str):
21
- """Remove WebSocket connection"""
22
- if branch_id in self.active_connections:
23
- try:
24
- self.active_connections[branch_id].remove(websocket)
25
- if not self.active_connections[branch_id]:
26
- del self.active_connections[branch_id]
27
- except ValueError:
28
- pass
29
 
30
- async def broadcast_to_branch(self, branch_id: str, message: dict):
31
- """Send message to all connections for a branch"""
32
- if branch_id not in self.active_connections:
33
  return
34
 
35
- disconnected = []
36
- for connection in self.active_connections[branch_id]:
37
  try:
38
  await connection.send_json(message)
39
- except:
40
- disconnected.append(connection)
41
-
42
- # Clean up disconnected clients
43
- for connection in disconnected:
44
- await self.disconnect(connection, branch_id)
45
 
46
- # Global WebSocket manager instance
47
- manager = OrderWebSocketManager()
 
 
 
 
 
 
 
48
 
49
- async def get_websocket_user(websocket: WebSocket) -> Optional[User]:
50
- """Authenticate WebSocket connection using token from query params"""
51
- try:
52
- token = websocket.query_params.get("token")
53
- if not token:
54
- return None
55
-
56
- # Use existing auth dependency
57
- user = await get_current_user(token)
58
- if not user or not user.is_active:
59
- return None
60
-
61
- return user
62
- except Exception as e:
63
- logger.error(f"WebSocket auth error: {str(e)}")
64
- return None
65
-
66
- async def websocket_endpoint(websocket: WebSocket):
67
- """WebSocket endpoint for order events"""
68
- user = await get_websocket_user(websocket)
69
- if not user or not user.branch_id:
70
- await websocket.close(code=4001)
71
- return
72
 
 
 
 
73
  try:
74
- await manager.connect(websocket, str(user.branch_id))
75
  while True:
76
- # Keep connection alive and handle client messages if needed
77
  data = await websocket.receive_text()
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
78
  except WebSocketDisconnect:
79
- await manager.disconnect(websocket, str(user.branch_id))
80
- except Exception as e:
81
- logger.error(f"WebSocket error: {str(e)}")
82
- await manager.disconnect(websocket, str(user.branch_id))
 
 
1
+ from fastapi import WebSocket, WebSocketDisconnect, APIRouter
2
+ from typing import Dict, Set, List, Any
3
+ import json
4
+ from ..services.analytics import staff_analytics
5
+ from ..core.config import settings
6
  from ..utils.logger import logger
 
7
 
8
+ router = APIRouter()
9
+
10
class ConnectionManager:
    """Tracks WebSocket connections per named channel and fans out JSON messages."""

    def __init__(self):
        # channel name -> set of live sockets; known channels pre-created
        self.active_connections: Dict[str, Set[WebSocket]] = {
            "orders": set(),
            "notifications": set(),
            "staff_metrics": set(),
        }

    async def connect(self, websocket: WebSocket, channel: str):
        """Accept the socket and register it under *channel* (created on demand)."""
        await websocket.accept()
        if channel not in self.active_connections:
            self.active_connections[channel] = set()
        self.active_connections[channel].add(websocket)

    def disconnect(self, websocket: WebSocket, channel: str):
        """Forget a socket; discard() makes a double-disconnect a no-op.

        fix: the original used .remove(), which raised KeyError when the socket
        had already been dropped (e.g. by broadcast() below).
        """
        self.active_connections[channel].discard(websocket)

    async def broadcast(self, message: Dict[str, Any], channel: str):
        """Send *message* to every socket on *channel*, dropping dead ones.

        fix: the original mutated the connection set while iterating it
        (RuntimeError) and `await`ed the synchronous disconnect() (TypeError);
        iterate a snapshot and call disconnect() synchronously instead.
        """
        if channel not in self.active_connections:
            return

        for connection in list(self.active_connections[channel]):
            try:
                await connection.send_json(message)
            except Exception as e:
                logger.error(f"Error broadcasting to websocket: {str(e)}")
                self.disconnect(connection, channel)

    async def broadcast_staff_metrics(self, metrics: Dict[str, Any]):
        """Broadcast staff performance metrics update"""
        await self.broadcast(
            {
                "type": "staff_metrics_update",
                "data": metrics
            },
            "staff_metrics"
        )

manager = ConnectionManager()
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
49
 
50
@router.websocket("/ws/staff")
async def staff_metrics_websocket(websocket: WebSocket):
    """Serve staff performance metrics to a client on request.

    Protocol: the client sends JSON messages; a message of the form
    {"type": "subscribe", "branch_id": ...} triggers one metrics snapshot
    pushed back as {"type": "staff_metrics_update", "data": ...}. Malformed
    JSON and per-message failures are logged and the loop continues; the
    socket is unregistered only when the client disconnects.
    """
    await manager.connect(websocket, "staff_metrics")
    try:
        while True:
            data = await websocket.receive_text()
            try:
                message = json.loads(data)
                if message.get("type") == "subscribe" and message.get("branch_id"):
                    # Get real-time metrics for the branch
                    metrics = await staff_analytics.get_staff_performance(
                        branch_id=message["branch_id"]
                    )
                    await websocket.send_json({
                        "type": "staff_metrics_update",
                        "data": metrics
                    })
            except json.JSONDecodeError:
                logger.error("Invalid WebSocket message format")
            except Exception as e:
                logger.error(f"Error processing WebSocket message: {str(e)}")
    except WebSocketDisconnect:
        manager.disconnect(websocket, "staff_metrics")

async def broadcast_staff_update(metrics: Dict[str, Any]):
    """Broadcast staff performance update to all connected clients"""
    await manager.broadcast_staff_metrics(metrics)
app/services/analytics.py CHANGED
@@ -1,7 +1,12 @@
1
  from datetime import datetime, timedelta
 
2
  from ..db.database import db
3
  from ..utils.cache import cache
4
- from typing import Dict, List, Any
 
 
 
 
5
 
6
  class AnalyticsService:
7
  @staticmethod
@@ -128,4 +133,284 @@ class AnalyticsService:
128
  await cache.set_cache(cache_key, result, expire=3600) # Cache for 1 hour
129
  return result
130
 
131
- analytics = AnalyticsService()
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
  from datetime import datetime, timedelta
2
+ from sqlalchemy import func, and_, select, extract
3
  from ..db.database import db
4
  from ..utils.cache import cache
5
+ from ..db.models import StaffActivity, PerformanceMetric, User, ActivityType
6
+ from ..services.performance_notifications import performance_notifications
7
+ from typing import Dict, List, Any, Optional
8
+ import numpy as np
9
+ from collections import defaultdict
10
 
11
  class AnalyticsService:
12
  @staticmethod
 
133
  await cache.set_cache(cache_key, result, expire=3600) # Cache for 1 hour
134
  return result
135
 
136
+ class StaffAnalyticsService:
137
+ @staticmethod
138
+ async def record_activity(
139
+ user_id: int,
140
+ branch_id: int,
141
+ activity_type: ActivityType,
142
+ details: dict,
143
+ duration: Optional[float] = None
144
+ ) -> StaffActivity:
145
+ """Record a staff activity with performance scoring and notifications"""
146
+
147
+ # Calculate performance score based on activity type and details
148
+ score = None
149
+ if activity_type == ActivityType.SALE:
150
+ # Score based on sale amount and speed
151
+ amount = details.get('amount', 0)
152
+ duration = details.get('duration', 0) # duration in minutes
153
+ if duration > 0:
154
+ score = min((amount / 100) * (5 / duration), 10) # Max score of 10
155
+ elif activity_type == ActivityType.CUSTOMER_SERVICE:
156
+ # Score based on interaction quality
157
+ satisfaction = details.get('customer_satisfaction', 0)
158
+ resolution_time = details.get('resolution_time', 0)
159
+ if resolution_time > 0:
160
+ score = min((satisfaction * 2) * (10 / resolution_time), 10)
161
+
162
+ async with db() as session:
163
+ activity = StaffActivity(
164
+ user_id=user_id,
165
+ branch_id=branch_id,
166
+ activity_type=activity_type,
167
+ details=details,
168
+ duration=duration,
169
+ performance_score=score
170
+ )
171
+ session.add(activity)
172
+ await session.commit()
173
+ await session.refresh(activity)
174
+
175
+ # Get current metrics before update
176
+ prev_metrics = await StaffAnalyticsService._get_current_metrics(user_id, branch_id)
177
+
178
+ # Update daily performance metrics
179
+ new_metrics = await StaffAnalyticsService._update_performance_metrics(
180
+ user_id, branch_id, activity
181
+ )
182
+
183
+ # Check and send notifications based on new metrics
184
+ await performance_notifications.check_and_notify_performance(
185
+ user_id=user_id,
186
+ branch_id=branch_id,
187
+ metrics=new_metrics
188
+ )
189
+
190
+ # Check for realtime alerts by comparing with previous metrics
191
+ if prev_metrics:
192
+ await performance_notifications.notify_realtime_alerts(
193
+ branch_id=branch_id,
194
+ current_metrics=new_metrics,
195
+ previous_metrics=prev_metrics
196
+ )
197
+
198
+ return activity
199
+
200
    @staticmethod
    async def _get_current_metrics(user_id: int, branch_id: int) -> Optional[Dict[str, Any]]:
        """Get current day's metrics for a user, or None if none recorded yet.

        Buckets metric rows by the naive-UTC calendar date (datetime.utcnow),
        matching how _update_performance_metrics stores them.
        """
        today = datetime.utcnow().date()

        async with db() as session:
            stmt = select(PerformanceMetric).where(
                and_(
                    PerformanceMetric.user_id == user_id,
                    PerformanceMetric.branch_id == branch_id,
                    func.date(PerformanceMetric.metric_date) == today
                )
            )
            result = await session.execute(stmt)
            metric = result.scalar_one_or_none()

            if metric:
                # Flatten the ORM row into the dict shape the notifiers expect.
                return {
                    "total_sales": metric.total_sales,
                    "transaction_count": metric.transaction_count,
                    "void_count": metric.void_count,
                    "efficiency_score": metric.efficiency_score,
                    "customer_interaction_count": metric.customer_interaction_count
                }
            return None
225
+
226
    @staticmethod
    async def _update_performance_metrics(
        user_id: int,
        branch_id: int,
        activity: StaffActivity
    ) -> Dict[str, Any]:
        """Update daily performance metrics based on new activity.

        Gets (or creates) today's PerformanceMetric row for the user/branch,
        folds the activity into the relevant counters, recomputes the weighted
        efficiency score, persists the row, and returns a dict snapshot.
        """
        today = datetime.utcnow().date()

        async with db() as session:
            # Get or create today's metrics row.
            stmt = select(PerformanceMetric).where(
                and_(
                    PerformanceMetric.user_id == user_id,
                    PerformanceMetric.branch_id == branch_id,
                    func.date(PerformanceMetric.metric_date) == today
                )
            )
            result = await session.execute(stmt)
            metric = result.scalar_one_or_none()

            if not metric:
                metric = PerformanceMetric(
                    user_id=user_id,
                    branch_id=branch_id,
                    metric_date=datetime.utcnow()
                )
                session.add(metric)

            # Update counters based on activity type.
            if activity.activity_type == ActivityType.SALE:
                metric.total_sales += activity.details.get('amount', 0)
                metric.transaction_count += 1
                metric.average_transaction_value = metric.total_sales / metric.transaction_count
            elif activity.activity_type == ActivityType.VOID:
                metric.void_count += 1
            elif activity.activity_type == ActivityType.CUSTOMER_SERVICE:
                metric.customer_interaction_count += 1
            elif activity.activity_type in [ActivityType.LOGIN, ActivityType.LOGOUT]:
                if activity.duration:
                    metric.login_time += activity.duration

            # Calculate efficiency score as a weighted blend of four components.
            weights = {
                'sales': 0.4,
                'speed': 0.2,
                'accuracy': 0.2,
                'customer_service': 0.2
            }

            # Component scores, each nominally on a 0-10 scale.
            sales_score = min((metric.total_sales / 1000) * 10, 10)  # Scale sales to 0-10
            # NOTE(review): assumes login_time is minutes against an 8-hour day;
            # this goes negative when login_time exceeds 480 — confirm units/clamping.
            speed_score = 10 * (1 - (metric.login_time / (8 * 60)))
            accuracy_score = 10 * (1 - (metric.void_count / max(metric.transaction_count, 1)))
            cs_score = min((metric.customer_interaction_count / 10) * 10, 10)

            metric.efficiency_score = (
                (sales_score * weights['sales']) +
                (speed_score * weights['speed']) +
                (accuracy_score * weights['accuracy']) +
                (cs_score * weights['customer_service'])
            )

            await session.commit()
            await session.refresh(metric)

            # Return a plain-dict snapshot for notification/broadcast consumers.
            return {
                "total_sales": metric.total_sales,
                "transaction_count": metric.transaction_count,
                "average_transaction_value": metric.average_transaction_value,
                "void_count": metric.void_count,
                "customer_interaction_count": metric.customer_interaction_count,
                "login_time": metric.login_time,
                "efficiency_score": metric.efficiency_score
            }
302
+
303
    @staticmethod
    async def get_staff_performance(
        branch_id: Optional[int] = None,
        user_id: Optional[int] = None,
        start_date: Optional[datetime] = None,
        end_date: Optional[datetime] = None
    ) -> Dict[str, Any]:
        """Get comprehensive staff performance metrics.

        Aggregates PerformanceMetric rows per user over the date range
        (defaults to the last 30 days), optionally filtered by branch and/or
        user. When branch_id is given, branch-wide daily averages are added.
        Results are cached for 1 hour keyed by the filter combination.
        """
        if not start_date:
            start_date = datetime.utcnow() - timedelta(days=30)
        if not end_date:
            end_date = datetime.utcnow()

        # Cache key covers all filters so different queries never collide.
        cache_key = f"staff_performance:{branch_id}:{user_id}:{start_date.date()}:{end_date.date()}"
        cached_data = await cache.get_cache(cache_key)
        if cached_data:
            return cached_data

        async with db() as session:
            # Base query conditions
            conditions = [
                PerformanceMetric.metric_date.between(start_date, end_date)
            ]
            if branch_id:
                conditions.append(PerformanceMetric.branch_id == branch_id)
            if user_id:
                conditions.append(PerformanceMetric.user_id == user_id)

            # Aggregate metrics per user across the window.
            metrics_stmt = select(
                PerformanceMetric.user_id,
                func.sum(PerformanceMetric.total_sales).label('total_sales'),
                func.sum(PerformanceMetric.transaction_count).label('transaction_count'),
                func.avg(PerformanceMetric.average_transaction_value).label('avg_transaction_value'),
                func.sum(PerformanceMetric.void_count).label('void_count'),
                func.sum(PerformanceMetric.customer_interaction_count).label('customer_interactions'),
                func.sum(PerformanceMetric.login_time).label('total_login_time'),
                func.avg(PerformanceMetric.efficiency_score).label('avg_efficiency_score')
            ).where(
                and_(*conditions)
            ).group_by(
                PerformanceMetric.user_id
            )

            result = await session.execute(metrics_stmt)
            metrics_data = result.all()

            # Fetch user records in one query to decorate the rows with names.
            user_ids = [m.user_id for m in metrics_data]
            users_stmt = select(User).where(User.id.in_(user_ids))
            users = (await session.execute(users_stmt)).scalars().all()
            users_dict = {u.id: u for u in users}

            # Format response data; rows for unknown/deleted users are dropped.
            performance_data = []
            for metric in metrics_data:
                user = users_dict.get(metric.user_id)
                if user:
                    performance_data.append({
                        "user_id": user.id,
                        "username": user.username,
                        "full_name": user.full_name,
                        "metrics": {
                            "total_sales": metric.total_sales,
                            "transaction_count": metric.transaction_count,
                            "average_transaction_value": metric.avg_transaction_value,
                            "void_count": metric.void_count,
                            "customer_interactions": metric.customer_interactions,
                            "total_login_time": metric.total_login_time,
                            "efficiency_score": metric.avg_efficiency_score
                        }
                    })

            # Calculate branch averages if branch_id is specified.
            branch_averages = None
            if branch_id:
                avg_stmt = select(
                    func.avg(PerformanceMetric.total_sales).label('avg_sales'),
                    func.avg(PerformanceMetric.transaction_count).label('avg_transactions'),
                    func.avg(PerformanceMetric.average_transaction_value).label('avg_transaction_value'),
                    func.avg(PerformanceMetric.efficiency_score).label('avg_efficiency')
                ).where(
                    and_(
                        PerformanceMetric.branch_id == branch_id,
                        PerformanceMetric.metric_date.between(start_date, end_date)
                    )
                )

                avg_result = await session.execute(avg_stmt)
                branch_avg = avg_result.one()
                branch_averages = {
                    "average_daily_sales": branch_avg.avg_sales,
                    "average_daily_transactions": branch_avg.avg_transactions,
                    "average_transaction_value": branch_avg.avg_transaction_value,
                    "average_efficiency_score": branch_avg.avg_efficiency
                }

            response = {
                "staff_performance": performance_data,
                "date_range": {
                    "start_date": start_date.isoformat(),
                    "end_date": end_date.isoformat()
                }
            }

            if branch_averages:
                response["branch_averages"] = branch_averages

            # Cache the response for 1 hour
            await cache.set_cache(cache_key, response, expire=3600)
            return response
414
+
415
+ analytics = AnalyticsService()
416
+ staff_analytics = StaffAnalyticsService()
app/services/maintenance.py CHANGED
@@ -9,8 +9,214 @@ from ..utils.logger import logger
9
  from ..core.config import settings
10
  from ..services.websocket import create_and_broadcast_notification
11
  from ..db.models import User, Order, Notification, Session
 
 
 
 
 
12
 
13
  class MaintenanceService:
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
14
  async def cleanup_expired_sessions(self) -> int:
15
  """Clean up expired sessions"""
16
  try:
@@ -95,6 +301,7 @@ class MaintenanceService:
95
  logger.error(f"Health check error: {str(e)}")
96
  return {"status": "unhealthy", "error": str(e)}
97
 
 
98
  async def monitor_system_resources(self) -> Dict[str, Any]:
99
  """Monitor system resources"""
100
  try:
@@ -174,6 +381,7 @@ class MaintenanceService:
174
  except Exception as e:
175
  logger.error(f"Log rotation error: {str(e)}")
176
 
 
177
  async def manage_storage_quotas(self) -> Dict[str, Any]:
178
  """Manage storage quotas and cleanup"""
179
  try:
@@ -214,4 +422,17 @@ class MaintenanceService:
214
  logger.error(f"Storage quota management error: {str(e)}")
215
  return {"error": str(e)}
216
 
217
- maintenance = MaintenanceService()
 
 
 
 
 
 
 
 
 
 
 
 
 
 
9
  from ..core.config import settings
10
  from ..services.websocket import create_and_broadcast_notification
11
  from ..db.models import User, Order, Notification, Session
12
+ from ..utils.cache import cache
13
+ from sqlalchemy import text
14
+ import aioredis
15
+ import asyncio
16
+ from ..utils.retry import retry_with_backoff, circuit_breaker, CircuitBreaker
17
 
18
  class MaintenanceService:
19
    def __init__(self):
        # ISO timestamp string of the last successful sync; None until first sync.
        self._last_sync_time = None
        # task_id -> {"task": asyncio.Task, "name": str, "started_at": datetime,
        #             "last_updated": datetime}; used by check_background_tasks().
        self._background_tasks = {}
        self.last_health_check = None
        self.last_maintenance = None
        # Shared breaker guarding perform_maintenance() against repeated failures.
        self.circuit_breaker = CircuitBreaker(
            failure_threshold=5,
            reset_timeout=60
        )
28
+
29
+ def get_timestamp(self) -> str:
30
+ """Get current timestamp in ISO format"""
31
+ return datetime.utcnow().isoformat()
32
+
33
+ def get_last_sync_time(self) -> str:
34
+ """Get last sync timestamp"""
35
+ return self._last_sync_time or "Never"
36
+
37
+ def get_next_sync_time(self) -> str:
38
+ """Calculate next scheduled sync time"""
39
+ if not self._last_sync_time:
40
+ return "Not scheduled"
41
+
42
+ last_sync = datetime.fromisoformat(self._last_sync_time)
43
+ next_sync = last_sync + timedelta(minutes=15) # 15-minute sync interval
44
+ return next_sync.isoformat()
45
+
46
    @retry_with_backoff(max_retries=3, initial_delay=1.0)
    async def check_database(self) -> Dict[str, Any]:
        """Check database connectivity and performance metrics.

        Runs a trivial SELECT 1 as a liveness probe, then queries PostgreSQL's
        pg_stat_activity / pg_stat_database system views for connection counts
        and the buffer cache hit ratio. Returns {"status": "healthy", ...stats}
        or {"status": "unhealthy", "error": ...} on failure.
        """
        try:
            async with db() as session:
                # Check basic connectivity
                await session.execute(select(1))

                # Check database stats (PostgreSQL-specific system views).
                result = await session.execute(
                    text("""
                        SELECT
                            (SELECT count(*) FROM pg_stat_activity) as active_connections,
                            (SELECT count(*) FROM pg_stat_activity WHERE state = 'active') as active_queries,
                            (SELECT count(*) FROM pg_stat_activity WHERE state = 'idle') as idle_connections,
                            (SELECT round(sum(blks_hit)*100/sum(blks_hit + blks_read), 2)
                             FROM pg_stat_database) as cache_hit_ratio
                    """)
                )
                stats = result.mappings().one()

                return {
                    "status": "healthy",
                    "active_connections": stats["active_connections"],
                    "active_queries": stats["active_queries"],
                    "idle_connections": stats["idle_connections"],
                    "cache_hit_ratio": stats["cache_hit_ratio"]
                }

        except Exception as e:
            logger.error(f"Database health check error: {str(e)}")
            return {
                "status": "unhealthy",
                "error": str(e)
            }
81
+
82
    @retry_with_backoff(max_retries=2, initial_delay=0.5)
    async def check_redis(self) -> Dict[str, Any]:
        """Check Redis connectivity and performance.

        Pings Redis as a liveness probe, then pulls a few INFO fields
        (clients, memory, connection totals, uptime). Returns a status dict;
        on any error the status is "unhealthy" with the error message.
        """
        try:
            # Test Redis connection
            await cache.redis.ping()

            # Get Redis server stats via INFO.
            info = await cache.redis.info()

            return {
                "status": "healthy",
                "connected_clients": info["connected_clients"],
                "used_memory_human": info["used_memory_human"],
                "total_connections_received": info["total_connections_received"],
                "uptime_in_seconds": info["uptime_in_seconds"]
            }

        except Exception as e:
            logger.error(f"Redis health check error: {str(e)}")
            return {
                "status": "unhealthy",
                "error": str(e)
            }
106
+
107
    @circuit_breaker(failure_threshold=5, reset_timeout=300.0)
    async def check_background_tasks(self) -> Dict[str, Any]:
        """Check status of background tasks.

        Classifies every registered task as active, completed, or failed
        (done with an exception). Overall status is "warning" when any task
        has failed, otherwise "healthy".
        """
        active_tasks = []
        completed_tasks = []
        failed_tasks = []

        for task_id, task_info in self._background_tasks.items():
            # Common descriptor shared by all three buckets.
            status = {
                "id": task_id,
                "name": task_info["name"],
                "started_at": task_info["started_at"].isoformat(),
                "last_updated": task_info["last_updated"].isoformat()
            }

            if task_info["task"].done():
                if task_info["task"].exception():
                    failed_tasks.append({
                        **status,
                        "error": str(task_info["task"].exception())
                    })
                else:
                    completed_tasks.append(status)
            else:
                active_tasks.append(status)

        return {
            "status": "healthy" if not failed_tasks else "warning",
            "active_tasks": active_tasks,
            "completed_tasks": completed_tasks,
            "failed_tasks": failed_tasks
        }
139
+
140
+ async def register_background_task(
141
+ self,
142
+ task: asyncio.Task,
143
+ name: str
144
+ ) -> str:
145
+ """Register a new background task for monitoring"""
146
+ task_id = f"{name}_{datetime.utcnow().timestamp()}"
147
+ self._background_tasks[task_id] = {
148
+ "task": task,
149
+ "name": name,
150
+ "started_at": datetime.utcnow(),
151
+ "last_updated": datetime.utcnow()
152
+ }
153
+ return task_id
154
+
155
+ async def update_task_status(self, task_id: str):
156
+ """Update last_updated timestamp for a task"""
157
+ if task_id in self._background_tasks:
158
+ self._background_tasks[task_id]["last_updated"] = datetime.utcnow()
159
+
160
+ async def cleanup_completed_tasks(self):
161
+ """Remove completed tasks older than 24 hours"""
162
+ current_time = datetime.utcnow()
163
+ to_remove = []
164
+
165
+ for task_id, task_info in self._background_tasks.items():
166
+ if task_info["task"].done():
167
+ age = current_time - task_info["last_updated"]
168
+ if age > timedelta(hours=24):
169
+ to_remove.append(task_id)
170
+
171
+ for task_id in to_remove:
172
+ del self._background_tasks[task_id]
173
+
174
    async def perform_maintenance(self) -> Dict[str, Any]:
        """Perform system maintenance tasks.

        Runs inside the shared circuit breaker so repeated failures stop the
        cycle. Steps: purge stale task records, evict expired cache entries,
        delete old sessions (>7 days idle) and read notifications (>30 days),
        then ANALYZE the database. Returns a status/message/timestamp dict;
        errors are caught and reported rather than raised.
        """
        async with self.circuit_breaker:
            try:
                # Clean up old background tasks
                await self.cleanup_completed_tasks()

                # Clean up expired cache entries
                await cache.cleanup_expired()

                # Run database maintenance
                async with db() as session:
                    # Clean up old sessions (idle for more than 7 days).
                    await session.execute(
                        text("DELETE FROM sessions WHERE last_activity < NOW() - INTERVAL '7 days'")
                    )

                    # Clean up old notifications — only ones already read.
                    await session.execute(
                        text("""
                            DELETE FROM notifications
                            WHERE created_at < NOW() - INTERVAL '30 days'
                            AND read = true
                        """)
                    )

                    # Analyze tables so the query planner has fresh statistics.
                    await session.execute(text("ANALYZE"))

                    await session.commit()

                return {
                    "status": "success",
                    "message": "Maintenance completed successfully",
                    "timestamp": self.get_timestamp()
                }

            except Exception as e:
                logger.error(f"Maintenance error: {str(e)}")
                return {
                    "status": "error",
                    "message": f"Maintenance failed: {str(e)}",
                    "timestamp": self.get_timestamp()
                }
218
+
219
+ @retry_with_backoff(max_retries=2)
220
  async def cleanup_expired_sessions(self) -> int:
221
  """Clean up expired sessions"""
222
  try:
 
301
  logger.error(f"Health check error: {str(e)}")
302
  return {"status": "unhealthy", "error": str(e)}
303
 
304
+ @retry_with_backoff(max_retries=2, initial_delay=1.0)
305
  async def monitor_system_resources(self) -> Dict[str, Any]:
306
  """Monitor system resources"""
307
  try:
 
381
  except Exception as e:
382
  logger.error(f"Log rotation error: {str(e)}")
383
 
384
+ @retry_with_backoff(max_retries=3)
385
  async def manage_storage_quotas(self) -> Dict[str, Any]:
386
  """Manage storage quotas and cleanup"""
387
  try:
 
422
  logger.error(f"Storage quota management error: {str(e)}")
423
  return {"error": str(e)}
424
 
425
+ def _calculate_storage_usage(self, storage_type: str) -> Dict[str, Any]:
426
+ """Helper method to calculate storage usage"""
427
+ base_path = settings.UPLOAD_DIR / storage_type
428
+ total_size = sum(f.stat().st_size for f in base_path.glob('**/*') if f.is_file())
429
+ quota = getattr(settings, f"{storage_type.upper()}_QUOTA_BYTES")
430
+ usage_percent = (total_size / quota) * 100 if quota > 0 else 0
431
+
432
+ return {
433
+ "current_size": total_size,
434
+ "quota": quota,
435
+ "usage_percent": round(usage_percent, 2)
436
+ }
437
+
438
+ maintenance_service = MaintenanceService()
app/services/performance_notifications.py ADDED
@@ -0,0 +1,195 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from datetime import datetime
2
+ from typing import Dict, Any, List, Optional
3
+ from ..services.notifications import create_and_broadcast_notification
4
+ from ..services.analytics import staff_analytics
5
+ from ..db.models import ActivityType
6
+ from ..utils.logger import logger
7
+
8
+ class PerformanceNotificationService:
9
+ # Performance thresholds for notifications
10
+ THRESHOLDS = {
11
+ "high_efficiency": 9.0, # Notify for efficiency scores above 9
12
+ "low_efficiency": 5.0, # Warn for efficiency scores below 5
13
+ "sales_milestone": 10000, # Notify for sales milestones (every 10k)
14
+ "high_void_rate": 0.1, # Warn when void rate exceeds 10%
15
+ "customer_service": 20, # Notify for high customer service interactions
16
+ }
17
+
18
    @staticmethod
    async def check_and_notify_performance(
        user_id: int,
        branch_id: int,
        metrics: Dict[str, Any]
    ):
        """Check performance metrics and send notifications if thresholds are met.

        Builds a list of notification payloads from the THRESHOLDS table
        (efficiency high/low, sales milestone, void rate, customer-service
        count), sends each to the staff member, and mirrors any "*alert*"
        type to the supervisor channel. Errors are logged, never raised.
        """
        try:
            notifications = []

            # Check efficiency score against both the praise and warning bounds.
            if metrics["efficiency_score"] >= PerformanceNotificationService.THRESHOLDS["high_efficiency"]:
                notifications.append({
                    "title": "Outstanding Performance!",
                    "message": "Your efficiency score is exceptional. Keep up the great work!",
                    "type": "performance_achievement",
                    "data": {
                        "metric": "efficiency_score",
                        "value": metrics["efficiency_score"]
                    }
                })
            elif metrics["efficiency_score"] <= PerformanceNotificationService.THRESHOLDS["low_efficiency"]:
                notifications.append({
                    "title": "Performance Alert",
                    "message": "Your efficiency score needs improvement. Contact your supervisor for support.",
                    "type": "performance_alert",
                    "data": {
                        "metric": "efficiency_score",
                        "value": metrics["efficiency_score"]
                    }
                })

            # Check sales milestones.
            # NOTE(review): `total_sales % milestone < 100` is a proximity
            # heuristic — it fires while sales sit within 100 of a multiple of
            # the milestone and can trigger repeatedly or be skipped entirely
            # if a single sale jumps past the window; confirm intent.
            milestone = PerformanceNotificationService.THRESHOLDS["sales_milestone"]
            if metrics["total_sales"] > 0 and metrics["total_sales"] % milestone < 100:
                notifications.append({
                    "title": "Sales Milestone Achieved!",
                    "message": f"Congratulations! You've reached {int(metrics['total_sales'] / 1000)}k in sales.",
                    "type": "sales_milestone",
                    "data": {
                        "metric": "total_sales",
                        "value": metrics["total_sales"]
                    }
                })

            # Check void rate (only meaningful once there are transactions).
            if metrics["transaction_count"] > 0:
                void_rate = metrics["void_count"] / metrics["transaction_count"]
                if void_rate > PerformanceNotificationService.THRESHOLDS["high_void_rate"]:
                    notifications.append({
                        "title": "High Void Rate Alert",
                        "message": "Your void transaction rate is above average. Please review procedures.",
                        "type": "void_rate_alert",
                        "data": {
                            "metric": "void_rate",
                            "value": void_rate
                        }
                    })

            # Check customer service interactions
            if metrics["customer_interaction_count"] >= PerformanceNotificationService.THRESHOLDS["customer_service"]:
                notifications.append({
                    "title": "Customer Service Achievement",
                    "message": "Great job handling customer interactions today!",
                    "type": "customer_service_achievement",
                    "data": {
                        "metric": "customer_interactions",
                        "value": metrics["customer_interaction_count"]
                    }
                })

            # Send all notifications
            for notif in notifications:
                await create_and_broadcast_notification(
                    user_id=str(user_id),
                    title=notif["title"],
                    message=notif["message"],
                    notification_type=notif["type"],
                    data=notif["data"]
                )

                # If it's an alert, also notify supervisor
                if "alert" in notif["type"]:
                    await create_and_broadcast_notification(
                        user_id="supervisor",
                        title=f"Staff Alert: {notif['title']}",
                        message=f"Staff member {user_id} in branch {branch_id}: {notif['message']}",
                        notification_type=f"supervisor_{notif['type']}",
                        data={
                            **notif["data"],
                            "staff_id": user_id,
                            "branch_id": branch_id
                        }
                    )

        except Exception as e:
            logger.error(f"Error in performance notifications: {str(e)}")
115
+
116
+ @staticmethod
117
+ async def notify_performance_insights(
118
+ branch_id: int,
119
+ insights: List[Dict[str, Any]]
120
+ ):
121
+ """Send notifications for performance insights"""
122
+ try:
123
+ for insight in insights:
124
+ if insight["type"] in ["warning", "positive"]:
125
+ await create_and_broadcast_notification(
126
+ user_id="supervisor",
127
+ title=f"Branch Performance Insight",
128
+ message=insight["message"],
129
+ notification_type=f"performance_insight_{insight['type']}",
130
+ data={
131
+ "branch_id": branch_id,
132
+ "category": insight["category"],
133
+ "insight_type": insight["type"]
134
+ }
135
+ )
136
+
137
+ except Exception as e:
138
+ logger.error(f"Error in insight notifications: {str(e)}")
139
+
140
    @staticmethod
    async def notify_realtime_alerts(
        branch_id: int,
        current_metrics: Dict[str, Any],
        previous_metrics: Optional[Dict[str, Any]] = None
    ):
        """Send notifications for significant real-time changes.

        Compares current vs previous daily metrics and alerts the supervisor
        on (a) an efficiency-score drop of more than 2 points or (b) a void
        rate that has more than doubled. No-op when there is no previous
        snapshot. Errors are logged, never raised.
        """
        try:
            if not previous_metrics:
                return

            alerts = []

            # Check for significant drops in performance (>2-point fall).
            if (previous_metrics["efficiency_score"] - current_metrics["efficiency_score"]) > 2:
                alerts.append({
                    "title": "Sudden Performance Drop",
                    "message": "Notable decrease in staff performance detected.",
                    "type": "realtime_performance_alert"
                })

            # Check for unusual void patterns; rates default to 0 when there
            # are no transactions to avoid division by zero.
            current_void_rate = (
                current_metrics["void_count"] / current_metrics["transaction_count"]
                if current_metrics["transaction_count"] > 0 else 0
            )
            previous_void_rate = (
                previous_metrics["void_count"] / previous_metrics["transaction_count"]
                if previous_metrics["transaction_count"] > 0 else 0
            )

            if current_void_rate > previous_void_rate * 2:
                alerts.append({
                    "title": "Unusual Void Pattern",
                    "message": "Significant increase in void transactions detected.",
                    "type": "realtime_void_alert"
                })

            # Send alerts to the supervisor with both snapshots attached.
            for alert in alerts:
                await create_and_broadcast_notification(
                    user_id="supervisor",
                    title=alert["title"],
                    message=alert["message"],
                    notification_type=alert["type"],
                    data={
                        "branch_id": branch_id,
                        "current_metrics": current_metrics,
                        "previous_metrics": previous_metrics
                    }
                )

        except Exception as e:
            logger.error(f"Error in realtime alert notifications: {str(e)}")
194
+
195
+ performance_notifications = PerformanceNotificationService()
app/services/pos_analytics.py ADDED
@@ -0,0 +1,376 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
from datetime import datetime, timedelta
2
+ from typing import Dict, Any, Optional, List
3
+ from ..core.config import settings
4
+ from ..utils.logger import logger, log_health_check
5
+ from ..services.analytics import staff_analytics
6
+ from ..db.models import ActivityType
7
+ from ..routes.websocket import broadcast_staff_update
8
+ from ..utils.retry import with_retry, retry_with_backoff
9
+ import httpx
10
+ import asyncio
11
+ import aiohttp
12
+
13
+ class POSAnalyticsService:
14
    def __init__(self):
        # Base URL of the external POS system and the service-to-service token.
        self.pos_api_url = settings.POS_API_URL
        self.service_token = settings.SERVICE_TOKEN
        self._headers = {
            "Authorization": f"Bearer {self.service_token}",
            "Content-Type": "application/json"
        }
        # Sync bookkeeping consumed by health_check().
        self._last_sync_time = None
        self._metrics_cache = {}
        self._failed_operations = []
        self._last_error = None
25
+
26
    @with_retry(max_retries=3, delay=1.0, exceptions=(httpx.HTTPError, asyncio.TimeoutError))
    async def sync_pos_activity(self, pos_data: Dict[str, Any]) -> bool:
        """
        Sync staff activity data from POS system with retry mechanism.

        Maps the POS-side activity_type string onto our ActivityType enum,
        records the activity via staff_analytics, then broadcasts today's
        refreshed branch metrics over the websocket channel. Returns False
        for unknown activity types; other errors are re-raised so the
        @with_retry decorator can retry them.
        """
        try:
            # Map POS activity types to our ActivityType enum
            activity_mapping = {
                "sale": ActivityType.SALE,
                "void": ActivityType.VOID,
                "refund": ActivityType.REFUND,
                "inventory": ActivityType.INVENTORY,
                "customer_service": ActivityType.CUSTOMER_SERVICE,
                "login": ActivityType.LOGIN,
                "logout": ActivityType.LOGOUT
            }

            activity_type = activity_mapping.get(pos_data["activity_type"].lower())
            if not activity_type:
                logger.warning(f"Unknown POS activity type: {pos_data['activity_type']}")
                return False

            # Record the activity in our system
            await staff_analytics.record_activity(
                user_id=pos_data["user_id"],
                branch_id=pos_data["branch_id"],
                activity_type=activity_type,
                details=pos_data["details"],
                duration=pos_data.get("duration")
            )

            # Get updated metrics for the branch (since midnight UTC) and broadcast
            metrics = await staff_analytics.get_staff_performance(
                branch_id=pos_data["branch_id"],
                start_date=datetime.utcnow().replace(hour=0, minute=0, second=0, microsecond=0)
            )
            await broadcast_staff_update(metrics)

            return True

        except Exception as e:
            logger.error(f"Error syncing POS activity: {str(e)}")
            raise
69
+
70
    @with_retry(max_retries=3, delay=1.0, exceptions=(httpx.HTTPError, asyncio.TimeoutError))
    async def get_pos_metrics(self, user_id: int, branch_id: int, date: datetime) -> Optional[Dict[str, Any]]:
        """Fetch staff metrics from POS system with retry mechanism.

        HTTP errors are re-raised so @with_retry can retry them; any other
        failure is logged and reported as None.
        """
        try:
            async with httpx.AsyncClient() as client:
                response = await client.get(
                    f"{self.pos_api_url}/api/v1/staff/metrics",
                    headers=self._headers,
                    params={
                        "user_id": user_id,
                        "branch_id": branch_id,
                        "date": date.date().isoformat()
                    },
                    timeout=10.0
                )

                response.raise_for_status()
                return response.json()

        except httpx.HTTPError as e:
            logger.error(f"HTTP error fetching POS metrics: {str(e)}")
            raise
        except Exception as e:
            logger.error(f"Error fetching POS metrics: {str(e)}")
            return None
95
+
96
    @with_retry(max_retries=3, delay=2.0, exceptions=(httpx.HTTPError, asyncio.TimeoutError))
    async def sync_all_metrics(self, branch_id: Optional[int] = None) -> bool:
        """
        Sync all staff metrics from POS system with retry mechanism.

        Fetches today's daily metrics (optionally for one branch), expands
        each row into activity records, syncs them concurrently, and
        broadcasts refreshed metrics per affected branch. Returns True only
        if every individual sync succeeded.

        NOTE(review): this method is shadowed by a later zero-argument
        redefinition of sync_all_metrics in this class — at runtime only the
        later definition is bound. Confirm which implementation is intended.
        """
        try:
            today = datetime.utcnow()
            params = {"date": today.date().isoformat()}
            if branch_id:
                params["branch_id"] = branch_id

            async with httpx.AsyncClient() as client:
                response = await client.get(
                    f"{self.pos_api_url}/api/v1/staff/metrics/daily",
                    headers=self._headers,
                    params=params,
                    timeout=30.0
                )

                response.raise_for_status()
                metrics_data = response.json()
                sync_tasks = []
                processed_branches = set()

                for metric in metrics_data:
                    activities = self._convert_metrics_to_activities(metric)
                    processed_branches.add(metric["branch_id"])

                    for activity in activities:
                        sync_tasks.append(self.sync_pos_activity(activity))

                # Run all sync tasks concurrently with individual retries
                results = await asyncio.gather(*sync_tasks, return_exceptions=True)

                # Broadcast updates for each affected branch
                for branch_id in processed_branches:
                    metrics = await staff_analytics.get_staff_performance(branch_id=branch_id)
                    await broadcast_staff_update(metrics)

                # Check for any failures
                success = all(
                    result is True if not isinstance(result, Exception) else False
                    for result in results
                )

                if not success:
                    logger.warning("Some metrics failed to sync")

                return success

        except httpx.HTTPError as e:
            logger.error(f"HTTP error in sync_all_metrics: {str(e)}")
            raise
        except Exception as e:
            logger.error(f"Error in sync_all_metrics: {str(e)}")
            return False
152
+
153
+ def _convert_metrics_to_activities(self, metric: Dict[str, Any]) -> List[Dict[str, Any]]:
154
+ """Convert POS metrics into individual activity records"""
155
+ activities = []
156
+ timestamp = datetime.utcnow().isoformat()
157
+
158
+ # Convert sales metrics
159
+ if metric.get("sales_amount"):
160
+ activities.append({
161
+ "user_id": metric["user_id"],
162
+ "branch_id": metric["branch_id"],
163
+ "activity_type": "sale",
164
+ "details": {
165
+ "amount": metric["sales_amount"],
166
+ "transaction_count": metric["transaction_count"]
167
+ },
168
+ "timestamp": timestamp
169
+ })
170
+
171
+ # Convert void transactions
172
+ if metric.get("void_count"):
173
+ activities.append({
174
+ "user_id": metric["user_id"],
175
+ "branch_id": metric["branch_id"],
176
+ "activity_type": "void",
177
+ "details": {
178
+ "count": metric["void_count"],
179
+ "amount": metric.get("void_amount", 0)
180
+ },
181
+ "timestamp": timestamp
182
+ })
183
+
184
+ # Convert customer service interactions
185
+ if metric.get("customer_interactions"):
186
+ activities.append({
187
+ "user_id": metric["user_id"],
188
+ "branch_id": metric["branch_id"],
189
+ "activity_type": "customer_service",
190
+ "details": {
191
+ "interaction_count": metric["customer_interactions"],
192
+ "satisfaction_score": metric.get("customer_satisfaction", 0)
193
+ },
194
+ "timestamp": timestamp
195
+ })
196
+
197
+ # Convert login time
198
+ if metric.get("login_duration"):
199
+ activities.append({
200
+ "user_id": metric["user_id"],
201
+ "branch_id": metric["branch_id"],
202
+ "activity_type": "login",
203
+ "details": {
204
+ "session_type": "pos",
205
+ "login_time": metric["login_duration"]
206
+ },
207
+ "timestamp": timestamp,
208
+ "duration": metric["login_duration"]
209
+ })
210
+
211
+ return activities
212
+
213
async def health_check(self) -> Dict[str, Any]:
    """Check POS integration health status.

    Combines internal sync-state checks (last-sync age, failed-operation
    count) with a live connectivity probe of the POS API /health endpoint.

    Returns:
        Dict with "status" ("healthy" | "warning" | "degraded" |
        "unhealthy" | "error") and a "details" dict describing each check.
    """
    try:
        status = "healthy"
        details = {
            "last_sync": self._last_sync_time,
            "failed_operations": len(self._failed_operations),
            "cache_size": len(self._metrics_cache)
        }

        if self._last_error:
            details["last_error"] = str(self._last_error)
            # BUG FIX: _last_sync_time may still be None if no sync ever
            # completed; subtracting it from utcnow() raised TypeError.
            if self._last_sync_time is None or \
                    (datetime.utcnow() - self._last_sync_time) > timedelta(minutes=30):
                status = "warning"

        if len(self._failed_operations) > 5:
            status = "degraded"

        # Check POS API connectivity.
        try:
            async with aiohttp.ClientSession() as session:
                async with session.get(
                    f"{settings.POS_API_URL}/health",
                    # aiohttp's per-request timeout expects a ClientTimeout
                    # structure, not a bare number.
                    timeout=aiohttp.ClientTimeout(total=5)
                ) as response:
                    if response.status == 200:
                        details["api_status"] = "connected"
                    else:
                        details["api_status"] = "error"
                        status = "unhealthy"
        except Exception as e:
            details["api_status"] = "connection_failed"
            details["api_error"] = str(e)
            status = "unhealthy"

        log_health_check("pos_integration", status, details)
        return {
            "status": status,
            "details": details
        }

    except Exception as e:
        logger.error(f"POS health check failed: {str(e)}")
        return {
            "status": "error",
            "details": {"error": str(e)}
        }
260
+
261
@retry_with_backoff(max_retries=3)
async def sync_all_metrics(self) -> bool:
    """Sync every metric category from the POS system concurrently.

    Returns True only when all categories synced without error; partial
    failures are recorded in self._failed_operations and _last_error.
    """
    try:
        # Fresh failure list for this run.
        self._failed_operations = []

        # Fan out each category sync and collect per-task outcomes.
        outcomes = await asyncio.gather(
            self._sync_sales_metrics(),
            self._sync_inventory_metrics(),
            self._sync_staff_metrics(),
            self._sync_customer_metrics(),
            return_exceptions=True,
        )

        for outcome in outcomes:
            if isinstance(outcome, Exception):
                self._failed_operations.append(str(outcome))
                logger.error(f"Metric sync failed: {str(outcome)}")

        self._last_sync_time = datetime.utcnow()
        self._last_error = self._failed_operations[-1] if self._failed_operations else None

        return not self._failed_operations

    except Exception as e:
        self._last_error = str(e)
        logger.error(f"Failed to sync metrics: {str(e)}")
        return False
293
+
294
@retry_with_backoff(max_retries=2)
async def _sync_sales_metrics(self) -> Dict[str, Any]:
    """Sync sales metrics from POS"""
    return await self._fetch_pos_metrics("sales")

@retry_with_backoff(max_retries=2)
async def _sync_inventory_metrics(self) -> Dict[str, Any]:
    """Sync inventory metrics from POS"""
    return await self._fetch_pos_metrics("inventory")

@retry_with_backoff(max_retries=2)
async def _sync_staff_metrics(self) -> Dict[str, Any]:
    """Sync staff performance metrics from POS"""
    return await self._fetch_pos_metrics("staff")

@retry_with_backoff(max_retries=2)
async def _sync_customer_metrics(self) -> Dict[str, Any]:
    """Sync customer metrics from POS"""
    # Endpoint/cache key is "customers" but error text says "customer",
    # matching the original per-category messages.
    return await self._fetch_pos_metrics("customers", label="customer")

async def _fetch_pos_metrics(self, metric_type: str, label: Optional[str] = None) -> Dict[str, Any]:
    """Shared fetch-and-cache helper for the per-category sync methods.

    GETs {POS_API_URL}/metrics/{metric_type}, stores the JSON payload in
    self._metrics_cache under the same key, and raises on any non-200
    response so the retry decorator on the caller can re-attempt.

    Args:
        metric_type: URL path segment and cache key ("sales", "staff", ...).
        label: Optional name used in the error message; defaults to
            metric_type (preserves the original messages exactly).
    """
    label = label or metric_type
    async with aiohttp.ClientSession() as session:
        async with session.get(
            f"{settings.POS_API_URL}/metrics/{metric_type}",
            headers=self._get_auth_headers()
        ) as response:
            if response.status != 200:
                raise Exception(f"Failed to sync {label} metrics: {response.status}")
            data = await response.json()
            self._metrics_cache[metric_type] = data
            return data
353
+
354
def _get_auth_headers(self) -> Dict[str, str]:
    """Build the bearer-token headers required by every POS API request."""
    headers: Dict[str, str] = {}
    headers["Authorization"] = f"Bearer {settings.POS_API_KEY}"
    headers["X-API-Version"] = settings.POS_API_VERSION
    return headers
360
+
361
def get_cached_metrics(self, metric_type: str) -> Optional[Dict[str, Any]]:
    """Return the last synced payload for *metric_type*, or None if absent."""
    try:
        return self._metrics_cache[metric_type]
    except KeyError:
        return None
364
+
365
def get_sync_status(self) -> Dict[str, Any]:
    """Snapshot of sync bookkeeping: last run, failures, cached categories."""
    # Every currently cached category maps to the literal marker "cached".
    cache_status = dict.fromkeys(self._metrics_cache, "cached")
    return {
        "last_sync": self._last_sync_time,
        "failed_operations": self._failed_operations,
        "cache_status": cache_status,
        "last_error": self._last_error,
    }
375
+
376
+ pos_analytics = POSAnalyticsService()
app/services/scheduler.py CHANGED
@@ -5,9 +5,11 @@ from apscheduler.schedulers.asyncio import AsyncIOScheduler
5
  from apscheduler.triggers.cron import CronTrigger
6
  from ..db.database import db
7
  from ..utils.cache import cache
8
- from ..utils.logger import logger
9
  from ..services.calendar import calendar
10
- from ..services.maintenance import maintenance
 
 
11
 
12
  class SchedulerService:
13
  def __init__(self):
@@ -15,80 +17,123 @@ class SchedulerService:
15
  self._setup_maintenance_jobs()
16
 
17
  def _setup_maintenance_jobs(self):
18
- """Setup all maintenance related scheduled jobs"""
19
- # Daily database maintenance at 2 AM
20
  self.scheduler.add_job(
21
- maintenance.perform_database_maintenance,
22
- CronTrigger(hour=2),
23
- id="daily_db_maintenance",
24
  replace_existing=True
25
  )
26
 
27
- # Session cleanup every 6 hours
28
  self.scheduler.add_job(
29
- maintenance.cleanup_expired_sessions,
30
- CronTrigger(hour="*/6"),
31
- id="session_cleanup",
32
  replace_existing=True
33
  )
34
 
35
- # System health check every 15 minutes
36
  self.scheduler.add_job(
37
- maintenance.monitor_system_resources,
38
- CronTrigger(minute="*/15"),
39
- id="health_check",
40
  replace_existing=True
41
  )
42
 
43
- # Daily backup at 1 AM
44
  self.scheduler.add_job(
45
- maintenance.perform_scheduled_backup,
46
- CronTrigger(hour=1),
47
- id="daily_backup",
48
  replace_existing=True
49
  )
50
 
51
- # Daily log rotation at 3 AM
52
- self.scheduler.add_job(
53
- maintenance.rotate_log_files,
54
- CronTrigger(hour=3),
55
- id="log_rotation",
56
- replace_existing=True
57
- )
58
 
59
- # Storage quota check every 2 hours
60
- self.scheduler.add_job(
61
- maintenance.manage_storage_quotas,
62
- CronTrigger(hour="*/2"),
63
- id="storage_quota_check",
64
- replace_existing=True
65
- )
66
 
67
- # Monthly data archiving at 4 AM on the 1st of each month
68
- self.scheduler.add_job(
69
- maintenance.archive_old_data,
70
- CronTrigger(day=1, hour=4),
71
- id="monthly_archiving",
72
- replace_existing=True
73
- )
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
74
 
75
  def start(self):
76
  """Start the scheduler"""
77
- try:
78
  self.scheduler.start()
79
- logger.info("Scheduler started successfully")
80
- except Exception as e:
81
- logger.error(f"Failed to start scheduler: {str(e)}")
82
- raise
83
 
84
  def shutdown(self):
85
  """Shutdown the scheduler"""
86
- try:
87
  self.scheduler.shutdown()
88
- logger.info("Scheduler shutdown successfully")
89
- except Exception as e:
90
- logger.error(f"Error during scheduler shutdown: {str(e)}")
91
- raise
92
 
93
  def get_jobs(self):
94
  """Get all scheduled jobs"""
@@ -240,4 +285,4 @@ class SchedulerService:
240
 
241
  return sorted(events, key=lambda x: x["start_time"])
242
 
243
- scheduler = SchedulerService()
 
5
  from apscheduler.triggers.cron import CronTrigger
6
  from ..db.database import db
7
  from ..utils.cache import cache
8
+ from ..utils.logger import logger, log_health_check, log_maintenance_activity
9
  from ..services.calendar import calendar
10
+ from ..services.maintenance import maintenance_service
11
+ from ..services.pos_analytics import pos_analytics
12
+ from ..core.config import settings
13
 
14
  class SchedulerService:
15
  def __init__(self):
 
17
  self._setup_maintenance_jobs()
18
 
19
def _setup_maintenance_jobs(self):
    """Set up scheduled maintenance and health check jobs"""
    # (callable, trigger, job id) — all registered with replace_existing
    # so re-instantiating the service never duplicates jobs.
    job_specs = (
        # Health checks every 5 minutes
        (self._run_health_checks, CronTrigger(minute='*/5'), 'health_checks'),
        # System maintenance daily at 2 AM
        (self._run_maintenance, CronTrigger(hour=2), 'daily_maintenance'),
        # POS metrics sync every 15 minutes
        (self._sync_pos_metrics, CronTrigger(minute='*/15'), 'pos_sync'),
        # Resource monitoring every minute
        (self._monitor_resources, CronTrigger(minute='*'), 'resource_monitoring'),
    )
    for func, trigger, job_id in job_specs:
        self.scheduler.add_job(
            func,
            trigger=trigger,
            id=job_id,
            replace_existing=True
        )
52
 
53
async def _run_health_checks(self):
    """Run comprehensive system health checks"""
    try:
        # Probe each subsystem in order; probes are created lazily so an
        # earlier failure never leaves an un-awaited coroutine behind.
        probes = (
            ("database", maintenance_service.check_database),
            ("redis", maintenance_service.check_redis),
            ("pos_integration", pos_analytics.health_check),
            ("background_tasks", maintenance_service.check_background_tasks),
        )
        for component, probe in probes:
            result = await probe()
            log_health_check(component, result["status"], result)

    except Exception as e:
        logger.error(f"Health check failed: {str(e)}", exc_info=True)
74
+
75
async def _run_maintenance(self):
    """Run daily system maintenance tasks"""
    try:
        # Full maintenance pass first; its own status is logged verbatim.
        outcome = await maintenance_service.perform_maintenance()
        log_maintenance_activity("system", outcome["status"], outcome)

        cleaned_count = await maintenance_service.cleanup_expired_sessions()
        log_maintenance_activity("session_cleanup", "completed", {"cleaned": cleaned_count})

        # Archiving may legitimately be a no-op; only log when it did work.
        archived = await maintenance_service.archive_old_data()
        if archived:
            log_maintenance_activity("data_archiving", "completed", archived)

        await maintenance_service.rotate_log_files()
        log_maintenance_activity("log_rotation", "completed")

        quotas = await maintenance_service.manage_storage_quotas()
        log_maintenance_activity("storage_quotas", "completed", quotas)

    except Exception as e:
        logger.error(f"Maintenance tasks failed: {str(e)}", exc_info=True)
101
+
102
async def _sync_pos_metrics(self):
    """Sync POS metrics data"""
    try:
        # A False return means some categories failed but the run finished.
        if await pos_analytics.sync_all_metrics():
            log_maintenance_activity("pos_sync", "success")
        else:
            log_maintenance_activity("pos_sync", "partial_failure")

    except Exception as e:
        logger.error(f"POS metrics sync failed: {str(e)}", exc_info=True)
        log_maintenance_activity("pos_sync", "failed", {"error": str(e)})
112
+
113
async def _monitor_resources(self):
    """Monitor system resources"""
    try:
        snapshot = await maintenance_service.monitor_system_resources()
        # An "error" key in the payload downgrades the check to a warning.
        level = "warning" if "error" in snapshot else "healthy"
        log_health_check("resources", level, snapshot)

    except Exception as e:
        logger.error(f"Resource monitoring failed: {str(e)}", exc_info=True)
        log_health_check("resources", "error", {"error": str(e)})
125
 
126
def start(self):
    """Start the scheduler"""
    if self.scheduler.running:
        return  # already running; starting twice would raise
    self.scheduler.start()
    logger.info("Scheduler started - Maintenance and health check tasks initialized")
 
 
 
131
 
132
def shutdown(self):
    """Shutdown the scheduler"""
    if not self.scheduler.running:
        return  # nothing to stop
    self.scheduler.shutdown()
    logger.info("Scheduler shutdown - Maintenance and health check tasks stopped")
 
 
 
137
 
138
  def get_jobs(self):
139
  """Get all scheduled jobs"""
 
285
 
286
  return sorted(events, key=lambda x: x["start_time"])
287
 
288
+ scheduler_service = SchedulerService()
app/services/staff_reports.py ADDED
@@ -0,0 +1,329 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from datetime import datetime, timedelta
2
+ from typing import Dict, Any, List, Optional
3
+ from sqlalchemy import func, and_, select
4
+ from ..db.database import db
5
+ from ..db.models import StaffActivity, PerformanceMetric, User, ActivityType
6
+ from ..utils.cache import cache
7
+ import numpy as np
8
+ from collections import defaultdict
9
+
10
+ class StaffReportService:
11
@staticmethod
async def generate_performance_report(
    branch_id: Optional[int] = None,
    start_date: Optional[datetime] = None,
    end_date: Optional[datetime] = None
) -> Dict[str, Any]:
    """Generate comprehensive performance report with insights.

    Args:
        branch_id: Restrict the report to one branch; None means all branches.
        start_date / end_date: Reporting window; defaults to the last 30 days.

    Returns:
        Dict with "period", "trends", "top_performers",
        "activity_distribution" and "insights". Cached for one hour per
        (branch, start, end) combination.
    """
    if not start_date:
        start_date = datetime.utcnow() - timedelta(days=30)
    if not end_date:
        end_date = datetime.utcnow()

    cache_key = f"performance_report:{branch_id}:{start_date.date()}:{end_date.date()}"
    cached_data = await cache.get_cache(cache_key)
    if cached_data:
        return cached_data

    async with db() as session:
        # Base query conditions shared by the metric queries.
        conditions = [PerformanceMetric.metric_date.between(start_date, end_date)]
        if branch_id:
            conditions.append(PerformanceMetric.branch_id == branch_id)

        # Daily metrics for trend analysis.
        daily_metrics = await session.execute(
            select(
                PerformanceMetric.user_id,
                PerformanceMetric.metric_date,
                PerformanceMetric.total_sales,
                PerformanceMetric.transaction_count,
                PerformanceMetric.efficiency_score
            ).where(and_(*conditions))
        )
        daily_data = daily_metrics.all()

        trends = await StaffReportService._analyze_trends(daily_data)

        # Top 5 performers ranked by average efficiency.
        top_performers = await session.execute(
            select(
                PerformanceMetric.user_id,
                func.avg(PerformanceMetric.efficiency_score).label('avg_efficiency'),
                func.sum(PerformanceMetric.total_sales).label('total_sales'),
                func.count().label('days_worked')
            ).where(
                and_(*conditions)
            ).group_by(
                PerformanceMetric.user_id
            ).order_by(
                func.avg(PerformanceMetric.efficiency_score).desc()
            ).limit(5)
        )
        top_performers_data = top_performers.all()

        # Resolve user details for the top performers.
        # NOTE(review): assumes every metrics row has a matching User row;
        # a missing user would raise KeyError below — confirm upstream.
        user_ids = [p.user_id for p in top_performers_data]
        users = await session.execute(
            select(User).where(User.id.in_(user_ids))
        )
        users_dict = {u.id: u for u in users.scalars().all()}

        top_performers_list = [{
            "user_id": p.user_id,
            "username": users_dict[p.user_id].username,
            "full_name": users_dict[p.user_id].full_name,
            "average_efficiency": round(p.avg_efficiency, 2),
            "total_sales": round(p.total_sales, 2),
            "days_worked": p.days_worked
        } for p in top_performers_data]

        # Activity distribution. BUG FIX: the previous version passed the
        # Python boolean expression
        #   branch_id is None or StaffActivity.branch_id == branch_id
        # straight into and_(), which evaluates to a bare bool / misformed
        # filter instead of SQL. Build the condition list explicitly.
        activity_conditions = [StaffActivity.created_at.between(start_date, end_date)]
        if branch_id is not None:
            activity_conditions.append(StaffActivity.branch_id == branch_id)
        activities = await session.execute(
            select(
                StaffActivity.activity_type,
                func.count().label('count')
            ).where(
                and_(*activity_conditions)
            ).group_by(
                StaffActivity.activity_type
            )
        )
        activity_dist = {
            str(a.activity_type): a.count
            for a in activities.all()
        }

        insights = await StaffReportService._generate_insights(
            trends,
            activity_dist,
            top_performers_list
        )

        report = {
            "period": {
                "start_date": start_date.isoformat(),
                "end_date": end_date.isoformat()
            },
            "trends": trends,
            "top_performers": top_performers_list,
            "activity_distribution": activity_dist,
            "insights": insights
        }

        # Cache report for 1 hour.
        await cache.set_cache(cache_key, report, expire=3600)
        return report
123
+
124
@staticmethod
async def _analyze_trends(daily_data: List[Any]) -> Dict[str, Any]:
    """Aggregate per-user daily records into per-date totals and trend labels.

    Expects rows exposing metric_date, total_sales, transaction_count and
    efficiency_score attributes; returns {} when there is nothing to analyze.
    """
    if not daily_data:
        return {}

    # Bucket records by calendar date.
    daily_stats = defaultdict(lambda: {
        "total_sales": 0,
        "transaction_count": 0,
        "efficiency_scores": []
    })
    for row in daily_data:
        bucket = daily_stats[row.metric_date.date()]
        bucket["total_sales"] += row.total_sales
        bucket["transaction_count"] += row.transaction_count
        bucket["efficiency_scores"].append(row.efficiency_score)

    # Chronological series for each measure.
    ordered_days = sorted(daily_stats)
    sales_series = [daily_stats[d]["total_sales"] for d in ordered_days]
    txn_series = [daily_stats[d]["transaction_count"] for d in ordered_days]
    eff_series = [np.mean(daily_stats[d]["efficiency_scores"]) for d in ordered_days]

    return {
        "sales_trend": StaffReportService._calculate_trend(sales_series),
        "transaction_trend": StaffReportService._calculate_trend(txn_series),
        "efficiency_trend": StaffReportService._calculate_trend(eff_series),
        "daily_data": [
            {
                "date": d.isoformat(),
                "sales": daily_stats[d]["total_sales"],
                "transactions": daily_stats[d]["transaction_count"],
                "avg_efficiency": np.mean(daily_stats[d]["efficiency_scores"])
            }
            for d in ordered_days
        ]
    }
169
+
170
+ @staticmethod
171
+ def _calculate_trend(values: List[float]) -> str:
172
+ """Calculate trend direction and magnitude"""
173
+ if not values or len(values) < 2:
174
+ return "stable"
175
+
176
+ # Calculate percentage change
177
+ start_avg = np.mean(values[:3]) if len(values) >= 3 else values[0]
178
+ end_avg = np.mean(values[-3:]) if len(values) >= 3 else values[-1]
179
+
180
+ if start_avg == 0:
181
+ return "stable"
182
+
183
+ change = ((end_avg - start_avg) / start_avg) * 100
184
+
185
+ if change > 10:
186
+ return "strongly_increasing"
187
+ elif change > 5:
188
+ return "increasing"
189
+ elif change < -10:
190
+ return "strongly_decreasing"
191
+ elif change < -5:
192
+ return "decreasing"
193
+ else:
194
+ return "stable"
195
+
196
+ @staticmethod
197
+ async def _generate_insights(
198
+ trends: Dict[str, Any],
199
+ activity_dist: Dict[str, int],
200
+ top_performers: List[Dict[str, Any]]
201
+ ) -> List[Dict[str, Any]]:
202
+ """Generate actionable insights from the data"""
203
+ insights = []
204
+
205
+ # Analyze sales trend
206
+ if trends.get("sales_trend") in ["strongly_increasing", "increasing"]:
207
+ insights.append({
208
+ "type": "positive",
209
+ "category": "sales",
210
+ "message": "Sales are showing a positive trend. Keep up the good work!"
211
+ })
212
+ elif trends.get("sales_trend") in ["strongly_decreasing", "decreasing"]:
213
+ insights.append({
214
+ "type": "warning",
215
+ "category": "sales",
216
+ "message": "Sales are declining. Consider reviewing sales strategies and providing additional training."
217
+ })
218
+
219
+ # Analyze efficiency trends
220
+ if trends.get("efficiency_trend") in ["strongly_decreasing", "decreasing"]:
221
+ insights.append({
222
+ "type": "warning",
223
+ "category": "efficiency",
224
+ "message": "Staff efficiency is declining. Consider investigating potential bottlenecks or training needs."
225
+ })
226
+
227
+ # Analyze activity distribution
228
+ total_activities = sum(activity_dist.values())
229
+ if total_activities > 0:
230
+ void_ratio = activity_dist.get("void", 0) / total_activities
231
+ if void_ratio > 0.1: # More than 10% voids
232
+ insights.append({
233
+ "type": "warning",
234
+ "category": "operations",
235
+ "message": "High number of void transactions detected. Review transaction procedures and provide additional training if needed."
236
+ })
237
+
238
+ # Analyze top performers
239
+ if top_performers:
240
+ top_efficiency = top_performers[0]["average_efficiency"]
241
+ if top_efficiency > 8.5: # Very high efficiency
242
+ insights.append({
243
+ "type": "suggestion",
244
+ "category": "training",
245
+ "message": f"Consider having {top_performers[0]['full_name']} share best practices with the team."
246
+ })
247
+
248
+ return insights
249
+
250
@staticmethod
async def generate_staff_comparison(
    user_id: int,
    branch_id: int,
    date: Optional[datetime] = None
) -> Dict[str, Any]:
    """Compare one staff member's daily metrics against branch averages.

    Args:
        user_id / branch_id: Staff member and branch to compare.
        date: Calendar day to compare; defaults to today (UTC).

    Returns:
        Dict with the user's metrics, the branch averages, and the
        percentage deltas; cached for one hour per (user, branch, date).
    """
    if not date:
        date = datetime.utcnow()

    cache_key = f"staff_comparison:{user_id}:{branch_id}:{date.date()}"
    cached_data = await cache.get_cache(cache_key)
    if cached_data:
        return cached_data

    async with db() as session:
        # The individual's metrics row for that calendar day.
        user_row = (await session.execute(
            select(PerformanceMetric).where(
                and_(
                    PerformanceMetric.user_id == user_id,
                    PerformanceMetric.branch_id == branch_id,
                    func.date(PerformanceMetric.metric_date) == date.date()
                )
            )
        )).scalar_one_or_none()

        if not user_row:
            return {
                "message": "No metrics available for the specified date",
                "comparison": None
            }

        # Branch-wide averages for the same day.
        branch_row = (await session.execute(
            select(
                func.avg(PerformanceMetric.total_sales).label('avg_sales'),
                func.avg(PerformanceMetric.transaction_count).label('avg_transactions'),
                func.avg(PerformanceMetric.efficiency_score).label('avg_efficiency')
            ).where(
                and_(
                    PerformanceMetric.branch_id == branch_id,
                    func.date(PerformanceMetric.metric_date) == date.date()
                )
            )
        )).one()

        def pct_vs_avg(value, average):
            # Percentage above (+) or below (-) the branch average;
            # 0 when the average is zero/None to avoid division errors.
            return ((value / average) - 1) * 100 if average else 0

        comparison = {
            "metrics": {
                "total_sales": user_row.total_sales,
                "transaction_count": user_row.transaction_count,
                "efficiency_score": user_row.efficiency_score
            },
            "branch_averages": {
                "avg_sales": float(branch_row.avg_sales),
                "avg_transactions": float(branch_row.avg_transactions),
                "avg_efficiency": float(branch_row.avg_efficiency)
            },
            "comparisons": {
                "sales_percentage": pct_vs_avg(user_row.total_sales, branch_row.avg_sales),
                "transactions_percentage": pct_vs_avg(user_row.transaction_count, branch_row.avg_transactions),
                "efficiency_percentage": pct_vs_avg(user_row.efficiency_score, branch_row.avg_efficiency)
            }
        }

        await cache.set_cache(cache_key, comparison, expire=3600)
        return comparison
328
+
329
+ staff_reports = StaffReportService()
app/utils/cache.py CHANGED
@@ -1,177 +1,177 @@
1
- import redis
2
- import json
3
- import inspect
4
- import functools
5
- from ..core.config import settings
6
- from typing import Any, Optional, Callable, TypeVar
7
- from ..utils.logger import logger
8
-
9
- T = TypeVar('T')
10
-
11
- def cached(ttl_seconds: int):
12
- """
13
- Cache decorator that stores function results in Redis.
14
- Works with both sync and async functions.
15
-
16
- Args:
17
- ttl_seconds: Time to live in seconds for cached results
18
-
19
- Example:
20
- @cached(300) # Cache for 5 minutes
21
- async def get_user(user_id: int):
22
- return await db.fetch_user(user_id)
23
- """
24
- def decorator(func: Callable[..., T]) -> Callable[..., T]:
25
- is_async = inspect.iscoroutinefunction(func)
26
-
27
- def get_cache_key(*args, **kwargs) -> str:
28
- """Generate cache key from function name and arguments"""
29
- # Sort kwargs to ensure consistent key generation
30
- sorted_kwargs = sorted(kwargs.items())
31
- args_str = ":".join(str(arg) for arg in args)
32
- kwargs_str = ":".join(f"{k}={v}" for k, v in sorted_kwargs)
33
- return f"cache:{func.__module__}:{func.__name__}:{args_str}:{kwargs_str}"
34
-
35
- if is_async:
36
- @functools.wraps(func)
37
- async def async_wrapper(*args, **kwargs) -> T:
38
- cache_key = get_cache_key(*args, **kwargs)
39
-
40
- # Try to get from cache
41
- cached_value = await cache.get_cache(cache_key)
42
- if cached_value is not None:
43
- return cached_value
44
-
45
- # Call function and cache result
46
- result = await func(*args, **kwargs)
47
- await cache.set_cache(cache_key, result, ttl_seconds)
48
- return result
49
- return async_wrapper
50
- else:
51
- @functools.wraps(func)
52
- def sync_wrapper(*args, **kwargs) -> T:
53
- cache_key = get_cache_key(*args, **kwargs)
54
-
55
- # Try to get from cache
56
- try:
57
- cached_value = cache.redis_client.get(cache_key)
58
- if cached_value:
59
- return json.loads(cached_value)
60
- except:
61
- if cache.is_connected:
62
- logger.error("Redis error in sync cache access")
63
- return cache.fallback_cache.get(cache_key)
64
-
65
- # Call function and cache result
66
- result = func(*args, **kwargs)
67
- try:
68
- if cache.is_connected:
69
- cache.redis_client.setex(
70
- cache_key,
71
- ttl_seconds,
72
- json.dumps(result)
73
- )
74
- else:
75
- cache.fallback_cache[cache_key] = result
76
- except Exception as e:
77
- logger.error(f"Cache set error in sync wrapper: {str(e)}")
78
- return result
79
- return sync_wrapper
80
-
81
- return decorator
82
-
83
- class RedisCache:
84
- _instance = None
85
-
86
- def __new__(cls):
87
- if cls._instance is None:
88
- cls._instance = super(RedisCache, cls).__new__(cls)
89
- cls._instance.initialize()
90
- return cls._instance
91
-
92
- def initialize(self):
93
- """Initialize Redis connection with fallback to dummy cache"""
94
- try:
95
- self.redis_client = redis.Redis(
96
- host=settings.REDIS_HOST,
97
- port=settings.REDIS_PORT,
98
- decode_responses=True,
99
- socket_timeout=1 # 1 second timeout
100
- )
101
- self.redis_client.ping() # Test connection
102
- self.is_connected = True
103
- logger.info("Redis cache initialized successfully")
104
- except Exception as e:
105
- self.is_connected = False
106
- self.fallback_cache = {}
107
- logger.warning(f"Redis connection failed, using in-memory fallback: {str(e)}")
108
-
109
- async def set_cache(self, key: str, value: Any, expire: int = 3600):
110
- """Set a cache entry with optional expiration time (default 1 hour)"""
111
- try:
112
- if not self.is_connected:
113
- self.fallback_cache[key] = value
114
- return True
115
-
116
- return bool(self.redis_client.setex(
117
- key,
118
- expire,
119
- json.dumps(value)
120
- ))
121
- except Exception as e:
122
- logger.error(f"Cache set error: {str(e)}")
123
- return False
124
-
125
- async def get_cache(self, key: str) -> Optional[Any]:
126
- """Get a cached value by key"""
127
- try:
128
- if not self.is_connected:
129
- return self.fallback_cache.get(key)
130
-
131
- value = self.redis_client.get(key)
132
- return json.loads(value) if value else None
133
- except Exception as e:
134
- logger.error(f"Cache get error: {str(e)}")
135
- return None
136
-
137
- async def delete_cache(self, key: str) -> bool:
138
- """Delete a cache entry by key"""
139
- try:
140
- if not self.is_connected:
141
- self.fallback_cache.pop(key, None)
142
- return True
143
-
144
- return bool(self.redis_client.delete(key))
145
- except Exception as e:
146
- logger.error(f"Cache delete error: {str(e)}")
147
- return False
148
-
149
- async def clear_cache_pattern(self, pattern: str) -> bool:
150
- """Clear all cache entries matching a pattern"""
151
- try:
152
- if not self.is_connected:
153
- # Simple pattern matching for fallback cache
154
- keys_to_delete = [k for k in self.fallback_cache if pattern in k]
155
- for k in keys_to_delete:
156
- del self.fallback_cache[k]
157
- return True
158
-
159
- keys = self.redis_client.keys(pattern)
160
- if keys:
161
- return bool(self.redis_client.delete(*keys))
162
- return True
163
- except Exception as e:
164
- logger.error(f"Cache clear error: {str(e)}")
165
- return False
166
-
167
- def check_connection(self) -> bool:
168
- """Check if Redis connection is alive"""
169
- try:
170
- self.redis_client.ping()
171
- self.is_connected = True
172
- return True
173
- except:
174
- self.is_connected = False
175
- return False
176
-
177
  cache = RedisCache()
 
1
+ import redis
2
+ import json
3
+ import inspect
4
+ import functools
5
+ from ..core.config import settings
6
+ from typing import Any, Optional, Callable, TypeVar
7
+ from ..utils.logger import logger
8
+
9
+ T = TypeVar('T')
10
+
11
def cached(ttl_seconds: int):
    """
    Cache decorator that stores function results in Redis.
    Works with both sync and async functions.

    Args:
        ttl_seconds: Time to live in seconds for cached results

    Example:
        @cached(300)  # Cache for 5 minutes
        async def get_user(user_id: int):
            return await db.fetch_user(user_id)
    """
    def decorator(func: Callable[..., T]) -> Callable[..., T]:
        is_async = inspect.iscoroutinefunction(func)

        def get_cache_key(*args, **kwargs) -> str:
            """Generate cache key from function name and arguments"""
            # Sort kwargs to ensure consistent key generation.
            sorted_kwargs = sorted(kwargs.items())
            args_str = ":".join(str(arg) for arg in args)
            kwargs_str = ":".join(f"{k}={v}" for k, v in sorted_kwargs)
            return f"cache:{func.__module__}:{func.__name__}:{args_str}:{kwargs_str}"

        if is_async:
            @functools.wraps(func)
            async def async_wrapper(*args, **kwargs) -> T:
                cache_key = get_cache_key(*args, **kwargs)

                # Try to get from cache.
                cached_value = await cache.get_cache(cache_key)
                if cached_value is not None:
                    return cached_value

                # Call function and cache result.
                result = await func(*args, **kwargs)
                await cache.set_cache(cache_key, result, ttl_seconds)
                return result
            return async_wrapper
        else:
            @functools.wraps(func)
            def sync_wrapper(*args, **kwargs) -> T:
                cache_key = get_cache_key(*args, **kwargs)

                # Try the cache first. BUG FIX: previously a Redis read
                # error returned fallback_cache.get(key) — usually None —
                # without ever calling the wrapped function, and a falsy
                # cached result ("", 0, []) was treated as a miss
                # (inconsistent with the async path's `is not None`).
                try:
                    if cache.is_connected:
                        raw = cache.redis_client.get(cache_key)
                        if raw is not None:
                            return json.loads(raw)
                    elif cache_key in cache.fallback_cache:
                        return cache.fallback_cache[cache_key]
                except Exception:
                    logger.error("Redis error in sync cache access")

                # Miss (or read failure): compute, then best-effort store.
                result = func(*args, **kwargs)
                try:
                    if cache.is_connected:
                        cache.redis_client.setex(
                            cache_key,
                            ttl_seconds,
                            json.dumps(result)
                        )
                    else:
                        cache.fallback_cache[cache_key] = result
                except Exception as e:
                    logger.error(f"Cache set error in sync wrapper: {str(e)}")
                return result
            return sync_wrapper

    return decorator
82
+
83
class RedisCache:
    """Singleton Redis-backed cache with an in-memory dict fallback.

    When the Redis connection is unavailable (at startup, or later when
    check_connection() detects a drop), all operations transparently use
    a process-local dict instead. Note the fallback ignores expirations
    and is not shared across processes.
    """

    _instance = None

    def __new__(cls):
        # Classic singleton: one shared client/fallback per process.
        if cls._instance is None:
            cls._instance = super(RedisCache, cls).__new__(cls)
            cls._instance.initialize()
        return cls._instance

    def initialize(self):
        """Initialize Redis connection with fallback to dummy cache"""
        # BUG FIX: always create the fallback store. Previously it only
        # existed when the initial connection failed, so a connection that
        # dropped later (check_connection() flipping is_connected to False)
        # caused AttributeError on every fallback access.
        self.fallback_cache = {}
        try:
            self.redis_client = redis.Redis(
                host=settings.REDIS_HOST,
                port=settings.REDIS_PORT,
                decode_responses=True,
                socket_timeout=1  # 1 second timeout
            )
            self.redis_client.ping()  # Test connection
            self.is_connected = True
            logger.info("Redis cache initialized successfully")
        except Exception as e:
            self.is_connected = False
            logger.warning(f"Redis connection failed, using in-memory fallback: {str(e)}")

    async def set_cache(self, key: str, value: Any, expire: int = 3600):
        """Set a cache entry with optional expiration time (default 1 hour)"""
        try:
            if not self.is_connected:
                # Fallback store has no TTL support; entry lives until cleared.
                self.fallback_cache[key] = value
                return True

            return bool(self.redis_client.setex(
                key,
                expire,
                json.dumps(value)
            ))
        except Exception as e:
            logger.error(f"Cache set error: {str(e)}")
            return False

    async def get_cache(self, key: str) -> Optional[Any]:
        """Get a cached value by key; returns None on miss or error."""
        try:
            if not self.is_connected:
                return self.fallback_cache.get(key)

            value = self.redis_client.get(key)
            return json.loads(value) if value else None
        except Exception as e:
            logger.error(f"Cache get error: {str(e)}")
            return None

    async def delete_cache(self, key: str) -> bool:
        """Delete a cache entry by key"""
        try:
            if not self.is_connected:
                self.fallback_cache.pop(key, None)
                return True

            return bool(self.redis_client.delete(key))
        except Exception as e:
            logger.error(f"Cache delete error: {str(e)}")
            return False

    async def clear_cache_pattern(self, pattern: str) -> bool:
        """Clear all cache entries matching a pattern"""
        try:
            if not self.is_connected:
                # Fallback uses simple substring matching, not glob patterns.
                keys_to_delete = [k for k in self.fallback_cache if pattern in k]
                for k in keys_to_delete:
                    del self.fallback_cache[k]
                return True

            keys = self.redis_client.keys(pattern)
            if keys:
                return bool(self.redis_client.delete(*keys))
            return True
        except Exception as e:
            logger.error(f"Cache clear error: {str(e)}")
            return False

    def check_connection(self) -> bool:
        """Check if Redis connection is alive and update is_connected."""
        try:
            self.redis_client.ping()
            self.is_connected = True
            return True
        # Narrowed from a bare `except:` which also swallowed SystemExit
        # and KeyboardInterrupt.
        except Exception:
            self.is_connected = False
            return False
176
+
177
  cache = RedisCache()
app/utils/logger.py CHANGED
@@ -1,59 +1,245 @@
1
  import logging
2
  import sys
 
 
 
 
3
  from datetime import datetime
4
  from pathlib import Path
5
- from logging.handlers import RotatingFileHandler
 
 
 
6
  from ..core.config import settings
7
 
8
  # Create logs directory if it doesn't exist
9
  logs_dir = Path("logs")
10
  logs_dir.mkdir(exist_ok=True)
11
 
12
- # Configure logging format
13
- log_format = logging.Formatter(
14
- "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
15
- )
16
 
17
- def setup_logger(name: str) -> logging.Logger:
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
18
  logger = logging.getLogger(name)
19
  logger.setLevel(logging.INFO)
20
 
21
- # Console handler
 
 
 
22
  console_handler = logging.StreamHandler(sys.stdout)
23
- console_handler.setFormatter(log_format)
 
 
24
  logger.addHandler(console_handler)
25
 
26
- # File handler with rotation
27
- file_handler = RotatingFileHandler(
28
  logs_dir / f"{name}.log",
 
 
 
 
 
 
 
 
 
 
 
29
  maxBytes=10485760, # 10MB
30
- backupCount=5
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
31
  )
32
- file_handler.setFormatter(log_format)
33
- logger.addHandler(file_handler)
 
34
 
35
  return logger
36
 
37
  # Create main application logger
38
  logger = setup_logger("admin_dashboard")
39
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
40
  def log_api_request(method: str, path: str, status_code: int, duration: float):
41
- """Log API request details"""
42
  logger.info(
43
- f"API Request - Method: {method}, Path: {path}, "
44
- f"Status: {status_code}, Duration: {duration:.3f}s"
 
 
 
 
 
 
45
  )
 
 
 
46
 
47
- def log_error(error: Exception, context: dict = None):
48
- """Log error with context"""
49
  logger.error(
50
- f"Error: {str(error)}, Type: {type(error).__name__}, "
51
- f"Context: {context or {}}"
 
 
 
 
 
52
  )
 
53
 
54
- def log_database_operation(operation: str, collection: str, success: bool):
55
- """Log database operations"""
56
  logger.info(
57
- f"Database Operation - Type: {operation}, Collection: {collection}, "
58
- f"Success: {success}"
59
- )
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
  import logging
2
  import sys
3
+ import json
4
+ import time
5
+ import uuid
6
+ import traceback
7
  from datetime import datetime
8
  from pathlib import Path
9
+ from logging.handlers import RotatingFileHandler, TimedRotatingFileHandler
10
+ from typing import Optional, Dict, Any, Union
11
+ from contextvars import ContextVar
12
+ from functools import wraps
13
  from ..core.config import settings
14
 
15
  # Create logs directory if it doesn't exist
16
  logs_dir = Path("logs")
17
  logs_dir.mkdir(exist_ok=True)
18
 
19
+ # Correlation ID context
20
+ correlation_id: ContextVar[str] = ContextVar('correlation_id', default='')
 
 
21
 
22
class StructuredFormatter(logging.Formatter):
    """Formatter that renders every log record as one JSON document."""

    def __init__(self):
        super().__init__()
        # Static fields stamped onto every log entry.
        # NOTE(review): 'hostname' actually carries the project name, not a
        # machine hostname — confirm that is intentional.
        self.default_fields = {
            'hostname': settings.PROJECT_NAME,
            'version': settings.VERSION
        }

    def format(self, record: logging.LogRecord) -> str:
        """Serialize *record*, the correlation ID, and any extras to JSON."""
        payload = {
            'timestamp': datetime.utcfromtimestamp(record.created).isoformat(),
            'level': record.levelname,
            'logger': record.name,
            'message': record.getMessage(),
            'correlation_id': correlation_id.get(),
            **self.default_fields
        }

        # Optional per-record attributes set by the logging helpers.
        if hasattr(record, 'duration'):
            payload['duration'] = f"{record.duration:.3f}s"
        if hasattr(record, 'request_id'):
            payload['request_id'] = record.request_id

        if record.exc_info:
            exc_type, exc_value, _tb = record.exc_info
            payload['exception'] = {
                'type': exc_type.__name__,
                'message': str(exc_value),
                'stacktrace': traceback.format_exception(*record.exc_info)
            }

        # Merge any structured extras passed via extra={'extra_fields': ...}
        if hasattr(record, 'extra_fields'):
            payload.update(record.extra_fields)

        return json.dumps(payload)
60
+
61
class CustomLogger(logging.Logger):
    """Logger subclass that also tracks request/error/database counters."""

    def __init__(self, name: str):
        super().__init__(name)
        # Rolling counters; cleared via the module-level reset_metrics().
        self.metrics: Dict[str, Dict[str, Union[int, float]]] = {
            'requests': {'count': 0, 'total_duration': 0},
            'errors': {'count': 0},
            'database': {'operations': 0, 'failures': 0}
        }

    def log_with_context(self, level: int, msg: str, extra_fields: Optional[Dict[str, Any]] = None, **kwargs):
        """Log *msg* at *level*, forwarding *extra_fields* to the JSON formatter."""
        if extra_fields:
            kwargs['extra'] = {'extra_fields': extra_fields}
        self.log(level, msg, **kwargs)

    def start_operation(self, operation_name: str) -> float:
        """Return a start timestamp for timing the named operation."""
        return time.time()

    def end_operation(self, start_time: float, operation_name: str, success: bool = True):
        """Log how long an operation took and return the elapsed seconds."""
        elapsed = time.time() - start_time
        self.info(
            f"Operation completed: {operation_name}",
            extra={'extra_fields': {
                'operation': operation_name,
                'duration': elapsed,
                'success': success
            }}
        )
        return elapsed
94
+
95
class HealthCheckFilter(logging.Filter):
    """Guarantee every record carries a boolean ``health_check`` attribute."""
    def filter(self, record):
        # Default to False so the health-check formatter never hits a
        # missing-attribute error; never suppresses the record itself.
        if not hasattr(record, 'health_check'):
            record.health_check = False
        return True
100
+
101
def setup_logger(name: str) -> CustomLogger:
    """Set up enhanced logger with multiple handlers and formatters.

    Args:
        name: Logger name, also used as the basename of the log files.

    Returns:
        A CustomLogger with console, daily-rotating JSON, error-file and
        health-check handlers attached.
    """
    # Use custom logger class so newly created loggers carry the metrics dict
    logging.setLoggerClass(CustomLogger)
    logger = logging.getLogger(name)
    logger.setLevel(logging.INFO)

    # Fix: guard against repeated setup of the same name, which previously
    # attached duplicate handlers and emitted every log line multiple times.
    if logger.handlers:
        return logger

    # Structured JSON formatter for file logs
    json_formatter = StructuredFormatter()

    # Console handler with standard (human-readable) formatting
    console_handler = logging.StreamHandler(sys.stdout)
    console_handler.setFormatter(logging.Formatter(
        "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
    ))
    logger.addHandler(console_handler)

    # File handler with JSON formatting and daily rotation
    daily_handler = TimedRotatingFileHandler(
        logs_dir / f"{name}.log",
        when="midnight",
        interval=1,
        backupCount=30,  # Keep a month of logs
        encoding="utf-8"
    )
    daily_handler.setFormatter(json_formatter)
    logger.addHandler(daily_handler)

    # Size-rotated handler capturing only error-level logs
    error_handler = RotatingFileHandler(
        logs_dir / f"{name}_error.log",
        maxBytes=10485760,  # 10MB
        backupCount=10
    )
    error_handler.setLevel(logging.ERROR)
    error_handler.setFormatter(json_formatter)
    logger.addHandler(error_handler)

    # Ensure every record has a health_check attribute for the formatter below
    logger.addFilter(HealthCheckFilter())

    # Separate daily-rotated file for health-check records only
    health_handler = TimedRotatingFileHandler(
        logs_dir / f"{name}_health_checks.log",
        when='midnight',
        interval=1,
        backupCount=30
    )
    health_handler.setLevel(logging.INFO)
    health_format = logging.Formatter(
        '%(asctime)s - %(levelname)s - %(message)s - Health: %(health_check)s'
    )
    health_handler.setFormatter(health_format)
    # Only records explicitly flagged with health_check=True reach this file
    health_handler.addFilter(lambda record: getattr(record, 'health_check', False))
    logger.addHandler(health_handler)

    return logger
158
 
159
  # Create main application logger
160
  logger = setup_logger("admin_dashboard")
161
 
162
def with_correlation_id():
    """Decorator factory binding a fresh UUID correlation ID for each call."""
    def decorator(func):
        @wraps(func)
        async def wrapper(*args, **kwargs):
            token = correlation_id.set(str(uuid.uuid4()))
            try:
                return await func(*args, **kwargs)
            finally:
                # Always restore the previous correlation ID, even on error.
                correlation_id.reset(token)
        return wrapper
    return decorator
175
+
176
def log_api_request(method: str, path: str, status_code: int, duration: float):
    """Log a completed API request and update the rolling request metrics."""
    fields = {
        'method': method,
        'path': path,
        'status_code': status_code,
        'duration': duration,
        'timestamp': datetime.utcnow().isoformat()
    }
    logger.info("API Request", extra={'extra_fields': fields})

    # Keep the request counters in step with what was just logged.
    request_stats = logger.metrics['requests']
    request_stats['count'] += 1
    request_stats['total_duration'] += duration
191
 
192
def log_error(error: Exception, context: Optional[Dict] = None):
    """Log an exception with stack trace and context, and bump the error count."""
    details = {
        'error_type': type(error).__name__,
        'context': context or {},
        'timestamp': datetime.utcnow().isoformat()
    }
    # exc_info=True makes the structured formatter attach the full traceback.
    logger.error(
        f"Error occurred: {str(error)}",
        exc_info=True,
        extra={'extra_fields': details}
    )
    logger.metrics['errors']['count'] += 1
204
 
205
def log_database_operation(operation: str, collection: str, success: bool, duration: Optional[float] = None):
    """Log a database operation with its outcome and optional duration."""
    logger.info(
        "Database Operation",
        extra={'extra_fields': {
            'operation_type': operation,
            'collection': collection,
            'success': success,
            'duration': duration,
            'timestamp': datetime.utcnow().isoformat()
        }}
    )
    # Track totals and failures for the metrics endpoint.
    db_stats = logger.metrics['database']
    db_stats['operations'] += 1
    if not success:
        db_stats['failures'] += 1
220
+
221
def log_health_check(component: str, status: str, details: dict = None):
    """Log a health-check result; routed to the health-check log file."""
    parts = [f"Health Check - {component}: {status}"]
    if details:
        parts.append(f"Details: {details}")
    # health_check=True lets the dedicated handler's filter pick this up.
    logger.info(" - ".join(parts), extra={'health_check': True})
227
+
228
def log_maintenance_activity(activity: str, result: str, details: dict = None):
    """Log a maintenance activity and its result."""
    parts = [f"Maintenance - {activity}: {result}"]
    if details:
        parts.append(f"Details: {details}")
    logger.info(" - ".join(parts))
234
+
235
def get_metrics() -> Dict[str, Dict[str, Union[int, float]]]:
    """Return the application logger's live metric counters.

    Note: this is the mutable dict itself, not a copy — callers must not
    modify it; use reset_metrics() to clear it.
    """
    return logger.metrics
238
+
239
def reset_metrics():
    """Zero out every logging metric counter."""
    # Rebind rather than clear in place so stale references are dropped too.
    logger.metrics = {
        'requests': {'count': 0, 'total_duration': 0},
        'errors': {'count': 0},
        'database': {'operations': 0, 'failures': 0}
    }
app/utils/retry.py ADDED
@@ -0,0 +1,164 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import asyncio
2
+ import functools
3
+ from typing import Type, Tuple, Optional, TypeVar, Callable, Any
4
+ from ..utils.logger import logger
5
+
6
+ T = TypeVar('T')
7
+
8
def with_retry(
    max_retries: int = 3,
    delay: float = 1.0,
    backoff_factor: float = 2.0,
    exceptions: Tuple[Type[Exception], ...] = (Exception,)
) -> Callable:
    """
    Decorator that retries an async callable with exponential backoff.

    Args:
        max_retries (int): Maximum number of retry attempts
        delay (float): Initial delay between retries in seconds
        backoff_factor (float): Multiplier for delay after each retry
        exceptions (tuple): Tuple of exceptions to catch and retry on
    """
    def decorator(func: Callable[..., Any]) -> Callable[..., Any]:
        @functools.wraps(func)
        async def wrapper(*args: Any, **kwargs: Any) -> Any:
            wait = delay
            total_attempts = max_retries + 1  # initial try plus retries

            for attempt in range(1, total_attempts + 1):
                try:
                    return await func(*args, **kwargs)
                except exceptions as exc:
                    if attempt == total_attempts:
                        # Out of attempts: log and propagate the last failure.
                        logger.error(
                            f"All {total_attempts} attempts failed for {func.__name__}: {str(exc)}"
                        )
                        raise
                    logger.warning(
                        f"Attempt {attempt} failed for {func.__name__}: {str(exc)}. "
                        f"Retrying in {wait} seconds..."
                    )
                    await asyncio.sleep(wait)
                    wait *= backoff_factor

        return wrapper
    return decorator
53
+
54
def retry_with_backoff(
    max_retries: int = 3,
    initial_delay: float = 1.0,
    max_delay: float = 30.0,
    backoff_factor: float = 2.0,
    exceptions: Optional[Tuple[Type[Exception], ...]] = None
) -> Callable:
    """
    Retry decorator for async callables with capped exponential backoff
    and jitter.

    Args:
        max_retries (int): Maximum number of retry attempts
        initial_delay (float): Initial delay between retries in seconds
        max_delay (float): Maximum delay between retries in seconds
        backoff_factor (float): Multiplier for delay after each retry
        exceptions (tuple): Tuple of exceptions to catch and retry on
    """
    retryable = (Exception,) if exceptions is None else exceptions

    def decorator(func: Callable[..., Any]) -> Callable[..., Any]:
        @functools.wraps(func)
        async def wrapper(*args: Any, **kwargs: Any) -> Any:
            op_name = func.__name__
            wait = initial_delay
            attempt = 0

            while True:
                try:
                    return await func(*args, **kwargs)
                except retryable as exc:
                    attempt += 1
                    if attempt > max_retries:
                        logger.error(
                            f"Operation {op_name} failed after {max_retries} retries: {str(exc)}"
                        )
                        raise

                    # Jitter (sub-second, derived from the loop clock) spreads
                    # retries out to avoid a thundering herd.
                    jitter = (asyncio.get_event_loop().time() * 1000) % 1.0
                    sleep_time = min(wait + jitter, max_delay)

                    logger.warning(
                        f"Operation {op_name} failed (attempt {attempt}/{max_retries}): "
                        f"{str(exc)}. Retrying in {sleep_time:.2f} seconds..."
                    )

                    await asyncio.sleep(sleep_time)
                    wait = min(wait * backoff_factor, max_delay)

        return wrapper
    return decorator
107
+
108
def circuit_breaker(
    failure_threshold: int = 5,
    reset_timeout: float = 60.0
) -> Callable:
    """
    Circuit-breaker decorator that short-circuits calls to a failing
    async callable.

    Args:
        failure_threshold (int): Number of failures before opening circuit
        reset_timeout (float): Time in seconds before attempting to close circuit
    """
    def decorator(func: Callable[..., Any]) -> Callable[..., Any]:
        # Mutable breaker state shared across all calls to this function.
        state = {
            'failures': 0,
            'last_failure_time': 0,
            'is_open': False
        }

        @functools.wraps(func)
        async def wrapper(*args: Any, **kwargs: Any) -> Any:
            now = asyncio.get_event_loop().time()

            if state['is_open']:
                elapsed = now - state['last_failure_time']
                if elapsed > reset_timeout:
                    # Timeout expired: close the circuit and allow a retry.
                    state['is_open'] = False
                    state['failures'] = 0
                else:
                    raise Exception(
                        f"Circuit breaker is open for {func.__name__}. "
                        f"Try again in {reset_timeout - elapsed:.1f} seconds"
                    )

            try:
                result = await func(*args, **kwargs)
            except Exception:
                # Record the failure and open the circuit at the threshold.
                state['failures'] += 1
                state['last_failure_time'] = now
                if state['failures'] >= failure_threshold:
                    state['is_open'] = True
                    logger.error(
                        f"Circuit breaker opened for {func.__name__} after {failure_threshold} failures"
                    )
                raise

            # Success clears the consecutive-failure count.
            state['failures'] = 0
            return result

        return wrapper
    return decorator
app/utils/tasks.py CHANGED
@@ -6,6 +6,7 @@ from ..services.websocket import create_and_broadcast_notification
6
  import asyncio
7
  from sqlalchemy import select, delete
8
  from ..db.models import Event, User, Notification
 
9
 
10
  async def check_event_reminders():
11
  """Check and send event reminders"""
@@ -118,6 +119,21 @@ async def perform_weekly_maintenance():
118
  except Exception as e:
119
  logger.error(f"Error in weekly maintenance: {str(e)}")
120
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
121
  async def run_periodic_tasks():
122
  """Run periodic maintenance tasks"""
123
  daily_maintenance_run = False
 
6
  import asyncio
7
  from sqlalchemy import select, delete
8
  from ..db.models import Event, User, Notification
9
+ from ..services.pos_analytics import pos_analytics
10
 
11
  async def check_event_reminders():
12
  """Check and send event reminders"""
 
119
  except Exception as e:
120
  logger.error(f"Error in weekly maintenance: {str(e)}")
121
 
122
async def sync_pos_metrics_task():
    """Background loop that syncs POS metrics once every 5 minutes."""
    while True:
        try:
            if await pos_analytics.sync_all_metrics():
                logger.info("Successfully synced POS metrics")
            else:
                logger.error("Failed to sync POS metrics")
        except Exception as exc:
            logger.error(f"Error in POS metrics sync task: {str(exc)}")

        # Sleep outside the try so the loop keeps its pace even after errors.
        await asyncio.sleep(300)
136
+
137
  async def run_periodic_tasks():
138
  """Run periodic maintenance tasks"""
139
  daily_maintenance_run = False