prabha-bms committed on
Commit
7e3d0ea
·
1 Parent(s): 061f8a4

core lib integration

Browse files
Dockerfile CHANGED
@@ -15,7 +15,9 @@ USER user
15
  WORKDIR /app
16
 
17
  COPY --chown=user ./requirements.txt requirements.txt
 
18
  RUN pip install --no-cache-dir --upgrade pip && \
 
19
  pip install --no-cache-dir --upgrade -r requirements.txt
20
 
21
  COPY --chown=user . /app
 
15
  WORKDIR /app
16
 
17
  COPY --chown=user ./requirements.txt requirements.txt
18
+ COPY --chown=user ./app/insightfy_utils-0.1.0-py3-none-any.whl insightfy_utils-0.1.0-py3-none-any.whl
19
  RUN pip install --no-cache-dir --upgrade pip && \
20
+ pip install --no-cache-dir insightfy_utils-0.1.0-py3-none-any.whl && \
21
  pip install --no-cache-dir --upgrade -r requirements.txt
22
 
23
  COPY --chown=user . /app
app/app.py CHANGED
@@ -1,16 +1,23 @@
1
  from fastapi import FastAPI
2
  from fastapi.middleware.cors import CORSMiddleware
 
 
 
3
  from app.routers.catalogue_router import router as catalogue_router
4
  from app.routers.supplier_route import router as supplier_router
5
  from app.routers.taxonomy_route import router as taxonomy_router
6
  from app.routers.promotion_router import router as promotion_router
7
  from app.routers.gift_card_router import router as gift_card_router
8
 
 
 
 
 
9
  # Initialize FastAPI application
10
  app = FastAPI(
11
- title="Entity Managment API",
12
  version="1.0",
13
- description="Entity Managment API for Suppliers, Products and Services",
14
  )
15
 
16
  # CORS configuration
@@ -62,7 +69,16 @@ async def health_check():
62
  """
63
  Health check endpoint to verify the API is running.
64
  """
65
- return {"message": "Resource Managment API is up and running!"}
 
 
 
 
 
 
 
 
 
66
 
67
  # Ensure database connection is started/stopped with FastAPI lifecycle
68
  from app.sql import connect_to_database, disconnect_from_database
@@ -70,7 +86,9 @@ from app.sql import connect_to_database, disconnect_from_database
70
  @app.on_event("startup")
71
  async def startup():
72
  await connect_to_database()
 
73
 
74
  @app.on_event("shutdown")
75
  async def shutdown():
76
- await disconnect_from_database()
 
 
1
  from fastapi import FastAPI
2
  from fastapi.middleware.cors import CORSMiddleware
3
+ from insightfy_utils.logging import setup_logging, get_logger
4
+ from insightfy_utils.telemetry import check_service_health
5
+
6
  from app.routers.catalogue_router import router as catalogue_router
7
  from app.routers.supplier_route import router as supplier_router
8
  from app.routers.taxonomy_route import router as taxonomy_router
9
  from app.routers.promotion_router import router as promotion_router
10
  from app.routers.gift_card_router import router as gift_card_router
11
 
12
+ # Setup logging at module level
13
+ setup_logging(level="INFO", format_type="json", app_name="insightfy-bloom-ms-ems")
14
+ logger = get_logger(__name__)
15
+
16
  # Initialize FastAPI application
17
  app = FastAPI(
18
+ title="Entity Management API",
19
  version="1.0",
20
+ description="Entity Management API for Suppliers, Products and Services",
21
  )
22
 
23
  # CORS configuration
 
69
  """
70
  Health check endpoint to verify the API is running.
71
  """
72
+ return {"message": "Entity Management API is up and running!", "version": "1.0"}
73
+
74
+ # Health check endpoint with service status
75
+ @app.get("/health", tags=["Health"])
76
+ async def health():
77
+ """
78
+ Health check endpoint with service dependency status.
79
+ """
80
+ # TODO: Pass actual db connectors once migrated
81
+ return {"status": "healthy", "service": "insightfy-bloom-ms-ems", "version": "1.0"}
82
 
83
  # Ensure database connection is started/stopped with FastAPI lifecycle
84
  from app.sql import connect_to_database, disconnect_from_database
 
86
  @app.on_event("startup")
87
  async def startup():
88
  await connect_to_database()
89
+ logger.info("Application started successfully")
90
 
91
  @app.on_event("shutdown")
92
  async def shutdown():
93
+ await disconnect_from_database()
94
+ logger.info("Application shutdown complete")
app/dependencies/auth.py CHANGED
@@ -1,13 +1,16 @@
1
- import logging
2
  from fastapi import Depends, HTTPException, status
3
  from enum import Enum
4
- from app.utils.jwt import decode_jwt_token
 
5
  from settings import SECRET_KEY, ALGORITHM
6
  from fastapi.security import APIKeyHeader
7
  from app.nosql import mongo_db
8
 
9
- # Logger setup (consistent with project)
10
- logger = logging.getLogger(__name__)
 
 
 
11
 
12
  # AccessID Enum for permission strings
13
  class AccessID(str, Enum):
@@ -50,16 +53,22 @@ def get_current_user(token: str = Depends(oauth2_scheme)) -> dict:
50
  try:
51
  if token.startswith("Bearer "):
52
  token = token.split(" ")[1]
53
- payload = decode_jwt_token(token)
54
- logger.info(f"Authenticated user {payload.get('associate_id')} for merchant {payload.get('merchant_id')}")
 
 
 
 
 
 
55
  return {
56
  "associate_id": payload["associate_id"],
57
  "merchant_id": payload["merchant_id"],
58
- "branch_id":payload["branch_id"],
59
  "role_id": payload.get("role_id", "user")
60
  }
61
  except Exception as e:
62
- logger.warning(f"Authentication failed: {str(e)}")
63
  raise HTTPException(
64
  status_code=status.HTTP_401_UNAUTHORIZED,
65
  detail="Invalid authentication credentials",
@@ -69,49 +78,72 @@ def get_current_user(token: str = Depends(oauth2_scheme)) -> dict:
69
  async def user_has_permission(merchant_id: str, role_id: str, permission: str) -> bool:
70
  """
71
  Check if the user has the required permission using MongoDB filtering.
72
- permission format: RESOURCE:ACTION (e.g., appointments:create)
73
  """
74
  try:
75
- # Support both 'RESOURCE:ACTION' and 'ACTION_RESOURCE' formats
76
  if "_" in permission:
77
  parts = permission.split("_", 1)
78
  if len(parts) == 2:
79
  action, resource = parts
80
  else:
81
- logger.warning(f"Permission format invalid: {permission}")
82
  return False
 
 
 
 
83
  query = {
84
  "merchant_id": merchant_id,
85
  "role_id": role_id,
86
  f"permissions.{resource}": action
87
  }
88
- print(query,"queryyy")
89
  role_doc = await mongo_db["access_roles"].find_one(query)
90
  has_perm = role_doc is not None
91
- logger.info(f"Permission check for role {role_id}, merchant {merchant_id}, permission {permission}: {has_perm}")
 
 
 
 
 
 
 
 
92
  return has_perm
93
  except Exception as e:
94
- logger.error(f"Permission check error: {str(e)}")
95
  return False
96
 
97
- # Reusable FastAPI dependency for permission checks
98
- async def require_permission(
99
  access_id: str,
100
  current_user: dict = Depends(get_current_user)
101
  ) -> dict:
102
  """
103
  Dependency to check user permission for a given access_id (permission string).
104
- Raises HTTPException(403) if not permitted.
105
- Returns current_user if permitted.
106
  """
107
  merchant_id = current_user.get("merchant_id")
108
  user_id = current_user.get("associate_id")
109
  role_id = current_user.get("role_id")
 
110
  if not user_id or not merchant_id or not role_id:
111
- logger.warning(f"Permission denied: missing user/merchant/role info for user {user_id}")
 
 
 
112
  raise HTTPException(status_code=403, detail="Forbidden")
 
113
  if not await user_has_permission(merchant_id, role_id, access_id):
114
- logger.warning(f"Permission denied for user {user_id} on {access_id}")
 
 
 
115
  raise HTTPException(status_code=403, detail="Forbidden")
116
- logger.info(f"Permission granted for user {user_id} on {access_id}")
117
- return current_user
 
 
 
 
 
 
 
 
 
1
  from fastapi import Depends, HTTPException, status
2
  from enum import Enum
3
+ from insightfy_utils.auth import JWTHandler
4
+ from insightfy_utils.logging import get_logger
5
  from settings import SECRET_KEY, ALGORITHM
6
  from fastapi.security import APIKeyHeader
7
  from app.nosql import mongo_db
8
 
9
+ # Logger setup (migrated to insightfy-utils)
10
+ logger = get_logger(__name__)
11
+
12
+ # Initialize JWT handler
13
+ jwt_handler = JWTHandler(secret=SECRET_KEY, algorithm=ALGORITHM)
14
 
15
  # AccessID Enum for permission strings
16
  class AccessID(str, Enum):
 
53
  try:
54
  if token.startswith("Bearer "):
55
  token = token.split(" ")[1]
56
+ payload = jwt_handler.decode_token(token)
57
+ logger.info(
58
+ "Authenticated user",
59
+ extra={
60
+ "associate_id": payload.get('associate_id'),
61
+ "merchant_id": payload.get('merchant_id')
62
+ }
63
+ )
64
  return {
65
  "associate_id": payload["associate_id"],
66
  "merchant_id": payload["merchant_id"],
67
+ "branch_id": payload["branch_id"],
68
  "role_id": payload.get("role_id", "user")
69
  }
70
  except Exception as e:
71
+ logger.warning("Authentication failed", extra={"error": str(e)})
72
  raise HTTPException(
73
  status_code=status.HTTP_401_UNAUTHORIZED,
74
  detail="Invalid authentication credentials",
 
78
  async def user_has_permission(merchant_id: str, role_id: str, permission: str) -> bool:
79
  """
80
  Check if the user has the required permission using MongoDB filtering.
 
81
  """
82
  try:
 
83
  if "_" in permission:
84
  parts = permission.split("_", 1)
85
  if len(parts) == 2:
86
  action, resource = parts
87
  else:
88
+ logger.warning("Permission format invalid", extra={"permission": permission})
89
  return False
90
+ else:
91
+ logger.warning("Permission format invalid", extra={"permission": permission})
92
+ return False
93
+
94
  query = {
95
  "merchant_id": merchant_id,
96
  "role_id": role_id,
97
  f"permissions.{resource}": action
98
  }
99
+ logger.debug("Permission query", extra={"query": query})
100
  role_doc = await mongo_db["access_roles"].find_one(query)
101
  has_perm = role_doc is not None
102
+ logger.info(
103
+ "Permission check result",
104
+ extra={
105
+ "role_id": role_id,
106
+ "merchant_id": merchant_id,
107
+ "permission": permission,
108
+ "has_permission": has_perm
109
+ }
110
+ )
111
  return has_perm
112
  except Exception as e:
113
+ logger.error("Permission check error", extra={"error": str(e)}, exc_info=e)
114
  return False
115
 
116
+ async def require_permission_dependency(
 
117
  access_id: str,
118
  current_user: dict = Depends(get_current_user)
119
  ) -> dict:
120
  """
121
  Dependency to check user permission for a given access_id (permission string).
122
+ Renamed to avoid conflict with insightfy_utils.auth.require_permission
 
123
  """
124
  merchant_id = current_user.get("merchant_id")
125
  user_id = current_user.get("associate_id")
126
  role_id = current_user.get("role_id")
127
+
128
  if not user_id or not merchant_id or not role_id:
129
+ logger.warning(
130
+ "Permission denied: missing user info",
131
+ extra={"user_id": user_id, "merchant_id": merchant_id, "role_id": role_id}
132
+ )
133
  raise HTTPException(status_code=403, detail="Forbidden")
134
+
135
  if not await user_has_permission(merchant_id, role_id, access_id):
136
+ logger.warning(
137
+ "Permission denied",
138
+ extra={"user_id": user_id, "access_id": access_id}
139
+ )
140
  raise HTTPException(status_code=403, detail="Forbidden")
141
+
142
+ logger.info(
143
+ "Permission granted",
144
+ extra={"user_id": user_id, "access_id": access_id}
145
+ )
146
+ return current_user
147
+
148
+ # Backward compatibility alias
149
+ require_permission = require_permission_dependency
app/insightfy_utils-0.1.0-py3-none-any.whl ADDED
Binary file (32.2 kB). View file
 
app/models/catalogue_models.py CHANGED
@@ -1,4 +1,3 @@
1
- import logging
2
  import re
3
  from typing import Any, Dict, List, Optional, Union
4
  import uuid
@@ -6,6 +5,7 @@ from bson import ObjectId
6
  from fastapi import HTTPException
7
  from pydantic import ValidationError
8
  from sqlalchemy import func, update
 
9
  from app.constants.collections import CATALOGUES_COLLECTION
10
  from app.repositories.db import db, fetch_documents, fetch_pos_documents, fetch_many_aggregate
11
  from app.repositories.inventory_repository import fetch_stock_by_catalogue_ids
@@ -16,8 +16,8 @@ from app.utils.info_widget_utils import build_aggregation_pipeline_for_widget
16
  from app.sql import async_session
17
  from sqlalchemy.exc import SQLAlchemyError
18
  from app.schemas.inventory_schema import branch_inventory
19
- # Configure logging for this module
20
- logger = logging.getLogger(__name__)
21
 
22
  # Constants for aggregation pipeline stages
23
  CATEGORY_FIELD = "$category"
@@ -30,31 +30,19 @@ class CatalogueModel:
30
  @staticmethod
31
  async def create_catalogue_item(data: Dict[str, Any]) -> str:
32
  try:
33
- # Generate a UUID for associate_id
34
- catalogue_id = str(uuid.uuid4()) # Generates a random UUID
35
-
36
- data['catalogue_id'] = catalogue_id #
37
  data = convert_dates(data)
38
-
39
- # Insert data into MongoDB collection
40
  result = await db['catalogues'].insert_one(data)
41
-
42
- # Extract the inserted ID and convert to string
43
  catalogue_inserted_id = str(result.inserted_id)
44
-
45
- # Log success for traceability
46
- logger.info(f"Catalogue item created successfully with ID: {catalogue_inserted_id}")
47
-
48
  return catalogue_inserted_id
49
  except Exception as e:
50
- # Log the error with stack trace for debugging
51
- logger.error(f"Error creating Catalogue item: {e}", exc_info=True)
52
-
53
- # Raise a more specific error with a custom message
54
  raise RuntimeError("Failed to create Catalogue item") from e
55
 
56
  @staticmethod
57
- async def update_catalogue_data(catalogue_id: str, update_fields: dict,merchant_id:str) -> bool:
58
  try:
59
  existing_doc = await db['catalogues'].find_one(
60
  {"catalogue_id": catalogue_id, "merchant_id": merchant_id}
@@ -66,29 +54,15 @@ class CatalogueModel:
66
  {"catalogue_id": catalogue_id, "merchant_id": merchant_id},
67
  {"$set": update_fields}
68
  )
69
-
70
  return result.modified_count > 0
71
  except RuntimeError as e:
72
- logger.error(f"Error updating catalogue data: {e}", exc_info=True)
73
  raise RuntimeError("Failed to update catalogue data") from e
74
 
75
  @staticmethod
76
- async def list_items(filter_criteria: Dict[str, Any], offset: int, limit: int, projection_list: Union[List[str], None] = None,branch_id=None) -> Dict[str, Any]:
77
- """
78
- Fetches a paginated list of catalogue items based on filter criteria.
79
-
80
- Args:
81
- filter_criteria (Dict[str, Any]): Criteria to filter the items.
82
- offset (int): Number of items to skip for pagination.
83
- limit (int): Maximum number of items to return.
84
-
85
- Returns:
86
- Dict[str, Any]: A dictionary containing the list of items and metadata.
87
-
88
- Raises:
89
- RuntimeError: If the operation fails.
90
- """
91
- logger.info(f"Fetching catalogue items with criteria={filter_criteria}, offset={offset}, limit={limit}")
92
  try:
93
  projection = None
94
  if projection_list:
@@ -99,61 +73,35 @@ class CatalogueModel:
99
  items_data = raw_items.get("documents", [])
100
 
101
  catalogue_ids = [item.get("catalogue_id") for item in items_data if item.get("catalogue_id")]
102
- stock_map = await fetch_stock_by_catalogue_ids(filter_criteria
103
- ["merchant_id"],branch_id, catalogue_ids) if branch_id else {}
104
-
105
 
106
  parsed_items = []
107
  for item in items_data:
108
- # Inject current stock from Postgres if exists
109
  if item.get("catalogue_id") in stock_map:
110
  item.update(stock_map[item["catalogue_id"]])
111
  try:
112
  parsed_items.append(item)
113
  except ValidationError as e:
114
- logger.warning(f"Failed to parse item: {item}, error: {e}")
115
 
116
- logger.debug(f"Fetched and parsed {len(parsed_items)} items")
117
  return {
118
  "total": raw_items.get("total", len(parsed_items)),
119
  "items": parsed_items
120
  }
121
-
122
  except Exception as e:
123
- logger.error(f"Error listing catalogue items: {e}", exc_info=True)
124
  raise RuntimeError("Failed to list catalogue items") from e
125
 
126
  @staticmethod
127
- async def pos_items(
128
- filter_criteria: Dict[str, Any], fields: Optional[Dict[str, int]] = None,branch_id=None) -> List[Dict[str, Any]]:
129
- """
130
- Fetches POS catalogue items with optional projection fields.
131
-
132
- Args:
133
- filter_criteria (Dict[str, Any]): Criteria to filter the items.
134
- fields (Optional[Dict[str, int]]): Projection fields for MongoDB query.
135
-
136
- Returns:
137
- List[Dict[str, Any]]: A list of POS-specific catalogue items.
138
- """
139
-
140
  try:
141
  items = await fetch_pos_documents("catalogues", filter_criteria, fields)
142
- '''
143
- Commented because we are not fetching stock information for POS items
144
-
145
- catalogue_ids = [item.get("catalogue_id") for item in items if item.get("catalogue_id")]
146
- stock_map = await fetch_stock_by_catalogue_ids(branch_id, catalogue_ids) if branch_id else {}
147
-
148
- for item in items:
149
- if item.get("catalogue_id") in stock_map:
150
- item.update(stock_map[item["catalogue_id"]])
151
- '''
152
- logger.info(f"Fetched {len(items)} POS items")
153
  return items
154
-
155
  except Exception as e:
156
- logger.error(f"Error fetching POS catalogue items: {e}", exc_info=True)
157
  raise RuntimeError("Failed to fetch POS items") from e
158
 
159
  @staticmethod
@@ -290,97 +238,43 @@ class CatalogueModel:
290
  filter_criteria: Dict[str, Any],
291
  branch_id: Optional[str] = None
292
  ) -> Dict[str, Any]:
293
- """
294
- Optimized aggregation pipeline for POS catalogue items.
295
-
296
- Performance improvements:
297
- - Database-level grouping and sorting
298
- - Reduced memory usage
299
- - Optimized field projection
300
- - Single query execution
301
- """
302
  try:
303
- # Build and execute aggregation pipeline
304
  pipeline = CatalogueModel._build_pos_aggregation_pipeline(filter_criteria)
305
  result = await fetch_many_aggregate(CATALOGUES_COLLECTION, pipeline)
306
 
307
  if not result:
308
- return {
309
- "favorites": [],
310
- "categories": [],
311
- "catalogues": {}
312
- }
313
 
314
- # Process aggregation result
315
  response_data = CatalogueModel._process_aggregation_result(result[0])
316
-
317
- # Integrate stock data if branch_id provided
318
- # if branch_id:
319
- # await CatalogueModel._integrate_stock_data(response_data, branch_id)
320
-
321
-
322
  return response_data
323
-
324
  except Exception as e:
325
- logger.error(f"Error in POS aggregation: {e}", exc_info=True)
326
  raise RuntimeError("Failed to aggregate POS items") from e
327
 
328
-
329
  @staticmethod
330
  async def delete_item(filter_criteria: dict) -> bool:
331
- """
332
- Deletes a catalogue item by its ID.
333
-
334
- Args:
335
- item_id (str): The ID of the item to delete.
336
-
337
- Returns:
338
- bool: True if the deletion was successful, False otherwise.
339
-
340
- Raises:
341
- ValueError: If the item_id is invalid.
342
- RuntimeError: If the operation fails.
343
- """
344
- logger.info(f"Deleting catalogue item with ID: {filter_criteria}")
345
-
346
- # Validate item_id format
347
- # if not re.fullmatch(r"[0-9a-fA-F]{24}", filter_criteria["catalogue_id"]):
348
- # raise HTTPException(status_code=400, detail="Invalid ObjectId format")
349
-
350
-
351
  try:
352
- # Perform the delete operation
353
  result = await db["catalogues"].delete_one(filter_criteria)
354
  deleted = result.deleted_count > 0
355
-
356
  if deleted:
357
- logger.debug(f"Catalogue item {filter_criteria} deleted successfully.")
358
  else:
359
- logger.debug(f"Catalogue item {filter_criteria} not found.")
360
-
361
  return deleted
362
  except Exception as e:
363
- logger.error(f"Error deleting catalogue item: {e}", exc_info=True)
364
  raise RuntimeError("Failed to delete item") from e
365
 
366
  @staticmethod
367
  async def catalogue_list_items(filter_criteria: Dict[str, Any]) -> Dict[str, Any]:
368
- """
369
- Fetches a paginated list of catalogue items based on filter criteria.
370
-
371
- Args:
372
- filter_criteria (Dict[str, Any]): Criteria to filter the items.
373
-
374
- Returns:
375
- Dict[str, Any]: A dictionary containing the list of items and metadata.
376
-
377
- Raises:
378
- RuntimeError: If the operation fails.
379
- """
380
- logger.info(f"Fetching catalogue items with criteria={filter_criteria}")
381
  try:
382
  raw_items = await fetch_documents("catalogues", filter_criteria, None, 0, 1000)
383
- logger.info(f"Raw items: {raw_items}")
384
  items_data = raw_items.get("documents", [])
385
 
386
  parsed_items = []
@@ -388,16 +282,12 @@ class CatalogueModel:
388
  try:
389
  parsed_items.append(InventoryInfo(**item))
390
  except ValidationError as e:
391
- logger.warning(f"Failed to parse item: {item}, error: {e}")
392
-
393
- logger.debug(f"Fetched and parsed {len(parsed_items)} items")
394
- return {
395
- "items": [i.dict() for i in parsed_items],
396
- "count": len(parsed_items)
397
- }
398
 
 
 
399
  except Exception as e:
400
- logger.error(f"Error listing catalogue items: {e}", exc_info=True)
401
  raise RuntimeError("Failed to list items") from e
402
 
403
  @staticmethod
@@ -412,73 +302,52 @@ class CatalogueModel:
412
  branch_inventory.c.branch_id == filter_criteria.get("branch_id"),
413
  branch_inventory.c.catalogue_id == filter_criteria.get("catalogue_id")
414
  )
415
- .values(**update_fields, updated_at=func.now())
416
  )
417
  result = await session.execute(stmt)
418
- return result.rowcount > 0
419
-
420
  except Exception as e:
421
- logging.error(f"Error updating catalogue inventory: {e}", exc_info=True)
422
  raise RuntimeError("Failed to update catalogue inventory") from e
423
 
424
-
425
-
426
  @staticmethod
427
  async def get_catalogue_data(catalogue_id: str, merchant_id: str):
428
  try:
429
- filter_criteria = {
430
- "catalogue_id": catalogue_id,
431
- "merchant_id": merchant_id,
432
- }
433
-
434
- logger.info(f"Fetching catalogue with ID: {filter_criteria}")
435
  catalogue_data = await db['catalogues'].find_one(filter_criteria)
436
  if catalogue_data:
437
- logger.info(f"catalogue item {catalogue_id} fetched successfully.")
438
  return catalogue_utils.sanitize_document_for_mongo(catalogue_data)
439
  else:
440
- logger.info(f"catalogue item {catalogue_id} not found.")
441
- return None
442
- except Exception as e:
443
- logger.error(f"Error fetching catalogue data: {e}", exc_info=True)
444
  raise RuntimeError("Failed to fetch catalogue data") from e
445
 
446
  @staticmethod
447
  async def bulk_insert(items: list[dict]) -> int:
448
- result_items = []
449
- for item in items:
450
- result_items.append(convert_dates(item))
451
  result = await db['catalogues'].insert_many(result_items)
452
  return len(result.inserted_ids)
453
 
454
  @staticmethod
455
  async def bulk_create(catalogues: List[dict]):
456
- # Example using MongoDB
457
-
458
  serialized_docs = [sanitize_document_for_mongo(doc) for doc in catalogues]
459
  result = await db["catalogues"].insert_many(serialized_docs)
460
  return result.inserted_ids
461
 
462
  @staticmethod
463
- async def get_info_widget_data(
464
- merchant_id: str,
465
- branch_id: str, # <-- ADD THIS
466
- user_pref: dict
467
- ) -> Dict[str, Any]:
468
  try:
469
-
470
- # Safely handle user_pref being None
471
  widgets = user_pref.get("widgets", []) if user_pref else []
472
  results = {}
473
 
474
  for widget in widgets:
475
  widget_type = widget.get("type")
476
-
477
-
478
-
479
  pipeline, post_process = build_aggregation_pipeline_for_widget(widget, merchant_id, branch_id)
480
-
481
- logger.info(f"Executing pipeline for widget {widget_type}: {pipeline}")
482
  if pipeline:
483
  data = await fetch_many_aggregate(CATALOGUES_COLLECTION, pipeline)
484
  results[widget_type] = post_process(data) if post_process else data
@@ -486,7 +355,6 @@ class CatalogueModel:
486
  results[widget_type] = post_process([]) if post_process else []
487
 
488
  return results
489
-
490
  except Exception as e:
491
- logger.error(f"Error fetching catalogue data: {e}", exc_info=True)
492
  raise RuntimeError("Failed to fetch catalogue data") from e
 
 
1
  import re
2
  from typing import Any, Dict, List, Optional, Union
3
  import uuid
 
5
  from fastapi import HTTPException
6
  from pydantic import ValidationError
7
  from sqlalchemy import func, update
8
+ from insightfy_utils.logging import get_logger
9
  from app.constants.collections import CATALOGUES_COLLECTION
10
  from app.repositories.db import db, fetch_documents, fetch_pos_documents, fetch_many_aggregate
11
  from app.repositories.inventory_repository import fetch_stock_by_catalogue_ids
 
16
  from app.sql import async_session
17
  from sqlalchemy.exc import SQLAlchemyError
18
  from app.schemas.inventory_schema import branch_inventory
19
+
20
+ logger = get_logger(__name__)
21
 
22
  # Constants for aggregation pipeline stages
23
  CATEGORY_FIELD = "$category"
 
30
  @staticmethod
31
  async def create_catalogue_item(data: Dict[str, Any]) -> str:
32
  try:
33
+ catalogue_id = str(uuid.uuid4())
34
+ data['catalogue_id'] = catalogue_id
 
 
35
  data = convert_dates(data)
 
 
36
  result = await db['catalogues'].insert_one(data)
 
 
37
  catalogue_inserted_id = str(result.inserted_id)
38
+ logger.info("Catalogue item created", extra={"catalogue_id": catalogue_inserted_id})
 
 
 
39
  return catalogue_inserted_id
40
  except Exception as e:
41
+ logger.error("Error creating Catalogue item", exc_info=e)
 
 
 
42
  raise RuntimeError("Failed to create Catalogue item") from e
43
 
44
  @staticmethod
45
+ async def update_catalogue_data(catalogue_id: str, update_fields: dict, merchant_id: str) -> bool:
46
  try:
47
  existing_doc = await db['catalogues'].find_one(
48
  {"catalogue_id": catalogue_id, "merchant_id": merchant_id}
 
54
  {"catalogue_id": catalogue_id, "merchant_id": merchant_id},
55
  {"$set": update_fields}
56
  )
 
57
  return result.modified_count > 0
58
  except RuntimeError as e:
59
+ logger.error("Error updating catalogue data", exc_info=e)
60
  raise RuntimeError("Failed to update catalogue data") from e
61
 
62
  @staticmethod
63
+ async def list_items(filter_criteria: Dict[str, Any], offset: int, limit: int, projection_list: Union[List[str], None] = None, branch_id=None) -> Dict[str, Any]:
64
+ """Fetches a paginated list of catalogue items based on filter criteria."""
65
+ logger.info("Fetching catalogue items", extra={"criteria": filter_criteria, "offset": offset, "limit": limit})
 
 
 
 
 
 
 
 
 
 
 
 
 
66
  try:
67
  projection = None
68
  if projection_list:
 
73
  items_data = raw_items.get("documents", [])
74
 
75
  catalogue_ids = [item.get("catalogue_id") for item in items_data if item.get("catalogue_id")]
76
+ stock_map = await fetch_stock_by_catalogue_ids(filter_criteria["merchant_id"], branch_id, catalogue_ids) if branch_id else {}
 
 
77
 
78
  parsed_items = []
79
  for item in items_data:
 
80
  if item.get("catalogue_id") in stock_map:
81
  item.update(stock_map[item["catalogue_id"]])
82
  try:
83
  parsed_items.append(item)
84
  except ValidationError as e:
85
+ logger.warning("Failed to parse item", extra={"item": item, "error": str(e)})
86
 
87
+ logger.debug("Fetched and parsed items", extra={"count": len(parsed_items)})
88
  return {
89
  "total": raw_items.get("total", len(parsed_items)),
90
  "items": parsed_items
91
  }
 
92
  except Exception as e:
93
+ logger.error("Error listing catalogue items", exc_info=e)
94
  raise RuntimeError("Failed to list catalogue items") from e
95
 
96
  @staticmethod
97
+ async def pos_items(filter_criteria: Dict[str, Any], fields: Optional[Dict[str, int]] = None, branch_id=None) -> List[Dict[str, Any]]:
98
+ """Fetches POS catalogue items with optional projection fields."""
 
 
 
 
 
 
 
 
 
 
 
99
  try:
100
  items = await fetch_pos_documents("catalogues", filter_criteria, fields)
101
+ logger.info("Fetched POS items", extra={"count": len(items)})
 
 
 
 
 
 
 
 
 
 
102
  return items
 
103
  except Exception as e:
104
+ logger.error("Error fetching POS catalogue items", exc_info=e)
105
  raise RuntimeError("Failed to fetch POS items") from e
106
 
107
  @staticmethod
 
238
  filter_criteria: Dict[str, Any],
239
  branch_id: Optional[str] = None
240
  ) -> Dict[str, Any]:
241
+ """Optimized aggregation pipeline for POS catalogue items."""
 
 
 
 
 
 
 
 
242
  try:
 
243
  pipeline = CatalogueModel._build_pos_aggregation_pipeline(filter_criteria)
244
  result = await fetch_many_aggregate(CATALOGUES_COLLECTION, pipeline)
245
 
246
  if not result:
247
+ return {"favorites": [], "categories": [], "catalogues": {}}
 
 
 
 
248
 
 
249
  response_data = CatalogueModel._process_aggregation_result(result[0])
 
 
 
 
 
 
250
  return response_data
 
251
  except Exception as e:
252
+ logger.error("Error in POS aggregation", exc_info=e)
253
  raise RuntimeError("Failed to aggregate POS items") from e
254
 
 
255
  @staticmethod
256
  async def delete_item(filter_criteria: dict) -> bool:
257
+ """Deletes a catalogue item by its ID."""
258
+ logger.info("Deleting catalogue item", extra={"filter": filter_criteria})
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
259
  try:
 
260
  result = await db["catalogues"].delete_one(filter_criteria)
261
  deleted = result.deleted_count > 0
 
262
  if deleted:
263
+ logger.debug("Catalogue item deleted", extra={"filter": filter_criteria})
264
  else:
265
+ logger.debug("Catalogue item not found", extra={"filter": filter_criteria})
 
266
  return deleted
267
  except Exception as e:
268
+ logger.error("Error deleting catalogue item", exc_info=e)
269
  raise RuntimeError("Failed to delete item") from e
270
 
271
  @staticmethod
272
  async def catalogue_list_items(filter_criteria: Dict[str, Any]) -> Dict[str, Any]:
273
+ """Fetches a paginated list of catalogue items based on filter criteria."""
274
+ logger.info("Fetching catalogue items", extra={"criteria": filter_criteria})
 
 
 
 
 
 
 
 
 
 
 
275
  try:
276
  raw_items = await fetch_documents("catalogues", filter_criteria, None, 0, 1000)
277
+ logger.info("Raw items fetched", extra={"raw_items": raw_items})
278
  items_data = raw_items.get("documents", [])
279
 
280
  parsed_items = []
 
282
  try:
283
  parsed_items.append(InventoryInfo(**item))
284
  except ValidationError as e:
285
+ logger.warning("Failed to parse item", extra={"item": item, "error": str(e)})
 
 
 
 
 
 
286
 
287
+ logger.debug("Fetched and parsed items", extra={"count": len(parsed_items)})
288
+ return {"items": [i.dict() for i in parsed_items], "count": len(parsed_items)}
289
  except Exception as e:
290
+ logger.error("Error listing catalogue items", exc_info=e)
291
  raise RuntimeError("Failed to list items") from e
292
 
293
  @staticmethod
 
302
  branch_inventory.c.branch_id == filter_criteria.get("branch_id"),
303
  branch_inventory.c.catalogue_id == filter_criteria.get("catalogue_id")
304
  )
305
+ .values(**update_fields, updated_at=func.now())
306
  )
307
  result = await session.execute(stmt)
308
+ return result.rowcount > 0
 
309
  except Exception as e:
310
+ logger.error("Error updating catalogue inventory", exc_info=e)
311
  raise RuntimeError("Failed to update catalogue inventory") from e
312
 
 
 
313
  @staticmethod
314
  async def get_catalogue_data(catalogue_id: str, merchant_id: str):
315
  try:
316
+ filter_criteria = {"catalogue_id": catalogue_id, "merchant_id": merchant_id}
317
+ logger.info("Fetching catalogue", extra={"filter": filter_criteria})
 
 
 
 
318
  catalogue_data = await db['catalogues'].find_one(filter_criteria)
319
  if catalogue_data:
320
+ logger.info("Catalogue fetched", extra={"catalogue_id": catalogue_id})
321
  return catalogue_utils.sanitize_document_for_mongo(catalogue_data)
322
  else:
323
+ logger.info("Catalogue not found", extra={"catalogue_id": catalogue_id})
324
+ return None
325
+ except Exception as e:
326
+ logger.error("Error fetching catalogue data", exc_info=e)
327
  raise RuntimeError("Failed to fetch catalogue data") from e
328
 
329
  @staticmethod
330
  async def bulk_insert(items: list[dict]) -> int:
331
+ result_items = [convert_dates(item) for item in items]
 
 
332
  result = await db['catalogues'].insert_many(result_items)
333
  return len(result.inserted_ids)
334
 
335
  @staticmethod
336
  async def bulk_create(catalogues: List[dict]):
 
 
337
  serialized_docs = [sanitize_document_for_mongo(doc) for doc in catalogues]
338
  result = await db["catalogues"].insert_many(serialized_docs)
339
  return result.inserted_ids
340
 
341
  @staticmethod
342
+ async def get_info_widget_data(merchant_id: str, branch_id: str, user_pref: dict) -> Dict[str, Any]:
 
 
 
 
343
  try:
 
 
344
  widgets = user_pref.get("widgets", []) if user_pref else []
345
  results = {}
346
 
347
  for widget in widgets:
348
  widget_type = widget.get("type")
 
 
 
349
  pipeline, post_process = build_aggregation_pipeline_for_widget(widget, merchant_id, branch_id)
350
+ logger.info("Executing pipeline for widget", extra={"widget_type": widget_type, "pipeline": pipeline})
 
351
  if pipeline:
352
  data = await fetch_many_aggregate(CATALOGUES_COLLECTION, pipeline)
353
  results[widget_type] = post_process(data) if post_process else data
 
355
  results[widget_type] = post_process([]) if post_process else []
356
 
357
  return results
 
358
  except Exception as e:
359
+ logger.error("Error fetching catalogue data", exc_info=e)
360
  raise RuntimeError("Failed to fetch catalogue data") from e
app/models/gift_card_models.py CHANGED
@@ -1,15 +1,13 @@
1
- import logging
2
  import uuid
3
  from typing import Any, Dict, List, Optional
4
  from datetime import datetime, timezone
5
  from bson import ObjectId
6
  from fastapi import HTTPException
 
7
  from app.repositories.db import db
8
 
9
- # Configure logging for this module
10
- logger = logging.getLogger(__name__)
11
 
12
- # Collection name
13
  GIFT_CARD_TEMPLATES_COLLECTION = "gift_card_templates"
14
 
15
  class GiftCardTemplateModel:
@@ -19,7 +17,7 @@ class GiftCardTemplateModel:
19
  """
20
 
21
  @staticmethod
22
- async def create_template(data: Dict[str, Any],merchant_id:str) -> str:
23
  """
24
  Create a new gift card template.
25
 
@@ -32,27 +30,20 @@ class GiftCardTemplateModel:
32
  Raises:
33
  RuntimeError: If the creation fails
34
  """
35
-
36
- # Generate unique template ID
37
  template_id = f"tpl_{str(uuid.uuid4())[:8]}"
38
  data['_id'] = template_id
39
- data['issued_count'] = 0 # Initialize issued count
40
  data['created_at'] = datetime.now(timezone.utc)
41
  data['updated_at'] = None
42
 
43
- #Check if name already exists for this merchant
44
  existing = await db.gift_card_templates.find_one(
45
- { "name": data["name"], "merchant_id": merchant_id}
46
  )
47
  if existing:
48
  raise HTTPException(status_code=400, detail="Gift card template with this name already exists.")
49
 
50
- # Insert data into MongoDB collection
51
  await db[GIFT_CARD_TEMPLATES_COLLECTION].insert_one(data)
52
-
53
- # Log success for traceability
54
- logger.info(f"Gift card template created successfully with ID: {template_id}")
55
-
56
  return template_id
57
 
58
  @staticmethod
@@ -73,22 +64,16 @@ class GiftCardTemplateModel:
73
  Dict[str, Any]: Dictionary containing templates and total count
74
  """
75
  try:
76
- # Get total count
77
  total = await db[GIFT_CARD_TEMPLATES_COLLECTION].count_documents(filter_criteria)
78
-
79
- # Get paginated documents
80
  cursor = db[GIFT_CARD_TEMPLATES_COLLECTION].find(filter_criteria)
81
  cursor = cursor.skip(offset).limit(limit).sort("created_at", -1)
82
-
83
  templates = await cursor.to_list(length=limit)
84
 
85
- # Convert ObjectId to string for JSON serialization
86
  for template in templates:
87
  if isinstance(template.get('_id'), ObjectId):
88
  template['_id'] = str(template['_id'])
89
 
90
- logger.info(f"Retrieved {len(templates)} gift card templates")
91
-
92
  return {
93
  "templates": templates,
94
  "total": total,
@@ -96,7 +81,7 @@ class GiftCardTemplateModel:
96
  "limit": limit
97
  }
98
  except Exception as e:
99
- logger.error(f"Error retrieving gift card templates: {e}", exc_info=True)
100
  raise RuntimeError("Failed to retrieve gift card templates") from e
101
 
102
  @staticmethod
@@ -114,17 +99,15 @@ class GiftCardTemplateModel:
114
  template = await db[GIFT_CARD_TEMPLATES_COLLECTION].find_one({"_id": template_id})
115
 
116
  if template:
117
- # Convert ObjectId to string for JSON serialization
118
  if isinstance(template.get('_id'), ObjectId):
119
  template['_id'] = str(template['_id'])
120
-
121
- logger.info(f"Gift card template {template_id} retrieved successfully")
122
  else:
123
- logger.info(f"Gift card template {template_id} not found")
124
 
125
  return template
126
  except Exception as e:
127
- logger.error(f"Error retrieving gift card template {template_id}: {e}", exc_info=True)
128
  raise RuntimeError("Failed to retrieve gift card template") from e
129
 
130
  @staticmethod
@@ -140,32 +123,27 @@ class GiftCardTemplateModel:
140
  bool: True if the update was successful, False otherwise
141
  """
142
  try:
143
- # Check if template exists
144
  existing_template = await db[GIFT_CARD_TEMPLATES_COLLECTION].find_one({"_id": template_id})
145
  if not existing_template:
146
  raise HTTPException(status_code=404, detail="Gift card template not found")
147
 
148
- # Add updated_at timestamp
149
  update_data['updated_at'] = datetime.now(timezone.utc)
150
-
151
- # Perform update
152
  result = await db[GIFT_CARD_TEMPLATES_COLLECTION].update_one(
153
  {"_id": template_id},
154
  {"$set": update_data}
155
  )
156
 
157
  success = result.modified_count > 0
158
-
159
  if success:
160
- logger.info(f"Gift card template {template_id} updated successfully")
161
  else:
162
- logger.warning(f"No changes made to gift card template {template_id}")
163
 
164
  return success
165
  except HTTPException:
166
  raise
167
  except Exception as e:
168
- logger.error(f"Error updating gift card template {template_id}: {e}", exc_info=True)
169
  raise RuntimeError("Failed to update gift card template") from e
170
 
171
  @staticmethod
@@ -181,17 +159,16 @@ class GiftCardTemplateModel:
181
  """
182
  try:
183
  result = await db[GIFT_CARD_TEMPLATES_COLLECTION].delete_one({"_id": template_id})
184
-
185
  success = result.deleted_count > 0
186
 
187
  if success:
188
- logger.info(f"Gift card template {template_id} deleted successfully")
189
  else:
190
- logger.warning(f"Gift card template {template_id} not found for deletion")
191
 
192
  return success
193
  except Exception as e:
194
- logger.error(f"Error deleting gift card template {template_id}: {e}", exc_info=True)
195
  raise RuntimeError("Failed to delete gift card template") from e
196
 
197
  @staticmethod
@@ -215,15 +192,14 @@ class GiftCardTemplateModel:
215
  )
216
 
217
  success = result.modified_count > 0
218
-
219
  if success:
220
- logger.info(f"Issued count incremented for template {template_id}")
221
  else:
222
- logger.warning(f"Failed to increment issued count for template {template_id}")
223
 
224
  return success
225
  except Exception as e:
226
- logger.error(f"Error incrementing issued count for template {template_id}: {e}", exc_info=True)
227
  raise RuntimeError("Failed to increment issued count") from e
228
 
229
  @staticmethod
@@ -246,15 +222,9 @@ class GiftCardTemplateModel:
246
  max_issues = template.get("max_issues")
247
  issued_count = template.get("issued_count", 0)
248
 
249
- # For digital cards or unlimited templates
250
  if max_issues is None:
251
- return {
252
- "available": True,
253
- "unlimited": True,
254
- "remaining": None
255
- }
256
 
257
- # For physical cards with limited stock
258
  remaining = max_issues - issued_count
259
  available = remaining > 0
260
 
@@ -268,5 +238,5 @@ class GiftCardTemplateModel:
268
  except HTTPException:
269
  raise
270
  except Exception as e:
271
- logger.error(f"Error checking stock availability for template {template_id}: {e}", exc_info=True)
272
  raise RuntimeError("Failed to check stock availability") from e
 
 
1
  import uuid
2
  from typing import Any, Dict, List, Optional
3
  from datetime import datetime, timezone
4
  from bson import ObjectId
5
  from fastapi import HTTPException
6
+ from insightfy_utils.logging import get_logger
7
  from app.repositories.db import db
8
 
9
+ logger = get_logger(__name__)
 
10
 
 
11
  GIFT_CARD_TEMPLATES_COLLECTION = "gift_card_templates"
12
 
13
  class GiftCardTemplateModel:
 
17
  """
18
 
19
  @staticmethod
20
+ async def create_template(data: Dict[str, Any], merchant_id: str) -> str:
21
  """
22
  Create a new gift card template.
23
 
 
30
  Raises:
31
  RuntimeError: If the creation fails
32
  """
 
 
33
  template_id = f"tpl_{str(uuid.uuid4())[:8]}"
34
  data['_id'] = template_id
35
+ data['issued_count'] = 0
36
  data['created_at'] = datetime.now(timezone.utc)
37
  data['updated_at'] = None
38
 
 
39
  existing = await db.gift_card_templates.find_one(
40
+ {"name": data["name"], "merchant_id": merchant_id}
41
  )
42
  if existing:
43
  raise HTTPException(status_code=400, detail="Gift card template with this name already exists.")
44
 
 
45
  await db[GIFT_CARD_TEMPLATES_COLLECTION].insert_one(data)
46
+ logger.info("Gift card template created", extra={"template_id": template_id})
 
 
 
47
  return template_id
48
 
49
  @staticmethod
 
64
  Dict[str, Any]: Dictionary containing templates and total count
65
  """
66
  try:
 
67
  total = await db[GIFT_CARD_TEMPLATES_COLLECTION].count_documents(filter_criteria)
 
 
68
  cursor = db[GIFT_CARD_TEMPLATES_COLLECTION].find(filter_criteria)
69
  cursor = cursor.skip(offset).limit(limit).sort("created_at", -1)
 
70
  templates = await cursor.to_list(length=limit)
71
 
 
72
  for template in templates:
73
  if isinstance(template.get('_id'), ObjectId):
74
  template['_id'] = str(template['_id'])
75
 
76
+ logger.info("Retrieved gift card templates", extra={"count": len(templates)})
 
77
  return {
78
  "templates": templates,
79
  "total": total,
 
81
  "limit": limit
82
  }
83
  except Exception as e:
84
+ logger.error("Error retrieving gift card templates", exc_info=e)
85
  raise RuntimeError("Failed to retrieve gift card templates") from e
86
 
87
  @staticmethod
 
99
  template = await db[GIFT_CARD_TEMPLATES_COLLECTION].find_one({"_id": template_id})
100
 
101
  if template:
 
102
  if isinstance(template.get('_id'), ObjectId):
103
  template['_id'] = str(template['_id'])
104
+ logger.info("Gift card template retrieved", extra={"template_id": template_id})
 
105
  else:
106
+ logger.info("Gift card template not found", extra={"template_id": template_id})
107
 
108
  return template
109
  except Exception as e:
110
+ logger.error("Error retrieving gift card template", extra={"template_id": template_id}, exc_info=e)
111
  raise RuntimeError("Failed to retrieve gift card template") from e
112
 
113
  @staticmethod
 
123
  bool: True if the update was successful, False otherwise
124
  """
125
  try:
 
126
  existing_template = await db[GIFT_CARD_TEMPLATES_COLLECTION].find_one({"_id": template_id})
127
  if not existing_template:
128
  raise HTTPException(status_code=404, detail="Gift card template not found")
129
 
 
130
  update_data['updated_at'] = datetime.now(timezone.utc)
 
 
131
  result = await db[GIFT_CARD_TEMPLATES_COLLECTION].update_one(
132
  {"_id": template_id},
133
  {"$set": update_data}
134
  )
135
 
136
  success = result.modified_count > 0
 
137
  if success:
138
+ logger.info("Gift card template updated", extra={"template_id": template_id})
139
  else:
140
+ logger.warning("No changes made to gift card template", extra={"template_id": template_id})
141
 
142
  return success
143
  except HTTPException:
144
  raise
145
  except Exception as e:
146
+ logger.error("Error updating gift card template", extra={"template_id": template_id}, exc_info=e)
147
  raise RuntimeError("Failed to update gift card template") from e
148
 
149
  @staticmethod
 
159
  """
160
  try:
161
  result = await db[GIFT_CARD_TEMPLATES_COLLECTION].delete_one({"_id": template_id})
 
162
  success = result.deleted_count > 0
163
 
164
  if success:
165
+ logger.info("Gift card template deleted", extra={"template_id": template_id})
166
  else:
167
+ logger.warning("Gift card template not found for deletion", extra={"template_id": template_id})
168
 
169
  return success
170
  except Exception as e:
171
+ logger.error("Error deleting gift card template", extra={"template_id": template_id}, exc_info=e)
172
  raise RuntimeError("Failed to delete gift card template") from e
173
 
174
  @staticmethod
 
192
  )
193
 
194
  success = result.modified_count > 0
 
195
  if success:
196
+ logger.info("Issued count incremented", extra={"template_id": template_id})
197
  else:
198
+ logger.warning("Failed to increment issued count", extra={"template_id": template_id})
199
 
200
  return success
201
  except Exception as e:
202
+ logger.error("Error incrementing issued count", extra={"template_id": template_id}, exc_info=e)
203
  raise RuntimeError("Failed to increment issued count") from e
204
 
205
  @staticmethod
 
222
  max_issues = template.get("max_issues")
223
  issued_count = template.get("issued_count", 0)
224
 
 
225
  if max_issues is None:
226
+ return {"available": True, "unlimited": True, "remaining": None}
 
 
 
 
227
 
 
228
  remaining = max_issues - issued_count
229
  available = remaining > 0
230
 
 
238
  except HTTPException:
239
  raise
240
  except Exception as e:
241
+ logger.error("Error checking stock availability", extra={"template_id": template_id}, exc_info=e)
242
  raise RuntimeError("Failed to check stock availability") from e
app/models/promotion_model.py CHANGED
@@ -1,10 +1,10 @@
1
  from typing import Any, Dict, Optional, Union, List
2
 
3
- import logging
4
  import uuid
5
 
6
  from fastapi import HTTPException
7
  from pymongo import ReturnDocument
 
8
 
9
  from app.repositories.db import db, fetch_documents, serialize_mongo_document, fetch_many_aggregate
10
  from app.schemas.promotion_schema import PromotionMetaData
@@ -14,13 +14,12 @@ from app.utils.info_widget_utils import build_aggregation_pipeline_for_widget
14
 
15
  from app.constants.collections import PROMOTION_COLLECTION
16
 
17
- logger = logging.getLogger(__name__)
18
 
19
  class PromotionManagementModel:
20
 
21
  @staticmethod
22
  async def create_promotion_data(data: Dict[str, Any]) -> str:
23
-
24
  data = sanitize_document_for_mongo(data)
25
  existing = await db[PROMOTION_COLLECTION].find_one(
26
  {"code": data["code"], "merchant_id": data["merchant_id"]}
@@ -30,9 +29,8 @@ class PromotionManagementModel:
30
 
31
  result = await db[PROMOTION_COLLECTION].insert_one(data)
32
  promotion_inserted_id = str(result.inserted_id)
33
- logger.info(f"promotion created successfully with ID: {promotion_inserted_id}")
34
  return promotion_inserted_id
35
-
36
 
37
  @staticmethod
38
  async def list_promotion(
@@ -66,77 +64,72 @@ class PromotionManagementModel:
66
  PROMOTION_COLLECTION, filter_criteria, sort_criteria, offset, limit
67
  )
68
 
69
- logger.debug(f"Fetched {len(promotion_list.get('items', []))} promotions")
70
  return promotion_list
71
  except Exception as e:
72
- logger.error(f"Error listing promotions: {e}", exc_info=True)
73
  raise RuntimeError("Failed to list promotions") from e
74
 
75
  @staticmethod
76
- async def get_promotion_by_code(code:str)->Dict[str,Any]:
77
  try:
78
- logging.info("promotion data base on code")
79
- promotion_data= await db[PROMOTION_COLLECTION].find_one({"code": code})
80
  return promotion_data
81
  except Exception as e:
82
- logger.error(f"Error while retrieving promotion by code: {e}", exc_info=True)
83
 
84
  @staticmethod
85
- async def get_promotion_by_merchant_id(merchant_id:str)->Dict[str,Any]:
86
  try:
87
- logging.info("promotion data base on with out code")
88
- promotion= db[PROMOTION_COLLECTION].find({"merchant_id": merchant_id, "auto_apply": True})
89
- promotion_data = await promotion.to_list(length=100)
90
- return [catalogue_utils.sanitize_document_for_mongo(doc) for doc in promotion_data]
91
  except Exception as e:
92
- logger.error(f"Error while retrieving promotion by code: {e}", exc_info=True)
93
-
94
  @staticmethod
95
- async def update(code: str,user_id:str) -> int:
96
  try:
97
  result = await db[PROMOTION_COLLECTION].update_one(
98
  {"code": code},
99
  {
100
- "$inc": {'used_count': 1},
101
- "$addToSet": {"used_by": user_id}
102
- } )
103
- logger.info(f"Modified {result.modified_count} document(s) for promotion code {code}")
 
104
  return result.modified_count
105
  except Exception as e:
106
- logger.error(f"Error while updating promotion: {e}", exc_info=True)
107
  raise RuntimeError("Failed to update in DB") from e
108
-
109
  @staticmethod
110
  async def get_info_widget_data(
111
  merchant_id: str,
112
- branch_id: str, # <-- ADD THIS
113
  user_pref: dict
114
  ) -> Dict[str, Any]:
115
  try:
116
-
117
- # Safely handle user_pref being None
118
  widgets = user_pref.get("widgets", []) if user_pref else []
119
  results = {}
120
  for widget in widgets:
121
  widget_type = widget.get("type")
122
-
123
  pipeline, post_process = build_aggregation_pipeline_for_widget(widget, merchant_id, branch_id)
124
-
125
- logger.info(f"Executing pipeline for widget {widget_type}: {pipeline}")
126
  if pipeline:
127
  data = await fetch_many_aggregate(PROMOTION_COLLECTION, pipeline)
128
  results[widget_type] = post_process(data) if post_process else data
129
  else:
130
  results[widget_type] = post_process([]) if post_process else []
131
-
132
  return results
133
-
134
  except Exception as e:
135
- logger.error(f"Error fetching catalogue data: {e}", exc_info=True)
136
- raise RuntimeError("Failed to fetch catalogue data") from e
137
-
138
  @staticmethod
139
- async def get_by_promotion_id(promotion_id: str) -> Dict[str, Any]:
140
  result = await db.promotions.find_one({"promotion_id": promotion_id})
141
  return result if result else None
142
 
@@ -148,8 +141,8 @@ class PromotionManagementModel:
148
  {"$set": update_data},
149
  return_document=ReturnDocument.AFTER
150
  )
151
- logger.info(f"promotion updated successfully")
152
  return result if result else None
153
  except Exception as e:
154
- logger.error(f"Error updating promotion data: {e}", exc_info=True)
155
  raise RuntimeError("Failed to update promotion data") from e
 
1
  from typing import Any, Dict, Optional, Union, List
2
 
 
3
  import uuid
4
 
5
  from fastapi import HTTPException
6
  from pymongo import ReturnDocument
7
+ from insightfy_utils.logging import get_logger
8
 
9
  from app.repositories.db import db, fetch_documents, serialize_mongo_document, fetch_many_aggregate
10
  from app.schemas.promotion_schema import PromotionMetaData
 
14
 
15
  from app.constants.collections import PROMOTION_COLLECTION
16
 
17
+ logger = get_logger(__name__)
18
 
19
  class PromotionManagementModel:
20
 
21
  @staticmethod
22
  async def create_promotion_data(data: Dict[str, Any]) -> str:
 
23
  data = sanitize_document_for_mongo(data)
24
  existing = await db[PROMOTION_COLLECTION].find_one(
25
  {"code": data["code"], "merchant_id": data["merchant_id"]}
 
29
 
30
  result = await db[PROMOTION_COLLECTION].insert_one(data)
31
  promotion_inserted_id = str(result.inserted_id)
32
+ logger.info("Promotion created", extra={"promotion_id": promotion_inserted_id})
33
  return promotion_inserted_id
 
34
 
35
  @staticmethod
36
  async def list_promotion(
 
64
  PROMOTION_COLLECTION, filter_criteria, sort_criteria, offset, limit
65
  )
66
 
67
+ logger.debug("Fetched promotions", extra={"count": len(promotion_list.get('items', []))})
68
  return promotion_list
69
  except Exception as e:
70
+ logger.error("Error listing promotions", exc_info=e)
71
  raise RuntimeError("Failed to list promotions") from e
72
 
73
  @staticmethod
74
+ async def get_promotion_by_code(code: str) -> Dict[str, Any]:
75
  try:
76
+ logger.info("Fetching promotion by code", extra={"code": code})
77
+ promotion_data = await db[PROMOTION_COLLECTION].find_one({"code": code})
78
  return promotion_data
79
  except Exception as e:
80
+ logger.error("Error retrieving promotion by code", extra={"code": code}, exc_info=e)
81
 
82
  @staticmethod
83
+ async def get_promotion_by_merchant_id(merchant_id: str) -> Dict[str, Any]:
84
  try:
85
+ logger.info("Fetching promotions by merchant", extra={"merchant_id": merchant_id})
86
+ promotion = db[PROMOTION_COLLECTION].find({"merchant_id": merchant_id, "auto_apply": True})
87
+ promotion_data = await promotion.to_list(length=100)
88
+ return [catalogue_utils.sanitize_document_for_mongo(doc) for doc in promotion_data]
89
  except Exception as e:
90
+ logger.error("Error retrieving promotion by merchant", extra={"merchant_id": merchant_id}, exc_info=e)
91
+
92
  @staticmethod
93
+ async def update(code: str, user_id: str) -> int:
94
  try:
95
  result = await db[PROMOTION_COLLECTION].update_one(
96
  {"code": code},
97
  {
98
+ "$inc": {'used_count': 1},
99
+ "$addToSet": {"used_by": user_id}
100
+ }
101
+ )
102
+ logger.info("Promotion updated", extra={"code": code, "modified_count": result.modified_count})
103
  return result.modified_count
104
  except Exception as e:
105
+ logger.error("Error updating promotion", extra={"code": code}, exc_info=e)
106
  raise RuntimeError("Failed to update in DB") from e
107
+
108
  @staticmethod
109
  async def get_info_widget_data(
110
  merchant_id: str,
111
+ branch_id: str,
112
  user_pref: dict
113
  ) -> Dict[str, Any]:
114
  try:
 
 
115
  widgets = user_pref.get("widgets", []) if user_pref else []
116
  results = {}
117
  for widget in widgets:
118
  widget_type = widget.get("type")
 
119
  pipeline, post_process = build_aggregation_pipeline_for_widget(widget, merchant_id, branch_id)
120
+ logger.info("Executing pipeline for widget", extra={"widget_type": widget_type, "pipeline": pipeline})
 
121
  if pipeline:
122
  data = await fetch_many_aggregate(PROMOTION_COLLECTION, pipeline)
123
  results[widget_type] = post_process(data) if post_process else data
124
  else:
125
  results[widget_type] = post_process([]) if post_process else []
 
126
  return results
 
127
  except Exception as e:
128
+ logger.error("Error fetching promotion widget data", exc_info=e)
129
+ raise RuntimeError("Failed to fetch promotion data") from e
130
+
131
  @staticmethod
132
+ async def get_by_promotion_id(promotion_id: str) -> Dict[str, Any]:
133
  result = await db.promotions.find_one({"promotion_id": promotion_id})
134
  return result if result else None
135
 
 
141
  {"$set": update_data},
142
  return_document=ReturnDocument.AFTER
143
  )
144
+ logger.info("Promotion updated", extra={"promotion_id": promotion_id})
145
  return result if result else None
146
  except Exception as e:
147
+ logger.error("Error updating promotion", extra={"promotion_id": promotion_id}, exc_info=e)
148
  raise RuntimeError("Failed to update promotion data") from e
app/models/supplier_models.py CHANGED
@@ -1,9 +1,9 @@
1
-
2
  import logging
3
  from typing import Any, Dict, List
4
  import uuid
5
  from bson import ObjectId
6
  from fastapi import HTTPException
 
7
  from app.repositories.db import db, fetch_many_aggregate
8
  from app.schemas.supplier_schema import supply_history
9
  from app.utils import catalogue_utils
@@ -12,124 +12,98 @@ from app.utils.id_utils import generate_unique_id
12
  from app.constants.collections import SUPPLIERS_COLLECTION
13
  from app.utils.info_widget_utils import build_aggregation_pipeline_for_widget
14
 
15
- logger = logging.getLogger(__name__)
16
 
17
  class SupplierModel:
18
 
19
  @staticmethod
20
  async def create_supplier(data: Dict[str, Any]) -> str:
21
  try:
22
- merchant_id = data.get('merchant_id')
23
- supplier_id = generate_unique_id(merchant_id)
24
- data['supplier_id'] = supplier_id
25
- data = catalogue_utils.convert_dates(data)
26
- # Insert data into MongoDB collection
27
- result = await db[SUPPLIERS_COLLECTION].insert_one(data)
28
- # Extract the inserted ID and convert to string
29
- supplier_inserted_id = str(result.inserted_id)
30
- # Log success for traceability
31
- logger.info(f"supplier created successfully with ID: {supplier_inserted_id}")
32
- return supplier_inserted_id
33
  except Exception as e:
34
- # Log the error with stack trace for debugging
35
- logger.error(f"Error creating supplier : {e}", exc_info=True)
36
-
37
- # Raise a more specific error with a custom message
38
  raise RuntimeError("Failed to create supplier") from e
39
 
40
-
41
  @staticmethod
42
  async def get_supplier_data(supplier_id: str, merchant_id: str):
43
  try:
44
-
45
- filter_criteria = {
46
- "supplier_id": supplier_id,
47
- "merchant_id": merchant_id,
48
- }
49
-
50
- logger.info(f"Fetching Supplier with ID: {filter_criteria}")
51
  Supplier_data = await db[SUPPLIERS_COLLECTION].find_one(filter_criteria)
52
  if Supplier_data:
53
- logger.info(f"Supplier {supplier_id} fetched successfully.")
54
  return catalogue_utils.sanitize_document_for_mongo(Supplier_data)
55
  else:
56
- logger.info(f"Supplier {supplier_id} not found.")
57
- return None
58
- except Exception as e:
59
- logger.error(f"Error fetching Supplier data: {e}", exc_info=True)
60
  raise RuntimeError("Failed to fetch Supplier data") from e
61
 
62
  @staticmethod
63
  async def update_supplier(supplier_id: str, update_fields: dict) -> bool:
64
  try:
65
  result = await db[SUPPLIERS_COLLECTION].update_one(
66
- {"supplier_id":supplier_id},
67
  {"$set": update_fields}
68
  )
69
  return result.modified_count > 0
70
  except Exception as e:
71
- logger.error(f"Error updating supplier data: {e}", exc_info=True)
72
  raise RuntimeError("Failed to update supplier data") from e
73
 
74
  @staticmethod
75
  async def delete_item(supplier_id: str) -> bool:
76
-
77
- logger.info(f"Soft deleting supplier with ID: {supplier_id}")
78
  try:
79
  result = await db[SUPPLIERS_COLLECTION].update_one(
80
- {"supplier_id":supplier_id},
81
- {"$set": {"status": "inactive"}} # or use "deleted": True
82
  )
83
  updated = result.modified_count > 0
84
-
85
  if updated:
86
- logger.debug(f"Supplier {supplier_id} soft deleted successfully.")
87
  else:
88
- logger.debug(f"Supplier {supplier_id} not found")
89
-
90
  return updated
91
-
92
  except Exception as e:
93
- logger.error(f"Error soft deleting supplier: {e}", exc_info=True)
94
  raise RuntimeError("Failed to soft delete item") from e
95
 
96
  @staticmethod
97
  async def get_all_suppliers(query: Dict[str, Any]) -> List[Dict[str, Any]]:
98
  try:
99
- logger.info(f"Fetching suppliers with query: {query}")
100
-
101
  cursor = db[SUPPLIERS_COLLECTION].find(query)
102
- suppliers = await cursor.to_list(length=100) # Add pagination if needed
103
 
104
- if suppliers:
105
- logger.info(f"Found {len(suppliers)} supplier(s) matching query.")
106
  return [catalogue_utils.sanitize_document_for_mongo(doc) for doc in suppliers]
107
  else:
108
- logger.info("No suppliers matched the query.")
109
  return []
110
-
111
  except Exception as e:
112
- logger.error(f"Error fetching suppliers: {e}", exc_info=True)
113
  raise RuntimeError("Failed to fetch suppliers") from e
114
 
115
  @staticmethod
116
  async def count_suppliers(query: Dict[str, Any]) -> int:
117
- """
118
- Count the number of suppliers matching the given query.
119
-
120
- Args:
121
- query: Dictionary of filter criteria
122
-
123
- Returns:
124
- Total count of matching suppliers
125
- """
126
  try:
127
- logger.info(f"Counting suppliers with query: {query}")
128
  count = await db[SUPPLIERS_COLLECTION].count_documents(query)
129
- logger.info(f"Found {count} supplier(s) matching query.")
130
  return count
131
  except Exception as e:
132
- logger.error(f"Error counting suppliers: {e}", exc_info=True)
133
  raise RuntimeError("Failed to count suppliers") from e
134
 
135
  @staticmethod
@@ -139,109 +113,80 @@ class SupplierModel:
139
  limit: int = 10,
140
  projection_list: List[str] = None
141
  ) -> List[Dict[str, Any]]:
142
- """
143
- Get a paginated list of suppliers matching the given query.
144
-
145
- Args:
146
- query: Dictionary of filter criteria
147
- offset: Number of records to skip
148
- limit: Maximum number of records to return
149
- projection_list: Optional list of fields to include in the response
150
-
151
- Returns:
152
- List of supplier documents
153
- """
154
  try:
155
- logger.info(f"Fetching suppliers list with query: {query}, offset: {offset}, limit: {limit}")
156
 
157
- # Create projection if needed
158
  projection = None
159
  if projection_list:
160
  projection = dict.fromkeys(projection_list, 1)
161
- # Always include _id for document identification
162
  if '_id' not in projection:
163
  projection['_id'] = 1
164
 
165
- # Create cursor with pagination
166
  cursor = db[SUPPLIERS_COLLECTION].find(query, projection)
167
  cursor.skip(offset).limit(limit)
168
-
169
- # Sort by created_at descending (newest first) if available
170
  cursor.sort("created_at", -1)
171
-
172
- # Retrieve documents
173
  suppliers = await cursor.to_list(length=limit)
174
 
175
  if suppliers:
176
- logger.info(f"Found {len(suppliers)} supplier(s) for the paginated query.")
177
  return suppliers
178
  else:
179
- logger.info("No suppliers found for the paginated query.")
180
  return []
181
-
182
  except Exception as e:
183
- logger.error(f"Error fetching suppliers list: {e}", exc_info=True)
184
  raise RuntimeError("Failed to fetch suppliers list") from e
185
 
186
  @staticmethod
187
  async def append_supply_history(supplier_id: str, history_entry: any) -> Dict[str, Any]:
188
  try:
189
- logger.info(f"appending supply history: {history_entry}")
190
  result = await db[SUPPLIERS_COLLECTION].update_one(
191
- {"supplier_id": supplier_id},
192
- {"$push": {"supply_history": history_entry.dict()}}
193
-
194
- )
195
  if result.modified_count == 0:
196
- logger.debug(f"Supplier {supplier_id} not found.")
197
-
198
- return{"message":"supplier history appended successfully"}
199
-
200
  except Exception as e:
201
- logger.error(f"Error in appending supplier history: {e}", exc_info=True)
202
  raise RuntimeError("Failed to append supplier history") from e
203
 
204
  @staticmethod
205
  async def upload_document(data: Any, supplier_id: str) -> Dict[Any, str]:
206
  try:
207
  result = await db[SUPPLIERS_COLLECTION].update_one(
208
- {"supplier_id": supplier_id},
209
- {"$push": {"documents": data.dict()}}
210
- )
211
- return{"message":"supplier documents uploaded successfully"}
212
-
213
  except Exception as e:
214
- logger.error(f"Error in uploading supplier documents: {e}", exc_info=True)
215
  raise RuntimeError("Failed to upload supplier documents") from e
216
 
217
  @staticmethod
218
  async def get_info_widget_data(
219
  merchant_id: str,
220
- branch_id: str, # <-- ADD THIS
221
  user_pref: dict
222
  ) -> Dict[str, Any]:
223
  try:
224
-
225
- # Safely handle user_pref being None
226
  widgets = user_pref.get("widgets", []) if user_pref else []
227
  results = {}
228
  for widget in widgets:
229
  widget_type = widget.get("type")
230
-
231
  pipeline, post_process = build_aggregation_pipeline_for_widget(widget, merchant_id, branch_id)
232
-
233
- logger.info(f"Executing pipeline for widget {widget_type}: {pipeline}")
234
  if pipeline:
235
  data = await fetch_many_aggregate(SUPPLIERS_COLLECTION, pipeline)
236
  results[widget_type] = post_process(data) if post_process else data
237
  else:
238
  results[widget_type] = post_process([]) if post_process else []
239
-
240
  return results
241
-
242
  except Exception as e:
243
- logger.error(f"Error fetching supplier data: {e}", exc_info=True)
244
  raise RuntimeError("Failed to fetch supplier data") from e
245
-
246
 
247
-
 
 
 
1
  import logging
2
  from typing import Any, Dict, List
3
  import uuid
4
  from bson import ObjectId
5
  from fastapi import HTTPException
6
+ from insightfy_utils.logging import get_logger
7
  from app.repositories.db import db, fetch_many_aggregate
8
  from app.schemas.supplier_schema import supply_history
9
  from app.utils import catalogue_utils
 
12
  from app.constants.collections import SUPPLIERS_COLLECTION
13
  from app.utils.info_widget_utils import build_aggregation_pipeline_for_widget
14
 
15
+ logger = get_logger(__name__)
16
 
17
  class SupplierModel:
18
 
19
  @staticmethod
20
  async def create_supplier(data: Dict[str, Any]) -> str:
21
  try:
22
+ merchant_id = data.get('merchant_id')
23
+ supplier_id = generate_unique_id(merchant_id)
24
+ data['supplier_id'] = supplier_id
25
+ data = catalogue_utils.convert_dates(data)
26
+ result = await db[SUPPLIERS_COLLECTION].insert_one(data)
27
+ supplier_inserted_id = str(result.inserted_id)
28
+ logger.info("Supplier created", extra={"supplier_id": supplier_inserted_id})
29
+ return supplier_inserted_id
 
 
 
30
  except Exception as e:
31
+ logger.error("Error creating supplier", exc_info=e)
 
 
 
32
  raise RuntimeError("Failed to create supplier") from e
33
 
 
34
  @staticmethod
35
  async def get_supplier_data(supplier_id: str, merchant_id: str):
36
  try:
37
+ filter_criteria = {"supplier_id": supplier_id, "merchant_id": merchant_id}
38
+ logger.info("Fetching Supplier", extra={"filter": filter_criteria})
 
 
 
 
 
39
  Supplier_data = await db[SUPPLIERS_COLLECTION].find_one(filter_criteria)
40
  if Supplier_data:
41
+ logger.info("Supplier fetched", extra={"supplier_id": supplier_id})
42
  return catalogue_utils.sanitize_document_for_mongo(Supplier_data)
43
  else:
44
+ logger.info("Supplier not found", extra={"supplier_id": supplier_id})
45
+ return None
46
+ except Exception as e:
47
+ logger.error("Error fetching Supplier data", exc_info=e)
48
  raise RuntimeError("Failed to fetch Supplier data") from e
49
 
50
  @staticmethod
51
  async def update_supplier(supplier_id: str, update_fields: dict) -> bool:
52
  try:
53
  result = await db[SUPPLIERS_COLLECTION].update_one(
54
+ {"supplier_id": supplier_id},
55
  {"$set": update_fields}
56
  )
57
  return result.modified_count > 0
58
  except Exception as e:
59
+ logger.error("Error updating supplier data", exc_info=e)
60
  raise RuntimeError("Failed to update supplier data") from e
61
 
62
  @staticmethod
63
  async def delete_item(supplier_id: str) -> bool:
64
+ logger.info("Soft deleting supplier", extra={"supplier_id": supplier_id})
 
65
  try:
66
  result = await db[SUPPLIERS_COLLECTION].update_one(
67
+ {"supplier_id": supplier_id},
68
+ {"$set": {"status": "inactive"}}
69
  )
70
  updated = result.modified_count > 0
 
71
  if updated:
72
+ logger.debug("Supplier soft deleted", extra={"supplier_id": supplier_id})
73
  else:
74
+ logger.debug("Supplier not found", extra={"supplier_id": supplier_id})
 
75
  return updated
 
76
  except Exception as e:
77
+ logger.error("Error soft deleting supplier", exc_info=e)
78
  raise RuntimeError("Failed to soft delete item") from e
79
 
80
  @staticmethod
81
  async def get_all_suppliers(query: Dict[str, Any]) -> List[Dict[str, Any]]:
82
  try:
83
+ logger.info("Fetching suppliers", extra={"query": query})
 
84
  cursor = db[SUPPLIERS_COLLECTION].find(query)
85
+ suppliers = await cursor.to_list(length=100)
86
 
87
+ if suppliers:
88
+ logger.info("Found suppliers", extra={"count": len(suppliers)})
89
  return [catalogue_utils.sanitize_document_for_mongo(doc) for doc in suppliers]
90
  else:
91
+ logger.info("No suppliers matched the query")
92
  return []
 
93
  except Exception as e:
94
+ logger.error("Error fetching suppliers", exc_info=e)
95
  raise RuntimeError("Failed to fetch suppliers") from e
96
 
97
  @staticmethod
98
  async def count_suppliers(query: Dict[str, Any]) -> int:
99
+ """Count the number of suppliers matching the given query."""
 
 
 
 
 
 
 
 
100
  try:
101
+ logger.info("Counting suppliers", extra={"query": query})
102
  count = await db[SUPPLIERS_COLLECTION].count_documents(query)
103
+ logger.info("Counted suppliers", extra={"count": count})
104
  return count
105
  except Exception as e:
106
+ logger.error("Error counting suppliers", exc_info=e)
107
  raise RuntimeError("Failed to count suppliers") from e
108
 
109
  @staticmethod
 
113
  limit: int = 10,
114
  projection_list: List[str] = None
115
  ) -> List[Dict[str, Any]]:
116
+ """Get a paginated list of suppliers matching the given query."""
 
 
 
 
 
 
 
 
 
 
 
117
  try:
118
+ logger.info("Fetching suppliers list", extra={"query": query, "offset": offset, "limit": limit})
119
 
 
120
  projection = None
121
  if projection_list:
122
  projection = dict.fromkeys(projection_list, 1)
 
123
  if '_id' not in projection:
124
  projection['_id'] = 1
125
 
 
126
  cursor = db[SUPPLIERS_COLLECTION].find(query, projection)
127
  cursor.skip(offset).limit(limit)
 
 
128
  cursor.sort("created_at", -1)
 
 
129
  suppliers = await cursor.to_list(length=limit)
130
 
131
  if suppliers:
132
+ logger.info("Found suppliers for paginated query", extra={"count": len(suppliers)})
133
  return suppliers
134
  else:
135
+ logger.info("No suppliers found for paginated query")
136
  return []
 
137
  except Exception as e:
138
+ logger.error("Error fetching suppliers list", exc_info=e)
139
  raise RuntimeError("Failed to fetch suppliers list") from e
140
 
141
  @staticmethod
142
  async def append_supply_history(supplier_id: str, history_entry: any) -> Dict[str, Any]:
143
  try:
144
+ logger.info("Appending supply history", extra={"history_entry": str(history_entry)})
145
  result = await db[SUPPLIERS_COLLECTION].update_one(
146
+ {"supplier_id": supplier_id},
147
+ {"$push": {"supply_history": history_entry.dict()}}
148
+ )
 
149
  if result.modified_count == 0:
150
+ logger.debug("Supplier not found", extra={"supplier_id": supplier_id})
151
+ return {"message": "supplier history appended successfully"}
 
 
152
  except Exception as e:
153
+ logger.error("Error appending supplier history", exc_info=e)
154
  raise RuntimeError("Failed to append supplier history") from e
155
 
156
  @staticmethod
157
  async def upload_document(data: Any, supplier_id: str) -> Dict[Any, str]:
158
  try:
159
  result = await db[SUPPLIERS_COLLECTION].update_one(
160
+ {"supplier_id": supplier_id},
161
+ {"$push": {"documents": data.dict()}}
162
+ )
163
+ return {"message": "supplier documents uploaded successfully"}
 
164
  except Exception as e:
165
+ logger.error("Error uploading supplier documents", exc_info=e)
166
  raise RuntimeError("Failed to upload supplier documents") from e
167
 
168
  @staticmethod
169
  async def get_info_widget_data(
170
  merchant_id: str,
171
+ branch_id: str,
172
  user_pref: dict
173
  ) -> Dict[str, Any]:
174
  try:
 
 
175
  widgets = user_pref.get("widgets", []) if user_pref else []
176
  results = {}
177
  for widget in widgets:
178
  widget_type = widget.get("type")
 
179
  pipeline, post_process = build_aggregation_pipeline_for_widget(widget, merchant_id, branch_id)
180
+ logger.info("Executing pipeline for widget", extra={"widget_type": widget_type, "pipeline": pipeline})
 
181
  if pipeline:
182
  data = await fetch_many_aggregate(SUPPLIERS_COLLECTION, pipeline)
183
  results[widget_type] = post_process(data) if post_process else data
184
  else:
185
  results[widget_type] = post_process([]) if post_process else []
 
186
  return results
 
187
  except Exception as e:
188
+ logger.error("Error fetching supplier data", exc_info=e)
189
  raise RuntimeError("Failed to fetch supplier data") from e
 
190
 
191
+
192
+
app/models/taxonomy_model.py CHANGED
@@ -1,27 +1,25 @@
1
  from datetime import datetime
2
- import logging
3
  from typing import Any, Dict, List
4
  import uuid
5
 
6
  from bson import ObjectId
 
7
 
8
  from app.repositories.db import db
9
  from app.schemas.taxonomy_schema import TaxonomyInfo
10
 
11
- logger = logging.getLogger(__name__)
12
 
13
  class TaxonomyModel:
14
 
15
  @staticmethod
16
  async def create_append_delete_taxonomy(data: Any) -> Dict[str, Any]:
17
  try:
18
-
19
  if isinstance(data, dict):
20
  data = TaxonomyInfo(**data)
21
 
22
  merchant_id = data.merchant_id
23
  filter_criteria = {"merchant_id": merchant_id}
24
-
25
  existing = await db["settings_taxonomy"].find_one(filter_criteria)
26
 
27
  if data.is_delete:
@@ -81,12 +79,12 @@ class TaxonomyModel:
81
  if pull_query:
82
  update_query["$pull"] = pull_query
83
 
84
- if len(update_query) > 1:
85
  await db["settings_taxonomy"].update_one(
86
  {"merchant_id": merchant_id},
87
  update_query
88
  )
89
- logger.info(f"Deleted taxonomy values for merchant_id={merchant_id}")
90
  return {"status": "deleted", "merchant_id": merchant_id}
91
 
92
  return {"status": "no_changes", "merchant_id": merchant_id}
@@ -148,12 +146,12 @@ class TaxonomyModel:
148
  if add_to_set:
149
  update_query["$addToSet"] = add_to_set
150
 
151
- if len(update_query) > 1:
152
  await db["settings_taxonomy"].update_one(
153
  {"merchant_id": merchant_id},
154
  update_query
155
  )
156
- logger.info(f"Updated taxonomy for merchant_id={merchant_id}")
157
  return {"status": "updated", "merchant_id": merchant_id}
158
 
159
  return {"status": "no_changes", "merchant_id": merchant_id}
@@ -166,11 +164,11 @@ class TaxonomyModel:
166
  doc["updated_at"] = datetime.utcnow()
167
  await db["settings_taxonomy"].insert_one(doc)
168
 
169
- logger.info(f"Created new taxonomy for merchant_id={merchant_id}")
170
  return {"status": "created", "taxonomy_id": taxonomy_id, "merchant_id": merchant_id}
171
 
172
  except Exception as e:
173
- logger.error(f"Error in create_taxonomy: {e}", exc_info=True)
174
  raise RuntimeError("Failed to create/append taxonomy") from e
175
 
176
  @staticmethod
@@ -178,44 +176,40 @@ class TaxonomyModel:
178
  try:
179
  obj_id = ObjectId(id)
180
  result = await db['settings_taxonomy'].update_one(
181
- {"_id":obj_id},
182
  {"$set": update_fields}
183
  )
184
  return result.modified_count > 0
185
  except Exception as e:
186
- logger.error(f"Error updating taxonomy data: {e}", exc_info=True)
187
  raise RuntimeError("Failed to update taxonomy data") from e
188
 
189
  @staticmethod
190
  async def delete_taxonomy(id: str) -> bool:
191
-
192
- logger.info(f"Soft deleting taxonomy with ID: {id}")
193
  try:
194
  obj_id = ObjectId(id)
195
  result = await db["settings_taxonomy"].update_one(
196
- {"_id":obj_id},
197
- {"$set": {"is_active": "false"}} # or use "deleted": True
198
  )
199
  updated = result.modified_count > 0
200
 
201
  if updated:
202
- logger.debug(f"Taxonomy {id} soft deleted successfully.")
203
  else:
204
- logger.debug(f"Taxonomy {id} not found")
205
 
206
  return updated
207
-
208
  except Exception as e:
209
- logger.error(f"Error soft deleting taxonomy: {e}", exc_info=True)
210
  raise RuntimeError("Failed to soft delete item") from e
211
 
212
  @staticmethod
213
  async def list_taxonomy(merchant_id: str, taxonomy_type=None) -> List[dict]:
214
- logger.info(f"Fetching taxonomy list for merchant ID: {merchant_id}")
215
  try:
216
- filter_criteria = {
217
- "merchant_id": merchant_id,
218
- }
219
  projection = None
220
  if taxonomy_type in ["brands", "categories", "classes", "lines", "subcategories", "job_role", "specializations", "languages", "customer_group", "pos_tender_modes", "payment_types", "payment_methods", "asset_location", "asset_category", "stock_bin_location", "branch_types"]:
221
  projection = {taxonomy_type: 1, "_id": 0}
@@ -223,13 +217,12 @@ class TaxonomyModel:
223
  results = (
224
  await db["settings_taxonomy"]
225
  .find(filter_criteria, projection)
226
- .to_list(length=None) # Retrieve all records without pagination
227
  )
228
- logger.debug(f"Retrieved {len(results)} taxonomy records.")
229
  return results
230
 
231
  except Exception as e:
232
- logger.error(f"Error fetching taxonomy list: {e}", exc_info=True)
233
  raise RuntimeError("Failed to fetch taxonomy list") from e
234
 
235
-
 
1
  from datetime import datetime
 
2
  from typing import Any, Dict, List
3
  import uuid
4
 
5
  from bson import ObjectId
6
+ from insightfy_utils.logging import get_logger
7
 
8
  from app.repositories.db import db
9
  from app.schemas.taxonomy_schema import TaxonomyInfo
10
 
11
+ logger = get_logger(__name__)
12
 
13
  class TaxonomyModel:
14
 
15
  @staticmethod
16
  async def create_append_delete_taxonomy(data: Any) -> Dict[str, Any]:
17
  try:
 
18
  if isinstance(data, dict):
19
  data = TaxonomyInfo(**data)
20
 
21
  merchant_id = data.merchant_id
22
  filter_criteria = {"merchant_id": merchant_id}
 
23
  existing = await db["settings_taxonomy"].find_one(filter_criteria)
24
 
25
  if data.is_delete:
 
79
  if pull_query:
80
  update_query["$pull"] = pull_query
81
 
82
+ if len(update_query) > 1:
83
  await db["settings_taxonomy"].update_one(
84
  {"merchant_id": merchant_id},
85
  update_query
86
  )
87
+ logger.info("Deleted taxonomy values", extra={"merchant_id": merchant_id})
88
  return {"status": "deleted", "merchant_id": merchant_id}
89
 
90
  return {"status": "no_changes", "merchant_id": merchant_id}
 
146
  if add_to_set:
147
  update_query["$addToSet"] = add_to_set
148
 
149
+ if len(update_query) > 1:
150
  await db["settings_taxonomy"].update_one(
151
  {"merchant_id": merchant_id},
152
  update_query
153
  )
154
+ logger.info("Updated taxonomy", extra={"merchant_id": merchant_id})
155
  return {"status": "updated", "merchant_id": merchant_id}
156
 
157
  return {"status": "no_changes", "merchant_id": merchant_id}
 
164
  doc["updated_at"] = datetime.utcnow()
165
  await db["settings_taxonomy"].insert_one(doc)
166
 
167
+ logger.info("Created new taxonomy", extra={"merchant_id": merchant_id, "taxonomy_id": taxonomy_id})
168
  return {"status": "created", "taxonomy_id": taxonomy_id, "merchant_id": merchant_id}
169
 
170
  except Exception as e:
171
+ logger.error("Error in create_taxonomy", exc_info=e)
172
  raise RuntimeError("Failed to create/append taxonomy") from e
173
 
174
  @staticmethod
 
176
  try:
177
  obj_id = ObjectId(id)
178
  result = await db['settings_taxonomy'].update_one(
179
+ {"_id": obj_id},
180
  {"$set": update_fields}
181
  )
182
  return result.modified_count > 0
183
  except Exception as e:
184
+ logger.error("Error updating taxonomy data", exc_info=e)
185
  raise RuntimeError("Failed to update taxonomy data") from e
186
 
187
  @staticmethod
188
  async def delete_taxonomy(id: str) -> bool:
189
+ logger.info("Soft deleting taxonomy", extra={"id": id})
 
190
  try:
191
  obj_id = ObjectId(id)
192
  result = await db["settings_taxonomy"].update_one(
193
+ {"_id": obj_id},
194
+ {"$set": {"is_active": "false"}}
195
  )
196
  updated = result.modified_count > 0
197
 
198
  if updated:
199
+ logger.debug("Taxonomy soft deleted", extra={"id": id})
200
  else:
201
+ logger.debug("Taxonomy not found", extra={"id": id})
202
 
203
  return updated
 
204
  except Exception as e:
205
+ logger.error("Error soft deleting taxonomy", exc_info=e)
206
  raise RuntimeError("Failed to soft delete item") from e
207
 
208
  @staticmethod
209
  async def list_taxonomy(merchant_id: str, taxonomy_type=None) -> List[dict]:
210
+ logger.info("Fetching taxonomy list", extra={"merchant_id": merchant_id})
211
  try:
212
+ filter_criteria = {"merchant_id": merchant_id}
 
 
213
  projection = None
214
  if taxonomy_type in ["brands", "categories", "classes", "lines", "subcategories", "job_role", "specializations", "languages", "customer_group", "pos_tender_modes", "payment_types", "payment_methods", "asset_location", "asset_category", "stock_bin_location", "branch_types"]:
215
  projection = {taxonomy_type: 1, "_id": 0}
 
217
  results = (
218
  await db["settings_taxonomy"]
219
  .find(filter_criteria, projection)
220
+ .to_list(length=None)
221
  )
222
+ logger.debug("Retrieved taxonomy records", extra={"count": len(results)})
223
  return results
224
 
225
  except Exception as e:
226
+ logger.error("Error fetching taxonomy list", exc_info=e)
227
  raise RuntimeError("Failed to fetch taxonomy list") from e
228
 
 
app/nosql.py CHANGED
@@ -1,12 +1,13 @@
1
  # data_stores.py (or your current filename)
2
  from __future__ import annotations
3
 
4
- import logging
5
  from urllib.parse import urlparse
6
 
7
  import motor.motor_asyncio
8
  import redis.asyncio as redis
9
  from redis.exceptions import RedisError
 
 
10
 
11
  # Single source of truth
12
  from settings import (
@@ -19,13 +20,10 @@ from settings import (
19
  )
20
 
21
  # -----------------------------------------------------------------------------
22
- # Logging
23
  # -----------------------------------------------------------------------------
24
- logging.basicConfig(
25
- level=logging.INFO,
26
- format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
27
- )
28
- logger = logging.getLogger(__name__)
29
 
30
  # -----------------------------------------------------------------------------
31
  # Validate configuration
@@ -51,9 +49,9 @@ try:
51
  connectTimeoutMS=60_000,
52
  )
53
  mongo_db = mongo_client[MONGO_DB_NAME]
54
- logger.info("MongoDB client initialized (db=%s).", MONGO_DB_NAME)
55
  except Exception as e:
56
- logger.exception("Failed to initialize MongoDB client.")
57
  raise
58
 
59
  # -----------------------------------------------------------------------------
@@ -92,7 +90,7 @@ try:
92
  redis_client = _redis_from_settings()
93
  logger.info("Redis client initialized.")
94
  except Exception as e:
95
- logger.exception("Failed to initialize Redis client.")
96
  raise
97
 
98
  # -----------------------------------------------------------------------------
 
1
  # data_stores.py (or your current filename)
2
  from __future__ import annotations
3
 
 
4
  from urllib.parse import urlparse
5
 
6
  import motor.motor_asyncio
7
  import redis.asyncio as redis
8
  from redis.exceptions import RedisError
9
+ from insightfy_utils.db import MongoConnector, RedisConnector
10
+ from insightfy_utils.logging import setup_logging, get_logger
11
 
12
  # Single source of truth
13
  from settings import (
 
20
  )
21
 
22
  # -----------------------------------------------------------------------------
23
+ # Logging (migrated to insightfy-utils)
24
  # -----------------------------------------------------------------------------
25
+ setup_logging(level="INFO", format_type="json", app_name="insightfy-bloom-ms-ems-nosql")
26
+ logger = get_logger(__name__)
 
 
 
27
 
28
  # -----------------------------------------------------------------------------
29
  # Validate configuration
 
49
  connectTimeoutMS=60_000,
50
  )
51
  mongo_db = mongo_client[MONGO_DB_NAME]
52
+ logger.info("MongoDB client initialized", extra={"database": MONGO_DB_NAME})
53
  except Exception as e:
54
+ logger.exception("Failed to initialize MongoDB client", exc_info=e)
55
  raise
56
 
57
  # -----------------------------------------------------------------------------
 
90
  redis_client = _redis_from_settings()
91
  logger.info("Redis client initialized.")
92
  except Exception as e:
93
+ logger.exception("Failed to initialize Redis client", exc_info=e)
94
  raise
95
 
96
  # -----------------------------------------------------------------------------
app/repositories/cache.py CHANGED
@@ -1,9 +1,9 @@
1
  from typing import Any
2
  from app.nosql import redis_client
 
3
  import json
4
- import logging
5
 
6
- logger = logging.getLogger(__name__)
7
 
8
  CACHE_EXPIRY_SECONDS = 3600
9
 
@@ -14,10 +14,10 @@ async def get_or_set_cache(key: str, fetch_func, expiry: int = CACHE_EXPIRY_SECO
14
  """
15
  cached_data = await redis_client.get(key)
16
  if cached_data:
17
- logger.info(f"Cache hit for key: {key}")
18
  return json.loads(cached_data)
19
 
20
- logger.info(f"Cache miss for key: {key}")
21
  data = await fetch_func()
22
  await redis_client.set(key, json.dumps(data), ex=expiry)
23
  return data
 
1
  from typing import Any
2
  from app.nosql import redis_client
3
+ from insightfy_utils.logging import get_logger
4
  import json
 
5
 
6
+ logger = get_logger(__name__)
7
 
8
  CACHE_EXPIRY_SECONDS = 3600
9
 
 
14
  """
15
  cached_data = await redis_client.get(key)
16
  if cached_data:
17
+ logger.info("Cache hit", extra={"key": key})
18
  return json.loads(cached_data)
19
 
20
+ logger.info("Cache miss", extra={"key": key})
21
  data = await fetch_func()
22
  await redis_client.set(key, json.dumps(data), ex=expiry)
23
  return data
app/repositories/db.py CHANGED
@@ -3,32 +3,32 @@ from typing import Any, Dict, List, Optional
3
 
4
  from sqlalchemy import text
5
  from app.nosql import db
6
- from pymongo.errors import PyMongoError # βœ… Unified error handling
7
  from bson import ObjectId
8
  from bson.decimal128 import Decimal128
9
- import logging
10
  from app.sql import async_session
11
  from sqlalchemy.exc import SQLAlchemyError
12
- logger = logging.getLogger(__name__)
 
13
 
14
  async def count_documents(collection: str, filter_criteria: Dict) -> int:
15
  """
16
  Count documents in a MongoDB collection that match the filter criteria.
17
  """
18
  try:
19
-
20
- logger.info(f"Counting documents in collection: {collection} with filter: {filter_criteria}")
21
 
22
  if filter_criteria is None:
23
- logger.debug(f"Filter criteria: {filter_criteria}")
24
  else:
25
- logger.debug("No filter criteria provided, counting all documents in the collection.")
26
 
27
  count = await db[collection].count_documents(filter_criteria)
28
- logger.info(f"Counted {count} documents in collection: {collection}")
29
  return count
30
  except PyMongoError as e:
31
- logger.error(f"MongoDB count error in collection '{collection}': {e}")
32
  raise RuntimeError("Database count operation failed") from e
33
 
34
  def serialize_mongo_document(doc: Any) -> Any:
@@ -41,11 +41,11 @@ def serialize_mongo_document(doc: Any) -> Any:
41
  if isinstance(doc, dict):
42
  return {key: serialize_mongo_document(value) for key, value in doc.items()}
43
  if isinstance(doc, ObjectId):
44
- return str(doc) # βœ… Convert ObjectId to string
45
  if isinstance(doc, Decimal128):
46
- return float(doc.to_decimal()) # βœ… Convert Decimal128 to float
47
  if isinstance(doc, datetime):
48
- return doc.isoformat() # βœ… Convert datetime to ISO 8601 string
49
  return doc
50
 
51
  async def execute_query(collection: str, pipeline: List[Dict]) -> Any:
@@ -53,13 +53,13 @@ async def execute_query(collection: str, pipeline: List[Dict]) -> Any:
53
  Execute MongoDB aggregation pipeline with error handling and serialization.
54
  """
55
  try:
56
- logger.info(f"Executing aggregation pipeline on collection: {collection} with pipeline: {pipeline}")
57
  results = await db[collection].aggregate(pipeline).to_list(length=None)
58
  serialized_results = serialize_mongo_document(results)
59
- logger.info(f"Aggregation query returned {len(serialized_results)} documents from {collection}")
60
  return serialized_results
61
  except PyMongoError as e:
62
- logger.error(f"MongoDB query error in collection '{collection}': {e}")
63
  raise RuntimeError("Database query failed") from e
64
 
65
  async def fetch_documents(
@@ -82,7 +82,15 @@ async def fetch_documents(
82
  .to_list(length=limit)
83
  )
84
  has_more = (offset + limit) < total
85
- logger.info(f"Fetched {len(results)} documents from {collection}. Total: {total}, Has More: {has_more}")
 
 
 
 
 
 
 
 
86
 
87
  return {
88
  "documents": serialize_mongo_document(results),
@@ -90,7 +98,7 @@ async def fetch_documents(
90
  "has_more": has_more,
91
  }
92
  except PyMongoError as e:
93
- logger.error(f"MongoDB fetch error in collection '{collection}': {e}")
94
  raise RuntimeError("Database fetch operation failed") from e
95
 
96
  async def fetch_pos_documents(
@@ -102,19 +110,12 @@ async def fetch_pos_documents(
102
  ) -> List[Dict[str, Any]]:
103
  """
104
  Fetch POS-specific documents from MongoDB with filtering, pagination, and projection.
105
-
106
- Args:
107
- collection (str): The name of the MongoDB collection.
108
- filter_criteria (Dict[str, Any]): Criteria to filter the items.
109
- projection (Optional[Dict[str, int]]): Fields to include or exclude.
110
- offset (int): Number of documents to skip for pagination.
111
- limit (int): Maximum number of documents to fetch.
112
-
113
- Returns:
114
- List[Dict[str, Any]]: A list of documents matching the filter criteria.
115
  """
116
  try:
117
- logger.info(f"Fetching POS documents from {collection} with filter: {filter_criteria} and projection: {projection}")
 
 
 
118
 
119
  results = (
120
  await db[collection]
@@ -124,12 +125,11 @@ async def fetch_pos_documents(
124
  .to_list(length=limit)
125
  )
126
 
127
- logger.info(f"Fetched {len(results)} POS documents from collection: {collection}")
128
  return serialize_mongo_document(results)
129
  except PyMongoError as e:
130
- logger.error(f"MongoDB fetch error for POS documents in collection '{collection}': {e}")
131
  raise RuntimeError("Failed to fetch POS documents") from e
132
-
133
 
134
  async def fetch_one_document(
135
  collection_name: str,
@@ -137,54 +137,33 @@ async def fetch_one_document(
137
  ) -> Optional[Dict[str, Any]]:
138
  """
139
  Fetches a single document from the given MongoDB collection.
140
-
141
- Args:
142
- collection_name (str): Name of the collection.
143
- query (Dict[str, Any]): Filter query to find the document.
144
-
145
- Returns:
146
- Optional[Dict[str, Any]]: The found document or None.
147
-
148
- Raises:
149
- RuntimeError: If a DB operation fails.
150
  """
151
  try:
152
- logger.debug(f"Fetching one document from {collection_name} with query: {query}")
153
  result = await db[collection_name].find_one(query)
154
- logger.debug(f"Found document: {result}")
155
  return result
156
  except PyMongoError as e:
157
- logger.error(f"MongoDB error fetching from {collection_name}: {e}", exc_info=True)
158
  raise RuntimeError(f"Database read failed for {collection_name}") from e
159
-
160
  async def fetch_many_aggregate(
161
  collection: str,
162
  pipeline: List[Dict[str, Any]]
163
  ) -> List[Dict[str, Any]]:
164
  """
165
  Execute a MongoDB aggregation pipeline and return serialized results.
166
-
167
- Args:
168
- collection (str): The name of the MongoDB collection.
169
- pipeline (List[Dict[str, Any]]): The aggregation pipeline stages.
170
-
171
- Returns:
172
- List[Dict[str, Any]]: Aggregated documents.
173
-
174
- Raises:
175
- RuntimeError: If the aggregation query fails.
176
  """
177
  try:
178
- logger.info(f"Running aggregation on collection '{collection}' with pipeline: {pipeline}")
179
  cursor = db[collection].aggregate(pipeline)
180
  results = await cursor.to_list(length=None)
181
  serialized = serialize_mongo_document(results)
182
- logger.info(f"Aggregation returned {len(serialized)} results from collection '{collection}'")
183
  return serialized
184
  except PyMongoError as e:
185
- logger.error(f"Aggregation failed on collection '{collection}': {e}", exc_info=True)
186
  raise RuntimeError("Aggregation query failed") from e
187
-
188
 
189
  async def fetch_many_pg(
190
  sql_query: str,
@@ -192,23 +171,13 @@ async def fetch_many_pg(
192
  ) -> List[Dict[str, Any]]:
193
  """
194
  Execute a raw SQL query in Postgres and return serialized results.
195
-
196
- Args:
197
- sql_query (str): SQL query string.
198
- params (Dict[str, Any]): Query parameters for safe binding.
199
-
200
- Returns:
201
- List[Dict[str, Any]]: List of row dicts.
202
-
203
- Raises:
204
- RuntimeError: If the query fails.
205
  """
206
  try:
207
- async with async_session() as session: # type: AsyncSession
208
- logger.info(f"Running Postgres query: {sql_query} with params: {params}")
209
  result = await session.execute(text(sql_query), params or {})
210
- rows = result.mappings().all() # returns list of dict-like row objects
211
  return [dict(row) for row in rows]
212
  except SQLAlchemyError as e:
213
- logger.error(f"Postgres query failed: {e}", exc_info=True)
214
  raise RuntimeError("Postgres query failed") from e
 
3
 
4
  from sqlalchemy import text
5
  from app.nosql import db
6
+ from pymongo.errors import PyMongoError
7
  from bson import ObjectId
8
  from bson.decimal128 import Decimal128
9
+ from insightfy_utils.logging import get_logger
10
  from app.sql import async_session
11
  from sqlalchemy.exc import SQLAlchemyError
12
+
13
+ logger = get_logger(__name__)
14
 
15
  async def count_documents(collection: str, filter_criteria: Dict) -> int:
16
  """
17
  Count documents in a MongoDB collection that match the filter criteria.
18
  """
19
  try:
20
+ logger.info("Counting documents", extra={"collection": collection, "filter": filter_criteria})
 
21
 
22
  if filter_criteria is None:
23
+ logger.debug("Filter criteria is None", extra={"filter": filter_criteria})
24
  else:
25
+ logger.debug("No filter criteria provided, counting all documents")
26
 
27
  count = await db[collection].count_documents(filter_criteria)
28
+ logger.info("Counted documents", extra={"collection": collection, "count": count})
29
  return count
30
  except PyMongoError as e:
31
+ logger.error("MongoDB count error", extra={"collection": collection}, exc_info=e)
32
  raise RuntimeError("Database count operation failed") from e
33
 
34
  def serialize_mongo_document(doc: Any) -> Any:
 
41
  if isinstance(doc, dict):
42
  return {key: serialize_mongo_document(value) for key, value in doc.items()}
43
  if isinstance(doc, ObjectId):
44
+ return str(doc)
45
  if isinstance(doc, Decimal128):
46
+ return float(doc.to_decimal())
47
  if isinstance(doc, datetime):
48
+ return doc.isoformat()
49
  return doc
50
 
51
  async def execute_query(collection: str, pipeline: List[Dict]) -> Any:
 
53
  Execute MongoDB aggregation pipeline with error handling and serialization.
54
  """
55
  try:
56
+ logger.info("Executing aggregation pipeline", extra={"collection": collection, "pipeline": pipeline})
57
  results = await db[collection].aggregate(pipeline).to_list(length=None)
58
  serialized_results = serialize_mongo_document(results)
59
+ logger.info("Aggregation completed", extra={"collection": collection, "result_count": len(serialized_results)})
60
  return serialized_results
61
  except PyMongoError as e:
62
+ logger.error("MongoDB query error", extra={"collection": collection}, exc_info=e)
63
  raise RuntimeError("Database query failed") from e
64
 
65
  async def fetch_documents(
 
82
  .to_list(length=limit)
83
  )
84
  has_more = (offset + limit) < total
85
+ logger.info(
86
+ "Fetched documents",
87
+ extra={
88
+ "collection": collection,
89
+ "count": len(results),
90
+ "total": total,
91
+ "has_more": has_more
92
+ }
93
+ )
94
 
95
  return {
96
  "documents": serialize_mongo_document(results),
 
98
  "has_more": has_more,
99
  }
100
  except PyMongoError as e:
101
+ logger.error("MongoDB fetch error", extra={"collection": collection}, exc_info=e)
102
  raise RuntimeError("Database fetch operation failed") from e
103
 
104
  async def fetch_pos_documents(
 
110
  ) -> List[Dict[str, Any]]:
111
  """
112
  Fetch POS-specific documents from MongoDB with filtering, pagination, and projection.
 
 
 
 
 
 
 
 
 
 
113
  """
114
  try:
115
+ logger.info(
116
+ "Fetching POS documents",
117
+ extra={"collection": collection, "filter": filter_criteria, "projection": projection}
118
+ )
119
 
120
  results = (
121
  await db[collection]
 
125
  .to_list(length=limit)
126
  )
127
 
128
+ logger.info("Fetched POS documents", extra={"collection": collection, "count": len(results)})
129
  return serialize_mongo_document(results)
130
  except PyMongoError as e:
131
+ logger.error("MongoDB fetch error for POS documents", extra={"collection": collection}, exc_info=e)
132
  raise RuntimeError("Failed to fetch POS documents") from e
 
133
 
134
  async def fetch_one_document(
135
  collection_name: str,
 
137
  ) -> Optional[Dict[str, Any]]:
138
  """
139
  Fetches a single document from the given MongoDB collection.
 
 
 
 
 
 
 
 
 
 
140
  """
141
  try:
142
+ logger.debug("Fetching one document", extra={"collection": collection_name, "query": query})
143
  result = await db[collection_name].find_one(query)
144
+ logger.debug("Found document", extra={"collection": collection_name, "found": result is not None})
145
  return result
146
  except PyMongoError as e:
147
+ logger.error("MongoDB error fetching document", extra={"collection": collection_name}, exc_info=e)
148
  raise RuntimeError(f"Database read failed for {collection_name}") from e
149
+
150
  async def fetch_many_aggregate(
151
  collection: str,
152
  pipeline: List[Dict[str, Any]]
153
  ) -> List[Dict[str, Any]]:
154
  """
155
  Execute a MongoDB aggregation pipeline and return serialized results.
 
 
 
 
 
 
 
 
 
 
156
  """
157
  try:
158
+ logger.info("Running aggregation", extra={"collection": collection, "pipeline": pipeline})
159
  cursor = db[collection].aggregate(pipeline)
160
  results = await cursor.to_list(length=None)
161
  serialized = serialize_mongo_document(results)
162
+ logger.info("Aggregation completed", extra={"collection": collection, "result_count": len(serialized)})
163
  return serialized
164
  except PyMongoError as e:
165
+ logger.error("Aggregation failed", extra={"collection": collection}, exc_info=e)
166
  raise RuntimeError("Aggregation query failed") from e
 
167
 
168
  async def fetch_many_pg(
169
  sql_query: str,
 
171
  ) -> List[Dict[str, Any]]:
172
  """
173
  Execute a raw SQL query in Postgres and return serialized results.
 
 
 
 
 
 
 
 
 
 
174
  """
175
  try:
176
+ async with async_session() as session:
177
+ logger.info("Running Postgres query", extra={"query": sql_query, "params": params})
178
  result = await session.execute(text(sql_query), params or {})
179
+ rows = result.mappings().all()
180
  return [dict(row) for row in rows]
181
  except SQLAlchemyError as e:
182
+ logger.error("Postgres query failed", exc_info=e)
183
  raise RuntimeError("Postgres query failed") from e
app/repositories/gift_card_repository.py CHANGED
@@ -1,34 +1,22 @@
1
- import logging
2
  from typing import Any, Dict, List, Optional
 
3
  from app.models.gift_card_models import GiftCardTemplateModel
4
  from app.schemas.gift_card_schema import GiftCardTemplateFilter
5
 
6
- # Configure logging for this module
7
- logger = logging.getLogger(__name__)
8
 
9
  class GiftCardRepository:
10
- """
11
- Repository layer for Gift Card operations.
12
- Provides a clean interface between services and the data model.
13
- """
14
 
15
  @staticmethod
16
- async def create_template(template_data: Dict[str, Any],merchant_id:str) -> str:
17
- """
18
- Create a new gift card template.
19
-
20
- Args:
21
- template_data (Dict[str, Any]): Template data to create
22
-
23
- Returns:
24
- str: Created template ID
25
- """
26
  try:
27
- template_id = await GiftCardTemplateModel.create_template(template_data,merchant_id)
28
- logger.info(f"Template created with ID: {template_id}")
29
  return template_id
30
  except Exception as e:
31
- logger.error(f"Repository error creating template: {e}")
32
  raise
33
 
34
  @staticmethod
@@ -37,147 +25,88 @@ class GiftCardRepository:
37
  offset: int = 0,
38
  limit: int = 100
39
  ) -> Dict[str, Any]:
40
- """
41
- Get gift card templates with pagination and filtering.
42
-
43
- Args:
44
- filter_criteria (Dict[str, Any]): MongoDB filter criteria
45
- offset (int): Number of documents to skip
46
- limit (int): Maximum number of documents to return
47
-
48
- Returns:
49
- Dict[str, Any]: Templates with pagination info
50
- """
51
  try:
52
  result = await GiftCardTemplateModel.list_gift_card(filter_criteria, offset, limit)
53
- logger.info(f"Retrieved {len(result['templates'])} templates")
54
  return result
55
  except Exception as e:
56
- logger.error(f"Repository error getting templates: {e}")
57
  raise
58
 
59
  @staticmethod
60
  async def get_template_by_id(template_id: str) -> Optional[Dict[str, Any]]:
61
- """
62
- Get a specific gift card template by ID.
63
-
64
- Args:
65
- template_id (str): Template ID to retrieve
66
-
67
- Returns:
68
- Optional[Dict[str, Any]]: Template data or None if not found
69
- """
70
  try:
71
  template = await GiftCardTemplateModel.get_template_by_id(template_id)
72
  if template:
73
- logger.info(f"Retrieved template: {template_id}")
74
  else:
75
- logger.info(f"Template not found: {template_id}")
76
  return template
77
  except Exception as e:
78
- logger.error(f"Repository error getting template {template_id}: {e}")
79
  raise
80
 
81
  @staticmethod
82
  async def update_template(template_id: str, update_data: Dict[str, Any]) -> bool:
83
- """
84
- Update a gift card template.
85
-
86
- Args:
87
- template_id (str): Template ID to update
88
- update_data (Dict[str, Any]): Fields to update
89
-
90
- Returns:
91
- bool: True if update was successful
92
- """
93
  try:
94
  success = await GiftCardTemplateModel.update_template(template_id, update_data)
95
  if success:
96
- logger.info(f"Template updated: {template_id}")
97
  else:
98
- logger.warning(f"No changes made to template: {template_id}")
99
  return success
100
  except Exception as e:
101
- logger.error(f"Repository error updating template {template_id}: {e}")
102
  raise
103
 
104
  @staticmethod
105
  async def delete_template(template_id: str) -> bool:
106
- """
107
- Delete a gift card template.
108
-
109
- Args:
110
- template_id (str): Template ID to delete
111
-
112
- Returns:
113
- bool: True if deletion was successful
114
- """
115
  try:
116
  success = await GiftCardTemplateModel.delete_template(template_id)
117
  if success:
118
- logger.info(f"Template deleted: {template_id}")
119
  else:
120
- logger.warning(f"Template not found for deletion: {template_id}")
121
  return success
122
  except Exception as e:
123
- logger.error(f"Repository error deleting template {template_id}: {e}")
124
  raise
125
 
126
  @staticmethod
127
  async def check_template_exists(template_id: str) -> bool:
128
- """
129
- Check if a template exists.
130
-
131
- Args:
132
- template_id (str): Template ID to check
133
-
134
- Returns:
135
- bool: True if template exists
136
- """
137
  try:
138
  template = await GiftCardTemplateModel.get_template_by_id(template_id)
139
  return template is not None
140
  except Exception as e:
141
- logger.error(f"Repository error checking template existence {template_id}: {e}")
142
  raise
143
 
144
  @staticmethod
145
  async def increment_issued_count(template_id: str) -> bool:
146
- """
147
- Increment the issued count for a template.
148
-
149
- Args:
150
- template_id (str): Template ID to increment count for
151
-
152
- Returns:
153
- bool: True if increment was successful
154
- """
155
  try:
156
  success = await GiftCardTemplateModel.increment_issued_count(template_id)
157
  if success:
158
- logger.info(f"Issued count incremented for template: {template_id}")
159
  return success
160
  except Exception as e:
161
- logger.error(f"Repository error incrementing issued count {template_id}: {e}")
162
  raise
163
 
164
  @staticmethod
165
  async def check_stock_availability(template_id: str) -> Dict[str, Any]:
166
- """
167
- Check stock availability for a template.
168
-
169
- Args:
170
- template_id (str): Template ID to check
171
-
172
- Returns:
173
- Dict[str, Any]: Stock availability information
174
- """
175
  try:
176
  stock_info = await GiftCardTemplateModel.check_stock_availability(template_id)
177
- logger.info(f"Stock check for template {template_id}: available={stock_info['available']}")
178
  return stock_info
179
  except Exception as e:
180
- logger.error(f"Repository error checking stock for template {template_id}: {e}")
181
  raise
182
 
183
  @staticmethod
@@ -185,16 +114,7 @@ class GiftCardRepository:
185
  merchant_id: str,
186
  filters: Optional[GiftCardTemplateFilter] = None
187
  ) -> Dict[str, Any]:
188
- """
189
- Build MongoDB filter criteria from filter parameters.
190
-
191
- Args:
192
- merchant_id (str): Merchant ID to filter by
193
- filters (Optional[GiftCardTemplateFilter]): Filter parameters
194
-
195
- Returns:
196
- Dict[str, Any]: MongoDB filter criteria
197
- """
198
  if filters:
199
  return filters.to_mongo_filter(merchant_id)
200
  else:
@@ -206,38 +126,19 @@ class GiftCardRepository:
206
  status: str,
207
  limit: int = 100
208
  ) -> List[Dict[str, Any]]:
209
- """
210
- Get templates by status for a specific merchant.
211
-
212
- Args:
213
- merchant_id (str): Merchant ID
214
- status (str): Template status
215
- limit (int): Maximum number of templates to return
216
-
217
- Returns:
218
- List[Dict[str, Any]]: List of templates
219
- """
220
  try:
221
  filter_criteria = {"merchant_id": merchant_id, "status": status}
222
  result = await GiftCardTemplateModel.get_templates(filter_criteria, 0, limit)
223
- logger.info(f"Retrieved {len(result['templates'])} templates with status {status}")
224
  return result["templates"]
225
  except Exception as e:
226
- logger.error(f"Repository error getting templates by status {status}: {e}")
227
  raise
228
 
229
  @staticmethod
230
  async def xxget_active_templates(merchant_id: str, limit: int = 100) -> List[Dict[str, Any]]:
231
- """
232
- Get active templates for a specific merchant.
233
-
234
- Args:
235
- merchant_id (str): Merchant ID
236
- limit (int): Maximum number of templates to return
237
-
238
- Returns:
239
- List[Dict[str, Any]]: List of active templates
240
- """
241
  return await GiftCardRepository.get_templates_by_status(merchant_id, "active", limit)
242
 
243
  @staticmethod
@@ -246,24 +147,13 @@ class GiftCardRepository:
246
  delivery_type: str,
247
  limit: int = 100
248
  ) -> List[Dict[str, Any]]:
249
- """
250
- Get templates by delivery type for a specific merchant.
251
-
252
- Args:
253
- merchant_id (str): Merchant ID
254
- delivery_type (str): Delivery type (digital/physical)
255
- limit (int): Maximum number of templates to return
256
-
257
- Returns:
258
- List[Dict[str, Any]]: List of templates
259
- """
260
  try:
261
  filter_criteria = {"merchant_id": merchant_id, "delivery_type": delivery_type}
262
  result = await GiftCardTemplateModel.get_templates(filter_criteria, 0, limit)
263
- logger.info(f"Retrieved {len(result['templates'])} {delivery_type} templates")
264
  return result["templates"]
265
  except Exception as e:
266
- logger.error(f"Repository error getting templates by delivery type {delivery_type}: {e}")
267
  raise
268
-
269
-
 
 
1
  from typing import Any, Dict, List, Optional
2
+ from insightfy_utils.logging import get_logger
3
  from app.models.gift_card_models import GiftCardTemplateModel
4
  from app.schemas.gift_card_schema import GiftCardTemplateFilter
5
 
6
+ logger = get_logger(__name__)
 
7
 
8
  class GiftCardRepository:
9
+ """Repository layer for Gift Card operations."""
 
 
 
10
 
11
  @staticmethod
12
+ async def create_template(template_data: Dict[str, Any], merchant_id: str) -> str:
13
+ """Create a new gift card template."""
 
 
 
 
 
 
 
 
14
  try:
15
+ template_id = await GiftCardTemplateModel.create_template(template_data, merchant_id)
16
+ logger.info("Template created", extra={"template_id": template_id})
17
  return template_id
18
  except Exception as e:
19
+ logger.error("Repository error creating template", exc_info=e)
20
  raise
21
 
22
  @staticmethod
 
25
  offset: int = 0,
26
  limit: int = 100
27
  ) -> Dict[str, Any]:
28
+ """Get gift card templates with pagination and filtering."""
 
 
 
 
 
 
 
 
 
 
29
  try:
30
  result = await GiftCardTemplateModel.list_gift_card(filter_criteria, offset, limit)
31
+ logger.info("Retrieved templates", extra={"count": len(result['templates'])})
32
  return result
33
  except Exception as e:
34
+ logger.error("Repository error getting templates", exc_info=e)
35
  raise
36
 
37
  @staticmethod
38
  async def get_template_by_id(template_id: str) -> Optional[Dict[str, Any]]:
39
+ """Get a specific gift card template by ID."""
 
 
 
 
 
 
 
 
40
  try:
41
  template = await GiftCardTemplateModel.get_template_by_id(template_id)
42
  if template:
43
+ logger.info("Retrieved template", extra={"template_id": template_id})
44
  else:
45
+ logger.info("Template not found", extra={"template_id": template_id})
46
  return template
47
  except Exception as e:
48
+ logger.error("Repository error getting template", extra={"template_id": template_id}, exc_info=e)
49
  raise
50
 
51
  @staticmethod
52
  async def update_template(template_id: str, update_data: Dict[str, Any]) -> bool:
53
+ """Update a gift card template."""
 
 
 
 
 
 
 
 
 
54
  try:
55
  success = await GiftCardTemplateModel.update_template(template_id, update_data)
56
  if success:
57
+ logger.info("Template updated", extra={"template_id": template_id})
58
  else:
59
+ logger.warning("No changes made to template", extra={"template_id": template_id})
60
  return success
61
  except Exception as e:
62
+ logger.error("Repository error updating template", extra={"template_id": template_id}, exc_info=e)
63
  raise
64
 
65
  @staticmethod
66
  async def delete_template(template_id: str) -> bool:
67
+ """Delete a gift card template."""
 
 
 
 
 
 
 
 
68
  try:
69
  success = await GiftCardTemplateModel.delete_template(template_id)
70
  if success:
71
+ logger.info("Template deleted", extra={"template_id": template_id})
72
  else:
73
+ logger.warning("Template not found for deletion", extra={"template_id": template_id})
74
  return success
75
  except Exception as e:
76
+ logger.error("Repository error deleting template", extra={"template_id": template_id}, exc_info=e)
77
  raise
78
 
79
  @staticmethod
80
  async def check_template_exists(template_id: str) -> bool:
81
+ """Check if a template exists."""
 
 
 
 
 
 
 
 
82
  try:
83
  template = await GiftCardTemplateModel.get_template_by_id(template_id)
84
  return template is not None
85
  except Exception as e:
86
+ logger.error("Repository error checking template existence", extra={"template_id": template_id}, exc_info=e)
87
  raise
88
 
89
  @staticmethod
90
  async def increment_issued_count(template_id: str) -> bool:
91
+ """Increment the issued count for a template."""
 
 
 
 
 
 
 
 
92
  try:
93
  success = await GiftCardTemplateModel.increment_issued_count(template_id)
94
  if success:
95
+ logger.info("Issued count incremented", extra={"template_id": template_id})
96
  return success
97
  except Exception as e:
98
+ logger.error("Repository error incrementing issued count", extra={"template_id": template_id}, exc_info=e)
99
  raise
100
 
101
  @staticmethod
102
  async def check_stock_availability(template_id: str) -> Dict[str, Any]:
103
+ """Check stock availability for a template."""
 
 
 
 
 
 
 
 
104
  try:
105
  stock_info = await GiftCardTemplateModel.check_stock_availability(template_id)
106
+ logger.info("Stock check completed", extra={"template_id": template_id, "available": stock_info['available']})
107
  return stock_info
108
  except Exception as e:
109
+ logger.error("Repository error checking stock", extra={"template_id": template_id}, exc_info=e)
110
  raise
111
 
112
  @staticmethod
 
114
  merchant_id: str,
115
  filters: Optional[GiftCardTemplateFilter] = None
116
  ) -> Dict[str, Any]:
117
+ """Build MongoDB filter criteria from filter parameters."""
 
 
 
 
 
 
 
 
 
118
  if filters:
119
  return filters.to_mongo_filter(merchant_id)
120
  else:
 
126
  status: str,
127
  limit: int = 100
128
  ) -> List[Dict[str, Any]]:
129
+ """Get templates by status for a specific merchant."""
 
 
 
 
 
 
 
 
 
 
130
  try:
131
  filter_criteria = {"merchant_id": merchant_id, "status": status}
132
  result = await GiftCardTemplateModel.get_templates(filter_criteria, 0, limit)
133
+ logger.info("Retrieved templates by status", extra={"count": len(result['templates']), "status": status})
134
  return result["templates"]
135
  except Exception as e:
136
+ logger.error("Repository error getting templates by status", extra={"status": status}, exc_info=e)
137
  raise
138
 
139
  @staticmethod
140
  async def xxget_active_templates(merchant_id: str, limit: int = 100) -> List[Dict[str, Any]]:
141
+ """Get active templates for a specific merchant."""
 
 
 
 
 
 
 
 
 
142
  return await GiftCardRepository.get_templates_by_status(merchant_id, "active", limit)
143
 
144
  @staticmethod
 
147
  delivery_type: str,
148
  limit: int = 100
149
  ) -> List[Dict[str, Any]]:
150
+ """Get templates by delivery type for a specific merchant."""
 
 
 
 
 
 
 
 
 
 
151
  try:
152
  filter_criteria = {"merchant_id": merchant_id, "delivery_type": delivery_type}
153
  result = await GiftCardTemplateModel.get_templates(filter_criteria, 0, limit)
154
+ logger.info("Retrieved templates by delivery type", extra={"count": len(result['templates']), "delivery_type": delivery_type})
155
  return result["templates"]
156
  except Exception as e:
157
+ logger.error("Repository error getting templates by delivery type", extra={"delivery_type": delivery_type}, exc_info=e)
158
  raise
159
+
 
app/repositories/inventory_repository.py CHANGED
@@ -1,25 +1,22 @@
1
-
2
  import uuid
3
  from typing import List, Dict
4
  from datetime import datetime
 
5
  from app.sql import database
6
  from app.schemas.inventory_schema import branch_inventory, inventory_audit_log
7
- import logging
8
  import sqlalchemy
9
 
10
- logger = logging.getLogger(__name__)
11
 
12
 
13
- async def fetch_stock_by_catalogue_ids(merchant_id:str,branch_id: str, catalogue_ids: List[str]) -> Dict[str, int]:
14
- """
15
- Returns a dictionary of catalogue_id -> stock_on_hand
16
- """
17
  if not merchant_id or not branch_id or not catalogue_ids:
18
  return {}
19
 
20
  try:
21
  query = branch_inventory.select().where(
22
- branch_inventory.c.merchant_id==merchant_id,
23
  branch_inventory.c.branch_id == branch_id,
24
  branch_inventory.c.catalogue_id.in_(catalogue_ids)
25
  )
@@ -27,15 +24,12 @@ async def fetch_stock_by_catalogue_ids(merchant_id:str,branch_id: str, catalogue
27
  rows = await database.fetch_all(query)
28
  return {row["catalogue_id"]: row["stock_on_hand"] for row in rows}
29
  except Exception as e:
30
- logger.error(f"Error fetching stock for branch_id={branch_id}: {e}")
31
  raise
32
 
33
 
34
  async def fetch_stock_by_catalogue_id(branch_id: str, catalogue_id: str) -> int:
35
- """
36
- Returns stock_on_hand for a single catalogue_id in a branch.
37
- Returns 0 if not found.
38
- """
39
  stock_map = await fetch_stock_by_catalogue_ids(branch_id, [catalogue_id])
40
  return stock_map.get(catalogue_id, 0)
41
 
@@ -71,19 +65,12 @@ async def update_inventory(
71
  reason: str = "",
72
  allow_negative_stock: bool = False
73
  ) -> None:
74
- """
75
- Updates stock and logs the audit with merchant_id and associate_id included.
76
- Now includes transaction management, input validation, and stock validation.
77
-
78
- Args:
79
- allow_negative_stock: If True, allows stock to go negative. If False, raises error on negative stock.
80
- """
81
  _validate_inventory_params(merchant_id, branch_id, catalogue_id, associate_id, change)
82
- now = datetime.now() # Using datetime.now() instead of utcnow()
83
 
84
  try:
85
  async with database.transaction():
86
- # Fetch current stock
87
  select_query = branch_inventory.select().where(
88
  branch_inventory.c.branch_id == branch_id,
89
  branch_inventory.c.catalogue_id == catalogue_id
@@ -94,7 +81,15 @@ async def update_inventory(
94
  current_stock = current_record["stock_on_hand"]
95
  new_stock = current_stock + change
96
  if new_stock < 0 and not allow_negative_stock:
97
- logger.warning(f"Negative stock detected: {new_stock} for catalogue_id={catalogue_id}")
 
 
 
 
 
 
 
 
98
  raise ValueError(f"Stock cannot be negative. Current: {current_stock}, Change: {change}")
99
 
100
  update_query = (
@@ -117,7 +112,6 @@ async def update_inventory(
117
  await database.execute(insert_query)
118
  current_stock = 0
119
 
120
- # Insert audit log
121
  audit_query = inventory_audit_log.insert().values(
122
  id=str(uuid.uuid4()),
123
  merchant_id=merchant_id,
@@ -132,19 +126,24 @@ async def update_inventory(
132
  )
133
  await database.execute(audit_query)
134
  logger.info(
135
- f"Inventory updated for catalogue_id={catalogue_id}, branch_id={branch_id}, "
136
- f"merchant_id={merchant_id}. Change: {change}, By: {associate_id}"
 
 
 
 
 
 
137
  )
138
  except sqlalchemy.exc.IntegrityError as e:
139
- logger.error(f"Database constraint violation during inventory update: {e}")
140
  raise ValueError("Invalid inventory update - constraint violation") from e
141
  except sqlalchemy.exc.DatabaseError as e:
142
- logger.error(f"Database error during inventory update: {e}")
143
  raise RuntimeError("Database operation failed") from e
144
  except ValueError as e:
145
- # Re-raise validation errors as-is
146
- logger.error(f"Validation error during inventory update: {e}")
147
  raise
148
  except Exception as e:
149
- logger.error(f"Unexpected error updating inventory: {e}")
150
  raise RuntimeError("Inventory update failed") from e
 
 
1
  import uuid
2
  from typing import List, Dict
3
  from datetime import datetime
4
+ from insightfy_utils.logging import get_logger
5
  from app.sql import database
6
  from app.schemas.inventory_schema import branch_inventory, inventory_audit_log
 
7
  import sqlalchemy
8
 
9
+ logger = get_logger(__name__)
10
 
11
 
12
+ async def fetch_stock_by_catalogue_ids(merchant_id: str, branch_id: str, catalogue_ids: List[str]) -> Dict[str, int]:
13
+ """Returns a dictionary of catalogue_id -> stock_on_hand"""
 
 
14
  if not merchant_id or not branch_id or not catalogue_ids:
15
  return {}
16
 
17
  try:
18
  query = branch_inventory.select().where(
19
+ branch_inventory.c.merchant_id == merchant_id,
20
  branch_inventory.c.branch_id == branch_id,
21
  branch_inventory.c.catalogue_id.in_(catalogue_ids)
22
  )
 
24
  rows = await database.fetch_all(query)
25
  return {row["catalogue_id"]: row["stock_on_hand"] for row in rows}
26
  except Exception as e:
27
+ logger.error("Error fetching stock", extra={"branch_id": branch_id, "error": str(e)}, exc_info=e)
28
  raise
29
 
30
 
31
  async def fetch_stock_by_catalogue_id(branch_id: str, catalogue_id: str) -> int:
32
+ """Returns stock_on_hand for a single catalogue_id in a branch. Returns 0 if not found."""
 
 
 
33
  stock_map = await fetch_stock_by_catalogue_ids(branch_id, [catalogue_id])
34
  return stock_map.get(catalogue_id, 0)
35
 
 
65
  reason: str = "",
66
  allow_negative_stock: bool = False
67
  ) -> None:
68
+ """Updates stock and logs the audit with merchant_id and associate_id included."""
 
 
 
 
 
 
69
  _validate_inventory_params(merchant_id, branch_id, catalogue_id, associate_id, change)
70
+ now = datetime.now()
71
 
72
  try:
73
  async with database.transaction():
 
74
  select_query = branch_inventory.select().where(
75
  branch_inventory.c.branch_id == branch_id,
76
  branch_inventory.c.catalogue_id == catalogue_id
 
81
  current_stock = current_record["stock_on_hand"]
82
  new_stock = current_stock + change
83
  if new_stock < 0 and not allow_negative_stock:
84
+ logger.warning(
85
+ "Negative stock detected",
86
+ extra={
87
+ "new_stock": new_stock,
88
+ "catalogue_id": catalogue_id,
89
+ "current_stock": current_stock,
90
+ "change": change
91
+ }
92
+ )
93
  raise ValueError(f"Stock cannot be negative. Current: {current_stock}, Change: {change}")
94
 
95
  update_query = (
 
112
  await database.execute(insert_query)
113
  current_stock = 0
114
 
 
115
  audit_query = inventory_audit_log.insert().values(
116
  id=str(uuid.uuid4()),
117
  merchant_id=merchant_id,
 
126
  )
127
  await database.execute(audit_query)
128
  logger.info(
129
+ "Inventory updated",
130
+ extra={
131
+ "catalogue_id": catalogue_id,
132
+ "branch_id": branch_id,
133
+ "merchant_id": merchant_id,
134
+ "change": change,
135
+ "associate_id": associate_id
136
+ }
137
  )
138
  except sqlalchemy.exc.IntegrityError as e:
139
+ logger.error("Database constraint violation during inventory update", exc_info=e)
140
  raise ValueError("Invalid inventory update - constraint violation") from e
141
  except sqlalchemy.exc.DatabaseError as e:
142
+ logger.error("Database error during inventory update", exc_info=e)
143
  raise RuntimeError("Database operation failed") from e
144
  except ValueError as e:
145
+ logger.error("Validation error during inventory update", exc_info=e)
 
146
  raise
147
  except Exception as e:
148
+ logger.error("Unexpected error updating inventory", exc_info=e)
149
  raise RuntimeError("Inventory update failed") from e
app/routers/catalogue_router.py CHANGED
@@ -1,7 +1,4 @@
1
-
2
-
3
  # Standard library imports
4
- import logging
5
  import re
6
  import time
7
  from io import BytesIO, StringIO
@@ -10,6 +7,7 @@ from io import BytesIO, StringIO
10
  import pandas as pd
11
  from fastapi import APIRouter, Body, Depends, File, HTTPException, Query, Path, UploadFile
12
  from typing import Optional, Union, Dict, Any
 
13
 
14
  # Local imports
15
  from app.nosql import redis_client
@@ -23,7 +21,7 @@ from app.schemas.catalogue_schema import BulkUploadResponse, CatalogueBulkImport
23
 
24
  # Router Initialization
25
  router = APIRouter()
26
- logger = logging.getLogger(__name__)
27
 
28
 
29
  # Constants
@@ -62,10 +60,10 @@ async def create_item(
62
  data.merchant_id = current_user["merchant_id"]
63
  data.created_by = current_user["associate_id"]
64
  catelogue_id = await CatalogueService.create_catalogue_item(data)
65
- logger.info(f"/catalogue/ create completed in {time.time() - start_time:.2f}s | correlation_id={correlation_id}")
66
  return {"catelogue_id": catelogue_id, "correlation_id": correlation_id}
67
  except RuntimeError as re:
68
- logger.error(f"RuntimeError while creating catalogue item: {re} | correlation_id={correlation_id}", exc_info=True)
69
  raise HTTPException(status_code=500, detail=str(re))
70
 
71
 
@@ -81,23 +79,17 @@ async def update_item(
81
  merchant_id = current_user["merchant_id"]
82
  data["updated_by"] = current_user["associate_id"]
83
 
84
- result=await catalogue_service.CatalogueService.update_catalogue_item(catalogue_id, data,merchant_id)
85
 
86
- logger.info(f"/catalogue/{{id}} update completed in {time.time() - start_time:.2f}s | correlation_id={correlation_id}")
87
  result_data = catalogue_utils.sanitize_document_for_mongo(result)
88
  return {**result_data, "correlation_id": correlation_id}
89
 
90
  except HTTPException as he:
91
- # Pass through 400 or any raised HTTPException
92
- logger.warning(
93
- f"HTTPException while updating catalogue: {he.detail} | correlation_id={correlation_id}"
94
- )
95
  raise he
96
  except RuntimeError as re:
97
- logger.error(
98
- f"RuntimeError while updating catalogue: {re} | correlation_id={correlation_id}",
99
- exc_info=True,
100
- )
101
  raise HTTPException(status_code=500, detail=str(re))
102
  @router.post("/list", status_code=200)
103
  async def list_catalogue_items(
@@ -105,30 +97,9 @@ async def list_catalogue_items(
105
  current_user: dict = Depends(require_view_catalogue_permission),
106
  correlation_id: str = Depends(get_request_id)
107
  ) -> Dict[str, Any]:
108
- """
109
- List catalogue items with optional filters, pagination, and field projection.
110
-
111
- Args:
112
- payload: CatalogueListFilter containing:
113
- - filters: Optional dictionary of filter criteria
114
- - offset: Pagination offset (default: 0)
115
- - limit: Pagination limit (default: 10, max: 1000)
116
- - catalogue_type: Optional catalogue type filter (product/service)
117
- - projection_list: Optional list of fields to include in response
118
- current_user: Current authenticated user information
119
- correlation_id: Request correlation ID for tracking
120
-
121
- Raises:
122
- HTTPException:
123
- - 500 if RuntimeError occurs during processing
124
- - 500 if any unexpected error occurs
125
-
126
- Returns:
127
- dict: Paginated catalogue items with optional field projection applied
128
- """
129
  start_time = time.time()
130
  try:
131
- # Call the service layer to list items with pagination
132
  merchant_id = current_user.get("merchant_id")
133
  branch_id = current_user.get("branch_id")
134
  associate_id = current_user["associate_id"]
@@ -147,268 +118,86 @@ async def list_catalogue_items(
147
  branch_id=branch_id,
148
  projection_list=payload.projection_list
149
  )
150
- logger.info(
151
- f"/catalogue/list completed in {time.time() - start_time:.2f}s | correlation_id={correlation_id}")
152
  return {**result, "correlation_id": correlation_id}
153
 
154
  except RuntimeError as re:
155
- logger.error(
156
- f"RuntimeError while listing items: {re} | correlation_id={correlation_id}", exc_info=True)
157
  raise HTTPException(status_code=500, detail=str(re))
158
  except Exception as e:
159
- logger.error(
160
- f"Error in list_catalogues: {e} | correlation_id={correlation_id}", exc_info=True)
161
  raise HTTPException(status_code=500, detail=INTERNAL_SERVER_ERROR)
162
 
163
 
164
  @router.get("/POS/", status_code=200)
165
  async def list_pos_catalogue_items(
166
- catalogue_type: Optional[str] = Query(
167
- None, description="Catalogue Type (product or service)"),
168
- search_query: Optional[str] = Query(
169
- None, description="Free search across name, alias_code, ean_code, and sku_code"),
170
  current_user: dict = Depends(require_view_catalogue_permission),
171
  correlation_id: str = Depends(get_request_id)
172
  ) -> Dict[str, Any]:
173
- """
174
- Optimized POS catalogue items endpoint with improved performance:
175
- - Consolidated filter building
176
- - Database-level aggregation for grouping
177
- - Proper input validation and sanitization
178
- - Reduced memory usage
179
- - Redis caching for improved response times
180
- """
181
  start_time = time.time()
182
 
183
  try:
184
  merchant_id = current_user.get("merchant_id")
185
  branch_id = current_user.get("branch_id")
186
 
187
- # Generate cache key for this specific query
188
- cache_key = _generate_pos_cache_key(
189
- merchant_id, branch_id, catalogue_type, search_query)
190
 
191
- # Build optimized filter criteria
192
- filter_criteria = _build_pos_filter_criteria(
193
- merchant_id, catalogue_type, search_query
194
- )
195
-
196
- # Define async function to fetch data from database
197
  async def fetch_pos_catalogue_data():
198
- """Fetch POS catalogue data from database"""
199
- return await CatalogueService.get_pos_catalogue_aggregated(
200
- filter_criteria, branch_id
201
- )
202
 
203
- # Initialize cache_exists as False for logging
204
  cache_exists = False
205
-
206
  try:
207
- # Check if data exists in cache for logging purposes
208
  cache_exists = await redis_client.exists(cache_key)
209
  except Exception as redis_error:
210
- logger.warning(
211
- f"Redis connection error during cache check, proceeding without cache: {redis_error} | correlation_id={correlation_id}"
212
- )
213
- # If Redis is down, fetch directly from database
214
  result = await fetch_pos_catalogue_data()
215
- response = {
216
- "status": "success",
217
- "data": result,
218
- "correlation_id": correlation_id,
219
- "cached": False
220
- }
221
- logger.info(
222
- f"/catalogue/POS completed in {time.time() - start_time:.2f}s | cached: False (Redis error) | correlation_id={correlation_id}")
223
  return response
224
 
225
- # Use cache with 1 hour expiry for POS data (3600 seconds)
226
- result = await get_or_set_cache(
227
- key=cache_key,
228
- fetch_func=fetch_pos_catalogue_data,
229
- expiry=3600 # 1 hour cache
230
- )
231
-
232
- response = {
233
- "status": "success",
234
- "data": result,
235
- "correlation_id": correlation_id,
236
- "cached": bool(cache_exists)
237
- }
238
 
239
- logger.info(
240
- f"/catalogue/POS completed in {time.time() - start_time:.2f}s | cached: {response.get('cached', False)} | correlation_id={correlation_id}")
241
  return response
242
 
243
  except ValueError as ve:
244
- logger.error(
245
- f"Validation error: {ve} | correlation_id={correlation_id}")
246
  raise HTTPException(status_code=400, detail=str(ve))
247
  except RuntimeError as re:
248
- logger.error(
249
- f"RuntimeError while listing items: {re} | correlation_id={correlation_id}", exc_info=True)
250
  raise HTTPException(status_code=500, detail=str(re))
251
  except Exception as e:
252
- logger.error(
253
- f"Unexpected error while listing items: {e} | correlation_id={correlation_id}", exc_info=True)
254
  raise HTTPException(status_code=500, detail=INTERNAL_SERVER_ERROR)
255
 
256
 
257
- def _build_pos_filter_criteria(
258
- merchant_id: str,
259
- catalogue_type: Optional[str],
260
- search_query: Optional[str]
261
- ) -> Dict[str, Any]:
262
- """
263
- Build optimized filter criteria for POS catalogue queries.
264
-
265
- Performance improvements:
266
- - Consolidated filter building logic
267
- - Input validation and sanitization
268
- - Optimized query patterns
269
- """
270
- # Input validation
271
- if search_query and len(search_query.strip()) < 2:
272
- raise ValueError("Search query must be at least 2 characters long")
273
-
274
- # Base filter with compound index support
275
- filter_criteria = {
276
- "merchant_id": merchant_id,
277
- "catalogue_type": {"$nin": ["BackBar"]} # Exclude BackBar by default
278
- }
279
-
280
- # Optimize catalogue type filtering - exact match instead of regex
281
- if catalogue_type:
282
- if catalogue_type.lower() not in ["product", "service"]:
283
- raise ValueError(
284
- "Invalid catalogue_type. Must be 'product' or 'service'")
285
- filter_criteria["catalogue_type"] = catalogue_type.lower()
286
-
287
- # Optimize search query with text index if available, fallback to regex
288
- if search_query:
289
- search_query = search_query.strip()
290
- escaped_query = re.escape(search_query)
291
-
292
- # Use compound OR condition for better index utilization
293
- filter_criteria["$or"] = [
294
- # Prefix match is faster
295
- {"name": {"$regex": f"^{escaped_query}", "$options": "i"}},
296
- {"alias_code": {"$regex": f"^{escaped_query}", "$options": "i"}},
297
- {"ean_code": escaped_query}, # Exact match for codes
298
- {"sku_code": escaped_query},
299
- ]
300
-
301
- if catalogue_type == "product":
302
- # Only add valid objects to $and
303
- stock_condition = {
304
- "$or": [
305
- {"allow_negative_stock": True},
306
- {
307
- "$and": [
308
- {"allow_negative_stock": False},
309
- {"stock_on_hand": {"$gt": 0}}
310
- ]
311
- }
312
- ]
313
- }
314
- if "$and" in filter_criteria:
315
- filter_criteria["$and"].append(stock_condition)
316
- else:
317
- filter_criteria["$and"] = [stock_condition]
318
- ''' # Add optimized stock conditions for products only
319
- if catalogue_type == "product":
320
- # Simplified stock condition logic
321
- filter_criteria["$and"] = [
322
- filter_criteria.get("$and", []),
323
- {
324
- "$or": [
325
- {"allow_negative_stock": True},
326
- {
327
- "$and": [
328
- {"allow_negative_stock": False},
329
- {"stock_on_hand": {"$gt": 0}}
330
- ]
331
- }
332
- ]
333
- }
334
- ]'''
335
-
336
- return filter_criteria
337
-
338
-
339
- def _generate_pos_cache_key(
340
- merchant_id: str,
341
- branch_id: Optional[str],
342
- catalogue_type: Optional[str],
343
- search_query: Optional[str]
344
- ) -> str:
345
- """
346
- Generate a unique cache key for POS catalogue queries.
347
-
348
- Args:
349
- merchant_id: Merchant identifier
350
- branch_id: Branch identifier (optional)
351
- catalogue_type: Type of catalogue (product/service)
352
- search_query: Search query string
353
-
354
- Returns:
355
- str: Unique cache key for the query
356
- """
357
- # Normalize search query for consistent caching
358
- normalized_search = search_query.strip().lower() if search_query else ""
359
- normalized_type = catalogue_type.lower() if catalogue_type else "all"
360
- branch_suffix = f":{branch_id}" if branch_id else ":all_branches"
361
-
362
- # Create cache key with all relevant parameters
363
- cache_key = f"pos_catalogue:{merchant_id}{branch_suffix}:{normalized_type}:{normalized_search}"
364
-
365
- return cache_key
366
-
367
-
368
  @router.delete("/", status_code=200)
369
  async def delete_catalogue_item(
370
- catalogue_id: str = Query(...,
371
- description="Catalogue ID as the primary identifier"),
372
  current_user: dict = Depends(require_delete_catalogue_permission),
373
  correlation_id: str = Depends(get_request_id)
374
  ) -> Dict[str, Any]:
375
- """
376
- Delete a catalogue item based on merchant_id, location_id, and catalogue_id.
377
-
378
- Args:
379
- catalogue_id (str): The catalogue ID of the item to delete.
380
-
381
- Returns:
382
- dict: Success message or error message if the item was not found.
383
-
384
- Raises:
385
- HTTPException:
386
- - 400 if any of the IDs are invalid.
387
- - 404 if the item is not found.
388
- - 500 if an error occurs during the deletion.
389
- """
390
  start_time = time.time()
391
  try:
392
  merchant_id = current_user.get("merchant_id")
393
- filter_criteria = {
394
- "merchant_id": merchant_id,
395
- "catalogue_id": catalogue_id,
396
- }
397
  response = await CatalogueService.delete_item(filter_criteria)
398
  if response["message"] == "Catalogue item deleted":
399
- logger.info(
400
- f"/catalogue {time.time() - start_time:.2f}s | correlation_id={correlation_id}")
401
  return {**response, "correlation_id": correlation_id}
402
  else:
403
- raise HTTPException(
404
- status_code=404, detail="Catalogue item not found")
405
  except ValueError as ve:
406
- logger.error(
407
- f"ValueError while deleting item: {ve} | correlation_id={correlation_id}", exc_info=True)
408
  raise HTTPException(status_code=400, detail=str(ve))
409
  except RuntimeError as re:
410
- logger.error(
411
- f"RuntimeError while deleting item: {re} | correlation_id={correlation_id}", exc_info=True)
412
  raise HTTPException(status_code=500, detail=str(re))
413
 
414
 
@@ -419,15 +208,12 @@ async def get_catalogue_inventory(
419
  current_user: dict = Depends(require_view_catalogue_permission),
420
  correlation_id: str = Depends(get_request_id)
421
  ) -> Dict[str, Any]:
422
- """
423
- Get inventory details for a specific catalogue item.
424
- """
425
  start_time = time.time()
426
  try:
427
  merchant_id = current_user.get("merchant_id")
428
  result = await CatalogueService.get_inventory_by_id(merchant_id, catalogue_id)
429
- logger.info(
430
- f"/catalogue/inventory completed in {time.time() - start_time:.2f}s | correlation_id={correlation_id}")
431
  return {**result, "correlation_id": correlation_id}
432
  except ValueError as ve:
433
  raise HTTPException(status_code=400, detail=str(ve))
@@ -447,14 +233,11 @@ async def update_catalogue_inventory(
447
  try:
448
  merchant_id = current_user.get("merchant_id")
449
  data["updated_by"] = current_user["associate_id"]
450
- # update branch_inventory in postgressql
451
  result = await catalogue_service.CatalogueService.update_catalogue_inventory(catalogue_id, branch_id, merchant_id, data)
452
- logger.info(
453
- f"/catalogue/inventory/{catalogue_id} completed in {time.time() - start_time:.2f}s | correlation_id={correlation_id}")
454
  return {**result, "correlation_id": correlation_id}
455
  except RuntimeError as re:
456
- logger.error(
457
- f"RuntimeError while updating catalogue: {re} | correlation_id={correlation_id}", exc_info=True)
458
  raise HTTPException(status_code=500, detail=str(re))
459
 
460
 
@@ -464,9 +247,7 @@ async def get_catalogue(
464
  current_user: dict = Depends(require_view_catalogue_permission),
465
  correlation_id: str = Depends(get_request_id)
466
  ) -> Dict[str, Any]:
467
- """
468
- Fetches information for a single catalogue item by their ID.
469
- """
470
  start_time = time.time()
471
  try:
472
  merchant_id = current_user.get("merchant_id")
@@ -474,19 +255,15 @@ async def get_catalogue(
474
  if not catalogue_data:
475
  raise HTTPException(status_code=404, detail="catalogue not found")
476
  result = catalogue_utils.sanitize_document_for_mongo(catalogue_data)
477
- logger.info(
478
- f"/catalogue/ /items/{id} completed in {time.time() - start_time:.2f}s | correlation_id={correlation_id}")
479
  return {**result, "correlation_id": correlation_id}
480
 
481
  except HTTPException as http_err:
482
- # Log specific HTTP exceptions without re-wrapping them
483
- logger.warning(
484
- f"catalogue retrieval failed: {http_err.detail} | id={id}, merchant_id={merchant_id} | correlation_id={correlation_id}")
485
  raise
486
 
487
  except Exception as e:
488
- logger.error(
489
- f"Unexpected error while fetching catalogue | id={id}, merchant_id={merchant_id}: {e} | correlation_id={correlation_id}", exc_info=True)
490
  raise HTTPException(status_code=500, detail=INTERNAL_SERVER_ERROR)
491
 
492
 
@@ -500,7 +277,6 @@ async def bulk_import_catalogue_items(
500
  try:
501
  merchant_id = current_user["merchant_id"]
502
  created_by = current_user["associate_id"]
503
- # Add merchant_id and created_by to each item
504
  items = []
505
  for item in payload.root:
506
  item_dict = item.model_dump()
@@ -509,14 +285,11 @@ async def bulk_import_catalogue_items(
509
  item_dict["created_by"] = created_by
510
  items.append(item_dict)
511
  result = await CatalogueService.bulk_import(items)
512
- logger.info(
513
- f"/catalogue/items/bulk-import completed in {time.time() - start_time:.2f}s | correlation_id={correlation_id}")
514
  return {"message": "Catalogue items imported successfully", "inserted_count": result, "correlation_id": correlation_id}
515
  except Exception as e:
516
- logger.error(
517
- f"Error during file upload: {e} | correlation_id={correlation_id}", exc_info=True)
518
- raise HTTPException(
519
- status_code=500, detail="Failed to process the file.")
520
 
521
 
522
  @router.post("/items/bulk-upload-file", response_model=BulkUploadResponse)
@@ -528,47 +301,39 @@ async def bulk_upload_catalogues_file(
528
  start_time = time.time()
529
  try:
530
  file_ext = file.filename.split('.')[-1].lower()
531
- logger.info(f"Received file: {file.filename} with type: {file_ext}")
532
 
533
  content = await file.read()
534
- records = []
535
 
536
  if file_ext == "csv":
537
  try:
538
  df = pd.read_csv(StringIO(content.decode("utf-8")))
539
  except Exception as e:
540
- logger.error(f"Failed to parse CSV file: {e}")
541
- raise HTTPException(
542
- status_code=400, detail="Invalid CSV format.")
543
  elif file_ext in ("xlsx", "xls"):
544
  try:
545
  df = pd.read_excel(BytesIO(content))
546
  except Exception as e:
547
- logger.error(f"Failed to parse Excel file: {e}")
548
- raise HTTPException(
549
- status_code=400, detail="Invalid Excel format.")
550
  else:
551
- raise HTTPException(
552
- status_code=400, detail="Unsupported file type. Use CSV or Excel.")
553
 
554
  records = df.fillna("").to_dict(orient="records")
555
 
556
  if not records:
557
- raise HTTPException(
558
- status_code=400, detail="Uploaded file is empty or invalid.")
559
 
560
- logger.info(f"Parsed {len(records)} records from file.")
561
 
562
  result = await CatalogueService.bulk_upload_catalogues(records, current_user)
563
- logger.info(
564
- f"/catalogue/items/bulk-upload-file completed in {time.time() - start_time:.2f}s | correlation_id={correlation_id}")
565
  return {**result, "correlation_id": correlation_id}
566
 
567
  except Exception as e:
568
- logger.error(
569
- f"Error during file upload: {e} | correlation_id={correlation_id}", exc_info=True)
570
- raise HTTPException(
571
- status_code=500, detail="Failed to process the file.")
572
 
573
 
574
  @router.get("/info/widgets", status_code=200)
@@ -582,28 +347,21 @@ async def get_info_widgets(
582
  associate_id = current_user["associate_id"]
583
  branch_id = current_user.get("branch_id")
584
 
585
- logger.info(
586
- f"Fetching info widgets for merchant_id={merchant_id}, associate_id={associate_id}, branch_id={branch_id} | correlation_id={correlation_id}")
587
 
588
  result = await CatalogueService.get_info_widget_data(merchant_id, associate_id, branch_id)
589
 
590
- # Handle case where result is None
591
  if result is None:
592
- logger.warning(
593
- f"No widget data found for merchant_id={merchant_id}, associate_id={associate_id} | correlation_id={correlation_id}")
594
  return {"data": {}, "correlation_id": correlation_id}
595
 
596
- # Handle case where result is not a dictionary
597
  if not isinstance(result, dict):
598
- logger.warning(
599
- f"Widget data is not a dictionary: {type(result)} | correlation_id={correlation_id}")
600
  return {"data": result, "correlation_id": correlation_id}
601
- logger.info(
602
- f"/catalogue/info/widgets completed in {time.time() - start_time:.2f}s | correlation_id={correlation_id}")
603
  return {**result, "correlation_id": correlation_id}
604
 
605
  except Exception as e:
606
- logger.error(
607
- f"Error while fetching info widgets: {e} | correlation_id={correlation_id}", exc_info=True)
608
- raise HTTPException(
609
- status_code=500, detail="Unable to fetch dashboard widgets")
 
 
 
1
  # Standard library imports
 
2
  import re
3
  import time
4
  from io import BytesIO, StringIO
 
7
  import pandas as pd
8
  from fastapi import APIRouter, Body, Depends, File, HTTPException, Query, Path, UploadFile
9
  from typing import Optional, Union, Dict, Any
10
+ from insightfy_utils.logging import get_logger
11
 
12
  # Local imports
13
  from app.nosql import redis_client
 
21
 
22
  # Router Initialization
23
  router = APIRouter()
24
+ logger = get_logger(__name__)
25
 
26
 
27
  # Constants
 
60
  data.merchant_id = current_user["merchant_id"]
61
  data.created_by = current_user["associate_id"]
62
  catelogue_id = await CatalogueService.create_catalogue_item(data)
63
+ logger.info("Catalogue create completed", extra={"duration": time.time() - start_time, "correlation_id": correlation_id})
64
  return {"catelogue_id": catelogue_id, "correlation_id": correlation_id}
65
  except RuntimeError as re:
66
+ logger.error("RuntimeError while creating catalogue item", extra={"correlation_id": correlation_id}, exc_info=re)
67
  raise HTTPException(status_code=500, detail=str(re))
68
 
69
 
 
79
  merchant_id = current_user["merchant_id"]
80
  data["updated_by"] = current_user["associate_id"]
81
 
82
+ result=await catalogue_service.CatalogueService.update_catalogue_item(catalogue_id, data, merchant_id)
83
 
84
+ logger.info("Catalogue update completed", extra={"duration": time.time() - start_time, "correlation_id": correlation_id})
85
  result_data = catalogue_utils.sanitize_document_for_mongo(result)
86
  return {**result_data, "correlation_id": correlation_id}
87
 
88
  except HTTPException as he:
89
+ logger.warning("HTTPException while updating catalogue", extra={"detail": he.detail, "correlation_id": correlation_id})
 
 
 
90
  raise he
91
  except RuntimeError as re:
92
+ logger.error("RuntimeError while updating catalogue", extra={"correlation_id": correlation_id}, exc_info=re)
 
 
 
93
  raise HTTPException(status_code=500, detail=str(re))
94
  @router.post("/list", status_code=200)
95
  async def list_catalogue_items(
 
97
  current_user: dict = Depends(require_view_catalogue_permission),
98
  correlation_id: str = Depends(get_request_id)
99
  ) -> Dict[str, Any]:
100
+ """List catalogue items with optional filters, pagination, and field projection."""
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
101
  start_time = time.time()
102
  try:
 
103
  merchant_id = current_user.get("merchant_id")
104
  branch_id = current_user.get("branch_id")
105
  associate_id = current_user["associate_id"]
 
118
  branch_id=branch_id,
119
  projection_list=payload.projection_list
120
  )
121
+ logger.info("Catalogue list completed", extra={"duration": time.time() - start_time, "correlation_id": correlation_id})
 
122
  return {**result, "correlation_id": correlation_id}
123
 
124
  except RuntimeError as re:
125
+ logger.error("RuntimeError while listing items", extra={"correlation_id": correlation_id}, exc_info=re)
 
126
  raise HTTPException(status_code=500, detail=str(re))
127
  except Exception as e:
128
+ logger.error("Error in list_catalogues", extra={"correlation_id": correlation_id}, exc_info=e)
 
129
  raise HTTPException(status_code=500, detail=INTERNAL_SERVER_ERROR)
130
 
131
 
132
  @router.get("/POS/", status_code=200)
133
  async def list_pos_catalogue_items(
134
+ catalogue_type: Optional[str] = Query(None, description="Catalogue Type (product or service)"),
135
+ search_query: Optional[str] = Query(None, description="Free search across name, alias_code, ean_code, and sku_code"),
 
 
136
  current_user: dict = Depends(require_view_catalogue_permission),
137
  correlation_id: str = Depends(get_request_id)
138
  ) -> Dict[str, Any]:
139
+ """Optimized POS catalogue items endpoint with Redis caching."""
 
 
 
 
 
 
 
140
  start_time = time.time()
141
 
142
  try:
143
  merchant_id = current_user.get("merchant_id")
144
  branch_id = current_user.get("branch_id")
145
 
146
+ cache_key = _generate_pos_cache_key(merchant_id, branch_id, catalogue_type, search_query)
147
+ filter_criteria = _build_pos_filter_criteria(merchant_id, catalogue_type, search_query)
 
148
 
 
 
 
 
 
 
149
  async def fetch_pos_catalogue_data():
150
+ return await CatalogueService.get_pos_catalogue_aggregated(filter_criteria, branch_id)
 
 
 
151
 
 
152
  cache_exists = False
 
153
  try:
 
154
  cache_exists = await redis_client.exists(cache_key)
155
  except Exception as redis_error:
156
+ logger.warning("Redis connection error, proceeding without cache", extra={"error": str(redis_error), "correlation_id": correlation_id})
 
 
 
157
  result = await fetch_pos_catalogue_data()
158
+ response = {"status": "success", "data": result, "correlation_id": correlation_id, "cached": False}
159
+ logger.info("POS catalogue completed", extra={"duration": time.time() - start_time, "cached": False, "correlation_id": correlation_id})
 
 
 
 
 
 
160
  return response
161
 
162
+ result = await get_or_set_cache(key=cache_key, fetch_func=fetch_pos_catalogue_data, expiry=3600)
 
 
 
 
 
 
 
 
 
 
 
 
163
 
164
+ response = {"status": "success", "data": result, "correlation_id": correlation_id, "cached": bool(cache_exists)}
165
+ logger.info("POS catalogue completed", extra={"duration": time.time() - start_time, "cached": bool(cache_exists), "correlation_id": correlation_id})
166
  return response
167
 
168
  except ValueError as ve:
169
+ logger.error("Validation error", extra={"error": str(ve), "correlation_id": correlation_id})
 
170
  raise HTTPException(status_code=400, detail=str(ve))
171
  except RuntimeError as re:
172
+ logger.error("RuntimeError while listing items", extra={"correlation_id": correlation_id}, exc_info=re)
 
173
  raise HTTPException(status_code=500, detail=str(re))
174
  except Exception as e:
175
+ logger.error("Unexpected error while listing items", extra={"correlation_id": correlation_id}, exc_info=e)
 
176
  raise HTTPException(status_code=500, detail=INTERNAL_SERVER_ERROR)
177
 
178
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
179
  @router.delete("/", status_code=200)
180
  async def delete_catalogue_item(
181
+ catalogue_id: str = Query(..., description="Catalogue ID as the primary identifier"),
 
182
  current_user: dict = Depends(require_delete_catalogue_permission),
183
  correlation_id: str = Depends(get_request_id)
184
  ) -> Dict[str, Any]:
185
+ """Delete a catalogue item."""
 
 
 
 
 
 
 
 
 
 
 
 
 
 
186
  start_time = time.time()
187
  try:
188
  merchant_id = current_user.get("merchant_id")
189
+ filter_criteria = {"merchant_id": merchant_id, "catalogue_id": catalogue_id}
 
 
 
190
  response = await CatalogueService.delete_item(filter_criteria)
191
  if response["message"] == "Catalogue item deleted":
192
+ logger.info("Catalogue deleted", extra={"duration": time.time() - start_time, "correlation_id": correlation_id})
 
193
  return {**response, "correlation_id": correlation_id}
194
  else:
195
+ raise HTTPException(status_code=404, detail="Catalogue item not found")
 
196
  except ValueError as ve:
197
+ logger.error("ValueError while deleting item", extra={"correlation_id": correlation_id}, exc_info=ve)
 
198
  raise HTTPException(status_code=400, detail=str(ve))
199
  except RuntimeError as re:
200
+ logger.error("RuntimeError while deleting item", extra={"correlation_id": correlation_id}, exc_info=re)
 
201
  raise HTTPException(status_code=500, detail=str(re))
202
 
203
 
 
208
  current_user: dict = Depends(require_view_catalogue_permission),
209
  correlation_id: str = Depends(get_request_id)
210
  ) -> Dict[str, Any]:
211
+ """Get inventory details for a specific catalogue item."""
 
 
212
  start_time = time.time()
213
  try:
214
  merchant_id = current_user.get("merchant_id")
215
  result = await CatalogueService.get_inventory_by_id(merchant_id, catalogue_id)
216
+ logger.info("Catalogue inventory retrieved", extra={"duration": time.time() - start_time, "correlation_id": correlation_id})
 
217
  return {**result, "correlation_id": correlation_id}
218
  except ValueError as ve:
219
  raise HTTPException(status_code=400, detail=str(ve))
 
233
  try:
234
  merchant_id = current_user.get("merchant_id")
235
  data["updated_by"] = current_user["associate_id"]
 
236
  result = await catalogue_service.CatalogueService.update_catalogue_inventory(catalogue_id, branch_id, merchant_id, data)
237
+ logger.info("Catalogue inventory updated", extra={"catalogue_id": catalogue_id, "duration": time.time() - start_time, "correlation_id": correlation_id})
 
238
  return {**result, "correlation_id": correlation_id}
239
  except RuntimeError as re:
240
+ logger.error("RuntimeError while updating catalogue", extra={"correlation_id": correlation_id}, exc_info=re)
 
241
  raise HTTPException(status_code=500, detail=str(re))
242
 
243
 
 
247
  current_user: dict = Depends(require_view_catalogue_permission),
248
  correlation_id: str = Depends(get_request_id)
249
  ) -> Dict[str, Any]:
250
+ """Fetches information for a single catalogue item by their ID."""
 
 
251
  start_time = time.time()
252
  try:
253
  merchant_id = current_user.get("merchant_id")
 
255
  if not catalogue_data:
256
  raise HTTPException(status_code=404, detail="catalogue not found")
257
  result = catalogue_utils.sanitize_document_for_mongo(catalogue_data)
258
+ logger.info("Catalogue retrieved", extra={"catalogue_id": catalogue_id, "duration": time.time() - start_time, "correlation_id": correlation_id})
 
259
  return {**result, "correlation_id": correlation_id}
260
 
261
  except HTTPException as http_err:
262
+ logger.warning("Catalogue retrieval failed", extra={"detail": http_err.detail, "catalogue_id": catalogue_id, "merchant_id": merchant_id, "correlation_id": correlation_id})
 
 
263
  raise
264
 
265
  except Exception as e:
266
+ logger.error("Unexpected error while fetching catalogue", extra={"catalogue_id": catalogue_id, "merchant_id": merchant_id, "correlation_id": correlation_id}, exc_info=e)
 
267
  raise HTTPException(status_code=500, detail=INTERNAL_SERVER_ERROR)
268
 
269
 
 
277
  try:
278
  merchant_id = current_user["merchant_id"]
279
  created_by = current_user["associate_id"]
 
280
  items = []
281
  for item in payload.root:
282
  item_dict = item.model_dump()
 
285
  item_dict["created_by"] = created_by
286
  items.append(item_dict)
287
  result = await CatalogueService.bulk_import(items)
288
+ logger.info("Bulk import completed", extra={"duration": time.time() - start_time, "correlation_id": correlation_id})
 
289
  return {"message": "Catalogue items imported successfully", "inserted_count": result, "correlation_id": correlation_id}
290
  except Exception as e:
291
+ logger.error("Error during bulk import", extra={"correlation_id": correlation_id}, exc_info=e)
292
+ raise HTTPException(status_code=500, detail="Failed to process the file.")
 
 
293
 
294
 
295
  @router.post("/items/bulk-upload-file", response_model=BulkUploadResponse)
 
301
  start_time = time.time()
302
  try:
303
  file_ext = file.filename.split('.')[-1].lower()
304
+ logger.info("Received file", extra={"filename": file.filename, "type": file_ext})
305
 
306
  content = await file.read()
 
307
 
308
  if file_ext == "csv":
309
  try:
310
  df = pd.read_csv(StringIO(content.decode("utf-8")))
311
  except Exception as e:
312
+ logger.error("Failed to parse CSV file", exc_info=e)
313
+ raise HTTPException(status_code=400, detail="Invalid CSV format.")
 
314
  elif file_ext in ("xlsx", "xls"):
315
  try:
316
  df = pd.read_excel(BytesIO(content))
317
  except Exception as e:
318
+ logger.error("Failed to parse Excel file", exc_info=e)
319
+ raise HTTPException(status_code=400, detail="Invalid Excel format.")
 
320
  else:
321
+ raise HTTPException(status_code=400, detail="Unsupported file type. Use CSV or Excel.")
 
322
 
323
  records = df.fillna("").to_dict(orient="records")
324
 
325
  if not records:
326
+ raise HTTPException(status_code=400, detail="Uploaded file is empty or invalid.")
 
327
 
328
+ logger.info("Parsed records from file", extra={"count": len(records)})
329
 
330
  result = await CatalogueService.bulk_upload_catalogues(records, current_user)
331
+ logger.info("Bulk upload completed", extra={"duration": time.time() - start_time, "correlation_id": correlation_id})
 
332
  return {**result, "correlation_id": correlation_id}
333
 
334
  except Exception as e:
335
+ logger.error("Error during file upload", extra={"correlation_id": correlation_id}, exc_info=e)
336
+ raise HTTPException(status_code=500, detail="Failed to process the file.")
 
 
337
 
338
 
339
  @router.get("/info/widgets", status_code=200)
 
347
  associate_id = current_user["associate_id"]
348
  branch_id = current_user.get("branch_id")
349
 
350
+ logger.info("Fetching info widgets", extra={"merchant_id": merchant_id, "associate_id": associate_id, "branch_id": branch_id, "correlation_id": correlation_id})
 
351
 
352
  result = await CatalogueService.get_info_widget_data(merchant_id, associate_id, branch_id)
353
 
 
354
  if result is None:
355
+ logger.warning("No widget data found", extra={"merchant_id": merchant_id, "associate_id": associate_id, "correlation_id": correlation_id})
 
356
  return {"data": {}, "correlation_id": correlation_id}
357
 
 
358
  if not isinstance(result, dict):
359
+ logger.warning("Widget data is not a dictionary", extra={"type": type(result).__name__, "correlation_id": correlation_id})
 
360
  return {"data": result, "correlation_id": correlation_id}
361
+
362
+ logger.info("Info widgets completed", extra={"duration": time.time() - start_time, "correlation_id": correlation_id})
363
  return {**result, "correlation_id": correlation_id}
364
 
365
  except Exception as e:
366
+ logger.error("Error while fetching info widgets", extra={"correlation_id": correlation_id}, exc_info=e)
367
+ raise HTTPException(status_code=500, detail="Unable to fetch dashboard widgets")
 
 
app/routers/gift_card_router.py CHANGED
@@ -1,9 +1,8 @@
1
-
2
  from datetime import datetime, timezone
3
- import logging
4
  import time
5
  from typing import Any, Dict, Optional
6
  from fastapi import APIRouter, Body, Depends, HTTPException, Query, Path
 
7
 
8
  from app.repositories.gift_card_repository import GiftCardRepository
9
  from app.services.gift_card_service import GiftCardService
@@ -24,7 +23,7 @@ from app.utils.request_id_utils import get_request_id
24
 
25
  # Router Initialization
26
  router = APIRouter()
27
- logger = logging.getLogger(__name__)
28
 
29
  # Constants
30
  INTERNAL_SERVER_ERROR = "Internal server error"
@@ -48,71 +47,37 @@ async def require_delete_giftcard_permission(current_user: dict = Depends(get_cu
48
 
49
  # Route Handlers
50
 
51
- @router.post("/", status_code=201,response_model=GiftCardTemplateCreateResponse)
52
  async def create_gift_card_template(
53
  template_data: GiftCardTemplateCreate,
54
  current_user: dict = Depends(require_create_giftcard_permission),
55
  correlation_id: str = Depends(get_request_id)
56
  ) -> GiftCardTemplateCreateResponse:
57
- """
58
- Create a new gift card template (blueprint).
59
-
60
- This endpoint allows merchants to create gift card templates that define
61
- the characteristics and rules for gift cards that can be issued later.
62
-
63
- **Business Rules:**
64
- - Physical cards must have max_issues specified
65
- - Physical cards cannot be reloadable
66
- - Either allow_custom_amount must be True or predefined_amounts must be provided
67
- - Template names must be unique within a merchant
68
-
69
- **Example Request:**
70
- ```json
71
- {
72
- "name": "Birthday Card",
73
- "description": "Special card for birthdays",
74
- "delivery_type": "digital",
75
- "currency": "INR",
76
- "validity_days": 365,
77
- "allow_custom_amount": true,
78
- "predefined_amounts": [500, 1000, 2000],
79
- "reloadable": false,
80
- "allow_partial_redemption": true,
81
- "requires_pin": true,
82
- "requires_otp": false,
83
- "status": "active",
84
- "design_template": {
85
- "theme": "blue",
86
- "image_url": "https://..."
87
- }
88
- }
89
- ```
90
- """
91
  start_time = time.time()
92
  try:
93
  merchant_id = current_user.get("merchant_id")
94
  user_id = current_user.get("associate_id")
95
- branch_id=current_user.get("branch_id")
96
 
97
  if not merchant_id:
98
  raise HTTPException(status_code=400, detail=MERCHANT_ID_REQUIRED)
99
  if not user_id:
100
  raise HTTPException(status_code=400, detail=USER_ID_REQUIRED)
101
 
102
- logger.info(f"Creating gift card template for merchant {merchant_id} by user {user_id}")
103
 
104
- result = await GiftCardService.create_template(template_data, merchant_id, user_id,branch_id)
105
 
106
- logger.info(f"/customer/ Gift card template created successfully: {result} in {time.time() - start_time:.2f}s | correlation_id={correlation_id}")
107
- return {"template_id": result,"message":"Gift card template created successfully" , "correlation_id": correlation_id}
108
 
109
  except HTTPException:
110
  raise
111
  except Exception as e:
112
- logger.error(f"Unexpected error creating gift card template: {e} | correlation_id={correlation_id}", exc_info=True)
113
  raise HTTPException(status_code=500, detail=INTERNAL_SERVER_ERROR)
114
 
115
-
116
  @router.post("/list", response_model=GiftCardTemplateListResponse)
117
  async def list_gift_card_templates(
118
  status: Optional[TemplateStatus] = Query(None, description="Filter by template status"),
@@ -125,28 +90,7 @@ async def list_gift_card_templates(
125
  current_user: dict = Depends(require_view_giftcard_permission),
126
  correlation_id: str = Depends(get_request_id)
127
  ) -> GiftCardTemplateListResponse:
128
- """
129
-
130
- List all gift card templates with optional filtering.
131
-
132
- This endpoint returns a paginated list of gift card templates for the merchant
133
- with optional filtering by various criteria.
134
-
135
- **Query Parameters:**
136
- - **status**: Filter by template status (active, inactive, archived)
137
- - **delivery_type**: Filter by delivery type (digital, physical)
138
- - **currency**: Filter by currency (INR, USD, EUR, GBP)
139
- - **branch_id**: Filter templates available for specific branch
140
- - **campaign_id**: Filter templates linked to specific campaign
141
- - **created_by**: Filter by creator user ID
142
- - **offset**: Number of items to skip (for pagination)
143
- - **limit**: Maximum items to return (1-1000)
144
-
145
- **Response includes:**
146
- - List of templates matching the criteria
147
- - Total count of matching templates
148
- - Pagination information
149
- """
150
  start_time = time.time()
151
  try:
152
  merchant_id = current_user.get("merchant_id")
@@ -154,7 +98,6 @@ async def list_gift_card_templates(
154
  if not merchant_id:
155
  raise HTTPException(status_code=400, detail=MERCHANT_ID_REQUIRED)
156
 
157
- # Build filter object
158
  filters = GiftCardTemplateFilter(
159
  status=status,
160
  delivery_type=delivery_type,
@@ -162,16 +105,16 @@ async def list_gift_card_templates(
162
  branch_id=branch_id,
163
  filter=filter
164
  )
165
- logger.info(f"Fetching gift card templates for merchant {merchant_id}")
166
 
167
  result = await GiftCardService.list_gift_card(merchant_id, filters, offset, limit)
168
- logger.info(f"/customer/list Retrieved {len(result.templates)} gift card templates in {time.time() - start_time:.2f}s | correlation_id={correlation_id}")
169
  return result
170
 
171
  except HTTPException:
172
  raise
173
  except Exception as e:
174
- logger.error(f"Unexpected error fetching gift card templates: {e} correlation_id={correlation_id}", exc_info=True)
175
  raise HTTPException(status_code=500, detail=INTERNAL_SERVER_ERROR)
176
 
177
  @router.put("/{template_id}")
@@ -181,34 +124,7 @@ async def update_gift_card_template(
181
  current_user: dict = Depends(require_update_giftcard_permission),
182
  correlation_id: str = Depends(get_request_id)
183
  ) -> Dict[str, Any]:
184
- """
185
- Update an existing gift card template.
186
-
187
- This endpoint allows updating specific fields of a gift card template.
188
- Some fields have restrictions based on business rules.
189
-
190
- **Path Parameters:**
191
- - **template_id**: The unique identifier of the gift card template
192
-
193
- **Business Rules:**
194
- - Cannot change delivery_type of existing template
195
- - Physical cards cannot be made reloadable
196
- - Cannot reduce max_issues below current issued_count
197
- - Physical cards must always have max_issues specified
198
-
199
- **Common Use Cases:**
200
- - Deactivate template: `{"status": "inactive"}`
201
- - Update predefined amounts: `{"predefined_amounts": [1000, 2000]}`
202
- - Change design: `{"design_template": {"theme": "gold", "image_url": "..."}}`
203
-
204
- **Example Request:**
205
- ```json
206
- {
207
- "status": "inactive",
208
- "predefined_amounts": [1000, 2000]
209
- }
210
- ```
211
- """
212
  start_time = time.time()
213
  try:
214
  merchant_id = current_user.get("merchant_id")
@@ -216,48 +132,29 @@ async def update_gift_card_template(
216
  if not merchant_id:
217
  raise HTTPException(status_code=400, detail=MERCHANT_ID_REQUIRED)
218
 
219
- logger.info(f"Updating gift card template {template_id} for merchant {merchant_id}")
220
 
221
  result = await GiftCardService.update_template(template_id, update_data, merchant_id)
222
 
223
- logger.info(f"Gift card template updated successfully: {template_id}")
224
- logger.info(f"/customer/{template_id} Gift card template updated successfully: {template_id} in {time.time() - start_time:.2f}s | correlation_id={correlation_id}")
225
- return {"status": "success","id": result.id,"data":result, "correlation_id": correlation_id}
226
 
227
  except HTTPException:
228
  raise
229
  except Exception as e:
230
- logger.error(f"Unexpected error updating gift card template {template_id}: {e}", exc_info=True)
231
  raise HTTPException(status_code=500, detail=INTERNAL_SERVER_ERROR)
232
 
233
-
234
  @router.delete("/{template_id}")
235
  async def delete_gift_card_template(
236
  template_id: str = Path(..., description=TEMPLATE_ID_DESCRIPTION),
237
  current_user: dict = Depends(require_delete_giftcard_permission),
238
  correlation_id: str = Depends(get_request_id)
239
  ) -> dict:
240
- """
241
- Delete a gift card template.
242
-
243
- This endpoint deletes a gift card template. Templates can only be deleted
244
- if no gift cards have been issued from them.
245
-
246
- **Path Parameters:**
247
- - **template_id**: The unique identifier of the gift card template
248
-
249
- **Business Rules:**
250
- - Cannot delete templates that have issued_count > 0
251
- - Consider deactivating instead of deleting for templates with issued cards
252
-
253
- **Security:**
254
- - Only allows deletion of templates that belong to the authenticated merchant
255
- """
256
  start_time = time.time()
257
  try:
258
  merchant_id = current_user.get("merchant_id")
259
-
260
-
261
 
262
  if not merchant_id:
263
  raise HTTPException(status_code=400, detail=MERCHANT_ID_REQUIRED)
@@ -265,16 +162,10 @@ async def delete_gift_card_template(
265
  template = await GiftCardService.get_template_by_id(template_id, merchant_id)
266
 
267
  if template["merchant_id"] != merchant_id:
268
- raise HTTPException(
269
- status_code=403,
270
- detail="You are not authorized to delete this template."
271
- )
272
 
273
  if template.get("issued_count", 0) > 0:
274
- data={
275
- "status":"inactive",
276
- "updated_at": datetime.now(timezone.utc)
277
- }
278
  success = await GiftCardRepository.update_template(template_id, data)
279
  return {
280
  "status": "success",
@@ -282,130 +173,87 @@ async def delete_gift_card_template(
282
  "message": "Template has issued cards and was deactivated instead of deleted."
283
  }
284
 
285
- logger.info(f"Deleting gift card template {template_id} for merchant {merchant_id}")
286
 
287
  result = await GiftCardService.delete_template(template_id, merchant_id)
288
 
289
- logger.info(f"Gift card template deleted successfully: {template_id} in {time.time() - start_time:.2f}s | correlation_id={correlation_id}")
290
- return {"status": "success","message": f"Template {template_id} deleted successfully", "correlation_id": correlation_id}
291
 
292
  except HTTPException:
293
  raise
294
  except Exception as e:
295
- logger.error(f"Unexpected error deleting gift card template {template_id}: {e}", exc_info=True)
296
  raise HTTPException(status_code=500, detail=INTERNAL_SERVER_ERROR)
297
 
298
-
299
  @router.get("/{template_id}", response_model=GiftCardTemplateResponse)
300
  async def get_gift_card_template(
301
  template_id: str = Path(..., description=TEMPLATE_ID_DESCRIPTION),
302
  current_user: dict = Depends(require_view_giftcard_permission)
303
  ) -> GiftCardTemplateResponse:
304
- """
305
- Get a specific gift card template by ID.
306
-
307
- This endpoint returns detailed information about a specific gift card template.
308
-
309
- **Path Parameters:**
310
- - **template_id**: The unique identifier of the gift card template
311
-
312
- **Security:**
313
- - Only returns templates that belong to the authenticated merchant
314
- - Returns 404 if template doesn't exist or doesn't belong to merchant
315
- """
316
  try:
317
  merchant_id = current_user.get("merchant_id")
318
 
319
  if not merchant_id:
320
  raise HTTPException(status_code=400, detail=MERCHANT_ID_REQUIRED)
321
 
322
- logger.info(f"Fetching gift card template {template_id} for merchant {merchant_id}")
323
 
324
  result = await GiftCardService.get_template_by_id(template_id, merchant_id)
325
 
326
- logger.info(f"Retrieved gift card template: {template_id}")
327
  return result
328
 
329
  except HTTPException:
330
  raise
331
  except Exception as e:
332
- logger.error(f"Unexpected error fetching gift card template {template_id}: {e}", exc_info=True)
333
  raise HTTPException(status_code=500, detail=INTERNAL_SERVER_ERROR)
334
 
335
-
336
-
337
-
338
- # Additional utility endpoints
339
-
340
  @router.get("/{template_id}/stock", response_model=GiftCardStockResponse)
341
  async def check_gift_card_template_stock(
342
  template_id: str = Path(..., description=TEMPLATE_ID_DESCRIPTION),
343
  current_user: dict = Depends(require_view_giftcard_permission)
344
  ) -> GiftCardStockResponse:
345
- """
346
- Check stock availability for a gift card template.
347
-
348
- This endpoint returns information about whether a template has available
349
- stock for issuing new gift cards.
350
-
351
- **Path Parameters:**
352
- - **template_id**: The unique identifier of the gift card template
353
-
354
- **Response:**
355
- - **available**: Whether cards can be issued from this template
356
- - **unlimited**: Whether the template has unlimited stock (digital cards)
357
- - **remaining**: Number of cards remaining (null for unlimited)
358
- - **max_issues**: Maximum number of cards that can be issued
359
- - **issued_count**: Number of cards already issued
360
- """
361
  try:
362
  merchant_id = current_user.get("merchant_id")
363
 
364
  if not merchant_id:
365
  raise HTTPException(status_code=400, detail=MERCHANT_ID_REQUIRED)
366
 
367
- logger.info(f"Checking stock for gift card template {template_id}")
368
 
369
  result = await GiftCardService.check_stock_availability(template_id, merchant_id)
370
-
371
  return result
372
 
373
  except HTTPException:
374
  raise
375
  except Exception as e:
376
- logger.error(f"Unexpected error checking stock for template {template_id}: {e}", exc_info=True)
377
  raise HTTPException(status_code=500, detail=INTERNAL_SERVER_ERROR)
378
 
379
-
380
  @router.get("/active/list", response_model=list[GiftCardTemplateResponse])
381
  async def get_active_gift_card_templates(
382
  current_user: dict = Depends(require_view_giftcard_permission)
383
  ) -> list[GiftCardTemplateResponse]:
384
- """
385
- Get all active gift card templates for the merchant.
386
-
387
- This is a convenience endpoint that returns only active templates,
388
- commonly used for displaying available templates to customers.
389
-
390
- **Returns:**
391
- - List of all active gift card templates for the merchant
392
- - Templates are sorted by creation date (newest first)
393
- """
394
  try:
395
  merchant_id = current_user.get("merchant_id")
396
 
397
  if not merchant_id:
398
  raise HTTPException(status_code=400, detail=MERCHANT_ID_REQUIRED)
399
 
400
- logger.info(f"Fetching active gift card templates for merchant {merchant_id}")
401
 
402
  result = await GiftCardService.get_active_templates(merchant_id)
403
 
404
- logger.info(f"Retrieved {len(result)} active gift card templates")
405
  return result
406
 
407
  except HTTPException:
408
  raise
409
  except Exception as e:
410
- logger.error(f"Unexpected error fetching active gift card templates: {e}", exc_info=True)
411
  raise HTTPException(status_code=500, detail=INTERNAL_SERVER_ERROR)
 
 
1
  from datetime import datetime, timezone
 
2
  import time
3
  from typing import Any, Dict, Optional
4
  from fastapi import APIRouter, Body, Depends, HTTPException, Query, Path
5
+ from insightfy_utils.logging import get_logger
6
 
7
  from app.repositories.gift_card_repository import GiftCardRepository
8
  from app.services.gift_card_service import GiftCardService
 
23
 
24
  # Router Initialization
25
  router = APIRouter()
26
+ logger = get_logger(__name__)
27
 
28
  # Constants
29
  INTERNAL_SERVER_ERROR = "Internal server error"
 
47
 
48
  # Route Handlers
49
 
50
+ @router.post("/", status_code=201, response_model=GiftCardTemplateCreateResponse)
51
  async def create_gift_card_template(
52
  template_data: GiftCardTemplateCreate,
53
  current_user: dict = Depends(require_create_giftcard_permission),
54
  correlation_id: str = Depends(get_request_id)
55
  ) -> GiftCardTemplateCreateResponse:
56
+ """Create a new gift card template (blueprint)."""
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
57
  start_time = time.time()
58
  try:
59
  merchant_id = current_user.get("merchant_id")
60
  user_id = current_user.get("associate_id")
61
+ branch_id = current_user.get("branch_id")
62
 
63
  if not merchant_id:
64
  raise HTTPException(status_code=400, detail=MERCHANT_ID_REQUIRED)
65
  if not user_id:
66
  raise HTTPException(status_code=400, detail=USER_ID_REQUIRED)
67
 
68
+ logger.info("Creating gift card template", extra={"merchant_id": merchant_id, "user_id": user_id})
69
 
70
+ result = await GiftCardService.create_template(template_data, merchant_id, user_id, branch_id)
71
 
72
+ logger.info("Gift card template created", extra={"template_id": result, "duration": time.time() - start_time, "correlation_id": correlation_id})
73
+ return {"template_id": result, "message": "Gift card template created successfully", "correlation_id": correlation_id}
74
 
75
  except HTTPException:
76
  raise
77
  except Exception as e:
78
+ logger.error("Unexpected error creating gift card template", extra={"correlation_id": correlation_id}, exc_info=e)
79
  raise HTTPException(status_code=500, detail=INTERNAL_SERVER_ERROR)
80
 
 
81
  @router.post("/list", response_model=GiftCardTemplateListResponse)
82
  async def list_gift_card_templates(
83
  status: Optional[TemplateStatus] = Query(None, description="Filter by template status"),
 
90
  current_user: dict = Depends(require_view_giftcard_permission),
91
  correlation_id: str = Depends(get_request_id)
92
  ) -> GiftCardTemplateListResponse:
93
+ """List all gift card templates with optional filtering."""
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
94
  start_time = time.time()
95
  try:
96
  merchant_id = current_user.get("merchant_id")
 
98
  if not merchant_id:
99
  raise HTTPException(status_code=400, detail=MERCHANT_ID_REQUIRED)
100
 
 
101
  filters = GiftCardTemplateFilter(
102
  status=status,
103
  delivery_type=delivery_type,
 
105
  branch_id=branch_id,
106
  filter=filter
107
  )
108
+ logger.info("Fetching gift card templates", extra={"merchant_id": merchant_id})
109
 
110
  result = await GiftCardService.list_gift_card(merchant_id, filters, offset, limit)
111
+ logger.info("Gift card templates retrieved", extra={"count": len(result.templates), "duration": time.time() - start_time, "correlation_id": correlation_id})
112
  return result
113
 
114
  except HTTPException:
115
  raise
116
  except Exception as e:
117
+ logger.error("Unexpected error fetching gift card templates", extra={"correlation_id": correlation_id}, exc_info=e)
118
  raise HTTPException(status_code=500, detail=INTERNAL_SERVER_ERROR)
119
 
120
  @router.put("/{template_id}")
 
124
  current_user: dict = Depends(require_update_giftcard_permission),
125
  correlation_id: str = Depends(get_request_id)
126
  ) -> Dict[str, Any]:
127
+ """Update an existing gift card template."""
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
128
  start_time = time.time()
129
  try:
130
  merchant_id = current_user.get("merchant_id")
 
132
  if not merchant_id:
133
  raise HTTPException(status_code=400, detail=MERCHANT_ID_REQUIRED)
134
 
135
+ logger.info("Updating gift card template", extra={"template_id": template_id, "merchant_id": merchant_id})
136
 
137
  result = await GiftCardService.update_template(template_id, update_data, merchant_id)
138
 
139
+ logger.info("Gift card template updated", extra={"template_id": template_id, "duration": time.time() - start_time, "correlation_id": correlation_id})
140
+ return {"status": "success", "id": result.id, "data": result, "correlation_id": correlation_id}
 
141
 
142
  except HTTPException:
143
  raise
144
  except Exception as e:
145
+ logger.error("Unexpected error updating gift card template", extra={"template_id": template_id}, exc_info=e)
146
  raise HTTPException(status_code=500, detail=INTERNAL_SERVER_ERROR)
147
 
 
148
  @router.delete("/{template_id}")
149
  async def delete_gift_card_template(
150
  template_id: str = Path(..., description=TEMPLATE_ID_DESCRIPTION),
151
  current_user: dict = Depends(require_delete_giftcard_permission),
152
  correlation_id: str = Depends(get_request_id)
153
  ) -> dict:
154
+ """Delete a gift card template."""
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
155
  start_time = time.time()
156
  try:
157
  merchant_id = current_user.get("merchant_id")
 
 
158
 
159
  if not merchant_id:
160
  raise HTTPException(status_code=400, detail=MERCHANT_ID_REQUIRED)
 
162
  template = await GiftCardService.get_template_by_id(template_id, merchant_id)
163
 
164
  if template["merchant_id"] != merchant_id:
165
+ raise HTTPException(status_code=403, detail="You are not authorized to delete this template.")
 
 
 
166
 
167
  if template.get("issued_count", 0) > 0:
168
+ data = {"status": "inactive", "updated_at": datetime.now(timezone.utc)}
 
 
 
169
  success = await GiftCardRepository.update_template(template_id, data)
170
  return {
171
  "status": "success",
 
173
  "message": "Template has issued cards and was deactivated instead of deleted."
174
  }
175
 
176
+ logger.info("Deleting gift card template", extra={"template_id": template_id, "merchant_id": merchant_id})
177
 
178
  result = await GiftCardService.delete_template(template_id, merchant_id)
179
 
180
+ logger.info("Gift card template deleted", extra={"template_id": template_id, "duration": time.time() - start_time, "correlation_id": correlation_id})
181
+ return {"status": "success", "message": f"Template {template_id} deleted successfully", "correlation_id": correlation_id}
182
 
183
  except HTTPException:
184
  raise
185
  except Exception as e:
186
+ logger.error("Unexpected error deleting gift card template", extra={"template_id": template_id}, exc_info=e)
187
  raise HTTPException(status_code=500, detail=INTERNAL_SERVER_ERROR)
188
 
 
189
  @router.get("/{template_id}", response_model=GiftCardTemplateResponse)
190
  async def get_gift_card_template(
191
  template_id: str = Path(..., description=TEMPLATE_ID_DESCRIPTION),
192
  current_user: dict = Depends(require_view_giftcard_permission)
193
  ) -> GiftCardTemplateResponse:
194
+ """Get a specific gift card template by ID."""
 
 
 
 
 
 
 
 
 
 
 
195
  try:
196
  merchant_id = current_user.get("merchant_id")
197
 
198
  if not merchant_id:
199
  raise HTTPException(status_code=400, detail=MERCHANT_ID_REQUIRED)
200
 
201
+ logger.info("Fetching gift card template", extra={"template_id": template_id, "merchant_id": merchant_id})
202
 
203
  result = await GiftCardService.get_template_by_id(template_id, merchant_id)
204
 
205
+ logger.info("Gift card template retrieved", extra={"template_id": template_id})
206
  return result
207
 
208
  except HTTPException:
209
  raise
210
  except Exception as e:
211
+ logger.error("Unexpected error fetching gift card template", extra={"template_id": template_id}, exc_info=e)
212
  raise HTTPException(status_code=500, detail=INTERNAL_SERVER_ERROR)
213
 
 
 
 
 
 
214
  @router.get("/{template_id}/stock", response_model=GiftCardStockResponse)
215
  async def check_gift_card_template_stock(
216
  template_id: str = Path(..., description=TEMPLATE_ID_DESCRIPTION),
217
  current_user: dict = Depends(require_view_giftcard_permission)
218
  ) -> GiftCardStockResponse:
219
+ """Check stock availability for a gift card template."""
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
220
  try:
221
  merchant_id = current_user.get("merchant_id")
222
 
223
  if not merchant_id:
224
  raise HTTPException(status_code=400, detail=MERCHANT_ID_REQUIRED)
225
 
226
+ logger.info("Checking stock for gift card template", extra={"template_id": template_id})
227
 
228
  result = await GiftCardService.check_stock_availability(template_id, merchant_id)
 
229
  return result
230
 
231
  except HTTPException:
232
  raise
233
  except Exception as e:
234
+ logger.error("Unexpected error checking stock", extra={"template_id": template_id}, exc_info=e)
235
  raise HTTPException(status_code=500, detail=INTERNAL_SERVER_ERROR)
236
 
 
237
  @router.get("/active/list", response_model=list[GiftCardTemplateResponse])
238
  async def get_active_gift_card_templates(
239
  current_user: dict = Depends(require_view_giftcard_permission)
240
  ) -> list[GiftCardTemplateResponse]:
241
+ """Get all active gift card templates for the merchant."""
 
 
 
 
 
 
 
 
 
242
  try:
243
  merchant_id = current_user.get("merchant_id")
244
 
245
  if not merchant_id:
246
  raise HTTPException(status_code=400, detail=MERCHANT_ID_REQUIRED)
247
 
248
+ logger.info("Fetching active gift card templates", extra={"merchant_id": merchant_id})
249
 
250
  result = await GiftCardService.get_active_templates(merchant_id)
251
 
252
+ logger.info("Active gift card templates retrieved", extra={"count": len(result)})
253
  return result
254
 
255
  except HTTPException:
256
  raise
257
  except Exception as e:
258
+ logger.error("Unexpected error fetching active gift card templates", exc_info=e)
259
  raise HTTPException(status_code=500, detail=INTERNAL_SERVER_ERROR)
app/routers/promotion_router.py CHANGED
@@ -1,13 +1,22 @@
1
  from datetime import datetime
2
- import logging
3
  import time
4
  from typing import Any, Dict, Optional
5
  import uuid
6
- from fastapi import APIRouter, Body, Depends, HTTPException, Query, logger, UploadFile, File
7
- from fastapi.responses import FileResponse
 
8
 
9
  from app.schemas.promotion_schema import PromotionMetaData, PromotionUpdate, PromotionValidateRequest
10
  from app.dependencies.auth import get_current_user, require_permission, AccessID
 
 
 
 
 
 
 
 
 
11
 
12
  # Async wrappers for permission dependencies
13
  async def require_create_promotion_permission(current_user: dict = Depends(get_current_user)):
@@ -19,17 +28,6 @@ async def require_view_promotion_permission(current_user: dict = Depends(get_cur
19
  async def require_update_promotion_permission(current_user: dict = Depends(get_current_user)):
20
  return await require_permission(AccessID.UPDATE_PROMOTION.value, current_user)
21
 
22
- from app.utils.request_id_utils import get_request_id
23
- from app.utils.user_role_enum import UserRole
24
- from app.services import promotion_service
25
-
26
- from io import StringIO, BytesIO
27
- import pandas as pd
28
-
29
-
30
- logger = logging.getLogger(__name__)
31
- router=APIRouter()
32
-
33
  @router.post("/", status_code=201)
34
  async def create_promotion(
35
  promotion_meta: PromotionMetaData = Body(..., description="promotion details"),
@@ -38,12 +36,11 @@ async def create_promotion(
38
  ):
39
  start_time = time.time()
40
  promotion_meta.merchant_id = current_user["merchant_id"]
41
- promotion_meta.promotion_id=str(uuid.uuid4())
42
  promotion_id = await promotion_service.PromotionManagementServices.create_promotion_data(promotion_meta)
43
- logger.info(f"/promotion/ create completed in {time.time() - start_time:.2f}s | correlation_id={correlation_id}")
44
  return {"promotion_id": promotion_id, "correlation_id": correlation_id}
45
 
46
-
47
  @router.get("/", status_code=200)
48
  async def list_promotions(
49
  status: Optional[str] = Query(None, description="Promotion status filter"),
@@ -60,18 +57,17 @@ async def list_promotions(
60
  merchant_id, status, is_active, q, sort, offset, limit
61
  )
62
  except RuntimeError as re:
63
- logger.error(f"RuntimeError while listing Promotion: {re}", exc_info=True)
64
  raise HTTPException(status_code=500, detail=str(re))
65
 
66
-
67
- @router.post('/apply',status_code=201)
68
- async def validate_promotion(
69
- data: PromotionValidateRequest = Body(..., description="promotion data to apply"),
70
- ):
71
  try:
72
  return await promotion_service.PromotionManagementServices.validate_promotion(data)
73
  except RuntimeError as re:
74
- logger.error(f"RuntimeError while applying Promotion: {re}", exc_info=True)
75
  raise HTTPException(status_code=500, detail=str(re))
76
 
77
  @router.get("/info/widgets", status_code=200)
@@ -85,31 +81,25 @@ async def get_info_widgets(
85
  associate_id = current_user["associate_id"]
86
  branch_id = current_user.get("branch_id")
87
 
88
- logger.info(
89
- f"Fetching info widgets for merchant_id={merchant_id}, associate_id={associate_id}, branch_id={branch_id} | correlation_id={correlation_id}")
90
 
91
  result = await promotion_service.PromotionManagementServices.get_info_widget_data(merchant_id, associate_id, branch_id)
92
 
93
  if result is None:
94
- logger.warning(
95
- f"No widget data found for merchant_id={merchant_id}, associate_id={associate_id} | correlation_id={correlation_id}")
96
  return {"data": {}, "correlation_id": correlation_id}
97
 
98
  if not isinstance(result, dict):
99
- logger.warning(
100
- f"Widget data is not a dictionary: {type(result)} | correlation_id={correlation_id}")
101
  return {"data": result, "correlation_id": correlation_id}
102
- logger.info(
103
- f"/promotion/info/widgets completed in {time.time() - start_time:.2f}s | correlation_id={correlation_id}")
104
  return {**result, "correlation_id": correlation_id}
105
 
106
  except Exception as e:
107
- logger.error(
108
- f"Error while fetching info widgets: {e} | correlation_id={correlation_id}", exc_info=True)
109
- raise HTTPException(
110
- status_code=500, detail="Unable to fetch dashboard widgets")
111
-
112
- # --- ROUTER LAYER ---
113
  @router.put("/{promotion_id}")
114
  async def update_promotion(
115
  promotion_id: str,
@@ -127,26 +117,23 @@ async def update_promotion(
127
  if not promotion:
128
  raise HTTPException(status_code=404, detail=f"Promotion {promotion_id} not found")
129
 
130
- logger.info(f"/promotion/{promotion_id} update completed in {time.time() - start_time:.2f}s | correlation_id={correlation_id}")
131
- return {
132
- "status": "success",
133
- "data": promotion,
134
- "correlation_id": correlation_id
135
- }
136
  except HTTPException:
137
  raise
138
  except ValueError as ve:
139
- logger.error(f"Validation error while updating promotion {promotion_id}: {ve} | correlation_id={correlation_id}")
140
  raise HTTPException(status_code=400, detail=str(ve))
141
  except Exception as e:
142
- logger.error(f"Error while updating promotion {promotion_id}: {e} | correlation_id={correlation_id}", exc_info=True)
143
  raise HTTPException(status_code=500, detail="Unable to update promotion")
144
-
145
  @router.get("/{promotion_id}")
146
  async def get_by_promotion_id(
147
- promotion_id: str,
148
  current_user: dict = Depends(require_view_promotion_permission),
149
- correlation_id: str = Depends(get_request_id)
150
  ):
151
  start_time = time.time()
152
  try:
@@ -154,18 +141,14 @@ async def get_by_promotion_id(
154
  if not promotion_data:
155
  raise HTTPException(status_code=404, detail="promotion id not found")
156
 
157
- logger.info(f"/promotion/{promotion_id} fetch completed in {time.time() - start_time:.2f}s | correlation_id={correlation_id}")
158
- return {
159
- "status": "success",
160
- "data": promotion_data,
161
- "correlation_id": correlation_id
162
- }
163
  except HTTPException:
164
  raise
165
  except Exception as e:
166
- logger.error(f"Error while fetching promotion {promotion_id}: {e} | correlation_id={correlation_id}", exc_info=True)
167
  raise HTTPException(status_code=500, detail="Unable to fetch promotion")
168
 
169
-
170
-
171
-
 
1
  from datetime import datetime
 
2
  import time
3
  from typing import Any, Dict, Optional
4
  import uuid
5
+ from fastapi import APIRouter, Body, Depends, HTTPException, Query, UploadFile, File
6
+ from fastapi.responses import FileResponse
7
+ from insightfy_utils.logging import get_logger
8
 
9
  from app.schemas.promotion_schema import PromotionMetaData, PromotionUpdate, PromotionValidateRequest
10
  from app.dependencies.auth import get_current_user, require_permission, AccessID
11
+ from app.utils.request_id_utils import get_request_id
12
+ from app.utils.user_role_enum import UserRole
13
+ from app.services import promotion_service
14
+
15
+ from io import StringIO, BytesIO
16
+ import pandas as pd
17
+
18
+ logger = get_logger(__name__)
19
+ router = APIRouter()
20
 
21
  # Async wrappers for permission dependencies
22
  async def require_create_promotion_permission(current_user: dict = Depends(get_current_user)):
 
28
  async def require_update_promotion_permission(current_user: dict = Depends(get_current_user)):
29
  return await require_permission(AccessID.UPDATE_PROMOTION.value, current_user)
30
 
 
 
 
 
 
 
 
 
 
 
 
31
  @router.post("/", status_code=201)
32
  async def create_promotion(
33
  promotion_meta: PromotionMetaData = Body(..., description="promotion details"),
 
36
  ):
37
  start_time = time.time()
38
  promotion_meta.merchant_id = current_user["merchant_id"]
39
+ promotion_meta.promotion_id = str(uuid.uuid4())
40
  promotion_id = await promotion_service.PromotionManagementServices.create_promotion_data(promotion_meta)
41
+ logger.info("Promotion created", extra={"duration": time.time() - start_time, "correlation_id": correlation_id})
42
  return {"promotion_id": promotion_id, "correlation_id": correlation_id}
43
 
 
44
  @router.get("/", status_code=200)
45
  async def list_promotions(
46
  status: Optional[str] = Query(None, description="Promotion status filter"),
 
57
  merchant_id, status, is_active, q, sort, offset, limit
58
  )
59
  except RuntimeError as re:
60
+ logger.error("RuntimeError while listing Promotion", exc_info=re)
61
  raise HTTPException(status_code=500, detail=str(re))
62
 
63
+ @router.post('/apply', status_code=201)
64
+ async def validate_promotion(
65
+ data: PromotionValidateRequest = Body(..., description="promotion data to apply"),
66
+ ):
 
67
  try:
68
  return await promotion_service.PromotionManagementServices.validate_promotion(data)
69
  except RuntimeError as re:
70
+ logger.error("RuntimeError while applying Promotion", exc_info=re)
71
  raise HTTPException(status_code=500, detail=str(re))
72
 
73
  @router.get("/info/widgets", status_code=200)
 
81
  associate_id = current_user["associate_id"]
82
  branch_id = current_user.get("branch_id")
83
 
84
+ logger.info("Fetching info widgets", extra={"merchant_id": merchant_id, "associate_id": associate_id, "branch_id": branch_id, "correlation_id": correlation_id})
 
85
 
86
  result = await promotion_service.PromotionManagementServices.get_info_widget_data(merchant_id, associate_id, branch_id)
87
 
88
  if result is None:
89
+ logger.warning("No widget data found", extra={"merchant_id": merchant_id, "associate_id": associate_id, "correlation_id": correlation_id})
 
90
  return {"data": {}, "correlation_id": correlation_id}
91
 
92
  if not isinstance(result, dict):
93
+ logger.warning("Widget data is not a dictionary", extra={"type": type(result).__name__, "correlation_id": correlation_id})
 
94
  return {"data": result, "correlation_id": correlation_id}
95
+
96
+ logger.info("Info widgets completed", extra={"duration": time.time() - start_time, "correlation_id": correlation_id})
97
  return {**result, "correlation_id": correlation_id}
98
 
99
  except Exception as e:
100
+ logger.error("Error while fetching info widgets", extra={"correlation_id": correlation_id}, exc_info=e)
101
+ raise HTTPException(status_code=500, detail="Unable to fetch dashboard widgets")
102
+
 
 
 
103
  @router.put("/{promotion_id}")
104
  async def update_promotion(
105
  promotion_id: str,
 
117
  if not promotion:
118
  raise HTTPException(status_code=404, detail=f"Promotion {promotion_id} not found")
119
 
120
+ logger.info("Promotion updated", extra={"promotion_id": promotion_id, "duration": time.time() - start_time, "correlation_id": correlation_id})
121
+ return {"status": "success", "data": promotion, "correlation_id": correlation_id}
122
+
 
 
 
123
  except HTTPException:
124
  raise
125
  except ValueError as ve:
126
+ logger.error("Validation error while updating promotion", extra={"promotion_id": promotion_id, "correlation_id": correlation_id}, exc_info=ve)
127
  raise HTTPException(status_code=400, detail=str(ve))
128
  except Exception as e:
129
+ logger.error("Error while updating promotion", extra={"promotion_id": promotion_id, "correlation_id": correlation_id}, exc_info=e)
130
  raise HTTPException(status_code=500, detail="Unable to update promotion")
131
+
132
  @router.get("/{promotion_id}")
133
  async def get_by_promotion_id(
134
+ promotion_id: str,
135
  current_user: dict = Depends(require_view_promotion_permission),
136
+ correlation_id: str = Depends(get_request_id)
137
  ):
138
  start_time = time.time()
139
  try:
 
141
  if not promotion_data:
142
  raise HTTPException(status_code=404, detail="promotion id not found")
143
 
144
+ logger.info("Promotion fetched", extra={"promotion_id": promotion_id, "duration": time.time() - start_time, "correlation_id": correlation_id})
145
+ return {"status": "success", "data": promotion_data, "correlation_id": correlation_id}
146
+
 
 
 
147
  except HTTPException:
148
  raise
149
  except Exception as e:
150
+ logger.error("Error while fetching promotion", extra={"promotion_id": promotion_id, "correlation_id": correlation_id}, exc_info=e)
151
  raise HTTPException(status_code=500, detail="Unable to fetch promotion")
152
 
153
+
154
+
 
app/routers/supplier_route.py CHANGED
@@ -1,9 +1,8 @@
1
-
2
  from datetime import date
3
- import logging
4
  import time
5
  from typing import Any, Dict, List, Optional
6
  from fastapi import APIRouter, Body, Depends, File, HTTPException, Path, Query, UploadFile
 
7
 
8
  from app.dependencies.auth import get_current_user, require_permission, AccessID
9
 
@@ -25,12 +24,13 @@ from app.services.supplier_service import SupplierService
25
  from app.utils import catalogue_utils
26
  from app.utils.request_id_utils import get_request_id
27
 
28
- router=APIRouter()
29
- logger = logging.getLogger(__name__)
30
 
31
 
32
  @router.post("/", status_code=201)
33
- async def create_supplier(data: Supplier,
 
34
  current_user: dict = Depends(require_create_supplier_permission),
35
  correlation_id: str = Depends(get_request_id)
36
  ):
@@ -39,16 +39,16 @@ async def create_supplier(data: Supplier,
39
  # Call the service layer to create the item
40
  data.merchant_id = current_user["merchant_id"]
41
  supplier_id = await SupplierService.create_supplier(data)
42
- logger.info(f"/supplier/ create completed in {time.time() - start_time:.2f}s | correlation_id={correlation_id}")
43
  return {"supplier_id": supplier_id, "correlation_id": correlation_id}
44
  except RuntimeError as re:
45
- logger.error(f"RuntimeError while creating supplier: {re} | correlation_id={correlation_id}", exc_info=True)
46
  raise HTTPException(status_code=500, detail=str(re))
47
  except Exception as e:
48
- logger.error(f"Unexpected error while creating item: {e} | correlation_id={correlation_id}", exc_info=True)
49
  raise HTTPException(status_code=500, detail="Internal server error")
50
 
51
- @router.get("/{supplier_id}",status_code=200)
52
  async def get_supplier(
53
  supplier_id: str = Path(..., description="ID"),
54
  current_user: dict = Depends(require_view_supplier_permission),
@@ -56,21 +56,20 @@ async def get_supplier(
56
  ):
57
  start_time = time.time()
58
  try:
59
- merchant_id = current_user.get("merchant_id")
60
  supplier_data = await SupplierService.get_supplier_data(supplier_id, merchant_id)
61
  if not supplier_data:
62
  raise HTTPException(status_code=404, detail="Supplier not found")
63
- result= catalogue_utils.sanitize_document_for_mongo(supplier_data)
64
- logger.info(f"/suppliers/{{supplier_id}} completed in {time.time() - start_time:.2f}s | correlation_id={correlation_id}")
65
  return {**result, "correlation_id": correlation_id}
66
 
67
  except HTTPException as http_err:
68
- # Log specific HTTP exceptions without re-wrapping them
69
- logger.warning(f"Supplier retrieval failed: {http_err.detail} | supplier_id={supplier_id}, merchant_id={merchant_id} | correlation_id={correlation_id}")
70
  raise
71
 
72
  except Exception as e:
73
- logger.error(f"Unexpected error while fetching Supplier | supplier_id={supplier_id}, merchant_id={merchant_id}: {e} | correlation_id={correlation_id}", exc_info=True)
74
  raise HTTPException(status_code=500, detail="Internal server error")
75
 
76
  @router.put("/{supplier_id}", status_code=200)
@@ -83,15 +82,14 @@ async def update_supplier(
83
  start_time = time.time()
84
  try:
85
  data["merchant_id"] = current_user["merchant_id"]
86
- result=await SupplierService.update_supplier(supplier_id, data)
87
- logger.info(f"/supplier/{{supplier_id}} update completed in {time.time() - start_time:.2f}s | correlation_id={correlation_id}")
88
  result_data = catalogue_utils.sanitize_document_for_mongo(result)
89
  return {**result_data, "correlation_id": correlation_id}
90
 
91
  except RuntimeError as re:
92
- logger.error(f"RuntimeError while updating supplier: {re} | correlation_id={correlation_id}", exc_info=True)
93
  raise HTTPException(status_code=500, detail=str(re))
94
-
95
 
96
  @router.delete("/{supplier_id}", status_code=200)
97
  async def delete_supplier(
@@ -101,34 +99,26 @@ async def delete_supplier(
101
  ):
102
  start_time = time.time()
103
  try:
104
- # Composite filter for identifying the catalogue item
105
-
106
- merchant_id = current_user.get("merchant_id")
107
-
108
- filter_criteria = {
109
- "merchant_id": merchant_id,
110
- "supplier_id": supplier_id,
111
- }
112
 
113
  response = await SupplierService.delete_item(filter_criteria["supplier_id"])
114
  if response["message"] == "Supplier soft deleted":
115
- logger.info(f"/Supplier/{supplier_id} {time.time() - start_time:.2f}s | correlation_id={correlation_id}")
116
- return {**response,"correlation_id": correlation_id}
117
  else:
118
  raise HTTPException(status_code=404, detail="Supplier not found")
119
 
120
  except ValueError as ve:
121
- logger.error(f"ValueError while deleting Supplier: {ve} | correlation_id={correlation_id}", exc_info=True)
122
  raise HTTPException(status_code=400, detail=str(ve))
123
  except RuntimeError as re:
124
- logger.error(f"RuntimeError while deleting Supplier: {re} | correlation_id={correlation_id}", exc_info=True)
125
  raise HTTPException(status_code=500, detail=str(re))
126
  except Exception as e:
127
- logger.error(f"Unexpected error while deleting Supplier: {e} | correlation_id={correlation_id}", exc_info=True)
128
  raise HTTPException(status_code=500, detail="Internal server error")
129
 
130
-
131
-
132
  @router.post("/list", status_code=200)
133
  async def list_suppliers(
134
  payload: SupplierListFilter = Body(...),
@@ -141,12 +131,9 @@ async def list_suppliers(
141
  filters = payload.filters or {}
142
  offset = payload.offset or 0
143
  limit = payload.limit or 10
144
-
145
  projection_list = payload.projection_list or None
146
 
147
- logger.info(
148
- f"Listing suppliers for merchant_id={merchant_id} with filters={filters}, offset={offset}, limit={limit} | correlation_id={correlation_id}, projection_list={projection_list}"
149
- )
150
 
151
  result = await SupplierService.list_supplier_data(
152
  merchant_id=merchant_id,
@@ -155,49 +142,45 @@ async def list_suppliers(
155
  limit=limit,
156
  projection_list=projection_list
157
  )
158
- logger.info(
159
- f"/supplier/list completed in {time.time() - start_time:.2f}s | correlation_id={correlation_id}"
160
- )
161
  return {**result, "correlation_id": correlation_id}
162
  except Exception as e:
163
- logger.error(
164
- f"Error in list_suppliers: {e} | correlation_id={correlation_id}", exc_info=True
165
- )
166
- # Removed erroneous duplicate and unclosed method call from exception block
167
-
168
 
169
  @router.post("/{supplier_id}/history", status_code=201)
170
  async def append_supply_history(
171
  supplier_id: str = Path(..., description="Supplier ID"),
172
- history_entry: supply_history=None,
173
  current_user: dict = Depends(require_update_supplier_permission),
174
  correlation_id: str = Depends(get_request_id)
175
  ):
176
  start_time = time.time()
177
  try:
178
- merchant_id = current_user.get("merchant_id")
179
- result=await SupplierService.append_supply_history(supplier_id,history_entry)
180
- logger.info(f"/{supplier_id}/history completed in {time.time() - start_time:.2f}s | correlation_id={correlation_id}")
181
  return {**result, "correlation_id": correlation_id}
182
 
183
  except Exception as e:
184
- logger.error(f"RuntimeError while appending supply history: {e} | correlation_id={correlation_id}", exc_info=True)
185
  raise HTTPException(status_code=404, detail="Supplier not found or not updated")
186
 
187
  @router.post("/{supplier_id}/documents", status_code=201)
188
- async def upload_document( data:Document,
 
189
  supplier_id: str = Path(..., description="Supplier ID"),
190
  current_user: dict = Depends(require_update_supplier_permission),
191
  correlation_id: str = Depends(get_request_id)
192
  ):
193
  start_time = time.time()
194
  try:
195
- result=await SupplierService.upload_document(data,supplier_id,)
196
- logger.info(f"/{supplier_id}/upload document completed in {time.time() - start_time:.2f}s | correlation_id={correlation_id}")
197
  return {**result, "correlation_id": correlation_id}
198
 
199
  except Exception as e:
200
- logger.error(f"RuntimeError while uploading document: {e} | correlation_id={correlation_id}", exc_info=True)
201
  raise HTTPException(status_code=500, detail="document not uploaded")
202
 
203
  @router.get("/info/widgets", status_code=200)
@@ -210,26 +193,21 @@ async def get_info_widgets(
210
  merchant_id = current_user["merchant_id"]
211
  branch_id = current_user.get("branch_id")
212
 
213
- logger.info(
214
- f"Fetching info widgets for merchant_id={merchant_id}, branch_id={branch_id} | correlation_id={correlation_id}")
215
 
216
- result = await SupplierService.get_info_widget_data(merchant_id, branch_id)
217
 
218
  if result is None:
219
- logger.warning(
220
- f"No widget data found for merchant_id={merchant_id} | correlation_id={correlation_id}")
221
  return {"data": {}, "correlation_id": correlation_id}
222
 
223
  if not isinstance(result, dict):
224
- logger.warning(
225
- f"Widget data is not a dictionary: {type(result)} | correlation_id={correlation_id}")
226
  return {"data": result, "correlation_id": correlation_id}
227
- logger.info(
228
- f"/supplier/info/widgets completed in {time.time() - start_time:.2f}s | correlation_id={correlation_id}")
229
  return {**result, "correlation_id": correlation_id}
230
 
231
  except Exception as e:
232
- logger.error(
233
- f"Error while fetching info widgets: {e} | correlation_id={correlation_id}", exc_info=True)
234
- raise HTTPException(
235
- status_code=500, detail="Unable to fetch dashboard widgets")
 
 
1
  from datetime import date
 
2
  import time
3
  from typing import Any, Dict, List, Optional
4
  from fastapi import APIRouter, Body, Depends, File, HTTPException, Path, Query, UploadFile
5
+ from insightfy_utils.logging import get_logger
6
 
7
  from app.dependencies.auth import get_current_user, require_permission, AccessID
8
 
 
24
  from app.utils import catalogue_utils
25
  from app.utils.request_id_utils import get_request_id
26
 
27
+ router = APIRouter()
28
+ logger = get_logger(__name__)
29
 
30
 
31
  @router.post("/", status_code=201)
32
+ async def create_supplier(
33
+ data: Supplier,
34
  current_user: dict = Depends(require_create_supplier_permission),
35
  correlation_id: str = Depends(get_request_id)
36
  ):
 
39
  # Call the service layer to create the item
40
  data.merchant_id = current_user["merchant_id"]
41
  supplier_id = await SupplierService.create_supplier(data)
42
+ logger.info("Supplier created", extra={"duration": time.time() - start_time, "correlation_id": correlation_id})
43
  return {"supplier_id": supplier_id, "correlation_id": correlation_id}
44
  except RuntimeError as re:
45
+ logger.error("RuntimeError while creating supplier", extra={"correlation_id": correlation_id}, exc_info=re)
46
  raise HTTPException(status_code=500, detail=str(re))
47
  except Exception as e:
48
+ logger.error("Unexpected error while creating supplier", extra={"correlation_id": correlation_id}, exc_info=e)
49
  raise HTTPException(status_code=500, detail="Internal server error")
50
 
51
+ @router.get("/{supplier_id}", status_code=200)
52
  async def get_supplier(
53
  supplier_id: str = Path(..., description="ID"),
54
  current_user: dict = Depends(require_view_supplier_permission),
 
56
  ):
57
  start_time = time.time()
58
  try:
59
+ merchant_id = current_user.get("merchant_id")
60
  supplier_data = await SupplierService.get_supplier_data(supplier_id, merchant_id)
61
  if not supplier_data:
62
  raise HTTPException(status_code=404, detail="Supplier not found")
63
+ result = catalogue_utils.sanitize_document_for_mongo(supplier_data)
64
+ logger.info("Supplier retrieved", extra={"supplier_id": supplier_id, "duration": time.time() - start_time, "correlation_id": correlation_id})
65
  return {**result, "correlation_id": correlation_id}
66
 
67
  except HTTPException as http_err:
68
+ logger.warning("Supplier retrieval failed", extra={"detail": http_err.detail, "supplier_id": supplier_id, "merchant_id": merchant_id, "correlation_id": correlation_id})
 
69
  raise
70
 
71
  except Exception as e:
72
+ logger.error("Unexpected error while fetching supplier", extra={"supplier_id": supplier_id, "merchant_id": merchant_id, "correlation_id": correlation_id}, exc_info=e)
73
  raise HTTPException(status_code=500, detail="Internal server error")
74
 
75
  @router.put("/{supplier_id}", status_code=200)
 
82
  start_time = time.time()
83
  try:
84
  data["merchant_id"] = current_user["merchant_id"]
85
+ result = await SupplierService.update_supplier(supplier_id, data)
86
+ logger.info("Supplier updated", extra={"supplier_id": supplier_id, "duration": time.time() - start_time, "correlation_id": correlation_id})
87
  result_data = catalogue_utils.sanitize_document_for_mongo(result)
88
  return {**result_data, "correlation_id": correlation_id}
89
 
90
  except RuntimeError as re:
91
+ logger.error("RuntimeError while updating supplier", extra={"correlation_id": correlation_id}, exc_info=re)
92
  raise HTTPException(status_code=500, detail=str(re))
 
93
 
94
  @router.delete("/{supplier_id}", status_code=200)
95
  async def delete_supplier(
 
99
  ):
100
  start_time = time.time()
101
  try:
102
+ merchant_id = current_user.get("merchant_id")
103
+ filter_criteria = {"merchant_id": merchant_id, "supplier_id": supplier_id}
 
 
 
 
 
 
104
 
105
  response = await SupplierService.delete_item(filter_criteria["supplier_id"])
106
  if response["message"] == "Supplier soft deleted":
107
+ logger.info("Supplier deleted", extra={"supplier_id": supplier_id, "duration": time.time() - start_time, "correlation_id": correlation_id})
108
+ return {**response, "correlation_id": correlation_id}
109
  else:
110
  raise HTTPException(status_code=404, detail="Supplier not found")
111
 
112
  except ValueError as ve:
113
+ logger.error("ValueError while deleting supplier", extra={"correlation_id": correlation_id}, exc_info=ve)
114
  raise HTTPException(status_code=400, detail=str(ve))
115
  except RuntimeError as re:
116
+ logger.error("RuntimeError while deleting supplier", extra={"correlation_id": correlation_id}, exc_info=re)
117
  raise HTTPException(status_code=500, detail=str(re))
118
  except Exception as e:
119
+ logger.error("Unexpected error while deleting supplier", extra={"correlation_id": correlation_id}, exc_info=e)
120
  raise HTTPException(status_code=500, detail="Internal server error")
121
 
 
 
122
  @router.post("/list", status_code=200)
123
  async def list_suppliers(
124
  payload: SupplierListFilter = Body(...),
 
131
  filters = payload.filters or {}
132
  offset = payload.offset or 0
133
  limit = payload.limit or 10
 
134
  projection_list = payload.projection_list or None
135
 
136
+ logger.info("Listing suppliers", extra={"merchant_id": merchant_id, "filters": filters, "offset": offset, "limit": limit, "projection_list": projection_list, "correlation_id": correlation_id})
 
 
137
 
138
  result = await SupplierService.list_supplier_data(
139
  merchant_id=merchant_id,
 
142
  limit=limit,
143
  projection_list=projection_list
144
  )
145
+ logger.info("Supplier list completed", extra={"duration": time.time() - start_time, "correlation_id": correlation_id})
 
 
146
  return {**result, "correlation_id": correlation_id}
147
  except Exception as e:
148
+ logger.error("Error in list_suppliers", extra={"correlation_id": correlation_id}, exc_info=e)
149
+ raise HTTPException(status_code=500, detail="Internal server error")
 
 
 
150
 
151
  @router.post("/{supplier_id}/history", status_code=201)
152
  async def append_supply_history(
153
  supplier_id: str = Path(..., description="Supplier ID"),
154
+ history_entry: supply_history = None,
155
  current_user: dict = Depends(require_update_supplier_permission),
156
  correlation_id: str = Depends(get_request_id)
157
  ):
158
  start_time = time.time()
159
  try:
160
+ merchant_id = current_user.get("merchant_id")
161
+ result = await SupplierService.append_supply_history(supplier_id, history_entry)
162
+ logger.info("Supply history appended", extra={"supplier_id": supplier_id, "duration": time.time() - start_time, "correlation_id": correlation_id})
163
  return {**result, "correlation_id": correlation_id}
164
 
165
  except Exception as e:
166
+ logger.error("Error while appending supply history", extra={"correlation_id": correlation_id}, exc_info=e)
167
  raise HTTPException(status_code=404, detail="Supplier not found or not updated")
168
 
169
  @router.post("/{supplier_id}/documents", status_code=201)
170
+ async def upload_document(
171
+ data: Document,
172
  supplier_id: str = Path(..., description="Supplier ID"),
173
  current_user: dict = Depends(require_update_supplier_permission),
174
  correlation_id: str = Depends(get_request_id)
175
  ):
176
  start_time = time.time()
177
  try:
178
+ result = await SupplierService.upload_document(data, supplier_id)
179
+ logger.info("Document uploaded", extra={"supplier_id": supplier_id, "duration": time.time() - start_time, "correlation_id": correlation_id})
180
  return {**result, "correlation_id": correlation_id}
181
 
182
  except Exception as e:
183
+ logger.error("Error while uploading document", extra={"correlation_id": correlation_id}, exc_info=e)
184
  raise HTTPException(status_code=500, detail="document not uploaded")
185
 
186
  @router.get("/info/widgets", status_code=200)
 
193
  merchant_id = current_user["merchant_id"]
194
  branch_id = current_user.get("branch_id")
195
 
196
+ logger.info("Fetching info widgets", extra={"merchant_id": merchant_id, "branch_id": branch_id, "correlation_id": correlation_id})
 
197
 
198
+ result = await SupplierService.get_info_widget_data(merchant_id, branch_id)
199
 
200
  if result is None:
201
+ logger.warning("No widget data found", extra={"merchant_id": merchant_id, "correlation_id": correlation_id})
 
202
  return {"data": {}, "correlation_id": correlation_id}
203
 
204
  if not isinstance(result, dict):
205
+ logger.warning("Widget data is not a dictionary", extra={"type": type(result).__name__, "correlation_id": correlation_id})
 
206
  return {"data": result, "correlation_id": correlation_id}
207
+
208
+ logger.info("Info widgets completed", extra={"duration": time.time() - start_time, "correlation_id": correlation_id})
209
  return {**result, "correlation_id": correlation_id}
210
 
211
  except Exception as e:
212
+ logger.error("Error while fetching info widgets", extra={"correlation_id": correlation_id}, exc_info=e)
213
+ raise HTTPException(status_code=500, detail="Unable to fetch dashboard widgets")
 
 
app/routers/taxonomy_route.py CHANGED
@@ -1,7 +1,7 @@
1
- import logging
2
  import time
3
  from typing import Optional
4
  from fastapi import APIRouter, Body, Depends, HTTPException, Path, Query
 
5
  from app.dependencies.auth import get_current_user, require_permission, AccessID
6
 
7
  # Async wrappers for permission dependencies
@@ -21,12 +21,12 @@ from app.schemas.taxonomy_schema import TaxonomyInfo
21
  from app.services.taxonomy_service import TaxonomyService
22
  from app.utils.request_id_utils import get_request_id
23
 
24
- router=APIRouter()
25
- logger = logging.getLogger(__name__)
26
 
27
- @router.post("/",status_code=201)
28
  async def create_taxonomy(
29
- data:TaxonomyInfo,
30
  current_user: dict = Depends(require_create_taxonomy_permission),
31
  correlation_id: str = Depends(get_request_id)
32
  ):
@@ -35,11 +35,11 @@ async def create_taxonomy(
35
  data.merchant_id = current_user["merchant_id"]
36
  data.created_by = current_user["associate_id"]
37
  taxonomy_id = await TaxonomyService.create_taxonomy(data)
38
- logger.info(f"/taxxonomy/ create completed in {time.time() - start_time:.2f}s | correlation_id={correlation_id}")
39
  return {"taxonomy_id": taxonomy_id, "correlation_id": correlation_id}
40
 
41
  except RuntimeError as re:
42
- logger.error(f"RuntimeError while creating taxonomy item: {re} | correlation_id={correlation_id}", exc_info=True)
43
  raise HTTPException(status_code=500, detail=str(re))
44
 
45
  @router.put("/{id}", status_code=200)
@@ -53,12 +53,12 @@ async def update_taxonomy(
53
  try:
54
  data["merchant_id"] = current_user["merchant_id"]
55
  data["updated_by"] = current_user["associate_id"]
56
- result=await TaxonomyService.update_taxonomy(id, data)
57
- logger.info(f"/taxonomy/{{id}} update completed in {time.time() - start_time:.2f}s | correlation_id={correlation_id}")
58
  result_data = serialize_mongo_document(result)
59
  return {**result_data, "correlation_id": correlation_id}
60
  except RuntimeError as re:
61
- logger.error(f"RuntimeError while updating taxonomy: {re} | correlation_id={correlation_id}", exc_info=True)
62
  raise HTTPException(status_code=500, detail=str(re))
63
 
64
  @router.delete("/", status_code=200)
@@ -67,25 +67,23 @@ async def delete_taxonomy(
67
  current_user: dict = Depends(require_delete_taxonomy_permission),
68
  correlation_id: str = Depends(get_request_id)
69
  ):
70
-
71
  start_time = time.time()
72
  try:
73
-
74
  response = await TaxonomyService.delete_taxonomy(id)
75
  if response["message"] == "taxonomy deleted":
76
- logger.info(f"/taxonomy {time.time() - start_time:.2f}s | correlation_id={correlation_id}")
77
- return {**response,"correlation_id": correlation_id}
78
  else:
79
  raise HTTPException(status_code=404, detail="Taxonomy not found")
80
 
81
  except ValueError as ve:
82
- logger.error(f"ValueError while deleting item: {ve} | correlation_id={correlation_id}", exc_info=True)
83
  raise HTTPException(status_code=400, detail=str(ve))
84
  except RuntimeError as re:
85
- logger.error(f"RuntimeError while deleting item: {re} | correlation_id={correlation_id}", exc_info=True)
86
  raise HTTPException(status_code=500, detail=str(re))
87
  except Exception as e:
88
- logger.error(f"Unexpected error while deleting item: {e} | correlation_id={correlation_id}", exc_info=True)
89
  raise HTTPException(status_code=500, detail="Internal server error")
90
 
91
  @router.get("/list", status_code=200)
@@ -96,30 +94,22 @@ async def list_taxonomy(
96
  ):
97
  start_time = time.time()
98
  try:
99
- # Call the service layer to list items (no pagination for LOV data)
100
  merchant_id = current_user.get("merchant_id")
101
 
102
- result = await TaxonomyService.list_taxonomy(
103
- merchant_id=merchant_id,
104
- type=type
105
- )
106
- logger.info(f"/taxonomy/list completed in {time.time() - start_time:.2f}s | correlation_id={correlation_id}")
107
 
108
- # Check if result is a dictionary with error message
109
  if isinstance(result, dict) and "message" in result and "Item not found" in result["message"]:
110
  return {"message": result["message"], "correlation_id": correlation_id}
111
 
112
- # If result is a list (which seems to be the case), wrap it properly
113
  if isinstance(result, list):
114
  return {"items": result, "count": len(result), "correlation_id": correlation_id}
115
 
116
- # Otherwise return the taxonomy structure with correlation_id (if it's a dict)
117
  return {**result, "correlation_id": correlation_id}
118
  except RuntimeError as re:
119
- logger.error(f"RuntimeError while list_taxonomy: {re} | correlation_id={correlation_id}", exc_info=True)
120
  raise HTTPException(status_code=500, detail=str(re))
121
  except Exception as e:
122
- logger.error(f"Error in list_taxonomy: {e} | correlation_id={correlation_id}", exc_info=True)
123
  raise HTTPException(status_code=500, detail="Internal server error")
124
 
125
-
 
 
1
  import time
2
  from typing import Optional
3
  from fastapi import APIRouter, Body, Depends, HTTPException, Path, Query
4
+ from insightfy_utils.logging import get_logger
5
  from app.dependencies.auth import get_current_user, require_permission, AccessID
6
 
7
  # Async wrappers for permission dependencies
 
21
  from app.services.taxonomy_service import TaxonomyService
22
  from app.utils.request_id_utils import get_request_id
23
 
24
+ router = APIRouter()
25
+ logger = get_logger(__name__)
26
 
27
+ @router.post("/", status_code=201)
28
  async def create_taxonomy(
29
+ data: TaxonomyInfo,
30
  current_user: dict = Depends(require_create_taxonomy_permission),
31
  correlation_id: str = Depends(get_request_id)
32
  ):
 
35
  data.merchant_id = current_user["merchant_id"]
36
  data.created_by = current_user["associate_id"]
37
  taxonomy_id = await TaxonomyService.create_taxonomy(data)
38
+ logger.info("Taxonomy created", extra={"duration": time.time() - start_time, "correlation_id": correlation_id})
39
  return {"taxonomy_id": taxonomy_id, "correlation_id": correlation_id}
40
 
41
  except RuntimeError as re:
42
+ logger.error("RuntimeError while creating taxonomy", extra={"correlation_id": correlation_id}, exc_info=re)
43
  raise HTTPException(status_code=500, detail=str(re))
44
 
45
  @router.put("/{id}", status_code=200)
 
53
  try:
54
  data["merchant_id"] = current_user["merchant_id"]
55
  data["updated_by"] = current_user["associate_id"]
56
+ result = await TaxonomyService.update_taxonomy(id, data)
57
+ logger.info("Taxonomy updated", extra={"id": id, "duration": time.time() - start_time, "correlation_id": correlation_id})
58
  result_data = serialize_mongo_document(result)
59
  return {**result_data, "correlation_id": correlation_id}
60
  except RuntimeError as re:
61
+ logger.error("RuntimeError while updating taxonomy", extra={"correlation_id": correlation_id}, exc_info=re)
62
  raise HTTPException(status_code=500, detail=str(re))
63
 
64
  @router.delete("/", status_code=200)
 
67
  current_user: dict = Depends(require_delete_taxonomy_permission),
68
  correlation_id: str = Depends(get_request_id)
69
  ):
 
70
  start_time = time.time()
71
  try:
 
72
  response = await TaxonomyService.delete_taxonomy(id)
73
  if response["message"] == "taxonomy deleted":
74
+ logger.info("Taxonomy deleted", extra={"id": id, "duration": time.time() - start_time, "correlation_id": correlation_id})
75
+ return {**response, "correlation_id": correlation_id}
76
  else:
77
  raise HTTPException(status_code=404, detail="Taxonomy not found")
78
 
79
  except ValueError as ve:
80
+ logger.error("ValueError while deleting taxonomy", extra={"correlation_id": correlation_id}, exc_info=ve)
81
  raise HTTPException(status_code=400, detail=str(ve))
82
  except RuntimeError as re:
83
+ logger.error("RuntimeError while deleting taxonomy", extra={"correlation_id": correlation_id}, exc_info=re)
84
  raise HTTPException(status_code=500, detail=str(re))
85
  except Exception as e:
86
+ logger.error("Unexpected error while deleting taxonomy", extra={"correlation_id": correlation_id}, exc_info=e)
87
  raise HTTPException(status_code=500, detail="Internal server error")
88
 
89
  @router.get("/list", status_code=200)
 
94
  ):
95
  start_time = time.time()
96
  try:
 
97
  merchant_id = current_user.get("merchant_id")
98
 
99
+ result = await TaxonomyService.list_taxonomy(merchant_id=merchant_id, type=type)
100
+ logger.info("Taxonomy list completed", extra={"duration": time.time() - start_time, "correlation_id": correlation_id})
 
 
 
101
 
 
102
  if isinstance(result, dict) and "message" in result and "Item not found" in result["message"]:
103
  return {"message": result["message"], "correlation_id": correlation_id}
104
 
 
105
  if isinstance(result, list):
106
  return {"items": result, "count": len(result), "correlation_id": correlation_id}
107
 
 
108
  return {**result, "correlation_id": correlation_id}
109
  except RuntimeError as re:
110
+ logger.error("RuntimeError while listing taxonomy", extra={"correlation_id": correlation_id}, exc_info=re)
111
  raise HTTPException(status_code=500, detail=str(re))
112
  except Exception as e:
113
+ logger.error("Error in list_taxonomy", extra={"correlation_id": correlation_id}, exc_info=e)
114
  raise HTTPException(status_code=500, detail="Internal server error")
115
 
 
app/schemas/gift_card_schema.py CHANGED
@@ -226,5 +226,4 @@ class GiftCardTemplateFilter(BaseModel):
226
 
227
  # Attach $or to main filter
228
  mongo_filter["$or"] = or_conditions
229
- print(mongo_filter)
230
  return mongo_filter
 
226
 
227
  # Attach $or to main filter
228
  mongo_filter["$or"] = or_conditions
 
229
  return mongo_filter
app/services/catalogue_service.py CHANGED
@@ -3,16 +3,15 @@ from typing import Dict, Any, List, Optional
3
  from bson import ObjectId
4
  from fastapi import HTTPException
5
  from pydantic import ValidationError, TypeAdapter
 
6
  from app.models.catalogue_models import CatalogueModel
7
  from app.repositories.db import fetch_one_document
8
 
9
 
10
- import logging
11
-
12
  from app.schemas.catalogue_schema import CatalogueBulkImport, CatalogueItemBase, CatalogueListFilter, CatalogueUnion
13
  from app.utils import catalogue_utils
14
 
15
- logger = logging.getLogger(__name__)
16
 
17
  # Constants for error messages
18
  INTERNAL_SERVER_ERROR = "Internal server error"
@@ -30,7 +29,7 @@ class CatalogueService:
30
  """
31
  try:
32
  data = item.dict(by_alias=True, exclude_none=True)
33
- logger.info(f"Creating catalogue item with data: {data}")
34
 
35
  catalogue_inserted_id = await CatalogueModel.create_catalogue_item(data)
36
 
@@ -40,22 +39,21 @@ class CatalogueService:
40
  }
41
 
42
  except RuntimeError as re:
43
- logger.error(f"RuntimeError while creating catalogue item: {re}", exc_info=True)
44
  raise HTTPException(status_code=500, detail=str(re))
45
 
46
  except Exception as e:
47
- logger.error(f"Unexpected error while creating catalogue item: {e}", exc_info=True)
48
  raise HTTPException(status_code=500, detail=INTERNAL_SERVER_ERROR)
49
 
50
  @staticmethod
51
  async def update_catalogue_item(
52
  catalogue_id: str,
53
  update_data: dict,
54
- merchant_id:str
55
  ) -> dict:
56
 
57
- result = await CatalogueModel.update_catalogue_data(catalogue_id, update_data,merchant_id)
58
-
59
  return {"message": "Catalogue updated successfully"} if result else {"message": "No changes made"}
60
 
61
 
@@ -79,10 +77,10 @@ class CatalogueService:
79
  "list_details_pref",
80
  {"merchant_id": merchant_id, "associate_id": associate_id, "list_type": "catalogues"}
81
  )
82
- logger.info(f"User preferences for {associate_id}: {user_pref}")
83
  return user_pref
84
  except Exception as e:
85
- logger.warning(f"Failed to fetch user preferences: {e}")
86
  return None
87
 
88
  @staticmethod
@@ -209,7 +207,7 @@ class CatalogueService:
209
  """
210
  List catalogue items with optional user preference filtering.
211
  """
212
- logger.info(f"Fetching catalogues for {merchant_id=} with incoming filters={filters}")
213
 
214
  try:
215
  # Initialize base filter criteria
@@ -232,7 +230,13 @@ class CatalogueService:
232
  CatalogueService._apply_api_filters(filter_criteria, filters)
233
 
234
  logger.debug(
235
- f"Final filter criteria: {filter_criteria}, offset={offset}, limit={limit}, projection={final_projection_list}"
 
 
 
 
 
 
236
  )
237
 
238
  return await CatalogueModel.list_items(
@@ -244,10 +248,10 @@ class CatalogueService:
244
  )
245
 
246
  except RuntimeError as re:
247
- logger.error(f"RuntimeError while listing items: {re}", exc_info=True)
248
  raise HTTPException(status_code=500, detail=str(re))
249
  except Exception as e:
250
- logger.error(f"Unexpected error while listing items: {e}", exc_info=True)
251
  raise HTTPException(status_code=500, detail=INTERNAL_SERVER_ERROR)
252
 
253
  @staticmethod
@@ -264,7 +268,7 @@ class CatalogueService:
264
  raise RuntimeError("Invalid data format returned from model layer")
265
  return items
266
  except Exception as e:
267
- logger.error(f"Error fetching POS catalogue items: {e}", exc_info=True)
268
  raise RuntimeError("Failed to fetch catalogue items") from e
269
 
270
  @staticmethod
@@ -288,7 +292,7 @@ class CatalogueService:
288
  )
289
  return result
290
  except Exception as e:
291
- logger.error(f"Error fetching aggregated POS catalogue items: {e}", exc_info=True)
292
  raise RuntimeError("Failed to fetch catalogue items") from e
293
 
294
  @staticmethod
@@ -309,19 +313,18 @@ class CatalogueService:
309
  return items
310
 
311
  @staticmethod
312
- async def update_catalogue_inventory(catalogue_id: str, branch_id: str,merchant_id: str, update_data: dict):
313
-
314
  filter_criteria = {
315
  "merchant_id": merchant_id,
316
  "branch_id": branch_id,
317
  "catalogue_id": catalogue_id
318
  }
319
- result= await CatalogueModel.update_catalogue_inventory(filter_criteria, update_data)
320
  return {"message": "Catalogue updated successfully"} if result else {"message": "No changes made"}
321
 
322
  @staticmethod
323
  async def get_catalogue_data(catalogue_id: str, merchant_id: str):
324
- logger.info(f"Fetching catalogue with ID: {id}")
325
  try:
326
  catalogue_data = await CatalogueModel.get_catalogue_data(catalogue_id, merchant_id)
327
  if catalogue_data is None:
@@ -331,10 +334,10 @@ class CatalogueService:
331
  # Re-raise HTTP exceptions without modification
332
  raise
333
  except RuntimeError as re:
334
- logger.error(f"RuntimeError while fetching item: {re}", exc_info=True)
335
  raise HTTPException(status_code=500, detail=str(re))
336
  except Exception as e:
337
- logger.error(f"Unexpected error while fetching item: {e}", exc_info=True)
338
  raise HTTPException(status_code=500, detail=INTERNAL_SERVER_ERROR)
339
 
340
  @staticmethod
@@ -342,7 +345,7 @@ class CatalogueService:
342
  return await CatalogueModel.bulk_insert(items)
343
 
344
  @staticmethod
345
- async def bulk_upload_catalogues(data: List[dict], current_user: dict,merchant_id:str):
346
  valid_records = []
347
  errors = []
348
 
@@ -355,13 +358,13 @@ class CatalogueService:
355
  catalogue_dict["created_by"] = current_user["associate_id"]
356
  valid_records.append(catalogue_dict)
357
  except ValidationError as ve:
358
- logger.warning(f"Row {i + 1} validation error: {ve}")
359
  errors.append({"row": i + 1, "errors": ve.errors()})
360
 
361
  if not valid_records:
362
  raise RuntimeError("No valid records to insert")
363
 
364
- inserted_ids = await CatalogueModel.bulk_create(valid_records,merchant_id)
365
  return {
366
  "inserted_count": len(inserted_ids),
367
  "failed_count": len(errors),
@@ -377,8 +380,7 @@ class CatalogueService:
377
  """
378
  Fetches detailed information about an catalogue, including metadata and preferences.
379
  """
380
- logger.info(
381
- f"Fetching info widget data for catalogue ID: {associate_id}")
382
  try:
383
 
384
  # Fetch user preferences if available
@@ -393,14 +395,13 @@ class CatalogueService:
393
  (view for view in user_pref["views"] if view.get("name") == "catalogues"),
394
  None
395
  )
396
- logger.info(f"User preferences for {associate_id}: {catalogues_view}")
397
  catalogue_data = None
398
  if catalogues_view:
399
  catalogue_data = await CatalogueModel.get_info_widget_data(merchant_id, branch_id, catalogues_view)
400
 
401
  if catalogue_data is None:
402
- raise HTTPException(
403
- status_code=404, detail="Catalogue not found")
404
 
405
  return catalogue_data
406
 
 
3
  from bson import ObjectId
4
  from fastapi import HTTPException
5
  from pydantic import ValidationError, TypeAdapter
6
+ from insightfy_utils.logging import get_logger
7
  from app.models.catalogue_models import CatalogueModel
8
  from app.repositories.db import fetch_one_document
9
 
10
 
 
 
11
  from app.schemas.catalogue_schema import CatalogueBulkImport, CatalogueItemBase, CatalogueListFilter, CatalogueUnion
12
  from app.utils import catalogue_utils
13
 
14
+ logger = get_logger(__name__)
15
 
16
  # Constants for error messages
17
  INTERNAL_SERVER_ERROR = "Internal server error"
 
29
  """
30
  try:
31
  data = item.dict(by_alias=True, exclude_none=True)
32
+ logger.info("Creating catalogue item", extra={"data": data})
33
 
34
  catalogue_inserted_id = await CatalogueModel.create_catalogue_item(data)
35
 
 
39
  }
40
 
41
  except RuntimeError as re:
42
+ logger.error("RuntimeError while creating catalogue item", exc_info=re)
43
  raise HTTPException(status_code=500, detail=str(re))
44
 
45
  except Exception as e:
46
+ logger.error("Unexpected error while creating catalogue item", exc_info=e)
47
  raise HTTPException(status_code=500, detail=INTERNAL_SERVER_ERROR)
48
 
49
  @staticmethod
50
  async def update_catalogue_item(
51
  catalogue_id: str,
52
  update_data: dict,
53
+ merchant_id: str
54
  ) -> dict:
55
 
56
+ result = await CatalogueModel.update_catalogue_data(catalogue_id, update_data, merchant_id)
 
57
  return {"message": "Catalogue updated successfully"} if result else {"message": "No changes made"}
58
 
59
 
 
77
  "list_details_pref",
78
  {"merchant_id": merchant_id, "associate_id": associate_id, "list_type": "catalogues"}
79
  )
80
+ logger.info("User preferences fetched", extra={"associate_id": associate_id, "preferences": user_pref})
81
  return user_pref
82
  except Exception as e:
83
+ logger.warning("Failed to fetch user preferences", extra={"associate_id": associate_id, "error": str(e)})
84
  return None
85
 
86
  @staticmethod
 
207
  """
208
  List catalogue items with optional user preference filtering.
209
  """
210
+ logger.info("Fetching catalogues", extra={"merchant_id": merchant_id, "filters": filters})
211
 
212
  try:
213
  # Initialize base filter criteria
 
230
  CatalogueService._apply_api_filters(filter_criteria, filters)
231
 
232
  logger.debug(
233
+ "Final filter criteria",
234
+ extra={
235
+ "filter": filter_criteria,
236
+ "offset": offset,
237
+ "limit": limit,
238
+ "projection": final_projection_list
239
+ }
240
  )
241
 
242
  return await CatalogueModel.list_items(
 
248
  )
249
 
250
  except RuntimeError as re:
251
+ logger.error("RuntimeError while listing items", exc_info=re)
252
  raise HTTPException(status_code=500, detail=str(re))
253
  except Exception as e:
254
+ logger.error("Unexpected error while listing items", exc_info=e)
255
  raise HTTPException(status_code=500, detail=INTERNAL_SERVER_ERROR)
256
 
257
  @staticmethod
 
268
  raise RuntimeError("Invalid data format returned from model layer")
269
  return items
270
  except Exception as e:
271
+ logger.error("Error fetching POS catalogue items", exc_info=e)
272
  raise RuntimeError("Failed to fetch catalogue items") from e
273
 
274
  @staticmethod
 
292
  )
293
  return result
294
  except Exception as e:
295
+ logger.error("Error fetching aggregated POS catalogue items", exc_info=e)
296
  raise RuntimeError("Failed to fetch catalogue items") from e
297
 
298
  @staticmethod
 
313
  return items
314
 
315
  @staticmethod
316
+ async def update_catalogue_inventory(catalogue_id: str, branch_id: str, merchant_id: str, update_data: dict):
 
317
  filter_criteria = {
318
  "merchant_id": merchant_id,
319
  "branch_id": branch_id,
320
  "catalogue_id": catalogue_id
321
  }
322
+ result = await CatalogueModel.update_catalogue_inventory(filter_criteria, update_data)
323
  return {"message": "Catalogue updated successfully"} if result else {"message": "No changes made"}
324
 
325
  @staticmethod
326
  async def get_catalogue_data(catalogue_id: str, merchant_id: str):
327
+ logger.info("Fetching catalogue", extra={"catalogue_id": catalogue_id})
328
  try:
329
  catalogue_data = await CatalogueModel.get_catalogue_data(catalogue_id, merchant_id)
330
  if catalogue_data is None:
 
334
  # Re-raise HTTP exceptions without modification
335
  raise
336
  except RuntimeError as re:
337
+ logger.error("RuntimeError while fetching item", exc_info=re)
338
  raise HTTPException(status_code=500, detail=str(re))
339
  except Exception as e:
340
+ logger.error("Unexpected error while fetching item", exc_info=e)
341
  raise HTTPException(status_code=500, detail=INTERNAL_SERVER_ERROR)
342
 
343
  @staticmethod
 
345
  return await CatalogueModel.bulk_insert(items)
346
 
347
  @staticmethod
348
+ async def bulk_upload_catalogues(data: List[dict], current_user: dict, merchant_id: str):
349
  valid_records = []
350
  errors = []
351
 
 
358
  catalogue_dict["created_by"] = current_user["associate_id"]
359
  valid_records.append(catalogue_dict)
360
  except ValidationError as ve:
361
+ logger.warning("Row validation error", extra={"row": i + 1, "error": str(ve)})
362
  errors.append({"row": i + 1, "errors": ve.errors()})
363
 
364
  if not valid_records:
365
  raise RuntimeError("No valid records to insert")
366
 
367
+ inserted_ids = await CatalogueModel.bulk_create(valid_records, merchant_id)
368
  return {
369
  "inserted_count": len(inserted_ids),
370
  "failed_count": len(errors),
 
380
  """
381
  Fetches detailed information about an catalogue, including metadata and preferences.
382
  """
383
+ logger.info("Fetching info widget data", extra={"associate_id": associate_id})
 
384
  try:
385
 
386
  # Fetch user preferences if available
 
395
  (view for view in user_pref["views"] if view.get("name") == "catalogues"),
396
  None
397
  )
398
+ logger.info("User preferences fetched", extra={"associate_id": associate_id, "view": catalogues_view})
399
  catalogue_data = None
400
  if catalogues_view:
401
  catalogue_data = await CatalogueModel.get_info_widget_data(merchant_id, branch_id, catalogues_view)
402
 
403
  if catalogue_data is None:
404
+ raise HTTPException(status_code=404, detail="Catalogue not found")
 
405
 
406
  return catalogue_data
407
 
app/services/gift_card_service.py CHANGED
@@ -1,7 +1,7 @@
1
- import logging
2
  from typing import Any, Dict, List, Optional
3
  from fastapi import HTTPException
4
  from pydantic import ValidationError
 
5
 
6
  from app.repositories.gift_card_repository import GiftCardRepository
7
  from app.schemas.gift_card_schema import (
@@ -14,8 +14,7 @@ from app.schemas.gift_card_schema import (
14
  GiftCardStockResponse
15
  )
16
 
17
- # Configure logging for this module
18
- logger = logging.getLogger(__name__)
19
 
20
  # Constants for error messages
21
  INTERNAL_SERVER_ERROR = "Internal server error"
@@ -25,46 +24,25 @@ TEMPLATE_UPDATE_FAILED = "Failed to update gift card template"
25
  TEMPLATE_DELETE_FAILED = "Failed to delete gift card template"
26
 
27
  class GiftCardService:
28
- """
29
- Service layer for Gift Card operations.
30
- Contains business logic and validation for gift card templates.
31
- """
32
 
33
  @staticmethod
34
  async def create_template(
35
  template_data: GiftCardTemplateCreate,
36
  merchant_id: str,
37
  created_by: str,
38
- branch_id:str
39
  ) -> GiftCardTemplateCreateResponse:
40
- """
41
- Create a new gift card template.
42
-
43
- Args:
44
- template_data (GiftCardTemplateCreate): Template data to create
45
- merchant_id (str): Merchant ID
46
- created_by (str): User ID who is creating the template
47
-
48
- Returns:
49
- GiftCardTemplateCreateResponse: Created template info
50
-
51
- Raises:
52
- HTTPException: If creation fails
53
- """
54
  try:
55
- # Convert Pydantic model to dict and add merchant info
56
  data = template_data.model_dump(by_alias=True, exclude_none=True)
57
  data['merchant_id'] = merchant_id
58
  data['created_by'] = created_by
59
- data['branch_id']=branch_id
60
-
61
- logger.info(f"Creating gift card template for merchant {merchant_id}")
62
- # check name & merchant id already exisiting or not
63
 
64
- # Create template via repository
65
- template_id = await GiftCardRepository.create_template(data,merchant_id)
66
 
67
- # Get the created template to return full info
68
  created_template = await GiftCardRepository.get_template_by_id(template_id)
69
  if not created_template:
70
  raise HTTPException(status_code=500, detail="Template created but could not be retrieved")
@@ -72,12 +50,12 @@ class GiftCardService:
72
  return template_id
73
 
74
  except ValidationError as ve:
75
- logger.error(f"Validation error creating template: {ve}")
76
  raise HTTPException(status_code=400, detail=f"Validation error: {ve}")
77
  except HTTPException:
78
  raise
79
  except Exception as e:
80
- logger.error(f"Service error creating template: {e}", exc_info=True)
81
  raise HTTPException(status_code=500, detail=TEMPLATE_CREATION_FAILED)
82
 
83
  @staticmethod
@@ -87,37 +65,19 @@ class GiftCardService:
87
  offset: int = 0,
88
  limit: int = 100
89
  ) -> GiftCardTemplateListResponse:
90
- """
91
- Get gift card templates with pagination and filtering.
92
-
93
- Args:
94
- merchant_id (str): Merchant ID
95
- filters (Optional[GiftCardTemplateFilter]): Filter criteria
96
- offset (int): Number of items to skip
97
- limit (int): Maximum number of items to return
98
-
99
- Returns:
100
- GiftCardTemplateListResponse: Paginated list of templates
101
-
102
- Raises:
103
- HTTPException: If retrieval fails
104
- """
105
  try:
106
- # Build filter criteria
107
  filter_criteria = GiftCardRepository.build_filter_criteria(merchant_id, filters)
 
108
 
109
- logger.info(f"Fetching templates for merchant {merchant_id} with filters: {filter_criteria}")
110
-
111
- # Get templates from repository
112
  result = await GiftCardRepository.list_gift_card(filter_criteria, offset, limit)
113
 
114
- # Convert to response models
115
  templates = []
116
  for template_data in result['templates']:
117
  try:
118
  templates.append(GiftCardTemplateResponse(**template_data))
119
  except ValidationError as ve:
120
- logger.warning(f"Failed to parse template: {template_data}, error: {ve}")
121
  continue
122
  return GiftCardTemplateListResponse(
123
  templates=templates,
@@ -129,36 +89,20 @@ class GiftCardService:
129
  except HTTPException:
130
  raise
131
  except Exception as e:
132
- logger.error(f"Service error getting templates: {e}", exc_info=True)
133
  raise HTTPException(status_code=500, detail=INTERNAL_SERVER_ERROR)
134
 
135
  @staticmethod
136
- async def get_template_by_id(
137
- template_id: str,
138
- merchant_id: str
139
- ) -> dict:
140
- """
141
- Get a specific gift card template by ID.
142
-
143
- Args:
144
- template_id (str): Template ID to retrieve
145
- merchant_id (str): Merchant ID for security
146
-
147
- Returns:
148
- GiftCardTemplateResponse: Template data
149
-
150
- Raises:
151
- HTTPException: If template not found or access denied
152
- """
153
  try:
154
- logger.info(f"Fetching template {template_id} for merchant {merchant_id}")
155
 
156
  template = await GiftCardRepository.get_template_by_id(template_id)
157
 
158
  if not template:
159
  raise HTTPException(status_code=404, detail=TEMPLATE_NOT_FOUND)
160
 
161
- # Check if template belongs to the merchant
162
  if template.get('merchant_id') != merchant_id:
163
  raise HTTPException(status_code=404, detail=TEMPLATE_NOT_FOUND)
164
 
@@ -167,10 +111,10 @@ class GiftCardService:
167
  except HTTPException:
168
  raise
169
  except ValidationError as ve:
170
- logger.error(f"Validation error parsing template {template_id}: {ve}")
171
  raise HTTPException(status_code=500, detail="Template data validation failed")
172
  except Exception as e:
173
- logger.error(f"Service error getting template {template_id}: {e}", exc_info=True)
174
  raise HTTPException(status_code=500, detail=INTERNAL_SERVER_ERROR)
175
 
176
  @staticmethod
@@ -179,24 +123,10 @@ class GiftCardService:
179
  update_data: GiftCardTemplateUpdate,
180
  merchant_id: str
181
  ) -> GiftCardTemplateResponse:
182
- """
183
- Update a gift card template.
184
-
185
- Args:
186
- template_id (str): Template ID to update
187
- update_data (GiftCardTemplateUpdate): Update data
188
- merchant_id (str): Merchant ID for security
189
-
190
- Returns:
191
- GiftCardTemplateResponse: Updated template data
192
-
193
- Raises:
194
- HTTPException: If update fails or template not found
195
- """
196
  try:
197
- logger.info(f"Updating template {template_id} for merchant {merchant_id}")
198
 
199
- # First verify the template exists and belongs to the merchant
200
  existing_template = await GiftCardRepository.get_template_by_id(template_id)
201
  if not existing_template:
202
  raise HTTPException(status_code=404, detail=TEMPLATE_NOT_FOUND)
@@ -204,56 +134,36 @@ class GiftCardService:
204
  if existing_template.get('merchant_id') != merchant_id:
205
  raise HTTPException(status_code=404, detail=TEMPLATE_NOT_FOUND)
206
 
207
- # Convert update data to dict, excluding None values
208
  update_dict = update_data.model_dump(by_alias=True, exclude_none=True)
209
 
210
  if not update_dict:
211
  raise HTTPException(status_code=400, detail="No update data provided")
212
 
213
- # Validate business rules for updates
214
  await GiftCardService._validate_template_update(existing_template, update_dict)
215
 
216
- # Perform update
217
  success = await GiftCardRepository.update_template(template_id, update_dict)
218
 
219
  if not success:
220
  raise HTTPException(status_code=500, detail=TEMPLATE_UPDATE_FAILED)
221
 
222
- # Return updated template
223
  updated_template = await GiftCardRepository.get_template_by_id(template_id)
224
  return GiftCardTemplateResponse(**updated_template)
225
 
226
  except HTTPException:
227
  raise
228
  except ValidationError as ve:
229
- logger.error(f"Validation error updating template {template_id}: {ve}")
230
  raise HTTPException(status_code=400, detail=f"Validation error: {ve}")
231
  except Exception as e:
232
- logger.error(f"Service error updating template {template_id}: {e}", exc_info=True)
233
  raise HTTPException(status_code=500, detail=TEMPLATE_UPDATE_FAILED)
234
 
235
  @staticmethod
236
- async def delete_template(
237
- template_id: str,
238
- merchant_id: str
239
- ) -> Dict[str, str]:
240
- """
241
- Delete a gift card template.
242
-
243
- Args:
244
- template_id (str): Template ID to delete
245
- merchant_id (str): Merchant ID for security
246
-
247
- Returns:
248
- Dict[str, str]: Success message
249
-
250
- Raises:
251
- HTTPException: If deletion fails or template not found
252
- """
253
  try:
254
- logger.info(f"Deleting template {template_id} for merchant {merchant_id}")
255
 
256
- # First verify the template exists and belongs to the merchant
257
  existing_template = await GiftCardRepository.get_template_by_id(template_id)
258
  if not existing_template:
259
  raise HTTPException(status_code=404, detail=TEMPLATE_NOT_FOUND)
@@ -261,16 +171,13 @@ class GiftCardService:
261
  if existing_template.get('merchant_id') != merchant_id:
262
  raise HTTPException(status_code=404, detail=TEMPLATE_NOT_FOUND)
263
 
264
- # Check if template can be deleted (business rule: no issued cards)
265
  issued_count = existing_template.get('issued_count', 0)
266
  if issued_count > 0:
267
-
268
  raise HTTPException(
269
  status_code=400,
270
  detail=f"Cannot delete template with {issued_count} issued cards. Consider deactivating instead."
271
  )
272
 
273
- # Perform deletion
274
  success = await GiftCardRepository.delete_template(template_id)
275
 
276
  if not success:
@@ -281,31 +188,15 @@ class GiftCardService:
281
  except HTTPException:
282
  raise
283
  except Exception as e:
284
- logger.error(f"Service error deleting template {template_id}: {e}", exc_info=True)
285
  raise HTTPException(status_code=500, detail=TEMPLATE_DELETE_FAILED)
286
 
287
  @staticmethod
288
- async def check_stock_availability(
289
- template_id: str,
290
- merchant_id: str
291
- ) -> GiftCardStockResponse:
292
- """
293
- Check stock availability for a template.
294
-
295
- Args:
296
- template_id (str): Template ID to check
297
- merchant_id (str): Merchant ID for security
298
-
299
- Returns:
300
- GiftCardStockResponse: Stock availability info
301
-
302
- Raises:
303
- HTTPException: If template not found or access denied
304
- """
305
  try:
306
- logger.info(f"Checking stock for template {template_id}")
307
 
308
- # First verify the template exists and belongs to the merchant
309
  existing_template = await GiftCardRepository.get_template_by_id(template_id)
310
  if not existing_template:
311
  raise HTTPException(status_code=404, detail=TEMPLATE_NOT_FOUND)
@@ -313,28 +204,18 @@ class GiftCardService:
313
  if existing_template.get('merchant_id') != merchant_id:
314
  raise HTTPException(status_code=404, detail=TEMPLATE_NOT_FOUND)
315
 
316
- # Get stock information
317
  stock_info = await GiftCardRepository.check_stock_availability(template_id)
318
-
319
  return GiftCardStockResponse(**stock_info)
320
 
321
  except HTTPException:
322
  raise
323
  except Exception as e:
324
- logger.error(f"Service error checking stock for template {template_id}: {e}", exc_info=True)
325
  raise HTTPException(status_code=500, detail=INTERNAL_SERVER_ERROR)
326
 
327
  @staticmethod
328
  async def get_active_templates(merchant_id: str) -> List[GiftCardTemplateResponse]:
329
- """
330
- Get active templates for a merchant.
331
-
332
- Args:
333
- merchant_id (str): Merchant ID
334
-
335
- Returns:
336
- List[GiftCardTemplateResponse]: List of active templates
337
- """
338
  try:
339
  templates_data = await GiftCardRepository.get_active_templates(merchant_id)
340
 
@@ -343,13 +224,13 @@ class GiftCardService:
343
  try:
344
  templates.append(GiftCardTemplateResponse(**template_data))
345
  except ValidationError as ve:
346
- logger.warning(f"Failed to parse active template: {template_data}, error: {ve}")
347
  continue
348
 
349
  return templates
350
 
351
  except Exception as e:
352
- logger.error(f"Service error getting active templates: {e}", exc_info=True)
353
  raise HTTPException(status_code=500, detail=INTERNAL_SERVER_ERROR)
354
 
355
  @staticmethod
@@ -357,16 +238,7 @@ class GiftCardService:
357
  existing_template: Dict[str, Any],
358
  update_data: Dict[str, Any]
359
  ) -> None:
360
- """
361
- Validate business rules for template updates.
362
-
363
- Args:
364
- existing_template (Dict[str, Any]): Current template data
365
- update_data (Dict[str, Any]): Proposed updates
366
-
367
- Raises:
368
- HTTPException: If validation fails
369
- """
370
  # Check if trying to change delivery_type
371
  if 'delivery_type' in update_data:
372
  current_delivery_type = existing_template.get('delivery_type')
 
 
1
  from typing import Any, Dict, List, Optional
2
  from fastapi import HTTPException
3
  from pydantic import ValidationError
4
+ from insightfy_utils.logging import get_logger
5
 
6
  from app.repositories.gift_card_repository import GiftCardRepository
7
  from app.schemas.gift_card_schema import (
 
14
  GiftCardStockResponse
15
  )
16
 
17
+ logger = get_logger(__name__)
 
18
 
19
  # Constants for error messages
20
  INTERNAL_SERVER_ERROR = "Internal server error"
 
24
  TEMPLATE_DELETE_FAILED = "Failed to delete gift card template"
25
 
26
  class GiftCardService:
27
+ """Service layer for Gift Card operations."""
 
 
 
28
 
29
  @staticmethod
30
  async def create_template(
31
  template_data: GiftCardTemplateCreate,
32
  merchant_id: str,
33
  created_by: str,
34
+ branch_id: str
35
  ) -> GiftCardTemplateCreateResponse:
36
+ """Create a new gift card template."""
 
 
 
 
 
 
 
 
 
 
 
 
 
37
  try:
 
38
  data = template_data.model_dump(by_alias=True, exclude_none=True)
39
  data['merchant_id'] = merchant_id
40
  data['created_by'] = created_by
41
+ data['branch_id'] = branch_id
 
 
 
42
 
43
+ logger.info("Creating gift card template", extra={"merchant_id": merchant_id})
 
44
 
45
+ template_id = await GiftCardRepository.create_template(data, merchant_id)
46
  created_template = await GiftCardRepository.get_template_by_id(template_id)
47
  if not created_template:
48
  raise HTTPException(status_code=500, detail="Template created but could not be retrieved")
 
50
  return template_id
51
 
52
  except ValidationError as ve:
53
+ logger.error("Validation error creating template", exc_info=ve)
54
  raise HTTPException(status_code=400, detail=f"Validation error: {ve}")
55
  except HTTPException:
56
  raise
57
  except Exception as e:
58
+ logger.error("Service error creating template", exc_info=e)
59
  raise HTTPException(status_code=500, detail=TEMPLATE_CREATION_FAILED)
60
 
61
  @staticmethod
 
65
  offset: int = 0,
66
  limit: int = 100
67
  ) -> GiftCardTemplateListResponse:
68
+ """Get gift card templates with pagination and filtering."""
 
 
 
 
 
 
 
 
 
 
 
 
 
 
69
  try:
 
70
  filter_criteria = GiftCardRepository.build_filter_criteria(merchant_id, filters)
71
+ logger.info("Fetching templates", extra={"merchant_id": merchant_id, "filters": filter_criteria})
72
 
 
 
 
73
  result = await GiftCardRepository.list_gift_card(filter_criteria, offset, limit)
74
 
 
75
  templates = []
76
  for template_data in result['templates']:
77
  try:
78
  templates.append(GiftCardTemplateResponse(**template_data))
79
  except ValidationError as ve:
80
+ logger.warning("Failed to parse template", extra={"template": template_data, "error": str(ve)})
81
  continue
82
  return GiftCardTemplateListResponse(
83
  templates=templates,
 
89
  except HTTPException:
90
  raise
91
  except Exception as e:
92
+ logger.error("Service error getting templates", exc_info=e)
93
  raise HTTPException(status_code=500, detail=INTERNAL_SERVER_ERROR)
94
 
95
  @staticmethod
96
+ async def get_template_by_id(template_id: str, merchant_id: str) -> dict:
97
+ """Get a specific gift card template by ID."""
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
98
  try:
99
+ logger.info("Fetching template", extra={"template_id": template_id, "merchant_id": merchant_id})
100
 
101
  template = await GiftCardRepository.get_template_by_id(template_id)
102
 
103
  if not template:
104
  raise HTTPException(status_code=404, detail=TEMPLATE_NOT_FOUND)
105
 
 
106
  if template.get('merchant_id') != merchant_id:
107
  raise HTTPException(status_code=404, detail=TEMPLATE_NOT_FOUND)
108
 
 
111
  except HTTPException:
112
  raise
113
  except ValidationError as ve:
114
+ logger.error("Validation error parsing template", extra={"template_id": template_id}, exc_info=ve)
115
  raise HTTPException(status_code=500, detail="Template data validation failed")
116
  except Exception as e:
117
+ logger.error("Service error getting template", extra={"template_id": template_id}, exc_info=e)
118
  raise HTTPException(status_code=500, detail=INTERNAL_SERVER_ERROR)
119
 
120
  @staticmethod
 
123
  update_data: GiftCardTemplateUpdate,
124
  merchant_id: str
125
  ) -> GiftCardTemplateResponse:
126
+ """Update a gift card template."""
 
 
 
 
 
 
 
 
 
 
 
 
 
127
  try:
128
+ logger.info("Updating template", extra={"template_id": template_id, "merchant_id": merchant_id})
129
 
 
130
  existing_template = await GiftCardRepository.get_template_by_id(template_id)
131
  if not existing_template:
132
  raise HTTPException(status_code=404, detail=TEMPLATE_NOT_FOUND)
 
134
  if existing_template.get('merchant_id') != merchant_id:
135
  raise HTTPException(status_code=404, detail=TEMPLATE_NOT_FOUND)
136
 
 
137
  update_dict = update_data.model_dump(by_alias=True, exclude_none=True)
138
 
139
  if not update_dict:
140
  raise HTTPException(status_code=400, detail="No update data provided")
141
 
 
142
  await GiftCardService._validate_template_update(existing_template, update_dict)
143
 
 
144
  success = await GiftCardRepository.update_template(template_id, update_dict)
145
 
146
  if not success:
147
  raise HTTPException(status_code=500, detail=TEMPLATE_UPDATE_FAILED)
148
 
 
149
  updated_template = await GiftCardRepository.get_template_by_id(template_id)
150
  return GiftCardTemplateResponse(**updated_template)
151
 
152
  except HTTPException:
153
  raise
154
  except ValidationError as ve:
155
+ logger.error("Validation error updating template", extra={"template_id": template_id}, exc_info=ve)
156
  raise HTTPException(status_code=400, detail=f"Validation error: {ve}")
157
  except Exception as e:
158
+ logger.error("Service error updating template", extra={"template_id": template_id}, exc_info=e)
159
  raise HTTPException(status_code=500, detail=TEMPLATE_UPDATE_FAILED)
160
 
161
  @staticmethod
162
+ async def delete_template(template_id: str, merchant_id: str) -> Dict[str, str]:
163
+ """Delete a gift card template."""
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
164
  try:
165
+ logger.info("Deleting template", extra={"template_id": template_id, "merchant_id": merchant_id})
166
 
 
167
  existing_template = await GiftCardRepository.get_template_by_id(template_id)
168
  if not existing_template:
169
  raise HTTPException(status_code=404, detail=TEMPLATE_NOT_FOUND)
 
171
  if existing_template.get('merchant_id') != merchant_id:
172
  raise HTTPException(status_code=404, detail=TEMPLATE_NOT_FOUND)
173
 
 
174
  issued_count = existing_template.get('issued_count', 0)
175
  if issued_count > 0:
 
176
  raise HTTPException(
177
  status_code=400,
178
  detail=f"Cannot delete template with {issued_count} issued cards. Consider deactivating instead."
179
  )
180
 
 
181
  success = await GiftCardRepository.delete_template(template_id)
182
 
183
  if not success:
 
188
  except HTTPException:
189
  raise
190
  except Exception as e:
191
+ logger.error("Service error deleting template", extra={"template_id": template_id}, exc_info=e)
192
  raise HTTPException(status_code=500, detail=TEMPLATE_DELETE_FAILED)
193
 
194
  @staticmethod
195
+ async def check_stock_availability(template_id: str, merchant_id: str) -> GiftCardStockResponse:
196
+ """Check stock availability for a template."""
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
197
  try:
198
+ logger.info("Checking stock for template", extra={"template_id": template_id})
199
 
 
200
  existing_template = await GiftCardRepository.get_template_by_id(template_id)
201
  if not existing_template:
202
  raise HTTPException(status_code=404, detail=TEMPLATE_NOT_FOUND)
 
204
  if existing_template.get('merchant_id') != merchant_id:
205
  raise HTTPException(status_code=404, detail=TEMPLATE_NOT_FOUND)
206
 
 
207
  stock_info = await GiftCardRepository.check_stock_availability(template_id)
 
208
  return GiftCardStockResponse(**stock_info)
209
 
210
  except HTTPException:
211
  raise
212
  except Exception as e:
213
+ logger.error("Service error checking stock", extra={"template_id": template_id}, exc_info=e)
214
  raise HTTPException(status_code=500, detail=INTERNAL_SERVER_ERROR)
215
 
216
  @staticmethod
217
  async def get_active_templates(merchant_id: str) -> List[GiftCardTemplateResponse]:
218
+ """Get active templates for a merchant."""
 
 
 
 
 
 
 
 
219
  try:
220
  templates_data = await GiftCardRepository.get_active_templates(merchant_id)
221
 
 
224
  try:
225
  templates.append(GiftCardTemplateResponse(**template_data))
226
  except ValidationError as ve:
227
+ logger.warning("Failed to parse active template", extra={"template": template_data, "error": str(ve)})
228
  continue
229
 
230
  return templates
231
 
232
  except Exception as e:
233
+ logger.error("Service error getting active templates", exc_info=e)
234
  raise HTTPException(status_code=500, detail=INTERNAL_SERVER_ERROR)
235
 
236
  @staticmethod
 
238
  existing_template: Dict[str, Any],
239
  update_data: Dict[str, Any]
240
  ) -> None:
241
+ """Validate business rules for template updates."""
 
 
 
 
 
 
 
 
 
242
  # Check if trying to change delivery_type
243
  if 'delivery_type' in update_data:
244
  current_delivery_type = existing_template.get('delivery_type')
app/services/promotion_service.py CHANGED
@@ -1,4 +1,3 @@
1
- import logging
2
  import json
3
  from datetime import date, datetime
4
  from zoneinfo import ZoneInfo
@@ -8,22 +7,20 @@ from typing import Any, Dict, Optional, Union, List
8
  from bson import ObjectId
9
  from fastapi import HTTPException
10
  from pydantic import ValidationError
 
11
 
12
  from app.repositories.db import fetch_one_document
13
  from app.schemas.promotion_schema import PromotionMetaData, PromotionUpdate, PromotionValidateRequest
14
  from app.models.promotion_model import PromotionManagementModel
15
 
16
-
17
- logger = logging.getLogger(__name__)
18
 
19
  class PromotionManagementServices:
20
 
21
  @staticmethod
22
- async def create_promotion_data(item: PromotionMetaData ):
23
-
24
  data = item.dict(by_alias=True, exclude_none=True)
25
-
26
- logger.info(f"Creating promotion with data: {data}")
27
  promotion_inserted_id = await PromotionManagementModel.create_promotion_data(data)
28
 
29
  return {
@@ -42,19 +39,28 @@ class PromotionManagementServices:
42
  limit: int
43
  ) -> Dict[str, Any]:
44
  logger.info(
45
- f"Listing promotions for merchant_id={merchant_id}, status={status}, "
46
- f"is_active={is_active}, q={q}, sort={sort}, offset={offset}, limit={limit}"
 
 
 
 
 
 
 
 
47
  )
48
  try:
49
  return await PromotionManagementModel.list_promotion(
50
  merchant_id, status, is_active, q, sort, offset, limit
51
  )
52
  except RuntimeError as re:
53
- logger.error(f"RuntimeError while listing items: {re}", exc_info=True)
54
  raise HTTPException(status_code=500, detail=str(re))
55
  except Exception as e:
56
- logger.error(f"Unexpected error while listing items: {e}", exc_info=True)
57
  raise HTTPException(status_code=500, detail="Internal server error")
 
58
  @staticmethod
59
  async def compute_discount(promo: dict, data: PromotionValidateRequest) -> Dict[str, Any]:
60
  discount = 0.0
@@ -99,28 +105,25 @@ class PromotionManagementServices:
99
  return True,"valid"
100
 
101
  @staticmethod
102
- async def validate_promotion(data:PromotionValidateRequest):
103
- logging.info("Promotion code validation")
104
  try:
105
  promotion_data = None
106
 
107
  if data.code:
108
- # 1. Specific Promotion Code Provided
109
- logging.info(f"Fetching promotion data for Promotion code: {data.code}")
110
  promotion_data = await PromotionManagementModel.get_promotion_by_code(data.code)
111
 
112
  if not promotion_data:
113
  return {"valid": False, "reason": "invalid code"}
114
 
115
- # Validate the single specific Promotion
116
  is_valid, reason = PromotionManagementServices.validate_coupon(promotion_data, data)
117
 
118
  if not is_valid:
119
  return {"valid": False, "reason": reason}
120
 
121
  else:
122
- # 2. Auto-Apply (No Promotion code)
123
- logging.info(f"Fetching auto-apply promotion code for merchant: {data.merchant_id}")
124
  promotions = await PromotionManagementModel.get_promotion_by_merchant_id(data.merchant_id)
125
 
126
  best_promotion = None
@@ -130,11 +133,9 @@ class PromotionManagementServices:
130
  is_valid, reason = PromotionManagementServices.validate_coupon(promo, data)
131
 
132
  if is_valid:
133
- # Compute discount to find the best one
134
  discount_info = await PromotionManagementServices.compute_discount(promo, data)
135
  current_discount = discount_info["discount_amount"]
136
 
137
- # Precedence Rule: Select the one with the highest discount amount
138
  if current_discount > max_discount_amount:
139
  max_discount_amount = current_discount
140
  best_promotion = promo
@@ -144,8 +145,6 @@ class PromotionManagementServices:
144
  is_valid = True
145
  else:
146
  return {"valid": False, "reason": "No applicable auto-apply promotions found"}
147
-
148
-
149
 
150
  if promotion_data:
151
  if promotion_data.get('code'):
@@ -164,10 +163,9 @@ class PromotionManagementServices:
164
  return response
165
 
166
  return {"valid": False, "reason": "No valid promotion could be applied"}
167
-
168
 
169
  except Exception as e:
170
- logger.error(f"Error while validating Promotion: {e}", exc_info=True)
171
  raise RuntimeError("Failed to validate Promotion") from e
172
 
173
  @staticmethod
@@ -176,15 +174,9 @@ class PromotionManagementServices:
176
  associate_id: str,
177
  branch_id: str = None
178
  ) -> Dict[str, Any]:
179
- """
180
- Fetches detailed information about an promotion, including metadata and preferences.
181
- """
182
- logger.info(
183
- f"Fetching info widget data for ID: {associate_id}")
184
  try:
185
-
186
- # Fetch user preferences if available
187
-
188
  user_pref = await fetch_one_document(
189
  "list_key_score_cards",
190
  {"merchant_id": merchant_id}
@@ -195,15 +187,14 @@ class PromotionManagementServices:
195
  (view for view in user_pref["views"] if view.get("name") == "promotions"),
196
  None
197
  )
198
- logger.info(f"User preferences for {associate_id}: {promotion_view}")
199
  promotion_data = None
200
 
201
  if promotion_view:
202
  promotion_data = await PromotionManagementModel.get_info_widget_data(merchant_id, branch_id, promotion_view)
203
 
204
  if promotion_data is None:
205
- raise HTTPException(
206
- status_code=404, detail="promotion not found")
207
 
208
  return promotion_data
209
 
@@ -217,14 +208,13 @@ class PromotionManagementServices:
217
  return None
218
 
219
  if existing.get("used_count", 0) > 0:
220
- if ( "discount_type" in update_data and update_data["discount_type"] != existing.get("discount_type")):
221
  raise ValueError("Cannot change discount_type after promotion has been used")
222
 
223
  if ("value" in update_data and update_data["value"] != existing.get("value")):
224
  raise ValueError("Cannot change discount value after promotion has been used")
225
 
226
  updated = await PromotionManagementModel.update_promotion(promotion_id, update_data)
227
- print(updated)
228
  if updated:
229
  for k, v in updated.items():
230
  if isinstance(v, ObjectId):
@@ -232,18 +222,16 @@ class PromotionManagementServices:
232
 
233
  return updated
234
 
235
-
236
  async def get_by_promotion_id(promotion_id: str):
237
  try:
238
- logger.info(f"fetching promotion with Id: {promotion_id}")
239
- result= await PromotionManagementModel.get_by_promotion_id(promotion_id)
240
  if result:
241
- # Convert all ObjectIds β†’ str
242
  for k, v in result.items():
243
  if isinstance(v, ObjectId):
244
  result[k] = str(v)
245
  return result
246
 
247
  except RuntimeError as re:
248
- logger.error(f"RuntimeError while fetching promotion: {re}", exc_info=True)
249
  raise HTTPException(status_code=500, detail=str(re))
 
 
1
  import json
2
  from datetime import date, datetime
3
  from zoneinfo import ZoneInfo
 
7
  from bson import ObjectId
8
  from fastapi import HTTPException
9
  from pydantic import ValidationError
10
+ from insightfy_utils.logging import get_logger
11
 
12
  from app.repositories.db import fetch_one_document
13
  from app.schemas.promotion_schema import PromotionMetaData, PromotionUpdate, PromotionValidateRequest
14
  from app.models.promotion_model import PromotionManagementModel
15
 
16
+ logger = get_logger(__name__)
 
17
 
18
  class PromotionManagementServices:
19
 
20
  @staticmethod
21
+ async def create_promotion_data(item: PromotionMetaData):
 
22
  data = item.dict(by_alias=True, exclude_none=True)
23
+ logger.info("Creating promotion", extra={"data": data})
 
24
  promotion_inserted_id = await PromotionManagementModel.create_promotion_data(data)
25
 
26
  return {
 
39
  limit: int
40
  ) -> Dict[str, Any]:
41
  logger.info(
42
+ "Listing promotions",
43
+ extra={
44
+ "merchant_id": merchant_id,
45
+ "status": status,
46
+ "is_active": is_active,
47
+ "q": q,
48
+ "sort": sort,
49
+ "offset": offset,
50
+ "limit": limit
51
+ }
52
  )
53
  try:
54
  return await PromotionManagementModel.list_promotion(
55
  merchant_id, status, is_active, q, sort, offset, limit
56
  )
57
  except RuntimeError as re:
58
+ logger.error("RuntimeError while listing items", exc_info=re)
59
  raise HTTPException(status_code=500, detail=str(re))
60
  except Exception as e:
61
+ logger.error("Unexpected error while listing items", exc_info=e)
62
  raise HTTPException(status_code=500, detail="Internal server error")
63
+
64
  @staticmethod
65
  async def compute_discount(promo: dict, data: PromotionValidateRequest) -> Dict[str, Any]:
66
  discount = 0.0
 
105
  return True,"valid"
106
 
107
  @staticmethod
108
+ async def validate_promotion(data: PromotionValidateRequest):
109
+ logger.info("Promotion code validation")
110
  try:
111
  promotion_data = None
112
 
113
  if data.code:
114
+ logger.info("Fetching promotion data for code", extra={"code": data.code})
 
115
  promotion_data = await PromotionManagementModel.get_promotion_by_code(data.code)
116
 
117
  if not promotion_data:
118
  return {"valid": False, "reason": "invalid code"}
119
 
 
120
  is_valid, reason = PromotionManagementServices.validate_coupon(promotion_data, data)
121
 
122
  if not is_valid:
123
  return {"valid": False, "reason": reason}
124
 
125
  else:
126
+ logger.info("Fetching auto-apply promotion", extra={"merchant_id": data.merchant_id})
 
127
  promotions = await PromotionManagementModel.get_promotion_by_merchant_id(data.merchant_id)
128
 
129
  best_promotion = None
 
133
  is_valid, reason = PromotionManagementServices.validate_coupon(promo, data)
134
 
135
  if is_valid:
 
136
  discount_info = await PromotionManagementServices.compute_discount(promo, data)
137
  current_discount = discount_info["discount_amount"]
138
 
 
139
  if current_discount > max_discount_amount:
140
  max_discount_amount = current_discount
141
  best_promotion = promo
 
145
  is_valid = True
146
  else:
147
  return {"valid": False, "reason": "No applicable auto-apply promotions found"}
 
 
148
 
149
  if promotion_data:
150
  if promotion_data.get('code'):
 
163
  return response
164
 
165
  return {"valid": False, "reason": "No valid promotion could be applied"}
 
166
 
167
  except Exception as e:
168
+ logger.error("Error while validating Promotion", exc_info=e)
169
  raise RuntimeError("Failed to validate Promotion") from e
170
 
171
  @staticmethod
 
174
  associate_id: str,
175
  branch_id: str = None
176
  ) -> Dict[str, Any]:
177
+ """Fetches detailed information about a promotion, including metadata and preferences."""
178
+ logger.info("Fetching info widget data", extra={"associate_id": associate_id})
 
 
 
179
  try:
 
 
 
180
  user_pref = await fetch_one_document(
181
  "list_key_score_cards",
182
  {"merchant_id": merchant_id}
 
187
  (view for view in user_pref["views"] if view.get("name") == "promotions"),
188
  None
189
  )
190
+ logger.info("User preferences fetched", extra={"associate_id": associate_id, "view": promotion_view})
191
  promotion_data = None
192
 
193
  if promotion_view:
194
  promotion_data = await PromotionManagementModel.get_info_widget_data(merchant_id, branch_id, promotion_view)
195
 
196
  if promotion_data is None:
197
+ raise HTTPException(status_code=404, detail="promotion not found")
 
198
 
199
  return promotion_data
200
 
 
208
  return None
209
 
210
  if existing.get("used_count", 0) > 0:
211
+ if ("discount_type" in update_data and update_data["discount_type"] != existing.get("discount_type")):
212
  raise ValueError("Cannot change discount_type after promotion has been used")
213
 
214
  if ("value" in update_data and update_data["value"] != existing.get("value")):
215
  raise ValueError("Cannot change discount value after promotion has been used")
216
 
217
  updated = await PromotionManagementModel.update_promotion(promotion_id, update_data)
 
218
  if updated:
219
  for k, v in updated.items():
220
  if isinstance(v, ObjectId):
 
222
 
223
  return updated
224
 
 
225
  async def get_by_promotion_id(promotion_id: str):
226
  try:
227
+ logger.info("Fetching promotion", extra={"promotion_id": promotion_id})
228
+ result = await PromotionManagementModel.get_by_promotion_id(promotion_id)
229
  if result:
 
230
  for k, v in result.items():
231
  if isinstance(v, ObjectId):
232
  result[k] = str(v)
233
  return result
234
 
235
  except RuntimeError as re:
236
+ logger.error("RuntimeError while fetching promotion", exc_info=re)
237
  raise HTTPException(status_code=500, detail=str(re))
app/services/supplier_service.py CHANGED
@@ -1,10 +1,10 @@
1
-
2
  import logging
3
  import json
4
  from typing import Any, Dict, List, Union
5
 
6
  from fastapi import HTTPException
7
  from pydantic import ValidationError
 
8
 
9
  from app.models.supplier_models import SupplierModel
10
  from app.schemas.supplier_schema import Supplier
@@ -20,7 +20,7 @@ except ImportError:
20
  # Constants
21
  INTERNAL_SERVER_ERROR = "Internal server error"
22
 
23
- logger = logging.getLogger(__name__)
24
 
25
  class SupplierService:
26
 
@@ -28,22 +28,21 @@ class SupplierService:
28
  async def create_supplier(item_data: Supplier) -> Dict[str, Any]:
29
  try:
30
  data = item_data.dict(by_alias=True, exclude_none=True)
31
- logger.info(f"Creating supplier with data: {data}")
32
  data_id = await SupplierModel.create_supplier(data)
33
  return {"message": "Supplier created successfully", "id": data_id}
34
 
35
  except RuntimeError as re:
36
- logger.error(f"RuntimeError while creating supplier: {re}", exc_info=True)
37
  raise HTTPException(status_code=500, detail=str(re))
38
 
39
  except Exception as e:
40
- logger.error(f"Unexpected error while creating supplier: {e}", exc_info=True)
41
  raise HTTPException(status_code=500, detail=INTERNAL_SERVER_ERROR)
42
 
43
-
44
  @staticmethod
45
  async def get_supplier_data(supplier_id: str, merchant_id: str, branch_id: str = None):
46
- logger.info(f"Fetching supplier with ID: {supplier_id}")
47
  try:
48
  supplier_data = await SupplierModel.get_supplier_data(supplier_id, merchant_id)
49
  if supplier_data is None:
@@ -52,10 +51,10 @@ class SupplierService:
52
  except HTTPException as he:
53
  raise he
54
  except RuntimeError as re:
55
- logger.error(f"RuntimeError while fetching supplier: {re}", exc_info=True)
56
  raise HTTPException(status_code=500, detail=str(re))
57
  except Exception as e:
58
- logger.error(f"Unexpected error while fetching supplier: {e}", exc_info=True)
59
  raise HTTPException(status_code=500, detail=INTERNAL_SERVER_ERROR)
60
 
61
  @staticmethod
@@ -67,62 +66,49 @@ class SupplierService:
67
  limit: int = 10,
68
  projection_list: List[str] = None
69
  ) -> dict:
70
- """
71
- Retrieves a list of suppliers with pagination, filtering and user preferences support.
72
-
73
- Args:
74
- merchant_id: The ID of the merchant
75
- associate_id: The ID of the user requesting the data (for preferences)
76
- filters: Optional dictionary of filter criteria
77
- offset: Number of records to skip (for pagination)
78
- limit: Maximum number of records to return
79
- projection_list: Optional list of fields to include in the response
80
-
81
- Returns:
82
- Dictionary containing suppliers list, total count, and pagination info
83
- """
84
- logger.info(f"Listing suppliers for {merchant_id=} with incoming filters={filters}")
85
  try:
86
- # Build base query with merchant_id
87
  filter_criteria = {"merchant_id": merchant_id}
88
-
89
- # Fetch and apply user preferences if cache is available
90
  final_projection_list = projection_list
91
 
92
  if CACHE_AVAILABLE and associate_id:
93
  try:
94
  cache_key = f"user_pref:{merchant_id}:{associate_id}"
95
- logger.info(f"Fetching user preferences from cache with key: {cache_key}")
96
 
97
  user_pref = await get_or_set_cache(
98
  cache_key,
99
  fetch_func=lambda: fetch_user_preferences(merchant_id, associate_id)
100
  )
101
 
102
- logger.info(f"User preferences for {associate_id}: {user_pref}")
103
 
104
- # Merge filters using utility
105
  merged_filters = merge_filters_with_preferences(filters or {}, user_pref)
106
  filter_criteria.update(merged_filters)
107
 
108
- # Apply projection from user preferences if no explicit projection provided
109
  if not projection_list and user_pref and isinstance(user_pref.get("visible_columns"), list):
110
  final_projection_list = user_pref.get("visible_columns")
111
 
112
  except Exception as cache_exc:
113
- logger.warning(f"Error fetching user preferences: {cache_exc}")
114
- # Fall back to standard filters
115
  if filters:
116
  filter_criteria.update(filters)
117
  elif filters:
118
  filter_criteria.update(filters)
119
 
120
- logger.debug(f"Final filter criteria: {filter_criteria}, offset={offset}, limit={limit}, projection={final_projection_list}")
 
 
 
 
 
 
 
 
121
 
122
- # Get total count for pagination
123
  total_count = await SupplierModel.count_suppliers(filter_criteria)
124
 
125
- # Get paginated suppliers list
126
  suppliers = await SupplierModel.get_suppliers_list(
127
  query=filter_criteria,
128
  offset=offset,
@@ -130,10 +116,8 @@ class SupplierService:
130
  projection_list=final_projection_list
131
  )
132
 
133
- # Sanitize results and remove _id field
134
  sanitized_suppliers = [catalogue_utils.sanitize_document_for_mongo(doc) for doc in suppliers]
135
 
136
- # Explicitly remove _id field from each supplier document
137
  for supplier in sanitized_suppliers:
138
  if "_id" in supplier:
139
  del supplier["_id"]
@@ -147,10 +131,10 @@ class SupplierService:
147
  }
148
 
149
  except RuntimeError as re:
150
- logger.error(f"RuntimeError while listing suppliers: {re}", exc_info=True)
151
  raise HTTPException(status_code=500, detail=str(re))
152
  except Exception as e:
153
- logger.error(f"Unexpected error while listing suppliers: {e}", exc_info=True)
154
  raise HTTPException(status_code=500, detail=INTERNAL_SERVER_ERROR)
155
 
156
  @staticmethod
@@ -159,18 +143,8 @@ class SupplierService:
159
  update_data: dict,
160
  current_user: dict = None
161
  ) -> dict:
162
- """
163
- Update supplier data with change tracking.
164
-
165
- Args:
166
- supplier_id: The ID of the supplier to update
167
- update_data: The data to update
168
- current_user: Optional user info for tracking who made the change
169
-
170
- Returns:
171
- Message indicating success or no changes
172
- """
173
- logger.info(f"Updating supplier with ID: {supplier_id}")
174
  try:
175
  if current_user:
176
  update_data["updated_by"] = current_user.get("associate_id")
@@ -179,66 +153,51 @@ class SupplierService:
179
 
180
  return {"message": "Supplier updated successfully"} if result else {"message": "No changes made"}
181
  except RuntimeError as re:
182
- logger.error(f"RuntimeError while updating supplier: {re}", exc_info=True)
183
  raise HTTPException(status_code=500, detail=str(re))
184
  except Exception as e:
185
- logger.error(f"Unexpected error while updating supplier: {e}", exc_info=True)
186
  raise HTTPException(status_code=500, detail=INTERNAL_SERVER_ERROR)
187
 
188
-
189
  @staticmethod
190
- async def delete_supplier(
191
- supplier_id: str,
192
- merchant_id: str
193
- ) -> dict:
194
- """
195
- Deletes a supplier by ID.
196
- """
197
- logger.info(f"Deleting supplier with ID: {supplier_id}, merchant_id: {merchant_id}")
198
  try:
199
  result = await SupplierModel.delete_supplier(supplier_id, merchant_id)
200
  if not result:
201
  raise HTTPException(status_code=404, detail="Supplier not found")
202
 
203
- logger.info(f"Supplier with ID {supplier_id} deleted successfully.")
204
-
205
  return {"message": "Supplier deleted successfully"}
206
  except HTTPException as he:
207
  raise he
208
  except RuntimeError as re:
209
- logger.error(f"RuntimeError while deleting supplier: {re}", exc_info=True)
210
  raise HTTPException(status_code=500, detail=str(re))
211
  except Exception as e:
212
- logger.error(f"Unexpected error while deleting supplier: {e}", exc_info=True)
213
  raise HTTPException(status_code=500, detail=INTERNAL_SERVER_ERROR)
214
 
215
  @staticmethod
216
  async def get_filtered_suppliers(query: Dict[str, Any]) -> List[Dict[str, Any]]:
217
- """
218
- Get suppliers matching specific filter criteria.
219
-
220
- Args:
221
- query: Dictionary of filter criteria
222
-
223
- Returns:
224
- List of suppliers matching the criteria
225
- """
226
- logger.info(f"Fetching suppliers with filters: {query}")
227
  try:
228
  supplier_list = await SupplierModel.get_filtered_suppliers(query)
229
  if not supplier_list:
230
- logger.info("No suppliers found for given filters.")
231
  return []
232
 
233
  sanitized = [catalogue_utils.sanitize_document_for_mongo(doc) for doc in supplier_list]
234
- logger.info(f"Fetched {len(sanitized)} supplier(s).")
235
  return sanitized
236
 
237
  except RuntimeError as re:
238
- logger.error(f"RuntimeError while fetching suppliers: {re}", exc_info=True)
239
  raise HTTPException(status_code=500, detail=str(re))
240
  except Exception as e:
241
- logger.error(f"Unexpected error while filtering suppliers: {e}", exc_info=True)
242
  raise HTTPException(status_code=500, detail=INTERNAL_SERVER_ERROR)
243
 
244
  @staticmethod
@@ -247,18 +206,8 @@ class SupplierService:
247
  history_entry: dict,
248
  current_user: dict = None
249
  ) -> Dict[str, Any]:
250
- """
251
- Append a new entry to the supplier's supply history.
252
-
253
- Args:
254
- supplier_id: The ID of the supplier
255
- history_entry: The history entry to append
256
- current_user: Optional user info for tracking who made the change
257
-
258
- Returns:
259
- Message indicating success or no changes
260
- """
261
- logger.info(f"Appending supply history for supplier ID: {supplier_id}")
262
  try:
263
  if current_user:
264
  history_entry["added_by"] = current_user.get("associate_id")
@@ -266,16 +215,16 @@ class SupplierService:
266
  result = await SupplierModel.append_supply_history(supplier_id, history_entry)
267
 
268
  if not result:
269
- logger.warning(f"No changes made when appending history for supplier ID: {supplier_id}")
270
  return {"message": "No changes made"}
271
 
272
- logger.info(f"Successfully appended supply history for supplier ID: {supplier_id}")
273
  return {"message": "Supply history appended successfully"}
274
  except RuntimeError as re:
275
- logger.error(f"RuntimeError while appending supply history: {re}", exc_info=True)
276
  raise HTTPException(status_code=500, detail=str(re))
277
  except Exception as e:
278
- logger.error(f"Unexpected error while appending supply history: {e}", exc_info=True)
279
  raise HTTPException(status_code=500, detail=INTERNAL_SERVER_ERROR)
280
 
281
  @staticmethod
@@ -284,44 +233,25 @@ class SupplierService:
284
  supplier_id: str,
285
  current_user: dict = None
286
  ) -> Dict[str, Any]:
287
- """
288
- Upload a document related to a supplier.
289
-
290
- Args:
291
- data: The document data to upload
292
- supplier_id: The ID of the supplier
293
- current_user: Optional user info for tracking who uploaded the document
294
-
295
- Returns:
296
- Message indicating success
297
- """
298
- logger.info(f"Uploading document for supplier ID: {supplier_id}")
299
  try:
300
  if current_user:
301
  data["uploaded_by"] = current_user.get("associate_id")
302
 
303
  await SupplierModel.upload_document(data, supplier_id)
304
- logger.info(f"Document uploaded successfully for supplier ID: {supplier_id}")
305
  return {"message": "Document uploaded successfully"}
306
  except RuntimeError as re:
307
- logger.error(f"RuntimeError while uploading document: {re}", exc_info=True)
308
  raise HTTPException(status_code=500, detail=str(re))
309
  except Exception as e:
310
- logger.error(f"Unexpected error while uploading document: {e}", exc_info=True)
311
  raise HTTPException(status_code=500, detail=INTERNAL_SERVER_ERROR)
312
 
313
  @staticmethod
314
  async def bulk_upload_suppliers(data: List[dict], current_user: dict):
315
- """
316
- Bulk upload multiple suppliers from a list of dictionaries.
317
-
318
- Args:
319
- data: List of supplier data dictionaries
320
- current_user: User information for tracking who created the suppliers
321
-
322
- Returns:
323
- Dictionary with counts of inserted and failed records, plus error details
324
- """
325
  valid_records = []
326
  errors = []
327
 
@@ -334,7 +264,7 @@ class SupplierService:
334
  supplier_dict["created_by"] = current_user["associate_id"]
335
  valid_records.append(supplier_dict)
336
  except ValidationError as ve:
337
- logger.warning(f"Row {i + 1} validation error: {ve}")
338
  errors.append({"row": i + 1, "errors": ve.errors()})
339
 
340
  if not valid_records:
@@ -405,22 +335,10 @@ class SupplierService:
405
  associate_id: str,
406
  branch_id: str = None
407
  ) -> Dict[str, Any]:
408
- """
409
- Fetches detailed information about suppliers for dashboard widgets.
410
-
411
- Args:
412
- merchant_id: The ID of the merchant
413
- associate_id: The ID of the associate requesting the data
414
- branch_id: Optional branch ID for filtering
415
-
416
- Returns:
417
- Dictionary containing widget data
418
- """
419
- logger.info(f"Fetching info widget data for merchant ID: {merchant_id}")
420
  try:
421
- # Only try to fetch user preferences if cache functionality is available
422
  if CACHE_AVAILABLE:
423
- # Fetch user preferences if available
424
  user_pref = await fetch_one_document(
425
  "list_key_score_cards",
426
  {"merchant_id": merchant_id}
@@ -431,7 +349,7 @@ class SupplierService:
431
  (view for view in user_pref["views"] if view.get("name") == "suppliers"),
432
  None
433
  )
434
- logger.info(f"User preferences for widget view: {suppliers_view}")
435
 
436
  supplier_data = None
437
  if suppliers_view:
@@ -439,7 +357,6 @@ class SupplierService:
439
  if supplier_data is None:
440
  return {}
441
  else:
442
- # If no user_pref, return empty widgets or a default structure
443
  supplier_data = {}
444
 
445
  return supplier_data
@@ -447,8 +364,8 @@ class SupplierService:
447
  except HTTPException as he:
448
  raise he
449
  except RuntimeError as re:
450
- logger.error(f"RuntimeError while fetching widget data: {re}", exc_info=True)
451
  raise HTTPException(status_code=500, detail=str(re))
452
  except Exception as e:
453
- logger.error(f"Unexpected error while fetching widget data: {e}", exc_info=True)
454
  raise HTTPException(status_code=500, detail=INTERNAL_SERVER_ERROR)
 
 
1
  import logging
2
  import json
3
  from typing import Any, Dict, List, Union
4
 
5
  from fastapi import HTTPException
6
  from pydantic import ValidationError
7
+ from insightfy_utils.logging import get_logger
8
 
9
  from app.models.supplier_models import SupplierModel
10
  from app.schemas.supplier_schema import Supplier
 
20
  # Constants
21
  INTERNAL_SERVER_ERROR = "Internal server error"
22
 
23
+ logger = get_logger(__name__)
24
 
25
  class SupplierService:
26
 
 
28
  async def create_supplier(item_data: Supplier) -> Dict[str, Any]:
29
  try:
30
  data = item_data.dict(by_alias=True, exclude_none=True)
31
+ logger.info("Creating supplier", extra={"data": data})
32
  data_id = await SupplierModel.create_supplier(data)
33
  return {"message": "Supplier created successfully", "id": data_id}
34
 
35
  except RuntimeError as re:
36
+ logger.error("RuntimeError while creating supplier", exc_info=re)
37
  raise HTTPException(status_code=500, detail=str(re))
38
 
39
  except Exception as e:
40
+ logger.error("Unexpected error while creating supplier", exc_info=e)
41
  raise HTTPException(status_code=500, detail=INTERNAL_SERVER_ERROR)
42
 
 
43
  @staticmethod
44
  async def get_supplier_data(supplier_id: str, merchant_id: str, branch_id: str = None):
45
+ logger.info("Fetching supplier", extra={"supplier_id": supplier_id})
46
  try:
47
  supplier_data = await SupplierModel.get_supplier_data(supplier_id, merchant_id)
48
  if supplier_data is None:
 
51
  except HTTPException as he:
52
  raise he
53
  except RuntimeError as re:
54
+ logger.error("RuntimeError while fetching supplier", exc_info=re)
55
  raise HTTPException(status_code=500, detail=str(re))
56
  except Exception as e:
57
+ logger.error("Unexpected error while fetching supplier", exc_info=e)
58
  raise HTTPException(status_code=500, detail=INTERNAL_SERVER_ERROR)
59
 
60
  @staticmethod
 
66
  limit: int = 10,
67
  projection_list: List[str] = None
68
  ) -> dict:
69
+ """Retrieves a list of suppliers with pagination, filtering and user preferences support."""
70
+ logger.info("Listing suppliers", extra={"merchant_id": merchant_id, "filters": filters})
 
 
 
 
 
 
 
 
 
 
 
 
 
71
  try:
 
72
  filter_criteria = {"merchant_id": merchant_id}
 
 
73
  final_projection_list = projection_list
74
 
75
  if CACHE_AVAILABLE and associate_id:
76
  try:
77
  cache_key = f"user_pref:{merchant_id}:{associate_id}"
78
+ logger.info("Fetching user preferences from cache", extra={"cache_key": cache_key})
79
 
80
  user_pref = await get_or_set_cache(
81
  cache_key,
82
  fetch_func=lambda: fetch_user_preferences(merchant_id, associate_id)
83
  )
84
 
85
+ logger.info("User preferences fetched", extra={"associate_id": associate_id, "user_pref": user_pref})
86
 
 
87
  merged_filters = merge_filters_with_preferences(filters or {}, user_pref)
88
  filter_criteria.update(merged_filters)
89
 
 
90
  if not projection_list and user_pref and isinstance(user_pref.get("visible_columns"), list):
91
  final_projection_list = user_pref.get("visible_columns")
92
 
93
  except Exception as cache_exc:
94
+ logger.warning("Error fetching user preferences", extra={"error": str(cache_exc)})
 
95
  if filters:
96
  filter_criteria.update(filters)
97
  elif filters:
98
  filter_criteria.update(filters)
99
 
100
+ logger.debug(
101
+ "Final filter criteria",
102
+ extra={
103
+ "filter": filter_criteria,
104
+ "offset": offset,
105
+ "limit": limit,
106
+ "projection": final_projection_list
107
+ }
108
+ )
109
 
 
110
  total_count = await SupplierModel.count_suppliers(filter_criteria)
111
 
 
112
  suppliers = await SupplierModel.get_suppliers_list(
113
  query=filter_criteria,
114
  offset=offset,
 
116
  projection_list=final_projection_list
117
  )
118
 
 
119
  sanitized_suppliers = [catalogue_utils.sanitize_document_for_mongo(doc) for doc in suppliers]
120
 
 
121
  for supplier in sanitized_suppliers:
122
  if "_id" in supplier:
123
  del supplier["_id"]
 
131
  }
132
 
133
  except RuntimeError as re:
134
+ logger.error("RuntimeError while listing suppliers", exc_info=re)
135
  raise HTTPException(status_code=500, detail=str(re))
136
  except Exception as e:
137
+ logger.error("Unexpected error while listing suppliers", exc_info=e)
138
  raise HTTPException(status_code=500, detail=INTERNAL_SERVER_ERROR)
139
 
140
  @staticmethod
 
143
  update_data: dict,
144
  current_user: dict = None
145
  ) -> dict:
146
+ """Update supplier data with change tracking."""
147
+ logger.info("Updating supplier", extra={"supplier_id": supplier_id})
 
 
 
 
 
 
 
 
 
 
148
  try:
149
  if current_user:
150
  update_data["updated_by"] = current_user.get("associate_id")
 
153
 
154
  return {"message": "Supplier updated successfully"} if result else {"message": "No changes made"}
155
  except RuntimeError as re:
156
+ logger.error("RuntimeError while updating supplier", exc_info=re)
157
  raise HTTPException(status_code=500, detail=str(re))
158
  except Exception as e:
159
+ logger.error("Unexpected error while updating supplier", exc_info=e)
160
  raise HTTPException(status_code=500, detail=INTERNAL_SERVER_ERROR)
161
 
 
162
  @staticmethod
163
+ async def delete_supplier(supplier_id: str, merchant_id: str) -> dict:
164
+ """Deletes a supplier by ID."""
165
+ logger.info("Deleting supplier", extra={"supplier_id": supplier_id, "merchant_id": merchant_id})
 
 
 
 
 
166
  try:
167
  result = await SupplierModel.delete_supplier(supplier_id, merchant_id)
168
  if not result:
169
  raise HTTPException(status_code=404, detail="Supplier not found")
170
 
171
+ logger.info("Supplier deleted successfully", extra={"supplier_id": supplier_id})
 
172
  return {"message": "Supplier deleted successfully"}
173
  except HTTPException as he:
174
  raise he
175
  except RuntimeError as re:
176
+ logger.error("RuntimeError while deleting supplier", exc_info=re)
177
  raise HTTPException(status_code=500, detail=str(re))
178
  except Exception as e:
179
+ logger.error("Unexpected error while deleting supplier", exc_info=e)
180
  raise HTTPException(status_code=500, detail=INTERNAL_SERVER_ERROR)
181
 
182
  @staticmethod
183
  async def get_filtered_suppliers(query: Dict[str, Any]) -> List[Dict[str, Any]]:
184
+ """Get suppliers matching specific filter criteria."""
185
+ logger.info("Fetching suppliers with filters", extra={"query": query})
 
 
 
 
 
 
 
 
186
  try:
187
  supplier_list = await SupplierModel.get_filtered_suppliers(query)
188
  if not supplier_list:
189
+ logger.info("No suppliers found for given filters")
190
  return []
191
 
192
  sanitized = [catalogue_utils.sanitize_document_for_mongo(doc) for doc in supplier_list]
193
+ logger.info("Fetched suppliers", extra={"count": len(sanitized)})
194
  return sanitized
195
 
196
  except RuntimeError as re:
197
+ logger.error("RuntimeError while fetching suppliers", exc_info=re)
198
  raise HTTPException(status_code=500, detail=str(re))
199
  except Exception as e:
200
+ logger.error("Unexpected error while filtering suppliers", exc_info=e)
201
  raise HTTPException(status_code=500, detail=INTERNAL_SERVER_ERROR)
202
 
203
  @staticmethod
 
206
  history_entry: dict,
207
  current_user: dict = None
208
  ) -> Dict[str, Any]:
209
+ """Append a new entry to the supplier's supply history."""
210
+ logger.info("Appending supply history", extra={"supplier_id": supplier_id})
 
 
 
 
 
 
 
 
 
 
211
  try:
212
  if current_user:
213
  history_entry["added_by"] = current_user.get("associate_id")
 
215
  result = await SupplierModel.append_supply_history(supplier_id, history_entry)
216
 
217
  if not result:
218
+ logger.warning("No changes made when appending history", extra={"supplier_id": supplier_id})
219
  return {"message": "No changes made"}
220
 
221
+ logger.info("Supply history appended successfully", extra={"supplier_id": supplier_id})
222
  return {"message": "Supply history appended successfully"}
223
  except RuntimeError as re:
224
+ logger.error("RuntimeError while appending supply history", exc_info=re)
225
  raise HTTPException(status_code=500, detail=str(re))
226
  except Exception as e:
227
+ logger.error("Unexpected error while appending supply history", exc_info=e)
228
  raise HTTPException(status_code=500, detail=INTERNAL_SERVER_ERROR)
229
 
230
  @staticmethod
 
233
  supplier_id: str,
234
  current_user: dict = None
235
  ) -> Dict[str, Any]:
236
+ """Upload a document related to a supplier."""
237
+ logger.info("Uploading document", extra={"supplier_id": supplier_id})
 
 
 
 
 
 
 
 
 
 
238
  try:
239
  if current_user:
240
  data["uploaded_by"] = current_user.get("associate_id")
241
 
242
  await SupplierModel.upload_document(data, supplier_id)
243
+ logger.info("Document uploaded successfully", extra={"supplier_id": supplier_id})
244
  return {"message": "Document uploaded successfully"}
245
  except RuntimeError as re:
246
+ logger.error("RuntimeError while uploading document", exc_info=re)
247
  raise HTTPException(status_code=500, detail=str(re))
248
  except Exception as e:
249
+ logger.error("Unexpected error while uploading document", exc_info=e)
250
  raise HTTPException(status_code=500, detail=INTERNAL_SERVER_ERROR)
251
 
252
  @staticmethod
253
  async def bulk_upload_suppliers(data: List[dict], current_user: dict):
254
+ """Bulk upload multiple suppliers from a list of dictionaries."""
 
 
 
 
 
 
 
 
 
255
  valid_records = []
256
  errors = []
257
 
 
264
  supplier_dict["created_by"] = current_user["associate_id"]
265
  valid_records.append(supplier_dict)
266
  except ValidationError as ve:
267
+ logger.warning("Row validation error", extra={"row": i + 1, "error": str(ve)})
268
  errors.append({"row": i + 1, "errors": ve.errors()})
269
 
270
  if not valid_records:
 
335
  associate_id: str,
336
  branch_id: str = None
337
  ) -> Dict[str, Any]:
338
+ """Fetches detailed information about suppliers for dashboard widgets."""
339
+ logger.info("Fetching info widget data", extra={"merchant_id": merchant_id})
 
 
 
 
 
 
 
 
 
 
340
  try:
 
341
  if CACHE_AVAILABLE:
 
342
  user_pref = await fetch_one_document(
343
  "list_key_score_cards",
344
  {"merchant_id": merchant_id}
 
349
  (view for view in user_pref["views"] if view.get("name") == "suppliers"),
350
  None
351
  )
352
+ logger.info("User preferences for widget view", extra={"view": suppliers_view})
353
 
354
  supplier_data = None
355
  if suppliers_view:
 
357
  if supplier_data is None:
358
  return {}
359
  else:
 
360
  supplier_data = {}
361
 
362
  return supplier_data
 
364
  except HTTPException as he:
365
  raise he
366
  except RuntimeError as re:
367
+ logger.error("RuntimeError while fetching widget data", exc_info=re)
368
  raise HTTPException(status_code=500, detail=str(re))
369
  except Exception as e:
370
+ logger.error("Unexpected error while fetching widget data", exc_info=e)
371
  raise HTTPException(status_code=500, detail=INTERNAL_SERVER_ERROR)
app/services/taxonomy_service.py CHANGED
@@ -3,59 +3,50 @@ import uuid
3
  from typing import Any, Dict, List, Optional
4
 
5
  from fastapi import HTTPException
 
6
  from app.models.taxonomy_model import TaxonomyModel
7
  from app.utils.catalogue_utils import sanitize_document_for_mongo
8
 
9
- logger = logging.getLogger(__name__)
10
 
11
  class TaxonomyService:
12
  @staticmethod
13
- async def create_taxonomy(item:any)-> Dict[str, Any]:
14
  try:
15
  data = item.dict(by_alias=True, exclude_none=True)
16
- logger.info(f"Creating Taxonomy with data: {data}")
17
- inserted_id= await TaxonomyModel.create_append_delete_taxonomy(data)
18
  return {
19
  'message': 'Taxonomy created successfully',
20
  'id': inserted_id
21
  }
22
 
23
  except RuntimeError as re:
24
- logger.error(f"RuntimeError while creating catalogue item: {re}", exc_info=True)
25
  raise HTTPException(status_code=500, detail=str(re))
26
 
27
  except Exception as e:
28
- logger.error(f"Unexpected error while creating catalogue item: {e}", exc_info=True)
29
  raise HTTPException(status_code=500, detail="Internal server error")
30
 
31
  @staticmethod
32
- async def update_taxonomy(
33
- id: str,
34
- update_data: dict
35
- ) -> dict:
36
-
37
- result = await TaxonomyModel.update_taxonomy(id, update_data)
38
-
39
- return {"message": "Taxonomy updated successfully"} if result else {"message": "No changes made"}
40
 
41
  @staticmethod
42
  async def delete_taxonomy(item_id: str) -> Dict[str, Any]:
43
- """
44
- Soft delete or remove an item from the catalogue.
45
- """
46
- logger.info(f"Attempting to delete taxonomy with item_id: {item_id}")
47
  success = await TaxonomyModel.delete_taxonomy(item_id)
48
- logger.info(f"Delete taxonomy result for item_id {item_id}: {success}")
49
  if success:
50
  return {"message": "taxonomy deleted"}
51
  else:
52
  return {"message": "taxonomy not found"}
53
 
54
  @staticmethod
55
- async def list_taxonomy(
56
- merchant_id: str,
57
- type=None
58
- ) -> Dict[str, Any]:
59
  """
60
  Retrieve a catalogue taxonomy item by ID.
61
  Returns a flat structure with merchant_id and arrays of taxonomy items.
@@ -109,4 +100,3 @@ class TaxonomyService:
109
  result["subcategories"] = item["subcategories"]
110
 
111
  return result
112
-
 
3
  from typing import Any, Dict, List, Optional
4
 
5
  from fastapi import HTTPException
6
+ from insightfy_utils.logging import get_logger
7
  from app.models.taxonomy_model import TaxonomyModel
8
  from app.utils.catalogue_utils import sanitize_document_for_mongo
9
 
10
+ logger = get_logger(__name__)
11
 
12
  class TaxonomyService:
13
  @staticmethod
14
+ async def create_taxonomy(item: any) -> Dict[str, Any]:
15
  try:
16
  data = item.dict(by_alias=True, exclude_none=True)
17
+ logger.info("Creating Taxonomy", extra={"data": data})
18
+ inserted_id = await TaxonomyModel.create_append_delete_taxonomy(data)
19
  return {
20
  'message': 'Taxonomy created successfully',
21
  'id': inserted_id
22
  }
23
 
24
  except RuntimeError as re:
25
+ logger.error("RuntimeError while creating taxonomy", exc_info=re)
26
  raise HTTPException(status_code=500, detail=str(re))
27
 
28
  except Exception as e:
29
+ logger.error("Unexpected error while creating taxonomy", exc_info=e)
30
  raise HTTPException(status_code=500, detail="Internal server error")
31
 
32
  @staticmethod
33
+ async def update_taxonomy(id: str, update_data: dict) -> dict:
34
+ result = await TaxonomyModel.update_taxonomy(id, update_data)
35
+ return {"message": "Taxonomy updated successfully"} if result else {"message": "No changes made"}
 
 
 
 
 
36
 
37
  @staticmethod
38
  async def delete_taxonomy(item_id: str) -> Dict[str, Any]:
39
+ """Soft delete or remove a taxonomy item."""
40
+ logger.info("Attempting to delete taxonomy", extra={"item_id": item_id})
 
 
41
  success = await TaxonomyModel.delete_taxonomy(item_id)
42
+ logger.info("Delete taxonomy result", extra={"item_id": item_id, "success": success})
43
  if success:
44
  return {"message": "taxonomy deleted"}
45
  else:
46
  return {"message": "taxonomy not found"}
47
 
48
  @staticmethod
49
+ async def list_taxonomy(merchant_id: str, type=None) -> Dict[str, Any]:
 
 
 
50
  """
51
  Retrieve taxonomy items for a merchant, optionally filtered by type.
52
  Returns a flat structure with merchant_id and arrays of taxonomy items.
 
100
  result["subcategories"] = item["subcategories"]
101
 
102
  return result
 
app/sql.py CHANGED
@@ -1,27 +1,25 @@
1
  # database.py
2
  from __future__ import annotations
3
 
4
- import logging
5
  import sqlalchemy
6
  import databases
 
 
7
 
8
- # πŸ”’ Single source of truth for config/env:
9
- # settings.py loads .env (locally) and builds DATABASE_URI safely.
10
  from settings import DATABASE_URI
11
 
12
  # -----------------------------------------------------------------------------
13
- # Logging
14
  # -----------------------------------------------------------------------------
15
- logging.basicConfig(
16
- level=logging.INFO,
17
- format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
18
- )
19
- logger = logging.getLogger(__name__)
20
 
21
  # -----------------------------------------------------------------------------
22
- # Database URL from settings (no direct os.getenv or extra load_dotenv here)
23
  # -----------------------------------------------------------------------------
24
- DATABASE_URL = DATABASE_URI # alias if other parts of the app expect DATABASE_URL
 
 
25
 
26
  if not DATABASE_URL:
27
  logger.error("DATABASE_URI is empty or missing from settings.")
@@ -39,12 +37,11 @@ if not DATABASE_URL:
39
  # Initialize the database connection and metadata
40
  # -----------------------------------------------------------------------------
41
  try:
42
- # `databases` uses async drivers internally (asyncpg for +asyncpg).
43
  database = databases.Database(DATABASE_URL)
44
  metadata = sqlalchemy.MetaData()
45
  logger.info("Database configuration loaded successfully.")
46
  except Exception as e:
47
- logger.exception("Failed to initialize database configuration.")
48
  raise
49
 
50
  # -----------------------------------------------------------------------------
@@ -58,7 +55,7 @@ async def connect_to_database() -> None:
58
  await database.connect()
59
  logger.info("Successfully connected to the database.")
60
  except Exception as e:
61
- logger.exception("Error connecting to the database.")
62
  raise
63
 
64
  async def disconnect_from_database() -> None:
@@ -69,7 +66,7 @@ async def disconnect_from_database() -> None:
69
  await database.disconnect()
70
  logger.info("Successfully disconnected from the database.")
71
  except Exception as e:
72
- logger.exception("Error disconnecting from the database.")
73
  raise
74
 
75
  from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession
@@ -89,7 +86,7 @@ async_engine = create_async_engine(
89
  # Connection arguments for asyncpg
90
  connect_args={
91
  "server_settings": {
92
- "application_name": "insightfy-tms",
93
  "jit": "off"
94
  },
95
  "command_timeout": 60,
 
1
  # database.py
2
  from __future__ import annotations
3
 
 
4
  import sqlalchemy
5
  import databases
6
+ from insightfy_utils.db import PostgresConnector
7
+ from insightfy_utils.logging import setup_logging, get_logger
8
 
 
 
9
  from settings import DATABASE_URI
10
 
11
  # -----------------------------------------------------------------------------
12
+ # Logging (migrated to insightfy-utils)
13
  # -----------------------------------------------------------------------------
14
+ setup_logging(level="INFO", format_type="json", app_name="insightfy-bloom-ms-ems-sql")
15
+ logger = get_logger(__name__)
 
 
 
16
 
17
  # -----------------------------------------------------------------------------
18
+ # Database URL from settings
19
  # -----------------------------------------------------------------------------
20
+ DATABASE_URL = DATABASE_URI
21
+
22
+ logger.info("Using DATABASE_URL", extra={"database_url": DATABASE_URL})
23
 
24
  if not DATABASE_URL:
25
  logger.error("DATABASE_URI is empty or missing from settings.")
 
37
  # Initialize the database connection and metadata
38
  # -----------------------------------------------------------------------------
39
  try:
 
40
  database = databases.Database(DATABASE_URL)
41
  metadata = sqlalchemy.MetaData()
42
  logger.info("Database configuration loaded successfully.")
43
  except Exception as e:
44
+ logger.exception("Failed to initialize database configuration.", exc_info=e)
45
  raise
46
 
47
  # -----------------------------------------------------------------------------
 
55
  await database.connect()
56
  logger.info("Successfully connected to the database.")
57
  except Exception as e:
58
+ logger.exception("Error connecting to the database.", exc_info=e)
59
  raise
60
 
61
  async def disconnect_from_database() -> None:
 
66
  await database.disconnect()
67
  logger.info("Successfully disconnected from the database.")
68
  except Exception as e:
69
+ logger.exception("Error disconnecting from the database.", exc_info=e)
70
  raise
71
 
72
  from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession
 
86
  # Connection arguments for asyncpg
87
  connect_args={
88
  "server_settings": {
89
+ "application_name": "insightfy-ems", # Updated from tms to ems
90
  "jit": "off"
91
  },
92
  "command_timeout": 60,
app/utils/jwt.py CHANGED
@@ -1,6 +1,9 @@
1
  from jose import ExpiredSignatureError, jwt, JWTError
 
2
  from settings import SECRET_KEY, ALGORITHM
3
 
 
 
4
 
5
  def decode_jwt_token(token: str) -> dict:
6
  try:
@@ -12,5 +15,5 @@ def decode_jwt_token(token: str) -> dict:
12
  raise ValueError("Token has expired")
13
 
14
  except JWTError as e:
15
- print(f"❌ Invalid token: {e}")
16
  raise ValueError("Invalid token")
 
1
  from jose import ExpiredSignatureError, jwt, JWTError
2
+ from insightfy_utils.logging import get_logger
3
  from settings import SECRET_KEY, ALGORITHM
4
 
5
+ logger = get_logger(__name__)
6
+
7
 
8
  def decode_jwt_token(token: str) -> dict:
9
  try:
 
15
  raise ValueError("Token has expired")
16
 
17
  except JWTError as e:
18
+ logger.error("Invalid token", exc_info=e)
19
  raise ValueError("Invalid token")
requirements.txt CHANGED
@@ -1,16 +1,36 @@
1
- fastapi>=0.95,<1.0
2
- uvicorn[standard]>=0.22.0
3
- pymongo
4
- motor>=3.0
5
- pydantic
6
- python-dotenv
7
- pydantic[email]
8
- databases
9
- asyncpg
10
- sqlalchemy
11
- redis
12
- httpx==0.28.1
13
- python-jose==3.4.0
14
- coverage==7.8.0
15
- pandas==2.2.3
16
- python-multipart==0.0.20
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Core Framework
2
+ fastapi>=0.104.0
3
+ uvicorn[standard]>=0.24.0
4
+ python-dotenv>=1.0.0
5
+ pydantic>=2.0.0
6
+ pydantic-settings>=2.0.0
7
+
8
+ # Database
9
+ asyncpg>=0.29.0
10
+ sqlalchemy>=2.0.0
11
+ databases>=0.8.0
12
+
13
+ # MongoDB
14
+ motor>=3.3.0
15
+ pymongo>=4.6.0
16
+
17
+ # Redis/Cache
18
+ redis>=5.0.0
19
+
20
+ # Authentication
21
+ pyjwt>=2.8.0
22
+ python-jose[cryptography]>=3.3.0
23
+ passlib[bcrypt]>=1.7.4
24
+
25
+ # Shared utilities library
26
+ insightfy-utils==0.1.0
27
+
28
+ # Development
29
+ pytest>=7.4.0
30
+ pytest-asyncio>=0.21.0
31
+ black>=23.0.0
32
+ flake8>=6.0.0
33
+ mypy>=1.7.0
34
+
35
+ httpx>=0.24.0
36
+ pandas>=1.5.0
settings.py CHANGED
@@ -2,9 +2,9 @@ from __future__ import annotations
2
 
3
  import os
4
  from urllib.parse import quote_plus, urlencode
5
- from dotenv import load_dotenv
6
 
7
- load_dotenv() # keep this so local .env is picked up
8
 
9
  # ────────────────────────────────────────────────────────────────────────────────
10
  # Security
 
2
 
3
  import os
4
  from urllib.parse import quote_plus, urlencode
5
+ from insightfy_utils.config import load_env
6
 
7
+ load_env() # Now properly implemented in insightfy-utils
8
 
9
  # ────────────────────────────────────────────────────────────────────────────────
10
  # Security