github-actions committed on
Commit ·
ee2e133
0
Parent(s):
Deploy to Hugging Face
Browse files. This view is limited to 50 files because it contains too many changes. See raw diff
- Dockerfile +49 -0
- README.md +1132 -0
- app/core/config.py +88 -0
- app/core/database.py +82 -0
- app/core/email.py +109 -0
- app/core/llm.py +293 -0
- app/core/logging_config.py +75 -0
- app/core/middleware.py +70 -0
- app/core/scheduler.py +344 -0
- app/core/security.py +28 -0
- app/features/analytics/__init__.py +0 -0
- app/features/analytics/router.py +85 -0
- app/features/analytics/schemas.py +68 -0
- app/features/analytics/service.py +575 -0
- app/features/auth/deps.py +37 -0
- app/features/auth/models.py +23 -0
- app/features/auth/router.py +269 -0
- app/features/auth/schemas.py +30 -0
- app/features/bills/__init__.py +0 -0
- app/features/bills/models.py +42 -0
- app/features/bills/router.py +147 -0
- app/features/bills/schemas.py +49 -0
- app/features/bills/service.py +491 -0
- app/features/categories/__init__.py +1 -0
- app/features/categories/models.py +31 -0
- app/features/categories/router.py +69 -0
- app/features/categories/schemas.py +54 -0
- app/features/categories/service.py +105 -0
- app/features/credit_cards/__init__.py +0 -0
- app/features/credit_cards/models.py +24 -0
- app/features/credit_cards/router.py +115 -0
- app/features/credit_cards/schemas.py +52 -0
- app/features/credit_cards/service.py +189 -0
- app/features/dashboard/router.py +116 -0
- app/features/dashboard/service.py +176 -0
- app/features/export/router.py +28 -0
- app/features/export/service.py +39 -0
- app/features/forecasting/schemas.py +15 -0
- app/features/forecasting/service.py +394 -0
- app/features/goals/models.py +27 -0
- app/features/goals/router.py +47 -0
- app/features/goals/schemas.py +34 -0
- app/features/goals/service.py +118 -0
- app/features/notifications/service.py +462 -0
- app/features/sanitizer/service.py +42 -0
- app/features/settle_up/models.py +33 -0
- app/features/settle_up/router.py +51 -0
- app/features/settle_up/schemas.py +44 -0
- app/features/settle_up/service.py +100 -0
- app/features/sync/models.py +25 -0
Dockerfile
ADDED
|
@@ -0,0 +1,49 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Use an official Python runtime as a parent image
|
| 2 |
+
FROM python:3.11-slim
|
| 3 |
+
|
| 4 |
+
# Set environment variables
|
| 5 |
+
ENV PYTHONDONTWRITEBYTECODE 1
|
| 6 |
+
ENV PYTHONUNBUFFERED 1
|
| 7 |
+
ENV PORT 7860
|
| 8 |
+
|
| 9 |
+
# Set the working directory in the container
|
| 10 |
+
WORKDIR /app
|
| 11 |
+
|
| 12 |
+
# Install system dependencies
|
| 13 |
+
RUN apt-get update && apt-get install -y --no-install-recommends \
|
| 14 |
+
build-essential \
|
| 15 |
+
libpq-dev \
|
| 16 |
+
cmake \
|
| 17 |
+
pkg-config \
|
| 18 |
+
&& rm -rf /var/lib/apt/lists/*
|
| 19 |
+
|
| 20 |
+
# Set environment variables for better stability with Stan (Prophet) and Llama-cpp
|
| 21 |
+
ENV OMP_NUM_THREADS 1
|
| 22 |
+
ENV MKL_NUM_THREADS 1
|
| 23 |
+
ENV OPENBLAS_NUM_THREADS 1
|
| 24 |
+
ENV KMP_DUPLICATE_LIB_OK TRUE
|
| 25 |
+
|
| 26 |
+
# Install Python dependencies
|
| 27 |
+
COPY requirements.txt .
|
| 28 |
+
RUN pip install --no-cache-dir --upgrade pip setuptools wheel
|
| 29 |
+
RUN pip install --no-cache-dir -r requirements.txt
|
| 30 |
+
|
| 31 |
+
# Install llama-cpp-python using pre-built CPU wheels.
|
| 32 |
+
# We use v0.3.19 as it is the latest version with pre-compiled binaries for Python 3.11 on Linux.
|
| 33 |
+
# The --only-binary flag ensures we never trigger a slow source compilation on Hugging Face.
|
| 34 |
+
RUN pip install --no-cache-dir llama-cpp-python==0.3.19 --extra-index-url https://abetlen.github.io/llama-cpp-python/whl/cpu --only-binary=:all:
|
| 35 |
+
|
| 36 |
+
# Pre-download the model into the image for instant startup on HF Spaces.
|
| 37 |
+
# Using Gemma 4 E4B (Instruct-GGUF) - ~2.5GB model file.
|
| 38 |
+
RUN mkdir -p models && \
|
| 39 |
+
python -c "from huggingface_hub import hf_hub_download; hf_hub_download(repo_id='bartowski/google_gemma-4-E4B-it-GGUF', filename='google_gemma-4-E4B-it-Q4_K_M.gguf', local_dir='models')"
|
| 40 |
+
|
| 41 |
+
# Copy the rest of the application code
|
| 42 |
+
COPY . .
|
| 43 |
+
|
| 44 |
+
# Expose the port the app runs on
|
| 45 |
+
EXPOSE 7860
|
| 46 |
+
|
| 47 |
+
# Command to run the application using uvicorn with a single worker
|
| 48 |
+
# Reverting to 1 worker for debugging startup hangs on HF Spaces.
|
| 49 |
+
CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "7860", "--workers", "1"]
|
README.md
ADDED
|
@@ -0,0 +1,1132 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
---
|
| 2 |
+
title: Grip Backend
|
| 3 |
+
emoji: 💰
|
| 4 |
+
colorFrom: green
|
| 5 |
+
colorTo: blue
|
| 6 |
+
sdk: docker
|
| 7 |
+
app_port: 7860
|
| 8 |
+
---
|
| 9 |
+
|
| 10 |
+
# GRIP
|
| 11 |
+
|
| 12 |
+
> **Autonomous Financial Intelligence.**
|
| 13 |
+
|
| 14 |
+
An AI-powered personal platform that transforms your inbox into a complete financial intelligence system. Track spending, forecast expenses, grow investments—all while keeping your data private and secure.
|
| 15 |
+
|
| 16 |
+
**🎯 The Only Platform in India That Shows If Your SIP Date is Costing You Money.**
|
| 17 |
+
|
| 18 |
+
Import your Consolidated Account Statement (CAMS, KFin, MFCentral) in 60 seconds. Discover if switching your SIP date could earn you thousands more. No other platform does this.
|
| 19 |
+
|
| 20 |
+
[](https://fastapi.tiangolo.com)
|
| 21 |
+
[](https://reactjs.org)
|
| 22 |
+
[](https://www.postgresql.org)
|
| 23 |
+
|
| 24 |
+
---
|
| 25 |
+
|
| 26 |
+
## 🌟 What Makes Grip Different
|
| 27 |
+
|
| 28 |
+
### 💰 Know Your True Spending Power
|
| 29 |
+
- **Safe-to-Spend Engine**: See what you can *actually* spend after bills, credit cards, and commitments—not just your bank balance.
|
| 30 |
+
- **Real-Time Intelligence**: Automatically accounts for unpaid bills, upcoming rent, and unbilled credit card purchases.
|
| 31 |
+
- **Predictive Budgeting**: Includes projected recurring expenses ("Surety") before they even arrive.
|
| 32 |
+
|
| 33 |
+
### 🎯 Timing Alpha: The Only Platform for Precision Wealth Analytics
|
| 34 |
+
Most investment apps are "lazy"—they show you generic fund returns or XIRR calculated with approximate dates. Grip is built for the precision-obsessed investor.
|
| 35 |
+
|
| 36 |
+
**The "Average" Problem with Other Platforms:**
|
| 37 |
+
- ❌ **Lazy Pricing**: Use month-end NAVs or weekly averages to calculate your returns.
|
| 38 |
+
- ❌ **Generic XIRR**: Show you a number that assumes your transactions happened at a "standard" time.
|
| 39 |
+
- ❌ **The Blind Spot**: No insight into whether your SIP date is actually helping or hurting you.
|
| 40 |
+
|
| 41 |
+
**The Grip Precision Advantage:**
|
| 42 |
+
- ✅ **Day-Specific NAV**: We fetch the *exact* NAV of the day your transaction hit the bank. If you invest on the 7th, we calculate parity with the 7th, not a "monthly average".
|
| 43 |
+
- ✅ **Timing Leakage Analysis (WORLD FIRST)**: We analyze every SIP you've ever made and cross-reference them with the volatility schedules of that specific fund.
|
| 44 |
+
- ✅ **What-If Date Simulation**: Grip simulates your entire investment history against every other day of the month (1st to 28th) to determine if a simple change in your salary-cycle could yield an extra 1-2% in "Timing Alpha".
|
| 45 |
+
|
| 46 |
+
**Why this is a Big Deal:**
|
| 47 |
+
Mutual Fund NAVs fluctuate daily. An investor who does a SIP on the 10th vs. the 15th might see a **1.5% difference in lifetime XIRR** for the exact same fund. Most platforms hide this "Timing Leakage." Grip exposes it and shows you how to fix it.
|
| 48 |
+
|
| 49 |
+
> **Our finding:** No other retail investment platform in India—not Zerodha Coin, INDMoney, or Groww—performs historical date-permutation analysis on your *actual* transactions to optimize your future returns.
|
| 50 |
+
|
| 51 |
+
### 🛡️ Frictionless, Private Onboarding
|
| 52 |
+
While others make you wait for "syncs" or manual entries, Grip is built to get you from zero to "Deep Insights" in under a minute.
|
| 53 |
+
|
| 54 |
+
- **Universal Statement Import**: Upload your Consolidated Account Statement (CAS) from CAMS, KFin, or MFCentral. We reconstruct your entire investment life—identifying SIPs, step-ups, and even "missed" months—instantly.
|
| 55 |
+
- **Step-Up & Skip Detection**: We don't just show total units; we map the *evolution* of your discipline. See exactly when you increased your SIPs and where you missed a beat.
|
| 56 |
+
- **Privacy-First Intelligence**: All your data is processed with local sanitization. Your bank details and PAN are masked *before* our analysis engines ever touch the data.
|
| 57 |
+
|
| 58 |
+
### 📊 Professional-Grade Portfolio Simulation
|
| 59 |
+
- **AI-Powered Forecasting**: We use Facebook Prophet (the same engine used by data scientists for revenue forecasting) to project your portfolio 10-20 years into the future with realistic confidence intervals.
|
| 60 |
+
- **Simulation Mode**: Instantly see how a ₹2,000 "Step-Up" in your monthly SIP today changes your net worth 15 years from now.
|
| 61 |
+
- **Email-to-Wealth Pipeline**: Once imported, Grip auto-extracts your future buys from your bank alert emails. No more manual tracking.
|
| 62 |
+
- **Precision XIRR**: Calculated using the Newton-Raphson method for accurate annualized returns based on daily cashflows.
|
| 63 |
+
- **Timing Leakage Reports**: Detailed breakdown of how much "extra" money you could have made by simply shifting your SIP date.
|
| 64 |
+
|
| 65 |
+
### 🤖 Hybrid Intelligence — Fast, Private, Reliable
|
| 66 |
+
- **Local LLM Engine (Primary)**: A high-privacy, zero-cost LLM (SmolLM2-1.7B) runs natively on your server for extraction. Its 2048-token (configurable) context window handles even the longest transaction alerts.
|
| 67 |
+
- **Semantic Email Compression**: Inspired by LLMLingua-2, Grip semantically strips 60-70% of email boilerplate (disclaimers, etc.) while preserving high-signal transaction data, ensuring 2x-3x faster and more reliable extraction.
|
| 68 |
+
- **KV Cache Optimization**: Prompts are reordered to put static context at the top, allowing the local LLM to cache instruction states and significantly speed up batch processing of multiple emails.
|
| 69 |
+
- **Rule Engine (Secondary)**: A deterministic, zero-latency pattern-matching engine handles common Indian bank email formats.
|
| 70 |
+
- **Automatic Transaction Extraction**: Connect Gmail once; transactions are extracted from bank alerts automatically.
|
| 71 |
+
- **Webhook Sync Optimization**: Intelligent 5-second debouncing and concurrency guards prevent redundant syncs from batch emails.
|
| 72 |
+
- **Autonomous Notification Engine**: Scheduled email alerts for Gmail disconnection, surety bill reminders, and spending insights.
|
| 73 |
+
- **Hybrid Forecasting**: Combines Meta Prophet (statistical) + Local LLM (contextual) to predict month-end expenses.
|
| 74 |
+
- **Smart Learning**: Remembers your merchant preferences, auto-categorizes future transactions.
|
| 75 |
+
- **Multi-Layer Spam Filter**: Sender whitelist + subject gates + body signals distinguish real transactions from marketing emails.
|
| 76 |
+
### 🔒 Privacy Built-In, Not Bolted-On
|
| 77 |
+
- **100% On-Server Extraction**: Local LLM (SmolLM2-1.7B) runs natively on our server. Your financial data **never leaves your infrastructure** for extraction.
|
| 78 |
+
- **Real-Time Privacy**: Gmail webhooks (via Google Pub/Sub) trigger immediate, secure parsing.
|
| 79 |
+
- **Sanitization Before Processing**: PAN, Aadhaar, and Credit Card numbers are masked *before* the LLM even sees them, providing double-layered privacy.
|
| 80 |
+
- **No Data Selling**: Your financial data stays yours. Period.
|
| 81 |
+
- **Self-Hostable**: Open architecture—you control the deployment and data.
|
| 82 |
+
- **Read-Only Gmail**: OAuth 2.0 with minimal scopes; we can't send or modify your emails.
|
| 83 |
+
|
| 84 |
+
---
|
| 85 |
+
|
| 86 |
+
## ⚡ Zero-Effort Automation
|
| 87 |
+
- **One-Click Sync**: Connect Gmail → Transactions flow in automatically
|
| 88 |
+
- **Smart Deduplication**: SHA-256 hashing ensures no duplicate transactions
|
| 89 |
+
- **Background Processing**: Email parsing happens async—never blocks your UI
|
| 90 |
+
- **Merchant Intelligence**: Auto-learns from your verifications, gets smarter over time
|
| 91 |
+
- **Daily Price Sync**: Scheduled job updates investment NAVs every evening at 9 PM IST
|
| 92 |
+
|
| 93 |
+
---
|
| 94 |
+
|
| 95 |
+
## 🚀 How It Works
|
| 96 |
+
|
| 97 |
+
Grip processes your financial data through a sophisticated, privacy-preserving pipeline:
|
| 98 |
+
|
| 99 |
+
```
|
| 100 |
+
┌─────────────────────────────────────────────────────────────────┐
|
| 101 |
+
│ 1. EMAIL INGESTION (3 Methods) │
|
| 102 |
+
│ • OAuth Sync: Gmail API fetch (manual/scheduled) │
|
| 103 |
+
│ • Webhook Push: Real-time via Google Apps Script │
|
| 104 |
+
│ • Manual Entry: Cash/other transactions (auto-verified) │
|
| 105 |
+
└────────────────────────┬────────────────────────────────────────┘
|
| 106 |
+
↓
|
| 107 |
+
┌─────────────────────────────────────────────────────────────────┐
|
| 108 |
+
│ 2. PRIVACY SANITIZATION (LOCAL) │
|
| 109 |
+
│ Regex Engine → Masks PII → Safe for AI processing │
|
| 110 |
+
│ • Credit Card: 💳 ****-****-XXXX-1234 │
|
| 111 |
+
│ • Aadhaar: 🆔 XXXX-XXXX-5678 │
|
| 112 |
+
│ • UPI ID: 👤 <email>@*** │
|
| 113 |
+
└────────────────────────┬────────────────────────────────────────┘
|
| 114 |
+
↓
|
| 115 |
+
┌─────────────────────────────────────────────────────────────────┐
|
| 116 |
+
│ 3. AI EXTRACTION (Local SmolLM2-1.7B) │
|
| 117 |
+
│ Natural Language → Structured JSON │
|
| 118 |
+
│ "Rs 1,250 debited from Card ending 4521 at Swiggy" │
|
| 119 |
+
│ ↓ │
|
| 120 |
+
│ { amount: 1250, merchant: "Swiggy", │
|
| 121 |
+
│ category: "Food & Dining", account: "CREDIT_CARD" } │
|
| 122 |
+
└────────────────────────┬────────────────────────────────────────┘
|
| 123 |
+
↓
|
| 124 |
+
┌─────────────────────────────────────────────────────────────────┐
|
| 125 |
+
│ 4. SMART DEDUPLICATION │
|
| 126 |
+
│ SHA-256 Hash → Check Database → Skip if exists │
|
| 127 |
+
└────────────────────────┬────────────────────────────────────────┘
|
| 128 |
+
↓
|
| 129 |
+
┌─────────────────────────────────────────────────────────────────┐
|
| 130 |
+
│ 5. INVESTMENT DETECTION & MAPPING │
|
| 131 |
+
│ "ICICI Pru SIP ₹5000" → Match Rule → Fetch NAV → Add Units │
|
| 132 |
+
│ Auto-creates snapshots for portfolio tracking │
|
| 133 |
+
└────────────────────────┬────────────────────────────────────────┘
|
| 134 |
+
↓
|
| 135 |
+
┌─────────────────────────────────────────────────────────────────┐
|
| 136 |
+
│ 6. MERCHANT INTELLIGENCE │
|
| 137 |
+
│ User Verification → Create Mapping → Future Auto-categorize │
|
| 138 |
+
│ "SWIGGY*BANGALORE" → Clean: "Swiggy" → Category: Food │
|
| 139 |
+
└────────────────────────┬────────────────────────────────────────┘
|
| 140 |
+
↓
|
| 141 |
+
┌─────────────────────────────────────────────────────────────────┐
|
| 142 |
+
│ 7. PREDICTIVE FORECASTING │
|
| 143 |
+
│ Historical Data → Prophet/Local LLM → Month-end burden prediction│
|
| 144 |
+
│ "Expected ₹12,500 in remaining expenses (18 days left)" │
|
| 145 |
+
└────────────────────────┬────────────────────────────────────────┘
|
| 146 |
+
↓
|
| 147 |
+
┌─────────────────────────────────────────────────────────────────┐
│ 8. AUTONOMOUS NOTIFICATIONS │
|
| 148 |
+
│ • Gmail Connection Alerts: Instant email if OAuth expires │
|
| 149 |
+
│ • Surety Reminders: Morning-of alerts for big recurring bills │
|
| 150 |
+
│ • Weekly Insights: Smart alerts for Category spending spikes │
|
| 151 |
+
└────────────────────────┬────────────────────────────────────────┘
|
| 152 |
+
↓
|
| 153 |
+
┌─────────────────────────────────────────────────────────────────┐
|
| 154 |
+
│ 9. ACTIONABLE INSIGHTS & DASHBOARD │
|
| 155 |
+
│ • Safe-to-Spend = Balance - (Bills + CC + Buffer) │
|
| 156 |
+
│ • Wealth Trajectory: Historical + 10Y AI forecast │
|
| 157 |
+
│ • Investment XIRR: Annualized returns per asset │
|
| 158 |
+
│ Visual dashboard with spending trends and recommendations │
|
| 159 |
+
└─────────────────────────────────────────────────────────────────┘
|
| 160 |
+
```
|
| 161 |
+
|
| 162 |
+
---
|
| 163 |
+
|
| 164 |
+
## ✨ Key Features
|
| 165 |
+
|
| 166 |
+
### 💰 Safe-to-Spend Intelligence (Core USP)
|
| 167 |
+
|
| 168 |
+
**The Number That Matters Most**
|
| 169 |
+
|
| 170 |
+
Forget checking your bank balance—Grip shows you what you can *actually* spend without stress.
|
| 171 |
+
|
| 172 |
+
**Intelligent Calculation:**
|
| 173 |
+
```
|
| 174 |
+
Safe-to-Spend = Current Balance
|
| 175 |
+
- Unpaid Bills
|
| 176 |
+
- Projected Recurring Bills (Surety)
|
| 177 |
+
- Current Unbilled Credit Card Expenses
|
| 178 |
+
- Configurable Safety Buffer (default 10%)
|
| 179 |
+
```
|
| 180 |
+
|
| 181 |
+
**Real-World Example:**
|
| 182 |
+
```
|
| 183 |
+
Bank Balance: ₹45,000
|
| 184 |
+
- Rent (due in 5 days): -₹15,000
|
| 185 |
+
- Utilities (projected): -₹2,500
|
| 186 |
+
- Unbilled CC purchases: -₹8,200
|
| 187 |
+
- Safety Buffer (10%): -₹1,930
|
| 188 |
+
─────────────────────────────────────
|
| 189 |
+
Safe-to-Spend: ₹17,370 ✅
|
| 190 |
+
```
|
| 191 |
+
|
| 192 |
+
**Visual Health System:**
|
| 193 |
+
- 🔴 **Negative**: Overdrawn (immediate action required)
|
| 194 |
+
- 🟠 **Critical**: < ₹1,000 (extremely tight budget)
|
| 195 |
+
- 🟡 **Warning**: ₹1,000 - ₹3,000 (limited spending room)
|
| 196 |
+
- 🟢 **Healthy**: > ₹3,000 (comfortable spending capacity)
|
| 197 |
+
|
| 198 |
+
**Why This Matters:**
|
| 199 |
+
- **Prevents Overspending**: Accounts for committed expenses before they hit
|
| 200 |
+
- **Reduces Anxiety**: One number tells you your true spending power
|
| 201 |
+
- **Builds Buffer**: Automatic safety margin prevents living paycheck-to-paycheck
|
| 202 |
+
- **Predictive**: Includes projected bills, not just current ones
|
| 203 |
+
|
| 204 |
+
### 📈 Investment Intelligence Platform (NEW!)
|
| 205 |
+
|
| 206 |
+
**Automated Wealth Tracking**
|
| 207 |
+
|
| 208 |
+
Transform your investment expenses into a live-tracked portfolio with zero manual work.
|
| 209 |
+
|
| 210 |
+
**Email-to-Wealth Pipeline:**
|
| 211 |
+
```
|
| 212 |
+
① Bank Email: "SIP ₹5,000 debited for ICICI Pru Bluechip"
|
| 213 |
+
② Auto-Detection: Investment category + merchant pattern match
|
| 214 |
+
③ Smart Linking: Checks if asset exists in portfolio
|
| 215 |
+
- If New: Creates new holding
|
| 216 |
+
- If Existing: Appends transaction to history
|
| 217 |
+
④ NAV Fetch: Historical price on transaction date (mfapi.in)
|
| 218 |
+
⑤ Unit Calculation: ₹5,000 / ₹45.23 = 110.52 units
|
| 219 |
+
⑥ Snapshot Created: Portfolio updated with new units
|
| 220 |
+
⑦ XIRR Recalculated: Annualized returns refreshed
|
| 221 |
+
⑧ Portfolio Update: Total units increased, XIRR recalibrated
|
| 222 |
+
```
|
| 223 |
+
|
| 224 |
+
**🆕 Universal Statement Import (Instant Onboarding):**
|
| 225 |
+
- **Broad Support**: Import statements from CAMS, KFin, or MFCentral
|
| 226 |
+
- **Format Agnostic**: Supports both CSV and Excel formats
|
| 227 |
+
- **Bulk Processing**: Import years of transactions in seconds
|
| 228 |
+
- **Auto-Detection**: Automatically identifies SIP patterns vs lump sum
|
| 229 |
+
- **Step-Up Tracking**: Detects when SIP amount increases (e.g., ₹5k → ₹7k)
|
| 230 |
+
- **Skip Detection**: Identifies missed SIP months with gap analysis
|
| 231 |
+
- **Smart Metadata**: Stores change percentages, skip reasons, historical patterns
|
| 232 |
+
- **Zero Manual Work**: Auto-creates holdings, fetches historical NAVs, calculates units
|
| 233 |
+
- **Preview Before Import**: Review all transactions before committing
|
| 234 |
+
|
| 235 |
+
**🎯 SIP Date-Specific Performance Analysis (UNIQUE USP!):**
|
| 236 |
+
|
| 237 |
+
*No other platform in India offers this!*
|
| 238 |
+
|
| 239 |
+
**What Others Show:**
|
| 240 |
+
- ❌ Generic monthly average returns
|
| 241 |
+
- ❌ Hypothetical "if you invested on 1st Jan every year"
|
| 242 |
+
- ❌ Fund-level performance only
|
| 243 |
+
|
| 244 |
+
**What Grip Shows:**
|
| 245 |
+
- ✅ **YOUR Actual SIP Dates**: Analyzes your real purchase dates (e.g., 15th of every month)
|
| 246 |
+
- ✅ **Alternative Date Comparison**: Simulates 6 alternative dates (1st, 5th, 10th, 15th, 20th, 25th)
|
| 247 |
+
- ✅ **Exact NAV on Your Dates**: Fetches historical NAV for your specific purchase days
|
| 248 |
+
- ✅ **Potential Improvement**: Shows how much more you could have earned with different dates
|
| 249 |
+
- ✅ **Historical Win Rate**: "10th-date SIPs outperformed 15th in 16/24 months (67%)"
|
| 250 |
+
- ✅ **AI Insights**: "Switching to 10th could earn you ₹1,100 more (4.4% better)"
|
| 251 |
+
- ✅ **Optimization Recommendations**: Actionable suggestions for future SIPs
|
| 252 |
+
|
| 253 |
+
**Example Analysis:**
|
| 254 |
+
```
|
| 255 |
+
Your SIP Date: 15th of every month
|
| 256 |
+
Your Performance:
|
| 257 |
+
• Total Invested: ₹1,20,000
|
| 258 |
+
• Current Value: ₹1,45,000
|
| 259 |
+
• Returns: +₹25,000 (20.8%)
|
| 260 |
+
• XIRR: 12.5%
|
| 261 |
+
|
| 262 |
+
Best Alternative: 10th of every month
|
| 263 |
+
• Returns: +₹26,100 (21.8%)
|
| 264 |
+
• XIRR: 13.2%
|
| 265 |
+
• Improvement: ₹1,100 (0.9% better)
|
| 266 |
+
|
| 267 |
+
💡 Insight: "Your 15th date SIP performed well, but switching
|
| 268 |
+
to 10th could have earned you ₹1,100 more. Consider adjusting
|
| 269 |
+
your SIP date for future investments."
|
| 270 |
+
```
|
| 271 |
+
|
| 272 |
+
**Live Market Sync:**
|
| 273 |
+
- **Daily Price Updates**: Scheduled job at 9:00 PM IST
|
| 274 |
+
- **Mutual Funds**: NAV from mfapi.in (India's official MF API)
|
| 275 |
+
- **Stocks**: Real-time prices via yfinance
|
| 276 |
+
- **Auto-Snapshots**: Daily value tracking for Prophet forecasting
|
| 277 |
+
|
| 278 |
+
**Professional-Grade Analytics:**
|
| 279 |
+
- **XIRR Calculation**: scipy.optimize.newton for accurate annualized returns
|
| 280 |
+
- **Historical Performance**: Complete transaction history with date-wise snapshots
|
| 281 |
+
- **Asset-Level Drill-Down**: Click any holding to see detailed growth chart + SIP analysis
|
| 282 |
+
- **Portfolio Aggregation**: Net worth, total invested, absolute returns
|
| 283 |
+
- **Step-Up/Skip Visualization**: Timeline showing SIP changes and missed months
|
| 284 |
+
|
| 285 |
+
**AI-Powered Forecasting:**
|
| 286 |
+
- **Facebook Prophet**: Statistical time-series analysis on daily snapshots
|
| 287 |
+
- **10-20 Year Projections**: Confidence intervals with upper/lower bounds
|
| 288 |
+
- **Simulation Mode**: Adjust monthly SIP, see instant forecast updates
|
| 289 |
+
- **Category Breakdown**: Equity, Debt, Liquid, Fixed Income allocation
|
| 290 |
+
|
| 291 |
+
**Supported Asset Types:**
|
| 292 |
+
- ✅ **SIP** (Systematic Investment Plans) - *with date optimization*
|
| 293 |
+
- ✅ **Mutual Funds** (Lump sum)
|
| 294 |
+
- ✅ **Stocks** (Equity holdings)
|
| 295 |
+
- ✅ **FD/RD** (Fixed/Recurring Deposits - manual input)
|
| 296 |
+
- ✅ **PF/Gratuity** (Retirement accruals - formulaic)
|
| 297 |
+
- ✅ **Gold, Real Estate** (Manual tracking)
|
| 298 |
+
|
| 299 |
+
**Human-in-the-Loop:**
|
| 300 |
+
- **Statement Import**: Upload consolidated statement (CAMS/KFin/MFCentral) for instant portfolio creation
|
| 301 |
+
- **Transaction Linker**: Manually map undetected investment transactions
|
| 302 |
+
- **Mapping Rules**: Create patterns for future auto-detection
|
| 303 |
+
- **Adjustments**: Override AI suggestions, edit units/prices
|
| 304 |
+
- **Add Holdings**: Manually add assets not tracked via email
|
| 305 |
+
|
| 306 |
+
**Future-Proof:**
|
| 307 |
+
- **Tax Engine Placeholder**: Ready for LTCG/STCG calculations
|
| 308 |
+
- **Multi-Asset Support**: Extensible for crypto, bonds, commodities
|
| 309 |
+
- **Consolidated View**: Liquid cash + Fixed income + Market-linked in one dashboard
|
| 310 |
+
- **Multi-Fund Optimization**: Find best SIP dates across entire portfolio (coming soon)
|
| 311 |
+
|
| 312 |
+
### 🧠 AI-Powered Intelligence
|
| 313 |
+
|
| 314 |
+
**Automatic Transaction Extraction**
|
| 315 |
+
- Connects to Gmail via OAuth 2.0 (read-only)
|
| 316 |
+
- AI parses bank alerts, credit card statements, UPI confirmations
|
| 317 |
+
- Extracts: Amount, Merchant, Category, Account Type, Date
|
| 318 |
+
- Natural language processing handles different email formats
|
| 319 |
+
- Works with major Indian banks (ICICI, HDFC, SBI, Axis, Kotak, and others)
|
| 320 |
+
|
| 321 |
+
**Hybrid Forecasting Engine**
|
| 322 |
+
- **Meta Prophet**: Statistical time-series analysis of daily spending patterns
|
| 323 |
+
- **Groq LLM**: Category-level breakdowns with contextual reasoning
|
| 324 |
+
- "Food & Dining trending 20% higher: 4 weekend restaurant visits vs 2 last month"
|
| 325 |
+
- "Expected ₹12,500 in remaining expenses (18 days left in month)"
|
| 326 |
+
- Predicts month-end spending based on historical patterns
|
| 327 |
+
- Adapts to seasonal patterns, holidays, and lifestyle changes
|
| 328 |
+
|
| 329 |
+
**Merchant Intelligence & Memory**
|
| 330 |
+
- First time: "SWIGGY*BANGALORE127" → AI suggests "Food & Dining"
|
| 331 |
+
- You verify: "Food & Dining > Online Food"
|
| 332 |
+
- Forever after: "SWIGGY*" auto-categorized as "Food & Dining > Online Food"
|
| 333 |
+
- Learns from every verification, gets smarter over time
|
| 334 |
+
- Clean merchant names (no more cryptic transaction descriptions)
|
| 335 |
+
|
| 336 |
+
### 💳 Credit Card Lifecycle Management
|
| 337 |
+
|
| 338 |
+
**Comprehensive Card Tracking**
|
| 339 |
+
- Register unlimited credit cards with billing details
|
| 340 |
+
- Tracks: Card name, last 4 digits, statement date, payment due date, credit limit
|
| 341 |
+
- Automatic billing cycle calculation (current cycle, days remaining)
|
| 342 |
+
- Real-time unbilled amount in current cycle
|
| 343 |
+
- Credit utilization monitoring (% of limit used)
|
| 344 |
+
|
| 345 |
+
**Billing Cycle Intelligence**
|
| 346 |
+
```
|
| 347 |
+
HDFC Regalia Gold (••1234)
|
| 348 |
+
───────────────────────────────────
|
| 349 |
+
Statement Date: 15th (every month)
|
| 350 |
+
Payment Due: 25th (every month)
|
| 351 |
+
Current Cycle: Jan 16 - Feb 15
|
| 352 |
+
Days to Statement: 12 days
|
| 353 |
+
───────────────────────────────────
|
| 354 |
+
Unbilled Amount: ₹8,247
|
| 355 |
+
Credit Limit: ₹3,00,000
|
| 356 |
+
Utilization: 2.7% ✅
|
| 357 |
+
```
|
| 358 |
+
|
| 359 |
+
**Smart Alerts & Predictions**
|
| 360 |
+
- "Cycle closes in 5 days: ₹8,247 unbilled"
|
| 361 |
+
- "Estimated bill: ₹8,500 (based on current trend)"
|
| 362 |
+
- "Payment due in 10 days: ₹12,340"
|
| 363 |
+
- Prevents surprise bills by tracking unbilled amounts in real-time
|
| 364 |
+
|
| 365 |
+
**Transaction Linking**
|
| 366 |
+
- Link each transaction to specific credit card
|
| 367 |
+
- Accurate per-card spending tracking
|
| 368 |
+
- Prevents overspending within billing cycle
|
| 369 |
+
- Helps optimize card usage across multiple cards
|
| 370 |
+
|
| 371 |
+
### 📋 Bill Management & "Surety" Intelligence
|
| 372 |
+
|
| 373 |
+
**Bill Tracking**
|
| 374 |
+
- Create one-time or recurring bills
|
| 375 |
+
- Set due dates and payment amounts
|
| 376 |
+
- Mark bills as paid/unpaid
|
| 377 |
+
- View upcoming bills (next 7/30/60 days)
|
| 378 |
+
- Payment reminders
|
| 379 |
+
|
| 380 |
+
**Surety Bills (Predictable Expenses)**
|
| 381 |
+
|
| 382 |
+
The secret sauce for accurate Safe-to-Spend calculation.
|
| 383 |
+
|
| 384 |
+
**What is "Surety"?**
|
| 385 |
+
Predictable, recurring expenses that you *know* are coming:
|
| 386 |
+
- Rent (every 1st of month)
|
| 387 |
+
- Electricity/Water (monthly)
|
| 388 |
+
- Internet/Phone bills
|
| 389 |
+
- Insurance premiums
|
| 390 |
+
- Subscriptions (Netflix, Spotify, etc.)
|
| 391 |
+
- Society maintenance
|
| 392 |
+
|
| 393 |
+
**How It Works:**
|
| 394 |
+
```
|
| 395 |
+
① Mark bill as "Surety" (predictable recurring)
|
| 396 |
+
② Grip automatically projects next occurrence
|
| 397 |
+
③ Amount included in Safe-to-Spend calculation
|
| 398 |
+
④ Even if not yet billed, it's accounted for
|
| 399 |
+
```
|
| 400 |
+
|
| 401 |
+
**Example:**
|
| 402 |
+
```
|
| 403 |
+
Rent: ₹15,000 (Surety, due 1st of every month)
|
| 404 |
+
Today: Jan 20
|
| 405 |
+
Next Due: Feb 1 (12 days away)
|
| 406 |
+
|
| 407 |
+
Safe-to-Spend: Already reduced by ₹15,000
|
| 408 |
+
Result: Prevents overspending before rent is due ✅
|
| 409 |
+
```
|
| 410 |
+
|
| 411 |
+
**Frozen Funds Breakdown:**
|
| 412 |
+
```
|
| 413 |
+
┌────────────────────────────────────────┐
|
| 414 |
+
│ Frozen Funds: ₹25,700 │
|
| 415 |
+
├────────────────────────────────────────┤
|
| 416 |
+
│ • Unpaid Bills: ₹10,500 │
|
| 417 |
+
│ • Projected Surety: ₹12,000 │
|
| 418 |
+
│ • Unbilled CC: ₹3,200 │
|
| 419 |
+
└────────────────────────────────────────┘
|
| 420 |
+
```
|
| 421 |
+
|
| 422 |
+
### 🎯 Financial Goals
|
| 423 |
+
|
| 424 |
+
**Goal Setting & Tracking**
|
| 425 |
+
- Set savings goals with target amounts and deadlines
|
| 426 |
+
- Track progress towards each goal
|
| 427 |
+
- Visual progress indicators
|
| 428 |
+
- Automatic calculation of monthly savings needed
|
| 429 |
+
- Integration with Safe-to-Spend (optional goal reserves)
|
| 430 |
+
|
| 431 |
+
**Goal Types:**
|
| 432 |
+
- Emergency Fund
|
| 433 |
+
- Vacation
|
| 434 |
+
- Gadget Purchase
|
| 435 |
+
- Down Payment
|
| 436 |
+
- Custom Goals
|
| 437 |
+
|
| 438 |
+
**Smart Recommendations:**
|
| 439 |
+
- "Save ₹8,500/month to reach ₹1,00,000 goal by December"
|
| 440 |
+
- "You're 45% towards your iPhone fund!"
|
| 441 |
+
- "Adjust Safe-to-Spend buffer to include goal savings"
|
| 442 |
+
|
| 443 |
+
### 📊 Advanced Analytics
|
| 444 |
+
|
| 445 |
+
**Variance Analysis**
|
| 446 |
+
- Month-to-date vs last month comparison
|
| 447 |
+
- Category-level spend changes with % metrics
|
| 448 |
+
- "You spent 23% more on Food & Dining this month (₹8,500 vs ₹6,900)"
|
| 449 |
+
- Trend detection: "Entertainment spending doubled"
|
| 450 |
+
- Visual charts showing spend distribution
|
| 451 |
+
|
| 452 |
+
**Spend Categorization**
|
| 453 |
+
- 20+ default categories (Food & Dining, Shopping, Transport, etc.)
|
| 454 |
+
- Hierarchical subcategories (e.g., Food > Restaurants, Groceries, Online Food)
|
| 455 |
+
- Custom tag system for personal organization (#business, #vacation, #medical)
|
| 456 |
+
- Pie charts, bar graphs, trend lines
|
| 457 |
+
- Export category reports
|
| 458 |
+
|
| 459 |
+
**Monthly Summary Dashboard**
|
| 460 |
+
- Total income vs expenses
|
| 461 |
+
- Category-wise breakdown
|
| 462 |
+
- Top merchants
|
| 463 |
+
- Largest transactions
|
| 464 |
+
- Spending trends over time
|
| 465 |
+
|
| 466 |
+
### 🔄 Automated Email Sync
|
| 467 |
+
|
| 468 |
+
**Gmail Integration (Zero Manual Work)**
|
| 469 |
+
- One-click OAuth 2.0 connection (read-only access)
|
| 470 |
+
- Searches inbox for transaction keywords automatically:
|
| 471 |
+
- "spent", "debited", "transaction", "alert", "paid", "credited"
|
| 472 |
+
- Processes bank alerts, credit card statements, UPI confirmations
|
| 473 |
+
- Background sync (doesn't block UI)
|
| 474 |
+
- Deduplication (SHA-256 hash prevents duplicate transactions)
|
| 475 |
+
|
| 476 |
+
**Sync Features:**
|
| 477 |
+
- **Manual Trigger**: Click "Sync Now" anytime for instant update
|
| 478 |
+
- **Connection Status**: See last sync time, total transactions imported
|
| 479 |
+
- **Sync History**: Complete log with status, errors, records processed
|
| 480 |
+
- **Easy Disconnect**: One-click disconnect, reconnect anytime
|
| 481 |
+
- **Format-Agnostic**: Works with different email formats via natural language AI
|
| 482 |
+
|
| 483 |
+
**Supported Email Types:**
|
| 484 |
+
```
|
| 485 |
+
✅ Bank transaction alerts (ICICI, HDFC, SBI, etc.)
|
| 486 |
+
✅ Credit card alerts (Statement generated, payment due)
|
| 487 |
+
✅ UPI payment confirmations (GPay, PhonePe, Paytm)
|
| 488 |
+
✅ Debit card purchases (POS transactions)
|
| 489 |
+
✅ NEFT/RTGS/IMPS alerts (Fund transfers)
|
| 490 |
+
✅ Wallet transactions (Paytm, Mobikwik)
|
| 491 |
+
✅ Investment confirmations (SIP, MF purchases)
|
| 492 |
+
```
|
| 493 |
+
|
| 494 |
+
### 🔐 Privacy & Security (Core Differentiator)
|
| 495 |
+
|
| 496 |
+
**Local-First Sanitization**
|
| 497 |
+
```
|
| 498 |
+
Before AI processing (happens on your server):
|
| 499 |
+
────────────────────────────────────────────────
|
| 500 |
+
Original: "Paid ₹500 using Card 4521-6789-1234-5678"
|
| 501 |
+
Masked: "Paid ₹500 using Card ****-****-****-5678"
|
| 502 |
+
|
| 503 |
+
Original: "PAN: ABCDE1234F, Aadhaar: 9876-5432-1098"
|
| 504 |
+
Masked: "PAN: XXXXX1234X, Aadhaar: XXXX-XXXX-1098"
|
| 505 |
+
|
| 506 |
+
Original: "UPI: user@paytm paid merchant@phonepe"
|
| 507 |
+
Masked: "UPI: ****@paytm paid ****@phonepe"
|
| 508 |
+
```
|
| 509 |
+
|
| 510 |
+
**What Gets Sanitized:**
|
| 511 |
+
- ✅ Credit Card numbers (💳 12-digit masking, last 4 visible)
|
| 512 |
+
- ✅ PAN cards (🆔 Professional alpha-numeric masking)
|
| 513 |
+
- ✅ Aadhaar numbers (🆔 8-digit masking, last 4 visible)
|
| 514 |
+
- ✅ UPI IDs (👤 <email> or <username> prefix masked)
|
| 515 |
+
- ✅ Phone numbers (📱 middle 6 digits masked)
|
| 516 |
+
**Security Architecture:**
|
| 517 |
+
- JWT authentication with bcrypt password hashing
|
| 518 |
+
- Email verification with OTP (SMTP)
|
| 519 |
+
- Read-only Gmail OAuth (can't send/modify emails)
|
| 520 |
+
- Encrypted OAuth tokens in database (PostgreSQL JSONB encrypted)
|
| 521 |
+
- No third-party analytics or tracking
|
| 522 |
+
- Self-hostable (you control the data)
|
| 523 |
+
|
| 524 |
+
### 🏷️ Advanced Organization
|
| 525 |
+
|
| 526 |
+
**Tags System**
|
| 527 |
+
- Create custom tags (#vacation, #business, #medical, #family)
|
| 528 |
+
- Tag individual transactions
|
| 529 |
+
- Filter and analyze by tags
|
| 530 |
+
- Multi-tag support (one transaction, multiple tags)
|
| 531 |
+
|
| 532 |
+
**Categories & Subcategories**
|
| 533 |
+
- 20+ predefined categories
|
| 534 |
+
- Hierarchical structure (Category > Subcategory)
|
| 535 |
+
- Fully customizable (add/edit/delete)
|
| 536 |
+
- Visual spending distribution
|
| 537 |
+
|
| 538 |
+
**Search & Filters**
|
| 539 |
+
- Search by merchant, amount, category, tag
|
| 540 |
+
- Date range filters
|
| 541 |
+
- Account type filters (Credit Card, Savings, Cash, UPI)
|
| 542 |
+
- Status filters (Pending, Verified)
|
| 543 |
+
- Export filtered results
|
| 544 |
+
|
| 545 |
+
---
|
| 546 |
+
|
| 547 |
+
## 🛠️ Technology Stack
|
| 548 |
+
|
| 549 |
+
### Backend
|
| 550 |
+
- **Framework**: FastAPI (Python 3.12+) - High-performance async API
|
| 551 |
+
- **Database**: PostgreSQL with SQLAlchemy (async) + asyncpg
|
| 552 |
+
- **AI/ML**:
|
| 553 |
+
- **Groq** (Llama 3.3 70B) - Transaction extraction & forecasting
|
| 554 |
+
- **Meta Prophet** - Statistical time-series forecasting
|
| 555 |
+
- **scipy** - XIRR calculation (Newton-Raphson optimization)
|
| 556 |
+
- **Data APIs**:
|
| 557 |
+
- **mfapi.in** - Mutual fund NAV data (India)
|
| 558 |
+
- **yfinance** - Stock prices (global)
|
| 559 |
+
- **Scheduler**: APScheduler (async) - Daily price sync jobs
|
| 560 |
+
- **Authentication**: JWT + bcrypt
|
| 561 |
+
- **Email**: SMTP for OTP delivery
|
| 562 |
+
- **OAuth**: Google OAuth 2.0 for Gmail
|
| 563 |
+
- **Deployment**: Render/Vercel-ready
|
| 564 |
+
|
| 565 |
+
### Frontend
|
| 566 |
+
- **Framework**: React 19 with TypeScript
|
| 567 |
+
- **Build**: Vite (lightning-fast HMR)
|
| 568 |
+
- **Styling**: Vanilla CSS (no framework bloat)
|
| 569 |
+
- **State**: Zustand (lightweight)
|
| 570 |
+
- **Data Fetching**: Axios with interceptors
|
| 571 |
+
- **Charts**: Recharts (responsive, composable)
|
| 572 |
+
- **Icons**: Lucide React
|
| 573 |
+
- **Animations**: Framer Motion
|
| 574 |
+
- **Routing**: React Router DOM
|
| 575 |
+
|
| 576 |
+
### Infrastructure
|
| 577 |
+
- **Package Manager**: uv (Rust-based, 10-100x faster than pip)
|
| 578 |
+
- **Database**: Supabase / NeonDB (serverless Postgres)
|
| 579 |
+
- **Hosting**: Render (backend) + Vercel (frontend)
|
| 580 |
+
- **Version Control**: Git / GitHub
|
| 581 |
+
|
| 582 |
+
---
|
| 583 |
+
|
| 584 |
+
## ⚡ Quick Start
|
| 585 |
+
|
| 586 |
+
### Prerequisites
|
| 587 |
+
- Python 3.12+
|
| 588 |
+
- Node.js 18+
|
| 589 |
+
- PostgreSQL database
|
| 590 |
+
- Gmail account (for email sync)
|
| 591 |
+
- Groq API key ([Get one free](https://console.groq.com))
|
| 592 |
+
|
| 593 |
+
### 1. Clone & Install
|
| 594 |
+
|
| 595 |
+
```bash
|
| 596 |
+
# Clone repository
|
| 597 |
+
git clone https://github.com/yourusername/grip.git
|
| 598 |
+
cd grip
|
| 599 |
+
|
| 600 |
+
# Backend setup
|
| 601 |
+
cd Backend
|
| 602 |
+
uv sync # Install dependencies
|
| 603 |
+
|
| 604 |
+
# Frontend setup
|
| 605 |
+
cd ../Frontend
|
| 606 |
+
npm install
|
| 607 |
+
```
|
| 608 |
+
|
| 609 |
+
### 2. Configure Environment
|
| 610 |
+
|
| 611 |
+
**Backend (`Backend/.env`):**
|
| 612 |
+
```bash
|
| 613 |
+
# Database
|
| 614 |
+
DATABASE_URL=postgresql://user:pass@host:port/dbname
|
| 615 |
+
|
| 616 |
+
# Security
|
| 617 |
+
SECRET_KEY=your-secret-key-here
|
| 618 |
+
GRIP_SECRET=webhook-secret
|
| 619 |
+
|
| 620 |
+
# AI Features (Local SmolLM2-1.7B)
|
| 621 |
+
USE_AI_FORECASTING=true
|
| 622 |
+
ENABLE_SCHEDULER=true
|
| 623 |
+
GROQ_API_KEY=your-groq-api-key # Optional fallback
|
| 624 |
+
GROQ_MODEL=llama-3.3-70b-versatile
|
| 625 |
+
|
| 626 |
+
# Gmail OAuth & Webhooks (Real-time Sync)
|
| 627 |
+
GOOGLE_CLIENT_ID=your-client-id.apps.googleusercontent.com
|
| 628 |
+
GOOGLE_CLIENT_SECRET=your-client-secret
|
| 629 |
+
GMAIL_PUBSUB_TOPIC=projects/your-project/topics/gmail-updates
|
| 630 |
+
FRONTEND_ORIGIN=http://localhost:5173
|
| 631 |
+
|
| 632 |
+
# Email (for OTP)
|
| 633 |
+
SMTP_HOST=smtp.gmail.com
|
| 634 |
+
SMTP_PORT=587
|
| 635 |
+
SMTP_USER=your-email@gmail.com
|
| 636 |
+
SMTP_PASSWORD=your-gmail-app-password
|
| 637 |
+
FROM_EMAIL=noreply@grip.com
|
| 638 |
+
FROM_NAME=Grip
|
| 639 |
+
|
| 640 |
+
# Branding
|
| 641 |
+
APP_NAME=Grip
|
| 642 |
+
APP_TAGLINE=Money that minds itself.
|
| 643 |
+
```
|
| 644 |
+
|
| 645 |
+
**Frontend (`Frontend/.env`):**
|
| 646 |
+
```bash
|
| 647 |
+
VITE_API_BASE_URL=http://localhost:8000/api/v1
|
| 648 |
+
VITE_APP_NAME=Grip
|
| 649 |
+
VITE_APP_TAGLINE=Money that minds itself.
|
| 650 |
+
```
|
| 651 |
+
|
| 652 |
+
### 3. Initialize Database
|
| 653 |
+
|
| 654 |
+
```bash
|
| 655 |
+
cd Backend
|
| 656 |
+
|
| 657 |
+
# Run migrations
|
| 658 |
+
uv run alembic upgrade head
|
| 659 |
+
|
| 660 |
+
# Seed default data (optional)
|
| 661 |
+
uv run python scripts/seed_db.py
|
| 662 |
+
# Creates user: amit@grip.com / password: admin
|
| 663 |
+
```
|
| 664 |
+
|
| 665 |
+
### 4. Run Development Servers
|
| 666 |
+
|
| 667 |
+
```bash
|
| 668 |
+
# Terminal 1 - Backend
|
| 669 |
+
cd Backend
|
| 670 |
+
uv run uvicorn app.main:app --reload
|
| 671 |
+
# → http://localhost:8000
|
| 672 |
+
|
| 673 |
+
# Terminal 2 - Frontend
|
| 674 |
+
cd Frontend
|
| 675 |
+
npm run dev
|
| 676 |
+
# → http://localhost:5173
|
| 677 |
+
```
|
| 678 |
+
|
| 679 |
+
### 5. Set Up Gmail Sync (Optional)
|
| 680 |
+
|
| 681 |
+
See **[Gmail Sync Setup Guide](GMAIL_SYNC_QUICKSTART.md)** for detailed instructions.
|
| 682 |
+
|
| 683 |
+
**Quick version:**
|
| 684 |
+
1. Create Google Cloud project
|
| 685 |
+
2. Enable Gmail API
|
| 686 |
+
3. Create OAuth credentials
|
| 687 |
+
4. Add credentials to `.env`
|
| 688 |
+
5. Connect in app: More → Gmail Sync
|
| 689 |
+
|
| 690 |
+
---
|
| 691 |
+
|
| 692 |
+
## 📖 Usage
|
| 693 |
+
|
| 694 |
+
### First-Time Setup
|
| 695 |
+
|
| 696 |
+
1. **Register Account**
|
| 697 |
+
- Open http://localhost:5173
|
| 698 |
+
- Click "Sign Up"
|
| 699 |
+
- Enter email and password
|
| 700 |
+
- Check email for 6-digit OTP
|
| 701 |
+
- Verify and auto-login ✅
|
| 702 |
+
|
| 703 |
+
2. **Connect Gmail** (Recommended)
|
| 704 |
+
- Go to More → Gmail Sync
|
| 705 |
+
- Click "Connect Gmail"
|
| 706 |
+
- Approve Google OAuth
|
| 707 |
+
- Click "Sync Now"
|
| 708 |
+
- Watch transactions flow in automatically! 🎉
|
| 709 |
+
|
| 710 |
+
3. **Add Credit Cards** (Optional)
|
| 711 |
+
- Go to My Cards
|
| 712 |
+
- Add each card with billing details
|
| 713 |
+
- Link transactions to cards for cycle tracking
|
| 714 |
+
|
| 715 |
+
4. **Set Up Bills** (Optional)
|
| 716 |
+
- Go to Bills & Surety
|
| 717 |
+
- Add recurring bills (rent, utilities, subscriptions)
|
| 718 |
+
- Mark predictable expenses as "Surety"
|
| 719 |
+
|
| 720 |
+
5. **Track Investments** (NEW!)
|
| 721 |
+
|
| 722 |
+
**Option A: Statement Import (Fastest)**
|
| 723 |
+
- Go to Wealth tab → Click purple Upload icon
|
| 724 |
+
- Select Source (CAMS / KFin / MFCentral)
|
| 725 |
+
- Upload statement file (CSV/Excel)
|
| 726 |
+
- Preview transactions → Click Import
|
| 727 |
+
- System auto-detects SIPs, step-ups, and skips
|
| 728 |
+
|
| 729 |
+
**Option B: Manual Entry**
|
| 730 |
+
- Click "Link Transaction" to map investment expenses
|
| 731 |
+
- Or manually add holdings (MF, Stocks, FDs)
|
| 732 |
+
|
| 733 |
+
**Analyze Your SIPs**
|
| 734 |
+
- Click any SIP holding → Switch to "SIP Date Analysis" tab
|
| 735 |
+
- See your actual performance vs alternative dates
|
| 736 |
+
- Get optimization recommendations
|
| 737 |
+
|
| 738 |
+
Watch portfolio grow with daily NAV updates!
|
| 739 |
+
|
| 740 |
+
### Daily Workflow
|
| 741 |
+
|
| 742 |
+
**Automated (Recommended):**
|
| 743 |
+
1. Gmail Sync runs automatically (or click "Sync Now")
|
| 744 |
+
2. AI extracts transaction details
|
| 745 |
+
3. Investment transactions auto-mapped to portfolio
|
| 746 |
+
4. Review pending transactions in Transactions tab
|
| 747 |
+
5. Verify or edit as needed
|
| 748 |
+
6. Check Dashboard for safe-to-spend amount
|
| 749 |
+
7. Monitor Wealth tab for portfolio performance
|
| 750 |
+
|
| 751 |
+
**Manual Entry:**
|
| 752 |
+
1. Click "+" button
|
| 753 |
+
2. Enter transaction details
|
| 754 |
+
3. Select category
|
| 755 |
+
4. Save (auto-marked as verified)
|
| 756 |
+
|
| 757 |
+
---
|
| 758 |
+
|
| 759 |
+
## 🔌 API Documentation
|
| 760 |
+
|
| 761 |
+
### Interactive Docs
|
| 762 |
+
- **Swagger UI**: http://localhost:8000/docs
|
| 763 |
+
- **ReDoc**: http://localhost:8000/redoc
|
| 764 |
+
|
| 765 |
+
### Key Endpoints
|
| 766 |
+
|
| 767 |
+
#### Authentication
|
| 768 |
+
```bash
|
| 769 |
+
POST /api/v1/auth/register # Register with OTP
|
| 770 |
+
POST /api/v1/auth/verify-otp # Verify OTP
|
| 771 |
+
POST /api/v1/auth/token # Login (JWT)
|
| 772 |
+
```
|
| 773 |
+
|
| 774 |
+
#### Gmail Sync
|
| 775 |
+
```bash
|
| 776 |
+
GET /api/v1/sync/google/auth # Get OAuth URL
|
| 777 |
+
POST /api/v1/sync/google/callback # Complete OAuth
|
| 778 |
+
GET /api/v1/sync/status # Check connection
|
| 779 |
+
POST /api/v1/sync/manual # Trigger sync
|
| 780 |
+
GET /api/v1/sync/history # View sync logs
|
| 781 |
+
DELETE /api/v1/sync/disconnect # Disconnect Gmail
|
| 782 |
+
```
|
| 783 |
+
|
| 784 |
+
#### Transactions
|
| 785 |
+
```bash
|
| 786 |
+
GET /api/v1/transactions # List all
|
| 787 |
+
POST /api/v1/transactions/manual # Manual entry
|
| 788 |
+
GET /api/v1/transactions/pending # Pending review
|
| 789 |
+
PUT /api/v1/transactions/{id} # Update
|
| 790 |
+
DELETE /api/v1/transactions/{id} # Delete
|
| 791 |
+
POST /api/v1/transactions/{id}/verify # Verify
|
| 792 |
+
```
|
| 793 |
+
|
| 794 |
+
#### Wealth & Investments (NEW!)
|
| 795 |
+
```bash
|
| 796 |
+
GET /api/v1/wealth/holdings # List portfolio
|
| 797 |
+
GET /api/v1/wealth/holdings/{id} # Holding details with snapshots
|
| 798 |
+
POST /api/v1/wealth/holdings # Add new asset
|
| 799 |
+
POST /api/v1/wealth/forecast # AI forecast (Prophet)
|
| 800 |
+
POST /api/v1/wealth/map-transaction # Link transaction to holding
|
| 801 |
+
GET /api/v1/wealth/sync-prices # Trigger manual price sync
|
| 802 |
+
POST /api/v1/wealth/import-cams # Import CAMS statement (NEW!)
|
| 803 |
+
GET /api/v1/wealth/holdings/{id}/sip-analysis # SIP date performance analysis (NEW!)
|
| 804 |
+
```
|
| 805 |
+
|
| 806 |
+
#### Analytics
|
| 807 |
+
```bash
|
| 808 |
+
GET /api/v1/analytics/safe-to-spend # Real-time calculation
|
| 809 |
+
GET /api/v1/analytics/variance # Month-over-month
|
| 810 |
+
GET /api/v1/analytics/monthly-summary # Monthly stats
|
| 811 |
+
```
|
| 812 |
+
|
| 813 |
+
#### Forecasting
|
| 814 |
+
```bash
|
| 815 |
+
GET /api/v1/dashboard/forecast # 30-day AI prediction
|
| 816 |
+
```
|
| 817 |
+
|
| 818 |
+
---
|
| 819 |
+
|
| 820 |
+
## 🚀 Deployment
|
| 821 |
+
|
| 822 |
+
### Production Setup (Recommended)
|
| 823 |
+
|
| 824 |
+
**Architecture:**
|
| 825 |
+
- **Frontend**: Vercel (Free, unlimited bandwidth)
|
| 826 |
+
- **Backend**: Railway (Serverless, $5/month credit)
|
| 827 |
+
- **Database**: Supabase (Free tier, 500MB)
|
| 828 |
+
- **Scheduled Tasks**: GitHub Actions (Free unlimited for public repos)
|
| 829 |
+
|
| 830 |
+
**Total Cost: $0/month** (everything within free tiers!)
|
| 831 |
+
|
| 832 |
+
---
|
| 833 |
+
|
| 834 |
+
### Backend Deployment (Railway)
|
| 835 |
+
|
| 836 |
+
#### 1. Initial Setup
|
| 837 |
+
|
| 838 |
+
1. **Sign up at [railway.app](https://railway.app)** with GitHub
|
| 839 |
+
2. **Create New Project** → Deploy from GitHub repo
|
| 840 |
+
3. **Select your repository**
|
| 841 |
+
4. **Configure Service:**
|
| 842 |
+
- Root Directory: `Backend`
|
| 843 |
+
- Start Command: `uvicorn app.main:app --host 0.0.0.0 --port $PORT`
|
| 844 |
+
- Watch Paths: `Backend/**`
|
| 845 |
+
|
| 846 |
+
#### 2. Environment Variables
|
| 847 |
+
|
| 848 |
+
Add these in Railway Dashboard → Variables:
|
| 849 |
+
|
| 850 |
+
```bash
|
| 851 |
+
# Database
|
| 852 |
+
DATABASE_URL=postgresql://postgres.[ref]:[password]@aws-1-ap-south-1.pooler.supabase.com:6543/postgres
|
| 853 |
+
|
| 854 |
+
# Security
|
| 855 |
+
SECRET_KEY=your-secret-key-here
|
| 856 |
+
GRIP_SECRET=webhook-secret
|
| 857 |
+
ENVIRONMENT=production
|
| 858 |
+
|
| 859 |
+
# AI
|
| 860 |
+
GROQ_API_KEY=your-groq-api-key
|
| 861 |
+
GROQ_MODEL=llama-3.3-70b-versatile
|
| 862 |
+
USE_AI_FORECASTING=true
|
| 863 |
+
ENABLE_SCHEDULER=false # Using GitHub Actions for scheduled tasks
|
| 864 |
+
|
| 865 |
+
# Gmail OAuth
|
| 866 |
+
GOOGLE_CLIENT_ID=your-client-id.apps.googleusercontent.com
|
| 867 |
+
GOOGLE_CLIENT_SECRET=your-client-secret
|
| 868 |
+
FRONTEND_ORIGIN=https://your-app.vercel.app
|
| 869 |
+
|
| 870 |
+
# Email (OTP)
|
| 871 |
+
SMTP_HOST=smtp.gmail.com
|
| 872 |
+
SMTP_PORT=587
|
| 873 |
+
SMTP_USER=your-email@gmail.com
|
| 874 |
+
SMTP_PASSWORD=your-gmail-app-password
|
| 875 |
+
FROM_EMAIL=noreply@grip.com
|
| 876 |
+
FROM_NAME=Grip
|
| 877 |
+
|
| 878 |
+
# Branding
|
| 879 |
+
APP_NAME=Grip
|
| 880 |
+
APP_TAGLINE=Money that minds itself.
|
| 881 |
+
```
|
| 882 |
+
|
| 883 |
+
#### 3. Generate Domain
|
| 884 |
+
|
| 885 |
+
- Go to Settings → Generate Domain
|
| 886 |
+
- Copy the URL (e.g., `https://grip-backend.up.railway.app`)
|
| 887 |
+
- Update `VITE_API_BASE_URL` in frontend
|
| 888 |
+
|
| 889 |
+
---
|
| 890 |
+
|
| 891 |
+
### Scheduled Tasks (GitHub Actions)
|
| 892 |
+
|
| 893 |
+
**Why GitHub Actions?**
|
| 894 |
+
- ✅ **Free unlimited** for public repos (2,000 min/month for private)
|
| 895 |
+
- ✅ Saves $1-2/month on Railway (serverless vs always-on)
|
| 896 |
+
- ✅ Reliable cron scheduling
|
| 897 |
+
- ✅ Easy monitoring via the GitHub Actions UI

**Setup:**

1. **Add Secrets** (One-time):
|
| 898 |
+
- Go to GitHub repo → Settings → Secrets → Actions
|
| 899 |
+
- Click "New repository secret" and add:
|
| 900 |
+
- `DATABASE_URL`: Your Supabase connection string.
|
| 901 |
+
- `SMTP_HOST`, `SMTP_PORT`, `SMTP_USER`, `SMTP_PASSWORD`: For email alerts.
|
| 902 |
+
- `GOOGLE_CLIENT_ID`, `GOOGLE_CLIENT_SECRET`: For Gmail OAuth.
|
| 903 |
+
- `GROQ_API_KEY`: For AI transaction extraction.
|
| 904 |
+
- `FRONTEND_ORIGIN`: Your deployment URL (e.g. `https://grip.vercel.app`).
|
| 905 |
+
|
| 906 |
+
2. **Workflows are already configured**:
|
| 907 |
+
- `daily-price-sync.yml`: Runs at 3:30 PM IST (Price updates).
|
| 908 |
+
- `gmail_sync.yml`: Runs every hour (Transactions).
|
| 909 |
+
- `daily-intelligence.yml`: Runs at 9:00 AM IST (Reminders & Insights).
|
| 910 |
+
|
| 911 |
+
3. **Test the Workflow**:
|
| 912 |
+
- Go to Actions tab
|
| 913 |
+
- Click "Daily Price Sync"
|
| 914 |
+
- Click "Run workflow" → "Run workflow"
|
| 915 |
+
- Check logs to verify success
|
| 916 |
+
|
| 917 |
+
4. **Set Railway to Serverless**:
|
| 918 |
+
- In Railway Environment Variables:
|
| 919 |
+
- `ENABLE_SCHEDULER=false` (disables internal scheduler)
|
| 920 |
+
- This saves ~$1-2/month in Railway credits
|
| 921 |
+
|
| 922 |
+
**Monitoring:**
|
| 923 |
+
- View logs in GitHub Actions tab
|
| 924 |
+
- Check Railway logs for API requests
|
| 925 |
+
- Verify data updates in Supabase dashboard
|
| 926 |
+
|
| 927 |
+
---
|
| 928 |
+
|
| 929 |
+
### Frontend Deployment (Vercel)
|
| 930 |
+
|
| 931 |
+
#### 1. Deploy to Vercel
|
| 932 |
+
|
| 933 |
+
```bash
|
| 934 |
+
cd Frontend
|
| 935 |
+
npm run build
|
| 936 |
+
vercel --prod
|
| 937 |
+
```
|
| 938 |
+
|
| 939 |
+
Or connect via Vercel Dashboard:
|
| 940 |
+
1. Go to [vercel.com](https://vercel.com)
|
| 941 |
+
2. Import Git Repository
|
| 942 |
+
3. Select your repo
|
| 943 |
+
4. Framework Preset: Vite
|
| 944 |
+
5. Root Directory: `Frontend`
|
| 945 |
+
6. Deploy!
|
| 946 |
+
|
| 947 |
+
#### 2. Environment Variables
|
| 948 |
+
|
| 949 |
+
Add in Vercel Dashboard → Settings → Environment Variables:
|
| 950 |
+
|
| 951 |
+
```bash
|
| 952 |
+
VITE_API_BASE_URL=https://grip-backend.up.railway.app/api/v1
|
| 953 |
+
VITE_APP_NAME=Grip
|
| 954 |
+
VITE_APP_TAGLINE=Money that minds itself.
|
| 955 |
+
```
|
| 956 |
+
|
| 957 |
+
#### 3. Update Google OAuth
|
| 958 |
+
|
| 959 |
+
- Go to [Google Cloud Console](https://console.cloud.google.com)
|
| 960 |
+
- APIs & Services → Credentials
|
| 961 |
+
- Edit OAuth 2.0 Client
|
| 962 |
+
- Add Authorized JavaScript Origins:
|
| 963 |
+
- `https://your-app.vercel.app`
|
| 964 |
+
- Add Authorized Redirect URIs:
|
| 965 |
+
- `https://your-app.vercel.app`
|
| 966 |
+
- Save
|
| 967 |
+
|
| 968 |
+
---
|
| 969 |
+
|
| 970 |
+
### Database Setup (Supabase)
|
| 971 |
+
|
| 972 |
+
1. **Create Project** at [supabase.com](https://supabase.com)
|
| 973 |
+
2. **Get Connection String**:
|
| 974 |
+
- Project Settings → Database
|
| 975 |
+
- Copy "Transaction" pooler string (port 6543)
|
| 976 |
+
3. **Add to Railway** as `DATABASE_URL`
|
| 977 |
+
4. **Add to GitHub Secrets** for Actions workflow
|
| 978 |
+
|
| 979 |
+
**Important:** Use port **6543** (Transaction pooler), not 5432, for Railway compatibility.
|
| 980 |
+
|
| 981 |
+
---
|
| 982 |
+
|
| 983 |
+
### Cost Breakdown
|
| 984 |
+
|
| 985 |
+
| Service | Free Tier | Your Usage | Cost |
|
| 986 |
+
|---------|-----------|------------|------|
|
| 987 |
+
| **Railway** (Serverless) | $5/month credit | ~$1-2/month | $0 |
|
| 988 |
+
| **Vercel** (Frontend) | Unlimited | Unlimited | $0 |
|
| 989 |
+
| **Supabase** (Database) | 500MB | ~50MB | $0 |
|
| 990 |
+
| **GitHub Actions** (Cron) | Unlimited (public) | 30 min/month | $0 |
|
| 991 |
+
| **Groq** (AI) | Free tier | ~1000 requests/month | $0 |
|
| 992 |
+
|
| 993 |
+
**Total: $0/month** 🎉
|
| 994 |
+
|
| 995 |
+
---
|
| 996 |
+
|
| 997 |
+
### Deployment Checklist
|
| 998 |
+
|
| 999 |
+
- [ ] Railway backend deployed with all env vars
|
| 1000 |
+
- [ ] Vercel frontend deployed with API URL
|
| 1001 |
+
- [ ] Supabase database created and connected
|
| 1002 |
+
- [ ] GitHub Actions secret added (`DATABASE_URL`)
|
| 1003 |
+
- [ ] Google OAuth redirect URIs updated
|
| 1004 |
+
- [ ] Test login flow
|
| 1005 |
+
- [ ] Test Gmail sync
|
| 1006 |
+
- [ ] Test scheduled task (manual trigger)
|
| 1007 |
+
- [ ] Verify investment price sync working
|
| 1008 |
+
|
| 1009 |
+
---
|
| 1010 |
+
|
| 1011 |
+
### Monitoring & Maintenance
|
| 1012 |
+
|
| 1013 |
+
**Daily Checks:**
|
| 1014 |
+
- GitHub Actions logs (scheduled task status)
|
| 1015 |
+
- Railway logs (API errors)
|
| 1016 |
+
- Supabase dashboard (data integrity)
|
| 1017 |
+
|
| 1018 |
+
**Weekly:**
|
| 1019 |
+
- Check Railway usage (should be <$2)
|
| 1020 |
+
- Review Groq API usage
|
| 1021 |
+
- Test critical flows (login, sync, forecast)
|
| 1022 |
+
|
| 1023 |
+
**Monthly:**
|
| 1024 |
+
- Review GitHub Actions minutes (should be ~30)
|
| 1025 |
+
- Check Railway credit balance
|
| 1026 |
+
- Update dependencies if needed
|
| 1027 |
+
|
| 1028 |
+
---
|
| 1029 |
+
|
| 1030 |
+
## 🔒 Privacy & Data Handling
|
| 1031 |
+
|
| 1032 |
+
### What We Store
|
| 1033 |
+
- Transaction metadata (amount, merchant, category, dates)
|
| 1034 |
+
- Investment snapshots (units, prices, dates)
|
| 1035 |
+
- Encrypted OAuth tokens (Gmail access)
|
| 1036 |
+
- User preferences and mappings
|
| 1037 |
+
- Sync logs (for debugging)
|
| 1038 |
+
|
| 1039 |
+
### What We DON'T Store
|
| 1040 |
+
- Full email content
|
| 1041 |
+
- Credit card CVVs or PINs
|
| 1042 |
+
- Unmasked PAN/Aadhaar numbers
|
| 1043 |
+
- Gmail passwords
|
| 1044 |
+
- Any sensitive PII
|
| 1045 |
+
|
| 1046 |
+
### Data Flow
|
| 1047 |
+
1. Email received in your Gmail
|
| 1048 |
+
2. OAuth token grants read access
|
| 1049 |
+
3. Email content fetched via API
|
| 1050 |
+
4. **Sanitization happens locally** (regex masking)
|
| 1051 |
+
5. Sanitized text sent to Groq for extraction
|
| 1052 |
+
6. Extracted JSON stored in database
|
| 1053 |
+
7. Investment transactions auto-mapped to holdings
|
| 1054 |
+
8. Daily price sync updates portfolio values
|
| 1055 |
+
9. Original email remains in your Gmail (unchanged)
|
| 1056 |
+
|
| 1057 |
+
---
|
| 1058 |
+
|
| 1059 |
+
## 🤝 Contributing
|
| 1060 |
+
|
| 1061 |
+
This project is currently private. For feature requests or bug reports, please open an issue.
|
| 1062 |
+
|
| 1063 |
+
---
|
| 1064 |
+
|
| 1065 |
+
## 📝 License
|
| 1066 |
+
|
| 1067 |
+
Private and proprietary. All rights reserved.
|
| 1068 |
+
|
| 1069 |
+
---
|
| 1070 |
+
|
| 1071 |
+
## 🛠️ Environment & Security Configuration
|
| 1072 |
+
|
| 1073 |
+
Grip is designed with a "Privacy-First" and "Cloud-Resilient" architecture. Depending on where you deploy (Local vs. Cloud), you may need to adjust certain security measures:
|
| 1074 |
+
|
| 1075 |
+
### 📧 Email Connection Modes
|
| 1076 |
+
Most cloud providers (Hugging Face, Railway) block **Ports 25, 587, and 465** to prevent spam.
|
| 1077 |
+
- **Grip Email Relay (Microservice)**: By default, we use a dedicated relay service (located in `/EmailService`) intended for deployment on Vercel (Port 443) to bypass SMTP blocks. Configure `EMAIL_RELAY_URL` in your `.env`.
|
| 1078 |
+
- **Standard SMTP**: If running locally or on a VPS where ports are open, uncomment the `LEGACY DIRECT SMTP` block in `app/core/email.py` and set your Gmail App Password.
|
| 1079 |
+
|
| 1080 |
+
### 🤖 LLM Intelligence & Fallbacks
|
| 1081 |
+
We utilize a dual-track AI system for maximum reliability:
|
| 1082 |
+
- **Grip Intelligence (Primary)**: A high-performance, private engine hosted on Hugging Face Spaces.
|
| 1083 |
+
- **Groq Llama-3 (Robust Fallback)**: If the primary engine is sleeping or unreachable, Grip automatically falls back to Groq.
|
| 1084 |
+
- **Note**: Ensure `GROQ_API_KEY` is set in your environment variables. If you wish to use only Groq, uncomment the relevant lines in `app/core/llm.py`.
|
| 1085 |
+
|
| 1086 |
+
### 🔐 Security Measures
|
| 1087 |
+
- **Sanitization First**: All PII (PAN, Account numbers) is masked via local regex logic *before* being processed by any AI engine.
|
| 1088 |
+
- **Scoped Ingress**: Gmail OAuth is restricted to `gmail.readonly` and specifically queries for transaction-only keywords.
|
| 1089 |
+
|
| 1090 |
+
---
|
| 1091 |
+
|
| 1092 |
+
## 🙏 Acknowledgments
|
| 1093 |
+
|
| 1094 |
+
Built with incredible open-source tools:
|
| 1095 |
+
- **Groq** - Lightning-fast LLM inference
|
| 1096 |
+
- **Meta Prophet** - Time-series forecasting
|
| 1097 |
+
- **FastAPI** - Modern Python web framework
|
| 1098 |
+
- **React** - UI library
|
| 1099 |
+
- **PostgreSQL** - Robust database
|
| 1100 |
+
- **scipy** - Scientific computing for XIRR
|
| 1101 |
+
- **yfinance** - Stock market data
|
| 1102 |
+
- **mfapi.in** - Indian mutual fund NAV data
|
| 1103 |
+
- **Render** - Backend deployment
|
| 1104 |
+
- **Vercel** - Frontend deployment
|
| 1105 |
+
|
| 1106 |
+
---
|
| 1107 |
+
|
| 1108 |
+
## 📚 Documentation
|
| 1109 |
+
|
| 1110 |
+
- **[Quick Start Guide](GMAIL_SYNC_QUICKSTART.md)** - 15-minute setup
|
| 1111 |
+
- **[CAMS Import Guide](CAMS_IMPORT_GUIDE.md)** - Import years of history in 60s
|
| 1112 |
+
- **[Implementation Details](GMAIL_SYNC_IMPLEMENTATION.md)** - Technical deep-dive
|
| 1113 |
+
- **[Session Summary](SESSION_SUMMARY.md)** - Recent updates
|
| 1114 |
+
|
| 1115 |
+
---
|
| 1116 |
+
|
| 1117 |
+
## 💬 Support
|
| 1118 |
+
|
| 1119 |
+
For setup help or questions, refer to:
|
| 1120 |
+
1. **API Docs**: http://localhost:8000/docs
|
| 1121 |
+
2. **Troubleshooting**: Check `GMAIL_SYNC_IMPLEMENTATION.md`
|
| 1122 |
+
3. **Common Issues**: See "Troubleshooting" section in setup guides
|
| 1123 |
+
|
| 1124 |
+
---
|
| 1125 |
+
|
| 1126 |
+
<div align="center">
|
| 1127 |
+
|
| 1128 |
+
**Grip** - Autonomous Financial Intelligence.
|
| 1129 |
+
|
| 1130 |
+
*Made with ❤️ , effort and AI*
|
| 1131 |
+
|
| 1132 |
+
</div>
|
app/core/config.py
ADDED
|
@@ -0,0 +1,88 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from pydantic_settings import BaseSettings, SettingsConfigDict
|
| 2 |
+
from functools import lru_cache
|
| 3 |
+
from typing import Optional
|
| 4 |
+
|
| 5 |
+
class Settings(BaseSettings):
    """Application configuration, loaded from the environment / `.env` file.

    All values have safe local defaults; production deployments override
    them via environment variables (see ``model_config`` at the bottom).
    """

    PROJECT_NAME: str = "Grip"
    API_V1_STR: str = "/api/v1"
    ENVIRONMENT: str = "local"
    APP_TIMEZONE: str = "Asia/Kolkata"  # Default to IST

    DATABASE_URL: str = ""

    # SECURITY NOTE: this placeholder default MUST be overridden in any real
    # deployment — tokens signed with it are trivially forgeable.
    SECRET_KEY: str = "SECRET_KEY"
    ALGORITHM: str = "HS256"
    ACCESS_TOKEN_EXPIRE_MINUTES: int = 60 * 24 * 3  # 3 days
    GRIP_SECRET: str = ""

    # Paths that bypass the authentication middleware (public endpoints).
    EXCEPTION_ROUTES: list[str] = [
        "/",
        "/privacy",
        "/terms",
        "/docs",
        "/redoc",
        "/openapi.json",
        "/api/v1/openapi.json",
        "/api/v1/auth/register",
        "/api/v1/auth/verify-otp",
        "/api/v1/auth/token",
        "/api/v1/auth/google-login",
        "/api/v1/auth/google/one-tap",
        "/api/v1/sync/webhook"
    ]

    USE_AI_FORECASTING: bool = True
    ENABLE_SCHEDULER: bool = True  # Set to False when using external cron (e.g., GitHub Actions)

    GROQ_API_KEY: str = ""
    GROQ_MODEL: str = "llama-3.1-8b-instant"

    # Local LLM Settings
    LOCAL_LLM_CONTEXT: int = 2048
    LOCAL_MODEL_REPO: str = "bartowski/google_gemma-4-E4B-it-GGUF"
    LOCAL_MODEL_FILE: str = "google_gemma-4-E4B-it-Q4_K_M.gguf"
    LOCAL_MODEL_DIR: str = "models"

    GOOGLE_CLIENT_ID: str = ""
    GOOGLE_CLIENT_SECRET: str = ""
    FRONTEND_ORIGIN: str = "https://grip-akdey.vercel.app"  # Frontend URL for OAuth origin parameter
    GOOGLE_REDIRECT_URI: str = "postmessage"
    GMAIL_PUBSUB_TOPIC: Optional[str] = None

    # Firebase Settings
    FIREBASE_CREDENTIALS_PATH: str = "firebase_credentials.json"

    # Email Settings
    SMTP_HOST: str = "smtp.gmail.com"
    SMTP_PORT: int = 587
    SMTP_USER: str = ""
    SMTP_PASSWORD: str = ""
    FROM_EMAIL: str = "noreply@grip.com"
    FROM_NAME: str = "Grip"

    # External Email Relay (for bypassing cloud SMTP blocks)
    EMAIL_RELAY_URL: Optional[str] = "https://akdey-grip-email-relay.vercel.app/send"
    EMAIL_RELAY_SECRET: Optional[str] = None

    # Branding
    APP_NAME: str = "GRIP"
    APP_TAGLINE: str = "Autonomous Financial Intelligence"

    @property
    def ASYNC_DATABASE_URL(self) -> str:
        """``DATABASE_URL`` rewritten for the asyncpg driver.

        - Upgrades the ``postgresql://`` scheme to ``postgresql+asyncpg://``.
        - Strips the ``pgbouncer=true`` query flag (asyncpg does not accept
          it) while preserving any other query parameters.
        """
        url = self.DATABASE_URL
        if url.startswith("postgresql://"):
            url = url.replace("postgresql://", "postgresql+asyncpg://", 1)

        if "pgbouncer=true" in url:
            # Remove the flag without corrupting the rest of the query string.
            # A blanket removal of "?pgbouncer=true" would have turned
            # "?pgbouncer=true&sslmode=require" into invalid "&sslmode=require".
            url = url.replace("?pgbouncer=true&", "?")
            url = url.replace("&pgbouncer=true", "")
            url = url.replace("?pgbouncer=true", "")

        return url

    model_config = SettingsConfigDict(env_file=".env", case_sensitive=True, extra="ignore")
|
| 85 |
+
|
| 86 |
+
@lru_cache
def get_settings() -> Settings:
    """Return a process-wide cached :class:`Settings` instance.

    ``lru_cache`` makes this a cheap singleton: the environment / ``.env``
    file is parsed once, and every caller shares the same object.
    """
    return Settings()
|
app/core/database.py
ADDED
|
@@ -0,0 +1,82 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession, async_sessionmaker
|
| 2 |
+
from sqlalchemy.orm import DeclarativeBase
|
| 3 |
+
from sqlalchemy.pool import NullPool
|
| 4 |
+
from app.core.config import get_settings
|
| 5 |
+
import ssl
|
| 6 |
+
|
| 7 |
+
settings = get_settings()

# Resolve the async connection URL. Fall back to an in-memory SQLite database
# so the app can still boot (docs, health checks) without a configured DB.
db_url = settings.ASYNC_DATABASE_URL
if not db_url:
    print("WARNING: DATABASE_URL not set. Using in-memory SQLite.")
    db_url = "sqlite+aiosqlite:///:memory:"

# Initialize engine
engine = None

try:
    connect_args = {}
    poolclass = None

    if "sqlite" in db_url:
        # SQLite: disable the same-thread check so sessions can be used
        # across the server's worker threads.
        connect_args = {"check_same_thread": False}
    else:
        # PostgreSQL (asyncpg) configuration
        connect_args = {
            "statement_cache_size": 0,  # Required for Supabase pooler (no prepared-statement cache through pgbouncer)
            "server_settings": {"application_name": "grip_backend"},
            "timeout": 20,
            "command_timeout": 20
        }

        # Supabase-specific configuration
        if "supabase" in db_url.lower():
            # Port 6543 is Supabase's Transaction Pooler; avoid double pooling
            # by disabling SQLAlchemy's client-side pool.
            if ":6543" in db_url:
                poolclass = NullPool

            # SECURITY NOTE(review): TLS certificate verification is disabled
            # here, which exposes the DB connection to man-in-the-middle
            # attacks. Presumably a workaround for Supabase certificate
            # chains — confirm whether verification can be re-enabled.
            ssl_context = ssl.create_default_context()
            ssl_context.check_hostname = False
            ssl_context.verify_mode = ssl.CERT_NONE
            connect_args["ssl"] = ssl_context

    # Create engine
    engine_kwargs = {"echo": False, "connect_args": connect_args}

    if poolclass is NullPool:
        engine_kwargs["poolclass"] = NullPool
    else:
        # Standard client-side pooling for direct (non-pooler) connections.
        engine_kwargs.update({
            "pool_pre_ping": True,
            "pool_recycle": 300,
            "pool_size": 10,
            "max_overflow": 20,
        })

    engine = create_async_engine(db_url, **engine_kwargs)

except Exception as e:
    # Never crash at import time: log the failure loudly and fall back to a
    # throwaway in-memory database so the process can still start.
    print(f"CRITICAL: Failed to create database engine: {e}")
    import traceback
    traceback.print_exc()
    # Fallback to SQLite
    engine = create_async_engine("sqlite+aiosqlite:///:memory:", poolclass=NullPool)

# Session factory
AsyncSessionLocal = async_sessionmaker(
    bind=engine,
    class_=AsyncSession,
    expire_on_commit=False,  # keep ORM objects readable after commit without re-fetch
    autoflush=False
)
|
| 74 |
+
|
| 75 |
+
# Base model
|
| 76 |
+
class Base(DeclarativeBase):
    """Declarative base class that all ORM models in the app inherit from."""
|
| 78 |
+
|
| 79 |
+
# Dependency
|
| 80 |
+
async def get_db():
    """FastAPI dependency yielding one AsyncSession per request.

    The session is always closed when the request finishes, even if the
    endpoint raised.
    """
    session = AsyncSessionLocal()
    try:
        yield session
    finally:
        await session.close()
|
app/core/email.py
ADDED
|
@@ -0,0 +1,109 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import smtplib
|
| 2 |
+
from email.mime.text import MIMEText
|
| 3 |
+
from email.mime.multipart import MIMEMultipart
|
| 4 |
+
import logging
|
| 5 |
+
from app.core.config import get_settings
|
| 6 |
+
|
| 7 |
+
settings = get_settings()
|
| 8 |
+
logger = logging.getLogger(__name__)
|
| 9 |
+
|
| 10 |
+
import httpx
|
| 11 |
+
|
| 12 |
+
def send_email(to_email: str, subject: str, html_content: str):
    """Send an HTML email to a single recipient.

    Delivery strategy:
      1. If an external relay is configured (EMAIL_RELAY_URL and
         EMAIL_RELAY_SECRET), POST the message to it. Recommended on hosts
         that block outbound SMTP ports (e.g. Hugging Face Spaces).
      2. Otherwise nothing is sent and False is returned; a direct-SMTP
         implementation is deliberately kept below (disabled via a string
         literal) for local development.

    Returns True on success, False on any failure. Errors are logged,
    never raised.
    """
    # External Relay (Recommended for cloud hosting like HF Spaces as required ports are blocked)
    if settings.EMAIL_RELAY_URL and settings.EMAIL_RELAY_SECRET:
        try:
            payload = {
                "to_email": to_email,
                "subject": subject,
                "html_content": html_content,
                "from_name": settings.FROM_NAME
            }
            headers = {"X-Grip-Secret": settings.EMAIL_RELAY_SECRET}

            # Using synchronous request for simplicity in background tasks,
            # though async is generally better.
            with httpx.Client() as client:
                resp = client.post(settings.EMAIL_RELAY_URL, json=payload, headers=headers, timeout=15.0)
                if resp.status_code == 200:
                    return True
                else:
                    logger.error(f"Relay failed ({resp.status_code}): {resp.text}")
                    return False
        except Exception as e:
            logger.error(f"Relay connection error: {e}")
            return False

    # --- LEGACY DIRECT SMTP (Approach A/B) ---
    # NOTE: DO NOT REMOVE THIS BLOCK.
    # Standard SMTP (Port 587/465) is frequently blocked on cloud providers like HF Spaces.
    # The triple-quoted string below deliberately disables this code (it is a
    # no-op expression statement) while keeping it available for local use.
    """
    if not settings.SMTP_USER or not settings.SMTP_PASSWORD:
        logger.warning("SMTP credentials not set. Email not sent.")
        return False

    try:
        message = MIMEMultipart("alternative")
        message["Subject"] = subject
        message["From"] = f"{settings.FROM_NAME} <{settings.FROM_EMAIL}>"
        message["To"] = to_email

        part = MIMEText(html_content, "html")
        message.attach(part)

        # Use SMTP_SSL for port 465, otherwise standard SMTP + starttls
        if settings.SMTP_PORT == 465:
            with smtplib.SMTP_SSL(settings.SMTP_HOST, settings.SMTP_PORT) as server:
                server.login(settings.SMTP_USER, settings.SMTP_PASSWORD)
                server.sendmail(settings.FROM_EMAIL, to_email, message.as_string())
        else:
            with smtplib.SMTP(settings.SMTP_HOST, settings.SMTP_PORT) as server:
                server.starttls()
                server.login(settings.SMTP_USER, settings.SMTP_PASSWORD)
                server.sendmail(settings.FROM_EMAIL, to_email, message.as_string())

        return True
    except Exception as e:
        logger.error(f"Failed to send email to {to_email}: {e}")
        return False
    """
    logger.warning("No email relay or SMTP configured accurately.")
    return False
|
| 77 |
+
|
| 78 |
+
def send_otp_email(to_email: str, otp: str):
    """Send the branded OTP verification email for sign-in.

    The OTP appears in both the subject line and the HTML body. The
    template states a 10-minute expiry — presumably enforced by the auth
    flow, not here; confirm against the caller.
    Returns whatever send_email() returns (True/False).
    """
    subject = f"Your {settings.APP_NAME} Verification Code: {otp}"
    html_content = f"""
    <html>
    <body style="font-family: 'Inter', -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, sans-serif; color: #1e293b; line-height: 1.6; margin: 0; padding: 20px; background-color: #f8fafc;">
        <div style="max-width: 500px; margin: 0 auto; background: white; padding: 40px; border-radius: 20px; border: 1px solid #e2e8f0; box-shadow: 0 10px 25px -5px rgba(0,0,0,0.05);">
            <div style="margin-bottom: 30px; text-align: left;">
                <span style="font-size: 24px; font-weight: 900; letter-spacing: -0.02em; color: #111;">GRIP</span>
                <div style="height: 4px; width: 40px; background: #4F46E5; margin-top: 4px; border-radius: 2px;"></div>
            </div>

            <h2 style="color: #111; margin-top: 0; font-size: 22px; font-weight: 800;">Verify your email</h2>
            <p style="color: #475569; font-size: 16px;">Welcome! Please use the verification code below to complete your sign-in to {settings.APP_NAME}.</p>

            <div style="background: #f1f5f9; padding: 30px; border-radius: 12px; margin: 25px 0; text-align: center; border: 1px solid #e2e8f0;">
                <span style="font-size: 36px; font-weight: 900; letter-spacing: 12px; color: #111; font-family: monospace; display: block; margin-left: 12px;">{otp}</span>
            </div>

            <p style="font-size: 14px; color: #94a3b8; text-align: center;">This code will expire in 10 minutes.</p>

            <div style="margin-top: 40px; border-top: 1px solid #f1f5f9; padding-top: 20px;">
                <p style="font-size: 14px; color: #64748b; margin: 0;">Stay focused,</p>
                <p style="font-size: 14px; font-weight: bold; color: #111; margin: 4px 0;">The {settings.APP_NAME} Team</p>
            </div>
        </div>
        <div style="max-width: 500px; margin: 10px auto; text-align: center;">
            <p style="font-size: 11px; color: #94a3b8;">If you didn't request this code, you can safely ignore this email.</p>
        </div>
    </body>
    </html>
    """
    return send_email(to_email, subject, html_content)
|
app/core/llm.py
ADDED
|
@@ -0,0 +1,293 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import logging
|
| 2 |
+
import re
|
| 3 |
+
import httpx
|
| 4 |
+
import json
|
| 5 |
+
import asyncio
|
| 6 |
+
import os
|
| 7 |
+
import threading
|
| 8 |
+
from typing import Optional, Dict, Any, List
|
| 9 |
+
from app.core.config import get_settings
|
| 10 |
+
|
| 11 |
+
# Import-time trace so deploy logs show whether this module loaded at all.
print(">>> LLM MODULE IMPORTED", flush=True)

# Global flag to track if llama-cpp is available and usable on this system.
# Checked (and possibly flipped off) at runtime by LLMService.generate_response.
HAS_LLAMA_CPP = False
try:
    import llama_cpp
    HAS_LLAMA_CPP = True
except Exception:
    # We don't log here to avoid cluttering startup logs if the user is running
    # in an environment where they explicitly didn't install it.
    pass

settings = get_settings()
logger = logging.getLogger(__name__)
|
| 25 |
+
|
| 26 |
+
class LocalLLMEngine:
    """Handles local execution of GGUF models using llama-cpp-python.

    The model file is lazily downloaded from Hugging Face Hub on first use
    and cached under ``settings.LOCAL_MODEL_DIR``. All methods degrade
    gracefully (return None) if llama-cpp or the model is unavailable.
    """

    def __init__(self):
        # Model handle is created lazily by _ensure_model(); _lock guards both
        # the check and the (slow) download/initialization.
        self._model = None
        self._lock = threading.Lock()
        self.repo_id = settings.LOCAL_MODEL_REPO
        self.filename = settings.LOCAL_MODEL_FILE
        self.models_dir = settings.LOCAL_MODEL_DIR

    def _ensure_model(self):
        """Lazy load and potentially download the model. Thread-safe.

        Returns the Llama instance, or None when dependencies are missing or
        initialization fails. The lock is held for the entire download and
        load, so concurrent callers block until the first one finishes.
        """
        with self._lock:
            if self._model:
                return self._model

            try:
                from llama_cpp import Llama
                from huggingface_hub import hf_hub_download
            except Exception as e:
                logger.error(f"Cannot load local LLM engine (likely missing system dependencies for llama_cpp): {e}")
                return None

            try:
                # Create models directory if it doesn't exist
                os.makedirs(self.models_dir, exist_ok=True)

                # Check for model existence. Use absolute path for reliability in Docker containers.
                model_path = os.path.abspath(os.path.join(self.models_dir, self.filename))
                logger.info(f"LocalLLMEngine: Checking for model at {model_path}")

                if os.path.exists(model_path):
                    file_size = os.path.getsize(model_path)
                    logger.info(f"LocalLLMEngine: Model file found. Size: {file_size / (1024*1024):.2f} MB")
                    # A real GGUF of this class should be much larger; a tiny
                    # file is almost certainly a Git-LFS pointer or truncated
                    # download, so delete it and re-fetch.
                    if file_size < 100 * 1024 * 1024:  # Less than 100MB is likely a pointer/corrupted for a 1.7B model
                        logger.warning(f"LocalLLMEngine: Model file seems too small ({file_size} bytes). It might be an LFS pointer. Re-downloading...")
                        os.remove(model_path)

                if not os.path.exists(model_path):
                    logger.warning(f"LocalLLMEngine: Model not found at expected path or removed. Attempting download from {self.repo_id}...")
                    downloaded_path = hf_hub_download(
                        repo_id=self.repo_id,
                        filename=self.filename,
                        local_dir=self.models_dir
                    )
                    model_path = os.path.abspath(downloaded_path)
                    logger.info(f"LocalLLMEngine: Download complete. Size: {os.path.getsize(model_path) / (1024*1024):.2f} MB")

                # Initialize Llama-cpp with optimized context and caching
                # n_ctx: from settings — sufficient for long emails + context
                # n_threads: 1 - Prevents CPU contention
                # logits_all: False - Saves memory
                self._model = Llama(
                    model_path=model_path,
                    n_ctx=settings.LOCAL_LLM_CONTEXT,
                    n_threads=1,
                    n_gpu_layers=0,  # Force CPU
                    logits_all=False,
                    verbose=False
                )
                logger.info(f"Local LLM engine initialized (ctx: {settings.LOCAL_LLM_CONTEXT}, threads: 1).")
                return self._model
            except Exception as e:
                logger.error(f"Failed to initialize local LLM engine: {e}")
                return None

    def _strip_thoughts(self, text: str) -> str:
        """Removes internal thinking blocks from Gemma 4 output.

        Returns the input unchanged (apart from stripping) when no thought
        block is present; safe on empty/None-ish input.
        """
        if not text:
            return text
        # Gemma 4 thought pattern: <|channel>thought ... <channel|>
        text = re.sub(r'<\|channel>thought.*?<channel\|>', '', text, flags=re.DOTALL)
        return text.strip()

    def generate(self, prompt: str, system_prompt: str, temperature: float) -> Optional[str]:
        """Generate a completion using the local model.

        Blocking and CPU-bound — callers should run this in a thread
        executor (see LLMService.generate_response). Returns the generated
        text with thought blocks stripped, or None on any failure.
        """
        model = self._ensure_model()
        if not model:
            return None

        try:
            # Format prompt using the chat-turn template expected by the model.
            formatted_prompt = f"<|turn>system\n{system_prompt} <turn|>\n<|turn>user\n{prompt} <turn|>\n<|turn>model\n"

            logger.debug(f"LocalLLMEngine: Starting inference with Gemma 4...")
            output = model(
                formatted_prompt,
                max_tokens=800,  # Increased for Gemma 4's reasoning capacity
                temperature=temperature,
                stop=["<turn|>", "<|turn>", "<|im_end|>", "<|endoftext|>"],
                echo=False
            )
            raw_text = output['choices'][0]['text'].strip()
            text = self._strip_thoughts(raw_text)
            logger.info(f"LocalLLMEngine: Inference complete. Generated {len(text)} characters (stripped thoughts).")
            return text
        except Exception as e:
            logger.error(f"Error during local LLM inference: {e}")
            return None
|
| 125 |
+
|
| 126 |
+
class LLMService:
    """Centralized service for Large Language Model interactions.

    Strategy: the local GGUF engine (LocalLLMEngine) is the primary backend
    for privacy and zero cost; the Groq API exists as a currently-disabled
    fallback. Any content destined for an external API must first pass
    through _sanitize_for_external().
    """

    def __init__(self):
        self.groq_api_key = settings.GROQ_API_KEY
        self.groq_model = settings.GROQ_MODEL
        self.groq_url = "https://api.groq.com/openai/v1/chat/completions"
        self.local_engine = LocalLLMEngine()

        # PII patterns for sanitizing content before sending to external APIs.
        # Applied in order, so broader patterns (EMAIL) run before narrower ones.
        self._pii_patterns = [
            # (re.compile(r'[a-zA-Z0-9.\-_]{2,}@[a-zA-Z]{2,}'), '<UPI>'), # Allow LLM to read UPI based merchants
            (re.compile(r'\b[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Z|a-z]{2,}\b'), '<EMAIL>'),
            (re.compile(r'(?:\+?91|0)?[6-9]\d{9}'), '<PHONE>'),
            (re.compile(r'(?:\d[ -]*?){12,19}'), '<CARD>'),
            (re.compile(r'[Xx]+\d{3,6}'), '<ACCOUNT>'),
            (re.compile(r'[A-Z]{5}[0-9]{4}[A-Z]{1}'), '<PAN>'),
            (re.compile(r'\d{4}\s\d{4}\s\d{4}'), '<AADHAAR>'),
        ]

    @property
    def is_enabled(self) -> bool:
        """True when at least one LLM backend is usable (local only for now)."""
        return HAS_LLAMA_CPP  # or bool(self.groq_api_key)

    def _sanitize_for_external(self, text: str) -> str:
        """Extra PII scrub before sending to any external/third-party LLM API.

        Genericizes personalized greetings, then masks each configured PII
        pattern with its placeholder label. Empty/None input passes through.
        """
        if not text:
            return text
        text = re.sub(r'(?i)(Dear|Hello|Hi)\s+[A-Za-z\s]+,', r'\1 Customer,', text)
        for pattern, label in self._pii_patterns:
            text = pattern.sub(label, text)
        return text

    async def generate_response(
        self,
        prompt: str,
        system_prompt: Optional[str] = "You are a helpful financial assistant.",
        temperature: float = 0.5,
        response_format: Optional[str] = None,
        timeout: float = 120.0  # Applies to the (disabled) Groq path; local inference is not time-bounded
    ) -> Optional[str]:
        """Generate a free-text response, prioritizing local execution.

        Returns the model output, or None if no backend is available or
        inference failed. ``response_format`` is only honoured by the Groq
        fallback (currently disabled).
        """
        global HAS_LLAMA_CPP

        # 1. Try Local Engine (Primary — high privacy, no costs)
        if HAS_LLAMA_CPP:
            # Local inference is blocking/CPU-bound: run it in the default
            # executor so the event loop stays responsive.
            try:
                # get_running_loop() is the correct call inside a coroutine;
                # get_event_loop() is deprecated here since Python 3.10.
                loop = asyncio.get_running_loop()
                res = await loop.run_in_executor(
                    None,
                    self.local_engine.generate,
                    prompt,
                    system_prompt,
                    temperature
                )
                if res:
                    logger.info(">>> LLM_ENGINE: Local (Gemma 4) success.")
                    return res
                # If we get here it means inference failed or engine is broken
            except Exception as e:
                # If it fails once with a severe error (like shared lib missing), we can disable it
                # for the rest of this worker's lifecycle to stop log spam.
                if "shared object file" in str(e) or "libc" in str(e).lower():
                    HAS_LLAMA_CPP = False
                    logger.error(f">>> LLM_ENGINE: Fatal library error. Disabling local LLM: {e}")
                else:
                    logger.warning(f">>> LLM_ENGINE: Local engine runtime error: {e}")

        # 2. Try Groq (Fallback — external API, sanitize content).
        # Deliberately disabled for now; kept for quick re-enablement.
        # if self.groq_api_key:
        #     logger.info(f">>> LLM_ENGINE: Falling back to Groq ({self.groq_model})...")
        #     sanitized_prompt = self._sanitize_for_external(prompt)
        #     result = await self._call_groq(sanitized_prompt, system_prompt, temperature, response_format, timeout)
        #     if result:
        #         logger.info(">>> LLM_ENGINE: Groq success.")
        #         return result

        logger.warning("Local LLM engine is unavailable. Groq fallback is disabled.")
        return None

    async def _call_groq(
        self,
        prompt: str,
        system_prompt: str,
        temperature: float,
        response_format: Optional[str],
        timeout: float
    ) -> Optional[str]:
        """Call the Groq API (fallback provider). Content must be pre-sanitized.

        Returns the completion text, or None on rate limit / HTTP error /
        connection failure (all logged, never raised).
        """
        headers = {
            "Authorization": f"Bearer {self.groq_api_key}",
            "Content-Type": "application/json"
        }

        messages = []
        if system_prompt:
            messages.append({"role": "system", "content": system_prompt})
        messages.append({"role": "user", "content": prompt})

        payload = {
            "model": self.groq_model,
            "messages": messages,
            "temperature": temperature
        }

        if response_format == "json_object":
            payload["response_format"] = {"type": "json_object"}

        try:
            async with httpx.AsyncClient() as client:
                resp = await client.post(self.groq_url, headers=headers, json=payload, timeout=timeout)
                if resp.status_code == 200:
                    return resp.json()['choices'][0]['message']['content']
                elif resp.status_code == 429:
                    logger.error("Groq API Rate Limit Reached (429). Falling back to Regex engine.")
                    return None
                else:
                    logger.error(f"Groq API Error ({resp.status_code}): {resp.text[:200]}")
                    return None
        except Exception as e:
            logger.error(f"Groq Connection Error: {type(e).__name__}: {e}")
            return None

    async def generate_json(
        self,
        prompt: str,
        system_prompt: Optional[str] = "You are a financial intelligence engine. Always output valid JSON objects.",
        temperature: float = 0.2,
        timeout: float = 60.0
    ) -> Optional[Dict[str, Any]]:
        """Generate a response and parse it as a JSON object.

        Robust to markdown fences and surrounding chatter: the first
        brace-delimited span is extracted, fences stripped, and trailing
        commas removed as a last resort. Returns None when generation or
        parsing fails.
        """
        content = await self.generate_response(
            prompt=prompt,
            system_prompt=system_prompt,
            temperature=temperature,
            response_format="json_object",
            timeout=timeout
        )

        if not content:
            return None

        try:
            # Extract the outermost {...} span (greedy, across newlines).
            json_match = re.search(r'(\{.*\})', content, re.DOTALL)
            if json_match:
                content = json_match.group(1)

            content = content.strip().replace('```json', '').replace('```', '').strip()
            return json.loads(content)
        except (json.JSONDecodeError, IndexError) as e:
            logger.error(f"LLM JSON Decode Error: {e}. Content: {content[:200]}...")
            try:
                # Last resort cleanup: drop trailing commas before ] or }
                cleaned = re.sub(r',\s*([\]\}])', r'\1', content)
                return json.loads(cleaned)
            except Exception:
                return None
|
| 285 |
+
|
| 286 |
+
# Singleton-like instance
|
| 287 |
+
# Module-level cache for the lazily constructed shared service instance.
_llm_service = None

def get_llm_service() -> LLMService:
    """Return the process-wide LLMService, building it on first use."""
    global _llm_service
    _llm_service = _llm_service or LLMService()
    return _llm_service
|
app/core/logging_config.py
ADDED
|
@@ -0,0 +1,75 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import logging
|
| 2 |
+
import sys
|
| 3 |
+
import os
|
| 4 |
+
import re
|
| 5 |
+
from logging.handlers import RotatingFileHandler
|
| 6 |
+
|
| 7 |
+
from app.core.config import get_settings
|
| 8 |
+
|
| 9 |
+
# PII Patterns (Sync with SanitizerService)
|
| 10 |
+
PII_PATTERNS = {
    # 'UPI': re.compile(r'[a-zA-Z0-9.\-_]{2,}@[a-zA-Z]{2,}'),
    'EMAIL': re.compile(r'\b[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Z|a-z]{2,}\b'),
    'PHONE': re.compile(r'(?:\+?91|0)?[6-9]\d{9}'),
    'CARD': re.compile(r'(?:\d[ -]*?){12,19}'),
    'ACCOUNT': re.compile(r'[Xx]+\d{3,6}'),
    'PAN': re.compile(r'[A-Z]{5}[0-9]{4}[A-Z]{1}'),
    'AADHAAR': re.compile(r'\d{4}\s\d{4}\s\d{4}'),
}

class PIISanitizingFormatter(logging.Formatter):
    """Formatter that masks PII in every record it renders.

    Personalized greetings are genericized first, then each pattern in
    PII_PATTERNS replaces its matches with a <LABEL> placeholder, in
    dictionary order.
    """

    _GREETING_RE = re.compile(r'(?i)(Dear|Hello|Hi)\s+[A-Za-z\s]+,')

    def format(self, record: logging.LogRecord) -> str:
        rendered = super().format(record)

        # "Dear John," -> "Dear Customer," (case-insensitive)
        rendered = self._GREETING_RE.sub(r'\1 Customer,', rendered)

        # Mask each PII category with its placeholder label.
        for tag, regex in PII_PATTERNS.items():
            rendered = regex.sub(f'<{tag}>', rendered)

        return rendered
|
| 33 |
+
|
| 34 |
+
# Configure basic logging to output to console and file
|
| 35 |
+
def setup_logging():
    """Configure root logging with PII-sanitizing handlers.

    Always logs to stdout; additionally rotates into logs/app.log when the
    ENVIRONMENT setting is "local" (cloud hosts often have a read-only
    filesystem). Existing root handlers are removed first so repeated
    calls don't duplicate output.
    """
    settings = get_settings()
    log_format = "%(asctime)s - %(name)s - %(levelname)s - %(message)s"

    # Custom formatter that scrubs PII from every rendered record.
    formatter = PIISanitizingFormatter(log_format)

    # Console Handler
    console_handler = logging.StreamHandler(sys.stdout)
    console_handler.setFormatter(formatter)
    handlers = [console_handler]

    # Only add file logging if in local environment (Vercel has read-only FS)
    if settings.ENVIRONMENT == "local":
        # Ensure logs directory exists
        log_dir = "logs"
        if not os.path.exists(log_dir):
            os.makedirs(log_dir)

        log_file = os.path.join(log_dir, "app.log")
        file_handler = RotatingFileHandler(log_file, maxBytes=5*1024*1024, backupCount=5)
        file_handler.setFormatter(formatter)
        handlers.append(file_handler)

    # Reset any existing handlers so this call fully owns root configuration.
    root_logger = logging.getLogger()
    for h in root_logger.handlers[:]:
        root_logger.removeHandler(h)

    logging.basicConfig(
        level=logging.INFO,
        handlers=handlers
    )

    # Set levels for noisy libraries
    logging.getLogger("uvicorn.access").setLevel(logging.WARNING)
    logging.getLogger("sqlalchemy.engine").setLevel(logging.WARNING)
    logging.getLogger("python_multipart").setLevel(logging.WARNING)
|
| 73 |
+
|
| 74 |
+
# Removed setup_logging() call from here - it's called in app/main.py
# Shared app-level logger for modules that import it from this file.
logger = logging.getLogger("app")
|
app/core/middleware.py
ADDED
|
@@ -0,0 +1,70 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from fastapi import Request, Response, status
|
| 2 |
+
from starlette.middleware.base import BaseHTTPMiddleware
|
| 3 |
+
from starlette.responses import JSONResponse
|
| 4 |
+
from jose import jwt, JWTError
|
| 5 |
+
from app.core.config import get_settings
|
| 6 |
+
|
| 7 |
+
settings = get_settings()
|
| 8 |
+
import logging
|
| 9 |
+
logger = logging.getLogger(__name__)
|
| 10 |
+
|
| 11 |
+
class AuthenticationMiddleware(BaseHTTPMiddleware):
    """JWT authentication middleware.

    Rejects any request lacking a valid Bearer token unless the path matches
    one of ``settings.EXCEPTION_ROUTES``. Validation is stateless: only the
    token signature and the ``sub`` claim are checked here — no DB lookup.
    The decoded subject (email) is stored on ``request.state.user_email`` so
    downstream dependencies (``get_current_user``) can resolve the full user.
    """

    async def dispatch(self, request: Request, call_next):
        # 1. Check for bypass/exception routes (health checks, docs, auth...).
        path = request.url.path
        is_exception = False
        for route in settings.EXCEPTION_ROUTES:
            if route == "/":
                # "/" must match exactly — a prefix match would exempt everything.
                if path == "/":
                    is_exception = True
                    break
            elif path.startswith(route):
                is_exception = True
                break

        if is_exception:
            logger.debug(f"Bypassing authentication for path: {path}")
            return await call_next(request)

        # 2. Extract the Bearer token from the Authorization header.
        auth_header = request.headers.get("Authorization")
        if not auth_header or not auth_header.startswith("Bearer "):
            logger.warning(f"Authentication failed: Missing or invalid token for path {path}")
            return JSONResponse(
                status_code=status.HTTP_401_UNAUTHORIZED,
                content={"detail": "Not authenticated"}
            )

        token = auth_header.split(" ")[1]

        # 3. Validate signature and required "sub" claim.
        try:
            payload = jwt.decode(token, settings.SECRET_KEY, algorithms=[settings.ALGORITHM])
            email: str = payload.get("sub")
            if email is None:
                raise JWTError
        except JWTError:
            logger.warning(f"Authentication failed: Invalid token for path {path}")
            return JSONResponse(
                status_code=status.HTTP_401_UNAUTHORIZED,
                content={"detail": "Could not validate credentials"}
            )

        # 4. Stateless authentication.
        # We trust the token signature. We do NOT hit the DB here.
        # Downstream dependencies (get_current_user) fetch the full user object if needed.
        request.state.user_email = email

        # 5. Process request.
        try:
            response = await call_next(request)
            logger.info(f"Response: {response.status_code} for {path}")
            return response
        except Exception as e:
            logger.error(f"Error in middleware processing {path}: {e}", exc_info=True)
            # SECURITY: do not echo exception details (str(e)) to the client —
            # they can leak internals (paths, SQL, config). The full traceback
            # is already in the server log above.
            return JSONResponse(
                status_code=500,
                content={"detail": "Internal Server Error in Middleware"}
            )
|
| 70 |
+
|
app/core/scheduler.py
ADDED
|
@@ -0,0 +1,344 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
|
| 2 |
+
import logging
|
| 3 |
+
from apscheduler.schedulers.asyncio import AsyncIOScheduler
|
| 4 |
+
from apscheduler.triggers.cron import CronTrigger
|
| 5 |
+
from sqlalchemy.ext.asyncio import AsyncSession
|
| 6 |
+
from typing import Optional
|
| 7 |
+
|
| 8 |
+
from app.core.database import AsyncSessionLocal
|
| 9 |
+
from app.core.config import get_settings
|
| 10 |
+
from app.features.wealth.service import WealthService
|
| 11 |
+
from app.features.sync.service import SyncService
|
| 12 |
+
from app.features.transactions.service import TransactionService
|
| 13 |
+
from app.features.categories.service import CategoryService
|
| 14 |
+
from app.core.llm import get_llm_service
|
| 15 |
+
from sqlalchemy import select, and_
|
| 16 |
+
from datetime import datetime, date, timedelta
|
| 17 |
+
|
| 18 |
+
logger = logging.getLogger(__name__)
|
| 19 |
+
settings = get_settings()
|
| 20 |
+
|
| 21 |
+
scheduler = AsyncIOScheduler()
|
| 22 |
+
|
| 23 |
+
async def run_daily_price_sync():
    """
    Task to sync prices for all investment holdings.
    Runs daily.

    Opens a fresh session, delegates to WealthService, and logs (rather than
    raises) failures so the scheduler loop is never killed by a bad run.
    """
    logger.info("Starting Daily Price Sync...")
    # Models are imported lazily so the SQLAlchemy registry (and all
    # relationships) is fully configured before the service touches the DB.
    from app.features.auth.models import User
    from app.features.wealth.models import InvestmentHolding, InvestmentSnapshot
    from app.features.credit_cards.models import CreditCard
    from app.features.bills.models import Bill

    async with AsyncSessionLocal() as db:
        service = WealthService(db)
        try:
            # We need to implement sync_all_holdings in WealthService first
            await service.sync_all_holdings_prices()
            logger.info("Daily Price Sync Completed Successfully.")
        except Exception as e:
            logger.error(f"Daily Price Sync Failed: {e}", exc_info=True)
|
| 42 |
+
|
| 43 |
+
async def run_surety_reminders():
    """
    Check for payments due today or in the next 3 days and send notifications.

    Scans unpaid Bill rows whose due date falls inside a rolling 3-day window
    and sends one personalized reminder per bill. Failures are logged per bill
    so one bad record cannot abort the whole scan.
    """
    logger.info("Starting Surety Reminders Scan...")
    # Lazy imports keep the SQLAlchemy model registry fully configured.
    from app.features.auth.models import User
    from app.features.bills.models import Bill
    from app.features.credit_cards.models import CreditCard
    from app.features.notifications.service import NotificationService

    today = date.today()
    reminder_window = today + timedelta(days=3)

    async with AsyncSessionLocal() as db:
        llm_service = get_llm_service()
        notification_service = NotificationService(db, llm_service)

        # Check explicit bills, join with User for personalization
        stmt = (
            select(Bill, User.full_name)
            .join(User, Bill.user_id == User.id)
            .where(
                and_(
                    Bill.due_date >= today,
                    Bill.due_date <= reminder_window,
                    Bill.is_paid == False
                )
            )
        )

        result = await db.execute(stmt)
        upcoming_data = result.all()

        for bill, full_name in upcoming_data:
            try:
                await notification_service.send_surety_reminder(
                    bill.user_id,
                    full_name,
                    bill.title,
                    float(bill.amount),
                    # Bills store a date; the notifier expects a datetime.
                    datetime.combine(bill.due_date, datetime.min.time())
                )
                logger.info(f"Sent reminder for bill: {bill.title}")
            except Exception as e:
                logger.error(f"Failed to send reminder for bill {bill.id}: {e}")

    logger.info("Surety Reminders Completed.")
|
| 90 |
+
|
| 91 |
+
async def run_weekly_insights(threshold: float = 1000):
    """
    Analyze spending for the last 7 days and send insights if growth is detected.

    For every user, finds categories (excluding 'Investment') whose absolute
    spend over the trailing 7 days exceeds ``threshold``, groups the rows per
    user, and sends one consolidated recap email per user. Per-user send
    failures are logged and do not abort the run.

    Args:
        threshold: Minimum 7-day category spend qualifying a category for
            inclusion. Defaults to 1000, matching the previous hard-coded value.
    """
    logger.info("Starting Weekly Insights Analysis...")
    from sqlalchemy import func
    from app.features.auth.models import User
    from app.features.transactions.models import Transaction
    from app.features.bills.models import Bill
    from app.features.credit_cards.models import CreditCard
    from app.features.notifications.service import NotificationService

    async with AsyncSessionLocal() as db:
        llm_service = get_llm_service()
        notification_service = NotificationService(db, llm_service)

        # 1. Find categories where spend exceeds the threshold in the last
        #    7 days. 'Investment' is excluded: it is not discretionary spend.
        seven_days_ago = datetime.now() - timedelta(days=7)

        stmt = (
            select(User.id, User.full_name, Transaction.category, func.sum(func.abs(Transaction.amount)).label("total"))
            .join(Transaction, User.id == Transaction.user_id)
            .where(Transaction.transaction_date >= seven_days_ago.date())
            .where(Transaction.category != 'Investment')
            .group_by(User.id, User.full_name, Transaction.category)
            .having(func.sum(func.abs(Transaction.amount)) > threshold)
        )

        result = await db.execute(stmt)
        data = result.all()
        logger.info(f"Weekly Insights: Found {len(data)} user/category pairs over ₹{threshold:,.0f} threshold.")

        # 2. Group by user for consolidated emails
        user_insights = {}
        for user_id, full_name, category, total in data:
            if user_id not in user_insights:
                user_insights[user_id] = {
                    "full_name": full_name,
                    "items": []
                }
            user_insights[user_id]["items"].append({
                "category": category,
                "amount": float(total)
            })

        # 3. Send consolidated emails; log-and-continue on per-user failure.
        for user_id, info in user_insights.items():
            try:
                await notification_service.send_weekly_summary(
                    user_id,
                    info["full_name"],
                    info["items"]
                )
                logger.info(f"Sent consolidated weekly recap to user {user_id} ({len(info['items'])} categories)")
            except Exception as e:
                logger.error(f"Failed to send weekly recap for user {user_id}: {e}")

    logger.info("Weekly Insights Completed.")
|
| 150 |
+
|
| 151 |
+
|
| 152 |
+
async def run_monthly_report(target_date: Optional[date] = None):
    """
    Generate and send a comprehensive monthly report for the previous month.
    Generally runs on the 1st of the month.

    When the reference date is the 1st, the report covers the month that just
    ended; otherwise it covers the month containing the reference date.

    Args:
        target_date: Optional override for "today" (manual runs / testing).
    """
    logger.info("Starting Monthly Report Generation...")
    # Lazy imports keep the SQLAlchemy model registry fully configured.
    from app.features.auth.models import User
    from app.features.bills.models import Bill
    from app.features.credit_cards.models import CreditCard
    from app.features.analytics.service import AnalyticsService

    # If today is March 1st, we want February's data
    ref_date = target_date or date.today()
    if ref_date.day == 1:
        prev_month_date = ref_date - timedelta(days=1)
        month_idx = prev_month_date.month
        year_idx = prev_month_date.year
    else:
        month_idx = ref_date.month
        year_idx = ref_date.year

    async with AsyncSessionLocal() as db:
        from app.features.notifications.service import NotificationService
        # NOTE(review): constructed without an LLM service here, unlike the
        # other jobs which call NotificationService(db, llm_service) —
        # confirm the second argument is optional.
        notification_service = NotificationService(db)
        analytics_service = AnalyticsService()

        result = await db.execute(select(User))
        users = result.scalars().all()

        for user in users:
            try:
                # Get full monthly summary & variance
                summary = await analytics_service.get_monthly_summary(db, user.id, month=month_idx, year=year_idx)
                variance = await analytics_service.get_variance_analysis(db, user.id, month=month_idx, year=year_idx)

                await notification_service.send_monthly_report(
                    user_id=user.id,
                    full_name=user.full_name,
                    summary=summary,
                    variance=variance
                )
                logger.info(f"Sent monthly report to {user.id} for {month_idx}/{year_idx}")
            except Exception as e:
                logger.error(f"Failed monthly report for {user.id}: {e}")

    logger.info("Monthly Report Job Completed.")
|
| 198 |
+
|
| 199 |
+
async def run_lifestyle_insights(override_date: Optional[date] = None):
    """
    Perform periodic checks for inactivity and special events (like Fridays).

    Per user: (1) nudge on exactly 7 or 14 days of transaction inactivity,
    (2) alert when safe-to-spend is exhausted, (3) on Fridays send an
    AI-driven weekend insight with the top spend category of the last week.

    Args:
        override_date: Optional override for "today" (manual runs / testing).
    """
    logger.info(f"Starting Lifestyle Insights Trigger... (Override: {override_date})")
    from app.features.auth.models import User
    from app.features.transactions.models import Transaction
    from app.features.bills.models import Bill
    from app.features.credit_cards.models import CreditCard
    from app.features.analytics.service import AnalyticsService
    from sqlalchemy import func

    today = override_date or date.today()

    async with AsyncSessionLocal() as db:
        from app.features.notifications.service import NotificationService
        notification_service = NotificationService(db)
        analytics_service = AnalyticsService()

        # 1. Fetch all users
        result = await db.execute(select(User))
        users = result.scalars().all()

        for user in users:
            try:
                # --- CHECK 1: INACTIVITY ---
                # Check for the last transaction date
                stmt = select(func.max(Transaction.transaction_date)).where(Transaction.user_id == user.id)
                res = await db.execute(stmt)
                last_txn_date = res.scalar()

                if last_txn_date:
                    days_diff = (today - last_txn_date).days
                    # If inactive for exactly 7 or 14 days, send a nudge
                    # (exact match prevents repeating the nudge every day).
                    if days_diff in [7, 14]:
                        await notification_service.send_inactivity_nudge(user.id, user.full_name, days_diff)
                        logger.info(f"Sent inactivity nudge to {user.id} ({days_diff} days)")

                # Safe-to-spend feeds both the buffer check and the Friday
                # insight; compute it once per user instead of twice.
                sts_data = await analytics_service.calculate_safe_to_spend_amount(db, user.id)

                # --- CHECK 2: BUFFER EMERGENCY BRAKE ---
                # If safe-to-spend is zero or negative, the buffer is exhausted.
                if sts_data.safe_to_spend <= 0:
                    await notification_service.send_buffer_alert(user.id, user.full_name, float(sts_data.safe_to_spend))
                    logger.info(f"Sent buffer emergency brake to {user.id}")

                # --- CHECK 3: WEEKEND (FRIDAY) ---
                if today.weekday() == 4:  # 4 is Friday
                    # Fetch top category for the last 7 days for more insight
                    seven_days_ago = today - timedelta(days=7)
                    cat_stmt = (
                        select(Transaction.category, func.sum(func.abs(Transaction.amount)).label("total"))
                        .where(Transaction.user_id == user.id)
                        .where(Transaction.transaction_date >= seven_days_ago)
                        .where(Transaction.category.notin_(["Income", "Transfer"]))
                        .group_by(Transaction.category)
                        .order_by(func.sum(func.abs(Transaction.amount)).desc())
                        .limit(1)
                    )
                    cat_res = await db.execute(cat_stmt)
                    top_cat_row = cat_res.first()
                    top_category = top_cat_row.category if top_cat_row else None

                    # Trigger the AI-driven weekend insight with more context
                    await notification_service.send_weekend_insight(
                        user_id=user.id,
                        full_name=user.full_name,
                        safe_to_spend=float(sts_data.safe_to_spend),
                        current_balance=float(sts_data.current_balance),
                        top_category=top_category
                    )
                    logger.info(f"Sent weekend insight to {user.id}")

            except Exception as e:
                logger.error(f"Error in lifestyle insight for user {user.id}: {e}")

    logger.info("Lifestyle Insights Completed.")
|
| 279 |
+
|
| 280 |
+
async def run_gmail_sync():
    """
    Task to sync Gmail transactions for all users.

    Builds the full service graph (LLM, categories, wealth, transactions,
    notifications, sync) on one session, then runs a sync per user with
    per-user error isolation so one failure cannot block the rest.
    """
    logger.info("Starting Gmail Sync...")
    async with AsyncSessionLocal() as db:
        # Import models inside function to avoid circular imports and ensure registry is ready
        from app.features.auth.models import User
        # Ensure relationships are loaded
        from app.features.credit_cards.models import CreditCard
        from app.features.bills.models import Bill
        from app.features.notifications.service import NotificationService

        # Instantiate services
        llm_service = get_llm_service()
        cat_service = CategoryService(db)
        wealth_service = WealthService(db)
        txn_service = TransactionService(db)
        notif_service = NotificationService(db, llm_service)
        sync_service = SyncService(db, txn_service, cat_service, wealth_service, notif_service, llm_service)

        # Fetch users with gmail credentials (only those completed OAuth)
        stmt = select(User).where(User.gmail_credentials.isnot(None))
        result = await db.execute(stmt)
        users = result.scalars().all()

        logger.info(f"Found {len(users)} users with Gmail credentials.")

        for user in users:
            try:
                logger.info(f"Syncing Gmail for user {user.id}...")
                await sync_service.execute_sync(user.id, "SCHEDULED_TASK")
            except Exception as e:
                logger.error(f"Gmail sync failed for user {user.id}: {e}")

    logger.info("Gmail Sync Completed.")
|
| 316 |
+
|
| 317 |
+
def start_scheduler():
    """
    Register all background jobs and start the scheduler.

    No-op when ENABLE_SCHEDULER is False — set it False when an external
    cron (e.g., GitHub Actions) triggers the jobs instead.
    """
    if not settings.ENABLE_SCHEDULER:
        logger.info("Scheduler disabled (ENABLE_SCHEDULER=False). Using external cron.")
        return

    # All cron times below are UTC; IST is UTC+5:30.

    # Daily holdings price sync at 3:30 PM IST (10:00 UTC).
    scheduler.add_job(run_daily_price_sync, CronTrigger(hour=10, minute=0))

    # Gmail transaction sync every hour.
    scheduler.add_job(run_gmail_sync, 'interval', hours=1)

    # Surety (bill) reminders daily at 9:00 AM IST (03:30 UTC).
    scheduler.add_job(run_surety_reminders, CronTrigger(hour=3, minute=30))

    # Weekly spending insights on Sunday at 10:00 AM IST (04:30 UTC).
    scheduler.add_job(run_weekly_insights, CronTrigger(day_of_week='sun', hour=4, minute=30))

    scheduler.start()
    logger.info("Scheduler started. Jobs scheduled.")
|
app/core/security.py
ADDED
|
@@ -0,0 +1,28 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from datetime import datetime, timedelta, timezone
|
| 2 |
+
from typing import Optional
|
| 3 |
+
from jose import jwt
|
| 4 |
+
from passlib.context import CryptContext
|
| 5 |
+
from app.core.config import get_settings
|
| 6 |
+
|
| 7 |
+
settings = get_settings()
|
| 8 |
+
|
| 9 |
+
pwd_context = CryptContext(schemes=["argon2", "bcrypt"], deprecated="auto")
|
| 10 |
+
|
| 11 |
+
def verify_password(plain_password: str, hashed_password: str) -> bool:
|
| 12 |
+
# Bcrypt has a max length limit of 72 bytes, truncate input to avoid crash
|
| 13 |
+
return pwd_context.verify(plain_password[:72], hashed_password)
|
| 14 |
+
|
| 15 |
+
def get_password_hash(password: str) -> str:
|
| 16 |
+
# Bcrypt has a max length limit of 72 bytes for passwords
|
| 17 |
+
# We truncate excessively long passwords to prevent crashing
|
| 18 |
+
return pwd_context.hash(password[:72])
|
| 19 |
+
|
| 20 |
+
def create_access_token(data: dict, expires_delta: Optional[timedelta] = None):
    """Create a signed JWT access token.

    Args:
        data: Claims to embed (e.g., {"sub": email}).
        expires_delta: Optional custom lifetime; defaults to
            settings.ACCESS_TOKEN_EXPIRE_MINUTES.

    Returns:
        The encoded JWT string with an "exp" claim.
    """
    to_encode = data.copy()
    # datetime.utcnow() is deprecated (Python 3.12+) and returns a naive
    # datetime; use an explicit timezone-aware UTC timestamp for "exp".
    now = datetime.now(timezone.utc)
    if expires_delta:
        expire = now + expires_delta
    else:
        expire = now + timedelta(minutes=settings.ACCESS_TOKEN_EXPIRE_MINUTES)
    to_encode.update({"exp": expire})
    encoded_jwt = jwt.encode(to_encode, settings.SECRET_KEY, algorithm=settings.ALGORITHM)
    return encoded_jwt
|
app/features/analytics/__init__.py
ADDED
|
File without changes
|
app/features/analytics/router.py
ADDED
|
@@ -0,0 +1,85 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from typing import Annotated, Optional
|
| 2 |
+
from fastapi import APIRouter, Depends, Query
|
| 3 |
+
from sqlalchemy.ext.asyncio import AsyncSession
|
| 4 |
+
from app.core.database import get_db
|
| 5 |
+
from app.features.auth.deps import get_current_user
|
| 6 |
+
from app.features.auth.models import User
|
| 7 |
+
from app.features.analytics.schemas import (
|
| 8 |
+
VarianceAnalysis,
|
| 9 |
+
FrozenFundsBreakdown,
|
| 10 |
+
SafeToSpendResponse,
|
| 11 |
+
MonthlySummaryResponse,
|
| 12 |
+
SpendTrendResponse,
|
| 13 |
+
SpendTrendFrequency
|
| 14 |
+
)
|
| 15 |
+
from app.features.analytics.service import AnalyticsService
|
| 16 |
+
|
| 17 |
+
router = APIRouter()
|
| 18 |
+
|
| 19 |
+
@router.get("/summary/", response_model=MonthlySummaryResponse)
|
| 20 |
+
async def get_monthly_summary(
|
| 21 |
+
current_user: Annotated[User, Depends(get_current_user)],
|
| 22 |
+
db: Annotated[AsyncSession, Depends(get_db)],
|
| 23 |
+
service: Annotated[AnalyticsService, Depends()],
|
| 24 |
+
month: Optional[int] = Query(None, ge=1, le=12),
|
| 25 |
+
year: Optional[int] = Query(None, ge=2000, le=2100),
|
| 26 |
+
scope: str = Query("month", enum=["month", "year", "all"])
|
| 27 |
+
):
|
| 28 |
+
"""
|
| 29 |
+
Get financial summary (Income vs Expense) for a specific scope.
|
| 30 |
+
"""
|
| 31 |
+
return await service.get_monthly_summary(db, current_user.id, month, year, scope)
|
| 32 |
+
|
| 33 |
+
|
| 34 |
+
@router.get("/variance/", response_model=VarianceAnalysis)
|
| 35 |
+
async def get_variance_analysis(
|
| 36 |
+
current_user: Annotated[User, Depends(get_current_user)],
|
| 37 |
+
db: Annotated[AsyncSession, Depends(get_db)],
|
| 38 |
+
service: Annotated[AnalyticsService, Depends()],
|
| 39 |
+
month: Optional[int] = Query(None, ge=1, le=12),
|
| 40 |
+
year: Optional[int] = Query(None, ge=2000, le=2100)
|
| 41 |
+
):
|
| 42 |
+
"""
|
| 43 |
+
Get spending variance analysis for a specific period.
|
| 44 |
+
"""
|
| 45 |
+
return await service.get_variance_analysis(db, current_user.id, month, year)
|
| 46 |
+
|
| 47 |
+
|
| 48 |
+
@router.get("/burden/", response_model=FrozenFundsBreakdown)
|
| 49 |
+
async def get_burden_calculation(
|
| 50 |
+
current_user: Annotated[User, Depends(get_current_user)],
|
| 51 |
+
db: Annotated[AsyncSession, Depends(get_db)],
|
| 52 |
+
service: Annotated[AnalyticsService, Depends()]
|
| 53 |
+
):
|
| 54 |
+
"""
|
| 55 |
+
Calculate total frozen funds (burden).
|
| 56 |
+
Formula: UnpaidBills + ProjectedSuretyBills + UnbilledCC
|
| 57 |
+
"""
|
| 58 |
+
return await service.calculate_burden(db, current_user.id)
|
| 59 |
+
|
| 60 |
+
|
| 61 |
+
@router.get("/safe-to-spend/", response_model=SafeToSpendResponse)
|
| 62 |
+
async def get_safe_to_spend(
|
| 63 |
+
current_user: Annotated[User, Depends(get_current_user)],
|
| 64 |
+
db: Annotated[AsyncSession, Depends(get_db)],
|
| 65 |
+
service: Annotated[AnalyticsService, Depends()]
|
| 66 |
+
):
|
| 67 |
+
"""
|
| 68 |
+
Calculate safe-to-spend amount with AI-predicted buffer till salary (1st of next month).
|
| 69 |
+
Buffer = AI prediction of discretionary expenses till next salary
|
| 70 |
+
Formula: Balance - FrozenFunds - AI Buffer
|
| 71 |
+
"""
|
| 72 |
+
return await service.calculate_safe_to_spend_amount(db, current_user.id)
|
| 73 |
+
|
| 74 |
+
@router.get("/trends/spend/", response_model=SpendTrendResponse)
|
| 75 |
+
async def get_spend_trends(
|
| 76 |
+
current_user: Annotated[User, Depends(get_current_user)],
|
| 77 |
+
db: Annotated[AsyncSession, Depends(get_db)],
|
| 78 |
+
service: Annotated[AnalyticsService, Depends()],
|
| 79 |
+
days: int = Query(30, ge=7, le=90),
|
| 80 |
+
frequency: SpendTrendFrequency = Query(SpendTrendFrequency.DAILY)
|
| 81 |
+
):
|
| 82 |
+
"""
|
| 83 |
+
Get spending trends for the last N days with specific frequency.
|
| 84 |
+
"""
|
| 85 |
+
return await service.get_spend_trends(db, current_user.id, days, frequency)
|
app/features/analytics/schemas.py
ADDED
|
@@ -0,0 +1,68 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from enum import Enum
|
| 2 |
+
from typing import Dict, List, Optional
|
| 3 |
+
from pydantic import BaseModel
|
| 4 |
+
from decimal import Decimal
|
| 5 |
+
from datetime import date
|
| 6 |
+
|
| 7 |
+
class SpendTrendFrequency(str, Enum):
    """Aggregation bucket accepted by the spend-trends endpoint."""
    DAILY = "daily"
    WEEKLY = "weekly"
    MONTHLY = "monthly"
|
| 11 |
+
|
| 12 |
+
class CategoryVariance(BaseModel):
    """Per-category spend figures for the current vs previous period."""
    current: Decimal
    previous: Decimal
    variance_amount: Decimal
    variance_percentage: float
    trend: str  # "up", "down", "stable"
|
| 18 |
+
|
| 19 |
+
class VarianceAnalysis(BaseModel):
    """Month-over-month spending comparison with a per-category breakdown."""
    current_month_total: Decimal
    last_month_total: Decimal
    variance_amount: Decimal
    variance_percentage: float
    # Presumably keyed by category name — confirm against AnalyticsService.
    category_breakdown: Dict[str, CategoryVariance]
|
| 25 |
+
|
| 26 |
+
class IdentifiedObligation(BaseModel):
    """A single upcoming outflow that contributes to frozen funds."""
    id: str
    title: str
    amount: Decimal
    due_date: date
    type: str  # "BILL", "SIP", "SURETY_TXN", "GOAL"
    status: str  # "OVERDUE", "PENDING", "PROJECTED"
    category: Optional[str] = None
    sub_category: Optional[str] = None
    # Presumably the id of the originating record (bill/txn/goal) — confirm.
    source_id: Optional[str] = None
|
| 36 |
+
|
| 37 |
+
class FrozenFundsBreakdown(BaseModel):
    """Funds already committed to upcoming obligations, broken down by source."""
    unpaid_bills: Decimal
    projected_surety: Decimal
    unbilled_cc: Decimal
    active_goals: Decimal = Decimal(0)
    total_frozen: Decimal
    # NOTE: Pydantic copies field defaults per instance, so the shared list
    # literal here is safe (unlike a plain-Python mutable default argument).
    obligations: List[IdentifiedObligation] = []
|
| 44 |
+
|
| 45 |
+
class SafeToSpendResponse(BaseModel):
    """Safe-to-spend calculation: balance minus frozen funds minus buffer."""
    current_balance: Decimal
    frozen_funds: FrozenFundsBreakdown
    buffer_amount: Decimal
    buffer_percentage: float
    safe_to_spend: Decimal
    recommendation: str
    status: str  # "success", "warning", "critical", "negative"
|
| 53 |
+
|
| 54 |
+
class MonthlySummaryResponse(BaseModel):
    """Income-vs-expense summary for a reporting period."""
    total_income: Decimal
    total_expense: Decimal
    balance: Decimal
    month: str
    year: int
    current_period_expense: Decimal = Decimal(0)
    prior_period_settlement: Decimal = Decimal(0)
|
| 62 |
+
|
| 63 |
+
class SpendTrendPoint(BaseModel):
    """One point on the spend-trend chart: total spend for a bucket date."""
    date: date
    amount: Decimal
|
| 66 |
+
|
| 67 |
+
class SpendTrendResponse(BaseModel):
    """Ordered series of spend-trend points for the requested window."""
    trends: List[SpendTrendPoint]
|
app/features/analytics/service.py
ADDED
|
@@ -0,0 +1,575 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import logging
|
| 2 |
+
import asyncio
|
| 3 |
+
from uuid import UUID
|
| 4 |
+
from decimal import Decimal
|
| 5 |
+
from typing import Dict, Optional
|
| 6 |
+
from sqlalchemy.ext.asyncio import AsyncSession
|
| 7 |
+
from sqlalchemy import select, func
|
| 8 |
+
from app.features.transactions.models import Transaction, AccountType
|
| 9 |
+
from app.features.goals.models import Goal
|
| 10 |
+
from app.features.analytics.schemas import (
|
| 11 |
+
CategoryVariance,
|
| 12 |
+
VarianceAnalysis,
|
| 13 |
+
FrozenFundsBreakdown,
|
| 14 |
+
SafeToSpendResponse,
|
| 15 |
+
MonthlySummaryResponse
|
| 16 |
+
)
|
| 17 |
+
from app.features.bills.service import BillService
|
| 18 |
+
from app.features.credit_cards.service import CreditCardService
|
| 19 |
+
from app.utils.finance_utils import (
|
| 20 |
+
calculate_frozen_funds,
|
| 21 |
+
calculate_safe_to_spend,
|
| 22 |
+
calculate_variance_percentage,
|
| 23 |
+
get_trend_indicator,
|
| 24 |
+
get_month_date_range,
|
| 25 |
+
get_previous_month_date_range,
|
| 26 |
+
get_year_date_range
|
| 27 |
+
)
|
| 28 |
+
|
| 29 |
+
from datetime import datetime, date, timedelta
|
| 30 |
+
import zoneinfo
|
| 31 |
+
from app.core.config import get_settings
|
| 32 |
+
|
| 33 |
+
logger = logging.getLogger(__name__)
|
| 34 |
+
settings = get_settings()
|
| 35 |
+
|
| 36 |
+
|
| 37 |
+
class AnalyticsService:
|
| 38 |
+
|
| 39 |
+
def __init__(self):
|
| 40 |
+
self.bill_service = BillService()
|
| 41 |
+
self.cc_service = CreditCardService()
|
| 42 |
+
self._tz = zoneinfo.ZoneInfo(settings.APP_TIMEZONE)
|
| 43 |
+
|
| 44 |
+
def _get_today(self) -> date:
|
| 45 |
+
"""Get current date in the configured timezone."""
|
| 46 |
+
return datetime.now(self._tz).date()
|
| 47 |
+
|
| 48 |
+
async def get_variance_analysis(
|
| 49 |
+
self,
|
| 50 |
+
db: AsyncSession,
|
| 51 |
+
user_id: UUID,
|
| 52 |
+
month: Optional[int] = None,
|
| 53 |
+
year: Optional[int] = None
|
| 54 |
+
) -> VarianceAnalysis:
|
| 55 |
+
"""Calculate period vs previous period variance."""
|
| 56 |
+
target_date = self._get_today()
|
| 57 |
+
if month and year:
|
| 58 |
+
target_date = date(year, month, 1)
|
| 59 |
+
|
| 60 |
+
current_range = get_month_date_range(target_date)
|
| 61 |
+
previous_range = get_previous_month_date_range(target_date)
|
| 62 |
+
|
| 63 |
+
# Current month spending
|
| 64 |
+
# Prepare Current month spending query
|
| 65 |
+
current_stmt = (
|
| 66 |
+
select(
|
| 67 |
+
Transaction.category,
|
| 68 |
+
func.sum(Transaction.amount).label("total")
|
| 69 |
+
)
|
| 70 |
+
.where(Transaction.user_id == user_id)
|
| 71 |
+
.where(Transaction.category.notin_(["Income"]))
|
| 72 |
+
.where(Transaction.transaction_date >= current_range["month_start"])
|
| 73 |
+
.where(Transaction.transaction_date <= current_range["month_end"])
|
| 74 |
+
.group_by(Transaction.category)
|
| 75 |
+
)
|
| 76 |
+
|
| 77 |
+
# Prepare Previous month spending query
|
| 78 |
+
previous_stmt = (
|
| 79 |
+
select(
|
| 80 |
+
Transaction.category,
|
| 81 |
+
func.sum(Transaction.amount).label("total")
|
| 82 |
+
)
|
| 83 |
+
.where(Transaction.user_id == user_id)
|
| 84 |
+
.where(Transaction.category.notin_(["Income"]))
|
| 85 |
+
.where(Transaction.transaction_date >= previous_range["month_start"])
|
| 86 |
+
.where(Transaction.transaction_date <= previous_range["month_end"])
|
| 87 |
+
.group_by(Transaction.category)
|
| 88 |
+
)
|
| 89 |
+
|
| 90 |
+
# Execute in parallel
|
| 91 |
+
current_res, previous_res = await asyncio.gather(
|
| 92 |
+
db.execute(current_stmt),
|
| 93 |
+
db.execute(previous_stmt)
|
| 94 |
+
)
|
| 95 |
+
|
| 96 |
+
current_by_category = {row.category: abs(row.total or Decimal("0")) for row in current_res.all()}
|
| 97 |
+
current_total = sum(current_by_category.values())
|
| 98 |
+
|
| 99 |
+
previous_by_category = {row.category: abs(row.total or Decimal("0")) for row in previous_res.all()}
|
| 100 |
+
previous_total = sum(previous_by_category.values())
|
| 101 |
+
|
| 102 |
+
# Calculate category-level variance
|
| 103 |
+
all_categories = set(current_by_category.keys()) | set(previous_by_category.keys())
|
| 104 |
+
category_breakdown = {}
|
| 105 |
+
|
| 106 |
+
for category in all_categories:
|
| 107 |
+
current_amount = current_by_category.get(category, Decimal("0"))
|
| 108 |
+
previous_amount = previous_by_category.get(category, Decimal("0"))
|
| 109 |
+
variance_amt = current_amount - previous_amount
|
| 110 |
+
variance_pct = calculate_variance_percentage(current_amount, previous_amount)
|
| 111 |
+
|
| 112 |
+
category_breakdown[category] = CategoryVariance(
|
| 113 |
+
current=current_amount,
|
| 114 |
+
previous=previous_amount,
|
| 115 |
+
variance_amount=variance_amt,
|
| 116 |
+
variance_percentage=variance_pct,
|
| 117 |
+
trend=get_trend_indicator(variance_pct)
|
| 118 |
+
)
|
| 119 |
+
|
| 120 |
+
# Overall variance
|
| 121 |
+
total_variance = current_total - previous_total
|
| 122 |
+
total_variance_pct = calculate_variance_percentage(
|
| 123 |
+
Decimal(str(current_total)),
|
| 124 |
+
Decimal(str(previous_total))
|
| 125 |
+
)
|
| 126 |
+
|
| 127 |
+
return VarianceAnalysis(
|
| 128 |
+
current_month_total=Decimal(str(current_total)),
|
| 129 |
+
last_month_total=Decimal(str(previous_total)),
|
| 130 |
+
variance_amount=Decimal(str(total_variance)),
|
| 131 |
+
variance_percentage=total_variance_pct,
|
| 132 |
+
category_breakdown=category_breakdown
|
| 133 |
+
)
|
| 134 |
+
|
| 135 |
+
async def calculate_burden(
|
| 136 |
+
self,
|
| 137 |
+
db: AsyncSession,
|
| 138 |
+
user_id: UUID
|
| 139 |
+
) -> FrozenFundsBreakdown:
|
| 140 |
+
"""Calculate total frozen funds (burden) with a detailed obligation ledger."""
|
| 141 |
+
try:
|
| 142 |
+
from app.features.analytics.schemas import IdentifiedObligation
|
| 143 |
+
import calendar
|
| 144 |
+
# wealth_service removed for decoupling
|
| 145 |
+
|
| 146 |
+
|
| 147 |
+
# Calculate days till end of month to align with salary cycle
|
| 148 |
+
today = self._get_today()
|
| 149 |
+
_, last_day = calendar.monthrange(today.year, today.month)
|
| 150 |
+
days_till_month_end = last_day - today.day
|
| 151 |
+
|
| 152 |
+
# 1. Execute multiple independent checks in parallel
|
| 153 |
+
# - Bill/Surety Ledger (heavy)
|
| 154 |
+
# - Credit Card exposure (fast)
|
| 155 |
+
# - Active Goals (fast)
|
| 156 |
+
|
| 157 |
+
goal_stmt = (
|
| 158 |
+
select(Goal)
|
| 159 |
+
.where(Goal.user_id == user_id)
|
| 160 |
+
.where(Goal.is_active == True)
|
| 161 |
+
)
|
| 162 |
+
|
| 163 |
+
ledger_task = self.bill_service.get_obligations_ledger(db, user_id, days_ahead=days_till_month_end)
|
| 164 |
+
cc_task = self.cc_service.get_all_unbilled_for_user(db, user_id)
|
| 165 |
+
goal_task = db.execute(goal_stmt)
|
| 166 |
+
|
| 167 |
+
ledger_data, unbilled_cc, goal_res = await asyncio.gather(
|
| 168 |
+
ledger_task,
|
| 169 |
+
cc_task,
|
| 170 |
+
goal_task
|
| 171 |
+
)
|
| 172 |
+
|
| 173 |
+
# 2. Process Bill/Surety results
|
| 174 |
+
unpaid_bills_total = ledger_data["unpaid_total"]
|
| 175 |
+
projected_surety_bills = ledger_data["projected_total"]
|
| 176 |
+
all_obligations = ledger_data["items"]
|
| 177 |
+
|
| 178 |
+
# 3. Process SIP Commitments (Placeholder)
|
| 179 |
+
sip_total = Decimal("0")
|
| 180 |
+
total_projected_surety = projected_surety_bills + sip_total
|
| 181 |
+
|
| 182 |
+
# 4. Process Goals
|
| 183 |
+
active_goals_total = Decimal("0")
|
| 184 |
+
goals = goal_res.scalars().all()
|
| 185 |
+
for g in goals:
|
| 186 |
+
amt = Decimal(str(g.monthly_contribution))
|
| 187 |
+
active_goals_total += amt
|
| 188 |
+
# Add goals to ledger items
|
| 189 |
+
all_obligations.append(IdentifiedObligation(
|
| 190 |
+
id=f"goal-{g.id}",
|
| 191 |
+
title=f"Goal: {g.name}",
|
| 192 |
+
amount=amt,
|
| 193 |
+
due_date=date.today() + timedelta(days=15),
|
| 194 |
+
type="GOAL",
|
| 195 |
+
status="PROJECTED",
|
| 196 |
+
category="Goal",
|
| 197 |
+
sub_category=g.category
|
| 198 |
+
))
|
| 199 |
+
|
| 200 |
+
total_frozen = calculate_frozen_funds(unpaid_bills_total, total_projected_surety, unbilled_cc) + active_goals_total
|
| 201 |
+
|
| 202 |
+
return FrozenFundsBreakdown(
|
| 203 |
+
unpaid_bills=unpaid_bills_total,
|
| 204 |
+
projected_surety=total_projected_surety,
|
| 205 |
+
unbilled_cc=unbilled_cc,
|
| 206 |
+
active_goals=active_goals_total,
|
| 207 |
+
total_frozen=total_frozen,
|
| 208 |
+
obligations=all_obligations
|
| 209 |
+
)
|
| 210 |
+
|
| 211 |
+
|
| 212 |
+
except Exception as e:
|
| 213 |
+
logger.error(f"Error calculating burden: {e}")
|
| 214 |
+
# Return safe zeros to prevent 500
|
| 215 |
+
zero = Decimal("0.00")
|
| 216 |
+
return FrozenFundsBreakdown(
|
| 217 |
+
unpaid_bills=zero,
|
| 218 |
+
projected_surety=zero,
|
| 219 |
+
unbilled_cc=zero,
|
| 220 |
+
active_goals=zero,
|
| 221 |
+
total_frozen=zero
|
| 222 |
+
)
|
| 223 |
+
|
| 224 |
+
async def calculate_safe_to_spend_amount(
|
| 225 |
+
self,
|
| 226 |
+
db: AsyncSession,
|
| 227 |
+
user_id: UUID
|
| 228 |
+
) -> SafeToSpendResponse:
|
| 229 |
+
"""Calculate safe-to-spend amount with frozen funds and mathematical buffer till salary."""
|
| 230 |
+
try:
|
| 231 |
+
# Calculate days till salary (1st of next month)
|
| 232 |
+
today = self._get_today()
|
| 233 |
+
if today.day == 1:
|
| 234 |
+
# If today is 1st, assume salary already received, buffer till next month's 1st
|
| 235 |
+
days_till_salary = 30 # Approximate
|
| 236 |
+
else:
|
| 237 |
+
# Days remaining in current month
|
| 238 |
+
import calendar
|
| 239 |
+
_, last_day = calendar.monthrange(today.year, today.month)
|
| 240 |
+
days_till_salary = last_day - today.day + 1
|
| 241 |
+
|
| 242 |
+
# 1. Prepare all independent queries/tasks
|
| 243 |
+
balance_stmt = (
|
| 244 |
+
select(func.sum(Transaction.amount))
|
| 245 |
+
.where(Transaction.user_id == user_id)
|
| 246 |
+
.where(Transaction.account_type.in_([AccountType.CASH, AccountType.SAVINGS]))
|
| 247 |
+
)
|
| 248 |
+
|
| 249 |
+
txn_count_stmt = (
|
| 250 |
+
select(func.count(Transaction.id))
|
| 251 |
+
.where(Transaction.user_id == user_id)
|
| 252 |
+
)
|
| 253 |
+
|
| 254 |
+
today_date = self._get_today()
|
| 255 |
+
thirty_days_ago = today_date - timedelta(days=30)
|
| 256 |
+
|
| 257 |
+
discretionary_stmt = (
|
| 258 |
+
select(func.sum(Transaction.amount))
|
| 259 |
+
.where(Transaction.user_id == user_id)
|
| 260 |
+
.where(Transaction.category.notin_(["Income", "Investment", "Housing", "Bill Payment", "Transfer", "EMI", "Loan", "Insurance", "Misc"]))
|
| 261 |
+
.where(Transaction.sub_category != "Credit Card Payment")
|
| 262 |
+
.where(Transaction.is_surety == False)
|
| 263 |
+
.where(func.abs(Transaction.amount) <= 5000) # Exclude large one-off purchases > 5k
|
| 264 |
+
.where(Transaction.transaction_date >= thirty_days_ago)
|
| 265 |
+
.where(Transaction.transaction_date <= today_date)
|
| 266 |
+
)
|
| 267 |
+
|
| 268 |
+
# 2. Execute everything in parallel
|
| 269 |
+
balance_task = db.execute(balance_stmt)
|
| 270 |
+
burden_task = self.calculate_burden(db, user_id)
|
| 271 |
+
count_task = db.execute(txn_count_stmt)
|
| 272 |
+
discretionary_task = db.execute(discretionary_stmt)
|
| 273 |
+
|
| 274 |
+
balance_result, frozen_breakdown, txn_count_result, discretionary_result = await asyncio.gather(
|
| 275 |
+
balance_task,
|
| 276 |
+
burden_task,
|
| 277 |
+
count_task,
|
| 278 |
+
discretionary_task
|
| 279 |
+
)
|
| 280 |
+
|
| 281 |
+
# 3. Process results
|
| 282 |
+
current_balance = balance_result.scalar() or Decimal("0")
|
| 283 |
+
total_transactions = txn_count_result.scalar() or 0
|
| 284 |
+
is_new_user = total_transactions == 0
|
| 285 |
+
total_discretionary_30d = abs(discretionary_result.scalar() or Decimal("0"))
|
| 286 |
+
|
| 287 |
+
# Calculate average daily discretionary expense
|
| 288 |
+
avg_daily_discretionary = total_discretionary_30d / Decimal("30")
|
| 289 |
+
|
| 290 |
+
# Buffer = Average daily discretionary × days till salary
|
| 291 |
+
buffer = avg_daily_discretionary * Decimal(str(days_till_salary))
|
| 292 |
+
|
| 293 |
+
# Only enforce minimum buffer if user has positive balance
|
| 294 |
+
if current_balance > 0:
|
| 295 |
+
min_buffer = Decimal("500")
|
| 296 |
+
buffer = max(buffer, min_buffer)
|
| 297 |
+
else:
|
| 298 |
+
# No/negative balance means no buffer needed
|
| 299 |
+
buffer = Decimal("0")
|
| 300 |
+
|
| 301 |
+
# Set method for display
|
| 302 |
+
buffer_method = "average"
|
| 303 |
+
buffer_confidence = "medium"
|
| 304 |
+
|
| 305 |
+
# Calculate safe-to-spend
|
| 306 |
+
# If balance is zero or negative, safe_to_spend should be 0 (can't spend what you don't have)
|
| 307 |
+
if current_balance <= 0:
|
| 308 |
+
safe_amount = Decimal("0")
|
| 309 |
+
else:
|
| 310 |
+
safe_amount = current_balance - frozen_breakdown.total_frozen - buffer
|
| 311 |
+
# Cap at 0 minimum (can't spend negative amounts)
|
| 312 |
+
safe_amount = max(Decimal("0"), safe_amount)
|
| 313 |
+
|
| 314 |
+
# Calculate buffer as percentage for response (for UI display)
|
| 315 |
+
buffer_percentage = float(buffer / current_balance) if current_balance > 0 else 0.0
|
| 316 |
+
|
| 317 |
+
# Format salary date for display
|
| 318 |
+
next_month = today.replace(day=1) + timedelta(days=32)
|
| 319 |
+
salary_date = next_month.replace(day=1)
|
| 320 |
+
salary_str = salary_date.strftime("%b %d")
|
| 321 |
+
|
| 322 |
+
# Generate recommendation based on user state
|
| 323 |
+
status = "success"
|
| 324 |
+
|
| 325 |
+
if is_new_user:
|
| 326 |
+
recommendation = "👋 Welcome! Add your first transaction to start tracking your finances."
|
| 327 |
+
status = "success"
|
| 328 |
+
elif current_balance < 0:
|
| 329 |
+
deficit = abs(current_balance)
|
| 330 |
+
recommendation = f"📉 Balance is ₹{deficit:.0f} in deficit. Add income to recover."
|
| 331 |
+
status = "negative"
|
| 332 |
+
elif current_balance == 0:
|
| 333 |
+
recommendation = "⚠️ No liquid balance available. Please add income transactions."
|
| 334 |
+
status = "warning"
|
| 335 |
+
elif safe_amount == 0:
|
| 336 |
+
overextended = frozen_breakdown.total_frozen + buffer - current_balance
|
| 337 |
+
recommendation = f"🔒 Overextended by ₹{overextended:.0f}. Frozen + Buffer exceed balance."
|
| 338 |
+
status = "critical"
|
| 339 |
+
elif safe_amount < (current_balance * Decimal("0.20")):
|
| 340 |
+
recommendation = f"⚡ Low capacity. ₹{buffer:.0f} reserved till salary ({salary_str})"
|
| 341 |
+
status = "warning"
|
| 342 |
+
else:
|
| 343 |
+
recommendation = f"✅ Healthy! ₹{buffer:.0f} buffered till salary ({salary_str})"
|
| 344 |
+
status = "success"
|
| 345 |
+
|
| 346 |
+
return SafeToSpendResponse(
|
| 347 |
+
current_balance=current_balance,
|
| 348 |
+
frozen_funds=frozen_breakdown,
|
| 349 |
+
buffer_amount=buffer,
|
| 350 |
+
buffer_percentage=buffer_percentage,
|
| 351 |
+
safe_to_spend=safe_amount,
|
| 352 |
+
recommendation=recommendation,
|
| 353 |
+
status=status
|
| 354 |
+
)
|
| 355 |
+
except Exception as e:
|
| 356 |
+
logger.error(f"Error calculating safe to spend: {e}")
|
| 357 |
+
# ... (error handling code remains the same) ...
|
| 358 |
+
# Return safe default
|
| 359 |
+
zero = Decimal("0.00")
|
| 360 |
+
empty_breakdown = FrozenFundsBreakdown(
|
| 361 |
+
unpaid_bills=zero,
|
| 362 |
+
projected_surety=zero,
|
| 363 |
+
unbilled_cc=zero,
|
| 364 |
+
active_goals=zero,
|
| 365 |
+
total_frozen=zero
|
| 366 |
+
)
|
| 367 |
+
return SafeToSpendResponse(
|
| 368 |
+
current_balance=zero,
|
| 369 |
+
frozen_funds=empty_breakdown,
|
| 370 |
+
buffer_amount=zero,
|
| 371 |
+
buffer_percentage=0.0,
|
| 372 |
+
safe_to_spend=zero,
|
| 373 |
+
recommendation="⚠️ Unable to calculate. Please check system logs.",
|
| 374 |
+
status="warning"
|
| 375 |
+
)
|
| 376 |
+
|
| 377 |
+
async def debug_buffer_Calculation(self, db: AsyncSession, user_id: UUID):
|
| 378 |
+
"""Debug method to show WHAT is being included in buffer calculation."""
|
| 379 |
+
today_date = self._get_today()
|
| 380 |
+
thirty_days_ago = today_date - timedelta(days=30)
|
| 381 |
+
|
| 382 |
+
# EXACT SAME logic as calculation
|
| 383 |
+
stmt = (
|
| 384 |
+
select(Transaction)
|
| 385 |
+
.where(Transaction.user_id == user_id)
|
| 386 |
+
.where(Transaction.category.notin_(["Income", "Investment", "Housing", "Bill Payment", "Transfer", "EMI", "Loan", "Insurance", "Misc"]))
|
| 387 |
+
.where(Transaction.sub_category != "Credit Card Payment")
|
| 388 |
+
.where(Transaction.is_surety == False)
|
| 389 |
+
.where(func.abs(Transaction.amount) <= 5000) # Exclude large one-off purchases > 5k
|
| 390 |
+
.where(Transaction.transaction_date >= thirty_days_ago)
|
| 391 |
+
.where(Transaction.transaction_date <= today_date)
|
| 392 |
+
.order_by(Transaction.amount) # Sort by amount (negative first = biggest spenders)
|
| 393 |
+
)
|
| 394 |
+
|
| 395 |
+
result = await db.execute(stmt)
|
| 396 |
+
txns = result.scalars().all()
|
| 397 |
+
|
| 398 |
+
total = sum(abs(t.amount) for t in txns)
|
| 399 |
+
|
| 400 |
+
return {
|
| 401 |
+
"total_discretionary_30d": total,
|
| 402 |
+
"daily_average": total / 30,
|
| 403 |
+
"count": len(txns),
|
| 404 |
+
"transactions": [
|
| 405 |
+
{
|
| 406 |
+
"date": t.transaction_date,
|
| 407 |
+
"amount": t.amount,
|
| 408 |
+
"merchant": t.merchant_name,
|
| 409 |
+
"category": t.category,
|
| 410 |
+
"sub_category": t.sub_category
|
| 411 |
+
}
|
| 412 |
+
for t in txns
|
| 413 |
+
]
|
| 414 |
+
}
|
| 415 |
+
|
| 416 |
+
async def get_monthly_summary(
|
| 417 |
+
self,
|
| 418 |
+
db: AsyncSession,
|
| 419 |
+
user_id: UUID,
|
| 420 |
+
month: Optional[int] = None,
|
| 421 |
+
year: Optional[int] = None,
|
| 422 |
+
scope: str = "month"
|
| 423 |
+
) -> MonthlySummaryResponse:
|
| 424 |
+
import datetime
|
| 425 |
+
|
| 426 |
+
target_date = self._get_today()
|
| 427 |
+
if month and year:
|
| 428 |
+
target_date = date(year, month, 1)
|
| 429 |
+
|
| 430 |
+
# Determine date range based on scope
|
| 431 |
+
if scope == "year":
|
| 432 |
+
date_range = get_year_date_range(target_date)
|
| 433 |
+
start_date = date_range["year_start"]
|
| 434 |
+
end_date = date_range["year_end"]
|
| 435 |
+
period_label = str(start_date.year)
|
| 436 |
+
elif scope == "all":
|
| 437 |
+
# For all time, start from year 2000
|
| 438 |
+
start_date = date(2000, 1, 1)
|
| 439 |
+
end_date = date(2100, 12, 31)
|
| 440 |
+
period_label = "All Time"
|
| 441 |
+
else:
|
| 442 |
+
# Default to month
|
| 443 |
+
date_range = get_month_date_range(target_date)
|
| 444 |
+
start_date = date_range["month_start"]
|
| 445 |
+
end_date = date_range["month_end"]
|
| 446 |
+
period_label = start_date.strftime("%B")
|
| 447 |
+
|
| 448 |
+
# Calculate Income
|
| 449 |
+
income_stmt = (
|
| 450 |
+
select(func.sum(Transaction.amount))
|
| 451 |
+
.where(Transaction.user_id == user_id)
|
| 452 |
+
.where(Transaction.category == "Income")
|
| 453 |
+
.where(Transaction.transaction_date >= start_date)
|
| 454 |
+
.where(Transaction.transaction_date <= end_date)
|
| 455 |
+
)
|
| 456 |
+
|
| 457 |
+
expense_stmt = (
|
| 458 |
+
select(func.sum(Transaction.amount))
|
| 459 |
+
.where(Transaction.user_id == user_id)
|
| 460 |
+
.where(Transaction.category.notin_(["Income"]))
|
| 461 |
+
.where(Transaction.transaction_date >= start_date)
|
| 462 |
+
.where(Transaction.transaction_date <= end_date)
|
| 463 |
+
)
|
| 464 |
+
|
| 465 |
+
# Execute in parallel
|
| 466 |
+
income_res, expense_res = await asyncio.gather(
|
| 467 |
+
db.execute(income_stmt),
|
| 468 |
+
db.execute(expense_stmt)
|
| 469 |
+
)
|
| 470 |
+
|
| 471 |
+
total_income = income_res.scalar() or Decimal("0")
|
| 472 |
+
total_expense_raw = abs(expense_res.scalar() or Decimal("0"))
|
| 473 |
+
|
| 474 |
+
|
| 475 |
+
# Calculate Prior Period Settlement (Strictly Credit Card Payments)
|
| 476 |
+
# We assume these payments are for previous month's dues.
|
| 477 |
+
prior_settlement_stmt = (
|
| 478 |
+
select(func.sum(Transaction.amount))
|
| 479 |
+
.where(Transaction.user_id == user_id)
|
| 480 |
+
.where(Transaction.sub_category == "Credit Card Payment")
|
| 481 |
+
.where(Transaction.transaction_date >= start_date)
|
| 482 |
+
.where(Transaction.transaction_date <= end_date)
|
| 483 |
+
)
|
| 484 |
+
prior_res = await db.execute(prior_settlement_stmt)
|
| 485 |
+
prior_period_settlement = abs(prior_res.scalar() or Decimal("0"))
|
| 486 |
+
|
| 487 |
+
# Current Period Expense is Total Expense minus the settlements
|
| 488 |
+
# (Assuming total_expense_raw includes the CC payments, which it does as they are not Income)
|
| 489 |
+
current_period_expense = total_expense_raw - prior_period_settlement
|
| 490 |
+
|
| 491 |
+
balance_stmt = (
|
| 492 |
+
select(func.sum(Transaction.amount))
|
| 493 |
+
.where(Transaction.user_id == user_id)
|
| 494 |
+
.where(Transaction.transaction_date >= start_date)
|
| 495 |
+
.where(Transaction.transaction_date <= end_date)
|
| 496 |
+
)
|
| 497 |
+
balance_res = await db.execute(balance_stmt)
|
| 498 |
+
net_balance = balance_res.scalar() or Decimal("0")
|
| 499 |
+
|
| 500 |
+
return MonthlySummaryResponse(
|
| 501 |
+
total_income=total_income,
|
| 502 |
+
total_expense=total_expense_raw,
|
| 503 |
+
balance=net_balance,
|
| 504 |
+
month=period_label,
|
| 505 |
+
year=target_date.year,
|
| 506 |
+
current_period_expense=current_period_expense,
|
| 507 |
+
prior_period_settlement=prior_period_settlement
|
| 508 |
+
)
|
| 509 |
+
|
| 510 |
+
    async def get_spend_trends(
        self,
        db: AsyncSession,
        user_id: UUID,
        days: int = 30,
        frequency: str = "daily"
    ):
        """Get spending trends for the last N days/weeks/months.

        Args:
            db: Active async database session.
            user_id: Owner of the transactions.
            days: Window size for the daily view (ignored for weekly/monthly).
            frequency: "daily" (default), "weekly", or "monthly".

        Returns:
            SpendTrendResponse. Daily points are smoothed with a trailing
            3-day average; weekly/monthly points are raw bucket sums.
        """
        from app.features.analytics.schemas import SpendTrendPoint, SpendTrendResponse

        today = self._get_today()

        # NOTE(review): func.date_trunc is PostgreSQL-specific — the weekly and
        # monthly branches assume a Postgres backend; confirm before porting.
        if frequency == "monthly":
            # Group by month for last 6 months
            start_date = (today.replace(day=1) - timedelta(days=180)).replace(day=1)
            date_field = func.date_trunc('month', Transaction.transaction_date)
            limit_points = 6
        elif frequency == "weekly":
            # Group by week for last 12 weeks
            start_date = today - timedelta(weeks=12)
            date_field = func.date_trunc('week', Transaction.transaction_date)
            limit_points = 12
        else:
            # Default daily: fetch a few extra days of history so the rolling
            # average is defined for the earliest visible points.
            start_date = today - timedelta(days=days + 5)
            date_field = Transaction.transaction_date
            limit_points = days

        # Total absolute spend per bucket, excluding income and transfers.
        stmt = (
            select(
                date_field.label("date"),
                func.sum(func.abs(Transaction.amount)).label("amount")
            )
            .where(Transaction.user_id == user_id)
            .where(Transaction.category.notin_(["Income", "Transfer"]))
            .where(Transaction.transaction_date >= start_date)
            .where(Transaction.transaction_date <= today)
            .group_by(date_field)
            .order_by(date_field)
        )

        result = await db.execute(stmt)
        data_points = result.all()

        if frequency == "daily":
            # Apply 3-day rolling average for daily
            trends_map = {row.date: row.amount for row in data_points}
            # Densify: fill missing days with zero so the average is well-defined.
            all_daily = []
            full_start = today - timedelta(days=days + 2)
            for i in range(days + 3):
                d = full_start + timedelta(days=i)
                all_daily.append({"date": d, "amount": trends_map.get(d, Decimal("0"))})

            final_trends = []
            for i in range(2, len(all_daily)):
                d = all_daily[i]["date"]
                # Skip the warm-up days that fall outside the requested window.
                if d < today - timedelta(days=days - 1): continue
                avg_amount = (all_daily[i]["amount"] + all_daily[i-1]["amount"] + all_daily[i-2]["amount"]) / 3
                final_trends.append(SpendTrendPoint(date=d, amount=avg_amount))
            return SpendTrendResponse(trends=final_trends)

        # For Weekly and Monthly, just return the data points
        # (date_trunc yields datetimes; normalise to date for the schema).
        return SpendTrendResponse(trends=[
            SpendTrendPoint(date=row.date if isinstance(row.date, date) else row.date.date(), amount=row.amount)
            for row in data_points
        ])
|
app/features/auth/deps.py
ADDED
|
@@ -0,0 +1,37 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from fastapi import Request, HTTPException, status, Depends
|
| 2 |
+
from sqlalchemy.ext.asyncio import AsyncSession
|
| 3 |
+
from sqlalchemy import select
|
| 4 |
+
from app.features.auth.models import User
|
| 5 |
+
from app.core.database import get_db
|
| 6 |
+
|
| 7 |
+
async def get_current_user(request: Request, db: AsyncSession = Depends(get_db)) -> User:
    """Resolve the authenticated, active User for this request.

    Resolution order: a User already cached on ``request.state`` wins;
    otherwise the email claim set by the stateless auth middleware is looked
    up in the database. Raises 401 when unauthenticated or unknown, 400 when
    the account is inactive.
    """
    # Fast path: another dependency in this request already resolved the user.
    cached = getattr(request.state, "user", None)
    if cached:
        return cached

    # The stateless middleware attaches the verified email claim here.
    email = getattr(request.state, "user_email", None)
    if not email:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Authentication required"
        )

    lookup = await db.execute(select(User).where(User.email == email))
    user = lookup.scalar_one_or_none()

    if user is None:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="User not found"
        )

    if not user.is_active:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="Inactive user"
        )

    # Memoise on the request so later Depends() calls skip the DB hit.
    request.state.user = user
    return user
|
app/features/auth/models.py
ADDED
|
@@ -0,0 +1,23 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import uuid
|
| 2 |
+
from sqlalchemy import String, Boolean, DateTime, JSON
|
| 3 |
+
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
| 4 |
+
from sqlalchemy.sql import func
|
| 5 |
+
from app.core.database import Base
|
| 6 |
+
|
| 7 |
+
class User(Base):
    """Application user account.

    Holds credentials, OTP verification state, integration data, and
    relationships to the user's credit cards and bills.
    """
    __tablename__ = "users"

    id: Mapped[uuid.UUID] = mapped_column(primary_key=True, default=uuid.uuid4)
    email: Mapped[str] = mapped_column(String, unique=True, index=True)
    full_name: Mapped[str] = mapped_column(String, nullable=True)
    # Password hash, or the "EXTERNAL_AUTH_GOOGLE" sentinel for Google SSO accounts.
    hashed_password: Mapped[str] = mapped_column(String)
    # Registration flow creates users with is_active=False until the email OTP is verified.
    is_active: Mapped[bool] = mapped_column(Boolean, default=True)
    # Device token — presumably Firebase Cloud Messaging for push; verify against sender.
    fcm_token: Mapped[str] = mapped_column(String, nullable=True)
    # Serialized Google OAuth credentials for Gmail sync; NULL until authorized.
    gmail_credentials: Mapped[dict] = mapped_column(JSON, nullable=True)
    created_at: Mapped[DateTime] = mapped_column(DateTime(timezone=True), server_default=func.now())

    # One-time email verification code and its expiry timestamp.
    verification_code: Mapped[str] = mapped_column(String, nullable=True)
    verification_code_expires_at: Mapped[DateTime] = mapped_column(DateTime(timezone=True), nullable=True)

    credit_cards: Mapped[list["CreditCard"]] = relationship("CreditCard", back_populates="user")
    bills: Mapped[list["Bill"]] = relationship("Bill", back_populates="user")
|
app/features/auth/router.py
ADDED
|
@@ -0,0 +1,269 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from datetime import timedelta, datetime, timezone
|
| 2 |
+
from typing import Annotated, Optional
|
| 3 |
+
from fastapi import APIRouter, Depends, HTTPException, status, BackgroundTasks
|
| 4 |
+
from fastapi.security import OAuth2PasswordRequestForm
|
| 5 |
+
from sqlalchemy.ext.asyncio import AsyncSession
|
| 6 |
+
from sqlalchemy import select
|
| 7 |
+
from pydantic import BaseModel
|
| 8 |
+
from google.oauth2 import id_token
|
| 9 |
+
from google.auth.transport import requests as google_requests
|
| 10 |
+
|
| 11 |
+
from app.core.database import get_db
|
| 12 |
+
from app.core.security import create_access_token, get_password_hash, verify_password
|
| 13 |
+
from app.features.auth.models import User
|
| 14 |
+
from app.features.auth import schemas
|
| 15 |
+
from app.core.config import get_settings
|
| 16 |
+
from app.core.llm import get_llm_service
|
| 17 |
+
from app.features.notifications.service import NotificationService
|
| 18 |
+
|
| 19 |
+
import logging
|
| 20 |
+
import random
|
| 21 |
+
import string
|
| 22 |
+
|
| 23 |
+
router = APIRouter()
|
| 24 |
+
settings = get_settings()
|
| 25 |
+
logger = logging.getLogger(__name__)
|
| 26 |
+
|
| 27 |
+
class GoogleLoginRequest(BaseModel):
    """Request body for Google sign-in."""
    # Credential/ID token string issued by Google on the client side.
    token: str
|
| 29 |
+
|
| 30 |
+
@router.post("/register", response_model=dict)
async def register_user(
    user_in: schemas.UserCreate,
    db: Annotated[AsyncSession, Depends(get_db)],
    background_tasks: BackgroundTasks
):
    """Start email/password registration by issuing a 6-digit email OTP.

    - Already-verified email -> 400.
    - Pending (unverified) account -> password and OTP are refreshed.
    - New email -> an inactive User row is created.

    The OTP email is sent from a background task so the response is not
    blocked on SMTP. NOTE(review): the 400 for existing accounts allows
    account enumeration — consider a uniform response if that matters here.
    """
    from app.core.email import send_otp_email
    import secrets

    result = await db.execute(select(User).where(User.email == user_in.email))
    existing_user = result.scalar_one_or_none()

    # FIX: OTPs are security tokens — use the CSPRNG-backed `secrets` module
    # instead of the predictable `random` PRNG. Valid for 10 minutes.
    otp = ''.join(secrets.choice(string.digits) for _ in range(6))
    otp_expiry = datetime.now(timezone.utc) + timedelta(minutes=10)

    if existing_user:
        if existing_user.is_active:
            raise HTTPException(
                status_code=400,
                detail="User with this email already exists"
            )
        # Unverified account: refresh the password and resend a new OTP.
        user = existing_user
        user.hashed_password = get_password_hash(user_in.password)
    else:
        user = User(
            email=user_in.email,
            hashed_password=get_password_hash(user_in.password),
            is_active=False
        )

    user.verification_code = otp
    user.verification_code_expires_at = otp_expiry
    db.add(user)
    await db.commit()

    # Single shared tail (this send+return block was previously duplicated).
    background_tasks.add_task(send_otp_email, user_in.email, otp)
    return {"message": "OTP sent to email", "email": user_in.email}
|
| 75 |
+
|
| 76 |
+
@router.post("/google/one-tap", response_model=schemas.Token)
async def google_one_tap(
    payload: dict,
    db: Annotated[AsyncSession, Depends(get_db)],
    background_tasks: BackgroundTasks
):
    """
    Combined Login + Gmail Sync authorization.
    Exchanges an Authorization Code for tokens, registers the user,
    and saves Gmail credentials in one go.
    """
    # Imported locally — presumably to break an auth<->sync circular import; confirm.
    from app.features.sync.router import get_google_flow

    code = payload.get("code")
    # "postmessage" is the redirect value Google's JS popup flow expects.
    redirect_uri = payload.get("redirect_uri", "postmessage")

    if not code:
        raise HTTPException(status_code=400, detail="Missing authorization code")

    try:
        # 1. Exchange Code for Tokens
        flow = get_google_flow(redirect_uri)
        flow.fetch_token(code=code)
        creds = flow.credentials

        # 2. Extract Identity from ID Token
        # We need to manually verify the ID token if fetch_token doesn't expose it ready-to-use
        # or use creds.id_token
        id_info = id_token.verify_oauth2_token(
            creds.id_token,
            google_requests.Request(),
            settings.GOOGLE_CLIENT_ID
        )

        email = id_info['email']
        full_name = id_info.get('name')

        # 3. Handle User Record
        result = await db.execute(select(User).where(User.email == email))
        user = result.scalar_one_or_none()

        is_new_user = False
        if not user:
            # Sentinel instead of a real hash: password login can never match it.
            user = User(
                email=email,
                full_name=full_name,
                is_active=True,
                hashed_password="EXTERNAL_AUTH_GOOGLE"
            )
            db.add(user)
            is_new_user = True

        # 4. Save Gmail Credentials
        # We store the dict format of credentials
        user.gmail_credentials = {
            "token": creds.token,
            "refresh_token": creds.refresh_token,
            "token_uri": creds.token_uri,
            "client_id": creds.client_id,
            "client_secret": creds.client_secret,
            "scopes": creds.scopes
        }
        user.is_active = True  # Ensure they are active if they reconnect

        await db.commit()
        await db.refresh(user)

        # 5. Welcome Email for New Users
        if is_new_user:
            from app.features.notifications.service import NotificationService
            llm = get_llm_service()
            notif_service = NotificationService(db, llm)
            background_tasks.add_task(notif_service.send_welcome_email, email, full_name)

        # 6. Generate Grip Token
        access_token_expires = timedelta(minutes=settings.ACCESS_TOKEN_EXPIRE_MINUTES)
        access_token = create_access_token(
            data={"sub": user.email}, expires_delta=access_token_expires
        )
        return {"access_token": access_token, "token_type": "bearer"}

    except Exception as e:
        # Broad catch collapses any OAuth/DB failure into a 400.
        # NOTE(review): str(e) in the detail may leak internal information to clients.
        logger.error(f"Google One-Tap Error: {e}")
        raise HTTPException(status_code=400, detail=f"Authentication failed: {str(e)}")
|
| 160 |
+
|
| 161 |
+
@router.post("/google-login", response_model=schemas.Token)
async def google_login(
    login_data: GoogleLoginRequest,
    db: Annotated[AsyncSession, Depends(get_db)],
    background_tasks: BackgroundTasks
):
    """Verifies Google ID Token and logs the user in (or registers them)."""
    try:
        # Checks signature, expiry and that the audience matches our client ID.
        idinfo = id_token.verify_oauth2_token(
            login_data.token,
            google_requests.Request(),
            settings.GOOGLE_CLIENT_ID
        )

        email = idinfo['email']
        full_name = idinfo.get('name')

        result = await db.execute(select(User).where(User.email == email))
        user = result.scalar_one_or_none()

        is_new_user = False
        if not user:
            # First Google sign-in: provision an active account with a sentinel
            # password value so form-based login cannot match it.
            user = User(
                email=email,
                full_name=full_name,
                is_active=True,
                hashed_password="EXTERNAL_AUTH_GOOGLE"
            )
            db.add(user)
            await db.commit()
            await db.refresh(user)
            is_new_user = True
        elif not user.is_active:
            # Previously unverified signup now proven via Google: activate and
            # treat as new so the welcome email is still sent.
            user.is_active = True
            await db.commit()
            is_new_user = True

        if is_new_user:
            llm = get_llm_service()
            notif_service = NotificationService(db, llm)
            background_tasks.add_task(notif_service.send_welcome_email, email, full_name)

        access_token_expires = timedelta(minutes=settings.ACCESS_TOKEN_EXPIRE_MINUTES)
        access_token = create_access_token(
            data={"sub": user.email}, expires_delta=access_token_expires
        )
        return {"access_token": access_token, "token_type": "bearer"}
    except Exception as e:
        # Any verification/DB error collapses to a generic 400 (no detail leak).
        logger.error(f"Google Login error: {e}")
        raise HTTPException(status_code=400, detail="Invalid Google token")
|
| 211 |
+
|
| 212 |
+
@router.post("/verify-otp", response_model=schemas.Token)
async def verify_otp(
    verify_in: schemas.VerifyOTP,
    db: Annotated[AsyncSession, Depends(get_db)],
    background_tasks: BackgroundTasks
):
    """Complete registration by validating the emailed OTP.

    On success the account is activated, the one-time code is cleared,
    a welcome email is queued, and a bearer token is returned.

    Raises:
        HTTPException 400: unknown email, wrong code, or expired code.
    """
    # Local import in keeping with this module's style of deferred imports.
    import secrets

    result = await db.execute(select(User).where(User.email == verify_in.email))
    user = result.scalar_one_or_none()

    # Reject users with no pending code (already verified / never registered),
    # and compare in constant time so code digits are not leaked via timing.
    if (
        not user
        or not user.verification_code
        or not secrets.compare_digest(user.verification_code, verify_in.otp)
    ):
        raise HTTPException(status_code=400, detail="Invalid OTP")

    expires_at = user.verification_code_expires_at
    # A matching code with no expiry is an inconsistent record: treat it as
    # expired instead of crashing with a TypeError (500) on the comparison.
    if expires_at is None:
        raise HTTPException(status_code=400, detail="OTP expired")
    # Some drivers return naive datetimes; assume UTC so comparing against an
    # aware "now" cannot raise.
    if expires_at.tzinfo is None:
        expires_at = expires_at.replace(tzinfo=timezone.utc)
    if expires_at < datetime.now(timezone.utc):
        raise HTTPException(status_code=400, detail="OTP expired")

    # Activate the account and clear the one-time code.
    user.is_active = True
    user.verification_code = None
    user.verification_code_expires_at = None
    await db.commit()

    # Queue the welcome email after the response is sent.
    llm = get_llm_service()
    notif_service = NotificationService(db, llm)
    background_tasks.add_task(notif_service.send_welcome_email, user.email, user.full_name)

    access_token_expires = timedelta(minutes=settings.ACCESS_TOKEN_EXPIRE_MINUTES)
    access_token = create_access_token(
        data={"sub": user.email}, expires_delta=access_token_expires
    )
    return {"access_token": access_token, "token_type": "bearer"}
|
| 241 |
+
|
| 242 |
+
@router.post("/token", response_model=schemas.Token)
async def login_for_access_token(
    form_data: Annotated[OAuth2PasswordRequestForm, Depends()],
    db: Annotated[AsyncSession, Depends(get_db)],
):
    """OAuth2 password login: exchange email + password for a bearer token."""
    lookup = await db.execute(select(User).where(User.email == form_data.username))
    user = lookup.scalar_one_or_none()

    # Short-circuit keeps verify_password off the path when no user was found.
    credentials_ok = user is not None and verify_password(
        form_data.password, user.hashed_password
    )
    if not credentials_ok:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Incorrect email or password",
            headers={"WWW-Authenticate": "Bearer"},
        )
    if not user.is_active:
        raise HTTPException(status_code=400, detail="User not verified")

    token_lifetime = timedelta(minutes=settings.ACCESS_TOKEN_EXPIRE_MINUTES)
    access_token = create_access_token(data={"sub": user.email}, expires_delta=token_lifetime)
    return {"access_token": access_token, "token_type": "bearer"}
|
| 260 |
+
|
| 261 |
+
from app.features.auth.deps import get_current_user
|
| 262 |
+
@router.post("/verify")
async def verify_user_password(
    data: schemas.PasswordVerification,
    current_user: Annotated[User, Depends(get_current_user)]
):
    """Re-authenticate the logged-in user by checking their current password."""
    password_matches = verify_password(data.password, current_user.hashed_password)
    if not password_matches:
        raise HTTPException(status_code=400, detail="Invalid password")
    return {"valid": True}
|
app/features/auth/schemas.py
ADDED
|
@@ -0,0 +1,30 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from typing import Optional
from pydantic import BaseModel, EmailStr
import uuid

class Token(BaseModel):
    """Bearer token response returned by all login endpoints."""
    access_token: str
    token_type: str

class TokenData(BaseModel):
    """Decoded JWT payload (the subject email claim)."""
    email: Optional[str] = None

class UserBase(BaseModel):
    """Fields shared by user create/read schemas."""
    email: EmailStr
    is_active: Optional[bool] = True

class UserCreate(UserBase):
    """Registration payload: base fields plus a plaintext password."""
    password: str

class UserResponse(UserBase):
    """Public user representation (built from the ORM model)."""
    id: uuid.UUID

    class Config:
        # Allow construction directly from SQLAlchemy model instances.
        from_attributes = True

class PasswordVerification(BaseModel):
    """Body for /verify: re-check the current user's password."""
    password: str

class VerifyOTP(BaseModel):
    """Body for /verify-otp: email plus the emailed 6-digit code."""
    email: EmailStr
    otp: str
|
app/features/bills/__init__.py
ADDED
|
File without changes
|
app/features/bills/models.py
ADDED
|
@@ -0,0 +1,42 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import uuid
|
| 2 |
+
from decimal import Decimal
|
| 3 |
+
from typing import Optional
|
| 4 |
+
from datetime import date
|
| 5 |
+
from sqlalchemy import String, ForeignKey, Numeric, Boolean, Integer, DateTime, Date
|
| 6 |
+
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
| 7 |
+
from sqlalchemy.sql import func
|
| 8 |
+
from app.core.database import Base
|
| 9 |
+
|
| 10 |
+
class Bill(Base):
    """A user's payable obligation, either one-off or monthly recurring."""

    __tablename__ = "bills"

    id: Mapped[uuid.UUID] = mapped_column(primary_key=True, default=uuid.uuid4)
    user_id: Mapped[uuid.UUID] = mapped_column(ForeignKey("users.id"))
    title: Mapped[str] = mapped_column(String, nullable=False)
    amount: Mapped[Decimal] = mapped_column(Numeric(10, 2), nullable=False)
    # Next (or only) date this bill is due.
    due_date: Mapped[date] = mapped_column(Date, nullable=False)
    is_paid: Mapped[bool] = mapped_column(Boolean, default=False)
    is_recurring: Mapped[bool] = mapped_column(Boolean, default=False)
    # Day-of-month (1-31) used to advance due_date for recurring bills.
    recurrence_day: Mapped[Optional[int]] = mapped_column(Integer, nullable=True)
    category: Mapped[str] = mapped_column(String, nullable=False)
    sub_category: Mapped[str] = mapped_column(String, nullable=False)
    # NOTE(review): annotation is Mapped[DateTime] (the SQLAlchemy column type);
    # it should likely be Mapped[datetime] — works at runtime but is misleading.
    created_at: Mapped[DateTime] = mapped_column(DateTime(timezone=True), server_default=func.now())

    user: Mapped["User"] = relationship("User", back_populates="bills")
|
| 26 |
+
|
| 27 |
+
class BillExclusion(Base):
    """Per-user rule that hides projected/auto-detected surety obligations."""

    __tablename__ = "bill_exclusions"

    id: Mapped[uuid.UUID] = mapped_column(primary_key=True, default=uuid.uuid4)
    user_id: Mapped[uuid.UUID] = mapped_column(ForeignKey("users.id"))

    # For skipping a specific projection from a specific transaction
    source_transaction_id: Mapped[Optional[uuid.UUID]] = mapped_column(ForeignKey("transactions.id"), nullable=True)

    # For permanent exclusion logic
    merchant_pattern: Mapped[Optional[str]] = mapped_column(String, nullable=True)
    subcategory_pattern: Mapped[Optional[str]] = mapped_column(String, nullable=True)

    exclusion_type: Mapped[str] = mapped_column(String)  # 'SKIP', 'PERMANENT'

    # NOTE(review): Mapped[DateTime] should likely be Mapped[datetime] (Python type).
    created_at: Mapped[DateTime] = mapped_column(DateTime(timezone=True), server_default=func.now())
|
app/features/bills/router.py
ADDED
|
@@ -0,0 +1,147 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from typing import Annotated, List, Optional
from uuid import UUID
from decimal import Decimal
from fastapi import APIRouter, Depends, HTTPException, status, Query
from sqlalchemy.ext.asyncio import AsyncSession
from app.core.database import get_db
from app.features.auth.deps import get_current_user
from app.features.auth.models import User
# Fix: BillResponse and MarkPaidRequest were previously listed twice in this tuple.
from app.features.bills.schemas import (
    BillCreate,
    BillUpdate,
    BillResponse,
    MarkPaidRequest,
    UpcomingBillsResponse,
    SuretyExclusionCreate,
)
from app.features.bills.service import BillService

router = APIRouter()
|
| 22 |
+
|
| 23 |
+
@router.get("/surety/list")
async def list_sureties(
    current_user: Annotated[User, Depends(get_current_user)],
    db: Annotated[AsyncSession, Depends(get_db)],
    service: Annotated[BillService, Depends()],
    include_hidden: bool = True
):
    """List all detected surety obligations, including hidden/excluded ones."""
    ledger = await service.get_obligations_ledger(db, current_user.id, days_ahead=60, include_hidden=include_hidden)
    # Filter only Surety items
    # ledger["items"] elements expose .type — presumably IdentifiedObligation models; verify in service.
    sureties = [item for item in ledger["items"] if item.type == "SURETY_TXN"]
    return sureties
|
| 35 |
+
|
| 36 |
+
@router.post("/surety/exclusion")
async def create_exclusion(
    exclusion_data: SuretyExclusionCreate,
    current_user: Annotated[User, Depends(get_current_user)],
    db: Annotated[AsyncSession, Depends(get_db)],
    service: Annotated[BillService, Depends()]
):
    """Create an exclusion rule for a surety."""
    rule = await service.create_surety_exclusion(db, current_user.id, exclusion_data)
    return {"status": "success", "id": str(rule.id)}
|
| 46 |
+
|
| 47 |
+
|
| 48 |
+
@router.post("", response_model=BillResponse, status_code=status.HTTP_201_CREATED)
async def create_bill(
    bill_data: BillCreate,
    current_user: Annotated[User, Depends(get_current_user)],
    db: Annotated[AsyncSession, Depends(get_db)],
    service: Annotated[BillService, Depends()]
):
    """Create a new bill (one-time or recurring)."""
    # Persistence and recurrence defaulting are delegated entirely to the service.
    return await service.create_bill(db, current_user.id, bill_data)
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
@router.get("", response_model=List[BillResponse])
async def list_bills(
    current_user: Annotated[User, Depends(get_current_user)],
    db: Annotated[AsyncSession, Depends(get_db)],
    service: Annotated[BillService, Depends()],
    paid: Optional[bool] = Query(None, description="Filter by paid status")
):
    """List all bills for the current user.

    Pass ?paid=true/false to filter; omit the parameter for all bills.
    """
    bills = await service.get_user_bills(db, current_user.id, paid_filter=paid)
    return bills
|
| 70 |
+
|
| 71 |
+
|
| 72 |
+
@router.get("/upcoming", response_model=UpcomingBillsResponse)
async def get_upcoming_bills(
    current_user: Annotated[User, Depends(get_current_user)],
    db: Annotated[AsyncSession, Depends(get_db)],
    service: Annotated[BillService, Depends()],
    days: int = Query(30, ge=1, le=90, description="Number of days to look ahead")
):
    """Get unpaid bills due in the next X days.

    Returns the matching bills together with their count and summed amount.
    """
    bills = await service.get_upcoming_bills(db, current_user.id, days_ahead=days)

    # Summing with a Decimal start value keeps the total a Decimal even for an
    # empty list (sum() would otherwise return int 0) and removes the redundant
    # Decimal -> str -> Decimal round-trip of the previous implementation.
    total_amount = sum((bill.amount for bill in bills), Decimal("0"))

    return UpcomingBillsResponse(
        upcoming_bills=bills,
        total_amount=total_amount,
        count=len(bills)
    )
|
| 89 |
+
|
| 90 |
+
|
| 91 |
+
@router.get("/{bill_id}", response_model=BillResponse)
async def get_bill(
    bill_id: UUID,
    current_user: Annotated[User, Depends(get_current_user)],
    db: Annotated[AsyncSession, Depends(get_db)],
    service: Annotated[BillService, Depends()]
):
    """Get details of a specific bill."""
    found = await service.get_bill_by_id(db, bill_id, current_user.id)
    if found is None:
        # Ownership check lives in the service; missing == not yours or absent.
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Bill not found",
        )
    return found
|
| 108 |
+
|
| 109 |
+
|
| 110 |
+
@router.put("/{bill_id}", response_model=BillResponse)
async def update_bill(
    bill_id: UUID,
    bill_data: BillUpdate,
    current_user: Annotated[User, Depends(get_current_user)],
    db: Annotated[AsyncSession, Depends(get_db)],
    service: Annotated[BillService, Depends()]
):
    """Update a bill."""
    updated = await service.update_bill(db, bill_id, current_user.id, bill_data)
    if updated is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Bill not found",
        )
    return updated
|
| 128 |
+
|
| 129 |
+
|
| 130 |
+
@router.post("/{bill_id}/mark-paid", response_model=BillResponse)
async def mark_bill_paid(
    bill_id: UUID,
    request: MarkPaidRequest,
    current_user: Annotated[User, Depends(get_current_user)],
    db: Annotated[AsyncSession, Depends(get_db)],
    service: Annotated[BillService, Depends()]
):
    """Mark a bill as paid or unpaid.

    For recurring bills the service advances due_date to the next cycle
    instead of flagging them paid, so the response may show is_paid=False.
    """
    bill = await service.mark_paid(db, bill_id, current_user.id, request.paid)

    if not bill:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Bill not found"
        )

    return bill
|
app/features/bills/schemas.py
ADDED
|
@@ -0,0 +1,49 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from typing import Optional
from uuid import UUID
from datetime import datetime, date
from decimal import Decimal
from pydantic import BaseModel, Field

class BillBase(BaseModel):
    """Fields shared by bill create/read schemas."""
    title: str = Field(..., description="Bill title (e.g., 'Rent', 'Electricity')")
    amount: Decimal = Field(..., description="Bill amount")
    due_date: date = Field(..., description="Due date for the bill")
    is_recurring: bool = Field(default=False, description="Whether this is a recurring bill")
    recurrence_day: Optional[int] = Field(None, ge=1, le=31, description="Day of month for recurring bills")
    category: str
    sub_category: str

class BillCreate(BillBase):
    """Creation payload — identical to the shared base."""
    pass

class BillUpdate(BaseModel):
    """Partial-update payload; only explicitly set fields are applied."""
    title: Optional[str] = None
    amount: Optional[Decimal] = None
    due_date: Optional[date] = None
    is_recurring: Optional[bool] = None
    recurrence_day: Optional[int] = Field(None, ge=1, le=31)
    category: Optional[str] = None
    sub_category: Optional[str] = None

class BillResponse(BillBase):
    """Bill as returned to clients (built from the ORM model)."""
    id: UUID
    user_id: UUID
    is_paid: bool
    created_at: datetime

    class Config:
        # Allow construction directly from SQLAlchemy Bill instances.
        from_attributes = True

class MarkPaidRequest(BaseModel):
    """Body for the mark-paid endpoint."""
    paid: bool = True

class UpcomingBillsResponse(BaseModel):
    """Aggregate view of unpaid bills due within the lookahead window."""
    upcoming_bills: list[BillResponse]
    total_amount: Decimal
    count: int

class SuretyExclusionCreate(BaseModel):
    """Rule to hide an auto-detected surety projection (one-off or permanent)."""
    source_transaction_id: Optional[UUID] = None
    merchant_pattern: Optional[str] = None
    subcategory_pattern: Optional[str] = None
    exclusion_type: str  # 'SKIP', 'PERMANENT'
|
app/features/bills/service.py
ADDED
|
@@ -0,0 +1,491 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import logging
|
| 2 |
+
import asyncio
|
| 3 |
+
from uuid import UUID
|
| 4 |
+
from datetime import date, datetime, timedelta
|
| 5 |
+
import zoneinfo
|
| 6 |
+
from decimal import Decimal
|
| 7 |
+
from typing import List, Optional, Set, Tuple
|
| 8 |
+
from calendar import monthrange
|
| 9 |
+
from sqlalchemy.ext.asyncio import AsyncSession
|
| 10 |
+
from sqlalchemy import select, func, or_
|
| 11 |
+
from app.core.config import get_settings
|
| 12 |
+
from app.features.bills.models import Bill, BillExclusion
|
| 13 |
+
from app.features.bills.schemas import BillCreate, BillUpdate, SuretyExclusionCreate
|
| 14 |
+
|
| 15 |
+
settings = get_settings()
|
| 16 |
+
logger = logging.getLogger(__name__)
|
| 17 |
+
|
| 18 |
+
from app.features.categories.models import SubCategory
|
| 19 |
+
from app.features.transactions.models import Transaction
|
| 20 |
+
|
| 21 |
+
class BillService:
|
| 22 |
+
    def __init__(self):
        # Cache the app-configured timezone so all date math in this service is consistent.
        self._tz = zoneinfo.ZoneInfo(settings.APP_TIMEZONE)
|
| 24 |
+
|
| 25 |
+
def _get_today(self) -> date:
|
| 26 |
+
"""Get current date in the configured timezone."""
|
| 27 |
+
return datetime.now(self._tz).date()
|
| 28 |
+
|
| 29 |
+
    async def create_bill(
        self,
        db: AsyncSession,
        user_id: UUID,
        bill_data: BillCreate
    ) -> Bill:
        """Create a new bill.

        Args:
            db: Open async session; this method commits it.
            user_id: Owner of the new bill.
            bill_data: Validated creation payload.

        Returns:
            The persisted Bill, refreshed with DB-generated defaults.
        """
        data = bill_data.model_dump()
        # Default recurrence_day to due_date day if it's recurring but day not specified
        if data.get("is_recurring") and not data.get("recurrence_day"):
            data["recurrence_day"] = data["due_date"].day

        bill = Bill(
            user_id=user_id,
            **data
        )
        db.add(bill)
        await db.commit()
        await db.refresh(bill)
        logger.info(f"Created bill '{bill.title}' for user {user_id}")
        return bill
|
| 50 |
+
|
| 51 |
+
async def get_user_bills(
|
| 52 |
+
self,
|
| 53 |
+
db: AsyncSession,
|
| 54 |
+
user_id: UUID,
|
| 55 |
+
paid_filter: Optional[bool] = None
|
| 56 |
+
) -> List[Bill]:
|
| 57 |
+
"""Get all bills for a user with optional paid/unpaid filter."""
|
| 58 |
+
stmt = select(Bill).where(Bill.user_id == user_id)
|
| 59 |
+
|
| 60 |
+
if paid_filter is not None:
|
| 61 |
+
stmt = stmt.where(Bill.is_paid == paid_filter)
|
| 62 |
+
|
| 63 |
+
stmt = stmt.order_by(Bill.due_date)
|
| 64 |
+
|
| 65 |
+
result = await db.execute(stmt)
|
| 66 |
+
return list(result.scalars().all())
|
| 67 |
+
|
| 68 |
+
async def get_bill_by_id(
|
| 69 |
+
self,
|
| 70 |
+
db: AsyncSession,
|
| 71 |
+
bill_id: UUID,
|
| 72 |
+
user_id: UUID
|
| 73 |
+
) -> Optional[Bill]:
|
| 74 |
+
"""Get a specific bill by ID."""
|
| 75 |
+
stmt = select(Bill).where(
|
| 76 |
+
Bill.id == bill_id,
|
| 77 |
+
Bill.user_id == user_id
|
| 78 |
+
)
|
| 79 |
+
result = await db.execute(stmt)
|
| 80 |
+
return result.scalar_one_or_none()
|
| 81 |
+
|
| 82 |
+
    async def update_bill(
        self,
        db: AsyncSession,
        bill_id: UUID,
        user_id: UUID,
        bill_data: BillUpdate
    ) -> Optional[Bill]:
        """Update a bill.

        Only fields the client explicitly sent are applied
        (model_dump(exclude_unset=True)). Returns None when the bill does
        not exist or belongs to a different user.
        """
        bill = await self.get_bill_by_id(db, bill_id, user_id)

        if not bill:
            return None

        update_data = bill_data.model_dump(exclude_unset=True)
        # If toggling recurring on but no recurrence_day, default from existing or new due_date
        if update_data.get("is_recurring") and not update_data.get("recurrence_day") and not bill.recurrence_day:
            due_date = update_data.get("due_date") or bill.due_date
            update_data["recurrence_day"] = due_date.day

        for field, value in update_data.items():
            setattr(bill, field, value)

        await db.commit()
        await db.refresh(bill)
        logger.info(f"Updated bill {bill_id}")
        return bill
|
| 108 |
+
|
| 109 |
+
    async def mark_paid(
        self,
        db: AsyncSession,
        bill_id: UUID,
        user_id: UUID,
        paid: bool = True
    ) -> Optional[Bill]:
        """Mark a bill as paid or unpaid. For recurring bills, advances the due date.

        Recurring bills are never left with is_paid=True: paying one rolls
        due_date forward to the next cycle and resets the flag. Returns None
        when the bill is missing or owned by someone else.
        """
        bill = await self.get_bill_by_id(db, bill_id, user_id)

        if not bill:
            return None

        if paid and bill.is_recurring:
            # Advance due date to next month
            today = self._get_today()
            r_day = bill.recurrence_day or bill.due_date.day
            next_due = self._calculate_next_recurrence(r_day, today)

            # If next_due is same as current due_date (e.g. paying today's bill),
            # we must ensure we move to the month AFTER.
            if next_due <= bill.due_date:
                # Force next month
                # +32 days always lands in the following month regardless of its length.
                next_month = bill.due_date + timedelta(days=32)
                next_due = self._calculate_next_recurrence(r_day, next_month)

            bill.due_date = next_due
            bill.is_paid = False  # Reset for next cycle
            logger.info(f"Advanced recurring bill {bill_id} to {next_due}")
        else:
            bill.is_paid = paid

        await db.commit()
        await db.refresh(bill)
        logger.info(f"Marked bill {bill_id} status updated (paid={paid})")
        return bill
|
| 145 |
+
|
| 146 |
+
async def get_upcoming_bills(
|
| 147 |
+
self,
|
| 148 |
+
db: AsyncSession,
|
| 149 |
+
user_id: UUID,
|
| 150 |
+
days_ahead: int = 30
|
| 151 |
+
) -> List[Bill]:
|
| 152 |
+
"""Get unpaid bills due within the next X days."""
|
| 153 |
+
today = self._get_today()
|
| 154 |
+
threshold_date = today + timedelta(days=days_ahead)
|
| 155 |
+
|
| 156 |
+
stmt = (
|
| 157 |
+
select(Bill)
|
| 158 |
+
.where(Bill.user_id == user_id)
|
| 159 |
+
.where(Bill.is_paid == False)
|
| 160 |
+
.where(Bill.due_date <= threshold_date)
|
| 161 |
+
.order_by(Bill.due_date)
|
| 162 |
+
)
|
| 163 |
+
|
| 164 |
+
result = await db.execute(stmt)
|
| 165 |
+
return list(result.scalars().all())
|
| 166 |
+
|
| 167 |
+
async def create_surety_exclusion(
|
| 168 |
+
self,
|
| 169 |
+
db: AsyncSession,
|
| 170 |
+
user_id: UUID,
|
| 171 |
+
data: SuretyExclusionCreate
|
| 172 |
+
) -> BillExclusion:
|
| 173 |
+
"""Create an exclusion rule for auto-detected sureties."""
|
| 174 |
+
excl = BillExclusion(
|
| 175 |
+
user_id=user_id,
|
| 176 |
+
source_transaction_id=data.source_transaction_id,
|
| 177 |
+
merchant_pattern=data.merchant_pattern,
|
| 178 |
+
subcategory_pattern=data.subcategory_pattern,
|
| 179 |
+
exclusion_type=data.exclusion_type
|
| 180 |
+
)
|
| 181 |
+
db.add(excl)
|
| 182 |
+
await db.commit()
|
| 183 |
+
await db.refresh(excl)
|
| 184 |
+
logger.info(f"Created exclusion rule type {data.exclusion_type} for user {user_id}")
|
| 185 |
+
return excl
|
| 186 |
+
|
| 187 |
+
async def get_obligations_ledger(
    self,
    db: AsyncSession,
    user_id: UUID,
    days_ahead: int = 30,
    include_hidden: bool = False
) -> dict:
    """
    Build a full ledger of all identified obligations for a user.

    Three sources are combined:
      1. Unpaid Bill rows (status PENDING / OVERDUE).
      2. Projected next instances of recurring bills (status PROJECTED).
      3. "Surety" transactions from last month that have no matching
         payment this month yet (auto-detected obligations).

    Args:
        db: Active async database session.
        user_id: Owner of the bills/transactions.
        days_ahead: Projection horizon in days from today.
        include_hidden: When True, excluded/paid/covered items are listed too.

    Returns: {
        "unpaid_total": Decimal,
        "projected_total": Decimal,
        "items": List[IdentifiedObligation]
    }
    """
    from app.features.analytics.schemas import IdentifiedObligation
    from app.features.categories.models import SubCategory
    from app.utils.finance_utils import get_month_date_range, get_previous_month_date_range
    from sqlalchemy import or_

    today = self._get_today()
    threshold_date = today + timedelta(days=days_ahead)

    ledger_items = []
    unpaid_total = Decimal("0.00")
    projected_total = Decimal("0.00")

    # Define statements
    excl_stmt = select(BillExclusion).where(BillExclusion.user_id == user_id)
    bill_stmt = select(Bill).where(Bill.user_id == user_id, Bill.is_paid == False)
    rec_stmt = select(Bill).where(Bill.user_id == user_id, Bill.is_recurring == True)

    # Subquery for surety subcategories (filtering done in the DB).
    surety_sub_query = select(SubCategory.name).where(SubCategory.is_surety == True)

    prev_range = get_previous_month_date_range(today)
    curr_range = get_month_date_range(today)

    past_stmt = (
        select(Transaction)
        .where(Transaction.user_id == user_id)
        .where(Transaction.transaction_date >= prev_range["month_start"])
        .where(Transaction.transaction_date <= prev_range["month_end"])
        .where(or_(
            Transaction.is_surety == True,
            Transaction.sub_category.in_(surety_sub_query)
        ))
    )

    curr_stmt = (
        select(Transaction)
        .where(Transaction.user_id == user_id)
        .where(Transaction.transaction_date >= curr_range["month_start"])
        .where(Transaction.transaction_date <= curr_range["month_end"])
        .where(or_(
            Transaction.is_surety == True,
            Transaction.sub_category.in_(surety_sub_query)
        ))
    )

    # BUGFIX: these were previously run via asyncio.gather(); a single
    # AsyncSession is NOT safe for concurrent use across asyncio tasks,
    # so execute sequentially on the one session. (The redundant
    # surety-subcategory name query was dead code and is removed.)
    excl_res = await db.execute(excl_stmt)
    bill_res = await db.execute(bill_stmt)
    rec_res = await db.execute(rec_stmt)
    past_res = await db.execute(past_stmt)
    curr_res = await db.execute(curr_stmt)

    exclusions = excl_res.scalars().all()
    unpaid_bills = bill_res.scalars().all()
    recurring_bills = rec_res.scalars().all()
    past_txns = list(past_res.scalars().all())
    curr_txns = list(curr_res.scalars().all())

    skipped_source_ids = {e.source_transaction_id for e in exclusions if e.exclusion_type == 'SKIP' and e.source_transaction_id}
    # BUGFIX: build the string-keyed set once instead of rebuilding it for
    # every past transaction (the old comprehension also shadowed builtin `id`).
    manual_paid_str_ids = {str(e.source_transaction_id) for e in exclusions if e.exclusion_type == 'MANUAL_PAID' and e.source_transaction_id}
    permanent_patterns = [
        (e.merchant_pattern.lower() if e.merchant_pattern else None,
         e.subcategory_pattern.lower() if e.subcategory_pattern else None)
        for e in exclusions if e.exclusion_type == 'PERMANENT'
    ]

    # Signatures (subcategory, amount) of obligations already covered by bills.
    covered_signatures: Set[Tuple[str, Decimal]] = set()

    # 1. Process unpaid bills
    for bill in unpaid_bills:
        status = "OVERDUE" if bill.due_date < today else "PENDING"
        ledger_items.append(IdentifiedObligation(
            id=str(bill.id),
            title=bill.title,
            amount=bill.amount,
            due_date=bill.due_date,
            type="BILL",
            status=status,
            category=bill.category,
            sub_category=bill.sub_category,
            source_id=None
        ))
        unpaid_total += bill.amount
        if bill.is_recurring:
            # Guard .lower() against a NULL subcategory.
            covered_signatures.add(((bill.sub_category or "").lower(), bill.amount))

    # 2. Project NEXT instances for recurring bills
    for bill in recurring_bills:
        r_day = bill.recurrence_day or bill.due_date.day
        next_due = self._calculate_next_recurrence(r_day, today)
        if today <= next_due <= threshold_date:
            # Only project when the current instance is behind us or paid.
            if next_due > bill.due_date or bill.is_paid:
                ledger_items.append(IdentifiedObligation(
                    id=f"proj-{bill.id}",
                    title=f"{bill.title} (Projected)",
                    amount=bill.amount,
                    due_date=next_due,
                    type="BILL",
                    status="PROJECTED",
                    category=bill.category,
                    sub_category=bill.sub_category,
                    source_id=None
                ))
                projected_total += bill.amount
                covered_signatures.add(((bill.sub_category or "").lower(), bill.amount))

    # 3. Process surety transactions (already filtered by the DB query above).
    matched_curr_ids = set()

    for p_txn in past_txns:
        is_excluded = False
        exclusion_reason = ""
        p_merchant = (p_txn.merchant_name or "").lower()
        p_sub = (p_txn.sub_category or "").lower()

        # Check skip exclusion
        if p_txn.id in skipped_source_ids:
            is_excluded = True
            exclusion_reason = "SKIPPED"

        # Check manual paid
        if str(p_txn.id) in manual_paid_str_ids:
            is_excluded = True
            exclusion_reason = "PAID"

        # Check permanent exclusion (a None pattern side matches anything).
        if not is_excluded:
            for emp, esp in permanent_patterns:
                match_m = (emp == p_merchant) if emp is not None else True
                match_s = (esp == p_sub) if esp is not None else True
                if match_m and match_s:
                    is_excluded = True
                    exclusion_reason = "TERMINATED"
                    break

        # Check covered by a bill — relaxed check: subcategory AND amount.
        if not is_excluded:
            if (p_sub, p_txn.amount) in covered_signatures:
                is_excluded = True
                exclusion_reason = "COVERED"

        # Estimated due date: same day-of-month as last month's payment,
        # clamped to the current month's length.
        due_day = p_txn.transaction_date.day
        try:
            p_date = today.replace(day=min(due_day, curr_range["month_end"].day))
        except ValueError:  # BUGFIX: was a bare except
            p_date = today

        # If a matching payment already exists this month the obligation is done.
        partner_found = False
        for c_txn in curr_txns:
            if c_txn.id in matched_curr_ids:
                continue
            if abs(p_txn.amount) != abs(c_txn.amount):
                continue
            c_merchant = (c_txn.merchant_name or "").lower()
            match_merch = (p_merchant == c_merchant)
            # Relaxed match: subcategory match is sufficient if amount is exact.
            match_sub = (p_sub == (c_txn.sub_category or "").lower())
            # Even more relaxed: one merchant contains the other
            # (e.g. "Google" vs "Google Services").
            match_fuzzy = (p_merchant and c_merchant) and (p_merchant in c_merchant or c_merchant in p_merchant)
            if match_merch or match_sub or match_fuzzy:
                matched_curr_ids.add(c_txn.id)
                partner_found = True
                break

        if partner_found:
            if include_hidden:
                ledger_items.append(IdentifiedObligation(
                    id=f"auto-{p_txn.id}",
                    title=f"{p_txn.merchant_name or p_txn.sub_category} (Auto-detected)",
                    amount=abs(p_txn.amount),
                    due_date=p_date,  # estimated date
                    type="SURETY_TXN",
                    status="PAID",
                    sub_category=p_txn.sub_category,
                    source_id=str(p_txn.id)
                ))
            continue  # Already paid this month, no obligation

        # Does it pass the visibility filter?
        if is_excluded and not include_hidden:
            continue

        date_status = "OVERDUE" if p_date < today else "PROJECTED"
        final_status = exclusion_reason if is_excluded else date_status

        # Date threshold applies only to active projections; terminal
        # statuses are always listed (when visible) for management views.
        if today <= p_date <= threshold_date or final_status in ["OVERDUE", "SKIPPED", "TERMINATED", "COVERED", "PAID"]:
            if include_hidden or not is_excluded:
                ledger_items.append(IdentifiedObligation(
                    id=f"auto-{p_txn.id}",
                    title=f"{p_txn.merchant_name or p_txn.sub_category} (Auto-detected)",
                    amount=abs(p_txn.amount),
                    due_date=p_date,
                    type="SURETY_TXN",
                    status=final_status,
                    sub_category=p_txn.sub_category,
                    source_id=str(p_txn.id)
                ))
                if final_status == "PROJECTED":
                    projected_total += abs(p_txn.amount)
                elif final_status == "OVERDUE":
                    unpaid_total += abs(p_txn.amount)

    return {
        "unpaid_total": unpaid_total,
        "projected_total": projected_total,
        "items": ledger_items
    }
|
| 440 |
+
|
| 441 |
+
async def get_projected_surety_bills(
    self,
    db: AsyncSession,
    user_id: UUID,
    days_ahead: int = 30
) -> Decimal:
    """Legacy wrapper: return only the projected total from the full ledger.

    Kept for callers that predate get_obligations_ledger.
    """
    res = await self.get_obligations_ledger(db, user_id, days_ahead)
    return res["projected_total"]
|
| 450 |
+
|
| 451 |
+
async def get_unpaid_bills_total(
    self,
    db: AsyncSession,
    user_id: UUID
) -> Decimal:
    """Legacy wrapper: return only the unpaid total from the full ledger."""
    ledger = await self.get_obligations_ledger(db, user_id)
    return ledger["unpaid_total"]
|
| 459 |
+
|
| 460 |
+
|
| 461 |
+
def _calculate_next_recurrence(
|
| 462 |
+
self,
|
| 463 |
+
recurrence_day: int,
|
| 464 |
+
reference_date: date
|
| 465 |
+
) -> date:
|
| 466 |
+
"""Calculate the next occurrence date for a recurring bill."""
|
| 467 |
+
try:
|
| 468 |
+
next_date = date(
|
| 469 |
+
reference_date.year,
|
| 470 |
+
reference_date.month,
|
| 471 |
+
min(recurrence_day, monthrange(reference_date.year, reference_date.month)[1])
|
| 472 |
+
)
|
| 473 |
+
|
| 474 |
+
if next_date >= reference_date:
|
| 475 |
+
return next_date
|
| 476 |
+
except ValueError:
|
| 477 |
+
pass
|
| 478 |
+
|
| 479 |
+
# Move to next month
|
| 480 |
+
if reference_date.month == 12:
|
| 481 |
+
next_month = 1
|
| 482 |
+
next_year = reference_date.year + 1
|
| 483 |
+
else:
|
| 484 |
+
next_month = reference_date.month + 1
|
| 485 |
+
next_year = reference_date.year
|
| 486 |
+
|
| 487 |
+
return date(
|
| 488 |
+
next_year,
|
| 489 |
+
next_month,
|
| 490 |
+
min(recurrence_day, monthrange(next_year, next_month)[1])
|
| 491 |
+
)
|
app/features/categories/__init__.py
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
from .models import Category, SubCategory
|
app/features/categories/models.py
ADDED
|
@@ -0,0 +1,31 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import uuid
|
| 2 |
+
from typing import List, Optional
|
| 3 |
+
from sqlalchemy import String, ForeignKey, Text, Boolean
|
| 4 |
+
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
| 5 |
+
from app.core.database import Base
|
| 6 |
+
|
| 7 |
+
class Category(Base):
    """Top-level category; user_id is NULL for system-provided rows shared by all users."""
    __tablename__ = "categories"

    id: Mapped[uuid.UUID] = mapped_column(primary_key=True, default=uuid.uuid4)
    name: Mapped[str] = mapped_column(String, index=True)
    icon: Mapped[Optional[str]] = mapped_column(String, nullable=True)
    color: Mapped[Optional[str]] = mapped_column(String, nullable=True)
    type: Mapped[str] = mapped_column(String, default="EXPENSE") # EXPENSE, INCOME, INVESTMENT
    user_id: Mapped[Optional[uuid.UUID]] = mapped_column(ForeignKey("users.id"), nullable=True) # None for system categories

    # Children are removed with the parent (delete-orphan) and eagerly
    # loaded via selectin so responses can embed them without extra awaits.
    sub_categories: Mapped[List["SubCategory"]] = relationship("SubCategory", back_populates="category", cascade="all, delete-orphan", lazy="selectin")
|
| 18 |
+
|
| 19 |
+
class SubCategory(Base):
    """Sub-category under a Category; user_id is NULL for system rows."""
    __tablename__ = "sub_categories"

    id: Mapped[uuid.UUID] = mapped_column(primary_key=True, default=uuid.uuid4)
    name: Mapped[str] = mapped_column(String)
    icon: Mapped[Optional[str]] = mapped_column(String, nullable=True)
    color: Mapped[Optional[str]] = mapped_column(String, nullable=True)
    type: Mapped[str] = mapped_column(String, default="EXPENSE")
    # Marks committed recurring payments ("sureties") used by the
    # obligations ledger to auto-detect upcoming dues.
    is_surety: Mapped[bool] = mapped_column(Boolean, default=False)
    category_id: Mapped[uuid.UUID] = mapped_column(ForeignKey("categories.id"))
    user_id: Mapped[Optional[uuid.UUID]] = mapped_column(ForeignKey("users.id"), nullable=True)

    category: Mapped["Category"] = relationship("Category", back_populates="sub_categories")
|
app/features/categories/router.py
ADDED
|
@@ -0,0 +1,69 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from typing import Annotated, List
|
| 2 |
+
from uuid import UUID
|
| 3 |
+
from fastapi import APIRouter, Depends
|
| 4 |
+
from app.features.auth.deps import get_current_user
|
| 5 |
+
from app.features.auth.models import User
|
| 6 |
+
from app.features.categories import schemas
|
| 7 |
+
from app.features.categories.service import CategoryService
|
| 8 |
+
|
| 9 |
+
router = APIRouter()
|
| 10 |
+
|
| 11 |
+
@router.get("/", response_model=List[schemas.CategoryResponse])
async def get_categories(
    current_user: Annotated[User, Depends(get_current_user)],
    service: Annotated[CategoryService, Depends()]
):
    """List system categories plus the current user's own categories."""
    return await service.get_categories(user_id=current_user.id)
|
| 17 |
+
|
| 18 |
+
@router.post("/", response_model=schemas.CategoryResponse)
async def create_category(
    data: schemas.CategoryCreate,
    current_user: Annotated[User, Depends(get_current_user)],
    service: Annotated[CategoryService, Depends()]
):
    """Create a category owned by the current user."""
    return await service.create_category(user_id=current_user.id, data=data)
|
| 25 |
+
|
| 26 |
+
@router.post("/sub-categories", response_model=schemas.SubCategoryResponse)
async def create_sub_category(
    data: schemas.SubCategoryCreate,
    current_user: Annotated[User, Depends(get_current_user)],
    service: Annotated[CategoryService, Depends()]
):
    """Create a sub-category (under an existing category) owned by the current user."""
    return await service.create_sub_category(user_id=current_user.id, data=data)
|
| 33 |
+
|
| 34 |
+
@router.delete("/{category_id}")
async def delete_category(
    category_id: UUID,
    current_user: Annotated[User, Depends(get_current_user)],
    service: Annotated[CategoryService, Depends()]
):
    """Delete a user-owned category; the service 404s on system/foreign rows."""
    await service.delete_category(user_id=current_user.id, category_id=category_id)
    return {"status": "success"}
|
| 42 |
+
|
| 43 |
+
@router.delete("/sub-categories/{sub_category_id}")
async def delete_sub_category(
    sub_category_id: UUID,
    current_user: Annotated[User, Depends(get_current_user)],
    service: Annotated[CategoryService, Depends()]
):
    """Delete a user-owned sub-category; the service 404s on system/foreign rows."""
    await service.delete_sub_category(user_id=current_user.id, sub_category_id=sub_category_id)
    return {"status": "success"}
|
| 51 |
+
|
| 52 |
+
@router.patch("/{category_id}", response_model=schemas.CategoryResponse)
async def update_category(
    category_id: UUID,
    data: schemas.CategoryUpdate,
    current_user: Annotated[User, Depends(get_current_user)],
    service: Annotated[CategoryService, Depends()]
):
    """Partially update a user-owned category (unset fields untouched)."""
    return await service.update_category(user_id=current_user.id, category_id=category_id, data=data)
|
| 60 |
+
|
| 61 |
+
@router.patch("/sub-categories/{sub_category_id}", response_model=schemas.SubCategoryResponse)
async def update_sub_category(
    sub_category_id: UUID,
    data: schemas.SubCategoryUpdate,
    current_user: Annotated[User, Depends(get_current_user)],
    service: Annotated[CategoryService, Depends()]
):
    """Partially update a user-owned sub-category (unset fields untouched)."""
    return await service.update_sub_category(user_id=current_user.id, sub_category_id=sub_category_id, data=data)
|
| 69 |
+
|
app/features/categories/schemas.py
ADDED
|
@@ -0,0 +1,54 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import uuid
|
| 2 |
+
from typing import List, Optional, Literal
|
| 3 |
+
from pydantic import BaseModel
|
| 4 |
+
|
| 5 |
+
CategoryType = Literal["EXPENSE", "INCOME", "INVESTMENT"]
|
| 6 |
+
|
| 7 |
+
class SubCategoryBase(BaseModel):
    """Shared sub-category fields."""
    name: str
    icon: Optional[str] = None
    color: Optional[str] = None
    type: CategoryType = "EXPENSE"
    # Flags recurring committed payments ("sureties") for obligation detection.
    is_surety: bool = False
|
| 13 |
+
|
| 14 |
+
class SubCategoryCreate(SubCategoryBase):
    """Payload for creating a sub-category under an existing category."""
    category_id: uuid.UUID
|
| 16 |
+
|
| 17 |
+
class SubCategoryUpdate(BaseModel):
    """Partial-update payload; unset fields are left unchanged."""
    name: Optional[str] = None
    icon: Optional[str] = None
    color: Optional[str] = None
    type: Optional[CategoryType] = None
    is_surety: Optional[bool] = None
|
| 23 |
+
|
| 24 |
+
class SubCategoryResponse(SubCategoryBase):
    """Sub-category as returned by the API (built from ORM rows)."""
    id: uuid.UUID
    category_id: uuid.UUID
    # None means a system-provided row shared by all users.
    user_id: Optional[uuid.UUID] = None
    is_surety: bool = False

    class Config:
        from_attributes = True
|
| 32 |
+
|
| 33 |
+
class CategoryBase(BaseModel):
    """Shared category fields."""
    name: str
    icon: Optional[str] = None
    color: Optional[str] = None
    type: CategoryType = "EXPENSE"
|
| 38 |
+
|
| 39 |
+
class CategoryCreate(CategoryBase):
    """Payload for creating a category; identical to the base fields."""
    pass
|
| 41 |
+
|
| 42 |
+
class CategoryUpdate(BaseModel):
    """Partial-update payload; unset fields are left unchanged."""
    name: Optional[str] = None
    icon: Optional[str] = None
    color: Optional[str] = None
    type: Optional[CategoryType] = None
|
| 47 |
+
|
| 48 |
+
class CategoryResponse(CategoryBase):
    """Category as returned by the API, with its visible sub-categories embedded."""
    id: uuid.UUID
    # None means a system-provided row shared by all users.
    user_id: Optional[uuid.UUID] = None
    sub_categories: List[SubCategoryResponse] = []

    class Config:
        from_attributes = True
|
app/features/categories/service.py
ADDED
|
@@ -0,0 +1,105 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from uuid import UUID
|
| 2 |
+
from typing import List, Optional
|
| 3 |
+
from sqlalchemy.ext.asyncio import AsyncSession
|
| 4 |
+
from sqlalchemy import select
|
| 5 |
+
from sqlalchemy.orm import selectinload, with_loader_criteria
|
| 6 |
+
from fastapi import HTTPException, Depends
|
| 7 |
+
from app.features.categories.models import Category, SubCategory
|
| 8 |
+
from app.features.categories import schemas
|
| 9 |
+
from app.core.database import get_db
|
| 10 |
+
|
| 11 |
+
class CategoryService:
    """CRUD service for system and user-owned categories/sub-categories.

    System rows (user_id=None) are readable by everyone but can only be
    modified through a user-owned copy — the ownership check in _get_owned
    deliberately excludes them.
    """

    def __init__(self, db: AsyncSession = Depends(get_db)):
        self.db = db

    async def _get_owned(self, model, obj_id: UUID, user_id: UUID, label: str):
        """Fetch one *model* row owned by *user_id*, or raise 404.

        Shared by the delete/update paths, which previously duplicated
        this select-then-404 logic four times.
        """
        stmt = select(model).where(model.id == obj_id, model.user_id == user_id)
        result = await self.db.execute(stmt)
        obj = result.scalar_one_or_none()
        if not obj:
            raise HTTPException(status_code=404, detail=f"{label} not found or you don't have permission")
        return obj

    async def get_categories(self, user_id: UUID) -> List[Category]:
        """Return system categories (user_id=None) plus the user's own.

        with_loader_criteria applies the same visibility filter to the
        eagerly-loaded sub_categories collection.
        """
        stmt = (
            select(Category)
            .where((Category.user_id == None) | (Category.user_id == user_id))
            .options(
                with_loader_criteria(
                    SubCategory,
                    (SubCategory.user_id == None) | (SubCategory.user_id == user_id)
                )
            )
        )
        result = await self.db.execute(stmt)
        # list() so the return value matches the List[Category] annotation.
        return list(result.scalars().all())

    async def create_category(self, user_id: UUID, data: schemas.CategoryCreate) -> Category:
        """Create and persist a category owned by the user."""
        category = Category(
            name=data.name,
            icon=data.icon,
            color=data.color,
            type=data.type,
            user_id=user_id
        )
        self.db.add(category)
        await self.db.commit()
        await self.db.refresh(category)
        return category

    async def create_sub_category(self, user_id: UUID, data: schemas.SubCategoryCreate) -> SubCategory:
        """Create and persist a sub-category under an existing category."""
        sub_category = SubCategory(
            name=data.name,
            icon=data.icon,
            color=data.color,
            type=data.type,
            category_id=data.category_id,
            user_id=user_id,
            is_surety=data.is_surety
        )
        self.db.add(sub_category)
        await self.db.commit()
        await self.db.refresh(sub_category)
        return sub_category

    async def delete_category(self, user_id: UUID, category_id: UUID):
        """Delete a user-owned category (cascade removes its sub-categories)."""
        category = await self._get_owned(Category, category_id, user_id, "Category")
        await self.db.delete(category)
        await self.db.commit()

    async def delete_sub_category(self, user_id: UUID, sub_category_id: UUID):
        """Delete a user-owned sub-category."""
        sub_category = await self._get_owned(SubCategory, sub_category_id, user_id, "SubCategory")
        await self.db.delete(sub_category)
        await self.db.commit()

    async def update_category(self, user_id: UUID, category_id: UUID, data: schemas.CategoryUpdate) -> Category:
        """Partially update a user-owned category; unset fields untouched."""
        category = await self._get_owned(Category, category_id, user_id, "Category")
        for key, value in data.model_dump(exclude_unset=True).items():
            setattr(category, key, value)
        await self.db.commit()
        await self.db.refresh(category)
        return category

    async def update_sub_category(self, user_id: UUID, sub_category_id: UUID, data: schemas.SubCategoryUpdate) -> SubCategory:
        """Partially update a user-owned sub-category; unset fields untouched."""
        sub_category = await self._get_owned(SubCategory, sub_category_id, user_id, "SubCategory")
        for key, value in data.model_dump(exclude_unset=True).items():
            setattr(sub_category, key, value)
        await self.db.commit()
        await self.db.refresh(sub_category)
        return sub_category
|
| 105 |
+
|
app/features/credit_cards/__init__.py
ADDED
|
File without changes
|
app/features/credit_cards/models.py
ADDED
|
@@ -0,0 +1,24 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import uuid
from datetime import datetime
from decimal import Decimal
from typing import Optional

from sqlalchemy import String, ForeignKey, Numeric, Boolean, Integer, DateTime
from sqlalchemy.orm import Mapped, mapped_column, relationship
from sqlalchemy.sql import func

from app.core.database import Base
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
class CreditCard(Base):
    """A user's credit card with billing-cycle anchor days (1-31)."""

    __tablename__ = "credit_cards"

    id: Mapped[uuid.UUID] = mapped_column(primary_key=True, default=uuid.uuid4)
    user_id: Mapped[uuid.UUID] = mapped_column(ForeignKey("users.id"))
    card_name: Mapped[str] = mapped_column(String, nullable=False)
    # Last 4 digits only — the full card number is never stored.
    last_four_digits: Mapped[Optional[str]] = mapped_column(String(4), nullable=True)
    statement_date: Mapped[int] = mapped_column(Integer, nullable=False)  # day of month statement is generated
    payment_due_date: Mapped[int] = mapped_column(Integer, nullable=False)  # day of month payment is due
    credit_limit: Mapped[Optional[Decimal]] = mapped_column(Numeric(10, 2), nullable=True)
    is_active: Mapped[bool] = mapped_column(Boolean, default=True)  # soft-delete flag
    # BUGFIX: annotation was Mapped[DateTime] (the SQLAlchemy type object);
    # the Python-side value is a datetime instance.
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), server_default=func.now())

    user: Mapped["User"] = relationship("User", back_populates="credit_cards")
    transactions: Mapped[list["Transaction"]] = relationship("Transaction", back_populates="credit_card")
|
app/features/credit_cards/router.py
ADDED
|
@@ -0,0 +1,115 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from typing import Annotated, List
|
| 2 |
+
from uuid import UUID
|
| 3 |
+
from fastapi import APIRouter, Depends, HTTPException, status
|
| 4 |
+
from sqlalchemy.ext.asyncio import AsyncSession
|
| 5 |
+
from app.core.database import get_db
|
| 6 |
+
from app.features.auth.deps import get_current_user
|
| 7 |
+
from app.features.auth.models import User
|
| 8 |
+
from app.features.credit_cards.schemas import (
|
| 9 |
+
CreditCardCreate,
|
| 10 |
+
CreditCardUpdate,
|
| 11 |
+
CreditCardResponse,
|
| 12 |
+
CreditCardCycleInfo
|
| 13 |
+
)
|
| 14 |
+
from app.features.credit_cards.service import CreditCardService
|
| 15 |
+
|
| 16 |
+
router = APIRouter()
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
@router.post("", response_model=CreditCardResponse, status_code=status.HTTP_201_CREATED)
async def create_credit_card(
    card_data: CreditCardCreate,
    current_user: Annotated[User, Depends(get_current_user)],
    db: Annotated[AsyncSession, Depends(get_db)],
    service: Annotated[CreditCardService, Depends()]
):
    """Create a new credit card with billing cycle information."""
    # Delegate straight to the service; no post-processing needed.
    return await service.create_card(db, current_user.id, card_data)
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
@router.get("", response_model=List[CreditCardResponse])
async def list_credit_cards(
    current_user: Annotated[User, Depends(get_current_user)],
    db: Annotated[AsyncSession, Depends(get_db)],
    service: Annotated[CreditCardService, Depends()],
    active_only: bool = True
):
    """List all credit cards for the current user."""
    # active_only defaults to hiding soft-deleted cards.
    return await service.get_user_cards(db, current_user.id, active_only)
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
@router.get("/{card_id}", response_model=CreditCardResponse)
async def get_credit_card(
    card_id: UUID,
    current_user: Annotated[User, Depends(get_current_user)],
    db: Annotated[AsyncSession, Depends(get_db)],
    service: Annotated[CreditCardService, Depends()]
):
    """Get details of a specific credit card."""
    found = await service.get_card_by_id(db, card_id, current_user.id)
    if not found:
        # Ownership check happens in the service; a foreign card is also a 404.
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Credit card not found")
    return found
|
| 60 |
+
|
| 61 |
+
|
| 62 |
+
@router.put("/{card_id}", response_model=CreditCardResponse)
async def update_credit_card(
    card_id: UUID,
    card_data: CreditCardUpdate,
    current_user: Annotated[User, Depends(get_current_user)],
    db: Annotated[AsyncSession, Depends(get_db)],
    service: Annotated[CreditCardService, Depends()]
):
    """Update a credit card."""
    updated = await service.update_card(db, card_id, current_user.id, card_data)
    if not updated:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Credit card not found")
    return updated
|
| 80 |
+
|
| 81 |
+
|
| 82 |
+
@router.delete("/{card_id}", status_code=status.HTTP_204_NO_CONTENT)
async def deactivate_credit_card(
    card_id: UUID,
    current_user: Annotated[User, Depends(get_current_user)],
    db: Annotated[AsyncSession, Depends(get_db)],
    service: Annotated[CreditCardService, Depends()]
):
    """Deactivate a credit card (soft delete)."""
    deactivated = await service.deactivate_card(db, card_id, current_user.id)
    if not deactivated:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Credit card not found")
    # 204: no body on success.
|
| 97 |
+
|
| 98 |
+
|
| 99 |
+
@router.get("/{card_id}/cycle-info", response_model=CreditCardCycleInfo)
async def get_billing_cycle_info(
    card_id: UUID,
    current_user: Annotated[User, Depends(get_current_user)],
    db: Annotated[AsyncSession, Depends(get_db)],
    service: Annotated[CreditCardService, Depends()]
):
    """Get current billing cycle information for a credit card."""
    info = await service.get_cycle_info(db, card_id, current_user.id)
    if not info:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Credit card not found")
    return info
|
app/features/credit_cards/schemas.py
ADDED
|
@@ -0,0 +1,52 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from typing import Optional
|
| 2 |
+
from uuid import UUID
|
| 3 |
+
from datetime import datetime, date
|
| 4 |
+
from decimal import Decimal
|
| 5 |
+
from pydantic import BaseModel, Field
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
class CreditCardBase(BaseModel):
    """Fields shared by the credit-card create and response schemas."""
    card_name: str = Field(..., description="User-defined name for the card (e.g., 'HDFC Regalia')")
    last_four_digits: Optional[str] = Field(None, max_length=4, description="Last 4 digits of card number")
    # Day-of-month values (1-31); how months shorter than the value are handled
    # is decided by the billing-cycle helper, not this schema.
    statement_date: int = Field(..., ge=1, le=31, description="Day of month when statement is generated")
    payment_due_date: int = Field(..., ge=1, le=31, description="Day of month when payment is due")
    credit_limit: Optional[Decimal] = Field(None, description="Credit limit of the card")
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
class CreditCardCreate(CreditCardBase):
    """Payload for creating a credit card; identical to the base fields."""
    pass
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
class CreditCardUpdate(BaseModel):
    """Partial-update payload; every field optional so callers can PATCH-style
    send only what changed (service applies with exclude_unset)."""
    card_name: Optional[str] = None
    last_four_digits: Optional[str] = Field(None, max_length=4)
    statement_date: Optional[int] = Field(None, ge=1, le=31)
    payment_due_date: Optional[int] = Field(None, ge=1, le=31)
    credit_limit: Optional[Decimal] = None
    is_active: Optional[bool] = None
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
class CreditCardResponse(CreditCardBase):
    """API representation of a stored credit card (built from the ORM model)."""
    id: UUID
    user_id: UUID
    is_active: bool
    created_at: datetime

    # Pydantic v2 style (the project already uses v2's model_dump elsewhere);
    # replaces the deprecated v1-style `class Config` block.
    model_config = {"from_attributes": True}
|
| 37 |
+
|
| 38 |
+
|
| 39 |
+
class CreditCardCycleInfo(BaseModel):
    """Snapshot of a card's current billing cycle, computed by the service."""
    card_id: UUID
    card_name: str
    cycle_start: date
    cycle_end: date
    next_statement_date: date
    days_until_statement: int
    # Positive magnitude of unsettled spend (service negates the stored
    # negative expense sum).
    unbilled_amount: Decimal
    credit_limit: Optional[Decimal]
    # Percentage (0-100); None when the card has no positive credit limit.
    utilization_percentage: Optional[float]
|
| 49 |
+
|
| 50 |
+
|
| 51 |
+
class CreditCardWithCycleInfo(CreditCardResponse):
    """Card response enriched with its current billing-cycle snapshot."""
    cycle_info: CreditCardCycleInfo
|
app/features/credit_cards/service.py
ADDED
|
@@ -0,0 +1,189 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import logging
|
| 2 |
+
from uuid import UUID
|
| 3 |
+
from datetime import date
|
| 4 |
+
from decimal import Decimal
|
| 5 |
+
from typing import List, Optional
|
| 6 |
+
from sqlalchemy.ext.asyncio import AsyncSession
|
| 7 |
+
from sqlalchemy import select, func
|
| 8 |
+
from app.features.credit_cards.models import CreditCard
|
| 9 |
+
from app.features.credit_cards.schemas import CreditCardCreate, CreditCardUpdate, CreditCardCycleInfo
|
| 10 |
+
from app.features.transactions.models import Transaction
|
| 11 |
+
from app.utils.finance_utils import get_billing_cycle_dates
|
| 12 |
+
|
| 13 |
+
logger = logging.getLogger(__name__)
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
class CreditCardService:
    """CRUD and billing-cycle helpers for a user's credit cards.

    All queries are scoped to the owning user; 'deletion' is a soft
    deactivation via the is_active flag.
    """

    async def create_card(
        self,
        db: AsyncSession,
        user_id: UUID,
        card_data: CreditCardCreate
    ) -> CreditCard:
        """Create a new credit card."""
        new_card = CreditCard(user_id=user_id, **card_data.model_dump())
        db.add(new_card)
        await db.commit()
        await db.refresh(new_card)
        logger.info(f"Created credit card {new_card.card_name} for user {user_id}")
        return new_card

    async def get_user_cards(
        self,
        db: AsyncSession,
        user_id: UUID,
        active_only: bool = True
    ) -> List[CreditCard]:
        """Get all credit cards for a user (active ones only by default)."""
        query = select(CreditCard).where(CreditCard.user_id == user_id)
        if active_only:
            query = query.where(CreditCard.is_active == True)
        rows = await db.execute(query)
        return list(rows.scalars().all())

    async def get_card_by_id(
        self,
        db: AsyncSession,
        card_id: UUID,
        user_id: UUID
    ) -> Optional[CreditCard]:
        """Get a specific credit card by ID, scoped to the owning user."""
        query = select(CreditCard).where(
            CreditCard.id == card_id,
            CreditCard.user_id == user_id
        )
        rows = await db.execute(query)
        return rows.scalar_one_or_none()

    async def update_card(
        self,
        db: AsyncSession,
        card_id: UUID,
        user_id: UUID,
        card_data: CreditCardUpdate
    ) -> Optional[CreditCard]:
        """Update a credit card; returns None when it does not exist."""
        card = await self.get_card_by_id(db, card_id, user_id)
        if card is None:
            return None

        # Apply only the fields the caller explicitly set.
        for attr, new_value in card_data.model_dump(exclude_unset=True).items():
            setattr(card, attr, new_value)

        await db.commit()
        await db.refresh(card)
        logger.info(f"Updated credit card {card_id}")
        return card

    async def deactivate_card(
        self,
        db: AsyncSession,
        card_id: UUID,
        user_id: UUID
    ) -> bool:
        """Deactivate a credit card (soft delete). False when not found."""
        card = await self.get_card_by_id(db, card_id, user_id)
        if card is None:
            return False

        card.is_active = False
        await db.commit()
        logger.info(f"Deactivated credit card {card_id}")
        return True

    async def get_unbilled_amount(
        self,
        db: AsyncSession,
        card_id: UUID,
        cycle_start: date,
        cycle_end: date
    ) -> Decimal:
        """
        Calculate total unsettled debt for the card.
        Ignores cycle dates in favor of explicit 'is_settled' status.
        """
        query = (
            select(func.sum(Transaction.amount))
            .where(Transaction.credit_card_id == card_id)
            .where(Transaction.is_settled == False)  # Only include unpaid transactions
        )
        total = (await db.execute(query)).scalar() or Decimal("0.00")
        # Expenses are stored negative (income positive), so negate the sum
        # to express debt as a positive magnitude: -(-1000) = 1000 owed.
        return -total

    async def get_cycle_info(
        self,
        db: AsyncSession,
        card_id: UUID,
        user_id: UUID
    ) -> Optional[CreditCardCycleInfo]:
        """Get current billing cycle information for a card (None if missing)."""
        card = await self.get_card_by_id(db, card_id, user_id)
        if card is None:
            return None

        cycle = get_billing_cycle_dates(card.statement_date)
        unbilled = await self.get_unbilled_amount(
            db, card_id, cycle["cycle_start"], cycle["cycle_end"]
        )

        # Utilization only makes sense with a positive credit limit.
        utilization = None
        if card.credit_limit and card.credit_limit > 0:
            utilization = float((unbilled / card.credit_limit) * 100)

        return CreditCardCycleInfo(
            card_id=card.id,
            card_name=card.card_name,
            cycle_start=cycle["cycle_start"],
            cycle_end=cycle["cycle_end"],
            next_statement_date=cycle["next_statement_date"],
            days_until_statement=cycle["days_until_statement"],
            unbilled_amount=unbilled,
            credit_limit=card.credit_limit,
            utilization_percentage=utilization
        )

    async def get_all_unbilled_for_user(
        self,
        db: AsyncSession,
        user_id: UUID
    ) -> Decimal:
        """Get total unbilled amount across all active credit cards for a user."""
        # Collect the ids of this user's active cards.
        id_rows = await db.execute(
            select(CreditCard.id).where(
                CreditCard.user_id == user_id,
                CreditCard.is_active == True
            )
        )
        active_ids = [r[0] for r in id_rows.all()]
        if not active_ids:
            return Decimal("0.00")

        # Sum unsettled transactions across those cards; is_settled is the
        # source of truth for unbilled debt.
        sum_query = (
            select(func.sum(Transaction.amount))
            .where(Transaction.credit_card_id.in_(active_ids))
            .where(Transaction.is_settled == False)
        )
        total = (await db.execute(sum_query)).scalar() or Decimal("0.00")

        # Expenses are negative; negate for a positive debt figure.
        return -total
|
app/features/dashboard/router.py
ADDED
|
@@ -0,0 +1,116 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from typing import Annotated, Dict, Optional
|
| 2 |
+
import asyncio
|
| 3 |
+
from fastapi import APIRouter, Depends, Query
|
| 4 |
+
from sqlalchemy.ext.asyncio import AsyncSession
|
| 5 |
+
from sqlalchemy import select, func, case
|
| 6 |
+
from app.core.database import get_db
|
| 7 |
+
from app.features.auth.deps import get_current_user
|
| 8 |
+
from app.features.auth.models import User
|
| 9 |
+
from app.features.transactions.models import Transaction
|
| 10 |
+
|
| 11 |
+
from app.features.dashboard.service import get_daily_expenses, get_category_expenses_history, get_monthly_category_breakdown, get_category_daily_expenses
|
| 12 |
+
from app.features.forecasting.service import ForecastingService
|
| 13 |
+
|
| 14 |
+
router = APIRouter()
|
| 15 |
+
|
| 16 |
+
@router.get("/liquidity")
async def get_liquidity_dashboard(
    current_user: Annotated[User, Depends(get_current_user)],
    db: Annotated[AsyncSession, Depends(get_db)]
):
    """Summarise the user's liquid position: cash/savings balance, unbilled
    credit-card spend, and bill-type outflows."""
    uid = current_user.id

    # Net balance across liquid accounts.
    liquid_q = (
        select(func.sum(Transaction.amount))
        .where(Transaction.user_id == uid)
        .where(Transaction.account_type.in_(["CASH", "SAVINGS"]))
    )

    # Non-income credit-card activity (unbilled spend; stored negative).
    card_q = (
        select(func.sum(Transaction.amount))
        .where(Transaction.user_id == uid)
        .where(Transaction.category != "Income")
        .where(Transaction.account_type == "CREDIT_CARD")
    )

    # Recurring bill-type outflows.
    bill_q = (
        select(func.sum(Transaction.amount))
        .where(Transaction.user_id == uid)
        .where(Transaction.sub_category.in_(["Rent", "Maintenance", "Credit Card Payment"]))
    )

    balance = (await db.execute(liquid_q)).scalar() or 0
    unbilled_cc = (await db.execute(card_q)).scalar() or 0
    bills = (await db.execute(bill_q)).scalar() or 0

    # Liquidity is the sum of liquid balance + CC debt (which is negative):
    # balance 10,000 with CC debt -2,000 yields liquidity 8,000.
    return {
        "liquidity": balance + unbilled_cc,
        "breakdown": {
            "balance": balance,
            "unbilled_cc": abs(unbilled_cc),
            "bills": abs(bills),
        },
    }
|
| 58 |
+
|
| 59 |
+
@router.get("/investments")
async def get_investments_dashboard(
    current_user: Annotated[User, Depends(get_current_user)],
    db: Annotated[AsyncSession, Depends(get_db)],
    month: Optional[int] = Query(None, ge=1, le=12),
    year: Optional[int] = Query(None, ge=2000, le=2100)
):
    """Aggregate 'Investment' spend per sub-category, optionally limited to a
    single month when both month and year are supplied."""
    from datetime import date
    from app.utils.finance_utils import get_month_date_range

    query = (
        select(Transaction.sub_category, func.sum(Transaction.amount))
        .where(Transaction.user_id == current_user.id)
        .where(Transaction.category == "Investment")
    )

    # Apply the month window only when both parts of the period are present.
    if month and year:
        period = get_month_date_range(date(year, month, 1))
        query = (
            query
            .where(Transaction.transaction_date >= period["month_start"])
            .where(Transaction.transaction_date <= period["month_end"])
        )

    rows = (await db.execute(query.group_by(Transaction.sub_category))).all()
    breakdown = {str(sub): abs(total) for sub, total in rows}

    return {
        "total_investments": abs(sum(breakdown.values())),
        "breakdown": breakdown,
    }
|
| 92 |
+
|
| 93 |
+
@router.get("/forecast")
async def get_financial_forecast(
    current_user: Annotated[User, Depends(get_current_user)],
    db: Annotated[AsyncSession, Depends(get_db)],
    service: Annotated[ForecastingService, Depends()]
):
    """Predict next-period spending burden from recent transaction history."""
    # Gather both history views concurrently.
    category_daily, monthly_breakdown = await asyncio.gather(
        get_category_daily_expenses(db, current_user.id, days=120),
        get_monthly_category_breakdown(db, current_user.id, months=4),
    )

    forecast = await service.calculate_safe_to_spend(category_daily, monthly_breakdown)

    return {
        "predicted_burden_30d": forecast.amount,
        "confidence": forecast.confidence,
        "description": forecast.reason,
        "time_frame": forecast.time_frame,
        "breakdown": forecast.breakdown,
    }
|
app/features/dashboard/service.py
ADDED
|
@@ -0,0 +1,176 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from datetime import datetime, timedelta
|
| 2 |
+
from sqlalchemy import select, func, cast, Date
|
| 3 |
+
from sqlalchemy.ext.asyncio import AsyncSession
|
| 4 |
+
from app.features.transactions.models import Transaction
|
| 5 |
+
|
| 6 |
+
async def get_daily_expenses(db: AsyncSession, user_id: str, days: int = 90):
    """Return daily aggregated expenses for forecasting.

    Each item is ``{"ds": ISO date string, "y": positive spend magnitude}``.
    """
    # transaction_date is a date, so compare against a date object.
    cutoff = (datetime.now() - timedelta(days=days)).date()

    query = (
        select(
            Transaction.transaction_date.label("day"),
            func.sum(Transaction.amount).label("total"),
        )
        .where(Transaction.user_id == user_id)
        .where(Transaction.category != "Income")
        .where(Transaction.transaction_date >= cutoff)
        .group_by("day")
        .order_by("day")
    )

    rows = (await db.execute(query)).all()

    # Expenses are stored negative; forecasting expects positive magnitudes.
    daily = []
    for row in rows:
        if row.day is None:  # skip rows with a missing date
            continue
        daily.append({"ds": row.day.isoformat(), "y": abs(float(row.total or 0))})
    return daily
|
| 34 |
+
|
| 35 |
+
async def get_category_expenses_history(db: AsyncSession, user_id: str, days: int = 90):
    """Return total spend per category over the last *days* days,
    biggest spenders first."""
    cutoff = (datetime.now() - timedelta(days=days)).date()

    total_col = func.sum(Transaction.amount)
    query = (
        select(Transaction.category, total_col.label("total"))
        .where(Transaction.user_id == user_id)
        .where(Transaction.category != "Income")
        .where(Transaction.transaction_date >= cutoff)
        .group_by(Transaction.category)
        # Expenses are negative, so ASC puts the biggest spenders first.
        .order_by(total_col.asc())
    )

    rows = (await db.execute(query)).all()
    return [{"category": r.category, "total": abs(float(r.total or 0))} for r in rows]
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
async def get_discretionary_daily_expenses(db: AsyncSession, user_id: str, days: int = 30):
    """Return daily discretionary expenses — everything except Income,
    Investment, Housing, Bill Payment, Transfer, and surety-flagged rows."""
    cutoff = (datetime.now() - timedelta(days=days)).date()

    non_discretionary = ["Income", "Investment", "Housing", "Bill Payment", "Transfer"]
    query = (
        select(
            Transaction.transaction_date.label("day"),
            func.sum(Transaction.amount).label("total"),
        )
        .where(Transaction.user_id == user_id)
        .where(Transaction.category.notin_(non_discretionary))
        .where(Transaction.is_surety == False)
        .where(Transaction.transaction_date >= cutoff)
        .group_by("day")
        .order_by("day")
    )

    rows = (await db.execute(query)).all()
    return [
        {"ds": r.day.isoformat(), "y": abs(float(r.total or 0))}
        for r in rows
        if r.day is not None
    ]
|
| 85 |
+
|
| 86 |
+
async def get_monthly_category_breakdown(db: AsyncSession, user_id: str, months: int = 4):
    """Return per-month, per-category expense totals for roughly the last
    *months* months.

    Output shape: ``[{"month": "YYYY-MM", "categories": {name: total}}, ...]``,
    sorted by month. Selected fixed-expense sub-categories are also surfaced
    as "_<sub>" pseudo-categories (e.g. "_Rent") so downstream consumers see
    them without receiving every sub-category.
    """
    # Approximate the window in days, snapped back to the first of that month.
    cutoff = (datetime.now() - timedelta(days=months * 30)).replace(day=1).date()

    # Fetch raw rows and aggregate in Python: month-truncation SQL differs
    # between SQLite and Postgres, and Python grouping leaves room for more
    # complex sub-category logic later.
    query = (
        select(
            Transaction.transaction_date,
            Transaction.category,
            Transaction.sub_category,
            Transaction.amount,
        )
        .where(Transaction.user_id == user_id)
        .where(Transaction.category != "Income")
        .where(Transaction.transaction_date >= cutoff)
        .order_by(Transaction.transaction_date)
    )
    rows = (await db.execute(query)).all()

    fixed_subs = ["Rent", "Maintenance", "EMI", "Insurance", "Education"]
    by_month = {}

    for row in rows:
        if not row.transaction_date:
            continue

        month_key = row.transaction_date.strftime("%Y-%m")  # YYYY-MM
        magnitude = abs(float(row.amount or 0))
        bucket = by_month.setdefault(month_key, {})

        # Aggregate by category.
        bucket[row.category] = bucket.get(row.category, 0) + magnitude

        # Surface likely fixed expenses prominently as pseudo-categories
        # without shipping every sub-category.
        sub = row.sub_category
        if sub and sub in fixed_subs:
            pseudo = f"_{sub}"
            bucket[pseudo] = bucket.get(pseudo, 0) + magnitude

    # Flatten to a month-sorted list for easy JSON serialization.
    return [
        {"month": month, "categories": by_month[month]}
        for month in sorted(by_month)
    ]
|
| 150 |
+
|
| 151 |
+
|
| 152 |
+
async def get_category_daily_expenses(db: AsyncSession, user_id: str, days: int = 120):
    """Return daily expense totals broken down by category, for forecasting."""
    cutoff = (datetime.now() - timedelta(days=days)).date()

    query = (
        select(
            Transaction.category,
            Transaction.transaction_date.label("day"),
            func.sum(Transaction.amount).label("total"),
        )
        .where(Transaction.user_id == user_id)
        .where(Transaction.category != "Income")
        .where(Transaction.transaction_date >= cutoff)
        .group_by(Transaction.category, "day")
        .order_by(Transaction.category, "day")
    )

    result = await db.execute(query)

    records = []
    for row in result.all():
        if row.day is None:
            continue
        records.append(
            {"category": row.category, "ds": row.day.isoformat(), "y": abs(float(row.total or 0))}
        )
    return records
|
app/features/export/router.py
ADDED
|
@@ -0,0 +1,28 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from fastapi import APIRouter, Depends
|
| 2 |
+
from fastapi.responses import Response
|
| 3 |
+
from typing import Annotated
|
| 4 |
+
from sqlalchemy.ext.asyncio import AsyncSession
|
| 5 |
+
from app.core.database import get_db
|
| 6 |
+
from app.features.auth.deps import get_current_user
|
| 7 |
+
from app.features.auth.models import User
|
| 8 |
+
from app.features.export.service import generate_csv_export
|
| 9 |
+
|
| 10 |
+
router = APIRouter()
|
| 11 |
+
|
| 12 |
+
@router.get("/csv")
async def export_transactions_csv(
    current_user: Annotated[User, Depends(get_current_user)],
    db: Annotated[AsyncSession, Depends(get_db)]
):
    """
    Export all transactions as a CSV file.

    Returns a ``text/csv`` response with a Content-Disposition header so
    browsers download it as an attachment.
    """
    csv_content = await generate_csv_export(db, current_user.id)

    return Response(
        content=csv_content,
        media_type="text/csv",
        headers={
            # Static filename — plain string (removed a needless f-prefix).
            "Content-Disposition": "attachment; filename=grip_transactions_backup.csv"
        }
    )
|
app/features/export/service.py
ADDED
|
@@ -0,0 +1,39 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import csv
|
| 2 |
+
import io
|
| 3 |
+
from sqlalchemy.ext.asyncio import AsyncSession
|
| 4 |
+
from sqlalchemy import select
|
| 5 |
+
from app.features.transactions.models import Transaction
|
| 6 |
+
|
| 7 |
+
async def generate_csv_export(db: AsyncSession, user_id: str) -> str:
    """
    Generates a CSV string containing all transaction data for the user,
    newest transactions first.
    """
    query = (
        select(Transaction)
        .where(Transaction.user_id == user_id)
        .order_by(Transaction.transaction_date.desc())
    )
    txns = (await db.execute(query)).scalars().all()

    buffer = io.StringIO()
    writer = csv.writer(buffer)

    # Column header row.
    writer.writerow([
        "Date", "Amount", "Currency", "Merchant", "Category",
        "Sub Category", "Account Type", "Status", "Is Manual", "Remarks", "Tags"
    ])

    for txn in txns:
        writer.writerow([
            txn.transaction_date.isoformat() if txn.transaction_date else "",
            txn.amount,
            txn.currency,
            txn.merchant_name or "",
            txn.category,
            txn.sub_category,
            txn.account_type,
            txn.status,
            "Yes" if txn.is_manual else "No",
            txn.remarks or "",
            ",".join(txn.tags) if txn.tags else "",
        ])

    return buffer.getvalue()
|
app/features/forecasting/schemas.py
ADDED
|
@@ -0,0 +1,15 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from typing import List, Optional
|
| 2 |
+
from pydantic import BaseModel
|
| 3 |
+
from decimal import Decimal
|
| 4 |
+
|
| 5 |
+
class CategoryForecast(BaseModel):
    """Per-category line item inside a forecast response."""
    category: str
    predicted_amount: Decimal
    # Human-readable explanation of how this category's figure was derived.
    reason: str
|
| 9 |
+
|
| 10 |
+
class ForecastResponse(BaseModel):
    """Aggregate spending forecast returned by the forecasting service."""
    amount: Decimal
    reason: str
    # e.g. a label such as the month being forecast — set by the service.
    time_frame: str
    confidence: str = "high"
    # NOTE: a mutable default is safe here — Pydantic copies field defaults
    # per instance (unlike plain Python function defaults).
    breakdown: List[CategoryForecast] = []
|
app/features/forecasting/service.py
ADDED
|
@@ -0,0 +1,394 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import logging
|
| 2 |
+
import json
|
| 3 |
+
from decimal import Decimal
|
| 4 |
+
from typing import List, Optional
|
| 5 |
+
from fastapi import Depends
|
| 6 |
+
|
| 7 |
+
from app.core.config import get_settings
|
| 8 |
+
|
| 9 |
+
# Lazy import Prophet to avoid crashes if not installed/compiled
|
| 10 |
+
# Prophet is used only if manually installed (not available on Vercel)
|
| 11 |
+
try:
|
| 12 |
+
from prophet import Prophet
|
| 13 |
+
import pandas as pd
|
| 14 |
+
PROPHET_AVAILABLE = True
|
| 15 |
+
except ImportError:
|
| 16 |
+
PROPHET_AVAILABLE = False
|
| 17 |
+
|
| 18 |
+
logger = logging.getLogger(__name__)
|
| 19 |
+
settings = get_settings()
|
| 20 |
+
|
| 21 |
+
from datetime import date, timedelta
|
| 22 |
+
from app.features.forecasting.schemas import ForecastResponse, CategoryForecast
|
| 23 |
+
from app.core.llm import get_llm_service, LLMService
|
| 24 |
+
import calendar
|
| 25 |
+
|
| 26 |
+
class ForecastingService:
    """Predicts upcoming spending.

    Strategy:
      * Prophet (when installed and ``USE_AI_FORECASTING`` is on) forecasts
        each category separately, with a dedicated heuristic for fixed
        recurring expenses (rent, SIPs, bills).
      * An LLM fallback produces a coarse monthly total when Prophet is
        unavailable.
    """

    def __init__(self, llm: LLMService = Depends(get_llm_service)):
        # Supports both FastAPI dependency injection and manual instantiation
        # (e.g. from a scheduler job), where `llm` would still be the raw
        # `Depends(...)` sentinel rather than an LLMService instance.
        from app.core.llm import LLMService as ActualLLMService
        if isinstance(llm, ActualLLMService):
            self.llm = llm
        else:
            from app.core.llm import get_llm_service
            self.llm = get_llm_service()

    async def calculate_safe_to_spend(self, category_daily_history: List[dict], monthly_breakdown: Optional[List[dict]] = None) -> ForecastResponse:
        """Forecast total expenses for the next full calendar month.

        Args:
            category_daily_history: rows shaped like
                ``{'ds': <date>, 'y': <amount>, 'category': <str>}``.
            monthly_breakdown: per-month category totals, used to detect
                recurring bills. Defaults to an empty list.
                (FIX: the original used a mutable default argument ``= []``.)
        """
        if monthly_breakdown is None:
            monthly_breakdown = []

        today = date.today()
        # First day of NEXT month.
        if today.month == 12:
            next_month_start = date(today.year + 1, 1, 1)
        else:
            next_month_start = date(today.year, today.month + 1, 1)

        # Last day of NEXT month.
        _, last_day = calendar.monthrange(next_month_start.year, next_month_start.month)
        next_month_end = date(next_month_start.year, next_month_start.month, last_day)

        days_in_next_month = (next_month_end - next_month_start).days + 1
        time_frame_str = f"Next Month ({next_month_start.strftime('%B %Y')})"

        # Category-wise Prophet forecast is the preferred path.
        use_prophet = settings.USE_AI_FORECASTING and PROPHET_AVAILABLE

        if use_prophet:
            return await self._calculate_prophet_categorywise(
                category_daily_history, monthly_breakdown,
                next_month_start, next_month_end, time_frame_str,
            )

        # Fall back to the LLM for a coarse total when Prophet is missing.
        return await self._calculate_llm(category_daily_history, monthly_breakdown, time_frame_str, days_in_next_month)

    async def _calculate_prophet_categorywise(self, category_daily_history: List[dict], monthly_breakdown: List[dict], start_date: date, end_date: date, time_frame: str) -> ForecastResponse:
        """Forecast each category individually.

        Per category, one of three models is chosen:
          1. median of monthly totals for fixed/recurring expenses,
          2. Prophet for frequent discretionary spending (>= 15 txns),
          3. a simple daily-average projection otherwise.
        """
        import statistics  # hoisted: was imported inside the per-category loop

        if not category_daily_history:
            return ForecastResponse(amount=Decimal("0.00"), reason="No historical data found.", time_frame=time_frame, confidence="low")

        try:
            df_all = pd.DataFrame(category_daily_history)
            if df_all.empty:
                return ForecastResponse(amount=Decimal("0.00"), reason="No historical data found.", time_frame=time_frame, confidence="low")

            categories = df_all['category'].unique()

            # Collect per-category monthly totals for the recurring-expense
            # check. Pseudo-categories starting with '_' are explicit
            # recurring bills elsewhere and would skew trend detection.
            cat_monthly_totals: dict = {}
            for month_data in monthly_breakdown:
                for cat, amount in month_data.get("categories", {}).items():
                    if cat.startswith("_"):
                        continue
                    cat_monthly_totals.setdefault(cat, []).append(amount)

            breakdown: List[CategoryForecast] = []
            total_amount = Decimal("0.00")

            # Actual span of history present in the supplied data.
            min_history_date = pd.to_datetime(df_all['ds']).min()
            today = date.today()
            total_history_days = (today - min_history_date.date()).days + 1
            if total_history_days < 30:
                total_history_days = 120  # safety fallback for tiny histories

            # Days to predict: from the last historical date through the end
            # of next month.
            max_history_date = pd.to_datetime(df_all['ds']).max()
            days_to_predict = (end_date - max_history_date.date()).days

            if days_to_predict <= 0:
                return ForecastResponse(amount=Decimal("0.00"), reason="Data already covers the forecast period.", time_frame=time_frame)

            for cat in categories:
                cat_df_raw = df_all[df_all['category'] == cat][['ds', 'y']].copy()
                cat_df_raw['ds'] = pd.to_datetime(cat_df_raw['ds'])

                # 1. FIXED/RECURRING DETECTION (most accurate for Rent, SIPs, etc.)
                monthly_values = cat_monthly_totals.get(cat, [])
                num_months = len(monthly_values)
                num_txns = len(cat_df_raw)

                # Appears monthly but with few transactions (<= 4 per month
                # on average) -> treat as a fixed expense.
                is_recurring = num_months >= 1 and (num_txns / max(1, num_months)) <= 4

                if is_recurring:
                    # Median of monthly totals is robust to one-off spikes.
                    predicted_monthly = statistics.median(monthly_values) if monthly_values else 0
                    cat_total = Decimal(str(max(0, round(predicted_monthly, 2))))
                    reason = "Projected based on monthly recurring patterns (Rent/SIP/RD/Bills)."

                # 2. PROPHET FOR FREQUENT DISCRETIONARY (Food, Shopping, etc.)
                elif num_txns >= 15:
                    try:
                        # CRITICAL: fill missing days with 0 so Prophet does
                        # not mistake a sparse expense for a daily one.
                        all_dates = pd.date_range(start=min_history_date, end=max_history_date, freq='D')
                        cat_df = cat_df_raw.set_index('ds').reindex(all_dates, fill_value=0).reset_index()
                        cat_df.columns = ['ds', 'y']

                        m = Prophet(
                            daily_seasonality=False,
                            weekly_seasonality=True,
                            yearly_seasonality=False,
                            changepoint_prior_scale=0.01,  # be conservative about trend shifts
                        )
                        m.fit(cat_df)

                        future = m.make_future_dataframe(periods=days_to_predict)
                        forecast = m.predict(future)

                        start_dt = pd.to_datetime(start_date)
                        end_dt = pd.to_datetime(end_date)
                        mask = (forecast['ds'] >= start_dt) & (forecast['ds'] <= end_dt)
                        predicted = forecast[mask]['yhat'].sum()

                        # Safety cap: a forecast should not realistically
                        # exceed 2x the historical monthly average.
                        hist_monthly_avg = (cat_df_raw['y'].sum() / total_history_days) * 30
                        predicted = min(predicted, hist_monthly_avg * 2)

                        cat_total = Decimal(str(max(0, round(predicted, 2))))
                        reason = f"Trend-based forecast using {num_txns} data points."
                    except Exception as e:
                        logger.error(f"Error forecasting category {cat}: {e}")
                        avg_daily = cat_df_raw['y'].sum() / total_history_days
                        cat_total = Decimal(str(max(0, round(avg_daily * 30, 2))))
                        reason = "Forecasting model error; used historical monthly average."

                # 3. FALLBACK: SIMPLE MOVING AVERAGE
                else:
                    avg_daily = cat_df_raw['y'].sum() / total_history_days
                    cat_total = Decimal(str(max(0, round(avg_daily * 30, 2))))
                    reason = "Forecasted using 4-month daily spend average."

                if cat_total > 50:  # filter out noise categories
                    breakdown.append(CategoryForecast(
                        category=cat,
                        predicted_amount=cat_total,
                        reason=reason,
                    ))
                    total_amount += cat_total

            breakdown.sort(key=lambda x: x.predicted_amount, reverse=True)

            return ForecastResponse(
                amount=total_amount,
                reason=f"Multi-model forecast for {len(breakdown)} categories, optimized for monthly recurring expenses and discretionary trends.",
                time_frame=time_frame,
                confidence="high",
                breakdown=breakdown,
            )

        except Exception as e:
            # FIX: the original attached two identical `except Exception`
            # clauses to this try; the second was unreachable dead code.
            logger.error(f"Prophet category forecasting error: {e}")
            return ForecastResponse(
                amount=Decimal("0.00"),
                reason="System error during forecasting.",
                time_frame=time_frame,
                confidence="low",
            )

    async def _calculate_llm(self, category_daily_history: List[dict], monthly_breakdown: List[dict], time_frame: str, days: int) -> ForecastResponse:
        """Use the LLM to predict next month's expenses (coarse fallback)."""
        default_response = ForecastResponse(
            amount=Decimal("0.00"),
            reason="Insufficient data/AI service unavailable.",
            time_frame=time_frame,
            confidence="low",
        )

        if not self.llm.is_enabled:
            return default_response

        if not category_daily_history or len(category_daily_history) < 5:
            return ForecastResponse(
                amount=Decimal("0.00"),
                reason="Need more historical data to generate an AI forecast.",
                time_frame=time_frame,
                confidence="low",
            )

        try:
            # FIX: build the LLM context with plain dicts. The original used
            # pandas here, but this method is exactly the fallback that runs
            # when Prophet/pandas failed to import, which raised NameError.
            category_totals: dict = {}
            daily_totals: dict = {}
            for row in category_daily_history:
                amount = float(row['y'])
                category_totals[row['category']] = category_totals.get(row['category'], 0.0) + amount
                day_key = str(row['ds'])
                daily_totals[day_key] = daily_totals.get(day_key, 0.0) + amount
            # Keep only the most recent 90 days (by date order).
            recent_daily = dict(sorted(daily_totals.items())[-90:])

            prompt = f"""
            Analyze the following financial data to predict expenses for the NEXT {days} DAYS (full month).

            1. Daily History Summary: {json.dumps(recent_daily)}
            2. Category Totals (Last 120 days): {json.dumps(category_totals)}
            3. Monthly Category Trends (Key for recurring bills like Rent): {json.dumps(monthly_breakdown)}

            Task:
            - Analyze the 'Monthly Category Trends' to identify recurring payments (e.g., Rent, Insurance).
            - Note: Categories starting with '_' like '_Rent' are explicit recurring bills.
            - Predict discretionary spending based on 'Daily History'.

            Return the TOTAL predicted expenses for the full {days} day month.

            You must return a valid JSON object.
            Required JSON structure:
            {{
                "predicted_total": float,
                "reason": "short explanation",
                "breakdown": [
                    {{ "category": "string", "predicted_amount": float, "reason": "string" }}
                ]
            }}
            """

            system_prompt = "You are a financial intelligence engine. Always output valid JSON."
            data = await self.llm.generate_json(prompt, system_prompt=system_prompt, temperature=0.1, timeout=60.0)

            if data:
                return ForecastResponse(
                    amount=Decimal(str(max(0, data.get("predicted_total", 0)))),
                    reason=data.get("reason", "Based on analysis of spending cycles."),
                    time_frame=time_frame,
                    confidence="medium",
                    breakdown=data.get("breakdown", []),
                )

        except Exception as e:
            logger.error(f"LLM forecasting error: {e}")

        return default_response

    async def _get_llm_breakdown(self, category_history: List[dict], total_forecast: float, days: int) -> dict:
        """Ask the LLM to allocate a known forecast total across categories.

        Returns a dict with ``reason`` and ``breakdown`` keys; falls back to a
        generic reason and an empty breakdown on any failure.
        """
        try:
            prompt = f"""
            Given the historical category spending and a STATISTICALLY FORECASTED total of {total_forecast}
            for the REMAINING {days} DAYS of the month:
            1. Allocate the forecasted total to categories based on history (considering end-of-month dues).
            2. Explain the forecast trend in 1 sentence.

            Category History (90d): {json.dumps(category_history)}

            Return ONLY a JSON object:
            {{
                "reason": "string",
                "breakdown": [ {{ "category": "string", "predicted_amount": float, "reason": "string" }} ]
            }}
            """

            data = await self.llm.generate_json(prompt, temperature=0.1, timeout=60.0)
            if data:
                return data
        except Exception as e:
            logger.error(f"LLM breakdown error: {e}")

        return {"reason": "Statistical forecast.", "breakdown": []}

    async def predict_discretionary_buffer(self, history_data: List[dict], buffer_days: int = 7) -> dict:
        """Predict discretionary spending for the next ``buffer_days`` days.

        Returns a dict with keys ``predicted_amount``, ``confidence``,
        ``range_low``, ``range_high`` (all amounts as Decimal) and ``method``
        (one of "prophet", "llm", "fallback").
        """
        default_result = {
            "predicted_amount": Decimal("500"),  # minimum fallback buffer
            "confidence": "low",
            "range_low": Decimal("500"),
            "range_high": Decimal("500"),
            "method": "fallback",
        }

        if not history_data or len(history_data) < 7:
            return default_result

        use_prophet = settings.USE_AI_FORECASTING and PROPHET_AVAILABLE

        try:
            if use_prophet:
                # Prophet-based prediction with an 80% confidence interval.
                df = pd.DataFrame(history_data)
                df['ds'] = pd.to_datetime(df['ds'])

                m = Prophet(interval_width=0.8)
                m.fit(df)

                future = m.make_future_dataframe(periods=buffer_days)
                forecast = m.predict(future)

                # Only sum predictions strictly after the last observed day.
                last_date = df['ds'].max()
                future_mask = forecast['ds'] > last_date
                future_forecast = forecast[future_mask]

                predicted_total = max(0, future_forecast['yhat'].sum())
                range_low = max(0, future_forecast['yhat_lower'].sum())
                range_high = max(0, future_forecast['yhat_upper'].sum())

                return {
                    "predicted_amount": Decimal(str(predicted_total)),
                    "confidence": "high",
                    "range_low": Decimal(str(range_low)),
                    "range_high": Decimal(str(range_high)),
                    "method": "prophet",
                }

            elif self.llm.is_enabled:
                # LLM-based prediction over the last 30 days of history.
                history_summary = [
                    {"date": d['ds'], "amount": float(d['y'])}
                    for d in history_data[-30:]
                ]

                prompt = f"""
                Analyze the following 30-day DISCRETIONARY expense history (Food, Shopping, Entertainment, Transport, etc.).
                Predict the TOTAL discretionary spending for the NEXT {buffer_days} DAYS.

                Daily History: {json.dumps(history_summary)}

                Consider:
                - Day of week patterns (weekends vs weekdays)
                - Recent trends
                - Typical daily variation

                Return ONLY a JSON object:
                {{
                    "predicted_total": float,
                    "confidence_low": float,
                    "confidence_high": float
                }}
                """

                data = await self.llm.generate_json(prompt, temperature=0.1, timeout=60.0)

                if data:
                    predicted = max(0, data.get("predicted_total", 0))
                    low = max(0, data.get("confidence_low", predicted * 0.8))
                    high = max(0, data.get("confidence_high", predicted * 1.2))

                    return {
                        "predicted_amount": Decimal(str(predicted)),
                        "confidence": "medium",
                        "range_low": Decimal(str(low)),
                        "range_high": Decimal(str(high)),
                        "method": "llm",
                    }

        except Exception as e:
            logger.error(f"Buffer prediction error: {e}")

        return default_result
|
| 394 |
+
|
app/features/goals/models.py
ADDED
|
@@ -0,0 +1,27 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from datetime import date
|
| 2 |
+
from uuid import UUID
|
| 3 |
+
from sqlalchemy import Column, String, Float, Date, Boolean, ForeignKey, DateTime, func
|
| 4 |
+
from sqlalchemy.orm import relationship
|
| 5 |
+
from sqlalchemy.dialects.postgresql import UUID as PG_UUID
|
| 6 |
+
from app.core.database import Base
|
| 7 |
+
|
| 8 |
+
class Goal(Base):
    """A savings goal: a target amount to accumulate by a target date.

    The derived ``monthly_contribution`` is the amount reserved ("frozen")
    each month so the goal is reached on time.
    """
    __tablename__ = "goals"

    # NOTE(review): relies on the Postgres uuid-ossp extension providing
    # uuid_generate_v4() — confirm the extension is enabled in every environment.
    id = Column(PG_UUID(as_uuid=True), primary_key=True, default=func.uuid_generate_v4())
    user_id = Column(PG_UUID(as_uuid=True), ForeignKey("users.id"), nullable=False)

    name = Column(String, nullable=False)
    # NOTE(review): Float for monetary values is lossy; Numeric/Decimal would
    # be safer — changing it is a schema migration, so flagged only.
    target_amount = Column(Float, nullable=False)
    target_date = Column(Date, nullable=False)

    # The amount we need to "freeze" monthly to hit this goal.
    monthly_contribution = Column(Float, nullable=False, default=0.0)

    # How much has been "saved" logically so far (optional tracking field).
    current_saved = Column(Float, default=0.0)

    is_active = Column(Boolean, default=True)
    created_at = Column(DateTime, default=func.now())

    user = relationship("User", backref="goals")
|
app/features/goals/router.py
ADDED
|
@@ -0,0 +1,47 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from typing import Annotated, List, Optional
|
| 2 |
+
from uuid import UUID
|
| 3 |
+
from fastapi import APIRouter, Depends, HTTPException, status
|
| 4 |
+
from sqlalchemy.ext.asyncio import AsyncSession
|
| 5 |
+
|
| 6 |
+
from app.core.database import get_db
|
| 7 |
+
from app.features.auth.deps import get_current_user
|
| 8 |
+
from app.features.auth.models import User
|
| 9 |
+
from app.features.goals.schemas import GoalCreate, GoalResponse, FeasibilityCheck
|
| 10 |
+
from app.features.goals.service import GoalService
|
| 11 |
+
|
| 12 |
+
router = APIRouter()
|
| 13 |
+
|
| 14 |
+
@router.post("/feasibility", response_model=FeasibilityCheck)
async def check_goal_feasibility(
    goal_data: GoalCreate,
    current_user: Annotated[User, Depends(get_current_user)],
    db: Annotated[AsyncSession, Depends(get_db)],
    service: Annotated[GoalService, Depends()]
):
    """Check whether a proposed goal is affordable before creating it.

    Returns the required monthly savings versus the user's available
    liquidity, without persisting anything.
    """
    return await service.check_feasibility(db, current_user.id, goal_data)
|
| 22 |
+
|
| 23 |
+
@router.post("/", response_model=GoalResponse)
async def create_goal(
    goal_data: GoalCreate,
    current_user: Annotated[User, Depends(get_current_user)],
    db: Annotated[AsyncSession, Depends(get_db)],
    service: Annotated[GoalService, Depends()]
):
    """Create a new savings goal for the authenticated user."""
    return await service.create_goal(db, current_user.id, goal_data)
|
| 31 |
+
|
| 32 |
+
@router.get("/", response_model=List[GoalResponse])
async def get_my_goals(
    current_user: Annotated[User, Depends(get_current_user)],
    db: Annotated[AsyncSession, Depends(get_db)],
    service: Annotated[GoalService, Depends()]
):
    """List the authenticated user's active goals."""
    return await service.get_active_goals(db, current_user.id)
|
| 39 |
+
|
| 40 |
+
@router.delete("/{goal_id}", status_code=status.HTTP_204_NO_CONTENT)
async def delete_goal(
    goal_id: UUID,
    current_user: Annotated[User, Depends(get_current_user)],
    db: Annotated[AsyncSession, Depends(get_db)],
    service: Annotated[GoalService, Depends()]
):
    """Delete one of the user's goals.

    NOTE(review): the service is a no-op when the goal does not exist or
    belongs to another user, so this returns 204 either way — confirm a 404
    is not expected by the client.
    """
    await service.delete_goal(db, current_user.id, goal_id)
|
app/features/goals/schemas.py
ADDED
|
@@ -0,0 +1,34 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from pydantic import BaseModel
|
| 2 |
+
from datetime import date
|
| 3 |
+
from typing import Optional
|
| 4 |
+
from uuid import UUID
|
| 5 |
+
|
| 6 |
+
class GoalBase(BaseModel):
    """Fields shared by all goal schemas: what to save and by when."""
    name: str
    target_amount: float
    target_date: date
|
| 10 |
+
|
| 11 |
+
class GoalCreate(GoalBase):
    """Payload for creating a goal; identical to the shared base fields."""
    pass
|
| 13 |
+
|
| 14 |
+
class GoalUpdate(BaseModel):
    """Partial-update payload: every field optional; omitted fields unchanged."""
    name: Optional[str] = None
    target_amount: Optional[float] = None
    target_date: Optional[date] = None
    is_active: Optional[bool] = None
|
| 19 |
+
|
| 20 |
+
class GoalResponse(GoalBase):
    """Goal as returned by the API, including server-derived fields."""
    id: UUID
    user_id: UUID
    monthly_contribution: float
    current_saved: float
    is_active: bool

    class Config:
        # Allow construction directly from the SQLAlchemy Goal model.
        from_attributes = True
|
| 29 |
+
|
| 30 |
+
class FeasibilityCheck(BaseModel):
    """Result of a pre-creation affordability check for a proposed goal."""
    is_feasible: bool
    required_monthly_savings: float
    available_monthly_liquidity: float
    message: str
|
app/features/goals/service.py
ADDED
|
@@ -0,0 +1,118 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import logging
|
| 2 |
+
from uuid import UUID
|
| 3 |
+
from datetime import date
|
| 4 |
+
from typing import List, Optional
|
| 5 |
+
from sqlalchemy.ext.asyncio import AsyncSession
|
| 6 |
+
from sqlalchemy import select, func, desc
|
| 7 |
+
|
| 8 |
+
from app.features.goals.models import Goal
|
| 9 |
+
from app.features.goals.schemas import GoalCreate, FeasibilityCheck
|
| 10 |
+
from app.features.analytics.service import AnalyticsService
|
| 11 |
+
|
| 12 |
+
logger = logging.getLogger(__name__)
|
| 13 |
+
|
| 14 |
+
class GoalService:
    """Business logic for savings goals: feasibility checks, CRUD, and the
    total monthly contribution reserved across active goals."""

    def __init__(self):
        self.analytics_service = AnalyticsService()

    def _calculate_monthly_contribution(self, target_amount: float, target_date: date) -> float:
        """Return the monthly saving needed to reach `target_amount` by `target_date`.

        If the target date is today or already past, the full amount is due
        now. Months remaining are approximated as
        (year diff * 12) + (month diff), floored at 1 so a same-month target
        still counts as one month.
        """
        today = date.today()
        if target_date <= today:
            return target_amount

        months_remaining = (target_date.year - today.year) * 12 + (target_date.month - today.month)
        months_remaining = max(1, months_remaining)

        return round(target_amount / months_remaining, 2)

    async def check_feasibility(self, db: AsyncSession, user_id: UUID, goal: GoalCreate) -> FeasibilityCheck:
        """Compare the required monthly savings against the user's current
        safe-to-spend buffer.

        The current safe-to-spend amount is used as a proxy for monthly free
        cash flow; a rolling multi-month income/expense average would be a
        more accurate liquidity estimate (TODO).
        FIX: removed an unused `get_monthly_summary` call that performed DB
        work without contributing to the result.
        """
        required_savings = self._calculate_monthly_contribution(goal.target_amount, goal.target_date)

        safe_response = await self.analytics_service.calculate_safe_to_spend_amount(db, user_id)
        current_safe_capacity = float(safe_response.safe_to_spend)

        is_feasible = current_safe_capacity >= required_savings

        msg = "✅ Plan looks solid." if is_feasible else "⚠️ This requires more than your current safe buffer."

        return FeasibilityCheck(
            is_feasible=is_feasible,
            required_monthly_savings=required_savings,
            available_monthly_liquidity=current_safe_capacity,
            message=msg,
        )

    async def create_goal(self, db: AsyncSession, user_id: UUID, goal_data: GoalCreate) -> Goal:
        """Persist a new active goal with its derived monthly contribution."""
        monthly_contrib = self._calculate_monthly_contribution(goal_data.target_amount, goal_data.target_date)

        goal = Goal(
            user_id=user_id,
            name=goal_data.name,
            target_amount=goal_data.target_amount,
            target_date=goal_data.target_date,
            monthly_contribution=monthly_contrib,
            is_active=True,
        )
        db.add(goal)
        await db.commit()
        await db.refresh(goal)
        return goal

    async def get_active_goals(self, db: AsyncSession, user_id: UUID) -> List[Goal]:
        """Return the user's active goals ordered by soonest target date."""
        result = await db.execute(
            select(Goal)
            .where(Goal.user_id == user_id)
            .where(Goal.is_active == True)  # noqa: E712 — SQLAlchemy expression, not a Python comparison
            .order_by(Goal.target_date)
        )
        return result.scalars().all()

    async def get_total_monthly_goal_contribution(self, db: AsyncSession, user_id: UUID) -> float:
        """Sum monthly contributions across active goals (0.0 when none)."""
        result = await db.execute(
            select(func.sum(Goal.monthly_contribution))
            .where(Goal.user_id == user_id)
            .where(Goal.is_active == True)  # noqa: E712
        )
        return result.scalar() or 0.0

    async def delete_goal(self, db: AsyncSession, user_id: UUID, goal_id: UUID):
        """Delete the goal if it exists and belongs to the user; no-op otherwise."""
        result = await db.execute(select(Goal).where(Goal.id == goal_id, Goal.user_id == user_id))
        goal = result.scalar_one_or_none()
        if goal:
            await db.delete(goal)
            await db.commit()
|
app/features/notifications/service.py
ADDED
|
@@ -0,0 +1,462 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import logging
import uuid
from datetime import datetime, timedelta
from typing import Any, List, Optional

from fastapi import Depends
from sqlalchemy import select, and_
from sqlalchemy.ext.asyncio import AsyncSession

from app.core.config import get_settings
from app.core.database import get_db
from app.core.email import send_email
from app.core.llm import get_llm_service, LLMService
from app.features.auth.models import User
from app.features.bills.models import Bill
from app.features.transactions.models import Transaction, TransactionStatus
|
| 16 |
+
|
| 17 |
+
settings = get_settings()
|
| 18 |
+
logger = logging.getLogger(__name__)
|
| 19 |
+
|
| 20 |
+
class NotificationService:
|
| 21 |
+
def __init__(self, db: AsyncSession = Depends(get_db), llm: LLMService = Depends(get_llm_service)):
|
| 22 |
+
self.db = db
|
| 23 |
+
# If instantiated manually (e.g. in scheduler), llm will be the Depends object
|
| 24 |
+
from app.core.llm import LLMService as ActualLLMService
|
| 25 |
+
if isinstance(llm, ActualLLMService):
|
| 26 |
+
self.llm = llm
|
| 27 |
+
else:
|
| 28 |
+
from app.core.llm import get_llm_service
|
| 29 |
+
self.llm = get_llm_service()
|
| 30 |
+
|
| 31 |
+
def _derive_name(self, email: str, full_name: Optional[str] = None) -> str:
|
| 32 |
+
if full_name:
|
| 33 |
+
return full_name
|
| 34 |
+
return email.split('@')[0].replace('.', ' ').title()
|
| 35 |
+
|
| 36 |
+
def _get_html_wrapper(self, title: str, content: str, cta_text: Optional[str] = None, cta_url: Optional[str] = None, footer_note: Optional[str] = None) -> str:
|
| 37 |
+
"""Premium 'Grip Neon' design system for high-impact emails."""
|
| 38 |
+
cta_html = ""
|
| 39 |
+
if cta_text and cta_url:
|
| 40 |
+
cta_html = f"""
|
| 41 |
+
<div style="margin: 40px 0; text-align: center;">
|
| 42 |
+
<a href="{cta_url}" style="background: linear-gradient(135deg, #111 0%, #333 100%); color: #fff; padding: 18px 36px; text-decoration: none; border-radius: 14px; font-weight: 800; display: inline-block; font-size: 16px; letter-spacing: 0.02em; border: 1px solid rgba(255,255,255,0.1); box-shadow: 0 10px 20px -5px rgba(0,0,0,0.3);">{cta_text}</a>
|
| 43 |
+
</div>
|
| 44 |
+
"""
|
| 45 |
+
|
| 46 |
+
footer_note_html = ""
|
| 47 |
+
if footer_note:
|
| 48 |
+
footer_note_html = f"""
|
| 49 |
+
<p style="font-size: 13px; color: #64748b; border-top: 1px solid #f1f5f9; padding-top: 20px; margin-top: 30px; font-style: italic;">
|
| 50 |
+
{footer_note}
|
| 51 |
+
</p>
|
| 52 |
+
"""
|
| 53 |
+
|
| 54 |
+
return f"""
|
| 55 |
+
<html>
|
| 56 |
+
<body style="font-family: 'Outfit', 'Inter', sans-serif; color: #1e293b; line-height: 1.6; margin: 0; padding: 0; background-color: #0c0e12;">
|
| 57 |
+
<div style="max-width: 600px; margin: 40px auto; background: #ffffff; border-radius: 32px; overflow: hidden; box-shadow: 0 40px 100px -20px rgba(0,0,0,0.5);">
|
| 58 |
+
<!-- Header with Neon Pulse -->
|
| 59 |
+
<div style="background: #000; padding: 40px; text-align: left; position: relative;">
|
| 60 |
+
<div style="display: flex; align-items: center;">
|
| 61 |
+
<div style="width: 12px; height: 12px; background: #4F46E5; border-radius: 50%; box-shadow: 0 0 15px #4F46E5; margin-right: 12px;"></div>
|
| 62 |
+
<span style="font-size: 28px; font-weight: 900; letter-spacing: -0.04em; color: #ffffff;">{settings.APP_NAME}</span>
|
| 63 |
+
</div>
|
| 64 |
+
<p style="color: #94a3b8; font-size: 12px; margin: 8px 0 0 24px; text-transform: uppercase; letter-spacing: 0.2em; font-weight: 600;">Autonomous Intelligence</p>
|
| 65 |
+
</div>
|
| 66 |
+
|
| 67 |
+
<div style="padding: 50px 40px;">
|
| 68 |
+
<h2 style="color: #111; margin-top: 0; font-size: 32px; font-weight: 800; letter-spacing: -0.03em; line-height: 1.1;">{title}</h2>
|
| 69 |
+
|
| 70 |
+
<div style="color: #475569; font-size: 17px; margin-top: 25px;">
|
| 71 |
+
{content}
|
| 72 |
+
</div>
|
| 73 |
+
|
| 74 |
+
{cta_html}
|
| 75 |
+
{footer_note_html}
|
| 76 |
+
|
| 77 |
+
<div style="margin-top: 50px; border-top: 1px solid #f1f5f9; padding-top: 30px;">
|
| 78 |
+
<p style="font-size: 14px; color: #94a3b8; margin: 0;">Automated with ❤️ by</p>
|
| 79 |
+
<p style="font-size: 16px; font-weight: 900; color: #111; margin: 4px 0; letter-spacing: -0.01em;">The GRIP Engine</p>
|
| 80 |
+
</div>
|
| 81 |
+
</div>
|
| 82 |
+
</div>
|
| 83 |
+
<div style="max-width: 600px; margin: 0 auto; text-align: center; padding-bottom: 40px;">
|
| 84 |
+
<p style="font-size: 11px; color: #475569; letter-spacing: 0.05em;">SECURE • AUTONOMOUS • INTELLIGENT</p>
|
| 85 |
+
</div>
|
| 86 |
+
</body>
|
| 87 |
+
</html>
|
| 88 |
+
"""
|
| 89 |
+
|
| 90 |
+
async def notify_gmail_disconnection(self, user_id: uuid.UUID, email: str, full_name: str = None):
|
| 91 |
+
"""Notify user that their Gmail connection has expired or been revoked."""
|
| 92 |
+
name = self._derive_name(email, full_name)
|
| 93 |
+
subject = f"Action Required: {settings.APP_NAME} Connection Lost"
|
| 94 |
+
content = f"""
|
| 95 |
+
<p>Hello {name},</p>
|
| 96 |
+
<p>Your Gmail connection for <strong>{email}</strong> has expired or been revoked.</p>
|
| 97 |
+
<p>{settings.APP_NAME} is unable to automatically sync your latest transactions. Please reconnect your account to resume automated financial intelligence.</p>
|
| 98 |
+
"""
|
| 99 |
+
html = self._get_html_wrapper(
|
| 100 |
+
title="Connection Lost",
|
| 101 |
+
content=content,
|
| 102 |
+
cta_text="Reconnect Gmail",
|
| 103 |
+
cta_url=f"{settings.FRONTEND_ORIGIN}/sync",
|
| 104 |
+
footer_note="If you didn't expect this, it might be due to Google's security policy for applications in testing mode."
|
| 105 |
+
)
|
| 106 |
+
send_email(email, subject, html)
|
| 107 |
+
|
| 108 |
+
async def send_welcome_email(self, email: str, full_name: Optional[str] = None):
|
| 109 |
+
"""Send a witty, high-premium welcome email to new users."""
|
| 110 |
+
name = self._derive_name(email, full_name)
|
| 111 |
+
subject = f"Initiating Grip Protocol: Welcome, {name}"
|
| 112 |
+
|
| 113 |
+
welcome_message = f"Welcome to {settings.APP_NAME}. You've just taken the first step toward absolute financial sovereignty. Your inbox is now your intelligence hub."
|
| 114 |
+
|
| 115 |
+
if self.llm.is_enabled:
|
| 116 |
+
prompt = f"""
|
| 117 |
+
Task: Write a witty, premium welcome message for {name}.
|
| 118 |
+
Context: They just joined Grip, an autonomous financial intelligence hub.
|
| 119 |
+
- Persona: Futuristic, cheeky financial AI.
|
| 120 |
+
- Max 30 words. No quotes.
|
| 121 |
+
Example: 'Initiation complete, {name}. I'm now minding your balance while you focus on the vision. Welcome to the hub.'
|
| 122 |
+
"""
|
| 123 |
+
resp = await self.llm.generate_response(prompt, temperature=0.8, timeout=30.0)
|
| 124 |
+
if resp:
|
| 125 |
+
welcome_message = resp.strip().replace('"', '')
|
| 126 |
+
|
| 127 |
+
content = f"""
|
| 128 |
+
<p>Hello {name},</p>
|
| 129 |
+
<div style="background: #fff; border: 1px solid #e2e8f0; padding: 25px; border-radius: 16px; margin: 25px 0;">
|
| 130 |
+
<p style="margin: 0; font-size: 18px; color: #111; font-style: italic; line-height: 1.6;">"{welcome_message}"</p>
|
| 131 |
+
</div>
|
| 132 |
+
<p>Grip is now scanning for your financial 'Sureties' and building your high-precision Wealth map. Connect Gmail to unlock full autonomous mode.</p>
|
| 133 |
+
"""
|
| 134 |
+
html = self._get_html_wrapper(
|
| 135 |
+
title="Welcome to the Hub",
|
| 136 |
+
content=content,
|
| 137 |
+
cta_text="Enter Dashboard",
|
| 138 |
+
cta_url=f"{settings.FRONTEND_ORIGIN}/dashboard"
|
| 139 |
+
)
|
| 140 |
+
send_email(email, subject, html)
|
| 141 |
+
|
| 142 |
+
async def send_surety_reminder(self, user_id: uuid.UUID, full_name: str, bill_title: str, amount: float, due_date: datetime):
|
| 143 |
+
"""Send a reminder before a fixed obligation (surety) is due."""
|
| 144 |
+
result = await self.db.execute(select(User).where(User.id == user_id))
|
| 145 |
+
user = result.scalar_one_or_none()
|
| 146 |
+
if not user or not user.email: return
|
| 147 |
+
|
| 148 |
+
name = self._derive_name(user.email, full_name)
|
| 149 |
+
subject = f"Reminder: Payment Due for {bill_title}"
|
| 150 |
+
content = f"""
|
| 151 |
+
<p>Hello {name},</p>
|
| 152 |
+
<p>This is a reminder that your recurring payment for <strong>{bill_title}</strong> is due soon.</p>
|
| 153 |
+
<div style="background: #f8fafc; padding: 25px; border-radius: 12px; margin: 25px 0; border: 1px solid #f1f5f9; text-align: center;">
|
| 154 |
+
<p style="margin: 0; font-size: 14px; text-transform: uppercase; color: #64748b; letter-spacing: 0.05em;">Amount Due</p>
|
| 155 |
+
<p style="margin: 5px 0; font-size: 32px; font-weight: 800; color: #1e293b;">₹{abs(amount):,.2f}</p>
|
| 156 |
+
<p style="margin: 10px 0 0 0; font-size: 16px; color: #475569;">Due on <strong>{due_date.strftime('%d %B, %Y')}</strong></p>
|
| 157 |
+
</div>
|
| 158 |
+
<p>Ensure you have sufficient funds to avoid any late fees.</p>
|
| 159 |
+
"""
|
| 160 |
+
html = self._get_html_wrapper(
|
| 161 |
+
title="Payment Reminder",
|
| 162 |
+
content=content,
|
| 163 |
+
cta_text="View Obligations",
|
| 164 |
+
cta_url=f"{settings.FRONTEND_ORIGIN}/transactions?view=custom&category=Bills"
|
| 165 |
+
)
|
| 166 |
+
send_email(user.email, subject, html)
|
| 167 |
+
|
| 168 |
+
async def send_spending_insight(self, user_id: uuid.UUID, full_name: str, category: str, amount: float, percentage_increase: float):
|
| 169 |
+
"""Notify user about abnormal spending patterns with a cheeky 'Roast'."""
|
| 170 |
+
result = await self.db.execute(select(User).where(User.id == user_id))
|
| 171 |
+
user = result.scalar_one_or_none()
|
| 172 |
+
if not user or not user.email: return
|
| 173 |
+
|
| 174 |
+
name = self._derive_name(user.email, full_name)
|
| 175 |
+
subject = f"Category Alert: Your {category} spend is getting loud"
|
| 176 |
+
|
| 177 |
+
roast_message = f"We noticed that your spending in {category} is {percentage_increase:.1f}% higher than your usual average this month."
|
| 178 |
+
|
| 179 |
+
if self.llm.is_enabled:
|
| 180 |
+
prompt = f"""
|
| 181 |
+
Persona: Sassy, witty, premium personal CFO.
|
| 182 |
+
Task: Write a funny, slightly brutal 'Roast' for {name} regarding their {category} spending.
|
| 183 |
+
Context: They spent ₹{amount:,.0f} this week, which is {percentage_increase:.1f}% higher than normal.
|
| 184 |
+
- Max 30 words. No quotes, no markdown.
|
| 185 |
+
- Be cheeky. Example: 'Your coffee budget is starting to look like a down payment on a house, {name}. Maybe it's time to learn how a kettle works?'
|
| 186 |
+
"""
|
| 187 |
+
resp = await self.llm.generate_response(prompt, temperature=0.8, timeout=60.0)
|
| 188 |
+
if resp:
|
| 189 |
+
roast_message = resp.strip()
|
| 190 |
+
|
| 191 |
+
content = f"""
|
| 192 |
+
<p>Hello {name},</p>
|
| 193 |
+
<div style="background: #fff; border: 1px solid #fee2e2; padding: 25px; border-radius: 16px; margin: 25px 0;">
|
| 194 |
+
<p style="margin: 0; font-size: 18px; color: #111; font-style: italic; line-height: 1.6;">"{roast_message}"</p>
|
| 195 |
+
</div>
|
| 196 |
+
<div style="background: #f8fafc; padding: 20px; border-radius: 12px; border: 1px solid #f1f5f9; display: flex; justify-content: space-between; align-items: center;">
|
| 197 |
+
<span style="font-size: 14px; color: #64748b;">This Week's {category}:</span>
|
| 198 |
+
<span style="font-size: 20px; font-weight: 800; color: #ef4444;">₹{amount:,.0f}</span>
|
| 199 |
+
</div>
|
| 200 |
+
"""
|
| 201 |
+
html = self._get_html_wrapper(
|
| 202 |
+
title="Spending Alert",
|
| 203 |
+
content=content,
|
| 204 |
+
cta_text="Review Transactions",
|
| 205 |
+
cta_url=f"{settings.FRONTEND_ORIGIN}/analytics"
|
| 206 |
+
)
|
| 207 |
+
send_email(user.email, subject, html)
|
| 208 |
+
|
| 209 |
+
async def send_weekly_summary(self, user_id: uuid.UUID, full_name: str, categories_data: List[dict]):
|
| 210 |
+
"""Send a consolidated weekly spending roast for multiple categories."""
|
| 211 |
+
result = await self.db.execute(select(User).where(User.id == user_id))
|
| 212 |
+
user = result.scalar_one_or_none()
|
| 213 |
+
if not user or not user.email: return
|
| 214 |
+
|
| 215 |
+
name = self._derive_name(user.email, full_name)
|
| 216 |
+
subject = f"Weekly Recap: Your wallet has some explaining to do"
|
| 217 |
+
|
| 218 |
+
# Prepare context for LLM
|
| 219 |
+
context_items = [f"{item['category']}: ₹{item['amount']:,.0f}" for item in categories_data]
|
| 220 |
+
context_str = "\n".join(context_items)
|
| 221 |
+
|
| 222 |
+
roast_message = f"You had some significant spending this week in {', '.join([item['category'] for item in categories_data])}. Keep an eye on your budget!"
|
| 223 |
+
|
| 224 |
+
if self.llm.is_enabled:
|
| 225 |
+
prompt = f"""
|
| 226 |
+
Persona: Sassy, witty, premium personal CFO.
|
| 227 |
+
Task: Write a funny, slightly brutal consolidated 'Roast' for {name} based on their weekly spending across multiple categories.
|
| 228 |
+
Context:
|
| 229 |
+
{context_str}
|
| 230 |
+
|
| 231 |
+
- Max 40 words. No quotes, no markdown.
|
| 232 |
+
- Be cheeky about the combination of things they are spending on.
|
| 233 |
+
"""
|
| 234 |
+
resp = await self.llm.generate_response(prompt, temperature=0.8, timeout=60.0)
|
| 235 |
+
if resp:
|
| 236 |
+
roast_message = resp.strip().replace('"', '')
|
| 237 |
+
|
| 238 |
+
# Build category breakdown HTML
|
| 239 |
+
breakdown_html = ""
|
| 240 |
+
for item in categories_data:
|
| 241 |
+
breakdown_html += f"""
|
| 242 |
+
<div style="background: #f8fafc; padding: 15px; border-radius: 12px; border: 1px solid #f1f5f9; display: flex; justify-content: space-between; align-items: center; margin-bottom: 10px;">
|
| 243 |
+
<span style="font-size: 14px; color: #64748b;">{item['category']}:</span>
|
| 244 |
+
<span style="font-size: 18px; font-weight: 700; color: #ef4444;">₹{item['amount']:,.0f}</span>
|
| 245 |
+
</div>
|
| 246 |
+
"""
|
| 247 |
+
|
| 248 |
+
content = f"""
|
| 249 |
+
<p>Hello {name},</p>
|
| 250 |
+
<div style="background: #fff; border: 1px solid #fee2e2; padding: 25px; border-radius: 16px; margin: 25px 0;">
|
| 251 |
+
<p style="margin: 0; font-size: 18px; color: #111; font-style: italic; line-height: 1.6;">"{roast_message}"</p>
|
| 252 |
+
</div>
|
| 253 |
+
<h3 style="color: #1e293b; font-size: 16px; margin-bottom: 15px;">Weekly Spend Highlights</h3>
|
| 254 |
+
{breakdown_html}
|
| 255 |
+
"""
|
| 256 |
+
html = self._get_html_wrapper(
|
| 257 |
+
title="Weekly Spending Alert",
|
| 258 |
+
content=content,
|
| 259 |
+
cta_text="Review Dashboard",
|
| 260 |
+
cta_url=f"{settings.FRONTEND_ORIGIN}/dashboard"
|
| 261 |
+
)
|
| 262 |
+
send_email(user.email, subject, html)
|
| 263 |
+
|
| 264 |
+
async def send_buffer_alert(self, user_id: uuid.UUID, full_name: str, safe_to_spend: float):
|
| 265 |
+
"""Emergency alert when Safe-to-Spend drops into the danger zone."""
|
| 266 |
+
result = await self.db.execute(select(User).where(User.id == user_id))
|
| 267 |
+
user = result.scalar_one_or_none()
|
| 268 |
+
if not user or not user.email: return
|
| 269 |
+
|
| 270 |
+
name = self._derive_name(user.email, full_name)
|
| 271 |
+
subject = "🚨 Red Alert: Buffer Exhausted"
|
| 272 |
+
|
| 273 |
+
content = f"""
|
| 274 |
+
<p>Hello {name}, your financial dashboard is flashing red.</p>
|
| 275 |
+
<p style="font-size: 17px; color: #111; font-weight: 600;">Your Safe-to-Spend has dropped below your safety buffer.</p>
|
| 276 |
+
|
| 277 |
+
<div style="background: #000; color: #fff; padding: 40px; border-radius: 24px; margin: 30px 0; text-align: center; border: 2px solid #ef4444; box-shadow: 0 0 30px rgba(239, 68, 68, 0.4);">
|
| 278 |
+
<div style="width: 10px; height: 10px; background: #ef4444; border-radius: 50%; box-shadow: 0 0 15px #ef4444; margin: 0 auto 15px auto; animation: pulse 2s infinite;"></div>
|
| 279 |
+
<span style="display: block; font-size: 11px; text-transform: uppercase; letter-spacing: 0.2em; color: #94a3b8; margin-bottom: 8px;">DANGER ZONE BALANCE</span>
|
| 280 |
+
<span style="font-size: 42px; font-weight: 900; color: #ef4444; letter-spacing: -0.05em;">₹{safe_to_spend:,.2f}</span>
|
| 281 |
+
</div>
|
| 282 |
+
|
| 283 |
+
<p style="color: #475569; font-size: 16px;">This means any further spending until your next income might cannibalize funds reserved for your upcoming bills. It's time for an elective spending freeze.</p>
|
| 284 |
+
"""
|
| 285 |
+
html = self._get_html_wrapper(
|
| 286 |
+
title="Financial Flare",
|
| 287 |
+
content=content,
|
| 288 |
+
cta_text="Check Damage",
|
| 289 |
+
cta_url=f"{settings.FRONTEND_ORIGIN}/dashboard"
|
| 290 |
+
)
|
| 291 |
+
send_email(user.email, subject, html)
|
| 292 |
+
|
| 293 |
+
async def send_inactivity_nudge(self, user_id: uuid.UUID, full_name: str, days_inactive: int):
|
| 294 |
+
"""Notify user if no transactions have been synced for a while."""
|
| 295 |
+
result = await self.db.execute(select(User).where(User.id == user_id))
|
| 296 |
+
user = result.scalar_one_or_none()
|
| 297 |
+
if not user or not user.email: return
|
| 298 |
+
|
| 299 |
+
name = self._derive_name(user.email, full_name)
|
| 300 |
+
subject = f"We missed you, {name}!"
|
| 301 |
+
nudge_message = f"It has been {days_inactive} days since your last transaction was synced. Financial intelligence works best with fresh data!"
|
| 302 |
+
|
| 303 |
+
if self.llm.is_enabled:
|
| 304 |
+
prompt = f"""
|
| 305 |
+
Persona: Sassy, witty, premium personal CFO.
|
| 306 |
+
Task: Write a funny, slightly flirty/teasing nudge for {name} who hasn't synced their bank in {days_inactive} days.
|
| 307 |
+
- Tease them about their 'ghosting' skills or 'selective memory' regarding spending.
|
| 308 |
+
- Max 30 words.
|
| 309 |
+
- No quotes, no markdown.
|
| 310 |
+
Example: "Ghosting your finances doesn't make the bills go away, {name}. Reconnect before your budget has an identity crisis."
|
| 311 |
+
"""
|
| 312 |
+
resp = await self.llm.generate_response(prompt, temperature=0.7, timeout=60.0)
|
| 313 |
+
if resp:
|
| 314 |
+
nudge_message = resp.strip().replace('"', '')
|
| 315 |
+
|
| 316 |
+
content = f"<p>Hello {name},</p><p>{nudge_message}</p>"
|
| 317 |
+
html = self._get_html_wrapper(
|
| 318 |
+
title="It's been a while...",
|
| 319 |
+
content=content,
|
| 320 |
+
cta_text="Sync Now",
|
| 321 |
+
cta_url=f"{settings.FRONTEND_ORIGIN}/sync"
|
| 322 |
+
)
|
| 323 |
+
send_email(user.email, subject, html)
|
| 324 |
+
|
| 325 |
+
async def send_weekend_insight(self, user_id: uuid.UUID, full_name: str, safe_to_spend: float, current_balance: float, top_category: Optional[str] = None):
|
| 326 |
+
"""Send a personalized, AI-generated weekend recommendation."""
|
| 327 |
+
result = await self.db.execute(select(User).where(User.id == user_id))
|
| 328 |
+
user = result.scalar_one_or_none()
|
| 329 |
+
if not user or not user.email: return
|
| 330 |
+
|
| 331 |
+
name = self._derive_name(user.email, full_name)
|
| 332 |
+
ai_headline = "Ready for the Weekend?"
|
| 333 |
+
ai_message = "Your Safe-to-Spend is ready for review. Have a great weekend!"
|
| 334 |
+
ai_cta = "Check Budget"
|
| 335 |
+
subject = f"Weekend Insight: ₹{safe_to_spend:,.0f}"
|
| 336 |
+
|
| 337 |
+
if self.llm.is_enabled:
|
| 338 |
+
context_str = f"Top Spend this week: {top_category}" if top_category else ""
|
| 339 |
+
prompt = f"""
|
| 340 |
+
Persona: Witty, premium, world-class lifestyle concierge.
|
| 341 |
+
Context: User {name} has ₹{safe_to_spend:,.0f} safe to spend. {context_str}.
|
| 342 |
+
|
| 343 |
+
Task: Write a highly personal, cheeky weekend recommendation.
|
| 344 |
+
- If {top_category} is 'Food': Tease their palate.
|
| 345 |
+
- If Budget > 3k: Suggest a 'treat yourself' moment.
|
| 346 |
+
- If Budget < 3k and > 1k: Suggest something in the middle.
|
| 347 |
+
- If Budget < 1k: Suggest something 'poor but gold' like a park sunset with stolen office coffee.
|
| 348 |
+
- Mood: Sophisticated but funny. Use wordplay.
|
| 349 |
+
|
| 350 |
+
Return JSON only, NO markdown:
|
| 351 |
+
{{ "headline": "Witty headline", "message": "The suggestion", "cta": "Cheeky CTA", "subject": "Bait-y subject line" }}
|
| 352 |
+
"""
|
| 353 |
+
data = await self.llm.generate_json(prompt, temperature=0.8, timeout=60.0)
|
| 354 |
+
if data:
|
| 355 |
+
ai_headline = data.get("headline", ai_headline)
|
| 356 |
+
ai_message = data.get("message", ai_message)
|
| 357 |
+
ai_cta = data.get("cta", ai_cta)
|
| 358 |
+
subject = data.get("subject", subject)
|
| 359 |
+
|
| 360 |
+
content = f"""
|
| 361 |
+
<p>Hello {name},</p>
|
| 362 |
+
<p style="font-size: 18px; line-height: 1.5;">{ai_message}</p>
|
| 363 |
+
<div style="background: #000; color: white; padding: 40px; border-radius: 24px; margin: 30px 0; text-align: center; border: 1px solid rgba(79, 70, 229, 0.3); box-shadow: 0 10px 40px -10px rgba(79, 70, 229, 0.4);">
|
| 364 |
+
<div style="width: 8px; height: 8px; background: #4F46E5; border-radius: 50%; box-shadow: 0 0 10px #4F46E5; margin: 0 auto 15px auto;"></div>
|
| 365 |
+
<span style="display: block; font-size: 11px; text-transform: uppercase; letter-spacing: 0.2em; color: #94a3b8; margin-bottom: 8px; font-weight: 700;">Safe-to-Spend Vibe</span>
|
| 366 |
+
<span style="font-size: 42px; font-weight: 900; color: #fff; letter-spacing: -0.05em;">₹{safe_to_spend:,.2f}</span>
|
| 367 |
+
</div>
|
| 368 |
+
"""
|
| 369 |
+
html = self._get_html_wrapper(
|
| 370 |
+
title=ai_headline,
|
| 371 |
+
content=content,
|
| 372 |
+
cta_text=ai_cta,
|
| 373 |
+
cta_url=f"{settings.FRONTEND_ORIGIN}/dashboard",
|
| 374 |
+
footer_note="This figure accounts for your current balance minus all upcoming obligations and safety buffers."
|
| 375 |
+
)
|
| 376 |
+
send_email(user.email, subject, html)
|
| 377 |
+
|
| 378 |
+
async def send_monthly_report(self, user_id: uuid.UUID, full_name: str, summary: any, variance: any):
|
| 379 |
+
"""Send a massive monthly intelligence report with AI recommendations and data breakdown."""
|
| 380 |
+
result = await self.db.execute(select(User).where(User.id == user_id))
|
| 381 |
+
user = result.scalar_one_or_none()
|
| 382 |
+
if not user or not user.email: return
|
| 383 |
+
|
| 384 |
+
name = self._derive_name(user.email, full_name)
|
| 385 |
+
subject = f"Monthly Intelligence: Your {summary.month} Review"
|
| 386 |
+
|
| 387 |
+
# 1. Prepare visual breakdown (Top 5 categories)
|
| 388 |
+
sorted_cats = sorted(variance.category_breakdown.items(), key=lambda x: x[1].current, reverse=True)[:5]
|
| 389 |
+
breakdown_html = ""
|
| 390 |
+
for cat, data in sorted_cats:
|
| 391 |
+
percentage = (float(data.current) / float(summary.total_expense) * 100) if summary.total_expense > 0 else 0
|
| 392 |
+
breakdown_html += f"""
|
| 393 |
+
<div style="margin-bottom: 20px;">
|
| 394 |
+
<div style="display: flex; justify-content: space-between; font-size: 14px; margin-bottom: 6px;">
|
| 395 |
+
<span style="color: #475569; font-weight: 600;">{cat}</span>
|
| 396 |
+
<span style="color: #111; font-weight: 800;">₹{data.current:,.0f}</span>
|
| 397 |
+
</div>
|
| 398 |
+
<div style="width: 100%; height: 8px; background: #f1f5f9; border-radius: 4px; overflow: hidden;">
|
| 399 |
+
<div style="width: {min(100, percentage)}%; height: 100%; background: #4F46E5; box-shadow: 0 0 10px rgba(79, 70, 229, 0.4);"></div>
|
| 400 |
+
</div>
|
| 401 |
+
</div>
|
| 402 |
+
"""
|
| 403 |
+
|
| 404 |
+
# 2. Get AI Strategic Nudge
|
| 405 |
+
ai_strategy = "Great work tracking your finances this month. Keep it up for a stronger next month!"
|
| 406 |
+
if self.llm.is_enabled:
|
| 407 |
+
top_cats_str = ", ".join([f"{c}: ₹{d.current:,.0f}" for c, d in sorted_cats])
|
| 408 |
+
prompt = f"""
|
| 409 |
+
Persona: Sassy but brilliant luxury wealth manager.
|
| 410 |
+
User: {name}
|
| 411 |
+
Month: {summary.month}
|
| 412 |
+
Total Income: ₹{summary.total_income:,.0f}, Expenses: ₹{summary.total_expense:,.0f}
|
| 413 |
+
Top Spends: {top_cats_str}
|
| 414 |
+
|
| 415 |
+
Task: Write a 2-3 sentence 'Optimization Strategy'.
|
| 416 |
+
- Be blunt but funny.
|
| 417 |
+
- If expenses > income, send a 'brutal' reality check.
|
| 418 |
+
- If income > expenses, celebrate the win but suggest an 'aggressive' investment move.
|
| 419 |
+
- Max 40 words. No markdown.
|
| 420 |
+
"""
|
| 421 |
+
resp = await self.llm.generate_response(prompt, temperature=0.8, timeout=10.0)
|
| 422 |
+
if resp:
|
| 423 |
+
ai_strategy = resp.strip()
|
| 424 |
+
|
| 425 |
+
content = f"""
|
| 426 |
+
<p>Hello {name}, your financial dossier for <strong>{summary.month}</strong> is ready.</p>
|
| 427 |
+
|
| 428 |
+
<!-- Summary Cards -->
|
| 429 |
+
<div style="display: flex; gap: 15px; margin: 30px 0;">
|
| 430 |
+
<div style="flex: 1; background: #f8fafc; padding: 25px; border-radius: 20px; border: 1px solid #e2e8f0; text-align: center;">
|
| 431 |
+
<span style="display: block; font-size: 11px; text-transform: uppercase; color: #64748b; letter-spacing: 0.1em; margin-bottom: 8px; font-weight: 700;">Income</span>
|
| 432 |
+
<span style="font-size: 24px; font-weight: 900; color: #10b981;">₹{summary.total_income:,.0f}</span>
|
| 433 |
+
</div>
|
| 434 |
+
<div style="flex: 1; background: #f8fafc; padding: 25px; border-radius: 20px; border: 1px solid #e2e8f0; text-align: center;">
|
| 435 |
+
<span style="display: block; font-size: 11px; text-transform: uppercase; color: #64748b; letter-spacing: 0.1em; margin-bottom: 8px; font-weight: 700;">Expenses</span>
|
| 436 |
+
<span style="font-size: 24px; font-weight: 900; color: #ef4444;">₹{summary.total_expense:,.0f}</span>
|
| 437 |
+
</div>
|
| 438 |
+
</div>
|
| 439 |
+
|
| 440 |
+
<!-- Strategy Box -->
|
| 441 |
+
<div style="background: #000; color: white; padding: 35px; border-radius: 24px; margin: 30px 0; border: 1px solid rgba(79, 70, 229, 0.4); box-shadow: 0 20px 50px -10px rgba(0,0,0,0.3);">
|
| 442 |
+
<div style="width: 8px; height: 8px; background: #4F46E5; border-radius: 50%; box-shadow: 0 0 10px #4F46E5; margin-bottom: 15px;"></div>
|
| 443 |
+
<p style="margin: 0; font-size: 12px; text-transform: uppercase; letter-spacing: 0.2em; color: #94a3b8; font-weight: 700; border-bottom: 1px solid #333; padding-bottom: 12px; margin-bottom: 20px;">AI Wealth Strategy</p>
|
| 444 |
+
<p style="margin: 0; font-size: 17px; font-style: italic; color: #fff; line-height: 1.6;">"{ai_strategy}"</p>
|
| 445 |
+
</div>
|
| 446 |
+
|
| 447 |
+
<!-- Visual Breakdown -->
|
| 448 |
+
<div style="margin-top: 40px;">
|
| 449 |
+
<h3 style="color: #111; font-size: 20px; font-weight: 800; margin-bottom: 20px; letter-spacing: -0.02em;">Category Intelligence</h3>
|
| 450 |
+
<div style="background: white; border: 1px solid #f1f5f9; padding: 30px; border-radius: 24px;">
|
| 451 |
+
{breakdown_html}
|
| 452 |
+
</div>
|
| 453 |
+
</div>
|
| 454 |
+
"""
|
| 455 |
+
html = self._get_html_wrapper(
|
| 456 |
+
title=f"{summary.month} Dossier",
|
| 457 |
+
content=content,
|
| 458 |
+
cta_text="Check Full Analytics",
|
| 459 |
+
cta_url=f"{settings.FRONTEND_ORIGIN}/analytics",
|
| 460 |
+
footer_note="Based on consolidated data from your synchronized bank accounts and manual entries."
|
| 461 |
+
)
|
| 462 |
+
send_email(user.email, subject, html)
|
app/features/sanitizer/service.py
ADDED
|
@@ -0,0 +1,42 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import re
|
| 2 |
+
|
| 3 |
+
class SanitizerService:
|
| 4 |
+
def __init__(self):
|
| 5 |
+
# Regex Patterns
|
| 6 |
+
# Regex Patterns
|
| 7 |
+
self.patterns = {
|
| 8 |
+
# Order matters: Specific patterns first
|
| 9 |
+
# 'UPI': re.compile(r'[a-zA-Z0-9.\-_]{2,}@[a-zA-Z]{2,}'), # Commented out so LLM can extract merchant name
|
| 10 |
+
'EMAIL': re.compile(r'\b[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Z|a-z]{2,}\b'),
|
| 11 |
+
'PHONE': re.compile(r'(?:\+?91|0)?[6-9]\d{9}'), # India specific mobile regex
|
| 12 |
+
'CARD': re.compile(r'(?:\d[ -]*?){12,19}'), # 12-19 digits for cards
|
| 13 |
+
'ACCOUNT': re.compile(r'[Xx]+\d{3,6}'), # Matches masked accounts like xxx1234
|
| 14 |
+
'OTP': re.compile(r'\b\d{4,8}\b'), # 4-8 digit standalone numbers (often OTPs or amounts, use carefully)
|
| 15 |
+
'PAN': re.compile(r'[A-Z]{5}[0-9]{4}[A-Z]{1}'),
|
| 16 |
+
'AADHAAR': re.compile(r'\d{4}\s\d{4}\s\d{4}'),
|
| 17 |
+
}
|
| 18 |
+
def sanitize(self, text: str) -> str:
|
| 19 |
+
if not text:
|
| 20 |
+
return text
|
| 21 |
+
|
| 22 |
+
# Common greeting removal (Dear Customer, Hello Name)
|
| 23 |
+
text = re.sub(r'(?i)(Dear|Hello|Hi)\s+[A-Za-z\s]+,', r'\1 Customer,', text)
|
| 24 |
+
|
| 25 |
+
for label, pattern in self.patterns.items():
|
| 26 |
+
if label == 'OTP':
|
| 27 |
+
# Skip generic number replacement to avoid sanitizing amounts, unless it clearly looks like an OTP
|
| 28 |
+
# For now, let's rely on LLM to ignore OTPs, or be very strict.
|
| 29 |
+
# Actually, better to NOT sanitize simple numbers blindly as they might be amounts.
|
| 30 |
+
continue
|
| 31 |
+
|
| 32 |
+
text = pattern.sub(f'<{label}>', text)
|
| 33 |
+
|
| 34 |
+
return text
|
| 35 |
+
|
| 36 |
+
_sanitizer = None
|
| 37 |
+
|
| 38 |
+
def get_sanitizer_service():
|
| 39 |
+
global _sanitizer
|
| 40 |
+
if _sanitizer is None:
|
| 41 |
+
_sanitizer = SanitizerService()
|
| 42 |
+
return _sanitizer
|
app/features/settle_up/models.py
ADDED
|
@@ -0,0 +1,33 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import uuid
|
| 2 |
+
from decimal import Decimal
|
| 3 |
+
from datetime import date
|
| 4 |
+
from typing import Optional
|
| 5 |
+
from sqlalchemy import String, ForeignKey, Numeric, Text, DateTime, Date
|
| 6 |
+
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
| 7 |
+
from sqlalchemy.sql import func
|
| 8 |
+
from app.core.database import Base
|
| 9 |
+
from app.features.auth.models import User
|
| 10 |
+
from app.features.transactions.models import Transaction
|
| 11 |
+
|
| 12 |
+
class SettleUpEntry(Base):
|
| 13 |
+
__tablename__ = "settle_up_entries"
|
| 14 |
+
|
| 15 |
+
id: Mapped[uuid.UUID] = mapped_column(primary_key=True, default=uuid.uuid4)
|
| 16 |
+
user_id: Mapped[uuid.UUID] = mapped_column(ForeignKey("users.id"), index=True)
|
| 17 |
+
|
| 18 |
+
# Who the transaction is with (serves as the grouping key)
|
| 19 |
+
peer_name: Mapped[str] = mapped_column(String, index=True)
|
| 20 |
+
|
| 21 |
+
# Positive amount = They owe you (You lent them money)
|
| 22 |
+
# Negative amount = You owe them (You borrowed money, or they repaid a loan)
|
| 23 |
+
amount: Mapped[Decimal] = mapped_column(Numeric(10, 2))
|
| 24 |
+
|
| 25 |
+
# Loose coupling to the main transaction, if it originated from a bank sync/manual entry
|
| 26 |
+
transaction_id: Mapped[Optional[uuid.UUID]] = mapped_column(ForeignKey("transactions.id", ondelete="CASCADE"), nullable=True)
|
| 27 |
+
|
| 28 |
+
remarks: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
|
| 29 |
+
date: Mapped[date] = mapped_column(Date, default=date.today)
|
| 30 |
+
created_at: Mapped[DateTime] = mapped_column(DateTime(timezone=True), server_default=func.now())
|
| 31 |
+
|
| 32 |
+
user: Mapped["User"] = relationship("User")
|
| 33 |
+
transaction: Mapped[Optional["Transaction"]] = relationship("Transaction")
|
app/features/settle_up/router.py
ADDED
|
@@ -0,0 +1,51 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from uuid import UUID
|
| 2 |
+
from typing import Annotated, List
|
| 3 |
+
from fastapi import APIRouter, Depends
|
| 4 |
+
from app.features.auth.deps import get_current_user
|
| 5 |
+
from app.features.auth.models import User
|
| 6 |
+
from app.features.settle_up import schemas
|
| 7 |
+
from app.features.settle_up.service import SettleUpService
|
| 8 |
+
|
| 9 |
+
router = APIRouter()
|
| 10 |
+
|
| 11 |
+
@router.get("/balances", response_model=List[schemas.PeerBalance])
async def get_peer_balances(
    current_user: Annotated[User, Depends(get_current_user)],
    service: Annotated[SettleUpService, Depends()]
):
    """Return the net balance per peer for the authenticated user."""
    balances = await service.get_peer_balances(user_id=current_user.id)
    return balances
|
| 17 |
+
|
| 18 |
+
@router.get("/{peer_name}/history", response_model=List[schemas.SettleUpEntryResponse])
async def get_peer_history(
    peer_name: str,
    current_user: Annotated[User, Depends(get_current_user)],
    service: Annotated[SettleUpService, Depends()],
    limit: int = 50
):
    """Return up to ``limit`` ledger entries exchanged with one peer."""
    history = await service.get_peer_history(
        user_id=current_user.id,
        peer_name=peer_name,
        limit=limit,
    )
    return history
|
| 26 |
+
|
| 27 |
+
@router.post("/", response_model=schemas.SettleUpEntryResponse)
async def create_ledger_entry(
    data: schemas.SettleUpEntryCreate,
    current_user: Annotated[User, Depends(get_current_user)],
    service: Annotated[SettleUpService, Depends()]
):
    """Create a new ledger entry for the authenticated user."""
    created = await service.add_ledger_entry(user_id=current_user.id, data=data)
    return created
|
| 34 |
+
|
| 35 |
+
@router.put("/{entry_id}", response_model=schemas.SettleUpEntryResponse)
async def update_settle_up_entry(
    entry_id: UUID,
    data: schemas.SettleUpEntryUpdate,
    current_user: Annotated[User, Depends(get_current_user)],
    service: Annotated[SettleUpService, Depends()]
):
    """Apply a partial update to one of the user's ledger entries."""
    updated = await service.update_settle_up_entry(
        user_id=current_user.id,
        entry_id=entry_id,
        data=data,
    )
    return updated
|
| 43 |
+
|
| 44 |
+
@router.delete("/{entry_id}")
async def delete_settle_up_entry(
    entry_id: UUID,
    current_user: Annotated[User, Depends(get_current_user)],
    service: Annotated[SettleUpService, Depends()]
):
    """Delete one of the user's ledger entries, then report success."""
    await service.delete_settle_up_entry(user_id=current_user.id, entry_id=entry_id)
    return {"status": "success"}
|
app/features/settle_up/schemas.py
ADDED
|
@@ -0,0 +1,44 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from pydantic import BaseModel, Field
|
| 2 |
+
from uuid import UUID
|
| 3 |
+
from typing import Optional
|
| 4 |
+
from datetime import date, datetime
|
| 5 |
+
from decimal import Decimal
|
| 6 |
+
|
| 7 |
+
class SettleUpEntryBase(BaseModel):
    """Shared fields of a settle-up ledger entry.

    Sign convention (mirrors the SettleUpEntry model): a positive ``amount``
    means the peer owes the user; a negative ``amount`` means the user owes
    the peer. ``date`` may be omitted; the service defaults it to today.
    """
    peer_name: str
    amount: Decimal
    remarks: Optional[str] = None
    date: Optional[date] = None

    class Config:
        # Trim surrounding whitespace on all string fields (e.g. peer_name).
        str_strip_whitespace = True
|
| 15 |
+
|
| 16 |
+
class SettleUpEntryCreate(SettleUpEntryBase):
    """Payload for creating an entry; optionally links a source transaction."""
    # Loose coupling to the main transaction when the entry originates
    # from a bank sync / manual transaction entry.
    transaction_id: Optional[UUID] = None
|
| 18 |
+
|
| 19 |
+
class SettleUpEntryUpdate(BaseModel):
    """Partial-update payload: only fields the client sends are applied.

    The service uses ``model_dump(exclude_unset=True)``, so every field is
    optional here regardless of its requiredness on create.
    """
    peer_name: Optional[str] = None
    amount: Optional[Decimal] = None
    remarks: Optional[str] = None
    date: Optional[date] = None

    class Config:
        # Trim surrounding whitespace on all string fields.
        str_strip_whitespace = True
|
| 27 |
+
|
| 28 |
+
class SettleUpEntryResponse(SettleUpEntryBase):
    """API representation of a stored ledger entry."""
    id: UUID
    user_id: UUID
    date: date  # Override Optional from base — DB always has a date
    transaction_id: Optional[UUID] = None
    created_at: datetime

    class Config:
        # Allow construction directly from the SettleUpEntry ORM object.
        from_attributes = True
|
| 37 |
+
|
| 38 |
+
class PeerBalance(BaseModel):
    """Aggregated position against one peer.

    ``net_balance`` follows the ledger sign convention: positive means the
    peer owes the user, negative means the user owes the peer.
    """
    peer_name: str
    net_balance: Decimal
    last_activity_date: date

    class Config:
        str_strip_whitespace = True
|
app/features/settle_up/service.py
ADDED
|
@@ -0,0 +1,100 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from datetime import date
from typing import List
from uuid import UUID

from fastapi import Depends
from fastapi import HTTPException
from sqlalchemy import select, func, desc
from sqlalchemy.ext.asyncio import AsyncSession

from app.core.database import get_db
from app.features.settle_up import schemas
from app.features.settle_up.models import SettleUpEntry
|
| 10 |
+
|
| 11 |
+
class SettleUpService:
    """Service layer for the peer-ledger ("settle up") feature.

    Sign convention (matches the SettleUpEntry model): a positive amount
    means the peer owes the user; a negative amount means the user owes
    the peer.
    """

    def __init__(self, db: AsyncSession = Depends(get_db)):
        self.db = db

    async def _get_owned_entry(self, user_id: UUID, entry_id: UUID) -> SettleUpEntry:
        """Fetch an entry by id scoped to its owner, or raise HTTP 404."""
        stmt = select(SettleUpEntry).where(SettleUpEntry.id == entry_id, SettleUpEntry.user_id == user_id)
        result = await self.db.execute(stmt)
        entry = result.scalar_one_or_none()
        if not entry:
            raise HTTPException(status_code=404, detail="Entry not found")
        return entry

    async def get_peer_balances(self, user_id: UUID) -> List[schemas.PeerBalance]:
        """
        Gets a summary of all peers and the net balance owed.
        Positive: They owe you. Negative: You owe them.
        """
        # Group case-insensitively so "Alice" and "alice" are one peer;
        # max() over the group picks one casing/date for display.
        stmt = (
            select(
                func.max(SettleUpEntry.peer_name).label("peer_name"),
                func.sum(SettleUpEntry.amount).label("net_balance"),
                func.max(SettleUpEntry.date).label("last_activity_date"),
            )
            .where(SettleUpEntry.user_id == user_id)
            .group_by(func.lower(SettleUpEntry.peer_name))
            .having(func.sum(SettleUpEntry.amount) != 0)  # hide fully settled peers
            .order_by(desc("last_activity_date"))
        )
        result = await self.db.execute(stmt)
        return [
            schemas.PeerBalance(
                peer_name=row.peer_name,
                net_balance=row.net_balance,
                last_activity_date=row.last_activity_date,
            )
            for row in result
        ]

    async def get_peer_history(self, user_id: UUID, peer_name: str, limit: int = 50) -> List[SettleUpEntry]:
        """Gets the transaction history for a specific peer (case-insensitive name match)."""
        peer_name_stripped = peer_name.strip()
        # BUGFIX: ilike() treats '%' and '_' in the peer name as SQL wildcards,
        # so a peer named e.g. "a%" would match unrelated peers. Use lower()
        # equality for a wildcard-free case-insensitive match, consistent with
        # the lower() grouping key used in get_peer_balances().
        stmt = (
            select(SettleUpEntry)
            .where(SettleUpEntry.user_id == user_id)
            .where(func.lower(SettleUpEntry.peer_name) == peer_name_stripped.lower())
            .order_by(SettleUpEntry.date.desc(), SettleUpEntry.created_at.desc())
            .limit(limit)
        )
        result = await self.db.execute(stmt)
        return result.scalars().all()

    async def add_ledger_entry(self, user_id: UUID, data: schemas.SettleUpEntryCreate) -> SettleUpEntry:
        """Adds a pure manual ledger entry (not shadowing a bank transaction)."""
        entry_data = data.model_dump()
        entry_data["user_id"] = user_id

        # Default the entry date to today when the client omits it.
        if not entry_data.get("date"):
            entry_data["date"] = date.today()

        entry = SettleUpEntry(**entry_data)
        self.db.add(entry)
        await self.db.commit()
        await self.db.refresh(entry)
        return entry

    async def update_settle_up_entry(self, user_id: UUID, entry_id: UUID, data: schemas.SettleUpEntryUpdate) -> SettleUpEntry:
        """Updates an existing settle-up entry; raises 404 if not found/owned."""
        entry = await self._get_owned_entry(user_id, entry_id)

        # Apply only the fields the client actually sent (partial update).
        update_data = data.model_dump(exclude_unset=True)
        for field, value in update_data.items():
            setattr(entry, field, value)

        await self.db.commit()
        await self.db.refresh(entry)
        return entry

    async def delete_settle_up_entry(self, user_id: UUID, entry_id: UUID) -> bool:
        """Deletes a settle-up entry; raises 404 if not found/owned."""
        entry = await self._get_owned_entry(user_id, entry_id)
        await self.db.delete(entry)
        await self.db.commit()
        return True
|
app/features/sync/models.py
ADDED
|
@@ -0,0 +1,25 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from datetime import datetime
from typing import Optional
from uuid import UUID

from sqlalchemy import String, Integer, DateTime, ForeignKey, Text
from sqlalchemy.orm import Mapped, mapped_column, relationship
from sqlalchemy.sql import func

from app.core.database import Base
|
| 7 |
+
|
| 8 |
+
class SyncLog(Base):
    """Audit record of a single sync run.

    A row is created when a run starts (status IN_PROGRESS) and carries the
    outcome, counters, and any error once the run finishes.
    """
    __tablename__ = "sync_logs"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    user_id: Mapped[UUID] = mapped_column(ForeignKey("users.id"))

    start_time: Mapped[datetime] = mapped_column(DateTime(timezone=True), server_default=func.now())
    # Optional[...] on nullable columns so the Python types match the schema
    # (SQLAlchemy 2.0 typed mappings; consistent with the other models).
    # end_time stays NULL while the run is still in progress.
    end_time: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True), nullable=True)
    status: Mapped[str] = mapped_column(String, default="IN_PROGRESS")  # IN_PROGRESS, SUCCESS, FAILED
    records_processed: Mapped[int] = mapped_column(Integer, default=0)
    error_message: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
    trigger_source: Mapped[str] = mapped_column(String)  # WEBHOOK, MANUAL

    # Store the historyId used for this sync to know where to start next time
    history_id_used: Mapped[Optional[str]] = mapped_column(String, nullable=True)
    summary: Mapped[Optional[str]] = mapped_column(Text, nullable=True)  # JSON summary of processed records
|