Spaces:
Sleeping
Sleeping
'code
Browse filesThis view is limited to 50 files because it contains too many changes. See raw diff
- .dockerignore +55 -0
- .env +15 -0
- .python-version +1 -0
- CLAUDE.md +273 -0
- Dockerfile +65 -0
- alembic.ini +145 -0
- alembic/README +1 -0
- alembic/env.py +90 -0
- alembic/script.py.mako +28 -0
- alembic/versions/3b6c60669e48_add_project_model_and_relationship_to_.py +52 -0
- alembic/versions/4ac448e3f100_add_due_date_field_to_task_model.py +32 -0
- alembic/versions/6f0b6403a1d8_add_refresh_token_table.py +43 -0
- alembic/versions/8e3b5a7c2d9f_add_conversation_message_tables.py +59 -0
- alembic/versions/9a4b8c7d1e2f_add_is_ai_generated_to_tasks.py +27 -0
- alembic/versions/__pycache__/3b6c60669e48_add_project_model_and_relationship_to_.cpython-312.pyc +0 -0
- alembic/versions/__pycache__/4ac448e3f100_add_due_date_field_to_task_model.cpython-312.pyc +0 -0
- alembic/versions/__pycache__/6f0b6403a1d8_add_refresh_token_table.cpython-312.pyc +0 -0
- alembic/versions/__pycache__/8e3b5a7c2d9f_add_conversation_message_tables.cpython-312.pyc +0 -0
- alembic/versions/__pycache__/9a4b8c7d1e2f_add_is_ai_generated_to_tasks.cpython-312.pyc +0 -0
- alembic/versions/__pycache__/a1b2c3d4e5f6_add_audit_log_table.cpython-312.pyc +0 -0
- alembic/versions/__pycache__/ec70eaafa7b6_initial_schema_with_users_and_tasks_.cpython-312.pyc +0 -0
- alembic/versions/a1b2c3d4e5f6_add_audit_log_table.py +43 -0
- alembic/versions/ec70eaafa7b6_initial_schema_with_users_and_tasks_.py +54 -0
- main.py +6 -0
- requirements.txt +17 -0
- src/__init__.py +0 -0
- src/agent_config.py +54 -0
- src/config.py +27 -0
- src/database.py +24 -0
- src/events.py +123 -0
- src/main.py +62 -0
- src/mcp_server.py +140 -0
- src/mcp_tools/__init__.py +3 -0
- src/mcp_tools/task_tools.py +539 -0
- src/middleware/auth.py +41 -0
- src/models/__init__.py +31 -0
- src/models/audit_log.py +40 -0
- src/models/conversation.py +40 -0
- src/models/message.py +41 -0
- src/models/project.py +49 -0
- src/models/task.py +57 -0
- src/models/user.py +36 -0
- src/routers/__init__.py +3 -0
- src/routers/audit.py +125 -0
- src/routers/auth.py +189 -0
- src/routers/chat.py +203 -0
- src/routers/projects.py +259 -0
- src/routers/tasks.py +637 -0
- src/schemas/auth.py +41 -0
- src/schemas/task.py +39 -0
.dockerignore
ADDED
|
@@ -0,0 +1,55 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Python dependencies
|
| 2 |
+
__pycache__/
|
| 3 |
+
*.py[cod]
|
| 4 |
+
*$py.class
|
| 5 |
+
*.so
|
| 6 |
+
.Python
|
| 7 |
+
build/
|
| 8 |
+
develop-eggs/
|
| 9 |
+
dist/
|
| 10 |
+
downloads/
|
| 11 |
+
eggs/
|
| 12 |
+
.eggs/
|
| 13 |
+
lib/
|
| 14 |
+
lib64/
|
| 15 |
+
parts/
|
| 16 |
+
sdist/
|
| 17 |
+
var/
|
| 18 |
+
wheels/
|
| 19 |
+
*.egg-info/
|
| 20 |
+
.installed.cfg
|
| 21 |
+
*.egg
|
| 22 |
+
|
| 23 |
+
# Virtual environments
|
| 24 |
+
venv/
|
| 25 |
+
ENV/
|
| 26 |
+
env/
|
| 27 |
+
.venv
|
| 28 |
+
|
| 29 |
+
# IDE
|
| 30 |
+
.idea/
|
| 31 |
+
.vscode/
|
| 32 |
+
*.swp
|
| 33 |
+
*.swo
|
| 34 |
+
*~
|
| 35 |
+
|
| 36 |
+
# OS files
|
| 37 |
+
.DS_Store
|
| 38 |
+
Thumbs.db
|
| 39 |
+
|
| 40 |
+
# Environment
|
| 41 |
+
.env*
|
| 42 |
+
!.env.example
|
| 43 |
+
|
| 44 |
+
# Tests
|
| 45 |
+
.pytest_cache/
|
| 46 |
+
.coverage
|
| 47 |
+
htmlcov/
|
| 48 |
+
*.cover
|
| 49 |
+
|
| 50 |
+
# Debug logs
|
| 51 |
+
*.log
|
| 52 |
+
|
| 53 |
+
# UV
|
| 54 |
+
.uv/
|
| 55 |
+
uv.lock
|
.env
ADDED
|
@@ -0,0 +1,15 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Database Configuration
|
| 2 |
+
DATABASE_URL=postgresql://neondb_owner:npg_LsojKQF8bGn2@ep-mute-pine-a4g0wfsu-pooler.us-east-1.aws.neon.tech/neondb?sslmode=require&channel_binding=require
|
| 3 |
+
|
| 4 |
+
# JWT Configuration
|
| 5 |
+
BETTER_AUTH_SECRET=your-secret-key-change-in-production
|
| 6 |
+
JWT_SECRET_KEY=your-jwt-secret-change-in-production
|
| 7 |
+
JWT_ALGORITHM=HS256
|
| 8 |
+
ACCESS_TOKEN_EXPIRE_DAYS=7
|
| 9 |
+
JWT_COOKIE_SECURE=True
|
| 10 |
+
JWT_COOKIE_SAMESITE=none
|
| 11 |
+
|
| 12 |
+
# CORS Configuration
|
| 13 |
+
FRONTEND_URL=https://task-flow-roan-beta.vercel.app,http://localhost:3000,http://127.0.0.1:3000
|
| 14 |
+
OPENAI_API_KEY=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
|
| 15 |
+
GEMINI_API_KEY=AIzaSyDcrSw3MIP0f4uJAf8Ol6M2BB4KUpkBRqI
|
.python-version
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
3.12
|
CLAUDE.md
ADDED
|
@@ -0,0 +1,273 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Claude Agent Instructions - Backend
|
| 2 |
+
|
| 3 |
+
## Context
|
| 4 |
+
|
| 5 |
+
You are working in the **FastAPI backend** of a full-stack task management application.
|
| 6 |
+
|
| 7 |
+
**Parent Instructions**: See root `CLAUDE.md` for global rules.
|
| 8 |
+
|
| 9 |
+
## Technology Stack
|
| 10 |
+
|
| 11 |
+
- **FastAPI** 0.115+
|
| 12 |
+
- **SQLModel** 0.0.24+ (NOT raw SQLAlchemy)
|
| 13 |
+
- **Pydantic v2** for validation
|
| 14 |
+
- **PostgreSQL 16** via Neon
|
| 15 |
+
- **UV** package manager
|
| 16 |
+
- **Alembic** for migrations
|
| 17 |
+
- **Python 3.13+**
|
| 18 |
+
|
| 19 |
+
## Critical Requirements
|
| 20 |
+
|
| 21 |
+
### SQLModel (NOT SQLAlchemy)
|
| 22 |
+
|
| 23 |
+
**Correct** (SQLModel):
|
| 24 |
+
```python
|
| 25 |
+
from sqlmodel import SQLModel, Field, Relationship
|
| 26 |
+
|
| 27 |
+
class User(SQLModel, table=True):
|
| 28 |
+
id: int | None = Field(default=None, primary_key=True)
|
| 29 |
+
email: str = Field(unique=True, index=True)
|
| 30 |
+
password_hash: str
|
| 31 |
+
|
| 32 |
+
tasks: list["Task"] = Relationship(back_populates="owner")
|
| 33 |
+
```
|
| 34 |
+
|
| 35 |
+
**Forbidden** (raw SQLAlchemy):
|
| 36 |
+
```python
|
| 37 |
+
from sqlalchemy import Column, Integer, String # NO!
|
| 38 |
+
```
|
| 39 |
+
|
| 40 |
+
### User Data Isolation (CRITICAL)
|
| 41 |
+
|
| 42 |
+
**ALWAYS filter by user_id**:
|
| 43 |
+
```python
|
| 44 |
+
from fastapi import Depends, HTTPException
|
| 45 |
+
from sqlmodel import select
|
| 46 |
+
|
| 47 |
+
async def get_user_tasks(
|
| 48 |
+
user_id: int,
|
| 49 |
+
current_user: User = Depends(get_current_user),
|
| 50 |
+
session: Session = Depends(get_session)
|
| 51 |
+
):
|
| 52 |
+
# Verify ownership
|
| 53 |
+
if user_id != current_user.id:
|
| 54 |
+
raise HTTPException(status_code=404) # NOT 403!
|
| 55 |
+
|
| 56 |
+
# Filter by user_id
|
| 57 |
+
statement = select(Task).where(Task.user_id == user_id)
|
| 58 |
+
tasks = session.exec(statement).all()
|
| 59 |
+
return tasks
|
| 60 |
+
```
|
| 61 |
+
|
| 62 |
+
### JWT Authentication
|
| 63 |
+
|
| 64 |
+
**Token Validation**:
|
| 65 |
+
```python
|
| 66 |
+
from jose import jwt, JWTError
|
| 67 |
+
from fastapi import Depends, HTTPException, status
|
| 68 |
+
from fastapi.security import HTTPBearer
|
| 69 |
+
|
| 70 |
+
security = HTTPBearer()
|
| 71 |
+
|
| 72 |
+
async def get_current_user(
|
| 73 |
+
token: str = Depends(security)
|
| 74 |
+
) -> User:
|
| 75 |
+
try:
|
| 76 |
+
payload = jwt.decode(
|
| 77 |
+
token.credentials,
|
| 78 |
+
settings.BETTER_AUTH_SECRET,
|
| 79 |
+
algorithms=[settings.JWT_ALGORITHM]
|
| 80 |
+
)
|
| 81 |
+
user_id: int = payload.get("sub")
|
| 82 |
+
if user_id is None:
|
| 83 |
+
raise HTTPException(status_code=401)
|
| 84 |
+
except JWTError:
|
| 85 |
+
raise HTTPException(status_code=401)
|
| 86 |
+
|
| 87 |
+
user = get_user_from_db(user_id)
|
| 88 |
+
if user is None:
|
| 89 |
+
raise HTTPException(status_code=401)
|
| 90 |
+
return user
|
| 91 |
+
```
|
| 92 |
+
|
| 93 |
+
### Password Security
|
| 94 |
+
|
| 95 |
+
```python
|
| 96 |
+
from passlib.context import CryptContext
|
| 97 |
+
|
| 98 |
+
pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")
|
| 99 |
+
|
| 100 |
+
def hash_password(password: str) -> str:
|
| 101 |
+
return pwd_context.hash(password)
|
| 102 |
+
|
| 103 |
+
def verify_password(plain: str, hashed: str) -> bool:
|
| 104 |
+
return pwd_context.verify(plain, hashed)
|
| 105 |
+
```
|
| 106 |
+
|
| 107 |
+
## Project Structure
|
| 108 |
+
|
| 109 |
+
```
|
| 110 |
+
src/
|
| 111 |
+
├── main.py # FastAPI app, CORS, startup
|
| 112 |
+
├── config.py # Environment variables
|
| 113 |
+
├── database.py # SQLModel engine, session
|
| 114 |
+
├── models/
|
| 115 |
+
│ ├── user.py # User SQLModel
|
| 116 |
+
│ └── task.py # Task SQLModel
|
| 117 |
+
├── schemas/
|
| 118 |
+
│ ├── auth.py # Request/response schemas
|
| 119 |
+
│ └── task.py # Request/response schemas
|
| 120 |
+
├── routers/
|
| 121 |
+
│ ├── auth.py # /api/auth/* endpoints
|
| 122 |
+
│ └── tasks.py # /api/{user_id}/tasks/* endpoints
|
| 123 |
+
├── middleware/
|
| 124 |
+
│ └── auth.py # JWT validation
|
| 125 |
+
└── utils/
|
| 126 |
+
├── security.py # bcrypt, JWT helpers
|
| 127 |
+
└── deps.py # Dependency injection
|
| 128 |
+
```
|
| 129 |
+
|
| 130 |
+
## API Patterns
|
| 131 |
+
|
| 132 |
+
### Endpoint Structure
|
| 133 |
+
|
| 134 |
+
```python
|
| 135 |
+
from fastapi import APIRouter, Depends, HTTPException
|
| 136 |
+
from sqlmodel import Session
|
| 137 |
+
|
| 138 |
+
router = APIRouter(prefix="/api/{user_id}/tasks", tags=["tasks"])
|
| 139 |
+
|
| 140 |
+
@router.get("/", response_model=list[TaskResponse])
|
| 141 |
+
async def list_tasks(
|
| 142 |
+
user_id: int,
|
| 143 |
+
current_user: User = Depends(get_current_user),
|
| 144 |
+
session: Session = Depends(get_session)
|
| 145 |
+
):
|
| 146 |
+
# Authorization check
|
| 147 |
+
if user_id != current_user.id:
|
| 148 |
+
raise HTTPException(status_code=404)
|
| 149 |
+
|
| 150 |
+
# Query with user_id filter
|
| 151 |
+
statement = select(Task).where(Task.user_id == user_id)
|
| 152 |
+
tasks = session.exec(statement).all()
|
| 153 |
+
return tasks
|
| 154 |
+
```
|
| 155 |
+
|
| 156 |
+
### Error Responses
|
| 157 |
+
|
| 158 |
+
```python
|
| 159 |
+
# 401 Unauthorized - Invalid/missing JWT
|
| 160 |
+
raise HTTPException(
|
| 161 |
+
status_code=401,
|
| 162 |
+
detail="Invalid authentication credentials"
|
| 163 |
+
)
|
| 164 |
+
|
| 165 |
+
# 404 Not Found - Resource doesn't exist OR unauthorized access
|
| 166 |
+
raise HTTPException(
|
| 167 |
+
status_code=404,
|
| 168 |
+
detail="Task not found"
|
| 169 |
+
)
|
| 170 |
+
|
| 171 |
+
# 400 Bad Request - Validation error
|
| 172 |
+
raise HTTPException(
|
| 173 |
+
status_code=400,
|
| 174 |
+
detail="Title must be between 1-200 characters"
|
| 175 |
+
)
|
| 176 |
+
|
| 177 |
+
# 409 Conflict - Duplicate resource
|
| 178 |
+
raise HTTPException(
|
| 179 |
+
status_code=409,
|
| 180 |
+
detail="An account with this email already exists"
|
| 181 |
+
)
|
| 182 |
+
```
|
| 183 |
+
|
| 184 |
+
## Database Migrations
|
| 185 |
+
|
| 186 |
+
**Creating Migrations**:
|
| 187 |
+
```bash
|
| 188 |
+
uv run alembic revision --autogenerate -m "Add users and tasks tables"
|
| 189 |
+
```
|
| 190 |
+
|
| 191 |
+
**Applying Migrations**:
|
| 192 |
+
```bash
|
| 193 |
+
uv run alembic upgrade head
|
| 194 |
+
```
|
| 195 |
+
|
| 196 |
+
**Migration File Structure**:
|
| 197 |
+
```python
|
| 198 |
+
def upgrade():
|
| 199 |
+
op.create_table(
|
| 200 |
+
'user',
|
| 201 |
+
sa.Column('id', sa.Integer(), primary_key=True),
|
| 202 |
+
sa.Column('email', sa.String(), unique=True),
|
| 203 |
+
sa.Column('password_hash', sa.String()),
|
| 204 |
+
)
|
| 205 |
+
op.create_index('ix_user_email', 'user', ['email'])
|
| 206 |
+
```
|
| 207 |
+
|
| 208 |
+
## Testing
|
| 209 |
+
|
| 210 |
+
**Fixtures** (`tests/conftest.py`):
|
| 211 |
+
```python
|
| 212 |
+
import pytest
|
| 213 |
+
from sqlmodel import Session, create_engine
|
| 214 |
+
from fastapi.testclient import TestClient
|
| 215 |
+
|
| 216 |
+
@pytest.fixture
|
| 217 |
+
def session():
|
| 218 |
+
engine = create_engine("sqlite:///:memory:")
|
| 219 |
+
SQLModel.metadata.create_all(engine)
|
| 220 |
+
with Session(engine) as session:
|
| 221 |
+
yield session
|
| 222 |
+
|
| 223 |
+
@pytest.fixture
|
| 224 |
+
def client(session):
|
| 225 |
+
app.dependency_overrides[get_session] = lambda: session
|
| 226 |
+
yield TestClient(app)
|
| 227 |
+
```
|
| 228 |
+
|
| 229 |
+
**Test Example**:
|
| 230 |
+
```python
|
| 231 |
+
def test_create_task(client, auth_headers):
|
| 232 |
+
response = client.post(
|
| 233 |
+
"/api/1/tasks",
|
| 234 |
+
headers=auth_headers,
|
| 235 |
+
json={"title": "Test Task", "description": "Test"}
|
| 236 |
+
)
|
| 237 |
+
assert response.status_code == 201
|
| 238 |
+
assert response.json()["title"] == "Test Task"
|
| 239 |
+
```
|
| 240 |
+
|
| 241 |
+
## Environment Variables
|
| 242 |
+
|
| 243 |
+
Required in `.env`:
|
| 244 |
+
```
|
| 245 |
+
DATABASE_URL=postgresql://taskuser:taskpassword@db:5432/taskdb
|
| 246 |
+
BETTER_AUTH_SECRET=your-secret-key-change-in-production
|
| 247 |
+
JWT_SECRET_KEY=your-jwt-secret-change-in-production
|
| 248 |
+
JWT_ALGORITHM=HS256
|
| 249 |
+
ACCESS_TOKEN_EXPIRE_DAYS=7
|
| 250 |
+
```
|
| 251 |
+
|
| 252 |
+
## Common Mistakes to Avoid
|
| 253 |
+
|
| 254 |
+
❌ Using raw SQLAlchemy instead of SQLModel
|
| 255 |
+
✅ Use SQLModel for all database models
|
| 256 |
+
|
| 257 |
+
❌ Trusting user_id from request parameters
|
| 258 |
+
✅ Always extract from validated JWT token
|
| 259 |
+
|
| 260 |
+
❌ Returning 403 for unauthorized access
|
| 261 |
+
✅ Return 404 to prevent information leakage
|
| 262 |
+
|
| 263 |
+
❌ SQL string concatenation
|
| 264 |
+
✅ SQLModel parameterized queries only
|
| 265 |
+
|
| 266 |
+
❌ Plaintext passwords
|
| 267 |
+
✅ bcrypt hashing always
|
| 268 |
+
|
| 269 |
+
## References
|
| 270 |
+
|
| 271 |
+
- Root Instructions: `../CLAUDE.md`
|
| 272 |
+
- Feature Spec: `../specs/001-task-crud-auth/spec.md`
|
| 273 |
+
- Constitution: `../.specify/memory/constitution.md`
|
Dockerfile
ADDED
|
@@ -0,0 +1,65 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Multi-stage Dockerfile for FastAPI backend
|
| 2 |
+
# Stage 1: Builder - Install dependencies and prepare the application
|
| 3 |
+
FROM python:3.13-slim AS builder
|
| 4 |
+
|
| 5 |
+
# Set working directory
|
| 6 |
+
WORKDIR /app
|
| 7 |
+
|
| 8 |
+
# Install system dependencies for building
|
| 9 |
+
RUN apt-get update && apt-get install -y --no-install-recommends \
|
| 10 |
+
gcc \
|
| 11 |
+
&& rm -rf /var/lib/apt/lists/*
|
| 12 |
+
|
| 13 |
+
# Install UV for fast dependency management
|
| 14 |
+
RUN pip install --no-cache-dir uv
|
| 15 |
+
|
| 16 |
+
# Copy pyproject.toml first for better layer caching
|
| 17 |
+
COPY pyproject.toml uv.lock* ./
|
| 18 |
+
|
| 19 |
+
# Create virtual environment and install dependencies
|
| 20 |
+
# Note: UV_SYSTEM_PYTHON=0 ensures packages are installed to venv, not system
|
| 21 |
+
ENV UV_SYSTEM_PYTHON=0
|
| 22 |
+
RUN uv venv /app/.venv && \
|
| 23 |
+
. /app/.venv/bin/activate && \
|
| 24 |
+
uv pip install -r pyproject.toml
|
| 25 |
+
|
| 26 |
+
# Copy the rest of the application
|
| 27 |
+
COPY . .
|
| 28 |
+
|
| 29 |
+
# Stage 2: Runner - Production-ready image
|
| 30 |
+
FROM python:3.13-slim AS runner
|
| 31 |
+
|
| 32 |
+
# Create non-root user for security
|
| 33 |
+
RUN groupadd --system --gid 1001 appgroup && \
|
| 34 |
+
useradd --system --uid 1001 --gid appgroup --shell /bin/false --create-home appuser
|
| 35 |
+
|
| 36 |
+
# Set working directory
|
| 37 |
+
WORKDIR /app
|
| 38 |
+
|
| 39 |
+
# Copy virtual environment from builder
|
| 40 |
+
COPY --from=builder /app/.venv /app/.venv
|
| 41 |
+
|
| 42 |
+
# Copy application code
|
| 43 |
+
COPY --from=builder /app/src /app/src
|
| 44 |
+
COPY --from=builder /app/pyproject.toml /app/pyproject.toml
|
| 45 |
+
|
| 46 |
+
# Set environment variables
|
| 47 |
+
ENV PYTHONUNBUFFERED=1
|
| 48 |
+
ENV PYTHONDONTWRITEBYTECODE=1
|
| 49 |
+
|
| 50 |
+
# Set ownership
|
| 51 |
+
RUN chown -R appuser:appgroup /app
|
| 52 |
+
|
| 53 |
+
# Use non-root user
|
| 54 |
+
USER appuser
|
| 55 |
+
|
| 56 |
+
# Expose the application port
|
| 57 |
+
EXPOSE 8000
|
| 58 |
+
|
| 59 |
+
# Health check
|
| 60 |
+
HEALTHCHECK --interval=30s --timeout=5s --start-period=10s --retries=3 \
|
| 61 |
+
CMD wget --no-verbose --tries=1 --spider http://localhost:${PORT:-8000}/health || exit 1
|
| 62 |
+
|
| 63 |
+
# Start the application using explicit path to uvicorn with dynamic port support
|
| 64 |
+
# Default to port 8000 if PORT env var is not set
|
| 65 |
+
CMD ["/bin/sh", "-c", "/app/.venv/bin/uvicorn src.main:app --host 0.0.0.0 --port ${PORT:-8000}"]
|
alembic.ini
ADDED
|
@@ -0,0 +1,145 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# A generic, single database configuration.
|
| 2 |
+
|
| 3 |
+
[alembic]
|
| 4 |
+
# path to migration scripts.
|
| 5 |
+
# this is typically a path given in POSIX (e.g. forward slashes)
|
| 6 |
+
# format, relative to the token %(here)s which refers to the location of this
|
| 7 |
+
# ini file
|
| 8 |
+
script_location = %(here)s/alembic
|
| 9 |
+
|
| 10 |
+
# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
|
| 11 |
+
# Uncomment the line below if you want the files to be prepended with date and time
|
| 12 |
+
# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
|
| 13 |
+
# for all available tokens
|
| 14 |
+
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
|
| 15 |
+
|
| 16 |
+
# sys.path path, will be prepended to sys.path if present.
|
| 17 |
+
# defaults to the current working directory. for multiple paths, the path separator
|
| 18 |
+
# is defined by "path_separator" below.
|
| 19 |
+
prepend_sys_path = .
|
| 20 |
+
|
| 21 |
+
# timezone to use when rendering the date within the migration file
|
| 22 |
+
# as well as the filename.
|
| 23 |
+
# If specified, requires the tzdata library which can be installed by adding
|
| 24 |
+
# `alembic[tz]` to the pip requirements.
|
| 25 |
+
# string value is passed to ZoneInfo()
|
| 26 |
+
# leave blank for localtime
|
| 27 |
+
# timezone =
|
| 28 |
+
|
| 29 |
+
# max length of characters to apply to the "slug" field
|
| 30 |
+
# truncate_slug_length = 40
|
| 31 |
+
|
| 32 |
+
# set to 'true' to run the environment during
|
| 33 |
+
# the 'revision' command, regardless of autogenerate
|
| 34 |
+
# revision_environment = false
|
| 35 |
+
|
| 36 |
+
# set to 'true' to allow .pyc and .pyo files without
|
| 37 |
+
# a source .py file to be detected as revisions in the
|
| 38 |
+
# versions/ directory
|
| 39 |
+
# sourceless = false
|
| 40 |
+
|
| 41 |
+
# version location specification; This defaults
|
| 42 |
+
# to <script_location>/versions. When using multiple version
|
| 43 |
+
# directories, initial revisions must be specified with --version-path.
|
| 44 |
+
# The path separator used here should be the separator specified by "path_separator"
|
| 45 |
+
# below.
|
| 46 |
+
# version_locations = %(here)s/bar:%(here)s/bat:%(here)s/alembic/versions
|
| 47 |
+
|
| 48 |
+
# path_separator; This indicates what character is used to split lists of file
|
| 49 |
+
# paths, including version_locations and prepend_sys_path within configparser
|
| 50 |
+
# files such as alembic.ini.
|
| 51 |
+
# The default rendered in new alembic.ini files is "os", which uses os.pathsep
|
| 52 |
+
# to provide os-dependent path splitting.
|
| 53 |
+
#
|
| 54 |
+
# Note that in order to support legacy alembic.ini files, this default does NOT
|
| 55 |
+
# take place if path_separator is not present in alembic.ini. If this
|
| 56 |
+
# option is omitted entirely, fallback logic is as follows:
|
| 57 |
+
#
|
| 58 |
+
# 1. Parsing of the version_locations option falls back to using the legacy
|
| 59 |
+
# "version_path_separator" key, which if absent then falls back to the legacy
|
| 60 |
+
# behavior of splitting on spaces and/or commas.
|
| 61 |
+
# 2. Parsing of the prepend_sys_path option falls back to the legacy
|
| 62 |
+
# behavior of splitting on spaces, commas, or colons.
|
| 63 |
+
#
|
| 64 |
+
# Valid values for path_separator are:
|
| 65 |
+
#
|
| 66 |
+
# path_separator = :
|
| 67 |
+
# path_separator = ;
|
| 68 |
+
# path_separator = space
|
| 69 |
+
# path_separator = newline
|
| 70 |
+
#
|
| 71 |
+
# Use os.pathsep. Default configuration used for new projects.
|
| 72 |
+
path_separator = os
|
| 73 |
+
|
| 74 |
+
# set to 'true' to search source files recursively
|
| 75 |
+
# in each "version_locations" directory
|
| 76 |
+
# new in Alembic version 1.10
|
| 77 |
+
# recursive_version_locations = false
|
| 78 |
+
|
| 79 |
+
# the output encoding used when revision files
|
| 80 |
+
# are written from script.py.mako
|
| 81 |
+
# output_encoding = utf-8
|
| 82 |
+
|
| 83 |
+
# database URL. This is consumed by the user-maintained env.py script only.
|
| 84 |
+
# other means of configuring database URLs may be customized within the env.py
|
| 85 |
+
# file.
|
| 86 |
+
sqlalchemy.url = postgresql://neondb_owner:npg_LsojKQF8bGn2@ep-mute-pine-a4g0wfsu-pooler.us-east-1.aws.neon.tech/neondb?sslmode=require&channel_binding=require
|
| 87 |
+
|
| 88 |
+
[post_write_hooks]
|
| 89 |
+
# post_write_hooks defines scripts or Python functions that are run
|
| 90 |
+
# on newly generated revision scripts. See the documentation for further
|
| 91 |
+
# detail and examples
|
| 92 |
+
|
| 93 |
+
# format using "black" - use the console_scripts runner, against the "black" entrypoint
|
| 94 |
+
# hooks = black
|
| 95 |
+
# black.type = console_scripts
|
| 96 |
+
# black.entrypoint = black
|
| 97 |
+
# black.options = -l 79 REVISION_SCRIPT_FILENAME
|
| 98 |
+
|
| 99 |
+
# lint with attempts to fix using "ruff" - use the module runner, against the "ruff" module
|
| 100 |
+
# hooks = ruff
|
| 101 |
+
# ruff.type = module
|
| 102 |
+
# ruff.module = ruff
|
| 103 |
+
# ruff.options = check --fix REVISION_SCRIPT_FILENAME
|
| 104 |
+
|
| 105 |
+
# Alternatively, use the exec runner to execute a binary found on your PATH
|
| 106 |
+
# hooks = ruff
|
| 107 |
+
# ruff.type = exec
|
| 108 |
+
# ruff.executable = ruff
|
| 109 |
+
# ruff.options = check --fix REVISION_SCRIPT_FILENAME
|
| 110 |
+
|
| 111 |
+
# Logging configuration. This is also consumed by the user-maintained
|
| 112 |
+
# env.py script only.
|
| 113 |
+
[loggers]
|
| 114 |
+
keys = root,sqlalchemy,alembic
|
| 115 |
+
|
| 116 |
+
[handlers]
|
| 117 |
+
keys = console
|
| 118 |
+
|
| 119 |
+
[formatters]
|
| 120 |
+
keys = generic
|
| 121 |
+
|
| 122 |
+
[logger_root]
|
| 123 |
+
level = WARNING
|
| 124 |
+
handlers = console
|
| 125 |
+
qualname =
|
| 126 |
+
|
| 127 |
+
[logger_sqlalchemy]
|
| 128 |
+
level = WARNING
|
| 129 |
+
handlers =
|
| 130 |
+
qualname = sqlalchemy.engine
|
| 131 |
+
|
| 132 |
+
[logger_alembic]
|
| 133 |
+
level = INFO
|
| 134 |
+
handlers =
|
| 135 |
+
qualname = alembic
|
| 136 |
+
|
| 137 |
+
[handler_console]
|
| 138 |
+
class = StreamHandler
|
| 139 |
+
args = (sys.stderr,)
|
| 140 |
+
level = NOTSET
|
| 141 |
+
formatter = generic
|
| 142 |
+
|
| 143 |
+
[formatter_generic]
|
| 144 |
+
format = %(levelname)-5.5s [%(name)s] %(message)s
|
| 145 |
+
datefmt = %H:%M:%S
|
alembic/README
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
Generic single-database configuration.
|
alembic/env.py
ADDED
|
@@ -0,0 +1,90 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from logging.config import fileConfig
|
| 2 |
+
|
| 3 |
+
from sqlalchemy import engine_from_config
|
| 4 |
+
from sqlalchemy import pool
|
| 5 |
+
from alembic import context
|
| 6 |
+
|
| 7 |
+
# Import SQLModel and models
|
| 8 |
+
from sqlmodel import SQLModel
|
| 9 |
+
|
| 10 |
+
import sys
|
| 11 |
+
import os
|
| 12 |
+
sys.path.append(os.path.dirname(os.path.dirname(__file__)))
|
| 13 |
+
|
| 14 |
+
from src.models.user import User # Import your models
|
| 15 |
+
from src.models.task import Task # Import your models
|
| 16 |
+
from src.models.project import Project # Import your models
|
| 17 |
+
from src.models.conversation import Conversation # Import your models
|
| 18 |
+
from src.models.message import Message # Import your models
|
| 19 |
+
from src.models.audit_log import AuditLog # Import your models
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
# this is the Alembic Config object, which provides
|
| 23 |
+
# access to the values within the .ini file in use.
|
| 24 |
+
config = context.config
|
| 25 |
+
|
| 26 |
+
# Interpret the config file for Python logging.
|
| 27 |
+
# This line sets up loggers basically.
|
| 28 |
+
if config.config_file_name is not None:
|
| 29 |
+
fileConfig(config.config_file_name)
|
| 30 |
+
|
| 31 |
+
# add your model's MetaData object here
|
| 32 |
+
# for 'autogenerate' support
|
| 33 |
+
target_metadata = SQLModel.metadata
|
| 34 |
+
|
| 35 |
+
# other values from the config, defined by the needs of env.py,
|
| 36 |
+
# can be acquired:
|
| 37 |
+
# my_important_option = config.get_main_option("my_important_option")
|
| 38 |
+
# ... etc.
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
def run_migrations_offline() -> None:
|
| 42 |
+
"""Run migrations in 'offline' mode.
|
| 43 |
+
|
| 44 |
+
This configures the context with just a URL
|
| 45 |
+
and not an Engine, though an Engine is acceptable
|
| 46 |
+
here as well. By skipping the Engine creation
|
| 47 |
+
we don't even need a DBAPI to be available.
|
| 48 |
+
|
| 49 |
+
Calls to context.execute() here emit the given string to the
|
| 50 |
+
script output.
|
| 51 |
+
|
| 52 |
+
"""
|
| 53 |
+
url = config.get_main_option("sqlalchemy.url")
|
| 54 |
+
context.configure(
|
| 55 |
+
url=url,
|
| 56 |
+
target_metadata=target_metadata,
|
| 57 |
+
literal_binds=True,
|
| 58 |
+
dialect_opts={"paramstyle": "named"},
|
| 59 |
+
)
|
| 60 |
+
|
| 61 |
+
with context.begin_transaction():
|
| 62 |
+
context.run_migrations()
|
| 63 |
+
|
| 64 |
+
|
| 65 |
+
def run_migrations_online() -> None:
|
| 66 |
+
"""Run migrations in 'online' mode.
|
| 67 |
+
|
| 68 |
+
In this scenario we need to create an Engine
|
| 69 |
+
and associate a connection with the context.
|
| 70 |
+
|
| 71 |
+
"""
|
| 72 |
+
connectable = engine_from_config(
|
| 73 |
+
config.get_section(config.config_ini_section),
|
| 74 |
+
prefix="sqlalchemy.",
|
| 75 |
+
poolclass=pool.NullPool,
|
| 76 |
+
)
|
| 77 |
+
|
| 78 |
+
with connectable.connect() as connection:
|
| 79 |
+
context.configure(
|
| 80 |
+
connection=connection, target_metadata=target_metadata
|
| 81 |
+
)
|
| 82 |
+
|
| 83 |
+
with context.begin_transaction():
|
| 84 |
+
context.run_migrations()
|
| 85 |
+
|
| 86 |
+
|
| 87 |
+
if context.is_offline_mode():
|
| 88 |
+
run_migrations_offline()
|
| 89 |
+
else:
|
| 90 |
+
run_migrations_online()
|
alembic/script.py.mako
ADDED
|
@@ -0,0 +1,28 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""${message}
|
| 2 |
+
|
| 3 |
+
Revision ID: ${up_revision}
|
| 4 |
+
Revises: ${down_revision | comma,n}
|
| 5 |
+
Create Date: ${create_date}
|
| 6 |
+
|
| 7 |
+
"""
|
| 8 |
+
from typing import Sequence, Union
|
| 9 |
+
|
| 10 |
+
from alembic import op
|
| 11 |
+
import sqlalchemy as sa
|
| 12 |
+
${imports if imports else ""}
|
| 13 |
+
|
| 14 |
+
# revision identifiers, used by Alembic.
|
| 15 |
+
revision: str = ${repr(up_revision)}
|
| 16 |
+
down_revision: Union[str, Sequence[str], None] = ${repr(down_revision)}
|
| 17 |
+
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
|
| 18 |
+
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
def upgrade() -> None:
|
| 22 |
+
"""Upgrade schema."""
|
| 23 |
+
${upgrades if upgrades else "pass"}
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
def downgrade() -> None:
|
| 27 |
+
"""Downgrade schema."""
|
| 28 |
+
${downgrades if downgrades else "pass"}
|
alembic/versions/3b6c60669e48_add_project_model_and_relationship_to_.py
ADDED
|
@@ -0,0 +1,52 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Add Project model and relationship to Task
|
| 2 |
+
|
| 3 |
+
Revision ID: 3b6c60669e48
|
| 4 |
+
Revises: ec70eaafa7b6
|
| 5 |
+
Create Date: 2025-12-19 03:46:01.389687
|
| 6 |
+
|
| 7 |
+
"""
|
| 8 |
+
from typing import Sequence, Union
|
| 9 |
+
|
| 10 |
+
from alembic import op
|
| 11 |
+
import sqlalchemy as sa
|
| 12 |
+
import sqlmodel
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
# revision identifiers, used by Alembic.
|
| 16 |
+
revision: str = '3b6c60669e48'
|
| 17 |
+
down_revision: Union[str, Sequence[str], None] = 'ec70eaafa7b6'
|
| 18 |
+
branch_labels: Union[str, Sequence[str], None] = None
|
| 19 |
+
depends_on: Union[str, Sequence[str], None] = None
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
def upgrade() -> None:
|
| 23 |
+
"""Upgrade schema."""
|
| 24 |
+
# ### commands auto generated by Alembic - please adjust! ###
|
| 25 |
+
op.create_table('project',
|
| 26 |
+
sa.Column('id', sa.Uuid(), nullable=False),
|
| 27 |
+
sa.Column('user_id', sa.Uuid(), nullable=False),
|
| 28 |
+
sa.Column('name', sa.String(length=200), nullable=False),
|
| 29 |
+
sa.Column('description', sa.String(length=1000), nullable=True),
|
| 30 |
+
sa.Column('color', sa.String(length=7), nullable=True),
|
| 31 |
+
sa.Column('created_at', sa.DateTime(), nullable=True),
|
| 32 |
+
sa.Column('updated_at', sa.DateTime(), nullable=True),
|
| 33 |
+
sa.Column('deadline', sa.DateTime(), nullable=True),
|
| 34 |
+
sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
|
| 35 |
+
sa.PrimaryKeyConstraint('id')
|
| 36 |
+
)
|
| 37 |
+
op.create_index(op.f('ix_project_user_id'), 'project', ['user_id'], unique=False)
|
| 38 |
+
op.add_column('task', sa.Column('project_id', sa.Uuid(), nullable=True))
|
| 39 |
+
op.create_index(op.f('ix_task_project_id'), 'task', ['project_id'], unique=False)
|
| 40 |
+
op.create_foreign_key(None, 'task', 'project', ['project_id'], ['id'])
|
| 41 |
+
# ### end Alembic commands ###
|
| 42 |
+
|
| 43 |
+
|
| 44 |
+
def downgrade() -> None:
|
| 45 |
+
"""Downgrade schema."""
|
| 46 |
+
# ### commands auto generated by Alembic - please adjust! ###
|
| 47 |
+
op.drop_constraint(None, 'task', type_='foreignkey')
|
| 48 |
+
op.drop_index(op.f('ix_task_project_id'), table_name='task')
|
| 49 |
+
op.drop_column('task', 'project_id')
|
| 50 |
+
op.drop_index(op.f('ix_project_user_id'), table_name='project')
|
| 51 |
+
op.drop_table('project')
|
| 52 |
+
# ### end Alembic commands ###
|
alembic/versions/4ac448e3f100_add_due_date_field_to_task_model.py
ADDED
|
@@ -0,0 +1,32 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Add due_date field to Task model
|
| 2 |
+
|
| 3 |
+
Revision ID: 4ac448e3f100
|
| 4 |
+
Revises: 3b6c60669e48
|
| 5 |
+
Create Date: 2025-12-19 03:50:35.687835
|
| 6 |
+
|
| 7 |
+
"""
|
| 8 |
+
from typing import Sequence, Union
|
| 9 |
+
|
| 10 |
+
from alembic import op
|
| 11 |
+
import sqlalchemy as sa
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
# revision identifiers, used by Alembic.
|
| 15 |
+
revision: str = '4ac448e3f100'
|
| 16 |
+
down_revision: Union[str, Sequence[str], None] = '3b6c60669e48'
|
| 17 |
+
branch_labels: Union[str, Sequence[str], None] = None
|
| 18 |
+
depends_on: Union[str, Sequence[str], None] = None
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
def upgrade() -> None:
|
| 22 |
+
"""Upgrade schema."""
|
| 23 |
+
# ### commands auto generated by Alembic - please adjust! ###
|
| 24 |
+
op.add_column('task', sa.Column('due_date', sa.DateTime(), nullable=True))
|
| 25 |
+
# ### end Alembic commands ###
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
def downgrade() -> None:
|
| 29 |
+
"""Downgrade schema."""
|
| 30 |
+
# ### commands auto generated by Alembic - please adjust! ###
|
| 31 |
+
op.drop_column('task', 'due_date')
|
| 32 |
+
# ### end Alembic commands ###
|
alembic/versions/6f0b6403a1d8_add_refresh_token_table.py
ADDED
|
@@ -0,0 +1,43 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""add refresh token table
|
| 2 |
+
|
| 3 |
+
Revision ID: 6f0b6403a1d8
|
| 4 |
+
Revises: 4ac448e3f100
|
| 5 |
+
Create Date: 2025-12-24 01:48:38.858071
|
| 6 |
+
|
| 7 |
+
"""
|
| 8 |
+
from typing import Sequence, Union
|
| 9 |
+
|
| 10 |
+
from alembic import op
|
| 11 |
+
import sqlalchemy as sa
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
# revision identifiers, used by Alembic.
|
| 15 |
+
revision: str = '6f0b6403a1d8'
|
| 16 |
+
down_revision: Union[str, Sequence[str], None] = '4ac448e3f100'
|
| 17 |
+
branch_labels: Union[str, Sequence[str], None] = None
|
| 18 |
+
depends_on: Union[str, Sequence[str], None] = None
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
def upgrade() -> None:
|
| 22 |
+
"""Upgrade schema."""
|
| 23 |
+
# Create refresh_tokens table
|
| 24 |
+
op.create_table('refresh_tokens',
|
| 25 |
+
sa.Column('id', sa.Integer(), nullable=False),
|
| 26 |
+
sa.Column('user_id', sa.Uuid(), nullable=False),
|
| 27 |
+
sa.Column('token', sa.String(), nullable=False),
|
| 28 |
+
sa.Column('expires_at', sa.DateTime(), nullable=False),
|
| 29 |
+
sa.Column('created_at', sa.DateTime(), nullable=True),
|
| 30 |
+
sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
|
| 31 |
+
sa.PrimaryKeyConstraint('id'),
|
| 32 |
+
sa.UniqueConstraint('token')
|
| 33 |
+
)
|
| 34 |
+
op.create_index(op.f('ix_refresh_tokens_user_id'), 'refresh_tokens', ['user_id'], unique=False)
|
| 35 |
+
op.create_index(op.f('ix_refresh_tokens_token'), 'refresh_tokens', ['token'], unique=True)
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
def downgrade() -> None:
|
| 39 |
+
"""Downgrade schema."""
|
| 40 |
+
# Drop refresh_tokens table
|
| 41 |
+
op.drop_index(op.f('ix_refresh_tokens_token'), table_name='refresh_tokens')
|
| 42 |
+
op.drop_index(op.f('ix_refresh_tokens_user_id'), table_name='refresh_tokens')
|
| 43 |
+
op.drop_table('refresh_tokens')
|
alembic/versions/8e3b5a7c2d9f_add_conversation_message_tables.py
ADDED
|
@@ -0,0 +1,59 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""add conversation and message tables
|
| 2 |
+
|
| 3 |
+
Revision ID: 8e3b5a7c2d9f
|
| 4 |
+
Revises: 6f0b6403a1d8
|
| 5 |
+
Create Date: 2025-12-24 01:50:45.930037
|
| 6 |
+
|
| 7 |
+
"""
|
| 8 |
+
from typing import Sequence, Union
|
| 9 |
+
|
| 10 |
+
from alembic import op
|
| 11 |
+
import sqlalchemy as sa
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
# revision identifiers, used by Alembic.
|
| 15 |
+
revision: str = '8e3b5a7c2d9f'
|
| 16 |
+
down_revision: Union[str, Sequence[str], None] = '6f0b6403a1d8'
|
| 17 |
+
branch_labels: Union[str, Sequence[str], None] = None
|
| 18 |
+
depends_on: Union[str, Sequence[str], None] = '6f0b6403a1d8'
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
def upgrade() -> None:
|
| 22 |
+
"""Upgrade schema."""
|
| 23 |
+
# Create conversation table
|
| 24 |
+
op.create_table('conversation',
|
| 25 |
+
sa.Column('id', sa.Integer(), nullable=False),
|
| 26 |
+
sa.Column('user_id', sa.Uuid(), nullable=False),
|
| 27 |
+
sa.Column('created_at', sa.DateTime(), nullable=True),
|
| 28 |
+
sa.Column('updated_at', sa.DateTime(), nullable=True),
|
| 29 |
+
sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
|
| 30 |
+
sa.PrimaryKeyConstraint('id')
|
| 31 |
+
)
|
| 32 |
+
op.create_index(op.f('ix_conversation_user_id'), 'conversation', ['user_id'], unique=False)
|
| 33 |
+
|
| 34 |
+
# Create message table
|
| 35 |
+
op.create_table('message',
|
| 36 |
+
sa.Column('id', sa.Integer(), nullable=False),
|
| 37 |
+
sa.Column('conversation_id', sa.Integer(), nullable=False),
|
| 38 |
+
sa.Column('user_id', sa.Uuid(), nullable=False),
|
| 39 |
+
sa.Column('role', sa.Enum('user', 'assistant', name='message_role'), nullable=False),
|
| 40 |
+
sa.Column('content', sa.Text(), nullable=False),
|
| 41 |
+
sa.Column('created_at', sa.DateTime(), nullable=True),
|
| 42 |
+
sa.ForeignKeyConstraint(['conversation_id'], ['conversation.id'], ),
|
| 43 |
+
sa.PrimaryKeyConstraint('id')
|
| 44 |
+
)
|
| 45 |
+
op.create_index(op.f('ix_message_conversation_id'), 'message', ['conversation_id'], unique=False)
|
| 46 |
+
|
| 47 |
+
|
| 48 |
+
def downgrade() -> None:
|
| 49 |
+
"""Downgrade schema."""
|
| 50 |
+
# Drop message table
|
| 51 |
+
op.drop_index(op.f('ix_message_conversation_id'), table_name='message')
|
| 52 |
+
op.drop_table('message')
|
| 53 |
+
|
| 54 |
+
# Drop conversation table
|
| 55 |
+
op.drop_index(op.f('ix_conversation_user_id'), table_name='conversation')
|
| 56 |
+
op.drop_table('conversation')
|
| 57 |
+
|
| 58 |
+
# Drop enum type
|
| 59 |
+
op.execute('DROP TYPE IF EXISTS message_role;')
|
alembic/versions/9a4b8c7d1e2f_add_is_ai_generated_to_tasks.py
ADDED
|
@@ -0,0 +1,27 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""add is_ai_generated to tasks
|
| 2 |
+
|
| 3 |
+
Revision ID: 9a4b8c7d1e2f
|
| 4 |
+
Revises: 8e3b5a7c2d9f
|
| 5 |
+
Create Date: 2025-12-25 05:47:00.000000
|
| 6 |
+
|
| 7 |
+
"""
|
| 8 |
+
from alembic import op
|
| 9 |
+
import sqlalchemy as sa
|
| 10 |
+
import uuid
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
# revision identifiers
|
| 14 |
+
revision = '9a4b8c7d1e2f'
|
| 15 |
+
down_revision = '8e3b5a7c2d9f'
|
| 16 |
+
branch_labels = None
|
| 17 |
+
depends_on = None
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
def upgrade():
|
| 21 |
+
# Add the is_ai_generated column to the tasks table
|
| 22 |
+
op.add_column('task', sa.Column('is_ai_generated', sa.Boolean(), nullable=False, server_default='false'))
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
def downgrade():
|
| 26 |
+
# Remove the is_ai_generated column from the tasks table
|
| 27 |
+
op.drop_column('task', 'is_ai_generated')
|
alembic/versions/__pycache__/3b6c60669e48_add_project_model_and_relationship_to_.cpython-312.pyc
ADDED
|
Binary file (3.45 kB). View file
|
|
|
alembic/versions/__pycache__/4ac448e3f100_add_due_date_field_to_task_model.cpython-312.pyc
ADDED
|
Binary file (1.35 kB). View file
|
|
|
alembic/versions/__pycache__/6f0b6403a1d8_add_refresh_token_table.cpython-312.pyc
ADDED
|
Binary file (2.65 kB). View file
|
|
|
alembic/versions/__pycache__/8e3b5a7c2d9f_add_conversation_message_tables.cpython-312.pyc
ADDED
|
Binary file (3.57 kB). View file
|
|
|
alembic/versions/__pycache__/9a4b8c7d1e2f_add_is_ai_generated_to_tasks.cpython-312.pyc
ADDED
|
Binary file (1.07 kB). View file
|
|
|
alembic/versions/__pycache__/a1b2c3d4e5f6_add_audit_log_table.cpython-312.pyc
ADDED
|
Binary file (2.8 kB). View file
|
|
|
alembic/versions/__pycache__/ec70eaafa7b6_initial_schema_with_users_and_tasks_.cpython-312.pyc
ADDED
|
Binary file (3.82 kB). View file
|
|
|
alembic/versions/a1b2c3d4e5f6_add_audit_log_table.py
ADDED
|
@@ -0,0 +1,43 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Add audit_log table
|
| 2 |
+
|
| 3 |
+
Revision ID: a1b2c3d4e5f6
|
| 4 |
+
Revises: 9a4b8c7d1e2f
|
| 5 |
+
Create Date: 2026-01-31 01:00:00.000000
|
| 6 |
+
|
| 7 |
+
"""
|
| 8 |
+
from typing import Sequence, Union
|
| 9 |
+
from alembic import op
|
| 10 |
+
import sqlalchemy as sa
|
| 11 |
+
import sqlmodel
|
| 12 |
+
from sqlalchemy.dialects import postgresql
|
| 13 |
+
|
| 14 |
+
# revision identifiers, used by Alembic.
|
| 15 |
+
revision: str = 'a1b2c3d4e5f6'
|
| 16 |
+
down_revision: Union[str, None] = '9a4b8c7d1e2f'
|
| 17 |
+
branch_labels: Union[str, Sequence[str], None] = None
|
| 18 |
+
depends_on: Union[str, Sequence[str], None] = None
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
def upgrade() -> None:
|
| 22 |
+
# Create audit_log table
|
| 23 |
+
op.create_table('auditlog',
|
| 24 |
+
sa.Column('id', sa.Integer(), nullable=False),
|
| 25 |
+
sa.Column('event_id', sa.String(), nullable=False),
|
| 26 |
+
sa.Column('event_type', sa.String(length=50), nullable=False),
|
| 27 |
+
sa.Column('user_id', sa.String(), nullable=False),
|
| 28 |
+
sa.Column('task_id', sa.Integer(), nullable=False),
|
| 29 |
+
sa.Column('event_data', postgresql.JSONB(astext_type=sa.Text()), nullable=False),
|
| 30 |
+
sa.Column('timestamp', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
|
| 31 |
+
sa.PrimaryKeyConstraint('id'),
|
| 32 |
+
sa.UniqueConstraint('event_id'),
|
| 33 |
+
sa.Index('ix_auditlog_event_id', 'event_id'),
|
| 34 |
+
sa.Index('ix_auditlog_user_id', 'user_id'),
|
| 35 |
+
sa.Index('ix_auditlog_task_id', 'task_id'),
|
| 36 |
+
sa.Index('ix_auditlog_event_type', 'event_type'),
|
| 37 |
+
sa.Index('ix_auditlog_timestamp', 'timestamp')
|
| 38 |
+
)
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
def downgrade() -> None:
|
| 42 |
+
# Drop audit_log table
|
| 43 |
+
op.drop_table('auditlog')
|
alembic/versions/ec70eaafa7b6_initial_schema_with_users_and_tasks_.py
ADDED
|
@@ -0,0 +1,54 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Initial schema with users and tasks tables
|
| 2 |
+
|
| 3 |
+
Revision ID: ec70eaafa7b6
|
| 4 |
+
Revises:
|
| 5 |
+
Create Date: 2025-12-16 05:07:24.251683
|
| 6 |
+
|
| 7 |
+
"""
|
| 8 |
+
from typing import Sequence, Union
|
| 9 |
+
|
| 10 |
+
from alembic import op
|
| 11 |
+
import sqlalchemy as sa
|
| 12 |
+
import sqlmodel
|
| 13 |
+
# revision identifiers, used by Alembic.
|
| 14 |
+
revision: str = 'ec70eaafa7b6'
|
| 15 |
+
down_revision: Union[str, Sequence[str], None] = None
|
| 16 |
+
branch_labels: Union[str, Sequence[str], None] = None
|
| 17 |
+
depends_on: Union[str, Sequence[str], None] = None
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
def upgrade() -> None:
|
| 21 |
+
"""Upgrade schema."""
|
| 22 |
+
# ### commands auto generated by Alembic - please adjust! ###
|
| 23 |
+
op.create_table('user',
|
| 24 |
+
sa.Column('id', sa.Uuid(), nullable=False),
|
| 25 |
+
sa.Column('email', sqlmodel.sql.sqltypes.AutoString(length=255), nullable=False),
|
| 26 |
+
sa.Column('password_hash', sqlmodel.sql.sqltypes.AutoString(length=255), nullable=False),
|
| 27 |
+
sa.Column('created_at', sa.DateTime(), nullable=True),
|
| 28 |
+
sa.Column('updated_at', sa.DateTime(), nullable=True),
|
| 29 |
+
sa.PrimaryKeyConstraint('id')
|
| 30 |
+
)
|
| 31 |
+
op.create_index(op.f('ix_user_email'), 'user', ['email'], unique=True)
|
| 32 |
+
op.create_table('task',
|
| 33 |
+
sa.Column('id', sa.Integer(), nullable=False),
|
| 34 |
+
sa.Column('user_id', sa.Uuid(), nullable=False),
|
| 35 |
+
sa.Column('title', sqlmodel.sql.sqltypes.AutoString(length=200), nullable=False),
|
| 36 |
+
sa.Column('description', sqlmodel.sql.sqltypes.AutoString(length=1000), nullable=True),
|
| 37 |
+
sa.Column('completed', sa.Boolean(), nullable=False),
|
| 38 |
+
sa.Column('created_at', sa.DateTime(), nullable=True),
|
| 39 |
+
sa.Column('updated_at', sa.DateTime(), nullable=True),
|
| 40 |
+
sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
|
| 41 |
+
sa.PrimaryKeyConstraint('id')
|
| 42 |
+
)
|
| 43 |
+
op.create_index(op.f('ix_task_user_id'), 'task', ['user_id'], unique=False)
|
| 44 |
+
# ### end Alembic commands ###
|
| 45 |
+
|
| 46 |
+
|
| 47 |
+
def downgrade() -> None:
|
| 48 |
+
"""Downgrade schema."""
|
| 49 |
+
# ### commands auto generated by Alembic - please adjust! ###
|
| 50 |
+
op.drop_index(op.f('ix_task_user_id'), table_name='task')
|
| 51 |
+
op.drop_table('task')
|
| 52 |
+
op.drop_index(op.f('ix_user_email'), table_name='user')
|
| 53 |
+
op.drop_table('user')
|
| 54 |
+
# ### end Alembic commands ###
|
main.py
ADDED
|
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Minimal CLI entry point for the task-api package.

def main():
    """Print a short greeting identifying the service."""
    greeting = "Hello from task-api!"
    print(greeting)


if __name__ == "__main__":
    main()
|
requirements.txt
ADDED
|
@@ -0,0 +1,17 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
alembic>=1.17.2
|
| 2 |
+
fastapi>=0.124.4
|
| 3 |
+
passlib[bcrypt]>=1.7.4
|
| 4 |
+
psycopg2-binary>=2.9.11
|
| 5 |
+
pydantic-settings>=2.12.0
|
| 6 |
+
pydantic[email]>=2.12.5
|
| 7 |
+
python-jose[cryptography]>=3.5.0
|
| 8 |
+
python-multipart>=0.0.20
|
| 9 |
+
sqlmodel>=0.0.27
|
| 10 |
+
uvicorn>=0.38.0
|
| 11 |
+
httpx>=0.28.1
|
| 12 |
+
pytest>=9.0.2
|
| 13 |
+
pytest-asyncio>=1.3.0
|
| 14 |
+
python-dotenv>=1.0.1
|
| 15 |
+
bcrypt>=3.1.3,<4.0.0
|
| 16 |
+
cryptography>=45.0.0
|
| 17 |
+
dapr>=1.13.0
|
src/__init__.py
ADDED
|
File without changes
|
src/agent_config.py
ADDED
|
@@ -0,0 +1,54 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
from datetime import date

from openai import OpenAI
from dotenv import load_dotenv

from .mcp_server import get_mcp_tools, get_mcp_tools_for_gemin_api

load_dotenv()

# Initialize OpenAI client pointed at Google's OpenAI-compatible Gemini endpoint.
client = OpenAI(
    api_key=os.getenv("GEMINI_API_KEY"),
    base_url="https://generativelanguage.googleapis.com/v1beta/openai/",
)

# Agent system-prompt template.  {today} is substituted at call time so the
# model always sees the current date — the original hard-coded "2025-12-29",
# which silently went stale and broke relative-date requests.
_AGENT_INSTRUCTIONS_TEMPLATE = """
You are TaskFlow AI, a premium productivity assistant. Your goal is to help users manage their life and work tasks with zero friction.

You have access to several tools:
- `add_task`: Use this when the user wants to create a new task. You can optionally specify a `project_name` to group them.
- `list_tasks`: Use this to show the user their tasks. You can filter by status.
- `complete_task`: Use this to mark a task as done.
- `delete_task`: Use this to remove a task.
- `update_task`: Use this to change a task's title or description.
- `create_project`: Create a new project (collection of tasks) with a specific name, description, and color.
- `list_projects`: Show all existing projects.
- `get_calendar`: Retrieve tasks and events for a date range. "Calendar events" are simply tasks with a due date.

Guidelines:
1. Always be professional, helpful, and concise.
2. If a user's request is vague, ask for clarification.
3. When listing tasks or calendar items, use a clean markdown format. Use bullet points or tables.
4. If a tool call fails, explain the issue politely to the user.
5. You can handle multiple tasks/projects in one go if the user asks for it.
6. Always confirm when an action has been successfully performed.
7. NEVER ask the user for their user_id. It is handled automatically by the system.
8. Today's date is {today}. Use this for relative date requests.
9. When the user asks for "calendar" or "schedule", use `get_calendar`.

Your tone should be encouraging and efficient. Let's get things done!
"""


def _build_instructions() -> str:
    """Render the agent instructions with today's real date substituted."""
    return _AGENT_INSTRUCTIONS_TEMPLATE.format(today=date.today().isoformat())


# Backward-compatible module-level constant (date snapshot taken at import time).
AGENT_INSTRUCTIONS = _build_instructions()


def get_todo_agent():
    """
    Configure and return the AI agent with Gemini 2.5 Flash and MCP tools.

    Returns a dict holding the OpenAI-compatible client, the model name, and
    the system instructions (re-rendered so the embedded date stays current).
    """
    return {
        "client": client,
        "model": "gemini-2.5-flash",
        "instructions": _build_instructions(),
    }


# Default agent configuration used by the chat endpoint.
todo_agent_config = get_todo_agent()
|
src/config.py
ADDED
|
@@ -0,0 +1,27 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from pydantic_settings import BaseSettings
from typing import Optional


class Settings(BaseSettings):
    """Application configuration, loaded from environment variables / .env.

    SECURITY: earlier revisions of this file hard-coded a live database URL
    and AI API keys as in-source defaults (with the Gemini and OpenAI keys
    swapped, too).  Those credentials must be considered compromised and
    rotated; all secrets now come exclusively from the environment.
    """

    # Database — supply via DATABASE_URL; this default is a non-secret local
    # development fallback only.
    DATABASE_URL: str = "postgresql://localhost:5432/taskdb"

    # Auth — override every secret in production.
    BETTER_AUTH_SECRET: str = "your-secret-key-change-in-production"
    JWT_SECRET_KEY: str = "your-jwt-secret-change-in-production"
    JWT_ALGORITHM: str = "HS256"
    ACCESS_TOKEN_EXPIRE_DAYS: int = 7
    JWT_COOKIE_SECURE: bool = False  # Set to True in production (requires HTTPS)
    JWT_COOKIE_SAMESITE: str = "lax"  # "lax" | "strict" | "none" (use "none" for cross-site cookies in production)

    # CORS — comma-separated list of allowed frontend origins.
    FRONTEND_URL: str = "https://victorious-mushroom-09538ac1e.2.azurestaticapps.net"

    # AI API keys — deliberately no defaults; set GEMINI_API_KEY /
    # OPENAI_API_KEY in the environment or .env file.
    GEMINI_API_KEY: Optional[str] = None
    OPENAI_API_KEY: Optional[str] = None

    class Config:
        env_file = ".env"


settings = Settings()
|
src/database.py
ADDED
|
@@ -0,0 +1,24 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from sqlmodel import create_engine, Session
from contextlib import contextmanager
from .config import settings
# Create the database engine.  pool_pre_ping revalidates pooled connections
# before each checkout, which matters for managed Postgres providers that
# drop idle connections.
engine = create_engine(
    settings.DATABASE_URL,
    echo=False,  # Set to True for SQL query logging
    pool_pre_ping=True,
    pool_size=5,      # steady-state pooled connections
    max_overflow=10   # extra connections allowed under burst load
)


@contextmanager
def get_session():
    """Context manager for database sessions.

    Yields a Session bound to the shared engine; the session is closed
    automatically when the block exits (commit/rollback is the caller's job).
    """
    with Session(engine) as session:
        yield session


def get_session_dep():
    """Dependency for FastAPI to get database session.

    Generator form so FastAPI closes the session after the request finishes.
    """
    with get_session() as session:
        yield session
|
src/events.py
ADDED
|
@@ -0,0 +1,123 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import asyncio
import logging
import time
import uuid
from datetime import datetime, timezone
from typing import Any, Dict, Optional

import httpx

from .models.task import Task  # Assuming Task model exists
from .utils.circuit_breaker import kafka_circuit_breaker
from .utils.metrics import (
    increment_event_published,
    observe_event_publish_duration,
    increment_event_publish_error,
    increment_rate_limiter_request,
    increment_rate_limiter_rejection
)
from .utils.rate_limiter import event_publisher_rate_limiter
| 18 |
+
|
| 19 |
+
# Configure logging
|
| 20 |
+
logging.basicConfig(level=logging.INFO)
|
| 21 |
+
logger = logging.getLogger(__name__)
|
| 22 |
+
|
| 23 |
+
# Retry configuration
|
| 24 |
+
MAX_RETRIES = 3
|
| 25 |
+
RETRY_DELAY = 1 # seconds
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
async def publish_task_event(event_type: str, task: Task):
    """
    Publish a task event to Kafka via Dapr with retry, circuit breaking, and
    per-user rate limiting.  Failures never propagate to the caller
    (graceful degradation): the main task operation must still succeed.

    Args:
        event_type: The type of event ('created', 'updated', 'completed', 'deleted')
        task: The task object that triggered the event
    """
    start_time = time.time()

    # Rate limiting is keyed per user so one noisy user cannot starve others.
    user_id = getattr(task, 'user_id', 'unknown')
    rate_limit_key = f"event_publisher:{user_id}"

    increment_rate_limiter_request(rate_limit_key)

    if not event_publisher_rate_limiter.is_allowed(rate_limit_key):
        logger.warning(f"Rate limit exceeded for user {user_id}, event type {event_type}")
        increment_rate_limiter_rejection(rate_limit_key)
        # Skip only the event publishing; the main operation continues.
        logger.info(f"Skipping event publishing due to rate limit for user {user_id}")
        return

    event = {
        "event_id": str(uuid.uuid4()),
        "event_type": event_type,
        # datetime.utcnow() is deprecated (Python 3.12+); take an aware UTC
        # timestamp but keep the original "...Z" wire format unchanged.
        "timestamp": datetime.now(timezone.utc).replace(tzinfo=None).isoformat() + "Z",
        "user_id": str(user_id),  # Convert to string for consistency
        "task_id": getattr(task, 'id', 0),  # Assuming id exists on task
        "task_data": {
            "title": getattr(task, 'title', ''),
            "description": getattr(task, 'description', ''),
            "completed": getattr(task, 'completed', False)
        }
    }

    async def _publish_with_retry():
        """POST the event to the Dapr sidecar, retrying with exponential backoff."""
        for attempt in range(MAX_RETRIES):
            try:
                async with httpx.AsyncClient() as client:
                    response = await client.post(
                        "http://localhost:3500/v1.0/publish/kafka-pubsub/task-events",
                        json=event
                    )
                    response.raise_for_status()
                    logger.info(f"Event published successfully: {event_type} for task {task.id} on attempt {attempt + 1}")
                    return  # Success, exit the function
            # httpx.HTTPError covers both transport failures (RequestError) and
            # non-2xx responses (HTTPStatusError).  The original caught only
            # RequestError, so a 5xx from the sidecar aborted without retry.
            except httpx.HTTPError as e:
                logger.warning(f"Attempt {attempt + 1} failed to publish event: {e}")
                if attempt == MAX_RETRIES - 1:  # Last attempt
                    logger.error(f"Failed to publish event after {MAX_RETRIES} attempts: {e}")
                    raise  # Re-raise after all retries are exhausted
                # Wait before retrying (exponential backoff).
                await asyncio.sleep(RETRY_DELAY * (2 ** attempt))
        # Unreachable: the last attempt either returns or raises.  The original
        # had a trailing log+raise here that could never execute.

    # Graceful degradation: if publishing fails, log but never fail the caller.
    try:
        await kafka_circuit_breaker.call(_publish_with_retry)
        duration = time.time() - start_time
        logger.info(f"Successfully published {event_type} event for task {task.id}")
        increment_event_published(event_type)
        observe_event_publish_duration(event_type, duration)
    except Exception as e:
        duration = time.time() - start_time
        logger.error(f"Event publishing failed for task {task.id}, but main operation continues: {e}")
        increment_event_publish_error(event_type)
        observe_event_publish_duration(event_type, duration)
        # Intentionally swallowed: graceful degradation.
|
| 104 |
+
|
| 105 |
+
|
| 106 |
+
async def publish_created_event(task: Task):
    """Publish a 'created' event for a new task."""
    # Thin wrappers: all retry/circuit-breaker/rate-limit handling lives in
    # publish_task_event, so these coroutines never raise.
    await publish_task_event("created", task)


async def publish_updated_event(task: Task):
    """Publish an 'updated' event for a modified task."""
    await publish_task_event("updated", task)


async def publish_deleted_event(task: Task):
    """Publish a 'deleted' event for a deleted task."""
    await publish_task_event("deleted", task)


async def publish_completed_event(task: Task):
    """Publish a 'completed' event for a completed task."""
    await publish_task_event("completed", task)
|
src/main.py
ADDED
|
@@ -0,0 +1,62 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from fastapi import FastAPI
|
| 2 |
+
from fastapi.middleware.cors import CORSMiddleware
|
| 3 |
+
from .config import settings
|
| 4 |
+
|
| 5 |
+
from .routers import auth, tasks, projects, chat, audit
|
| 6 |
+
from .utils.health_check import kafka_health_checker
|
| 7 |
+
|
| 8 |
+
app = FastAPI(
|
| 9 |
+
title="Task API",
|
| 10 |
+
description="Task management API with authentication",
|
| 11 |
+
version="1.0.0"
|
| 12 |
+
)
|
| 13 |
+
|
| 14 |
+
# Include routers
|
| 15 |
+
app.include_router(auth.router)
|
| 16 |
+
app.include_router(tasks.router)
|
| 17 |
+
app.include_router(projects.router)
|
| 18 |
+
app.include_router(chat.router)
|
| 19 |
+
app.include_router(audit.router)
|
| 20 |
+
|
| 21 |
+
# Prepare allowed origins from settings.FRONTEND_URL (comma separated)
|
| 22 |
+
_frontend_origins = [o.strip() for o in settings.FRONTEND_URL.split(",")] if settings.FRONTEND_URL else []
|
| 23 |
+
|
| 24 |
+
# CORS configuration (development and production)
|
| 25 |
+
# Include common development URLs and Minikube/K8s service URLs
|
| 26 |
+
allowed_origins = _frontend_origins + [
|
| 27 |
+
# Local development (all common ports)
|
| 28 |
+
"http://localhost:3000", "http://localhost:3001", "http://localhost:8000",
|
| 29 |
+
"http://localhost:38905", "http://localhost:40529", # User's dynamic ports
|
| 30 |
+
"http://127.0.0.1:3000", "http://127.0.0.1:3001", "http://127.0.0.1:8000",
|
| 31 |
+
"http://127.0.0.1:38905", "http://127.0.0.1:40529", # User's dynamic ports
|
| 32 |
+
# Minikube NodePort (replace with your Minikube IP in production)
|
| 33 |
+
"http://192.168.49.2:30080", "http://192.168.49.2:30081",
|
| 34 |
+
"http://192.168.49.2:30147", "http://192.168.49.2:30148",
|
| 35 |
+
# Kubernetes internal service names (for cluster-internal communication)
|
| 36 |
+
"http://todo-chatbot-backend:8000",
|
| 37 |
+
"http://todo-chatbot-frontend:3000", 'https://ai-powered-full-stack-task-manageme.vercel.app', "https://victorious-mushroom-09538ac1e.2.azurestaticapps.net"
|
| 38 |
+
]
|
| 39 |
+
|
| 40 |
+
app.add_middleware(
|
| 41 |
+
CORSMiddleware,
|
| 42 |
+
allow_origins=allowed_origins,
|
| 43 |
+
allow_credentials=True,
|
| 44 |
+
allow_methods=["*"],
|
| 45 |
+
allow_headers=["*"],
|
| 46 |
+
# Expose origin header for debugging if needed
|
| 47 |
+
expose_headers=["Access-Control-Allow-Origin"],
|
| 48 |
+
)
|
| 49 |
+
|
| 50 |
+
@app.get("/api/health")
async def health_check():
    """Liveness probe endpoint: unconditionally reports the API as healthy."""
    payload = {"status": "healthy"}
    return payload
|
| 53 |
+
|
| 54 |
+
@app.get("/api/health/kafka")
async def kafka_health_check():
    """Check Kafka connectivity through Dapr"""
    # Delegate straight to the shared checker and relay its verdict as-is.
    return await kafka_health_checker.check_kafka_connectivity()
|
| 59 |
+
|
| 60 |
+
# Allow running this module directly for local development; in containers
# the server is normally launched by an external uvicorn/gunicorn command.
if __name__ == "__main__":
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=8000)
|
src/mcp_server.py
ADDED
|
@@ -0,0 +1,140 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import asyncio
|
| 2 |
+
from fastmcp import FastMCP
|
| 3 |
+
from .mcp_tools.task_tools import get_task_tools, execute_add_task, execute_list_tasks, execute_complete_task, execute_delete_task, execute_update_task
|
| 4 |
+
from pydantic import BaseModel
|
| 5 |
+
|
| 6 |
+
|
| 7 |
+
def create_mcp_server():
    """Create and configure the MCP server with task tools.

    Builds a FastMCP instance and registers one async handler per task tool
    (add_task, list_tasks, complete_task, delete_task, update_task). Each
    handler packs its arguments into the matching pydantic params model and
    delegates to the synchronous execute_* helper in mcp_tools.task_tools.

    NOTE(review): the async handlers call synchronous DB helpers, which
    blocks the event loop for the duration of each query — confirm this is
    acceptable for the deployment.
    NOTE(review): `input_schema=` as a keyword to FastMCP's @tool decorator
    and `result.dict()` (pydantic-v1 spelling, deprecated under pydantic v2)
    both depend on pinned library versions — verify against requirements.txt.
    """
    # Create the FastMCP server instance
    mcp_server = FastMCP("task-mcp-server")

    # Add each tool to the server with its handler
    @mcp_server.tool(
        name="add_task",
        description="Create a new task for the user",
        input_schema={
            "type": "object",
            "properties": {
                "user_id": {"type": "string", "description": "User ID to create task for"},
                "title": {"type": "string", "description": "Task title, 1-200 characters"},
                "description": {"type": "string", "description": "Task description, optional, max 1000 chars"}
            },
            "required": ["user_id", "title"]
        }
    )
    async def handle_add_task(user_id: str, title: str, description: str = None):
        # Params class imported lazily — presumably to avoid a circular
        # import at module load time; confirm.
        from .mcp_tools.task_tools import AddTaskParams
        params = AddTaskParams(user_id=user_id, title=title, description=description)
        result = execute_add_task(params)
        return result.dict()

    @mcp_server.tool(
        name="list_tasks",
        description="Retrieve user's tasks",
        input_schema={
            "type": "object",
            "properties": {
                "user_id": {"type": "string", "description": "User ID to list tasks for"},
                "status": {"type": "string", "enum": ["all", "pending", "completed"], "default": "all"}
            },
            "required": ["user_id"]
        }
    )
    async def handle_list_tasks(user_id: str, status: str = "all"):
        from .mcp_tools.task_tools import ListTasksParams
        params = ListTasksParams(user_id=user_id, status=status)
        result = execute_list_tasks(params)
        return result.dict()

    @mcp_server.tool(
        name="complete_task",
        description="Mark a task as complete",
        input_schema={
            "type": "object",
            "properties": {
                "user_id": {"type": "string", "description": "User ID of the task owner"},
                "task_id": {"type": "integer", "description": "ID of the task to update"}
            },
            "required": ["user_id", "task_id"]
        }
    )
    async def handle_complete_task(user_id: str, task_id: int):
        from .mcp_tools.task_tools import CompleteTaskParams
        params = CompleteTaskParams(user_id=user_id, task_id=task_id)
        result = execute_complete_task(params)
        return result.dict()

    @mcp_server.tool(
        name="delete_task",
        description="Remove a task",
        input_schema={
            "type": "object",
            "properties": {
                "user_id": {"type": "string", "description": "User ID of the task owner"},
                "task_id": {"type": "integer", "description": "ID of the task to delete"}
            },
            "required": ["user_id", "task_id"]
        }
    )
    async def handle_delete_task(user_id: str, task_id: int):
        from .mcp_tools.task_tools import DeleteTaskParams
        params = DeleteTaskParams(user_id=user_id, task_id=task_id)
        result = execute_delete_task(params)
        return result.dict()

    @mcp_server.tool(
        name="update_task",
        description="Modify task details",
        input_schema={
            "type": "object",
            "properties": {
                "user_id": {"type": "string", "description": "User ID of the task owner"},
                "task_id": {"type": "integer", "description": "ID of the task to update"},
                "title": {"type": "string", "description": "New task title"},
                "description": {"type": "string", "description": "New task description"}
            },
            "required": ["user_id", "task_id"]
        }
    )
    async def handle_update_task(user_id: str, task_id: int, title: str = None, description: str = None):
        from .mcp_tools.task_tools import UpdateTaskParams
        params = UpdateTaskParams(user_id=user_id, task_id=task_id, title=title, description=description)
        result = execute_update_task(params)
        return result.dict()

    return mcp_server
|
| 107 |
+
|
| 108 |
+
|
| 109 |
+
# Global MCP server instance - create only when needed
_mcp_server_instance = None


def get_mcp_server_instance():
    """Return the process-wide MCP server, building it lazily on first use."""
    global _mcp_server_instance
    if _mcp_server_instance is not None:
        return _mcp_server_instance
    _mcp_server_instance = create_mcp_server()
    return _mcp_server_instance
|
| 117 |
+
|
| 118 |
+
|
| 119 |
+
def get_mcp_tools():
    """Get the list of MCP tools for registration with the agent."""
    # The definitions are plain dicts living in mcp_tools.task_tools; import
    # lazily so this module can be imported without touching the tool module
    # eagerly, and skip the server instance entirely.
    from .mcp_tools.task_tools import get_task_tools as _load_tool_defs
    return _load_tool_defs()
|
| 124 |
+
|
| 125 |
+
def get_mcp_tools_for_gemin_api():
    """Get the list of tools for Gemini API."""
    # Thin delegate: the Gemini-format definitions are produced by the tool
    # module itself (note: "gemin" spelling kept — it is the public name).
    from .mcp_tools.task_tools import get_task_tools_for_gemin_api as _load_gemini_defs
    return _load_gemini_defs()
|
| 130 |
+
|
| 131 |
+
|
| 132 |
+
# Run the server if this file is executed directly
if __name__ == "__main__":
    import sys
    if len(sys.argv) > 1 and sys.argv[1] == "--stdio":
        # Run the server using stdio transport
        # NOTE(review): `fastmcp.stdio.run_stdio_server` may not exist in
        # current fastmcp releases (FastMCP instances expose .run()) —
        # confirm against the installed fastmcp version.
        from fastmcp.stdio import run_stdio_server
        run_stdio_server(get_mcp_server_instance())
    else:
        print("Usage: python mcp_server.py --stdio")
|
src/mcp_tools/__init__.py
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from .task_tools import get_task_tools
|
| 2 |
+
|
| 3 |
+
__all__ = ["get_task_tools"]
|
src/mcp_tools/task_tools.py
ADDED
|
@@ -0,0 +1,539 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# No MCP import needed - this file only defines tool parameters and functions
|
| 2 |
+
from pydantic import BaseModel, Field
|
| 3 |
+
from typing import List, Optional
|
| 4 |
+
import json
|
| 5 |
+
from sqlmodel import Session, select
|
| 6 |
+
from uuid import UUID
|
| 7 |
+
import sys
|
| 8 |
+
import os
|
| 9 |
+
|
| 10 |
+
# Add the src directory to the path so we can import our models
|
| 11 |
+
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
|
| 12 |
+
|
| 13 |
+
from sqlmodel import Session, select
|
| 14 |
+
from typing import List, Optional
|
| 15 |
+
from datetime import datetime
|
| 16 |
+
import uuid
|
| 17 |
+
from ..models.task import Task, TaskCreate, TaskUpdate
|
| 18 |
+
from ..models.project import Project, ProjectCreate, ProjectUpdate
|
| 19 |
+
from ..models.user import User
|
| 20 |
+
from ..database import engine
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
class AddTaskParams(BaseModel):
    """Arguments accepted by the ``add_task`` tool."""

    user_id: str = Field(description="User ID to create task for")
    title: str = Field(description="Task title, 1-200 characters")
    description: Optional[str] = Field(default=None, description="Task description, optional, max 1000 chars")
    due_date: Optional[str] = Field(default=None, description="Due date in ISO format (YYYY-MM-DD)")
    project_name: Optional[str] = Field(default=None, description="Optional name of the project to associate this task with")


class AddTaskResult(BaseModel):
    """Summary returned after ``add_task`` succeeds."""

    task_id: int
    status: str  # "created" on success (see execute_add_task)
    title: str
|
| 35 |
+
|
| 36 |
+
|
| 37 |
+
class ListTasksParams(BaseModel):
    """Arguments accepted by the ``list_tasks`` tool."""

    user_id: str = Field(description="User ID to list tasks for")
    status: Optional[str] = Field(default="all", description="Task status filter: 'all', 'pending', 'completed'")


class ListTasksResultItem(BaseModel):
    """One task row in a ``list_tasks`` response."""

    id: int
    title: str
    completed: bool
    created_at: str  # ISO-8601 string, or "" when the row has no created_at


class ListTasksResult(BaseModel):
    """Envelope returned by ``list_tasks``."""

    tasks: List[ListTasksResultItem]
|
| 51 |
+
|
| 52 |
+
|
| 53 |
+
class CompleteTaskParams(BaseModel):
    """Arguments accepted by the ``complete_task`` tool."""

    user_id: str = Field(description="User ID of the task owner")
    task_id: int = Field(description="ID of the task to complete")


class CompleteTaskResult(BaseModel):
    """Summary returned after ``complete_task`` succeeds."""

    task_id: int
    status: str  # "completed" on success (see execute_complete_task)
    title: str
|
| 62 |
+
|
| 63 |
+
|
| 64 |
+
class DeleteTaskParams(BaseModel):
    """Arguments accepted by the ``delete_task`` tool."""

    user_id: str = Field(description="User ID of the task owner")
    task_id: int = Field(description="ID of the task to delete")


class DeleteTaskResult(BaseModel):
    """Summary returned after ``delete_task`` succeeds."""

    task_id: int
    status: str  # "deleted" on success (see execute_delete_task)
    title: str
|
| 73 |
+
|
| 74 |
+
|
| 75 |
+
class UpdateTaskParams(BaseModel):
    """Arguments accepted by the ``update_task`` tool; None fields are left unchanged."""

    user_id: str = Field(description="User ID of the task owner")
    task_id: int = Field(description="ID of the task to update")
    title: Optional[str] = Field(default=None, description="New task title")
    description: Optional[str] = Field(default=None, description="New task description")


class UpdateTaskResult(BaseModel):
    """Summary returned after ``update_task`` succeeds."""

    task_id: int
    status: str  # "updated" on success (see execute_update_task)
    title: str
|
| 86 |
+
|
| 87 |
+
|
| 88 |
+
class CreateProjectParams(BaseModel):
    """Arguments accepted by the ``create_project`` tool."""

    user_id: str = Field(description="User ID to create project for")
    name: str = Field(description="Project name")
    description: Optional[str] = Field(default=None, description="Project description")
    color: Optional[str] = Field(default="#3b82f6", description="Hex color code")


class CreateProjectResult(BaseModel):
    """Summary returned after ``create_project`` succeeds."""

    project_id: str  # stringified project primary key
    status: str  # "created" on success (see execute_create_project)
    name: str
|
| 99 |
+
|
| 100 |
+
|
| 101 |
+
class ListProjectsParams(BaseModel):
    """Arguments accepted by the ``list_projects`` tool."""

    user_id: str = Field(description="User ID to list projects for")


class ProjectResultItem(BaseModel):
    """One project row in a ``list_projects`` response."""

    id: str  # stringified project primary key
    name: str
    description: Optional[str]
    color: Optional[str]


class ListProjectsResult(BaseModel):
    """Envelope returned by ``list_projects``."""

    projects: List[ProjectResultItem]
|
| 114 |
+
|
| 115 |
+
|
| 116 |
+
class GetCalendarParams(BaseModel):
    """Arguments accepted by the ``get_calendar`` tool (inclusive date range)."""

    user_id: str = Field(description="User ID")
    start_date: str = Field(description="Start date ISO string")
    end_date: str = Field(description="End date ISO string")


class CalendarItem(BaseModel):
    """One due-dated task in a ``get_calendar`` response."""

    id: int
    title: str
    due_date: str  # ISO-8601 string, or "" when the row has no due_date
    completed: bool
    project_name: Optional[str]  # None when the task has no (resolvable) project


class GetCalendarResult(BaseModel):
    """Envelope returned by ``get_calendar``."""

    items: List[CalendarItem]
|
| 132 |
+
|
| 133 |
+
|
| 134 |
+
def get_task_tools_for_gemin_api():
    """Returns a list of all task-related tools in Gemini API format, hiding user_id from AI"""
    def _schema_without_user_id(model):
        # user_id is injected server-side when the tool runs; stripping it
        # from the schema keeps the model from asking the end user for it.
        schema = model.model_json_schema()
        schema.get("properties", {}).pop("user_id", None)
        required = schema.get("required")
        if required and "user_id" in required:
            required.remove("user_id")
        return schema

    # (name, description, params model) — expanded below into the nested
    # Gemini "function" wrapper format.
    _specs = [
        ("add_task", "Create a new task for the user. Do not ask for user_id.", AddTaskParams),
        ("list_tasks", "Retrieve user's tasks. Do not ask for user_id.", ListTasksParams),
        ("complete_task", "Mark a task as complete. Do not ask for user_id.", CompleteTaskParams),
        ("delete_task", "Remove a task. Do not ask for user_id.", DeleteTaskParams),
        ("update_task", "Modify task details. Do not ask for user_id.", UpdateTaskParams),
        ("create_project", "Create a new project. Projects can hold multiple tasks. Do not ask for user_id.", CreateProjectParams),
        ("list_projects", "List all projects for the user. Do not ask for user_id.", ListProjectsParams),
        ("get_calendar", "Get tasks and events for a specific date range (Calendar view). Do not ask for user_id.", GetCalendarParams),
    ]
    return [
        {
            "type": "function",
            "function": {
                "name": tool_name,
                "description": tool_desc,
                "parameters": _schema_without_user_id(params_model),
            },
        }
        for tool_name, tool_desc, params_model in _specs
    ]
|
| 211 |
+
|
| 212 |
+
|
| 213 |
+
def get_task_tools():
    """Returns a list of all task-related MCP tools (for MCP usage)"""
    # (name, description, params model) — each entry becomes one MCP tool
    # definition with the pydantic-derived JSON schema as its input_schema.
    _specs = [
        ("add_task", "Create a new task for the user", AddTaskParams),
        ("list_tasks", "Retrieve user's tasks", ListTasksParams),
        ("complete_task", "Mark a task as complete", CompleteTaskParams),
        ("delete_task", "Remove a task", DeleteTaskParams),
        ("update_task", "Modify task details", UpdateTaskParams),
    ]
    return [
        {
            "name": tool_name,
            "description": tool_desc,
            "input_schema": params_model.model_json_schema(),
        }
        for tool_name, tool_desc, params_model in _specs
    ]
|
| 243 |
+
|
| 244 |
+
|
| 245 |
+
def execute_add_task(params: AddTaskParams) -> AddTaskResult:
    """Create a task for a user and return a summary of the new row.

    Raises:
        ValueError: if ``user_id`` is not a valid UUID or the user does not exist.
    """
    # (The original wrapped everything in `except Exception as e: raise e`,
    # a no-op re-raise; removed for clarity — behaviour is unchanged.)
    try:
        user_uuid = uuid.UUID(params.user_id)
    except ValueError:
        raise ValueError(f"Invalid user_id format: {params.user_id}")

    with Session(engine) as db_session:
        # Verify the owner exists before inserting.
        user_exists = db_session.exec(
            select(User).where(User.id == user_uuid)
        ).first()
        if not user_exists:
            raise ValueError(f"User with id {params.user_id} not found")

        # Best-effort due-date parsing: full ISO 8601 first (trailing 'Z'
        # normalised to an explicit offset), then bare YYYY-MM-DD; an
        # unparseable string is silently ignored (deliberate best-effort).
        due_date_dt = None
        if params.due_date:
            try:
                due_date_dt = datetime.fromisoformat(params.due_date.replace('Z', '+00:00'))
            except ValueError:
                try:
                    due_date_dt = datetime.strptime(params.due_date, "%Y-%m-%d")
                except ValueError:
                    pass

        # Optional project association: silently skipped when no project of
        # that name belongs to this user.
        project_id = None
        if params.project_name:
            project = db_session.exec(
                select(Project).where(
                    Project.name == params.project_name,
                    Project.user_id == user_uuid,
                )
            ).first()
            if project:
                project_id = project.id

        task = Task(
            title=params.title,
            description=params.description,
            due_date=due_date_dt,
            user_id=user_uuid,
            project_id=project_id,
        )
        db_session.add(task)
        db_session.commit()
        db_session.refresh(task)  # populate the DB-assigned id

        return AddTaskResult(task_id=task.id, status="created", title=task.title)
|
| 308 |
+
|
| 309 |
+
|
| 310 |
+
def execute_list_tasks(params: ListTasksParams) -> ListTasksResult:
    """List a user's tasks, optionally filtered by completion status.

    ``status`` of "completed"/"pending" (case-insensitive) filters the rows;
    "all" or any other value applies no filter.

    Raises:
        ValueError: if ``user_id`` is not a valid UUID.
    """
    # (No-op `except Exception as e: raise e` wrapper removed — behaviour
    # is unchanged.)
    try:
        user_uuid = uuid.UUID(params.user_id)
    except ValueError:
        raise ValueError(f"Invalid user_id format: {params.user_id}")

    with Session(engine) as db_session:
        query = select(Task).where(Task.user_id == user_uuid)

        status_filter = (params.status or "").lower()
        if status_filter == "completed":
            query = query.where(Task.completed == True)  # noqa: E712 — SQL expression
        elif status_filter == "pending":
            query = query.where(Task.completed == False)  # noqa: E712

        tasks = db_session.exec(query).all()

        items = [
            ListTasksResultItem(
                id=task.id,
                title=task.title,
                completed=task.completed,
                created_at=task.created_at.isoformat() if task.created_at else "",
            )
            for task in tasks
        ]
        return ListTasksResult(tasks=items)
|
| 346 |
+
|
| 347 |
+
|
| 348 |
+
def execute_complete_task(params: CompleteTaskParams) -> CompleteTaskResult:
    """Mark one of the user's tasks as completed.

    Raises:
        ValueError: if ``user_id`` is not a valid UUID, or the task does not
            exist / does not belong to the user.
    """
    # (No-op `except Exception as e: raise e` wrapper removed — behaviour
    # is unchanged.)
    try:
        user_uuid = uuid.UUID(params.user_id)
    except ValueError:
        raise ValueError(f"Invalid user_id format: {params.user_id}")

    with Session(engine) as db_session:
        # Ownership check folded into the lookup: a task belonging to a
        # different user is indistinguishable from a missing one.
        task = db_session.exec(
            select(Task).where(
                Task.id == params.task_id,
                Task.user_id == user_uuid,
            )
        ).first()
        if not task:
            raise ValueError(f"Task with id {params.task_id} not found for user {params.user_id}")

        task.completed = True
        db_session.add(task)
        db_session.commit()
        db_session.refresh(task)

        return CompleteTaskResult(task_id=task.id, status="completed", title=task.title)
|
| 383 |
+
|
| 384 |
+
|
| 385 |
+
def execute_delete_task(params: DeleteTaskParams) -> DeleteTaskResult:
    """Delete one of the user's tasks and return a summary of the removed row.

    Raises:
        ValueError: if ``user_id`` is not a valid UUID, or the task does not
            exist / does not belong to the user.
    """
    # (No-op `except Exception as e: raise e` wrapper removed — behaviour
    # is unchanged.)
    try:
        user_uuid = uuid.UUID(params.user_id)
    except ValueError:
        raise ValueError(f"Invalid user_id format: {params.user_id}")

    with Session(engine) as db_session:
        # Ownership check folded into the lookup.
        task = db_session.exec(
            select(Task).where(
                Task.id == params.task_id,
                Task.user_id == user_uuid,
            )
        ).first()
        if not task:
            raise ValueError(f"Task with id {params.task_id} not found for user {params.user_id}")

        # Capture identifying fields before the row is gone.
        deleted_id, deleted_title = task.id, task.title
        db_session.delete(task)
        db_session.commit()

        return DeleteTaskResult(task_id=deleted_id, status="deleted", title=deleted_title)
|
| 418 |
+
|
| 419 |
+
|
| 420 |
+
def execute_update_task(params: UpdateTaskParams) -> UpdateTaskResult:
    """Update the title and/or description of one of the user's tasks.

    Fields left as ``None`` in ``params`` are not modified.

    Raises:
        ValueError: if ``user_id`` is not a valid UUID, or the task does not
            exist / does not belong to the user.
    """
    # (No-op `except Exception as e: raise e` wrapper removed — behaviour
    # is unchanged.)
    try:
        user_uuid = uuid.UUID(params.user_id)
    except ValueError:
        raise ValueError(f"Invalid user_id format: {params.user_id}")

    with Session(engine) as db_session:
        # Ownership check folded into the lookup.
        task = db_session.exec(
            select(Task).where(
                Task.id == params.task_id,
                Task.user_id == user_uuid,
            )
        ).first()
        if not task:
            raise ValueError(f"Task with id {params.task_id} not found for user {params.user_id}")

        # Apply only the fields the caller supplied.
        if params.title is not None:
            task.title = params.title
        if params.description is not None:
            task.description = params.description

        db_session.add(task)
        db_session.commit()
        db_session.refresh(task)

        return UpdateTaskResult(task_id=task.id, status="updated", title=task.title)
|
| 459 |
+
|
| 460 |
+
|
| 461 |
+
def execute_create_project(params: CreateProjectParams) -> CreateProjectResult:
    """Create a project for a user and return a summary of the new row.

    Raises:
        ValueError: if ``user_id`` is not a valid UUID.
    """
    # Consistency with the task tools: surface a clear error message for a
    # malformed UUID instead of the raw uuid.UUID ValueError. (The no-op
    # `except Exception as e: raise e` wrapper is also removed.)
    try:
        user_uuid = uuid.UUID(params.user_id)
    except ValueError:
        raise ValueError(f"Invalid user_id format: {params.user_id}")

    with Session(engine) as db_session:
        project = Project(
            name=params.name,
            description=params.description,
            color=params.color,
            user_id=user_uuid,
        )
        db_session.add(project)
        db_session.commit()
        db_session.refresh(project)  # populate the DB-assigned id
        return CreateProjectResult(
            project_id=str(project.id),
            status="created",
            name=project.name,
        )
|
| 482 |
+
|
| 483 |
+
|
| 484 |
+
def execute_list_projects(params: ListProjectsParams) -> ListProjectsResult:
    """List all projects belonging to a user.

    Raises:
        ValueError: if ``user_id`` is not a valid UUID.
    """
    # Consistency with the task tools: clear UUID error message; no-op
    # `except Exception as e: raise e` wrapper removed.
    try:
        user_uuid = uuid.UUID(params.user_id)
    except ValueError:
        raise ValueError(f"Invalid user_id format: {params.user_id}")

    with Session(engine) as db_session:
        projects = db_session.exec(
            select(Project).where(Project.user_id == user_uuid)
        ).all()

        items = [
            ProjectResultItem(
                id=str(p.id),
                name=p.name,
                description=p.description,
                color=p.color,
            )
            for p in projects
        ]
        return ListProjectsResult(projects=items)
|
| 504 |
+
|
| 505 |
+
|
| 506 |
+
def execute_get_calendar(params: GetCalendarParams) -> GetCalendarResult:
    """Return the user's tasks whose ``due_date`` falls within the inclusive range.

    Raises:
        ValueError: if ``user_id`` is not a valid UUID, or either date string
            is not parseable ISO 8601.
    """
    # Consistency with the task tools: clear UUID error message; no-op
    # `except Exception as e: raise e` wrapper removed.
    try:
        user_uuid = uuid.UUID(params.user_id)
    except ValueError:
        raise ValueError(f"Invalid user_id format: {params.user_id}")
    # fromisoformat() rejects a trailing 'Z' before Python 3.11, so rewrite
    # it as an explicit UTC offset first.
    start_dt = datetime.fromisoformat(params.start_date.replace('Z', '+00:00'))
    end_dt = datetime.fromisoformat(params.end_date.replace('Z', '+00:00'))

    with Session(engine) as db_session:
        tasks = db_session.exec(
            select(Task).where(
                Task.user_id == user_uuid,
                Task.due_date >= start_dt,
                Task.due_date <= end_dt,
            )
        ).all()

        # Resolve all referenced project names in one IN-query instead of the
        # original one-query-per-task (N+1) lookup.
        project_ids = {t.project_id for t in tasks if t.project_id}
        name_by_id = {}
        if project_ids:
            for p in db_session.exec(select(Project).where(Project.id.in_(project_ids))).all():
                name_by_id[p.id] = p.name

        items = [
            CalendarItem(
                id=task.id,
                title=task.title,
                due_date=task.due_date.isoformat() if task.due_date else "",
                completed=task.completed,
                project_name=name_by_id.get(task.project_id) if task.project_id else None,
            )
            for task in tasks
        ]
        return GetCalendarResult(items=items)
|
src/middleware/auth.py
ADDED
|
@@ -0,0 +1,41 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from fastapi import HTTPException, status
|
| 2 |
+
from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
|
| 3 |
+
from typing import Optional
|
| 4 |
+
from sqlmodel import Session
|
| 5 |
+
import uuid
|
| 6 |
+
|
| 7 |
+
from ..models.user import User
|
| 8 |
+
from ..utils.security import verify_user_id_from_token
|
| 9 |
+
from ..database import get_session_dep
|
| 10 |
+
from fastapi import Depends
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
# Security scheme for JWT
|
| 14 |
+
security = HTTPBearer()
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
async def verify_jwt_token(
    credentials: HTTPAuthorizationCredentials = Depends(security),
    session: Session = Depends(get_session_dep)
):
    """Verify JWT token and return user_id if valid.

    FastAPI dependency: extracts the bearer token from the Authorization
    header, validates it via ``verify_user_id_from_token``, and confirms the
    subject still exists in the database.

    Raises:
        HTTPException: 401 when the token is invalid/expired or the user
            no longer exists.
    """
    token = credentials.credentials
    user_id = verify_user_id_from_token(token)

    if not user_id:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Invalid token or expired token.",
            headers={"WWW-Authenticate": "Bearer"},
        )

    # Get user from database to ensure they still exist
    # NOTE(review): session.get(User, user_id) assumes user_id is already in
    # the type the User primary key expects (UUID vs str) — confirm what
    # verify_user_id_from_token returns.
    user = session.get(User, user_id)
    if not user:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="User no longer exists.",
            headers={"WWW-Authenticate": "Bearer"},
        )

    return user_id
|
src/models/__init__.py
ADDED
|
@@ -0,0 +1,31 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from .user import User, UserCreate, UserRead
|
| 2 |
+
from .task import Task, TaskCreate, TaskRead, TaskUpdate
|
| 3 |
+
from .project import Project, ProjectCreate, ProjectRead, ProjectUpdate
|
| 4 |
+
from .conversation import Conversation, ConversationCreate, ConversationRead, ConversationUpdate
|
| 5 |
+
from .message import Message, MessageCreate, MessageRead, MessageUpdate
|
| 6 |
+
from .audit_log import AuditLog, AuditLogCreate, AuditLogRead
|
| 7 |
+
|
| 8 |
+
__all__ = [
|
| 9 |
+
"User",
|
| 10 |
+
"UserCreate",
|
| 11 |
+
"UserRead",
|
| 12 |
+
"Task",
|
| 13 |
+
"TaskCreate",
|
| 14 |
+
"TaskRead",
|
| 15 |
+
"TaskUpdate",
|
| 16 |
+
"Project",
|
| 17 |
+
"ProjectCreate",
|
| 18 |
+
"ProjectRead",
|
| 19 |
+
"ProjectUpdate",
|
| 20 |
+
"Conversation",
|
| 21 |
+
"ConversationCreate",
|
| 22 |
+
"ConversationRead",
|
| 23 |
+
"ConversationUpdate",
|
| 24 |
+
"Message",
|
| 25 |
+
"MessageCreate",
|
| 26 |
+
"MessageRead",
|
| 27 |
+
"MessageUpdate",
|
| 28 |
+
"AuditLog",
|
| 29 |
+
"AuditLogCreate",
|
| 30 |
+
"AuditLogRead",
|
| 31 |
+
]
|
src/models/audit_log.py
ADDED
|
@@ -0,0 +1,40 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from sqlmodel import SQLModel, Field
|
| 2 |
+
from typing import Optional
|
| 3 |
+
from datetime import datetime
|
| 4 |
+
from sqlalchemy import Column, DateTime, JSON
|
| 5 |
+
import uuid
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
class AuditLogBase(SQLModel):
    """Shared schema fields for task audit-log events.

    One event records a single task lifecycle action
    (created|updated|completed|deleted) with a UUID ``event_id`` used for
    deduplication and a free-form JSON payload in ``event_data``.
    """
    event_id: str = Field(index=True)  # UUID string used for deduplication
    event_type: str = Field(max_length=50)  # created|updated|completed|deleted
    user_id: str  # String user identifier (not a foreign key)
    task_id: int  # Reference to the affected task
    event_data: dict = Field(sa_column=Column(JSON))  # JSONB field for event data
    # Fixed: use default_factory so utcnow() is evaluated per instance.
    # `default=datetime.utcnow` stored the bound function object itself as
    # the default value instead of a timestamp.
    timestamp: datetime = Field(
        sa_column=Column(DateTime(timezone=True)),
        default_factory=datetime.utcnow,
    )
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
class AuditLog(AuditLogBase, table=True):
    """
    Persistent record of all task events for a user.
    Contains id, event_id, event_type, user_id, task_id, event_data (JSONB), and timestamp.
    """
    id: Optional[int] = Field(default=None, primary_key=True)
    event_id: str = Field(index=True, unique=True)  # Unique constraint for deduplication
    event_type: str = Field(max_length=50)  # created|updated|completed|deleted
    user_id: str  # String user identifier
    task_id: int  # Reference to the affected task
    event_data: dict = Field(sa_column=Column(JSON))  # JSONB field for event data
    # Fixed: use default_factory so utcnow() is called per row.
    # `default=datetime.utcnow` stored the function object as the default
    # value rather than evaluating it at insert time.
    timestamp: datetime = Field(
        sa_column=Column(DateTime(timezone=True)),
        default_factory=datetime.utcnow,
    )
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
class AuditLogCreate(AuditLogBase):
    """Payload schema for creating an audit-log entry; adds nothing to the base fields."""
    pass
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
class AuditLogRead(AuditLogBase):
    """Read/response schema: base fields plus the database id and timestamp."""
    id: int
    timestamp: datetime

    class Config:
        # Allow construction directly from ORM row objects.
        from_attributes = True
|
src/models/conversation.py
ADDED
|
@@ -0,0 +1,40 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from sqlmodel import SQLModel, Field, Relationship
|
| 2 |
+
from typing import Optional, List
|
| 3 |
+
from datetime import datetime
|
| 4 |
+
from sqlalchemy import Column, DateTime
|
| 5 |
+
import uuid
|
| 6 |
+
from .user import User # Import User model for relationship
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
class ConversationBase(SQLModel):
    """Shared conversation fields: only the owning user's id."""
    user_id: uuid.UUID = Field(foreign_key="user.id", index=True)
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
class Conversation(ConversationBase, table=True):
    """Database table for a chat conversation owned by a single user."""
    id: Optional[int] = Field(default=None, primary_key=True)
    user_id: uuid.UUID = Field(foreign_key="user.id", index=True)
    # NOTE(review): naive DateTime columns with utcnow defaults — confirm
    # the whole app treats stored timestamps as UTC.
    created_at: datetime = Field(sa_column=Column(DateTime, default=datetime.utcnow))
    updated_at: datetime = Field(sa_column=Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow))

    # Relationship to user
    owner: Optional["User"] = Relationship(back_populates="conversations")

    # Relationship to messages
    messages: List["Message"] = Relationship(back_populates="conversation")
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
class ConversationCreate(ConversationBase):
    """Payload schema for creating a conversation; adds nothing to the base fields."""
    pass
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
class ConversationRead(ConversationBase):
    """Read/response schema: base fields plus id and timestamps."""
    id: int
    created_at: datetime
    updated_at: datetime

    class Config:
        # Allow construction directly from ORM row objects.
        from_attributes = True
|
| 37 |
+
|
| 38 |
+
|
| 39 |
+
class ConversationUpdate(SQLModel):
    """Update schema; conversations currently expose no mutable fields."""
    pass
|
src/models/message.py
ADDED
|
@@ -0,0 +1,41 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from sqlmodel import SQLModel, Field, Relationship
|
| 2 |
+
from typing import Optional
|
| 3 |
+
from datetime import datetime
|
| 4 |
+
from sqlalchemy import Column, DateTime, Enum as SAEnum
|
| 5 |
+
import uuid
|
| 6 |
+
from .conversation import Conversation # Import Conversation model for relationship
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
class MessageBase(SQLModel):
    """Shared message fields: parent conversation, author, role, and text."""
    conversation_id: int = Field(foreign_key="conversation.id", index=True)
    user_id: uuid.UUID
    # Role is constrained to "user" or "assistant" via a DB-level enum.
    role: str = Field(sa_column=Column("role", SAEnum("user", "assistant", name="message_role")))
    content: str
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
class Message(MessageBase, table=True):
    """Database table for a single chat message within a conversation."""
    id: Optional[int] = Field(default=None, primary_key=True)
    conversation_id: int = Field(foreign_key="conversation.id", index=True)
    user_id: uuid.UUID
    # Role is constrained to "user" or "assistant" via a DB-level enum.
    role: str = Field(sa_column=Column("role", SAEnum("user", "assistant", name="message_role")))
    content: str
    # NOTE(review): naive DateTime with utcnow default — confirm UTC convention.
    created_at: datetime = Field(sa_column=Column(DateTime, default=datetime.utcnow))

    # Relationship to conversation
    conversation: Optional["Conversation"] = Relationship(back_populates="messages")
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
class MessageCreate(MessageBase):
    """Payload schema for creating a message; adds nothing to the base fields."""
    pass
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
class MessageRead(MessageBase):
    """Read/response schema: base fields plus id and creation timestamp."""
    id: int
    created_at: datetime

    class Config:
        # Allow construction directly from ORM row objects.
        from_attributes = True
|
| 38 |
+
|
| 39 |
+
|
| 40 |
+
class MessageUpdate(SQLModel):
    """Update schema; only the message text may be changed."""
    content: Optional[str] = None
|
src/models/project.py
ADDED
|
@@ -0,0 +1,49 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from sqlmodel import SQLModel, Field, Relationship
|
| 2 |
+
from typing import Optional, List
|
| 3 |
+
import uuid
|
| 4 |
+
from datetime import datetime
|
| 5 |
+
from sqlalchemy import Column, DateTime
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
class ProjectBase(SQLModel):
    """Shared project fields: name, optional description, and display color."""
    name: str = Field(min_length=1, max_length=200)
    description: Optional[str] = Field(default=None, max_length=1000)
    color: Optional[str] = Field(default="#3b82f6", max_length=7)  # Hex color code
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
class Project(ProjectBase, table=True):
    """Database table for a user-owned project that groups tasks."""
    id: Optional[uuid.UUID] = Field(default_factory=uuid.uuid4, primary_key=True)
    user_id: uuid.UUID = Field(foreign_key="user.id", index=True)
    name: str = Field(min_length=1, max_length=200)
    description: Optional[str] = Field(default=None, max_length=1000)
    color: Optional[str] = Field(default="#3b82f6", max_length=7)
    # NOTE(review): naive DateTime columns with utcnow defaults — confirm UTC convention.
    created_at: datetime = Field(sa_column=Column(DateTime, default=datetime.utcnow))
    updated_at: datetime = Field(sa_column=Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow))
    deadline: Optional[datetime] = None

    # Relationship to user
    owner: Optional["User"] = Relationship(back_populates="projects")

    # Relationship to tasks
    tasks: List["Task"] = Relationship(back_populates="project")
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
class ProjectCreate(ProjectBase):
    """Payload schema for creating a project; adds nothing to the base fields."""
    pass
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
class ProjectRead(ProjectBase):
    """Read/response schema: base fields plus id, owner, and timestamps."""
    id: uuid.UUID
    user_id: uuid.UUID
    created_at: datetime
    updated_at: datetime

    class Config:
        # Allow construction directly from ORM row objects.
        from_attributes = True
|
| 43 |
+
|
| 44 |
+
|
| 45 |
+
class ProjectUpdate(SQLModel):
    """Partial-update schema; every field is optional (None means "leave unchanged")."""
    name: Optional[str] = Field(default=None, min_length=1, max_length=200)
    description: Optional[str] = Field(default=None, max_length=1000)
    color: Optional[str] = Field(default=None, max_length=7)
    deadline: Optional[datetime] = None
|
src/models/task.py
ADDED
|
@@ -0,0 +1,57 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from sqlmodel import SQLModel, Field, Relationship
|
| 2 |
+
from typing import Optional
|
| 3 |
+
from datetime import datetime
|
| 4 |
+
import uuid
|
| 5 |
+
from sqlalchemy import Column, DateTime
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
class TaskBase(SQLModel):
    """Shared task fields used by the table model and the API schemas."""
    title: str = Field(min_length=1, max_length=200)
    description: Optional[str] = Field(default=None, max_length=1000)
    completed: bool = Field(default=False)
    due_date: Optional[datetime] = None
    # Marks tasks created by the AI assistant rather than the user directly.
    is_ai_generated: bool = Field(default=False)
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
class Task(TaskBase, table=True):
    """Database table for a user's task; optionally linked to a project."""
    id: Optional[int] = Field(default=None, primary_key=True)
    user_id: uuid.UUID = Field(foreign_key="user.id", index=True)
    project_id: Optional[uuid.UUID] = Field(default=None, foreign_key="project.id", index=True)
    title: str = Field(min_length=1, max_length=200)
    description: Optional[str] = Field(default=None, max_length=1000)
    completed: bool = Field(default=False)
    due_date: Optional[datetime] = None
    # NOTE(review): naive DateTime columns with utcnow defaults — confirm UTC convention.
    created_at: datetime = Field(sa_column=Column(DateTime, default=datetime.utcnow))
    updated_at: datetime = Field(sa_column=Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow))

    # Relationship to user
    owner: Optional["User"] = Relationship(back_populates="tasks")

    # Relationship to project
    project: Optional["Project"] = Relationship(back_populates="tasks")
|
| 32 |
+
|
| 33 |
+
|
| 34 |
+
class TaskCreate(TaskBase):
    """Payload schema for creating a task; optionally assigns it to a project."""
    project_id: Optional[uuid.UUID] = None
    is_ai_generated: bool = False
|
| 37 |
+
|
| 38 |
+
|
| 39 |
+
class TaskRead(TaskBase):
    """Read/response schema: base fields plus id, owner, project, and timestamps."""
    id: int
    user_id: uuid.UUID
    project_id: Optional[uuid.UUID] = None
    created_at: datetime
    updated_at: datetime
    is_ai_generated: bool = False

    class Config:
        # Allow construction directly from ORM row objects.
        from_attributes = True
|
| 49 |
+
|
| 50 |
+
|
| 51 |
+
class TaskUpdate(SQLModel):
    """Partial-update schema; every field is optional (None means "leave unchanged")."""
    title: Optional[str] = Field(default=None, min_length=1, max_length=200)
    description: Optional[str] = Field(default=None, max_length=1000)
    completed: Optional[bool] = None
    project_id: Optional[uuid.UUID] = None
    due_date: Optional[datetime] = None
    is_ai_generated: Optional[bool] = None
|
src/models/user.py
ADDED
|
@@ -0,0 +1,36 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from sqlmodel import SQLModel, Field, Relationship
|
| 2 |
+
from typing import Optional, List
|
| 3 |
+
import uuid
|
| 4 |
+
from datetime import datetime
|
| 5 |
+
from sqlalchemy import Column, DateTime
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
class UserBase(SQLModel):
    """Shared user fields: the unique login email."""
    email: str = Field(unique=True, index=True, max_length=255)
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
class User(UserBase, table=True):
    """Database table for an account; owns tasks, projects, and conversations."""
    id: Optional[uuid.UUID] = Field(default_factory=uuid.uuid4, primary_key=True)
    email: str = Field(unique=True, index=True, max_length=255)
    # Stores the hashed password only — never the plaintext.
    password_hash: str = Field(max_length=255)
    # NOTE(review): naive DateTime columns with utcnow defaults — confirm UTC convention.
    created_at: datetime = Field(sa_column=Column(DateTime, default=datetime.utcnow))
    updated_at: datetime = Field(sa_column=Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow))

    # Relationship to tasks
    tasks: List["Task"] = Relationship(back_populates="owner")

    # Relationship to projects
    projects: List["Project"] = Relationship(back_populates="owner")

    # Relationship to conversations
    conversations: List["Conversation"] = Relationship(back_populates="owner")
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
class UserCreate(UserBase):
    """Registration payload: email plus the plaintext password (hashed before storage)."""
    password: str
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
class UserRead(UserBase):
    """Read/response schema: email plus id and timestamps (no password material)."""
    id: uuid.UUID
    created_at: datetime
    updated_at: datetime
|
src/routers/__init__.py
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from . import auth, tasks, projects, chat, audit
|
| 2 |
+
|
| 3 |
+
__all__ = ["auth", "tasks", "projects", "chat", "audit"]
|
src/routers/audit.py
ADDED
|
@@ -0,0 +1,125 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from fastapi import APIRouter, HTTPException, Depends, status, Body
|
| 2 |
+
from sqlmodel import Session, select
|
| 3 |
+
from typing import List, Dict, Any
|
| 4 |
+
from uuid import UUID
|
| 5 |
+
import logging
|
| 6 |
+
|
| 7 |
+
from ..models.audit_log import AuditLog, AuditLogCreate
|
| 8 |
+
from ..models.user import User
|
| 9 |
+
from ..database import get_session_dep
|
| 10 |
+
from ..utils.deps import get_current_user
|
| 11 |
+
|
| 12 |
+
# Configure logging
|
| 13 |
+
logging.basicConfig(level=logging.INFO)
|
| 14 |
+
logger = logging.getLogger(__name__)
|
| 15 |
+
|
| 16 |
+
router = APIRouter(prefix="/api/audit", tags=["audit"])
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
@router.post("/events")
async def receive_audit_event(
    event: Dict[str, Any] = Body(...),
    session: Session = Depends(get_session_dep)
):
    """
    Receives audit events from Dapr Pub/Sub (Kafka) and saves them to the database.
    This endpoint is called by the Dapr sidecar when events are published.

    Always returns HTTP 200 with a {"status": ..., "message": ...} body so the
    pub/sub delivery is acknowledged; errors are reported in the body instead
    of raising.
    """
    try:
        logger.info(f"Received audit event: {event}")

        # Extract event data
        event_id = event.get("event_id")
        event_type = event.get("event_type")
        user_id = event.get("user_id")
        task_id = event.get("task_id")
        task_data = event.get("task_data", {})
        # NOTE(review): `timestamp` is extracted but never stored — the row
        # gets the model's own default timestamp. Confirm whether the
        # event's original timestamp should be persisted instead.
        timestamp = event.get("timestamp")

        # Validate required fields
        if not all([event_id, event_type, user_id, task_id]):
            logger.warning(f"Missing required fields in event: {event}")
            return {"status": "error", "message": "Missing required fields"}

        # Check if event already exists (deduplication)
        existing = session.exec(
            select(AuditLog).where(AuditLog.event_id == event_id)
        ).first()

        if existing:
            logger.info(f"Event {event_id} already exists, skipping")
            return {"status": "skipped", "message": "Event already exists"}

        # Create audit log entry; only a whitelisted subset of task_data is kept.
        audit_log = AuditLog(
            event_id=event_id,
            event_type=event_type,
            user_id=user_id,
            task_id=task_id,
            event_data={
                "title": task_data.get("title", ""),
                "description": task_data.get("description", ""),
                "completed": task_data.get("completed", False)
            }
        )

        session.add(audit_log)
        session.commit()
        session.refresh(audit_log)

        logger.info(f"Audit event {event_id} saved successfully")
        return {"status": "success", "message": "Event saved", "id": audit_log.id}

    except Exception as e:
        logger.error(f"Error saving audit event: {e}", exc_info=True)
        session.rollback()
        return {"status": "error", "message": str(e)}
|
| 77 |
+
|
| 78 |
+
|
| 79 |
+
@router.get("/events/{user_id}", response_model=dict)
async def get_user_audit_events(
    user_id: UUID,
    current_user: User = Depends(get_current_user),
    session: Session = Depends(get_session_dep),
    offset: int = 0,
    limit: int = 50
):
    """Get audit events for a specific user, newest first, paginated.

    Returns a dict with ``events``, ``total``, ``offset``, and ``limit``.
    A mismatched user id yields 404 (not 403) so the endpoint does not
    confirm the existence of other users.
    """

    # Verify that the user_id matches the authenticated user
    if current_user.id != user_id:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="User not found"
        )

    # Query audit logs for the user
    query = select(AuditLog).where(AuditLog.user_id == str(user_id)).order_by(AuditLog.timestamp.desc())

    # Get total count. Fixed: fetch only the primary-key column — the
    # original materialized every full AuditLog row just to call len().
    total_query = select(AuditLog.id).where(AuditLog.user_id == str(user_id))
    total_count = len(session.exec(total_query).all())

    # Apply pagination
    audit_logs = session.exec(query.offset(offset).limit(limit)).all()

    # Convert to plain dicts for the JSON response
    events = [
        {
            "id": log.id,
            "event_id": log.event_id,
            "event_type": log.event_type,
            "user_id": log.user_id,
            "task_id": log.task_id,
            "event_data": log.event_data,
            "timestamp": log.timestamp.isoformat() if log.timestamp else None
        }
        for log in audit_logs
    ]

    return {
        "events": events,
        "total": total_count,
        "offset": offset,
        "limit": limit
    }
|
src/routers/auth.py
ADDED
|
@@ -0,0 +1,189 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from fastapi import APIRouter, HTTPException, status, Depends, Response, Request
|
| 2 |
+
from sqlmodel import Session, select
|
| 3 |
+
from typing import Annotated
|
| 4 |
+
from datetime import datetime, timedelta
|
| 5 |
+
from uuid import uuid4
|
| 6 |
+
import secrets
|
| 7 |
+
|
| 8 |
+
from ..models.user import User, UserCreate, UserRead
|
| 9 |
+
from ..schemas.auth import RegisterRequest, RegisterResponse, LoginRequest, LoginResponse, ForgotPasswordRequest, ResetPasswordRequest
|
| 10 |
+
from ..utils.security import hash_password, create_access_token, verify_password
|
| 11 |
+
from ..utils.deps import get_current_user
|
| 12 |
+
from ..database import get_session_dep
|
| 13 |
+
from ..config import settings
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
router = APIRouter(prefix="/api/auth", tags=["auth"])
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
@router.post("/register", response_model=RegisterResponse, status_code=status.HTTP_201_CREATED)
def register(user_data: RegisterRequest, response: Response, session: Session = Depends(get_session_dep)):
    """Register a new user with email and password.

    On success the new user is persisted, a JWT is minted and set as an
    httpOnly cookie, and a 201 response with the user's id/email is returned.

    Raises:
        HTTPException: 409 if the email is already registered;
            400 if the password is shorter than 8 characters.
    """

    # Check if user already exists
    existing_user = session.exec(select(User).where(User.email == user_data.email)).first()
    if existing_user:
        raise HTTPException(
            status_code=status.HTTP_409_CONFLICT,
            detail="An account with this email already exists"
        )

    # Validate password length
    if len(user_data.password) < 8:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="Password must be at least 8 characters"
        )

    # Hash the password (plaintext is never stored)
    password_hash = hash_password(user_data.password)

    # Create new user
    user = User(
        email=user_data.email,
        password_hash=password_hash
    )

    session.add(user)
    session.commit()
    session.refresh(user)

    # Create access token; "sub" carries the user's id
    access_token = create_access_token(data={"sub": str(user.id)})

    # Set the token as an httpOnly cookie
    response.set_cookie(
        key="access_token",
        value=access_token,
        httponly=True,
        secure=settings.JWT_COOKIE_SECURE,  # True in production, False in development
        samesite=settings.JWT_COOKIE_SAMESITE,
        max_age=settings.ACCESS_TOKEN_EXPIRE_DAYS * 24 * 60 * 60,  # Convert days to seconds
        path="/"
    )

    # Return response
    return RegisterResponse(
        id=user.id,
        email=user.email,
        message="Account created successfully"
    )
|
| 71 |
+
|
| 72 |
+
|
| 73 |
+
@router.post("/login", response_model=LoginResponse)
def login(login_data: LoginRequest, response: Response, session: Session = Depends(get_session_dep)):
    """Authenticate user with email and password, return JWT token.

    On success the JWT is both set as an httpOnly cookie (for browser
    clients) and returned in the response body (for API clients).

    Raises:
        HTTPException: 401 when the email is unknown or the password is
            wrong; the two cases are deliberately indistinguishable.
    """

    # Find user by email
    user = session.exec(select(User).where(User.email == login_data.email)).first()

    if not user or not verify_password(login_data.password, user.password_hash):
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Invalid email or password",
            headers={"WWW-Authenticate": "Bearer"},
        )

    # Create access token; "sub" carries the user's id
    access_token = create_access_token(data={"sub": str(user.id)})

    # Set the token as an httpOnly cookie
    response.set_cookie(
        key="access_token",
        value=access_token,
        httponly=True,
        secure=settings.JWT_COOKIE_SECURE,  # True in production, False in development
        samesite=settings.JWT_COOKIE_SAMESITE,
        max_age=settings.ACCESS_TOKEN_EXPIRE_DAYS * 24 * 60 * 60,  # Convert days to seconds
        path="/"
    )

    # Security fix: removed debug print statements that wrote the raw JWT
    # and its cookie attributes to stdout — tokens must never be logged.

    # Return response
    return LoginResponse(
        access_token=access_token,
        token_type="bearer",
        user=RegisterResponse(
            id=user.id,
            email=user.email,
            message="Login successful"
        )
    )
|
| 115 |
+
|
| 116 |
+
|
| 117 |
+
@router.post("/logout")
def logout(request: Request, response: Response, current_user: User = Depends(get_current_user)):
    """Logout user by clearing the access token cookie.

    Requires a valid session (via get_current_user). The cookie is
    overwritten with an empty value and max_age=0 so the browser discards
    it immediately; the cookie attributes must match those used at login
    for the overwrite to take effect.
    """
    # Clear the access_token cookie
    response.set_cookie(
        key="access_token",
        value="",
        httponly=True,
        secure=settings.JWT_COOKIE_SECURE,
        samesite=settings.JWT_COOKIE_SAMESITE,
        max_age=0,  # Expire immediately
        path="/"
    )

    return {"message": "Logged out successfully"}
|
| 132 |
+
|
| 133 |
+
|
| 134 |
+
@router.get("/me", response_model=RegisterResponse)
def get_current_user_profile(request: Request, current_user: User = Depends(get_current_user)):
    """Get the current authenticated user's profile.

    Authentication is handled by the get_current_user dependency; this
    endpoint simply echoes the resolved user's id and email.
    """
    # Security fix: removed debug prints that dumped all request cookies
    # (including the access_token) to stdout.
    return RegisterResponse(
        id=current_user.id,
        email=current_user.email,
        message="User profile retrieved successfully"
    )
|
| 146 |
+
|
| 147 |
+
|
| 148 |
+
@router.post("/forgot-password")
def forgot_password(forgot_data: ForgotPasswordRequest, session: Session = Depends(get_session_dep)):
    """Simulate starting a password reset.

    The account lookup is performed, but the same generic message is
    returned whether or not the email is registered, so the endpoint never
    reveals which addresses exist. No email is actually sent.
    """
    # Look up the account; the result deliberately does not change the response.
    session.exec(select(User).where(User.email == forgot_data.email)).first()
    return {"message": "If the email exists, a reset link would be sent"}
|
| 161 |
+
|
| 162 |
+
|
| 163 |
+
@router.post("/reset-password")
def reset_password(reset_data: ResetPasswordRequest, session: Session = Depends(get_session_dep)):
    """Reset user password after verification.

    NOTE(review): despite the docstring, no reset token or other proof of
    identity is checked here — anyone who knows an email can set that
    account's password. Confirm whether a verification step was intended.

    Raises:
        HTTPException: 404 if the email is not registered;
            400 if the new password is shorter than 8 characters.
    """
    # Check if user exists
    user = session.exec(select(User).where(User.email == reset_data.email)).first()

    if not user:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="User not found"
        )

    # Validate password length
    if len(reset_data.new_password) < 8:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="Password must be at least 8 characters"
        )

    # Hash the new password
    user.password_hash = hash_password(reset_data.new_password)

    # Update the user
    session.add(user)
    session.commit()

    return {"message": "Password reset successfully"}
|
src/routers/chat.py
ADDED
|
@@ -0,0 +1,203 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from fastapi import APIRouter, HTTPException, status, Depends
|
| 2 |
+
from sqlmodel import Session
|
| 3 |
+
from typing import Optional
|
| 4 |
+
from uuid import UUID
|
| 5 |
+
from pydantic import BaseModel
|
| 6 |
+
import json
|
| 7 |
+
import logging
|
| 8 |
+
|
| 9 |
+
from ..models.user import User
|
| 10 |
+
from ..models.conversation import Conversation
|
| 11 |
+
from ..models.message import Message
|
| 12 |
+
from ..database import get_session_dep
|
| 13 |
+
from ..utils.deps import get_current_user
|
| 14 |
+
from ..services.conversation_service import ConversationService
|
| 15 |
+
from ..agent_config import todo_agent_config
|
| 16 |
+
from ..mcp_server import get_mcp_tools_for_gemin_api
|
| 17 |
+
from ..mcp_tools.task_tools import (
|
| 18 |
+
execute_add_task,
|
| 19 |
+
execute_list_tasks,
|
| 20 |
+
execute_complete_task,
|
| 21 |
+
execute_delete_task,
|
| 22 |
+
execute_update_task,
|
| 23 |
+
execute_create_project,
|
| 24 |
+
execute_list_projects,
|
| 25 |
+
execute_get_calendar,
|
| 26 |
+
AddTaskParams,
|
| 27 |
+
ListTasksParams,
|
| 28 |
+
CompleteTaskParams,
|
| 29 |
+
DeleteTaskParams,
|
| 30 |
+
UpdateTaskParams,
|
| 31 |
+
CreateProjectParams,
|
| 32 |
+
ListProjectsParams,
|
| 33 |
+
GetCalendarParams
|
| 34 |
+
)
|
| 35 |
+
|
| 36 |
+
router = APIRouter(prefix="/api/{user_id}/chat", tags=["chat"])
|
| 37 |
+
|
| 38 |
+
logger = logging.getLogger(__name__)
|
| 39 |
+
|
| 40 |
+
class ChatRequest(BaseModel):
    """Chat request body: optional existing conversation id and the user's message."""
    conversation_id: Optional[int] = None
    message: str
|
| 43 |
+
|
| 44 |
+
class ChatResponse(BaseModel):
    """Chat response body: conversation id, assistant reply text, and any tool calls made."""
    conversation_id: int
    response: str
    tool_calls: list = []
|
| 48 |
+
|
| 49 |
+
@router.post("/", response_model=ChatResponse)
def chat(
    user_id: UUID,
    chat_request: ChatRequest,
    current_user: User = Depends(get_current_user),
    session: Session = Depends(get_session_dep)
):
    """
    Handle chat requests from users using AI assistant with tool calling.

    Flow:
      1. Authorize: the path user_id must match the authenticated user.
      2. Resolve the conversation (create one when no id is supplied).
      3. Persist the incoming user message.
      4. Call the LLM with system instructions, recent history, and tools.
      5. Execute any requested tool calls and feed results back for a final answer.
      6. Persist the assistant reply and return it.

    Raises:
        HTTPException: 404 when the user ids mismatch, or when the referenced
            conversation does not exist / belongs to another user.
    """
    logger.info(f"Chat endpoint called with user_id: {user_id}, current_user.id: {current_user.id}")

    # Verify that the user_id in the URL matches the authenticated user.
    # 404 (rather than 403) is used so callers cannot probe for other users' resources.
    if current_user.id != user_id:
        logger.warning(f"User ID mismatch: path user_id={user_id}, auth user_id={current_user.id}")
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Access denied"
        )

    # Get or create conversation
    conversation_id = chat_request.conversation_id
    if conversation_id is None:
        conversation = Conversation(user_id=user_id)
        session.add(conversation)
        session.commit()
        session.refresh(conversation)
        conversation_id = conversation.id
    else:
        conversation = session.get(Conversation, conversation_id)
        # Ownership check: a conversation owned by another user is reported as missing.
        if not conversation or conversation.user_id != user_id:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail="Conversation not found"
            )

    # Store user message
    user_message = Message(
        conversation_id=conversation_id,
        user_id=user_id,
        role="user",
        content=chat_request.message
    )
    session.add(user_message)
    session.commit()

    # Get conversation history (last 10 messages for context)
    conversation_history = ConversationService.get_messages(
        conversation_id=conversation_id,
        user_id=user_id,
        db_session=session,
        limit=10
    )

    # Re-shape stored messages into the {"role", "content"} dicts the LLM API expects.
    history_for_agent = []
    for msg in conversation_history:
        history_for_agent.append({
            "role": msg.role,
            "content": msg.content
        })

    agent_config = todo_agent_config
    tools = get_mcp_tools_for_gemin_api()

    # System instructions first, then recent history, then the new user turn.
    messages = [
        {"role": "system", "content": agent_config["instructions"]},
        *history_for_agent,
        {"role": "user", "content": chat_request.message}
    ]

    try:
        # Call the AI agent with tools
        response = agent_config["client"].chat.completions.create(
            model=agent_config["model"],
            messages=messages,
            tools=tools,
            tool_choice="auto"
        )

        response_message = response.choices[0].message
        tool_calls = response_message.tool_calls

        # If there are tool calls, execute them
        if tool_calls:
            # Add assistant's tool call message to history
            messages.append(response_message)

            for tool_call in tool_calls:
                function_name = tool_call.function.name
                function_args = json.loads(tool_call.function.arguments)

                # Force the user_id to be the current user's ID for security:
                # the model must never act on behalf of another user.
                function_args["user_id"] = str(user_id)

                logger.info(f"Executing tool: {function_name} with args: {function_args}")

                result = None
                try:
                    # Dispatch to the matching MCP tool; unknown names leave result as None.
                    if function_name == "add_task":
                        result = execute_add_task(AddTaskParams(**function_args))
                    elif function_name == "list_tasks":
                        result = execute_list_tasks(ListTasksParams(**function_args))
                    elif function_name == "complete_task":
                        result = execute_complete_task(CompleteTaskParams(**function_args))
                    elif function_name == "delete_task":
                        result = execute_delete_task(DeleteTaskParams(**function_args))
                    elif function_name == "update_task":
                        result = execute_update_task(UpdateTaskParams(**function_args))
                    elif function_name == "create_project":
                        result = execute_create_project(CreateProjectParams(**function_args))
                    elif function_name == "list_projects":
                        result = execute_list_projects(ListProjectsParams(**function_args))
                    elif function_name == "get_calendar":
                        result = execute_get_calendar(GetCalendarParams(**function_args))

                    tool_result_content = json.dumps(result.dict() if result else {"error": "Unknown tool"})
                except Exception as e:
                    # Tool failures are reported back to the model instead of aborting the chat.
                    logger.error(f"Error executing tool {function_name}: {str(e)}")
                    tool_result_content = json.dumps({"error": str(e)})

                messages.append({
                    "tool_call_id": tool_call.id,
                    "role": "tool",
                    "name": function_name,
                    "content": tool_result_content,
                })

            # Get final response from AI after tool results
            second_response = agent_config["client"].chat.completions.create(
                model=agent_config["model"],
                messages=messages,
            )
            ai_response = second_response.choices[0].message.content
        else:
            ai_response = response_message.content

    except Exception as e:
        # Degrade gracefully: surface the failure as the assistant's reply instead of a 5xx.
        logger.error(f"Error in AI processing: {str(e)}")
        ai_response = f"I encountered an error processing your request. Please try again later. (Error: {str(e)})"

    # Store assistant response
    assistant_message = Message(
        conversation_id=conversation_id,
        user_id=user_id,
        role="assistant",
        content=ai_response
    )
    session.add(assistant_message)
    session.commit()

    return ChatResponse(
        conversation_id=conversation_id,
        response=ai_response,
        tool_calls=[]  # We already handled them
    )
|
src/routers/projects.py
ADDED
|
@@ -0,0 +1,259 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from fastapi import APIRouter, HTTPException, status, Depends
|
| 2 |
+
from sqlmodel import Session, select, and_, func
|
| 3 |
+
from typing import List
|
| 4 |
+
from uuid import UUID
|
| 5 |
+
from datetime import datetime
|
| 6 |
+
|
| 7 |
+
from ..models.user import User
|
| 8 |
+
from ..models.project import Project, ProjectCreate, ProjectUpdate, ProjectRead
|
| 9 |
+
from ..models.task import Task
|
| 10 |
+
from ..database import get_session_dep
|
| 11 |
+
from ..utils.deps import get_current_user
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
router = APIRouter(prefix="/api/{user_id}/projects", tags=["projects"])
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
@router.get("/", response_model=List[ProjectRead])
def list_projects(
    user_id: UUID,
    current_user: User = Depends(get_current_user),
    session: Session = Depends(get_session_dep)
):
    """Return every project owned by the authenticated user, newest first."""
    # A mismatched path user id is reported as 404 to avoid leaking resource existence.
    if current_user.id != user_id:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Project not found"
        )

    # One chained statement: filter by owner, newest first.
    stmt = (
        select(Project)
        .where(Project.user_id == user_id)
        .order_by(Project.created_at.desc())
    )
    return session.exec(stmt).all()
|
| 40 |
+
|
| 41 |
+
|
| 42 |
+
@router.post("/", response_model=ProjectRead, status_code=status.HTTP_201_CREATED)
def create_project(
    *,
    user_id: UUID,
    project_data: ProjectCreate,
    current_user: User = Depends(get_current_user),
    session: Session = Depends(get_session_dep)
):
    """Create a project owned by the authenticated user and return it."""
    # Guard clause: path user id must match the authenticated user.
    if current_user.id != user_id:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="User not found"
        )

    new_project = Project(
        name=project_data.name,
        description=project_data.description,
        color=project_data.color,
        user_id=user_id,
    )
    session.add(new_project)
    session.commit()
    # Refresh to pick up DB-generated fields (id, timestamps) before returning.
    session.refresh(new_project)
    return new_project
|
| 72 |
+
|
| 73 |
+
|
| 74 |
+
@router.get("/{project_id}", response_model=ProjectRead)
def get_project(
    *,
    user_id: UUID,
    project_id: UUID,
    current_user: User = Depends(get_current_user),
    session: Session = Depends(get_session_dep)
):
    """Return one project by id; 404 unless it exists and is owned by the caller."""
    # Mismatched path user id looks exactly like a missing project.
    if current_user.id != user_id:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Project not found"
        )

    found = session.get(Project, project_id)
    # "Exists but owned by someone else" is treated the same as "does not exist".
    if not found or found.user_id != user_id:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Project not found"
        )

    return found
|
| 102 |
+
|
| 103 |
+
|
| 104 |
+
@router.put("/{project_id}", response_model=ProjectRead)
def update_project(
    *,
    user_id: UUID,
    project_id: UUID,
    project_data: ProjectUpdate,
    current_user: User = Depends(get_current_user),
    session: Session = Depends(get_session_dep)
):
    """Apply a partial update to a project owned by the authenticated user."""
    # Guard: path user id must match the authenticated user.
    if current_user.id != user_id:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Project not found"
        )

    target = session.get(Project, project_id)
    # Missing or foreign-owned projects both yield 404.
    if not target or target.user_id != user_id:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Project not found"
        )

    # Partial-update semantics: only fields the client actually sent are applied.
    for field_name, new_value in project_data.dict(exclude_unset=True).items():
        setattr(target, field_name, new_value)

    session.add(target)
    session.commit()
    session.refresh(target)
    return target
|
| 142 |
+
|
| 143 |
+
|
| 144 |
+
@router.delete("/{project_id}")
def delete_project(
    *,
    user_id: UUID,
    project_id: UUID,
    current_user: User = Depends(get_current_user),
    session: Session = Depends(get_session_dep)
):
    """Delete a project owned by the authenticated user."""
    # Guard: path user id must match the authenticated user.
    if current_user.id != user_id:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Project not found"
        )

    doomed = session.get(Project, project_id)
    # Missing or foreign-owned projects both yield 404.
    if not doomed or doomed.user_id != user_id:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Project not found"
        )

    session.delete(doomed)
    session.commit()
    return {"message": "Project deleted successfully"}
|
| 176 |
+
|
| 177 |
+
|
| 178 |
+
@router.get("/{project_id}/tasks", response_model=List[Task])
def list_project_tasks(
    *,
    user_id: UUID,
    project_id: UUID,
    current_user: User = Depends(get_current_user),
    session: Session = Depends(get_session_dep)
):
    """Return the tasks attached to one project, newest first."""
    # Guard: path user id must match the authenticated user.
    if current_user.id != user_id:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Project not found"
        )

    owning_project = session.get(Project, project_id)
    # Missing or foreign-owned projects both yield 404.
    if not owning_project or owning_project.user_id != user_id:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Project not found"
        )

    # Single chained statement: tasks of this project, newest first.
    stmt = (
        select(Task)
        .where(Task.project_id == project_id)
        .order_by(Task.created_at.desc())
    )
    return session.exec(stmt).all()
|
| 213 |
+
|
| 214 |
+
|
| 215 |
+
@router.get("/{project_id}/progress")
def get_project_progress(
    *,
    user_id: UUID,
    project_id: UUID,
    current_user: User = Depends(get_current_user),
    session: Session = Depends(get_session_dep)
):
    """Get progress statistics for a specific project.

    Returns:
        dict with total_tasks, completed_tasks, pending_tasks and a progress
        percentage rounded to 2 decimals (0 when the project has no tasks).

    Raises:
        HTTPException: 404 on user-id mismatch or when the project is missing
            or owned by another user.
    """
    # Verify that the user_id in the URL matches the authenticated user
    if current_user.id != user_id:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Project not found"
        )

    project = session.get(Project, project_id)
    # Missing or foreign-owned projects both yield 404.
    if not project or project.user_id != user_id:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Project not found"
        )

    # Count in SQL rather than loading rows. select_from(Task) makes the FROM
    # clause explicit instead of relying on inference from the WHERE clause
    # (and matches the counting pattern used by the tasks router).
    total_tasks_query = (
        select(func.count())
        .select_from(Task)
        .where(Task.project_id == project_id)
    )
    completed_tasks_query = (
        select(func.count())
        .select_from(Task)
        .where(and_(Task.project_id == project_id, Task.completed == True))
    )

    # COUNT always yields exactly one row, so .one() is safe and — unlike
    # .first() — can never hand back None and break the arithmetic below.
    total_tasks = session.exec(total_tasks_query).one()
    completed_tasks = session.exec(completed_tasks_query).one()

    # Calculate progress; an empty project reports 0% rather than dividing by zero.
    progress = 0
    if total_tasks > 0:
        progress = round((completed_tasks / total_tasks) * 100, 2)

    return {
        "total_tasks": total_tasks,
        "completed_tasks": completed_tasks,
        "pending_tasks": total_tasks - completed_tasks,
        "progress": progress
    }
|
src/routers/tasks.py
ADDED
|
@@ -0,0 +1,637 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import logging
import uuid as uuid_lib
from collections import Counter
from datetime import datetime, timedelta, date
from typing import List, Optional
from uuid import UUID

from fastapi import APIRouter, HTTPException, status, Depends
from sqlmodel import Session, select, and_, func

from ..models.user import User
from ..models.task import Task, TaskCreate, TaskUpdate, TaskRead
from ..models.audit_log import AuditLog
from ..schemas.task import TaskListResponse
from ..database import get_session_dep
from ..utils.deps import get_current_user
from ..events import publish_created_event, publish_updated_event, publish_deleted_event, publish_completed_event
|
| 16 |
+
|
| 17 |
+
# Configure logging
|
| 18 |
+
logging.basicConfig(level=logging.INFO)
|
| 19 |
+
logger = logging.getLogger(__name__)
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
def save_audit_event(
    session: Session,
    event_type: str,
    task: Task,
    user_id: UUID
):
    """
    Directly save an audit event to the database.
    This bypasses Kafka/Dapr for now and provides immediate persistence.

    Best-effort: any failure is logged and swallowed so the caller's request
    is never broken by auditing. The row is only flushed — the caller's
    transaction is responsible for the commit.
    """
    try:
        entry = AuditLog(
            event_id=str(uuid_lib.uuid4()),
            event_type=event_type,
            user_id=str(user_id),
            task_id=task.id,
            event_data={
                "title": task.title,
                "description": task.description or "",
                "completed": task.completed
            }
        )
        session.add(entry)
        # Flush to database without committing (parent transaction handles commit).
        session.flush()
        logger.info(f"Audit event {event_type} saved for task {task.id}")
    except Exception as e:
        # Don't raise - continue execution even if audit save fails.
        logger.error(f"Failed to save audit event: {e}")
|
| 51 |
+
|
| 52 |
+
|
| 53 |
+
router = APIRouter(prefix="/api/{user_id}/tasks", tags=["tasks"])
|
| 54 |
+
|
| 55 |
+
|
| 56 |
+
def _current_streak(completed_dates):
    """Length of the run of consecutive completion days ending today/yesterday.

    Args:
        completed_dates: distinct dates sorted newest-first.

    Returns:
        0 when there are no completions or the most recent one is older than
        yesterday (streak broken); otherwise the consecutive-day count.
    """
    if not completed_dates:
        return 0
    today = datetime.utcnow().date()
    # A streak is only "active" if something was completed today or yesterday.
    if completed_dates[0] != today and completed_dates[0] != today - timedelta(days=1):
        return 0
    streak = 1
    for later, earlier in zip(completed_dates, completed_dates[1:]):
        if later - timedelta(days=1) == earlier:
            streak += 1
        else:
            break
    return streak


@router.get("/stats")
def get_task_stats(
    user_id: UUID,
    current_user: User = Depends(get_current_user),
    session: Session = Depends(get_session_dep)
):
    """Get advanced task statistics, streaks, and achievements.

    Returns totals, completion rate, the current daily completion streak,
    achievement unlock states, and a 7-day productivity chart. `updated_at`
    stands in for completion time (no dedicated completed_at field).

    Raises:
        HTTPException: 404 on user-id mismatch.
    """
    if current_user.id != user_id:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="User not found"
        )

    tasks = session.exec(select(Task).where(Task.user_id == user_id)).all()

    total = len(tasks)
    completed_tasks = [t for t in tasks if t.completed]
    completed_count = len(completed_tasks)
    pending_count = total - completed_count
    completion_rate = round((completed_count / total * 100), 1) if total > 0 else 0

    # Completion counts per calendar day, computed once; replaces the previous
    # per-day rescan of all completed tasks (O(days * tasks) -> O(tasks)).
    completions_by_day = Counter(t.updated_at.date() for t in completed_tasks)
    completed_dates = sorted(completions_by_day, reverse=True)

    streak = _current_streak(completed_dates)

    # Achievements logic: static thresholds on completion count and streak.
    achievements = [
        {
            "id": "first_task",
            "title": "First Step",
            "description": "Complete your first task",
            "unlocked": completed_count >= 1,
            "icon": "Star",
            "progress": 100 if completed_count >= 1 else 0
        },
        {
            "id": "five_tasks",
            "title": "High Five",
            "description": "Complete 5 tasks",
            "unlocked": completed_count >= 5,
            "icon": "Zap",
            "progress": min(100, int(completed_count / 5 * 100))
        },
        {
            "id": "ten_tasks",
            "title": "Task Master",
            "description": "Complete 10 tasks",
            "unlocked": completed_count >= 10,
            "icon": "Trophy",
            "progress": min(100, int(completed_count / 10 * 100))
        },
        {
            "id": "streak_3",
            "title": "Consistent",
            "description": "3-day completion streak",
            "unlocked": streak >= 3,
            "icon": "Flame",
            "progress": min(100, int(streak / 3 * 100))
        },
        {
            "id": "streak_7",
            "title": "Unstoppable",
            "description": "7-day completion streak",
            "unlocked": streak >= 7,
            "icon": "Award",
            "progress": min(100, int(streak / 7 * 100))
        }
    ]

    # Productivity chart data (last 7 days), oldest to newest.
    chart_data = []
    for i in range(6, -1, -1):
        day = (datetime.utcnow() - timedelta(days=i)).date()
        chart_data.append({
            "date": day.strftime("%a"),
            "count": completions_by_day.get(day, 0),
            "isToday": i == 0
        })

    return {
        "total": total,
        "completed": completed_count,
        "pending": pending_count,
        "completionRate": completion_rate,
        "streak": streak,
        "achievements": achievements,
        "chartData": chart_data
    }
|
| 160 |
+
|
| 161 |
+
|
| 162 |
+
@router.get("/", response_model=TaskListResponse)
def list_tasks(
    user_id: UUID,
    current_user: User = Depends(get_current_user),
    session: Session = Depends(get_session_dep),
    # Optional[bool] fixes the previous `completed: bool = None`, which
    # annotated a None default with a plain bool type.
    completed: Optional[bool] = None,
    offset: int = 0,
    limit: int = 50
):
    """List all tasks for the authenticated user with optional filtering.

    Args:
        completed: When provided, restrict results to this completion state.
        offset, limit: Pagination window; results are ordered newest first.

    Returns:
        TaskListResponse with serialized tasks plus total/offset/limit metadata.

    Raises:
        HTTPException: 404 on user-id mismatch.
    """
    # Verify that the user_id in the URL matches the authenticated user
    if current_user.id != user_id:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Task not found"
        )

    # Build the query: owner filter, optional completed filter, newest first, paginated.
    query = select(Task).where(Task.user_id == user_id)
    if completed is not None:
        query = query.where(Task.completed == completed)
    query = query.order_by(Task.created_at.desc()).offset(offset).limit(limit)

    tasks = session.exec(query).all()

    # Total count uses the same filters but no pagination, for client paging info.
    total_query = select(func.count()).select_from(Task).where(Task.user_id == user_id)
    if completed is not None:
        total_query = total_query.where(Task.completed == completed)
    total = session.exec(total_query).one()

    # Serialize UUIDs/datetimes to strings for the response schema.
    task_responses = [
        {
            "id": task.id,
            "user_id": str(task.user_id),
            "title": task.title,
            "description": task.description,
            "completed": task.completed,
            "due_date": task.due_date.isoformat() if task.due_date else None,
            "project_id": str(task.project_id) if task.project_id else None,
            "created_at": task.created_at.isoformat(),
            "updated_at": task.updated_at.isoformat()
        }
        for task in tasks
    ]

    return TaskListResponse(
        tasks=task_responses,
        total=total,
        offset=offset,
        limit=limit
    )
|
| 223 |
+
|
| 224 |
+
|
| 225 |
+
@router.post("/", response_model=TaskRead, status_code=status.HTTP_201_CREATED)
async def create_task(
    user_id: UUID,
    task_data: TaskCreate,
    current_user: User = Depends(get_current_user),
    session: Session = Depends(get_session_dep)
):
    """Create a new task for the authenticated user.

    Persists the task, publishes a best-effort "created" event, and writes an
    audit record. Event/audit failures are logged but never fail the request.

    Raises:
        HTTPException: 404 on user-id mismatch; 400 on invalid title or
            description length.
    """
    # Verify that the user_id in the URL matches the authenticated user
    if current_user.id != user_id:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="User not found"
        )

    # Validate title length
    if len(task_data.title) < 1 or len(task_data.title) > 200:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="Title must be between 1 and 200 characters"
        )

    # Validate description length if provided
    if task_data.description and len(task_data.description) > 1000:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="Description must be 1000 characters or less"
        )

    # Create new task
    task = Task(
        title=task_data.title,
        description=task_data.description,
        completed=task_data.completed,
        due_date=task_data.due_date,
        project_id=task_data.project_id,
        user_id=user_id
    )

    session.add(task)
    session.commit()
    session.refresh(task)

    # Publish created event (best-effort; the task itself is already persisted)
    try:
        await publish_created_event(task)
        logger.info(f"Published created event for task {task.id}")
    except Exception as e:
        logger.error(f"Failed to publish created event for task {task.id}: {e}")
        # Continue execution even if event publishing fails

    # Save audit event to database. save_audit_event only flushes, so an
    # explicit commit is required here — previously the audit row was left
    # uncommitted and depended on session teardown for persistence.
    save_audit_event(session, "created", task, user_id)
    session.commit()

    return TaskRead(
        id=task.id,
        user_id=task.user_id,
        title=task.title,
        description=task.description,
        completed=task.completed,
        due_date=task.due_date,
        project_id=task.project_id,
        created_at=task.created_at,
        updated_at=task.updated_at
    )
|
| 291 |
+
|
| 292 |
+
|
| 293 |
+
@router.get("/{task_id}", response_model=TaskRead)
|
| 294 |
+
def get_task(
|
| 295 |
+
user_id: UUID,
|
| 296 |
+
task_id: int,
|
| 297 |
+
current_user: User = Depends(get_current_user),
|
| 298 |
+
session: Session = Depends(get_session_dep)
|
| 299 |
+
):
|
| 300 |
+
"""Get a specific task by ID for the authenticated user."""
|
| 301 |
+
|
| 302 |
+
# Verify that the user_id in the URL matches the authenticated user
|
| 303 |
+
if current_user.id != user_id:
|
| 304 |
+
raise HTTPException(
|
| 305 |
+
status_code=status.HTTP_404_NOT_FOUND,
|
| 306 |
+
detail="Task not found"
|
| 307 |
+
)
|
| 308 |
+
|
| 309 |
+
# Get the task
|
| 310 |
+
task = session.get(Task, task_id)
|
| 311 |
+
|
| 312 |
+
# Verify the task exists and belongs to the user
|
| 313 |
+
if not task or task.user_id != user_id:
|
| 314 |
+
raise HTTPException(
|
| 315 |
+
status_code=status.HTTP_404_NOT_FOUND,
|
| 316 |
+
detail="Task not found"
|
| 317 |
+
)
|
| 318 |
+
|
| 319 |
+
return TaskRead(
|
| 320 |
+
id=task.id,
|
| 321 |
+
user_id=task.user_id,
|
| 322 |
+
title=task.title,
|
| 323 |
+
description=task.description,
|
| 324 |
+
completed=task.completed,
|
| 325 |
+
due_date=task.due_date,
|
| 326 |
+
project_id=task.project_id,
|
| 327 |
+
created_at=task.created_at,
|
| 328 |
+
updated_at=task.updated_at
|
| 329 |
+
)
|
| 330 |
+
|
| 331 |
+
|
| 332 |
+
@router.put("/{task_id}", response_model=TaskRead)
|
| 333 |
+
async def update_task(
|
| 334 |
+
user_id: UUID,
|
| 335 |
+
task_id: int,
|
| 336 |
+
task_data: TaskUpdate,
|
| 337 |
+
current_user: User = Depends(get_current_user),
|
| 338 |
+
session: Session = Depends(get_session_dep)
|
| 339 |
+
):
|
| 340 |
+
"""Update an existing task for the authenticated user."""
|
| 341 |
+
|
| 342 |
+
# Verify that the user_id in the URL matches the authenticated user
|
| 343 |
+
if current_user.id != user_id:
|
| 344 |
+
raise HTTPException(
|
| 345 |
+
status_code=status.HTTP_404_NOT_FOUND,
|
| 346 |
+
detail="Task not found"
|
| 347 |
+
)
|
| 348 |
+
|
| 349 |
+
# Get the task
|
| 350 |
+
task = session.get(Task, task_id)
|
| 351 |
+
|
| 352 |
+
# Verify the task exists and belongs to the user
|
| 353 |
+
if not task or task.user_id != user_id:
|
| 354 |
+
raise HTTPException(
|
| 355 |
+
status_code=status.HTTP_404_NOT_FOUND,
|
| 356 |
+
detail="Task not found"
|
| 357 |
+
)
|
| 358 |
+
|
| 359 |
+
# Store original values for the event
|
| 360 |
+
original_completed = task.completed
|
| 361 |
+
|
| 362 |
+
# Update fields if provided
|
| 363 |
+
if task_data.title is not None:
|
| 364 |
+
if len(task_data.title) < 1 or len(task_data.title) > 200:
|
| 365 |
+
raise HTTPException(
|
| 366 |
+
status_code=status.HTTP_400_BAD_REQUEST,
|
| 367 |
+
detail="Title must be between 1 and 200 characters"
|
| 368 |
+
)
|
| 369 |
+
task.title = task_data.title
|
| 370 |
+
|
| 371 |
+
if task_data.description is not None:
|
| 372 |
+
if len(task_data.description) > 1000:
|
| 373 |
+
raise HTTPException(
|
| 374 |
+
status_code=status.HTTP_400_BAD_REQUEST,
|
| 375 |
+
detail="Description must be 1000 characters or less"
|
| 376 |
+
)
|
| 377 |
+
task.description = task_data.description
|
| 378 |
+
|
| 379 |
+
if task_data.completed is not None:
|
| 380 |
+
task.completed = task_data.completed
|
| 381 |
+
|
| 382 |
+
if task_data.due_date is not None:
|
| 383 |
+
task.due_date = task_data.due_date
|
| 384 |
+
|
| 385 |
+
if task_data.project_id is not None:
|
| 386 |
+
task.project_id = task_data.project_id
|
| 387 |
+
|
| 388 |
+
# Update the timestamp
|
| 389 |
+
task.updated_at = datetime.utcnow()
|
| 390 |
+
|
| 391 |
+
session.add(task)
|
| 392 |
+
session.commit()
|
| 393 |
+
session.refresh(task)
|
| 394 |
+
|
| 395 |
+
# Publish updated event
|
| 396 |
+
try:
|
| 397 |
+
await publish_updated_event(task)
|
| 398 |
+
logger.info(f"Published updated event for task {task.id}")
|
| 399 |
+
except Exception as e:
|
| 400 |
+
logger.error(f"Failed to publish updated event for task {task.id}: {e}")
|
| 401 |
+
# Continue execution even if event publishing fails
|
| 402 |
+
|
| 403 |
+
# Save audit event for update
|
| 404 |
+
save_audit_event(session, "updated", task, user_id)
|
| 405 |
+
|
| 406 |
+
# If the task was marked as completed, publish a completed event
|
| 407 |
+
if original_completed != task.completed and task.completed:
|
| 408 |
+
try:
|
| 409 |
+
await publish_completed_event(task)
|
| 410 |
+
logger.info(f"Published completed event for task {task.id}")
|
| 411 |
+
except Exception as e:
|
| 412 |
+
logger.error(f"Failed to publish completed event for task {task.id}: {e}")
|
| 413 |
+
|
| 414 |
+
# Save audit event for completion
|
| 415 |
+
save_audit_event(session, "completed", task, user_id)
|
| 416 |
+
|
| 417 |
+
return TaskRead(
|
| 418 |
+
id=task.id,
|
| 419 |
+
user_id=task.user_id,
|
| 420 |
+
title=task.title,
|
| 421 |
+
description=task.description,
|
| 422 |
+
completed=task.completed,
|
| 423 |
+
due_date=task.due_date,
|
| 424 |
+
project_id=task.project_id,
|
| 425 |
+
created_at=task.created_at,
|
| 426 |
+
updated_at=task.updated_at
|
| 427 |
+
)
|
| 428 |
+
|
| 429 |
+
|
| 430 |
+
@router.patch("/{task_id}", response_model=TaskRead)
|
| 431 |
+
async def patch_task(
|
| 432 |
+
user_id: UUID,
|
| 433 |
+
task_id: int,
|
| 434 |
+
task_data: TaskUpdate,
|
| 435 |
+
current_user: User = Depends(get_current_user),
|
| 436 |
+
session: Session = Depends(get_session_dep)
|
| 437 |
+
):
|
| 438 |
+
"""Partially update an existing task for the authenticated user."""
|
| 439 |
+
|
| 440 |
+
# Verify that the user_id in the URL matches the authenticated user
|
| 441 |
+
if current_user.id != user_id:
|
| 442 |
+
raise HTTPException(
|
| 443 |
+
status_code=status.HTTP_404_NOT_FOUND,
|
| 444 |
+
detail="Task not found"
|
| 445 |
+
)
|
| 446 |
+
|
| 447 |
+
# Get the task
|
| 448 |
+
task = session.get(Task, task_id)
|
| 449 |
+
|
| 450 |
+
# Verify the task exists and belongs to the user
|
| 451 |
+
if not task or task.user_id != user_id:
|
| 452 |
+
raise HTTPException(
|
| 453 |
+
status_code=status.HTTP_404_NOT_FOUND,
|
| 454 |
+
detail="Task not found"
|
| 455 |
+
)
|
| 456 |
+
|
| 457 |
+
# Store original values for the event
|
| 458 |
+
original_completed = task.completed
|
| 459 |
+
|
| 460 |
+
# Update fields if provided
|
| 461 |
+
if task_data.title is not None:
|
| 462 |
+
if len(task_data.title) < 1 or len(task_data.title) > 200:
|
| 463 |
+
raise HTTPException(
|
| 464 |
+
status_code=status.HTTP_400_BAD_REQUEST,
|
| 465 |
+
detail="Title must be between 1 and 200 characters"
|
| 466 |
+
)
|
| 467 |
+
task.title = task_data.title
|
| 468 |
+
|
| 469 |
+
if task_data.description is not None:
|
| 470 |
+
if len(task_data.description) > 1000:
|
| 471 |
+
raise HTTPException(
|
| 472 |
+
status_code=status.HTTP_400_BAD_REQUEST,
|
| 473 |
+
detail="Description must be 1000 characters or less"
|
| 474 |
+
)
|
| 475 |
+
task.description = task_data.description
|
| 476 |
+
|
| 477 |
+
if task_data.completed is not None:
|
| 478 |
+
task.completed = task_data.completed
|
| 479 |
+
|
| 480 |
+
if task_data.due_date is not None:
|
| 481 |
+
task.due_date = task_data.due_date
|
| 482 |
+
|
| 483 |
+
if task_data.project_id is not None:
|
| 484 |
+
task.project_id = task_data.project_id
|
| 485 |
+
|
| 486 |
+
# Update the timestamp
|
| 487 |
+
task.updated_at = datetime.utcnow()
|
| 488 |
+
|
| 489 |
+
session.add(task)
|
| 490 |
+
session.commit()
|
| 491 |
+
session.refresh(task)
|
| 492 |
+
|
| 493 |
+
# Publish updated event
|
| 494 |
+
try:
|
| 495 |
+
await publish_updated_event(task)
|
| 496 |
+
logger.info(f"Published updated event for task {task.id}")
|
| 497 |
+
except Exception as e:
|
| 498 |
+
logger.error(f"Failed to publish updated event for task {task.id}: {e}")
|
| 499 |
+
|
| 500 |
+
# Save audit event for update
|
| 501 |
+
save_audit_event(session, "updated", task, user_id)
|
| 502 |
+
|
| 503 |
+
# If the task was marked as completed, publish a completed event
|
| 504 |
+
if original_completed != task.completed and task.completed:
|
| 505 |
+
try:
|
| 506 |
+
await publish_completed_event(task)
|
| 507 |
+
logger.info(f"Published completed event for task {task.id}")
|
| 508 |
+
except Exception as e:
|
| 509 |
+
logger.error(f"Failed to publish completed event for task {task.id}: {e}")
|
| 510 |
+
|
| 511 |
+
# Save audit event for completion
|
| 512 |
+
save_audit_event(session, "completed", task, user_id)
|
| 513 |
+
|
| 514 |
+
return TaskRead(
|
| 515 |
+
id=task.id,
|
| 516 |
+
user_id=task.user_id,
|
| 517 |
+
title=task.title,
|
| 518 |
+
description=task.description,
|
| 519 |
+
completed=task.completed,
|
| 520 |
+
due_date=task.due_date,
|
| 521 |
+
project_id=task.project_id,
|
| 522 |
+
created_at=task.created_at,
|
| 523 |
+
updated_at=task.updated_at
|
| 524 |
+
)
|
| 525 |
+
|
| 526 |
+
|
| 527 |
+
@router.delete("/{task_id}", status_code=status.HTTP_204_NO_CONTENT)
|
| 528 |
+
async def delete_task(
|
| 529 |
+
user_id: UUID,
|
| 530 |
+
task_id: int,
|
| 531 |
+
current_user: User = Depends(get_current_user),
|
| 532 |
+
session: Session = Depends(get_session_dep)
|
| 533 |
+
):
|
| 534 |
+
"""Delete a task for the authenticated user."""
|
| 535 |
+
|
| 536 |
+
# Verify that the user_id in the URL matches the authenticated user
|
| 537 |
+
if current_user.id != user_id:
|
| 538 |
+
raise HTTPException(
|
| 539 |
+
status_code=status.HTTP_404_NOT_FOUND,
|
| 540 |
+
detail="Task not found"
|
| 541 |
+
)
|
| 542 |
+
|
| 543 |
+
# Get the task
|
| 544 |
+
task = session.get(Task, task_id)
|
| 545 |
+
|
| 546 |
+
# Verify the task exists and belongs to the user
|
| 547 |
+
if not task or task.user_id != user_id:
|
| 548 |
+
raise HTTPException(
|
| 549 |
+
status_code=status.HTTP_404_NOT_FOUND,
|
| 550 |
+
detail="Task not found"
|
| 551 |
+
)
|
| 552 |
+
|
| 553 |
+
# Publish deleted event before deleting the task
|
| 554 |
+
try:
|
| 555 |
+
await publish_deleted_event(task)
|
| 556 |
+
logger.info(f"Published deleted event for task {task.id}")
|
| 557 |
+
except Exception as e:
|
| 558 |
+
logger.error(f"Failed to publish deleted event for task {task.id}: {e}")
|
| 559 |
+
# Continue with deletion even if event publishing fails
|
| 560 |
+
|
| 561 |
+
# Save audit event for deletion (while task still exists)
|
| 562 |
+
save_audit_event(session, "deleted", task, user_id)
|
| 563 |
+
|
| 564 |
+
session.delete(task)
|
| 565 |
+
session.commit()
|
| 566 |
+
|
| 567 |
+
# Return 204 No Content
|
| 568 |
+
return
|
| 569 |
+
|
| 570 |
+
|
| 571 |
+
@router.patch("/{task_id}/toggle", response_model=TaskRead)
|
| 572 |
+
async def toggle_task_completion(
|
| 573 |
+
user_id: UUID,
|
| 574 |
+
task_id: int,
|
| 575 |
+
current_user: User = Depends(get_current_user),
|
| 576 |
+
session: Session = Depends(get_session_dep)
|
| 577 |
+
):
|
| 578 |
+
"""Toggle the completion status of a task."""
|
| 579 |
+
|
| 580 |
+
# Verify that the user_id in the URL matches the authenticated user
|
| 581 |
+
if current_user.id != user_id:
|
| 582 |
+
raise HTTPException(
|
| 583 |
+
status_code=status.HTTP_404_NOT_FOUND,
|
| 584 |
+
detail="Task not found"
|
| 585 |
+
)
|
| 586 |
+
|
| 587 |
+
# Get the task
|
| 588 |
+
task = session.get(Task, task_id)
|
| 589 |
+
|
| 590 |
+
# Verify the task exists and belongs to the user
|
| 591 |
+
if not task or task.user_id != user_id:
|
| 592 |
+
raise HTTPException(
|
| 593 |
+
status_code=status.HTTP_404_NOT_FOUND,
|
| 594 |
+
detail="Task not found"
|
| 595 |
+
)
|
| 596 |
+
|
| 597 |
+
# Store original completion status for event
|
| 598 |
+
original_completed = task.completed
|
| 599 |
+
|
| 600 |
+
# Toggle the completion status
|
| 601 |
+
task.completed = not task.completed
|
| 602 |
+
task.updated_at = datetime.utcnow()
|
| 603 |
+
|
| 604 |
+
session.add(task)
|
| 605 |
+
session.commit()
|
| 606 |
+
session.refresh(task)
|
| 607 |
+
|
| 608 |
+
# Publish updated event
|
| 609 |
+
try:
|
| 610 |
+
await publish_updated_event(task)
|
| 611 |
+
logger.info(f"Published updated event for task {task.id}")
|
| 612 |
+
except Exception as e:
|
| 613 |
+
logger.error(f"Failed to publish updated event for task {task.id}: {e}")
|
| 614 |
+
|
| 615 |
+
# Save audit event for update
|
| 616 |
+
save_audit_event(session, "updated", task, user_id)
|
| 617 |
+
|
| 618 |
+
# If the task was marked as completed, publish a completed event
|
| 619 |
+
if not original_completed and task.completed:
|
| 620 |
+
try:
|
| 621 |
+
await publish_completed_event(task)
|
| 622 |
+
logger.info(f"Published completed event for task {task.id}")
|
| 623 |
+
except Exception as e:
|
| 624 |
+
logger.error(f"Failed to publish completed event for task {task.id}: {e}")
|
| 625 |
+
|
| 626 |
+
# Save audit event for completion
|
| 627 |
+
save_audit_event(session, "completed", task, user_id)
|
| 628 |
+
|
| 629 |
+
return TaskRead(
|
| 630 |
+
id=task.id,
|
| 631 |
+
user_id=task.user_id,
|
| 632 |
+
title=task.title,
|
| 633 |
+
description=task.description,
|
| 634 |
+
completed=task.completed,
|
| 635 |
+
created_at=task.created_at,
|
| 636 |
+
updated_at=task.updated_at
|
| 637 |
+
)
|
src/schemas/auth.py
ADDED
|
@@ -0,0 +1,41 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from pydantic import BaseModel, EmailStr
from typing import Optional
from datetime import datetime
from uuid import UUID
# NOTE(review): datetime appears unused in this module — confirm before removing.


class RegisterRequest(BaseModel):
    """Credentials submitted to create a new account."""
    email: EmailStr
    password: str


class RegisterResponse(BaseModel):
    """Public view of a newly created user plus a status message."""
    id: UUID
    email: EmailStr
    message: str


class LoginRequest(BaseModel):
    """Credentials submitted to obtain an access token."""
    email: EmailStr
    password: str


class LoginResponse(BaseModel):
    """Successful login payload: bearer token plus the user it belongs to."""
    access_token: str
    token_type: str
    user: RegisterResponse


class ErrorResponse(BaseModel):
    """Generic error envelope returned by the auth endpoints."""
    detail: str
    status_code: Optional[int] = None
    errors: Optional[list] = None


class ForgotPasswordRequest(BaseModel):
    """Starts a password-reset flow for the given address."""
    email: EmailStr


class ResetPasswordRequest(BaseModel):
    """Completes a password reset by setting a new password."""
    email: EmailStr
    new_password: str
|
src/schemas/task.py
ADDED
|
@@ -0,0 +1,39 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from pydantic import BaseModel
from typing import List, Optional
from datetime import datetime
from uuid import UUID


class TaskBase(BaseModel):
    """Fields shared by the task create/read schemas."""
    title: str
    description: Optional[str] = None
    completed: bool = False
    due_date: Optional[datetime] = None
    project_id: Optional[UUID] = None


class TaskCreate(TaskBase):
    """Payload for creating a task; inherits the optional fields above."""
    title: str
    description: Optional[str] = None


class TaskUpdate(BaseModel):
    """Partial-update payload: a None field means "leave unchanged".

    due_date and project_id were added because the PUT/PATCH handlers read
    task_data.due_date and task_data.project_id; without these fields that
    attribute access raised AttributeError at runtime. Both are optional and
    default to None, so existing clients are unaffected.
    """
    title: Optional[str] = None
    description: Optional[str] = None
    completed: Optional[bool] = None
    due_date: Optional[datetime] = None
    project_id: Optional[UUID] = None


class TaskRead(TaskBase):
    """Task as returned to clients, including server-managed fields."""
    id: int
    user_id: UUID
    due_date: Optional[datetime] = None
    project_id: Optional[UUID] = None
    created_at: datetime
    updated_at: datetime


class TaskListResponse(BaseModel):
    """Paginated task listing with the paging window echoed back."""
    tasks: List[TaskRead]
    total: int
    offset: int
    limit: int
|