diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000000000000000000000000000000000000..549d4e3e0296d7c1ff99155c80f382cfcac5244e --- /dev/null +++ b/.dockerignore @@ -0,0 +1,45 @@ +# Python +__pycache__/ +*.py[cod] +*$py.class +*.so +.Python +env/ +venv/ +ENV/ +.venv + +# Testing +.pytest_cache/ +.coverage +htmlcov/ +*.cover + +# IDE +.vscode/ +.idea/ +*.swp +*.swo +*~ + +# Environment +.env +.env.local +.env.*.local + +# Database +*.db +*.sqlite + +# Logs +*.log + +# OS +.DS_Store +Thumbs.db + +# Alembic +alembic/versions/*.pyc + +# Documentation +docs/_build/ diff --git a/.env b/.env new file mode 100644 index 0000000000000000000000000000000000000000..209239a4a5c1f65c7ad7fb01f0648e419fea79b6 --- /dev/null +++ b/.env @@ -0,0 +1,13 @@ +# Database Configuration +# For local PostgreSQL: postgresql://user:password@localhost:5432/todo_db +# For Neon: Use your Neon connection string from the dashboard +DATABASE_URL=postgresql://neondb_owner:REDACTED-ROTATE-THIS-PASSWORD@ep-xxx-pooler.eu-west-2.aws.neon.tech/neondb?sslmode=require&channel_binding=require +# Application Settings +APP_NAME=Task CRUD API +DEBUG=True +CORS_ORIGINS=http://localhost:3000 + +# Authentication +BETTER_AUTH_SECRET=REDACTED-generate-with-python-secrets.token_urlsafe +JWT_ALGORITHM=HS256 +JWT_EXPIRATION_DAYS=7 diff --git a/.env.example b/.env.example new file mode 100644 index 0000000000000000000000000000000000000000..e9232d3d85382220f94f2167488f581a64add4ae --- /dev/null +++ b/.env.example @@ -0,0 +1,12 @@ +# Database Configuration +DATABASE_URL=postgresql://user:password@host:5432/database + +# Application Settings +APP_NAME=Task CRUD API +DEBUG=True +CORS_ORIGINS=http://localhost:3000 + +# Authentication (Placeholder for Spec 2) +# JWT_SECRET=your-secret-key-here +# JWT_ALGORITHM=HS256 +# JWT_EXPIRATION_MINUTES=1440 diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000000000000000000000000000000000000..4a1ed3c42b4f5c6357cfc41b28b5b679a65301fe --- /dev/null +++ 
b/Dockerfile @@ -0,0 +1,26 @@ +# Use Python 3.11 slim image +FROM python:3.11-slim + +# Set working directory +WORKDIR /app + +# Install system dependencies +RUN apt-get update && apt-get install -y \ + gcc \ + postgresql-client \ + && rm -rf /var/lib/apt/lists/* + +# Copy requirements first for better caching +COPY requirements.txt . + +# Install Python dependencies +RUN pip install --no-cache-dir -r requirements.txt + +# Copy application code +COPY . . + +# Expose port 7860 (Hugging Face Spaces default) +EXPOSE 7860 + +# Run database migrations and start the application +CMD alembic upgrade head && uvicorn src.main:app --host 0.0.0.0 --port 7860 diff --git a/README.md b/README.md index 72228325d06dd8759105e4d7d2dcf822ea30118a..9db0c621ecf2b1e69dfabf2d0f22caa97b22c4b9 100644 --- a/README.md +++ b/README.md @@ -1,11 +1,309 @@ ---- -title: Taskflow Api -emoji: đŸĻ€ -colorFrom: green -colorTo: indigo -sdk: docker -pinned: false -license: mit ---- - -Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference +# Task CRUD API - Backend + +FastAPI backend for the Task Manager application with full CRUD operations, filtering, and sorting. 
+ +## Tech Stack + +- **Python**: 3.11+ +- **FastAPI**: 0.104+ (Web framework) +- **SQLModel**: 0.0.14+ (ORM) +- **Alembic**: 1.13.0 (Database migrations) +- **PostgreSQL**: Neon Serverless or local PostgreSQL +- **Pydantic**: 2.x (Data validation) + +## Project Structure + +``` +backend/ +├── src/ +│ ├── api/ +│ │ ├── deps.py # Dependency injection (DB session, auth stub) +│ │ └── routes/ +│ │ └── tasks.py # Task CRUD endpoints +│ ├── core/ +│ │ ├── config.py # Application settings +│ │ └── database.py # Database connection +│ ├── models/ +│ │ ├── user.py # User model (stub) +│ │ └── task.py # Task model +│ ├── schemas/ +│ │ └── task.py # Pydantic schemas +│ ├── services/ +│ │ └── task_service.py # Business logic +│ └── main.py # FastAPI application +├── alembic/ +│ ├── versions/ +│ │ └── 001_initial.py # Initial migration +│ └── env.py # Alembic configuration +├── tests/ # Test directory (to be implemented) +├── .env # Environment variables +├── .env.example # Environment template +├── alembic.ini # Alembic configuration +└── requirements.txt # Python dependencies +``` + +## Setup Instructions + +### 1. Install Dependencies + +```bash +cd backend +pip install -r requirements.txt +``` + +### 2. Configure Environment + +Copy `.env.example` to `.env` and configure: + +```bash +cp .env.example .env +``` + +Edit `.env`: + +```env +# For Neon PostgreSQL (recommended) +DATABASE_URL=postgresql://user:password@ep-xxx.neon.tech/dbname?sslmode=require + +# OR for local PostgreSQL +DATABASE_URL=postgresql://postgres:postgres@localhost:5432/todo_db + +APP_NAME=Task CRUD API +DEBUG=True +CORS_ORIGINS=http://localhost:3000 + +# Authentication (REQUIRED) +BETTER_AUTH_SECRET= +JWT_ALGORITHM=HS256 +JWT_EXPIRATION_DAYS=7 +``` + +**Generate BETTER_AUTH_SECRET:** +```bash +# Use Python to generate a secure random secret +python -c "import secrets; print(secrets.token_urlsafe(32))" + +# IMPORTANT: Use the SAME secret in both backend/.env and frontend/.env.local +``` + +### 3. 
Run Database Migrations + +```bash +# Apply migrations to create tables +python -m alembic upgrade head +``` + +### 4. Start Development Server + +```bash +# Start with auto-reload +uvicorn src.main:app --reload + +# Server runs at http://localhost:8000 +``` + +## API Endpoints + +### Authentication + +| Method | Endpoint | Description | Auth Required | +|--------|----------|-------------|---------------| +| POST | `/api/auth/signup` | Register new user account | No | +| POST | `/api/auth/signin` | Authenticate and receive JWT token | No | +| GET | `/api/auth/me` | Get current user profile | Yes | + +### Tasks + +| Method | Endpoint | Description | Auth Required | +|--------|----------|-------------|---------------| +| GET | `/api/tasks` | List tasks with filtering and sorting | Yes | +| POST | `/api/tasks` | Create a new task | Yes | +| GET | `/api/tasks/{id}` | Get a single task | Yes | +| PUT | `/api/tasks/{id}` | Update task (replace all fields) | Yes | +| PATCH | `/api/tasks/{id}` | Partially update task | Yes | +| DELETE | `/api/tasks/{id}` | Delete a task | Yes | + +### Query Parameters (GET /api/tasks) + +- `completed`: Filter by status (true/false/null for all) +- `sort`: Sort field (created_at or updated_at) +- `order`: Sort order (asc or desc) +- `limit`: Maximum number of results +- `offset`: Number of results to skip + +### Example Requests + +**Sign Up:** +```bash +curl -X POST http://localhost:8000/api/auth/signup \ + -H "Content-Type: application/json" \ + -d '{ + "email": "user@example.com", + "password": "SecurePass123", + "name": "John Doe" + }' +``` + +**Sign In:** +```bash +curl -X POST http://localhost:8000/api/auth/signin \ + -H "Content-Type: application/json" \ + -d '{ + "email": "user@example.com", + "password": "SecurePass123" + }' +``` + +**Get Current User (requires JWT token):** +```bash +curl http://localhost:8000/api/auth/me \ + -H "Authorization: Bearer " +``` + +**Create Task (requires JWT token):** +```bash +curl -X POST 
http://localhost:8000/api/tasks \ + -H "Content-Type: application/json" \ + -H "Authorization: Bearer " \ + -d '{"title": "Buy groceries", "description": "Milk, eggs, bread"}' +``` + +**List Active Tasks:** +```bash +curl "http://localhost:8000/api/tasks?completed=false&sort=created_at&order=desc" \ + -H "Authorization: Bearer " +``` + +**Toggle Completion:** +```bash +curl -X PATCH http://localhost:8000/api/tasks/1 \ + -H "Content-Type: application/json" \ + -H "Authorization: Bearer " \ + -d '{"completed": true}' +``` + +## API Documentation + +Interactive API documentation available at: +- **Swagger UI**: http://localhost:8000/docs +- **ReDoc**: http://localhost:8000/redoc + +## Database Schema + +### Tasks Table + +| Column | Type | Description | +|--------|------|-------------| +| id | INTEGER | Primary key | +| user_id | INTEGER | Foreign key to users | +| title | VARCHAR(200) | Task title (required) | +| description | VARCHAR(1000) | Task description (optional) | +| completed | BOOLEAN | Completion status | +| created_at | DATETIME | Creation timestamp | +| updated_at | DATETIME | Last update timestamp | + +**Indexes:** +- `ix_tasks_user_id` - User lookup +- `ix_tasks_completed` - Status filtering +- `ix_tasks_user_id_completed` - Combined user + status +- `ix_tasks_created_at` - Date sorting + +## Authentication + +**Status**: JWT-based authentication with Better Auth integration + +### Authentication Flow + +1. **User Registration** (`POST /api/auth/signup`): + - Validates email format (RFC 5322) + - Validates password strength (min 8 chars, uppercase, lowercase, number) + - Hashes password with bcrypt (cost factor 12) + - Creates user account in database + - Returns user profile (no token issued) + +2. **User Sign In** (`POST /api/auth/signin`): + - Verifies email and password + - Creates JWT token with 7-day expiration + - Token includes: user_id (sub), email, issued_at (iat), expiration (exp) + - Returns token and user profile + +3. 
**Protected Endpoints**: + - All `/api/tasks/*` endpoints require JWT authentication + - Client must include `Authorization: Bearer ` header + - Backend verifies token signature using `BETTER_AUTH_SECRET` + - Extracts user_id from token and filters all queries by authenticated user + - Returns 401 Unauthorized for missing, invalid, or expired tokens + +### Security Features + +- **Stateless Authentication**: No server-side session storage +- **User Data Isolation**: All task queries automatically filtered by authenticated user_id +- **Password Security**: Bcrypt hashing with cost factor 12 +- **Token Expiration**: 7-day JWT expiration (configurable via JWT_EXPIRATION_DAYS) +- **Shared Secret**: BETTER_AUTH_SECRET must match between frontend and backend +- **Error Handling**: Generic error messages for invalid credentials (prevents user enumeration) + +### Token Structure + +```json +{ + "sub": "123", // User ID + "email": "user@example.com", + "iat": 1704067200, // Issued at timestamp + "exp": 1704672000, // Expiration timestamp (7 days) + "iss": "better-auth" // Issuer +} +``` + +### Error Responses + +- **401 TOKEN_EXPIRED**: JWT token has expired +- **401 TOKEN_INVALID**: Invalid signature or malformed token +- **401 TOKEN_MISSING**: No Authorization header provided +- **401 INVALID_CREDENTIALS**: Email or password incorrect (generic message) +- **409 EMAIL_EXISTS**: Email already registered during signup + +## Development + +### Create New Migration + +```bash +python -m alembic revision --autogenerate -m "description" +``` + +### Rollback Migration + +```bash +python -m alembic downgrade -1 +``` + +### Run Tests (when implemented) + +```bash +pytest +``` + +## Troubleshooting + +### Database Connection Issues + +1. **Neon**: Ensure connection string includes `?sslmode=require` +2. **Local PostgreSQL**: Verify PostgreSQL is running and database exists +3. 
Check `.env` file has correct `DATABASE_URL` + +### Migration Errors + +```bash +# Reset database (WARNING: deletes all data) +python -m alembic downgrade base +python -m alembic upgrade head +``` + +## Next Steps + +1. Implement JWT authentication (Spec 2) +2. Add comprehensive test suite +3. Add API rate limiting +4. Implement pagination metadata +5. Add task categories/tags +6. Deploy to production (Vercel/Railway) diff --git a/README_HF.md b/README_HF.md new file mode 100644 index 0000000000000000000000000000000000000000..919e8b1bdd2ff87b38cf36b4fc4b0974d4e9e2ca --- /dev/null +++ b/README_HF.md @@ -0,0 +1,33 @@ +--- +title: TaskFlow API +emoji: ✅ +colorFrom: blue +colorTo: indigo +sdk: docker +pinned: false +license: mit +--- + +# TaskFlow Backend API + +FastAPI backend for TaskFlow task management application. + +## Features + +- User authentication with JWT +- Task CRUD operations +- PostgreSQL database with SQLModel ORM +- RESTful API design + +## Environment Variables + +Configure these in your Space settings: + +- `DATABASE_URL`: PostgreSQL connection string +- `SECRET_KEY`: JWT secret key (generate a secure random string) +- `ALGORITHM`: JWT algorithm (default: HS256) +- `ACCESS_TOKEN_EXPIRE_MINUTES`: Token expiration time (default: 30) + +## API Documentation + +Once deployed, visit `/docs` for interactive API documentation. diff --git a/alembic.ini b/alembic.ini new file mode 100644 index 0000000000000000000000000000000000000000..168c910216e4410946e6a6f0ecbcf0e22d7ee5e9 --- /dev/null +++ b/alembic.ini @@ -0,0 +1,112 @@ +# A generic, single database configuration. + +[alembic] +# path to migration scripts +script_location = alembic + +# template used to generate migration file names +file_template = %%(year)d%%(month).2d%%(day).2d_%%(hour).2d%%(minute).2d_%%(rev)s_%%(slug)s + +# sys.path path, will be prepended to sys.path if present. +prepend_sys_path = . + +# timezone to use when rendering the date within the migration file +# as well as the filename. 
+# If specified, requires the python-dateutil library that can be +# installed by adding `alembic[tz]` to the pip requirements +# string value is passed to dateutil.tz.gettz() +# leave blank for localtime +# timezone = + +# max length of characters to apply to the +# "slug" field +# truncate_slug_length = 40 + +# set to 'true' to run the environment during +# the 'revision' command, regardless of autogenerate +# revision_environment = false + +# set to 'true' to allow .pyc and .pyo files without +# a source .py file to be detected as revisions in the +# versions/ directory +# sourceless = false + +# version location specification; This defaults +# to alembic/versions. When using multiple version +# directories, initial revisions must be specified with --version-path. +# The path separator used here should be the separator specified by "version_path_separator" below. +# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions + +# version path separator; As mentioned above, this is the character used to split +# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep. +# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas. +# Valid values for version_path_separator are: +# +# version_path_separator = : +# version_path_separator = ; +# version_path_separator = space +version_path_separator = os # Use os.pathsep. Default configuration used for new projects. + +# set to 'true' to search source files recursively +# in each "version_locations" directory +# new in Alembic version 1.10 +# recursive_version_locations = false + +# the output encoding used when revision files +# are written from script.py.mako +# output_encoding = utf-8 + +sqlalchemy.url = driver://user:pass@localhost/dbname + + +[post_write_hooks] +# post_write_hooks defines scripts or Python functions that are run +# on newly generated revision scripts. 
See the documentation for further +# detail and examples + +# format using "black" - use the console_scripts runner, against the "black" entrypoint +# hooks = black +# black.type = console_scripts +# black.entrypoint = black +# black.options = -l 79 REVISION_SCRIPT_FILENAME + +# lint with attempts to fix using "ruff" - use the exec runner, execute a binary +# hooks = ruff +# ruff.type = exec +# ruff.executable = %(here)s/.venv/bin/ruff +# ruff.options = --fix REVISION_SCRIPT_FILENAME + +# Logging configuration +[loggers] +keys = root,sqlalchemy,alembic + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARN +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARN +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/alembic/__pycache__/env.cpython-313.pyc b/alembic/__pycache__/env.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0acfc5e2142f44544ddf8158fa8d8a003109dfd3 Binary files /dev/null and b/alembic/__pycache__/env.cpython-313.pyc differ diff --git a/alembic/env.py b/alembic/env.py new file mode 100644 index 0000000000000000000000000000000000000000..a6e937642891c730bcf7bc38e67c274e0184d956 --- /dev/null +++ b/alembic/env.py @@ -0,0 +1,63 @@ +from logging.config import fileConfig +from sqlalchemy import engine_from_config +from sqlalchemy import pool +from alembic import context +import sys +from pathlib import Path + +# Add the src directory to the path +sys.path.insert(0, str(Path(__file__).resolve().parent.parent / "src")) + +from src.core.config import settings +from src.models import User, Task +from sqlmodel import SQLModel + +# this is the Alembic Config object +config = context.config + +# Set the database URL 
from settings +config.set_main_option("sqlalchemy.url", settings.DATABASE_URL) + +# Interpret the config file for Python logging +if config.config_file_name is not None: + fileConfig(config.config_file_name) + +# Add your model's MetaData object here for 'autogenerate' support +target_metadata = SQLModel.metadata + + +def run_migrations_offline() -> None: + """Run migrations in 'offline' mode.""" + url = config.get_main_option("sqlalchemy.url") + context.configure( + url=url, + target_metadata=target_metadata, + literal_binds=True, + dialect_opts={"paramstyle": "named"}, + ) + + with context.begin_transaction(): + context.run_migrations() + + +def run_migrations_online() -> None: + """Run migrations in 'online' mode.""" + connectable = engine_from_config( + config.get_section(config.config_ini_section, {}), + prefix="sqlalchemy.", + poolclass=pool.NullPool, + ) + + with connectable.connect() as connection: + context.configure( + connection=connection, target_metadata=target_metadata + ) + + with context.begin_transaction(): + context.run_migrations() + + +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() diff --git a/alembic/script.py.mako b/alembic/script.py.mako new file mode 100644 index 0000000000000000000000000000000000000000..55df2863d206fa1678abb4c92e90c45d3f85c114 --- /dev/null +++ b/alembic/script.py.mako @@ -0,0 +1,24 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from alembic import op +import sqlalchemy as sa +${imports if imports else ""} + +# revision identifiers, used by Alembic. 
+revision = ${repr(up_revision)} +down_revision = ${repr(down_revision)} +branch_labels = ${repr(branch_labels)} +depends_on = ${repr(depends_on)} + + +def upgrade() -> None: + ${upgrades if upgrades else "pass"} + + +def downgrade() -> None: + ${downgrades if downgrades else "pass"} diff --git a/alembic/versions/001_initial.py b/alembic/versions/001_initial.py new file mode 100644 index 0000000000000000000000000000000000000000..f7540fa33c36acda386e1877ef5919d0caf5820b --- /dev/null +++ b/alembic/versions/001_initial.py @@ -0,0 +1,63 @@ +"""Create users and tasks tables + +Revision ID: 001_initial +Revises: +Create Date: 2026-01-08 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +# revision identifiers, used by Alembic. +revision = '001_initial' +down_revision = None +branch_labels = None +depends_on = None + + +def upgrade() -> None: + # Create users table + op.create_table( + 'users', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('email', sa.String(length=255), nullable=False), + sa.Column('name', sa.String(length=100), nullable=False), + sa.Column('created_at', sa.DateTime(), nullable=False), + sa.Column('updated_at', sa.DateTime(), nullable=False), + sa.PrimaryKeyConstraint('id') + ) + op.create_index('ix_users_email', 'users', ['email'], unique=True) + + # Create tasks table + op.create_table( + 'tasks', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('user_id', sa.Integer(), nullable=False), + sa.Column('title', sa.String(length=200), nullable=False), + sa.Column('description', sa.String(length=1000), nullable=True), + sa.Column('completed', sa.Boolean(), nullable=False, server_default='false'), + sa.Column('created_at', sa.DateTime(), nullable=False), + sa.Column('updated_at', sa.DateTime(), nullable=False), + sa.ForeignKeyConstraint(['user_id'], ['users.id'], ondelete='CASCADE'), + sa.PrimaryKeyConstraint('id') + ) + + # Create indexes for tasks table + 
op.create_index('ix_tasks_user_id', 'tasks', ['user_id']) + op.create_index('ix_tasks_completed', 'tasks', ['completed']) + op.create_index('ix_tasks_user_id_completed', 'tasks', ['user_id', 'completed']) + op.create_index('ix_tasks_created_at', 'tasks', ['created_at']) + + +def downgrade() -> None: + # Drop indexes + op.drop_index('ix_tasks_created_at', table_name='tasks') + op.drop_index('ix_tasks_user_id_completed', table_name='tasks') + op.drop_index('ix_tasks_completed', table_name='tasks') + op.drop_index('ix_tasks_user_id', table_name='tasks') + + # Drop tables + op.drop_table('tasks') + op.drop_index('ix_users_email', table_name='users') + op.drop_table('users') diff --git a/alembic/versions/002_add_user_password.py b/alembic/versions/002_add_user_password.py new file mode 100644 index 0000000000000000000000000000000000000000..f042755e2bdb111c621ba9ffb5a79179d05a7087 --- /dev/null +++ b/alembic/versions/002_add_user_password.py @@ -0,0 +1,26 @@ +"""Add password_hash to users table + +Revision ID: 002_add_user_password +Revises: 001_initial +Create Date: 2026-01-09 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. 
+revision = '002_add_user_password' +down_revision = '001_initial' +branch_labels = None +depends_on = None + + +def upgrade(): + """Add password_hash column to users table.""" + op.add_column('users', sa.Column('password_hash', sa.String(length=255), nullable=False)) + + +def downgrade(): + """Remove password_hash column from users table.""" + op.drop_column('users', 'password_hash') diff --git a/alembic/versions/__pycache__/001_initial.cpython-313.pyc b/alembic/versions/__pycache__/001_initial.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d16507297b1f4b7025eb38b73ccd9403b12325e0 Binary files /dev/null and b/alembic/versions/__pycache__/001_initial.cpython-313.pyc differ diff --git a/alembic/versions/__pycache__/002_add_user_password.cpython-313.pyc b/alembic/versions/__pycache__/002_add_user_password.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9b267de3fed51c8009d7264e26b0f4b70d50050c Binary files /dev/null and b/alembic/versions/__pycache__/002_add_user_password.cpython-313.pyc differ diff --git a/deploy-prepare.ps1 b/deploy-prepare.ps1 new file mode 100644 index 0000000000000000000000000000000000000000..cb029a62b1c2b1f245103c8658f9b5016d1885cd --- /dev/null +++ b/deploy-prepare.ps1 @@ -0,0 +1,129 @@ +# Quick Deployment Script for Hugging Face Spaces (PowerShell) +# This script helps prepare your backend for deployment + +Write-Host "🚀 TaskFlow Backend - Hugging Face Deployment Preparation" -ForegroundColor Cyan +Write-Host "==========================================================" -ForegroundColor Cyan +Write-Host "" + +# Check if we're in the backend directory +if (-not (Test-Path "requirements.txt")) { + Write-Host "❌ Error: Please run this script from the backend directory" -ForegroundColor Red + exit 1 +} + +Write-Host "✅ Found backend directory" -ForegroundColor Green +Write-Host "" + +# Check required files +Write-Host "📋 Checking required files..." 
-ForegroundColor Yellow +$files = @("Dockerfile", "requirements.txt", "alembic.ini", "src/main.py") +$missing_files = @() + +foreach ($file in $files) { + if (Test-Path $file) { + Write-Host " ✅ $file" -ForegroundColor Green + } else { + Write-Host " ❌ $file (missing)" -ForegroundColor Red + $missing_files += $file + } +} + +if ($missing_files.Count -gt 0) { + Write-Host "" + Write-Host "❌ Missing required files. Please ensure all files are present." -ForegroundColor Red + exit 1 +} + +Write-Host "" +Write-Host "🔐 Generating secrets..." -ForegroundColor Yellow + +# Generate BETTER_AUTH_SECRET (using .NET crypto) +$bytes = New-Object byte[] 32 +$rng = [System.Security.Cryptography.RandomNumberGenerator]::Create() +$rng.GetBytes($bytes) +$SECRET = [System.BitConverter]::ToString($bytes).Replace("-", "").ToLower() + +Write-Host "" +Write-Host "Your BETTER_AUTH_SECRET (save this!):" -ForegroundColor Cyan +Write-Host "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━" -ForegroundColor Cyan +Write-Host $SECRET -ForegroundColor Yellow +Write-Host "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━" -ForegroundColor Cyan +Write-Host "" + +# Create deployment notes +$deploymentNotes = @" +TaskFlow Backend - Deployment Information +Generated: $(Get-Date) + +BETTER_AUTH_SECRET: $SECRET + +Required Environment Variables for Hugging Face: +━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ + +1. DATABASE_URL + Get from: Neon PostgreSQL Dashboard + Format: postgresql://user:password@host/database + +2. BETTER_AUTH_SECRET + Value: $SECRET + +3. CORS_ORIGINS + Initial: http://localhost:3000 + After frontend deploy: https://your-app.vercel.app,https://your-app-*.vercel.app + +4. DEBUG + Value: False + +5. JWT_ALGORITHM (optional) + Value: HS256 + +6. JWT_EXPIRATION_DAYS (optional) + Value: 7 + +━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ + +Next Steps: +1. Create Hugging Face Space at https://huggingface.co/new-space +2. 
Choose Docker SDK +3. Clone the Space repository +4. Copy all backend files to the Space directory +5. Rename README_HF.md to README.md +6. Commit and push +7. Add environment variables in Space Settings +8. Wait for build to complete + +"@ + +$deploymentNotes | Out-File -FilePath "DEPLOYMENT_NOTES.txt" -Encoding UTF8 + +Write-Host "📝 Deployment notes saved to: DEPLOYMENT_NOTES.txt" -ForegroundColor Green +Write-Host "" + +# Check if Docker is available +$dockerInstalled = Get-Command docker -ErrorAction SilentlyContinue +if ($dockerInstalled) { + Write-Host "đŸŗ Docker is installed" -ForegroundColor Green + Write-Host "" + $response = Read-Host "Would you like to test the Docker build locally? (y/n)" + if ($response -eq "y" -or $response -eq "Y") { + Write-Host "Building Docker image..." -ForegroundColor Yellow + docker build -t taskflow-backend-test . + if ($LASTEXITCODE -eq 0) { + Write-Host "✅ Docker build successful!" -ForegroundColor Green + } else { + Write-Host "❌ Docker build failed. Please check the errors above." -ForegroundColor Red + } + } +} else { + Write-Host "â„šī¸ Docker not found. Skipping local build test." -ForegroundColor Yellow +} + +Write-Host "" +Write-Host "✅ Preparation complete!" -ForegroundColor Green +Write-Host "" +Write-Host "📚 Next steps:" -ForegroundColor Cyan +Write-Host " 1. Review DEPLOYMENT_NOTES.txt" +Write-Host " 2. Follow the deployment guide in ../DEPLOYMENT_GUIDE.md" +Write-Host " 3. Create your Hugging Face Space" +Write-Host " 4. 
Push your code" +Write-Host "" diff --git a/deploy-prepare.sh b/deploy-prepare.sh new file mode 100644 index 0000000000000000000000000000000000000000..a47e8453684bcca421c55aa1b41a8504f84b7685 --- /dev/null +++ b/deploy-prepare.sh @@ -0,0 +1,125 @@ +#!/bin/bash + +# Quick Deployment Script for Hugging Face Spaces +# This script helps prepare your backend for deployment + +echo "🚀 TaskFlow Backend - Hugging Face Deployment Preparation" +echo "==========================================================" +echo "" + +# Check if we're in the backend directory +if [ ! -f "requirements.txt" ]; then + echo "❌ Error: Please run this script from the backend directory" + exit 1 +fi + +echo "✅ Found backend directory" +echo "" + +# Check required files +echo "📋 Checking required files..." +files=("Dockerfile" "requirements.txt" "alembic.ini" "src/main.py") +missing_files=() + +for file in "${files[@]}"; do + if [ -f "$file" ]; then + echo " ✅ $file" + else + echo " ❌ $file (missing)" + missing_files+=("$file") + fi +done + +if [ ${#missing_files[@]} -ne 0 ]; then + echo "" + echo "❌ Missing required files. Please ensure all files are present." + exit 1 +fi + +echo "" +echo "🔐 Generating secrets..." + +# Generate BETTER_AUTH_SECRET +SECRET=$(openssl rand -hex 32) +echo "" +echo "Your BETTER_AUTH_SECRET (save this!):" +echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━" +echo "$SECRET" +echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━" +echo "" + +# Create deployment notes +cat > DEPLOYMENT_NOTES.txt << EOF +TaskFlow Backend - Deployment Information +Generated: $(date) + +BETTER_AUTH_SECRET: $SECRET + +Required Environment Variables for Hugging Face: +━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ + +1. DATABASE_URL + Get from: Neon PostgreSQL Dashboard + Format: postgresql://user:password@host/database + +2. BETTER_AUTH_SECRET + Value: $SECRET + +3. 
CORS_ORIGINS + Initial: http://localhost:3000 + After frontend deploy: https://your-app.vercel.app,https://your-app-*.vercel.app + +4. DEBUG + Value: False + +5. JWT_ALGORITHM (optional) + Value: HS256 + +6. JWT_EXPIRATION_DAYS (optional) + Value: 7 + +━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ + +Next Steps: +1. Create Hugging Face Space at https://huggingface.co/new-space +2. Choose Docker SDK +3. Clone the Space repository +4. Copy all backend files to the Space directory +5. Rename README_HF.md to README.md +6. Commit and push +7. Add environment variables in Space Settings +8. Wait for build to complete + +EOF + +echo "📝 Deployment notes saved to: DEPLOYMENT_NOTES.txt" +echo "" + +# Test if Docker is available +if command -v docker &> /dev/null; then + echo "đŸŗ Docker is installed" + echo "" + echo "Would you like to test the Docker build locally? (y/n)" + read -r response + if [[ "$response" =~ ^[Yy]$ ]]; then + echo "Building Docker image..." + docker build -t taskflow-backend-test . + if [ $? -eq 0 ]; then + echo "✅ Docker build successful!" + else + echo "❌ Docker build failed. Please check the errors above." + fi + fi +else + echo "â„šī¸ Docker not found. Skipping local build test." +fi + +echo "" +echo "✅ Preparation complete!" +echo "" +echo "📚 Next steps:" +echo " 1. Review DEPLOYMENT_NOTES.txt" +echo " 2. Follow the deployment guide in ../DEPLOYMENT_GUIDE.md" +echo " 3. Create your Hugging Face Space" +echo " 4. 
Push your code" +echo "" diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000000000000000000000000000000000000..bbac3331073c96ea551f3ea0cd9f30a2fab42bcc --- /dev/null +++ b/requirements.txt @@ -0,0 +1,12 @@ +fastapi==0.104.1 +sqlmodel==0.0.14 +pydantic==2.5.0 +uvicorn[standard]==0.24.0 +alembic==1.13.0 +psycopg2-binary==2.9.9 +python-dotenv==1.0.0 +pytest==7.4.3 +httpx==0.25.2 +PyJWT==2.8.0 +passlib[bcrypt]==1.7.4 +python-multipart==0.0.6 diff --git a/src/__pycache__/main.cpython-313.pyc b/src/__pycache__/main.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0f7532570323f62653a42c188ab9572e7551355f Binary files /dev/null and b/src/__pycache__/main.cpython-313.pyc differ diff --git a/src/api/__init__.py b/src/api/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..8945521514f87903f1d8342afa6aae41e9df5449 --- /dev/null +++ b/src/api/__init__.py @@ -0,0 +1 @@ +"""API module initialization.""" diff --git a/src/api/__pycache__/__init__.cpython-313.pyc b/src/api/__pycache__/__init__.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..01db7414e01c5ce7650268b671e83ed8a0635460 Binary files /dev/null and b/src/api/__pycache__/__init__.cpython-313.pyc differ diff --git a/src/api/__pycache__/deps.cpython-313.pyc b/src/api/__pycache__/deps.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b7a3dceefdbb77c7acc125d55c37e72401da6945 Binary files /dev/null and b/src/api/__pycache__/deps.cpython-313.pyc differ diff --git a/src/api/deps.py b/src/api/deps.py new file mode 100644 index 0000000000000000000000000000000000000000..65dad8959cdb1b322166ecc0b717af6bff6865a2 --- /dev/null +++ b/src/api/deps.py @@ -0,0 +1,55 @@ +from sqlmodel import Session +from typing import Generator +from fastapi import Depends, HTTPException, status +from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials +from src.core.database 
import get_session
from src.core.security import verify_jwt_token
from src.core.config import settings

# auto_error=False so a missing/blank Authorization header reaches our own
# handler and produces a consistent 401 + WWW-Authenticate. With the default
# auto_error=True, HTTPBearer short-circuits with a 403 and the
# `if not credentials` branch below is unreachable.
security = HTTPBearer(auto_error=False)


def get_db() -> Generator[Session, None, None]:
    """Yield a request-scoped database session (delegates to core.get_session)."""
    yield from get_session()


def get_current_user(
    credentials: HTTPAuthorizationCredentials | None = Depends(security)
) -> int:
    """
    Get current user ID from JWT token.

    Extracts and verifies the JWT from the Authorization header.

    Args:
        credentials: HTTP Bearer credentials (None when the header is absent,
            because ``security`` is configured with ``auto_error=False``)

    Returns:
        User ID extracted from the validated token's ``sub`` claim

    Raises:
        HTTPException: 401 if the token is missing, invalid, expired, or its
            ``sub`` claim is absent or non-numeric
    """
    if not credentials:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Not authenticated",
            headers={"WWW-Authenticate": "Bearer"}
        )

    token = credentials.credentials

    # verify_jwt_token raises its own 401 on bad signature / expiry / issuer.
    payload = verify_jwt_token(token, settings.BETTER_AUTH_SECRET)

    user_id = payload.get("sub")
    try:
        return int(user_id)
    except (TypeError, ValueError):
        # TypeError covers a missing 'sub' (None); ValueError covers a
        # non-numeric subject, which previously escaped as an unhandled
        # exception and surfaced to the client as a 500.
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Invalid token payload",
            headers={"WWW-Authenticate": "Bearer"}
        )
"""Authentication API routes."""
from fastapi import APIRouter, Depends, HTTPException, status
from sqlmodel import Session, select
from src.api.deps import get_db, get_current_user
from src.schemas.auth import SignupRequest, SigninRequest, SignupResponse, TokenResponse, UserProfile
from src.services.auth_service import AuthService
from src.models.user import User

router = APIRouter(prefix="/api/auth", tags=["authentication"])


@router.post("/signup", response_model=SignupResponse, status_code=status.HTTP_201_CREATED)
def signup(
    signup_data: SignupRequest,
    db: Session = Depends(get_db)
):
    """
    Register a new user account.

    Delegates validation and persistence to AuthService, which raises
    400 (invalid email/password) or 409 (duplicate email) on failure.

    Returns:
        SignupResponse: the newly created user's public details.
    """
    return AuthService(db).signup(signup_data)


@router.post("/signin", response_model=TokenResponse)
def signin(
    signin_data: SigninRequest,
    db: Session = Depends(get_db)
):
    """
    Authenticate a user and issue a JWT.

    AuthService raises 401 when the email/password pair does not match.

    Returns:
        TokenResponse: bearer token, expiry in seconds, and user profile.
    """
    return AuthService(db).signin(signin_data)


@router.get("/me", response_model=UserProfile)
def get_current_user_profile(
    current_user_id: int = Depends(get_current_user),
    db: Session = Depends(get_db)
):
    """
    Return the profile of the user identified by the request's JWT.

    Raises:
        HTTPException: 404 if the user row no longer exists.
    """
    query = select(User).where(User.id == current_user_id)
    user = db.exec(query).first()

    if user is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="User not found"
        )

    return UserProfile(
        id=user.id,
        email=user.email,
        name=user.name,
        created_at=user.created_at
    )
"""Task API routes."""
from typing import Optional
from fastapi import APIRouter, Depends, HTTPException, status, Query
from sqlmodel import Session
from src.api.deps import get_db, get_current_user
from src.schemas.task import TaskCreate, TaskUpdate, TaskPatch, TaskResponse, TaskListResponse
from src.services.task_service import TaskService

router = APIRouter(prefix="/api/tasks", tags=["tasks"])


def _not_found() -> HTTPException:
    """404 used by every single-task endpoint (keeps the message consistent)."""
    return HTTPException(
        status_code=status.HTTP_404_NOT_FOUND,
        detail="Task not found"
    )


@router.get("", response_model=TaskListResponse)
def get_tasks(
    completed: Optional[bool] = Query(None, description="Filter by completion status"),
    # pattern/ge constraints: invalid sort/order and negative limit/offset now
    # fail fast with 422 instead of being silently coerced downstream.
    sort: str = Query("created_at", pattern="^(created_at|updated_at)$",
                      description="Sort field (created_at or updated_at)"),
    order: str = Query("desc", pattern="^(asc|desc)$",
                       description="Sort order (asc or desc)"),
    limit: Optional[int] = Query(None, ge=1, description="Maximum number of tasks to return"),
    offset: int = Query(0, ge=0, description="Number of tasks to skip"),
    db: Session = Depends(get_db),
    current_user_id: int = Depends(get_current_user)
):
    """
    Get tasks for the current user with filtering, sorting and pagination.

    Query Parameters:
        - completed: Filter by completion status (true/false/null for all)
        - sort: Sort field (created_at or updated_at)
        - order: Sort order (asc or desc)
        - limit: Maximum number of tasks to return (>= 1)
        - offset: Number of tasks to skip (>= 0)

    Returns:
        TaskListResponse: tasks for this page plus ``total``.

    NOTE(review): ``total`` is len() of the returned page, not the user's
    overall task count — confirm clients expect that before building
    pagination UIs on top of it.
    """
    service = TaskService(db)
    tasks = service.get_tasks(
        user_id=current_user_id,
        completed=completed,
        sort=sort,
        order=order,
        limit=limit,
        offset=offset
    )
    return TaskListResponse(tasks=tasks, total=len(tasks))


@router.post("", response_model=TaskResponse, status_code=status.HTTP_201_CREATED)
def create_task(
    task_data: TaskCreate,
    db: Session = Depends(get_db),
    current_user_id: int = Depends(get_current_user)
):
    """
    Create a new task owned by the current user.

    Args:
        task_data: Task creation data (title, optional description)

    Returns:
        TaskResponse: the created task, with server-assigned id/timestamps
    """
    service = TaskService(db)
    return service.create_task(user_id=current_user_id, task_data=task_data)


@router.get("/{task_id}", response_model=TaskResponse)
def get_task(
    task_id: int,
    db: Session = Depends(get_db),
    current_user_id: int = Depends(get_current_user)
):
    """
    Get a single task by ID.

    Raises:
        HTTPException: 404 if the task doesn't exist or belongs to another
            user (the two cases are deliberately indistinguishable).
    """
    service = TaskService(db)
    task = service.get_task(task_id=task_id, user_id=current_user_id)
    if not task:
        raise _not_found()
    return task


@router.put("/{task_id}", response_model=TaskResponse)
def update_task(
    task_id: int,
    task_data: TaskUpdate,
    db: Session = Depends(get_db),
    current_user_id: int = Depends(get_current_user)
):
    """
    Update a task (PUT — replaces title, description and completed).

    Raises:
        HTTPException: 404 if task not found or doesn't belong to user
    """
    service = TaskService(db)
    task = service.update_task(task_id=task_id, user_id=current_user_id, task_data=task_data)
    if not task:
        raise _not_found()
    return task


@router.patch("/{task_id}", response_model=TaskResponse)
def patch_task(
    task_id: int,
    task_data: TaskPatch,
    db: Session = Depends(get_db),
    current_user_id: int = Depends(get_current_user)
):
    """
    Partially update a task (PATCH — only fields present in the body).

    Raises:
        HTTPException: 404 if task not found or doesn't belong to user
    """
    service = TaskService(db)
    task = service.patch_task(task_id=task_id, user_id=current_user_id, task_data=task_data)
    if not task:
        raise _not_found()
    return task


@router.delete("/{task_id}", status_code=status.HTTP_204_NO_CONTENT)
def delete_task(
    task_id: int,
    db: Session = Depends(get_db),
    current_user_id: int = Depends(get_current_user)
):
    """
    Delete a task. Returns 204 with no body on success.

    Raises:
        HTTPException: 404 if task not found or doesn't belong to user
    """
    service = TaskService(db)
    deleted = service.delete_task(task_id=task_id, user_id=current_user_id)
    if not deleted:
        raise _not_found()
from pydantic_settings import BaseSettings
# NOTE(review): `pydantic_settings` is a separate PyPI package in pydantic v2
# (`pydantic-settings`) — it is not pinned in requirements.txt; confirm it is
# installed in the Docker image or startup will fail on this import.


class Settings(BaseSettings):
    """Application settings, loaded from the environment and the .env file."""

    # Database
    DATABASE_URL: str  # required — no default on purpose; startup fails if unset

    # Application
    APP_NAME: str = "Task CRUD API"
    DEBUG: bool = True  # also drives SQLAlchemy echo in core.database — set False in prod

    # CORS — comma-separated list of allowed origins, split in src/main.py
    CORS_ORIGINS: str = "http://localhost:3000"

    # Authentication
    BETTER_AUTH_SECRET: str  # Required - must be set in .env; signs all JWTs
    JWT_ALGORITHM: str = "HS256"
    JWT_EXPIRATION_DAYS: int = 7

    class Config:
        env_file = ".env"
        case_sensitive = True  # env var names must match field names exactly


settings = Settings()


# --- src/core/database.py --------------------------------------------------
from sqlmodel import create_engine, Session
from .config import settings

# Create database engine
engine = create_engine(
    settings.DATABASE_URL,
    echo=settings.DEBUG,    # log every SQL statement while debugging
    pool_pre_ping=True,     # revalidate pooled connections before use
    pool_size=5,
    max_overflow=10
)


def get_session():
    """Yield a Session that is closed when the request scope exits."""
    with Session(engine) as session:
        yield session
"""Security utilities for authentication and authorization."""
import jwt
from datetime import datetime, timedelta, timezone
from passlib.context import CryptContext
from fastapi import HTTPException, status
from typing import Optional

# Password hashing context (bcrypt; "deprecated=auto" re-hashes old schemes)
pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")


def hash_password(password: str) -> str:
    """
    Hash a password using bcrypt.

    Args:
        password: Plain text password

    Returns:
        Hashed password string
    """
    return pwd_context.hash(password)


def verify_password(plain_password: str, hashed_password: str) -> bool:
    """
    Verify a password against its hash.

    Args:
        plain_password: Plain text password to verify
        hashed_password: Hashed password to compare against

    Returns:
        True if password matches, False otherwise
    """
    return pwd_context.verify(plain_password, hashed_password)


def create_jwt_token(user_id: int, email: str, secret: str, expiration_days: int = 7,
                     algorithm: str = "HS256") -> str:
    """
    Create a JWT token for a user.

    Args:
        user_id: User's unique identifier (stored as the 'sub' claim)
        email: User's email address
        secret: Secret key for signing the token
        expiration_days: Number of days until token expires (default: 7)
        algorithm: Signing algorithm (default: HS256, matching verify_jwt_token)

    Returns:
        Encoded JWT token string
    """
    # Timezone-aware UTC: datetime.utcnow() is deprecated since Python 3.12
    # and produces naive datetimes, which make 'iat'/'exp' ambiguous. PyJWT
    # converts aware datetimes to the correct numeric claims.
    now = datetime.now(timezone.utc)
    payload = {
        "sub": str(user_id),
        "email": email,
        "iat": now,
        "exp": now + timedelta(days=expiration_days),
        "iss": "better-auth"
    }
    return jwt.encode(payload, secret, algorithm=algorithm)


def verify_jwt_token(token: str, secret: str, algorithms: Optional[list] = None) -> dict:
    """
    Verify and decode a JWT token.

    Args:
        token: JWT token string to verify
        secret: Secret key used to sign the token
        algorithms: Accepted signing algorithms (defaults to ["HS256"],
            matching create_jwt_token)

    Returns:
        Decoded token payload as dictionary

    Raises:
        HTTPException: 401 if token is expired, malformed, missing a
            required claim, or carries the wrong issuer
    """
    try:
        payload = jwt.decode(
            token,
            secret,
            algorithms=algorithms or ["HS256"],
            options={
                "verify_signature": True,
                "verify_exp": True,
                "require": ["sub", "email", "iat", "exp", "iss"]
            }
        )

        # Validate issuer (tokens are only minted by our own signin flow)
        if payload.get("iss") != "better-auth":
            raise HTTPException(
                status_code=status.HTTP_401_UNAUTHORIZED,
                detail="Invalid token issuer",
                headers={"WWW-Authenticate": "Bearer"}
            )

        return payload

    except jwt.ExpiredSignatureError:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Token has expired",
            headers={"WWW-Authenticate": "Bearer"}
        )
    except jwt.InvalidTokenError:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Invalid token",
            headers={"WWW-Authenticate": "Bearer"}
        )


# --- src/main.py ------------------------------------------------------------
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from .core.config import settings
from .api.routes import tasks, auth

app = FastAPI(
    title=settings.APP_NAME,
    debug=settings.DEBUG
)

# Configure CORS. Strip whitespace so "a, b" style env values work, and drop
# empty entries produced by trailing commas — a raw split(",") would register
# " https://example.com" as a non-matching origin.
app.add_middleware(
    CORSMiddleware,
    allow_origins=[o.strip() for o in settings.CORS_ORIGINS.split(",") if o.strip()],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Register routes
app.include_router(auth.router)
app.include_router(tasks.router)


@app.get("/")
async def root():
    """Root endpoint."""
    return {"message": "Task CRUD API", "status": "running"}


@app.get("/health")
async def health():
    """Health check endpoint."""
    return {"status": "healthy"}
"""Models module initialization."""
from .user import User
from .task import Task

__all__ = ["User", "Task"]


# --- src/models/task.py -----------------------------------------------------
from sqlmodel import SQLModel, Field
from datetime import datetime
from typing import Optional


class Task(SQLModel, table=True):
    """Task entity representing a to-do item owned by a single user."""

    __tablename__ = "tasks"

    # Surrogate primary key; None until the row is flushed.
    id: Optional[int] = Field(default=None, primary_key=True)
    # Owning user; indexed because every task query filters on it.
    user_id: int = Field(foreign_key="users.id", nullable=False, index=True)
    title: str = Field(max_length=200, nullable=False)
    description: Optional[str] = Field(default=None, max_length=1000)
    # Indexed to support the completed=true/false list filter.
    completed: bool = Field(default=False, nullable=False, index=True)
    # NOTE(review): defaults use naive datetime.utcnow (deprecated in 3.12+);
    # service code also stamps these explicitly — keep the two in sync if the
    # timestamp policy ever changes, and check alembic/DB column types first.
    created_at: datetime = Field(default_factory=datetime.utcnow, nullable=False, index=True)
    # Not auto-updated by the database — callers must set it on every write.
    updated_at: datetime = Field(default_factory=datetime.utcnow, nullable=False)


# --- src/models/user.py -----------------------------------------------------
from sqlmodel import SQLModel, Field
from datetime import datetime
from typing import Optional


class User(SQLModel, table=True):
    """User entity with authentication support."""

    __tablename__ = "users"

    # Surrogate primary key; None until the row is flushed.
    id: Optional[int] = Field(default=None, primary_key=True)
    # Unique constraint is what auth_service relies on to reject duplicates.
    email: str = Field(max_length=255, unique=True, nullable=False, index=True)
    name: str = Field(max_length=100, nullable=False)
    # bcrypt hash produced by core.security.hash_password — never plaintext.
    password_hash: str = Field(max_length=255, nullable=False)
    created_at: datetime = Field(default_factory=datetime.utcnow, nullable=False)
    # Not auto-updated by the database — callers must set it on every write.
    updated_at: datetime = Field(default_factory=datetime.utcnow, nullable=False)


# --- src/schemas/__init__.py ------------------------------------------------
"""Pydantic schemas for request/response validation."""
"""Authentication schemas for request/response validation."""
from pydantic import BaseModel, ConfigDict, EmailStr, Field
from datetime import datetime
from typing import Optional

# NOTE(review): EmailStr requires the `email-validator` package, which is not
# pinned in requirements.txt — confirm it is installed in the image.


class SignupRequest(BaseModel):
    """Request schema for user signup."""
    email: EmailStr = Field(..., description="User's email address")
    password: str = Field(..., min_length=8, max_length=100, description="User's password")
    name: str = Field(..., min_length=1, max_length=100, description="User's display name")


class SigninRequest(BaseModel):
    """Request schema for user signin."""
    email: EmailStr = Field(..., description="User's email address")
    password: str = Field(..., description="User's password")


class UserProfile(BaseModel):
    """User profile response schema (buildable from ORM rows)."""
    id: int
    email: str
    name: str
    created_at: datetime

    # pydantic v2 style, consistent with src/schemas/task.py; the legacy
    # `class Config` form is deprecated in pydantic 2.x.
    model_config = ConfigDict(from_attributes=True)


class TokenResponse(BaseModel):
    """Response schema for authentication with JWT token."""
    access_token: str = Field(..., description="JWT access token")
    token_type: str = Field(default="bearer", description="Token type")
    expires_in: int = Field(..., description="Token expiration time in seconds")
    user: UserProfile


class SignupResponse(BaseModel):
    """Response schema for successful signup."""
    id: int
    email: str
    name: str
    created_at: datetime

    model_config = ConfigDict(from_attributes=True)
"""Pydantic schemas for Task CRUD operations."""
from datetime import datetime
from typing import Optional
from pydantic import BaseModel, Field, ConfigDict


class TaskCreate(BaseModel):
    """Schema for creating a new task. `completed` is server-controlled (False)."""
    title: str = Field(..., min_length=1, max_length=200, description="Task title")
    description: Optional[str] = Field(None, max_length=1000, description="Task description")

    model_config = ConfigDict(
        json_schema_extra={
            "example": {
                "title": "Buy groceries",
                "description": "Milk, eggs, bread"
            }
        }
    )


class TaskResponse(BaseModel):
    """Schema for task response. from_attributes lets it serialize ORM rows."""
    id: int = Field(..., description="Task ID")
    user_id: int = Field(..., description="User ID who owns the task")
    title: str = Field(..., description="Task title")
    description: Optional[str] = Field(None, description="Task description")
    completed: bool = Field(..., description="Task completion status")
    created_at: datetime = Field(..., description="Task creation timestamp")
    updated_at: datetime = Field(..., description="Task last update timestamp")

    model_config = ConfigDict(
        from_attributes=True,
        json_schema_extra={
            "example": {
                "id": 1,
                "user_id": 1,
                "title": "Buy groceries",
                "description": "Milk, eggs, bread",
                "completed": False,
                "created_at": "2026-01-08T10:00:00Z",
                "updated_at": "2026-01-08T10:00:00Z"
            }
        }
    )


class TaskUpdate(BaseModel):
    """Schema for updating a task (PUT - replaces all fields)."""
    title: str = Field(..., min_length=1, max_length=200, description="Task title")
    description: Optional[str] = Field(None, max_length=1000, description="Task description")
    completed: bool = Field(..., description="Task completion status")

    model_config = ConfigDict(
        json_schema_extra={
            "example": {
                "title": "Buy groceries",
                "description": "Milk, eggs, bread, cheese",
                "completed": False
            }
        }
    )


class TaskPatch(BaseModel):
    """Schema for partially updating a task (PATCH - updates only provided fields).

    NOTE(review): with Optional fields defaulting to None, handlers can only
    tell "field omitted" apart from "field explicitly null" by inspecting
    model_fields_set / model_dump(exclude_unset=True) — plain attribute
    access cannot distinguish the two.
    """
    title: Optional[str] = Field(None, min_length=1, max_length=200, description="Task title")
    description: Optional[str] = Field(None, max_length=1000, description="Task description")
    completed: Optional[bool] = Field(None, description="Task completion status")

    model_config = ConfigDict(
        json_schema_extra={
            "example": {
                "completed": True
            }
        }
    )


class TaskListResponse(BaseModel):
    """Schema for list of tasks response.

    NOTE(review): as populated by the tasks route, `total` is len() of the
    returned page, not the user's overall task count.
    """
    tasks: list[TaskResponse] = Field(..., description="List of tasks")
    total: int = Field(..., description="Total number of tasks")

    model_config = ConfigDict(
        json_schema_extra={
            "example": {
                "tasks": [
                    {
                        "id": 1,
                        "user_id": 1,
                        "title": "Buy groceries",
                        "description": "Milk, eggs, bread",
                        "completed": False,
                        "created_at": "2026-01-08T10:00:00Z",
                        "updated_at": "2026-01-08T10:00:00Z"
                    }
                ],
                "total": 1
            }
        }
    )
"""Authentication service for user signup and signin."""
from sqlmodel import Session, select
from sqlalchemy.exc import IntegrityError
from fastapi import HTTPException, status
from src.models.user import User
from src.schemas.auth import SignupRequest, SigninRequest, SignupResponse, TokenResponse, UserProfile
from src.core.security import hash_password, verify_password, create_jwt_token
from src.core.config import settings
from datetime import datetime
import re


class AuthService:
    """Service for handling authentication operations."""

    def __init__(self, db: Session):
        self.db = db

    def signup(self, signup_data: SignupRequest) -> SignupResponse:
        """
        Create a new user account.

        Args:
            signup_data: User signup information

        Returns:
            SignupResponse with created user details

        Raises:
            HTTPException: 409 if email already exists (checked up front AND
                again on commit, closing the check-then-insert race)
            HTTPException: 400 if validation fails
        """
        # Validate email format (simplified RFC 5322)
        if not self._validate_email(signup_data.email):
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail="Invalid email format",
            )

        # Validate password strength
        password_errors = self._validate_password(signup_data.password)
        if password_errors:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail="Password does not meet requirements",
                headers={"X-Password-Errors": ", ".join(password_errors)}
            )

        # Fast-path duplicate check — friendlier than waiting for the
        # unique-constraint violation on commit below.
        existing_user = self.db.exec(
            select(User).where(User.email == signup_data.email)
        ).first()

        if existing_user:
            raise HTTPException(
                status_code=status.HTTP_409_CONFLICT,
                detail="Email already registered",
            )

        # Hash password (bcrypt via core.security)
        password_hash = hash_password(signup_data.password)

        # Create user. utcnow() kept deliberately: it matches the naive
        # default_factory on the model columns.
        user = User(
            email=signup_data.email,
            name=signup_data.name,
            password_hash=password_hash,
            created_at=datetime.utcnow(),
            updated_at=datetime.utcnow(),
        )

        self.db.add(user)
        try:
            self.db.commit()
        except IntegrityError:
            # A concurrent request inserted the same email between our
            # existence check and this commit; previously this escaped as an
            # unhandled 500. Roll back so the session remains usable.
            self.db.rollback()
            raise HTTPException(
                status_code=status.HTTP_409_CONFLICT,
                detail="Email already registered",
            )
        self.db.refresh(user)

        return SignupResponse(
            id=user.id,
            email=user.email,
            name=user.name,
            created_at=user.created_at,
        )

    def signin(self, signin_data: SigninRequest) -> TokenResponse:
        """
        Authenticate user and issue JWT token.

        Args:
            signin_data: User signin credentials

        Returns:
            TokenResponse with JWT token and user profile

        Raises:
            HTTPException: 401 if credentials are invalid (same message for
                unknown email and wrong password, to avoid user enumeration)
        """
        # Find user by email
        user = self.db.exec(
            select(User).where(User.email == signin_data.email)
        ).first()

        # Verify password
        if not user or not verify_password(signin_data.password, user.password_hash):
            raise HTTPException(
                status_code=status.HTTP_401_UNAUTHORIZED,
                detail="Invalid credentials",
            )

        # Create JWT token signed with the shared app secret
        token = create_jwt_token(
            user_id=user.id,
            email=user.email,
            secret=settings.BETTER_AUTH_SECRET,
            expiration_days=settings.JWT_EXPIRATION_DAYS,
        )

        # Expiration in seconds, derived from the same setting as the token
        expires_in = settings.JWT_EXPIRATION_DAYS * 24 * 60 * 60

        return TokenResponse(
            access_token=token,
            token_type="bearer",
            expires_in=expires_in,
            user=UserProfile(
                id=user.id,
                email=user.email,
                name=user.name,
                created_at=user.created_at,
            ),
        )

    def _validate_email(self, email: str) -> bool:
        """
        Validate email format (simplified RFC 5322).

        Args:
            email: Email address to validate

        Returns:
            True if valid, False otherwise
        """
        pattern = r'^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}$'
        return re.match(pattern, email) is not None

    def _validate_password(self, password: str) -> list[str]:
        """
        Validate password strength.

        Requirements:
            - Minimum 8 characters
            - At least one uppercase letter
            - At least one lowercase letter
            - At least one number

        Args:
            password: Password to validate

        Returns:
            List of validation error messages (empty if valid)
        """
        errors = []

        if len(password) < 8:
            errors.append("Password must be at least 8 characters")

        if not re.search(r'[A-Z]', password):
            errors.append("Password must contain at least one uppercase letter")

        if not re.search(r'[a-z]', password):
            errors.append("Password must contain at least one lowercase letter")

        if not re.search(r'\d', password):
            errors.append("Password must contain at least one number")

        return errors
class TaskService:
    """Service for task operations, always scoped to the owning user."""

    # Whitelist of sortable columns. Unknown values fall back to created_at
    # (the documented default); previously any unrecognized value silently
    # sorted by updated_at.
    _SORT_COLUMNS = {
        "created_at": Task.created_at,
        "updated_at": Task.updated_at,
    }

    def __init__(self, db: Session):
        """Initialize task service with database session."""
        self.db = db

    def get_tasks(
        self,
        user_id: int,
        completed: Optional[bool] = None,
        sort: str = "created_at",
        order: str = "desc",
        limit: Optional[int] = None,
        offset: int = 0
    ) -> list[Task]:
        """
        Get tasks for a user with filtering, sorting and pagination.

        Args:
            user_id: ID of the owning user
            completed: Filter by completion status (None = all)
            sort: Sort field ("created_at" or "updated_at"; anything else
                falls back to "created_at")
            order: Sort order ("asc"; anything else means descending)
            limit: Maximum number of tasks to return (None = unlimited)
            offset: Number of tasks to skip

        Returns:
            List of tasks matching the criteria
        """
        statement = select(Task).where(Task.user_id == user_id)

        # Apply completion filter
        if completed is not None:
            statement = statement.where(Task.completed == completed)

        # Apply sorting against the whitelisted column
        sort_column = self._SORT_COLUMNS.get(sort, Task.created_at)
        statement = statement.order_by(
            sort_column.asc() if order == "asc" else sort_column.desc()
        )

        # Apply pagination
        if offset > 0:
            statement = statement.offset(offset)
        if limit is not None:
            statement = statement.limit(limit)

        return list(self.db.exec(statement).all())

    def create_task(self, user_id: int, task_data: TaskCreate) -> Task:
        """
        Create a new task for a user.

        Args:
            user_id: ID of the user
            task_data: Task creation data

        Returns:
            Created task (refreshed, with id and timestamps populated)
        """
        # Single timestamp so created_at == updated_at on a fresh row
        now = datetime.utcnow()
        task = Task(
            user_id=user_id,
            title=task_data.title,
            description=task_data.description,
            completed=False,
            created_at=now,
            updated_at=now
        )
        self.db.add(task)
        self.db.commit()
        self.db.refresh(task)
        return task

    def get_task(self, task_id: int, user_id: int) -> Optional[Task]:
        """
        Get a single task by ID for a specific user.

        The user_id predicate doubles as the ownership check: a task that
        exists but belongs to someone else is reported as None.

        Args:
            task_id: ID of the task
            user_id: ID of the user

        Returns:
            Task if found and owned by user, None otherwise
        """
        statement = select(Task).where(Task.id == task_id, Task.user_id == user_id)
        return self.db.exec(statement).first()

    def update_task(self, task_id: int, user_id: int, task_data: TaskUpdate) -> Optional[Task]:
        """
        Update a task (PUT - replaces title, description and completed).

        Args:
            task_id: ID of the task
            user_id: ID of the user
            task_data: Task update data

        Returns:
            Updated task if found and owned by user, None otherwise
        """
        task = self.get_task(task_id, user_id)
        if not task:
            return None

        task.title = task_data.title
        task.description = task_data.description
        task.completed = task_data.completed
        task.updated_at = datetime.utcnow()

        self.db.add(task)
        self.db.commit()
        self.db.refresh(task)
        return task

    def patch_task(self, task_id: int, user_id: int, task_data: TaskPatch) -> Optional[Task]:
        """
        Partially update a task (PATCH - updates only provided fields).

        Only fields the client actually sent are applied, so an explicit
        ``"description": null`` now clears the description — the previous
        ``is not None`` checks made it impossible to null any field. An
        explicit null for title/completed (non-nullable columns) is still
        ignored rather than written.

        Args:
            task_id: ID of the task
            user_id: ID of the user
            task_data: Task patch data

        Returns:
            Updated task if found and owned by user, None otherwise
        """
        task = self.get_task(task_id, user_id)
        if not task:
            return None

        # exclude_unset distinguishes "field omitted" from "field set to null"
        updates = task_data.model_dump(exclude_unset=True)

        if updates.get("title") is not None:
            task.title = updates["title"]
        if "description" in updates:
            task.description = updates["description"]
        if updates.get("completed") is not None:
            task.completed = updates["completed"]

        task.updated_at = datetime.utcnow()

        self.db.add(task)
        self.db.commit()
        self.db.refresh(task)
        return task

    def delete_task(self, task_id: int, user_id: int) -> bool:
        """
        Delete a task.

        Args:
            task_id: ID of the task
            user_id: ID of the user

        Returns:
            True if task was deleted, False if not found or not owned by user
        """
        task = self.get_task(task_id, user_id)
        if not task:
            return False

        self.db.delete(task)
        self.db.commit()
        return True
# ---- tests/conftest.py ----------------------------------------------------
"""Test configuration and fixtures."""
import pytest
from sqlmodel import Session, SQLModel, create_engine
from sqlmodel.pool import StaticPool
from fastapi.testclient import TestClient

from src.main import app
from src.api.deps import get_db
from src.models.user import User
from src.models.task import Task


@pytest.fixture(name="session")
def session_fixture():
    """Yield an isolated in-memory SQLite session for each test."""
    test_engine = create_engine(
        "sqlite:///:memory:",
        connect_args={"check_same_thread": False},
        poolclass=StaticPool,
    )
    SQLModel.metadata.create_all(test_engine)
    with Session(test_engine) as db_session:
        yield db_session


@pytest.fixture(name="client")
def client_fixture(session: Session):
    """Yield a TestClient whose get_db dependency is routed to *session*."""
    def override_get_db():
        yield session

    app.dependency_overrides[get_db] = override_get_db
    try:
        yield TestClient(app)
    finally:
        app.dependency_overrides.clear()


@pytest.fixture(name="test_user")
def test_user_fixture(session: Session):
    """Persist and return a single user to own test tasks."""
    user = User(email="test@example.com", name="Test User")
    session.add(user)
    session.commit()
    session.refresh(user)
    return user


@pytest.fixture(name="test_task")
def test_task_fixture(session: Session, test_user: User):
    """Persist and return one incomplete task belonging to *test_user*."""
    task = Task(
        user_id=test_user.id,
        title="Test Task",
        description="Test Description",
        completed=False,
    )
    session.add(task)
    session.commit()
    session.refresh(task)
    return task


# ---- tests/test_tasks.py --------------------------------------------------
"""Tests for Task API endpoints."""
import pytest
from fastapi.testclient import TestClient
from sqlmodel import Session

from src.models.user import User
from src.models.task import Task


class TestTaskEndpoints:
    """Test suite for task CRUD operations."""

    def test_create_task(self, client: TestClient, test_user: User):
        """POST /api/tasks creates a task with server-set fields."""
        payload = {"title": "New Task", "description": "New Description"}
        resp = client.post("/api/tasks", json=payload)
        assert resp.status_code == 201
        body = resp.json()
        assert body["title"] == "New Task"
        assert body["description"] == "New Description"
        assert body["completed"] is False
        assert body["user_id"] == test_user.id
        for key in ("id", "created_at", "updated_at"):
            assert key in body

    def test_create_task_without_description(self, client: TestClient, test_user: User):
        """Description is optional and defaults to null."""
        resp = client.post("/api/tasks", json={"title": "Task Without Description"})
        assert resp.status_code == 201
        body = resp.json()
        assert body["title"] == "Task Without Description"
        assert body["description"] is None

    def test_create_task_invalid_data(self, client: TestClient):
        """A payload missing the required title is rejected with 422."""
        resp = client.post("/api/tasks", json={"description": "Missing title"})
        assert resp.status_code == 422

    def test_get_tasks(self, client: TestClient, test_task: Task):
        """GET /api/tasks lists existing tasks with a total count."""
        resp = client.get("/api/tasks")
        assert resp.status_code == 200
        body = resp.json()
        assert "tasks" in body
        assert "total" in body
        assert body["total"] >= 1
        assert len(body["tasks"]) >= 1

    def test_get_tasks_empty(self, client: TestClient, test_user: User):
        """Listing with no tasks returns an empty list and zero total."""
        resp = client.get("/api/tasks")
        assert resp.status_code == 200
        body = resp.json()
        assert body["tasks"] == []
        assert body["total"] == 0

    def test_get_tasks_filter_completed(self, client: TestClient, session: Session, test_user: User):
        """The completed query param filters by completion status."""
        for title, done in (("Completed Task", True), ("Active Task", False)):
            session.add(Task(user_id=test_user.id, title=title, completed=done))
        session.commit()

        resp = client.get("/api/tasks?completed=true")
        assert resp.status_code == 200
        assert all(t["completed"] for t in resp.json()["tasks"])

        resp = client.get("/api/tasks?completed=false")
        assert resp.status_code == 200
        assert all(not t["completed"] for t in resp.json()["tasks"])

    def test_get_tasks_sort_order(self, client: TestClient, session: Session, test_user: User):
        """sort/order query params control result ordering."""
        session.add_all([
            Task(user_id=test_user.id, title=f"Task {i}", completed=False)
            for i in range(3)
        ])
        session.commit()

        resp = client.get("/api/tasks?sort=created_at&order=desc")
        assert resp.status_code == 200
        listed = resp.json()["tasks"]
        assert len(listed) >= 3
        for earlier, later in zip(listed, listed[1:]):
            assert earlier["created_at"] >= later["created_at"]

        resp = client.get("/api/tasks?sort=created_at&order=asc")
        assert resp.status_code == 200
        listed = resp.json()["tasks"]
        for earlier, later in zip(listed, listed[1:]):
            assert earlier["created_at"] <= later["created_at"]

    def test_get_single_task(self, client: TestClient, test_task: Task):
        """GET /api/tasks/{id} returns the matching task."""
        resp = client.get(f"/api/tasks/{test_task.id}")
        assert resp.status_code == 200
        body = resp.json()
        assert body["id"] == test_task.id
        assert body["title"] == test_task.title

    def test_get_nonexistent_task(self, client: TestClient):
        """An unknown id yields 404."""
        assert client.get("/api/tasks/99999").status_code == 404

    def test_update_task(self, client: TestClient, test_task: Task):
        """PUT replaces title, description and completed."""
        payload = {
            "title": "Updated Title",
            "description": "Updated Description",
            "completed": True,
        }
        resp = client.put(f"/api/tasks/{test_task.id}", json=payload)
        assert resp.status_code == 200
        body = resp.json()
        assert body["title"] == "Updated Title"
        assert body["description"] == "Updated Description"
        assert body["completed"] is True

    def test_update_nonexistent_task(self, client: TestClient):
        """PUT on an unknown id yields 404."""
        payload = {"title": "Updated", "description": "Updated", "completed": True}
        assert client.put("/api/tasks/99999", json=payload).status_code == 404

    def test_patch_task_title(self, client: TestClient, test_task: Task):
        """PATCH updates only the provided title, leaving the rest intact."""
        original_description = test_task.description
        resp = client.patch(f"/api/tasks/{test_task.id}", json={"title": "Patched Title"})
        assert resp.status_code == 200
        body = resp.json()
        assert body["title"] == "Patched Title"
        assert body["description"] == original_description
        assert body["completed"] == test_task.completed

    def test_patch_task_completion(self, client: TestClient, test_task: Task):
        """PATCH can toggle completion without touching the title."""
        resp = client.patch(f"/api/tasks/{test_task.id}", json={"completed": True})
        assert resp.status_code == 200
        body = resp.json()
        assert body["completed"] is True
        assert body["title"] == test_task.title

    def test_patch_nonexistent_task(self, client: TestClient):
        """PATCH on an unknown id yields 404."""
        assert client.patch("/api/tasks/99999", json={"completed": True}).status_code == 404

    def test_delete_task(self, client: TestClient, test_task: Task):
        """DELETE removes the task; a follow-up GET sees 404."""
        task_id = test_task.id
        assert client.delete(f"/api/tasks/{task_id}").status_code == 204
        assert client.get(f"/api/tasks/{task_id}").status_code == 404

    def test_delete_nonexistent_task(self, client: TestClient):
        """DELETE on an unknown id yields 404."""
        assert client.delete("/api/tasks/99999").status_code == 404

    def test_pagination(self, client: TestClient, session: Session, test_user: User):
        """limit/offset query params page through results."""
        session.add_all([
            Task(user_id=test_user.id, title=f"Task {i}", completed=False)
            for i in range(10)
        ])
        session.commit()

        resp = client.get("/api/tasks?limit=5")
        assert resp.status_code == 200
        assert len(resp.json()["tasks"]) == 5

        resp = client.get("/api/tasks?limit=5&offset=5")
        assert resp.status_code == 200
        assert len(resp.json()["tasks"]) == 5

    def test_task_timestamps(self, client: TestClient, test_task: Task):
        """updated_at advances on modification; created_at is immutable."""
        before = client.get(f"/api/tasks/{test_task.id}").json()

        resp = client.patch(f"/api/tasks/{test_task.id}", json={"title": "Updated Title"})
        assert resp.status_code == 200
        after = resp.json()

        assert after["updated_at"] >= before["updated_at"]
        assert after["created_at"] == before["created_at"]