diff --git a/.claude/settings.local.json b/.claude/settings.local.json new file mode 100644 index 0000000000000000000000000000000000000000..afeac93f791a6eb8cfffa586a102a0b6842018ec --- /dev/null +++ b/.claude/settings.local.json @@ -0,0 +1,9 @@ +{ + "permissions": { + "allow": [ + "Bash(set NODE_OPTIONS=--preserve-symlinks)", + "Bash(npm run dev:*)", + "Bash(curl:*)" + ] + } +} diff --git a/.gitignore b/.gitignore index c9d599591e13dbecaa960a0ae67a66e296780ec5..0fee240a00f23fbf22097071d5f2556abaaa1f14 100644 --- a/.gitignore +++ b/.gitignore @@ -1,4 +1,45 @@ -# Python +# Phase 2 Premium Web SaaS - Comprehensive .gitignore +# Covers: Next.js 14, FastAPI, Python, Node.js, TypeScript, Tailwind, shadcn/ui + +# ======================================== +# Environment Variables & Secrets +# ======================================== +.env +.env.local +.env.*.local +.env.development.local +.env.test.local +.env.production.local +.env.production +*.pem +*.key +*.crt +secrets/ +credentials/ + +# ======================================== +# Node.js / NPM / TypeScript +# ======================================== +node_modules/ +npm-debug.log* +yarn-debug.log* +yarn-error.log* +pnpm-debug.log* +dist/ +dist-ssr/ +build/ +.next/ +out/ +.turbo/ +.vercel/ +*.tsbuildinfo +*.log +logs/ +*.lnk + +# ======================================== +# Python / FastAPI / SQLModel +# ======================================== __pycache__/ *.py[cod] *$py.class @@ -16,26 +57,214 @@ parts/ sdist/ var/ wheels/ +share/python-wheels/ *.egg-info/ .installed.cfg *.egg - -# Virtual Environment +MANIFEST +pip-log.txt +pip-delete-this-directory.txt +.pytest_cache/ +.coverage +htmlcov/ +.tox/ +.nox/ +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.log +.pytest_cache/ +.hypothesis/ +.venv/ venv/ ENV/ env/ -.venv +venv.bak/ +venv.stamp/ + +# ======================================== +# Alembic (Database Migrations) +# ======================================== +alembic/versions/*.pyc 
+alembic/versions/__pycache__/ -# IDE +# ======================================== +# IDE & Editor Files +# ======================================== .vscode/ .idea/ *.swp *.swo *~ +.DS_Store +.DS_Store? +._* +.Spotlight-V100 +.Trashes +ehthumbs.db +Thumbs.db +*.sublime-project +*.sublime-workspace +.history/ +*.fdb_cdblock/ +.project +.classpath +.settings/ +*.launch +.factorypath +.metadata.gradle +.gradle/ +*.iml +*.ipr +*.iws +.project.settings +.settings/ +.loadpath +.recommenders +.springBeans +.sts4-cache +.idea_modules/ +.vs/ +*.code-workspace + +# ======================================== +# Testing & Coverage +# ======================================== +coverage/ +*.lcov +.nyc_output/ +playwright-report/ +test-results/ +*.test.tsx.snap +*.mock.tsx -# OS +# ======================================== +# OS Generated Files +# ======================================== .DS_Store +.DS_Store? +._* +.Spotlight-V100 +.Trashes +ehthumbs.db Thumbs.db +desktop.ini -# Project specific +# ======================================== +# Temporary Files +# ======================================== +*.tmp +*.temp +*.cache +*.bak +*.backup +*.swp +*.swo +*~.nib +*.sql +*.sqlite +*.db + +# ======================================== +# Build & Distribution Artifacts +# ======================================== +*.tgz +*.tar.gz +*.rar +*.zip +*.7z +*.exe +*.dll +*.dylib +*.bin +*.obj/ +out/ +target/ +*.class +*.jar +*.war +*.ear + +# ======================================== +# Cloud / Deployment +# ======================================== +.vercel +.netlify +.firebase/ +amplify/ +#debug.log +*.log + +# ======================================== +# Package Manager Lock Files (Optional) +# ======================================== +# Uncomment if you want to ignore lock files +# package-lock.json +# yarn.lock +# pnpm-lock.yaml +# poetry.lock +# Pipfile.lock + +# ======================================== +# Docker +# ======================================== +*.dockerfile 
+docker-compose.override.yml + +# ======================================== +# Database Files +# ======================================== +*.db +*.sqlite +*.sqlite3 +*.db3 +*.psql +*.sql +*.sqlitedb + +# ======================================== +# Cloudinary / Image Uploads (if local storage) +# ======================================== +uploads/ +public/uploads/ +static/uploads/ +temp/ + +# ======================================== +# AI / ML Model Files +# ======================================== +*.pkl +*.h5 +*.hdf5 +*.pb +*.onnx +*.ckpt +*.pt +*.pth + +# ======================================== +# Logs & Debugging +# ======================================== *.log +logs/ +*.debug +debug.log +error.log +access.log +server.log +application.log + +# ======================================== +# Misc +# ======================================== +.site/ +.sass-cache/ +.jekyll-cache/ +.jekyll-metadata +.jekyll-server-cache +package-lock.json +yarn.lock +pnpm-lock.yaml +nul diff --git a/.specify/memory/constitution.md b/.specify/memory/constitution.md index 4af156993327f10d92818a407666d3461d243ec4..cb8c0bc0f9be596c13c560f9dfb7a12c3bc2d645 100644 --- a/.specify/memory/constitution.md +++ b/.specify/memory/constitution.md @@ -1,368 +1,55 @@ - - -# Evolution of Todo Constitution +# [PROJECT_NAME] Constitution + ## Core Principles -### I. Purpose - -This project exists to demonstrate **Spec-Driven Development (SDD)** for building a system that evolves from a **simple console application** into a **cloud-native, AI-driven, event-based distributed platform**. - -The primary objective is **architectural discipline**, not feature velocity. - ---- - -### II. 
Spec-Driven Development Only - -All work MUST follow this strict order: - -Constitution → Specify → Plan → Tasks → Implement - -**Mandatory Rules:** -- No skipping steps -- No merging steps -- No code without tasks - -**Rationale:** This ensures every implementation decision is traceable to requirements, prevents scope creep, and maintains architectural integrity across all evolution phases. - ---- - -### III. No Manual Coding - -**Non-Negotiable Rules:** -- Humans MUST NOT write application code -- ALL code must be generated via `/sp.implement` -- Humans MAY: edit specs, review output, request regeneration - -**Rationale:** Manual coding bypasses the spec-driven workflow and introduces untraceable behavior changes. Manual coding equals phase failure. - ---- - -### IV. Single Source of Truth - -**Mandatory Rules:** -- Specs are the only authority -- If behavior is not written, it does not exist -- Implementation may NEVER introduce new behavior - -**Rationale:** Prevents implementation drift and ensures all features are properly specified, reviewed, and approved before coding begins. - ---- - -### V. Phase Evolution Contract - -The project MUST evolve strictly in this order: - -| Phase | Scope | -|-----|-----| -| Phase I | In-memory console app | -| Phase II | Full-stack web app | -| Phase III | AI agents via MCP | -| Phase IV | Kubernetes deployment | -| Phase V | Event-driven cloud system | - -**Non-Negotiable:** No phase may skip responsibilities. - -**Rationale:** Each phase builds upon previous foundations. Skipping phases breaks the evolutionary principle and introduces architectural debt. - ---- - -### VI. Stateless Services - -**Mandatory Rules:** -- Backend services MUST be stateless -- State stored in: Database or Dapr state store -- Restarting services must not break functionality - -**Rationale:** Enables horizontal scaling, fault tolerance, and cloud-native deployment patterns. Stateful services create scaling bottlenecks and operational complexity. 
- ---- - -### VII. Agent-First Design - -**Mandatory Rules:** -- Agents invoke tools, not functions -- All agent behavior must be explicit -- No autonomous free-form execution - -**Rationale:** Explicit tool invocations are auditable, testable, and可控. Free-form execution creates unpredictable behavior and security risks. - ---- - -### VIII. Event-Driven by Default (Phase V) - -**Mandatory Rules:** -- Events represent facts -- Consumers react independently -- No synchronous dependencies - -**Rationale:** Enables loose coupling, independent scaling, and resilience. Synchronous dependencies create cascading failures and tight coupling. - ---- - -### IX. Security Rules - -**Mandatory Rules:** -- Authentication mandatory once introduced -- JWT verification at backend boundary -- User data isolation enforced in backend -- Secrets NEVER hard-coded -- No trust in frontend - -**Rationale:** Defense-in-depth prevents unauthorized access and data leakage. Frontend is inherently untrustworthy; backend must enforce all security rules. - ---- - -### X. Technology Constraints - -**Allowed Stack:** -- Frontend: Next.js (App Router) -- Backend: FastAPI (Python) -- ORM: SQLModel -- Database: PostgreSQL (Neon) -- Auth: Better Auth -- AI: OpenAI Agents SDK -- MCP: Official MCP SDK -- Orchestration: Kubernetes -- Messaging: Kafka (via Dapr) - -**Non-Negotiable:** Changes require spec updates. - -**Rationale:** Standardized stack reduces complexity, improves maintainability, and ensures team expertise depth. - ---- - -### XI. Error Handling - -**Mandatory Rules:** -- Errors must be user-friendly -- No crashes on invalid input -- System must recover gracefully -- Errors must not leak internals - -**Rationale:** User experience and security. Crashes and leaked internals create frustration and security vulnerabilities. - ---- - -### XII. 
Change Management - -**Change Type Mapping:** +### [PRINCIPLE_1_NAME] + +[PRINCIPLE_1_DESCRIPTION] + -| Change Type | Required Action | -|-----------|----------------| -| Behavior | Update `speckit.specify` | -| Architecture | Update `speckit.plan` | -| Tasks | Update `speckit.tasks` | -| Principles | Update this constitution | +### [PRINCIPLE_2_NAME] + +[PRINCIPLE_2_DESCRIPTION] + -**Rationale:** Ensures all changes are properly traced through the spec-driven workflow. +### [PRINCIPLE_3_NAME] + +[PRINCIPLE_3_DESCRIPTION] + ---- +### [PRINCIPLE_4_NAME] + +[PRINCIPLE_4_DESCRIPTION] + -## Phase Enforcement Rules +### [PRINCIPLE_5_NAME] + +[PRINCIPLE_5_DESCRIPTION] + -### Phase I — Console (Foundation) +### [PRINCIPLE_6_NAME] -**Scope Constraints:** -- Single user -- In-memory only -- No database -- No web -- No auth -- No AI -- No agents -**Rationale:** Establish core domain logic without infrastructure complexity. +[PRINCIPLE__DESCRIPTION] ---- +## [SECTION_2_NAME] + -### Phase II — Full Stack +[SECTION_2_CONTENT] + -**Scope Requirements:** -- Persistent database -- REST APIs -- Frontend + backend separation -- Authentication mandatory -- User-level data isolation +## [SECTION_3_NAME] + -**Rationale:** Transition from prototype to production-ready application. - ---- - -### Phase III — AI & MCP - -**Scope Requirements:** -- AI agents MUST operate via MCP tools -- No direct DB access by agents -- Chat must be stateless -- Conversation state persisted externally - -**Rationale:** Enable AI capabilities while maintaining security and scalability. - ---- - -### Phase IV — Kubernetes - -**Scope Requirements:** -- All services containerized -- Helm charts required -- Minikube parity with production -- No environment-specific logic - -**Rationale:** Enable cloud-native deployment and operational consistency. 
- ---- - -### Phase V — Event-Driven Cloud - -**Scope Requirements:** -- CRUD emits events -- Asynchronous consumers -- Kafka via Dapr only -- No service-to-service tight coupling - -**Rationale:** Enable distributed system patterns and independent scaling. - ---- - -## Architecture Principles - -### 1. Stateless Services - -Backend services MUST be stateless. State stored in: -- Database (PostgreSQL/Neon) -- Dapr state store (Phase III+) - -Restarting services must not break functionality. - ---- - -### 2. Agent-First Design - -- Agents invoke tools, not functions -- All agent behavior must be explicit -- No autonomous free-form execution - ---- - -### 3. Event-Driven by Default (Phase V) - -- Events represent facts -- Consumers react independently -- No synchronous dependencies - ---- - -## Security Rules - -- Authentication mandatory once introduced -- JWT verification at backend boundary -- User data isolation enforced in backend -- Secrets NEVER hard-coded -- No trust in frontend - ---- - -## Technology Stack - -**Allowed Technologies:** -- Frontend: Next.js (App Router) -- Backend: FastAPI (Python) -- ORM: SQLModel -- Database: PostgreSQL (Neon) -- Auth: Better Auth -- AI: OpenAI Agents SDK -- MCP: Official MCP SDK -- Orchestration: Kubernetes -- Messaging: Kafka (via Dapr) - -**Changes require spec updates.** - ---- - -## Error Handling Standards - -- Errors must be user-friendly -- No crashes on invalid input -- System must recover gracefully -- Errors must not leak internals - ---- - -## Enforcement Hierarchy - -If conflicts occur, precedence is: - -Constitution > Specify > Plan > Tasks > Implementation - -Lower layers MUST obey higher layers. 
- ---- - -## Definition of Success - -This project is successful when: - -- Every feature traces to a spec -- No manual code exists -- Agents operate only via tools -- System scales from CLI to cloud -- Architecture is explainable and auditable - ---- - -## Final Rule - -If it is not specified, -**it is forbidden.** - ---- +[SECTION_3_CONTENT] + ## Governance + -### Amendment Procedure - -1. Changes to this constitution require: - - Documentation of rationale - - Impact analysis on existing specs - - Migration plan for affected features - - Version bump following semantic versioning - -2. Versioning Policy: - - **MAJOR**: Backward incompatible governance/principle removals or redefinitions - - **MINOR**: New principle/section added or materially expanded guidance - - **PATCH**: Clarifications, wording, typo fixes, non-semantic refinements - -3. Compliance Review: - - All PRs MUST verify constitution compliance - - Plan templates MUST include constitution check gates - - Spec templates MUST enforce principle requirements - -### Enforcement - -- Constitution supersedes all other practices -- Complexity MUST be justified against principles -- All agents and tools MUST follow constitution rules +[GOVERNANCE_RULES] + -**Version**: 1.0.0 | **Ratified**: 2026-01-20 | **Last Amended**: 2026-01-20 +**Version**: [CONSTITUTION_VERSION] | **Ratified**: [RATIFICATION_DATE] | **Last Amended**: [LAST_AMENDED_DATE] + diff --git a/CLAUDE.md b/CLAUDE.md index 563a84a29e1dc6fad71c481f2d46d6522db67b78..f26964bd72637758c93bc82149eab60788e7be69 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -1,4 +1,4 @@ -# Claude Code Rules +# Claude Code Rules This file is generated during init for the selected agent. 
diff --git a/backend/.env.example b/backend/.env.example new file mode 100644 index 0000000000000000000000000000000000000000..753863f5b17889952e0f19c9ea63e67cae18144e --- /dev/null +++ b/backend/.env.example @@ -0,0 +1,44 @@ +# ======================================== +# Database Configuration +# ======================================== +# PostgreSQL connection string (use Neon for production) +DATABASE_URL=postgresql+psycopg://user:password@localhost:5432/todoapp + +# ======================================== +# JWT Authentication +# ======================================== +# Secret key for JWT token signing (must be at least 32 characters) +JWT_SECRET=your-super-secret-key-change-this-in-production-min-32-chars + +# ======================================== +# Cloudinary Configuration (Avatar Storage) +# ======================================== +CLOUDINARY_CLOUD_NAME=your-cloud-name +CLOUDINARY_API_KEY=your-api-key +CLOUDINARY_API_SECRET=your-api-secret + +# ======================================== +# Hugging Face AI Configuration +# ======================================== +HUGGINGFACE_API_KEY=your-huggingface-api-key + +# ======================================== +# Frontend URL +# ======================================== +# Allowed CORS origin for frontend +FRONTEND_URL=http://localhost:3000 + +# ======================================== +# Application Settings +# ======================================== +# Environment: development, staging, production +ENV=development + +# API Port +PORT=8000 + +# ======================================== +# Optional: Log Level +# ======================================== +# debug, info, warning, error, critical +LOG_LEVEL=info diff --git a/backend/README.md b/backend/README.md new file mode 100644 index 0000000000000000000000000000000000000000..e140444d5dc0c0454235e2e0d8427aee44782afa --- /dev/null +++ b/backend/README.md @@ -0,0 +1,165 @@ +# Todo App Backend - Phase 2 + +FastAPI backend for the Todo SaaS application with 
authentication, database, and AI integration. + +## Tech Stack + +- **FastAPI** - Modern, fast web framework for building APIs +- **SQLModel** - SQLModel for ORM with Pydantic validation +- **Alembic** - Database migration tool +- **PostgreSQL** - Primary database (Neon in production) +- **JWT + bcrypt** - Secure authentication +- **Hugging Face** - AI integration for todo features +- **Cloudinary** - Avatar image storage + +## Setup + +### 1. Create virtual environment + +```bash +python -m venv venv +source venv/bin/activate # Windows: venv\Scripts\activate +``` + +### 2. Install dependencies + +```bash +pip install -e . +``` + +Or install with dev dependencies: + +```bash +pip install -e ".[dev]" +``` + +### 3. Setup environment + +```bash +cp .env.example .env +# Edit .env with your configuration +``` + +### 4. Run database migrations + +```bash +alembic upgrade head +``` + +### 5. Start development server + +```bash +uvicorn src.main:app --reload --port 8000 +``` + +API will be available at: http://localhost:8000 +API docs at: http://localhost:8000/docs + +## Project Structure + +``` +backend/ +├── src/ +│ ├── api/ # API route handlers +│ ├── core/ # Core configuration and utilities +│ ├── models/ # SQLModel database models +│ ├── schemas/ # Pydantic schemas for request/response +│ ├── services/ # Business logic services +│ ├── tests/ # Test files +│ └── utils/ # Utility functions +├── alembic/ # Database migrations +└── pyproject.toml # Project configuration +``` + +## Available Scripts + +```bash +# Development +python -m uvicorn src.main:app --reload + +# Testing +pytest # Run tests +pytest --cov=src # Run with coverage + +# Database migrations +alembic revision --autogenerate -m "message" # Create migration +alembic upgrade head # Apply migrations +alembic downgrade -1 # Rollback one migration + +# Code quality +black . # Format code +ruff check . # Lint code +ruff check . --fix # Fix linting issues +mypy . 
# Type checking +``` + +## API Endpoints + +### Authentication +- `POST /auth/signup` - User registration +- `POST /auth/login` - User login +- `POST /auth/logout` - User logout +- `GET /auth/me` - Get current user + +### Todos +- `GET /todos` - List todos with filtering +- `POST /todos` - Create todo +- `GET /todos/{id}` - Get single todo +- `PUT /todos/{id}` - Update todo +- `DELETE /todos/{id}` - Delete todo +- `PATCH /todos/{id}/complete` - Mark todo complete + +### User Profile +- `GET /users/me` - Get user profile +- `PUT /users/me` - Update user profile +- `POST /users/me/avatar` - Upload avatar + +### AI Features +- `POST /ai/generate-todo` - Generate todo from text +- `POST /ai/summarize` - Summarize todos +- `POST /ai/prioritize` - Prioritize todos + +## Environment Variables + +See `.env.example` for required environment variables: + +- `DATABASE_URL` - PostgreSQL connection string +- `JWT_SECRET` - Secret key for JWT (min 32 chars) +- `CLOUDINARY_CLOUD_NAME` - Cloudinary cloud name +- `CLOUDINARY_API_KEY` - Cloudinary API key +- `CLOUDINARY_API_SECRET` - Cloudinary API secret +- `HUGGINGFACE_API_KEY` - Hugging Face API key +- `FRONTEND_URL` - Frontend URL for CORS + +## Development with Docker + +```bash +# Start PostgreSQL +docker-compose up -d postgres + +# Run migrations +alembic upgrade head + +# Start server +uvicorn src.main:app --reload +``` + +## Testing + +```bash +# Run all tests +pytest + +# Run with coverage +pytest --cov=src --cov-report=html + +# Run specific test file +pytest tests/test_auth.py + +# Run with verbose output +pytest -v +``` + +## License + +MIT diff --git a/backend/alembic.ini b/backend/alembic.ini new file mode 100644 index 0000000000000000000000000000000000000000..26b798c5ae7015bf71bea63b01e72c4674220b5a --- /dev/null +++ b/backend/alembic.ini @@ -0,0 +1,113 @@ +# A generic, single database configuration. 
+ +[alembic] +# path to migration scripts +script_location = alembic + +# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s +file_template = %%(year)d%%(month).2d%%(day).2d_%%(hour).2d%%(minute).2d_%%(rev)s_%%(slug)s + +# sys.path path, will be prepended to sys.path if present. +# defaults to the current working directory. +prepend_sys_path = . + +# timezone to use when rendering the date within the migration file +# as well as the filename. +# If specified, requires the python-dateutil library that can be +# installed by adding `alembic[tz]` to the pip requirements +# string value is passed to dateutil.tz.gettz() +# leave blank for localtime +# timezone = + +# max length of characters to apply to the +# "slug" field +# truncate_slug_length = 40 + +# set to 'true' to run the environment during +# the 'revision' command, regardless of autogenerate +# revision_environment = false + +# set to 'true' to allow .pyc and .pyo files without +# a source .py file to be detected as revisions in the +# versions/ directory +# sourceless = false + +# version location specification; This defaults +# to alembic/versions. When using multiple version +# directories, initial revisions must be specified with --version-path. +# The path separator used here should be the separator specified by "version_path_separator" below. +# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions + +# version path separator; As mentioned above, this is the character used to split +# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep. +# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas. +# Valid values for version_path_separator are: +# +# version_path_separator = : +# version_path_separator = ; +# version_path_separator = space +version_path_separator = os # Use os.pathsep. Default configuration used for new projects. 
+ +# set to 'true' to search source files recursively +# in each "version_locations" directory +# new in Alembic version 1.10 +# recursive_version_locations = false + +# the output encoding used when revision files +# are written from script.py.mako +# output_encoding = utf-8 + +sqlalchemy.url = postgresql+psycopg://user:password@localhost:5432/todoapp + + +[post_write_hooks] +# post_write_hooks defines scripts or Python functions that are run +# on newly generated revision scripts. See the documentation for further +# detail and examples + +# format using "black" - use the console_scripts runner, against the "black" entrypoint +# hooks = black +# black.type = console_scripts +# black.entrypoint = black +# black.options = -l 79 REVISION_SCRIPT_FILENAME + +# lint with attempts to fix using "ruff" - use the exec runner, execute a binary +# hooks = ruff +# ruff.type = exec +# ruff.executable = %(here)s/.venv/bin/ruff +# ruff.options = --fix REVISION_SCRIPT_FILENAME + +# Logging configuration +[loggers] +keys = root,sqlalchemy,alembic + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARN +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARN +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/backend/alembic/env.py b/backend/alembic/env.py new file mode 100644 index 0000000000000000000000000000000000000000..d1da91f9b94e43d189720a1e713dd33d539c2b8e --- /dev/null +++ b/backend/alembic/env.py @@ -0,0 +1,81 @@ +from logging.config import fileConfig + +from sqlalchemy import engine_from_config, pool + +from alembic import context + +# Import your models here for autogenerate support +from src.core.config import settings +from src.models.user import User +from 
src.models.todo import Todo +from src.models.session import Session +from src.models.ai_request import AIRequest +from sqlmodel import SQLModel + +# this is the Alembic Config object, which provides +# access to the values within the .ini file in use. +config = context.config + +# Interpret the config file for Python logging. +# This line sets up loggers basically. +if config.config_file_name is not None: + fileConfig(config.config_file_name) + +# add your model's MetaData object here +# for 'autogenerate' support +target_metadata = SQLModel.metadata + +# Set the database URL from settings +config.set_main_option('sqlalchemy.url', settings.database_url) + + +def run_migrations_offline() -> None: + """Run migrations in 'offline' mode. + + This configures the context with just a URL + and not an Engine, though an Engine is acceptable + here as well. By skipping the Engine creation + we don't even need a DBAPI to be available. + + Calls to context.execute() here emit the given string to the + script output. + + """ + url = config.get_main_option("sqlalchemy.url") + context.configure( + url=url, + target_metadata=target_metadata, + literal_binds=True, + dialect_opts={"paramstyle": "named"}, + ) + + with context.begin_transaction(): + context.run_migrations() + + +def run_migrations_online() -> None: + """Run migrations in 'online' mode. + + In this scenario we need to create an Engine + and associate a connection with the context. 
+ + """ + connectable = engine_from_config( + config.get_section(config.config_ini_section, {}), + prefix="sqlalchemy.", + poolclass=pool.NullPool, + ) + + with connectable.connect() as connection: + context.configure( + connection=connection, target_metadata=target_metadata + ) + + with context.begin_transaction(): + context.run_migrations() + + +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() diff --git a/backend/alembic/script.py.mako b/backend/alembic/script.py.mako new file mode 100644 index 0000000000000000000000000000000000000000..fbc4b07dcef98b20c6f96b642097f35e8433258e --- /dev/null +++ b/backend/alembic/script.py.mako @@ -0,0 +1,26 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +${imports if imports else ""} + +# revision identifiers, used by Alembic. +revision: str = ${repr(up_revision)} +down_revision: Union[str, None] = ${repr(down_revision)} +branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)} +depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)} + + +def upgrade() -> None: + ${upgrades if upgrades else "pass"} + + +def downgrade() -> None: + ${downgrades if downgrades else "pass"} diff --git a/backend/alembic/versions/001_initial_schema.py b/backend/alembic/versions/001_initial_schema.py new file mode 100644 index 0000000000000000000000000000000000000000..026fa0cdf05623257310ef6088975a43d7416890 --- /dev/null +++ b/backend/alembic/versions/001_initial_schema.py @@ -0,0 +1,185 @@ +"""Initial schema: users, todos, sessions, ai_requests tables + +Revision ID: 001 +Revises: +Create Date: 2025-01-23 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +# revision identifiers, used by Alembic. 
+revision: str = '001' +down_revision: Union[str, None] = None +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + # Create users table + op.create_table( + 'users', + sa.Column( + 'id', + postgresql.UUID(as_uuid=True), + server_default=sa.text('gen_random_uuid()'), + nullable=False, + ), + sa.Column('name', sa.String(length=255), nullable=False), + sa.Column('email', sa.String(length=255), nullable=False), + sa.Column('password_hash', sa.String(length=255), nullable=False), + sa.Column('avatar_url', sa.String(length=500), nullable=True), + sa.Column( + 'created_at', + sa.DateTime(), + server_default=sa.text('CURRENT_TIMESTAMP'), + nullable=False, + ), + sa.Column( + 'updated_at', + sa.DateTime(), + server_default=sa.text('CURRENT_TIMESTAMP'), + nullable=False, + ), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('email'), + ) + op.create_index(op.f('ix_users_id'), 'users', ['id']) + op.create_index(op.f('ix_users_email'), 'users', ['email']) + + # Create todos table + op.create_table( + 'todos', + sa.Column( + 'id', + postgresql.UUID(as_uuid=True), + server_default=sa.text('gen_random_uuid()'), + nullable=False, + ), + sa.Column('title', sa.String(length=255), nullable=False), + sa.Column('description', sa.Text(), nullable=True), + sa.Column('status', sa.String(length=50), server_default='pending', nullable=False), + sa.Column('priority', sa.String(length=50), server_default='medium', nullable=False), + sa.Column('due_date', sa.DateTime(), nullable=True), + sa.Column('completed_at', sa.DateTime(), nullable=True), + sa.Column( + 'user_id', + postgresql.UUID(as_uuid=True), + nullable=False, + ), + sa.Column( + 'created_at', + sa.DateTime(), + server_default=sa.text('CURRENT_TIMESTAMP'), + nullable=False, + ), + sa.Column( + 'updated_at', + sa.DateTime(), + server_default=sa.text('CURRENT_TIMESTAMP'), + nullable=False, + ), + sa.PrimaryKeyConstraint('id'), + 
sa.ForeignKeyConstraint(['user_id'], ['users.id'], ondelete='CASCADE'), + ) + op.create_index(op.f('ix_todos_id'), 'todos', ['id'], unique=True) + op.create_index(op.f('ix_todos_user_id'), 'todos', ['user_id']) + op.create_index('idx_todos_user_status', 'todos', ['user_id', 'status']) + op.create_index('idx_todos_user_priority', 'todos', ['user_id', 'priority']) + op.create_index('idx_todos_due_date', 'todos', ['due_date']) + + # Create sessions table + op.create_table( + 'sessions', + sa.Column( + 'id', + postgresql.UUID(as_uuid=True), + server_default=sa.text('gen_random_uuid()'), + nullable=False, + ), + sa.Column( + 'user_id', + postgresql.UUID(as_uuid=True), + nullable=False, + ), + sa.Column('token', sa.String(length=500), nullable=False), + sa.Column('expires_at', sa.DateTime(), nullable=False), + sa.Column( + 'created_at', + sa.DateTime(), + server_default=sa.text('CURRENT_TIMESTAMP'), + nullable=False, + ), + sa.Column('revoked_at', sa.DateTime(), nullable=True), + sa.Column('user_agent', sa.String(length=500), nullable=True), + sa.Column('ip_address', sa.String(length=45), nullable=True), + sa.PrimaryKeyConstraint('id'), + sa.ForeignKeyConstraint(['user_id'], ['users.id'], ondelete='CASCADE'), + ) + op.create_index(op.f('ix_sessions_id'), 'sessions', ['id'], unique=True) + op.create_index(op.f('ix_sessions_user_id'), 'sessions', ['user_id']) + op.create_index(op.f('ix_sessions_token'), 'sessions', ['token']) + op.create_index('idx_sessions_user_expires', 'sessions', ['user_id', 'expires_at']) + + # Create ai_requests table + op.create_table( + 'ai_requests', + sa.Column( + 'id', + postgresql.UUID(as_uuid=True), + server_default=sa.text('gen_random_uuid()'), + nullable=False, + ), + sa.Column( + 'user_id', + postgresql.UUID(as_uuid=True), + nullable=False, + ), + sa.Column('request_type', sa.String(length=50), nullable=False), + sa.Column('input_data', sa.Text(), nullable=False), + sa.Column('output_data', sa.Text(), nullable=True), + 
sa.Column('model_used', sa.String(length=100), nullable=False), + sa.Column('tokens_used', sa.Integer(), nullable=True), + sa.Column('processing_time_ms', sa.Integer(), nullable=True), + sa.Column( + 'created_at', + sa.DateTime(), + server_default=sa.text('CURRENT_TIMESTAMP'), + nullable=False, + ), + sa.PrimaryKeyConstraint('id'), + sa.ForeignKeyConstraint(['user_id'], ['users.id'], ondelete='CASCADE'), + ) + op.create_index(op.f('ix_ai_requests_id'), 'ai_requests', ['id'], unique=True) + op.create_index(op.f('ix_ai_requests_user_id'), 'ai_requests', ['user_id']) + op.create_index('idx_ai_requests_user_type', 'ai_requests', ['user_id', 'request_type']) + op.create_index('idx_ai_requests_created', 'ai_requests', ['created_at']) + + +def downgrade() -> None: + # Drop tables in reverse order + op.drop_index('idx_ai_requests_created', table_name='ai_requests') + op.drop_index('idx_ai_requests_user_type', table_name='ai_requests') + op.drop_index(op.f('ix_ai_requests_user_id'), table_name='ai_requests') + op.drop_index(op.f('ix_ai_requests_id'), table_name='ai_requests') + op.drop_table('ai_requests') + + op.drop_index('idx_sessions_user_expires', table_name='sessions') + op.drop_index(op.f('ix_sessions_token'), table_name='sessions') + op.drop_index(op.f('ix_sessions_user_id'), table_name='sessions') + op.drop_index(op.f('ix_sessions_id'), table_name='sessions') + op.drop_table('sessions') + + op.drop_index('idx_todos_due_date', table_name='todos') + op.drop_index('idx_todos_user_priority', table_name='todos') + op.drop_index('idx_todos_user_status', table_name='todos') + op.drop_index(op.f('ix_todos_user_id'), table_name='todos') + op.drop_index(op.f('ix_todos_id'), table_name='todos') + op.drop_table('todos') + + op.drop_index(op.f('ix_users_email'), table_name='users') + op.drop_index(op.f('ix_users_id'), table_name='users') + op.drop_table('users') diff --git a/backend/jest.config.js b/backend/jest.config.js new file mode 100644 index 
0000000000000000000000000000000000000000..ec51a1ba6b4dd63ccd90e33e64f79b45756c96e2
--- /dev/null
+++ b/backend/jest.config.js
@@ -0,0 +1,28 @@
const nextJest = require('next/jest');

const createJestConfig = nextJest({
  // Provide the path to your Next.js app to load next.config.js and .env files in your test environment
  dir: './',
});

// Add any custom config to be passed to Jest
const customJestConfig = {
  // FIX: Jest paths must be anchored with the <rootDir> token; a bare
  // '/jest.setup.js' resolves from the filesystem root and never matches.
  setupFilesAfterEnv: ['<rootDir>/jest.setup.js'],
  testEnvironment: 'jest-environment-jsdom',
  moduleNameMapper: {
    // Map the '@/...' TS path alias onto the src tree.
    '^@/(.*)$': '<rootDir>/src/$1',
  },
  collectCoverageFrom: [
    'src/**/*.{js,jsx,ts,tsx}',
    '!src/**/*.d.ts',
    '!src/**/*.stories.{js,jsx,ts,tsx}',
    '!src/**/__tests__/**',
  ],
  testMatch: [
    '<rootDir>/src/**/__tests__/**/*.{js,jsx,ts,tsx}',
    '<rootDir>/src/**/*.{spec,test}.{js,jsx,ts,tsx}',
  ],
};

// createJestConfig is exported this way to ensure that next/jest can load the Next.js config which is async
module.exports = createJestConfig(customJestConfig);
diff --git a/backend/pyproject.toml b/backend/pyproject.toml
new file mode 100644
index 0000000000000000000000000000000000000000..4f58c320d04c975ba1c959cf17b8756300313260
--- /dev/null
+++ b/backend/pyproject.toml
@@ -0,0 +1,156 @@
[project]
name = "todo-app-backend"
version = "0.1.0"
description = "Phase 2 Backend API for Todo SaaS Application"
authors = [
    {name = "User", email = "user@example.com"}
]
readme = "README.md"
requires-python = ">=3.11"
classifiers = [
    "Programming Language :: Python :: 3",
    "Programming Language :: Python :: 3.11",
    "Programming Language :: Python :: 3.12",
    "Framework :: FastAPI",
    "Intended Audience :: Developers",
]

dependencies = [
    "fastapi>=0.109.0",
    "uvicorn[standard]>=0.27.0",
    "sqlmodel>=0.0.14",
    "alembic>=1.13.0",
    "psycopg[binary]>=3.1.0",
    "python-jose[cryptography]>=3.3.0",
    "passlib[bcrypt]>=1.7.4",
    "python-multipart>=0.0.6",
    "cloudinary>=1.40.0",
    "huggingface-hub>=0.20.0",
    "httpx>=0.26.0",
    "pydantic>=2.5.0",
    "pydantic-settings>=2.1.0",
    "python-dotenv>=1.0.0",
]

[project.optional-dependencies]
dev = [
    "pytest>=7.4.0",
    "pytest-asyncio>=0.23.0",
    "pytest-cov>=4.1.0",
    "black>=23.12.0",
    "ruff>=0.1.0",
    "mypy>=1.8.0",
]

[build-system]
requires = ["setuptools>=68.0"]
build-backend = "setuptools.build_meta"

[tool.black]
line-length = 100
target-version = ['py311']
include = '\.pyi?$'
extend-exclude = '''
/(
    # directories
    \.eggs
  | \.git
  | \.hg
  | \.mypy_cache
  | \.tox
  | \.venv
  | build
  | dist
  | alembic/versions
)/
'''

[tool.ruff]
line-length = 100
target-version = "py311"
select = [
    "E",   # pycodestyle errors
    "W",   # pycodestyle warnings
    "F",   # pyflakes
    "I",   # isort
    "B",   # flake8-bugbear
    "C4",  # flake8-comprehensions
    "UP",  # pyupgrade
]
ignore = [
    "E501",  # line too long (handled by black)
    "B008",  # do not perform function calls in argument defaults
    "C901",  # too complex
]

[tool.ruff.per-file-ignores]
"__init__.py" = ["F401"]

[tool.mypy]
python_version = "3.11"
warn_return_any = true
warn_unused_configs = true
disallow_untyped_defs = true
disallow_incomplete_defs = true
check_untyped_defs = true
no_implicit_optional = true
warn_redundant_casts = true
warn_unused_ignores = true
warn_no_return = true
follow_imports = "normal"
ignore_missing_imports = true

[[tool.mypy.overrides]]
module = "alembic.*"
ignore_missing_imports = true

[tool.pytest.ini_options]
minversion = "7.0"
asyncio_mode = "auto"
testpaths = ["src/tests"]
python_files = ["test_*.py"]
python_classes = ["Test*"]
python_functions = ["test_*"]
addopts = [
    "-ra",
    "--strict-markers",
    "--strict-config",
    "--cov=src",
    "--cov-report=term-missing",
    "--cov-report=html",
]

[tool.coverage.run]
source = ["src"]
omit = [
    "*/tests/*",
    "*/alembic/versions/*",
]

[tool.coverage.report]
exclude_lines = [
    "pragma: no cover",
    "def __repr__",
    "raise AssertionError",
    "raise NotImplementedError",
    "if __name__ == .__main__.:",
    "if TYPE_CHECKING:",
    "@abstractmethod",
]

# FIX(review): PEP 621 [project.scripts] values must be entry-point object
# references of the form "package.module:function" — the previous values here
# were shell command lines (e.g. dev = "uvicorn src.main:app --reload ...")
# which is invalid metadata and makes `pip install` of this package fail.
# The intended commands are preserved below; move them to a Makefile,
# justfile, or package-runner config instead.
#
# Development commands
#   dev          -> uvicorn src.main:app --reload --port 8000 --host 0.0.0.0
#   test         -> pytest
#   test-cov     -> pytest --cov=src --cov-report=html
#
# Database migration commands
#   db-upgrade   -> alembic upgrade head
#   db-downgrade -> alembic downgrade -1
#   db-migration -> alembic revision --autogenerate -m
#
# Code quality commands
#   format       -> black .
#   lint         -> ruff check .
#   lint-fix     -> ruff check . --fix
#   type-check   -> mypy .
diff --git a/backend/src/__init__.py b/backend/src/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/backend/src/api/__init__.py b/backend/src/api/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/backend/src/api/ai.py b/backend/src/api/ai.py
new file mode 100644
index 0000000000000000000000000000000000000000..01dafcb14d56c3006938a178b5e94395b148aa38
--- /dev/null
+++ b/backend/src/api/ai.py
@@ -0,0 +1,159 @@
"""
AI API routes.

Provides endpoints for AI-powered todo features.
+""" +from typing import List +from fastapi import APIRouter, HTTPException, status, Depends +from pydantic import BaseModel + +from src.api.deps import get_current_user_id, get_db +from src.services.ai_service import ai_service +from sqlmodel import Session, select +from src.models.todo import Todo +from uuid import UUID + + +class AIGenerateRequest(BaseModel): + """Request schema for AI todo generation.""" + + goal: str + + +class AIGenerateResponse(BaseModel): + """Response schema for AI todo generation.""" + + todos: List[dict] + message: str + + +class AISummarizeResponse(BaseModel): + """Response schema for AI todo summarization.""" + + summary: str + breakdown: dict + urgent_todos: List[str] + + +class AIPrioritizeResponse(BaseModel): + """Response schema for AI todo prioritization.""" + + prioritized_todos: List[dict] + message: str + + +router = APIRouter() + + +@router.post( + '/generate-todo', + response_model=AIGenerateResponse, + summary='Generate todos with AI', + description='Generate todo suggestions from a goal using AI', +) +async def generate_todos( + request: AIGenerateRequest, + current_user_id: str = Depends(get_current_user_id), +): + """Generate todos from a goal using AI.""" + try: + result = ai_service.generate_todos(request.goal) + return AIGenerateResponse(**result) + except ValueError as e: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=str(e), + ) + except Exception as e: + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail=f"AI service error: {str(e)}", + ) + + +@router.post( + '/summarize', + response_model=AISummarizeResponse, + summary='Summarize todos with AI', + description='Get an AI-powered summary of todos', +) +async def summarize_todos( + current_user_id: str = Depends(get_current_user_id), + db: Session = Depends(get_db), +): + """Summarize todos using AI.""" + try: + # Get user's todos + query = select(Todo).where(Todo.user_id == UUID(current_user_id)) + todos = 
db.exec(query).all() + + # Convert to dict format + todos_dict = [ + { + "title": t.title, + "description": t.description, + "priority": t.priority.value, + "due_date": t.due_date.isoformat() if t.due_date else None, + } + for t in todos + ] + + result = ai_service.summarize_todos(todos_dict) + return AISummarizeResponse(**result) + + except ValueError as e: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=str(e), + ) + except Exception as e: + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail=f"AI service error: {str(e)}", + ) + + +@router.post( + '/prioritize', + response_model=AIPrioritizeResponse, + summary='Prioritize todos with AI', + description='Get AI-powered todo prioritization', +) +async def prioritize_todos( + current_user_id: str = Depends(get_current_user_id), + db: Session = Depends(get_db), +): + """Prioritize todos using AI.""" + try: + # Get user's todos + query = select(Todo).where(Todo.user_id == UUID(current_user_id)) + todos = db.exec(query).all() + + # Convert to dict format with IDs + todos_dict = [ + { + "id": str(t.id), + "title": t.title, + "description": t.description, + "priority": t.priority.value, + "due_date": t.due_date.isoformat() if t.due_date else None, + } + for t in todos + ] + + result = ai_service.prioritize_todos(todos_dict) + return AIPrioritizeResponse(**result) + + except ValueError as e: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=str(e), + ) + except Exception as e: + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail=f"AI service error: {str(e)}", + ) + + +__all__ = ['router'] diff --git a/backend/src/api/auth.py b/backend/src/api/auth.py new file mode 100644 index 0000000000000000000000000000000000000000..a7f1f712e47c71ec46e724bcaf33894f1b7ac258 --- /dev/null +++ b/backend/src/api/auth.py @@ -0,0 +1,276 @@ +""" +Authentication API routes. 
+ +Provides endpoints for user registration, login, logout, and token verification. +""" +from fastapi import APIRouter, Depends, HTTPException, Response, status +from fastapi.security import OAuth2PasswordRequestForm +from sqlmodel import Session + +from src.api.deps import get_current_user, get_db +from src.core.config import settings +from src.models.user import User +from src.schemas.auth import AuthResponse, LoginRequest, SignupRequest +from src.schemas.user import UserResponse +from src.services.auth_service import ( + authenticate_user, + create_user, + create_user_token, + get_user_by_email, +) + +router = APIRouter() + + +@router.post( + '/signup', + response_model=AuthResponse, + status_code=status.HTTP_201_CREATED, + summary='Register a new user', + description='Create a new user account with email and password', +) +async def signup( + user_data: SignupRequest, + db: Session = Depends(get_db), +): + """ + Register a new user. + + Validates email format, checks for duplicate emails, + validates password strength, and creates a new user. 
+ + Args: + user_data: User registration data (name, email, password) + db: Database session + + Returns: + AuthResponse: JWT token and user information + + Raises: + HTTPException 400: If validation fails or email already exists + """ + print(f"DEBUG: Signup request received: {user_data.dict()}") + try: + # Create user + user = create_user(db, user_data) + + # Generate JWT token + access_token = create_user_token(user.id) + + # Return response + return AuthResponse( + access_token=access_token, + token_type='bearer', + user={ + 'id': str(user.id), + 'name': user.name, + 'email': user.email, + 'avatar_url': user.avatar_url, + 'created_at': user.created_at.isoformat(), + 'updated_at': user.updated_at.isoformat(), + }, + ) + + except ValueError as e: + # Handle validation errors + error_msg = str(e) + + if 'already registered' in error_msg: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail='Email is already registered. Please use a different email or login.', + ) + elif 'Password' in error_msg: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=error_msg, + ) + else: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail='Validation failed: ' + error_msg, + ) + + except Exception as e: + # Handle unexpected errors + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail='An error occurred while creating your account. Please try again.', + ) + + +@router.post( + '/login', + response_model=AuthResponse, + summary='Login user', + description='Authenticate user with email and password', +) +async def login( + user_data: LoginRequest, + response: Response, + db: Session = Depends(get_db), +): + """ + Login a user. + + Validates credentials and returns a JWT token. 
+ + Args: + user_data: Login credentials (email, password) + response: FastAPI response object + db: Database session + + Returns: + AuthResponse: JWT token and user information + + Raises: + HTTPException 401: If credentials are invalid + """ + print(f"DEBUG: Login request received: email={user_data.email}") + # Authenticate user + user = authenticate_user(db, user_data.email, user_data.password) + + if not user: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail='Invalid email or password', + headers={'WWW-Authenticate': 'Bearer'}, + ) + + # Generate JWT token + access_token = create_user_token(user.id) + + # Set httpOnly cookie (optional, for additional security) + response.set_cookie( + key='access_token', + value=access_token, + httponly=True, + secure=not settings.is_development, # HTTPS in production + samesite='lax', + max_age=settings.jwt_expiration_days * 24 * 60 * 60, # Convert days to seconds + ) + + # Return response + return AuthResponse( + access_token=access_token, + token_type='bearer', + user={ + 'id': str(user.id), + 'name': user.name, + 'email': user.email, + 'avatar_url': user.avatar_url, + 'created_at': user.created_at.isoformat(), + 'updated_at': user.updated_at.isoformat(), + }, + ) + + +@router.post( + '/logout', + summary='Logout user', + description='Logout user and clear authentication token', +) +async def logout(response: Response): + """ + Logout a user. + + Clears the authentication cookie. + + Args: + response: FastAPI response object + + Returns: + dict: Logout confirmation message + """ + # Clear authentication cookie + response.delete_cookie('access_token') + + return {'message': 'Successfully logged out'} + + +@router.get( + '/me', + response_model=UserResponse, + summary='Get current user', + description='Get information about the currently authenticated user', +) +async def get_current_user_info( + current_user: User = Depends(get_current_user), +): + """ + Get current authenticated user. 
+ + Requires valid JWT token in Authorization header. + + Args: + current_user: Current user from dependency + + Returns: + UserResponse: Current user information + """ + return current_user + + +# OAuth2 compatible endpoint for token generation +@router.post( + '/token', + response_model=AuthResponse, + summary='Get access token', + description='OAuth2 compatible endpoint to get access token', +) +async def get_access_token( + response: Response, + form_data: OAuth2PasswordRequestForm = Depends(), + db: Session = Depends(get_db), +): + """ + OAuth2 compatible token endpoint. + + Used by OAuth2 clients to obtain access tokens. + + Args: + form_data: OAuth2 password request form + response: FastAPI response object + db: Database session + + Returns: + AuthResponse: JWT token and user information + """ + # Use login logic + user = authenticate_user(db, form_data.username, form_data.password) + + if not user: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail='Incorrect email or password', + headers={'WWW-Authenticate': 'Bearer'}, + ) + + access_token = create_user_token(user.id) + + # Set cookie + response.set_cookie( + key='access_token', + value=access_token, + httponly=True, + secure=not settings.is_development, + samesite='lax', + max_age=settings.jwt_expiration_days * 24 * 60 * 60, + ) + + return AuthResponse( + access_token=access_token, + token_type='bearer', + user={ + 'id': str(user.id), + 'name': user.name, + 'email': user.email, + 'avatar_url': user.avatar_url, + 'created_at': user.created_at.isoformat(), + 'updated_at': user.updated_at.isoformat(), + }, + ) + + +# Export router +__all__ = ['router'] diff --git a/backend/src/api/deps.py b/backend/src/api/deps.py new file mode 100644 index 0000000000000000000000000000000000000000..82623a617af939fe895d11b0b895e4d78df06b71 --- /dev/null +++ b/backend/src/api/deps.py @@ -0,0 +1,171 @@ +""" +FastAPI dependencies for database sessions and authentication. 
+ +Provides reusable dependency functions for injecting database sessions +and authenticated users into route handlers. +""" +from typing import Optional + +from fastapi import Depends, HTTPException, status +from fastapi.security import HTTPAuthorizationCredentials, HTTPBearer +from sqlmodel import Session, select + +from src.core.database import get_session +from src.core.security import TokenData +from src.models.user import User + +# HTTP Bearer token scheme for JWT extraction +security = HTTPBearer(auto_error=False) + + +async def get_db( + session: Session = Depends(get_session), +) -> Session: + """ + Dependency for getting database session. + + This is a passthrough dependency that allows for future enhancements + like request-scoped sessions or transaction management. + + Args: + session: Database session from get_session + + Returns: + Session: Database session + + Example: + @app.get("/users") + def get_users(db: Session = Depends(get_db)): + return db.exec(select(User)).all() + """ + return session + + +async def get_current_user( + credentials: Optional[HTTPAuthorizationCredentials] = Depends(security), + db: Session = Depends(get_db), +) -> User: + """ + Dependency for getting authenticated user from JWT token. + + Extracts JWT token from Authorization header, validates it, + and returns the corresponding user. 
+ + Args: + credentials: HTTP Bearer credentials from Authorization header + db: Database session + + Returns: + User: Authenticated user + + Raises: + HTTPException: If token is missing, invalid, or user not found + + Example: + @app.get("/me") + def get_me(current_user: User = Depends(get_current_user)): + return current_user + """ + # Check if credentials are provided + if credentials is None: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail='Not authenticated', + headers={'WWW-Authenticate': 'Bearer'}, + ) + + # Extract and decode token + token = credentials.credentials + token_data = TokenData.from_token(token) + + if token_data is None: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail='Invalid authentication credentials', + headers={'WWW-Authenticate': 'Bearer'}, + ) + + # Check if token is expired + if token_data.is_expired(): + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail='Token has expired', + headers={'WWW-Authenticate': 'Bearer'}, + ) + + # Get user from database + user = db.get(User, token_data.user_id) + + if user is None: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail='User not found', + headers={'WWW-Authenticate': 'Bearer'}, + ) + + return user + + +async def get_current_user_optional( + credentials: Optional[HTTPAuthorizationCredentials] = Depends(security), + db: Session = Depends(get_db), +) -> Optional[User]: + """ + Optional authentication dependency. + + Returns the authenticated user if a valid token is provided, + otherwise returns None. Useful for routes that work for both + authenticated and anonymous users. 
+ + Args: + credentials: HTTP Bearer credentials from Authorization header + db: Database session + + Returns: + Optional[User]: Authenticated user or None + + Example: + @app.get("/public-data") + def get_public_data(user: Optional[User] = Depends(get_current_user_optional)): + if user: + return {'data': '...', 'user': user.email} + return {'data': '...'} + """ + if credentials is None: + return None + + token = credentials.credentials + token_data = TokenData.from_token(token) + + if token_data is None or token_data.is_expired(): + return None + + user = db.get(User, token_data.user_id) + return user + + +async def get_current_user_id( + current_user: User = Depends(get_current_user), +) -> str: + """ + Dependency for getting authenticated user's ID as a string. + + This is a convenience wrapper around get_current_user that extracts + just the user ID as a string, which is commonly needed in API routes. + + Args: + current_user: Authenticated user from get_current_user + + Returns: + str: User ID as a string + + Example: + @app.get("/todos") + def list_todos(user_id: str = Depends(get_current_user_id)): + return {"user_id": user_id} + """ + return str(current_user.id) + + +# Export for use in other modules +__all__ = ['get_db', 'get_current_user', 'get_current_user_optional', 'get_current_user_id'] diff --git a/backend/src/api/todos.py b/backend/src/api/todos.py new file mode 100644 index 0000000000000000000000000000000000000000..f5227bbaa746a050a79ea675ee7c8519b79f01cd --- /dev/null +++ b/backend/src/api/todos.py @@ -0,0 +1,347 @@ +""" +Todo API routes. + +Provides endpoints for todo CRUD operations. 
+""" +from typing import Optional +from uuid import UUID +from datetime import datetime + +from fastapi import APIRouter, HTTPException, Query, status, Depends +from sqlmodel import Session, select + +from src.api.deps import get_current_user_id, get_db +from src.models.todo import Priority, Status, Todo +from src.schemas.todo import TodoCreateRequest, TodoResponse, TodoUpdateRequest + +router = APIRouter() + + +@router.get( + '/', + response_model=list[TodoResponse], + summary='List todos', + description='Get all todos for the current user with optional filtering', +) +async def list_todos( + skip: int = Query(0, ge=0, description='Number of todos to skip'), + limit: int = Query(20, ge=1, le=100, description='Number of todos to return'), + status_filter: Optional[str] = Query(None, alias='status', description='Filter by status'), + priority: Optional[str] = Query(None, description='Filter by priority'), + search: Optional[str] = Query(None, description='Search in title and description'), + sort_by: str = Query('created_at', description='Sort by field'), + current_user_id: str = Depends(get_current_user_id), + db: Session = Depends(get_db), +): + """List todos for the current user with filtering and pagination.""" + # Build base query with user isolation + query = select(Todo).where(Todo.user_id == UUID(current_user_id)) + + # Apply filters + if status_filter: + query = query.where(Todo.status == Status(status_filter)) + if priority: + query = query.where(Todo.priority == Priority(priority)) + if search: + search_pattern = f'%{search}%' + query = query.where( + (Todo.title.ilike(search_pattern)) | (Todo.description.ilike(search_pattern)) + ) + + # Apply sorting + if sort_by == 'created_at': + query = query.order_by(Todo.created_at.desc()) + elif sort_by == 'due_date': + query = query.order_by(Todo.due_date.asc().nulls_last()) + elif sort_by == 'priority': + query = query.order_by(Todo.priority.desc()) + + # Apply pagination + query = query.offset(skip).limit(limit) 
+ + # Execute query + todos = db.exec(query).all() + + return [ + TodoResponse( + id=str(todo.id), + user_id=str(todo.user_id), + title=todo.title, + description=todo.description, + status=todo.status.value, + priority=todo.priority.value, + tags=todo.tags, + due_date=todo.due_date.isoformat() if todo.due_date else None, + created_at=todo.created_at.isoformat(), + updated_at=todo.updated_at.isoformat(), + ) + for todo in todos + ] + + +@router.post( + '/', + response_model=TodoResponse, + status_code=status.HTTP_201_CREATED, + summary='Create todo', + description='Create a new todo', +) +async def create_todo( + todo_data: TodoCreateRequest, + current_user_id: str = Depends(get_current_user_id), + db: Session = Depends(get_db), +): + """Create a new todo for the current user.""" + todo = Todo( + title=todo_data.title, + description=todo_data.description, + priority=Priority(todo_data.priority) if todo_data.priority else Priority.MEDIUM, + due_date=todo_data.due_date, + tags=todo_data.tags, + user_id=UUID(current_user_id), + status=Status.PENDING, + ) + + db.add(todo) + db.commit() + db.refresh(todo) + + return TodoResponse( + id=str(todo.id), + user_id=str(todo.user_id), + title=todo.title, + description=todo.description, + status=todo.status.value, + priority=todo.priority.value, + tags=todo.tags, + due_date=todo.due_date.isoformat() if todo.due_date else None, + created_at=todo.created_at.isoformat(), + updated_at=todo.updated_at.isoformat(), + ) + + +# IMPORTANT: More specific routes must come BEFORE parameterized routes +@router.post( + '/{todo_id}/toggle', + response_model=TodoResponse, + summary='Toggle todo completion (POST)', + description='Toggle todo completion status - POST method for frontend compatibility', +) +async def toggle_todo_post( + todo_id: str, + current_user_id: str = Depends(get_current_user_id), + db: Session = Depends(get_db), +): + """Toggle todo completion status using POST method.""" + query = select(Todo).where( + Todo.id == 
UUID(todo_id), + Todo.user_id == UUID(current_user_id) + ) + todo = db.exec(query).first() + + if not todo: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail='Todo not found', + ) + + # Toggle status - flip between completed and pending + if todo.status == Status.PENDING: + todo.status = Status.COMPLETED + if not todo.completed_at: + todo.completed_at = datetime.utcnow() + else: + todo.status = Status.PENDING + todo.completed_at = None + + db.add(todo) + db.commit() + db.refresh(todo) + + return TodoResponse( + id=str(todo.id), + user_id=str(todo.user_id), + title=todo.title, + description=todo.description, + status=todo.status.value, + priority=todo.priority.value, + tags=todo.tags, + due_date=todo.due_date.isoformat() if todo.due_date else None, + created_at=todo.created_at.isoformat(), + updated_at=todo.updated_at.isoformat(), + ) + + +@router.patch( + '/{todo_id}/complete', + response_model=TodoResponse, + summary='Toggle todo completion', + description='Toggle todo completion status', +) +async def toggle_complete( + todo_id: str, + completed: bool = True, + current_user_id: str = Depends(get_current_user_id), + db: Session = Depends(get_db), +): + """Toggle todo completion status.""" + query = select(Todo).where( + Todo.id == UUID(todo_id), + Todo.user_id == UUID(current_user_id) + ) + todo = db.exec(query).first() + + if not todo: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail='Todo not found', + ) + + # Toggle status + todo.status = Status.COMPLETED if completed else Status.PENDING + if completed and not todo.completed_at: + todo.completed_at = datetime.utcnow() + elif not completed: + todo.completed_at = None + + db.add(todo) + db.commit() + db.refresh(todo) + + return TodoResponse( + id=str(todo.id), + user_id=str(todo.user_id), + title=todo.title, + description=todo.description, + status=todo.status.value, + priority=todo.priority.value, + tags=todo.tags, + due_date=todo.due_date.isoformat() if 
todo.due_date else None, + created_at=todo.created_at.isoformat(), + updated_at=todo.updated_at.isoformat(), + ) + + +@router.get( + '/{todo_id}', + response_model=TodoResponse, + summary='Get todo', + description='Get a specific todo by ID', +) +async def get_todo( + todo_id: str, + current_user_id: str = Depends(get_current_user_id), + db: Session = Depends(get_db), +): + """Get a specific todo.""" + query = select(Todo).where( + Todo.id == UUID(todo_id), + Todo.user_id == UUID(current_user_id) + ) + todo = db.exec(query).first() + + if not todo: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail='Todo not found', + ) + + return TodoResponse( + id=str(todo.id), + user_id=str(todo.user_id), + title=todo.title, + description=todo.description, + status=todo.status.value, + priority=todo.priority.value, + tags=todo.tags, + due_date=todo.due_date.isoformat() if todo.due_date else None, + created_at=todo.created_at.isoformat(), + updated_at=todo.updated_at.isoformat(), + ) + + +@router.put( + '/{todo_id}', + response_model=TodoResponse, + summary='Update todo', + description='Update a todo', +) +async def update_todo( + todo_id: str, + todo_data: TodoUpdateRequest, + current_user_id: str = Depends(get_current_user_id), + db: Session = Depends(get_db), +): + """Update a todo.""" + query = select(Todo).where( + Todo.id == UUID(todo_id), + Todo.user_id == UUID(current_user_id) + ) + todo = db.exec(query).first() + + if not todo: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail='Todo not found', + ) + + # Update fields + if todo_data.title is not None: + todo.title = todo_data.title + if todo_data.description is not None: + todo.description = todo_data.description + if todo_data.priority is not None: + todo.priority = Priority(todo_data.priority) + if todo_data.due_date is not None: + todo.due_date = todo_data.due_date + if todo_data.tags is not None: + todo.tags = todo_data.tags + + db.add(todo) + db.commit() + 
db.refresh(todo) + + return TodoResponse( + id=str(todo.id), + user_id=str(todo.user_id), + title=todo.title, + description=todo.description, + status=todo.status.value, + priority=todo.priority.value, + tags=todo.tags, + due_date=todo.due_date.isoformat() if todo.due_date else None, + created_at=todo.created_at.isoformat(), + updated_at=todo.updated_at.isoformat(), + ) + + +@router.delete( + '/{todo_id}', + status_code=status.HTTP_204_NO_CONTENT, + summary='Delete todo', + description='Delete a todo', +) +async def delete_todo( + todo_id: str, + current_user_id: str = Depends(get_current_user_id), + db: Session = Depends(get_db), +): + """Delete a todo.""" + query = select(Todo).where( + Todo.id == UUID(todo_id), + Todo.user_id == UUID(current_user_id) + ) + todo = db.exec(query).first() + + if not todo: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail='Todo not found', + ) + + db.delete(todo) + db.commit() + return None + + +__all__ = ['router'] diff --git a/backend/src/api/users.py b/backend/src/api/users.py new file mode 100644 index 0000000000000000000000000000000000000000..39608c77b1abec15463aaf4bf212b23d9fc8c828 --- /dev/null +++ b/backend/src/api/users.py @@ -0,0 +1,173 @@ +""" +User API routes. + +Provides endpoints for user profile management. 
+""" +from uuid import UUID +from typing import Optional + +from fastapi import APIRouter, HTTPException, status, Depends, File, UploadFile +from sqlmodel import Session, select, func +from datetime import datetime + +from src.api.deps import get_current_user_id, get_db +from src.models.user import User +from src.models.todo import Todo, Status +from src.schemas.user import UserResponse, UserProfileUpdateRequest + +router = APIRouter() + + +@router.get( + '/me', + response_model=dict, + summary='Get current user profile', + description='Get current user profile with statistics', +) +async def get_profile( + current_user_id: str = Depends(get_current_user_id), + db: Session = Depends(get_db), +): + """Get current user profile with todo statistics.""" + # Get user + query = select(User).where(User.id == UUID(current_user_id)) + user = db.exec(query).first() + + if not user: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail='User not found', + ) + + # Get todo statistics + total_todos = db.exec( + select(func.count()).select_from(Todo).where(Todo.user_id == user.id) + ).one() + pending_todos = db.exec( + select(func.count()).select_from(Todo).where( + Todo.user_id == user.id, + Todo.status == Status.PENDING + ) + ).one() + completed_todos = db.exec( + select(func.count()).select_from(Todo).where( + Todo.user_id == user.id, + Todo.status == Status.COMPLETED + ) + ).one() + + return { + 'id': str(user.id), + 'name': user.name, + 'email': user.email, + 'avatar_url': user.avatar_url, + 'created_at': user.created_at.isoformat(), + 'updated_at': user.updated_at.isoformat(), + 'stats': { + 'total_todos': total_todos, + 'pending_todos': pending_todos, + 'completed_todos': completed_todos, + } + } + + +@router.put( + '/me', + response_model=dict, + summary='Update user profile', + description='Update current user profile', +) +async def update_profile( + profile_data: UserProfileUpdateRequest, + current_user_id: str = Depends(get_current_user_id), + db: 
Session = Depends(get_db), +): + """Update current user profile.""" + # Get user + query = select(User).where(User.id == UUID(current_user_id)) + user = db.exec(query).first() + + if not user: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail='User not found', + ) + + # Update fields + if profile_data.name is not None: + user.name = profile_data.name + user.updated_at = datetime.utcnow() + + db.add(user) + db.commit() + db.refresh(user) + + return { + 'id': str(user.id), + 'name': user.name, + 'email': user.email, + 'avatar_url': user.avatar_url, + 'created_at': user.created_at.isoformat(), + 'updated_at': user.updated_at.isoformat(), + } + + +@router.post( + '/me/avatar', + response_model=dict, + summary='Upload avatar', + description='Upload user avatar image', +) +async def upload_avatar( + file: UploadFile = File(...), + current_user_id: str = Depends(get_current_user_id), + db: Session = Depends(get_db), +): + """Upload user avatar.""" + # Validate file type + allowed_types = ['image/jpeg', 'image/png', 'image/gif', 'image/webp'] + if file.content_type not in allowed_types: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=f'Invalid file type. Allowed: {", ".join(allowed_types)}', + ) + + # Validate file size (5MB max) + MAX_FILE_SIZE = 5 * 1024 * 1024 # 5MB + content = await file.read() + if len(content) > MAX_FILE_SIZE: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail='File too large. 
Maximum size: 5MB', + ) + + # For now, return a placeholder avatar URL + # In production, you would upload to Cloudinary here + from src.core.config import settings + + if settings.cloudinary_cloud_name: + # TODO: Implement Cloudinary upload + avatar_url = f"https://ui-avatars.com/api/?name={file.filename}&background=random" + else: + # Use UI Avatars as fallback + query = select(User).where(User.id == UUID(current_user_id)) + user = db.exec(query).first() + avatar_url = f"https://ui-avatars.com/api/?name={user.name}&background=random" + + # Update user avatar + query = select(User).where(User.id == UUID(current_user_id)) + user = db.exec(query).first() + + if user: + user.avatar_url = avatar_url + user.updated_at = datetime.utcnow() + db.add(user) + db.commit() + + return { + 'avatar_url': avatar_url, + 'message': 'Avatar uploaded successfully', + } + + +__all__ = ['router'] diff --git a/backend/src/core/__init__.py b/backend/src/core/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/backend/src/core/config.py b/backend/src/core/config.py new file mode 100644 index 0000000000000000000000000000000000000000..16cab42e77f3cbe3cdfd5ada940ccc72e7f84b02 --- /dev/null +++ b/backend/src/core/config.py @@ -0,0 +1,132 @@ +""" +Application configuration using pydantic-settings. + +Loads environment variables from .env file and provides type-safe access. 
+""" +from functools import lru_cache +from typing import Optional + +from pydantic import Field, field_validator +from pydantic_settings import BaseSettings, SettingsConfigDict + + +class Settings(BaseSettings): + """Application settings loaded from environment variables.""" + + model_config = SettingsConfigDict( + env_file='.env', + env_file_encoding='utf-8', + case_sensitive=False, + extra='ignore', + ) + + # ======================================== + # Database Configuration + # ======================================== + database_url: str = Field( + default='postgresql+psycopg://todoapp:todoapp_password@localhost:5432/todoapp', + description='PostgreSQL connection string', + ) + + # ======================================== + # JWT Authentication + # ======================================== + jwt_secret: str = Field( + ..., + min_length=32, + description='Secret key for JWT token signing (min 32 characters)', + ) + + jwt_algorithm: str = Field(default='HS256', description='JWT algorithm') + jwt_expiration_days: int = Field(default=7, description='JWT token expiration in days') + + # ======================================== + # Cloudinary Configuration (Avatar Storage) + # ======================================== + cloudinary_cloud_name: Optional[str] = Field( + default=None, description='Cloudinary cloud name' + ) + cloudinary_api_key: Optional[str] = Field(default=None, description='Cloudinary API key') + cloudinary_api_secret: Optional[str] = Field( + default=None, description='Cloudinary API secret' + ) + + # ======================================== + # Hugging Face AI Configuration + # ======================================== + huggingface_api_key: Optional[str] = Field( + default=None, description='Hugging Face API key' + ) + + # ======================================== + # Frontend URL + # ======================================== + frontend_url: str = Field( + default='http://localhost:3000', + description='Allowed CORS origin for frontend', + ) + + # 
======================================== + # Application Settings + # ======================================== + env: str = Field(default='development', description='Environment: development, staging, production') + port: int = Field(default=8000, description='API port') + log_level: str = Field(default='info', description='Log level: debug, info, warning, error, critical') + + # ======================================== + # Security Settings + # ======================================== + bcrypt_rounds: int = Field(default=12, description='Bcrypt password hashing rounds') + cors_origins: list[str] = Field( + default=['http://localhost:3000', 'http://localhost:3001', 'http://localhost:3002', 'http://127.0.0.1:3000', 'http://127.0.0.1:3001', 'http://127.0.0.1:3002'], description='CORS allowed origins' + ) + + @field_validator('env') + @classmethod + def validate_environment(cls, v: str) -> str: + """Validate environment value.""" + allowed = ['development', 'staging', 'production'] + if v not in allowed: + raise ValueError(f'env must be one of {allowed}') + return v + + @field_validator('log_level') + @classmethod + def validate_log_level(cls, v: str) -> str: + """Validate log level value.""" + allowed = ['debug', 'info', 'warning', 'error', 'critical'] + if v not in allowed: + raise ValueError(f'log_level must be one of {allowed}') + return v + + @property + def is_development(self) -> bool: + """Check if running in development mode.""" + return self.env == 'development' + + @property + def is_production(self) -> bool: + """Check if running in production mode.""" + return self.env == 'production' + + @property + def database_url_sync(self) -> str: + """ + Get synchronous database URL for Alembic migrations. + Replaces postgresql+psycopg with postgresql+psycopg2. + """ + return self.database_url.replace('+psycopg', '+psycopg2') + + +@lru_cache() +def get_settings() -> Settings: + """ + Get cached settings instance. 
+ + Uses lru_cache to ensure settings are loaded only once. + """ + return Settings() + + +# Export settings instance +settings = get_settings() diff --git a/backend/src/core/database.py b/backend/src/core/database.py new file mode 100644 index 0000000000000000000000000000000000000000..168cf34a417a54bd095b599f3299518fd94427a1 --- /dev/null +++ b/backend/src/core/database.py @@ -0,0 +1,103 @@ +""" +Database configuration and session management. + +Provides SQLAlchemy engine with connection pooling and session dependency for FastAPI. +""" +from typing import Generator + +from sqlalchemy import create_engine, text +from sqlalchemy.exc import SQLAlchemyError +from sqlmodel import Session, SQLModel + +from src.core.config import settings + +# Create SQLAlchemy engine with connection pooling +engine = create_engine( + str(settings.database_url), + pool_size=10, # Number of connections to maintain + max_overflow=20, # Additional connections when pool is full + pool_recycle=3600, # Recycle connections after 1 hour + pool_pre_ping=True, # Verify connections before using + echo=settings.is_development, # Log SQL in development +) + + +def init_db() -> None: + """ + Initialize database by creating all tables. + + This should only be used for development/testing. + In production, use Alembic migrations instead. + """ + SQLModel.metadata.create_all(engine) + + +def get_session() -> Generator[Session, None, None]: + """ + FastAPI dependency for database session. + + Yields a database session and ensures it's closed after use. + Automatically handles rollback on errors. + + Yields: + Session: SQLAlchemy session + + Example: + @app.get("/users") + def get_users(db: Session = Depends(get_session)): + return db.exec(select(User)).all() + """ + session = Session(engine) + try: + yield session + session.commit() + except SQLAlchemyError: + session.rollback() + raise + finally: + session.close() + + +class DatabaseManager: + """ + Database manager for advanced operations. 
+ + Provides methods for health checks, connection testing, + and administrative tasks. + """ + + @staticmethod + def check_connection() -> bool: + """ + Check if database connection is alive. + + Returns: + bool: True if connection is successful, False otherwise + """ + try: + with engine.connect() as conn: + conn.execute(text("SELECT 1")) + return True + except Exception as e: + print(f"Database connection error: {e}") + return False + + @staticmethod + def get_pool_status() -> dict: + """ + Get connection pool status. + + Returns: + dict: Pool statistics including size, checked out, and overflow + """ + pool = engine.pool + return { + 'pool_size': pool.size(), + 'checked_out': pool.checkedout(), + 'overflow': pool.overflow(), + 'max_overflow': engine.pool.max_overflow, + } + + +# Export for use in other modules +__all__ = ['engine', 'get_session', 'init_db', 'DatabaseManager'] diff --git a/backend/src/core/security.py b/backend/src/core/security.py new file mode 100644 index 0000000000000000000000000000000000000000..a1f28ca588a74c9144c74c7bb95e9f0fc27bfe13 --- /dev/null +++ b/backend/src/core/security.py @@ -0,0 +1,154 @@ +""" +Security utilities for authentication and password management. + +Provides password hashing with bcrypt and JWT token creation/verification. +""" +from datetime import datetime, timedelta +from typing import Optional + +from jose import JWTError, jwt +from passlib.context import CryptContext + +from src.core.config import settings + +# Password hashing context with bcrypt +pwd_context = CryptContext(schemes=['bcrypt'], deprecated='auto') + + +def verify_password(plain_password: str, hashed_password: str) -> bool: + """ + Verify a plain password against a hashed password. 
+ + Args: + plain_password: Plain text password to verify + hashed_password: Hashed password to compare against + + Returns: + bool: True if passwords match, False otherwise + """ + return pwd_context.verify(plain_password, hashed_password) + + +def get_password_hash(password: str) -> str: + """ + Hash a password using bcrypt. + + Args: + password: Plain text password to hash + + Returns: + str: Hashed password + """ + return pwd_context.hash(password) + + +def create_access_token(data: dict, expires_delta: Optional[timedelta] = None) -> str: + """ + Create a JWT access token. + + Args: + data: Data to encode in the token (typically {'sub': user_id}) + expires_delta: Optional custom expiration time + + Returns: + str: Encoded JWT token + + Example: + token = create_access_token(data={'sub': str(user.id)}) + """ + to_encode = data.copy() + + # Set expiration time + if expires_delta: + expire = datetime.utcnow() + expires_delta + else: + expire = datetime.utcnow() + timedelta(days=settings.jwt_expiration_days) + + to_encode.update({'exp': expire}) + + # Encode token + encoded_jwt = jwt.encode( + to_encode, settings.jwt_secret, algorithm=settings.jwt_algorithm + ) + + return encoded_jwt + + +def decode_access_token(token: str) -> Optional[dict]: + """ + Decode and verify a JWT access token. + + Args: + token: JWT token to decode + + Returns: + dict: Decoded token payload if valid, None if invalid + + Example: + payload = decode_access_token(token) + if payload: + user_id = payload.get('sub') + """ + try: + payload = jwt.decode( + token, settings.jwt_secret, algorithms=[settings.jwt_algorithm] + ) + return payload + except JWTError: + return None + + +class TokenData: + """ + Token data model for decoded JWT tokens. 
+ + Attributes: + user_id: User ID from token subject + exp: Token expiration timestamp + """ + + def __init__(self, user_id: Optional[str] = None, exp: Optional[int] = None): + self.user_id = user_id + self.exp = exp + + @classmethod + def from_token(cls, token: str) -> Optional['TokenData']: + """ + Create TokenData from JWT token. + + Args: + token: JWT token to decode + + Returns: + TokenData if token is valid, None otherwise + """ + payload = decode_access_token(token) + if payload is None: + return None + + user_id = payload.get('sub') + exp = payload.get('exp') + + return cls(user_id=user_id, exp=exp) + + def is_expired(self) -> bool: + """ + Check if token is expired. + + Returns: + bool: True if token is expired, False otherwise + """ + if self.exp is None: + return False + + return datetime.utcnow().timestamp() > self.exp + + +# Export for use in other modules +__all__ = [ + 'verify_password', + 'get_password_hash', + 'create_access_token', + 'decode_access_token', + 'TokenData', +] diff --git a/backend/src/main.py b/backend/src/main.py new file mode 100644 index 0000000000000000000000000000000000000000..228f9c6c35a8170090f63cec9df0708cb91b5583 --- /dev/null +++ b/backend/src/main.py @@ -0,0 +1,126 @@ +""" +FastAPI application main entry point. + +Configures the FastAPI app with CORS middleware, routes, and middleware. +""" +from contextlib import asynccontextmanager +from typing import AsyncGenerator + +from fastapi import FastAPI, Request +from fastapi.middleware.cors import CORSMiddleware +from fastapi.responses import JSONResponse + +from src.core.config import settings +from src.core.database import DatabaseManager, init_db + + +@asynccontextmanager +async def lifespan(app: FastAPI) -> AsyncGenerator[None, None]: + """ + Lifespan context manager for FastAPI app. + + Handles startup and shutdown events. 
+ """ + # Startup + print(f"Starting Todo App API") + print(f"Environment: {settings.env}") + print(f"Database: {settings.database_url.split('@')[-1]}") + + # Initialize database (create tables if not exists) + # In production, use Alembic migrations instead + if settings.is_development: + init_db() + print("Database initialized") + + yield + + # Shutdown + print("Shutting down Todo App API") + + +# Create FastAPI app +app = FastAPI( + title='Todo App API', + description='Premium Todo SaaS Application API', + version='0.1.0', + docs_url='/docs', + redoc_url='/redoc', + lifespan=lifespan, +) + + +# Configure CORS middleware +app.add_middleware( + CORSMiddleware, + allow_origins=settings.cors_origins, + allow_credentials=True, + allow_methods=['*'], + allow_headers=['*'], +) + + +# Global exception handler +@app.exception_handler(Exception) +async def global_exception_handler(request: Request, exc: Exception): + """Handle all unhandled exceptions.""" + print(f"Unhandled exception: {exc}") + return JSONResponse( + status_code=500, + content={ + 'detail': 'Internal server error', + 'message': str(exc) if settings.is_development else 'An error occurred', + }, + ) + + +# Health check endpoint +@app.get('/health', tags=['Health']) +async def health_check(): + """ + Health check endpoint. + + Returns API status and database connection status. + """ + db_connected = DatabaseManager.check_connection() + + return { + 'status': 'healthy', + 'api': 'Todo App API', + 'version': '0.1.0', + 'environment': settings.env, + 'database': 'connected' if db_connected else 'disconnected', + } + + +# Root endpoint +@app.get('/', tags=['Root']) +async def root(): + """ + Root endpoint with API information. 
+ """ + return { + 'message': 'Welcome to Todo App API', + 'version': '0.1.0', + 'docs': '/docs', + 'health': '/health', + } + + +# Include routers +from src.api import auth, todos, users, ai + +app.include_router(auth.router, prefix='/api/auth', tags=['Authentication']) +app.include_router(todos.router, prefix='/api/todos', tags=['Todos']) +app.include_router(users.router, prefix='/api/users', tags=['Users']) +app.include_router(ai.router, prefix='/api/ai', tags=['AI']) + + +if __name__ == '__main__': + import uvicorn + + uvicorn.run( + 'src.main:app', + host='0.0.0.0', + port=settings.port, + reload=settings.is_development, + ) diff --git a/backend/src/models/__init__.py b/backend/src/models/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/backend/src/models/ai_request.py b/backend/src/models/ai_request.py new file mode 100644 index 0000000000000000000000000000000000000000..edf040178971fffe2be9bc95534d87893b8efd7c --- /dev/null +++ b/backend/src/models/ai_request.py @@ -0,0 +1,93 @@ +""" +AIRequest model for tracking AI feature usage. +""" +from datetime import datetime +from enum import Enum +from typing import Optional +from uuid import UUID, uuid4 + +from sqlmodel import Column, DateTime, Field, ForeignKey, SQLModel, Text +from sqlalchemy import text, Index + + +class AIRequestType(str, Enum): + """Types of AI requests.""" + + GENERATE_TODO = 'generate_todo' + SUMMARIZE = 'summarize' + PRIORITIZE = 'prioritize' + + +class AIRequest(SQLModel, table=True): + """ + AIRequest model for tracking AI feature usage. 
+
+    Attributes:
+        id: Unique request identifier (UUID)
+        user_id: User who made the request (foreign key)
+        request_type: Type of AI request
+        input_data: Input data sent to AI
+        output_data: Output data from AI
+        model_used: AI model used for processing
+        tokens_used: Optional number of tokens used
+        processing_time_ms: Processing time in milliseconds
+        created_at: Request timestamp
+    """
+
+    __tablename__ = 'ai_requests'
+
+    id: UUID = Field(
+        default_factory=uuid4,
+        primary_key=True,
+        index=True,
+        description='Unique request identifier',
+    )
+    user_id: UUID = Field(
+        default=None,
+        foreign_key='users.id',
+        nullable=False,
+        index=True,
+        description='User who made the request',
+    )
+    request_type: AIRequestType = Field(
+        description='Type of AI request',
+    )
+    input_data: str = Field(
+        sa_column=Column(Text),
+        description='Input data sent to AI',
+    )
+    output_data: Optional[str] = Field(
+        default=None,
+        sa_column=Column(Text),
+        description='Output data from AI',
+    )
+    model_used: str = Field(
+        max_length=100,
+        description='AI model used',
+    )
+    tokens_used: Optional[int] = Field(
+        default=None,
+        description='Number of tokens used',
+    )
+    processing_time_ms: Optional[int] = Field(
+        default=None,
+        description='Processing time in milliseconds',
+    )
+    created_at: datetime = Field(
+        default_factory=datetime.utcnow,
+        sa_column=Column(DateTime(), server_default=text('CURRENT_TIMESTAMP')),
+        description='Request timestamp',
+    )
+
+    # Define indexes
+    __table_args__ = (
+        Index('idx_ai_requests_user_type', 'user_id', 'request_type'),
+        Index('idx_ai_requests_created', 'created_at'),
+    )
+
+    def __repr__(self) -> str:
+        return f'<AIRequest {self.id} type={self.request_type}>'
+
+
+# Export for use in other modules
+__all__ = ['AIRequest', 'AIRequestType']
diff --git a/backend/src/models/session.py b/backend/src/models/session.py
new file mode 100644
index 0000000000000000000000000000000000000000..08f57843af54fc0ecf24e81329b40af460331300
--- /dev/null
+++ b/backend/src/models/session.py
@@ -0,0
+1,91 @@
+"""
+Session model for JWT token management.
+"""
+from datetime import datetime
+from typing import Optional
+from uuid import UUID, uuid4
+
+from sqlmodel import Column, DateTime, Field, ForeignKey, SQLModel, Text
+from sqlalchemy import text, Index
+
+
+class Session(SQLModel, table=True):
+    """
+    Session model for tracking active JWT tokens.
+
+    Attributes:
+        id: Unique session identifier (UUID)
+        user_id: Associated user ID (foreign key)
+        token: JWT token (hashed or partial)
+        expires_at: Token expiration timestamp
+        created_at: Session creation timestamp
+        revoked_at: Optional revocation timestamp
+        user_agent: Optional user agent string
+        ip_address: Optional IP address
+    """
+
+    __tablename__ = 'sessions'
+
+    id: UUID = Field(
+        default_factory=uuid4,
+        primary_key=True,
+        index=True,
+        description='Unique session identifier',
+    )
+    user_id: UUID = Field(
+        default=None,
+        foreign_key='users.id',
+        nullable=False,
+        index=True,
+        description='Associated user ID',
+    )
+    token: str = Field(
+        max_length=500,
+        index=True,
+        description='JWT token identifier',
+    )
+    expires_at: datetime = Field(
+        description='Token expiration timestamp',
+    )
+    created_at: datetime = Field(
+        default_factory=datetime.utcnow,
+        sa_column=Column(DateTime(), server_default=text('CURRENT_TIMESTAMP')),
+        description='Session creation timestamp',
+    )
+    revoked_at: Optional[datetime] = Field(
+        default=None,
+        description='Revocation timestamp',
+    )
+    user_agent: Optional[str] = Field(
+        default=None,
+        max_length=500,
+        description='User agent string',
+    )
+    ip_address: Optional[str] = Field(
+        default=None,
+        max_length=45,
+        description='IP address (IPv4 or IPv6)',
+    )
+
+    # Define indexes
+    __table_args__ = (
+        Index('idx_sessions_user_expires', 'user_id', 'expires_at'),
+        Index('idx_sessions_token', 'token'),
+    )
+
+    def __repr__(self) -> str:
+        return f'<Session {self.id} user={self.user_id}>'
+
+    def is_valid(self) -> bool:
+        """Check if session is valid (not expired and not revoked)."""
+        if
self.revoked_at is not None: + return False + return datetime.utcnow() < self.expires_at + + def revoke(self) -> None: + """Revoke the session.""" + self.revoked_at = datetime.utcnow() + + +# Export for use in other modules +__all__ = ['Session'] diff --git a/backend/src/models/todo.py b/backend/src/models/todo.py new file mode 100644 index 0000000000000000000000000000000000000000..813a031dc70f40d565887d5f2b0b4cc9a2ba7c34 --- /dev/null +++ b/backend/src/models/todo.py @@ -0,0 +1,124 @@ +""" +Todo model for task management. +""" +from datetime import datetime +from enum import Enum +from typing import Optional, List +from uuid import UUID, uuid4 + +from pydantic import Field as PydanticField +from sqlmodel import Column, DateTime, Field, ForeignKey, SQLModel, Text +from sqlalchemy import text, Index, ARRAY, String + + +class Priority(str, Enum): + """Todo priority levels.""" + + LOW = 'low' + MEDIUM = 'medium' + HIGH = 'high' + + +class Status(str, Enum): + """Todo status values.""" + + PENDING = 'pending' + COMPLETED = 'completed' + + +class Todo(SQLModel, table=True): + """ + Todo model representing user tasks. 
+ + Attributes: + id: Unique todo identifier (UUID) + title: Todo title + description: Optional detailed description + status: Current status (pending, in_progress, completed, cancelled) + priority: Priority level (low, medium, high) + due_date: Optional due date + completed_at: Optional completion timestamp + user_id: Owner user ID (foreign key) + created_at: Creation timestamp + updated_at: Last update timestamp + """ + + __tablename__ = 'todos' + + id: UUID = Field( + default_factory=uuid4, + primary_key=True, + index=True, + description='Unique todo identifier', + ) + title: str = Field(max_length=255, description='Todo title') + description: Optional[str] = Field( + default=None, sa_column=Column(Text), description='Detailed description' + ) + status: Status = Field( + default=Status.PENDING, + description='Current status', + ) + priority: Priority = Field( + default=Priority.MEDIUM, + description='Priority level', + ) + due_date: Optional[datetime] = Field( + default=None, + description='Due date', + ) + tags: Optional[List[str]] = Field( + default=None, + sa_column=Column(ARRAY(String)), # PostgreSQL array type + description='Tags for categorization', + ) + completed_at: Optional[datetime] = Field( + default=None, + description='Completion timestamp', + ) + user_id: UUID = Field( + default=None, + foreign_key='users.id', + nullable=False, + index=True, + description='Owner user ID', + ) + created_at: datetime = Field( + default_factory=datetime.utcnow, + sa_column=Column(DateTime(), server_default=text('CURRENT_TIMESTAMP')), + description='Creation timestamp', + ) + updated_at: datetime = Field( + default_factory=datetime.utcnow, + sa_column=Column( + DateTime(), + server_default=text('CURRENT_TIMESTAMP'), + onupdate=text('CURRENT_TIMESTAMP'), + ), + description='Last update timestamp', + ) + + # Define indexes + __table_args__ = ( + Index('idx_todos_user_status', 'user_id', 'status'), + Index('idx_todos_user_priority', 'user_id', 'priority'), + 
Index('idx_todos_due_date', 'due_date'),
+    )
+
+    def __repr__(self) -> str:
+        return f'<Todo {self.id} {self.title!r}>'
+
+    def mark_completed(self) -> None:
+        """Mark todo as completed."""
+        self.status = Status.COMPLETED
+        self.completed_at = datetime.utcnow()
+
+    def is_overdue(self) -> bool:
+        """Check if todo is overdue."""
+        if self.due_date is None or self.status == Status.COMPLETED:
+            return False
+        return datetime.utcnow() > self.due_date
+
+
+# Export for use in other modules
+__all__ = ['Todo', 'Priority', 'Status']
diff --git a/backend/src/models/user.py b/backend/src/models/user.py
new file mode 100644
index 0000000000000000000000000000000000000000..22d761aed7a5fe8487fc78d65bdf84d2cccdfbe2
--- /dev/null
+++ b/backend/src/models/user.py
@@ -0,0 +1,65 @@
+"""
+User model for authentication and profile.
+"""
+from datetime import datetime
+from typing import Optional
+from uuid import UUID, uuid4
+
+from sqlmodel import Column, DateTime, Field, SQLModel
+from sqlalchemy import text
+
+
+class User(SQLModel, table=True):
+    """
+    User model representing application users.
+
+    Attributes:
+        id: Unique user identifier (UUID)
+        name: User's full name
+        email: User's email address (unique)
+        password_hash: Bcrypt hashed password
+        avatar_url: Optional Cloudinary avatar URL
+        created_at: Account creation timestamp
+        updated_at: Last update timestamp
+    """
+
+    __tablename__ = 'users'
+
+    id: UUID = Field(
+        default_factory=uuid4,
+        primary_key=True,
+        index=True,
+        description='Unique user identifier',
+    )
+    name: str = Field(max_length=255, description="User's full name")
+    email: str = Field(
+        unique=True,
+        index=True,
+        max_length=255,
+        description="User's email address",
+    )
+    password_hash: str = Field(max_length=255, description='Bcrypt hashed password', exclude=True)
+    avatar_url: Optional[str] = Field(
+        default=None, max_length=500, description='Cloudinary avatar URL'
+    )
+    created_at: datetime = Field(
+        default_factory=datetime.utcnow,
+        sa_column=Column(DateTime(), server_default=text('CURRENT_TIMESTAMP')),
+        description='Account creation timestamp',
+    )
+    updated_at: datetime = Field(
+        default_factory=datetime.utcnow,
+        sa_column=Column(
+            DateTime(),
+            server_default=text('CURRENT_TIMESTAMP'),
+            onupdate=text('CURRENT_TIMESTAMP'),
+        ),
+        description='Last update timestamp',
+    )
+
+    def __repr__(self) -> str:
+        return f'<User {self.id} {self.email}>'
+
+
+# Export for use in other modules
+__all__ = ['User']
diff --git a/backend/src/schemas/__init__.py b/backend/src/schemas/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/backend/src/schemas/auth.py b/backend/src/schemas/auth.py
new file mode 100644
index 0000000000000000000000000000000000000000..4c51d33191f63cb2d60fe1c64d75fa5c5fdaa659
--- /dev/null
+++ b/backend/src/schemas/auth.py
@@ -0,0 +1,61 @@
+"""
+Pydantic schemas for authentication operations.
+
+Used for request/response validation in auth endpoints.
+""" +from typing import Optional + +from pydantic import BaseModel, EmailStr, Field + + +class SignupRequest(BaseModel): + """Schema for user registration request.""" + + name: str = Field( + ..., + min_length=1, + max_length=255, + description="User's full name", + ) + email: EmailStr = Field( + ..., + description="User's email address", + ) + password: str = Field( + ..., + min_length=8, + max_length=128, + description="Password (min 8 characters, must include letter and number)", + ) + + +class LoginRequest(BaseModel): + """Schema for user login request.""" + + email: EmailStr = Field(..., description="User's email address") + password: str = Field(..., description="User's password") + + +class AuthResponse(BaseModel): + """Schema for authentication response.""" + + access_token: str = Field(..., description="JWT access token") + token_type: str = Field(default='bearer', description="Token type") + user: dict = Field(..., description="User information") + + +class LogoutResponse(BaseModel): + """Schema for logout response.""" + + message: str = Field(default='Successfully logged out', description="Logout message") + + +class ErrorResponse(BaseModel): + """Schema for error responses.""" + + detail: str = Field(..., description="Error message") + error_code: Optional[str] = Field(None, description="Error code for client handling") + + +# Export schemas +__all__ = ['SignupRequest', 'LoginRequest', 'AuthResponse', 'LogoutResponse', 'ErrorResponse'] diff --git a/backend/src/schemas/todo.py b/backend/src/schemas/todo.py new file mode 100644 index 0000000000000000000000000000000000000000..0858a667925e43d4234bf1a5965a597adc25fca0 --- /dev/null +++ b/backend/src/schemas/todo.py @@ -0,0 +1,60 @@ +""" +Todo schemas for request/response validation. 
+""" +from datetime import datetime +from typing import Optional, List +from pydantic import BaseModel, Field +from uuid import UUID + + +class TodoCreateRequest(BaseModel): + """Request schema for creating a todo.""" + + title: str = Field(..., min_length=1, max_length=500, description="Todo title") + description: Optional[str] = Field(None, max_length=5000, description="Detailed description") + priority: Optional[str] = Field("medium", pattern="^(low|medium|high)$", description="Priority level") + due_date: Optional[datetime] = Field(None, description="Due date") + tags: Optional[List[str]] = Field(None, description="Tags for categorization") + + +class TodoUpdateRequest(BaseModel): + """Request schema for updating a todo.""" + + title: Optional[str] = Field(None, min_length=1, max_length=500, description="Todo title") + description: Optional[str] = Field(None, max_length=5000, description="Detailed description") + priority: Optional[str] = Field(None, pattern="^(low|medium|high)$", description="Priority level") + due_date: Optional[datetime] = Field(None, description="Due date") + tags: Optional[List[str]] = Field(None, description="Tags for categorization") + + +class TodoResponse(BaseModel): + """Response schema for a todo.""" + + id: str + user_id: str + title: str + description: Optional[str] + status: str + priority: str + tags: Optional[List[str]] + due_date: Optional[str] + created_at: str + updated_at: str + + +class TodoListResponse(BaseModel): + """Response schema for todo list with pagination.""" + + todos: List[TodoResponse] + total: int + skip: int + limit: int + has_more: bool + + +__all__ = [ + "TodoCreateRequest", + "TodoUpdateRequest", + "TodoResponse", + "TodoListResponse", +] diff --git a/backend/src/schemas/user.py b/backend/src/schemas/user.py new file mode 100644 index 0000000000000000000000000000000000000000..e8195548e914c265f6cf7b8585368102e60acabf --- /dev/null +++ b/backend/src/schemas/user.py @@ -0,0 +1,66 @@ +""" +Pydantic schemas for 
User model. + +Used for request/response validation and serialization. +""" +from datetime import datetime +from typing import Optional +from uuid import UUID + +from pydantic import BaseModel, EmailStr, Field + + +class UserBase(BaseModel): + """Base user schema with common fields.""" + + name: str = Field(..., min_length=1, max_length=255, description="User's full name") + email: EmailStr = Field(..., description="User's email address") + + +class UserCreate(UserBase): + """Schema for creating a new user.""" + + password: str = Field( + ..., + min_length=8, + max_length=128, + description="User's password (min 8 characters)", + ) + + +class UserLogin(BaseModel): + """Schema for user login.""" + + email: EmailStr = Field(..., description="User's email address") + password: str = Field(..., description="User's password") + + +class UserUpdate(BaseModel): + """Schema for updating user profile.""" + + name: Optional[str] = Field(None, min_length=1, max_length=255) + avatar_url: Optional[str] = Field(None, max_length=500) + + +class UserProfileUpdateRequest(BaseModel): + """Schema for updating user profile (minimal).""" + + name: Optional[str] = Field(None, min_length=1, max_length=255, description="User's full name") + + +class UserResponse(UserBase): + """Schema for user response (excluding sensitive data).""" + + id: UUID = Field(..., description="User ID") + avatar_url: Optional[str] = Field(None, description="Avatar URL") + created_at: datetime = Field(..., description="Account creation timestamp") + updated_at: datetime = Field(..., description="Last update timestamp") + + class Config: + """Pydantic config.""" + + from_attributes = True # Enable ORM mode + + +# Export schemas +__all__ = ['UserBase', 'UserCreate', 'UserLogin', 'UserUpdate', 'UserResponse', 'UserProfileUpdateRequest'] diff --git a/backend/src/services/__init__.py b/backend/src/services/__init__.py new file mode 100644 index 
"""
AI Service for Hugging Face integration.

Provides todo generation, summarization, and prioritization features.
"""
import json
import os
from datetime import datetime, timedelta
from typing import List, Optional

from huggingface_hub import InferenceClient

from src.core.config import settings


def _extract_json_payload(response_text: str) -> str:
    """Strip optional Markdown code fences from a model response.

    Models often wrap JSON in ```json ... ``` fences even when asked not to;
    this accepts fenced or bare responses and returns the inner payload.
    Shared by generate_todos() and prioritize_todos().
    """
    text = response_text.strip()
    if "```json" in text:
        return text.split("```json")[1].split("```")[0].strip()
    if "```" in text:
        return text.split("```")[1].split("```")[0].strip()
    return text


class AIService:
    """Service for AI-powered todo features (generate / summarize / prioritize)."""

    # Models used per task; generation/prioritization need instruction-following,
    # summarization uses a dedicated summarization model.
    _GENERATION_MODEL = "mistralai/Mistral-7B-Instruct-v0.2"
    _SUMMARY_MODEL = "facebook/bart-large-cnn"

    def __init__(self):
        """Initialize AI service; client stays None when no API key is configured."""
        self.client: Optional[InferenceClient] = None
        if settings.huggingface_api_key:
            self.client = InferenceClient(token=settings.huggingface_api_key)

    def _generate_todos_prompt(self, goal: str) -> str:
        """Build the instruction prompt asking the model for a JSON todo list."""
        return f"""You are a task planning assistant. 
Generate 5-7 actionable, specific todo items for this goal: "{goal}"

Requirements:
- Each todo must be specific and actionable
- Include realistic due dates (relative: "tomorrow", "next week", "next month")
- Assign priority (low/medium/high)
- Return as JSON array with exact format below

Output format (JSON array):
{{
  "todos": [
    {{"title": "Research competitors", "description": "Analyze top 3 competitor features", "priority": "high", "due_date": "2025-01-25"}},
    {{"title": "Create wireframes", "description": "Sketch main dashboard screens", "priority": "medium", "due_date": "2025-01-26"}}
  ]
}}

Only return JSON, no other text."""

    def _summarize_todos_prompt(self, todos: List) -> str:
        """Build the prompt asking the model to summarize the given todos."""
        todos_text = "\n".join([f"- {t['title']}: {t.get('description', '')}" for t in todos])
        return f"""Summarize these {len(todos)} todo items into a concise overview:

{todos_text}

Provide:
- Total count breakdown by priority (high/medium/low)
- Top 3 most urgent items
- One sentence overall status

Keep under 100 words. Be concise and actionable."""

    def _prioritize_todos_prompt(self, todos: List) -> str:
        """Build the prompt asking the model to reorder todos by urgency."""
        todos_text = "\n".join([
            f"{i+1}. {t['title']} (Priority: {t.get('priority', 'medium')}, Due: {t.get('due_date', 'none')})"
            for i, t in enumerate(todos)
        ])
        return f"""You are a productivity expert. 
Reorder these todos by urgency and importance:

Current todos:
{todos_text}

Consider:
- Due dates (earlier = more urgent)
- Priority levels explicitly assigned
- Task dependencies

Return as ordered JSON array:
{{
  "todos": [
    {{"id": "1", "title": "...", "priority_score": 95, "reasoning": "Due tomorrow"}},
    {{"id": "2", "title": "...", "priority_score": 80, "reasoning": "High priority, due in 3 days"}}
  ]
}}

Only return JSON, no other text."""

    def generate_todos(self, goal: str) -> dict:
        """
        Generate todos from a goal using AI.

        Args:
            goal: User's goal to break down into todos

        Returns:
            Dict with generated todos and a human-readable message

        Raises:
            ValueError: If AI service is not configured or response is invalid
        """
        if not self.client:
            raise ValueError("AI service not configured. Please set HUGGINGFACE_API_KEY.")

        try:
            prompt = self._generate_todos_prompt(goal)
            response = self.client.text_generation(
                prompt,
                model=self._GENERATION_MODEL,
                max_new_tokens=500,
                temperature=0.7,
            )

            result = json.loads(_extract_json_payload(response))
            todos = result.get("todos", [])
            return {
                "todos": todos,
                "message": f"Generated {len(todos)} todos for your goal",
            }

        except json.JSONDecodeError as e:
            raise ValueError("Invalid AI response format. Please try again.") from e
        except Exception as e:
            raise ValueError(f"AI service error: {str(e)}") from e

    def summarize_todos(self, todos: List[dict]) -> dict:
        """
        Summarize todos using AI.

        Args:
            todos: List of todo dictionaries

        Returns:
            Dict with summary text, a priority breakdown, and up to 3 urgent titles

        Raises:
            ValueError: If AI service is not configured or the API call fails
        """
        if not self.client:
            raise ValueError("AI service not configured. Please set HUGGINGFACE_API_KEY.")

        if not todos:
            return {
                "summary": "No todos to summarize.",
                "breakdown": {"high_priority": 0, "medium_priority": 0, "low_priority": 0},
                "urgent_todos": [],
            }

        try:
            # Count todos per priority level.
            breakdown = {
                "high_priority": sum(1 for t in todos if t.get("priority") == "high"),
                "medium_priority": sum(1 for t in todos if t.get("priority") == "medium"),
                "low_priority": sum(1 for t in todos if t.get("priority") == "low"),
            }

            # Urgent = high priority, or due within 2 days.
            urgent = []
            for t in todos:
                if t.get("priority") == "high":
                    urgent.append(t.get("title", ""))
                elif t.get("due_date"):
                    try:
                        due = datetime.fromisoformat(t["due_date"].replace("Z", "+00:00"))
                        # BUGFIX: "Z"-suffixed dates parse tz-aware; comparing them to a
                        # naive datetime.now() raised TypeError, which the old bare
                        # `except: pass` silently swallowed — so due-date urgency never
                        # fired for UTC dates. Match awareness before comparing.
                        now = datetime.now(due.tzinfo) if due.tzinfo else datetime.now()
                        if due <= now + timedelta(days=2):
                            urgent.append(t.get("title", ""))
                    except (AttributeError, ValueError):
                        # Malformed or non-string due_date: best-effort, skip it.
                        pass

            prompt = self._summarize_todos_prompt(todos)
            summary = self.client.text_generation(
                prompt,
                model=self._SUMMARY_MODEL,
                max_new_tokens=200,
                temperature=0.5,
            )

            return {
                "summary": summary.strip(),
                "breakdown": breakdown,
                "urgent_todos": urgent[:3],  # Top 3 urgent
            }

        except Exception as e:
            raise ValueError(f"AI service error: {str(e)}") from e

    def prioritize_todos(self, todos: List[dict]) -> dict:
        """
        Prioritize todos using AI.

        Args:
            todos: List of todo dictionaries

        Returns:
            Dict with prioritized todos and a human-readable message

        Raises:
            ValueError: If AI service is not configured or response is invalid
        """
        if not self.client:
            raise ValueError("AI service not configured. Please set HUGGINGFACE_API_KEY.")

        if not todos:
            return {
                "prioritized_todos": [],
                "message": "No todos to prioritize",
            }

        try:
            prompt = self._prioritize_todos_prompt(todos)
            response = self.client.text_generation(
                prompt,
                model=self._GENERATION_MODEL,
                max_new_tokens=500,
                temperature=0.7,
            )

            result = json.loads(_extract_json_payload(response))
            ordered = result.get("todos", [])
            return {
                "prioritized_todos": ordered,
                "message": f"Prioritized {len(ordered)} todos",
            }

        except json.JSONDecodeError as e:
            raise ValueError("Invalid AI response format. Please try again.") from e
        except Exception as e:
            raise ValueError(f"AI service error: {str(e)}") from e


# Global AI service instance
ai_service = AIService()


__all__ = ['ai_service', 'AIService']


# ===========================================================================
# backend/src/services/auth_service.py
# ===========================================================================
# Authentication service for user management: user creation, authentication,
# and JWT token management.
import re
from typing import Optional
from uuid import UUID

from jose import JWTError
from sqlmodel import Session, select

from src.core.config import settings
from src.core.security import create_access_token, verify_password
from src.models.user import User
from src.schemas.user import UserCreate


def hash_password(password: str) -> str:
    """
    Hash a password using bcrypt.

    Args:
        password: Plain text password

    Returns:
        str: Hashed password

    Raises:
        ValueError: If password doesn't meet requirements (at least 8
            characters, one letter, and one digit)
    """
    if not password or len(password) < 8:
        raise ValueError('Password must be at least 8 characters long')

    if not re.search(r'[A-Za-z]', password):
        raise ValueError('Password must contain at least one letter')

    if not re.search(r'\d', password):
        raise ValueError('Password must contain at least one number')

    # Imported locally — presumably to avoid a circular import with
    # src.core.security; TODO confirm.
    from src.core.security import get_password_hash

    return get_password_hash(password)


def check_email_exists(db: Session, email: str) -> bool:
    """
    Check if an email already exists in the database (case-insensitive).

    Args:
        db: Database session
        email: Email to check

    Returns:
        bool: True if email exists, False otherwise
    """
    user = db.exec(select(User).where(User.email.ilike(email))).first()
    return user is not None


def create_user(db: Session, user_data: UserCreate) -> User:
    """
    Create a new user in the database.

    Args:
        db: Database session
        user_data: User creation data

    Returns:
        User: Created user object (refreshed, so DB-generated fields are set)

    Raises:
        ValueError: If email already exists or password is invalid
    """
    # Check if email already exists (case-insensitive)
    if check_email_exists(db, user_data.email):
        raise ValueError(f'Email {user_data.email} is already registered')

    # Hash password; hash_password already raises ValueError with a clear
    # message, so no re-wrapping is needed.
    password_hash = hash_password(user_data.password)

    # Normalize name/email before persisting (email stored lowercase).
    user = User(
        name=user_data.name.strip(),
        email=user_data.email.lower().strip(),
        password_hash=password_hash,
    )

    db.add(user)
    db.commit()
    db.refresh(user)

    return user


def authenticate_user(db: Session, email: str, password: str) -> Optional[User]:
    """
    Authenticate a user with email and password.

    Args:
        db: Database session
        email: User's email
        password: Plain text password

    Returns:
        User: User object if authentication successful, None otherwise
        (same result for unknown email and wrong password — no user enumeration)
    """
    # Find user by email (case-insensitive)
    user = db.exec(select(User).where(User.email.ilike(email))).first()

    if not user:
        return None

    if not verify_password(password, user.password_hash):
        return None

    return user


def create_user_token(user_id: UUID) -> str:
    """
    Create a JWT access token for a user.

    Args:
        user_id: User's UUID (stored in the token's 'sub' claim)

    Returns:
        str: JWT access token

    Raises:
        ValueError: If token creation fails
    """
    try:
        return create_access_token(data={'sub': str(user_id)})
    except JWTError as e:
        raise ValueError(f'Failed to create access token: {str(e)}')


def verify_user_token(token: str) -> Optional[UUID]:
    """
    Verify a JWT token and extract the user ID.

    Args:
        token: JWT access token

    Returns:
        UUID: User ID if the token is valid and unexpired, None otherwise
    """
    # Local import — presumably to avoid a circular dependency; TODO confirm.
    # (The previous version also imported decode_access_token but never used it.)
    from src.core.security import TokenData

    try:
        token_data = TokenData.from_token(token)
        if token_data and token_data.user_id and not token_data.is_expired():
            return UUID(token_data.user_id)
    except (JWTError, ValueError):
        return None

    return None


def get_user_by_id(db: Session, user_id: UUID) -> Optional[User]:
    """
    Get a user by ID.

    Args:
        db: Database session
        user_id: User's UUID

    Returns:
        User: User object if found, None otherwise
    """
    return db.get(User, user_id)


def get_user_by_email(db: Session, email: str) -> Optional[User]:
    """
    Get a user by email (case-insensitive).

    Args:
        db: Database session
        email: User's email

    Returns:
        User: User object if found, None otherwise
    """
    return db.exec(select(User).where(User.email.ilike(email))).first()


# Export for use in other modules
__all__ = [
    'hash_password',
    'check_email_exists',
    'create_user',
    'authenticate_user',
    'create_user_token',
    'verify_user_token',
    'get_user_by_id',
    'get_user_by_email',
]
+ +#HttpOnly_localhost FALSE / FALSE 1769932286 access_token eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiI5NjQ3MDViNC1jYzNiLTRmYTktYWY3Yy00OGVkNTdkZGNlNDQiLCJleHAiOjE3Njk5MzIyODZ9.CWtt77dZvCXg9P8SQHI-pQ3nzemeFu8rZuaAmIv6H9c diff --git a/docker-compose.override.yml.example b/docker-compose.override.yml.example new file mode 100644 index 0000000000000000000000000000000000000000..78a9a93a07961cdcd465618a198e9c58b27b457c --- /dev/null +++ b/docker-compose.override.yml.example @@ -0,0 +1,18 @@ +# Example docker-compose override for additional development tools +# Copy this file to docker-compose.override.yml and customize as needed + +version: '3.8' + +services: + # pgAdmin for PostgreSQL Management + pgadmin: + image: dpage/pgadmin4:latest + container_name: todo-app-pgadmin + restart: unless-stopped + environment: + PGADMIN_DEFAULT_EMAIL: admin@todoapp.local + PGADMIN_DEFAULT_PASSWORD: admin + ports: + - '5050:80' + depends_on: + - postgres diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000000000000000000000000000000000000..5452d27c2debcfd086a7f781ac2d4ce00e788c73 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,46 @@ +version: '3.8' + +services: + # PostgreSQL Database for Local Development + postgres: + image: postgres:16-alpine + container_name: todo-app-postgres + restart: unless-stopped + environment: + POSTGRES_USER: todoapp + POSTGRES_PASSWORD: todoapp_password + POSTGRES_DB: todoapp + ports: + - '5432:5432' + volumes: + - postgres_data:/var/lib/postgresql/data + healthcheck: + test: ['CMD-SHELL', 'pg_isready -U todoapp'] + interval: 10s + timeout: 5s + retries: 5 + + # Redis for Caching (Optional - for future use) + redis: + image: redis:7-alpine + container_name: todo-app-redis + restart: unless-stopped + ports: + - '6379:6379' + volumes: + - redis_data:/data + healthcheck: + test: ['CMD', 'redis-cli', 'ping'] + interval: 10s + timeout: 3s + retries: 5 + +volumes: + postgres_data: + driver: local + redis_data: + driver: 
local + +networks: + default: + name: todo-app-network diff --git a/frontend/.env.example b/frontend/.env.example new file mode 100644 index 0000000000000000000000000000000000000000..744582477f6c40890d7ec88ca1bd9dd8f44096bd --- /dev/null +++ b/frontend/.env.example @@ -0,0 +1,20 @@ +# ======================================== +# API Configuration +# ======================================== +# Backend API URL (change for production) +NEXT_PUBLIC_API_URL=http://localhost:8000 + +# ======================================== +# Application Settings +# ======================================== +# Environment: development, staging, production +NEXT_PUBLIC_APP_ENV=development + +# ======================================== +# Feature Flags +# ======================================== +# Enable AI features (requires HUGGINGFACE_API_KEY in backend) +NEXT_PUBLIC_ENABLE_AI=true + +# Enable analytics +NEXT_PUBLIC_ENABLE_ANALYTICS=false diff --git a/frontend/.eslintrc.json b/frontend/.eslintrc.json new file mode 100644 index 0000000000000000000000000000000000000000..95ac2c6210f40351fde224281132ca207ff51fdc --- /dev/null +++ b/frontend/.eslintrc.json @@ -0,0 +1,13 @@ +{ + "extends": [ + "next/core-web-vitals", + "prettier" + ], + "rules": { + "react/no-unescaped-entities": "off", + "@next/next/no-page-custom-font": "off", + "prefer-const": "error", + "no-unused-vars": "off", + "@typescript-eslint/no-unused-vars": "off" + } +} diff --git a/frontend/.prettierrc b/frontend/.prettierrc new file mode 100644 index 0000000000000000000000000000000000000000..e999e95bc054ea7cff74b02d140ecab385652933 --- /dev/null +++ b/frontend/.prettierrc @@ -0,0 +1,8 @@ +{ + "semi": false, + "singleQuote": true, + "tabWidth": 2, + "trailingComma": "es5", + "printWidth": 100, + "plugins": ["prettier-plugin-tailwindcss"] +} diff --git a/frontend/README.md b/frontend/README.md new file mode 100644 index 0000000000000000000000000000000000000000..84ea7558fd89a57e6a199f22b43f3733c4970d03 --- /dev/null +++ 
b/frontend/README.md @@ -0,0 +1,191 @@ +# Todo App Frontend - Phase 2 + +Next.js 14 frontend for the Todo SaaS application with premium UI and authentication. + +## Tech Stack + +- **Next.js 14** - React framework with App Router +- **TypeScript** - Type-safe development +- **Tailwind CSS** - Utility-first CSS framework +- **shadcn/ui** - Premium UI components +- **Framer Motion** - Smooth animations +- **next-themes** - Dark/light theme support + +## Setup + +### 1. Install dependencies + +```bash +npm install +``` + +### 2. Setup environment + +```bash +cp .env.example .env.local +# Edit .env.local with your configuration +``` + +### 3. Start development server + +```bash +npm run dev +``` + +App will be available at: http://localhost:3000 + +## Project Structure + +``` +frontend/ +├── src/ +│ ├── app/ # Next.js App Router pages +│ ├── components/ # React components +│ │ └── ui/ # shadcn/ui components +│ ├── hooks/ # Custom React hooks +│ ├── lib/ # Utility functions +│ ├── styles/ # Global styles +│ └── types/ # TypeScript type definitions +├── public/ # Static assets +└── package.json # Dependencies and scripts +``` + +## Available Scripts + +```bash +# Development +npm run dev # Start dev server + +# Building +npm run build # Build for production +npm run start # Start production server + +# Testing +npm test # Run unit tests +npm run test:watch # Watch mode +npm run test:e2e # Run E2E tests with Playwright + +# Code Quality +npm run lint # Run ESLint +npm run lint:fix # Fix ESLint issues +npm run format # Format with Prettier +npm run type-check # TypeScript type checking +``` + +## Features + +### Authentication +- Login with email/password +- User registration with validation +- Secure JWT token storage +- Auto-redirect based on auth state + +### Todo Management +- Create, edit, delete todos +- Mark todos as complete +- Filter by status +- Search todos +- Sort by date, priority + +### User Profile +- View and edit profile +- Upload avatar (Cloudinary) +- 
Update name and email + +### AI Features +- Generate todos from text +- Summarize tasks +- Prioritize tasks + +### UI/UX +- Dark/light theme toggle +- Smooth animations +- Mobile responsive +- Loading states +- Error handling + +## Environment Variables + +See `.env.example` for required environment variables: + +```env +NEXT_PUBLIC_API_URL=http://localhost:8000 +NEXT_PUBLIC_APP_ENV=development +NEXT_PUBLIC_ENABLE_AI=true +``` + +## Component Library + +This project uses [shadcn/ui](https://ui.shadcn.com/) for premium UI components. + +### Adding new components + +```bash +npx shadcn-ui@latest add [component-name] +``` + +Available components: +- Button, Input, Label, Card +- Dialog, Dropdown Menu, Select +- Tabs, Switch, Avatar +- Toast, and more... + +## State Management + +- React Context for auth state +- React hooks for local state +- Server Components for data fetching +- Client Components for interactivity + +## Styling + +- **Tailwind CSS** - Utility classes +- **CSS Variables** - Theme customization +- **Framer Motion** - Animations +- **shadcn/ui** - Pre-built components + +## Testing + +```bash +# Unit tests +npm test + +# E2E tests +npm run test:e2e + +# E2E with UI +npm run e2e:ui + +# E2E debug mode +npm run e2e:debug +``` + +## Building for Production + +```bash +# Build +npm run build + +# Test production build locally +npm run start +``` + +## Deployment + +This app is designed to be deployed on **Vercel**: + +1. Push code to GitHub +2. Import project in Vercel +3. Configure environment variables +4. 
Deploy + +## Browser Support + +- Chrome (last 2 versions) +- Firefox (last 2 versions) +- Safari (last 2 versions) +- Edge (last 2 versions) + +## License + +MIT diff --git a/frontend/components.json b/frontend/components.json new file mode 100644 index 0000000000000000000000000000000000000000..654b1016a1fcbdd6cb7b954027f960b9bf580f82 --- /dev/null +++ b/frontend/components.json @@ -0,0 +1,20 @@ +{ + "$schema": "https://ui.shadcn.com/schema.json", + "style": "new-york", + "rsc": true, + "tsx": true, + "tailwind": { + "config": "tailwind.config.ts", + "css": "src/app/globals.css", + "baseColor": "slate", + "cssVariables": true, + "prefix": "" + }, + "aliases": { + "components": "@/components", + "utils": "@/lib/utils", + "ui": "@/components/ui", + "lib": "@/lib", + "hooks": "@/hooks" + } +} diff --git a/frontend/jest.setup.js b/frontend/jest.setup.js new file mode 100644 index 0000000000000000000000000000000000000000..d15902fa78826aa5dec87a73b1c6582af0fbf0cb --- /dev/null +++ b/frontend/jest.setup.js @@ -0,0 +1,2 @@ +// Learn more: https://github.com/testing-library/jest-dom +import '@testing-library/jest-dom'; diff --git a/frontend/next-env.d.ts b/frontend/next-env.d.ts new file mode 100644 index 0000000000000000000000000000000000000000..4f11a03dc6cc37f2b5105c08f2e7b24c603ab2f4 --- /dev/null +++ b/frontend/next-env.d.ts @@ -0,0 +1,5 @@ +/// +/// + +// NOTE: This file should not be edited +// see https://nextjs.org/docs/basic-features/typescript for more information. 
diff --git a/frontend/next.config.js b/frontend/next.config.js new file mode 100644 index 0000000000000000000000000000000000000000..06fd9ee26879eb1e3cf5ee98eb3254920d07576e --- /dev/null +++ b/frontend/next.config.js @@ -0,0 +1,67 @@ +/** @type {import('next').NextConfig} */ +const nextConfig = { + reactStrictMode: true, + swcMinify: true, + + env: { + NEXT_PUBLIC_API_URL: process.env.NEXT_PUBLIC_API_URL || 'http://localhost:8000', + }, + + // Enable experimental features for better performance + experimental: { + optimizePackageImports: ['lucide-react', '@radix-ui/react-icons'], + }, + + // Image optimization + images: { + remotePatterns: [ + { + protocol: 'https', + hostname: 'res.cloudinary.com', + }, + ], + }, + + // Webpack configuration + webpack: (config) => { + config.externals = [...(config.externals || []), { canvas: 'canvas' }]; + + // Handle Windows case sensitivity issues + config.resolve.symlinks = false; + config.snapshot = { + ...config.snapshot, + managedPaths: [/^(.+?[\\/]node_modules[\\/])/], + }; + + return config; + }, + + // Headers for security + async headers() { + return [ + { + source: '/:path*', + headers: [ + { + key: 'X-DNS-Prefetch-Control', + value: 'on' + }, + { + key: 'X-Frame-Options', + value: 'SAMEORIGIN' + }, + { + key: 'X-Content-Type-Options', + value: 'nosniff' + }, + { + key: 'Referrer-Policy', + value: 'origin-when-cross-origin' + }, + ], + }, + ]; + }, +}; + +module.exports = nextConfig; diff --git a/frontend/package.json b/frontend/package.json new file mode 100644 index 0000000000000000000000000000000000000000..048166c5a4733014e789acbf498b829b3fd6af2c --- /dev/null +++ b/frontend/package.json @@ -0,0 +1,63 @@ +{ + "name": "todo-app-frontend", + "version": "0.1.0", + "private": true, + "scripts": { + "dev": "next dev", + "build": "next build", + "start": "next start", + "lint": "next lint", + "lint:fix": "next lint --fix", + "format": "prettier --write \"**/*.{js,jsx,ts,tsx,json,css,md}\"", + "format:check": "prettier 
--check \"**/*.{js,jsx,ts,tsx,json,css,md}\"", + "type-check": "tsc --noEmit", + "test": "jest", + "test:watch": "jest --watch", + "test:coverage": "jest --coverage", + "test:e2e": "playwright test", + "e2e:ui": "playwright test --ui", + "e2e:debug": "playwright test --debug" + }, + "dependencies": { + "next": "14.1.0", + "react": "^18.2.0", + "react-dom": "^18.2.0", + "@radix-ui/react-avatar": "^1.0.4", + "@radix-ui/react-dialog": "^1.0.5", + "@radix-ui/react-dropdown-menu": "^2.0.6", + "@radix-ui/react-label": "^2.0.2", + "@radix-ui/react-select": "^2.0.0", + "@radix-ui/react-slot": "^1.0.2", + "@radix-ui/react-switch": "^1.0.3", + "@radix-ui/react-tabs": "^1.0.4", + "@radix-ui/react-toast": "^1.1.5", + "framer-motion": "^11.0.0", + "next-themes": "^0.2.1", + "class-variance-authority": "^0.7.0", + "clsx": "^2.1.0", + "tailwind-merge": "^2.2.0", + "tailwindcss-animate": "^1.0.7", + "lucide-react": "^0.323.0", + "zod": "^3.22.0", + "date-fns": "^3.0.0" + }, + "devDependencies": { + "@types/node": "^20", + "@types/react": "^18", + "@types/react-dom": "^18", + "typescript": "^5", + "tailwindcss": "^3.4.0", + "postcss": "^8", + "autoprefixer": "^10.0.1", + "eslint": "^8", + "eslint-config-next": "14.1.0", + "eslint-config-prettier": "^9.1.0", + "prettier": "^3.2.0", + "prettier-plugin-tailwindcss": "^0.5.0", + "@testing-library/react": "^14.0.0", + "@testing-library/jest-dom": "^6.0.0", + "@playwright/test": "^1.40.0", + "jest": "^29.0.0", + "jest-environment-jsdom": "^29.0.0" + } +} diff --git a/frontend/postcss.config.js b/frontend/postcss.config.js new file mode 100644 index 0000000000000000000000000000000000000000..12a703d900da8159c30e75acbd2c4d87ae177f62 --- /dev/null +++ b/frontend/postcss.config.js @@ -0,0 +1,6 @@ +module.exports = { + plugins: { + tailwindcss: {}, + autoprefixer: {}, + }, +}; diff --git a/frontend/src/app/ai/page.tsx b/frontend/src/app/ai/page.tsx new file mode 100644 index 
0000000000000000000000000000000000000000..c8cbc8d9828da6915f9a99317dbba85d48586a35 --- /dev/null +++ b/frontend/src/app/ai/page.tsx @@ -0,0 +1,391 @@ +'use client'; + +import { useState, useEffect } from 'react'; +import { useRouter } from 'next/navigation'; +import { motion } from 'framer-motion'; +import { useAuth } from '@/hooks/use-auth'; +import { Header } from '@/components/common/Header'; +import { aiApi } from '@/lib/api'; +import { Loader2, Sparkles, CheckCircle, List, TrendingUp } from 'lucide-react'; + +type TabType = 'generate' | 'summarize' | 'prioritize'; + +export default function AIPage() { + const router = useRouter(); + const { user, loading: authLoading, isAuthenticated } = useAuth(); + const [activeTab, setActiveTab] = useState('generate'); + const [goal, setGoal] = useState(''); + const [loading, setLoading] = useState(false); + const [result, setResult] = useState(null); + const [error, setError] = useState(''); + + // Redirect to login if not authenticated + useEffect(() => { + if (!authLoading && !isAuthenticated) { + router.push('/login'); + } + }, [authLoading, isAuthenticated, router]); + + const handleGenerate = async () => { + if (!goal.trim()) return; + + setLoading(true); + setError(''); + setResult(null); + + try { + const response = await aiApi.generateTodos(goal); + setResult(response); + } catch (err: any) { + setError(err.message || 'Failed to generate todos. Please try again.'); + } finally { + setLoading(false); + } + }; + + const handleSummarize = async () => { + setLoading(true); + setError(''); + setResult(null); + + try { + const response = await aiApi.summarize(); + setResult(response); + } catch (err: any) { + setError(err.message || 'Failed to summarize todos. 
Please try again.'); + } finally { + setLoading(false); + } + }; + + const handlePrioritize = async () => { + setLoading(true); + setError(''); + setResult(null); + + try { + const response = await aiApi.prioritize(); + setResult(response); + } catch (err: any) { + setError(err.message || 'Failed to prioritize todos. Please try again.'); + } finally { + setLoading(false); + } + }; + + const handleTabAction = () => { + if (activeTab === 'generate' && goal.trim()) { + handleGenerate(); + } else if (activeTab === 'summarize') { + handleSummarize(); + } else if (activeTab === 'prioritize') { + handlePrioritize(); + } + }; + + if (authLoading) { + return ( +
+ +
+ ); + } + + if (!isAuthenticated) { + return null; + } + + return ( +
+
+ +
+ +
+ +

+ AI Assistant +

+
+

+ Let AI help you manage your tasks more effectively +

+ + {/* Tabs */} +
+ + + +
+ + {/* Content */} +
+ {activeTab === 'generate' && ( +
+
+ +