Claude Code - Backend Implementation Specialist Claude Sonnet 4.5 committed on
Commit
b93a6a5
·
1 Parent(s): 56e51a9

Add complete AI Todo Chatbot backend application

Browse files

- FastAPI backend with JWT authentication
- AI-powered chat using Cohere API
- Task management with subtasks
- Password reset functionality
- MCP server integration
- Docker configuration for Hugging Face Spaces

Co-Authored-By: Claude Sonnet 4.5 <noreply@anthropic.com>

This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. .env.example +55 -0
  2. .gitignore +12 -0
  3. Dockerfile +25 -0
  4. Procfile +1 -0
  5. alembic.ini +114 -0
  6. alembic/env.py +94 -0
  7. alembic/script.py.mako +24 -0
  8. alembic/versions/001_initial_schema.py +52 -0
  9. alembic/versions/002_add_password_reset_tokens.py +40 -0
  10. alembic/versions/003_ai_chatbot_tables.py +56 -0
  11. alembic/versions/a6878af5b66f_add_category_and_due_date_to_tasks.py +30 -0
  12. api/index.py +19 -0
  13. api/test.py +45 -0
  14. init_db.py +9 -0
  15. migrate_db.py +36 -0
  16. requirements.txt +12 -0
  17. src/__pycache__/database.cpython-314.pyc +0 -0
  18. src/__pycache__/main.cpython-314.pyc +0 -0
  19. src/agents/__init__.py +0 -0
  20. src/agents/__pycache__/__init__.cpython-314.pyc +0 -0
  21. src/agents/__pycache__/cohere_client.cpython-314.pyc +0 -0
  22. src/agents/__pycache__/orchestrator.cpython-314.pyc +0 -0
  23. src/agents/cohere_client.py +168 -0
  24. src/agents/orchestrator.py +163 -0
  25. src/api/__init__.py +1 -0
  26. src/api/__pycache__/__init__.cpython-314.pyc +0 -0
  27. src/api/__pycache__/auth.cpython-314.pyc +0 -0
  28. src/api/__pycache__/chat.cpython-314.pyc +0 -0
  29. src/api/__pycache__/password_reset.cpython-314.pyc +0 -0
  30. src/api/__pycache__/subtasks.cpython-314.pyc +0 -0
  31. src/api/__pycache__/tasks.cpython-314.pyc +0 -0
  32. src/api/ai.py +228 -0
  33. src/api/auth.py +155 -0
  34. src/api/chat.py +210 -0
  35. src/api/password_reset.py +233 -0
  36. src/api/subtasks.py +230 -0
  37. src/api/tasks.py +278 -0
  38. src/config/__init__.py +0 -0
  39. src/config/__pycache__/__init__.cpython-314.pyc +0 -0
  40. src/config/__pycache__/logging.cpython-314.pyc +0 -0
  41. src/config/logging.py +63 -0
  42. src/database.py +59 -0
  43. src/main.py +82 -0
  44. src/main_minimal.py +21 -0
  45. src/mcp/__init__.py +0 -0
  46. src/mcp/__pycache__/__init__.cpython-314.pyc +0 -0
  47. src/mcp/__pycache__/server.cpython-314.pyc +0 -0
  48. src/mcp/server.py +138 -0
  49. src/mcp/tools/__init__.py +65 -0
  50. src/mcp/tools/__pycache__/__init__.cpython-314.pyc +0 -0
.env.example ADDED
@@ -0,0 +1,55 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Backend Environment Variables
2
+ # Copy this file to .env and fill in your values
3
+
4
+ # Database Configuration
5
+ DATABASE_URL=sqlite:///./todo.db
6
+ # For production, use PostgreSQL:
7
+ # DATABASE_URL=postgresql://user:password@host:5432/database
8
+
9
+ # JWT Configuration
10
+ JWT_SECRET_KEY=your-super-secret-key-change-this-min-32-characters-long
11
+ JWT_ALGORITHM=HS256
12
+ ACCESS_TOKEN_EXPIRE_MINUTES=30
13
+
14
+ # CORS Configuration
15
+ CORS_ORIGINS=http://localhost:3000,http://localhost:3001,http://localhost:3002
16
+ # For production, add your frontend URL:
17
+ # CORS_ORIGINS=https://your-frontend-url.com,http://localhost:3000
18
+
19
+ # Gmail SMTP Configuration (for password reset emails)
20
+ # To get app-specific password:
21
+ # 1. Enable 2-Factor Authentication on your Gmail account
22
+ # 2. Go to Google Account → Security → 2-Step Verification → App passwords
23
+ # 3. Select "Mail" and "Other (Custom name)"
24
+ # 4. Copy the 16-character password
25
+ SMTP_HOST=smtp.gmail.com
26
+ SMTP_PORT=587
27
+ SMTP_USERNAME=your_email@gmail.com
28
+ SMTP_PASSWORD=your_app_specific_password_here
29
+ SMTP_USE_TLS=true
30
+ EMAIL_FROM=your_email@gmail.com
31
+ EMAIL_FROM_NAME=Todo Application
32
+
33
+ # Frontend URL (for password reset links)
34
+ FRONTEND_URL=http://localhost:3000
35
+ # For production:
36
+ # FRONTEND_URL=https://your-frontend-url.com
37
+
38
+ # Password Reset Configuration
39
+ PASSWORD_RESET_TOKEN_EXPIRY_MINUTES=15
40
+ PASSWORD_RESET_MAX_REQUESTS_PER_HOUR=3
41
+
42
+ # Cohere AI API Configuration
43
+ # Get your API key from: https://dashboard.cohere.com/api-keys
44
+ COHERE_API_KEY=your-cohere-api-key-here
45
+
46
+ # AI Chatbot Configuration
47
+ # Cohere model settings for conversational AI
48
+ COHERE_MODEL=command-r-plus
49
+ COHERE_TEMPERATURE=0.3
50
+ COHERE_MAX_TOKENS=2000
51
+ COHERE_TIMEOUT=30
52
+
53
+ # MCP Tools Configuration
54
+ MCP_SERVER_NAME=todo-tools
55
+ MCP_SERVER_VERSION=1.0.0
.gitignore ADDED
@@ -0,0 +1,12 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ .vercel
2
+ .env*.local
3
+
4
+ # Test data and tokens
5
+ *.token.json
6
+ *_token.json
7
+ token.txt
8
+ *_test.json
9
+ *_response.json
10
+ signin_*.json
11
+ signup_*.json
12
+ fresh_token.json
Dockerfile ADDED
@@ -0,0 +1,25 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# Use Python 3.11 slim image
FROM python:3.11-slim

# Set working directory
WORKDIR /app

# Copy requirements first for better caching
COPY requirements.txt .

# Install Python dependencies
RUN pip install --no-cache-dir -r requirements.txt

# Copy application code
COPY src ./src
COPY api ./api
COPY alembic ./alembic
COPY alembic.ini .
COPY init_db.py .

# SECURITY: .env.example contains placeholder values only. Baking it in as
# .env means real secrets (JWT key, SMTP password, Cohere API key) must be
# injected at runtime via environment variables / Space secrets, which take
# precedence. Never COPY a real .env into the image — it would ship secrets
# inside every layer of the published image.
COPY .env.example .env

# Expose port (Hugging Face Spaces routes traffic to 7860)
EXPOSE 7860

# Run the application
CMD ["uvicorn", "src.main:app", "--host", "0.0.0.0", "--port", "7860"]
Procfile ADDED
@@ -0,0 +1 @@
 
 
1
+ web: uvicorn src.main_minimal:app --host 0.0.0.0 --port $PORT
alembic.ini ADDED
@@ -0,0 +1,114 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # A generic, single database configuration.
2
+
3
+ [alembic]
4
+ # path to migration scripts
5
+ script_location = alembic
6
+
7
+ # template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
8
+ # Uncomment the line below if you want the files to be prepended with date and time
9
+ # file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
10
+
11
+ # sys.path path, will be prepended to sys.path if present.
12
+ # defaults to the current working directory.
13
+ prepend_sys_path = .
14
+
15
+ # timezone to use when rendering the date within the migration file
16
+ # as well as the filename.
17
+ # If specified, requires the python>=3.9 or backports.zoneinfo library.
18
+ # Any required deps can installed by adding `alembic[tz]` to the pip requirements
19
+ # string value is passed to ZoneInfo()
20
+ # leave blank for localtime
21
+ # timezone =
22
+
23
+ # max length of characters to apply to the
24
+ # "slug" field
25
+ # truncate_slug_length = 40
26
+
27
+ # set to 'true' to run the environment during
28
+ # the 'revision' command, regardless of autogenerate
29
+ # revision_environment = false
30
+
31
+ # set to 'true' to allow .pyc and .pyo files without
32
+ # a source .py file to be detected as revisions in the
33
+ # versions/ directory
34
+ # sourceless = false
35
+
36
+ # version location specification; This defaults
37
+ # to alembic/versions. When using multiple version
38
+ # directories, initial revisions must be specified with --version-path.
39
+ # The path separator used here should be the separator specified by "version_path_separator" below.
40
+ # version_locations = %(here)s/bar:%(here)s/bat:alembic/versions
41
+
42
+ # version path separator; As mentioned above, this is the character used to split
43
+ # version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
44
+ # If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
45
+ # Valid values for version_path_separator are:
46
+ #
47
+ # version_path_separator = :
48
+ # version_path_separator = ;
49
+ # version_path_separator = space
50
+ version_path_separator = os # Use os.pathsep. Default configuration used for new projects.
51
+
52
+ # set to 'true' to search source files recursively
53
+ # in each "version_locations" directory
54
+ # new in Alembic version 1.10
55
+ # recursive_version_locations = false
56
+
57
+ # the output encoding used when revision files
58
+ # are written from script.py.mako
59
+ # output_encoding = utf-8
60
+
61
+ sqlalchemy.url = postgresql://postgres:postgres@localhost:5432/todo_db
62
+
63
+
64
+ [post_write_hooks]
65
+ # post_write_hooks defines scripts or Python functions that are run
66
+ # on newly generated revision scripts. See the documentation for further
67
+ # detail and examples
68
+
69
+ # format using "black" - use the console_scripts runner, against the "black" entrypoint
70
+ # hooks = black
71
+ # black.type = console_scripts
72
+ # black.entrypoint = black
73
+ # black.options = -l 79 REVISION_SCRIPT_FILENAME
74
+
75
+ # lint with attempts to fix using "ruff" - use the exec runner, execute a binary
76
+ # hooks = ruff
77
+ # ruff.type = exec
78
+ # ruff.executable = %(here)s/.venv/bin/ruff
79
+ # ruff.options = --fix REVISION_SCRIPT_FILENAME
80
+
81
+ # Logging configuration
82
+ [loggers]
83
+ keys = root,sqlalchemy,alembic
84
+
85
+ [handlers]
86
+ keys = console
87
+
88
+ [formatters]
89
+ keys = generic
90
+
91
+ [logger_root]
92
+ level = WARN
93
+ handlers = console
94
+ qualname =
95
+
96
+ [logger_sqlalchemy]
97
+ level = WARN
98
+ handlers =
99
+ qualname = sqlalchemy.engine
100
+
101
+ [logger_alembic]
102
+ level = INFO
103
+ handlers =
104
+ qualname = alembic
105
+
106
+ [handler_console]
107
+ class = StreamHandler
108
+ args = (sys.stderr,)
109
+ level = NOTSET
110
+ formatter = generic
111
+
112
+ [formatter_generic]
113
+ format = %(levelname)-5.5s [%(name)s] %(message)s
114
+ datefmt = %H:%M:%S
alembic/env.py ADDED
@@ -0,0 +1,94 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
"""Alembic migration environment for the Todo application.

Wires SQLModel metadata into Alembic and resolves the database URL from the
environment (falling back to the URL in alembic.ini) before running
migrations in offline or online mode.
"""
import os
import sys
from logging.config import fileConfig

from alembic import context
from sqlalchemy import engine_from_config, pool

# Make the project root importable so `src.*` modules resolve.
sys.path.insert(0, os.path.dirname(os.path.dirname(__file__)))

# Imported for their side effect: registering tables on SQLModel.metadata.
from src.models.user import User  # noqa: F401
from src.models.task import Task  # noqa: F401
from sqlmodel import SQLModel

from dotenv import load_dotenv

load_dotenv()

# Alembic Config object exposing values from the .ini file in use.
config = context.config

# A DATABASE_URL in the environment wins over the URL baked into alembic.ini.
env_url = os.getenv("DATABASE_URL")
if env_url:
    config.set_main_option("sqlalchemy.url", env_url)

# Configure Python logging from the ini file, when one is in use.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# Metadata consulted by `alembic revision --autogenerate`.
target_metadata = SQLModel.metadata


def run_migrations_offline() -> None:
    """Emit migration SQL as a script without connecting to a database.

    Configures the context with just a URL (no Engine/DBAPI needed);
    context.execute() calls are written to the script output.
    """
    context.configure(
        url=config.get_main_option("sqlalchemy.url"),
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online() -> None:
    """Run migrations against a live database connection."""
    connectable = engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    with connectable.connect() as connection:
        context.configure(connection=connection, target_metadata=target_metadata)

        with context.begin_transaction():
            context.run_migrations()


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
alembic/script.py.mako ADDED
@@ -0,0 +1,24 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """${message}
2
+
3
+ Revision ID: ${up_revision}
4
+ Revises: ${down_revision | comma,n}
5
+ Create Date: ${create_date}
6
+
7
+ """
8
+ from alembic import op
9
+ import sqlalchemy as sa
10
+ ${imports if imports else ""}
11
+
12
+ # revision identifiers, used by Alembic.
13
+ revision = ${repr(up_revision)}
14
+ down_revision = ${repr(down_revision)}
15
+ branch_labels = ${repr(branch_labels)}
16
+ depends_on = ${repr(depends_on)}
17
+
18
+
19
+ def upgrade() -> None:
20
+ ${upgrades if upgrades else "pass"}
21
+
22
+
23
+ def downgrade() -> None:
24
+ ${downgrades if downgrades else "pass"}
alembic/versions/001_initial_schema.py ADDED
@@ -0,0 +1,52 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Initial schema
2
+
3
+ Revision ID: 001
4
+ Revises:
5
+ Create Date: 2026-02-05
6
+
7
+ """
8
+ from alembic import op
9
+ import sqlalchemy as sa
10
+ from sqlalchemy.dialects import postgresql
11
+
12
+ # revision identifiers, used by Alembic.
13
+ revision = '001'
14
+ down_revision = None
15
+ branch_labels = None
16
+ depends_on = None
17
+
18
+
19
+ def upgrade() -> None:
20
+ # Create users table
21
+ op.create_table(
22
+ 'users',
23
+ sa.Column('id', sa.Integer(), nullable=False),
24
+ sa.Column('email', sa.String(length=255), nullable=False),
25
+ sa.Column('hashed_password', sa.String(length=255), nullable=False),
26
+ sa.Column('created_at', sa.DateTime(), nullable=False),
27
+ sa.Column('updated_at', sa.DateTime(), nullable=False),
28
+ sa.PrimaryKeyConstraint('id')
29
+ )
30
+ op.create_index('idx_users_email', 'users', ['email'], unique=True)
31
+
32
+ # Create tasks table
33
+ op.create_table(
34
+ 'tasks',
35
+ sa.Column('id', sa.Integer(), nullable=False),
36
+ sa.Column('user_id', sa.Integer(), nullable=False),
37
+ sa.Column('title', sa.String(length=500), nullable=False),
38
+ sa.Column('description', sa.Text(), nullable=True),
39
+ sa.Column('completed', sa.Boolean(), nullable=False, server_default='false'),
40
+ sa.Column('created_at', sa.DateTime(), nullable=False),
41
+ sa.Column('updated_at', sa.DateTime(), nullable=False),
42
+ sa.PrimaryKeyConstraint('id'),
43
+ sa.ForeignKeyConstraint(['user_id'], ['users.id'], ondelete='CASCADE')
44
+ )
45
+ op.create_index('idx_tasks_user_id', 'tasks', ['user_id'], unique=False)
46
+
47
+
48
+ def downgrade() -> None:
49
+ op.drop_index('idx_tasks_user_id', table_name='tasks')
50
+ op.drop_table('tasks')
51
+ op.drop_index('idx_users_email', table_name='users')
52
+ op.drop_table('users')
alembic/versions/002_add_password_reset_tokens.py ADDED
@@ -0,0 +1,40 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Add password reset tokens table
2
+
3
+ Revision ID: 002
4
+ Revises: a6878af5b66f
5
+ Create Date: 2026-02-07
6
+
7
+ """
8
+ from alembic import op
9
+ import sqlalchemy as sa
10
+
11
+ # revision identifiers, used by Alembic.
12
+ revision = '002'
13
+ down_revision = 'a6878af5b66f'
14
+ branch_labels = None
15
+ depends_on = None
16
+
17
+
18
+ def upgrade() -> None:
19
+ # Create password_reset_tokens table
20
+ op.create_table(
21
+ 'password_reset_tokens',
22
+ sa.Column('id', sa.Integer(), nullable=False),
23
+ sa.Column('user_id', sa.Integer(), nullable=False),
24
+ sa.Column('token', sa.String(length=255), nullable=False),
25
+ sa.Column('expires_at', sa.DateTime(), nullable=False),
26
+ sa.Column('used', sa.Boolean(), nullable=False, server_default='false'),
27
+ sa.Column('created_at', sa.DateTime(), nullable=False),
28
+ sa.PrimaryKeyConstraint('id'),
29
+ sa.ForeignKeyConstraint(['user_id'], ['users.id'], ondelete='CASCADE')
30
+ )
31
+ op.create_index('idx_password_reset_tokens_user_id', 'password_reset_tokens', ['user_id'], unique=False)
32
+ op.create_index('idx_password_reset_tokens_token', 'password_reset_tokens', ['token'], unique=True)
33
+ op.create_index('idx_password_reset_tokens_expires_at', 'password_reset_tokens', ['expires_at'], unique=False)
34
+
35
+
36
+ def downgrade() -> None:
37
+ op.drop_index('idx_password_reset_tokens_expires_at', table_name='password_reset_tokens')
38
+ op.drop_index('idx_password_reset_tokens_token', table_name='password_reset_tokens')
39
+ op.drop_index('idx_password_reset_tokens_user_id', table_name='password_reset_tokens')
40
+ op.drop_table('password_reset_tokens')
alembic/versions/003_ai_chatbot_tables.py ADDED
@@ -0,0 +1,56 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Alembic migration: Add conversations and messages tables for AI chatbot.
3
+
4
+ Revision ID: 003_ai_chatbot_tables
5
+ Revises: 002_add_password_reset_tokens
6
+ Create Date: 2026-02-15
7
+ """
8
+ from alembic import op
9
+ import sqlalchemy as sa
10
+ from sqlalchemy.dialects import postgresql
11
+
12
+ # revision identifiers
13
+ revision = '003_ai_chatbot_tables'
14
+ down_revision = '002'
15
+ branch_labels = None
16
+ depends_on = None
17
+
18
+
19
+ def upgrade():
20
+ # Create conversations table
21
+ op.create_table(
22
+ 'conversations',
23
+ sa.Column('id', sa.Integer(), nullable=False),
24
+ sa.Column('user_id', sa.Integer(), nullable=False),
25
+ sa.Column('created_at', sa.DateTime(), nullable=False, server_default=sa.text('CURRENT_TIMESTAMP')),
26
+ sa.Column('updated_at', sa.DateTime(), nullable=False, server_default=sa.text('CURRENT_TIMESTAMP')),
27
+ sa.ForeignKeyConstraint(['user_id'], ['users.id'], ondelete='CASCADE'),
28
+ sa.PrimaryKeyConstraint('id')
29
+ )
30
+ op.create_index('ix_conversations_user_id', 'conversations', ['user_id'])
31
+ op.create_index('ix_conversations_updated_at', 'conversations', ['updated_at'])
32
+
33
+ # Create messages table
34
+ op.create_table(
35
+ 'messages',
36
+ sa.Column('id', sa.Integer(), nullable=False),
37
+ sa.Column('conversation_id', sa.Integer(), nullable=False),
38
+ sa.Column('user_id', sa.Integer(), nullable=False),
39
+ sa.Column('role', sa.String(length=20), nullable=False),
40
+ sa.Column('content', sa.Text(), nullable=False),
41
+ sa.Column('created_at', sa.DateTime(), nullable=False, server_default=sa.text('CURRENT_TIMESTAMP')),
42
+ sa.CheckConstraint("role IN ('user', 'assistant')", name='chk_messages_role'),
43
+ sa.CheckConstraint("LENGTH(TRIM(content)) > 0", name='chk_messages_content_not_empty'),
44
+ sa.ForeignKeyConstraint(['conversation_id'], ['conversations.id'], ondelete='CASCADE'),
45
+ sa.ForeignKeyConstraint(['user_id'], ['users.id'], ondelete='CASCADE'),
46
+ sa.PrimaryKeyConstraint('id')
47
+ )
48
+ op.create_index('ix_messages_conversation_id', 'messages', ['conversation_id'])
49
+ op.create_index('ix_messages_user_id', 'messages', ['user_id'])
50
+ op.create_index('ix_messages_created_at', 'messages', ['created_at'])
51
+ op.create_index('ix_messages_conversation_created', 'messages', ['conversation_id', 'created_at'])
52
+
53
+
54
+ def downgrade():
55
+ op.drop_table('messages')
56
+ op.drop_table('conversations')
alembic/versions/a6878af5b66f_add_category_and_due_date_to_tasks.py ADDED
@@ -0,0 +1,30 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """add_category_and_due_date_to_tasks
2
+
3
+ Revision ID: a6878af5b66f
4
+ Revises: 001
5
+ Create Date: 2026-02-05 14:23:11.577860
6
+
7
+ """
8
+ from alembic import op
9
+ import sqlalchemy as sa
10
+
11
+
12
+ # revision identifiers, used by Alembic.
13
+ revision = 'a6878af5b66f'
14
+ down_revision = '001'
15
+ branch_labels = None
16
+ depends_on = None
17
+
18
+
19
+ def upgrade() -> None:
20
+ # Add category column
21
+ op.add_column('tasks', sa.Column('category', sa.String(length=50), nullable=True))
22
+
23
+ # Add due_date column
24
+ op.add_column('tasks', sa.Column('due_date', sa.DateTime(), nullable=True))
25
+
26
+
27
+ def downgrade() -> None:
28
+ # Remove columns in reverse order
29
+ op.drop_column('tasks', 'due_date')
30
+ op.drop_column('tasks', 'category')
api/index.py ADDED
@@ -0,0 +1,19 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Vercel Serverless Function for FastAPI
3
+ Vercel natively supports ASGI apps - just export the app directly
4
+ """
5
+ import sys
6
+ import os
7
+
8
+ # Add parent directory to path for imports
9
+ sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
10
+
11
+ from src.main import app
12
+
13
+ # Vercel will automatically detect and handle the ASGI app
14
+ # No need for Mangum or any wrapper
15
+
16
+ # For local testing
17
+ if __name__ == "__main__":
18
+ import uvicorn
19
+ uvicorn.run(app, host="0.0.0.0", port=8000)
api/test.py ADDED
@@ -0,0 +1,45 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Minimal test endpoint for Vercel deployment debugging
3
+ """
4
+ from fastapi import FastAPI
5
+ from fastapi.middleware.cors import CORSMiddleware
6
+ import os
7
+
8
+ app = FastAPI(title="Todo API - Minimal Test")
9
+
10
+ # CORS
11
+ app.add_middleware(
12
+ CORSMiddleware,
13
+ allow_origins=["*"],
14
+ allow_credentials=True,
15
+ allow_methods=["*"],
16
+ allow_headers=["*"],
17
+ )
18
+
19
+ @app.get("/")
20
+ async def root():
21
+ return {
22
+ "status": "ok",
23
+ "message": "Minimal FastAPI working on Vercel",
24
+ "environment": {
25
+ "VERCEL": os.getenv("VERCEL", "not set"),
26
+ "VERCEL_ENV": os.getenv("VERCEL_ENV", "not set"),
27
+ }
28
+ }
29
+
30
+ @app.get("/health")
31
+ async def health():
32
+ return {"status": "healthy"}
33
+
34
+ @app.get("/test-db")
35
+ async def test_db():
36
+ """Test database connection"""
37
+ try:
38
+ from src.database import engine
39
+ from sqlmodel import text
40
+
41
+ with engine.connect() as conn:
42
+ result = conn.execute(text("SELECT 1"))
43
+ return {"status": "ok", "database": "connected"}
44
+ except Exception as e:
45
+ return {"status": "error", "message": str(e)}
init_db.py ADDED
@@ -0,0 +1,9 @@
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Initialize database tables for the Todo application.
3
+ """
4
+ from src.database import create_db_and_tables
5
+
6
+ if __name__ == "__main__":
7
+ print("Creating database tables...")
8
+ create_db_and_tables()
9
+ print("Database tables created successfully!")
migrate_db.py ADDED
@@ -0,0 +1,36 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Simple migration script to add category and due_date columns to tasks table.
3
+ """
4
+ import sqlite3
5
+
6
+ # Connect to database
7
+ conn = sqlite3.connect('todo.db')
8
+ cursor = conn.cursor()
9
+
10
+ try:
11
+ # Check if columns exist
12
+ cursor.execute("PRAGMA table_info(tasks)")
13
+ columns = [col[1] for col in cursor.fetchall()]
14
+
15
+ # Add category column if it doesn't exist
16
+ if 'category' not in columns:
17
+ cursor.execute("ALTER TABLE tasks ADD COLUMN category VARCHAR(50)")
18
+ print("Added 'category' column")
19
+ else:
20
+ print("'category' column already exists")
21
+
22
+ # Add due_date column if it doesn't exist
23
+ if 'due_date' not in columns:
24
+ cursor.execute("ALTER TABLE tasks ADD COLUMN due_date DATETIME")
25
+ print("Added 'due_date' column")
26
+ else:
27
+ print("'due_date' column already exists")
28
+
29
+ conn.commit()
30
+ print("\nDatabase migration completed successfully!")
31
+
32
+ except Exception as e:
33
+ print(f"Error: {e}")
34
+ conn.rollback()
35
+ finally:
36
+ conn.close()
requirements.txt ADDED
@@ -0,0 +1,12 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ fastapi==0.109.0
2
+ sqlmodel==0.0.14
3
+ python-jose[cryptography]==3.3.0
4
+ passlib[argon2]==1.7.4
5
+ python-multipart==0.0.6
6
+ uvicorn[standard]==0.27.0
7
+ pydantic>=2.6.0
8
+ pydantic-settings>=2.2.0
9
+ python-dotenv==1.0.0
10
+ email-validator==2.1.0
11
+ cohere>=5.0.0
12
+ tenacity>=8.2.3
src/__pycache__/database.cpython-314.pyc ADDED
Binary file (2.03 kB). View file
 
src/__pycache__/main.cpython-314.pyc ADDED
Binary file (3.7 kB). View file
 
src/agents/__init__.py ADDED
File without changes
src/agents/__pycache__/__init__.cpython-314.pyc ADDED
Binary file (209 Bytes). View file
 
src/agents/__pycache__/cohere_client.cpython-314.pyc ADDED
Binary file (8.52 kB). View file
 
src/agents/__pycache__/orchestrator.cpython-314.pyc ADDED
Binary file (6.65 kB). View file
 
src/agents/cohere_client.py ADDED
@@ -0,0 +1,168 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Cohere client service for AI chatbot.
3
+
4
+ This module provides a wrapper around the Cohere API with:
5
+ - API key management
6
+ - Retry logic for transient failures
7
+ - Timeout handling
8
+ - Structured logging
9
+ - Token usage tracking
10
+ """
11
+
12
+ import os
13
+ import logging
14
+ import time
15
+ from typing import List, Dict, Any, Optional
16
+ from tenacity import (
17
+ retry,
18
+ stop_after_attempt,
19
+ wait_exponential,
20
+ retry_if_exception_type
21
+ )
22
+ import cohere
23
+ from cohere.errors import TooManyRequestsError, ServiceUnavailableError
24
+
25
+ logger = logging.getLogger(__name__)
26
+
27
+
28
+ class CohereClient:
29
+ """
30
+ Cohere API client with retry logic and structured logging.
31
+
32
+ This client is specifically configured for the AI chatbot use case
33
+ with deterministic temperature and tool-calling support.
34
+ """
35
+
36
+ def __init__(self):
37
+ """Initialize Cohere client with environment configuration."""
38
+ self.api_key = os.getenv("COHERE_API_KEY")
39
+ if not self.api_key:
40
+ raise ValueError("COHERE_API_KEY not found in environment variables")
41
+
42
+ self.model = os.getenv("COHERE_MODEL", "command-r-plus")
43
+ self.temperature = float(os.getenv("COHERE_TEMPERATURE", "0.3"))
44
+ self.max_tokens = int(os.getenv("COHERE_MAX_TOKENS", "2000"))
45
+ self.timeout = int(os.getenv("COHERE_TIMEOUT", "30"))
46
+
47
+ # Initialize Cohere client
48
+ self.client = cohere.ClientV2(self.api_key)
49
+ logger.info(f"Cohere client initialized with model: {self.model}")
50
+
51
+ @retry(
52
+ stop=stop_after_attempt(3),
53
+ wait=wait_exponential(multiplier=1, min=2, max=10),
54
+ retry=retry_if_exception_type((TooManyRequestsError, ServiceUnavailableError))
55
+ )
56
+ async def chat(
57
+ self,
58
+ messages: List[Dict[str, str]],
59
+ tools: Optional[List[Dict[str, Any]]] = None
60
+ ) -> Dict[str, Any]:
61
+ """
62
+ Send chat request to Cohere API with retry logic.
63
+
64
+ Args:
65
+ messages: List of message dictionaries with 'role' and 'content'
66
+ tools: Optional list of tool definitions for tool-calling
67
+
68
+ Returns:
69
+ Dictionary containing response and tool calls (if any)
70
+
71
+ Raises:
72
+ Exception: If API call fails after retries
73
+ """
74
+ start_time = time.time()
75
+
76
+ try:
77
+ logger.info(f"Sending chat request to Cohere (model: {self.model})")
78
+ logger.debug(f"Messages: {len(messages)}, Tools: {len(tools) if tools else 0}")
79
+
80
+ response = self.client.chat(
81
+ model=self.model,
82
+ messages=messages,
83
+ temperature=self.temperature,
84
+ max_tokens=self.max_tokens,
85
+ tools=tools if tools else None
86
+ )
87
+
88
+ latency = time.time() - start_time
89
+
90
+ # Debug: Print full response structure
91
+ logger.info(f"Cohere response received: {response}")
92
+ logger.info(f"Response dict: {response.__dict__ if hasattr(response, '__dict__') else 'No dict'}")
93
+
94
+ # Extract response content
95
+ response_text = ""
96
+ if hasattr(response, 'message') and hasattr(response.message, 'content') and response.message.content:
97
+ for item in response.message.content:
98
+ if hasattr(item, 'text'):
99
+ response_text = item.text
100
+ break
101
+
102
+ # Extract tool calls if present
103
+ tool_calls = []
104
+ if hasattr(response.message, 'tool_calls') and response.message.tool_calls:
105
+ import json
106
+ for tool_call in response.message.tool_calls:
107
+ try:
108
+ # Parse JSON string arguments into dictionary
109
+ arguments = json.loads(tool_call.function.arguments) if isinstance(tool_call.function.arguments, str) else tool_call.function.arguments
110
+ tool_calls.append({
111
+ "name": tool_call.function.name,
112
+ "parameters": arguments
113
+ })
114
+ except json.JSONDecodeError as e:
115
+ logger.error(f"Failed to parse tool call arguments: {e}")
116
+ continue
117
+
118
+ # Log metrics
119
+ logger.info(f"Cohere API call successful (latency: {latency:.2f}s)")
120
+ if hasattr(response, 'usage'):
121
+ logger.info(f"Token usage - Input: {response.usage.tokens.input_tokens}, "
122
+ f"Output: {response.usage.tokens.output_tokens}")
123
+
124
+ return {
125
+ "response": response_text,
126
+ "tool_calls": tool_calls,
127
+ "latency": latency
128
+ }
129
+
130
+ except TooManyRequestsError as e:
131
+ logger.warning(f"Rate limit hit: {str(e)}")
132
+ raise
133
+ except ServiceUnavailableError as e:
134
+ logger.error(f"Cohere service unavailable: {str(e)}")
135
+ raise
136
+ except Exception as e:
137
+ import traceback
138
+ logger.error(f"Cohere API call failed: {str(e)}")
139
+ logger.error(f"Traceback: {traceback.format_exc()}")
140
+ raise
141
+
142
+ def validate_tool_call(self, tool_call: Dict[str, Any]) -> bool:
143
+ """
144
+ Validate that a tool call has the required structure.
145
+
146
+ Args:
147
+ tool_call: Tool call dictionary to validate
148
+
149
+ Returns:
150
+ True if valid, False otherwise
151
+ """
152
+ if not isinstance(tool_call, dict):
153
+ return False
154
+
155
+ if "name" not in tool_call or "parameters" not in tool_call:
156
+ return False
157
+
158
+ if not isinstance(tool_call["name"], str):
159
+ return False
160
+
161
+ if not isinstance(tool_call["parameters"], dict):
162
+ return False
163
+
164
+ return True
165
+
166
+
167
+ # Global Cohere client instance
168
+ cohere_client = CohereClient()
src/agents/orchestrator.py ADDED
@@ -0,0 +1,163 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Agent orchestrator for AI chatbot.
3
+
4
+ This module coordinates between Cohere API and MCP tools,
5
+ managing the conversation flow and tool execution.
6
+ """
7
+
8
+ import logging
9
+ from typing import List, Dict, Any, Optional
10
+ from src.agents.cohere_client import cohere_client
11
+ from src.mcp.server import mcp_server
12
+
13
+ logger = logging.getLogger(__name__)
14
+
15
+
16
class AgentOrchestrator:
    """
    Orchestrates AI agent interactions with tool-calling support.

    Flow:
    1. Send the conversation to the Cohere API along with MCP tool schemas.
    2. Receive tool call decisions from the model.
    3. Validate and execute tools via the MCP server.
    4. Ask the model for a final natural-language response that folds in
       the tool results.
    """

    def __init__(self):
        self.cohere = cohere_client
        self.mcp = mcp_server

    async def run(
        self,
        messages: List[Dict[str, str]],
        user_id: int,
        db: Any = None
    ) -> Dict[str, Any]:
        """
        Run the agent with conversation history.

        Args:
            messages: Conversation messages ({"role", "content"} dicts).
            user_id: Authenticated user ID, injected into every tool call
                so tools only operate on this user's data.
            db: Database session passed through to tool execution.

        Returns:
            Dict with "response" (str), "tool_calls" and "tool_results".

        Raises:
            Whatever the Cohere client raises (rate limit, service
            unavailable, ...). Failures of individual tools are captured
            per-tool instead of aborting the run.
        """
        try:
            tools = self.mcp.list_tools()
            logger.info(f"Running agent with {len(messages)} messages and {len(tools)} tools")

            result = await self.cohere.chat(messages=messages, tools=tools)
            response_text = result["response"]
            tool_calls = result["tool_calls"]

            # No tools requested: the model's text is the final answer.
            if not tool_calls:
                logger.info("No tool calls in response")
                return {
                    "response": response_text,
                    "tool_calls": [],
                    "tool_results": []
                }

            tool_results = []
            for tool_call in tool_calls:
                if not self.cohere.validate_tool_call(tool_call):
                    logger.warning(f"Invalid tool call structure: {tool_call}")
                    continue

                tool_name = tool_call["name"]
                # Bug fix: copy before injecting user_id. The original
                # mutated tool_call["parameters"] in place, so the
                # tool_calls structure returned to callers carried the
                # injected user_id.
                parameters = dict(tool_call["parameters"])
                parameters["user_id"] = user_id

                try:
                    logger.info(f"Executing tool: {tool_name}")
                    tool_result = await self.mcp.execute_tool(tool_name, parameters, db=db)
                    tool_results.append({
                        "tool": tool_name,
                        "result": tool_result
                    })
                except Exception as e:
                    # One failing tool must not abort the whole turn.
                    logger.error(f"Tool execution failed: {tool_name} - {str(e)}")
                    tool_results.append({
                        "tool": tool_name,
                        "result": {
                            "success": False,
                            "message": f"Tool execution failed: {str(e)}"
                        }
                    })

            # Generate final response incorporating tool results
            final_response = await self._generate_final_response(
                messages,
                response_text,
                tool_results
            )

            return {
                "response": final_response,
                "tool_calls": tool_calls,
                "tool_results": tool_results
            }

        except Exception as e:
            logger.error(f"Agent orchestration failed: {str(e)}")
            raise

    async def _generate_final_response(
        self,
        messages: List[Dict[str, str]],
        initial_response: str,
        tool_results: List[Dict[str, Any]]
    ) -> str:
        """
        Generate the final response incorporating tool execution results.

        Args:
            messages: Original conversation messages.
            initial_response: Initial AI response that requested the tools.
            tool_results: Results from tool executions.

        Returns:
            Final response text; falls back to a plain summary of the tool
            results if the follow-up model call fails.
        """
        # Nothing was executed: the initial text stands as-is.
        if not tool_results:
            return initial_response

        tool_context = "\n".join(
            f"Tool {tr['tool']}: {tr['result'].get('message', 'Executed')}"
            for tr in tool_results
        )

        # Only include non-empty messages to avoid Cohere API v2 validation
        # errors. Bug fix: guard against content being None (the original
        # msg.get('content', '').strip() raised AttributeError for None).
        follow_up_messages = [
            msg for msg in messages if (msg.get('content') or '').strip()
        ]

        # Add tool results as a user message so the model can phrase them.
        follow_up_messages.append({
            "role": "user",
            "content": f"Tool execution results:\n{tool_context}\n\nProvide a natural language response to the user based on these results."
        })

        try:
            result = await self.cohere.chat(messages=follow_up_messages, tools=None)
            return result["response"]
        except Exception as e:
            logger.error(f"Failed to generate final response: {str(e)}")
            # Fallback to tool results summary
            return f"Operation completed. {tool_context}"
160
+
161
+
162
+ # Global orchestrator instance
163
+ orchestrator = AgentOrchestrator()
src/api/__init__.py ADDED
@@ -0,0 +1 @@
 
 
1
+ # API module
src/api/__pycache__/__init__.cpython-314.pyc ADDED
Binary file (206 Bytes). View file
 
src/api/__pycache__/auth.cpython-314.pyc ADDED
Binary file (6.72 kB). View file
 
src/api/__pycache__/chat.cpython-314.pyc ADDED
Binary file (9 kB). View file
 
src/api/__pycache__/password_reset.cpython-314.pyc ADDED
Binary file (8.97 kB). View file
 
src/api/__pycache__/subtasks.cpython-314.pyc ADDED
Binary file (9.82 kB). View file
 
src/api/__pycache__/tasks.cpython-314.pyc ADDED
Binary file (13.3 kB). View file
 
src/api/ai.py ADDED
@@ -0,0 +1,228 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ AI-powered task management endpoints using Cohere.
3
+
4
+ This module provides REST API endpoints for AI features:
5
+ - Task suggestions
6
+ - Smart auto-completion
7
+ - Task categorization
8
+ - Description enhancement
9
+ - Complexity analysis
10
+ """
11
+
12
+ from fastapi import APIRouter, HTTPException, Depends
13
+ from pydantic import BaseModel, Field
14
+ from typing import List, Dict, Optional
15
+ from src.services.cohere_ai import cohere_service
16
+ from src.middleware.jwt_auth import get_current_user
17
+
18
+ router = APIRouter()
19
+
20
+
21
# Request/Response Models
class TaskSuggestionRequest(BaseModel):
    """Request body for POST /suggestions."""
    context: str = Field(..., description="Context to generate suggestions from")
    # Bounded to 1-10 so one request cannot ask for an unbounded AI response.
    count: int = Field(default=5, ge=1, le=10, description="Number of suggestions")


class TaskSuggestionResponse(BaseModel):
    """Response body: AI-generated task suggestion strings."""
    suggestions: List[str]


class EnhanceDescriptionRequest(BaseModel):
    """Request body for POST /enhance-description."""
    title: str = Field(..., description="Task title")
    description: str = Field(default="", description="Current description")


class EnhanceDescriptionResponse(BaseModel):
    """Response body: the AI-rewritten task description."""
    enhanced_description: str


class CategorizeTaskRequest(BaseModel):
    """Request body for POST /categorize."""
    title: str = Field(..., description="Task title")
    description: str = Field(default="", description="Task description")


class CategorizeTaskResponse(BaseModel):
    """Response body: AI-assigned category, priority and tags."""
    category: str
    priority: str
    tags: List[str]


class AutoCompleteRequest(BaseModel):
    """Request body for POST /autocomplete."""
    partial_title: str = Field(..., description="Partial task title")


class AutoCompleteResponse(BaseModel):
    """Response body: candidate completions for the partial title."""
    completions: List[str]


class AnalyzeComplexityRequest(BaseModel):
    """Request body for POST /analyze-complexity."""
    title: str = Field(..., description="Task title")
    description: str = Field(default="", description="Task description")


class AnalyzeComplexityResponse(BaseModel):
    """Response body: AI complexity assessment for a task."""
    complexity: str
    # NOTE(review): appears to be free-text from the AI service (e.g. a
    # duration string) — confirm against cohere_service.analyze_task_complexity.
    estimated_time: str
    needs_subtasks: bool
68
+
69
+
70
+ # Endpoints
71
+ @router.post("/suggestions", response_model=TaskSuggestionResponse)
72
+ async def generate_task_suggestions(
73
+ request: TaskSuggestionRequest,
74
+ current_user: dict = Depends(get_current_user)
75
+ ):
76
+ """
77
+ Generate AI-powered task suggestions based on context.
78
+
79
+ Requires authentication.
80
+ """
81
+ try:
82
+ suggestions = cohere_service.generate_task_suggestions(
83
+ context=request.context,
84
+ count=request.count
85
+ )
86
+
87
+ if not suggestions:
88
+ raise HTTPException(
89
+ status_code=500,
90
+ detail="Failed to generate suggestions. Please try again."
91
+ )
92
+
93
+ return TaskSuggestionResponse(suggestions=suggestions)
94
+
95
+ except Exception as e:
96
+ raise HTTPException(
97
+ status_code=500,
98
+ detail=f"Error generating suggestions: {str(e)}"
99
+ )
100
+
101
+
102
+ @router.post("/enhance-description", response_model=EnhanceDescriptionResponse)
103
+ async def enhance_task_description(
104
+ request: EnhanceDescriptionRequest,
105
+ current_user: dict = Depends(get_current_user)
106
+ ):
107
+ """
108
+ Enhance a task description with AI to make it more clear and actionable.
109
+
110
+ Requires authentication.
111
+ """
112
+ try:
113
+ enhanced = cohere_service.enhance_task_description(
114
+ title=request.title,
115
+ description=request.description
116
+ )
117
+
118
+ return EnhanceDescriptionResponse(enhanced_description=enhanced)
119
+
120
+ except Exception as e:
121
+ raise HTTPException(
122
+ status_code=500,
123
+ detail=f"Error enhancing description: {str(e)}"
124
+ )
125
+
126
+
127
+ @router.post("/categorize", response_model=CategorizeTaskResponse)
128
+ async def categorize_task(
129
+ request: CategorizeTaskRequest,
130
+ current_user: dict = Depends(get_current_user)
131
+ ):
132
+ """
133
+ Categorize a task and suggest priority level using AI.
134
+
135
+ Requires authentication.
136
+ """
137
+ try:
138
+ result = cohere_service.categorize_task(
139
+ title=request.title,
140
+ description=request.description
141
+ )
142
+
143
+ return CategorizeTaskResponse(**result)
144
+
145
+ except Exception as e:
146
+ raise HTTPException(
147
+ status_code=500,
148
+ detail=f"Error categorizing task: {str(e)}"
149
+ )
150
+
151
+
152
+ @router.post("/autocomplete", response_model=AutoCompleteResponse)
153
+ async def autocomplete_task(
154
+ request: AutoCompleteRequest,
155
+ current_user: dict = Depends(get_current_user)
156
+ ):
157
+ """
158
+ Provide smart auto-completion suggestions for task titles.
159
+
160
+ Requires authentication.
161
+ """
162
+ try:
163
+ completions = cohere_service.smart_complete_task(
164
+ partial_title=request.partial_title
165
+ )
166
+
167
+ return AutoCompleteResponse(completions=completions)
168
+
169
+ except Exception as e:
170
+ raise HTTPException(
171
+ status_code=500,
172
+ detail=f"Error generating completions: {str(e)}"
173
+ )
174
+
175
+
176
+ @router.post("/analyze-complexity", response_model=AnalyzeComplexityResponse)
177
+ async def analyze_task_complexity(
178
+ request: AnalyzeComplexityRequest,
179
+ current_user: dict = Depends(get_current_user)
180
+ ):
181
+ """
182
+ Analyze task complexity and provide time estimates using AI.
183
+
184
+ Requires authentication.
185
+ """
186
+ try:
187
+ result = cohere_service.analyze_task_complexity(
188
+ title=request.title,
189
+ description=request.description
190
+ )
191
+
192
+ return AnalyzeComplexityResponse(**result)
193
+
194
+ except Exception as e:
195
+ raise HTTPException(
196
+ status_code=500,
197
+ detail=f"Error analyzing complexity: {str(e)}"
198
+ )
199
+
200
+
201
+ @router.get("/health")
202
+ async def ai_health_check():
203
+ """
204
+ Check if AI service is properly configured.
205
+
206
+ Does not require authentication.
207
+ """
208
+ try:
209
+ import os
210
+ api_key = os.getenv("COHERE_API_KEY")
211
+
212
+ if not api_key:
213
+ return {
214
+ "status": "error",
215
+ "message": "COHERE_API_KEY not configured"
216
+ }
217
+
218
+ return {
219
+ "status": "healthy",
220
+ "message": "AI service is configured and ready",
221
+ "provider": "Cohere"
222
+ }
223
+
224
+ except Exception as e:
225
+ return {
226
+ "status": "error",
227
+ "message": str(e)
228
+ }
src/api/auth.py ADDED
@@ -0,0 +1,155 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Authentication API endpoints for user signup and signin.
3
+
4
+ This module provides:
5
+ - POST /api/auth/signup - Create new user account
6
+ - POST /api/auth/signin - Authenticate existing user
7
+ """
8
+
9
+ from fastapi import APIRouter, HTTPException, Depends
10
+ from sqlmodel import Session, select
11
+ from pydantic import BaseModel, EmailStr, Field
12
+
13
+ from ..models.user import User
14
+ from ..services.auth import hash_password, verify_password, create_access_token
15
+ from ..database import get_session
16
+
17
+ router = APIRouter()
18
+
19
+
20
# Request/Response Models
class SignUpRequest(BaseModel):
    """Request model for user signup."""
    email: EmailStr = Field(..., description="User email address")
    # Only a minimum-length check at signup; the richer strength rules
    # (validate_password_strength) are used by the password-reset flow.
    password: str = Field(..., min_length=8, description="User password (minimum 8 characters)")


class SignInRequest(BaseModel):
    """Request model for user signin."""
    email: EmailStr = Field(..., description="User email address")
    password: str = Field(..., description="User password")


class UserResponse(BaseModel):
    """User data response model."""
    id: int
    email: str
    # ISO 8601 strings; the endpoints serialize datetimes via .isoformat().
    created_at: str
    updated_at: str


class AuthResponse(BaseModel):
    """Authentication response with token and user data."""
    # JWT created by services.auth.create_access_token.
    token: str
    user: UserResponse
45
+
46
+
47
+ @router.post("/signup", response_model=AuthResponse, status_code=201)
48
+ async def signup(
49
+ request: SignUpRequest,
50
+ session: Session = Depends(get_session)
51
+ ) -> AuthResponse:
52
+ """
53
+ Create a new user account.
54
+
55
+ Args:
56
+ request: Signup request with email and password
57
+ session: Database session
58
+
59
+ Returns:
60
+ AuthResponse with JWT token and user data
61
+
62
+ Raises:
63
+ HTTPException 400: If email already exists
64
+ HTTPException 422: If validation fails
65
+ """
66
+ # Check if email already exists
67
+ statement = select(User).where(User.email == request.email)
68
+ existing_user = session.exec(statement).first()
69
+
70
+ if existing_user:
71
+ raise HTTPException(
72
+ status_code=400,
73
+ detail="Email already registered"
74
+ )
75
+
76
+ # Hash password
77
+ hashed_password = hash_password(request.password)
78
+
79
+ # Create new user
80
+ new_user = User(
81
+ email=request.email,
82
+ hashed_password=hashed_password
83
+ )
84
+
85
+ session.add(new_user)
86
+ session.commit()
87
+ session.refresh(new_user)
88
+
89
+ # Create JWT token
90
+ token = create_access_token(
91
+ data={
92
+ "user_id": new_user.id,
93
+ "email": new_user.email
94
+ }
95
+ )
96
+
97
+ # Return response
98
+ return AuthResponse(
99
+ token=token,
100
+ user=UserResponse(
101
+ id=new_user.id,
102
+ email=new_user.email,
103
+ created_at=new_user.created_at.isoformat(),
104
+ updated_at=new_user.updated_at.isoformat()
105
+ )
106
+ )
107
+
108
+
109
+ @router.post("/signin", response_model=AuthResponse)
110
+ async def signin(
111
+ request: SignInRequest,
112
+ session: Session = Depends(get_session)
113
+ ) -> AuthResponse:
114
+ """
115
+ Authenticate an existing user.
116
+
117
+ Args:
118
+ request: Signin request with email and password
119
+ session: Database session
120
+
121
+ Returns:
122
+ AuthResponse with JWT token and user data
123
+
124
+ Raises:
125
+ HTTPException 401: If credentials are invalid
126
+ """
127
+ # Find user by email
128
+ statement = select(User).where(User.email == request.email)
129
+ user = session.exec(statement).first()
130
+
131
+ # Verify user exists and password is correct
132
+ if not user or not verify_password(request.password, user.hashed_password):
133
+ raise HTTPException(
134
+ status_code=401,
135
+ detail="Invalid email or password"
136
+ )
137
+
138
+ # Create JWT token
139
+ token = create_access_token(
140
+ data={
141
+ "user_id": user.id,
142
+ "email": user.email
143
+ }
144
+ )
145
+
146
+ # Return response
147
+ return AuthResponse(
148
+ token=token,
149
+ user=UserResponse(
150
+ id=user.id,
151
+ email=user.email,
152
+ created_at=user.created_at.isoformat(),
153
+ updated_at=user.updated_at.isoformat()
154
+ )
155
+ )
src/api/chat.py ADDED
@@ -0,0 +1,210 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Chat endpoint for AI-powered conversational task management.
3
+
4
+ This module provides the REST API endpoint for the AI chatbot,
5
+ implementing stateless conversation management with MCP tool execution.
6
+ """
7
+
8
+ from fastapi import APIRouter, HTTPException, Depends
9
+ from pydantic import BaseModel, Field
10
+ from typing import List, Dict, Any, Optional
11
+ from sqlmodel import Session
12
+ import logging
13
+
14
+ from src.database import get_session
15
+ from src.middleware.jwt_auth import get_current_user
16
+ from src.services.conversation_service import conversation_service
17
+ from src.agents.orchestrator import orchestrator
18
+
19
+ logger = logging.getLogger(__name__)
20
+
21
+ router = APIRouter()
22
+
23
+
24
# Request/Response Models

class ChatRequest(BaseModel):
    """Request model for chat endpoint."""
    # 10k-character cap bounds the prompt size sent to the AI provider.
    message: str = Field(
        ...,
        min_length=1,
        max_length=10000,
        description="User's message to the AI chatbot"
    )


class ChatResponse(BaseModel):
    """Response model for chat endpoint."""
    conversation_id: int = Field(description="ID of the conversation")
    message_id: int = Field(description="ID of the assistant's message")
    response: str = Field(description="AI assistant's response")
    timestamp: str = Field(description="ISO 8601 timestamp of the response")


class ConversationHistoryResponse(BaseModel):
    """Response model for conversation history."""
    conversation_id: int
    # Each entry: {"id", "role", "content", "timestamp"} (see get_chat_history).
    messages: List[Dict[str, Any]]
    total_count: int
    # True when the returned page was full, i.e. more messages may exist.
    has_more: bool = False
50
+
51
+
52
+ # Endpoints
53
+
54
+ @router.post("/chat", response_model=ChatResponse)
55
+ async def chat(
56
+ request: ChatRequest,
57
+ current_user: dict = Depends(get_current_user),
58
+ db: Session = Depends(get_session)
59
+ ):
60
+ """
61
+ Send a message to the AI chatbot.
62
+
63
+ The chatbot will:
64
+ - Understand user intent (add task, list tasks, complete task, etc.)
65
+ - Execute appropriate MCP tool operations
66
+ - Return conversational response with operation results
67
+
68
+ All conversation history is automatically persisted and loaded for context.
69
+
70
+ Requires authentication.
71
+ """
72
+ try:
73
+ user_id = current_user["user_id"]
74
+ logger.info(f"Chat request from user {user_id}: {request.message[:50]}...")
75
+
76
+ # 1. Get or create conversation
77
+ conversation = await conversation_service.get_or_create_conversation(db, user_id)
78
+
79
+ # 2. Store user message
80
+ user_message = await conversation_service.store_message(
81
+ db=db,
82
+ conversation_id=conversation.id,
83
+ user_id=user_id,
84
+ role="user",
85
+ content=request.message
86
+ )
87
+
88
+ # 3. Load conversation history
89
+ history = await conversation_service.load_conversation_history(
90
+ db=db,
91
+ conversation_id=conversation.id,
92
+ limit=50
93
+ )
94
+
95
+ # 4. Build message array for AI
96
+ messages = conversation_service.build_message_array(history)
97
+
98
+ # 5. Run agent orchestrator
99
+ result = await orchestrator.run(messages=messages, user_id=user_id, db=db)
100
+
101
+ # 6. Store assistant response
102
+ assistant_message = await conversation_service.store_message(
103
+ db=db,
104
+ conversation_id=conversation.id,
105
+ user_id=user_id,
106
+ role="assistant",
107
+ content=result["response"]
108
+ )
109
+
110
+ # 7. Return structured response
111
+ return ChatResponse(
112
+ conversation_id=conversation.id,
113
+ message_id=assistant_message.id,
114
+ response=result["response"],
115
+ timestamp=assistant_message.created_at.isoformat()
116
+ )
117
+
118
+ except ValueError as e:
119
+ logger.error(f"Validation error in chat endpoint: {str(e)}")
120
+ raise HTTPException(status_code=400, detail=str(e))
121
+ except Exception as e:
122
+ logger.error(f"Error in chat endpoint: {str(e)}")
123
+ raise HTTPException(
124
+ status_code=500,
125
+ detail="An error occurred while processing your message. Please try again."
126
+ )
127
+
128
+
129
+ @router.get("/chat/history", response_model=ConversationHistoryResponse)
130
+ async def get_chat_history(
131
+ limit: int = 50,
132
+ offset: int = 0,
133
+ current_user: dict = Depends(get_current_user),
134
+ db: Session = Depends(get_session)
135
+ ):
136
+ """
137
+ Retrieve conversation history for the authenticated user.
138
+
139
+ Returns messages in chronological order.
140
+
141
+ Requires authentication.
142
+ """
143
+ try:
144
+ user_id = current_user["user_id"]
145
+
146
+ # Get user's conversation
147
+ conversation = await conversation_service.get_or_create_conversation(db, user_id)
148
+
149
+ # Load messages
150
+ messages = await conversation_service.load_conversation_history(
151
+ db=db,
152
+ conversation_id=conversation.id,
153
+ limit=limit
154
+ )
155
+
156
+ # Format messages
157
+ formatted_messages = [
158
+ {
159
+ "id": msg.id,
160
+ "role": msg.role,
161
+ "content": msg.content,
162
+ "timestamp": msg.created_at.isoformat()
163
+ }
164
+ for msg in messages
165
+ ]
166
+
167
+ return ConversationHistoryResponse(
168
+ conversation_id=conversation.id,
169
+ messages=formatted_messages,
170
+ total_count=len(formatted_messages),
171
+ has_more=len(formatted_messages) >= limit
172
+ )
173
+
174
+ except Exception as e:
175
+ logger.error(f"Error retrieving chat history: {str(e)}")
176
+ raise HTTPException(
177
+ status_code=500,
178
+ detail="An error occurred while retrieving chat history."
179
+ )
180
+
181
+
182
+ @router.get("/chat/health")
183
+ async def chat_health_check():
184
+ """
185
+ Check if chat service is properly configured.
186
+
187
+ Does not require authentication.
188
+ """
189
+ try:
190
+ import os
191
+ cohere_key = os.getenv("COHERE_API_KEY")
192
+
193
+ if not cohere_key:
194
+ return {
195
+ "status": "error",
196
+ "message": "COHERE_API_KEY not configured"
197
+ }
198
+
199
+ return {
200
+ "status": "healthy",
201
+ "message": "Chat service is configured and ready",
202
+ "provider": "Cohere",
203
+ "architecture": "Stateless with MCP tools"
204
+ }
205
+
206
+ except Exception as e:
207
+ return {
208
+ "status": "error",
209
+ "message": str(e)
210
+ }
src/api/password_reset.py ADDED
@@ -0,0 +1,233 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Password Reset API endpoints for secure password recovery.
3
+
4
+ This module provides:
5
+ - POST /api/auth/forgot-password - Request password reset email
6
+ - GET /api/auth/reset-password/{token} - Verify reset token validity
7
+ - POST /api/auth/reset-password - Reset password with token
8
+ """
9
+
10
+ from fastapi import APIRouter, HTTPException, Depends
11
+ from sqlmodel import Session
12
+ from pydantic import BaseModel, EmailStr, Field
13
+ from typing import Optional
14
+
15
+ from ..models.user import User
16
+ from ..services.auth import hash_password
17
+ from ..services.password_reset import (
18
+ create_reset_token,
19
+ validate_reset_token,
20
+ invalidate_token,
21
+ check_rate_limit,
22
+ validate_password_strength,
23
+ get_user_by_email
24
+ )
25
+ from ..services.email import send_password_reset_email
26
+ from ..database import get_session
27
+
28
+ router = APIRouter()
29
+
30
+
31
+ # Request/Response Models
32
class ForgotPasswordRequest(BaseModel):
    """Request model for forgot password."""
    email: EmailStr = Field(..., description="User email address")


class ForgotPasswordResponse(BaseModel):
    """Response model for forgot password request."""
    # Always a generic message, regardless of whether the email exists
    # (prevents user enumeration).
    message: str


class TokenValidationResponse(BaseModel):
    """Response model for token validation."""
    valid: bool
    # Populated only when valid is True.
    email: Optional[str] = None
    # Human-readable reason, populated only when valid is False.
    error: Optional[str] = None


class ResetPasswordRequest(BaseModel):
    """Request model for password reset."""
    token: str = Field(..., description="Password reset token")
    # min_length is a first-line check; the full strength rules are
    # enforced by validate_password_strength in the reset endpoint.
    new_password: str = Field(..., min_length=8, description="New password (minimum 8 characters)")


class ResetPasswordResponse(BaseModel):
    """Response model for password reset."""
    message: str
58
+
59
+
60
+ @router.post("/forgot-password", response_model=ForgotPasswordResponse)
61
+ async def forgot_password(
62
+ request: ForgotPasswordRequest,
63
+ session: Session = Depends(get_session)
64
+ ) -> ForgotPasswordResponse:
65
+ """
66
+ Request a password reset email.
67
+
68
+ Security features:
69
+ - No user enumeration (same response for existing/non-existing emails)
70
+ - Rate limiting (3 requests per hour per user)
71
+ - Cryptographically secure tokens
72
+ - 15-minute token expiry
73
+
74
+ Args:
75
+ request: Forgot password request with email
76
+ session: Database session
77
+
78
+ Returns:
79
+ Generic success message (no user enumeration)
80
+
81
+ Raises:
82
+ HTTPException 400: If email format is invalid
83
+ HTTPException 429: If rate limit exceeded
84
+ """
85
+ # Find user by email
86
+ user = get_user_by_email(session, request.email)
87
+
88
+ # Always return same message to prevent user enumeration
89
+ generic_message = "If an account exists with this email, you will receive a password reset link shortly."
90
+
91
+ # If user doesn't exist, return generic message (no enumeration)
92
+ if not user:
93
+ return ForgotPasswordResponse(message=generic_message)
94
+
95
+ # Check rate limit
96
+ if not check_rate_limit(session, user.id):
97
+ raise HTTPException(
98
+ status_code=429,
99
+ detail="Too many password reset requests. Please try again later."
100
+ )
101
+
102
+ # Create reset token
103
+ token = create_reset_token(session, user.id)
104
+
105
+ # Send reset email
106
+ email_sent = send_password_reset_email(user.email, token)
107
+
108
+ if not email_sent:
109
+ # Log error but don't expose to user
110
+ print(f"Failed to send password reset email to {user.email}")
111
+
112
+ # Always return generic message
113
+ return ForgotPasswordResponse(message=generic_message)
114
+
115
+
116
+ @router.get("/reset-password/{token}", response_model=TokenValidationResponse)
117
+ async def verify_reset_token(
118
+ token: str,
119
+ session: Session = Depends(get_session)
120
+ ) -> TokenValidationResponse:
121
+ """
122
+ Verify if a password reset token is valid.
123
+
124
+ Checks:
125
+ - Token exists
126
+ - Token has not expired (15 minutes)
127
+ - Token has not been used
128
+
129
+ Args:
130
+ token: Password reset token to verify
131
+ session: Database session
132
+
133
+ Returns:
134
+ TokenValidationResponse with validity status and user email
135
+
136
+ Example:
137
+ GET /api/auth/reset-password/abc123def456
138
+ """
139
+ # Validate token
140
+ token_record = validate_reset_token(session, token)
141
+
142
+ if not token_record:
143
+ return TokenValidationResponse(
144
+ valid=False,
145
+ error="Invalid or expired reset token"
146
+ )
147
+
148
+ # Get user email
149
+ user = session.get(User, token_record.user_id)
150
+
151
+ if not user:
152
+ return TokenValidationResponse(
153
+ valid=False,
154
+ error="User not found"
155
+ )
156
+
157
+ return TokenValidationResponse(
158
+ valid=True,
159
+ email=user.email
160
+ )
161
+
162
+
163
+ @router.post("/reset-password", response_model=ResetPasswordResponse)
164
+ async def reset_password(
165
+ request: ResetPasswordRequest,
166
+ session: Session = Depends(get_session)
167
+ ) -> ResetPasswordResponse:
168
+ """
169
+ Reset user password with a valid token.
170
+
171
+ Security features:
172
+ - Token validation (expiry, usage)
173
+ - Password strength validation
174
+ - One-time use tokens
175
+ - Automatic token invalidation
176
+
177
+ Args:
178
+ request: Reset password request with token and new password
179
+ session: Database session
180
+
181
+ Returns:
182
+ Success message
183
+
184
+ Raises:
185
+ HTTPException 400: If token is invalid or password is weak
186
+ HTTPException 422: If validation fails
187
+ """
188
+ # Validate token
189
+ token_record = validate_reset_token(session, request.token)
190
+
191
+ if not token_record:
192
+ raise HTTPException(
193
+ status_code=400,
194
+ detail="Invalid or expired reset token"
195
+ )
196
+
197
+ # Validate password strength
198
+ password_validation = validate_password_strength(request.new_password)
199
+
200
+ if not password_validation["valid"]:
201
+ raise HTTPException(
202
+ status_code=400,
203
+ detail={
204
+ "message": "Password does not meet strength requirements",
205
+ "errors": password_validation["errors"]
206
+ }
207
+ )
208
+
209
+ # Get user
210
+ user = session.get(User, token_record.user_id)
211
+
212
+ if not user:
213
+ raise HTTPException(
214
+ status_code=400,
215
+ detail="User not found"
216
+ )
217
+
218
+ # Hash new password
219
+ hashed_password = hash_password(request.new_password)
220
+
221
+ # Update user password
222
+ user.hashed_password = hashed_password
223
+ session.add(user)
224
+
225
+ # Invalidate token (mark as used)
226
+ invalidate_token(session, request.token)
227
+
228
+ # Commit changes
229
+ session.commit()
230
+
231
+ return ResetPasswordResponse(
232
+ message="Password successfully reset. You can now sign in with your new password."
233
+ )
src/api/subtasks.py ADDED
@@ -0,0 +1,230 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Subtasks API endpoints for CRUD operations on subtasks.
3
+
4
+ This module provides:
5
+ - GET /api/tasks/{task_id}/subtasks - List all subtasks for a task
6
+ - POST /api/tasks/{task_id}/subtasks - Create new subtask
7
+ - PUT /api/subtasks/{subtask_id} - Update existing subtask
8
+ - DELETE /api/subtasks/{subtask_id} - Delete subtask
9
+
10
+ All endpoints require JWT authentication and enforce user isolation.
11
+ """
12
+
13
+ from fastapi import APIRouter, HTTPException, Depends, status
14
+ from sqlmodel import Session
15
+ from pydantic import BaseModel, Field
16
+ from typing import Optional, List
17
+
18
+ from ..models.subtask import Subtask
19
+ from ..services import subtasks as subtask_service
20
+ from ..middleware.jwt_auth import get_current_user_id
21
+ from ..database import get_session
22
+
23
+ router = APIRouter()
24
+
25
+
26
+ # Request/Response Models
27
class CreateSubtaskRequest(BaseModel):
    """Request model for creating a subtask."""
    # Mandatory; pydantic enforces 1-500 characters.
    title: str = Field(..., min_length=1, max_length=500, description="Subtask title")
    # Position within the parent task's subtask list; defaults to 0.
    order: Optional[int] = Field(0, description="Order position")
31
+
32
+
33
class UpdateSubtaskRequest(BaseModel):
    """Request model for updating a subtask.

    All fields are optional; fields left as None are forwarded to the
    service as None (presumably meaning "leave unchanged" — confirm
    against the service layer).
    """
    title: Optional[str] = Field(None, min_length=1, max_length=500, description="Subtask title")
    completed: Optional[bool] = Field(None, description="Subtask completion status")
    order: Optional[int] = Field(None, description="Order position")
38
+
39
+
40
class SubtaskResponse(BaseModel):
    """Subtask data response model.

    Timestamps are serialized by the endpoints as ISO-8601 strings
    (via `datetime.isoformat()`), hence `str` here rather than datetime.
    """
    id: int
    task_id: int
    title: str
    completed: bool
    order: int
    created_at: str
    updated_at: str
49
+
50
+
51
class SubtaskListResponse(BaseModel):
    """Response model for subtask list (wraps the array in an object)."""
    subtasks: List[SubtaskResponse]
54
+
55
+
56
@router.get("/tasks/{task_id}/subtasks", response_model=SubtaskListResponse)
async def list_subtasks(
    task_id: int,
    user_id: int = Depends(get_current_user_id),
    session: Session = Depends(get_session)
) -> SubtaskListResponse:
    """
    Return every subtask belonging to the given task.

    The service receives both the task id and the authenticated user's id,
    so results are scoped to tasks the caller owns.

    Args:
        task_id: Parent task identifier.
        user_id: Authenticated user id extracted from the JWT.
        session: Database session (injected).

    Returns:
        SubtaskListResponse wrapping the serialized subtasks.

    Raises:
        HTTPException 401: If the JWT token is invalid.

    Note:
        This handler itself raises no 404; for an unknown or foreign task
        it presumably yields an empty list — confirm against the service.
    """
    records = subtask_service.get_task_subtasks(session, task_id, user_id)

    def serialize(item: Subtask) -> SubtaskResponse:
        # Timestamps travel as ISO-8601 strings in the API contract.
        return SubtaskResponse(
            id=item.id,
            task_id=item.task_id,
            title=item.title,
            completed=item.completed,
            order=item.order,
            created_at=item.created_at.isoformat(),
            updated_at=item.updated_at.isoformat()
        )

    return SubtaskListResponse(subtasks=[serialize(item) for item in records])
95
+
96
+
97
@router.post("/tasks/{task_id}/subtasks", response_model=SubtaskResponse, status_code=status.HTTP_201_CREATED)
async def create_subtask(
    task_id: int,
    request: CreateSubtaskRequest,
    user_id: int = Depends(get_current_user_id),
    session: Session = Depends(get_session)
) -> SubtaskResponse:
    """
    Attach a new subtask to an existing task.

    Ownership is enforced by the service: a task id that does not exist or
    belongs to another user yields no subtask, which maps to a 404 here.

    Args:
        task_id: Parent task identifier.
        request: Validated creation payload (title, optional order).
        user_id: Authenticated user id extracted from the JWT.
        session: Database session (injected).

    Returns:
        SubtaskResponse describing the freshly created subtask.

    Raises:
        HTTPException 401: If the JWT token is invalid.
        HTTPException 404: If the task is missing or owned by someone else.
    """
    created = subtask_service.create_subtask(
        session=session,
        task_id=task_id,
        user_id=user_id,
        title=request.title,
        order=request.order or 0  # a missing/None order means position 0
    )

    if not created:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Task not found"
        )

    # Serialize with ISO-8601 timestamps per the API contract.
    return SubtaskResponse(
        id=created.id,
        task_id=created.task_id,
        title=created.title,
        completed=created.completed,
        order=created.order,
        created_at=created.created_at.isoformat(),
        updated_at=created.updated_at.isoformat()
    )
145
+
146
+
147
@router.put("/subtasks/{subtask_id}", response_model=SubtaskResponse)
async def update_subtask(
    subtask_id: int,
    request: UpdateSubtaskRequest,
    user_id: int = Depends(get_current_user_id),
    session: Session = Depends(get_session)
) -> SubtaskResponse:
    """
    Apply a partial update to a subtask.

    Every field of the payload is forwarded as-is; untouched fields arrive
    at the service as None.

    Args:
        subtask_id: Identifier of the subtask to modify.
        request: Validated update payload (all fields optional).
        user_id: Authenticated user id extracted from the JWT.
        session: Database session (injected).

    Returns:
        SubtaskResponse reflecting the stored state after the update.

    Raises:
        HTTPException 401: If the JWT token is invalid.
        HTTPException 404: If the subtask is missing or not owned by the caller.
    """
    updated = subtask_service.update_subtask(
        session=session,
        subtask_id=subtask_id,
        user_id=user_id,
        title=request.title,
        completed=request.completed,
        order=request.order
    )

    if not updated:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Subtask not found"
        )

    # Serialize with ISO-8601 timestamps per the API contract.
    return SubtaskResponse(
        id=updated.id,
        task_id=updated.task_id,
        title=updated.title,
        completed=updated.completed,
        order=updated.order,
        created_at=updated.created_at.isoformat(),
        updated_at=updated.updated_at.isoformat()
    )
196
+
197
+
198
@router.delete("/subtasks/{subtask_id}", status_code=status.HTTP_204_NO_CONTENT)
async def delete_subtask(
    subtask_id: int,
    user_id: int = Depends(get_current_user_id),
    session: Session = Depends(get_session)
) -> None:
    """
    Remove a subtask owned by the authenticated user.

    Args:
        subtask_id: Identifier of the subtask to remove.
        user_id: Authenticated user id extracted from the JWT.
        session: Database session (injected).

    Returns:
        None; the route responds with 204 No Content on success.

    Raises:
        HTTPException 401: If the JWT token is invalid.
        HTTPException 404: If the subtask is missing or not owned by the caller.
    """
    removed = subtask_service.delete_subtask(
        session=session,
        subtask_id=subtask_id,
        user_id=user_id
    )

    if not removed:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Subtask not found"
        )
src/api/tasks.py ADDED
@@ -0,0 +1,278 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Tasks API endpoints for CRUD operations on tasks.
3
+
4
+ This module provides:
5
+ - GET /api/tasks - List all user tasks
6
+ - POST /api/tasks - Create new task
7
+ - PUT /api/tasks/{id} - Update existing task
8
+ - DELETE /api/tasks/{id} - Delete task
9
+
10
+ All endpoints require JWT authentication and enforce user isolation.
11
+ """
12
+
13
+ from fastapi import APIRouter, HTTPException, Depends, status
14
+ from sqlmodel import Session
15
+ from pydantic import BaseModel, Field
16
+ from typing import Optional, List
17
+
18
+ from ..models.task import Task
19
+ from ..services import tasks as task_service
20
+ from ..middleware.jwt_auth import get_current_user_id
21
+ from ..database import get_session
22
+
23
+ router = APIRouter()
24
+
25
+
26
+ # Request/Response Models
27
class CreateTaskRequest(BaseModel):
    """Request model for creating a task.

    Dates are accepted as ISO-8601 strings; parsing is presumably done by
    the service layer — confirm the expected format there.
    """
    title: str = Field(..., min_length=1, max_length=500, description="Task title")
    description: Optional[str] = Field(None, description="Optional task description")
    category: Optional[str] = Field(None, max_length=50, description="Task category/tag")
    due_date: Optional[str] = Field(None, description="Due date in ISO format")
    priority: Optional[str] = Field("medium", description="Task priority: low, medium, high")
    is_recurring: Optional[bool] = Field(False, description="Whether task is recurring")
    recurrence_type: Optional[str] = Field(None, description="Recurrence type: daily, weekly, monthly, yearly")
    recurrence_interval: Optional[int] = Field(1, description="Recurrence interval (e.g., every 2 days)")
    recurrence_end_date: Optional[str] = Field(None, description="Recurrence end date in ISO format")
38
+
39
+
40
class UpdateTaskRequest(BaseModel):
    """Request model for updating a task.

    All fields are optional; fields left as None are forwarded to the
    service as None (presumably meaning "leave unchanged" — confirm
    against the service layer).
    """
    title: Optional[str] = Field(None, min_length=1, max_length=500, description="Task title")
    description: Optional[str] = Field(None, description="Task description")
    completed: Optional[bool] = Field(None, description="Task completion status")
    category: Optional[str] = Field(None, max_length=50, description="Task category/tag")
    due_date: Optional[str] = Field(None, description="Due date in ISO format")
    priority: Optional[str] = Field(None, description="Task priority: low, medium, high")
    is_recurring: Optional[bool] = Field(None, description="Whether task is recurring")
    recurrence_type: Optional[str] = Field(None, description="Recurrence type: daily, weekly, monthly, yearly")
    recurrence_interval: Optional[int] = Field(None, description="Recurrence interval")
    recurrence_end_date: Optional[str] = Field(None, description="Recurrence end date in ISO format")
52
+
53
+
54
class TaskResponse(BaseModel):
    """Task data response model.

    Date/time fields are serialized by the endpoints as ISO-8601 strings
    (via `datetime.isoformat()`), hence `str` here rather than datetime.
    """
    id: int
    user_id: int
    title: str
    description: Optional[str]
    completed: bool
    category: Optional[str]
    due_date: Optional[str]
    priority: Optional[str]
    is_recurring: bool
    recurrence_type: Optional[str]
    recurrence_interval: Optional[int]
    recurrence_end_date: Optional[str]
    parent_task_id: Optional[int]
    created_at: str
    updated_at: str
71
+
72
+
73
class TaskListResponse(BaseModel):
    """Response model for task list (wraps the array in an object)."""
    tasks: List[TaskResponse]
76
+
77
+
78
@router.get("", response_model=TaskListResponse)
async def list_tasks(
    user_id: int = Depends(get_current_user_id),
    session: Session = Depends(get_session)
) -> TaskListResponse:
    """
    Return every task owned by the authenticated user.

    Args:
        user_id: Authenticated user id extracted from the JWT.
        session: Database session (injected).

    Returns:
        TaskListResponse wrapping the serialized tasks.

    Raises:
        HTTPException 401: If the JWT token is invalid.
    """
    records = task_service.get_user_tasks(session, user_id)

    def serialize(item: Task) -> TaskResponse:
        # Dates travel as ISO-8601 strings (or null) in the API contract.
        return TaskResponse(
            id=item.id,
            user_id=item.user_id,
            title=item.title,
            description=item.description,
            completed=item.completed,
            category=item.category,
            due_date=item.due_date.isoformat() if item.due_date else None,
            priority=item.priority,
            is_recurring=item.is_recurring,
            recurrence_type=item.recurrence_type,
            recurrence_interval=item.recurrence_interval,
            recurrence_end_date=item.recurrence_end_date.isoformat() if item.recurrence_end_date else None,
            parent_task_id=item.parent_task_id,
            created_at=item.created_at.isoformat(),
            updated_at=item.updated_at.isoformat()
        )

    return TaskListResponse(tasks=[serialize(item) for item in records])
122
+
123
+
124
@router.post("", response_model=TaskResponse, status_code=status.HTTP_201_CREATED)
async def create_task(
    request: CreateTaskRequest,
    user_id: int = Depends(get_current_user_id),
    session: Session = Depends(get_session)
) -> TaskResponse:
    """
    Create a new task for the authenticated user.

    Optional recurrence fields are normalized before hitting the service:
    a missing is_recurring becomes False and a missing interval becomes 1.

    Args:
        request: Validated creation payload.
        user_id: Authenticated user id extracted from the JWT.
        session: Database session (injected).

    Returns:
        TaskResponse describing the freshly created task.

    Raises:
        HTTPException 401: If the JWT token is invalid.
        HTTPException 422: If validation fails.
    """
    new_task = task_service.create_task(
        session=session,
        user_id=user_id,
        title=request.title,
        description=request.description,
        category=request.category,
        due_date=request.due_date,
        priority=request.priority,
        is_recurring=request.is_recurring or False,
        recurrence_type=request.recurrence_type,
        recurrence_interval=request.recurrence_interval or 1,
        recurrence_end_date=request.recurrence_end_date
    )

    # Serialize with ISO-8601 date strings per the API contract.
    return TaskResponse(
        id=new_task.id,
        user_id=new_task.user_id,
        title=new_task.title,
        description=new_task.description,
        completed=new_task.completed,
        category=new_task.category,
        due_date=new_task.due_date.isoformat() if new_task.due_date else None,
        priority=new_task.priority,
        is_recurring=new_task.is_recurring,
        recurrence_type=new_task.recurrence_type,
        recurrence_interval=new_task.recurrence_interval,
        recurrence_end_date=new_task.recurrence_end_date.isoformat() if new_task.recurrence_end_date else None,
        parent_task_id=new_task.parent_task_id,
        created_at=new_task.created_at.isoformat(),
        updated_at=new_task.updated_at.isoformat()
    )
178
+
179
+
180
@router.put("/{task_id}", response_model=TaskResponse)
async def update_task(
    task_id: int,
    request: UpdateTaskRequest,
    user_id: int = Depends(get_current_user_id),
    session: Session = Depends(get_session)
) -> TaskResponse:
    """
    Apply a partial update to a task.

    Every field of the payload is forwarded as-is; untouched fields arrive
    at the service as None.

    Args:
        task_id: Identifier of the task to modify.
        request: Validated update payload (all fields optional).
        user_id: Authenticated user id extracted from the JWT.
        session: Database session (injected).

    Returns:
        TaskResponse reflecting the stored state after the update.

    Raises:
        HTTPException 401: If the JWT token is invalid.
        HTTPException 404: If the task is missing or not owned by the caller.
    """
    changed = task_service.update_task(
        session=session,
        task_id=task_id,
        user_id=user_id,
        title=request.title,
        description=request.description,
        completed=request.completed,
        category=request.category,
        due_date=request.due_date,
        priority=request.priority,
        is_recurring=request.is_recurring,
        recurrence_type=request.recurrence_type,
        recurrence_interval=request.recurrence_interval,
        recurrence_end_date=request.recurrence_end_date
    )

    if not changed:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Task not found"
        )

    # Serialize with ISO-8601 date strings per the API contract.
    return TaskResponse(
        id=changed.id,
        user_id=changed.user_id,
        title=changed.title,
        description=changed.description,
        completed=changed.completed,
        category=changed.category,
        due_date=changed.due_date.isoformat() if changed.due_date else None,
        priority=changed.priority,
        is_recurring=changed.is_recurring,
        recurrence_type=changed.recurrence_type,
        recurrence_interval=changed.recurrence_interval,
        recurrence_end_date=changed.recurrence_end_date.isoformat() if changed.recurrence_end_date else None,
        parent_task_id=changed.parent_task_id,
        created_at=changed.created_at.isoformat(),
        updated_at=changed.updated_at.isoformat()
    )
244
+
245
+
246
@router.delete("/{task_id}", status_code=status.HTTP_204_NO_CONTENT)
async def delete_task(
    task_id: int,
    user_id: int = Depends(get_current_user_id),
    session: Session = Depends(get_session)
) -> None:
    """
    Remove a task owned by the authenticated user.

    Args:
        task_id: Identifier of the task to remove.
        user_id: Authenticated user id extracted from the JWT.
        session: Database session (injected).

    Returns:
        None; the route responds with 204 No Content on success.

    Raises:
        HTTPException 401: If the JWT token is invalid.
        HTTPException 404: If the task is missing or not owned by the caller.
    """
    removed = task_service.delete_task(
        session=session,
        task_id=task_id,
        user_id=user_id
    )

    if not removed:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Task not found"
        )
src/config/__init__.py ADDED
File without changes
src/config/__pycache__/__init__.cpython-314.pyc ADDED
Binary file (209 Bytes). View file
 
src/config/__pycache__/logging.cpython-314.pyc ADDED
Binary file (2.74 kB). View file
 
src/config/logging.py ADDED
@@ -0,0 +1,63 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Logging configuration for the AI chatbot backend.
3
+
4
+ This module sets up structured logging with appropriate levels
5
+ and formats for production use.
6
+ """
7
+
8
+ import logging
9
+ import sys
10
+ from typing import Optional
11
+ import os
12
+
13
+
14
def setup_logging(
    level: Optional[str] = None,
    log_file: Optional[str] = None
) -> None:
    """
    Configure application-wide logging handlers and levels.

    The root logger is reset on every call: existing handlers are removed
    and replaced with a stdout handler, plus an optional file handler.

    Args:
        level: Level name (DEBUG/INFO/WARNING/ERROR/CRITICAL); falls back
            to the LOG_LEVEL environment variable, then to INFO.
        log_file: If given, log records are also written to this path.
    """
    # Resolve the requested level; unknown names silently fall back to INFO.
    requested = level or os.getenv("LOG_LEVEL", "INFO")
    resolved = getattr(logging, requested.upper(), logging.INFO)

    record_format = logging.Formatter(
        fmt='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
        datefmt='%Y-%m-%d %H:%M:%S'
    )

    root = logging.getLogger()
    root.setLevel(resolved)
    root.handlers.clear()  # idempotent: re-running must not stack handlers

    stream = logging.StreamHandler(sys.stdout)
    stream.setLevel(resolved)
    stream.setFormatter(record_format)
    root.addHandler(stream)

    if log_file:
        file_out = logging.FileHandler(log_file)
        file_out.setLevel(resolved)
        file_out.setFormatter(record_format)
        root.addHandler(file_out)

    # Quieten noisy third-party loggers regardless of the app level.
    logging.getLogger("uvicorn").setLevel(logging.INFO)
    logging.getLogger("sqlalchemy.engine").setLevel(logging.WARNING)

    logging.info(f"Logging configured at {requested} level")
60
+
61
+
62
# Auto-configure logging on import
# NOTE(review): this is a module-level side effect — importing this module
# anywhere reconfigures the root logger's handlers and level.
setup_logging()
src/database.py ADDED
@@ -0,0 +1,59 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Database configuration and session management.
3
+
4
+ This module provides:
5
+ - Database engine creation
6
+ - Session management
7
+ - Dependency injection for FastAPI routes
8
+ """
9
+
10
+ import os
11
+ from typing import Generator
12
+
13
+ from sqlmodel import Session, create_engine, SQLModel
14
+
15
# Get database URL from environment variable
# For Vercel serverless, use /tmp directory for SQLite
DATABASE_URL = os.getenv("DATABASE_URL")

if DATABASE_URL is None:
    # Check if running on Vercel (serverless environment)
    if os.getenv("VERCEL"):
        # Use /tmp directory which is writable in Vercel serverless.
        # NOTE(review): /tmp is ephemeral per invocation/instance, so data
        # stored here does not persist across cold starts.
        DATABASE_URL = "sqlite:////tmp/todo.db"
    else:
        # Local development: SQLite file in the working directory.
        DATABASE_URL = "sqlite:///./todo.db"

# Create database engine.
# check_same_thread=False lets the SQLite connection be shared across
# threads (FastAPI handles requests on a threadpool); only SQLite needs it.
connect_args = {"check_same_thread": False} if DATABASE_URL.startswith("sqlite") else {}

engine = create_engine(
    DATABASE_URL,
    echo=False,  # Disable SQL query logging for serverless
    connect_args=connect_args,
    pool_pre_ping=True,  # Verify connections before using
)
38
+
39
+
40
def create_db_and_tables():
    """Create every table registered on SQLModel metadata (existing tables are left untouched)."""
    SQLModel.metadata.create_all(engine)
43
+
44
+
45
def get_session() -> Generator[Session, None, None]:
    """
    Dependency function to provide a database session to FastAPI routes.

    The `with` block guarantees the session is closed after the response,
    even if the route handler raises.

    Yields:
        Session: SQLModel database session bound to the module-level engine.

    Example:
        @app.get("/items")
        def get_items(session: Session = Depends(get_session)):
            items = session.exec(select(Item)).all()
            return items
    """
    with Session(engine) as session:
        yield session
src/main.py ADDED
@@ -0,0 +1,82 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from dotenv import load_dotenv
import os

# Load environment variables from .env before anything reads them.
load_dotenv()

# Configure logging first (also runs setup on import of the module).
from src.config.logging import setup_logging
setup_logging()

# Create FastAPI application
app = FastAPI(
    title="Todo Application API with AI Chatbot",
    description="Backend API for Todo application with JWT authentication and AI-powered conversational task management",
    version="2.0.0",
)

# CORS configuration: comma-separated origin list, overridable via env;
# defaults cover common local dev ports 3000-3005.
CORS_ORIGINS = os.getenv("CORS_ORIGINS", "http://localhost:3000,http://localhost:3001,http://localhost:3002,http://localhost:3003,http://localhost:3004,http://localhost:3005").split(",")

# Configure CORS middleware
app.add_middleware(
    CORSMiddleware,
    allow_origins=CORS_ORIGINS,
    allow_credentials=True,
    allow_methods=["GET", "POST", "PUT", "DELETE", "PATCH", "OPTIONS"],
    allow_headers=["*"],
    expose_headers=["*"],
    max_age=3600,  # cache preflight responses for an hour
)
33
+
34
# Initialize database tables on startup
from src.database import create_db_and_tables

# NOTE(review): @app.on_event is deprecated in current FastAPI in favor of
# lifespan handlers; consider migrating when touching this code.
@app.on_event("startup")
def on_startup():
    """Initialize database tables and MCP server on application startup."""
    # Best-effort table creation: failures are logged and ignored because
    # the tables may already exist (e.g. created by Alembic migrations).
    try:
        create_db_and_tables()
    except Exception as e:
        print(f"Warning: Could not initialize database tables: {e}")
        # Continue anyway - tables might already exist

    # Initialize MCP server with tools.
    # NOTE(review): importing register_all_tools only registers tools if the
    # module calls it at import time — confirm src/mcp/tools/__init__.py does.
    try:
        from src.mcp.server import mcp_server
        from src.mcp.tools import register_all_tools  # This triggers tool registration
        print(f"MCP Server initialized: {mcp_server.name} v{mcp_server.version}")
        print(f"Registered tools: {len(mcp_server.tools)}")
    except Exception as e:
        print(f"Warning: Could not initialize MCP server: {e}")
54
+
55
# Health check endpoint
@app.get("/health")
async def health_check():
    """Liveness probe: confirms the API process is up and serving requests."""
    payload = {"status": "healthy"}
    return payload
60
+
61
# Root endpoint
@app.get("/")
async def root():
    """
    Root endpoint with basic API metadata.

    Returns the service name, the API version, and pointers to the
    interactive docs and the health check.
    """
    return {
        "message": "Todo Application API",
        # Fixed: previously reported "1.0.0", inconsistent with the
        # FastAPI(version="2.0.0") declared for this app above.
        "version": "2.0.0",
        "docs": "/docs",
        "health": "/health"
    }
71
+
72
# Router registration
from src.api import auth, tasks, subtasks, password_reset, chat
# AI router temporarily disabled due to Vercel size constraints
# from src.api import ai

# Both auth routers deliberately share the /api/auth prefix; the subtasks
# router mounts at /api because it defines routes under both /tasks/... and
# /subtasks/... paths itself.
app.include_router(auth.router, prefix="/api/auth", tags=["Authentication"])
app.include_router(password_reset.router, prefix="/api/auth", tags=["Password Reset"])
app.include_router(tasks.router, prefix="/api/tasks", tags=["Tasks"])
app.include_router(subtasks.router, prefix="/api", tags=["Subtasks"])
app.include_router(chat.router, prefix="/api/v1", tags=["AI Chat"])
# app.include_router(ai.router, prefix="/api/ai", tags=["AI Features"])
src/main_minimal.py ADDED
@@ -0,0 +1,21 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
from fastapi import FastAPI
import os

# Minimal FastAPI app used to smoke-test the deployment platform (Railway):
# no database, no auth — just three probes.
app = FastAPI(title="Todo API - Minimal Test")


@app.get("/")
def root():
    """Report process status plus basic environment diagnostics."""
    database_url = os.getenv("DATABASE_URL")
    return {
        "status": "ok",
        "message": "Railway FastAPI is working!",
        "port": os.getenv("PORT", "not set"),
        "database": "connected" if database_url else "not configured"
    }


@app.get("/health")
def health():
    """Plain health probe."""
    return {"status": "healthy", "service": "railway-test"}


@app.get("/api/health")
def api_health():
    """Health probe under the /api prefix."""
    return {"status": "healthy", "api": "working"}
src/mcp/__init__.py ADDED
File without changes
src/mcp/__pycache__/__init__.cpython-314.pyc ADDED
Binary file (206 Bytes). View file
 
src/mcp/__pycache__/server.cpython-314.pyc ADDED
Binary file (6.57 kB). View file
 
src/mcp/server.py ADDED
@@ -0,0 +1,138 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ MCP (Model Context Protocol) Server for AI Chatbot.
3
+
4
+ This module initializes and manages MCP tools that the AI can use
5
+ to interact with the task management system.
6
+ """
7
+
8
+ import logging
9
+ from typing import Dict, Any, Callable, List
10
+ from pydantic import BaseModel
11
+
12
+ logger = logging.getLogger(__name__)
13
+
14
+
15
class MCPTool(BaseModel):
    """Base model for MCP tool definition."""
    # Unique identifier used for registry lookup and the LLM tool schema.
    name: str
    # Human-readable summary shown to the model when it selects tools.
    description: str
    # Per-parameter spec: {"param": {"type": ..., "description": ..., "required": ...}}.
    parameters: Dict[str, Any]
    # Typed Any so pydantic does not try to validate the callable itself.
    function: Any  # Will be the actual callable
21
+
22
+
23
class MCPServer:
    """
    Registry and dispatcher for MCP tools.

    Tools are registered once by name, can be listed in a Cohere-compatible
    function schema, and are executed asynchronously with a database session
    threaded through.
    """

    def __init__(self, name: str = "todo-tools", version: str = "1.0.0"):
        self.name = name
        self.version = version
        # name -> MCPTool; a later registration under the same name overwrites.
        self.tools: Dict[str, MCPTool] = {}
        logger.info(f"MCP Server initialized: {name} v{version}")

    def register_tool(
        self,
        name: str,
        description: str,
        parameters: Dict[str, Any],
        function: Callable
    ):
        """
        Add (or replace) a tool in the registry.

        Args:
            name: Unique tool name.
            description: What the tool does, for the LLM's benefit.
            parameters: Mapping of parameter name to its schema fragment.
            function: Callable/coroutine invoked when the tool is executed.
        """
        if name in self.tools:
            logger.warning(f"Tool '{name}' already registered, overwriting")

        self.tools[name] = MCPTool(
            name=name,
            description=description,
            parameters=parameters,
            function=function
        )
        logger.info(f"Registered tool: {name}")

    def get_tool(self, name: str) -> MCPTool:
        """Look up a registered tool by name; raise ValueError if absent."""
        try:
            return self.tools[name]
        except KeyError:
            raise ValueError(f"Tool '{name}' not found in MCP server") from None

    def list_tools(self) -> List[Dict[str, Any]]:
        """
        Describe every registered tool in Cohere API v2 function format.

        Returns:
            A list of {"type": "function", "function": {...}} definitions
            with a JSON-Schema "parameters" object per tool.
        """
        definitions: List[Dict[str, Any]] = []
        for tool in self.tools.values():
            # Translate the internal parameter spec into JSON Schema.
            schema_props: Dict[str, Any] = {}
            mandatory: List[str] = []
            for field, spec in tool.parameters.items():
                schema_props[field] = {
                    "type": spec.get("type", "string"),
                    "description": spec.get("description", "")
                }
                if spec.get("required", False):
                    mandatory.append(field)

            definitions.append({
                "type": "function",
                "function": {
                    "name": tool.name,
                    "description": tool.description,
                    "parameters": {
                        "type": "object",
                        "properties": schema_props,
                        "required": mandatory
                    }
                }
            })

        return definitions

    async def execute_tool(
        self,
        tool_name: str,
        parameters: Dict[str, Any],
        db: Any = None
    ) -> Dict[str, Any]:
        """
        Run a registered tool and return its result.

        Args:
            tool_name: Name of the tool to invoke.
            parameters: Keyword arguments forwarded to the tool.
            db: Database session passed through to the tool as `db=`.

        Returns:
            The tool's result dictionary.

        Raises:
            ValueError: If the tool is not registered.
            Exception: Whatever the tool raises (logged, then re-raised).
        """
        tool = self.get_tool(tool_name)

        try:
            logger.info(f"Executing tool: {tool_name} with params: {parameters}")
            # Forward the DB session so tools can hit the task store.
            result = await tool.function(**parameters, db=db)
            logger.info(f"Tool {tool_name} executed successfully")
            return result
        except Exception as e:
            logger.error(f"Tool {tool_name} execution failed: {str(e)}")
            raise
135
+
136
+
137
# Global MCP server instance
# Shared singleton; tool modules import this object to register themselves.
mcp_server = MCPServer()
src/mcp/tools/__init__.py ADDED
@@ -0,0 +1,65 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ MCP Tools initialization and registration.
3
+
4
+ This module registers all MCP tools with the server on import.
5
+ """
6
+
7
+ import logging
8
+ from src.mcp.server import mcp_server
9
+ from src.mcp.tools.add_task import add_task_tool
10
+ from src.mcp.tools.list_tasks import list_tasks_tool
11
+ from src.mcp.tools.complete_task import complete_task_tool
12
+ from src.mcp.tools.delete_task import delete_task_tool
13
+ from src.mcp.tools.update_task import update_task_tool
14
+
15
+ logger = logging.getLogger(__name__)
16
+
17
+
18
def register_all_tools():
    """
    Register every MCP tool with the global ``mcp_server``.

    Each tool module exposes a tool object carrying its own name,
    description, parameter schema, and async ``execute`` callable; this
    function forwards those attributes to ``mcp_server.register_tool``.
    Replaces five copy-pasted registration stanzas with one data-driven
    loop so adding a tool is a one-line change, and derives the logged
    count instead of hardcoding it.
    """
    # One entry per user story; order matches the user-story numbering.
    all_tools = (
        add_task_tool,       # User Story 1
        list_tasks_tool,     # User Story 2
        complete_task_tool,  # User Story 3
        delete_task_tool,    # User Story 4
        update_task_tool,    # User Story 5
    )

    for tool in all_tools:
        mcp_server.register_tool(
            name=tool.name,
            description=tool.description,
            parameters=tool.parameters,
            function=tool.execute
        )

    # Lazy %-formatting keeps the call cheap when INFO logging is disabled.
    logger.info("All %d MCP tools registered successfully", len(all_tools))
62
+
63
+
64
# Auto-register tools on import: importing this package is enough to wire
# every tool into the global MCP server (side effect by design).
register_all_tools()
src/mcp/tools/__pycache__/__init__.cpython-314.pyc ADDED
Binary file (2.3 kB). View file