Spaces:
Runtime error
Runtime error
Commit ·
0887862
1
Parent(s): 24b4d02
new
Browse filesThis view is limited to 50 files because it contains too many changes. See raw diff
- .dockerignore +42 -0
- .dxtignore +46 -0
- .github/ISSUE_TEMPLATE/bug-report.md +35 -0
- .github/ISSUE_TEMPLATE/feature_request.md +20 -0
- .github/dependabot.yml +11 -0
- .github/workflows/ruff-format.yml +22 -0
- .github/workflows/ruff.yml +22 -0
- .gitignore +32 -0
- .python-version +1 -0
- Dockerfile +45 -0
- LICENSE +21 -0
- README_NEW.md +464 -0
- SECURITY.md +48 -0
- auth/__init__.py +1 -0
- auth/auth_info_middleware.py +425 -0
- auth/credential_store.py +246 -0
- auth/external_oauth_provider.py +99 -0
- auth/google_auth.py +934 -0
- auth/mcp_session_middleware.py +120 -0
- auth/oauth21_session_store.py +893 -0
- auth/oauth_callback_server.py +288 -0
- auth/oauth_config.py +438 -0
- auth/oauth_responses.py +223 -0
- auth/oauth_types.py +82 -0
- auth/scopes.py +207 -0
- auth/service_decorator.py +789 -0
- commands.rxt +2 -0
- core/__init__.py +1 -0
- core/api_enablement.py +108 -0
- core/attachment_storage.py +217 -0
- core/comments.py +320 -0
- core/config.py +37 -0
- core/context.py +43 -0
- core/log_formatter.py +207 -0
- core/server.py +563 -0
- core/tool_registry.py +105 -0
- core/tool_tier_loader.py +196 -0
- core/tool_tiers.yaml +166 -0
- core/utils.py +341 -0
- docker-compose.yml +16 -0
- fastmcp.json +21 -0
- fastmcp_server.py +175 -0
- gappsscript/README.md +514 -0
- gappsscript/TESTING.md +254 -0
- gappsscript/__init__.py +0 -0
- gappsscript/apps_script_tools.py +1309 -0
- gcalendar/__init__.py +1 -0
- gcalendar/calendar_tools.py +1075 -0
- gchat/__init__.py +7 -0
- gchat/chat_tools.py +223 -0
.dockerignore
ADDED
|
@@ -0,0 +1,42 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Git and version control
|
| 2 |
+
.git
|
| 3 |
+
.gitignore
|
| 4 |
+
gitdiff.txt
|
| 5 |
+
|
| 6 |
+
# Documentation and notes
|
| 7 |
+
*.md
|
| 8 |
+
AUTHENTICATION_REFACTOR_PROPOSAL.md
|
| 9 |
+
leverage_fastmcp_responses.md
|
| 10 |
+
|
| 11 |
+
# Test files and coverage
|
| 12 |
+
tests/
|
| 13 |
+
htmlcov/
|
| 14 |
+
.coverage
|
| 15 |
+
pytest_out.txt
|
| 16 |
+
|
| 17 |
+
# Build artifacts
|
| 18 |
+
build/
|
| 19 |
+
dist/
|
| 20 |
+
*.egg-info/
|
| 21 |
+
|
| 22 |
+
# Development files
|
| 23 |
+
mcp_server_debug.log
|
| 24 |
+
.credentials/
|
| 25 |
+
|
| 26 |
+
# Cache and temporary files
|
| 27 |
+
__pycache__/
|
| 28 |
+
*.pyc
|
| 29 |
+
*.pyo
|
| 30 |
+
*.pyd
|
| 31 |
+
.Python
|
| 32 |
+
.pytest_cache/
|
| 33 |
+
|
| 34 |
+
# IDE files
|
| 35 |
+
.vscode/
|
| 36 |
+
.idea/
|
| 37 |
+
*.swp
|
| 38 |
+
*.swo
|
| 39 |
+
|
| 40 |
+
# OS files
|
| 41 |
+
.DS_Store
|
| 42 |
+
Thumbs.db
|
.dxtignore
ADDED
|
@@ -0,0 +1,46 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# ---- Python artefacts --------------------------------------------------
|
| 2 |
+
__pycache__/
|
| 3 |
+
*.py[cod]
|
| 4 |
+
*.so
|
| 5 |
+
|
| 6 |
+
# ---- Packaging ---------------------------------------------------------
|
| 7 |
+
*.egg-info/
|
| 8 |
+
build/
|
| 9 |
+
dist/
|
| 10 |
+
|
| 11 |
+
# ---- Environments & tooling -------------------------------------------
|
| 12 |
+
.env
|
| 13 |
+
.venv/
|
| 14 |
+
venv/
|
| 15 |
+
.idea/
|
| 16 |
+
.vscode/
|
| 17 |
+
.claude/
|
| 18 |
+
|
| 19 |
+
# ---- macOS clutter -----------------------------------------------------
|
| 20 |
+
.DS_Store
|
| 21 |
+
|
| 22 |
+
# ---- Secrets & Credentials --------------------------------------------
|
| 23 |
+
client_secret.json
|
| 24 |
+
.credentials/
|
| 25 |
+
*.key
|
| 26 |
+
*.pem
|
| 27 |
+
*.p12
|
| 28 |
+
*.crt
|
| 29 |
+
*.der
|
| 30 |
+
token.pickle
|
| 31 |
+
credentials.json
|
| 32 |
+
|
| 33 |
+
# ---- Test & Debug Files -----------------------------------------------
|
| 34 |
+
.coverage
|
| 35 |
+
pytest_out.txt
|
| 36 |
+
mcp_server_debug.log
|
| 37 |
+
diff_output.txt
|
| 38 |
+
|
| 39 |
+
# ---- Temporary & Build Files ------------------------------------------
|
| 40 |
+
*.tmp
|
| 41 |
+
*.log
|
| 42 |
+
*.pid
|
| 43 |
+
*.swp
|
| 44 |
+
*.swo
|
| 45 |
+
*~
|
| 46 |
+
|
.github/ISSUE_TEMPLATE/bug-report.md
ADDED
|
@@ -0,0 +1,35 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
---
|
| 2 |
+
name: Bug Report
|
| 3 |
+
about: Create a report to help us improve Google Workspace MCP
|
| 4 |
+
title: ''
|
| 5 |
+
labels: ''
|
| 6 |
+
assignees: ''
|
| 7 |
+
|
| 8 |
+
---
|
| 9 |
+
|
| 10 |
+
**Describe the bug**
|
| 11 |
+
A clear and concise description of what the bug is.
|
| 12 |
+
|
| 13 |
+
**Startup Logs**
|
| 14 |
+
Include the startup output including everything from the Active Configuration section to "Uvicorn running"
|
| 15 |
+
|
| 16 |
+
**To Reproduce**
|
| 17 |
+
Steps to reproduce the behavior:
|
| 18 |
+
1. Go to '...'
|
| 19 |
+
2. Click on '....'
|
| 20 |
+
3. Scroll down to '....'
|
| 21 |
+
4. See error
|
| 22 |
+
|
| 23 |
+
**Expected behavior**
|
| 24 |
+
A clear and concise description of what you expected to happen.
|
| 25 |
+
|
| 26 |
+
**Screenshots**
|
| 27 |
+
If applicable, add screenshots to help explain your problem.
|
| 28 |
+
|
| 29 |
+
**Platform (please complete the following information):**
|
| 30 |
+
- OS: [e.g. macOS, Ubuntu, Windows]
|
| 31 |
+
- Container: [if applicable, e.g. Docker)
|
| 32 |
+
- Version [e.g. v1.2.0]
|
| 33 |
+
|
| 34 |
+
**Additional context**
|
| 35 |
+
Add any other context about the problem here.
|
.github/ISSUE_TEMPLATE/feature_request.md
ADDED
|
@@ -0,0 +1,20 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
---
|
| 2 |
+
name: Feature request
|
| 3 |
+
about: Suggest an idea for this project
|
| 4 |
+
title: ''
|
| 5 |
+
labels: ''
|
| 6 |
+
assignees: ''
|
| 7 |
+
|
| 8 |
+
---
|
| 9 |
+
|
| 10 |
+
**Is your feature request related to a problem? Please describe.**
|
| 11 |
+
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
|
| 12 |
+
|
| 13 |
+
**Describe the solution you'd like**
|
| 14 |
+
A clear and concise description of what you want to happen.
|
| 15 |
+
|
| 16 |
+
**Describe alternatives you've considered**
|
| 17 |
+
A clear and concise description of any alternative solutions or features you've considered.
|
| 18 |
+
|
| 19 |
+
**Additional context**
|
| 20 |
+
Add any other context or screenshots about the feature request here.
|
.github/dependabot.yml
ADDED
|
@@ -0,0 +1,11 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# To get started with Dependabot version updates, you'll need to specify which
|
| 2 |
+
# package ecosystems to update and where the package manifests are located.
|
| 3 |
+
# Please see the documentation for all configuration options:
|
| 4 |
+
# https://docs.github.com/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file
|
| 5 |
+
|
| 6 |
+
version: 2
|
| 7 |
+
updates:
|
| 8 |
+
- package-ecosystem: "" # See documentation for possible values
|
| 9 |
+
directory: "/" # Location of package manifests
|
| 10 |
+
schedule:
|
| 11 |
+
interval: "weekly"
|
.github/workflows/ruff-format.yml
ADDED
|
@@ -0,0 +1,22 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
name: Ruff Format
|
| 2 |
+
|
| 3 |
+
on:
|
| 4 |
+
pull_request:
|
| 5 |
+
branches: [ main ]
|
| 6 |
+
push:
|
| 7 |
+
branches: [ main ]
|
| 8 |
+
|
| 9 |
+
jobs:
|
| 10 |
+
ruff-format:
|
| 11 |
+
runs-on: ubuntu-latest
|
| 12 |
+
steps:
|
| 13 |
+
- uses: actions/checkout@v4
|
| 14 |
+
- uses: actions/setup-python@v5
|
| 15 |
+
with:
|
| 16 |
+
python-version: '3.11'
|
| 17 |
+
- name: Install uv
|
| 18 |
+
uses: astral-sh/setup-uv@v4
|
| 19 |
+
- name: Install dependencies
|
| 20 |
+
run: uv sync
|
| 21 |
+
- name: Run ruff format check
|
| 22 |
+
run: uv run ruff format --check
|
.github/workflows/ruff.yml
ADDED
|
@@ -0,0 +1,22 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
name: Ruff Check
|
| 2 |
+
|
| 3 |
+
on:
|
| 4 |
+
pull_request:
|
| 5 |
+
branches: [ main ]
|
| 6 |
+
push:
|
| 7 |
+
branches: [ main ]
|
| 8 |
+
|
| 9 |
+
jobs:
|
| 10 |
+
ruff:
|
| 11 |
+
runs-on: ubuntu-latest
|
| 12 |
+
steps:
|
| 13 |
+
- uses: actions/checkout@v4
|
| 14 |
+
- uses: actions/setup-python@v5
|
| 15 |
+
with:
|
| 16 |
+
python-version: '3.11'
|
| 17 |
+
- name: Install uv
|
| 18 |
+
uses: astral-sh/setup-uv@v4
|
| 19 |
+
- name: Install dependencies
|
| 20 |
+
run: uv sync
|
| 21 |
+
- name: Run ruff check
|
| 22 |
+
run: uv run ruff check
|
.gitignore
ADDED
|
@@ -0,0 +1,32 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# ---- Python artefacts --------------------------------------------------
|
| 2 |
+
__pycache__/
|
| 3 |
+
*.py[cod]
|
| 4 |
+
*.so
|
| 5 |
+
.mcp.json
|
| 6 |
+
claude.md
|
| 7 |
+
|
| 8 |
+
# ---- Packaging ---------------------------------------------------------
|
| 9 |
+
*.egg-info/
|
| 10 |
+
build/
|
| 11 |
+
dist/
|
| 12 |
+
*.dxt
|
| 13 |
+
|
| 14 |
+
# ---- Environments & tooling -------------------------------------------
|
| 15 |
+
.env
|
| 16 |
+
.venv/
|
| 17 |
+
venv/
|
| 18 |
+
.idea/
|
| 19 |
+
.vscode/
|
| 20 |
+
|
| 21 |
+
# ---- macOS clutter -----------------------------------------------------
|
| 22 |
+
.DS_Store
|
| 23 |
+
|
| 24 |
+
# ---- Secrets -----------------------------------------------------------
|
| 25 |
+
client_secret.json
|
| 26 |
+
|
| 27 |
+
# ---- Logs --------------------------------------------------------------
|
| 28 |
+
mcp_server_debug.log
|
| 29 |
+
|
| 30 |
+
# ---- Local development files -------------------------------------------
|
| 31 |
+
/.credentials
|
| 32 |
+
/.claude
|
.python-version
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
3.11
|
Dockerfile
ADDED
|
@@ -0,0 +1,45 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
FROM python:3.11-slim
|
| 2 |
+
|
| 3 |
+
WORKDIR /app
|
| 4 |
+
|
| 5 |
+
# Install system dependencies
|
| 6 |
+
RUN apt-get update && apt-get install -y \
|
| 7 |
+
curl \
|
| 8 |
+
&& rm -rf /var/lib/apt/lists/*
|
| 9 |
+
|
| 10 |
+
# Install uv for faster dependency management
|
| 11 |
+
RUN pip install --no-cache-dir uv
|
| 12 |
+
|
| 13 |
+
COPY . .
|
| 14 |
+
|
| 15 |
+
# Install Python dependencies using uv sync
|
| 16 |
+
RUN uv sync --frozen --no-dev
|
| 17 |
+
|
| 18 |
+
# Create non-root user for security
|
| 19 |
+
RUN useradd --create-home --shell /bin/bash app \
|
| 20 |
+
&& chown -R app:app /app
|
| 21 |
+
|
| 22 |
+
# Give read and write access to the store_creds volume
|
| 23 |
+
RUN mkdir -p /app/store_creds \
|
| 24 |
+
&& chown -R app:app /app/store_creds \
|
| 25 |
+
&& chmod 755 /app/store_creds
|
| 26 |
+
|
| 27 |
+
USER app
|
| 28 |
+
|
| 29 |
+
# Expose port (use default of 8000 if PORT not set)
|
| 30 |
+
EXPOSE 8000
|
| 31 |
+
# Expose additional port if PORT environment variable is set to a different value
|
| 32 |
+
ARG PORT
|
| 33 |
+
EXPOSE ${PORT:-8000}
|
| 34 |
+
|
| 35 |
+
# Health check
|
| 36 |
+
HEALTHCHECK --interval=30s --timeout=10s --start-period=30s --retries=3 \
|
| 37 |
+
CMD sh -c 'curl -f http://localhost:${PORT:-8000}/health || exit 1'
|
| 38 |
+
|
| 39 |
+
# Set environment variables for Python startup args
|
| 40 |
+
ENV TOOL_TIER=""
|
| 41 |
+
ENV TOOLS=""
|
| 42 |
+
|
| 43 |
+
# Use entrypoint for the base command and CMD for args
|
| 44 |
+
ENTRYPOINT ["/bin/sh", "-c"]
|
| 45 |
+
CMD ["uv run main.py --transport streamable-http ${TOOL_TIER:+--tool-tier \"$TOOL_TIER\"} ${TOOLS:+--tools $TOOLS}"]
|
LICENSE
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
MIT License
|
| 2 |
+
|
| 3 |
+
Copyright (c) 2025 Taylor Wilsdon
|
| 4 |
+
|
| 5 |
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
| 6 |
+
of this software and associated documentation files (the "Software"), to deal
|
| 7 |
+
in the Software without restriction, including without limitation the rights
|
| 8 |
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
| 9 |
+
copies of the Software, and to permit persons to whom the Software is
|
| 10 |
+
furnished to do so, subject to the following conditions:
|
| 11 |
+
|
| 12 |
+
The above copyright notice and this permission notice shall be included in all
|
| 13 |
+
copies or substantial portions of the Software.
|
| 14 |
+
|
| 15 |
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
| 16 |
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
| 17 |
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
| 18 |
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
| 19 |
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
| 20 |
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
| 21 |
+
SOFTWARE.
|
README_NEW.md
ADDED
|
@@ -0,0 +1,464 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
<div align="center">
|
| 2 |
+
|
| 3 |
+
# Google Workspace MCP Server
|
| 4 |
+
|
| 5 |
+
[](https://opensource.org/licenses/MIT)
|
| 6 |
+
[](https://www.python.org/downloads/)
|
| 7 |
+
[](https://pypi.org/project/workspace-mcp/)
|
| 8 |
+
|
| 9 |
+
**Complete Google Workspace control through natural language.** Gmail, Calendar, Drive, Docs, Sheets, Slides, Forms, Tasks, Chat, Apps Script, and Custom Search—all via MCP.
|
| 10 |
+
|
| 11 |
+
[Quick Start](#-quick-start) • [Tools Reference](#-tools-reference) • [Configuration](#-configuration) • [OAuth Setup](#-oauth-setup)
|
| 12 |
+
|
| 13 |
+
</div>
|
| 14 |
+
|
| 15 |
+
---
|
| 16 |
+
|
| 17 |
+
## ⚡ Quick Start
|
| 18 |
+
|
| 19 |
+
### One-Click Install (Claude Desktop)
|
| 20 |
+
|
| 21 |
+
1. Download `google_workspace_mcp.dxt` from [Releases](https://github.com/taylorwilsdon/google_workspace_mcp/releases)
|
| 22 |
+
2. Double-click → Claude Desktop installs automatically
|
| 23 |
+
3. Add your Google OAuth credentials in Settings → Extensions
|
| 24 |
+
|
| 25 |
+
### CLI Install
|
| 26 |
+
|
| 27 |
+
```bash
|
| 28 |
+
# Instant run (no install)
|
| 29 |
+
uvx workspace-mcp
|
| 30 |
+
|
| 31 |
+
# With specific tools only
|
| 32 |
+
uvx workspace-mcp --tools gmail drive calendar
|
| 33 |
+
|
| 34 |
+
# With tool tier
|
| 35 |
+
uvx workspace-mcp --tool-tier core
|
| 36 |
+
```
|
| 37 |
+
|
| 38 |
+
### Environment Variables
|
| 39 |
+
|
| 40 |
+
```bash
|
| 41 |
+
export GOOGLE_OAUTH_CLIENT_ID="your-client-id"
|
| 42 |
+
export GOOGLE_OAUTH_CLIENT_SECRET="your-client-secret"
|
| 43 |
+
export OAUTHLIB_INSECURE_TRANSPORT=1 # Development only
|
| 44 |
+
```
|
| 45 |
+
|
| 46 |
+
---
|
| 47 |
+
|
| 48 |
+
## 🛠 Tools Reference
|
| 49 |
+
|
| 50 |
+
### Gmail (11 tools)
|
| 51 |
+
|
| 52 |
+
| Tool | Tier | Description |
|
| 53 |
+
|------|------|-------------|
|
| 54 |
+
| `search_gmail_messages` | Core | Search with Gmail operators, returns message/thread IDs with web links |
|
| 55 |
+
| `get_gmail_message_content` | Core | Get full message: subject, sender, body, attachments |
|
| 56 |
+
| `get_gmail_messages_content_batch` | Core | Batch retrieve up to 25 messages |
|
| 57 |
+
| `send_gmail_message` | Core | Send emails with HTML support, CC/BCC, threading |
|
| 58 |
+
| `get_gmail_thread_content` | Extended | Get complete conversation thread |
|
| 59 |
+
| `draft_gmail_message` | Extended | Create drafts with threading support |
|
| 60 |
+
| `list_gmail_labels` | Extended | List all system and user labels |
|
| 61 |
+
| `manage_gmail_label` | Extended | Create, update, delete labels |
|
| 62 |
+
| `modify_gmail_message_labels` | Extended | Add/remove labels (archive, trash, etc.) |
|
| 63 |
+
| `get_gmail_threads_content_batch` | Complete | Batch retrieve threads |
|
| 64 |
+
| `batch_modify_gmail_message_labels` | Complete | Bulk label operations |
|
| 65 |
+
|
| 66 |
+
**Also includes:** `get_gmail_attachment_content`, `list_gmail_filters`, `create_gmail_filter`, `delete_gmail_filter`
|
| 67 |
+
|
| 68 |
+
### Google Drive (7 tools)
|
| 69 |
+
|
| 70 |
+
| Tool | Tier | Description |
|
| 71 |
+
|------|------|-------------|
|
| 72 |
+
| `search_drive_files` | Core | Search files with Drive query syntax or free text |
|
| 73 |
+
| `get_drive_file_content` | Core | Read content from Docs, Sheets, Office files (.docx, .xlsx, .pptx) |
|
| 74 |
+
| `create_drive_file` | Core | Create files from content or URL (supports file://, http://, https://) |
|
| 75 |
+
| `list_drive_items` | Extended | List folder contents with shared drive support |
|
| 76 |
+
| `update_drive_file` | Extended | Update metadata, move between folders, star, trash |
|
| 77 |
+
| `get_drive_file_permissions` | Complete | Check sharing status and permissions |
|
| 78 |
+
| `check_drive_file_public_access` | Complete | Verify public link sharing for Docs image insertion |
|
| 79 |
+
|
| 80 |
+
**Also includes:** `get_drive_file_download_url` for generating download URLs
|
| 81 |
+
|
| 82 |
+
### Google Calendar (5 tools)
|
| 83 |
+
|
| 84 |
+
| Tool | Tier | Description |
|
| 85 |
+
|------|------|-------------|
|
| 86 |
+
| `list_calendars` | Core | List all accessible calendars |
|
| 87 |
+
| `get_events` | Core | Query events by time range, search, or specific ID |
|
| 88 |
+
| `create_event` | Core | Create events with attendees, reminders, Google Meet, attachments |
|
| 89 |
+
| `modify_event` | Core | Update any event property including conferencing |
|
| 90 |
+
| `delete_event` | Extended | Remove events |
|
| 91 |
+
|
| 92 |
+
**Event features:** Timezone support, transparency (busy/free), visibility settings, up to 5 custom reminders
|
| 93 |
+
|
| 94 |
+
### Google Docs (16 tools)
|
| 95 |
+
|
| 96 |
+
| Tool | Tier | Description |
|
| 97 |
+
|------|------|-------------|
|
| 98 |
+
| `get_doc_content` | Core | Extract text from Docs or .docx files (supports tabs) |
|
| 99 |
+
| `create_doc` | Core | Create new documents with optional initial content |
|
| 100 |
+
| `modify_doc_text` | Core | Insert, replace, format text (bold, italic, colors, fonts) |
|
| 101 |
+
| `search_docs` | Extended | Find documents by name |
|
| 102 |
+
| `find_and_replace_doc` | Extended | Global find/replace with case matching |
|
| 103 |
+
| `list_docs_in_folder` | Extended | List Docs in a specific folder |
|
| 104 |
+
| `insert_doc_elements` | Extended | Add tables, lists, page breaks |
|
| 105 |
+
| `export_doc_to_pdf` | Extended | Export to PDF and save to Drive |
|
| 106 |
+
| `insert_doc_image` | Complete | Insert images from Drive or URLs |
|
| 107 |
+
| `update_doc_headers_footers` | Complete | Modify headers/footers |
|
| 108 |
+
| `batch_update_doc` | Complete | Execute multiple operations atomically |
|
| 109 |
+
| `inspect_doc_structure` | Complete | Analyze document structure for safe insertion points |
|
| 110 |
+
| `create_table_with_data` | Complete | Create and populate tables in one operation |
|
| 111 |
+
| `debug_table_structure` | Complete | Debug table cell positions and content |
|
| 112 |
+
|
| 113 |
+
**Comments:** `read_document_comments`, `create_document_comment`, `reply_to_document_comment`, `resolve_document_comment`
|
| 114 |
+
|
| 115 |
+
### Google Sheets (13 tools)
|
| 116 |
+
|
| 117 |
+
| Tool | Tier | Description |
|
| 118 |
+
|------|------|-------------|
|
| 119 |
+
| `read_sheet_values` | Core | Read cell ranges with formatted output |
|
| 120 |
+
| `modify_sheet_values` | Core | Write, update, or clear cell values |
|
| 121 |
+
| `create_spreadsheet` | Core | Create new spreadsheets with multiple sheets |
|
| 122 |
+
| `list_spreadsheets` | Extended | List accessible spreadsheets |
|
| 123 |
+
| `get_spreadsheet_info` | Extended | Get metadata, sheets, conditional formats |
|
| 124 |
+
| `create_sheet` | Complete | Add sheets to existing spreadsheets |
|
| 125 |
+
| `format_sheet_range` | Complete | Apply colors and number formats |
|
| 126 |
+
| `add_conditional_formatting` | Complete | Add boolean or gradient rules |
|
| 127 |
+
| `update_conditional_formatting` | Complete | Modify existing rules |
|
| 128 |
+
| `delete_conditional_formatting` | Complete | Remove formatting rules |
|
| 129 |
+
|
| 130 |
+
**Comments:** `read_spreadsheet_comments`, `create_spreadsheet_comment`, `reply_to_spreadsheet_comment`, `resolve_spreadsheet_comment`
|
| 131 |
+
|
| 132 |
+
### Google Slides (9 tools)
|
| 133 |
+
|
| 134 |
+
| Tool | Tier | Description |
|
| 135 |
+
|------|------|-------------|
|
| 136 |
+
| `create_presentation` | Core | Create new presentations |
|
| 137 |
+
| `get_presentation` | Core | Get presentation details with slide text extraction |
|
| 138 |
+
| `batch_update_presentation` | Extended | Apply multiple updates (create slides, shapes, etc.) |
|
| 139 |
+
| `get_page` | Extended | Get specific slide details and elements |
|
| 140 |
+
| `get_page_thumbnail` | Extended | Generate PNG thumbnails |
|
| 141 |
+
|
| 142 |
+
**Comments:** `read_presentation_comments`, `create_presentation_comment`, `reply_to_presentation_comment`, `resolve_presentation_comment`
|
| 143 |
+
|
| 144 |
+
### Google Forms (5 tools)
|
| 145 |
+
|
| 146 |
+
| Tool | Tier | Description |
|
| 147 |
+
|------|------|-------------|
|
| 148 |
+
| `create_form` | Core | Create forms with title and description |
|
| 149 |
+
| `get_form` | Core | Get form details, questions, and URLs |
|
| 150 |
+
| `list_form_responses` | Extended | List responses with pagination |
|
| 151 |
+
| `set_publish_settings` | Complete | Configure template and authentication settings |
|
| 152 |
+
| `get_form_response` | Complete | Get individual response details |
|
| 153 |
+
|
| 154 |
+
### Google Tasks (12 tools)
|
| 155 |
+
|
| 156 |
+
| Tool | Tier | Description |
|
| 157 |
+
|------|------|-------------|
|
| 158 |
+
| `list_tasks` | Core | List tasks with filtering, subtask hierarchy preserved |
|
| 159 |
+
| `get_task` | Core | Get task details |
|
| 160 |
+
| `create_task` | Core | Create tasks with notes, due dates, parent/sibling positioning |
|
| 161 |
+
| `update_task` | Core | Update task properties |
|
| 162 |
+
| `delete_task` | Extended | Remove tasks |
|
| 163 |
+
| `list_task_lists` | Complete | List all task lists |
|
| 164 |
+
| `get_task_list` | Complete | Get task list details |
|
| 165 |
+
| `create_task_list` | Complete | Create new task lists |
|
| 166 |
+
| `update_task_list` | Complete | Rename task lists |
|
| 167 |
+
| `delete_task_list` | Complete | Delete task lists (and all tasks) |
|
| 168 |
+
| `move_task` | Complete | Reposition or move between lists |
|
| 169 |
+
| `clear_completed_tasks` | Complete | Hide completed tasks |
|
| 170 |
+
|
| 171 |
+
### Google Apps Script (11 tools)
|
| 172 |
+
|
| 173 |
+
| Tool | Tier | Description |
|
| 174 |
+
|------|------|-------------|
|
| 175 |
+
| `list_script_projects` | Core | List accessible Apps Script projects |
|
| 176 |
+
| `get_script_project` | Core | Get complete project with all files |
|
| 177 |
+
| `get_script_content` | Core | Retrieve specific file content |
|
| 178 |
+
| `create_script_project` | Core | Create new standalone or bound project |
|
| 179 |
+
| `update_script_content` | Core | Update or create script files |
|
| 180 |
+
| `run_script_function` | Core | Execute function with parameters |
|
| 181 |
+
| `create_deployment` | Extended | Create new script deployment |
|
| 182 |
+
| `list_deployments` | Extended | List all project deployments |
|
| 183 |
+
| `update_deployment` | Extended | Update deployment configuration |
|
| 184 |
+
| `delete_deployment` | Extended | Remove deployment |
|
| 185 |
+
| `list_script_processes` | Extended | View recent executions and status |
|
| 186 |
+
|
| 187 |
+
**Enables:** Cross-app automation, persistent workflows, custom business logic execution, script development and debugging
|
| 188 |
+
|
| 189 |
+
**Note:** Trigger management is not currently supported via MCP tools.
|
| 190 |
+
|
| 191 |
+
### Google Chat (4 tools)
|
| 192 |
+
|
| 193 |
+
| Tool | Tier | Description |
|
| 194 |
+
|------|------|-------------|
|
| 195 |
+
| `get_messages` | Core | Retrieve messages from a space |
|
| 196 |
+
| `send_message` | Core | Send messages with optional threading |
|
| 197 |
+
| `search_messages` | Core | Search across chat history |
|
| 198 |
+
| `list_spaces` | Extended | List rooms and DMs |
|
| 199 |
+
|
| 200 |
+
### Google Custom Search (3 tools)
|
| 201 |
+
|
| 202 |
+
| Tool | Tier | Description |
|
| 203 |
+
|------|------|-------------|
|
| 204 |
+
| `search_custom` | Core | Web search with filters (date, file type, language, safe search) |
|
| 205 |
+
| `search_custom_siterestrict` | Extended | Search within specific domains |
|
| 206 |
+
| `get_search_engine_info` | Complete | Get search engine metadata |
|
| 207 |
+
|
| 208 |
+
**Requires:** `GOOGLE_PSE_API_KEY` and `GOOGLE_PSE_ENGINE_ID` environment variables
|
| 209 |
+
|
| 210 |
+
---
|
| 211 |
+
|
| 212 |
+
## 📊 Tool Tiers
|
| 213 |
+
|
| 214 |
+
Choose a tier based on your needs:
|
| 215 |
+
|
| 216 |
+
| Tier | Tools | Use Case |
|
| 217 |
+
|------|-------|----------|
|
| 218 |
+
| **Core** | ~30 | Essential operations: search, read, create, send |
|
| 219 |
+
| **Extended** | ~50 | Core + management: labels, folders, batch ops |
|
| 220 |
+
| **Complete** | ~80 | Full API: comments, headers, admin functions |
|
| 221 |
+
|
| 222 |
+
```bash
|
| 223 |
+
uvx workspace-mcp --tool-tier core # Start minimal
|
| 224 |
+
uvx workspace-mcp --tool-tier extended # Add management
|
| 225 |
+
uvx workspace-mcp --tool-tier complete # Everything
|
| 226 |
+
```
|
| 227 |
+
|
| 228 |
+
Mix tiers with specific services:
|
| 229 |
+
```bash
|
| 230 |
+
uvx workspace-mcp --tools gmail drive --tool-tier extended
|
| 231 |
+
```
|
| 232 |
+
|
| 233 |
+
---
|
| 234 |
+
|
| 235 |
+
## ⚙ Configuration
|
| 236 |
+
|
| 237 |
+
### Required
|
| 238 |
+
|
| 239 |
+
| Variable | Description |
|
| 240 |
+
|----------|-------------|
|
| 241 |
+
| `GOOGLE_OAUTH_CLIENT_ID` | OAuth client ID from Google Cloud |
|
| 242 |
+
| `GOOGLE_OAUTH_CLIENT_SECRET` | OAuth client secret |
|
| 243 |
+
|
| 244 |
+
### Optional
|
| 245 |
+
|
| 246 |
+
| Variable | Description |
|
| 247 |
+
|----------|-------------|
|
| 248 |
+
| `USER_GOOGLE_EMAIL` | Default email for single-user mode |
|
| 249 |
+
| `GOOGLE_PSE_API_KEY` | Custom Search API key |
|
| 250 |
+
| `GOOGLE_PSE_ENGINE_ID` | Programmable Search Engine ID |
|
| 251 |
+
| `MCP_ENABLE_OAUTH21` | Enable OAuth 2.1 multi-user support |
|
| 252 |
+
| `WORKSPACE_MCP_STATELESS_MODE` | No file writes (container-friendly) |
|
| 253 |
+
| `EXTERNAL_OAUTH21_PROVIDER` | External OAuth flow with bearer tokens |
|
| 254 |
+
| `WORKSPACE_MCP_BASE_URI` | Server base URL (default: `http://localhost`) |
|
| 255 |
+
| `WORKSPACE_MCP_PORT` | Server port (default: `8000`) |
|
| 256 |
+
| `WORKSPACE_EXTERNAL_URL` | External URL for reverse proxy setups |
|
| 257 |
+
| `GOOGLE_MCP_CREDENTIALS_DIR` | Custom credentials storage path |
|
| 258 |
+
|
| 259 |
+
---
|
| 260 |
+
|
| 261 |
+
## 🔐 OAuth Setup
|
| 262 |
+
|
| 263 |
+
### 1. Create Google Cloud Project
|
| 264 |
+
|
| 265 |
+
1. Go to [Google Cloud Console](https://console.cloud.google.com/)
|
| 266 |
+
2. Create a new project
|
| 267 |
+
3. Navigate to **APIs & Services → Credentials**
|
| 268 |
+
4. Click **Create Credentials → OAuth Client ID**
|
| 269 |
+
5. Select **Desktop Application**
|
| 270 |
+
6. Download credentials
|
| 271 |
+
|
| 272 |
+
### 2. Enable APIs
|
| 273 |
+
|
| 274 |
+
Click to enable each API:
|
| 275 |
+
|
| 276 |
+
- [Calendar](https://console.cloud.google.com/flows/enableapi?apiid=calendar-json.googleapis.com)
|
| 277 |
+
- [Drive](https://console.cloud.google.com/flows/enableapi?apiid=drive.googleapis.com)
|
| 278 |
+
- [Gmail](https://console.cloud.google.com/flows/enableapi?apiid=gmail.googleapis.com)
|
| 279 |
+
- [Docs](https://console.cloud.google.com/flows/enableapi?apiid=docs.googleapis.com)
|
| 280 |
+
- [Sheets](https://console.cloud.google.com/flows/enableapi?apiid=sheets.googleapis.com)
|
| 281 |
+
- [Slides](https://console.cloud.google.com/flows/enableapi?apiid=slides.googleapis.com)
|
| 282 |
+
- [Forms](https://console.cloud.google.com/flows/enableapi?apiid=forms.googleapis.com)
|
| 283 |
+
- [Tasks](https://console.cloud.google.com/flows/enableapi?apiid=tasks.googleapis.com)
|
| 284 |
+
- [Chat](https://console.cloud.google.com/flows/enableapi?apiid=chat.googleapis.com)
|
| 285 |
+
- [Custom Search](https://console.cloud.google.com/flows/enableapi?apiid=customsearch.googleapis.com)
|
| 286 |
+
|
| 287 |
+
### 3. First Authentication
|
| 288 |
+
|
| 289 |
+
When you first call a tool:
|
| 290 |
+
1. Server returns an authorization URL
|
| 291 |
+
2. Open URL in browser, authorize access
|
| 292 |
+
3. Paste the authorization code when prompted
|
| 293 |
+
4. Credentials are cached for future use
|
| 294 |
+
|
| 295 |
+
---
|
| 296 |
+
|
| 297 |
+
## 🚀 Transport Modes
|
| 298 |
+
|
| 299 |
+
### Stdio (Default)
|
| 300 |
+
|
| 301 |
+
Best for Claude Desktop and local MCP clients:
|
| 302 |
+
|
| 303 |
+
```bash
|
| 304 |
+
uvx workspace-mcp
|
| 305 |
+
```
|
| 306 |
+
|
| 307 |
+
### HTTP (Streamable)
|
| 308 |
+
|
| 309 |
+
For web interfaces, debugging, or multi-client setups:
|
| 310 |
+
|
| 311 |
+
```bash
|
| 312 |
+
uvx workspace-mcp --transport streamable-http
|
| 313 |
+
```
|
| 314 |
+
|
| 315 |
+
Access at `http://localhost:8000/mcp/`
|
| 316 |
+
|
| 317 |
+
### Docker
|
| 318 |
+
|
| 319 |
+
```bash
|
| 320 |
+
docker build -t workspace-mcp .
|
| 321 |
+
docker run -p 8000:8000 \
|
| 322 |
+
-e GOOGLE_OAUTH_CLIENT_ID="..." \
|
| 323 |
+
-e GOOGLE_OAUTH_CLIENT_SECRET="..." \
|
| 324 |
+
workspace-mcp --transport streamable-http
|
| 325 |
+
```
|
| 326 |
+
|
| 327 |
+
---
|
| 328 |
+
|
| 329 |
+
## 🔧 Client Configuration
|
| 330 |
+
|
| 331 |
+
### Claude Desktop
|
| 332 |
+
|
| 333 |
+
```json
|
| 334 |
+
{
|
| 335 |
+
"mcpServers": {
|
| 336 |
+
"google_workspace": {
|
| 337 |
+
"command": "uvx",
|
| 338 |
+
"args": ["workspace-mcp", "--tool-tier", "core"],
|
| 339 |
+
"env": {
|
| 340 |
+
"GOOGLE_OAUTH_CLIENT_ID": "your-client-id",
|
| 341 |
+
"GOOGLE_OAUTH_CLIENT_SECRET": "your-secret",
|
| 342 |
+
"OAUTHLIB_INSECURE_TRANSPORT": "1"
|
| 343 |
+
}
|
| 344 |
+
}
|
| 345 |
+
}
|
| 346 |
+
}
|
| 347 |
+
```
|
| 348 |
+
|
| 349 |
+
### LM Studio
|
| 350 |
+
|
| 351 |
+
```json
|
| 352 |
+
{
|
| 353 |
+
"mcpServers": {
|
| 354 |
+
"google_workspace": {
|
| 355 |
+
"command": "uvx",
|
| 356 |
+
"args": ["workspace-mcp"],
|
| 357 |
+
"env": {
|
| 358 |
+
"GOOGLE_OAUTH_CLIENT_ID": "your-client-id",
|
| 359 |
+
"GOOGLE_OAUTH_CLIENT_SECRET": "your-secret",
|
| 360 |
+
"OAUTHLIB_INSECURE_TRANSPORT": "1",
|
| 361 |
+
"USER_GOOGLE_EMAIL": "you@example.com"
|
| 362 |
+
}
|
| 363 |
+
}
|
| 364 |
+
}
|
| 365 |
+
}
|
| 366 |
+
```
|
| 367 |
+
|
| 368 |
+
### VS Code
|
| 369 |
+
|
| 370 |
+
```json
|
| 371 |
+
{
|
| 372 |
+
"servers": {
|
| 373 |
+
"google-workspace": {
|
| 374 |
+
"url": "http://localhost:8000/mcp/",
|
| 375 |
+
"type": "http"
|
| 376 |
+
}
|
| 377 |
+
}
|
| 378 |
+
}
|
| 379 |
+
```
|
| 380 |
+
|
| 381 |
+
### Claude Code
|
| 382 |
+
|
| 383 |
+
```bash
|
| 384 |
+
claude mcp add --transport http workspace-mcp http://localhost:8000/mcp
|
| 385 |
+
```
|
| 386 |
+
|
| 387 |
+
---
|
| 388 |
+
|
| 389 |
+
## 🏗 Architecture
|
| 390 |
+
|
| 391 |
+
```
|
| 392 |
+
google_workspace_mcp/
|
| 393 |
+
├── auth/ # OAuth 2.0/2.1, credential storage, decorators
|
| 394 |
+
├── core/ # MCP server, tool registry, utilities
|
| 395 |
+
├── gcalendar/ # Calendar tools
|
| 396 |
+
├── gchat/ # Chat tools
|
| 397 |
+
├── gdocs/ # Docs tools + managers (tables, headers, batch)
|
| 398 |
+
├── gdrive/ # Drive tools + helpers
|
| 399 |
+
├── gforms/ # Forms tools
|
| 400 |
+
├── gmail/ # Gmail tools
|
| 401 |
+
├── gsearch/ # Custom Search tools
|
| 402 |
+
├── gsheets/ # Sheets tools + helpers
|
| 403 |
+
├── gslides/ # Slides tools
|
| 404 |
+
├── gtasks/ # Tasks tools
|
| 405 |
+
└── main.py # Entry point
|
| 406 |
+
```
|
| 407 |
+
|
| 408 |
+
### Key Patterns
|
| 409 |
+
|
| 410 |
+
**Service Decorator:** All tools use `@require_google_service()` for automatic authentication with 30-minute service caching.
|
| 411 |
+
|
| 412 |
+
```python
|
| 413 |
+
@server.tool()
|
| 414 |
+
@require_google_service("gmail", "gmail_read")
|
| 415 |
+
async def search_gmail_messages(service, user_google_email: str, query: str):
|
| 416 |
+
# service is injected automatically
|
| 417 |
+
...
|
| 418 |
+
```
|
| 419 |
+
|
| 420 |
+
**Multi-Service Tools:** Some tools need multiple APIs:
|
| 421 |
+
|
| 422 |
+
```python
|
| 423 |
+
@require_multiple_services([
|
| 424 |
+
{"service_type": "drive", "scopes": "drive_read", "param_name": "drive_service"},
|
| 425 |
+
{"service_type": "docs", "scopes": "docs_read", "param_name": "docs_service"},
|
| 426 |
+
])
|
| 427 |
+
async def get_doc_content(drive_service, docs_service, ...):
|
| 428 |
+
...
|
| 429 |
+
```
|
| 430 |
+
|
| 431 |
+
---
|
| 432 |
+
|
| 433 |
+
## 🧪 Development
|
| 434 |
+
|
| 435 |
+
```bash
|
| 436 |
+
git clone https://github.com/taylorwilsdon/google_workspace_mcp.git
|
| 437 |
+
cd google_workspace_mcp
|
| 438 |
+
|
| 439 |
+
# Install with dev dependencies
|
| 440 |
+
uv sync --group dev
|
| 441 |
+
|
| 442 |
+
# Run locally
|
| 443 |
+
uv run main.py
|
| 444 |
+
|
| 445 |
+
# Run tests
|
| 446 |
+
uv run pytest
|
| 447 |
+
|
| 448 |
+
# Lint
|
| 449 |
+
uv run ruff check .
|
| 450 |
+
```
|
| 451 |
+
|
| 452 |
+
---
|
| 453 |
+
|
| 454 |
+
## 📄 License
|
| 455 |
+
|
| 456 |
+
MIT License - see [LICENSE](LICENSE) for details.
|
| 457 |
+
|
| 458 |
+
---
|
| 459 |
+
|
| 460 |
+
<div align="center">
|
| 461 |
+
|
| 462 |
+
**[Documentation](https://workspacemcp.com)** • **[Issues](https://github.com/taylorwilsdon/google_workspace_mcp/issues)** • **[PyPI](https://pypi.org/project/workspace-mcp/)**
|
| 463 |
+
|
| 464 |
+
</div>
|
SECURITY.md
ADDED
|
@@ -0,0 +1,48 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Security Policy
|
| 2 |
+
|
| 3 |
+
## Reporting Security Issues
|
| 4 |
+
|
| 5 |
+
**Please do not report security vulnerabilities through public GitHub issues, discussions, or pull requests.**
|
| 6 |
+
|
| 7 |
+
Instead, please email us at **taylor@workspacemcp.com**
|
| 8 |
+
|
| 9 |
+
Please include as much of the following information as you can to help us better understand and resolve the issue:
|
| 10 |
+
|
| 11 |
+
- The type of issue (e.g., authentication bypass, credential exposure, command injection, etc.)
|
| 12 |
+
- Full paths of source file(s) related to the manifestation of the issue
|
| 13 |
+
- The location of the affected source code (tag/branch/commit or direct URL)
|
| 14 |
+
- Any special configuration required to reproduce the issue
|
| 15 |
+
- Step-by-step instructions to reproduce the issue
|
| 16 |
+
- Proof-of-concept or exploit code (if possible)
|
| 17 |
+
- Impact of the issue, including how an attacker might exploit the issue
|
| 18 |
+
|
| 19 |
+
This information will help us triage your report more quickly.
|
| 20 |
+
|
| 21 |
+
## Supported Versions
|
| 22 |
+
|
| 23 |
+
We release patches for security vulnerabilities. The versions currently eligible to receive such patches are listed below:
|
| 24 |
+
|
| 25 |
+
| Version | Supported |
|
| 26 |
+
| ------- | ------------------ |
|
| 27 |
+
| 1.4.x | :white_check_mark: |
|
| 28 |
+
| < 1.4 | :x: |
|
| 29 |
+
|
| 30 |
+
## Security Considerations
|
| 31 |
+
|
| 32 |
+
When using this MCP server, please ensure:
|
| 33 |
+
|
| 34 |
+
1. Store Google OAuth credentials securely
|
| 35 |
+
2. Never commit credentials to version control
|
| 36 |
+
3. Use environment variables for sensitive configuration
|
| 37 |
+
4. Regularly rotate OAuth refresh tokens
|
| 38 |
+
5. Limit OAuth scopes to only what's necessary
|
| 39 |
+
|
| 40 |
+
For more information on securing your use of the project, see https://workspacemcp.com/privacy
|
| 41 |
+
|
| 42 |
+
## Preferred Languages
|
| 43 |
+
|
| 44 |
+
We prefer all communications to be in English.
|
| 45 |
+
|
| 46 |
+
## Policy
|
| 47 |
+
|
| 48 |
+
We follow the principle of responsible disclosure. We will make every effort to address security issues in a timely manner and will coordinate with reporters to understand and resolve issues before public disclosure.
|
auth/__init__.py
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
# Make the auth directory a Python package
|
auth/auth_info_middleware.py
ADDED
|
@@ -0,0 +1,425 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Authentication middleware to populate context state with user information
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
import jwt
|
| 6 |
+
import logging
|
| 7 |
+
import os
|
| 8 |
+
import time
|
| 9 |
+
from types import SimpleNamespace
|
| 10 |
+
from fastmcp.server.middleware import Middleware, MiddlewareContext
|
| 11 |
+
from fastmcp.server.dependencies import get_http_headers
|
| 12 |
+
|
| 13 |
+
from auth.oauth21_session_store import ensure_session_from_access_token
|
| 14 |
+
|
| 15 |
+
# Configure logging
|
| 16 |
+
logger = logging.getLogger(__name__)
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
class AuthInfoMiddleware(Middleware):
    """
    Middleware to extract authentication information from JWT tokens
    and populate the FastMCP context state for use in tools and prompts.

    Resolution order (first match wins):
      1. ``Authorization: Bearer`` header — Google OAuth access tokens
         (``ya29.*``) are verified via the server's auth provider; any other
         bearer token is treated as a JWT and decoded WITHOUT signature
         verification (downstream verification is deferred — see the
         "service decorator" note in the error path below).
      2. In stdio transport mode, a cached OAuth 2.1 session for the
         requested user, or the single existing session if exactly one exists.
      3. An MCP session ID already bound to a user in the session store.

    On success the context state gains ``authenticated_user_email`` and
    ``authenticated_via``; downstream tools and prompts read these keys.
    """

    def __init__(self):
        super().__init__()
        # Label stored into context state as "auth_provider_type" for
        # bearer-token authentications.
        self.auth_provider_type = "GoogleProvider"

    async def _process_request_for_auth(self, context: MiddlewareContext) -> None:
        """Helper to extract, verify, and store auth info from a request.

        Args:
            context: The middleware context for the current request. Its
                ``fastmcp_context`` state dict is mutated in place with the
                authentication keys described on the class docstring.
        """
        if not context.fastmcp_context:
            logger.warning("No fastmcp_context available")
            return

        # Return early if authentication state is already set
        if context.fastmcp_context.get_state("authenticated_user_email"):
            logger.info("Authentication state already set.")
            return

        # Try to get the HTTP request to extract Authorization header
        try:
            # Use the new FastMCP method to get HTTP headers
            headers = get_http_headers()
            if headers:
                logger.debug("Processing HTTP headers for authentication")

                # Get the Authorization header
                auth_header = headers.get("authorization", "")
                if auth_header.startswith("Bearer "):
                    token_str = auth_header[7:]  # Remove "Bearer " prefix
                    logger.debug("Found Bearer token")

                    # For Google OAuth tokens (ya29.*), we need to verify them differently
                    if token_str.startswith("ya29."):
                        logger.debug("Detected Google OAuth access token format")

                        # Verify the token to get user info.
                        # Imported lazily to avoid an import cycle with core.server.
                        from core.server import get_auth_provider

                        auth_provider = get_auth_provider()

                        if auth_provider:
                            try:
                                # Verify the token
                                verified_auth = await auth_provider.verify_token(
                                    token_str
                                )
                                if verified_auth:
                                    # Extract user info from verified token
                                    user_email = None
                                    if hasattr(verified_auth, "claims"):
                                        user_email = verified_auth.claims.get("email")

                                    # Get expires_at, defaulting to 1 hour from now if not available
                                    if hasattr(verified_auth, "expires_at"):
                                        expires_at = verified_auth.expires_at
                                    else:
                                        expires_at = (
                                            int(time.time()) + 3600
                                        )  # Default to 1 hour

                                    # Get client_id from verified auth or use default
                                    client_id = (
                                        getattr(verified_auth, "client_id", None)
                                        or "google"
                                    )

                                    # Build an AccessToken-like object mirroring
                                    # the shape the service layer expects.
                                    access_token = SimpleNamespace(
                                        token=token_str,
                                        client_id=client_id,
                                        scopes=verified_auth.scopes
                                        if hasattr(verified_auth, "scopes")
                                        else [],
                                        session_id=f"google_oauth_{token_str[:8]}",
                                        expires_at=expires_at,
                                        # Add other fields that might be needed
                                        sub=verified_auth.sub
                                        if hasattr(verified_auth, "sub")
                                        else user_email,
                                        email=user_email,
                                    )

                                    # Store in context state - this is the authoritative authentication state
                                    context.fastmcp_context.set_state(
                                        "access_token", access_token
                                    )
                                    mcp_session_id = getattr(
                                        context.fastmcp_context, "session_id", None
                                    )
                                    # Register/refresh an OAuth 2.1 session keyed
                                    # on this verified token.
                                    ensure_session_from_access_token(
                                        verified_auth,
                                        user_email,
                                        mcp_session_id,
                                    )
                                    context.fastmcp_context.set_state(
                                        "access_token_obj", verified_auth
                                    )
                                    context.fastmcp_context.set_state(
                                        "auth_provider_type", self.auth_provider_type
                                    )
                                    context.fastmcp_context.set_state(
                                        "token_type", "google_oauth"
                                    )
                                    context.fastmcp_context.set_state(
                                        "user_email", user_email
                                    )
                                    context.fastmcp_context.set_state(
                                        "username", user_email
                                    )
                                    # Set the definitive authentication state
                                    context.fastmcp_context.set_state(
                                        "authenticated_user_email", user_email
                                    )
                                    context.fastmcp_context.set_state(
                                        "authenticated_via", "bearer_token"
                                    )

                                    logger.info(
                                        f"Authenticated via Google OAuth: {user_email}"
                                    )
                                else:
                                    logger.error("Failed to verify Google OAuth token")
                                    # Don't set authenticated_user_email if verification failed
                            except Exception as e:
                                logger.error(f"Error verifying Google OAuth token: {e}")
                                # Still store the unverified token - service decorator will handle verification
                                access_token = SimpleNamespace(
                                    token=token_str,
                                    client_id=os.getenv(
                                        "GOOGLE_OAUTH_CLIENT_ID", "google"
                                    ),
                                    scopes=[],
                                    session_id=f"google_oauth_{token_str[:8]}",
                                    expires_at=int(time.time())
                                    + 3600,  # Default to 1 hour
                                    sub="unknown",
                                    email="",
                                )
                                context.fastmcp_context.set_state(
                                    "access_token", access_token
                                )
                                context.fastmcp_context.set_state(
                                    "auth_provider_type", self.auth_provider_type
                                )
                                context.fastmcp_context.set_state(
                                    "token_type", "google_oauth"
                                )
                        else:
                            logger.warning(
                                "No auth provider available to verify Google token"
                            )
                            # Store unverified token
                            access_token = SimpleNamespace(
                                token=token_str,
                                client_id=os.getenv("GOOGLE_OAUTH_CLIENT_ID", "google"),
                                scopes=[],
                                session_id=f"google_oauth_{token_str[:8]}",
                                expires_at=int(time.time()) + 3600,  # Default to 1 hour
                                sub="unknown",
                                email="",
                            )
                            context.fastmcp_context.set_state(
                                "access_token", access_token
                            )
                            context.fastmcp_context.set_state(
                                "auth_provider_type", self.auth_provider_type
                            )
                            context.fastmcp_context.set_state(
                                "token_type", "google_oauth"
                            )

                    else:
                        # Decode JWT to get user info.
                        # NOTE: signature is intentionally NOT verified here;
                        # the decoded claims are used only to populate context
                        # state, not as proof of identity on their own.
                        try:
                            token_payload = jwt.decode(
                                token_str, options={"verify_signature": False}
                            )
                            logger.debug(
                                f"JWT payload decoded: {list(token_payload.keys())}"
                            )

                            # Create an AccessToken-like object
                            access_token = SimpleNamespace(
                                token=token_str,
                                client_id=token_payload.get("client_id", "unknown"),
                                scopes=token_payload.get("scope", "").split()
                                if token_payload.get("scope")
                                else [],
                                # Prefer "sid", then "jti", then "session_id".
                                session_id=token_payload.get(
                                    "sid",
                                    token_payload.get(
                                        "jti",
                                        token_payload.get("session_id", "unknown"),
                                    ),
                                ),
                                expires_at=token_payload.get("exp", 0),
                            )

                            # Store in context state
                            context.fastmcp_context.set_state(
                                "access_token", access_token
                            )

                            # Store additional user info
                            context.fastmcp_context.set_state(
                                "user_id", token_payload.get("sub")
                            )
                            context.fastmcp_context.set_state(
                                "username",
                                token_payload.get(
                                    "username", token_payload.get("email")
                                ),
                            )
                            context.fastmcp_context.set_state(
                                "name", token_payload.get("name")
                            )
                            context.fastmcp_context.set_state(
                                "auth_time", token_payload.get("auth_time")
                            )
                            context.fastmcp_context.set_state(
                                "issuer", token_payload.get("iss")
                            )
                            context.fastmcp_context.set_state(
                                "audience", token_payload.get("aud")
                            )
                            context.fastmcp_context.set_state(
                                "jti", token_payload.get("jti")
                            )
                            context.fastmcp_context.set_state(
                                "auth_provider_type", self.auth_provider_type
                            )

                            # Set the definitive authentication state for JWT tokens
                            user_email = token_payload.get(
                                "email", token_payload.get("username")
                            )
                            if user_email:
                                context.fastmcp_context.set_state(
                                    "authenticated_user_email", user_email
                                )
                                context.fastmcp_context.set_state(
                                    "authenticated_via", "jwt_token"
                                )

                            logger.debug("JWT token processed successfully")

                        except jwt.DecodeError as e:
                            logger.error(f"Failed to decode JWT: {e}")
                        except Exception as e:
                            logger.error(f"Error processing JWT: {e}")
                else:
                    logger.debug("No Bearer token in Authorization header")
            else:
                logger.debug(
                    "No HTTP headers available (might be using stdio transport)"
                )
        except Exception as e:
            logger.debug(f"Could not get HTTP request: {e}")

        # After trying HTTP headers, check for other authentication methods
        # This consolidates all authentication logic in the middleware
        if not context.fastmcp_context.get_state("authenticated_user_email"):
            logger.debug(
                "No authentication found via bearer token, checking other methods"
            )

            # Check transport mode (lazy import avoids a circular dependency)
            from core.config import get_transport_mode

            transport_mode = get_transport_mode()

            if transport_mode == "stdio":
                # In stdio mode, check if there's a session with credentials
                # This is ONLY safe in stdio mode because it's single-user
                logger.debug("Checking for stdio mode authentication")

                # Get the requested user from the context if available
                requested_user = None
                if hasattr(context, "request") and hasattr(context.request, "params"):
                    requested_user = context.request.params.get("user_google_email")
                elif hasattr(context, "arguments"):
                    # FastMCP may store arguments differently
                    requested_user = context.arguments.get("user_google_email")

                if requested_user:
                    try:
                        from auth.oauth21_session_store import get_oauth21_session_store

                        store = get_oauth21_session_store()

                        # Check if user has a recent session
                        if store.has_session(requested_user):
                            logger.debug(
                                f"Using recent stdio session for {requested_user}"
                            )
                            # In stdio mode, we can trust the user has authenticated recently
                            context.fastmcp_context.set_state(
                                "authenticated_user_email", requested_user
                            )
                            context.fastmcp_context.set_state(
                                "authenticated_via", "stdio_session"
                            )
                            context.fastmcp_context.set_state(
                                "auth_provider_type", "oauth21_stdio"
                            )
                    except Exception as e:
                        logger.debug(f"Error checking stdio session: {e}")

                # If no requested user was provided but exactly one session exists, assume it in stdio mode
                if not context.fastmcp_context.get_state("authenticated_user_email"):
                    try:
                        from auth.oauth21_session_store import get_oauth21_session_store

                        store = get_oauth21_session_store()
                        single_user = store.get_single_user_email()
                        if single_user:
                            logger.debug(
                                f"Defaulting to single stdio OAuth session for {single_user}"
                            )
                            context.fastmcp_context.set_state(
                                "authenticated_user_email", single_user
                            )
                            context.fastmcp_context.set_state(
                                "authenticated_via", "stdio_single_session"
                            )
                            context.fastmcp_context.set_state(
                                "auth_provider_type", "oauth21_stdio"
                            )
                            context.fastmcp_context.set_state("user_email", single_user)
                            context.fastmcp_context.set_state("username", single_user)
                    except Exception as e:
                        logger.debug(
                            f"Error determining stdio single-user session: {e}"
                        )

            # Check for MCP session binding
            if not context.fastmcp_context.get_state(
                "authenticated_user_email"
            ) and hasattr(context.fastmcp_context, "session_id"):
                mcp_session_id = context.fastmcp_context.session_id
                if mcp_session_id:
                    try:
                        from auth.oauth21_session_store import get_oauth21_session_store

                        store = get_oauth21_session_store()

                        # Check if this MCP session is bound to a user
                        bound_user = store.get_user_by_mcp_session(mcp_session_id)
                        if bound_user:
                            logger.debug(f"MCP session bound to {bound_user}")
                            context.fastmcp_context.set_state(
                                "authenticated_user_email", bound_user
                            )
                            context.fastmcp_context.set_state(
                                "authenticated_via", "mcp_session_binding"
                            )
                            context.fastmcp_context.set_state(
                                "auth_provider_type", "oauth21_session"
                            )
                    except Exception as e:
                        logger.debug(f"Error checking MCP session binding: {e}")

    async def on_call_tool(self, context: MiddlewareContext, call_next):
        """Extract auth info from token and set in context state.

        Runs the full auth resolution, then delegates to the next handler.
        Authentication failures are logged at INFO (no traceback); any other
        exception is logged with a traceback. All exceptions are re-raised.
        """
        logger.debug("Processing tool call authentication")

        try:
            await self._process_request_for_auth(context)

            logger.debug("Passing to next handler")
            result = await call_next(context)
            logger.debug("Handler completed")
            return result

        except Exception as e:
            # Check if this is an authentication error - don't log traceback for these.
            # String matching is used because the exception class is defined in
            # a module this middleware cannot import directly.
            if "GoogleAuthenticationError" in str(
                type(e)
            ) or "Access denied: Cannot retrieve credentials" in str(e):
                logger.info(f"Authentication check failed: {e}")
            else:
                logger.error(f"Error in on_call_tool middleware: {e}", exc_info=True)
            raise

    async def on_get_prompt(self, context: MiddlewareContext, call_next):
        """Extract auth info for prompt requests too.

        Same behavior as :meth:`on_call_tool`, applied to prompt retrieval.
        """
        logger.debug("Processing prompt authentication")

        try:
            await self._process_request_for_auth(context)

            logger.debug("Passing prompt to next handler")
            result = await call_next(context)
            logger.debug("Prompt handler completed")
            return result

        except Exception as e:
            # Check if this is an authentication error - don't log traceback for these
            if "GoogleAuthenticationError" in str(
                type(e)
            ) or "Access denied: Cannot retrieve credentials" in str(e):
                logger.info(f"Authentication check failed in prompt: {e}")
            else:
                logger.error(f"Error in on_get_prompt middleware: {e}", exc_info=True)
            raise
|
auth/credential_store.py
ADDED
|
@@ -0,0 +1,246 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Credential Store API for Google Workspace MCP
|
| 3 |
+
|
| 4 |
+
This module provides a standardized interface for credential storage and retrieval,
|
| 5 |
+
supporting multiple backends configurable via environment variables.
|
| 6 |
+
"""
|
| 7 |
+
|
| 8 |
+
import os
|
| 9 |
+
import json
|
| 10 |
+
import logging
|
| 11 |
+
from abc import ABC, abstractmethod
|
| 12 |
+
from typing import Optional, List
|
| 13 |
+
from datetime import datetime
|
| 14 |
+
from google.oauth2.credentials import Credentials
|
| 15 |
+
|
| 16 |
+
logger = logging.getLogger(__name__)
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
class CredentialStore(ABC):
    """Abstract contract for persisting Google OAuth credentials.

    Concrete backends (e.g. local JSON files) implement this interface so
    the rest of the server can remain agnostic about where credentials
    physically live.
    """

    @abstractmethod
    def get_credential(self, user_email: str) -> Optional[Credentials]:
        """Look up stored credentials for a user.

        Args:
            user_email: User's email address.

        Returns:
            The user's Google ``Credentials`` object, or ``None`` when no
            entry exists for that address.
        """

    @abstractmethod
    def store_credential(self, user_email: str, credentials: Credentials) -> bool:
        """Persist credentials for a user.

        Args:
            user_email: User's email address.
            credentials: Google ``Credentials`` object to save.

        Returns:
            ``True`` if the credentials were stored, ``False`` otherwise.
        """

    @abstractmethod
    def delete_credential(self, user_email: str) -> bool:
        """Remove a user's stored credentials.

        Args:
            user_email: User's email address.

        Returns:
            ``True`` if the credentials were deleted, ``False`` otherwise.
        """

    @abstractmethod
    def list_users(self) -> List[str]:
        """Enumerate users that currently have stored credentials.

        Returns:
            Email addresses of every user with a stored credential.
        """
|
| 71 |
+
|
| 72 |
+
|
| 73 |
+
class LocalDirectoryCredentialStore(CredentialStore):
    """Credential store that persists each user's credentials as a local JSON file."""

    def __init__(self, base_dir: Optional[str] = None):
        """
        Initialize the local JSON credential store.

        Args:
            base_dir: Base directory for credential files. If None, uses the directory
                      configured by the GOOGLE_MCP_CREDENTIALS_DIR environment variable,
                      or defaults to ~/.google_workspace_mcp/credentials if the environment
                      variable is not set.
        """
        if base_dir is None:
            env_dir = os.getenv("GOOGLE_MCP_CREDENTIALS_DIR")
            if env_dir:
                base_dir = env_dir
            else:
                home_dir = os.path.expanduser("~")
                if home_dir and home_dir != "~":
                    base_dir = os.path.join(
                        home_dir, ".google_workspace_mcp", "credentials"
                    )
                else:
                    # No usable home directory (e.g. some containers): use CWD.
                    base_dir = os.path.join(os.getcwd(), ".credentials")

        self.base_dir = base_dir
        # Fixed: log message previously referenced an old class name
        # ("LocalJsonCredentialStore").
        logger.info(
            f"LocalDirectoryCredentialStore initialized with base_dir: {base_dir}"
        )

    def _get_credential_path(self, user_email: str) -> str:
        """Get the file path for a user's credentials, creating the base dir if needed."""
        if not os.path.exists(self.base_dir):
            # exist_ok avoids a crash when another process creates the directory
            # between the check and the call; 0o700 keeps stored tokens private.
            os.makedirs(self.base_dir, mode=0o700, exist_ok=True)
            logger.info(f"Created credentials directory: {self.base_dir}")
        return os.path.join(self.base_dir, f"{user_email}.json")

    def get_credential(self, user_email: str) -> Optional[Credentials]:
        """Load credentials for a user from their JSON file, or None if absent/invalid."""
        creds_path = self._get_credential_path(user_email)

        if not os.path.exists(creds_path):
            logger.debug(f"No credential file found for {user_email} at {creds_path}")
            return None

        try:
            with open(creds_path, "r") as f:
                creds_data = json.load(f)

            # Parse expiry if present.
            expiry = None
            if creds_data.get("expiry"):
                try:
                    expiry = datetime.fromisoformat(creds_data["expiry"])
                    # Google auth library expects timezone-naive datetimes.
                    if expiry.tzinfo is not None:
                        expiry = expiry.replace(tzinfo=None)
                except (ValueError, TypeError) as e:
                    logger.warning(f"Could not parse expiry time for {user_email}: {e}")

            credentials = Credentials(
                token=creds_data.get("token"),
                refresh_token=creds_data.get("refresh_token"),
                token_uri=creds_data.get("token_uri"),
                client_id=creds_data.get("client_id"),
                client_secret=creds_data.get("client_secret"),
                scopes=creds_data.get("scopes"),
                expiry=expiry,
            )

            logger.debug(f"Loaded credentials for {user_email} from {creds_path}")
            return credentials

        except (IOError, json.JSONDecodeError, KeyError) as e:
            logger.error(
                f"Error loading credentials for {user_email} from {creds_path}: {e}"
            )
            return None

    def store_credential(self, user_email: str, credentials: Credentials) -> bool:
        """Serialize and write credentials for a user; returns True on success."""
        creds_path = self._get_credential_path(user_email)

        creds_data = {
            "token": credentials.token,
            "refresh_token": credentials.refresh_token,
            "token_uri": credentials.token_uri,
            "client_id": credentials.client_id,
            "client_secret": credentials.client_secret,
            "scopes": credentials.scopes,
            "expiry": credentials.expiry.isoformat() if credentials.expiry else None,
        }

        try:
            with open(creds_path, "w") as f:
                json.dump(creds_data, f, indent=2)
            # Tokens are sensitive: restrict the file to the current user
            # (no-op on platforms without POSIX permissions).
            os.chmod(creds_path, 0o600)
            logger.info(f"Stored credentials for {user_email} to {creds_path}")
            return True
        except OSError as e:
            logger.error(
                f"Error storing credentials for {user_email} to {creds_path}: {e}"
            )
            return False

    def delete_credential(self, user_email: str) -> bool:
        """Delete a user's credential file; a missing file counts as success."""
        creds_path = self._get_credential_path(user_email)

        try:
            if os.path.exists(creds_path):
                os.remove(creds_path)
                logger.info(f"Deleted credentials for {user_email} from {creds_path}")
            else:
                logger.debug(
                    f"No credential file to delete for {user_email} at {creds_path}"
                )
            return True
        except OSError as e:
            logger.error(
                f"Error deleting credentials for {user_email} from {creds_path}: {e}"
            )
            return False

    def list_users(self) -> List[str]:
        """List all users with credential files, sorted by email."""
        if not os.path.exists(self.base_dir):
            return []

        users = []
        try:
            users = [
                filename[:-5]  # strip the ".json" extension
                for filename in os.listdir(self.base_dir)
                if filename.endswith(".json")
            ]
            logger.debug(
                f"Found {len(users)} users with credentials in {self.base_dir}"
            )
        except OSError as e:
            logger.error(f"Error listing credential files in {self.base_dir}: {e}")

        return sorted(users)
|
| 213 |
+
|
| 214 |
+
|
| 215 |
+
# Global credential store instance (lazily created).
_credential_store: Optional[CredentialStore] = None


def get_credential_store() -> CredentialStore:
    """
    Return the process-wide credential store, creating it on first use.

    Returns:
        Configured credential store instance.
    """
    global _credential_store

    if _credential_store is None:
        # The local directory store is the only backend for now; other
        # backends may be selected via environment variables in the future.
        _credential_store = LocalDirectoryCredentialStore()
        logger.info(
            f"Initialized credential store: {type(_credential_store).__name__}"
        )

    return _credential_store


def set_credential_store(store: CredentialStore):
    """
    Replace the process-wide credential store.

    Args:
        store: Credential store instance to use.
    """
    global _credential_store
    _credential_store = store
    logger.info(f"Set credential store: {type(store).__name__}")
|
auth/external_oauth_provider.py
ADDED
|
@@ -0,0 +1,99 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
External OAuth Provider for Google Workspace MCP
|
| 3 |
+
|
| 4 |
+
Extends FastMCP's GoogleProvider to support external OAuth flows where
|
| 5 |
+
access tokens (ya29.*) are issued by external systems and need validation.
|
| 6 |
+
"""
|
| 7 |
+
|
| 8 |
+
import logging
|
| 9 |
+
import time
|
| 10 |
+
from typing import Optional
|
| 11 |
+
|
| 12 |
+
from fastmcp.server.auth.providers.google import GoogleProvider
|
| 13 |
+
from fastmcp.server.auth import AccessToken
|
| 14 |
+
from google.oauth2.credentials import Credentials
|
| 15 |
+
|
| 16 |
+
logger = logging.getLogger(__name__)
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
class ExternalOAuthProvider(GoogleProvider):
    """
    Extended GoogleProvider that supports validating external Google OAuth access tokens.

    Handles opaque ya29.* access tokens (issued outside this server) by
    calling Google's userinfo API, while delegating standard JWT ID tokens
    to the parent class.
    """

    def __init__(self, client_id: str, client_secret: str, **kwargs):
        """Initialize and store client credentials for token validation."""
        super().__init__(client_id=client_id, client_secret=client_secret, **kwargs)
        # The parent class does not expose these, so keep our own copies for
        # constructing Credentials objects during validation.
        self._client_id = client_id
        self._client_secret = client_secret

    async def verify_token(self, token: str) -> Optional[AccessToken]:
        """
        Verify a token - supports both JWT ID tokens and ya29.* access tokens.

        For ya29.* access tokens (issued externally), validates by calling
        Google's userinfo API. For JWT tokens, delegates to the parent class.

        Args:
            token: Token string to verify (JWT or ya29.* access token)

        Returns:
            AccessToken object if valid, None otherwise
        """
        # For ya29.* access tokens, validate using Google's userinfo API.
        if token.startswith("ya29."):
            logger.debug("Validating external Google OAuth access token")

            try:
                import asyncio
                from types import SimpleNamespace

                from auth.google_auth import get_user_info

                # Minimal Credentials object - only the access token matters
                # for the userinfo call.
                credentials = Credentials(
                    token=token,
                    token_uri="https://oauth2.googleapis.com/token",
                    client_id=self._client_id,
                    client_secret=self._client_secret,
                )

                # get_user_info performs blocking network I/O; run it in a
                # worker thread so the event loop is not stalled.
                user_info = await asyncio.to_thread(get_user_info, credentials)

                if user_info and user_info.get("email"):
                    # Token is valid - build an object shaped like FastMCP's
                    # AccessToken, which is what downstream middleware expects.
                    logger.info(
                        f"Validated external access token for: {user_info['email']}"
                    )

                    scope_list = list(getattr(self, "required_scopes", []) or [])
                    return SimpleNamespace(
                        token=token,
                        scopes=scope_list,
                        # Google access tokens are typically valid for one
                        # hour; the exact expiry is not known here.
                        expires_at=int(time.time()) + 3600,
                        claims={
                            "email": user_info["email"],
                            "sub": user_info.get("id"),
                        },
                        client_id=self._client_id,
                        email=user_info["email"],
                        sub=user_info.get("id"),
                    )

                logger.error("Could not get user info from access token")
                return None

            except Exception as e:
                logger.error(f"Error validating external access token: {e}")
                return None

        # For JWT tokens, use the parent class implementation.
        return await super().verify_token(token)
|
auth/google_auth.py
ADDED
|
@@ -0,0 +1,934 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# auth/google_auth.py
|
| 2 |
+
|
| 3 |
+
import asyncio
|
| 4 |
+
import json
|
| 5 |
+
import jwt
|
| 6 |
+
import logging
|
| 7 |
+
import os
|
| 8 |
+
|
| 9 |
+
from typing import List, Optional, Tuple, Dict, Any
|
| 10 |
+
from urllib.parse import parse_qs, urlparse
|
| 11 |
+
|
| 12 |
+
from google.oauth2.credentials import Credentials
|
| 13 |
+
from google_auth_oauthlib.flow import Flow
|
| 14 |
+
from google.auth.transport.requests import Request
|
| 15 |
+
from google.auth.exceptions import RefreshError
|
| 16 |
+
from googleapiclient.discovery import build
|
| 17 |
+
from googleapiclient.errors import HttpError
|
| 18 |
+
from auth.scopes import SCOPES, get_current_scopes # noqa
|
| 19 |
+
from auth.oauth21_session_store import get_oauth21_session_store
|
| 20 |
+
from auth.credential_store import get_credential_store
|
| 21 |
+
from auth.oauth_config import get_oauth_config, is_stateless_mode
|
| 22 |
+
from core.config import (
|
| 23 |
+
get_transport_mode,
|
| 24 |
+
get_oauth_redirect_uri,
|
| 25 |
+
)
|
| 26 |
+
from core.context import get_fastmcp_session_id
|
| 27 |
+
|
| 28 |
+
# Try to import FastMCP dependencies (may not be available in all environments)
|
| 29 |
+
try:
|
| 30 |
+
from fastmcp.server.dependencies import get_context as get_fastmcp_context
|
| 31 |
+
except ImportError:
|
| 32 |
+
get_fastmcp_context = None
|
| 33 |
+
|
| 34 |
+
# Configure logging
|
| 35 |
+
logging.basicConfig(level=logging.INFO)
|
| 36 |
+
logger = logging.getLogger(__name__)
|
| 37 |
+
|
| 38 |
+
|
| 39 |
+
# Constants
|
| 40 |
+
def get_default_credentials_dir():
    """
    Get the default credentials directory path, preferring user-specific locations.

    Resolution order:
      1. GOOGLE_MCP_CREDENTIALS_DIR environment variable, if set.
      2. ~/.google_workspace_mcp/credentials under the user's home directory.
      3. .credentials under the current working directory (when no usable
         home directory is available, e.g. in some containers).

    Returns:
        Path string for the credentials directory.
    """
    # Explicit environment variable override wins (read once, not twice).
    env_dir = os.getenv("GOOGLE_MCP_CREDENTIALS_DIR")
    if env_dir:
        return env_dir

    # expanduser returns "~" unchanged when no home directory can be resolved.
    home_dir = os.path.expanduser("~")
    if home_dir and home_dir != "~":
        return os.path.join(home_dir, ".google_workspace_mcp", "credentials")

    # Fallback to current working directory if home directory is not accessible.
    return os.path.join(os.getcwd(), ".credentials")
|
| 53 |
+
|
| 54 |
+
|
| 55 |
+
DEFAULT_CREDENTIALS_DIR = get_default_credentials_dir()

# Session credentials now handled by OAuth21SessionStore - no local cache needed.
# Centralized client secrets path: prefer the explicit environment variables,
# otherwise fall back to client_secret.json in the project root (this module
# lives in auth/, so the root is one directory up).
_client_secrets_env = os.getenv("GOOGLE_CLIENT_SECRET_PATH") or os.getenv(
    "GOOGLE_CLIENT_SECRETS"
)
CONFIG_CLIENT_SECRETS_PATH = _client_secrets_env or os.path.join(
    os.path.dirname(os.path.dirname(os.path.abspath(__file__))),
    "client_secret.json",
)
|
| 70 |
+
|
| 71 |
+
# --- Helper Functions ---
|
| 72 |
+
|
| 73 |
+
|
| 74 |
+
def _find_any_credentials(
    base_dir: str = DEFAULT_CREDENTIALS_DIR,
) -> Optional[Credentials]:
    """
    Find and load any valid credentials from the credentials directory.

    Used in single-user mode to bypass session-to-OAuth mapping.

    Returns:
        First valid Credentials object found, or None if none exist.
    """
    try:
        store = get_credential_store()
        known_users = store.list_users()
        if not known_users:
            logger.info(
                "[single-user] No users found with credentials via credential store"
            )
            return None

        # A USER_GOOGLE_EMAIL env override wins when that user has credentials.
        preferred_user = os.getenv("USER_GOOGLE_EMAIL", "").strip()
        if preferred_user and preferred_user in known_users:
            preferred_creds = store.get_credential(preferred_user)
            if preferred_creds:
                logger.info(
                    f"[single-user] Found credentials for preferred user {preferred_user} via credential store"
                )
                return preferred_creds

        # Otherwise take the first user the store reports.
        fallback_user = known_users[0]
        fallback_creds = store.get_credential(fallback_user)
        if fallback_creds:
            logger.info(
                f"[single-user] Found credentials for {fallback_user} via credential store"
            )
            return fallback_creds

        logger.warning(
            f"[single-user] Could not load credentials for {fallback_user} via credential store"
        )

    except Exception as e:
        logger.error(
            f"[single-user] Error finding credentials via credential store: {e}"
        )

    logger.info("[single-user] No valid credentials found via credential store")
    return None
|
| 123 |
+
|
| 124 |
+
|
| 125 |
+
def save_credentials_to_session(session_id: str, credentials: Credentials):
    """Persist user credentials in the OAuth21SessionStore keyed by MCP session id."""
    # The store is keyed by user email, which we can only recover from the
    # id_token payload (signature deliberately not verified here).
    user_email = None
    if credentials and credentials.id_token:
        try:
            decoded_token = jwt.decode(
                credentials.id_token, options={"verify_signature": False}
            )
            user_email = decoded_token.get("email")
        except Exception as e:
            logger.debug(f"Could not decode id_token to get email: {e}")

    if not user_email:
        logger.warning(
            f"Could not save credentials to session store - no user email found for session: {session_id}"
        )
        return

    store = get_oauth21_session_store()
    store.store_session(
        user_email=user_email,
        access_token=credentials.token,
        refresh_token=credentials.refresh_token,
        token_uri=credentials.token_uri,
        client_id=credentials.client_id,
        client_secret=credentials.client_secret,
        scopes=credentials.scopes,
        expiry=credentials.expiry,
        mcp_session_id=session_id,
    )
    logger.debug(
        f"Credentials saved to OAuth21SessionStore for session_id: {session_id}, user: {user_email}"
    )
|
| 158 |
+
|
| 159 |
+
|
| 160 |
+
def load_credentials_from_session(session_id: str) -> Optional[Credentials]:
    """Fetch user credentials for an MCP session id from the OAuth21SessionStore."""
    credentials = get_oauth21_session_store().get_credentials_by_mcp_session(
        session_id
    )
    if credentials:
        logger.debug(
            f"Credentials loaded from OAuth21SessionStore for session_id: {session_id}"
        )
    else:
        logger.debug(
            f"No credentials found in OAuth21SessionStore for session_id: {session_id}"
        )
    return credentials
|
| 173 |
+
|
| 174 |
+
|
| 175 |
+
def load_client_secrets_from_env() -> Optional[Dict[str, Any]]:
    """
    Build an OAuth client config from environment variables.

    Environment variables used:
    - GOOGLE_OAUTH_CLIENT_ID: OAuth 2.0 client ID
    - GOOGLE_OAUTH_CLIENT_SECRET: OAuth 2.0 client secret
    - GOOGLE_OAUTH_REDIRECT_URI: (optional) OAuth redirect URI

    Returns:
        Client secrets configuration dict compatible with Google OAuth library,
        or None if required environment variables are not set.
    """
    client_id = os.getenv("GOOGLE_OAUTH_CLIENT_ID")
    client_secret = os.getenv("GOOGLE_OAUTH_CLIENT_SECRET")

    if not (client_id and client_secret):
        logger.debug("OAuth client credentials not found in environment variables")
        return None

    # Mirror the structure of a Google "web" client secrets file.
    web_config: Dict[str, Any] = {
        "client_id": client_id,
        "client_secret": client_secret,
        "auth_uri": "https://accounts.google.com/o/oauth2/auth",
        "token_uri": "https://oauth2.googleapis.com/token",
        "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
    }

    # The redirect URI is optional and only included when explicitly configured.
    redirect_uri = os.getenv("GOOGLE_OAUTH_REDIRECT_URI")
    if redirect_uri:
        web_config["redirect_uris"] = [redirect_uri]

    logger.info("Loaded OAuth client credentials from environment variables")
    return {"web": web_config}
|
| 214 |
+
|
| 215 |
+
|
| 216 |
+
def load_client_secrets(client_secrets_path: str) -> Dict[str, Any]:
    """
    Load OAuth client secrets from the environment (preferred) or from a file.

    Priority order:
    1. Environment variables (GOOGLE_OAUTH_CLIENT_ID, GOOGLE_OAUTH_CLIENT_SECRET)
    2. File-based credentials at the specified path

    Args:
        client_secrets_path: Path to the client secrets JSON file (used as fallback)

    Returns:
        Client secrets configuration dict

    Raises:
        ValueError: If client secrets file has invalid format
        IOError: If file cannot be read and no environment variables are set
    """
    # Environment variables take precedence over the file.
    env_config = load_client_secrets_from_env()
    if env_config:
        # Environment config nests the secrets under "web"; unwrap for callers.
        return env_config["web"]

    try:
        with open(client_secrets_path, "r") as f:
            client_config = json.load(f)
    except (IOError, json.JSONDecodeError) as e:
        logger.error(f"Error loading client secrets file {client_secrets_path}: {e}")
        raise

    # Google client secrets files nest the config under "web" or "installed".
    for section in ("web", "installed"):
        if section in client_config:
            logger.info(
                f"Loaded OAuth client credentials from file: {client_secrets_path}"
            )
            return client_config[section]

    logger.error(
        f"Client secrets file {client_secrets_path} has unexpected format."
    )
    raise ValueError("Invalid client secrets file format")
|
| 263 |
+
|
| 264 |
+
|
| 265 |
+
def check_client_secrets() -> Optional[str]:
    """
    Check that OAuth client secrets are available, either as environment
    variables or as a file.

    Returns:
        An error message string if secrets are not found, otherwise None.
    """
    if load_client_secrets_from_env():
        return None
    if os.path.exists(CONFIG_CLIENT_SECRETS_PATH):
        return None

    logger.error(
        f"OAuth client credentials not found. No environment variables set and no file at {CONFIG_CLIENT_SECRETS_PATH}"
    )
    return f"OAuth client credentials not found. Please set GOOGLE_OAUTH_CLIENT_ID and GOOGLE_OAUTH_CLIENT_SECRET environment variables or provide a client secrets file at {CONFIG_CLIENT_SECRETS_PATH}."
|
| 280 |
+
|
| 281 |
+
|
| 282 |
+
def create_oauth_flow(
    scopes: List[str], redirect_uri: str, state: Optional[str] = None
) -> Flow:
    """Create an OAuth flow from environment variables or the client secrets file."""
    # Environment-based configuration takes precedence over the file.
    env_config = load_client_secrets_from_env()
    if env_config:
        env_flow = Flow.from_client_config(
            env_config, scopes=scopes, redirect_uri=redirect_uri, state=state
        )
        logger.debug("Created OAuth flow from environment variables")
        return env_flow

    if not os.path.exists(CONFIG_CLIENT_SECRETS_PATH):
        raise FileNotFoundError(
            f"OAuth client secrets file not found at {CONFIG_CLIENT_SECRETS_PATH} and no environment variables set"
        )

    file_flow = Flow.from_client_secrets_file(
        CONFIG_CLIENT_SECRETS_PATH,
        scopes=scopes,
        redirect_uri=redirect_uri,
        state=state,
    )
    logger.debug(
        f"Created OAuth flow from client secrets file: {CONFIG_CLIENT_SECRETS_PATH}"
    )
    return file_flow
|
| 312 |
+
|
| 313 |
+
|
| 314 |
+
# --- Core OAuth Logic ---
|
| 315 |
+
|
| 316 |
+
|
| 317 |
+
async def start_auth_flow(
    user_google_email: Optional[str],
    service_name: str,  # e.g., "Google Calendar", "Gmail" for user messages
    redirect_uri: str,  # Added redirect_uri as a required parameter
) -> str:
    """
    Initiates the Google OAuth flow and returns an actionable message for the user.

    Args:
        user_google_email: The user's specified Google email, if provided.
        service_name: The name of the Google service requiring auth (for user messages).
        redirect_uri: The URI Google will redirect to after authorization.

    Returns:
        A formatted string containing guidance for the LLM/user.

    Raises:
        Exception: If the OAuth flow cannot be initiated.
    """
    # Treat empty/whitespace emails and the literal "default" as "no email given".
    initial_email_provided = bool(
        user_google_email
        and user_google_email.strip()
        and user_google_email.lower() != "default"
    )
    user_display_name = (
        f"{service_name} for '{user_google_email}'"
        if initial_email_provided
        else service_name
    )

    logger.info(
        f"[start_auth_flow] Initiating auth for {user_display_name} with scopes for enabled tools."
    )

    # Note: Caller should ensure OAuth callback is available before calling this function

    try:
        # oauthlib rejects plain-HTTP redirect URIs unless this env var is set;
        # enable it only for local development hosts.
        if "OAUTHLIB_INSECURE_TRANSPORT" not in os.environ and (
            "localhost" in redirect_uri or "127.0.0.1" in redirect_uri
        ):  # Use passed redirect_uri
            logger.warning(
                "OAUTHLIB_INSECURE_TRANSPORT not set. Setting it for localhost/local development."
            )
            os.environ["OAUTHLIB_INSECURE_TRANSPORT"] = "1"

        # Random state token for this authorization attempt (CSRF protection).
        oauth_state = os.urandom(16).hex()

        flow = create_oauth_flow(
            scopes=get_current_scopes(),  # Use scopes for enabled tools only
            redirect_uri=redirect_uri,  # Use passed redirect_uri
            state=oauth_state,
        )

        # offline access + consent prompt so Google issues a refresh token.
        auth_url, _ = flow.authorization_url(access_type="offline", prompt="consent")

        # Best effort: bind the state token to the current FastMCP session so the
        # callback can be matched back to this session later.
        session_id = None
        try:
            session_id = get_fastmcp_session_id()
        except Exception as e:
            logger.debug(
                f"Could not retrieve FastMCP session ID for state binding: {e}"
            )

        store = get_oauth21_session_store()
        store.store_oauth_state(oauth_state, session_id=session_id)

        logger.info(
            f"Auth flow started for {user_display_name}. State: {oauth_state[:8]}... Advise user to visit: {auth_url}"
        )

        # Build the instruction message returned to the LLM/user.
        message_lines = [
            f"**ACTION REQUIRED: Google Authentication Needed for {user_display_name}**\n",
            f"To proceed, the user must authorize this application for {service_name} access using all required permissions.",
            "**LLM, please present this exact authorization URL to the user as a clickable hyperlink:**",
            f"Authorization URL: {auth_url}",
            f"Markdown for hyperlink: [Click here to authorize {service_name} access]({auth_url})\n",
            "**LLM, after presenting the link, instruct the user as follows:**",
            "1. Click the link and complete the authorization in their browser.",
        ]
        # Currently always empty; kept as a placeholder in the messages below.
        session_info_for_llm = ""

        if not initial_email_provided:
            # No email known yet: the user must report the authenticated address back.
            message_lines.extend(
                [
                    f"2. After successful authorization{session_info_for_llm}, the browser page will display the authenticated email address.",
                    " **LLM: Instruct the user to provide you with this email address.**",
                    "3. Once you have the email, **retry their original command, ensuring you include this `user_google_email`.**",
                ]
            )
        else:
            message_lines.append(
                f"2. After successful authorization{session_info_for_llm}, **retry their original command**."
            )

        message_lines.append(
            f"\nThe application will use the new credentials. If '{user_google_email}' was provided, it must match the authenticated account."
        )
        return "\n".join(message_lines)

    except FileNotFoundError as e:
        # Raised by create_oauth_flow when no client credentials are configured.
        error_text = f"OAuth client credentials not found: {e}. Please either:\n1. Set environment variables: GOOGLE_OAUTH_CLIENT_ID and GOOGLE_OAUTH_CLIENT_SECRET\n2. Ensure '{CONFIG_CLIENT_SECRETS_PATH}' file exists"
        logger.error(error_text, exc_info=True)
        raise Exception(error_text)
    except Exception as e:
        error_text = f"Could not initiate authentication for {user_display_name} due to an unexpected error: {str(e)}"
        logger.error(
            f"Failed to start the OAuth flow for {user_display_name}: {e}",
            exc_info=True,
        )
        raise Exception(error_text)
|
| 427 |
+
|
| 428 |
+
|
| 429 |
+
def handle_auth_callback(
    scopes: List[str],
    authorization_response: str,
    redirect_uri: str,
    credentials_base_dir: str = DEFAULT_CREDENTIALS_DIR,
    session_id: Optional[str] = None,
    client_secrets_path: Optional[str] = None,  # Deprecated: kept for backward compatibility
) -> Tuple[str, Credentials]:
    """
    Handles the callback from Google, exchanges the code for credentials,
    fetches user info, determines user_google_email, saves credentials (file & session),
    and returns them.

    Args:
        scopes: List of OAuth scopes requested.
        authorization_response: The full callback URL from Google.
        redirect_uri: The redirect URI.
        credentials_base_dir: Base directory for credential files.
        session_id: Optional MCP session ID to associate with the credentials.
        client_secrets_path: (Deprecated) Path to client secrets file. Ignored if environment variables are set.

    Returns:
        A tuple containing the user_google_email and the obtained Credentials object.

    Raises:
        ValueError: If the state is missing or doesn't match.
        FlowExchangeError: If the code exchange fails.
        HttpError: If fetching user info fails.
    """
    try:
        # Log deprecation warning if old parameter is used
        if client_secrets_path:
            logger.warning(
                "The 'client_secrets_path' parameter is deprecated. Use GOOGLE_OAUTH_CLIENT_ID and GOOGLE_OAUTH_CLIENT_SECRET environment variables instead."
            )

        # Allow HTTP for localhost in development
        if "OAUTHLIB_INSECURE_TRANSPORT" not in os.environ:
            logger.warning(
                "OAUTHLIB_INSECURE_TRANSPORT not set. Setting it for localhost development."
            )
            os.environ["OAUTHLIB_INSECURE_TRANSPORT"] = "1"

        # Pull the CSRF state token out of the callback URL's query string.
        store = get_oauth21_session_store()
        parsed_response = urlparse(authorization_response)
        state_values = parse_qs(parsed_response.query).get("state")
        state = state_values[0] if state_values else None

        # One-shot validation: the stored state is consumed on success.
        state_info = store.validate_and_consume_oauth_state(
            state, session_id=session_id
        )
        logger.debug(
            "Validated OAuth callback state %s for session %s",
            (state[:8] if state else "<missing>"),
            state_info.get("session_id") or "<unknown>",
        )

        flow = create_oauth_flow(scopes=scopes, redirect_uri=redirect_uri, state=state)

        # Exchange the authorization code for credentials
        # Note: fetch_token will use the redirect_uri configured in the flow
        flow.fetch_token(authorization_response=authorization_response)
        credentials = flow.credentials
        logger.info("Successfully exchanged authorization code for tokens.")

        # Get user info to determine user_id (using email here)
        user_info = get_user_info(credentials)
        if not user_info or "email" not in user_info:
            logger.error("Could not retrieve user email from Google.")
            raise ValueError("Failed to get user email for identification.")

        user_google_email = user_info["email"]
        logger.info(f"Identified user_google_email: {user_google_email}")

        # Save the credentials
        credential_store = get_credential_store()
        credential_store.store_credential(user_google_email, credentials)

        # Always save to OAuth21SessionStore for centralized management
        store = get_oauth21_session_store()
        store.store_session(
            user_email=user_google_email,
            access_token=credentials.token,
            refresh_token=credentials.refresh_token,
            token_uri=credentials.token_uri,
            client_id=credentials.client_id,
            client_secret=credentials.client_secret,
            scopes=credentials.scopes,
            expiry=credentials.expiry,
            mcp_session_id=session_id,
            issuer="https://accounts.google.com",  # Add issuer for Google tokens
        )

        # If session_id is provided, also save to session cache for compatibility
        if session_id:
            save_credentials_to_session(session_id, credentials)

        return user_google_email, credentials

    except Exception as e:  # Catch specific exceptions like FlowExchangeError if needed
        logger.error(f"Error handling auth callback: {e}")
        raise  # Re-raise for the caller
|
| 533 |
+
|
| 534 |
+
|
| 535 |
+
def get_credentials(
    user_google_email: Optional[str],  # Can be None if relying on session_id
    required_scopes: List[str],
    client_secrets_path: Optional[str] = None,
    credentials_base_dir: str = DEFAULT_CREDENTIALS_DIR,
    session_id: Optional[str] = None,
) -> Optional[Credentials]:
    """
    Retrieves stored credentials, prioritizing OAuth 2.1 store, then session, then file. Refreshes if necessary.
    If credentials are loaded from file and a session_id is present, they are cached in the session.
    In single-user mode, bypasses session mapping and uses any available credentials.

    Args:
        user_google_email: Optional user's Google email.
        required_scopes: List of scopes the credentials must have.
        client_secrets_path: Optional path to client secrets (legacy; refresh uses embedded client info).
        credentials_base_dir: Base directory for credential files.
        session_id: Optional MCP session ID.

    Returns:
        Valid Credentials object or None.
    """
    # First, try OAuth 2.1 session store if we have a session_id (FastMCP session)
    if session_id:
        try:
            store = get_oauth21_session_store()

            # Try to get credentials by MCP session
            credentials = store.get_credentials_by_mcp_session(session_id)
            if credentials:
                logger.info(
                    f"[get_credentials] Found OAuth 2.1 credentials for MCP session {session_id}"
                )

                # Check scopes
                if not all(scope in credentials.scopes for scope in required_scopes):
                    logger.warning(
                        f"[get_credentials] OAuth 2.1 credentials lack required scopes. Need: {required_scopes}, Have: {credentials.scopes}"
                    )
                    return None

                # Return if valid
                if credentials.valid:
                    return credentials
                elif credentials.expired and credentials.refresh_token:
                    # Try to refresh
                    try:
                        credentials.refresh(Request())
                        logger.info(
                            f"[get_credentials] Refreshed OAuth 2.1 credentials for session {session_id}"
                        )
                        # Update stored credentials
                        # NOTE(review): unlike handle_auth_callback, this call omits
                        # token_uri/client_id/client_secret/issuer — confirm
                        # store_session defaults preserve them on refresh.
                        user_email = store.get_user_by_mcp_session(session_id)
                        if user_email:
                            store.store_session(
                                user_email=user_email,
                                access_token=credentials.token,
                                refresh_token=credentials.refresh_token,
                                scopes=credentials.scopes,
                                expiry=credentials.expiry,
                                mcp_session_id=session_id,
                            )
                        return credentials
                    except Exception as e:
                        logger.error(
                            f"[get_credentials] Failed to refresh OAuth 2.1 credentials: {e}"
                        )
                        return None
        except ImportError:
            pass  # OAuth 2.1 store not available
        except Exception as e:
            logger.debug(f"[get_credentials] Error checking OAuth 2.1 store: {e}")

    # Check for single-user mode
    if os.getenv("MCP_SINGLE_USER_MODE") == "1":
        logger.info(
            "[get_credentials] Single-user mode: bypassing session mapping, finding any credentials"
        )
        credentials = _find_any_credentials(credentials_base_dir)
        if not credentials:
            logger.info(
                f"[get_credentials] Single-user mode: No credentials found in {credentials_base_dir}"
            )
            return None

        # In single-user mode, if user_google_email wasn't provided, try to get it from user info
        # This is needed for proper credential saving after refresh
        if not user_google_email and credentials.valid:
            try:
                user_info = get_user_info(credentials)
                if user_info and "email" in user_info:
                    user_google_email = user_info["email"]
                    logger.debug(
                        f"[get_credentials] Single-user mode: extracted user email {user_google_email} from credentials"
                    )
            except Exception as e:
                logger.debug(
                    f"[get_credentials] Single-user mode: could not extract user email: {e}"
                )
    else:
        # Multi-user path: resolve credentials from session cache, then file store.
        credentials: Optional[Credentials] = None

        # Session ID should be provided by the caller
        if not session_id:
            logger.debug("[get_credentials] No session_id provided")

        logger.debug(
            f"[get_credentials] Called for user_google_email: '{user_google_email}', session_id: '{session_id}', required_scopes: {required_scopes}"
        )

        if session_id:
            credentials = load_credentials_from_session(session_id)
            if credentials:
                logger.debug(
                    f"[get_credentials] Loaded credentials from session for session_id '{session_id}'."
                )

        if not credentials and user_google_email:
            if not is_stateless_mode():
                logger.debug(
                    f"[get_credentials] No session credentials, trying credential store for user_google_email '{user_google_email}'."
                )
                store = get_credential_store()
                credentials = store.get_credential(user_google_email)
            else:
                logger.debug(
                    f"[get_credentials] No session credentials, skipping file store in stateless mode for user_google_email '{user_google_email}'."
                )

        if credentials and session_id:
            logger.debug(
                f"[get_credentials] Loaded from file for user '{user_google_email}', caching to session '{session_id}'."
            )
            save_credentials_to_session(
                session_id, credentials
            )  # Cache for current session

    if not credentials:
        logger.info(
            f"[get_credentials] No credentials found for user '{user_google_email}' or session '{session_id}'."
        )
        return None

    logger.debug(
        f"[get_credentials] Credentials found. Scopes: {credentials.scopes}, Valid: {credentials.valid}, Expired: {credentials.expired}"
    )

    if not all(scope in credentials.scopes for scope in required_scopes):
        logger.warning(
            f"[get_credentials] Credentials lack required scopes. Need: {required_scopes}, Have: {credentials.scopes}. User: '{user_google_email}', Session: '{session_id}'"
        )
        return None  # Re-authentication needed for scopes

    logger.debug(
        f"[get_credentials] Credentials have sufficient scopes. User: '{user_google_email}', Session: '{session_id}'"
    )

    if credentials.valid:
        logger.debug(
            f"[get_credentials] Credentials are valid. User: '{user_google_email}', Session: '{session_id}'"
        )
        return credentials
    elif credentials.expired and credentials.refresh_token:
        logger.info(
            f"[get_credentials] Credentials expired. Attempting refresh. User: '{user_google_email}', Session: '{session_id}'"
        )
        try:
            logger.debug(
                "[get_credentials] Refreshing token using embedded client credentials"
            )
            # client_config = load_client_secrets(client_secrets_path) # Not strictly needed if creds have client_id/secret
            credentials.refresh(Request())
            logger.info(
                f"[get_credentials] Credentials refreshed successfully. User: '{user_google_email}', Session: '{session_id}'"
            )

            # Save refreshed credentials (skip file save in stateless mode)
            if user_google_email:  # Always save to credential store if email is known
                if not is_stateless_mode():
                    credential_store = get_credential_store()
                    credential_store.store_credential(user_google_email, credentials)
                else:
                    logger.info(
                        f"Skipping credential file save in stateless mode for {user_google_email}"
                    )

                # Also update OAuth21SessionStore
                store = get_oauth21_session_store()
                store.store_session(
                    user_email=user_google_email,
                    access_token=credentials.token,
                    refresh_token=credentials.refresh_token,
                    token_uri=credentials.token_uri,
                    client_id=credentials.client_id,
                    client_secret=credentials.client_secret,
                    scopes=credentials.scopes,
                    expiry=credentials.expiry,
                    mcp_session_id=session_id,
                    issuer="https://accounts.google.com",  # Add issuer for Google tokens
                )

            if session_id:  # Update session cache if it was the source or is active
                save_credentials_to_session(session_id, credentials)
            return credentials
        except RefreshError as e:
            logger.warning(
                f"[get_credentials] RefreshError - token expired/revoked: {e}. User: '{user_google_email}', Session: '{session_id}'"
            )
            # For RefreshError, we should return None to trigger reauthentication
            return None
        except Exception as e:
            logger.error(
                f"[get_credentials] Error refreshing credentials: {e}. User: '{user_google_email}', Session: '{session_id}'",
                exc_info=True,
            )
            return None  # Failed to refresh
    else:
        logger.warning(
            f"[get_credentials] Credentials invalid/cannot refresh. Valid: {credentials.valid}, Refresh Token: {credentials.refresh_token is not None}. User: '{user_google_email}', Session: '{session_id}'"
        )
        return None
|
| 756 |
+
|
| 757 |
+
|
| 758 |
+
def get_user_info(credentials: Credentials) -> Optional[Dict[str, Any]]:
    """Fetch the basic user profile for *credentials*.

    Requires the ``userinfo.email`` scope. Returns the userinfo payload dict
    (including the ``email`` key) on success, or ``None`` when the credentials
    are missing/invalid or the API call fails.
    """
    if credentials is None or not credentials.valid:
        logger.error("Cannot get user info: Invalid or missing credentials.")
        return None

    try:
        # Build the OAuth2 userinfo service via googleapiclient discovery
        # (requires the 'google-api-python-client' library).
        oauth2_service = build("oauth2", "v2", credentials=credentials)
        profile = oauth2_service.userinfo().get().execute()
    except HttpError as e:
        # e.g. 401 Unauthorized typically indicates a token problem.
        logger.error(f"HttpError fetching user info: {e.status_code} {e.reason}")
        return None
    except Exception as e:
        logger.error(f"Unexpected error fetching user info: {e}")
        return None

    logger.info(f"Successfully fetched user info: {profile.get('email')}")
    return profile
|
| 777 |
+
|
| 778 |
+
|
| 779 |
+
# --- Centralized Google Service Authentication ---
|
| 780 |
+
|
| 781 |
+
|
| 782 |
+
class GoogleAuthenticationError(Exception):
    """Raised when Google authentication is required or has failed.

    Attributes:
        auth_url: Optional authorization URL the user should visit to
            (re)authenticate; ``None`` when no auth flow was started.
    """

    def __init__(self, message: str, auth_url: Optional[str] = None):
        # Keep the auth URL on the exception so callers can surface it.
        self.auth_url = auth_url
        super().__init__(message)
|
| 788 |
+
|
| 789 |
+
|
| 790 |
+
async def get_authenticated_google_service(
    service_name: str,  # "gmail", "calendar", "drive", "docs"
    version: str,  # "v1", "v3"
    tool_name: str,  # For logging/debugging
    user_google_email: str,  # Required - no more Optional
    required_scopes: List[str],
    session_id: Optional[str] = None,  # Session context for logging
) -> tuple[Any, str]:
    """
    Centralized Google service authentication for all MCP tools.
    Returns (service, user_email) on success or raises GoogleAuthenticationError.

    Args:
        service_name: The Google service name ("gmail", "calendar", "drive", "docs")
        version: The API version ("v1", "v3", etc.)
        tool_name: The name of the calling tool (for logging/debugging)
        user_google_email: The user's Google email address (required)
        required_scopes: List of required OAuth scopes
        session_id: Optional pre-resolved session ID; when omitted, it is
            looked up from the FastMCP context (best effort).

    Returns:
        tuple[service, user_email] on success

    Raises:
        GoogleAuthenticationError: When authentication is required or fails
    """

    # Try to get FastMCP session ID if not provided
    if not session_id:
        try:
            # First try context variable (works in async context)
            session_id = get_fastmcp_session_id()
            if session_id:
                logger.debug(
                    f"[{tool_name}] Got FastMCP session ID from context: {session_id}"
                )
            else:
                logger.debug(
                    f"[{tool_name}] Context variable returned None/empty session ID"
                )
        except Exception as e:
            logger.debug(
                f"[{tool_name}] Could not get FastMCP session from context: {e}"
            )

        # Fallback to direct FastMCP context if context variable not set
        if not session_id and get_fastmcp_context:
            try:
                fastmcp_ctx = get_fastmcp_context()
                if fastmcp_ctx and hasattr(fastmcp_ctx, "session_id"):
                    session_id = fastmcp_ctx.session_id
                    logger.debug(
                        f"[{tool_name}] Got FastMCP session ID directly: {session_id}"
                    )
                else:
                    logger.debug(
                        f"[{tool_name}] FastMCP context exists but no session_id attribute"
                    )
            except Exception as e:
                logger.debug(
                    f"[{tool_name}] Could not get FastMCP context directly: {e}"
                )

        # Final fallback: log if we still don't have session_id
        if not session_id:
            logger.warning(
                f"[{tool_name}] Unable to obtain FastMCP session ID from any source"
            )

    logger.info(
        f"[{tool_name}] Attempting to get authenticated {service_name} service. Email: '{user_google_email}', Session: '{session_id}'"
    )

    # Validate email format
    if not user_google_email or "@" not in user_google_email:
        error_msg = f"Authentication required for {tool_name}. No valid 'user_google_email' provided. Please provide a valid Google email address."
        logger.info(f"[{tool_name}] {error_msg}")
        raise GoogleAuthenticationError(error_msg)

    # get_credentials does blocking I/O; run it off the event loop.
    credentials = await asyncio.to_thread(
        get_credentials,
        user_google_email=user_google_email,
        required_scopes=required_scopes,
        client_secrets_path=CONFIG_CLIENT_SECRETS_PATH,
        session_id=session_id,  # Pass through session context
    )

    if not credentials or not credentials.valid:
        logger.warning(
            f"[{tool_name}] No valid credentials. Email: '{user_google_email}'."
        )
        logger.info(
            f"[{tool_name}] Valid email '{user_google_email}' provided, initiating auth flow."
        )

        # Ensure OAuth callback is available
        from auth.oauth_callback_server import ensure_oauth_callback_available

        redirect_uri = get_oauth_redirect_uri()
        config = get_oauth_config()
        success, error_msg = ensure_oauth_callback_available(
            get_transport_mode(), config.port, config.base_uri
        )
        if not success:
            error_detail = f" ({error_msg})" if error_msg else ""
            raise GoogleAuthenticationError(
                f"Cannot initiate OAuth flow - callback server unavailable{error_detail}"
            )

        # Generate auth URL and raise exception with it
        auth_response = await start_auth_flow(
            user_google_email=user_google_email,
            service_name=f"Google {service_name.title()}",
            redirect_uri=redirect_uri,
        )

        # Extract the auth URL from the response and raise with it
        raise GoogleAuthenticationError(auth_response)

    try:
        service = build(service_name, version, credentials=credentials)
        log_user_email = user_google_email

        # Try to get email from credentials if needed for validation
        if credentials and credentials.id_token:
            try:
                # Decode without verification (just to get email for logging)
                decoded_token = jwt.decode(
                    credentials.id_token, options={"verify_signature": False}
                )
                token_email = decoded_token.get("email")
                if token_email:
                    log_user_email = token_email
                    logger.info(f"[{tool_name}] Token email: {token_email}")
            except Exception as e:
                logger.debug(f"[{tool_name}] Could not decode id_token: {e}")

        logger.info(
            f"[{tool_name}] Successfully authenticated {service_name} service for user: {log_user_email}"
        )
        return service, log_user_email

    except Exception as e:
        error_msg = f"[{tool_name}] Failed to build {service_name} service: {str(e)}"
        logger.error(error_msg, exc_info=True)
        raise GoogleAuthenticationError(error_msg)
|
auth/mcp_session_middleware.py
ADDED
|
@@ -0,0 +1,120 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
MCP Session Middleware
|
| 3 |
+
|
| 4 |
+
This middleware intercepts MCP requests and sets the session context
|
| 5 |
+
for use by tool functions.
|
| 6 |
+
"""
|
| 7 |
+
|
| 8 |
+
import logging
|
| 9 |
+
from typing import Callable, Any
|
| 10 |
+
|
| 11 |
+
from starlette.middleware.base import BaseHTTPMiddleware
|
| 12 |
+
from starlette.requests import Request
|
| 13 |
+
|
| 14 |
+
from auth.oauth21_session_store import (
|
| 15 |
+
SessionContext,
|
| 16 |
+
SessionContextManager,
|
| 17 |
+
extract_session_from_headers,
|
| 18 |
+
)
|
| 19 |
+
# OAuth 2.1 is now handled by FastMCP auth
|
| 20 |
+
|
| 21 |
+
logger = logging.getLogger(__name__)
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
class MCPSessionMiddleware(BaseHTTPMiddleware):
    """
    Middleware that extracts session information from requests and makes it
    available to MCP tool functions via context variables.
    """

    async def dispatch(self, request: Request, call_next: Callable) -> Any:
        """Process request and set session context.

        Resolves a session ID from (in priority order) explicit session
        headers, the authenticated Google user email, then the FastMCP
        transport session ID, and runs the downstream handler inside a
        SessionContextManager. On any extraction error the request is still
        processed, just without session context.
        """

        logger.debug(
            f"MCPSessionMiddleware processing request: {request.method} {request.url.path}"
        )

        # Skip non-MCP paths
        if not request.url.path.startswith("/mcp"):
            logger.debug(f"Skipping non-MCP path: {request.url.path}")
            return await call_next(request)

        session_context = None

        try:
            # Extract session information
            headers = dict(request.headers)
            session_id = extract_session_from_headers(headers)

            # Try to get OAuth 2.1 auth context from FastMCP
            auth_context = None
            user_email = None
            mcp_session_id = None
            # Check for FastMCP auth context
            if hasattr(request.state, "auth"):
                auth_context = request.state.auth
                # Extract user email from auth claims if available
                if hasattr(auth_context, "claims") and auth_context.claims:
                    user_email = auth_context.claims.get("email")

            # Check for FastMCP session ID (from streamable HTTP transport)
            if hasattr(request.state, "session_id"):
                mcp_session_id = request.state.session_id
                logger.debug(f"Found FastMCP session ID: {mcp_session_id}")

            # Also check Authorization header for bearer tokens
            auth_header = headers.get("authorization")
            if (
                auth_header
                and auth_header.lower().startswith("bearer ")
                and not user_email
            ):
                try:
                    import jwt

                    token = auth_header[7:]  # Remove "Bearer " prefix
                    # Decode without verification to extract email
                    # (identification only — NOT a security check).
                    claims = jwt.decode(token, options={"verify_signature": False})
                    user_email = claims.get("email")
                    if user_email:
                        logger.debug(f"Extracted user email from JWT: {user_email}")
                except Exception:
                    # Malformed/opaque token: proceed without an email.
                    pass

            # Build session context
            if session_id or auth_context or user_email or mcp_session_id:
                # Create session ID hierarchy: explicit session_id > Google user session > FastMCP session
                effective_session_id = session_id
                if not effective_session_id and user_email:
                    effective_session_id = f"google_{user_email}"
                elif not effective_session_id and mcp_session_id:
                    effective_session_id = mcp_session_id

                session_context = SessionContext(
                    session_id=effective_session_id,
                    user_id=user_email
                    or (auth_context.user_id if auth_context else None),
                    auth_context=auth_context,
                    request=request,
                    metadata={
                        "path": request.url.path,
                        "method": request.method,
                        "user_email": user_email,
                        "mcp_session_id": mcp_session_id,
                    },
                )

                logger.debug(
                    f"MCP request with session: session_id={session_context.session_id}, "
                    f"user_id={session_context.user_id}, path={request.url.path}"
                )

            # Process request with session context
            with SessionContextManager(session_context):
                response = await call_next(request)
                return response

        except Exception as e:
            logger.error(f"Error in MCP session middleware: {e}")
            # Continue without session context
            return await call_next(request)
|
auth/oauth21_session_store.py
ADDED
|
@@ -0,0 +1,893 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
OAuth 2.1 Session Store for Google Services
|
| 3 |
+
|
| 4 |
+
This module provides a global store for OAuth 2.1 authenticated sessions
|
| 5 |
+
that can be accessed by Google service decorators. It also includes
|
| 6 |
+
session context management and credential conversion functionality.
|
| 7 |
+
"""
|
| 8 |
+
|
| 9 |
+
import contextvars
|
| 10 |
+
import logging
|
| 11 |
+
from typing import Dict, Optional, Any, Tuple
|
| 12 |
+
from threading import RLock
|
| 13 |
+
from datetime import datetime, timedelta, timezone
|
| 14 |
+
from dataclasses import dataclass
|
| 15 |
+
|
| 16 |
+
from fastmcp.server.auth import AccessToken
|
| 17 |
+
from google.oauth2.credentials import Credentials
|
| 18 |
+
|
| 19 |
+
logger = logging.getLogger(__name__)
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
def _normalize_expiry_to_naive_utc(expiry: Optional[Any]) -> Optional[datetime]:
|
| 23 |
+
"""
|
| 24 |
+
Convert expiry values to timezone-naive UTC datetimes for google-auth compatibility.
|
| 25 |
+
|
| 26 |
+
Naive datetime inputs are assumed to already represent UTC and are returned unchanged so that
|
| 27 |
+
google-auth Credentials receive naive UTC datetimes for expiry comparison.
|
| 28 |
+
"""
|
| 29 |
+
if expiry is None:
|
| 30 |
+
return None
|
| 31 |
+
|
| 32 |
+
if isinstance(expiry, datetime):
|
| 33 |
+
if expiry.tzinfo is not None:
|
| 34 |
+
try:
|
| 35 |
+
return expiry.astimezone(timezone.utc).replace(tzinfo=None)
|
| 36 |
+
except Exception: # pragma: no cover - defensive
|
| 37 |
+
logger.debug(
|
| 38 |
+
"Failed to normalize aware expiry; returning without tzinfo"
|
| 39 |
+
)
|
| 40 |
+
return expiry.replace(tzinfo=None)
|
| 41 |
+
return expiry # Already naive; assumed to represent UTC
|
| 42 |
+
|
| 43 |
+
if isinstance(expiry, str):
|
| 44 |
+
try:
|
| 45 |
+
parsed = datetime.fromisoformat(expiry.replace("Z", "+00:00"))
|
| 46 |
+
except ValueError:
|
| 47 |
+
logger.debug("Failed to parse expiry string '%s'", expiry)
|
| 48 |
+
return None
|
| 49 |
+
return _normalize_expiry_to_naive_utc(parsed)
|
| 50 |
+
|
| 51 |
+
logger.debug("Unsupported expiry type '%s' (%s)", expiry, type(expiry))
|
| 52 |
+
return None
|
| 53 |
+
|
| 54 |
+
|
| 55 |
+
# Context variable to store the current session information
|
| 56 |
+
_current_session_context: contextvars.ContextVar[Optional["SessionContext"]] = (
|
| 57 |
+
contextvars.ContextVar("current_session_context", default=None)
|
| 58 |
+
)
|
| 59 |
+
|
| 60 |
+
|
| 61 |
+
@dataclass
|
| 62 |
+
class SessionContext:
|
| 63 |
+
"""Container for session-related information."""
|
| 64 |
+
|
| 65 |
+
session_id: Optional[str] = None
|
| 66 |
+
user_id: Optional[str] = None
|
| 67 |
+
auth_context: Optional[Any] = None
|
| 68 |
+
request: Optional[Any] = None
|
| 69 |
+
metadata: Dict[str, Any] = None
|
| 70 |
+
issuer: Optional[str] = None
|
| 71 |
+
|
| 72 |
+
def __post_init__(self):
|
| 73 |
+
if self.metadata is None:
|
| 74 |
+
self.metadata = {}
|
| 75 |
+
|
| 76 |
+
|
| 77 |
+
def set_session_context(context: Optional[SessionContext]):
|
| 78 |
+
"""
|
| 79 |
+
Set the current session context.
|
| 80 |
+
|
| 81 |
+
Args:
|
| 82 |
+
context: The session context to set
|
| 83 |
+
"""
|
| 84 |
+
_current_session_context.set(context)
|
| 85 |
+
if context:
|
| 86 |
+
logger.debug(
|
| 87 |
+
f"Set session context: session_id={context.session_id}, user_id={context.user_id}"
|
| 88 |
+
)
|
| 89 |
+
else:
|
| 90 |
+
logger.debug("Cleared session context")
|
| 91 |
+
|
| 92 |
+
|
| 93 |
+
def get_session_context() -> Optional[SessionContext]:
|
| 94 |
+
"""
|
| 95 |
+
Get the current session context.
|
| 96 |
+
|
| 97 |
+
Returns:
|
| 98 |
+
The current session context or None
|
| 99 |
+
"""
|
| 100 |
+
return _current_session_context.get()
|
| 101 |
+
|
| 102 |
+
|
| 103 |
+
def clear_session_context():
|
| 104 |
+
"""Clear the current session context."""
|
| 105 |
+
set_session_context(None)
|
| 106 |
+
|
| 107 |
+
|
| 108 |
+
class SessionContextManager:
|
| 109 |
+
"""
|
| 110 |
+
Context manager for temporarily setting session context.
|
| 111 |
+
|
| 112 |
+
Usage:
|
| 113 |
+
with SessionContextManager(session_context):
|
| 114 |
+
# Code that needs access to session context
|
| 115 |
+
pass
|
| 116 |
+
"""
|
| 117 |
+
|
| 118 |
+
def __init__(self, context: Optional[SessionContext]):
|
| 119 |
+
self.context = context
|
| 120 |
+
self.token = None
|
| 121 |
+
|
| 122 |
+
def __enter__(self):
|
| 123 |
+
"""Set the session context."""
|
| 124 |
+
self.token = _current_session_context.set(self.context)
|
| 125 |
+
return self.context
|
| 126 |
+
|
| 127 |
+
def __exit__(self, exc_type, exc_val, exc_tb):
|
| 128 |
+
"""Reset the session context."""
|
| 129 |
+
if self.token:
|
| 130 |
+
_current_session_context.reset(self.token)
|
| 131 |
+
|
| 132 |
+
|
| 133 |
+
def extract_session_from_headers(headers: Dict[str, str]) -> Optional[str]:
|
| 134 |
+
"""
|
| 135 |
+
Extract session ID from request headers.
|
| 136 |
+
|
| 137 |
+
Args:
|
| 138 |
+
headers: Request headers
|
| 139 |
+
|
| 140 |
+
Returns:
|
| 141 |
+
Session ID if found
|
| 142 |
+
"""
|
| 143 |
+
# Try different header names
|
| 144 |
+
session_id = headers.get("mcp-session-id") or headers.get("Mcp-Session-Id")
|
| 145 |
+
if session_id:
|
| 146 |
+
return session_id
|
| 147 |
+
|
| 148 |
+
session_id = headers.get("x-session-id") or headers.get("X-Session-ID")
|
| 149 |
+
if session_id:
|
| 150 |
+
return session_id
|
| 151 |
+
|
| 152 |
+
# Try Authorization header for Bearer token
|
| 153 |
+
auth_header = headers.get("authorization") or headers.get("Authorization")
|
| 154 |
+
if auth_header and auth_header.lower().startswith("bearer "):
|
| 155 |
+
# Extract bearer token and try to find associated session
|
| 156 |
+
token = auth_header[7:] # Remove "Bearer " prefix
|
| 157 |
+
if token:
|
| 158 |
+
# Look for a session that has this access token
|
| 159 |
+
# This requires scanning sessions, but bearer tokens should be unique
|
| 160 |
+
store = get_oauth21_session_store()
|
| 161 |
+
for user_email, session_info in store._sessions.items():
|
| 162 |
+
if session_info.get("access_token") == token:
|
| 163 |
+
return session_info.get("session_id") or f"bearer_{user_email}"
|
| 164 |
+
|
| 165 |
+
# If no session found, create a temporary session ID from token hash
|
| 166 |
+
# This allows header-based authentication to work with session context
|
| 167 |
+
import hashlib
|
| 168 |
+
|
| 169 |
+
token_hash = hashlib.sha256(token.encode()).hexdigest()[:8]
|
| 170 |
+
return f"bearer_token_{token_hash}"
|
| 171 |
+
|
| 172 |
+
return None
|
| 173 |
+
|
| 174 |
+
|
| 175 |
+
# =============================================================================
|
| 176 |
+
# OAuth21SessionStore - Main Session Management
|
| 177 |
+
# =============================================================================
|
| 178 |
+
|
| 179 |
+
|
| 180 |
+
class OAuth21SessionStore:
|
| 181 |
+
"""
|
| 182 |
+
Global store for OAuth 2.1 authenticated sessions.
|
| 183 |
+
|
| 184 |
+
This store maintains a mapping of user emails to their OAuth 2.1
|
| 185 |
+
authenticated credentials, allowing Google services to access them.
|
| 186 |
+
It also maintains a mapping from FastMCP session IDs to user emails.
|
| 187 |
+
|
| 188 |
+
Security: Sessions are bound to specific users and can only access
|
| 189 |
+
their own credentials.
|
| 190 |
+
"""
|
| 191 |
+
|
| 192 |
+
def __init__(self):
|
| 193 |
+
self._sessions: Dict[str, Dict[str, Any]] = {}
|
| 194 |
+
self._mcp_session_mapping: Dict[
|
| 195 |
+
str, str
|
| 196 |
+
] = {} # Maps FastMCP session ID -> user email
|
| 197 |
+
self._session_auth_binding: Dict[
|
| 198 |
+
str, str
|
| 199 |
+
] = {} # Maps session ID -> authenticated user email (immutable)
|
| 200 |
+
self._oauth_states: Dict[str, Dict[str, Any]] = {}
|
| 201 |
+
self._lock = RLock()
|
| 202 |
+
|
| 203 |
+
def _cleanup_expired_oauth_states_locked(self):
|
| 204 |
+
"""Remove expired OAuth state entries. Caller must hold lock."""
|
| 205 |
+
now = datetime.now(timezone.utc)
|
| 206 |
+
expired_states = [
|
| 207 |
+
state
|
| 208 |
+
for state, data in self._oauth_states.items()
|
| 209 |
+
if data.get("expires_at") and data["expires_at"] <= now
|
| 210 |
+
]
|
| 211 |
+
for state in expired_states:
|
| 212 |
+
del self._oauth_states[state]
|
| 213 |
+
logger.debug(
|
| 214 |
+
"Removed expired OAuth state: %s",
|
| 215 |
+
state[:8] if len(state) > 8 else state,
|
| 216 |
+
)
|
| 217 |
+
|
| 218 |
+
def store_oauth_state(
|
| 219 |
+
self,
|
| 220 |
+
state: str,
|
| 221 |
+
session_id: Optional[str] = None,
|
| 222 |
+
expires_in_seconds: int = 600,
|
| 223 |
+
) -> None:
|
| 224 |
+
"""Persist an OAuth state value for later validation."""
|
| 225 |
+
if not state:
|
| 226 |
+
raise ValueError("OAuth state must be provided")
|
| 227 |
+
if expires_in_seconds < 0:
|
| 228 |
+
raise ValueError("expires_in_seconds must be non-negative")
|
| 229 |
+
|
| 230 |
+
with self._lock:
|
| 231 |
+
self._cleanup_expired_oauth_states_locked()
|
| 232 |
+
now = datetime.now(timezone.utc)
|
| 233 |
+
expiry = now + timedelta(seconds=expires_in_seconds)
|
| 234 |
+
self._oauth_states[state] = {
|
| 235 |
+
"session_id": session_id,
|
| 236 |
+
"expires_at": expiry,
|
| 237 |
+
"created_at": now,
|
| 238 |
+
}
|
| 239 |
+
logger.debug(
|
| 240 |
+
"Stored OAuth state %s (expires at %s)",
|
| 241 |
+
state[:8] if len(state) > 8 else state,
|
| 242 |
+
expiry.isoformat(),
|
| 243 |
+
)
|
| 244 |
+
|
| 245 |
+
def validate_and_consume_oauth_state(
|
| 246 |
+
self,
|
| 247 |
+
state: str,
|
| 248 |
+
session_id: Optional[str] = None,
|
| 249 |
+
) -> Dict[str, Any]:
|
| 250 |
+
"""
|
| 251 |
+
Validate that a state value exists and consume it.
|
| 252 |
+
|
| 253 |
+
Args:
|
| 254 |
+
state: The OAuth state returned by Google.
|
| 255 |
+
session_id: Optional session identifier that initiated the flow.
|
| 256 |
+
|
| 257 |
+
Returns:
|
| 258 |
+
Metadata associated with the state.
|
| 259 |
+
|
| 260 |
+
Raises:
|
| 261 |
+
ValueError: If the state is missing, expired, or does not match the session.
|
| 262 |
+
"""
|
| 263 |
+
if not state:
|
| 264 |
+
raise ValueError("Missing OAuth state parameter")
|
| 265 |
+
|
| 266 |
+
with self._lock:
|
| 267 |
+
self._cleanup_expired_oauth_states_locked()
|
| 268 |
+
state_info = self._oauth_states.get(state)
|
| 269 |
+
|
| 270 |
+
if not state_info:
|
| 271 |
+
logger.error(
|
| 272 |
+
"SECURITY: OAuth callback received unknown or expired state"
|
| 273 |
+
)
|
| 274 |
+
raise ValueError("Invalid or expired OAuth state parameter")
|
| 275 |
+
|
| 276 |
+
bound_session = state_info.get("session_id")
|
| 277 |
+
if bound_session and session_id and bound_session != session_id:
|
| 278 |
+
# Consume the state to prevent replay attempts
|
| 279 |
+
del self._oauth_states[state]
|
| 280 |
+
logger.error(
|
| 281 |
+
"SECURITY: OAuth state session mismatch (expected %s, got %s)",
|
| 282 |
+
bound_session,
|
| 283 |
+
session_id,
|
| 284 |
+
)
|
| 285 |
+
raise ValueError("OAuth state does not match the initiating session")
|
| 286 |
+
|
| 287 |
+
# State is valid – consume it to prevent reuse
|
| 288 |
+
del self._oauth_states[state]
|
| 289 |
+
logger.debug(
|
| 290 |
+
"Validated OAuth state %s",
|
| 291 |
+
state[:8] if len(state) > 8 else state,
|
| 292 |
+
)
|
| 293 |
+
return state_info
|
| 294 |
+
|
| 295 |
+
def store_session(
|
| 296 |
+
self,
|
| 297 |
+
user_email: str,
|
| 298 |
+
access_token: str,
|
| 299 |
+
refresh_token: Optional[str] = None,
|
| 300 |
+
token_uri: str = "https://oauth2.googleapis.com/token",
|
| 301 |
+
client_id: Optional[str] = None,
|
| 302 |
+
client_secret: Optional[str] = None,
|
| 303 |
+
scopes: Optional[list] = None,
|
| 304 |
+
expiry: Optional[Any] = None,
|
| 305 |
+
session_id: Optional[str] = None,
|
| 306 |
+
mcp_session_id: Optional[str] = None,
|
| 307 |
+
issuer: Optional[str] = None,
|
| 308 |
+
):
|
| 309 |
+
"""
|
| 310 |
+
Store OAuth 2.1 session information.
|
| 311 |
+
|
| 312 |
+
Args:
|
| 313 |
+
user_email: User's email address
|
| 314 |
+
access_token: OAuth 2.1 access token
|
| 315 |
+
refresh_token: OAuth 2.1 refresh token
|
| 316 |
+
token_uri: Token endpoint URI
|
| 317 |
+
client_id: OAuth client ID
|
| 318 |
+
client_secret: OAuth client secret
|
| 319 |
+
scopes: List of granted scopes
|
| 320 |
+
expiry: Token expiry time
|
| 321 |
+
session_id: OAuth 2.1 session ID
|
| 322 |
+
mcp_session_id: FastMCP session ID to map to this user
|
| 323 |
+
issuer: Token issuer (e.g., "https://accounts.google.com")
|
| 324 |
+
"""
|
| 325 |
+
with self._lock:
|
| 326 |
+
normalized_expiry = _normalize_expiry_to_naive_utc(expiry)
|
| 327 |
+
session_info = {
|
| 328 |
+
"access_token": access_token,
|
| 329 |
+
"refresh_token": refresh_token,
|
| 330 |
+
"token_uri": token_uri,
|
| 331 |
+
"client_id": client_id,
|
| 332 |
+
"client_secret": client_secret,
|
| 333 |
+
"scopes": scopes or [],
|
| 334 |
+
"expiry": normalized_expiry,
|
| 335 |
+
"session_id": session_id,
|
| 336 |
+
"mcp_session_id": mcp_session_id,
|
| 337 |
+
"issuer": issuer,
|
| 338 |
+
}
|
| 339 |
+
|
| 340 |
+
self._sessions[user_email] = session_info
|
| 341 |
+
|
| 342 |
+
# Store MCP session mapping if provided
|
| 343 |
+
if mcp_session_id:
|
| 344 |
+
# Create immutable session binding (first binding wins, cannot be changed)
|
| 345 |
+
if mcp_session_id not in self._session_auth_binding:
|
| 346 |
+
self._session_auth_binding[mcp_session_id] = user_email
|
| 347 |
+
logger.info(
|
| 348 |
+
f"Created immutable session binding: {mcp_session_id} -> {user_email}"
|
| 349 |
+
)
|
| 350 |
+
elif self._session_auth_binding[mcp_session_id] != user_email:
|
| 351 |
+
# Security: Attempt to bind session to different user
|
| 352 |
+
logger.error(
|
| 353 |
+
f"SECURITY: Attempt to rebind session {mcp_session_id} from {self._session_auth_binding[mcp_session_id]} to {user_email}"
|
| 354 |
+
)
|
| 355 |
+
raise ValueError(
|
| 356 |
+
f"Session {mcp_session_id} is already bound to a different user"
|
| 357 |
+
)
|
| 358 |
+
|
| 359 |
+
self._mcp_session_mapping[mcp_session_id] = user_email
|
| 360 |
+
logger.info(
|
| 361 |
+
f"Stored OAuth 2.1 session for {user_email} (session_id: {session_id}, mcp_session_id: {mcp_session_id})"
|
| 362 |
+
)
|
| 363 |
+
else:
|
| 364 |
+
logger.info(
|
| 365 |
+
f"Stored OAuth 2.1 session for {user_email} (session_id: {session_id})"
|
| 366 |
+
)
|
| 367 |
+
|
| 368 |
+
# Also create binding for the OAuth session ID
|
| 369 |
+
if session_id and session_id not in self._session_auth_binding:
|
| 370 |
+
self._session_auth_binding[session_id] = user_email
|
| 371 |
+
|
| 372 |
+
def get_credentials(self, user_email: str) -> Optional[Credentials]:
|
| 373 |
+
"""
|
| 374 |
+
Get Google credentials for a user from OAuth 2.1 session.
|
| 375 |
+
|
| 376 |
+
Args:
|
| 377 |
+
user_email: User's email address
|
| 378 |
+
|
| 379 |
+
Returns:
|
| 380 |
+
Google Credentials object or None
|
| 381 |
+
"""
|
| 382 |
+
with self._lock:
|
| 383 |
+
session_info = self._sessions.get(user_email)
|
| 384 |
+
if not session_info:
|
| 385 |
+
logger.debug(f"No OAuth 2.1 session found for {user_email}")
|
| 386 |
+
return None
|
| 387 |
+
|
| 388 |
+
try:
|
| 389 |
+
# Create Google credentials from session info
|
| 390 |
+
credentials = Credentials(
|
| 391 |
+
token=session_info["access_token"],
|
| 392 |
+
refresh_token=session_info.get("refresh_token"),
|
| 393 |
+
token_uri=session_info["token_uri"],
|
| 394 |
+
client_id=session_info.get("client_id"),
|
| 395 |
+
client_secret=session_info.get("client_secret"),
|
| 396 |
+
scopes=session_info.get("scopes", []),
|
| 397 |
+
expiry=session_info.get("expiry"),
|
| 398 |
+
)
|
| 399 |
+
|
| 400 |
+
logger.debug(f"Retrieved OAuth 2.1 credentials for {user_email}")
|
| 401 |
+
return credentials
|
| 402 |
+
|
| 403 |
+
except Exception as e:
|
| 404 |
+
logger.error(f"Failed to create credentials for {user_email}: {e}")
|
| 405 |
+
return None
|
| 406 |
+
|
| 407 |
+
def get_credentials_by_mcp_session(
|
| 408 |
+
self, mcp_session_id: str
|
| 409 |
+
) -> Optional[Credentials]:
|
| 410 |
+
"""
|
| 411 |
+
Get Google credentials using FastMCP session ID.
|
| 412 |
+
|
| 413 |
+
Args:
|
| 414 |
+
mcp_session_id: FastMCP session ID
|
| 415 |
+
|
| 416 |
+
Returns:
|
| 417 |
+
Google Credentials object or None
|
| 418 |
+
"""
|
| 419 |
+
with self._lock:
|
| 420 |
+
# Look up user email from MCP session mapping
|
| 421 |
+
user_email = self._mcp_session_mapping.get(mcp_session_id)
|
| 422 |
+
if not user_email:
|
| 423 |
+
logger.debug(f"No user mapping found for MCP session {mcp_session_id}")
|
| 424 |
+
return None
|
| 425 |
+
|
| 426 |
+
logger.debug(f"Found user {user_email} for MCP session {mcp_session_id}")
|
| 427 |
+
return self.get_credentials(user_email)
|
| 428 |
+
|
| 429 |
+
def get_credentials_with_validation(
|
| 430 |
+
self,
|
| 431 |
+
requested_user_email: str,
|
| 432 |
+
session_id: Optional[str] = None,
|
| 433 |
+
auth_token_email: Optional[str] = None,
|
| 434 |
+
allow_recent_auth: bool = False,
|
| 435 |
+
) -> Optional[Credentials]:
|
| 436 |
+
"""
|
| 437 |
+
Get Google credentials with session validation.
|
| 438 |
+
|
| 439 |
+
This method ensures that a session can only access credentials for its
|
| 440 |
+
authenticated user, preventing cross-account access.
|
| 441 |
+
|
| 442 |
+
Args:
|
| 443 |
+
requested_user_email: The email of the user whose credentials are requested
|
| 444 |
+
session_id: The current session ID (MCP or OAuth session)
|
| 445 |
+
auth_token_email: Email from the verified auth token (if available)
|
| 446 |
+
|
| 447 |
+
Returns:
|
| 448 |
+
Google Credentials object if validation passes, None otherwise
|
| 449 |
+
"""
|
| 450 |
+
with self._lock:
|
| 451 |
+
# Priority 1: Check auth token email (most secure, from verified JWT)
|
| 452 |
+
if auth_token_email:
|
| 453 |
+
if auth_token_email != requested_user_email:
|
| 454 |
+
logger.error(
|
| 455 |
+
f"SECURITY VIOLATION: Token for {auth_token_email} attempted to access "
|
| 456 |
+
f"credentials for {requested_user_email}"
|
| 457 |
+
)
|
| 458 |
+
return None
|
| 459 |
+
# Token email matches, allow access
|
| 460 |
+
return self.get_credentials(requested_user_email)
|
| 461 |
+
|
| 462 |
+
# Priority 2: Check session binding
|
| 463 |
+
if session_id:
|
| 464 |
+
bound_user = self._session_auth_binding.get(session_id)
|
| 465 |
+
if bound_user:
|
| 466 |
+
if bound_user != requested_user_email:
|
| 467 |
+
logger.error(
|
| 468 |
+
f"SECURITY VIOLATION: Session {session_id} (bound to {bound_user}) "
|
| 469 |
+
f"attempted to access credentials for {requested_user_email}"
|
| 470 |
+
)
|
| 471 |
+
return None
|
| 472 |
+
# Session binding matches, allow access
|
| 473 |
+
return self.get_credentials(requested_user_email)
|
| 474 |
+
|
| 475 |
+
# Check if this is an MCP session
|
| 476 |
+
mcp_user = self._mcp_session_mapping.get(session_id)
|
| 477 |
+
if mcp_user:
|
| 478 |
+
if mcp_user != requested_user_email:
|
| 479 |
+
logger.error(
|
| 480 |
+
f"SECURITY VIOLATION: MCP session {session_id} (user {mcp_user}) "
|
| 481 |
+
f"attempted to access credentials for {requested_user_email}"
|
| 482 |
+
)
|
| 483 |
+
return None
|
| 484 |
+
# MCP session matches, allow access
|
| 485 |
+
return self.get_credentials(requested_user_email)
|
| 486 |
+
|
| 487 |
+
# Special case: Allow access if user has recently authenticated (for clients that don't send tokens)
|
| 488 |
+
# CRITICAL SECURITY: This is ONLY allowed in stdio mode, NEVER in OAuth 2.1 mode
|
| 489 |
+
if allow_recent_auth and requested_user_email in self._sessions:
|
| 490 |
+
# Check transport mode to ensure this is only used in stdio
|
| 491 |
+
try:
|
| 492 |
+
from core.config import get_transport_mode
|
| 493 |
+
|
| 494 |
+
transport_mode = get_transport_mode()
|
| 495 |
+
if transport_mode != "stdio":
|
| 496 |
+
logger.error(
|
| 497 |
+
f"SECURITY: Attempted to use allow_recent_auth in {transport_mode} mode. "
|
| 498 |
+
f"This is only allowed in stdio mode!"
|
| 499 |
+
)
|
| 500 |
+
return None
|
| 501 |
+
except Exception as e:
|
| 502 |
+
logger.error(f"Failed to check transport mode: {e}")
|
| 503 |
+
return None
|
| 504 |
+
|
| 505 |
+
logger.info(
|
| 506 |
+
f"Allowing credential access for {requested_user_email} based on recent authentication "
|
| 507 |
+
f"(stdio mode only - client not sending bearer token)"
|
| 508 |
+
)
|
| 509 |
+
return self.get_credentials(requested_user_email)
|
| 510 |
+
|
| 511 |
+
# No session or token info available - deny access for security
|
| 512 |
+
logger.warning(
|
| 513 |
+
f"Credential access denied for {requested_user_email}: No valid session or token"
|
| 514 |
+
)
|
| 515 |
+
return None
|
| 516 |
+
|
| 517 |
+
def get_user_by_mcp_session(self, mcp_session_id: str) -> Optional[str]:
|
| 518 |
+
"""
|
| 519 |
+
Get user email by FastMCP session ID.
|
| 520 |
+
|
| 521 |
+
Args:
|
| 522 |
+
mcp_session_id: FastMCP session ID
|
| 523 |
+
|
| 524 |
+
Returns:
|
| 525 |
+
User email or None
|
| 526 |
+
"""
|
| 527 |
+
with self._lock:
|
| 528 |
+
return self._mcp_session_mapping.get(mcp_session_id)
|
| 529 |
+
|
| 530 |
+
def get_session_info(self, user_email: str) -> Optional[Dict[str, Any]]:
|
| 531 |
+
"""
|
| 532 |
+
Get complete session information including issuer.
|
| 533 |
+
|
| 534 |
+
Args:
|
| 535 |
+
user_email: User's email address
|
| 536 |
+
|
| 537 |
+
Returns:
|
| 538 |
+
Session information dictionary or None
|
| 539 |
+
"""
|
| 540 |
+
with self._lock:
|
| 541 |
+
return self._sessions.get(user_email)
|
| 542 |
+
|
| 543 |
+
def remove_session(self, user_email: str):
|
| 544 |
+
"""Remove session for a user."""
|
| 545 |
+
with self._lock:
|
| 546 |
+
if user_email in self._sessions:
|
| 547 |
+
# Get session IDs to clean up mappings
|
| 548 |
+
session_info = self._sessions.get(user_email, {})
|
| 549 |
+
mcp_session_id = session_info.get("mcp_session_id")
|
| 550 |
+
session_id = session_info.get("session_id")
|
| 551 |
+
|
| 552 |
+
# Remove from sessions
|
| 553 |
+
del self._sessions[user_email]
|
| 554 |
+
|
| 555 |
+
# Remove from MCP mapping if exists
|
| 556 |
+
if mcp_session_id and mcp_session_id in self._mcp_session_mapping:
|
| 557 |
+
del self._mcp_session_mapping[mcp_session_id]
|
| 558 |
+
# Also remove from auth binding
|
| 559 |
+
if mcp_session_id in self._session_auth_binding:
|
| 560 |
+
del self._session_auth_binding[mcp_session_id]
|
| 561 |
+
logger.info(
|
| 562 |
+
f"Removed OAuth 2.1 session for {user_email} and MCP mapping for {mcp_session_id}"
|
| 563 |
+
)
|
| 564 |
+
|
| 565 |
+
# Remove OAuth session binding if exists
|
| 566 |
+
if session_id and session_id in self._session_auth_binding:
|
| 567 |
+
del self._session_auth_binding[session_id]
|
| 568 |
+
|
| 569 |
+
if not mcp_session_id:
|
| 570 |
+
logger.info(f"Removed OAuth 2.1 session for {user_email}")
|
| 571 |
+
|
| 572 |
+
def has_session(self, user_email: str) -> bool:
|
| 573 |
+
"""Check if a user has an active session."""
|
| 574 |
+
with self._lock:
|
| 575 |
+
return user_email in self._sessions
|
| 576 |
+
|
| 577 |
+
def has_mcp_session(self, mcp_session_id: str) -> bool:
|
| 578 |
+
"""Check if an MCP session has an associated user session."""
|
| 579 |
+
with self._lock:
|
| 580 |
+
return mcp_session_id in self._mcp_session_mapping
|
| 581 |
+
|
| 582 |
+
def get_single_user_email(self) -> Optional[str]:
|
| 583 |
+
"""Return the sole authenticated user email when exactly one session exists."""
|
| 584 |
+
with self._lock:
|
| 585 |
+
if len(self._sessions) == 1:
|
| 586 |
+
return next(iter(self._sessions))
|
| 587 |
+
return None
|
| 588 |
+
|
| 589 |
+
def get_stats(self) -> Dict[str, Any]:
|
| 590 |
+
"""Get store statistics."""
|
| 591 |
+
with self._lock:
|
| 592 |
+
return {
|
| 593 |
+
"total_sessions": len(self._sessions),
|
| 594 |
+
"users": list(self._sessions.keys()),
|
| 595 |
+
"mcp_session_mappings": len(self._mcp_session_mapping),
|
| 596 |
+
"mcp_sessions": list(self._mcp_session_mapping.keys()),
|
| 597 |
+
}
|
| 598 |
+
|
| 599 |
+
|
| 600 |
+
# Global instance
|
| 601 |
+
_global_store = OAuth21SessionStore()
|
| 602 |
+
|
| 603 |
+
|
| 604 |
+
def get_oauth21_session_store() -> OAuth21SessionStore:
|
| 605 |
+
"""Get the global OAuth 2.1 session store."""
|
| 606 |
+
return _global_store
|
| 607 |
+
|
| 608 |
+
|
| 609 |
+
# =============================================================================
|
| 610 |
+
# Google Credentials Bridge (absorbed from oauth21_google_bridge.py)
|
| 611 |
+
# =============================================================================
|
| 612 |
+
|
| 613 |
+
# Global auth provider instance (set during server initialization)
# Holds the FastMCP auth provider so the helpers below can read its token
# caches; remains None when protocol-level auth is disabled.
_auth_provider = None


def set_auth_provider(provider):
    """Set the global auth provider instance.

    Args:
        provider: The FastMCP auth provider object (opaque to this module;
            accessed only via getattr on its private token caches).
    """
    global _auth_provider
    _auth_provider = provider
    logger.debug("OAuth 2.1 session store configured")
|
| 622 |
+
|
| 623 |
+
|
| 624 |
+
def get_auth_provider():
    """Return the module-global auth provider instance (may be None)."""
    return _auth_provider
|
| 627 |
+
|
| 628 |
+
|
| 629 |
+
def _resolve_client_credentials() -> Tuple[Optional[str], Optional[str]]:
    """Resolve OAuth client credentials from the active provider or configuration.

    Resolution order: the globally registered auth provider first, then the
    module-level OAuth config as a fallback for any value still missing.

    Returns:
        Tuple of (client_id, client_secret); either element may be None when
        no source could supply it.
    """
    client_id: Optional[str] = None
    client_secret: Optional[str] = None

    if _auth_provider:
        # Provider attributes are private and optional, so read defensively.
        client_id = getattr(_auth_provider, "_upstream_client_id", None)
        secret_obj = getattr(_auth_provider, "_upstream_client_secret", None)
        if secret_obj is not None:
            if hasattr(secret_obj, "get_secret_value"):
                # Looks like a pydantic SecretStr-style wrapper — unwrap it.
                try:
                    client_secret = secret_obj.get_secret_value()  # type: ignore[call-arg]
                except Exception as exc:  # pragma: no cover - defensive
                    logger.debug(
                        f"Failed to resolve client secret from provider: {exc}"
                    )
            elif isinstance(secret_obj, str):
                client_secret = secret_obj

    if not client_id or not client_secret:
        # Fall back to configuration for whichever values are still missing.
        try:
            # Imported lazily to avoid a module-level import cycle with
            # auth.oauth_config.
            from auth.oauth_config import get_oauth_config

            cfg = get_oauth_config()
            client_id = client_id or cfg.client_id
            client_secret = client_secret or cfg.client_secret
        except Exception as exc:  # pragma: no cover - defensive
            logger.debug(f"Failed to resolve client credentials from config: {exc}")

    return client_id, client_secret
|
| 659 |
+
|
| 660 |
+
|
| 661 |
+
def _build_credentials_from_provider(
    access_token: AccessToken,
) -> Optional[Credentials]:
    """Construct Google credentials from the provider cache.

    Args:
        access_token: The access-token record whose ``token`` value keys into
            the provider's internal caches.

    Returns:
        A google.oauth2 Credentials object, or None when no auth provider is
        registered.
    """
    if not _auth_provider:
        return None

    # Prefer the provider's cached record; fall back to the token we were
    # handed if the provider does not know it.
    access_entry = getattr(_auth_provider, "_access_tokens", {}).get(access_token.token)
    if not access_entry:
        access_entry = access_token

    client_id, client_secret = _resolve_client_credentials()

    # Chase access-token -> refresh-token-value -> refresh-token-object through
    # the provider's private maps; any missing hop leaves refresh_token None.
    refresh_token_value = getattr(_auth_provider, "_access_to_refresh", {}).get(
        access_token.token
    )
    refresh_token_obj = None
    if refresh_token_value:
        refresh_token_obj = getattr(_auth_provider, "_refresh_tokens", {}).get(
            refresh_token_value
        )

    expiry = None
    expires_at = getattr(access_entry, "expires_at", None)
    if expires_at:
        try:
            # expires_at is treated as a UTC epoch timestamp; normalized to the
            # naive-UTC form google-auth expects for expiry comparisons.
            expiry_candidate = datetime.fromtimestamp(expires_at, tz=timezone.utc)
            expiry = _normalize_expiry_to_naive_utc(expiry_candidate)
        except Exception:  # pragma: no cover - defensive
            expiry = None

    scopes = getattr(access_entry, "scopes", None)

    return Credentials(
        token=access_token.token,
        refresh_token=refresh_token_obj.token if refresh_token_obj else None,
        token_uri="https://oauth2.googleapis.com/token",
        client_id=client_id,
        client_secret=client_secret,
        scopes=scopes,
        expiry=expiry,
    )
|
| 703 |
+
|
| 704 |
+
|
| 705 |
+
def ensure_session_from_access_token(
    access_token: AccessToken,
    user_email: Optional[str],
    mcp_session_id: Optional[str] = None,
) -> Optional[Credentials]:
    """Ensure credentials derived from an access token are cached and returned.

    Builds Google credentials from the provider cache when possible, otherwise
    synthesizes minimal credentials from the token itself; when an email can be
    determined, the result is also stored in the OAuth 2.1 session store.

    Args:
        access_token: Access-token record (token string plus optional claims,
            scopes and expiry).
        user_email: Known user email, or None to fall back to the token's
            ``email`` claim.
        mcp_session_id: Optional MCP session ID to associate with the stored
            session.

    Returns:
        Credentials, or None when *access_token* is falsy.
    """

    if not access_token:
        return None

    email = user_email
    if not email and getattr(access_token, "claims", None):
        # Fall back to the identity claim embedded in the token.
        email = access_token.claims.get("email")

    credentials = _build_credentials_from_provider(access_token)
    store_expiry: Optional[datetime] = None

    if credentials is None:
        # No provider cache available: synthesize credentials from the raw
        # token (no refresh token in this path).
        client_id, client_secret = _resolve_client_credentials()
        expiry = None
        expires_at = getattr(access_token, "expires_at", None)
        if expires_at:
            try:
                expiry = datetime.fromtimestamp(expires_at, tz=timezone.utc)
            except Exception:  # pragma: no cover - defensive
                expiry = None

        normalized_expiry = _normalize_expiry_to_naive_utc(expiry)
        credentials = Credentials(
            token=access_token.token,
            refresh_token=None,
            token_uri="https://oauth2.googleapis.com/token",
            client_id=client_id,
            client_secret=client_secret,
            scopes=getattr(access_token, "scopes", None),
            expiry=normalized_expiry,
        )
        # NOTE: the store receives the timezone-aware expiry while the
        # Credentials object carries the normalized naive-UTC one.
        store_expiry = expiry
    else:
        store_expiry = credentials.expiry

    if email:
        # Cache in the session store so later lookups by email succeed;
        # failures here are non-fatal (the caller still gets credentials).
        try:
            store = get_oauth21_session_store()
            store.store_session(
                user_email=email,
                access_token=credentials.token,
                refresh_token=credentials.refresh_token,
                token_uri=credentials.token_uri,
                client_id=credentials.client_id,
                client_secret=credentials.client_secret,
                scopes=credentials.scopes,
                expiry=store_expiry,
                session_id=f"google_{email}",
                mcp_session_id=mcp_session_id,
                issuer="https://accounts.google.com",
            )
        except Exception as exc:  # pragma: no cover - defensive
            logger.debug(f"Failed to cache credentials for {email}: {exc}")

    return credentials
|
| 766 |
+
|
| 767 |
+
|
| 768 |
+
def get_credentials_from_token(
    access_token: str, user_email: Optional[str] = None
) -> Optional[Credentials]:
    """
    Convert a bearer token to Google credentials.

    Tries three sources in order: the session store (keyed by *user_email*),
    the FastMCP provider's token cache, and finally a minimal synthesized
    credential carrying only the bearer token.

    Args:
        access_token: The bearer token
        user_email: Optional user email for session lookup

    Returns:
        Google Credentials object or None
    """
    try:
        store = get_oauth21_session_store()

        # If we have user_email, try to get credentials from store
        if user_email:
            credentials = store.get_credentials(user_email)
            # Only trust the stored entry when its token matches the bearer
            # token presented by the caller.
            if credentials and credentials.token == access_token:
                logger.debug(f"Found matching credentials from store for {user_email}")
                return credentials

        # If the FastMCP provider is managing tokens, sync from provider storage
        if _auth_provider:
            access_record = getattr(_auth_provider, "_access_tokens", {}).get(
                access_token
            )
            if access_record:
                logger.debug("Building credentials from FastMCP provider cache")
                return ensure_session_from_access_token(access_record, user_email)

        # Otherwise, create minimal credentials with just the access token
        # Assume token is valid for 1 hour (typical for Google tokens)
        expiry = _normalize_expiry_to_naive_utc(
            datetime.now(timezone.utc) + timedelta(hours=1)
        )
        client_id, client_secret = _resolve_client_credentials()

        credentials = Credentials(
            token=access_token,
            refresh_token=None,
            token_uri="https://oauth2.googleapis.com/token",
            client_id=client_id,
            client_secret=client_secret,
            scopes=None,
            expiry=expiry,
        )

        logger.debug("Created fallback Google credentials from bearer token")
        return credentials

    except Exception as e:
        # Best-effort API: swallow all failures and signal them with None.
        logger.error(f"Failed to create Google credentials from token: {e}")
        return None
|
| 823 |
+
|
| 824 |
+
|
| 825 |
+
def store_token_session(
    token_response: dict, user_email: str, mcp_session_id: Optional[str] = None
) -> str:
    """
    Store a token response in the session store.

    Args:
        token_response: OAuth token response from Google (expects the standard
            ``access_token`` / ``refresh_token`` / ``scope`` / ``expires_in``
            keys; missing ``expires_in`` defaults to 3600 seconds)
        user_email: User's email address
        mcp_session_id: Optional FastMCP session ID to map to this user

    Returns:
        Session ID (``"google_<email>"``), or ``""`` on any failure or when
        no auth provider is configured.
    """
    if not _auth_provider:
        logger.error("Auth provider not configured")
        return ""

    try:
        # Try to get FastMCP session ID from context if not provided
        if not mcp_session_id:
            try:
                # Lazy import: core.context may not be importable in every
                # execution mode.
                from core.context import get_fastmcp_session_id

                mcp_session_id = get_fastmcp_session_id()
                if mcp_session_id:
                    logger.debug(
                        f"Got FastMCP session ID from context: {mcp_session_id}"
                    )
            except Exception as e:
                logger.debug(f"Could not get FastMCP session from context: {e}")

        # Store session in OAuth21SessionStore
        store = get_oauth21_session_store()

        session_id = f"google_{user_email}"
        client_id, client_secret = _resolve_client_credentials()
        # OAuth responses carry scopes as a single space-delimited string.
        scopes = token_response.get("scope", "")
        scopes_list = scopes.split() if scopes else None
        expiry = datetime.now(timezone.utc) + timedelta(
            seconds=token_response.get("expires_in", 3600)
        )

        store.store_session(
            user_email=user_email,
            access_token=token_response.get("access_token"),
            refresh_token=token_response.get("refresh_token"),
            token_uri="https://oauth2.googleapis.com/token",
            client_id=client_id,
            client_secret=client_secret,
            scopes=scopes_list,
            expiry=expiry,
            session_id=session_id,
            mcp_session_id=mcp_session_id,
            issuer="https://accounts.google.com",
        )

        if mcp_session_id:
            logger.info(
                f"Stored token session for {user_email} with MCP session {mcp_session_id}"
            )
        else:
            logger.info(f"Stored token session for {user_email}")

        return session_id

    except Exception as e:
        logger.error(f"Failed to store token session: {e}")
        return ""
|
auth/oauth_callback_server.py
ADDED
|
@@ -0,0 +1,288 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Transport-aware OAuth callback handling.
|
| 3 |
+
|
| 4 |
+
In streamable-http mode: Uses the existing FastAPI server
|
| 5 |
+
In stdio mode: Starts a minimal HTTP server just for OAuth callbacks
|
| 6 |
+
"""
|
| 7 |
+
|
| 8 |
+
import asyncio
|
| 9 |
+
import logging
|
| 10 |
+
import threading
|
| 11 |
+
import time
|
| 12 |
+
import socket
|
| 13 |
+
import uvicorn
|
| 14 |
+
|
| 15 |
+
from fastapi import FastAPI, Request
|
| 16 |
+
from fastapi.responses import FileResponse, JSONResponse
|
| 17 |
+
from typing import Optional
|
| 18 |
+
from urllib.parse import urlparse
|
| 19 |
+
|
| 20 |
+
from auth.scopes import SCOPES, get_current_scopes # noqa
|
| 21 |
+
from auth.oauth_responses import (
|
| 22 |
+
create_error_response,
|
| 23 |
+
create_success_response,
|
| 24 |
+
create_server_error_response,
|
| 25 |
+
)
|
| 26 |
+
from auth.google_auth import handle_auth_callback, check_client_secrets
|
| 27 |
+
from auth.oauth_config import get_oauth_redirect_uri
|
| 28 |
+
|
| 29 |
+
logger = logging.getLogger(__name__)
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
class MinimalOAuthServer:
    """
    Minimal HTTP server for OAuth callbacks in stdio mode.
    Only starts when needed and uses the same port (8000) as streamable-http mode.

    The uvicorn server runs in a daemon thread; lifecycle is managed through
    start()/stop() and the is_running flag.
    """

    def __init__(self, port: int = 8000, base_uri: str = "http://localhost"):
        # port/base_uri describe where the callback URL must be reachable.
        self.port = port
        self.base_uri = base_uri
        self.app = FastAPI()
        self.server = None  # uvicorn.Server, created lazily in start()
        self.server_thread = None  # daemon thread running the event loop
        self.is_running = False

        # Setup the callback route
        self._setup_callback_route()
        # Setup attachment serving route
        self._setup_attachment_route()

    def _setup_callback_route(self):
        """Setup the OAuth callback route."""

        @self.app.get("/oauth2callback")
        async def oauth_callback(request: Request):
            """Handle OAuth callback - same logic as in core/server.py"""
            state = request.query_params.get("state")
            code = request.query_params.get("code")
            error = request.query_params.get("error")

            # Google signals user-denied / misconfigured flows via ?error=...
            if error:
                error_message = f"Authentication failed: Google returned an error: {error}. State: {state}."
                logger.error(error_message)
                return create_error_response(error_message)

            if not code:
                error_message = (
                    "Authentication failed: No authorization code received from Google."
                )
                logger.error(error_message)
                return create_error_response(error_message)

            try:
                # Check if we have credentials available (environment variables or file)
                error_message = check_client_secrets()
                if error_message:
                    return create_server_error_response(error_message)

                logger.info(
                    f"OAuth callback: Received code (state: {state}). Attempting to exchange for tokens."
                )

                # Session ID tracking removed - not needed

                # Exchange code for credentials
                redirect_uri = get_oauth_redirect_uri()
                verified_user_id, credentials = handle_auth_callback(
                    scopes=get_current_scopes(),
                    authorization_response=str(request.url),
                    redirect_uri=redirect_uri,
                    session_id=None,
                )

                logger.info(
                    f"OAuth callback: Successfully authenticated user: {verified_user_id} (state: {state})."
                )

                # Return success page using shared template
                return create_success_response(verified_user_id)

            except Exception as e:
                error_message_detail = (
                    f"Error processing OAuth callback (state: {state}): {str(e)}"
                )
                logger.error(error_message_detail, exc_info=True)
                return create_server_error_response(str(e))

    def _setup_attachment_route(self):
        """Setup the attachment serving route."""
        # Imported here rather than at module level to keep the import local
        # to this optional feature.
        from core.attachment_storage import get_attachment_storage

        @self.app.get("/attachments/{file_id}")
        async def serve_attachment(file_id: str, request: Request):
            """Serve a stored attachment file."""
            storage = get_attachment_storage()
            metadata = storage.get_attachment_metadata(file_id)

            if not metadata:
                return JSONResponse(
                    {"error": "Attachment not found or expired"}, status_code=404
                )

            file_path = storage.get_attachment_path(file_id)
            if not file_path:
                return JSONResponse(
                    {"error": "Attachment file not found"}, status_code=404
                )

            return FileResponse(
                path=str(file_path),
                filename=metadata["filename"],
                media_type=metadata["mime_type"],
            )

    def start(self) -> tuple[bool, str]:
        """
        Start the minimal OAuth server.

        Returns:
            Tuple of (success: bool, error_message: str)
        """
        if self.is_running:
            logger.info("Minimal OAuth server is already running")
            return True, ""

        # Check if port is available
        # Extract hostname from base_uri (e.g., "http://localhost" -> "localhost")
        try:
            parsed_uri = urlparse(self.base_uri)
            hostname = parsed_uri.hostname or "localhost"
        except Exception:
            hostname = "localhost"

        # Probe-bind to fail fast when the port is taken.
        # NOTE(review): the socket is closed before uvicorn binds, so another
        # process could still grab the port in between (benign TOCTOU).
        try:
            with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
                s.bind((hostname, self.port))
        except OSError:
            error_msg = f"Port {self.port} is already in use on {hostname}. Cannot start minimal OAuth server."
            logger.error(error_msg)
            return False, error_msg

        def run_server():
            """Run the server in a separate thread."""
            try:
                config = uvicorn.Config(
                    self.app,
                    host=hostname,
                    port=self.port,
                    log_level="warning",
                    access_log=False,
                )
                self.server = uvicorn.Server(config)
                # asyncio.run gives this thread its own event loop.
                asyncio.run(self.server.serve())

            except Exception as e:
                logger.error(f"Minimal OAuth server error: {e}", exc_info=True)
                self.is_running = False

        # Start server in background thread
        self.server_thread = threading.Thread(target=run_server, daemon=True)
        self.server_thread.start()

        # Wait for server to start
        # Poll by attempting a TCP connect until it succeeds or we time out.
        max_wait = 3.0
        start_time = time.time()
        while time.time() - start_time < max_wait:
            try:
                with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
                    result = s.connect_ex((hostname, self.port))
                    if result == 0:
                        self.is_running = True
                        logger.info(
                            f"Minimal OAuth server started on {hostname}:{self.port}"
                        )
                        return True, ""
            except Exception:
                pass
            time.sleep(0.1)

        error_msg = f"Failed to start minimal OAuth server on {hostname}:{self.port} - server did not respond within {max_wait}s"
        logger.error(error_msg)
        return False, error_msg

    def stop(self):
        """Stop the minimal OAuth server."""
        if not self.is_running:
            return

        try:
            if self.server:
                # uvicorn exits its serve() loop when should_exit is set.
                if hasattr(self.server, "should_exit"):
                    self.server.should_exit = True

            if self.server_thread and self.server_thread.is_alive():
                self.server_thread.join(timeout=3.0)

            self.is_running = False
            logger.info("Minimal OAuth server stopped")

        except Exception as e:
            logger.error(f"Error stopping minimal OAuth server: {e}", exc_info=True)
|
| 222 |
+
|
| 223 |
+
|
| 224 |
+
# Global instance for stdio mode
# Lazily created by ensure_oauth_callback_available() and torn down by
# cleanup_oauth_callback_server().
_minimal_oauth_server: Optional[MinimalOAuthServer] = None
|
| 226 |
+
|
| 227 |
+
|
| 228 |
+
def ensure_oauth_callback_available(
    transport_mode: str = "stdio", port: int = 8000, base_uri: str = "http://localhost"
) -> tuple[bool, str]:
    """
    Ensure OAuth callback endpoint is available for the given transport mode.

    For streamable-http: Assumes the main server is already running
    For stdio: Starts a minimal server if needed

    Args:
        transport_mode: "stdio" or "streamable-http"
        port: Port number (default 8000)
        base_uri: Base URI (default "http://localhost")

    Returns:
        Tuple of (success: bool, error_message: str)
    """
    global _minimal_oauth_server

    if transport_mode == "streamable-http":
        # The main FastAPI server already serves the callback route.
        logger.debug(
            "Using existing FastAPI server for OAuth callbacks (streamable-http mode)"
        )
        return True, ""

    if transport_mode != "stdio":
        error_msg = f"Unknown transport mode: {transport_mode}"
        logger.error(error_msg)
        return False, error_msg

    # stdio mode: lazily create, then start, the dedicated callback server.
    if _minimal_oauth_server is None:
        logger.info(f"Creating minimal OAuth server instance for {base_uri}:{port}")
        _minimal_oauth_server = MinimalOAuthServer(port, base_uri)

    if _minimal_oauth_server.is_running:
        logger.info("Minimal OAuth server is already running")
        return True, ""

    logger.info("Starting minimal OAuth server for stdio mode")
    success, error_msg = _minimal_oauth_server.start()
    if success:
        logger.info(
            f"Minimal OAuth server successfully started on {base_uri}:{port}"
        )
        return True, ""

    logger.error(
        f"Failed to start minimal OAuth server on {base_uri}:{port}: {error_msg}"
    )
    return False, error_msg
|
| 281 |
+
|
| 282 |
+
|
| 283 |
+
def cleanup_oauth_callback_server():
    """Clean up the minimal OAuth server if it was started."""
    global _minimal_oauth_server
    server = _minimal_oauth_server
    if server is None:
        return
    server.stop()
    _minimal_oauth_server = None
|
auth/oauth_config.py
ADDED
|
@@ -0,0 +1,438 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
OAuth Configuration Management
|
| 3 |
+
|
| 4 |
+
This module centralizes OAuth-related configuration to eliminate hardcoded values
|
| 5 |
+
scattered throughout the codebase. It provides environment variable support and
|
| 6 |
+
sensible defaults for all OAuth-related settings.
|
| 7 |
+
|
| 8 |
+
Supports both OAuth 2.0 and OAuth 2.1 with automatic client capability detection.
|
| 9 |
+
"""
|
| 10 |
+
|
| 11 |
+
import os
|
| 12 |
+
from urllib.parse import urlparse
|
| 13 |
+
from typing import List, Optional, Dict, Any
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
class OAuthConfig:
|
| 17 |
+
"""
|
| 18 |
+
Centralized OAuth configuration management.
|
| 19 |
+
|
| 20 |
+
This class eliminates the hardcoded configuration anti-pattern identified
|
| 21 |
+
in the challenge review by providing a single source of truth for all
|
| 22 |
+
OAuth-related configuration values.
|
| 23 |
+
"""
|
| 24 |
+
|
| 25 |
+
def __init__(self):
    """Read all OAuth settings from the environment and derive composites.

    Raises:
        ValueError: If EXTERNAL_OAUTH21_PROVIDER or WORKSPACE_MCP_STATELESS_MODE
            is enabled without MCP_ENABLE_OAUTH21=true.
    """
    # Base server configuration
    self.base_uri = os.getenv("WORKSPACE_MCP_BASE_URI", "http://localhost")
    # PORT (platform convention) takes precedence over WORKSPACE_MCP_PORT.
    self.port = int(os.getenv("PORT", os.getenv("WORKSPACE_MCP_PORT", "8000")))
    self.base_url = f"{self.base_uri}:{self.port}"

    # External URL for reverse proxy scenarios
    self.external_url = os.getenv("WORKSPACE_EXTERNAL_URL")

    # OAuth client configuration
    self.client_id = os.getenv("GOOGLE_OAUTH_CLIENT_ID")
    self.client_secret = os.getenv("GOOGLE_OAUTH_CLIENT_SECRET")

    # OAuth 2.1 configuration
    self.oauth21_enabled = (
        os.getenv("MCP_ENABLE_OAUTH21", "false").lower() == "true"
    )
    self.pkce_required = self.oauth21_enabled  # PKCE is mandatory in OAuth 2.1
    # OAuth 2.1 drops the "plain" code-challenge method.
    self.supported_code_challenge_methods = (
        ["S256", "plain"] if not self.oauth21_enabled else ["S256"]
    )

    # External OAuth 2.1 provider configuration
    self.external_oauth21_provider = (
        os.getenv("EXTERNAL_OAUTH21_PROVIDER", "false").lower() == "true"
    )
    if self.external_oauth21_provider and not self.oauth21_enabled:
        raise ValueError(
            "EXTERNAL_OAUTH21_PROVIDER requires MCP_ENABLE_OAUTH21=true"
        )

    # Stateless mode configuration
    self.stateless_mode = (
        os.getenv("WORKSPACE_MCP_STATELESS_MODE", "false").lower() == "true"
    )
    if self.stateless_mode and not self.oauth21_enabled:
        raise ValueError(
            "WORKSPACE_MCP_STATELESS_MODE requires MCP_ENABLE_OAUTH21=true"
        )

    # Transport mode (will be set at runtime)
    self._transport_mode = "stdio"  # Default

    # Redirect URI configuration
    self.redirect_uri = self._get_redirect_uri()
    self.redirect_path = self._get_redirect_path(self.redirect_uri)

    # Ensure FastMCP's Google provider picks up our existing configuration
    self._apply_fastmcp_google_env()
|
| 74 |
+
|
| 75 |
+
def _get_redirect_uri(self) -> str:
|
| 76 |
+
"""
|
| 77 |
+
Get the OAuth redirect URI, supporting reverse proxy configurations.
|
| 78 |
+
|
| 79 |
+
Returns:
|
| 80 |
+
The configured redirect URI
|
| 81 |
+
"""
|
| 82 |
+
explicit_uri = os.getenv("GOOGLE_OAUTH_REDIRECT_URI")
|
| 83 |
+
if explicit_uri:
|
| 84 |
+
return explicit_uri
|
| 85 |
+
return f"{self.base_url}/oauth2callback"
|
| 86 |
+
|
| 87 |
+
@staticmethod
|
| 88 |
+
def _get_redirect_path(uri: str) -> str:
|
| 89 |
+
"""Extract the redirect path from a full redirect URI."""
|
| 90 |
+
parsed = urlparse(uri)
|
| 91 |
+
if parsed.scheme or parsed.netloc:
|
| 92 |
+
path = parsed.path or "/oauth2callback"
|
| 93 |
+
else:
|
| 94 |
+
# If the value was already a path, ensure it starts with '/'
|
| 95 |
+
path = uri if uri.startswith("/") else f"/{uri}"
|
| 96 |
+
return path or "/oauth2callback"
|
| 97 |
+
|
| 98 |
+
def _apply_fastmcp_google_env(self) -> None:
    """Mirror legacy GOOGLE_* env vars into FastMCP Google provider settings.

    No-op when no client_id is configured. Existing FASTMCP_* environment
    variables are never overwritten.
    """
    if not self.client_id:
        return

    def _set_if_absent(key: str, value: Optional[str]) -> None:
        # Only fill gaps: a pre-set env var always wins.
        if value and key not in os.environ:
            os.environ[key] = value

    # Don't set FASTMCP_SERVER_AUTH if using external OAuth provider
    # (external OAuth means protocol-level auth is disabled, only tool-level auth)
    if not self.external_oauth21_provider:
        _set_if_absent(
            "FASTMCP_SERVER_AUTH",
            "fastmcp.server.auth.providers.google.GoogleProvider"
            if self.oauth21_enabled
            else None,
        )

    _set_if_absent("FASTMCP_SERVER_AUTH_GOOGLE_CLIENT_ID", self.client_id)
    _set_if_absent("FASTMCP_SERVER_AUTH_GOOGLE_CLIENT_SECRET", self.client_secret)
    _set_if_absent("FASTMCP_SERVER_AUTH_GOOGLE_BASE_URL", self.get_oauth_base_url())
    _set_if_absent("FASTMCP_SERVER_AUTH_GOOGLE_REDIRECT_PATH", self.redirect_path)
|
| 121 |
+
|
| 122 |
+
def get_redirect_uris(self) -> List[str]:
    """
    Get all valid OAuth redirect URIs.

    Combines the primary redirect URI with any comma-separated extras from
    the OAUTH_CUSTOM_REDIRECT_URIS environment variable. Blank fragments
    (e.g. from a trailing comma or double comma) are ignored — previously
    they leaked into the list as empty strings.

    Returns:
        List of all supported redirect URIs, de-duplicated, order preserved
    """
    # Primary redirect URI always comes first.
    uris = [self.redirect_uri]

    # Custom redirect URIs from environment
    custom_uris = os.getenv("OAUTH_CUSTOM_REDIRECT_URIS")
    if custom_uris:
        # Strip whitespace and drop empty fragments from stray commas.
        uris.extend(uri.strip() for uri in custom_uris.split(",") if uri.strip())

    # Remove duplicates while preserving order
    return list(dict.fromkeys(uris))
|
| 141 |
+
|
| 142 |
+
def get_allowed_origins(self) -> List[str]:
|
| 143 |
+
"""
|
| 144 |
+
Get allowed CORS origins for OAuth endpoints.
|
| 145 |
+
|
| 146 |
+
Returns:
|
| 147 |
+
List of allowed origins for CORS
|
| 148 |
+
"""
|
| 149 |
+
origins = []
|
| 150 |
+
|
| 151 |
+
# Server's own origin
|
| 152 |
+
origins.append(self.base_url)
|
| 153 |
+
|
| 154 |
+
# VS Code and development origins
|
| 155 |
+
origins.extend(
|
| 156 |
+
[
|
| 157 |
+
"vscode-webview://",
|
| 158 |
+
"https://vscode.dev",
|
| 159 |
+
"https://github.dev",
|
| 160 |
+
]
|
| 161 |
+
)
|
| 162 |
+
|
| 163 |
+
# Custom origins from environment
|
| 164 |
+
custom_origins = os.getenv("OAUTH_ALLOWED_ORIGINS")
|
| 165 |
+
if custom_origins:
|
| 166 |
+
origins.extend([origin.strip() for origin in custom_origins.split(",")])
|
| 167 |
+
|
| 168 |
+
return list(dict.fromkeys(origins))
|
| 169 |
+
|
| 170 |
+
def is_configured(self) -> bool:
|
| 171 |
+
"""
|
| 172 |
+
Check if OAuth is properly configured.
|
| 173 |
+
|
| 174 |
+
Returns:
|
| 175 |
+
True if OAuth client credentials are available
|
| 176 |
+
"""
|
| 177 |
+
return bool(self.client_id and self.client_secret)
|
| 178 |
+
|
| 179 |
+
def get_oauth_base_url(self) -> str:
|
| 180 |
+
"""
|
| 181 |
+
Get OAuth base URL for constructing OAuth endpoints.
|
| 182 |
+
|
| 183 |
+
Uses WORKSPACE_EXTERNAL_URL if set (for reverse proxy scenarios),
|
| 184 |
+
otherwise falls back to constructed base_url with port.
|
| 185 |
+
|
| 186 |
+
Returns:
|
| 187 |
+
Base URL for OAuth endpoints
|
| 188 |
+
"""
|
| 189 |
+
if self.external_url:
|
| 190 |
+
return self.external_url
|
| 191 |
+
return self.base_url
|
| 192 |
+
|
| 193 |
+
def validate_redirect_uri(self, uri: str) -> bool:
|
| 194 |
+
"""
|
| 195 |
+
Validate if a redirect URI is allowed.
|
| 196 |
+
|
| 197 |
+
Args:
|
| 198 |
+
uri: The redirect URI to validate
|
| 199 |
+
|
| 200 |
+
Returns:
|
| 201 |
+
True if the URI is allowed, False otherwise
|
| 202 |
+
"""
|
| 203 |
+
allowed_uris = self.get_redirect_uris()
|
| 204 |
+
return uri in allowed_uris
|
| 205 |
+
|
| 206 |
+
def get_environment_summary(self) -> dict:
|
| 207 |
+
"""
|
| 208 |
+
Get a summary of the current OAuth configuration.
|
| 209 |
+
|
| 210 |
+
Returns:
|
| 211 |
+
Dictionary with configuration summary (excluding secrets)
|
| 212 |
+
"""
|
| 213 |
+
return {
|
| 214 |
+
"base_url": self.base_url,
|
| 215 |
+
"external_url": self.external_url,
|
| 216 |
+
"effective_oauth_url": self.get_oauth_base_url(),
|
| 217 |
+
"redirect_uri": self.redirect_uri,
|
| 218 |
+
"redirect_path": self.redirect_path,
|
| 219 |
+
"client_configured": bool(self.client_id),
|
| 220 |
+
"oauth21_enabled": self.oauth21_enabled,
|
| 221 |
+
"external_oauth21_provider": self.external_oauth21_provider,
|
| 222 |
+
"pkce_required": self.pkce_required,
|
| 223 |
+
"transport_mode": self._transport_mode,
|
| 224 |
+
"total_redirect_uris": len(self.get_redirect_uris()),
|
| 225 |
+
"total_allowed_origins": len(self.get_allowed_origins()),
|
| 226 |
+
}
|
| 227 |
+
|
| 228 |
+
    def set_transport_mode(self, mode: str) -> None:
        """
        Set the current transport mode for OAuth callback handling.

        Args:
            mode: Transport mode ("stdio", "streamable-http", etc.)
        """
        # Stored on the instance only; the value is not validated or
        # persisted here — callers are trusted to pass a known mode string.
        self._transport_mode = mode
    def get_transport_mode(self) -> str:
        """
        Get the current transport mode.

        Returns:
            Current transport mode (last value passed to set_transport_mode,
            or the instance default).
        """
        return self._transport_mode
    def is_oauth21_enabled(self) -> bool:
        """
        Check if OAuth 2.1 mode is enabled.

        Returns:
            True if OAuth 2.1 is enabled
        """
        # Simple accessor for the flag resolved at construction time.
        return self.oauth21_enabled
    def is_external_oauth21_provider(self) -> bool:
        """
        Check if external OAuth 2.1 provider mode is enabled.

        When enabled, the server expects external OAuth flow with bearer tokens
        in Authorization headers for tool calls. Protocol-level auth is disabled.

        Returns:
            True if external OAuth 2.1 provider is enabled
        """
        return self.external_oauth21_provider
+
def detect_oauth_version(self, request_params: Dict[str, Any]) -> str:
|
| 268 |
+
"""
|
| 269 |
+
Detect OAuth version based on request parameters.
|
| 270 |
+
|
| 271 |
+
This method implements a conservative detection strategy:
|
| 272 |
+
- Only returns "oauth21" when we have clear indicators
|
| 273 |
+
- Defaults to "oauth20" for backward compatibility
|
| 274 |
+
- Respects the global oauth21_enabled flag
|
| 275 |
+
|
| 276 |
+
Args:
|
| 277 |
+
request_params: Request parameters from authorization or token request
|
| 278 |
+
|
| 279 |
+
Returns:
|
| 280 |
+
"oauth21" or "oauth20" based on detection
|
| 281 |
+
"""
|
| 282 |
+
# If OAuth 2.1 is not enabled globally, always return OAuth 2.0
|
| 283 |
+
if not self.oauth21_enabled:
|
| 284 |
+
return "oauth20"
|
| 285 |
+
|
| 286 |
+
# Use the structured type for cleaner detection logic
|
| 287 |
+
from auth.oauth_types import OAuthVersionDetectionParams
|
| 288 |
+
|
| 289 |
+
params = OAuthVersionDetectionParams.from_request(request_params)
|
| 290 |
+
|
| 291 |
+
# Clear OAuth 2.1 indicator: PKCE is present
|
| 292 |
+
if params.has_pkce:
|
| 293 |
+
return "oauth21"
|
| 294 |
+
|
| 295 |
+
# Additional detection: Check if we have an active OAuth 2.1 session
|
| 296 |
+
# This is important for tool calls where PKCE params aren't available
|
| 297 |
+
authenticated_user = request_params.get("authenticated_user")
|
| 298 |
+
if authenticated_user:
|
| 299 |
+
try:
|
| 300 |
+
from auth.oauth21_session_store import get_oauth21_session_store
|
| 301 |
+
|
| 302 |
+
store = get_oauth21_session_store()
|
| 303 |
+
if store.has_session(authenticated_user):
|
| 304 |
+
return "oauth21"
|
| 305 |
+
except (ImportError, AttributeError, RuntimeError):
|
| 306 |
+
pass # Fall back to OAuth 2.0 if session check fails
|
| 307 |
+
|
| 308 |
+
# For public clients in OAuth 2.1 mode, we require PKCE
|
| 309 |
+
# But since they didn't send PKCE, fall back to OAuth 2.0
|
| 310 |
+
# This ensures backward compatibility
|
| 311 |
+
|
| 312 |
+
# Default to OAuth 2.0 for maximum compatibility
|
| 313 |
+
return "oauth20"
|
| 314 |
+
|
| 315 |
+
def get_authorization_server_metadata(
|
| 316 |
+
self, scopes: Optional[List[str]] = None
|
| 317 |
+
) -> Dict[str, Any]:
|
| 318 |
+
"""
|
| 319 |
+
Get OAuth authorization server metadata per RFC 8414.
|
| 320 |
+
|
| 321 |
+
Args:
|
| 322 |
+
scopes: Optional list of supported scopes to include in metadata
|
| 323 |
+
|
| 324 |
+
Returns:
|
| 325 |
+
Authorization server metadata dictionary
|
| 326 |
+
"""
|
| 327 |
+
oauth_base = self.get_oauth_base_url()
|
| 328 |
+
metadata = {
|
| 329 |
+
"issuer": "https://accounts.google.com",
|
| 330 |
+
"authorization_endpoint": f"{oauth_base}/oauth2/authorize",
|
| 331 |
+
"token_endpoint": f"{oauth_base}/oauth2/token",
|
| 332 |
+
"registration_endpoint": f"{oauth_base}/oauth2/register",
|
| 333 |
+
"jwks_uri": "https://www.googleapis.com/oauth2/v3/certs",
|
| 334 |
+
"userinfo_endpoint": "https://openidconnect.googleapis.com/v1/userinfo",
|
| 335 |
+
"response_types_supported": ["code", "token"],
|
| 336 |
+
"grant_types_supported": ["authorization_code", "refresh_token"],
|
| 337 |
+
"token_endpoint_auth_methods_supported": [
|
| 338 |
+
"client_secret_post",
|
| 339 |
+
"client_secret_basic",
|
| 340 |
+
],
|
| 341 |
+
"code_challenge_methods_supported": self.supported_code_challenge_methods,
|
| 342 |
+
}
|
| 343 |
+
|
| 344 |
+
# Include scopes if provided
|
| 345 |
+
if scopes is not None:
|
| 346 |
+
metadata["scopes_supported"] = scopes
|
| 347 |
+
|
| 348 |
+
# Add OAuth 2.1 specific metadata
|
| 349 |
+
if self.oauth21_enabled:
|
| 350 |
+
metadata["pkce_required"] = True
|
| 351 |
+
# OAuth 2.1 deprecates implicit flow
|
| 352 |
+
metadata["response_types_supported"] = ["code"]
|
| 353 |
+
# OAuth 2.1 requires exact redirect URI matching
|
| 354 |
+
metadata["require_exact_redirect_uri"] = True
|
| 355 |
+
|
| 356 |
+
return metadata
|
| 357 |
+
|
| 358 |
+
|
| 359 |
+
# Global configuration instance
_oauth_config = None


def get_oauth_config() -> OAuthConfig:
    """
    Get the global OAuth configuration instance.

    Returns:
        The singleton OAuth configuration instance
    """
    global _oauth_config
    # Lazily construct on first access; later calls reuse the same object.
    # NOTE(review): not thread-safe — two concurrent first calls could each
    # build an instance; confirm single-threaded startup before relying on it.
    if _oauth_config is None:
        _oauth_config = OAuthConfig()
    return _oauth_config
|
| 376 |
+
def reload_oauth_config() -> OAuthConfig:
    """
    Reload the OAuth configuration from environment variables.

    This is useful for testing or when environment variables change.

    Returns:
        The reloaded OAuth configuration instance
    """
    global _oauth_config
    # Unconditionally rebuild so fresh environment values are picked up.
    _oauth_config = OAuthConfig()
    return _oauth_config
|
| 390 |
+
# Convenience functions for backward compatibility.
# Each delegates to the lazily-created singleton returned by
# get_oauth_config(). Note these module-level names intentionally mirror
# (and at module scope shadow) the corresponding OAuthConfig method names.
def get_oauth_base_url() -> str:
    """Get OAuth base URL."""
    return get_oauth_config().get_oauth_base_url()


def get_redirect_uris() -> List[str]:
    """Get all valid OAuth redirect URIs."""
    return get_oauth_config().get_redirect_uris()


def get_allowed_origins() -> List[str]:
    """Get allowed CORS origins."""
    return get_oauth_config().get_allowed_origins()


def is_oauth_configured() -> bool:
    """Check if OAuth is properly configured."""
    return get_oauth_config().is_configured()


def set_transport_mode(mode: str) -> None:
    """Set the current transport mode."""
    get_oauth_config().set_transport_mode(mode)


def get_transport_mode() -> str:
    """Get the current transport mode."""
    return get_oauth_config().get_transport_mode()


def is_oauth21_enabled() -> bool:
    """Check if OAuth 2.1 is enabled."""
    return get_oauth_config().is_oauth21_enabled()


def get_oauth_redirect_uri() -> str:
    """Get the primary OAuth redirect URI."""
    return get_oauth_config().redirect_uri


def is_stateless_mode() -> bool:
    """Check if stateless mode is enabled."""
    return get_oauth_config().stateless_mode


def is_external_oauth21_provider() -> bool:
    """Check if external OAuth 2.1 provider mode is enabled."""
    return get_oauth_config().is_external_oauth21_provider()
auth/oauth_responses.py
ADDED
|
@@ -0,0 +1,223 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Shared OAuth callback response templates.
|
| 3 |
+
|
| 4 |
+
Provides reusable HTML response templates for OAuth authentication flows
|
| 5 |
+
to eliminate duplication between server.py and oauth_callback_server.py.
|
| 6 |
+
"""
|
| 7 |
+
|
| 8 |
+
from fastapi.responses import HTMLResponse
|
| 9 |
+
from typing import Optional
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
def create_error_response(error_message: str, status_code: int = 400) -> HTMLResponse:
    """
    Create a standardized error response for OAuth failures.

    The message is HTML-escaped before interpolation so that
    attacker-influenced OAuth error parameters cannot inject markup or
    script into the rendered page.

    Args:
        error_message: The error message to display
        status_code: HTTP status code (default 400)

    Returns:
        HTMLResponse with error page
    """
    import html

    # Escape untrusted text before embedding it in the HTML body (XSS fix).
    safe_message = html.escape(error_message)
    content = f"""
    <html>
    <head><title>Authentication Error</title></head>
    <body style="font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, sans-serif; max-width: 600px; margin: 40px auto; padding: 20px; text-align: center;">
        <h2 style="color: #d32f2f;">Authentication Error</h2>
        <p>{safe_message}</p>
        <p>Please ensure you grant the requested permissions. You can close this window and try again.</p>
        <script>setTimeout(function() {{ window.close(); }}, 10000);</script>
    </body>
    </html>
    """
    return HTMLResponse(content=content, status_code=status_code)
+
|
| 36 |
+
|
| 37 |
+
def create_success_response(verified_user_id: Optional[str] = None) -> HTMLResponse:
    """
    Create a standardized success response for OAuth authentication.

    The displayed identity is HTML-escaped before interpolation so a
    crafted account name cannot inject markup or script into the page.

    Args:
        verified_user_id: The authenticated user's email (optional)

    Returns:
        HTMLResponse with success page
    """
    import html

    # Fall back to a generic label when no user ID is provided, then escape
    # the externally-supplied value before embedding it in the template.
    user_display = html.escape(verified_user_id if verified_user_id else "Google User")

    content = f"""<html>
<head>
    <title>Authentication Successful</title>
    <style>
        * {{
            margin: 0;
            padding: 0;
            box-sizing: border-box;
        }}

        body {{
            font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, "Helvetica Neue", Arial, sans-serif;
            background: linear-gradient(135deg,#0f172a,#1e293b,#334155);
            min-height: 100vh;
            display: flex;
            align-items: center;
            justify-content: center;
            color: #1a1a1a;
            -webkit-font-smoothing: antialiased;
            -moz-osx-font-smoothing: grayscale;
        }}

        .container {{
            background: rgba(255, 255, 255, 0.95);
            backdrop-filter: blur(10px);
            padding: 60px;
            border-radius: 20px;
            box-shadow: 0 30px 60px rgba(0, 0, 0, 0.12);
            text-align: center;
            max-width: 480px;
            width: 90%;
            transform: translateY(-20px);
            animation: slideUp 0.6s ease-out;
        }}

        @keyframes slideUp {{
            from {{
                opacity: 0;
                transform: translateY(0);
            }}
            to {{
                opacity: 1;
                transform: translateY(-20px);
            }}
        }}

        .icon {{
            width: 80px;
            height: 80px;
            margin: 0 auto 30px;
            background: linear-gradient(135deg, #667eea 0%, #764ba2 100%);
            border-radius: 50%;
            display: flex;
            align-items: center;
            justify-content: center;
            font-size: 40px;
            color: white;
            animation: pulse 2s ease-in-out infinite;
        }}

        @keyframes pulse {{
            0%, 100% {{
                transform: scale(1);
            }}
            50% {{
                transform: scale(1.05);
            }}
        }}

        h1 {{
            font-size: 28px;
            font-weight: 600;
            margin-bottom: 20px;
            background: linear-gradient(135deg, #667eea 0%, #764ba2 100%);
            -webkit-background-clip: text;
            -webkit-text-fill-color: transparent;
            background-clip: text;
        }}

        .message {{
            font-size: 16px;
            line-height: 1.6;
            color: #4a5568;
            margin-bottom: 20px;
        }}

        .user-id {{
            font-weight: 600;
            color: #667eea;
            padding: 4px 12px;
            background: rgba(102, 126, 234, 0.1);
            border-radius: 6px;
            display: inline-block;
            margin: 0 4px;
        }}

        .button {{
            background: linear-gradient(135deg, #667eea 0%, #764ba2 100%);
            color: white;
            padding: 16px 40px;
            border: none;
            border-radius: 30px;
            font-size: 16px;
            font-weight: 500;
            cursor: pointer;
            transition: all 0.3s ease;
            margin-top: 30px;
            display: inline-block;
            text-decoration: none;
            box-shadow: 0 4px 15px rgba(102, 126, 234, 0.3);
        }}

        .button:hover {{
            transform: translateY(-2px);
            box-shadow: 0 7px 20px rgba(102, 126, 234, 0.4);
        }}

        .button:active {{
            transform: translateY(0);
        }}

        .auto-close {{
            font-size: 13px;
            color: #a0aec0;
            margin-top: 30px;
            opacity: 0.8;
        }}
    </style>
    <script>
        setTimeout(function() {{
            window.close();
        }}, 10000);
    </script>
</head>
<body>
    <div class="container">
        <div class="icon">✓</div>
        <h1>Authentication Successful</h1>
        <div class="message">
            You've been authenticated as <span class="user-id">{user_display}</span>
        </div>
        <div class="message">
            Your credentials have been securely saved. You can now close this window and retry your original command.
        </div>
        <button class="button" onclick="window.close()">Close Window</button>
        <div class="auto-close">This window will close automatically in 10 seconds</div>
    </div>
</body>
</html>"""
    return HTMLResponse(content=content)
|
| 202 |
+
def create_server_error_response(error_detail: str) -> HTMLResponse:
    """
    Create a standardized server error response for OAuth processing failures.

    The detail string is HTML-escaped before interpolation — exception text
    can echo attacker-supplied request data, so it must not be rendered raw.

    Args:
        error_detail: The detailed error message

    Returns:
        HTMLResponse with server error page (HTTP 500)
    """
    import html

    # Escape untrusted text before embedding it in the HTML body (XSS fix).
    safe_detail = html.escape(error_detail)
    content = f"""
    <html>
    <head><title>Authentication Processing Error</title></head>
    <body style="font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, sans-serif; max-width: 600px; margin: 40px auto; padding: 20px; text-align: center;">
        <h2 style="color: #d32f2f;">Authentication Processing Error</h2>
        <p>An unexpected error occurred while processing your authentication: {safe_detail}</p>
        <p>Please try again. You can close this window.</p>
        <script>setTimeout(function() {{ window.close(); }}, 10000);</script>
    </body>
    </html>
    """
    return HTMLResponse(content=content, status_code=500)
auth/oauth_types.py
ADDED
|
@@ -0,0 +1,82 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Type definitions for OAuth authentication.
|
| 3 |
+
|
| 4 |
+
This module provides structured types for OAuth-related parameters,
|
| 5 |
+
improving code maintainability and type safety.
|
| 6 |
+
"""
|
| 7 |
+
|
| 8 |
+
from dataclasses import dataclass
|
| 9 |
+
from typing import Optional, List, Dict, Any
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
@dataclass
class OAuth21ServiceRequest:
    """
    Encapsulates parameters for OAuth 2.1 service authentication requests.

    This parameter object pattern reduces function complexity and makes
    it easier to extend authentication parameters in the future.
    """

    service_name: str
    version: str
    tool_name: str
    user_google_email: str
    required_scopes: List[str]
    session_id: Optional[str] = None
    auth_token_email: Optional[str] = None
    allow_recent_auth: bool = False
    context: Optional[Dict[str, Any]] = None

    def to_legacy_params(self) -> dict:
        """Convert to legacy parameter format for backward compatibility."""
        # Only these five fields existed in the legacy call signature.
        legacy_fields = (
            "service_name",
            "version",
            "tool_name",
            "user_google_email",
            "required_scopes",
        )
        return {name: getattr(self, name) for name in legacy_fields}
|
| 42 |
+
@dataclass
class OAuthVersionDetectionParams:
    """
    Parameters used for OAuth version detection.

    Encapsulates the various signals we use to determine
    whether a client supports OAuth 2.1 or needs OAuth 2.0.
    """

    client_id: Optional[str] = None
    client_secret: Optional[str] = None
    code_challenge: Optional[str] = None
    code_challenge_method: Optional[str] = None
    code_verifier: Optional[str] = None
    authenticated_user: Optional[str] = None
    session_id: Optional[str] = None

    # Request keys mirrored 1:1 onto the dataclass fields above.
    _REQUEST_KEYS = (
        "client_id",
        "client_secret",
        "code_challenge",
        "code_challenge_method",
        "code_verifier",
        "authenticated_user",
        "session_id",
    )

    @classmethod
    def from_request(
        cls, request_params: Dict[str, Any]
    ) -> "OAuthVersionDetectionParams":
        """Create from raw request parameters (missing keys become None)."""
        return cls(**{key: request_params.get(key) for key in cls._REQUEST_KEYS})

    @property
    def has_pkce(self) -> bool:
        """Check if PKCE parameters are present."""
        return any((self.code_challenge, self.code_verifier))

    @property
    def is_public_client(self) -> bool:
        """Check if this appears to be a public client (no secret)."""
        return bool(self.client_id) and not self.client_secret
|
auth/scopes.py
ADDED
|
@@ -0,0 +1,207 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Google Workspace OAuth Scopes
|
| 3 |
+
|
| 4 |
+
This module centralizes OAuth scope definitions for Google Workspace integration.
|
| 5 |
+
Separated from service_decorator.py to avoid circular imports.
|
| 6 |
+
"""
|
| 7 |
+
|
| 8 |
+
import logging
|
| 9 |
+
|
| 10 |
+
logger = logging.getLogger(__name__)
|
| 11 |
+
|
| 12 |
+
# Global variable to store enabled tools (set by main.py)
|
| 13 |
+
_ENABLED_TOOLS = None
|
| 14 |
+
|
| 15 |
+
# Individual OAuth Scope Constants
|
| 16 |
+
USERINFO_EMAIL_SCOPE = "https://www.googleapis.com/auth/userinfo.email"
|
| 17 |
+
USERINFO_PROFILE_SCOPE = "https://www.googleapis.com/auth/userinfo.profile"
|
| 18 |
+
OPENID_SCOPE = "openid"
|
| 19 |
+
CALENDAR_SCOPE = "https://www.googleapis.com/auth/calendar"
|
| 20 |
+
CALENDAR_READONLY_SCOPE = "https://www.googleapis.com/auth/calendar.readonly"
|
| 21 |
+
CALENDAR_EVENTS_SCOPE = "https://www.googleapis.com/auth/calendar.events"
|
| 22 |
+
|
| 23 |
+
# Google Drive scopes
|
| 24 |
+
DRIVE_SCOPE = "https://www.googleapis.com/auth/drive"
|
| 25 |
+
DRIVE_READONLY_SCOPE = "https://www.googleapis.com/auth/drive.readonly"
|
| 26 |
+
DRIVE_FILE_SCOPE = "https://www.googleapis.com/auth/drive.file"
|
| 27 |
+
|
| 28 |
+
# Google Docs scopes
|
| 29 |
+
DOCS_READONLY_SCOPE = "https://www.googleapis.com/auth/documents.readonly"
|
| 30 |
+
DOCS_WRITE_SCOPE = "https://www.googleapis.com/auth/documents"
|
| 31 |
+
|
| 32 |
+
# Gmail API scopes
|
| 33 |
+
GMAIL_READONLY_SCOPE = "https://www.googleapis.com/auth/gmail.readonly"
|
| 34 |
+
GMAIL_SEND_SCOPE = "https://www.googleapis.com/auth/gmail.send"
|
| 35 |
+
GMAIL_COMPOSE_SCOPE = "https://www.googleapis.com/auth/gmail.compose"
|
| 36 |
+
GMAIL_MODIFY_SCOPE = "https://www.googleapis.com/auth/gmail.modify"
|
| 37 |
+
GMAIL_LABELS_SCOPE = "https://www.googleapis.com/auth/gmail.labels"
|
| 38 |
+
GMAIL_SETTINGS_BASIC_SCOPE = "https://www.googleapis.com/auth/gmail.settings.basic"
|
| 39 |
+
|
| 40 |
+
# Google Chat API scopes
|
| 41 |
+
CHAT_READONLY_SCOPE = "https://www.googleapis.com/auth/chat.messages.readonly"
|
| 42 |
+
CHAT_WRITE_SCOPE = "https://www.googleapis.com/auth/chat.messages"
|
| 43 |
+
CHAT_SPACES_SCOPE = "https://www.googleapis.com/auth/chat.spaces"
|
| 44 |
+
|
| 45 |
+
# Google Sheets API scopes
|
| 46 |
+
SHEETS_READONLY_SCOPE = "https://www.googleapis.com/auth/spreadsheets.readonly"
|
| 47 |
+
SHEETS_WRITE_SCOPE = "https://www.googleapis.com/auth/spreadsheets"
|
| 48 |
+
|
| 49 |
+
# Google Forms API scopes
|
| 50 |
+
FORMS_BODY_SCOPE = "https://www.googleapis.com/auth/forms.body"
|
| 51 |
+
FORMS_BODY_READONLY_SCOPE = "https://www.googleapis.com/auth/forms.body.readonly"
|
| 52 |
+
FORMS_RESPONSES_READONLY_SCOPE = (
|
| 53 |
+
"https://www.googleapis.com/auth/forms.responses.readonly"
|
| 54 |
+
)
|
| 55 |
+
|
| 56 |
+
# Google Slides API scopes
|
| 57 |
+
SLIDES_SCOPE = "https://www.googleapis.com/auth/presentations"
|
| 58 |
+
SLIDES_READONLY_SCOPE = "https://www.googleapis.com/auth/presentations.readonly"
|
| 59 |
+
|
| 60 |
+
# Google Tasks API scopes
|
| 61 |
+
TASKS_SCOPE = "https://www.googleapis.com/auth/tasks"
|
| 62 |
+
TASKS_READONLY_SCOPE = "https://www.googleapis.com/auth/tasks.readonly"
|
| 63 |
+
|
| 64 |
+
# Google Custom Search API scope
|
| 65 |
+
CUSTOM_SEARCH_SCOPE = "https://www.googleapis.com/auth/cse"
|
| 66 |
+
|
| 67 |
+
# Google Apps Script API scopes
|
| 68 |
+
SCRIPT_PROJECTS_SCOPE = "https://www.googleapis.com/auth/script.projects"
|
| 69 |
+
SCRIPT_PROJECTS_READONLY_SCOPE = (
|
| 70 |
+
"https://www.googleapis.com/auth/script.projects.readonly"
|
| 71 |
+
)
|
| 72 |
+
SCRIPT_DEPLOYMENTS_SCOPE = "https://www.googleapis.com/auth/script.deployments"
|
| 73 |
+
SCRIPT_DEPLOYMENTS_READONLY_SCOPE = (
|
| 74 |
+
"https://www.googleapis.com/auth/script.deployments.readonly"
|
| 75 |
+
)
|
| 76 |
+
SCRIPT_PROCESSES_READONLY_SCOPE = "https://www.googleapis.com/auth/script.processes"
|
| 77 |
+
SCRIPT_METRICS_SCOPE = "https://www.googleapis.com/auth/script.metrics"
|
| 78 |
+
|
| 79 |
+
# Base OAuth scopes required for user identification
|
| 80 |
+
BASE_SCOPES = [USERINFO_EMAIL_SCOPE, USERINFO_PROFILE_SCOPE, OPENID_SCOPE]
|
| 81 |
+
|
| 82 |
+
# Service-specific scope groups
|
| 83 |
+
DOCS_SCOPES = [DOCS_READONLY_SCOPE, DOCS_WRITE_SCOPE]
|
| 84 |
+
|
| 85 |
+
CALENDAR_SCOPES = [CALENDAR_SCOPE, CALENDAR_READONLY_SCOPE, CALENDAR_EVENTS_SCOPE]
|
| 86 |
+
|
| 87 |
+
DRIVE_SCOPES = [DRIVE_SCOPE, DRIVE_READONLY_SCOPE, DRIVE_FILE_SCOPE]
|
| 88 |
+
|
| 89 |
+
GMAIL_SCOPES = [
|
| 90 |
+
GMAIL_READONLY_SCOPE,
|
| 91 |
+
GMAIL_SEND_SCOPE,
|
| 92 |
+
GMAIL_COMPOSE_SCOPE,
|
| 93 |
+
GMAIL_MODIFY_SCOPE,
|
| 94 |
+
GMAIL_LABELS_SCOPE,
|
| 95 |
+
GMAIL_SETTINGS_BASIC_SCOPE,
|
| 96 |
+
]
|
| 97 |
+
|
| 98 |
+
CHAT_SCOPES = [CHAT_READONLY_SCOPE, CHAT_WRITE_SCOPE, CHAT_SPACES_SCOPE]
|
| 99 |
+
|
| 100 |
+
SHEETS_SCOPES = [SHEETS_READONLY_SCOPE, SHEETS_WRITE_SCOPE]
|
| 101 |
+
|
| 102 |
+
FORMS_SCOPES = [
|
| 103 |
+
FORMS_BODY_SCOPE,
|
| 104 |
+
FORMS_BODY_READONLY_SCOPE,
|
| 105 |
+
FORMS_RESPONSES_READONLY_SCOPE,
|
| 106 |
+
]
|
| 107 |
+
|
| 108 |
+
SLIDES_SCOPES = [SLIDES_SCOPE, SLIDES_READONLY_SCOPE]
|
| 109 |
+
|
| 110 |
+
TASKS_SCOPES = [TASKS_SCOPE, TASKS_READONLY_SCOPE]
|
| 111 |
+
|
| 112 |
+
CUSTOM_SEARCH_SCOPES = [CUSTOM_SEARCH_SCOPE]
|
| 113 |
+
|
| 114 |
+
SCRIPT_SCOPES = [
|
| 115 |
+
SCRIPT_PROJECTS_SCOPE,
|
| 116 |
+
SCRIPT_PROJECTS_READONLY_SCOPE,
|
| 117 |
+
SCRIPT_DEPLOYMENTS_SCOPE,
|
| 118 |
+
SCRIPT_DEPLOYMENTS_READONLY_SCOPE,
|
| 119 |
+
SCRIPT_PROCESSES_READONLY_SCOPE, # Required for list_script_processes
|
| 120 |
+
SCRIPT_METRICS_SCOPE, # Required for get_script_metrics
|
| 121 |
+
DRIVE_FILE_SCOPE, # Required for list/delete script projects (uses Drive API)
|
| 122 |
+
]
|
| 123 |
+
|
| 124 |
+
# Tool-to-scopes mapping
|
| 125 |
+
TOOL_SCOPES_MAP = {
|
| 126 |
+
"gmail": GMAIL_SCOPES,
|
| 127 |
+
"drive": DRIVE_SCOPES,
|
| 128 |
+
"calendar": CALENDAR_SCOPES,
|
| 129 |
+
"docs": DOCS_SCOPES,
|
| 130 |
+
"sheets": SHEETS_SCOPES,
|
| 131 |
+
"chat": CHAT_SCOPES,
|
| 132 |
+
"forms": FORMS_SCOPES,
|
| 133 |
+
"slides": SLIDES_SCOPES,
|
| 134 |
+
"tasks": TASKS_SCOPES,
|
| 135 |
+
"search": CUSTOM_SEARCH_SCOPES,
|
| 136 |
+
"appscript": SCRIPT_SCOPES,
|
| 137 |
+
}
|
| 138 |
+
|
| 139 |
+
|
| 140 |
+
def set_enabled_tools(enabled_tools):
    """
    Set the globally enabled tools list.

    Args:
        enabled_tools: List of enabled tool names.
    """
    global _ENABLED_TOOLS
    # Stored at module level so get_current_scopes() can consult it later.
    _ENABLED_TOOLS = enabled_tools
    logger.info(f"Enabled tools set for scope management: {enabled_tools}")
| 152 |
+
def get_current_scopes():
    """
    Return scopes for the currently enabled tools.

    Uses the globally configured enabled tools (see set_enabled_tools), or
    all known tools when none have been set.

    Returns:
        List of unique scopes for the enabled tools plus base scopes.
    """
    enabled_tools = _ENABLED_TOOLS
    if enabled_tools is None:
        # Default behavior - treat all known tools as enabled
        enabled_tools = TOOL_SCOPES_MAP.keys()

    # Delegate to get_scopes_for_tools instead of duplicating the
    # base-scope + per-tool scope assembly logic here (DRY).
    scopes = get_scopes_for_tools(list(enabled_tools))

    logger.debug(
        f"Generated scopes for tools {list(enabled_tools)}: {len(scopes)} unique scopes"
    )
    return scopes
|
| 178 |
+
|
| 179 |
+
|
| 180 |
+
def get_scopes_for_tools(enabled_tools=None):
    """
    Build the scope list for a set of tools.

    Args:
        enabled_tools: Iterable of tool names. If None, every known tool
            is included.

    Returns:
        List of unique scopes for the requested tools plus base scopes.
    """
    selected = TOOL_SCOPES_MAP.keys() if enabled_tools is None else enabled_tools

    # Base scopes are always required; each enabled tool contributes its own.
    combined = list(BASE_SCOPES)
    for tool_name in selected:
        combined.extend(TOOL_SCOPES_MAP.get(tool_name, []))

    # Deduplicate before returning.
    return list(set(combined))
|
| 204 |
+
|
| 205 |
+
|
| 206 |
+
# Combined scopes for all supported Google Workspace operations (backwards compatibility)
# Evaluated once at import time with the default (all tools enabled).
SCOPES = get_scopes_for_tools()
|
auth/service_decorator.py
ADDED
|
@@ -0,0 +1,789 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import inspect
|
| 2 |
+
import logging
|
| 3 |
+
|
| 4 |
+
import re
|
| 5 |
+
from functools import wraps
|
| 6 |
+
from typing import Dict, List, Optional, Any, Callable, Union, Tuple
|
| 7 |
+
|
| 8 |
+
from google.auth.exceptions import RefreshError
|
| 9 |
+
from googleapiclient.discovery import build
|
| 10 |
+
from fastmcp.server.dependencies import get_access_token, get_context
|
| 11 |
+
from auth.google_auth import get_authenticated_google_service, GoogleAuthenticationError
|
| 12 |
+
from auth.oauth21_session_store import (
|
| 13 |
+
get_auth_provider,
|
| 14 |
+
get_oauth21_session_store,
|
| 15 |
+
ensure_session_from_access_token,
|
| 16 |
+
)
|
| 17 |
+
from auth.oauth_config import is_oauth21_enabled, get_oauth_config
|
| 18 |
+
from core.context import set_fastmcp_session_id
|
| 19 |
+
from auth.scopes import (
|
| 20 |
+
GMAIL_READONLY_SCOPE,
|
| 21 |
+
GMAIL_SEND_SCOPE,
|
| 22 |
+
GMAIL_COMPOSE_SCOPE,
|
| 23 |
+
GMAIL_MODIFY_SCOPE,
|
| 24 |
+
GMAIL_LABELS_SCOPE,
|
| 25 |
+
GMAIL_SETTINGS_BASIC_SCOPE,
|
| 26 |
+
DRIVE_READONLY_SCOPE,
|
| 27 |
+
DRIVE_FILE_SCOPE,
|
| 28 |
+
DOCS_READONLY_SCOPE,
|
| 29 |
+
DOCS_WRITE_SCOPE,
|
| 30 |
+
CALENDAR_READONLY_SCOPE,
|
| 31 |
+
CALENDAR_EVENTS_SCOPE,
|
| 32 |
+
SHEETS_READONLY_SCOPE,
|
| 33 |
+
SHEETS_WRITE_SCOPE,
|
| 34 |
+
CHAT_READONLY_SCOPE,
|
| 35 |
+
CHAT_WRITE_SCOPE,
|
| 36 |
+
CHAT_SPACES_SCOPE,
|
| 37 |
+
FORMS_BODY_SCOPE,
|
| 38 |
+
FORMS_BODY_READONLY_SCOPE,
|
| 39 |
+
FORMS_RESPONSES_READONLY_SCOPE,
|
| 40 |
+
SLIDES_SCOPE,
|
| 41 |
+
SLIDES_READONLY_SCOPE,
|
| 42 |
+
TASKS_SCOPE,
|
| 43 |
+
TASKS_READONLY_SCOPE,
|
| 44 |
+
CUSTOM_SEARCH_SCOPE,
|
| 45 |
+
SCRIPT_PROJECTS_SCOPE,
|
| 46 |
+
SCRIPT_PROJECTS_READONLY_SCOPE,
|
| 47 |
+
SCRIPT_DEPLOYMENTS_SCOPE,
|
| 48 |
+
SCRIPT_DEPLOYMENTS_READONLY_SCOPE,
|
| 49 |
+
)
|
| 50 |
+
|
| 51 |
+
logger = logging.getLogger(__name__)
|
| 52 |
+
|
| 53 |
+
|
| 54 |
+
# Authentication helper functions
|
| 55 |
+
def _get_auth_context(
    tool_name: str,
) -> Tuple[Optional[str], Optional[str], Optional[str]]:
    """Read authentication details from the FastMCP request context.

    Returns:
        Tuple of (authenticated_user, auth_method, mcp_session_id); all
        three are None when no context is available or the lookup fails.
    """
    try:
        ctx = get_context()
        if not ctx:
            return None, None, None

        user = ctx.get_state("authenticated_user_email")
        method = ctx.get_state("authenticated_via")
        session_id = getattr(ctx, "session_id", None)

        # Propagate the session id so other components can correlate requests.
        if session_id:
            set_fastmcp_session_id(session_id)

        logger.debug(
            f"[{tool_name}] Auth from middleware: {user} via {method}"
        )
        return user, method, session_id

    except Exception as e:
        logger.debug(f"[{tool_name}] Could not get FastMCP context: {e}")
        return None, None, None
|
| 84 |
+
|
| 85 |
+
|
| 86 |
+
def _detect_oauth_version(
    authenticated_user: Optional[str], mcp_session_id: Optional[str], tool_name: str
) -> bool:
    """Decide whether the OAuth 2.1 flow should be used for this call.

    Returns:
        True if OAuth 2.1 should be used, False otherwise.
    """
    if not is_oauth21_enabled():
        return False

    # With OAuth 2.1 enabled globally, authenticated users always use OAuth 2.1.
    if authenticated_user:
        logger.info(
            f"[{tool_name}] OAuth 2.1 mode: Using OAuth 2.1 for authenticated user '{authenticated_user}'"
        )
        return True

    # Unauthenticated request: ask the config to detect the version,
    # passing the session id when one is available.
    detection_params = {"session_id": mcp_session_id} if mcp_session_id else {}
    detected = get_oauth_config().detect_oauth_version(detection_params)
    use_oauth21 = detected == "oauth21"
    logger.info(
        f"[{tool_name}] OAuth version detected: {detected}, will use OAuth 2.1: {use_oauth21}"
    )
    return use_oauth21
|
| 117 |
+
|
| 118 |
+
|
| 119 |
+
def _update_email_in_args(args: tuple, index: int, new_email: str) -> tuple:
|
| 120 |
+
"""Update email at specific index in args tuple."""
|
| 121 |
+
if index < len(args):
|
| 122 |
+
args_list = list(args)
|
| 123 |
+
args_list[index] = new_email
|
| 124 |
+
return tuple(args_list)
|
| 125 |
+
return args
|
| 126 |
+
|
| 127 |
+
|
| 128 |
+
def _override_oauth21_user_email(
    use_oauth21: bool,
    authenticated_user: Optional[str],
    current_user_email: str,
    args: tuple,
    kwargs: dict,
    param_names: List[str],
    tool_name: str,
    service_type: str = "",
) -> Tuple[str, tuple]:
    """Force user_google_email to the authenticated user under OAuth 2.1.

    Returns:
        Tuple of (effective_user_email, possibly-updated args).
    """
    needs_override = (
        use_oauth21
        and bool(authenticated_user)
        and current_user_email != authenticated_user
    )
    if not needs_override:
        return current_user_email, args

    service_suffix = f" for service '{service_type}'" if service_type else ""
    logger.info(
        f"[{tool_name}] OAuth 2.1: Overriding user_google_email from '{current_user_email}' to authenticated user '{authenticated_user}'{service_suffix}"
    )

    # Keyword form: overwrite the value in place.
    if "user_google_email" in kwargs:
        kwargs["user_google_email"] = authenticated_user

    # Positional form: rebuild the args tuple with the email replaced.
    if "user_google_email" in param_names:
        position = param_names.index("user_google_email")
        args = _update_email_in_args(args, position, authenticated_user)

    return authenticated_user, args
|
| 166 |
+
|
| 167 |
+
|
| 168 |
+
async def _authenticate_service(
    use_oauth21: bool,
    service_name: str,
    service_version: str,
    tool_name: str,
    user_google_email: str,
    resolved_scopes: List[str],
    mcp_session_id: Optional[str],
    authenticated_user: Optional[str],
) -> Tuple[Any, str]:
    """Authenticate and build the Google service via the selected OAuth flow.

    Returns:
        Tuple of (service, actual_user_email).
    """
    # Arguments shared by both the OAuth 2.1 and legacy OAuth 2.0 entry points.
    common_kwargs = dict(
        service_name=service_name,
        version=service_version,
        tool_name=tool_name,
        user_google_email=user_google_email,
        required_scopes=resolved_scopes,
        session_id=mcp_session_id,
    )

    if use_oauth21:
        logger.debug(f"[{tool_name}] Using OAuth 2.1 flow")
        return await get_authenticated_google_service_oauth21(
            auth_token_email=authenticated_user,
            allow_recent_auth=False,
            **common_kwargs,
        )

    logger.debug(f"[{tool_name}] Using legacy OAuth 2.0 flow")
    return await get_authenticated_google_service(**common_kwargs)
|
| 206 |
+
|
| 207 |
+
|
| 208 |
+
async def get_authenticated_google_service_oauth21(
    service_name: str,
    version: str,
    tool_name: str,
    user_google_email: str,
    required_scopes: List[str],
    session_id: Optional[str] = None,
    auth_token_email: Optional[str] = None,
    allow_recent_auth: bool = False,
) -> tuple[Any, str]:
    """
    OAuth 2.1 authentication using the session store with security validation.

    Two paths are attempted, in order:
      1. If an auth provider and a bearer access token are both available,
         the token's email claim is cross-checked against the authenticated
         session and the requested user, and credentials are built directly
         from the token.
      2. Otherwise, credentials are fetched from the OAuth 2.1 session store
         with session/email validation.

    Returns:
        Tuple of (Google API service object, resolved user email).

    Raises:
        GoogleAuthenticationError: On identity mismatch, missing
            credentials, or insufficient scopes.
    """
    provider = get_auth_provider()
    access_token = get_access_token()

    if provider and access_token:
        # Prefer the email embedded in the token claims, when present.
        token_email = None
        if getattr(access_token, "claims", None):
            token_email = access_token.claims.get("email")

        resolved_email = token_email or auth_token_email or user_google_email
        if not resolved_email:
            raise GoogleAuthenticationError(
                "Authenticated user email could not be determined from access token."
            )

        # Security check: token identity must match the session's auth context.
        if auth_token_email and token_email and token_email != auth_token_email:
            raise GoogleAuthenticationError(
                "Access token email does not match authenticated session context."
            )

        # Security check: token identity must match the explicitly requested user.
        if token_email and user_google_email and token_email != user_google_email:
            raise GoogleAuthenticationError(
                f"Authenticated account {token_email} does not match requested user {user_google_email}."
            )

        credentials = ensure_session_from_access_token(
            access_token, resolved_email, session_id
        )
        if not credentials:
            raise GoogleAuthenticationError(
                "Unable to build Google credentials from authenticated access token."
            )

        # Fall back to the token's own scope list when the credentials carry none.
        scopes_available = set(credentials.scopes or [])
        if not scopes_available and getattr(access_token, "scopes", None):
            scopes_available = set(access_token.scopes)

        if not all(scope in scopes_available for scope in required_scopes):
            raise GoogleAuthenticationError(
                f"OAuth credentials lack required scopes. Need: {required_scopes}, Have: {sorted(scopes_available)}"
            )

        service = build(service_name, version, credentials=credentials)
        logger.info(f"[{tool_name}] Authenticated {service_name} for {resolved_email}")
        return service, resolved_email

    # No bearer-token path available: fall back to the OAuth 2.1 session store.
    store = get_oauth21_session_store()

    # Use the validation method to ensure session can only access its own credentials
    credentials = store.get_credentials_with_validation(
        requested_user_email=user_google_email,
        session_id=session_id,
        auth_token_email=auth_token_email,
        allow_recent_auth=allow_recent_auth,
    )

    if not credentials:
        raise GoogleAuthenticationError(
            f"Access denied: Cannot retrieve credentials for {user_google_email}. "
            f"You can only access credentials for your authenticated account."
        )

    # When the stored credentials carry no scope list, assume the required
    # scopes were granted (scopes are unknown rather than absent).
    if not credentials.scopes:
        scopes_available = set(required_scopes)
    else:
        scopes_available = set(credentials.scopes)

    if not all(scope in scopes_available for scope in required_scopes):
        raise GoogleAuthenticationError(
            f"OAuth 2.1 credentials lack required scopes. Need: {required_scopes}, Have: {sorted(scopes_available)}"
        )

    service = build(service_name, version, credentials=credentials)
    logger.info(f"[{tool_name}] Authenticated {service_name} for {user_google_email}")

    return service, user_google_email
|
| 296 |
+
|
| 297 |
+
|
| 298 |
+
def _extract_oauth21_user_email(
|
| 299 |
+
authenticated_user: Optional[str], func_name: str
|
| 300 |
+
) -> str:
|
| 301 |
+
"""
|
| 302 |
+
Extract user email for OAuth 2.1 mode.
|
| 303 |
+
|
| 304 |
+
Args:
|
| 305 |
+
authenticated_user: The authenticated user from context
|
| 306 |
+
func_name: Name of the function being decorated (for error messages)
|
| 307 |
+
|
| 308 |
+
Returns:
|
| 309 |
+
User email string
|
| 310 |
+
|
| 311 |
+
Raises:
|
| 312 |
+
Exception: If no authenticated user found in OAuth 2.1 mode
|
| 313 |
+
"""
|
| 314 |
+
if not authenticated_user:
|
| 315 |
+
raise Exception(
|
| 316 |
+
f"OAuth 2.1 mode requires an authenticated user for {func_name}, but none was found."
|
| 317 |
+
)
|
| 318 |
+
return authenticated_user
|
| 319 |
+
|
| 320 |
+
|
| 321 |
+
def _extract_oauth20_user_email(
|
| 322 |
+
args: tuple, kwargs: dict, wrapper_sig: inspect.Signature
|
| 323 |
+
) -> str:
|
| 324 |
+
"""
|
| 325 |
+
Extract user email for OAuth 2.0 mode from function arguments.
|
| 326 |
+
|
| 327 |
+
Args:
|
| 328 |
+
args: Positional arguments passed to wrapper
|
| 329 |
+
kwargs: Keyword arguments passed to wrapper
|
| 330 |
+
wrapper_sig: Function signature for parameter binding
|
| 331 |
+
|
| 332 |
+
Returns:
|
| 333 |
+
User email string
|
| 334 |
+
|
| 335 |
+
Raises:
|
| 336 |
+
Exception: If user_google_email parameter not found
|
| 337 |
+
"""
|
| 338 |
+
bound_args = wrapper_sig.bind(*args, **kwargs)
|
| 339 |
+
bound_args.apply_defaults()
|
| 340 |
+
|
| 341 |
+
user_google_email = bound_args.arguments.get("user_google_email")
|
| 342 |
+
if not user_google_email:
|
| 343 |
+
raise Exception("'user_google_email' parameter is required but was not found.")
|
| 344 |
+
return user_google_email
|
| 345 |
+
|
| 346 |
+
|
| 347 |
+
def _remove_user_email_arg_from_docstring(docstring: str) -> str:
|
| 348 |
+
"""
|
| 349 |
+
Remove user_google_email parameter documentation from docstring.
|
| 350 |
+
|
| 351 |
+
Args:
|
| 352 |
+
docstring: The original function docstring
|
| 353 |
+
|
| 354 |
+
Returns:
|
| 355 |
+
Modified docstring with user_google_email parameter removed
|
| 356 |
+
"""
|
| 357 |
+
if not docstring:
|
| 358 |
+
return docstring
|
| 359 |
+
|
| 360 |
+
# Pattern to match user_google_email parameter documentation
|
| 361 |
+
# Handles various formats like:
|
| 362 |
+
# - user_google_email (str): The user's Google email address. Required.
|
| 363 |
+
# - user_google_email: Description
|
| 364 |
+
# - user_google_email (str) - Description
|
| 365 |
+
patterns = [
|
| 366 |
+
r"^\s*user_google_email\s*\([^)]*\)\s*:\s*[^\n]*\.?\s*(?:Required\.?)?\s*\n",
|
| 367 |
+
r"^\s*user_google_email\s*:\s*[^\n]*\n",
|
| 368 |
+
r"^\s*user_google_email\s*\([^)]*\)\s*-\s*[^\n]*\n",
|
| 369 |
+
]
|
| 370 |
+
|
| 371 |
+
modified_docstring = docstring
|
| 372 |
+
for pattern in patterns:
|
| 373 |
+
modified_docstring = re.sub(pattern, "", modified_docstring, flags=re.MULTILINE)
|
| 374 |
+
|
| 375 |
+
# Clean up any sequence of 3 or more newlines that might have been created
|
| 376 |
+
modified_docstring = re.sub(r"\n{3,}", "\n\n", modified_docstring)
|
| 377 |
+
return modified_docstring
|
| 378 |
+
|
| 379 |
+
|
| 380 |
+
# Service configuration mapping: service_type (as passed to
# @require_google_service) -> Google API discovery name and default version.
SERVICE_CONFIGS = {
    "gmail": {"service": "gmail", "version": "v1"},
    "drive": {"service": "drive", "version": "v3"},
    "calendar": {"service": "calendar", "version": "v3"},
    "docs": {"service": "docs", "version": "v1"},
    "sheets": {"service": "sheets", "version": "v4"},
    "chat": {"service": "chat", "version": "v1"},
    "forms": {"service": "forms", "version": "v1"},
    "slides": {"service": "slides", "version": "v1"},
    "tasks": {"service": "tasks", "version": "v1"},
    "customsearch": {"service": "customsearch", "version": "v1"},
    "script": {"service": "script", "version": "v1"},
}


# Scope group definitions for easy reference: short alias -> full scope URL.
# Aliases are resolved by _resolve_scopes; unknown strings pass through as-is.
SCOPE_GROUPS = {
    # Gmail scopes
    "gmail_read": GMAIL_READONLY_SCOPE,
    "gmail_send": GMAIL_SEND_SCOPE,
    "gmail_compose": GMAIL_COMPOSE_SCOPE,
    "gmail_modify": GMAIL_MODIFY_SCOPE,
    "gmail_labels": GMAIL_LABELS_SCOPE,
    "gmail_settings_basic": GMAIL_SETTINGS_BASIC_SCOPE,
    # Drive scopes
    "drive_read": DRIVE_READONLY_SCOPE,
    "drive_file": DRIVE_FILE_SCOPE,
    # Docs scopes
    "docs_read": DOCS_READONLY_SCOPE,
    "docs_write": DOCS_WRITE_SCOPE,
    # Calendar scopes
    "calendar_read": CALENDAR_READONLY_SCOPE,
    "calendar_events": CALENDAR_EVENTS_SCOPE,
    # Sheets scopes
    "sheets_read": SHEETS_READONLY_SCOPE,
    "sheets_write": SHEETS_WRITE_SCOPE,
    # Chat scopes
    "chat_read": CHAT_READONLY_SCOPE,
    "chat_write": CHAT_WRITE_SCOPE,
    "chat_spaces": CHAT_SPACES_SCOPE,
    # Forms scopes
    "forms": FORMS_BODY_SCOPE,
    "forms_read": FORMS_BODY_READONLY_SCOPE,
    "forms_responses_read": FORMS_RESPONSES_READONLY_SCOPE,
    # Slides scopes
    "slides": SLIDES_SCOPE,
    "slides_read": SLIDES_READONLY_SCOPE,
    # Tasks scopes
    "tasks": TASKS_SCOPE,
    "tasks_read": TASKS_READONLY_SCOPE,
    # Custom Search scope
    "customsearch": CUSTOM_SEARCH_SCOPE,
    # Apps Script scopes
    "script_readonly": SCRIPT_PROJECTS_READONLY_SCOPE,
    "script_projects": SCRIPT_PROJECTS_SCOPE,
    "script_deployments": SCRIPT_DEPLOYMENTS_SCOPE,
    "script_deployments_readonly": SCRIPT_DEPLOYMENTS_READONLY_SCOPE,
}
|
| 439 |
+
|
| 440 |
+
|
| 441 |
+
def _resolve_scopes(scopes: Union[str, List[str]]) -> List[str]:
    """Resolve scope group aliases to actual scope URLs.

    Strings not present in SCOPE_GROUPS are assumed to already be scope
    URLs and pass through unchanged.
    """
    if isinstance(scopes, str):
        return [SCOPE_GROUPS.get(scopes, scopes)]
    return [SCOPE_GROUPS.get(scope, scope) for scope in scopes]
|
| 456 |
+
|
| 457 |
+
|
| 458 |
+
def _handle_token_refresh_error(
    error: RefreshError, user_email: str, service_name: str
) -> str:
    """Translate a RefreshError into a user-facing reauthentication message.

    Args:
        error: The RefreshError raised by the Google client library.
        user_email: Email address of the affected user.
        service_name: Google service being accessed (e.g. "gmail").

    Returns:
        A human-readable message with reauthentication instructions.
    """
    lowered = str(error).lower()
    token_invalid = "invalid_grant" in lowered or "expired or revoked" in lowered

    if not token_invalid:
        # Handle other types of refresh errors
        logger.error(f"Unexpected refresh error for user {user_email}: {error}")
        return (
            f"Authentication error occurred for {user_email}. "
            f"Please try running `start_google_auth` with your email and the appropriate service name to reauthenticate."
        )

    logger.warning(
        f"Token expired or revoked for user {user_email} accessing {service_name}"
    )

    service_display_name = f"Google {service_name.title()}"

    return (
        f"**Authentication Required: Token Expired/Revoked for {service_display_name}**\n\n"
        f"Your Google authentication token for {user_email} has expired or been revoked. "
        f"This commonly happens when:\n"
        f"- The token has been unused for an extended period\n"
        f"- You've changed your Google account password\n"
        f"- You've revoked access to the application\n\n"
        f"**To resolve this, please:**\n"
        f"1. Run `start_google_auth` with your email ({user_email}) and service_name='{service_display_name}'\n"
        f"2. Complete the authentication flow in your browser\n"
        f"3. Retry your original command\n\n"
        f"The application will automatically use the new credentials once authentication is complete."
    )
|
| 504 |
+
|
| 505 |
+
|
| 506 |
+
def require_google_service(
    service_type: str,
    scopes: Union[str, List[str]],
    version: Optional[str] = None,
):
    """
    Decorator that automatically handles Google service authentication and injection.

    Args:
        service_type: Type of Google service ("gmail", "drive", "calendar", etc.)
        scopes: Required scopes (can be scope group names or actual URLs)
        version: Service version (defaults to standard version for service type)

    Usage:
        @require_google_service("gmail", "gmail_read")
        async def search_messages(service, user_google_email: str, query: str):
            # service parameter is automatically injected
            # Original authentication logic is handled automatically
    """

    def decorator(func: Callable) -> Callable:
        original_sig = inspect.signature(func)
        params = list(original_sig.parameters.values())

        # The decorated function must have 'service' as its first parameter.
        if not params or params[0].name != "service":
            raise TypeError(
                f"Function '{func.__name__}' decorated with @require_google_service "
                "must have 'service' as its first parameter."
            )

        # Create a new signature for the wrapper that excludes the 'service' parameter.
        # In OAuth 2.1 mode, also exclude 'user_google_email' since it's automatically determined.
        # NOTE: is_oauth21_enabled() is evaluated at decoration (import) time here,
        # and again per-call inside the wrapper below.
        if is_oauth21_enabled():
            # Remove both 'service' and 'user_google_email' parameters
            filtered_params = [p for p in params[1:] if p.name != "user_google_email"]
            wrapper_sig = original_sig.replace(parameters=filtered_params)
        else:
            # Only remove 'service' parameter for OAuth 2.0 mode
            wrapper_sig = original_sig.replace(parameters=params[1:])

        @wraps(func)
        async def wrapper(*args, **kwargs):
            # Note: `args` and `kwargs` are now the arguments for the *wrapper*,
            # which does not include 'service'.

            # Get authentication context early to determine OAuth mode
            authenticated_user, auth_method, mcp_session_id = _get_auth_context(
                func.__name__
            )

            # Extract user_google_email based on OAuth mode
            if is_oauth21_enabled():
                user_google_email = _extract_oauth21_user_email(
                    authenticated_user, func.__name__
                )
            else:
                user_google_email = _extract_oauth20_user_email(
                    args, kwargs, wrapper_sig
                )

            # Get service configuration from the decorator's arguments
            # (validated lazily, at call time rather than decoration time)
            if service_type not in SERVICE_CONFIGS:
                raise Exception(f"Unknown service type: {service_type}")

            config = SERVICE_CONFIGS[service_type]
            service_name = config["service"]
            service_version = version or config["version"]

            # Resolve scopes
            resolved_scopes = _resolve_scopes(scopes)

            try:
                tool_name = func.__name__

                # Log authentication status
                logger.debug(
                    f"[{tool_name}] Auth: {authenticated_user or 'none'} via {auth_method or 'none'} (session: {mcp_session_id[:8] if mcp_session_id else 'none'})"
                )

                # Detect OAuth version
                use_oauth21 = _detect_oauth_version(
                    authenticated_user, mcp_session_id, tool_name
                )

                # In OAuth 2.1 mode, user_google_email is already set to authenticated_user
                # In OAuth 2.0 mode, we may need to override it
                if not is_oauth21_enabled():
                    wrapper_params = list(wrapper_sig.parameters.keys())
                    user_google_email, args = _override_oauth21_user_email(
                        use_oauth21,
                        authenticated_user,
                        user_google_email,
                        args,
                        kwargs,
                        wrapper_params,
                        tool_name,
                    )

                # Authenticate service
                service, actual_user_email = await _authenticate_service(
                    use_oauth21,
                    service_name,
                    service_version,
                    tool_name,
                    user_google_email,
                    resolved_scopes,
                    mcp_session_id,
                    authenticated_user,
                )
            except GoogleAuthenticationError as e:
                logger.error(
                    f"[{tool_name}] GoogleAuthenticationError during authentication. "
                    f"Method={auth_method or 'none'}, User={authenticated_user or 'none'}, "
                    f"Service={service_name} v{service_version}, MCPSessionID={mcp_session_id or 'none'}: {e}"
                )
                # Re-raise the original error without wrapping it
                raise

            try:
                # In OAuth 2.1 mode, we need to add user_google_email to kwargs since it was removed from signature
                if is_oauth21_enabled():
                    kwargs["user_google_email"] = user_google_email

                # Prepend the fetched service object to the original arguments
                return await func(service, *args, **kwargs)
            except RefreshError as e:
                # Expired/revoked tokens surface here; convert to a
                # user-friendly reauthentication message.
                error_message = _handle_token_refresh_error(
                    e, actual_user_email, service_name
                )
                raise Exception(error_message)

        # Set the wrapper's signature to the one without 'service'
        wrapper.__signature__ = wrapper_sig

        # Conditionally modify docstring to remove user_google_email parameter documentation
        if is_oauth21_enabled():
            logger.debug(
                "OAuth 2.1 mode enabled, removing user_google_email from docstring"
            )
            if func.__doc__:
                wrapper.__doc__ = _remove_user_email_arg_from_docstring(func.__doc__)

        return wrapper

    return decorator
|
| 652 |
+
|
| 653 |
+
|
| 654 |
+
def require_multiple_services(service_configs: List[Dict[str, Any]]):
    """
    Decorator for functions that need multiple Google services.

    Each configured service is authenticated in turn and injected into the
    wrapped coroutine as a keyword argument named by ``param_name``. The
    injected service parameters (and, in OAuth 2.1 mode, ``user_google_email``)
    are stripped from the wrapper's public signature so tool schemas do not
    expose them.

    Args:
        service_configs: List of service configurations, each containing:
            - service_type: Type of service
            - scopes: Required scopes
            - param_name: Name to inject service as (e.g., 'drive_service', 'docs_service')
            - version: Optional version override

    Usage:
        @require_multiple_services([
            {"service_type": "drive", "scopes": "drive_read", "param_name": "drive_service"},
            {"service_type": "docs", "scopes": "docs_read", "param_name": "docs_service"}
        ])
        async def get_doc_with_metadata(drive_service, docs_service, user_google_email: str, doc_id: str):
            # Both services are automatically injected
    """

    def decorator(func: Callable) -> Callable:
        original_sig = inspect.signature(func)

        service_param_names = {config["param_name"] for config in service_configs}
        params = list(original_sig.parameters.values())

        # Remove injected service params from the wrapper signature; drop user_google_email only for OAuth 2.1.
        filtered_params = [p for p in params if p.name not in service_param_names]
        if is_oauth21_enabled():
            filtered_params = [
                p for p in filtered_params if p.name != "user_google_email"
            ]

        wrapper_sig = original_sig.replace(parameters=filtered_params)
        wrapper_param_names = [p.name for p in filtered_params]

        @wraps(func)
        async def wrapper(*args, **kwargs):
            # Get authentication context early
            tool_name = func.__name__
            authenticated_user, _, mcp_session_id = _get_auth_context(tool_name)

            # Extract user_google_email based on OAuth mode:
            # OAuth 2.1 derives it from the authenticated session; OAuth 2.0
            # reads it from the caller-supplied arguments.
            if is_oauth21_enabled():
                user_google_email = _extract_oauth21_user_email(
                    authenticated_user, tool_name
                )
            else:
                user_google_email = _extract_oauth20_user_email(
                    args, kwargs, wrapper_sig
                )

            # Authenticate all services, injecting each one into kwargs.
            for config in service_configs:
                service_type = config["service_type"]
                scopes = config["scopes"]
                param_name = config["param_name"]
                version = config.get("version")

                if service_type not in SERVICE_CONFIGS:
                    raise Exception(f"Unknown service type: {service_type}")

                service_config = SERVICE_CONFIGS[service_type]
                service_name = service_config["service"]
                # Per-config version override beats the registry default.
                service_version = version or service_config["version"]
                resolved_scopes = _resolve_scopes(scopes)

                try:
                    # Detect OAuth version (simplified for multiple services).
                    # NOTE(review): recomputed each iteration although it is
                    # loop-invariant — kept as-is to preserve behavior.
                    use_oauth21 = (
                        is_oauth21_enabled() and authenticated_user is not None
                    )

                    # In OAuth 2.0 mode, we may need to override user_google_email
                    # (helper name mentions OAuth 2.1 but it only runs in the
                    # legacy path — confirm against _override_oauth21_user_email).
                    if not is_oauth21_enabled():
                        user_google_email, args = _override_oauth21_user_email(
                            use_oauth21,
                            authenticated_user,
                            user_google_email,
                            args,
                            kwargs,
                            wrapper_param_names,
                            tool_name,
                            service_type,
                        )

                    # Authenticate service
                    service, _ = await _authenticate_service(
                        use_oauth21,
                        service_name,
                        service_version,
                        tool_name,
                        user_google_email,
                        resolved_scopes,
                        mcp_session_id,
                        authenticated_user,
                    )

                    # Inject service with specified parameter name
                    kwargs[param_name] = service

                except GoogleAuthenticationError as e:
                    logger.error(
                        f"[{tool_name}] GoogleAuthenticationError for service '{service_type}' (user: {user_google_email}): {e}"
                    )
                    # Re-raise the original error without wrapping it
                    raise

            # Call the original function with refresh error handling
            try:
                # In OAuth 2.1 mode, we need to add user_google_email to kwargs since it was removed from signature
                if is_oauth21_enabled():
                    kwargs["user_google_email"] = user_google_email

                return await func(*args, **kwargs)
            except RefreshError as e:
                # Handle token refresh errors gracefully
                error_message = _handle_token_refresh_error(
                    e, user_google_email, "Multiple Services"
                )
                raise Exception(error_message)

        # Set the wrapper's signature (what FastMCP/inspection sees)
        wrapper.__signature__ = wrapper_sig

        # Conditionally modify docstring to remove user_google_email parameter documentation
        if is_oauth21_enabled():
            logger.debug(
                "OAuth 2.1 mode enabled, removing user_google_email from docstring"
            )
            if func.__doc__:
                wrapper.__doc__ = _remove_user_email_arg_from_docstring(func.__doc__)

        return wrapper

    return decorator
|
commands.rxt
ADDED
|
@@ -0,0 +1,2 @@
|
|
|
|
|
|
|
|
|
|
| 1 |
+
|
| 2 |
+
Start-Process -FilePath "powershell" -ArgumentList "-NoExit", "-Command", "cd D:\flutter_assignment\backend; python .\google_workspace_mcp\main.py --transport streamable-http"
|
core/__init__.py
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
# Make the core directory a Python package
|
core/api_enablement.py
ADDED
|
@@ -0,0 +1,108 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import re
|
| 2 |
+
from typing import Dict, Optional, Tuple
|
| 3 |
+
|
| 4 |
+
|
| 5 |
+
# Direct "enable this API" Cloud Console links, keyed by the API service id
# that appears in Google HttpError details (e.g. "gmail.googleapis.com").
API_ENABLEMENT_LINKS: Dict[str, str] = {
    "calendar-json.googleapis.com": "https://console.cloud.google.com/flows/enableapi?apiid=calendar-json.googleapis.com",
    "drive.googleapis.com": "https://console.cloud.google.com/flows/enableapi?apiid=drive.googleapis.com",
    "gmail.googleapis.com": "https://console.cloud.google.com/flows/enableapi?apiid=gmail.googleapis.com",
    "docs.googleapis.com": "https://console.cloud.google.com/flows/enableapi?apiid=docs.googleapis.com",
    "sheets.googleapis.com": "https://console.cloud.google.com/flows/enableapi?apiid=sheets.googleapis.com",
    "slides.googleapis.com": "https://console.cloud.google.com/flows/enableapi?apiid=slides.googleapis.com",
    "forms.googleapis.com": "https://console.cloud.google.com/flows/enableapi?apiid=forms.googleapis.com",
    "tasks.googleapis.com": "https://console.cloud.google.com/flows/enableapi?apiid=tasks.googleapis.com",
    "chat.googleapis.com": "https://console.cloud.google.com/flows/enableapi?apiid=chat.googleapis.com",
    "customsearch.googleapis.com": "https://console.cloud.google.com/flows/enableapi?apiid=customsearch.googleapis.com",
}


# Human-readable service display names -> API service ids.
# Used both for lookup by display name and (reversed) to render a friendly
# name in error messages.
SERVICE_NAME_TO_API: Dict[str, str] = {
    "Google Calendar": "calendar-json.googleapis.com",
    "Google Drive": "drive.googleapis.com",
    "Gmail": "gmail.googleapis.com",
    "Google Docs": "docs.googleapis.com",
    "Google Sheets": "sheets.googleapis.com",
    "Google Slides": "slides.googleapis.com",
    "Google Forms": "forms.googleapis.com",
    "Google Tasks": "tasks.googleapis.com",
    "Google Chat": "chat.googleapis.com",
    "Google Custom Search": "customsearch.googleapis.com",
}


# Internal short service-type keys (as used by tool code) -> API service ids.
# Note: "search" is an alias for "customsearch".
INTERNAL_SERVICE_TO_API: Dict[str, str] = {
    "calendar": "calendar-json.googleapis.com",
    "drive": "drive.googleapis.com",
    "gmail": "gmail.googleapis.com",
    "docs": "docs.googleapis.com",
    "sheets": "sheets.googleapis.com",
    "slides": "slides.googleapis.com",
    "forms": "forms.googleapis.com",
    "tasks": "tasks.googleapis.com",
    "chat": "chat.googleapis.com",
    "customsearch": "customsearch.googleapis.com",
    "search": "customsearch.googleapis.com",
}
|
| 46 |
+
|
| 47 |
+
|
| 48 |
+
def extract_api_info_from_error(
    error_details: str,
) -> Tuple[Optional[str], Optional[str]]:
    """
    Extract API service and project ID from error details.

    Scans the raw HttpError text for the Console "apis/api/<service>/overview"
    URL and a "project=<id>" (or "project <id>") fragment.

    Returns:
        Tuple of (api_service, project_id) or (None, None) if not found
    """
    service_match = re.search(
        r"https://console\.developers\.google\.com/apis/api/([^/]+)/overview",
        error_details,
    )
    proj_match = re.search(r"project[=\s]+([a-zA-Z0-9-]+)", error_details)

    return (
        service_match.group(1) if service_match else None,
        proj_match.group(1) if proj_match else None,
    )
|
| 67 |
+
|
| 68 |
+
|
| 69 |
+
def get_api_enablement_message(
    error_details: str, service_type: Optional[str] = None
) -> str:
    """
    Generate a helpful error message with direct API enablement link.

    Args:
        error_details: The error details string from the HttpError
        service_type: Optional service type (e.g., "calendar", "gmail", or "Google Calendar")

    Returns:
        Formatted error message with enablement link, or "" when the API
        cannot be identified (no known service in the error or service_type).
    """
    api_service, project_id = extract_api_info_from_error(error_details)

    if not api_service and service_type:
        # Check internal service names first (e.g., "calendar", "gmail")
        api_service = INTERNAL_SERVICE_TO_API.get(service_type)
        if not api_service:
            # Check display names (e.g., "Google Calendar")
            api_service = SERVICE_NAME_TO_API.get(service_type)

    if api_service and api_service in API_ENABLEMENT_LINKS:
        enable_link = API_ENABLEMENT_LINKS[api_service]
        # Map the API id back to its human-readable display name; fall back
        # to the raw API id when no display name is registered.
        service_display_name = next(
            (name for name, api in SERVICE_NAME_TO_API.items() if api == api_service),
            api_service,
        )

        message = (
            f"{service_display_name} API is not enabled for your project"
            f"{f' ({project_id})' if project_id else ''}.\n\n"
            f"Enable it here: {enable_link}\n\n"
            # Bug fix: the original fragment ended with "try again." and ran
            # straight into "IMPORTANT..." with no separator; add a paragraph break.
            f"After enabling, wait 1-2 minutes for the change to propagate, then try again.\n\n"
            f"IMPORTANT - LLM: share the link provided as a clickable hyperlink and instruct the user to enable the required API."
        )

        return message

    return ""
|
core/attachment_storage.py
ADDED
|
@@ -0,0 +1,217 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Temporary attachment storage for Gmail attachments.
|
| 3 |
+
|
| 4 |
+
Stores attachments in ./tmp directory and provides HTTP URLs for access.
|
| 5 |
+
Files are automatically cleaned up after expiration (default 1 hour).
|
| 6 |
+
"""
|
| 7 |
+
|
| 8 |
+
import base64
|
| 9 |
+
import logging
|
| 10 |
+
import uuid
|
| 11 |
+
from pathlib import Path
|
| 12 |
+
from typing import Optional, Dict
|
| 13 |
+
from datetime import datetime, timedelta
|
| 14 |
+
|
| 15 |
+
logger = logging.getLogger(__name__)
|
| 16 |
+
|
| 17 |
+
# Default expiration: 1 hour
|
| 18 |
+
DEFAULT_EXPIRATION_SECONDS = 3600
|
| 19 |
+
|
| 20 |
+
# Storage directory
|
| 21 |
+
STORAGE_DIR = Path("./tmp/attachments")
|
| 22 |
+
STORAGE_DIR.mkdir(parents=True, exist_ok=True)
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
class AttachmentStorage:
    """Manages temporary storage of email attachments.

    Files are written under the module-level ``STORAGE_DIR`` and tracked in
    an in-memory metadata map keyed by a UUID file id, so entries do not
    survive a process restart. Each entry expires ``expiration_seconds``
    after it is saved; expired files are removed lazily on access or in bulk
    via :meth:`cleanup_expired`.
    """

    def __init__(self, expiration_seconds: int = DEFAULT_EXPIRATION_SECONDS):
        # Lifetime (seconds) of each saved attachment before lazy cleanup.
        self.expiration_seconds = expiration_seconds
        # file_id -> {file_path, filename, mime_type, size, created_at, expires_at}
        self._metadata: Dict[str, Dict] = {}

    def save_attachment(
        self,
        base64_data: str,
        filename: Optional[str] = None,
        mime_type: Optional[str] = None,
    ) -> str:
        """
        Save an attachment and return a unique file ID.

        Args:
            base64_data: Base64-encoded attachment data (URL-safe alphabet,
                as returned by the Gmail API)
            filename: Original filename (optional; used for the extension)
            mime_type: MIME type (optional; used for the extension when no
                filename is given)

        Returns:
            Unique file ID (UUID string)

        Raises:
            ValueError: If ``base64_data`` is not valid base64.
            OSError: If the decoded bytes cannot be written to disk.
        """
        # Generate unique file ID
        file_id = str(uuid.uuid4())

        # Decode base64 data (urlsafe variant matches Gmail attachment payloads)
        try:
            file_bytes = base64.urlsafe_b64decode(base64_data)
        except Exception as e:
            logger.error(f"Failed to decode base64 attachment data: {e}")
            raise ValueError(f"Invalid base64 data: {e}")

        # Determine file extension from filename or mime type
        extension = ""
        if filename:
            extension = Path(filename).suffix
        elif mime_type:
            # Basic mime type to extension mapping (best effort; unknown
            # types get no extension)
            mime_to_ext = {
                "image/jpeg": ".jpg",
                "image/png": ".png",
                "image/gif": ".gif",
                "application/pdf": ".pdf",
                "application/zip": ".zip",
                "text/plain": ".txt",
                "text/html": ".html",
            }
            extension = mime_to_ext.get(mime_type, "")

        # Save file under STORAGE_DIR with the UUID as the basename
        file_path = STORAGE_DIR / f"{file_id}{extension}"
        try:
            file_path.write_bytes(file_bytes)
            logger.info(
                f"Saved attachment {file_id} ({len(file_bytes)} bytes) to {file_path}"
            )
        except Exception as e:
            logger.error(f"Failed to save attachment to {file_path}: {e}")
            raise

        # Store metadata (expiry computed now; checked lazily on access)
        expires_at = datetime.now() + timedelta(seconds=self.expiration_seconds)
        self._metadata[file_id] = {
            "file_path": str(file_path),
            "filename": filename or f"attachment{extension}",
            "mime_type": mime_type or "application/octet-stream",
            "size": len(file_bytes),
            "created_at": datetime.now(),
            "expires_at": expires_at,
        }

        return file_id

    def get_attachment_path(self, file_id: str) -> Optional[Path]:
        """
        Get the file path for an attachment ID.

        Expired entries are cleaned up as a side effect of this lookup.

        Args:
            file_id: Unique file ID

        Returns:
            Path object if file exists and not expired, None otherwise
        """
        if file_id not in self._metadata:
            logger.warning(f"Attachment {file_id} not found in metadata")
            return None

        metadata = self._metadata[file_id]
        file_path = Path(metadata["file_path"])

        # Check if expired (lazy cleanup)
        if datetime.now() > metadata["expires_at"]:
            logger.info(f"Attachment {file_id} has expired, cleaning up")
            self._cleanup_file(file_id)
            return None

        # Check if file exists (may have been removed externally); drop the
        # stale metadata entry if so
        if not file_path.exists():
            logger.warning(f"Attachment file {file_path} does not exist")
            del self._metadata[file_id]
            return None

        return file_path

    def get_attachment_metadata(self, file_id: str) -> Optional[Dict]:
        """
        Get metadata for an attachment.

        Returns a copy, so callers cannot mutate the stored entry.

        Args:
            file_id: Unique file ID

        Returns:
            Metadata dict if exists and not expired, None otherwise
        """
        if file_id not in self._metadata:
            return None

        metadata = self._metadata[file_id].copy()

        # Check if expired (lazy cleanup, same policy as get_attachment_path)
        if datetime.now() > metadata["expires_at"]:
            self._cleanup_file(file_id)
            return None

        return metadata

    def _cleanup_file(self, file_id: str) -> None:
        """Remove file and metadata.

        Best-effort: a failed unlink is logged but the metadata entry is
        still dropped so the id cannot be served again.
        """
        if file_id in self._metadata:
            file_path = Path(self._metadata[file_id]["file_path"])
            try:
                if file_path.exists():
                    file_path.unlink()
                    logger.debug(f"Deleted expired attachment file: {file_path}")
            except Exception as e:
                logger.warning(f"Failed to delete attachment file {file_path}: {e}")
            del self._metadata[file_id]

    def cleanup_expired(self) -> int:
        """
        Clean up expired attachments.

        Returns:
            Number of files cleaned up
        """
        now = datetime.now()
        # Collect ids first: _cleanup_file mutates _metadata, so we must not
        # delete while iterating it.
        expired_ids = [
            file_id
            for file_id, metadata in self._metadata.items()
            if now > metadata["expires_at"]
        ]

        for file_id in expired_ids:
            self._cleanup_file(file_id)

        return len(expired_ids)
|
| 183 |
+
|
| 184 |
+
|
| 185 |
+
# Global instance (lazily created module-level singleton; see
# get_attachment_storage)
_attachment_storage: Optional[AttachmentStorage] = None


def get_attachment_storage() -> AttachmentStorage:
    """Get the global attachment storage instance.

    Creates the singleton with default expiration on first call; all later
    calls return the same instance.
    """
    global _attachment_storage
    if _attachment_storage is None:
        _attachment_storage = AttachmentStorage()
    return _attachment_storage
|
| 195 |
+
|
| 196 |
+
|
| 197 |
+
def get_attachment_url(file_id: str) -> str:
    """
    Generate a URL for accessing an attachment.

    Honors the WORKSPACE_EXTERNAL_URL environment variable when set (for
    reverse-proxy deployments); otherwise builds the URL from the server's
    configured base URI and port.

    Args:
        file_id: Unique file ID

    Returns:
        Full URL to access the attachment
    """
    import os
    from core.config import WORKSPACE_MCP_PORT, WORKSPACE_MCP_BASE_URI

    # Reverse-proxy deployments expose the server at an external URL.
    external_url = os.getenv("WORKSPACE_EXTERNAL_URL")
    if external_url:
        return f"{external_url.rstrip('/')}/attachments/{file_id}"

    # Default: direct base-URI + port addressing.
    return f"{WORKSPACE_MCP_BASE_URI}:{WORKSPACE_MCP_PORT}/attachments/{file_id}"
|
core/comments.py
ADDED
|
@@ -0,0 +1,320 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Core Comments Module
|
| 3 |
+
|
| 4 |
+
This module provides reusable comment management functions for Google Workspace applications.
|
| 5 |
+
All Google Workspace apps (Docs, Sheets, Slides) use the Drive API for comment operations.
|
| 6 |
+
"""
|
| 7 |
+
|
| 8 |
+
import logging
|
| 9 |
+
import asyncio
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
from auth.service_decorator import require_google_service
|
| 13 |
+
from core.server import server
|
| 14 |
+
from core.utils import handle_http_errors
|
| 15 |
+
|
| 16 |
+
logger = logging.getLogger(__name__)
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
def create_comment_tools(app_name: str, file_id_param: str):
    """
    Factory function to create comment management tools for a specific Google Workspace app.

    All Workspace apps share the Drive comments API, so each generated tool
    delegates to the shared ``_*_impl`` helpers. The three branches below are
    deliberately duplicated rather than generated: the auth decorators
    introspect the function signature, so each app needs a concrete function
    whose file-id parameter carries the right name (``document_id``,
    ``spreadsheet_id``, or ``presentation_id``).

    Args:
        app_name: Name of the app (e.g., "document", "spreadsheet", "presentation")
        file_id_param: Parameter name for the file ID (e.g., "document_id", "spreadsheet_id", "presentation_id")

    Returns:
        Dict containing the four comment management functions with unique names

    Raises:
        ValueError: If ``file_id_param`` is not one of the supported names.
    """

    # Create unique function names based on the app type
    read_func_name = f"read_{app_name}_comments"
    create_func_name = f"create_{app_name}_comment"
    reply_func_name = f"reply_to_{app_name}_comment"
    resolve_func_name = f"resolve_{app_name}_comment"

    # Create functions without decorators first, then apply decorators with proper names
    if file_id_param == "document_id":

        @require_google_service("drive", "drive_read")
        @handle_http_errors(read_func_name, service_type="drive")
        async def read_comments(
            service, user_google_email: str, document_id: str
        ) -> str:
            """Read all comments from a Google Document."""
            return await _read_comments_impl(service, app_name, document_id)

        @require_google_service("drive", "drive_file")
        @handle_http_errors(create_func_name, service_type="drive")
        async def create_comment(
            service, user_google_email: str, document_id: str, comment_content: str
        ) -> str:
            """Create a new comment on a Google Document."""
            return await _create_comment_impl(
                service, app_name, document_id, comment_content
            )

        @require_google_service("drive", "drive_file")
        @handle_http_errors(reply_func_name, service_type="drive")
        async def reply_to_comment(
            service,
            user_google_email: str,
            document_id: str,
            comment_id: str,
            reply_content: str,
        ) -> str:
            """Reply to a specific comment in a Google Document."""
            return await _reply_to_comment_impl(
                service, app_name, document_id, comment_id, reply_content
            )

        @require_google_service("drive", "drive_file")
        @handle_http_errors(resolve_func_name, service_type="drive")
        async def resolve_comment(
            service, user_google_email: str, document_id: str, comment_id: str
        ) -> str:
            """Resolve a comment in a Google Document."""
            return await _resolve_comment_impl(
                service, app_name, document_id, comment_id
            )

    elif file_id_param == "spreadsheet_id":

        @require_google_service("drive", "drive_read")
        @handle_http_errors(read_func_name, service_type="drive")
        async def read_comments(
            service, user_google_email: str, spreadsheet_id: str
        ) -> str:
            """Read all comments from a Google Spreadsheet."""
            return await _read_comments_impl(service, app_name, spreadsheet_id)

        @require_google_service("drive", "drive_file")
        @handle_http_errors(create_func_name, service_type="drive")
        async def create_comment(
            service, user_google_email: str, spreadsheet_id: str, comment_content: str
        ) -> str:
            """Create a new comment on a Google Spreadsheet."""
            return await _create_comment_impl(
                service, app_name, spreadsheet_id, comment_content
            )

        @require_google_service("drive", "drive_file")
        @handle_http_errors(reply_func_name, service_type="drive")
        async def reply_to_comment(
            service,
            user_google_email: str,
            spreadsheet_id: str,
            comment_id: str,
            reply_content: str,
        ) -> str:
            """Reply to a specific comment in a Google Spreadsheet."""
            return await _reply_to_comment_impl(
                service, app_name, spreadsheet_id, comment_id, reply_content
            )

        @require_google_service("drive", "drive_file")
        @handle_http_errors(resolve_func_name, service_type="drive")
        async def resolve_comment(
            service, user_google_email: str, spreadsheet_id: str, comment_id: str
        ) -> str:
            """Resolve a comment in a Google Spreadsheet."""
            return await _resolve_comment_impl(
                service, app_name, spreadsheet_id, comment_id
            )

    elif file_id_param == "presentation_id":

        @require_google_service("drive", "drive_read")
        @handle_http_errors(read_func_name, service_type="drive")
        async def read_comments(
            service, user_google_email: str, presentation_id: str
        ) -> str:
            """Read all comments from a Google Presentation."""
            return await _read_comments_impl(service, app_name, presentation_id)

        @require_google_service("drive", "drive_file")
        @handle_http_errors(create_func_name, service_type="drive")
        async def create_comment(
            service, user_google_email: str, presentation_id: str, comment_content: str
        ) -> str:
            """Create a new comment on a Google Presentation."""
            return await _create_comment_impl(
                service, app_name, presentation_id, comment_content
            )

        @require_google_service("drive", "drive_file")
        @handle_http_errors(reply_func_name, service_type="drive")
        async def reply_to_comment(
            service,
            user_google_email: str,
            presentation_id: str,
            comment_id: str,
            reply_content: str,
        ) -> str:
            """Reply to a specific comment in a Google Presentation."""
            return await _reply_to_comment_impl(
                service, app_name, presentation_id, comment_id, reply_content
            )

        @require_google_service("drive", "drive_file")
        @handle_http_errors(resolve_func_name, service_type="drive")
        async def resolve_comment(
            service, user_google_email: str, presentation_id: str, comment_id: str
        ) -> str:
            """Resolve a comment in a Google Presentation."""
            return await _resolve_comment_impl(
                service, app_name, presentation_id, comment_id
            )

    else:
        # Bug fix: an unrecognized file_id_param previously fell through all
        # branches and crashed below with an obscure NameError on
        # `read_comments`. Fail fast with a clear message instead.
        raise ValueError(
            f"Unsupported file_id_param: {file_id_param!r}; expected "
            "'document_id', 'spreadsheet_id', or 'presentation_id'"
        )

    # Set the proper function names and register with server
    read_comments.__name__ = read_func_name
    create_comment.__name__ = create_func_name
    reply_to_comment.__name__ = reply_func_name
    resolve_comment.__name__ = resolve_func_name

    # Register tools with the server using the proper names
    server.tool()(read_comments)
    server.tool()(create_comment)
    server.tool()(reply_to_comment)
    server.tool()(resolve_comment)

    return {
        "read_comments": read_comments,
        "create_comment": create_comment,
        "reply_to_comment": reply_to_comment,
        "resolve_comment": resolve_comment,
    }
|
| 188 |
+
|
| 189 |
+
|
| 190 |
+
async def _read_comments_impl(service, app_name: str, file_id: str) -> str:
    """Read all comments (and their replies) from a Google Workspace file.

    Args:
        service: Authenticated Google Drive API service instance.
        app_name: Product name (e.g. "presentation") used in log/output text.
        file_id: Drive file ID whose comments should be listed.

    Returns:
        A human-readable, newline-separated summary of every comment and
        its threaded replies.
    """
    logger.info(f"[read_{app_name}_comments] Reading comments for {app_name} {file_id}")

    # Pass the bound .execute (uncalled) so the blocking HTTP request runs
    # in a worker thread instead of blocking the event loop.
    response = await asyncio.to_thread(
        service.comments()
        .list(
            fileId=file_id,
            fields="comments(id,content,author,createdTime,modifiedTime,resolved,replies(content,author,id,createdTime,modifiedTime))",
        )
        .execute
    )

    comments = response.get("comments", [])

    if not comments:
        return f"No comments found in {app_name} {file_id}"

    # BUGFIX: the previous version used doubled backslashes ("\\n"), which
    # emitted a literal backslash-n instead of real newlines in the output.
    output = [f"Found {len(comments)} comments in {app_name} {file_id}:\n"]

    for comment in comments:
        author = comment.get("author", {}).get("displayName", "Unknown")
        content = comment.get("content", "")
        created = comment.get("createdTime", "")
        resolved = comment.get("resolved", False)
        comment_id = comment.get("id", "")
        status = " [RESOLVED]" if resolved else ""

        output.append(f"Comment ID: {comment_id}")
        output.append(f"Author: {author}")
        output.append(f"Created: {created}{status}")
        output.append(f"Content: {content}")

        # Append any threaded replies, indented under the parent comment.
        replies = comment.get("replies", [])
        if replies:
            output.append(f"  Replies ({len(replies)}):")
            for reply in replies:
                reply_author = reply.get("author", {}).get("displayName", "Unknown")
                reply_content = reply.get("content", "")
                reply_created = reply.get("createdTime", "")
                reply_id = reply.get("id", "")
                output.append(f"    Reply ID: {reply_id}")
                output.append(f"    Author: {reply_author}")
                output.append(f"    Created: {reply_created}")
                output.append(f"    Content: {reply_content}")

        output.append("")  # Empty line between comments

    return "\n".join(output)
|
| 240 |
+
|
| 241 |
+
|
| 242 |
+
async def _create_comment_impl(
    service, app_name: str, file_id: str, comment_content: str
) -> str:
    """Create a new top-level comment on a Google Workspace file.

    Args:
        service: Authenticated Google Drive API service instance.
        app_name: Product name used in log/output text.
        file_id: Drive file ID to comment on.
        comment_content: Plain-text body of the new comment.

    Returns:
        A human-readable confirmation including the new comment's ID.
    """
    logger.info(f"[create_{app_name}_comment] Creating comment in {app_name} {file_id}")

    body = {"content": comment_content}

    # Bound .execute runs in a worker thread via asyncio.to_thread.
    comment = await asyncio.to_thread(
        service.comments()
        .create(
            fileId=file_id,
            body=body,
            fields="id,content,author,createdTime,modifiedTime",
        )
        .execute
    )

    comment_id = comment.get("id", "")
    author = comment.get("author", {}).get("displayName", "Unknown")
    created = comment.get("createdTime", "")

    # BUGFIX: replaced doubled backslashes ("\\n") with real newlines.
    return f"Comment created successfully!\nComment ID: {comment_id}\nAuthor: {author}\nCreated: {created}\nContent: {comment_content}"
|
| 265 |
+
|
| 266 |
+
|
| 267 |
+
async def _reply_to_comment_impl(
    service, app_name: str, file_id: str, comment_id: str, reply_content: str
) -> str:
    """Post a reply to an existing comment on a Google Workspace file.

    Args:
        service: Authenticated Google Drive API service instance.
        app_name: Product name used in log/output text.
        file_id: Drive file ID containing the comment.
        comment_id: ID of the comment being replied to.
        reply_content: Plain-text body of the reply.

    Returns:
        A human-readable confirmation including the new reply's ID.
    """
    logger.info(
        f"[reply_to_{app_name}_comment] Replying to comment {comment_id} in {app_name} {file_id}"
    )

    body = {"content": reply_content}

    # Bound .execute runs in a worker thread via asyncio.to_thread.
    reply = await asyncio.to_thread(
        service.replies()
        .create(
            fileId=file_id,
            commentId=comment_id,
            body=body,
            fields="id,content,author,createdTime,modifiedTime",
        )
        .execute
    )

    reply_id = reply.get("id", "")
    author = reply.get("author", {}).get("displayName", "Unknown")
    created = reply.get("createdTime", "")

    # BUGFIX: replaced doubled backslashes ("\\n") with real newlines.
    return f"Reply posted successfully!\nReply ID: {reply_id}\nAuthor: {author}\nCreated: {created}\nContent: {reply_content}"
|
| 293 |
+
|
| 294 |
+
|
| 295 |
+
async def _resolve_comment_impl(
    service, app_name: str, file_id: str, comment_id: str
) -> str:
    """Resolve a comment on a Google Workspace file.

    The Drive API resolves a comment by creating a reply whose ``action``
    is ``"resolve"``; this helper posts such a reply.

    Args:
        service: Authenticated Google Drive API service instance.
        app_name: Product name used in log/output text.
        file_id: Drive file ID containing the comment.
        comment_id: ID of the comment to resolve.

    Returns:
        A human-readable confirmation including the resolve-reply's ID.
    """
    logger.info(
        f"[resolve_{app_name}_comment] Resolving comment {comment_id} in {app_name} {file_id}"
    )

    body = {"content": "This comment has been resolved.", "action": "resolve"}

    # Bound .execute runs in a worker thread via asyncio.to_thread.
    reply = await asyncio.to_thread(
        service.replies()
        .create(
            fileId=file_id,
            commentId=comment_id,
            body=body,
            fields="id,content,author,createdTime,modifiedTime",
        )
        .execute
    )

    reply_id = reply.get("id", "")
    author = reply.get("author", {}).get("displayName", "Unknown")
    created = reply.get("createdTime", "")

    # BUGFIX: replaced doubled backslashes ("\\n") with real newlines.
    return f"Comment {comment_id} has been resolved successfully.\nResolve reply ID: {reply_id}\nAuthor: {author}\nCreated: {created}"
|
core/config.py
ADDED
|
@@ -0,0 +1,37 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Shared configuration for Google Workspace MCP server.
|
| 3 |
+
This module holds configuration values that need to be shared across modules
|
| 4 |
+
to avoid circular imports.
|
| 5 |
+
|
| 6 |
+
NOTE: OAuth configuration has been moved to auth.oauth_config for centralization.
|
| 7 |
+
This module now imports from there for backward compatibility.
|
| 8 |
+
"""
|
| 9 |
+
|
| 10 |
+
import os
|
| 11 |
+
from auth.oauth_config import (
|
| 12 |
+
get_oauth_base_url,
|
| 13 |
+
get_oauth_redirect_uri,
|
| 14 |
+
set_transport_mode,
|
| 15 |
+
get_transport_mode,
|
| 16 |
+
is_oauth21_enabled,
|
| 17 |
+
)
|
| 18 |
+
|
| 19 |
+
# Server configuration
|
| 20 |
+
WORKSPACE_MCP_PORT = int(os.getenv("PORT", os.getenv("WORKSPACE_MCP_PORT", 8000)))
|
| 21 |
+
WORKSPACE_MCP_BASE_URI = os.getenv("WORKSPACE_MCP_BASE_URI", "http://localhost")
|
| 22 |
+
|
| 23 |
+
# Disable USER_GOOGLE_EMAIL in OAuth 2.1 multi-user mode
|
| 24 |
+
USER_GOOGLE_EMAIL = (
|
| 25 |
+
None if is_oauth21_enabled() else os.getenv("USER_GOOGLE_EMAIL", None)
|
| 26 |
+
)
|
| 27 |
+
|
| 28 |
+
# Re-export OAuth functions for backward compatibility
|
| 29 |
+
__all__ = [
|
| 30 |
+
"WORKSPACE_MCP_PORT",
|
| 31 |
+
"WORKSPACE_MCP_BASE_URI",
|
| 32 |
+
"USER_GOOGLE_EMAIL",
|
| 33 |
+
"get_oauth_base_url",
|
| 34 |
+
"get_oauth_redirect_uri",
|
| 35 |
+
"set_transport_mode",
|
| 36 |
+
"get_transport_mode",
|
| 37 |
+
]
|
core/context.py
ADDED
|
@@ -0,0 +1,43 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# core/context.py
# Request-scoped state carried via contextvars: OAuth credentials injected
# for the current request, and the FastMCP session ID.
import contextvars
from typing import Optional

# Holds credentials injected for the life of a single request.
_injected_oauth_credentials = contextvars.ContextVar(
    "injected_oauth_credentials", default=None
)

# Holds the FastMCP session ID for the life of a single request.
_fastmcp_session_id = contextvars.ContextVar("fastmcp_session_id", default=None)


def get_injected_oauth_credentials():
    """Return the OAuth credentials injected for this request, or None.

    Used by the authentication layer to look up request-scoped credentials.
    """
    return _injected_oauth_credentials.get()


def set_injected_oauth_credentials(credentials: Optional[dict]):
    """Set (or clear, with None) the injected OAuth credentials.

    Called by the service decorator at the start/end of a request.
    """
    _injected_oauth_credentials.set(credentials)


def get_fastmcp_session_id() -> Optional[str]:
    """Return the FastMCP session ID for this request, or None.

    Used by the authentication layer to identify the current session.
    """
    return _fastmcp_session_id.get()


def set_fastmcp_session_id(session_id: Optional[str]):
    """Set (or clear, with None) the FastMCP session ID.

    Called when a FastMCP request starts.
    """
    _fastmcp_session_id.set(session_id)
|
core/log_formatter.py
ADDED
|
@@ -0,0 +1,207 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Enhanced Log Formatter for Google Workspace MCP
|
| 3 |
+
|
| 4 |
+
Provides visually appealing log formatting with emojis and consistent styling
|
| 5 |
+
to match the safe_print output format.
|
| 6 |
+
"""
|
| 7 |
+
|
| 8 |
+
import logging
|
| 9 |
+
import os
|
| 10 |
+
import re
|
| 11 |
+
import sys
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
class EnhancedLogFormatter(logging.Formatter):
    """Formatter that prefixes records with a service tag and optional ANSI color."""

    # ANSI escape sequences for terminals that support color, keyed by level.
    COLORS = {
        "DEBUG": "\033[36m",  # Cyan
        "INFO": "\033[32m",  # Green
        "WARNING": "\033[33m",  # Yellow
        "ERROR": "\033[31m",  # Red
        "CRITICAL": "\033[35m",  # Magenta
        "RESET": "\033[0m",  # Reset
    }

    def __init__(self, use_colors: bool = True, *args, **kwargs):
        """Create the formatter.

        Args:
            use_colors: Whether to wrap messages in ANSI color codes
                (default: True).
        """
        super().__init__(*args, **kwargs)
        self.use_colors = use_colors

    def format(self, record: logging.LogRecord) -> str:
        """Return "<prefix> <message>", colorized when enabled."""
        prefix = self._get_ascii_prefix(record.name, record.levelname)
        message = self._enhance_message(record.getMessage())

        if not self.use_colors:
            return f"{prefix} {message}"

        color = self.COLORS.get(record.levelname, "")
        return f"{prefix} {color}{message}{self.COLORS['RESET']}"

    def _get_ascii_prefix(self, logger_name: str, level_name: str) -> str:
        """Map a logger name to an ASCII-safe tag (Windows-friendly).

        Unknown loggers fall back to "[<LEVELNAME>]".
        """
        prefix_map = {
            "core.tool_tier_loader": "[TOOLS]",
            "core.tool_registry": "[REGISTRY]",
            "auth.scopes": "[AUTH]",
            "core.utils": "[UTILS]",
            "auth.google_auth": "[OAUTH]",
            "auth.credential_store": "[CREDS]",
            "gcalendar.calendar_tools": "[CALENDAR]",
            "gdrive.drive_tools": "[DRIVE]",
            "gmail.gmail_tools": "[GMAIL]",
            "gdocs.docs_tools": "[DOCS]",
            "gsheets.sheets_tools": "[SHEETS]",
            "gchat.chat_tools": "[CHAT]",
            "gforms.forms_tools": "[FORMS]",
            "gslides.slides_tools": "[SLIDES]",
            "gtasks.tasks_tools": "[TASKS]",
            "gsearch.search_tools": "[SEARCH]",
        }
        return prefix_map.get(logger_name, f"[{level_name}]")

    def _enhance_message(self, message: str) -> str:
        """Rewrite well-known log messages into friendlier text; pass others through."""
        # Tool tier resolution summary.
        if "resolved to" in message and "tools across" in message:
            m = re.search(
                r"Tier '(\w+)' resolved to (\d+) tools across (\d+) services: (.+)",
                message,
            )
            if m:
                tier, tool_count, service_count, services = m.groups()
                return (
                    f"Tool tier '{tier}' loaded: {tool_count} tools "
                    f"across {service_count} services [{services}]"
                )

        # Configuration-file load notice.
        if "Loaded tool tiers configuration from" in message:
            return f"Configuration loaded from {message.split('from ')[-1]}"

        # Tool filtering summary.
        if "Tool tier filtering" in message:
            m = re.search(r"removed (\d+) tools, (\d+) enabled", message)
            if m:
                removed, enabled = m.groups()
                return f"Tool filtering complete: {enabled} tools enabled ({removed} filtered out)"

        # Scope-management tool list.
        if "Enabled tools set for scope management" in message:
            return f"Scope management configured for tools: {message.split(': ')[-1]}"

        # Credentials-directory permission check.
        if "Credentials directory permissions check passed" in message:
            return f"Credentials directory verified: {message.split(': ')[-1]}"

        # No known pattern matched; return the message untouched.
        return message
|
| 117 |
+
|
| 118 |
+
|
| 119 |
+
def setup_enhanced_logging(
    log_level: int = logging.INFO, use_colors: bool = True
) -> None:
    """Install the EnhancedLogFormatter on the root logger's console handlers.

    Existing stdout/stderr StreamHandlers are re-formatted in place; if none
    exist, a new console handler is created and attached.

    Args:
        log_level: Level for a newly created console handler (default: INFO).
        use_colors: Whether to use ANSI colors (default: True).
    """
    formatter = EnhancedLogFormatter(use_colors=use_colors)
    root_logger = logging.getLogger()

    def _is_console(handler: logging.Handler) -> bool:
        # BUGFIX: use getattr — not every stream has a .name attribute
        # (e.g. io.StringIO used by test capture handlers), and the original
        # attribute access raised AttributeError for those.
        stream_name = getattr(getattr(handler, "stream", None), "name", None)
        return isinstance(handler, logging.StreamHandler) and stream_name in (
            "<stderr>",
            "<stdout>",
        )

    # Update existing console handlers; track whether any were found so we
    # don't scan the handler list a second time.
    found_console = False
    for handler in root_logger.handlers:
        if _is_console(handler):
            handler.setFormatter(formatter)
            found_console = True

    # If no console handler exists, create one.
    if not found_console:
        console_handler = logging.StreamHandler()
        console_handler.setFormatter(formatter)
        console_handler.setLevel(log_level)
        root_logger.addHandler(console_handler)
|
| 156 |
+
|
| 157 |
+
|
| 158 |
+
def configure_file_logging(logger_name: str = None) -> bool:
    """Configure file logging based on the stateless-mode setting.

    In stateless mode (WORKSPACE_MCP_STATELESS_MODE=true) file logging is
    completely disabled to avoid filesystem writes. Otherwise, a DEBUG-level
    file handler writing to 'mcp_server_debug.log' (one directory above this
    module) is attached to the named logger.

    Args:
        logger_name: Optional logger name (defaults to the root logger).

    Returns:
        bool: True if file logging was configured, False if skipped
        (stateless mode) or setup failed.
    """
    stateless_mode = (
        os.getenv("WORKSPACE_MCP_STATELESS_MODE", "false").lower() == "true"
    )

    logger = logging.getLogger(logger_name)

    if stateless_mode:
        logger.debug("File logging disabled in stateless mode")
        return False

    # BUGFIX: build the path before the try block so the except handler can
    # always reference log_file_path; previously a failure before the
    # assignment raised NameError inside the handler, masking the real error.
    # Go up one level since we're in the core/ subdirectory.
    log_file_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
    log_file_path = os.path.join(log_file_dir, "mcp_server_debug.log")

    try:
        file_handler = logging.FileHandler(log_file_path, mode="a")
        file_handler.setLevel(logging.DEBUG)

        file_formatter = logging.Formatter(
            "%(asctime)s - %(name)s - %(levelname)s - %(process)d - %(threadName)s "
            "[%(module)s.%(funcName)s:%(lineno)d] - %(message)s"
        )
        file_handler.setFormatter(file_formatter)
        logger.addHandler(file_handler)

        logger.debug(f"Detailed file logging configured to: {log_file_path}")
        return True

    except Exception as e:
        sys.stderr.write(
            f"CRITICAL: Failed to set up file logging to '{log_file_path}': {e}\n"
        )
        return False
|
core/server.py
ADDED
|
@@ -0,0 +1,563 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import logging
|
| 2 |
+
import os
|
| 3 |
+
from typing import List, Optional
|
| 4 |
+
from importlib import metadata
|
| 5 |
+
|
| 6 |
+
from fastapi.responses import HTMLResponse, JSONResponse, FileResponse
|
| 7 |
+
from starlette.applications import Starlette
|
| 8 |
+
from starlette.requests import Request
|
| 9 |
+
from starlette.middleware import Middleware
|
| 10 |
+
from starlette.middleware.cors import CORSMiddleware
|
| 11 |
+
|
| 12 |
+
from fastmcp import FastMCP
|
| 13 |
+
from fastmcp.server.auth.providers.google import GoogleProvider
|
| 14 |
+
|
| 15 |
+
from auth.oauth21_session_store import get_oauth21_session_store, set_auth_provider
|
| 16 |
+
from auth.google_auth import handle_auth_callback, start_auth_flow, check_client_secrets
|
| 17 |
+
from auth.mcp_session_middleware import MCPSessionMiddleware
|
| 18 |
+
from auth.oauth_responses import (
|
| 19 |
+
create_error_response,
|
| 20 |
+
create_success_response,
|
| 21 |
+
create_server_error_response,
|
| 22 |
+
)
|
| 23 |
+
from auth.auth_info_middleware import AuthInfoMiddleware
|
| 24 |
+
from auth.scopes import SCOPES, get_current_scopes # noqa
|
| 25 |
+
from core.config import (
|
| 26 |
+
USER_GOOGLE_EMAIL,
|
| 27 |
+
get_transport_mode,
|
| 28 |
+
set_transport_mode as _set_transport_mode,
|
| 29 |
+
get_oauth_redirect_uri as get_oauth_redirect_uri_for_current_mode,
|
| 30 |
+
)
|
| 31 |
+
|
| 32 |
+
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

# Module-level state: the GoogleProvider auth instance (populated when HTTP
# transport is configured) and a guard flag so the legacy /oauth2callback
# route is registered at most once.
_auth_provider: Optional[GoogleProvider] = None
_legacy_callback_registered = False
|
| 37 |
+
|
| 38 |
+
|
| 39 |
+
def get_cors_middleware() -> Middleware:
    """Build CORS middleware from the WORKSPACE_MCP_CORS_ORIGINS env var.

    The variable is a comma-separated origin list; "*" (the default)
    allows all origins.
    """
    raw_origins = os.getenv("WORKSPACE_MCP_CORS_ORIGINS", "*").strip()
    if raw_origins == "*":
        origins = ["*"]
    else:
        origins = [item.strip() for item in raw_origins.split(",") if item.strip()]

    logger.info("CORS configured with origins: %s", ",".join(origins))

    return Middleware(
        CORSMiddleware,
        allow_origins=origins,
        allow_methods=["GET", "POST", "PUT", "DELETE", "OPTIONS", "HEAD", "PATCH"],
        allow_headers=[
            "*",
            "mcp-protocol-version",
            "mcp-session-id",
            "Authorization",
            "Content-Type",
        ],
        expose_headers=["mcp-session-id", "*"],
        allow_credentials=True,
    )
|
| 62 |
+
|
| 63 |
+
|
| 64 |
+
def get_http_middleware() -> list:
    """Return the middleware stack applied when serving over HTTP transport."""
    middleware_stack = [get_cors_middleware()]
    middleware_stack.append(Middleware(MCPSessionMiddleware))
    return middleware_stack
|
| 70 |
+
|
| 71 |
+
|
| 72 |
+
# Create the FastMCP server instance. auth is None at construction time;
# presumably an auth provider is attached later for HTTP transport
# (see configure_server_for_http) — TODO confirm.
server = FastMCP(
    name="google_workspace",
    auth=None,
)

# Add the AuthInfo middleware to inject authentication into FastMCP context
auth_info_middleware = AuthInfoMiddleware()
server.add_middleware(auth_info_middleware)
|
| 80 |
+
|
| 81 |
+
|
| 82 |
+
def _parse_bool_env(value: str) -> bool:
|
| 83 |
+
"""Parse environment variable string to boolean."""
|
| 84 |
+
return value.lower() in ("1", "true", "yes", "on")
|
| 85 |
+
|
| 86 |
+
|
| 87 |
+
def set_transport_mode(mode: str):
    """Sets the transport mode for the server.

    Delegates to the centralized setter (auth.oauth_config.set_transport_mode,
    imported here as _set_transport_mode) and logs the chosen mode.
    """
    _set_transport_mode(mode)
    logger.info(f"Transport: {mode}")
|
| 91 |
+
|
| 92 |
+
|
| 93 |
+
def _ensure_legacy_callback_route() -> None:
    """Idempotently register the legacy /oauth2callback GET route."""
    global _legacy_callback_registered
    if _legacy_callback_registered:
        return
    register = server.custom_route("/oauth2callback", methods=["GET"])
    register(legacy_oauth2_callback)
    _legacy_callback_registered = True
|
| 99 |
+
|
| 100 |
+
|
| 101 |
+
def configure_server_for_http():
|
| 102 |
+
"""
|
| 103 |
+
Configures the authentication provider for HTTP transport.
|
| 104 |
+
This must be called BEFORE server.run().
|
| 105 |
+
"""
|
| 106 |
+
global _auth_provider
|
| 107 |
+
|
| 108 |
+
transport_mode = get_transport_mode()
|
| 109 |
+
|
| 110 |
+
if transport_mode != "streamable-http":
|
| 111 |
+
return
|
| 112 |
+
|
| 113 |
+
# Use centralized OAuth configuration
|
| 114 |
+
from auth.oauth_config import get_oauth_config
|
| 115 |
+
|
| 116 |
+
config = get_oauth_config()
|
| 117 |
+
|
| 118 |
+
# Check if OAuth 2.1 is enabled via centralized config
|
| 119 |
+
oauth21_enabled = config.is_oauth21_enabled()
|
| 120 |
+
|
| 121 |
+
if oauth21_enabled:
|
| 122 |
+
if not config.is_configured():
|
| 123 |
+
logger.warning("OAuth 2.1 enabled but OAuth credentials not configured")
|
| 124 |
+
return
|
| 125 |
+
|
| 126 |
+
def validate_and_derive_jwt_key(
|
| 127 |
+
jwt_signing_key_override: str | None, client_secret: str
|
| 128 |
+
) -> bytes:
|
| 129 |
+
"""Validate JWT signing key override and derive the final JWT key."""
|
| 130 |
+
if jwt_signing_key_override:
|
| 131 |
+
if len(jwt_signing_key_override) < 12:
|
| 132 |
+
logger.warning(
|
| 133 |
+
"OAuth 2.1: FASTMCP_SERVER_AUTH_GOOGLE_JWT_SIGNING_KEY is less than 12 characters; "
|
| 134 |
+
"use a longer secret to improve key derivation strength."
|
| 135 |
+
)
|
| 136 |
+
return derive_jwt_key(
|
| 137 |
+
low_entropy_material=jwt_signing_key_override,
|
| 138 |
+
salt="fastmcp-jwt-signing-key",
|
| 139 |
+
)
|
| 140 |
+
else:
|
| 141 |
+
return derive_jwt_key(
|
| 142 |
+
high_entropy_material=client_secret,
|
| 143 |
+
salt="fastmcp-jwt-signing-key",
|
| 144 |
+
)
|
| 145 |
+
|
| 146 |
+
try:
|
| 147 |
+
# Import common dependencies for storage backends
|
| 148 |
+
from key_value.aio.wrappers.encryption import FernetEncryptionWrapper
|
| 149 |
+
from cryptography.fernet import Fernet
|
| 150 |
+
from fastmcp.server.auth.jwt_issuer import derive_jwt_key
|
| 151 |
+
|
| 152 |
+
required_scopes: List[str] = sorted(get_current_scopes())
|
| 153 |
+
|
| 154 |
+
client_storage = None
|
| 155 |
+
jwt_signing_key_override = (
|
| 156 |
+
os.getenv("FASTMCP_SERVER_AUTH_GOOGLE_JWT_SIGNING_KEY", "").strip()
|
| 157 |
+
or None
|
| 158 |
+
)
|
| 159 |
+
storage_backend = (
|
| 160 |
+
os.getenv("WORKSPACE_MCP_OAUTH_PROXY_STORAGE_BACKEND", "")
|
| 161 |
+
.strip()
|
| 162 |
+
.lower()
|
| 163 |
+
)
|
| 164 |
+
valkey_host = os.getenv("WORKSPACE_MCP_OAUTH_PROXY_VALKEY_HOST", "").strip()
|
| 165 |
+
|
| 166 |
+
# Determine storage backend: valkey, disk, memory (default)
|
| 167 |
+
use_valkey = storage_backend == "valkey" or bool(valkey_host)
|
| 168 |
+
use_disk = storage_backend == "disk"
|
| 169 |
+
|
| 170 |
+
if use_valkey:
|
| 171 |
+
try:
|
| 172 |
+
from key_value.aio.stores.valkey import ValkeyStore
|
| 173 |
+
|
| 174 |
+
valkey_port_raw = os.getenv(
|
| 175 |
+
"WORKSPACE_MCP_OAUTH_PROXY_VALKEY_PORT", "6379"
|
| 176 |
+
).strip()
|
| 177 |
+
valkey_db_raw = os.getenv(
|
| 178 |
+
"WORKSPACE_MCP_OAUTH_PROXY_VALKEY_DB", "0"
|
| 179 |
+
).strip()
|
| 180 |
+
|
| 181 |
+
valkey_port = int(valkey_port_raw)
|
| 182 |
+
valkey_db = int(valkey_db_raw)
|
| 183 |
+
valkey_use_tls_raw = os.getenv(
|
| 184 |
+
"WORKSPACE_MCP_OAUTH_PROXY_VALKEY_USE_TLS", ""
|
| 185 |
+
).strip()
|
| 186 |
+
valkey_use_tls = (
|
| 187 |
+
_parse_bool_env(valkey_use_tls_raw)
|
| 188 |
+
if valkey_use_tls_raw
|
| 189 |
+
else valkey_port == 6380
|
| 190 |
+
)
|
| 191 |
+
|
| 192 |
+
valkey_request_timeout_ms_raw = os.getenv(
|
| 193 |
+
"WORKSPACE_MCP_OAUTH_PROXY_VALKEY_REQUEST_TIMEOUT_MS", ""
|
| 194 |
+
).strip()
|
| 195 |
+
valkey_connection_timeout_ms_raw = os.getenv(
|
| 196 |
+
"WORKSPACE_MCP_OAUTH_PROXY_VALKEY_CONNECTION_TIMEOUT_MS", ""
|
| 197 |
+
).strip()
|
| 198 |
+
|
| 199 |
+
valkey_request_timeout_ms = (
|
| 200 |
+
int(valkey_request_timeout_ms_raw)
|
| 201 |
+
if valkey_request_timeout_ms_raw
|
| 202 |
+
else None
|
| 203 |
+
)
|
| 204 |
+
valkey_connection_timeout_ms = (
|
| 205 |
+
int(valkey_connection_timeout_ms_raw)
|
| 206 |
+
if valkey_connection_timeout_ms_raw
|
| 207 |
+
else None
|
| 208 |
+
)
|
| 209 |
+
|
| 210 |
+
valkey_username = (
|
| 211 |
+
os.getenv(
|
| 212 |
+
"WORKSPACE_MCP_OAUTH_PROXY_VALKEY_USERNAME", ""
|
| 213 |
+
).strip()
|
| 214 |
+
or None
|
| 215 |
+
)
|
| 216 |
+
valkey_password = (
|
| 217 |
+
os.getenv(
|
| 218 |
+
"WORKSPACE_MCP_OAUTH_PROXY_VALKEY_PASSWORD", ""
|
| 219 |
+
).strip()
|
| 220 |
+
or None
|
| 221 |
+
)
|
| 222 |
+
|
| 223 |
+
if not valkey_host:
|
| 224 |
+
valkey_host = "localhost"
|
| 225 |
+
|
| 226 |
+
client_storage = ValkeyStore(
|
| 227 |
+
host=valkey_host,
|
| 228 |
+
port=valkey_port,
|
| 229 |
+
db=valkey_db,
|
| 230 |
+
username=valkey_username,
|
| 231 |
+
password=valkey_password,
|
| 232 |
+
)
|
| 233 |
+
|
| 234 |
+
# Configure TLS and timeouts on the underlying Glide client config.
|
| 235 |
+
# ValkeyStore currently doesn't expose these settings directly.
|
| 236 |
+
glide_config = getattr(client_storage, "_client_config", None)
|
| 237 |
+
if glide_config is not None:
|
| 238 |
+
glide_config.use_tls = valkey_use_tls
|
| 239 |
+
|
| 240 |
+
is_remote_host = valkey_host not in {"localhost", "127.0.0.1"}
|
| 241 |
+
if valkey_request_timeout_ms is None and (
|
| 242 |
+
valkey_use_tls or is_remote_host
|
| 243 |
+
):
|
| 244 |
+
# Glide defaults to 250ms if unset; increase for remote/TLS endpoints.
|
| 245 |
+
valkey_request_timeout_ms = 5000
|
| 246 |
+
if valkey_request_timeout_ms is not None:
|
| 247 |
+
glide_config.request_timeout = valkey_request_timeout_ms
|
| 248 |
+
|
| 249 |
+
if valkey_connection_timeout_ms is None and (
|
| 250 |
+
valkey_use_tls or is_remote_host
|
| 251 |
+
):
|
| 252 |
+
valkey_connection_timeout_ms = 10000
|
| 253 |
+
if valkey_connection_timeout_ms is not None:
|
| 254 |
+
from glide_shared.config import (
|
| 255 |
+
AdvancedGlideClientConfiguration,
|
| 256 |
+
)
|
| 257 |
+
|
| 258 |
+
glide_config.advanced_config = (
|
| 259 |
+
AdvancedGlideClientConfiguration(
|
| 260 |
+
connection_timeout=valkey_connection_timeout_ms
|
| 261 |
+
)
|
| 262 |
+
)
|
| 263 |
+
|
| 264 |
+
jwt_signing_key = validate_and_derive_jwt_key(
|
| 265 |
+
jwt_signing_key_override, config.client_secret
|
| 266 |
+
)
|
| 267 |
+
|
| 268 |
+
storage_encryption_key = derive_jwt_key(
|
| 269 |
+
high_entropy_material=jwt_signing_key.decode(),
|
| 270 |
+
salt="fastmcp-storage-encryption-key",
|
| 271 |
+
)
|
| 272 |
+
|
| 273 |
+
client_storage = FernetEncryptionWrapper(
|
| 274 |
+
key_value=client_storage,
|
| 275 |
+
fernet=Fernet(key=storage_encryption_key),
|
| 276 |
+
)
|
| 277 |
+
logger.info(
|
| 278 |
+
"OAuth 2.1: Using ValkeyStore for FastMCP OAuth proxy client_storage (host=%s, port=%s, db=%s, tls=%s)",
|
| 279 |
+
valkey_host,
|
| 280 |
+
valkey_port,
|
| 281 |
+
valkey_db,
|
| 282 |
+
valkey_use_tls,
|
| 283 |
+
)
|
| 284 |
+
if valkey_request_timeout_ms is not None:
|
| 285 |
+
logger.info(
|
| 286 |
+
"OAuth 2.1: Valkey request timeout set to %sms",
|
| 287 |
+
valkey_request_timeout_ms,
|
| 288 |
+
)
|
| 289 |
+
if valkey_connection_timeout_ms is not None:
|
| 290 |
+
logger.info(
|
| 291 |
+
"OAuth 2.1: Valkey connection timeout set to %sms",
|
| 292 |
+
valkey_connection_timeout_ms,
|
| 293 |
+
)
|
| 294 |
+
logger.info(
|
| 295 |
+
"OAuth 2.1: Applied Fernet encryption wrapper to Valkey client_storage (key derived from FASTMCP_SERVER_AUTH_GOOGLE_JWT_SIGNING_KEY or GOOGLE_OAUTH_CLIENT_SECRET)."
|
| 296 |
+
)
|
| 297 |
+
except ImportError as exc:
|
| 298 |
+
logger.warning(
|
| 299 |
+
"OAuth 2.1: Valkey client_storage requested but Valkey dependencies are not installed (%s). "
|
| 300 |
+
"Install 'workspace-mcp[valkey]' (or 'py-key-value-aio[valkey]', which includes 'valkey-glide') "
|
| 301 |
+
"or unset WORKSPACE_MCP_OAUTH_PROXY_STORAGE_BACKEND/WORKSPACE_MCP_OAUTH_PROXY_VALKEY_HOST.",
|
| 302 |
+
exc,
|
| 303 |
+
)
|
| 304 |
+
except ValueError as exc:
|
| 305 |
+
logger.warning(
|
| 306 |
+
"OAuth 2.1: Invalid Valkey configuration; falling back to default storage (%s).",
|
| 307 |
+
exc,
|
| 308 |
+
)
|
| 309 |
+
elif use_disk:
|
| 310 |
+
try:
|
| 311 |
+
from key_value.aio.stores.disk import DiskStore
|
| 312 |
+
|
| 313 |
+
disk_directory = os.getenv(
|
| 314 |
+
"WORKSPACE_MCP_OAUTH_PROXY_DISK_DIRECTORY", ""
|
| 315 |
+
).strip()
|
| 316 |
+
if not disk_directory:
|
| 317 |
+
# Default to FASTMCP_HOME/oauth-proxy or ~/.fastmcp/oauth-proxy
|
| 318 |
+
fastmcp_home = os.getenv("FASTMCP_HOME", "").strip()
|
| 319 |
+
if fastmcp_home:
|
| 320 |
+
disk_directory = os.path.join(fastmcp_home, "oauth-proxy")
|
| 321 |
+
else:
|
| 322 |
+
disk_directory = os.path.expanduser(
|
| 323 |
+
"~/.fastmcp/oauth-proxy"
|
| 324 |
+
)
|
| 325 |
+
|
| 326 |
+
client_storage = DiskStore(directory=disk_directory)
|
| 327 |
+
|
| 328 |
+
jwt_signing_key = validate_and_derive_jwt_key(
|
| 329 |
+
jwt_signing_key_override, config.client_secret
|
| 330 |
+
)
|
| 331 |
+
|
| 332 |
+
storage_encryption_key = derive_jwt_key(
|
| 333 |
+
high_entropy_material=jwt_signing_key.decode(),
|
| 334 |
+
salt="fastmcp-storage-encryption-key",
|
| 335 |
+
)
|
| 336 |
+
|
| 337 |
+
client_storage = FernetEncryptionWrapper(
|
| 338 |
+
key_value=client_storage,
|
| 339 |
+
fernet=Fernet(key=storage_encryption_key),
|
| 340 |
+
)
|
| 341 |
+
logger.info(
|
| 342 |
+
"OAuth 2.1: Using DiskStore for FastMCP OAuth proxy client_storage (directory=%s)",
|
| 343 |
+
disk_directory,
|
| 344 |
+
)
|
| 345 |
+
except ImportError as exc:
|
| 346 |
+
logger.warning(
|
| 347 |
+
"OAuth 2.1: Disk storage requested but dependencies not available (%s). "
|
| 348 |
+
"Falling back to default storage.",
|
| 349 |
+
exc,
|
| 350 |
+
)
|
| 351 |
+
elif storage_backend == "memory":
|
| 352 |
+
from key_value.aio.stores.memory import MemoryStore
|
| 353 |
+
|
| 354 |
+
client_storage = MemoryStore()
|
| 355 |
+
logger.info(
|
| 356 |
+
"OAuth 2.1: Using MemoryStore for FastMCP OAuth proxy client_storage"
|
| 357 |
+
)
|
| 358 |
+
# else: client_storage remains None, FastMCP uses its default
|
| 359 |
+
|
| 360 |
+
# Ensure JWT signing key is always derived for all storage backends
|
| 361 |
+
if "jwt_signing_key" not in locals():
|
| 362 |
+
jwt_signing_key = validate_and_derive_jwt_key(
|
| 363 |
+
jwt_signing_key_override, config.client_secret
|
| 364 |
+
)
|
| 365 |
+
|
| 366 |
+
# Check if external OAuth provider is configured
|
| 367 |
+
if config.is_external_oauth21_provider():
|
| 368 |
+
# External OAuth mode: use custom provider that handles ya29.* access tokens
|
| 369 |
+
from auth.external_oauth_provider import ExternalOAuthProvider
|
| 370 |
+
|
| 371 |
+
provider = ExternalOAuthProvider(
|
| 372 |
+
client_id=config.client_id,
|
| 373 |
+
client_secret=config.client_secret,
|
| 374 |
+
base_url=config.get_oauth_base_url(),
|
| 375 |
+
redirect_path=config.redirect_path,
|
| 376 |
+
required_scopes=required_scopes,
|
| 377 |
+
)
|
| 378 |
+
# Disable protocol-level auth, expect bearer tokens in tool calls
|
| 379 |
+
server.auth = None
|
| 380 |
+
logger.info(
|
| 381 |
+
"OAuth 2.1 enabled with EXTERNAL provider mode - protocol-level auth disabled"
|
| 382 |
+
)
|
| 383 |
+
logger.info(
|
| 384 |
+
"Expecting Authorization bearer tokens in tool call headers"
|
| 385 |
+
)
|
| 386 |
+
else:
|
| 387 |
+
# Standard OAuth 2.1 mode: use FastMCP's GoogleProvider
|
| 388 |
+
provider = GoogleProvider(
|
| 389 |
+
client_id=config.client_id,
|
| 390 |
+
client_secret=config.client_secret,
|
| 391 |
+
base_url=config.get_oauth_base_url(),
|
| 392 |
+
redirect_path=config.redirect_path,
|
| 393 |
+
required_scopes=required_scopes,
|
| 394 |
+
client_storage=client_storage,
|
| 395 |
+
jwt_signing_key=jwt_signing_key,
|
| 396 |
+
)
|
| 397 |
+
# Enable protocol-level auth
|
| 398 |
+
server.auth = provider
|
| 399 |
+
logger.info(
|
| 400 |
+
"OAuth 2.1 enabled using FastMCP GoogleProvider with protocol-level auth"
|
| 401 |
+
)
|
| 402 |
+
|
| 403 |
+
# Always set auth provider for token validation in middleware
|
| 404 |
+
set_auth_provider(provider)
|
| 405 |
+
_auth_provider = provider
|
| 406 |
+
except Exception as exc:
|
| 407 |
+
logger.error(
|
| 408 |
+
"Failed to initialize FastMCP GoogleProvider: %s", exc, exc_info=True
|
| 409 |
+
)
|
| 410 |
+
raise
|
| 411 |
+
else:
|
| 412 |
+
logger.info("OAuth 2.0 mode - Server will use legacy authentication.")
|
| 413 |
+
server.auth = None
|
| 414 |
+
_auth_provider = None
|
| 415 |
+
set_auth_provider(None)
|
| 416 |
+
_ensure_legacy_callback_route()
|
| 417 |
+
|
| 418 |
+
|
| 419 |
+
def get_auth_provider() -> Optional[GoogleProvider]:
    """Gets the global authentication provider instance.

    Returns:
        The provider stored during OAuth 2.1 setup (a ``GoogleProvider`` or
        an ``ExternalOAuthProvider``), or ``None`` when the server runs in
        legacy OAuth 2.0 mode.
    """
    return _auth_provider
|
| 422 |
+
|
| 423 |
+
|
| 424 |
+
@server.custom_route("/health", methods=["GET"])
async def health_check(request: Request):
    """Liveness endpoint reporting service name, installed version, and transport."""
    try:
        pkg_version = metadata.version("workspace-mcp")
    except metadata.PackageNotFoundError:
        # Running from a source checkout without an installed distribution.
        pkg_version = "dev"

    payload = {
        "status": "healthy",
        "service": "workspace-mcp",
        "version": pkg_version,
        "transport": get_transport_mode(),
    }
    return JSONResponse(payload)
|
| 438 |
+
|
| 439 |
+
|
| 440 |
+
@server.custom_route("/attachments/{file_id}", methods=["GET"])
async def serve_attachment(request: Request):
    """Serve a stored attachment file.

    Starlette invokes custom-route endpoints with the ``Request`` object, so
    the ``file_id`` path parameter must be read from ``request.path_params``
    (the previous ``(file_id: str)`` signature received the Request object in
    place of the id and could never match a stored attachment).

    Returns:
        FileResponse streaming the stored bytes, or a JSON 404 when the
        attachment is unknown/expired or its backing file is missing.
    """
    from core.attachment_storage import get_attachment_storage

    file_id = request.path_params["file_id"]

    storage = get_attachment_storage()
    # Named to avoid shadowing the module-level `importlib.metadata` import.
    attachment_meta = storage.get_attachment_metadata(file_id)

    if not attachment_meta:
        return JSONResponse(
            {"error": "Attachment not found or expired"}, status_code=404
        )

    file_path = storage.get_attachment_path(file_id)
    if not file_path:
        return JSONResponse({"error": "Attachment file not found"}, status_code=404)

    return FileResponse(
        path=str(file_path),
        filename=attachment_meta["filename"],
        media_type=attachment_meta["mime_type"],
    )
|
| 462 |
+
|
| 463 |
+
|
| 464 |
+
async def legacy_oauth2_callback(request: Request) -> HTMLResponse:
    """Handle Google's redirect in legacy (OAuth 2.0) mode.

    Exchanges the authorization ``code`` for credentials, stores them in the
    OAuth 2.1 session store (best-effort), and renders an HTML success or
    error page for the browser.
    """
    state = request.query_params.get("state")
    code = request.query_params.get("code")
    error = request.query_params.get("error")

    # Google reports user denial / misconfiguration via the `error` param.
    if error:
        msg = (
            f"Authentication failed: Google returned an error: {error}. State: {state}."
        )
        logger.error(msg)
        return create_error_response(msg)

    if not code:
        msg = "Authentication failed: No authorization code received from Google."
        logger.error(msg)
        return create_error_response(msg)

    try:
        # Verify client secrets are configured before attempting the exchange.
        error_message = check_client_secrets()
        if error_message:
            return create_server_error_response(error_message)

        logger.info(f"OAuth callback: Received code (state: {state}).")

        # The MCP session id is attached by middleware when available; it ties
        # the Google credentials to the caller's MCP session.
        mcp_session_id = None
        if hasattr(request, "state") and hasattr(request.state, "session_id"):
            mcp_session_id = request.state.session_id

        # Exchange the authorization code for tokens and verify the user.
        verified_user_id, credentials = handle_auth_callback(
            scopes=get_current_scopes(),
            authorization_response=str(request.url),
            redirect_uri=get_oauth_redirect_uri_for_current_mode(),
            session_id=mcp_session_id,
        )

        logger.info(
            f"OAuth callback: Successfully authenticated user: {verified_user_id}."
        )

        # Best-effort: mirror the credentials into the OAuth 2.1 session store
        # so OAuth 2.1-aware middleware can find them; failure here must not
        # fail the whole callback.
        try:
            store = get_oauth21_session_store()

            store.store_session(
                user_email=verified_user_id,
                access_token=credentials.token,
                refresh_token=credentials.refresh_token,
                token_uri=credentials.token_uri,
                client_id=credentials.client_id,
                client_secret=credentials.client_secret,
                scopes=credentials.scopes,
                expiry=credentials.expiry,
                session_id=f"google-{state}",
                mcp_session_id=mcp_session_id,
            )
            logger.info(
                f"Stored Google credentials in OAuth 2.1 session store for {verified_user_id}"
            )
        except Exception as e:
            logger.error(f"Failed to store credentials in OAuth 2.1 store: {e}")

        return create_success_response(verified_user_id)
    except Exception as e:
        logger.error(f"Error processing OAuth callback: {str(e)}", exc_info=True)
        return create_server_error_response(str(e))
|
| 528 |
+
|
| 529 |
+
|
| 530 |
+
@server.tool()
async def start_google_auth(
    service_name: str, user_google_email: str = USER_GOOGLE_EMAIL
) -> str:
    """
    Manually initiate Google OAuth authentication flow.

    NOTE: This tool should typically NOT be called directly. The authentication system
    automatically handles credential checks and prompts for authentication when needed.
    Only use this tool if:
    1. You need to re-authenticate with different credentials
    2. You want to proactively authenticate before using other tools
    3. The automatic authentication flow failed and you need to retry

    In most cases, simply try calling the Google Workspace tool you need - it will
    automatically handle authentication if required.
    """
    if not user_google_email:
        raise ValueError("user_google_email must be provided.")

    # Fail fast when the OAuth client is not configured at all.
    secrets_problem = check_client_secrets()
    if secrets_problem:
        return f"**Authentication Error:** {secrets_problem}"

    try:
        return await start_auth_flow(
            user_google_email=user_google_email,
            service_name=service_name,
            redirect_uri=get_oauth_redirect_uri_for_current_mode(),
        )
    except Exception as e:
        logger.error(f"Failed to start Google authentication flow: {e}", exc_info=True)
        return f"**Error:** An unexpected error occurred: {e}"
|
core/tool_registry.py
ADDED
|
@@ -0,0 +1,105 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Tool Registry for Conditional Tool Registration
|
| 3 |
+
|
| 4 |
+
This module provides a registry system that allows tools to be conditionally registered
|
| 5 |
+
based on tier configuration, replacing direct @server.tool() decorators.
|
| 6 |
+
"""
|
| 7 |
+
|
| 8 |
+
import logging
|
| 9 |
+
from typing import Set, Optional, Callable
|
| 10 |
+
|
| 11 |
+
logger = logging.getLogger(__name__)
|
| 12 |
+
|
| 13 |
+
# Global registry of enabled tools
|
| 14 |
+
_enabled_tools: Optional[Set[str]] = None
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
def set_enabled_tools(tool_names: Optional[Set[str]]):
    """Set the globally enabled tools.

    Args:
        tool_names: Names of the tools that may be registered / kept
            registered, or ``None`` to enable every tool (the default).
    """
    global _enabled_tools
    _enabled_tools = tool_names
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
def get_enabled_tools() -> Optional[Set[str]]:
    """Get the set of enabled tools, or None if all tools are enabled.

    Returns:
        The allow-set installed via :func:`set_enabled_tools`, or ``None``
        when no filtering is configured.
    """
    return _enabled_tools
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
def is_tool_enabled(tool_name: str) -> bool:
    """Return True when *tool_name* is allowed to register.

    With no explicit allow-set configured (``_enabled_tools is None``),
    every tool is considered enabled.
    """
    return _enabled_tools is None or tool_name in _enabled_tools
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
def conditional_tool(server, tool_name: str):
    """
    Build a decorator that registers a function on *server* only when
    *tool_name* is currently enabled; otherwise the function is returned
    untouched (a no-op decoration).

    Args:
        server: The FastMCP server instance
        tool_name: The name under which the tool would be registered

    Returns:
        A decorator that either performs the registration or does nothing
    """

    def decorator(func: Callable) -> Callable:
        if not is_tool_enabled(tool_name):
            logger.debug(f"Skipping tool registration: {tool_name}")
            return func
        logger.debug(f"Registering tool: {tool_name}")
        return server.tool()(func)

    return decorator
|
| 56 |
+
|
| 57 |
+
|
| 58 |
+
def wrap_server_tool_method(server):
    """
    Monkey-patch ``server.tool`` so every registration is recorded in
    ``server._tracked_tools`` while tools are still registered normally
    (they can be filtered afterwards).
    """
    original_tool = server.tool
    server._tracked_tools = []

    def tracking_tool(*args, **kwargs):
        inner_decorator = original_tool(*args, **kwargs)

        def record_and_register(func: Callable) -> Callable:
            # Record the name, then delegate to the real decorator.
            server._tracked_tools.append(func.__name__)
            return inner_decorator(func)

        return record_and_register

    server.tool = tracking_tool
|
| 77 |
+
|
| 78 |
+
|
| 79 |
+
def filter_server_tools(server):
    """Remove disabled tools from the server after registration."""
    enabled_tools = get_enabled_tools()
    if enabled_tools is None:
        # No allow-list configured; keep every registered tool.
        return

    removed_count = 0

    # FastMCP keeps registered tools in _tool_manager._tools; guard each
    # attribute lookup in case the internal layout differs between versions.
    manager = getattr(server, "_tool_manager", None)
    registry = getattr(manager, "_tools", None) if manager is not None else None
    if registry is not None:
        disabled = [
            name for name in list(registry.keys()) if not is_tool_enabled(name)
        ]
        for name in disabled:
            del registry[name]
            removed_count += 1

    if removed_count > 0:
        logger.info(
            f"Tool tier filtering: removed {removed_count} tools, {len(enabled_tools)} enabled"
        )
|
core/tool_tier_loader.py
ADDED
|
@@ -0,0 +1,196 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Tool Tier Loader Module
|
| 3 |
+
|
| 4 |
+
This module provides functionality to load and resolve tool tiers from the YAML configuration.
|
| 5 |
+
It integrates with the existing tool enablement workflow to support tiered tool loading.
|
| 6 |
+
"""
|
| 7 |
+
|
| 8 |
+
import logging
|
| 9 |
+
from pathlib import Path
|
| 10 |
+
from typing import Dict, List, Set, Literal, Optional
|
| 11 |
+
|
| 12 |
+
import yaml
|
| 13 |
+
|
| 14 |
+
logger = logging.getLogger(__name__)
|
| 15 |
+
|
| 16 |
+
TierLevel = Literal["core", "extended", "complete"]
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
class ToolTierLoader:
    """Loads and manages tool tiers from configuration.

    The YAML file maps service names to three tier lists (core, extended,
    complete). The parsed config is cached after the first load.
    """

    def __init__(self, config_path: Optional[str] = None):
        """
        Initialize the tool tier loader.

        Args:
            config_path: Path to the tool_tiers.yaml file. If None, uses the
                default location (core/tool_tiers.yaml next to this module).
        """
        if config_path is None:
            config_path = Path(__file__).parent / "tool_tiers.yaml"

        self.config_path = Path(config_path)
        # Cached parsed configuration; populated lazily by _load_config().
        self._tiers_config: Optional[Dict] = None

    def _load_config(self) -> Dict:
        """Load (and cache) the tool tiers configuration from the YAML file.

        Raises:
            FileNotFoundError: The configuration file does not exist.
            ValueError: The file contains invalid YAML.
            RuntimeError: Any other failure while reading the file.
        """
        if self._tiers_config is not None:
            return self._tiers_config

        if not self.config_path.exists():
            raise FileNotFoundError(
                f"Tool tiers configuration not found: {self.config_path}"
            )

        try:
            with open(self.config_path, "r", encoding="utf-8") as f:
                # An empty YAML file parses to None; normalize to an empty
                # mapping so callers can always treat the config as a dict.
                self._tiers_config = yaml.safe_load(f) or {}
            logger.info(f"Loaded tool tiers configuration from {self.config_path}")
            return self._tiers_config
        except yaml.YAMLError as e:
            # Chain the parse error so the original location is preserved.
            raise ValueError(f"Invalid YAML in tool tiers configuration: {e}") from e
        except Exception as e:
            raise RuntimeError(f"Failed to load tool tiers configuration: {e}") from e

    def get_available_services(self) -> List[str]:
        """Get list of all available services defined in the configuration."""
        return list(self._load_config().keys())

    def get_tools_for_tier(
        self, tier: "TierLevel", services: Optional[List[str]] = None
    ) -> List[str]:
        """
        Get all tools for a specific tier level (non-cumulative).

        Args:
            tier: The tier level (core, extended, complete)
            services: Optional list of services to filter by. If None,
                includes all services.

        Returns:
            List of tool names for the specified tier level
        """
        config = self._load_config()
        tools: List[str] = []

        if services is None:
            services = self.get_available_services()

        for service in services:
            if service not in config:
                logger.warning(
                    f"Service '{service}' not found in tool tiers configuration"
                )
                continue

            service_config = config[service]
            if tier not in service_config:
                logger.debug(f"Tier '{tier}' not defined for service '{service}'")
                continue

            tier_tools = service_config[tier]
            if tier_tools:  # Handle empty lists / null entries
                tools.extend(tier_tools)

        return tools

    def get_tools_up_to_tier(
        self, tier: "TierLevel", services: Optional[List[str]] = None
    ) -> List[str]:
        """
        Get all tools up to and including the specified tier level (cumulative).

        Args:
            tier: The maximum tier level to include
            services: Optional list of services to filter by. If None,
                includes all services.

        Returns:
            De-duplicated list of tool names up to the specified tier level,
            in first-seen order.
        """
        tier_order = ["core", "extended", "complete"]
        max_tier_index = tier_order.index(tier)

        tools: List[str] = []
        for i in range(max_tier_index + 1):
            tools.extend(self.get_tools_for_tier(tier_order[i], services))

        # Remove duplicates while preserving order.
        seen: Set[str] = set()
        unique_tools: List[str] = []
        for tool in tools:
            if tool not in seen:
                seen.add(tool)
                unique_tools.append(tool)

        return unique_tools

    def get_services_for_tools(self, tool_names: List[str]) -> Set[str]:
        """
        Get the service names that provide the specified tools.

        Args:
            tool_names: List of tool names to look up

        Returns:
            Set of service names that provide any of the specified tools
        """
        config = self._load_config()
        services: Set[str] = set()
        # Set membership instead of repeated linear scans over tool_names.
        wanted = set(tool_names)

        for service, service_config in config.items():
            # Tier names are irrelevant here; only the tool lists matter.
            for tier_tools in service_config.values():
                if tier_tools and wanted.intersection(tier_tools):
                    services.add(service)
                    break

        return services
|
| 150 |
+
|
| 151 |
+
|
| 152 |
+
def get_tools_for_tier(
    tier: TierLevel, services: Optional[List[str]] = None
) -> List[str]:
    """
    Convenience function returning every tool enabled at *tier*.

    Note: the result is cumulative — this delegates to
    ``ToolTierLoader.get_tools_up_to_tier``, so e.g. ``tier="extended"``
    also includes all "core" tools.

    Args:
        tier: The tier level (core, extended, complete)
        services: Optional list of services to filter by

    Returns:
        List of tool names for the specified tier level
    """
    return ToolTierLoader().get_tools_up_to_tier(tier, services)
|
| 167 |
+
|
| 168 |
+
|
| 169 |
+
def resolve_tools_from_tier(
    tier: TierLevel, services: Optional[List[str]] = None
) -> tuple[List[str], List[str]]:
    """
    Resolve tool names and the providing service names for the given tier.

    Args:
        tier: The tier level (core, extended, complete)
        services: Optional list of services to filter by

    Returns:
        Tuple ``(tool_names, service_names)`` where ``tool_names`` is the
        cumulative list of tools for the tier and ``service_names`` is the
        sorted list of services that should be imported to provide them.
    """
    loader = ToolTierLoader()

    # Cumulative tool list for the tier, then map it back to services.
    tool_names = loader.get_tools_up_to_tier(tier, services)
    provider_services = loader.get_services_for_tools(tool_names)

    logger.info(
        f"Tier '{tier}' resolved to {len(tool_names)} tools across {len(provider_services)} services: {sorted(provider_services)}"
    )

    return tool_names, sorted(provider_services)
|
core/tool_tiers.yaml
ADDED
|
@@ -0,0 +1,166 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Tool tier definitions for the Workspace MCP server.
# Each Google service maps to three tier lists consumed by
# core/tool_tier_loader.py:
#   core     - essential, most commonly used tools
#   extended - additional management tools (loaded on top of core)
#   complete - everything else; [] means no extra tools beyond extended
gmail:
  core:
    - search_gmail_messages
    - get_gmail_message_content
    - get_gmail_messages_content_batch
    - send_gmail_message

  extended:
    - get_gmail_attachment_content
    - get_gmail_thread_content
    - modify_gmail_message_labels
    - list_gmail_labels
    - manage_gmail_label
    - draft_gmail_message

  complete:
    - get_gmail_threads_content_batch
    - batch_modify_gmail_message_labels
    - start_google_auth

drive:
  core:
    - search_drive_files
    - get_drive_file_content
    - get_drive_file_download_url
    - create_drive_file
    - share_drive_file
    - get_drive_shareable_link
  extended:
    - list_drive_items
    - update_drive_file
    - update_drive_permission
    - remove_drive_permission
    - transfer_drive_ownership
    - batch_share_drive_file
  complete:
    - get_drive_file_permissions
    - check_drive_file_public_access

calendar:
  core:
    - list_calendars
    - get_events
    - create_event
    - modify_event
  extended:
    - delete_event
  complete: []

docs:
  core:
    - get_doc_content
    - create_doc
    - modify_doc_text
  extended:
    - export_doc_to_pdf
    - search_docs
    - find_and_replace_doc
    - list_docs_in_folder
    - insert_doc_elements
  complete:
    - insert_doc_image
    - update_doc_headers_footers
    - batch_update_doc
    - inspect_doc_structure
    - create_table_with_data
    - debug_table_structure
    - read_document_comments
    - create_document_comment
    - reply_to_document_comment
    - resolve_document_comment

sheets:
  core:
    - create_spreadsheet
    - read_sheet_values
    - modify_sheet_values
  extended:
    - list_spreadsheets
    - get_spreadsheet_info
  complete:
    - create_sheet
    - read_spreadsheet_comments
    - create_spreadsheet_comment
    - reply_to_spreadsheet_comment
    - resolve_spreadsheet_comment

chat:
  core:
    - send_message
    - get_messages
    - search_messages
  extended:
    - list_spaces
  complete: []

forms:
  core:
    - create_form
    - get_form
  extended:
    - list_form_responses
  complete:
    - set_publish_settings
    - get_form_response

slides:
  core:
    - create_presentation
    - get_presentation
  extended:
    - batch_update_presentation
    - get_page
    - get_page_thumbnail
  complete:
    - read_presentation_comments
    - create_presentation_comment
    - reply_to_presentation_comment
    - resolve_presentation_comment

tasks:
  core:
    - get_task
    - list_tasks
    - create_task
    - update_task
  extended:
    - delete_task
  complete:
    - list_task_lists
    - get_task_list
    - create_task_list
    - update_task_list
    - delete_task_list
    - move_task
    - clear_completed_tasks

search:
  core:
    - search_custom
  extended:
    - search_custom_siterestrict
  complete:
    - get_search_engine_info

appscript:
  core:
    - list_script_projects
    - get_script_project
    - get_script_content
    - create_script_project
    - update_script_content
    - run_script_function
    - generate_trigger_code
  extended:
    - create_deployment
    - list_deployments
    - update_deployment
    - delete_deployment
    - delete_script_project
    - list_versions
    - create_version
    - get_version
    - list_script_processes
    - get_script_metrics
  complete: []
|
core/utils.py
ADDED
|
@@ -0,0 +1,341 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import io
|
| 2 |
+
import logging
|
| 3 |
+
import os
|
| 4 |
+
import zipfile
|
| 5 |
+
import xml.etree.ElementTree as ET
|
| 6 |
+
import ssl
|
| 7 |
+
import asyncio
|
| 8 |
+
import functools
|
| 9 |
+
|
| 10 |
+
from typing import List, Optional
|
| 11 |
+
|
| 12 |
+
from googleapiclient.errors import HttpError
|
| 13 |
+
from .api_enablement import get_api_enablement_message
|
| 14 |
+
from auth.google_auth import GoogleAuthenticationError
|
| 15 |
+
|
| 16 |
+
logger = logging.getLogger(__name__)
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
class TransientNetworkError(Exception):
    """Raised when a transient network error persists after all retry attempts."""
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
class UserInputError(Exception):
    """Raised for user-facing input/validation errors that should never be retried."""
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
def check_credentials_directory_permissions(credentials_dir: Optional[str] = None) -> None:
    """
    Check if the service has appropriate permissions to create and write to the .credentials directory.

    Args:
        credentials_dir: Path to the credentials directory. When None, the default
            location from auth.google_auth.get_default_credentials_dir() is used.

    Raises:
        PermissionError: If the service lacks necessary permissions.
        OSError: If there are other file system issues.
    """
    if credentials_dir is None:
        # Imported lazily: auth.google_auth imports from this module's package,
        # so a top-level import would risk a circular import.
        from auth.google_auth import get_default_credentials_dir

        credentials_dir = get_default_credentials_dir()

    try:
        if os.path.exists(credentials_dir):
            # Directory already exists — verify we can actually write into it.
            try:
                _probe_directory_writable(credentials_dir)
                logger.info(
                    f"Credentials directory permissions check passed: {os.path.abspath(credentials_dir)}"
                )
            except (PermissionError, OSError) as e:
                raise PermissionError(
                    f"Cannot write to existing credentials directory '{os.path.abspath(credentials_dir)}': {e}"
                )
        else:
            # Directory doesn't exist — create it (and any parents), then verify writability.
            try:
                os.makedirs(credentials_dir, exist_ok=True)
                _probe_directory_writable(credentials_dir)
                logger.info(
                    f"Created credentials directory with proper permissions: {os.path.abspath(credentials_dir)}"
                )
            except (PermissionError, OSError) as e:
                # Best-effort cleanup of the leaf directory we may have just created.
                # NOTE(review): parents created by makedirs are intentionally left in
                # place — removing them could delete pre-existing directories.
                try:
                    if os.path.exists(credentials_dir):
                        os.rmdir(credentials_dir)
                except (PermissionError, OSError):
                    pass
                raise PermissionError(
                    f"Cannot create or write to credentials directory '{os.path.abspath(credentials_dir)}': {e}"
                )

    except PermissionError:
        # Already a well-formed, user-facing error; propagate unchanged.
        raise
    except Exception as e:
        raise OSError(
            f"Unexpected error checking credentials directory permissions: {e}"
        )


def _probe_directory_writable(directory: str) -> None:
    """Write and delete a small probe file to confirm *directory* is writable.

    Raises PermissionError/OSError on failure; returns None on success.
    """
    probe_path = os.path.join(directory, ".permission_test")
    with open(probe_path, "w") as f:
        f.write("test")
    os.remove(probe_path)
|
| 92 |
+
|
| 93 |
+
|
| 94 |
+
def extract_office_xml_text(file_bytes: bytes, mime_type: str) -> Optional[str]:
    """
    Very light-weight XML scraper for Word, Excel, PowerPoint files.
    Returns plain-text if something readable is found, else None.
    No external deps – just std-lib zipfile + ElementTree.

    Args:
        file_bytes: Raw bytes of the .docx/.xlsx/.pptx file (a ZIP container).
        mime_type: OOXML MIME type; anything other than the three Word/Excel/
            PowerPoint OOXML types returns None.

    Returns:
        Extracted plain text, or None if the file is unreadable, the MIME type
        is unsupported, or no text was found.
    """
    shared_strings: List[str] = []
    # SpreadsheetML main namespace — used to qualify Excel element lookups.
    ns_excel_main = "http://schemas.openxmlformats.org/spreadsheetml/2006/main"

    try:
        with zipfile.ZipFile(io.BytesIO(file_bytes)) as zf:
            targets: List[str] = []
            # Map MIME → iterable of XML files to inspect
            if (
                mime_type
                == "application/vnd.openxmlformats-officedocument.wordprocessingml.document"
            ):
                targets = ["word/document.xml"]
            elif (
                mime_type
                == "application/vnd.openxmlformats-officedocument.presentationml.presentation"
            ):
                targets = [n for n in zf.namelist() if n.startswith("ppt/slides/slide")]
            elif (
                mime_type
                == "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
            ):
                targets = [
                    n
                    for n in zf.namelist()
                    if n.startswith("xl/worksheets/sheet") and "drawing" not in n
                ]
                # Attempt to parse sharedStrings.xml for Excel files.
                # Cells with t="s" store an index into this table rather than text.
                try:
                    shared_strings_xml = zf.read("xl/sharedStrings.xml")
                    shared_strings_root = ET.fromstring(shared_strings_xml)
                    for si_element in shared_strings_root.findall(
                        f"{{{ns_excel_main}}}si"
                    ):
                        text_parts = []
                        # Find all <t> elements, simple or within <r> runs, and concatenate their text
                        for t_element in si_element.findall(f".//{{{ns_excel_main}}}t"):
                            if t_element.text:
                                text_parts.append(t_element.text)
                        shared_strings.append("".join(text_parts))
                except KeyError:
                    logger.info(
                        "No sharedStrings.xml found in Excel file (this is optional)."
                    )
                except ET.ParseError as e:
                    logger.error(f"Error parsing sharedStrings.xml: {e}")
                except (
                    Exception
                ) as e:  # Catch any other unexpected error during sharedStrings parsing
                    logger.error(
                        f"Unexpected error processing sharedStrings.xml: {e}",
                        exc_info=True,
                    )
            else:
                # Unsupported MIME type — nothing to extract.
                return None

            pieces: List[str] = []
            for member in targets:
                try:
                    xml_content = zf.read(member)
                    xml_root = ET.fromstring(xml_content)
                    member_texts: List[str] = []

                    if (
                        mime_type
                        == "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
                    ):
                        for cell_element in xml_root.findall(
                            f".//{{{ns_excel_main}}}c"
                        ):  # Find all <c> elements
                            value_element = cell_element.find(
                                f"{{{ns_excel_main}}}v"
                            )  # Find <v> under <c>

                            # Skip if cell has no value element or value element has no text
                            if value_element is None or value_element.text is None:
                                continue

                            cell_type = cell_element.get("t")
                            if cell_type == "s":  # Shared string
                                try:
                                    ss_idx = int(value_element.text)
                                    if 0 <= ss_idx < len(shared_strings):
                                        member_texts.append(shared_strings[ss_idx])
                                    else:
                                        logger.warning(
                                            f"Invalid shared string index {ss_idx} in {member}. Max index: {len(shared_strings) - 1}"
                                        )
                                except ValueError:
                                    logger.warning(
                                        f"Non-integer shared string index: '{value_element.text}' in {member}."
                                    )
                            else:  # Direct value (number, boolean, inline string if not 's')
                                member_texts.append(value_element.text)
                    else:  # Word or PowerPoint
                        for elem in xml_root.iter():
                            # For Word: <w:t> where w is "http://schemas.openxmlformats.org/wordprocessingml/2006/main"
                            # For PowerPoint: <a:t> where a is "http://schemas.openxmlformats.org/drawingml/2006/main"
                            if (
                                elem.tag.endswith("}t") and elem.text
                            ):  # Check for any namespaced tag ending with 't'
                                cleaned_text = elem.text.strip()
                                if (
                                    cleaned_text
                                ):  # Add only if there's non-whitespace text
                                    member_texts.append(cleaned_text)

                    if member_texts:
                        pieces.append(
                            " ".join(member_texts)
                        )  # Join texts from one member with spaces

                except ET.ParseError as e:
                    logger.warning(
                        f"Could not parse XML in member '{member}' for {mime_type} file: {e}"
                    )
                except Exception as e:
                    logger.error(
                        f"Error processing member '{member}' for {mime_type}: {e}",
                        exc_info=True,
                    )
                    # continue processing other members

            if not pieces:  # If no text was extracted at all
                return None

            # Join content from different members (sheets/slides) with double newlines for separation
            text = "\n\n".join(pieces).strip()
            return text or None  # Ensure None is returned if text is empty after strip

    except zipfile.BadZipFile:
        logger.warning(f"File is not a valid ZIP archive (mime_type: {mime_type}).")
        return None
    except (
        ET.ParseError
    ) as e:  # Catch parsing errors at the top level if zipfile itself is XML-like
        logger.error(f"XML parsing error at a high level for {mime_type}: {e}")
        return None
    except Exception as e:
        logger.error(
            f"Failed to extract office XML text for {mime_type}: {e}", exc_info=True
        )
        return None
|
| 242 |
+
|
| 243 |
+
|
| 244 |
+
def handle_http_errors(
    tool_name: str, is_read_only: bool = False, service_type: Optional[str] = None
):
    """
    A decorator to handle Google API HttpErrors and transient SSL errors in a standardized way.

    It wraps a tool function, catches HttpError, logs a detailed error message,
    and raises a generic Exception with a user-friendly message.

    If is_read_only is True, it will also catch ssl.SSLError and retry with
    exponential backoff. After exhausting retries, it raises a TransientNetworkError.

    Args:
        tool_name (str): The name of the tool being decorated (e.g., 'list_calendars').
        is_read_only (bool): If True, the operation is considered safe to retry on
            transient network errors. Defaults to False.
        service_type (str): Optional. The Google service type (e.g., 'calendar', 'gmail').
    """

    def decorator(func):
        @functools.wraps(func)
        async def wrapper(*args, **kwargs):
            # Retry budget only applies to SSL errors on read-only operations;
            # every other failure path raises on the first attempt.
            max_retries = 3
            base_delay = 1

            for attempt in range(max_retries):
                try:
                    return await func(*args, **kwargs)
                except ssl.SSLError as e:
                    if is_read_only and attempt < max_retries - 1:
                        # Exponential backoff: 1s, 2s, 4s, ...
                        delay = base_delay * (2**attempt)
                        logger.warning(
                            f"SSL error in {tool_name} on attempt {attempt + 1}: {e}. Retrying in {delay} seconds..."
                        )
                        await asyncio.sleep(delay)
                    else:
                        logger.error(
                            f"SSL error in {tool_name} on final attempt: {e}. Raising exception."
                        )
                        raise TransientNetworkError(
                            f"A transient SSL error occurred in '{tool_name}' after {max_retries} attempts. "
                            "This is likely a temporary network or certificate issue. Please try again shortly."
                        ) from e
                except UserInputError as e:
                    # Validation errors are user-facing; log quietly and re-raise unchanged.
                    message = f"Input error in {tool_name}: {e}"
                    logger.warning(message)
                    raise e
                except HttpError as error:
                    # Used to personalize re-auth guidance in the error message.
                    user_google_email = kwargs.get("user_google_email", "N/A")
                    error_details = str(error)

                    # Check if this is an API not enabled error
                    if (
                        error.resp.status == 403
                        and ("accessNotConfigured" in error_details or "SERVICE_DISABLED" in error_details)
                    ):
                        enablement_msg = get_api_enablement_message(
                            error_details, service_type
                        )

                        if enablement_msg:
                            message = (
                                f"API error in {tool_name}: {enablement_msg}\n\n"
                                f"User: {user_google_email}"
                            )
                        else:
                            message = (
                                f"API error in {tool_name}: {error}. "
                                f"The required API is not enabled for your project. "
                                f"Please check the Google Cloud Console to enable it."
                            )
                    elif error.resp.status in [401, 403]:
                        # Authentication/authorization errors
                        message = (
                            f"API error in {tool_name}: {error}. "
                            f"You might need to re-authenticate for user '{user_google_email}'. "
                            f"LLM: Try 'start_google_auth' with the user's email and the appropriate service_name."
                        )
                    else:
                        # Other HTTP errors (400 Bad Request, etc.) - don't suggest re-auth
                        message = f"API error in {tool_name}: {error}"

                    logger.error(f"API error in {tool_name}: {error}", exc_info=True)
                    raise Exception(message) from error
                except TransientNetworkError:
                    # Re-raise without wrapping to preserve the specific error type
                    raise
                except GoogleAuthenticationError:
                    # Re-raise authentication errors without wrapping
                    raise
                except Exception as e:
                    message = f"An unexpected error occurred in {tool_name}: {e}"
                    logger.exception(message)
                    raise Exception(message) from e

        return wrapper

    return decorator
|
docker-compose.yml
ADDED
|
@@ -0,0 +1,16 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
services:
|
| 2 |
+
gws_mcp:
|
| 3 |
+
build: .
|
| 4 |
+
container_name: gws_mcp
|
| 5 |
+
ports:
|
| 6 |
+
- "8000:8000"
|
| 7 |
+
environment:
|
| 8 |
+
- GOOGLE_MCP_CREDENTIALS_DIR=/app/store_creds
|
| 9 |
+
volumes:
|
| 10 |
+
- ./client_secret.json:/app/client_secret.json:ro
|
| 11 |
+
- store_creds:/app/store_creds:rw
|
| 12 |
+
env_file:
|
| 13 |
+
- .env
|
| 14 |
+
|
| 15 |
+
volumes:
|
| 16 |
+
store_creds:
|
fastmcp.json
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"$schema": "https://gofastmcp.com/public/schemas/fastmcp.json/v1.json",
|
| 3 |
+
"source": {
|
| 4 |
+
"path": "fastmcp_server.py",
|
| 5 |
+
"entrypoint": "mcp"
|
| 6 |
+
},
|
| 7 |
+
"environment": {
|
| 8 |
+
"python": ">=3.10",
|
| 9 |
+
"project": "."
|
| 10 |
+
},
|
| 11 |
+
"deployment": {
|
| 12 |
+
"transport": "http",
|
| 13 |
+
"host": "0.0.0.0",
|
| 14 |
+
"port": 8000,
|
| 15 |
+
"log_level": "INFO",
|
| 16 |
+
"env": {
|
| 17 |
+
"MCP_ENABLE_OAUTH21": "true",
|
| 18 |
+
"OAUTHLIB_INSECURE_TRANSPORT": "1"
|
| 19 |
+
}
|
| 20 |
+
}
|
| 21 |
+
}
|
fastmcp_server.py
ADDED
|
@@ -0,0 +1,175 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# ruff: noqa
|
| 2 |
+
"""
|
| 3 |
+
FastMCP Cloud entrypoint for the Google Workspace MCP server.
|
| 4 |
+
Enforces OAuth 2.1 + stateless defaults required by FastMCP-hosted deployments.
|
| 5 |
+
"""
|
| 6 |
+
|
| 7 |
+
import logging
|
| 8 |
+
import os
|
| 9 |
+
import sys
|
| 10 |
+
from dotenv import load_dotenv
|
| 11 |
+
|
| 12 |
+
# Load environment variables BEFORE any other imports that might read them
|
| 13 |
+
dotenv_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), ".env")
|
| 14 |
+
load_dotenv(dotenv_path=dotenv_path)
|
| 15 |
+
|
| 16 |
+
from auth.oauth_config import reload_oauth_config, is_stateless_mode
|
| 17 |
+
from core.log_formatter import EnhancedLogFormatter, configure_file_logging
|
| 18 |
+
from core.utils import check_credentials_directory_permissions
|
| 19 |
+
from core.server import server, set_transport_mode, configure_server_for_http
|
| 20 |
+
from core.tool_registry import (
|
| 21 |
+
set_enabled_tools as set_enabled_tool_names,
|
| 22 |
+
wrap_server_tool_method,
|
| 23 |
+
filter_server_tools,
|
| 24 |
+
)
|
| 25 |
+
from auth.scopes import set_enabled_tools
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
def enforce_fastmcp_cloud_defaults():
    """Force FastMCP Cloud-compatible OAuth settings before initializing the server.

    Returns a list of (key, previous_value, new_value) tuples for every
    environment variable that was changed.
    """
    overrides = []

    # Settings compared case-insensitively; anything other than the target is forced.
    hard_requirements = {
        "MCP_ENABLE_OAUTH21": "true",
        "WORKSPACE_MCP_STATELESS_MODE": "true",
    }
    # Settings compared exactly; forced only when not already the literal target.
    soft_defaults = {
        "MCP_SINGLE_USER_MODE": "false",
    }

    for name, wanted in hard_requirements.items():
        existing = os.environ.get(name)
        if (existing or "").lower() != wanted:
            os.environ[name] = wanted
            overrides.append((name, existing, wanted))

    for name, wanted in soft_defaults.items():
        existing = os.environ.get(name)
        if existing != wanted:
            os.environ[name] = wanted
            overrides.append((name, existing, wanted))

    return overrides
|
| 54 |
+
|
| 55 |
+
|
| 56 |
+
# Apply FastMCP Cloud env-var overrides before any OAuth-dependent setup runs.
_fastmcp_cloud_overrides = enforce_fastmcp_cloud_defaults()

# Suppress googleapiclient discovery cache warning
logging.getLogger("googleapiclient.discovery_cache").setLevel(logging.ERROR)

# Reload OAuth configuration after env vars loaded
reload_oauth_config()

# Configure basic logging
logging.basicConfig(
    level=logging.INFO, format="%(asctime)s - %(name)s - %(levelname)s - %(message)s"
)
logger = logging.getLogger(__name__)

# Report which settings were forced so operators can spot configuration drift.
if _fastmcp_cloud_overrides:
    for key, previous, new_value in _fastmcp_cloud_overrides:
        if previous is None:
            logger.info("FastMCP Cloud: set %s=%s", key, new_value)
        else:
            logger.warning(
                "FastMCP Cloud: overriding %s from %s to %s", key, previous, new_value
            )
else:
    logger.info("FastMCP Cloud: OAuth 2.1 stateless defaults already satisfied")

# Configure file logging based on stateless mode
configure_file_logging()
|
| 83 |
+
|
| 84 |
+
|
| 85 |
+
def configure_safe_logging():
    """Configure safe Unicode handling for logging."""

    class _AsciiFallbackFormatter(EnhancedLogFormatter):
        """Enhanced formatter that degrades to ASCII when encoding fails."""

        def format(self, record):
            try:
                return super().format(record)
            except UnicodeEncodeError:
                # Fall back to a plain ASCII rendering of the record.
                prefix = self._get_ascii_prefix(record.name, record.levelname)
                degraded = (
                    str(record.getMessage())
                    .encode("ascii", errors="replace")
                    .decode("ascii")
                )
                return f"{prefix} {degraded}"

    # Swap in the safe formatter on console handlers only; file handlers keep theirs.
    console_streams = ("<stderr>", "<stdout>")
    for handler in logging.root.handlers:
        if (
            isinstance(handler, logging.StreamHandler)
            and handler.stream.name in console_streams
        ):
            handler.setFormatter(_AsciiFallbackFormatter(use_colors=True))
|
| 113 |
+
|
| 114 |
+
|
| 115 |
+
# Configure safe logging
configure_safe_logging()

# Check credentials directory permissions (skip in stateless mode)
if not is_stateless_mode():
    try:
        logger.info("Checking credentials directory permissions...")
        check_credentials_directory_permissions()
        logger.info("Credentials directory permissions verified")
    except (PermissionError, OSError) as e:
        # A non-writable credentials directory is fatal outside stateless mode.
        logger.error(f"Credentials directory permission check failed: {e}")
        logger.error(
            " Please ensure the service has write permissions to create/access the credentials directory"
        )
        sys.exit(1)
else:
    logger.info("🔍 Skipping credentials directory check (stateless mode)")

# Set transport mode for HTTP (FastMCP CLI defaults to streamable-http)
set_transport_mode("streamable-http")

# Import all tool modules to register their @server.tool() decorators
import gmail.gmail_tools
import gdrive.drive_tools
import gcalendar.calendar_tools
import gdocs.docs_tools
import gsheets.sheets_tools
import gchat.chat_tools
import gforms.forms_tools
import gslides.slides_tools
import gtasks.tasks_tools
import gsearch.search_tools

# Configure tool registration
wrap_server_tool_method(server)

# Enable all tools and services by default
all_services = [
    "gmail",
    "drive",
    "calendar",
    "docs",
    "sheets",
    "chat",
    "forms",
    "slides",
    "tasks",
    "search",
]
set_enabled_tools(all_services)  # Set enabled services for scopes
set_enabled_tool_names(None)  # Don't filter individual tools - enable all

# Filter tools based on configuration
filter_server_tools(server)

# Configure authentication after scopes are known
configure_server_for_http()

# Export server instance for FastMCP CLI (looks for 'mcp', 'server', or 'app')
mcp = server
app = server
|
gappsscript/README.md
ADDED
|
@@ -0,0 +1,514 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Google Apps Script MCP Tools
|
| 2 |
+
|
| 3 |
+
This module provides Model Context Protocol (MCP) tools for interacting with Google Apps Script API, enabling AI agents to create, manage, and execute Apps Script projects programmatically.
|
| 4 |
+
|
| 5 |
+
## Overview
|
| 6 |
+
|
| 7 |
+
Google Apps Script allows automation and extension of Google Workspace applications. This MCP integration provides 17 tools across core and extended tiers for complete Apps Script lifecycle management.
|
| 8 |
+
|
| 9 |
+
## Why Apps Script?
|
| 10 |
+
|
| 11 |
+
Apps Script is the automation glue of Google Workspace. While individual service APIs (Docs, Sheets, Gmail) operate on single resources, Apps Script enables:
|
| 12 |
+
|
| 13 |
+
- **Cross-app automation** - Orchestrate workflows across Sheets, Gmail, Calendar, Forms, and Drive
|
| 14 |
+
- **Persistent logic** - Host custom business rules inside Google's environment
|
| 15 |
+
- **Scheduled execution** - Run automations on time-based or event-driven triggers
|
| 16 |
+
- **Advanced integration** - Access functionality not available through standard APIs
|
| 17 |
+
|
| 18 |
+
This MCP integration allows AI agents to author, debug, deploy, and operate these automations end-to-end - something not possible with individual Workspace APIs alone.
|
| 19 |
+
|
| 20 |
+
### What This Enables
|
| 21 |
+
|
| 22 |
+
| Without Apps Script MCP | With Apps Script MCP |
|
| 23 |
+
|------------------------|---------------------|
|
| 24 |
+
| Read/update Sheets, Docs, Gmail individually | Create long-lived automations across services |
|
| 25 |
+
| No persistent automation logic | Host business logic that executes repeatedly |
|
| 26 |
+
| Manual workflow orchestration | Automated multi-step workflows |
|
| 27 |
+
| No execution history | Debug via execution logs and status |
|
| 28 |
+
| No deployment versioning | Manage deployments and roll back versions |
|
| 29 |
+
|
| 30 |
+
### Complete Workflow Example
|
| 31 |
+
|
| 32 |
+
**Scenario:** Automated weekly report system
|
| 33 |
+
|
| 34 |
+
```
|
| 35 |
+
User: "Create a script that runs every Monday at 9 AM. It should:
|
| 36 |
+
1. Read data from the 'Sales' spreadsheet
|
| 37 |
+
2. Calculate weekly totals and growth percentages
|
| 38 |
+
3. Generate a summary with the top 5 performers
|
| 39 |
+
4. Email the report to team@company.com
|
| 40 |
+
5. Log any errors to a monitoring sheet"
|
| 41 |
+
```
|
| 42 |
+
|
| 43 |
+
The AI agent:
|
| 44 |
+
1. Creates a new Apps Script project
|
| 45 |
+
2. Generates the complete automation code
|
| 46 |
+
3. Deploys the script
|
| 47 |
+
4. Sets up the time-based trigger
|
| 48 |
+
5. Tests execution and monitors results
|
| 49 |
+
|
| 50 |
+
All through natural language - no JavaScript knowledge required.
|
| 51 |
+
|
| 52 |
+
### AI Agent Workflow Pattern
|
| 53 |
+
|
| 54 |
+
The MCP client typically follows this pattern when working with Apps Script:
|
| 55 |
+
|
| 56 |
+
1. **Inspect** - Read existing script code and project structure
|
| 57 |
+
2. **Analyze** - Understand current functionality and identify issues
|
| 58 |
+
3. **Propose** - Generate code changes or new functionality
|
| 59 |
+
4. **Update** - Modify files atomically with complete version control
|
| 60 |
+
5. **Execute** - Run functions to test changes
|
| 61 |
+
6. **Deploy** - Create versioned deployments for production use
|
| 62 |
+
7. **Monitor** - Check execution logs and debug failures
|
| 63 |
+
|
| 64 |
+
This ensures safe, auditable automation management.
|
| 65 |
+
|
| 66 |
+
## Features
|
| 67 |
+
|
| 68 |
+
### Project Management
|
| 69 |
+
- List all Apps Script projects
|
| 70 |
+
- Get complete project details including all files
|
| 71 |
+
- Create new standalone or bound script projects
|
| 72 |
+
- Update script content (add/modify JavaScript files)
|
| 73 |
+
- Delete script projects
|
| 74 |
+
|
| 75 |
+
### Execution
|
| 76 |
+
- Execute functions with parameters
|
| 77 |
+
- Development mode for testing latest code
|
| 78 |
+
- Production deployment execution
|
| 79 |
+
- View execution history and status
|
| 80 |
+
|
| 81 |
+
### Deployment Management
|
| 82 |
+
- Create new deployments
|
| 83 |
+
- List all deployments for a project
|
| 84 |
+
- Update deployment configurations
|
| 85 |
+
- Delete outdated deployments
|
| 86 |
+
|
| 87 |
+
### Version Management
|
| 88 |
+
- List all versions of a script
|
| 89 |
+
- Create immutable version snapshots
|
| 90 |
+
- Get details of specific versions
|
| 91 |
+
|
| 92 |
+
### Monitoring & Analytics
|
| 93 |
+
- View recent script executions
|
| 94 |
+
- Check execution status and results
|
| 95 |
+
- Monitor for errors and failures
|
| 96 |
+
- Get execution metrics (active users, total executions, failures)
|
| 97 |
+
|
| 98 |
+
### Trigger Code Generation
|
| 99 |
+
- Generate Apps Script code for time-based triggers (minutes, hours, daily, weekly)
|
| 100 |
+
- Generate code for event triggers (onOpen, onEdit, onFormSubmit, onChange)
|
| 101 |
+
- Provides ready-to-use code snippets with setup instructions
|
| 102 |
+
|
| 103 |
+
## Limitations & Non-Goals
|
| 104 |
+
|
| 105 |
+
**Current Limitations**
|
| 106 |
+
- Direct trigger management via API is not supported (use `generate_trigger_code` instead)
|
| 107 |
+
- Real-time debugging and breakpoints are not available
|
| 108 |
+
- Advanced service enablement must be done manually in the script editor
|
| 109 |
+
|
| 110 |
+
**Non-Goals**
|
| 111 |
+
- This integration does not replace the Apps Script editor UI
|
| 112 |
+
- Does not execute arbitrary JavaScript outside defined script functions
|
| 113 |
+
- Does not provide IDE features like autocomplete or syntax highlighting
|
| 114 |
+
|
| 115 |
+
**Workarounds**
|
| 116 |
+
- Triggers: Use `generate_trigger_code` to get ready-to-use Apps Script code for any trigger type
|
| 117 |
+
- Advanced services can be enabled via the manifest file (appsscript.json)
|
| 118 |
+
- Debugging is supported through execution logs, metrics, and error monitoring
|
| 119 |
+
|
| 120 |
+
## Prerequisites
|
| 121 |
+
|
| 122 |
+
### 1. Google Cloud Project Setup
|
| 123 |
+
|
| 124 |
+
Before using the Apps Script MCP tools, configure your Google Cloud project:
|
| 125 |
+
|
| 126 |
+
**Step 1: Enable Required APIs**
|
| 127 |
+
|
| 128 |
+
Enable these APIs in your Google Cloud Console:
|
| 129 |
+
|
| 130 |
+
1. [Apps Script API](https://console.cloud.google.com/flows/enableapi?apiid=script.googleapis.com) (required for all operations)
|
| 131 |
+
2. [Google Drive API](https://console.cloud.google.com/flows/enableapi?apiid=drive.googleapis.com) (required for listing projects)
|
| 132 |
+
|
| 133 |
+
**Step 2: Create OAuth Credentials**
|
| 134 |
+
|
| 135 |
+
1. Go to [APIs & Services > Credentials](https://console.cloud.google.com/apis/credentials)
|
| 136 |
+
2. Click "Create Credentials" > "OAuth client ID"
|
| 137 |
+
3. Select "Desktop application" as the application type
|
| 138 |
+
4. Download the JSON file and save as `client_secret.json`
|
| 139 |
+
|
| 140 |
+
**Step 3: Configure OAuth Consent Screen**
|
| 141 |
+
|
| 142 |
+
1. Go to [OAuth consent screen](https://console.cloud.google.com/apis/credentials/consent)
|
| 143 |
+
2. Add yourself as a test user (required for unverified apps)
|
| 144 |
+
3. Add the required scopes (see below)
|
| 145 |
+
|
| 146 |
+
### 2. OAuth Scopes
|
| 147 |
+
|
| 148 |
+
The following OAuth scopes are required:
|
| 149 |
+
|
| 150 |
+
```
|
| 151 |
+
https://www.googleapis.com/auth/script.projects
|
| 152 |
+
https://www.googleapis.com/auth/script.projects.readonly
|
| 153 |
+
https://www.googleapis.com/auth/script.deployments
|
| 154 |
+
https://www.googleapis.com/auth/script.deployments.readonly
|
| 155 |
+
https://www.googleapis.com/auth/script.processes
|
| 156 |
+
https://www.googleapis.com/auth/script.metrics
|
| 157 |
+
https://www.googleapis.com/auth/drive.file
|
| 158 |
+
```
|
| 159 |
+
|
| 160 |
+
These scopes are automatically requested when using the `appscript` tool tier.
|
| 161 |
+
|
| 162 |
+
### 3. Running the MCP Server
|
| 163 |
+
|
| 164 |
+
Start the server with Apps Script tools enabled:
|
| 165 |
+
|
| 166 |
+
```bash
|
| 167 |
+
uv run main.py --tools appscript --single-user
|
| 168 |
+
```
|
| 169 |
+
|
| 170 |
+
Or include with other tools:
|
| 171 |
+
|
| 172 |
+
```bash
|
| 173 |
+
uv run main.py --tools appscript drive sheets
|
| 174 |
+
```
|
| 175 |
+
|
| 176 |
+
On first use, you will be prompted to authorize the application. Complete the OAuth flow in your browser.
|
| 177 |
+
|
| 178 |
+
## Tool Tiers
|
| 179 |
+
|
| 180 |
+
### Core Tier
|
| 181 |
+
Essential operations for reading, writing, and executing scripts:
|
| 182 |
+
|
| 183 |
+
- `list_script_projects`: List accessible projects
|
| 184 |
+
- `get_script_project`: Get full project with all files
|
| 185 |
+
- `get_script_content`: Get specific file content
|
| 186 |
+
- `create_script_project`: Create new project
|
| 187 |
+
- `update_script_content`: Modify project files
|
| 188 |
+
- `run_script_function`: Execute functions
|
| 189 |
+
- `generate_trigger_code`: Generate trigger setup code
|
| 190 |
+
|
| 191 |
+
### Extended Tier
|
| 192 |
+
Advanced deployment, versioning, and monitoring:
|
| 193 |
+
|
| 194 |
+
- `create_deployment`: Create new deployment
|
| 195 |
+
- `list_deployments`: List all deployments
|
| 196 |
+
- `update_deployment`: Update deployment config
|
| 197 |
+
- `delete_deployment`: Remove deployment
|
| 198 |
+
- `delete_script_project`: Delete a project permanently
|
| 199 |
+
- `list_versions`: List all versions
|
| 200 |
+
- `create_version`: Create immutable version snapshot
|
| 201 |
+
- `get_version`: Get version details
|
| 202 |
+
- `list_script_processes`: View execution history
|
| 203 |
+
- `get_script_metrics`: Get execution analytics
|
| 204 |
+
|
| 205 |
+
## Usage Examples
|
| 206 |
+
|
| 207 |
+
### List Projects
|
| 208 |
+
|
| 209 |
+
```python
|
| 210 |
+
# List all Apps Script projects
|
| 211 |
+
uv run main.py --tools appscript
|
| 212 |
+
# In MCP client: "Show me my Apps Script projects"
|
| 213 |
+
```
|
| 214 |
+
|
| 215 |
+
Example output:
|
| 216 |
+
```
|
| 217 |
+
Found 3 Apps Script projects:
|
| 218 |
+
- Email Automation (ID: abc123) Created: 2025-01-10 Modified: 2026-01-12
|
| 219 |
+
- Sheet Processor (ID: def456) Created: 2025-06-15 Modified: 2025-12-20
|
| 220 |
+
- Form Handler (ID: ghi789) Created: 2024-11-03 Modified: 2025-08-14
|
| 221 |
+
```
|
| 222 |
+
|
| 223 |
+
### Create New Project
|
| 224 |
+
|
| 225 |
+
```python
|
| 226 |
+
# Create a new Apps Script project
|
| 227 |
+
# In MCP client: "Create a new Apps Script project called 'Data Sync'"
|
| 228 |
+
```
|
| 229 |
+
|
| 230 |
+
Example output:
|
| 231 |
+
```
|
| 232 |
+
Created Apps Script project: Data Sync
|
| 233 |
+
Script ID: new123
|
| 234 |
+
Edit URL: https://script.google.com/d/new123/edit
|
| 235 |
+
```
|
| 236 |
+
|
| 237 |
+
### Get Project Details
|
| 238 |
+
|
| 239 |
+
```python
|
| 240 |
+
# Get complete project with all files
|
| 241 |
+
# In MCP client: "Show me the code for script abc123"
|
| 242 |
+
```
|
| 243 |
+
|
| 244 |
+
Example output:
|
| 245 |
+
```
|
| 246 |
+
Project: Email Automation (ID: abc123)
|
| 247 |
+
Creator: user@example.com
|
| 248 |
+
Created: 2025-01-10
|
| 249 |
+
Modified: 2026-01-12
|
| 250 |
+
|
| 251 |
+
Files:
|
| 252 |
+
1. Code.gs (SERVER_JS)
|
| 253 |
+
function sendDailyEmail() {
|
| 254 |
+
var sheet = SpreadsheetApp.getActiveSpreadsheet();
|
| 255 |
+
// ... email logic
|
| 256 |
+
}
|
| 257 |
+
|
| 258 |
+
2. appsscript.json (JSON)
|
| 259 |
+
{"timeZone": "America/New_York", "dependencies": {}}
|
| 260 |
+
```
|
| 261 |
+
|
| 262 |
+
### Update Script Content
|
| 263 |
+
|
| 264 |
+
```python
|
| 265 |
+
# Update script files
|
| 266 |
+
# In MCP client: "Update my email script to add error handling"
|
| 267 |
+
```
|
| 268 |
+
|
| 269 |
+
The AI will:
|
| 270 |
+
1. Read current code
|
| 271 |
+
2. Generate improved version
|
| 272 |
+
3. Call `update_script_content` with new files
|
| 273 |
+
|
| 274 |
+
### Run Script Function
|
| 275 |
+
|
| 276 |
+
```python
|
| 277 |
+
# Execute a function
|
| 278 |
+
# In MCP client: "Run the sendDailyEmail function in script abc123"
|
| 279 |
+
```
|
| 280 |
+
|
| 281 |
+
Example output:
|
| 282 |
+
```
|
| 283 |
+
Execution successful
|
| 284 |
+
Function: sendDailyEmail
|
| 285 |
+
Result: Emails sent to 5 recipients
|
| 286 |
+
```
|
| 287 |
+
|
| 288 |
+
### Create Deployment
|
| 289 |
+
|
| 290 |
+
```python
|
| 291 |
+
# Deploy script for production
|
| 292 |
+
# In MCP client: "Deploy my email automation to production"
|
| 293 |
+
```
|
| 294 |
+
|
| 295 |
+
Example output:
|
| 296 |
+
```
|
| 297 |
+
Created deployment for script: abc123
|
| 298 |
+
Deployment ID: AKfy...xyz
|
| 299 |
+
Description: Production release
|
| 300 |
+
```
|
| 301 |
+
|
| 302 |
+
## Common Workflows
|
| 303 |
+
|
| 304 |
+
### 1. Create Automated Workflow (Complete Example)
|
| 305 |
+
|
| 306 |
+
**Scenario:** Form submission handler that sends customized emails
|
| 307 |
+
|
| 308 |
+
```
|
| 309 |
+
User: "When someone submits the Contact Form:
|
| 310 |
+
1. Get their email and department from the form response
|
| 311 |
+
2. Look up their manager in the Team Directory spreadsheet
|
| 312 |
+
3. Send a welcome email to the submitter
|
| 313 |
+
4. Send a notification to their manager
|
| 314 |
+
5. Log the interaction in the Onboarding Tracker sheet"
|
| 315 |
+
```
|
| 316 |
+
|
| 317 |
+
**AI Agent Steps:**
|
| 318 |
+
```
|
| 319 |
+
1. "Create a new Apps Script bound to the Contact Form"
|
| 320 |
+
2. "Add a function that reads form submissions"
|
| 321 |
+
3. "Connect to the Team Directory spreadsheet to look up managers"
|
| 322 |
+
4. "Generate personalized email templates for both messages"
|
| 323 |
+
5. "Add logging to the Onboarding Tracker"
|
| 324 |
+
6. "Run the function to test it with sample data"
|
| 325 |
+
7. "Create a production deployment"
|
| 326 |
+
```
|
| 327 |
+
|
| 328 |
+
Result: Complete automation created and deployed without writing code.
|
| 329 |
+
|
| 330 |
+
### 2. Debug Existing Script
|
| 331 |
+
|
| 332 |
+
```
|
| 333 |
+
User: "My expense tracker script is failing"
|
| 334 |
+
AI: "Show me the code for the expense tracker script"
|
| 335 |
+
AI: "What errors occurred in recent executions?"
|
| 336 |
+
AI: "The calculateTotal function has a division by zero error on line 23"
|
| 337 |
+
AI: "Fix the error by adding a check for zero values"
|
| 338 |
+
AI: "Run calculateTotal to verify the fix"
|
| 339 |
+
User: "Create a new deployment with the bug fix"
|
| 340 |
+
```
|
| 341 |
+
|
| 342 |
+
### 3. Modify and Extend Automation
|
| 343 |
+
|
| 344 |
+
```
|
| 345 |
+
User: "Update my weekly report script to include sales data from the Q1 sheet"
|
| 346 |
+
AI: "Read the current report generation script"
|
| 347 |
+
AI: "Add Q1 data fetching to the generateReport function"
|
| 348 |
+
AI: "Test the updated function"
|
| 349 |
+
User: "Looks good, deploy it"
|
| 350 |
+
AI: "Create a new deployment with description 'Added Q1 sales data'"
|
| 351 |
+
```
|
| 352 |
+
|
| 353 |
+
### 4. Run Existing Business Logic
|
| 354 |
+
|
| 355 |
+
```
|
| 356 |
+
User: "Run the monthlyCleanup function in my Data Management script"
|
| 357 |
+
User: "What does the calculateCommission function do?"
|
| 358 |
+
User: "Execute reconcileAccounts with parameters: ['2024', 'January']"
|
| 359 |
+
```
|
| 360 |
+
|
| 361 |
+
## File Types
|
| 362 |
+
|
| 363 |
+
Apps Script projects support three file types:
|
| 364 |
+
|
| 365 |
+
- **SERVER_JS**: Google Apps Script code (.gs files)
|
| 366 |
+
- **HTML**: HTML files for custom UIs
|
| 367 |
+
- **JSON**: Manifest file (appsscript.json)
|
| 368 |
+
|
| 369 |
+
## API Limitations
|
| 370 |
+
|
| 371 |
+
### Execution Timeouts
|
| 372 |
+
- Simple triggers: 30 seconds
|
| 373 |
+
- Custom functions: 30 seconds
|
| 374 |
+
- Script execution via API: 6 minutes
|
| 375 |
+
|
| 376 |
+
### Quota Limits
|
| 377 |
+
- Script executions per day: varies by account type
|
| 378 |
+
- URL Fetch calls: 20,000 per day (consumer accounts)
|
| 379 |
+
|
| 380 |
+
See [Apps Script Quotas](https://developers.google.com/apps-script/guides/services/quotas) for details.
|
| 381 |
+
|
| 382 |
+
### Cannot Execute Arbitrary Code
|
| 383 |
+
The `run_script_function` tool can only execute functions that are defined in the script. You cannot run arbitrary JavaScript code directly. To run new code:
|
| 384 |
+
|
| 385 |
+
1. Add function to script via `update_script_content`
|
| 386 |
+
2. Execute the function via `run_script_function`
|
| 387 |
+
3. Optionally remove the function after execution
|
| 388 |
+
|
| 389 |
+
### run_script_function Requires API Executable Deployment
|
| 390 |
+
|
| 391 |
+
The `run_script_function` tool requires additional manual configuration in the Apps Script editor:
|
| 392 |
+
|
| 393 |
+
**Why this limitation exists:**
|
| 394 |
+
Google requires scripts to be explicitly deployed as "API Executable" before they can be invoked via the Apps Script API. This is a security measure to prevent unauthorized code execution.
|
| 395 |
+
|
| 396 |
+
**To enable API execution:**
|
| 397 |
+
|
| 398 |
+
1. Open the script in the Apps Script editor
|
| 399 |
+
2. Go to Project Settings (gear icon)
|
| 400 |
+
3. Under "Google Cloud Platform (GCP) Project", click "Change project"
|
| 401 |
+
4. Enter your GCP project number (found in Cloud Console dashboard)
|
| 402 |
+
5. Click "Deploy" > "New deployment"
|
| 403 |
+
6. Select type: "API Executable"
|
| 404 |
+
7. Set "Who has access" to "Anyone" or "Anyone with Google account"
|
| 405 |
+
8. Click "Deploy"
|
| 406 |
+
|
| 407 |
+
After completing these steps, the `run_script_function` tool will work for that script.
|
| 408 |
+
|
| 409 |
+
**Note:** All other tools (create, update, list, deploy) work without this manual step. Only function execution via API requires the API Executable deployment.
|
| 410 |
+
|
| 411 |
+
## Error Handling
|
| 412 |
+
|
| 413 |
+
Common errors and solutions:
|
| 414 |
+
|
| 415 |
+
### 404: Script not found
|
| 416 |
+
- Verify script ID is correct
|
| 417 |
+
- Ensure you have access to the project
|
| 418 |
+
|
| 419 |
+
### 403: Permission denied
|
| 420 |
+
- Check OAuth scopes are authorized
|
| 421 |
+
- Verify you own or have access to the project
|
| 422 |
+
|
| 423 |
+
### Execution timeout
|
| 424 |
+
- Script exceeded 6-minute limit
|
| 425 |
+
- Optimize code or split into smaller functions
|
| 426 |
+
|
| 427 |
+
### Script authorization required
|
| 428 |
+
- Function needs additional permissions
|
| 429 |
+
- User must manually authorize in script editor
|
| 430 |
+
|
| 431 |
+
## Security Considerations
|
| 432 |
+
|
| 433 |
+
### OAuth Scopes
|
| 434 |
+
Scripts inherit the OAuth scopes of the MCP server. Functions that access other Google services (Gmail, Drive, etc.) will only work if those scopes are authorized.
|
| 435 |
+
|
| 436 |
+
### Script Permissions
|
| 437 |
+
Scripts run with the permissions of the script owner, not the user executing them. Be cautious when:
|
| 438 |
+
- Running scripts you did not create
|
| 439 |
+
- Granting additional permissions to scripts
|
| 440 |
+
- Executing functions that modify data
|
| 441 |
+
|
| 442 |
+
### Code Review
|
| 443 |
+
Always review code before executing, especially for:
|
| 444 |
+
- Scripts from unknown sources
|
| 445 |
+
- Functions that access sensitive data
|
| 446 |
+
- Operations that modify or delete data
|
| 447 |
+
|
| 448 |
+
## Testing
|
| 449 |
+
|
| 450 |
+
### Unit Tests
|
| 451 |
+
Run unit tests with mocked API responses:
|
| 452 |
+
|
| 453 |
+
```bash
|
| 454 |
+
uv run pytest tests/gappsscript/test_apps_script_tools.py
|
| 455 |
+
```
|
| 456 |
+
|
| 457 |
+
### Manual Testing
|
| 458 |
+
Test against real Apps Script API:
|
| 459 |
+
|
| 460 |
+
```bash
|
| 461 |
+
python tests/gappsscript/manual_test.py
|
| 462 |
+
```
|
| 463 |
+
|
| 464 |
+
Note: Manual tests create real projects in your account. Delete test projects after running.
|
| 465 |
+
|
| 466 |
+
## References
|
| 467 |
+
|
| 468 |
+
### Apps Script Documentation
|
| 469 |
+
- [Apps Script Overview](https://developers.google.com/apps-script/overview) - Introduction and capabilities
|
| 470 |
+
- [Apps Script Guides](https://developers.google.com/apps-script/guides/services) - Service-specific guides
|
| 471 |
+
- [Apps Script Reference](https://developers.google.com/apps-script/reference) - Complete API reference
|
| 472 |
+
|
| 473 |
+
### Apps Script API (for this MCP integration)
|
| 474 |
+
- [Apps Script API Overview](https://developers.google.com/apps-script/api) - API features and concepts
|
| 475 |
+
- [REST API Reference](https://developers.google.com/apps-script/api/reference/rest) - Endpoint documentation
|
| 476 |
+
- [OAuth Scopes](https://developers.google.com/apps-script/api/how-tos/authorization) - Required permissions
|
| 477 |
+
|
| 478 |
+
### Useful Resources
|
| 479 |
+
- [Apps Script Quotas](https://developers.google.com/apps-script/guides/services/quotas) - Usage limits and restrictions
|
| 480 |
+
- [Best Practices](https://developers.google.com/apps-script/guides/support/best-practices) - Performance and optimization
|
| 481 |
+
- [Troubleshooting](https://developers.google.com/apps-script/guides/support/troubleshooting) - Common issues and solutions
|
| 482 |
+
|
| 483 |
+
## Troubleshooting
|
| 484 |
+
|
| 485 |
+
### "Apps Script API has not been used in project"
|
| 486 |
+
Enable the Apps Script API in the Google Cloud Console (see the Prerequisites section for the direct enablement link)
|
| 487 |
+
|
| 488 |
+
### "Insufficient Permission"
|
| 489 |
+
- Verify OAuth scopes are authorized
|
| 490 |
+
- Re-authenticate if needed
|
| 491 |
+
|
| 492 |
+
### "Function not found"
|
| 493 |
+
- Check function name spelling
|
| 494 |
+
- Verify function exists in the script
|
| 495 |
+
- Ensure the function is not private
|
| 496 |
+
|
| 497 |
+
### "Invalid project structure"
|
| 498 |
+
- Ensure at least one .gs file exists
|
| 499 |
+
- Verify JSON files are valid JSON
|
| 500 |
+
- Check that file names do not contain invalid characters
|
| 501 |
+
|
| 502 |
+
## Contributing
|
| 503 |
+
|
| 504 |
+
When adding new Apps Script tools:
|
| 505 |
+
|
| 506 |
+
1. Follow existing patterns in `apps_script_tools.py`
|
| 507 |
+
2. Add comprehensive docstrings
|
| 508 |
+
3. Include unit tests
|
| 509 |
+
4. Update this README with examples
|
| 510 |
+
5. Test against real API before submitting
|
| 511 |
+
|
| 512 |
+
## License
|
| 513 |
+
|
| 514 |
+
MIT License - see project root LICENSE file
|
gappsscript/TESTING.md
ADDED
|
@@ -0,0 +1,254 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Apps Script MCP Testing Guide
|
| 2 |
+
|
| 3 |
+
This document provides instructions for running unit tests and end-to-end (E2E) tests for the Apps Script MCP feature.
|
| 4 |
+
|
| 5 |
+
## Test Structure
|
| 6 |
+
|
| 7 |
+
```
|
| 8 |
+
tests/gappsscript/
|
| 9 |
+
__init__.py
|
| 10 |
+
test_apps_script_tools.py # Unit tests with mocked API
|
| 11 |
+
manual_test.py # E2E tests against real API
|
| 12 |
+
```
|
| 13 |
+
|
| 14 |
+
## Unit Tests
|
| 15 |
+
|
| 16 |
+
Unit tests use mocked API responses and do not require Google credentials.
|
| 17 |
+
|
| 18 |
+
### Running Unit Tests
|
| 19 |
+
|
| 20 |
+
```bash
|
| 21 |
+
# Run all Apps Script unit tests
|
| 22 |
+
uv run pytest tests/gappsscript/test_apps_script_tools.py -v
|
| 23 |
+
|
| 24 |
+
# Run specific test
|
| 25 |
+
uv run pytest tests/gappsscript/test_apps_script_tools.py::test_list_script_projects -v
|
| 26 |
+
|
| 27 |
+
# Run with coverage
|
| 28 |
+
uv run pytest tests/gappsscript/test_apps_script_tools.py --cov=gappsscript
|
| 29 |
+
```
|
| 30 |
+
|
| 31 |
+
### Test Coverage
|
| 32 |
+
|
| 33 |
+
Unit tests cover:
|
| 34 |
+
- list_script_projects (uses Drive API)
|
| 35 |
+
- get_script_project
|
| 36 |
+
- get_script_content
|
| 37 |
+
- create_script_project
|
| 38 |
+
- update_script_content
|
| 39 |
+
- run_script_function
|
| 40 |
+
- create_deployment
|
| 41 |
+
- list_deployments
|
| 42 |
+
- update_deployment
|
| 43 |
+
- delete_deployment
|
| 44 |
+
- list_script_processes
|
| 45 |
+
|
| 46 |
+
## E2E Tests
|
| 47 |
+
|
| 48 |
+
E2E tests interact with the real Google Apps Script API. They require valid OAuth credentials and will create real resources in your Google account.
|
| 49 |
+
|
| 50 |
+
### Prerequisites
|
| 51 |
+
|
| 52 |
+
1. **Google Cloud Project** with Apps Script API and Drive API enabled
|
| 53 |
+
2. **OAuth credentials** (Desktop application type)
|
| 54 |
+
3. **Test user** added to OAuth consent screen
|
| 55 |
+
|
| 56 |
+
### Setup
|
| 57 |
+
|
| 58 |
+
**Option 1: Default paths (recommended for CI)**
|
| 59 |
+
|
| 60 |
+
Place credentials in the project root:
|
| 61 |
+
```bash
|
| 62 |
+
# Place your OAuth client credentials here
|
| 63 |
+
cp /path/to/your/client_secret.json ./client_secret.json
|
| 64 |
+
```
|
| 65 |
+
|
| 66 |
+
**Option 2: Custom paths via environment variables**
|
| 67 |
+
|
| 68 |
+
```bash
|
| 69 |
+
export GOOGLE_CLIENT_SECRET_PATH=/path/to/client_secret.json
|
| 70 |
+
export GOOGLE_TOKEN_PATH=/path/to/token.pickle
|
| 71 |
+
```
|
| 72 |
+
|
| 73 |
+
### Running E2E Tests
|
| 74 |
+
|
| 75 |
+
```bash
|
| 76 |
+
# Interactive mode (prompts for confirmation)
|
| 77 |
+
uv run python tests/gappsscript/manual_test.py
|
| 78 |
+
|
| 79 |
+
# Non-interactive mode (for CI)
|
| 80 |
+
uv run python tests/gappsscript/manual_test.py --yes
|
| 81 |
+
```
|
| 82 |
+
|
| 83 |
+
### E2E Test Flow
|
| 84 |
+
|
| 85 |
+
The test script performs the following operations:
|
| 86 |
+
|
| 87 |
+
1. **List Projects** - Lists existing Apps Script projects via Drive API
|
| 88 |
+
2. **Create Project** - Creates a new test project
|
| 89 |
+
3. **Get Project** - Retrieves project details
|
| 90 |
+
4. **Update Content** - Adds code to the project
|
| 91 |
+
5. **Run Function** - Attempts to execute a function (see note below)
|
| 92 |
+
6. **Create Deployment** - Creates a versioned deployment
|
| 93 |
+
7. **List Deployments** - Lists all deployments
|
| 94 |
+
8. **List Processes** - Lists recent script executions
|
| 95 |
+
|
| 96 |
+
### Cleanup
|
| 97 |
+
|
| 98 |
+
The test script does not automatically delete created projects. After running tests:
|
| 99 |
+
|
| 100 |
+
1. Go to [Google Apps Script](https://script.google.com/)
|
| 101 |
+
2. Find projects named "MCP Test Project"
|
| 102 |
+
3. Delete them manually via the menu (three dots) > Remove
|
| 103 |
+
|
| 104 |
+
## Headless Linux Testing
|
| 105 |
+
|
| 106 |
+
For headless environments (servers, CI/CD, WSL without GUI):
|
| 107 |
+
|
| 108 |
+
### OAuth Authentication Flow
|
| 109 |
+
|
| 110 |
+
The test script uses a headless-compatible OAuth flow:
|
| 111 |
+
|
| 112 |
+
1. Script prints an authorization URL
|
| 113 |
+
2. Open the URL in any browser (can be on a different machine)
|
| 114 |
+
3. Complete Google sign-in and authorization
|
| 115 |
+
4. Browser redirects to `http://localhost/?code=...` (page will not load)
|
| 116 |
+
5. Copy the full URL from the browser address bar
|
| 117 |
+
6. Paste it into the terminal when prompted
|
| 118 |
+
|
| 119 |
+
### Example Session
|
| 120 |
+
|
| 121 |
+
```
|
| 122 |
+
$ python tests/gappsscript/manual_test.py --yes
|
| 123 |
+
|
| 124 |
+
============================================================
|
| 125 |
+
HEADLESS AUTH
|
| 126 |
+
============================================================
|
| 127 |
+
|
| 128 |
+
1. Open this URL in any browser:
|
| 129 |
+
|
| 130 |
+
https://accounts.google.com/o/oauth2/auth?response_type=code&client_id=...
|
| 131 |
+
|
| 132 |
+
2. Sign in and authorize the app
|
| 133 |
+
3. You'll be redirected to http://localhost (won't load)
|
| 134 |
+
4. Copy the FULL URL from browser address bar
|
| 135 |
+
(looks like: http://localhost/?code=4/0A...&scope=...)
|
| 136 |
+
5. Paste it below:
|
| 137 |
+
|
| 138 |
+
Paste full redirect URL: http://localhost/?code=4/0AQSTgQ...&scope=...
|
| 139 |
+
|
| 140 |
+
Building API services...
|
| 141 |
+
|
| 142 |
+
=== Test: List Projects ===
|
| 143 |
+
Found 3 Apps Script projects:
|
| 144 |
+
...
|
| 145 |
+
```
|
| 146 |
+
|
| 147 |
+
### Credential Storage
|
| 148 |
+
|
| 149 |
+
OAuth tokens are stored as pickle files:
|
| 150 |
+
- Default: `./test_token.pickle` in project root
|
| 151 |
+
- Custom: Set via `GOOGLE_TOKEN_PATH` environment variable
|
| 152 |
+
|
| 153 |
+
Tokens are reused on subsequent runs until they expire or are revoked.
|
| 154 |
+
|
| 155 |
+
## Known Limitations and Caveats
|
| 156 |
+
|
| 157 |
+
### run_script_function Test Failure
|
| 158 |
+
|
| 159 |
+
The "Run Function" test will fail with a 404 error unless you manually configure the script as an API Executable. This is a Google platform requirement, not a bug.
|
| 160 |
+
|
| 161 |
+
To make run_script_function work:
|
| 162 |
+
|
| 163 |
+
1. Open the created test script in Apps Script editor
|
| 164 |
+
2. Go to Project Settings > Change GCP project
|
| 165 |
+
3. Enter your GCP project number
|
| 166 |
+
4. Deploy as "API Executable"
|
| 167 |
+
|
| 168 |
+
For E2E testing purposes, it is acceptable for this test to fail. All other tests should pass.
|
| 169 |
+
|
| 170 |
+
### Drive API Requirement
|
| 171 |
+
|
| 172 |
+
The `list_script_projects` function uses the Google Drive API (not the Apps Script API) because the Apps Script API does not provide a projects.list endpoint. Ensure the Drive API is enabled in your GCP project.
|
| 173 |
+
|
| 174 |
+
### Scope Requirements
|
| 175 |
+
|
| 176 |
+
The E2E tests require these scopes:
|
| 177 |
+
- `script.projects` and `script.projects.readonly`
|
| 178 |
+
- `script.deployments` and `script.deployments.readonly`
|
| 179 |
+
- `script.processes`
|
| 180 |
+
- `drive.readonly`
|
| 181 |
+
|
| 182 |
+
If you encounter "insufficient scopes" errors, delete the stored token file and re-authenticate.
|
| 183 |
+
|
| 184 |
+
### Rate Limits
|
| 185 |
+
|
| 186 |
+
Google enforces rate limits on the Apps Script API. If running tests repeatedly, you may encounter quota errors. Wait a few minutes before retrying.
|
| 187 |
+
|
| 188 |
+
## CI/CD Integration
|
| 189 |
+
|
| 190 |
+
For automated testing in CI/CD pipelines:
|
| 191 |
+
|
| 192 |
+
### Unit Tests Only (Recommended)
|
| 193 |
+
|
| 194 |
+
```yaml
|
| 195 |
+
# GitHub Actions example
|
| 196 |
+
- name: Run unit tests
|
| 197 |
+
run: uv run pytest tests/gappsscript/test_apps_script_tools.py -v
|
| 198 |
+
```
|
| 199 |
+
|
| 200 |
+
### E2E Tests in CI
|
| 201 |
+
|
| 202 |
+
E2E tests require OAuth credentials. Options:
|
| 203 |
+
|
| 204 |
+
1. **Skip E2E in CI** - Run only unit tests in CI, run E2E locally
|
| 205 |
+
2. **Service Account** - Not supported (Apps Script API requires user OAuth)
|
| 206 |
+
3. **Pre-authenticated Token** - Store encrypted token in CI secrets
|
| 207 |
+
|
| 208 |
+
To use a pre-authenticated token:
|
| 209 |
+
```bash
|
| 210 |
+
# Generate token locally
|
| 211 |
+
python tests/gappsscript/manual_test.py
|
| 212 |
+
|
| 213 |
+
# Store test_token.pickle contents as base64 in CI secret
|
| 214 |
+
base64 test_token.pickle > token.b64
|
| 215 |
+
|
| 216 |
+
# In CI, restore and set path
|
| 217 |
+
echo $TOKEN_SECRET | base64 -d > test_token.pickle
|
| 218 |
+
export GOOGLE_TOKEN_PATH=./test_token.pickle
|
| 219 |
+
python tests/gappsscript/manual_test.py --yes
|
| 220 |
+
```
|
| 221 |
+
|
| 222 |
+
Note: Tokens expire and must be refreshed periodically.
|
| 223 |
+
|
| 224 |
+
## Troubleshooting
|
| 225 |
+
|
| 226 |
+
### "Apps Script API has not been used in project"
|
| 227 |
+
|
| 228 |
+
Enable the Apps Script API in your GCP project:
|
| 229 |
+
https://console.cloud.google.com/flows/enableapi?apiid=script.googleapis.com
|
| 230 |
+
|
| 231 |
+
### "Access Not Configured. Drive API has not been used"
|
| 232 |
+
|
| 233 |
+
Enable the Drive API in your GCP project:
|
| 234 |
+
https://console.cloud.google.com/flows/enableapi?apiid=drive.googleapis.com
|
| 235 |
+
|
| 236 |
+
### "Request had insufficient authentication scopes"
|
| 237 |
+
|
| 238 |
+
Delete the token file and re-authenticate:
|
| 239 |
+
```bash
|
| 240 |
+
rm test_token.pickle
|
| 241 |
+
python tests/gappsscript/manual_test.py
|
| 242 |
+
```
|
| 243 |
+
|
| 244 |
+
### "User is not authorized to access this resource"
|
| 245 |
+
|
| 246 |
+
Ensure your email is added as a test user in the OAuth consent screen configuration.
|
| 247 |
+
|
| 248 |
+
### "Requested entity was not found" (404 on run)
|
| 249 |
+
|
| 250 |
+
The script needs to be deployed as "API Executable". See the run_script_function section above.
|
| 251 |
+
|
| 252 |
+
### OAuth redirect fails on headless machine
|
| 253 |
+
|
| 254 |
+
The redirect to `http://localhost` is expected to fail. Copy the URL from the browser address bar (including the error page URL) and paste it into the terminal.
|
gappsscript/__init__.py
ADDED
|
File without changes
|
gappsscript/apps_script_tools.py
ADDED
|
@@ -0,0 +1,1309 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Google Apps Script MCP Tools
|
| 3 |
+
|
| 4 |
+
This module provides MCP tools for interacting with Google Apps Script API.
|
| 5 |
+
"""
|
| 6 |
+
|
| 7 |
+
import logging
|
| 8 |
+
import asyncio
|
| 9 |
+
from typing import List, Dict, Any, Optional
|
| 10 |
+
|
| 11 |
+
from auth.service_decorator import require_google_service
|
| 12 |
+
from core.server import server
|
| 13 |
+
from core.utils import handle_http_errors
|
| 14 |
+
|
| 15 |
+
logger = logging.getLogger(__name__)
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
# Internal implementation functions for testing
|
| 19 |
+
async def _list_script_projects_impl(
    service: Any,
    user_google_email: str,
    page_size: int = 50,
    page_token: Optional[str] = None,
) -> str:
    """Internal implementation for list_script_projects.

    Uses the Drive API to locate Apps Script files, because the Script API
    itself exposes no projects.list method.
    """
    logger.info(
        f"[list_script_projects] Email: {user_google_email}, PageSize: {page_size}"
    )

    # Drive search: Apps Script MIME type only, most recently modified first.
    params: Dict[str, Any] = {
        "q": "mimeType='application/vnd.google-apps.script' and trashed=false",
        "pageSize": page_size,
        "fields": "nextPageToken, files(id, name, createdTime, modifiedTime)",
        "orderBy": "modifiedTime desc",
    }
    if page_token:
        params["pageToken"] = page_token

    result = await asyncio.to_thread(service.files().list(**params).execute)
    scripts = result.get("files", [])

    if not scripts:
        return "No Apps Script projects found."

    lines = [f"Found {len(scripts)} Apps Script projects:"]
    lines.extend(
        f"- {entry.get('name', 'Untitled')} (ID: {entry.get('id', 'Unknown ID')})"
        f" Created: {entry.get('createdTime', 'Unknown')}"
        f" Modified: {entry.get('modifiedTime', 'Unknown')}"
        for entry in scripts
    )

    # Surface the pagination token so callers can request the next page.
    if "nextPageToken" in result:
        lines.append(f"\nNext page token: {result['nextPageToken']}")

    logger.info(
        f"[list_script_projects] Found {len(scripts)} projects for {user_google_email}"
    )
    return "\n".join(lines)
|
| 70 |
+
|
| 71 |
+
|
| 72 |
+
@server.tool()
@handle_http_errors("list_script_projects", is_read_only=True, service_type="drive")
@require_google_service("drive", "drive_read")
async def list_script_projects(
    service: Any,
    user_google_email: str,
    page_size: int = 50,
    page_token: Optional[str] = None,
) -> str:
    """
    Lists Google Apps Script projects accessible to the user.

    Delegates to a Drive API search, since the Script API has no listing
    endpoint for projects.

    Args:
        service: Injected Google API service client
        user_google_email: User's email address
        page_size: Number of results per page (default: 50)
        page_token: Token for pagination (optional)

    Returns:
        str: Formatted list of script projects
    """
    return await _list_script_projects_impl(
        service=service,
        user_google_email=user_google_email,
        page_size=page_size,
        page_token=page_token,
    )
|
| 98 |
+
|
| 99 |
+
|
| 100 |
+
async def _get_script_project_impl(
    service: Any,
    user_google_email: str,
    script_id: str,
) -> str:
    """Internal implementation for get_script_project.

    Fetches the project metadata plus all source files and renders them as a
    single human-readable report (file bodies truncated to 200 characters).
    """
    logger.info(f"[get_script_project] Email: {user_google_email}, ID: {script_id}")

    project = await asyncio.to_thread(
        service.projects().get(scriptId=script_id).execute
    )

    lines = [
        f"Project: {project.get('title', 'Untitled')} (ID: {project.get('scriptId', 'Unknown')})",
        f"Creator: {project.get('creator', {}).get('email', 'Unknown')}",
        f"Created: {project.get('createTime', 'Unknown')}",
        f"Modified: {project.get('updateTime', 'Unknown')}",
        "",
        "Files:",
    ]

    for index, entry in enumerate(project.get("files", []), 1):
        lines.append(
            f"{index}. {entry.get('name', 'Untitled')} ({entry.get('type', 'Unknown')})"
        )
        body = entry.get("source", "")
        if body:
            # Preview only: first 200 characters, with an ellipsis when cut.
            suffix = "..." if len(body) > 200 else ""
            lines.append(f"   {body[:200]}{suffix}")
        lines.append("")

    logger.info(f"[get_script_project] Retrieved project {script_id}")
    return "\n".join(lines)
|
| 140 |
+
|
| 141 |
+
|
| 142 |
+
@server.tool()
@handle_http_errors("get_script_project", is_read_only=True, service_type="script")
@require_google_service("script", "script_readonly")
async def get_script_project(
    service: Any,
    user_google_email: str,
    script_id: str,
) -> str:
    """
    Retrieves complete project details including all source files.

    Args:
        service: Injected Google API service client
        user_google_email: User's email address
        script_id: The script project ID

    Returns:
        str: Formatted project details with all file contents
    """
    return await _get_script_project_impl(
        service=service,
        user_google_email=user_google_email,
        script_id=script_id,
    )
|
| 162 |
+
|
| 163 |
+
|
| 164 |
+
async def _get_script_content_impl(
    service: Any,
    user_google_email: str,
    script_id: str,
    file_name: str,
) -> str:
    """Internal implementation for get_script_content.

    Downloads the whole project and extracts the single requested file.
    """
    logger.info(
        f"[get_script_content] Email: {user_google_email}, ID: {script_id}, File: {file_name}"
    )

    project = await asyncio.to_thread(
        service.projects().get(scriptId=script_id).execute
    )

    # Locate the requested file among the project's files.
    match = next(
        (entry for entry in project.get("files", []) if entry.get("name") == file_name),
        None,
    )

    if not match:
        return f"File '{file_name}' not found in project {script_id}"

    header = f"File: {file_name} ({match.get('type', 'Unknown')})"
    logger.info(f"[get_script_content] Retrieved file {file_name} from {script_id}")
    return "\n".join([header, "", match.get("source", "")])
|
| 197 |
+
|
| 198 |
+
|
| 199 |
+
@server.tool()
@handle_http_errors("get_script_content", is_read_only=True, service_type="script")
@require_google_service("script", "script_readonly")
async def get_script_content(
    service: Any,
    user_google_email: str,
    script_id: str,
    file_name: str,
) -> str:
    """
    Retrieves content of a specific file within a project.

    Args:
        service: Injected Google API service client
        user_google_email: User's email address
        script_id: The script project ID
        file_name: Name of the file to retrieve

    Returns:
        str: File content as string
    """
    return await _get_script_content_impl(
        service=service,
        user_google_email=user_google_email,
        script_id=script_id,
        file_name=file_name,
    )
|
| 223 |
+
|
| 224 |
+
|
| 225 |
+
async def _create_script_project_impl(
    service: Any,
    user_google_email: str,
    title: str,
    parent_id: Optional[str] = None,
) -> str:
    """Internal implementation for create_script_project."""
    logger.info(f"[create_script_project] Email: {user_google_email}, Title: {title}")

    body: Dict[str, Any] = {"title": title}
    if parent_id:
        # Binds the new script to a Drive folder or container document.
        body["parentId"] = parent_id

    created = await asyncio.to_thread(service.projects().create(body=body).execute)

    new_id = created.get("scriptId", "Unknown")
    logger.info(f"[create_script_project] Created project {new_id}")
    return "\n".join(
        [
            f"Created Apps Script project: {title}",
            f"Script ID: {new_id}",
            f"Edit URL: https://script.google.com/d/{new_id}/edit",
        ]
    )
|
| 254 |
+
|
| 255 |
+
|
| 256 |
+
@server.tool()
@handle_http_errors("create_script_project", service_type="script")
@require_google_service("script", "script_projects")
async def create_script_project(
    service: Any,
    user_google_email: str,
    title: str,
    parent_id: Optional[str] = None,
) -> str:
    """
    Creates a new Apps Script project.

    Args:
        service: Injected Google API service client
        user_google_email: User's email address
        title: Project title
        parent_id: Optional Drive folder ID or bound container ID

    Returns:
        str: Formatted string with new project details
    """
    return await _create_script_project_impl(
        service=service,
        user_google_email=user_google_email,
        title=title,
        parent_id=parent_id,
    )
|
| 280 |
+
|
| 281 |
+
|
| 282 |
+
async def _update_script_content_impl(
    service: Any,
    user_google_email: str,
    script_id: str,
    files: List[Dict[str, str]],
) -> str:
    """Internal implementation for update_script_content.

    updateContent replaces the project's entire file set with `files`.
    """
    logger.info(
        f"[update_script_content] Email: {user_google_email}, ID: {script_id}, Files: {len(files)}"
    )

    updated = await asyncio.to_thread(
        service.projects()
        .updateContent(scriptId=script_id, body={"files": files})
        .execute
    )

    lines = [f"Updated script project: {script_id}", "", "Modified files:"]
    lines.extend(
        f"- {entry.get('name', 'Untitled')} ({entry.get('type', 'Unknown')})"
        for entry in updated.get("files", [])
    )

    logger.info(f"[update_script_content] Updated {len(files)} files in {script_id}")
    return "\n".join(lines)
|
| 308 |
+
|
| 309 |
+
|
| 310 |
+
@server.tool()
@handle_http_errors("update_script_content", service_type="script")
@require_google_service("script", "script_projects")
async def update_script_content(
    service: Any,
    user_google_email: str,
    script_id: str,
    files: List[Dict[str, str]],
) -> str:
    """
    Updates or creates files in a script project.

    Args:
        service: Injected Google API service client
        user_google_email: User's email address
        script_id: The script project ID
        files: List of file objects with name, type, and source

    Returns:
        str: Formatted string confirming update with file list
    """
    return await _update_script_content_impl(
        service=service,
        user_google_email=user_google_email,
        script_id=script_id,
        files=files,
    )
|
| 334 |
+
|
| 335 |
+
|
| 336 |
+
async def _run_script_function_impl(
    service: Any,
    user_google_email: str,
    script_id: str,
    function_name: str,
    parameters: Optional[List[Any]] = None,
    dev_mode: bool = False,
) -> str:
    """Internal implementation for run_script_function.

    Executes `function_name` via the Apps Script API scripts.run method and
    formats either the result or an error report. The script must be deployed
    as "API Executable" for the call to succeed.

    Args:
        service: Injected Google API service client
        user_google_email: User's email address (logging only)
        script_id: The script project ID
        function_name: Name of the Apps Script function to execute
        parameters: Optional positional parameters to pass to the function
        dev_mode: Run the latest saved code instead of the deployed version

    Returns:
        str: "Execution successful" with the result, or "Execution failed"
        with the error message (plus script error details when available).
    """
    logger.info(
        f"[run_script_function] Email: {user_google_email}, ID: {script_id}, Function: {function_name}"
    )

    request_body: Dict[str, Any] = {"function": function_name, "devMode": dev_mode}

    if parameters:
        request_body["parameters"] = parameters

    try:
        response = await asyncio.to_thread(
            service.scripts().run(scriptId=script_id, body=request_body).execute
        )

        if "error" in response:
            # scripts.run reports script-level failures inside the response
            # body as a Status object; details[0] carries the Apps Script
            # error message and stack trace, which the caller needs to debug.
            error_details = response["error"]
            error_message = error_details.get("message", "Unknown error")
            output = [
                f"Execution failed\nFunction: {function_name}\nError: {error_message}"
            ]
            for detail in error_details.get("details", []):
                script_error = detail.get("errorMessage")
                if script_error:
                    output.append(f"Script error: {script_error}")
                for frame in detail.get("scriptStackTraceElements", []):
                    output.append(
                        f"  at {frame.get('function', '?')} (line {frame.get('lineNumber', '?')})"
                    )
            return "\n".join(output)

        result = response.get("response", {}).get("result")
        output = [
            "Execution successful",
            f"Function: {function_name}",
            f"Result: {result}",
        ]

        logger.info(f"[run_script_function] Successfully executed {function_name}")
        return "\n".join(output)

    except Exception as e:
        # Deliberate best-effort: execution errors are returned as text so the
        # MCP tool reports them to the model instead of raising.
        logger.error(f"[run_script_function] Execution error: {str(e)}")
        return f"Execution failed\nFunction: {function_name}\nError: {str(e)}"
|
| 379 |
+
|
| 380 |
+
|
| 381 |
+
@server.tool()
@handle_http_errors("run_script_function", service_type="script")
@require_google_service("script", "script_projects")
async def run_script_function(
    service: Any,
    user_google_email: str,
    script_id: str,
    function_name: str,
    parameters: Optional[List[Any]] = None,
    dev_mode: bool = False,
) -> str:
    """
    Executes a function in a deployed script.

    Args:
        service: Injected Google API service client
        user_google_email: User's email address
        script_id: The script project ID
        function_name: Name of function to execute
        parameters: Optional list of parameters to pass
        dev_mode: Whether to run latest code vs deployed version

    Returns:
        str: Formatted string with execution result or error
    """
    return await _run_script_function_impl(
        service=service,
        user_google_email=user_google_email,
        script_id=script_id,
        function_name=function_name,
        parameters=parameters,
        dev_mode=dev_mode,
    )
|
| 409 |
+
|
| 410 |
+
|
| 411 |
+
async def _create_deployment_impl(
    service: Any,
    user_google_email: str,
    script_id: str,
    description: str,
    version_description: Optional[str] = None,
) -> str:
    """Internal implementation for create_deployment.

    A deployment must reference an immutable version, so a new version is
    created first and the deployment then points at it.
    """
    logger.info(
        f"[create_deployment] Email: {user_google_email}, ID: {script_id}, Desc: {description}"
    )

    # Step 1: snapshot the current code as a new version.
    new_version = await asyncio.to_thread(
        service.projects()
        .versions()
        .create(scriptId=script_id, body={"description": version_description or description})
        .execute
    )
    version_no = new_version.get("versionNumber")
    logger.info(f"[create_deployment] Created version {version_no}")

    # Step 2: deploy that version (body is a DeploymentConfig).
    new_deployment = await asyncio.to_thread(
        service.projects()
        .deployments()
        .create(
            scriptId=script_id,
            body={"versionNumber": version_no, "description": description},
        )
        .execute
    )
    new_deployment_id = new_deployment.get("deploymentId", "Unknown")

    logger.info(f"[create_deployment] Created deployment {new_deployment_id}")
    return "\n".join(
        [
            f"Created deployment for script: {script_id}",
            f"Deployment ID: {new_deployment_id}",
            f"Version: {version_no}",
            f"Description: {description}",
        ]
    )
|
| 461 |
+
|
| 462 |
+
|
| 463 |
+
@server.tool()
@handle_http_errors("create_deployment", service_type="script")
@require_google_service("script", "script_deployments")
async def create_deployment(
    service: Any,
    user_google_email: str,
    script_id: str,
    description: str,
    version_description: Optional[str] = None,
) -> str:
    """
    Creates a new deployment of the script.

    A new version is created automatically and the deployment points at it.

    Args:
        service: Injected Google API service client
        user_google_email: User's email address
        script_id: The script project ID
        description: Deployment description
        version_description: Optional version description

    Returns:
        str: Formatted string with deployment details
    """
    return await _create_deployment_impl(
        service=service,
        user_google_email=user_google_email,
        script_id=script_id,
        description=description,
        version_description=version_description,
    )
|
| 489 |
+
|
| 490 |
+
|
| 491 |
+
async def _list_deployments_impl(
    service: Any,
    user_google_email: str,
    script_id: str,
) -> str:
    """Internal implementation for list_deployments.

    Args:
        service: Injected Google API service client
        user_google_email: User's email address (logging only)
        script_id: The script project ID

    Returns:
        str: Numbered list of deployments, or a "none found" message.
    """
    logger.info(f"[list_deployments] Email: {user_google_email}, ID: {script_id}")

    response = await asyncio.to_thread(
        service.projects().deployments().list(scriptId=script_id).execute
    )

    deployments = response.get("deployments", [])

    if not deployments:
        return f"No deployments found for script: {script_id}"

    output = [f"Deployments for script: {script_id}", ""]

    for i, deployment in enumerate(deployments, 1):
        deployment_id = deployment.get("deploymentId", "Unknown")
        # A Deployment resource nests its description under deploymentConfig;
        # fall back to a top-level key for robustness.
        config = deployment.get("deploymentConfig", {})
        description = config.get(
            "description", deployment.get("description", "No description")
        )
        update_time = deployment.get("updateTime", "Unknown")

        output.append(f"{i}. {description} ({deployment_id})")
        output.append(f"   Updated: {update_time}")
        output.append("")

    logger.info(f"[list_deployments] Found {len(deployments)} deployments")
    return "\n".join(output)
|
| 521 |
+
|
| 522 |
+
|
| 523 |
+
@server.tool()
@handle_http_errors("list_deployments", is_read_only=True, service_type="script")
@require_google_service("script", "script_deployments_readonly")
async def list_deployments(
    service: Any,
    user_google_email: str,
    script_id: str,
) -> str:
    """
    Lists all deployments for a script project.

    Args:
        service: Injected Google API service client
        user_google_email: User's email address
        script_id: The script project ID

    Returns:
        str: Formatted string with deployment list
    """
    return await _list_deployments_impl(
        service=service,
        user_google_email=user_google_email,
        script_id=script_id,
    )
|
| 543 |
+
|
| 544 |
+
|
| 545 |
+
async def _update_deployment_impl(
    service: Any,
    user_google_email: str,
    script_id: str,
    deployment_id: str,
    description: Optional[str] = None,
) -> str:
    """Internal implementation for update_deployment.

    The Apps Script API's deployments.update takes an UpdateDeploymentRequest:
    the mutable settings must be nested under "deploymentConfig", not sent at
    the top level of the request body.

    Args:
        service: Injected Google API service client
        user_google_email: User's email address (logging only)
        script_id: The script project ID
        deployment_id: The deployment ID to update
        description: Optional new description

    Returns:
        str: Formatted string confirming the update.
    """
    logger.info(
        f"[update_deployment] Email: {user_google_email}, Script: {script_id}, Deployment: {deployment_id}"
    )

    deployment_config: Dict[str, Any] = {}
    if description:
        deployment_config["description"] = description
    request_body = {"deploymentConfig": deployment_config}

    deployment = await asyncio.to_thread(
        service.projects()
        .deployments()
        .update(scriptId=script_id, deploymentId=deployment_id, body=request_body)
        .execute
    )

    # The response is a Deployment resource; its description lives inside
    # deploymentConfig (fall back to a top-level key for robustness).
    config = deployment.get("deploymentConfig", {})
    new_description = config.get(
        "description", deployment.get("description", "No description")
    )

    output = [
        f"Updated deployment: {deployment_id}",
        f"Script: {script_id}",
        f"Description: {new_description}",
    ]

    logger.info(f"[update_deployment] Updated deployment {deployment_id}")
    return "\n".join(output)
|
| 576 |
+
|
| 577 |
+
|
| 578 |
+
@server.tool()
@handle_http_errors("update_deployment", service_type="script")
@require_google_service("script", "script_deployments")
async def update_deployment(
    service: Any,
    user_google_email: str,
    script_id: str,
    deployment_id: str,
    description: Optional[str] = None,
) -> str:
    """
    Updates an existing deployment configuration.

    Args:
        service: Injected Google API service client
        user_google_email: User's email address
        script_id: The script project ID
        deployment_id: The deployment ID to update
        description: Optional new description

    Returns:
        str: Formatted string confirming update
    """
    return await _update_deployment_impl(
        service=service,
        user_google_email=user_google_email,
        script_id=script_id,
        deployment_id=deployment_id,
        description=description,
    )
|
| 604 |
+
|
| 605 |
+
|
| 606 |
+
async def _delete_deployment_impl(
    service: Any,
    user_google_email: str,
    script_id: str,
    deployment_id: str,
) -> str:
    """Internal implementation for delete_deployment."""
    logger.info(
        f"[delete_deployment] Email: {user_google_email}, Script: {script_id}, Deployment: {deployment_id}"
    )

    # The API returns an empty body on success; only completion matters here.
    await asyncio.to_thread(
        service.projects()
        .deployments()
        .delete(scriptId=script_id, deploymentId=deployment_id)
        .execute
    )

    logger.info(f"[delete_deployment] Deleted deployment {deployment_id}")
    return f"Deleted deployment: {deployment_id} from script: {script_id}"
|
| 628 |
+
|
| 629 |
+
|
| 630 |
+
@server.tool()
@handle_http_errors("delete_deployment", service_type="script")
@require_google_service("script", "script_deployments")
async def delete_deployment(
    service: Any,
    user_google_email: str,
    script_id: str,
    deployment_id: str,
) -> str:
    """
    Deletes a deployment.

    Args:
        service: Injected Google API service client
        user_google_email: User's email address
        script_id: The script project ID
        deployment_id: The deployment ID to delete

    Returns:
        str: Confirmation message
    """
    return await _delete_deployment_impl(
        service=service,
        user_google_email=user_google_email,
        script_id=script_id,
        deployment_id=deployment_id,
    )
|
| 654 |
+
|
| 655 |
+
|
| 656 |
+
async def _list_script_processes_impl(
    service: Any,
    user_google_email: str,
    page_size: int = 50,
    script_id: Optional[str] = None,
) -> str:
    """Internal implementation for list_script_processes.

    Args:
        service: Injected Apps Script API client.
        user_google_email: User's email address (used for logging only).
        page_size: Maximum number of processes to return.
        script_id: Optional script ID to restrict results to one project.

    Returns:
        str: Human-readable summary of recent script executions.
    """
    logger.info(
        f"[list_script_processes] Email: {user_google_email}, PageSize: {page_size}"
    )

    request_params = {"pageSize": page_size}
    if script_id:
        # BUG FIX: processes.list takes the script filter as the nested query
        # parameter "userProcessFilter.scriptId"; the Python discovery client
        # exposes it as "userProcessFilter_scriptId". A bare "scriptId" keyword
        # is rejected by the generated client (TypeError: unexpected keyword).
        request_params["userProcessFilter_scriptId"] = script_id

    # Blocking discovery-client call; execute off the event loop.
    response = await asyncio.to_thread(
        service.processes().list(**request_params).execute
    )

    processes = response.get("processes", [])

    if not processes:
        return "No recent script executions found."

    output = ["Recent script executions:", ""]

    for i, process in enumerate(processes, 1):
        function_name = process.get("functionName", "Unknown")
        process_status = process.get("processStatus", "Unknown")
        start_time = process.get("startTime", "Unknown")
        duration = process.get("duration", "Unknown")

        output.append(f"{i}. {function_name}")
        output.append(f"   Status: {process_status}")
        output.append(f"   Started: {start_time}")
        output.append(f"   Duration: {duration}")
        output.append("")

    logger.info(f"[list_script_processes] Found {len(processes)} processes")
    return "\n".join(output)
|
| 696 |
+
|
| 697 |
+
|
| 698 |
+
@server.tool()
@handle_http_errors("list_script_processes", is_read_only=True, service_type="script")
@require_google_service("script", "script_readonly")
async def list_script_processes(
    service: Any,
    user_google_email: str,
    page_size: int = 50,
    script_id: Optional[str] = None,
) -> str:
    """
    Lists recent execution processes for user's scripts.

    MCP registration wrapper around ``_list_script_processes_impl``.

    Args:
        service: Injected Google API service client
        user_google_email: User's email address
        page_size: Number of results (default: 50)
        script_id: Optional filter by script ID

    Returns:
        str: Formatted string with process list
    """
    return await _list_script_processes_impl(service, user_google_email, page_size, script_id)
|
| 722 |
+
|
| 723 |
+
|
| 724 |
+
# ============================================================================
|
| 725 |
+
# Delete Script Project
|
| 726 |
+
# ============================================================================
|
| 727 |
+
|
| 728 |
+
|
| 729 |
+
async def _delete_script_project_impl(
    service: Any,
    user_google_email: str,
    script_id: str,
) -> str:
    """Internal implementation for delete_script_project.

    Apps Script projects live in Drive, so deletion goes through the
    Drive ``files.delete`` endpoint rather than the Apps Script API.
    """
    logger.info(
        f"[delete_script_project] Email: {user_google_email}, ScriptID: {script_id}"
    )

    # Apps Script projects are stored as Drive files
    drive_delete = service.files().delete(fileId=script_id)
    await asyncio.to_thread(drive_delete.execute)

    logger.info(f"[delete_script_project] Deleted script {script_id}")
    return f"Deleted Apps Script project: {script_id}"
|
| 744 |
+
|
| 745 |
+
|
| 746 |
+
@server.tool()
@handle_http_errors("delete_script_project", is_read_only=False, service_type="drive")
@require_google_service("drive", "drive_full")
async def delete_script_project(
    service: Any,
    user_google_email: str,
    script_id: str,
) -> str:
    """
    Deletes an Apps Script project.

    This permanently deletes the script project. The action cannot be undone.
    Note: uses a Drive service (not the Apps Script API) because script
    projects are stored as Drive files.

    Args:
        service: Injected Google API service client
        user_google_email: User's email address
        script_id: The script project ID to delete

    Returns:
        str: Confirmation message
    """
    return await _delete_script_project_impl(service, user_google_email, script_id)
|
| 768 |
+
|
| 769 |
+
|
| 770 |
+
# ============================================================================
|
| 771 |
+
# Version Management
|
| 772 |
+
# ============================================================================
|
| 773 |
+
|
| 774 |
+
|
| 775 |
+
async def _list_versions_impl(
    service: Any,
    user_google_email: str,
    script_id: str,
) -> str:
    """Internal implementation for list_versions.

    Fetches all immutable version snapshots of a script project and
    renders them as a readable, blank-line-separated list.
    """
    logger.info(f"[list_versions] Email: {user_google_email}, ScriptID: {script_id}")

    response = await asyncio.to_thread(
        service.projects().versions().list(scriptId=script_id).execute
    )

    versions = response.get("versions", [])

    if not versions:
        return f"No versions found for script: {script_id}"

    lines = [f"Versions for script: {script_id}", ""]

    for entry in versions:
        number = entry.get("versionNumber", "Unknown")
        summary = entry.get("description", "No description")
        created = entry.get("createTime", "Unknown")

        lines.extend(
            [
                f"Version {number}: {summary}",
                f"  Created: {created}",
                "",
            ]
        )

    logger.info(f"[list_versions] Found {len(versions)} versions")
    return "\n".join(lines)
|
| 805 |
+
|
| 806 |
+
|
| 807 |
+
@server.tool()
@handle_http_errors("list_versions", is_read_only=True, service_type="script")
@require_google_service("script", "script_readonly")
async def list_versions(
    service: Any,
    user_google_email: str,
    script_id: str,
) -> str:
    """
    Lists all versions of a script project.

    Versions are immutable snapshots of your script code, created when you
    deploy or explicitly create a version. This wrapper delegates to
    ``_list_versions_impl``.

    Args:
        service: Injected Google API service client
        user_google_email: User's email address
        script_id: The script project ID

    Returns:
        str: Formatted string with version list
    """
    return await _list_versions_impl(service, user_google_email, script_id)
|
| 830 |
+
|
| 831 |
+
|
| 832 |
+
async def _create_version_impl(
    service: Any,
    user_google_email: str,
    script_id: str,
    description: Optional[str] = None,
) -> str:
    """Internal implementation for create_version.

    Creates a new immutable version snapshot via
    ``projects.versions.create`` and reports its number and timestamp.
    """
    logger.info(f"[create_version] Email: {user_google_email}, ScriptID: {script_id}")

    # Only send a description field when the caller supplied one.
    request_body = {"description": description} if description else {}

    create_request = (
        service.projects().versions().create(scriptId=script_id, body=request_body)
    )
    version = await asyncio.to_thread(create_request.execute)

    version_number = version.get("versionNumber", "Unknown")
    create_time = version.get("createTime", "Unknown")

    summary = [
        f"Created version {version_number} for script: {script_id}",
        f"Description: {description or 'No description'}",
        f"Created: {create_time}",
    ]

    logger.info(f"[create_version] Created version {version_number}")
    return "\n".join(summary)
|
| 863 |
+
|
| 864 |
+
|
| 865 |
+
@server.tool()
@handle_http_errors("create_version", is_read_only=False, service_type="script")
@require_google_service("script", "script_full")
async def create_version(
    service: Any,
    user_google_email: str,
    script_id: str,
    description: Optional[str] = None,
) -> str:
    """
    Creates a new immutable version of a script project.

    Versions capture a snapshot of the current script code; once created
    they cannot be modified. Delegates to ``_create_version_impl``.

    Args:
        service: Injected Google API service client
        user_google_email: User's email address
        script_id: The script project ID
        description: Optional description for this version

    Returns:
        str: Formatted string with new version details
    """
    return await _create_version_impl(service, user_google_email, script_id, description)
|
| 892 |
+
|
| 893 |
+
|
| 894 |
+
async def _get_version_impl(
    service: Any,
    user_google_email: str,
    script_id: str,
    version_number: int,
) -> str:
    """Internal implementation for get_version.

    Retrieves one version record via ``projects.versions.get`` and
    formats its number, description, and creation time.
    """
    logger.info(
        f"[get_version] Email: {user_google_email}, ScriptID: {script_id}, Version: {version_number}"
    )

    get_request = (
        service.projects()
        .versions()
        .get(scriptId=script_id, versionNumber=version_number)
    )
    version = await asyncio.to_thread(get_request.execute)

    ver_num = version.get("versionNumber", "Unknown")

    details = [
        f"Version {ver_num} of script: {script_id}",
        f"Description: {version.get('description', 'No description')}",
        f"Created: {version.get('createTime', 'Unknown')}",
    ]

    logger.info(f"[get_version] Retrieved version {ver_num}")
    return "\n".join(details)
|
| 924 |
+
|
| 925 |
+
|
| 926 |
+
@server.tool()
@handle_http_errors("get_version", is_read_only=True, service_type="script")
@require_google_service("script", "script_readonly")
async def get_version(
    service: Any,
    user_google_email: str,
    script_id: str,
    version_number: int,
) -> str:
    """
    Gets details of a specific version.

    MCP registration wrapper around ``_get_version_impl``.

    Args:
        service: Injected Google API service client
        user_google_email: User's email address
        script_id: The script project ID
        version_number: The version number to retrieve (1, 2, 3, etc.)

    Returns:
        str: Formatted string with version details
    """
    return await _get_version_impl(service, user_google_email, script_id, version_number)
|
| 950 |
+
|
| 951 |
+
|
| 952 |
+
# ============================================================================
|
| 953 |
+
# Metrics
|
| 954 |
+
# ============================================================================
|
| 955 |
+
|
| 956 |
+
|
| 957 |
+
async def _get_script_metrics_impl(
    service: Any,
    user_google_email: str,
    script_id: str,
    metrics_granularity: str = "DAILY",
) -> str:
    """Internal implementation for get_script_metrics.

    Calls ``projects.getMetrics`` and formats the three metric series the
    API returns (activeUsers, totalExecutions, failedExecutions).

    Args:
        service: Injected Apps Script API client.
        user_google_email: User's email address (used for logging only).
        script_id: The script project ID.
        metrics_granularity: "DAILY" or "WEEKLY" bucket size.

    Returns:
        str: Formatted metrics report, or a no-data notice when every
        series is empty.
    """
    logger.info(
        f"[get_script_metrics] Email: {user_google_email}, ScriptID: {script_id}, Granularity: {metrics_granularity}"
    )

    request_params = {
        "scriptId": script_id,
        "metricsGranularity": metrics_granularity,
    }

    response = await asyncio.to_thread(
        service.projects().getMetrics(**request_params).execute
    )

    output = [
        f"Metrics for script: {script_id}",
        f"Granularity: {metrics_granularity}",
        "",
    ]

    def _append_series(heading: str, series: List[Dict[str, Any]], unit: str) -> None:
        # One-line purpose: render a single MetricsValue series (startTime /
        # endTime / value entries) under a heading, followed by a blank line.
        # Skips empty series entirely, matching the original per-series guards.
        if not series:
            return
        output.append(heading)
        for metric in series:
            start_time = metric.get("startTime", "Unknown")
            end_time = metric.get("endTime", "Unknown")
            value = metric.get("value", "0")
            output.append(f"   {start_time} to {end_time}: {value} {unit}")
        output.append("")

    active_users = response.get("activeUsers", [])
    total_executions = response.get("totalExecutions", [])
    failed_executions = response.get("failedExecutions", [])

    _append_series("Active Users:", active_users, "users")
    _append_series("Total Executions:", total_executions, "executions")
    _append_series("Failed Executions:", failed_executions, "failures")

    if not active_users and not total_executions and not failed_executions:
        output.append("No metrics data available for this script.")

    logger.info(f"[get_script_metrics] Retrieved metrics for {script_id}")
    return "\n".join(output)
|
| 1021 |
+
|
| 1022 |
+
|
| 1023 |
+
@server.tool()
@handle_http_errors("get_script_metrics", is_read_only=True, service_type="script")
@require_google_service("script", "script_readonly")
async def get_script_metrics(
    service: Any,
    user_google_email: str,
    script_id: str,
    metrics_granularity: str = "DAILY",
) -> str:
    """
    Gets execution metrics for a script project.

    Returns analytics data including active users, total executions,
    and failed executions over time. Delegates to
    ``_get_script_metrics_impl``.

    Args:
        service: Injected Google API service client
        user_google_email: User's email address
        script_id: The script project ID
        metrics_granularity: Granularity of metrics - "DAILY" or "WEEKLY"

    Returns:
        str: Formatted string with metrics data
    """
    return await _get_script_metrics_impl(service, user_google_email, script_id, metrics_granularity)
|
| 1050 |
+
|
| 1051 |
+
|
| 1052 |
+
# ============================================================================
|
| 1053 |
+
# Trigger Code Generation
|
| 1054 |
+
# ============================================================================
|
| 1055 |
+
|
| 1056 |
+
|
| 1057 |
+
def _generate_trigger_code_impl(
    trigger_type: str,
    function_name: str,
    schedule: str = "",
) -> str:
    """Internal implementation for generate_trigger_code.

    Builds ready-to-paste Apps Script source that installs (or acts as) a
    trigger for ``function_name``, plus usage instructions. No API calls are
    made; this is pure string generation.

    Args:
        trigger_type: One of on_open, on_edit, time_minutes, time_hours,
            time_daily, time_weekly, on_form_submit, on_change.
        function_name: Handler function the trigger should invoke.
        schedule: Type-dependent schedule detail (interval, hour, or weekday);
            each branch falls back to its own default when empty.

    Returns:
        str: Instructions block + generated code, or an error string listing
        valid types when ``trigger_type`` is unrecognized.
    """
    code_lines = []

    # --- Simple triggers: no installer function needed, just a rename. ---
    if trigger_type == "on_open":
        code_lines = [
            "// Simple trigger - just rename your function to 'onOpen'",
            "// This runs automatically when the document is opened",
            "function onOpen(e) {",
            f"  {function_name}();",
            "}",
        ]
    elif trigger_type == "on_edit":
        code_lines = [
            "// Simple trigger - just rename your function to 'onEdit'",
            "// This runs automatically when a user edits the spreadsheet",
            "function onEdit(e) {",
            f"  {function_name}();",
            "}",
        ]
    # --- Installable time-based triggers: each emits a one-shot installer
    # --- that first removes any existing trigger for the same handler.
    elif trigger_type == "time_minutes":
        interval = schedule or "5"  # default: every 5 minutes
        code_lines = [
            "// Run this function ONCE to install the trigger",
            f"function createTimeTrigger_{function_name}() {{",
            "  // Delete existing triggers for this function first",
            "  const triggers = ScriptApp.getProjectTriggers();",
            "  triggers.forEach(trigger => {",
            f"    if (trigger.getHandlerFunction() === '{function_name}') {{",
            "      ScriptApp.deleteTrigger(trigger);",
            "    }",
            "  });",
            "",
            f"  // Create new trigger - runs every {interval} minutes",
            f"  ScriptApp.newTrigger('{function_name}')",
            "    .timeBased()",
            f"    .everyMinutes({interval})",
            "    .create();",
            "",
            f"  Logger.log('Trigger created: {function_name} will run every {interval} minutes');",
            "}",
        ]
    elif trigger_type == "time_hours":
        interval = schedule or "1"  # default: every hour
        code_lines = [
            "// Run this function ONCE to install the trigger",
            f"function createTimeTrigger_{function_name}() {{",
            "  // Delete existing triggers for this function first",
            "  const triggers = ScriptApp.getProjectTriggers();",
            "  triggers.forEach(trigger => {",
            f"    if (trigger.getHandlerFunction() === '{function_name}') {{",
            "      ScriptApp.deleteTrigger(trigger);",
            "    }",
            "  });",
            "",
            f"  // Create new trigger - runs every {interval} hour(s)",
            f"  ScriptApp.newTrigger('{function_name}')",
            "    .timeBased()",
            f"    .everyHours({interval})",
            "    .create();",
            "",
            f"  Logger.log('Trigger created: {function_name} will run every {interval} hour(s)');",
            "}",
        ]
    elif trigger_type == "time_daily":
        hour = schedule or "9"  # default: 9:00
        code_lines = [
            "// Run this function ONCE to install the trigger",
            f"function createDailyTrigger_{function_name}() {{",
            "  // Delete existing triggers for this function first",
            "  const triggers = ScriptApp.getProjectTriggers();",
            "  triggers.forEach(trigger => {",
            f"    if (trigger.getHandlerFunction() === '{function_name}') {{",
            "      ScriptApp.deleteTrigger(trigger);",
            "    }",
            "  });",
            "",
            f"  // Create new trigger - runs daily at {hour}:00",
            f"  ScriptApp.newTrigger('{function_name}')",
            "    .timeBased()",
            f"    .atHour({hour})",
            "    .everyDays(1)",
            "    .create();",
            "",
            f"  Logger.log('Trigger created: {function_name} will run daily at {hour}:00');",
            "}",
        ]
    elif trigger_type == "time_weekly":
        day = schedule.upper() if schedule else "MONDAY"  # WeekDay enum name
        code_lines = [
            "// Run this function ONCE to install the trigger",
            f"function createWeeklyTrigger_{function_name}() {{",
            "  // Delete existing triggers for this function first",
            "  const triggers = ScriptApp.getProjectTriggers();",
            "  triggers.forEach(trigger => {",
            f"    if (trigger.getHandlerFunction() === '{function_name}') {{",
            "      ScriptApp.deleteTrigger(trigger);",
            "    }",
            "  });",
            "",
            f"  // Create new trigger - runs weekly on {day}",
            f"  ScriptApp.newTrigger('{function_name}')",
            "    .timeBased()",
            f"    .onWeekDay(ScriptApp.WeekDay.{day})",
            "    .atHour(9)",
            "    .create();",
            "",
            f"  Logger.log('Trigger created: {function_name} will run every {day} at 9:00');",
            "}",
        ]
    # --- Installable event triggers: require a container-bound script. ---
    elif trigger_type == "on_form_submit":
        code_lines = [
            "// Run this function ONCE to install the trigger",
            "// This must be run from a script BOUND to the Google Form",
            f"function createFormSubmitTrigger_{function_name}() {{",
            "  // Delete existing triggers for this function first",
            "  const triggers = ScriptApp.getProjectTriggers();",
            "  triggers.forEach(trigger => {",
            f"    if (trigger.getHandlerFunction() === '{function_name}') {{",
            "      ScriptApp.deleteTrigger(trigger);",
            "    }",
            "  });",
            "",
            "  // Create new trigger - runs when form is submitted",
            f"  ScriptApp.newTrigger('{function_name}')",
            "    .forForm(FormApp.getActiveForm())",
            "    .onFormSubmit()",
            "    .create();",
            "",
            f"  Logger.log('Trigger created: {function_name} will run on form submit');",
            "}",
        ]
    elif trigger_type == "on_change":
        code_lines = [
            "// Run this function ONCE to install the trigger",
            "// This must be run from a script BOUND to a Google Sheet",
            f"function createChangeTrigger_{function_name}() {{",
            "  // Delete existing triggers for this function first",
            "  const triggers = ScriptApp.getProjectTriggers();",
            "  triggers.forEach(trigger => {",
            f"    if (trigger.getHandlerFunction() === '{function_name}') {{",
            "      ScriptApp.deleteTrigger(trigger);",
            "    }",
            "  });",
            "",
            "  // Create new trigger - runs when spreadsheet changes",
            f"  ScriptApp.newTrigger('{function_name}')",
            "    .forSpreadsheet(SpreadsheetApp.getActive())",
            "    .onChange()",
            "    .create();",
            "",
            f"  Logger.log('Trigger created: {function_name} will run on spreadsheet change');",
            "}",
        ]
    else:
        # Unknown type: return an error string instead of raising, so the
        # MCP tool surfaces a helpful message to the caller.
        return (
            f"Unknown trigger type: {trigger_type}\n\n"
            "Valid types: time_minutes, time_hours, time_daily, time_weekly, "
            "on_open, on_edit, on_form_submit, on_change"
        )

    code = "\n".join(code_lines)

    # Pick the instruction header that matches the trigger family.
    instructions = []
    if trigger_type.startswith("on_"):
        if trigger_type in ("on_open", "on_edit"):
            # Simple triggers: no installer step, but restricted capabilities.
            instructions = [
                "SIMPLE TRIGGER",
                "=" * 50,
                "",
                "Add this code to your script. Simple triggers run automatically",
                "when the event occurs - no setup function needed.",
                "",
                "Note: Simple triggers have limitations:",
                "- Cannot access services that require authorization",
                "- Cannot run longer than 30 seconds",
                "- Cannot make external HTTP requests",
                "",
                "For more capabilities, use an installable trigger instead.",
                "",
                "CODE TO ADD:",
                "-" * 50,
            ]
        else:
            # Installable event triggers (on_form_submit / on_change).
            instructions = [
                "INSTALLABLE TRIGGER",
                "=" * 50,
                "",
                "1. Add this code to your script",
                f"2. Run the setup function once: createFormSubmitTrigger_{function_name}() or similar",
                "3. The trigger will then run automatically",
                "",
                "CODE TO ADD:",
                "-" * 50,
            ]
    else:
        # Installable time-based triggers.
        instructions = [
            "INSTALLABLE TRIGGER",
            "=" * 50,
            "",
            "1. Add this code to your script using update_script_content",
            "2. Run the setup function ONCE (manually in Apps Script editor or via run_script_function)",
            "3. The trigger will then run automatically on schedule",
            "",
            "To check installed triggers: Apps Script editor > Triggers (clock icon)",
            "",
            "CODE TO ADD:",
            "-" * 50,
        ]

    return "\n".join(instructions) + "\n\n" + code
|
| 1272 |
+
|
| 1273 |
+
|
| 1274 |
+
@server.tool()
async def generate_trigger_code(
    trigger_type: str,
    function_name: str,
    schedule: str = "",
) -> str:
    """
    Generates Apps Script code for creating triggers.

    The Apps Script API cannot create triggers directly - they must be created
    from within Apps Script itself. This tool generates the code you need and
    delegates the actual string construction to ``_generate_trigger_code_impl``.

    Args:
        trigger_type: Type of trigger. One of:
            - "time_minutes" (run every N minutes: 1, 5, 10, 15, 30)
            - "time_hours" (run every N hours: 1, 2, 4, 6, 8, 12)
            - "time_daily" (run daily at a specific hour: 0-23)
            - "time_weekly" (run weekly on a specific day)
            - "on_open" (simple trigger - runs when document opens)
            - "on_edit" (simple trigger - runs when user edits)
            - "on_form_submit" (runs when form is submitted)
            - "on_change" (runs when content changes)

        function_name: The function to run when trigger fires (e.g., "sendDailyReport")

        schedule: Schedule details (depends on trigger_type):
            - For time_minutes: "1", "5", "10", "15", or "30"
            - For time_hours: "1", "2", "4", "6", "8", or "12"
            - For time_daily: hour as "0"-"23" (e.g., "9" for 9am)
            - For time_weekly: "MONDAY", "TUESDAY", etc.
            - For simple triggers (on_open, on_edit): not needed

    Returns:
        str: Apps Script code to create the trigger
    """
    return _generate_trigger_code_impl(
        trigger_type,
        function_name,
        schedule,
    )
|
gcalendar/__init__.py
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
# Make the calendar directory a Python package
|
gcalendar/calendar_tools.py
ADDED
|
@@ -0,0 +1,1075 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Google Calendar MCP Tools
|
| 3 |
+
|
| 4 |
+
This module provides MCP tools for interacting with Google Calendar API.
|
| 5 |
+
"""
|
| 6 |
+
|
| 7 |
+
import datetime
|
| 8 |
+
import logging
|
| 9 |
+
import asyncio
|
| 10 |
+
import re
|
| 11 |
+
import uuid
|
| 12 |
+
import json
|
| 13 |
+
from typing import List, Optional, Dict, Any, Union
|
| 14 |
+
|
| 15 |
+
from googleapiclient.errors import HttpError
|
| 16 |
+
from googleapiclient.discovery import build
|
| 17 |
+
|
| 18 |
+
from auth.service_decorator import require_google_service
|
| 19 |
+
from core.utils import handle_http_errors
|
| 20 |
+
|
| 21 |
+
from core.server import server
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
# Configure module logger
|
| 25 |
+
logger = logging.getLogger(__name__)
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
def _parse_reminders_json(
|
| 29 |
+
reminders_input: Optional[Union[str, List[Dict[str, Any]]]], function_name: str
|
| 30 |
+
) -> List[Dict[str, Any]]:
|
| 31 |
+
"""
|
| 32 |
+
Parse reminders from JSON string or list object and validate them.
|
| 33 |
+
|
| 34 |
+
Args:
|
| 35 |
+
reminders_input: JSON string containing reminder objects or list of reminder objects
|
| 36 |
+
function_name: Name of calling function for logging
|
| 37 |
+
|
| 38 |
+
Returns:
|
| 39 |
+
List of validated reminder objects
|
| 40 |
+
"""
|
| 41 |
+
if not reminders_input:
|
| 42 |
+
return []
|
| 43 |
+
|
| 44 |
+
# Handle both string (JSON) and list inputs
|
| 45 |
+
if isinstance(reminders_input, str):
|
| 46 |
+
try:
|
| 47 |
+
reminders = json.loads(reminders_input)
|
| 48 |
+
if not isinstance(reminders, list):
|
| 49 |
+
logger.warning(
|
| 50 |
+
f"[{function_name}] Reminders must be a JSON array, got {type(reminders).__name__}"
|
| 51 |
+
)
|
| 52 |
+
return []
|
| 53 |
+
except json.JSONDecodeError as e:
|
| 54 |
+
logger.warning(f"[{function_name}] Invalid JSON for reminders: {e}")
|
| 55 |
+
return []
|
| 56 |
+
elif isinstance(reminders_input, list):
|
| 57 |
+
reminders = reminders_input
|
| 58 |
+
else:
|
| 59 |
+
logger.warning(
|
| 60 |
+
f"[{function_name}] Reminders must be a JSON string or list, got {type(reminders_input).__name__}"
|
| 61 |
+
)
|
| 62 |
+
return []
|
| 63 |
+
|
| 64 |
+
# Validate reminders
|
| 65 |
+
if len(reminders) > 5:
|
| 66 |
+
logger.warning(
|
| 67 |
+
f"[{function_name}] More than 5 reminders provided, truncating to first 5"
|
| 68 |
+
)
|
| 69 |
+
reminders = reminders[:5]
|
| 70 |
+
|
| 71 |
+
validated_reminders = []
|
| 72 |
+
for reminder in reminders:
|
| 73 |
+
if (
|
| 74 |
+
not isinstance(reminder, dict)
|
| 75 |
+
or "method" not in reminder
|
| 76 |
+
or "minutes" not in reminder
|
| 77 |
+
):
|
| 78 |
+
logger.warning(
|
| 79 |
+
f"[{function_name}] Invalid reminder format: {reminder}, skipping"
|
| 80 |
+
)
|
| 81 |
+
continue
|
| 82 |
+
|
| 83 |
+
method = reminder["method"].lower()
|
| 84 |
+
if method not in ["popup", "email"]:
|
| 85 |
+
logger.warning(
|
| 86 |
+
f"[{function_name}] Invalid reminder method '{method}', must be 'popup' or 'email', skipping"
|
| 87 |
+
)
|
| 88 |
+
continue
|
| 89 |
+
|
| 90 |
+
minutes = reminder["minutes"]
|
| 91 |
+
if not isinstance(minutes, int) or minutes < 0 or minutes > 40320:
|
| 92 |
+
logger.warning(
|
| 93 |
+
f"[{function_name}] Invalid reminder minutes '{minutes}', must be integer 0-40320, skipping"
|
| 94 |
+
)
|
| 95 |
+
continue
|
| 96 |
+
|
| 97 |
+
validated_reminders.append({"method": method, "minutes": minutes})
|
| 98 |
+
|
| 99 |
+
return validated_reminders
|
| 100 |
+
|
| 101 |
+
|
| 102 |
+
def _apply_transparency_if_valid(
|
| 103 |
+
event_body: Dict[str, Any],
|
| 104 |
+
transparency: Optional[str],
|
| 105 |
+
function_name: str,
|
| 106 |
+
) -> None:
|
| 107 |
+
"""
|
| 108 |
+
Apply transparency to the event body if the provided value is valid.
|
| 109 |
+
|
| 110 |
+
Args:
|
| 111 |
+
event_body: Event payload being constructed.
|
| 112 |
+
transparency: Provided transparency value.
|
| 113 |
+
function_name: Name of the calling function for logging context.
|
| 114 |
+
"""
|
| 115 |
+
if transparency is None:
|
| 116 |
+
return
|
| 117 |
+
|
| 118 |
+
valid_transparency_values = ["opaque", "transparent"]
|
| 119 |
+
if transparency in valid_transparency_values:
|
| 120 |
+
event_body["transparency"] = transparency
|
| 121 |
+
logger.info(f"[{function_name}] Set transparency to '{transparency}'")
|
| 122 |
+
else:
|
| 123 |
+
logger.warning(
|
| 124 |
+
f"[{function_name}] Invalid transparency value '{transparency}', must be 'opaque' or 'transparent', skipping"
|
| 125 |
+
)
|
| 126 |
+
|
| 127 |
+
|
| 128 |
+
def _apply_visibility_if_valid(
|
| 129 |
+
event_body: Dict[str, Any],
|
| 130 |
+
visibility: Optional[str],
|
| 131 |
+
function_name: str,
|
| 132 |
+
) -> None:
|
| 133 |
+
"""
|
| 134 |
+
Apply visibility to the event body if the provided value is valid.
|
| 135 |
+
|
| 136 |
+
Args:
|
| 137 |
+
event_body: Event payload being constructed.
|
| 138 |
+
visibility: Provided visibility value.
|
| 139 |
+
function_name: Name of the calling function for logging context.
|
| 140 |
+
"""
|
| 141 |
+
if visibility is None:
|
| 142 |
+
return
|
| 143 |
+
|
| 144 |
+
valid_visibility_values = ["default", "public", "private", "confidential"]
|
| 145 |
+
if visibility in valid_visibility_values:
|
| 146 |
+
event_body["visibility"] = visibility
|
| 147 |
+
logger.info(f"[{function_name}] Set visibility to '{visibility}'")
|
| 148 |
+
else:
|
| 149 |
+
logger.warning(
|
| 150 |
+
f"[{function_name}] Invalid visibility value '{visibility}', must be 'default', 'public', 'private', or 'confidential', skipping"
|
| 151 |
+
)
|
| 152 |
+
|
| 153 |
+
|
| 154 |
+
def _preserve_existing_fields(
|
| 155 |
+
event_body: Dict[str, Any],
|
| 156 |
+
existing_event: Dict[str, Any],
|
| 157 |
+
field_mappings: Dict[str, Any],
|
| 158 |
+
) -> None:
|
| 159 |
+
"""
|
| 160 |
+
Helper function to preserve existing event fields when not explicitly provided.
|
| 161 |
+
|
| 162 |
+
Args:
|
| 163 |
+
event_body: The event body being built for the API call
|
| 164 |
+
existing_event: The existing event data from the API
|
| 165 |
+
field_mappings: Dict mapping field names to their new values (None means preserve existing)
|
| 166 |
+
"""
|
| 167 |
+
for field_name, new_value in field_mappings.items():
|
| 168 |
+
if new_value is None and field_name in existing_event:
|
| 169 |
+
event_body[field_name] = existing_event[field_name]
|
| 170 |
+
logger.info(f"[modify_event] Preserving existing {field_name}")
|
| 171 |
+
elif new_value is not None:
|
| 172 |
+
event_body[field_name] = new_value
|
| 173 |
+
|
| 174 |
+
|
| 175 |
+
def _format_attendee_details(
|
| 176 |
+
attendees: List[Dict[str, Any]], indent: str = " "
|
| 177 |
+
) -> str:
|
| 178 |
+
"""
|
| 179 |
+
Format attendee details including response status, organizer, and optional flags.
|
| 180 |
+
|
| 181 |
+
Example output format:
|
| 182 |
+
" user@example.com: accepted
|
| 183 |
+
manager@example.com: declined (organizer)
|
| 184 |
+
optional-person@example.com: tentative (optional)"
|
| 185 |
+
|
| 186 |
+
Args:
|
| 187 |
+
attendees: List of attendee dictionaries from Google Calendar API
|
| 188 |
+
indent: Indentation to use for newline-separated attendees (default: " ")
|
| 189 |
+
|
| 190 |
+
Returns:
|
| 191 |
+
Formatted string with attendee details, or "None" if no attendees
|
| 192 |
+
"""
|
| 193 |
+
if not attendees:
|
| 194 |
+
return "None"
|
| 195 |
+
|
| 196 |
+
attendee_details_list = []
|
| 197 |
+
for a in attendees:
|
| 198 |
+
email = a.get("email", "unknown")
|
| 199 |
+
response_status = a.get("responseStatus", "unknown")
|
| 200 |
+
optional = a.get("optional", False)
|
| 201 |
+
organizer = a.get("organizer", False)
|
| 202 |
+
|
| 203 |
+
detail_parts = [f"{email}: {response_status}"]
|
| 204 |
+
if organizer:
|
| 205 |
+
detail_parts.append("(organizer)")
|
| 206 |
+
if optional:
|
| 207 |
+
detail_parts.append("(optional)")
|
| 208 |
+
|
| 209 |
+
attendee_details_list.append(" ".join(detail_parts))
|
| 210 |
+
|
| 211 |
+
return f"\n{indent}".join(attendee_details_list)
|
| 212 |
+
|
| 213 |
+
|
| 214 |
+
def _format_attachment_details(
|
| 215 |
+
attachments: List[Dict[str, Any]], indent: str = " "
|
| 216 |
+
) -> str:
|
| 217 |
+
"""
|
| 218 |
+
Format attachment details including file information.
|
| 219 |
+
|
| 220 |
+
|
| 221 |
+
Args:
|
| 222 |
+
attachments: List of attachment dictionaries from Google Calendar API
|
| 223 |
+
indent: Indentation to use for newline-separated attachments (default: " ")
|
| 224 |
+
|
| 225 |
+
Returns:
|
| 226 |
+
Formatted string with attachment details, or "None" if no attachments
|
| 227 |
+
"""
|
| 228 |
+
if not attachments:
|
| 229 |
+
return "None"
|
| 230 |
+
|
| 231 |
+
attachment_details_list = []
|
| 232 |
+
for att in attachments:
|
| 233 |
+
title = att.get("title", "Untitled")
|
| 234 |
+
file_url = att.get("fileUrl", "No URL")
|
| 235 |
+
file_id = att.get("fileId", "No ID")
|
| 236 |
+
mime_type = att.get("mimeType", "Unknown")
|
| 237 |
+
|
| 238 |
+
attachment_info = (
|
| 239 |
+
f"{title}\n"
|
| 240 |
+
f"{indent}File URL: {file_url}\n"
|
| 241 |
+
f"{indent}File ID: {file_id}\n"
|
| 242 |
+
f"{indent}MIME Type: {mime_type}"
|
| 243 |
+
)
|
| 244 |
+
attachment_details_list.append(attachment_info)
|
| 245 |
+
|
| 246 |
+
return f"\n{indent}".join(attachment_details_list)
|
| 247 |
+
|
| 248 |
+
|
| 249 |
+
# Helper function to ensure time strings for API calls are correctly formatted
def _correct_time_format_for_api(
    time_str: Optional[str], param_name: str
) -> Optional[str]:
    """
    Normalize a user-supplied time string toward RFC3339 for the Calendar API.

    A bare date (YYYY-MM-DD) becomes 'YYYY-MM-DDT00:00:00Z'; a naive datetime
    (YYYY-MM-DDTHH:MM:SS, no timezone suffix) gets a trailing 'Z'. Anything
    else — including strings already carrying timezone info — is returned
    unchanged. Falsy input yields None.
    """
    if not time_str:
        return None

    logger.info(
        f"_correct_time_format_for_api: Processing {param_name} with value '{time_str}'"
    )

    # Case 1: bare date (YYYY-MM-DD).
    if len(time_str) == 10 and time_str.count("-") == 2:
        try:
            datetime.datetime.strptime(time_str, "%Y-%m-%d")
        except ValueError:
            logger.warning(
                f"{param_name} '{time_str}' looks like a date but is not valid YYYY-MM-DD. Using as is."
            )
            return time_str
        formatted = f"{time_str}T00:00:00Z"
        logger.info(
            f"Formatting date-only {param_name} '{time_str}' to RFC3339: '{formatted}'"
        )
        return formatted

    # Case 2: naive datetime (YYYY-MM-DDTHH:MM:SS) lacking any timezone
    # marker ('Z', '+HH:MM', or '-HH:MM' after the 'T').
    tail = time_str[10:] if len(time_str) > 10 else ""
    looks_naive = (
        len(time_str) == 19
        and time_str[10] == "T"
        and time_str.count(":") == 2
        and not (time_str.endswith("Z") or "+" in tail or "-" in tail)
    )
    if looks_naive:
        try:
            datetime.datetime.strptime(time_str, "%Y-%m-%dT%H:%M:%S")
        except ValueError:
            logger.warning(
                f"{param_name} '{time_str}' looks like it needs 'Z' but is not valid YYYY-MM-DDTHH:MM:SS. Using as is."
            )
            return time_str
        logger.info(f"Formatting {param_name} '{time_str}' by appending 'Z' for UTC.")
        return time_str + "Z"

    # Already timezone-qualified or an unrecognized shape: pass through.
    logger.info(f"{param_name} '{time_str}' doesn't need formatting, using as is.")
    return time_str
|
| 302 |
+
|
| 303 |
+
|
| 304 |
+
@server.tool()
@handle_http_errors("list_calendars", is_read_only=True, service_type="calendar")
@require_google_service("calendar", "calendar_read")
async def list_calendars(service, user_google_email: str) -> str:
    """
    Retrieves a list of calendars accessible to the authenticated user.

    Args:
        user_google_email (str): The user's Google email address. Required.

    Returns:
        str: A formatted list of the user's calendars (summary, ID, primary status).
    """
    logger.info(f"[list_calendars] Invoked. Email: '{user_google_email}'")

    # The googleapiclient service is synchronous; run the blocking call in a
    # worker thread so the async event loop is not stalled.
    calendar_list_response = await asyncio.to_thread(
        lambda: service.calendarList().list().execute()
    )
    items = calendar_list_response.get("items", [])
    if not items:
        return f"No calendars found for {user_google_email}."

    # One line per calendar: summary, "(Primary)" marker when applicable, ID.
    calendars_summary_list = [
        f'- "{cal.get("summary", "No Summary")}"{" (Primary)" if cal.get("primary") else ""} (ID: {cal["id"]})'
        for cal in items
    ]
    text_output = (
        f"Successfully listed {len(items)} calendars for {user_google_email}:\n"
        + "\n".join(calendars_summary_list)
    )
    logger.info(f"Successfully listed {len(items)} calendars for {user_google_email}.")
    return text_output
|
| 336 |
+
|
| 337 |
+
|
| 338 |
+
@server.tool()
@handle_http_errors("get_events", is_read_only=True, service_type="calendar")
@require_google_service("calendar", "calendar_read")
async def get_events(
    service,
    user_google_email: str,
    calendar_id: str = "primary",
    event_id: Optional[str] = None,
    time_min: Optional[str] = None,
    time_max: Optional[str] = None,
    max_results: int = 25,
    query: Optional[str] = None,
    detailed: bool = False,
    include_attachments: bool = False,
) -> str:
    """
    Retrieves events from a specified Google Calendar. Can retrieve a single event by ID or multiple events within a time range.
    You can also search for events by keyword by supplying the optional "query" param.

    Args:
        user_google_email (str): The user's Google email address. Required.
        calendar_id (str): The ID of the calendar to query. Use 'primary' for the user's primary calendar. Defaults to 'primary'. Calendar IDs can be obtained using `list_calendars`.
        event_id (Optional[str]): The ID of a specific event to retrieve. If provided, retrieves only this event and ignores time filtering parameters.
        time_min (Optional[str]): The start of the time range (inclusive) in RFC3339 format (e.g., '2024-05-12T10:00:00Z' or '2024-05-12'). If omitted, defaults to the current time. Ignored if event_id is provided.
        time_max (Optional[str]): The end of the time range (exclusive) in RFC3339 format. If omitted, events starting from `time_min` onwards are considered (up to `max_results`). Ignored if event_id is provided.
        max_results (int): The maximum number of events to return. Defaults to 25. Ignored if event_id is provided.
        query (Optional[str]): A keyword to search for within event fields (summary, description, location). Ignored if event_id is provided.
        detailed (bool): Whether to return detailed event information including description, location, attendees, and attendee details (response status, organizer, optional flags). Defaults to False.
        include_attachments (bool): Whether to include attachment information in detailed event output. When True, shows attachment details (fileId, fileUrl, mimeType, title) for events that have attachments. Only applies when detailed=True. Set this to True when you need to view or access files that have been attached to calendar events, such as meeting documents, presentations, or other shared files. Defaults to False.

    Returns:
        str: A formatted list of events (summary, start and end times, link) within the specified range, or detailed information for a single event if event_id is provided.
    """
    logger.info(
        f"[get_events] Raw parameters - event_id: '{event_id}', time_min: '{time_min}', time_max: '{time_max}', query: '{query}', detailed: {detailed}, include_attachments: {include_attachments}"
    )

    # Handle single event retrieval
    if event_id:
        logger.info(f"[get_events] Retrieving single event with ID: {event_id}")
        # Synchronous googleapiclient call; offload to a thread to keep the
        # event loop responsive.
        event = await asyncio.to_thread(
            lambda: service.events()
            .get(calendarId=calendar_id, eventId=event_id)
            .execute()
        )
        # Wrap in a list so the formatting code below can treat both the
        # single-event and multi-event paths uniformly.
        items = [event]
    else:
        # Handle multiple events retrieval with time filtering
        # Ensure time_min and time_max are correctly formatted for the API
        formatted_time_min = _correct_time_format_for_api(time_min, "time_min")
        if formatted_time_min:
            effective_time_min = formatted_time_min
        else:
            # No usable time_min: default to "now" in RFC3339 UTC form.
            utc_now = datetime.datetime.now(datetime.timezone.utc)
            effective_time_min = utc_now.isoformat().replace("+00:00", "Z")
        if time_min is None:
            logger.info(
                f"time_min not provided, defaulting to current UTC time: {effective_time_min}"
            )
        else:
            logger.info(
                f"time_min processing: original='{time_min}', formatted='{formatted_time_min}', effective='{effective_time_min}'"
            )

        # time_max is optional; None simply means "no upper bound" to the API.
        effective_time_max = _correct_time_format_for_api(time_max, "time_max")
        if time_max:
            logger.info(
                f"time_max processing: original='{time_max}', formatted='{effective_time_max}'"
            )

        logger.info(
            f"[get_events] Final API parameters - calendarId: '{calendar_id}', timeMin: '{effective_time_min}', timeMax: '{effective_time_max}', maxResults: {max_results}, query: '{query}'"
        )

        # Build the request parameters dynamically
        # singleEvents=True expands recurring events into instances, which is
        # required for orderBy="startTime".
        request_params = {
            "calendarId": calendar_id,
            "timeMin": effective_time_min,
            "timeMax": effective_time_max,
            "maxResults": max_results,
            "singleEvents": True,
            "orderBy": "startTime",
        }

        if query:
            request_params["q"] = query

        events_result = await asyncio.to_thread(
            lambda: service.events().list(**request_params).execute()
        )
        items = events_result.get("items", [])
    if not items:
        if event_id:
            return f"Event with ID '{event_id}' not found in calendar '{calendar_id}' for {user_google_email}."
        else:
            return f"No events found in calendar '{calendar_id}' for {user_google_email} for the specified time range."

    # Handle returning detailed output for a single event when requested
    if event_id and detailed:
        item = items[0]
        summary = item.get("summary", "No Title")
        # All-day events carry "date"; timed events carry "dateTime".
        start = item["start"].get("dateTime", item["start"].get("date"))
        end = item["end"].get("dateTime", item["end"].get("date"))
        link = item.get("htmlLink", "No Link")
        description = item.get("description", "No Description")
        location = item.get("location", "No Location")
        color_id = item.get("colorId", "None")
        attendees = item.get("attendees", [])
        attendee_emails = (
            ", ".join([a.get("email", "") for a in attendees]) if attendees else "None"
        )
        attendee_details_str = _format_attendee_details(attendees, indent=" ")

        event_details = (
            f"Event Details:\n"
            f"- Title: {summary}\n"
            f"- Starts: {start}\n"
            f"- Ends: {end}\n"
            f"- Description: {description}\n"
            f"- Location: {location}\n"
            f"- Color ID: {color_id}\n"
            f"- Attendees: {attendee_emails}\n"
            f"- Attendee Details: {attendee_details_str}\n"
        )

        if include_attachments:
            attachments = item.get("attachments", [])
            attachment_details_str = _format_attachment_details(
                attachments, indent=" "
            )
            event_details += f"- Attachments: {attachment_details_str}\n"

        event_details += f"- Event ID: {event_id}\n- Link: {link}"
        logger.info(
            f"[get_events] Successfully retrieved detailed event {event_id} for {user_google_email}."
        )
        return event_details

    # Handle multiple events or single event with basic output
    event_details_list = []
    for item in items:
        summary = item.get("summary", "No Title")
        # All-day events carry "date"; timed events carry "dateTime".
        start_time = item["start"].get("dateTime", item["start"].get("date"))
        end_time = item["end"].get("dateTime", item["end"].get("date"))
        link = item.get("htmlLink", "No Link")
        item_event_id = item.get("id", "No ID")

        if detailed:
            # Add detailed information for multiple events
            description = item.get("description", "No Description")
            location = item.get("location", "No Location")
            attendees = item.get("attendees", [])
            attendee_emails = (
                ", ".join([a.get("email", "") for a in attendees])
                if attendees
                else "None"
            )
            attendee_details_str = _format_attendee_details(attendees, indent=" ")

            event_detail_parts = (
                f'- "{summary}" (Starts: {start_time}, Ends: {end_time})\n'
                f" Description: {description}\n"
                f" Location: {location}\n"
                f" Attendees: {attendee_emails}\n"
                f" Attendee Details: {attendee_details_str}\n"
            )

            if include_attachments:
                attachments = item.get("attachments", [])
                attachment_details_str = _format_attachment_details(
                    attachments, indent=" "
                )
                event_detail_parts += f" Attachments: {attachment_details_str}\n"

            event_detail_parts += f" ID: {item_event_id} | Link: {link}"
            event_details_list.append(event_detail_parts)
        else:
            # Basic output format
            event_details_list.append(
                f'- "{summary}" (Starts: {start_time}, Ends: {end_time}) ID: {item_event_id} | Link: {link}'
            )

    if event_id:
        # Single event basic output
        text_output = (
            f"Successfully retrieved event from calendar '{calendar_id}' for {user_google_email}:\n"
            + "\n".join(event_details_list)
        )
    else:
        # Multiple events output
        text_output = (
            f"Successfully retrieved {len(items)} events from calendar '{calendar_id}' for {user_google_email}:\n"
            + "\n".join(event_details_list)
        )

    logger.info(f"Successfully retrieved {len(items)} events for {user_google_email}.")
    return text_output
|
| 535 |
+
|
| 536 |
+
|
| 537 |
+
@server.tool()
@handle_http_errors("create_event", service_type="calendar")
@require_google_service("calendar", "calendar_events")
async def create_event(
    service,
    user_google_email: str,
    summary: str,
    start_time: str,
    end_time: str,
    calendar_id: str = "primary",
    description: Optional[str] = None,
    location: Optional[str] = None,
    attendees: Optional[List[str]] = None,
    timezone: Optional[str] = None,
    attachments: Optional[List[str]] = None,
    add_google_meet: bool = False,
    reminders: Optional[Union[str, List[Dict[str, Any]]]] = None,
    use_default_reminders: bool = True,
    transparency: Optional[str] = None,
    visibility: Optional[str] = None,
) -> str:
    """
    Creates a new event.

    Args:
        user_google_email (str): The user's Google email address. Required.
        summary (str): Event title.
        start_time (str): Start time (RFC3339, e.g., "2023-10-27T10:00:00-07:00" or "2023-10-27" for all-day).
        end_time (str): End time (RFC3339, e.g., "2023-10-27T11:00:00-07:00" or "2023-10-28" for all-day).
        calendar_id (str): Calendar ID (default: 'primary').
        description (Optional[str]): Event description.
        location (Optional[str]): Event location.
        attendees (Optional[List[str]]): Attendee email addresses.
        timezone (Optional[str]): Timezone (e.g., "America/New_York").
        attachments (Optional[List[str]]): List of Google Drive file URLs or IDs to attach to the event.
        add_google_meet (bool): Whether to add a Google Meet video conference to the event. Defaults to False.
        reminders (Optional[Union[str, List[Dict[str, Any]]]]): JSON string or list of reminder objects. Each should have 'method' ("popup" or "email") and 'minutes' (0-40320). Max 5 reminders. Example: '[{"method": "popup", "minutes": 15}]' or [{"method": "popup", "minutes": 15}]
        use_default_reminders (bool): Whether to use calendar's default reminders. If False, uses custom reminders. Defaults to True.
        transparency (Optional[str]): Event transparency for busy/free status. "opaque" shows as Busy (default), "transparent" shows as Available/Free. Defaults to None (uses Google Calendar default).
        visibility (Optional[str]): Event visibility. "default" uses calendar default, "public" is visible to all, "private" is visible only to attendees, "confidential" is same as private (legacy). Defaults to None (uses Google Calendar default).

    Returns:
        str: Confirmation message of the successful event creation with event link.
    """
    logger.info(
        f"[create_event] Invoked. Email: '{user_google_email}', Summary: {summary}"
    )
    logger.info(f"[create_event] Incoming attachments param: {attachments}")
    # Defensive: some clients pass attachments as a comma-separated string
    # instead of a list; split and strip whitespace in that case.
    if attachments and isinstance(attachments, str):
        attachments = [a.strip() for a in attachments.split(",") if a.strip()]
        logger.info(
            f"[create_event] Parsed attachments list from string: {attachments}"
        )
    # A "T" in the time string distinguishes a timed event (dateTime) from an
    # all-day event (date).
    event_body: Dict[str, Any] = {
        "summary": summary,
        "start": (
            {"date": start_time} if "T" not in start_time else {"dateTime": start_time}
        ),
        "end": ({"date": end_time} if "T" not in end_time else {"dateTime": end_time}),
    }
    if location:
        event_body["location"] = location
    if description:
        event_body["description"] = description
    if timezone:
        # timeZone is only valid alongside dateTime, not all-day "date".
        if "dateTime" in event_body["start"]:
            event_body["start"]["timeZone"] = timezone
        if "dateTime" in event_body["end"]:
            event_body["end"]["timeZone"] = timezone
    if attendees:
        event_body["attendees"] = [{"email": email} for email in attendees]

    # Handle reminders: only touch the reminders field when the caller asked
    # for custom reminders or explicitly disabled the defaults.
    if reminders is not None or not use_default_reminders:
        # If custom reminders are provided, automatically disable default reminders
        effective_use_default = use_default_reminders and reminders is None

        reminder_data = {"useDefault": effective_use_default}
        if reminders is not None:
            validated_reminders = _parse_reminders_json(reminders, "create_event")
            if validated_reminders:
                reminder_data["overrides"] = validated_reminders
                logger.info(
                    f"[create_event] Added {len(validated_reminders)} custom reminders"
                )
            if use_default_reminders:
                logger.info(
                    "[create_event] Custom reminders provided - disabling default reminders"
                )

        event_body["reminders"] = reminder_data

    # Handle transparency validation
    _apply_transparency_if_valid(event_body, transparency, "create_event")

    # Handle visibility validation
    _apply_visibility_if_valid(event_body, visibility, "create_event")

    if add_google_meet:
        # A unique requestId makes the conference creation request idempotent.
        request_id = str(uuid.uuid4())
        event_body["conferenceData"] = {
            "createRequest": {
                "requestId": request_id,
                "conferenceSolutionKey": {"type": "hangoutsMeet"},
            }
        }
        logger.info(
            f"[create_event] Adding Google Meet conference with request ID: {request_id}"
        )

    if attachments:
        # Accept both file URLs and file IDs. If a URL, extract the fileId.
        event_body["attachments"] = []
        drive_service = None
        try:
            # NOTE(review): reuses the Calendar service's private _http
            # transport to build a Drive client for metadata lookups; a
            # best-effort convenience that degrades to generic titles on
            # failure.
            drive_service = service._http and build("drive", "v3", http=service._http)
        except Exception as e:
            logger.warning(f"Could not build Drive service for MIME type lookup: {e}")
        for att in attachments:
            file_id = None
            if att.startswith("https://"):
                # Match /d/<id>, /file/d/<id>, ?id=<id>
                match = re.search(r"(?:/d/|/file/d/|id=)([\w-]+)", att)
                file_id = match.group(1) if match else None
                logger.info(
                    f"[create_event] Extracted file_id '{file_id}' from attachment URL '{att}'"
                )
            else:
                file_id = att
                logger.info(
                    f"[create_event] Using direct file_id '{file_id}' for attachment"
                )
            if file_id:
                file_url = f"https://drive.google.com/open?id={file_id}"
                # Fallback values used when Drive metadata cannot be fetched.
                mime_type = "application/vnd.google-apps.drive-sdk"
                title = "Drive Attachment"
                # Try to get the actual MIME type and filename from Drive
                if drive_service:
                    try:
                        file_metadata = await asyncio.to_thread(
                            lambda: drive_service.files()
                            .get(
                                fileId=file_id,
                                fields="mimeType,name",
                                supportsAllDrives=True,
                            )
                            .execute()
                        )
                        mime_type = file_metadata.get("mimeType", mime_type)
                        filename = file_metadata.get("name")
                        if filename:
                            title = filename
                            # Fix: interpolate the actual filename (the log
                            # previously printed a literal placeholder).
                            logger.info(
                                f"[create_event] Using filename '{filename}' as attachment title"
                            )
                        else:
                            logger.info(
                                "[create_event] No filename found, using generic title"
                            )
                    except Exception as e:
                        logger.warning(
                            f"Could not fetch metadata for file {file_id}: {e}"
                        )
                event_body["attachments"].append(
                    {
                        "fileUrl": file_url,
                        "title": title,
                        "mimeType": mime_type,
                    }
                )
        # supportsAttachments=True is required by the API to persist the
        # attachments field.
        created_event = await asyncio.to_thread(
            lambda: service.events()
            .insert(
                calendarId=calendar_id,
                body=event_body,
                supportsAttachments=True,
                conferenceDataVersion=1 if add_google_meet else 0,
            )
            .execute()
        )
    else:
        created_event = await asyncio.to_thread(
            lambda: service.events()
            .insert(
                calendarId=calendar_id,
                body=event_body,
                conferenceDataVersion=1 if add_google_meet else 0,
            )
            .execute()
        )
    link = created_event.get("htmlLink", "No link available")
    confirmation_message = f"Successfully created event '{created_event.get('summary', summary)}' for {user_google_email}. Link: {link}"

    # Add Google Meet information if conference was created
    if add_google_meet and "conferenceData" in created_event:
        conference_data = created_event["conferenceData"]
        if "entryPoints" in conference_data:
            for entry_point in conference_data["entryPoints"]:
                if entry_point.get("entryPointType") == "video":
                    meet_link = entry_point.get("uri", "")
                    if meet_link:
                        confirmation_message += f" Google Meet: {meet_link}"
                        break

    logger.info(
        f"Event created successfully for {user_google_email}. ID: {created_event.get('id')}, Link: {link}"
    )
    return confirmation_message
|
| 746 |
+
|
| 747 |
+
|
| 748 |
+
def _normalize_attendees(
|
| 749 |
+
attendees: Optional[Union[List[str], List[Dict[str, Any]]]],
|
| 750 |
+
) -> Optional[List[Dict[str, Any]]]:
|
| 751 |
+
"""
|
| 752 |
+
Normalize attendees input to list of attendee objects.
|
| 753 |
+
|
| 754 |
+
Accepts either:
|
| 755 |
+
- List of email strings: ["user@example.com", "other@example.com"]
|
| 756 |
+
- List of attendee objects: [{"email": "user@example.com", "responseStatus": "accepted"}]
|
| 757 |
+
- Mixed list of both formats
|
| 758 |
+
|
| 759 |
+
Returns list of attendee dicts with at minimum 'email' key.
|
| 760 |
+
"""
|
| 761 |
+
if attendees is None:
|
| 762 |
+
return None
|
| 763 |
+
|
| 764 |
+
normalized = []
|
| 765 |
+
for att in attendees:
|
| 766 |
+
if isinstance(att, str):
|
| 767 |
+
normalized.append({"email": att})
|
| 768 |
+
elif isinstance(att, dict) and "email" in att:
|
| 769 |
+
normalized.append(att)
|
| 770 |
+
else:
|
| 771 |
+
logger.warning(
|
| 772 |
+
f"[_normalize_attendees] Invalid attendee format: {att}, skipping"
|
| 773 |
+
)
|
| 774 |
+
return normalized if normalized else None
|
| 775 |
+
|
| 776 |
+
|
| 777 |
+
@server.tool()
@handle_http_errors("modify_event", service_type="calendar")
@require_google_service("calendar", "calendar_events")
async def modify_event(
    service,
    user_google_email: str,
    event_id: str,
    calendar_id: str = "primary",
    summary: Optional[str] = None,
    start_time: Optional[str] = None,
    end_time: Optional[str] = None,
    description: Optional[str] = None,
    location: Optional[str] = None,
    attendees: Optional[Union[List[str], List[Dict[str, Any]]]] = None,
    timezone: Optional[str] = None,
    add_google_meet: Optional[bool] = None,
    reminders: Optional[Union[str, List[Dict[str, Any]]]] = None,
    use_default_reminders: Optional[bool] = None,
    transparency: Optional[str] = None,
    visibility: Optional[str] = None,
    color_id: Optional[str] = None,
) -> str:
    """
    Modifies an existing event.

    Args:
        user_google_email (str): The user's Google email address. Required.
        event_id (str): The ID of the event to modify.
        calendar_id (str): Calendar ID (default: 'primary').
        summary (Optional[str]): New event title.
        start_time (Optional[str]): New start time (RFC3339, e.g., "2023-10-27T10:00:00-07:00" or "2023-10-27" for all-day).
        end_time (Optional[str]): New end time (RFC3339, e.g., "2023-10-27T11:00:00-07:00" or "2023-10-28" for all-day).
        description (Optional[str]): New event description.
        location (Optional[str]): New event location.
        attendees (Optional[Union[List[str], List[Dict[str, Any]]]]): Attendees as email strings or objects with metadata. Supports: ["email@example.com"] or [{"email": "email@example.com", "responseStatus": "accepted", "organizer": true, "optional": true}]. When using objects, existing metadata (responseStatus, organizer, optional) is preserved. New attendees default to responseStatus="needsAction".
        timezone (Optional[str]): New timezone (e.g., "America/New_York").
        add_google_meet (Optional[bool]): Whether to add or remove Google Meet video conference. If True, adds Google Meet; if False, removes it; if None, leaves unchanged.
        reminders (Optional[Union[str, List[Dict[str, Any]]]]): JSON string or list of reminder objects to replace existing reminders. Each should have 'method' ("popup" or "email") and 'minutes' (0-40320). Max 5 reminders. Example: '[{"method": "popup", "minutes": 15}]' or [{"method": "popup", "minutes": 15}]
        use_default_reminders (Optional[bool]): Whether to use calendar's default reminders. If specified, overrides current reminder settings.
        transparency (Optional[str]): Event transparency for busy/free status. "opaque" shows as Busy, "transparent" shows as Available/Free. If None, preserves existing transparency setting.
        visibility (Optional[str]): Event visibility. "default" uses calendar default, "public" is visible to all, "private" is visible only to attendees, "confidential" is same as private (legacy). If None, preserves existing visibility setting.
        color_id (Optional[str]): Event color ID (1-11). If None, preserves existing color.

    Returns:
        str: Confirmation message of the successful event modification with event link.
    """
    logger.info(
        f"[modify_event] Invoked. Email: '{user_google_email}', Event ID: {event_id}"
    )

    # Build the event body with only the fields that are provided
    event_body: Dict[str, Any] = {}
    if summary is not None:
        event_body["summary"] = summary
    if start_time is not None:
        # A bare date (no "T") denotes an all-day event.
        event_body["start"] = (
            {"date": start_time} if "T" not in start_time else {"dateTime": start_time}
        )
        if timezone is not None and "dateTime" in event_body["start"]:
            event_body["start"]["timeZone"] = timezone
    if end_time is not None:
        event_body["end"] = (
            {"date": end_time} if "T" not in end_time else {"dateTime": end_time}
        )
        if timezone is not None and "dateTime" in event_body["end"]:
            event_body["end"]["timeZone"] = timezone
    if description is not None:
        event_body["description"] = description
    if location is not None:
        event_body["location"] = location

    # Normalize attendees - accepts both email strings and full attendee objects
    normalized_attendees = _normalize_attendees(attendees)
    if normalized_attendees is not None:
        event_body["attendees"] = normalized_attendees

    if color_id is not None:
        event_body["colorId"] = color_id

    # Handle reminders
    if reminders is not None or use_default_reminders is not None:
        reminder_data = {}
        if use_default_reminders is not None:
            reminder_data["useDefault"] = use_default_reminders
        else:
            # Preserve existing event's useDefault value if not explicitly specified
            try:
                # BUGFIX: run the blocking API call in a worker thread instead of
                # directly on the event loop, consistent with every other API
                # call in this module.
                existing_event = await asyncio.to_thread(
                    lambda: service.events()
                    .get(calendarId=calendar_id, eventId=event_id)
                    .execute()
                )
                reminder_data["useDefault"] = existing_event.get("reminders", {}).get(
                    "useDefault", True
                )
            except Exception as e:
                logger.warning(
                    f"[modify_event] Could not fetch existing event for reminders: {e}"
                )
                reminder_data["useDefault"] = (
                    True  # Fallback to True if unable to fetch
                )

        # If custom reminders are provided, automatically disable default reminders
        if reminders is not None:
            if reminder_data.get("useDefault", False):
                reminder_data["useDefault"] = False
                logger.info(
                    "[modify_event] Custom reminders provided - disabling default reminders"
                )

            validated_reminders = _parse_reminders_json(reminders, "modify_event")
            if reminders and not validated_reminders:
                logger.warning(
                    "[modify_event] Reminders provided but failed validation. No custom reminders will be set."
                )
            elif validated_reminders:
                reminder_data["overrides"] = validated_reminders
                logger.info(
                    f"[modify_event] Updated reminders with {len(validated_reminders)} custom reminders"
                )

        event_body["reminders"] = reminder_data

    # Handle transparency validation
    _apply_transparency_if_valid(event_body, transparency, "modify_event")

    # Handle visibility validation
    _apply_visibility_if_valid(event_body, visibility, "modify_event")

    if timezone is not None and "start" not in event_body and "end" not in event_body:
        # A timezone alone cannot be applied without start/end times; warn and skip
        # rather than guessing at the existing event's date fields.
        logger.warning(
            "[modify_event] Timezone provided but start_time and end_time are missing. Timezone will not be applied unless start/end times are also provided."
        )

    if not event_body:
        message = "No fields provided to modify the event."
        logger.warning(f"[modify_event] {message}")
        raise Exception(message)

    # Log the event ID for debugging
    logger.info(
        f"[modify_event] Attempting to update event with ID: '{event_id}' in calendar '{calendar_id}'"
    )

    # Get the existing event to preserve fields that aren't being updated
    try:
        existing_event = await asyncio.to_thread(
            lambda: service.events()
            .get(calendarId=calendar_id, eventId=event_id)
            .execute()
        )
        logger.info(
            "[modify_event] Successfully retrieved existing event before update"
        )

        # Preserve existing fields if not provided in the update
        _preserve_existing_fields(
            event_body,
            existing_event,
            {
                "summary": summary,
                "description": description,
                "location": location,
                # Use the already-normalized attendee objects (if provided); otherwise preserve existing
                "attendees": event_body.get("attendees"),
                "colorId": event_body.get("colorId"),
            },
        )

        # Handle Google Meet conference data
        if add_google_meet is not None:
            if add_google_meet:
                # Add Google Meet
                request_id = str(uuid.uuid4())
                event_body["conferenceData"] = {
                    "createRequest": {
                        "requestId": request_id,
                        "conferenceSolutionKey": {"type": "hangoutsMeet"},
                    }
                }
                logger.info(
                    f"[modify_event] Adding Google Meet conference with request ID: {request_id}"
                )
            else:
                # Remove Google Meet by setting conferenceData to empty
                event_body["conferenceData"] = {}
                logger.info("[modify_event] Removing Google Meet conference")
        elif "conferenceData" in existing_event:
            # Preserve existing conference data if not specified
            event_body["conferenceData"] = existing_event["conferenceData"]
            logger.info("[modify_event] Preserving existing conference data")

    except HttpError as get_error:
        if get_error.resp.status == 404:
            logger.error(
                f"[modify_event] Event not found during pre-update verification: {get_error}"
            )
            message = f"Event not found during verification. The event with ID '{event_id}' could not be found in calendar '{calendar_id}'. This may be due to incorrect ID format or the event no longer exists."
            raise Exception(message)
        else:
            # Non-404 verification failures are logged but do not block the update.
            logger.warning(
                f"[modify_event] Error during pre-update verification, but proceeding with update: {get_error}"
            )

    # Proceed with the update
    updated_event = await asyncio.to_thread(
        lambda: service.events()
        .update(
            calendarId=calendar_id,
            eventId=event_id,
            body=event_body,
            conferenceDataVersion=1,
        )
        .execute()
    )

    link = updated_event.get("htmlLink", "No link available")
    confirmation_message = f"Successfully modified event '{updated_event.get('summary', summary)}' (ID: {event_id}) for {user_google_email}. Link: {link}"

    # Add Google Meet information if conference was added
    if add_google_meet is True and "conferenceData" in updated_event:
        conference_data = updated_event["conferenceData"]
        if "entryPoints" in conference_data:
            for entry_point in conference_data["entryPoints"]:
                if entry_point.get("entryPointType") == "video":
                    meet_link = entry_point.get("uri", "")
                    if meet_link:
                        confirmation_message += f" Google Meet: {meet_link}"
                        break
    elif add_google_meet is False:
        confirmation_message += " (Google Meet removed)"

    logger.info(
        f"Event modified successfully for {user_google_email}. ID: {updated_event.get('id')}, Link: {link}"
    )
    return confirmation_message
|
| 1018 |
+
|
| 1019 |
+
|
| 1020 |
+
@server.tool()
@handle_http_errors("delete_event", service_type="calendar")
@require_google_service("calendar", "calendar_events")
async def delete_event(
    service, user_google_email: str, event_id: str, calendar_id: str = "primary"
) -> str:
    """
    Deletes an existing event.

    Args:
        user_google_email (str): The user's Google email address. Required.
        event_id (str): The ID of the event to delete.
        calendar_id (str): Calendar ID (default: 'primary').

    Returns:
        str: Confirmation message of the successful event deletion.
    """
    logger.info(
        f"[delete_event] Invoked. Email: '{user_google_email}', Event ID: {event_id}"
    )
    logger.info(
        f"[delete_event] Attempting to delete event with ID: '{event_id}' in calendar '{calendar_id}'"
    )

    # Verify the event exists before deleting so a bad ID yields a clear
    # error message rather than a raw API failure.
    try:
        await asyncio.to_thread(
            lambda: service.events()
            .get(calendarId=calendar_id, eventId=event_id)
            .execute()
        )
        logger.info("[delete_event] Successfully verified event exists before deletion")
    except HttpError as get_error:
        if get_error.resp.status != 404:
            # Any other verification problem is logged but does not stop the delete.
            logger.warning(
                f"[delete_event] Error during pre-delete verification, but proceeding with deletion: {get_error}"
            )
        else:
            logger.error(
                f"[delete_event] Event not found during pre-delete verification: {get_error}"
            )
            message = f"Event not found during verification. The event with ID '{event_id}' could not be found in calendar '{calendar_id}'. This may be due to incorrect ID format or the event no longer exists."
            raise Exception(message)

    # Perform the actual deletion off the event loop.
    await asyncio.to_thread(
        lambda: service.events()
        .delete(calendarId=calendar_id, eventId=event_id)
        .execute()
    )

    result = f"Successfully deleted event (ID: {event_id}) from calendar '{calendar_id}' for {user_google_email}."
    logger.info(f"Event deleted successfully for {user_google_email}. ID: {event_id}")
    return result
|
gchat/__init__.py
ADDED
|
@@ -0,0 +1,7 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Google Chat MCP Tools Package
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
from . import chat_tools
|
| 6 |
+
|
| 7 |
+
__all__ = ["chat_tools"]
|
gchat/chat_tools.py
ADDED
|
@@ -0,0 +1,223 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Google Chat MCP Tools
|
| 3 |
+
|
| 4 |
+
This module provides MCP tools for interacting with Google Chat API.
|
| 5 |
+
"""
|
| 6 |
+
|
| 7 |
+
import logging
|
| 8 |
+
import asyncio
|
| 9 |
+
from typing import Optional
|
| 10 |
+
|
| 11 |
+
from googleapiclient.errors import HttpError
|
| 12 |
+
|
| 13 |
+
# Auth & server utilities
|
| 14 |
+
from auth.service_decorator import require_google_service
|
| 15 |
+
from core.server import server
|
| 16 |
+
from core.utils import handle_http_errors
|
| 17 |
+
|
| 18 |
+
logger = logging.getLogger(__name__)
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
@server.tool()
@require_google_service("chat", "chat_read")
@handle_http_errors("list_spaces", service_type="chat")
async def list_spaces(
    service,
    user_google_email: str,
    page_size: int = 100,
    space_type: str = "all",  # "all", "room", "dm"
) -> str:
    """
    Lists Google Chat spaces (rooms and direct messages) accessible to the user.

    Returns:
        str: A formatted list of Google Chat spaces accessible to the user.
    """
    logger.info(f"[list_spaces] Email={user_google_email}, Type={space_type}")

    # Map the friendly space_type value onto the Chat API filter syntax;
    # "all" (or anything unrecognized) means no filter.
    type_filters = {
        "room": "spaceType = SPACE",
        "dm": "spaceType = DIRECT_MESSAGE",
    }
    request_params = {"pageSize": page_size}
    filter_param = type_filters.get(space_type)
    if filter_param:
        request_params["filter"] = filter_param

    response = await asyncio.to_thread(service.spaces().list(**request_params).execute)

    spaces = response.get("spaces", [])
    if not spaces:
        return f"No Chat spaces found for type '{space_type}'."

    lines = [f"Found {len(spaces)} Chat spaces (type: {space_type}):"]
    for space in spaces:
        space_name = space.get("displayName", "Unnamed Space")
        space_id = space.get("name", "")
        space_type_actual = space.get("spaceType", "UNKNOWN")
        lines.append(f"- {space_name} (ID: {space_id}, Type: {space_type_actual})")

    return "\n".join(lines)
|
| 63 |
+
|
| 64 |
+
|
| 65 |
+
@server.tool()
@require_google_service("chat", "chat_read")
@handle_http_errors("get_messages", service_type="chat")
async def get_messages(
    service,
    user_google_email: str,
    space_id: str,
    page_size: int = 50,
    order_by: str = "createTime desc",
) -> str:
    """
    Retrieves messages from a Google Chat space.

    Returns:
        str: Formatted messages from the specified space.
    """
    logger.info(f"[get_messages] Space ID: '{space_id}' for user '{user_google_email}'")

    # Look up the space first so output can show its display name.
    space_info = await asyncio.to_thread(service.spaces().get(name=space_id).execute)
    space_name = space_info.get("displayName", "Unknown Space")

    # Fetch one page of messages in the requested order.
    response = await asyncio.to_thread(
        service.spaces()
        .messages()
        .list(parent=space_id, pageSize=page_size, orderBy=order_by)
        .execute
    )

    messages = response.get("messages", [])
    if not messages:
        return f"No messages found in space '{space_name}' (ID: {space_id})."

    output = [f"Messages from '{space_name}' (ID: {space_id}):\n"]
    for msg in messages:
        sender = msg.get("sender", {}).get("displayName", "Unknown Sender")
        create_time = msg.get("createTime", "Unknown Time")
        text_content = msg.get("text", "No text content")
        msg_name = msg.get("name", "")
        output.extend(
            [
                f"[{create_time}] {sender}:",
                f" {text_content}",
                f" (Message ID: {msg_name})\n",
            ]
        )

    return "\n".join(output)
|
| 111 |
+
|
| 112 |
+
|
| 113 |
+
@server.tool()
@require_google_service("chat", "chat_write")
@handle_http_errors("send_message", service_type="chat")
async def send_message(
    service,
    user_google_email: str,
    space_id: str,
    message_text: str,
    thread_key: Optional[str] = None,
) -> str:
    """
    Sends a message to a Google Chat space.

    Returns:
        str: Confirmation message with sent message details.
    """
    logger.info(f"[send_message] Email: '{user_google_email}', Space: '{space_id}'")

    # Assemble the create() call; threadKey targets a threaded reply.
    request_params = {"parent": space_id, "body": {"text": message_text}}
    if thread_key:
        request_params["threadKey"] = thread_key

    message = await asyncio.to_thread(
        service.spaces().messages().create(**request_params).execute
    )

    message_name = message.get("name", "")
    create_time = message.get("createTime", "")

    msg = f"Message sent to space '{space_id}' by {user_google_email}. Message ID: {message_name}, Time: {create_time}"
    logger.info(
        f"Successfully sent message to space '{space_id}' by {user_google_email}"
    )
    return msg
|
| 150 |
+
|
| 151 |
+
|
| 152 |
+
@server.tool()
@require_google_service("chat", "chat_read")
@handle_http_errors("search_messages", service_type="chat")
async def search_messages(
    service,
    user_google_email: str,
    query: str,
    space_id: Optional[str] = None,
    page_size: int = 25,
) -> str:
    """
    Searches for messages in Google Chat spaces by text content.

    Args:
        user_google_email (str): The user's Google email address. Required.
        query (str): Text to search for. Embedded double quotes are escaped
            before being placed into the Chat API filter expression.
        space_id (Optional[str]): Restrict the search to a single space; when
            omitted, up to the first 10 accessible spaces are scanned.
        page_size (int): Maximum results when searching a single space.

    Returns:
        str: A formatted list of messages matching the search query.
    """
    logger.info(f"[search_messages] Email={user_google_email}, Query='{query}'")

    # BUGFIX: escape embedded double quotes so user input cannot break the
    # filter expression syntax (e.g. query = 'say "hi"').
    safe_query = query.replace('"', '\\"')
    text_filter = f'text:"{safe_query}"'

    # If specific space provided, search within that space
    if space_id:
        response = await asyncio.to_thread(
            service.spaces()
            .messages()
            .list(parent=space_id, pageSize=page_size, filter=text_filter)
            .execute
        )
        messages = response.get("messages", [])
        # Tag results so the per-message output shows where they came from
        # instead of falling back to "Unknown Space".
        for msg in messages:
            msg.setdefault("_space_name", space_id)
        context = f"space '{space_id}'"
    else:
        # Search across all accessible spaces (this may require iterating through spaces)
        # For simplicity, we'll search the user's spaces first
        spaces_response = await asyncio.to_thread(
            service.spaces().list(pageSize=100).execute
        )
        spaces = spaces_response.get("spaces", [])

        messages = []
        for space in spaces[:10]:  # Limit to first 10 spaces to avoid timeout
            try:
                space_messages = await asyncio.to_thread(
                    service.spaces()
                    .messages()
                    .list(parent=space.get("name"), pageSize=5, filter=text_filter)
                    .execute
                )
                space_msgs = space_messages.get("messages", [])
                for msg in space_msgs:
                    msg["_space_name"] = space.get("displayName", "Unknown")
                messages.extend(space_msgs)
            except HttpError:
                continue  # Skip spaces we can't access
        context = "all accessible spaces"

    if not messages:
        return f"No messages found matching '{query}' in {context}."

    output = [f"Found {len(messages)} messages matching '{query}' in {context}:"]
    for msg in messages:
        sender = msg.get("sender", {}).get("displayName", "Unknown Sender")
        create_time = msg.get("createTime", "Unknown Time")
        text_content = msg.get("text", "No text content")
        space_name = msg.get("_space_name", "Unknown Space")

        # Truncate long messages
        if len(text_content) > 100:
            text_content = text_content[:100] + "..."

        output.append(f"- [{create_time}] {sender} in '{space_name}': {text_content}")

    return "\n".join(output)
|