sample_id stringlengths 21 196 | text stringlengths 105 936k | metadata dict | category stringclasses 6
values |
|---|---|---|---|
PrefectHQ/fastmcp:tests/server/auth/providers/test_discord.py | """Tests for Discord OAuth provider."""
from unittest.mock import AsyncMock, MagicMock, patch
import pytest
from key_value.aio.stores.memory import MemoryStore
from fastmcp.server.auth.providers.discord import DiscordProvider, DiscordTokenVerifier
@pytest.fixture
def memory_storage() -> MemoryStore:
"""Provide a MemoryStore for tests to avoid SQLite initialization on Windows."""
return MemoryStore()
class TestDiscordProvider:
"""Test Discord OAuth provider functionality."""
def test_init_with_explicit_params(self, memory_storage: MemoryStore):
"""Test DiscordProvider initialization with explicit parameters."""
provider = DiscordProvider(
client_id="env_client_id",
client_secret="GOCSPX-test123",
base_url="https://myserver.com",
required_scopes=["email", "identify"],
jwt_signing_key="test-secret",
client_storage=memory_storage,
)
assert provider._upstream_client_id == "env_client_id"
assert provider._upstream_client_secret.get_secret_value() == "GOCSPX-test123"
assert str(provider.base_url) == "https://myserver.com/"
def test_init_defaults(self, memory_storage: MemoryStore):
"""Test that default values are applied correctly."""
provider = DiscordProvider(
client_id="env_client_id",
client_secret="GOCSPX-test123",
base_url="https://myserver.com",
jwt_signing_key="test-secret",
client_storage=memory_storage,
)
# Check defaults
assert provider._redirect_path == "/auth/callback"
def test_oauth_endpoints_configured_correctly(self, memory_storage: MemoryStore):
"""Test that OAuth endpoints are configured correctly."""
provider = DiscordProvider(
client_id="env_client_id",
client_secret="GOCSPX-test123",
base_url="https://myserver.com",
jwt_signing_key="test-secret",
client_storage=memory_storage,
)
# Check that endpoints use Discord's OAuth2 endpoints
assert (
provider._upstream_authorization_endpoint
== "https://discord.com/oauth2/authorize"
)
assert (
provider._upstream_token_endpoint == "https://discord.com/api/oauth2/token"
)
# Discord provider doesn't currently set a revocation endpoint
assert provider._upstream_revocation_endpoint is None
def test_discord_specific_scopes(self, memory_storage: MemoryStore):
"""Test handling of Discord-specific scope formats."""
# Just test that the provider accepts Discord-specific scopes without error
provider = DiscordProvider(
client_id="env_client_id",
client_secret="GOCSPX-test123",
base_url="https://myserver.com",
required_scopes=[
"identify",
"email",
],
jwt_signing_key="test-secret",
client_storage=memory_storage,
)
# Provider should initialize successfully with these scopes
assert provider is not None
def test_token_verifier_is_bound_to_provider_client_id(
self, memory_storage: MemoryStore
):
"""Test DiscordProvider binds token verifier to the configured client ID."""
provider = DiscordProvider(
client_id="expected-client-id",
client_secret="GOCSPX-test123",
base_url="https://myserver.com",
jwt_signing_key="test-secret",
client_storage=memory_storage,
)
verifier = provider._token_validator
assert isinstance(verifier, DiscordTokenVerifier)
assert verifier.expected_client_id == "expected-client-id"
class TestDiscordTokenVerifier:
"""Test DiscordTokenVerifier behavior."""
async def test_rejects_token_from_different_discord_application(self):
"""Token must be bound to configured Discord client_id."""
verifier = DiscordTokenVerifier(expected_client_id="expected-app-id")
mock_client = AsyncMock()
token_info_response = MagicMock()
token_info_response.status_code = 200
token_info_response.json.return_value = {
"application": {"id": "different-app-id"},
"user": {"id": "123"},
"scopes": ["identify"],
}
mock_client.get.return_value = token_info_response
with patch(
"fastmcp.server.auth.providers.discord.httpx.AsyncClient"
) as mock_client_class:
mock_client_class.return_value.__aenter__.return_value = mock_client
result = await verifier.verify_token("token")
assert result is None
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "tests/server/auth/providers/test_discord.py",
"license": "Apache License 2.0",
"lines": 104,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
PrefectHQ/fastmcp:tests/server/http/test_stale_access_token.py | """
Test for issue #1863: get_access_token() returns stale token after OAuth refresh.
This test demonstrates the bug where auth_context_var holds a stale token,
but the current HTTP request (via request_ctx) has a fresh token.
The test should FAIL with the current implementation and PASS after the fix.
"""
from unittest.mock import MagicMock
from mcp.server.auth.middleware.auth_context import auth_context_var
from mcp.server.auth.middleware.bearer_auth import AuthenticatedUser
from mcp.server.lowlevel.server import request_ctx
from mcp.shared.context import RequestContext
from starlette.requests import Request
from fastmcp.server.auth import AccessToken
from fastmcp.server.dependencies import get_access_token
class TestStaleAccessToken:
"""Test that get_access_token returns fresh token from request scope."""
def test_get_access_token_prefers_request_scope_over_stale_context_var(self):
"""
Regression test for issue #1863.
Scenario:
- auth_context_var has a STALE token (set at HTTP middleware level)
- request_ctx.request.scope["user"] has a FRESH token (per MCP message)
- get_access_token() should return the FRESH token
This simulates the case where:
1. A Streamable HTTP session was established with token A
2. auth_context_var was set to token A during session setup
3. Token expired, client refreshed, got token B
4. New MCP message arrives with token B in the request
5. get_access_token() should return token B, not stale token A
"""
# Create STALE token (in auth_context_var)
# Using FastMCP's AccessToken to avoid conversion issues
stale_token = AccessToken(
token="stale-token-from-initial-auth",
client_id="test-client",
scopes=["read"],
)
stale_user = AuthenticatedUser(stale_token)
# Create FRESH token (in request.scope["user"])
fresh_token = AccessToken(
token="fresh-token-after-refresh",
client_id="test-client",
scopes=["read"],
)
fresh_user = AuthenticatedUser(fresh_token)
# Create a mock request with fresh token in scope
scope = {
"type": "http",
"user": fresh_user,
"auth": MagicMock(),
}
mock_request = Request(scope)
# Create a mock RequestContext with the request
mock_request_context = MagicMock(spec=RequestContext)
mock_request_context.request = mock_request
# Set up the context vars:
# - auth_context_var has STALE token
# - request_ctx has request with FRESH token
auth_token = auth_context_var.set(stale_user)
request_token = request_ctx.set(mock_request_context)
try:
# Call get_access_token - should return FRESH token
result = get_access_token()
# Assert we get the FRESH token, not the stale one
assert result is not None, "Expected an access token but got None"
assert result.token == "fresh-token-after-refresh", (
f"Expected fresh token 'fresh-token-after-refresh' but got '{result.token}'. "
"get_access_token() is returning the stale token from auth_context_var "
"instead of the fresh token from request.scope['user']."
)
finally:
# Clean up context vars
auth_context_var.reset(auth_token)
request_ctx.reset(request_token)
def test_get_access_token_falls_back_to_context_var_when_no_request(self):
"""
Verify that get_access_token falls back to auth_context_var
when there's no HTTP request available.
"""
# Create token in auth_context_var using FastMCP's AccessToken
token = AccessToken(
token="context-var-token",
client_id="test-client",
scopes=["read"],
)
user = AuthenticatedUser(token)
# Set up auth_context_var but NOT request_ctx
auth_token = auth_context_var.set(user)
try:
result = get_access_token()
assert result is not None
assert result.token == "context-var-token"
finally:
auth_context_var.reset(auth_token)
def test_get_access_token_returns_none_when_no_auth(self):
"""
Verify that get_access_token returns None when there's no
authenticated user anywhere.
"""
result = get_access_token()
assert result is None
def test_get_access_token_falls_back_when_scope_user_is_not_authenticated(self):
"""
Verify that get_access_token falls back to auth_context_var when
scope["user"] exists but is not an AuthenticatedUser (e.g., UnauthenticatedUser).
"""
from starlette.authentication import UnauthenticatedUser
# Create token in auth_context_var
token = AccessToken(
token="context-var-token",
client_id="test-client",
scopes=["read"],
)
user = AuthenticatedUser(token)
# Create request with UnauthenticatedUser in scope
scope = {
"type": "http",
"user": UnauthenticatedUser(),
}
mock_request = Request(scope)
mock_request_context = MagicMock(spec=RequestContext)
mock_request_context.request = mock_request
auth_token = auth_context_var.set(user)
request_token = request_ctx.set(mock_request_context)
try:
result = get_access_token()
# Should fall back to auth_context_var since scope user is unauthenticated
assert result is not None
assert result.token == "context-var-token"
finally:
auth_context_var.reset(auth_token)
request_ctx.reset(request_token)
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "tests/server/http/test_stale_access_token.py",
"license": "Apache License 2.0",
"lines": 132,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
PrefectHQ/fastmcp:src/fastmcp/server/auth/providers/oci.py | """OCI OIDC provider for FastMCP.
The pull request for the provider is submitted to fastmcp.
This module provides OIDC Implementation to integrate MCP servers with OCI.
You only need OCI Identity Domain's discovery URL, client ID, client secret, and base URL.
Post Authentication, you get OCI IAM domain access token. That is not authorized to invoke OCI control plane.
You need to exchange the IAM domain access token for OCI UPST token to invoke OCI control plane APIs.
The sample code below has get_oci_signer function that returns OCI TokenExchangeSigner object.
You can use the signer object to create OCI service object.
Example:
```python
from fastmcp import FastMCP
from fastmcp.server.auth.providers.oci import OCIProvider
from fastmcp.server.dependencies import get_access_token
from fastmcp.utilities.logging import get_logger
import os
import oci
from oci.auth.signers import TokenExchangeSigner
logger = get_logger(__name__)
# Load configuration from environment
config_url = os.environ.get("OCI_CONFIG_URL") # OCI IAM Domain OIDC discovery URL
client_id = os.environ.get("OCI_CLIENT_ID") # Client ID configured for the OCI IAM Domain Integrated Application
client_secret = os.environ.get("OCI_CLIENT_SECRET") # Client secret configured for the OCI IAM Domain Integrated Application
iam_guid = os.environ.get("OCI_IAM_GUID") # IAM GUID configured for the OCI IAM Domain
# Simple OCI OIDC protection
auth = OCIProvider(
config_url=config_url, # config URL is the OCI IAM Domain OIDC discovery URL
client_id=client_id, # This is same as the client ID configured for the OCI IAM Domain Integrated Application
client_secret=client_secret, # This is same as the client secret configured for the OCI IAM Domain Integrated Application
required_scopes=["openid", "profile", "email"],
redirect_path="/auth/callback",
base_url="http://localhost:8000",
)
# NOTE: For production use, replace this with a thread-safe cache implementation
# such as threading.Lock-protected dict or a proper caching library
_global_token_cache = {} # In memory cache for OCI session token signer
def get_oci_signer() -> TokenExchangeSigner:
authntoken = get_access_token()
tokenID = authntoken.claims.get("jti")
token = authntoken.token
# Check if the signer exists for the token ID in memory cache
cached_signer = _global_token_cache.get(tokenID)
logger.debug(f"Global cached signer: {cached_signer}")
if cached_signer:
logger.debug(f"Using globally cached signer for token ID: {tokenID}")
return cached_signer
# If the signer is not yet created for the token then create new OCI signer object
logger.debug(f"Creating new signer for token ID: {tokenID}")
signer = TokenExchangeSigner(
jwt_or_func=token,
oci_domain_id=iam_guid.split(".")[0] if iam_guid else None, # This is same as IAM GUID configured for the OCI IAM Domain
client_id=client_id, # This is same as the client ID configured for the OCI IAM Domain Integrated Application
client_secret=client_secret, # This is same as the client secret configured for the OCI IAM Domain Integrated Application
)
logger.debug(f"Signer {signer} created for token ID: {tokenID}")
#Cache the signer object in memory cache
_global_token_cache[tokenID] = signer
logger.debug(f"Signer cached for token ID: {tokenID}")
return signer
mcp = FastMCP("My Protected Server", auth=auth)
```
"""
from key_value.aio.protocols import AsyncKeyValue
from pydantic import AnyHttpUrl
from fastmcp.server.auth.oidc_proxy import OIDCProxy
from fastmcp.utilities.auth import parse_scopes
from fastmcp.utilities.logging import get_logger
logger = get_logger(__name__)
class OCIProvider(OIDCProxy):
"""An OCI IAM Domain provider implementation for FastMCP.
This provider is a complete OCI integration that's ready to use with
just the configuration URL, client ID, client secret, and base URL.
Example:
```python
from fastmcp import FastMCP
from fastmcp.server.auth.providers.oci import OCIProvider
import os
# Load configuration from environment
auth = OCIProvider(
config_url=os.environ.get("OCI_CONFIG_URL"), # OCI IAM Domain OIDC discovery URL
client_id=os.environ.get("OCI_CLIENT_ID"), # Client ID configured for the OCI IAM Domain Integrated Application
client_secret=os.environ.get("OCI_CLIENT_SECRET"), # Client secret configured for the OCI IAM Domain Integrated Application
base_url="http://localhost:8000",
required_scopes=["openid", "profile", "email"],
redirect_path="/auth/callback",
)
mcp = FastMCP("My Protected Server", auth=auth)
```
"""
def __init__(
self,
*,
config_url: AnyHttpUrl | str,
client_id: str,
client_secret: str,
base_url: AnyHttpUrl | str,
audience: str | None = None,
issuer_url: AnyHttpUrl | str | None = None,
required_scopes: list[str] | None = None,
redirect_path: str | None = None,
allowed_client_redirect_uris: list[str] | None = None,
client_storage: AsyncKeyValue | None = None,
jwt_signing_key: str | bytes | None = None,
require_authorization_consent: bool = True,
consent_csp_policy: str | None = None,
) -> None:
"""Initialize OCI OIDC provider.
Args:
config_url: OCI OIDC Discovery URL
client_id: OCI IAM Domain Integrated Application client id
client_secret: OCI Integrated Application client secret
base_url: Public URL where OIDC endpoints will be accessible (includes any mount path)
audience: OCI API audience (optional)
issuer_url: Issuer URL for OCI IAM Domain metadata. This will override issuer URL from the discovery URL.
required_scopes: Required OCI scopes (defaults to ["openid"])
redirect_path: Redirect path configured in OCI IAM Domain Integrated Application. The default is "/auth/callback".
allowed_client_redirect_uris: List of allowed redirect URI patterns for MCP clients.
"""
# Parse scopes if provided as string
oci_required_scopes = (
parse_scopes(required_scopes) if required_scopes is not None else ["openid"]
)
super().__init__(
config_url=config_url,
client_id=client_id,
client_secret=client_secret,
audience=audience,
base_url=base_url,
issuer_url=issuer_url,
redirect_path=redirect_path,
required_scopes=oci_required_scopes,
allowed_client_redirect_uris=allowed_client_redirect_uris,
client_storage=client_storage,
jwt_signing_key=jwt_signing_key,
require_authorization_consent=require_authorization_consent,
consent_csp_policy=consent_csp_policy,
)
logger.debug(
"Initialized OCI OAuth provider for client %s with scopes: %s",
client_id,
oci_required_scopes,
)
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "src/fastmcp/server/auth/providers/oci.py",
"license": "Apache License 2.0",
"lines": 141,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | documentation |
PrefectHQ/fastmcp:examples/testing_demo/server.py | """
FastMCP Testing Demo Server
A simple MCP server demonstrating tools, resources, and prompts
with comprehensive test coverage.
"""
from fastmcp import FastMCP
# Create server
mcp = FastMCP("Testing Demo")
# Tools
@mcp.tool
def add(a: int, b: int) -> int:
"""Add two numbers together"""
return a + b
@mcp.tool
def greet(name: str, greeting: str = "Hello") -> str:
"""Greet someone with a customizable greeting"""
return f"{greeting}, {name}!"
@mcp.tool
async def async_multiply(x: float, y: float) -> float:
"""Multiply two numbers (async example)"""
return x * y
# Resources
@mcp.resource("demo://info")
def server_info() -> str:
"""Get server information"""
return "This is the FastMCP Testing Demo server"
@mcp.resource("demo://greeting/{name}")
def greeting_resource(name: str) -> str:
"""Get a personalized greeting resource"""
return f"Welcome to FastMCP, {name}!"
# Prompts
@mcp.prompt("hello")
def hello_prompt(name: str = "World") -> str:
"""Generate a hello world prompt"""
return f"Say hello to {name} in a friendly way."
@mcp.prompt("explain")
def explain_prompt(topic: str, detail_level: str = "medium") -> str:
"""Generate a prompt to explain a topic"""
if detail_level == "simple":
return f"Explain {topic} in simple terms for beginners."
elif detail_level == "detailed":
return f"Provide a detailed, technical explanation of {topic}."
else:
return f"Explain {topic} with moderate technical detail."
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "examples/testing_demo/server.py",
"license": "Apache License 2.0",
"lines": 44,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
PrefectHQ/fastmcp:examples/testing_demo/tests/test_server.py | """
Tests for the Testing Demo server.
Demonstrates pytest-asyncio patterns, fixtures, and testing best practices.
"""
import pytest
from dirty_equals import IsStr
from fastmcp.client import Client
@pytest.fixture
async def client():
"""
Client fixture for testing.
Uses async context manager and yields client synchronously.
No @pytest.mark.asyncio needed - asyncio_mode = "auto" handles it.
"""
# Import here to avoid import-time side effects
from server import mcp
async with Client(mcp) as client:
yield client
async def test_add_tool(client: Client):
"""Test the add tool with simple addition"""
result = await client.call_tool("add", {"a": 2, "b": 3})
assert result.data == 5
async def test_greet_tool_default(client: Client):
"""Test the greet tool with default greeting"""
result = await client.call_tool("greet", {"name": "Alice"})
assert result.data == "Hello, Alice!"
async def test_greet_tool_custom(client: Client):
"""Test the greet tool with custom greeting"""
result = await client.call_tool("greet", {"name": "Bob", "greeting": "Hi"})
assert result.data == "Hi, Bob!"
async def test_async_multiply_tool(client: Client):
"""Test the async multiply tool"""
result = await client.call_tool("async_multiply", {"x": 3.5, "y": 2.0})
assert result.data == 7.0
@pytest.mark.parametrize(
"a,b,expected",
[
(0, 0, 0),
(1, 1, 2),
(-1, 1, 0),
(100, 200, 300),
],
)
async def test_add_parametrized(client: Client, a: int, b: int, expected: int):
"""Test add tool with multiple parameter combinations"""
result = await client.call_tool("add", {"a": a, "b": b})
assert result.data == expected
async def test_server_info_resource(client: Client):
"""Test the server info resource"""
result = await client.read_resource("demo://info")
assert len(result) == 1
assert result[0].text == "This is the FastMCP Testing Demo server"
async def test_greeting_resource_template(client: Client):
"""Test the greeting resource template"""
result = await client.read_resource("demo://greeting/Charlie")
assert len(result) == 1
assert result[0].text == "Welcome to FastMCP, Charlie!"
async def test_hello_prompt_default(client: Client):
"""Test hello prompt with default parameter"""
result = await client.get_prompt("hello")
assert result.messages[0].content.text == "Say hello to World in a friendly way."
async def test_hello_prompt_custom(client: Client):
"""Test hello prompt with custom name"""
result = await client.get_prompt("hello", {"name": "Dave"})
assert result.messages[0].content.text == "Say hello to Dave in a friendly way."
async def test_explain_prompt_levels(client: Client):
"""Test explain prompt with different detail levels"""
# Simple level
result = await client.get_prompt(
"explain", {"topic": "MCP", "detail_level": "simple"}
)
assert "simple terms" in result.messages[0].content.text
assert "MCP" in result.messages[0].content.text
# Detailed level
result = await client.get_prompt(
"explain", {"topic": "MCP", "detail_level": "detailed"}
)
assert "detailed" in result.messages[0].content.text
assert "technical" in result.messages[0].content.text
async def test_list_tools(client: Client):
"""Test listing available tools"""
tools = await client.list_tools()
tool_names = [tool.name for tool in tools]
assert "add" in tool_names
assert "greet" in tool_names
assert "async_multiply" in tool_names
async def test_list_resources(client: Client):
"""Test listing available resources"""
resources = await client.list_resources()
resource_uris = [str(resource.uri) for resource in resources]
# Check that we have at least the static resource
assert "demo://info" in resource_uris
# There should be at least one resource listed
assert len(resource_uris) >= 1
async def test_list_prompts(client: Client):
"""Test listing available prompts"""
prompts = await client.list_prompts()
prompt_names = [prompt.name for prompt in prompts]
assert "hello" in prompt_names
assert "explain" in prompt_names
# Example using dirty-equals for flexible assertions
async def test_greet_with_dirty_equals(client: Client):
"""Test greet tool using dirty-equals for pattern matching"""
result = await client.call_tool("greet", {"name": "Eve"})
# Check that result data matches the pattern
assert result.data == IsStr(regex=r"^Hello, \w+!$")
# Example using inline-snapshot for complex data
async def test_tool_schema_structure(client: Client):
"""Test tool schema structure"""
tools = await client.list_tools()
add_tool = next(tool for tool in tools if tool.name == "add")
# Verify basic schema structure
assert add_tool.name == "add"
assert add_tool.description == "Add two numbers together"
assert "a" in add_tool.inputSchema["properties"]
assert "b" in add_tool.inputSchema["properties"]
assert add_tool.inputSchema["properties"]["a"]["type"] == "integer"
assert add_tool.inputSchema["properties"]["b"]["type"] == "integer"
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "examples/testing_demo/tests/test_server.py",
"license": "Apache License 2.0",
"lines": 118,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
PrefectHQ/fastmcp:tests/client/transports/test_transports.py | from ssl import VerifyMode
import httpx
from fastmcp.client.auth.oauth import OAuth
from fastmcp.client.transports import SSETransport, StreamableHttpTransport
async def test_oauth_uses_same_client_as_transport_streamable_http():
transport = StreamableHttpTransport(
"https://some.fake.url/",
httpx_client_factory=lambda *args, **kwargs: httpx.AsyncClient(
verify=False, *args, **kwargs
),
auth="oauth",
)
assert isinstance(transport.auth, OAuth)
async with transport.auth.httpx_client_factory() as httpx_client:
assert httpx_client._transport is not None
assert (
httpx_client._transport._pool._ssl_context.verify_mode # type: ignore[attr-defined]
== VerifyMode.CERT_NONE
)
async def test_oauth_uses_same_client_as_transport_sse():
transport = SSETransport(
"https://some.fake.url/",
httpx_client_factory=lambda *args, **kwargs: httpx.AsyncClient(
verify=False, *args, **kwargs
),
auth="oauth",
)
assert isinstance(transport.auth, OAuth)
async with transport.auth.httpx_client_factory() as httpx_client:
assert httpx_client._transport is not None
assert (
httpx_client._transport._pool._ssl_context.verify_mode # type: ignore[attr-defined]
== VerifyMode.CERT_NONE
)
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "tests/client/transports/test_transports.py",
"license": "Apache License 2.0",
"lines": 34,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
PrefectHQ/fastmcp:examples/tool_result_echo.py | """
FastMCP Echo Server with Metadata
Demonstrates how to return metadata alongside content and structured data.
The meta field can include execution details, versioning, or other information
that clients may find useful.
"""
import time
from dataclasses import dataclass
from fastmcp import FastMCP
from fastmcp.tools.tool import ToolResult
mcp = FastMCP("Echo Server")
@dataclass
class EchoData:
data: str
length: int
@mcp.tool
def echo(text: str) -> ToolResult:
"""Echo text back with metadata about the operation."""
start = time.perf_counter()
result = EchoData(data=text, length=len(text))
execution_time = (time.perf_counter() - start) * 1000
return ToolResult(
content=f"Echoed: {text}",
structured_content=result,
meta={
"execution_time_ms": round(execution_time, 2),
"character_count": len(text),
"word_count": len(text.split()),
},
)
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "examples/tool_result_echo.py",
"license": "Apache License 2.0",
"lines": 30,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
PrefectHQ/fastmcp:src/fastmcp/server/auth/providers/debug.py | """Debug token verifier for testing and special cases.
This module provides a flexible token verifier that delegates validation
to a custom callable. Useful for testing, development, or scenarios where
standard verification isn't possible (like opaque tokens without introspection).
Example:
```python
from fastmcp import FastMCP
from fastmcp.server.auth.providers.debug import DebugTokenVerifier
# Accept all tokens (default - useful for testing)
auth = DebugTokenVerifier()
# Custom sync validation logic
auth = DebugTokenVerifier(validate=lambda token: token.startswith("valid-"))
# Custom async validation logic
async def check_cache(token: str) -> bool:
return await redis.exists(f"token:{token}")
auth = DebugTokenVerifier(validate=check_cache)
mcp = FastMCP("My Server", auth=auth)
```
"""
from __future__ import annotations
import inspect
from collections.abc import Awaitable, Callable
from fastmcp.server.auth import TokenVerifier
from fastmcp.server.auth.auth import AccessToken
from fastmcp.utilities.logging import get_logger
logger = get_logger(__name__)
class DebugTokenVerifier(TokenVerifier):
"""Token verifier with custom validation logic.
This verifier delegates token validation to a user-provided callable.
By default, it accepts all non-empty tokens (useful for testing).
Use cases:
- Testing: Accept any token without real verification
- Development: Custom validation logic for prototyping
- Opaque tokens: When you have tokens with no introspection endpoint
WARNING: This bypasses standard security checks. Only use in controlled
environments or when you understand the security implications.
"""
def __init__(
self,
validate: Callable[[str], bool]
| Callable[[str], Awaitable[bool]] = lambda token: True,
client_id: str = "debug-client",
scopes: list[str] | None = None,
required_scopes: list[str] | None = None,
):
"""Initialize the debug token verifier.
Args:
validate: Callable that takes a token string and returns True if valid.
Can be sync or async. Default accepts all tokens.
client_id: Client ID to assign to validated tokens
scopes: Scopes to assign to validated tokens
required_scopes: Required scopes (inherited from TokenVerifier base class)
"""
super().__init__(required_scopes=required_scopes)
self.validate = validate
self.client_id = client_id
self.scopes = scopes or []
async def verify_token(self, token: str) -> AccessToken | None:
"""Verify token using custom validation logic.
Args:
token: The token string to validate
Returns:
AccessToken if validation succeeds, None otherwise
"""
# Reject empty tokens
if not token or not token.strip():
logger.debug("Rejecting empty token")
return None
try:
# Call validation function and await if result is awaitable
result = self.validate(token)
if inspect.isawaitable(result):
is_valid = await result
else:
is_valid = result
if not is_valid:
logger.debug("Token validation failed: callable returned False")
return None
# Return valid AccessToken
return AccessToken(
token=token,
client_id=self.client_id,
scopes=self.scopes,
expires_at=None, # No expiration
claims={"token": token}, # Store original token in claims
)
except Exception as e:
logger.debug("Token validation error: %s", e, exc_info=True)
return None
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "src/fastmcp/server/auth/providers/debug.py",
"license": "Apache License 2.0",
"lines": 89,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | documentation |
PrefectHQ/fastmcp:tests/server/auth/test_debug_verifier.py | """Unit tests for DebugTokenVerifier."""
import re
from fastmcp.server.auth.providers.debug import DebugTokenVerifier
class TestDebugTokenVerifier:
"""Test DebugTokenVerifier initialization and validation."""
def test_init_defaults(self):
"""Test initialization with default parameters."""
verifier = DebugTokenVerifier()
assert verifier.client_id == "debug-client"
assert verifier.scopes == []
assert verifier.required_scopes == []
assert callable(verifier.validate)
def test_init_custom_parameters(self):
"""Test initialization with custom parameters."""
verifier = DebugTokenVerifier(
validate=lambda t: t.startswith("valid-"),
client_id="custom-client",
scopes=["read", "write"],
required_scopes=["admin"],
)
assert verifier.client_id == "custom-client"
assert verifier.scopes == ["read", "write"]
assert verifier.required_scopes == ["admin"]
async def test_verify_token_default_accepts_all(self):
"""Test that default verifier accepts all non-empty tokens."""
verifier = DebugTokenVerifier()
result = await verifier.verify_token("any-token")
assert result is not None
assert result.token == "any-token"
assert result.client_id == "debug-client"
assert result.scopes == []
assert result.expires_at is None
assert result.claims == {"token": "any-token"}
async def test_verify_token_rejects_empty(self):
"""Test that empty tokens are rejected even with default verifier."""
verifier = DebugTokenVerifier()
# Empty string
assert await verifier.verify_token("") is None
# Whitespace only
assert await verifier.verify_token(" ") is None
async def test_verify_token_sync_callable_success(self):
"""Test token verification with custom sync callable that passes."""
verifier = DebugTokenVerifier(
validate=lambda t: t.startswith("valid-"),
client_id="test-client",
scopes=["read"],
)
result = await verifier.verify_token("valid-token-123")
assert result is not None
assert result.token == "valid-token-123"
assert result.client_id == "test-client"
assert result.scopes == ["read"]
assert result.expires_at is None
assert result.claims == {"token": "valid-token-123"}
async def test_verify_token_sync_callable_failure(self):
"""Test token verification with custom sync callable that fails."""
verifier = DebugTokenVerifier(validate=lambda t: t.startswith("valid-"))
result = await verifier.verify_token("invalid-token")
assert result is None
async def test_verify_token_async_callable_success(self):
    """Test token verification with custom async callable that passes."""
    known_tokens = {"token1", "token2", "token3"}

    async def async_validator(token: str) -> bool:
        # Simulate an async lookup (e.g. a database check).
        return token in known_tokens

    verifier = DebugTokenVerifier(
        validate=async_validator,
        client_id="async-client",
        scopes=["admin"],
    )
    result = await verifier.verify_token("token2")
    assert result is not None
    assert result.token == "token2"
    assert result.client_id == "async-client"
    assert result.scopes == ["admin"]
async def test_verify_token_async_callable_failure(self):
    """Test token verification with custom async callable that fails."""

    async def async_validator(token: str) -> bool:
        return token in {"token1", "token2", "token3"}

    # A token outside the known set fails the async check.
    verifier = DebugTokenVerifier(validate=async_validator)
    assert await verifier.verify_token("token99") is None
async def test_verify_token_callable_exception(self):
    """Test that exceptions in validate callable are handled gracefully."""

    def failing_validator(token: str) -> bool:
        raise ValueError("Something went wrong")

    # The exception must be swallowed and treated as a failed validation.
    verifier = DebugTokenVerifier(validate=failing_validator)
    assert await verifier.verify_token("any-token") is None
async def test_verify_token_async_callable_exception(self):
    """Test that exceptions in async validate callable are handled gracefully."""

    async def failing_async_validator(token: str) -> bool:
        raise ValueError("Async validation failed")

    # An exception from an async validator is also treated as a rejection.
    verifier = DebugTokenVerifier(validate=failing_async_validator)
    assert await verifier.verify_token("any-token") is None
async def test_verify_token_whitelist_pattern(self):
    """Test using verifier with a whitelist of allowed tokens."""
    allowed_tokens = {"secret-token-1", "secret-token-2", "admin-token"}
    verifier = DebugTokenVerifier(validate=lambda t: t in allowed_tokens)
    # Whitelisted tokens verify successfully.
    for good in ("secret-token-1", "admin-token"):
        assert await verifier.verify_token(good) is not None
    # Anything outside the whitelist is rejected.
    for bad in ("unknown-token", "hacker-token"):
        assert await verifier.verify_token(bad) is None
async def test_verify_token_pattern_matching(self):
    """Test using verifier with regex-like pattern matching."""
    token_format = re.compile(r"^[A-Z]{3}-\d{4}-[a-z]{2}$")
    verifier = DebugTokenVerifier(
        validate=lambda t: token_format.match(t) is not None,
        client_id="pattern-client",
    )
    # A conforming token verifies and carries the configured client id.
    result = await verifier.verify_token("ABC-1234-xy")
    assert result is not None
    assert result.client_id == "pattern-client"
    # Non-conforming tokens are rejected.
    assert await verifier.verify_token("abc-1234-xy") is None  # Wrong case
    assert await verifier.verify_token("ABC-123-xy") is None  # Wrong digits
    assert await verifier.verify_token("ABC-1234-xyz") is None  # Too many chars
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "tests/server/auth/test_debug_verifier.py",
"license": "Apache License 2.0",
"lines": 120,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
PrefectHQ/fastmcp:src/fastmcp/server/middleware/tool_injection.py | """A middleware for injecting tools into the MCP server context."""
import warnings
from collections.abc import Sequence
from logging import Logger
from typing import Annotated, Any
import mcp.types
from mcp.types import Prompt
from pydantic import AnyUrl
from typing_extensions import override
import fastmcp
from fastmcp.resources.resource import ResourceResult
from fastmcp.server.context import Context
from fastmcp.server.middleware.middleware import CallNext, Middleware, MiddlewareContext
from fastmcp.tools.tool import Tool, ToolResult
from fastmcp.utilities.logging import get_logger
logger: Logger = get_logger(name=__name__)
class ToolInjectionMiddleware(Middleware):
    """A middleware for injecting tools into the context."""

    def __init__(self, tools: Sequence[Tool]):
        """Initialize the tool injection middleware."""
        self._tools_to_inject: Sequence[Tool] = tools
        # Index the injected tools by name for O(1) call interception.
        self._tools_to_inject_by_name: dict[str, Tool] = {}
        for injected in tools:
            self._tools_to_inject_by_name[injected.name] = injected

    @override
    async def on_list_tools(
        self,
        context: MiddlewareContext[mcp.types.ListToolsRequest],
        call_next: CallNext[mcp.types.ListToolsRequest, Sequence[Tool]],
    ) -> Sequence[Tool]:
        """Inject tools into the response."""
        # Injected tools are listed first, ahead of the server's own tools.
        downstream = await call_next(context)
        return [*self._tools_to_inject, *downstream]

    @override
    async def on_call_tool(
        self,
        context: MiddlewareContext[mcp.types.CallToolRequestParams],
        call_next: CallNext[mcp.types.CallToolRequestParams, ToolResult],
    ) -> ToolResult:
        """Intercept tool calls to injected tools."""
        injected = self._tools_to_inject_by_name.get(context.message.name)
        if injected is None:
            # Not one of ours — let the rest of the chain handle it.
            return await call_next(context)
        # Run the injected tool directly, bypassing the server's registry.
        return await injected.run(arguments=context.message.arguments or {})
async def list_prompts(context: Context) -> list[Prompt]:
    """List prompts available on the server."""
    return await context.list_prompts()
# Pre-built Tool wrapper for injection. The docstring above becomes the
# tool's user-facing description, so it is part of runtime behavior.
list_prompts_tool = Tool.from_function(
    fn=list_prompts,
)
async def get_prompt(
    context: Context,
    name: Annotated[str, "The name of the prompt to render."],
    arguments: Annotated[
        dict[str, Any] | None, "The arguments to pass to the prompt."
    ] = None,
) -> mcp.types.GetPromptResult:
    """Render a prompt available on the server."""
    return await context.get_prompt(name=name, arguments=arguments)
# Pre-built Tool wrapper for injection. The Annotated strings above surface
# as parameter descriptions in the generated tool schema.
get_prompt_tool = Tool.from_function(
    fn=get_prompt,
)
class PromptToolMiddleware(ToolInjectionMiddleware):
    """A middleware for injecting prompts as tools into the context.

    .. deprecated::
        Use ``fastmcp.server.transforms.PromptsAsTools`` instead.
    """

    def __init__(self) -> None:
        # Emit the deprecation warning only when the user has them enabled.
        if fastmcp.settings.deprecation_warnings:
            warnings.warn(
                "PromptToolMiddleware is deprecated. Use the PromptsAsTools transform instead: "
                "from fastmcp.server.transforms import PromptsAsTools",
                DeprecationWarning,
                stacklevel=2,
            )
        # Inject the module-level prompt listing/rendering tools.
        super().__init__(tools=[list_prompts_tool, get_prompt_tool])
async def list_resources(context: Context) -> list[mcp.types.Resource]:
    """List resources available on the server."""
    return await context.list_resources()
# Pre-built Tool wrapper for injection; the docstring above doubles as the
# tool's description at runtime.
list_resources_tool = Tool.from_function(
    fn=list_resources,
)
async def read_resource(
    context: Context,
    uri: Annotated[AnyUrl | str, "The URI of the resource to read."],
) -> ResourceResult:
    """Read a resource available on the server."""
    return await context.read_resource(uri=uri)
# Pre-built Tool wrapper for injection; the Annotated string above becomes
# the parameter description in the generated tool schema.
read_resource_tool = Tool.from_function(
    fn=read_resource,
)
class ResourceToolMiddleware(ToolInjectionMiddleware):
    """A middleware for injecting resources as tools into the context.

    .. deprecated::
        Use ``fastmcp.server.transforms.ResourcesAsTools`` instead.
    """

    def __init__(self) -> None:
        # Emit the deprecation warning only when the user has them enabled.
        if fastmcp.settings.deprecation_warnings:
            warnings.warn(
                "ResourceToolMiddleware is deprecated. Use the ResourcesAsTools transform instead: "
                "from fastmcp.server.transforms import ResourcesAsTools",
                DeprecationWarning,
                stacklevel=2,
            )
        # Inject the module-level resource listing/reading tools.
        super().__init__(tools=[list_resources_tool, read_resource_tool])
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "src/fastmcp/server/middleware/tool_injection.py",
"license": "Apache License 2.0",
"lines": 106,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
PrefectHQ/fastmcp:tests/server/middleware/test_tool_injection.py | """Tests for tool injection middleware."""
import math
import pytest
from inline_snapshot import snapshot
from mcp.types import Tool as SDKTool
from fastmcp import FastMCP
from fastmcp.client import Client
from fastmcp.client.client import CallToolResult
from fastmcp.client.transports import FastMCPTransport
from fastmcp.server.middleware.tool_injection import (
ToolInjectionMiddleware,
)
from fastmcp.tools.function_tool import FunctionTool
from fastmcp.tools.tool import Tool
def multiply_fn(a: int, b: int) -> int:
    """Multiply two numbers."""
    # Docstring is kept verbatim: it becomes the injected tool's description.
    product = a * b
    return product
def divide_fn(a: int, b: int) -> float:
    """Divide two numbers."""
    # Guard against division by zero with a descriptive error.
    if b != 0:
        return a / b
    raise ValueError("Cannot divide by zero")
# Module-level tool fixtures shared across tests; tagged "math" so that
# tag-based disabling can be exercised (see test_injected_tool_bypass_filtering).
multiply_tool = Tool.from_function(fn=multiply_fn, name="multiply", tags={"math"})
divide_tool = Tool.from_function(fn=divide_fn, name="divide", tags={"math"})
class TestToolInjectionMiddleware:
    """Tests with real FastMCP server."""

    @pytest.fixture
    def base_server(self):
        """Create a base FastMCP server."""
        # Baseline server with exactly two tools: add and subtract.
        mcp = FastMCP("BaseServer")

        @mcp.tool
        def add(a: int, b: int) -> int:
            """Add two numbers."""
            return a + b

        @mcp.tool
        def subtract(a: int, b: int) -> int:
            """Subtract two numbers."""
            return a - b

        return mcp

    async def test_list_tools_includes_injected_tools(self, base_server: FastMCP):
        """Test that list_tools returns both base and injected tools."""
        injected_tools: list[FunctionTool] = [
            multiply_tool,
            divide_tool,
        ]
        middleware: ToolInjectionMiddleware = ToolInjectionMiddleware(
            tools=injected_tools
        )
        base_server.add_middleware(middleware)
        async with Client[FastMCPTransport](base_server) as client:
            tools: list[SDKTool] = await client.list_tools()
            # Should have all tools: multiply, divide, add, subtract
            assert len(tools) == 4
            tool_names: list[str] = [tool.name for tool in tools]
            assert "multiply" in tool_names
            assert "divide" in tool_names
            assert "add" in tool_names
            assert "subtract" in tool_names

    async def test_call_injected_tool(self, base_server: FastMCP):
        """Test that injected tools can be called successfully."""
        injected_tools: list[FunctionTool] = [multiply_tool]
        middleware: ToolInjectionMiddleware = ToolInjectionMiddleware(
            tools=injected_tools
        )
        base_server.add_middleware(middleware)
        async with Client[FastMCPTransport](base_server) as client:
            result: CallToolResult = await client.call_tool(
                name="multiply", arguments={"a": 7, "b": 6}
            )
            # The injected tool's result arrives via structured content.
            assert result.structured_content is not None
            assert isinstance(result.structured_content, dict)
            assert result.structured_content["result"] == 42

    async def test_call_base_tool_still_works(self, base_server: FastMCP):
        """Test that base server tools still work after injecting tools."""
        injected_tools: list[FunctionTool] = [multiply_tool]
        middleware: ToolInjectionMiddleware = ToolInjectionMiddleware(
            tools=injected_tools
        )
        base_server.add_middleware(middleware)
        async with Client[FastMCPTransport](base_server) as client:
            result: CallToolResult = await client.call_tool(
                name="add", arguments={"a": 10, "b": 5}
            )
            assert result.structured_content is not None
            assert isinstance(result.structured_content, dict)
            assert result.structured_content["result"] == 15

    async def test_injected_tool_error_handling(self, base_server: FastMCP):
        """Test that errors in injected tools are properly handled."""
        injected_tools: list[FunctionTool] = [divide_tool]
        middleware: ToolInjectionMiddleware = ToolInjectionMiddleware(
            tools=injected_tools
        )
        base_server.add_middleware(middleware)
        async with Client[FastMCPTransport](base_server) as client:
            # The injected tool's ValueError should propagate as a tool error.
            with pytest.raises(Exception, match="Cannot divide by zero"):
                _ = await client.call_tool(name="divide", arguments={"a": 10, "b": 0})

    async def test_multiple_tool_injections(self, base_server: FastMCP):
        """Test multiple tool injection middlewares can be stacked."""

        def power(a: int, b: int) -> int:
            """Raise a to the power of b."""
            return int(math.pow(float(a), float(b)))

        def modulo(a: int, b: int) -> int:
            """Calculate a modulo b."""
            return a % b

        middleware1 = ToolInjectionMiddleware(
            tools=[Tool.from_function(fn=power, name="power")]
        )
        middleware2 = ToolInjectionMiddleware(
            tools=[Tool.from_function(fn=modulo, name="modulo")]
        )
        base_server.add_middleware(middleware1)
        base_server.add_middleware(middleware2)
        async with Client(base_server) as client:
            tools = await client.list_tools()
            # Should have all tools
            assert len(tools) == 4
            tool_names = [tool.name for tool in tools]
            assert "power" in tool_names
            assert "modulo" in tool_names
            assert "add" in tool_names
            assert "subtract" in tool_names
        # Test that both injected tools work
        async with Client(base_server) as client:
            power_result = await client.call_tool("power", {"a": 2, "b": 3})
            assert power_result.structured_content is not None
            assert isinstance(power_result.structured_content, dict)
            assert power_result.structured_content["result"] == 8
            modulo_result = await client.call_tool("modulo", {"a": 10, "b": 3})
            assert modulo_result.structured_content is not None
            assert isinstance(modulo_result.structured_content, dict)
            assert modulo_result.structured_content["result"] == 1

    async def test_injected_tool_with_complex_return_type(self, base_server: FastMCP):
        """Test injected tools with complex return types."""

        def calculate_stats(numbers: list[int]) -> dict[str, int | float]:
            """Calculate statistics for a list of numbers."""
            return {
                "sum": sum(numbers),
                "average": sum(numbers) / len(numbers),
                "min": min(numbers),
                "max": max(numbers),
                "count": len(numbers),
            }

        middleware = ToolInjectionMiddleware(
            tools=[Tool.from_function(fn=calculate_stats, name="calculate_stats")]
        )
        base_server.add_middleware(middleware)
        async with Client(base_server) as client:
            result = await client.call_tool(
                "calculate_stats", {"numbers": [1, 2, 3, 4, 5]}
            )
            assert result.structured_content is not None
            assert isinstance(result.structured_content, dict)
            # inline_snapshot pins the exact serialized dict.
            assert result.structured_content == snapshot(
                {"sum": 15, "average": 3.0, "min": 1, "max": 5, "count": 5}
            )

    async def test_injected_tool_metadata_preserved(self, base_server: FastMCP):
        """Test that injected tool metadata is preserved."""

        def multiply(a: int, b: int) -> int:
            """Multiply two numbers."""
            return a * b

        injected_tools = [Tool.from_function(fn=multiply, name="multiply")]
        middleware = ToolInjectionMiddleware(tools=injected_tools)
        base_server.add_middleware(middleware)
        async with Client(base_server) as client:
            tools = await client.list_tools()
            multiply_tool = next(t for t in tools if t.name == "multiply")
            # Docstring becomes the description; params appear in the schema.
            assert multiply_tool.description == "Multiply two numbers."
            assert "a" in multiply_tool.inputSchema["properties"]
            assert "b" in multiply_tool.inputSchema["properties"]

    async def test_injected_tool_does_not_conflict_with_base_tool(
        self, base_server: FastMCP
    ):
        """Test that injected tools with same name as base tools are called correctly."""

        def add(a: int, b: int) -> int:
            """Injected add that multiplies instead."""
            return a * b

        middleware: ToolInjectionMiddleware = ToolInjectionMiddleware(
            tools=[Tool.from_function(fn=add, name="add")]
        )
        base_server.add_middleware(middleware)
        async with Client[FastMCPTransport](base_server) as client:
            result: CallToolResult = await client.call_tool(
                name="add", arguments={"a": 5, "b": 3}
            )
            # Should use the injected tool (multiply behavior)
            assert result.structured_content is not None
            assert result.structured_content["result"] == 15

    async def test_injected_tool_bypass_filtering(self, base_server: FastMCP):
        """Test that injected tools bypass filtering."""
        middleware: ToolInjectionMiddleware = ToolInjectionMiddleware(
            tools=[multiply_tool]
        )
        base_server.add_middleware(middleware)
        # Disabling the "math" tag hides registry tools, not injected ones.
        base_server.disable(tags={"math"})
        async with Client[FastMCPTransport](base_server) as client:
            tools: list[SDKTool] = await client.list_tools()
            tool_names: list[str] = [tool.name for tool in tools]
            assert "multiply" in tool_names

    async def test_empty_tool_injection(self, base_server: FastMCP):
        """Test that middleware with no tools doesn't affect behavior."""
        middleware: ToolInjectionMiddleware = ToolInjectionMiddleware(tools=[])
        base_server.add_middleware(middleware)
        async with Client[FastMCPTransport](base_server) as client:
            tools: list[SDKTool] = await client.list_tools()
            result: CallToolResult = await client.call_tool(
                name="add", arguments={"a": 3, "b": 4}
            )
            # Should only have the base tools
            assert len(tools) == 2
            tool_names: list[str] = [tool.name for tool in tools]
            assert "add" in tool_names
            assert "subtract" in tool_names
            assert result.structured_content is not None
            assert isinstance(result.structured_content, dict)
            assert result.structured_content["result"] == 7
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "tests/server/middleware/test_tool_injection.py",
"license": "Apache License 2.0",
"lines": 215,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
PrefectHQ/fastmcp:src/fastmcp/server/auth/handlers/authorize.py | """Enhanced authorization handler with improved error responses.
This module provides an enhanced authorization handler that wraps the MCP SDK's
AuthorizationHandler to provide better error messages when clients attempt to
authorize with unregistered client IDs.
The enhancement adds:
- Content negotiation: HTML for browsers, JSON for API clients
- Enhanced JSON responses with registration endpoint hints
- Styled HTML error pages with registration links/forms
- Link headers pointing to registration endpoints
"""
from __future__ import annotations
import json
from typing import TYPE_CHECKING
from mcp.server.auth.handlers.authorize import (
AuthorizationHandler as SDKAuthorizationHandler,
)
from pydantic import AnyHttpUrl
from starlette.requests import Request
from starlette.responses import Response
from fastmcp.utilities.logging import get_logger
from fastmcp.utilities.ui import (
INFO_BOX_STYLES,
TOOLTIP_STYLES,
create_logo,
create_page,
create_secure_html_response,
)
if TYPE_CHECKING:
from mcp.server.auth.provider import OAuthAuthorizationServerProvider
logger = get_logger(__name__)
def create_unregistered_client_html(
    client_id: str,
    registration_endpoint: str,
    discovery_endpoint: str,
    server_name: str | None = None,
    server_icon_url: str | None = None,
    title: str = "Client Not Registered",
) -> str:
    """Create styled HTML error page for unregistered client attempts.

    Args:
        client_id: The unregistered client ID that was provided
        registration_endpoint: URL of the registration endpoint
        discovery_endpoint: URL of the OAuth metadata discovery endpoint
        server_name: Optional server name for branding
        server_icon_url: Optional server icon URL
        title: Page title

    Returns:
        HTML string for the error page
    """
    # NOTE(review): registration_endpoint and discovery_endpoint are accepted
    # but never interpolated into the page body below — confirm whether the
    # HTML should link to them (the caller also sets a Link header separately).
    import html as html_module

    # client_id comes from the request, so escape it before embedding in HTML.
    client_id_escaped = html_module.escape(client_id)

    # Main error message
    error_box = f"""
    <div class="info-box error">
        <p>The client ID <code>{client_id_escaped}</code> was not found in the server's client registry.</p>
    </div>
    """

    # What to do - yellow warning box
    warning_box = """
    <div class="info-box warning">
        <p>Your MCP client opened this page to complete OAuth authorization,
        but the server did not recognize its client ID. To fix this:</p>
        <ul>
            <li>Close this browser window</li>
            <li>Clear authentication tokens in your MCP client (or restart it)</li>
            <li>Try connecting again - your client should automatically re-register</li>
        </ul>
    </div>
    """

    # Help link with tooltip (similar to consent screen)
    help_link = """
    <div class="help-link-container">
        <span class="help-link">
            Why am I seeing this?
            <span class="tooltip">
                OAuth 2.0 requires clients to register before authorization.
                This server returned a 400 error because the provided client
                ID was not found.
                <br><br>
                In browser-delegated OAuth flows, your application cannot
                detect this error automatically; it's waiting for a
                callback that will never arrive. You must manually clear
                auth tokens and reconnect.
            </span>
        </span>
    </div>
    """

    # Build page content
    content = f"""
    <div class="container">
        {create_logo(icon_url=server_icon_url, alt_text=server_name or "FastMCP")}
        <h1>{title}</h1>
        {error_box}
        {warning_box}
    </div>
    {help_link}
    """

    # Use same styles as consent page
    additional_styles = (
        INFO_BOX_STYLES
        + TOOLTIP_STYLES
        + """
    /* Error variant for info-box */
    .info-box.error {
        background: #fef2f2;
        border-color: #f87171;
    }
    .info-box.error strong {
        color: #991b1b;
    }
    /* Warning variant for info-box (yellow) */
    .info-box.warning {
        background: #fffbeb;
        border-color: #fbbf24;
    }
    .info-box.warning strong {
        color: #92400e;
    }
    .info-box code {
        background: rgba(0, 0, 0, 0.05);
        padding: 2px 6px;
        border-radius: 3px;
        font-family: 'SF Mono', Monaco, 'Cascadia Code', monospace;
        font-size: 0.9em;
    }
    .info-box ul {
        margin: 10px 0;
        padding-left: 20px;
    }
    .info-box li {
        margin: 6px 0;
    }
    """
    )
    return create_page(
        content=content,
        title=title,
        additional_styles=additional_styles,
    )
class AuthorizationHandler(SDKAuthorizationHandler):
    """Authorization handler with enhanced error responses for unregistered clients.

    This handler extends the MCP SDK's AuthorizationHandler to provide better UX
    when clients attempt to authorize without being registered. It implements
    content negotiation to return:
    - HTML error pages for browser requests
    - Enhanced JSON with registration hints for API clients
    - Link headers pointing to registration endpoints

    This maintains OAuth 2.1 compliance (returns 400 for invalid client_id)
    while providing actionable guidance to fix the error.
    """

    def __init__(
        self,
        provider: OAuthAuthorizationServerProvider,
        base_url: AnyHttpUrl | str,
        server_name: str | None = None,
        server_icon_url: str | None = None,
    ):
        """Initialize the enhanced authorization handler.

        Args:
            provider: OAuth authorization server provider
            base_url: Base URL of the server for constructing endpoint URLs
            server_name: Optional server name for branding
            server_icon_url: Optional server icon URL for branding
        """
        super().__init__(provider)
        # Normalize so endpoint URLs can be built with simple concatenation.
        self._base_url = str(base_url).rstrip("/")
        self._server_name = server_name
        self._server_icon_url = server_icon_url

    async def handle(self, request: Request) -> Response:
        """Handle authorization request with enhanced error responses.

        This method extends the SDK's authorization handler and intercepts
        errors for unregistered clients to provide better error responses
        based on the client's Accept header.

        Args:
            request: The authorization request

        Returns:
            Response (redirect on success, error response on failure)
        """
        # Call the SDK handler
        response = await super().handle(request)
        # Check if this is a client not found error
        if response.status_code == 400:
            # Try to extract client_id from request for enhanced error
            client_id: str | None = None
            if request.method == "GET":
                client_id = request.query_params.get("client_id")
            else:
                form = await request.form()
                client_id_value = form.get("client_id")
                # Ensure client_id is a string, not UploadFile
                if isinstance(client_id_value, str):
                    client_id = client_id_value
            # If we have a client_id and the error is about it not being found,
            # enhance the response
            if client_id:
                try:
                    # Check if response body contains "not found" error
                    if hasattr(response, "body"):
                        body = json.loads(bytes(response.body))
                        if (
                            body.get("error") == "invalid_request"
                            and "not found" in body.get("error_description", "").lower()
                        ):
                            return await self._create_enhanced_error_response(
                                request, client_id, body.get("state")
                            )
                except Exception:
                    # If we can't parse the response, just return the original
                    pass
        return response

    async def _create_enhanced_error_response(
        self, request: Request, client_id: str, state: str | None
    ) -> Response:
        """Create enhanced error response with content negotiation.

        Args:
            request: The original request
            client_id: The unregistered client ID
            state: The state parameter from the request

        Returns:
            HTML or JSON error response based on Accept header
        """
        registration_endpoint = f"{self._base_url}/register"
        discovery_endpoint = f"{self._base_url}/.well-known/oauth-authorization-server"
        # Extract server metadata from app state (same pattern as consent screen)
        from fastmcp.server.server import FastMCP

        fastmcp = getattr(request.app.state, "fastmcp_server", None)
        if isinstance(fastmcp, FastMCP):
            server_name = fastmcp.name
            icons = fastmcp.icons
            # Use the first configured icon, if any, for branding.
            server_icon_url = icons[0].src if icons else None
        else:
            # Fall back to the values configured at construction time.
            server_name = self._server_name
            server_icon_url = self._server_icon_url
        # Check Accept header for content negotiation
        accept = request.headers.get("accept", "")
        # Prefer HTML for browsers
        if "text/html" in accept:
            html = create_unregistered_client_html(
                client_id=client_id,
                registration_endpoint=registration_endpoint,
                discovery_endpoint=discovery_endpoint,
                server_name=server_name,
                server_icon_url=server_icon_url,
            )
            response = create_secure_html_response(html, status_code=400)
        else:
            # Return enhanced JSON for API clients
            from mcp.server.auth.handlers.authorize import AuthorizationErrorResponse

            error_data = AuthorizationErrorResponse(
                error="invalid_request",
                error_description=(
                    f"Client ID '{client_id}' is not registered with this server. "
                    f"MCP clients should automatically re-register by sending a POST request to "
                    f"the registration_endpoint and retry authorization. "
                    f"If this persists, clear cached authentication tokens and reconnect."
                ),
                state=state,
            )
            # Add extra fields to help clients discover registration
            error_dict = error_data.model_dump(exclude_none=True)
            error_dict["registration_endpoint"] = registration_endpoint
            error_dict["authorization_server_metadata"] = discovery_endpoint
            from starlette.responses import JSONResponse

            response = JSONResponse(
                status_code=400,
                content=error_dict,
                headers={"Cache-Control": "no-store"},
            )
        # Add Link header for registration endpoint discovery
        response.headers["Link"] = (
            f'<{registration_endpoint}>; rel="http://oauth.net/core/2.1/#registration"'
        )
        logger.info(
            "Unregistered client_id=%s, returned %s error response",
            client_id,
            "HTML" if "text/html" in accept else "JSON",
        )
        return response
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "src/fastmcp/server/auth/handlers/authorize.py",
"license": "Apache License 2.0",
"lines": 278,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | documentation |
PrefectHQ/fastmcp:src/fastmcp/server/auth/middleware.py | """Enhanced authentication middleware with better error messages.
This module provides enhanced versions of MCP SDK authentication middleware
that return more helpful error messages for developers troubleshooting
authentication issues.
"""
from __future__ import annotations
import json
from mcp.server.auth.middleware.bearer_auth import (
RequireAuthMiddleware as SDKRequireAuthMiddleware,
)
from starlette.types import Send
from fastmcp.utilities.logging import get_logger
logger = get_logger(__name__)
class RequireAuthMiddleware(SDKRequireAuthMiddleware):
    """Enhanced authentication middleware with detailed error messages.

    Extends the SDK's RequireAuthMiddleware to provide more actionable
    error messages when authentication fails. This helps developers
    understand what went wrong and how to fix it.
    """

    async def _send_auth_error(
        self, send: Send, status_code: int, error: str, description: str
    ) -> None:
        """Send an authentication error response with enhanced error messages.

        Overrides the SDK's _send_auth_error to provide more detailed
        error descriptions that help developers troubleshoot authentication
        issues.

        Args:
            send: ASGI send callable
            status_code: HTTP status code (401 or 403)
            error: OAuth error code
            description: Base error description
        """
        # Swap in a more actionable description for the common 401 case;
        # insufficient_scope (and anything else) already carries enough detail
        # from the SDK, so it passes through unchanged.
        if status_code == 401 and error == "invalid_token":
            enhanced_description = (
                "Authentication failed. The provided bearer token is invalid, expired, or no longer recognized by the server. "
                "To resolve: clear authentication tokens in your MCP client and reconnect. "
                "Your client should automatically re-register and obtain new tokens."
            )
        else:
            enhanced_description = description

        # Assemble the WWW-Authenticate challenge (RFC 6750 Bearer scheme).
        challenge = f'Bearer error="{error}", error_description="{enhanced_description}"'
        if self.resource_metadata_url:
            challenge += f', resource_metadata="{self.resource_metadata_url}"'

        # JSON body mirrors the header fields for API clients.
        payload = json.dumps(
            {"error": error, "error_description": enhanced_description}
        ).encode()

        await send(
            {
                "type": "http.response.start",
                "status": status_code,
                "headers": [
                    (b"content-type", b"application/json"),
                    (b"content-length", str(len(payload)).encode()),
                    (b"www-authenticate", challenge.encode()),
                ],
            }
        )
        await send(
            {
                "type": "http.response.body",
                "body": payload,
            }
        )
        logger.info(
            "Auth error returned: %s (status=%d)",
            error,
            status_code,
        )
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "src/fastmcp/server/auth/middleware.py",
"license": "Apache License 2.0",
"lines": 77,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
PrefectHQ/fastmcp:tests/server/auth/test_enhanced_error_responses.py | """Tests for enhanced OAuth error responses.
This test suite covers:
1. Enhanced authorization handler (HTML and JSON error pages)
2. Enhanced middleware (better error messages)
3. Content negotiation
4. Server branding in error pages
"""
import pytest
from mcp.shared.auth import OAuthClientInformationFull
from pydantic import AnyUrl
from starlette.applications import Starlette
from starlette.testclient import TestClient
from fastmcp import FastMCP
from fastmcp.server.auth.oauth_proxy import OAuthProxy
from fastmcp.server.auth.providers.jwt import JWTVerifier, RSAKeyPair
class TestEnhancedAuthorizationHandler:
    """Tests for enhanced authorization handler error responses."""

    @pytest.fixture
    def rsa_key_pair(self) -> RSAKeyPair:
        """Generate RSA key pair for testing."""
        return RSAKeyPair.generate()

    @pytest.fixture
    def oauth_proxy(self, rsa_key_pair):
        """Create OAuth proxy for testing."""
        # In-memory client storage keeps the test self-contained (no SQLite).
        from key_value.aio.stores.memory import MemoryStore

        return OAuthProxy(
            upstream_authorization_endpoint="https://github.com/login/oauth/authorize",
            upstream_token_endpoint="https://github.com/login/oauth/access_token",
            upstream_client_id="test-client-id",
            upstream_client_secret="test-client-secret",
            token_verifier=JWTVerifier(
                public_key=rsa_key_pair.public_key,
                issuer="https://test.com",
                audience="https://test.com",
                base_url="https://test.com",
            ),
            base_url="https://myserver.com",
            jwt_signing_key="test-secret",
            client_storage=MemoryStore(),
        )

    def test_unregistered_client_returns_html_for_browser(self, oauth_proxy):
        """Test that unregistered client returns styled HTML for browser requests."""
        app = Starlette(routes=oauth_proxy.get_routes())
        with TestClient(app) as client:
            # Browser-style request: Accept header asks for HTML.
            response = client.get(
                "/authorize",
                params={
                    "client_id": "unregistered-client-id",
                    "redirect_uri": "http://localhost:12345/callback",
                    "response_type": "code",
                    "code_challenge": "test-challenge",
                    "state": "test-state",
                },
                headers={"Accept": "text/html"},
            )
            # Should return 400 with HTML content
            assert response.status_code == 400
            assert "text/html" in response.headers["content-type"]
            # HTML should contain error message
            html = response.text
            assert "Client Not Registered" in html
            assert "unregistered-client-id" in html
            assert "To fix this" in html
            assert "Close this browser window" in html
            assert "Clear authentication tokens" in html
            # Should have Link header for registration endpoint
            assert "Link" in response.headers
            assert "/register" in response.headers["Link"]

    def test_unregistered_client_returns_json_for_api(self, oauth_proxy):
        """Test that unregistered client returns enhanced JSON for API clients."""
        app = Starlette(routes=oauth_proxy.get_routes())
        with TestClient(app) as client:
            # API-style request: Accept header asks for JSON.
            response = client.get(
                "/authorize",
                params={
                    "client_id": "unregistered-client-id",
                    "redirect_uri": "http://localhost:12345/callback",
                    "response_type": "code",
                    "code_challenge": "test-challenge",
                    "state": "test-state",
                },
                headers={"Accept": "application/json"},
            )
            # Should return 400 with JSON content
            assert response.status_code == 400
            assert "application/json" in response.headers["content-type"]
            # JSON should have enhanced error response
            data = response.json()
            assert data["error"] == "invalid_request"
            assert "unregistered-client-id" in data["error_description"]
            assert data["state"] == "test-state"
            # Should include registration endpoint hints
            assert "registration_endpoint" in data
            assert data["registration_endpoint"] == "https://myserver.com/register"
            assert "authorization_server_metadata" in data
            # Should have Link header
            assert "Link" in response.headers
            assert "/register" in response.headers["Link"]

    def test_successful_authorization_not_enhanced(self, oauth_proxy):
        """Test that successful authorizations are not modified by enhancement."""
        app = Starlette(routes=oauth_proxy.get_routes())
        # Register a valid client first
        client_info = OAuthClientInformationFull(
            client_id="valid-client",
            client_secret="valid-secret",
            redirect_uris=[AnyUrl("http://localhost:12345/callback")],
        )
        # Need to register synchronously
        import asyncio

        asyncio.run(oauth_proxy.register_client(client_info))
        with TestClient(app) as client:
            response = client.get(
                "/authorize",
                params={
                    "client_id": "valid-client",
                    "redirect_uri": "http://localhost:12345/callback",
                    "response_type": "code",
                    "code_challenge": "test-challenge",
                    "state": "test-state",
                },
                headers={"Accept": "text/html"},
                follow_redirects=False,
            )
            # Should redirect to consent page (302), not return error
            assert response.status_code == 302
            assert "/consent" in response.headers["location"]

    def test_html_error_includes_server_branding(self, oauth_proxy):
        """Test that HTML error page includes server branding from FastMCP instance."""
        from mcp.types import Icon

        # Create FastMCP server with custom branding
        mcp = FastMCP(
            "My Custom Server",
            icons=[Icon(src="https://example.com/icon.png", mimeType="image/png")],
        )
        # Create app with OAuth routes
        app = Starlette(routes=oauth_proxy.get_routes())
        # Attach FastMCP instance to app state (same as done in http.py)
        app.state.fastmcp_server = mcp
        with TestClient(app) as client:
            response = client.get(
                "/authorize",
                params={
                    "client_id": "unregistered-client-id",
                    "redirect_uri": "http://localhost:12345/callback",
                    "response_type": "code",
                    "code_challenge": "test-challenge",
                },
                headers={"Accept": "text/html"},
            )
            assert response.status_code == 400
            html = response.text
            # Should include custom server icon
            assert "https://example.com/icon.png" in html
class TestEnhancedRequireAuthMiddleware:
"""Tests for enhanced authentication middleware error messages."""
@pytest.fixture
def rsa_key_pair(self) -> RSAKeyPair:
"""Generate RSA key pair for testing."""
return RSAKeyPair.generate()
@pytest.fixture
def jwt_verifier(self, rsa_key_pair):
"""Create JWT verifier for testing."""
return JWTVerifier(
public_key=rsa_key_pair.public_key,
issuer="https://test.com",
audience="https://test.com",
base_url="https://test.com",
)
def test_invalid_token_enhanced_error_message(self, jwt_verifier):
"""Test that invalid_token errors have enhanced error messages."""
from fastmcp.server.http import create_streamable_http_app
server = FastMCP("Test Server")
@server.tool
def test_tool() -> str:
return "test"
app = create_streamable_http_app(
server=server,
streamable_http_path="/mcp",
auth=jwt_verifier,
)
with TestClient(app) as client:
# Request without Authorization header
response = client.post("/mcp")
assert response.status_code == 401
assert "www-authenticate" in response.headers
# Check enhanced error message
data = response.json()
assert data["error"] == "invalid_token"
# Should have enhanced description with resolution steps
assert "clear authentication tokens" in data["error_description"]
assert "automatically re-register" in data["error_description"]
def test_invalid_token_www_authenticate_header_format(self, jwt_verifier):
"""Test that WWW-Authenticate header format matches SDK."""
from fastmcp.server.http import create_streamable_http_app
server = FastMCP("Test Server")
app = create_streamable_http_app(
server=server,
streamable_http_path="/mcp",
auth=jwt_verifier,
)
with TestClient(app) as client:
response = client.post("/mcp")
assert response.status_code == 401
www_auth = response.headers["www-authenticate"]
# Should follow Bearer challenge format
assert www_auth.startswith("Bearer ")
assert 'error="invalid_token"' in www_auth
assert "error_description=" in www_auth
def test_insufficient_scope_not_enhanced(self, rsa_key_pair):
"""Test that insufficient_scope errors are not modified."""
# Create a valid token with wrong scopes
from fastmcp.server.http import create_streamable_http_app
jwt_verifier = JWTVerifier(
public_key=rsa_key_pair.public_key,
issuer="https://test.com",
audience="https://test.com",
base_url="https://test.com",
)
server = FastMCP("Test Server")
@server.tool
def test_tool() -> str:
return "test"
app = create_streamable_http_app(
server=server,
streamable_http_path="/mcp",
auth=jwt_verifier,
)
# Note: Testing insufficient_scope would require mocking the verifier
# to return a token with wrong scopes. For now, we verify the middleware
# is properly in place by checking it rejects unauthenticated requests.
with TestClient(app) as client:
response = client.post("/mcp")
# Without a valid token, we get invalid_token
assert response.status_code == 401
class TestContentNegotiation:
"""Tests for content negotiation in error responses."""
@pytest.fixture
def oauth_proxy(self):
"""Create OAuth proxy for testing."""
from key_value.aio.stores.memory import MemoryStore
return OAuthProxy(
upstream_authorization_endpoint="https://github.com/login/oauth/authorize",
upstream_token_endpoint="https://github.com/login/oauth/access_token",
upstream_client_id="test-client-id",
upstream_client_secret="test-client-secret",
token_verifier=JWTVerifier(
public_key=RSAKeyPair.generate().public_key,
issuer="https://test.com",
audience="https://test.com",
base_url="https://test.com",
),
base_url="https://myserver.com",
jwt_signing_key="test-secret",
client_storage=MemoryStore(),
)
def test_html_preferred_when_both_accepted(self, oauth_proxy):
"""Test that HTML is preferred when both text/html and application/json are accepted."""
app = Starlette(routes=oauth_proxy.get_routes())
with TestClient(app) as client:
response = client.get(
"/authorize",
params={
"client_id": "unregistered-client-id",
"redirect_uri": "http://localhost:12345/callback",
"response_type": "code",
"code_challenge": "test-challenge",
},
headers={"Accept": "text/html,application/json"},
)
# Should prefer HTML
assert response.status_code == 400
assert "text/html" in response.headers["content-type"]
def test_json_when_only_json_accepted(self, oauth_proxy):
"""Test that JSON is returned when only application/json is accepted."""
app = Starlette(routes=oauth_proxy.get_routes())
with TestClient(app) as client:
response = client.get(
"/authorize",
params={
"client_id": "unregistered-client-id",
"redirect_uri": "http://localhost:12345/callback",
"response_type": "code",
"code_challenge": "test-challenge",
},
headers={"Accept": "application/json"},
)
assert response.status_code == 400
assert "application/json" in response.headers["content-type"]
def test_json_when_no_accept_header(self, oauth_proxy):
"""Test that JSON is returned when no Accept header is provided."""
app = Starlette(routes=oauth_proxy.get_routes())
with TestClient(app) as client:
response = client.get(
"/authorize",
params={
"client_id": "unregistered-client-id",
"redirect_uri": "http://localhost:12345/callback",
"response_type": "code",
"code_challenge": "test-challenge",
},
)
# Without Accept header, should return JSON (API default)
assert response.status_code == 400
assert "application/json" in response.headers["content-type"]
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "tests/server/auth/test_enhanced_error_responses.py",
"license": "Apache License 2.0",
"lines": 304,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
PrefectHQ/fastmcp:tests/server/test_icons.py | """Tests for icon support across all MCP object types."""
from mcp.types import Icon
from fastmcp import Client, FastMCP
from fastmcp.prompts import Message, Prompt
from fastmcp.resources import Resource
from fastmcp.resources.template import ResourceTemplate
from fastmcp.tools import Tool
class TestServerIcons:
"""Test icon support at the server/implementation level."""
async def test_server_with_icons_and_website_url(self):
"""Test that server accepts icons and websiteUrl in constructor."""
icons = [
Icon(
src="https://example.com/icon.png",
mimeType="image/png",
sizes=["48x48"],
),
Icon(
src="data:image/svg+xml;base64,PHN2Zz48L3N2Zz4=",
mimeType="image/svg+xml",
sizes=["any"],
),
]
mcp = FastMCP(
name="TestServer",
version="1.0.0",
website_url="https://example.com",
icons=icons,
)
# Verify that icons and website_url are passed to the underlying server
async with Client(mcp) as client:
server_info = client.initialize_result.serverInfo
assert server_info.websiteUrl == "https://example.com"
assert server_info.icons == icons
async def test_server_without_icons_and_website_url(self):
"""Test that server works without icons and websiteUrl."""
mcp = FastMCP(name="TestServer")
async with Client(mcp) as client:
server_info = client.initialize_result.serverInfo
assert server_info.websiteUrl is None
assert server_info.icons is None
class TestToolIcons:
"""Test icon support for tools."""
async def test_tool_with_icons(self):
"""Test that tools can have icons."""
mcp = FastMCP("TestServer")
icons = [
Icon(src="https://example.com/tool-icon.png", mimeType="image/png"),
]
@mcp.tool(icons=icons)
def my_tool(name: str) -> str:
"""A tool with an icon."""
return f"Hello, {name}!"
async with Client(mcp) as client:
tools = await client.list_tools()
assert len(tools) == 1
tool = tools[0]
assert tool.icons == icons
async def test_tool_from_function_with_icons(self):
"""Test creating a tool from a function with icons."""
icons = [Icon(src="https://example.com/icon.png")]
def my_function(x: int) -> int:
"""A function."""
return x * 2
tool = Tool.from_function(my_function, icons=icons)
assert tool.icons == icons
# Verify it converts to MCP tool correctly
mcp_tool = tool.to_mcp_tool()
assert mcp_tool.icons == icons
async def test_tool_without_icons(self):
"""Test that tools work without icons."""
mcp = FastMCP("TestServer")
@mcp.tool
def my_tool(name: str) -> str:
"""A tool without an icon."""
return f"Hello, {name}!"
async with Client(mcp) as client:
tools = await client.list_tools()
assert len(tools) == 1
tool = tools[0]
assert tool.icons is None
class TestResourceIcons:
"""Test icon support for resources."""
async def test_resource_with_icons(self):
"""Test that resources can have icons."""
mcp = FastMCP("TestServer")
icons = [Icon(src="https://example.com/resource-icon.png")]
@mcp.resource("test://resource", icons=icons)
def my_resource() -> str:
"""A resource with an icon."""
return "Resource content"
async with Client(mcp) as client:
resources = await client.list_resources()
assert len(resources) == 1
resource = resources[0]
assert resource.icons == icons
async def test_resource_from_function_with_icons(self):
"""Test creating a resource from a function with icons."""
icons = [Icon(src="https://example.com/icon.png")]
def my_function() -> str:
"""A function."""
return "content"
resource = Resource.from_function(
my_function,
uri="test://resource",
icons=icons,
)
assert resource.icons == icons
# Verify it converts to MCP resource correctly
mcp_resource = resource.to_mcp_resource()
assert mcp_resource.icons == icons
async def test_resource_without_icons(self):
"""Test that resources work without icons."""
mcp = FastMCP("TestServer")
@mcp.resource("test://resource")
def my_resource() -> str:
"""A resource without an icon."""
return "Resource content"
async with Client(mcp) as client:
resources = await client.list_resources()
assert len(resources) == 1
resource = resources[0]
assert resource.icons is None
class TestResourceTemplateIcons:
"""Test icon support for resource templates."""
async def test_resource_template_with_icons(self):
"""Test that resource templates can have icons."""
mcp = FastMCP("TestServer")
icons = [Icon(src="https://example.com/template-icon.png")]
@mcp.resource("test://resource/{id}", icons=icons)
def my_template(id: str) -> str:
"""A resource template with an icon."""
return f"Resource {id}"
async with Client(mcp) as client:
templates = await client.list_resource_templates()
assert len(templates) == 1
template = templates[0]
assert template.icons == icons
async def test_resource_template_from_function_with_icons(self):
"""Test creating a resource template from a function with icons."""
icons = [Icon(src="https://example.com/icon.png")]
def my_function(id: str) -> str:
"""A function."""
return f"content-{id}"
template = ResourceTemplate.from_function(
my_function,
uri_template="test://resource/{id}",
icons=icons,
)
assert template.icons == icons
# Verify it converts to MCP template correctly
mcp_template = template.to_mcp_template()
assert mcp_template.icons == icons
async def test_resource_template_without_icons(self):
"""Test that resource templates work without icons."""
mcp = FastMCP("TestServer")
@mcp.resource("test://resource/{id}")
def my_template(id: str) -> str:
"""A resource template without an icon."""
return f"Resource {id}"
async with Client(mcp) as client:
templates = await client.list_resource_templates()
assert len(templates) == 1
template = templates[0]
assert template.icons is None
class TestPromptIcons:
"""Test icon support for prompts."""
async def test_prompt_with_icons(self):
"""Test that prompts can have icons."""
mcp = FastMCP("TestServer")
icons = [Icon(src="https://example.com/prompt-icon.png")]
@mcp.prompt(icons=icons)
def my_prompt(name: str):
"""A prompt with an icon."""
return Message(f"Hello, {name}!")
async with Client(mcp) as client:
prompts = await client.list_prompts()
assert len(prompts) == 1
prompt = prompts[0]
assert prompt.icons == icons
async def test_prompt_from_function_with_icons(self):
"""Test creating a prompt from a function with icons."""
icons = [Icon(src="https://example.com/icon.png")]
def my_function(topic: str):
"""A function."""
return Message(f"Tell me about {topic}")
prompt = Prompt.from_function(my_function, icons=icons)
assert prompt.icons == icons
# Verify it converts to MCP prompt correctly
mcp_prompt = prompt.to_mcp_prompt()
assert mcp_prompt.icons == icons
async def test_prompt_without_icons(self):
"""Test that prompts work without icons."""
mcp = FastMCP("TestServer")
@mcp.prompt
def my_prompt(name: str):
"""A prompt without an icon."""
return Message(f"Hello, {name}!")
async with Client(mcp) as client:
prompts = await client.list_prompts()
assert len(prompts) == 1
prompt = prompts[0]
assert prompt.icons is None
class TestIconTypes:
"""Test different types of icon data."""
async def test_multiple_icon_sizes(self):
"""Test that multiple icon sizes can be specified."""
icons = [
Icon(
src="https://example.com/icon-48.png",
mimeType="image/png",
sizes=["48x48"],
),
Icon(
src="https://example.com/icon-96.png",
mimeType="image/png",
sizes=["96x96"],
),
Icon(
src="https://example.com/icon.svg",
mimeType="image/svg+xml",
sizes=["any"],
),
]
mcp = FastMCP("TestServer", icons=icons)
async with Client(mcp) as client:
server_info = client.initialize_result.serverInfo
assert len(server_info.icons) == 3
assert server_info.icons == icons
async def test_data_uri_icon(self):
"""Test using data URIs for icons."""
# Simple SVG data URI
data_uri = "data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHdpZHRoPSIyNCIgaGVpZ2h0PSIyNCI+PHBhdGggZD0iTTEyIDJDNi40OCAyIDIgNi40OCAyIDEyczQuNDggMTAgMTAgMTAgMTAtNC40OCAxMC0xMFMxNy41MiAyIDEyIDJ6Ii8+PC9zdmc+"
icons = [Icon(src=data_uri, mimeType="image/svg+xml")]
mcp = FastMCP("TestServer")
@mcp.tool(icons=icons)
def my_tool() -> str:
"""A tool with a data URI icon."""
return "result"
async with Client(mcp) as client:
tools = await client.list_tools()
assert tools[0].icons[0].src == data_uri
async def test_icon_without_optional_fields(self):
"""Test that icons work with only the src field."""
icons = [Icon(src="https://example.com/icon.png")]
mcp = FastMCP("TestServer", icons=icons)
async with Client(mcp) as client:
server_info = client.initialize_result.serverInfo
assert server_info.icons[0].src == "https://example.com/icon.png"
assert server_info.icons[0].mimeType is None
assert server_info.icons[0].sizes is None
class TestIconImport:
"""Test that Icon must be imported from mcp.types."""
def test_icon_import(self):
"""Test that Icon must be imported from mcp.types, not fastmcp."""
# Icon should NOT be available from fastmcp
import fastmcp
assert not hasattr(fastmcp, "Icon")
# Icon should be imported from mcp.types
from mcp.types import Icon as MCPIcon
icon = MCPIcon(src="https://example.com/icon.png")
assert icon.src == "https://example.com/icon.png"
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "tests/server/test_icons.py",
"license": "Apache License 2.0",
"lines": 262,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
PrefectHQ/fastmcp:src/fastmcp/server/auth/jwt_issuer.py | """JWT token issuance and verification for FastMCP OAuth Proxy.
This module implements the token factory pattern for OAuth proxies, where the proxy
issues its own JWT tokens to clients instead of forwarding upstream provider tokens.
This maintains proper OAuth 2.0 token audience boundaries.
"""
from __future__ import annotations
import base64
import time
from typing import Any, overload
from authlib.jose import JsonWebToken
from authlib.jose.errors import JoseError
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.kdf.hkdf import HKDF
from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC
from fastmcp.utilities.logging import get_logger
logger = get_logger(__name__)
KDF_ITERATIONS = 1000000
@overload
def derive_jwt_key(*, high_entropy_material: str, salt: str) -> bytes:
"""Derive JWT signing key from a high-entropy key material and server salt."""
@overload
def derive_jwt_key(*, low_entropy_material: str, salt: str) -> bytes:
"""Derive JWT signing key from a low-entropy key material and server salt."""
def derive_jwt_key(
*,
high_entropy_material: str | None = None,
low_entropy_material: str | None = None,
salt: str,
) -> bytes:
"""Derive JWT signing key from a high-entropy or low-entropy key material and server salt."""
if high_entropy_material is not None and low_entropy_material is not None:
raise ValueError(
"Either high_entropy_material or low_entropy_material must be provided, but not both"
)
if high_entropy_material is not None:
derived_key = HKDF(
algorithm=hashes.SHA256(),
length=32,
salt=salt.encode(),
info=b"Fernet",
).derive(key_material=high_entropy_material.encode())
return base64.urlsafe_b64encode(derived_key)
if low_entropy_material is not None:
pbkdf2 = PBKDF2HMAC(
algorithm=hashes.SHA256(),
length=32,
salt=salt.encode(),
iterations=KDF_ITERATIONS,
).derive(key_material=low_entropy_material.encode())
return base64.urlsafe_b64encode(pbkdf2)
raise ValueError(
"Either high_entropy_material or low_entropy_material must be provided"
)
class JWTIssuer:
"""Issues and validates FastMCP-signed JWT tokens using HS256.
This issuer creates JWT tokens for MCP clients with proper audience claims,
maintaining OAuth 2.0 token boundaries. Tokens are signed with HS256 using
a key derived from the upstream client secret.
"""
def __init__(
self,
issuer: str,
audience: str,
signing_key: bytes,
):
"""Initialize JWT issuer.
Args:
issuer: Token issuer (FastMCP server base URL)
audience: Token audience (typically {base_url}/mcp)
signing_key: HS256 signing key (32 bytes)
"""
self.issuer = issuer
self.audience = audience
self._signing_key = signing_key
self._jwt = JsonWebToken(["HS256"])
def issue_access_token(
self,
client_id: str,
scopes: list[str],
jti: str,
expires_in: int = 3600,
upstream_claims: dict[str, Any] | None = None,
) -> str:
"""Issue a minimal FastMCP access token.
FastMCP tokens are reference tokens containing only the minimal claims
needed for validation and lookup. The JTI maps to the upstream token
which contains actual user identity and authorization data.
Args:
client_id: MCP client ID
scopes: Token scopes
jti: Unique token identifier (maps to upstream token)
expires_in: Token lifetime in seconds
upstream_claims: Optional claims from upstream IdP token to include
Returns:
Signed JWT token
"""
now = int(time.time())
header = {"alg": "HS256", "typ": "JWT"}
payload: dict[str, Any] = {
"iss": self.issuer,
"aud": self.audience,
"client_id": client_id,
"scope": " ".join(scopes),
"exp": now + expires_in,
"iat": now,
"jti": jti,
}
if upstream_claims:
payload["upstream_claims"] = upstream_claims
token_bytes = self._jwt.encode(header, payload, self._signing_key)
token = token_bytes.decode("utf-8")
logger.debug(
"Issued access token for client=%s jti=%s exp=%d",
client_id,
jti[:8],
payload["exp"],
)
return token
def issue_refresh_token(
self,
client_id: str,
scopes: list[str],
jti: str,
expires_in: int,
upstream_claims: dict[str, Any] | None = None,
) -> str:
"""Issue a minimal FastMCP refresh token.
FastMCP refresh tokens are reference tokens containing only the minimal
claims needed for validation and lookup. The JTI maps to the upstream
token which contains actual user identity and authorization data.
Args:
client_id: MCP client ID
scopes: Token scopes
jti: Unique token identifier (maps to upstream token)
expires_in: Token lifetime in seconds (should match upstream refresh expiry)
upstream_claims: Optional claims from upstream IdP token to include
Returns:
Signed JWT token
"""
now = int(time.time())
header = {"alg": "HS256", "typ": "JWT"}
payload: dict[str, Any] = {
"iss": self.issuer,
"aud": self.audience,
"client_id": client_id,
"scope": " ".join(scopes),
"exp": now + expires_in,
"iat": now,
"jti": jti,
"token_use": "refresh",
}
if upstream_claims:
payload["upstream_claims"] = upstream_claims
token_bytes = self._jwt.encode(header, payload, self._signing_key)
token = token_bytes.decode("utf-8")
logger.debug(
"Issued refresh token for client=%s jti=%s exp=%d",
client_id,
jti[:8],
payload["exp"],
)
return token
def verify_token(self, token: str) -> dict[str, Any]:
"""Verify and decode a FastMCP token.
Validates JWT signature, expiration, issuer, and audience.
Args:
token: JWT token to verify
Returns:
Decoded token payload
Raises:
JoseError: If token is invalid, expired, or has wrong claims
"""
try:
# Decode and verify signature
payload = self._jwt.decode(token, self._signing_key)
# Validate expiration
exp = payload.get("exp")
if exp and exp < time.time():
logger.debug("Token expired")
raise JoseError("Token has expired")
# Validate issuer
if payload.get("iss") != self.issuer:
logger.debug("Token has invalid issuer")
raise JoseError("Invalid token issuer")
# Validate audience
if payload.get("aud") != self.audience:
logger.debug("Token has invalid audience")
raise JoseError("Invalid token audience")
logger.debug(
"Token verified successfully for subject=%s", payload.get("sub")
)
return payload
except JoseError as e:
logger.debug("Token validation failed: %s", e)
raise
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "src/fastmcp/server/auth/jwt_issuer.py",
"license": "Apache License 2.0",
"lines": 195,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
PrefectHQ/fastmcp:tests/server/auth/test_jwt_issuer.py | """Unit tests for JWT issuer and token encryption."""
import base64
import time
import pytest
from authlib.jose.errors import JoseError
from fastmcp.server.auth.jwt_issuer import (
JWTIssuer,
derive_jwt_key,
)
class TestKeyDerivation:
"""Tests for HKDF key derivation functions."""
def test_derive_jwt_key_produces_32_bytes(self):
"""Test that JWT key derivation produces 32-byte key."""
key = derive_jwt_key(high_entropy_material="test-secret", salt="test-salt")
assert len(key) == 44
assert isinstance(key, bytes)
# base64 decode and make sure its 32 bytes
key_bytes = base64.urlsafe_b64decode(key)
assert len(key_bytes) == 32
key = derive_jwt_key(low_entropy_material="test-secret", salt="test-salt")
assert len(key) == 44
assert isinstance(key, bytes)
# base64 decode and make sure its 32 bytes
key_bytes = base64.urlsafe_b64decode(key)
assert len(key_bytes) == 32
def test_derive_jwt_key_with_different_secrets_produces_different_keys(self):
"""Test that different secrets produce different keys."""
key1 = derive_jwt_key(high_entropy_material="secret1", salt="salt")
key2 = derive_jwt_key(high_entropy_material="secret2", salt="salt")
assert key1 != key2
key1 = derive_jwt_key(low_entropy_material="secret1", salt="salt")
key2 = derive_jwt_key(low_entropy_material="secret2", salt="salt")
assert key1 != key2
def test_derive_jwt_key_with_different_salts_produces_different_keys(self):
"""Test that different salts produce different keys."""
key1 = derive_jwt_key(high_entropy_material="secret", salt="salt1")
key2 = derive_jwt_key(high_entropy_material="secret", salt="salt2")
assert key1 != key2
key1 = derive_jwt_key(low_entropy_material="secret", salt="salt1")
key2 = derive_jwt_key(low_entropy_material="secret", salt="salt2")
assert key1 != key2
def test_derive_jwt_key_is_deterministic(self):
"""Test that same inputs always produce same key."""
key1 = derive_jwt_key(high_entropy_material="secret", salt="salt")
key2 = derive_jwt_key(high_entropy_material="secret", salt="salt")
assert key1 == key2
key1 = derive_jwt_key(low_entropy_material="secret", salt="salt")
key2 = derive_jwt_key(low_entropy_material="secret", salt="salt")
assert key1 == key2
class TestJWTIssuer:
"""Tests for JWT token issuance and verification."""
@pytest.fixture
def issuer(self):
"""Create a JWT issuer for testing."""
signing_key = derive_jwt_key(
low_entropy_material="test-secret", salt="test-salt"
)
return JWTIssuer(
issuer="https://test-server.com",
audience="https://test-server.com/mcp",
signing_key=signing_key,
)
def test_issue_access_token_creates_valid_jwt(self, issuer):
"""Test that access token is a minimal JWT with correct structure."""
token = issuer.issue_access_token(
client_id="client-abc",
scopes=["read", "write"],
jti="token-id-123",
expires_in=3600,
)
# Should be a JWT with 3 segments
assert len(token.split(".")) == 3
# Should be verifiable
payload = issuer.verify_token(token)
# Minimal token should only have required claims
assert payload["client_id"] == "client-abc"
assert payload["scope"] == "read write"
assert payload["jti"] == "token-id-123"
assert payload["iss"] == "https://test-server.com"
assert payload["aud"] == "https://test-server.com/mcp"
# Should NOT have user identity claims
assert "sub" not in payload
assert "azp" not in payload
def test_minimal_token_has_no_user_identity(self, issuer):
"""Test that minimal tokens contain no user identity or custom claims."""
token = issuer.issue_access_token(
client_id="client-abc",
scopes=["read"],
jti="token-id",
expires_in=3600,
)
payload = issuer.verify_token(token)
# Should only have minimal required claims
assert "sub" not in payload
assert "azp" not in payload
assert "groups" not in payload
assert "roles" not in payload
assert "email" not in payload
# Should have exactly these claims
expected_keys = {"iss", "aud", "client_id", "scope", "exp", "iat", "jti"}
assert set(payload.keys()) == expected_keys
def test_issue_refresh_token_creates_valid_jwt(self, issuer):
"""Test that refresh token is a minimal JWT with token_use claim."""
token = issuer.issue_refresh_token(
client_id="client-abc",
scopes=["read"],
jti="refresh-token-id",
expires_in=60 * 60 * 24 * 30, # 30 days
)
payload = issuer.verify_token(token)
assert payload["client_id"] == "client-abc"
assert payload["token_use"] == "refresh"
assert payload["jti"] == "refresh-token-id"
# Should NOT have user identity
assert "sub" not in payload
def test_verify_token_validates_signature(self, issuer):
"""Test that token verification fails with wrong signing key."""
# Create token with one issuer
token = issuer.issue_access_token(
client_id="client-abc",
scopes=["read"],
jti="token-id",
)
# Try to verify with different issuer (different key)
other_key = derive_jwt_key(
low_entropy_material="different-secret", salt="different-salt"
)
other_issuer = JWTIssuer(
issuer="https://test-server.com",
audience="https://test-server.com/mcp",
signing_key=other_key,
)
with pytest.raises(JoseError):
other_issuer.verify_token(token)
def test_verify_token_validates_expiration(self, issuer):
"""Test that expired tokens are rejected."""
# Create token that expires in 1 second
token = issuer.issue_access_token(
client_id="client-abc",
scopes=["read"],
jti="token-id",
expires_in=1,
)
# Should be valid immediately
payload = issuer.verify_token(token)
assert payload["client_id"] == "client-abc"
# Wait for token to expire
time.sleep(1.1)
# Should be rejected
with pytest.raises(JoseError, match="expired"):
issuer.verify_token(token)
def test_verify_token_validates_issuer(self, issuer):
"""Test that tokens from different issuers are rejected."""
token = issuer.issue_access_token(
client_id="client-abc",
scopes=["read"],
jti="token-id",
)
# Create issuer with different issuer URL but same key
other_issuer = JWTIssuer(
issuer="https://other-server.com", # Different issuer
audience="https://test-server.com/mcp",
signing_key=issuer._signing_key, # Same key
)
with pytest.raises(JoseError, match="issuer"):
other_issuer.verify_token(token)
def test_verify_token_validates_audience(self, issuer):
"""Test that tokens for different audiences are rejected."""
token = issuer.issue_access_token(
client_id="client-abc",
scopes=["read"],
jti="token-id",
)
# Create issuer with different audience but same key
other_issuer = JWTIssuer(
issuer="https://test-server.com",
audience="https://other-server.com/mcp", # Different audience
signing_key=issuer._signing_key, # Same key
)
with pytest.raises(JoseError, match="audience"):
other_issuer.verify_token(token)
def test_verify_token_rejects_malformed_tokens(self, issuer):
"""Test that malformed tokens are rejected."""
with pytest.raises(JoseError):
issuer.verify_token("not-a-jwt")
with pytest.raises(JoseError):
issuer.verify_token("too.few.segments")
with pytest.raises(JoseError):
issuer.verify_token("header.payload") # Missing signature
def test_issue_access_token_with_upstream_claims(self, issuer):
"""Test that upstream claims are included when provided."""
upstream_claims = {
"sub": "user-123",
"oid": "object-id-456",
"name": "Test User",
"email": "test@example.com",
"roles": ["Admin", "Reader"],
}
token = issuer.issue_access_token(
client_id="client-abc",
scopes=["read", "write"],
jti="token-id-123",
expires_in=3600,
upstream_claims=upstream_claims,
)
payload = issuer.verify_token(token)
assert "upstream_claims" in payload
assert payload["upstream_claims"]["sub"] == "user-123"
assert payload["upstream_claims"]["oid"] == "object-id-456"
assert payload["upstream_claims"]["name"] == "Test User"
assert payload["upstream_claims"]["email"] == "test@example.com"
assert payload["upstream_claims"]["roles"] == ["Admin", "Reader"]
def test_issue_access_token_without_upstream_claims(self, issuer):
"""Test that upstream_claims is not present when not provided."""
token = issuer.issue_access_token(
client_id="client-abc",
scopes=["read"],
jti="token-id-123",
expires_in=3600,
)
payload = issuer.verify_token(token)
assert "upstream_claims" not in payload
def test_issue_refresh_token_with_upstream_claims(self, issuer):
"""Test that refresh tokens also include upstream claims when provided."""
upstream_claims = {
"sub": "user-123",
"name": "Test User",
}
token = issuer.issue_refresh_token(
client_id="client-abc",
scopes=["read"],
jti="refresh-token-id",
expires_in=60 * 60 * 24 * 30,
upstream_claims=upstream_claims,
)
payload = issuer.verify_token(token)
assert "upstream_claims" in payload
assert payload["upstream_claims"]["sub"] == "user-123"
assert payload["upstream_claims"]["name"] == "Test User"
assert payload["token_use"] == "refresh"
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "tests/server/auth/test_jwt_issuer.py",
"license": "Apache License 2.0",
"lines": 240,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
PrefectHQ/fastmcp:tests/server/auth/test_oauth_mounting.py | """Tests for OAuth .well-known routes when FastMCP apps are mounted in parent ASGI apps.
This test file validates the fix for issue #2077 where .well-known/oauth-protected-resource
returns 404 at root level when a FastMCP app is mounted under a path prefix.
The fix uses MCP SDK 1.17+ which implements RFC 9728 path-scoped well-known URLs.
"""
import httpx
import pytest
from key_value.aio.stores.memory import MemoryStore
from pydantic import AnyHttpUrl
from starlette.applications import Starlette
from starlette.routing import Mount
from fastmcp import FastMCP
from fastmcp.server.auth import RemoteAuthProvider
from fastmcp.server.auth.oauth_proxy import OAuthProxy
from fastmcp.server.auth.providers.jwt import StaticTokenVerifier
@pytest.fixture
def test_tokens():
    """Standard test tokens fixture: one static token for a single client."""
    token_data = {
        "client_id": "test-client",
        "scopes": ["read", "write"],
    }
    return {"test_token": token_data}
class TestOAuthMounting:
    """Test OAuth .well-known routes with mounted FastMCP apps."""
    async def test_well_known_with_direct_deployment(self, test_tokens):
        """Test that .well-known routes work when app is deployed directly (not mounted).
        This is the baseline - it should work as expected.
        Per RFC 9728, if the resource is at /mcp, the well-known endpoint is at
        /.well-known/oauth-protected-resource/mcp (path-scoped).
        """
        token_verifier = StaticTokenVerifier(tokens=test_tokens)
        auth_provider = RemoteAuthProvider(
            token_verifier=token_verifier,
            authorization_servers=[AnyHttpUrl("https://auth.example.com")],
            base_url="https://api.example.com",
        )
        mcp = FastMCP("test-server", auth=auth_provider)
        mcp_app = mcp.http_app()
        # httpx ASGITransport drives the app in-process: no sockets, no server.
        async with httpx.AsyncClient(
            transport=httpx.ASGITransport(app=mcp_app),
            base_url="https://api.example.com",
        ) as client:
            # RFC 9728: path-scoped well-known URL
            # Resource is at /mcp, so well-known should be at /.well-known/oauth-protected-resource/mcp
            response = await client.get("/.well-known/oauth-protected-resource/mcp")
            assert response.status_code == 200
            data = response.json()
            assert data["resource"] == "https://api.example.com/mcp"
            assert data["authorization_servers"] == ["https://auth.example.com/"]
    async def test_well_known_with_mounted_app(self, test_tokens):
        """Test that .well-known routes work when explicitly mounted at root.
        This test uses the CANONICAL pattern for mounting:
        - base_url includes the mount prefix ("/api")
        - mcp_path is just the internal MCP path ("/mcp")
        - These combine: base_url + mcp_path = actual URL
        The well-known routes are mounted at root level for RFC 9728 compliance.
        """
        token_verifier = StaticTokenVerifier(tokens=test_tokens)
        # CANONICAL PATTERN: base_url includes the mount prefix
        auth_provider = RemoteAuthProvider(
            token_verifier=token_verifier,
            authorization_servers=[AnyHttpUrl("https://auth.example.com")],
            base_url="https://api.example.com/api",  # Includes /api mount prefix
        )
        mcp = FastMCP("test-server", auth=auth_provider)
        mcp_app = mcp.http_app(path="/mcp")
        # Pass just the internal mcp_path, NOT the full mount path
        # The auth provider will combine base_url + mcp_path internally
        well_known_routes = auth_provider.get_well_known_routes(mcp_path="/mcp")
        parent_app = Starlette(
            routes=[
                *well_known_routes,  # Well-known routes at root level
                Mount("/api", app=mcp_app),  # MCP app under /api
            ],
            lifespan=mcp_app.lifespan,
        )
        async with httpx.AsyncClient(
            transport=httpx.ASGITransport(app=parent_app),
            base_url="https://api.example.com",
        ) as client:
            # The CORRECT RFC 9728 path-scoped well-known URL at root
            # Resource is at /api/mcp, so well-known is at /.well-known/oauth-protected-resource/api/mcp
            response = await client.get("/.well-known/oauth-protected-resource/api/mcp")
            assert response.status_code == 200
            data = response.json()
            assert data["resource"] == "https://api.example.com/api/mcp"
            assert data["authorization_servers"] == ["https://auth.example.com/"]
            # There will also be an extra route at /api/.well-known/oauth-protected-resource/mcp
            # (from the mounted MCP app), but we don't care about that as long as the correct one exists
    async def test_mcp_endpoint_with_mounted_app(self, test_tokens):
        """Test that MCP endpoint works correctly when mounted.
        This confirms the MCP functionality itself works with mounting.
        """
        token_verifier = StaticTokenVerifier(tokens=test_tokens)
        auth_provider = RemoteAuthProvider(
            token_verifier=token_verifier,
            authorization_servers=[AnyHttpUrl("https://auth.example.com")],
            base_url="https://api.example.com",
        )
        mcp = FastMCP("test-server", auth=auth_provider)
        @mcp.tool
        def test_tool(message: str) -> str:
            return f"Echo: {message}"
        mcp_app = mcp.http_app(path="/mcp")
        # Mount the MCP app under /api prefix
        parent_app = Starlette(
            routes=[
                Mount("/api", app=mcp_app),
            ],
            lifespan=mcp_app.lifespan,
        )
        async with httpx.AsyncClient(
            transport=httpx.ASGITransport(app=parent_app),
            base_url="https://api.example.com",
        ) as client:
            # The MCP endpoint should work at /api/mcp (mounted correctly)
            # This is a basic connectivity test
            response = await client.get("/api/mcp")
            # We expect either 200 (if no auth required for GET) or 401 (if auth required)
            # The key is that it's NOT 404
            assert response.status_code in [200, 401, 405]
    async def test_nested_mounting(self, test_tokens):
        """Test .well-known routes with deeply nested mounts.
        Uses CANONICAL pattern: base_url includes all mount prefixes.
        """
        token_verifier = StaticTokenVerifier(tokens=test_tokens)
        # CANONICAL PATTERN: base_url includes full mount path /outer/inner
        auth_provider = RemoteAuthProvider(
            token_verifier=token_verifier,
            authorization_servers=[AnyHttpUrl("https://auth.example.com")],
            base_url="https://api.example.com/outer/inner",  # Includes nested mount path
        )
        mcp = FastMCP("test-server", auth=auth_provider)
        mcp_app = mcp.http_app(path="/mcp")
        # Pass just the internal mcp_path
        well_known_routes = auth_provider.get_well_known_routes(mcp_path="/mcp")
        # Create nested mounts
        inner_app = Starlette(
            routes=[Mount("/inner", app=mcp_app)],
        )
        outer_app = Starlette(
            routes=[
                *well_known_routes,  # Well-known routes at root level
                Mount("/outer", app=inner_app),
            ],
            lifespan=mcp_app.lifespan,
        )
        async with httpx.AsyncClient(
            transport=httpx.ASGITransport(app=outer_app),
            base_url="https://api.example.com",
        ) as client:
            # RFC 9728: path-scoped well-known URL for nested mounting
            # Resource is at /outer/inner/mcp, so well-known is at /.well-known/oauth-protected-resource/outer/inner/mcp
            response = await client.get(
                "/.well-known/oauth-protected-resource/outer/inner/mcp"
            )
            assert response.status_code == 200
            data = response.json()
            assert data["resource"] == "https://api.example.com/outer/inner/mcp"
    async def test_oauth_authorization_server_metadata_with_base_url_and_issuer_url(
        self, test_tokens
    ):
        """Test OAuth authorization server metadata when base_url and issuer_url differ.
        This validates the fix for issue #2287 where operational OAuth endpoints
        (/authorize, /token) should be declared at base_url in the metadata,
        not at issuer_url.
        Scenario: FastMCP server mounted at /api prefix
        - issuer_url: https://api.example.com (root level)
        - base_url: https://api.example.com/api (includes mount prefix)
        - Expected: metadata declares endpoints at base_url
        """
        # Create OAuth proxy with different base_url and issuer_url
        token_verifier = StaticTokenVerifier(tokens=test_tokens)
        auth_provider = OAuthProxy(
            upstream_authorization_endpoint="https://upstream.example.com/authorize",
            upstream_token_endpoint="https://upstream.example.com/token",
            upstream_client_id="test-client-id",
            upstream_client_secret="test-client-secret",
            token_verifier=token_verifier,
            base_url="https://api.example.com/api",  # Includes mount prefix
            issuer_url="https://api.example.com",  # Root level
            client_storage=MemoryStore(),
        )
        mcp = FastMCP("test-server", auth=auth_provider)
        mcp_app = mcp.http_app(path="/mcp")
        # Get well-known routes for mounting at root
        well_known_routes = auth_provider.get_well_known_routes(mcp_path="/mcp")
        # Mount the app under /api prefix
        parent_app = Starlette(
            routes=[
                *well_known_routes,  # Well-known routes at root level
                Mount("/api", app=mcp_app),  # MCP app under /api
            ],
            lifespan=mcp_app.lifespan,
        )
        async with httpx.AsyncClient(
            transport=httpx.ASGITransport(app=parent_app),
            base_url="https://api.example.com",
        ) as client:
            # Fetch the authorization server metadata
            response = await client.get("/.well-known/oauth-authorization-server")
            assert response.status_code == 200
            metadata = response.json()
            # CRITICAL: The metadata should declare endpoints at base_url,
            # not issuer_url, because that's where they're actually mounted
            assert (
                metadata["authorization_endpoint"]
                == "https://api.example.com/api/authorize"
            )
            assert metadata["token_endpoint"] == "https://api.example.com/api/token"
            assert (
                metadata["registration_endpoint"]
                == "https://api.example.com/api/register"
            )
            # The issuer field should use base_url (where the server is actually running)
            # Note: MCP SDK may or may not add a trailing slash
            assert metadata["issuer"] in [
                "https://api.example.com/api",
                "https://api.example.com/api/",
            ]
    async def test_oauth_authorization_server_metadata_path_aware_discovery(
        self, test_tokens
    ):
        """Test RFC 8414 path-aware discovery when issuer_url has a path.
        This validates the fix for issue #2527 where authorization server metadata
        should be exposed at a path-aware URL when issuer_url has a path component.
        When issuer_url defaults to base_url (e.g., http://example.com/api), the
        authorization server metadata should be at:
            /.well-known/oauth-authorization-server/api
        This is consistent with how protected resource metadata already works
        (RFC 9728) and complies with RFC 8414 path-aware discovery.
        """
        # Create OAuth proxy where issuer_url defaults to base_url (which has a path)
        token_verifier = StaticTokenVerifier(tokens=test_tokens)
        auth_provider = OAuthProxy(
            upstream_authorization_endpoint="https://upstream.example.com/authorize",
            upstream_token_endpoint="https://upstream.example.com/token",
            upstream_client_id="test-client-id",
            upstream_client_secret="test-client-secret",
            token_verifier=token_verifier,
            base_url="https://api.example.com/api",  # Has path, no explicit issuer_url
            client_storage=MemoryStore(),
        )
        mcp = FastMCP("test-server", auth=auth_provider)
        mcp_app = mcp.http_app(path="/mcp")
        # Get well-known routes - should include path-aware authorization server metadata
        well_known_routes = auth_provider.get_well_known_routes(mcp_path="/mcp")
        # Find the authorization server metadata route
        auth_server_routes = [
            r for r in well_known_routes if "oauth-authorization-server" in r.path
        ]
        assert len(auth_server_routes) == 1
        # The route should be path-aware (RFC 8414)
        assert (
            auth_server_routes[0].path == "/.well-known/oauth-authorization-server/api"
        )
        # Find the protected resource metadata route for comparison
        protected_resource_routes = [
            r for r in well_known_routes if "oauth-protected-resource" in r.path
        ]
        assert len(protected_resource_routes) == 1
        # Protected resource should also be path-aware (RFC 9728)
        assert (
            protected_resource_routes[0].path
            == "/.well-known/oauth-protected-resource/api/mcp"
        )
        # Mount the app and verify the routes are accessible
        parent_app = Starlette(
            routes=[
                *well_known_routes,
                Mount("/api", app=mcp_app),
            ],
            lifespan=mcp_app.lifespan,
        )
        async with httpx.AsyncClient(
            transport=httpx.ASGITransport(app=parent_app),
            base_url="https://api.example.com",
        ) as client:
            # Path-aware authorization server metadata should be accessible
            response = await client.get("/.well-known/oauth-authorization-server/api")
            assert response.status_code == 200
            metadata = response.json()
            assert (
                metadata["authorization_endpoint"]
                == "https://api.example.com/api/authorize"
            )
            assert metadata["token_endpoint"] == "https://api.example.com/api/token"
            # Path-aware protected resource metadata should also work
            response = await client.get("/.well-known/oauth-protected-resource/api/mcp")
            assert response.status_code == 200
    async def test_oauth_authorization_server_metadata_root_issuer(self, test_tokens):
        """Test that root-level issuer_url still uses root discovery path.
        When issuer_url is explicitly set to root (no path), the authorization
        server metadata should remain at the root path:
            /.well-known/oauth-authorization-server
        This maintains backwards compatibility with the documented mounting pattern.
        """
        token_verifier = StaticTokenVerifier(tokens=test_tokens)
        auth_provider = OAuthProxy(
            upstream_authorization_endpoint="https://upstream.example.com/authorize",
            upstream_token_endpoint="https://upstream.example.com/token",
            upstream_client_id="test-client-id",
            upstream_client_secret="test-client-secret",
            token_verifier=token_verifier,
            base_url="https://api.example.com/api",
            issuer_url="https://api.example.com",  # Explicitly root
            client_storage=MemoryStore(),
        )
        well_known_routes = auth_provider.get_well_known_routes(mcp_path="/mcp")
        # Find the authorization server metadata route
        auth_server_routes = [
            r for r in well_known_routes if "oauth-authorization-server" in r.path
        ]
        assert len(auth_server_routes) == 1
        # Should be at root (no path suffix) when issuer_url is root
        assert auth_server_routes[0].path == "/.well-known/oauth-authorization-server"
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "tests/server/auth/test_oauth_mounting.py",
"license": "Apache License 2.0",
"lines": 318,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
PrefectHQ/fastmcp:src/fastmcp/server/middleware/caching.py | """A middleware for response caching."""
import hashlib
from collections.abc import Sequence
from logging import Logger
from typing import Any, TypedDict
import mcp.types
import pydantic_core
from key_value.aio.adapters.pydantic import PydanticAdapter
from key_value.aio.protocols.key_value import AsyncKeyValue
from key_value.aio.stores.memory import MemoryStore
from key_value.aio.wrappers.limit_size import LimitSizeWrapper
from key_value.aio.wrappers.statistics import StatisticsWrapper
from key_value.aio.wrappers.statistics.wrapper import (
KVStoreCollectionStatistics,
)
from pydantic import Field
from typing_extensions import NotRequired, Self, override
from fastmcp.prompts.prompt import Message, Prompt, PromptResult
from fastmcp.resources.resource import Resource, ResourceContent, ResourceResult
from fastmcp.server.middleware.middleware import CallNext, Middleware, MiddlewareContext
from fastmcp.tools.tool import Tool, ToolResult
from fastmcp.utilities.logging import get_logger
from fastmcp.utilities.types import FastMCPBaseModel
logger: Logger = get_logger(name=__name__)
# Constants
ONE_HOUR_IN_SECONDS = 3600  # default TTL for call/read/get caches
FIVE_MINUTES_IN_SECONDS = 300  # default TTL for list caches
ONE_MB_IN_BYTES = 1024 * 1024  # default cap on cachable item size
GLOBAL_KEY = "__global__"  # key for list results, which take no per-request args
class CachableResourceContent(FastMCPBaseModel):
    """A wrapper for ResourceContent that can be cached."""
    # Raw resource payload; text resources use str, binary resources bytes.
    content: str | bytes
    mime_type: str | None = None
    meta: dict[str, Any] | None = None
class CachableResourceResult(FastMCPBaseModel):
    """A wrapper for ResourceResult that can be cached."""
    contents: list[CachableResourceContent]
    meta: dict[str, Any] | None = None
    def get_size(self) -> int:
        """Return the serialized JSON length, used for cache size limits."""
        return len(self.model_dump_json())
    @classmethod
    def wrap(cls, value: ResourceResult) -> Self:
        """Copy *value* into a plain, serializable cache representation."""
        wrapped_contents = [
            CachableResourceContent(
                content=entry.content,
                mime_type=entry.mime_type,
                meta=entry.meta,
            )
            for entry in value.contents
        ]
        return cls(contents=wrapped_contents, meta=value.meta)
    def unwrap(self) -> ResourceResult:
        """Reconstruct a plain ResourceResult from the cached representation."""
        restored_contents = [
            ResourceContent(
                content=entry.content,
                mime_type=entry.mime_type,
                meta=entry.meta,
            )
            for entry in self.contents
        ]
        return ResourceResult(contents=restored_contents, meta=self.meta)
class CachableToolResult(FastMCPBaseModel):
    """A wrapper for ToolResult that can be cached."""
    content: list[mcp.types.ContentBlock]
    structured_content: dict[str, Any] | None
    meta: dict[str, Any] | None
    def get_size(self) -> int:
        """Return the serialized JSON length, used for cache size limits.

        Added for consistency with CachableResourceResult and
        CachablePromptResult, which both expose this hook.
        """
        return len(self.model_dump_json())
    @classmethod
    def wrap(cls, value: ToolResult) -> Self:
        """Copy *value* into a plain, serializable cache representation."""
        return cls(
            content=value.content,
            structured_content=value.structured_content,
            meta=value.meta,
        )
    def unwrap(self) -> ToolResult:
        """Reconstruct a plain ToolResult from the cached representation."""
        return ToolResult(
            content=self.content,
            structured_content=self.structured_content,
            meta=self.meta,
        )
class CachableMessage(FastMCPBaseModel):
    """A wrapper for Message that can be cached."""
    # Prompt message role, e.g. "user" or "assistant".
    role: str
    # Any of the MCP content block types a prompt message may carry.
    content: (
        mcp.types.TextContent
        | mcp.types.ImageContent
        | mcp.types.AudioContent
        | mcp.types.EmbeddedResource
    )
class CachablePromptResult(FastMCPBaseModel):
    """A wrapper for PromptResult that can be cached."""
    messages: list[CachableMessage]
    description: str | None = None
    meta: dict[str, Any] | None = None
    def get_size(self) -> int:
        """Return the serialized JSON length, used for cache size limits."""
        return len(self.model_dump_json())
    @classmethod
    def wrap(cls, value: PromptResult) -> Self:
        """Copy *value* into a plain, serializable cache representation."""
        cached_messages = [
            CachableMessage(role=message.role, content=message.content)
            for message in value.messages
        ]
        return cls(
            messages=cached_messages,
            description=value.description,
            meta=value.meta,
        )
    def unwrap(self) -> PromptResult:
        """Reconstruct a plain PromptResult from the cached representation."""
        restored_messages = [
            Message(content=message.content, role=message.role)  # type: ignore[arg-type]
            for message in self.messages
        ]
        return PromptResult(
            messages=restored_messages,
            description=self.description,
            meta=self.meta,
        )
class SharedMethodSettings(TypedDict):
    """Shared config for a cache method."""
    # Time-to-live in seconds for cached entries of this method.
    ttl: NotRequired[int]
    # Set to False to disable caching for this method entirely.
    enabled: NotRequired[bool]
class ListToolsSettings(SharedMethodSettings):
    """Configuration options for Tool-related caching."""
class ListResourcesSettings(SharedMethodSettings):
    """Configuration options for Resource-related caching."""
class ListPromptsSettings(SharedMethodSettings):
    """Configuration options for Prompt-related caching."""
class CallToolSettings(SharedMethodSettings):
    """Configuration options for Tool-related caching."""
    # Allow-list: when present, only these tools are cached.
    included_tools: NotRequired[list[str]]
    # Deny-list: tools named here are never cached.
    excluded_tools: NotRequired[list[str]]
class ReadResourceSettings(SharedMethodSettings):
    """Configuration options for Resource-related caching."""
class GetPromptSettings(SharedMethodSettings):
    """Configuration options for Prompt-related caching."""
class ResponseCachingStatistics(FastMCPBaseModel):
    """Per-method cache statistics; fields are None until a method is used."""
    list_tools: KVStoreCollectionStatistics | None = Field(default=None)
    list_resources: KVStoreCollectionStatistics | None = Field(default=None)
    list_prompts: KVStoreCollectionStatistics | None = Field(default=None)
    read_resource: KVStoreCollectionStatistics | None = Field(default=None)
    get_prompt: KVStoreCollectionStatistics | None = Field(default=None)
    call_tool: KVStoreCollectionStatistics | None = Field(default=None)
class ResponseCachingMiddleware(Middleware):
    """The response caching middleware offers a simple way to cache responses to mcp methods. The Middleware
    supports cache invalidation via notifications from the server. The Middleware implements TTL-based caching
    but cache implementations may offer additional features like LRU eviction, size limits, and more.
    When items are retrieved from the cache they will no longer be the original objects, but rather no-op objects
    this means that response caching may not be compatible with other middleware that expects original subclasses.
    Notes:
        - Caches `tools/call`, `resources/read`, `prompts/get`, `tools/list`, `resources/list`, and `prompts/list` requests.
        - Cache keys are derived from method name and arguments.
    """
    def __init__(
        self,
        cache_storage: AsyncKeyValue | None = None,
        list_tools_settings: ListToolsSettings | None = None,
        list_resources_settings: ListResourcesSettings | None = None,
        list_prompts_settings: ListPromptsSettings | None = None,
        read_resource_settings: ReadResourceSettings | None = None,
        get_prompt_settings: GetPromptSettings | None = None,
        call_tool_settings: CallToolSettings | None = None,
        max_item_size: int = ONE_MB_IN_BYTES,
    ):
        """Initialize the response caching middleware.
        Args:
            cache_storage: The cache backend to use. If None, an in-memory cache is used.
            list_tools_settings: The settings for the list tools method. If None, the default settings are used (5 minute TTL).
            list_resources_settings: The settings for the list resources method. If None, the default settings are used (5 minute TTL).
            list_prompts_settings: The settings for the list prompts method. If None, the default settings are used (5 minute TTL).
            read_resource_settings: The settings for the read resource method. If None, the default settings are used (1 hour TTL).
            get_prompt_settings: The settings for the get prompt method. If None, the default settings are used (1 hour TTL).
            call_tool_settings: The settings for the call tool method. If None, the default settings are used (1 hour TTL).
            max_item_size: The maximum size of items eligible for caching. Defaults to 1MB.
        """
        self._backend: AsyncKeyValue = cache_storage or MemoryStore()
        # When the size limit is exceeded, the put will silently fail
        self._size_limiter: LimitSizeWrapper = LimitSizeWrapper(
            key_value=self._backend, max_size=max_item_size, raise_on_too_large=False
        )
        self._stats: StatisticsWrapper = StatisticsWrapper(key_value=self._size_limiter)
        self._list_tools_settings: ListToolsSettings = (
            list_tools_settings or ListToolsSettings()
        )
        self._list_resources_settings: ListResourcesSettings = (
            list_resources_settings or ListResourcesSettings()
        )
        self._list_prompts_settings: ListPromptsSettings = (
            list_prompts_settings or ListPromptsSettings()
        )
        self._read_resource_settings: ReadResourceSettings = (
            read_resource_settings or ReadResourceSettings()
        )
        self._get_prompt_settings: GetPromptSettings = (
            get_prompt_settings or GetPromptSettings()
        )
        self._call_tool_settings: CallToolSettings = (
            call_tool_settings or CallToolSettings()
        )
        # One typed adapter per MCP method; the collection name doubles as the
        # statistics key reported by statistics().
        self._list_tools_cache: PydanticAdapter[list[Tool]] = PydanticAdapter(
            key_value=self._stats,
            pydantic_model=list[Tool],
            default_collection="tools/list",
        )
        self._list_resources_cache: PydanticAdapter[list[Resource]] = PydanticAdapter(
            key_value=self._stats,
            pydantic_model=list[Resource],
            default_collection="resources/list",
        )
        self._list_prompts_cache: PydanticAdapter[list[Prompt]] = PydanticAdapter(
            key_value=self._stats,
            pydantic_model=list[Prompt],
            default_collection="prompts/list",
        )
        self._read_resource_cache: PydanticAdapter[CachableResourceResult] = (
            PydanticAdapter(
                key_value=self._stats,
                pydantic_model=CachableResourceResult,
                default_collection="resources/read",
            )
        )
        self._get_prompt_cache: PydanticAdapter[CachablePromptResult] = PydanticAdapter(
            key_value=self._stats,
            pydantic_model=CachablePromptResult,
            default_collection="prompts/get",
        )
        self._call_tool_cache: PydanticAdapter[CachableToolResult] = PydanticAdapter(
            key_value=self._stats,
            pydantic_model=CachableToolResult,
            default_collection="tools/call",
        )
    @override
    async def on_list_tools(
        self,
        context: MiddlewareContext[mcp.types.ListToolsRequest],
        call_next: CallNext[mcp.types.ListToolsRequest, Sequence[Tool]],
    ) -> Sequence[Tool]:
        """Serve `tools/list` from the cache when enabled and populated;
        otherwise call the next middleware and cache the result."""
        if self._list_tools_settings.get("enabled") is False:
            return await call_next(context)
        if cached_value := await self._list_tools_cache.get(key=GLOBAL_KEY):
            return cached_value
        tools: Sequence[Tool] = await call_next(context=context)
        # Turn any subclass of Tool into a Tool so cached entries deserialize
        # back to the base class.
        cachable_tools: list[Tool] = [
            Tool(
                name=tool.name,
                title=tool.title,
                description=tool.description,
                parameters=tool.parameters,
                output_schema=tool.output_schema,
                annotations=tool.annotations,
                meta=tool.meta,
                tags=tool.tags,
            )
            for tool in tools
        ]
        await self._list_tools_cache.put(
            key=GLOBAL_KEY,
            value=cachable_tools,
            ttl=self._list_tools_settings.get("ttl", FIVE_MINUTES_IN_SECONDS),
        )
        return cachable_tools
    @override
    async def on_list_resources(
        self,
        context: MiddlewareContext[mcp.types.ListResourcesRequest],
        call_next: CallNext[mcp.types.ListResourcesRequest, Sequence[Resource]],
    ) -> Sequence[Resource]:
        """Serve `resources/list` from the cache when enabled and populated;
        otherwise call the next middleware and cache the result."""
        if self._list_resources_settings.get("enabled") is False:
            return await call_next(context)
        if cached_value := await self._list_resources_cache.get(key=GLOBAL_KEY):
            return cached_value
        resources: Sequence[Resource] = await call_next(context=context)
        # Turn any subclass of Resource into a Resource so cached entries
        # deserialize back to the base class.
        cachable_resources: list[Resource] = [
            Resource(
                name=resource.name,
                title=resource.title,
                description=resource.description,
                tags=resource.tags,
                meta=resource.meta,
                mime_type=resource.mime_type,
                annotations=resource.annotations,
                uri=resource.uri,
            )
            for resource in resources
        ]
        await self._list_resources_cache.put(
            key=GLOBAL_KEY,
            value=cachable_resources,
            ttl=self._list_resources_settings.get("ttl", FIVE_MINUTES_IN_SECONDS),
        )
        return cachable_resources
    @override
    async def on_list_prompts(
        self,
        context: MiddlewareContext[mcp.types.ListPromptsRequest],
        call_next: CallNext[mcp.types.ListPromptsRequest, Sequence[Prompt]],
    ) -> Sequence[Prompt]:
        """Serve `prompts/list` from the cache when enabled and populated;
        otherwise call the next middleware and cache the result."""
        if self._list_prompts_settings.get("enabled") is False:
            return await call_next(context)
        if cached_value := await self._list_prompts_cache.get(key=GLOBAL_KEY):
            return cached_value
        prompts: Sequence[Prompt] = await call_next(context=context)
        # Turn any subclass of Prompt into a Prompt so cached entries
        # deserialize back to the base class.
        cachable_prompts: list[Prompt] = [
            Prompt(
                name=prompt.name,
                title=prompt.title,
                description=prompt.description,
                tags=prompt.tags,
                meta=prompt.meta,
                arguments=prompt.arguments,
            )
            for prompt in prompts
        ]
        await self._list_prompts_cache.put(
            key=GLOBAL_KEY,
            value=cachable_prompts,
            ttl=self._list_prompts_settings.get("ttl", FIVE_MINUTES_IN_SECONDS),
        )
        return cachable_prompts
    @override
    async def on_call_tool(
        self,
        context: MiddlewareContext[mcp.types.CallToolRequestParams],
        call_next: CallNext[mcp.types.CallToolRequestParams, ToolResult],
    ) -> ToolResult:
        """Serve `tools/call` from the cache when enabled, allowed for this
        tool, and populated; otherwise call the next middleware and cache the result."""
        tool_name = context.message.name
        if self._call_tool_settings.get(
            "enabled"
        ) is False or not self._matches_tool_cache_settings(tool_name=tool_name):
            return await call_next(context=context)
        cache_key: str = _make_call_tool_cache_key(msg=context.message)
        if cached_value := await self._call_tool_cache.get(key=cache_key):
            return cached_value.unwrap()
        tool_result: ToolResult = await call_next(context=context)
        cachable_tool_result: CachableToolResult = CachableToolResult.wrap(
            value=tool_result
        )
        await self._call_tool_cache.put(
            key=cache_key,
            value=cachable_tool_result,
            ttl=self._call_tool_settings.get("ttl", ONE_HOUR_IN_SECONDS),
        )
        return cachable_tool_result.unwrap()
    @override
    async def on_read_resource(
        self,
        context: MiddlewareContext[mcp.types.ReadResourceRequestParams],
        call_next: CallNext[mcp.types.ReadResourceRequestParams, ResourceResult],
    ) -> ResourceResult:
        """Serve `resources/read` from the cache when enabled and populated;
        otherwise call the next middleware and cache the result."""
        if self._read_resource_settings.get("enabled") is False:
            return await call_next(context=context)
        cache_key: str = _make_read_resource_cache_key(msg=context.message)
        cached_value: CachableResourceResult | None
        if cached_value := await self._read_resource_cache.get(key=cache_key):
            return cached_value.unwrap()
        value: ResourceResult = await call_next(context=context)
        cached_value = CachableResourceResult.wrap(value)
        await self._read_resource_cache.put(
            key=cache_key,
            value=cached_value,
            ttl=self._read_resource_settings.get("ttl", ONE_HOUR_IN_SECONDS),
        )
        return cached_value.unwrap()
    @override
    async def on_get_prompt(
        self,
        context: MiddlewareContext[mcp.types.GetPromptRequestParams],
        call_next: CallNext[mcp.types.GetPromptRequestParams, PromptResult],
    ) -> PromptResult:
        """Serve `prompts/get` from the cache when enabled and populated;
        otherwise call the next middleware and cache the result."""
        if self._get_prompt_settings.get("enabled") is False:
            return await call_next(context=context)
        cache_key: str = _make_get_prompt_cache_key(msg=context.message)
        if cached_value := await self._get_prompt_cache.get(key=cache_key):
            return cached_value.unwrap()
        value: PromptResult = await call_next(context=context)
        cachable_value: CachablePromptResult = CachablePromptResult.wrap(value)
        await self._get_prompt_cache.put(
            key=cache_key,
            value=cachable_value,
            ttl=self._get_prompt_settings.get("ttl", ONE_HOUR_IN_SECONDS),
        )
        # Return the unwrapped cachable value (not the original, possibly
        # subclassed result) so the first call behaves identically to later
        # cache hits — consistent with on_call_tool and on_read_resource.
        return cachable_value.unwrap()
    def _matches_tool_cache_settings(self, tool_name: str) -> bool:
        """Check if the tool matches the cache settings for tool calls."""
        if included_tools := self._call_tool_settings.get("included_tools"):
            if tool_name not in included_tools:
                return False
        if excluded_tools := self._call_tool_settings.get("excluded_tools"):
            if tool_name in excluded_tools:
                return False
        return True
    def statistics(self) -> ResponseCachingStatistics:
        """Get the statistics for the cache."""
        return ResponseCachingStatistics(
            list_tools=self._stats.statistics.collections.get("tools/list"),
            list_resources=self._stats.statistics.collections.get("resources/list"),
            list_prompts=self._stats.statistics.collections.get("prompts/list"),
            read_resource=self._stats.statistics.collections.get("resources/read"),
            get_prompt=self._stats.statistics.collections.get("prompts/get"),
            call_tool=self._stats.statistics.collections.get("tools/call"),
        )
def _get_arguments_str(arguments: dict[str, Any] | None) -> str:
"""Get a string representation of the arguments."""
if arguments is None:
return "null"
try:
return pydantic_core.to_json(value=arguments, fallback=str).decode()
except TypeError:
return repr(arguments)
def _hash_cache_key(value: str) -> str:
"""Build a fixed-length SHA-256 cache key from request-derived input."""
return hashlib.sha256(value.encode()).hexdigest()
def _make_call_tool_cache_key(msg: mcp.types.CallToolRequestParams) -> str:
    """Make a cache key for a tool call using a stable hash of name and arguments."""
    key_material = msg.name + ":" + _get_arguments_str(msg.arguments)
    return _hash_cache_key(key_material)
def _make_read_resource_cache_key(msg: mcp.types.ReadResourceRequestParams) -> str:
    """Make a cache key for a resource read using a stable hash of URI."""
    uri_text = str(msg.uri)
    return _hash_cache_key(uri_text)
def _make_get_prompt_cache_key(msg: mcp.types.GetPromptRequestParams) -> str:
    """Make a cache key for a prompt get using a stable hash of name and arguments."""
    key_material = msg.name + ":" + _get_arguments_str(msg.arguments)
    return _hash_cache_key(key_material)
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "src/fastmcp/server/middleware/caching.py",
"license": "Apache License 2.0",
"lines": 429,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
PrefectHQ/fastmcp:tests/server/middleware/test_caching.py | """Tests for response caching middleware."""
import sys
import tempfile
import warnings
from pathlib import Path
from unittest.mock import AsyncMock, MagicMock
import mcp.types
import pytest
from inline_snapshot import snapshot
from key_value.aio.stores.filetree import (
FileTreeStore,
FileTreeV1CollectionSanitizationStrategy,
FileTreeV1KeySanitizationStrategy,
)
from key_value.aio.stores.memory import MemoryStore
from key_value.aio.wrappers.statistics.wrapper import (
GetStatistics,
KVStoreCollectionStatistics,
PutStatistics,
)
from mcp.server.lowlevel.helper_types import ReadResourceContents
from mcp.types import TextContent, TextResourceContents
from pydantic import AnyUrl, BaseModel
from fastmcp import Context, FastMCP
from fastmcp.client.client import CallToolResult, Client
from fastmcp.client.transports import FastMCPTransport
from fastmcp.prompts.function_prompt import FunctionPrompt
from fastmcp.prompts.prompt import Message, Prompt
from fastmcp.resources.resource import Resource
from fastmcp.server.middleware.caching import (
CachableToolResult,
CallToolSettings,
ResponseCachingMiddleware,
ResponseCachingStatistics,
_make_call_tool_cache_key,
_make_get_prompt_cache_key,
_make_read_resource_cache_key,
)
from fastmcp.server.middleware.middleware import CallNext, MiddlewareContext
from fastmcp.tools.tool import Tool, ToolResult
# Shared fixtures used across the caching tests.
TEST_URI = AnyUrl("https://test_uri")
SAMPLE_READ_RESOURCE_CONTENTS = ReadResourceContents(
    content="test_text",
    mime_type="text/plain",
)
def sample_resource_fn() -> list[ReadResourceContents]:
    """Resource function returning a single plain-text content item."""
    return [SAMPLE_READ_RESOURCE_CONTENTS]
def sample_prompt_fn() -> Message:
    """Prompt function returning a single message."""
    return Message("test_text")
SAMPLE_RESOURCE = Resource.from_function(
    fn=sample_resource_fn, uri=TEST_URI, name="test_resource"
)
SAMPLE_PROMPT = Prompt.from_function(fn=sample_prompt_fn, name="test_prompt")
SAMPLE_GET_PROMPT_RESULT = mcp.types.GetPromptResult(
    messages=[Message("test_text").to_mcp_prompt_message()]
)
SAMPLE_TOOL = Tool(name="test_tool", parameters={"param1": "value1", "param2": 42})
SAMPLE_TOOL_RESULT = ToolResult(
    content=[TextContent(type="text", text="test_text")],
    structured_content={"result": "test_result"},
)
SAMPLE_TOOL_RESULT_LARGE = ToolResult(
    content=[TextContent(type="text", text="test_text" * 100)],
    structured_content={"result": "test_result"},
)
class CrazyModel(BaseModel):
    """Model with many field types (scalars, lists, dicts, nesting), used by
    the `crazy` tool to exercise caching of complex structured arguments."""
    a: int
    b: int
    c: str
    d: float
    e: bool
    f: list[int]
    g: dict[str, int]
    h: list[dict[str, int]]
    i: dict[str, list[int]]
@pytest.fixture
def crazy_model() -> CrazyModel:
    """Provide a fully-populated CrazyModel covering every field type."""
    values = {
        "a": 5,
        "b": 10,
        "c": "test",
        "d": 1.0,
        "e": True,
        "f": [1, 2, 3],
        "g": {"a": 1, "b": 2},
        "h": [{"a": 1, "b": 2}],
        "i": {"a": [1, 2]},
    }
    return CrazyModel(**values)
class TrackingCalculator:
    """Calculator whose operations count their own invocations.

    The per-method counters let tests distinguish a cache hit (counter
    unchanged) from an actual execution (counter incremented).
    """

    add_calls: int
    multiply_calls: int
    crazy_calls: int
    very_large_response_calls: int

    def __init__(self):
        self.add_calls = 0
        self.multiply_calls = 0
        self.crazy_calls = 0
        self.very_large_response_calls = 0

    def add(self, a: int, b: int) -> int:
        self.add_calls += 1
        return a + b

    def multiply(self, a: int, b: int) -> int:
        self.multiply_calls += 1
        return a * b

    def very_large_response(self) -> str:
        self.very_large_response_calls += 1
        return "istenchars" * 100000  # 1,000,000 characters, 1mb

    def crazy(self, a: CrazyModel) -> CrazyModel:
        # Pass-through: returns its argument unchanged, only counting calls.
        self.crazy_calls += 1
        return a

    def how_to_calculate(self, a: int, b: int) -> str:
        return f"To calculate {a} + {b}, you need to add {a} and {b} together."

    def get_add_calls(self) -> str:
        return str(self.add_calls)

    def get_multiply_calls(self) -> str:
        return str(self.multiply_calls)

    def get_crazy_calls(self) -> str:
        return str(self.crazy_calls)

    async def update_tool_list(self, context: Context):
        await context.send_notification(mcp.types.ToolListChangedNotification())

    def add_tools(self, fastmcp: FastMCP, prefix: str = ""):
        # Register every calculator tool under an optional name prefix.
        for fn, suffix in (
            (self.add, "add"),
            (self.multiply, "multiply"),
            (self.crazy, "crazy"),
            (self.very_large_response, "very_large_response"),
            (self.update_tool_list, "update_tool_list"),
        ):
            _ = fastmcp.add_tool(
                tool=Tool.from_function(fn=fn, name=f"{prefix}{suffix}")
            )

    def add_prompts(self, fastmcp: FastMCP, prefix: str = ""):
        _ = fastmcp.add_prompt(
            prompt=FunctionPrompt.from_function(
                fn=self.how_to_calculate, name=f"{prefix}how_to_calculate"
            )
        )

    def add_resources(self, fastmcp: FastMCP, prefix: str = ""):
        # Expose the call counters as resources so tests can read them back.
        for fn, stem in (
            (self.get_add_calls, "add_calls"),
            (self.get_multiply_calls, "multiply_calls"),
            (self.get_crazy_calls, "crazy_calls"),
        ):
            _ = fastmcp.add_resource(
                resource=Resource.from_function(
                    fn=fn,
                    uri=f"resource://{stem}",
                    name=f"{prefix}{stem}",
                )
            )
@pytest.fixture
def tracking_calculator() -> TrackingCalculator:
    """Provide a fresh call-counting calculator for each test."""
    calculator = TrackingCalculator()
    return calculator
@pytest.fixture
def mock_context() -> MiddlewareContext[mcp.types.CallToolRequestParams]:
    """Create a mock middleware context for tool calls."""
    ctx = MagicMock(spec=MiddlewareContext[mcp.types.CallToolRequestParams])
    # Shape matches a real tools/call request for "test_tool".
    ctx.method = "tools/call"
    ctx.message = mcp.types.CallToolRequestParams(
        name="test_tool", arguments={"param1": "value1", "param2": 42}
    )
    return ctx
@pytest.fixture
def mock_call_next() -> CallNext[mcp.types.CallToolRequestParams, ToolResult]:
    """Create a mock call_next function that always returns a canned result."""
    canned_result = ToolResult(
        content=[TextContent(type="text", text="test result")],
        structured_content={"result": "success", "value": 123},
    )
    return AsyncMock(return_value=canned_result)
@pytest.fixture
def sample_tool_result() -> ToolResult:
    """Create a sample tool result for testing."""
    text_block = TextContent(type="text", text="cached result")
    return ToolResult(
        content=[text_block],
        structured_content={"cached": True, "data": "test"},
    )
class TestResponseCachingMiddleware:
    """Unit tests for ResponseCachingMiddleware construction and filtering."""

    def test_initialization(self):
        """Middleware can be constructed with explicit call-tool settings."""
        settings = CallToolSettings(
            included_tools=["tool1"],
            excluded_tools=["tool2"],
        )
        assert ResponseCachingMiddleware(call_tool_settings=settings)

    @pytest.mark.parametrize(
        ("tool_name", "included_tools", "excluded_tools", "result"),
        [
            ("tool", ["tool", "tool2"], [], True),
            ("tool", ["second tool", "third tool"], [], False),
            ("tool", [], ["tool"], False),
            ("tool", [], ["second tool"], True),
            ("tool", ["tool", "second tool"], ["tool"], False),
            ("tool", ["tool", "second tool"], ["second tool"], True),
        ],
        ids=[
            "tool is included",
            "tool is not included",
            "tool is excluded",
            "tool is not excluded",
            "tool is included and excluded (excluded takes precedence)",
            "tool is included and not excluded",
        ],
    )
    def test_tool_call_filtering(
        self,
        tool_name: str,
        included_tools: list[str],
        excluded_tools: list[str],
        result: bool,
    ):
        """Inclusion/exclusion lists decide whether a tool call is cacheable."""
        middleware = ResponseCachingMiddleware(
            call_tool_settings=CallToolSettings(
                included_tools=included_tools, excluded_tools=excluded_tools
            ),
        )
        assert middleware._matches_tool_cache_settings(tool_name=tool_name) is result
@pytest.mark.skipif(
    sys.platform == "win32",
    reason="SQLite caching tests are flaky on Windows due to temp directory issues.",
)
class TestResponseCachingMiddlewareIntegration:
    """Integration tests with real FastMCP server.

    The server fixture is parametrized over both supported cache backends
    (in-memory and filetree) so every test exercises each storage path.
    """

    @pytest.fixture(params=["memory", "filetree"])
    async def caching_server(
        self,
        tracking_calculator: TrackingCalculator,
        request: pytest.FixtureRequest,
    ):
        """Create a FastMCP server for caching tests."""
        mcp = FastMCP("CachingTestServer", dereference_schemas=False)
        with tempfile.TemporaryDirectory(ignore_cleanup_errors=True) as temp_dir:
            with warnings.catch_warnings():
                warnings.simplefilter("ignore", UserWarning)
                file_store = FileTreeStore(
                    data_directory=Path(temp_dir),
                    key_sanitization_strategy=FileTreeV1KeySanitizationStrategy(
                        Path(temp_dir)
                    ),
                    collection_sanitization_strategy=FileTreeV1CollectionSanitizationStrategy(
                        Path(temp_dir)
                    ),
                )
            response_caching_middleware = ResponseCachingMiddleware(
                cache_storage=file_store
                if request.param == "filetree"
                else MemoryStore(),
            )
            mcp.add_middleware(middleware=response_caching_middleware)
            tracking_calculator.add_tools(fastmcp=mcp)
            tracking_calculator.add_resources(fastmcp=mcp)
            tracking_calculator.add_prompts(fastmcp=mcp)
            yield mcp

    @pytest.fixture
    def non_caching_server(self, tracking_calculator: TrackingCalculator):
        """Create a FastMCP server for non-caching tests."""
        mcp = FastMCP("NonCachingTestServer")
        tracking_calculator.add_tools(fastmcp=mcp)
        return mcp

    async def test_list_tools(
        self, caching_server: FastMCP, tracking_calculator: TrackingCalculator
    ):
        """Test that tool list caching works with a real FastMCP server."""
        async with Client(caching_server) as client:
            pre_tool_list: list[mcp.types.Tool] = await client.list_tools()
            assert len(pre_tool_list) == 5
            # Add a tool and make sure it's missing from the list tool response
            _ = caching_server.add_tool(
                tool=Tool.from_function(fn=tracking_calculator.add, name="add_2")
            )
            post_tool_list: list[mcp.types.Tool] = await client.list_tools()
            assert len(post_tool_list) == 5
            assert pre_tool_list == post_tool_list

    async def test_call_tool(
        self,
        caching_server: FastMCP,
        tracking_calculator: TrackingCalculator,
    ):
        """A repeated tool call is served from cache without re-running the tool."""
        tracking_calculator.add_tools(fastmcp=caching_server)
        async with Client[FastMCPTransport](transport=caching_server) as client:
            call_tool_result_one: CallToolResult = await client.call_tool(
                "add", {"a": 5, "b": 3}
            )
            assert tracking_calculator.add_calls == 1
            call_tool_result_two: CallToolResult = await client.call_tool(
                "add", {"a": 5, "b": 3}
            )
            assert call_tool_result_one == call_tool_result_two
            # Without this check the test would pass even with caching
            # disabled: add() is deterministic, so both calls return equal
            # results. The counter proves the second call never ran the tool.
            assert tracking_calculator.add_calls == 1

    async def test_call_tool_very_large_value(
        self,
        caching_server: FastMCP,
        tracking_calculator: TrackingCalculator,
    ):
        """Very large responses are re-executed rather than served from cache."""
        tracking_calculator.add_tools(fastmcp=caching_server)
        async with Client[FastMCPTransport](transport=caching_server) as client:
            call_tool_result_one: CallToolResult = await client.call_tool(
                "very_large_response", {}
            )
            assert tracking_calculator.very_large_response_calls == 1
            call_tool_result_two: CallToolResult = await client.call_tool(
                "very_large_response", {}
            )
            assert call_tool_result_one == call_tool_result_two
            # Counter reaching 2 shows the 1 MB payload was not cached
            # (presumably over the middleware's value-size limit).
            assert tracking_calculator.very_large_response_calls == 2

    async def test_call_tool_crazy_value(
        self,
        caching_server: FastMCP,
        tracking_calculator: TrackingCalculator,
        crazy_model: CrazyModel,
    ):
        """Complex structured arguments still produce stable cache hits."""
        tracking_calculator.add_tools(fastmcp=caching_server)
        async with Client[FastMCPTransport](transport=caching_server) as client:
            call_tool_result_one: CallToolResult = await client.call_tool(
                "crazy", {"a": crazy_model}
            )
            assert tracking_calculator.crazy_calls == 1
            call_tool_result_two: CallToolResult = await client.call_tool(
                "crazy", {"a": crazy_model}
            )
            assert call_tool_result_one == call_tool_result_two
            assert tracking_calculator.crazy_calls == 1

    async def test_list_resources(
        self, caching_server: FastMCP, tracking_calculator: TrackingCalculator
    ):
        """Test that list resources caching works with a real FastMCP server."""
        async with Client[FastMCPTransport](transport=caching_server) as client:
            pre_resource_list: list[mcp.types.Resource] = await client.list_resources()
            assert len(pre_resource_list) == 3
            tracking_calculator.add_resources(fastmcp=caching_server)
            post_resource_list: list[mcp.types.Resource] = await client.list_resources()
            assert len(post_resource_list) == 3
            assert pre_resource_list == post_resource_list

    async def test_read_resource(
        self, caching_server: FastMCP, tracking_calculator: TrackingCalculator
    ):
        """Test that get resources caching works with a real FastMCP server."""
        async with Client[FastMCPTransport](transport=caching_server) as client:
            pre_resource = await client.read_resource(uri="resource://add_calls")
            assert isinstance(pre_resource[0], TextResourceContents)
            assert pre_resource[0].text == "0"
            # Mutate the underlying counter; the cached read must not see it.
            tracking_calculator.add_calls = 1
            post_resource = await client.read_resource(uri="resource://add_calls")
            assert isinstance(post_resource[0], TextResourceContents)
            assert post_resource[0].text == "0"
            assert pre_resource == post_resource

    async def test_list_prompts(
        self, caching_server: FastMCP, tracking_calculator: TrackingCalculator
    ):
        """Test that list prompts caching works with a real FastMCP server."""
        async with Client[FastMCPTransport](transport=caching_server) as client:
            pre_prompt_list: list[mcp.types.Prompt] = await client.list_prompts()
            assert len(pre_prompt_list) == 1
            tracking_calculator.add_prompts(fastmcp=caching_server)
            post_prompt_list: list[mcp.types.Prompt] = await client.list_prompts()
            assert len(post_prompt_list) == 1
            assert pre_prompt_list == post_prompt_list

    async def test_get_prompts(
        self, caching_server: FastMCP, tracking_calculator: TrackingCalculator
    ):
        """Test that get prompts caching works with a real FastMCP server."""
        async with Client[FastMCPTransport](transport=caching_server) as client:
            pre_prompt = await client.get_prompt(
                name="how_to_calculate", arguments={"a": 5, "b": 3}
            )
            pre_prompt_content = pre_prompt.messages[0].content
            assert isinstance(pre_prompt_content, TextContent)
            assert (
                pre_prompt_content.text
                == "To calculate 5 + 3, you need to add 5 and 3 together."
            )
            tracking_calculator.add_prompts(fastmcp=caching_server)
            post_prompt = await client.get_prompt(
                name="how_to_calculate", arguments={"a": 5, "b": 3}
            )
            assert pre_prompt == post_prompt

    async def test_statistics(
        self,
        caching_server: FastMCP,
    ):
        """Test that statistics are collected correctly."""
        caching_middleware = caching_server.middleware[0]
        assert isinstance(caching_middleware, ResponseCachingMiddleware)
        async with Client[FastMCPTransport](transport=caching_server) as client:
            statistics = caching_middleware.statistics()
            assert statistics == snapshot(ResponseCachingStatistics())
            _ = await client.call_tool("add", {"a": 5, "b": 3})
            statistics = caching_middleware.statistics()
            assert statistics == snapshot(
                ResponseCachingStatistics(
                    list_tools=KVStoreCollectionStatistics(
                        get=GetStatistics(count=2, hit=1, miss=1),
                        put=PutStatistics(count=1),
                    ),
                    call_tool=KVStoreCollectionStatistics(
                        get=GetStatistics(count=1, miss=1), put=PutStatistics(count=1)
                    ),
                )
            )
            _ = await client.call_tool("add", {"a": 5, "b": 3})
            statistics = caching_middleware.statistics()
            assert statistics == snapshot(
                ResponseCachingStatistics(
                    list_tools=KVStoreCollectionStatistics(
                        get=GetStatistics(count=2, hit=1, miss=1),
                        put=PutStatistics(count=1),
                    ),
                    call_tool=KVStoreCollectionStatistics(
                        get=GetStatistics(count=2, hit=1, miss=1),
                        put=PutStatistics(count=1),
                    ),
                )
            )
class TestCachableToolResult:
    """CachableToolResult must round-trip a ToolResult without data loss."""

    def test_wrap_and_unwrap(self):
        original = ToolResult(
            "unstructured content",
            structured_content={"structured": "content"},
            meta={"meta": "data"},
        )
        round_tripped = CachableToolResult.wrap(original).unwrap()
        # Every field survives the wrap/unwrap cycle.
        assert round_tripped.content == original.content
        assert round_tripped.structured_content == original.structured_content
        assert round_tripped.meta == original.meta
class TestCachingWithImportedServerPrefixes:
    """Test that caching preserves prefixes from imported servers.

    Regression tests for issue #2300: ResponseCachingMiddleware was losing
    prefix information when caching components from imported servers.
    """

    @pytest.fixture
    async def parent_with_imported_child(self, tracking_calculator: TrackingCalculator):
        """Create a parent server with an imported child server (prefixed)."""
        child = FastMCP("child")
        tracking_calculator.add_tools(fastmcp=child)
        tracking_calculator.add_resources(fastmcp=child)
        tracking_calculator.add_prompts(fastmcp=child)
        parent = FastMCP("parent")
        parent.add_middleware(ResponseCachingMiddleware())
        parent.mount(child, namespace="child")
        return parent

    async def test_tool_prefixes_preserved_after_cache_hit(
        self, parent_with_imported_child: FastMCP
    ):
        """Tool names should retain prefix after being served from cache."""
        async with Client(parent_with_imported_child) as client:
            # First listing populates the cache; the second is served from it.
            first_names = [t.name for t in await client.list_tools()]
            cached_names = [t.name for t in await client.list_tools()]
            # The prefix must survive both the fresh and the cached response.
            assert all(name.startswith("child_") for name in first_names)
            assert all(name.startswith("child_") for name in cached_names)
            assert first_names == cached_names

    async def test_resource_prefixes_preserved_after_cache_hit(
        self, parent_with_imported_child: FastMCP
    ):
        """Resource URIs should retain prefix after being served from cache."""
        async with Client(parent_with_imported_child) as client:
            # First listing populates the cache; the second is served from it.
            first_uris = [str(r.uri) for r in await client.list_resources()]
            cached_uris = [str(r.uri) for r in await client.list_resources()]
            # Resources get a path-style prefix: resource://child/path
            assert all("://child/" in uri for uri in first_uris)
            assert all("://child/" in uri for uri in cached_uris)
            assert first_uris == cached_uris

    async def test_prompt_prefixes_preserved_after_cache_hit(
        self, parent_with_imported_child: FastMCP
    ):
        """Prompt names should retain prefix after being served from cache."""
        async with Client(parent_with_imported_child) as client:
            # First listing populates the cache; the second is served from it.
            first_names = [p.name for p in await client.list_prompts()]
            cached_names = [p.name for p in await client.list_prompts()]
            assert all(name.startswith("child_") for name in first_names)
            assert all(name.startswith("child_") for name in cached_names)
            assert first_names == cached_names

    async def test_prefixed_tool_callable_after_cache_hit(
        self,
        parent_with_imported_child: FastMCP,
        tracking_calculator: TrackingCalculator,
    ):
        """Prefixed tools should be callable after cache populates."""
        async with Client(parent_with_imported_child) as client:
            await client.list_tools()  # populate the cache
            await client.list_tools()  # served from cache
            outcome = await client.call_tool("child_add", {"a": 5, "b": 3})
            assert not outcome.is_error
            assert tracking_calculator.add_calls == 1
class TestCacheKeyGeneration:
    """Cache keys are fixed-length digests that never embed raw request data."""

    def test_call_tool_key_is_hashed_and_does_not_include_raw_input(self):
        params = mcp.types.CallToolRequestParams(
            name="toolX",
            arguments={"password": "secret", "path": "../../etc/passwd"},
        )
        cache_key = _make_call_tool_cache_key(params)
        # 64 hex characters; sensitive argument values must not appear.
        assert len(cache_key) == 64
        assert "secret" not in cache_key
        assert "../../etc/passwd" not in cache_key

    def test_read_resource_key_is_hashed_and_does_not_include_raw_uri(self):
        params = mcp.types.ReadResourceRequestParams(
            uri=AnyUrl("file:///tmp/../../etc/shadow?token=abcd")
        )
        cache_key = _make_read_resource_cache_key(params)
        assert len(cache_key) == 64
        assert "shadow" not in cache_key
        assert "token=abcd" not in cache_key

    def test_get_prompt_key_is_hashed_and_stable(self):
        params = mcp.types.GetPromptRequestParams(
            name="promptY",
            arguments={"api_key": "ABC123", "scope": "admin"},
        )
        cache_key = _make_get_prompt_cache_key(params)
        assert len(cache_key) == 64
        assert "ABC123" not in cache_key
        # Deterministic: identical params always yield the identical key.
        assert cache_key == _make_get_prompt_cache_key(params)
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "tests/server/middleware/test_caching.py",
"license": "Apache License 2.0",
"lines": 550,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
PrefectHQ/fastmcp:scripts/auto_close_needs_mre.py | #!/usr/bin/env python
# /// script
# requires-python = ">=3.10"
# dependencies = [
# "httpx",
# ]
# ///
"""
Auto-close issues that need MRE (Minimal Reproducible Example).
This script runs on a schedule to automatically close issues that have been
marked as "needs MRE" and haven't received activity from the issue author
within 7 days.
"""
import os
from dataclasses import dataclass
from datetime import datetime, timedelta, timezone
import httpx
@dataclass
class Issue:
    """Represents a GitHub issue."""
    number: int  # issue number within the repository
    title: str  # issue title
    state: str  # issue state string as returned by the API (e.g. "open")
    created_at: str  # ISO-8601 timestamp string from the API (e.g. "...Z")
    user_id: int  # numeric GitHub id of the issue author
    user_login: str  # login name of the issue author
    body: str | None  # issue body; None when the issue has no description
@dataclass
class Comment:
    """Represents a GitHub comment."""
    id: int  # numeric comment id
    body: str  # comment text
    created_at: str  # ISO-8601 timestamp string from the API (e.g. "...Z")
    user_id: int  # numeric GitHub id of the comment author
    user_login: str  # login name of the comment author
@dataclass
class Event:
    """Represents a GitHub issue event."""
    event: str  # event type, e.g. "labeled"
    created_at: str  # ISO-8601 timestamp string from the API (e.g. "...Z")
    label_name: str | None  # label name for "labeled" events; otherwise None
class GitHubClient:
    """Client for interacting with GitHub API.

    All methods are synchronous and best-effort: HTTP failures are printed
    and surfaced as empty/partial results or False flags, never exceptions.
    """

    def __init__(self, token: str, owner: str, repo: str):
        self.token = token
        self.owner = owner
        self.repo = repo
        self.headers = {
            "Authorization": f"token {token}",
            "Accept": "application/vnd.github.v3+json",
        }
        self.base_url = f"https://api.github.com/repos/{owner}/{repo}"

    def _get_paginated(
        self, url: str, headers: dict[str, str], max_pages: int = 10
    ) -> list[dict]:
        """Fetch and concatenate up to ``max_pages`` pages of JSON from ``url``.

        Stops early on the first non-200 response or empty page, returning
        whatever accumulated so far. Shared by the events/comments/timeline
        accessors, which previously each duplicated this loop.
        """
        results: list[dict] = []
        with httpx.Client() as client:
            page = 1
            while True:
                response = client.get(
                    url, headers=headers, params={"page": page, "per_page": 100}
                )
                if response.status_code != 200:
                    break
                data = response.json()
                if not data:
                    break
                results.extend(data)
                page += 1
                if page > max_pages:  # Safety limit
                    break
        return results

    def get_issues_with_label(
        self, label: str, page: int = 1, per_page: int = 100
    ) -> list[Issue]:
        """Fetch one page of open issues with a specific label (PRs excluded)."""
        url = f"{self.base_url}/issues"
        issues = []
        with httpx.Client() as client:
            response = client.get(
                url,
                headers=self.headers,
                params={
                    "state": "open",
                    "labels": label,
                    "per_page": per_page,
                    "page": page,
                },
            )
            if response.status_code != 200:
                print(f"Error fetching issues: {response.status_code}")
                return issues
            data = response.json()
            for item in data:
                # Skip pull requests: the issues endpoint returns both.
                if "pull_request" in item:
                    continue
                issues.append(
                    Issue(
                        number=item["number"],
                        title=item["title"],
                        state=item["state"],
                        created_at=item["created_at"],
                        user_id=item["user"]["id"],
                        user_login=item["user"]["login"],
                        body=item.get("body"),
                    )
                )
        return issues

    def get_issue_events(self, issue_number: int) -> list[Event]:
        """Fetch all events for an issue."""
        url = f"{self.base_url}/issues/{issue_number}/events"
        events = []
        for event_data in self._get_paginated(url, self.headers):
            # Only "labeled" events carry a label payload.
            label_name = None
            if event_data["event"] == "labeled" and "label" in event_data:
                label_name = event_data["label"]["name"]
            events.append(
                Event(
                    event=event_data["event"],
                    created_at=event_data["created_at"],
                    label_name=label_name,
                )
            )
        return events

    def get_issue_comments(self, issue_number: int) -> list[Comment]:
        """Fetch all comments for an issue."""
        url = f"{self.base_url}/issues/{issue_number}/comments"
        comments = []
        for comment_data in self._get_paginated(url, self.headers):
            comments.append(
                Comment(
                    id=comment_data["id"],
                    body=comment_data["body"],
                    created_at=comment_data["created_at"],
                    user_id=comment_data["user"]["id"],
                    user_login=comment_data["user"]["login"],
                )
            )
        return comments

    def get_issue_timeline(self, issue_number: int) -> list[dict]:
        """Fetch timeline events for an issue (includes issue edits)."""
        url = f"{self.base_url}/issues/{issue_number}/timeline"
        # The timeline API requires the mockingbird preview media type.
        timeline_headers = {
            **self.headers,
            "Accept": "application/vnd.github.mockingbird-preview+json",
        }
        return self._get_paginated(url, timeline_headers)

    def close_issue(self, issue_number: int, comment: str) -> tuple[bool, bool]:
        """Close an issue with a comment.

        Closes first, then comments — so a failed comment never leaves
        a misleading "closing" notice on a still-open issue.

        Returns (closed, commented) so the caller can log partial failures.
        """
        # Close the issue first
        issue_url = f"{self.base_url}/issues/{issue_number}"
        with httpx.Client() as client:
            response = client.patch(
                issue_url, headers=self.headers, json={"state": "closed"}
            )
            if response.status_code != 200:
                print(
                    f"Failed to close issue #{issue_number}: "
                    f"{response.status_code} {response.text}"
                )
                return False, False
        # Then add the comment
        comment_url = f"{self.base_url}/issues/{issue_number}/comments"
        with httpx.Client() as client:
            response = client.post(
                comment_url, headers=self.headers, json={"body": comment}
            )
            if response.status_code != 201:
                print(
                    f"Issue #{issue_number} was closed but comment failed: "
                    f"{response.status_code} {response.text}"
                )
                return True, False
        return True, True
def find_label_application_date(
    events: list[Event], label_name: str
) -> datetime | None:
    """Find when a specific label was applied to an issue.

    Scans events newest-first so the most recent application wins; returns
    None when the label was never applied.
    """
    latest = next(
        (
            ev
            for ev in reversed(events)
            if ev.event == "labeled" and ev.label_name == label_name
        ),
        None,
    )
    if latest is None:
        return None
    # GitHub timestamps end in "Z"; normalize for datetime.fromisoformat.
    return datetime.fromisoformat(latest.created_at.replace("Z", "+00:00"))
def has_author_activity_after(
    issue: Issue,
    comments: list[Comment],
    timeline: list[dict],
    after_date: datetime,
) -> bool:
    """Check if the issue author had any activity after a specific date.

    Activity means either a comment by the author or a rename/edit timeline
    event performed by the author, strictly after ``after_date``.
    """
    # Author comments
    for entry in comments:
        if entry.user_id != issue.user_id:
            continue
        posted = datetime.fromisoformat(entry.created_at.replace("Z", "+00:00"))
        if posted > after_date:
            print(
                f"Issue #{issue.number}: Author commented after label application"
            )
            return True
    # Author edits/renames from the timeline
    for item in timeline:
        if item.get("event") not in ("renamed", "edited"):
            continue
        if item.get("actor", {}).get("id") != issue.user_id:
            continue
        occurred = datetime.fromisoformat(item["created_at"].replace("Z", "+00:00"))
        if occurred > after_date:
            print(
                f"Issue #{issue.number}: Author edited issue after label application"
            )
            return True
    return False
def should_close_as_needs_mre(
    issue: Issue,
    label_date: datetime,
    comments: list[Comment],
    timeline: list[dict],
) -> bool:
    """Determine if an issue should be closed for needing an MRE.

    An issue qualifies when the label is at least 7 days old AND the author
    has shown no activity (comment or edit) since it was applied.
    """
    cutoff = datetime.now(timezone.utc) - timedelta(days=7)
    # Too recent: give the author the full 7-day window.
    if label_date > cutoff:
        return False
    # Any author activity since labeling keeps the issue open.
    return not has_author_activity_after(issue, comments, timeline, label_date)
def main():
    """Main entry point for auto-closing needs MRE issues."""
    print("[DEBUG] Starting auto-close needs MRE script")
    # Get environment variables
    token = os.environ.get("GITHUB_TOKEN")
    if not token:
        raise ValueError("GITHUB_TOKEN environment variable is required")
    owner = os.environ.get("GITHUB_REPOSITORY_OWNER", "prefecthq")
    repo = os.environ.get("GITHUB_REPOSITORY_NAME", "fastmcp")
    print(f"[DEBUG] Repository: {owner}/{repo}")
    # Initialize client
    client = GitHubClient(token, owner, repo)
    # Get issues with "needs MRE" label
    # get_issues_with_label returns one page per call, so paginate here until
    # an empty page (or the 20-page safety cap) is reached.
    all_issues = []
    page = 1
    while page <= 20:  # Safety limit
        issues = client.get_issues_with_label("needs MRE", page=page)
        if not issues:
            break
        all_issues.extend(issues)
        page += 1
    print(f"[DEBUG] Found {len(all_issues)} open issues with 'needs MRE' label")
    processed_count = 0
    closed_count = 0
    for issue in all_issues:
        processed_count += 1
        if processed_count % 10 == 0:
            print(f"[DEBUG] Processed {processed_count}/{len(all_issues)} issues")
        # Get events to find when label was applied
        events = client.get_issue_events(issue.number)
        label_date = find_label_application_date(events, "needs MRE")
        if not label_date:
            # Defensive: label filter matched but no "labeled" event was found
            # (e.g. events truncated by the pagination cap) — skip the issue.
            print(
                f"[DEBUG] Issue #{issue.number}: Could not find label application date"
            )
            continue
        print(
            f"[DEBUG] Issue #{issue.number}: Label applied on {label_date.isoformat()}"
        )
        # Get comments and timeline
        comments = client.get_issue_comments(issue.number)
        timeline = client.get_issue_timeline(issue.number)
        # Check if we should close
        if should_close_as_needs_mre(issue, label_date, comments, timeline):
            close_message = (
                "This issue is being automatically closed because we requested a minimal reproducible example (MRE) "
                "7 days ago and haven't received a response from the issue author.\n\n"
                "**If you can provide an MRE**, please add it as a comment and we'll reopen this issue. "
                "An MRE should be a complete, runnable code snippet that demonstrates the problem.\n\n"
                "**If this was closed in error**, please leave a comment explaining the situation and we'll reopen it."
            )
            # close_issue returns (closed, commented) so that a close that
            # succeeded but failed to comment can be logged distinctly.
            closed, commented = client.close_issue(issue.number, close_message)
            if closed:
                closed_count += 1
                if commented:
                    print(f"[SUCCESS] Closed issue #{issue.number} (needs MRE)")
                else:
                    print(
                        f"[WARNING] Closed issue #{issue.number} but "
                        f"comment was not posted"
                    )
            else:
                print(f"[ERROR] Failed to close issue #{issue.number}")
    print(f"[DEBUG] Processing complete. Closed {closed_count} issues needing MRE")
if __name__ == "__main__":
    main()
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "scripts/auto_close_needs_mre.py",
"license": "Apache License 2.0",
"lines": 325,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
PrefectHQ/fastmcp:src/fastmcp/server/auth/providers/introspection.py | """OAuth 2.0 Token Introspection (RFC 7662) provider for FastMCP.
This module provides token verification for opaque tokens using the OAuth 2.0
Token Introspection protocol defined in RFC 7662. It allows FastMCP servers to
validate tokens issued by authorization servers that don't use JWT format.
Example:
```python
from fastmcp import FastMCP
from fastmcp.server.auth.providers.introspection import IntrospectionTokenVerifier
# Verify opaque tokens via RFC 7662 introspection
verifier = IntrospectionTokenVerifier(
introspection_url="https://auth.example.com/oauth/introspect",
client_id="your-client-id",
client_secret="your-client-secret",
required_scopes=["read", "write"]
)
mcp = FastMCP("My Protected Server", auth=verifier)
```
"""
from __future__ import annotations
import base64
import contextlib
import hashlib
import time
from dataclasses import dataclass
from typing import Any, Literal, get_args
import httpx
from pydantic import AnyHttpUrl, SecretStr
from fastmcp.server.auth import AccessToken, TokenVerifier
from fastmcp.utilities.auth import parse_scopes
from fastmcp.utilities.logging import get_logger
logger = get_logger(__name__)
@dataclass
class _IntrospectionCacheEntry:
    """Cached introspection result with expiration."""
    result: AccessToken  # verified token data returned by a prior introspection call
    expires_at: float  # absolute expiry timestamp for this entry (presumably time.time()-based — confirm against the cache logic below)
# RFC 6749 client authentication methods for the introspection request:
# "client_secret_basic" puts credentials in the Authorization header,
# "client_secret_post" puts them in the POST body (see class docstring below).
ClientAuthMethod = Literal["client_secret_basic", "client_secret_post"]
class IntrospectionTokenVerifier(TokenVerifier):
    """
    OAuth 2.0 Token Introspection verifier (RFC 7662).

    This verifier validates opaque tokens by calling an OAuth 2.0 token introspection
    endpoint. Unlike JWT verification which is stateless, token introspection requires
    a network call to the authorization server for each token validation.

    The verifier authenticates to the introspection endpoint using either:

    - HTTP Basic Auth (client_secret_basic, default): credentials in Authorization header
    - POST body authentication (client_secret_post): credentials in request body

    Both methods are specified in RFC 6749 (OAuth 2.0) and RFC 7662 (Token Introspection).

    Use this when:

    - Your authorization server issues opaque (non-JWT) tokens
    - You need to validate tokens from Auth0, Okta, Keycloak, or other OAuth servers
    - Your tokens require real-time revocation checking
    - Your authorization server supports RFC 7662 introspection

    Caching is disabled by default to preserve real-time revocation semantics.
    Set ``cache_ttl_seconds`` to enable caching and reduce load on the
    introspection endpoint (e.g., ``cache_ttl_seconds=300`` for 5 minutes).

    Example:
        ```python
        verifier = IntrospectionTokenVerifier(
            introspection_url="https://auth.example.com/oauth/introspect",
            client_id="my-service",
            client_secret="secret-key",
            required_scopes=["api:read"]
        )
        ```
    """

    # Default cache settings
    DEFAULT_MAX_CACHE_SIZE = 10000

    def __init__(
        self,
        *,
        introspection_url: str,
        client_id: str,
        client_secret: str | SecretStr,
        client_auth_method: ClientAuthMethod = "client_secret_basic",
        timeout_seconds: int = 10,
        required_scopes: list[str] | None = None,
        base_url: AnyHttpUrl | str | None = None,
        cache_ttl_seconds: int | None = None,
        max_cache_size: int | None = None,
        http_client: httpx.AsyncClient | None = None,
    ):
        """
        Initialize the introspection token verifier.

        Args:
            introspection_url: URL of the OAuth 2.0 token introspection endpoint
            client_id: OAuth client ID for authenticating to the introspection endpoint
            client_secret: OAuth client secret for authenticating to the introspection endpoint
            client_auth_method: Client authentication method. "client_secret_basic" (default)
                uses HTTP Basic Auth header, "client_secret_post" sends credentials in POST body
            timeout_seconds: HTTP request timeout in seconds (default: 10)
            required_scopes: Required scopes for all tokens (optional)
            base_url: Base URL for TokenVerifier protocol
            cache_ttl_seconds: How long to cache introspection results in seconds.
                Caching is disabled by default (None) to preserve real-time
                revocation semantics. Set to a positive integer to enable caching
                (e.g., 300 for 5 minutes).
            max_cache_size: Maximum number of tokens to cache when caching is
                enabled. Default: 10000.
            http_client: Optional httpx.AsyncClient for connection pooling. When provided,
                the client is reused across calls and the caller is responsible for its
                lifecycle. When None (default), a fresh client is created per call.

        Raises:
            ValueError: If ``client_auth_method`` is not one of the supported
                ``ClientAuthMethod`` values.
        """
        # Parse scopes if provided as string
        parsed_required_scopes = (
            parse_scopes(required_scopes) if required_scopes is not None else None
        )
        super().__init__(base_url=base_url, required_scopes=parsed_required_scopes)
        self.introspection_url = introspection_url
        self.client_id = client_id
        # Unwrap SecretStr so the request-building code below always sees a
        # plain str.
        self.client_secret = (
            client_secret.get_secret_value()
            if isinstance(client_secret, SecretStr)
            else client_secret
        )
        # Validate client_auth_method to catch typos/invalid values early
        valid_methods = get_args(ClientAuthMethod)
        if client_auth_method not in valid_methods:
            options = " or ".join(f"'{m}'" for m in valid_methods)
            raise ValueError(
                f"Invalid client_auth_method: {client_auth_method!r}. "
                f"Must be {options}."
            )
        self.client_auth_method: ClientAuthMethod = client_auth_method
        self.timeout_seconds = timeout_seconds
        # Caller-owned client when not None; verify_token() wraps it in a
        # nullcontext so it is never closed here.
        self._http_client = http_client
        self.logger = get_logger(__name__)
        # Cache configuration (None or 0 = disabled)
        self._cache_ttl = cache_ttl_seconds or 0
        self._max_cache_size = (
            max_cache_size
            if max_cache_size is not None
            else self.DEFAULT_MAX_CACHE_SIZE
        )
        self._cache: dict[str, _IntrospectionCacheEntry] = {}
        # Cleanup scheduling uses the monotonic clock (immune to wall-clock
        # jumps); entry expiry itself is wall-clock (time.time) to line up
        # with the token's ``exp`` claim.
        self._last_cleanup = time.monotonic()
        self._cleanup_interval = 60  # Cleanup every 60 seconds

    def _hash_token(self, token: str) -> str:
        """Hash token for use as cache key.

        Using SHA-256 for memory efficiency (fixed 64-char hex digest
        regardless of token length). This also avoids holding raw token
        strings as dict keys.
        """
        return hashlib.sha256(token.encode("utf-8")).hexdigest()

    def _cleanup_expired_cache(self) -> None:
        """Remove expired entries from cache."""
        # Wall clock here, because entry.expires_at is derived from time.time().
        now = time.time()
        expired = [key for key, entry in self._cache.items() if entry.expires_at < now]
        for key in expired:
            del self._cache[key]
        if expired:
            self.logger.debug("Cleaned up %d expired cache entries", len(expired))

    def _maybe_cleanup(self) -> None:
        """Periodically cleanup expired entries to prevent unbounded growth."""
        now = time.monotonic()
        if now - self._last_cleanup > self._cleanup_interval:
            self._cleanup_expired_cache()
            self._last_cleanup = now

    def _get_cached(self, token: str) -> tuple[bool, AccessToken | None]:
        """Get cached introspection result.

        Returns:
            Tuple of (is_cached, result):
            - (True, AccessToken) if cached valid token
            - (False, None) if not in cache or expired
        """
        if self._cache_ttl <= 0 or self._max_cache_size <= 0:
            return (False, None)  # Caching disabled
        cache_key = self._hash_token(token)
        entry = self._cache.get(cache_key)
        if entry is None:
            return (False, None)  # Not in cache
        if entry.expires_at < time.time():
            # Lazily evict on read; _maybe_cleanup handles the rest.
            del self._cache[cache_key]
            return (False, None)  # Expired
        # Return a copy to prevent mutations from affecting cached value
        return (True, entry.result.model_copy(deep=True))

    def _set_cached(self, token: str, result: AccessToken) -> None:
        """Cache a valid introspection result with TTL.

        Only successful validations are cached. Failures (inactive, expired,
        missing scopes, errors) are never cached to avoid sticky false negatives.
        """
        if self._cache_ttl <= 0 or self._max_cache_size <= 0:
            return  # Caching disabled
        # Periodic cleanup
        self._maybe_cleanup()
        # Check cache size limit
        if len(self._cache) >= self._max_cache_size:
            self._cleanup_expired_cache()
            # If still at limit after cleanup, evict oldest entry
            # (FIFO: dicts preserve insertion order, so the first key is the
            # oldest insertion, not necessarily the least recently used).
            if len(self._cache) >= self._max_cache_size:
                oldest_key = next(iter(self._cache))
                del self._cache[oldest_key]
        cache_key = self._hash_token(token)
        # Use token's expiration if available and sooner than TTL
        expires_at = time.time() + self._cache_ttl
        if result.expires_at:
            expires_at = min(expires_at, float(result.expires_at))
        # Store a deep copy to prevent mutations from affecting cached value
        self._cache[cache_key] = _IntrospectionCacheEntry(
            result=result.model_copy(deep=True),
            expires_at=expires_at,
        )

    def _create_basic_auth_header(self) -> str:
        """Create HTTP Basic Auth header value from client credentials."""
        # NOTE(review): credentials are concatenated raw, without the
        # form-urlencoding RFC 6749 §2.3.1 prescribes before Basic encoding.
        # Fine for typical ASCII ids/secrets without ':'; confirm if clients
        # may use special characters.
        credentials = f"{self.client_id}:{self.client_secret}"
        encoded = base64.b64encode(credentials.encode("utf-8")).decode("utf-8")
        return f"Basic {encoded}"

    def _extract_scopes(self, introspection_response: dict[str, Any]) -> list[str]:
        """
        Extract scopes from introspection response.

        RFC 7662 allows scopes to be returned as either:
        - A space-separated string in the 'scope' field
        - An array of strings in the 'scope' field (less common but valid)
        """
        scope_value = introspection_response.get("scope")
        if scope_value is None:
            return []
        # Handle string (space-separated) scopes
        if isinstance(scope_value, str):
            return [s.strip() for s in scope_value.split() if s.strip()]
        # Handle array of scopes
        if isinstance(scope_value, list):
            return [str(s) for s in scope_value if s]
        return []

    async def verify_token(self, token: str) -> AccessToken | None:
        """
        Verify a bearer token using OAuth 2.0 Token Introspection (RFC 7662).

        This method makes a POST request to the introspection endpoint with the token,
        authenticated using the configured client authentication method (client_secret_basic
        or client_secret_post).

        Successful validations may be cached in-memory to reduce load on the
        introspection endpoint; caching is off unless ``cache_ttl_seconds``
        was enabled at construction.

        Args:
            token: The opaque token string to validate

        Returns:
            AccessToken object if valid and active, None if invalid, inactive, or expired
        """
        # Check cache first
        is_cached, cached_result = self._get_cached(token)
        if is_cached:
            self.logger.debug("Token introspection cache hit")
            return cached_result
        try:
            # Reuse the caller-provided client via nullcontext (so it is not
            # closed on exit); otherwise create a short-lived client per call.
            async with (
                contextlib.nullcontext(self._http_client)
                if self._http_client is not None
                else httpx.AsyncClient(timeout=self.timeout_seconds)
            ) as client:
                # Prepare introspection request per RFC 7662
                # Build request data with token and token_type_hint
                data = {
                    "token": token,
                    "token_type_hint": "access_token",
                }
                # Build headers
                headers = {
                    "Content-Type": "application/x-www-form-urlencoded",
                    "Accept": "application/json",
                }
                # Add client authentication based on method
                if self.client_auth_method == "client_secret_basic":
                    headers["Authorization"] = self._create_basic_auth_header()
                elif self.client_auth_method == "client_secret_post":
                    data["client_id"] = self.client_id
                    data["client_secret"] = self.client_secret
                response = await client.post(
                    self.introspection_url,
                    data=data,
                    headers=headers,
                )
                # Check for HTTP errors - don't cache HTTP errors (may be transient)
                if response.status_code != 200:
                    self.logger.debug(
                        "Token introspection failed: HTTP %d - %s",
                        response.status_code,
                        response.text[:200] if response.text else "",
                    )
                    return None
                introspection_data = response.json()
                # Check if token is active (required field per RFC 7662)
                # Don't cache inactive tokens - they may become valid later
                # (e.g., tokens with future nbf, or propagation delays)
                if not introspection_data.get("active", False):
                    self.logger.debug("Token introspection returned active=false")
                    return None
                # Extract client_id (should be present for active tokens)
                client_id = introspection_data.get(
                    "client_id"
                ) or introspection_data.get("sub", "unknown")
                # Extract expiration time
                exp = introspection_data.get("exp")
                if exp:
                    # Validate expiration (belt and suspenders - server should set active=false)
                    if exp < time.time():
                        self.logger.debug(
                            "Token validation failed: expired token for client %s",
                            client_id,
                        )
                        return None
                # Extract scopes
                scopes = self._extract_scopes(introspection_data)
                # Check required scopes
                # Don't cache scope failures - permissions may be updated dynamically
                if self.required_scopes:
                    token_scopes = set(scopes)
                    required_scopes = set(self.required_scopes)
                    if not required_scopes.issubset(token_scopes):
                        self.logger.debug(
                            "Token missing required scopes. Has: %s, Required: %s",
                            token_scopes,
                            required_scopes,
                        )
                        return None
                # Create AccessToken with introspection response data
                result = AccessToken(
                    token=token,
                    client_id=str(client_id),
                    scopes=scopes,
                    expires_at=int(exp) if exp else None,
                    claims=introspection_data,  # Store full response for extensibility
                )
                # Only successful validations are cached (no-op when disabled).
                self._set_cached(token, result)
                return result
        except httpx.TimeoutException:
            self.logger.debug(
                "Token introspection timed out after %d seconds", self.timeout_seconds
            )
            return None
        except httpx.RequestError as e:
            self.logger.debug("Token introspection request failed: %s", e)
            return None
        except Exception as e:
            # Deliberate catch-all (e.g. malformed JSON from response.json()):
            # treat the token as unverified instead of propagating the error.
            self.logger.debug("Token introspection error: %s", e)
            return None
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "src/fastmcp/server/auth/providers/introspection.py",
"license": "Apache License 2.0",
"lines": 332,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
PrefectHQ/fastmcp:tests/server/auth/providers/test_introspection.py | """Tests for OAuth 2.0 Token Introspection verifier (RFC 7662)."""
import base64
import time
from typing import Any
import pytest
from pytest_httpx import HTTPXMock
from fastmcp.server.auth.providers.introspection import (
IntrospectionTokenVerifier,
)
class TestIntrospectionTokenVerifier:
    """Test core token verification logic.

    All HTTP traffic is intercepted with pytest-httpx's ``httpx_mock``
    fixture, so no real introspection endpoint is ever contacted. Async tests
    rely on the project's pytest asyncio configuration (no per-test marker).
    """

    @pytest.fixture
    def verifier(self) -> IntrospectionTokenVerifier:
        """Create a basic introspection verifier for testing."""
        return IntrospectionTokenVerifier(
            introspection_url="https://auth.example.com/oauth/introspect",
            client_id="test-client",
            client_secret="test-secret",
            timeout_seconds=5,
        )

    @pytest.fixture
    def verifier_with_required_scopes(self) -> IntrospectionTokenVerifier:
        """Create verifier with required scopes."""
        return IntrospectionTokenVerifier(
            introspection_url="https://auth.example.com/oauth/introspect",
            client_id="test-client",
            client_secret="test-secret",
            required_scopes=["read", "write"],
        )

    def test_initialization(self):
        """Test verifier initialization."""
        verifier = IntrospectionTokenVerifier(
            introspection_url="https://auth.example.com/oauth/introspect",
            client_id="test-client",
            client_secret="test-secret",
        )
        assert verifier.introspection_url == "https://auth.example.com/oauth/introspect"
        assert verifier.client_id == "test-client"
        assert verifier.client_secret == "test-secret"
        assert verifier.timeout_seconds == 10
        assert verifier.client_auth_method == "client_secret_basic"

    def test_initialization_requires_introspection_url(self):
        """Test that introspection_url is required."""
        with pytest.raises(TypeError):
            IntrospectionTokenVerifier(  # ty: ignore[missing-argument]
                client_id="test-client",
                client_secret="test-secret",
            )

    def test_initialization_requires_client_id(self):
        """Test that client_id is required."""
        with pytest.raises(TypeError):
            IntrospectionTokenVerifier(  # ty: ignore[missing-argument]
                introspection_url="https://auth.example.com/oauth/introspect",
                client_secret="test-secret",
            )

    def test_initialization_requires_client_secret(self):
        """Test that client_secret is required."""
        with pytest.raises(TypeError):
            IntrospectionTokenVerifier(  # ty: ignore[missing-argument]
                introspection_url="https://auth.example.com/oauth/introspect",
                client_id="test-client",
            )

    def test_create_basic_auth_header(self, verifier: IntrospectionTokenVerifier):
        """Test HTTP Basic Auth header creation."""
        auth_header = verifier._create_basic_auth_header()
        # Decode and verify
        assert auth_header.startswith("Basic ")
        encoded = auth_header[6:]
        decoded = base64.b64decode(encoded).decode("utf-8")
        assert decoded == "test-client:test-secret"

    def test_extract_scopes_from_string(self, verifier: IntrospectionTokenVerifier):
        """Test scope extraction from space-separated string."""
        response = {"scope": "read write admin"}
        scopes = verifier._extract_scopes(response)
        assert scopes == ["read", "write", "admin"]

    def test_extract_scopes_from_array(self, verifier: IntrospectionTokenVerifier):
        """Test scope extraction from array."""
        response = {"scope": ["read", "write", "admin"]}
        scopes = verifier._extract_scopes(response)
        assert scopes == ["read", "write", "admin"]

    def test_extract_scopes_missing(self, verifier: IntrospectionTokenVerifier):
        """Test scope extraction when scope field is missing."""
        response: dict[str, Any] = {}
        scopes = verifier._extract_scopes(response)
        assert scopes == []

    def test_extract_scopes_with_extra_whitespace(
        self, verifier: IntrospectionTokenVerifier
    ):
        """Test scope extraction handles extra whitespace."""
        response = {"scope": "  read   write  admin  "}
        scopes = verifier._extract_scopes(response)
        assert scopes == ["read", "write", "admin"]

    async def test_valid_token_verification(
        self, verifier: IntrospectionTokenVerifier, httpx_mock: HTTPXMock
    ):
        """Test successful token verification."""
        # Mock introspection endpoint
        httpx_mock.add_response(
            url="https://auth.example.com/oauth/introspect",
            method="POST",
            json={
                "active": True,
                "client_id": "user-123",
                "scope": "read write",
                "exp": int(time.time()) + 3600,
                "iat": int(time.time()),
                "sub": "user-123",
                "username": "testuser",
            },
        )
        access_token = await verifier.verify_token("test-token")
        assert access_token is not None
        assert access_token.client_id == "user-123"
        assert access_token.scopes == ["read", "write"]
        assert access_token.expires_at is not None
        # Full introspection response is preserved in claims.
        assert access_token.claims["active"] is True
        assert access_token.claims["username"] == "testuser"

    async def test_inactive_token_returns_none(
        self, verifier: IntrospectionTokenVerifier, httpx_mock: HTTPXMock
    ):
        """Test that inactive tokens return None."""
        httpx_mock.add_response(
            url="https://auth.example.com/oauth/introspect",
            method="POST",
            json={"active": False},
        )
        access_token = await verifier.verify_token("expired-token")
        assert access_token is None

    async def test_expired_token_returns_none(
        self, verifier: IntrospectionTokenVerifier, httpx_mock: HTTPXMock
    ):
        """Test that expired tokens return None."""
        httpx_mock.add_response(
            url="https://auth.example.com/oauth/introspect",
            method="POST",
            json={
                "active": True,
                "client_id": "user-123",
                "scope": "read",
                "exp": int(time.time()) - 3600,  # Expired 1 hour ago
            },
        )
        access_token = await verifier.verify_token("expired-token")
        assert access_token is None

    async def test_token_without_expiration(
        self, verifier: IntrospectionTokenVerifier, httpx_mock: HTTPXMock
    ):
        """Test token without expiration field."""
        httpx_mock.add_response(
            url="https://auth.example.com/oauth/introspect",
            method="POST",
            json={
                "active": True,
                "client_id": "user-123",
                "scope": "read",
            },
        )
        access_token = await verifier.verify_token("test-token")
        assert access_token is not None
        assert access_token.expires_at is None

    async def test_token_without_scopes(
        self, verifier: IntrospectionTokenVerifier, httpx_mock: HTTPXMock
    ):
        """Test token without scope field."""
        httpx_mock.add_response(
            url="https://auth.example.com/oauth/introspect",
            method="POST",
            json={
                "active": True,
                "client_id": "user-123",
            },
        )
        access_token = await verifier.verify_token("test-token")
        assert access_token is not None
        assert access_token.scopes == []

    async def test_required_scopes_validation(
        self,
        verifier_with_required_scopes: IntrospectionTokenVerifier,
        httpx_mock: HTTPXMock,
    ):
        """Test that required scopes are validated."""
        # Token with insufficient scopes
        httpx_mock.add_response(
            url="https://auth.example.com/oauth/introspect",
            method="POST",
            json={
                "active": True,
                "client_id": "user-123",
                "scope": "read",  # Missing 'write'
            },
        )
        access_token = await verifier_with_required_scopes.verify_token("test-token")
        assert access_token is None

    async def test_required_scopes_validation_success(
        self,
        verifier_with_required_scopes: IntrospectionTokenVerifier,
        httpx_mock: HTTPXMock,
    ):
        """Test successful validation with required scopes."""
        httpx_mock.add_response(
            url="https://auth.example.com/oauth/introspect",
            method="POST",
            json={
                "active": True,
                "client_id": "user-123",
                "scope": "read write admin",  # Has all required scopes
            },
        )
        access_token = await verifier_with_required_scopes.verify_token("test-token")
        assert access_token is not None
        assert set(access_token.scopes) >= {"read", "write"}

    async def test_http_error_returns_none(
        self, verifier: IntrospectionTokenVerifier, httpx_mock: HTTPXMock
    ):
        """Test that HTTP errors return None."""
        httpx_mock.add_response(
            url="https://auth.example.com/oauth/introspect",
            method="POST",
            status_code=500,
            text="Internal Server Error",
        )
        access_token = await verifier.verify_token("test-token")
        assert access_token is None

    async def test_authentication_failure_returns_none(
        self, verifier: IntrospectionTokenVerifier, httpx_mock: HTTPXMock
    ):
        """Test that authentication failures return None."""
        httpx_mock.add_response(
            url="https://auth.example.com/oauth/introspect",
            method="POST",
            status_code=401,
            text="Unauthorized",
        )
        access_token = await verifier.verify_token("test-token")
        assert access_token is None

    async def test_timeout_returns_none(
        self, verifier: IntrospectionTokenVerifier, httpx_mock: HTTPXMock
    ):
        """Test that timeouts return None."""
        from httpx import TimeoutException

        httpx_mock.add_exception(
            TimeoutException("Request timed out"),
            url="https://auth.example.com/oauth/introspect",
        )
        access_token = await verifier.verify_token("test-token")
        assert access_token is None

    async def test_malformed_json_returns_none(
        self, verifier: IntrospectionTokenVerifier, httpx_mock: HTTPXMock
    ):
        """Test that malformed JSON responses return None."""
        # A 200 response whose body fails response.json() is swallowed by the
        # verifier's catch-all handler.
        httpx_mock.add_response(
            url="https://auth.example.com/oauth/introspect",
            method="POST",
            status_code=200,
            text="not json",
        )
        access_token = await verifier.verify_token("test-token")
        assert access_token is None

    async def test_request_includes_correct_headers(
        self, verifier: IntrospectionTokenVerifier, httpx_mock: HTTPXMock
    ):
        """Test that the request includes correct headers and auth."""
        httpx_mock.add_response(
            url="https://auth.example.com/oauth/introspect",
            method="POST",
            json={"active": True, "client_id": "user-123"},
        )
        await verifier.verify_token("test-token")
        # Verify request was made with correct parameters
        request = httpx_mock.get_request()
        assert request is not None
        assert request.method == "POST"
        assert "Authorization" in request.headers
        assert request.headers["Authorization"].startswith("Basic ")
        assert request.headers["Content-Type"] == "application/x-www-form-urlencoded"
        assert request.headers["Accept"] == "application/json"

    async def test_request_includes_token_and_hint(
        self, verifier: IntrospectionTokenVerifier, httpx_mock: HTTPXMock
    ):
        """Test that the request includes token and token_type_hint."""
        httpx_mock.add_response(
            url="https://auth.example.com/oauth/introspect",
            method="POST",
            json={"active": True, "client_id": "user-123"},
        )
        await verifier.verify_token("my-test-token")
        request = httpx_mock.get_request()
        assert request is not None
        # Parse form data
        body = request.content.decode("utf-8")
        assert "token=my-test-token" in body
        assert "token_type_hint=access_token" in body

    async def test_client_id_fallback_to_sub(
        self, verifier: IntrospectionTokenVerifier, httpx_mock: HTTPXMock
    ):
        """Test that client_id falls back to sub if not present."""
        httpx_mock.add_response(
            url="https://auth.example.com/oauth/introspect",
            method="POST",
            json={
                "active": True,
                "sub": "user-456",
                "scope": "read",
            },
        )
        access_token = await verifier.verify_token("test-token")
        assert access_token is not None
        assert access_token.client_id == "user-456"

    async def test_client_id_defaults_to_unknown(
        self, verifier: IntrospectionTokenVerifier, httpx_mock: HTTPXMock
    ):
        """Test that client_id defaults to 'unknown' if neither client_id nor sub present."""
        httpx_mock.add_response(
            url="https://auth.example.com/oauth/introspect",
            method="POST",
            json={
                "active": True,
                "scope": "read",
            },
        )
        access_token = await verifier.verify_token("test-token")
        assert access_token is not None
        assert access_token.client_id == "unknown"

    def test_initialization_with_client_secret_post(self):
        """Test verifier initialization with client_secret_post method."""
        verifier = IntrospectionTokenVerifier(
            introspection_url="https://auth.example.com/oauth/introspect",
            client_id="test-client",
            client_secret="test-secret",
            client_auth_method="client_secret_post",
        )
        assert verifier.client_auth_method == "client_secret_post"
        assert verifier.introspection_url == "https://auth.example.com/oauth/introspect"
        assert verifier.client_id == "test-client"
        assert verifier.client_secret == "test-secret"

    def test_initialization_defaults_to_client_secret_basic(self):
        """Test that client_secret_basic is the default auth method."""
        verifier = IntrospectionTokenVerifier(
            introspection_url="https://auth.example.com/oauth/introspect",
            client_id="test-client",
            client_secret="test-secret",
        )
        assert verifier.client_auth_method == "client_secret_basic"

    def test_initialization_rejects_invalid_client_auth_method(self):
        """Test that invalid client_auth_method values are rejected."""
        # Test typo with trailing space
        with pytest.raises(ValueError) as exc_info:
            IntrospectionTokenVerifier(
                introspection_url="https://auth.example.com/oauth/introspect",
                client_id="test-client",
                client_secret="test-secret",
                client_auth_method="client_secret_basic ",  # ty: ignore[invalid-argument-type]
            )
        assert "Invalid client_auth_method" in str(exc_info.value)
        assert "client_secret_basic " in str(exc_info.value)
        # Test completely invalid value
        with pytest.raises(ValueError) as exc_info:
            IntrospectionTokenVerifier(
                introspection_url="https://auth.example.com/oauth/introspect",
                client_id="test-client",
                client_secret="test-secret",
                client_auth_method="basic",  # ty: ignore[invalid-argument-type]
            )
        assert "Invalid client_auth_method" in str(exc_info.value)
        assert "basic" in str(exc_info.value)

    async def test_client_secret_post_includes_credentials_in_body(
        self, httpx_mock: HTTPXMock
    ):
        """Test that client_secret_post includes credentials in POST body."""
        verifier = IntrospectionTokenVerifier(
            introspection_url="https://auth.example.com/oauth/introspect",
            client_id="test-client",
            client_secret="test-secret",
            client_auth_method="client_secret_post",
        )
        httpx_mock.add_response(
            url="https://auth.example.com/oauth/introspect",
            method="POST",
            json={"active": True, "client_id": "user-123"},
        )
        await verifier.verify_token("test-token")
        # Verify request was made with credentials in body, not header
        request = httpx_mock.get_request()
        assert request is not None
        assert request.method == "POST"
        assert "Authorization" not in request.headers
        assert request.headers["Content-Type"] == "application/x-www-form-urlencoded"
        assert request.headers["Accept"] == "application/json"
        # Parse form data
        body = request.content.decode("utf-8")
        assert "token=test-token" in body
        assert "token_type_hint=access_token" in body
        assert "client_id=test-client" in body
        assert "client_secret=test-secret" in body

    async def test_client_secret_post_verification_success(self, httpx_mock: HTTPXMock):
        """Test successful token verification with client_secret_post."""
        verifier = IntrospectionTokenVerifier(
            introspection_url="https://auth.example.com/oauth/introspect",
            client_id="test-client",
            client_secret="test-secret",
            client_auth_method="client_secret_post",
        )
        httpx_mock.add_response(
            url="https://auth.example.com/oauth/introspect",
            method="POST",
            json={
                "active": True,
                "client_id": "user-123",
                "scope": "read write",
                "exp": int(time.time()) + 3600,
            },
        )
        access_token = await verifier.verify_token("test-token")
        assert access_token is not None
        assert access_token.client_id == "user-123"
        assert access_token.scopes == ["read", "write"]

    async def test_client_secret_basic_still_works(
        self, verifier: IntrospectionTokenVerifier, httpx_mock: HTTPXMock
    ):
        """Test that client_secret_basic continues to work unchanged."""
        httpx_mock.add_response(
            url="https://auth.example.com/oauth/introspect",
            method="POST",
            json={"active": True, "client_id": "user-123"},
        )
        await verifier.verify_token("test-token")
        # Verify request was made with Basic Auth header
        request = httpx_mock.get_request()
        assert request is not None
        assert "Authorization" in request.headers
        assert request.headers["Authorization"].startswith("Basic ")
        # Verify credentials are NOT in body
        body = request.content.decode("utf-8")
        assert "client_id=" not in body
        assert "client_secret=" not in body
class TestIntrospectionCaching:
"""Test in-memory caching for token introspection."""
@pytest.fixture
def verifier_with_cache(self) -> IntrospectionTokenVerifier:
"""Create verifier with caching enabled."""
return IntrospectionTokenVerifier(
introspection_url="https://auth.example.com/oauth/introspect",
client_id="test-client",
client_secret="test-secret",
cache_ttl_seconds=300, # 5 minutes
max_cache_size=100,
)
@pytest.fixture
def verifier_no_cache(self) -> IntrospectionTokenVerifier:
"""Create verifier with caching disabled."""
return IntrospectionTokenVerifier(
introspection_url="https://auth.example.com/oauth/introspect",
client_id="test-client",
client_secret="test-secret",
cache_ttl_seconds=0, # Disabled
)
def test_default_cache_settings(self):
"""Test that caching is disabled by default."""
verifier = IntrospectionTokenVerifier(
introspection_url="https://auth.example.com/oauth/introspect",
client_id="test-client",
client_secret="test-secret",
)
assert verifier._cache_ttl == 0 # Disabled by default
assert verifier._max_cache_size == 10000
def test_custom_cache_settings(self):
"""Test that cache settings can be customized."""
verifier = IntrospectionTokenVerifier(
introspection_url="https://auth.example.com/oauth/introspect",
client_id="test-client",
client_secret="test-secret",
cache_ttl_seconds=60,
max_cache_size=500,
)
assert verifier._cache_ttl == 60
assert verifier._max_cache_size == 500
def test_cache_disabled_with_zero_ttl(self):
"""Test that cache is disabled when TTL is 0 or None."""
# Explicit 0
verifier = IntrospectionTokenVerifier(
introspection_url="https://auth.example.com/oauth/introspect",
client_id="test-client",
client_secret="test-secret",
cache_ttl_seconds=0,
)
assert verifier._cache_ttl == 0
# Explicit None (same as default)
verifier2 = IntrospectionTokenVerifier(
introspection_url="https://auth.example.com/oauth/introspect",
client_id="test-client",
client_secret="test-secret",
cache_ttl_seconds=None,
)
assert verifier2._cache_ttl == 0
async def test_cache_disabled_with_zero_or_negative_max_size(
self, httpx_mock: HTTPXMock
):
"""Test that cache is disabled when max_cache_size is 0 or negative."""
# Add two responses for the two verifiers
for _ in range(2):
httpx_mock.add_response(
url="https://auth.example.com/oauth/introspect",
method="POST",
json={
"active": True,
"client_id": "user-123",
"scope": "read",
},
)
# Zero max_cache_size should disable caching (not raise StopIteration)
verifier = IntrospectionTokenVerifier(
introspection_url="https://auth.example.com/oauth/introspect",
client_id="test-client",
client_secret="test-secret",
cache_ttl_seconds=300,
max_cache_size=0,
)
result = await verifier.verify_token("test-token")
assert result is not None
assert result.client_id == "user-123"
# Negative max_cache_size should also disable caching
verifier2 = IntrospectionTokenVerifier(
introspection_url="https://auth.example.com/oauth/introspect",
client_id="test-client",
client_secret="test-secret",
cache_ttl_seconds=300,
max_cache_size=-1,
)
result2 = await verifier2.verify_token("test-token")
assert result2 is not None
async def test_cache_hit_returns_cached_result(
self, verifier_with_cache: IntrospectionTokenVerifier, httpx_mock: HTTPXMock
):
"""Test that cached valid tokens are returned without introspection call."""
# First call - introspection endpoint called
httpx_mock.add_response(
url="https://auth.example.com/oauth/introspect",
method="POST",
json={
"active": True,
"client_id": "user-123",
"scope": "read write",
"exp": int(time.time()) + 3600,
},
)
# First verification
result1 = await verifier_with_cache.verify_token("test-token")
assert result1 is not None
assert result1.client_id == "user-123"
# Verify one request was made
requests = httpx_mock.get_requests()
assert len(requests) == 1
# Second verification - should use cache, no new request
result2 = await verifier_with_cache.verify_token("test-token")
assert result2 is not None
assert result2.client_id == "user-123"
# Still only one request
requests = httpx_mock.get_requests()
assert len(requests) == 1
async def test_cache_returns_defensive_copy(
self, verifier_with_cache: IntrospectionTokenVerifier, httpx_mock: HTTPXMock
):
"""Test that cached tokens are defensive copies (mutations don't leak)."""
httpx_mock.add_response(
url="https://auth.example.com/oauth/introspect",
method="POST",
json={
"active": True,
"client_id": "user-123",
"scope": "read write",
"exp": int(time.time()) + 3600,
"custom_claim": "original",
},
)
# First verification
result1 = await verifier_with_cache.verify_token("test-token")
assert result1 is not None
assert result1.claims["custom_claim"] == "original"
# Mutate the result (simulating request-path code adding derived claims)
result1.claims["custom_claim"] = "mutated"
result1.claims["new_claim"] = "injected"
result1.scopes.append("admin")
# Second verification - should get clean copy, not mutated one
result2 = await verifier_with_cache.verify_token("test-token")
assert result2 is not None
assert result2.claims["custom_claim"] == "original"
assert "new_claim" not in result2.claims
assert "admin" not in result2.scopes
# Verify they are different object instances
assert result1 is not result2
async def test_inactive_tokens_not_cached(
self, verifier_with_cache: IntrospectionTokenVerifier, httpx_mock: HTTPXMock
):
"""Test that inactive tokens are NOT cached (may become valid later)."""
# Add two responses - inactive tokens should trigger re-introspection
httpx_mock.add_response(
url="https://auth.example.com/oauth/introspect",
method="POST",
json={"active": False},
)
httpx_mock.add_response(
url="https://auth.example.com/oauth/introspect",
method="POST",
json={"active": False},
)
# First verification
result1 = await verifier_with_cache.verify_token("inactive-token")
assert result1 is None
# Verify one request was made
requests = httpx_mock.get_requests()
assert len(requests) == 1
# Second verification - should NOT use cache, makes another request
result2 = await verifier_with_cache.verify_token("inactive-token")
assert result2 is None
# Two requests made (inactive tokens not cached)
requests = httpx_mock.get_requests()
assert len(requests) == 2
async def test_cache_disabled_makes_every_call(
self, verifier_no_cache: IntrospectionTokenVerifier, httpx_mock: HTTPXMock
):
"""Test that with caching disabled, every call makes a request."""
# Add multiple responses for the same token
httpx_mock.add_response(
url="https://auth.example.com/oauth/introspect",
method="POST",
json={"active": True, "client_id": "user-123"},
)
httpx_mock.add_response(
url="https://auth.example.com/oauth/introspect",
method="POST",
json={"active": True, "client_id": "user-123"},
)
# First call
await verifier_no_cache.verify_token("test-token")
# Second call - should also make a request
await verifier_no_cache.verify_token("test-token")
# Two requests were made
requests = httpx_mock.get_requests()
assert len(requests) == 2
async def test_different_tokens_are_cached_separately(
self, verifier_with_cache: IntrospectionTokenVerifier, httpx_mock: HTTPXMock
):
"""Test that different tokens have separate cache entries."""
httpx_mock.add_response(
url="https://auth.example.com/oauth/introspect",
method="POST",
json={"active": True, "client_id": "user-1"},
)
httpx_mock.add_response(
url="https://auth.example.com/oauth/introspect",
method="POST",
json={"active": True, "client_id": "user-2"},
)
# Verify two different tokens
result1 = await verifier_with_cache.verify_token("token-1")
result2 = await verifier_with_cache.verify_token("token-2")
assert result1 is not None
assert result1.client_id == "user-1"
assert result2 is not None
assert result2.client_id == "user-2"
# Two requests were made
requests = httpx_mock.get_requests()
assert len(requests) == 2
# Verify both again - no new requests
await verifier_with_cache.verify_token("token-1")
await verifier_with_cache.verify_token("token-2")
requests = httpx_mock.get_requests()
assert len(requests) == 2
async def test_http_errors_are_not_cached(
self, verifier_with_cache: IntrospectionTokenVerifier, httpx_mock: HTTPXMock
):
"""Test that HTTP errors are not cached (transient failures)."""
# First call - HTTP error
httpx_mock.add_response(
url="https://auth.example.com/oauth/introspect",
method="POST",
status_code=500,
text="Internal Server Error",
)
# Second call - success
httpx_mock.add_response(
url="https://auth.example.com/oauth/introspect",
method="POST",
json={"active": True, "client_id": "user-123"},
)
# First verification - fails
result1 = await verifier_with_cache.verify_token("test-token")
assert result1 is None
# Second verification - should retry since error wasn't cached
result2 = await verifier_with_cache.verify_token("test-token")
assert result2 is not None
assert result2.client_id == "user-123"
# Two requests were made
requests = httpx_mock.get_requests()
assert len(requests) == 2
async def test_timeout_errors_are_not_cached(
self, verifier_with_cache: IntrospectionTokenVerifier, httpx_mock: HTTPXMock
):
"""Test that timeout errors are not cached (transient failures)."""
from httpx import TimeoutException
# First call - timeout
httpx_mock.add_exception(
TimeoutException("Request timed out"),
url="https://auth.example.com/oauth/introspect",
)
# Second call - success
httpx_mock.add_response(
url="https://auth.example.com/oauth/introspect",
method="POST",
json={"active": True, "client_id": "user-123"},
)
# First verification - times out
result1 = await verifier_with_cache.verify_token("test-token")
assert result1 is None
# Second verification - should retry since timeout wasn't cached
result2 = await verifier_with_cache.verify_token("test-token")
assert result2 is not None
# Two requests were made
requests = httpx_mock.get_requests()
assert len(requests) == 2
def test_token_hashing(self, verifier_with_cache: IntrospectionTokenVerifier):
"""Test that tokens are hashed consistently."""
hash1 = verifier_with_cache._hash_token("test-token")
hash2 = verifier_with_cache._hash_token("test-token")
hash3 = verifier_with_cache._hash_token("different-token")
# Same token produces same hash
assert hash1 == hash2
# Different tokens produce different hashes
assert hash1 != hash3
# Hash is a hex string (SHA-256 = 64 chars)
assert len(hash1) == 64
async def test_cache_respects_token_expiration(
self, verifier_with_cache: IntrospectionTokenVerifier, httpx_mock: HTTPXMock
):
"""Test that cache respects token's exp claim for TTL."""
# Token expiring in 60 seconds (shorter than cache TTL of 300)
short_exp = int(time.time()) + 60
httpx_mock.add_response(
url="https://auth.example.com/oauth/introspect",
method="POST",
json={
"active": True,
"client_id": "user-123",
"exp": short_exp,
},
)
await verifier_with_cache.verify_token("test-token")
# Check that cache entry uses the shorter expiration
cache_key = verifier_with_cache._hash_token("test-token")
entry = verifier_with_cache._cache[cache_key]
# Cache expiration should be at or before token expiration
assert entry.expires_at <= short_exp
async def test_expired_cache_entry_triggers_new_introspection(
self, httpx_mock: HTTPXMock
):
"""Test that expired cache entries are evicted and a new call is made."""
verifier = IntrospectionTokenVerifier(
introspection_url="https://auth.example.com/oauth/introspect",
client_id="test-client",
client_secret="test-secret",
cache_ttl_seconds=1, # 1 second TTL
)
httpx_mock.add_response(
url="https://auth.example.com/oauth/introspect",
method="POST",
json={"active": True, "client_id": "user-123"},
)
httpx_mock.add_response(
url="https://auth.example.com/oauth/introspect",
method="POST",
json={"active": True, "client_id": "user-123"},
)
# First call — caches the result
await verifier.verify_token("test-token")
assert len(httpx_mock.get_requests()) == 1
# Expire the cache entry manually
cache_key = verifier._hash_token("test-token")
verifier._cache[cache_key].expires_at = time.time() - 1
# Second call — cache miss, new introspection
await verifier.verify_token("test-token")
assert len(httpx_mock.get_requests()) == 2
async def test_cache_eviction_at_max_size(self, httpx_mock: HTTPXMock):
"""Test that cache evicts entries when max size is reached."""
verifier = IntrospectionTokenVerifier(
introspection_url="https://auth.example.com/oauth/introspect",
client_id="test-client",
client_secret="test-secret",
cache_ttl_seconds=300,
max_cache_size=2,
)
for i in range(3):
httpx_mock.add_response(
url="https://auth.example.com/oauth/introspect",
method="POST",
json={"active": True, "client_id": f"user-{i}"},
)
# Fill cache to capacity
await verifier.verify_token("token-0")
await verifier.verify_token("token-1")
assert len(verifier._cache) == 2
# Third token should evict the oldest entry
await verifier.verify_token("token-2")
assert len(verifier._cache) == 2
# token-0 should have been evicted (FIFO)
hash_0 = verifier._hash_token("token-0")
assert hash_0 not in verifier._cache
class TestIntrospectionTokenVerifierIntegration:
    """Integration tests wiring the verifier into a FastMCP server."""

    async def test_verifier_used_by_fastmcp(self):
        """IntrospectionTokenVerifier plugs directly into FastMCP's auth slot."""
        from fastmcp import FastMCP

        verifier = IntrospectionTokenVerifier(
            introspection_url="https://auth.example.com/oauth/introspect",
            client_id="test-client",
            client_secret="test-secret",
        )
        # Constructing an auth-protected server must not raise.
        mcp = FastMCP("Test Server", auth=verifier)

        @mcp.tool()
        def greet(name: str) -> str:
            """Greet someone."""
            return f"Hello, {name}!"

        # The verifier is installed exactly as provided.
        assert mcp.auth is verifier
        # The registered tool remains discoverable.
        listed = await mcp.list_tools()
        assert len(list(listed)) == 1
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "tests/server/auth/providers/test_introspection.py",
"license": "Apache License 2.0",
"lines": 827,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
PrefectHQ/fastmcp:tests/client/test_oauth_callback_xss.py | """Comprehensive XSS protection tests for OAuth callback HTML rendering."""
import pytest
from fastmcp.client.oauth_callback import create_callback_html
from fastmcp.utilities.ui import (
create_detail_box,
create_info_box,
create_page,
create_status_message,
)
def test_ui_create_page_escapes_title():
    """Test that page title is properly escaped.

    Note: the positive assertion checks for the HTML-entity form produced by
    html.escape(); the original source had been corrupted by entity decoding,
    making the two assertions contradict each other.
    """
    xss_title = "<script>alert(1)</script>"
    html = create_page("content", title=xss_title)
    # Escaped form must be present; the raw payload must not survive.
    assert "&lt;script&gt;alert(1)&lt;/script&gt;" in html
    assert "<script>alert(1)</script>" not in html
def test_ui_create_status_message_escapes():
    """Test that status messages are properly escaped."""
    xss_message = "<img src=x onerror=alert(1)>"
    html = create_status_message(xss_message)
    # Escaped form must be present; the raw payload must not survive.
    assert "&lt;img src=x onerror=alert(1)&gt;" in html
    assert "<img src=x onerror=alert(1)>" not in html
def test_ui_create_info_box_escapes():
    """Test that info box content is properly escaped."""
    xss_content = "<iframe src=javascript:alert(1)></iframe>"
    html = create_info_box(xss_content)
    # Escaped form must be present; the raw payload must not survive.
    assert "&lt;iframe" in html
    assert "<iframe src=javascript:alert(1)>" not in html
def test_ui_create_detail_box_escapes():
    """Test that detail box labels and values are properly escaped."""
    xss_label = '<script>alert("label")</script>'
    xss_value = '<script>alert("value")</script>'
    html = create_detail_box([(xss_label, xss_value)])
    # Escaped form must be present; the raw payloads must not survive.
    assert "&lt;script&gt;" in html
    assert '<script>alert("label")</script>' not in html
    assert '<script>alert("value")</script>' not in html
def test_callback_html_escapes_error_message():
    """Test that XSS payloads in error messages are properly escaped."""
    xss_payload = "<img/src/onerror=alert(1)>"
    html = create_callback_html(xss_payload, is_success=False)
    # Escaped form must be present; the raw payload must not survive.
    assert "&lt;img/src/onerror=alert(1)&gt;" in html
    assert "<img/src/onerror=alert(1)>" not in html
def test_callback_html_escapes_server_url():
    """Test that XSS payloads in server_url are properly escaped."""
    xss_payload = "<script>alert(1)</script>"
    html = create_callback_html("Success", is_success=True, server_url=xss_payload)
    # Escaped form must be present; the raw payload must not survive.
    assert "&lt;script&gt;alert(1)&lt;/script&gt;" in html
    assert "<script>alert(1)</script>" not in html
def test_callback_html_escapes_title():
    """Test that XSS payloads in title are properly escaped."""
    xss_payload = "<script>alert(document.domain)</script>"
    html = create_callback_html("Success", title=xss_payload)
    # Escaped form must be present; the raw payload must not survive.
    assert "&lt;script&gt;alert(document.domain)&lt;/script&gt;" in html
    assert "<script>alert(document.domain)</script>" not in html
def test_callback_html_mixed_content():
    """Test that legitimate text mixed with XSS attempts is properly escaped."""
    mixed_payload = "Error: <img src=x onerror=alert(1)> occurred"
    html = create_callback_html(mixed_payload, is_success=False)
    # The escaped payload and the surrounding legitimate text both appear.
    assert "&lt;img src=x onerror=alert(1)&gt;" in html
    assert "Error:" in html
    assert "occurred" in html
    # The raw payload must not survive.
    assert "<img src=x onerror=alert(1)>" not in html
def test_callback_html_event_handlers():
    """Test that event handler attributes are escaped."""
    xss_payload = '" onload="alert(1)'
    html = create_callback_html(xss_payload, is_success=False)
    # html.escape() turns double quotes into &quot;, defusing attribute injection.
    assert "&quot; onload=&quot;alert(1)" in html
    assert '" onload="alert(1)' not in html
def test_callback_html_special_characters():
    """Test that special HTML characters are properly escaped."""
    special_chars = "&<>\"'/"
    html = create_callback_html(special_chars, is_success=False)
    assert "&amp;" in html
    assert "&lt;" in html
    assert "&gt;" in html
    assert "&quot;" in html
    # Apostrophe gets escaped to &#x27; by html.escape()
    assert "&#x27;" in html
@pytest.mark.parametrize(
    "xss_vector",
    [
        "<img src=x onerror=alert(1)>",
        "<script>alert(document.cookie)</script>",
        "<iframe src=javascript:alert(1)>",
        "<svg/onload=alert(1)>",
        "<body onload=alert(1)>",
        "<input onfocus=alert(1) autofocus>",
        "<select onfocus=alert(1) autofocus>",
        "<textarea onfocus=alert(1) autofocus>",
        "<marquee onstart=alert(1)>",
        "<div style=background:url('javascript:alert(1)')>",
    ],
)
def test_common_xss_vectors(xss_vector: str):
    """Test that common XSS attack vectors are properly escaped."""
    html = create_callback_html(xss_vector, is_success=False)
    # Should not contain the raw XSS vector
    assert xss_vector not in html
    # Should contain the escaped version (at least < and > must be escaped)
    assert "&lt;" in html
    assert "&gt;" in html
def test_legitimate_content_still_works():
    """Escaping must not mangle ordinary, benign content."""
    message = "Authentication failed: Invalid credentials"
    url = "https://example.com:8080/mcp"
    # Failure page renders the message verbatim.
    failure_html = create_callback_html(message, is_success=False)
    assert message in failure_html
    assert "Authentication failed" in failure_html
    # Success page renders the server URL verbatim.
    success_html = create_callback_html("Success", is_success=True, server_url=url)
    assert url in success_html
    assert "Authentication successful" in success_html
def test_no_hardcoded_html_tags():
    """The template must not wrap the server URL in literal HTML tags."""
    server_url = "test-server"
    html = create_callback_html("Success", is_success=True, server_url=server_url)
    # No raw <strong> wrapper may be emitted around the URL...
    assert f"<strong>{server_url}</strong>" not in html
    # ...but the URL itself still shows up normally (escaped).
    assert server_url in html
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "tests/client/test_oauth_callback_xss.py",
"license": "Apache License 2.0",
"lines": 120,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
PrefectHQ/fastmcp:tests/server/test_input_validation.py | """
Tests for input validation behavior with strict_input_validation setting.
This module tests the difference between strict JSON schema validation (when
strict_input_validation=True) and Pydantic-based coercion (when
strict_input_validation=False, the default).
"""
import json
import pytest
from mcp.types import TextContent
from pydantic import BaseModel
from fastmcp import Client, FastMCP
class UserProfile(BaseModel):
    """A test model for validating Pydantic model arguments."""
    # Fields deliberately mix types so tests below can exercise coercion
    # (e.g. age supplied as the string "35").
    name: str
    age: int
    email: str
class TestStringToIntegerCoercion:
    """Exercise string-to-integer coercion under both validation modes."""

    async def test_string_integer_with_strict_validation(self):
        """Strict validation rejects integers supplied as strings."""
        server = FastMCP("TestServer", strict_input_validation=True)

        @server.tool
        def add_numbers(a: int, b: int) -> int:
            """Add two numbers together."""
            return a + b

        async with Client(server) as client:
            # Passing "10"/"20" must be rejected outright.
            with pytest.raises(Exception) as exc_info:
                await client.call_tool("add_numbers", {"a": "10", "b": "20"})
            # The failure should read as a validation/type problem.
            message = str(exc_info.value).lower()
            assert "validation" in message or "invalid" in message or "type" in message

    async def test_string_integer_without_strict_validation(self):
        """Lenient mode coerces string integers into ints."""
        server = FastMCP("TestServer", strict_input_validation=False)

        @server.tool
        def add_numbers(a: int, b: int) -> int:
            """Add two numbers together."""
            return a + b

        async with Client(server) as client:
            outcome = await client.call_tool("add_numbers", {"a": "10", "b": "20"})
            block = outcome.content[0]
            assert isinstance(block, TextContent)
            assert block.text == "30"

    async def test_default_is_not_strict(self):
        """strict_input_validation defaults to False."""
        server = FastMCP("TestServer")

        @server.tool
        def multiply(x: int, y: int) -> int:
            """Multiply two numbers."""
            return x * y

        async with Client(server) as client:
            # String integers are accepted out of the box.
            outcome = await client.call_tool("multiply", {"x": "5", "y": "3"})
            block = outcome.content[0]
            assert isinstance(block, TextContent)
            assert block.text == "15"

    async def test_string_float_coercion(self):
        """String floats are coerced just like string integers."""
        server = FastMCP("TestServer", strict_input_validation=False)

        @server.tool
        def calculate_area(length: float, width: float) -> float:
            """Calculate rectangle area."""
            return length * width

        async with Client(server) as client:
            outcome = await client.call_tool(
                "calculate_area", {"length": "10.5", "width": "20.0"}
            )
            block = outcome.content[0]
            assert isinstance(block, TextContent)
            assert block.text == "210.0"

    async def test_invalid_coercion_still_fails(self):
        """Inputs that cannot be coerced fail even in lenient mode."""
        server = FastMCP("TestServer", strict_input_validation=False)

        @server.tool
        def square(n: int) -> int:
            """Square a number."""
            return n * n

        async with Client(server) as client:
            # A non-numeric string has no valid integer interpretation.
            with pytest.raises(Exception):
                await client.call_tool("square", {"n": "not-a-number"})
class TestPydanticModelArguments:
    """Validation behavior for tools whose arguments are Pydantic models."""

    async def test_pydantic_model_with_dict_no_strict(self):
        """A plain dict argument populates a Pydantic model in lenient mode."""
        server = FastMCP("TestServer", strict_input_validation=False)

        @server.tool
        def create_user(profile: UserProfile) -> str:
            """Create a user from a profile."""
            return f"Created user {profile.name}, age {profile.age}"

        async with Client(server) as client:
            outcome = await client.call_tool(
                "create_user",
                {"profile": {"name": "Alice", "age": 30, "email": "alice@example.com"}},
            )
            block = outcome.content[0]
            assert isinstance(block, TextContent)
            assert "Alice" in block.text
            assert "30" in block.text

    async def test_pydantic_model_with_stringified_json_no_strict(self):
        """Document whether stringified JSON is accepted for model arguments."""
        server = FastMCP("TestServer", strict_input_validation=False)

        @server.tool
        def create_user(profile: UserProfile) -> str:
            """Create a user from a profile."""
            return f"Created user {profile.name}, age {profile.age}"

        async with Client(server) as client:
            # Some LLM clients serialize the object and send it as a string.
            payload = json.dumps({"name": "Bob", "age": 25, "email": "bob@example.com"})
            error_text = ""
            try:
                outcome = await client.call_tool("create_user", {"profile": payload})
                block = outcome.content[0]
                assert isinstance(block, TextContent)
                assert "Bob" in block.text
                accepted = True
            except Exception as exc:
                accepted = False
                error_text = str(exc)
            # Document the behavior either way.
            if accepted:
                # Desired behavior: stringified JSON is parsed transparently.
                pass
            else:
                # Otherwise the rejection must at least be a validation error.
                assert (
                    "validation" in error_text.lower()
                    or "invalid" in error_text.lower()
                )

    async def test_pydantic_model_with_coercion(self):
        """Nested model fields benefit from coercion in lenient mode."""
        server = FastMCP("TestServer", strict_input_validation=False)

        @server.tool
        def create_user(profile: UserProfile) -> str:
            """Create a user from a profile."""
            return f"Created user {profile.name}, age {profile.age}"

        async with Client(server) as client:
            # "age" arrives as a string and should be coerced to int.
            outcome = await client.call_tool(
                "create_user",
                {
                    "profile": {
                        "name": "Charlie",
                        "age": "35",  # String instead of int
                        "email": "charlie@example.com",
                    }
                },
            )
            block = outcome.content[0]
            assert isinstance(block, TextContent)
            assert "Charlie" in block.text
            assert "35" in block.text

    async def test_pydantic_model_strict_validation(self):
        """Strict mode enforces exact field types inside models."""
        server = FastMCP("TestServer", strict_input_validation=True)

        @server.tool
        def create_user(profile: UserProfile) -> str:
            """Create a user from a profile."""
            return f"Created user {profile.name}, age {profile.age}"

        async with Client(server) as client:
            # A string "age" must be rejected under strict validation.
            with pytest.raises(Exception):
                await client.call_tool(
                    "create_user",
                    {
                        "profile": {
                            "name": "Dave",
                            "age": "40",  # String instead of int
                            "email": "dave@example.com",
                        }
                    },
                )
class TestValidationErrorMessages:
    """Check that validation failures produce informative messages."""

    async def test_error_message_quality_strict(self):
        """Strict-mode errors mention validation, invalidity, or type."""
        server = FastMCP("TestServer", strict_input_validation=True)

        @server.tool
        def process_data(count: int, name: str) -> str:
            """Process some data."""
            return f"Processed {count} items for {name}"

        async with Client(server) as client:
            with pytest.raises(Exception) as exc_info:
                await client.call_tool(
                    "process_data", {"count": "not-a-number", "name": "test"}
                )
            # The message should clearly signal a validation/type failure.
            lowered = str(exc_info.value).lower()
            assert "validation" in lowered or "invalid" in lowered or "type" in lowered

    async def test_error_message_quality_pydantic(self):
        """Pydantic-path errors mention validation or invalidity."""
        server = FastMCP("TestServer", strict_input_validation=False)

        @server.tool
        def process_data(count: int, name: str) -> str:
            """Process some data."""
            return f"Processed {count} items for {name}"

        async with Client(server) as client:
            with pytest.raises(Exception) as exc_info:
                await client.call_tool(
                    "process_data", {"count": "not-a-number", "name": "test"}
                )
            # Pydantic errors are typically more detailed; still check keywords.
            lowered = str(exc_info.value).lower()
            assert "validation" in lowered or "invalid" in lowered

    async def test_missing_required_field_error(self):
        """Omitting a required argument names the field or says 'required'."""
        server = FastMCP("TestServer", strict_input_validation=False)

        @server.tool
        def greet(name: str, age: int) -> str:
            """Greet a person."""
            return f"Hello {name}, you are {age} years old"

        async with Client(server) as client:
            with pytest.raises(Exception) as exc_info:
                # 'age' is deliberately left out.
                await client.call_tool("greet", {"name": "Alice"})
            lowered = str(exc_info.value).lower()
            assert "age" in lowered or "required" in lowered
class TestEdgeCases:
    """Boundary conditions for input coercion."""

    async def test_optional_parameters_with_coercion(self):
        """Optional (defaulted) parameters are coerced like required ones."""
        server = FastMCP("TestServer", strict_input_validation=False)

        @server.tool
        def format_message(text: str, repeat: int = 1) -> str:
            """Format a message with optional repetition."""
            return text * repeat

        async with Client(server) as client:
            # "3" should be coerced for the optional int parameter.
            outcome = await client.call_tool(
                "format_message", {"text": "hi", "repeat": "3"}
            )
            block = outcome.content[0]
            assert isinstance(block, TextContent)
            assert block.text == "hihihi"

    async def test_none_values(self):
        """Explicit None is accepted for optional-typed parameters."""
        server = FastMCP("TestServer", strict_input_validation=False)

        @server.tool
        def process_optional(value: int | None) -> str:
            """Process an optional value."""
            return f"Value: {value}"

        async with Client(server) as client:
            outcome = await client.call_tool("process_optional", {"value": None})
            block = outcome.content[0]
            assert isinstance(block, TextContent)
            assert "None" in block.text

    async def test_empty_string_to_int(self):
        """An empty string is not a valid int and must fail."""
        server = FastMCP("TestServer", strict_input_validation=False)

        @server.tool
        def square(n: int) -> int:
            """Square a number."""
            return n * n

        async with Client(server) as client:
            with pytest.raises(Exception):
                await client.call_tool("square", {"n": ""})

    async def test_boolean_coercion(self):
        """The strings "true" and "false" are coerced to booleans."""
        server = FastMCP("TestServer", strict_input_validation=False)

        @server.tool
        def toggle(enabled: bool) -> str:
            """Toggle a feature."""
            return f"Feature is {'enabled' if enabled else 'disabled'}"

        async with Client(server) as client:
            # "true" coerces to True.
            on = await client.call_tool("toggle", {"enabled": "true"})
            assert isinstance(on.content[0], TextContent)
            assert "enabled" in on.content[0].text.lower()
            # "false" coerces to False.
            off = await client.call_tool("toggle", {"enabled": "false"})
            assert isinstance(off.content[0], TextContent)
            assert "disabled" in off.content[0].text.lower()

    async def test_list_of_integers_with_string_elements(self):
        """Elements of a list[int] are coerced individually."""
        server = FastMCP("TestServer", strict_input_validation=False)

        @server.tool
        def sum_numbers(numbers: list[int]) -> int:
            """Sum a list of numbers."""
            return sum(numbers)

        async with Client(server) as client:
            outcome = await client.call_tool("sum_numbers", {"numbers": ["1", "2", "3"]})
            block = outcome.content[0]
            assert isinstance(block, TextContent)
            assert block.text == "6"
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "tests/server/test_input_validation.py",
"license": "Apache License 2.0",
"lines": 292,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
PrefectHQ/fastmcp:tests/server/test_server_lifespan.py | """Tests for server_lifespan and session_lifespan behavior."""
from collections.abc import AsyncIterator
from contextlib import asynccontextmanager
from typing import Any
import pytest
from fastmcp import Client, FastMCP
from fastmcp.server.context import Context
from fastmcp.server.lifespan import ContextManagerLifespan, lifespan
from fastmcp.utilities.lifespan import combine_lifespans
class TestServerLifespan:
    """Test server_lifespan functionality.

    These tests are timing-sensitive: they assert on exactly when the lifespan
    context is entered and exited relative to nested client sessions.
    """
    async def test_server_lifespan_basic(self):
        """Test that server_lifespan is entered once and persists across sessions."""
        # Chronological record of lifespan enter/exit events.
        lifespan_events: list[str] = []
        @asynccontextmanager
        async def server_lifespan(mcp: FastMCP) -> AsyncIterator[dict[str, Any]]:
            lifespan_events.append("enter")
            try:
                yield {"initialized": True}
            finally:
                lifespan_events.append("exit")
        mcp = FastMCP("TestServer", lifespan=server_lifespan)
        @mcp.tool
        def get_value() -> str:
            return "test"
        # Server lifespan should be entered when run_async starts
        assert lifespan_events == []
        # Connect first client session
        async with Client(mcp) as client1:
            result1 = await client1.call_tool("get_value", {})
            assert result1.data == "test"
            # Server lifespan should have been entered once
            assert lifespan_events == ["enter"]
            # Connect second client session while first is still active
            async with Client(mcp) as client2:
                result2 = await client2.call_tool("get_value", {})
                assert result2.data == "test"
                # Server lifespan should still only have been entered once
                assert lifespan_events == ["enter"]
        # Because we're using a fastmcptransport, the server lifespan should be exited
        # when the client session closes
        assert lifespan_events == ["enter", "exit"]
    async def test_server_lifespan_overlapping_sessions(self):
        """Test that overlapping sessions keep lifespan active until all sessions close."""
        lifespan_events: list[str] = []
        # Sentinel that tracks whether the lifespan-managed "resource" is open.
        resource_state = "missing"
        @asynccontextmanager
        async def server_lifespan(mcp: FastMCP) -> AsyncIterator[dict[str, Any]]:
            nonlocal resource_state
            lifespan_events.append("enter")
            resource_state = "open"
            try:
                yield {"initialized": True}
            finally:
                resource_state = "closed"
                lifespan_events.append("exit")
        mcp = FastMCP("TestServer", lifespan=server_lifespan)
        @mcp.tool
        def get_resource_state() -> str:
            return resource_state
        async with Client(mcp) as client1:
            result1 = await client1.call_tool("get_resource_state", {})
            assert result1.data == "open"
            async with Client(mcp) as client2:
                result2 = await client2.call_tool("get_resource_state", {})
                assert result2.data == "open"
            # client2 exited while client1 is still active; lifespan should remain open
            result3 = await client1.call_tool("get_resource_state", {})
            assert result3.data == "open"
            assert lifespan_events == ["enter"]
        # Only once the last session closes does the lifespan exit.
        assert lifespan_events == ["enter", "exit"]
    async def test_server_lifespan_context_available(self):
        """Test that server_lifespan context is available to tools."""
        @asynccontextmanager
        async def server_lifespan(mcp: FastMCP) -> AsyncIterator[dict]:
            yield {"db_connection": "mock_db"}
        mcp = FastMCP("TestServer", lifespan=server_lifespan)
        @mcp.tool
        def get_db_info(ctx: Context) -> str:
            # Access the server lifespan context
            assert ctx.request_context is not None  # type narrowing for type checker
            lifespan_context = ctx.request_context.lifespan_context
            return lifespan_context.get("db_connection", "no_db")
        async with Client(mcp) as client:
            result = await client.call_tool("get_db_info", {})
            assert result.data == "mock_db"
class TestComposableLifespans:
"""Test composable lifespan functionality."""
async def test_lifespan_decorator_basic(self):
"""Test that the @lifespan decorator works like @asynccontextmanager."""
events: list[str] = []
@lifespan
async def my_lifespan(server: FastMCP) -> AsyncIterator[dict[str, Any]]:
events.append("enter")
try:
yield {"key": "value"}
finally:
events.append("exit")
mcp = FastMCP("TestServer", lifespan=my_lifespan)
@mcp.tool
def get_info(ctx: Context) -> str:
assert ctx.request_context is not None
lifespan_context = ctx.request_context.lifespan_context
return lifespan_context.get("key", "missing")
assert events == []
async with Client(mcp) as client:
result = await client.call_tool("get_info", {})
assert result.data == "value"
assert events == ["enter"]
assert events == ["enter", "exit"]
async def test_lifespan_composition_two(self):
"""Test composing two lifespans with |."""
events: list[str] = []
@lifespan
async def first_lifespan(server: FastMCP) -> AsyncIterator[dict[str, Any]]:
events.append("first_enter")
try:
yield {"first": "a"}
finally:
events.append("first_exit")
@lifespan
async def second_lifespan(server: FastMCP) -> AsyncIterator[dict[str, Any]]:
events.append("second_enter")
try:
yield {"second": "b"}
finally:
events.append("second_exit")
composed = first_lifespan | second_lifespan
mcp = FastMCP("TestServer", lifespan=composed)
@mcp.tool
def get_both(ctx: Context) -> dict:
assert ctx.request_context is not None
return dict(ctx.request_context.lifespan_context)
async with Client(mcp) as client:
result = await client.call_tool("get_both", {})
# Results should be merged
assert result.data == {"first": "a", "second": "b"}
# Should enter in order
assert events == ["first_enter", "second_enter"]
# Should exit in reverse order (LIFO)
assert events == ["first_enter", "second_enter", "second_exit", "first_exit"]
async def test_lifespan_composition_three(self):
"""Test composing three lifespans with |."""
events: list[str] = []
@lifespan
async def ls_a(server: FastMCP) -> AsyncIterator[dict[str, Any]]:
events.append("a_enter")
try:
yield {"a": 1}
finally:
events.append("a_exit")
@lifespan
async def ls_b(server: FastMCP) -> AsyncIterator[dict[str, Any]]:
events.append("b_enter")
try:
yield {"b": 2}
finally:
events.append("b_exit")
@lifespan
async def ls_c(server: FastMCP) -> AsyncIterator[dict[str, Any]]:
events.append("c_enter")
try:
yield {"c": 3}
finally:
events.append("c_exit")
composed = ls_a | ls_b | ls_c
mcp = FastMCP("TestServer", lifespan=composed)
@mcp.tool
def get_all(ctx: Context) -> dict:
assert ctx.request_context is not None
return dict(ctx.request_context.lifespan_context)
async with Client(mcp) as client:
result = await client.call_tool("get_all", {})
assert result.data == {"a": 1, "b": 2, "c": 3}
assert events == ["a_enter", "b_enter", "c_enter"]
assert events == [
"a_enter",
"b_enter",
"c_enter",
"c_exit",
"b_exit",
"a_exit",
]
async def test_lifespan_result_merge_later_wins(self):
"""Test that later lifespans overwrite earlier ones on key conflict."""
@lifespan
async def first(server: FastMCP) -> AsyncIterator[dict[str, Any]]:
yield {"key": "first", "only_first": "yes"}
@lifespan
async def second(server: FastMCP) -> AsyncIterator[dict[str, Any]]:
yield {"key": "second", "only_second": "yes"}
composed = first | second
mcp = FastMCP("TestServer", lifespan=composed)
@mcp.tool
def get_context(ctx: Context) -> dict:
assert ctx.request_context is not None
return dict(ctx.request_context.lifespan_context)
async with Client(mcp) as client:
result = await client.call_tool("get_context", {})
# "key" should be overwritten by second
assert result.data == {
"key": "second",
"only_first": "yes",
"only_second": "yes",
}
async def test_lifespan_composition_with_context_manager_lifespan(self):
"""Test composing with ContextManagerLifespan for @asynccontextmanager functions."""
events: list[str] = []
@asynccontextmanager
async def legacy_lifespan(server: FastMCP) -> AsyncIterator[dict[str, Any]]:
events.append("legacy_enter")
try:
yield {"legacy": True}
finally:
events.append("legacy_exit")
@lifespan
async def new_lifespan(server: FastMCP) -> AsyncIterator[dict[str, Any]]:
events.append("new_enter")
try:
yield {"new": True}
finally:
events.append("new_exit")
# Wrap the @asynccontextmanager function explicitly
composed = ContextManagerLifespan(legacy_lifespan) | new_lifespan
mcp = FastMCP("TestServer", lifespan=composed)
@mcp.tool
def get_context(ctx: Context) -> dict:
assert ctx.request_context is not None
return dict(ctx.request_context.lifespan_context)
async with Client(mcp) as client:
result = await client.call_tool("get_context", {})
assert result.data == {"legacy": True, "new": True}
assert events == [
"legacy_enter",
"new_enter",
"new_exit",
"legacy_exit",
]
async def test_backwards_compatibility_asynccontextmanager(self):
    """Test that existing @asynccontextmanager lifespans still work."""

    @asynccontextmanager
    async def vintage_lifespan(server: FastMCP) -> AsyncIterator[dict[str, Any]]:
        yield {"old_style": True}

    # Passing a bare @asynccontextmanager function straight to FastMCP
    # (no wrapping, no composition) must keep working.
    mcp = FastMCP("TestServer", lifespan=vintage_lifespan)

    @mcp.tool
    def get_context(ctx: Context) -> dict:
        assert ctx.request_context is not None
        return dict(ctx.request_context.lifespan_context)

    async with Client(mcp) as client:
        result = await client.call_tool("get_context", {})
        assert result.data == {"old_style": True}
async def test_lifespan_or_requires_lifespan_instance(self):
    """Test that | operator requires Lifespan instances and gives helpful error."""

    @lifespan
    async def wrapped(server: FastMCP) -> AsyncIterator[dict[str, Any]]:
        yield {"key": "value"}

    @asynccontextmanager
    async def unwrapped(server: FastMCP) -> AsyncIterator[dict[str, Any]]:
        yield {"regular": True}

    # Composing with a non-Lifespan must fail loudly, and the error text
    # should point the user at ContextManagerLifespan as the fix.
    with pytest.raises(TypeError) as exc_info:
        wrapped | unwrapped  # type: ignore[operator]
    assert "ContextManagerLifespan" in str(exc_info.value)
class TestCombineLifespans:
    """Tests for the combine_lifespans utility function."""

    async def test_combine_lifespans_fastapi_style(self):
        """Test combining lifespans that yield None (FastAPI-style)."""
        log: list[str] = []

        @asynccontextmanager
        async def opener(app: Any) -> AsyncIterator[None]:
            log.append("first_enter")
            try:
                yield
            finally:
                log.append("first_exit")

        @asynccontextmanager
        async def closer(app: Any) -> AsyncIterator[None]:
            log.append("second_enter")
            try:
                yield
            finally:
                log.append("second_exit")

        combined = combine_lifespans(opener, closer)
        async with combined("mock_app") as state:
            # None-yielding lifespans contribute nothing to the merged state.
            assert state == {}
            assert log == ["first_enter", "second_enter"]
        # Teardown unwinds in LIFO order.
        assert log == ["first_enter", "second_enter", "second_exit", "first_exit"]

    async def test_combine_lifespans_fastmcp_style(self):
        """Test combining lifespans that yield dicts (FastMCP-style)."""
        log: list[str] = []

        @asynccontextmanager
        async def database(app: Any) -> AsyncIterator[dict[str, Any]]:
            log.append("db_enter")
            try:
                yield {"db": "connected"}
            finally:
                log.append("db_exit")

        @asynccontextmanager
        async def cache(app: Any) -> AsyncIterator[dict[str, Any]]:
            log.append("cache_enter")
            try:
                yield {"cache": "ready"}
            finally:
                log.append("cache_exit")

        combined = combine_lifespans(database, cache)
        async with combined("mock_app") as state:
            assert state == {"db": "connected", "cache": "ready"}
            assert log == ["db_enter", "cache_enter"]
        assert log == ["db_enter", "cache_enter", "cache_exit", "db_exit"]

    async def test_combine_lifespans_mixed_styles(self):
        """Test combining FastAPI-style (yield None) and FastMCP-style (yield dict)."""
        log: list[str] = []

        @asynccontextmanager
        async def plain(app: Any) -> AsyncIterator[None]:
            log.append("fastapi_enter")
            try:
                yield  # FastAPI-style: no state contributed
            finally:
                log.append("fastapi_exit")

        @asynccontextmanager
        async def stateful(app: Any) -> AsyncIterator[dict[str, Any]]:
            log.append("fastmcp_enter")
            try:
                yield {"mcp": "initialized"}  # FastMCP-style: contributes state
            finally:
                log.append("fastmcp_exit")

        combined = combine_lifespans(plain, stateful)
        async with combined("mock_app") as state:
            # Only the dict-yielding lifespan shows up in the merged result.
            assert state == {"mcp": "initialized"}
            assert log == ["fastapi_enter", "fastmcp_enter"]
        assert log == [
            "fastapi_enter",
            "fastmcp_enter",
            "fastmcp_exit",
            "fastapi_exit",
        ]

    async def test_combine_lifespans_result_merge_later_wins(self):
        """Test that later lifespans overwrite earlier ones on key conflict."""

        @asynccontextmanager
        async def earlier(app: Any) -> AsyncIterator[dict[str, Any]]:
            yield {"key": "first", "only_first": "yes"}

        @asynccontextmanager
        async def later(app: Any) -> AsyncIterator[dict[str, Any]]:
            yield {"key": "second", "only_second": "yes"}

        combined = combine_lifespans(earlier, later)
        async with combined("mock_app") as state:
            expected = {
                "key": "second",  # overwritten by the later lifespan
                "only_first": "yes",
                "only_second": "yes",
            }
            assert state == expected

    async def test_combine_lifespans_three(self):
        """Test combining three lifespans."""
        log: list[str] = []

        def tracked(tag: str, payload: dict[str, Any]):
            # Build a lifespan that records "<tag>_enter"/"<tag>_exit" and
            # yields `payload` as its contribution to the merged state.
            @asynccontextmanager
            async def _lifespan(app: Any) -> AsyncIterator[dict[str, Any]]:
                log.append(f"{tag}_enter")
                try:
                    yield payload
                finally:
                    log.append(f"{tag}_exit")

            return _lifespan

        combined = combine_lifespans(
            tracked("a", {"a": 1}), tracked("b", {"b": 2}), tracked("c", {"c": 3})
        )
        async with combined("mock_app") as state:
            assert state == {"a": 1, "b": 2, "c": 3}
            assert log == ["a_enter", "b_enter", "c_enter"]
        assert log == ["a_enter", "b_enter", "c_enter", "c_exit", "b_exit", "a_exit"]

    async def test_combine_lifespans_empty(self):
        """Test combining zero lifespans."""
        combined = combine_lifespans()
        async with combined("mock_app") as state:
            # With nothing to run, the merged state is simply empty.
            assert state == {}

    async def test_combine_lifespans_with_mapping_return_type(self):
        """Test combining lifespans that return Mapping (like Starlette's Lifespan).

        This verifies that combine_lifespans accepts lifespans returning
        Mapping[str, Any], which is the type that Starlette's Lifespan uses,
        not just dict[str, Any].
        """
        from collections.abc import Mapping

        log: list[str] = []

        @asynccontextmanager
        async def mapping_style(app: Any) -> AsyncIterator[Mapping[str, Any]]:
            """Simulates a Starlette-style lifespan that yields a Mapping."""
            log.append("mapping_enter")
            try:
                yield {"starlette_state": "initialized"}
            finally:
                log.append("mapping_exit")

        @asynccontextmanager
        async def dict_style(app: Any) -> AsyncIterator[dict[str, Any]]:
            log.append("dict_enter")
            try:
                yield {"fastmcp_state": "ready"}
            finally:
                log.append("dict_exit")

        combined = combine_lifespans(mapping_style, dict_style)
        async with combined("mock_app") as state:
            assert state == {
                "starlette_state": "initialized",
                "fastmcp_state": "ready",
            }
            assert log == ["mapping_enter", "dict_enter"]
        assert log == ["mapping_enter", "dict_enter", "dict_exit", "mapping_exit"]
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "tests/server/test_server_lifespan.py",
"license": "Apache License 2.0",
"lines": 429,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
PrefectHQ/fastmcp:src/fastmcp/utilities/ui.py | """
Shared UI utilities for FastMCP HTML pages.
This module provides reusable HTML/CSS components for OAuth callbacks,
consent pages, and other user-facing interfaces.
"""
from __future__ import annotations
import html
from starlette.responses import HTMLResponse
# FastMCP branding
FASTMCP_LOGO_URL = "https://gofastmcp.com/assets/brand/blue-logo.png"
# Base CSS styles shared across all FastMCP pages
BASE_STYLES = """
* {
margin: 0;
padding: 0;
box-sizing: border-box;
}
body {
font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, 'Helvetica Neue', Arial, sans-serif;
margin: 0;
padding: 0;
min-height: 100vh;
display: flex;
align-items: center;
justify-content: center;
background: #f9fafb;
color: #0a0a0a;
}
.container {
background: #ffffff;
border: 1px solid #e5e7eb;
padding: 3rem 2.5rem;
border-radius: 1rem;
box-shadow: 0 4px 6px -1px rgba(0, 0, 0, 0.1), 0 2px 4px -1px rgba(0, 0, 0, 0.06);
text-align: center;
max-width: 36rem;
margin: 1rem;
width: 100%;
}
@media (max-width: 640px) {
.container {
padding: 2rem 1.5rem;
margin: 0.5rem;
}
}
.logo {
width: 64px;
height: auto;
margin-bottom: 1.5rem;
display: block;
margin-left: auto;
margin-right: auto;
}
h1 {
font-size: 1.5rem;
font-weight: 600;
margin-bottom: 1.5rem;
color: #111827;
}
"""
# Button styles
BUTTON_STYLES = """
.button-group {
display: flex;
gap: 0.75rem;
margin-top: 1.5rem;
justify-content: center;
}
button {
padding: 0.75rem 2rem;
font-size: 0.9375rem;
font-weight: 500;
border-radius: 0.5rem;
border: none;
cursor: pointer;
transition: all 0.15s;
font-family: inherit;
}
button:hover {
transform: translateY(-1px);
box-shadow: 0 4px 6px -1px rgba(0, 0, 0, 0.1);
}
.btn-approve, .btn-primary {
background: #10b981;
color: #ffffff;
min-width: 120px;
}
.btn-deny, .btn-secondary {
background: #6b7280;
color: #ffffff;
min-width: 120px;
}
"""
# Info box / message box styles
INFO_BOX_STYLES = """
.info-box {
background: #f0f9ff;
border: 1px solid #bae6fd;
border-radius: 0.5rem;
padding: 1rem;
margin-bottom: 1.5rem;
text-align: left;
font-size: 0.9375rem;
line-height: 1.5;
color: #374151;
}
.info-box p {
margin-bottom: 0.5rem;
}
.info-box p:last-child {
margin-bottom: 0;
}
.info-box.centered {
text-align: center;
}
.info-box.error {
background: #fef2f2;
border-color: #fecaca;
color: #991b1b;
}
.info-box strong {
color: #0ea5e9;
font-weight: 600;
}
.info-box .server-name-link {
color: #0ea5e9;
text-decoration: underline;
font-weight: 600;
cursor: pointer;
transition: opacity 0.15s;
}
.info-box .server-name-link:hover {
opacity: 0.8;
}
/* Monospace info box - gray styling with code font */
.info-box-mono {
background: #f9fafb;
border: 1px solid #e5e7eb;
border-radius: 0.5rem;
padding: 0.875rem;
margin: 1.25rem 0;
font-size: 0.875rem;
color: #6b7280;
font-family: 'SF Mono', 'Monaco', 'Consolas', 'Courier New', monospace;
text-align: left;
}
.info-box-mono.centered {
text-align: center;
}
.info-box-mono.error {
background: #fef2f2;
border-color: #fecaca;
color: #991b1b;
}
.info-box-mono strong {
color: #111827;
font-weight: 600;
}
.warning-box {
background: #f0f9ff;
border: 1px solid #bae6fd;
border-radius: 0.5rem;
padding: 1rem;
margin-bottom: 1.5rem;
text-align: center;
}
.warning-box p {
margin-bottom: 0.5rem;
line-height: 1.5;
color: #6b7280;
font-size: 0.9375rem;
}
.warning-box p:last-child {
margin-bottom: 0;
}
.warning-box strong {
color: #0ea5e9;
font-weight: 600;
}
.warning-box a {
color: #0ea5e9;
text-decoration: underline;
font-weight: 600;
}
.warning-box a:hover {
color: #0284c7;
text-decoration: underline;
}
"""
# Status message styles (for success/error indicators)
STATUS_MESSAGE_STYLES = """
.status-message {
display: flex;
align-items: center;
justify-content: center;
gap: 0.75rem;
margin-bottom: 1.5rem;
}
.status-icon {
font-size: 1.5rem;
line-height: 1;
display: inline-flex;
align-items: center;
justify-content: center;
width: 2rem;
height: 2rem;
border-radius: 0.5rem;
flex-shrink: 0;
}
.status-icon.success {
background: #10b98120;
}
.status-icon.error {
background: #ef444420;
}
.message {
font-size: 1.125rem;
line-height: 1.75;
color: #111827;
font-weight: 600;
text-align: left;
}
"""
# Detail box styles (for key-value pairs)
DETAIL_BOX_STYLES = """
.detail-box {
background: #f9fafb;
border: 1px solid #e5e7eb;
border-radius: 0.5rem;
padding: 1rem;
margin-bottom: 1.5rem;
text-align: left;
}
.detail-row {
display: flex;
padding: 0.5rem 0;
border-bottom: 1px solid #e5e7eb;
}
.detail-row:last-child {
border-bottom: none;
}
.detail-label {
font-weight: 600;
min-width: 160px;
color: #6b7280;
font-size: 0.875rem;
flex-shrink: 0;
padding-right: 1rem;
}
.detail-value {
flex: 1;
font-family: 'SF Mono', 'Monaco', 'Consolas', 'Courier New', monospace;
font-size: 0.75rem;
color: #111827;
word-break: break-all;
overflow-wrap: break-word;
}
"""
# Redirect section styles (for OAuth redirect URI box)
REDIRECT_SECTION_STYLES = """
.redirect-section {
background: #fffbeb;
border: 1px solid #fcd34d;
border-radius: 0.5rem;
padding: 1rem;
margin-bottom: 1.5rem;
text-align: left;
}
.redirect-section .label {
font-size: 0.875rem;
color: #6b7280;
font-weight: 600;
margin-bottom: 0.5rem;
display: block;
}
.redirect-section .value {
font-family: 'SF Mono', 'Monaco', 'Consolas', 'Courier New', monospace;
font-size: 0.875rem;
color: #111827;
word-break: break-all;
margin-top: 0.25rem;
}
"""
# Collapsible details styles
DETAILS_STYLES = """
details {
margin-bottom: 1.5rem;
text-align: left;
}
summary {
cursor: pointer;
font-size: 0.875rem;
color: #6b7280;
font-weight: 600;
list-style: none;
padding: 0.5rem;
border-radius: 0.25rem;
}
summary:hover {
background: #f9fafb;
}
summary::marker {
display: none;
}
summary::before {
content: "▶";
display: inline-block;
margin-right: 0.5rem;
transition: transform 0.2s;
font-size: 0.75rem;
}
details[open] summary::before {
transform: rotate(90deg);
}
"""
# Helper text styles
HELPER_TEXT_STYLES = """
.close-instruction, .help-text {
font-size: 0.875rem;
color: #6b7280;
margin-top: 1.5rem;
}
"""
# Tooltip styles for hover help
TOOLTIP_STYLES = """
.help-link-container {
position: fixed;
bottom: 1.5rem;
right: 1.5rem;
font-size: 0.875rem;
}
.help-link {
color: #6b7280;
text-decoration: none;
cursor: help;
position: relative;
display: inline-block;
border-bottom: 1px dotted #9ca3af;
}
@media (max-width: 640px) {
.help-link {
background: #ffffff;
padding: 0.25rem 0.5rem;
border-radius: 0.25rem;
box-shadow: 0 1px 3px rgba(0, 0, 0, 0.1);
}
}
.help-link:hover {
color: #111827;
border-bottom-color: #111827;
}
.help-link:hover .tooltip {
opacity: 1;
visibility: visible;
}
.tooltip {
position: absolute;
bottom: 100%;
right: 0;
left: auto;
margin-bottom: 0.5rem;
background: #1f2937;
color: #ffffff;
padding: 0.75rem 1rem;
border-radius: 0.5rem;
font-size: 0.8125rem;
line-height: 1.5;
width: 280px;
max-width: calc(100vw - 3rem);
opacity: 0;
visibility: hidden;
transition: opacity 0.2s, visibility 0.2s;
box-shadow: 0 10px 15px -3px rgba(0, 0, 0, 0.1);
text-align: left;
}
.tooltip::after {
content: '';
position: absolute;
top: 100%;
right: 1rem;
border: 6px solid transparent;
border-top-color: #1f2937;
}
.tooltip-link {
color: #60a5fa;
text-decoration: underline;
}
"""
def create_page(
    content: str,
    title: str = "FastMCP",
    additional_styles: str = "",
    csp_policy: str = "default-src 'none'; style-src 'unsafe-inline'; img-src https: data:; base-uri 'none'",
) -> str:
    """
    Create a complete HTML page with FastMCP styling.

    Args:
        content: HTML content to place inside the page
        title: Page title (HTML-escaped before insertion)
        additional_styles: Extra CSS appended after the base styles
        csp_policy: Content Security Policy header value.
            If empty string "", the CSP meta tag is omitted entirely.

    Returns:
        Complete HTML page as string
    """
    safe_title = html.escape(title)
    if csp_policy:
        safe_policy = html.escape(csp_policy, quote=True)
        csp_meta = f'<meta http-equiv="Content-Security-Policy" content="{safe_policy}" />'
    else:
        # Empty policy means the caller explicitly opted out of the meta tag.
        csp_meta = ""
    return f"""
<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>{safe_title}</title>
    <style>
        {BASE_STYLES}
        {additional_styles}
    </style>
    {csp_meta}
</head>
<body>
    {content}
</body>
</html>
"""
def create_logo(icon_url: str | None = None, alt_text: str = "FastMCP") -> str:
"""Create logo HTML.
Args:
icon_url: Optional custom icon URL. If not provided, uses the FastMCP logo.
alt_text: Alt text for the logo image.
Returns:
HTML for logo image tag.
"""
url = icon_url or FASTMCP_LOGO_URL
alt = html.escape(alt_text)
return f'<img src="{html.escape(url)}" alt="{alt}" class="logo" />'
def create_status_message(message: str, is_success: bool = True) -> str:
    """
    Render a status banner with an icon.

    Args:
        message: Status message text (HTML-escaped before rendering)
        is_success: True for success (✓), False for error (✕)

    Returns:
        HTML for status message
    """
    if is_success:
        icon, icon_class = "✓", "success"
    else:
        icon, icon_class = "✕", "error"
    safe_message = html.escape(message)
    return f"""
    <div class="status-message">
        <span class="status-icon {icon_class}">{icon}</span>
        <div class="message">{safe_message}</div>
    </div>
    """
def create_info_box(
    content: str,
    is_error: bool = False,
    centered: bool = False,
    monospace: bool = False,
) -> str:
    """
    Create an info box.

    Args:
        content: Plain text for the info box (HTML-escaped before rendering)
        is_error: True for error styling, False for normal
        centered: True to center the text, False for left-aligned
        monospace: True to use gray monospace font styling instead of blue

    Returns:
        HTML for info box
    """
    escaped = html.escape(content)
    # CSS class order matters for readability only: base, then modifiers.
    css = "info-box-mono" if monospace else "info-box"
    if is_error:
        css += " error"
    if centered:
        css += " centered"
    return f'<div class="{css}">{escaped}</div>'
def create_detail_box(rows: list[tuple[str, str]]) -> str:
"""
Create a detail box with key-value pairs.
Args:
rows: List of (label, value) tuples
Returns:
HTML for detail box
"""
rows_html = "\n".join(
f"""
<div class="detail-row">
<div class="detail-label">{html.escape(label)}:</div>
<div class="detail-value">{html.escape(value)}</div>
</div>
"""
for label, value in rows
)
return f'<div class="detail-box">{rows_html}</div>'
def create_button_group(buttons: list[tuple[str, str, str]]) -> str:
    """
    Create a group of form submit buttons.

    Each button submits the enclosing form with ``name="action"`` and the
    given value. All three fields are HTML-escaped so caller-supplied text
    cannot inject markup — consistent with create_info_box / create_detail_box,
    which already escape their inputs.

    Args:
        buttons: List of (text, value, css_class) tuples

    Returns:
        HTML for button group
    """
    buttons_html = "\n".join(
        f'<button type="submit" name="action" value="{html.escape(value, quote=True)}" '
        f'class="{html.escape(css_class, quote=True)}">{html.escape(text)}</button>'
        for text, value, css_class in buttons
    )
    return f'<div class="button-group">{buttons_html}</div>'
def create_secure_html_response(html: str, status_code: int = 200) -> HTMLResponse:
    """
    Wrap HTML content in an HTMLResponse that carries security headers.

    X-Frame-Options: DENY prevents the page from being framed (clickjacking
    protection), per MCP security best practices.

    Args:
        html: HTML content to return
        status_code: HTTP status code

    Returns:
        HTMLResponse with security headers
    """
    # NOTE: the `html` parameter shadows the stdlib `html` module here; the
    # module is not used inside this function, so the shadowing is harmless.
    security_headers = {"X-Frame-Options": "DENY"}
    return HTMLResponse(
        content=html,
        status_code=status_code,
        headers=security_headers,
    )
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "src/fastmcp/utilities/ui.py",
"license": "Apache License 2.0",
"lines": 530,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | documentation |
PrefectHQ/fastmcp:tests/server/auth/test_oauth_consent_flow.py | """Tests for OAuth Proxy consent flow with server-side storage.
This test suite verifies:
1. OAuth transactions are stored in server-side storage (not in-memory)
2. Authorization codes are stored in server-side storage
3. Consent flow redirects correctly through /consent endpoint
4. CSRF protection works with cookies
5. State persists across storage backends
6. Security headers (X-Frame-Options) are set correctly
7. Cookie signing and tampering detection
8. Auto-approve behavior with valid cookies
9. Consent binding cookie prevents confused deputy attacks (GHSA-rww4-4w9c-7733)
"""
import re
import secrets
import time
from urllib.parse import parse_qs, urlparse
import pytest
from key_value.aio.stores.memory import MemoryStore
from mcp.server.auth.provider import AuthorizationParams
from mcp.shared.auth import OAuthClientInformationFull
from pydantic import AnyUrl
from starlette.applications import Starlette
from starlette.testclient import TestClient
from fastmcp.server.auth.auth import AccessToken, TokenVerifier
from fastmcp.server.auth.oauth_proxy import OAuthProxy
from fastmcp.server.auth.oauth_proxy.models import OAuthTransaction
class MockTokenVerifier(TokenVerifier):
    """Token verifier stub that accepts any token with fixed scopes."""

    def __init__(self):
        self.required_scopes = ["read", "write"]

    async def verify_token(self, token: str):
        """Accept every token; the returned grant expires in one hour."""
        expiry = int(time.time() + 3600)
        return AccessToken(
            token=token,
            client_id="mock-client",
            scopes=self.required_scopes,
            expires_at=expiry,
        )
class _Verifier(TokenVerifier):
    """Minimal token verifier for security tests."""

    def __init__(self):
        self.required_scopes = ["read"]

    async def verify_token(self, token: str):
        """Accept every token; the returned grant never expires."""
        return AccessToken(
            token=token,
            client_id="c",
            scopes=self.required_scopes,
            expires_at=None,
        )
@pytest.fixture
def storage():
    """Provide a fresh in-memory storage backend per test."""
    store = MemoryStore()
    return store
@pytest.fixture
def oauth_proxy_with_storage(storage):
    """OAuth proxy wired to the test's explicit storage backend."""
    verifier = MockTokenVerifier()
    return OAuthProxy(
        upstream_authorization_endpoint="https://github.com/login/oauth/authorize",
        upstream_token_endpoint="https://github.com/login/oauth/access_token",
        upstream_client_id="test-upstream-client",
        upstream_client_secret="test-upstream-secret",
        token_verifier=verifier,
        base_url="https://myserver.com",
        redirect_path="/auth/callback",
        # `storage` is the MemoryStore fixture injected by pytest.
        client_storage=storage,
        jwt_signing_key="test-secret",
    )
@pytest.fixture
def oauth_proxy_https():
    """OAuthProxy configured with HTTPS base_url for __Host- cookies."""
    verifier = _Verifier()
    return OAuthProxy(
        upstream_authorization_endpoint="https://github.com/login/oauth/authorize",
        upstream_token_endpoint="https://github.com/login/oauth/access_token",
        upstream_client_id="client-id",
        upstream_client_secret="client-secret",
        token_verifier=verifier,
        base_url="https://myserver.example",
        client_storage=MemoryStore(),
        jwt_signing_key="test-secret",
    )
async def _start_flow(
    proxy: OAuthProxy, client_id: str, redirect: str
) -> tuple[str, str]:
    """Register client and start auth; returns (txn_id, consent_url)."""

    def make_client_info() -> OAuthClientInformationFull:
        # A fresh object per call, mirroring how separate requests behave.
        return OAuthClientInformationFull(
            client_id=client_id,
            client_secret="s",
            redirect_uris=[AnyUrl(redirect)],
        )

    await proxy.register_client(make_client_info())
    params = AuthorizationParams(
        redirect_uri=AnyUrl(redirect),
        redirect_uri_provided_explicitly=True,
        state="client-state-xyz",
        code_challenge="challenge",
        scopes=["read"],
    )
    consent_url = await proxy.authorize(make_client_info(), params)
    query = parse_qs(urlparse(consent_url).query)
    return query["txn_id"][0], consent_url
def _extract_csrf(html: str) -> str | None:
"""Extract CSRF token from HTML form."""
m = re.search(r"name=\"csrf_token\"\s+value=\"([^\"]+)\"", html)
return m.group(1) if m else None
class TestServerSideStorage:
    """Tests verifying OAuth state is stored in AsyncKeyValue storage."""

    async def test_transaction_stored_in_storage_not_memory(
        self, oauth_proxy_with_storage, storage
    ):
        """Verify OAuth transactions are stored in AsyncKeyValue, not in-memory dict."""
        # Register client
        client = OAuthClientInformationFull(
            client_id="test-client",
            client_secret="test-secret",
            redirect_uris=[AnyUrl("http://localhost:54321/callback")],
        )
        await oauth_proxy_with_storage.register_client(client)
        # Start authorization flow
        params = AuthorizationParams(
            redirect_uri=AnyUrl("http://localhost:54321/callback"),
            redirect_uri_provided_explicitly=True,
            state="client-state-123",
            code_challenge="challenge-abc",
            scopes=["read", "write"],
        )
        redirect_url = await oauth_proxy_with_storage.authorize(client, params)
        # Extract transaction ID from consent redirect
        parsed = urlparse(redirect_url)
        assert "/consent" in parsed.path, "Should redirect to consent page"
        query_params = parse_qs(parsed.query)
        txn_id = query_params["txn_id"][0]
        # Verify transaction is NOT in the old in-memory dict
        # (the attribute should not exist or should be empty — guards against
        # a regression to the pre-storage implementation)
        assert (
            not hasattr(oauth_proxy_with_storage, "_oauth_transactions")
            or len(getattr(oauth_proxy_with_storage, "_oauth_transactions", {})) == 0
        )
        # Verify transaction IS in storage backend
        transaction = await storage.get(collection="mcp-oauth-transactions", key=txn_id)
        assert transaction is not None, "Transaction should be in storage"
        # Verify transaction has expected structure
        assert transaction["client_id"] == "test-client"
        assert transaction["client_redirect_uri"] == "http://localhost:54321/callback"
        assert transaction["client_state"] == "client-state-123"
        assert transaction["code_challenge"] == "challenge-abc"
        assert transaction["scopes"] == ["read", "write"]

    async def test_authorization_code_stored_in_storage(
        self, oauth_proxy_with_storage, storage
    ):
        """Verify authorization codes are stored in AsyncKeyValue storage."""
        # Register client
        client = OAuthClientInformationFull(
            client_id="test-client",
            client_secret="test-secret",
            redirect_uris=[AnyUrl("http://localhost:54321/callback")],
        )
        await oauth_proxy_with_storage.register_client(client)
        # Create a test app with OAuth routes
        app = Starlette(routes=oauth_proxy_with_storage.get_routes())
        with TestClient(app) as test_client:
            # Start authorization flow
            params = AuthorizationParams(
                redirect_uri=AnyUrl("http://localhost:54321/callback"),
                redirect_uri_provided_explicitly=True,
                state="client-state",
                code_challenge="challenge-xyz",
                scopes=["read"],
            )
            redirect_url = await oauth_proxy_with_storage.authorize(client, params)
            # Extract txn_id from consent redirect
            parsed = urlparse(redirect_url)
            query_params = parse_qs(parsed.query)
            txn_id = query_params["txn_id"][0]
            # Simulate consent approval
            # First, get the consent page to establish CSRF cookie
            consent_response = test_client.get(
                f"/consent?txn_id={txn_id}", follow_redirects=False
            )
            # Extract CSRF token from response (it's in the HTML form)
            csrf_token = None
            if consent_response.status_code == 200:
                # For this test, we'll generate a CSRF token manually
                # In production, this comes from the consent page HTML
                csrf_token = secrets.token_urlsafe(32)
            # Approve consent with CSRF token
            # Set cookies on client instance to avoid deprecation warning
            for k, v in consent_response.cookies.items():
                test_client.cookies.set(k, v)
            approval_response = test_client.post(
                "/consent",
                data={
                    "action": "approve",
                    "txn_id": txn_id,
                    "csrf_token": csrf_token if csrf_token else "",
                },
                follow_redirects=False,
            )
            # After approval, authorization code should be in storage
            # The code is returned in the redirect URL
            # (best-effort: only assert when the approval actually redirected
            # with a code — the CSRF token above is synthetic)
            if approval_response.status_code in (302, 303):
                location = approval_response.headers.get("location", "")
                callback_params = parse_qs(urlparse(location).query)
                if "code" in callback_params:
                    auth_code = callback_params["code"][0]
                    # Verify code is NOT in old in-memory dict
                    assert (
                        not hasattr(oauth_proxy_with_storage, "_client_codes")
                        or len(getattr(oauth_proxy_with_storage, "_client_codes", {}))
                        == 0
                    )
                    # Verify code IS in storage
                    code_data = await storage.get(
                        collection="mcp-authorization-codes", key=auth_code
                    )
                    assert code_data is not None, (
                        "Authorization code should be in storage"
                    )
                    assert code_data["client_id"] == "test-client"
                    assert code_data["scopes"] == ["read"]

    async def test_storage_collections_are_isolated(self, oauth_proxy_with_storage):
        """Verify that transactions, codes, and clients use separate collections."""
        # Register a client
        client = OAuthClientInformationFull(
            client_id="isolation-test-client",
            client_secret="test-secret",
            redirect_uris=[AnyUrl("http://localhost:12345/callback")],
        )
        await oauth_proxy_with_storage.register_client(client)
        # Start authorization to create transaction
        params = AuthorizationParams(
            redirect_uri=AnyUrl("http://localhost:12345/callback"),
            redirect_uri_provided_explicitly=True,
            state="test-state",
            code_challenge="test-challenge",
            scopes=["read"],
        )
        await oauth_proxy_with_storage.authorize(client, params)
        # Get all collections from storage (reaches into the proxy's private
        # storage handle — intentional white-box check)
        storage = oauth_proxy_with_storage._client_storage
        # Verify client is in client collection
        client_data = await storage.get(
            collection="mcp-oauth-proxy-clients", key="isolation-test-client"
        )
        assert client_data is not None
        # Verify we can list transactions separately
        # (This tests that collections are properly namespaced)
        transactions = await storage.keys(collection="mcp-oauth-transactions")
        assert len(transactions) > 0, "Should have at least one transaction"
        # Verify transaction keys don't collide with client keys
        for txn_key in transactions:
            assert txn_key != "isolation-test-client"
class TestConsentFlowRedirects:
    """Tests for consent flow redirect behavior."""

    async def test_authorize_redirects_to_consent_page(self, oauth_proxy_with_storage):
        """Verify authorize() redirects to /consent instead of upstream."""
        registration = OAuthClientInformationFull(
            client_id="consent-test-client",
            client_secret="test-secret",
            redirect_uris=[AnyUrl("http://localhost:8080/callback")],
        )
        await oauth_proxy_with_storage.register_client(registration)

        auth_params = AuthorizationParams(
            redirect_uri=AnyUrl("http://localhost:8080/callback"),
            redirect_uri_provided_explicitly=True,
            state="test-state",
            code_challenge="",
            scopes=["read"],
        )
        target = await oauth_proxy_with_storage.authorize(registration, auth_params)

        # The proxy must send the browser to its own consent page first,
        # never straight to the upstream provider.
        assert "/consent" in target
        assert "github.com" not in target
        assert "?txn_id=" in target

    async def test_consent_page_contains_transaction_id(self, oauth_proxy_with_storage):
        """Verify consent page receives and displays transaction ID."""
        registration = OAuthClientInformationFull(
            client_id="txn-test-client",
            client_secret="test-secret",
            redirect_uris=[AnyUrl("http://localhost:9090/callback")],
        )
        await oauth_proxy_with_storage.register_client(registration)

        auth_params = AuthorizationParams(
            redirect_uri=AnyUrl("http://localhost:9090/callback"),
            redirect_uri_provided_explicitly=True,
            state="test-state",
            code_challenge="test-challenge",
            scopes=["read", "write"],
        )
        target = await oauth_proxy_with_storage.authorize(registration, auth_params)

        # The consent redirect must carry a non-empty transaction id.
        txn_query = parse_qs(urlparse(target).query)
        assert "txn_id" in txn_query
        txn_id = txn_query["txn_id"][0]
        assert len(txn_id) > 0

        app = Starlette(routes=oauth_proxy_with_storage.get_routes())
        with TestClient(app) as test_client:
            response = test_client.get(
                f"/consent?txn_id={txn_id}", follow_redirects=False
            )
            assert response.status_code == 200
            # The page should reference the transaction, or at minimum
            # render recognizable consent UI.
            assert txn_id.encode() in response.content or b"consent" in response.content
class TestCSRFProtection:
    """Tests for CSRF protection in consent flow."""

    async def test_consent_requires_csrf_token(self, oauth_proxy_with_storage):
        """Verify consent submission requires valid CSRF token."""
        client = OAuthClientInformationFull(
            client_id="csrf-test-client",
            client_secret="test-secret",
            redirect_uris=[AnyUrl("http://localhost:7070/callback")],
        )
        await oauth_proxy_with_storage.register_client(client)
        params = AuthorizationParams(
            redirect_uri=AnyUrl("http://localhost:7070/callback"),
            redirect_uri_provided_explicitly=True,
            state="test-state",
            code_challenge="",
            scopes=["read"],
        )
        redirect_url = await oauth_proxy_with_storage.authorize(client, params)
        parsed = urlparse(redirect_url)
        query = parse_qs(parsed.query)
        txn_id = query["txn_id"][0]
        app = Starlette(routes=oauth_proxy_with_storage.get_routes())
        with TestClient(app) as test_client:
            # Try to submit consent WITHOUT CSRF token
            response = test_client.post(
                "/consent",
                data={"action": "approve", "txn_id": txn_id},
                # No CSRF token!
                follow_redirects=False,
            )
            # Should reject or require CSRF
            # (Implementation may vary - checking for error response)
            assert response.status_code in (
                400,
                403,
                302,
            )  # Error or redirect to error

    async def test_consent_cookie_established_on_page_visit(
        self, oauth_proxy_with_storage
    ):
        """Verify consent page establishes CSRF cookie."""
        client = OAuthClientInformationFull(
            client_id="cookie-test-client",
            client_secret="test-secret",
            redirect_uris=[AnyUrl("http://localhost:6060/callback")],
        )
        await oauth_proxy_with_storage.register_client(client)
        params = AuthorizationParams(
            redirect_uri=AnyUrl("http://localhost:6060/callback"),
            redirect_uri_provided_explicitly=True,
            state="test-state",
            code_challenge="",
            scopes=["read"],
        )
        redirect_url = await oauth_proxy_with_storage.authorize(client, params)
        parsed = urlparse(redirect_url)
        query = parse_qs(parsed.query)
        txn_id = query["txn_id"][0]
        app = Starlette(routes=oauth_proxy_with_storage.get_routes())
        with TestClient(app) as test_client:
            # Visit consent page
            response = test_client.get(
                f"/consent?txn_id={txn_id}", follow_redirects=False
            )
            # Should set cookies for CSRF protection
            assert response.status_code == 200
            # Cookie may be set via Set-Cookie header
            cookies = response.cookies
            # Look for any CSRF-related cookie (implementation dependent):
            # either a cookie was set, or the form embeds a csrf field.
            assert len(cookies) > 0 or "csrf" in response.text.lower(), (
                "Consent page should establish CSRF protection"
            )
class TestStoragePersistence:
    """Tests for state persistence across storage backends."""

    async def test_transaction_persists_after_retrieval(self, oauth_proxy_with_storage):
        """A transaction should remain readable until explicitly deleted."""
        oauth_client = OAuthClientInformationFull(
            client_id="persist-test-client",
            client_secret="test-secret",
            redirect_uris=[AnyUrl("http://localhost:5050/callback")],
        )
        await oauth_proxy_with_storage.register_client(oauth_client)
        auth_params = AuthorizationParams(
            redirect_uri=AnyUrl("http://localhost:5050/callback"),
            redirect_uri_provided_explicitly=True,
            state="persist-state",
            code_challenge="persist-challenge",
            scopes=["read"],
        )
        location = await oauth_proxy_with_storage.authorize(oauth_client, auth_params)
        transaction_id = parse_qs(urlparse(location).query)["txn_id"][0]
        storage = oauth_proxy_with_storage._client_storage
        # Two consecutive reads must both succeed (retrieval is not destructive).
        first = await storage.get(collection="mcp-oauth-transactions", key=transaction_id)
        assert first is not None
        second = await storage.get(collection="mcp-oauth-transactions", key=transaction_id)
        assert second is not None
        # Both reads see the same persisted payload.
        assert first["client_id"] == second["client_id"]
        assert first["client_state"] == second["client_state"]

    async def test_storage_uses_pydantic_adapter(self, oauth_proxy_with_storage):
        """The proxy's PydanticAdapter should round-trip transaction models."""
        oauth_client = OAuthClientInformationFull(
            client_id="pydantic-test-client",
            client_secret="test-secret",
            redirect_uris=[AnyUrl("http://localhost:4040/callback")],
        )
        await oauth_proxy_with_storage.register_client(oauth_client)
        auth_params = AuthorizationParams(
            redirect_uri=AnyUrl("http://localhost:4040/callback"),
            redirect_uri_provided_explicitly=True,
            state="pydantic-state",
            code_challenge="pydantic-challenge",
            scopes=["read", "write"],
        )
        location = await oauth_proxy_with_storage.authorize(oauth_client, auth_params)
        transaction_id = parse_qs(urlparse(location).query)["txn_id"][0]
        # Read back through the same adapter the proxy itself uses.
        model = await oauth_proxy_with_storage._transaction_store.get(key=transaction_id)
        assert isinstance(model, OAuthTransaction)
        assert model.client_id == "pydantic-test-client"
        assert model.client_state == "pydantic-state"
        assert model.code_challenge == "pydantic-challenge"
        assert model.scopes == ["read", "write"]
class TestConsentSecurity:
    """Tests for consent page security features.

    Covers clickjacking protection, CSRF-guarded approve/deny actions, the
    signed approval/denial cookies, and cookie-based consent auto-approval.
    """

    async def test_consent_sets_xfo_header(self, oauth_proxy_https):
        """Verify consent page sets X-Frame-Options header to prevent clickjacking."""
        txn_id, _ = await _start_flow(
            oauth_proxy_https, "client-a", "http://localhost:5001/callback"
        )
        app = Starlette(routes=oauth_proxy_https.get_routes())
        with TestClient(app) as c:
            r = c.get(f"/consent?txn_id={txn_id}")
            assert r.status_code == 200
            # DENY forbids framing entirely, blocking clickjacking overlays.
            assert r.headers.get("X-Frame-Options") == "DENY"

    async def test_deny_sets_cookie_and_redirects_with_error(self, oauth_proxy_https):
        """Verify denying consent sets signed cookie and redirects with error."""
        client_redirect = "http://localhost:5002/callback"
        txn_id, _ = await _start_flow(oauth_proxy_https, "client-b", client_redirect)
        app = Starlette(routes=oauth_proxy_https.get_routes())
        with TestClient(app) as c:
            # Fetch the page first to obtain the CSRF token embedded in it.
            consent = c.get(f"/consent?txn_id={txn_id}")
            csrf = _extract_csrf(consent.text)
            assert csrf
            # Persist consent page cookies on client instance to avoid per-request deprecation
            for k, v in consent.cookies.items():
                c.cookies.set(k, v)
            r = c.post(
                "/consent",
                data={"action": "deny", "txn_id": txn_id, "csrf_token": csrf},
                follow_redirects=False,
            )
            assert r.status_code in (302, 303)
            loc = r.headers.get("location", "")
            parsed = urlparse(loc)
            # Denial must bounce back to the client app, not to the upstream IdP.
            assert parsed.scheme == "http" and parsed.netloc.startswith("localhost")
            q = parse_qs(parsed.query)
            assert q.get("error") == ["access_denied"]
            # NOTE(review): assumes _start_flow seeds state "client-state-xyz"
            # for every flow — confirm against the helper's definition.
            assert q.get("state") == ["client-state-xyz"]
            # Signed denied cookie should be set
            assert "MCP_DENIED_CLIENTS" in ";\n".join(
                r.headers.get("set-cookie", "").splitlines()
            )

    async def test_approve_sets_cookie_and_redirects_to_upstream(
        self, oauth_proxy_https
    ):
        """Verify approving consent sets signed cookie and redirects to upstream."""
        txn_id, _ = await _start_flow(
            oauth_proxy_https, "client-c", "http://localhost:5003/callback"
        )
        app = Starlette(routes=oauth_proxy_https.get_routes())
        with TestClient(app) as c:
            consent = c.get(f"/consent?txn_id={txn_id}")
            csrf = _extract_csrf(consent.text)
            assert csrf
            # Persist the consent page's cookies so the POST carries them.
            for k, v in consent.cookies.items():
                c.cookies.set(k, v)
            r = c.post(
                "/consent",
                data={"action": "approve", "txn_id": txn_id, "csrf_token": csrf},
                follow_redirects=False,
            )
            assert r.status_code in (302, 303)
            loc = r.headers.get("location", "")
            # Approval forwards to the upstream IdP with our txn_id as state.
            assert loc.startswith("https://github.com/login/oauth/authorize")
            assert f"state={txn_id}" in loc
            # Signed approved cookie should be set with __Host- prefix for HTTPS
            set_cookie = ";\n".join(r.headers.get("set-cookie", "").splitlines())
            assert "__Host-MCP_APPROVED_CLIENTS" in set_cookie

    async def test_tampered_cookie_is_ignored(self, oauth_proxy_https):
        """Verify tampered approval cookie is ignored and consent page shown."""
        txn_id, _ = await _start_flow(
            oauth_proxy_https, "client-d", "http://localhost:5004/callback"
        )
        app = Starlette(routes=oauth_proxy_https.get_routes())
        with TestClient(app) as c:
            # Create a tampered cookie (invalid signature)
            # Value format: payload.signature; using wrong signature to force failure
            tampered_value = "W10=.invalidsig"
            c.cookies.set("__Host-MCP_APPROVED_CLIENTS", tampered_value)
            r = c.get(f"/consent?txn_id={txn_id}", follow_redirects=False)
            # Should not auto-redirect to upstream; should show consent page
            assert r.status_code == 200
            # httpx returns a URL object; compare path or stringify
            assert urlparse(str(r.request.url)).path == "/consent"

    async def test_autoapprove_cookie_skips_consent(self, oauth_proxy_https):
        """Verify valid approval cookie auto-approves and redirects to upstream."""
        client_id = "client-e"
        redirect = "http://localhost:5005/callback"
        txn_id, _ = await _start_flow(oauth_proxy_https, client_id, redirect)
        app = Starlette(routes=oauth_proxy_https.get_routes())
        with TestClient(app) as c:
            # Approve once to set approved cookie
            consent = c.get(f"/consent?txn_id={txn_id}")
            csrf = _extract_csrf(consent.text)
            for k, v in consent.cookies.items():
                c.cookies.set(k, v)
            r = c.post(
                "/consent",
                data={
                    "action": "approve",
                    "txn_id": txn_id,
                    "csrf_token": csrf if csrf else "",
                },
                follow_redirects=False,
            )
            # Extract approved cookie value
            set_cookie = ";\n".join(r.headers.get("set-cookie", "").splitlines())
            m = re.search(r"__Host-MCP_APPROVED_CLIENTS=([^;]+)", set_cookie)
            assert m, "approved cookie should be set"
            approved_cookie = m.group(1)
            # Start a new flow for the same client and redirect
            new_txn, _ = await _start_flow(oauth_proxy_https, client_id, redirect)
            # Should auto-redirect to upstream when visiting consent due to cookie
            c.cookies.set("__Host-MCP_APPROVED_CLIENTS", approved_cookie)
            r2 = c.get(f"/consent?txn_id={new_txn}", follow_redirects=False)
            assert r2.status_code in (302, 303)
            assert r2.headers.get("location", "").startswith(
                "https://github.com/login/oauth/authorize"
            )
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "tests/server/auth/test_oauth_consent_flow.py",
"license": "Apache License 2.0",
"lines": 557,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
PrefectHQ/fastmcp:src/fastmcp/server/auth/providers/supabase.py | """Supabase authentication provider for FastMCP.
This module provides SupabaseProvider - a complete authentication solution that integrates
with Supabase Auth's JWT verification, supporting Dynamic Client Registration (DCR)
for seamless MCP client authentication.
"""
from __future__ import annotations
from typing import Literal
import httpx
from pydantic import AnyHttpUrl
from starlette.responses import JSONResponse
from starlette.routing import Route
from fastmcp.server.auth import RemoteAuthProvider, TokenVerifier
from fastmcp.server.auth.providers.jwt import JWTVerifier
from fastmcp.utilities.auth import parse_scopes
from fastmcp.utilities.logging import get_logger
logger = get_logger(__name__)
class SupabaseProvider(RemoteAuthProvider):
    """Supabase metadata provider for DCR (Dynamic Client Registration).

    This provider implements Supabase Auth integration using metadata forwarding.
    This approach allows Supabase to handle the OAuth flow directly while FastMCP acts
    as a resource server, verifying JWTs issued by Supabase Auth.

    IMPORTANT SETUP REQUIREMENTS:

    1. Supabase Project Setup:
       - Create a Supabase project at https://supabase.com
       - Note your project URL (e.g., "https://abc123.supabase.co")
       - Configure your JWT algorithm in Supabase Auth settings (RS256 or ES256)
       - Asymmetric keys (RS256/ES256) are recommended for production

    2. JWT Verification:
       - FastMCP verifies JWTs using the JWKS endpoint at {project_url}{auth_route}/.well-known/jwks.json
       - JWTs are issued by {project_url}{auth_route}
       - Default auth_route is "/auth/v1" (can be customized for self-hosted setups)
       - Tokens are cached for up to 10 minutes by Supabase's edge servers
       - Algorithm must match your Supabase Auth configuration

    3. Authorization:
       - Supabase uses Row Level Security (RLS) policies for database authorization
       - OAuth-level scopes are an upcoming feature in Supabase Auth
       - Both approaches will be supported once scope handling is available

    For detailed setup instructions, see:
    https://supabase.com/docs/guides/auth/jwts

    Example:
        ```python
        from fastmcp.server.auth.providers.supabase import SupabaseProvider

        # Create Supabase metadata provider (JWT verifier created automatically)
        supabase_auth = SupabaseProvider(
            project_url="https://abc123.supabase.co",
            base_url="https://your-fastmcp-server.com",
            algorithm="ES256",  # Match your Supabase Auth configuration
        )

        # Use with FastMCP
        mcp = FastMCP("My App", auth=supabase_auth)
        ```
    """

    def __init__(
        self,
        *,
        project_url: AnyHttpUrl | str,
        base_url: AnyHttpUrl | str,
        auth_route: str = "/auth/v1",
        algorithm: Literal["RS256", "ES256"] = "ES256",
        required_scopes: list[str] | None = None,
        scopes_supported: list[str] | None = None,
        resource_name: str | None = None,
        resource_documentation: AnyHttpUrl | None = None,
        token_verifier: TokenVerifier | None = None,
    ):
        """Initialize Supabase metadata provider.

        Args:
            project_url: Your Supabase project URL (e.g., "https://abc123.supabase.co")
            base_url: Public URL of this FastMCP server
            auth_route: Supabase Auth route. Defaults to "/auth/v1". Can be customized
                for self-hosted Supabase Auth setups using custom routes.
            algorithm: JWT signing algorithm (RS256 or ES256). Must match your
                Supabase Auth configuration. Defaults to ES256.
            required_scopes: Optional list of scopes to require for all requests.
                Note: Supabase currently uses RLS policies for authorization. OAuth-level
                scopes are an upcoming feature.
            scopes_supported: Optional list of scopes to advertise in OAuth metadata.
                If None, uses required_scopes. Use this when the scopes clients should
                request differ from the scopes enforced on tokens.
            resource_name: Optional name for the protected resource metadata.
            resource_documentation: Optional documentation URL for the protected resource.
            token_verifier: Optional token verifier. If None, creates JWT verifier for Supabase
        """
        # Normalize both URLs to no trailing slash so the f-string joins
        # below never produce a double slash.
        self.project_url = str(project_url).rstrip("/")
        self.base_url = AnyHttpUrl(str(base_url).rstrip("/"))
        # Strip both edges: endpoints are built as {project_url}/{auth_route}/...
        self.auth_route = auth_route.strip("/")
        # Parse scopes if provided as string
        parsed_scopes = (
            parse_scopes(required_scopes) if required_scopes is not None else None
        )
        # Create default JWT verifier if none provided
        if token_verifier is None:
            logger.warning(
                "SupabaseProvider cannot validate token audience for the specific resource "
                "because Supabase Auth does not support RFC 8707 resource indicators. "
                "This may leave the server vulnerable to cross-server token replay."
            )
            token_verifier = JWTVerifier(
                jwks_uri=f"{self.project_url}/{self.auth_route}/.well-known/jwks.json",
                issuer=f"{self.project_url}/{self.auth_route}",
                algorithm=algorithm,
                audience="authenticated",
                required_scopes=parsed_scopes,
            )
        # Initialize RemoteAuthProvider with Supabase as the authorization server
        super().__init__(
            token_verifier=token_verifier,
            authorization_servers=[AnyHttpUrl(f"{self.project_url}/{self.auth_route}")],
            base_url=self.base_url,
            scopes_supported=scopes_supported,
            resource_name=resource_name,
            resource_documentation=resource_documentation,
        )

    def get_routes(
        self,
        mcp_path: str | None = None,
    ) -> list[Route]:
        """Get OAuth routes including Supabase authorization server metadata forwarding.

        This returns the standard protected resource routes plus an authorization server
        metadata endpoint that forwards Supabase's OAuth metadata to clients.

        Args:
            mcp_path: The path where the MCP endpoint is mounted (e.g., "/mcp")
                This is used to advertise the resource URL in metadata.
        """
        # Get the standard protected resource routes from RemoteAuthProvider
        routes = super().get_routes(mcp_path)

        async def oauth_authorization_server_metadata(request):
            """Proxy Supabase's OAuth authorization server metadata to the client verbatim."""
            try:
                async with httpx.AsyncClient() as client:
                    response = await client.get(
                        f"{self.project_url}/{self.auth_route}/.well-known/oauth-authorization-server"
                    )
                    response.raise_for_status()
                    metadata = response.json()
                    return JSONResponse(metadata)
            # Broad catch is deliberate: any upstream failure (network, HTTP
            # error, bad JSON) is reported to the client as a 500 with detail.
            except Exception as e:
                return JSONResponse(
                    {
                        "error": "server_error",
                        "error_description": f"Failed to fetch Supabase metadata: {e}",
                    },
                    status_code=500,
                )

        # Add Supabase authorization server metadata forwarding
        routes.append(
            Route(
                "/.well-known/oauth-authorization-server",
                endpoint=oauth_authorization_server_metadata,
                methods=["GET"],
            )
        )
        return routes
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "src/fastmcp/server/auth/providers/supabase.py",
"license": "Apache License 2.0",
"lines": 153,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | documentation |
PrefectHQ/fastmcp:tests/server/auth/providers/test_supabase.py | """Tests for Supabase Auth provider."""
from collections.abc import Generator
import httpx
import pytest
from fastmcp import Client, FastMCP
from fastmcp.client.transports import StreamableHttpTransport
from fastmcp.server.auth.providers.jwt import JWTVerifier
from fastmcp.server.auth.providers.supabase import SupabaseProvider
from fastmcp.utilities.tests import HeadlessOAuth, run_server_in_process
class TestSupabaseProvider:
    """Test Supabase Auth provider functionality."""

    def test_init_with_explicit_params(self):
        """Test SupabaseProvider initialization with explicit parameters."""
        provider = SupabaseProvider(
            project_url="https://abc123.supabase.co",
            base_url="https://myserver.com",
        )
        assert provider.project_url == "https://abc123.supabase.co"
        # AnyHttpUrl normalizes the base URL with a trailing slash.
        assert str(provider.base_url) == "https://myserver.com/"

    def test_environment_variable_loading(self):
        """Construct the provider with a second set of explicit parameters.

        NOTE(review): despite the name, no environment variables are set or
        read in this test — it passes values directly. Consider renaming or
        actually exercising env-based configuration.
        """
        provider = SupabaseProvider(
            project_url="https://env123.supabase.co",
            base_url="http://env-server.com",
        )
        assert provider.project_url == "https://env123.supabase.co"
        assert str(provider.base_url) == "http://env-server.com/"

    def test_project_url_normalization(self):
        """Test that project_url handles trailing slashes correctly."""
        # Without trailing slash
        provider1 = SupabaseProvider(
            project_url="https://abc123.supabase.co",
            base_url="https://myserver.com",
        )
        assert provider1.project_url == "https://abc123.supabase.co"
        # With trailing slash - should be stripped
        provider2 = SupabaseProvider(
            project_url="https://abc123.supabase.co/",
            base_url="https://myserver.com",
        )
        assert provider2.project_url == "https://abc123.supabase.co"

    def test_jwt_verifier_configured_correctly(self):
        """Test that JWT verifier is configured correctly."""
        provider = SupabaseProvider(
            project_url="https://abc123.supabase.co",
            base_url="https://myserver.com",
        )
        # Check that JWT verifier uses the correct endpoints (default auth_route)
        assert isinstance(provider.token_verifier, JWTVerifier)
        assert (
            provider.token_verifier.jwks_uri
            == "https://abc123.supabase.co/auth/v1/.well-known/jwks.json"
        )
        assert provider.token_verifier.issuer == "https://abc123.supabase.co/auth/v1"
        assert provider.token_verifier.algorithm == "ES256"

    def test_jwt_verifier_with_required_scopes(self):
        """Test that JWT verifier respects required_scopes."""
        provider = SupabaseProvider(
            project_url="https://abc123.supabase.co",
            base_url="https://myserver.com",
            required_scopes=["openid", "email"],
        )
        assert isinstance(provider.token_verifier, JWTVerifier)
        assert provider.token_verifier.required_scopes == ["openid", "email"]

    def test_authorization_servers_configured(self):
        """Test that authorization servers list is configured correctly."""
        provider = SupabaseProvider(
            project_url="https://abc123.supabase.co",
            base_url="https://myserver.com",
        )
        assert len(provider.authorization_servers) == 1
        assert (
            str(provider.authorization_servers[0])
            == "https://abc123.supabase.co/auth/v1"
        )

    @pytest.mark.parametrize(
        "algorithm",
        ["RS256", "ES256"],
    )
    def test_algorithm_configuration(self, algorithm):
        """Test that algorithm can be configured for different JWT signing methods."""
        provider = SupabaseProvider(
            project_url="https://abc123.supabase.co",
            base_url="https://myserver.com",
            algorithm=algorithm,
        )
        assert isinstance(provider.token_verifier, JWTVerifier)
        assert provider.token_verifier.algorithm == algorithm

    def test_algorithm_rejects_hs256(self):
        """Test that HS256 is rejected for Supabase's JWKS-based verifier."""
        with pytest.raises(ValueError, match="cannot be used with jwks_uri"):
            SupabaseProvider(
                project_url="https://abc123.supabase.co",
                base_url="https://myserver.com",
                # Deliberately outside the Literal["RS256", "ES256"] type.
                algorithm="HS256",  # type: ignore[arg-type]
            )

    def test_algorithm_default_es256(self):
        """Test that algorithm defaults to ES256 when not specified."""
        provider = SupabaseProvider(
            project_url="https://abc123.supabase.co",
            base_url="https://myserver.com",
        )
        assert isinstance(provider.token_verifier, JWTVerifier)
        assert provider.token_verifier.algorithm == "ES256"

    def test_algorithm_from_parameter(self):
        """Test that algorithm can be configured via parameter."""
        provider = SupabaseProvider(
            project_url="https://env123.supabase.co",
            base_url="https://envserver.com",
            algorithm="RS256",
        )
        assert isinstance(provider.token_verifier, JWTVerifier)
        assert provider.token_verifier.algorithm == "RS256"

    def test_custom_auth_route(self):
        """A custom auth_route is stripped of slashes and used in the JWKS URI."""
        provider = SupabaseProvider(
            project_url="https://abc123.supabase.co",
            base_url="https://myserver.com",
            auth_route="/custom/auth/route",
        )
        assert provider.auth_route == "custom/auth/route"
        assert isinstance(provider.token_verifier, JWTVerifier)
        assert (
            provider.token_verifier.jwks_uri
            == "https://abc123.supabase.co/custom/auth/route/.well-known/jwks.json"
        )

    def test_custom_auth_route_trailing_slash(self):
        """Trailing slashes on auth_route are stripped as well."""
        provider = SupabaseProvider(
            project_url="https://abc123.supabase.co",
            base_url="https://myserver.com",
            auth_route="/custom/auth/route/",
        )
        assert provider.auth_route == "custom/auth/route"
def run_mcp_server(host: str, port: int) -> None:
    """Run a Supabase-protected FastMCP server (executed in a subprocess)."""
    auth_provider = SupabaseProvider(
        project_url="https://test123.supabase.co",
        base_url="http://localhost:4321",
    )
    server = FastMCP(auth=auth_provider)

    @server.tool
    def add(a: int, b: int) -> int:
        return a + b

    server.run(host=host, port=port, transport="http")
@pytest.fixture
def mcp_server_url() -> Generator[str]:
    """Spawn the test server in a subprocess and yield its MCP endpoint URL."""
    with run_server_in_process(run_mcp_server) as base_url:
        yield base_url + "/mcp"
@pytest.fixture()
def client_with_headless_oauth(
    mcp_server_url: str,
) -> Generator[Client, None, None]:
    """Client with headless OAuth that bypasses browser interaction."""
    transport = StreamableHttpTransport(mcp_server_url)
    oauth = HeadlessOAuth(mcp_url=mcp_server_url)
    yield Client(transport=transport, auth=oauth)
class TestSupabaseProviderIntegration:
    """End-to-end tests against a live (subprocess) server instance."""

    async def test_unauthorized_access(self, mcp_server_url: str):
        """Requests without a bearer token must fail with HTTP 401."""
        with pytest.raises(httpx.HTTPStatusError) as exc_info:
            async with Client(mcp_server_url) as client:
                tools = await client.list_tools()  # noqa: F841
        assert isinstance(exc_info.value, httpx.HTTPStatusError)
        assert exc_info.value.response.status_code == 401
        # The assignment above must never have executed: connecting already
        # raised, so `tools` cannot exist in this frame.
        assert "tools" not in locals()

    # NOTE(review): disabled — presumably requires a real Supabase project /
    # credentials that the test environment lacks. Confirm before re-enabling.
    # async def test_authorized_access(self, client_with_headless_oauth: Client):
    #     async with client_with_headless_oauth:
    #         tools = await client_with_headless_oauth.list_tools()
    #         assert tools is not None
    #         assert len(tools) > 0
    #         assert "add" in tools
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "tests/server/auth/providers/test_supabase.py",
"license": "Apache License 2.0",
"lines": 171,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
PrefectHQ/fastmcp:tests/server/middleware/test_initialization_middleware.py | """Tests for middleware support during initialization."""
from collections.abc import Sequence
from typing import Any
import mcp.types as mt
import pytest
from mcp import McpError
from mcp.types import ErrorData, TextContent
from fastmcp import Client, FastMCP
from fastmcp.server.middleware import CallNext, Middleware, MiddlewareContext
from fastmcp.tools.tool import Tool
class InitializationMiddleware(Middleware):
    """Middleware that captures initialization details.

    Note: Session state is NOT available during on_initialize because
    the MCP session has not been established yet. Use instance variables
    to store data that needs to persist across the session.
    """

    def __init__(self):
        super().__init__()
        self.initialized = False
        self.client_info = None
        self.session_data = {}

    async def on_initialize(
        self,
        context: MiddlewareContext[mt.InitializeRequest],
        call_next: CallNext[mt.InitializeRequest, mt.InitializeResult | None],
    ) -> mt.InitializeResult | None:
        """Record that initialization happened and stash the client info."""
        self.initialized = True
        # Pull clientInfo out of the initialize params when present.
        params = getattr(context.message, "params", None)
        if params is not None and hasattr(params, "clientInfo"):
            self.client_info = params.clientInfo
        # Instance storage: context/session state is unavailable this early.
        self.session_data["client_initialized"] = True
        if self.client_info:
            self.session_data["client_name"] = getattr(
                self.client_info, "name", "unknown"
            )
        return await call_next(context)
class ClientDetectionMiddleware(Middleware):
    """Middleware that detects specific clients and modifies behavior.

    This demonstrates storing data in the middleware instance itself
    for cross-request access, since context state is request-scoped.
    """

    def __init__(self):
        super().__init__()
        self.is_test_client = False
        self.tools_modified = False
        self.initialization_called = False

    async def on_initialize(
        self,
        context: MiddlewareContext[mt.InitializeRequest],
        call_next: CallNext[mt.InitializeRequest, mt.InitializeResult | None],
    ) -> mt.InitializeResult | None:
        """Detect test client during initialization.

        For testing purposes the flag is always set; the instance variable
        (not context state) makes it visible to later requests.
        """
        self.initialization_called = True
        self.is_test_client = True
        return await call_next(context)

    async def on_list_tools(
        self,
        context: MiddlewareContext[mt.ListToolsRequest],
        call_next: CallNext[mt.ListToolsRequest, Sequence[Tool]],
    ) -> Sequence[Tool]:
        """Mark tools read-only when the client was detected at init time."""
        tools = await call_next(context)
        # Use the instance variable set during initialization.
        if self.is_test_client:
            for tool in tools:
                # A missing annotations attribute and an explicit None both
                # get a fresh container; getattr collapses the two checks.
                if getattr(tool, "annotations", None) is None:
                    tool.annotations = mt.ToolAnnotations()
                # Mark as read-only for test clients.
                tool.annotations.readOnlyHint = True
            self.tools_modified = True
        return tools
async def test_simple_initialization_hook():
    """The on_initialize hook should fire when a client connects."""
    server = FastMCP("TestServer")

    class SimpleInitMiddleware(Middleware):
        def __init__(self):
            super().__init__()
            self.called = False

        async def on_initialize(
            self,
            context: MiddlewareContext[mt.InitializeRequest],
            call_next: CallNext[mt.InitializeRequest, mt.InitializeResult | None],
        ) -> mt.InitializeResult | None:
            self.called = True
            return await call_next(context)

    middleware = SimpleInitMiddleware()
    server.add_middleware(middleware)
    # Opening the client performs the MCP handshake, which must hit the hook.
    async with Client(server):
        assert middleware.called is True, "on_initialize was not called"
async def test_middleware_receives_initialization():
    """Middleware should intercept the initialization request."""
    server = FastMCP("TestServer")
    middleware = InitializationMiddleware()
    server.add_middleware(middleware)

    @server.tool
    def test_tool(x: int) -> str:
        return f"Result: {x}"

    async with Client(server) as client:
        # Initialization already ran when the context manager opened.
        assert middleware.initialized is True
        # The server must still serve tool calls normally afterwards.
        outcome = await client.call_tool("test_tool", {"x": 42})
        first_item = outcome.content[0]
        assert isinstance(first_item, TextContent)
        assert first_item.text == "Result: 42"
async def test_client_detection_middleware():
    """Test middleware that detects specific clients and modifies behavior."""
    server = FastMCP("TestServer")
    middleware = ClientDetectionMiddleware()
    server.add_middleware(middleware)

    @server.tool
    def example_tool() -> str:
        return "example"

    # Connect with a client
    async with Client(server) as client:
        # Middleware should have been called during initialization
        assert middleware.initialization_called is True
        assert middleware.is_test_client is True
        # List tools to trigger modification
        tools = await client.list_tools()
        assert len(tools) == 1
        assert middleware.tools_modified is True
        # Check that the tool has the modified annotation
        # (readOnlyHint is set by ClientDetectionMiddleware.on_list_tools).
        tool = tools[0]
        assert tool.annotations is not None
        assert tool.annotations.readOnlyHint is True
async def test_multiple_middleware_initialization():
    """Several middleware instances should all observe initialization."""
    server = FastMCP("TestServer")
    init_mw = InitializationMiddleware()
    detect_mw = ClientDetectionMiddleware()
    for mw in (init_mw, detect_mw):
        server.add_middleware(mw)

    @server.tool
    def test_tool() -> str:
        return "test"

    async with Client(server) as client:
        # Both middleware saw the initialize request.
        assert init_mw.initialized is True
        assert detect_mw.initialization_called is True
        assert detect_mw.is_test_client is True
        # Listing tools exercises the detection middleware's list hook.
        await client.list_tools()
        assert detect_mw.tools_modified is True
async def test_session_state_persists_across_tool_calls():
    """Test that session-scoped state persists across multiple tool calls.

    Session state is only available after the session is established,
    so it can't be set during on_initialize. This test shows state set
    during one tool call is accessible in subsequent tool calls.
    """
    server = FastMCP("TestServer")

    class StateTrackingMiddleware(Middleware):
        def __init__(self):
            super().__init__()
            self.call_count = 0
            # Records the counter value observed at the START of each call.
            self.state_values = []

        async def on_call_tool(
            self,
            context: MiddlewareContext[mt.CallToolRequestParams],
            call_next: CallNext[mt.CallToolRequestParams, Any],
        ) -> Any:
            self.call_count += 1
            if context.fastmcp_context:
                # Read existing state
                counter = await context.fastmcp_context.get_state("call_counter")
                self.state_values.append(counter)
                # Increment and save
                new_counter = (counter or 0) + 1
                await context.fastmcp_context.set_state("call_counter", new_counter)
            return await call_next(context)

    middleware = StateTrackingMiddleware()
    server.add_middleware(middleware)

    @server.tool
    def test_tool() -> str:
        return "success"

    async with Client(server) as client:
        # First call - state should be None initially
        result = await client.call_tool("test_tool", {})
        assert isinstance(result.content[0], TextContent)
        assert result.content[0].text == "success"
        # Second call - state should show previous value (1)
        result = await client.call_tool("test_tool", {})
        assert isinstance(result.content[0], TextContent)
        # Third call - state should show previous value (2)
        result = await client.call_tool("test_tool", {})
        assert isinstance(result.content[0], TextContent)
        # Verify state persisted across calls within the session
        assert middleware.call_count == 3
        # First call saw None, second saw 1, third saw 2
        assert middleware.state_values == [None, 1, 2]
async def test_middleware_can_access_initialize_result():
    """call_next() during on_initialize should yield the InitializeResult.

    Verifies the initialize response flows back through the middleware
    chain rather than being sent only via the responder (fixes #2504).
    """
    server = FastMCP("TestServer")

    class ResponseCapturingMiddleware(Middleware):
        def __init__(self):
            super().__init__()
            self.initialize_result: mt.InitializeResult | None = None

        async def on_initialize(
            self,
            context: MiddlewareContext[mt.InitializeRequest],
            call_next: CallNext[mt.InitializeRequest, mt.InitializeResult | None],
        ) -> mt.InitializeResult | None:
            captured = await call_next(context)
            self.initialize_result = captured
            return captured

    middleware = ResponseCapturingMiddleware()
    server.add_middleware(middleware)
    async with Client(server):
        result = middleware.initialize_result
        assert result is not None
        assert isinstance(result, mt.InitializeResult)
        # Sanity-check the payload the client actually received.
        assert result.serverInfo.name == "TestServer"
        assert result.protocolVersion is not None
        assert result.capabilities is not None
async def test_middleware_mcp_error_during_initialization():
    """An McpError raised in on_initialize should propagate to the client."""
    server = FastMCP("TestServer")

    class ErrorThrowingMiddleware(Middleware):
        async def on_initialize(
            self,
            context: MiddlewareContext[mt.InitializeRequest],
            call_next: CallNext[mt.InitializeRequest, mt.InitializeResult | None],
        ) -> mt.InitializeResult | None:
            details = ErrorData(
                code=mt.INVALID_PARAMS, message="Invalid initialization parameters"
            )
            raise McpError(details)

    server.add_middleware(ErrorThrowingMiddleware())
    # Connecting triggers initialization, which must surface the error.
    with pytest.raises(McpError) as exc_info:
        async with Client(server):
            pass
    raised = exc_info.value.error
    assert raised.message == "Invalid initialization parameters"
    assert raised.code == mt.INVALID_PARAMS
async def test_middleware_mcp_error_before_call_next():
    """McpError raised before delegating to call_next reaches the client."""
    server = FastMCP("TestServer")

    class EarlyErrorMiddleware(Middleware):
        async def on_initialize(
            self,
            context: MiddlewareContext[mt.InitializeRequest],
            call_next: CallNext[mt.InitializeRequest, mt.InitializeResult | None],
        ) -> mt.InitializeResult | None:
            # Fail before the rest of the chain ever runs.
            raise McpError(
                ErrorData(code=mt.INVALID_REQUEST, message="Request validation failed")
            )

    server.add_middleware(EarlyErrorMiddleware())
    with pytest.raises(McpError) as exc_info:
        async with Client(server):
            pass
    assert exc_info.value.error.code == mt.INVALID_REQUEST
    assert exc_info.value.error.message == "Request validation failed"
async def test_middleware_mcp_error_after_call_next():
    """Test that McpError raised after call_next doesn't break the connection.

    When an error is raised after call_next, the responder has already completed,
    so the error is caught but not sent to the responder (checked via _completed flag).
    """
    server = FastMCP("TestServer")

    class PostProcessingErrorMiddleware(Middleware):
        def __init__(self):
            super().__init__()
            self.error_raised = False

        async def on_initialize(
            self,
            context: MiddlewareContext[mt.InitializeRequest],
            call_next: CallNext[mt.InitializeRequest, mt.InitializeResult | None],
        ) -> mt.InitializeResult | None:
            # The response has already been delivered by the time we raise.
            await call_next(context)
            self.error_raised = True
            raise McpError(
                ErrorData(code=mt.INTERNAL_ERROR, message="Post-processing failed")
            )

    middleware = PostProcessingErrorMiddleware()
    server.add_middleware(middleware)
    # Error is logged but not re-raised to prevent duplicate response
    async with Client(server):
        pass
    # The raise did happen — it just did not reach the client.
    assert middleware.error_raised is True
async def test_state_isolation_between_streamable_http_clients():
    """Test that different HTTP clients have isolated session state.

    Each client should have its own session ID and isolated state.
    """
    from fastmcp.client.transports import StreamableHttpTransport
    from fastmcp.server.context import Context
    from fastmcp.utilities.tests import run_server_async

    server = FastMCP("TestServer")

    @server.tool
    async def store_and_read(value: str, ctx: Context) -> dict:
        """Store a value and return session info."""
        # Read before write: "existing" reveals whether another session's
        # value leaked into this one.
        existing = await ctx.get_state("client_value")
        await ctx.set_state("client_value", value)
        return {
            "existing": existing,
            "stored": value,
            "session_id": ctx.session_id,
        }

    async with run_server_async(server, transport="streamable-http") as url:
        import json

        # Client 1 stores its value
        transport1 = StreamableHttpTransport(url=url)
        async with Client(transport=transport1) as client1:
            result1 = await client1.call_tool(
                "store_and_read", {"value": "client1-value"}
            )
            data1 = json.loads(result1.content[0].text)
            assert data1["existing"] is None
            assert data1["stored"] == "client1-value"
            session_id_1 = data1["session_id"]

        # Client 2 should have completely isolated state
        transport2 = StreamableHttpTransport(url=url)
        async with Client(transport=transport2) as client2:
            result2 = await client2.call_tool(
                "store_and_read", {"value": "client2-value"}
            )
            data2 = json.loads(result2.content[0].text)
            # Should NOT see client1's value
            assert data2["existing"] is None
            assert data2["stored"] == "client2-value"
            session_id_2 = data2["session_id"]

        # Session IDs should be different
        assert session_id_1 != session_id_2
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "tests/server/middleware/test_initialization_middleware.py",
"license": "Apache License 2.0",
"lines": 338,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
PrefectHQ/fastmcp:examples/auth/authkit_dcr/client.py | """OAuth client example for connecting to FastMCP servers.
This example demonstrates how to connect to an OAuth-protected FastMCP server.
To run:
python client.py
"""
import asyncio
from fastmcp.client import Client
from fastmcp.client.auth import OAuth
SERVER_URL = "http://127.0.0.1:8000/mcp"
async def main():
    """Connect to the OAuth-protected server and print its tool listing."""
    # AuthKit defaults DCR clients to client_secret_basic, which conflicts
    # with how MCP SDKs send credentials. Force "none" to register as a
    # public client and avoid token exchange errors.
    client_metadata = {"token_endpoint_auth_method": "none"}
    auth = OAuth(additional_client_metadata=client_metadata)

    async with Client(SERVER_URL, auth=auth) as client:
        assert await client.ping()
        print("Successfully authenticated!")

        tools = await client.list_tools()
        print(f"Available tools ({len(tools)}):")
        for tool in tools:
            print(f" - {tool.name}: {tool.description}")


if __name__ == "__main__":
    asyncio.run(main())
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "examples/auth/authkit_dcr/client.py",
"license": "Apache License 2.0",
"lines": 23,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
PrefectHQ/fastmcp:examples/auth/authkit_dcr/server.py | """AuthKit DCR server example for FastMCP.
This example demonstrates how to protect a FastMCP server with AuthKit DCR.
Required environment variables:
- AUTHKIT_DOMAIN: Your AuthKit domain (e.g., "https://your-app.authkit.app")
To run:
python server.py
"""
import os
from fastmcp import FastMCP
from fastmcp.server.auth.providers.workos import AuthKitProvider
# AuthKit acts as the authorization server; FastMCP only validates the tokens
# it issues. NOTE(review): an unset AUTHKIT_DOMAIN falls back to "" here —
# presumably AuthKitProvider rejects that at construction time; confirm.
auth = AuthKitProvider(
    authkit_domain=os.getenv("AUTHKIT_DOMAIN") or "",
    base_url="http://localhost:8000",
)

mcp = FastMCP("AuthKit DCR Example Server", auth=auth)


@mcp.tool
def echo(message: str) -> str:
    """Echo the provided message."""
    return message


if __name__ == "__main__":
    mcp.run(transport="http", port=8000)
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "examples/auth/authkit_dcr/server.py",
"license": "Apache License 2.0",
"lines": 21,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
PrefectHQ/fastmcp:examples/auth/scalekit_oauth/client.py | """OAuth client example for connecting to Scalekit-protected FastMCP servers.
This example demonstrates how to connect to a Scalekit OAuth-protected FastMCP server.
To run:
python client.py
"""
import asyncio
from fastmcp.client import Client
SERVER_URL = "http://127.0.0.1:8000/mcp"
async def _report_tools(client) -> None:
    """Print the server's tool listing."""
    tools = await client.list_tools()
    print(f"🔧 Available tools ({len(tools)}):")
    for tool in tools:
        print(f" - {tool.name}: {tool.description}")


async def main():
    """Authenticate against the Scalekit-protected server and exercise its tools."""
    try:
        async with Client(SERVER_URL, auth="oauth") as client:
            assert await client.ping()
            print("✅ Successfully authenticated with Scalekit!")

            await _report_tools(client)

            # Test calling a tool
            result = await client.call_tool("echo", {"message": "Hello from Scalekit!"})
            print(f"🎯 Echo result: {result}")

            # Test calling auth status tool
            auth_status = await client.call_tool("auth_status", {})
            print(f"👤 Auth status: {auth_status}")
    except Exception as e:
        print(f"❌ Authentication failed: {e}")
        raise


if __name__ == "__main__":
    asyncio.run(main())
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "examples/auth/scalekit_oauth/client.py",
"license": "Apache License 2.0",
"lines": 28,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
PrefectHQ/fastmcp:examples/auth/scalekit_oauth/server.py | """Scalekit OAuth server example for FastMCP.
This example demonstrates how to protect a FastMCP server with Scalekit OAuth.
Required environment variables:
- SCALEKIT_ENVIRONMENT_URL: Your Scalekit environment URL (e.g., "https://your-env.scalekit.com")
- SCALEKIT_RESOURCE_ID: Your Scalekit resource ID
Optional:
- SCALEKIT_REQUIRED_SCOPES: Comma-separated scopes tokens must include
- BASE_URL: Public URL where the FastMCP server is exposed (defaults to `http://localhost:8000/`)
To run:
python server.py
"""
import os
from fastmcp import FastMCP
from fastmcp.server.auth.providers.scalekit import ScalekitProvider
# Optional comma-separated scope list (e.g. "read,write"); blank entries are
# dropped, and an unset/empty variable yields None (no scope enforcement).
_raw_scopes = os.getenv("SCALEKIT_REQUIRED_SCOPES")
if _raw_scopes:
    required_scopes = [part.strip() for part in _raw_scopes.split(",") if part.strip()]
else:
    required_scopes = None
# Resource-server setup: incoming bearer tokens are validated against the
# Scalekit environment and must target the configured resource ID.
auth = ScalekitProvider(
    environment_url=os.getenv("SCALEKIT_ENVIRONMENT_URL")
    or "https://your-env.scalekit.com",
    resource_id=os.getenv("SCALEKIT_RESOURCE_ID") or "",
    base_url=os.getenv("BASE_URL", "http://localhost:8000/"),
    required_scopes=required_scopes,
)

mcp = FastMCP("Scalekit OAuth Example Server", auth=auth)


@mcp.tool
def echo(message: str) -> str:
    """Echo the provided message."""
    return message


@mcp.tool
def auth_status() -> dict:
    """Show Scalekit authentication status."""
    # In a real implementation, you would extract user info from the JWT token
    return {
        "message": "This tool requires authentication via Scalekit",
        "authenticated": True,
        "provider": "Scalekit",
    }


if __name__ == "__main__":
    mcp.run(transport="http", port=8000)
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "examples/auth/scalekit_oauth/server.py",
"license": "Apache License 2.0",
"lines": 43,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
PrefectHQ/fastmcp:src/fastmcp/server/auth/providers/scalekit.py | """Scalekit authentication provider for FastMCP.
This module provides ScalekitProvider - a complete authentication solution that integrates
with Scalekit's OAuth 2.1 and OpenID Connect services, supporting Resource Server
authentication for seamless MCP client authentication.
"""
from __future__ import annotations
import httpx
from pydantic import AnyHttpUrl
from starlette.responses import JSONResponse
from starlette.routing import Route
from fastmcp.server.auth import RemoteAuthProvider, TokenVerifier
from fastmcp.server.auth.providers.jwt import JWTVerifier
from fastmcp.utilities.auth import parse_scopes
from fastmcp.utilities.logging import get_logger
logger = get_logger(__name__)
class ScalekitProvider(RemoteAuthProvider):
    """Scalekit resource server provider for OAuth 2.1 authentication.

    This provider implements Scalekit integration using resource server pattern.
    FastMCP acts as a protected resource server that validates access tokens issued
    by Scalekit's authorization server.

    IMPORTANT SETUP REQUIREMENTS:

    1. Create an MCP Server in Scalekit Dashboard:
       - Go to your [Scalekit Dashboard](https://app.scalekit.com/)
       - Navigate to MCP Servers section
       - Register a new MCP Server with appropriate scopes
       - Ensure the Resource Identifier matches exactly what you configure as MCP URL
       - Note the Resource ID

    2. Environment Configuration:
       - Set SCALEKIT_ENVIRONMENT_URL (e.g., https://your-env.scalekit.com)
       - Set SCALEKIT_RESOURCE_ID from your created resource
       - Set BASE_URL to your FastMCP server's public URL

    For detailed setup instructions, see:
    https://docs.scalekit.com/mcp/overview/

    Example:
        ```python
        from fastmcp.server.auth.providers.scalekit import ScalekitProvider

        # Create Scalekit resource server provider
        scalekit_auth = ScalekitProvider(
            environment_url="https://your-env.scalekit.com",
            resource_id="sk_resource_...",
            base_url="https://your-fastmcp-server.com",
        )

        # Use with FastMCP
        mcp = FastMCP("My App", auth=scalekit_auth)
        ```
    """

    def __init__(
        self,
        *,
        environment_url: AnyHttpUrl | str,
        resource_id: str,
        base_url: AnyHttpUrl | str | None = None,
        mcp_url: AnyHttpUrl | str | None = None,
        client_id: str | None = None,
        required_scopes: list[str] | None = None,
        scopes_supported: list[str] | None = None,
        resource_name: str | None = None,
        resource_documentation: AnyHttpUrl | None = None,
        token_verifier: TokenVerifier | None = None,
    ):
        """Initialize Scalekit resource server provider.

        Args:
            environment_url: Your Scalekit environment URL (e.g., "https://your-env.scalekit.com")
            resource_id: Your Scalekit resource ID
            base_url: Public URL of this FastMCP server (or use mcp_url for backwards compatibility)
            mcp_url: Deprecated alias for base_url. Will be removed in a future release.
            client_id: Deprecated parameter, no longer required. Will be removed in a future release.
            required_scopes: Optional list of scopes that must be present in tokens
            scopes_supported: Optional list of scopes to advertise in OAuth metadata.
                If None, uses required_scopes. Use this when the scopes clients should
                request differ from the scopes enforced on tokens.
            resource_name: Optional name for the protected resource metadata.
            resource_documentation: Optional documentation URL for the protected resource.
            token_verifier: Optional token verifier. If None, creates JWT verifier for Scalekit
        """
        # Resolve base_url from mcp_url if needed (backwards compatibility)
        # — base_url wins when both are supplied.
        resolved_base_url = base_url or mcp_url
        if not resolved_base_url:
            raise ValueError("Either base_url or mcp_url must be provided")

        if mcp_url is not None:
            logger.warning(
                "ScalekitProvider parameter 'mcp_url' is deprecated and will be removed in a future release. "
                "Rename it to 'base_url'."
            )
        if client_id is not None:
            logger.warning(
                "ScalekitProvider no longer requires 'client_id'. The parameter is accepted only for backward "
                "compatibility and will be removed in a future release."
            )

        # Trailing slash is stripped so endpoint URLs below concatenate cleanly.
        self.environment_url = str(environment_url).rstrip("/")
        self.resource_id = resource_id
        parsed_scopes = (
            parse_scopes(required_scopes) if required_scopes is not None else []
        )
        self.required_scopes = parsed_scopes
        base_url_value = str(resolved_base_url)
        logger.debug(
            "Initializing ScalekitProvider: environment_url=%s resource_id=%s base_url=%s required_scopes=%s",
            self.environment_url,
            self.resource_id,
            base_url_value,
            self.required_scopes,
        )

        # Create default JWT verifier if none provided
        if token_verifier is None:
            logger.debug(
                "Creating default JWTVerifier for Scalekit: jwks_uri=%s issuer=%s required_scopes=%s",
                f"{self.environment_url}/keys",
                self.environment_url,
                self.required_scopes,
            )
            token_verifier = JWTVerifier(
                jwks_uri=f"{self.environment_url}/keys",
                issuer=self.environment_url,
                algorithm="RS256",
                audience=self.resource_id,
                # [] is normalized to None so an empty list means "no enforcement".
                required_scopes=self.required_scopes or None,
            )
        else:
            logger.debug("Using custom token verifier for ScalekitProvider")

        # Initialize RemoteAuthProvider with Scalekit as the authorization server
        super().__init__(
            token_verifier=token_verifier,
            authorization_servers=[
                AnyHttpUrl(f"{self.environment_url}/resources/{self.resource_id}")
            ],
            base_url=base_url_value,
            scopes_supported=scopes_supported,
            resource_name=resource_name,
            resource_documentation=resource_documentation,
        )

    def get_routes(
        self,
        mcp_path: str | None = None,
    ) -> list[Route]:
        """Get OAuth routes including Scalekit authorization server metadata forwarding.

        This returns the standard protected resource routes plus an authorization server
        metadata endpoint that forwards Scalekit's OAuth metadata to clients.

        Args:
            mcp_path: The path where the MCP endpoint is mounted (e.g., "/mcp")
                This is used to advertise the resource URL in metadata.
        """
        # Get the standard protected resource routes from RemoteAuthProvider
        routes = super().get_routes(mcp_path)
        logger.debug(
            "Preparing Scalekit metadata routes: mcp_path=%s resource_id=%s",
            mcp_path,
            self.resource_id,
        )

        async def oauth_authorization_server_metadata(request):
            """Forward Scalekit OAuth authorization server metadata with FastMCP customizations."""
            try:
                metadata_url = f"{self.environment_url}/.well-known/oauth-authorization-server/resources/{self.resource_id}"
                logger.debug(
                    "Fetching Scalekit OAuth metadata: metadata_url=%s", metadata_url
                )
                # Fetched on every request; no caching of the upstream document.
                async with httpx.AsyncClient() as client:
                    response = await client.get(metadata_url)
                    response.raise_for_status()
                    metadata = response.json()
                logger.debug(
                    "Scalekit metadata fetched successfully: metadata_keys=%s",
                    list(metadata.keys()),
                )
                return JSONResponse(metadata)
            except Exception as e:
                # Any upstream failure is reported as a 500 with an OAuth-style body.
                logger.error(f"Failed to fetch Scalekit metadata: {e}")
                return JSONResponse(
                    {
                        "error": "server_error",
                        "error_description": f"Failed to fetch Scalekit metadata: {e}",
                    },
                    status_code=500,
                )

        # Add Scalekit authorization server metadata forwarding
        routes.append(
            Route(
                "/.well-known/oauth-authorization-server",
                endpoint=oauth_authorization_server_metadata,
                methods=["GET"],
            )
        )
        return routes
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "src/fastmcp/server/auth/providers/scalekit.py",
"license": "Apache License 2.0",
"lines": 183,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
PrefectHQ/fastmcp:tests/server/auth/providers/test_scalekit.py | """Tests for Scalekit OAuth provider."""
import httpx
import pytest
from fastmcp import Client, FastMCP
from fastmcp.client.transports import StreamableHttpTransport
from fastmcp.server.auth.providers.jwt import JWTVerifier
from fastmcp.server.auth.providers.scalekit import ScalekitProvider
from fastmcp.utilities.tests import HeadlessOAuth, run_server_async
class TestScalekitProvider:
    """Test Scalekit OAuth provider functionality."""

    def test_init_with_explicit_params(self):
        """Test ScalekitProvider initialization with explicit parameters."""
        provider = ScalekitProvider(
            environment_url="https://my-env.scalekit.com",
            resource_id="sk_resource_456",
            base_url="https://myserver.com/",
            required_scopes=["read"],
        )

        assert provider.environment_url == "https://my-env.scalekit.com"
        assert provider.resource_id == "sk_resource_456"
        assert str(provider.base_url) == "https://myserver.com/"
        assert provider.required_scopes == ["read"]

    def test_init_with_mcp_url_only(self):
        """Allow legacy mcp_url parameter as base_url."""
        provider = ScalekitProvider(
            environment_url="https://legacy.scalekit.com",
            resource_id="sk_resource_legacy",
            mcp_url="https://legacy-app.com/",
        )

        assert str(provider.base_url) == "https://legacy-app.com/"

    def test_init_prefers_base_url_over_mcp_url(self):
        """base_url should take precedence over mcp_url when both are provided."""
        provider = ScalekitProvider(
            environment_url="https://my-env.scalekit.com",
            resource_id="sk_resource_456",
            base_url="https://preferred-base.com/",
            mcp_url="https://unused-base.com/",
        )

        assert str(provider.base_url) == "https://preferred-base.com/"

    def test_environment_variable_loading(self):
        """Explicitly passed values are stored verbatim on the provider."""
        provider = ScalekitProvider(
            environment_url="https://test-env.scalekit.com",
            resource_id="sk_resource_test_456",
            base_url="http://test-server.com",
        )

        assert provider.environment_url == "https://test-env.scalekit.com"
        assert provider.resource_id == "sk_resource_test_456"
        assert str(provider.base_url) == "http://test-server.com/"

    def test_accepts_client_id_argument(self):
        """client_id parameter should be accepted but ignored."""
        provider = ScalekitProvider(
            environment_url="https://my-env.scalekit.com",
            resource_id="sk_resource_456",
            base_url="https://myserver.com/",
            client_id="client_123",
        )

        assert str(provider.base_url) == "https://myserver.com/"

    def test_url_trailing_slash_handling(self):
        """Test that URLs handle trailing slashes correctly."""
        provider = ScalekitProvider(
            environment_url="https://my-env.scalekit.com/",
            resource_id="sk_resource_456",
            base_url="https://myserver.com/",
        )

        # environment_url is stripped; base_url keeps its trailing slash.
        assert provider.environment_url == "https://my-env.scalekit.com"
        assert str(provider.base_url) == "https://myserver.com/"

    def test_jwt_verifier_configured_correctly(self):
        """Test that JWT verifier is configured correctly."""
        provider = ScalekitProvider(
            environment_url="https://my-env.scalekit.com",
            resource_id="sk_resource_456",
            base_url="https://myserver.com/",
        )

        # Check that JWT verifier uses the correct endpoints
        assert isinstance(provider.token_verifier, JWTVerifier)
        assert provider.token_verifier.jwks_uri == "https://my-env.scalekit.com/keys"
        assert provider.token_verifier.issuer == "https://my-env.scalekit.com"
        assert provider.token_verifier.audience == "sk_resource_456"

    def test_required_scopes_hooks_into_verifier(self):
        """Token verifier should enforce required scopes when provided."""
        provider = ScalekitProvider(
            environment_url="https://my-env.scalekit.com",
            resource_id="sk_resource_456",
            base_url="https://myserver.com/",
            required_scopes=["read"],
        )

        assert isinstance(provider.token_verifier, JWTVerifier)
        assert provider.token_verifier.required_scopes == ["read"]

    def test_authorization_servers_configuration(self):
        """Test that authorization servers are configured correctly."""
        provider = ScalekitProvider(
            environment_url="https://my-env.scalekit.com",
            resource_id="sk_resource_456",
            base_url="https://myserver.com/",
        )

        assert len(provider.authorization_servers) == 1
        assert (
            str(provider.authorization_servers[0])
            == "https://my-env.scalekit.com/resources/sk_resource_456"
        )
@pytest.fixture
async def mcp_server_url():
    """Start a Scalekit-protected FastMCP server and yield its HTTP URL."""
    mcp = FastMCP(
        auth=ScalekitProvider(
            environment_url="https://test-env.scalekit.com",
            resource_id="sk_resource_test_456",
            base_url="http://localhost:4321",
        )
    )

    @mcp.tool
    def add(a: int, b: int) -> int:
        return a + b

    # Server is torn down when the fixture's context manager exits.
    async with run_server_async(mcp, transport="http") as url:
        yield url
@pytest.fixture
def client_with_headless_oauth(mcp_server_url: str) -> Client:
    """Client with headless OAuth that bypasses browser interaction."""
    # HeadlessOAuth drives the flow programmatically, so no browser is opened.
    return Client(
        transport=StreamableHttpTransport(mcp_server_url),
        auth=HeadlessOAuth(mcp_url=mcp_server_url),
    )
class TestScalekitProviderIntegration:
    """End-to-end checks against a running Scalekit-protected server."""

    async def test_unauthorized_access(self, mcp_server_url: str):
        """Requests without a bearer token are rejected with 401."""
        with pytest.raises(httpx.HTTPStatusError) as exc_info:
            async with Client(mcp_server_url) as client:
                tools = await client.list_tools()  # noqa: F841
        assert isinstance(exc_info.value, httpx.HTTPStatusError)
        assert exc_info.value.response.status_code == 401
        # The failed call must not have produced a local binding.
        assert "tools" not in locals()

    async def test_metadata_route_forwards_scalekit_response(
        self,
        monkeypatch: pytest.MonkeyPatch,
        mcp_server_url: str,
    ) -> None:
        """Ensure Scalekit metadata route proxies upstream JSON."""
        metadata_payload = {
            "issuer": "https://test-env.scalekit.com",
            "token_endpoint": "https://test-env.scalekit.com/token",
            "authorization_endpoint": "https://test-env.scalekit.com/authorize",
        }

        class DummyResponse:
            # Minimal httpx.Response surface used by the provider's fetch.
            status_code = 200

            def __init__(self, data: dict[str, str]):
                self._data = data

            def json(self):
                return self._data

            def raise_for_status(self):
                return None

        class DummyAsyncClient:
            # Records the URL the provider fetched so it can be asserted below.
            last_url: str | None = None

            async def __aenter__(self):
                return self

            async def __aexit__(self, exc_type, exc, tb):
                return False

            async def get(self, url: str):
                DummyAsyncClient.last_url = url
                return DummyResponse(metadata_payload)

        # Keep a handle on the real client class: only the name the provider
        # module resolves for its upstream fetch is patched below.
        real_httpx_client = httpx.AsyncClient
        monkeypatch.setattr(
            "fastmcp.server.auth.providers.scalekit.httpx.AsyncClient",
            DummyAsyncClient,
        )

        base_url = mcp_server_url.rsplit("/mcp", 1)[0]
        async with real_httpx_client() as client:
            response = await client.get(
                f"{base_url}/.well-known/oauth-authorization-server"
            )

        assert response.status_code == 200
        assert response.json() == metadata_payload
        assert (
            DummyAsyncClient.last_url
            == "https://test-env.scalekit.com/.well-known/oauth-authorization-server/resources/sk_resource_test_456"
        )
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "tests/server/auth/providers/test_scalekit.py",
"license": "Apache License 2.0",
"lines": 175,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
PrefectHQ/fastmcp:examples/auth/aws_oauth/client.py | """OAuth client example for connecting to FastMCP servers.
This example demonstrates how to connect to an OAuth-protected FastMCP server.
To run:
python client.py
"""
import asyncio
from fastmcp.client import Client
SERVER_URL = "http://localhost:8000/mcp"
async def main():
    """Authenticate via OAuth, list tools, then inspect the token's claims."""
    try:
        async with Client(SERVER_URL, auth="oauth") as client:
            assert await client.ping()
            print("✅ Successfully authenticated!")

            tools = await client.list_tools()
            print(f"🔧 Available tools ({len(tools)}):")
            for tool in tools:
                print(f" - {tool.name}: {tool.description}")

            # Test the protected tool
            print("🔒 Calling protected tool: get_access_token_claims")
            result = await client.call_tool("get_access_token_claims")
            user_data = result.data

            print("📄 Available access token claims:")
            print(f" - sub: {user_data.get('sub', 'N/A')}")
            print(f" - username: {user_data.get('username', 'N/A')}")
            print(f" - cognito:groups: {user_data.get('cognito:groups', [])}")
    except Exception as e:
        print(f"❌ Authentication failed: {e}")
        raise


if __name__ == "__main__":
    asyncio.run(main())
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "examples/auth/aws_oauth/client.py",
"license": "Apache License 2.0",
"lines": 30,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
PrefectHQ/fastmcp:examples/auth/aws_oauth/server.py | """AWS Cognito OAuth server example for FastMCP.
This example demonstrates how to protect a FastMCP server with AWS Cognito.
Required environment variables:
- FASTMCP_SERVER_AUTH_AWS_COGNITO_USER_POOL_ID: Your AWS Cognito User Pool ID
- FASTMCP_SERVER_AUTH_AWS_COGNITO_AWS_REGION: Your AWS region (optional, defaults to eu-central-1)
- FASTMCP_SERVER_AUTH_AWS_COGNITO_CLIENT_ID: Your Cognito app client ID
- FASTMCP_SERVER_AUTH_AWS_COGNITO_CLIENT_SECRET: Your Cognito app client secret
To run:
python server.py
"""
import logging
import os
from dotenv import load_dotenv
from fastmcp import FastMCP
from fastmcp.server.auth.providers.aws import AWSCognitoProvider
from fastmcp.server.dependencies import get_access_token
# DEBUG logging surfaces the full OAuth flow while experimenting locally.
logging.basicConfig(level=logging.DEBUG)
# override=True lets .env values replace any already-exported variables.
load_dotenv(".env", override=True)

auth = AWSCognitoProvider(
    user_pool_id=os.getenv("FASTMCP_SERVER_AUTH_AWS_COGNITO_USER_POOL_ID") or "",
    aws_region=os.getenv("FASTMCP_SERVER_AUTH_AWS_COGNITO_AWS_REGION")
    or "eu-central-1",
    client_id=os.getenv("FASTMCP_SERVER_AUTH_AWS_COGNITO_CLIENT_ID") or "",
    client_secret=os.getenv("FASTMCP_SERVER_AUTH_AWS_COGNITO_CLIENT_SECRET") or "",
    base_url="http://localhost:8000",
    # redirect_path="/custom/callback"
)

mcp = FastMCP("AWS Cognito OAuth Example Server", auth=auth)
@mcp.tool
def echo(message: str) -> str:
    """Echo the provided message."""
    return message


@mcp.tool
async def get_access_token_claims() -> dict:
    """Get the authenticated user's access token claims."""
    # The provider's verifier (AWSCognitoTokenVerifier) already restricts
    # claims to this sub/username/cognito:groups subset.
    token = get_access_token()
    return {
        "sub": token.claims.get("sub"),
        "username": token.claims.get("username"),
        "cognito:groups": token.claims.get("cognito:groups", []),
    }


if __name__ == "__main__":
    mcp.run(transport="http", port=8000)
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "examples/auth/aws_oauth/server.py",
"license": "Apache License 2.0",
"lines": 43,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
PrefectHQ/fastmcp:src/fastmcp/server/auth/providers/aws.py | """AWS Cognito OAuth provider for FastMCP.
This module provides a complete AWS Cognito OAuth integration that's ready to use
with a user pool ID, domain prefix, client ID and client secret. It handles all
the complexity of AWS Cognito's OAuth flow, token validation, and user management.
Example:
```python
from fastmcp import FastMCP
from fastmcp.server.auth.providers.aws_cognito import AWSCognitoProvider
# Simple AWS Cognito OAuth protection
auth = AWSCognitoProvider(
user_pool_id="your-user-pool-id",
aws_region="eu-central-1",
client_id="your-cognito-client-id",
client_secret="your-cognito-client-secret"
)
mcp = FastMCP("My Protected Server", auth=auth)
```
"""
from __future__ import annotations
from key_value.aio.protocols import AsyncKeyValue
from pydantic import AnyHttpUrl
from fastmcp.server.auth.auth import AccessToken
from fastmcp.server.auth.oidc_proxy import OIDCProxy
from fastmcp.server.auth.providers.jwt import JWTVerifier
from fastmcp.utilities.auth import parse_scopes
from fastmcp.utilities.logging import get_logger
logger = get_logger(__name__)
class AWSCognitoTokenVerifier(JWTVerifier):
    """JWT verifier that narrows claims down to the Cognito-specific subset."""

    async def verify_token(self, token: str) -> AccessToken | None:
        """Verify the token via JWTVerifier, then keep only Cognito claims."""
        # Delegate cryptographic validation to the base verifier.
        verified = await super().verify_token(token)
        if verified is None:
            return None

        # Keep only the Cognito claims the rest of the app relies on.
        source = verified.claims
        filtered = {
            "sub": source.get("sub"),
            "username": source.get("username"),
            "cognito:groups": source.get("cognito:groups", []),
        }

        # Rebuild the token object so the original claims dict is untouched.
        return AccessToken(
            token=verified.token,
            client_id=verified.client_id,
            scopes=verified.scopes,
            expires_at=verified.expires_at,
            claims=filtered,
        )
class AWSCognitoProvider(OIDCProxy):
    """Complete AWS Cognito OAuth provider for FastMCP.

    This provider makes it trivial to add AWS Cognito OAuth protection to any
    FastMCP server using OIDC Discovery. Just provide your Cognito User Pool details,
    client credentials, and a base URL, and you're ready to go.

    Features:
    - Automatic OIDC Discovery from AWS Cognito User Pool
    - Automatic JWT token validation via Cognito's public keys
    - Cognito-specific claim filtering (sub, username, cognito:groups)
    - Support for Cognito User Pools

    Example:
        ```python
        from fastmcp import FastMCP
        from fastmcp.server.auth.providers.aws_cognito import AWSCognitoProvider

        auth = AWSCognitoProvider(
            user_pool_id="eu-central-1_XXXXXXXXX",
            aws_region="eu-central-1",
            client_id="your-cognito-client-id",
            client_secret="your-cognito-client-secret",
            base_url="https://my-server.com",
            redirect_path="/custom/callback",
        )

        mcp = FastMCP("My App", auth=auth)
        ```
    """

    def __init__(
        self,
        *,
        user_pool_id: str,
        client_id: str,
        client_secret: str,
        base_url: AnyHttpUrl | str,
        aws_region: str = "eu-central-1",
        issuer_url: AnyHttpUrl | str | None = None,
        redirect_path: str = "/auth/callback",
        required_scopes: list[str] | None = None,
        allowed_client_redirect_uris: list[str] | None = None,
        client_storage: AsyncKeyValue | None = None,
        jwt_signing_key: str | bytes | None = None,
        require_authorization_consent: bool = True,
        consent_csp_policy: str | None = None,
    ):
        """Initialize AWS Cognito OAuth provider.

        Args:
            user_pool_id: Your Cognito User Pool ID (e.g., "eu-central-1_XXXXXXXXX")
            client_id: Cognito app client ID
            client_secret: Cognito app client secret
            base_url: Public URL where OAuth endpoints will be accessible (includes any mount path)
            aws_region: AWS region where your User Pool is located (defaults to "eu-central-1")
            issuer_url: Issuer URL for OAuth metadata (defaults to base_url). Use root-level URL
                to avoid 404s during discovery when mounting under a path.
            redirect_path: Redirect path configured in Cognito app (defaults to "/auth/callback")
            required_scopes: Required Cognito scopes (defaults to ["openid"])
            allowed_client_redirect_uris: List of allowed redirect URI patterns for MCP clients.
                If None (default), all URIs are allowed. If empty list, no URIs are allowed.
            client_storage: Storage backend for OAuth state (client registrations, encrypted tokens).
                If None, an encrypted file store will be created in the data directory
                (derived from `platformdirs`).
            jwt_signing_key: Secret for signing FastMCP JWT tokens (any string or bytes). If bytes are provided,
                they will be used as is. If a string is provided, it will be derived into a 32-byte key. If not
                provided, the upstream client secret will be used to derive a 32-byte key using PBKDF2.
            require_authorization_consent: Whether to require user consent before authorizing clients (default True).
                When True, users see a consent screen before being redirected to AWS Cognito.
                When False, authorization proceeds directly without user confirmation.
                SECURITY WARNING: Only disable for local development or testing environments.
            consent_csp_policy: Optional CSP policy value, passed through unchanged to OIDCProxy.
        """
        # Parse scopes if provided as string
        required_scopes_final = (
            parse_scopes(required_scopes) if required_scopes is not None else ["openid"]
        )

        # Construct OIDC discovery URL
        config_url = f"https://cognito-idp.{aws_region}.amazonaws.com/{user_pool_id}/.well-known/openid-configuration"

        # Store Cognito-specific info for claim filtering
        self.user_pool_id = user_pool_id
        self.aws_region = aws_region
        self.client_id = client_id

        # Initialize OIDC proxy with Cognito discovery
        super().__init__(
            config_url=config_url,
            client_id=client_id,
            client_secret=client_secret,
            algorithm="RS256",
            required_scopes=required_scopes_final,
            base_url=base_url,
            issuer_url=issuer_url,
            redirect_path=redirect_path,
            allowed_client_redirect_uris=allowed_client_redirect_uris,
            client_storage=client_storage,
            jwt_signing_key=jwt_signing_key,
            require_authorization_consent=require_authorization_consent,
            consent_csp_policy=consent_csp_policy,
        )

        logger.debug(
            "Initialized AWS Cognito OAuth provider for client %s with scopes: %s",
            client_id,
            required_scopes_final,
        )

    def get_token_verifier(
        self,
        *,
        algorithm: str | None = None,
        audience: str | None = None,
        required_scopes: list[str] | None = None,
        timeout_seconds: int | None = None,
    ) -> AWSCognitoTokenVerifier:
        """Creates a Cognito-specific token verifier with claim filtering.

        Args:
            algorithm: Optional token verifier algorithm
            audience: Optional token verifier audience
            required_scopes: Optional token verifier required_scopes
            timeout_seconds: HTTP request timeout in seconds
        """
        # NOTE(review): timeout_seconds is accepted but not forwarded to the
        # verifier here — confirm whether that is intentional.
        return AWSCognitoTokenVerifier(
            issuer=str(self.oidc_config.issuer),
            audience=audience or self.client_id,
            algorithm=algorithm,
            jwks_uri=str(self.oidc_config.jwks_uri),
            required_scopes=required_scopes,
        )
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "src/fastmcp/server/auth/providers/aws.py",
"license": "Apache License 2.0",
"lines": 168,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | documentation |
PrefectHQ/fastmcp:tests/server/auth/providers/test_aws.py | """Unit tests for AWS Cognito OAuth provider."""
from contextlib import contextmanager
from unittest.mock import patch
from fastmcp.server.auth.providers.aws import (
AWSCognitoProvider,
)
@contextmanager
def mock_cognito_oidc_discovery():
    """Patch ``httpx.get`` so Cognito OIDC discovery returns canned metadata.

    While the context is active, any ``httpx.get`` call yields a response
    whose ``json()`` returns the discovery document below and whose
    ``raise_for_status()`` is a no-op.
    """
    discovery_document = {
        "issuer": "https://cognito-idp.us-east-1.amazonaws.com/us-east-1_XXXXXXXXX",
        "authorization_endpoint": "https://test.auth.us-east-1.amazoncognito.com/oauth2/authorize",
        "token_endpoint": "https://test.auth.us-east-1.amazoncognito.com/oauth2/token",
        "jwks_uri": "https://cognito-idp.us-east-1.amazonaws.com/us-east-1_XXXXXXXXX/.well-known/jwks.json",
        "userinfo_endpoint": "https://test.auth.us-east-1.amazoncognito.com/oauth2/userInfo",
        "response_types_supported": ["code", "token"],
        "subject_types_supported": ["public"],
        "id_token_signing_alg_values_supported": ["RS256"],
        "scopes_supported": ["openid", "email", "phone", "profile"],
        "token_endpoint_auth_methods_supported": [
            "client_secret_basic",
            "client_secret_post",
        ],
    }
    with patch("httpx.get") as patched_get:
        fake_response = patched_get.return_value
        fake_response.raise_for_status.return_value = None
        fake_response.json.return_value = discovery_document
        yield
class TestAWSCognitoProvider:
    """Test AWSCognitoProvider initialization."""

    def test_init_with_explicit_params(self):
        """Test initialization with explicit parameters."""
        with mock_cognito_oidc_discovery():
            provider = AWSCognitoProvider(
                user_pool_id="us-east-1_XXXXXXXXX",
                aws_region="us-east-1",
                client_id="test_client",
                client_secret="test_secret",
                base_url="https://example.com",
                redirect_path="/custom/callback",
                required_scopes=["openid", "email"],
                jwt_signing_key="test-secret",
            )
            # Check that the provider was initialized correctly
            assert provider._upstream_client_id == "test_client"
            assert provider._upstream_client_secret.get_secret_value() == "test_secret"
            assert (
                str(provider.base_url) == "https://example.com/"
            )  # URLs get normalized with trailing slash
            assert provider._redirect_path == "/custom/callback"
            # OIDC provider should have discovered the endpoints automatically
            # (values come from the mocked discovery document).
            assert (
                provider._upstream_authorization_endpoint
                == "https://test.auth.us-east-1.amazoncognito.com/oauth2/authorize"
            )
            assert (
                provider._upstream_token_endpoint
                == "https://test.auth.us-east-1.amazoncognito.com/oauth2/token"
            )

    def test_init_defaults(self):
        """Test that default values are applied correctly."""
        with mock_cognito_oidc_discovery():
            provider = AWSCognitoProvider(
                user_pool_id="us-east-1_XXXXXXXXX",
                client_id="test_client",
                client_secret="test_secret",
                base_url="https://example.com",
                jwt_signing_key="test-secret",
            )
            # Check defaults
            assert str(provider.base_url) == "https://example.com/"
            assert provider._redirect_path == "/auth/callback"
            assert provider._token_validator.required_scopes == ["openid"]
            # NOTE(review): the asserted default region "eu-central-1" does not
            # match the "us-east-1_" prefix of the pool ID above — confirm the
            # provider's documented default for aws_region.
            assert provider.aws_region == "eu-central-1"

    def test_oidc_discovery_integration(self):
        """Test that OIDC discovery endpoints are used correctly."""
        with mock_cognito_oidc_discovery():
            provider = AWSCognitoProvider(
                user_pool_id="us-west-2_YYYYYYYY",
                aws_region="us-west-2",
                client_id="test_client",
                client_secret="test_secret",
                base_url="https://example.com",
                jwt_signing_key="test-secret",
            )
            # OIDC discovery should have configured the endpoints automatically
            assert provider._upstream_authorization_endpoint is not None
            assert provider._upstream_token_endpoint is not None
            assert "amazoncognito.com" in provider._upstream_authorization_endpoint

    def test_token_verifier_defaults_audience_to_client_id(self):
        """Test Cognito token verifier enforces the configured client ID by default."""
        with mock_cognito_oidc_discovery():
            provider = AWSCognitoProvider(
                user_pool_id="us-east-1_XXXXXXXXX",
                client_id="test_client",
                client_secret="test_secret",
                base_url="https://example.com",
                jwt_signing_key="test-secret",
            )
            # No explicit audience: verifier should fall back to the client ID.
            verifier = provider.get_token_verifier()
            assert verifier.audience == "test_client"

    def test_token_verifier_supports_audience_override(self):
        """Test Cognito token verifier still allows explicit audience overrides."""
        with mock_cognito_oidc_discovery():
            provider = AWSCognitoProvider(
                user_pool_id="us-east-1_XXXXXXXXX",
                client_id="test_client",
                client_secret="test_secret",
                base_url="https://example.com",
                jwt_signing_key="test-secret",
            )
            # Explicit audience wins over the client-ID default.
            verifier = provider.get_token_verifier(audience="custom-audience")
            assert verifier.audience == "custom-audience"
# Token verification functionality is now tested as part of the OIDC provider integration
# The CognitoTokenVerifier class is an internal implementation detail
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "tests/server/auth/providers/test_aws.py",
"license": "Apache License 2.0",
"lines": 116,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
PrefectHQ/fastmcp:tests/server/auth/test_oauth_proxy_storage.py | """Tests for OAuth proxy with persistent storage."""
import tempfile
import warnings
from collections.abc import AsyncGenerator
from pathlib import Path
from unittest.mock import AsyncMock, Mock
import pytest
from inline_snapshot import snapshot
from key_value.aio.protocols import AsyncKeyValue
from key_value.aio.stores.filetree import FileTreeStore
from key_value.aio.stores.memory import MemoryStore
from mcp.shared.auth import OAuthClientInformationFull
from pydantic import AnyUrl
from fastmcp.server.auth.auth import TokenVerifier
from fastmcp.server.auth.oauth_proxy import OAuthProxy
class TestOAuthProxyStorage:
    """Tests for OAuth proxy client storage functionality."""

    @pytest.fixture
    def jwt_verifier(self):
        """Create a mock JWT verifier."""
        verifier = Mock()
        verifier.required_scopes = ["read", "write"]
        # verify_token returning None means "token not valid" for the proxy;
        # these tests never exercise token verification directly.
        verifier.verify_token = AsyncMock(return_value=None)
        return verifier

    @pytest.fixture
    async def temp_storage(self) -> AsyncGenerator[FileTreeStore, None]:
        """Create file-based storage for testing."""
        with tempfile.TemporaryDirectory() as temp_dir:
            # FileTreeStore may emit a UserWarning on construction; suppress it
            # so the test output stays clean.
            with warnings.catch_warnings():
                warnings.simplefilter("ignore", UserWarning)
                yield FileTreeStore(data_directory=Path(temp_dir))

    @pytest.fixture
    def memory_storage(self) -> MemoryStore:
        """Create in-memory storage for testing."""
        return MemoryStore()

    def create_proxy(
        self, jwt_verifier: TokenVerifier, storage: AsyncKeyValue | None = None
    ) -> OAuthProxy:
        """Create an OAuth proxy with specified storage."""
        return OAuthProxy(
            upstream_authorization_endpoint="https://github.com/login/oauth/authorize",
            upstream_token_endpoint="https://github.com/login/oauth/access_token",
            upstream_client_id="test-client-id",
            upstream_client_secret="test-client-secret",
            token_verifier=jwt_verifier,
            base_url="https://myserver.com",
            redirect_path="/auth/callback",
            client_storage=storage,
            jwt_signing_key="test-secret",
        )

    async def test_register_and_get_client(self, jwt_verifier, temp_storage):
        """Test registering and retrieving a client."""
        proxy = self.create_proxy(jwt_verifier, storage=temp_storage)
        # Register client
        client_info = OAuthClientInformationFull(
            client_id="test-client-123",
            client_secret="secret-456",
            redirect_uris=[AnyUrl("http://localhost:8080/callback")],
            grant_types=["authorization_code", "refresh_token"],
            scope="read write",
        )
        await proxy.register_client(client_info)
        # Get client back
        client = await proxy.get_client("test-client-123")
        assert client is not None
        assert client.client_id == "test-client-123"
        # Proxy uses token_endpoint_auth_method="none", so client_secret is not stored
        assert client.client_secret is None
        assert client.scope == "read write"

    async def test_client_persists_across_proxy_instances(
        self, jwt_verifier: TokenVerifier, temp_storage: AsyncKeyValue
    ):
        """Test that clients persist when proxy is recreated."""
        # First proxy registers client
        proxy1 = self.create_proxy(jwt_verifier, storage=temp_storage)
        client_info = OAuthClientInformationFull(
            client_id="persistent-client",
            client_secret="persistent-secret",
            redirect_uris=[AnyUrl("http://localhost:9999/callback")],
            scope="openid profile",
        )
        await proxy1.register_client(client_info)
        # Second proxy (same storage backend) can retrieve it
        proxy2 = self.create_proxy(jwt_verifier, storage=temp_storage)
        client = await proxy2.get_client("persistent-client")
        assert client is not None
        # Proxy uses token_endpoint_auth_method="none", so client_secret is not stored
        assert client.client_secret is None
        assert client.scope == "openid profile"

    async def test_nonexistent_client_returns_none(
        self, jwt_verifier: TokenVerifier, temp_storage: AsyncKeyValue
    ):
        """Test that requesting non-existent client returns None."""
        proxy = self.create_proxy(jwt_verifier, storage=temp_storage)
        client = await proxy.get_client("does-not-exist")
        assert client is None

    async def test_proxy_dcr_client_redirect_validation(
        self, jwt_verifier: TokenVerifier, temp_storage: AsyncKeyValue
    ):
        """Test that OAuthProxyClient is created with redirect URI patterns."""
        # Built directly (not via create_proxy) so we can pass
        # allowed_client_redirect_uris with a wildcard port pattern.
        proxy = OAuthProxy(
            upstream_authorization_endpoint="https://github.com/login/oauth/authorize",
            upstream_token_endpoint="https://github.com/login/oauth/access_token",
            upstream_client_id="test-client-id",
            upstream_client_secret="test-client-secret",
            token_verifier=jwt_verifier,
            base_url="https://myserver.com",
            allowed_client_redirect_uris=["http://localhost:*"],
            client_storage=temp_storage,
            jwt_signing_key="test-secret",
        )
        client_info = OAuthClientInformationFull(
            client_id="test-proxy-client",
            client_secret="secret",
            redirect_uris=[AnyUrl("http://localhost:8080/callback")],
        )
        await proxy.register_client(client_info)
        # Get client back - should be OAuthProxyClient
        client = await proxy.get_client("test-proxy-client")
        assert client is not None
        # OAuthProxyClient should validate dynamic localhost ports
        # (port 12345 was never registered but matches the pattern).
        validated = client.validate_redirect_uri(
            AnyUrl("http://localhost:12345/callback")
        )
        assert validated is not None

    async def test_in_memory_storage_option(self, jwt_verifier):
        """Test using in-memory storage explicitly."""
        storage = MemoryStore()
        proxy = self.create_proxy(jwt_verifier, storage=storage)
        client_info = OAuthClientInformationFull(
            client_id="memory-client",
            client_secret="memory-secret",
            redirect_uris=[AnyUrl("http://localhost:8080/callback")],
        )
        await proxy.register_client(client_info)
        client = await proxy.get_client("memory-client")
        assert client is not None
        # Create new proxy with same storage instance: client still visible
        proxy2 = self.create_proxy(jwt_verifier, storage=storage)
        client2 = await proxy2.get_client("memory-client")
        assert client2 is not None
        # But new storage instance won't have it
        proxy3 = self.create_proxy(jwt_verifier, storage=MemoryStore())
        client3 = await proxy3.get_client("memory-client")
        assert client3 is None

    async def test_storage_data_structure(self, jwt_verifier, temp_storage):
        """Test that storage uses proper structured format."""
        proxy = self.create_proxy(jwt_verifier, storage=temp_storage)
        client_info = OAuthClientInformationFull(
            client_id="structured-client",
            client_secret="secret",
            redirect_uris=[AnyUrl("http://localhost:8080/callback")],
        )
        await proxy.register_client(client_info)
        # Check raw storage data: read straight from the key-value store,
        # bypassing the proxy, to pin down the on-disk schema.
        raw_data = await temp_storage.get(
            collection="mcp-oauth-proxy-clients", key="structured-client"
        )
        assert raw_data is not None
        # inline_snapshot pins the full serialized client record; a schema
        # change will show up as a snapshot diff.
        assert raw_data == snapshot(
            {
                "redirect_uris": ["http://localhost:8080/callback"],
                "token_endpoint_auth_method": "none",
                "grant_types": ["authorization_code", "refresh_token"],
                "response_types": ["code"],
                "scope": "read write",
                "client_name": None,
                "client_uri": None,
                "logo_uri": None,
                "contacts": None,
                "tos_uri": None,
                "policy_uri": None,
                "jwks_uri": None,
                "jwks": None,
                "software_id": None,
                "software_version": None,
                "client_id": "structured-client",
                "client_secret": None,
                "client_id_issued_at": None,
                "client_secret_expires_at": None,
                "allowed_redirect_uri_patterns": None,
                "cimd_document": None,
                "cimd_fetched_at": None,
            }
        )
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "tests/server/auth/test_oauth_proxy_storage.py",
"license": "Apache License 2.0",
"lines": 185,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
PrefectHQ/fastmcp:src/fastmcp/server/auth/providers/descope.py | """Descope authentication provider for FastMCP.
This module provides DescopeProvider - a complete authentication solution that integrates
with Descope's OAuth 2.1 and OpenID Connect services, supporting Dynamic Client Registration (DCR)
for seamless MCP client authentication.
"""
from __future__ import annotations
from urllib.parse import urlparse
import httpx
from pydantic import AnyHttpUrl
from starlette.responses import JSONResponse
from starlette.routing import Route
from fastmcp.server.auth import RemoteAuthProvider, TokenVerifier
from fastmcp.server.auth.providers.jwt import JWTVerifier
from fastmcp.utilities.auth import parse_scopes
from fastmcp.utilities.logging import get_logger
logger = get_logger(__name__)
class DescopeProvider(RemoteAuthProvider):
    """Descope metadata provider for DCR (Dynamic Client Registration).

    This provider implements Descope integration using metadata forwarding.
    This is the recommended approach for Descope DCR
    as it allows Descope to handle the OAuth flow directly while FastMCP acts
    as a resource server.

    IMPORTANT SETUP REQUIREMENTS:

    1. Create an MCP Server in Descope Console:
       - Go to the [MCP Servers page](https://app.descope.com/mcp-servers) of the Descope Console
       - Create a new MCP Server
       - Ensure that **Dynamic Client Registration (DCR)** is enabled
       - Note your Well-Known URL

    2. Note your Well-Known URL:
       - Save your Well-Known URL from [MCP Server Settings](https://app.descope.com/mcp-servers)
       - Format: ``https://.../v1/apps/agentic/P.../M.../.well-known/openid-configuration``

    For detailed setup instructions, see:
    https://docs.descope.com/identity-federation/inbound-apps/creating-inbound-apps#method-2-dynamic-client-registration-dcr

    Example:
        ```python
        from fastmcp.server.auth.providers.descope import DescopeProvider

        # Create Descope metadata provider (JWT verifier created automatically)
        descope_auth = DescopeProvider(
            config_url="https://.../v1/apps/agentic/P.../M.../.well-known/openid-configuration",
            base_url="https://your-fastmcp-server.com",
        )

        # Use with FastMCP
        mcp = FastMCP("My App", auth=descope_auth)
        ```
    """

    def __init__(
        self,
        *,
        base_url: AnyHttpUrl | str,
        config_url: AnyHttpUrl | str | None = None,
        project_id: str | None = None,
        descope_base_url: AnyHttpUrl | str | None = None,
        required_scopes: list[str] | None = None,
        scopes_supported: list[str] | None = None,
        resource_name: str | None = None,
        resource_documentation: AnyHttpUrl | None = None,
        token_verifier: TokenVerifier | None = None,
    ):
        """Initialize Descope metadata provider.

        Args:
            base_url: Public URL of this FastMCP server
            config_url: Your Descope Well-Known URL (e.g., "https://.../v1/apps/agentic/P.../M.../.well-known/openid-configuration")
                This is the new recommended way. If provided, project_id and descope_base_url are ignored.
            project_id: Your Descope Project ID (e.g., "P2abc123"). Used with descope_base_url for backwards compatibility.
            descope_base_url: Your Descope base URL (e.g., "https://api.descope.com"). Used with project_id for backwards compatibility.
            required_scopes: Optional list of scopes that must be present in validated tokens.
                These scopes will be included in the protected resource metadata.
            scopes_supported: Optional list of scopes to advertise in OAuth metadata.
                If None, uses required_scopes. Use this when the scopes clients should
                request differ from the scopes enforced on tokens.
            resource_name: Optional name for the protected resource metadata.
            resource_documentation: Optional documentation URL for the protected resource.
            token_verifier: Optional token verifier. If None, creates JWT verifier for Descope

        Raises:
            ValueError: If neither config_url nor the (project_id,
                descope_base_url) pair is provided, or if config_url cannot be
                parsed into a project ID.
        """
        self.base_url = AnyHttpUrl(str(base_url).rstrip("/"))
        # Parse scopes if provided as string (parse_scopes accepts
        # comma/space-delimited strings as well as lists).
        parsed_scopes = (
            parse_scopes(required_scopes) if required_scopes is not None else None
        )
        # Determine which API is being used
        if config_url is not None:
            # New API: use config_url
            # Strip /.well-known/openid-configuration from config_url if present
            # to recover the bare issuer URL.
            issuer_url = str(config_url)
            if issuer_url.endswith("/.well-known/openid-configuration"):
                issuer_url = issuer_url[: -len("/.well-known/openid-configuration")]
            # Parse the issuer URL to extract descope_base_url and project_id for other uses
            parsed_url = urlparse(issuer_url)
            path_parts = parsed_url.path.strip("/").split("/")
            # Extract project_id from path (format: /v1/apps/agentic/P.../M...):
            # the project ID is the path segment immediately after "agentic".
            if "agentic" in path_parts:
                agentic_index = path_parts.index("agentic")
                if agentic_index + 1 < len(path_parts):
                    self.project_id = path_parts[agentic_index + 1]
                else:
                    raise ValueError(
                        f"Could not extract project_id from config_url: {issuer_url}"
                    )
            else:
                raise ValueError(
                    f"Could not find 'agentic' in config_url path: {issuer_url}"
                )
            # Extract descope_base_url (scheme + netloc)
            self.descope_base_url = f"{parsed_url.scheme}://{parsed_url.netloc}".rstrip(
                "/"
            )
        elif project_id is not None and descope_base_url is not None:
            # Old API: use project_id and descope_base_url
            self.project_id = project_id
            descope_base_url_str = str(descope_base_url).rstrip("/")
            # Ensure descope_base_url has a scheme (default to https)
            if not descope_base_url_str.startswith(("http://", "https://")):
                descope_base_url_str = f"https://{descope_base_url_str}"
            self.descope_base_url = descope_base_url_str
            # Old issuer format (no "agentic" segment)
            issuer_url = f"{self.descope_base_url}/v1/apps/{self.project_id}"
        else:
            raise ValueError(
                "Either config_url (new API) or both project_id and descope_base_url (old API) must be provided"
            )
        # Create default JWT verifier if none provided.
        # Note: the JWKS path is {base}/{project_id}/.well-known/jwks.json —
        # deliberately different from the issuer path (no /v1/apps segment).
        if token_verifier is None:
            token_verifier = JWTVerifier(
                jwks_uri=f"{self.descope_base_url}/{self.project_id}/.well-known/jwks.json",
                issuer=issuer_url,
                algorithm="RS256",
                audience=self.project_id,
                required_scopes=parsed_scopes,
            )
        # Initialize RemoteAuthProvider with Descope as the authorization server
        super().__init__(
            token_verifier=token_verifier,
            authorization_servers=[AnyHttpUrl(issuer_url)],
            base_url=self.base_url,
            scopes_supported=scopes_supported,
            resource_name=resource_name,
            resource_documentation=resource_documentation,
        )

    def get_routes(
        self,
        mcp_path: str | None = None,
    ) -> list[Route]:
        """Get OAuth routes including Descope authorization server metadata forwarding.

        This returns the standard protected resource routes plus an authorization server
        metadata endpoint that forwards Descope's OAuth metadata to clients.

        Args:
            mcp_path: The path where the MCP endpoint is mounted (e.g., "/mcp")
                This is used to advertise the resource URL in metadata.
        """
        # Get the standard protected resource routes from RemoteAuthProvider
        routes = super().get_routes(mcp_path)

        async def oauth_authorization_server_metadata(request):
            """Forward Descope OAuth authorization server metadata with FastMCP customizations."""
            try:
                # NOTE(review): this builds the old-style /v1/apps/{project_id}/
                # path even when the provider was configured via the new
                # agentic-style config_url — confirm Descope serves metadata at
                # this path for agentic apps too.
                async with httpx.AsyncClient() as client:
                    response = await client.get(
                        f"{self.descope_base_url}/v1/apps/{self.project_id}/.well-known/oauth-authorization-server"
                    )
                    response.raise_for_status()
                    metadata = response.json()
                    return JSONResponse(metadata)
            except Exception as e:
                # Best-effort proxy: surface upstream failures as a 500 with an
                # OAuth-style error body rather than crashing the route.
                return JSONResponse(
                    {
                        "error": "server_error",
                        "error_description": f"Failed to fetch Descope metadata: {e}",
                    },
                    status_code=500,
                )

        # Add Descope authorization server metadata forwarding
        routes.append(
            Route(
                "/.well-known/oauth-authorization-server",
                endpoint=oauth_authorization_server_metadata,
                methods=["GET"],
            )
        )
        return routes
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "src/fastmcp/server/auth/providers/descope.py",
"license": "Apache License 2.0",
"lines": 178,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
PrefectHQ/fastmcp:tests/server/auth/providers/test_descope.py | """Tests for Descope OAuth provider."""
import os
from unittest.mock import patch
import httpx
import pytest
from fastmcp import Client, FastMCP
from fastmcp.client.transports import StreamableHttpTransport
from fastmcp.server.auth.providers.descope import DescopeProvider
from fastmcp.server.auth.providers.jwt import JWTVerifier
from fastmcp.utilities.tests import HeadlessOAuth, run_server_async
class TestDescopeProvider:
    """Test Descope OAuth provider functionality."""

    def test_init_with_explicit_params(self):
        """Test DescopeProvider initialization with explicit parameters."""
        provider = DescopeProvider(
            config_url="https://api.descope.com/v1/apps/agentic/P2abc123/M123/.well-known/openid-configuration",
            base_url="https://myserver.com",
        )
        # project_id and descope_base_url are derived by parsing config_url.
        assert provider.project_id == "P2abc123"
        assert str(provider.base_url) == "https://myserver.com/"
        assert str(provider.descope_base_url) == "https://api.descope.com"

    def test_environment_variable_loading(self):
        """Test that environment variables are loaded correctly."""
        # NOTE(review): despite the name, this test passes all arguments
        # explicitly and never sets environment variables — it does not
        # actually exercise env-var loading.
        provider = DescopeProvider(
            config_url="https://api.descope.com/v1/apps/agentic/P2env123/M123/.well-known/openid-configuration",
            base_url="http://env-server.com",
        )
        # Should have loaded from environment
        assert provider.project_id == "P2env123"
        assert str(provider.base_url) == "http://env-server.com/"
        assert str(provider.descope_base_url) == "https://api.descope.com"

    def test_config_url_parsing(self):
        """Test that config_url is parsed correctly to extract base URL and project ID."""
        # Standard HTTPS URL
        provider1 = DescopeProvider(
            config_url="https://api.descope.com/v1/apps/agentic/P2abc123/M123/.well-known/openid-configuration",
            base_url="https://myserver.com",
        )
        assert str(provider1.descope_base_url) == "https://api.descope.com"
        assert provider1.project_id == "P2abc123"
        # HTTP URL (for local testing)
        provider2 = DescopeProvider(
            config_url="http://localhost:8080/v1/apps/agentic/P2abc123/M123/.well-known/openid-configuration",
            base_url="https://myserver.com",
        )
        assert str(provider2.descope_base_url) == "http://localhost:8080"
        assert provider2.project_id == "P2abc123"
        # URL without .well-known/openid-configuration suffix
        provider3 = DescopeProvider(
            config_url="https://api.descope.com/v1/apps/agentic/P2abc123/M123",
            base_url="https://myserver.com",
        )
        assert str(provider3.descope_base_url) == "https://api.descope.com"
        assert provider3.project_id == "P2abc123"

    def test_requires_config_url_or_project_id_and_descope_base_url(self):
        """Test that either config_url or both project_id and descope_base_url are required."""
        # Should raise error when neither API is provided
        with pytest.raises(ValueError, match="Either config_url"):
            DescopeProvider(
                base_url="https://myserver.com",
            )

    def test_backwards_compatibility_with_project_id_and_descope_base_url(self):
        """Test backwards compatibility with old API using project_id and descope_base_url."""
        provider = DescopeProvider(
            project_id="P2abc123",
            descope_base_url="https://api.descope.com",
            base_url="https://myserver.com",
        )
        assert provider.project_id == "P2abc123"
        assert str(provider.descope_base_url) == "https://api.descope.com"
        assert str(provider.base_url) == "https://myserver.com/"
        # Check that JWT verifier uses the old issuer format (no "agentic"
        # path segment).
        assert isinstance(provider.token_verifier, JWTVerifier)
        assert (
            provider.token_verifier.issuer == "https://api.descope.com/v1/apps/P2abc123"
        )
        assert (
            provider.token_verifier.jwks_uri
            == "https://api.descope.com/P2abc123/.well-known/jwks.json"
        )

    def test_backwards_compatibility_descope_base_url_without_scheme(self):
        """Test that descope_base_url without scheme gets https:// prefix added."""
        provider = DescopeProvider(
            project_id="P2abc123",
            descope_base_url="api.descope.com",
            base_url="https://myserver.com",
        )
        assert str(provider.descope_base_url) == "https://api.descope.com"
        assert isinstance(provider.token_verifier, JWTVerifier)
        assert (
            provider.token_verifier.issuer == "https://api.descope.com/v1/apps/P2abc123"
        )

    def test_config_url_takes_precedence_over_old_api(self):
        """Test that config_url takes precedence when both APIs are provided."""
        provider = DescopeProvider(
            config_url="https://api.descope.com/v1/apps/agentic/P2new123/M123/.well-known/openid-configuration",
            project_id="P2old123",  # Should be ignored
            descope_base_url="https://old.descope.com",  # Should be ignored
            base_url="https://myserver.com",
        )
        # Should use values from config_url, not the old API
        assert provider.project_id == "P2new123"
        assert str(provider.descope_base_url) == "https://api.descope.com"
        assert isinstance(provider.token_verifier, JWTVerifier)
        assert (
            provider.token_verifier.issuer
            == "https://api.descope.com/v1/apps/agentic/P2new123/M123"
        )

    def test_jwt_verifier_configured_correctly(self):
        """Test that JWT verifier is configured correctly."""
        config_url = "https://api.descope.com/v1/apps/agentic/P2abc123/M123/.well-known/openid-configuration"
        issuer_url = "https://api.descope.com/v1/apps/agentic/P2abc123/M123"
        provider = DescopeProvider(
            config_url=config_url,
            base_url="https://myserver.com",
        )
        # Check that JWT verifier uses the correct endpoints; note the JWKS
        # path intentionally differs from the issuer path.
        assert isinstance(provider.token_verifier, JWTVerifier)
        assert (
            provider.token_verifier.jwks_uri
            == "https://api.descope.com/P2abc123/.well-known/jwks.json"
        )
        assert provider.token_verifier.issuer == issuer_url
        assert isinstance(provider.token_verifier, JWTVerifier)
        assert provider.token_verifier.audience == "P2abc123"

    def test_required_scopes_support(self):
        """Test that required_scopes are supported and passed to JWT verifier."""
        provider = DescopeProvider(
            config_url="https://api.descope.com/v1/apps/agentic/P2abc123/M123/.well-known/openid-configuration",
            base_url="https://myserver.com",
            required_scopes=["read", "write"],
        )
        # Check that required_scopes are set on the token verifier
        assert isinstance(provider.token_verifier, JWTVerifier)
        assert provider.token_verifier.required_scopes == ["read", "write"]

    def test_required_scopes_with_old_api(self):
        """Test that required_scopes work with the old API (project_id + descope_base_url)."""
        provider = DescopeProvider(
            project_id="P2abc123",
            descope_base_url="https://api.descope.com",
            base_url="https://myserver.com",
            required_scopes=["openid", "email"],
        )
        # Check that required_scopes are set on the token verifier
        assert isinstance(provider.token_verifier, JWTVerifier)
        assert provider.token_verifier.required_scopes == ["openid", "email"]

    def test_required_scopes_from_env(self):
        """Test that required_scopes can be set via environment variable."""
        # NOTE(review): the env vars are patched, but the constructor below
        # still passes every argument explicitly — the assertions would pass
        # even if env loading were broken. Consider dropping the explicit
        # kwargs so the env path is actually exercised.
        with patch.dict(
            os.environ,
            {
                "FASTMCP_SERVER_AUTH_DESCOPEPROVIDER_CONFIG_URL": "https://api.descope.com/v1/apps/agentic/P2env123/M123/.well-known/openid-configuration",
                "FASTMCP_SERVER_AUTH_DESCOPEPROVIDER_BASE_URL": "https://envserver.com",
                "FASTMCP_SERVER_AUTH_DESCOPEPROVIDER_REQUIRED_SCOPES": "read,write",
            },
        ):
            provider = DescopeProvider(
                config_url="https://api.descope.com/v1/apps/agentic/P2env123/M123/.well-known/openid-configuration",
                base_url="https://envserver.com",
                required_scopes=["read", "write"],
            )
            assert isinstance(provider.token_verifier, JWTVerifier)
            assert provider.token_verifier.required_scopes == ["read", "write"]
@pytest.fixture
async def mcp_server_url():
    """Start a FastMCP server protected by DescopeProvider and yield its URL.

    The server exposes a single ``add`` tool and runs over the HTTP transport
    for the duration of the dependent test.
    """
    mcp = FastMCP(
        auth=DescopeProvider(
            config_url="https://api.descope.com/v1/apps/agentic/P2test123/M123/.well-known/openid-configuration",
            base_url="http://localhost:4321",
        )
    )

    @mcp.tool
    def add(a: int, b: int) -> int:
        # Trivial tool so integration tests have something to list/call.
        return a + b

    async with run_server_async(mcp, transport="http") as url:
        yield url
@pytest.fixture
def client_with_headless_oauth(mcp_server_url: str) -> Client:
    """Build a client whose OAuth flow runs headlessly (no browser popup)."""
    transport = StreamableHttpTransport(mcp_server_url)
    headless_auth = HeadlessOAuth(mcp_url=mcp_server_url)
    return Client(transport=transport, auth=headless_auth)
class TestDescopeProviderIntegration:
    async def test_unauthorized_access(self, mcp_server_url: str):
        """An unauthenticated client must be rejected with HTTP 401."""
        with pytest.raises(httpx.HTTPStatusError) as exc_info:
            async with Client(mcp_server_url) as client:
                tools = await client.list_tools()  # noqa: F841
        assert isinstance(exc_info.value, httpx.HTTPStatusError)
        assert exc_info.value.response.status_code == 401
        # list_tools raised before assignment, so `tools` must never have
        # been bound in this frame.
        assert "tools" not in locals()

    # async def test_authorized_access(self, client_with_headless_oauth: Client):
    #     async with client_with_headless_oauth:
    #         tools = await client_with_headless_oauth.list_tools()
    #         assert tools is not None
    #         assert len(tools) > 0
    #         assert "add" in tools
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "tests/server/auth/providers/test_descope.py",
"license": "Apache License 2.0",
"lines": 199,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
PrefectHQ/fastmcp:tests/server/test_log_level.py | """Test log_level parameter support in FastMCP server."""
import asyncio
from unittest.mock import AsyncMock, patch
from fastmcp import FastMCP
class TestLogLevelParameter:
    """Test that log_level parameter is properly accepted by run methods."""

    async def test_run_stdio_accepts_log_level(self):
        """Test that run_stdio_async accepts log_level parameter."""
        server = FastMCP("TestServer")
        # Mock the stdio_server to avoid actual stdio operations
        with patch("fastmcp.server.mixins.transport.stdio_server") as mock_stdio:
            mock_stdio.return_value.__aenter__ = AsyncMock(
                return_value=(AsyncMock(), AsyncMock())
            )
            mock_stdio.return_value.__aexit__ = AsyncMock()
            # Mock the underlying MCP server run method
            with patch.object(server._mcp_server, "run", new_callable=AsyncMock):
                try:
                    # This should accept the log_level parameter without error;
                    # only a TypeError (bad signature) would fail the test.
                    await asyncio.wait_for(
                        server.run_stdio_async(log_level="DEBUG", show_banner=False),
                        timeout=0.1,
                    )
                except asyncio.TimeoutError:
                    pass  # Expected since we're mocking

    async def test_run_http_accepts_log_level(self):
        """Test that run_http_async accepts log_level parameter."""
        server = FastMCP("TestServer")
        # Mock uvicorn to avoid actual server start
        with patch(
            "fastmcp.server.mixins.transport.uvicorn.Server"
        ) as mock_server_class:
            mock_instance = mock_server_class.return_value
            mock_instance.serve = AsyncMock()
            # This should accept the log_level parameter without error
            await server.run_http_async(
                log_level="INFO", show_banner=False, host="127.0.0.1", port=8000
            )
            # Verify serve was called
            mock_instance.serve.assert_called_once()

    async def test_run_async_passes_log_level(self):
        """Test that run_async passes log_level to transport methods."""
        server = FastMCP("TestServer")
        # Test stdio transport: log_level must be forwarded verbatim
        with patch.object(
            server, "run_stdio_async", new_callable=AsyncMock
        ) as mock_stdio:
            await server.run_async(transport="stdio", log_level="WARNING")
            mock_stdio.assert_called_once_with(show_banner=True, log_level="WARNING")
        # Test http transport
        with patch.object(
            server, "run_http_async", new_callable=AsyncMock
        ) as mock_http:
            await server.run_async(transport="http", log_level="ERROR")
            mock_http.assert_called_once_with(
                transport="http", show_banner=True, log_level="ERROR"
            )

    def test_sync_run_accepts_log_level(self):
        """Test that the synchronous run method accepts log_level."""
        server = FastMCP("TestServer")
        with patch.object(server, "run_async", new_callable=AsyncMock):
            # Mock anyio.run to avoid actual async execution
            with patch("anyio.run") as mock_anyio_run:
                server.run(transport="stdio", log_level="CRITICAL")
                # Verify anyio.run was called
                mock_anyio_run.assert_called_once()
                # Get the function that was passed to anyio.run
                called_func = mock_anyio_run.call_args[0][0]
                # The function should be a partial that includes log_level
                # (functools.partial exposes bound kwargs via .keywords).
                assert hasattr(called_func, "keywords")
                assert called_func.keywords.get("log_level") == "CRITICAL"
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "tests/server/test_log_level.py",
"license": "Apache License 2.0",
"lines": 71,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
PrefectHQ/fastmcp:src/fastmcp/server/auth/oidc_proxy.py | """OIDC Proxy Provider for FastMCP.
This provider acts as a transparent proxy to an upstream OIDC compliant Authorization
Server. It leverages the OAuthProxy class to handle Dynamic Client Registration and
forwarding of all OAuth flows.
This implementation is based on:
OpenID Connect Discovery 1.0 - https://openid.net/specs/openid-connect-discovery-1_0.html
OAuth 2.0 Authorization Server Metadata - https://datatracker.ietf.org/doc/html/rfc8414
"""
from collections.abc import Sequence
import httpx
from key_value.aio.protocols import AsyncKeyValue
from pydantic import AnyHttpUrl, BaseModel, model_validator
from typing_extensions import Self
from fastmcp.server.auth import TokenVerifier
from fastmcp.server.auth.oauth_proxy import OAuthProxy
from fastmcp.server.auth.oauth_proxy.models import UpstreamTokenSet
from fastmcp.server.auth.providers.jwt import JWTVerifier
from fastmcp.utilities.logging import get_logger
logger = get_logger(__name__)
class OIDCConfiguration(BaseModel):
    """OIDC provider configuration metadata.

    Mirrors the metadata documents described by:
        OpenID Connect Discovery 1.0 -
            https://openid.net/specs/openid-connect-discovery-1_0.html#ProviderMetadata
        OAuth 2.0 Authorization Server Metadata -
            https://datatracker.ietf.org/doc/html/rfc8414#section-2

    Fields tagged ``# Strict`` below are required whenever ``strict`` is True;
    the ``_enforce_strict`` validator rejects documents that omit them.
    """

    # When True (the default), the model validator enforces presence and URL
    # validity of the metadata fields required by OIDC Discovery.
    strict: bool = True
    # OpenID Connect Discovery 1.0
    issuer: AnyHttpUrl | str | None = None  # Strict
    authorization_endpoint: AnyHttpUrl | str | None = None  # Strict
    token_endpoint: AnyHttpUrl | str | None = None  # Strict
    userinfo_endpoint: AnyHttpUrl | str | None = None
    jwks_uri: AnyHttpUrl | str | None = None  # Strict
    registration_endpoint: AnyHttpUrl | str | None = None
    scopes_supported: Sequence[str] | None = None
    response_types_supported: Sequence[str] | None = None  # Strict
    response_modes_supported: Sequence[str] | None = None
    grant_types_supported: Sequence[str] | None = None
    acr_values_supported: Sequence[str] | None = None
    subject_types_supported: Sequence[str] | None = None  # Strict
    id_token_signing_alg_values_supported: Sequence[str] | None = None  # Strict
    id_token_encryption_alg_values_supported: Sequence[str] | None = None
    id_token_encryption_enc_values_supported: Sequence[str] | None = None
    userinfo_signing_alg_values_supported: Sequence[str] | None = None
    userinfo_encryption_alg_values_supported: Sequence[str] | None = None
    userinfo_encryption_enc_values_supported: Sequence[str] | None = None
    request_object_signing_alg_values_supported: Sequence[str] | None = None
    request_object_encryption_alg_values_supported: Sequence[str] | None = None
    request_object_encryption_enc_values_supported: Sequence[str] | None = None
    token_endpoint_auth_methods_supported: Sequence[str] | None = None
    token_endpoint_auth_signing_alg_values_supported: Sequence[str] | None = None
    display_values_supported: Sequence[str] | None = None
    claim_types_supported: Sequence[str] | None = None
    claims_supported: Sequence[str] | None = None
    service_documentation: AnyHttpUrl | str | None = None
    claims_locales_supported: Sequence[str] | None = None
    ui_locales_supported: Sequence[str] | None = None
    claims_parameter_supported: bool | None = None
    request_parameter_supported: bool | None = None
    request_uri_parameter_supported: bool | None = None
    require_request_uri_registration: bool | None = None
    op_policy_uri: AnyHttpUrl | str | None = None
    op_tos_uri: AnyHttpUrl | str | None = None
    # OAuth 2.0 Authorization Server Metadata
    revocation_endpoint: AnyHttpUrl | str | None = None
    revocation_endpoint_auth_methods_supported: Sequence[str] | None = None
    revocation_endpoint_auth_signing_alg_values_supported: Sequence[str] | None = None
    introspection_endpoint: AnyHttpUrl | str | None = None
    introspection_endpoint_auth_methods_supported: Sequence[str] | None = None
    introspection_endpoint_auth_signing_alg_values_supported: Sequence[str] | None = (
        None
    )
    code_challenge_methods_supported: Sequence[str] | None = None
    signed_metadata: str | None = None

    @model_validator(mode="after")
    def _enforce_strict(self) -> Self:
        """Enforce strict rules.

        Runs after field validation; when ``strict`` is True, raises
        ValueError if any Discovery-required field is missing or, for
        URL-typed fields, not a valid HTTP(S) URL.
        """
        if not self.strict:
            return self

        def enforce(attr: str, is_url: bool = False) -> None:
            # Presence check: both None and empty values fail.
            value = getattr(self, attr, None)
            if not value:
                message = f"Missing required configuration metadata: {attr}"
                logger.error(message)
                raise ValueError(message)
            # Already-parsed AnyHttpUrl values need no re-validation.
            if not is_url or isinstance(value, AnyHttpUrl):
                return
            try:
                AnyHttpUrl(value)
            except Exception as e:
                message = f"Invalid URL for configuration metadata: {attr}"
                logger.error(message)
                raise ValueError(message) from e

        # Fields required by OIDC Discovery 1.0 provider metadata.
        enforce("issuer", True)
        enforce("authorization_endpoint", True)
        enforce("token_endpoint", True)
        enforce("jwks_uri", True)
        enforce("response_types_supported")
        enforce("subject_types_supported")
        enforce("id_token_signing_alg_values_supported")
        return self

    @classmethod
    def get_oidc_configuration(
        cls, config_url: AnyHttpUrl, *, strict: bool | None, timeout_seconds: int | None
    ) -> Self:
        """Get the OIDC configuration for the specified config URL.

        Fetches the discovery document synchronously via httpx and validates
        it into an instance of this model.

        Args:
            config_url: The OIDC config URL
            strict: The strict flag for the configuration
            timeout_seconds: HTTP request timeout in seconds

        Raises:
            httpx.HTTPStatusError: If the discovery endpoint returns an error.
            ValueError: If strict validation of the document fails.
        """
        get_kwargs: dict[str, int] = {}
        if timeout_seconds is not None:
            get_kwargs["timeout"] = timeout_seconds
        try:
            response = httpx.get(str(config_url), **get_kwargs)
            response.raise_for_status()
            config_data = response.json()
            # Only override the model's default strictness when the caller
            # expressed a preference.
            if strict is not None:
                config_data["strict"] = strict
            return cls.model_validate(config_data)
        except Exception:
            logger.exception(
                f"Unable to get OIDC configuration for config url: {config_url}"
            )
            raise
class OIDCProxy(OAuthProxy):
    """OAuth provider that wraps OAuthProxy to provide configuration via an OIDC configuration URL.

    This provider makes it easier to add OAuth protection for any upstream provider
    that is OIDC compliant.

    Example:
        ```python
        from fastmcp import FastMCP
        from fastmcp.server.auth.oidc_proxy import OIDCProxy

        # Simple OIDC based protection
        auth = OIDCProxy(
            config_url="https://oidc.config.url",
            client_id="your-oidc-client-id",
            client_secret="your-oidc-client-secret",
            base_url="https://your.server.url",
        )

        mcp = FastMCP("My Protected Server", auth=auth)
        ```
    """

    # Discovery metadata fetched from config_url during __init__.
    oidc_config: OIDCConfiguration

    def __init__(
        self,
        *,
        # OIDC configuration
        config_url: AnyHttpUrl | str,
        strict: bool | None = None,
        # Upstream server configuration
        client_id: str,
        client_secret: str,
        audience: str | None = None,
        timeout_seconds: int | None = None,
        # Token verifier
        token_verifier: TokenVerifier | None = None,
        algorithm: str | None = None,
        required_scopes: list[str] | None = None,
        verify_id_token: bool = False,
        # FastMCP server configuration
        base_url: AnyHttpUrl | str,
        issuer_url: AnyHttpUrl | str | None = None,
        redirect_path: str | None = None,
        # Client configuration
        allowed_client_redirect_uris: list[str] | None = None,
        client_storage: AsyncKeyValue | None = None,
        # JWT and encryption keys
        jwt_signing_key: str | bytes | None = None,
        # Token validation configuration
        token_endpoint_auth_method: str | None = None,
        # Consent screen configuration
        require_authorization_consent: bool = True,
        consent_csp_policy: str | None = None,
        # Extra parameters
        extra_authorize_params: dict[str, str] | None = None,
        extra_token_params: dict[str, str] | None = None,
        # Token expiry fallback
        fallback_access_token_expiry_seconds: int | None = None,
        # CIMD configuration
        enable_cimd: bool = True,
    ) -> None:
        """Initialize the OIDC proxy provider.

        Fetches the upstream discovery document synchronously, derives the
        authorization/token/revocation endpoints from it, builds (or accepts)
        a token verifier, and delegates the rest to OAuthProxy.

        Args:
            config_url: URL of upstream configuration
            strict: Optional strict flag for the configuration
            client_id: Client ID registered with upstream server
            client_secret: Client secret for upstream server
            audience: Audience for upstream server
            timeout_seconds: HTTP request timeout in seconds
            token_verifier: Optional custom token verifier (e.g., IntrospectionTokenVerifier for opaque tokens).
                If not provided, a JWTVerifier will be created using the OIDC configuration.
                Cannot be used with algorithm or required_scopes parameters (configure these on your verifier instead).
            algorithm: Token verifier algorithm (only used if token_verifier is not provided)
            required_scopes: Required scopes for token validation (only used if token_verifier is not provided)
            verify_id_token: If True, verify the OIDC id_token instead of the access_token.
                Useful for providers that issue opaque (non-JWT) access tokens, since the
                id_token is always a standard JWT verifiable via the provider's JWKS.
            base_url: Public URL where OAuth endpoints will be accessible (includes any mount path)
            issuer_url: Issuer URL for OAuth metadata (defaults to base_url). Use root-level URL
                to avoid 404s during discovery when mounting under a path.
            redirect_path: Redirect path configured in upstream OAuth app (defaults to "/auth/callback")
            allowed_client_redirect_uris: List of allowed redirect URI patterns for MCP clients.
                Patterns support wildcards (e.g., "http://localhost:*", "https://*.example.com/*").
                If None (default), all redirect URIs are allowed (for DCR compatibility).
                If empty list, no redirect URIs are allowed.
                These are for MCP clients performing loopback redirects, NOT for the upstream OAuth app.
            client_storage: Storage backend for OAuth state (client registrations, encrypted tokens).
                If None, an encrypted file store will be created in the data directory
                (derived from `platformdirs`).
            jwt_signing_key: Secret for signing FastMCP JWT tokens (any string or bytes). If bytes are provided,
                they will be used as is. If a string is provided, it will be derived into a 32-byte key. If not
                provided, the upstream client secret will be used to derive a 32-byte key using PBKDF2.
            token_endpoint_auth_method: Token endpoint authentication method for upstream server.
                Common values: "client_secret_basic", "client_secret_post", "none".
                If None, authlib will use its default (typically "client_secret_basic").
            require_authorization_consent: Whether to require user consent before authorizing clients (default True).
                When True, users see a consent screen before being redirected to the upstream IdP.
                When False, authorization proceeds directly without user confirmation.
                SECURITY WARNING: Only disable for local development or testing environments.
            consent_csp_policy: Content Security Policy for the consent page.
                If None (default), uses the built-in CSP policy with appropriate directives.
                If empty string "", disables CSP entirely (no meta tag is rendered).
                If a non-empty string, uses that as the CSP policy value.
            extra_authorize_params: Additional parameters to forward to the upstream authorization endpoint.
                Useful for provider-specific parameters like prompt=consent or access_type=offline.
                Example: {"prompt": "consent", "access_type": "offline"}
            extra_token_params: Additional parameters to forward to the upstream token endpoint.
                Useful for provider-specific parameters during token exchange.
            fallback_access_token_expiry_seconds: Expiry time to use when upstream provider
                doesn't return `expires_in` in the token response. If not set, uses smart
                defaults: 1 hour if a refresh token is available (since we can refresh),
                or 1 year if no refresh token (for API-key-style tokens like GitHub OAuth Apps).
            enable_cimd: Whether to enable CIMD (Client ID Metadata Document) client support.
                When True, clients can use their metadata document URL as client_id instead of
                Dynamic Client Registration. Default is True.

        Raises:
            ValueError: If a required argument is missing, if verifier-specific
                parameters are combined with a custom token_verifier, or if the
                discovery document lacks the required OAuth endpoints.
        """
        # Fail fast on arguments that have no usable default.
        if not config_url:
            raise ValueError("Missing required config URL")
        if not client_id:
            raise ValueError("Missing required client id")
        if not client_secret:
            raise ValueError("Missing required client secret")
        if not base_url:
            raise ValueError("Missing required base URL")
        # Validate that verifier-specific parameters are not used with custom verifier
        if token_verifier is not None:
            if algorithm is not None:
                raise ValueError(
                    "Cannot specify 'algorithm' when providing a custom token_verifier. "
                    "Configure the algorithm on your token verifier instead."
                )
            if required_scopes is not None:
                raise ValueError(
                    "Cannot specify 'required_scopes' when providing a custom token_verifier. "
                    "Configure required scopes on your token verifier instead."
                )
        if isinstance(config_url, str):
            config_url = AnyHttpUrl(config_url)
        # Blocking HTTP fetch of the provider's discovery document; raises on
        # network or validation failure.
        self.oidc_config = self.get_oidc_configuration(
            config_url, strict, timeout_seconds
        )
        # Even in non-strict mode these two endpoints are indispensable for
        # the proxy to operate.
        if (
            not self.oidc_config.authorization_endpoint
            or not self.oidc_config.token_endpoint
        ):
            logger.debug(f"Invalid OIDC Configuration: {self.oidc_config}")
            raise ValueError("Missing required OIDC endpoints")
        revocation_endpoint = (
            str(self.oidc_config.revocation_endpoint)
            if self.oidc_config.revocation_endpoint
            else None
        )
        # Use custom verifier if provided, otherwise create default JWTVerifier
        if token_verifier is None:
            # When verifying id_tokens:
            # - aud is always the OAuth client_id (per OIDC Core §2), not
            #   the API audience, so use client_id for audience validation.
            # - id_tokens don't carry scope/scp claims, so don't pass
            #   required_scopes to the verifier (scope enforcement happens
            #   at the FastMCP token level instead).
            verifier_audience = client_id if verify_id_token else audience
            verifier_scopes = None if verify_id_token else required_scopes
            token_verifier = self.get_token_verifier(
                algorithm=algorithm,
                audience=verifier_audience,
                required_scopes=verifier_scopes,
                timeout_seconds=timeout_seconds,
            )
        init_kwargs: dict[str, object] = {
            "upstream_authorization_endpoint": str(
                self.oidc_config.authorization_endpoint
            ),
            "upstream_token_endpoint": str(self.oidc_config.token_endpoint),
            "upstream_client_id": client_id,
            "upstream_client_secret": client_secret,
            "upstream_revocation_endpoint": revocation_endpoint,
            "token_verifier": token_verifier,
            "base_url": base_url,
            "issuer_url": issuer_url or base_url,
            "service_documentation_url": self.oidc_config.service_documentation,
            "allowed_client_redirect_uris": allowed_client_redirect_uris,
            "client_storage": client_storage,
            "jwt_signing_key": jwt_signing_key,
            "token_endpoint_auth_method": token_endpoint_auth_method,
            "require_authorization_consent": require_authorization_consent,
            "consent_csp_policy": consent_csp_policy,
            "fallback_access_token_expiry_seconds": fallback_access_token_expiry_seconds,
            "enable_cimd": enable_cimd,
        }
        # Only forward redirect_path when given so OAuthProxy's own default
        # ("/auth/callback") applies otherwise.
        if redirect_path:
            init_kwargs["redirect_path"] = redirect_path
        # Build extra params, merging audience with user-provided params
        # User params override audience if there's a conflict
        final_authorize_params: dict[str, str] = {}
        final_token_params: dict[str, str] = {}
        if audience:
            final_authorize_params["audience"] = audience
            final_token_params["audience"] = audience
        if extra_authorize_params:
            final_authorize_params.update(extra_authorize_params)
        if extra_token_params:
            final_token_params.update(extra_token_params)
        if final_authorize_params:
            init_kwargs["extra_authorize_params"] = final_authorize_params
        if final_token_params:
            init_kwargs["extra_token_params"] = final_token_params
        super().__init__(**init_kwargs)  # ty: ignore[invalid-argument-type]
        self._verify_id_token = verify_id_token
        # When verify_id_token strips scopes from the verifier, restore
        # them on the provider so they're still advertised to clients
        # and enforced at the FastMCP token level. We also need to
        # recompute derived state that OAuthProxy.__init__ already built
        # from the (empty) verifier scopes.
        if verify_id_token and required_scopes:
            self.required_scopes = required_scopes
            self._default_scope_str = " ".join(required_scopes)
            if self.client_registration_options:
                self.client_registration_options.valid_scopes = required_scopes
            if self._cimd_manager is not None:
                self._cimd_manager.default_scope = self._default_scope_str

    def _get_verification_token(
        self, upstream_token_set: UpstreamTokenSet
    ) -> str | None:
        """Get the token to verify from the upstream token set.

        When verify_id_token is enabled, returns the id_token from the
        upstream token response instead of the access_token.
        """
        if self._verify_id_token:
            id_token = upstream_token_set.raw_token_data.get("id_token")
            # Returning None here means verification will fail downstream;
            # log so the misconfiguration is diagnosable.
            if id_token is None:
                logger.warning(
                    "verify_id_token is enabled but no id_token found in"
                    " upstream token response"
                )
            return id_token
        return upstream_token_set.access_token

    def get_oidc_configuration(
        self,
        config_url: AnyHttpUrl,
        strict: bool | None,
        timeout_seconds: int | None,
    ) -> OIDCConfiguration:
        """Gets the OIDC configuration for the specified configuration URL.

        Thin instance-level hook around the classmethod so subclasses can
        override how discovery is performed.

        Args:
            config_url: The OIDC configuration URL
            strict: The strict flag for the configuration
            timeout_seconds: HTTP request timeout in seconds
        """
        return OIDCConfiguration.get_oidc_configuration(
            config_url, strict=strict, timeout_seconds=timeout_seconds
        )

    def get_token_verifier(
        self,
        *,
        algorithm: str | None = None,
        audience: str | None = None,
        required_scopes: list[str] | None = None,
        timeout_seconds: int | None = None,
    ) -> TokenVerifier:
        """Creates the token verifier for the specified OIDC configuration and arguments.

        Builds a JWTVerifier from the discovered jwks_uri and issuer;
        subclasses may override to supply a different verifier.

        Args:
            algorithm: Optional token verifier algorithm
            audience: Optional token verifier audience
            required_scopes: Optional token verifier required_scopes
            timeout_seconds: HTTP request timeout in seconds
        """
        return JWTVerifier(
            jwks_uri=str(self.oidc_config.jwks_uri),
            issuer=str(self.oidc_config.issuer),
            algorithm=algorithm,
            audience=audience,
            required_scopes=required_scopes,
        )
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "src/fastmcp/server/auth/oidc_proxy.py",
"license": "Apache License 2.0",
"lines": 394,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
PrefectHQ/fastmcp:src/fastmcp/server/auth/providers/auth0.py | """Auth0 OAuth provider for FastMCP.
This module provides a complete Auth0 integration that's ready to use with
just the configuration URL, client ID, client secret, audience, and base URL.
Example:
```python
from fastmcp import FastMCP
from fastmcp.server.auth.providers.auth0 import Auth0Provider
# Simple Auth0 OAuth protection
auth = Auth0Provider(
config_url="https://auth0.config.url",
client_id="your-auth0-client-id",
client_secret="your-auth0-client-secret",
audience="your-auth0-api-audience",
base_url="http://localhost:8000",
)
mcp = FastMCP("My Protected Server", auth=auth)
```
"""
from key_value.aio.protocols import AsyncKeyValue
from pydantic import AnyHttpUrl
from fastmcp.server.auth.oidc_proxy import OIDCProxy
from fastmcp.utilities.auth import parse_scopes
from fastmcp.utilities.logging import get_logger
logger = get_logger(__name__)
class Auth0Provider(OIDCProxy):
    """An Auth0 provider implementation for FastMCP.

    This provider is a complete Auth0 integration that's ready to use with
    just the configuration URL, client ID, client secret, audience, and base URL.

    Example:
        ```python
        from fastmcp import FastMCP
        from fastmcp.server.auth.providers.auth0 import Auth0Provider

        # Simple Auth0 OAuth protection
        auth = Auth0Provider(
            config_url="https://auth0.config.url",
            client_id="your-auth0-client-id",
            client_secret="your-auth0-client-secret",
            audience="your-auth0-api-audience",
            base_url="http://localhost:8000",
        )

        mcp = FastMCP("My Protected Server", auth=auth)
        ```
    """

    def __init__(
        self,
        *,
        config_url: AnyHttpUrl | str,
        client_id: str,
        client_secret: str,
        audience: str,
        base_url: AnyHttpUrl | str,
        issuer_url: AnyHttpUrl | str | None = None,
        required_scopes: list[str] | None = None,
        redirect_path: str | None = None,
        allowed_client_redirect_uris: list[str] | None = None,
        client_storage: AsyncKeyValue | None = None,
        jwt_signing_key: str | bytes | None = None,
        require_authorization_consent: bool = True,
        consent_csp_policy: str | None = None,
    ) -> None:
        """Initialize Auth0 OAuth provider.

        Args:
            config_url: Auth0 config URL
            client_id: Auth0 application client id
            client_secret: Auth0 application client secret
            audience: Auth0 API audience
            base_url: Public URL where OAuth endpoints will be accessible (includes any mount path)
            issuer_url: Issuer URL for OAuth metadata (defaults to base_url). Use root-level URL
                to avoid 404s during discovery when mounting under a path.
            required_scopes: Required Auth0 scopes (defaults to ["openid"])
            redirect_path: Redirect path configured in Auth0 application
            allowed_client_redirect_uris: List of allowed redirect URI patterns for MCP clients.
                If None (default), all URIs are allowed. If empty list, no URIs are allowed.
            client_storage: Storage backend for OAuth state (client registrations, encrypted tokens).
                If None, an encrypted file store will be created in the data directory
                (derived from `platformdirs`).
            jwt_signing_key: Secret for signing FastMCP JWT tokens (any string or bytes). If bytes are provided,
                they will be used as is. If a string is provided, it will be derived into a 32-byte key. If not
                provided, the upstream client secret will be used to derive a 32-byte key using PBKDF2.
            require_authorization_consent: Whether to require user consent before authorizing clients (default True).
                When True, users see a consent screen before being redirected to Auth0.
                When False, authorization proceeds directly without user confirmation.
                SECURITY WARNING: Only disable for local development or testing environments.
            consent_csp_policy: Content Security Policy for the consent page.
                If None (default), uses the built-in CSP policy with appropriate directives.
                If empty string "", disables CSP entirely (no meta tag is rendered).
                If a non-empty string, uses that as the CSP policy value.
        """
        # Parse scopes if provided as string; Auth0 requires at least "openid".
        auth0_required_scopes = (
            parse_scopes(required_scopes) if required_scopes is not None else ["openid"]
        )
        super().__init__(
            config_url=config_url,
            client_id=client_id,
            client_secret=client_secret,
            audience=audience,
            base_url=base_url,
            issuer_url=issuer_url,
            redirect_path=redirect_path,
            required_scopes=auth0_required_scopes,
            allowed_client_redirect_uris=allowed_client_redirect_uris,
            client_storage=client_storage,
            jwt_signing_key=jwt_signing_key,
            require_authorization_consent=require_authorization_consent,
            consent_csp_policy=consent_csp_policy,
        )
        logger.debug(
            "Initialized Auth0 OAuth provider for client %s with scopes: %s",
            client_id,
            auth0_required_scopes,
        )
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "src/fastmcp/server/auth/providers/auth0.py",
"license": "Apache License 2.0",
"lines": 108,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | documentation |
PrefectHQ/fastmcp:tests/server/auth/providers/test_auth0.py | """Unit tests for Auth0 OAuth provider."""
from unittest.mock import patch
import pytest
from fastmcp.server.auth.oidc_proxy import OIDCConfiguration
from fastmcp.server.auth.providers.auth0 import Auth0Provider
from fastmcp.server.auth.providers.jwt import JWTVerifier
# Shared fixture values used to construct the provider under test.
TEST_CONFIG_URL = "https://example.com/.well-known/openid-configuration"
TEST_CLIENT_ID = "test-client-id"
TEST_CLIENT_SECRET = "test-client-secret"
TEST_AUDIENCE = "test-audience"
TEST_BASE_URL = "https://example.com:8000/"
TEST_REDIRECT_PATH = "/test/callback"
TEST_REQUIRED_SCOPES = ["openid", "email"]
@pytest.fixture
def valid_oidc_configuration_dict():
    """Minimal OIDC discovery document that satisfies strict validation."""
    return dict(
        issuer="https://example.com",
        authorization_endpoint="https://example.com/authorize",
        token_endpoint="https://example.com/oauth/token",
        jwks_uri="https://example.com/.well-known/jwks.json",
        response_types_supported=["code"],
        subject_types_supported=["public"],
        id_token_signing_alg_values_supported=["RS256"],
    )
class TestAuth0Provider:
    """Test Auth0Provider initialization."""

    def test_init_with_explicit_params(self, valid_oidc_configuration_dict):
        """Test initialization with explicit parameters."""
        discovery_target = (
            "fastmcp.server.auth.oidc_proxy.OIDCConfiguration.get_oidc_configuration"
        )
        with patch(discovery_target) as mock_get:
            # Replace the network discovery call with a canned document.
            mock_get.return_value = OIDCConfiguration.model_validate(
                valid_oidc_configuration_dict
            )
            provider = Auth0Provider(
                config_url=TEST_CONFIG_URL,
                client_id=TEST_CLIENT_ID,
                client_secret=TEST_CLIENT_SECRET,
                audience=TEST_AUDIENCE,
                base_url=TEST_BASE_URL,
                redirect_path=TEST_REDIRECT_PATH,
                required_scopes=TEST_REQUIRED_SCOPES,
                jwt_signing_key="test-secret",
            )
            mock_get.assert_called_once()
            assert str(mock_get.call_args[0][0]) == TEST_CONFIG_URL
            assert provider._upstream_client_id == TEST_CLIENT_ID
            secret = provider._upstream_client_secret.get_secret_value()
            assert secret == TEST_CLIENT_SECRET
            verifier = provider._token_validator
            assert isinstance(verifier, JWTVerifier)
            assert verifier.audience == TEST_AUDIENCE
            assert str(provider.base_url) == TEST_BASE_URL
            assert provider._redirect_path == TEST_REDIRECT_PATH
            assert verifier.required_scopes == TEST_REQUIRED_SCOPES

    def test_init_defaults(self, valid_oidc_configuration_dict):
        """Test that default values are applied correctly."""
        discovery_target = (
            "fastmcp.server.auth.oidc_proxy.OIDCConfiguration.get_oidc_configuration"
        )
        with patch(discovery_target) as mock_get:
            mock_get.return_value = OIDCConfiguration.model_validate(
                valid_oidc_configuration_dict
            )
            provider = Auth0Provider(
                config_url=TEST_CONFIG_URL,
                client_id=TEST_CLIENT_ID,
                client_secret=TEST_CLIENT_SECRET,
                audience=TEST_AUDIENCE,
                base_url=TEST_BASE_URL,
                jwt_signing_key="test-secret",
            )
            # Redirect path and scopes fall back to the documented defaults.
            assert str(provider.base_url) == TEST_BASE_URL
            assert provider._redirect_path == "/auth/callback"
            assert provider._token_validator.required_scopes == ["openid"]
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "tests/server/auth/providers/test_auth0.py",
"license": "Apache License 2.0",
"lines": 80,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
PrefectHQ/fastmcp:tests/server/auth/test_oidc_proxy.py | """Comprehensive tests for OIDC Proxy Provider functionality."""
import json
from unittest.mock import MagicMock, patch
import pytest
from httpx import Response
from pydantic import AnyHttpUrl
from fastmcp.server.auth.oidc_proxy import OIDCConfiguration, OIDCProxy
from fastmcp.server.auth.providers.introspection import IntrospectionTokenVerifier
from fastmcp.server.auth.providers.jwt import JWTVerifier
# Canonical endpoint and client values shared by the tests below.
TEST_ISSUER = "https://example.com"
TEST_AUTHORIZATION_ENDPOINT = "https://example.com/authorize"
TEST_TOKEN_ENDPOINT = "https://example.com/oauth/token"
TEST_CONFIG_URL = AnyHttpUrl("https://example.com/.well-known/openid-configuration")
TEST_CLIENT_ID = "test-client-id"
TEST_CLIENT_SECRET = "test-client-secret"
TEST_BASE_URL = AnyHttpUrl("https://example.com:8000/")
# =============================================================================
# Test Fixtures
# =============================================================================
@pytest.fixture
def valid_oidc_configuration_dict():
    """Create a valid OIDC configuration dict for testing."""
    return dict(
        issuer=TEST_ISSUER,
        authorization_endpoint=TEST_AUTHORIZATION_ENDPOINT,
        token_endpoint=TEST_TOKEN_ENDPOINT,
        jwks_uri="https://example.com/.well-known/jwks.json",
        response_types_supported=["code"],
        subject_types_supported=["public"],
        id_token_signing_alg_values_supported=["RS256"],
    )
@pytest.fixture
def invalid_oidc_configuration_dict():
    """Discovery document missing the metadata that strict mode requires."""
    return dict(
        issuer=TEST_ISSUER,
        authorization_endpoint=TEST_AUTHORIZATION_ENDPOINT,
        token_endpoint=TEST_TOKEN_ENDPOINT,
        jwks_uri="https://example.com/.well-known/jwks.json",
    )
@pytest.fixture
def valid_google_oidc_configuration_dict():
    """Create a valid Google OIDC configuration dict for testing.

    See: https://accounts.google.com/.well-known/openid-configuration
    """
    # Verbatim snapshot of Google's published discovery document, kept as a
    # JSON string and parsed so the fixture matches what httpx would return.
    google_config_str = """
    {
        "issuer": "https://accounts.google.com",
        "authorization_endpoint": "https://accounts.google.com/o/oauth2/v2/auth",
        "device_authorization_endpoint": "https://oauth2.googleapis.com/device/code",
        "token_endpoint": "https://oauth2.googleapis.com/token",
        "userinfo_endpoint": "https://openidconnect.googleapis.com/v1/userinfo",
        "revocation_endpoint": "https://oauth2.googleapis.com/revoke",
        "jwks_uri": "https://www.googleapis.com/oauth2/v3/certs",
        "response_types_supported": [
            "code",
            "token",
            "id_token",
            "code token",
            "code id_token",
            "token id_token",
            "code token id_token",
            "none"
        ],
        "response_modes_supported": [
            "query",
            "fragment",
            "form_post"
        ],
        "subject_types_supported": [
            "public"
        ],
        "id_token_signing_alg_values_supported": [
            "RS256"
        ],
        "scopes_supported": [
            "openid",
            "email",
            "profile"
        ],
        "token_endpoint_auth_methods_supported": [
            "client_secret_post",
            "client_secret_basic"
        ],
        "claims_supported": [
            "aud",
            "email",
            "email_verified",
            "exp",
            "family_name",
            "given_name",
            "iat",
            "iss",
            "name",
            "picture",
            "sub"
        ],
        "code_challenge_methods_supported": [
            "plain",
            "S256"
        ],
        "grant_types_supported": [
            "authorization_code",
            "refresh_token",
            "urn:ietf:params:oauth:grant-type:device_code",
            "urn:ietf:params:oauth:grant-type:jwt-bearer"
        ]
    }
    """
    return json.loads(google_config_str)
@pytest.fixture
def valid_auth0_oidc_configuration_dict():
    """Create a valid Auth0 OIDC configuration dict for testing.

    See: https://<tenant>.us.auth0.com/.well-known/openid-configuration
    """
    # Verbatim snapshot of an Auth0 tenant's discovery document, kept as a
    # JSON string and parsed so the fixture matches what httpx would return.
    auth0_config_str = """
    {
        "issuer": "https://example.us.auth0.com/",
        "authorization_endpoint": "https://example.us.auth0.com/authorize",
        "token_endpoint": "https://example.us.auth0.com/oauth/token",
        "device_authorization_endpoint": "https://example.us.auth0.com/oauth/device/code",
        "userinfo_endpoint": "https://example.us.auth0.com/userinfo",
        "mfa_challenge_endpoint": "https://example.us.auth0.com/mfa/challenge",
        "jwks_uri": "https://example.us.auth0.com/.well-known/jwks.json",
        "registration_endpoint": "https://example.us.auth0.com/oidc/register",
        "revocation_endpoint": "https://example.us.auth0.com/oauth/revoke",
        "scopes_supported": [
            "openid",
            "profile",
            "offline_access",
            "name",
            "given_name",
            "family_name",
            "nickname",
            "email",
            "email_verified",
            "picture",
            "created_at",
            "identities",
            "phone",
            "address"
        ],
        "response_types_supported": [
            "code",
            "token",
            "id_token",
            "code token",
            "code id_token",
            "token id_token",
            "code token id_token"
        ],
        "code_challenge_methods_supported": [
            "S256",
            "plain"
        ],
        "response_modes_supported": [
            "query",
            "fragment",
            "form_post"
        ],
        "subject_types_supported": [
            "public"
        ],
        "token_endpoint_auth_methods_supported": [
            "client_secret_basic",
            "client_secret_post",
            "private_key_jwt",
            "tls_client_auth",
            "self_signed_tls_client_auth"
        ],
        "token_endpoint_auth_signing_alg_values_supported": [
            "RS256",
            "RS384",
            "PS256"
        ],
        "claims_supported": [
            "aud",
            "auth_time",
            "created_at",
            "email",
            "email_verified",
            "exp",
            "family_name",
            "given_name",
            "iat",
            "identities",
            "iss",
            "name",
            "nickname",
            "phone_number",
            "picture",
            "sub"
        ],
        "request_uri_parameter_supported": false,
        "request_parameter_supported": true,
        "id_token_signing_alg_values_supported": [
            "HS256",
            "RS256",
            "PS256"
        ],
        "tls_client_certificate_bound_access_tokens": true,
        "request_object_signing_alg_values_supported": [
            "RS256",
            "RS384",
            "PS256"
        ],
        "backchannel_logout_supported": true,
        "backchannel_logout_session_supported": true,
        "end_session_endpoint": "https://example.us.auth0.com/oidc/logout",
        "backchannel_authentication_endpoint": "https://example.us.auth0.com/bc-authorize",
        "backchannel_token_delivery_modes_supported": [
            "poll"
        ],
        "global_token_revocation_endpoint": "https://example.us.auth0.com/oauth/global-token-revocation/connection/{connectionName}",
        "global_token_revocation_endpoint_auth_methods_supported": [
            "global-token-revocation+jwt"
        ]
    }
    """
    return json.loads(auth0_config_str)
# =============================================================================
# Test Classes
# =============================================================================
def validate_config(config, source_dict):
    """Validate an OIDC configuration against the source dict.

    For every key present both in `source_dict` and on `config`, asserts
    that the model's value equals the source value (URL fields are compared
    as strings). Keys the model doesn't define are skipped.

    Args:
        config: The parsed OIDCConfiguration instance.
        source_dict: The raw discovery document it was built from.
    """
    for source_key, source_value in source_dict.items():
        # Skip metadata keys the model doesn't define.
        if not hasattr(config, source_key):
            continue
        # BUG FIX: the original computed getattr() twice — once before the
        # hasattr guard (dead code) and once after.
        config_value = getattr(config, source_key)
        if isinstance(config_value, AnyHttpUrl):
            config_value = str(config_value)
        assert config_value == source_value
class TestOIDCConfiguration:
    """Tests for OIDC configuration parsing and strict/non-strict validation."""

    def test_default_configuration(self, valid_oidc_configuration_dict):
        """Test default configuration with valid dict."""
        config = OIDCConfiguration.model_validate(valid_oidc_configuration_dict)
        validate_config(config, valid_oidc_configuration_dict)

    def test_default_configuration_with_issuer_trailing_slash(
        self, valid_oidc_configuration_dict
    ):
        """Test default configuration with valid dict and issuer trailing slash."""
        # Parsing should tolerate a trailing slash on the issuer URL.
        valid_oidc_configuration_dict["issuer"] += "/"
        config = OIDCConfiguration.model_validate(valid_oidc_configuration_dict)
        validate_config(config, valid_oidc_configuration_dict)

    def test_explicit_strict_configuration(self, valid_oidc_configuration_dict):
        """Test configuration with explicit True strict setting and valid dict."""
        valid_oidc_configuration_dict["strict"] = True
        config = OIDCConfiguration.model_validate(valid_oidc_configuration_dict)
        validate_config(config, valid_oidc_configuration_dict)

    def test_explicit_strict_configuration_with_issuer_trailing_slash(
        self, valid_oidc_configuration_dict
    ):
        """Test configuration with explicit True strict setting, valid dict and issuer trailing slash."""
        valid_oidc_configuration_dict["issuer"] += "/"
        config = OIDCConfiguration.model_validate(valid_oidc_configuration_dict)
        validate_config(config, valid_oidc_configuration_dict)

    def test_default_configuration_raises_error(self, invalid_oidc_configuration_dict):
        """Test default configuration with invalid dict."""
        # Strict mode is the default, so missing metadata must raise.
        with pytest.raises(ValueError, match="Missing required configuration metadata"):
            OIDCConfiguration.model_validate(invalid_oidc_configuration_dict)

    def test_explicit_strict_configuration_raises_error(
        self, invalid_oidc_configuration_dict
    ):
        """Test configuration with explicit True strict setting and invalid dict."""
        invalid_oidc_configuration_dict["strict"] = True
        with pytest.raises(ValueError, match="Missing required configuration metadata"):
            OIDCConfiguration.model_validate(invalid_oidc_configuration_dict)

    def test_bad_url_raises_error(self, valid_oidc_configuration_dict):
        """Test default configuration with bad URL setting."""
        valid_oidc_configuration_dict["issuer"] = "not-a-URL"
        with pytest.raises(ValueError, match="Invalid URL for configuration metadata"):
            OIDCConfiguration.model_validate(valid_oidc_configuration_dict)

    def test_explicit_strict_with_bad_url_raises_error(
        self, valid_oidc_configuration_dict
    ):
        """Test configuration with explicit True strict setting and bad URL setting."""
        valid_oidc_configuration_dict["strict"] = True
        valid_oidc_configuration_dict["issuer"] = "not-a-URL"
        with pytest.raises(ValueError, match="Invalid URL for configuration metadata"):
            OIDCConfiguration.model_validate(valid_oidc_configuration_dict)

    def test_not_strict_configuration(self):
        """Test configuration with explicit False strict setting."""
        # With strict=False, all metadata fields may be absent (None).
        config = OIDCConfiguration.model_validate({"strict": False})
        assert config.issuer is None
        assert config.authorization_endpoint is None
        assert config.token_endpoint is None
        assert config.jwks_uri is None
        assert config.response_types_supported is None
        assert config.subject_types_supported is None
        assert config.id_token_signing_alg_values_supported is None

    def test_not_strict_configuration_with_invalid_config(
        self, invalid_oidc_configuration_dict
    ):
        """Test non-strict configuration with an invalid (incomplete) dict."""
        invalid_oidc_configuration_dict["strict"] = False
        config = OIDCConfiguration.model_validate(invalid_oidc_configuration_dict)
        validate_config(config, invalid_oidc_configuration_dict)

    def test_not_strict_configuration_with_bad_url(self, valid_oidc_configuration_dict):
        """Test non-strict configuration with a malformed issuer URL."""
        valid_oidc_configuration_dict["strict"] = False
        valid_oidc_configuration_dict["issuer"] = "not-a-url"
        config = OIDCConfiguration.model_validate(valid_oidc_configuration_dict)
        validate_config(config, valid_oidc_configuration_dict)

    def test_google_configuration(self, valid_google_oidc_configuration_dict):
        """Test Google configuration."""
        config = OIDCConfiguration.model_validate(valid_google_oidc_configuration_dict)
        validate_config(config, valid_google_oidc_configuration_dict)

    def test_auth0_configuration(self, valid_auth0_oidc_configuration_dict):
        """Test Auth0 configuration."""
        config = OIDCConfiguration.model_validate(valid_auth0_oidc_configuration_dict)
        validate_config(config, valid_auth0_oidc_configuration_dict)
def validate_get_oidc_configuration(oidc_configuration, strict, timeout_seconds):
    """Fetch OIDC configuration through a mocked httpx.get and validate it.

    Returns the mock's ``call_args`` so callers can assert on the kwargs
    that were forwarded to ``httpx.get`` (e.g. ``timeout``).
    """
    with patch("httpx.get") as mock_get:
        # Stub out the HTTP layer so no network traffic occurs.
        fake_response = MagicMock(spec=Response)
        fake_response.json.return_value = oidc_configuration
        mock_get.return_value = fake_response

        fetched = OIDCConfiguration.get_oidc_configuration(
            config_url=TEST_CONFIG_URL,
            strict=strict,
            timeout_seconds=timeout_seconds,
        )

        validate_config(fetched, oidc_configuration)

        # Exactly one request, aimed at the configured metadata URL.
        mock_get.assert_called_once()
        call_args = mock_get.call_args
        assert str(call_args[0][0]) == str(TEST_CONFIG_URL)

        return call_args
class TestGetOIDCConfiguration:
    """Tests for fetching OIDC configuration over HTTP (mocked)."""

    def test_get_oidc_configuration(self, valid_oidc_configuration_dict):
        """Test with valid response and explicit timeout."""
        call_args = validate_get_oidc_configuration(
            valid_oidc_configuration_dict, True, 10
        )
        # The timeout must be forwarded to httpx.get.
        assert call_args[1]["timeout"] == 10

    def test_get_oidc_configuration_no_timeout(self, valid_oidc_configuration_dict):
        """Test with valid response and no timeout."""
        call_args = validate_get_oidc_configuration(
            valid_oidc_configuration_dict, True, None
        )
        # No timeout kwarg should be passed when none was requested.
        assert "timeout" not in call_args[1]

    def test_get_oidc_configuration_raises_error(
        self, invalid_oidc_configuration_dict
    ) -> None:
        """Test with invalid response."""
        with pytest.raises(ValueError, match="Missing required configuration metadata"):
            validate_get_oidc_configuration(invalid_oidc_configuration_dict, True, 10)

    def test_get_oidc_configuration_not_strict(
        self, invalid_oidc_configuration_dict
    ) -> None:
        """Test with invalid response and strict set to False."""
        with patch("httpx.get") as mock_get:
            mock_response = MagicMock(spec=Response)
            mock_response.json.return_value = invalid_oidc_configuration_dict
            mock_get.return_value = mock_response
            # Non-strict mode must accept incomplete metadata without raising.
            OIDCConfiguration.get_oidc_configuration(
                config_url=TEST_CONFIG_URL,
                strict=False,
                timeout_seconds=10,
            )
            mock_get.assert_called_once()
            call_args = mock_get.call_args
            assert str(call_args[0][0]) == str(TEST_CONFIG_URL)
def validate_proxy(mock_get, proxy, oidc_config):
    """Validate OIDC proxy.

    Asserts that discovery metadata was fetched exactly once from the
    configured URL and that the proxy captured the upstream endpoints,
    client credentials, base URL, and parsed OIDC configuration.
    """
    mock_get.assert_called_once()
    call_args = mock_get.call_args
    # Discovery must be requested from the configured metadata URL.
    assert str(call_args[0][0]) == str(TEST_CONFIG_URL)
    assert proxy._upstream_authorization_endpoint == TEST_AUTHORIZATION_ENDPOINT
    assert proxy._upstream_token_endpoint == TEST_TOKEN_ENDPOINT
    assert proxy._upstream_client_id == TEST_CLIENT_ID
    assert proxy._upstream_client_secret.get_secret_value() == TEST_CLIENT_SECRET
    assert str(proxy.base_url) == str(TEST_BASE_URL)
    assert proxy.oidc_config == oidc_config
class TestOIDCProxyInitialization:
    """Tests for OIDC proxy initialization.

    Every test patches ``OIDCConfiguration.get_oidc_configuration`` so that
    no network discovery request is made during ``OIDCProxy`` construction.
    """

    def test_default_initialization(self, valid_oidc_configuration_dict):
        """Test default initialization."""
        with patch(
            "fastmcp.server.auth.oidc_proxy.OIDCConfiguration.get_oidc_configuration"
        ) as mock_get:
            oidc_config = OIDCConfiguration.model_validate(
                valid_oidc_configuration_dict
            )
            mock_get.return_value = oidc_config
            proxy = OIDCProxy(
                config_url=TEST_CONFIG_URL,
                client_id=TEST_CLIENT_ID,
                client_secret=TEST_CLIENT_SECRET,
                base_url=TEST_BASE_URL,
                jwt_signing_key="test-secret",
            )
            validate_proxy(mock_get, proxy, oidc_config)

    def test_timeout_seconds_initialization(self, valid_oidc_configuration_dict):
        """Test timeout seconds initialization."""
        with patch(
            "fastmcp.server.auth.oidc_proxy.OIDCConfiguration.get_oidc_configuration"
        ) as mock_get:
            oidc_config = OIDCConfiguration.model_validate(
                valid_oidc_configuration_dict
            )
            mock_get.return_value = oidc_config
            proxy = OIDCProxy(
                config_url=TEST_CONFIG_URL,
                client_id=TEST_CLIENT_ID,
                client_secret=TEST_CLIENT_SECRET,
                base_url=TEST_BASE_URL,
                timeout_seconds=12,
                jwt_signing_key="test-secret",
            )
            validate_proxy(mock_get, proxy, oidc_config)
            # The timeout must be forwarded to the discovery fetch.
            call_args = mock_get.call_args
            assert call_args[1]["timeout_seconds"] == 12

    def test_token_verifier_initialization(self, valid_oidc_configuration_dict):
        """Test token verifier initialization."""
        with patch(
            "fastmcp.server.auth.oidc_proxy.OIDCConfiguration.get_oidc_configuration"
        ) as mock_get:
            oidc_config = OIDCConfiguration.model_validate(
                valid_oidc_configuration_dict
            )
            mock_get.return_value = oidc_config
            proxy = OIDCProxy(
                config_url=TEST_CONFIG_URL,
                client_id=TEST_CLIENT_ID,
                client_secret=TEST_CLIENT_SECRET,
                base_url=TEST_BASE_URL,
                algorithm="RS256",
                audience="oidc-proxy-test-audience",
                required_scopes=["required", "scopes"],
                jwt_signing_key="test-secret",
            )
            validate_proxy(mock_get, proxy, oidc_config)
            # A default JWTVerifier is built from algorithm/audience/scopes.
            assert isinstance(proxy._token_validator, JWTVerifier)
            assert proxy._token_validator.algorithm == "RS256"
            assert proxy._token_validator.audience == "oidc-proxy-test-audience"
            assert proxy._token_validator.required_scopes == ["required", "scopes"]

    def test_extra_parameters_initialization(self, valid_oidc_configuration_dict):
        """Test that the audience parameter is propagated to extra OAuth params."""
        with patch(
            "fastmcp.server.auth.oidc_proxy.OIDCConfiguration.get_oidc_configuration"
        ) as mock_get:
            oidc_config = OIDCConfiguration.model_validate(
                valid_oidc_configuration_dict
            )
            mock_get.return_value = oidc_config
            proxy = OIDCProxy(
                config_url=TEST_CONFIG_URL,
                client_id=TEST_CLIENT_ID,
                client_secret=TEST_CLIENT_SECRET,
                base_url=TEST_BASE_URL,
                audience="oidc-proxy-test-audience",
                jwt_signing_key="test-secret",
            )
            validate_proxy(mock_get, proxy, oidc_config)
            # The audience is injected into both authorize and token requests.
            assert proxy._extra_authorize_params == {
                "audience": "oidc-proxy-test-audience"
            }
            assert proxy._extra_token_params == {"audience": "oidc-proxy-test-audience"}

    def test_other_parameters_initialization(self, valid_oidc_configuration_dict):
        """Test other parameters initialization."""
        with patch(
            "fastmcp.server.auth.oidc_proxy.OIDCConfiguration.get_oidc_configuration"
        ) as mock_get:
            oidc_config = OIDCConfiguration.model_validate(
                valid_oidc_configuration_dict
            )
            mock_get.return_value = oidc_config
            proxy = OIDCProxy(
                config_url=TEST_CONFIG_URL,
                client_id=TEST_CLIENT_ID,
                client_secret=TEST_CLIENT_SECRET,
                base_url=TEST_BASE_URL,
                redirect_path="/oidc/proxy",
                allowed_client_redirect_uris=["http://localhost:*"],
                token_endpoint_auth_method="client_secret_post",
                jwt_signing_key="test-secret",
            )
            validate_proxy(mock_get, proxy, oidc_config)
            assert proxy._redirect_path == "/oidc/proxy"
            assert proxy._allowed_client_redirect_uris == ["http://localhost:*"]
            assert proxy._token_endpoint_auth_method == "client_secret_post"

    def test_no_config_url_initialization_raises_error(
        self, valid_oidc_configuration_dict
    ):
        """Test no config URL initialization."""
        with patch(
            "fastmcp.server.auth.oidc_proxy.OIDCConfiguration.get_oidc_configuration"
        ) as mock_get:
            oidc_config = OIDCConfiguration.model_validate(
                valid_oidc_configuration_dict
            )
            mock_get.return_value = oidc_config
            with pytest.raises(ValueError, match="Missing required config URL"):
                OIDCProxy(
                    config_url=None,  # type: ignore
                    client_id=TEST_CLIENT_ID,
                    client_secret=TEST_CLIENT_SECRET,
                    base_url=TEST_BASE_URL,
                    jwt_signing_key="test-secret",
                )

    def test_no_client_id_initialization_raises_error(
        self, valid_oidc_configuration_dict
    ):
        """Test no client id initialization."""
        with patch(
            "fastmcp.server.auth.oidc_proxy.OIDCConfiguration.get_oidc_configuration"
        ) as mock_get:
            oidc_config = OIDCConfiguration.model_validate(
                valid_oidc_configuration_dict
            )
            mock_get.return_value = oidc_config
            with pytest.raises(ValueError, match="Missing required client id"):
                OIDCProxy(
                    config_url=TEST_CONFIG_URL,
                    client_id=None,  # type: ignore
                    client_secret=TEST_CLIENT_SECRET,
                    base_url=TEST_BASE_URL,
                )

    def test_no_client_secret_initialization_raises_error(
        self, valid_oidc_configuration_dict
    ):
        """Test no client secret initialization."""
        with patch(
            "fastmcp.server.auth.oidc_proxy.OIDCConfiguration.get_oidc_configuration"
        ) as mock_get:
            oidc_config = OIDCConfiguration.model_validate(
                valid_oidc_configuration_dict
            )
            mock_get.return_value = oidc_config
            with pytest.raises(ValueError, match="Missing required client secret"):
                OIDCProxy(
                    config_url=TEST_CONFIG_URL,
                    client_id=TEST_CLIENT_ID,
                    client_secret=None,  # type: ignore
                    base_url=TEST_BASE_URL,
                )

    def test_no_base_url_initialization_raises_error(
        self, valid_oidc_configuration_dict
    ):
        """Test no base URL initialization."""
        with patch(
            "fastmcp.server.auth.oidc_proxy.OIDCConfiguration.get_oidc_configuration"
        ) as mock_get:
            oidc_config = OIDCConfiguration.model_validate(
                valid_oidc_configuration_dict
            )
            mock_get.return_value = oidc_config
            with pytest.raises(ValueError, match="Missing required base URL"):
                OIDCProxy(
                    config_url=TEST_CONFIG_URL,
                    client_id=TEST_CLIENT_ID,
                    client_secret=TEST_CLIENT_SECRET,
                    base_url=None,  # type: ignore
                )

    def test_custom_token_verifier_initialization(self, valid_oidc_configuration_dict):
        """Test initialization with custom token verifier."""
        with patch(
            "fastmcp.server.auth.oidc_proxy.OIDCConfiguration.get_oidc_configuration"
        ) as mock_get:
            oidc_config = OIDCConfiguration.model_validate(
                valid_oidc_configuration_dict
            )
            mock_get.return_value = oidc_config
            # Create custom verifier for opaque tokens
            custom_verifier = IntrospectionTokenVerifier(
                introspection_url="https://example.com/oauth/introspect",
                client_id="introspection-client",
                client_secret="introspection-secret",
                required_scopes=["custom", "scopes"],
            )
            proxy = OIDCProxy(
                config_url=TEST_CONFIG_URL,
                client_id=TEST_CLIENT_ID,
                client_secret=TEST_CLIENT_SECRET,
                base_url=TEST_BASE_URL,
                token_verifier=custom_verifier,
                jwt_signing_key="test-secret",
            )
            validate_proxy(mock_get, proxy, oidc_config)
            # Verify the custom verifier is used
            assert proxy._token_validator is custom_verifier
            assert isinstance(proxy._token_validator, IntrospectionTokenVerifier)
            # Verify required_scopes are properly loaded from the custom verifier
            assert proxy.required_scopes == ["custom", "scopes"]

    def test_custom_token_verifier_with_algorithm_raises_error(
        self, valid_oidc_configuration_dict
    ):
        """Test that providing algorithm with custom verifier raises error."""
        with patch(
            "fastmcp.server.auth.oidc_proxy.OIDCConfiguration.get_oidc_configuration"
        ) as mock_get:
            oidc_config = OIDCConfiguration.model_validate(
                valid_oidc_configuration_dict
            )
            mock_get.return_value = oidc_config
            custom_verifier = IntrospectionTokenVerifier(
                introspection_url="https://example.com/oauth/introspect",
                client_id="introspection-client",
                client_secret="introspection-secret",
            )
            with pytest.raises(
                ValueError,
                match="Cannot specify 'algorithm' when providing a custom token_verifier",
            ):
                OIDCProxy(
                    config_url=TEST_CONFIG_URL,
                    client_id=TEST_CLIENT_ID,
                    client_secret=TEST_CLIENT_SECRET,
                    base_url=TEST_BASE_URL,
                    token_verifier=custom_verifier,
                    algorithm="RS256",  # This should cause an error
                    jwt_signing_key="test-secret",
                )

    def test_custom_token_verifier_with_required_scopes_raises_error(
        self, valid_oidc_configuration_dict
    ):
        """Test that providing required_scopes with custom verifier raises error."""
        with patch(
            "fastmcp.server.auth.oidc_proxy.OIDCConfiguration.get_oidc_configuration"
        ) as mock_get:
            oidc_config = OIDCConfiguration.model_validate(
                valid_oidc_configuration_dict
            )
            mock_get.return_value = oidc_config
            custom_verifier = IntrospectionTokenVerifier(
                introspection_url="https://example.com/oauth/introspect",
                client_id="introspection-client",
                client_secret="introspection-secret",
            )
            with pytest.raises(
                ValueError,
                match="Cannot specify 'required_scopes' when providing a custom token_verifier",
            ):
                OIDCProxy(
                    config_url=TEST_CONFIG_URL,
                    client_id=TEST_CLIENT_ID,
                    client_secret=TEST_CLIENT_SECRET,
                    base_url=TEST_BASE_URL,
                    token_verifier=custom_verifier,
                    required_scopes=["read", "write"],  # This should cause an error
                    jwt_signing_key="test-secret",
                )

    def test_custom_token_verifier_with_audience_allowed(
        self, valid_oidc_configuration_dict
    ):
        """Test that providing audience with custom verifier is allowed (for OAuth flow)."""
        with patch(
            "fastmcp.server.auth.oidc_proxy.OIDCConfiguration.get_oidc_configuration"
        ) as mock_get:
            oidc_config = OIDCConfiguration.model_validate(
                valid_oidc_configuration_dict
            )
            mock_get.return_value = oidc_config
            custom_verifier = IntrospectionTokenVerifier(
                introspection_url="https://example.com/oauth/introspect",
                client_id="introspection-client",
                client_secret="introspection-secret",
            )
            # This should NOT raise an error - audience is for OAuth flow
            proxy = OIDCProxy(
                config_url=TEST_CONFIG_URL,
                client_id=TEST_CLIENT_ID,
                client_secret=TEST_CLIENT_SECRET,
                base_url=TEST_BASE_URL,
                token_verifier=custom_verifier,
                audience="test-audience",  # Should be allowed for OAuth flow
                jwt_signing_key="test-secret",
            )
            validate_proxy(mock_get, proxy, oidc_config)
            assert proxy._extra_authorize_params == {"audience": "test-audience"}
            assert proxy._extra_token_params == {"audience": "test-audience"}

    def test_extra_authorize_params_initialization(self, valid_oidc_configuration_dict):
        """Test extra authorize params initialization."""
        with patch(
            "fastmcp.server.auth.oidc_proxy.OIDCConfiguration.get_oidc_configuration"
        ) as mock_get:
            oidc_config = OIDCConfiguration.model_validate(
                valid_oidc_configuration_dict
            )
            mock_get.return_value = oidc_config
            proxy = OIDCProxy(
                config_url=TEST_CONFIG_URL,
                client_id=TEST_CLIENT_ID,
                client_secret=TEST_CLIENT_SECRET,
                base_url=TEST_BASE_URL,
                jwt_signing_key="test-secret",
                extra_authorize_params={
                    "prompt": "consent",
                    "access_type": "offline",
                },
            )
            validate_proxy(mock_get, proxy, oidc_config)
            assert proxy._extra_authorize_params == {
                "prompt": "consent",
                "access_type": "offline",
            }
            # Token params should be empty since we didn't set them
            assert proxy._extra_token_params == {}

    def test_extra_token_params_initialization(self, valid_oidc_configuration_dict):
        """Test extra token params initialization."""
        with patch(
            "fastmcp.server.auth.oidc_proxy.OIDCConfiguration.get_oidc_configuration"
        ) as mock_get:
            oidc_config = OIDCConfiguration.model_validate(
                valid_oidc_configuration_dict
            )
            mock_get.return_value = oidc_config
            proxy = OIDCProxy(
                config_url=TEST_CONFIG_URL,
                client_id=TEST_CLIENT_ID,
                client_secret=TEST_CLIENT_SECRET,
                base_url=TEST_BASE_URL,
                jwt_signing_key="test-secret",
                extra_token_params={"custom_param": "custom_value"},
            )
            validate_proxy(mock_get, proxy, oidc_config)
            # Authorize params should be empty since we didn't set them
            assert proxy._extra_authorize_params == {}
            assert proxy._extra_token_params == {"custom_param": "custom_value"}

    def test_extra_params_merge_with_audience(self, valid_oidc_configuration_dict):
        """Test that extra params merge with audience, with user params taking precedence."""
        with patch(
            "fastmcp.server.auth.oidc_proxy.OIDCConfiguration.get_oidc_configuration"
        ) as mock_get:
            oidc_config = OIDCConfiguration.model_validate(
                valid_oidc_configuration_dict
            )
            mock_get.return_value = oidc_config
            proxy = OIDCProxy(
                config_url=TEST_CONFIG_URL,
                client_id=TEST_CLIENT_ID,
                client_secret=TEST_CLIENT_SECRET,
                base_url=TEST_BASE_URL,
                audience="original-audience",
                jwt_signing_key="test-secret",
                extra_authorize_params={
                    "prompt": "consent",
                    "audience": "overridden-audience",  # Should override the audience param
                },
                extra_token_params={"custom": "value"},
            )
            validate_proxy(mock_get, proxy, oidc_config)
            # User's extra_authorize_params should override audience
            assert proxy._extra_authorize_params == {
                "audience": "overridden-audience",
                "prompt": "consent",
            }
            # Token params should have both audience (from audience param) and custom
            assert proxy._extra_token_params == {
                "audience": "original-audience",
                "custom": "value",
            }
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "tests/server/auth/test_oidc_proxy.py",
"license": "Apache License 2.0",
"lines": 755,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
PrefectHQ/fastmcp:src/fastmcp/cli/install/gemini_cli.py | """Gemini CLI integration for FastMCP install using Cyclopts."""
import shutil
import subprocess
import sys
from pathlib import Path
from typing import Annotated
import cyclopts
from rich import print
from fastmcp.utilities.logging import get_logger
from fastmcp.utilities.mcp_server_config.v1.environments.uv import UVEnvironment
from .shared import process_common_args
logger = get_logger(__name__)
def find_gemini_command() -> str | None:
"""Find the Gemini CLI command."""
# First try shutil.which() in case it's a real executable in PATH
gemini_in_path = shutil.which("gemini")
if gemini_in_path:
try:
# If 'gemini --version' fails, it's not the correct path
subprocess.run(
[gemini_in_path, "--version"],
check=True,
capture_output=True,
)
return gemini_in_path
except (subprocess.CalledProcessError, FileNotFoundError):
pass
# Check common installation locations (aliases don't work with subprocess)
potential_paths = [
# Default Gemini CLI installation location (after migration)
Path.home() / ".gemini" / "local" / "gemini",
# npm global installation on macOS/Linux (default)
Path("/usr/local/bin/gemini"),
# npm global installation with custom prefix
Path.home() / ".npm-global" / "bin" / "gemini",
# Homebrew installation on macOS
Path("/opt/homebrew/bin/gemini"),
]
for path in potential_paths:
if path.exists():
# If 'gemini --version' fails, it's not the correct path
try:
subprocess.run(
[str(path), "--version"],
check=True,
capture_output=True,
)
return str(path)
except (subprocess.CalledProcessError, FileNotFoundError):
continue
return None
def check_gemini_cli_available() -> bool:
    """Report whether a working Gemini CLI executable can be located."""
    located = find_gemini_command()
    return located is not None
def install_gemini_cli(
    file: Path,
    server_object: str | None,
    name: str,
    *,
    with_editable: list[Path] | None = None,
    with_packages: list[str] | None = None,
    env_vars: dict[str, str] | None = None,
    python_version: str | None = None,
    with_requirements: Path | None = None,
    project: Path | None = None,
) -> bool:
    """Install FastMCP server in Gemini CLI.

    Locates the Gemini CLI, builds a ``uv``-wrapped ``fastmcp run`` command
    for the server, and registers it via ``gemini mcp add``.

    Args:
        file: Path to the server file
        server_object: Optional server object name (for :object suffix)
        name: Name for the server in Gemini CLI
        with_editable: Optional list of directories to install in editable mode
        with_packages: Optional list of additional packages to install
        env_vars: Optional dictionary of environment variables
        python_version: Optional Python version to use
        with_requirements: Optional requirements file to install from
        project: Optional project directory to run within

    Returns:
        True if installation was successful, False otherwise
    """
    # Check if Gemini CLI is available
    gemini_cmd = find_gemini_command()
    if not gemini_cmd:
        print(
            "[red]Gemini CLI not found.[/red]\n"
            "[blue]Please ensure Gemini CLI is installed. Try running 'gemini --version' to verify.[/blue]\n"
            "[blue]You can install it using 'npm install -g @google/gemini-cli'.[/blue]\n"
        )
        return False

    # fastmcp itself is always added so `fastmcp run` is available in the env.
    env_config = UVEnvironment(
        python=python_version,
        dependencies=(with_packages or []) + ["fastmcp"],
        requirements=with_requirements,
        project=project,
        editable=with_editable,
    )

    # Build server spec from parsed components
    if server_object:
        server_spec = f"{file.resolve()}:{server_object}"
    else:
        server_spec = str(file.resolve())

    # Build the full command
    full_command = env_config.build_command(["fastmcp", "run", server_spec])

    # Build gemini mcp add command
    cmd_parts = [gemini_cmd, "mcp", "add"]

    # Add environment variables if specified (before the name and command)
    if env_vars:
        for key, value in env_vars.items():
            cmd_parts.extend(["-e", f"{key}={value}"])

    # Add server name and command; "--" separates gemini's own options
    # from the server command's arguments.
    cmd_parts.extend([name, full_command[0], "--"])
    cmd_parts.extend(full_command[1:])

    try:
        # Run the gemini mcp add command
        subprocess.run(cmd_parts, check=True, capture_output=True, text=True)
        return True
    except subprocess.CalledProcessError as e:
        # Surface stderr from the CLI when available for a useful message.
        print(
            f"[red]Failed to install '[bold]{name}[/bold]' in Gemini CLI: {e.stderr.strip() if e.stderr else str(e)}[/red]"
        )
        return False
    except Exception as e:
        print(f"[red]Failed to install '[bold]{name}[/bold]' in Gemini CLI: {e}[/red]")
        return False
async def gemini_cli_command(
    server_spec: str,
    *,
    server_name: Annotated[
        str | None,
        cyclopts.Parameter(
            name=["--name", "-n"],
            help="Custom name for the server in Gemini CLI",
        ),
    ] = None,
    with_editable: Annotated[
        list[Path] | None,
        cyclopts.Parameter(
            "--with-editable",
            help="Directory with pyproject.toml to install in editable mode (can be used multiple times)",
        ),
    ] = None,
    with_packages: Annotated[
        list[str] | None,
        cyclopts.Parameter(
            "--with", help="Additional packages to install (can be used multiple times)"
        ),
    ] = None,
    env_vars: Annotated[
        list[str] | None,
        cyclopts.Parameter(
            "--env",
            help="Environment variables in KEY=VALUE format (can be used multiple times)",
        ),
    ] = None,
    env_file: Annotated[
        Path | None,
        cyclopts.Parameter(
            "--env-file",
            help="Load environment variables from .env file",
        ),
    ] = None,
    python: Annotated[
        str | None,
        cyclopts.Parameter(
            "--python",
            help="Python version to use (e.g., 3.10, 3.11)",
        ),
    ] = None,
    with_requirements: Annotated[
        Path | None,
        cyclopts.Parameter(
            "--with-requirements",
            help="Requirements file to install dependencies from",
        ),
    ] = None,
    project: Annotated[
        Path | None,
        cyclopts.Parameter(
            "--project",
            help="Run the command within the given project directory",
        ),
    ] = None,
) -> None:
    """Install an MCP server in Gemini CLI.

    Cyclopts entry point: parses CLI options (see each Parameter's help
    text), normalizes them, and delegates to ``install_gemini_cli``.
    Exits with status 1 if installation fails.

    Args:
        server_spec: Python file to install, optionally with :object suffix
    """
    # Convert None to empty lists for list parameters
    with_editable = with_editable or []
    with_packages = with_packages or []
    env_vars = env_vars or []

    # Shared parsing/validation used by all install targets.
    file, server_object, name, packages, env_dict = await process_common_args(
        server_spec, server_name, with_packages, env_vars, env_file
    )

    success = install_gemini_cli(
        file=file,
        server_object=server_object,
        name=name,
        with_editable=with_editable,
        with_packages=packages,
        env_vars=env_dict,
        python_version=python,
        with_requirements=with_requirements,
        project=project,
    )

    if success:
        print(f"[green]Successfully installed '{name}' in Gemini CLI")
    else:
        sys.exit(1)
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "src/fastmcp/cli/install/gemini_cli.py",
"license": "Apache License 2.0",
"lines": 209,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
PrefectHQ/fastmcp:tests/server/auth/test_auth_provider.py | import re
import httpx
import pytest
from pydantic import AnyHttpUrl
from fastmcp import FastMCP
from fastmcp.server.auth import RemoteAuthProvider
from fastmcp.server.auth.providers.jwt import StaticTokenVerifier
class TestAuthProviderBase:
    """Test suite for base AuthProvider behaviors that apply to all auth providers."""

    @pytest.fixture
    def basic_remote_provider(self):
        """Basic RemoteAuthProvider fixture for testing base AuthProvider behaviors."""
        # Create a static token verifier with a test token
        tokens = {
            "test_token": {
                "client_id": "test-client",
                "scopes": ["read", "write"],
            }
        }
        token_verifier = StaticTokenVerifier(tokens=tokens)
        return RemoteAuthProvider(
            token_verifier=token_verifier,
            authorization_servers=[AnyHttpUrl("https://auth.example.com")],
            base_url="https://my-server.com",
        )

    async def test_www_authenticate_header_points_to_base_url(
        self, basic_remote_provider
    ):
        """Test that WWW-Authenticate header points to RFC 9728-compliant metadata URL.

        The WWW-Authenticate header includes the resource path per RFC 9728,
        so clients can discover where the metadata is actually registered.
        """
        mcp = FastMCP("test-server", auth=basic_remote_provider)
        # Mount MCP at a non-root path
        mcp_http_app = mcp.http_app(path="/api/v1/mcp")
        async with httpx.AsyncClient(
            transport=httpx.ASGITransport(app=mcp_http_app),
            base_url="https://my-server.com",
        ) as client:
            # Make unauthorized request to MCP endpoint
            response = await client.get("/api/v1/mcp")
            assert response.status_code == 401
            www_auth = response.headers.get("www-authenticate", "")
            assert "resource_metadata=" in www_auth
            # Extract the metadata URL from the header
            match = re.search(r'resource_metadata="([^"]+)"', www_auth)
            assert match is not None
            metadata_url = match.group(1)
            # The metadata URL includes the resource path per RFC 9728
            assert (
                metadata_url
                == "https://my-server.com/.well-known/oauth-protected-resource/api/v1/mcp"
            )

    async def test_automatic_resource_url_capture(self, basic_remote_provider):
        """Test that resource URL is automatically captured from MCP path.

        This test verifies PR #1682 functionality where the resource URL
        should be automatically set based on the MCP endpoint path.
        """
        mcp = FastMCP("test-server", auth=basic_remote_provider)
        # Mount MCP at a specific path
        mcp_http_app = mcp.http_app(path="/mcp")
        async with httpx.AsyncClient(
            transport=httpx.ASGITransport(app=mcp_http_app),
            base_url="https://my-server.com",
        ) as client:
            # The .well-known metadata is at a path-aware location per RFC 9728
            response = await client.get("/.well-known/oauth-protected-resource/mcp")
            assert response.status_code == 200
            data = response.json()
            # The resource URL should be automatically set to the MCP path
            assert data.get("resource") == "https://my-server.com/mcp"

    async def test_automatic_resource_url_with_nested_path(self, basic_remote_provider):
        """Test automatic resource URL capture with deeply nested MCP path."""
        mcp = FastMCP("test-server", auth=basic_remote_provider)
        mcp_http_app = mcp.http_app(path="/api/v2/services/mcp")
        async with httpx.AsyncClient(
            transport=httpx.ASGITransport(app=mcp_http_app),
            base_url="https://my-server.com",
        ) as client:
            # The .well-known metadata includes the resource path per RFC 9728
            response = await client.get(
                "/.well-known/oauth-protected-resource/api/v2/services/mcp"
            )
            assert response.status_code == 200
            data = response.json()
            # Should automatically capture the nested path
            assert data.get("resource") == "https://my-server.com/api/v2/services/mcp"
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "tests/server/auth/test_auth_provider.py",
"license": "Apache License 2.0",
"lines": 87,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
PrefectHQ/fastmcp:src/fastmcp/utilities/mcp_server_config/v1/environments/base.py | from abc import ABC, abstractmethod
from pathlib import Path
from pydantic import BaseModel, Field
class Environment(BaseModel, ABC):
    """Base class for environment configuration.

    Subclasses describe how to wrap a command so it runs inside a
    particular runtime environment (e.g. a uv-managed Python environment).
    """

    # Discriminator used to identify the concrete environment subclass.
    type: str = Field(description="Environment type identifier")

    @abstractmethod
    def build_command(self, command: list[str]) -> list[str]:
        """Build the full command with environment setup.

        Args:
            command: Base command to wrap with environment setup

        Returns:
            Full command ready for subprocess execution
        """

    async def prepare(self, output_dir: Path | None = None) -> None:
        """Prepare the environment (optional, can be no-op).

        Args:
            output_dir: Directory for persistent environment setup
        """
        # Default no-op implementation; subclasses override when setup
        # work (e.g. dependency installation) is required.
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "src/fastmcp/utilities/mcp_server_config/v1/environments/base.py",
"license": "Apache License 2.0",
"lines": 20,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | documentation |
PrefectHQ/fastmcp:src/fastmcp/utilities/mcp_server_config/v1/environments/uv.py | import shutil
import subprocess
from pathlib import Path
from typing import Literal
from pydantic import Field
from fastmcp.utilities.logging import get_logger
from fastmcp.utilities.mcp_server_config.v1.environments.base import Environment
logger = get_logger("cli.config")
class UVEnvironment(Environment):
    """Configuration for Python environment setup.

    Describes how a server's Python environment is built with uv: either by
    wrapping a command in ``uv run`` (see ``build_command``) or by
    materializing a persistent uv project on disk (see ``prepare``).
    """

    # Discriminator value selecting this environment type in config files.
    type: Literal["uv"] = "uv"
    python: str | None = Field(
        default=None,
        description="Python version constraint",
        examples=["3.10", "3.11", "3.12"],
    )
    dependencies: list[str] | None = Field(
        default=None,
        description="Python packages to install with PEP 508 specifiers",
        examples=[["fastmcp>=2.0,<3", "httpx", "pandas>=2.0"]],
    )
    requirements: Path | None = Field(
        default=None,
        description="Path to requirements.txt file",
        examples=["requirements.txt", "../requirements/prod.txt"],
    )
    project: Path | None = Field(
        default=None,
        description="Path to project directory containing pyproject.toml",
        examples=[".", "../my-project"],
    )
    editable: list[Path] | None = Field(
        default=None,
        description="Directories to install in editable mode",
        examples=[[".", "../my-package"], ["/path/to/package"]],
    )

    def build_command(self, command: list[str]) -> list[str]:
        """Build complete uv run command with environment args and command to execute.

        Args:
            command: Command to execute (e.g., ["fastmcp", "run", "server.py"])

        Returns:
            Complete command ready for subprocess.run, including "uv" prefix if needed.
            If no environment configuration is set, returns the command unchanged.
        """
        # If no environment setup is needed, return command as-is
        if not self._must_run_with_uv():
            return command

        args = ["uv", "run"]

        # Add project if specified
        if self.project:
            args.extend(["--project", str(self.project.resolve())])

        # Add Python version if specified (only if no project, as project has its own Python)
        if self.python and not self.project:
            args.extend(["--python", self.python])

        # Always add dependencies, requirements, and editable packages
        # These work with --project to add additional packages on top of the project env
        if self.dependencies:
            # sorted(set(...)) dedupes and keeps the generated command deterministic
            for dep in sorted(set(self.dependencies)):
                args.extend(["--with", dep])

        # Add requirements file
        if self.requirements:
            args.extend(["--with-requirements", str(self.requirements.resolve())])

        # Add editable packages
        if self.editable:
            for editable_path in self.editable:
                args.extend(["--with-editable", str(editable_path.resolve())])

        # Add the command
        args.extend(command)

        return args

    def _must_run_with_uv(self) -> bool:
        """Check if this environment config requires uv to set up.

        Returns:
            True if any environment settings require uv run
        """
        # Any configured field means the bare interpreter is not sufficient.
        return any(
            [
                self.python is not None,
                self.dependencies is not None,
                self.requirements is not None,
                self.project is not None,
                self.editable is not None,
            ]
        )

    async def prepare(self, output_dir: Path | None = None) -> None:
        """Prepare the Python environment using uv.

        Initializes a uv project, pins Python, registers dependencies with
        ``--no-sync``, then performs one final ``uv sync`` to install
        everything in a single resolution pass.

        Args:
            output_dir: Directory where the persistent uv project will be created.
                If None, creates a temporary directory for ephemeral use.

        Raises:
            RuntimeError: If uv is not installed or any uv subcommand fails.
        """
        # Check if uv is available
        if not shutil.which("uv"):
            raise RuntimeError(
                "uv is not installed. Please install it with: "
                "curl -LsSf https://astral.sh/uv/install.sh | sh"
            )

        # Only prepare environment if there are actual settings to apply
        if not self._must_run_with_uv():
            logger.debug("No environment settings configured, skipping preparation")
            return

        # Handle None case for ephemeral use
        if output_dir is None:
            import tempfile

            output_dir = Path(tempfile.mkdtemp(prefix="fastmcp-env-"))
            logger.info(f"Creating ephemeral environment in {output_dir}")
        else:
            logger.info(f"Creating persistent environment in {output_dir}")
            output_dir = Path(output_dir).resolve()

        # Initialize the project
        logger.debug(f"Initializing uv project in {output_dir}")
        try:
            subprocess.run(
                [
                    "uv",
                    "init",
                    "--project",
                    str(output_dir),
                    "--name",
                    "fastmcp-env",
                ],
                check=True,
                capture_output=True,
                text=True,
            )
        except subprocess.CalledProcessError as e:
            # If project already exists, that's fine - continue
            if "already initialized" in e.stderr.lower():
                logger.debug(
                    f"Project already initialized at {output_dir}, continuing..."
                )
            else:
                logger.error(f"Failed to initialize project: {e.stderr}")
                raise RuntimeError(f"Failed to initialize project: {e.stderr}") from e

        # Pin Python version if specified
        if self.python:
            logger.debug(f"Pinning Python version to {self.python}")
            try:
                subprocess.run(
                    [
                        "uv",
                        "python",
                        "pin",
                        self.python,
                        "--project",
                        str(output_dir),
                    ],
                    check=True,
                    capture_output=True,
                    text=True,
                )
            except subprocess.CalledProcessError as e:
                logger.error(f"Failed to pin Python version: {e.stderr}")
                raise RuntimeError(f"Failed to pin Python version: {e.stderr}") from e

        # Add dependencies with --no-sync to defer installation
        # dependencies ALWAYS include fastmcp; this is compatible with
        # specific fastmcp versions that might be in the dependencies list
        dependencies = (self.dependencies or []) + ["fastmcp"]
        logger.debug(f"Adding dependencies: {', '.join(dependencies)}")
        try:
            subprocess.run(
                [
                    "uv",
                    "add",
                    *dependencies,
                    "--no-sync",
                    "--project",
                    str(output_dir),
                ],
                check=True,
                capture_output=True,
                text=True,
            )
        except subprocess.CalledProcessError as e:
            logger.error(f"Failed to add dependencies: {e.stderr}")
            raise RuntimeError(f"Failed to add dependencies: {e.stderr}") from e

        # Add requirements file if specified
        if self.requirements:
            logger.debug(f"Adding requirements from {self.requirements}")
            # Resolve requirements path relative to current directory
            req_path = Path(self.requirements).resolve()
            try:
                subprocess.run(
                    [
                        "uv",
                        "add",
                        "-r",
                        str(req_path),
                        "--no-sync",
                        "--project",
                        str(output_dir),
                    ],
                    check=True,
                    capture_output=True,
                    text=True,
                )
            except subprocess.CalledProcessError as e:
                logger.error(f"Failed to add requirements: {e.stderr}")
                raise RuntimeError(f"Failed to add requirements: {e.stderr}") from e

        # Add editable packages if specified
        if self.editable:
            editable_paths = [str(Path(e).resolve()) for e in self.editable]
            logger.debug(f"Adding editable packages: {', '.join(editable_paths)}")
            try:
                subprocess.run(
                    [
                        "uv",
                        "add",
                        "--editable",
                        *editable_paths,
                        "--no-sync",
                        "--project",
                        str(output_dir),
                    ],
                    check=True,
                    capture_output=True,
                    text=True,
                )
            except subprocess.CalledProcessError as e:
                logger.error(f"Failed to add editable packages: {e.stderr}")
                raise RuntimeError(
                    f"Failed to add editable packages: {e.stderr}"
                ) from e

        # Final sync to install everything
        logger.info("Installing dependencies...")
        try:
            subprocess.run(
                ["uv", "sync", "--project", str(output_dir)],
                check=True,
                capture_output=True,
                text=True,
            )
        except subprocess.CalledProcessError as e:
            logger.error(f"Failed to sync dependencies: {e.stderr}")
            raise RuntimeError(f"Failed to sync dependencies: {e.stderr}") from e

        logger.info(f"Environment prepared successfully in {output_dir}")
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "src/fastmcp/utilities/mcp_server_config/v1/environments/uv.py",
"license": "Apache License 2.0",
"lines": 234,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
PrefectHQ/fastmcp:src/fastmcp/utilities/auth.py | """Authentication utility helpers."""
from __future__ import annotations
import base64
import json
from typing import Any
def _decode_jwt_part(token: str, part_index: int) -> dict[str, Any]:
    """Decode a JWT part (header or payload) without signature verification.

    Args:
        token: JWT token string (header.payload.signature)
        part_index: 0 for header, 1 for payload

    Returns:
        Decoded part as a dictionary

    Raises:
        ValueError: If token is not a valid JWT format
    """
    segments = token.split(".")
    if len(segments) != 3:
        raise ValueError("Invalid JWT format (expected 3 parts)")

    # JWT segments are unpadded base64url; restore padding to a multiple of 4.
    segment = segments[part_index]
    padded = segment + "=" * (-len(segment) % 4)
    decoded = base64.urlsafe_b64decode(padded)
    return json.loads(decoded)
def decode_jwt_header(token: str) -> dict[str, Any]:
    """Decode JWT header without signature verification.

    Useful for extracting the key ID (kid) for JWKS lookup.

    Args:
        token: JWT token string (header.payload.signature)

    Returns:
        Decoded header as a dictionary

    Raises:
        ValueError: If token is not a valid JWT format
    """
    # The header is the first dot-separated segment of the token.
    header_index = 0
    return _decode_jwt_part(token, header_index)
def decode_jwt_payload(token: str) -> dict[str, Any]:
    """Decode JWT payload without signature verification.

    Use only for tokens received directly from trusted sources (e.g., IdP token endpoints).

    Args:
        token: JWT token string (header.payload.signature)

    Returns:
        Decoded payload as a dictionary

    Raises:
        ValueError: If token is not a valid JWT format
    """
    # The payload (claims set) is the second dot-separated segment.
    payload_index = 1
    return _decode_jwt_part(token, payload_index)
def parse_scopes(value: Any) -> list[str] | None:
"""Parse scopes from environment variables or settings values.
Accepts either a JSON array string, a comma- or space-separated string,
a list of strings, or ``None``. Returns a list of scopes or ``None`` if
no value is provided.
"""
if value is None or value == "":
return None if value is None else []
if isinstance(value, list):
return [str(v).strip() for v in value if str(v).strip()]
if isinstance(value, str):
value = value.strip()
if not value:
return []
# Try JSON array first
if value.startswith("["):
try:
data = json.loads(value)
if isinstance(data, list):
return [str(v).strip() for v in data if str(v).strip()]
except Exception:
pass
# Fallback to comma/space separated list
return [s.strip() for s in value.replace(",", " ").split() if s.strip()]
return value
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "src/fastmcp/utilities/auth.py",
"license": "Apache License 2.0",
"lines": 68,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | documentation |
PrefectHQ/fastmcp:tests/cli/test_project_prepare.py | """Tests for the fastmcp project prepare command."""
import subprocess
from pathlib import Path
from unittest.mock import AsyncMock, MagicMock, patch
import pytest
from fastmcp.utilities.mcp_server_config import MCPServerConfig
from fastmcp.utilities.mcp_server_config.v1.environments.uv import UVEnvironment
from fastmcp.utilities.mcp_server_config.v1.sources.filesystem import FileSystemSource
class TestMCPServerConfigPrepare:
    """Test the MCPServerConfig.prepare() method."""

    # NOTE: @patch decorators apply bottom-up, so in every test below the
    # prepare_environment mock is the first injected argument (mock_env)
    # and the prepare_source mock is the second (mock_src).

    @patch(
        "fastmcp.utilities.mcp_server_config.v1.mcp_server_config.MCPServerConfig.prepare_source",
        new_callable=AsyncMock,
    )
    @patch(
        "fastmcp.utilities.mcp_server_config.v1.mcp_server_config.MCPServerConfig.prepare_environment",
        new_callable=AsyncMock,
    )
    async def test_prepare_calls_both_methods(self, mock_env, mock_src):
        """Test that prepare() calls both prepare_environment and prepare_source."""
        config = MCPServerConfig(
            source=FileSystemSource(path="server.py"),
            environment=UVEnvironment(python="3.10"),
        )

        await config.prepare()

        mock_env.assert_called_once()
        mock_src.assert_called_once()

    @patch(
        "fastmcp.utilities.mcp_server_config.v1.mcp_server_config.MCPServerConfig.prepare_source",
        new_callable=AsyncMock,
    )
    @patch(
        "fastmcp.utilities.mcp_server_config.v1.mcp_server_config.MCPServerConfig.prepare_environment",
        new_callable=AsyncMock,
    )
    async def test_prepare_with_output_dir(self, mock_env, mock_src):
        """Test that prepare() with output_dir calls prepare_environment with it."""
        config = MCPServerConfig(
            source=FileSystemSource(path="server.py"),
            environment=UVEnvironment(python="3.10"),
        )

        output_path = Path("/tmp/test-env")
        await config.prepare(skip_source=False, output_dir=output_path)

        # output_dir should be forwarded only to the environment step.
        mock_env.assert_called_once_with(output_dir=output_path)
        mock_src.assert_called_once()

    @patch(
        "fastmcp.utilities.mcp_server_config.v1.mcp_server_config.MCPServerConfig.prepare_source",
        new_callable=AsyncMock,
    )
    @patch(
        "fastmcp.utilities.mcp_server_config.v1.mcp_server_config.MCPServerConfig.prepare_environment",
        new_callable=AsyncMock,
    )
    async def test_prepare_skip_source(self, mock_env, mock_src):
        """Test that prepare() skips source when skip_source=True."""
        config = MCPServerConfig(
            source=FileSystemSource(path="server.py"),
            environment=UVEnvironment(python="3.10"),
        )

        await config.prepare(skip_source=True)

        mock_env.assert_called_once_with(output_dir=None)
        mock_src.assert_not_called()

    @patch(
        "fastmcp.utilities.mcp_server_config.v1.mcp_server_config.MCPServerConfig.prepare_source",
        new_callable=AsyncMock,
    )
    @patch(
        "fastmcp.utilities.mcp_server_config.v1.environments.uv.UVEnvironment.prepare",
        new_callable=AsyncMock,
    )
    async def test_prepare_no_environment_settings(self, mock_env_prepare, mock_src):
        """Test that prepare() works with default empty environment config."""
        config = MCPServerConfig(
            source=FileSystemSource(path="server.py"),
            # environment defaults to empty Environment()
        )

        await config.prepare(skip_source=False)

        # Environment prepare should be called even with empty config
        mock_env_prepare.assert_called_once_with(output_dir=None)
        mock_src.assert_called_once()
class TestEnvironmentPrepare:
    """Test the Environment.prepare() method."""

    @patch("shutil.which")
    async def test_prepare_no_uv_installed(self, mock_which, tmp_path):
        """Test that prepare() raises error when uv is not installed."""
        # shutil.which returning None simulates uv being absent from PATH.
        mock_which.return_value = None

        env = UVEnvironment(python="3.10")

        with pytest.raises(RuntimeError, match="uv is not installed"):
            await env.prepare(tmp_path / "test-env")

    @patch("subprocess.run")
    @patch("shutil.which")
    async def test_prepare_no_settings(self, mock_which, mock_run, tmp_path):
        """Test that prepare() does nothing when no settings are configured."""
        mock_which.return_value = "/usr/bin/uv"

        env = UVEnvironment()  # No settings

        await env.prepare(tmp_path / "test-env")

        # Should not run any commands
        mock_run.assert_not_called()

    @patch("subprocess.run")
    @patch("shutil.which")
    async def test_prepare_with_python(self, mock_which, mock_run, tmp_path):
        """Test that prepare() runs uv with python version."""
        mock_which.return_value = "/usr/bin/uv"
        mock_run.return_value = MagicMock(
            returncode=0, stdout="Environment cached", stderr=""
        )

        env = UVEnvironment(python="3.10")

        await env.prepare(tmp_path / "test-env")

        # Should run multiple uv commands for initializing the project
        assert mock_run.call_count > 0
        # Check the first call should be uv init
        first_call_args = mock_run.call_args_list[0][0][0]
        assert first_call_args[0] == "uv"
        assert "init" in first_call_args

    @patch("subprocess.run")
    @patch("shutil.which")
    async def test_prepare_with_dependencies(self, mock_which, mock_run, tmp_path):
        """Test that prepare() includes dependencies."""
        mock_which.return_value = "/usr/bin/uv"
        mock_run.return_value = MagicMock(returncode=0, stdout="", stderr="")

        env = UVEnvironment(dependencies=["numpy", "pandas"])

        await env.prepare(tmp_path / "test-env")

        # Should run multiple uv commands, one of which should be uv add
        assert mock_run.call_count > 0

        # Find the add command call
        add_call = None
        for call_args, _ in mock_run.call_args_list:
            args = call_args[0]
            if "add" in args:
                add_call = args
                break

        assert add_call is not None, "Should have called uv add"
        assert "numpy" in add_call
        assert "pandas" in add_call
        assert "fastmcp" in add_call  # Always added

    @patch("subprocess.run")
    @patch("shutil.which")
    async def test_prepare_command_fails(self, mock_which, mock_run, tmp_path):
        """Test that prepare() raises error when uv command fails."""
        mock_which.return_value = "/usr/bin/uv"
        # Every subprocess call fails, so the very first step (uv init) errors.
        mock_run.side_effect = subprocess.CalledProcessError(
            1, ["uv"], stderr="Package not found"
        )

        env = UVEnvironment(python="3.10")

        with pytest.raises(RuntimeError, match="Failed to initialize project"):
            await env.prepare(tmp_path / "test-env")
class TestProjectPrepareCommand:
    """Test the CLI project prepare command."""

    @patch("fastmcp.utilities.mcp_server_config.MCPServerConfig.from_file")
    @patch("fastmcp.utilities.mcp_server_config.MCPServerConfig.find_config")
    async def test_project_prepare_auto_detect(self, mock_find, mock_from_file):
        """Test project prepare with auto-detected config."""
        from fastmcp.cli.cli import prepare

        # Setup mocks
        mock_find.return_value = Path("fastmcp.json")
        mock_config = AsyncMock()
        mock_from_file.return_value = mock_config

        # Run command with output_dir
        with patch("sys.exit"):
            with patch("fastmcp.cli.cli.console.print") as mock_print:
                await prepare(config_path=None, output_dir="./test-env")

        # Should find and load config
        mock_find.assert_called_once()
        mock_from_file.assert_called_once_with(Path("fastmcp.json"))

        # Should call prepare with output_dir
        mock_config.prepare.assert_called_once_with(
            skip_source=False,
            output_dir=Path("./test-env"),
        )

        # Should print success message
        mock_print.assert_called()
        success_call = mock_print.call_args_list[-1][0][0]
        assert "Project prepared successfully" in success_call

    @patch("pathlib.Path.exists")
    @patch("fastmcp.utilities.mcp_server_config.MCPServerConfig.from_file")
    async def test_project_prepare_explicit_path(self, mock_from_file, mock_exists):
        """Test project prepare with explicit config path."""
        from fastmcp.cli.cli import prepare

        # Setup mocks
        mock_exists.return_value = True
        mock_config = AsyncMock()
        mock_from_file.return_value = mock_config

        # Run command with explicit path
        with patch("fastmcp.cli.cli.console.print"):
            await prepare(config_path="myconfig.json", output_dir="./test-env")

        # Should load specified config
        mock_from_file.assert_called_once_with(Path("myconfig.json"))

        # Should call prepare
        mock_config.prepare.assert_called_once_with(
            skip_source=False,
            output_dir=Path("./test-env"),
        )

    @patch("fastmcp.utilities.mcp_server_config.MCPServerConfig.find_config")
    async def test_project_prepare_no_config_found(self, mock_find):
        """Test project prepare when no config is found."""
        from fastmcp.cli.cli import prepare

        # Setup mocks
        mock_find.return_value = None

        # Run command without output_dir - should exit with error for missing output_dir
        with pytest.raises(SystemExit) as exc_info:
            with patch("fastmcp.cli.cli.logger.error") as mock_error:
                await prepare(config_path=None, output_dir=None)

        assert isinstance(exc_info.value, SystemExit)
        assert exc_info.value.code == 1
        mock_error.assert_called()
        error_msg = mock_error.call_args[0][0]
        assert "--output-dir parameter is required" in error_msg

    @patch("pathlib.Path.exists")
    async def test_project_prepare_config_not_exists(self, mock_exists):
        """Test project prepare when specified config doesn't exist."""
        from fastmcp.cli.cli import prepare

        # Setup mocks
        mock_exists.return_value = False

        # Run command without output_dir - should exit with error for missing output_dir
        with pytest.raises(SystemExit) as exc_info:
            with patch("fastmcp.cli.cli.logger.error") as mock_error:
                await prepare(config_path="missing.json", output_dir=None)

        assert isinstance(exc_info.value, SystemExit)
        assert exc_info.value.code == 1
        mock_error.assert_called()
        error_msg = mock_error.call_args[0][0]
        assert "--output-dir parameter is required" in error_msg

    @patch("pathlib.Path.exists")
    @patch("fastmcp.utilities.mcp_server_config.MCPServerConfig.from_file")
    async def test_project_prepare_failure(self, mock_from_file, mock_exists):
        """Test project prepare when prepare() fails."""
        from fastmcp.cli.cli import prepare

        # Setup mocks
        mock_exists.return_value = True
        mock_config = AsyncMock()
        mock_config.prepare.side_effect = RuntimeError("Preparation failed")
        mock_from_file.return_value = mock_config

        # Run command - should exit with error
        with pytest.raises(SystemExit) as exc_info:
            with patch("fastmcp.cli.cli.console.print") as mock_print:
                await prepare(config_path="config.json", output_dir="./test-env")

        assert isinstance(exc_info.value, SystemExit)
        assert exc_info.value.code == 1

        # Should print error message
        error_call = mock_print.call_args_list[-1][0][0]
        assert "Failed to prepare project" in error_call
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "tests/cli/test_project_prepare.py",
"license": "Apache License 2.0",
"lines": 241,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
PrefectHQ/fastmcp:src/fastmcp/server/auth/redirect_validation.py | """Utilities for validating client redirect URIs in OAuth flows.
This module provides secure redirect URI validation with wildcard support,
protecting against userinfo-based bypass attacks like http://localhost@evil.com.
"""
import fnmatch
from urllib.parse import urlparse
from pydantic import AnyUrl
def _parse_host_port(netloc: str) -> tuple[str | None, str | None]:
"""Parse host and port from netloc, handling wildcards.
Args:
netloc: The netloc component (e.g., "localhost:8080" or "localhost:*")
Returns:
Tuple of (host, port_str) where port_str may be "*" or a number string
"""
# Handle userinfo (remove it for parsing, but we check separately)
if "@" in netloc:
netloc = netloc.split("@")[-1]
# Handle IPv6 addresses [::1]:port
if netloc.startswith("["):
bracket_end = netloc.find("]")
if bracket_end == -1:
return netloc, None
host = netloc[1:bracket_end]
rest = netloc[bracket_end + 1 :]
if rest.startswith(":"):
return host, rest[1:]
return host, None
# Handle regular host:port
if ":" in netloc:
host, port = netloc.rsplit(":", 1)
return host, port
return netloc, None
def _match_host(uri_host: str | None, pattern_host: str | None) -> bool:
"""Match host component, supporting *.example.com wildcard patterns.
Args:
uri_host: The host from the URI being validated
pattern_host: The host pattern (may start with *.)
Returns:
True if the host matches
"""
if not uri_host or not pattern_host:
return uri_host == pattern_host
# Normalize to lowercase for comparison
uri_host = uri_host.lower()
pattern_host = pattern_host.lower()
# Handle *.example.com wildcard subdomain patterns
if pattern_host.startswith("*."):
suffix = pattern_host[1:] # .example.com
# Only match actual subdomains (foo.example.com), NOT the base domain
return uri_host.endswith(suffix) and uri_host != pattern_host[2:]
return uri_host == pattern_host
def _match_port(
uri_port: str | None,
pattern_port: str | None,
uri_scheme: str,
) -> bool:
"""Match port component, supporting * wildcard for any port.
Args:
uri_port: The port from the URI (None if default, string otherwise)
pattern_port: The port from the pattern (None if default, "*" for wildcard)
uri_scheme: The URI scheme (http/https) for default port handling
Returns:
True if the port matches
"""
# Wildcard matches any port
if pattern_port == "*":
return True
# Normalize None to default ports
default_port = "443" if uri_scheme == "https" else "80"
uri_effective = uri_port if uri_port else default_port
pattern_effective = pattern_port if pattern_port else default_port
return uri_effective == pattern_effective
def _match_path(uri_path: str, pattern_path: str) -> bool:
"""Match path component using fnmatch for wildcard support.
Args:
uri_path: The path from the URI
pattern_path: The path pattern (may contain * wildcards)
Returns:
True if the path matches
"""
# Normalize empty paths to /
uri_path = uri_path or "/"
pattern_path = pattern_path or "/"
# Empty or root pattern path matches any path
# This makes http://localhost:* match http://localhost:3000/callback
if pattern_path == "/":
return True
# Use fnmatch for path wildcards (e.g., /auth/*)
return fnmatch.fnmatch(uri_path, pattern_path)
def matches_allowed_pattern(uri: str, pattern: str) -> bool:
"""Securely check if a URI matches an allowed pattern with wildcard support.
This function parses both the URI and pattern as URLs, comparing each
component separately to prevent bypass attacks like userinfo injection.
Patterns support wildcards:
- http://localhost:* matches any localhost port
- http://127.0.0.1:* matches any 127.0.0.1 port
- https://*.example.com/* matches any subdomain of example.com
- https://app.example.com/auth/* matches any path under /auth/
Security: Rejects URIs with userinfo (user:pass@host) which could bypass
naive string matching (e.g., http://localhost@evil.com).
Args:
uri: The redirect URI to validate
pattern: The allowed pattern (may contain wildcards)
Returns:
True if the URI matches the pattern
"""
try:
uri_parsed = urlparse(uri)
pattern_parsed = urlparse(pattern)
except ValueError:
return False
# SECURITY: Reject URIs with userinfo (user:pass@host)
# This prevents bypass attacks like http://localhost@evil.com/callback
# which would match http://localhost:* with naive fnmatch
if uri_parsed.username is not None or uri_parsed.password is not None:
return False
# Scheme must match exactly
if uri_parsed.scheme.lower() != pattern_parsed.scheme.lower():
return False
# Parse host and port manually to handle wildcards
uri_host, uri_port = _parse_host_port(uri_parsed.netloc)
pattern_host, pattern_port = _parse_host_port(pattern_parsed.netloc)
# Host must match (with subdomain wildcard support)
if not _match_host(uri_host, pattern_host):
return False
# Port must match (with * wildcard support)
if not _match_port(uri_port, pattern_port, uri_parsed.scheme.lower()):
return False
# Path must match (with fnmatch wildcards)
return _match_path(uri_parsed.path, pattern_parsed.path)
def validate_redirect_uri(
    redirect_uri: str | AnyUrl | None,
    allowed_patterns: list[str] | None,
) -> bool:
    """Validate a redirect URI against allowed patterns.

    Args:
        redirect_uri: The redirect URI to validate
        allowed_patterns: List of allowed patterns. If None, all URIs are allowed (for DCR compatibility).
            If empty list, no URIs are allowed.
            To restrict to localhost only, explicitly pass DEFAULT_LOCALHOST_PATTERNS.

    Returns:
        True if the redirect URI is allowed
    """
    # None means "use the client's registered default", which is always fine.
    if redirect_uri is None:
        return True

    # None (as opposed to []) disables filtering entirely so dynamically
    # registered clients can bring their own redirect URIs.
    if allowed_patterns is None:
        return True

    candidate = str(redirect_uri)
    return any(matches_allowed_pattern(candidate, pattern) for pattern in allowed_patterns)


# Patterns restricting redirects to local loopback clients; pass these
# explicitly to validate_redirect_uri for localhost-only validation.
DEFAULT_LOCALHOST_PATTERNS = [
    "http://localhost:*",
    "http://127.0.0.1:*",
]
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "src/fastmcp/server/auth/redirect_validation.py",
"license": "Apache License 2.0",
"lines": 160,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
PrefectHQ/fastmcp:tests/cli/test_server_args.py | """Test server argument passing functionality."""
from pathlib import Path
import pytest
from fastmcp.utilities.mcp_server_config import MCPServerConfig
from fastmcp.utilities.mcp_server_config.v1.sources.filesystem import FileSystemSource
class TestServerArguments:
    """Test passing arguments to servers."""

    async def test_server_with_argparse(self, tmp_path):
        """Test a server that uses argparse with command line arguments."""
        server_file = tmp_path / "argparse_server.py"
        server_file.write_text("""
import argparse
from fastmcp import FastMCP

parser = argparse.ArgumentParser()
parser.add_argument("--name", default="DefaultServer")
parser.add_argument("--port", type=int, default=8000)
parser.add_argument("--debug", action="store_true")
args = parser.parse_args()

server_name = f"{args.name}:{args.port}"
if args.debug:
    server_name += " (Debug)"

mcp = FastMCP(server_name)

@mcp.tool
def get_config() -> dict:
    return {"name": args.name, "port": args.port, "debug": args.debug}
""")

        # Test with arguments
        source = FileSystemSource(path=str(server_file))
        config = MCPServerConfig(source=source)

        from fastmcp.cli.cli import with_argv

        # Simulate passing arguments
        with with_argv(["--name", "TestServer", "--port", "9000", "--debug"]):
            server = await config.source.load_server()
            assert server.name == "TestServer:9000 (Debug)"

            # Test the tool works and can access the parsed args
            tools = await server.list_tools()
            assert any(t.name == "get_config" for t in tools)

    async def test_server_with_no_args(self, tmp_path):
        """Test a server that uses argparse with no arguments (defaults)."""
        server_file = tmp_path / "default_server.py"
        server_file.write_text("""
import argparse
from fastmcp import FastMCP

parser = argparse.ArgumentParser()
parser.add_argument("--name", default="DefaultName")
args = parser.parse_args()

mcp = FastMCP(args.name)
""")

        source = FileSystemSource(path=str(server_file))
        config = MCPServerConfig(source=source)

        from fastmcp.cli.cli import with_argv

        # Test with empty args list (should use defaults)
        with with_argv([]):
            server = await config.source.load_server()
            assert server.name == "DefaultName"

    async def test_server_with_sys_argv_access(self, tmp_path):
        """Test a server that directly accesses sys.argv."""
        server_file = tmp_path / "sysargv_server.py"
        server_file.write_text("""
import sys
from fastmcp import FastMCP

# Direct sys.argv access (less common but should work)
name = "DirectServer"
if len(sys.argv) > 1 and sys.argv[1] == "--custom":
    name = "CustomServer"

mcp = FastMCP(name)
""")

        source = FileSystemSource(path=str(server_file))
        config = MCPServerConfig(source=source)

        from fastmcp.cli.cli import with_argv

        # Test with custom argument
        with with_argv(["--custom"]):
            server = await config.source.load_server()
            assert server.name == "CustomServer"

        # Test without argument
        with with_argv([]):
            server = await config.source.load_server()
            assert server.name == "DirectServer"

    async def test_config_server_example(self):
        """Test the actual config_server.py example."""
        # Find the examples directory
        examples_dir = Path(__file__).parent.parent.parent / "examples"
        config_server = examples_dir / "config_server.py"

        if not config_server.exists():
            pytest.skip("config_server.py example not found")

        source = FileSystemSource(path=str(config_server))
        config = MCPServerConfig(source=source)

        from fastmcp.cli.cli import with_argv

        # Test with debug flag
        with with_argv(["--name", "TestExample", "--debug"]):
            server = await config.source.load_server()
            assert server.name == "TestExample (Debug)"

            # Verify tools are available
            tools = await server.list_tools()
            assert any(t.name == "get_status" for t in tools)
            assert any(t.name == "echo_message" for t in tools)
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "tests/cli/test_server_args.py",
"license": "Apache License 2.0",
"lines": 96,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
PrefectHQ/fastmcp:tests/cli/test_with_argv.py | """Test the with_argv context manager."""
import sys
from unittest.mock import patch
import pytest
from fastmcp.cli.cli import with_argv
class TestWithArgv:
    """Test the with_argv context manager."""

    def test_with_argv_replaces_args(self):
        """Test that with_argv properly replaces sys.argv."""
        original_argv = sys.argv[:]
        test_args = ["--name", "TestServer", "--debug"]

        with with_argv(test_args):
            # Should preserve script name and add new args
            assert sys.argv[0] == original_argv[0]
            assert sys.argv[1:] == test_args

        # Should restore original argv after context
        assert sys.argv == original_argv

    def test_with_argv_none_does_nothing(self):
        """Test that with_argv(None) doesn't change sys.argv."""
        original_argv = sys.argv[:]

        with with_argv(None):
            assert sys.argv == original_argv

        assert sys.argv == original_argv

    def test_with_argv_empty_list(self):
        """Test that with_argv([]) clears arguments but keeps script name."""
        original_argv = sys.argv[:]

        with with_argv([]):
            # Should have only the script name (no additional args)
            assert sys.argv == [original_argv[0]]
            assert len(sys.argv) == 1

        assert sys.argv == original_argv

    def test_with_argv_restores_on_exception(self):
        """Test that sys.argv is restored even if an exception occurs."""
        original_argv = sys.argv[:]
        test_args = ["--error"]

        with pytest.raises(ValueError):
            with with_argv(test_args):
                assert sys.argv == [original_argv[0]] + test_args
                raise ValueError("Test error")

        # Should still restore original argv
        assert sys.argv == original_argv

    def test_with_argv_nested(self):
        """Test nested with_argv contexts."""
        original_argv = sys.argv[:]
        args1 = ["--level1"]
        args2 = ["--level2", "--debug"]

        with with_argv(args1):
            assert sys.argv == [original_argv[0]] + args1

            with with_argv(args2):
                assert sys.argv == [original_argv[0]] + args2

            # Should restore to level 1
            assert sys.argv == [original_argv[0]] + args1

        # Should restore to original
        assert sys.argv == original_argv

    @patch("sys.argv", ["test_script.py", "existing", "args"])
    def test_with_argv_with_existing_args(self):
        """Test with_argv when sys.argv already has arguments."""
        original_argv = sys.argv[:]
        assert original_argv == ["test_script.py", "existing", "args"]

        test_args = ["--new", "args"]
        with with_argv(test_args):
            # Should replace existing args but keep script name
            assert sys.argv == ["test_script.py", "--new", "args"]

        # Should restore original
        assert sys.argv == original_argv
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "tests/cli/test_with_argv.py",
"license": "Apache License 2.0",
"lines": 65,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
PrefectHQ/fastmcp:tests/server/auth/test_oauth_proxy_redirect_validation.py | """Tests for OAuth proxy redirect URI validation."""
from unittest.mock import patch
import pytest
from key_value.aio.stores.memory import MemoryStore
from mcp.shared.auth import InvalidRedirectUriError
from pydantic import AnyHttpUrl, AnyUrl
from fastmcp.server.auth.auth import TokenVerifier
from fastmcp.server.auth.cimd import CIMDDocument
from fastmcp.server.auth.oauth_proxy import OAuthProxy
from fastmcp.server.auth.oauth_proxy.models import ProxyDCRClient
# Standard public IP used for DNS mocking in tests
TEST_PUBLIC_IP = "93.184.216.34"
class MockTokenVerifier(TokenVerifier):
    """Stub verifier that accepts every token as the fixed user ``test-user``."""

    def __init__(self):
        # No scopes are required for the stubbed verifier.
        self.required_scopes = []

    async def verify_token(self, token: str) -> dict | None:  # type: ignore[override]
        # Every token resolves to the same test subject.
        return dict(sub="test-user")
class TestProxyDCRClient:
    """Test ProxyDCRClient redirect URI validation."""

    def test_default_allows_all(self):
        """Test that default configuration allows all URIs for DCR compatibility."""
        client = ProxyDCRClient(
            client_id="test",
            client_secret="secret",
            redirect_uris=[AnyUrl("http://localhost:3000")],
        )
        # All URIs should be allowed by default for DCR compatibility
        assert client.validate_redirect_uri(AnyUrl("http://localhost:3000")) == AnyUrl(
            "http://localhost:3000"
        )
        assert client.validate_redirect_uri(AnyUrl("http://localhost:8080")) == AnyUrl(
            "http://localhost:8080"
        )
        assert client.validate_redirect_uri(AnyUrl("http://127.0.0.1:3000")) == AnyUrl(
            "http://127.0.0.1:3000"
        )
        assert client.validate_redirect_uri(AnyUrl("http://example.com")) == AnyUrl(
            "http://example.com"
        )
        assert client.validate_redirect_uri(
            AnyUrl("https://claude.ai/api/mcp/auth_callback")
        ) == AnyUrl("https://claude.ai/api/mcp/auth_callback")

    def test_custom_patterns(self):
        """Test custom redirect URI patterns."""
        client = ProxyDCRClient(
            client_id="test",
            client_secret="secret",
            redirect_uris=[AnyUrl("http://localhost:3000")],
            allowed_redirect_uri_patterns=[
                "http://localhost:*",
                "https://app.example.com/*",
            ],
        )
        # Allowed by patterns
        assert client.validate_redirect_uri(AnyUrl("http://localhost:3000"))
        assert client.validate_redirect_uri(AnyUrl("https://app.example.com/callback"))
        # Not allowed by patterns - will fallback to base validation
        with pytest.raises(InvalidRedirectUriError):
            client.validate_redirect_uri(AnyUrl("http://127.0.0.1:3000"))
        with pytest.raises(InvalidRedirectUriError):
            client.validate_redirect_uri(
                AnyUrl("cursor://anysphere.cursor-mcp/oauth/callback")
            )

    def test_default_not_applied_when_custom_patterns_supplied(self):
        """Test that default validation is not applied when custom patterns are supplied."""
        allowed_patterns = [
            "cursor://anysphere.cursor-mcp/oauth/callback",
            "https://app.example.com/*",
        ]
        client = ProxyDCRClient(
            client_id="test",
            client_secret="secret",
            redirect_uris=[AnyUrl("http://localhost:3000")],
            allowed_redirect_uri_patterns=allowed_patterns,
        )
        # Only the explicitly allowed patterns pass...
        assert client.validate_redirect_uri(
            AnyUrl("https://app.example.com/oauth/callback")
        )
        assert client.validate_redirect_uri(
            AnyUrl("cursor://anysphere.cursor-mcp/oauth/callback")
        )
        # ...even localhost defaults are rejected once custom patterns exist.
        with pytest.raises(InvalidRedirectUriError):
            client.validate_redirect_uri(AnyUrl("http://localhost:3000"))
        with pytest.raises(InvalidRedirectUriError):
            client.validate_redirect_uri(AnyUrl("http://127.0.0.1:3000"))
        with pytest.raises(InvalidRedirectUriError):
            client.validate_redirect_uri(AnyUrl("https://example.com"))

    def test_empty_list_allows_none(self):
        """Test that empty pattern list allows no URIs."""
        client = ProxyDCRClient(
            client_id="test",
            client_secret="secret",
            redirect_uris=[AnyUrl("http://localhost:3000")],
            allowed_redirect_uri_patterns=[],
        )
        # Nothing should be allowed (except the pre-registered redirect_uris via fallback)
        # Pre-registered URI should work via fallback to base validation
        assert client.validate_redirect_uri(AnyUrl("http://localhost:3000"))
        # Non-registered URIs should be rejected
        with pytest.raises(InvalidRedirectUriError):
            client.validate_redirect_uri(AnyUrl("http://example.com"))
        with pytest.raises(InvalidRedirectUriError):
            client.validate_redirect_uri(AnyUrl("https://anywhere.com:9999/path"))
        with pytest.raises(InvalidRedirectUriError):
            client.validate_redirect_uri(AnyUrl("http://localhost:5000"))

    def test_none_redirect_uri(self):
        """Test that None redirect URI uses default behavior."""
        client = ProxyDCRClient(
            client_id="test",
            client_secret="secret",
            redirect_uris=[AnyUrl("http://localhost:3000")],
        )
        # None should use the first registered URI
        result = client.validate_redirect_uri(None)
        assert result == AnyUrl("http://localhost:3000")

    def test_cimd_none_redirect_uri_single_exact(self):
        """CIMD clients may omit redirect_uri only when a single exact URI exists."""
        cimd_doc = CIMDDocument(
            client_id=AnyHttpUrl("https://example.com/client.json"),
            redirect_uris=["http://localhost:3000/callback"],
        )
        client = ProxyDCRClient(
            client_id="https://example.com/client.json",
            client_secret=None,
            redirect_uris=None,
            cimd_document=cimd_doc,
        )
        # The single exact CIMD URI is used as the fallback.
        result = client.validate_redirect_uri(None)
        assert result == AnyUrl("http://localhost:3000/callback")

    def test_cimd_none_redirect_uri_respects_proxy_patterns(self):
        """CIMD fallback redirect_uri must still satisfy proxy allowlist patterns."""
        cimd_doc = CIMDDocument(
            client_id=AnyHttpUrl("https://example.com/client.json"),
            redirect_uris=["https://evil.com/callback"],
        )
        client = ProxyDCRClient(
            client_id="https://example.com/client.json",
            client_secret=None,
            redirect_uris=None,
            cimd_document=cimd_doc,
            allowed_redirect_uri_patterns=["http://localhost:*"],
        )
        # The CIMD URI exists but violates the proxy allowlist → rejected.
        with pytest.raises(InvalidRedirectUriError):
            client.validate_redirect_uri(None)

    def test_cimd_none_redirect_uri_wildcard_rejected(self):
        """CIMD clients must specify redirect_uri when only wildcard patterns exist."""
        cimd_doc = CIMDDocument(
            client_id=AnyHttpUrl("https://example.com/client.json"),
            redirect_uris=["http://localhost:*/callback"],
        )
        client = ProxyDCRClient(
            client_id="https://example.com/client.json",
            client_secret=None,
            redirect_uris=None,
            cimd_document=cimd_doc,
        )
        # A wildcard cannot serve as a concrete fallback URI.
        with pytest.raises(InvalidRedirectUriError):
            client.validate_redirect_uri(None)

    def test_cimd_empty_proxy_allowlist_rejects_redirect_uri(self):
        """An explicit empty proxy allowlist should reject all CIMD redirect URIs."""
        cimd_doc = CIMDDocument(
            client_id=AnyHttpUrl("https://example.com/client.json"),
            redirect_uris=["http://localhost:3000/callback"],
        )
        client = ProxyDCRClient(
            client_id="https://example.com/client.json",
            client_secret=None,
            redirect_uris=None,
            cimd_document=cimd_doc,
            allowed_redirect_uri_patterns=[],
        )
        # Even a URI listed in the CIMD document fails an empty allowlist.
        with pytest.raises(InvalidRedirectUriError):
            client.validate_redirect_uri(AnyUrl("http://localhost:3000/callback"))
class TestOAuthProxyRedirectValidation:
    """Test OAuth proxy with redirect URI validation."""

    def test_proxy_default_allows_all(self):
        """Test that OAuth proxy defaults to allowing all URIs for DCR compatibility."""
        proxy = OAuthProxy(
            upstream_authorization_endpoint="https://auth.example.com/authorize",
            upstream_token_endpoint="https://auth.example.com/token",
            upstream_client_id="test-client",
            upstream_client_secret="test-secret",
            token_verifier=MockTokenVerifier(),
            base_url="http://localhost:8000",
            jwt_signing_key="test-secret",
            client_storage=MemoryStore(),
        )
        # The proxy should store None for default (allow all)
        assert proxy._allowed_client_redirect_uris is None

    def test_proxy_custom_patterns(self):
        """Test OAuth proxy with custom redirect patterns."""
        custom_patterns = ["http://localhost:*", "https://*.myapp.com/*"]
        proxy = OAuthProxy(
            upstream_authorization_endpoint="https://auth.example.com/authorize",
            upstream_token_endpoint="https://auth.example.com/token",
            upstream_client_id="test-client",
            upstream_client_secret="test-secret",
            token_verifier=MockTokenVerifier(),
            base_url="http://localhost:8000",
            allowed_client_redirect_uris=custom_patterns,
            jwt_signing_key="test-secret",
            client_storage=MemoryStore(),
        )
        # Patterns are stored verbatim for later per-client validation.
        assert proxy._allowed_client_redirect_uris == custom_patterns

    def test_proxy_empty_list_validation(self):
        """Test OAuth proxy with empty list (allow none)."""
        proxy = OAuthProxy(
            upstream_authorization_endpoint="https://auth.example.com/authorize",
            upstream_token_endpoint="https://auth.example.com/token",
            upstream_client_id="test-client",
            upstream_client_secret="test-secret",
            token_verifier=MockTokenVerifier(),
            base_url="http://localhost:8000",
            allowed_client_redirect_uris=[],
            jwt_signing_key="test-secret",
            client_storage=MemoryStore(),
        )
        # Empty list is preserved (distinct from None, which means allow-all).
        assert proxy._allowed_client_redirect_uris == []

    async def test_proxy_register_client_uses_patterns(self):
        """Test that registered clients use the configured patterns."""
        custom_patterns = ["https://app.example.com/*"]
        proxy = OAuthProxy(
            upstream_authorization_endpoint="https://auth.example.com/authorize",
            upstream_token_endpoint="https://auth.example.com/token",
            upstream_client_id="test-client",
            upstream_client_secret="test-secret",
            token_verifier=MockTokenVerifier(),
            base_url="http://localhost:8000",
            allowed_client_redirect_uris=custom_patterns,
            jwt_signing_key="test-secret",
            client_storage=MemoryStore(),
        )
        # Register a client
        from mcp.shared.auth import OAuthClientInformationFull

        client_info = OAuthClientInformationFull(
            client_id="new-client",
            client_secret="new-secret",
            redirect_uris=[AnyUrl("https://app.example.com/callback")],
        )
        await proxy.register_client(client_info)
        # Get the registered client
        registered = await proxy.get_client(
            "new-client"
        )  # Use the client ID we registered
        assert isinstance(registered, ProxyDCRClient)
        # The proxy-level patterns propagate to the stored client record.
        assert registered.allowed_redirect_uri_patterns == custom_patterns

    async def test_proxy_unregistered_client_returns_none(self):
        """Test that unregistered clients return None."""
        custom_patterns = ["http://localhost:*", "http://127.0.0.1:*"]
        proxy = OAuthProxy(
            upstream_authorization_endpoint="https://auth.example.com/authorize",
            upstream_token_endpoint="https://auth.example.com/token",
            upstream_client_id="test-client",
            upstream_client_secret="test-secret",
            token_verifier=MockTokenVerifier(),
            base_url="http://localhost:8000",
            allowed_client_redirect_uris=custom_patterns,
            jwt_signing_key="test-secret",
            client_storage=MemoryStore(),
        )
        # Get an unregistered client
        client = await proxy.get_client("unknown-client")
        assert client is None
class TestOAuthProxyCIMDClient:
    """Test that CIMD clients obtained via proxy carry their document and apply dual validation."""

    @pytest.fixture
    def mock_dns(self):
        """Mock DNS resolution to return test public IP."""
        # Patch the SSRF guard's resolver so the CIMD fetch is not blocked
        # as a private/internal address during tests.
        with patch(
            "fastmcp.server.auth.ssrf.resolve_hostname",
            return_value=[TEST_PUBLIC_IP],
        ):
            yield

    async def test_proxy_get_client_returns_cimd_client(self, httpx_mock, mock_dns):
        """CIMD client obtained via proxy's get_client has cimd_document attached."""
        url = "https://example.com/client.json"
        doc_data = {
            "client_id": url,
            "client_name": "CIMD App",
            "redirect_uris": ["http://localhost:*/callback"],
            "token_endpoint_auth_method": "none",
        }
        # Serve the CIMD document for the proxy's HTTP fetch.
        httpx_mock.add_response(
            json=doc_data,
            headers={"content-length": "200"},
        )
        proxy = OAuthProxy(
            upstream_authorization_endpoint="https://auth.example.com/authorize",
            upstream_token_endpoint="https://auth.example.com/token",
            upstream_client_id="test-client",
            upstream_client_secret="test-secret",
            token_verifier=MockTokenVerifier(),
            base_url="http://localhost:8000",
            jwt_signing_key="test-secret",
            client_storage=MemoryStore(),
        )
        client = await proxy.get_client(url)
        assert isinstance(client, ProxyDCRClient)
        # The fetched document travels with the client record.
        assert client.cimd_document is not None
        assert client.cimd_document.client_name == "CIMD App"
        assert client.client_id == url

    async def test_proxy_cimd_dual_redirect_validation(self, httpx_mock, mock_dns):
        """CIMD client from proxy enforces both CIMD redirect_uris and proxy patterns."""
        url = "https://example.com/client.json"
        doc_data = {
            "client_id": url,
            "client_name": "Dual Validation App",
            "redirect_uris": [
                "http://localhost:3000/callback",
                "https://evil.com/callback",
            ],
            "token_endpoint_auth_method": "none",
        }
        httpx_mock.add_response(
            json=doc_data,
            headers={"content-length": "200"},
        )
        proxy = OAuthProxy(
            upstream_authorization_endpoint="https://auth.example.com/authorize",
            upstream_token_endpoint="https://auth.example.com/token",
            upstream_client_id="test-client",
            upstream_client_secret="test-secret",
            token_verifier=MockTokenVerifier(),
            base_url="http://localhost:8000",
            allowed_client_redirect_uris=["http://localhost:*"],
            jwt_signing_key="test-secret",
            client_storage=MemoryStore(),
        )
        client = await proxy.get_client(url)
        assert client is not None
        # In CIMD AND matches proxy pattern → accepted
        assert client.validate_redirect_uri(AnyUrl("http://localhost:3000/callback"))
        # In CIMD but NOT in proxy pattern → rejected
        with pytest.raises(InvalidRedirectUriError):
            client.validate_redirect_uri(AnyUrl("https://evil.com/callback"))
        # NOT in CIMD but matches proxy pattern → rejected
        with pytest.raises(InvalidRedirectUriError):
            client.validate_redirect_uri(AnyUrl("http://localhost:9999/other"))
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "tests/server/auth/test_oauth_proxy_redirect_validation.py",
"license": "Apache License 2.0",
"lines": 339,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
PrefectHQ/fastmcp:tests/server/auth/test_redirect_validation.py | """Tests for redirect URI validation in OAuth flows."""
from pydantic import AnyUrl
from fastmcp.server.auth.redirect_validation import (
DEFAULT_LOCALHOST_PATTERNS,
matches_allowed_pattern,
validate_redirect_uri,
)
class TestMatchesAllowedPattern:
    """Test wildcard pattern matching for redirect URIs."""

    def test_exact_match(self):
        """A pattern without wildcards matches only the identical URI."""
        uri = "http://localhost:3000/callback"
        assert matches_allowed_pattern(uri, "http://localhost:3000/callback")
        assert not matches_allowed_pattern(uri, "http://localhost:3001/callback")

    def test_port_wildcard(self):
        """A ``*`` in the port position matches any port on the same host."""
        pat = "http://localhost:*/callback"
        for good in (
            "http://localhost:3000/callback",
            "http://localhost:54321/callback",
        ):
            assert matches_allowed_pattern(good, pat)
        # A different host never matches, regardless of port.
        assert not matches_allowed_pattern("http://example.com:3000/callback", pat)

    def test_path_wildcard(self):
        """A ``*`` in the path matches any path on the same host and port."""
        pat = "http://localhost:3000/*"
        assert matches_allowed_pattern("http://localhost:3000/callback", pat)
        assert matches_allowed_pattern("http://localhost:3000/auth/callback", pat)
        assert not matches_allowed_pattern("http://localhost:3001/callback", pat)

    def test_subdomain_wildcard(self):
        """A ``*.`` host wildcard matches subdomains but not the bare domain."""
        pat = "https://*.example.com/callback"
        assert matches_allowed_pattern("https://app.example.com/callback", pat)
        assert matches_allowed_pattern("https://api.example.com/callback", pat)
        assert not matches_allowed_pattern("https://example.com/callback", pat)
        # Scheme must also match.
        assert not matches_allowed_pattern("http://app.example.com/callback", pat)

    def test_multiple_wildcards(self):
        """Several wildcards may appear in a single pattern."""
        pat = "https://*.example.com:*/auth/*"
        assert matches_allowed_pattern(
            "https://app.example.com:8080/auth/callback", pat
        )
        assert matches_allowed_pattern(
            "https://api.example.com:3000/auth/redirect", pat
        )
        assert not matches_allowed_pattern(
            "http://app.example.com:8080/auth/callback", pat
        )
class TestValidateRedirectUri:
    """Test redirect URI validation with pattern lists."""

    def test_none_redirect_uri_allowed(self):
        """A missing redirect URI passes regardless of the configured patterns."""
        for patterns in (None, [], ["http://localhost:*"]):
            assert validate_redirect_uri(None, patterns)

    def test_default_allows_all(self):
        """With patterns=None (the default) every URI is accepted for DCR compatibility."""
        uris = [
            "http://localhost:3000",
            "http://127.0.0.1:8080",
            "http://example.com",
            "https://app.example.com",
            "https://claude.ai/api/mcp/auth_callback",
        ]
        assert all(validate_redirect_uri(u, None) for u in uris)

    def test_empty_list_allows_none(self):
        """An empty pattern list rejects every URI."""
        for uri in (
            "http://localhost:3000",
            "http://example.com",
            "https://anywhere.com:9999/path",
        ):
            assert not validate_redirect_uri(uri, [])

    def test_custom_patterns(self):
        """Only URIs matching one of the supplied patterns are accepted."""
        allowed = [
            "http://localhost:*",
            "https://app.example.com/*",
            "https://*.trusted.io/*",
        ]
        # Accepted
        assert validate_redirect_uri("http://localhost:3000", allowed)
        assert validate_redirect_uri("https://app.example.com/callback", allowed)
        assert validate_redirect_uri("https://api.trusted.io/auth", allowed)
        # Rejected
        assert not validate_redirect_uri("http://127.0.0.1:3000", allowed)
        assert not validate_redirect_uri("https://other.example.com/callback", allowed)
        assert not validate_redirect_uri("http://app.example.com/callback", allowed)

    def test_anyurl_conversion(self):
        """AnyUrl instances are coerced to strings before matching."""
        allowed = ["http://localhost:*"]
        assert validate_redirect_uri(AnyUrl("http://localhost:3000/callback"), allowed)
        assert not validate_redirect_uri(AnyUrl("http://example.com/callback"), allowed)
class TestSecurityBypass:
    """Test protection against redirect URI security bypass attacks."""

    def test_userinfo_bypass_blocked(self):
        """URIs hiding the real host behind userinfo must not match.

        Attack: http://localhost@evil.com/callback would match http://localhost:*
        with naive string matching, but actually points to evil.com.
        """
        pat = "http://localhost:*"
        bypass_attempts = [
            "http://localhost@evil.com/callback",
            "http://localhost:3000@malicious.io/callback",
            "http://user:pass@localhost:3000/callback",
        ]
        for uri in bypass_attempts:
            assert not matches_allowed_pattern(uri, pat)

    def test_userinfo_bypass_with_subdomain_pattern(self):
        """Userinfo tricks are also blocked for subdomain wildcard patterns."""
        pat = "https://*.example.com/callback"
        assert not matches_allowed_pattern(
            "https://app.example.com@attacker.com/callback", pat
        )
        assert not matches_allowed_pattern(
            "https://user:pass@app.example.com/callback", pat
        )

    def test_legitimate_uris_still_work(self):
        """Hardening must not break ordinary, well-formed URIs."""
        localhost_pat = "http://localhost:*"
        assert matches_allowed_pattern("http://localhost:3000/callback", localhost_pat)
        assert matches_allowed_pattern("http://localhost:8080/auth", localhost_pat)
        assert matches_allowed_pattern(
            "https://app.example.com/callback", "https://*.example.com/callback"
        )

    def test_scheme_mismatch_blocked(self):
        """http vs https mismatches never match."""
        assert not matches_allowed_pattern(
            "http://localhost:3000/callback", "https://localhost:*"
        )
        assert not matches_allowed_pattern(
            "https://localhost:3000/callback", "http://localhost:*"
        )

    def test_host_mismatch_blocked(self):
        """A differing host is rejected even when the pattern has wildcards."""
        pat = "http://localhost:*"
        assert not matches_allowed_pattern("http://127.0.0.1:3000/callback", pat)
        assert not matches_allowed_pattern("http://example.com:3000/callback", pat)
class TestDefaultPatterns:
    """Test the default localhost patterns constant."""

    def test_default_patterns_exist(self):
        """The constant is defined and non-empty."""
        assert DEFAULT_LOCALHOST_PATTERNS is not None
        assert len(DEFAULT_LOCALHOST_PATTERNS) > 0

    def test_default_patterns_include_localhost(self):
        """Both common loopback spellings are covered."""
        for expected in ("http://localhost:*", "http://127.0.0.1:*"):
            assert expected in DEFAULT_LOCALHOST_PATTERNS

    def test_explicit_localhost_patterns(self):
        """Passing DEFAULT_LOCALHOST_PATTERNS explicitly restricts to localhost."""
        # Loopback URIs pass
        assert validate_redirect_uri(
            "http://localhost:3000", DEFAULT_LOCALHOST_PATTERNS
        )
        assert validate_redirect_uri(
            "http://127.0.0.1:8080", DEFAULT_LOCALHOST_PATTERNS
        )
        # Everything else is rejected
        assert not validate_redirect_uri(
            "http://example.com", DEFAULT_LOCALHOST_PATTERNS
        )
        assert not validate_redirect_uri(
            "https://claude.ai/api/mcp/auth_callback", DEFAULT_LOCALHOST_PATTERNS
        )
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "tests/server/auth/test_redirect_validation.py",
"license": "Apache License 2.0",
"lines": 164,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
PrefectHQ/fastmcp:tests/utilities/test_cli.py | """Tests for CLI utility functions."""
from pathlib import Path
from fastmcp.utilities.mcp_server_config.v1.environments.uv import UVEnvironment
class TestEnvironmentBuildUVRunCommand:
    """Test the Environment.build_uv_run_command() method."""

    def test_build_uv_run_command_basic(self):
        """Test building basic uv command with no environment config."""
        env = UVEnvironment()
        cmd = env.build_command(["fastmcp", "run", "server.py"])
        # With no config, the command should be returned unchanged
        expected = ["fastmcp", "run", "server.py"]
        assert cmd == expected

    def test_build_uv_run_command_with_editable(self):
        """Test building uv command with editable package."""
        editable_path = Path("/path/to/package")
        env = UVEnvironment(editable=[editable_path])
        cmd = env.build_command(["fastmcp", "run", "server.py"])
        # Paths are resolved to absolute form in the emitted command.
        expected = [
            "uv",
            "run",
            "--with-editable",
            str(editable_path.resolve()),
            "fastmcp",
            "run",
            "server.py",
        ]
        assert cmd == expected

    def test_build_uv_run_command_with_packages(self):
        """Test building uv command with additional packages."""
        env = UVEnvironment(dependencies=["pkg1", "pkg2"])
        cmd = env.build_command(["fastmcp", "run", "server.py"])
        # Each dependency becomes its own --with flag.
        expected = [
            "uv",
            "run",
            "--with",
            "pkg1",
            "--with",
            "pkg2",
            "fastmcp",
            "run",
            "server.py",
        ]
        assert cmd == expected

    def test_build_uv_run_command_with_python_version(self):
        """Test building uv command with Python version."""
        env = UVEnvironment(python="3.10")
        cmd = env.build_command(["fastmcp", "run", "server.py"])
        expected = [
            "uv",
            "run",
            "--python",
            "3.10",
            "fastmcp",
            "run",
            "server.py",
        ]
        assert cmd == expected

    def test_build_uv_run_command_with_requirements(self):
        """Test building uv command with requirements file."""
        requirements_path = Path("/path/to/requirements.txt")
        env = UVEnvironment(requirements=requirements_path)
        cmd = env.build_command(["fastmcp", "run", "server.py"])
        expected = [
            "uv",
            "run",
            "--with-requirements",
            str(requirements_path.resolve()),
            "fastmcp",
            "run",
            "server.py",
        ]
        assert cmd == expected

    def test_build_uv_run_command_with_project(self):
        """Test building uv command with project directory."""
        project_path = Path("/path/to/project")
        env = UVEnvironment(project=project_path)
        cmd = env.build_command(["fastmcp", "run", "server.py"])
        expected = [
            "uv",
            "run",
            "--project",
            str(project_path.resolve()),
            "fastmcp",
            "run",
            "server.py",
        ]
        assert cmd == expected

    def test_build_uv_run_command_with_everything(self):
        """Test building uv command with all options."""
        requirements_path = Path("/path/to/requirements.txt")
        editable_path = Path("/local/pkg")
        env = UVEnvironment(
            python="3.10",
            dependencies=["pandas", "numpy"],
            requirements=requirements_path,
            editable=[editable_path],
        )
        cmd = env.build_command(["fastmcp", "run", "server.py"])
        # Note: dependencies are emitted in sorted order (numpy before pandas).
        expected = [
            "uv",
            "run",
            "--python",
            "3.10",
            "--with",
            "numpy",
            "--with",
            "pandas",
            "--with-requirements",
            str(requirements_path.resolve()),
            "--with-editable",
            str(editable_path.resolve()),
            "fastmcp",
            "run",
            "server.py",
        ]
        assert cmd == expected

    # Note: These tests are removed because build_uv_run_command now requires a command
    # and only accepts a list, not optional or string commands

    def test_build_uv_run_command_project_with_extras(self):
        """Test that project flag works with additional dependencies."""
        project_path = Path("/path/to/project")
        editable_path = Path("/pkg")
        env = UVEnvironment(
            project=project_path,
            python="3.10",  # Should be ignored with project
            dependencies=["pandas"],  # Should be added on top of project
            editable=[editable_path],  # Should be added on top of project
        )
        cmd = env.build_command(["fastmcp", "run", "server.py"])
        expected = [
            "uv",
            "run",
            "--project",
            str(project_path.resolve()),
            "--with",
            "pandas",
            "--with-editable",
            str(editable_path.resolve()),
            "fastmcp",
            "run",
            "server.py",
        ]
        assert cmd == expected
class TestEnvironmentNeedsUV:
    """Test the Environment.needs_uv() method."""

    def test_needs_uv_with_python(self):
        """A pinned Python version forces a uv-managed run."""
        assert UVEnvironment(python="3.10")._must_run_with_uv() is True

    def test_needs_uv_with_dependencies(self):
        """Extra dependencies force a uv-managed run."""
        assert UVEnvironment(dependencies=["pandas"])._must_run_with_uv() is True

    def test_needs_uv_with_requirements(self):
        """A requirements file forces a uv-managed run."""
        env = UVEnvironment(requirements=Path("/path/to/requirements.txt"))
        assert env._must_run_with_uv() is True

    def test_needs_uv_with_project(self):
        """A project directory forces a uv-managed run."""
        env = UVEnvironment(project=Path("/path/to/project"))
        assert env._must_run_with_uv() is True

    def test_needs_uv_with_editable(self):
        """An editable install forces a uv-managed run."""
        assert UVEnvironment(editable=[Path("/pkg")])._must_run_with_uv() is True

    def test_needs_uv_empty(self):
        """An empty configuration can run without uv."""
        assert UVEnvironment()._must_run_with_uv() is False

    def test_needs_uv_with_empty_lists(self):
        """Explicit None for the list-valued options still does not require uv."""
        env = UVEnvironment(dependencies=None, editable=None)
        assert env._must_run_with_uv() is False
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "tests/utilities/test_cli.py",
"license": "Apache License 2.0",
"lines": 173,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
PrefectHQ/fastmcp:examples/auth/azure_oauth/client.py | """OAuth client example for connecting to FastMCP servers.
This example demonstrates how to connect to an OAuth-protected FastMCP server.
To run:
python client.py
"""
import asyncio
from fastmcp.client import Client
SERVER_URL = "http://127.0.0.1:8000/mcp"
async def main():
    """Authenticate against the OAuth-protected server and list its tools."""
    try:
        async with Client(SERVER_URL, auth="oauth") as session:
            # ping() confirms the authenticated connection is live.
            assert await session.ping()
            print("✅ Successfully authenticated!")

            available = await session.list_tools()
            print(f"🔧 Available tools ({len(available)}):")
            for tool in available:
                print(f" - {tool.name}: {tool.description}")
    except Exception as e:
        print(f"❌ Authentication failed: {e}")
        raise


if __name__ == "__main__":
    asyncio.run(main())
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "examples/auth/azure_oauth/client.py",
"license": "Apache License 2.0",
"lines": 22,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
PrefectHQ/fastmcp:examples/auth/azure_oauth/server.py | """Azure (Microsoft Entra) OAuth server example for FastMCP.
This example demonstrates how to protect a FastMCP server with Azure/Microsoft OAuth.
Required environment variables:
- FASTMCP_SERVER_AUTH_AZURE_CLIENT_ID: Your Azure application (client) ID
- FASTMCP_SERVER_AUTH_AZURE_CLIENT_SECRET: Your Azure client secret
- FASTMCP_SERVER_AUTH_AZURE_TENANT_ID: Tenant ID
Options: "organizations" (work/school), "consumers" (personal), or specific tenant ID
- FASTMCP_SERVER_AUTH_AZURE_REQUIRED_SCOPES: At least one scope required (e.g., "read" or "read,write")
These must match scope names created under "Expose an API" in your Azure App registration
To run:
python server.py
"""
import os
from fastmcp import FastMCP
from fastmcp.server.auth.providers.azure import AzureProvider
# Configure the Azure OAuth provider from environment variables.
# Empty-string fallbacks let the module import cleanly even when unset,
# though authentication will fail at runtime without real credentials.
auth = AzureProvider(
    client_id=os.getenv("FASTMCP_SERVER_AUTH_AZURE_CLIENT_ID") or "",
    client_secret=os.getenv("FASTMCP_SERVER_AUTH_AZURE_CLIENT_SECRET") or "",
    tenant_id=os.getenv("FASTMCP_SERVER_AUTH_AZURE_TENANT_ID")
    or "",  # Required for single-tenant apps - get from Azure Portal
    base_url="http://localhost:8000",
    required_scopes=["read"],
    # required_scopes is automatically loaded from FASTMCP_SERVER_AUTH_AZURE_REQUIRED_SCOPES
    # At least one scope is required - use unprefixed scope names from your Azure App (e.g., ["read", "write"])
    # redirect_path="/auth/callback", # Default path - change if using a different callback URL
)

mcp = FastMCP("Azure OAuth Example Server", auth=auth)


@mcp.tool
def echo(message: str) -> str:
    """Echo the provided message."""
    return message


if __name__ == "__main__":
    # HTTP transport is required so the OAuth redirect flow can reach the server.
    mcp.run(transport="http", port=8000)
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "examples/auth/azure_oauth/server.py",
"license": "Apache License 2.0",
"lines": 33,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
PrefectHQ/fastmcp:examples/auth/workos_oauth/server.py | """WorkOS OAuth server example for FastMCP.
This example demonstrates how to protect a FastMCP server with WorkOS OAuth.
Required environment variables:
- WORKOS_CLIENT_ID: Your WorkOS Connect application client ID
- WORKOS_CLIENT_SECRET: Your WorkOS Connect application client secret
- WORKOS_AUTHKIT_DOMAIN: Your AuthKit domain (e.g., "https://your-app.authkit.app")
To run:
python server.py
"""
import os
from fastmcp import FastMCP
from fastmcp.server.auth.providers.workos import WorkOSProvider
# Configure the WorkOS OAuth provider from environment variables.
# Empty-string fallbacks let the module import cleanly even when unset,
# though authentication will fail at runtime without real credentials.
auth = WorkOSProvider(
    client_id=os.getenv("WORKOS_CLIENT_ID") or "",
    client_secret=os.getenv("WORKOS_CLIENT_SECRET") or "",
    authkit_domain=os.getenv("WORKOS_AUTHKIT_DOMAIN") or "https://your-app.authkit.app",
    base_url="http://localhost:8000",
    # redirect_path="/auth/callback", # Default path - change if using a different callback URL
)

mcp = FastMCP("WorkOS OAuth Example Server", auth=auth)


@mcp.tool
def echo(message: str) -> str:
    """Echo the provided message."""
    return message


if __name__ == "__main__":
    # HTTP transport is required so the OAuth redirect flow can reach the server.
    mcp.run(transport="http", port=8000)
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "examples/auth/workos_oauth/server.py",
"license": "Apache License 2.0",
"lines": 26,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
PrefectHQ/fastmcp:src/fastmcp/server/auth/providers/azure.py | """Azure (Microsoft Entra) OAuth provider for FastMCP.
This provider implements Azure/Microsoft Entra ID OAuth authentication
using the OAuth Proxy pattern for non-DCR OAuth flows.
"""
from __future__ import annotations
import hashlib
from collections import OrderedDict
from typing import TYPE_CHECKING, Any, cast
import httpx
from key_value.aio.protocols import AsyncKeyValue
from fastmcp.dependencies import Dependency
from fastmcp.server.auth.oauth_proxy import OAuthProxy
from fastmcp.server.auth.providers.jwt import JWTVerifier
from fastmcp.utilities.auth import decode_jwt_payload, parse_scopes
from fastmcp.utilities.logging import get_logger
if TYPE_CHECKING:
from azure.identity.aio import OnBehalfOfCredential
from mcp.server.auth.provider import AuthorizationParams
from mcp.shared.auth import OAuthClientInformationFull
# Module-level logger for this provider.
logger = get_logger(__name__)

# Standard OIDC scopes that should never be prefixed with identifier_uri.
# Per Microsoft docs: https://learn.microsoft.com/en-us/entra/identity-platform/scopes-oidc
# "OIDC scopes are requested as simple string identifiers without resource prefixes"
OIDC_SCOPES = frozenset({"openid", "profile", "email", "offline_access"})
class AzureProvider(OAuthProxy):
"""Azure (Microsoft Entra) OAuth provider for FastMCP.
This provider implements Azure/Microsoft Entra ID authentication using the
OAuth Proxy pattern. It supports both organizational accounts and personal
Microsoft accounts depending on the tenant configuration.
Scope Handling:
- required_scopes: Provide unprefixed scope names (e.g., ["read", "write"])
→ Automatically prefixed with identifier_uri during initialization
→ Validated on all tokens and advertised to MCP clients
- additional_authorize_scopes: Provide full format (e.g., ["User.Read"])
→ NOT prefixed, NOT validated, NOT advertised to clients
→ Used to request Microsoft Graph or other upstream API permissions
Features:
- OAuth proxy to Azure/Microsoft identity platform
- JWT validation using tenant issuer and JWKS
- Supports tenant configurations: specific tenant ID, "organizations", or "consumers"
- Custom API scopes and Microsoft Graph scopes in a single provider
Setup:
1. Create an App registration in Azure Portal
2. Configure Web platform redirect URI: http://localhost:8000/auth/callback (or your custom path)
3. Add an Application ID URI under "Expose an API" (defaults to api://{client_id})
4. Add custom scopes (e.g., "read", "write") under "Expose an API"
5. Set access token version to 2 in the App manifest: "requestedAccessTokenVersion": 2
6. Create a client secret
7. Get Application (client) ID, Directory (tenant) ID, and client secret
Example:
```python
from fastmcp import FastMCP
from fastmcp.server.auth.providers.azure import AzureProvider
# Standard Azure (Public Cloud)
auth = AzureProvider(
client_id="your-client-id",
client_secret="your-client-secret",
tenant_id="your-tenant-id",
required_scopes=["read", "write"], # Unprefixed scope names
additional_authorize_scopes=["User.Read", "Mail.Read"], # Optional Graph scopes
base_url="http://localhost:8000",
# identifier_uri defaults to api://{client_id}
)
# Azure Government
auth_gov = AzureProvider(
client_id="your-client-id",
client_secret="your-client-secret",
tenant_id="your-tenant-id",
required_scopes=["read", "write"],
base_authority="login.microsoftonline.us", # Override for Azure Gov
base_url="http://localhost:8000",
)
mcp = FastMCP("My App", auth=auth)
```
"""
    def __init__(
        self,
        *,
        client_id: str,
        client_secret: str,
        tenant_id: str,
        required_scopes: list[str],
        base_url: str,
        identifier_uri: str | None = None,
        issuer_url: str | None = None,
        redirect_path: str | None = None,
        additional_authorize_scopes: list[str] | None = None,
        allowed_client_redirect_uris: list[str] | None = None,
        client_storage: AsyncKeyValue | None = None,
        jwt_signing_key: str | bytes | None = None,
        require_authorization_consent: bool = True,
        consent_csp_policy: str | None = None,
        base_authority: str = "login.microsoftonline.com",
        http_client: httpx.AsyncClient | None = None,
    ) -> None:
        """Initialize Azure OAuth provider.

        Args:
            client_id: Azure application (client) ID from your App registration
            client_secret: Azure client secret from your App registration
            tenant_id: Azure tenant ID (specific tenant GUID, "organizations", or "consumers")
            identifier_uri: Optional Application ID URI for your custom API (defaults to api://{client_id}).
                This URI is automatically prefixed to all required_scopes during initialization.
                Example: identifier_uri="api://my-api" + required_scopes=["read"]
                → tokens validated for "api://my-api/read"
            base_url: Public URL where OAuth endpoints will be accessible (includes any mount path)
            issuer_url: Issuer URL for OAuth metadata (defaults to base_url). Use root-level URL
                to avoid 404s during discovery when mounting under a path.
            redirect_path: Redirect path configured in Azure App registration (defaults to "/auth/callback")
            base_authority: Azure authority base URL (defaults to "login.microsoftonline.com").
                For Azure Government, use "login.microsoftonline.us".
            required_scopes: Custom API scope names WITHOUT prefix (e.g., ["read", "write"]).
                - Automatically prefixed with identifier_uri during initialization
                - Validated on all tokens
                - Advertised in Protected Resource Metadata
                - Must match scope names defined in Azure Portal under "Expose an API"
                Example: ["read", "write"] → validates tokens containing ["api://xxx/read", "api://xxx/write"]
            additional_authorize_scopes: Microsoft Graph or other upstream scopes in full format.
                - NOT prefixed with identifier_uri
                - NOT validated on tokens
                - NOT advertised to MCP clients
                - Used to request additional permissions from Azure (e.g., Graph API access)
                Example: ["User.Read", "Mail.Read"]
                These scopes allow your FastMCP server to call Microsoft Graph APIs using the
                upstream Azure token, but MCP clients are unaware of them.
                Note: "offline_access" is automatically included to obtain refresh tokens.
            allowed_client_redirect_uris: List of allowed redirect URI patterns for MCP clients.
                If None (default), all URIs are allowed. If empty list, no URIs are allowed.
            client_storage: Storage backend for OAuth state (client registrations, encrypted tokens).
                If None, an encrypted file store will be created in the data directory
                (derived from `platformdirs`).
            jwt_signing_key: Secret for signing FastMCP JWT tokens (any string or bytes). If bytes are provided,
                they will be used as is. If a string is provided, it will be derived into a 32-byte key. If not
                provided, the upstream client secret will be used to derive a 32-byte key using PBKDF2.
            require_authorization_consent: Whether to require user consent before authorizing clients (default True).
                When True, users see a consent screen before being redirected to Azure.
                When False, authorization proceeds directly without user confirmation.
                SECURITY WARNING: Only disable for local development or testing environments.
            consent_csp_policy: Optional Content-Security-Policy override for the consent page;
                passed through to OAuthProxy unchanged (None uses the proxy default).
            http_client: Optional httpx.AsyncClient for connection pooling in JWKS fetches.
                When provided, the client is reused for JWT key fetches and the caller
                is responsible for its lifecycle. When None (default), a fresh client is created per fetch.
        """
        # Parse scopes if provided as string
        parsed_required_scopes = parse_scopes(required_scopes)
        parsed_additional_scopes: list[str] = (
            parse_scopes(additional_authorize_scopes) or []
            if additional_authorize_scopes
            else []
        )
        # Always include offline_access to get refresh tokens from Azure
        if "offline_access" not in parsed_additional_scopes:
            parsed_additional_scopes = [*parsed_additional_scopes, "offline_access"]
        # Store Azure-specific config for OBO credential creation
        self._tenant_id = tenant_id
        self._base_authority = base_authority
        # Cache of OBO credentials keyed by hash of user assertion token.
        # Reusing credentials allows the Azure SDK's internal token cache
        # to avoid redundant OBO exchanges for the same user + scopes.
        self._obo_credentials: OrderedDict[str, OnBehalfOfCredential] = OrderedDict()
        # LRU bound: oldest cached credentials are closed once exceeded.
        self._obo_max_credentials: int = 128
        # Apply defaults
        self.identifier_uri = identifier_uri or f"api://{client_id}"
        self.additional_authorize_scopes: list[str] = parsed_additional_scopes
        # Always validate tokens against the app's API client ID using JWT
        issuer = f"https://{base_authority}/{tenant_id}/v2.0"
        jwks_uri = f"https://{base_authority}/{tenant_id}/discovery/v2.0/keys"
        # Azure access tokens only include custom API scopes in the `scp` claim,
        # NOT standard OIDC scopes (openid, profile, email, offline_access).
        # Filter out OIDC scopes from validation - they'll still be sent to Azure
        # during authorization (handled by _prefix_scopes_for_azure).
        if parsed_required_scopes:
            validation_scopes = [
                s for s in parsed_required_scopes if s not in OIDC_SCOPES
            ]
            # If all scopes were OIDC scopes, use None (no scope validation)
            if not validation_scopes:
                validation_scopes = None
        else:
            validation_scopes = None
        token_verifier = JWTVerifier(
            jwks_uri=jwks_uri,
            issuer=issuer,
            audience=client_id,
            algorithm="RS256",
            required_scopes=validation_scopes,  # Only validate non-OIDC scopes
            http_client=http_client,
        )
        # Build Azure OAuth endpoints with tenant
        authorization_endpoint = (
            f"https://{base_authority}/{tenant_id}/oauth2/v2.0/authorize"
        )
        token_endpoint = f"https://{base_authority}/{tenant_id}/oauth2/v2.0/token"
        # Initialize OAuth proxy with Azure endpoints
        # Remember there's hooks called, such as _prepare_scopes_for_token_exchange
        # and _prepare_scopes_for_upstream_refresh
        super().__init__(
            upstream_authorization_endpoint=authorization_endpoint,
            upstream_token_endpoint=token_endpoint,
            upstream_client_id=client_id,
            upstream_client_secret=client_secret,
            token_verifier=token_verifier,
            base_url=base_url,
            redirect_path=redirect_path,
            issuer_url=issuer_url or base_url,  # Default to base_url if not specified
            allowed_client_redirect_uris=allowed_client_redirect_uris,
            client_storage=client_storage,
            jwt_signing_key=jwt_signing_key,
            require_authorization_consent=require_authorization_consent,
            consent_csp_policy=consent_csp_policy,
            # Unprefixed scope names are what MCP clients see/request.
            valid_scopes=parsed_required_scopes,
        )
        authority_info = ""
        if base_authority != "login.microsoftonline.com":
            authority_info = f" using authority {base_authority}"
        logger.info(
            "Initialized Azure OAuth provider for client %s with tenant %s%s%s",
            client_id,
            tenant_id,
            f" and identifier_uri {self.identifier_uri}" if self.identifier_uri else "",
            authority_info,
        )
async def authorize(
self,
client: OAuthClientInformationFull,
params: AuthorizationParams,
) -> str:
"""Start OAuth transaction and redirect to Azure AD.
Override parent's authorize method to filter out the 'resource' parameter
which is not supported by Azure AD v2.0 endpoints. The v2.0 endpoints use
scopes to determine the resource/audience instead of a separate parameter.
Args:
client: OAuth client information
params: Authorization parameters from the client
Returns:
Authorization URL to redirect the user to Azure AD
"""
# Clear the resource parameter that Azure AD v2.0 doesn't support
# This parameter comes from RFC 8707 (OAuth 2.0 Resource Indicators)
# but Azure AD v2.0 uses scopes instead to determine the audience
params_to_use = params
if hasattr(params, "resource"):
original_resource = getattr(params, "resource", None)
if original_resource is not None:
params_to_use = params.model_copy(update={"resource": None})
if original_resource:
logger.debug(
"Filtering out 'resource' parameter '%s' for Azure AD v2.0 (use scopes instead)",
original_resource,
)
# Don't modify the scopes in params - they stay unprefixed for MCP clients
# We'll prefix them when building the Azure authorization URL (in _build_upstream_authorize_url)
auth_url = await super().authorize(client, params_to_use)
separator = "&" if "?" in auth_url else "?"
return f"{auth_url}{separator}prompt=select_account"
def _prefix_scopes_for_azure(self, scopes: list[str]) -> list[str]:
"""Prefix unprefixed custom API scopes with identifier_uri for Azure.
This helper centralizes the scope prefixing logic used in both
authorization and token refresh flows.
Scopes that are NOT prefixed:
- Standard OIDC scopes (openid, profile, email, offline_access)
- Fully-qualified URIs (contain "://")
- Scopes with path component (contain "/")
Note: Microsoft Graph scopes (e.g., User.Read) should be passed via
`additional_authorize_scopes` or use fully-qualified format
(e.g., https://graph.microsoft.com/User.Read).
Args:
scopes: List of scopes, may be prefixed or unprefixed
Returns:
List of scopes with identifier_uri prefix applied where needed
"""
prefixed = []
for scope in scopes:
if scope in OIDC_SCOPES:
# Standard OIDC scopes - never prefix
prefixed.append(scope)
elif "://" in scope or "/" in scope:
# Already fully-qualified (e.g., "api://xxx/read" or
# "https://graph.microsoft.com/User.Read")
prefixed.append(scope)
else:
# Unprefixed custom API scope - prefix with identifier_uri
prefixed.append(f"{self.identifier_uri}/{scope}")
return prefixed
def _build_upstream_authorize_url(
self, txn_id: str, transaction: dict[str, Any]
) -> str:
"""Build Azure authorization URL with prefixed scopes.
Overrides parent to prefix scopes with identifier_uri before sending to Azure,
while keeping unprefixed scopes in the transaction for MCP clients.
"""
# Get unprefixed scopes from transaction
unprefixed_scopes = transaction.get("scopes") or self.required_scopes or []
# Prefix scopes for Azure authorization request
prefixed_scopes = self._prefix_scopes_for_azure(unprefixed_scopes)
# Add Microsoft Graph scopes (not validated, not prefixed)
if self.additional_authorize_scopes:
prefixed_scopes.extend(self.additional_authorize_scopes)
# Temporarily modify transaction dict for parent's URL building
modified_transaction = transaction.copy()
modified_transaction["scopes"] = prefixed_scopes
# Let parent build the URL with prefixed scopes
return super()._build_upstream_authorize_url(txn_id, modified_transaction)
def _prepare_scopes_for_token_exchange(self, scopes: list[str]) -> list[str]:
"""Prepare scopes for Azure authorization code exchange.
Azure requires scopes during token exchange (AADSTS28003 error if missing).
Azure only allows ONE resource per token request (AADSTS28000), so we only
include scopes for this API plus OIDC scopes.
Args:
scopes: Scopes from the authorization request (unprefixed)
Returns:
List of scopes for Azure token endpoint
"""
# Prefix scopes for this API
prefixed_scopes = self._prefix_scopes_for_azure(scopes or [])
# Add OIDC scopes only (not other API scopes) to avoid AADSTS28000
if self.additional_authorize_scopes:
prefixed_scopes.extend(
s for s in self.additional_authorize_scopes if s in OIDC_SCOPES
)
deduplicated = list(dict.fromkeys(prefixed_scopes))
logger.debug("Token exchange scopes: %s", deduplicated)
return deduplicated
def _prepare_scopes_for_upstream_refresh(self, scopes: list[str]) -> list[str]:
"""Prepare scopes for Azure token refresh.
Azure requires fully-qualified scopes and only allows ONE resource per
token request (AADSTS28000). We include scopes for this API plus OIDC scopes.
Args:
scopes: Base scopes from RefreshToken (unprefixed, e.g., ["read"])
Returns:
Deduplicated list of scopes formatted for Azure token endpoint
"""
logger.debug("Base scopes from storage: %s", scopes)
# Filter out any additional_authorize_scopes that may have been stored
additional_scopes_set = set(self.additional_authorize_scopes or [])
base_scopes = [s for s in scopes if s not in additional_scopes_set]
# Prefix base scopes with identifier_uri for Azure
prefixed_scopes = self._prefix_scopes_for_azure(base_scopes)
# Add OIDC scopes only (not other API scopes) to avoid AADSTS28000
if self.additional_authorize_scopes:
prefixed_scopes.extend(
s for s in self.additional_authorize_scopes if s in OIDC_SCOPES
)
deduplicated_scopes = list(dict.fromkeys(prefixed_scopes))
logger.debug("Scopes for Azure token endpoint: %s", deduplicated_scopes)
return deduplicated_scopes
async def _extract_upstream_claims(
self, idp_tokens: dict[str, Any]
) -> dict[str, Any] | None:
"""Extract claims from Azure token response to embed in FastMCP JWT.
Decodes the Azure access token (which is a JWT) to extract user identity
claims. This allows gateways to inspect upstream identity information by
decoding the FastMCP JWT without needing server-side storage lookups.
Azure access tokens contain claims like:
- sub: Subject identifier (unique per user per application)
- oid: Object ID (unique user identifier across Azure AD)
- tid: Tenant ID
- azp: Authorized party (client ID that requested the token)
- name: Display name
- given_name: First name
- family_name: Last name
- preferred_username: User principal name (email format)
- upn: User Principal Name
- email: Email address (if available)
- roles: Application roles assigned to the user
- groups: Group memberships (if configured)
Args:
idp_tokens: Full token response from Azure, containing access_token
and potentially id_token.
Returns:
Dict of extracted claims, or None if extraction fails.
"""
access_token = idp_tokens.get("access_token")
if not access_token:
return None
try:
# Azure access tokens are JWTs - decode without verification
# (already validated by token_verifier during token exchange)
payload = decode_jwt_payload(access_token)
# Extract useful identity claims
claims: dict[str, Any] = {}
claim_keys = [
"sub",
"oid",
"tid",
"azp",
"name",
"given_name",
"family_name",
"preferred_username",
"upn",
"email",
"roles",
"groups",
]
for claim in claim_keys:
if claim in payload:
claims[claim] = payload[claim]
if claims:
logger.debug(
"Extracted %d Azure claims for embedding in FastMCP JWT",
len(claims),
)
return claims
return None
except Exception as e:
logger.debug("Failed to extract Azure claims: %s", e)
return None
async def get_obo_credential(self, user_assertion: str) -> OnBehalfOfCredential:
"""Get a cached or new OnBehalfOfCredential for OBO token exchange.
Credentials are cached by user assertion so the Azure SDK's internal
token cache can avoid redundant OBO exchanges when the same user
calls multiple tools with the same scopes.
Args:
user_assertion: The user's access token to exchange via OBO.
Returns:
A configured OnBehalfOfCredential ready for get_token() calls.
Raises:
ImportError: If azure-identity is not installed (requires fastmcp[azure]).
"""
_require_azure_identity("OBO token exchange")
from azure.identity.aio import OnBehalfOfCredential
key = hashlib.sha256(user_assertion.encode()).hexdigest()
if key in self._obo_credentials:
self._obo_credentials.move_to_end(key)
return self._obo_credentials[key]
credential = OnBehalfOfCredential(
tenant_id=self._tenant_id,
client_id=self._upstream_client_id,
client_secret=self._upstream_client_secret.get_secret_value(),
user_assertion=user_assertion,
authority=f"https://{self._base_authority}",
)
self._obo_credentials[key] = credential
# Evict oldest if over capacity
while len(self._obo_credentials) > self._obo_max_credentials:
_, evicted = self._obo_credentials.popitem(last=False)
await evicted.close()
return credential
async def close_obo_credentials(self) -> None:
"""Close all cached OBO credentials."""
credentials = list(self._obo_credentials.values())
self._obo_credentials.clear()
for credential in credentials:
try:
await credential.close()
except Exception:
logger.debug("Error closing OBO credential", exc_info=True)
class AzureJWTVerifier(JWTVerifier):
    """JWT verifier pre-configured for Azure AD / Microsoft Entra ID.

    Wires up the tenant JWKS URI, issuer, audience, and scope handling from
    an Azure app registration. Intended for token-verification-only
    scenarios (e.g. Managed Identity) where AzureProvider's full OAuth
    proxy is unnecessary.

    Scope handling mirrors Azure's split format:
    - Token validation uses the short names Azure places in ``scp`` claims.
    - OAuth metadata advertises the full ``api://.../scope`` URIs that
      clients must actually request.

    Example::

        from fastmcp.server.auth import RemoteAuthProvider
        from fastmcp.server.auth.providers.azure import AzureJWTVerifier
        from pydantic import AnyHttpUrl

        verifier = AzureJWTVerifier(
            client_id="your-client-id",
            tenant_id="your-tenant-id",
            required_scopes=["access_as_user"],
        )

        auth = RemoteAuthProvider(
            token_verifier=verifier,
            authorization_servers=[
                AnyHttpUrl("https://login.microsoftonline.com/your-tenant-id/v2.0")
            ],
            base_url="https://my-server.com",
        )
    """

    # Pseudo-tenant aliases: tokens issued under these carry the concrete
    # tenant GUID as issuer, so exact issuer matching cannot be applied.
    _MULTI_TENANT_IDS = frozenset({"organizations", "consumers", "common"})

    def __init__(
        self,
        *,
        client_id: str,
        tenant_id: str,
        required_scopes: list[str] | None = None,
        identifier_uri: str | None = None,
        base_authority: str = "login.microsoftonline.com",
    ):
        """Initialize Azure JWT verifier.

        Args:
            client_id: Azure application (client) ID from your App registration.
            tenant_id: Tenant GUID, or one of "organizations"/"consumers"/"common".
                For those multi-tenant aliases, issuer validation is skipped —
                audience checking still rejects tokens meant for other apps.
            required_scopes: Scope names exactly as defined in Azure Portal under
                "Expose an API" (e.g. ["access_as_user", "read"]); validated
                against the short-form ``scp`` claim and prefixed with
                identifier_uri when advertised in OAuth metadata.
            identifier_uri: Application ID URI (defaults to ``api://{client_id}``);
                only used to build the full scope URIs for metadata.
            base_authority: Azure authority host (defaults to
                "login.microsoftonline.com"; use "login.microsoftonline.us"
                for Azure Government).
        """
        self._identifier_uri = identifier_uri or f"api://{client_id}"
        if tenant_id in self._MULTI_TENANT_IDS:
            issuer: str | None = None
        else:
            issuer = f"https://{base_authority}/{tenant_id}/v2.0"
        super().__init__(
            jwks_uri=f"https://{base_authority}/{tenant_id}/discovery/v2.0/keys",
            issuer=issuer,
            audience=client_id,
            algorithm="RS256",
            required_scopes=required_scopes,
        )

    @property
    def scopes_supported(self) -> list[str]:
        """Full-URI scopes for OAuth metadata.

        ``required_scopes`` holds the short form Azure puts in ``scp``
        claims, but clients must request ``{identifier_uri}/{scope}`` from
        the Azure authorization endpoint — so this property returns the
        prefixed form. OIDC scopes and already-qualified scopes (containing
        "://" or "/") pass through unchanged.
        """
        return [
            scope
            if scope in OIDC_SCOPES or "://" in scope or "/" in scope
            else f"{self._identifier_uri}/{scope}"
            for scope in (self.required_scopes or [])
        ]
# --- Dependency injection support ---
# These require fastmcp[azure] extra for azure-identity
def _require_azure_identity(feature: str) -> None:
"""Raise ImportError with install instructions if azure-identity is not available."""
try:
import azure.identity # noqa: F401
except ImportError as e:
raise ImportError(
f"{feature} requires the `azure` extra. "
"Install with: pip install 'fastmcp[azure]'"
) from e
class _EntraOBOToken(Dependency[str]):
    """Dependency resolving to a downstream token via Entra OBO exchange.

    Delegates to azure.identity's async OnBehalfOfCredential (obtained from
    the active AzureProvider, which caches credentials per user assertion),
    so repeated tool calls benefit from the Azure SDK's internal token
    cache and automatic refresh.
    """

    def __init__(self, scopes: list[str]):
        # Downstream scopes requested during the OBO exchange.
        self.scopes = scopes

    async def __aenter__(self) -> str:
        _require_azure_identity("EntraOBOToken")
        from fastmcp.server.dependencies import get_access_token, get_server

        token = get_access_token()
        if token is None:
            raise RuntimeError(
                "No access token available. Cannot perform OBO exchange."
            )
        server = get_server()
        auth = server.auth
        # OBO needs the Azure tenant/client configuration held by AzureProvider.
        if not isinstance(auth, AzureProvider):
            raise RuntimeError(
                "EntraOBOToken requires an AzureProvider as the auth provider. "
                f"Current provider: {type(auth).__name__}"
            )
        credential = await auth.get_obo_credential(user_assertion=token.token)
        downstream = await credential.get_token(*self.scopes)
        return downstream.token
def EntraOBOToken(scopes: list[str]) -> str:
    """Exchange the caller's Entra token for a downstream API token via OBO.

    Declares a dependency that performs a Microsoft Entra On-Behalf-Of
    exchange so a tool can call downstream APIs (e.g. Microsoft Graph) as
    the authenticated user.

    Args:
        scopes: Downstream scopes to request, e.g.
            ["https://graph.microsoft.com/Mail.Read"] or
            ["https://graph.microsoft.com/.default"].

    Returns:
        A dependency that resolves to the downstream API access token string

    Raises:
        ImportError: If fastmcp[azure] is not installed
        RuntimeError: If no access token is available, provider is not Azure,
            or OBO exchange fails

    Example:
        ```python
        from fastmcp.server.auth.providers.azure import EntraOBOToken
        import httpx

        @mcp.tool()
        async def get_my_emails(
            graph_token: str = EntraOBOToken(["https://graph.microsoft.com/Mail.Read"])
        ):
            async with httpx.AsyncClient() as client:
                resp = await client.get(
                    "https://graph.microsoft.com/v1.0/me/messages",
                    headers={"Authorization": f"Bearer {graph_token}"}
                )
                return resp.json()
        ```

    Note:
        The requested scopes must be included in the AzureProvider's
        `additional_authorize_scopes` and have admin consent granted in the
        Entra app registration for the exchange to succeed.
    """
    # cast() keeps tool signatures typed as `str`; the dependency machinery
    # substitutes the real token value at call time.
    return cast(str, _EntraOBOToken(scopes))
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "src/fastmcp/server/auth/providers/azure.py",
"license": "Apache License 2.0",
"lines": 607,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | documentation |
PrefectHQ/fastmcp:tests/server/auth/providers/test_azure.py | """Tests for Azure (Microsoft Entra) OAuth provider."""
from urllib.parse import parse_qs, urlparse
import pytest
from key_value.aio.stores.memory import MemoryStore
from mcp.server.auth.provider import AuthorizationParams
from mcp.shared.auth import OAuthClientInformationFull
from pydantic import AnyUrl
from fastmcp.server.auth.providers.azure import AzureProvider
from fastmcp.server.auth.providers.jwt import JWTVerifier
@pytest.fixture
def memory_storage() -> MemoryStore:
    """Provide a MemoryStore for tests to avoid SQLite initialization on Windows."""
    # Function-scoped: every test receives a fresh, isolated, empty store.
    return MemoryStore()
class TestAzureProvider:
"""Test Azure OAuth provider functionality."""
def test_init_with_explicit_params(self, memory_storage: MemoryStore):
"""Test AzureProvider initialization with explicit parameters."""
provider = AzureProvider(
client_id="12345678-1234-1234-1234-123456789012",
client_secret="azure_secret_123",
tenant_id="87654321-4321-4321-4321-210987654321",
base_url="https://myserver.com",
required_scopes=["read", "write"],
jwt_signing_key="test-secret",
client_storage=memory_storage,
)
assert provider._upstream_client_id == "12345678-1234-1234-1234-123456789012"
assert provider._upstream_client_secret.get_secret_value() == "azure_secret_123"
assert str(provider.base_url) == "https://myserver.com/"
# Check tenant is in the endpoints
parsed_auth = urlparse(provider._upstream_authorization_endpoint)
assert "87654321-4321-4321-4321-210987654321" in parsed_auth.path
parsed_token = urlparse(provider._upstream_token_endpoint)
assert "87654321-4321-4321-4321-210987654321" in parsed_token.path
def test_init_defaults(self, memory_storage: MemoryStore):
"""Test that default values are applied correctly."""
provider = AzureProvider(
client_id="test_client",
client_secret="test_secret",
tenant_id="test-tenant",
base_url="https://myserver.com",
required_scopes=["read"],
jwt_signing_key="test-secret",
client_storage=memory_storage,
)
# Check defaults
assert provider._redirect_path == "/auth/callback"
# Azure provider defaults are set but we can't easily verify them without accessing internals
def test_offline_access_automatically_included(self, memory_storage: MemoryStore):
"""Test that offline_access is automatically added to get refresh tokens."""
# Without specifying offline_access
provider = AzureProvider(
client_id="test_client",
client_secret="test_secret",
tenant_id="test-tenant",
base_url="https://myserver.com",
required_scopes=["read"],
jwt_signing_key="test-secret",
client_storage=memory_storage,
)
assert "offline_access" in provider.additional_authorize_scopes
def test_offline_access_not_duplicated(self, memory_storage: MemoryStore):
"""Test that offline_access is not duplicated if already specified."""
provider = AzureProvider(
client_id="test_client",
client_secret="test_secret",
tenant_id="test-tenant",
base_url="https://myserver.com",
required_scopes=["read"],
additional_authorize_scopes=["User.Read", "offline_access"],
jwt_signing_key="test-secret",
client_storage=memory_storage,
)
# Should appear exactly once
assert provider.additional_authorize_scopes.count("offline_access") == 1
assert "User.Read" in provider.additional_authorize_scopes
def test_oauth_endpoints_configured_correctly(self, memory_storage: MemoryStore):
"""Test that OAuth endpoints are configured correctly."""
provider = AzureProvider(
client_id="test_client",
client_secret="test_secret",
tenant_id="my-tenant-id",
base_url="https://myserver.com",
required_scopes=["read"],
jwt_signing_key="test_secret",
client_storage=memory_storage,
)
# Check that endpoints use the correct Azure OAuth2 v2.0 endpoints with tenant
assert (
provider._upstream_authorization_endpoint
== "https://login.microsoftonline.com/my-tenant-id/oauth2/v2.0/authorize"
)
assert (
provider._upstream_token_endpoint
== "https://login.microsoftonline.com/my-tenant-id/oauth2/v2.0/token"
)
assert (
provider._upstream_revocation_endpoint is None
) # Azure doesn't support revocation
def test_special_tenant_values(self, memory_storage: MemoryStore):
"""Test that special tenant values are accepted."""
# Test with "organizations"
provider1 = AzureProvider(
client_id="test_client",
client_secret="test_secret",
tenant_id="organizations",
base_url="https://myserver.com",
required_scopes=["read"],
jwt_signing_key="test-secret",
client_storage=memory_storage,
)
parsed = urlparse(provider1._upstream_authorization_endpoint)
assert "/organizations/" in parsed.path
# Test with "consumers"
provider2 = AzureProvider(
client_id="test_client",
client_secret="test_secret",
tenant_id="consumers",
base_url="https://myserver.com",
required_scopes=["read"],
jwt_signing_key="test-secret",
client_storage=memory_storage,
)
parsed = urlparse(provider2._upstream_authorization_endpoint)
assert "/consumers/" in parsed.path
def test_azure_specific_scopes(self, memory_storage: MemoryStore):
"""Test handling of custom API scope formats."""
# Test that the provider accepts custom API scopes without error
provider = AzureProvider(
client_id="test_client",
client_secret="test_secret",
tenant_id="test-tenant",
base_url="https://myserver.com",
required_scopes=[
"read",
"write",
"admin",
],
jwt_signing_key="test-secret",
client_storage=memory_storage,
)
# Provider should initialize successfully with these scopes
assert provider is not None
# Scopes are stored unprefixed for token validation
# (Azure returns unprefixed scopes in JWT tokens)
assert provider._token_validator.required_scopes == [
"read",
"write",
"admin",
]
def test_init_does_not_require_api_client_id_anymore(
self, memory_storage: MemoryStore
):
"""API client ID is no longer required; audience is client_id."""
provider = AzureProvider(
client_id="test_client",
client_secret="test_secret",
tenant_id="test-tenant",
base_url="https://myserver.com",
required_scopes=["read"],
jwt_signing_key="test-secret",
client_storage=memory_storage,
)
assert provider is not None
def test_init_with_custom_audience_uses_jwt_verifier(
self, memory_storage: MemoryStore
):
"""When audience is provided, JWTVerifier is configured with JWKS and issuer."""
from fastmcp.server.auth.providers.jwt import JWTVerifier
provider = AzureProvider(
client_id="test_client",
client_secret="test_secret",
tenant_id="my-tenant",
base_url="https://myserver.com",
identifier_uri="api://my-api",
required_scopes=[".default"],
jwt_signing_key="test-secret",
client_storage=memory_storage,
)
assert provider._token_validator is not None
assert isinstance(provider._token_validator, JWTVerifier)
verifier = provider._token_validator
assert verifier.jwks_uri is not None
assert verifier.jwks_uri.startswith(
"https://login.microsoftonline.com/my-tenant/discovery/v2.0/keys"
)
assert verifier.issuer == "https://login.microsoftonline.com/my-tenant/v2.0"
assert verifier.audience == "test_client"
# Scopes are stored unprefixed for token validation
# (Azure returns unprefixed scopes like ".default" in JWT tokens)
assert verifier.required_scopes == [".default"]
    async def test_authorize_filters_resource_and_stores_unprefixed_scopes(
        self, memory_storage: MemoryStore
    ):
        """authorize() should drop resource parameter and store unprefixed scopes for MCP clients."""
        provider = AzureProvider(
            client_id="test_client",
            client_secret="test_secret",
            tenant_id="common",
            identifier_uri="api://my-api",
            required_scopes=["read", "write"],
            base_url="https://srv.example",
            jwt_signing_key="test-secret",
            client_storage=memory_storage,
        )
        # Register the client first so authorize() recognizes its client_id.
        await provider.register_client(
            OAuthClientInformationFull(
                client_id="dummy",
                client_secret="secret",
                redirect_uris=[AnyUrl("http://localhost:12345/callback")],
            )
        )
        client = OAuthClientInformationFull(
            client_id="dummy",
            client_secret="secret",
            redirect_uris=[AnyUrl("http://localhost:12345/callback")],
        )
        params = AuthorizationParams(
            redirect_uri=AnyUrl("http://localhost:12345/callback"),
            redirect_uri_provided_explicitly=True,
            scopes=[
                "read",
                "write",
            ],  # Client sends unprefixed scopes (from PRM which advertises unprefixed)
            state="abc",
            code_challenge="xyz",
            resource="https://should.be.ignored",
        )
        url = await provider.authorize(client, params)
        # Extract transaction ID from consent redirect
        parsed = urlparse(url)
        qs = parse_qs(parsed.query)
        assert "txn_id" in qs, "Should redirect to consent page with transaction ID"
        txn_id = qs["txn_id"][0]
        # Verify transaction stores UNPREFIXED scopes for MCP clients
        transaction = await provider._transaction_store.get(key=txn_id)
        assert transaction is not None
        assert "read" in transaction.scopes
        assert "write" in transaction.scopes
        # Azure provider filters resource parameter (not stored in transaction)
        assert transaction.resource is None
        # Verify the upstream Azure URL will have PREFIXED scopes
        upstream_url = provider._build_upstream_authorize_url(
            txn_id, transaction.model_dump()
        )
        # Accept either the URL-encoded or the literal form of the prefixed scope.
        assert (
            "api%3A%2F%2Fmy-api%2Fread" in upstream_url
            or "api://my-api/read" in upstream_url
        )
        assert (
            "api%3A%2F%2Fmy-api%2Fwrite" in upstream_url
            or "api://my-api/write" in upstream_url
        )
    async def test_authorize_appends_additional_scopes(
        self, memory_storage: MemoryStore
    ):
        """authorize() should append additional_authorize_scopes to the authorization request."""
        provider = AzureProvider(
            client_id="test_client",
            client_secret="test_secret",
            tenant_id="common",
            identifier_uri="api://my-api",
            required_scopes=["read"],
            base_url="https://srv.example",
            additional_authorize_scopes=["Mail.Read", "User.Read"],
            jwt_signing_key="test-secret",
            client_storage=memory_storage,
        )
        # Register the client first so authorize() recognizes its client_id.
        await provider.register_client(
            OAuthClientInformationFull(
                client_id="dummy",
                client_secret="secret",
                redirect_uris=[AnyUrl("http://localhost:12345/callback")],
            )
        )
        client = OAuthClientInformationFull(
            client_id="dummy",
            client_secret="secret",
            redirect_uris=[AnyUrl("http://localhost:12345/callback")],
        )
        params = AuthorizationParams(
            redirect_uri=AnyUrl("http://localhost:12345/callback"),
            redirect_uri_provided_explicitly=True,
            scopes=["read"],  # Client sends unprefixed scopes
            state="abc",
            code_challenge="xyz",
        )
        url = await provider.authorize(client, params)
        # Extract transaction ID from consent redirect
        parsed = urlparse(url)
        qs = parse_qs(parsed.query)
        assert "txn_id" in qs, "Should redirect to consent page with transaction ID"
        txn_id = qs["txn_id"][0]
        # Verify transaction stores ONLY MCP scopes (unprefixed)
        # additional_authorize_scopes are NOT stored in transaction
        transaction = await provider._transaction_store.get(key=txn_id)
        assert transaction is not None
        assert "read" in transaction.scopes
        assert "Mail.Read" not in transaction.scopes  # Not in transaction
        assert "User.Read" not in transaction.scopes  # Not in transaction
        # Verify upstream URL includes both MCP scopes (prefixed) AND additional Graph scopes
        upstream_url = provider._build_upstream_authorize_url(
            txn_id, transaction.model_dump()
        )
        # Accept either the URL-encoded or the literal form of the prefixed scope.
        assert (
            "api%3A%2F%2Fmy-api%2Fread" in upstream_url
            or "api://my-api/read" in upstream_url
        )
        assert "Mail.Read" in upstream_url
        assert "User.Read" in upstream_url
def test_base_authority_defaults_to_public_cloud(self, memory_storage: MemoryStore):
"""Test that base_authority defaults to login.microsoftonline.com."""
provider = AzureProvider(
client_id="test_client",
client_secret="test_secret",
tenant_id="test-tenant",
base_url="https://myserver.com",
required_scopes=["read"],
jwt_signing_key="test-secret",
client_storage=memory_storage,
)
assert (
provider._upstream_authorization_endpoint
== "https://login.microsoftonline.com/test-tenant/oauth2/v2.0/authorize"
)
assert (
provider._upstream_token_endpoint
== "https://login.microsoftonline.com/test-tenant/oauth2/v2.0/token"
)
assert isinstance(provider._token_validator, JWTVerifier)
assert (
provider._token_validator.issuer
== "https://login.microsoftonline.com/test-tenant/v2.0"
)
assert (
provider._token_validator.jwks_uri
== "https://login.microsoftonline.com/test-tenant/discovery/v2.0/keys"
)
def test_base_authority_azure_government(self, memory_storage: MemoryStore):
"""Test Azure Government endpoints with login.microsoftonline.us."""
provider = AzureProvider(
client_id="test_client",
client_secret="test_secret",
tenant_id="gov-tenant-id",
base_url="https://myserver.com",
required_scopes=["read"],
base_authority="login.microsoftonline.us",
jwt_signing_key="test-secret",
client_storage=memory_storage,
)
assert (
provider._upstream_authorization_endpoint
== "https://login.microsoftonline.us/gov-tenant-id/oauth2/v2.0/authorize"
)
assert (
provider._upstream_token_endpoint
== "https://login.microsoftonline.us/gov-tenant-id/oauth2/v2.0/token"
)
assert isinstance(provider._token_validator, JWTVerifier)
assert (
provider._token_validator.issuer
== "https://login.microsoftonline.us/gov-tenant-id/v2.0"
)
assert (
provider._token_validator.jwks_uri
== "https://login.microsoftonline.us/gov-tenant-id/discovery/v2.0/keys"
)
def test_base_authority_from_parameter(self, memory_storage: MemoryStore):
"""Test that base_authority can be set via parameter."""
provider = AzureProvider(
client_id="env-client-id",
client_secret="env-secret",
tenant_id="env-tenant-id",
base_url="https://myserver.com",
required_scopes=["read"],
base_authority="login.microsoftonline.us",
jwt_signing_key="test-secret",
client_storage=memory_storage,
)
assert (
provider._upstream_authorization_endpoint
== "https://login.microsoftonline.us/env-tenant-id/oauth2/v2.0/authorize"
)
assert (
provider._upstream_token_endpoint
== "https://login.microsoftonline.us/env-tenant-id/oauth2/v2.0/token"
)
assert isinstance(provider._token_validator, JWTVerifier)
assert (
provider._token_validator.issuer
== "https://login.microsoftonline.us/env-tenant-id/v2.0"
)
assert (
provider._token_validator.jwks_uri
== "https://login.microsoftonline.us/env-tenant-id/discovery/v2.0/keys"
)
def test_base_authority_with_special_tenant_values(
self, memory_storage: MemoryStore
):
"""Test that base_authority works with special tenant values like 'organizations'."""
provider = AzureProvider(
client_id="test_client",
client_secret="test_secret",
tenant_id="organizations",
base_url="https://myserver.com",
required_scopes=["read"],
base_authority="login.microsoftonline.us",
jwt_signing_key="test-secret",
client_storage=memory_storage,
)
parsed = urlparse(provider._upstream_authorization_endpoint)
assert parsed.netloc == "login.microsoftonline.us"
assert "/organizations/" in parsed.path
def test_prepare_scopes_for_upstream_refresh_basic_prefixing(
self, memory_storage: MemoryStore
):
"""Test that unprefixed scopes are correctly prefixed for Azure token refresh."""
provider = AzureProvider(
client_id="test_client",
client_secret="test_secret",
tenant_id="test-tenant",
base_url="https://myserver.com",
identifier_uri="api://my-api",
required_scopes=["read", "write"],
jwt_signing_key="test-secret",
client_storage=memory_storage,
)
# Unprefixed scopes from storage should be prefixed
result = provider._prepare_scopes_for_upstream_refresh(["read", "write"])
assert "api://my-api/read" in result
assert "api://my-api/write" in result
assert "offline_access" in result # Auto-included for refresh tokens
assert len(result) == 3
def test_prepare_scopes_for_upstream_refresh_already_prefixed(
self, memory_storage: MemoryStore
):
"""Test that already-prefixed scopes remain unchanged."""
provider = AzureProvider(
client_id="test_client",
client_secret="test_secret",
tenant_id="test-tenant",
base_url="https://myserver.com",
identifier_uri="api://my-api",
required_scopes=["read"],
jwt_signing_key="test-secret",
client_storage=memory_storage,
)
# Already prefixed scopes should pass through unchanged
result = provider._prepare_scopes_for_upstream_refresh(
["api://my-api/read", "api://other-api/admin"]
)
assert "api://my-api/read" in result
assert "api://other-api/admin" in result
assert "offline_access" in result # Auto-included for refresh tokens
assert len(result) == 3
    def test_prepare_scopes_for_upstream_refresh_with_additional_scopes(
        self, memory_storage: MemoryStore
    ):
        """Test that only OIDC scopes from additional_authorize_scopes are added.

        Azure only allows ONE resource per token request (AADSTS28000), so
        non-OIDC scopes like User.Read are excluded from refresh requests.
        """
        provider = AzureProvider(
            client_id="test_client",
            client_secret="test_secret",
            tenant_id="test-tenant",
            base_url="https://myserver.com",
            identifier_uri="api://my-api",
            required_scopes=["read"],
            additional_authorize_scopes=[
                "User.Read",  # Not OIDC - excluded
                "openid",
                "profile",
                "offline_access",
            ],
            jwt_signing_key="test-secret",
            client_storage=memory_storage,
        )
        # Base scopes should be prefixed, only OIDC scopes appended
        result = provider._prepare_scopes_for_upstream_refresh(["read", "write"])
        assert "api://my-api/read" in result
        assert "api://my-api/write" in result
        assert "User.Read" not in result  # Not OIDC, excluded
        assert "openid" in result
        assert "profile" in result
        assert "offline_access" in result
        # Exactly the two prefixed scopes plus the three OIDC scopes above.
        assert len(result) == 5
def test_prepare_scopes_for_upstream_refresh_filters_duplicate_additional_scopes(
self,
memory_storage: MemoryStore,
):
"""Test that accidentally stored additional_authorize_scopes are filtered out."""
provider = AzureProvider(
client_id="test_client",
client_secret="test_secret",
tenant_id="test-tenant",
base_url="https://myserver.com",
identifier_uri="api://my-api",
required_scopes=["read"],
additional_authorize_scopes=["User.Read", "openid"],
jwt_signing_key="test-secret",
client_storage=memory_storage,
)
# If additional scopes were accidentally stored, they should be filtered
# User.Read is not OIDC so won't be added
result = provider._prepare_scopes_for_upstream_refresh(
["read", "User.Read", "openid"]
)
# Should have: api://my-api/read (prefixed) + openid + offline_access (OIDC scopes)
# User.Read is filtered from storage AND not added (not OIDC)
assert "api://my-api/read" in result
assert "User.Read" not in result # Not OIDC
assert result.count("openid") == 1
assert "offline_access" in result # Auto-included and is OIDC
assert len(result) == 3
def test_prepare_scopes_for_upstream_refresh_mixed_scopes(
self, memory_storage: MemoryStore
):
"""Test mixed scenario with both prefixed and unprefixed scopes."""
provider = AzureProvider(
client_id="test_client",
client_secret="test_secret",
tenant_id="test-tenant",
base_url="https://myserver.com",
identifier_uri="api://my-api",
required_scopes=["read"],
additional_authorize_scopes=["openid"], # OIDC scope
jwt_signing_key="test-secret",
client_storage=memory_storage,
)
# Mix of prefixed and unprefixed scopes
result = provider._prepare_scopes_for_upstream_refresh(
["read", "api://other-api/admin", "write"]
)
assert "api://my-api/read" in result
assert "api://other-api/admin" in result # Already prefixed, unchanged
assert "api://my-api/write" in result
assert "openid" in result
assert "offline_access" in result # Auto-included
assert len(result) == 5
def test_prepare_scopes_for_upstream_refresh_scope_with_slash(
self, memory_storage: MemoryStore
):
"""Test that scopes containing '/' are not prefixed."""
provider = AzureProvider(
client_id="test_client",
client_secret="test_secret",
tenant_id="test-tenant",
base_url="https://myserver.com",
identifier_uri="api://my-api",
required_scopes=["read"],
jwt_signing_key="test-secret",
client_storage=memory_storage,
)
# Scopes with "/" should not be prefixed (already fully qualified)
result = provider._prepare_scopes_for_upstream_refresh(
["read", "https://graph.microsoft.com/.default"]
)
assert "api://my-api/read" in result
assert (
"https://graph.microsoft.com/.default" in result
) # Not prefixed (contains ://)
def test_prepare_scopes_for_upstream_refresh_empty_scopes(
self, memory_storage: MemoryStore
):
"""Test behavior with empty scopes list."""
provider = AzureProvider(
client_id="test_client",
client_secret="test_secret",
tenant_id="test-tenant",
base_url="https://myserver.com",
identifier_uri="api://my-api",
required_scopes=["read"],
additional_authorize_scopes=["User.Read", "openid"],
jwt_signing_key="test-secret",
client_storage=memory_storage,
)
# Empty scopes should still add OIDC scopes (not User.Read)
result = provider._prepare_scopes_for_upstream_refresh([])
assert "User.Read" not in result # Not OIDC
assert "openid" in result
assert "offline_access" in result # Auto-included
assert len(result) == 2 # Only OIDC scopes: openid + offline_access
def test_prepare_scopes_for_upstream_refresh_no_additional_scopes(
self, memory_storage: MemoryStore
):
"""Test behavior when no additional_authorize_scopes are configured."""
provider = AzureProvider(
client_id="test_client",
client_secret="test_secret",
tenant_id="test-tenant",
base_url="https://myserver.com",
identifier_uri="api://my-api",
required_scopes=["read"],
jwt_signing_key="test-secret",
client_storage=memory_storage,
)
# Should prefix base scopes, plus auto-added offline_access
result = provider._prepare_scopes_for_upstream_refresh(["read", "write"])
assert "api://my-api/read" in result
assert "api://my-api/write" in result
assert "offline_access" in result # Auto-included
assert len(result) == 3
def test_prepare_scopes_for_upstream_refresh_deduplicates_scopes(
self, memory_storage: MemoryStore
):
"""Test that duplicate scopes are deduplicated while preserving order."""
provider = AzureProvider(
client_id="test_client",
client_secret="test_secret",
tenant_id="test-tenant",
base_url="https://myserver.com",
identifier_uri="api://my-api",
required_scopes=["read"],
additional_authorize_scopes=["openid", "profile"], # OIDC scopes only
jwt_signing_key="test-secret",
client_storage=memory_storage,
)
# Test with duplicate base scopes
result = provider._prepare_scopes_for_upstream_refresh(
["read", "write", "read", "openid"]
)
# Should have deduplicated results in order (OIDC scopes added, offline_access auto-added)
assert result == [
"api://my-api/read",
"api://my-api/write",
"openid",
"profile",
"offline_access",
]
assert len(result) == 5
def test_prepare_scopes_for_upstream_refresh_deduplicates_prefixed_variants(
self, memory_storage: MemoryStore
):
"""Test that both prefixed and unprefixed variants are deduplicated."""
provider = AzureProvider(
client_id="test_client",
client_secret="test_secret",
tenant_id="test-tenant",
base_url="https://myserver.com",
identifier_uri="api://my-api",
required_scopes=["read"],
jwt_signing_key="test-secret",
client_storage=memory_storage,
)
# Test with both prefixed and unprefixed variants of same scope
result = provider._prepare_scopes_for_upstream_refresh(
["read", "api://my-api/read", "write"]
)
# Should deduplicate - first occurrence wins (api://my-api/read from "read")
assert "api://my-api/read" in result
assert "api://my-api/write" in result
assert "offline_access" in result # Auto-included
# Should have 3 items (read deduplicated, plus offline_access)
assert len(result) == 3
assert result.count("api://my-api/read") == 1
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "tests/server/auth/providers/test_azure.py",
"license": "Apache License 2.0",
"lines": 656,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
PrefectHQ/fastmcp:tests/server/auth/providers/test_google.py | """Tests for Google OAuth provider."""
import pytest
from key_value.aio.stores.memory import MemoryStore
from fastmcp.server.auth.providers.google import GoogleProvider
@pytest.fixture
def memory_storage() -> MemoryStore:
    """In-memory client storage so tests never touch SQLite (problematic on Windows)."""
    return MemoryStore()
class TestGoogleProvider:
    """Test Google OAuth provider functionality."""

    def test_init_with_explicit_params(self, memory_storage: MemoryStore):
        """Explicitly passed parameters should be stored on the provider."""
        provider = GoogleProvider(
            client_id="123456789.apps.googleusercontent.com",
            client_secret="GOCSPX-test123",
            base_url="https://myserver.com",
            required_scopes=["openid", "email", "profile"],
            jwt_signing_key="test-secret",
            client_storage=memory_storage,
        )
        assert str(provider.base_url) == "https://myserver.com/"
        assert provider._upstream_client_id == "123456789.apps.googleusercontent.com"
        assert provider._upstream_client_secret.get_secret_value() == "GOCSPX-test123"

    def test_init_defaults(self, memory_storage: MemoryStore):
        """Default values should be applied when not specified."""
        provider = GoogleProvider(
            client_id="123456789.apps.googleusercontent.com",
            client_secret="GOCSPX-test123",
            base_url="https://myserver.com",
            jwt_signing_key="test-secret",
            client_storage=memory_storage,
        )
        # Default OAuth callback path.
        assert provider._redirect_path == "/auth/callback"
        # Google defaults to ["openid"] scopes, but that is internal state we
        # do not reach into here.

    def test_oauth_endpoints_configured_correctly(self, memory_storage: MemoryStore):
        """Endpoints should point at Google's OAuth2 infrastructure."""
        provider = GoogleProvider(
            client_id="123456789.apps.googleusercontent.com",
            client_secret="GOCSPX-test123",
            base_url="https://myserver.com",
            jwt_signing_key="test-secret",
            client_storage=memory_storage,
        )
        assert (
            provider._upstream_authorization_endpoint
            == "https://accounts.google.com/o/oauth2/v2/auth"
        )
        assert provider._upstream_token_endpoint == "https://oauth2.googleapis.com/token"
        # Google provider doesn't currently set a revocation endpoint.
        assert provider._upstream_revocation_endpoint is None

    def test_google_specific_scopes(self, memory_storage: MemoryStore):
        """Full https://www.googleapis.com/... scope URIs should be accepted."""
        provider = GoogleProvider(
            client_id="123456789.apps.googleusercontent.com",
            client_secret="GOCSPX-test123",
            base_url="https://myserver.com",
            required_scopes=[
                "openid",
                "https://www.googleapis.com/auth/userinfo.email",
                "https://www.googleapis.com/auth/userinfo.profile",
            ],
            jwt_signing_key="test-secret",
            client_storage=memory_storage,
        )
        # Successful initialization is the assertion here.
        assert provider is not None

    def test_extra_authorize_params_defaults(self, memory_storage: MemoryStore):
        """Defaults should request offline access with forced consent (refresh tokens)."""
        provider = GoogleProvider(
            client_id="123456789.apps.googleusercontent.com",
            client_secret="GOCSPX-test123",
            base_url="https://myserver.com",
            jwt_signing_key="test-secret",
            client_storage=memory_storage,
        )
        assert provider._extra_authorize_params == {
            "access_type": "offline",
            "prompt": "consent",
        }

    def test_extra_authorize_params_override_defaults(
        self, memory_storage: MemoryStore
    ):
        """User-supplied params should win over defaults, leaving the rest intact."""
        provider = GoogleProvider(
            client_id="123456789.apps.googleusercontent.com",
            client_secret="GOCSPX-test123",
            base_url="https://myserver.com",
            jwt_signing_key="test-secret",
            extra_authorize_params={"prompt": "select_account"},
            client_storage=memory_storage,
        )
        extras = provider._extra_authorize_params
        assert extras["prompt"] == "select_account"  # user override wins
        assert extras["access_type"] == "offline"  # default preserved

    def test_extra_authorize_params_add_new_params(self, memory_storage: MemoryStore):
        """Novel params should be merged alongside the defaults."""
        provider = GoogleProvider(
            client_id="123456789.apps.googleusercontent.com",
            client_secret="GOCSPX-test123",
            base_url="https://myserver.com",
            jwt_signing_key="test-secret",
            extra_authorize_params={"login_hint": "user@example.com"},
            client_storage=memory_storage,
        )
        extras = provider._extra_authorize_params
        assert extras["login_hint"] == "user@example.com"  # new param added
        assert extras["access_type"] == "offline"  # defaults kept
        assert extras["prompt"] == "consent"
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "tests/server/auth/providers/test_google.py",
"license": "Apache License 2.0",
"lines": 116,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
PrefectHQ/fastmcp:tests/server/auth/providers/test_workos.py | """Tests for WorkOS OAuth provider."""
from urllib.parse import urlparse
import httpx
import pytest
from key_value.aio.stores.memory import MemoryStore
from pytest_httpx import HTTPXMock
from fastmcp import Client, FastMCP
from fastmcp.client.transports import StreamableHttpTransport
from fastmcp.server.auth.providers.workos import (
AuthKitProvider,
WorkOSProvider,
WorkOSTokenVerifier,
)
from fastmcp.utilities.tests import HeadlessOAuth, run_server_async
@pytest.fixture
def memory_storage() -> MemoryStore:
    """In-memory client storage so tests never touch SQLite (problematic on Windows)."""
    return MemoryStore()
class TestWorkOSProvider:
    """Test WorkOS OAuth provider functionality."""

    def test_init_with_explicit_params(self, memory_storage: MemoryStore):
        """Explicitly passed parameters should be stored on the provider."""
        provider = WorkOSProvider(
            client_id="client_test123",
            client_secret="secret_test456",
            authkit_domain="https://test.authkit.app",
            base_url="https://myserver.com",
            required_scopes=["openid", "profile"],
            jwt_signing_key="test-secret",
            client_storage=memory_storage,
        )
        assert str(provider.base_url) == "https://myserver.com/"
        assert provider._upstream_client_id == "client_test123"
        assert provider._upstream_client_secret.get_secret_value() == "secret_test456"

    def test_authkit_domain_https_prefix_handling(self, memory_storage: MemoryStore):
        """authkit_domain should gain https:// only when no scheme is given."""

        def auth_endpoint(domain: str):
            # Build a provider for the given domain and parse its authorize URL.
            provider = WorkOSProvider(
                client_id="test_client",
                client_secret="test_secret",
                authkit_domain=domain,
                base_url="https://myserver.com",
                jwt_signing_key="test-secret",
                client_storage=memory_storage,
            )
            return urlparse(provider._upstream_authorization_endpoint)

        # Missing scheme: https:// is prepended.
        bare = auth_endpoint("test.authkit.app")
        assert (bare.scheme, bare.netloc, bare.path) == (
            "https",
            "test.authkit.app",
            "/oauth2/authorize",
        )
        # Explicit https:// is preserved.
        https = auth_endpoint("https://test.authkit.app")
        assert (https.scheme, https.netloc, https.path) == (
            "https",
            "test.authkit.app",
            "/oauth2/authorize",
        )
        # Explicit http:// (e.g. local development) is preserved too.
        http = auth_endpoint("http://localhost:8080")
        assert (http.scheme, http.netloc, http.path) == (
            "http",
            "localhost:8080",
            "/oauth2/authorize",
        )

    def test_init_defaults(self, memory_storage: MemoryStore):
        """Default values should be applied when not specified."""
        provider = WorkOSProvider(
            client_id="test_client",
            client_secret="test_secret",
            authkit_domain="https://test.authkit.app",
            base_url="https://myserver.com",
            jwt_signing_key="test-secret",
            client_storage=memory_storage,
        )
        # Default OAuth callback path; WorkOS configures no default scopes.
        assert provider._redirect_path == "/auth/callback"

    def test_oauth_endpoints_configured_correctly(self, memory_storage: MemoryStore):
        """OAuth endpoints should be derived from the AuthKit domain."""
        provider = WorkOSProvider(
            client_id="test_client",
            client_secret="test_secret",
            authkit_domain="https://test.authkit.app",
            base_url="https://myserver.com",
            jwt_signing_key="test-secret",
            client_storage=memory_storage,
        )
        base = "https://test.authkit.app"
        assert provider._upstream_authorization_endpoint == f"{base}/oauth2/authorize"
        assert provider._upstream_token_endpoint == f"{base}/oauth2/token"
        # WorkOS doesn't support revocation
        assert provider._upstream_revocation_endpoint is None
@pytest.fixture
async def mcp_server_url():
    """Run an AuthKit-protected FastMCP server over HTTP and yield its URL."""
    server = FastMCP(
        auth=AuthKitProvider(
            authkit_domain="https://respectful-lullaby-34-staging.authkit.app",
            base_url="http://localhost:4321",
        )
    )

    @server.tool
    def add(a: int, b: int) -> int:
        return a + b

    async with run_server_async(server, transport="http") as url:
        yield url
@pytest.fixture
def client_with_headless_oauth(mcp_server_url: str) -> Client:
    """Client with headless OAuth that bypasses browser interaction."""
    transport = StreamableHttpTransport(mcp_server_url)
    oauth = HeadlessOAuth(mcp_url=mcp_server_url)
    return Client(transport=transport, auth=oauth)
class TestAuthKitProvider:
    async def test_unauthorized_access(
        self, memory_storage: MemoryStore, mcp_server_url: str
    ):
        """Listing tools without credentials must fail with HTTP 401."""
        with pytest.raises(httpx.HTTPStatusError) as exc_info:
            async with Client(mcp_server_url) as client:
                tools = await client.list_tools()  # noqa: F841
        error = exc_info.value
        assert isinstance(error, httpx.HTTPStatusError)
        assert error.response.status_code == 401
        # The call raised before assignment, so nothing was bound.
        assert "tools" not in locals()

    # async def test_authorized_access(self, client_with_headless_oauth: Client):
    #     async with client_with_headless_oauth:
    #         tools = await client_with_headless_oauth.list_tools()
    #         assert tools is not None
    #         assert len(tools) > 0
    #         assert "add" in tools
class TestWorkOSTokenVerifierScopes:
    """Scope enforcement behavior of WorkOSTokenVerifier.verify_token()."""

    async def test_verify_token_rejects_missing_required_scopes(
        self, httpx_mock: HTTPXMock
    ):
        """A token lacking a required scope is rejected (verify returns None)."""
        httpx_mock.add_response(
            url="https://test.authkit.app/oauth2/userinfo",
            status_code=200,
            json={
                "sub": "user_123",
                "email": "user@example.com",
                "scope": "openid profile",
            },
        )
        verifier = WorkOSTokenVerifier(
            authkit_domain="https://test.authkit.app",
            required_scopes=["read:secrets"],
        )
        assert await verifier.verify_token("token") is None

    async def test_verify_token_returns_actual_token_scopes(
        self, httpx_mock: HTTPXMock
    ):
        """The verified token carries exactly the scopes userinfo reported."""
        httpx_mock.add_response(
            url="https://test.authkit.app/oauth2/userinfo",
            status_code=200,
            json={
                "sub": "user_123",
                "email": "user@example.com",
                "scope": "openid profile read:secrets",
            },
        )
        verifier = WorkOSTokenVerifier(
            authkit_domain="https://test.authkit.app",
            required_scopes=["read:secrets"],
        )
        access_token = await verifier.verify_token("token")
        assert access_token is not None
        assert access_token.scopes == ["openid", "profile", "read:secrets"]
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "tests/server/auth/providers/test_workos.py",
"license": "Apache License 2.0",
"lines": 184,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
PrefectHQ/fastmcp:examples/fastmcp_config/server.py | """Example FastMCP server for demonstrating fastmcp.json configuration."""
from fastmcp import FastMCP

# Server instance discovered by `fastmcp run server.py`; runtime options
# (Python version, dependencies, transport) live in fastmcp.json.
mcp = FastMCP("Config Example Server")


@mcp.tool
def echo(text: str) -> str:
    """Echo the provided text back to the user."""
    return f"You said: {text}"


@mcp.tool
def add(a: int, b: int) -> int:
    """Add two numbers together."""
    return a + b


@mcp.resource("config://example")
def get_example_config() -> str:
    """Return an example configuration."""
    return """
This server is configured using fastmcp.json.
The configuration file specifies:
- Python version
- Dependencies
- Transport settings
- Other runtime options
"""


# This allows the server to run with: fastmcp run server.py
if __name__ == "__main__":
    import asyncio

    asyncio.run(mcp.run_async())
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "examples/fastmcp_config/server.py",
"license": "Apache License 2.0",
"lines": 27,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | documentation |
PrefectHQ/fastmcp:examples/fastmcp_config_demo/server.py | """
Example server demonstrating fastmcp.json configuration.
This server previously would have used the deprecated dependencies parameter:
mcp = FastMCP("Demo Server", dependencies=["pyautogui", "Pillow"])
Now dependencies are declared in fastmcp.json alongside this file.
"""
import io

from fastmcp import FastMCP
from fastmcp.utilities.types import Image

# Dependencies (pyautogui, Pillow) are declared in fastmcp.json, not here.
mcp = FastMCP("Screenshot Demo")


@mcp.tool
def take_screenshot() -> Image:
    """
    Take a screenshot of the user's screen and return it as an image.
    Use this tool anytime the user wants you to look at something on their screen.
    """
    import pyautogui

    # Compress to JPEG so the payload stays under size limits.
    shot = pyautogui.screenshot().convert("RGB")
    buffer = io.BytesIO()
    shot.save(buffer, format="JPEG", quality=60, optimize=True)
    return Image(data=buffer.getvalue(), format="jpeg")


@mcp.tool
def analyze_colors() -> dict:
    """
    Analyze the dominant colors in the current screen.
    Returns a dictionary with color statistics from the screen.
    """
    import pyautogui
    from PIL import Image as PILImage

    # Downsample for a fast, approximate analysis.
    thumb = pyautogui.screenshot().resize((100, 100), PILImage.Resampling.LANCZOS)
    # getcolors() returns None when the color count exceeds maxcolors.
    colors = thumb.getcolors(maxcolors=10000)
    if not colors:
        return {"error": "Too many colors to analyze"}
    # Ten most frequent (count, rgb) entries.
    top = sorted(colors, key=lambda entry: entry[0], reverse=True)[:10]
    return {
        "top_colors": [{"count": count, "rgb": rgb} for count, rgb in top],
        "total_pixels": sum(count for count, _ in colors),
    }


if __name__ == "__main__":
    import asyncio

    asyncio.run(mcp.run_async())
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "examples/fastmcp_config_demo/server.py",
"license": "Apache License 2.0",
"lines": 49,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
PrefectHQ/fastmcp:tests/cli/test_config.py | """Tests for FastMCP configuration file support with nested structure."""
import json
import os
from pathlib import Path
import pytest
from pydantic import ValidationError
from fastmcp.utilities.mcp_server_config import (
Deployment,
MCPServerConfig,
)
from fastmcp.utilities.mcp_server_config.v1.environments.uv import UVEnvironment
from fastmcp.utilities.mcp_server_config.v1.sources.filesystem import FileSystemSource
class TestFileSystemSource:
    """Test FileSystemSource class."""

    def test_dict_source_minimal(self):
        """A bare {"path": ...} dict is coerced into a FileSystemSource."""
        config = MCPServerConfig(source={"path": "server.py"})
        source = config.source
        assert isinstance(source, FileSystemSource)
        assert source.type == "filesystem"
        assert source.path == "server.py"
        assert source.entrypoint is None

    def test_dict_source_with_entrypoint(self):
        """A dict with an entrypoint keeps that entrypoint after coercion."""
        config = MCPServerConfig(source={"path": "server.py", "entrypoint": "app"})
        source = config.source
        assert isinstance(source, FileSystemSource)
        assert source.type == "filesystem"
        assert source.path == "server.py"
        assert source.entrypoint == "app"

    def test_filesystem_source_entrypoint(self):
        """An explicitly constructed FileSystemSource is stored unchanged."""
        config = MCPServerConfig(
            source=FileSystemSource(path="src/server.py", entrypoint="mcp")
        )
        source = config.source
        assert isinstance(source, FileSystemSource)
        assert source.type == "filesystem"
        assert source.path == "src/server.py"
        assert source.entrypoint == "mcp"
class TestEnvironment:
    """Test Environment class."""
    def test_environment_config_fields(self):
        """Test all Environment fields."""
        config = MCPServerConfig(
            source={"path": "server.py"},
            environment={
                "python": "3.12",
                "dependencies": ["requests", "numpy>=2.0"],
                "requirements": "requirements.txt",
                "project": ".",
                "editable": ["../my-package"],
            },
        )
        env = config.environment
        assert env.python == "3.12"
        assert env.dependencies == ["requests", "numpy>=2.0"]
        # Paths are stored as Path objects (validators convert the raw strings)
        assert env.requirements == Path("requirements.txt")
        assert env.project == Path(".")
        assert env.editable == [Path("../my-package")]
    def test_needs_uv(self):
        """Test needs_uv() method."""
        # No environment config - doesn't need UV
        config = MCPServerConfig(source={"path": "server.py"})
        assert not config.environment._must_run_with_uv()
        # Empty environment - doesn't need UV
        config = MCPServerConfig(source={"path": "server.py"}, environment={})
        assert not config.environment._must_run_with_uv()
        # With dependencies - needs UV
        config = MCPServerConfig(
            source={"path": "server.py"}, environment={"dependencies": ["requests"]}
        )
        assert config.environment._must_run_with_uv()
        # With Python version - needs UV
        config = MCPServerConfig(
            source={"path": "server.py"}, environment={"python": "3.12"}
        )
        assert config.environment._must_run_with_uv()
    def test_build_uv_run_command(self):
        """Test build_uv_run_command() method."""
        config = MCPServerConfig(
            source={"path": "server.py"},
            environment={
                "python": "3.12",
                "dependencies": ["requests", "numpy"],
                "requirements": "requirements.txt",
                "project": ".",
            },
        )
        cmd = config.environment.build_command(["fastmcp", "run", "server.py"])
        # The wrapped command is always uv-run based
        assert cmd[0] == "uv"
        assert cmd[1] == "run"
        # Python version not added when project is specified (project defines its own Python)
        assert "--python" not in cmd
        assert "3.12" not in cmd
        assert "--project" in cmd
        # Project path should be resolved to absolute path
        project_idx = cmd.index("--project")
        assert Path(cmd[project_idx + 1]).is_absolute()
        assert "--with" in cmd
        assert "requests" in cmd
        assert "numpy" in cmd
        assert "--with-requirements" in cmd
        # Requirements path should be resolved to absolute path
        req_idx = cmd.index("--with-requirements")
        assert Path(cmd[req_idx + 1]).is_absolute()
        # Command args should be at the end
        assert "fastmcp" in cmd[-3:]
        assert "run" in cmd[-2:]
        assert "server.py" in cmd[-1:]
class TestDeployment:
    """Test Deployment class."""

    def test_deployment_config_fields(self):
        """Test all Deployment fields."""
        config = MCPServerConfig(
            source={"path": "server.py"},
            deployment={
                "transport": "http",
                "host": "0.0.0.0",
                "port": 8000,
                "path": "/api/",
                "log_level": "DEBUG",
                "env": {"API_KEY": "secret"},
                "cwd": "./work",
                "args": ["--debug"],
            },
        )
        deploy = config.deployment
        assert deploy.transport == "http"
        assert deploy.host == "0.0.0.0"
        assert deploy.port == 8000
        assert deploy.path == "/api/"
        assert deploy.log_level == "DEBUG"
        assert deploy.env == {"API_KEY": "secret"}
        assert deploy.cwd == "./work"
        assert deploy.args == ["--debug"]

    def test_apply_runtime_settings(self, tmp_path):
        """Test apply_runtime_settings() method."""
        import os

        # Create config with env vars and cwd
        work_dir = tmp_path / "work"
        work_dir.mkdir()
        config = MCPServerConfig(
            source={"path": "server.py"},
            deployment={
                "env": {"TEST_VAR": "test_value"},
                "cwd": "work",
            },
        )
        original_cwd = os.getcwd()
        original_env = os.environ.get("TEST_VAR")
        try:
            config.deployment.apply_runtime_settings(tmp_path / "fastmcp.json")
            # Check environment variable was set
            assert os.environ["TEST_VAR"] == "test_value"
            # Check working directory was changed (cwd resolved against config file)
            assert Path.cwd() == work_dir.resolve()
        finally:
            # Restore original state
            os.chdir(original_cwd)
            if original_env is None:
                os.environ.pop("TEST_VAR", None)
            else:
                os.environ["TEST_VAR"] = original_env

    def test_env_var_interpolation(self, tmp_path):
        """Test environment variable interpolation in deployment env."""
        import os

        # FIX: snapshot *all* touched variables up front, including BASE_URL
        # and ENV_NAME. The previous version popped those two unconditionally
        # in the finally block, clobbering any pre-existing values and leaking
        # state into subsequent tests.
        touched = [
            "BASE_URL",
            "ENV_NAME",
            "API_URL",
            "DATABASE",
            "PREFIXED",
            "MISSING",
            "STATIC",
        ]
        original_values = {key: os.environ.get(key) for key in touched}
        # Set up test environment variables
        os.environ["BASE_URL"] = "example.com"
        os.environ["ENV_NAME"] = "production"
        config = MCPServerConfig(
            source={"path": "server.py"},
            deployment={
                "env": {
                    "API_URL": "https://api.${BASE_URL}/v1",
                    "DATABASE": "postgres://${ENV_NAME}.db",
                    "PREFIXED": "MY_${ENV_NAME}_SERVER",
                    "MISSING": "value_${NONEXISTENT}_here",
                    "STATIC": "no_interpolation",
                }
            },
        )
        try:
            config.deployment.apply_runtime_settings()
            # Check interpolated values
            assert os.environ["API_URL"] == "https://api.example.com/v1"
            assert os.environ["DATABASE"] == "postgres://production.db"
            assert os.environ["PREFIXED"] == "MY_production_SERVER"
            # Missing variables should keep the placeholder
            assert os.environ["MISSING"] == "value_${NONEXISTENT}_here"
            # Static values should remain unchanged
            assert os.environ["STATIC"] == "no_interpolation"
        finally:
            # Restore every touched variable to its pre-test state
            for key, value in original_values.items():
                if value is None:
                    os.environ.pop(key, None)
                else:
                    os.environ[key] = value
class TestMCPServerConfig:
    """Test MCPServerConfig root configuration."""
    def test_minimal_config(self):
        """Test creating a config with only required fields."""
        config = MCPServerConfig(source={"path": "server.py"})
        assert isinstance(config.source, FileSystemSource)
        assert config.source.path == "server.py"
        assert config.source.entrypoint is None
        # Environment and deployment are now always present but empty
        assert isinstance(config.environment, UVEnvironment)
        assert isinstance(config.deployment, Deployment)
        # Check they have no values set
        assert not config.environment._must_run_with_uv()
        assert all(
            getattr(config.deployment, field, None) is None
            for field in Deployment.model_fields
        )
    def test_nested_structure(self):
        """Test the nested configuration structure."""
        config = MCPServerConfig(
            source={"path": "server.py"},
            environment={
                "python": "3.12",
                "dependencies": ["fastmcp"],
            },
            deployment={
                "transport": "stdio",
                "log_level": "INFO",
            },
        )
        assert isinstance(config.source, FileSystemSource)
        assert config.source.path == "server.py"
        assert config.source.entrypoint is None
        assert isinstance(config.environment, UVEnvironment)
        assert isinstance(config.deployment, Deployment)
    def test_from_file(self, tmp_path):
        """Test loading config from JSON file with nested structure."""
        config_data = {
            "$schema": "https://gofastmcp.com/public/schemas/fastmcp.json/v1.json",
            "source": {"path": "src/server.py", "entrypoint": "app"},
            "environment": {"python": "3.12", "dependencies": ["requests"]},
            "deployment": {"transport": "http", "port": 8000},
        }
        config_file = tmp_path / "fastmcp.json"
        config_file.write_text(json.dumps(config_data))
        config = MCPServerConfig.from_file(config_file)
        # When loaded from JSON with entrypoint format, it becomes EntrypointConfig
        assert isinstance(config.source, FileSystemSource)
        assert config.source.path == "src/server.py"
        assert config.source.entrypoint == "app"
        assert config.environment.python == "3.12"
        assert config.environment.dependencies == ["requests"]
        assert config.deployment.transport == "http"
        assert config.deployment.port == 8000
    def test_from_file_with_string_entrypoint(self, tmp_path):
        """Test loading config with dict source format."""
        config_data = {
            "source": {"path": "server.py", "entrypoint": "mcp"},
            "environment": {"dependencies": ["fastmcp"]},
        }
        config_file = tmp_path / "fastmcp.json"
        config_file.write_text(json.dumps(config_data))
        config = MCPServerConfig.from_file(config_file)
        # String entrypoint with : should be converted to EntrypointConfig
        assert isinstance(config.source, FileSystemSource)
        assert config.source.path == "server.py"
        assert config.source.entrypoint == "mcp"
    def test_string_entrypoint_with_entrypoint_and_environment(self, tmp_path):
        """Test that file.py:entrypoint syntax works with environment config."""
        config_data = {
            "source": {"path": "src/server.py", "entrypoint": "app"},
            "environment": {"python": "3.12", "dependencies": ["fastmcp", "requests"]},
            "deployment": {"transport": "http", "port": 8000},
        }
        config_file = tmp_path / "fastmcp.json"
        config_file.write_text(json.dumps(config_data))
        config = MCPServerConfig.from_file(config_file)
        # Should be parsed into EntrypointConfig
        assert isinstance(config.source, FileSystemSource)
        assert config.source.path == "src/server.py"
        assert config.source.entrypoint == "app"
        # Environment config should still work
        assert config.environment.python == "3.12"
        assert config.environment.dependencies == ["fastmcp", "requests"]
        # Deployment config should still work
        assert config.deployment.transport == "http"
        assert config.deployment.port == 8000
    def test_find_config_in_current_dir(self, tmp_path):
        """Test finding config in current directory."""
        config_file = tmp_path / "fastmcp.json"
        config_file.write_text(json.dumps({"source": {"path": "server.py"}}))
        original_cwd = os.getcwd()
        try:
            # find_config() with no argument searches the process cwd
            os.chdir(tmp_path)
            found = MCPServerConfig.find_config()
            assert found == config_file
        finally:
            os.chdir(original_cwd)
    def test_find_config_not_in_parent_dir(self, tmp_path):
        """Test that config is NOT found in parent directory."""
        config_file = tmp_path / "fastmcp.json"
        config_file.write_text(json.dumps({"source": {"path": "server.py"}}))
        subdir = tmp_path / "subdir"
        subdir.mkdir()
        # Should NOT find config in parent directory (no upward traversal)
        found = MCPServerConfig.find_config(subdir)
        assert found is None
    def test_find_config_in_specified_dir(self, tmp_path):
        """Test finding config in the specified directory."""
        config_file = tmp_path / "fastmcp.json"
        config_file.write_text(json.dumps({"source": {"path": "server.py"}}))
        # Should find config when looking in the directory that contains it
        found = MCPServerConfig.find_config(tmp_path)
        assert found == config_file
    def test_find_config_not_found(self, tmp_path):
        """Test when config is not found."""
        found = MCPServerConfig.find_config(tmp_path)
        assert found is None
    def test_invalid_transport(self, tmp_path):
        """Test loading config with invalid transport value."""
        config_data = {
            "source": {"path": "server.py"},
            "deployment": {"transport": "invalid_transport"},
        }
        config_file = tmp_path / "fastmcp.json"
        config_file.write_text(json.dumps(config_data))
        # Pydantic validation rejects transports outside the allowed literal set
        with pytest.raises(ValidationError):
            MCPServerConfig.from_file(config_file)
    def test_optional_sections(self):
        """Test that all config sections are optional except source."""
        # Only source is required
        config = MCPServerConfig(source={"path": "server.py"})
        assert isinstance(config.source, FileSystemSource)
        assert config.source.path == "server.py"
        # Environment and deployment are now always present but may be empty
        assert isinstance(config.environment, UVEnvironment)
        assert isinstance(config.deployment, Deployment)
        # Only environment with values
        config = MCPServerConfig(
            source={"path": "server.py"}, environment={"python": "3.12"}
        )
        assert config.environment.python == "3.12"
        assert isinstance(config.deployment, Deployment)
        assert all(
            getattr(config.deployment, field, None) is None
            for field in Deployment.model_fields
        )
        # Only deployment with values
        config = MCPServerConfig(
            source={"path": "server.py"}, deployment={"transport": "http"}
        )
        assert isinstance(config.environment, UVEnvironment)
        # Check all fields except 'type' which has a default value
        assert all(
            getattr(config.environment, field, None) is None
            for field in UVEnvironment.model_fields
            if field != "type"
        )
        assert config.deployment.transport == "http"
class TestMCPServerConfigRoundtrip:
    """Verify MCPServerConfig survives a model_dump() -> reconstruct cycle.

    The CLI relies on this pattern to apply overrides immutably.
    """

    def test_roundtrip_preserves_schema(self):
        """The schema_ field must survive the dump/reconstruct cycle."""
        original = MCPServerConfig(source=FileSystemSource(path="server.py"))
        rebuilt = MCPServerConfig(**original.model_dump())
        assert rebuilt.schema_ == original.schema_

    def test_roundtrip_with_all_fields(self):
        """A fully-populated config must survive dump/reconstruct."""
        original = MCPServerConfig(
            source=FileSystemSource(path="server.py", entrypoint="app"),
            environment=UVEnvironment(python="3.11"),
            deployment=Deployment(transport="http", port=8080),
        )
        rebuilt = MCPServerConfig(**original.model_dump())
        assert rebuilt.source.path == "server.py"
        assert rebuilt.environment.python == "3.11"
        assert rebuilt.deployment.port == 8080
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "tests/cli/test_config.py",
"license": "Apache License 2.0",
"lines": 386,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
PrefectHQ/fastmcp:tests/cli/test_run_config.py | """Integration tests for FastMCP configuration with run command."""
import json
import os
from pathlib import Path
import pytest
from fastmcp.cli.run import load_mcp_server_config
from fastmcp.utilities.mcp_server_config import (
Deployment,
MCPServerConfig,
)
from fastmcp.utilities.mcp_server_config.v1.environments.uv import UVEnvironment
from fastmcp.utilities.mcp_server_config.v1.sources.filesystem import FileSystemSource
@pytest.fixture
def sample_config(tmp_path):
    """Create a sample fastmcp.json configuration file with nested structure.

    Returns the path to the written config file; a minimal ``server.py`` is
    written alongside it so source-path references resolve.
    """
    config_data = {
        "$schema": "https://gofastmcp.com/public/schemas/fastmcp.json/v1.json",
        "source": {"path": "server.py"},
        "environment": {"python": "3.11", "dependencies": ["requests"]},
        "deployment": {"transport": "stdio", "env": {"TEST_VAR": "test_value"}},
    }
    config_file = tmp_path / "fastmcp.json"
    config_file.write_text(json.dumps(config_data, indent=2))
    # Create a simple server file (module-level FastMCP instance named `mcp`)
    server_file = tmp_path / "server.py"
    server_file.write_text("""
from fastmcp import FastMCP
mcp = FastMCP("Test Server")
@mcp.tool
def test_tool(message: str) -> str:
    return f"Echo: {message}"
""")
    return config_file
def test_load_mcp_server_config(sample_config):
    """Test loading configuration and returning config subsets.

    Loading also applies deployment settings (env vars) as a side effect, so
    the full environment is snapshotted and restored around the call.

    FIX: removed the ``monkeypatch`` fixture parameter, which was requested
    but never used.
    """
    # Capture environment changes
    original_env = dict(os.environ)
    try:
        config = load_mcp_server_config(sample_config)
        # Check that we got the right types
        assert isinstance(config, MCPServerConfig)
        assert isinstance(config.source, FileSystemSource)
        assert isinstance(config.deployment, Deployment)
        assert isinstance(config.environment, UVEnvironment)
        # Check source - path is not resolved yet, only during load_server
        assert config.source.path == "server.py"
        assert config.source.entrypoint is None
        # Check environment config
        assert config.environment.python == "3.11"
        assert config.environment.dependencies == ["requests"]
        # Check deployment config
        assert config.deployment.transport == "stdio"
        assert config.deployment.env == {"TEST_VAR": "test_value"}
        # Check that environment variables were applied
        assert os.environ.get("TEST_VAR") == "test_value"
    finally:
        # Restore original environment
        os.environ.clear()
        os.environ.update(original_env)
def test_load_config_with_entrypoint_source(tmp_path):
    """Loading a config whose source uses the path + entrypoint form."""
    payload = {
        "source": {"path": "src/server.py", "entrypoint": "app"},
        "deployment": {"transport": "http", "port": 8000},
    }
    cfg_path = tmp_path / "fastmcp.json"
    cfg_path.write_text(json.dumps(payload))
    # Server module lives in a subdirectory referenced by the source path.
    src_dir = tmp_path / "src"
    src_dir.mkdir()
    (src_dir / "server.py").write_text("# Server")
    config = load_mcp_server_config(cfg_path)
    # The source path stays relative here; resolution happens in load_server.
    assert config.source.path == "src/server.py"
    assert config.source.entrypoint == "app"
    # Deployment values round-trip unchanged.
    assert config.deployment.transport == "http"
    assert config.deployment.port == 8000
def test_load_config_with_cwd(tmp_path):
    """Test that Deployment applies working directory change.

    FIX: the loaded config was previously bound to an unused local that
    needed a ``# noqa: F841`` suppression; the call is now made for its
    side effect only.
    """
    # Create a subdirectory
    subdir = tmp_path / "subdir"
    subdir.mkdir()
    config_data = {"source": {"path": "server.py"}, "deployment": {"cwd": "subdir"}}
    config_file = tmp_path / "fastmcp.json"
    config_file.write_text(json.dumps(config_data))
    # Create server file in subdirectory
    (subdir / "server.py").write_text("# Test server")
    original_cwd = os.getcwd()
    try:
        # Loading applies deployment.cwd as a side effect.
        load_mcp_server_config(config_file)
        # Check that working directory was changed
        assert Path.cwd() == subdir.resolve()
    finally:
        # Restore original working directory
        os.chdir(original_cwd)
def test_load_config_with_relative_cwd(tmp_path):
    """Test configuration with relative working directory.

    FIX: dropped the unused ``config`` local (previously silenced with
    ``# noqa: F841``); the loader is called only for its cwd side effect.
    """
    # Create nested subdirectories
    subdir1 = tmp_path / "dir1"
    subdir2 = subdir1 / "dir2"
    subdir2.mkdir(parents=True)
    config_data = {
        "source": {"path": "server.py"},
        "deployment": {
            "cwd": "../"  # Relative to config file location
        },
    }
    config_file = subdir2 / "fastmcp.json"
    config_file.write_text(json.dumps(config_data))
    # Create server file in parent directory
    (subdir1 / "server.py").write_text("# Server")
    original_cwd = os.getcwd()
    try:
        load_mcp_server_config(config_file)
        # Should change to parent directory of config file
        assert Path.cwd() == subdir1.resolve()
    finally:
        os.chdir(original_cwd)
def test_load_minimal_config(tmp_path):
    """A config containing only a source section loads successfully."""
    cfg_path = tmp_path / "fastmcp.json"
    cfg_path.write_text(json.dumps({"source": {"path": "server.py"}}))
    (tmp_path / "server.py").write_text("# Server")
    config = load_mcp_server_config(cfg_path)
    # The path is kept as written; resolution is deferred to load_server.
    assert isinstance(config.source, FileSystemSource)
    assert config.source.path == "server.py"
def test_load_config_with_server_args(tmp_path):
    """Deployment args from the config file are preserved verbatim."""
    payload = {
        "source": {"path": "server.py"},
        "deployment": {"args": ["--debug", "--config", "custom.json"]},
    }
    cfg_path = tmp_path / "fastmcp.json"
    cfg_path.write_text(json.dumps(payload))
    (tmp_path / "server.py").write_text("# Server")
    config = load_mcp_server_config(cfg_path)
    assert config.deployment.args == ["--debug", "--config", "custom.json"]
def test_load_config_with_log_level(tmp_path):
    """A log_level set in the deployment section survives loading."""
    payload = {
        "source": {"path": "server.py"},
        "deployment": {"log_level": "DEBUG"},
    }
    cfg_path = tmp_path / "fastmcp.json"
    cfg_path.write_text(json.dumps(payload))
    (tmp_path / "server.py").write_text("# Server")
    config = load_mcp_server_config(cfg_path)
    assert config.deployment.log_level == "DEBUG"
def test_load_config_with_various_log_levels(tmp_path):
    """Test that all valid log levels are accepted.

    FIX: the shared ``server.py`` file was rewritten with identical content
    on every loop iteration; it is now created once before the loop.
    """
    (tmp_path / "server.py").write_text("# Server")
    valid_levels = ["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"]
    for level in valid_levels:
        config_data = {
            "source": {"path": "server.py"},
            "deployment": {"log_level": level},
        }
        # One config file per level so earlier files are not overwritten.
        config_file = tmp_path / f"fastmcp_{level}.json"
        config_file.write_text(json.dumps(config_data))
        config = load_mcp_server_config(config_file)
        assert config.deployment.log_level == level
def test_config_subset_independence(tmp_path):
    """Source, environment, and deployment subsets are each usable on their own."""
    payload = {
        "source": {"path": "server.py"},
        "environment": {"python": "3.12", "dependencies": ["pandas"]},
        "deployment": {"transport": "http", "host": "0.0.0.0", "port": 3000},
    }
    cfg_path = tmp_path / "fastmcp.json"
    cfg_path.write_text(json.dumps(payload))
    (tmp_path / "server.py").write_text("# Server")
    config = load_mcp_server_config(cfg_path)
    # Source subset: path stays unresolved until load_server.
    assert config.source.path == "server.py"
    assert config.source.entrypoint is None
    # Environment subset.
    assert config.environment.python == "3.12"
    assert config.environment.dependencies == ["pandas"]
    assert config.environment._must_run_with_uv()  # dependencies imply uv
    # Deployment subset.
    assert config.deployment.transport == "http"
    assert config.deployment.host == "0.0.0.0"
    assert config.deployment.port == 3000
def test_environment_config_path_resolution(tmp_path):
    """Test that paths in environment config are resolved correctly."""
    # Create requirements file
    reqs_file = tmp_path / "requirements.txt"
    reqs_file.write_text("fastmcp>=2.0")
    config_data = {
        "source": {"path": "server.py"},
        "environment": {
            "requirements": "requirements.txt",
            "project": ".",
            "editable": ["../other-project"],
        },
    }
    config_file = tmp_path / "fastmcp.json"
    config_file.write_text(json.dumps(config_data))
    # Create server file
    server_file = tmp_path / "server.py"
    server_file.write_text("# Server")
    config = load_mcp_server_config(config_file)
    # Check that UV command is built with resolved paths
    uv_cmd = config.environment.build_command(["fastmcp", "run", "server.py"])
    assert "--with-requirements" in uv_cmd
    assert "--project" in uv_cmd
    # Path should be resolved relative to config file
    req_idx = uv_cmd.index("--with-requirements") + 1
    # NOTE(review): the `or` branch makes this assertion always-true when the
    # path is left unresolved -- it never fails on a relative path. Consider
    # asserting is_absolute() alone if resolution is guaranteed here; verify.
    assert Path(uv_cmd[req_idx]).is_absolute() or uv_cmd[req_idx] == "requirements.txt"
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "tests/cli/test_run_config.py",
"license": "Apache License 2.0",
"lines": 224,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
PrefectHQ/fastmcp:examples/auth/github_oauth/client.py | """OAuth client example for connecting to FastMCP servers.
This example demonstrates how to connect to an OAuth-protected FastMCP server.
To run:
python client.py
"""
import asyncio
from fastmcp.client import Client, OAuth
SERVER_URL = "http://localhost:8000/mcp"
async def main():
    """Authenticate against the OAuth-protected server and list its tools."""
    try:
        oauth_client = Client(SERVER_URL, auth=OAuth())
        async with oauth_client as session:
            # ping() returning truthy confirms the authenticated connection.
            assert await session.ping()
            print("✅ Successfully authenticated!")
            tools = await session.list_tools()
            print(f"🔧 Available tools ({len(tools)}):")
            for t in tools:
                print(f" - {t.name}: {t.description}")
    except Exception as exc:
        print(f"❌ Authentication failed: {exc}")
        raise


if __name__ == "__main__":
    asyncio.run(main())
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "examples/auth/github_oauth/client.py",
"license": "Apache License 2.0",
"lines": 22,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
PrefectHQ/fastmcp:examples/auth/github_oauth/server.py | """GitHub OAuth server example for FastMCP.
This example demonstrates how to protect a FastMCP server with GitHub OAuth.
Required environment variables:
- FASTMCP_SERVER_AUTH_GITHUB_CLIENT_ID: Your GitHub OAuth app client ID
- FASTMCP_SERVER_AUTH_GITHUB_CLIENT_SECRET: Your GitHub OAuth app client secret
To run:
python server.py
"""
import os
from fastmcp import FastMCP
from fastmcp.server.auth.providers.github import GitHubProvider
# OAuth proxy configuration; credentials are read from the environment.
# The `or ""` fallback keeps construction from raising when a variable is
# unset (the provider will simply fail to authenticate instead).
auth = GitHubProvider(
    client_id=os.getenv("FASTMCP_SERVER_AUTH_GITHUB_CLIENT_ID") or "",
    client_secret=os.getenv("FASTMCP_SERVER_AUTH_GITHUB_CLIENT_SECRET") or "",
    base_url="http://localhost:8000",
    # redirect_path="/auth/callback", # Default path - change if using a different callback URL
)
# Every tool on this server is protected by the GitHub OAuth flow above.
mcp = FastMCP("GitHub OAuth Example Server", auth=auth)
@mcp.tool
def echo(message: str) -> str:
    """Echo the provided message."""
    return message
if __name__ == "__main__":
    # Serve over HTTP so the OAuth redirect endpoints are reachable.
    mcp.run(transport="http", port=8000)
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "examples/auth/github_oauth/server.py",
"license": "Apache License 2.0",
"lines": 24,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
PrefectHQ/fastmcp:examples/auth/google_oauth/server.py | """Google OAuth server example for FastMCP.
This example demonstrates how to protect a FastMCP server with Google OAuth.
Required environment variables:
- FASTMCP_SERVER_AUTH_GOOGLE_CLIENT_ID: Your Google OAuth client ID
- FASTMCP_SERVER_AUTH_GOOGLE_CLIENT_SECRET: Your Google OAuth client secret
To run:
python server.py
"""
import os
from fastmcp import FastMCP
from fastmcp.server.auth.providers.google import GoogleProvider
# OAuth proxy configuration; credentials are read from the environment.
# The `or ""` fallback keeps construction from raising when a variable is
# unset (the provider will simply fail to authenticate instead).
auth = GoogleProvider(
    client_id=os.getenv("FASTMCP_SERVER_AUTH_GOOGLE_CLIENT_ID") or "",
    client_secret=os.getenv("FASTMCP_SERVER_AUTH_GOOGLE_CLIENT_SECRET") or "",
    base_url="http://localhost:8000",
    # redirect_path="/auth/callback", # Default path - change if using a different callback URL
    # Optional: specify required scopes
    # required_scopes=["openid", "https://www.googleapis.com/auth/userinfo.email"],
)
# Every tool on this server is protected by the Google OAuth flow above.
mcp = FastMCP("Google OAuth Example Server", auth=auth)
@mcp.tool
def echo(message: str) -> str:
    """Echo the provided message."""
    return message
if __name__ == "__main__":
    # Serve over HTTP so the OAuth redirect endpoints are reachable.
    mcp.run(transport="http", port=8000)
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "examples/auth/google_oauth/server.py",
"license": "Apache License 2.0",
"lines": 26,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
PrefectHQ/fastmcp:src/fastmcp/server/auth/providers/github.py | """GitHub OAuth provider for FastMCP.
This module provides a complete GitHub OAuth integration that's ready to use
with just a client ID and client secret. It handles all the complexity of
GitHub's OAuth flow, token validation, and user management.
Example:
```python
from fastmcp import FastMCP
from fastmcp.server.auth.providers.github import GitHubProvider
# Simple GitHub OAuth protection
auth = GitHubProvider(
client_id="your-github-client-id",
client_secret="your-github-client-secret"
)
mcp = FastMCP("My Protected Server", auth=auth)
```
"""
from __future__ import annotations
import contextlib
import httpx
from key_value.aio.protocols import AsyncKeyValue
from pydantic import AnyHttpUrl
from fastmcp.server.auth import TokenVerifier
from fastmcp.server.auth.auth import AccessToken
from fastmcp.server.auth.oauth_proxy import OAuthProxy
from fastmcp.utilities.auth import parse_scopes
from fastmcp.utilities.logging import get_logger
logger = get_logger(__name__)
class GitHubTokenVerifier(TokenVerifier):
    """Token verifier for GitHub OAuth tokens.
    GitHub OAuth tokens are opaque (not JWTs), so we verify them
    by calling GitHub's API to check if they're valid and get user info.
    """
    def __init__(
        self,
        *,
        required_scopes: list[str] | None = None,
        timeout_seconds: int = 10,
        http_client: httpx.AsyncClient | None = None,
    ):
        """Initialize the GitHub token verifier.
        Args:
            required_scopes: Required OAuth scopes (e.g., ['user:email'])
            timeout_seconds: HTTP request timeout
            http_client: Optional httpx.AsyncClient for connection pooling. When provided,
                the client is reused across calls and the caller is responsible for its
                lifecycle. When None (default), a fresh client is created per call.
        """
        super().__init__(required_scopes=required_scopes)
        self.timeout_seconds = timeout_seconds
        self._http_client = http_client
    async def verify_token(self, token: str) -> AccessToken | None:
        """Verify GitHub OAuth token by calling GitHub API.

        Returns an AccessToken populated with GitHub user info on success,
        or None on any failure (invalid token, missing scopes, network error).
        """
        try:
            # nullcontext wraps an injected client so exiting the `async with`
            # does NOT close it; a locally-created client IS closed on exit.
            async with (
                contextlib.nullcontext(self._http_client)
                if self._http_client is not None
                else httpx.AsyncClient(timeout=self.timeout_seconds)
            ) as client:
                # Get token info from GitHub API
                response = await client.get(
                    "https://api.github.com/user",
                    headers={
                        "Authorization": f"Bearer {token}",
                        "Accept": "application/vnd.github.v3+json",
                        "User-Agent": "FastMCP-GitHub-OAuth",
                    },
                )
                if response.status_code != 200:
                    logger.debug(
                        "GitHub token verification failed: %d - %s",
                        response.status_code,
                        response.text[:200],
                    )
                    return None
                user_data = response.json()
                # Get token scopes from GitHub API
                # GitHub includes scopes in the X-OAuth-Scopes header
                scopes_response = await client.get(
                    "https://api.github.com/user/repos", # Any authenticated endpoint
                    headers={
                        "Authorization": f"Bearer {token}",
                        "Accept": "application/vnd.github.v3+json",
                        "User-Agent": "FastMCP-GitHub-OAuth",
                    },
                )
                # Extract scopes from X-OAuth-Scopes header if available.
                # NOTE(review): the status of this second call is never checked;
                # presumably a failed call just yields an empty header - confirm.
                oauth_scopes_header = scopes_response.headers.get("x-oauth-scopes", "")
                token_scopes = [
                    scope.strip()
                    for scope in oauth_scopes_header.split(",")
                    if scope.strip()
                ]
                # If no scopes in header, assume basic scopes based on successful user API call
                if not token_scopes:
                    token_scopes = ["user"]  # Basic scope if we can access user info
                # Check required scopes (simple subset check, no wildcard logic)
                if self.required_scopes:
                    token_scopes_set = set(token_scopes)
                    required_scopes_set = set(self.required_scopes)
                    if not required_scopes_set.issubset(token_scopes_set):
                        logger.debug(
                            "GitHub token missing required scopes. Has %d, needs %d",
                            len(token_scopes_set),
                            len(required_scopes_set),
                        )
                        return None
                # Create AccessToken with GitHub user info
                return AccessToken(
                    token=token,
                    client_id=str(user_data.get("id", "unknown")), # Use GitHub user ID
                    scopes=token_scopes,
                    expires_at=None, # GitHub tokens don't typically expire
                    claims={
                        "sub": str(user_data["id"]),
                        "login": user_data.get("login"),
                        "name": user_data.get("name"),
                        "email": user_data.get("email"),
                        "avatar_url": user_data.get("avatar_url"),
                        "github_user_data": user_data,
                    },
                )
        except httpx.RequestError as e:
            # Network-level failure (DNS, timeout, connection refused)
            logger.debug("Failed to verify GitHub token: %s", e)
            return None
        except Exception as e:
            # Defensive catch-all: any unexpected error means "not verified"
            logger.debug("GitHub token verification error: %s", e)
            return None
class GitHubProvider(OAuthProxy):
    """Complete GitHub OAuth provider for FastMCP.

    A ready-to-use :class:`OAuthProxy` subclass wired for GitHub: it points
    the proxy at GitHub's OAuth endpoints and installs a
    ``GitHubTokenVerifier`` so access tokens are validated by calling the
    GitHub API. Supplying the OAuth app credentials and a public base URL is
    all that is required.

    Features:
    - Transparent OAuth proxy to GitHub
    - Automatic token validation via GitHub API
    - User information extraction
    - Minimal configuration required

    Example:
        ```python
        from fastmcp import FastMCP
        from fastmcp.server.auth.providers.github import GitHubProvider
        auth = GitHubProvider(
            client_id="Ov23li...",
            client_secret="abc123...",
            base_url="https://my-server.com"
        )
        mcp = FastMCP("My App", auth=auth)
        ```
    """

    def __init__(
        self,
        *,
        client_id: str,
        client_secret: str,
        base_url: AnyHttpUrl | str,
        issuer_url: AnyHttpUrl | str | None = None,
        redirect_path: str | None = None,
        required_scopes: list[str] | None = None,
        timeout_seconds: int = 10,
        allowed_client_redirect_uris: list[str] | None = None,
        client_storage: AsyncKeyValue | None = None,
        jwt_signing_key: str | bytes | None = None,
        require_authorization_consent: bool = True,
        consent_csp_policy: str | None = None,
        http_client: httpx.AsyncClient | None = None,
    ):
        """Initialize GitHub OAuth provider.

        Args:
            client_id: GitHub OAuth app client ID (e.g., "Ov23li...").
            client_secret: GitHub OAuth app client secret.
            base_url: Public URL where the OAuth endpoints are reachable
                (including any mount path).
            issuer_url: Issuer URL for OAuth metadata; defaults to ``base_url``.
                Prefer a root-level URL when mounting under a path to avoid
                404s during discovery.
            redirect_path: Redirect path registered with the GitHub OAuth app
                (defaults to "/auth/callback").
            required_scopes: Required GitHub scopes; defaults to ``["user"]``.
            timeout_seconds: HTTP timeout for GitHub API calls (default 10).
            allowed_client_redirect_uris: Allowed redirect URI patterns for MCP
                clients. None (default) allows every URI; an empty list allows none.
            client_storage: Backend for OAuth state (client registrations,
                encrypted tokens). When None, an encrypted file store is created
                in the data directory (derived from `platformdirs`).
            jwt_signing_key: Secret for signing FastMCP JWT tokens. Bytes are
                used as-is; strings are derived into a 32-byte key. When not
                provided, a key is derived from the upstream client secret via
                PBKDF2.
            require_authorization_consent: Show a consent screen before
                redirecting users to GitHub (default True). SECURITY WARNING:
                only disable for local development or testing environments.
            consent_csp_policy: Content-Security-Policy for the consent page
                (forwarded to OAuthProxy unchanged).
            http_client: Optional shared httpx.AsyncClient for token
                verification; the caller owns its lifecycle. When None, a fresh
                client is created per verification call.
        """
        # Accept scopes as a list or delimited string; GitHub's minimal useful
        # scope is "user".
        scopes = ["user"] if required_scopes is None else parse_scopes(required_scopes)

        # Access tokens are validated by calling the GitHub API.
        verifier = GitHubTokenVerifier(
            required_scopes=scopes,
            timeout_seconds=timeout_seconds,
            http_client=http_client,
        )

        super().__init__(
            upstream_authorization_endpoint="https://github.com/login/oauth/authorize",
            upstream_token_endpoint="https://github.com/login/oauth/access_token",
            upstream_client_id=client_id,
            upstream_client_secret=client_secret,
            token_verifier=verifier,
            base_url=base_url,
            redirect_path=redirect_path,
            issuer_url=issuer_url or base_url,  # metadata issuer defaults to base_url
            allowed_client_redirect_uris=allowed_client_redirect_uris,
            client_storage=client_storage,
            jwt_signing_key=jwt_signing_key,
            require_authorization_consent=require_authorization_consent,
            consent_csp_policy=consent_csp_policy,
        )
        logger.debug(
            "Initialized GitHub OAuth provider for client %s with scopes: %s",
            client_id,
            scopes,
        )
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "src/fastmcp/server/auth/providers/github.py",
"license": "Apache License 2.0",
"lines": 223,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
PrefectHQ/fastmcp:src/fastmcp/server/auth/providers/google.py | """Google OAuth provider for FastMCP.
This module provides a complete Google OAuth integration that's ready to use
with just a client ID and client secret. It handles all the complexity of
Google's OAuth flow, token validation, and user management.
Example:
```python
from fastmcp import FastMCP
from fastmcp.server.auth.providers.google import GoogleProvider
# Simple Google OAuth protection
auth = GoogleProvider(
client_id="your-google-client-id.apps.googleusercontent.com",
client_secret="your-google-client-secret"
)
mcp = FastMCP("My Protected Server", auth=auth)
```
"""
from __future__ import annotations
import contextlib
import time
import httpx
from key_value.aio.protocols import AsyncKeyValue
from pydantic import AnyHttpUrl
from fastmcp.server.auth import TokenVerifier
from fastmcp.server.auth.auth import AccessToken
from fastmcp.server.auth.oauth_proxy import OAuthProxy
from fastmcp.utilities.auth import parse_scopes
from fastmcp.utilities.logging import get_logger
logger = get_logger(__name__)
class GoogleTokenVerifier(TokenVerifier):
    """Token verifier for Google OAuth tokens.

    Google OAuth tokens are opaque (not JWTs), so we verify them
    by calling Google's tokeninfo API to check if they're valid and get user info.
    """

    def __init__(
        self,
        *,
        required_scopes: list[str] | None = None,
        timeout_seconds: int = 10,
        http_client: httpx.AsyncClient | None = None,
    ):
        """Initialize the Google token verifier.

        Args:
            required_scopes: Required OAuth scopes (e.g., ['openid', 'https://www.googleapis.com/auth/userinfo.email'])
            timeout_seconds: HTTP request timeout
            http_client: Optional httpx.AsyncClient for connection pooling. When provided,
                the client is reused across calls and the caller is responsible for its
                lifecycle. When None (default), a fresh client is created per call.
        """
        super().__init__(required_scopes=required_scopes)
        self.timeout_seconds = timeout_seconds
        # Injected client is NOT closed by verify_token (see nullcontext below).
        self._http_client = http_client

    async def verify_token(self, token: str) -> AccessToken | None:
        """Verify Google OAuth token by calling Google's tokeninfo API.

        Returns an AccessToken carrying scopes, expiry, and user claims on
        success, or None when the token is invalid, expired, missing a
        required scope, or verification fails for any reason (all failures
        are logged at debug level; this method never raises).
        """
        try:
            # Reuse the injected client without closing it on exit
            # (nullcontext), otherwise create a short-lived client per call.
            async with (
                contextlib.nullcontext(self._http_client)
                if self._http_client is not None
                else httpx.AsyncClient(timeout=self.timeout_seconds)
            ) as client:
                # Use Google's tokeninfo endpoint to validate the token
                response = await client.get(
                    "https://www.googleapis.com/oauth2/v1/tokeninfo",
                    params={"access_token": token},
                    headers={"User-Agent": "FastMCP-Google-OAuth"},
                )
                if response.status_code != 200:
                    logger.debug(
                        "Google token verification failed: %d",
                        response.status_code,
                    )
                    return None
                token_info = response.json()
                # Check if token is expired
                # (expires_in appears to be remaining seconds — a non-positive
                # value means the token's lifetime is over)
                expires_in = token_info.get("expires_in")
                if expires_in and int(expires_in) <= 0:
                    logger.debug("Google token has expired")
                    return None
                # Extract scopes from token info (space-delimited string)
                scope_string = token_info.get("scope", "")
                token_scopes = [
                    scope.strip() for scope in scope_string.split(" ") if scope.strip()
                ]
                # Check required scopes
                if self.required_scopes:
                    token_scopes_set = set(token_scopes)
                    required_scopes_set = set(self.required_scopes)
                    if not required_scopes_set.issubset(token_scopes_set):
                        # Only counts are logged, not the scope values themselves
                        logger.debug(
                            "Google token missing required scopes. Has %d, needs %d",
                            len(token_scopes_set),
                            len(required_scopes_set),
                        )
                        return None
                # Get additional user info if we have the right scopes.
                # This is best-effort: a failure here does not fail verification.
                user_data = {}
                if "openid" in token_scopes or "profile" in token_scopes:
                    try:
                        userinfo_response = await client.get(
                            "https://www.googleapis.com/oauth2/v2/userinfo",
                            headers={
                                "Authorization": f"Bearer {token}",
                                "User-Agent": "FastMCP-Google-OAuth",
                            },
                        )
                        if userinfo_response.status_code == 200:
                            user_data = userinfo_response.json()
                    except Exception as e:
                        logger.debug("Failed to fetch Google user info: %s", e)
                # Calculate expiration time as an absolute epoch timestamp
                expires_at = None
                if expires_in:
                    expires_at = int(time.time() + int(expires_in))
                # Create AccessToken with Google user info
                access_token = AccessToken(
                    token=token,
                    client_id=token_info.get(
                        "audience", "unknown"
                    ),  # Use audience as client_id
                    scopes=token_scopes,
                    expires_at=expires_at,
                    claims={
                        # Prefer the userinfo id; fall back to tokeninfo's user_id
                        "sub": user_data.get("id")
                        or token_info.get("user_id", "unknown"),
                        "email": user_data.get("email"),
                        "name": user_data.get("name"),
                        "picture": user_data.get("picture"),
                        "given_name": user_data.get("given_name"),
                        "family_name": user_data.get("family_name"),
                        "locale": user_data.get("locale"),
                        "google_user_data": user_data,
                        "google_token_info": token_info,
                    },
                )
                logger.debug("Google token verified successfully")
                return access_token
        except httpx.RequestError as e:
            logger.debug("Failed to verify Google token: %s", e)
            return None
        except Exception as e:
            # Broad catch: verification must fail closed rather than raise
            logger.debug("Google token verification error: %s", e)
            return None
class GoogleProvider(OAuthProxy):
    """Complete Google OAuth provider for FastMCP.

    A ready-to-use :class:`OAuthProxy` subclass wired for Google: it points
    the proxy at Google's OAuth endpoints and validates access tokens through
    Google's tokeninfo API via :class:`GoogleTokenVerifier`. Supplying the
    OAuth client credentials and a public base URL is all that is required.

    Features:
    - Transparent OAuth proxy to Google
    - Automatic token validation via Google's tokeninfo API
    - User information extraction from Google APIs
    - Minimal configuration required

    Example:
        ```python
        from fastmcp import FastMCP
        from fastmcp.server.auth.providers.google import GoogleProvider
        auth = GoogleProvider(
            client_id="123456789.apps.googleusercontent.com",
            client_secret="GOCSPX-abc123...",
            base_url="https://my-server.com"
        )
        mcp = FastMCP("My App", auth=auth)
        ```
    """

    def __init__(
        self,
        *,
        client_id: str,
        client_secret: str,
        base_url: AnyHttpUrl | str,
        issuer_url: AnyHttpUrl | str | None = None,
        redirect_path: str | None = None,
        required_scopes: list[str] | None = None,
        timeout_seconds: int = 10,
        allowed_client_redirect_uris: list[str] | None = None,
        client_storage: AsyncKeyValue | None = None,
        jwt_signing_key: str | bytes | None = None,
        require_authorization_consent: bool = True,
        consent_csp_policy: str | None = None,
        extra_authorize_params: dict[str, str] | None = None,
        http_client: httpx.AsyncClient | None = None,
    ):
        """Initialize Google OAuth provider.

        Args:
            client_id: Google OAuth client ID (e.g., "123456789.apps.googleusercontent.com").
            client_secret: Google OAuth client secret (e.g., "GOCSPX-abc123...").
            base_url: Public URL where the OAuth endpoints are reachable
                (including any mount path).
            issuer_url: Issuer URL for OAuth metadata; defaults to ``base_url``.
                Prefer a root-level URL when mounting under a path to avoid
                404s during discovery.
            redirect_path: Redirect path registered with the Google OAuth app
                (defaults to "/auth/callback").
            required_scopes: Required Google scopes; defaults to ``["openid"]``.
                Common choices include
                "https://www.googleapis.com/auth/userinfo.email" and
                "https://www.googleapis.com/auth/userinfo.profile".
            timeout_seconds: HTTP timeout for Google API calls (default 10).
            allowed_client_redirect_uris: Allowed redirect URI patterns for MCP
                clients. None (default) allows every URI; an empty list allows none.
            client_storage: Backend for OAuth state (client registrations,
                encrypted tokens). When None, an encrypted file store is created
                in the data directory (derived from `platformdirs`).
            jwt_signing_key: Secret for signing FastMCP JWT tokens. Bytes are
                used as-is; strings are derived into a 32-byte key. When not
                provided, a key is derived from the upstream client secret via
                PBKDF2.
            require_authorization_consent: Show a consent screen before
                redirecting users to Google (default True). SECURITY WARNING:
                only disable for local development or testing environments.
            consent_csp_policy: Content-Security-Policy for the consent page
                (forwarded to OAuthProxy unchanged).
            extra_authorize_params: Extra parameters forwarded to Google's
                authorization endpoint; merged over the provider defaults
                ``{"access_type": "offline", "prompt": "consent"}`` (which
                ensure refresh tokens are returned). Example:
                ``{"prompt": "select_account"}`` to let users pick an account.
            http_client: Optional shared httpx.AsyncClient for token
                verification; the caller owns its lifecycle. When None, a fresh
                client is created per verification call.
        """
        # Google requires at least one scope; "openid" is the minimal OIDC scope.
        scopes = (
            ["openid"] if required_scopes is None else parse_scopes(required_scopes)
        )

        verifier = GoogleTokenVerifier(
            required_scopes=scopes,
            timeout_seconds=timeout_seconds,
            http_client=http_client,
        )

        # access_type=offline ensures refresh tokens are returned, and
        # prompt=consent forces the consent screen so Google issues one on
        # every authorization (not just the first). Caller-supplied params
        # take precedence over these defaults.
        authorize_params: dict[str, str] = {
            "access_type": "offline",
            "prompt": "consent",
        }
        authorize_params.update(extra_authorize_params or {})

        super().__init__(
            upstream_authorization_endpoint="https://accounts.google.com/o/oauth2/v2/auth",
            upstream_token_endpoint="https://oauth2.googleapis.com/token",
            upstream_client_id=client_id,
            upstream_client_secret=client_secret,
            token_verifier=verifier,
            base_url=base_url,
            redirect_path=redirect_path,
            issuer_url=issuer_url or base_url,  # metadata issuer defaults to base_url
            allowed_client_redirect_uris=allowed_client_redirect_uris,
            client_storage=client_storage,
            jwt_signing_key=jwt_signing_key,
            require_authorization_consent=require_authorization_consent,
            consent_csp_policy=consent_csp_policy,
            extra_authorize_params=authorize_params,
        )
        logger.debug(
            "Initialized Google OAuth provider for client %s with scopes: %s",
            client_id,
            scopes,
        )
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "src/fastmcp/server/auth/providers/google.py",
"license": "Apache License 2.0",
"lines": 258,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
PrefectHQ/fastmcp:tests/integration_tests/auth/test_github_provider_integration.py | """Integration tests for GitHub OAuth Provider.
Tests the complete GitHub OAuth flow using HeadlessOAuth to bypass browser interaction.
This test requires a GitHub OAuth app to be created at https://github.com/settings/developers
with the following configuration:
- Redirect URL: http://127.0.0.1:9100/auth/callback
- Client ID and Client Secret should be set as environment variables:
- FASTMCP_TEST_AUTH_GITHUB_CLIENT_ID
- FASTMCP_TEST_AUTH_GITHUB_CLIENT_SECRET
"""
import os
import re
import secrets
import time
from collections.abc import AsyncGenerator
from urllib.parse import parse_qs, urlencode, urlparse
import httpx
import pytest
from fastmcp import FastMCP
from fastmcp.client import Client
from fastmcp.server.auth.auth import AccessToken
from fastmcp.server.auth.oauth_proxy.models import ClientCode
from fastmcp.server.auth.providers.github import GitHubProvider
from fastmcp.utilities.tests import HeadlessOAuth, run_server_async
FASTMCP_TEST_AUTH_GITHUB_CLIENT_ID = os.getenv("FASTMCP_TEST_AUTH_GITHUB_CLIENT_ID")
FASTMCP_TEST_AUTH_GITHUB_CLIENT_SECRET = os.getenv(
    "FASTMCP_TEST_AUTH_GITHUB_CLIENT_SECRET"
)
# Mark every test in this module as expected-failure (xfail) when the GitHub
# OAuth credentials are not configured in the environment.
pytestmark = pytest.mark.xfail(
    not FASTMCP_TEST_AUTH_GITHUB_CLIENT_ID
    or not FASTMCP_TEST_AUTH_GITHUB_CLIENT_SECRET,
    reason="FASTMCP_TEST_AUTH_GITHUB_CLIENT_ID and FASTMCP_TEST_AUTH_GITHUB_CLIENT_SECRET environment variables are not set or empty",
)
def create_github_server(base_url: str) -> FastMCP:
    """Build a FastMCP server protected by real GitHub OAuth.

    Args:
        base_url: Public URL the OAuth endpoints are served from.

    Returns:
        A FastMCP server exposing two OAuth-protected tools.
    """
    assert FASTMCP_TEST_AUTH_GITHUB_CLIENT_ID is not None
    assert FASTMCP_TEST_AUTH_GITHUB_CLIENT_SECRET is not None

    oauth = GitHubProvider(
        client_id=FASTMCP_TEST_AUTH_GITHUB_CLIENT_ID,
        client_secret=FASTMCP_TEST_AUTH_GITHUB_CLIENT_SECRET,
        base_url=base_url,
        jwt_signing_key="test-secret",
    )
    mcp_server = FastMCP("GitHub OAuth Integration Test Server", auth=oauth)

    @mcp_server.tool
    def get_protected_data() -> str:
        """Returns protected data - requires GitHub OAuth."""
        return "🔐 This data requires GitHub OAuth authentication!"

    @mcp_server.tool
    def get_user_info() -> str:
        """Returns user info from OAuth context."""
        return "📝 GitHub OAuth user authenticated successfully"

    return mcp_server
def create_github_server_with_mock_callback(base_url: str) -> FastMCP:
    """Create FastMCP server with GitHub OAuth that mocks the callback for testing.

    The returned server replaces two pieces of the provider so the OAuth flow
    never leaves the test process:
    - ``auth.authorize`` issues a fake authorization code immediately instead
      of redirecting to GitHub, and
    - the token verifier accepts any token with the ``gho_mock_token_`` prefix.
    """
    assert FASTMCP_TEST_AUTH_GITHUB_CLIENT_ID is not None
    assert FASTMCP_TEST_AUTH_GITHUB_CLIENT_SECRET is not None
    # Create GitHub OAuth provider
    auth = GitHubProvider(
        client_id=FASTMCP_TEST_AUTH_GITHUB_CLIENT_ID,
        client_secret=FASTMCP_TEST_AUTH_GITHUB_CLIENT_SECRET,
        base_url=base_url,
        jwt_signing_key="test-secret",
    )
    # Mock the authorize method to return a fake code instead of redirecting to GitHub
    async def mock_authorize(client, params):
        # Instead of redirecting to GitHub, simulate an immediate callback
        # Generate a fake authorization code
        fake_code = secrets.token_urlsafe(32)
        # Create mock token response (simulating what GitHub would return)
        mock_tokens = {
            "access_token": f"gho_mock_token_{secrets.token_hex(16)}",
            "token_type": "bearer",
            "expires_in": 3600,
        }
        # Store the mock tokens in the proxy's code storage so the later
        # token-exchange step finds them under the fake code
        await auth._code_store.put(
            key=fake_code,
            value=ClientCode(
                code=fake_code,
                client_id=client.client_id,
                redirect_uri=str(params.redirect_uri),
                code_challenge=params.code_challenge,
                code_challenge_method=getattr(params, "code_challenge_method", "S256"),
                scopes=params.scopes or [],
                idp_tokens=mock_tokens,
                expires_at=int(time.time() + 300),  # 5 minutes
                created_at=time.time(),
            ),
        )
        # Return the redirect to the client's callback with the fake code
        callback_params = {
            "code": fake_code,
            "state": params.state,
        }
        # Append with '&' if the redirect URI already carries a query string
        separator = "&" if "?" in str(params.redirect_uri) else "?"
        return f"{params.redirect_uri}{separator}{urlencode(callback_params)}"
    auth.authorize = mock_authorize  # type: ignore[assignment]
    # Mock the token verifier to accept our fake tokens
    original_verify_token = auth._token_validator.verify_token
    async def mock_verify_token(token: str):
        if token.startswith("gho_mock_token_"):
            # Return a mock AccessToken for our fake tokens
            return AccessToken(
                token=token,
                client_id=FASTMCP_TEST_AUTH_GITHUB_CLIENT_ID or "test-client",
                scopes=["user"],
                expires_at=int(time.time() + 3600),
            )
        # Fall back to original verification for other tokens
        return await original_verify_token(token)
    auth._token_validator.verify_token = mock_verify_token  # type: ignore[assignment]
    # Create FastMCP server with mocked GitHub authentication
    server = FastMCP("GitHub OAuth Integration Test Server (Mock)", auth=auth)
    @server.tool
    def get_protected_data() -> str:
        """Returns protected data - requires GitHub OAuth."""
        return "🔐 This data requires GitHub OAuth authentication!"
    @server.tool
    def get_user_info() -> str:
        """Returns user info from OAuth context."""
        return "📝 GitHub OAuth user authenticated successfully"
    return server
@pytest.fixture
async def github_server() -> AsyncGenerator[str, None]:
    """Run the GitHub-OAuth-protected server on a free port and yield its URL."""
    from fastmcp.utilities.http import find_available_port

    free_port = find_available_port()
    mcp_server = create_github_server(f"http://127.0.0.1:{free_port}")
    async with run_server_async(mcp_server, port=free_port, transport="http") as url:
        yield url
@pytest.fixture
async def github_server_with_mock() -> AsyncGenerator[str, None]:
    """Run the mocked-callback GitHub OAuth server on a free port and yield its URL."""
    from fastmcp.utilities.http import find_available_port

    free_port = find_available_port()
    mcp_server = create_github_server_with_mock_callback(f"http://127.0.0.1:{free_port}")
    async with run_server_async(mcp_server, port=free_port, transport="http") as url:
        yield url
@pytest.fixture
def github_client(github_server: str) -> Client:
    """Client wired with headless OAuth against the live GitHub server fixture."""
    headless_auth = HeadlessOAuth(mcp_url=github_server)
    return Client(github_server, auth=headless_auth)
@pytest.fixture
def github_client_with_mock(github_server_with_mock: str) -> Client:
    """Client wired with headless OAuth against the mocked GitHub server fixture."""
    headless_auth = HeadlessOAuth(mcp_url=github_server_with_mock)
    return Client(github_server_with_mock, auth=headless_auth)
async def test_github_oauth_credentials_available():
    """Sanity-check that the GitHub OAuth test credentials are configured."""
    for credential in (
        FASTMCP_TEST_AUTH_GITHUB_CLIENT_ID,
        FASTMCP_TEST_AUTH_GITHUB_CLIENT_SECRET,
    ):
        assert credential is not None
        assert len(credential) > 0
async def test_github_oauth_authorization_redirect(github_server: str):
    """Test that GitHub OAuth authorization redirects to GitHub correctly through consent flow.

    Since HeadlessOAuth can't handle real GitHub redirects, we test that:
    1. DCR client registration works
    2. Authorization endpoint redirects to consent page
    3. Consent approval redirects to GitHub with correct parameters
    """
    # Extract base URL
    parsed = urlparse(github_server)
    base_url = f"{parsed.scheme}://{parsed.netloc}"
    async with httpx.AsyncClient() as http_client:
        # Step 1: Register OAuth client (DCR)
        register_response = await http_client.post(
            f"{base_url}/register",
            json={
                "client_name": "Integration Test Client",
                "redirect_uris": ["http://localhost:12345/callback"],
                "grant_types": ["authorization_code", "refresh_token"],
                "response_types": ["code"],
                "token_endpoint_auth_method": "client_secret_post",
            },
        )
        # Print response details before asserting to aid CI debugging
        if register_response.status_code != 201:
            print(f"Registration failed: {register_response.status_code}")
            print(f"Response: {register_response.text}")
        assert register_response.status_code == 201
        client_info = register_response.json()
        client_id = client_info["client_id"]
        assert client_id is not None
        # Step 2: Test authorization endpoint redirects to consent page
        # (PKCE challenge values are placeholders — the flow never completes)
        auth_url = f"{base_url}/authorize"
        auth_params = {
            "response_type": "code",
            "client_id": client_id,
            "redirect_uri": "http://localhost:12345/callback",
            "state": "test-state-123",
            "code_challenge": "test-challenge",
            "code_challenge_method": "S256",
        }
        auth_response = await http_client.get(
            auth_url, params=auth_params, follow_redirects=False
        )
        # Should redirect to consent page (confused deputy protection)
        assert auth_response.status_code == 302
        consent_location = auth_response.headers["location"]
        assert "/consent" in consent_location
        # Step 3: Visit consent page to get CSRF token
        consent_response = await http_client.get(
            consent_location, follow_redirects=False
        )
        assert consent_response.status_code == 200
        # Extract CSRF token from consent page HTML
        csrf_match = re.search(
            r'name="csrf_token"\s+value="([^"]+)"', consent_response.text
        )
        assert csrf_match, "CSRF token not found in consent page"
        csrf_token = csrf_match.group(1)
        # Extract txn_id from consent URL
        txn_id_match = re.search(r"txn_id=([^&]+)", consent_location)
        assert txn_id_match, "txn_id not found in consent URL"
        txn_id = txn_id_match.group(1)
        # Step 4: Approve consent (CSRF cookie from step 3 must be replayed)
        approve_response = await http_client.post(
            f"{base_url}/consent",
            data={
                "action": "approve",
                "txn_id": txn_id,
                "csrf_token": csrf_token,
            },
            cookies=consent_response.cookies,
            follow_redirects=False,
        )
        # Should redirect to GitHub
        assert approve_response.status_code in (302, 303)
        redirect_location = approve_response.headers["location"]
        # Parse redirect URL - should be GitHub
        redirect_parsed = urlparse(redirect_location)
        assert redirect_parsed.hostname == "github.com"
        assert redirect_parsed.path == "/login/oauth/authorize"
        # Check that GitHub gets the right parameters
        github_params = parse_qs(redirect_parsed.query)
        assert "client_id" in github_params
        assert github_params["client_id"][0] == FASTMCP_TEST_AUTH_GITHUB_CLIENT_ID
        assert "redirect_uri" in github_params
        # The redirect_uri should be our proxy's callback, not the client's
        proxy_callback = github_params["redirect_uri"][0]
        assert proxy_callback.startswith(base_url)
        assert proxy_callback.endswith("/auth/callback")
async def test_github_oauth_server_metadata(github_server: str):
    """The server must publish well-formed OAuth authorization-server metadata."""
    from urllib.parse import urlparse

    import httpx

    parsed = urlparse(github_server)
    base = f"{parsed.scheme}://{parsed.netloc}"
    async with httpx.AsyncClient() as client:
        resp = await client.get(f"{base}/.well-known/oauth-authorization-server")
        assert resp.status_code == 200
        metadata = resp.json()
        # All required discovery fields are present
        for field in (
            "authorization_endpoint",
            "token_endpoint",
            "registration_endpoint",
            "issuer",
        ):
            assert field in metadata
        # Endpoints point back at this server
        for endpoint in (
            "authorization_endpoint",
            "token_endpoint",
            "registration_endpoint",
        ):
            assert metadata[endpoint].startswith(base)
async def test_github_oauth_unauthorized_access(github_server: str):
    """Connecting without any OAuth credentials must be rejected with 401."""
    import httpx
    from fastmcp.client.transports import StreamableHttpTransport

    anonymous_client = Client(transport=StreamableHttpTransport(github_server))
    with pytest.raises(httpx.HTTPStatusError, match="401 Unauthorized"):
        async with anonymous_client:
            pass
async def test_github_oauth_with_mock(github_client_with_mock: Client):
    """Run the OAuth flow end-to-end against the mocked callback server."""
    async with github_client_with_mock:
        # A successful ping implies the OAuth handshake completed
        assert await github_client_with_mock.ping()
        # Both protected tools should be callable once authenticated
        protected = await github_client_with_mock.call_tool("get_protected_data", {})
        assert "🔐 This data requires GitHub OAuth authentication!" in str(
            protected.data
        )
        user_info = await github_client_with_mock.call_tool("get_user_info", {})
        assert "📝 GitHub OAuth user authenticated successfully" in str(user_info.data)
async def test_github_oauth_mock_only_accepts_mock_tokens(github_server_with_mock: str):
    """The mocked verifier must reject tokens lacking the mock prefix."""
    from urllib.parse import urlparse

    import httpx

    parsed = urlparse(github_server_with_mock)
    base = f"{parsed.scheme}://{parsed.netloc}"
    async with httpx.AsyncClient() as client:
        # This token looks like a real GitHub token (no "gho_mock_token_"
        # prefix), so the mock verifier falls through to real verification,
        # which fails.
        response = await client.post(
            f"{base}/mcp",
            headers={
                "Authorization": "Bearer gho_real_token_should_be_rejected",
                "Content-Type": "application/json",
            },
            json={"jsonrpc": "2.0", "id": 1, "method": "ping"},
        )
        assert response.status_code == 401
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "tests/integration_tests/auth/test_github_provider_integration.py",
"license": "Apache License 2.0",
"lines": 314,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
PrefectHQ/fastmcp:tests/server/auth/providers/test_github.py | """Unit tests for GitHub OAuth provider."""
from unittest.mock import MagicMock, patch
import pytest
from key_value.aio.stores.memory import MemoryStore
from fastmcp.server.auth.providers.github import (
GitHubProvider,
GitHubTokenVerifier,
)
@pytest.fixture
def memory_storage() -> MemoryStore:
    """Provide a MemoryStore for tests to avoid SQLite initialization on Windows.

    Passed to providers as ``client_storage`` so no on-disk store is created.
    """
    return MemoryStore()
class TestGitHubProvider:
    """Tests for GitHubProvider construction."""

    def test_init_with_explicit_params(self, memory_storage: MemoryStore):
        """Explicitly supplied settings are stored on the provider."""
        github_provider = GitHubProvider(
            client_id="test_client",
            client_secret="test_secret",
            base_url="https://example.com",
            redirect_path="/custom/callback",
            required_scopes=["user", "repo"],
            timeout_seconds=30,
            jwt_signing_key="test-secret",
            client_storage=memory_storage,
        )
        assert github_provider._upstream_client_id == "test_client"
        assert github_provider._upstream_client_secret.get_secret_value() == "test_secret"
        # URLs are normalized with a trailing slash
        assert str(github_provider.base_url) == "https://example.com/"
        assert github_provider._redirect_path == "/custom/callback"

    def test_init_defaults(self, memory_storage: MemoryStore):
        """Omitted settings fall back to the documented defaults."""
        github_provider = GitHubProvider(
            client_id="test_client",
            client_secret="test_secret",
            base_url="https://example.com",
            jwt_signing_key="test-secret",
            client_storage=memory_storage,
        )
        assert github_provider._redirect_path == "/auth/callback"
        # The default scopes propagate into the token verifier
        assert github_provider._token_validator.required_scopes == ["user"]
class TestGitHubTokenVerifier:
    """Test GitHubTokenVerifier."""

    # NOTE: the verifier takes no storage backend, so these tests do not
    # request the memory_storage fixture (previously injected but unused).

    def test_init_with_custom_scopes(self):
        """Custom required scopes and timeout are stored on the verifier."""
        verifier = GitHubTokenVerifier(
            required_scopes=["user", "repo"],
            timeout_seconds=30,
        )
        assert verifier.required_scopes == ["user", "repo"]
        assert verifier.timeout_seconds == 30

    def test_init_defaults(self):
        """Defaults: empty scope list (from TokenVerifier) and a 10s timeout."""
        verifier = GitHubTokenVerifier()
        assert (
            verifier.required_scopes == []
        )  # Parent TokenVerifier sets empty list as default
        assert verifier.timeout_seconds == 10

    async def test_verify_token_github_api_failure(self):
        """Token verification returns None when GitHub rejects the token."""
        from unittest.mock import AsyncMock

        verifier = GitHubTokenVerifier()
        # Use AsyncMock so `await client.get(...)` resolves to the mocked 401
        # response. With a plain MagicMock the await itself raises TypeError,
        # which verify_token's broad except also turns into None — the test
        # would pass without ever exercising the 401 branch.
        mock_client = AsyncMock()
        mock_response = MagicMock()
        mock_response.status_code = 401
        mock_response.text = "Bad credentials"
        mock_client.get.return_value = mock_response
        # Patch via the provider module for consistency with the success test
        with patch(
            "fastmcp.server.auth.providers.github.httpx.AsyncClient"
        ) as mock_client_class:
            mock_client_class.return_value.__aenter__.return_value = mock_client
            result = await verifier.verify_token("invalid_token")
        assert result is None

    async def test_verify_token_success(self):
        """A valid token yields an AccessToken with user claims and scopes."""
        from unittest.mock import AsyncMock

        verifier = GitHubTokenVerifier(required_scopes=["user"])
        mock_client = AsyncMock()
        # Mock successful user API response
        user_response = MagicMock()
        user_response.status_code = 200
        user_response.json.return_value = {
            "id": 12345,
            "login": "testuser",
            "name": "Test User",
            "email": "test@example.com",
            "avatar_url": "https://github.com/testuser.png",
        }
        # Mock successful scopes API response (scopes come from a header)
        scopes_response = MagicMock()
        scopes_response.headers = {"x-oauth-scopes": "user,repo"}
        # First GET fetches the user, second fetches the scope header
        mock_client.get.side_effect = [user_response, scopes_response]
        with patch(
            "fastmcp.server.auth.providers.github.httpx.AsyncClient"
        ) as mock_client_class:
            mock_client_class.return_value.__aenter__.return_value = mock_client
            result = await verifier.verify_token("valid_token")
        assert result is not None
        assert result.token == "valid_token"
        assert result.client_id == "12345"
        assert result.scopes == ["user", "repo"]
        assert result.claims["login"] == "testuser"
        assert result.claims["name"] == "Test User"
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "tests/server/auth/providers/test_github.py",
"license": "Apache License 2.0",
"lines": 110,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
PrefectHQ/fastmcp:scripts/auto_close_duplicates.py | #!/usr/bin/env python
# /// script
# requires-python = ">=3.10"
# dependencies = [
# "httpx",
# ]
# ///
"""
Auto-close duplicate GitHub issues.
This script runs on a schedule to automatically close issues that have been
marked as duplicates and haven't received any preventing activity.
"""
import os
from dataclasses import dataclass
from datetime import datetime, timedelta, timezone
import httpx
@dataclass
class Issue:
    """Represents a GitHub issue (subset of the REST API payload)."""

    number: int  # issue number within the repository
    title: str
    state: str  # state string from the API; this script only fetches "open"
    created_at: str  # ISO-8601 timestamp string as returned by the API
    user_id: int  # numeric id of the issue author
    user_login: str  # login name of the issue author
@dataclass
class Comment:
    """Represents a GitHub comment (subset of the REST API payload)."""

    id: int
    body: str  # raw comment body; matched case-insensitively downstream
    created_at: str  # ISO-8601 timestamp; may carry a trailing "Z"
    user_id: int
    user_login: str
    user_type: str  # compared against "Bot" to detect bot-authored comments
@dataclass
class Reaction:
    """Represents a reaction on a comment."""

    user_id: int
    user_login: str
    content: str  # reaction name; "-1" and "confused" block auto-closure
class GitHubClient:
    """Client for interacting with GitHub API.

    Thin wrapper over the GitHub REST v3 API scoped to one
    ``owner/repo``. Each method opens a short-lived ``httpx.Client``.
    Errors are reported via prints / False returns / partial lists
    rather than raised, so a single bad request never aborts the run.
    """

    def __init__(self, token: str, owner: str, repo: str):
        self.token = token
        self.owner = owner
        self.repo = repo
        # Token auth plus the v3 media type for the REST API.
        self.headers = {
            "Authorization": f"token {token}",
            "Accept": "application/vnd.github.v3+json",
        }
        self.base_url = f"https://api.github.com/repos/{owner}/{repo}"

    def get_potential_duplicate_issues(self) -> list[Issue]:
        """Fetch open issues with the potential-duplicate label.

        Paginates 100 items per page with a 10-page safety cap. Pull
        requests (which share the issues endpoint) are skipped. On a
        non-200 response the loop stops and whatever was collected so
        far is returned.
        """
        url = f"{self.base_url}/issues"
        issues = []
        with httpx.Client() as client:
            page = 1
            while page <= 10:  # Safety limit
                response = client.get(
                    url,
                    headers=self.headers,
                    params={
                        "state": "open",
                        "labels": "potential-duplicate",
                        "per_page": 100,
                        "page": page,
                    },
                )
                if response.status_code != 200:
                    print(f"Error fetching issues: {response.status_code}")
                    break
                data = response.json()
                if not data:
                    # Empty page means we ran out of results.
                    break
                for item in data:
                    # Skip pull requests
                    if "pull_request" in item:
                        continue
                    issues.append(
                        Issue(
                            number=item["number"],
                            title=item["title"],
                            state=item["state"],
                            created_at=item["created_at"],
                            user_id=item["user"]["id"],
                            user_login=item["user"]["login"],
                        )
                    )
                page += 1
        return issues

    def get_issue_comments(self, issue_number: int) -> list[Comment]:
        """Fetch all comments for an issue.

        Paginates 100 comments per page, stopping silently on any
        non-200 response (partial list returned) and after 10 pages
        as a safety limit.
        """
        url = f"{self.base_url}/issues/{issue_number}/comments"
        comments = []
        with httpx.Client() as client:
            page = 1
            while True:
                response = client.get(
                    url, headers=self.headers, params={"page": page, "per_page": 100}
                )
                if response.status_code != 200:
                    break
                data = response.json()
                if not data:
                    break
                for comment_data in data:
                    comments.append(
                        Comment(
                            id=comment_data["id"],
                            body=comment_data["body"],
                            created_at=comment_data["created_at"],
                            user_id=comment_data["user"]["id"],
                            user_login=comment_data["user"]["login"],
                            user_type=comment_data["user"]["type"],
                        )
                    )
                page += 1
                if page > 10:  # Safety limit
                    break
        return comments

    def get_comment_reactions(
        self, issue_number: int, comment_id: int
    ) -> list[Reaction]:
        """Fetch reactions for a specific comment.

        Reads a single page only (no pagination); returns an empty
        list when the request fails.
        """
        url = f"{self.base_url}/issues/{issue_number}/comments/{comment_id}/reactions"
        reactions = []
        with httpx.Client() as client:
            response = client.get(url, headers=self.headers)
            if response.status_code != 200:
                return reactions
            data = response.json()
            for reaction_data in data:
                reactions.append(
                    Reaction(
                        user_id=reaction_data["user"]["id"],
                        user_login=reaction_data["user"]["login"],
                        content=reaction_data["content"],
                    )
                )
        return reactions

    def remove_label(self, issue_number: int, label: str) -> bool:
        """Remove a label from an issue.

        Returns True on a 200 or 204 response (204 is the usual
        no-content success for DELETE).
        """
        url = f"{self.base_url}/issues/{issue_number}/labels/{label}"
        with httpx.Client() as client:
            response = client.delete(url, headers=self.headers)
            return response.status_code in [200, 204]

    def close_issue(self, issue_number: int, comment: str) -> bool:
        """Close an issue with a comment and add duplicate label.

        Performs three sequential API calls, in this exact order:
          1. post the explanatory comment (abort and return False on
             failure so an issue is never closed without explanation),
          2. swap labels: remove potential-duplicate, add duplicate
             (label failures are logged but do not abort),
          3. PATCH the issue state to closed.

        Returns True only when the final close request succeeds.
        """
        # First add the comment
        comment_url = f"{self.base_url}/issues/{issue_number}/comments"
        with httpx.Client() as client:
            response = client.post(
                comment_url, headers=self.headers, json={"body": comment}
            )
            if response.status_code != 201:
                print(f"Failed to add comment to issue #{issue_number}")
                return False
        # Swap labels: remove potential-duplicate, add duplicate
        self.remove_label(issue_number, "potential-duplicate")
        labels_url = f"{self.base_url}/issues/{issue_number}/labels"
        with httpx.Client() as client:
            response = client.post(
                labels_url, headers=self.headers, json={"labels": ["duplicate"]}
            )
            if response.status_code not in [200, 201]:
                print(f"Failed to add duplicate label to issue #{issue_number}")
        # Then close the issue
        issue_url = f"{self.base_url}/issues/{issue_number}"
        with httpx.Client() as client:
            response = client.patch(
                issue_url, headers=self.headers, json={"state": "closed"}
            )
            return response.status_code == 200
def find_duplicate_comment(comments: list[Comment]) -> Comment | None:
    """Return the bot comment that flagged this issue as a duplicate.

    Scans for the first bot-authored comment whose body contains the
    duplicate-detection phrase; returns None when no such comment exists.
    """
    marker = "possible duplicate issues"
    return next(
        (c for c in comments if c.user_type == "Bot" and marker in c.body.lower()),
        None,
    )
def was_already_auto_closed(comments: list[Comment]) -> bool:
    """Report whether a bot has already auto-closed this issue once.

    Detected via the closing message the bot posts; its presence implies
    a human intentionally reopened the issue afterwards.
    """
    closing_phrase = "closing this issue as a duplicate"
    return any(
        c.user_type == "Bot" and closing_phrase in c.body.lower() for c in comments
    )
def is_past_cooldown(duplicate_comment: Comment) -> bool:
    """Return True once the 3-day wait after the duplicate marking has elapsed."""
    # GitHub timestamps end in "Z"; normalize for fromisoformat.
    marked_at = datetime.fromisoformat(
        duplicate_comment.created_at.replace("Z", "+00:00")
    )
    cutoff = datetime.now(timezone.utc) - timedelta(days=3)
    return marked_at <= cutoff
def has_human_activity(
    issue: Issue,
    duplicate_comment: Comment,
    all_comments: list[Comment],
    reactions: list[Reaction],
) -> bool:
    """Return True when human activity should prevent auto-closure.

    Two signals block closure: a disapproving reaction ("-1" or
    "confused") on the bot's duplicate comment, or any non-bot comment
    posted after the duplicate comment's timestamp.
    """
    marked_at = datetime.fromisoformat(
        duplicate_comment.created_at.replace("Z", "+00:00")
    )

    # A disapproving reaction on the duplicate comment vetoes closure.
    blocking_reactions = {"-1", "confused"}
    for reaction in reactions:
        if reaction.content in blocking_reactions:
            print(
                f"Issue #{issue.number}: Has preventing reaction from {reaction.user_login}"
            )
            return True

    # Any human comment posted after the duplicate marking also vetoes.
    for comment in all_comments:
        posted_at = datetime.fromisoformat(comment.created_at.replace("Z", "+00:00"))
        if posted_at > marked_at and comment.user_type != "Bot":
            print(
                f"Issue #{issue.number}: {comment.user_login} commented after duplicate marking"
            )
            return True

    return False
def main():
    """Main entry point for auto-closing duplicate issues.

    Decision flow per labelled issue:
      * no bot duplicate-comment  -> stale label, remove it
      * already auto-closed once  -> remove label (a human reopened it)
      * inside 3-day cooldown     -> skip; a later scheduled run retries
      * human objection detected  -> remove label, leave issue open
      * otherwise                 -> comment, swap labels, and close

    Raises:
        ValueError: when GITHUB_TOKEN is not set.
    """
    print("[DEBUG] Starting auto-close duplicates script")
    # Get environment variables
    token = os.environ.get("GITHUB_TOKEN")
    if not token:
        raise ValueError("GITHUB_TOKEN environment variable is required")
    # Repo coordinates default to prefecthq/fastmcp when not provided by CI.
    owner = os.environ.get("GITHUB_REPOSITORY_OWNER", "prefecthq")
    repo = os.environ.get("GITHUB_REPOSITORY_NAME", "fastmcp")
    print(f"[DEBUG] Repository: {owner}/{repo}")
    # Initialize client
    client = GitHubClient(token, owner, repo)
    # Only fetch issues with the potential-duplicate label
    all_issues = client.get_potential_duplicate_issues()
    print(f"[DEBUG] Found {len(all_issues)} open issues with potential-duplicate label")
    closed_count = 0
    cleared_count = 0
    for issue in all_issues:
        # Get comments for this issue
        comments = client.get_issue_comments(issue.number)
        # Look for duplicate marking comment
        duplicate_comment = find_duplicate_comment(comments)
        if not duplicate_comment:
            # Label exists but no bot comment - clean up the label
            print(f"[DEBUG] Issue #{issue.number} has label but no duplicate comment")
            client.remove_label(issue.number, "potential-duplicate")
            cleared_count += 1
            continue
        # Skip if already auto-closed once (someone reopened it intentionally)
        if was_already_auto_closed(comments):
            print(
                f"[DEBUG] Issue #{issue.number} was already auto-closed, removing label"
            )
            client.remove_label(issue.number, "potential-duplicate")
            cleared_count += 1
            continue
        print(f"[DEBUG] Issue #{issue.number} has duplicate comment")
        # Still in cooldown period - skip for now, check again later
        if not is_past_cooldown(duplicate_comment):
            print(f"[DEBUG] Issue #{issue.number} still in 3-day cooldown period")
            continue
        # Get reactions on the duplicate comment
        reactions = client.get_comment_reactions(issue.number, duplicate_comment.id)
        # Check for human activity that prevents closure
        if has_human_activity(issue, duplicate_comment, comments, reactions):
            print(f"[DEBUG] Issue #{issue.number} has human activity, removing label")
            client.remove_label(issue.number, "potential-duplicate")
            cleared_count += 1
            continue
        # No human activity after cooldown - close as duplicate
        close_message = (
            "Closing this issue as a duplicate based on the automated analysis above.\n\n"
            "The duplicate issues identified contain existing discussions and potential solutions. "
            "Please add your 👍 to those issues if they match your use case.\n\n"
            "If this was closed in error, please leave a comment explaining why this is not "
            "a duplicate and we'll reopen it."
        )
        if client.close_issue(issue.number, close_message):
            print(f"[SUCCESS] Closed issue #{issue.number} as duplicate")
            closed_count += 1
        else:
            print(f"[ERROR] Failed to close issue #{issue.number}")
    print(
        f"[DEBUG] Processing complete. Closed {closed_count} duplicates, "
        f"cleared {cleared_count} from review"
    )


if __name__ == "__main__":
    main()
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "scripts/auto_close_duplicates.py",
"license": "Apache License 2.0",
"lines": 296,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
PrefectHQ/fastmcp:tests/server/test_streamable_http_no_redirect.py | """Test that streamable HTTP routes avoid 307 redirects."""
import httpx
import pytest
from starlette.routing import Route
from fastmcp import FastMCP
@pytest.mark.parametrize(
    "server_path",
    ["/mcp", "/mcp/"],
)
def test_streamable_http_route_structure(server_path: str):
    """The streamable HTTP endpoint is registered as a Route at the configured path."""
    server = FastMCP("TestServer")

    @server.tool
    def greet(name: str) -> str:
        return f"Hello, {name}!"

    # Build the ASGI app mounted at the requested path.
    app = server.http_app(transport="http", path=server_path)

    # Collect every Route whose path matches the configured one.
    matching = [
        route
        for route in app.routes
        if isinstance(route, Route)
        and hasattr(route, "path")
        and route.path == server_path
    ]

    assert len(matching) == 1, (
        f"Should have one streamable route for path {server_path}"
    )
    assert isinstance(matching[0], Route), "Should use Route, not Mount"
    assert matching[0].path == server_path, (
        f"Route path should match {server_path}"
    )
async def test_streamable_http_redirect_behavior():
    """A trailing-slash request is 307-redirected to the canonical path."""
    server = FastMCP("TestServer")

    @server.tool
    def greet(name: str) -> str:
        return f"Hello, {name}!"

    # App served at /mcp (no trailing slash).
    app = server.http_app(transport="http", path="/mcp")

    # Hitting /mcp/ must redirect back to /mcp without being followed.
    transport = httpx.ASGITransport(app=app)
    async with httpx.AsyncClient(transport=transport, base_url="http://test") as client:
        response = await client.get("/mcp/", follow_redirects=False)
        assert response.status_code == 307
        assert response.headers["location"] == "http://test/mcp"
async def test_streamable_http_no_mount_routes():
    """The streamable HTTP app registers Route objects at /mcp, never Mount objects."""
    from starlette.routing import Mount

    app = FastMCP("TestServer").http_app(transport="http")

    def routes_of(route_cls):
        # All registered routes of the given class living at /mcp.
        return [
            r
            for r in app.routes
            if isinstance(r, route_cls) and hasattr(r, "path") and r.path == "/mcp"
        ]

    assert len(routes_of(Mount)) == 0, (
        "Should not have Mount routes for streamable HTTP"
    )
    assert len(routes_of(Route)) == 1, (
        "Should have exactly one Route for streamable HTTP"
    )
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "tests/server/test_streamable_http_no_redirect.py",
"license": "Apache License 2.0",
"lines": 65,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
PrefectHQ/fastmcp:tests/utilities/test_components.py | """Tests for fastmcp.utilities.components module."""
import warnings
import pytest
from pydantic import ValidationError
from fastmcp.prompts.prompt import Prompt
from fastmcp.resources.resource import Resource
from fastmcp.resources.template import ResourceTemplate
from fastmcp.tools.tool import Tool
from fastmcp.utilities.components import (
FastMCPComponent,
FastMCPMeta,
_convert_set_default_none,
get_fastmcp_metadata,
)
class TestConvertSetDefaultNone:
    """Tests for the _convert_set_default_none helper function."""

    def test_none_returns_empty_set(self):
        """None is normalized to an empty set."""
        assert _convert_set_default_none(None) == set()

    def test_set_returns_same_set(self):
        """A set input passes through unchanged."""
        original = {"tag1", "tag2"}
        assert _convert_set_default_none(original) == original

    def test_list_converts_to_set(self):
        """A list is converted to a set, collapsing duplicates."""
        assert _convert_set_default_none(["tag1", "tag2", "tag1"]) == {"tag1", "tag2"}

    def test_tuple_converts_to_set(self):
        """A tuple is converted to a set."""
        assert _convert_set_default_none(("tag1", "tag2")) == {"tag1", "tag2"}
class TestFastMCPComponent:
    """Tests for the FastMCPComponent class.

    Covers construction defaults, meta/tag serialization via get_meta,
    equality/repr/copy semantics, and pydantic validation behavior.
    """

    @pytest.fixture
    def basic_component(self):
        """Create a basic component for testing."""
        return FastMCPComponent(
            name="test_component",
            title="Test Component",
            description="A test component",
            tags={"test", "component"},
        )

    def test_initialization_with_minimal_params(self):
        """Test component initialization with minimal parameters."""
        component = FastMCPComponent(name="minimal")
        assert component.name == "minimal"
        assert component.title is None
        assert component.description is None
        # tags default to an empty set (None normalized by validator)
        assert component.tags == set()
        assert component.meta is None

    def test_initialization_with_all_params(self):
        """Test component initialization with all parameters."""
        meta = {"custom": "value"}
        component = FastMCPComponent(
            name="full",
            title="Full Component",
            description="A fully configured component",
            tags={"tag1", "tag2"},
            meta=meta,
        )
        assert component.name == "full"
        assert component.title == "Full Component"
        assert component.description == "A fully configured component"
        assert component.tags == {"tag1", "tag2"}
        assert component.meta == meta

    def test_key_property_without_custom_key(self, basic_component):
        """Test that key property returns name@version when no custom key is set."""
        # Base component has no KEY_PREFIX, so key is just "name@version" (or "name@" for unversioned)
        assert basic_component.key == "test_component@"

    def test_get_meta_with_fastmcp_meta(self, basic_component):
        """Test get_meta always includes fastmcp meta."""
        basic_component.meta = {"custom": "data"}
        basic_component.tags = {"tag2", "tag1"}  # Unordered to test sorting
        result = basic_component.get_meta()
        assert result["custom"] == "data"
        assert "fastmcp" in result
        assert result["fastmcp"]["tags"] == ["tag1", "tag2"]  # Should be sorted

    def test_get_meta_preserves_existing_fastmcp_meta(self):
        """Test that get_meta preserves existing fastmcp meta."""
        component = FastMCPComponent(
            name="test",
            meta={"fastmcp": {"existing": "value"}},
            tags={"new_tag"},
        )
        result = component.get_meta()
        assert result is not None
        # Pre-existing keys under "fastmcp" survive; tags are merged in.
        assert result["fastmcp"]["existing"] == "value"
        assert result["fastmcp"]["tags"] == ["new_tag"]

    def test_get_meta_returns_dict_with_fastmcp_when_empty(self):
        """Test that get_meta returns dict with fastmcp meta even when no custom meta."""
        component = FastMCPComponent(name="test")
        result = component.get_meta()
        assert result is not None
        assert "fastmcp" in result
        assert result["fastmcp"]["tags"] == []

    def test_get_meta_includes_version(self):
        """Test that get_meta includes version when component has a version."""
        component = FastMCPComponent(name="test", version="v1.0.0", tags={"tag1"})
        result = component.get_meta()
        assert result is not None
        assert result["fastmcp"]["version"] == "v1.0.0"
        assert result["fastmcp"]["tags"] == ["tag1"]

    def test_get_meta_excludes_version_when_none(self):
        """Test that get_meta excludes version when component has no version."""
        component = FastMCPComponent(name="test", tags={"tag1"})
        result = component.get_meta()
        assert result is not None
        assert "version" not in result["fastmcp"]
        assert result["fastmcp"]["tags"] == ["tag1"]

    def test_equality_same_components(self):
        """Test that identical components are equal."""
        comp1 = FastMCPComponent(name="test", description="desc")
        comp2 = FastMCPComponent(name="test", description="desc")
        assert comp1 == comp2

    def test_equality_different_components(self):
        """Test that different components are not equal."""
        comp1 = FastMCPComponent(name="test1")
        comp2 = FastMCPComponent(name="test2")
        assert comp1 != comp2

    def test_equality_different_types(self, basic_component):
        """Test that component is not equal to other types."""
        assert basic_component != "not a component"
        assert basic_component != 123
        assert basic_component is not None

    def test_repr(self, basic_component):
        """Test string representation of component."""
        repr_str = repr(basic_component)
        assert "FastMCPComponent" in repr_str
        assert "name='test_component'" in repr_str
        assert "title='Test Component'" in repr_str
        assert "description='A test component'" in repr_str

    def test_copy_method(self, basic_component):
        """Test copy method creates an independent copy."""
        copy = basic_component.copy()
        assert copy == basic_component
        assert copy is not basic_component
        # Modify copy and ensure original is unchanged
        copy.name = "modified"
        assert basic_component.name == "test_component"

    def test_tags_deduplication(self):
        """Test that tags are deduplicated when passed as a sequence."""
        component = FastMCPComponent(
            name="test",
            tags=["tag1", "tag2", "tag1", "tag2"],  # type: ignore[arg-type]
        )
        assert component.tags == {"tag1", "tag2"}

    def test_validation_error_for_invalid_data(self):
        """Test that validation errors are raised for invalid data."""
        # name is required, so a bare constructor call must fail.
        with pytest.raises(ValidationError):
            FastMCPComponent()  # type: ignore[call-arg]

    def test_extra_fields_forbidden(self):
        """Test that extra fields are not allowed."""
        with pytest.raises(ValidationError) as exc_info:
            FastMCPComponent(name="test", unknown_field="value")  # type: ignore[call-arg] # Intentionally passing invalid field for test
        assert "Extra inputs are not permitted" in str(exc_info.value)
class TestKeyPrefix:
    """Tests for KEY_PREFIX and make_key functionality."""

    def test_base_class_has_empty_prefix(self):
        """The base FastMCPComponent declares no key prefix."""
        assert FastMCPComponent.KEY_PREFIX == ""

    def test_make_key_without_prefix(self):
        """With an empty prefix, make_key is the identity on the identifier."""
        assert FastMCPComponent.make_key("my_name") == "my_name"

    def test_tool_has_tool_prefix(self):
        """Tool keys are namespaced with 'tool'."""
        assert Tool.KEY_PREFIX == "tool"
        assert Tool.make_key("my_tool") == "tool:my_tool"

    def test_resource_has_resource_prefix(self):
        """Resource keys are namespaced with 'resource'."""
        assert Resource.KEY_PREFIX == "resource"
        assert Resource.make_key("file://test.txt") == "resource:file://test.txt"

    def test_template_has_template_prefix(self):
        """ResourceTemplate keys are namespaced with 'template'."""
        assert ResourceTemplate.KEY_PREFIX == "template"
        assert ResourceTemplate.make_key("data://{id}") == "template:data://{id}"

    def test_prompt_has_prompt_prefix(self):
        """Prompt keys are namespaced with 'prompt'."""
        assert Prompt.KEY_PREFIX == "prompt"
        assert Prompt.make_key("my_prompt") == "prompt:my_prompt"

    def test_tool_key_property(self):
        """Tool.key combines prefix, name, and the '@' version sentinel."""
        instance = Tool(name="greet", description="A greeting tool", parameters={})
        assert instance.key == "tool:greet@"

    def test_prompt_key_property(self):
        """Prompt.key combines prefix, name, and the '@' version sentinel."""
        instance = Prompt(name="analyze", description="An analysis prompt")
        assert instance.key == "prompt:analyze@"

    def test_warning_for_missing_key_prefix(self):
        """Subclassing without a KEY_PREFIX emits exactly one warning."""
        with warnings.catch_warnings(record=True) as caught:
            warnings.simplefilter("always")

            class NoPrefix(FastMCPComponent):
                pass

        assert len(caught) == 1
        assert "NoPrefix does not define KEY_PREFIX" in str(caught[0].message)

    def test_no_warning_when_key_prefix_defined(self):
        """Declaring KEY_PREFIX on a subclass suppresses the warning."""
        with warnings.catch_warnings(record=True) as caught:
            warnings.simplefilter("always")

            class WithPrefix(FastMCPComponent):
                KEY_PREFIX = "custom"

        assert len(caught) == 0
        assert WithPrefix.make_key("test") == "custom:test"
class TestGetFastMCPMetadata:
    """Tests for get_fastmcp_metadata helper."""

    def test_returns_fastmcp_namespace_when_dict(self):
        # The modern "fastmcp" namespace wins over the legacy "_fastmcp".
        payload = {"fastmcp": {"tags": ["a"]}, "_fastmcp": {"tags": ["b"]}}
        assert get_fastmcp_metadata(payload) == {"tags": ["a"]}

    def test_falls_back_to_legacy_namespace_when_dict(self):
        # A non-dict modern namespace falls back to the legacy one.
        payload = {"fastmcp": "invalid", "_fastmcp": {"tags": ["legacy"]}}
        assert get_fastmcp_metadata(payload) == {"tags": ["legacy"]}

    def test_ignores_non_dict_metadata(self):
        # Non-dict values under either spelling yield an empty result.
        assert get_fastmcp_metadata({"fastmcp": "invalid"}) == {}
        assert get_fastmcp_metadata({"fastmcp": ["invalid"]}) == {}
        assert get_fastmcp_metadata({"_fastmcp": "invalid"}) == {}
class TestComponentEnableDisable:
    """Tests for the enable/disable methods raising NotImplementedError."""

    def test_enable_raises_not_implemented_error(self):
        """enable() fails with migration guidance naming the component."""
        component = FastMCPComponent(name="test")
        with pytest.raises(NotImplementedError) as err:
            component.enable()
        message = str(err.value)
        assert "server.enable" in message
        assert "test" in message

    def test_disable_raises_not_implemented_error(self):
        """disable() fails with migration guidance naming the component."""
        component = FastMCPComponent(name="test")
        with pytest.raises(NotImplementedError) as err:
            component.disable()
        message = str(err.value)
        assert "server.disable" in message
        assert "test" in message

    def test_tool_enable_raises_not_implemented(self):
        """Tool.enable() fails and mentions the tool's full key."""
        instance = Tool(name="my_tool", description="A tool", parameters={})
        with pytest.raises(NotImplementedError) as err:
            instance.enable()
        assert "tool:my_tool@" in str(err.value)

    def test_tool_disable_raises_not_implemented(self):
        """Tool.disable() fails and mentions the tool's full key."""
        instance = Tool(name="my_tool", description="A tool", parameters={})
        with pytest.raises(NotImplementedError) as err:
            instance.disable()
        assert "tool:my_tool@" in str(err.value)

    def test_prompt_enable_raises_not_implemented(self):
        """Prompt.enable() fails and mentions the prompt's full key."""
        instance = Prompt(name="my_prompt", description="A prompt")
        with pytest.raises(NotImplementedError) as err:
            instance.enable()
        assert "prompt:my_prompt@" in str(err.value)
class TestFastMCPMeta:
    """Tests for the FastMCPMeta TypedDict.

    NOTE(review): TypedDict annotations are not enforced at runtime, so
    these tests document the intended shape rather than validate it.
    """

    def test_fastmcp_meta_structure(self):
        """Test that FastMCPMeta has the expected structure."""
        meta: FastMCPMeta = {"tags": ["tag1", "tag2"]}
        assert meta["tags"] == ["tag1", "tag2"]

    def test_fastmcp_meta_with_version(self):
        """Test that FastMCPMeta can include version."""
        meta: FastMCPMeta = {"tags": ["tag1"], "version": "v1.0.0"}
        assert meta["tags"] == ["tag1"]
        assert meta["version"] == "v1.0.0"

    def test_fastmcp_meta_optional_fields(self):
        """Test that FastMCPMeta fields are optional."""
        # An empty dict is accepted — presumably the TypedDict is declared
        # with optional keys (total=False); confirm against its definition.
        meta: FastMCPMeta = {}
        assert "tags" not in meta  # Should be optional
        assert "version" not in meta  # Should be optional
class TestEdgeCasesAndIntegration:
    """Tests for edge cases and integration scenarios.

    Focuses on empty/None tag handling, meta copy semantics, and the
    pydantic model_copy contract (shallow vs deep, update dicts).
    """

    def test_empty_tags_conversion(self):
        """Test that empty tags are handled correctly."""
        component = FastMCPComponent(name="test", tags=set())
        assert component.tags == set()

    def test_tags_with_none_values(self):
        """Test tags behavior with various input types."""
        # Test with None (through validator)
        component = FastMCPComponent(name="test")
        assert component.tags == set()

    def test_get_meta_returns_copy(self):
        """Test that get_meta returns a copy, not a reference to the original."""
        component = FastMCPComponent(name="test", meta={"key": "value"})
        meta = component.get_meta()
        assert meta is not None
        meta["key"] = "modified"
        assert component.meta is not None
        # get_meta returns a copy - mutating it doesn't affect the original
        assert component.meta["key"] == "value"

    def test_component_with_complex_meta(self):
        """Test component with nested meta structures."""
        complex_meta = {
            "nested": {"level1": {"level2": "value"}},
            "list": [1, 2, 3],
            "bool": True,
        }
        component = FastMCPComponent(name="test", meta=complex_meta)
        assert component.meta == complex_meta

    def test_model_copy_preserves_all_attributes(self):
        """Test that model_copy preserves all component attributes."""
        component = FastMCPComponent(
            name="test",
            title="Title",
            description="Description",
            tags={"tag1", "tag2"},
            meta={"key": "value"},
        )
        new_component = component.model_copy()
        assert new_component.name == component.name
        assert new_component.title == component.title
        assert new_component.description == component.description
        assert new_component.tags == component.tags
        assert new_component.meta == component.meta
        assert new_component.key == component.key

    def test_model_copy_with_update(self):
        """Test that model_copy works with update dict."""
        component = FastMCPComponent(
            name="test",
            title="Original Title",
            description="Original Description",
            tags={"tag1"},
        )
        # Test with update (including name which affects .key)
        updated_component = component.model_copy(
            update={
                "name": "new_name",
                "title": "New Title",
                "description": "New Description",
            },
        )
        assert updated_component.name == "new_name"  # Updated
        assert updated_component.title == "New Title"  # Updated
        assert updated_component.description == "New Description"  # Updated
        assert updated_component.tags == {"tag1"}  # Not in update, unchanged
        assert (
            updated_component.key == "new_name@"
        )  # .key is computed from name with @ sentinel
        # Original should be unchanged
        assert component.name == "test"
        assert component.title == "Original Title"
        assert component.description == "Original Description"
        assert component.key == "test@"  # Uses name as key with @ sentinel

    def test_model_copy_deep_parameter(self):
        """Test that model_copy respects the deep parameter."""
        nested_dict = {"nested": {"value": 1}}
        component = FastMCPComponent(name="test", meta=nested_dict)
        # Shallow copy (default): nested containers are shared.
        shallow_copy = component.model_copy()
        assert shallow_copy.meta is not None
        assert component.meta is not None
        shallow_copy.meta["nested"]["value"] = 2
        assert component.meta["nested"]["value"] == 2  # Original affected
        # Deep copy: nested containers are independent.
        component.meta["nested"]["value"] = 1  # Reset
        deep_copy = component.model_copy(deep=True)
        assert deep_copy.meta is not None
        deep_copy.meta["nested"]["value"] = 3
        assert component.meta["nested"]["value"] == 1  # Original unaffected
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "tests/utilities/test_components.py",
"license": "Apache License 2.0",
"lines": 358,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
PrefectHQ/fastmcp:tests/server/auth/test_remote_auth_provider.py | import httpx
import pytest
from pydantic import AnyHttpUrl
from fastmcp import FastMCP
from fastmcp.server.auth import RemoteAuthProvider
from fastmcp.server.auth.providers.jwt import StaticTokenVerifier
@pytest.fixture
def test_tokens():
    """Standard test tokens fixture for all auth tests."""
    # One known token mapping to a client with read/write scopes.
    claims = {
        "client_id": "test-client",
        "scopes": ["read", "write"],
    }
    return {"test_token": claims}
class TestRemoteAuthProvider:
    """Test suite for RemoteAuthProvider.

    Covers constructor wiring, delegation to the token verifier, the
    RFC 9728 protected-resource metadata route, and resource-URL
    construction for plain, nested, and trailing-slash base URLs.
    """

    def test_init(self, test_tokens):
        """Test RemoteAuthProvider initialization."""
        token_verifier = StaticTokenVerifier(tokens=test_tokens)
        auth_servers = [AnyHttpUrl("https://auth.example.com")]
        provider = RemoteAuthProvider(
            token_verifier=token_verifier,
            authorization_servers=auth_servers,
            base_url="https://api.example.com",
        )
        assert provider.token_verifier is token_verifier
        assert provider.authorization_servers == auth_servers
        # base_url string is normalized into an AnyHttpUrl (trailing slash added)
        assert provider.base_url == AnyHttpUrl("https://api.example.com/")

    async def test_verify_token_delegates_to_verifier(self, test_tokens):
        """Test that verify_token delegates to the token verifier."""
        # Use a different token for this specific test
        tokens = {
            "valid_token": {
                "client_id": "test-client",
                "scopes": [],
            }
        }
        token_verifier = StaticTokenVerifier(tokens=tokens)
        provider = RemoteAuthProvider(
            token_verifier=token_verifier,
            authorization_servers=[AnyHttpUrl("https://auth.example.com")],
            base_url="https://api.example.com",
        )
        # Valid token
        result = await provider.verify_token("valid_token")
        assert result is not None
        assert result.token == "valid_token"
        assert result.client_id == "test-client"
        # Invalid token
        result = await provider.verify_token("invalid_token")
        assert result is None

    def test_get_routes_creates_protected_resource_routes(self, test_tokens):
        """Test that get_routes creates protected resource routes."""
        token_verifier = StaticTokenVerifier(tokens=test_tokens)
        auth_servers = [AnyHttpUrl("https://auth.example.com")]
        provider = RemoteAuthProvider(
            token_verifier=token_verifier,
            authorization_servers=auth_servers,
            base_url="https://api.example.com",
        )
        routes = provider.get_routes()
        assert len(routes) == 1
        # Check that the route is the OAuth protected resource metadata endpoint
        # When called without mcp_path, it creates route at /.well-known/oauth-protected-resource
        route = routes[0]
        assert route.path == "/.well-known/oauth-protected-resource"
        assert route.methods is not None
        assert "GET" in route.methods

    def test_get_resource_url_with_well_known_path(self):
        """Test _get_resource_url returns correct URL for .well-known path."""
        tokens = {
            "test_token": {
                "client_id": "test-client",
                "scopes": ["read"],
            }
        }
        token_verifier = StaticTokenVerifier(tokens=tokens)
        provider = RemoteAuthProvider(
            token_verifier=token_verifier,
            authorization_servers=[AnyHttpUrl("https://auth.example.com")],
            base_url="https://api.example.com",
        )
        metadata_url = provider._get_resource_url(
            "/.well-known/oauth-protected-resource/mcp"
        )
        assert metadata_url == AnyHttpUrl(
            "https://api.example.com/.well-known/oauth-protected-resource/mcp"
        )

    def test_get_resource_url_with_nested_base_url(self):
        """Test _get_resource_url returns correct URL for .well-known path with nested base_url."""
        tokens = {
            "test_token": {
                "client_id": "test-client",
                "scopes": ["read"],
            }
        }
        token_verifier = StaticTokenVerifier(tokens=tokens)
        provider = RemoteAuthProvider(
            token_verifier=token_verifier,
            authorization_servers=[AnyHttpUrl("https://auth.example.com")],
            base_url="https://api.example.com/v1/",
        )
        metadata_url = provider._get_resource_url(
            "/.well-known/oauth-protected-resource/mcp"
        )
        # The nested path segment (/v1/) is preserved in the resource URL.
        assert metadata_url == AnyHttpUrl(
            "https://api.example.com/v1/.well-known/oauth-protected-resource/mcp"
        )

    def test_get_resource_url_handles_trailing_slash(self):
        """Test _get_resource_url handles trailing slash correctly."""
        tokens = {
            "test_token": {
                "client_id": "test-client",
                "scopes": ["read"],
            }
        }
        token_verifier = StaticTokenVerifier(tokens=tokens)
        provider = RemoteAuthProvider(
            token_verifier=token_verifier,
            authorization_servers=[AnyHttpUrl("https://auth.example.com")],
            base_url="https://api.example.com/",
        )
        metadata_url = provider._get_resource_url(
            "/.well-known/oauth-protected-resource/mcp"
        )
        # No double slash is produced despite the trailing slash on base_url.
        assert metadata_url == AnyHttpUrl(
            "https://api.example.com/.well-known/oauth-protected-resource/mcp"
        )
class TestRemoteAuthProviderIntegration:
"""Integration tests for RemoteAuthProvider with FastMCP server."""
    @pytest.fixture
    def basic_auth_provider(self, test_tokens):
        """Basic RemoteAuthProvider fixture for testing.

        Wraps the shared test_tokens fixture in a StaticTokenVerifier
        with a single external authorization server.
        """
        token_verifier = StaticTokenVerifier(tokens=test_tokens)
        return RemoteAuthProvider(
            token_verifier=token_verifier,
            authorization_servers=[AnyHttpUrl("https://auth.example.com")],
            base_url="https://api.example.com",
        )
def _create_test_auth_provider(
self, base_url="https://api.example.com", test_tokens=None, **kwargs
):
"""Helper to create a test RemoteAuthProvider with StaticTokenVerifier."""
tokens = kwargs.get(
"tokens",
test_tokens
or {
"test_token": {
"client_id": "test-client",
"scopes": ["read", "write"],
}
},
)
token_verifier = StaticTokenVerifier(tokens=tokens)
return RemoteAuthProvider(
token_verifier=token_verifier,
authorization_servers=[AnyHttpUrl("https://auth.example.com")],
base_url=base_url,
)
async def test_protected_resource_metadata_endpoint_status_code(
self, basic_auth_provider
):
"""Test that the protected resource metadata endpoint returns 200."""
mcp = FastMCP("test-server", auth=basic_auth_provider)
mcp_http_app = mcp.http_app()
async with httpx.AsyncClient(
transport=httpx.ASGITransport(app=mcp_http_app),
base_url="https://api.example.com",
) as client:
# The metadata URL is path-aware per RFC 9728
response = await client.get("/.well-known/oauth-protected-resource/mcp")
assert response.status_code == 200
async def test_protected_resource_metadata_endpoint_resource_field(self):
"""Test that the protected resource metadata endpoint returns correct resource field."""
auth_provider = self._create_test_auth_provider()
mcp = FastMCP("test-server", auth=auth_provider)
mcp_http_app = mcp.http_app()
async with httpx.AsyncClient(
transport=httpx.ASGITransport(app=mcp_http_app),
base_url="https://api.example.com",
) as client:
# The metadata URL is path-aware per RFC 9728
response = await client.get("/.well-known/oauth-protected-resource/mcp")
data = response.json()
# This is the key test - ensure resource field contains the full MCP URL
assert data["resource"] == "https://api.example.com/mcp"
async def test_protected_resource_metadata_endpoint_authorization_servers_field(
self,
):
"""Test that the protected resource metadata endpoint returns correct authorization_servers field."""
auth_provider = self._create_test_auth_provider()
mcp = FastMCP("test-server", auth=auth_provider)
mcp_http_app = mcp.http_app()
async with httpx.AsyncClient(
transport=httpx.ASGITransport(app=mcp_http_app),
base_url="https://api.example.com",
) as client:
# The metadata URL is path-aware per RFC 9728
response = await client.get("/.well-known/oauth-protected-resource/mcp")
data = response.json()
assert data["authorization_servers"] == ["https://auth.example.com/"]
@pytest.mark.parametrize(
"base_url,expected_resource",
[
("https://api.example.com", "https://api.example.com/mcp"),
("https://api.example.com/", "https://api.example.com/mcp"),
("https://api.example.com/v1/", "https://api.example.com/v1/mcp"),
],
)
async def test_base_url_configurations(self, base_url: str, expected_resource: str):
"""Test different base_url configurations."""
from urllib.parse import urlparse
auth_provider = self._create_test_auth_provider(base_url=base_url)
mcp = FastMCP("test-server", auth=auth_provider)
mcp_http_app = mcp.http_app()
# Extract the path from the expected resource to construct metadata URL
resource_parsed = urlparse(expected_resource)
# Remove leading slash if present to avoid double slashes
resource_path = resource_parsed.path.lstrip("/")
metadata_path = f"/.well-known/oauth-protected-resource/{resource_path}"
async with httpx.AsyncClient(
transport=httpx.ASGITransport(app=mcp_http_app),
base_url="https://test.example.com",
) as client:
# The metadata URL is path-aware per RFC 9728
response = await client.get(metadata_path)
assert response.status_code == 200
data = response.json()
assert data["resource"] == expected_resource
async def test_multiple_authorization_servers_resource_field(self):
"""Test resource field with multiple authorization servers."""
auth_servers = [
AnyHttpUrl("https://auth1.example.com"),
AnyHttpUrl("https://auth2.example.com"),
]
auth_provider = self._create_test_auth_provider()
# Override the authorization servers
auth_provider.authorization_servers = auth_servers
mcp = FastMCP("test-server", auth=auth_provider)
mcp_http_app = mcp.http_app()
async with httpx.AsyncClient(
transport=httpx.ASGITransport(app=mcp_http_app),
base_url="https://api.example.com",
) as client:
# The metadata URL is path-aware per RFC 9728
response = await client.get("/.well-known/oauth-protected-resource/mcp")
data = response.json()
assert data["resource"] == "https://api.example.com/mcp"
async def test_multiple_authorization_servers_list(self):
"""Test authorization_servers field with multiple authorization servers."""
auth_servers = [
AnyHttpUrl("https://auth1.example.com"),
AnyHttpUrl("https://auth2.example.com"),
]
auth_provider = self._create_test_auth_provider()
# Override the authorization servers
auth_provider.authorization_servers = auth_servers
mcp = FastMCP("test-server", auth=auth_provider)
mcp_http_app = mcp.http_app()
async with httpx.AsyncClient(
transport=httpx.ASGITransport(app=mcp_http_app),
base_url="https://api.example.com",
) as client:
# The metadata URL is path-aware per RFC 9728
response = await client.get("/.well-known/oauth-protected-resource/mcp")
data = response.json()
assert set(data["authorization_servers"]) == {
"https://auth1.example.com/",
"https://auth2.example.com/",
}
async def test_token_verification_with_valid_auth_succeeds(self):
"""Test that requests with valid auth token succeed."""
# Note: This test focuses on HTTP-level authentication behavior
# For the RemoteAuthProvider, the key test is that the OAuth discovery
# endpoint correctly reports the resource server URL, which is tested above
# This is primarily testing that the token verifier integration works
tokens = {
"valid_token": {
"client_id": "test-client",
"scopes": [],
}
}
token_verifier = StaticTokenVerifier(tokens=tokens)
provider = RemoteAuthProvider(
token_verifier=token_verifier,
authorization_servers=[AnyHttpUrl("https://auth.example.com")],
base_url="https://api.example.com",
)
# Test that the provider correctly delegates to the token verifier
result = await provider.verify_token("valid_token")
assert result is not None
assert result.token == "valid_token"
assert result.client_id == "test-client"
result = await provider.verify_token("invalid_token")
assert result is None
async def test_token_verification_with_invalid_auth_fails(self):
"""Test that the provider correctly rejects invalid tokens."""
tokens = {
"valid_token": {
"client_id": "test-client",
"scopes": [],
}
}
token_verifier = StaticTokenVerifier(tokens=tokens)
provider = RemoteAuthProvider(
token_verifier=token_verifier,
authorization_servers=[AnyHttpUrl("https://auth.example.com")],
base_url="https://api.example.com",
)
# Test that invalid tokens are rejected
result = await provider.verify_token("invalid_token")
assert result is None
async def test_issue_1348_oauth_discovery_returns_correct_url(self):
"""Test that RemoteAuthProvider correctly returns the full MCP endpoint URL.
This test confirms that RemoteAuthProvider works correctly and returns
the resource URL with the MCP path appended to the base URL.
"""
tokens = {
"test_token": {
"client_id": "test-client",
"scopes": ["read"],
}
}
token_verifier = StaticTokenVerifier(tokens=tokens)
auth_provider = RemoteAuthProvider(
token_verifier=token_verifier,
authorization_servers=[AnyHttpUrl("https://accounts.google.com")],
base_url="https://my-server.com",
)
mcp = FastMCP("test-server", auth=auth_provider)
mcp_http_app = mcp.http_app()
async with httpx.AsyncClient(
transport=httpx.ASGITransport(app=mcp_http_app),
base_url="https://my-server.com",
) as client:
# The metadata URL is path-aware per RFC 9728
response = await client.get("/.well-known/oauth-protected-resource/mcp")
assert response.status_code == 200
data = response.json()
# The RemoteAuthProvider correctly returns the full MCP endpoint URL
assert data["resource"] == "https://my-server.com/mcp"
assert data["authorization_servers"] == ["https://accounts.google.com/"]
async def test_resource_name_field(self):
"""Test that RemoteAuthProvider correctly returns the resource_name.
This test confirms that RemoteAuthProvider works correctly and returns
the exact resource_name specified.
"""
tokens = {
"test_token": {
"client_id": "test-client",
"scopes": ["read"],
}
}
token_verifier = StaticTokenVerifier(tokens=tokens)
auth_provider = RemoteAuthProvider(
token_verifier=token_verifier,
authorization_servers=[AnyHttpUrl("https://accounts.google.com")],
base_url="https://my-server.com",
resource_name="My Test Resource",
)
mcp = FastMCP("test-server", auth=auth_provider)
mcp_http_app = mcp.http_app()
async with httpx.AsyncClient(
transport=httpx.ASGITransport(app=mcp_http_app),
base_url="https://my-server.com",
) as client:
# The metadata URL is path-aware per RFC 9728
response = await client.get("/.well-known/oauth-protected-resource/mcp")
assert response.status_code == 200
data = response.json()
# The RemoteAuthProvider correctly returns the resource_name
assert data["resource_name"] == "My Test Resource"
async def test_resource_documentation_field(self):
"""Test that RemoteAuthProvider correctly returns the resource_documentation.
This test confirms that RemoteAuthProvider works correctly and returns
the exact resource_documentation specified.
"""
tokens = {
"test_token": {
"client_id": "test-client",
"scopes": ["read"],
}
}
token_verifier = StaticTokenVerifier(tokens=tokens)
auth_provider = RemoteAuthProvider(
token_verifier=token_verifier,
authorization_servers=[AnyHttpUrl("https://accounts.google.com")],
base_url="https://my-server.com",
resource_documentation=AnyHttpUrl(
"https://doc.my-server.com/resource-docs"
),
)
mcp = FastMCP("test-server", auth=auth_provider)
mcp_http_app = mcp.http_app()
async with httpx.AsyncClient(
transport=httpx.ASGITransport(app=mcp_http_app),
base_url="https://my-server.com",
) as client:
# The metadata URL is path-aware per RFC 9728
response = await client.get("/.well-known/oauth-protected-resource/mcp")
assert response.status_code == 200
data = response.json()
# The RemoteAuthProvider correctly returns the resource_documentation
assert (
data["resource_documentation"]
== "https://doc.my-server.com/resource-docs"
)
async def test_scopes_supported_overrides_metadata(self):
"""Test that scopes_supported parameter overrides what's in metadata."""
token_verifier = StaticTokenVerifier(
tokens={
"test": {"client_id": "c", "scopes": ["read"]},
},
required_scopes=["read"],
)
provider = RemoteAuthProvider(
token_verifier=token_verifier,
authorization_servers=[AnyHttpUrl("https://auth.example.com")],
base_url="https://my-server.com",
scopes_supported=["api://my-api/read"],
)
mcp = FastMCP("test-server", auth=provider)
mcp_http_app = mcp.http_app()
async with httpx.AsyncClient(
transport=httpx.ASGITransport(app=mcp_http_app),
base_url="https://my-server.com",
) as client:
response = await client.get("/.well-known/oauth-protected-resource/mcp")
assert response.status_code == 200
data = response.json()
assert data["scopes_supported"] == ["api://my-api/read"]
async def test_scopes_supported_defaults_to_verifier(self):
"""Test that metadata uses verifier scopes_supported when parameter not set."""
token_verifier = StaticTokenVerifier(
tokens={
"test": {"client_id": "c", "scopes": ["read"]},
},
required_scopes=["read"],
)
provider = RemoteAuthProvider(
token_verifier=token_verifier,
authorization_servers=[AnyHttpUrl("https://auth.example.com")],
base_url="https://my-server.com",
)
mcp = FastMCP("test-server", auth=provider)
mcp_http_app = mcp.http_app()
async with httpx.AsyncClient(
transport=httpx.ASGITransport(app=mcp_http_app),
base_url="https://my-server.com",
) as client:
response = await client.get("/.well-known/oauth-protected-resource/mcp")
assert response.status_code == 200
data = response.json()
assert data["scopes_supported"] == ["read"]
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "tests/server/auth/test_remote_auth_provider.py",
"license": "Apache License 2.0",
"lines": 455,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
PrefectHQ/fastmcp:src/fastmcp/server/auth/providers/workos.py | """WorkOS authentication providers for FastMCP.
This module provides two WorkOS authentication strategies:
1. WorkOSProvider - OAuth proxy for WorkOS Connect applications (non-DCR)
2. AuthKitProvider - DCR-compliant provider for WorkOS AuthKit
Choose based on your WorkOS setup and authentication requirements.
"""
from __future__ import annotations
import contextlib
import httpx
from key_value.aio.protocols import AsyncKeyValue
from pydantic import AnyHttpUrl
from starlette.responses import JSONResponse
from starlette.routing import Route
from fastmcp.server.auth import AccessToken, RemoteAuthProvider, TokenVerifier
from fastmcp.server.auth.oauth_proxy import OAuthProxy
from fastmcp.server.auth.providers.jwt import JWTVerifier
from fastmcp.utilities.auth import parse_scopes
from fastmcp.utilities.logging import get_logger
logger = get_logger(__name__)
class WorkOSTokenVerifier(TokenVerifier):
    """Token verifier for WorkOS OAuth tokens.
    WorkOS AuthKit tokens are opaque, so we verify them by calling
    the /oauth2/userinfo endpoint to check validity and get user info.
    """
    def __init__(
        self,
        *,
        authkit_domain: str,
        required_scopes: list[str] | None = None,
        timeout_seconds: int = 10,
        http_client: httpx.AsyncClient | None = None,
    ):
        """Initialize the WorkOS token verifier.
        Args:
            authkit_domain: WorkOS AuthKit domain (e.g., "https://your-app.authkit.app")
            required_scopes: Required OAuth scopes
            timeout_seconds: HTTP request timeout
            http_client: Optional httpx.AsyncClient for connection pooling. When provided,
                the client is reused across calls and the caller is responsible for its
                lifecycle. When None (default), a fresh client is created per call.
        """
        super().__init__(required_scopes=required_scopes)
        # Normalize so endpoint URLs can be built with simple f-strings.
        self.authkit_domain = authkit_domain.rstrip("/")
        self.timeout_seconds = timeout_seconds
        self._http_client = http_client
    async def verify_token(self, token: str) -> AccessToken | None:
        """Verify WorkOS OAuth token by calling userinfo endpoint.
        Returns an AccessToken populated with userinfo claims on success,
        or None for any failure (HTTP error, missing scopes, network error).
        """
        try:
            # nullcontext wraps the pooled client so `async with` does NOT
            # close it on exit; a per-call client is created (and closed)
            # only when no pooled client was supplied.
            async with (
                contextlib.nullcontext(self._http_client)
                if self._http_client is not None
                else httpx.AsyncClient(timeout=self.timeout_seconds)
            ) as client:
                # Use WorkOS AuthKit userinfo endpoint to validate token
                response = await client.get(
                    f"{self.authkit_domain}/oauth2/userinfo",
                    headers={
                        "Authorization": f"Bearer {token}",
                        "User-Agent": "FastMCP-WorkOS-OAuth",
                    },
                )
                if response.status_code != 200:
                    logger.debug(
                        "WorkOS token verification failed: %d - %s",
                        response.status_code,
                        response.text[:200],
                    )
                    return None
                user_data = response.json()
                # Accept either "scope" or "scopes" keys from the userinfo payload.
                token_scopes = (
                    parse_scopes(user_data.get("scope") or user_data.get("scopes"))
                    or []
                )
                # Every required scope must be present on the token.
                if self.required_scopes and not all(
                    scope in token_scopes for scope in self.required_scopes
                ):
                    logger.debug(
                        "WorkOS token missing required scopes. required=%s actual=%s",
                        self.required_scopes,
                        token_scopes,
                    )
                    return None
                # Create AccessToken with WorkOS user info
                return AccessToken(
                    token=token,
                    client_id=str(user_data.get("sub", "unknown")),
                    scopes=token_scopes,
                    expires_at=None,  # Will be set from token introspection if needed
                    claims={
                        "sub": user_data.get("sub"),
                        "email": user_data.get("email"),
                        "email_verified": user_data.get("email_verified"),
                        "name": user_data.get("name"),
                        "given_name": user_data.get("given_name"),
                        "family_name": user_data.get("family_name"),
                    },
                )
        except httpx.RequestError as e:
            logger.debug("Failed to verify WorkOS token: %s", e)
            return None
        except Exception as e:
            # Deliberate catch-all: verification failures must never propagate,
            # an invalid token simply yields None.
            logger.debug("WorkOS token verification error: %s", e)
            return None
class WorkOSProvider(OAuthProxy):
    """Complete WorkOS OAuth provider for FastMCP.
    This provider implements WorkOS AuthKit OAuth using the OAuth Proxy pattern.
    It provides OAuth2 authentication for users through WorkOS Connect applications.
    Features:
    - Transparent OAuth proxy to WorkOS AuthKit
    - Automatic token validation via userinfo endpoint
    - User information extraction from ID tokens
    - Support for standard OAuth scopes (openid, profile, email)
    Setup Requirements:
    1. Create a WorkOS Connect application in your dashboard
    2. Note your AuthKit domain (e.g., "https://your-app.authkit.app")
    3. Configure redirect URI as: http://localhost:8000/auth/callback
    4. Note your Client ID and Client Secret
    Example:
        ```python
        from fastmcp import FastMCP
        from fastmcp.server.auth.providers.workos import WorkOSProvider
        auth = WorkOSProvider(
            client_id="client_123",
            client_secret="sk_test_456",
            authkit_domain="https://your-app.authkit.app",
            base_url="http://localhost:8000"
        )
        mcp = FastMCP("My App", auth=auth)
        ```
    """
    def __init__(
        self,
        *,
        client_id: str,
        client_secret: str,
        authkit_domain: str,
        base_url: AnyHttpUrl | str,
        issuer_url: AnyHttpUrl | str | None = None,
        redirect_path: str | None = None,
        required_scopes: list[str] | None = None,
        timeout_seconds: int = 10,
        allowed_client_redirect_uris: list[str] | None = None,
        client_storage: AsyncKeyValue | None = None,
        jwt_signing_key: str | bytes | None = None,
        require_authorization_consent: bool = True,
        consent_csp_policy: str | None = None,
        http_client: httpx.AsyncClient | None = None,
    ):
        """Initialize WorkOS OAuth provider.
        Args:
            client_id: WorkOS client ID
            client_secret: WorkOS client secret
            authkit_domain: Your WorkOS AuthKit domain (e.g., "https://your-app.authkit.app")
            base_url: Public URL where OAuth endpoints will be accessible (includes any mount path)
            issuer_url: Issuer URL for OAuth metadata (defaults to base_url). Use root-level URL
                to avoid 404s during discovery when mounting under a path.
            redirect_path: Redirect path configured in WorkOS (defaults to "/auth/callback")
            required_scopes: Required OAuth scopes (no default)
            timeout_seconds: HTTP request timeout for WorkOS API calls (defaults to 10)
            allowed_client_redirect_uris: List of allowed redirect URI patterns for MCP clients.
                If None (default), all URIs are allowed. If empty list, no URIs are allowed.
            client_storage: Storage backend for OAuth state (client registrations, encrypted tokens).
                If None, an encrypted file store will be created in the data directory
                (derived from `platformdirs`).
            jwt_signing_key: Secret for signing FastMCP JWT tokens (any string or bytes). If bytes are provided,
                they will be used as is. If a string is provided, it will be derived into a 32-byte key. If not
                provided, the upstream client secret will be used to derive a 32-byte key using PBKDF2.
            require_authorization_consent: Whether to require user consent before authorizing clients (default True).
                When True, users see a consent screen before being redirected to WorkOS.
                When False, authorization proceeds directly without user confirmation.
                SECURITY WARNING: Only disable for local development or testing environments.
            consent_csp_policy: Optional Content-Security-Policy value for the consent page;
                forwarded unchanged to OAuthProxy. When None, the proxy's default policy applies.
            http_client: Optional httpx.AsyncClient for connection pooling in token verification.
                When provided, the client is reused across verify_token calls and the caller
                is responsible for its lifecycle. When None (default), a fresh client is created per call.
        """
        # Apply defaults and ensure authkit_domain is a full URL
        authkit_domain_str = authkit_domain
        if not authkit_domain_str.startswith(("http://", "https://")):
            authkit_domain_str = f"https://{authkit_domain_str}"
        authkit_domain_final = authkit_domain_str.rstrip("/")
        scopes_final = (
            parse_scopes(required_scopes) if required_scopes is not None else []
        )
        # Create WorkOS token verifier
        token_verifier = WorkOSTokenVerifier(
            authkit_domain=authkit_domain_final,
            required_scopes=scopes_final,
            timeout_seconds=timeout_seconds,
            http_client=http_client,
        )
        # Initialize OAuth proxy with WorkOS AuthKit endpoints
        super().__init__(
            upstream_authorization_endpoint=f"{authkit_domain_final}/oauth2/authorize",
            upstream_token_endpoint=f"{authkit_domain_final}/oauth2/token",
            upstream_client_id=client_id,
            upstream_client_secret=client_secret,
            token_verifier=token_verifier,
            base_url=base_url,
            redirect_path=redirect_path,
            issuer_url=issuer_url or base_url,  # Default to base_url if not specified
            allowed_client_redirect_uris=allowed_client_redirect_uris,
            client_storage=client_storage,
            jwt_signing_key=jwt_signing_key,
            require_authorization_consent=require_authorization_consent,
            consent_csp_policy=consent_csp_policy,
        )
        logger.debug(
            "Initialized WorkOS OAuth provider for client %s with AuthKit domain %s",
            client_id,
            authkit_domain_final,
        )
class AuthKitProvider(RemoteAuthProvider):
    """AuthKit metadata provider for DCR (Dynamic Client Registration).
    This provider implements AuthKit integration using metadata forwarding
    instead of OAuth proxying. This is the recommended approach for WorkOS DCR
    as it allows WorkOS to handle the OAuth flow directly while FastMCP acts
    as a resource server.
    IMPORTANT SETUP REQUIREMENTS:
    1. Enable Dynamic Client Registration in WorkOS Dashboard:
       - Go to Applications → Configuration
       - Toggle "Dynamic Client Registration" to enabled
    2. Configure your FastMCP server URL as a callback:
       - Add your server URL to the Redirects tab in WorkOS dashboard
       - Example: https://your-fastmcp-server.com/oauth2/callback
    For detailed setup instructions, see:
    https://workos.com/docs/authkit/mcp/integrating/token-verification
    Example:
        ```python
        from fastmcp.server.auth.providers.workos import AuthKitProvider
        # Create AuthKit metadata provider (JWT verifier created automatically)
        workos_auth = AuthKitProvider(
            authkit_domain="https://your-workos-domain.authkit.app",
            base_url="https://your-fastmcp-server.com",
        )
        # Use with FastMCP
        mcp = FastMCP("My App", auth=workos_auth)
        ```
    """
    def __init__(
        self,
        *,
        authkit_domain: AnyHttpUrl | str,
        base_url: AnyHttpUrl | str,
        client_id: str | None = None,
        required_scopes: list[str] | None = None,
        scopes_supported: list[str] | None = None,
        resource_name: str | None = None,
        resource_documentation: AnyHttpUrl | None = None,
        token_verifier: TokenVerifier | None = None,
    ):
        """Initialize AuthKit metadata provider.
        Args:
            authkit_domain: Your AuthKit domain (e.g., "https://your-app.authkit.app")
            base_url: Public URL of this FastMCP server
            client_id: Your WorkOS project client ID (e.g., "client_01ABC..."). Used to
                validate the JWT audience claim. Found in your WorkOS Dashboard under
                API Keys. This is the project-level client ID, not individual MCP client IDs.
            required_scopes: Optional list of scopes to require for all requests
            scopes_supported: Optional list of scopes to advertise in OAuth metadata.
                If None, uses required_scopes. Use this when the scopes clients should
                request differ from the scopes enforced on tokens.
            resource_name: Optional name for the protected resource metadata.
            resource_documentation: Optional documentation URL for the protected resource.
            token_verifier: Optional token verifier. If None, creates JWT verifier for AuthKit
        """
        # Normalize both URLs: no trailing slash so f-string paths stay clean.
        self.authkit_domain = str(authkit_domain).rstrip("/")
        self.base_url = AnyHttpUrl(str(base_url).rstrip("/"))
        # Parse scopes if provided as string
        parsed_scopes = (
            parse_scopes(required_scopes) if required_scopes is not None else None
        )
        # Create default JWT verifier if none provided
        if token_verifier is None:
            logger.warning(
                "AuthKitProvider cannot validate token audience for the specific resource "
                "because AuthKit does not support RFC 8707 resource indicators. "
                "This may leave the server vulnerable to cross-server token replay. "
                "Consider using WorkOSProvider (OAuth proxy) for audience-bound tokens."
            )
            token_verifier = JWTVerifier(
                jwks_uri=f"{self.authkit_domain}/oauth2/jwks",
                issuer=self.authkit_domain,
                algorithm="RS256",
                audience=client_id,
                required_scopes=parsed_scopes,
            )
        # Initialize RemoteAuthProvider with AuthKit as the authorization server
        super().__init__(
            token_verifier=token_verifier,
            authorization_servers=[AnyHttpUrl(self.authkit_domain)],
            base_url=self.base_url,
            scopes_supported=scopes_supported,
            resource_name=resource_name,
            resource_documentation=resource_documentation,
        )
    def get_routes(
        self,
        mcp_path: str | None = None,
    ) -> list[Route]:
        """Get OAuth routes including AuthKit authorization server metadata forwarding.
        This returns the standard protected resource routes plus an authorization server
        metadata endpoint that forwards AuthKit's OAuth metadata to clients.
        Args:
            mcp_path: The path where the MCP endpoint is mounted (e.g., "/mcp")
                This is used to advertise the resource URL in metadata.
        """
        # Get the standard protected resource routes from RemoteAuthProvider
        routes = super().get_routes(mcp_path)
        async def oauth_authorization_server_metadata(request):
            """Forward AuthKit OAuth authorization server metadata with FastMCP customizations."""
            # A fresh client per request: this endpoint is hit rarely
            # (only during client OAuth discovery), so pooling isn't needed.
            try:
                async with httpx.AsyncClient() as client:
                    response = await client.get(
                        f"{self.authkit_domain}/.well-known/oauth-authorization-server"
                    )
                    response.raise_for_status()
                    metadata = response.json()
                    return JSONResponse(metadata)
            except Exception as e:
                # Surface upstream failures as an OAuth-style error payload.
                return JSONResponse(
                    {
                        "error": "server_error",
                        "error_description": f"Failed to fetch AuthKit metadata: {e}",
                    },
                    status_code=500,
                )
        # Add AuthKit authorization server metadata forwarding
        routes.append(
            Route(
                "/.well-known/oauth-authorization-server",
                endpoint=oauth_authorization_server_metadata,
                methods=["GET"],
            )
        )
        return routes
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "src/fastmcp/server/auth/providers/workos.py",
"license": "Apache License 2.0",
"lines": 335,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
PrefectHQ/fastmcp:tests/client/auth/test_oauth_client.py | from unittest.mock import patch
from urllib.parse import urlparse
import httpx
import pytest
from mcp.types import TextResourceContents
from fastmcp.client import Client
from fastmcp.client.auth import OAuth
from fastmcp.client.transports import StreamableHttpTransport
from fastmcp.server.auth.auth import ClientRegistrationOptions
from fastmcp.server.auth.providers.in_memory import InMemoryOAuthProvider
from fastmcp.server.server import FastMCP
from fastmcp.utilities.http import find_available_port
from fastmcp.utilities.tests import HeadlessOAuth, run_server_async
def fastmcp_server(issuer_url: str):
    """Create a FastMCP server with OAuth authentication."""
    # Build the auth provider first, then attach it to the server.
    auth_provider = InMemoryOAuthProvider(
        base_url=issuer_url,
        client_registration_options=ClientRegistrationOptions(
            enabled=True, valid_scopes=["read", "write"]
        ),
    )
    server = FastMCP("TestServer", auth=auth_provider)
    @server.tool
    def add(a: int, b: int) -> int:
        """Add two numbers together."""
        return a + b
    @server.resource("resource://test")
    def get_test_resource() -> str:
        """Get a test resource."""
        return "Hello from authenticated resource!"
    return server
@pytest.fixture
async def streamable_http_server():
    """Start OAuth-enabled server.
    Yields the base URL of a live HTTP server on an ephemeral port; the
    server is torn down when the fixture's context exits.
    """
    # Pick the port first so the OAuth issuer URL matches the bound address.
    port = find_available_port()
    server = fastmcp_server(f"http://127.0.0.1:{port}")
    async with run_server_async(server, port=port, transport="http") as url:
        yield url
@pytest.fixture
def client_unauthorized(streamable_http_server: str) -> Client:
    """Client with no credentials, for exercising 401 rejections."""
    transport = StreamableHttpTransport(streamable_http_server)
    return Client(transport=transport)
@pytest.fixture
def client_with_headless_oauth(streamable_http_server: str) -> Client:
    """Client with headless OAuth that bypasses browser interaction."""
    oauth = HeadlessOAuth(mcp_url=streamable_http_server, scopes=["read", "write"])
    transport = StreamableHttpTransport(streamable_http_server)
    return Client(transport=transport, auth=oauth)
async def test_unauthorized(client_unauthorized: Client):
    """Test that unauthenticated requests are rejected."""
    # Entering the client context performs the initial request,
    # which must fail with a 401 when no credentials are supplied.
    with pytest.raises(httpx.HTTPStatusError, match="401 Unauthorized"):
        async with client_unauthorized:
            pass
async def test_ping(client_with_headless_oauth: Client):
    """Test that we can ping the server."""
    async with client_with_headless_oauth:
        ping_ok = await client_with_headless_oauth.ping()
    assert ping_ok
async def test_list_tools(client_with_headless_oauth: Client):
    """Test that we can list tools."""
    async with client_with_headless_oauth:
        tools = await client_with_headless_oauth.list_tools()
    # The server registers exactly one tool named "add".
    assert any(tool.name == "add" for tool in tools)
async def test_call_tool(client_with_headless_oauth: Client):
    """Test that we can call a tool."""
    async with client_with_headless_oauth:
        result = await client_with_headless_oauth.call_tool("add", {"a": 5, "b": 3})
    # The add tool's int return is wrapped as structured output and
    # unwrapped by the client onto the result's data field.
    assert result.data == 8
async def test_list_resources(client_with_headless_oauth: Client):
    """Test that we can list resources."""
    async with client_with_headless_oauth:
        resources = await client_with_headless_oauth.list_resources()
    listed_uris = {str(resource.uri) for resource in resources}
    assert "resource://test" in listed_uris
async def test_read_resource(client_with_headless_oauth: Client):
    """Test that we can read a resource."""
    async with client_with_headless_oauth:
        contents = await client_with_headless_oauth.read_resource("resource://test")
    first = contents[0]
    assert isinstance(first, TextResourceContents)
    assert first.text == "Hello from authenticated resource!"
async def test_oauth_server_metadata_discovery(streamable_http_server: str):
    """Test that we can discover OAuth metadata from the running server."""
    parsed = urlparse(streamable_http_server)
    base = f"{parsed.scheme}://{parsed.netloc}"
    discovery_url = f"{base}/.well-known/oauth-authorization-server"
    async with httpx.AsyncClient() as http:
        response = await http.get(discovery_url)
    assert response.status_code == 200
    metadata = response.json()
    # All three core metadata fields must be advertised.
    for field in ("authorization_endpoint", "token_endpoint", "registration_endpoint"):
        assert field in metadata
    # The endpoints should be properly formed URLs on this server.
    assert metadata["authorization_endpoint"].startswith(base)
    assert metadata["token_endpoint"].startswith(base)
class TestOAuthClientUrlHandling:
    """Tests for OAuth client URL handling (issue #2573)."""
    def test_oauth_preserves_full_url_with_path(self):
        """OAuth client should preserve the full MCP URL including path components.
        Servers hosted under path-based endpoints such as
        mcp.example.com/server1/v1.0/mcp need the full path so OAuth metadata
        discovery can locate the correct .well-known endpoints.
        """
        url = "https://mcp.example.com/server1/v1.0/mcp"
        oauth = OAuth(mcp_url=url)
        # Both the stored URL and the discovery context keep the path intact.
        assert oauth.mcp_url == url
        assert oauth.context.server_url == url
    def test_oauth_preserves_root_url(self):
        """OAuth client should work correctly with root-level URLs."""
        url = "https://mcp.example.com"
        oauth = OAuth(mcp_url=url)
        assert oauth.mcp_url == url
        assert oauth.context.server_url == url
    def test_oauth_normalizes_trailing_slash(self):
        """OAuth client should normalize trailing slashes for consistency."""
        oauth = OAuth(mcp_url="https://mcp.example.com/api/mcp/")
        normalized = "https://mcp.example.com/api/mcp"
        # The trailing slash is stripped on both stored representations.
        assert oauth.mcp_url == normalized
        assert oauth.context.server_url == normalized
    def test_oauth_token_storage_uses_full_url(self):
        """Token storage should use the full URL to separate tokens per endpoint."""
        url = "https://mcp.example.com/server1/v1.0/mcp"
        oauth = OAuth(mcp_url=url)
        # Keying by the full URL (not just the host) isolates tokens per endpoint.
        assert oauth.token_storage_adapter._server_url == url
class TestOAuthGeneratorCleanup:
"""Tests for OAuth async generator cleanup (issue #2643).
The MCP SDK's OAuthClientProvider.async_auth_flow() holds a lock via
`async with self.context.lock`. If the generator is not explicitly closed,
GC may clean it up from a different task, causing:
RuntimeError: The current task is not holding this lock
"""
    async def test_generator_closed_on_successful_flow(self):
        """Verify aclose() is called on the parent generator after successful flow."""
        oauth = OAuth(mcp_url="https://example.com")
        # Track generator lifecycle using a wrapper class
        # Implements just enough of the async-generator protocol:
        # yields one request, then signals exhaustion via StopAsyncIteration.
        class TrackedGenerator:
            def __init__(self):
                self.aclose_called = False  # set True when aclose() runs
                self._exhausted = False
            def __aiter__(self):
                return self
            async def __anext__(self):
                if self._exhausted:
                    raise StopAsyncIteration
                self._exhausted = True
                return httpx.Request("GET", "https://example.com")
            async def asend(self, value):
                if self._exhausted:
                    raise StopAsyncIteration
                self._exhausted = True
                return httpx.Request("GET", "https://example.com")
            async def athrow(self, exc_type, exc_val=None, exc_tb=None):
                raise StopAsyncIteration
            async def aclose(self):
                self.aclose_called = True
        tracked_gen = TrackedGenerator()
        # Patch the parent class to return our tracked generator
        with patch.object(
            OAuth.__bases__[0], "async_auth_flow", return_value=tracked_gen
        ):
            # Drive the OAuth flow
            flow = oauth.async_auth_flow(httpx.Request("GET", "https://example.com"))
            try:
                # First asend(None) starts the generator per async generator protocol
                await flow.asend(None)  # ty: ignore[invalid-argument-type]
                try:
                    # Feed a response back; the stub then raises StopAsyncIteration.
                    await flow.asend(httpx.Response(200))
                except StopAsyncIteration:
                    pass
            except StopAsyncIteration:
                pass
            # The fix for issue #2643 requires the wrapper to aclose() the
            # parent generator explicitly rather than leaving it to GC.
            assert tracked_gen.aclose_called, (
                "Generator aclose() was not called after flow completion"
            )
async def test_generator_closed_on_exception(self):
"""Verify aclose() is called even when an exception occurs mid-flow."""
oauth = OAuth(mcp_url="https://example.com")
class FailingGenerator:
def __init__(self):
self.aclose_called = False
self._first_call = True
def __aiter__(self):
return self
async def __anext__(self):
return await self.asend(None)
async def asend(self, value):
if self._first_call:
self._first_call = False
return httpx.Request("GET", "https://example.com")
raise ValueError("Simulated failure")
async def athrow(self, exc_type, exc_val=None, exc_tb=None):
raise StopAsyncIteration
async def aclose(self):
self.aclose_called = True
tracked_gen = FailingGenerator()
with patch.object(
OAuth.__bases__[0], "async_auth_flow", return_value=tracked_gen
):
flow = oauth.async_auth_flow(httpx.Request("GET", "https://example.com"))
with pytest.raises(ValueError, match="Simulated failure"):
await flow.asend(None) # ty: ignore[invalid-argument-type]
await flow.asend(httpx.Response(200))
assert tracked_gen.aclose_called, (
"Generator aclose() was not called after exception"
)
class TestTokenStorageTTL:
"""Tests for client token storage TTL behavior (issue #2670).
The token storage TTL should NOT be based on access token expiry, because
the refresh token may be valid much longer. Using access token expiry would
cause both tokens to be deleted when the access token expires, preventing
refresh.
"""
async def test_token_storage_uses_long_ttl(self):
"""Token storage should use a long TTL, not access token expiry.
This is the ianw case: IdP returns expires_in=300 (5 min access token)
but the refresh token is valid for much longer. The entire token entry
should NOT be deleted after 5 minutes.
"""
from key_value.aio.stores.memory import MemoryStore
from mcp.shared.auth import OAuthToken
from fastmcp.client.auth.oauth import TokenStorageAdapter
# Create storage adapter
storage = MemoryStore()
adapter = TokenStorageAdapter(
async_key_value=storage, server_url="https://test"
)
# Create a token with short access expiry (5 minutes)
token = OAuthToken(
access_token="test-access-token",
token_type="Bearer",
expires_in=300, # 5 minutes - but we should NOT use this as storage TTL!
refresh_token="test-refresh-token",
scope="read write",
)
# Store the token
await adapter.set_tokens(token)
# Verify token is stored
stored = await adapter.get_tokens()
assert stored is not None
assert stored.access_token == "test-access-token"
assert stored.refresh_token == "test-refresh-token"
# The key assertion: the TTL should be 1 year (365 days), not 300 seconds
# We verify this by checking the raw storage entry
raw = await storage.get(collection="mcp-oauth-token", key="https://test/tokens")
assert raw is not None
async def test_token_storage_preserves_refresh_token(self):
"""Refresh token should not be lost when access token would expire."""
from key_value.aio.stores.memory import MemoryStore
from mcp.shared.auth import OAuthToken
from fastmcp.client.auth.oauth import TokenStorageAdapter
storage = MemoryStore()
adapter = TokenStorageAdapter(
async_key_value=storage, server_url="https://test"
)
# Store token with short access expiry
token = OAuthToken(
access_token="access",
token_type="Bearer",
expires_in=300,
refresh_token="refresh-token-should-survive",
scope="read",
)
await adapter.set_tokens(token)
# Retrieve and verify refresh token is present
stored = await adapter.get_tokens()
assert stored is not None
assert stored.refresh_token == "refresh-token-should-survive"
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "tests/client/auth/test_oauth_client.py",
"license": "Apache License 2.0",
"lines": 277,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
PrefectHQ/fastmcp:tests/server/http/test_http_auth_middleware.py | import pytest
from mcp.server.auth.middleware.bearer_auth import RequireAuthMiddleware
from starlette.routing import Route
from starlette.testclient import TestClient
from fastmcp.server import FastMCP
from fastmcp.server.auth.providers.jwt import JWTVerifier, RSAKeyPair
from fastmcp.server.http import create_streamable_http_app
class TestStreamableHTTPAppResourceMetadataURL:
"""Test resource_metadata_url logic in create_streamable_http_app."""
@pytest.fixture
def rsa_key_pair(self) -> RSAKeyPair:
"""Generate RSA key pair for testing."""
return RSAKeyPair.generate()
@pytest.fixture
def bearer_auth_provider(self, rsa_key_pair):
provider = JWTVerifier(
public_key=rsa_key_pair.public_key,
issuer="https://issuer",
audience="https://audience",
base_url="https://resource.example.com",
)
return provider
def test_auth_endpoint_wrapped_with_require_auth_middleware(
self, bearer_auth_provider
):
"""Test that auth-protected endpoints use RequireAuthMiddleware."""
server = FastMCP(name="TestServer")
app = create_streamable_http_app(
server=server,
streamable_http_path="/mcp",
auth=bearer_auth_provider,
)
route = next(r for r in app.routes if isinstance(r, Route) and r.path == "/mcp")
# When auth is enabled, endpoint should use RequireAuthMiddleware
assert isinstance(route.endpoint, RequireAuthMiddleware)
def test_auth_endpoint_has_correct_methods(self, rsa_key_pair):
"""Test that auth-protected endpoints have correct HTTP methods."""
provider = JWTVerifier(
public_key=rsa_key_pair.public_key,
issuer="https://issuer",
audience="https://audience",
base_url="https://resource.example.com/",
)
server = FastMCP(name="TestServer")
app = create_streamable_http_app(
server=server,
streamable_http_path="/mcp",
auth=provider,
)
route = next(r for r in app.routes if isinstance(r, Route) and r.path == "/mcp")
# Verify RequireAuthMiddleware is applied
assert isinstance(route.endpoint, RequireAuthMiddleware)
# Verify methods include GET, POST, DELETE for streamable-http
expected_methods = {"GET", "POST", "DELETE"}
assert route.methods is not None
assert expected_methods.issubset(set(route.methods))
def test_no_auth_provider_mounts_without_middleware(self, rsa_key_pair):
"""Test that endpoints without auth are not wrapped with middleware."""
server = FastMCP(name="TestServer")
app = create_streamable_http_app(
server=server,
streamable_http_path="/mcp",
auth=None,
)
route = next(r for r in app.routes if isinstance(r, Route) and r.path == "/mcp")
# Without auth, no RequireAuthMiddleware should be applied
assert not isinstance(route.endpoint, RequireAuthMiddleware)
def test_authenticated_requests_still_require_auth(self, bearer_auth_provider):
"""Test that actual requests (not OPTIONS) still require authentication."""
server = FastMCP(name="TestServer")
app = create_streamable_http_app(
server=server,
streamable_http_path="/mcp",
auth=bearer_auth_provider,
)
# Test POST request without auth - should fail with 401
with TestClient(app) as client:
response = client.post("/mcp")
assert response.status_code == 401
assert "www-authenticate" in response.headers
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "tests/server/http/test_http_auth_middleware.py",
"license": "Apache License 2.0",
"lines": 80,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
PrefectHQ/fastmcp:tests/resources/test_resource_template_meta.py | from fastmcp.resources import ResourceTemplate
class TestResourceTemplateMeta:
"""Test ResourceTemplate meta functionality."""
def test_template_meta_parameter(self):
"""Test that meta parameter is properly handled."""
def template_func(param: str) -> str:
return f"Result: {param}"
meta_data = {"version": "2.0", "template": "test"}
template = ResourceTemplate.from_function(
fn=template_func,
uri_template="test://{param}",
name="test_template",
meta=meta_data,
)
assert template.meta == meta_data
mcp_template = template.to_mcp_template()
# MCP template includes fastmcp meta, so check that our meta is included
assert mcp_template.meta is not None
assert meta_data.items() <= mcp_template.meta.items()
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "tests/resources/test_resource_template_meta.py",
"license": "Apache License 2.0",
"lines": 19,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
PrefectHQ/fastmcp:tests/utilities/openapi/test_nullable_fields.py | """Tests for nullable field handling in OpenAPI schemas."""
import pytest
from jsonschema import ValidationError, validate
from fastmcp.utilities.openapi.json_schema_converter import (
convert_openapi_schema_to_json_schema,
)
class TestHandleNullableFields:
"""Test conversion of OpenAPI nullable fields to JSON Schema format."""
def test_root_level_nullable_string(self):
"""Test nullable string at root level."""
input_schema = {"type": "string", "nullable": True}
expected = {"type": ["string", "null"]}
result = convert_openapi_schema_to_json_schema(input_schema, "3.0.0")
assert result == expected
def test_root_level_nullable_integer(self):
"""Test nullable integer at root level."""
input_schema = {"type": "integer", "nullable": True}
expected = {"type": ["integer", "null"]}
result = convert_openapi_schema_to_json_schema(input_schema, "3.0.0")
assert result == expected
def test_root_level_nullable_boolean(self):
"""Test nullable boolean at root level."""
input_schema = {"type": "boolean", "nullable": True}
expected = {"type": ["boolean", "null"]}
result = convert_openapi_schema_to_json_schema(input_schema, "3.0.0")
assert result == expected
def test_property_level_nullable_fields(self):
"""Test nullable fields in properties."""
input_schema = {
"type": "object",
"properties": {
"name": {"type": "string"},
"company": {"type": "string", "nullable": True},
"age": {"type": "integer", "nullable": True},
"active": {"type": "boolean", "nullable": True},
},
}
expected = {
"type": "object",
"properties": {
"name": {"type": "string"},
"company": {"type": ["string", "null"]},
"age": {"type": ["integer", "null"]},
"active": {"type": ["boolean", "null"]},
},
}
result = convert_openapi_schema_to_json_schema(input_schema, "3.0.0")
assert result == expected
def test_mixed_nullable_and_non_nullable(self):
"""Test mix of nullable and non-nullable fields."""
input_schema = {
"type": "object",
"properties": {
"required_field": {"type": "string"},
"optional_nullable": {"type": "string", "nullable": True},
"optional_non_nullable": {"type": "string"},
},
"required": ["required_field"],
}
expected = {
"type": "object",
"properties": {
"required_field": {"type": "string"},
"optional_nullable": {"type": ["string", "null"]},
"optional_non_nullable": {"type": "string"},
},
"required": ["required_field"],
}
result = convert_openapi_schema_to_json_schema(input_schema, "3.0.0")
assert result == expected
def test_nullable_false_ignored(self):
"""Test that nullable: false is ignored (removed but no type change)."""
input_schema = {"type": "string", "nullable": False}
expected = {"type": "string"}
result = convert_openapi_schema_to_json_schema(input_schema, "3.0.0")
assert result == expected
def test_no_nullable_field_unchanged(self):
"""Test that schemas without nullable field are unchanged."""
input_schema = {
"type": "object",
"properties": {"name": {"type": "string"}},
}
expected = input_schema.copy()
result = convert_openapi_schema_to_json_schema(input_schema, "3.0.0")
assert result == expected
def test_nullable_without_type_removes_nullable(self):
"""Test that nullable field is removed even without type."""
input_schema = {"nullable": True, "description": "Some field"}
expected = {"description": "Some field"}
result = convert_openapi_schema_to_json_schema(input_schema, "3.0.0")
assert result == expected
def test_preserves_other_fields(self):
"""Test that other fields are preserved during conversion."""
input_schema = {
"type": "string",
"nullable": True,
"description": "A nullable string",
"example": "test",
"format": "email",
}
expected = {
"type": ["string", "null"],
"description": "A nullable string",
"example": "test",
"format": "email",
}
result = convert_openapi_schema_to_json_schema(input_schema, "3.0.0")
assert result == expected
def test_non_dict_input_unchanged(self):
"""Test that non-dict inputs are returned unchanged."""
# These tests intentionally pass invalid types to check edge case handling
from typing import Any, cast
assert (
convert_openapi_schema_to_json_schema(cast(Any, "string"), "3.0.0")
== "string"
)
assert convert_openapi_schema_to_json_schema(cast(Any, 123), "3.0.0") == 123
assert convert_openapi_schema_to_json_schema(cast(Any, None), "3.0.0") is None
assert convert_openapi_schema_to_json_schema(cast(Any, [1, 2, 3]), "3.0.0") == [
1,
2,
3,
]
def test_performance_optimization_no_copy_when_unchanged(self):
"""Test that schemas without nullable fields return the same object (no copy)."""
input_schema = {
"type": "object",
"properties": {"name": {"type": "string"}},
}
result = convert_openapi_schema_to_json_schema(input_schema, "3.0.0")
# Should return the exact same object, not a copy
assert result is input_schema
def test_union_types_with_nullable(self):
"""Test nullable handling with existing union types (type as array)."""
input_schema = {"type": ["string", "integer"], "nullable": True}
expected = {"type": ["string", "integer", "null"]}
result = convert_openapi_schema_to_json_schema(input_schema, "3.0.0")
assert result == expected
def test_already_nullable_union_unchanged(self):
"""Test that union types already containing null are not modified."""
input_schema = {"type": ["string", "null"], "nullable": True}
expected = {"type": ["string", "null"]}
result = convert_openapi_schema_to_json_schema(input_schema, "3.0.0")
assert result == expected
def test_property_level_union_with_nullable(self):
"""Test nullable handling with union types in properties."""
input_schema = {
"type": "object",
"properties": {"value": {"type": ["string", "integer"], "nullable": True}},
}
expected = {
"type": "object",
"properties": {"value": {"type": ["string", "integer", "null"]}},
}
result = convert_openapi_schema_to_json_schema(input_schema, "3.0.0")
assert result == expected
def test_complex_union_nullable_scenarios(self):
"""Test various complex union type scenarios."""
# Already has null in different position
input1 = {"type": ["null", "string", "integer"], "nullable": True}
result1 = convert_openapi_schema_to_json_schema(input1, "3.0.0")
assert result1 == {"type": ["null", "string", "integer"]}
# Single item array
input2 = {"type": ["string"], "nullable": True}
result2 = convert_openapi_schema_to_json_schema(input2, "3.0.0")
assert result2 == {"type": ["string", "null"]}
def test_oneof_with_nullable(self):
"""Test nullable handling with oneOf constructs."""
input_schema = {
"oneOf": [{"type": "string"}, {"type": "integer"}],
"nullable": True,
}
expected = {
"anyOf": [{"type": "string"}, {"type": "integer"}, {"type": "null"}]
}
result = convert_openapi_schema_to_json_schema(input_schema, "3.0.0")
assert result == expected
def test_anyof_with_nullable(self):
"""Test nullable handling with anyOf constructs."""
input_schema = {
"anyOf": [{"type": "string"}, {"type": "integer"}],
"nullable": True,
}
expected = {
"anyOf": [{"type": "string"}, {"type": "integer"}, {"type": "null"}]
}
result = convert_openapi_schema_to_json_schema(input_schema, "3.0.0")
assert result == expected
def test_anyof_already_nullable(self):
"""Test anyOf that already contains null type."""
input_schema = {
"anyOf": [{"type": "string"}, {"type": "null"}],
"nullable": True,
}
expected = {"anyOf": [{"type": "string"}, {"type": "null"}]}
result = convert_openapi_schema_to_json_schema(input_schema, "3.0.0")
assert result == expected
def test_allof_with_nullable(self):
"""Test nullable handling with allOf constructs."""
input_schema = {
"allOf": [{"type": "string"}, {"minLength": 1}],
"nullable": True,
}
expected = {
"anyOf": [
{"allOf": [{"type": "string"}, {"minLength": 1}]},
{"type": "null"},
]
}
result = convert_openapi_schema_to_json_schema(input_schema, "3.0.0")
assert result == expected
def test_property_level_oneof_with_nullable(self):
"""Test nullable handling with oneOf in properties."""
input_schema = {
"type": "object",
"properties": {
"value": {
"oneOf": [{"type": "string"}, {"type": "integer"}],
"nullable": True,
}
},
}
expected = {
"type": "object",
"properties": {
"value": {
"anyOf": [{"type": "string"}, {"type": "integer"}, {"type": "null"}]
}
},
}
result = convert_openapi_schema_to_json_schema(input_schema, "3.0.0")
assert result == expected
def test_nullable_enum_field(self):
"""Test nullable enum field - issue #2082."""
input_schema = {
"type": "string",
"nullable": True,
"enum": ["VALUE1", "VALUE2", "VALUE3"],
}
expected = {
"type": ["string", "null"],
"enum": ["VALUE1", "VALUE2", "VALUE3", None],
}
result = convert_openapi_schema_to_json_schema(input_schema, "3.0.0")
assert result == expected
def test_nullable_enum_already_contains_null(self):
"""Test nullable enum that already contains None."""
input_schema = {
"type": "string",
"nullable": True,
"enum": ["VALUE1", "VALUE2", None],
}
expected = {
"type": ["string", "null"],
"enum": ["VALUE1", "VALUE2", None],
}
result = convert_openapi_schema_to_json_schema(input_schema, "3.0.0")
assert result == expected
def test_nullable_enum_without_type(self):
"""Test nullable enum without explicit type field."""
input_schema = {
"nullable": True,
"enum": ["VALUE1", "VALUE2", "VALUE3"],
}
expected = {
"enum": ["VALUE1", "VALUE2", "VALUE3", None],
}
result = convert_openapi_schema_to_json_schema(input_schema, "3.0.0")
assert result == expected
def test_non_nullable_enum_unchanged(self):
"""Test that enum without nullable is unchanged."""
input_schema = {
"type": "string",
"enum": ["VALUE1", "VALUE2", "VALUE3"],
}
expected = {
"type": "string",
"enum": ["VALUE1", "VALUE2", "VALUE3"],
}
result = convert_openapi_schema_to_json_schema(input_schema, "3.0.0")
assert result == expected
def test_property_level_nullable_enum(self):
"""Test nullable enum in object properties."""
input_schema = {
"type": "object",
"properties": {
"status": {
"type": "string",
"nullable": True,
"enum": ["ACTIVE", "INACTIVE", "PENDING"],
},
"name": {"type": "string"},
},
}
expected = {
"type": "object",
"properties": {
"status": {
"type": ["string", "null"],
"enum": ["ACTIVE", "INACTIVE", "PENDING", None],
},
"name": {"type": "string"},
},
}
result = convert_openapi_schema_to_json_schema(input_schema, "3.0.0")
assert result == expected
def test_nullable_integer_enum(self):
"""Test nullable enum with integer values."""
input_schema = {
"type": "integer",
"nullable": True,
"enum": [1, 2, 3],
}
expected = {
"type": ["integer", "null"],
"enum": [1, 2, 3, None],
}
result = convert_openapi_schema_to_json_schema(input_schema, "3.0.0")
assert result == expected
class TestNullableFieldValidation:
"""Test that converted schemas validate correctly with jsonschema."""
def test_nullable_string_validates(self):
"""Test that nullable string validates both null and string values."""
openapi_schema = {"type": "string", "nullable": True}
json_schema = convert_openapi_schema_to_json_schema(openapi_schema, "3.0.0")
# Both null and string should validate
validate(instance=None, schema=json_schema)
validate(instance="test", schema=json_schema)
# Other types should fail
with pytest.raises(ValidationError):
validate(instance=123, schema=json_schema)
def test_nullable_enum_validates(self):
"""Test that nullable enum validates null, enum values, and rejects invalid values."""
openapi_schema = {
"type": "string",
"nullable": True,
"enum": ["VALUE1", "VALUE2", "VALUE3"],
}
json_schema = convert_openapi_schema_to_json_schema(openapi_schema, "3.0.0")
# Null and enum values should validate
validate(instance=None, schema=json_schema)
validate(instance="VALUE1", schema=json_schema)
# Invalid values should fail
with pytest.raises(ValidationError):
validate(instance="INVALID", schema=json_schema)
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "tests/utilities/openapi/test_nullable_fields.py",
"license": "Apache License 2.0",
"lines": 345,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
PrefectHQ/fastmcp:tests/client/transports/test_uv_transport.py | import inspect
import sys
import tempfile
from pathlib import Path
import pytest
import fastmcp
from fastmcp.client import Client
from fastmcp.client.client import CallToolResult
from fastmcp.client.transports import StdioTransport, UvStdioTransport
# Detect if running from dev install to use local source instead of PyPI
_is_dev_install = "dev" in fastmcp.__version__
_fastmcp_src_dir = (
Path(__file__).parent.parent.parent.parent if _is_dev_install else None
)
@pytest.mark.timeout(60)
@pytest.mark.client_process
@pytest.mark.skipif(
sys.platform == "win32",
reason="Windows file locking issues with uv client process cleanup",
)
async def test_uv_transport():
with tempfile.TemporaryDirectory() as tmpdir:
script: str = inspect.cleandoc('''
from fastmcp import FastMCP
mcp = FastMCP()
@mcp.tool
def add(x: int, y: int) -> int:
"""Adds two numbers together"""
return x + y
if __name__ == "__main__":
mcp.run()
''')
script_file: Path = Path(tmpdir) / "uv.py"
_ = script_file.write_text(script)
client: Client[UvStdioTransport] = Client(
transport=UvStdioTransport(command=str(script_file), keep_alive=False)
)
async with client:
result: CallToolResult = await client.call_tool("add", {"x": 1, "y": 2})
sum: int = result.data # pyright: ignore[reportAny]
# Explicitly close the transport to ensure subprocess cleanup
await client.transport.close()
assert sum == 3
@pytest.mark.timeout(60)
@pytest.mark.client_process
@pytest.mark.skipif(
sys.platform == "win32",
reason="Windows file locking issues with uv client process cleanup",
)
async def test_uv_transport_module():
with tempfile.TemporaryDirectory() as tmpdir:
module_dir = Path(tmpdir) / "my_module"
module_dir.mkdir()
module_script = inspect.cleandoc('''
from fastmcp import FastMCP
mcp = FastMCP()
@mcp.tool
def add(x: int, y: int) -> int:
"""Adds two numbers together"""
return x + y
''')
script_file: Path = module_dir / "module.py"
_ = script_file.write_text(module_script)
main_script: str = inspect.cleandoc("""
from .module import mcp
mcp.run()
""")
main_file = module_dir / "__main__.py"
_ = main_file.write_text(main_script)
# In dev installs, use --with-editable to install local source.
# In releases, use --with to install from PyPI.
if _is_dev_install and _fastmcp_src_dir:
transport: StdioTransport = StdioTransport(
command="uv",
args=[
"run",
"--directory",
tmpdir,
"--with-editable",
str(_fastmcp_src_dir),
"--module",
"my_module",
],
keep_alive=False,
)
else:
transport = UvStdioTransport(
with_packages=["fastmcp"],
command="my_module",
module=True,
project_directory=Path(tmpdir),
keep_alive=False,
)
client: Client[StdioTransport] = Client(transport=transport)
async with client:
result: CallToolResult = await client.call_tool("add", {"x": 1, "y": 2})
sum: int = result.data # pyright: ignore[reportAny]
# Explicitly close the transport to ensure subprocess cleanup
await client.transport.close()
assert sum == 3
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "tests/client/transports/test_uv_transport.py",
"license": "Apache License 2.0",
"lines": 100,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
PrefectHQ/fastmcp:tests/tools/test_tool_future_annotations.py | from __future__ import annotations
from typing import Annotated, Any, Literal, cast
import mcp.types
from pydantic import Field
from fastmcp import Context, FastMCP
from fastmcp.client import Client
from fastmcp.tools.tool import ToolResult
from fastmcp.utilities.types import Image
fastmcp_server = FastMCP()
@fastmcp_server.tool
def simple_with_context(ctx: Context) -> str:
"""Simple tool with context parameter."""
return f"Request ID: {ctx.request_id}"
@fastmcp_server.tool
def complex_types(
data: dict[str, Any], items: list[int], ctx: Context
) -> dict[str, str | int]:
"""Tool with complex type annotations."""
return {"count": len(items), "request_id": ctx.request_id}
@fastmcp_server.tool
def optional_context(name: str, ctx: Context | None = None) -> str:
"""Tool with optional context."""
if ctx:
return f"Hello {name} from request {ctx.request_id}"
return f"Hello {name}"
@fastmcp_server.tool
def union_with_context(value: int | str, ctx: Context) -> ToolResult:
"""Tool returning ToolResult with context."""
return ToolResult(content=f"Value: {value}, Request: {ctx.request_id}")
@fastmcp_server.tool
def returns_image(ctx: Context) -> Image:
"""Tool that returns an Image."""
# Create a simple 1x1 white pixel PNG
png_data = b"\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00\x01\x00\x00\x00\x01\x08\x02\x00\x00\x00\x90wS\xde\x00\x00\x00\x0cIDATx\x9cc\xf8\x0f\x00\x00\x01\x01\x00\x05\x18\xd4c\x00\x00\x00\x00IEND\xaeB`\x82"
return Image(data=png_data, format="png")
@fastmcp_server.tool
async def async_with_context(ctx: Context) -> str:
"""Async tool with context."""
return f"Async request: {ctx.request_id}"
@fastmcp_server.tool
def annotated_with_context(
query: Annotated[str, Field(description="Search query")], ctx: Context
) -> str:
"""Tool using Annotated + Field with context."""
return f"Result for: {query}"
@fastmcp_server.tool
def literal_with_context(mode: Literal["fast", "slow"], ctx: Context) -> str:
"""Tool using Literal with context."""
return f"Mode: {mode}"
class TestFutureAnnotations:
async def test_simple_with_context(self):
async with Client(fastmcp_server) as client:
result = await client.call_tool("simple_with_context", {})
assert "Request ID:" in cast(mcp.types.TextContent, result.content[0]).text
async def test_complex_types(self):
async with Client(fastmcp_server) as client:
result = await client.call_tool(
"complex_types", {"data": {"key": "value"}, "items": [1, 2, 3]}
)
# Check the result is valid JSON with expected values
import json
data = json.loads(cast(mcp.types.TextContent, result.content[0]).text)
assert data["count"] == 3
assert "request_id" in data
async def test_optional_context(self):
async with Client(fastmcp_server) as client:
result = await client.call_tool("optional_context", {"name": "World"})
assert (
"Hello World from request"
in cast(mcp.types.TextContent, result.content[0]).text
)
async def test_union_with_context(self):
async with Client(fastmcp_server) as client:
result = await client.call_tool("union_with_context", {"value": 42})
assert (
"Value: 42, Request:"
in cast(mcp.types.TextContent, result.content[0]).text
)
async def test_returns_image(self):
async with Client(fastmcp_server) as client:
result = await client.call_tool("returns_image", {})
assert result.content[0].type == "image"
assert result.content[0].mimeType == "image/png"
async def test_async_with_context(self):
async with Client(fastmcp_server) as client:
result = await client.call_tool("async_with_context", {})
assert (
"Async request:" in cast(mcp.types.TextContent, result.content[0]).text
)
async def test_annotated_with_context(self):
"""Test Annotated[str, Field(...)] works with Context and future annotations."""
async with Client(fastmcp_server) as client:
result = await client.call_tool(
"annotated_with_context", {"query": "hello"}
)
assert (
"Result for: hello"
in cast(mcp.types.TextContent, result.content[0]).text
)
async def test_literal_with_context(self):
"""Test Literal types work with Context and future annotations."""
async with Client(fastmcp_server) as client:
result = await client.call_tool("literal_with_context", {"mode": "fast"})
assert "Mode: fast" in cast(mcp.types.TextContent, result.content[0]).text
async def test_modern_union_syntax_works(self):
"""Test that modern | union syntax works with future annotations."""
# This demonstrates that our solution works with | syntax when types
# are available in module globals
# Define a tool with modern union syntax
@fastmcp_server.tool
def modern_union_tool(value: str | int | None) -> str | None:
"""Tool using modern | union syntax throughout."""
if value is None:
return None
return f"processed: {value}"
async with Client(fastmcp_server) as client:
# Test with string
result = await client.call_tool("modern_union_tool", {"value": "hello"})
assert (
"processed: hello"
in cast(mcp.types.TextContent, result.content[0]).text
)
# Test with int
result = await client.call_tool("modern_union_tool", {"value": 42})
assert (
"processed: 42" in cast(mcp.types.TextContent, result.content[0]).text
)
# Test with None
result = await client.call_tool("modern_union_tool", {"value": None})
# When function returns None, FastMCP returns empty content
assert (
len(result.content) == 0
or cast(mcp.types.TextContent, result.content[0]).text == "null"
)
def test_closure_scoped_types_with_builtins():
"""Closure-scoped tools work when annotations only reference builtins."""
def create_closure():
mcp = FastMCP()
@mcp.tool
def closure_tool(value: str | None) -> str:
return str(value)
return mcp
create_closure()
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "tests/tools/test_tool_future_annotations.py",
"license": "Apache License 2.0",
"lines": 143,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
PrefectHQ/fastmcp:tests/server/test_tool_transformation.py | import httpx
from fastmcp import FastMCP
from fastmcp.client import Client
from fastmcp.server.transforms import ToolTransform
from fastmcp.tools.tool_transform import (
ArgTransformConfig,
ToolTransformConfig,
)
async def test_tool_transformation_via_layer():
"""Test that tool transformations work via add_transform(ToolTransform(...))."""
mcp = FastMCP("Test Server")
@mcp.tool()
def echo(message: str) -> str:
"""Echo back the message provided."""
return message
mcp.add_transform(
ToolTransform({"echo": ToolTransformConfig(name="echo_transformed")})
)
tools = await mcp.list_tools()
assert len(tools) == 1
assert any(t.name == "echo_transformed" for t in tools)
tool = next(t for t in tools if t.name == "echo_transformed")
assert tool.name == "echo_transformed"
async def test_transformed_tool_filtering():
"""Test that tool transformations add tags that affect filtering."""
mcp = FastMCP("Test Server")
@mcp.tool()
def echo(message: str) -> str:
"""Echo back the message provided."""
return message
# Add transformation that adds tags
mcp.add_transform(
ToolTransform(
{
"echo": ToolTransformConfig(
name="echo_transformed", tags={"enabled_tools"}
)
}
)
)
# Enable only tools with the enabled_tools tag
mcp.enable(tags={"enabled_tools"}, only=True)
tools = await mcp.list_tools()
# With transformation applied, the tool now has the enabled_tools tag
assert len(tools) == 1
async def test_transformed_tool_structured_output_without_annotation():
    """Test that transformed tools generate structured output when original tool has no return annotation.

    Ref: https://github.com/PrefectHQ/fastmcp/issues/1369
    """
    # NOTE: the redundant function-local `from fastmcp.client import Client`
    # was removed — `Client` is already imported at module level.
    mcp = FastMCP("Test Server")

    @mcp.tool()
    def tool_without_annotation(message: str):  # No return annotation
        """A tool without return type annotation."""
        return {"result": "processed", "input": message}

    mcp.add_transform(
        ToolTransform(
            {"tool_without_annotation": ToolTransformConfig(name="transformed_tool")}
        )
    )

    # Test with client to verify structured output is populated end-to-end.
    async with Client(mcp) as client:
        result = await client.call_tool("transformed_tool", {"message": "test"})

        # Structured output should be populated even without return annotation
        assert result.data is not None
        assert result.data == {"result": "processed", "input": "test"}
async def test_layer_based_transforms():
    """A ToolTransform added after registration still renames the tool."""
    server = FastMCP("Test Server")

    @server.tool()
    def my_tool() -> str:
        return "hello"

    # Transform registered after the tool itself.
    server.add_transform(
        ToolTransform({"my_tool": ToolTransformConfig(name="renamed_tool")})
    )

    listed = await server.list_tools()
    # Exactly one tool, carrying the new name.
    assert [t.name for t in listed] == ["renamed_tool"]
async def test_server_level_transforms_apply_to_mounted_servers():
    """Server-level transforms also rename tools exposed by mounted servers."""
    parent = FastMCP("Main")
    child = FastMCP("Sub")

    @child.tool()
    def sub_tool() -> str:
        return "hello from sub"

    parent.mount(child)

    # Register the rename on the parent, targeting the mounted tool.
    parent.add_transform(
        ToolTransform({"sub_tool": ToolTransformConfig(name="renamed_sub_tool")})
    )

    names = [t.name for t in await parent.list_tools()]
    assert "renamed_sub_tool" in names
    assert "sub_tool" not in names
async def test_tool_transform_config_enabled_false_hides_tool():
    """A ToolTransformConfig with enabled=False removes the tool from list_tools."""
    server = FastMCP("Test Server")

    @server.tool()
    def visible_tool() -> str:
        return "visible"

    @server.tool()
    def hidden_tool() -> str:
        return "hidden"

    # Disable only one of the two tools via transformation.
    server.add_transform(
        ToolTransform({"hidden_tool": ToolTransformConfig(enabled=False)})
    )

    names = [t.name for t in await server.list_tools()]
    assert "visible_tool" in names
    assert "hidden_tool" not in names
async def test_tool_transform_config_enabled_false_with_rename():
    """enabled=False hides the tool even when combined with a rename."""
    server = FastMCP("Test Server")

    @server.tool()
    def my_tool() -> str:
        return "result"

    # Rename AND disable in a single config entry.
    server.add_transform(
        ToolTransform(
            {"my_tool": ToolTransformConfig(name="renamed_and_disabled", enabled=False)}
        )
    )

    names = [t.name for t in await server.list_tools()]
    # Neither the original nor the renamed identifier should be listed.
    assert "my_tool" not in names
    assert "renamed_and_disabled" not in names
async def test_tool_transform_config_enabled_true_keeps_tool_visible():
    """An explicit enabled=True behaves like the default: the tool stays listed."""
    server = FastMCP("Test Server")

    @server.tool()
    def my_tool() -> str:
        return "result"

    # Explicit enabled=True should be a no-op relative to the default.
    server.add_transform(ToolTransform({"my_tool": ToolTransformConfig(enabled=True)}))

    assert "my_tool" in [t.name for t in await server.list_tools()]
async def test_tool_transform_config_enabled_true_overrides_earlier_disable():
    """A later enabled=True transform re-enables a previously disabled tool."""
    server = FastMCP("Test Server")

    @server.tool()
    def my_tool() -> str:
        return "result"

    # Start with the tool disabled...
    server.disable(names={"my_tool"})
    assert "my_tool" not in [t.name for t in await server.list_tools()]

    # ...then re-enable it via a transform; later transforms win.
    server.add_transform(ToolTransform({"my_tool": ToolTransformConfig(enabled=True)}))

    assert "my_tool" in [t.name for t in await server.list_tools()]
async def test_openapi_path_params_not_duplicated_in_description():
    """Path parameter details should live in inputSchema, not the description.

    Regression test for https://github.com/PrefectHQ/fastmcp/issues/3130 — hiding
    a path param via ToolTransform left stale references in the description
    because the description was generated before transforms ran. The fix is to
    keep parameter docs in inputSchema only, where transforms can control them.
    """
    # Minimal OpenAPI 3.1 document with two required, described path parameters.
    spec = {
        "openapi": "3.1.0",
        "info": {"title": "Test", "version": "0.1.0"},
        "paths": {
            "/api/{version}/users/{user_id}": {
                "get": {
                    "operationId": "my_endpoint",
                    "summary": "My endpoint",
                    "parameters": [
                        {
                            "name": "version",
                            "in": "path",
                            "required": True,
                            "description": "API version",
                            "schema": {"type": "string"},
                        },
                        {
                            "name": "user_id",
                            "in": "path",
                            "required": True,
                            "description": "The user ID",
                            "schema": {"type": "string"},
                        },
                    ],
                    "responses": {"200": {"description": "OK"}},
                },
            },
        },
    }

    # No HTTP requests are actually issued; the client just supplies a base URL.
    async with httpx.AsyncClient(base_url="http://localhost") as http_client:
        mcp = FastMCP.from_openapi(openapi_spec=spec, client=http_client)

        # Hide one of the two path params
        mcp.add_transform(
            ToolTransform(
                {
                    "my_endpoint": ToolTransformConfig(
                        arguments={
                            "version": ArgTransformConfig(hide=True, default="v1"),
                        }
                    )
                }
            )
        )

        async with Client(mcp) as client:
            tools = await client.list_tools()
            tool = tools[0]
            # Description should be the summary only — no parameter details
            assert tool.description == "My endpoint"
            # Hidden param gone from schema, visible param still present
            assert "version" not in tool.inputSchema.get("properties", {})
            assert "user_id" in tool.inputSchema["properties"]
            assert (
                tool.inputSchema["properties"]["user_id"]["description"]
                == "The user ID"
            )
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "tests/server/test_tool_transformation.py",
"license": "Apache License 2.0",
"lines": 219,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
PrefectHQ/fastmcp:tests/integration_tests/test_github_mcp_remote.py | import json
import os
import pytest
from mcp import McpError
from mcp.types import TextContent, Tool
from fastmcp import Client
from fastmcp.client import StreamableHttpTransport
from fastmcp.client.auth.bearer import BearerAuth
# Publicly hosted GitHub MCP endpoint exercised by these integration tests.
GITHUB_REMOTE_MCP_URL = "https://api.githubcopilot.com/mcp/"
HEADER_AUTHORIZATION = "Authorization"
# Personal access token used to authenticate against the remote server.
FASTMCP_GITHUB_TOKEN = os.getenv("FASTMCP_GITHUB_TOKEN")
# Mark all tests in this module as expected-to-fail when no GitHub token is set.
# NOTE(review): xfail still RUNS the tests; the original comment said "skip" —
# confirm whether pytest.mark.skipif was intended instead.
pytestmark = pytest.mark.xfail(
    not FASTMCP_GITHUB_TOKEN,
    reason="The FASTMCP_GITHUB_TOKEN environment variable is not set or empty",
)
@pytest.fixture(name="streamable_http_client")
def fixture_streamable_http_client() -> Client[StreamableHttpTransport]:
    """Build a client for the remote GitHub MCP server, authenticated via bearer token."""
    assert FASTMCP_GITHUB_TOKEN is not None
    transport = StreamableHttpTransport(
        url=GITHUB_REMOTE_MCP_URL,
        auth=BearerAuth(FASTMCP_GITHUB_TOKEN),
    )
    return Client(transport)
class TestGithubMCPRemote:
    """Integration tests against the hosted GitHub MCP server over streamable HTTP."""

    async def test_connect_disconnect(
        self,
        streamable_http_client: Client[StreamableHttpTransport],
    ):
        """Connect via the context manager, then disconnect explicitly."""
        async with streamable_http_client:
            assert streamable_http_client.is_connected() is True
            # Force an explicit disconnect inside the context manager.
            await streamable_http_client._disconnect()  # pylint: disable=W0212 (protected-access)
            assert streamable_http_client.is_connected() is False

    async def test_ping(self, streamable_http_client: Client[StreamableHttpTransport]):
        """Test pinging the server."""
        async with streamable_http_client:
            assert streamable_http_client.is_connected() is True
            result = await streamable_http_client.ping()
            assert result is True

    async def test_list_tools(
        self, streamable_http_client: Client[StreamableHttpTransport]
    ):
        """Test listing the MCP tools"""
        async with streamable_http_client:
            assert streamable_http_client.is_connected()
            tools = await streamable_http_client.list_tools()
            assert isinstance(tools, list)
            assert len(tools) > 0  # Ensure the tools list is non-empty
            # Every tool should be well-formed: named, described, with a schema.
            for tool in tools:
                assert isinstance(tool, Tool)
                assert len(tool.name) > 0
                assert tool.description is not None and len(tool.description) > 0
                assert isinstance(tool.inputSchema, dict)
                assert len(tool.inputSchema) > 0

    async def test_list_resources(
        self, streamable_http_client: Client[StreamableHttpTransport]
    ):
        """Test listing the MCP resources"""
        async with streamable_http_client:
            assert streamable_http_client.is_connected()
            resources = await streamable_http_client.list_resources()
            assert isinstance(resources, list)
            # The GitHub server currently exposes no resources.
            assert len(resources) == 0

    async def test_list_prompts(
        self, streamable_http_client: Client[StreamableHttpTransport]
    ):
        """Test listing the MCP prompts"""
        async with streamable_http_client:
            assert streamable_http_client.is_connected()
            prompts = await streamable_http_client.list_prompts()
            # there is at least one prompt (as of July 2025)
            assert len(prompts) >= 1

    async def test_call_tool_ko(
        self, streamable_http_client: Client[StreamableHttpTransport]
    ):
        """Test calling a non-existing tool"""
        async with streamable_http_client:
            assert streamable_http_client.is_connected()
            # The server's exact error wording varies, hence the alternation.
            with pytest.raises(McpError, match=r"unknown tool|tool not found"):
                await streamable_http_client.call_tool("foo")

    async def test_call_tool_list_commits(
        self,
        streamable_http_client: Client[StreamableHttpTransport],
    ):
        """Test calling a list_commit tool"""
        async with streamable_http_client:
            assert streamable_http_client.is_connected()
            result = await streamable_http_client.call_tool(
                "list_commits", {"owner": "prefecthq", "repo": "fastmcp"}
            )
            # at this time, the github server does not support structured content
            assert result.structured_content is None
            assert isinstance(result.content, list)
            assert len(result.content) == 1
            assert isinstance(result.content[0], TextContent)
            # The payload is JSON text mirroring the GitHub commits API shape.
            commits = json.loads(result.content[0].text)
            for commit in commits:
                assert isinstance(commit, dict)
                assert "sha" in commit
                assert "commit" in commit
                assert "author" in commit["commit"]
                assert len(commit["commit"]["author"]["date"]) > 0
                assert len(commit["commit"]["author"]["name"]) > 0
                assert len(commit["commit"]["author"]["email"]) > 0
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "tests/integration_tests/test_github_mcp_remote.py",
"license": "Apache License 2.0",
"lines": 105,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
PrefectHQ/fastmcp:tests/cli/test_install.py | from pathlib import Path
from fastmcp.cli.install import install_app
from fastmcp.cli.install.stdio import install_stdio
class TestInstallApp:
    """Test the install subapp."""

    def test_install_app_exists(self):
        """Test that the install app is properly configured."""
        # install_app.name is a tuple in cyclopts
        assert "install" in install_app.name
        assert "Install MCP servers" in install_app.help

    def test_install_commands_registered(self):
        """Test that all install commands are registered."""
        # Check help text/structure without relying on cyclopts internals.
        assert hasattr(install_app, "help")
        assert "Install MCP servers" in install_app.help

        # Each registered subcommand should parse `--help` without error.
        subcommands = [
            "claude-code",
            "claude-desktop",
            "cursor",
            "gemini-cli",
            "goose",
            "mcp-json",
            "stdio",
        ]
        for subcommand in subcommands:
            try:
                install_app.parse_args([subcommand, "--help"])
            except SystemExit:
                # Help commands exit with 0, that's expected
                pass
class TestClaudeCodeInstall:
    """Test claude-code install command."""

    def test_claude_code_basic(self):
        """Test basic claude-code install command parsing."""
        # Parse command with correct parameter names
        command, bound, _ = install_app.parse_args(
            ["claude-code", "server.py", "--name", "test-server"]
        )

        # Verify parsing was successful
        assert command is not None
        assert bound.arguments["server_spec"] == "server.py"
        assert bound.arguments["server_name"] == "test-server"

    def test_claude_code_with_options(self):
        """Test claude-code install with various options."""
        command, bound, _ = install_app.parse_args(
            [
                "claude-code",
                "server.py",
                "--name",
                "test-server",
                "--with",
                "package1",
                "--with",
                "package2",
                "--env",
                "VAR1=value1",
            ]
        )

        # Repeatable flags accumulate into lists.
        assert bound.arguments["with_packages"] == ["package1", "package2"]
        assert bound.arguments["env_vars"] == ["VAR1=value1"]

    def test_claude_code_with_new_options(self):
        """Test claude-code install with new uv options."""
        # NOTE: the redundant function-local `from pathlib import Path` was
        # removed — Path is already imported at module level.
        command, bound, _ = install_app.parse_args(
            [
                "claude-code",
                "server.py",
                "--python",
                "3.11",
                "--project",
                "/workspace",
                "--with-requirements",
                "requirements.txt",
            ]
        )

        assert bound.arguments["python"] == "3.11"
        assert bound.arguments["project"] == Path("/workspace")
        assert bound.arguments["with_requirements"] == Path("requirements.txt")
class TestClaudeDesktopInstall:
    """Test claude-desktop install command."""

    def test_claude_desktop_basic(self):
        """Test basic claude-desktop install command parsing."""
        command, bound, _ = install_app.parse_args(
            ["claude-desktop", "server.py", "--name", "test-server"]
        )

        assert command is not None
        assert bound.arguments["server_spec"] == "server.py"
        assert bound.arguments["server_name"] == "test-server"

    def test_claude_desktop_with_env_vars(self):
        """Test claude-desktop install with environment variables."""
        command, bound, _ = install_app.parse_args(
            [
                "claude-desktop",
                "server.py",
                "--name",
                "test-server",
                "--env",
                "VAR1=value1",
                "--env",
                "VAR2=value2",
            ]
        )

        # Repeated --env flags accumulate into a list.
        assert bound.arguments["env_vars"] == ["VAR1=value1", "VAR2=value2"]

    def test_claude_desktop_with_new_options(self):
        """Test claude-desktop install with new uv options."""
        # NOTE: the redundant function-local `from pathlib import Path` was
        # removed — Path is already imported at module level.
        command, bound, _ = install_app.parse_args(
            [
                "claude-desktop",
                "server.py",
                "--python",
                "3.10",
                "--project",
                "/my/project",
                "--with-requirements",
                "reqs.txt",
            ]
        )

        assert bound.arguments["python"] == "3.10"
        assert bound.arguments["project"] == Path("/my/project")
        assert bound.arguments["with_requirements"] == Path("reqs.txt")

    def test_claude_desktop_with_config_path(self):
        """Test claude-desktop install with custom config path."""
        command, bound, _ = install_app.parse_args(
            ["claude-desktop", "server.py", "--config-path", "/custom/path/Claude"]
        )

        assert bound.arguments["config_path"] == Path("/custom/path/Claude")

    def test_claude_desktop_without_config_path(self):
        """Test claude-desktop install without config path defaults to None."""
        command, bound, _ = install_app.parse_args(["claude-desktop", "server.py"])

        assert bound.arguments.get("config_path") is None
class TestCursorInstall:
    """Test cursor install command."""

    def test_cursor_basic(self):
        """Test basic cursor install command parsing."""
        command, bound, _ = install_app.parse_args(
            ["cursor", "server.py", "--name", "test-server"]
        )

        assert command is not None
        arguments = bound.arguments
        assert arguments["server_spec"] == "server.py"
        assert arguments["server_name"] == "test-server"

    def test_cursor_with_options(self):
        """Test cursor install with options."""
        _, bound, _ = install_app.parse_args(
            ["cursor", "server.py", "--name", "test-server"]
        )

        # Both positional and named arguments should bind as expected.
        expected = {"server_spec": "server.py", "server_name": "test-server"}
        for key, value in expected.items():
            assert bound.arguments[key] == value
class TestGooseInstall:
    """Test goose install command."""

    def test_goose_basic(self):
        """Test basic goose install command parsing."""
        command, bound, _ = install_app.parse_args(
            ["goose", "server.py", "--name", "test-server"]
        )

        assert command is not None
        assert bound.arguments["server_spec"] == "server.py"
        assert bound.arguments["server_name"] == "test-server"

    def test_goose_with_options(self):
        """Test goose install with various options."""
        argv = [
            "goose",
            "server.py",
            "--name",
            "test-server",
            "--with",
            "package1",
            "--with",
            "package2",
            "--env",
            "VAR1=value1",
        ]
        _, bound, _ = install_app.parse_args(argv)

        # Repeatable flags accumulate into lists.
        assert bound.arguments["with_packages"] == ["package1", "package2"]
        assert bound.arguments["env_vars"] == ["VAR1=value1"]

    def test_goose_with_python(self):
        """Test goose install with --python option."""
        _, bound, _ = install_app.parse_args(
            ["goose", "server.py", "--python", "3.11"]
        )

        assert bound.arguments["python"] == "3.11"
class TestMcpJsonInstall:
    """Test mcp-json install command."""

    def test_mcp_json_basic(self):
        """Test basic mcp-json install command parsing."""
        argv = ["mcp-json", "server.py", "--name", "test-server"]
        command, bound, _ = install_app.parse_args(argv)

        assert command is not None
        arguments = bound.arguments
        assert arguments["server_spec"] == "server.py"
        assert arguments["server_name"] == "test-server"

    def test_mcp_json_with_copy(self):
        """Test mcp-json install with copy to clipboard option."""
        _, bound, _ = install_app.parse_args(
            ["mcp-json", "server.py", "--name", "test-server", "--copy"]
        )

        assert bound.arguments["copy"] is True
class TestStdioInstall:
    """Test stdio install command."""

    def test_stdio_basic(self):
        """Test basic stdio install command parsing."""
        command, bound, _ = install_app.parse_args(["stdio", "server.py"])

        assert command is not None
        assert bound.arguments["server_spec"] == "server.py"

    def test_stdio_with_copy(self):
        """Test stdio install with copy to clipboard option."""
        command, bound, _ = install_app.parse_args(["stdio", "server.py", "--copy"])

        assert bound.arguments["copy"] is True

    def test_stdio_with_packages(self):
        """Test stdio install with additional packages."""
        command, bound, _ = install_app.parse_args(
            ["stdio", "server.py", "--with", "requests", "--with", "httpx"]
        )

        assert bound.arguments["with_packages"] == ["requests", "httpx"]

    @staticmethod
    def _capture_install_stdio(**kwargs):
        """Run install_stdio with stdout captured; return (result, printed_output).

        Uses contextlib.redirect_stdout instead of manually swapping
        sys.stdout, so stdout is restored even if install_stdio raises.
        """
        import io
        from contextlib import redirect_stdout

        buffer = io.StringIO()
        with redirect_stdout(buffer):
            result = install_stdio(**kwargs)
        return result, buffer.getvalue()

    def test_install_stdio_generates_command(self, tmp_path: Path):
        """Test that install_stdio produces a shell command containing fastmcp run."""
        server_file = tmp_path / "server.py"
        server_file.write_text("# placeholder")

        result, output = self._capture_install_stdio(
            file=server_file, server_object=None
        )

        assert result is True
        assert "fastmcp" in output
        assert "run" in output
        assert str(server_file.resolve()) in output

    def test_install_stdio_with_object(self, tmp_path: Path):
        """Test that install_stdio includes the :object suffix."""
        server_file = tmp_path / "server.py"
        server_file.write_text("# placeholder")

        result, output = self._capture_install_stdio(
            file=server_file, server_object="app"
        )

        assert result is True
        assert f"{server_file.resolve()}:app" in output
class TestGeminiCliInstall:
    """Test gemini-cli install command."""

    def test_gemini_cli_basic(self):
        """Test basic gemini-cli install command parsing."""
        # Parse command with correct parameter names
        command, bound, _ = install_app.parse_args(
            ["gemini-cli", "server.py", "--name", "test-server"]
        )

        # Verify parsing was successful
        assert command is not None
        assert bound.arguments["server_spec"] == "server.py"
        assert bound.arguments["server_name"] == "test-server"

    def test_gemini_cli_with_options(self):
        """Test gemini-cli install with various options."""
        command, bound, _ = install_app.parse_args(
            [
                "gemini-cli",
                "server.py",
                "--name",
                "test-server",
                "--with",
                "package1",
                "--with",
                "package2",
                "--env",
                "VAR1=value1",
            ]
        )

        # Repeatable flags accumulate into lists.
        assert bound.arguments["with_packages"] == ["package1", "package2"]
        assert bound.arguments["env_vars"] == ["VAR1=value1"]

    def test_gemini_cli_with_new_options(self):
        """Test gemini-cli install with new uv options."""
        # NOTE: the redundant function-local `from pathlib import Path` was
        # removed — Path is already imported at module level.
        command, bound, _ = install_app.parse_args(
            [
                "gemini-cli",
                "server.py",
                "--python",
                "3.11",
                "--project",
                "/workspace",
                "--with-requirements",
                "requirements.txt",
            ]
        )

        assert bound.arguments["python"] == "3.11"
        assert bound.arguments["project"] == Path("/workspace")
        assert bound.arguments["with_requirements"] == Path("requirements.txt")
class TestInstallCommandParsing:
    """Test command parsing and error handling."""

    def test_install_minimal_args(self):
        """Test install commands with minimal required arguments."""
        # Each command should work with just a server spec
        # (mcp-json is covered separately by test_mcp_json_minimal below).
        commands_to_test = [
            ["claude-code", "server.py"],
            ["claude-desktop", "server.py"],
            ["cursor", "server.py"],
            ["gemini-cli", "server.py"],
            ["goose", "server.py"],
            ["stdio", "server.py"],
        ]

        for cmd_args in commands_to_test:
            command, bound, _ = install_app.parse_args(cmd_args)
            assert command is not None
            assert bound.arguments["server_spec"] == "server.py"

    def test_mcp_json_minimal(self):
        """Test that mcp-json works with minimal arguments."""
        # Should work with just server spec
        command, bound, _ = install_app.parse_args(["mcp-json", "server.py"])
        assert command is not None
        assert bound.arguments["server_spec"] == "server.py"

    def test_stdio_minimal(self):
        """Test that stdio works with minimal arguments."""
        command, bound, _ = install_app.parse_args(["stdio", "server.py"])
        assert command is not None
        assert bound.arguments["server_spec"] == "server.py"

    def test_python_option(self):
        """Test --python option for all install commands."""
        commands_to_test = [
            ["claude-code", "server.py", "--python", "3.11"],
            ["claude-desktop", "server.py", "--python", "3.11"],
            ["cursor", "server.py", "--python", "3.11"],
            ["gemini-cli", "server.py", "--python", "3.11"],
            ["goose", "server.py", "--python", "3.11"],
            ["mcp-json", "server.py", "--python", "3.11"],
            ["stdio", "server.py", "--python", "3.11"],
        ]

        for cmd_args in commands_to_test:
            command, bound, _ = install_app.parse_args(cmd_args)
            assert command is not None
            assert bound.arguments["python"] == "3.11"

    def test_with_requirements_option(self):
        """Test --with-requirements option for all install commands."""
        # NOTE(review): "goose" is absent from this matrix although it appears
        # in test_python_option — confirm whether goose supports
        # --with-requirements or whether this is an accidental omission.
        commands_to_test = [
            ["claude-code", "server.py", "--with-requirements", "requirements.txt"],
            ["claude-desktop", "server.py", "--with-requirements", "requirements.txt"],
            ["cursor", "server.py", "--with-requirements", "requirements.txt"],
            ["gemini-cli", "server.py", "--with-requirements", "requirements.txt"],
            ["mcp-json", "server.py", "--with-requirements", "requirements.txt"],
            ["stdio", "server.py", "--with-requirements", "requirements.txt"],
        ]

        for cmd_args in commands_to_test:
            command, bound, _ = install_app.parse_args(cmd_args)
            assert command is not None
            assert str(bound.arguments["with_requirements"]) == "requirements.txt"

    def test_project_option(self):
        """Test --project option for all install commands."""
        # NOTE(review): "goose" is absent here as well — see note above.
        commands_to_test = [
            ["claude-code", "server.py", "--project", "/path/to/project"],
            ["claude-desktop", "server.py", "--project", "/path/to/project"],
            ["cursor", "server.py", "--project", "/path/to/project"],
            ["gemini-cli", "server.py", "--project", "/path/to/project"],
            ["mcp-json", "server.py", "--project", "/path/to/project"],
            ["stdio", "server.py", "--project", "/path/to/project"],
        ]

        for cmd_args in commands_to_test:
            command, bound, _ = install_app.parse_args(cmd_args)
            assert command is not None
            # Compare via Path so the check is platform-separator agnostic.
            assert str(bound.arguments["project"]) == str(Path("/path/to/project"))
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "tests/cli/test_install.py",
"license": "Apache License 2.0",
"lines": 372,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
PrefectHQ/fastmcp:tests/cli/test_shared.py | from fastmcp.cli.cli import _parse_env_var
class TestEnvVarParsing:
    """Test environment variable parsing functionality."""

    def test_parse_env_var_simple(self):
        """Test parsing simple environment variable."""
        parsed_key, parsed_value = _parse_env_var("API_KEY=secret123")

        assert parsed_key == "API_KEY"
        assert parsed_value == "secret123"

    def test_parse_env_var_with_equals_in_value(self):
        """Test parsing env var with equals signs in the value."""
        # Only the first '=' separates key from value; the rest stays verbatim.
        parsed_key, parsed_value = _parse_env_var(
            "DATABASE_URL=postgresql://user:pass@host:5432/db"
        )

        assert parsed_key == "DATABASE_URL"
        assert parsed_value == "postgresql://user:pass@host:5432/db"

    def test_parse_env_var_with_spaces(self):
        """Test parsing env var with spaces (should be stripped)."""
        parsed_key, parsed_value = _parse_env_var(" API_KEY = secret with spaces ")

        # Outer whitespace is stripped; interior spaces are preserved.
        assert parsed_key == "API_KEY"
        assert parsed_value == "secret with spaces"

    def test_parse_env_var_empty_value(self):
        """Test parsing env var with empty value."""
        parsed_key, parsed_value = _parse_env_var("EMPTY_VAR=")

        assert parsed_key == "EMPTY_VAR"
        assert parsed_value == ""
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "tests/cli/test_shared.py",
"license": "Apache License 2.0",
"lines": 23,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
PrefectHQ/fastmcp:tests/cli/test_cursor.py | import base64
import json
from pathlib import Path
from unittest.mock import Mock, patch
import pytest
from fastmcp.cli.install.cursor import (
cursor_command,
generate_cursor_deeplink,
install_cursor,
install_cursor_workspace,
open_deeplink,
)
from fastmcp.mcp_config import StdioMCPServer
class TestCursorDeeplinkGeneration:
    """Test cursor deeplink generation functionality."""

    def test_generate_deeplink_basic(self):
        """Test basic deeplink generation."""
        server_config = StdioMCPServer(
            command="uv",
            args=["run", "--with", "fastmcp", "fastmcp", "run", "server.py"],
        )

        deeplink = generate_cursor_deeplink("test-server", server_config)

        # The deeplink targets Cursor's MCP-install handler, passing the server
        # name and a base64url-encoded JSON config as query parameters.
        assert deeplink.startswith("cursor://anysphere.cursor-deeplink/mcp/install?")
        assert "name=test-server" in deeplink
        assert "config=" in deeplink

        # Verify base64 encoding round-trips back to the original config.
        config_part = deeplink.split("config=")[1]
        decoded = base64.urlsafe_b64decode(config_part).decode()
        config_data = json.loads(decoded)
        assert config_data["command"] == "uv"
        assert config_data["args"] == [
            "run",
            "--with",
            "fastmcp",
            "fastmcp",
            "run",
            "server.py",
        ]

    def test_generate_deeplink_with_env_vars(self):
        """Test deeplink generation with environment variables."""
        server_config = StdioMCPServer(
            command="uv",
            args=["run", "--with", "fastmcp", "fastmcp", "run", "server.py"],
            env={"API_KEY": "secret123", "DEBUG": "true"},
        )

        deeplink = generate_cursor_deeplink("my-server", server_config)

        # Decode and verify the env mapping survives encoding.
        config_part = deeplink.split("config=")[1]
        decoded = base64.urlsafe_b64decode(config_part).decode()
        config_data = json.loads(decoded)
        assert config_data["env"] == {"API_KEY": "secret123", "DEBUG": "true"}

    def test_generate_deeplink_special_characters(self):
        """Test deeplink generation with special characters in server name."""
        server_config = StdioMCPServer(
            command="uv",
            args=["run", "--with", "fastmcp", "fastmcp", "run", "server.py"],
        )

        # Test with spaces and special chars in name - should be URL encoded
        deeplink = generate_cursor_deeplink("my server (test)", server_config)

        # Spaces and parentheses must be URL-encoded
        assert "name=my%20server%20%28test%29" in deeplink
        # Ensure no unencoded version appears
        assert "name=my server (test)" not in deeplink

    def test_generate_deeplink_empty_config(self):
        """Test deeplink generation with minimal config."""
        server_config = StdioMCPServer(command="python", args=["server.py"])

        deeplink = generate_cursor_deeplink("minimal", server_config)

        config_part = deeplink.split("config=")[1]
        decoded = base64.urlsafe_b64decode(config_part).decode()
        config_data = json.loads(decoded)
        assert config_data["command"] == "python"
        assert config_data["args"] == ["server.py"]
        assert config_data["env"] == {}  # Empty env dict is included

    def test_generate_deeplink_complex_args(self):
        """Test deeplink generation with complex arguments."""
        server_config = StdioMCPServer(
            command="uv",
            args=[
                "run",
                "--with",
                "fastmcp",
                "--with",
                "numpy>=1.20",
                "--with-editable",
                "/path/to/local/package",
                "fastmcp",
                "run",
                "server.py:CustomServer",
            ],
        )

        deeplink = generate_cursor_deeplink("complex-server", server_config)

        # Version specifiers, editable paths, and :object suffixes must all
        # survive the encode/decode round trip unchanged.
        config_part = deeplink.split("config=")[1]
        decoded = base64.urlsafe_b64decode(config_part).decode()
        config_data = json.loads(decoded)
        assert "--with-editable" in config_data["args"]
        assert "server.py:CustomServer" in config_data["args"]

    def test_generate_deeplink_url_injection_protection(self):
        """Test that special characters in server name are properly URL-encoded to prevent injection."""
        server_config = StdioMCPServer(
            command="python",
            args=["server.py"],
        )

        # Test the PoC case from the security advisory
        deeplink = generate_cursor_deeplink("test&calc", server_config)

        # The & should be encoded as %26, preventing it from being interpreted as a query parameter separator
        assert "name=test%26calc" in deeplink
        assert "name=test&calc" not in deeplink

        # Verify the URL structure is intact
        assert deeplink.startswith("cursor://anysphere.cursor-deeplink/mcp/install?")
        assert deeplink.count("&") == 1  # Only one & between name and config parameters

        # Test other potentially dangerous characters
        # Each pair is (raw name, expected percent-encoded form).
        dangerous_names = [
            ("test|calc", "test%7Ccalc"),
            ("test;calc", "test%3Bcalc"),
            ("test<calc", "test%3Ccalc"),
            ("test>calc", "test%3Ecalc"),
            ("test`calc", "test%60calc"),
            ("test$calc", "test%24calc"),
            ("test'calc", "test%27calc"),
            ('test"calc', "test%22calc"),
            ("test calc", "test%20calc"),
            ("test#anchor", "test%23anchor"),
            ("test?query=val", "test%3Fquery%3Dval"),
        ]

        for dangerous_name, expected_encoded in dangerous_names:
            deeplink = generate_cursor_deeplink(dangerous_name, server_config)
            assert f"name={expected_encoded}" in deeplink, (
                f"Failed to encode {dangerous_name}"
            )
            # Ensure no unencoded special chars that could break URL structure
            name_part = deeplink.split("name=")[1].split("&")[0]
            assert name_part == expected_encoded
class TestOpenDeeplink:
    """Test deeplink opening functionality."""

    @patch("subprocess.run")
    def test_open_deeplink_macos(self, mock_run):
        """Test opening deeplink on macOS."""
        with patch("sys.platform", "darwin"):
            mock_run.return_value = Mock(returncode=0)
            result = open_deeplink("cursor://test")
            assert result is True
            # macOS dispatches URL schemes through the `open` utility.
            mock_run.assert_called_once_with(
                ["open", "cursor://test"], check=True, capture_output=True
            )

    def test_open_deeplink_windows(self):
        """Test opening deeplink on Windows."""
        with patch("sys.platform", "win32"):
            # os.startfile only exists on Windows, hence create=True.
            with patch(
                "fastmcp.cli.install.shared.os.startfile", create=True
            ) as mock_startfile:
                result = open_deeplink("cursor://test")
                assert result is True
                mock_startfile.assert_called_once_with("cursor://test")

    @patch("subprocess.run")
    def test_open_deeplink_linux(self, mock_run):
        """Test opening deeplink on Linux."""
        with patch("sys.platform", "linux"):
            mock_run.return_value = Mock(returncode=0)
            result = open_deeplink("cursor://test")
            assert result is True
            # Linux relies on xdg-open for URL-scheme dispatch.
            mock_run.assert_called_once_with(
                ["xdg-open", "cursor://test"], check=True, capture_output=True
            )

    @patch("subprocess.run")
    def test_open_deeplink_failure(self, mock_run):
        """Test handling of deeplink opening failure."""
        import subprocess

        with patch("sys.platform", "darwin"):
            # A non-zero exit from `open` should be reported as False, not raised.
            mock_run.side_effect = subprocess.CalledProcessError(1, ["open"])
            result = open_deeplink("cursor://test")
            assert result is False

    @patch("subprocess.run")
    def test_open_deeplink_command_not_found(self, mock_run):
        """Test handling when open command is not found."""
        with patch("sys.platform", "darwin"):
            mock_run.side_effect = FileNotFoundError()
            result = open_deeplink("cursor://test")
            assert result is False

    def test_open_deeplink_invalid_scheme(self):
        """Test that non-cursor:// URLs are rejected."""
        # Rejecting foreign schemes prevents the helper from being abused to
        # launch arbitrary URLs or local files.
        result = open_deeplink("http://malicious.com")
        assert result is False

        result = open_deeplink("https://example.com")
        assert result is False

        result = open_deeplink("file:///etc/passwd")
        assert result is False

    def test_open_deeplink_valid_cursor_scheme(self):
        """Test that cursor:// URLs are accepted."""
        with patch("sys.platform", "darwin"):
            with patch("subprocess.run") as mock_run:
                mock_run.return_value = Mock(returncode=0)
                result = open_deeplink("cursor://anysphere.cursor-deeplink/mcp/install")
                assert result is True

    def test_open_deeplink_empty_url(self):
        """Test handling of empty URL."""
        result = open_deeplink("")
        assert result is False

    def test_open_deeplink_windows_oserror(self):
        """Test handling of OSError on Windows."""
        with patch("sys.platform", "win32"):
            with patch(
                "fastmcp.cli.install.shared.os.startfile", create=True
            ) as mock_startfile:
                mock_startfile.side_effect = OSError("File not found")
                result = open_deeplink("cursor://test")
                assert result is False
class TestInstallCursor:
    """Test cursor installation functionality."""

    @patch("fastmcp.cli.install.cursor.open_deeplink")
    @patch("fastmcp.cli.install.cursor.print")
    def test_install_cursor_success(self, mock_print, mock_open_deeplink):
        """Test successful cursor installation."""
        mock_open_deeplink.return_value = True
        result = install_cursor(
            file=Path("/path/to/server.py"),
            server_object=None,
            name="test-server",
        )
        assert result is True
        mock_open_deeplink.assert_called_once()
        # Verify the deeplink was generated correctly
        call_args = mock_open_deeplink.call_args[0][0]
        assert call_args.startswith("cursor://anysphere.cursor-deeplink/mcp/install?")
        assert "name=test-server" in call_args

    @patch("fastmcp.cli.install.cursor.open_deeplink")
    @patch("fastmcp.cli.install.cursor.print")
    def test_install_cursor_with_packages(self, mock_print, mock_open_deeplink):
        """Test cursor installation with additional packages."""
        mock_open_deeplink.return_value = True
        result = install_cursor(
            file=Path("/path/to/server.py"),
            server_object="app",
            name="test-server",
            with_packages=["numpy", "pandas"],
            env_vars={"API_KEY": "test"},
        )
        assert result is True
        call_args = mock_open_deeplink.call_args[0][0]
        # Decode the config to verify packages
        # (the deeplink carries the server config as url-safe base64 JSON)
        config_part = call_args.split("config=")[1]
        decoded = base64.urlsafe_b64decode(config_part).decode()
        config_data = json.loads(decoded)
        # Check that all packages are included
        assert "--with" in config_data["args"]
        assert "numpy" in config_data["args"]
        assert "pandas" in config_data["args"]
        assert "fastmcp" in config_data["args"]
        assert config_data["env"] == {"API_KEY": "test"}

    @patch("fastmcp.cli.install.cursor.open_deeplink")
    @patch("fastmcp.cli.install.cursor.print")
    def test_install_cursor_with_editable(self, mock_print, mock_open_deeplink):
        """Test cursor installation with editable package."""
        mock_open_deeplink.return_value = True
        # Use an absolute path that works on all platforms
        editable_path = Path.cwd() / "local" / "package"
        result = install_cursor(
            file=Path("/path/to/server.py"),
            server_object="custom_app",
            name="test-server",
            with_editable=[editable_path],
        )
        assert result is True
        call_args = mock_open_deeplink.call_args[0][0]
        # Decode and verify editable path
        config_part = call_args.split("config=")[1]
        decoded = base64.urlsafe_b64decode(config_part).decode()
        config_data = json.loads(decoded)
        assert "--with-editable" in config_data["args"]
        # Check that the path was resolved (should be absolute)
        editable_idx = config_data["args"].index("--with-editable") + 1
        resolved_path = config_data["args"][editable_idx]
        assert Path(resolved_path).is_absolute()
        assert "server.py:custom_app" in " ".join(config_data["args"])

    @patch("fastmcp.cli.install.cursor.open_deeplink")
    @patch("fastmcp.cli.install.cursor.print")
    def test_install_cursor_failure(self, mock_print, mock_open_deeplink):
        """Test cursor installation when deeplink fails to open."""
        mock_open_deeplink.return_value = False
        result = install_cursor(
            file=Path("/path/to/server.py"),
            server_object=None,
            name="test-server",
        )
        assert result is False
        # Verify failure message was printed
        mock_print.assert_called()

    def test_install_cursor_workspace_path_is_file(self, tmp_path):
        """Test that passing a file as workspace_path returns False."""
        # workspace_path must be a directory; a regular file is rejected.
        file_path = tmp_path / "somefile.txt"
        file_path.write_text("hello")
        result = install_cursor_workspace(
            file=Path("/path/to/server.py"),
            server_object=None,
            name="test-server",
            workspace_path=file_path,
        )
        assert result is False

    def test_install_cursor_deduplicate_packages(self):
        """Test that duplicate packages are deduplicated."""
        with patch("fastmcp.cli.install.cursor.open_deeplink") as mock_open:
            mock_open.return_value = True
            install_cursor(
                file=Path("/path/to/server.py"),
                server_object=None,
                name="test-server",
                with_packages=["numpy", "fastmcp", "numpy", "pandas", "fastmcp"],
            )
            call_args = mock_open.call_args[0][0]
            config_part = call_args.split("config=")[1]
            decoded = base64.urlsafe_b64decode(config_part).decode()
            config_data = json.loads(decoded)
            # Count occurrences of each package
            args_str = " ".join(config_data["args"])
            assert args_str.count("--with numpy") == 1
            assert args_str.count("--with pandas") == 1
            assert args_str.count("--with fastmcp") == 1
class TestCursorCommand:
    """Test the cursor CLI command."""

    @patch("fastmcp.cli.install.cursor.install_cursor")
    @patch("fastmcp.cli.install.cursor.process_common_args")
    async def test_cursor_command_basic(self, mock_process_args, mock_install):
        """Test basic cursor command execution."""
        # process_common_args returns (file, server_object, name, packages, env)
        mock_process_args.return_value = (
            Path("server.py"),
            None,
            "test-server",
            [],
            {},
        )
        mock_install.return_value = True
        with patch("sys.exit") as mock_exit:
            await cursor_command("server.py")
            mock_install.assert_called_once_with(
                file=Path("server.py"),
                server_object=None,
                name="test-server",
                with_editable=[],
                with_packages=[],
                env_vars={},
                python_version=None,
                with_requirements=None,
                project=None,
                workspace=None,
            )
            mock_exit.assert_not_called()

    @patch("fastmcp.cli.install.cursor.install_cursor")
    @patch("fastmcp.cli.install.cursor.process_common_args")
    async def test_cursor_command_failure(self, mock_process_args, mock_install):
        """Test cursor command when installation fails."""
        mock_process_args.return_value = (
            Path("server.py"),
            None,
            "test-server",
            [],
            {},
        )
        mock_install.return_value = False
        # A failed install must terminate the CLI with exit code 1.
        with pytest.raises(SystemExit) as exc_info:
            await cursor_command("server.py")
        assert isinstance(exc_info.value, SystemExit)
        assert exc_info.value.code == 1
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "tests/cli/test_cursor.py",
"license": "Apache License 2.0",
"lines": 361,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
PrefectHQ/fastmcp:tests/cli/test_cli.py | import subprocess
from pathlib import Path
from unittest.mock import Mock, patch
import pytest
from fastmcp.cli.cli import _parse_env_var, app
class TestMainCLI:
    """Test the main CLI application."""

    def test_app_exists(self):
        """Test that the main app is properly configured."""
        # app.name is a tuple in cyclopts
        assert "fastmcp" in app.name
        assert "FastMCP 2.0" in app.help
        # Just check that version exists, not the specific value
        assert hasattr(app, "version")

    def test_parse_env_var_valid(self):
        """Test parsing valid environment variables."""
        key, value = _parse_env_var("KEY=value")
        assert key == "KEY"
        assert value == "value"
        # Only the first '=' separates key from value.
        key, value = _parse_env_var("COMPLEX_KEY=complex=value=with=equals")
        assert key == "COMPLEX_KEY"
        assert value == "complex=value=with=equals"

    def test_parse_env_var_invalid(self):
        """Test parsing invalid environment variables exits."""
        with pytest.raises(SystemExit) as exc_info:
            _parse_env_var("INVALID_FORMAT")
        assert isinstance(exc_info.value, SystemExit)
        assert exc_info.value.code == 1
class TestVersionCommand:
    """Test the version command."""

    @patch("fastmcp.cli.cli.check_for_newer_version", return_value=None)
    def test_version_command_execution(self, mock_check):
        """Test that version command executes properly."""
        # The version command should execute without raising SystemExit
        command, bound, _ = app.parse_args(["version"])
        command()  # Should not raise

    def test_version_command_parsing(self):
        """Test that the version command parses arguments correctly."""
        command, bound, _ = app.parse_args(["version"])
        assert callable(command)
        assert command.__name__ == "version"  # type: ignore[attr-defined]
        # Default arguments aren't included in bound.arguments
        assert bound.arguments == {}

    def test_version_command_with_copy_flag(self):
        """Test that the version command parses --copy flag correctly."""
        command, bound, _ = app.parse_args(["version", "--copy"])
        assert callable(command)
        assert command.__name__ == "version"  # type: ignore[attr-defined]
        assert bound.arguments == {"copy": True}

    @patch("fastmcp.cli.cli.pyperclip.copy")
    @patch("fastmcp.cli.cli.console")
    def test_version_command_copy_functionality(
        self, mock_console, mock_pyperclip_copy
    ):
        """Test that the version command copies to clipboard when --copy is used."""
        command, bound, _ = app.parse_args(["version", "--copy"])
        command(**bound.arguments)
        # Verify pyperclip.copy was called with plain text format
        mock_pyperclip_copy.assert_called_once()
        copied_text = mock_pyperclip_copy.call_args[0][0]
        # Verify the copied text contains expected version info keys in plain text
        assert "FastMCP version:" in copied_text
        assert "MCP version:" in copied_text
        assert "Python version:" in copied_text
        assert "Platform:" in copied_text
        assert "FastMCP root path:" in copied_text
        # Verify no ANSI escape codes (terminal control characters)
        assert "\x1b[" not in copied_text
        mock_console.print.assert_called_with(
            "[green]✓[/green] Version information copied to clipboard"
        )
class TestDevCommand:
    """Test the dev command."""

    def test_dev_inspector_command_parsing(self):
        """Test that dev inspector command can be parsed with various options."""
        # Test basic parsing
        command, bound, _ = app.parse_args(["dev", "inspector", "server.py"])
        assert command is not None
        assert bound.arguments["server_spec"] == "server.py"
        # Test with options
        command, bound, _ = app.parse_args(
            [
                "dev",
                "inspector",
                "server.py",
                "--with",
                "package1",
                "--inspector-version",
                "1.0.0",
                "--ui-port",
                "3000",
            ]
        )
        assert bound.arguments["with_packages"] == ["package1"]
        assert bound.arguments["inspector_version"] == "1.0.0"
        # Numeric options are coerced to int by cyclopts.
        assert bound.arguments["ui_port"] == 3000

    def test_dev_inspector_command_parsing_with_new_options(self):
        """Test dev inspector command parsing with new uv options."""
        command, bound, _ = app.parse_args(
            [
                "dev",
                "inspector",
                "server.py",
                "--python",
                "3.10",
                "--project",
                "/workspace",
                "--with-requirements",
                "dev-requirements.txt",
                "--with",
                "pytest",
            ]
        )
        assert command is not None
        assert bound.arguments["server_spec"] == "server.py"
        assert bound.arguments["python"] == "3.10"
        # Path-typed options are coerced to pathlib.Path by cyclopts.
        assert bound.arguments["project"] == Path("/workspace")
        assert bound.arguments["with_requirements"] == Path("dev-requirements.txt")
        assert bound.arguments["with_packages"] == ["pytest"]
class TestRunCommand:
    """Test the run command."""

    def test_run_command_parsing_basic(self):
        """Test basic run command parsing."""
        command, bound, _ = app.parse_args(["run", "server.py"])
        assert command is not None
        assert bound.arguments["server_spec"] == "server.py"
        # Cyclopts only includes non-default values
        assert "transport" not in bound.arguments
        assert "host" not in bound.arguments
        assert "port" not in bound.arguments
        assert "path" not in bound.arguments
        assert "log_level" not in bound.arguments
        assert "no_banner" not in bound.arguments

    def test_run_command_parsing_with_options(self):
        """Test run command parsing with various options."""
        command, bound, _ = app.parse_args(
            [
                "run",
                "server.py",
                "--transport",
                "http",
                "--host",
                "localhost",
                "--port",
                "8080",
                "--path",
                "/v1/mcp",
                "--log-level",
                "DEBUG",
                "--no-banner",
            ]
        )
        assert command is not None
        assert bound.arguments["server_spec"] == "server.py"
        assert bound.arguments["transport"] == "http"
        assert bound.arguments["host"] == "localhost"
        assert bound.arguments["port"] == 8080
        assert bound.arguments["path"] == "/v1/mcp"
        assert bound.arguments["log_level"] == "DEBUG"
        assert bound.arguments["no_banner"] is True

    def test_run_command_parsing_partial_options(self):
        """Test run command parsing with only some options."""
        command, bound, _ = app.parse_args(
            [
                "run",
                "server.py",
                "--transport",
                "http",
                "--no-banner",
            ]
        )
        assert command is not None
        assert bound.arguments["server_spec"] == "server.py"
        assert bound.arguments["transport"] == "http"
        assert bound.arguments["no_banner"] is True
        # Other options should not be present
        assert "host" not in bound.arguments
        assert "port" not in bound.arguments
        assert "log_level" not in bound.arguments
        assert "path" not in bound.arguments

    def test_run_command_parsing_with_new_options(self):
        """Test run command parsing with new uv options."""
        command, bound, _ = app.parse_args(
            [
                "run",
                "server.py",
                "--python",
                "3.11",
                "--with",
                "pandas",
                "--with",
                "numpy",
                "--project",
                "/path/to/project",
                "--with-requirements",
                "requirements.txt",
            ]
        )
        assert command is not None
        assert bound.arguments["server_spec"] == "server.py"
        assert bound.arguments["python"] == "3.11"
        # Repeated --with flags accumulate in order.
        assert bound.arguments["with_packages"] == ["pandas", "numpy"]
        assert bound.arguments["project"] == Path("/path/to/project")
        assert bound.arguments["with_requirements"] == Path("requirements.txt")

    def test_run_command_transport_aliases(self):
        """Test that both 'http' and 'streamable-http' are accepted as valid transport options."""
        # Test with 'http' transport
        command, bound, _ = app.parse_args(
            [
                "run",
                "server.py",
                "--transport",
                "http",
            ]
        )
        assert command is not None
        assert bound.arguments["transport"] == "http"
        # Test with 'streamable-http' transport
        command, bound, _ = app.parse_args(
            [
                "run",
                "server.py",
                "--transport",
                "streamable-http",
            ]
        )
        assert command is not None
        assert bound.arguments["transport"] == "streamable-http"

    def test_run_command_parsing_with_server_args(self):
        """Test run command parsing with server arguments after --."""
        command, bound, _ = app.parse_args(
            [
                "run",
                "server.py",
                "--",
                "--config",
                "test.json",
                "--debug",
            ]
        )
        assert command is not None
        assert bound.arguments["server_spec"] == "server.py"
        # Server args after -- are captured as positional arguments in bound.args
        assert bound.args == ("server.py", "--config", "test.json", "--debug")

    def test_run_command_parsing_with_mixed_args(self):
        """Test run command parsing with both FastMCP options and server args."""
        command, bound, _ = app.parse_args(
            [
                "run",
                "server.py",
                "--transport",
                "http",
                "--port",
                "8080",
                "--",
                "--server-port",
                "9090",
                "--debug",
            ]
        )
        assert command is not None
        assert bound.arguments["server_spec"] == "server.py"
        assert bound.arguments["transport"] == "http"
        assert bound.arguments["port"] == 8080
        # Server args after -- are captured separately from FastMCP options
        assert bound.args == ("server.py", "--server-port", "9090", "--debug")

    def test_run_command_parsing_with_positional_server_args(self):
        """Test run command parsing with positional server arguments."""
        command, bound, _ = app.parse_args(
            [
                "run",
                "server.py",
                "--",
                "arg1",
                "arg2",
                "--flag",
            ]
        )
        assert command is not None
        assert bound.arguments["server_spec"] == "server.py"
        # Positional args and flags after -- are all captured
        assert bound.args == ("server.py", "arg1", "arg2", "--flag")

    def test_run_command_parsing_server_args_require_delimiter(self):
        """Test that server args without -- delimiter are rejected."""
        # Should fail because --config is not a recognized FastMCP option
        with pytest.raises(SystemExit):
            app.parse_args(
                [
                    "run",
                    "server.py",
                    "--config",
                    "test.json",
                ]
            )

    def test_run_command_parsing_project_flag(self):
        """Test run command parsing with --project flag."""
        command, bound, _ = app.parse_args(
            [
                "run",
                "server.py",
                "--project",
                "./test-env",
            ]
        )
        assert command is not None
        assert bound.arguments["server_spec"] == "server.py"
        assert bound.arguments["project"] == Path("./test-env")

    def test_run_command_parsing_skip_source_flag(self):
        """Test run command parsing with --skip-source flag."""
        command, bound, _ = app.parse_args(
            [
                "run",
                "server.py",
                "--skip-source",
            ]
        )
        assert command is not None
        assert bound.arguments["server_spec"] == "server.py"
        assert bound.arguments["skip_source"] is True

    def test_run_command_parsing_project_and_skip_source(self):
        """Test run command parsing with --project and --skip-source flags."""
        command, bound, _ = app.parse_args(
            [
                "run",
                "server.py",
                "--project",
                "./test-env",
                "--skip-source",
            ]
        )
        assert command is not None
        assert bound.arguments["server_spec"] == "server.py"
        assert bound.arguments["project"] == Path("./test-env")
        assert bound.arguments["skip_source"] is True

    def test_show_server_banner_setting(self):
        """Test that show_server_banner setting works with environment variable."""
        import os
        from unittest import mock

        from fastmcp.settings import Settings

        # Test default (banner shown)
        settings = Settings()
        assert settings.show_server_banner is True
        # Test with env var set to false (banner hidden)
        with mock.patch.dict(os.environ, {"FASTMCP_SHOW_SERVER_BANNER": "false"}):
            settings = Settings()
            assert settings.show_server_banner is False
        # Test CLI precedence logic (simulated)
        with mock.patch.dict(os.environ, {"FASTMCP_SHOW_SERVER_BANNER": "true"}):
            settings = Settings()
            # CLI --no-banner flag would override
            cli_no_banner = True
            final = cli_no_banner if cli_no_banner else not settings.show_server_banner
            assert final is True  # Banner suppressed by CLI flag
class TestWindowsSpecific:
    """Test Windows-specific functionality."""

    @patch("subprocess.run")
    def test_get_npx_command_windows_cmd(self, mock_run):
        """Test npx command detection on Windows with npx.cmd."""
        from fastmcp.cli.cli import _get_npx_command

        with patch("sys.platform", "win32"):
            # First call succeeds with npx.cmd
            mock_run.return_value = Mock(returncode=0)
            result = _get_npx_command()
            assert result == "npx.cmd"
            mock_run.assert_called_once_with(
                ["npx.cmd", "--version"],
                check=True,
                capture_output=True,
            )

    @patch("subprocess.run")
    def test_get_npx_command_windows_exe(self, mock_run):
        """Test npx command detection on Windows with npx.exe."""
        from fastmcp.cli.cli import _get_npx_command

        with patch("sys.platform", "win32"):
            # First call fails, second succeeds
            mock_run.side_effect = [
                subprocess.CalledProcessError(1, "npx.cmd"),
                Mock(returncode=0),
            ]
            result = _get_npx_command()
            assert result == "npx.exe"
            assert mock_run.call_count == 2

    @patch("subprocess.run")
    def test_get_npx_command_windows_cmd_missing(self, mock_run):
        """Test npx command detection continues when npx.cmd is missing."""
        from fastmcp.cli.cli import _get_npx_command

        with patch("sys.platform", "win32"):
            # Missing npx.cmd should not abort detection
            mock_run.side_effect = [
                FileNotFoundError("npx.cmd not found"),
                Mock(returncode=0),
            ]
            result = _get_npx_command()
            assert result == "npx.exe"
            assert mock_run.call_count == 2

    @patch("subprocess.run")
    def test_get_npx_command_windows_fallback(self, mock_run):
        """Test npx command detection on Windows with plain npx."""
        from fastmcp.cli.cli import _get_npx_command

        with patch("sys.platform", "win32"):
            # First two calls fail, third succeeds
            mock_run.side_effect = [
                subprocess.CalledProcessError(1, "npx.cmd"),
                subprocess.CalledProcessError(1, "npx.exe"),
                Mock(returncode=0),
            ]
            result = _get_npx_command()
            assert result == "npx"
            assert mock_run.call_count == 3

    @patch("subprocess.run")
    def test_get_npx_command_windows_not_found(self, mock_run):
        """Test npx command detection on Windows when npx is not found."""
        from fastmcp.cli.cli import _get_npx_command

        with patch("sys.platform", "win32"):
            # All calls fail
            mock_run.side_effect = subprocess.CalledProcessError(1, "npx")
            result = _get_npx_command()
            assert result is None
            assert mock_run.call_count == 3

    @patch("subprocess.run")
    def test_get_npx_command_unix(self, mock_run):
        """Test npx command detection on Unix systems."""
        from fastmcp.cli.cli import _get_npx_command

        # Non-Windows platforms use plain "npx" without probing.
        with patch("sys.platform", "darwin"):
            result = _get_npx_command()
            assert result == "npx"
            mock_run.assert_not_called()

    def test_windows_path_parsing_with_colon(self, tmp_path):
        """Test parsing Windows paths with drive letters and colons."""
        from pathlib import Path

        from fastmcp.utilities.mcp_server_config.v1.sources.filesystem import (
            FileSystemSource,
        )

        # Create a real test file to test the logic
        test_file = tmp_path / "server.py"
        test_file.write_text("# test server")
        # Test normal file parsing (works on all platforms)
        source = FileSystemSource(path=str(test_file))
        assert source.entrypoint is None
        assert Path(source.path).resolve() == test_file.resolve()
        # Test file:object parsing
        source = FileSystemSource(path=f"{test_file}:myapp")
        assert source.entrypoint == "myapp"
        # Test that the file portion resolves correctly when object is specified
        assert Path(source.path).resolve() == test_file.resolve()
class TestInspectCommand:
    """Test the inspect command."""

    def test_inspect_command_parsing_basic(self):
        """Test basic inspect command parsing."""
        command, bound, _ = app.parse_args(["inspect", "server.py"])
        assert command is not None
        assert bound.arguments["server_spec"] == "server.py"
        # Only explicitly set parameters are in bound.arguments
        assert "output" not in bound.arguments

    def test_inspect_command_parsing_with_output(self, tmp_path):
        """Test inspect command parsing with output file."""
        output_file = tmp_path / "output.json"
        command, bound, _ = app.parse_args(
            [
                "inspect",
                "server.py",
                "--output",
                str(output_file),
            ]
        )
        assert command is not None
        assert bound.arguments["server_spec"] == "server.py"
        # Output is parsed as a Path object
        assert bound.arguments["output"] == output_file

    async def test_inspect_command_text_summary(self, tmp_path, capsys):
        """Test inspect command with no format shows text summary."""
        # Create a real server file
        server_file = tmp_path / "test_server.py"
        server_file.write_text("""
import fastmcp
mcp = fastmcp.FastMCP("InspectTestServer", instructions="Test instructions", version="1.0.0")
@mcp.tool
def test_tool(x: int) -> int:
    return x * 2
""")
        # Parse and execute the command without format or output
        command, bound, _ = app.parse_args(
            [
                "inspect",
                str(server_file),
            ]
        )
        await command(**bound.arguments)
        # Check the console output
        captured = capsys.readouterr()
        # Check for the table format output
        assert "InspectTestServer" in captured.out
        assert "Test instructions" in captured.out
        assert "1.0.0" in captured.out
        assert "Tools" in captured.out
        assert "1" in captured.out  # number of tools
        assert "FastMCP" in captured.out
        assert "MCP" in captured.out
        assert "Use --format [fastmcp|mcp] for complete JSON output" in captured.out

    async def test_inspect_command_with_real_server(self, tmp_path):
        """Test inspect command with a real server file."""
        # Create a real server file
        server_file = tmp_path / "test_server.py"
        server_file.write_text("""
import fastmcp
mcp = fastmcp.FastMCP("InspectTestServer")
@mcp.tool
def test_tool(x: int) -> int:
    return x * 2
@mcp.prompt
def test_prompt(name: str) -> str:
    return f"Hello, {name}!"
""")
        output_file = tmp_path / "inspect_output.json"
        # Parse and execute the command with format and output file
        command, bound, _ = app.parse_args(
            [
                "inspect",
                str(server_file),
                "--format",
                "fastmcp",
                "--output",
                str(output_file),
            ]
        )
        await command(**bound.arguments)
        # Verify the output file was created and contains expected content
        assert output_file.exists()
        content = output_file.read_text()
        # Basic checks that the fastmcp format worked
        import json

        data = json.loads(content)
        assert data["server"]["name"] == "InspectTestServer"
        assert len(data["tools"]) == 1
        assert len(data["prompts"]) == 1
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "tests/cli/test_cli.py",
"license": "Apache License 2.0",
"lines": 535,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
PrefectHQ/fastmcp:src/fastmcp/cli/install/shared.py | """Shared utilities for install commands."""
import json
import os
import subprocess
import sys
from pathlib import Path
from urllib.parse import urlparse
from dotenv import dotenv_values
from pydantic import ValidationError
from rich import print
from fastmcp.utilities.logging import get_logger
from fastmcp.utilities.mcp_server_config import MCPServerConfig
from fastmcp.utilities.mcp_server_config.v1.sources.filesystem import FileSystemSource
logger = get_logger(__name__)
def parse_env_var(env_var: str) -> tuple[str, str]:
    """Split a ``KEY=VALUE`` string into a stripped ``(key, value)`` pair.

    Only the first ``=`` separates key from value, so values may themselves
    contain ``=``. Exits the process with status 1 when no separator exists.
    """
    key, sep, value = env_var.partition("=")
    if not sep:
        print(
            f"[red]Invalid environment variable format: '[bold]{env_var}[/bold]'. Must be KEY=VALUE[/red]"
        )
        sys.exit(1)
    return key.strip(), value.strip()
async def process_common_args(
    server_spec: str,
    server_name: str | None,
    with_packages: list[str] | None,
    env_vars: list[str] | None,
    env_file: Path | None,
) -> tuple[Path, str | None, str, list[str], dict[str, str] | None]:
    """Process common arguments shared by all install commands.

    Handles both fastmcp.json config files and traditional file.py:object syntax.

    Returns:
        Tuple of (resolved server file, optional server object name, display
        name, merged package list, merged env-var dict or None when no env
        vars / env file were given).
    """
    # Convert None to empty lists for list parameters
    with_packages = with_packages or []
    env_vars = env_vars or []
    # Create MCPServerConfig from server_spec
    config = None
    config_path: Path | None = None
    if server_spec.endswith(".json"):
        config_path = Path(server_spec).resolve()
        if not config_path.exists():
            print(f"[red]Configuration file not found: {config_path}[/red]")
            sys.exit(1)
        try:
            with open(config_path) as f:
                data = json.load(f)
            # Check if it's an MCPConfig (has mcpServers key)
            if "mcpServers" in data:
                # MCPConfig files aren't supported for install
                print("[red]MCPConfig files are not supported for installation[/red]")
                sys.exit(1)
            else:
                # It's a MCPServerConfig
                config = MCPServerConfig.from_file(config_path)
                # Merge packages from config if not overridden
                if config.environment.dependencies:
                    # Merge with CLI packages (CLI takes precedence)
                    config_packages = list(config.environment.dependencies)
                    # NOTE: set() union means the resulting order is unspecified.
                    with_packages = list(set(with_packages + config_packages))
        except (json.JSONDecodeError, ValidationError) as e:
            print(f"[red]Invalid configuration file: {e}[/red]")
            sys.exit(1)
    else:
        # Create config from file path
        source = FileSystemSource(path=server_spec)
        config = MCPServerConfig(source=source)
    # Extract file and server_object from the source
    # The FileSystemSource handles parsing path:object syntax
    source_path = Path(config.source.path).expanduser()
    # If loaded from a JSON config, resolve relative paths against the config's directory
    if not source_path.is_absolute() and config_path is not None:
        file = (config_path.parent / source_path).resolve()
    else:
        file = source_path.resolve()
    # Update the source path so load_server() resolves correctly
    config.source.path = str(file)
    server_object = (
        config.source.entrypoint if hasattr(config.source, "entrypoint") else None
    )
    logger.debug(
        "Installing server",
        extra={
            "file": str(file),
            "server_name": server_name,
            "server_object": server_object,
            "with_packages": with_packages,
        },
    )
    # Verify the resolved file actually exists
    if not file.is_file():
        print(f"[red]Server file not found: {file}[/red]")
        sys.exit(1)
    # Try to import server to get its name and dependencies.
    # load_server() resolves paths against cwd, which may differ from our
    # config-relative resolution, so we catch SystemExit from its file check.
    name = server_name
    server = None
    if not name:
        try:
            server = await config.source.load_server()
            name = server.name
        except (ImportError, ModuleNotFoundError, SystemExit) as e:
            logger.debug(
                "Could not import server (likely missing dependencies), using file name",
                extra={"error": str(e)},
            )
            name = file.stem
    # Process environment variables if provided
    env_dict: dict[str, str] | None = None
    if env_file or env_vars:
        env_dict = {}
        # Load from .env file if specified
        if env_file:
            try:
                # Drop unset (None-valued) entries from the .env file
                env_dict |= {
                    k: v for k, v in dotenv_values(env_file).items() if v is not None
                }
            except Exception as e:
                print(f"[red]Failed to load .env file: {e}[/red]")
                sys.exit(1)
        # Add command line environment variables (override .env values)
        for env_var in env_vars:
            key, value = parse_env_var(env_var)
            env_dict[key] = value
    return file, server_object, name, with_packages, env_dict
def open_deeplink(url: str, *, expected_scheme: str) -> bool:
    """Open a deeplink URL with the platform's default protocol handler.

    Args:
        url: The deeplink URL to open.
        expected_scheme: The URL scheme to validate (e.g. "cursor", "goose").

    Returns:
        True if the command succeeded, False otherwise.
    """
    scheme = urlparse(url).scheme
    if scheme != expected_scheme:
        logger.warning(
            f"Invalid deeplink scheme: {scheme}, expected {expected_scheme}"
        )
        return False

    try:
        if sys.platform == "win32":
            # Windows dispatches URLs through the shell association directly.
            os.startfile(url)
        else:
            # macOS uses `open`; other POSIX platforms use `xdg-open`.
            opener = "open" if sys.platform == "darwin" else "xdg-open"
            subprocess.run([opener, url], check=True, capture_output=True)
    except (subprocess.CalledProcessError, FileNotFoundError, OSError):
        return False
    return True
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "src/fastmcp/cli/install/shared.py",
"license": "Apache License 2.0",
"lines": 150,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
PrefectHQ/fastmcp:src/fastmcp/cli/install/claude_code.py | """Claude Code integration for FastMCP install using Cyclopts."""
import shutil
import subprocess
import sys
from pathlib import Path
from typing import Annotated
import cyclopts
from rich import print
from fastmcp.utilities.logging import get_logger
from fastmcp.utilities.mcp_server_config.v1.environments.uv import UVEnvironment
from .shared import process_common_args
logger = get_logger(__name__)
def find_claude_command() -> str | None:
    """Locate the Claude Code CLI executable, or return None if absent.

    Probes PATH first, then well-known install locations, because 'claude'
    is frequently a shell alias that subprocess cannot invoke.
    """

    def _is_claude_code(executable: str) -> bool:
        # Confirm the candidate actually runs and identifies as Claude Code.
        try:
            proc = subprocess.run(
                [executable, "--version"],
                check=True,
                capture_output=True,
                text=True,
            )
        except (subprocess.CalledProcessError, FileNotFoundError):
            return False
        return "Claude Code" in proc.stdout

    # A genuine executable on PATH wins if it verifies.
    on_path = shutil.which("claude")
    if on_path and _is_claude_code(on_path):
        return on_path

    # Fall back to common installation locations (aliases don't work with
    # subprocess, so these are checked as concrete filesystem paths).
    candidates = (
        # Default Claude Code installation location (after migration)
        Path.home() / ".claude" / "local" / "claude",
        # npm global installation on macOS/Linux (default)
        Path("/usr/local/bin/claude"),
        # npm global installation with custom prefix
        Path.home() / ".npm-global" / "bin" / "claude",
    )
    for candidate in candidates:
        if candidate.exists() and _is_claude_code(str(candidate)):
            return str(candidate)
    return None
def check_claude_code_available() -> bool:
    """Check if Claude Code CLI is available.

    Thin convenience wrapper: True when find_claude_command() locates a
    verified executable, False otherwise.
    """
    return find_claude_command() is not None
def install_claude_code(
    file: Path,
    server_object: str | None,
    name: str,
    *,
    with_editable: list[Path] | None = None,
    with_packages: list[str] | None = None,
    env_vars: dict[str, str] | None = None,
    python_version: str | None = None,
    with_requirements: Path | None = None,
    project: Path | None = None,
) -> bool:
    """Register a FastMCP server with Claude Code via `claude mcp add`.

    Args:
        file: Path to the server file
        server_object: Optional server object name (for :object suffix)
        name: Name for the server in Claude Code
        with_editable: Optional list of directories to install in editable mode
        with_packages: Optional list of additional packages to install
        env_vars: Optional dictionary of environment variables
        python_version: Optional Python version to use
        with_requirements: Optional requirements file to install from
        project: Optional project directory to run within

    Returns:
        True if installation was successful, False otherwise
    """
    # Bail out early when the Claude Code CLI cannot be located.
    claude_cmd = find_claude_command()
    if claude_cmd is None:
        print(
            "[red]Claude Code CLI not found.[/red]\n"
            "[blue]Please ensure Claude Code is installed. Try running 'claude --version' to verify.[/blue]"
        )
        return False

    # Describe the uv-managed environment the server will run inside;
    # fastmcp itself is always a dependency.
    env_config = UVEnvironment(
        python=python_version,
        dependencies=(with_packages or []) + ["fastmcp"],
        requirements=with_requirements,
        project=project,
        editable=with_editable,
    )

    # "file:object" spec when an object was named, otherwise just the file.
    resolved_file = file.resolve()
    server_spec = f"{resolved_file}:{server_object}" if server_object else str(resolved_file)

    # The command Claude Code will execute to start the server.
    run_command = env_config.build_command(["fastmcp", "run", server_spec])

    # Assemble: claude mcp add <name> [-e K=V ...] -- <run command>
    cmd_parts = [claude_cmd, "mcp", "add", name]
    for key, value in (env_vars or {}).items():
        cmd_parts.extend(["-e", f"{key}={value}"])
    cmd_parts.append("--")
    cmd_parts.extend(run_command)

    try:
        subprocess.run(cmd_parts, check=True, capture_output=True, text=True)
    except subprocess.CalledProcessError as e:
        # Surface stderr from the CLI when available for a useful message.
        print(
            f"[red]Failed to install '[bold]{name}[/bold]' in Claude Code: {e.stderr.strip() if e.stderr else str(e)}[/red]"
        )
        return False
    except Exception as e:
        print(f"[red]Failed to install '[bold]{name}[/bold]' in Claude Code: {e}[/red]")
        return False
    return True
async def claude_code_command(
    server_spec: str,
    *,
    server_name: Annotated[
        str | None,
        cyclopts.Parameter(
            name=["--name", "-n"],
            help="Custom name for the server in Claude Code",
        ),
    ] = None,
    with_editable: Annotated[
        list[Path] | None,
        cyclopts.Parameter(
            "--with-editable",
            help="Directory with pyproject.toml to install in editable mode (can be used multiple times)",
        ),
    ] = None,
    with_packages: Annotated[
        list[str] | None,
        cyclopts.Parameter(
            "--with", help="Additional packages to install (can be used multiple times)"
        ),
    ] = None,
    env_vars: Annotated[
        list[str] | None,
        cyclopts.Parameter(
            "--env",
            help="Environment variables in KEY=VALUE format (can be used multiple times)",
        ),
    ] = None,
    env_file: Annotated[
        Path | None,
        cyclopts.Parameter(
            "--env-file",
            help="Load environment variables from .env file",
        ),
    ] = None,
    python: Annotated[
        str | None,
        cyclopts.Parameter(
            "--python",
            help="Python version to use (e.g., 3.10, 3.11)",
        ),
    ] = None,
    with_requirements: Annotated[
        Path | None,
        cyclopts.Parameter(
            "--with-requirements",
            help="Requirements file to install dependencies from",
        ),
    ] = None,
    project: Annotated[
        Path | None,
        cyclopts.Parameter(
            "--project",
            help="Run the command within the given project directory",
        ),
    ] = None,
) -> None:
    """Install an MCP server in Claude Code.

    Resolves the server spec and shared CLI options, then registers the
    server via the ``claude mcp add`` CLI. Exits with status 1 on failure.

    Args:
        server_spec: Python file to install, optionally with :object suffix
    """
    # Normalize the optional CLI list arguments to concrete lists.
    editable_dirs = with_editable or []
    extra_packages = with_packages or []
    raw_env = env_vars or []

    file, server_object, name, packages, env_dict = await process_common_args(
        server_spec, server_name, extra_packages, raw_env, env_file
    )

    installed = install_claude_code(
        file=file,
        server_object=server_object,
        name=name,
        with_editable=editable_dirs,
        with_packages=packages,
        env_vars=env_dict,
        python_version=python,
        with_requirements=with_requirements,
        project=project,
    )
    if not installed:
        sys.exit(1)
    print(f"[green]Successfully installed '{name}' in Claude Code[/green]")
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "src/fastmcp/cli/install/claude_code.py",
"license": "Apache License 2.0",
"lines": 211,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
PrefectHQ/fastmcp:src/fastmcp/cli/install/claude_desktop.py | """Claude Desktop integration for FastMCP install using Cyclopts."""
import os
import sys
from pathlib import Path
from typing import Annotated
import cyclopts
from rich import print
from fastmcp.mcp_config import StdioMCPServer, update_config_file
from fastmcp.utilities.logging import get_logger
from fastmcp.utilities.mcp_server_config.v1.environments.uv import UVEnvironment
from .shared import process_common_args
logger = get_logger(__name__)
def get_claude_config_path(config_path: Path | None = None) -> Path | None:
"""Get the Claude config directory based on platform.
Args:
config_path: Optional custom path to the Claude Desktop config directory
"""
if config_path:
if not config_path.exists():
print(f"[red]The specified config path does not exist: {config_path}[/red]")
return None
return config_path
if sys.platform == "win32":
path = Path(Path.home(), "AppData", "Roaming", "Claude")
elif sys.platform == "darwin":
path = Path(Path.home(), "Library", "Application Support", "Claude")
elif sys.platform.startswith("linux"):
path = Path(
os.environ.get("XDG_CONFIG_HOME", Path.home() / ".config"), "Claude"
)
else:
return None
if path.exists():
return path
return None
def install_claude_desktop(
    file: Path,
    server_object: str | None,
    name: str,
    *,
    with_editable: list[Path] | None = None,
    with_packages: list[str] | None = None,
    env_vars: dict[str, str] | None = None,
    python_version: str | None = None,
    with_requirements: Path | None = None,
    project: Path | None = None,
    config_path: Path | None = None,
) -> bool:
    """Install FastMCP server in Claude Desktop.

    Writes (or updates) an entry in ``claude_desktop_config.json`` that
    launches the server via ``fastmcp run`` inside a uv-managed environment.
    Environment variables already present for an existing entry with the
    same name are preserved and merged with the new ones.

    Args:
        file: Path to the server file
        server_object: Optional server object name (for :object suffix)
        name: Name for the server in Claude's config
        with_editable: Optional list of directories to install in editable mode
        with_packages: Optional list of additional packages to install
        env_vars: Optional dictionary of environment variables
        python_version: Optional Python version to use
        with_requirements: Optional requirements file to install from
        project: Optional project directory to run within
        config_path: Optional custom path to Claude Desktop config directory

    Returns:
        True if installation was successful, False otherwise
    """
    config_dir = get_claude_config_path(config_path=config_path)
    if not config_dir:
        # When config_path was supplied but invalid, get_claude_config_path
        # already printed a specific error, so only show the generic message
        # for the auto-detection failure case.
        if not config_path:
            print(
                "[red]Claude Desktop config directory not found.[/red]\n"
                "[blue]Please ensure Claude Desktop is installed and has been run at least once to initialize its config.[/blue]"
            )
        return False

    config_file = config_dir / "claude_desktop_config.json"

    # uv environment spec; fastmcp itself is always a dependency so the
    # `fastmcp run` entry point is available.
    env_config = UVEnvironment(
        python=python_version,
        dependencies=(with_packages or []) + ["fastmcp"],
        requirements=with_requirements,
        project=project,
        editable=with_editable,
    )

    # Build server spec from parsed components
    if server_object:
        server_spec = f"{file.resolve()}:{server_object}"
    else:
        server_spec = str(file.resolve())

    # Build the full command
    full_command = env_config.build_command(["fastmcp", "run", server_spec])

    # Create server configuration
    server_config = StdioMCPServer(
        command=full_command[0],
        args=full_command[1:],
        env=env_vars or {},
    )

    try:
        # Handle environment variable merging manually since we need to preserve existing config
        if config_file.exists():
            import json

            content = config_file.read_text().strip()
            if content:
                config = json.loads(content)
                if "mcpServers" in config and name in config["mcpServers"]:
                    existing_env = config["mcpServers"][name].get("env", {})
                    if env_vars:
                        # New vars take precedence over existing ones
                        merged_env = {**existing_env, **env_vars}
                    else:
                        merged_env = existing_env
                    server_config.env = merged_env

        # Update configuration with correct function signature
        update_config_file(config_file, name, server_config)

        print(f"[green]Successfully installed '{name}' in Claude Desktop[/green]")
        return True

    except Exception as e:
        # Best-effort: any failure (bad JSON, IO error) reports and returns False.
        print(f"[red]Failed to install server: {e}[/red]")
        return False
async def claude_desktop_command(
    server_spec: str,
    *,
    server_name: Annotated[
        str | None,
        cyclopts.Parameter(
            name=["--name", "-n"],
            help="Custom name for the server in Claude Desktop's config",
        ),
    ] = None,
    with_editable: Annotated[
        list[Path] | None,
        cyclopts.Parameter(
            "--with-editable",
            help="Directory with pyproject.toml to install in editable mode (can be used multiple times)",
        ),
    ] = None,
    with_packages: Annotated[
        list[str] | None,
        cyclopts.Parameter(
            "--with", help="Additional packages to install (can be used multiple times)"
        ),
    ] = None,
    env_vars: Annotated[
        list[str] | None,
        cyclopts.Parameter(
            "--env",
            help="Environment variables in KEY=VALUE format (can be used multiple times)",
        ),
    ] = None,
    env_file: Annotated[
        Path | None,
        cyclopts.Parameter(
            "--env-file",
            help="Load environment variables from .env file",
        ),
    ] = None,
    python: Annotated[
        str | None,
        cyclopts.Parameter(
            "--python",
            help="Python version to use (e.g., 3.10, 3.11)",
        ),
    ] = None,
    with_requirements: Annotated[
        Path | None,
        cyclopts.Parameter(
            "--with-requirements",
            help="Requirements file to install dependencies from",
        ),
    ] = None,
    project: Annotated[
        Path | None,
        cyclopts.Parameter(
            "--project",
            help="Run the command within the given project directory",
        ),
    ] = None,
    config_path: Annotated[
        Path | None,
        cyclopts.Parameter(
            "--config-path",
            help="Custom path to Claude Desktop config directory",
        ),
    ] = None,
) -> None:
    """Install an MCP server in Claude Desktop.

    Resolves the server spec and shared CLI options, then writes the server
    entry to Claude Desktop's JSON config. Exits with status 1 on failure.

    Args:
        server_spec: Python file to install, optionally with :object suffix
    """
    # Normalize the optional CLI list arguments to concrete lists.
    editable_dirs = with_editable or []
    requested_packages = with_packages or []
    raw_env = env_vars or []

    file, server_object, name, packages, env_dict = await process_common_args(
        server_spec, server_name, requested_packages, raw_env, env_file
    )

    ok = install_claude_desktop(
        file=file,
        server_object=server_object,
        name=name,
        with_editable=editable_dirs,
        with_packages=packages,
        env_vars=env_dict,
        python_version=python,
        with_requirements=with_requirements,
        project=project,
        config_path=config_path,
    )
    if not ok:
        sys.exit(1)
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "src/fastmcp/cli/install/claude_desktop.py",
"license": "Apache License 2.0",
"lines": 205,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
PrefectHQ/fastmcp:src/fastmcp/cli/install/cursor.py | """Cursor integration for FastMCP install using Cyclopts."""
import base64
import sys
from pathlib import Path
from typing import Annotated
from urllib.parse import quote
import cyclopts
from rich import print
from fastmcp.mcp_config import StdioMCPServer, update_config_file
from fastmcp.utilities.logging import get_logger
from fastmcp.utilities.mcp_server_config.v1.environments.uv import UVEnvironment
from .shared import open_deeplink as _shared_open_deeplink
from .shared import process_common_args
logger = get_logger(__name__)
def generate_cursor_deeplink(
    server_name: str,
    server_config: StdioMCPServer,
) -> str:
    """Build a Cursor deeplink URL that installs the given MCP server.

    Args:
        server_name: Name of the server
        server_config: Server configuration

    Returns:
        Deeplink URL that can be clicked to install the server
    """
    # Cursor expects the server config as URL-safe base64-encoded JSON.
    payload = server_config.model_dump_json(exclude_none=True)
    encoded_config = base64.urlsafe_b64encode(payload.encode()).decode()

    # Percent-encode the name so it is safe in a query parameter.
    safe_name = quote(server_name, safe="")

    return (
        "cursor://anysphere.cursor-deeplink/mcp/install"
        f"?name={safe_name}&config={encoded_config}"
    )
def open_deeplink(deeplink: str) -> bool:
    """Attempt to open a Cursor deeplink URL using the system's default handler.

    Thin wrapper around the shared opener that pins the expected URL scheme
    to ``cursor`` so malformed links are rejected before launching.

    Args:
        deeplink: The deeplink URL to open

    Returns:
        True if the command succeeded, False otherwise
    """
    return _shared_open_deeplink(deeplink, expected_scheme="cursor")
def install_cursor_workspace(
    file: Path,
    server_object: str | None,
    name: str,
    workspace_path: Path,
    *,
    with_editable: list[Path] | None = None,
    with_packages: list[str] | None = None,
    env_vars: dict[str, str] | None = None,
    python_version: str | None = None,
    with_requirements: Path | None = None,
    project: Path | None = None,
) -> bool:
    """Install FastMCP server to workspace-specific Cursor configuration.

    Writes (or updates) ``<workspace>/.cursor/mcp.json`` with a stdio server
    entry that launches the server via ``fastmcp run``.

    Args:
        file: Path to the server file
        server_object: Optional server object name (for :object suffix)
        name: Name for the server in Cursor
        workspace_path: Path to the workspace directory
        with_editable: Optional list of directories to install in editable mode
        with_packages: Optional list of additional packages to install
        env_vars: Optional dictionary of environment variables
        python_version: Optional Python version to use
        with_requirements: Optional requirements file to install from
        project: Optional project directory to run within

    Returns:
        True if installation was successful, False otherwise
    """
    # Validate the workspace directory up front.
    workspace_path = workspace_path.resolve()
    if not workspace_path.exists():
        print(f"[red]Workspace directory does not exist: {workspace_path}[/red]")
        return False
    if not workspace_path.is_dir():
        print(f"[red]Workspace path is not a directory: {workspace_path}[/red]")
        return False

    # Workspace-local Cursor config lives under .cursor/mcp.json.
    cursor_dir = workspace_path / ".cursor"
    cursor_dir.mkdir(exist_ok=True)
    config_file = cursor_dir / "mcp.json"

    environment = UVEnvironment(
        python=python_version,
        dependencies=(with_packages or []) + ["fastmcp"],
        requirements=with_requirements,
        project=project,
        editable=with_editable,
    )

    # The runner spec is the resolved file path, optionally with :object.
    resolved = str(file.resolve())
    server_spec = f"{resolved}:{server_object}" if server_object else resolved

    launch_command = environment.build_command(["fastmcp", "run", server_spec])

    server_config = StdioMCPServer(
        command=launch_command[0],
        args=launch_command[1:],
        env=env_vars or {},
    )

    try:
        if not config_file.exists():
            # Seed an empty config so update_config_file has valid JSON to edit.
            config_file.write_text('{"mcpServers": {}}')
        update_config_file(config_file, name, server_config)
        print(
            f"[green]Successfully installed '{name}' to workspace at {workspace_path}[/green]"
        )
        return True
    except Exception as e:
        print(f"[red]Failed to install server to workspace: {e}[/red]")
        return False
def install_cursor(
    file: Path,
    server_object: str | None,
    name: str,
    *,
    with_editable: list[Path] | None = None,
    with_packages: list[str] | None = None,
    env_vars: dict[str, str] | None = None,
    python_version: str | None = None,
    with_requirements: Path | None = None,
    project: Path | None = None,
    workspace: Path | None = None,
) -> bool:
    """Install FastMCP server in Cursor.

    With a workspace, delegates to :func:`install_cursor_workspace` and
    writes a workspace-local config file. Otherwise generates a
    ``cursor://`` deeplink and asks the OS to open it in Cursor.

    Args:
        file: Path to the server file
        server_object: Optional server object name (for :object suffix)
        name: Name for the server in Cursor
        with_editable: Optional list of directories to install in editable mode
        with_packages: Optional list of additional packages to install
        env_vars: Optional dictionary of environment variables
        python_version: Optional Python version to use
        with_requirements: Optional requirements file to install from
        project: Optional project directory to run within
        workspace: Optional workspace directory for project-specific installation

    Returns:
        True if installation was successful, False otherwise
    """
    # Fix: delegate to the workspace install BEFORE building the uv
    # environment and run command. The previous code built both and then
    # discarded them on this path (install_cursor_workspace rebuilds its own).
    if workspace:
        return install_cursor_workspace(
            file=file,
            server_object=server_object,
            name=name,
            workspace_path=workspace,
            with_editable=with_editable,
            with_packages=with_packages,
            env_vars=env_vars,
            python_version=python_version,
            with_requirements=with_requirements,
            project=project,
        )

    env_config = UVEnvironment(
        python=python_version,
        dependencies=(with_packages or []) + ["fastmcp"],
        requirements=with_requirements,
        project=project,
        editable=with_editable,
    )

    # Build server spec from parsed components
    if server_object:
        server_spec = f"{file.resolve()}:{server_object}"
    else:
        server_spec = str(file.resolve())

    # Build the full command
    full_command = env_config.build_command(["fastmcp", "run", server_spec])

    # Create server configuration
    server_config = StdioMCPServer(
        command=full_command[0],
        args=full_command[1:],
        env=env_vars or {},
    )

    # Generate deeplink and hand it to the OS; fall back to printing the
    # link for manual use when it cannot be opened automatically.
    deeplink = generate_cursor_deeplink(name, server_config)
    print(f"[blue]Opening Cursor to install '{name}'[/blue]")

    if open_deeplink(deeplink):
        print("[green]Cursor should now open with the installation dialog[/green]")
        return True
    else:
        print(
            "[red]Could not open Cursor automatically.[/red]\n"
            f"[blue]Please copy this link and open it in Cursor: {deeplink}[/blue]"
        )
        return False
async def cursor_command(
    server_spec: str,
    *,
    server_name: Annotated[
        str | None,
        cyclopts.Parameter(
            name=["--name", "-n"],
            help="Custom name for the server in Cursor",
        ),
    ] = None,
    with_editable: Annotated[
        list[Path] | None,
        cyclopts.Parameter(
            "--with-editable",
            help="Directory with pyproject.toml to install in editable mode (can be used multiple times)",
        ),
    ] = None,
    with_packages: Annotated[
        list[str] | None,
        cyclopts.Parameter(
            "--with", help="Additional packages to install (can be used multiple times)"
        ),
    ] = None,
    env_vars: Annotated[
        list[str] | None,
        cyclopts.Parameter(
            "--env",
            help="Environment variables in KEY=VALUE format (can be used multiple times)",
        ),
    ] = None,
    env_file: Annotated[
        Path | None,
        cyclopts.Parameter(
            "--env-file",
            help="Load environment variables from .env file",
        ),
    ] = None,
    python: Annotated[
        str | None,
        cyclopts.Parameter(
            "--python",
            help="Python version to use (e.g., 3.10, 3.11)",
        ),
    ] = None,
    with_requirements: Annotated[
        Path | None,
        cyclopts.Parameter(
            "--with-requirements",
            help="Requirements file to install dependencies from",
        ),
    ] = None,
    project: Annotated[
        Path | None,
        cyclopts.Parameter(
            "--project",
            help="Run the command within the given project directory",
        ),
    ] = None,
    workspace: Annotated[
        Path | None,
        cyclopts.Parameter(
            "--workspace",
            help="Install to workspace directory (will create .cursor/ inside it) instead of using deeplink",
        ),
    ] = None,
) -> None:
    """Install an MCP server in Cursor.

    Resolves the server spec and shared CLI options, then installs either
    via deeplink or into a workspace config. Exits with status 1 on failure.

    Args:
        server_spec: Python file to install, optionally with :object suffix
    """
    # Normalize the optional CLI list arguments to concrete lists.
    editable_dirs = with_editable or []
    requested_packages = with_packages or []
    raw_env = env_vars or []

    file, server_object, name, packages, env_dict = await process_common_args(
        server_spec, server_name, requested_packages, raw_env, env_file
    )

    ok = install_cursor(
        file=file,
        server_object=server_object,
        name=name,
        with_editable=editable_dirs,
        with_packages=packages,
        env_vars=env_dict,
        python_version=python,
        with_requirements=with_requirements,
        project=project,
        workspace=workspace,
    )
    if not ok:
        sys.exit(1)
| {
"repo_id": "PrefectHQ/fastmcp",
"file_path": "src/fastmcp/cli/install/cursor.py",
"license": "Apache License 2.0",
"lines": 279,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.