"""Expose the ContextPilot workflow as an MCP server.

Importing this module builds the server object (``mcp``); executing it
directly starts serving the ``context-pilot`` tool over MCP.
"""
from llama_index.tools.mcp.utils import workflow_as_mcp
from context_pilot_workflow import ContextPilotWorkflow, MessageEvent

# Single workflow instance shared by the server.
workflow = ContextPilotWorkflow()

# Wrap the workflow as an MCP server; MessageEvent describes the
# start-event payload an MCP client must send to invoke it.
mcp = workflow_as_mcp(
    workflow,
    start_event_model=MessageEvent,
    workflow_name="context-pilot",
    workflow_description="Autonomous context engineering for LLM conversations",
)

# Start serving only when executed as a script, never on import.
if __name__ == "__main__":
    mcp.run()