"""mcp_server.py — expose the ContextPilot workflow as an MCP server.

Wraps ``ContextPilotWorkflow`` with llama-index's ``workflow_as_mcp`` adapter
so MCP clients can invoke it by sending a ``MessageEvent`` as the start event.
"""

from llama_index.tools.mcp.utils import workflow_as_mcp

from context_pilot_workflow import ContextPilotWorkflow, MessageEvent

# Single workflow instance shared by all MCP requests handled by this server.
workflow = ContextPilotWorkflow()

# Adapt the workflow into an MCP server. Kept at module level (not inside the
# __main__ guard) so MCP tooling that imports this module can discover `mcp`.
# `start_event_model` tells the adapter how to deserialize incoming requests.
mcp = workflow_as_mcp(
    workflow,
    start_event_model=MessageEvent,
    workflow_name="context-pilot",
    workflow_description="Autonomous context engineering for LLM conversations",
)

# Run the server only when executed directly, not when imported.
if __name__ == "__main__":
    mcp.run()