Sars6 committed on
Commit
9fa019c
·
1 Parent(s): 87dea73

Checkpointing working code

Browse files
Files changed (2) hide show
  1. agent.py +3 -8
  2. server.py +39 -1
agent.py CHANGED
@@ -1,22 +1,17 @@
1
- # agent.py
2
  import asyncio
3
  from mcp_agent.core.fastagent import FastAgent
4
- from mcp_agent.core.prompt import Prompt
5
 
6
  fast = FastAgent("Drift Test Agent")
7
 
 
8
  @fast.agent(
9
  name="diagnostics",
10
- instruction="Answer diagnostic questions to test LLM stability.",
11
  servers=["drift-server"]
12
  )
13
  async def main():
14
  async with fast.run() as agent:
15
- # Apply prompt from the MCP server
16
- print(">> Getting prompt from MCP server…")
17
- result = await agent.apply_prompt("drift-diagnostics")
18
- print(">> Response:")
19
- print(result)
20
 
21
  if __name__ == "__main__":
22
  asyncio.run(main())
 
 
1
  import asyncio
2
  from mcp_agent.core.fastagent import FastAgent
 
3
 
4
  fast = FastAgent("Drift Test Agent")
5
 
6
+
7
@fast.agent(
    name="diagnostics",
    instruction="Run diagnostics using the MCP server tool.",
    servers=["drift-server"]
)
async def main():
    """Launch the diagnostics agent and drop into an interactive session."""
    # fast.run() manages the agent/MCP-server lifecycle; the context manager
    # guarantees the connection is torn down cleanly when the session ends.
    async with fast.run() as agent:
        await agent.interactive()


if __name__ == "__main__":
    asyncio.run(main())
server.py CHANGED
@@ -1,4 +1,4 @@
1
- # main.py
2
  import asyncio
3
  from mcp.server import Server
4
  from mcp.server.stdio import stdio_server
@@ -47,6 +47,44 @@ async def get_prompt(name: str, arguments: dict[str, str] | None = None) -> type
47
  ]
48
  )
49
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
50
 
51
  # Main entrypoint
52
  async def main():
 
1
+ # server.py
2
  import asyncio
3
  from mcp.server import Server
4
  from mcp.server.stdio import stdio_server
 
47
  ]
48
  )
49
 
50
# NOTE(review): Server appears to be imported already at the top of server.py —
# this re-import is redundant but harmless; confirm before removing.
from mcp.server import Server
import mcp.types as types

# Assuming 'app' is your MCP Server instance
55
async def sample(app: Server, messages: list[types.SamplingMessage]):
    """Ask the connected client's LLM to complete *messages* via MCP sampling.

    Sends a create_message request over the current request context's session
    (capped at 300 tokens, temperature 0.7) and returns the raw result.
    """
    # NOTE(review): app.request_context is only valid while a request is being
    # handled — call this from inside a tool/prompt handler only.
    return await app.request_context.session.create_message(
        messages=messages,
        max_tokens=300,
        temperature=0.7,
    )
62
+
63
@app.list_tools()
async def list_tools() -> list[types.Tool]:
    """Advertise the tools this server exposes to connected clients."""
    return [
        types.Tool(
            name="init_diagnostics",
            description="Run diagnostic questionnaire on the connected LLM.",
            # inputSchema must be a JSON Schema object describing the tool's
            # arguments; the original passed a bare {name: description}
            # mapping, which is not a valid schema and fails client validation.
            inputSchema={
                "type": "object",
                "properties": {
                    "model_name": {
                        "type": "string",
                        "description": "Name of the LLM model",
                    }
                },
            },
        )
    ]
72
+
73
@app.call_tool()
async def call_tool(name: str, arguments: dict[str, str] | None = None) -> list[types.TextContent | types.ImageContent | types.EmbeddedResource]:
    """
    Initializes diagnostics by running the questionnaire on the connected LLM.

    Args:
        name: Tool name requested by the client; only "init_diagnostics"
            is supported.
        arguments: Tool arguments (currently unused).

    Returns:
        A single TextContent item containing the sampled LLM response.

    Raises:
        ValueError: If *name* does not match a tool this server exposes.
    """
    # The original handler ignored `name`, silently running diagnostics for
    # any tool name; reject unknown tools explicitly instead.
    if name != "init_diagnostics":
        raise ValueError(f"Unknown tool: {name}")

    # You could fetch dynamic questions here if needed
    questions = [
        types.SamplingMessage(role="user", content=types.TextContent(type="text", text="What is the capital of France?")),
        types.SamplingMessage(role="user", content=types.TextContent(type="text", text="Why is the sky blue?")),
    ]

    response = await sample(app, questions)

    # Return the assistant's message(s) back to the caller
    return [types.TextContent(type="text", text=str(response.content))]
88
 
89
  # Main entrypoint
90
  async def main():