Selcan Yukcu committed on
Commit ·
80cfb11
0
Parent(s):
first commit
Browse files- .idea/.gitignore +8 -0
- .idea/inspectionProfiles/Project_Default.xml +38 -0
- .idea/inspectionProfiles/profiles_settings.xml +6 -0
- .idea/misc.xml +7 -0
- .idea/modules.xml +8 -0
- .idea/query_mcp_server.iml +8 -0
- .idea/vcs.xml +6 -0
- postgre_mcp_client.py +125 -0
- postgre_mcp_server.py +440 -0
.idea/.gitignore
ADDED
|
@@ -0,0 +1,8 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Default ignored files
|
| 2 |
+
/shelf/
|
| 3 |
+
/workspace.xml
|
| 4 |
+
# Editor-based HTTP Client requests
|
| 5 |
+
/httpRequests/
|
| 6 |
+
# Datasource local storage ignored files
|
| 7 |
+
/dataSources/
|
| 8 |
+
/dataSources.local.xml
|
.idea/inspectionProfiles/Project_Default.xml
ADDED
|
@@ -0,0 +1,38 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
<component name="InspectionProjectProfileManager">
|
| 2 |
+
<profile version="1.0">
|
| 3 |
+
<option name="myName" value="Project Default" />
|
| 4 |
+
<inspection_tool class="PyPackageRequirementsInspection" enabled="true" level="WARNING" enabled_by_default="true">
|
| 5 |
+
<option name="ignoredPackages">
|
| 6 |
+
<value>
|
| 7 |
+
<list size="25">
|
| 8 |
+
<item index="0" class="java.lang.String" itemvalue="motor" />
|
| 9 |
+
<item index="1" class="java.lang.String" itemvalue="PyJWT" />
|
| 10 |
+
<item index="2" class="java.lang.String" itemvalue="joblib" />
|
| 11 |
+
<item index="3" class="java.lang.String" itemvalue="scikit-learn" />
|
| 12 |
+
<item index="4" class="java.lang.String" itemvalue="python-dotenv" />
|
| 13 |
+
<item index="5" class="java.lang.String" itemvalue="Jinja2" />
|
| 14 |
+
<item index="6" class="java.lang.String" itemvalue="fastapi_jwt" />
|
| 15 |
+
<item index="7" class="java.lang.String" itemvalue="pydantic_mongo" />
|
| 16 |
+
<item index="8" class="java.lang.String" itemvalue="starlette" />
|
| 17 |
+
<item index="9" class="java.lang.String" itemvalue="uvicorn" />
|
| 18 |
+
<item index="10" class="java.lang.String" itemvalue="Markdown" />
|
| 19 |
+
<item index="11" class="java.lang.String" itemvalue="passlib" />
|
| 20 |
+
<item index="12" class="java.lang.String" itemvalue="pydantic-settings" />
|
| 21 |
+
<item index="13" class="java.lang.String" itemvalue="pydantic" />
|
| 22 |
+
<item index="14" class="java.lang.String" itemvalue="transformers" />
|
| 23 |
+
<item index="15" class="java.lang.String" itemvalue="triton" />
|
| 24 |
+
<item index="16" class="java.lang.String" itemvalue="pymongo" />
|
| 25 |
+
<item index="17" class="java.lang.String" itemvalue="langchain_core" />
|
| 26 |
+
<item index="18" class="java.lang.String" itemvalue="python-multipart" />
|
| 27 |
+
<item index="19" class="java.lang.String" itemvalue="bcrypt" />
|
| 28 |
+
<item index="20" class="java.lang.String" itemvalue="fastapi" />
|
| 29 |
+
<item index="21" class="java.lang.String" itemvalue="sentence_transformers" />
|
| 30 |
+
<item index="22" class="java.lang.String" itemvalue="contextvars" />
|
| 31 |
+
<item index="23" class="java.lang.String" itemvalue="beanie" />
|
| 32 |
+
<item index="24" class="java.lang.String" itemvalue="openpyxl" />
|
| 33 |
+
</list>
|
| 34 |
+
</value>
|
| 35 |
+
</option>
|
| 36 |
+
</inspection_tool>
|
| 37 |
+
</profile>
|
| 38 |
+
</component>
|
.idea/inspectionProfiles/profiles_settings.xml
ADDED
|
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
<component name="InspectionProjectProfileManager">
|
| 2 |
+
<settings>
|
| 3 |
+
<option name="USE_PROJECT_PROFILE" value="false" />
|
| 4 |
+
<version value="1.0" />
|
| 5 |
+
</settings>
|
| 6 |
+
</component>
|
.idea/misc.xml
ADDED
|
@@ -0,0 +1,7 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
<?xml version="1.0" encoding="UTF-8"?>
|
| 2 |
+
<project version="4">
|
| 3 |
+
<component name="Black">
|
| 4 |
+
<option name="sdkName" value="pyfapi_update" />
|
| 5 |
+
</component>
|
| 6 |
+
<component name="ProjectRootManager" version="2" project-jdk-name="pyfapi_update" project-jdk-type="Python SDK" />
|
| 7 |
+
</project>
|
.idea/modules.xml
ADDED
|
@@ -0,0 +1,8 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
<?xml version="1.0" encoding="UTF-8"?>
|
| 2 |
+
<project version="4">
|
| 3 |
+
<component name="ProjectModuleManager">
|
| 4 |
+
<modules>
|
| 5 |
+
<module fileurl="file://$PROJECT_DIR$/.idea/query_mcp_server.iml" filepath="$PROJECT_DIR$/.idea/query_mcp_server.iml" />
|
| 6 |
+
</modules>
|
| 7 |
+
</component>
|
| 8 |
+
</project>
|
.idea/query_mcp_server.iml
ADDED
|
@@ -0,0 +1,8 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
<?xml version="1.0" encoding="UTF-8"?>
|
| 2 |
+
<module type="PYTHON_MODULE" version="4">
|
| 3 |
+
<component name="NewModuleRootManager">
|
| 4 |
+
<content url="file://$MODULE_DIR$" />
|
| 5 |
+
<orderEntry type="inheritedJdk" />
|
| 6 |
+
<orderEntry type="sourceFolder" forTests="false" />
|
| 7 |
+
</component>
|
| 8 |
+
</module>
|
.idea/vcs.xml
ADDED
|
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
<?xml version="1.0" encoding="UTF-8"?>
|
| 2 |
+
<project version="4">
|
| 3 |
+
<component name="VcsDirectoryMappings">
|
| 4 |
+
<mapping directory="" vcs="Git" />
|
| 5 |
+
</component>
|
| 6 |
+
</project>
|
postgre_mcp_client.py
ADDED
|
@@ -0,0 +1,125 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import asyncio
|
| 2 |
+
from mcp import ClientSession, StdioServerParameters
|
| 3 |
+
from mcp.client.stdio import stdio_client
|
| 4 |
+
|
| 5 |
+
from langchain_mcp_adapters.tools import load_mcp_tools
|
| 6 |
+
from langgraph.prebuilt import create_react_agent
|
| 7 |
+
from langchain.chat_models import init_chat_model
|
| 8 |
+
from pprint import pprint
|
| 9 |
+
import logging
|
| 10 |
+
|
| 11 |
+
import os

# LLM used by the ReAct agent. The API key is read from the environment
# instead of being hard-coded — the previous revision committed a live
# Google API key to source control; that key must be rotated.
llm = init_chat_model(
    model="gemini-2.0-flash-lite",
    model_provider="google_genai",
    api_key=os.environ.get("GOOGLE_API_KEY"),
)

# How to launch the MCP server process over stdio. The script path can be
# overridden with the MCP_SERVER_PATH env var; it must be a full path.
server_params = StdioServerParameters(
    command="python",
    args=[os.environ.get(
        "MCP_SERVER_PATH",
        r"C:\Users\yukcus\Desktop\MCPTest\postgre_mcp_server.py",
    )],
)

# Natural-language description of the demo schema. main() appends this to
# every tool description so the model knows the tables without extra lookups.
table_summary = """
The users table stores information about the individuals who use the application. Each user is assigned a unique, auto-incrementing id that serves as the primary key. The username field holds the user's chosen display name and cannot be null, while the email field stores the user’s unique email address, also required and constrained to be unique to avoid duplicates. To track when a user was added to the system, the created_at column records the timestamp of their creation, with a default value set to the current time.

The posts table represents content created by users, such as blog posts or messages. Like the users table, each entry has a unique, auto-incrementing id as the primary key. The user_id field links each post to its author by referencing the id field in the users table, establishing a one-to-many relationship between users and posts. The title column holds a brief summary or headline of the post, while the content field contains the full text. A created_at timestamp is also included to record when each post was created, with a default value of the current time.
"""

# System-style prompt plus the canned user request the agent is run on.
# (Fixed typo: "exacute" -> "execute".)
prompt_temp = """You are an expert in generating SQL queries who has access to a PostgreSQL database through FastMCP tools. These tools allow you to perform operations such as listing tables, retrieving schema information, and executing SQL queries. You do not have prior memory, so if information is needed (e.g., table or column names), use the tools to retrieve it.
Your job is to carefully analyze the user’s request and:
1. Select the most appropriate tool(s) based on their descriptions.
2. If necessary, generate valid and safe SQL queries (only use SELECT queries). If you generate a SQL query, execute it.
3. Chain tools when required — for example, list tables → get schema → create query.
4. Always aim to fulfill the user’s intent while being cautious and accurate.

You must:
- Never use destructive SQL operations such as DELETE, DROP, or UPDATE.
- Always ensure the SQL is correct before using the query tool.
- Use detailed reasoning to choose which tools to invoke.

Only return the final answer to the user’s request. Do not expose internal tool usage or SQL unless the user explicitly asks.
Here is the user request:
can you show me the join of all tables?"""
|
| 39 |
+
|
| 40 |
+
def parse_mcp_output(output_dict):
    """Flatten a LangGraph agent response into a list of step dicts.

    Walks output_dict["messages"] and, keyed on each message's class name,
    collects:
      - AI tool calls whose arguments contain a "query" key ->
        {"type": "ai_function_call", "query": ...}
      - AI messages with no function_call (treated as final answers) ->
        {"type": "ai_final_answer", "ai_said": ...}
      - tool responses ->
        {"type": "tool_response", "tool": ..., "response": ...}
    Progress is also printed as each message is inspected.

    Args:
        output_dict: agent response mapping; missing "messages" yields [].

    Returns:
        List of step dicts in message order.
    """
    import json  # hoisted out of the loop (was re-imported per message)

    result = []
    messages = output_dict.get("messages", [])

    for msg in messages:
        role_name = msg.__class__.__name__  # Example: HumanMessage, AIMessage, ToolMessage
        content = getattr(msg, "content", "")

        # AIMessage with tool call
        if role_name == "AIMessage":
            function_call = getattr(msg, "additional_kwargs", {}).get("function_call")
            if function_call:
                arguments = function_call.get("arguments")

                # Arguments may arrive as a JSON string or as a dict.
                if isinstance(arguments, str):
                    try:
                        arguments_dict = json.loads(arguments)
                    except json.JSONDecodeError:
                        arguments_dict = {}
                else:
                    arguments_dict = arguments or {}

                # Record only calls that carry a SQL "query" argument.
                if "query" in arguments_dict:
                    print("query detected!!!")
                    print(f"ai said:{content}")
                    result.append({
                        "type": "ai_function_call",
                        "query": arguments_dict["query"]
                    })
                else:
                    print(f"ai said:{content}")

            else:
                print(f"ai final answer:{content}")
                result.append({
                    "type": "ai_final_answer",
                    "ai_said": content
                })

        # ToolMessage
        elif role_name == "ToolMessage":
            print(f"tool response:{content}")
            result.append({
                "type": "tool_response",
                "tool": getattr(msg, "name", None),
                "response": content
            })

    return result
| 96 |
+
|
| 97 |
+
|
| 98 |
+
|
| 99 |
+
|
| 100 |
+
async def main():
    """Connect to the MCP server over stdio, build a ReAct agent from its
    tools, run the canned prompt, and parse the resulting transcript."""
    async with stdio_client(server_params) as (reader, writer):
        async with ClientSession(reader, writer) as session:
            # Complete the MCP handshake before anything else.
            await session.initialize()

            # Load the server's tools; enrich every description with the
            # table summary so the LLM knows the schema up front.
            tools = await load_mcp_tools(session)
            for tool in tools:
                tool.description += f" {table_summary}"

            # Build the ReAct agent and run it against the canned prompt.
            agent = create_react_agent(llm, tools)
            reply = await agent.ainvoke({"messages": prompt_temp})

            # Print/collect the intermediate steps and final answer.
            parsed_steps = parse_mcp_output(reply)
            #pprint(parsed_steps)
|
| 118 |
+
|
| 119 |
+
|
| 120 |
+
|
| 121 |
+
|
| 122 |
+
|
| 123 |
+
# Entry point guard so importing this module no longer launches the client.
# Usage: open a terminal and run `python postgre_mcp_client.py`.
if __name__ == "__main__":
    asyncio.run(main())
|
postgre_mcp_server.py
ADDED
|
@@ -0,0 +1,440 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
from contextlib import asynccontextmanager
|
| 3 |
+
from dataclasses import dataclass
|
| 4 |
+
from typing import Optional, AsyncIterator
|
| 5 |
+
|
| 6 |
+
import asyncpg
|
| 7 |
+
from mcp.server.fastmcp import FastMCP, Context
|
| 8 |
+
from pydantic import Field
|
| 9 |
+
|
| 10 |
+
# Constants
|
| 11 |
+
DEFAULT_QUERY_LIMIT = 100
|
| 12 |
+
DEFAULT_SCHEMA = "public"
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
# Define our own PromptMessage class if the MCP one isn't available
|
| 16 |
+
# Local stand-in message container used by the @mcp.prompt handlers below,
# defined here in case the installed MCP package does not export one.
@dataclass
class PromptMessage:
    # Prompt text delivered to the model.
    content: str
    # Message role; defaults to "user".
    role: Optional[str] = "user"
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
# Database context class
|
| 23 |
+
# State yielded by db_lifespan() and shared for the server's lifetime.
@dataclass
class DbContext:
    # asyncpg connection pool used for all database access.
    pool: asyncpg.Pool
    # Target schema name (from the SCHEMA env var, default "public").
    schema: str
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
# Database connection lifecycle manager
|
| 30 |
+
# Database connection lifecycle manager
@asynccontextmanager
async def db_lifespan(server: FastMCP) -> AsyncIterator[DbContext]:
    """Manage database connection lifecycle.

    Reads the connection string from the DSN env var (falling back to a
    local postgres/postgres default) and the schema from SCHEMA, opens an
    asyncpg pool for the duration of the context, and always closes the
    pool on exit.

    Args:
        server: the FastMCP instance (unused here; required by the
            lifespan protocol signature).

    Yields:
        DbContext holding the open pool and configured schema.
    """
    # Initialize DB connection from environment variables
    dsn = os.environ.get("DSN", "postgresql://postgres:postgres@localhost:5432/postgres")
    schema = os.environ.get("SCHEMA", DEFAULT_SCHEMA)
    pool = await asyncpg.create_pool(dsn)
    try:
        yield DbContext(pool=pool, schema=schema)
    finally:
        # Clean up: close the pool even if the server exits with an error.
        await pool.close()
|
| 42 |
+
|
| 43 |
+
|
| 44 |
+
# Create the FastMCP server. db_lifespan opens the asyncpg pool when the
# server starts and closes it on shutdown; tools reach the pool through
# ctx.request_context.lifespan_context.
mcp = FastMCP(
    "SQL Database Server",
    dependencies=["asyncpg", "pydantic"],
    lifespan=db_lifespan
)
|
| 50 |
+
|
| 51 |
+
|
| 52 |
+
@mcp.tool(description="tests the database connection and returns the PostgreSQL version or an error message.")
async def test_connection(ctx: Context) -> str:
    """Report whether the server can reach PostgreSQL.

    Returns:
        A success string containing the server version, or a failure
        string with the exception text (never raises).
    """
    try:
        pool = ctx.request_context.lifespan_context.pool
        async with pool.acquire() as conn:
            version = await conn.fetchval("SELECT version();")
            return f"Connection successful. PostgreSQL version: {version}"
    except Exception as exc:
        return f"Connection failed: {str(exc)}"
|
| 62 |
+
|
| 63 |
+
|
| 64 |
+
@mcp.tool(description="Executes a read-only SQL SELECT query and returns formatted results or an error message.")
async def execute_query(
    query: str = Field(description="SQL query to execute (SELECT only)"),
    limit: Optional[int] = Field(default=DEFAULT_QUERY_LIMIT, description="Maximum number of rows to return"),
    ctx: Context = None
) -> str:
    """Run a read-only SELECT on the pooled connection and render the
    rows as a pipe-separated text table (header, dashed rule, rows)."""
    # Guard: refuse anything that does not start with SELECT.
    query = query.strip()
    if not query.lower().startswith("select"):
        return "Error: Only SELECT queries are allowed for security reasons."

    try:
        pool = ctx.request_context.lifespan_context.pool
        async with pool.acquire() as conn:
            fetched = await conn.fetch(query)

            if not fetched:
                return "Query executed successfully. No rows returned."

            # Header line and a dashed separator of matching width.
            column_names = list(fetched[0].keys())
            header = " | ".join(column_names)
            separator = "-" * len(header)

            # Cap the rendered rows at `limit` (falling back to the default).
            row_cap = limit if limit else DEFAULT_QUERY_LIMIT
            rendered = [
                " | ".join(str(val) for val in row.values())
                for row in fetched[:row_cap]
            ]

            return f"{header}\n{separator}\n" + "\n".join(rendered)
    except asyncpg.exceptions.PostgresError as e:
        return f"SQL Error: {str(e)}"
    except Exception as e:
        return f"Error: {str(e)}"
|
| 98 |
+
|
| 99 |
+
|
| 100 |
+
# Database helper functions
|
| 101 |
+
async def get_all_tables(pool, schema):
    """Get all tables from the database.

    Queries pg_class/pg_namespace for ordinary ('r') and partitioned
    ('p') tables in `schema`, excluding child partitions (NOT EXISTS on
    pg_inherits) and names starting with 'pg_'.

    Args:
        pool: asyncpg connection pool.
        schema: schema name to search.

    Returns:
        asyncpg Records, each exposing a 'table_name' field, ordered by name.
    """
    async with pool.acquire() as conn:
        result = await conn.fetch("""
            SELECT c.relname AS table_name
            FROM pg_class AS c
            JOIN pg_namespace AS n ON n.oid = c.relnamespace
            WHERE NOT EXISTS (
                SELECT 1
                FROM pg_inherits AS i
                WHERE i.inhrelid = c.oid
            )
            AND c.relkind IN ('r', 'p')
            AND n.nspname = $1
            AND c.relname NOT LIKE 'pg_%'
            ORDER BY c.relname;
        """, schema)

    return result
|
| 120 |
+
|
| 121 |
+
|
| 122 |
+
async def get_table_schema_info(pool, schema, table_name):
    """Get schema information for a specific table.

    Args:
        pool: asyncpg connection pool.
        schema: schema the table lives in.
        table_name: table whose columns are described.

    Returns:
        asyncpg Records with column_name, data_type, is_nullable,
        column_default and character_maximum_length, in ordinal order;
        empty when the table does not exist.
    """
    async with pool.acquire() as conn:
        columns = await conn.fetch("""
            SELECT
                column_name,
                data_type,
                is_nullable,
                column_default,
                character_maximum_length
            FROM information_schema.columns
            WHERE table_schema = $1
            AND table_name = $2
            ORDER BY ordinal_position;
        """, schema, table_name)

    return columns
|
| 139 |
+
|
| 140 |
+
|
| 141 |
+
def format_table_schema(table_name, columns):
    """Render a table's column metadata as human-readable text.

    Args:
        table_name: name of the table being described.
        columns: mapping-like rows (asyncpg Records or dicts) with
            column_name, data_type, is_nullable, column_default and
            character_maximum_length keys.

    Returns:
        A "Table: ... / Columns: ..." listing, or a not-found message
        when columns is empty.
    """
    if not columns:
        return f"Table '{table_name}' not found."

    lines = [f"Table: {table_name}", "Columns:"]
    for column in columns:
        null_marker = "NOT NULL" if column['is_nullable'] != 'YES' else "NULL"
        size = f"({column['character_maximum_length']})" if column['character_maximum_length'] else ""
        default_clause = f" DEFAULT {column['column_default']}" if column['column_default'] else ""
        lines.append(f"- {column['column_name']} ({column['data_type']}{size}) {null_marker}{default_clause}")

    return "\n".join(lines)
|
| 154 |
+
|
| 155 |
+
|
| 156 |
+
@mcp.tool(description="Lists all table names in the configured database schema.")
async def list_tables() -> str:
    """List all tables in the database.

    NOTE(review): this opens a brand-new connection pool on every call
    via db_lifespan() instead of reusing the server's lifespan pool the
    way test_connection() does — consider accepting a Context and using
    ctx.request_context.lifespan_context.pool.

    Returns:
        Newline-separated table names, a "no tables" message, or an
        error string (never raises).
    """
    try:
        async with db_lifespan(mcp) as db_ctx:
            result = await get_all_tables(db_ctx.pool, db_ctx.schema)

            if not result:
                return f"No tables found in the {db_ctx.schema} schema."

            return "\n".join(row['table_name'] for row in result)
    except asyncpg.exceptions.PostgresError as e:
        return f"SQL Error: {str(e)}"
    except Exception as e:
        return f"Error: {str(e)}"
|
| 171 |
+
|
| 172 |
+
|
| 173 |
+
@mcp.tool(description="Retrieves and formats the schema details of a specific table in the database.")
async def get_table_schema(table_name: str) -> str:
    """Get schema information for a specific table.

    NOTE(review): opens a fresh pool per call via db_lifespan() rather
    than reusing the server lifespan pool; see test_connection() for the
    ctx-based pattern.

    Args:
        table_name: table to describe; looked up in the SCHEMA env
            schema (default "public").

    Returns:
        format_table_schema() output, a not-found message, or an error
        string (never raises).
    """
    try:
        schema = os.environ.get("SCHEMA", DEFAULT_SCHEMA)

        async with db_lifespan(mcp) as db_ctx:
            columns = await get_table_schema_info(db_ctx.pool, schema, table_name)

            if not columns:
                return f"Table '{table_name}' not found in {schema} schema."

            return format_table_schema(table_name, columns)
    except asyncpg.exceptions.PostgresError as e:
        return f"SQL Error: {str(e)}"
    except Exception as e:
        return f"Error: {str(e)}"
|
| 190 |
+
|
| 191 |
+
|
| 192 |
+
@mcp.tool(description="Retrieves foreign key relationships for a specified table.")
async def get_foreign_keys(table_name: str) -> str:
    """Get foreign key information for a table.

    Fix: the previous version was a sync function that passed SQL with
    unbound %s placeholders to the async execute_query() without
    awaiting it, so it returned a coroutine object and never ran. It now
    executes the query directly with $1/$2 parameters.

    Args:
        table_name: The name of the table to get foreign keys from.
            The schema is taken from the SCHEMA env var (default 'public').

    Returns:
        A pipe-separated table of constraint name, local column and the
        referenced schema/table/column, a "no foreign keys" message, or
        an error string (never raises).
    """
    schema = os.environ.get("SCHEMA", DEFAULT_SCHEMA)

    sql = """
        SELECT
            tc.constraint_name,
            kcu.column_name AS fk_column,
            ccu.table_schema AS referenced_schema,
            ccu.table_name AS referenced_table,
            ccu.column_name AS referenced_column
        FROM information_schema.table_constraints tc
        JOIN information_schema.key_column_usage kcu
            ON tc.constraint_name = kcu.constraint_name
            AND tc.table_schema = kcu.table_schema
        JOIN information_schema.referential_constraints rc
            ON tc.constraint_name = rc.constraint_name
        JOIN information_schema.constraint_column_usage ccu
            ON rc.unique_constraint_name = ccu.constraint_name
        WHERE tc.constraint_type = 'FOREIGN KEY'
            AND tc.table_schema = $1
            AND tc.table_name = $2
        ORDER BY tc.constraint_name, kcu.ordinal_position
    """

    try:
        async with db_lifespan(mcp) as db_ctx:
            async with db_ctx.pool.acquire() as conn:
                rows = await conn.fetch(sql, schema, table_name)

        if not rows:
            return f"No foreign keys found for table '{table_name}' in schema '{schema}'."

        # Same rendering convention as execute_query: header, rule, rows.
        header = " | ".join(rows[0].keys())
        body = [" | ".join(str(val) for val in row.values()) for row in rows]
        return header + "\n" + "-" * len(header) + "\n" + "\n".join(body)
    except asyncpg.exceptions.PostgresError as e:
        return f"SQL Error: {str(e)}"
    except Exception as e:
        return f"Error: {str(e)}"
|
| 223 |
+
|
| 224 |
+
@mcp.tool(description="Fetches and formats the schema details for all tables in the configured database schema.")
async def get_all_schemas() -> str:
    """Get schema information for all tables in the database.

    NOTE(review): mixes the env-derived local `schema` (passed to
    get_table_schema_info) with db_ctx.schema (passed to
    get_all_tables). Both come from the SCHEMA env var so they agree
    today, but a single source would be cleaner. Also opens a fresh
    pool per call via db_lifespan().

    Returns:
        Every table's formatted schema separated by blank lines, a
        "no tables" message, or an error string (never raises).
    """
    try:
        schema = os.environ.get("SCHEMA", DEFAULT_SCHEMA)

        async with db_lifespan(mcp) as db_ctx:
            tables = await get_all_tables(db_ctx.pool, db_ctx.schema)

            if not tables:
                return f"No tables found in the {db_ctx.schema} schema."

            all_schemas = []
            for table in tables:
                table_name = table['table_name']
                columns = await get_table_schema_info(db_ctx.pool, schema, table_name)
                table_schema = format_table_schema(table_name, columns)
                all_schemas.append(table_schema)
                all_schemas.append("")  # Add empty line between tables

            return "\n".join(all_schemas)
    except asyncpg.exceptions.PostgresError as e:
        return f"SQL Error: {str(e)}"
    except Exception as e:
        return f"Error: {str(e)}"
|
| 249 |
+
|
| 250 |
+
|
| 251 |
+
@mcp.prompt(description="Generates a prompt message to help craft a best-practice SELECT query for a given table.")
async def generate_select_query(table_name: str) -> list[PromptMessage]:
    """Generate a SELECT query with best practices for a table.

    Fetches the table's columns (fresh pool via db_lifespan) and returns
    a single PromptMessage asking the model to write a SELECT following
    the listed guidelines. On any failure a PromptMessage carrying the
    error text is returned instead of raising.
    """
    try:
        async with db_lifespan(mcp) as db_ctx:
            pool = db_ctx.pool
            async with pool.acquire() as conn:
                columns = await conn.fetch("""
                    SELECT column_name, data_type
                    FROM information_schema.columns
                    WHERE table_schema = $1 AND table_name = $2
                    ORDER BY ordinal_position
                """, db_ctx.schema, table_name)

            if not columns:
                return [PromptMessage(f"Table '{table_name}' not found in schema '{db_ctx.schema}'.")]

            # One "- name (type)" line per column for the prompt body.
            columns_text = "\n".join([f"- {col['column_name']} ({col['data_type']})" for col in columns])

            return [
                PromptMessage(
                    f"""Please help me write a well-structured, efficient SELECT query for the '{table_name}' table.

Table Schema:
{columns_text}

PostgreSQL SQL Best Practices:
- Use explicit column names instead of * when possible
- Include LIMIT clauses to restrict result sets
- Consider adding WHERE clauses to filter results
- Use appropriate indexing considerations
- Format SQL with proper indentation and line breaks

Create a basic SELECT query following these best practices:"""
                )
            ]
    except Exception as e:
        return [PromptMessage(f"Error generating select query: {str(e)}")]
|
| 289 |
+
|
| 290 |
+
|
| 291 |
+
@mcp.prompt(description="Generates a prompt message to assist in writing analytical queries for a given table.")
async def generate_analytical_query(table_name: str) -> list[PromptMessage]:
    """Generate analytical queries for a table.

    Same structure as generate_select_query: fetches the table's columns
    (fresh pool via db_lifespan) and returns one PromptMessage asking
    the model for aggregation/analytics queries. Failures are returned
    as a PromptMessage with the error text instead of raising.
    """
    try:
        async with db_lifespan(mcp) as db_ctx:
            pool = db_ctx.pool
            async with pool.acquire() as conn:
                columns = await conn.fetch("""
                    SELECT column_name, data_type
                    FROM information_schema.columns
                    WHERE table_schema = $1 AND table_name = $2
                    ORDER BY ordinal_position
                """, db_ctx.schema, table_name)

            if not columns:
                return [PromptMessage(f"Table '{table_name}' not found in schema '{db_ctx.schema}'.")]

            # One "- name (type)" line per column for the prompt body.
            columns_text = "\n".join([f"- {col['column_name']} ({col['data_type']})" for col in columns])

            return [
                PromptMessage(
                    f"""Please help me create analytical queries for the '{table_name}' table.

Table Schema:
{columns_text}

PostgreSQL SQL Best Practices:
- Use aggregation functions (COUNT, SUM, AVG, MIN, MAX) appropriately
- Group data using GROUP BY for meaningful aggregations
- Filter groups with HAVING clauses when needed
- Consider using window functions for advanced analytics
- Format SQL with proper indentation and line breaks

Create a set of analytical queries for this table:"""
                )
            ]
    except Exception as e:
        return [PromptMessage(f"Error generating analytical query: {str(e)}")]
|
| 329 |
+
|
| 330 |
+
|
| 331 |
+
|
| 332 |
+
|
| 333 |
+
|
| 334 |
+
@mcp.tool(description="Identifies both explicit and implied foreign key relationships for a given table using schema analysis and naming patterns.")
def find_relationships(table_name: str, schema: str = 'public') -> str:
    """Find both explicit and implied relationships for a table.

    BUG(review): this tool cannot work as written —
      * execute_query() is an async coroutine function, but it is called
        below without await, so fk_results / implied_results are
        coroutine objects, never query output;
      * the %s placeholders in both SQL strings are never bound to
        (schema, table_name), and execute_query() accepts no parameter
        arguments anyway;
      * execute_query() never returns the string "No results found", so
        the combined-empty check can never trigger;
      * the `schema` parameter is accepted but never used.
    Left byte-identical pending a rewrite — see get_foreign_keys for a
    working parameterized pattern.

    Args:
        table_name: The name of the table to analyze relationships for
        schema: The schema name (defaults to 'public')
    """
    try:
        # First get explicit foreign key relationships
        fk_sql = """
        SELECT
            kcu.column_name,
            ccu.table_name as foreign_table,
            ccu.column_name as foreign_column,
            'Explicit FK' as relationship_type,
            1 as confidence_level
        FROM information_schema.table_constraints tc
        JOIN information_schema.key_column_usage kcu
            ON tc.constraint_name = kcu.constraint_name
            AND tc.table_schema = kcu.table_schema
        JOIN information_schema.constraint_column_usage ccu
            ON ccu.constraint_name = tc.constraint_name
            AND ccu.table_schema = tc.table_schema
        WHERE tc.constraint_type = 'FOREIGN KEY'
        AND tc.table_schema = %s
        AND tc.table_name = %s
        """

        # Then look for implied relationships based on common patterns
        implied_sql = """
        WITH source_columns AS (
            -- Get all ID-like columns from our table
            SELECT column_name, data_type
            FROM information_schema.columns
            WHERE table_schema = %s
            AND table_name = %s
            AND (
                column_name LIKE '%%id'
                OR column_name LIKE '%%_id'
                OR column_name LIKE '%%_fk'
            )
        ),
        potential_references AS (
            -- Find tables that might be referenced by our ID columns
            SELECT DISTINCT
                sc.column_name as source_column,
                sc.data_type as source_type,
                t.table_name as target_table,
                c.column_name as target_column,
                c.data_type as target_type,
                CASE
                    -- Highest confidence: column matches table_id pattern and types match
                    WHEN sc.column_name = t.table_name || '_id'
                        AND sc.data_type = c.data_type THEN 2
                    -- High confidence: column ends with _id and types match
                    WHEN sc.column_name LIKE '%%_id'
                        AND sc.data_type = c.data_type THEN 3
                    -- Medium confidence: column contains table name and types match
                    WHEN sc.column_name LIKE '%%' || t.table_name || '%%'
                        AND sc.data_type = c.data_type THEN 4
                    -- Lower confidence: column ends with id and types match
                    WHEN sc.column_name LIKE '%%id'
                        AND sc.data_type = c.data_type THEN 5
                END as confidence_level
            FROM source_columns sc
            CROSS JOIN information_schema.tables t
            JOIN information_schema.columns c
                ON c.table_schema = t.table_schema
                AND c.table_name = t.table_name
                AND (c.column_name = 'id' OR c.column_name = sc.column_name)
            WHERE t.table_schema = %s
            AND t.table_name != %s -- Exclude self-references
        )
        SELECT
            source_column as column_name,
            target_table as foreign_table,
            target_column as foreign_column,
            CASE
                WHEN confidence_level = 2 THEN 'Strong implied relationship (exact match)'
                WHEN confidence_level = 3 THEN 'Strong implied relationship (_id pattern)'
                WHEN confidence_level = 4 THEN 'Likely implied relationship (name match)'
                ELSE 'Possible implied relationship'
            END as relationship_type,
            confidence_level
        FROM potential_references
        WHERE confidence_level IS NOT NULL
        ORDER BY confidence_level, source_column;
        """

        # Execute both queries and combine results
        # BUG(review): missing await — these are coroutine objects, not results.
        fk_results = execute_query(fk_sql)
        implied_results = execute_query(implied_sql)

        # If both queries returned "No results found", return that
        # BUG(review): execute_query never produces this string, so this
        # branch is unreachable.
        if fk_results == "No results found" and implied_results == "No results found":
            return "No relationships found for this table"

        # Otherwise, return both sets of results
        return f"Explicit Foreign Keys:\n{fk_results}\n\nImplied Relationships:\n{implied_results}"

    except Exception as e:
        return f"Error finding relationships: {str(e)}"
|
| 437 |
+
|
| 438 |
+
|
| 439 |
+
if __name__ == "__main__":
    # Start the MCP server when this file is executed directly
    # (presumably over stdio transport, FastMCP's default — confirm).
    mcp.run()
|