jack4241 committed on
Commit
a13174b
·
1 Parent(s): 8a433f6

test grade + mcp

Browse files
Files changed (1) hide show
  1. app.py +83 -87
app.py CHANGED
@@ -1,122 +1,118 @@
 
 
 
 
 
 
 
 
1
  import logging
 
2
  import sys
3
  from pathlib import Path
4
  from typing import Dict, List, Any
5
- import gradio as gr
6
- import threading
7
- import uvicorn
8
-
9
  sys.path.append(str(Path(__file__).parent / "src"))
10
  from src.IberleyAI.Tools.IberleyIAPrompt import IberleyIAPrompt
11
  from fastmcp import FastMCP
12
 
 
13
  logging.basicConfig(level=logging.INFO)
14
  logger = logging.getLogger(__name__)
15
 
16
- def create_mcp():
 
 
 
 
 
 
 
 
 
17
  mcp = FastMCP(name="IberleyIA MCP Server",
18
- instructions="Provides legal search and fetch tools.")
19
 
20
  @mcp.tool()
21
  async def search(query: str) -> Dict[str, List[Dict[str, Any]]]:
 
 
 
 
 
 
 
 
 
 
 
 
 
 
22
  if not query or not query.strip():
23
  return {"content": []}
 
24
  response = IberleyIAPrompt.iberley_ia_prompt(query)
25
- return {"content": [{"type": "text", "text": response}]}
 
 
 
 
 
 
 
 
 
26
 
27
  @mcp.tool()
28
  async def fetch(id: str) -> Dict[str, Any]:
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
29
  if not id or not id.strip():
30
  return {"content": []}
 
31
  response = IberleyIAPrompt.iberley_ia_prompt(id)
32
- return {"content": [{"type": "text", "text": response}]}
 
 
 
 
 
 
 
 
 
33
 
34
  return mcp
35
 
36
 
37
  def main():
38
- mcp = create_mcp()
39
-
40
- def run_search(query):
41
- return IberleyIAPrompt.iberley_ia_prompt(query)
42
-
43
- def run_fetch(doc_id):
44
- return IberleyIAPrompt.iberley_ia_prompt(doc_id)
45
-
46
- with gr.Blocks() as demo:
47
- gr.Markdown("## IberleyIA - Manual Testing UI")
48
 
49
- with gr.Tab("Search"):
50
- query = gr.Textbox(label="Query")
51
- output = gr.Textbox(label="Response")
52
- gr.Button("Run Search").click(fn=run_search, inputs=query, outputs=output)
53
 
54
- with gr.Tab("Fetch"):
55
- doc_id = gr.Textbox(label="Document ID")
56
- output2 = gr.Textbox(label="Response")
57
- gr.Button("Run Fetch").click(fn=run_fetch, inputs=doc_id, outputs=output2)
58
-
59
- # 1) Lanza Gradio en 7860 (único puerto permitido en HF Spaces)
60
- app, _, _ = demo.launch(
61
- server_name="0.0.0.0",
62
- server_port=7860,
63
- share=False,
64
- prevent_thread_lock=True,
65
- )
66
-
67
- # 2) Monta el endpoint MCP en /sse
68
- try:
69
- sse_app = mcp.create_app(transport="sse")
70
- app.mount("/sse", sse_app)
71
- logger.info("Mounted MCP SSE endpoint at /sse")
72
- except AttributeError:
73
- logger.error("Tu versión de fastmcp no tiene create_app(), tendrás que actualizar fastmcp.")
74
-
75
- mcp = create_mcp()
76
-
77
- def run_search(query):
78
- return IberleyIAPrompt.iberley_ia_prompt(query)
79
-
80
- def run_fetch(doc_id):
81
- return IberleyIAPrompt.iberley_ia_prompt(doc_id)
82
-
83
- with gr.Blocks() as demo:
84
- gr.Markdown("## IberleyIA - Manual Testing UI")
85
-
86
- with gr.Tab("Search"):
87
- query = gr.Textbox(label="Query")
88
- output = gr.Textbox(label="Response")
89
- gr.Button("Run Search").click(fn=run_search, inputs=query, outputs=output)
90
-
91
- with gr.Tab("Fetch"):
92
- doc_id = gr.Textbox(label="Document ID")
93
- output2 = gr.Textbox(label="Response")
94
- gr.Button("Run Fetch").click(fn=run_fetch, inputs=doc_id, outputs=output2)
95
-
96
- # 1) Gradio corre en 7860
97
- app, _, _ = demo.launch(
98
- server_name="0.0.0.0",
99
- server_port=7860,
100
- share=False,
101
- prevent_thread_lock=True,
102
- )
103
-
104
- # 2) MCP corre en /sse dentro del mismo servidor uvicorn
105
- # Si FastMCP soporta create_app():
106
  try:
107
- sse_app = mcp.create_app(transport="sse")
108
- app.mount("/sse", sse_app)
109
- logger.info("Mounted MCP SSE endpoint at /sse")
110
- except AttributeError:
111
- logger.error("Tu versión de fastmcp no soporta create_app(). Usa mcp.run en otro hilo.")
112
- # fallback: lanzar MCP en thread separado (aunque intentará usar el mismo puerto)
113
- threading.Thread(
114
- target=lambda: mcp.run(transport="sse", host="0.0.0.0", port=7860, path="/sse"),
115
- daemon=True
116
- ).start()
117
-
118
- # 3) Mantener vivo uvicorn
119
- uvicorn.run(app, host="0.0.0.0", port=7860)
120
 
121
 
122
  if __name__ == "__main__":
 
1
+ #!/usr/bin/env python3
2
+ """
3
+ Sample MCP Server for ChatGPT Deep Research Integration
4
+
5
+ This server implements the Model Context Protocol (MCP) with search and fetch
6
+ capabilities designed to work with ChatGPT's deep research feature.
7
+ """
8
+
9
  import logging
10
+ import os
11
  import sys
12
  from pathlib import Path
13
  from typing import Dict, List, Any
 
 
 
 
14
  sys.path.append(str(Path(__file__).parent / "src"))
15
  from src.IberleyAI.Tools.IberleyIAPrompt import IberleyIAPrompt
16
  from fastmcp import FastMCP
17
 
18
+ # Configure logging
19
  logging.basicConfig(level=logging.INFO)
20
  logger = logging.getLogger(__name__)
21
 
22
+ server_instructions = """
23
+ This MCP server provides search and document retrieval capabilities
24
+ for deep research. Use the search tool to find relevant documents
25
+ based on keywords, then use the fetch tool to retrieve complete
26
+ document content with citations.
27
+ """
28
+
29
def create_server():
    """Create and configure the MCP server with search and fetch tools.

    Returns:
        FastMCP: server instance exposing the ``search`` and ``fetch``
        tools, ready to be run over an SSE transport.
    """
    mcp = FastMCP(name="IberleyIA MCP Server",
                  instructions=server_instructions)

    @mcp.tool()
    async def search(query: str) -> Dict[str, List[Dict[str, Any]]]:
        """
        Search for legal information through the IberleyAI chatbot connection.

        Sends the natural-language query to IberleyIAPrompt and wraps the
        answer in an MCP content payload. Use the fetch tool afterwards to
        retrieve complete document content.

        Args:
            query: Search query string. Natural language queries work best
                for semantic search.

        Returns:
            Dictionary with a 'content' key holding a single
            {"type": "text", "text": ...} entry, or an empty list when the
            query is blank.
        """
        # Guard: blank or whitespace-only queries yield an empty payload.
        if not query or not query.strip():
            return {"content": []}

        response = IberleyIAPrompt.iberley_ia_prompt(query)

        return {"content": [{"type": "text", "text": response}]}

    @mcp.tool()
    async def fetch(id: str) -> Dict[str, Any]:
        """
        Fetch a legal document from the IberleyAI chatbot connection.

        Given a document identifier (or a natural-language reference to a
        legal document), retrieves the full content via IberleyIAPrompt.

        Args:
            id: Document identifier string.

        Returns:
            Dictionary with a 'content' key holding a single
            {"type": "text", "text": ...} entry, or an empty list when the
            id is blank.
        """
        # Guard: blank or whitespace-only ids yield an empty payload.
        if not id or not id.strip():
            return {"content": []}

        response = IberleyIAPrompt.iberley_ia_prompt(id)

        return {"content": [{"type": "text", "text": response}]}

    return mcp
99
 
100
 
101
def main():
    """Entry point: build the MCP server and serve it over SSE.

    Binds to 0.0.0.0:7860 — the only port exposed on Hugging Face Spaces.
    The client connection string should be
    "https://iberley-iberleyia.hf.space/sse".
    """
    server = create_server()

    logger.info("Starting MCP server on 0.0.0.0:7860")
    logger.info("Server will be accessible via SSE transport")

    try:
        # Blocks until the process is interrupted or the server errors out.
        server.run(transport="sse", host="0.0.0.0", port=7860)
    except KeyboardInterrupt:
        logger.info("Server stopped by user")
    except Exception as e:
        # logger.exception records the traceback; lazy %-args avoid eager
        # f-string formatting. Re-raise so the failure is visible to the host.
        logger.exception("Server error: %s", e)
        raise
 
 
 
 
 
 
 
116
 
117
 
118
  if __name__ == "__main__":