Spaces:
Paused
Paused
import gradio as gr
import psycopg2
from openai import OpenAI
import json
import os
from typing import List, Dict
from pgvector.psycopg2 import register_vector
import numpy as np
| # DB ์ฐ๊ฒฐ ์ค์ | |
| def get_db_conn(): | |
| return psycopg2.connect( | |
| host=os.environ["VECTOR_HOST"], | |
| port=5432, | |
| dbname=os.environ["VECTOR_DBNAME"], | |
| user=os.environ["VECTOR_USER"], | |
| password=os.environ["VECTOR_SECRET"] | |
| ) | |
client = OpenAI()  # picks up OPENAI_API_KEY from the environment automatically
def get_embedding(text: str, model: str = "text-embedding-ada-002") -> List[float]:
    """Convert *text* into an embedding vector via the OpenAI API.

    Args:
        text: Text to embed.
        model: Embedding model name. Defaults to the previously hard-coded
            "text-embedding-ada-002", so existing callers are unaffected.

    Returns:
        List[float]: the embedding vector for the first (only) input.
    """
    response = client.embeddings.create(
        input=text,
        model=model,
    )
    return response.data[0].embedding
def search_similar_chats(query: str, maxResults: int = 10) -> List[Dict]:
    """Search chat-analysis documents most similar to *query*.

    Embeds the query text and runs a pgvector nearest-neighbor search over
    rows whose metadata marks them as 'chatAnalysis' documents.

    Args:
        query: Query text to search for.
        maxResults: Maximum number of results to return.

    Returns:
        List[Dict]: one dict per row with keys "id", "metadata", "content"
        and "distance" (pgvector `<#>` negative inner product; smaller is
        more similar).
    """
    embedding = np.array(get_embedding(query))  # pgvector adapter expects a numpy array
    conn = get_db_conn()
    try:
        register_vector(conn)  # enable automatic vector-type conversion
        with conn.cursor() as cur:
            cur.execute(
                """
                SELECT id, metadata, content, embedding <#> %s AS distance
                FROM vector_store
                WHERE metadata->>'documentType' = 'chatAnalysis'
                ORDER BY embedding <#> %s
                LIMIT %s
                """,
                (embedding, embedding, maxResults),
            )
            rows = cur.fetchall()
    finally:
        # Fix: previously the connection leaked if execute/fetch raised;
        # always close it regardless of query outcome.
        conn.close()
    return [
        {
            "id": row[0],
            "metadata": row[1],
            "content": row[2],
            "distance": row[3],
        }
        for row in rows
    ]
def search_chats_by_category(category: str, maxResults: int = 10) -> List[Dict]:
    """Search chat-analysis documents belonging to a specific category.

    Args:
        category: Category name to match against metadata->>'category'.
        maxResults: Maximum number of results to return.

    Returns:
        List[Dict]: one dict per row with keys "id", "metadata", "content".
        Note: the query has no ORDER BY, so which rows fill the LIMIT is
        unspecified by PostgreSQL.
    """
    conn = get_db_conn()
    try:
        with conn.cursor() as cur:
            cur.execute(
                """
                SELECT id, metadata, content
                FROM vector_store
                WHERE metadata->>'documentType' = 'chatAnalysis'
                  AND metadata->>'category' = %s
                LIMIT %s
                """,
                (category, maxResults),
            )
            rows = cur.fetchall()
    finally:
        # Fix: previously the connection leaked if execute/fetch raised;
        # always close it regardless of query outcome.
        conn.close()
    return [
        {
            "id": row[0],
            "metadata": row[1],
            "content": row[2],
        }
        for row in rows
    ]
def search_chats_by_date(startDate: str = None, endDate: str = None, maxResults: int = 10) -> List[Dict]:
    """Search chat-analysis documents within an optional date range.

    Filters on metadata->>'startTime' cast to timestamp. Either bound may
    be omitted (None or "").

    Args:
        startDate: Inclusive start date (YYYY-MM-DD), or None/"" for no
            lower bound.
        endDate: Exclusive end bound (YYYY-MM-DD) — rows ON this date are
            NOT included because the comparison is `< endDate` at midnight.
        maxResults: Maximum number of results to return.

    Returns:
        List[Dict]: one dict per row with keys "id", "metadata", "content".
        Note: the query has no ORDER BY, so which rows fill the LIMIT is
        unspecified by PostgreSQL.
    """
    conn = get_db_conn()
    query = """
        SELECT id, metadata, content
        FROM vector_store
        WHERE metadata->>'documentType' = 'chatAnalysis'
    """
    params = []
    # Treat both None and empty string as "no bound" (Gradio sends "").
    if startDate not in (None, ""):
        query += " AND (metadata->>'startTime')::timestamp >= %s"
        params.append(startDate)
    if endDate not in (None, ""):
        query += " AND (metadata->>'startTime')::timestamp < %s"
        params.append(endDate)
    query += " LIMIT %s"
    params.append(maxResults)
    try:
        with conn.cursor() as cur:
            cur.execute(query, tuple(params))
            rows = cur.fetchall()
    finally:
        # Fix: previously the connection leaked if execute/fetch raised;
        # always close it regardless of query outcome.
        conn.close()
    return [
        {
            "id": row[0],
            "metadata": row[1],
            "content": row[2],
        }
        for row in rows
    ]
| # Gradio Blocks์ ํจ์ ๋ฑ๋ก | |
| with gr.Blocks() as demo: | |
| gr.Markdown("# MCP ToolService ์์") | |
| gr.Interface(fn=search_similar_chats, inputs=["text", "number"], outputs="json", title="search_similar_chats") | |
| gr.Interface(fn=search_chats_by_category, inputs=["text", "number"], outputs="json", title="search_chats_by_category") | |
| gr.Interface(fn=search_chats_by_date, inputs=["text", "text", "number"], outputs="json", title="search_chats_by_date") | |
| if __name__ == "__main__": | |
| demo.launch(mcp_server=True) |