Jake-seong commited on
Commit
30efae8
·
verified ·
1 Parent(s): 5352f0e

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +130 -17
app.py CHANGED
@@ -1,28 +1,141 @@
1
  import gradio as gr
 
 
 
 
 
2
 
3
- def letter_counter(word, letter):
 
 
 
 
 
 
 
 
 
 
 
 
 
4
  """
5
- Count the number of occurrences of a letter in a word or text.
 
 
 
 
 
 
 
 
6
 
7
  Args:
8
- word (str): The input text to search through
9
- letter (str): The letter to search for
10
 
11
  Returns:
12
- str: A message indicating how many times the letter appears
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
13
  """
14
- word = word.lower()
15
- letter = letter.lower()
16
- count = word.count(letter)
17
- return count
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
18
 
19
- demo = gr.Interface(
20
- fn=letter_counter,
21
- inputs=["textbox", "textbox"],
22
- outputs="number",
23
- title="Letter Counter",
24
- description="Enter text and a letter to count how many times the letter appears in the text."
25
- )
26
 
27
  if __name__ == "__main__":
28
- demo.launch(mcp_server=True)
 
1
  import gradio as gr
2
+ import psycopg2
3
+ import openai
4
+ import json
5
+ import os
6
+ from typing import List, Dict
7
 
8
# Database connection settings
def get_db_conn():
    """Open a new psycopg2 connection to the chat vector store.

    Connection parameters are read from the ``VECTOR_*`` environment
    variables; a missing variable raises ``KeyError``.

    Returns:
        psycopg2 connection object (caller is responsible for closing it).
    """
    env = os.environ
    return psycopg2.connect(
        host=env["VECTOR_HOST"],
        port=5432,
        dbname=env["VECTOR_DBNAME"],
        user=env["VECTOR_USER"],
        password=env["VECTOR_SECRET"],
    )
17
+
18
# Embedding helper (OpenAI API example)
def get_embedding(text: str) -> List[float]:
    """Convert a text string into an embedding vector.

    NOTE(review): this uses the legacy ``openai.Embedding`` interface,
    which was removed in openai>=1.0 — confirm the pinned openai version.

    Args:
        text (str): text to embed.

    Returns:
        List[float]: the embedding vector for *text*.
    """
    resp = openai.Embedding.create(
        model="text-embedding-3-large",
        input=text,
    )
    return resp["data"][0]["embedding"]
28
+
29
def search_similar_chats(query: str, maxResults: int = 10) -> List[Dict]:
    """Search for chat documents most similar to *query*.

    Args:
        query (str): query text to embed and search with.
        maxResults (int): maximum number of results to return.

    Returns:
        List[Dict]: rows with ``id``, ``metadata``, ``content`` and
            ``distance`` (inner-product distance; smaller = more similar).
    """
    embedding = get_embedding(query)
    # psycopg2 adapts a Python list to a Postgres ARRAY, which pgvector's
    # <#> operator cannot use without register_vector(); pass the pgvector
    # text literal ("[x1,x2,...]") and cast it to vector instead.
    vec = "[" + ",".join(map(str, embedding)) + "]"
    conn = get_db_conn()
    try:
        with conn.cursor() as cur:
            cur.execute(
                """
                SELECT id, metadata, content, embedding <#> %s::vector AS distance
                FROM chat_vector_table
                WHERE metadata->>'documentType' = 'chatAnalysis'
                ORDER BY embedding <#> %s::vector
                LIMIT %s
                """,
                (vec, vec, maxResults),
            )
            rows = cur.fetchall()
    finally:
        # release the connection even if the query raises
        conn.close()
    return [
        {
            "id": row[0],
            "metadata": row[1],
            "content": row[2],
            "distance": row[3],
        }
        for row in rows
    ]
61
+
62
def search_chats_by_category(category: str, maxResults: int = 10) -> List[Dict]:
    """Search chat documents belonging to a specific category.

    Args:
        category (str): category name (matched against
            ``metadata->>'category'``).
        maxResults (int): maximum number of results to return.

    Returns:
        List[Dict]: rows with ``id``, ``metadata`` and ``content``.
    """
    conn = get_db_conn()
    try:
        with conn.cursor() as cur:
            cur.execute(
                """
                SELECT id, metadata, content
                FROM chat_vector_table
                WHERE metadata->>'documentType' = 'chatAnalysis'
                  AND metadata->>'category' = %s
                LIMIT %s
                """,
                (category, maxResults),
            )
            rows = cur.fetchall()
    finally:
        # release the connection even if the query raises
        conn.close()
    return [
        {
            "id": row[0],
            "metadata": row[1],
            "content": row[2],
        }
        for row in rows
    ]
92
+
93
def search_chats_by_date(startDate: str = None, endDate: str = None, maxResults: int = 10) -> List[Dict]:
    """Search chat documents within the given date range.

    Args:
        startDate (str): inclusive start date (YYYY-MM-DD), or None for
            no lower bound.
        endDate (str): exclusive end date (YYYY-MM-DD), or None for no
            upper bound.
        maxResults (int): maximum number of results to return.

    Returns:
        List[Dict]: rows with ``id``, ``metadata`` and ``content``.
    """
    # Build the statement first; only open the connection once we are
    # ready to execute, and always close it afterwards.
    query = """
        SELECT id, metadata, content
        FROM chat_vector_table
        WHERE metadata->>'documentType' = 'chatAnalysis'
    """
    params = []
    if startDate:
        query += " AND (metadata->>'startTime')::timestamp >= %s"
        params.append(startDate)
    if endDate:
        query += " AND (metadata->>'startTime')::timestamp < %s"
        params.append(endDate)
    query += " LIMIT %s"
    params.append(maxResults)
    conn = get_db_conn()
    try:
        with conn.cursor() as cur:
            cur.execute(query, tuple(params))
            rows = cur.fetchall()
    finally:
        # release the connection even if the query raises
        conn.close()
    return [
        {
            "id": row[0],
            "metadata": row[1],
            "content": row[2],
        }
        for row in rows
    ]
132
 
133
# Register the tool functions on a Gradio Blocks app (served as MCP tools)
with gr.Blocks() as demo:
    gr.Markdown("# MCP ToolService 예시")
    # gr.Interface's endpoint-naming parameter is ``api_name`` — there is
    # no ``name`` keyword, so passing ``name=`` raises TypeError.
    gr.Interface(fn=search_similar_chats, inputs=["text", "number"],
                 outputs="json", api_name="search_similar_chats")
    gr.Interface(fn=search_chats_by_category, inputs=["text", "number"],
                 outputs="json", api_name="search_chats_by_category")
    gr.Interface(fn=search_chats_by_date, inputs=["text", "text", "number"],
                 outputs="json", api_name="search_chats_by_date")


if __name__ == "__main__":
    # mcp_server=True exposes the registered endpoints as MCP tools
    demo.launch(mcp_server=True)