SyngyeonTak committed
Commit · e29b52a
1 Parent(s): 64fa191

region_extractor updates

Files changed: region_extractor.py (+52 -2)
region_extractor.py CHANGED
@@ -4,15 +4,25 @@ import faiss
 import numpy as np
 from sentence_transformers import SentenceTransformer
 from huggingface_hub import hf_hub_download
+from openai import OpenAI  # added
 
 DATA_REPO = "Syngyeon/seoulalpha-data"
 MODEL_NAME = "jhgan/ko-sbert-nli"
 
+# Initialize the OpenAI client
+client = OpenAI(api_key=os.getenv("API_KEY"))  # added
+
 # Loader
 def _load_region_index():
     try:
-        index_path = hf_hub_download(
-
+        index_path = hf_hub_download(
+            repo_id=DATA_REPO, repo_type="dataset",
+            filename="data/faiss/region_db/faiss_region_semantic.index"
+        )
+        metadata_path = hf_hub_download(
+            repo_id=DATA_REPO, repo_type="dataset",
+            filename="data/faiss/region_db/metadata_region_semantic.jsonl"
+        )
 
         index = faiss.read_index(index_path)
         model = SentenceTransformer(MODEL_NAME)
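One thing worth noting about this hunk: the new module-level line calls os.getenv, and the function added in the next hunk calls json.loads, yet neither os nor json appears among the imports visible here. They may already be imported in lines 1-3, which the diff does not show; if not, importing the module raises NameError. A minimal sketch of the top-of-file imports the added code appears to assume (an assumption, not part of the commit):

    # Imports the added code relies on. Lines 1-3 of region_extractor.py are
    # outside this diff, so os and json may already be imported there.
    import os      # os.getenv("API_KEY") runs at module import time
    import json    # json.loads(...) runs inside extract_region_from_query
    import faiss   # existing import, visible as hunk context

Because the client is constructed at import time, a missing key also surfaces on import: the openai client raises an error during construction if no key is available via the api_key argument or the OPENAI_API_KEY environment variable.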
@@ -45,3 +55,43 @@ def extract_region_semantic(user_query, top_k=5):
         if vid in region_meta:
             results.append(region_meta[vid]["region_name"])
     return results
+
+
+def extract_region_from_query(user_query):
+    """
+    Uses an LLM to extract a list of region-name keywords from the user's question.
+    """
+    print("[LLM] Extracting region-name keywords from the user query...")
+
+    system_prompt = """
+    You are an AI assistant that extracts 'South Korean administrative region' keywords from a user's travel-related question.
+    Analyze the user's question and return, in JSON format, a list of keywords that can be used for address filtering.
+    The result must have the form {"regions": ["keyword1", "keyword2", ...]}.
+
+    - Interpret "Jeolla-do" as "Jeonbuk", "Jeonnam", and "Gwangju".
+    - Interpret "Gyeongsang-do" as "Gyeongbuk", "Gyeongnam", "Busan", "Daegu", and "Ulsan".
+    - Interpret "Chungcheong-do" as "Chungbuk", "Chungnam", "Daejeon", and "Sejong".
+    - Interpret "near Seoul" as "Gyeonggi" and "Incheon".
+    - If no region is mentioned, return an empty list [].
+    """
+
+    messages = [
+        {"role": "system", "content": system_prompt},
+        {"role": "user", "content": user_query}
+    ]
+
+    try:
+        response = client.chat.completions.create(
+            model="gpt-3.5-turbo",
+            messages=messages,
+            response_format={"type": "json_object"}
+        )
+        result = json.loads(response.choices[0].message.content)
+
+        if 'regions' in result and isinstance(result['regions'], list):
+            return result['regions']
+        else:
+            return []
+    except Exception as e:
+        print(f"[LLM] Error while extracting region names: {e}")
+        return []
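For context, a minimal usage sketch combining the new LLM extractor with the existing FAISS-based one. The function names and the top_k parameter come from this diff; the sample query, the import path, and the merge step are illustrative assumptions, and API_KEY must be set in the environment:

    # Hypothetical usage sketch, not part of the commit.
    from region_extractor import extract_region_from_query, extract_region_semantic

    query = "Quiet beach towns in Jeolla-do for a weekend trip"

    llm_regions = extract_region_from_query(query)       # e.g. ["Jeonbuk", "Jeonnam", "Gwangju"]
    semantic_regions = extract_region_semantic(query, top_k=5)

    # Merge the two candidate lists, preserving order and dropping duplicates.
    regions = list(dict.fromkeys(llm_regions + semantic_regions))
    print(regions)

Passing response_format={"type": "json_object"} enables the API's JSON mode, which is what makes the bare json.loads(response.choices[0].message.content) safe. JSON mode requires a model snapshot that supports it (for gpt-3.5-turbo, the 1106 snapshot and later) and a prompt that mentions JSON, which the system prompt here does.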