# NOTE(review): removed non-Python export artifacts ("Spaces:", "Sleeping" x2)
# that preceded the imports; they would be a SyntaxError in a .py module.
| import os | |
| import json | |
| import openai | |
| from openai import OpenAI # π¦ Use Sync Client | |
| from dotenv import load_dotenv | |
| from abc import ABC, abstractmethod | |
| # π¦ Debugging Environment Loading | |
| print(f"π¦ [Brain Init] Current CWD: {os.getcwd()}") | |
| load_dotenv() | |
| load_dotenv(os.path.join(os.path.dirname(__file__), "../../../.env")) | |
| API_KEY = os.getenv("OPENAI_API_KEY") | |
| if API_KEY: | |
| print(f"β [Brain Init] API Key Found: {API_KEY[:5]}***") | |
| client = OpenAI(api_key=API_KEY) # π¦ Sync Client | |
| else: | |
| print("β [Brain Init] API Key NOT FOUND in environment!") | |
| client = None | |
class PersonaStrategy(ABC):
    """Interface for persona-specific prompt builders.

    A strategy supplies the system prompts used by BrainHub at its four
    LLM call sites: search-query generation, final answer composition,
    chat-only replies, and page-contextual suggestion generation.

    Fix: the class inherited ABC but its methods were plain ``pass``
    stubs, so incomplete subclasses instantiated fine and the stubs
    silently returned None. All four methods are now @abstractmethod.
    """

    @abstractmethod
    def get_query_gen_prompt(self, history_txt):
        """Return the system prompt for generating refined search keywords."""

    @abstractmethod
    def get_final_answer_prompt(self, context, refined_context, trend, history_txt):
        """Return the system prompt for composing the final product answer."""

    @abstractmethod
    def get_chat_only_prompt(self, history_txt):
        """Return the system prompt for small talk without product data."""

    @abstractmethod
    def get_suggestion_prompt(self, path, page_context):
        """Return the system prompt for pre-generated Q&A suggestions."""
class NyangPersona(PersonaStrategy):
    """Prompt strategy for 'Nyang-i', the cat mascot of the pet-supplies
    shopping mall.

    Every method returns a Korean system prompt. The prompts instruct the
    model to answer in the mascot's speech style (sentences ending in
    '~λ€λ₯' / '~λ₯') and, where a machine-readable reply is needed, to emit
    only a JSON list.
    """

    def get_query_gen_prompt(self, history_txt):
        # Asks the model to decide, from chat history plus first-pass search
        # results, whether up to 3 supplementary keywords are needed; output
        # contract is a bare JSON list (empty list for small talk).
        return f"λΉμ μ μ΅κ³ μ μν κ²μ μ λ΅κ° 'λ₯μ΄'μ λλ€. [μ΄μ λν]λ₯Ό μ°Έκ³ νμ¬ μ¬μ©μμ νμ¬ μ§λ¬Έ μλλ₯Ό νμ νμΈμ. 1μ°¨ κ²μ κ²°κ³Όμ [κ΅°μ§ λΆμ] λ΄μ©μ λ³΄κ³ , μ λ³΄κ° λΆμ‘±νλ€λ©΄ **μ λ° κ²μ ν€μλ 3κ°**λ₯Ό μμ±νμΈμ. λ§μ½ μ‘λ΄μ΄λΌλ©΄ λΉ λ¦¬μ€νΈ []λ₯Ό λ°ννμΈμ.\n\n[μ΄μ λν]\n{history_txt}\n\n[μ μ½ μ¬ν]\nμΆλ ₯μ μ€μ§ JSON 리μ€νΈ νμμ΄μ΄μΌ ν©λλ€. μ: [\"ν€μλ1\", \"ν€μλ2\"]"

    def get_final_answer_prompt(self, context, refined_context, trend, history_txt):
        # Full persona prompt for the final recommendation: requires exact
        # product names/prices and markdown links like [name](/product/123).
        return f"""μ§λ°°μΈλ, λ°κ°λ€λ₯! λ₯μ΄κ° μ΅κ³ μ κΏν 리μ€νΈλ₯Ό κ°μ Έμλ€λ₯! π¦β¨πΎ
λΉμ μ 'λ€μ΄λ°λ₯' μΌνλͺ°μ AI λΉμ 'λ₯μ΄'μ λλ€.
[μ΄μ λν]
{history_txt}
[μ§μΉ¨]
1. [λ°μ΄ν°]μ μλ μνλ€μ λ°νμΌλ‘ μΆμ²ν΄μ£ΌμΈμ.
2. **μνλͺ , κ°κ²©**μ μ ννκ² μΈκΈνμΈμ.
3. μνλͺ μ λ°λμ **[μνλͺ ](λ§ν¬)** νμμ λ§ν¬λ€μ΄ λ§ν¬λ‘ μμ±νμ¬ ν΄λ¦ μ μ΄λν μ μκ² νμΈμ. (μ: [λ§μλ μΈλ₯΄](/product/123))
4. λ§ν¬λ 무쑰건 '~λ€λ₯', '~λ₯'μ λλ€.
5. μ¬κ³ κ° μλ€λ©΄ "μ§κΈ λ°λ‘ ꡬ맀 κ°λ₯νλ€λ₯!" μ΄λΌκ³ λ§λΆμ΄μΈμ.
[λ°μ΄ν°]:
{context}
[μ λ° λ°μ΄ν°]:
{refined_context}
[νΈλ λ]:
{trend}"""

    def get_chat_only_prompt(self, history_txt):
        # Small-talk prompt: persona voice only, no product data injected.
        return f"μ§λ°°μΈλκ³Ό μ¦κ²κ² μλ€λ₯Ό λ λ AI κ³ μμ΄ 'λ₯μ΄'μ λλ€. λ§ν¬λ '~λ€λ₯'μ λλ€.\n\n[μ΄μ λν]\n{history_txt}"

    def get_suggestion_prompt(self, path, page_context):
        # Prompt for pre-computed suggestion chips: 3 questions the user is
        # likely to ask on the current page plus canned answers, returned as
        # a JSON list of {"question", "answer"} objects.
        return f"""
λΉμ μ λ°λ €λλ¬Ό μΌνλͺ°μ AI λΉμ 'λ₯μ΄'μ λλ€.
μ¬μ©μκ° νμ¬ '{path}' νμ΄μ§λ₯Ό λ³΄κ³ μμ΅λλ€.
λ§₯λ½: {page_context}
μ΄ μν©μμ μ¬μ©μκ° κΆκΈν΄ν λ§ν **μ§λ¬Έ 3κ°μ§**μ κ·Έμ λν **μΌμ€ μλ λ΅λ³**μ 미리 μ€λΉν΄μ£ΌμΈμ.
λ΅λ³μ λ₯μ΄ λ§ν¬(~λ€λ₯)λ‘ μ§§κ³ ν΅μ¬λ§ μ λ¬ν΄μΌ ν©λλ€.
[μΆλ ₯ νμ]
λ°λμ JSON 리μ€νΈ ννλ‘ μμ±νμΈμ.
μμ:
[
  {{"question": "λ°°μ‘μ μΈμ μΆλ°ν΄?", "answer": "μ€ν 3μ μ μ£Όλ¬Έμ λΉμΌ μΆλ°νλ€λ₯! π"}},
  {{"question": "μ΄κ±° μ ν΅κΈ°ν λλν΄?", "answer": "κ±±μ λ§λΌλ₯! μ΅κ·Ό μ μ‘°λ μ μ ν μνλ§ λ³΄λΈλ€λ₯."}}
]
"""
class BrainHub:
    """Coordinates every LLM call for the shopping-assistant persona.

    Uses the module-level synchronous OpenAI ``client``. All public
    methods degrade gracefully (empty list or a canned persona reply)
    when the client is unavailable or a call/parse fails.

    Fixes vs. previous revision:
    - mutable default arguments ``history=[]`` replaced with ``None``
      (behavior unchanged for callers that omit the argument);
    - three bare ``except:`` clauses (which also swallowed
      KeyboardInterrupt/SystemExit) narrowed to ``except Exception`` and
      logged, keeping the best-effort fallbacks;
    - duplicated history formatting and markdown-fence stripping factored
      into ``_format_history`` / ``_parse_json_payload``;
    - the inner ``format_nodes`` closure extracted to ``_format_nodes``.
    """

    def __init__(self, strategy: "PersonaStrategy"):
        # Persona strategy supplying the system prompts.
        self.strategy = strategy

    @staticmethod
    def _format_history(history, limit):
        """Render the last ``limit`` turns as 'User: ...\\nNyang: ...' lines.

        Accepts None (treated as no history). Each turn is a dict with
        'user' and 'assistant' keys.
        """
        turns = history or []
        return "\n".join(
            f"User: {h['user']}\nNyang: {h['assistant']}" for h in turns[-limit:]
        )

    @staticmethod
    def _parse_json_payload(content):
        """Strip optional markdown code fences, then parse the text as JSON.

        Raises ValueError (json.JSONDecodeError) on invalid JSON; callers
        catch it and fall back to their defaults.
        """
        if "```" in content:
            content = content.replace("```json", "").replace("```", "").strip()
        return json.loads(content)

    @staticmethod
    def _format_nodes(n_list):
        """Render product nodes into the plain-text layout used in prompts.

        Missing fields fall back to neutral defaults; relative '/...' links
        are made absolute. NOTE(review): the host is hardcoded for local
        dev — consider reading it from configuration.
        """
        txt = ""
        for i, n in enumerate(n_list):
            src = "β¨[μμ¬λͺ°]" if n.get('source') == 'homepage' else "[μ§μ]"
            price = n.get('price', 0)
            brand = n.get('brand', '') or n.get('maker', 'Unknown')
            category = n.get('category', '') or n.get('main_category', '')
            content = n.get('content', '')
            stock = n.get('stock', 0)
            reviews = n.get('review_count', 0)
            pet = n.get('pet_type', 'all')
            rel_link = n.get('link', '')
            # A link starting with '/' can never start with 'http', so the
            # former redundant extra check was dropped.
            if rel_link.startswith("/"):
                full_link = f"http://localhost:3000{rel_link}"
            else:
                full_link = rel_link
            txt += f"""
[ID:{n.get('id', '?')}] {i+1}. {src} {n['title']}
- κ°κ²©: {price}μ | λΈλλ: {brand} | μΉ΄ν κ³ λ¦¬: {category}
- λμ: {pet} | μ¬κ³ : {stock}κ° | 리뷰μ: {reviews}κ°
- μμΈμ€λͺ : {content[:200]}...
- λ§ν¬: <{full_link}>
"""
        return txt

    def generate_search_queries(self, query, nodes, centroids, history=None):
        """Ask the model whether supplementary search keywords are needed.

        Returns the JSON list produced by the model, or [] when the client
        is missing or the call/parse fails (best-effort by design).
        """
        if not client:
            return []
        history_txt = self._format_history(history, 7)
        cluster_txt = "\n".join(
            f"- κ΅°μ§ {cid}: {info.get('summary', 'μ 보 μμ')} (ν¬κΈ°: {info['size']})"
            for cid, info in centroids.items()
        )
        # Only the top-5 first-pass titles go into the prompt to keep it small.
        context_summary = "\n".join(f"- {n['title']}" for n in nodes[:5])
        messages = [
            {"role": "system", "content": self.strategy.get_query_gen_prompt(history_txt)},
            {"role": "user", "content": f"μ§λ¬Έ: {query}\n\n[1μ°¨ κ²μ κ²°κ³Ό μμ½]\n{context_summary}\n\n[κ΅°μ§ λΆμ κ²°κ³Ό]\n{cluster_txt}"},
        ]
        try:
            response = client.chat.completions.create(model="gpt-4o-mini", messages=messages, temperature=0.3)
            return self._parse_json_payload(response.choices[0].message.content)
        except Exception as e:
            print(f"Query Gen Error: {e}")
            return []

    def extract_keywords(self, query):
        """Extract 3-4 database-search keywords from the user's question.

        Returns the JSON list produced by the model, or [] on any failure.
        """
        if not client:
            return []
        system_prompt = """
λΉμ μ κ²μ 쿼리 μ΅μ ν μ λ¬Έκ°μ λλ€.
μ¬μ©μμ μ§λ¬Έμμ **λ°μ΄ν°λ² μ΄μ€ κ²μμ μ¬μ©ν ν΅μ¬ ν€μλ 3~4κ°**λ₯Ό μΆμΆνμΈμ.
[μ€μ μ§μΉ¨]
1. **λ°λμ 볡ν©λͺ μ¬λ₯Ό κ°λ³ λ¨μ΄λ‘ λΆλ¦¬νμΈμ.** (μ: "μ΅μμ¬λ£" -> ["μ΅μ", "μ¬λ£"], "κ³ μμ΄κ°μ" -> ["κ³ μμ΄", "κ°μ"])
2. λΈλλλͺ , μν μ’ λ₯, ν΅μ¬ μμ± μμ£Όλ‘ λ½μΌμΈμ.
3. λμ΄μ°κΈ°κ° μμ΄λ μλ―Έ λ¨μλ‘ μͺΌκ°μΌ κ²μμ΄ μ λ©λλ€.
[μΆλ ₯ νμ]
JSON 리μ€νΈλ§ λ°ννμΈμ. μ: ["λ‘μμΊλ", "μ΅μ", "μ¬λ£", "λ€μ΄μ΄νΈ"]
"""
        messages = [
            {"role": "system", "content": system_prompt},
            {"role": "user", "content": query},
        ]
        try:
            response = client.chat.completions.create(model="gpt-4o-mini", messages=messages, temperature=0.3)
            return self._parse_json_payload(response.choices[0].message.content)
        except Exception as e:
            print(f"Keyword Extract Error: {e}")
            return []

    def generate_final_answer(self, query, nodes, refined_nodes, top_tokens, centroids, history=None):
        """Compose the final persona answer from background nodes, in-stock
        nodes, trend tokens and cluster summaries.

        Returns the model's answer text, or a canned persona message when
        the client is missing; call failures return an error string.
        NOTE(review): this builds its prompt inline rather than via
        ``self.strategy.get_final_answer_prompt`` — verify whether the
        strategy hook was meant to be used here.
        """
        if not client:
            return "API ν€κ° μμ΄μ λ₯μ΄κ° λ§μ λͺ» νκ² λ€λ₯... πΏ"
        history_txt = self._format_history(history, 7)
        context_txt = self._format_nodes(nodes)
        inventory_txt = self._format_nodes(refined_nodes)
        trend_txt = (
            f"ν€μλ: {', '.join(t[0] for t in top_tokens)}\n"
            f"κ΅°μ§: {', '.join(info.get('summary', '') for info in centroids.values())}"
        )
        final_prompt = f"""μ§λ°°μΈλ, λ°κ°λ€λ₯! λ₯μ΄κ° μ§μ¬λμ μν λ§μΆ€ν 'ν리미μ 리ν¬νΈ'λ₯Ό μμ±νλ€λ₯! π¦β¨πΎ
λΉμ μ λ°λ €λλ¬Ό μ©νμ λͺ¨λ κ²μ κΏ°λ«κ³ μλ 'μνΌ μ μ λ₯μ΄'μ λλ€.
[μ΄μ λν (μ΅κ·Ό 7ν΄)]
{history_txt}
[μ§μΉ¨]
1. **[μ€μ νλ§€ μν]** λͺ©λ‘μ μλ λͺ¨λ μνμ μ°μ μ μΌλ‘ μΆμ²νμΈμ.
2. **[λ°°κ²½ μ§μ]**μμ κ°μ₯ μ ν©λκ° λμ 'λͺ ν/μΈκΈ° μν' 1~2κ°λ₯Ό μΆκ°λ‘ μμ νμ¬ μΆμ²νμΈμ.
3. κ° μνμ μΆμ²ν λλ λ¨μν λμ΄νμ§ λ§κ³ , **μ 곡λ μμΈμ€λͺ κ³Ό λ°°κ²½μ§μμ νμ©ν΄ μμ£Ό νλΆνκ³ μ λ¬Έμ μΌλ‘ μ€λͺ **νμΈμ. (μ: μ±λΆμ μ₯μ , κΈ°λ ν¨κ³Ό λ±)
4. **[νμ] λͺ¨λ μμ¬λͺ° μνμ λ°λμ `[μνλͺ ](/product/ID)` νμμ λ§ν¬λ€μ΄ λ§ν¬λ₯Ό ν¬ν¨ν΄μΌ ν©λλ€.**
- λ§ν¬κ° μλ μΆμ²μ 무ν¨μ λλ€. λ°μ΄ν°μ μ 곡λ λ§ν¬ μ£Όμλ₯Ό 100% νμ©νμΈμ.
5. μΈλΆ μν(λ°°κ²½ μ§μ)μ "λ₯μ΄ λμκ΄μμ μ°Ύμ λͺ ν μνμ΄λ€λ₯!" κ°μ μμμ΄λ₯Ό λΆμ¬μ£ΌμΈμ.
6. λ§ν¬λ λ₯μνκ³ λλν '~λ€λ₯', '~λ₯'μ λλ€.
[λ°°κ²½ μ§μ (μ§μ νμ₯ λ° μΈλΆ μΆμ²μ©)]:
{context_txt}
[μ€μ νλ§€ μν (μ°λ¦¬ κ°κ² μ¬κ³ )]:
{inventory_txt}
[νΈλ λ]:
{trend_txt}"""
        messages = [{"role": "system", "content": final_prompt}, {"role": "user", "content": query}]
        try:
            print("π¦ Calling GPT for Final Answer (Sync Mode)...")
            response = client.chat.completions.create(model="gpt-4o-mini", messages=messages, temperature=0.7, timeout=45)
            print("π¦ GPT Response Received!")
            return response.choices[0].message.content
        except Exception as e:
            return f"μλ¬λΌλ₯: {e}"

    def generate_quick_chat(self, query, history=None):
        """Short, immediate persona reply (max 250 tokens, last 3 turns).

        Returns the model text, or a canned persona fallback on failure.
        """
        if not client:
            return "μ... μ μλ§ κΈ°λ€λ €λ¬λΌλ₯!"
        history_txt = self._format_history(history, 3)
        system_prompt = """
λΉμ μ λ°λ €λλ¬Ό μ λ¬Έ μΌνλͺ° 'λ€μ΄λ°λ₯'μ AI μ μ 'λ₯μ΄'μ λλ€.
μ¬μ©μμ μ§λ¬Έμ λν΄ μ¦μ λ΅λ³νλ, λ€μ ꡬ쑰λ₯Ό μ§μΌμ£ΌμΈμ:
1. 곡κ°κ³Ό μΌλ° μ§μ: ν΄λΉ μ νκ΅°(μ: μ¬λ£, λͺ¨λ, μ₯λκ°)μ λν μΌλ°μ μΈ νΉμ§ μ€λͺ .
2. μ§μ¬ μ£Όμμ¬ν: κ³ μμ΄κ° ν΄λΉ μ νμ μ¬μ©ν λ μ§μ¬κ° κΌ μμμΌ ν μ μμ¬νμ΄λ κΏν 1~2κ°μ§.
3. μ ν λ©νΈ: λ§μ§λ§μ λ°λμ "μ§λ°°μΈλμ μν΄ μ°λ¦¬ λ§€μ₯μ λ± λ§λ λ¬Όκ±΄μ΄ μλμ§ λ₯μ΄κ° μΌλ₯Έ μ°Ύμλ³΄κ² λ€μΉ! πΎ"λ‘ λλΌ κ².
λ§ν¬λ 무쑰건 '~λ€λ₯', '~μΉ'μ μμ΄ κ·μ½κ³ μ λ¬Έμ μΌλ‘ λλ΅νμΈμ.
"""
        messages = [
            {"role": "system", "content": system_prompt},
            {"role": "user", "content": f"[μ΄μ λν]\n{history_txt}\n\n[μ¬μ©μ μ§λ¬Έ]\n{query}"},
        ]
        try:
            response = client.chat.completions.create(model="gpt-4o-mini", messages=messages, temperature=0.7, max_tokens=250)
            return response.choices[0].message.content
        except Exception as e:
            print(f"Quick Chat Error: {e}")
            return "μκ² λ€λ₯! μ μλ§ κΈ°λ€λ €μ£Όλ©΄ μ°Ύμλ³΄κ² λ€λ₯!"

    def generate_suggestions(self, path):
        """Pre-generate up to 3 question/answer suggestion chips for a page.

        The page ``path`` is mapped to a coarse context string; the model
        returns a JSON list of {"question", "answer"} items which are
        reshaped to {"label", "cached_answer", "link"} dicts. Returns [] on
        failure (a single canned chip when the client is missing).
        """
        if not client:
            return [{"label": "μλ ?", "cached_answer": "λ°κ°λ€λ₯!"}]
        # Order matters: e.g. a path containing both 'product' and
        # 'category' is classified as a product page.
        context = "μΌνλͺ° λ©μΈ λ‘λΉ"
        if "product" in path:
            context = "νΉμ μν μμΈ νμ΄μ§. ꡬ맀λ₯Ό κ³ λ―Ό μ€."
        elif "cart" in path:
            context = "μ₯λ°κ΅¬λ νμ΄μ§. κ²°μ μ§μ ."
        elif "category" in path:
            context = "μΉ΄ν κ³ λ¦¬ λͺ©λ‘ νμ΄μ§. μμ΄μΌν μ€."
        elif "login" in path:
            context = "λ‘κ·ΈμΈ/νμκ°μ νμ΄μ§."
        messages = [
            {"role": "system", "content": self.strategy.get_suggestion_prompt(path, context)},
            {"role": "user", "content": "μ§λ¬Έ-λ΅λ³ μΈνΈ 3κ° μμ±ν΄μ€."},
        ]
        try:
            response = client.chat.completions.create(model="gpt-4o-mini", messages=messages, temperature=0.7, max_tokens=300)
            raw_data = self._parse_json_payload(response.choices[0].message.content)
            suggestions = [
                {"label": item['question'], "cached_answer": item['answer'], "link": None}
                for item in raw_data
            ]
            return suggestions[:3]
        except Exception as e:
            print(f"Suggestion Gen Error: {e}")
            return []