# app.py — uploaded by ssboost ("Upload 15 files", commit 580b0de)
# -*- coding: utf-8 -*-
"""
AI ์ƒํ’ˆ ์†Œ์‹ฑ ๋ถ„์„ ์‹œ์Šคํ…œ v2.9 - ์ถœ๋ ฅ ๊ธฐ๋Šฅ ์ถ”๊ฐ€ + ๋ฉ€ํ‹ฐ์‚ฌ์šฉ์ž ์•ˆ์ „
- ์—ฐ๊ด€๊ฒ€์ƒ‰์–ด ์—‘์…€ ์ถœ๋ ฅ
- ํ‚ค์›Œ๋“œ ์‹ฌ์ถฉ๋ถ„์„ HTML ์ถœ๋ ฅ
- ์••์ถ•ํŒŒ์ผ๋กœ ๊ฒฐ๊ณผ ๋‹ค์šด๋กœ๋“œ
- Gemini API ํ‚ค ํ†ตํ•ฉ ๊ด€๋ฆฌ
- ํ•œ๊ตญ์‹œ๊ฐ„ ์ ์šฉ
- ๋ฉ€ํ‹ฐ ์‚ฌ์šฉ์ž ์•ˆ์ „: gr.State ์‚ฌ์šฉ์œผ๋กœ ์„ธ์…˜๋ณ„ ๋ฐ์ดํ„ฐ ๊ด€๋ฆฌ
"""
import gradio as gr
import pandas as pd
import os
import logging
import google.generativeai as genai
from datetime import datetime, timedelta
import pytz # ํ•œ๊ตญ์‹œ๊ฐ„ ์ ์šฉ์„ ์œ„ํ•œ ์ถ”๊ฐ€
import time
import re
from collections import Counter
import zipfile
import tempfile
# ๋กœ๊น… ์„ค์ •
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
logger = logging.getLogger(__name__)
# ๋ชจ๋“ˆ ์ž„ํฌํŠธ
import api_utils
import text_utils
import keyword_search
import product_search
import keyword_processor
import export_utils
import keyword_analysis
import trend_analysis_v2
# ===== Gemini API ์„ค์ • =====
def setup_gemini_model():
    """Initialize the Gemini model (centrally managed by api_utils).

    Returns:
        The model object on success, or None when the API key is missing
        or initialization raises.
    """
    try:
        model = api_utils.get_gemini_model()
    except Exception as e:
        logger.error(f"Gemini 모델 초기화 실패: {e}")
        return None
    if not model:
        logger.warning("Gemini API 키가 설정되지 않았습니다.")
        return None
    logger.info("Gemini 모델 초기화 성공 (api_utils 통합 관리)")
    return model
# Initialize the Gemini model once at import time (module-level singleton).
# May be None when no API key is configured; callers re-fetch via
# api_utils.get_gemini_model() where freshness matters.
gemini_model = setup_gemini_model()
# ===== ํ•œ๊ตญ์‹œ๊ฐ„ ๊ด€๋ จ ํ•จ์ˆ˜ =====
def get_korean_time():
    """Return the current wall-clock time as a timezone-aware datetime
    in Asia/Seoul (KST)."""
    return datetime.now(pytz.timezone('Asia/Seoul'))
def format_korean_datetime(dt=None, format_type="filename"):
    """Format a datetime for display or filenames (Korean time).

    Args:
        dt: datetime to format; when None, the current Korean time is used.
        format_type: one of
            "filename" - compact YYMMDD_HHMM stamp (also the fallback
                         for unrecognized values),
            "display"  - human-readable Korean date/time string,
            "full"     - YYYY-MM-DD HH:MM:SS.

    Returns:
        The formatted string.
    """
    if dt is None:
        dt = get_korean_time()
    patterns = {
        "filename": "%y%m%d_%H%M",
        "display": '%Y년 %m월 %d일 %H시 %M분',
        "full": '%Y-%m-%d %H:%M:%S',
    }
    return dt.strftime(patterns.get(format_type, "%y%m%d_%H%M"))
# ===== ์ถœ๋ ฅ ์ „์šฉ ์ƒํƒœ ๋ณ€์ˆ˜ ์ œ๊ฑฐ (๋ฉ€ํ‹ฐ ์‚ฌ์šฉ์ž ์•ˆ์ „์„ ์œ„ํ•ด gr.State ์‚ฌ์šฉ) =====
# export_state ์ „์—ญ ๋ณ€์ˆ˜ ์ œ๊ฑฐ - ๋ฉ€ํ‹ฐ ์‚ฌ์šฉ์ž ํ™˜๊ฒฝ์—์„œ ๋ฐ์ดํ„ฐ ํ˜ผํ•ฉ ๋ฌธ์ œ ํ•ด๊ฒฐ
# ===== ์—ฐ๊ด€๊ฒ€์ƒ‰์–ด ๋ถ„์„ ๊ธฐ๋Šฅ =====
def analyze_related_keywords(keyword):
    """Related-search-term analysis based on up to 40 Naver products.

    Pipeline: fetch product listings for *keyword*, split their titles
    into words, combine each word with the main keyword in both orders
    ("word keyword" / "keyword word"), look up search volumes, and keep
    whichever ordering of each pair has the higher volume.

    Args:
        keyword: main keyword entered by the user (may contain spaces).

    Returns:
        dict with "status" ("success"/"error"), "message", "keywords_df"
        (DataFrame sorted by total volume, descending; empty on error)
        and, on success, "total_products".
    """
    logger.info(f"연관검색어 분석 시작: '{keyword}'")
    try:
        # Step 1: collect up to 40 Naver products.
        # The API is queried with spaces removed.
        api_keyword = keyword.replace(" ", "")
        products_data = []
        # 4 pages * 10 items = 40 products; stop early on any failed/empty page.
        for page in range(1, 5):
            result = product_search.fetch_products_by_keyword(api_keyword, page=page, display=10)
            if result["status"] == "success" and result["products"]:
                products_data.extend(result["products"])
            else:
                break
            time.sleep(0.3)  # throttle to avoid API rate limiting
        if not products_data:
            return {
                "status": "error",
                "message": f"'{keyword}' 키워드로 상품을 찾을 수 없습니다.",
                "keywords_df": pd.DataFrame()
            }
        # Cap at 40 products even if the API returned more.
        products_data = products_data[:40]
        logger.info(f"상품 추출 완료: {len(products_data)}개")
        # Step 2: split product titles into candidate words (whitespace/comma).
        all_words = []
        for product in products_data:
            title = product.get("상품명", "")
            # split on commas and runs of whitespace
            words = re.split(r'[,\s]+', title)
            all_words.extend([word.strip() for word in words if word.strip() and len(word.strip()) >= 1])
        # de-duplicate the word pool
        unique_words = list(set(all_words))
        logger.info(f"추출된 단어 수: {len(unique_words)}개")
        # Step 3: build compound keywords by attaching the main keyword
        # before and after each candidate word.
        compound_keywords = []
        main_keyword = keyword.strip()
        for word in unique_words:
            if word != main_keyword and len(word) >= 2:  # skip single characters
                # word in front of the main keyword
                front_compound = f"{word} {main_keyword}"
                compound_keywords.append(front_compound)
                # word after the main keyword
                back_compound = f"{main_keyword} {word}"
                compound_keywords.append(back_compound)
        # de-duplicate compounds
        compound_keywords = list(set(compound_keywords))
        logger.info(f"생성된 복합키워드 수: {len(compound_keywords)}개")
        # Step 4: fetch search volumes for all compounds in one batch.
        api_keywords = [kw.replace(" ", "") for kw in compound_keywords]
        search_volumes = keyword_search.fetch_all_search_volumes(api_keywords)
        # Step 5: pair each base word's front/back variants with their volumes.
        # {base_word: {"front": front_kw, "back": back_kw, "front_vol": vol, "back_vol": vol}}
        keyword_pairs = {}
        for word in unique_words:
            if word != main_keyword and len(word) >= 2:
                front_kw = f"{word} {main_keyword}"
                back_kw = f"{main_keyword} {word}"
                front_api = front_kw.replace(" ", "")
                back_api = back_kw.replace(" ", "")
                front_vol = search_volumes.get(front_api, {}).get("총검색량", 0)
                back_vol = search_volumes.get(back_api, {}).get("총검색량", 0)
                keyword_pairs[word] = {
                    "front": front_kw,
                    "back": back_kw,
                    "front_vol": front_vol,
                    "back_vol": back_vol
                }
        # Step 6: keep only the higher-volume ordering of each pair.
        final_keywords = []
        for word, data in keyword_pairs.items():
            if data["front_vol"] > data["back_vol"]:
                selected_kw = data["front"]
                selected_vol = data["front_vol"]
                selected_api = selected_kw.replace(" ", "")
            elif data["back_vol"] > data["front_vol"]:
                selected_kw = data["back"]
                selected_vol = data["back_vol"]
                selected_api = selected_kw.replace(" ", "")
            elif data["front_vol"] == data["back_vol"] and data["front_vol"] > 0:
                # On a tie, prefer the suffix form ("keyword word"),
                # which usually reads more naturally.
                selected_kw = data["back"]
                selected_vol = data["back_vol"]
                selected_api = selected_kw.replace(" ", "")
            else:
                # both volumes are zero - drop the pair
                continue
            if selected_vol > 0:  # only keep keywords with real search volume
                vol_data = search_volumes.get(selected_api, {})
                final_keywords.append({
                    "연관 키워드": selected_kw,
                    "PC검색량": vol_data.get("PC검색량", 0),
                    "모바일검색량": vol_data.get("모바일검색량", 0),
                    "총검색량": selected_vol,
                    "검색량구간": text_utils.get_search_volume_range(selected_vol)
                })
        # Sort by total search volume, descending.
        final_keywords = sorted(final_keywords, key=lambda x: x["총검색량"], reverse=True)
        # Materialize as a DataFrame for downstream table rendering / export.
        df_keywords = pd.DataFrame(final_keywords)
        logger.info(f"연관검색어 분석 완료: {len(final_keywords)}개 키워드")
        return {
            "status": "success",
            "message": f"'{keyword}' 연관검색어 {len(final_keywords)}개를 찾았습니다.",
            "keywords_df": df_keywords,
            "total_products": len(products_data)
        }
    except Exception as e:
        logger.error(f"연관검색어 분석 오류: {e}")
        return {
            "status": "error",
            "message": f"연관검색어 분석 중 오류가 발생했습니다: {str(e)}",
            "keywords_df": pd.DataFrame()
        }
# ===== ๋กœ๋”ฉ ์• ๋‹ˆ๋ฉ”์ด์…˜ =====
def create_loading_animation():
    """Return the self-contained HTML/CSS snippet shown while an analysis runs.

    Pure presentation: an orange spinner and an indeterminate progress bar.
    The @keyframes rules are inlined so the snippet works when injected
    into a gr.HTML component on its own.
    """
    return """
    <div style="display: flex; flex-direction: column; align-items: center; padding: 40px; background: white; border-radius: 12px; box-shadow: 0 4px 12px rgba(0,0,0,0.1);">
        <div style="width: 60px; height: 60px; border: 4px solid #f3f3f3; border-top: 4px solid #FB7F0D; border-radius: 50%; animation: spin 1s linear infinite; margin-bottom: 20px;"></div>
        <h3 style="color: #FB7F0D; margin: 10px 0; font-size: 18px;">분석 중입니다...</h3>
        <p style="color: #666; margin: 5px 0; text-align: center;">네이버 데이터를 수집하고 AI가 분석하고 있습니다.<br>잠시만 기다려주세요.</p>
        <div style="width: 200px; height: 4px; background: #f0f0f0; border-radius: 2px; margin-top: 15px; overflow: hidden;">
            <div style="width: 100%; height: 100%; background: linear-gradient(90deg, #FB7F0D, #ff9a8b); border-radius: 2px; animation: progress 2s ease-in-out infinite;"></div>
        </div>
    </div>
    <style>
    @keyframes spin {
        0% { transform: rotate(0deg); }
        100% { transform: rotate(360deg); }
    }
    @keyframes progress {
        0% { transform: translateX(-100%); }
        100% { transform: translateX(100%); }
    }
    </style>
    """
# ===== ์—๋Ÿฌ ์ฒ˜๋ฆฌ ํ•จ์ˆ˜ =====
def generate_error_response(error_message):
    """Render *error_message* inside the standard red error card.

    Args:
        error_message: text shown to the user. NOTE(review): it is
            interpolated into HTML unescaped — fine for the internal
            messages used here, but confirm no untrusted text reaches it.

    Returns:
        An HTML string with the message plus generic troubleshooting tips.
    """
    return f'''
    <div style="color: red; padding: 30px; text-align: center; width: 100%;
                background-color: #f8d7da; border-radius: 12px; border: 1px solid #f5c6cb;">
        <h3 style="margin-bottom: 15px;">❌ 분석 오류</h3>
        <p style="margin-bottom: 20px;">{error_message}</p>
        <div style="background: white; padding: 15px; border-radius: 8px; color: #333;">
            <h4>해결 방법:</h4>
            <ul style="text-align: left; padding-left: 20px;">
                <li>키워드 철자를 확인해주세요</li>
                <li>더 간단한 키워드를 사용해보세요</li>
                <li>네트워크 연결을 확인해주세요</li>
                <li>잠시 후 다시 시도해주세요</li>
            </ul>
        </div>
    </div>
    '''
# ===== ๋ฉ”์ธ ํ‚ค์›Œ๋“œ ๋ถ„์„ ํ•จ์ˆ˜ =====
def safe_keyword_analysis(analysis_keyword, base_keyword, keywords_data):
    """Defensive in-depth keyword analysis; returns per-session data.

    Looks up current search volume, optionally runs 1-year/3-year trend
    analysis (when a DataLab API key is configured), renders the trend
    chart and the AI keyword-analysis section, and packages everything
    for later export.

    Args:
        analysis_keyword: keyword chosen by the user in step 3.
        base_keyword: the original main keyword from step 1.
        keywords_data: step-2 result dict (may be None); its
            "keywords_df" feeds the AI analysis and the export bundle.

    Returns:
        (html, export_data): the rendered result HTML and a per-session
        dict for export_analysis_results; export_data is {} on failure.
    """
    # validate input before doing any network work
    if not analysis_keyword or not analysis_keyword.strip():
        return generate_error_response("분석할 키워드를 입력해주세요."), {}
    analysis_keyword = analysis_keyword.strip()
    try:
        # Look up the current search volume (errors fall through to the
        # outer handler).
        api_keyword = keyword_analysis.normalize_keyword_for_api(analysis_keyword)
        search_volumes = keyword_search.fetch_all_search_volumes([api_keyword])
        volume_data = search_volumes.get(api_keyword, {"PC검색량": 0, "모바일검색량": 0, "총검색량": 0})
        # Zero volume means an unknown/invalid keyword - bail out with guidance.
        if volume_data['총검색량'] == 0:
            logger.warning(f"'{analysis_keyword}' 키워드의 검색량이 0이거나 존재하지 않습니다.")
            error_result = f"""
            <div style="padding: 30px; text-align: center; background: #fff3cd; border-radius: 12px; border: 1px solid #ffeaa7;">
                <h3 style="color: #856404; margin-bottom: 15px;">⚠️ 키워드 분석 불가</h3>
                <p style="color: #856404; margin-bottom: 10px;"><strong>'{analysis_keyword}'</strong> 키워드는 검색량이 없거나 올바르지 않은 키워드입니다.</p>
                <div style="background: white; padding: 15px; border-radius: 8px; margin-top: 15px;">
                    <h4 style="color: #333; margin-bottom: 10px;">💡 권장사항</h4>
                    <ul style="text-align: left; color: #666; padding-left: 20px;">
                        <li>키워드 철자를 확인해주세요</li>
                        <li>더 일반적인 키워드를 사용해보세요</li>
                        <li>2단계에서 제안한 키워드 목록을 참고해주세요</li>
                        <li>키워드를 띄어쓰기로 구분해보세요 (예: '여성 슬리퍼')</li>
                    </ul>
                </div>
            </div>
            """
            return error_result, {}
        logger.info(f"'{analysis_keyword}' 현재 검색량: {volume_data['총검색량']:,}")
        # Attempt trend analysis (best-effort; failures only disable the chart).
        monthly_data_1year = {}
        monthly_data_3year = {}
        trend_available = False
        try:
            # Requires a configured DataLab API key.
            datalab_config = api_utils.get_next_datalab_api_config()
            if datalab_config and not datalab_config["CLIENT_ID"].startswith("YOUR_"):
                logger.info("데이터랩 API 키가 설정되어 있어 1년, 3년 트렌드 분석을 시도합니다.")
                # Uses the optimized v5/v7 API helpers.
                # 1-year trend data
                trend_data_1year = trend_analysis_v2.get_naver_trend_data_v5([analysis_keyword], "1year", max_retries=3)
                if trend_data_1year:
                    current_volumes = {api_keyword: volume_data}
                    monthly_data_1year = trend_analysis_v2.calculate_monthly_volumes_v7([analysis_keyword], current_volumes, trend_data_1year, "1year")
                # 3-year trend data
                trend_data_3year = trend_analysis_v2.get_naver_trend_data_v5([analysis_keyword], "3year", max_retries=3)
                if trend_data_3year:
                    current_volumes = {api_keyword: volume_data}
                    monthly_data_3year = trend_analysis_v2.calculate_monthly_volumes_v7([analysis_keyword], current_volumes, trend_data_3year, "3year")
                # No 3-year data: synthesize a 3-year series from the 1-year one.
                if not monthly_data_3year and monthly_data_1year:
                    logger.info("3년 데이터가 없어 1년 데이터를 기반으로 3년 차트 생성")
                    keyword = analysis_keyword
                    if keyword in monthly_data_1year:
                        data_1y = monthly_data_1year[keyword]
                        # Build a 3-year-long series (24 extra months).
                        extended_dates = []
                        extended_volumes = []
                        # Prepend 24 months of zeros before the 1-year data
                        # (approximating a month as 30 days).
                        start_date = datetime.strptime(data_1y["dates"][0], "%Y-%m-%d")
                        for i in range(24, 0, -1):
                            prev_date = start_date - timedelta(days=30 * i)
                            extended_dates.append(prev_date.strftime("%Y-%m-%d"))
                            extended_volumes.append(0)
                        # Append the actual 1-year data (excluding predicted points).
                        actual_count = data_1y.get("actual_count", len(data_1y["dates"]))
                        extended_dates.extend(data_1y["dates"][:actual_count])
                        extended_volumes.extend(data_1y["monthly_volumes"][:actual_count])
                        monthly_data_3year = {
                            keyword: {
                                "monthly_volumes": extended_volumes,
                                "dates": extended_dates,
                                "current_volume": data_1y["current_volume"],
                                "growth_rate": trend_analysis_v2.calculate_3year_growth_rate_improved(extended_volumes),
                                "volume_per_percent": data_1y["volume_per_percent"],
                                "current_ratio": data_1y["current_ratio"],
                                "actual_count": len(extended_volumes),
                                "predicted_count": 0
                            }
                        }
                if monthly_data_1year or monthly_data_3year:
                    trend_available = True
                    logger.info("트렌드 분석 성공")
                else:
                    logger.info("트렌드 데이터 처리 실패")
            else:
                logger.info("데이터랩 API 키가 설정되지 않음")
        except Exception as e:
            logger.info(f"트렌드 분석 건너뜀: {str(e)[:100]}")
        # Prepare the step-2 keyword data for the AI analysis section.
        step2_keywords_df = keywords_data.get("keywords_df") if keywords_data else None
        filtered_keywords_df = step2_keywords_df  # pass the raw data through unfiltered
        target_categories = []  # no category filtering here
        # === 📈 search-volume trend section ===
        if trend_available and (monthly_data_1year or monthly_data_3year):
            try:
                trend_chart = trend_analysis_v2.create_trend_chart_v7(monthly_data_1year, monthly_data_3year)
            except Exception as e:
                logger.warning(f"트렌드 차트 생성 실패, 기본 차트 사용: {e}")
                trend_chart = trend_analysis_v2.create_enhanced_current_chart(volume_data, analysis_keyword)
        else:
            # No trend data: fall back to the current-volume-only chart.
            trend_chart = trend_analysis_v2.create_enhanced_current_chart(volume_data, analysis_keyword)
        # trend section wrapper
        trend_section = f"""
        <div style="width: 100%; margin: 30px auto; font-family: 'Pretendard', sans-serif;">
            <div style="background: linear-gradient(135deg, #667eea 0%, #764ba2 100%); padding: 15px; border-radius: 10px 10px 0 0; color: white; text-align: center;">
                <h3 style="margin: 0; font-size: 18px; color: white;">📈 검색량 트렌드 분석</h3>
            </div>
            <div style="background: white; padding: 20px; border-radius: 0 0 10px 10px; box-shadow: 0 4px 6px rgba(0, 0, 0, 0.1);">
                {trend_chart}
            </div>
        </div>
        """
        # === 🎯 keyword analysis section (AI) ===
        # Fetch the Gemini model from api_utils (may be None; the helper
        # is expected to handle that).
        current_gemini_model = api_utils.get_gemini_model()
        keyword_analysis_html = keyword_analysis.analyze_keyword_for_sourcing(
            analysis_keyword, volume_data, monthly_data_1year, monthly_data_3year,
            filtered_keywords_df, target_categories, current_gemini_model
        )
        keyword_analysis_section = f"""
        <div style="width: 100%; margin: 30px auto; font-family: 'Pretendard', sans-serif;">
            <div style="background: linear-gradient(135deg, #11998e 0%, #38ef7d 100%); padding: 15px; border-radius: 10px 10px 0 0; color: white; text-align: center;">
                <h3 style="margin: 0; font-size: 18px; color: white;">🎯 키워드 분석</h3>
            </div>
            <div style="background: white; padding: 20px; border-radius: 0 0 10px 10px; box-shadow: 0 4px 6px rgba(0, 0, 0, 0.1); overflow: hidden;">
                {keyword_analysis_html}
            </div>
        </div>
        """
        # warning banner when trend analysis was unavailable
        warning_section = ""
        if not trend_available:
            warning_section = f"""
            <div style="width: 100%; margin: 20px auto; padding: 15px; background: #fff3cd; border: 1px solid #ffeaa7; border-radius: 8px; font-family: 'Pretendard', sans-serif;">
                <div style="display: flex; align-items: center;">
                    <span style="font-size: 20px; margin-right: 10px;">⚠️</span>
                    <div>
                        <strong style="color: #856404;">일부 기능 제한</strong>
                        <div style="font-size: 14px; color: #856404; margin-top: 5px;">
                            트렌드 분석에 제한이 있습니다. 현재 검색량 분석과 AI 추천은 정상 제공됩니다.<br>
                            <small>완전한 월 데이터 기준으로 분석하기 위해 최신 완료된 월까지만 표시됩니다.</small>
                        </div>
                    </div>
                </div>
            </div>
            """
        # assemble the final page
        final_result = warning_section + trend_section + keyword_analysis_section
        # Per-session export payload (gr.State; multi-user safe).
        session_export_data = {
            "main_keyword": base_keyword,
            "analysis_keyword": analysis_keyword,
            "main_keywords_df": keywords_data.get("keywords_df") if keywords_data else None,
            "related_keywords_df": None,  # related-keyword analysis is not run here
            "analysis_html": final_result
        }
        return final_result, session_export_data
    except Exception as e:
        logger.error(f"키워드 분석 중 전체 오류: {e}")
        error_result = generate_error_response(f"키워드 분석 중 오류가 발생했습니다: {str(e)}")
        return error_result, {}
# ===== 2๋‹จ๊ณ„: ์ƒํ’ˆ ๋ฐ์ดํ„ฐ ๊ธฐ๋ฐ˜ ํ‚ค์›Œ๋“œ ์ถ”์ถœ =====
def extract_keywords_from_products(keyword):
    """Collect real product data from Naver Shopping and extract all keywords.

    Args:
        keyword: the main product keyword entered in step 1.

    Returns:
        dict with "status"/"message" plus, on success, "products"
        (products DataFrame), "keywords_df" (keywords DataFrame) and
        "categories"; on error, empty "products"/"keywords" lists.
    """
    logger.info(f"상품 키워드 추출 시작: 키워드='{keyword}'")
    # NOTE(review): the original computed
    # keyword_analysis.normalize_keyword_for_api(keyword) into an unused
    # local; the dead call was removed here on the assumption that the
    # normalizer is pure — confirm it has no side effects.
    search_results = product_search.fetch_naver_shopping_data(
        keyword, korean_only=True, apply_main_keyword=True, exclude_zero_volume=True
    )
    if not search_results.get("product_list"):
        return {
            "status": "error",
            "message": "상품 데이터를 가져올 수 없습니다.",
            "products": [],
            "keywords": []
        }
    processed_results = keyword_processor.process_search_results(
        search_results, keyword, exclude_zero_volume=True
    )
    df_keywords = processed_results["keywords_df"]
    df_products = processed_results["products_df"]
    if df_keywords.empty:
        return {
            "status": "error",
            "message": "추출된 키워드가 없습니다.",
            "products": [],
            "keywords": []
        }
    logger.info(f"키워드 추출 완료: 총 {len(df_keywords)}개 키워드")
    return {
        "status": "success",
        "message": "키워드 추출 완료",
        "products": df_products,
        "keywords_df": df_keywords,
        "categories": processed_results["categories"]
    }
# ===== ํŒŒ์ผ ์ถœ๋ ฅ ํ•จ์ˆ˜๋“ค =====
def create_timestamp_filename(analysis_keyword):
    """Build a filesystem-safe base name '<keyword>_<YYMMDD_HHMM>_분석결과'.

    The keyword is stripped of non-word characters, and runs of hyphens
    or whitespace collapse to single underscores; the timestamp uses
    Korean time.
    """
    stamp = format_korean_datetime(format_type="filename")
    cleaned = re.sub(r'[^\w\s-]', '', analysis_keyword).strip()
    cleaned = re.sub(r'[-\s]+', '_', cleaned)
    return f"{cleaned}_{stamp}_분석결과"
def _safe_sheet_name(name):
    """Sanitize an Excel worksheet name: drop forbidden characters and
    enforce the 31-character limit (xlsxwriter raises on violations)."""
    cleaned = re.sub(r'[\[\]:*?/\\]', '', name) or "Sheet"
    return cleaned[:31]

def _write_formatted_sheet(writer, df, sheet_name, header_format, data_format, number_format):
    """Write *df* to *sheet_name* with styled header, per-cell formats and
    auto-sized columns. Columns 1-3 (PC/모바일/총검색량) get thousands
    separators; everything else uses the plain data format."""
    df.to_excel(writer, sheet_name=sheet_name, index=False)
    worksheet = writer.sheets[sheet_name]
    # header row styling
    for col_num, value in enumerate(df.columns.values):
        worksheet.write(0, col_num, value, header_format)
    # data rows (row 0 is the header)
    for row_num in range(1, len(df) + 1):
        for col_num, value in enumerate(df.iloc[row_num - 1]):
            fmt = number_format if col_num in (1, 2, 3) else data_format
            worksheet.write(row_num, col_num, value, fmt)
    # auto-size columns, capped at 50 characters
    for i, col in enumerate(df.columns):
        max_len = max(df[col].astype(str).map(len).max(), len(str(col)))
        worksheet.set_column(i, i, min(max_len + 2, 50))

def export_to_excel(main_keyword, main_keywords_df, analysis_keyword, related_keywords_df, filename_base):
    """Export the analysis DataFrames to a styled .xlsx in the temp dir.

    Sheet 1: main-keyword compound keywords; sheet 2: related search
    terms. Either sheet is skipped when its DataFrame is None or empty.
    Sheet names are sanitized/truncated because user keywords can exceed
    Excel's 31-character sheet-name limit.

    Returns:
        The file path on success, or None on failure.
    """
    try:
        excel_filename = f"{filename_base}.xlsx"
        excel_path = os.path.join(tempfile.gettempdir(), excel_filename)
        with pd.ExcelWriter(excel_path, engine='xlsxwriter') as writer:
            workbook = writer.book
            # header style
            header_format = workbook.add_format({
                'bold': True,
                'text_wrap': True,
                'valign': 'top',
                'fg_color': '#D7E4BC',
                'border': 1
            })
            # plain data style
            data_format = workbook.add_format({
                'text_wrap': True,
                'valign': 'top',
                'border': 1
            })
            # thousands-separated number style
            number_format = workbook.add_format({
                'num_format': '#,##0',
                'text_wrap': True,
                'valign': 'top',
                'border': 1
            })
            if main_keywords_df is not None and not main_keywords_df.empty:
                _write_formatted_sheet(
                    writer, main_keywords_df,
                    _safe_sheet_name(f'{main_keyword}_조합키워드'),
                    header_format, data_format, number_format
                )
            if related_keywords_df is not None and not related_keywords_df.empty:
                _write_formatted_sheet(
                    writer, related_keywords_df,
                    _safe_sheet_name(f'{analysis_keyword}_연관검색어'),
                    header_format, data_format, number_format
                )
        logger.info(f"엑셀 파일 생성 완료: {excel_path}")
        return excel_path
    except Exception as e:
        logger.error(f"엑셀 파일 생성 오류: {e}")
        return None
def export_to_html(analysis_html, filename_base):
    """Wrap the analysis HTML in a standalone document and save it.

    Produces a complete, self-styled HTML page (fonts/icons via CDN,
    responsive and print CSS) in the system temp directory, stamped with
    the Korean-time creation timestamp.

    Args:
        analysis_html: the inner result markup produced by the analysis.
        filename_base: base name (without extension) for the output file.

    Returns:
        The file path on success, or None on failure.
    """
    try:
        html_filename = f"{filename_base}.html"
        html_path = os.path.join(tempfile.gettempdir(), html_filename)
        # creation timestamp shown in the footer (Korean time)
        korean_time = format_korean_datetime(format_type="display")
        # Full HTML document; CSS braces are doubled because this is an f-string.
        full_html = f"""
<!DOCTYPE html>
<html lang="ko">
<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>키워드 심충분석 결과</title>
    <link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.0.0/css/all.min.css">
    <link rel="stylesheet" href="https://cdn.jsdelivr.net/gh/orioncactus/pretendard/dist/web/static/pretendard.css">
    <style>
        body {{
            font-family: 'Pretendard', -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, sans-serif;
            margin: 0;
            padding: 20px;
            background-color: #f5f5f5;
            line-height: 1.6;
        }}
        .container {{
            max-width: 1200px;
            margin: 0 auto;
            background: white;
            border-radius: 12px;
            box-shadow: 0 4px 12px rgba(0,0,0,0.1);
            overflow: hidden;
        }}
        .header {{
            background: linear-gradient(135deg, #667eea 0%, #764ba2 100%);
            color: white;
            padding: 30px;
            text-align: center;
        }}
        .header h1 {{
            margin: 0;
            font-size: 28px;
            font-weight: 700;
        }}
        .header p {{
            margin: 10px 0 0 0;
            font-size: 16px;
            opacity: 0.9;
        }}
        .content {{
            padding: 30px;
        }}
        .timestamp {{
            text-align: center;
            padding: 20px;
            background: #f8f9fa;
            color: #6c757d;
            font-size: 14px;
            border-top: 1px solid #dee2e6;
        }}
        /* improved chart styling */
        .chart-container {{
            margin: 20px 0;
            padding: 20px;
            background: white;
            border-radius: 8px;
            box-shadow: 0 2px 8px rgba(0,0,0,0.1);
        }}
        /* responsive styles */
        @media (max-width: 768px) {{
            .container {{
                margin: 10px;
                border-radius: 8px;
            }}
            .header {{
                padding: 20px;
            }}
            .header h1 {{
                font-size: 24px;
            }}
            .content {{
                padding: 20px;
            }}
        }}
        /* animations */
        @keyframes spin {{
            0% {{ transform: rotate(0deg); }}
            100% {{ transform: rotate(360deg); }}
        }}
        @keyframes progress {{
            0% {{ transform: translateX(-100%); }}
            100% {{ transform: translateX(100%); }}
        }}
        /* print styles */
        @media print {{
            body {{
                background: white;
                padding: 0;
            }}
            .container {{
                box-shadow: none;
                border-radius: 0;
            }}
            .header {{
                background: #667eea !important;
                -webkit-print-color-adjust: exact;
            }}
        }}
    </style>
</head>
<body>
    <div class="container">
        <div class="header">
            <h1><i class="fas fa-chart-line"></i> 키워드 심충분석 결과</h1>
            <p>AI 상품 소싱 분석 시스템 v2.9</p>
        </div>
        <div class="content">
            {analysis_html}
        </div>
        <div class="timestamp">
            <i class="fas fa-clock"></i> 생성 시간: {korean_time} (한국시간)
        </div>
    </div>
</body>
</html>
        """
        with open(html_path, 'w', encoding='utf-8') as f:
            f.write(full_html)
        logger.info(f"HTML 파일 생성 완료: {html_path}")
        return html_path
    except Exception as e:
        logger.error(f"HTML 파일 생성 오류: {e}")
        return None
def create_zip_file(excel_path, html_path, filename_base):
    """Bundle the generated Excel/HTML results into one zip archive.

    Either source path may be None or missing; only existing files are
    added. Returns the zip path in the temp directory, or None on error.
    """
    try:
        zip_path = os.path.join(tempfile.gettempdir(), f"{filename_base}.zip")
        members = (
            (excel_path, f"{filename_base}.xlsx", f"엑셀 파일 압축 추가: {filename_base}.xlsx"),
            (html_path, f"{filename_base}.html", f"HTML 파일 압축 추가: {filename_base}.html"),
        )
        with zipfile.ZipFile(zip_path, 'w', zipfile.ZIP_DEFLATED) as zipf:
            for source, arcname, log_line in members:
                if source and os.path.exists(source):
                    zipf.write(source, arcname)
                    logger.info(log_line)
        logger.info(f"압축 파일 생성 완료: {zip_path}")
        return zip_path
    except Exception as e:
        logger.error(f"압축 파일 생성 오류: {e}")
        return None
def export_analysis_results(export_data):
    """Export the session's analysis results as a downloadable zip.

    Validates the per-session export payload, writes the Excel and HTML
    files (Korean-time-stamped names), and bundles whatever was produced.

    Args:
        export_data: per-session dict built by safe_keyword_analysis.

    Returns:
        (zip_path, message): zip_path is None on any failure so the
        caller can show the message without offering a download.
    """
    try:
        # Guard clauses: refuse to export until an analysis has run.
        if not export_data or not isinstance(export_data, dict):
            return None, "분석 데이터가 없습니다. 먼저 키워드 심충분석을 실행해주세요."
        analysis_keyword = export_data.get("analysis_keyword", "")
        if not analysis_keyword:
            return None, "분석할 키워드가 설정되지 않았습니다. 먼저 키워드 분석을 실행해주세요."
        analysis_html = export_data.get("analysis_html", "")
        if not analysis_html:
            return None, "분석 결과가 없습니다. 먼저 키워드 심충분석을 실행해주세요."
        main_keyword = export_data.get("main_keyword", "")
        main_keywords_df = export_data.get("main_keywords_df")
        related_keywords_df = export_data.get("related_keywords_df")
        # Korean-time-stamped base name shared by all output files.
        filename_base = create_timestamp_filename(analysis_keyword)
        logger.info(f"출력 파일명: {filename_base}")
        # Excel only when at least one DataFrame exists.
        excel_path = None
        if main_keywords_df is not None or related_keywords_df is not None:
            excel_path = export_to_excel(
                main_keyword,
                main_keywords_df,
                analysis_keyword,
                related_keywords_df,
                filename_base
            )
        html_path = export_to_html(analysis_html, filename_base)
        if not excel_path and not html_path:
            return None, "출력할 파일이 없습니다."
        zip_path = create_zip_file(excel_path, html_path, filename_base)
        if not zip_path:
            return None, "압축 파일 생성에 실패했습니다."
        return zip_path, f"✅ 분석 결과가 성공적으로 출력되었습니다!\n파일명: {filename_base}.zip"
    except Exception as e:
        logger.error(f"분석 결과 출력 오류: {e}")
        return None, f"출력 중 오류가 발생했습니다: {str(e)}"
# ===== ๊ทธ๋ผ๋””์˜ค ์ธํ„ฐํŽ˜์ด์Šค =====
def create_interface():
# CSS ํŒŒ์ผ ๋กœ๋“œ
try:
with open('style.css', 'r', encoding='utf-8') as f:
custom_css = f.read()
with open('keyword_analysis_report.css', 'r', encoding='utf-8') as f:
keyword_css = f.read()
custom_css += "\n" + keyword_css
except:
custom_css = """
:root { --primary-color: #FB7F0D; --secondary-color: #ff9a8b; }
.custom-button {
background: linear-gradient(135deg, var(--primary-color), var(--secondary-color)) !important;
color: white !important; border-radius: 30px !important; height: 45px !important;
font-size: 16px !important; font-weight: bold !important; width: 100% !important;
}
.export-button {
background: linear-gradient(135deg, #28a745, #20c997) !important;
color: white !important; border-radius: 25px !important; height: 50px !important;
font-size: 17px !important; font-weight: bold !important; width: 100% !important;
margin-top: 20px !important;
}
"""
with gr.Blocks(
css=custom_css,
title="๐Ÿ›’ AI ์ƒํ’ˆ ์†Œ์‹ฑ ๋ถ„์„๊ธฐ v2.9",
theme=gr.themes.Default(primary_hue="orange", secondary_hue="orange")
) as interface:
# ํฐํŠธ ๋ฐ ์•„์ด์ฝ˜ ๋กœ๋“œ
gr.HTML("""
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.0.0/css/all.min.css">
<link rel="stylesheet" href="https://cdn.jsdelivr.net/gh/orioncactus/pretendard/dist/web/static/pretendard.css">
""")
# ์„ธ์…˜๋ณ„ ์ƒํƒœ ๋ณ€์ˆ˜ (๋ฉ€ํ‹ฐ ์‚ฌ์šฉ์ž ์•ˆ์ „)
keywords_data_state = gr.State()
export_data_state = gr.State({})
# === 1๋‹จ๊ณ„: ๋ฉ”์ธ ํ‚ค์›Œ๋“œ ์ž…๋ ฅ ===
with gr.Column(elem_classes="custom-frame fade-in"):
gr.HTML('<div class="section-title"><i class="fas fa-search"></i> 1๋‹จ๊ณ„: ๋ฉ”์ธ ํ‚ค์›Œ๋“œ ์ž…๋ ฅ</div>')
keyword_input = gr.Textbox(
label="์ƒํ’ˆ ๋ฉ”์ธํ‚ค์›Œ๋“œ",
placeholder="์˜ˆ: ์Šฌ๋ฆฌํผ, ๋ฌด์„ ์ด์–ดํฐ, ํ•ธ๋“œํฌ๋ฆผ",
value="",
elem_id="keyword_input"
)
collect_data_btn = gr.Button("1๋‹จ๊ณ„: ์ƒํ’ˆ ๋ฐ์ดํ„ฐ ์ˆ˜์ง‘ํ•˜๊ธฐ", elem_classes="custom-button", size="lg")
# === 2๋‹จ๊ณ„: ์ˆ˜์ง‘๋œ ํ‚ค์›Œ๋“œ ๋ชฉ๋ก ===
with gr.Column(elem_classes="custom-frame fade-in"):
gr.HTML('<div class="section-title"><i class="fas fa-database"></i> 2๋‹จ๊ณ„: ์ˆ˜์ง‘๋œ ํ‚ค์›Œ๋“œ ๋ชฉ๋ก</div>')
keywords_result = gr.HTML()
# === 3๋‹จ๊ณ„: ๋ถ„์„ํ•  ํ‚ค์›Œ๋“œ ์„ ํƒ ===
with gr.Column(elem_classes="custom-frame fade-in"):
gr.HTML('<div class="section-title"><i class="fas fa-bullseye"></i> 3๋‹จ๊ณ„: ๋ถ„์„ํ•  ํ‚ค์›Œ๋“œ ์„ ํƒ</div>')
analysis_keyword_input = gr.Textbox(
label="๋ถ„์„ํ•  ํ‚ค์›Œ๋“œ",
placeholder="์œ„ ๋ชฉ๋ก์—์„œ ์›ํ•˜๋Š” ํ‚ค์›Œ๋“œ๋ฅผ ์ž…๋ ฅํ•˜์„ธ์š” (์˜ˆ: ํ†ต๊ตฝ ์Šฌ๋ฆฌํผ)",
value="",
elem_id="analysis_keyword_input"
)
analyze_keyword_btn = gr.Button("ํ‚ค์›Œ๋“œ ์‹ฌ์ถฉ๋ถ„์„ ํ•˜๊ธฐ", elem_classes="custom-button", size="lg")
# === ํ‚ค์›Œ๋“œ ์‹ฌ์ถฉ๋ถ„์„ ===
with gr.Column(elem_classes="custom-frame fade-in"):
gr.HTML('<div class="section-title"><i class="fas fa-chart-line"></i> ํ‚ค์›Œ๋“œ ์‹ฌ์ถฉ๋ถ„์„</div>')
analysis_result = gr.HTML(label="ํ‚ค์›Œ๋“œ ์‹ฌ์ถฉ๋ถ„์„")
# === ๊ฒฐ๊ณผ ์ถœ๋ ฅ ์„น์…˜ ===
with gr.Column(elem_classes="custom-frame fade-in"):
gr.HTML('<div class="section-title"><i class="fas fa-download"></i> ๋ถ„์„ ๊ฒฐ๊ณผ ์ถœ๋ ฅ</div>')
export_btn = gr.Button("๐Ÿ“Š ๋ถ„์„๊ฒฐ๊ณผ ์ถœ๋ ฅํ•˜๊ธฐ", elem_classes="export-button", size="lg")
export_result = gr.HTML()
download_file = gr.File(label="๋‹ค์šด๋กœ๋“œ", visible=False)
# ===== ์ด๋ฒคํŠธ ํ•ธ๋“ค๋Ÿฌ =====
def on_collect_data(keyword):
if not keyword.strip():
return ("<div style='color: red; padding: 20px; text-align: center; width: 100%;'>ํ‚ค์›Œ๋“œ๋ฅผ ์ž…๋ ฅํ•ด์ฃผ์„ธ์š”.</div>", None)
# ๋กœ๋”ฉ ์ƒํƒœ ํ‘œ์‹œ
yield (create_loading_animation(), None)
result = extract_keywords_from_products(keyword)
if result["status"] == "error":
yield (f"<div style='color: red; padding: 20px; text-align: center; width: 100%;'>{result['message']}</div>", None)
return
keywords_df = result["keywords_df"]
html_table = export_utils.create_table_without_checkboxes(keywords_df)
success_html = f"""
<div style="width: 100%; background: #d4edda; border: 1px solid #c3e6cb; padding: 15px; border-radius: 5px; margin-bottom: 20px;">
<h4 style="color: #155724; margin: 0 0 10px 0;">โœ… ๋„ค์ด๋ฒ„ ๋ฐ์ดํ„ฐ ์ˆ˜์ง‘ ์™„๋ฃŒ!</h4>
<p style="margin: 0; color: #155724;">
โ€ข ์‹ค์ œ ์ƒํ’ˆ {len(result['products'])}๊ฐœ ๋ถ„์„<br>
โ€ข ์ถ”์ถœ๋œ ํ‚ค์›Œ๋“œ: <strong>{len(keywords_df)}๊ฐœ</strong><br>
โ€ข ์•„๋ž˜ ๋ชฉ๋ก์—์„œ ์›ํ•˜๋Š” ํ‚ค์›Œ๋“œ๋ฅผ ์„ ํƒํ•˜์—ฌ ๋ถ„์„ํ•˜์„ธ์š”
</p>
</div>
<h5 style="margin: 20px 0 10px 0; color: #495057;">๐Ÿ“Š ์ „์ฒด ํ‚ค์›Œ๋“œ ๋ชฉ๋ก</h5>
{html_table}
"""
yield (success_html, result)
def on_analyze_keyword(analysis_keyword, base_keyword, keywords_data):
    """Gradio handler: run related-keyword + deep keyword analysis.

    Generator handler — streams ``(result_html, session_export_data)``:
    first a loading animation, then the combined analysis result plus the
    session-scoped export payload.

    Args:
        analysis_keyword: keyword the user chose to analyze in depth.
        base_keyword: the original collection keyword (context for analysis).
        keywords_data: session data produced by the collection step.

    Yields:
        tuple[str, dict]: HTML for the analysis pane and the export data.
    """
    if not analysis_keyword.strip():
        # BUG FIX: generator function — a plain ``return value`` is swallowed
        # (StopIteration) and the error never reaches the UI. Yield it instead.
        yield "<div style='color: red; padding: 20px; text-align: center; width: 100%;'>๋ถ„์„ํ•  ํ‚ค์›Œ๋“œ๋ฅผ ์ž…๋ ฅํ•ด์ฃผ์„ธ์š”.</div>", {}
        return
    # Show a loading animation while both analyses run.
    yield create_loading_animation(), {}
    # Related-search-term analysis first.
    related_result = analyze_related_keywords(analysis_keyword)
    # Deep keyword analysis (returns per-session export data).
    keyword_result, session_export_data = safe_keyword_analysis(analysis_keyword, base_keyword, keywords_data)
    # Evaluate the success condition once (it was checked twice before).
    has_related = related_result["status"] == "success" and not related_result["keywords_df"].empty
    if has_related:
        df_keywords = related_result["keywords_df"]
        # Attach related-keyword data so the export step can include it.
        session_export_data["related_keywords_df"] = df_keywords
        related_table = export_utils.create_table_without_checkboxes(df_keywords)
        related_html = f"""
        <div style="width: 100%; margin: 30px auto; font-family: 'Pretendard', sans-serif;">
            <div style="background: linear-gradient(135deg, #17a2b8 0%, #20c997 100%); padding: 15px; border-radius: 10px 10px 0 0; color: white; text-align: center;">
                <h3 style="margin: 0; font-size: 18px; color: white;">๐Ÿ”— ์—ฐ๊ด€๊ฒ€์ƒ‰์–ด ๋ถ„์„</h3>
            </div>
            <div style="background: white; padding: 20px; border-radius: 0 0 10px 10px; box-shadow: 0 4px 6px rgba(0, 0, 0, 0.1);">
                <div style="background: #e8f5e8; border: 1px solid #c3e6cb; padding: 15px; border-radius: 5px; margin-bottom: 20px;">
                    <h4 style="color: #155724; margin: 0 0 10px 0;">๐Ÿ”— ์—ฐ๊ด€๊ฒ€์ƒ‰์–ด ๋ถ„์„ ์™„๋ฃŒ!</h4>
                    <p style="margin: 0; color: #155724;">
                        โ€ข ๋ถ„์„ ๊ธฐ์ค€ ์ƒํ’ˆ: <strong>{related_result['total_products']}๊ฐœ</strong><br>
                        โ€ข ๋ฐœ๊ฒฌ๋œ ์—ฐ๊ด€๊ฒ€์ƒ‰์–ด: <strong>{len(df_keywords)}๊ฐœ</strong><br>
                        โ€ข ๋ฉ”์ธ ํ‚ค์›Œ๋“œ์™€ ๊ฒฐํ•ฉ๋œ ๋ณตํ•ฉํ‚ค์›Œ๋“œ๋งŒ ํ‘œ์‹œ๋ฉ๋‹ˆ๋‹ค
                    </p>
                </div>
                {related_table}
            </div>
        </div>
        """
    else:
        related_html = f"""
        <div style="width: 100%; margin: 30px auto; font-family: 'Pretendard', sans-serif;">
            <div style="background: linear-gradient(135deg, #17a2b8 0%, #20c997 100%); padding: 15px; border-radius: 10px 10px 0 0; color: white; text-align: center;">
                <h3 style="margin: 0; font-size: 18px; color: white;">๐Ÿ”— ์—ฐ๊ด€๊ฒ€์ƒ‰์–ด ๋ถ„์„</h3>
            </div>
            <div style="background: white; padding: 20px; border-radius: 0 0 10px 10px; box-shadow: 0 4px 6px rgba(0, 0, 0, 0.1);">
                <div style="color: orange; padding: 20px; text-align: center; background: #fff3cd; border-radius: 8px;">
                    '{analysis_keyword}' ํ‚ค์›Œ๋“œ์˜ ์—ฐ๊ด€๊ฒ€์ƒ‰์–ด๋ฅผ ์ฐพ์„ ์ˆ˜ ์—†์Šต๋‹ˆ๋‹ค.
                </div>
            </div>
        </div>
        """
    # Prepend the related-search section to the exported HTML — done once
    # here instead of duplicated in both branches as before.
    session_export_data["analysis_html"] = related_html + session_export_data["analysis_html"]
    # Combined on-screen result: related-search section above deep analysis.
    final_result = related_html + keyword_result
    yield final_result, session_export_data
def on_export_results(export_data):
    """Export the session's analysis results as a downloadable archive.

    Builds a ZIP via ``export_analysis_results`` and returns a status HTML
    fragment plus a ``gr.update`` controlling the download-file widget.

    Args:
        export_data: per-session export payload accumulated by the analysis step.

    Returns:
        tuple[str, gr.update]: status HTML and download-widget update.
    """
    try:
        archive_path, message = export_analysis_results(export_data)
        # Guard clause: no archive means the export itself reported a failure.
        if not archive_path:
            failure_html = f"""
            <div style="background: #f8d7da; border: 1px solid #f5c6cb; padding: 20px; border-radius: 8px; margin: 10px 0;">
                <h4 style="color: #721c24; margin: 0 0 10px 0;"><i class="fas fa-exclamation-triangle"></i> ์ถœ๋ ฅ ์‹คํŒจ</h4>
                <p style="color: #721c24; margin: 0;">{message}</p>
            </div>
            """
            return failure_html, gr.update(visible=False)
        # Success: surface the message and reveal the download widget.
        success_html = f"""
        <div style="background: #d4edda; border: 1px solid #c3e6cb; padding: 20px; border-radius: 8px; margin: 10px 0;">
            <h4 style="color: #155724; margin: 0 0 15px 0;"><i class="fas fa-check-circle"></i> ์ถœ๋ ฅ ์™„๋ฃŒ!</h4>
            <p style="color: #155724; margin: 0; line-height: 1.6;">
                {message}<br>
                <strong>ํฌํ•จ ํŒŒ์ผ:</strong><br>
                โ€ข ๐Ÿ“Š ์—‘์…€ ํŒŒ์ผ: ๋ฉ”์ธํ‚ค์›Œ๋“œ ์กฐํ•ฉํ‚ค์›Œ๋“œ + ์—ฐ๊ด€๊ฒ€์ƒ‰์–ด ๋ฐ์ดํ„ฐ<br>
                โ€ข ๐ŸŒ HTML ํŒŒ์ผ: ํ‚ค์›Œ๋“œ ์‹ฌ์ถฉ๋ถ„์„ ๊ฒฐ๊ณผ (๊ทธ๋ž˜ํ”„ ํฌํ•จ)<br>
                <br>
                <i class="fas fa-download"></i> ์•„๋ž˜ ๋‹ค์šด๋กœ๋“œ ๋ฒ„ํŠผ์„ ํด๋ฆญํ•˜์—ฌ ํŒŒ์ผ์„ ์ €์žฅํ•˜์„ธ์š”.<br>
                <small style="color: #666;">โฐ ํ•œ๊ตญ์‹œ๊ฐ„ ๊ธฐ์ค€์œผ๋กœ ํŒŒ์ผ๋ช…์ด ์ƒ์„ฑ๋ฉ๋‹ˆ๋‹ค.</small>
            </p>
        </div>
        """
        return success_html, gr.update(value=archive_path, visible=True)
    except Exception as exc:
        # Unexpected failure anywhere in the export pipeline.
        logger.error(f"์ถœ๋ ฅ ํ•ธ๋“ค๋Ÿฌ ์˜ค๋ฅ˜: {exc}")
        error_html = f"""
        <div style="background: #f8d7da; border: 1px solid #f5c6cb; padding: 20px; border-radius: 8px; margin: 10px 0;">
            <h4 style="color: #721c24; margin: 0 0 10px 0;"><i class="fas fa-exclamation-triangle"></i> ์‹œ์Šคํ…œ ์˜ค๋ฅ˜</h4>
            <p style="color: #721c24; margin: 0;">์ถœ๋ ฅ ์ค‘ ์‹œ์Šคํ…œ ์˜ค๋ฅ˜๊ฐ€ ๋ฐœ์ƒํ–ˆ์Šต๋‹ˆ๋‹ค: {str(exc)}</p>
        </div>
        """
        return error_html, gr.update(visible=False)
# ===== ์ด๋ฒคํŠธ ์—ฐ๊ฒฐ =====
collect_data_btn.click(
fn=on_collect_data,
inputs=[keyword_input],
outputs=[keywords_result, keywords_data_state]
)
analyze_keyword_btn.click(
fn=on_analyze_keyword,
inputs=[analysis_keyword_input, keyword_input, keywords_data_state],
outputs=[analysis_result, export_data_state]
)
export_btn.click(
fn=on_export_results,
inputs=[export_data_state],
outputs=[export_result, download_file]
)
return interface
# ===== API ์„ค์ • ํ™•์ธ ํ•จ์ˆ˜ =====
def check_datalab_api_config():
    """Validate the Naver DataLab API credentials and log what was found.

    Returns:
        bool: True when a plausible CLIENT_ID / CLIENT_SECRET pair is
        configured, False when keys are missing or still set to defaults.
    """
    logger.info("=== ๋„ค์ด๋ฒ„ ๋ฐ์ดํ„ฐ๋žฉ API ์„ค์ • ํ™•์ธ ===")
    config = api_utils.get_next_datalab_api_config()
    # Guard: no configuration at all — trend analysis will be disabled.
    if not config:
        logger.warning("โŒ ๋ฐ์ดํ„ฐ๋žฉ API ํ‚ค๊ฐ€ ์„ค์ •๋˜์ง€ ์•Š์•˜์Šต๋‹ˆ๋‹ค.")
        logger.info("ํŠธ๋ Œ๋“œ ๋ถ„์„ ๊ธฐ๋Šฅ์ด ๋น„ํ™œ์„ฑํ™”๋ฉ๋‹ˆ๋‹ค.")
        return False
    cid = config["CLIENT_ID"]
    secret = config["CLIENT_SECRET"]
    logger.info(f"์ด {len(api_utils.NAVER_DATALAB_CONFIGS)}๊ฐœ์˜ ๋ฐ์ดํ„ฐ๋žฉ API ์„ค์ • ์‚ฌ์šฉ ์ค‘")
    logger.info(f"ํ˜„์žฌ ์„ ํƒ๋œ API:")
    # Log masked forms only — never the full secrets.
    logger.info(f" CLIENT_ID: {cid[:8]}***{cid[-4:] if len(cid) > 12 else '***'}")
    logger.info(f" CLIENT_SECRET: {secret[:4]}***{secret[-2:] if len(secret) > 6 else '***'}")
    # Placeholder ("YOUR_...") values mean the keys were never filled in.
    if cid.startswith("YOUR_"):
        logger.error("โŒ CLIENT_ID๊ฐ€ ๊ธฐ๋ณธ๊ฐ’์œผ๋กœ ์„ค์ •๋˜์–ด ์žˆ์Šต๋‹ˆ๋‹ค!")
        return False
    if secret.startswith("YOUR_"):
        logger.error("โŒ CLIENT_SECRET์ด ๊ธฐ๋ณธ๊ฐ’์œผ๋กœ ์„ค์ •๋˜์–ด ์žˆ์Šต๋‹ˆ๋‹ค!")
        return False
    # Suspiciously short keys are only warned about, not rejected.
    if len(cid) < 10:
        logger.warning("โš ๏ธ CLIENT_ID๊ฐ€ ์งง์Šต๋‹ˆ๋‹ค. ์˜ฌ๋ฐ”๋ฅธ ํ‚ค์ธ์ง€ ํ™•์ธํ•ด์ฃผ์„ธ์š”.")
    if len(secret) < 5:
        logger.warning("โš ๏ธ CLIENT_SECRET์ด ์งง์Šต๋‹ˆ๋‹ค. ์˜ฌ๋ฐ”๋ฅธ ํ‚ค์ธ์ง€ ํ™•์ธํ•ด์ฃผ์„ธ์š”.")
    logger.info("โœ… ๋ฐ์ดํ„ฐ๋žฉ API ํ‚ค ํ˜•์‹ ๊ฒ€์ฆ ์™„๋ฃŒ")
    return True
def check_gemini_api_config():
    """Validate the Gemini API configuration and log the outcome.

    Returns:
        bool: True when a usable Gemini key is configured, False otherwise.
    """
    logger.info("=== Gemini API ์„ค์ • ํ™•์ธ ===")
    ok, message = api_utils.validate_gemini_config()
    # Guard clause: configuration invalid — AI analysis will be limited.
    if not ok:
        logger.warning(f"โŒ {message}")
        logger.info("AI ๋ถ„์„ ๊ธฐ๋Šฅ์ด ์ œํ•œ๋  ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค.")
        return False
    logger.info(f"โœ… {message}")
    # Peek at the first available key and log a masked form of it.
    current_key = api_utils.get_next_gemini_api_key()
    if current_key:
        logger.info(f"ํ˜„์žฌ ์‚ฌ์šฉ ์ค‘์ธ Gemini API ํ‚ค: {current_key[:8]}***{current_key[-4:]}")
    return True
# ===== ๋ฉ”์ธ ์‹คํ–‰ =====
if __name__ == "__main__":
    # NOTE: ``pytz`` is imported unconditionally at the top of this module,
    # so if it were missing the program would already have failed at import
    # time. The former ``try: import pytz / except ImportError`` block here
    # was unreachable dead code and has been removed.
    logger.info("โœ… pytz ๋ชจ๋“ˆ ๋กœ๋“œ ์„ฑ๊ณต - ํ•œ๊ตญ์‹œ๊ฐ„ ์ง€์›")
    # API ์„ค์ • ์ดˆ๊ธฐํ™”
    api_utils.initialize_api_configs()
    logger.info("===== ์ƒํ’ˆ ์†Œ์‹ฑ ๋ถ„์„ ์‹œ์Šคํ…œ v2.9 (์ถœ๋ ฅ๊ธฐ๋Šฅ ์ถ”๊ฐ€ + ํ•œ๊ตญ์‹œ๊ฐ„ + ๋ฉ€ํ‹ฐ์‚ฌ์šฉ์ž ์•ˆ์ „) ์‹œ์ž‘ =====")
    # Check Naver DataLab API configuration (enables trend analysis).
    datalab_available = check_datalab_api_config()
    # Check Gemini API configuration (enables AI analysis).
    gemini_available = check_gemini_api_config()
    # Package installation guidance for first-time operators.
    print("๐Ÿ“ฆ ํ•„์š”ํ•œ ํŒจํ‚ค์ง€:")
    print(" pip install gradio google-generativeai pandas requests xlsxwriter markdown plotly pytz")
    print()
    # API key setup guidance — only shown when the relevant key is missing.
    if not gemini_available:
        print("โš ๏ธ GEMINI_API_KEY ๋˜๋Š” GOOGLE_API_KEY ํ™˜๊ฒฝ๋ณ€์ˆ˜๋ฅผ ์„ค์ •ํ•˜์„ธ์š”.")
        print(" export GEMINI_API_KEY='your-api-key'")
        print(" ๋˜๋Š”")
        print(" export GOOGLE_API_KEY='your-api-key'")
        print()
    if not datalab_available:
        print("โš ๏ธ ๋„ค์ด๋ฒ„ ๋ฐ์ดํ„ฐ๋žฉ API ํŠธ๋ Œ๋“œ ๋ถ„์„์„ ์œ„ํ•ด์„œ๋Š”:")
        print(" 1. ๋„ค์ด๋ฒ„ ๊ฐœ๋ฐœ์ž์„ผํ„ฐ(https://developers.naver.com)์—์„œ ์• ํ”Œ๋ฆฌ์ผ€์ด์…˜ ๋“ฑ๋ก")
        print(" 2. '๋ฐ์ดํ„ฐ๋žฉ(๊ฒ€์ƒ‰์–ด ํŠธ๋ Œ๋“œ)' API ์ถ”๊ฐ€")
        print(" 3. ๋ฐœ๊ธ‰๋ฐ›์€ CLIENT_ID์™€ CLIENT_SECRET์„ api_utils.py์˜ NAVER_DATALAB_CONFIGS์— ์„ค์ •")
        print(" 4. ํ˜„์žฌ๋Š” ํ˜„์žฌ ๊ฒ€์ƒ‰๋Ÿ‰ ์ •๋ณด๋งŒ ํ‘œ์‹œ๋ฉ๋‹ˆ๋‹ค.")
        print()
    else:
        print("โœ… ๋ฐ์ดํ„ฐ๋žฉ API ์„ค์ • ์™„๋ฃŒ - 1๋…„, 3๋…„ ํŠธ๋ Œ๋“œ ๋ถ„์„์ด ๊ฐ€๋Šฅํ•ฉ๋‹ˆ๋‹ค!")
        print()
    if gemini_available:
        print("โœ… Gemini API ์„ค์ • ์™„๋ฃŒ - AI ๋ถ„์„์ด ๊ฐ€๋Šฅํ•ฉ๋‹ˆ๋‹ค!")
        print()
    print("๐Ÿ›ก๏ธ v2.9 ๋ฉ€ํ‹ฐ์‚ฌ์šฉ์ž ์•ˆ์ „ ๊ฐœ์„ ์‚ฌํ•ญ:")
    print(" โ€ข ์ „์—ญ ๋ณ€์ˆ˜ export_state ์™„์ „ ์ œ๊ฑฐ")
    print(" โ€ข gr.State({}) ์‚ฌ์šฉ์œผ๋กœ ๊ฐ ์‚ฌ์šฉ์ž๋ณ„ ์„ธ์…˜ ๋ฐ์ดํ„ฐ ์™„์ „ ๋ถ„๋ฆฌ")
    print(" โ€ข safe_keyword_analysis() ํ•จ์ˆ˜์—์„œ ์„ธ์…˜๋ณ„ ๋ฐ์ดํ„ฐ ๋ฐ˜ํ™˜")
    print(" โ€ข export_analysis_results() ํ•จ์ˆ˜์—์„œ ์„ธ์…˜๋ณ„ ๋ฐ์ดํ„ฐ ์ฒ˜๋ฆฌ")
    print(" โ€ข ์ด๋ฒคํŠธ ํ•ธ๋“ค๋Ÿฌ์—์„œ export_data_state ์„ธ์…˜ ์ƒํƒœ ๊ด€๋ฆฌ")
    print(" โ€ข ํ—ˆ๊น…ํŽ˜์ด์Šค ์ŠคํŽ˜์ด์Šค ๋“ฑ ๋ฉ€ํ‹ฐ์‚ฌ์šฉ์ž ํ™˜๊ฒฝ์—์„œ ์•ˆ์ „ํ•œ ๋™์‹œ ์‚ฌ์šฉ ๋ณด์žฅ")
    print()
    print("๐Ÿš€ ๊ธฐ์กด v2.9 ๊ธฐ๋Šฅ:")
    print(" โ€ข ์—ฐ๊ด€๊ฒ€์ƒ‰์–ด ์—‘์…€ ์ถœ๋ ฅ")
    print(" โ€ข ํ‚ค์›Œ๋“œ ์‹ฌ์ถฉ๋ถ„์„ HTML ์ถœ๋ ฅ")
    print(" โ€ข ์••์ถ•ํŒŒ์ผ๋กœ ๊ฒฐ๊ณผ ๋‹ค์šด๋กœ๋“œ")
    print(" โ€ข Gemini API ํ‚ค ํ†ตํ•ฉ ๊ด€๋ฆฌ")
    print(" โ€ข ํ•œ๊ตญ์‹œ๊ฐ„ ์ ์šฉ")
    print()
    # Build the Gradio interface and expose it; 0.0.0.0 + share=True makes
    # it reachable from outside the container/host.
    app = create_interface()
    app.launch(server_name="0.0.0.0", server_port=7860, share=True)