|
|
|
|
|
""" |
|
|
AI ์ํ ์์ฑ ๋ถ์ ์์คํ
v2.9 - ์ถ๋ ฅ ๊ธฐ๋ฅ ์ถ๊ฐ + ๋ฉํฐ์ฌ์ฉ์ ์์ |
|
|
- ์ฐ๊ด๊ฒ์์ด ์์
์ถ๋ ฅ |
|
|
- ํค์๋ ์ฌ์ถฉ๋ถ์ HTML ์ถ๋ ฅ |
|
|
- ์์ถํ์ผ๋ก ๊ฒฐ๊ณผ ๋ค์ด๋ก๋ |
|
|
- Gemini API ํค ํตํฉ ๊ด๋ฆฌ |
|
|
- ํ๊ตญ์๊ฐ ์ ์ฉ |
|
|
- ๋ฉํฐ ์ฌ์ฉ์ ์์ : gr.State ์ฌ์ฉ์ผ๋ก ์ธ์
๋ณ ๋ฐ์ดํฐ ๊ด๋ฆฌ |
|
|
""" |
|
|
|
|
|
import gradio as gr |
|
|
import pandas as pd |
|
|
import os |
|
|
import logging |
|
|
import google.generativeai as genai |
|
|
from datetime import datetime, timedelta |
|
|
import pytz |
|
|
import time |
|
|
import re |
|
|
from collections import Counter |
|
|
import zipfile |
|
|
import tempfile |
|
|
|
|
|
|
|
|
# Module-wide logging: timestamped, INFO-level records for every module logger.
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
logger = logging.getLogger(__name__)
|
|
|
|
|
|
|
|
import api_utils |
|
|
import text_utils |
|
|
import keyword_search |
|
|
import product_search |
|
|
import keyword_processor |
|
|
import export_utils |
|
|
import keyword_analysis |
|
|
import trend_analysis_v2 |
|
|
|
|
|
|
|
|
def setup_gemini_model():
    """Initialize the Gemini model through api_utils (centralized key management).

    Returns:
        The Gemini model instance, or None when no API key is configured or
        initialization raises.
    """
    try:
        model = api_utils.get_gemini_model()
        # Missing/unset API key surfaces as a falsy model rather than an exception.
        if not model:
            logger.warning("Gemini API ํค๊ฐ ์ค์ ๋์ง ์์์ต๋๋ค.")
            return None
        logger.info("Gemini ๋ชจ๋ธ ์ด๊ธฐํ ์ฑ๊ณต (api_utils ํตํฉ ๊ด๋ฆฌ)")
        return model
    except Exception as e:
        # Initialization failures are logged but never propagate: the app can
        # still run without AI features.
        logger.error(f"Gemini ๋ชจ๋ธ ์ด๊ธฐํ ์คํจ: {e}")
        return None
|
|
|
|
|
|
|
|
# Initialized once at import time; None when no Gemini API key is configured.
gemini_model = setup_gemini_model()
|
|
|
|
|
|
|
|
def get_korean_time():
    """Return the current time as a timezone-aware datetime in Asia/Seoul (KST)."""
    return datetime.now(pytz.timezone('Asia/Seoul'))
|
|
|
|
|
def format_korean_datetime(dt=None, format_type="filename"):
    """Format a datetime according to one of the app's named formats.

    Args:
        dt: datetime to format; defaults to the current Korean (KST) time.
        format_type: "filename" (compact timestamp), "display" (human-readable
            Korean date), or "full" (ISO-like). Any other value falls back to
            the "filename" format.

    Returns:
        The formatted string.
    """
    if dt is None:
        dt = get_korean_time()

    # Dispatch table replaces the original if/elif chain; unknown keys fall
    # back to the compact filename timestamp, matching the original behavior.
    patterns = {
        "filename": "%y%m%d_%H%M",
        "display": '%Y๋ %m์ %d์ผ %H์ %M๋ถ',
        "full": '%Y-%m-%d %H:%M:%S',
    }
    return dt.strftime(patterns.get(format_type, "%y%m%d_%H%M"))
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def analyze_related_keywords(keyword):
    """Related-search-term analysis: derive compound keywords from Naver products.

    Fetches up to 40 shopping results for `keyword`, tokenizes their titles,
    builds "<word> <keyword>" / "<keyword> <word>" compounds, looks up search
    volumes for each, and keeps the higher-volume variant per word.

    Args:
        keyword: the main keyword entered by the user (may contain spaces).

    Returns:
        dict with "status" ("success"/"error"), "message", "keywords_df"
        (pandas DataFrame sorted by total volume, descending), and on success
        also "total_products".
    """
    logger.info(f"์ฐ๊ด๊ฒ์์ด ๋ถ์ ์์: '{keyword}'")

    try:
        # The shopping API expects the keyword with spaces stripped.
        api_keyword = keyword.replace(" ", "")
        products_data = []

        # Collect up to 4 pages x 10 products; stop early on failure/empty page.
        for page in range(1, 5):
            result = product_search.fetch_products_by_keyword(api_keyword, page=page, display=10)
            if result["status"] == "success" and result["products"]:
                products_data.extend(result["products"])
            else:
                break
            time.sleep(0.3)  # throttle between paged API calls

        if not products_data:
            return {
                "status": "error",
                "message": f"'{keyword}' ํค์๋๋ก ์ํ์ ์ฐพ์ ์ ์์ต๋๋ค.",
                "keywords_df": pd.DataFrame()
            }

        # Cap the analysis basis at 40 products.
        products_data = products_data[:40]
        logger.info(f"์ํ ์ถ์ถ ์๋ฃ: {len(products_data)}๊ฐ")

        # Tokenize every product title on commas/whitespace.
        all_words = []
        for product in products_data:
            title = product.get("์ํ๋ช", "")
            words = re.split(r'[,\s]+', title)
            all_words.extend([word.strip() for word in words if word.strip() and len(word.strip()) >= 1])

        unique_words = list(set(all_words))
        logger.info(f"์ถ์ถ๋ ๋จ์ด ์: {len(unique_words)}๊ฐ")

        # Build front ("word keyword") and back ("keyword word") compounds for
        # every distinct title word (>= 2 chars, not the main keyword itself).
        compound_keywords = []
        main_keyword = keyword.strip()

        for word in unique_words:
            if word != main_keyword and len(word) >= 2:
                front_compound = f"{word} {main_keyword}"
                compound_keywords.append(front_compound)
                back_compound = f"{main_keyword} {word}"
                compound_keywords.append(back_compound)

        compound_keywords = list(set(compound_keywords))
        logger.info(f"์์ฑ๋ ๋ณตํฉํค์๋ ์: {len(compound_keywords)}๊ฐ")

        # Volume lookup keys are the space-stripped compound forms.
        api_keywords = [kw.replace(" ", "") for kw in compound_keywords]
        search_volumes = keyword_search.fetch_all_search_volumes(api_keywords)

        # Re-derive the front/back pair per word and attach total volumes.
        keyword_pairs = {}

        for word in unique_words:
            if word != main_keyword and len(word) >= 2:
                front_kw = f"{word} {main_keyword}"
                back_kw = f"{main_keyword} {word}"

                front_api = front_kw.replace(" ", "")
                back_api = back_kw.replace(" ", "")

                front_vol = search_volumes.get(front_api, {}).get("์ด๊ฒ์๋", 0)
                back_vol = search_volumes.get(back_api, {}).get("์ด๊ฒ์๋", 0)

                keyword_pairs[word] = {
                    "front": front_kw,
                    "back": back_kw,
                    "front_vol": front_vol,
                    "back_vol": back_vol
                }

        # Keep the higher-volume variant of each pair; a non-zero tie prefers
        # the back form ("keyword word"); a zero/zero pair is dropped.
        final_keywords = []
        for word, data in keyword_pairs.items():
            if data["front_vol"] > data["back_vol"]:
                selected_kw = data["front"]
                selected_vol = data["front_vol"]
                selected_api = selected_kw.replace(" ", "")
            elif data["back_vol"] > data["front_vol"]:
                selected_kw = data["back"]
                selected_vol = data["back_vol"]
                selected_api = selected_kw.replace(" ", "")
            elif data["front_vol"] == data["back_vol"] and data["front_vol"] > 0:
                # Tie-break: prefer the "keyword word" ordering.
                selected_kw = data["back"]
                selected_vol = data["back_vol"]
                selected_api = selected_kw.replace(" ", "")
            else:
                # Both variants have zero volume: skip this word entirely.
                continue

            if selected_vol > 0:
                vol_data = search_volumes.get(selected_api, {})
                final_keywords.append({
                    "์ฐ๊ด ํค์๋": selected_kw,
                    "PC๊ฒ์๋": vol_data.get("PC๊ฒ์๋", 0),
                    "๋ชจ๋ฐ์ผ๊ฒ์๋": vol_data.get("๋ชจ๋ฐ์ผ๊ฒ์๋", 0),
                    "์ด๊ฒ์๋": selected_vol,
                    "๊ฒ์๋๊ตฌ๊ฐ": text_utils.get_search_volume_range(selected_vol)
                })

        # Sort by total search volume, highest first.
        final_keywords = sorted(final_keywords, key=lambda x: x["์ด๊ฒ์๋"], reverse=True)

        df_keywords = pd.DataFrame(final_keywords)

        logger.info(f"์ฐ๊ด๊ฒ์์ด ๋ถ์ ์๋ฃ: {len(final_keywords)}๊ฐ ํค์๋")

        return {
            "status": "success",
            "message": f"'{keyword}' ์ฐ๊ด๊ฒ์์ด {len(final_keywords)}๊ฐ๋ฅผ ์ฐพ์์ต๋๋ค.",
            "keywords_df": df_keywords,
            "total_products": len(products_data)
        }

    except Exception as e:
        # Any failure is reported back to the UI as an error payload.
        logger.error(f"์ฐ๊ด๊ฒ์์ด ๋ถ์ ์ค๋ฅ: {e}")
        return {
            "status": "error",
            "message": f"์ฐ๊ด๊ฒ์์ด ๋ถ์ ์ค ์ค๋ฅ๊ฐ ๋ฐ์ํ์ต๋๋ค: {str(e)}",
            "keywords_df": pd.DataFrame()
        }
|
|
|
|
|
|
|
|
def create_loading_animation():
    """Return the loading-spinner HTML (inline CSS animations) shown while a
    long-running analysis step is in progress."""
    return """
    <div style="display: flex; flex-direction: column; align-items: center; padding: 40px; background: white; border-radius: 12px; box-shadow: 0 4px 12px rgba(0,0,0,0.1);">
        <div style="width: 60px; height: 60px; border: 4px solid #f3f3f3; border-top: 4px solid #FB7F0D; border-radius: 50%; animation: spin 1s linear infinite; margin-bottom: 20px;"></div>
        <h3 style="color: #FB7F0D; margin: 10px 0; font-size: 18px;">๋ถ์ ์ค์๋๋ค...</h3>
        <p style="color: #666; margin: 5px 0; text-align: center;">๋ค์ด๋ฒ ๋ฐ์ดํฐ๋ฅผ ์์งํ๊ณ AI๊ฐ ๋ถ์ํ๊ณ ์์ต๋๋ค.<br>์ ์๋ง ๊ธฐ๋ค๋ ค์ฃผ์ธ์.</p>
        <div style="width: 200px; height: 4px; background: #f0f0f0; border-radius: 2px; margin-top: 15px; overflow: hidden;">
            <div style="width: 100%; height: 100%; background: linear-gradient(90deg, #FB7F0D, #ff9a8b); border-radius: 2px; animation: progress 2s ease-in-out infinite;"></div>
        </div>
    </div>

    <style>
    @keyframes spin {
        0% { transform: rotate(0deg); }
        100% { transform: rotate(360deg); }
    }

    @keyframes progress {
        0% { transform: translateX(-100%); }
        100% { transform: translateX(100%); }
    }
    </style>
    """
|
|
|
|
|
|
|
|
def generate_error_response(error_message):
    """Render `error_message` as the app's standard red error card (HTML),
    including a fixed troubleshooting checklist for the user."""
    return f'''
    <div style="color: red; padding: 30px; text-align: center; width: 100%;
                background-color: #f8d7da; border-radius: 12px; border: 1px solid #f5c6cb;">
        <h3 style="margin-bottom: 15px;">โ ๋ถ์ ์ค๋ฅ</h3>
        <p style="margin-bottom: 20px;">{error_message}</p>
        <div style="background: white; padding: 15px; border-radius: 8px; color: #333;">
            <h4>ํด๊ฒฐ ๋ฐฉ๋ฒ:</h4>
            <ul style="text-align: left; padding-left: 20px;">
                <li>ํค์๋ ์ฒ ์๋ฅผ ํ์ธํด์ฃผ์ธ์</li>
                <li>๋ ๊ฐ๋จํ ํค์๋๋ฅผ ์ฌ์ฉํด๋ณด์ธ์</li>
                <li>๋คํธ์ํฌ ์ฐ๊ฒฐ์ ํ์ธํด์ฃผ์ธ์</li>
                <li>์ ์ ํ ๋ค์ ์๋ํด์ฃผ์ธ์</li>
            </ul>
        </div>
    </div>
    '''
|
|
|
|
|
|
|
|
def safe_keyword_analysis(analysis_keyword, base_keyword, keywords_data):
    """Run the full keyword deep-dive and return (result_html, session_export_data).

    Validates the keyword, fetches its current search volumes, attempts 1-year
    and 3-year trend analysis (best-effort: failures only disable charts), and
    assembles the combined HTML report plus the per-session export payload.

    Args:
        analysis_keyword: keyword chosen by the user for deep analysis.
        base_keyword: the step-1 main keyword (stored in the export payload).
        keywords_data: step-1 session result dict (may be None); its
            "keywords_df" feeds the AI sourcing analysis and the export.

    Returns:
        (html_string, export_dict). Errors never raise: they come back as an
        error card HTML plus an empty dict.
    """

    # Guard: blank keyword -> error card, empty export payload.
    if not analysis_keyword or not analysis_keyword.strip():
        return generate_error_response("๋ถ์ํ ํค์๋๋ฅผ ์๋ ฅํด์ฃผ์ธ์."), {}

    analysis_keyword = analysis_keyword.strip()

    try:
        # Current volumes for the normalized (space-free) API form.
        api_keyword = keyword_analysis.normalize_keyword_for_api(analysis_keyword)
        search_volumes = keyword_search.fetch_all_search_volumes([api_keyword])
        volume_data = search_volumes.get(api_keyword, {"PC๊ฒ์๋": 0, "๋ชจ๋ฐ์ผ๊ฒ์๋": 0, "์ด๊ฒ์๋": 0})

        # Zero total volume: the keyword is unusable -> show a guidance card.
        if volume_data['์ด๊ฒ์๋'] == 0:
            logger.warning(f"'{analysis_keyword}' ํค์๋์ ๊ฒ์๋์ด 0์ด๊ฑฐ๋ ์กด์ฌํ์ง ์์ต๋๋ค.")
            error_result = f"""
            <div style="padding: 30px; text-align: center; background: #fff3cd; border-radius: 12px; border: 1px solid #ffeaa7;">
                <h3 style="color: #856404; margin-bottom: 15px;">โ ๏ธ ํค์๋ ๋ถ์ ๋ถ๊ฐ</h3>
                <p style="color: #856404; margin-bottom: 10px;"><strong>'{analysis_keyword}'</strong> ํค์๋๋ ๊ฒ์๋์ด ์๊ฑฐ๋ ์ฌ๋ฐ๋ฅด์ง ์์ ํค์๋์๋๋ค.</p>
                <div style="background: white; padding: 15px; border-radius: 8px; margin-top: 15px;">
                    <h4 style="color: #333; margin-bottom: 10px;">๐ก ๊ถ์ฅ์ฌํญ</h4>
                    <ul style="text-align: left; color: #666; padding-left: 20px;">
                        <li>ํค์๋ ์ฒ ์๋ฅผ ํ์ธํด์ฃผ์ธ์</li>
                        <li>๋ ์ผ๋ฐ์ ์ธ ํค์๋๋ฅผ ์ฌ์ฉํด๋ณด์ธ์</li>
                        <li>2๋จ๊ณ์์ ์ ์ํ ํค์๋ ๋ชฉ๋ก์ ์ฐธ๊ณ ํด์ฃผ์ธ์</li>
                        <li>ํค์๋๋ฅผ ๋์ด์ฐ๊ธฐ๋ก ๊ตฌ๋ถํด๋ณด์ธ์ (์: '์ฌ์ฑ ์ฌ๋ฆฌํผ')</li>
                    </ul>
                </div>
            </div>
            """
            return error_result, {}

        logger.info(f"'{analysis_keyword}' ํ์ฌ ๊ฒ์๋: {volume_data['์ด๊ฒ์๋']:,}")

        # --- Trend analysis (optional, best-effort) -------------------------
        monthly_data_1year = {}
        monthly_data_3year = {}
        trend_available = False

        try:
            # Only attempt trend calls when a real DataLab key is configured
            # (placeholder keys start with "YOUR_").
            datalab_config = api_utils.get_next_datalab_api_config()
            if datalab_config and not datalab_config["CLIENT_ID"].startswith("YOUR_"):
                logger.info("๋ฐ์ดํฐ๋ฉ API ํค๊ฐ ์ค์ ๋์ด ์์ด 1๋, 3๋ ํธ๋ ๋ ๋ถ์์ ์๋ํฉ๋๋ค.")

                # 1-year trend -> monthly volume estimates.
                trend_data_1year = trend_analysis_v2.get_naver_trend_data_v5([analysis_keyword], "1year", max_retries=3)
                if trend_data_1year:
                    current_volumes = {api_keyword: volume_data}
                    monthly_data_1year = trend_analysis_v2.calculate_monthly_volumes_v7([analysis_keyword], current_volumes, trend_data_1year, "1year")

                # 3-year trend -> monthly volume estimates.
                trend_data_3year = trend_analysis_v2.get_naver_trend_data_v5([analysis_keyword], "3year", max_retries=3)
                if trend_data_3year:
                    current_volumes = {api_keyword: volume_data}
                    monthly_data_3year = trend_analysis_v2.calculate_monthly_volumes_v7([analysis_keyword], current_volumes, trend_data_3year, "3year")

                # Fallback: when only 1-year data exists, synthesize a 3-year
                # series by padding 24 zero months before the observed window.
                if not monthly_data_3year and monthly_data_1year:
                    logger.info("3๋ ๋ฐ์ดํฐ๊ฐ ์์ด 1๋ ๋ฐ์ดํฐ๋ฅผ ๊ธฐ๋ฐ์ผ๋ก 3๋ ์ฐจํธ ์์ฑ")
                    keyword = analysis_keyword
                    if keyword in monthly_data_1year:
                        data_1y = monthly_data_1year[keyword]

                        extended_dates = []
                        extended_volumes = []

                        # 24 synthetic months, approximated as 30-day steps
                        # backwards from the first observed date.
                        start_date = datetime.strptime(data_1y["dates"][0], "%Y-%m-%d")
                        for i in range(24, 0, -1):
                            prev_date = start_date - timedelta(days=30 * i)
                            extended_dates.append(prev_date.strftime("%Y-%m-%d"))
                            extended_volumes.append(0)

                        # Append the actually observed 1-year months.
                        actual_count = data_1y.get("actual_count", len(data_1y["dates"]))
                        extended_dates.extend(data_1y["dates"][:actual_count])
                        extended_volumes.extend(data_1y["monthly_volumes"][:actual_count])

                        monthly_data_3year = {
                            keyword: {
                                "monthly_volumes": extended_volumes,
                                "dates": extended_dates,
                                "current_volume": data_1y["current_volume"],
                                "growth_rate": trend_analysis_v2.calculate_3year_growth_rate_improved(extended_volumes),
                                "volume_per_percent": data_1y["volume_per_percent"],
                                "current_ratio": data_1y["current_ratio"],
                                "actual_count": len(extended_volumes),
                                "predicted_count": 0
                            }
                        }

                if monthly_data_1year or monthly_data_3year:
                    trend_available = True
                    logger.info("ํธ๋ ๋ ๋ถ์ ์ฑ๊ณต")
                else:
                    logger.info("ํธ๋ ๋ ๋ฐ์ดํฐ ์ฒ๋ฆฌ ์คํจ")
            else:
                logger.info("๋ฐ์ดํฐ๋ฉ API ํค๊ฐ ์ค์ ๋์ง ์์")
        except Exception as e:
            # Trend failures are non-fatal; the report degrades gracefully.
            logger.info(f"ํธ๋ ๋ ๋ถ์ ๊ฑด๋๋: {str(e)[:100]}")

        # Step-1 keyword table (if present in the session) feeds the AI step.
        step2_keywords_df = keywords_data.get("keywords_df") if keywords_data else None
        filtered_keywords_df = step2_keywords_df
        target_categories = []

        # Chart: trend chart when data exists, else a current-volume chart.
        if trend_available and (monthly_data_1year or monthly_data_3year):
            try:
                trend_chart = trend_analysis_v2.create_trend_chart_v7(monthly_data_1year, monthly_data_3year)
            except Exception as e:
                logger.warning(f"ํธ๋ ๋ ์ฐจํธ ์์ฑ ์คํจ, ๊ธฐ๋ณธ ์ฐจํธ ์ฌ์ฉ: {e}")
                trend_chart = trend_analysis_v2.create_enhanced_current_chart(volume_data, analysis_keyword)
        else:
            trend_chart = trend_analysis_v2.create_enhanced_current_chart(volume_data, analysis_keyword)

        # Trend panel wrapper.
        trend_section = f"""
        <div style="width: 100%; margin: 30px auto; font-family: 'Pretendard', sans-serif;">
            <div style="background: linear-gradient(135deg, #667eea 0%, #764ba2 100%); padding: 15px; border-radius: 10px 10px 0 0; color: white; text-align: center;">
                <h3 style="margin: 0; font-size: 18px; color: white;">๐ ๊ฒ์๋ ํธ๋ ๋ ๋ถ์</h3>
            </div>
            <div style="background: white; padding: 20px; border-radius: 0 0 10px 10px; box-shadow: 0 4px 6px rgba(0, 0, 0, 0.1);">
                {trend_chart}
            </div>
        </div>
        """

        # AI sourcing analysis; the model is re-fetched per call so a key set
        # at runtime is picked up without restarting the app.
        current_gemini_model = api_utils.get_gemini_model()

        keyword_analysis_html = keyword_analysis.analyze_keyword_for_sourcing(
            analysis_keyword, volume_data, monthly_data_1year, monthly_data_3year,
            filtered_keywords_df, target_categories, current_gemini_model
        )

        # AI analysis panel wrapper.
        keyword_analysis_section = f"""
        <div style="width: 100%; margin: 30px auto; font-family: 'Pretendard', sans-serif;">
            <div style="background: linear-gradient(135deg, #11998e 0%, #38ef7d 100%); padding: 15px; border-radius: 10px 10px 0 0; color: white; text-align: center;">
                <h3 style="margin: 0; font-size: 18px; color: white;">๐ฏ ํค์๋ ๋ถ์</h3>
            </div>
            <div style="background: white; padding: 20px; border-radius: 0 0 10px 10px; box-shadow: 0 4px 6px rgba(0, 0, 0, 0.1); overflow: hidden;">
                {keyword_analysis_html}
            </div>
        </div>
        """

        # Banner shown when trend data could not be fetched.
        warning_section = ""
        if not trend_available:
            warning_section = f"""
            <div style="width: 100%; margin: 20px auto; padding: 15px; background: #fff3cd; border: 1px solid #ffeaa7; border-radius: 8px; font-family: 'Pretendard', sans-serif;">
                <div style="display: flex; align-items: center;">
                    <span style="font-size: 20px; margin-right: 10px;">โ ๏ธ</span>
                    <div>
                        <strong style="color: #856404;">์ผ๋ถ ๊ธฐ๋ฅ ์ ํ</strong>
                        <div style="font-size: 14px; color: #856404; margin-top: 5px;">
                            ํธ๋ ๋ ๋ถ์์ ์ ํ์ด ์์ต๋๋ค. ํ์ฌ ๊ฒ์๋ ๋ถ์๊ณผ AI ์ถ์ฒ์ ์ ์ ์ ๊ณต๋ฉ๋๋ค.<br>
                            <small>์์ ํ ์ ๋ฐ์ดํฐ ๊ธฐ์ค์ผ๋ก ๋ถ์ํ๊ธฐ ์ํด ์ต์ ์๋ฃ๋ ์๊น์ง๋ง ํ์๋ฉ๋๋ค.</small>
                        </div>
                    </div>
                </div>
            </div>
            """

        final_result = warning_section + trend_section + keyword_analysis_section

        # Per-session export payload consumed later by export_analysis_results().
        # NOTE: "related_keywords_df" is intentionally None here; the caller
        # fills it in after the related-keyword search completes.
        session_export_data = {
            "main_keyword": base_keyword,
            "analysis_keyword": analysis_keyword,
            "main_keywords_df": keywords_data.get("keywords_df") if keywords_data else None,
            "related_keywords_df": None,
            "analysis_html": final_result
        }

        return final_result, session_export_data

    except Exception as e:
        logger.error(f"ํค์๋ ๋ถ์ ์ค ์ ์ฒด ์ค๋ฅ: {e}")
        error_result = generate_error_response(f"ํค์๋ ๋ถ์ ์ค ์ค๋ฅ๊ฐ ๋ฐ์ํ์ต๋๋ค: {str(e)}")
        return error_result, {}
|
|
|
|
|
|
|
|
def extract_keywords_from_products(keyword):
    """Collect real Naver Shopping product data and extract all keywords.

    Args:
        keyword: the step-1 main keyword entered by the user.

    Returns:
        dict with "status" and "message"; on success also "products"
        (DataFrame), "keywords_df" (DataFrame), and "categories".
    """
    logger.info(f"์ํ ํค์๋ ์ถ์ถ ์์: ํค์๋='{keyword}'")

    # FIX: the original computed keyword_analysis.normalize_keyword_for_api()
    # into an unused local; fetch_naver_shopping_data receives the raw keyword,
    # so the dead call has been removed.
    search_results = product_search.fetch_naver_shopping_data(
        keyword, korean_only=True, apply_main_keyword=True, exclude_zero_volume=True
    )

    if not search_results.get("product_list"):
        return {
            "status": "error",
            "message": "์ํ ๋ฐ์ดํฐ๋ฅผ ๊ฐ์ ธ์ฌ ์ ์์ต๋๋ค.",
            "products": [],
            "keywords": []
        }

    # Turn raw search results into keyword/product DataFrames.
    processed_results = keyword_processor.process_search_results(
        search_results, keyword, exclude_zero_volume=True
    )

    df_keywords = processed_results["keywords_df"]
    df_products = processed_results["products_df"]

    if df_keywords.empty:
        return {
            "status": "error",
            "message": "์ถ์ถ๋ ํค์๋๊ฐ ์์ต๋๋ค.",
            "products": [],
            "keywords": []
        }

    logger.info(f"ํค์๋ ์ถ์ถ ์๋ฃ: ์ด {len(df_keywords)}๊ฐ ํค์๋")

    return {
        "status": "success",
        "message": "ํค์๋ ์ถ์ถ ์๋ฃ",
        "products": df_products,
        "keywords_df": df_keywords,
        "categories": processed_results["categories"]
    }
|
|
|
|
|
|
|
|
def create_timestamp_filename(analysis_keyword):
    """Build a filesystem-safe base name for result files (KST timestamped).

    Non-word characters are dropped and whitespace/hyphen runs collapse to a
    single underscore, producing "<keyword>_<yymmdd_HHMM>_๋ถ์๊ฒฐ๊ณผ".
    """
    # Sanitize the keyword first: strip punctuation, then normalize separators.
    cleaned = re.sub(r'[^\w\s-]', '', analysis_keyword).strip()
    cleaned = re.sub(r'[-\s]+', '_', cleaned)
    stamp = format_korean_datetime(format_type="filename")
    return f"{cleaned}_{stamp}_๋ถ์๊ฒฐ๊ณผ"
|
|
|
|
|
def export_to_excel(main_keyword, main_keywords_df, analysis_keyword, related_keywords_df, filename_base):
    """Write the keyword DataFrames to a styled .xlsx in the temp directory.

    Creates up to two sheets (main compound keywords, related search terms),
    with formatted headers, thousands separators on the volume columns, and
    auto-sized columns.

    Args:
        main_keyword: step-1 keyword (used in the first sheet name).
        main_keywords_df: step-1 keyword DataFrame (may be None/empty).
        analysis_keyword: deep-dive keyword (used in the second sheet name).
        related_keywords_df: related-keyword DataFrame (may be None/empty).
        filename_base: base name (no extension) for the output file.

    Returns:
        Path to the written file, or None on failure.
    """
    try:
        excel_filename = f"{filename_base}.xlsx"
        excel_path = os.path.join(tempfile.gettempdir(), excel_filename)

        with pd.ExcelWriter(excel_path, engine='xlsxwriter') as writer:
            workbook = writer.book

            # Shared cell formats for both sheets.
            header_format = workbook.add_format({
                'bold': True,
                'text_wrap': True,
                'valign': 'top',
                'fg_color': '#D7E4BC',
                'border': 1
            })
            data_format = workbook.add_format({
                'text_wrap': True,
                'valign': 'top',
                'border': 1
            })
            number_format = workbook.add_format({
                'num_format': '#,##0',
                'text_wrap': True,
                'valign': 'top',
                'border': 1
            })

            # FIX: the two sheet blocks were copy-pasted duplicates; they now
            # share one helper, which also sanitizes sheet names (Excel limits
            # them to 31 chars and forbids []:*?/\ — a long user keyword used
            # to make xlsxwriter raise).
            if main_keywords_df is not None and not main_keywords_df.empty:
                _write_formatted_sheet(writer, main_keywords_df,
                                       f'{main_keyword}_์กฐํฉํค์๋',
                                       header_format, data_format, number_format)

            if related_keywords_df is not None and not related_keywords_df.empty:
                _write_formatted_sheet(writer, related_keywords_df,
                                       f'{analysis_keyword}_์ฐ๊ด๊ฒ์์ด',
                                       header_format, data_format, number_format)

        logger.info(f"์์ ํ์ผ ์์ฑ ์๋ฃ: {excel_path}")
        return excel_path

    except Exception as e:
        logger.error(f"์์ ํ์ผ ์์ฑ ์ค๋ฅ: {e}")
        return None


def _sanitize_sheet_name(name):
    """Make `name` a valid Excel sheet name: drop []:*?/\\ and cap at 31 chars."""
    cleaned = re.sub(r'[\[\]:*?/\\]', '', name)
    return cleaned[:31] or 'Sheet1'


def _write_formatted_sheet(writer, df, sheet_name, header_format, data_format, number_format):
    """Write one DataFrame as a formatted worksheet.

    Dumps `df`, rewrites the header row and every data cell with the shared
    formats (columns 1-3 hold search-volume figures and get the thousands
    separator), then auto-sizes each column (capped at width 50).
    """
    sheet_name = _sanitize_sheet_name(sheet_name)
    df.to_excel(writer, sheet_name=sheet_name, index=False)
    worksheet = writer.sheets[sheet_name]

    # Header row with the highlighted format.
    for col_num, value in enumerate(df.columns.values):
        worksheet.write(0, col_num, value, header_format)

    # Data rows: numeric volume columns (1-3) vs. everything else.
    for row_num in range(1, len(df) + 1):
        for col_num, value in enumerate(df.iloc[row_num - 1]):
            fmt = number_format if col_num in (1, 2, 3) else data_format
            worksheet.write(row_num, col_num, value, fmt)

    # Auto-size columns to the longest cell/header, capped at 50.
    for i, col in enumerate(df.columns):
        max_len = max(
            df[col].astype(str).map(len).max(),
            len(str(col))
        )
        worksheet.set_column(i, i, min(max_len + 2, 50))
|
|
|
|
|
def export_to_html(analysis_html, filename_base):
    """Write the analysis report as a standalone HTML file in the temp dir.

    Wraps `analysis_html` in a complete self-contained page (CDN fonts/icons,
    responsive + print CSS) and stamps the footer with the Korean-time
    creation timestamp.

    Args:
        analysis_html: report body HTML produced by the analysis step.
        filename_base: base name (no extension) for the output file.

    Returns:
        Path to the written file, or None on failure.
    """
    try:
        html_filename = f"{filename_base}.html"
        html_path = os.path.join(tempfile.gettempdir(), html_filename)

        # Human-readable Korean timestamp shown in the page footer.
        korean_time = format_korean_datetime(format_type="display")

        # Full standalone document; literal CSS braces are doubled because
        # this is an f-string.
        full_html = f"""
        <!DOCTYPE html>
        <html lang="ko">
        <head>
            <meta charset="UTF-8">
            <meta name="viewport" content="width=device-width, initial-scale=1.0">
            <title>ํค์๋ ์ฌ์ถฉ๋ถ์ ๊ฒฐ๊ณผ</title>
            <link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.0.0/css/all.min.css">
            <link rel="stylesheet" href="https://cdn.jsdelivr.net/gh/orioncactus/pretendard/dist/web/static/pretendard.css">
            <style>
                body {{
                    font-family: 'Pretendard', -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, sans-serif;
                    margin: 0;
                    padding: 20px;
                    background-color: #f5f5f5;
                    line-height: 1.6;
                }}
                .container {{
                    max-width: 1200px;
                    margin: 0 auto;
                    background: white;
                    border-radius: 12px;
                    box-shadow: 0 4px 12px rgba(0,0,0,0.1);
                    overflow: hidden;
                }}
                .header {{
                    background: linear-gradient(135deg, #667eea 0%, #764ba2 100%);
                    color: white;
                    padding: 30px;
                    text-align: center;
                }}
                .header h1 {{
                    margin: 0;
                    font-size: 28px;
                    font-weight: 700;
                }}
                .header p {{
                    margin: 10px 0 0 0;
                    font-size: 16px;
                    opacity: 0.9;
                }}
                .content {{
                    padding: 30px;
                }}
                .timestamp {{
                    text-align: center;
                    padding: 20px;
                    background: #f8f9fa;
                    color: #6c757d;
                    font-size: 14px;
                    border-top: 1px solid #dee2e6;
                }}

                /* Chart styling */
                .chart-container {{
                    margin: 20px 0;
                    padding: 20px;
                    background: white;
                    border-radius: 8px;
                    box-shadow: 0 2px 8px rgba(0,0,0,0.1);
                }}

                /* Responsive styles */
                @media (max-width: 768px) {{
                    .container {{
                        margin: 10px;
                        border-radius: 8px;
                    }}
                    .header {{
                        padding: 20px;
                    }}
                    .header h1 {{
                        font-size: 24px;
                    }}
                    .content {{
                        padding: 20px;
                    }}
                }}

                /* Animations */
                @keyframes spin {{
                    0% {{ transform: rotate(0deg); }}
                    100% {{ transform: rotate(360deg); }}
                }}

                @keyframes progress {{
                    0% {{ transform: translateX(-100%); }}
                    100% {{ transform: translateX(100%); }}
                }}

                /* Print styles */
                @media print {{
                    body {{
                        background: white;
                        padding: 0;
                    }}
                    .container {{
                        box-shadow: none;
                        border-radius: 0;
                    }}
                    .header {{
                        background: #667eea !important;
                        -webkit-print-color-adjust: exact;
                    }}
                }}
            </style>
        </head>
        <body>
            <div class="container">
                <div class="header">
                    <h1><i class="fas fa-chart-line"></i> ํค์๋ ์ฌ์ถฉ๋ถ์ ๊ฒฐ๊ณผ</h1>
                    <p>AI ์ํ ์์ฑ ๋ถ์ ์์คํ v2.9</p>
                </div>
                <div class="content">
                    {analysis_html}
                </div>
                <div class="timestamp">
                    <i class="fas fa-clock"></i> ์์ฑ ์๊ฐ: {korean_time} (ํ๊ตญ์๊ฐ)
                </div>
            </div>
        </body>
        </html>
        """

        with open(html_path, 'w', encoding='utf-8') as f:
            f.write(full_html)

        logger.info(f"HTML ํ์ผ ์์ฑ ์๋ฃ: {html_path}")
        return html_path

    except Exception as e:
        logger.error(f"HTML ํ์ผ ์์ฑ ์ค๋ฅ: {e}")
        return None
|
|
|
|
|
def create_zip_file(excel_path, html_path, filename_base):
    """Bundle the generated Excel/HTML result files into one ZIP (temp dir).

    Either input path may be None or missing on disk; such entries are simply
    skipped. Returns the ZIP path, or None on failure.
    """
    try:
        zip_path = os.path.join(tempfile.gettempdir(), f"{filename_base}.zip")

        # (source path, archive name, log line) for each candidate member.
        entries = [
            (excel_path, f"{filename_base}.xlsx", f"์์ ํ์ผ ์์ถ ์ถ๊ฐ: {filename_base}.xlsx"),
            (html_path, f"{filename_base}.html", f"HTML ํ์ผ ์์ถ ์ถ๊ฐ: {filename_base}.html"),
        ]

        with zipfile.ZipFile(zip_path, 'w', zipfile.ZIP_DEFLATED) as archive:
            for src, arcname, note in entries:
                if src and os.path.exists(src):
                    archive.write(src, arcname)
                    logger.info(note)

        logger.info(f"์์ถ ํ์ผ ์์ฑ ์๋ฃ: {zip_path}")
        return zip_path

    except Exception as e:
        logger.error(f"์์ถ ํ์ผ ์์ฑ ์ค๋ฅ: {e}")
        return None
|
|
|
|
|
def export_analysis_results(export_data):
    """Main export entry point: package the session's analysis for download.

    Validates the per-session export dict, writes the Excel and HTML files,
    and bundles them into a ZIP.

    Args:
        export_data: session dict produced by the analysis step (keys:
            "analysis_keyword", "analysis_html", "main_keyword",
            "main_keywords_df", "related_keywords_df").

    Returns:
        (zip_path_or_None, user-facing status message).
    """
    try:
        # Guard clauses: reject missing/invalid session state early.
        if not export_data or not isinstance(export_data, dict):
            return None, "๋ถ์ ๋ฐ์ดํฐ๊ฐ ์์ต๋๋ค. ๋จผ์ ํค์๋ ์ฌ์ถฉ๋ถ์์ ์คํํด์ฃผ์ธ์."

        analysis_keyword = export_data.get("analysis_keyword", "")
        analysis_html = export_data.get("analysis_html", "")

        if not analysis_keyword:
            return None, "๋ถ์ํ ํค์๋๊ฐ ์ค์ ๋์ง ์์์ต๋๋ค. ๋จผ์ ํค์๋ ๋ถ์์ ์คํํด์ฃผ์ธ์."
        if not analysis_html:
            return None, "๋ถ์ ๊ฒฐ๊ณผ๊ฐ ์์ต๋๋ค. ๋จผ์ ํค์๋ ์ฌ์ถฉ๋ถ์์ ์คํํด์ฃผ์ธ์."

        main_keyword = export_data.get("main_keyword", "")
        main_keywords_df = export_data.get("main_keywords_df")
        related_keywords_df = export_data.get("related_keywords_df")

        filename_base = create_timestamp_filename(analysis_keyword)
        logger.info(f"์ถ๋ ฅ ํ์ผ๋ช: {filename_base}")

        # Excel is only produced when at least one DataFrame is present.
        excel_path = None
        if main_keywords_df is not None or related_keywords_df is not None:
            excel_path = export_to_excel(
                main_keyword,
                main_keywords_df,
                analysis_keyword,
                related_keywords_df,
                filename_base,
            )

        # The HTML report is always attempted.
        html_path = export_to_html(analysis_html, filename_base)

        if not excel_path and not html_path:
            return None, "์ถ๋ ฅํ ํ์ผ์ด ์์ต๋๋ค."

        zip_path = create_zip_file(excel_path, html_path, filename_base)
        if not zip_path:
            return None, "์์ถ ํ์ผ ์์ฑ์ ์คํจํ์ต๋๋ค."

        return zip_path, f"โ ๋ถ์ ๊ฒฐ๊ณผ๊ฐ ์ฑ๊ณต์ ์ผ๋ก ์ถ๋ ฅ๋์์ต๋๋ค!\nํ์ผ๋ช: {filename_base}.zip"

    except Exception as e:
        logger.error(f"๋ถ์ ๊ฒฐ๊ณผ ์ถ๋ ฅ ์ค๋ฅ: {e}")
        return None, f"์ถ๋ ฅ ์ค ์ค๋ฅ๊ฐ ๋ฐ์ํ์ต๋๋ค: {str(e)}"
|
|
|
|
|
|
|
|
def create_interface(): |
|
|
|
|
|
try: |
|
|
with open('style.css', 'r', encoding='utf-8') as f: |
|
|
custom_css = f.read() |
|
|
|
|
|
with open('keyword_analysis_report.css', 'r', encoding='utf-8') as f: |
|
|
keyword_css = f.read() |
|
|
custom_css += "\n" + keyword_css |
|
|
except: |
|
|
custom_css = """ |
|
|
:root { --primary-color: #FB7F0D; --secondary-color: #ff9a8b; } |
|
|
.custom-button { |
|
|
background: linear-gradient(135deg, var(--primary-color), var(--secondary-color)) !important; |
|
|
color: white !important; border-radius: 30px !important; height: 45px !important; |
|
|
font-size: 16px !important; font-weight: bold !important; width: 100% !important; |
|
|
} |
|
|
.export-button { |
|
|
background: linear-gradient(135deg, #28a745, #20c997) !important; |
|
|
color: white !important; border-radius: 25px !important; height: 50px !important; |
|
|
font-size: 17px !important; font-weight: bold !important; width: 100% !important; |
|
|
margin-top: 20px !important; |
|
|
} |
|
|
""" |
|
|
|
|
|
with gr.Blocks( |
|
|
css=custom_css, |
|
|
title="๐ AI ์ํ ์์ฑ ๋ถ์๊ธฐ v2.9", |
|
|
theme=gr.themes.Default(primary_hue="orange", secondary_hue="orange") |
|
|
) as interface: |
|
|
|
|
|
|
|
|
gr.HTML(""" |
|
|
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.0.0/css/all.min.css"> |
|
|
<link rel="stylesheet" href="https://cdn.jsdelivr.net/gh/orioncactus/pretendard/dist/web/static/pretendard.css"> |
|
|
""") |
|
|
|
|
|
|
|
|
keywords_data_state = gr.State() |
|
|
export_data_state = gr.State({}) |
|
|
|
|
|
|
|
|
with gr.Column(elem_classes="custom-frame fade-in"): |
|
|
gr.HTML('<div class="section-title"><i class="fas fa-search"></i> 1๋จ๊ณ: ๋ฉ์ธ ํค์๋ ์
๋ ฅ</div>') |
|
|
|
|
|
keyword_input = gr.Textbox( |
|
|
label="์ํ ๋ฉ์ธํค์๋", |
|
|
placeholder="์: ์ฌ๋ฆฌํผ, ๋ฌด์ ์ด์ดํฐ, ํธ๋ํฌ๋ฆผ", |
|
|
value="", |
|
|
elem_id="keyword_input" |
|
|
) |
|
|
|
|
|
collect_data_btn = gr.Button("1๋จ๊ณ: ์ํ ๋ฐ์ดํฐ ์์งํ๊ธฐ", elem_classes="custom-button", size="lg") |
|
|
|
|
|
|
|
|
with gr.Column(elem_classes="custom-frame fade-in"): |
|
|
gr.HTML('<div class="section-title"><i class="fas fa-database"></i> 2๋จ๊ณ: ์์ง๋ ํค์๋ ๋ชฉ๋ก</div>') |
|
|
keywords_result = gr.HTML() |
|
|
|
|
|
|
|
|
with gr.Column(elem_classes="custom-frame fade-in"): |
|
|
gr.HTML('<div class="section-title"><i class="fas fa-bullseye"></i> 3๋จ๊ณ: ๋ถ์ํ ํค์๋ ์ ํ</div>') |
|
|
|
|
|
analysis_keyword_input = gr.Textbox( |
|
|
label="๋ถ์ํ ํค์๋", |
|
|
placeholder="์ ๋ชฉ๋ก์์ ์ํ๋ ํค์๋๋ฅผ ์
๋ ฅํ์ธ์ (์: ํต๊ตฝ ์ฌ๋ฆฌํผ)", |
|
|
value="", |
|
|
elem_id="analysis_keyword_input" |
|
|
) |
|
|
|
|
|
analyze_keyword_btn = gr.Button("ํค์๋ ์ฌ์ถฉ๋ถ์ ํ๊ธฐ", elem_classes="custom-button", size="lg") |
|
|
|
|
|
|
|
|
with gr.Column(elem_classes="custom-frame fade-in"): |
|
|
gr.HTML('<div class="section-title"><i class="fas fa-chart-line"></i> ํค์๋ ์ฌ์ถฉ๋ถ์</div>') |
|
|
analysis_result = gr.HTML(label="ํค์๋ ์ฌ์ถฉ๋ถ์") |
|
|
|
|
|
|
|
|
with gr.Column(elem_classes="custom-frame fade-in"): |
|
|
gr.HTML('<div class="section-title"><i class="fas fa-download"></i> ๋ถ์ ๊ฒฐ๊ณผ ์ถ๋ ฅ</div>') |
|
|
|
|
|
export_btn = gr.Button("๐ ๋ถ์๊ฒฐ๊ณผ ์ถ๋ ฅํ๊ธฐ", elem_classes="export-button", size="lg") |
|
|
export_result = gr.HTML() |
|
|
download_file = gr.File(label="๋ค์ด๋ก๋", visible=False) |
|
|
|
|
|
|
|
|
def on_collect_data(keyword): |
|
|
if not keyword.strip(): |
|
|
return ("<div style='color: red; padding: 20px; text-align: center; width: 100%;'>ํค์๋๋ฅผ ์
๋ ฅํด์ฃผ์ธ์.</div>", None) |
|
|
|
|
|
|
|
|
yield (create_loading_animation(), None) |
|
|
|
|
|
result = extract_keywords_from_products(keyword) |
|
|
|
|
|
if result["status"] == "error": |
|
|
yield (f"<div style='color: red; padding: 20px; text-align: center; width: 100%;'>{result['message']}</div>", None) |
|
|
return |
|
|
|
|
|
keywords_df = result["keywords_df"] |
|
|
html_table = export_utils.create_table_without_checkboxes(keywords_df) |
|
|
|
|
|
success_html = f""" |
|
|
<div style="width: 100%; background: #d4edda; border: 1px solid #c3e6cb; padding: 15px; border-radius: 5px; margin-bottom: 20px;"> |
|
|
<h4 style="color: #155724; margin: 0 0 10px 0;">โ
๋ค์ด๋ฒ ๋ฐ์ดํฐ ์์ง ์๋ฃ!</h4> |
|
|
<p style="margin: 0; color: #155724;"> |
|
|
โข ์ค์ ์ํ {len(result['products'])}๊ฐ ๋ถ์<br> |
|
|
โข ์ถ์ถ๋ ํค์๋: <strong>{len(keywords_df)}๊ฐ</strong><br> |
|
|
โข ์๋ ๋ชฉ๋ก์์ ์ํ๋ ํค์๋๋ฅผ ์ ํํ์ฌ ๋ถ์ํ์ธ์ |
|
|
</p> |
|
|
</div> |
|
|
|
|
|
<h5 style="margin: 20px 0 10px 0; color: #495057;">๐ ์ ์ฒด ํค์๋ ๋ชฉ๋ก</h5> |
|
|
{html_table} |
|
|
""" |
|
|
|
|
|
yield (success_html, result) |
|
|
|
|
|
def on_analyze_keyword(analysis_keyword, base_keyword, keywords_data): |
|
|
if not analysis_keyword.strip(): |
|
|
return "<div style='color: red; padding: 20px; text-align: center; width: 100%;'>๋ถ์ํ ํค์๋๋ฅผ ์
๋ ฅํด์ฃผ์ธ์.</div>", {} |
|
|
|
|
|
|
|
|
yield create_loading_animation(), {} |
|
|
|
|
|
|
|
|
related_result = analyze_related_keywords(analysis_keyword) |
|
|
|
|
|
|
|
|
keyword_result, session_export_data = safe_keyword_analysis(analysis_keyword, base_keyword, keywords_data) |
|
|
|
|
|
|
|
|
if related_result["status"] == "success" and not related_result["keywords_df"].empty: |
|
|
session_export_data["related_keywords_df"] = related_result["keywords_df"] |
|
|
|
|
|
|
|
|
if related_result["status"] == "success" and not related_result["keywords_df"].empty: |
|
|
df_keywords = related_result["keywords_df"] |
|
|
related_table = export_utils.create_table_without_checkboxes(df_keywords) |
|
|
|
|
|
related_html = f""" |
|
|
<div style="width: 100%; margin: 30px auto; font-family: 'Pretendard', sans-serif;"> |
|
|
<div style="background: linear-gradient(135deg, #17a2b8 0%, #20c997 100%); padding: 15px; border-radius: 10px 10px 0 0; color: white; text-align: center;"> |
|
|
<h3 style="margin: 0; font-size: 18px; color: white;">๐ ์ฐ๊ด๊ฒ์์ด ๋ถ์</h3> |
|
|
</div> |
|
|
<div style="background: white; padding: 20px; border-radius: 0 0 10px 10px; box-shadow: 0 4px 6px rgba(0, 0, 0, 0.1);"> |
|
|
<div style="background: #e8f5e8; border: 1px solid #c3e6cb; padding: 15px; border-radius: 5px; margin-bottom: 20px;"> |
|
|
<h4 style="color: #155724; margin: 0 0 10px 0;">๐ ์ฐ๊ด๊ฒ์์ด ๋ถ์ ์๋ฃ!</h4> |
|
|
<p style="margin: 0; color: #155724;"> |
|
|
โข ๋ถ์ ๊ธฐ์ค ์ํ: <strong>{related_result['total_products']}๊ฐ</strong><br> |
|
|
โข ๋ฐ๊ฒฌ๋ ์ฐ๊ด๊ฒ์์ด: <strong>{len(df_keywords)}๊ฐ</strong><br> |
|
|
โข ๋ฉ์ธ ํค์๋์ ๊ฒฐํฉ๋ ๋ณตํฉํค์๋๋ง ํ์๋ฉ๋๋ค |
|
|
</p> |
|
|
</div> |
|
|
{related_table} |
|
|
</div> |
|
|
</div> |
|
|
""" |
|
|
|
|
|
|
|
|
session_export_data["analysis_html"] = related_html + session_export_data["analysis_html"] |
|
|
else: |
|
|
related_html = f""" |
|
|
<div style="width: 100%; margin: 30px auto; font-family: 'Pretendard', sans-serif;"> |
|
|
<div style="background: linear-gradient(135deg, #17a2b8 0%, #20c997 100%); padding: 15px; border-radius: 10px 10px 0 0; color: white; text-align: center;"> |
|
|
<h3 style="margin: 0; font-size: 18px; color: white;">๐ ์ฐ๊ด๊ฒ์์ด ๋ถ์</h3> |
|
|
</div> |
|
|
<div style="background: white; padding: 20px; border-radius: 0 0 10px 10px; box-shadow: 0 4px 6px rgba(0, 0, 0, 0.1);"> |
|
|
<div style="color: orange; padding: 20px; text-align: center; background: #fff3cd; border-radius: 8px;"> |
|
|
'{analysis_keyword}' ํค์๋์ ์ฐ๊ด๊ฒ์์ด๋ฅผ ์ฐพ์ ์ ์์ต๋๋ค. |
|
|
</div> |
|
|
</div> |
|
|
</div> |
|
|
""" |
|
|
|
|
|
|
|
|
session_export_data["analysis_html"] = related_html + session_export_data["analysis_html"] |
|
|
|
|
|
|
|
|
final_result = related_html + keyword_result |
|
|
yield final_result, session_export_data |
|
|
|
|
|
def on_export_results(export_data):
    """Gradio handler: package the session's analysis results for download.

    *export_data* is the per-session dict held in ``gr.State`` (multi-user
    safe — no module-level mutable state is touched).

    Returns:
        tuple: ``(status_html, gr.update(...))`` — the second element shows
        or hides the download ``gr.File`` component.
    """
    try:
        zip_path, message = export_analysis_results(export_data)

        if zip_path:
            success_html = f"""
            <div style="background: #d4edda; border: 1px solid #c3e6cb; padding: 20px; border-radius: 8px; margin: 10px 0;">
                <h4 style="color: #155724; margin: 0 0 15px 0;"><i class="fas fa-check-circle"></i> ์ถ๋ ฅ ์๋ฃ!</h4>
                <p style="color: #155724; margin: 0; line-height: 1.6;">
                    {message}<br>
                    <strong>ํฌํจ ํ์ผ:</strong><br>
                    โข ๐ ์์
ํ์ผ: ๋ฉ์ธํค์๋ ์กฐํฉํค์๋ + ์ฐ๊ด๊ฒ์์ด ๋ฐ์ดํฐ<br>
                    โข ๐ HTML ํ์ผ: ํค์๋ ์ฌ์ถฉ๋ถ์ ๊ฒฐ๊ณผ (๊ทธ๋ํ ํฌํจ)<br>
                    <br>
                    <i class="fas fa-download"></i> ์๋ ๋ค์ด๋ก๋ ๋ฒํผ์ ํด๋ฆญํ์ฌ ํ์ผ์ ์ ์ฅํ์ธ์.<br>
                    <small style="color: #666;">โฐ ํ๊ตญ์๊ฐ ๊ธฐ์ค์ผ๋ก ํ์ผ๋ช
์ด ์์ฑ๋ฉ๋๋ค.</small>
                </p>
            </div>
            """
            # Reveal the File component pointing at the finished archive.
            return success_html, gr.update(value=zip_path, visible=True)
        else:
            # export_analysis_results reported a (non-exceptional) failure.
            error_html = f"""
            <div style="background: #f8d7da; border: 1px solid #f5c6cb; padding: 20px; border-radius: 8px; margin: 10px 0;">
                <h4 style="color: #721c24; margin: 0 0 10px 0;"><i class="fas fa-exclamation-triangle"></i> ์ถ๋ ฅ ์คํจ</h4>
                <p style="color: #721c24; margin: 0;">{message}</p>
            </div>
            """
            return error_html, gr.update(visible=False)

    except Exception as e:
        # IMPROVED: logger.exception records the full traceback (the
        # original logger.error(f"...") dropped it) and uses lazy %-args.
        logger.exception("์ถ๋ ฅ ํธ๋ค๋ฌ ์ค๋ฅ: %s", e)
        error_html = f"""
        <div style="background: #f8d7da; border: 1px solid #f5c6cb; padding: 20px; border-radius: 8px; margin: 10px 0;">
            <h4 style="color: #721c24; margin: 0 0 10px 0;"><i class="fas fa-exclamation-triangle"></i> ์์คํ
 ์ค๋ฅ</h4>
            <p style="color: #721c24; margin: 0;">์ถ๋ ฅ ์ค ์์คํ
 ์ค๋ฅ๊ฐ ๋ฐ์ํ์ต๋๋ค: {str(e)}</p>
        </div>
        """
        return error_html, gr.update(visible=False)
|
|
|
|
|
|
|
|
collect_data_btn.click( |
|
|
fn=on_collect_data, |
|
|
inputs=[keyword_input], |
|
|
outputs=[keywords_result, keywords_data_state] |
|
|
) |
|
|
|
|
|
analyze_keyword_btn.click( |
|
|
fn=on_analyze_keyword, |
|
|
inputs=[analysis_keyword_input, keyword_input, keywords_data_state], |
|
|
outputs=[analysis_result, export_data_state] |
|
|
) |
|
|
|
|
|
export_btn.click( |
|
|
fn=on_export_results, |
|
|
inputs=[export_data_state], |
|
|
outputs=[export_result, download_file] |
|
|
) |
|
|
|
|
|
return interface |
|
|
|
|
|
|
|
|
def check_datalab_api_config(): |
|
|
"""๋ค์ด๋ฒ ๋ฐ์ดํฐ๋ฉ API ์ค์ ํ์ธ""" |
|
|
logger.info("=== ๋ค์ด๋ฒ ๋ฐ์ดํฐ๋ฉ API ์ค์ ํ์ธ ===") |
|
|
|
|
|
datalab_config = api_utils.get_next_datalab_api_config() |
|
|
|
|
|
if not datalab_config: |
|
|
logger.warning("โ ๋ฐ์ดํฐ๋ฉ API ํค๊ฐ ์ค์ ๋์ง ์์์ต๋๋ค.") |
|
|
logger.info("ํธ๋ ๋ ๋ถ์ ๊ธฐ๋ฅ์ด ๋นํ์ฑํ๋ฉ๋๋ค.") |
|
|
return False |
|
|
|
|
|
client_id = datalab_config["CLIENT_ID"] |
|
|
client_secret = datalab_config["CLIENT_SECRET"] |
|
|
|
|
|
logger.info(f"์ด {len(api_utils.NAVER_DATALAB_CONFIGS)}๊ฐ์ ๋ฐ์ดํฐ๋ฉ API ์ค์ ์ฌ์ฉ ์ค") |
|
|
logger.info(f"ํ์ฌ ์ ํ๋ API:") |
|
|
logger.info(f" CLIENT_ID: {client_id[:8]}***{client_id[-4:] if len(client_id) > 12 else '***'}") |
|
|
logger.info(f" CLIENT_SECRET: {client_secret[:4]}***{client_secret[-2:] if len(client_secret) > 6 else '***'}") |
|
|
|
|
|
|
|
|
if client_id.startswith("YOUR_"): |
|
|
logger.error("โ CLIENT_ID๊ฐ ๊ธฐ๋ณธ๊ฐ์ผ๋ก ์ค์ ๋์ด ์์ต๋๋ค!") |
|
|
return False |
|
|
|
|
|
if client_secret.startswith("YOUR_"): |
|
|
logger.error("โ CLIENT_SECRET์ด ๊ธฐ๋ณธ๊ฐ์ผ๋ก ์ค์ ๋์ด ์์ต๋๋ค!") |
|
|
return False |
|
|
|
|
|
|
|
|
if len(client_id) < 10: |
|
|
logger.warning("โ ๏ธ CLIENT_ID๊ฐ ์งง์ต๋๋ค. ์ฌ๋ฐ๋ฅธ ํค์ธ์ง ํ์ธํด์ฃผ์ธ์.") |
|
|
|
|
|
if len(client_secret) < 5: |
|
|
logger.warning("โ ๏ธ CLIENT_SECRET์ด ์งง์ต๋๋ค. ์ฌ๋ฐ๋ฅธ ํค์ธ์ง ํ์ธํด์ฃผ์ธ์.") |
|
|
|
|
|
logger.info("โ
๋ฐ์ดํฐ๋ฉ API ํค ํ์ ๊ฒ์ฆ ์๋ฃ") |
|
|
return True |
|
|
|
|
|
def check_gemini_api_config(): |
|
|
"""Gemini API ์ค์ ํ์ธ""" |
|
|
logger.info("=== Gemini API ์ค์ ํ์ธ ===") |
|
|
|
|
|
is_valid, message = api_utils.validate_gemini_config() |
|
|
|
|
|
if is_valid: |
|
|
logger.info(f"โ
{message}") |
|
|
|
|
|
test_key = api_utils.get_next_gemini_api_key() |
|
|
if test_key: |
|
|
logger.info(f"ํ์ฌ ์ฌ์ฉ ์ค์ธ Gemini API ํค: {test_key[:8]}***{test_key[-4:]}") |
|
|
return True |
|
|
else: |
|
|
logger.warning(f"โ {message}") |
|
|
logger.info("AI ๋ถ์ ๊ธฐ๋ฅ์ด ์ ํ๋ ์ ์์ต๋๋ค.") |
|
|
return False |
|
|
|
|
|
|
|
|
if __name__ == "__main__":

    # Confirm pytz is importable so KST (Asia/Seoul) timestamps work;
    # fall back to system time with a warning otherwise.  (pytz is also
    # imported at module top — this re-import is just the runtime check.)
    try:
        import pytz
        logger.info("โ
 pytz ๋ชจ๋ ๋ก๋ ์ฑ๊ณต - ํ๊ตญ์๊ฐ ์ง์")
    except ImportError:
        logger.warning("โ ๏ธ pytz ๋ชจ๋์ด ์ค์น๋์ง ์์ - pip install pytz ์คํ ํ์")
        logger.info("์์คํ
 ์๊ฐ์ ์ฌ์ฉํฉ๋๋ค.")

    # Load/rotate all API key configurations before any capability check.
    api_utils.initialize_api_configs()
    logger.info("===== ์ํ ์์ฑ ๋ถ์ ์์คํ
 v2.9 (์ถ๋ ฅ๊ธฐ๋ฅ ์ถ๊ฐ + ํ๊ตญ์๊ฐ + ๋ฉํฐ์ฌ์ฉ์ ์์ ) ์์ =====")

    # Startup capability checks (logged by the checkers themselves).
    datalab_available = check_datalab_api_config()

    gemini_available = check_gemini_api_config()

    # Operator-facing console guidance.
    print("๐ฆ ํ์ํ ํจํค์ง:")
    print(" pip install gradio google-generativeai pandas requests xlsxwriter markdown plotly pytz")
    print()

    # Tell the operator how to enable Gemini AI analysis when missing.
    if not gemini_available:
        print("โ ๏ธ GEMINI_API_KEY ๋๋ GOOGLE_API_KEY ํ๊ฒฝ๋ณ์๋ฅผ ์ค์ ํ์ธ์.")
        print(" export GEMINI_API_KEY='your-api-key'")
        print(" ๋๋")
        print(" export GOOGLE_API_KEY='your-api-key'")
        print()

    # Tell the operator how to enable Naver DataLab trend analysis.
    if not datalab_available:
        print("โ ๏ธ ๋ค์ด๋ฒ ๋ฐ์ดํฐ๋ฉ API ํธ๋ ๋ ๋ถ์์ ์ํด์๋:")
        print(" 1. ๋ค์ด๋ฒ ๊ฐ๋ฐ์์ผํฐ(https://developers.naver.com)์์ ์ ํ๋ฆฌ์ผ์ด์
 ๋ฑ๋ก")
        print(" 2. '๋ฐ์ดํฐ๋ฉ(๊ฒ์์ด ํธ๋ ๋)' API ์ถ๊ฐ")
        print(" 3. ๋ฐ๊ธ๋ฐ์ CLIENT_ID์ CLIENT_SECRET์ api_utils.py์ NAVER_DATALAB_CONFIGS์ ์ค์ ")
        print(" 4. ํ์ฌ๋ ํ์ฌ ๊ฒ์๋ ์ ๋ณด๋ง ํ์๋ฉ๋๋ค.")
        print()
    else:
        print("โ
 ๋ฐ์ดํฐ๋ฉ API ์ค์ ์๋ฃ - 1๋
, 3๋
 ํธ๋ ๋ ๋ถ์์ด ๊ฐ๋ฅํฉ๋๋ค!")
        print()

    if gemini_available:
        print("โ
 Gemini API ์ค์ ์๋ฃ - AI ๋ถ์์ด ๊ฐ๋ฅํฉ๋๋ค!")
        print()

    # v2.9 multi-user hardening summary (per-session gr.State, no globals).
    print("๐ก๏ธ v2.9 ๋ฉํฐ์ฌ์ฉ์ ์์ ๊ฐ์ ์ฌํญ:")
    print(" โข ์ ์ญ ๋ณ์ export_state ์์ ์ ๊ฑฐ")
    print(" โข gr.State({}) ์ฌ์ฉ์ผ๋ก ๊ฐ ์ฌ์ฉ์๋ณ ์ธ์
 ๋ฐ์ดํฐ ์์ ๋ถ๋ฆฌ")
    print(" โข safe_keyword_analysis() ํจ์์์ ์ธ์
๋ณ ๋ฐ์ดํฐ ๋ฐํ")
    print(" โข export_analysis_results() ํจ์์์ ์ธ์
๋ณ ๋ฐ์ดํฐ ์ฒ๋ฆฌ")
    print(" โข ์ด๋ฒคํธ ํธ๋ค๋ฌ์์ export_data_state ์ธ์
 ์ํ ๊ด๋ฆฌ")
    print(" โข ํ๊น
ํ์ด์ค ์คํ์ด์ค ๋ฑ ๋ฉํฐ์ฌ์ฉ์ ํ๊ฒฝ์์ ์์ ํ ๋์ ์ฌ์ฉ ๋ณด์ฅ")
    print()

    # Pre-existing v2.9 feature summary.
    print("๐ ๊ธฐ์กด v2.9 ๊ธฐ๋ฅ:")
    print(" โข ์ฐ๊ด๊ฒ์์ด ์์
 ์ถ๋ ฅ")
    print(" โข ํค์๋ ์ฌ์ถฉ๋ถ์ HTML ์ถ๋ ฅ")
    print(" โข ์์ถํ์ผ๋ก ๊ฒฐ๊ณผ ๋ค์ด๋ก๋")
    print(" โข Gemini API ํค ํตํฉ ๊ด๋ฆฌ")
    print(" โข ํ๊ตญ์๊ฐ ์ ์ฉ")
    print()

    # Build the Gradio UI and serve it on all interfaces; share=True also
    # exposes a temporary public gradio.live URL.
    app = create_interface()
    app.launch(server_name="0.0.0.0", server_port=7860, share=True)