4G23WAS3 / app.py
ssboost's picture
Update app.py
cd74a76 verified
raw
history blame
33.8 kB
import gradio as gr
import pandas as pd
import os
import time
import threading
import tempfile
import logging
import random
import uuid
import shutil
import glob
from datetime import datetime
import requests
import json
from dotenv import load_dotenv
# Load environment variables from .env
load_dotenv()
# Logging setup (keep API details out of the logs entirely)
logging.basicConfig(level=logging.WARNING)
logger = logging.getLogger(__name__)
# ์„ธ์…˜๋ณ„ ์ž„์‹œ ํŒŒ์ผ ๊ด€๋ฆฌ๋ฅผ ์œ„ํ•œ ๋”•์…”๋„ˆ๋ฆฌ
session_temp_files = {}
session_data = {}
def get_api_client():
    """Build a minimal API client from the API_ENDPOINT environment variable.

    Returns an object exposing ``predict(api_name=..., **kwargs)`` that POSTs
    to the remote Gradio app, mirroring the gradio_client calling style used
    by the rest of this module.

    Raises:
        ValueError: if the API_ENDPOINT environment variable is not set.
    """
    endpoint = os.getenv('API_ENDPOINT')
    if not endpoint:
        raise ValueError("API_ENDPOINT ํ™˜๊ฒฝ๋ณ€์ˆ˜๊ฐ€ ํ•„์š”ํ•ฉ๋‹ˆ๋‹ค.")

    # Ordered (kwarg name, default) specs per API route. This replaces the
    # previous copy-pasted if/elif chain, which duplicated identical branch
    # bodies for /process_search_results vs /search_with_loading and
    # /process_analyze_results vs /analyze_with_loading. Routes not listed
    # here (/reset_interface, /get_session_id, unknown) send an empty array.
    _search_spec = [
        ('keyword', ''), ('korean_only', True),
        ('apply_main_keyword', '๋ฉ”์ธํ‚ค์›Œ๋“œ ์ ์šฉ'), ('exclude_zero_volume', False),
    ]
    _analyze_spec = [
        ('analysis_keywords', ''), ('selected_category', '์ „์ฒด ๋ณด๊ธฐ'),
    ]
    param_specs = {
        "/process_search_results": _search_spec,
        "/search_with_loading": _search_spec,
        "/filter_and_sort_table": [
            ('selected_cat', '์ „์ฒด ๋ณด๊ธฐ'), ('keyword_sort', '์ •๋ ฌ ์—†์Œ'),
            ('total_volume_sort', '์ •๋ ฌ ์—†์Œ'), ('usage_count_sort', '์ •๋ ฌ ์—†์Œ'),
            ('selected_volume_range', '์ „์ฒด'), ('exclude_zero_volume', False),
        ],
        "/update_category_selection": [('selected_cat', '์ „์ฒด ๋ณด๊ธฐ')],
        "/process_analyze_results": _analyze_spec,
        "/analyze_with_loading": _analyze_spec,
    }

    def make_request(api_name, **kwargs):
        try:
            # Accept either a bare Space id or a full URL in API_ENDPOINT.
            if not endpoint.startswith('http'):
                base_url = f"https://{endpoint}.hf.space"
            else:
                base_url = endpoint
            # NOTE(review): Gradio's REST "/call/<name>" API is normally a
            # two-step protocol (POST returns an event_id; results arrive via
            # a follow-up GET). Reading 'data' straight from this POST assumes
            # single-shot behavior — confirm against the target Space.
            url = f"{base_url}/call{api_name}"
            # Positional argument array in the order the remote endpoint expects.
            spec = param_specs.get(api_name, [])
            data = [kwargs.get(name, default) for name, default in spec]
            response = requests.post(url, json={"data": data}, timeout=60)
            if response.status_code == 200:
                result = response.json()
                return result.get('data', [])
            else:
                raise Exception(f"API ํ˜ธ์ถœ ์‹คํŒจ: {response.status_code}")
        except Exception as e:
            raise Exception(f"API ์—ฐ๊ฒฐ ์˜ค๋ฅ˜: {str(e)}")

    # Anonymous one-method object so call sites can use client.predict(...).
    return type('APIClient', (), {'predict': lambda self, **kwargs: make_request(kwargs.pop('api_name'), **kwargs)})()
def cleanup_huggingface_temp_folders():
    """Best-effort sweep of stale session files from the system temp folders.

    Removes session_*.xlsx / session_*.csv files untouched for over an hour;
    every failure is swallowed so startup can never be blocked by cleanup.
    """
    try:
        candidate_dirs = [tempfile.gettempdir(), "/tmp", "/var/tmp"]
        removed = 0
        cutoff = time.time() - 3600  # only delete files idle for >= 1 hour
        for directory in candidate_dirs:
            if not os.path.exists(directory):
                continue
            try:
                stale = glob.glob(os.path.join(directory, "session_*.xlsx"))
                stale.extend(glob.glob(os.path.join(directory, "session_*.csv")))
                for path in stale:
                    try:
                        if os.path.getmtime(path) < cutoff:
                            os.remove(path)
                            removed += 1
                    except Exception:
                        pass  # file vanished or is locked — ignore
            except Exception:
                pass  # directory unreadable — ignore
        logger.info(f"โœ… ์ž„์‹œ ํด๋” ์ •๋ฆฌ ์™„๋ฃŒ - {removed}๊ฐœ ํŒŒ์ผ ์‚ญ์ œ")
    except Exception as e:
        logger.error(f"์ž„์‹œ ํด๋” ์ •๋ฆฌ ์ค‘ ์˜ค๋ฅ˜: {e}")
def setup_clean_temp_environment():
    """Create a fresh app-only temp directory and export its path.

    The directory is removed and recreated so no stale files survive; its
    path is published via the CONTROL_TOWER_TEMP environment variable.
    Falls back to the system temp dir if anything goes wrong.
    """
    try:
        cleanup_huggingface_temp_folders()
        target = os.path.join(tempfile.gettempdir(), "control_tower_app")
        # Rebuild the directory from scratch.
        if os.path.exists(target):
            shutil.rmtree(target, ignore_errors=True)
        os.makedirs(target, exist_ok=True)
        os.environ['CONTROL_TOWER_TEMP'] = target
        logger.info(f"โœ… ์• ํ”Œ๋ฆฌ์ผ€์ด์…˜ ์ „์šฉ ์ž„์‹œ ๋””๋ ‰ํ† ๋ฆฌ ์„ค์ •: {target}")
        return target
    except Exception as e:
        logger.error(f"์ž„์‹œ ํ™˜๊ฒฝ ์„ค์ • ์‹คํŒจ: {e}")
        return tempfile.gettempdir()
def get_app_temp_dir():
    """Return the app-specific temp directory, or the system one as fallback."""
    fallback = tempfile.gettempdir()
    return os.environ.get('CONTROL_TOWER_TEMP', fallback)
def get_session_id():
    """Fetch a session id from the backend, falling back to a local UUID.

    Any failure (missing endpoint, network error, empty response) degrades
    gracefully to a locally generated uuid4 string.
    """
    try:
        api = get_api_client()
        response = api.predict(api_name="/get_session_id")
        if response:
            return response[0]
        return str(uuid.uuid4())
    except Exception:
        return str(uuid.uuid4())
def cleanup_session_files(session_id, delay=300):
    """Schedule deletion of one session's temp files after ``delay`` seconds.

    The work runs on a daemon thread so it never blocks the caller and dies
    with the process.
    """
    def _purge():
        time.sleep(delay)
        if session_id not in session_temp_files:
            return
        # Remove the registry entry first, then delete the files it held.
        doomed = session_temp_files.pop(session_id)
        for path in doomed:
            try:
                if os.path.exists(path):
                    os.remove(path)
                    logger.info(f"์„ธ์…˜ {session_id[:8]}... ์ž„์‹œ ํŒŒ์ผ ์‚ญ์ œ: {path}")
            except Exception as e:
                logger.error(f"์„ธ์…˜ {session_id[:8]}... ํŒŒ์ผ ์‚ญ์ œ ์˜ค๋ฅ˜: {e}")
    threading.Thread(target=_purge, daemon=True).start()
def register_session_file(session_id, file_path):
    """Track a temp file under its owning session for later cleanup."""
    session_temp_files.setdefault(session_id, []).append(file_path)
def cleanup_old_sessions():
    """Purge sessions idle for over an hour, along with their temp files."""
    now = time.time()
    expired = [sid for sid, info in session_data.items()
               if now - info.get('last_activity', 0) > 3600]
    for sid in expired:
        # Delete any files still registered to this session.
        for path in session_temp_files.pop(sid, []):
            try:
                if os.path.exists(path):
                    os.remove(path)
                    logger.info(f"์˜ค๋ž˜๋œ ์„ธ์…˜ {sid[:8]}... ํŒŒ์ผ ์‚ญ์ œ: {path}")
            except Exception as e:
                logger.error(f"์˜ค๋ž˜๋œ ์„ธ์…˜ ํŒŒ์ผ ์‚ญ์ œ ์˜ค๋ฅ˜: {e}")
        # Drop the session record itself.
        session_data.pop(sid, None)
        logger.info(f"์˜ค๋ž˜๋œ ์„ธ์…˜ ๋ฐ์ดํ„ฐ ์‚ญ์ œ: {sid[:8]}...")
def update_session_activity(session_id):
    """Stamp the session with the current time, creating its record if new."""
    record = session_data.setdefault(session_id, {})
    record['last_activity'] = time.time()
def create_session_temp_file(session_id, suffix='.xlsx'):
    """Create, register, and return the path of an empty per-session temp file.

    The name embeds the session prefix, a timestamp, and a random nonce to
    avoid collisions between concurrent users.
    """
    stamp = datetime.now().strftime("%Y%m%d_%H%M%S")
    nonce = str(random.randint(1000, 9999))
    path = os.path.join(
        get_app_temp_dir(),
        f"session_{session_id[:8]}_{stamp}_{nonce}{suffix}",
    )
    # Touch the file so later copies have a concrete target.
    open(path, 'w').close()
    register_session_file(session_id, path)
    return path
def search_with_loading(keyword, korean_only, apply_main_keyword, exclude_zero_volume):
    """Proxy to the remote /search_with_loading endpoint.

    Returns the endpoint's first value, or "" on any failure.
    """
    try:
        api = get_api_client()
        payload = api.predict(
            keyword=keyword,
            korean_only=korean_only,
            apply_main_keyword=apply_main_keyword,
            exclude_zero_volume=exclude_zero_volume,
            api_name="/search_with_loading",
        )
        if payload:
            return payload[0]
        return ""
    except Exception as e:
        logger.error(f"search_with_loading API ํ˜ธ์ถœ ์˜ค๋ฅ˜: {e}")
        return ""
def process_search_results(keyword, korean_only, apply_main_keyword, exclude_zero_volume):
    """Call the remote /process_search_results endpoint.

    Returns a 5-tuple: (table_html, category_choices, volume_choices,
    selected_category, local_download_path). A download file returned by the
    backend is copied into this app's own temp area so it can be served.
    """
    try:
        api = get_api_client()
        payload = api.predict(
            keyword=keyword,
            korean_only=korean_only,
            apply_main_keyword=apply_main_keyword,
            exclude_zero_volume=exclude_zero_volume,
            api_name="/process_search_results"
        )
        # Guard: a short payload means the backend produced nothing usable.
        if len(payload) < 5:
            return (
                "<p>๊ฒ€์ƒ‰ ๊ฒฐ๊ณผ๊ฐ€ ์—†์Šต๋‹ˆ๋‹ค.</p>",
                ["์ „์ฒด ๋ณด๊ธฐ"], ["์ „์ฒด"], "์ „์ฒด ๋ณด๊ธฐ", None
            )
        table_html, cat_choices, vol_choices, selected_cat, remote_file = payload[:5]
        local_file = None
        if remote_file:
            # Copy the backend's file into our session-scoped temp dir.
            sid = get_session_id()
            local_file = create_session_temp_file(sid, '.xlsx')
            try:
                shutil.copy2(remote_file, local_file)
            except Exception as e:
                logger.error(f"ํŒŒ์ผ ๋ณต์‚ฌ ์˜ค๋ฅ˜: {e}")
                local_file = None
        return table_html, cat_choices, vol_choices, selected_cat, local_file
    except Exception as e:
        logger.error(f"process_search_results API ํ˜ธ์ถœ ์˜ค๋ฅ˜: {e}")
        return (
            "<p>์„œ๋น„์Šค ์—ฐ๊ฒฐ์— ๋ฌธ์ œ๊ฐ€ ๋ฐœ์ƒํ–ˆ์Šต๋‹ˆ๋‹ค. ์ž ์‹œ ํ›„ ๋‹ค์‹œ ์‹œ๋„ํ•ด์ฃผ์„ธ์š”.</p>",
            ["์ „์ฒด ๋ณด๊ธฐ"], ["์ „์ฒด"], "์ „์ฒด ๋ณด๊ธฐ", None
        )
def filter_and_sort_table(selected_cat, keyword_sort, total_volume_sort, usage_count_sort, selected_volume_range, exclude_zero_volume):
    """Proxy to /filter_and_sort_table; returns the filtered table HTML.

    Returns "" when the backend responds with nothing, and an error
    placeholder paragraph when the call itself fails.
    """
    try:
        api = get_api_client()
        payload = api.predict(
            selected_cat=selected_cat,
            keyword_sort=keyword_sort,
            total_volume_sort=total_volume_sort,
            usage_count_sort=usage_count_sort,
            selected_volume_range=selected_volume_range,
            exclude_zero_volume=exclude_zero_volume,
            api_name="/filter_and_sort_table",
        )
        if payload:
            return payload[0]
        return ""
    except Exception as e:
        logger.error(f"filter_and_sort_table API ํ˜ธ์ถœ ์˜ค๋ฅ˜: {e}")
        return "<p>ํ•„ํ„ฐ๋ง ์„œ๋น„์Šค ์—ฐ๊ฒฐ์— ๋ฌธ์ œ๊ฐ€ ๋ฐœ์ƒํ–ˆ์Šต๋‹ˆ๋‹ค.</p>"
def update_category_selection(selected_cat):
    """Mirror the table's category filter into the analysis dropdown.

    On any failure the user's current selection is kept unchanged.
    """
    try:
        api = get_api_client()
        payload = api.predict(
            selected_cat=selected_cat,
            api_name="/update_category_selection",
        )
        new_value = payload[0] if payload else selected_cat
        return gr.update(value=new_value)
    except Exception as e:
        logger.error(f"update_category_selection API ํ˜ธ์ถœ ์˜ค๋ฅ˜: {e}")
        return gr.update(value=selected_cat)
def analyze_with_loading(analysis_keywords, selected_category):
    """Proxy to the remote /analyze_with_loading endpoint.

    Returns the endpoint's first value, or "" on any failure.
    """
    try:
        api = get_api_client()
        payload = api.predict(
            analysis_keywords=analysis_keywords,
            selected_category=selected_category,
            api_name="/analyze_with_loading",
        )
        if payload:
            return payload[0]
        return ""
    except Exception as e:
        logger.error(f"analyze_with_loading API ํ˜ธ์ถœ ์˜ค๋ฅ˜: {e}")
        return ""
def process_analyze_results(analysis_keywords, selected_category):
    """Call /process_analyze_results; returns (summary_html, local_file_path).

    A report file returned by the backend is copied into this app's own temp
    area; the local path (or None) is what gets offered for download.
    """
    try:
        api = get_api_client()
        payload = api.predict(
            analysis_keywords=analysis_keywords,
            selected_category=selected_category,
            api_name="/process_analyze_results",
        )
        # Guard: the endpoint must return at least (summary, file).
        if len(payload) < 2:
            return "๋ถ„์„ ๊ฒฐ๊ณผ๊ฐ€ ์—†์Šต๋‹ˆ๋‹ค.", None
        summary_html, remote_file = payload[:2]
        local_file = None
        if remote_file:
            sid = get_session_id()
            local_file = create_session_temp_file(sid, '.xlsx')
            try:
                shutil.copy2(remote_file, local_file)
            except Exception as e:
                logger.error(f"๋ถ„์„ ๊ฒฐ๊ณผ ํŒŒ์ผ ๋ณต์‚ฌ ์˜ค๋ฅ˜: {e}")
                local_file = None
        return summary_html, local_file
    except Exception as e:
        logger.error(f"process_analyze_results API ํ˜ธ์ถœ ์˜ค๋ฅ˜: {e}")
        return "๋ถ„์„ ์„œ๋น„์Šค ์—ฐ๊ฒฐ์— ๋ฌธ์ œ๊ฐ€ ๋ฐœ์ƒํ–ˆ์Šต๋‹ˆ๋‹ค. ์ž ์‹œ ํ›„ ๋‹ค์‹œ ์‹œ๋„ํ•ด์ฃผ์„ธ์š”.", None
def reset_interface():
    """Ask the backend to reset; fall back to local defaults on any failure."""
    try:
        api = get_api_client()
        payload = api.predict(api_name="/reset_interface")
        # An empty backend response also falls back to the local defaults.
        return payload if payload else get_default_reset_values()
    except Exception as e:
        logger.error(f"reset_interface API ํ˜ธ์ถœ ์˜ค๋ฅ˜: {e}")
        return get_default_reset_values()
def get_default_reset_values():
    """Default values for every UI control touched by the reset button.

    The 16 positions line up with the reset_btn outputs list in create_app.
    """
    defaults = (
        "",                    # keyword
        True,                  # korean_only
        False,                 # exclude_zero_volume
        "๋ฉ”์ธํ‚ค์›Œ๋“œ ์ ์šฉ",      # apply_main_keyword
        "",                    # table_output
        ["์ „์ฒด ๋ณด๊ธฐ"],         # category_filter choices
        "์ „์ฒด ๋ณด๊ธฐ",           # category_filter value
        ["์ „์ฒด"],              # search_volume_filter choices
        "์ „์ฒด",                # search_volume_filter value
        "์ •๋ ฌ ์—†์Œ",           # total_volume_sort
        "์ •๋ ฌ ์—†์Œ",           # usage_count_sort
        ["์ „์ฒด ๋ณด๊ธฐ"],         # selected_category choices
        "์ „์ฒด ๋ณด๊ธฐ",           # selected_category value
        "",                    # analysis_keywords
        "",                    # analysis_result
        None,                  # download_output
    )
    return defaults
# UI ์ฒ˜๋ฆฌ ๋ž˜ํผ ํ•จ์ˆ˜๋“ค
def wrapper_search_with_loading(keyword, korean_only, apply_main_keyword, exclude_zero_volume):
    """Kick off the remote search, then reveal the progress indicator.

    The backend's return value is intentionally unused here; only the
    visibility toggles matter for this UI step.
    """
    search_with_loading(keyword, korean_only, apply_main_keyword, exclude_zero_volume)
    show_progress = gr.update(visible=True)    # progress_section
    hide_placeholder = gr.update(visible=False)  # empty_table_html
    return (show_progress, hide_placeholder)
def wrapper_process_search_results(keyword, korean_only, apply_main_keyword, exclude_zero_volume):
    """Unpack the search API results and toggle each result section's visibility."""
    table_html, cat_choices, vol_choices, selected_cat, download_file = (
        process_search_results(keyword, korean_only, apply_main_keyword, exclude_zero_volume)
    )
    # A "real" result is any non-empty table that is neither the empty-result
    # message nor the connection-error placeholder.
    has_results = (
        bool(table_html)
        and "๊ฒ€์ƒ‰ ๊ฒฐ๊ณผ๊ฐ€ ์—†์Šต๋‹ˆ๋‹ค" not in table_html
        and "๋ฌธ์ œ๊ฐ€ ๋ฐœ์ƒํ–ˆ์Šต๋‹ˆ๋‹ค" not in table_html
    )
    return (
        table_html,                          # table_output
        cat_choices,                         # category_filter choices
        vol_choices,                         # search_volume_filter choices
        pd.DataFrame(),                      # state_df placeholder
        selected_cat,                        # selected_category value
        download_file,                       # download_output
        gr.update(visible=has_results),      # keyword_analysis_section
        gr.update(visible=has_results),      # category_analysis_section
        gr.update(visible=False),            # progress_section
        gr.update(visible=not has_results),  # empty_table_html
        gr.update(visible=has_results),      # execution_section
        keyword,                             # keyword_state
    )
def wrapper_analyze_with_loading(analysis_keywords, selected_category, state_df):
    """Kick off the remote analysis, then reveal the progress indicator.

    ``state_df`` is accepted for signature compatibility but not used here.
    """
    analyze_with_loading(analysis_keywords, selected_category)
    return gr.update(visible=True)  # progress_section
def wrapper_process_analyze_results(analysis_keywords, selected_category, state_df):
    """Fetch the analysis results and swap the progress UI for the output section.

    ``state_df`` is accepted for signature compatibility but not used here.
    """
    summary_html, report_file = process_analyze_results(analysis_keywords, selected_category)
    return (
        summary_html,              # analysis_result
        report_file,               # download_output
        gr.update(visible=True),   # analysis_output_section
        gr.update(visible=False),  # progress_section
    )
# Session-cleanup scheduler
def start_session_cleanup_scheduler():
    """Run periodic session/temp-file cleanup on a background daemon thread."""
    def _loop():
        while True:
            # Sweep every 10 minutes.
            time.sleep(600)
            cleanup_old_sessions()
            cleanup_huggingface_temp_folders()
    threading.Thread(target=_loop, daemon=True).start()
def cleanup_on_startup():
    """Full cleanup at application start; returns the app temp directory."""
    logger.info("๐Ÿงน ์ปจํŠธ๋กค ํƒ€์›Œ ์• ํ”Œ๋ฆฌ์ผ€์ด์…˜ ์‹œ์ž‘ - ์ดˆ๊ธฐ ์ •๋ฆฌ ์ž‘์—… ์‹œ์ž‘...")
    cleanup_huggingface_temp_folders()
    app_dir = setup_clean_temp_environment()
    # Forget any session bookkeeping left over from a previous run.
    global session_temp_files, session_data
    session_temp_files.clear()
    session_data.clear()
    logger.info(f"โœ… ์ดˆ๊ธฐ ์ •๋ฆฌ ์ž‘์—… ์™„๋ฃŒ - ์•ฑ ์ „์šฉ ๋””๋ ‰ํ† ๋ฆฌ: {app_dir}")
    return app_dir
# Gradio ์ธํ„ฐํŽ˜์ด์Šค ์ƒ์„ฑ
def create_app():
    """Assemble the Gradio Blocks UI and wire all event handlers.

    Layout: search input -> progress indicator -> main-keyword results
    (with filter/sort controls) -> keyword analysis -> execution buttons ->
    analysis summary/download.

    Returns:
        gr.Blocks: the fully wired application, ready for ``launch()``.
    """
    # Icon + webfont stylesheets injected as raw HTML into the page.
    fontawesome_html = """
    <link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.0.0/css/all.min.css">
    <link rel="stylesheet" href="https://cdn.jsdelivr.net/gh/orioncactus/pretendard/dist/web/static/pretendard.css">
    <link rel="stylesheet" href="https://fonts.googleapis.com/css2?family=Noto+Sans+KR:wght@300;400;500;700&display=swap">
    """
    # Load the external stylesheet; fall back to a minimal inline theme when
    # style.css is missing or unreadable.
    # Fix: was a bare `except:`, which would also swallow SystemExit and
    # KeyboardInterrupt; narrowed to Exception.
    try:
        with open('style.css', 'r', encoding='utf-8') as f:
            custom_css = f.read()
    except Exception:
        custom_css = """
        :root {
        --primary-color: #FB7F0D;
        --secondary-color: #ff9a8b;
        }
        .custom-button {
        background: linear-gradient(135deg, var(--primary-color), var(--secondary-color)) !important;
        color: white !important;
        border-radius: 30px !important;
        height: 45px !important;
        font-size: 16px !important;
        font-weight: bold !important;
        width: 100% !important;
        }
        """
    with gr.Blocks(css=custom_css, theme=gr.themes.Default(
        primary_hue="orange",
        secondary_hue="orange",
        font=[gr.themes.GoogleFont("Noto Sans KR"), "ui-sans-serif", "system-ui"]
    )) as demo:
        gr.HTML(fontawesome_html)
        # Holds the last searched keyword across event handlers.
        keyword_state = gr.State("")
        # --- Search input section ---
        with gr.Column(elem_classes="custom-frame fade-in"):
            gr.HTML('<div class="section-title"><i class="fas fa-search"></i> ๊ฒ€์ƒ‰ ์ž…๋ ฅ</div>')
            with gr.Row():
                with gr.Column(scale=1):
                    keyword = gr.Textbox(
                        label="๋ฉ”์ธ ํ‚ค์›Œ๋“œ",
                        placeholder="์˜ˆ: ์˜ค์ง•์–ด"
                    )
                with gr.Column(scale=1):
                    search_btn = gr.Button(
                        "๋ฉ”์ธํ‚ค์›Œ๋“œ ๋ถ„์„",
                        elem_classes="custom-button"
                    )
            with gr.Accordion("์˜ต์…˜ ์„ค์ •", open=False):
                with gr.Row():
                    with gr.Column(scale=1):
                        korean_only = gr.Checkbox(
                            label="ํ•œ๊ธ€๋งŒ ์ถ”์ถœ",
                            value=True
                        )
                    with gr.Column(scale=1):
                        exclude_zero_volume = gr.Checkbox(
                            label="๊ฒ€์ƒ‰๋Ÿ‰ 0 ํ‚ค์›Œ๋“œ ์ œ์™ธ",
                            value=False
                        )
                with gr.Row():
                    with gr.Column(scale=1):
                        apply_main_keyword = gr.Radio(
                            ["๋ฉ”์ธํ‚ค์›Œ๋“œ ์ ์šฉ", "๋ฉ”์ธํ‚ค์›Œ๋“œ ๋ฏธ์ ์šฉ"],
                            label="์กฐํ•ฉ ๋ฐฉ์‹",
                            value="๋ฉ”์ธํ‚ค์›Œ๋“œ ์ ์šฉ"
                        )
                    with gr.Column(scale=1):
                        gr.HTML("")
        # --- Progress indicator (hidden until a search/analysis runs) ---
        with gr.Column(elem_classes="custom-frame fade-in", visible=False) as progress_section:
            gr.HTML('<div class="section-title"><i class="fas fa-spinner"></i> ๋ถ„์„ ์ง„ํ–‰ ์ƒํƒœ</div>')
            progress_html = gr.HTML("""
            <div style="padding: 15px; background-color: #f9f9f9; border-radius: 5px; margin: 10px 0; border: 1px solid #ddd;">
                <div style="margin-bottom: 10px; display: flex; align-items: center;">
                    <i class="fas fa-spinner fa-spin" style="color: #FB7F0D; margin-right: 10px;"></i>
                    <span>ํ‚ค์›Œ๋“œ ๋ฐ์ดํ„ฐ๋ฅผ ๋ถ„์„์ค‘์ž…๋‹ˆ๋‹ค. ์ž ์‹œ๋งŒ ๊ธฐ๋‹ค๋ ค์ฃผ์„ธ์š”...</span>
                </div>
                <div style="background-color: #e9ecef; height: 10px; border-radius: 5px; overflow: hidden;">
                    <div class="progress-bar"></div>
                </div>
            </div>
            """)
        # --- Main-keyword analysis results section ---
        with gr.Column(elem_classes="custom-frame fade-in") as main_keyword_section:
            gr.HTML('<div class="section-title"><i class="fas fa-table"></i> ๋ฉ”์ธํ‚ค์›Œ๋“œ ๋ถ„์„ ๊ฒฐ๊ณผ</div>')
            empty_table_html = gr.HTML("""
            <table class="empty-table">
                <thead>
                    <tr>
                        <th>์ˆœ๋ฒˆ</th>
                        <th>์กฐํ•ฉ ํ‚ค์›Œ๋“œ</th>
                        <th>PC๊ฒ€์ƒ‰๋Ÿ‰</th>
                        <th>๋ชจ๋ฐ”์ผ๊ฒ€์ƒ‰๋Ÿ‰</th>
                        <th>์ด๊ฒ€์ƒ‰๋Ÿ‰</th>
                        <th>๊ฒ€์ƒ‰๋Ÿ‰๊ตฌ๊ฐ„</th>
                        <th>ํ‚ค์›Œ๋“œ ์‚ฌ์šฉ์ž์ˆœ์œ„</th>
                        <th>ํ‚ค์›Œ๋“œ ์‚ฌ์šฉํšŸ์ˆ˜</th>
                        <th>์ƒํ’ˆ ๋“ฑ๋ก ์นดํ…Œ๊ณ ๋ฆฌ</th>
                    </tr>
                </thead>
                <tbody>
                    <tr>
                        <td colspan="9" style="padding: 30px; text-align: center;">
                            ๊ฒ€์ƒ‰์„ ์‹คํ–‰ํ•˜๋ฉด ์—ฌ๊ธฐ์— ๊ฒฐ๊ณผ๊ฐ€ ํ‘œ์‹œ๋ฉ๋‹ˆ๋‹ค
                        </td>
                    </tr>
                </tbody>
            </table>
            """)
            # Filter/sort controls plus the live result table, revealed after a
            # successful search.
            # NOTE(review): nesting reconstructed from flattened source — these
            # controls are assumed to live inside the results frame; confirm.
            with gr.Column(visible=False) as keyword_analysis_section:
                with gr.Row():
                    with gr.Column(scale=1):
                        category_filter = gr.Dropdown(
                            choices=["์ „์ฒด ๋ณด๊ธฐ"],
                            label="์นดํ…Œ๊ณ ๋ฆฌ ํ•„ํ„ฐ",
                            value="์ „์ฒด ๋ณด๊ธฐ",
                            interactive=True
                        )
                    with gr.Column(scale=1):
                        total_volume_sort = gr.Dropdown(
                            choices=["์ •๋ ฌ ์—†์Œ", "์˜ค๋ฆ„์ฐจ์ˆœ", "๋‚ด๋ฆผ์ฐจ์ˆœ"],
                            label="์ด๊ฒ€์ƒ‰๋Ÿ‰ ์ •๋ ฌ",
                            value="์ •๋ ฌ ์—†์Œ",
                            interactive=True
                        )
                with gr.Row():
                    with gr.Column(scale=1):
                        search_volume_filter = gr.Dropdown(
                            choices=["์ „์ฒด"],
                            label="๊ฒ€์ƒ‰๋Ÿ‰ ๊ตฌ๊ฐ„ ํ•„ํ„ฐ",
                            value="์ „์ฒด",
                            interactive=True
                        )
                    with gr.Column(scale=1):
                        usage_count_sort = gr.Dropdown(
                            choices=["์ •๋ ฌ ์—†์Œ", "์˜ค๋ฆ„์ฐจ์ˆœ", "๋‚ด๋ฆผ์ฐจ์ˆœ"],
                            label="ํ‚ค์›Œ๋“œ ์‚ฌ์šฉํšŸ์ˆ˜ ์ •๋ ฌ",
                            value="์ •๋ ฌ ์—†์Œ",
                            interactive=True
                        )
                gr.HTML("<div class='data-container' id='table_container'></div>")
                table_output = gr.HTML(elem_classes="fade-in")
        # --- Keyword (category-match) analysis input section ---
        with gr.Column(elem_classes="custom-frame fade-in", visible=False) as category_analysis_section:
            gr.HTML('<div class="section-title"><i class="fas fa-chart-bar"></i> ํ‚ค์›Œ๋“œ ๋ถ„์„</div>')
            with gr.Row():
                with gr.Column(scale=1):
                    analysis_keywords = gr.Textbox(
                        label="ํ‚ค์›Œ๋“œ ์ž…๋ ฅ (์ตœ๋Œ€ 20๊ฐœ, ์‰ผํ‘œ ๋˜๋Š” ์—”ํ„ฐ๋กœ ๊ตฌ๋ถ„)",
                        placeholder="์˜ˆ: ์˜ค์ง•์–ด๋ณถ์Œ, ์˜ค์ง•์–ด ์†์งˆ, ์˜ค์ง•์–ด ์š”๋ฆฌ...",
                        lines=5
                    )
                with gr.Column(scale=1):
                    selected_category = gr.Dropdown(
                        label="๋ถ„์„ํ•  ์นดํ…Œ๊ณ ๋ฆฌ(๋ถ„์„ ์ „ ๋ฐ˜๋“œ์‹œ ์„ ํƒํ•ด์ฃผ์„ธ์š”)",
                        choices=["์ „์ฒด ๋ณด๊ธฐ"],
                        value="์ „์ฒด ๋ณด๊ธฐ",
                        interactive=True
                    )
        # --- Execution buttons section ---
        with gr.Column(elem_classes="execution-section", visible=False) as execution_section:
            gr.HTML('<div class="section-title"><i class="fas fa-play-circle"></i> ์‹คํ–‰</div>')
            with gr.Row():
                with gr.Column(scale=1):
                    analyze_btn = gr.Button(
                        "์นดํ…Œ๊ณ ๋ฆฌ ์ผ์น˜ ๋ถ„์„",
                        elem_classes=["execution-button", "primary-button"]
                    )
                with gr.Column(scale=1):
                    reset_btn = gr.Button(
                        "๋ชจ๋“  ์ž…๋ ฅ ์ดˆ๊ธฐํ™”",
                        elem_classes=["execution-button", "secondary-button"]
                    )
        # --- Analysis output section ---
        with gr.Column(elem_classes="custom-frame fade-in", visible=False) as analysis_output_section:
            gr.HTML('<div class="section-title"><i class="fas fa-list-ul"></i> ๋ถ„์„ ๊ฒฐ๊ณผ ์š”์•ฝ</div>')
            analysis_result = gr.HTML(elem_classes="fade-in")
            with gr.Row():
                download_output = gr.File(
                    label="ํ‚ค์›Œ๋“œ ๋ชฉ๋ก ๋‹ค์šด๋กœ๋“œ",
                    visible=True
                )
        # Cross-handler table state (populated by the search wrapper).
        state_df = gr.State()
        # --- Event wiring ---
        # Search: show the spinner first, then fetch and render the results.
        search_btn.click(
            fn=wrapper_search_with_loading,
            inputs=[keyword, korean_only, apply_main_keyword, exclude_zero_volume],
            outputs=[progress_section, empty_table_html]
        ).then(
            fn=wrapper_process_search_results,
            inputs=[keyword, korean_only, apply_main_keyword, exclude_zero_volume],
            outputs=[
                table_output, category_filter, search_volume_filter,
                state_df, selected_category, download_output,
                keyword_analysis_section, category_analysis_section,
                progress_section, empty_table_html, execution_section,
                keyword_state
            ]
        )
        # Filter/sort change events. The hidden inline Textbox supplies the
        # backend's `keyword_sort` parameter, which this UI never exposes.
        category_filter.change(
            fn=filter_and_sort_table,
            inputs=[
                category_filter, gr.Textbox(value="์ •๋ ฌ ์—†์Œ", visible=False),
                total_volume_sort, usage_count_sort,
                search_volume_filter, exclude_zero_volume
            ],
            outputs=[table_output]
        )
        category_filter.change(
            fn=update_category_selection,
            inputs=[category_filter],
            outputs=[selected_category]
        )
        total_volume_sort.change(
            fn=filter_and_sort_table,
            inputs=[
                category_filter, gr.Textbox(value="์ •๋ ฌ ์—†์Œ", visible=False),
                total_volume_sort, usage_count_sort,
                search_volume_filter, exclude_zero_volume
            ],
            outputs=[table_output]
        )
        usage_count_sort.change(
            fn=filter_and_sort_table,
            inputs=[
                category_filter, gr.Textbox(value="์ •๋ ฌ ์—†์Œ", visible=False),
                total_volume_sort, usage_count_sort,
                search_volume_filter, exclude_zero_volume
            ],
            outputs=[table_output]
        )
        search_volume_filter.change(
            fn=filter_and_sort_table,
            inputs=[
                category_filter, gr.Textbox(value="์ •๋ ฌ ์—†์Œ", visible=False),
                total_volume_sort, usage_count_sort,
                search_volume_filter, exclude_zero_volume
            ],
            outputs=[table_output]
        )
        exclude_zero_volume.change(
            fn=filter_and_sort_table,
            inputs=[
                category_filter, gr.Textbox(value="์ •๋ ฌ ์—†์Œ", visible=False),
                total_volume_sort, usage_count_sort,
                search_volume_filter, exclude_zero_volume
            ],
            outputs=[table_output]
        )
        # Category-match analysis: spinner first, then results.
        analyze_btn.click(
            fn=wrapper_analyze_with_loading,
            inputs=[analysis_keywords, selected_category, state_df],
            outputs=[progress_section]
        ).then(
            fn=wrapper_process_analyze_results,
            inputs=[analysis_keywords, selected_category, state_df],
            outputs=[analysis_result, download_output, analysis_output_section, progress_section]
        )
        # Reset: restore every control to its default.
        # NOTE(review): several components appear twice in this outputs list
        # (category_filter, search_volume_filter, selected_category) to match
        # the 16-tuple from reset_interface/get_default_reset_values — the
        # later value wins per component; presumably the backend returns
        # gr.update pairs for choices+value. Confirm against the backend.
        reset_btn.click(
            fn=reset_interface,
            inputs=[],
            outputs=[
                keyword, korean_only, exclude_zero_volume, apply_main_keyword,
                table_output, category_filter, category_filter,
                search_volume_filter, search_volume_filter,
                total_volume_sort, usage_count_sort,
                selected_category, selected_category,
                analysis_keywords, analysis_result, download_output
            ]
        )
    return demo
if __name__ == "__main__":
    # ========== Full initialization at startup ==========
    print("===== Application Startup at %s =====" % time.strftime("%Y-%m-%d %H:%M:%S"))
    logger.info("๐Ÿš€ ์ปจํŠธ๋กค ํƒ€์›Œ ์• ํ”Œ๋ฆฌ์ผ€์ด์…˜ ์‹œ์ž‘...")
    # 1. First: sweep Hugging Face temp folders and set up the app temp dir
    app_temp_dir = cleanup_on_startup()
    # 2. Start the background session-cleanup scheduler
    start_session_cleanup_scheduler()
    # 3. Smoke-test the API configuration (only verifies that API_ENDPOINT is
    #    set — no network request is made at this point)
    try:
        test_client = get_api_client()
        logger.info("โœ… API ์—ฐ๊ฒฐ ํ…Œ์ŠคํŠธ ์„ฑ๊ณต")
    except Exception as e:
        logger.error("โŒ API ์—ฐ๊ฒฐ ์‹คํŒจ - ํ™˜๊ฒฝ๋ณ€์ˆ˜ API_ENDPOINT๋ฅผ ํ™•์ธํ•˜์„ธ์š”")
        print("โŒ API_ENDPOINT ํ™˜๊ฒฝ๋ณ€์ˆ˜๊ฐ€ ์„ค์ •๋˜์ง€ ์•Š์•˜์Šต๋‹ˆ๋‹ค.")
        print("๐Ÿ’ก .env ํŒŒ์ผ์— ๋‹ค์Œ๊ณผ ๊ฐ™์ด ์„ค์ •ํ•˜์„ธ์š”:")
        print("API_ENDPOINT=your-endpoint-here")
        raise SystemExit(1)
    logger.info("===== ์ปจํŠธ๋กค ํƒ€์›Œ ์• ํ”Œ๋ฆฌ์ผ€์ด์…˜ ์‹œ์ž‘ ์™„๋ฃŒ at %s =====", time.strftime("%Y-%m-%d %H:%M:%S"))
    logger.info(f"๐Ÿ“ ์ž„์‹œ ํŒŒ์ผ ์ €์žฅ ์œ„์น˜: {app_temp_dir}")
    # ========== Run the app ==========
    try:
        app = create_app()
        print("๐Ÿš€ Gradio ์• ํ”Œ๋ฆฌ์ผ€์ด์…˜์ด ์‹œ์ž‘๋ฉ๋‹ˆ๋‹ค...")
        app.launch(
            share=False,  # keep public sharing off for security
            server_name="0.0.0.0",  # accept connections from any interface
            server_port=7860,  # fixed port (HF Spaces default)
            max_threads=40,  # more worker threads for concurrent users
            auth=None,  # authentication can be added here if needed
            show_error=True,  # surface errors in the UI
            quiet=False,  # keep request logs visible
            favicon_path=None,  # default favicon
            ssl_verify=False,  # SSL verification off (development only)
            inbrowser=False,  # do not auto-open a browser
            prevent_thread_lock=False  # block here until the server exits
        )
    except Exception as e:
        logger.error(f"์• ํ”Œ๋ฆฌ์ผ€์ด์…˜ ์‹คํ–‰ ์‹คํŒจ: {e}")
        print(f"โŒ ์• ํ”Œ๋ฆฌ์ผ€์ด์…˜ ์‹คํ–‰ ์‹คํŒจ: {e}")
        raise SystemExit(1)
    finally:
        # Final cleanup when the application shuts down
        logger.info("๐Ÿงน ์ปจํŠธ๋กค ํƒ€์›Œ ์• ํ”Œ๋ฆฌ์ผ€์ด์…˜ ์ข…๋ฃŒ - ์ตœ์ข… ์ •๋ฆฌ ์ž‘์—…...")
        try:
            cleanup_huggingface_temp_folders()
            if os.path.exists(app_temp_dir):
                shutil.rmtree(app_temp_dir, ignore_errors=True)
            logger.info("โœ… ์ตœ์ข… ์ •๋ฆฌ ์™„๋ฃŒ")
        except Exception as e:
            logger.error(f"์ตœ์ข… ์ •๋ฆฌ ์ค‘ ์˜ค๋ฅ˜: {e}")