File size: 19,546 Bytes
744b6ba
4d089a2
744b6ba
 
 
dab0a85
 
744b6ba
4d089a2
 
744b6ba
 
 
 
 
 
 
dab0a85
 
 
 
 
 
 
 
 
 
 
 
 
 
 
5491458
dab0a85
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
9d62d44
dab0a85
 
661d612
9d62d44
dab0a85
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
f2f9993
dab0a85
68b586b
661d612
dab0a85
 
e84b737
dab0a85
 
 
 
 
 
 
 
 
 
 
9d62d44
661d612
dab0a85
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
9d62d44
 
dab0a85
 
 
661d612
9d62d44
 
dab0a85
9d62d44
 
 
dab0a85
9d62d44
 
661d612
dab0a85
f2f9993
ceaf08b
 
dab0a85
 
 
 
 
 
 
 
 
 
 
 
 
ceaf08b
dab0a85
 
 
 
661d612
dab0a85
 
 
9d62d44
661d612
f2f9993
dab0a85
 
f2f9993
5491458
dab0a85
 
 
 
 
 
5491458
4d089a2
dab0a85
 
 
 
4d089a2
dab0a85
 
 
 
 
4d089a2
dab0a85
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
5491458
4d089a2
 
dab0a85
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
771bd90
 
dab0a85
771bd90
dab0a85
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
9d62d44
dab0a85
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
9d62d44
dab0a85
 
4d089a2
dab0a85
744b6ba
88d3ed7
 
dab0a85
 
 
 
 
 
 
 
 
 
 
744b6ba
 
01203f2
 
dab0a85
 
 
01203f2
dab0a85
 
 
 
01203f2
dab0a85
 
 
 
01203f2
dab0a85
 
 
 
01203f2
dab0a85
 
 
 
9d62d44
dab0a85
 
9d62d44
01203f2
dab0a85
 
 
 
 
 
744b6ba
dab0a85
 
8a19795
744b6ba
dab0a85
744b6ba
 
 
dab0a85
 
 
 
 
 
 
 
 
744b6ba
 
dab0a85
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
import os
import json
import logging
import re
import gradio as gr
# google.generativeai v0.3.1 이상 ν•„μš”
import google.generativeai as genai
from dotenv import load_dotenv
import random
from typing import List, Dict, Any, Optional, Tuple

load_dotenv()

# ------------------- Logging configuration -------------------
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
logger = logging.getLogger(__name__)

# ------------------- Gemini API configuration -------------------
# The key comes from the environment (.env via load_dotenv). When it is absent
# or configuration fails, GEMINI_API_KEY is left empty so downstream functions
# can detect that Gemini features are unavailable.
GEMINI_API_KEY = os.environ.get("GEMINI_API_KEY", "")
if GEMINI_API_KEY:
    try:
        genai.configure(api_key=GEMINI_API_KEY)
        logger.info("Gemini API ν‚€κ°€ μ„±κ³΅μ μœΌλ‘œ μ„€μ •λ˜μ—ˆμŠ΅λ‹ˆλ‹€.")
    except Exception as e:
        logger.error(f"Gemini API ν‚€ μ„€μ • 쀑 였λ₯˜ λ°œμƒ: {e}")
        GEMINI_API_KEY = "" # disable the key on configuration failure
else:
    logger.error("Gemini API ν‚€κ°€ .env νŒŒμΌμ— μ„€μ •λ˜μ§€ μ•Šμ•˜μŠ΅λ‹ˆλ‹€. λ²ˆμ—­ 및 ν”„λ‘¬ν”„νŠΈ 생성이 μž‘λ™ν•˜μ§€ μ•Šμ„ 수 μžˆμŠ΅λ‹ˆλ‹€.")

# ------------------- Reference data loading -------------------
CLOTHING_REFERENCES_PATH = 'clothing_references.json' # adjust to the actual file path
clothing_references: List[Dict[str, Any]] = []

try:
    # Explicit UTF-8 encoding: the file contains Korean text.
    with open(CLOTHING_REFERENCES_PATH, 'r', encoding='utf-8') as f:
        raw_data = json.load(f)
    clothing_references = raw_data.get("clothing_references", [])
    if clothing_references:
        logger.info(f"{len(clothing_references)}개의 의λ₯˜ 레퍼런슀λ₯Ό μ„±κ³΅μ μœΌλ‘œ λ‘œλ“œν–ˆμŠ΅λ‹ˆλ‹€.")
    else:
        logger.warning(f"{CLOTHING_REFERENCES_PATH} νŒŒμΌμ—μ„œ 'clothing_references' ν‚€λ₯Ό 찾을 수 μ—†κ±°λ‚˜ λΉ„μ–΄ μžˆμŠ΅λ‹ˆλ‹€.")
except FileNotFoundError:
    # The app keeps working without references; callers see an empty list.
    logger.error(f"레퍼런슀 파일({CLOTHING_REFERENCES_PATH})을 찾을 수 μ—†μŠ΅λ‹ˆλ‹€. 레퍼런슀 κΈ°λŠ₯ 없이 μž‘λ™ν•©λ‹ˆλ‹€.")
    clothing_references = []
except json.JSONDecodeError:
    logger.error(f"레퍼런슀 파일({CLOTHING_REFERENCES_PATH})의 ν˜•μ‹μ΄ 잘λͺ»λ˜μ—ˆμŠ΅λ‹ˆλ‹€. JSON νŒŒμ‹±μ— μ‹€νŒ¨ν–ˆμŠ΅λ‹ˆλ‹€.")
    clothing_references = []
except Exception as e:
    logger.error(f"레퍼런슀 파일 λ‘œλ“œ 쀑 μ˜ˆμƒμΉ˜ λͺ»ν•œ 였λ₯˜ λ°œμƒ: {e}")
    clothing_references = []

# ------------------- Korean -> English translation via Gemini -------------------
def translate_with_gemini(text_to_translate: str) -> str:
    """Translate Korean text into English using the Gemini API.

    Best-effort: when the input is empty, the API key is missing, or the API
    call fails, the original text is returned so callers always get a usable
    string.

    Args:
        text_to_translate: Korean (or possibly already-English) source text.

    Returns:
        The English translation, or ``""`` for empty input, or the input
        unchanged on any failure.
    """
    if not text_to_translate:
        return ""
    if not GEMINI_API_KEY:
        # Fix: this log message previously contained raw mojibake bytes
        # (<0xEB><0x9B><0x84>) instead of the intended Korean syllable.
        logger.warning("Gemini API ν‚€κ°€ μ—†μ–΄ λ²ˆμ—­μ„ κ±΄λ„ˆλ›΅λ‹ˆλ‹€. 원본 ν…μŠ€νŠΈλ₯Ό λ°˜ν™˜ν•©λ‹ˆλ‹€.")
        return text_to_translate

    try:
        model = genai.GenerativeModel('gemini-2.0-flash')

        prompt = f"""Translate the following Korean text into natural-sounding English.
Respond ONLY with the translated English text and nothing else.

Korean Text:
{text_to_translate}

English Translation:"""

        response = model.generate_content(
            prompt,
            generation_config=genai.types.GenerationConfig(
                temperature=0.2,
                # Fix: a floor of 256 tokens prevents truncated translations for
                # very short inputs, where len(text) * 3 alone could allow only
                # a handful of output tokens.
                max_output_tokens=max(256, len(text_to_translate) * 3)
            )
        )

        translated_text = response.text.strip()
        translated_text = translated_text.strip('"')
        logger.info(f"λ²ˆμ—­ 성곡: '{text_to_translate}' -> '{translated_text}' (Model: gemini-2.0-flash)")
        return translated_text

    except Exception as e:
        # Fallback: log the failure and hand back the untranslated source text.
        logger.error(f"Gemini λ²ˆμ—­ 쀑 였λ₯˜ λ°œμƒ ('{text_to_translate}') with gemini-2.0-flash: {e}. 원본 ν…μŠ€νŠΈλ₯Ό λ°˜ν™˜ν•©λ‹ˆλ‹€.")
        return text_to_translate

# ------------------- Prompt generation via Gemini (uses a random reference) -------------------
def generate_prompt_with_gemini(person_description_en: str, item_description_2_en: str, item_description_3_en: str, custom_prompt_en: str, reference: Optional[Dict[str, Any]]) -> str:
    """Ask Gemini to produce an enhanced Midjourney prompt for a virtual fitting.

    Args:
        person_description_en: English description of the person (image #1).
        item_description_2_en: English description of the first item (image #2).
        item_description_3_en: English description of the second item (image #3).
        custom_prompt_en: Optional extra English instructions from the user.
        reference: Optional reference-scenario dict (style/setting/pose/...)
            used to inspire the scene; may be None.

    Returns:
        The generated prompt (post-processed by ``filter_prompt_only``), or a
        Korean message prefixed with "였λ₯˜:" / containing "⚠️" on failure —
        callers check for those markers to trigger the fallback prompt.
    """
    if not GEMINI_API_KEY:
        logger.error("Gemini API ν‚€κ°€ μ„€μ •λ˜μ§€ μ•Šμ•˜μŠ΅λ‹ˆλ‹€.")
        return "였λ₯˜: Gemini API ν‚€κ°€ μ—†μ–΄ ν”„λ‘¬ν”„νŠΈ 생성을 μ§„ν–‰ν•  수 μ—†μŠ΅λ‹ˆλ‹€."

    try:
        # System instruction pinning the exact output format, including the
        # literal "(... from image #N)" fragments and the trailing parameters.
        system_instruction = f"""
You are an expert prompt generator for virtual fashion try-on using an AI image generator like Midjourney.
Your goal is to create ONE highly detailed and realistic prompt in natural English that visually describes a complete scene, incorporating elements from a provided reference scenario.
Your response MUST include the exact input texts for the person and items in the following format:
- The person description MUST appear as: "({person_description_en} from image #1)"
- The first item description MUST appear as: "({item_description_2_en} from image #2)"
- The second item description MUST appear as: "({item_description_3_en} from image #3)"

The prompt should describe a specific lifestyle scene inspired by the reference details (setting, background, pose, camera angle, style).
Critically, ensure the model's face from (image #1) is **exactly preserved, clear, sharp, and realistically integrated** into the scene.
The final image should look like a high-quality photograph.

Return ONLY the prompt as one complete English paragraph.
Always end the prompt with the following parameters, maintaining the specified seed: --ar 9:16 --face #1 --seed 123456 --q 3 --v 5.2 --style raw
"""

        prompt_person = f"({person_description_en} from image #1)"
        prompt_items = f"({item_description_2_en} from image #2) and ({item_description_3_en} from image #3)"

        # Optional scene inspiration pulled from the randomly chosen reference.
        reference_details = ""
        if reference:
            reference_details = f"""
Reference Scenario Details to inspire the scene:
- Style: {reference.get('style', 'N/A')}
- Setting: {reference.get('setting', 'N/A')}
- Background: {reference.get('background', 'N/A')}
- Pose: {reference.get('pose', 'N/A')}
- Camera Angle: {reference.get('camera_angle', 'standard view')}
- Lens Type: {reference.get('lens_type', 'standard lens')}
- Focus/Light: {reference.get('focus_style', 'standard focus and light')}
"""

        prompt_request = f"""
Create a detailed Midjourney prompt for a virtual fitting:

- The person is: {prompt_person}.
- They are wearing: {prompt_items}.
- Additional user request: "{custom_prompt_en if custom_prompt_en else 'None'}"
{reference_details}

IMPORTANT INSTRUCTIONS:
- Generate ONE single paragraph prompt in natural, descriptive English.
- Describe a specific, realistic lifestyle scene incorporating the reference details.
- Emphasize **exact preservation, clarity, and sharpness of the face** from image #1.
- The output should resemble a high-quality photograph.
- Do NOT explain anything. Do NOT include bullet points or extra text.
- Ensure the prompt ends ONLY with: --ar 9:16 --face #1 --seed 123456 --q 3 --v 5.2 --style raw
"""
        model = genai.GenerativeModel(
            'gemini-2.0-flash', # model name updated
            system_instruction=system_instruction
        )

        logger.info(f"Geminiμ—κ²Œ ν”„λ‘¬ν”„νŠΈ 생성 μš”μ²­ μ‹œμž‘ (Model: gemini-2.0-flash)")

        response = model.generate_content(
            prompt_request,
            generation_config=genai.types.GenerationConfig(
                temperature=0.7,
                top_p=0.95,
                top_k=40,
                max_output_tokens=8192 # TODO: confirm the model's actual output-token limit
            )
        )

        logger.info(f"Gemini 응닡 μˆ˜μ‹  (ν”„λ‘¬ν”„νŠΈ 생성)")

        # response.text can be unavailable (e.g. blocked/empty candidates), so
        # fall back to digging the text out of the candidates structure.
        try:
            enhanced_prompt = response.text.strip()
        except AttributeError as ae:
             logger.warning(f"Gemini μ‘λ‹΅μ—μ„œ .text 속성을 찾을 수 μ—†μŒ: {ae}. 응닡 객체 ꡬ쑰 확인 ν•„μš”.")
             try:
                 if response.candidates:
                     enhanced_prompt = response.candidates[0].content.parts[0].text.strip()
                 else:
                     enhanced_prompt = "⚠️ Gemini 응닡 νŒŒμ‹± μ‹€νŒ¨ (candidates μ—†μŒ)"
             except Exception as e:
                 logger.warning(f"Gemini 응닡 λŒ€μ²΄ νŒŒμ‹± μ‹€νŒ¨: {str(e)}. 응닡: {response}")
                 enhanced_prompt = "⚠️ Gemini 응닡 νŒŒμ‹± μ™„μ „ μ‹€νŒ¨"
        except Exception as e:
            logger.error(f"Gemini 응닡 νŒŒμ‹± 쀑 μ˜ˆμƒμΉ˜ λͺ»ν•œ 였λ₯˜: {str(e)}. 응닡: {response}")
            enhanced_prompt = "⚠️ Gemini 응닡 νŒŒμ‹± 쀑 였λ₯˜ λ°œμƒ"

        # Guarantee the canonical parameter suffix: strip any malformed trailing
        # parameter block the model produced, then append the required one.
        required_params = "--ar 9:16 --face #1 --seed 123456 --q 3 --v 5.2 --style raw"
        if not enhanced_prompt.endswith(required_params):
            prompt_base = re.sub(r'--ar\s+\S+\s+--face\s+\S+\s+--seed\s+\d+\s+--q\s+\d+(\.\d+)?\s+--v\s+\S+\s+--style\s+\S+$', '', enhanced_prompt).strip()
            enhanced_prompt = f"{prompt_base} {required_params}"

        # Remove any conversational chatter around the prompt itself.
        enhanced_prompt = filter_prompt_only(enhanced_prompt)

        logger.info(f"Gemini 생성 ν”„λ‘¬ν”„νŠΈ (필터링 ν›„): {enhanced_prompt}")
        return enhanced_prompt

    except Exception as e:
        logger.exception("Gemini ν”„λ‘¬ν”„νŠΈ 생성 쀑 μ‹¬κ°ν•œ 였λ₯˜ λ°œμƒ:")
        return f"였λ₯˜: Gemini ν”„λ‘¬ν”„νŠΈ 생성 μ‹€νŒ¨ ({str(e)}). κΈ°λ³Έ ν”„λ‘¬ν”„νŠΈλ₯Ό μ‚¬μš©ν•©λ‹ˆλ‹€."


def filter_prompt_only(prompt: str) -> str:
    """Strip Gemini's conversational wrapping from a generated prompt.

    Removes code fences, known lead-in phrases, and known sign-off lines, then
    guarantees the result ends with the required Midjourney parameter suffix.
    """
    cleaned = prompt.strip()

    # Unwrap a fenced code block, if the model returned one, and drop stray backticks.
    cleaned = re.sub(r"```[a-zA-Z]*\n(.*?)\n```", r"\1", cleaned, flags=re.DOTALL)
    cleaned = cleaned.strip('`')

    lead_ins = (
        "Here's the generated prompt:", "Here is the prompt:", "Okay, here's the prompt:",
        "Enhanced prompt:", "Generated prompt:", "Prompt:", "Here's an enhanced prompt:",
        "Here is the improved prompt:", "I've refined the prompt:", "Below is the prompt:",
        "The enhanced prompt is:"
    )
    sign_offs = (
        "I hope this helps!", "Let me know if you need adjustments.", "Enjoy generating!",
        "This prompt aims to fulfill all requirements."
    )

    # Drop a recognized lead-in phrase from the front (case-insensitive).
    lowered = cleaned.lower()
    for lead_in in lead_ins:
        if lowered.startswith(lead_in.lower()):
            cleaned = cleaned[len(lead_in):].lstrip(':').strip()
            break

    # Drop a recognized sign-off if it occupies the final line.
    rows = cleaned.split('\n')
    if len(rows) > 1:
        final_row = rows[-1].strip()
        if any(final_row.startswith(sign_off) for sign_off in sign_offs):
            cleaned = '\n'.join(rows[:-1]).strip()

    # Normalize the tail so exactly one canonical parameter suffix remains.
    required_params = "--ar 9:16 --face #1 --seed 123456 --q 3 --v 5.2 --style raw"
    if required_params in cleaned:
        cleaned = f"{cleaned.split(required_params)[0].strip()} {required_params}"
    elif not cleaned.endswith(required_params):
        logger.warning("Gemini κ²°κ³Όμ—μ„œ ν•„μˆ˜ νŒŒλΌλ―Έν„° λˆ„λ½ 확인, κ°•μ œ μΆ”κ°€")
        cleaned = f"{cleaned.strip()} {required_params}"

    return cleaned.strip()


# ------------------- Basic prompt builder (fallback when Gemini fails) -------------------
def generate_basic_prompt(person_description_ko, item_description_2_ko, item_description_3_ko, custom_prompt_ko):
    """Build a simple template-based prompt from the Korean inputs.

    Used as a fallback when Gemini prompt generation fails; each input is still
    translated individually (translation itself falls back to the original text).
    """
    person_en, item2_en, item3_en, custom_en = [
        translate_with_gemini(text)
        for text in (person_description_ko, item_description_2_ko, item_description_3_ko, custom_prompt_ko)
    ]

    combined_item_description = f"({item2_en} from image #2) and ({item3_en} from image #3)"
    base_prompt = (f"Hyperrealistic lifestyle portrait of a ({person_en} from image #1) wearing "
                   f"{combined_item_description}. "
                   f"Her face is exactly preserved from (image #1)")
    extra = custom_en.strip()
    if extra:
        base_prompt += f", {extra}"

    base_prompt += " --ar 9:16 --face #1 --seed 123456 --q 3 --v 5.2 --style raw"
    logger.info(f"κΈ°λ³Έ ν”„λ‘¬ν”„νŠΈ 생성됨 (λ²ˆμ—­ μ‹œλ„λ¨): {base_prompt}")
    return base_prompt

# ------------------- Final prompt generation -------------------
def generate_final_prompt(model_image, item_image_2, item_image_3, person_description_ko, item_description_2_ko, item_description_3_ko, custom_prompt_ko):
    """Validate inputs, translate the Korean descriptions, pick a random
    reference scenario, and return the final Midjourney prompt.

    Falls back to ``generate_basic_prompt`` whenever Gemini generation fails or
    returns an error marker.
    """
    # Guard clauses: all three images and all three descriptions are required.
    if not (model_image and item_image_2 and item_image_3):
        return "였λ₯˜: λͺ¨λΈ 이미지(#1)와 μ•„μ΄ν…œ 이미지(#2, #3)λ₯Ό λͺ¨λ‘ μ—…λ‘œλ“œν•΄μ£Όμ„Έμš”."
    if not (person_description_ko and item_description_2_ko and item_description_3_ko):
        return "였λ₯˜: 인물 μ„€λͺ…κ³Ό 두 μ•„μ΄ν…œ μ„€λͺ…을 λͺ¨λ‘ μž…λ ₯ν•΄μ£Όμ„Έμš”."

    if not GEMINI_API_KEY:
        logger.error("Gemini API ν‚€κ°€ μ—†μ–΄ μ§„ν–‰ν•  수 μ—†μŠ΅λ‹ˆλ‹€. .env νŒŒμΌμ„ ν™•μΈν•˜μ„Έμš”.")
        return "였λ₯˜: Gemini API ν‚€κ°€ μ„€μ •λ˜μ§€ μ•Šμ•˜μŠ΅λ‹ˆλ‹€. ν”„λ‘¬ν”„νŠΈλ₯Ό 생성할 수 μ—†μŠ΅λ‹ˆλ‹€."

    logger.info("μž…λ ₯된 ν•œκ΅­μ–΄ μ„€λͺ…을 μ˜μ–΄λ‘œ λ²ˆμ—­ μ‹œμž‘ (Model: gemini-2.0-flash)...")
    translated_person, translated_item_2, translated_item_3, translated_custom = (
        translate_with_gemini(text)
        for text in (person_description_ko, item_description_2_ko, item_description_3_ko, custom_prompt_ko)
    )
    logger.info("λ²ˆμ—­ μ™„λ£Œ (였λ₯˜ μ‹œ 원본 ν…μŠ€νŠΈ μ‚¬μš©λ¨).")

    # Translation returning the input unchanged usually means a failure
    # (or the text was already English) — worth a warning either way.
    if translated_person == person_description_ko and person_description_ko:
        logger.warning(f"인물 μ„€λͺ…({person_description_ko}) λ²ˆμ—­ μ‹€νŒ¨ λ˜λŠ” 이미 μ˜μ–΄μΌ 수 μžˆμŠ΅λ‹ˆλ‹€.")

    # Pick one reference scenario at random, when any are loaded.
    selected_reference = random.choice(clothing_references) if clothing_references else None
    if selected_reference:
        logger.info(f"μ„ νƒλœ 레퍼런슀 ID: {selected_reference.get('id', 'N/A')}, μŠ€νƒ€μΌ: {selected_reference.get('style', 'N/A')}")
    else:
        logger.warning("μ‚¬μš© κ°€λŠ₯ν•œ λ ˆνΌλŸ°μŠ€κ°€ μ—†μ–΄ 레퍼런슀 없이 μ§„ν–‰ν•©λ‹ˆλ‹€.")

    try:
        generated_prompt = generate_prompt_with_gemini(
            translated_person,
            translated_item_2,
            translated_item_3,
            translated_custom,
            selected_reference
        )

        # Error markers from generate_prompt_with_gemini trigger the fallback.
        if any(marker in generated_prompt for marker in ("였λ₯˜:", "⚠️")):
            logger.warning(f"Gemini ν”„λ‘¬ν”„νŠΈ 생성 μ‹€νŒ¨ λ˜λŠ” 였λ₯˜ 포함: {generated_prompt}. κΈ°λ³Έ ν”„λ‘¬ν”„νŠΈλ₯Ό μ‚¬μš©ν•©λ‹ˆλ‹€.")
            return generate_basic_prompt(person_description_ko, item_description_2_ko, item_description_3_ko, custom_prompt_ko)
        return generated_prompt

    except Exception:
        logger.exception("μ΅œμ’… ν”„λ‘¬ν”„νŠΈ 생성 κ³Όμ •μ—μ„œ μ˜ˆμ™Έ λ°œμƒ:")
        return generate_basic_prompt(person_description_ko, item_description_2_ko, item_description_3_ko, custom_prompt_ko)

# ------------------- Gradio interface construction -------------------
def create_app() -> gr.Blocks:
    """Build and return the Gradio Blocks UI.

    Layout: a two-column row — left column collects three images plus Korean
    descriptions, right column shows the generated English Midjourney prompt.
    The single button wires all inputs into ``generate_final_prompt``.
    """
    with gr.Blocks(title="가상 ν”ΌνŒ… μŠ€νŠœλ””μ˜€") as demo:
        gr.Markdown("# 가상 ν”ΌνŒ… μŠ€νŠœλ””μ˜€")
        gr.Markdown("""
        μ’ŒμΈ‘μ€ μž…λ ₯ μ„Ήμ…˜, μš°μΈ‘μ€ 좜λ ₯ μ„Ήμ…˜μž…λ‹ˆλ‹€.
        - **이미지 μ—…λ‘œλ“œ:** #1(인물), #2(μ•„μ΄ν…œ), #3(μ•„μ΄ν…œ) 이미지λ₯Ό μ—…λ‘œλ“œν•˜μ„Έμš”.
        - **μ„€λͺ… μž…λ ₯:** 각 이미지에 λŒ€ν•œ μ„€λͺ…을 **ν•œκ΅­μ–΄**둜 μž…λ ₯ν•˜μ„Έμš” (예: λ―Έμ†Œμ§“λŠ” μ Šμ€ μ—¬μ„±, 베이지색 울 μ½”νŠΈ, μ²­λ°”μ§€). Geminiκ°€ μ˜μ–΄λ‘œ λ²ˆμ—­ν•©λ‹ˆλ‹€.
        - **μ»€μŠ€ν…€ μ„€λͺ…:** μΆ”κ°€ν•˜κ³  싢은 λ°°κ²½, 포즈, λΆ„μœ„κΈ° 등을 **ν•œκ΅­μ–΄**둜 μž…λ ₯ν•˜μ„Έμš” (선택 사항).
        - **ν”„λ‘¬ν”„νŠΈ 생성:** λ²„νŠΌμ„ λˆ„λ₯΄λ©΄ μž…λ ₯ μ„€λͺ…을 μ˜μ–΄λ‘œ λ²ˆμ—­ν•˜κ³ , 랜덀 λ ˆνΌλŸ°μŠ€μ™€ μ‘°ν•©ν•˜μ—¬ Midjourney용 ν”„λ‘¬ν”„νŠΈλ₯Ό μƒμ„±ν•©λ‹ˆλ‹€.
        - **μ‹œλ“œ κ³ μ •:** ν”„λ‘¬ν”„νŠΈμ—λŠ” 항상 `--seed 123456`이 ν¬ν•¨λ˜μ–΄ 일관성을 μœ μ§€ν•©λ‹ˆλ‹€.
        - **레퍼런슀:** `clothing_references.json` νŒŒμΌμ—μ„œ λžœλ€ν•˜κ²Œ λ°°κ²½, 카메라 μƒ· 등을 μ°Έμ‘°ν•©λ‹ˆλ‹€.
        - **주의:** λ²ˆμ—­ 및 ν”„λ‘¬ν”„νŠΈ 생성에 Gemini API(gemini-2.0-flash)κ°€ μ‚¬μš©λ˜λ―€λ‘œ API ν‚€κ°€ ν•„μš”ν•˜λ©°, μ•½κ°„μ˜ 처리 μ‹œκ°„μ΄ μ†Œμš”λ  수 μžˆμŠ΅λ‹ˆλ‹€.
        """)

        with gr.Row():
            # Left column: image uploads and Korean description inputs.
            with gr.Column(scale=1):
                gr.Markdown("## μž…λ ₯ μ„Ήμ…˜")
                with gr.Row():
                    model_image = gr.Image(label="λͺ¨λΈ 이미지 (#1)", type="pil", sources=["upload"])
                    item_image_2 = gr.Image(label="μ•„μ΄ν…œ 이미지 (#2)", type="pil", sources=["upload"])
                    item_image_3 = gr.Image(label="μ•„μ΄ν…œ 이미지 (#3)", type="pil", sources=["upload"])
                with gr.Row():
                    person_description_ko = gr.Textbox(
                        label="인물섀λͺ…(#1) (ν•œκ΅­μ–΄)",
                        placeholder="예: λ―Έμ†Œμ§“λŠ” μ Šμ€ μ—¬μ„±",
                        lines=1, interactive=True
                    )
                    item_description_2_ko = gr.Textbox(
                        label="μ•„μ΄ν…œμ„€λͺ…(#2) (ν•œκ΅­μ–΄)",
                        placeholder="예: 베이지색 울 μ½”νŠΈ",
                        lines=1, interactive=True
                    )
                    item_description_3_ko = gr.Textbox(
                        label="μ•„μ΄ν…œμ„€λͺ…(#3) (ν•œκ΅­μ–΄)",
                        placeholder="예: 밝은 μ›Œμ‹±μ˜ μ²­λ°”μ§€",
                        lines=1, interactive=True
                    )
                custom_prompt_ko = gr.Textbox(
                    label="μ»€μŠ€ν…€ 상황 μ„€λͺ… (ν•œκ΅­μ–΄, 선택 사항)",
                    placeholder="예: 파리의 가을 κ±°λ¦¬μ—μ„œ 컀피λ₯Ό λ“€κ³  있음, λ”°λœ»ν•œ μ˜€ν›„ ν–‡μ‚΄",
                    lines=2, interactive=True
                )
                prompt_btn = gr.Button("ν”„λ‘¬ν”„νŠΈ 생성 (λ²ˆμ—­ 포함)", variant="primary")

            # Right column: read-only output textbox for the generated prompt.
            with gr.Column(scale=1):
                gr.Markdown("## 좜λ ₯ μ„Ήμ…˜")
                prompt_output = gr.Textbox(
                    label="μƒμ„±λœ Midjourney ν”„λ‘¬ν”„νŠΈ (영문)",
                    lines=15,
                    interactive=False
                )

        # Wire the button to the end-to-end prompt pipeline.
        prompt_btn.click(
            fn=generate_final_prompt,
            inputs=[model_image, item_image_2, item_image_3, person_description_ko, item_description_2_ko, item_description_3_ko, custom_prompt_ko],
            outputs=[prompt_output]
        )

    return demo

if __name__ == "__main__":
    # Startup diagnostics: report API-key and reference-data availability.
    if not GEMINI_API_KEY:
        print("κ²½κ³ : Gemini API ν‚€κ°€ μ„€μ •λ˜μ§€ μ•Šμ•˜μŠ΅λ‹ˆλ‹€. λ²ˆμ—­ 및 ν”„λ‘¬ν”„νŠΈ 생성이 μ œν•œλ  수 μžˆμŠ΅λ‹ˆλ‹€.")
    # Double-check the model name against Google AI Studio / the API docs.
    print("μ‚¬μš©ν•  Gemini λͺ¨λΈ: gemini-2.0-flash (API ν‚€ ν™˜κ²½μ—μ„œ μ‚¬μš© κ°€λŠ₯ν•œμ§€ 확인 ν•„μš”)")
    if clothing_references:
        print(f"{len(clothing_references)}개의 레퍼런슀λ₯Ό μ‚¬μš©ν•˜μ—¬ 앱을 μ‹œμž‘ν•©λ‹ˆλ‹€.")
    else:
        print("κ²½κ³ : clothing_references.json νŒŒμΌμ„ λ‘œλ“œν•˜μ§€ λͺ»ν–ˆκ±°λ‚˜ λΉ„μ–΄μžˆμŠ΅λ‹ˆλ‹€. 레퍼런슀 κΈ°λŠ₯ 없이 μ‹€ν–‰λ©λ‹ˆλ‹€.")

    demo_app = create_app()
    demo_app.queue()
    demo_app.launch()