code-slicer committed on
Commit
acb7474
·
verified ·
1 Parent(s): 9f2a1bb

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +57 -51
app.py CHANGED
@@ -178,6 +178,18 @@ def to_llm_mode():
178
  st.session_state["llm_mode"] = True
179
  st.session_state["llm_intro_needed"] = True
180
  st.rerun()
 
 
 
 
 
 
 
 
 
 
 
 
181
 
182
  def _build_structured_user_prompt(user_text: str) -> str:
183
  # ๋ถˆํ•„์š”ํ•œ ๋ž˜ํ•‘ ์—†์ด, ๋ชจ๋ธ์ด JSON๋งŒ ๋‚ด๋„๋ก ๊น”๋”ํžˆ ์ „๋‹ฌ
@@ -240,13 +252,11 @@ def _ensure_llm_state():
240
  st.session_state.setdefault("llm_intro_needed", False)
241
  st.session_state.setdefault("llm_input", "")
242
 
243
- def render_llm_followup(chat_container):
244
- """
245
- ์ถ”์ฒœ ํ”Œ๋กœ์šฐ ์ข…๋ฃŒ ํ›„ ์ž๋™ ์ง„์ž…ํ•˜๋Š” ์ž์œ ์งˆ๋ฌธ(LLM) ๋ชจ๋“œ UI.
246
- """
247
  _ensure_llm_state()
248
  MAX_TURNS = 6
249
 
 
250
  if st.session_state.get("llm_intro_needed"):
251
  log_and_render(
252
  "๐Ÿง  ์ด์ œ๋ถ€ํ„ฐ๋Š” ์ž์œ  ์งˆ๋ฌธ ๋ชจ๋“œ์˜ˆ์š”. ์—ฌํ–‰ ์™ธ์—๋„ ๋ญ๋“  ํ•œ๊ตญ์–ด๋กœ ๋ฌผ์–ด๋ณด์„ธ์š”!",
@@ -256,75 +266,70 @@ def render_llm_followup(chat_container):
256
  )
257
  st.session_state["llm_intro_needed"] = False
258
 
259
- # ์ž…๋ ฅ์ฐฝ
 
 
 
 
 
 
260
  q = st.text_input(
261
  "LLM ์งˆ๋ฌธ",
262
  placeholder="๋ฌด์—‡์ด๋“  ๋ฌผ์–ด๋ณด์„ธ์š” (์ข…๋ฃŒํ•˜๋ ค๋ฉด '์ข…๋ฃŒ' ์ž…๋ ฅ)",
263
  key="llm_input"
264
  )
265
 
266
- # ๋™์ž‘
267
  if q:
268
  # ์ข…๋ฃŒ ํ‚ค์›Œ๋“œ
269
  if q.strip() in ("์ข…๋ฃŒ", "quit", "exit"):
270
- st.session_state["llm_mode"] = False
271
- st.session_state["llm_input"] = ""
272
- log_and_render(
273
- "LLM ๋ชจ๋“œ๋ฅผ ์ข…๋ฃŒํ• ๊ฒŒ์š”. ํ•„์š”ํ•˜์‹ค ๋•Œ ์–ธ์ œ๋“  ๋‹ค์‹œ ์งˆ๋ฌธํ•ด ์ฃผ์„ธ์š”! โœจ",
274
- sender="bot",
275
- chat_container=chat_container,
276
- key=f"llm_end_{random.randint(1,999999)}"
277
- )
278
- st.rerun()
279
- return
 
 
 
 
 
 
 
 
 
 
280
 
281
- # ์‚ฌ์šฉ์ž ๋ฒ„๋ธ”
282
- log_and_render(
283
- q,
284
- sender="user",
285
- chat_container=chat_container,
286
- key=f"llm_user_{random.randint(1,999999)}"
287
- )
288
  st.session_state.llm_history.append({"role": "user", "content": q})
289
 
290
- # ์ตœ๊ทผ ํžˆ์Šคํ† ๋ฆฌ๋กœ LLM ํ˜ธ์ถœ
291
  msgs = st.session_state.llm_history[-(MAX_TURNS-1):]
292
  a = _call_ollama_chat(
293
  messages=msgs,
294
  system_prompt=KOREAN_SYSTEM_PROMPT,
295
  temperature=0.8, top_p=0.9, top_k=40, repeat_penalty=1.1
296
  )
297
-
298
  if not a:
299
- log_and_render(
300
- "โš ๏ธ LLM ์‘๋‹ต์„ ๋ฐ›์ง€ ๋ชปํ–ˆ์Šต๋‹ˆ๋‹ค. Ollama ์„œ๋ฒ„ ์ƒํƒœ๋ฅผ ํ™•์ธํ•ด ์ฃผ์„ธ์š”.",
301
- sender="bot",
302
- chat_container=chat_container,
303
- key=f"llm_err_{random.randint(1,999999)}"
304
- )
305
  else:
306
- log_and_render(
307
- a,
308
- sender="bot",
309
- chat_container=chat_container,
310
- key=f"llm_bot_{random.randint(1,999999)}"
311
- )
312
  st.session_state.llm_history.append({"role": "assistant", "content": a})
313
-
314
- # ์ž…๋ ฅ ์ดˆ๊ธฐํ™”
315
  st.session_state["llm_input"] = ""
316
 
317
- # ํ•˜๋‹จ ์ข…๋ฃŒ ๋ฒ„ํŠผ
318
- cols = st.columns(3)
319
- with cols[0]:
320
- if st.button("๐Ÿ”š LLM ๋ชจ๋“œ ์ข…๋ฃŒ"):
 
 
321
  st.session_state["llm_mode"] = False
322
- log_and_render(
323
- "LLM ๋ชจ๋“œ๋ฅผ ์ข…๋ฃŒํ• ๊ฒŒ์š”. ๋‹ค์Œ์— ๋˜ ๋งŒ๋‚˜์š”! ๐Ÿ‘‹",
324
- sender="bot",
325
- chat_container=chat_container,
326
- key=f"llm_end_btn_{random.randint(1,999999)}"
327
- )
328
  st.rerun()
329
 
330
  # ์ง€์—ฐ ์ดˆ๊ธฐํ™”: import ์‹œ์ ์—๋Š” ๋ฐ์ดํ„ฐ ์ ‘๊ทผ ๊ธˆ์ง€, ์—ฌ๊ธฐ์„œ ํ•œ ๋ฒˆ๋งŒ ์ฃผ์ž…
@@ -1695,8 +1700,9 @@ def main():
1695
  init_session()
1696
  chat_container = st.container()
1697
 
1698
- if st.session_state.get("llm_mode"):
1699
- render_llm_followup(chat_container)
 
1700
  return
1701
 
1702
  # ๐ŸŽ›๏ธ ๋งํ’์„ /ํ‘œ์‹œ ์˜ต์…˜ (โ‘ข, โ‘ฃ)
 
178
  st.session_state["llm_mode"] = True
179
  st.session_state["llm_intro_needed"] = True
180
  st.rerun()
181
+
182
+ def _ensure_llm_state():
183
+ st.session_state.setdefault("llm_mode", False) # ํ’€์Šคํฌ๋ฆฐ ๋ชจ๋“œ์šฉ(๊ธฐ์กด)
184
+ st.session_state.setdefault("llm_inline", False) # โœ… ์ธ๋ผ์ธ ํ‘œ์‹œ์šฉ
185
+ st.session_state.setdefault("llm_history", [])
186
+ st.session_state.setdefault("llm_intro_needed", False)
187
+ st.session_state.setdefault("llm_input", "")
188
+
189
+ def show_llm_inline():
190
+ _ensure_llm_state()
191
+ st.session_state["llm_inline"] = True
192
+ st.session_state["llm_intro_needed"] = True
193
 
194
  def _build_structured_user_prompt(user_text: str) -> str:
195
  # ๋ถˆํ•„์š”ํ•œ ๋ž˜ํ•‘ ์—†์ด, ๋ชจ๋ธ์ด JSON๋งŒ ๋‚ด๋„๋ก ๊น”๋”ํžˆ ์ „๋‹ฌ
 
252
  st.session_state.setdefault("llm_intro_needed", False)
253
  st.session_state.setdefault("llm_input", "")
254
 
255
+ def render_llm_followup(chat_container, inline=False):
 
 
 
256
  _ensure_llm_state()
257
  MAX_TURNS = 6
258
 
259
+ # ์ธ๋ผ์ธ์ด๋ฉด ์•ˆ๋‚ด๋„ ์ฑ„ํŒ…์ฐฝ ๋งํ’์„  ํ˜•ํƒœ๋กœ ๋ Œ๋”
260
  if st.session_state.get("llm_intro_needed"):
261
  log_and_render(
262
  "๐Ÿง  ์ด์ œ๋ถ€ํ„ฐ๋Š” ์ž์œ  ์งˆ๋ฌธ ๋ชจ๋“œ์˜ˆ์š”. ์—ฌํ–‰ ์™ธ์—๋„ ๋ญ๋“  ํ•œ๊ตญ์–ด๋กœ ๋ฌผ์–ด๋ณด์„ธ์š”!",
 
266
  )
267
  st.session_state["llm_intro_needed"] = False
268
 
269
+ # ์ธ๋ผ์ธ ํŒจ๋„ ๊ตฌ์—ญ
270
+ if inline:
271
+ st.divider()
272
+ st.markdown("#### ๐Ÿค– LLM ์งˆ๋ฌธ")
273
+ else:
274
+ st.markdown("### ๐Ÿค– LLM ์งˆ๋ฌธ")
275
+
276
  q = st.text_input(
277
  "LLM ์งˆ๋ฌธ",
278
  placeholder="๋ฌด์—‡์ด๋“  ๋ฌผ์–ด๋ณด์„ธ์š” (์ข…๋ฃŒํ•˜๋ ค๋ฉด '์ข…๋ฃŒ' ์ž…๋ ฅ)",
279
  key="llm_input"
280
  )
281
 
 
282
  if q:
283
  # ์ข…๋ฃŒ ํ‚ค์›Œ๋“œ
284
  if q.strip() in ("์ข…๋ฃŒ", "quit", "exit"):
285
+ if inline:
286
+ st.session_state["llm_inline"] = False # โœ… ์ธ๋ผ์ธ๋งŒ ๋‹ซ๊ธฐ
287
+ st.session_state["llm_input"] = ""
288
+ log_and_render(
289
+ "LLM ํŒจ๋„์„ ๋‹ซ์„๊ฒŒ์š”. ํ•„์š”ํ•˜์‹ค ๋•Œ ๋‹ค์‹œ ์งˆ๋ฌธํ•ด ์ฃผ์„ธ์š”! โœจ",
290
+ sender="bot", chat_container=chat_container,
291
+ key=f"llm_end_inline_{random.randint(1,999999)}"
292
+ )
293
+ return
294
+ else:
295
+ st.session_state["llm_mode"] = False
296
+ st.session_state["llm_input"] = ""
297
+ log_and_render(
298
+ "LLM ๋ชจ๋“œ๋ฅผ ์ข…๋ฃŒํ• ๊ฒŒ์š”. ํ•„์š”ํ•˜์‹ค ๋•Œ ๋‹ค์‹œ ์งˆ๋ฌธํ•ด ์ฃผ์„ธ์š”! โœจ",
299
+ sender="bot", chat_container=chat_container,
300
+ key=f"llm_end_full_{random.randint(1,999999)}"
301
+ )
302
+ # ํ’€์Šคํฌ๋ฆฐ ๋ชจ๋“œ๋งŒ rerun
303
+ st.rerun()
304
+ return
305
 
306
+ log_and_render(q, sender="user", chat_container=chat_container,
307
+ key=f"llm_user_{random.randint(1,999999)}")
 
 
 
 
 
308
  st.session_state.llm_history.append({"role": "user", "content": q})
309
 
 
310
  msgs = st.session_state.llm_history[-(MAX_TURNS-1):]
311
  a = _call_ollama_chat(
312
  messages=msgs,
313
  system_prompt=KOREAN_SYSTEM_PROMPT,
314
  temperature=0.8, top_p=0.9, top_k=40, repeat_penalty=1.1
315
  )
 
316
  if not a:
317
+ log_and_render("โš ๏ธ LLM ์‘๋‹ต์„ ๋ฐ›์ง€ ๋ชปํ–ˆ์Šต๋‹ˆ๋‹ค. Ollama ์„œ๋ฒ„๋ฅผ ํ™•์ธํ•ด ์ฃผ์„ธ์š”.",
318
+ sender="bot", chat_container=chat_container,
319
+ key=f"llm_err_{random.randint(1,999999)}")
 
 
 
320
  else:
321
+ log_and_render(a, sender="bot", chat_container=chat_container,
322
+ key=f"llm_bot_{random.randint(1,999999)}")
 
 
 
 
323
  st.session_state.llm_history.append({"role": "assistant", "content": a})
 
 
324
  st.session_state["llm_input"] = ""
325
 
326
+ # ๋‹ซ๊ธฐ ๋ฒ„ํŠผ
327
+ if inline:
328
+ if st.button("๐Ÿ”ฝ LLM ํŒจ๋„ ๋‹ซ๊ธฐ", key="llm_close_inline"):
329
+ st.session_state["llm_inline"] = False
330
+ else:
331
+ if st.button("๐Ÿ”š LLM ๋ชจ๋“œ ์ข…๋ฃŒ", key="llm_close_full"):
332
  st.session_state["llm_mode"] = False
 
 
 
 
 
 
333
  st.rerun()
334
 
335
  # ์ง€์—ฐ ์ดˆ๊ธฐํ™”: import ์‹œ์ ์—๋Š” ๋ฐ์ดํ„ฐ ์ ‘๊ทผ ๊ธˆ์ง€, ์—ฌ๊ธฐ์„œ ํ•œ ๋ฒˆ๋งŒ ์ฃผ์ž…
 
1700
  init_session()
1701
  chat_container = st.container()
1702
 
1703
+ # โœ… ํ’€์Šคํฌ๋ฆฐ์ผ ๋•Œ๋งŒ ์กฐ๊ธฐ ๋ฆฌํ„ด
1704
+ if st.session_state.get("llm_mode") and not st.session_state.get("llm_inline", False):
1705
+ render_llm_followup(chat_container, inline=False)
1706
  return
1707
 
1708
  # ๐ŸŽ›๏ธ ๋งํ’์„ /ํ‘œ์‹œ ์˜ต์…˜ (โ‘ข, โ‘ฃ)