code-slicer committed on
Commit
2381854
·
verified ·
1 Parent(s): 723d13b

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +17 -15
app.py CHANGED
@@ -180,11 +180,10 @@ def to_llm_mode():
180
  st.rerun()
181
 
182
  def _ensure_llm_state():
183
- st.session_state.setdefault("llm_mode", False) # ν’€μŠ€ν¬λ¦° λͺ¨λ“œμš©(κΈ°μ‘΄)
184
- st.session_state.setdefault("llm_inline", False) # βœ… 인라인 ν‘œμ‹œμš©
185
- st.session_state.setdefault("llm_history", [])
186
  st.session_state.setdefault("llm_intro_needed", False)
187
- st.session_state.setdefault("llm_input", "")
188
 
189
  def show_llm_inline():
190
  _ensure_llm_state()
@@ -251,22 +250,20 @@ def render_llm_followup(chat_container, inline=False):
251
 
252
  st.markdown("### β—Ž LLM 질문")
253
 
254
- # κΈ°μ‘΄ λŒ€ν™” λ Œλ”
255
  for m in st.session_state.get("llm_msgs", []):
256
  with st.chat_message(m["role"]):
257
  st.markdown(m["content"])
258
 
259
- # ⚠️ μœ„μ ― keyλŠ” 'llm_query'둜 μ‚¬μš© (이 key에 λŒ€ν•΄ μ–΄λ””μ„œλ„ 직접 λŒ€μž… κΈˆμ§€)
260
  user_msg = st.chat_input("무엇이든 λ¬Όμ–΄λ³΄μ„Έμš” (μ’…λ£Œν•˜λ €λ©΄ 'μ’…λ£Œ' μž…λ ₯)", key="llm_query")
261
  if not user_msg:
262
  return
263
 
264
  text = user_msg.strip()
265
 
266
- # μ’…λ£Œ λͺ…λ Ή 처리
267
  if text in {"μ’…λ£Œ", "quit", "exit"}:
268
  st.session_state["llm_inline"] = False
269
- st.session_state["llm_open"] = False
270
  st.rerun()
271
  return
272
 
@@ -274,16 +271,21 @@ def render_llm_followup(chat_container, inline=False):
274
  st.session_state.setdefault("llm_msgs", [])
275
  st.session_state["llm_msgs"].append({"role": "user", "content": text})
276
 
277
- # LLM 호좜 (μ˜ˆμ™Έ μ•ˆμ „)
278
  try:
279
- bot = call_llm(text) # κΈ°μ‘΄ ν•¨μˆ˜ μ‚¬μš©
280
- except Exception as e:
 
 
 
 
 
281
  bot = "⚠️ LLM 응닡을 λ°›μ§€ λͺ»ν–ˆμŠ΅λ‹ˆλ‹€. Ollama μ„œλ²„λ₯Ό 확인해 μ£Όμ„Έμš”."
282
- st.session_state["llm_msgs"].append({"role": "assistant", "content": bot})
283
 
284
- # β›” μ—¬κΈ°μ„œ st.session_state['llm_query']λ‚˜ 'llm_input'을 직접 λŒ€μž…/μ‚­μ œν•˜μ§€ λ§ˆμ„Έμš”.
285
  st.rerun()
286
 
 
287
  def render_llm_inline_if_open(chat_container):
288
  """llm_inline ν”Œλž˜κ·Έκ°€ 켜져 있으면 인라인 LLM νŒ¨λ„μ„ κ·Έλ¦½λ‹ˆλ‹€."""
289
  _ensure_llm_state()
@@ -515,7 +517,7 @@ def region_ui(travel_df, external_score_df, festival_df, weather_df, package_df,
515
 
516
  # μΆ”μ²œ κ°€λŠ₯ν•œ μ—¬ν–‰μ§€κ°€ μ—†λ‹€λ©΄ μ’…λ£Œ λ‹¨κ³„λ‘œ μ „ν™˜
517
  if remaining.empty and sample_key not in st.session_state:
518
- st.session_state[step_key] = "recommand_end"
519
  st.rerun()
520
  return
521
 
@@ -1393,7 +1395,7 @@ def emotion_ui(travel_df, external_score_df, festival_df, weather_df, package_df
1393
  return
1394
 
1395
  # ────────────────── 3) μΆ”μ²œ μ’…λ£Œ 단계: 더 이상 μΆ”μ²œν•  μ—¬ν–‰μ§€κ°€ 없을 λ•Œ
1396
- elif st.session_state[step_key] == "recommend_place_end":
1397
  with chat_container:
1398
  # 3.1) λ©”μ‹œμ§€ 좜λ ₯
1399
  log_and_render(
 
180
  st.rerun()
181
 
182
  def _ensure_llm_state():
183
+ st.session_state.setdefault("llm_mode", False)
184
+ st.session_state.setdefault("llm_inline", False)
 
185
  st.session_state.setdefault("llm_intro_needed", False)
186
+ st.session_state.setdefault("llm_msgs", [])
187
 
188
  def show_llm_inline():
189
  _ensure_llm_state()
 
250
 
251
  st.markdown("### β—Ž LLM 질문")
252
 
 
253
  for m in st.session_state.get("llm_msgs", []):
254
  with st.chat_message(m["role"]):
255
  st.markdown(m["content"])
256
 
 
257
  user_msg = st.chat_input("무엇이든 λ¬Όμ–΄λ³΄μ„Έμš” (μ’…λ£Œν•˜λ €λ©΄ 'μ’…λ£Œ' μž…λ ₯)", key="llm_query")
258
  if not user_msg:
259
  return
260
 
261
  text = user_msg.strip()
262
 
263
+ # μ’…λ£Œ λͺ…λ Ή
264
  if text in {"μ’…λ£Œ", "quit", "exit"}:
265
  st.session_state["llm_inline"] = False
266
+ st.session_state["llm_mode"] = False # ← llm_open λŒ€μ‹  llm_mode μ‚¬μš©
267
  st.rerun()
268
  return
269
 
 
271
  st.session_state.setdefault("llm_msgs", [])
272
  st.session_state["llm_msgs"].append({"role": "user", "content": text})
273
 
274
+ # βœ… Ollama둜 μ‹€μ œ 호좜
275
  try:
276
+ bot = _call_ollama_chat(
277
+ messages=st.session_state["llm_msgs"],
278
+ system_prompt=KOREAN_SYSTEM_PROMPT
279
+ )
280
+ if not bot:
281
+ bot = "⚠️ LLM 응닡을 λ°›μ§€ λͺ»ν–ˆμŠ΅λ‹ˆλ‹€. Ollama μ„œλ²„λ₯Ό 확인해 μ£Όμ„Έμš”."
282
+ except Exception:
283
  bot = "⚠️ LLM 응닡을 λ°›μ§€ λͺ»ν–ˆμŠ΅λ‹ˆλ‹€. Ollama μ„œλ²„λ₯Ό 확인해 μ£Όμ„Έμš”."
 
284
 
285
+ st.session_state["llm_msgs"].append({"role": "assistant", "content": bot})
286
  st.rerun()
287
 
288
+
289
  def render_llm_inline_if_open(chat_container):
290
  """llm_inline ν”Œλž˜κ·Έκ°€ 켜져 있으면 인라인 LLM νŒ¨λ„μ„ κ·Έλ¦½λ‹ˆλ‹€."""
291
  _ensure_llm_state()
 
517
 
518
  # μΆ”μ²œ κ°€λŠ₯ν•œ μ—¬ν–‰μ§€κ°€ μ—†λ‹€λ©΄ μ’…λ£Œ λ‹¨κ³„λ‘œ μ „ν™˜
519
  if remaining.empty and sample_key not in st.session_state:
520
+ st.session_state[step_key] = "recommend_end"
521
  st.rerun()
522
  return
523
 
 
1395
  return
1396
 
1397
  # ────────────────── 3) μΆ”μ²œ μ’…λ£Œ 단계: 더 이상 μΆ”μ²œν•  μ—¬ν–‰μ§€κ°€ 없을 λ•Œ
1398
+ elif st.session_state[step_key] == "recommend_places_end":
1399
  with chat_container:
1400
  # 3.1) λ©”μ‹œμ§€ 좜λ ₯
1401
  log_and_render(