# chatbot / src / streamlit_app.py
# (uploaded by jonghhhh — commit 17353e2 verified, "Update src/streamlit_app.py")
# streamlit_app.py
import streamlit as st
import google.generativeai as genai
import os
import json
from io import StringIO
# ------------------------------------------------------
# On Hugging Face Spaces the API key is stored as a secret (env var):
# Settings → Secrets → New secret: GEMINI_API_KEY
# ------------------------------------------------------
# BUG FIX: the key was read from OPENAI_API_KEY, which never matches the
# GEMINI_API_KEY secret documented above — genai.configure() received None
# and every request failed. Read the correct variable.
api_key = os.environ.get("GEMINI_API_KEY")
genai.configure(api_key=api_key)

# Session-state initialisation — survives Streamlit's rerun-per-interaction.
if "messages" not in st.session_state:
    # Each element is one turn: {"user": <user text>, "ai": <model reply>}.
    st.session_state.messages = []
if "system_prompt" not in st.session_state:
    st.session_state.system_prompt = "당신은 μΉœμ ˆν•œ AI μ–΄μ‹œμŠ€ν„΄νŠΈμž…λ‹ˆλ‹€."

# Model handle; generation happens later via model.generate_content().
model = genai.GenerativeModel("gemma-3-27b-it")
# ------------------------------------------------------
# Streamlit UI — page chrome
# ------------------------------------------------------
st.set_page_config(
    page_title="Gemini 챗봇",
    page_icon="πŸ€–",
    layout="wide",
)
st.title("πŸ€– Google Gemini λŒ€ν™”ν˜• 챗봇")
st.caption("Hugging Face Spaces + Streamlit + Google Generative AI")
# μ‚¬μ΄λ“œλ°”: μ‹œμŠ€ν…œ ν”„λ‘¬ν”„νŠΈ μˆ˜μ • & 둜그 λ‹€μš΄λ‘œλ“œ
with st.sidebar:
st.subheader("βš™οΈ μ„€μ •")
new_system_prompt = st.text_area("μ‹œμŠ€ν…œ ν”„λ‘¬ν”„νŠΈ", st.session_state.system_prompt, height=100)
if st.button("λ³€κ²½ 적용"):
st.session_state.system_prompt = new_system_prompt
st.success("μ‹œμŠ€ν…œ ν”„λ‘¬ν”„νŠΈκ°€ λ³€κ²½λ˜μ—ˆμŠ΅λ‹ˆλ‹€.")
st.markdown("---")
if st.session_state.messages:
# JSON λ³€ν™˜
json_data = json.dumps(st.session_state.messages, ensure_ascii=False, indent=2)
st.download_button(
label="πŸ’Ύ λŒ€ν™” 둜그 JSON λ‹€μš΄λ‘œλ“œ",
data=json_data,
file_name="chat_log.json",
mime="application/json"
)
# Replay the conversation so far (Streamlit reruns the script on each event).
for turn in st.session_state.messages:
    st.chat_message("user").markdown(turn["user"])
    st.chat_message("assistant").markdown(turn["ai"])
# μ‚¬μš©μž μž…λ ₯
if prompt := st.chat_input("λ©”μ‹œμ§€λ₯Ό μž…λ ₯ν•˜μ„Έμš”. μ’…λ£Œν•˜λ €λ©΄ 'quit' μž…λ ₯"):
if prompt.lower() == "quit":
st.warning("λŒ€ν™”λ₯Ό μ’…λ£Œν•©λ‹ˆλ‹€. μƒˆλ‘œκ³ μΉ¨ν•˜λ©΄ λ‹€μ‹œ μ‹œμž‘ν•  수 μžˆμŠ΅λ‹ˆλ‹€.")
else:
# λŒ€ν™” μ»¨ν…μŠ€νŠΈ ꡬ성
context = f"μ‹œμŠ€ν…œ: {st.session_state.system_prompt}\n"
for msg in st.session_state.messages[-10:]: # 졜근 10개만 μœ μ§€
context += f"μ‚¬μš©μž: {msg['user']}\nAI: {msg['ai']}\n"
context += f"μ‚¬μš©μž: {prompt}\nAI:"
with st.chat_message("user"):
st.markdown(prompt)
try:
response = model.generate_content(context)
ai_response = response.text
except Exception as e:
ai_response = f"⚠️ 였λ₯˜ λ°œμƒ: {e}"
with st.chat_message("assistant"):
st.markdown(ai_response)
# λŒ€ν™” 기둝 μ €μž₯
st.session_state.messages.append({"user": prompt, "ai": ai_response})