| |
| import streamlit as st |
| from models import load_model |
|
|
| |
# Load the text-generation model once at module import. Streamlit re-executes
# this script on every user interaction, so load_model() is presumably cached
# (e.g. via @st.cache_resource) — TODO confirm, otherwise the model reloads
# on each rerun.
demo = load_model()
|
|
| |
# Browser-tab/page chrome. Must run before any other st.* call on the page.
_PAGE_SETTINGS = {
    "page_title": "DeepSeek Chatbot - ruslanmv.com",
    "page_icon": "🤖",
    "layout": "centered",
}
st.set_page_config(**_PAGE_SETTINGS)
|
|
| |
# Seed the chat history exactly once per browser session; session_state is
# dict-like, so setdefault leaves an existing history untouched on reruns.
st.session_state.setdefault("messages", [])
|
|
| |
# Sidebar: system prompt plus the three sampling knobs read by the chat
# handler below. Widget labels, ranges, and defaults are unchanged.
with st.sidebar:
    st.header("Model Configuration")

    # Persona/instruction text prepended to every generation request.
    system_message = st.text_area(
        "System Message",
        height=100,
        value="You are a friendly Chatbot created by ruslanmv.com",
    )

    # Generation-length cap and sampling parameters.
    max_tokens = st.slider("Max Tokens", min_value=1, max_value=4000, step=10, value=512)
    temperature = st.slider("Temperature", min_value=0.1, max_value=4.0, step=0.1, value=0.7)
    top_p = st.slider("Top-p (nucleus sampling)", min_value=0.1, max_value=1.0, step=0.1, value=0.9)
|
|
| |
# Page header.
st.title("🤖 DeepSeek Chatbot")
st.caption("Powered by ruslanmv.com - Configure parameters in the sidebar")

# Replay the stored conversation so it survives Streamlit's rerun-per-event
# execution model.
for entry in st.session_state.messages:
    role, content = entry["role"], entry["content"]
    with st.chat_message(role):
        st.markdown(content)
|
|
| |
# Handle one chat turn: record the user message, generate a reply, render and
# store it. Fix: the original sent only the system message plus the latest
# prompt to the model, so the stored history was displayed but never used —
# the bot had no memory. We now build the prompt from the full transcript
# (identical to the old prompt on the first turn, so behavior is
# backward-compatible there).
if prompt := st.chat_input("Type your message..."):
    # Record and echo the user's turn.
    st.session_state.messages.append({"role": "user", "content": prompt})
    with st.chat_message("user"):
        st.markdown(prompt)

    try:
        with st.spinner("Generating response..."):
            # Serialize every stored turn (the new user message is already
            # appended above) into the "User:/Assistant:" transcript format.
            transcript = "\n".join(
                f"{'User' if m['role'] == 'user' else 'Assistant'}: {m['content']}"
                for m in st.session_state.messages
            )
            # NOTE(review): assumes demo() accepts a plain prompt string and
            # that max_length counts prompt + completion tokens (HF pipeline
            # semantics; max_new_tokens would cap only the reply) — confirm
            # against load_model().
            response = demo(
                f"{system_message}\n\n{transcript}\nAssistant:",
                max_length=max_tokens,
                temperature=temperature,
                top_p=top_p,
            )

        # Render the reply and persist it so the next rerun replays it.
        with st.chat_message("assistant"):
            st.markdown(response)
        st.session_state.messages.append({"role": "assistant", "content": response})

    except Exception as e:
        # Surface generation failures in the UI instead of crashing the app;
        # the user's message stays in history so they can retry.
        st.error(f"An error occurred: {str(e)}")
|
|