|
|
| import streamlit as st |
| import requests |
| import logging |
|
|
| |
# Module-level logging at INFO so request/response traffic (see query())
# is visible in the server console.
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
|
|
| |
# Streamlit page setup — set_page_config must be the first st.* command
# executed in the script, so keep this block at the top.
st.set_page_config(
    page_title="DeepSeek Chatbot - ruslanmv.com",
    page_icon="🤖",
    layout="centered"
)
|
|
| |
# Lazily create the chat history; st.session_state is a MutableMapping
# that persists values across Streamlit reruns, so this only initializes
# the list on the very first run.
st.session_state.setdefault("messages", [])
|
|
| |
# Sidebar: model choice and sampling parameters for the Hugging Face
# text-generation request built in the chat handler below.
with st.sidebar:
    st.header("Model Configuration")

    # Only one model is currently exposed; append repo ids here to offer more.
    model_options = [
        "deepseek-ai/DeepSeek-R1-Distill-Qwen-32B",
    ]
    selected_model = st.selectbox("Select Model", model_options, index=0)

    # NOTE(review): system_message is collected but never referenced in the
    # request payload below — presumably it was meant to be prepended to the
    # user prompt; confirm intended behavior.
    system_message = st.text_area(
        "System Message",
        value="You are a friendly Chatbot created by ruslanmv.com",
        height=100
    )

    # Upper bound on generated tokens (min=1, max=4000, default=512).
    max_tokens = st.slider(
        "Max Tokens",
        1, 4000, 512
    )

    # Sampling temperature; higher values yield more random completions.
    temperature = st.slider(
        "Temperature",
        0.1, 4.0, 0.7
    )

    # Nucleus-sampling cutoff: probability mass considered at each step.
    top_p = st.slider(
        "Top-p",
        0.1, 1.0, 0.9
    )
|
|
| |
def query(payload, api_url):
    """POST *payload* to the Hugging Face Inference API at *api_url*.

    Authenticates with the ``HF_TOKEN`` stored in Streamlit secrets and
    returns the decoded JSON response body (left to the caller to
    validate, since the API returns error objects with non-2xx codes).

    Raises:
        requests.RequestException: on network failure or timeout.
        ValueError: if the response body is not valid JSON.
    """
    headers = {"Authorization": f"Bearer {st.secrets['HF_TOKEN']}"}
    # Lazy %-args: the payload is only stringified if the record is emitted.
    logger.info("Sending request to %s with payload: %s", api_url, payload)
    # timeout is required — without it a stalled endpoint would hang the
    # Streamlit script (and the user's session) indefinitely.
    response = requests.post(api_url, headers=headers, json=payload, timeout=60)
    logger.info("Received response: %s, %s", response.status_code, response.text)
    return response.json()
|
|
| |
# Main-page header.
st.title("🤖 DeepSeek Chatbot")
st.caption("Powered by Hugging Face Inference API - Configure in sidebar")
|
|
| |
# Re-render the whole conversation on every rerun — Streamlit redraws the
# page from scratch each time a widget changes, so history must be replayed.
for message in st.session_state.messages:
    with st.chat_message(message["role"]):
        st.markdown(message["content"])
|
|
| |
# Chat handler: runs once per rerun when the user submits a message.
if prompt := st.chat_input("Type your message..."):
    # Persist the user turn first so it survives the next rerun.
    st.session_state.messages.append({"role": "user", "content": prompt})

    with st.chat_message("user"):
        st.markdown(prompt)

    try:
        with st.spinner("Generating response..."):
            # Text-generation payload; return_full_text=False requests only
            # the completion, not the echoed prompt.
            # NOTE(review): the sidebar's system_message is not included
            # here — confirm whether it should be merged into "inputs".
            payload = {
                "inputs": prompt,
                "parameters": {
                    "max_new_tokens": max_tokens,
                    "temperature": temperature,
                    "top_p": top_p,
                    "return_full_text": False
                }
            }

            # Serverless Inference API endpoint for the selected model.
            api_url = f"https://api-inference.huggingface.co/models/{selected_model}"
            logger.info(f"Selected model: {selected_model}, API URL: {api_url}")

            output = query(payload, api_url)

            # A successful call returns a list of dicts each carrying
            # 'generated_text'; anything else (e.g. an API error object)
            # falls through to the error branch below.
            if isinstance(output, list) and len(output) > 0 and 'generated_text' in output[0]:
                assistant_response = output[0]['generated_text']
                logger.info(f"Generated response: {assistant_response}")

                with st.chat_message("assistant"):
                    st.markdown(assistant_response)

                # Persist the assistant turn as well.
                st.session_state.messages.append({"role": "assistant", "content": assistant_response})
            else:
                logger.error(f"Unexpected API response: {output}")
                st.error("Error: Unable to generate a response. Please try again.")

    except Exception as e:
        # Top-level boundary: surface any failure (network error, JSON
        # decode, missing HF_TOKEN secret) to the user instead of crashing.
        logger.error(f"Application Error: {str(e)}", exc_info=True)
        st.error(f"Application Error: {str(e)}")
|
|