File size: 1,528 Bytes
5c4fc85
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
2f3c7ce
 
 
 
 
 
 
 
 
 
5c4fc85
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
cd3907f
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
import streamlit as st

from langchain_huggingface import HuggingFaceEndpoint, ChatHuggingFace
from langchain.schema import HumanMessage, SystemMessage, AIMessage

# --- Page chrome and conversation state --------------------------------------
st.set_page_config(page_title="LangChain Demo", page_icon=":robot:")
st.header("Hey, I'm your DeepSeek")

# Seed the chat history exactly once per browser session; the system message
# fixes the assistant persona for every subsequent model call.
# (Fixed: the original used an inconsistent 5-space indent inside this `if`.)
if "sessionMessages" not in st.session_state:
    st.session_state.sessionMessages = [
        SystemMessage(content="You are a helpful assistant.")
    ]


def load_answer(question):
    """Append *question* to the chat history, query the model, and return the reply text.

    Side effect: both the user question and the assistant reply are appended
    to ``st.session_state.sessionMessages`` so context accumulates per call.
    """
    history = st.session_state.sessionMessages
    history.append(HumanMessage(content=question))

    reply = chat_model.invoke(history)

    # Normalise the model output to a plain string, whatever shape comes back.
    if isinstance(reply, AIMessage):
        answer = reply.content
    elif isinstance(reply, dict) and "content" in reply:
        answer = reply["content"]
    else:
        answer = str(reply)

    history.append(AIMessage(content=answer))
    return answer

def get_text():
    """Render the chat input box and return the user's current text."""
    return st.text_input("You: ")

# --- Model wiring -------------------------------------------------------------
# Remote HuggingFace inference endpoint wrapped in a chat-style interface.
llm = HuggingFaceEndpoint(
    repo_id="deepseek-ai/DeepSeek-R1-Distill-Qwen-32B",
    task="text-generation",
    max_new_tokens=512,
    do_sample=True,
    temperature=0.7,
    repetition_penalty=1.03,
)

chat_model = ChatHuggingFace(llm=llm)

# --- Main interaction ---------------------------------------------------------
user_input = get_text()
submit = st.button('Generate')

if submit:
    # Guard against blank submissions: the original called the model (and
    # polluted the session history) even when the text box was empty.
    if user_input.strip():
        response = load_answer(user_input)
        st.subheader("Answer: ")
        st.write(response)
    else:
        st.warning("Please enter a question first.")