File size: 2,451 Bytes
c1c4977
24d620f
b6e1d0d
9e80ceb
 
68bd3e2
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
31df602
8b0bcd3
31df602
 
 
 
 
 
 
0f36243
8b0bcd3
31df602
 
 
68bd3e2
 
 
 
d943ab1
8ed1db5
8beee20
4716ee7
a50d8f6
54c1db3
 
972f7cf
94ee0d3
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
import os
import streamlit as st
from datasets import load_dataset
from huggingface_hub import InferenceClient

# Get the API key from the environment variable.
# If HF_API_KEY is unset, api_key is None and InferenceClient falls back to
# unauthenticated requests (rate-limited).
api_key = os.getenv("HF_API_KEY")
client = InferenceClient(api_key=api_key)

# Load the dataset
# NOTE(review): this runs on every Streamlit rerun (each widget interaction);
# consider wrapping in st.cache_data / st.cache_resource — confirm with profiling.
dataset = load_dataset("andreska/adregadocs", split="test")

# Function to read the content from the dataset
def read_dataset(dataset):
    """Join the 'text' field of every record into one newline-separated string.

    Args:
        dataset: any iterable of mapping-like records with a 'text' key
                 (e.g. a HF `datasets` split).

    Returns:
        str: all record texts joined with '\n'; empty string for an empty dataset.
    """
    # Generator + join instead of manual append loop: same output, idiomatic,
    # and avoids building an intermediate list.
    return "\n".join(item['text'] for item in dataset)

# Flatten the whole dataset into one string; sent as the system-prompt
# context on every model call in handle_submit.
context = read_dataset(dataset)

# Inject custom CSS:
# - .scrollable-div: fixed-height scrollable box that holds the conversation
# - .block-container: tighten Streamlit's default page padding
st.markdown(
    """
    <style>
        .scrollable-div {
            height: 390px;
            width: 100%;
            overflow-y: auto;
            padding: 10px;
            border: 1px solid #ccc;
        }
    
       .block-container {
            padding-top: 3rem;
            padding-bottom: 0rem;
            padding-left: 5rem;
            padding-right: 5rem;
        }
    </style>
    """,
    unsafe_allow_html=True
)

placeholder = st.empty()
# Define the placeholder globally (outside columns).
# Use .get() instead of attribute access: `st.session_state.conversation`
# raises AttributeError on any rerun where session_state is non-empty (e.g.
# only 'user_input' has been set) but 'conversation' has not been stored yet;
# .get() returns None (falsy) in that case and we show the welcome message.
if st.session_state.get("conversation"):
    placeholder.markdown(f'<div class="scrollable-div">{st.session_state.conversation}</div>', unsafe_allow_html=True)
else:
    # Plain string — the original f-string had no placeholders.
    placeholder.markdown('<div class="scrollable-div"><p>Welcome! I am your Adrega AI assistant</p></div>', unsafe_allow_html=True)

def handle_submit():
    """Send the current text-input value (plus dataset context) to the model
    and stream the answer into the shared placeholder.

    Reads the module-level `client`, `context` and `placeholder`; stores the
    rendered answer in st.session_state.conversation so it survives Streamlit
    reruns. Empty input is silently ignored (guard clause).
    """
    user_input = st.session_state.user_input

    # Guard clause instead of wrapping the whole body in `if user_input:`.
    if not user_input:
        return

    messages = [
        {"role": "system", "content": f"Context: {context}"},
        {"role": "user", "content": user_input}
    ]

    response = client.chat.completions.create(
        model="Qwen/Qwen2.5-72B-Instruct",
        messages=messages,
        max_tokens=1000,
        stream=True
    )

    answer = ""
    for chunk in response:
        # The final stream chunk can carry a None delta content; `or ""`
        # prevents a `str + None` TypeError that aborted the stream.
        answer += chunk['choices'][0]['delta']['content'] or ""
        placeholder.markdown(f'<div class="scrollable-div"><p>{answer}</p></div>', unsafe_allow_html=True)

    # NOTE(review): `answer` is model output interpolated into HTML with
    # unsafe_allow_html=True — any markup it contains renders verbatim;
    # consider html.escape() if that is not intended.
    st.session_state.conversation = f"<p>{answer}</p>"
    placeholder.markdown(f'<div class="scrollable-div">{st.session_state.conversation}</div>', unsafe_allow_html=True)

# Pressing Enter in the text box triggers handle_submit via the on_change callback.
st.text_input('Ask me a question', key='user_input', on_change=handle_submit)

# NOTE(review): clicking "Ask" right after editing the text fires on_change AND
# this branch in the same rerun, so the model can be queried twice for one
# question — confirm whether the button is still needed alongside on_change.
if st.button("Ask"):
    handle_submit()