# --- Hugging Face Spaces page header captured with this file (not app code) ---
# andreska's picture
# Try to replace textarea with markdown to display nice html
# c28c28f verified
# raw / history / blame
# 2.13 kB
import os
import streamlit as st
from datasets import load_dataset
from huggingface_hub import InferenceClient

# Read the Hugging Face API token from the environment (set HF_API_KEY in the
# host/Space settings); InferenceClient uses it to authenticate inference calls.
api_key = os.getenv("HF_API_KEY")
client = InferenceClient(api_key=api_key)

# Load the help-documentation dataset; its rows supply the text that is
# concatenated into the chat context below.
dataset = load_dataset("andreska/adregadocs", split="test")
# Function to read the content from the dataset
def read_dataset(dataset):
    """Concatenate the ``'text'`` field of every record into one string.

    Parameters
    ----------
    dataset : iterable of mapping
        Any iterable of records exposing a ``'text'`` key
        (e.g. a ``datasets.Dataset`` split).

    Returns
    -------
    str
        All ``'text'`` values joined with newlines; ``""`` when there
        are no records.
    """
    # Generator expression feeds str.join directly — no intermediate list.
    return "\n".join(item['text'] for item in dataset)
# Full help text, extracted once at startup; used as the system prompt
# when the "Search in Help" checkbox is ticked.
context = read_dataset(dataset)

# Inject custom CSS to change the background color to yellow
# (the CSS string is passed through unchanged via unsafe_allow_html).
st.markdown(
    """
<style>
body {
background-color: yellow;
}
</style>
""",
    unsafe_allow_html=True
)

st.title("Adrega AI Help")

# Checkbox controls whether the full docs context is sent with each question.
st.session_state.include_context = st.checkbox('Search in Help')

# Running chat transcript, persisted across Streamlit reruns.
if 'conversation' not in st.session_state:
    st.session_state.conversation = ""
def handle_submit():
    """Send the current question to the model and record the exchange.

    Reads the question from ``st.session_state.user_input``, builds the chat
    messages (using the docs ``context`` as the system prompt when
    'Search in Help' is checked), queries the hosted model, and appends the
    question/answer pair to ``st.session_state.conversation``.
    Writes a prompt to the page and returns early when the input is empty.
    """
    user_input = st.session_state.user_input
    # Guard clause: nothing to send.
    if not user_input:
        st.write("Please enter a question.")
        return

    # Only the system prompt differs between the two modes, so build it once
    # instead of duplicating the whole messages list.
    if st.session_state.include_context:
        system_content = f"Context: {context}"
    else:
        system_content = "Context: Supported OS in Adrega is Commodore 64 and Amiga. We print Gantt diagrams in dos."
    messages = [
        {"role": "system", "content": system_content},
        {"role": "user", "content": user_input},
    ]

    completion = client.chat.completions.create(
        model="Qwen/Qwen2.5-72B-Instruct",
        #model="Qwen/Qwen2.5-Coder-32B-Instruct",
        #model="HuggingFaceTB/SmolLM2-1.7B-Instruct",
        messages=messages,
        max_tokens=500
    )
    # huggingface_hub output dataclasses support dict-style access;
    # kept as in the original to preserve behavior exactly.
    answer = completion.choices[0].message['content']
    st.session_state.conversation += f"User: {user_input}\nAdrega AI: {answer}\n\n"
# Pressing Enter in the text input fires the callback via on_change.
st.text_input('Ask me a question', key='user_input', on_change=handle_submit)
# The button is a second, explicit way to submit the same question.
if st.button("Ask"):
    handle_submit()
# Render the accumulated transcript as markdown/HTML (nicer than a textarea).
st.markdown(st.session_state.conversation, unsafe_allow_html=True)