import os

import streamlit as st
from datasets import load_dataset
from huggingface_hub import InferenceClient

# API key comes from the environment so it is never hard-coded in the source.
api_key = os.getenv("HF_API_KEY")
client = InferenceClient(api_key=api_key)

# Reference documents that provide the context for every question.
dataset = load_dataset("andreska/adregadocs", split="test")


def read_dataset(dataset):
    """Concatenate the 'text' field of every dataset row into one context string."""
    return "\n".join(item["text"] for item in dataset)


context = read_dataset(dataset)

# Inject custom CSS.
# NOTE(review): the original style block was lost in extraction; the empty
# string is kept so the call site stays intact — restore the CSS here.
st.markdown(
    """
    """,
    unsafe_allow_html=True,
)

# Single placeholder reused for the welcome message, the streaming answer,
# and the persisted conversation.
placeholder = st.empty()

# BUG FIX: the original read st.session_state.conversation directly, which
# raises AttributeError on the very first run (the key does not exist yet).
# .get() returns None instead, so the welcome branch is taken.
conversation = st.session_state.get("conversation")
if conversation:
    placeholder.markdown(
        # NOTE(review): original wrapper markup was stripped by extraction;
        # a plain chat div is assumed — confirm the intended classes.
        f'<div class="chat">{conversation}</div>',
        unsafe_allow_html=True,
    )
else:
    placeholder.markdown(
        '<div class="chat">Welcome! I am your Adrega AI assistant</div>',
        unsafe_allow_html=True,
    )


def handle_submit():
    """Send the user's question (prefixed with the dataset context) to the
    model and stream the answer into the placeholder.

    Reads st.session_state.user_input; writes st.session_state.conversation.
    """
    user_input = st.session_state.get("user_input", "")
    if not user_input:
        return

    messages = [
        {"role": "system", "content": f"Context: {context}"},
        {"role": "user", "content": user_input},
    ]
    response = client.chat.completions.create(
        model="Qwen/Qwen2.5-72B-Instruct",
        messages=messages,
        max_tokens=1000,
        stream=True,
    )

    answer = ""
    for chunk in response:
        # BUG FIX: the last streamed delta may carry content=None (end-of-stream
        # marker); guard it so we never concatenate None. Attribute access is
        # the documented API for huggingface_hub stream chunks.
        piece = chunk.choices[0].delta.content or ""
        answer += piece
        # Re-render progressively so the user sees the answer as it streams.
        placeholder.markdown(
            f'<div class="chat">{answer}</div>',
            unsafe_allow_html=True,
        )

    # Persist the finished answer so the next rerun can re-display it.
    st.session_state.conversation = f'<div class="chat">{answer}</div>'
    placeholder.markdown(st.session_state.conversation, unsafe_allow_html=True)


# The text input triggers handle_submit on Enter; the button offers an
# explicit click path to the same handler.
st.text_input("Ask me a question", key="user_input", on_change=handle_submit)
if st.button("Ask"):
    handle_submit()