# Hugging Face Space (Streamlit app). The original paste carried the Spaces
# page status header ("Spaces: Sleeping") — kept here as a comment only.
import os

import streamlit as st
from datasets import load_dataset
from huggingface_hub import InferenceClient

# ... (rest of your code)

# Single placeholder for the streamed answer, created once at module level
# (outside the columns) so handle_submit can update it in place.
# NOTE: the original wrote the initial markdown as an f-string with no
# placeholders; a plain string literal is identical and clearer.
placeholder = st.empty()
placeholder.markdown('<div class="scrollable-div"><p></p></div>', unsafe_allow_html=True)
def handle_submit():
    """Handle a submitted question.

    Builds the chat messages (optionally injecting the loaded help
    ``context``), streams the model's answer into ``placeholder``, and
    prepends the finished answer to ``st.session_state.conversation``.
    Triggered both by the text input's on_change and the "Ask" button.
    """
    user_input = st.session_state.user_input
    if user_input:
        if st.session_state.include_context:
            # `context` is expected to be defined at module level (presumably
            # from the loaded dataset) — TODO confirm; not visible in this chunk.
            messages = [
                {"role": "system", "content": f"Context: {context}"},
                {"role": "user", "content": user_input},
            ]
        else:
            # Constant fallback prompt — no interpolation needed, so no f-string.
            messages = [
                {"role": "system", "content": "Context: Adrega is a powerful project management and reporting tool. It can show Gantt diagrams, S-Curves, Tabular reports and various charts in single reports, report bundles or in a customizable dashboard."},
                {"role": "user", "content": user_input},
            ]
        # `client` is an InferenceClient created elsewhere in the file —
        # TODO confirm; not visible in this chunk.
        response = client.chat.completions.create(
            model="Qwen/Qwen2.5-72B-Instruct",
            messages=messages,
            max_tokens=1000,
            stream=True,
        )
        answer = ""
        for chunk in response:
            # Streamed deltas can omit `content` (role-only or final chunks);
            # guard against None so the concatenation never raises.
            content = chunk.choices[0].delta.content
            if content:
                answer += content
                # Re-render the accumulated answer inside the existing placeholder.
                placeholder.markdown(
                    f'<div class="scrollable-div"><p>{answer}</p></div>',
                    unsafe_allow_html=True,
                )
        st.session_state.conversation = f"<p>{answer}</p>" + st.session_state.conversation
    else:
        # Bug fix: the original *called* st.session_state.conversation(...) as a
        # function (TypeError) with a doubled colon and an unclosed <p>.
        # Prepend a well-formed warning to the history string instead.
        st.session_state.conversation = (
            "<p><strong>Adrega AI:</strong> Please enter a question.</p>"
            + st.session_state.conversation
        )
# ... (rest of your code)

# Bug fix: initialize the conversation history BEFORE any widget whose
# callback reads it. The original initialized it after the "Ask" button,
# so handle_submit could hit a missing `conversation` key on first use.
if 'conversation' not in st.session_state:
    st.session_state.conversation = ""

# Submitting the text input triggers handle_submit via on_change.
st.text_input('Ask me a question', key='user_input', on_change=handle_submit)

col1, col2 = st.columns(2)
with col1:
    # Explicit "Ask" button as an alternative to pressing Enter.
    if st.button("Ask"):
        handle_submit()
with col2:
    # Toggles whether handle_submit sends the loaded help `context`
    # as the system prompt.
    st.session_state.include_context = st.checkbox('Search in Help')