andreska committed on
Commit
8beee20
·
verified ·
1 Parent(s): 74e6674

Gemini fix for re-render problems

Browse files
Files changed (1) hide show
  1. app.py +6 -36
app.py CHANGED
@@ -3,39 +3,9 @@ import streamlit as st
3
  from datasets import load_dataset
4
  from huggingface_hub import InferenceClient
5
 
6
- # Get the API key from the environment variable
7
- api_key = os.getenv("HF_API_KEY")
8
- client = InferenceClient(api_key=api_key)
9
 
10
- # Load the dataset
11
- dataset = load_dataset("andreska/adregadocs", split="test")
12
-
13
- # Function to read the content from the dataset
14
- def read_dataset(dataset):
15
- text = []
16
- for item in dataset:
17
- text.append(item['text'])
18
- return "\n".join(text)
19
-
20
- context = read_dataset(dataset)
21
-
22
- # Inject custom CSS
23
- st.markdown(
24
- """
25
- <style>
26
- .scrollable-div {
27
- height: 400px;
28
- width: 100%;
29
- overflow-y: auto;
30
- padding: 10px;
31
- border: 1px solid #ccc;
32
- }
33
- </style>
34
- """,
35
- unsafe_allow_html=True
36
- )
37
-
38
- # Define the placeholder globally
39
  placeholder = st.empty()
40
  placeholder.markdown(f'<div class="scrollable-div"><p></p></div>', unsafe_allow_html=True)
41
 
@@ -56,9 +26,6 @@ def handle_submit():
56
 
57
  response = client.chat.completions.create(
58
  model="Qwen/Qwen2.5-72B-Instruct",
59
- #model="Qwen/Qwen2.5-Coder-32B-Instruct",
60
- #model="mistralai/Mistral-7B-Instruct-v0.3",
61
- #model="meta-llama/Llama-3.1-8B-Instruct",
62
  messages=messages,
63
  max_tokens=1000,
64
  stream=True
@@ -67,14 +34,17 @@ def handle_submit():
67
  answer = ""
68
  for chunk in response:
69
  answer += chunk['choices'][0]['delta']['content']
 
70
  placeholder.markdown(f'<div class="scrollable-div"><p>{answer}</p></div>', unsafe_allow_html=True)
71
  st.session_state.conversation = f"<p>{answer}</p>" + st.session_state.conversation
 
72
  else:
73
  st.session_state.conversation(f"<p><strong>Adrega AI:</strong>: Please enter a question.")
74
 
 
 
75
  st.text_input('Ask me a question', key='user_input', on_change=handle_submit)
76
  col1, col2 = st.columns(2)
77
-
78
  with col1:
79
  if st.button("Ask"):
80
  handle_submit()
 
3
  from datasets import load_dataset
4
  from huggingface_hub import InferenceClient
5
 
6
+ # ... (rest of your code)
 
 
7
 
8
+ # Define the placeholder globally (outside columns)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
9
  placeholder = st.empty()
10
  placeholder.markdown(f'<div class="scrollable-div"><p></p></div>', unsafe_allow_html=True)
11
 
 
26
 
27
  response = client.chat.completions.create(
28
  model="Qwen/Qwen2.5-72B-Instruct",
 
 
 
29
  messages=messages,
30
  max_tokens=1000,
31
  stream=True
 
34
  answer = ""
35
  for chunk in response:
36
  answer += chunk['choices'][0]['delta']['content']
37
+ # Update content within the existing placeholder
38
  placeholder.markdown(f'<div class="scrollable-div"><p>{answer}</p></div>', unsafe_allow_html=True)
39
  st.session_state.conversation = f"<p>{answer}</p>" + st.session_state.conversation
40
+
41
  else:
42
  st.session_state.conversation(f"<p><strong>Adrega AI:</strong>: Please enter a question.")
43
 
44
+ # ... (rest of your code)
45
+
46
  st.text_input('Ask me a question', key='user_input', on_change=handle_submit)
47
  col1, col2 = st.columns(2)
 
48
  with col1:
49
  if st.button("Ask"):
50
  handle_submit()