andreska committed on
Commit
82af3b6
·
verified ·
1 Parent(s): fd25069

Try to change output to stream instead of waiting for completion

Browse files
Files changed (1) hide show
  1. app.py +11 -8
app.py CHANGED
@@ -19,16 +19,16 @@ def read_dataset(dataset):
19
 
20
  context = read_dataset(dataset)
21
 
22
- # Inject custom CSS to change the background color to yellow
23
  st.markdown(
24
  """
25
  <style>
26
  .scrollable-div {
27
- max-height: 300px;
28
  overflow-y: auto;
29
  padding: 10px;
30
  border: 1px solid #ccc;
31
- background-color: #f9f9f9; }
32
  </style>
33
  """,
34
  unsafe_allow_html=True
@@ -49,19 +49,22 @@ def handle_submit():
49
  {"role": "user", "content": user_input}
50
  ]
51
 
52
- completion = client.chat.completions.create(
53
  model="Qwen/Qwen2.5-72B-Instruct",
54
  #model="Qwen/Qwen2.5-Coder-32B-Instruct",
55
  #model="mistralai/Mistral-7B-Instruct-v0.3",
56
  #model="meta-llama/Llama-3.1-8B-Instruct",
57
  messages=messages,
58
- max_tokens=1000
 
59
  )
60
 
61
- answer = completion.choices[0].message['content']
62
 
63
- #st.session_state.conversation = f"<p><strong>User:</strong> {user_input}</p><p><strong>Adrega AI:</strong> {answer}</p>" + st.session_state.conversation
64
- st.session_state.conversation = f"<p><strong>Adrega AI:</strong> {answer}</p>" + st.session_state.conversation
 
 
65
  else:
66
  st.session_state.conversation(f"<p><strong>Adrega AI:</strong>: Please enter a question.")
67
 
 
19
 
20
  context = read_dataset(dataset)
21
 
22
+ # Inject custom CSS
23
  st.markdown(
24
  """
25
  <style>
26
  .scrollable-div {
27
+ max-height: 400px;
28
  overflow-y: auto;
29
  padding: 10px;
30
  border: 1px solid #ccc;
31
+ }
32
  </style>
33
  """,
34
  unsafe_allow_html=True
 
49
  {"role": "user", "content": user_input}
50
  ]
51
 
52
+ response = client.chat.completions.create(
53
  model="Qwen/Qwen2.5-72B-Instruct",
54
  #model="Qwen/Qwen2.5-Coder-32B-Instruct",
55
  #model="mistralai/Mistral-7B-Instruct-v0.3",
56
  #model="meta-llama/Llama-3.1-8B-Instruct",
57
  messages=messages,
58
+ max_tokens=1000,
59
+ stream=True
60
  )
61
 
62
+ answer = ""
63
 
64
+ for chunk in response:
65
+ answer += chunk['choices'][0]['delta']['content']
66
+ st.session_state.conversation = f"<p><strong>Adrega AI:</strong> {answer}</p>" + st.session_state.conversation
67
+ st.markdown(f'<div class="scrollable-div">{st.session_state.conversation}</div>', unsafe_allow_html=True)
68
  else:
69
  st.session_state.conversation(f"<p><strong>Adrega AI:</strong>: Please enter a question.")
70