Update app.py
app.py CHANGED
@@ -11,8 +11,8 @@ class XylariaChat:
 
         # Initialize the inference client
         self.client = InferenceClient(
-            model="Qwen/QwQ-32B-Preview",
-            …
+            model="Qwen/QwQ-32B-Preview",  # Changed model name to a placeholder
+            token=self.hf_token
         )
 
         # Initialize conversation history and persistent memory
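
Note: `InferenceClient` is the `huggingface_hub` client class, and passing `token=` authenticates requests against the Hugging Face Inference API. A minimal sketch of the pattern this hunk arrives at; reading the token from an environment variable is an assumption, typical for Spaces:

    import os
    from huggingface_hub import InferenceClient

    # Assumption: the token is provided via an environment variable / Space secret.
    hf_token = os.getenv("HF_TOKEN")

    client = InferenceClient(
        model="Qwen/QwQ-32B-Preview",  # model repo id on the Hub
        token=hf_token,                # authenticates Inference API requests
    )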
@@ -74,19 +74,34 @@ Capabilities:
 
         # Generate response with streaming
         try:
-            …
-            …
+            response_stream = self.client.text_generation(
+                prompt=self.messages_to_prompt(messages),  # Convert messages to prompt format
+                max_new_tokens=1024,
                 temperature=0.5,
-                max_tokens=10240,
                 top_p=0.7,
                 stream=True
             )
 
-            return …
+            return response_stream
 
         except Exception as e:
             return f"Error generating response: {str(e)}"
 
+    def messages_to_prompt(self, messages):
+        """
+        Converts a list of messages in OpenAI format to a prompt string.
+        """
+        prompt = ""
+        for message in messages:
+            if message["role"] == "system":
+                prompt += f"<|im_start|>system\n{message['content']}<|im_end|>\n"
+            elif message["role"] == "user":
+                prompt += f"<|im_start|>user\n{message['content']}<|im_end|>\n"
+            elif message["role"] == "assistant":
+                prompt += f"<|im_start|>assistant\n{message['content']}<|im_end|>\n"
+        prompt += "<|im_start|>assistant\n"
+        return prompt
+
     def create_interface(self):
         # Local storage JavaScript functions
         local_storage_js = """
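
Note: with `stream=True` (and the default `details=False`), `text_generation` returns an iterator of plain token strings, which is what the loop in the next hunk consumes; `messages_to_prompt` flattens the OpenAI-style message list into Qwen's ChatML format. A self-contained sketch of the two pieces together, with an illustrative message list:

    from huggingface_hub import InferenceClient

    client = InferenceClient(model="Qwen/QwQ-32B-Preview")  # token omitted for brevity

    messages = [
        {"role": "system", "content": "You are a helpful assistant."},
        {"role": "user", "content": "Hello!"},
    ]

    # ChatML-style prompt, mirroring messages_to_prompt above.
    prompt = ""
    for m in messages:
        prompt += f"<|im_start|>{m['role']}\n{m['content']}<|im_end|>\n"
    prompt += "<|im_start|>assistant\n"  # cue the model to answer

    stream = client.text_generation(
        prompt=prompt,
        max_new_tokens=1024,
        temperature=0.5,
        top_p=0.7,
        stream=True,  # yields incremental token strings
    )
    for token_text in stream:
        print(token_text, end="", flush=True)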
@@ -117,14 +132,12 @@ Capabilities:
             updated_history = chat_history + [[message, ""]]
 
             # Streaming output
-            for …
-                …
-                chunk_content = chunk.choices[0].delta.content
-                full_response += chunk_content
+            for response_text in response_stream:
+                full_response += response_text
 
-                …
-                …
-                …
+                # Update the last message in chat history with partial response
+                updated_history[-1][1] = full_response
+                yield "", updated_history
 
             # Update conversation history
             self.conversation_history.append(
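
Note: Gradio treats a generator event handler as a streaming handler; every `yield` repaints the declared outputs, so yielding `"", updated_history` clears the textbox and redraws the chat on each chunk. A runnable sketch of the pattern with a fake token stream (all names here are illustrative):

    import time
    import gradio as gr

    def respond(message, chat_history):
        chat_history = chat_history + [[message, ""]]
        full_response = ""
        for word in "a reply streamed word by word".split():
            full_response += word + " "
            chat_history[-1][1] = full_response  # update the pending bot turn
            time.sleep(0.1)
            yield "", chat_history               # clears textbox, repaints chat

    with gr.Blocks() as demo:
        chatbot = gr.Chatbot()
        txt = gr.Textbox()
        txt.submit(respond, inputs=[txt, chatbot], outputs=[txt, chatbot])

    demo.launch()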
@@ -166,7 +179,7 @@ Capabilities:
                 label="Xylaria 1.4 Senoa",
                 height=500,
                 show_copy_button=True,
-                …
+                type="messages"  # Use the 'messages' format
             )
 
             # Input row with improved layout
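
Note: `type="messages"` switches `gr.Chatbot` to OpenAI-style role/content dicts, but the streaming code in the earlier hunk still builds pair lists (`chat_history + [[message, ""]]`); the two formats would need to be reconciled. For comparison (a sketch, not from the diff):

    # Pair format -- what the streaming handler above builds:
    history_pairs = [["Hello!", "Hi there."]]

    # "messages" format -- what type="messages" expects:
    history_messages = [
        {"role": "user", "content": "Hello!"},
        {"role": "assistant", "content": "Hi there."},
    ]

    # Streaming a pending assistant turn in the messages format:
    history_messages.append({"role": "user", "content": "Next question"})
    history_messages.append({"role": "assistant", "content": ""})
    history_messages[-1]["content"] += "partial chunk"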
@@ -185,11 +198,11 @@ Capabilities:
 
             # Load chat history from local storage on page load
             demo.load(
-                fn=None,
-                _js="() => loadFromLocalStorage()"
+                fn=None,
+                _js="() => loadFromLocalStorage()"
             ).then(
                 fn=lambda x: x,
-                inputs=chatbot,
+                inputs=chatbot,
                 outputs=chatbot
             )
 
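
Note: `fn=None` combined with `_js=` runs the snippet entirely client-side, and its return value feeds the declared outputs; the follow-up `.then(fn=lambda x: x, ...)` round-trips the loaded history through Python so server state stays in sync. (`_js` is the Gradio 3.x keyword; Gradio 4+ renames it to `js`.) A sketch of the load-on-start pattern with an assumed storage key:

    import gradio as gr

    load_js = """
    () => {
        // Assumption: 'chat_history' is the key used by local_storage_js.
        const saved = localStorage.getItem('chat_history');
        return saved ? JSON.parse(saved) : [];
    }
    """

    with gr.Blocks() as demo:
        chatbot = gr.Chatbot()
        # Gradio 3.x style; use js=load_js on Gradio 4+.
        demo.load(fn=None, inputs=None, outputs=chatbot, _js=load_js)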
@@ -199,7 +212,9 @@ Capabilities:
                 inputs=[txt, chatbot],
                 outputs=[txt, chatbot]
             ).then(
-                fn=None,
+                fn=None,
+                inputs=[chatbot],  # Pass chatbot history to JavaScript
+                outputs=None,
                 _js='(chatHistory) => saveToLocalStorage(chatHistory)'
             )
             txt.submit(
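
Note: the added `inputs=[chatbot]` is what makes the history actually reach the browser: a client-side `_js` function receives the declared inputs as its arguments, so without it `saveToLocalStorage(chatHistory)` would be invoked with nothing to save. The next hunk applies the identical fix to the `txt.submit` chain. A self-contained sketch:

    import gradio as gr

    save_js = """
    (chatHistory) => {
        localStorage.setItem('chat_history', JSON.stringify(chatHistory));
    }
    """

    def respond(message, chat_history):
        return "", chat_history + [[message, message.upper()]]

    with gr.Blocks() as demo:
        chatbot = gr.Chatbot()
        txt = gr.Textbox()
        btn = gr.Button("Send")
        btn.click(
            respond, inputs=[txt, chatbot], outputs=[txt, chatbot]
        ).then(
            fn=None,
            inputs=[chatbot],   # delivered to save_js as chatHistory
            outputs=None,
            _js=save_js,        # Gradio 3.x keyword; js= on Gradio 4+
        )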
@@ -207,7 +222,9 @@ Capabilities:
                 inputs=[txt, chatbot],
                 outputs=[txt, chatbot]
             ).then(
-                fn=None,
+                fn=None,
+                inputs=[chatbot],  # Pass chatbot history to JavaScript
+                outputs=None,
                 _js='(chatHistory) => saveToLocalStorage(chatHistory)'
             )
 
@@ -217,17 +234,17 @@ Capabilities:
                 inputs=None,
                 outputs=[chatbot]
             ).then(
-                fn=None,
+                fn=None,
                 _js='() => clearLocalStorage()'
             )
 
             # Clear persistent memory and reset conversation with local storage clear
             clear_memory.click(
-                fn=…
+                fn=self.reset_conversation,
                 inputs=None,
                 outputs=[chatbot]
             ).then(
-                fn=None,
+                fn=None,
                 _js='() => clearLocalStorage()'
             )
 
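Note: `clear_memory.click` now pairs a Python-side reset (`fn=self.reset_conversation`) with a client-side wipe (`clearLocalStorage()`). The method body is not part of this diff; a plausible shape, offered purely as an assumption, would be:

    def reset_conversation(self):
        # Hypothetical sketch -- the actual method is not shown in this diff.
        self.conversation_history = []  # drop server-side history
        return []                       # an empty list clears the Chatbot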