Deploy TextAI v2 - Clean architecture
Browse files
app.py
CHANGED
|
@@ -100,6 +100,34 @@ def _estimate_title(conv: Dict[str, Any]) -> str:
|
|
| 100 |
return "New chat"
|
| 101 |
|
| 102 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 103 |
def _strip_content_for_prompt(messages: List[Dict[str, Any]]) -> List[Dict[str, str]]:
|
| 104 |
"""Convert messages to simple role/content format for API calls"""
|
| 105 |
out = []
|
|
@@ -246,7 +274,7 @@ def ui_new_chat(state):
|
|
| 246 |
def ui_select_chat(state, conv_id):
|
| 247 |
state = _set_active(state, conv_id)
|
| 248 |
conv = _active_conv(state)
|
| 249 |
-
return state, conv["messages"], conv["title"]
|
| 250 |
|
| 251 |
|
| 252 |
def ui_rename_chat(state, new_title):
|
|
@@ -264,7 +292,7 @@ def ui_delete_chat(state):
|
|
| 264 |
else:
|
| 265 |
state["active_id"] = state["conversations"][0]["id"]
|
| 266 |
conv = _active_conv(state)
|
| 267 |
-
return state, _conv_choices(state), conv["id"], conv["messages"], conv["title"]
|
| 268 |
|
| 269 |
|
| 270 |
def ui_export(state):
|
|
@@ -277,7 +305,7 @@ def ui_export(state):
|
|
| 277 |
def ui_import(state, file_obj):
|
| 278 |
if file_obj is None:
|
| 279 |
conv = _active_conv(state)
|
| 280 |
-
return state, _conv_choices(state), state["active_id"], conv["messages"], conv["title"]
|
| 281 |
try:
|
| 282 |
with open(file_obj.name, "r", encoding="utf-8") as f:
|
| 283 |
loaded = json.load(f)
|
|
@@ -290,7 +318,7 @@ def ui_import(state, file_obj):
|
|
| 290 |
except Exception:
|
| 291 |
pass
|
| 292 |
conv = _active_conv(state)
|
| 293 |
-
return state, _conv_choices(state), conv["id"], conv["messages"], conv["title"]
|
| 294 |
|
| 295 |
|
| 296 |
def ui_add_user_message(state, mm_value):
|
|
@@ -314,7 +342,7 @@ def ui_add_user_message(state, mm_value):
|
|
| 314 |
content_parts.append({"path": f})
|
| 315 |
|
| 316 |
if not content_parts:
|
| 317 |
-
return state, conv["messages"], gr.MultimodalTextbox(value=None)
|
| 318 |
|
| 319 |
conv["messages"].append({
|
| 320 |
"role": "user",
|
|
@@ -325,7 +353,7 @@ def ui_add_user_message(state, mm_value):
|
|
| 325 |
if conv["title"] == "New chat":
|
| 326 |
conv["title"] = _estimate_title(conv)
|
| 327 |
|
| 328 |
-
return state, conv["messages"], gr.MultimodalTextbox(value=None)
|
| 329 |
|
| 330 |
|
| 331 |
def ui_regenerate_prepare(state):
|
|
@@ -333,7 +361,7 @@ def ui_regenerate_prepare(state):
|
|
| 333 |
if conv["messages"] and conv["messages"][-1].get("role") == "assistant":
|
| 334 |
conv["messages"].pop()
|
| 335 |
_touch(conv)
|
| 336 |
-
return state, conv["messages"]
|
| 337 |
|
| 338 |
|
| 339 |
def ui_generate_assistant(state, backend, system_prompt, temperature, max_tokens,
|
|
@@ -356,7 +384,7 @@ def ui_generate_assistant(state, backend, system_prompt, temperature, max_tokens
|
|
| 356 |
|
| 357 |
for partial in pseudo_stream(full):
|
| 358 |
conv["messages"][-1]["content"] = partial
|
| 359 |
-
yield state, conv["messages"]
|
| 360 |
|
| 361 |
|
| 362 |
# ───────────────────────────────────────────────────────────────────────────────
|
|
@@ -594,7 +622,6 @@ with gr.Blocks(title=APP_TITLE) as demo:
|
|
| 594 |
|
| 595 |
chatbot = gr.Chatbot(
|
| 596 |
elem_id="chatbot",
|
| 597 |
-
type="messages",
|
| 598 |
height=550,
|
| 599 |
)
|
| 600 |
|
|
@@ -614,7 +641,7 @@ with gr.Blocks(title=APP_TITLE) as demo:
|
|
| 614 |
choices = _conv_choices(s)
|
| 615 |
active = s["active_id"]
|
| 616 |
conv = _active_conv(s)
|
| 617 |
-
return choices, active, conv["title"], conv["messages"]
|
| 618 |
|
| 619 |
demo.load(
|
| 620 |
_init_sidebar,
|
|
|
|
| 100 |
return "New chat"
|
| 101 |
|
| 102 |
|
| 103 |
+
def _to_chatbot_format(messages: List[Dict[str, Any]]) -> List[Tuple[Optional[str], Optional[str]]]:
|
| 104 |
+
"""Convert messages to Gradio Chatbot tuple format: [(user, assistant), ...]"""
|
| 105 |
+
result = []
|
| 106 |
+
i = 0
|
| 107 |
+
while i < len(messages):
|
| 108 |
+
user_msg = None
|
| 109 |
+
assistant_msg = None
|
| 110 |
+
|
| 111 |
+
# Get user message
|
| 112 |
+
if i < len(messages) and messages[i].get("role") == "user":
|
| 113 |
+
content = messages[i].get("content", "")
|
| 114 |
+
if isinstance(content, list):
|
| 115 |
+
user_msg = " ".join(str(c) if isinstance(c, str) else f"[file]" for c in content)
|
| 116 |
+
else:
|
| 117 |
+
user_msg = str(content)
|
| 118 |
+
i += 1
|
| 119 |
+
|
| 120 |
+
# Get assistant message
|
| 121 |
+
if i < len(messages) and messages[i].get("role") == "assistant":
|
| 122 |
+
assistant_msg = str(messages[i].get("content", ""))
|
| 123 |
+
i += 1
|
| 124 |
+
|
| 125 |
+
if user_msg is not None or assistant_msg is not None:
|
| 126 |
+
result.append((user_msg, assistant_msg))
|
| 127 |
+
|
| 128 |
+
return result
|
| 129 |
+
|
| 130 |
+
|
| 131 |
def _strip_content_for_prompt(messages: List[Dict[str, Any]]) -> List[Dict[str, str]]:
|
| 132 |
"""Convert messages to simple role/content format for API calls"""
|
| 133 |
out = []
|
|
|
|
| 274 |
def ui_select_chat(state, conv_id):
|
| 275 |
state = _set_active(state, conv_id)
|
| 276 |
conv = _active_conv(state)
|
| 277 |
+
return state, _to_chatbot_format(conv["messages"]), conv["title"]
|
| 278 |
|
| 279 |
|
| 280 |
def ui_rename_chat(state, new_title):
|
|
|
|
| 292 |
else:
|
| 293 |
state["active_id"] = state["conversations"][0]["id"]
|
| 294 |
conv = _active_conv(state)
|
| 295 |
+
return state, _conv_choices(state), conv["id"], _to_chatbot_format(conv["messages"]), conv["title"]
|
| 296 |
|
| 297 |
|
| 298 |
def ui_export(state):
|
|
|
|
| 305 |
def ui_import(state, file_obj):
|
| 306 |
if file_obj is None:
|
| 307 |
conv = _active_conv(state)
|
| 308 |
+
return state, _conv_choices(state), state["active_id"], _to_chatbot_format(conv["messages"]), conv["title"]
|
| 309 |
try:
|
| 310 |
with open(file_obj.name, "r", encoding="utf-8") as f:
|
| 311 |
loaded = json.load(f)
|
|
|
|
| 318 |
except Exception:
|
| 319 |
pass
|
| 320 |
conv = _active_conv(state)
|
| 321 |
+
return state, _conv_choices(state), conv["id"], _to_chatbot_format(conv["messages"]), conv["title"]
|
| 322 |
|
| 323 |
|
| 324 |
def ui_add_user_message(state, mm_value):
|
|
|
|
| 342 |
content_parts.append({"path": f})
|
| 343 |
|
| 344 |
if not content_parts:
|
| 345 |
+
return state, _to_chatbot_format(conv["messages"]), gr.MultimodalTextbox(value=None)
|
| 346 |
|
| 347 |
conv["messages"].append({
|
| 348 |
"role": "user",
|
|
|
|
| 353 |
if conv["title"] == "New chat":
|
| 354 |
conv["title"] = _estimate_title(conv)
|
| 355 |
|
| 356 |
+
return state, _to_chatbot_format(conv["messages"]), gr.MultimodalTextbox(value=None)
|
| 357 |
|
| 358 |
|
| 359 |
def ui_regenerate_prepare(state):
|
|
|
|
| 361 |
if conv["messages"] and conv["messages"][-1].get("role") == "assistant":
|
| 362 |
conv["messages"].pop()
|
| 363 |
_touch(conv)
|
| 364 |
+
return state, _to_chatbot_format(conv["messages"])
|
| 365 |
|
| 366 |
|
| 367 |
def ui_generate_assistant(state, backend, system_prompt, temperature, max_tokens,
|
|
|
|
| 384 |
|
| 385 |
for partial in pseudo_stream(full):
|
| 386 |
conv["messages"][-1]["content"] = partial
|
| 387 |
+
yield state, _to_chatbot_format(conv["messages"])
|
| 388 |
|
| 389 |
|
| 390 |
# ───────────────────────────────────────────────────────────────────────────────
|
|
|
|
| 622 |
|
| 623 |
chatbot = gr.Chatbot(
|
| 624 |
elem_id="chatbot",
|
|
|
|
| 625 |
height=550,
|
| 626 |
)
|
| 627 |
|
|
|
|
| 641 |
choices = _conv_choices(s)
|
| 642 |
active = s["active_id"]
|
| 643 |
conv = _active_conv(s)
|
| 644 |
+
return choices, active, conv["title"], _to_chatbot_format(conv["messages"])
|
| 645 |
|
| 646 |
demo.load(
|
| 647 |
_init_sidebar,
|