BastienHot committed
Commit e38515d · verified · 1 Parent(s): 8aa5e4c

Sync from GitHub repo - 2025-09-21 12:14:16

Files changed (1):
  1. app.py +69 -13
app.py CHANGED
@@ -65,9 +65,65 @@ SPECIFICATION_CATEGORIES: Tuple[str, ...] = (
 )


-ChatHistory = List[gr.ChatMessage]
+# ---------------------------------------------------------------------------
+# Gradio compatibility helpers
+# ---------------------------------------------------------------------------
+
+if hasattr(gr, "ChatMessage"):
+    ChatMessage = gr.ChatMessage
+else:
+    @dataclass(frozen=True)
+    class ChatMessage:
+        """Fallback message structure for Gradio versions without ChatMessage."""
+
+        role: str
+        content: str
+
+        def dict(self) -> Dict[str, str]:
+            """Return a dictionary representation compatible with Gradio Chatbot."""
+
+            return {"role": self.role, "content": self.content}
+
+        def model_dump(self) -> Dict[str, str]:
+            """Mirror pydantic-style serialization used internally by Gradio."""
+
+            return self.dict()
+
+        def __iter__(self):
+            """Allow tuple-like unpacking in legacy Gradio behaviors."""
+
+            yield self.role
+            yield self.content
+
+        def __getitem__(self, key: str) -> str:
+            """Provide dictionary-style access for compatibility checks."""
+
+            if key == "role":
+                return self.role
+            if key == "content":
+                return self.content
+            raise KeyError(key)
+
+
+ChatHistory = List[ChatMessage]
 ConversationState = Dict[str, ChatHistory]
 ComponentUpdate = Dict[str, Any]
+ChatbotMessages = List[Any]
+
+# ``gr.Chatbot`` expects different payload structures depending on the installed
+# Gradio version. The helper below normalizes our internal chat history objects
+# to the appropriate wire format, keeping the rest of the codebase agnostic to
+# those differences.
+def _chatbot_messages(history: ChatHistory) -> ChatbotMessages:
+    """Return data formatted for ``gr.Chatbot`` regardless of Gradio version."""
+
+    if hasattr(gr, "ChatMessage"):
+        return history
+    return [
+        message.dict() if hasattr(message, "dict") else {"role": message.role, "content": message.content}
+        for message in history
+    ]
+

 # Create a simple counter so each pending specification has a predictable,
 # unique identifier. ``itertools.count`` is lightweight and thread-safe for the
@@ -182,17 +238,17 @@ def _persona_prompt(persona: str, message: str) -> str:


 def _record_conversation(
-    conversation_state: Dict[str, List[gr.ChatMessage]],
+    conversation_state: Dict[str, List[ChatMessage]],
     persona: str,
     user_message: str,
     ai_response: str,
-) -> Dict[str, List[gr.ChatMessage]]:
+) -> Dict[str, List[ChatMessage]]:
     """Append conversation turns and return the mutated state copy."""

     updated_history = {**conversation_state}
     history = list(updated_history.get(persona, []))
-    history.append(gr.ChatMessage(role="user", content=user_message))
-    history.append(gr.ChatMessage(role="assistant", content=ai_response))
+    history.append(ChatMessage(role="user", content=user_message))
+    history.append(ChatMessage(role="assistant", content=ai_response))
     updated_history[persona] = history
     return updated_history

@@ -287,21 +343,21 @@ def load_demo_data(

     conversation_state = {
         "requirements": [
-            gr.ChatMessage(
+            ChatMessage(
                 role="user",
                 content="Outline the business goals for the ecommerce relaunch.",
             ),
-            gr.ChatMessage(
+            ChatMessage(
                 role="assistant",
                 content="Generated demo summary covering revenue targets, customer journeys, and KPIs.",
             ),
         ],
         "technical": [
-            gr.ChatMessage(
+            ChatMessage(
                 role="user",
                 content="Propose the core services and integrations we need.",
             ),
-            gr.ChatMessage(
+            ChatMessage(
                 role="assistant",
                 content="Demo architecture: API gateway, checkout service, event bus, analytics pipeline.",
             ),
@@ -340,7 +396,7 @@ def _handle_conversation(
     project: Optional[str],
     conversation_state: ConversationState,
     pending_state: Dict[str, List[Dict[str, str]]],
-) -> Tuple[ChatHistory, ConversationState, Dict[str, List[Dict[str, str]]], str]:
+) -> Tuple[ChatbotMessages, ConversationState, Dict[str, List[Dict[str, str]]], str]:
     """Core handler shared by both AI persona chat tabs."""

     _ensure_project_selected(project)
@@ -384,7 +440,7 @@ def _handle_conversation(
     updated_pending = {"queue": queue}

     status = "Draft added to the validation queue. Review it on the Validation tab."
-    return updated_conversation[persona], updated_conversation, updated_pending, status
+    return _chatbot_messages(updated_conversation[persona]), updated_conversation, updated_pending, status


 def handle_requirements_chat(
@@ -392,7 +448,7 @@ def handle_requirements_chat(
     project: Optional[str],
     conversation_state: ConversationState,
     pending_state: Dict[str, List[Dict[str, str]]],
-) -> Tuple[ChatHistory, ConversationState, Dict[str, List[Dict[str, str]]], str]:
+) -> Tuple[ChatbotMessages, ConversationState, Dict[str, List[Dict[str, str]]], str]:
     """Wrapper for the Requirements persona interaction."""

     return _handle_conversation(
@@ -409,7 +465,7 @@ def handle_technical_chat(
     project: Optional[str],
     conversation_state: ConversationState,
     pending_state: Dict[str, List[Dict[str, str]]],
-) -> Tuple[ChatHistory, ConversationState, Dict[str, List[Dict[str, str]]], str]:
+) -> Tuple[ChatbotMessages, ConversationState, Dict[str, List[Dict[str, str]]], str]:
     """Wrapper for the Technical persona interaction."""

     return _handle_conversation(
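
For reference, a minimal standalone sketch of the fallback path this commit adds: it copies (in trimmed form) the fallback ChatMessage dataclass and the _chatbot_messages normalization from the diff above so the behavior on an older Gradio install (no gr.ChatMessage) can be run without Gradio at all. The sample history strings are taken from the demo data in the diff; everything else here is illustrative only and not part of the commit.

from dataclasses import dataclass
from typing import Any, Dict, List


@dataclass(frozen=True)
class ChatMessage:
    # Trimmed copy of the fallback defined in the diff above.
    role: str
    content: str

    def dict(self) -> Dict[str, str]:
        return {"role": self.role, "content": self.content}


def _chatbot_messages(history: List[ChatMessage]) -> List[Any]:
    # Mirrors the fallback branch: older gr.Chatbot versions expect plain dicts.
    return [
        message.dict() if hasattr(message, "dict") else {"role": message.role, "content": message.content}
        for message in history
    ]


history = [
    ChatMessage(role="user", content="Outline the business goals for the ecommerce relaunch."),
    ChatMessage(role="assistant", content="Generated demo summary covering revenue targets, customer journeys, and KPIs."),
]
print(_chatbot_messages(history))
# [{'role': 'user', 'content': 'Outline the business goals...'},
#  {'role': 'assistant', 'content': 'Generated demo summary...'}]

On a current Gradio install, hasattr(gr, "ChatMessage") is true, so the helper in the diff simply returns the history unchanged and the dict conversion above never runs.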