BastienHot committed on
Commit
bd65bac
·
verified ·
1 Parent(s): 91e5ba2

Sync from GitHub repo - 2025-09-20 22:16:59

Browse files
Files changed (2) hide show
  1. app.py +46 -35
  2. requirements.txt +5 -5
app.py CHANGED
@@ -27,7 +27,7 @@ import itertools
27
  import logging
28
  import traceback
29
  from dataclasses import dataclass
30
- from typing import Dict, Iterable, List, Optional, Tuple
31
 
32
  import gradio as gr
33
 
@@ -61,6 +61,11 @@ SPECIFICATION_CATEGORIES: Tuple[str, ...] = (
61
  "Validation Criteria",
62
  )
63
 
 
 
 
 
 
64
  # Create a simple counter so each pending specification has a predictable,
65
  # unique identifier. ``itertools.count`` is lightweight and thread-safe for the
66
  # single-worker environments common when running Gradio locally.
@@ -174,17 +179,17 @@ def _persona_prompt(persona: str, message: str) -> str:
174
 
175
 
176
  def _record_conversation(
177
- conversation_state: Dict[str, List[Tuple[str, str]]],
178
  persona: str,
179
  user_message: str,
180
  ai_response: str,
181
- ) -> Dict[str, List[Tuple[str, str]]]:
182
  """Append conversation turns and return the mutated state copy."""
183
 
184
  updated_history = {**conversation_state}
185
  history = list(updated_history.get(persona, []))
186
- history.append(("user", user_message))
187
- history.append(("assistant", ai_response))
188
  updated_history[persona] = history
189
  return updated_history
190
 
@@ -217,12 +222,12 @@ def _group_approved_specifications(records: Iterable[SpecificationRecord]) -> Di
217
  # Gradio callback functions (project management)
218
  # ---------------------------------------------------------------------------
219
 
220
- def bootstrap_application() -> Tuple[List[str], gr.Dropdown.update, str, Dict[str, List[Tuple[str, str]]], Dict[str, List[Dict[str, str]]], str]:
221
  """Return initial state for the interface when the app loads."""
222
 
223
  projects = [DEMO_PROJECT_NAME]
224
  current_project = DEMO_PROJECT_NAME
225
- conversation_state = {"requirements": [], "technical": []}
226
  pending_state = {"queue": []}
227
  if CONFIG.demo_mode:
228
  status = (
@@ -234,7 +239,7 @@ def bootstrap_application() -> Tuple[List[str], gr.Dropdown.update, str, Dict[st
234
  "Ready to collaborate. Create a project or load demo data while"
235
  " authenticated providers generate live specifications."
236
  )
237
- dropdown_update = gr.Dropdown.update(choices=projects, value=current_project)
238
  return projects, dropdown_update, current_project, conversation_state, pending_state, status
239
 
240
 
@@ -242,7 +247,7 @@ def create_project(
242
  project_name: str,
243
  projects: List[str],
244
  current_project: Optional[str],
245
- ) -> Tuple[List[str], gr.Dropdown.update, str, gr.Textbox.update]:
246
  """Create a new project and update the selection dropdown."""
247
 
248
  if not project_name or not project_name.strip():
@@ -253,9 +258,9 @@ def create_project(
253
  raise ValueError(f"Project '{normalized_name}' already exists.")
254
 
255
  updated_projects = projects + [normalized_name]
256
- dropdown_update = gr.Dropdown.update(choices=updated_projects, value=normalized_name)
257
  status = f"Created project '{normalized_name}' and set it as active."
258
- clear_input = gr.Textbox.update(value="")
259
  return updated_projects, dropdown_update, status, clear_input
260
 
261
 
@@ -270,26 +275,32 @@ def select_project(project_name: str) -> Tuple[str, str]:
270
 
271
  def load_demo_data(
272
  projects: List[str],
273
- conversation_state: Dict[str, List[Tuple[str, str]]],
274
  pending_state: Dict[str, List[Dict[str, str]]],
275
- ) -> Tuple[List[str], Dict[str, List[Tuple[str, str]]], Dict[str, List[Dict[str, str]]], gr.Dropdown.update, str]:
276
  """Populate application state with mock data for testing."""
277
 
278
  demo_projects = projects if DEMO_PROJECT_NAME in projects else projects + [DEMO_PROJECT_NAME]
279
 
280
  conversation_state = {
281
  "requirements": [
282
- ("user", "Outline the business goals for the ecommerce relaunch."),
283
- (
284
- "assistant",
285
- "Generated demo summary covering revenue targets, customer journeys, and KPIs.",
 
 
 
286
  ),
287
  ],
288
  "technical": [
289
- ("user", "Propose the core services and integrations we need."),
290
- (
291
- "assistant",
292
- "Demo architecture: API gateway, checkout service, event bus, analytics pipeline.",
 
 
 
293
  ),
294
  ],
295
  }
@@ -310,7 +321,7 @@ def load_demo_data(
310
  ]
311
 
312
  pending_state = {"queue": queue}
313
- dropdown_update = gr.Dropdown.update(choices=demo_projects, value=DEMO_PROJECT_NAME)
314
  status = "Demo data loaded. Conversations and pending drafts now contain example content."
315
  return demo_projects, conversation_state, pending_state, dropdown_update, status
316
 
@@ -324,9 +335,9 @@ def _handle_conversation(
324
  persona: str,
325
  message: str,
326
  project: Optional[str],
327
- conversation_state: Dict[str, List[Tuple[str, str]]],
328
  pending_state: Dict[str, List[Dict[str, str]]],
329
- ) -> Tuple[List[Tuple[str, str]], Dict[str, List[Tuple[str, str]]], Dict[str, List[Dict[str, str]]], str]:
330
  """Core handler shared by both AI persona chat tabs."""
331
 
332
  _ensure_project_selected(project)
@@ -376,9 +387,9 @@ def _handle_conversation(
376
  def handle_requirements_chat(
377
  message: str,
378
  project: Optional[str],
379
- conversation_state: Dict[str, List[Tuple[str, str]]],
380
  pending_state: Dict[str, List[Dict[str, str]]],
381
- ) -> Tuple[List[Tuple[str, str]], Dict[str, List[Tuple[str, str]]], Dict[str, List[Dict[str, str]]], str]:
382
  """Wrapper for the Requirements persona interaction."""
383
 
384
  return _handle_conversation(
@@ -393,9 +404,9 @@ def handle_requirements_chat(
393
  def handle_technical_chat(
394
  message: str,
395
  project: Optional[str],
396
- conversation_state: Dict[str, List[Tuple[str, str]]],
397
  pending_state: Dict[str, List[Dict[str, str]]],
398
- ) -> Tuple[List[Tuple[str, str]], Dict[str, List[Tuple[str, str]]], Dict[str, List[Dict[str, str]]], str]:
399
  """Wrapper for the Technical persona interaction."""
400
 
401
  return _handle_conversation(
@@ -411,16 +422,16 @@ def handle_technical_chat(
411
  # Gradio callback functions (validation and approvals)
412
  # ---------------------------------------------------------------------------
413
 
414
- def refresh_pending_specs(pending_state: Dict[str, List[Dict[str, str]]]) -> Tuple[gr.Dropdown.update, str]:
415
  """Update the pending specification dropdown and display guidance."""
416
 
417
  queue = pending_state.get("queue", [])
418
  if not queue:
419
- return gr.Dropdown.update(choices=[], value=None), "No drafts awaiting validation."
420
 
421
  labels = _format_validation_queue(queue)
422
  first_id = queue[0]["id"]
423
- return gr.Dropdown.update(choices=labels, value=first_id), "Select a draft to review."
424
 
425
 
426
  def load_pending_spec(
@@ -533,12 +544,12 @@ def export_specification(
533
  return rendered, notice
534
 
535
 
536
- def list_exportable_specs() -> gr.Dropdown.update:
537
  """Populate the export dropdown with approved specifications."""
538
 
539
  records = DB_MANAGER.fetch_recent_specifications(limit=200)
540
  options = [(record.title, str(record.id)) for record in records]
541
- return gr.Dropdown.update(choices=options, value=(options[0][1] if options else None))
542
 
543
 
544
  def summarize_settings() -> str:
@@ -627,7 +638,7 @@ def build_interface() -> gr.Blocks:
627
  and product scope. Each response is added to the validation queue.
628
  """
629
  )
630
- requirements_chat = gr.Chatbot(height=350)
631
  with gr.Row(elem_classes="two-column"):
632
  requirements_input = gr.Textbox(label="Message", placeholder="Describe goals, constraints, and personas...", lines=3)
633
  requirements_submit = gr.Button("Send", variant="primary")
@@ -643,7 +654,7 @@ def build_interface() -> gr.Blocks:
643
  considerations. Drafts also flow into the validation queue for review.
644
  """
645
  )
646
- technical_chat = gr.Chatbot(height=350)
647
  with gr.Row(elem_classes="two-column"):
648
  technical_input = gr.Textbox(label="Message", placeholder="Ask for architecture proposals, sequencing, or risks...", lines=3)
649
  technical_submit = gr.Button("Send", variant="primary")
 
27
  import logging
28
  import traceback
29
  from dataclasses import dataclass
30
+ from typing import Any, Dict, Iterable, List, Optional, Tuple
31
 
32
  import gradio as gr
33
 
 
61
  "Validation Criteria",
62
  )
63
 
64
+
65
+ ChatHistory = List[gr.ChatMessage]
66
+ ConversationState = Dict[str, ChatHistory]
67
+ ComponentUpdate = Dict[str, Any]
68
+
69
  # Create a simple counter so each pending specification has a predictable,
70
  # unique identifier. ``itertools.count`` is lightweight and thread-safe for the
71
  # single-worker environments common when running Gradio locally.
 
179
 
180
 
181
  def _record_conversation(
182
+ conversation_state: Dict[str, List[gr.ChatMessage]],
183
  persona: str,
184
  user_message: str,
185
  ai_response: str,
186
+ ) -> Dict[str, List[gr.ChatMessage]]:
187
  """Append conversation turns and return the mutated state copy."""
188
 
189
  updated_history = {**conversation_state}
190
  history = list(updated_history.get(persona, []))
191
+ history.append(gr.ChatMessage(role="user", content=user_message))
192
+ history.append(gr.ChatMessage(role="assistant", content=ai_response))
193
  updated_history[persona] = history
194
  return updated_history
195
 
 
222
  # Gradio callback functions (project management)
223
  # ---------------------------------------------------------------------------
224
 
225
+ def bootstrap_application() -> Tuple[List[str], ComponentUpdate, str, ConversationState, Dict[str, List[Dict[str, str]]], str]:
226
  """Return initial state for the interface when the app loads."""
227
 
228
  projects = [DEMO_PROJECT_NAME]
229
  current_project = DEMO_PROJECT_NAME
230
+ conversation_state: ConversationState = {"requirements": [], "technical": []}
231
  pending_state = {"queue": []}
232
  if CONFIG.demo_mode:
233
  status = (
 
239
  "Ready to collaborate. Create a project or load demo data while"
240
  " authenticated providers generate live specifications."
241
  )
242
+ dropdown_update = gr.update(choices=projects, value=current_project)
243
  return projects, dropdown_update, current_project, conversation_state, pending_state, status
244
 
245
 
 
247
  project_name: str,
248
  projects: List[str],
249
  current_project: Optional[str],
250
+ ) -> Tuple[List[str], ComponentUpdate, str, ComponentUpdate]:
251
  """Create a new project and update the selection dropdown."""
252
 
253
  if not project_name or not project_name.strip():
 
258
  raise ValueError(f"Project '{normalized_name}' already exists.")
259
 
260
  updated_projects = projects + [normalized_name]
261
+ dropdown_update = gr.update(choices=updated_projects, value=normalized_name)
262
  status = f"Created project '{normalized_name}' and set it as active."
263
+ clear_input = gr.update(value="")
264
  return updated_projects, dropdown_update, status, clear_input
265
 
266
 
 
275
 
276
  def load_demo_data(
277
  projects: List[str],
278
+ conversation_state: ConversationState,
279
  pending_state: Dict[str, List[Dict[str, str]]],
280
+ ) -> Tuple[List[str], ConversationState, Dict[str, List[Dict[str, str]]], ComponentUpdate, str]:
281
  """Populate application state with mock data for testing."""
282
 
283
  demo_projects = projects if DEMO_PROJECT_NAME in projects else projects + [DEMO_PROJECT_NAME]
284
 
285
  conversation_state = {
286
  "requirements": [
287
+ gr.ChatMessage(
288
+ role="user",
289
+ content="Outline the business goals for the ecommerce relaunch.",
290
+ ),
291
+ gr.ChatMessage(
292
+ role="assistant",
293
+ content="Generated demo summary covering revenue targets, customer journeys, and KPIs.",
294
  ),
295
  ],
296
  "technical": [
297
+ gr.ChatMessage(
298
+ role="user",
299
+ content="Propose the core services and integrations we need.",
300
+ ),
301
+ gr.ChatMessage(
302
+ role="assistant",
303
+ content="Demo architecture: API gateway, checkout service, event bus, analytics pipeline.",
304
  ),
305
  ],
306
  }
 
321
  ]
322
 
323
  pending_state = {"queue": queue}
324
+ dropdown_update = gr.update(choices=demo_projects, value=DEMO_PROJECT_NAME)
325
  status = "Demo data loaded. Conversations and pending drafts now contain example content."
326
  return demo_projects, conversation_state, pending_state, dropdown_update, status
327
 
 
335
  persona: str,
336
  message: str,
337
  project: Optional[str],
338
+ conversation_state: ConversationState,
339
  pending_state: Dict[str, List[Dict[str, str]]],
340
+ ) -> Tuple[ChatHistory, ConversationState, Dict[str, List[Dict[str, str]]], str]:
341
  """Core handler shared by both AI persona chat tabs."""
342
 
343
  _ensure_project_selected(project)
 
387
  def handle_requirements_chat(
388
  message: str,
389
  project: Optional[str],
390
+ conversation_state: ConversationState,
391
  pending_state: Dict[str, List[Dict[str, str]]],
392
+ ) -> Tuple[ChatHistory, ConversationState, Dict[str, List[Dict[str, str]]], str]:
393
  """Wrapper for the Requirements persona interaction."""
394
 
395
  return _handle_conversation(
 
404
  def handle_technical_chat(
405
  message: str,
406
  project: Optional[str],
407
+ conversation_state: ConversationState,
408
  pending_state: Dict[str, List[Dict[str, str]]],
409
+ ) -> Tuple[ChatHistory, ConversationState, Dict[str, List[Dict[str, str]]], str]:
410
  """Wrapper for the Technical persona interaction."""
411
 
412
  return _handle_conversation(
 
422
  # Gradio callback functions (validation and approvals)
423
  # ---------------------------------------------------------------------------
424
 
425
+ def refresh_pending_specs(pending_state: Dict[str, List[Dict[str, str]]]) -> Tuple[ComponentUpdate, str]:
426
  """Update the pending specification dropdown and display guidance."""
427
 
428
  queue = pending_state.get("queue", [])
429
  if not queue:
430
+ return gr.update(choices=[], value=None), "No drafts awaiting validation."
431
 
432
  labels = _format_validation_queue(queue)
433
  first_id = queue[0]["id"]
434
+ return gr.update(choices=labels, value=first_id), "Select a draft to review."
435
 
436
 
437
  def load_pending_spec(
 
544
  return rendered, notice
545
 
546
 
547
+ def list_exportable_specs() -> ComponentUpdate:
548
  """Populate the export dropdown with approved specifications."""
549
 
550
  records = DB_MANAGER.fetch_recent_specifications(limit=200)
551
  options = [(record.title, str(record.id)) for record in records]
552
+ return gr.update(choices=options, value=(options[0][1] if options else None))
553
 
554
 
555
  def summarize_settings() -> str:
 
638
  and product scope. Each response is added to the validation queue.
639
  """
640
  )
641
+ requirements_chat = gr.Chatbot(type="messages", height=350)
642
  with gr.Row(elem_classes="two-column"):
643
  requirements_input = gr.Textbox(label="Message", placeholder="Describe goals, constraints, and personas...", lines=3)
644
  requirements_submit = gr.Button("Send", variant="primary")
 
654
  considerations. Drafts also flow into the validation queue for review.
655
  """
656
  )
657
+ technical_chat = gr.Chatbot(type="messages", height=350)
658
  with gr.Row(elem_classes="two-column"):
659
  technical_input = gr.Textbox(label="Message", placeholder="Ask for architecture proposals, sequencing, or risks...", lines=3)
660
  technical_submit = gr.Button("Send", variant="primary")
requirements.txt CHANGED
@@ -3,10 +3,10 @@
3
  #
4
  # gradio powers the web-based interface that enables users to interact with
5
  # AI specification tools directly from the browser with minimal boilerplate.
6
- gradio==4.0.0
7
  # requests provides a simple yet powerful HTTP client for calling external AI
8
  # services that do not have dedicated SDKs, ensuring consistent API handling.
9
- requests==2.31.0
10
  # sqlite3 is part of the Python standard library and powers lightweight local
11
  # storage for specifications; the entry here serves as documentation only.
12
  # No pip installation is required because sqlite3 ships with Python 3.
@@ -14,10 +14,10 @@ requests==2.31.0
14
  # python-dotenv loads environment variables from a .env file, simplifying
15
  # configuration management for different environments (development, staging,
16
  # production) without hardcoding secrets in the codebase.
17
- python-dotenv==1.0.1
18
  # markdown converts project specifications into Markdown-formatted text for
19
  # exports and previews inside the application and supporting services.
20
- markdown==3.5.2
21
  # jinja2 renders HTML and Markdown export templates with dynamic content,
22
  # allowing flexible formatting of generated specification documents.
23
- jinja2==3.1.3
 
3
  #
4
  # gradio powers the web-based interface that enables users to interact with
5
  # AI specification tools directly from the browser with minimal boilerplate.
6
+ gradio==5.46.1
7
  # requests provides a simple yet powerful HTTP client for calling external AI
8
  # services that do not have dedicated SDKs, ensuring consistent API handling.
9
+ requests==2.32.5
10
  # sqlite3 is part of the Python standard library and powers lightweight local
11
  # storage for specifications; the entry here serves as documentation only.
12
  # No pip installation is required because sqlite3 ships with Python 3.
 
14
  # python-dotenv loads environment variables from a .env file, simplifying
15
  # configuration management for different environments (development, staging,
16
  # production) without hardcoding secrets in the codebase.
17
+ python-dotenv==1.1.1
18
  # markdown converts project specifications into Markdown-formatted text for
19
  # exports and previews inside the application and supporting services.
20
+ markdown==3.9
21
  # jinja2 renders HTML and Markdown export templates with dynamic content,
22
  # allowing flexible formatting of generated specification documents.
23
+ jinja2==3.1.6