# NOTE: The text "Spaces: / Paused / Paused" that preceded this module was a
# hosting-page capture artifact (Hugging Face Spaces status banner), not part
# of the source; it has been converted to this comment so the file parses.
| """Tab 2: Results + Chat. | |
| Display generated assessment results with integrated chat for Q&A and modifications. | |
| """ | |
| import json | |
| import gradio as gr | |
| from typing import Any, Optional, TYPE_CHECKING | |
| from datetime import datetime | |
| import tempfile | |
| from ui.state import SessionState | |
| from ui.components import create_stats_dict, create_progress_html, image_store | |
| # Lazy imports to avoid chromadb dependency at module load time | |
| # These are imported when generate_assessment() is called | |
| if TYPE_CHECKING: | |
| from pipeline import FDAMPipeline, PipelineResult, PDFGenerator | |
def create_tab() -> dict[str, Any]:
    """Create Results + Chat tab UI components.

    Builds, top to bottom: generation controls, results display (annotated
    image gallery, stats JSON, scope-of-work markdown), download widgets,
    a chat interface with quick-action buttons, and navigation buttons.
    Must be called inside an active Gradio Blocks/Tab context so the
    components attach to the surrounding layout.

    Returns:
        Dictionary of component references for event wiring.
    """
    # --- Processing Section ---
    with gr.Row():
        generate_btn = gr.Button(
            "Generate Assessment",
            variant="primary",
            scale=2,
            elem_id="generate_btn",
        )
        processing_status = gr.Textbox(
            label="Status",
            value="Ready",
            interactive=False,
            elem_id="processing_status",
        )
    # Pipeline progress bar; populated by generate_assessment().
    progress_html = gr.HTML(
        value="",
        elem_id="progress_html",
    )
    # --- Results Display ---
    gr.Markdown("---")
    with gr.Row():
        with gr.Column(scale=2):
            gr.Markdown("#### Annotated Images")
            annotated_gallery = gr.Gallery(
                label="AI-Analyzed Images",
                columns=2,
                height="auto",
                elem_id="annotated_gallery",
            )
        with gr.Column(scale=1):
            gr.Markdown("#### Assessment Summary")
            stats_output = gr.JSON(
                label="Statistics",
                elem_id="stats_output",
            )
    gr.Markdown("---")
    gr.Markdown("### Cleaning Specification / Scope of Work")
    # Rendered markdown of the generated SOW document.
    sow_output = gr.Markdown(
        value="*Generate an assessment to see results here.*",
        elem_id="sow_output",
    )
    # --- Downloads ---
    gr.Markdown("#### Downloads")
    with gr.Row():
        download_md = gr.File(
            label="Download Markdown (.md)",
            elem_id="download_md",
        )
        download_pdf = gr.File(
            label="Download PDF (.pdf)",
            elem_id="download_pdf",
        )
    # --- Chat Interface ---
    gr.Markdown("---")
    gr.Markdown("### Ask Questions or Request Changes")
    gr.Markdown(
        "*Chat with the AI about the assessment results or request document modifications.*"
    )
    chatbot = gr.Chatbot(
        label="Chat",
        # type parameter removed in Gradio 6.x - messages format is default
        height=300,
        elem_id="chatbot",
    )
    with gr.Row():
        chat_input = gr.Textbox(
            label="Message",
            placeholder="Ask a question or request a change...",
            scale=4,
            elem_id="chat_input",
        )
        chat_send_btn = gr.Button("Send", variant="primary", scale=1)
    # Quick action buttons: pre-canned prompts wired up by the caller.
    with gr.Row():
        gr.Markdown("**Quick Actions:**")
    with gr.Row():
        quick_explain_zones = gr.Button("Explain zone classifications", size="sm")
        quick_explain_materials = gr.Button("Explain detected materials", size="sm")
        quick_sampling = gr.Button("Explain sampling plan", size="sm")
        quick_add_note = gr.Button("Add a note to document", size="sm")
    # Navigation
    with gr.Row():
        back_btn = gr.Button("← Back to Input")
        regenerate_btn = gr.Button(
            "Regenerate Assessment",
            variant="secondary",
        )
        reset_doc_btn = gr.Button(
            "Reset Document",
            variant="secondary",
        )
    # Every interactive component is returned by a stable key so the parent
    # app module can wire events without importing component objects directly.
    return {
        # Generation controls
        "generate_btn": generate_btn,
        "processing_status": processing_status,
        "progress_html": progress_html,
        # Results display
        "annotated_gallery": annotated_gallery,
        "stats_output": stats_output,
        "sow_output": sow_output,
        # Downloads
        "download_md": download_md,
        "download_pdf": download_pdf,
        # Chat interface
        "chatbot": chatbot,
        "chat_input": chat_input,
        "chat_send_btn": chat_send_btn,
        # Quick actions
        "quick_explain_zones": quick_explain_zones,
        "quick_explain_materials": quick_explain_materials,
        "quick_sampling": quick_sampling,
        "quick_add_note": quick_add_note,
        # Navigation
        "back_btn": back_btn,
        "regenerate_btn": regenerate_btn,
        "reset_doc_btn": reset_doc_btn,
    }
def check_preflight(session: SessionState) -> str:
    """Build an HTML banner reporting whether an assessment can be generated.

    Combines the session's own validation with a check that every uploaded
    image is still present in the in-memory image store.

    Returns:
        HTML string with preflight status.
    """
    ok, problems = session.can_generate()

    # Images live only in memory; any that dropped out must be re-uploaded.
    expected = [img.id for img in session.images]
    missing = image_store.get_missing_ids(expected)
    if missing:
        problems.append(f"{len(missing)} image(s) need to be re-uploaded")
        ok = False

    if not ok:
        items = "".join(f"<li>{e}</li>" for e in problems)
        return f"""
<div style="background: #ffebee; border: 1px solid #ef5350; border-radius: 4px; padding: 15px;">
    <strong style="color: #c62828;">Cannot Generate - Please Fix:</strong>
    <ul style="margin: 10px 0 0 0; padding-left: 20px; color: #c62828;">{items}</ul>
</div>
"""

    stats = create_stats_dict(session)
    return f"""
<div style="background: #e8f5e9; border: 1px solid #66bb6a; border-radius: 4px; padding: 15px;">
    <strong style="color: #2e7d32;">✓ Ready to Generate</strong>
    <div style="margin-top: 10px; color: #333;">
        <strong>Room:</strong> {stats['room_name']}<br>
        <strong>Images:</strong> {stats['images']}<br>
        <strong>Total Area:</strong> {stats['total_floor_area_sf']} SF
    </div>
</div>
"""
def generate_assessment(
    session: SessionState,
    progress: Optional[gr.Progress] = None,
) -> tuple[SessionState, str, str, list[tuple], dict, str, Optional[str], Optional[str], list[dict]]:
    """Generate the assessment using the FDAM pipeline.

    Args:
        session: Current session state; validated by the pipeline itself.
        progress: Optional Gradio progress tracker fed by pipeline callbacks.

    Returns:
        Tuple of (session, status, progress_html, annotated_images,
        stats, sow_markdown, md_file_path, pdf_file_path, chat_history).
        On pipeline failure the markdown slot carries the error text and the
        file paths are None.
    """
    # Lazy import to avoid chromadb dependency at module load
    from pipeline import FDAMPipeline, PipelineResult, PDFGenerator

    pipeline = FDAMPipeline()

    # Bridge pipeline progress events to the Gradio progress bar, if provided.
    def progress_callback(prog):
        if progress:
            progress(prog.percent, desc=prog.message)

    # Execute pipeline
    result: PipelineResult = pipeline.execute(
        session=session,
        progress_callback=progress_callback,
    )

    # Failure: surface the errors in the markdown pane and clear everything else.
    if not result.success:
        error_msg = "**Error:** Please fix the following before generating:\n\n"
        error_msg += "\n".join(f"- {e}" for e in result.errors)
        return (
            result.session,
            "Error: Cannot generate",
            "",
            [],
            {},
            error_msg,
            None,
            None,
            [],  # Clear chat on error
        )

    # Generate stats dictionary for UI
    stats = pipeline.generate_stats_dict(result)
    sow_markdown = result.document.markdown if result.document else ""

    # Keep both a working copy (chat may modify it) and a pristine original
    # so "Reset Document" can restore it.
    session.generated_document = sow_markdown
    session.original_document = sow_markdown
    # Serializable subset of PipelineResult provides chat context later.
    session.pipeline_result_json = _serialize_pipeline_result(result)
    # A fresh generation invalidates any previous conversation.
    session.chat_history = []

    # Save markdown + PDF download files (best effort; failures become warnings).
    md_path = None
    pdf_path = None
    try:
        if sow_markdown:
            room_name_safe = session.room.name.replace(' ', '_') if session.room.name else "Room"
            # encoding="utf-8" is required: the document can contain non-ASCII
            # characters (e.g. "✓"), and the platform default encoding
            # (cp1252 on Windows) would raise UnicodeEncodeError.
            with tempfile.NamedTemporaryFile(
                mode='w',
                suffix='.md',
                delete=False,
                prefix=f"SOW_{room_name_safe}_",
                encoding='utf-8',
            ) as f:
                f.write(sow_markdown)
                md_path = f.name
            # Generate PDF
            pdf_generator = PDFGenerator()
            pdf_result = pdf_generator.generate_pdf(sow_markdown)
            if pdf_result.success:
                pdf_path = pdf_result.pdf_path
            else:
                result.warnings.append(f"PDF generation failed: {pdf_result.error_message}")
    except Exception as e:
        # Downloads are optional; record the failure so it shows in the status.
        result.warnings.append(f"Failed to save download files: {e}")
        print(f"Error saving files: {e}")

    # Reflect warning count in the status line.
    status = "Complete"
    if result.warnings:
        status = f"Complete ({len(result.warnings)} warnings)"

    session.has_results = True
    session.results_generated_at = datetime.now().isoformat()
    session.update_timestamp()

    return (
        session,
        status,
        create_progress_html(6, 6, f"Complete! ({result.execution_time_seconds:.1f}s)"),
        result.annotated_images,
        stats,
        sow_markdown,
        md_path,
        pdf_path,
        [],  # Reset chat history
    )
| def _serialize_pipeline_result(result: "PipelineResult") -> str: | |
| """Serialize PipelineResult to JSON, excluding non-serializable fields. | |
| Excludes: | |
| - annotated_images (contains PIL.Image objects) | |
| - session (complex SessionState object) | |
| - document (GeneratedDocument object) | |
| """ | |
| # Convert VisionResult dataclasses to dicts | |
| vision_results_dict = {} | |
| for img_id, vr in result.vision_results.items(): | |
| vision_results_dict[img_id] = { | |
| "zone": vr.zone, | |
| "condition": vr.condition, | |
| "materials": vr.materials, | |
| "bounding_boxes": vr.bounding_boxes, | |
| } | |
| # Convert SurfaceDisposition dataclasses to dicts | |
| dispositions_list = [] | |
| for disp in result.dispositions: | |
| dispositions_list.append({ | |
| "room_name": disp.room_name, | |
| "surface_type": disp.surface_type, | |
| "zone": disp.zone, | |
| "condition": disp.condition, | |
| "disposition": disp.disposition, | |
| "cleaning_method": disp.cleaning_method, | |
| "notes": disp.notes, | |
| }) | |
| serializable = { | |
| "success": result.success, | |
| "errors": result.errors, | |
| "warnings": result.warnings, | |
| "execution_time_seconds": result.execution_time_seconds, | |
| "vision_results": vision_results_dict, | |
| "dispositions": dispositions_list, | |
| "calculations": result.calculations, | |
| } | |
| return json.dumps(serializable, default=str) | |
def reset_document(session: SessionState) -> tuple[SessionState, str]:
    """Restore the working document to the originally generated version.

    If no original exists, the current document (or an empty string) is
    returned unchanged.
    """
    original = session.original_document
    if not original:
        # Nothing to restore; keep whatever is currently there.
        return session, session.generated_document or ""
    session.generated_document = original
    session.update_timestamp()
    return session, original
def regenerate_downloads(
    session: SessionState,
) -> tuple[Optional[str], Optional[str]]:
    """Regenerate download files from current document.

    Used after chat modifications to update downloads.

    Returns:
        Tuple of (markdown_path, pdf_path). Either may be None when there is
        no generated document or the corresponding file could not be produced.
    """
    sow_markdown = session.generated_document
    if not sow_markdown:
        return None, None

    md_path = None
    pdf_path = None
    try:
        room_name_safe = session.room.name.replace(' ', '_') if session.room.name else "Room"
        # encoding="utf-8" is required: the document can contain non-ASCII
        # characters, and the platform default encoding (cp1252 on Windows)
        # would raise UnicodeEncodeError.
        with tempfile.NamedTemporaryFile(
            mode='w',
            suffix='.md',
            delete=False,
            prefix=f"SOW_{room_name_safe}_",
            encoding='utf-8',
        ) as f:
            f.write(sow_markdown)
            md_path = f.name
        # Lazy import PDFGenerator (avoids chromadb dependency at module load).
        from pipeline import PDFGenerator
        pdf_generator = PDFGenerator()
        pdf_result = pdf_generator.generate_pdf(sow_markdown)
        if pdf_result.success:
            pdf_path = pdf_result.pdf_path
    except Exception as e:
        # Best effort: keep whatever was produced before the failure.
        print(f"Error regenerating files: {e}")
    return md_path, pdf_path