"""
DTO Confluence Automation - Event-driven report generation.

Listens to NATS events and automatically creates post-run reports in Confluence.
"""
|
|
| import asyncio |
| import json |
| from typing import Dict, Any, Optional, List |
| from datetime import datetime |
| from nats.aio.client import Client as NATS |
| from confluence_client import DTOConfluenceClient |
|
|
class DTOConfluenceAutomation:
    """Event-driven Confluence report generation for DTO runs.

    Subscribes to NATS subjects under ``dto.events.>``, accumulates per-run
    state from lifecycle events in ``run_cache``, and creates/updates
    post-run report pages in Confluence via ``DTOConfluenceClient`` when a
    run completes.
    """

    def __init__(self, nats_servers: Optional[List[str]] = None):
        """Initialize NATS/Confluence clients and in-memory caches.

        Args:
            nats_servers: NATS server URLs. Defaults to the local server.
                (The previous signature used a mutable list literal as the
                default, which is shared across instances — fixed here.)
        """
        self.nats_servers: List[str] = (
            nats_servers if nats_servers is not None else ["nats://localhost:4222"]
        )
        self.nats_client = NATS()
        self.confluence_client = DTOConfluenceClient()
        # run_id -> accumulated event data; entries are removed once the
        # final report has been generated (see handle_run_completed).
        self.run_cache: Dict[str, Dict[str, Any]] = {}
        # run_id -> Confluence page id, used for post-report updates
        # (e.g. lineage information arriving after RUN_COMPLETED).
        self.report_cache: Dict[str, str] = {}

    async def connect(self) -> bool:
        """Connect to NATS and verify Confluence availability.

        Returns:
            True when NATS is reachable. An unreachable Confluence is
            tolerated (monitoring-only mode); a NATS failure is not.
        """
        try:
            await self.nats_client.connect(servers=self.nats_servers)
            print("✅ Connected to NATS for Confluence automation")

            if not self.confluence_client.test_connection():
                print("⚠️ Confluence not available - running in monitoring mode only")

            return True

        except Exception as e:
            print(f"❌ Confluence automation connection failed: {e}")
            return False

    async def handle_run_planned(self, event_data: Dict[str, Any]) -> None:
        """Handle RUN_PLANNED event - cache run data for the final report."""
        run_id = event_data.get('run_id')
        if not run_id:
            # Without a run id there is nothing to correlate later events to.
            print("⚠️ RUN_PLANNED event without run_id - ignoring")
            return

        self.run_cache[run_id] = {
            'run_id': run_id,
            'job_id': event_data.get('job_id'),
            'data_class': event_data.get('data_class'),
            'environment': event_data.get('environment'),
            'manifest_path': event_data.get('manifest_path'),
            'initiated_by': event_data.get('initiated_by'),
            'start_time': event_data.get('timestamp'),
            'data_size_bytes': event_data.get('data_size_bytes'),
            'estimated_duration': event_data.get('estimated_duration'),
        }

        print(f"📋 Cached run data for report generation: {run_id}")

    async def handle_transfer_started(self, event_data: Dict[str, Any]) -> None:
        """Handle TRANSFER_STARTED event - update cached run data."""
        run_id = event_data.get('run_id')

        if run_id in self.run_cache:
            self.run_cache[run_id].update({
                'source_host': event_data.get('source_host'),
                'target_host': event_data.get('target_host'),
                'transfer_method': event_data.get('transfer_method'),
                'transfer_start_time': event_data.get('timestamp'),
            })

            print(f"🔄 Updated transfer details for report: {run_id}")

    async def handle_validation_passed(self, event_data: Dict[str, Any]) -> None:
        """Handle VALIDATION_PASSED event - cache validation results."""
        run_id = event_data.get('run_id')

        if run_id in self.run_cache:
            self.run_cache[run_id]['validation_results'] = event_data.get('validation_results', {})
            print(f"📊 Cached validation results for report: {run_id}")

    async def handle_slo_breach(self, event_data: Dict[str, Any]) -> None:
        """Handle SLO_BREACH event - record the breach as a run issue."""
        run_id = event_data.get('run_id')

        if run_id in self.run_cache:
            issue = {
                'type': 'SLO_BREACH',
                'sli_name': event_data.get('sli_name'),
                'expected_value': event_data.get('expected_value'),
                'actual_value': event_data.get('actual_value'),
                'breach_duration': event_data.get('breach_duration_seconds'),
                'timestamp': event_data.get('timestamp'),
            }
            self.run_cache[run_id].setdefault('issues', []).append(issue)
            print(f"⚠️ Added SLO breach to report data: {run_id}")

    async def handle_run_completed(self, event_data: Dict[str, Any]) -> None:
        """Handle RUN_COMPLETED event - generate the final Confluence report.

        Builds the report from the cached run data plus the completion
        event, publishes a CONFLUENCE_REPORT_GENERATED event, attaches any
        artifacts, and always evicts the run from the cache afterwards.
        """
        run_id = event_data.get('run_id')
        final_status = event_data.get('final_status')

        if run_id not in self.run_cache:
            print(f"⚠️ No cached data for run: {run_id}")
            return

        print(f"📄 Generating Confluence report for completed run: {run_id}")

        try:
            run_data = self.run_cache[run_id].copy()
            run_data.update({
                'final_status': final_status,
                'end_time': event_data.get('timestamp'),
                'total_duration_seconds': event_data.get('total_duration_seconds'),
                'error_message': event_data.get('error_message'),
            })

            # 'data_size_bytes' may be cached as None (absent in the planned
            # event); `or 0` prevents a TypeError in the division below.
            metrics = {
                'average_throughput_mbps': event_data.get('average_throughput_mbps', 0),
                'total_duration_seconds': event_data.get('total_duration_seconds', 0),
                'data_size_gb': (run_data.get('data_size_bytes') or 0) / (1024 ** 3),
                'transfer_method': run_data.get('transfer_method', 'unknown'),
                'source_host': run_data.get('source_host', 'unknown'),
                'target_host': run_data.get('target_host', 'unknown'),
                'validation_results': run_data.get('validation_results', {}),
                'artifacts': event_data.get('artifacts', []),
                'errors': self.extract_errors_from_run(run_data),
                'issues': run_data.get('issues', []),
            }

            report_url = self.confluence_client.create_run_report(run_data, metrics)

            if report_url:
                # Remember the page id so later events (e.g. lineage) can
                # update the same report page.
                page_id = self.extract_page_id_from_url(report_url)
                if page_id:
                    self.report_cache[run_id] = page_id

                print(f"✅ Generated Confluence report: {report_url}")

                report_event = {
                    'event_id': f"confluence-report-{run_id}",
                    'event_type': 'CONFLUENCE_REPORT_GENERATED',
                    'timestamp': datetime.now().isoformat(),
                    'run_id': run_id,
                    'report_url': report_url,
                    'page_id': page_id,
                }

                await self.nats_client.publish(
                    "dto.events.confluence.report_generated",
                    json.dumps(report_event).encode()
                )

                await self.attach_artifacts_to_report(page_id, metrics.get('artifacts', []))

            else:
                print(f"❌ Failed to generate report for run: {run_id}")

        except Exception as e:
            print(f"❌ Error generating report: {e}")
        finally:
            # Evict the run regardless of outcome to keep the cache bounded.
            self.run_cache.pop(run_id, None)

    async def handle_lineage_completed(self, event_data: Dict[str, Any]) -> None:
        """Handle LINEAGE_COMPLETED event - update the report with lineage info."""
        run_id = event_data.get('run_id')
        page_id = self.report_cache.get(run_id)

        if not page_id:
            print(f"ℹ️ No report page found for lineage update: {run_id}")
            return

        try:
            lineage_metrics = {
                'lineage_tracking': 'Completed',
                'integrity_validation': 'Passed',
                'lineage_timestamp': event_data.get('timestamp'),
            }

            integrity_report = event_data.get('integrity_report', {})
            if integrity_report:
                lineage_metrics.update({
                    'datasets_tracked': integrity_report.get('vertex_counts', {}).get('datasets', 0),
                    'lineage_edges': integrity_report.get('edge_counts', {}).get('derived_from', 0),
                })

            success = self.confluence_client.update_run_report(page_id, lineage_metrics)

            if success:
                print(f"✅ Updated report with lineage info: {run_id}")
            else:
                print(f"❌ Failed to update report with lineage: {run_id}")

        except Exception as e:
            print(f"❌ Error updating report with lineage: {e}")

    def extract_errors_from_run(self, run_data: Dict[str, Any]) -> List[str]:
        """Extract human-readable error strings from cached run data."""
        errors = []

        for issue in run_data.get('issues', []):
            if issue.get('type') == 'SLO_BREACH':
                errors.append(
                    f"SLO breach: {issue['sli_name']} - "
                    f"expected {issue['expected_value']}, got {issue['actual_value']}"
                )

        if run_data.get('error_message'):
            errors.append(run_data['error_message'])

        return errors

    def extract_page_id_from_url(self, url: str) -> Optional[str]:
        """Extract the page ID from a Confluence URL, or None if not found.

        Expects URLs of the form ``.../pages/<page_id>/<title>``.
        """
        try:
            if '/pages/' in url:
                parts = url.split('/pages/')
                if len(parts) > 1:
                    return parts[1].split('/')[0]
        except Exception:
            pass
        return None

    async def attach_artifacts_to_report(self, page_id: str, artifacts: List[str]) -> None:
        """Attach artifact files (smaller than 10 MB) to a Confluence page.

        Best-effort: failures on individual artifacts are logged, not raised.
        """
        if not page_id or not artifacts:
            return

        from pathlib import Path  # hoisted out of the loop

        max_size_bytes = 10 * 1024 * 1024  # Confluence attachment size cap we enforce

        try:
            for artifact_path in artifacts:
                try:
                    artifact_file = Path(artifact_path)
                    if artifact_file.exists() and artifact_file.stat().st_size < max_size_bytes:
                        self.confluence_client.attach_file_to_page(
                            page_id,
                            str(artifact_file),
                            f"DTO transfer artifact: {artifact_file.name}"
                        )
                except Exception as e:
                    print(f"⚠️ Could not attach artifact {artifact_path}: {e}")

        except Exception as e:
            print(f"❌ Error attaching artifacts: {e}")

    async def generate_summary_report(self) -> None:
        """Generate weekly/monthly summary reports (placeholder implementation)."""
        try:
            print("📊 Generating summary report...")
            # TODO: aggregate run history and publish a summary page.
            print("✅ Summary report generation completed")

        except Exception as e:
            print(f"❌ Error generating summary report: {e}")

    async def process_event(self, msg) -> None:
        """Process an incoming NATS message and dispatch to the event handler."""
        try:
            event_data = json.loads(msg.data.decode())
            event_type = event_data.get('event_type')

            # Dispatch table: event type -> coroutine handler.
            handlers = {
                'RUN_PLANNED': self.handle_run_planned,
                'TRANSFER_STARTED': self.handle_transfer_started,
                'VALIDATION_PASSED': self.handle_validation_passed,
                'SLO_BREACH': self.handle_slo_breach,
                'RUN_COMPLETED': self.handle_run_completed,
                'LINEAGE_COMPLETED': self.handle_lineage_completed,
            }

            handler = handlers.get(event_type)
            if handler:
                await handler(event_data)
            else:
                print(f"ℹ️ Confluence automation ignoring event: {event_type}")

        except Exception as e:
            print(f"❌ Error processing Confluence event: {e}")

    async def start_automation(self) -> None:
        """Subscribe to all DTO events and run until interrupted."""
        try:
            await self.nats_client.subscribe("dto.events.>", cb=self.process_event)
            print("✅ Started Confluence automation - listening for DTO events")

            # Keep the coroutine alive; handlers run via the NATS callback.
            while True:
                await asyncio.sleep(1)

        except KeyboardInterrupt:
            print("\n🛑 Stopping Confluence automation...")
        except Exception as e:
            print(f"❌ Confluence automation error: {e}")
        finally:
            await self.nats_client.close()
|
|
| |
async def main():
    """Entry point: connect and run the automation loop until interrupted."""
    automation = DTOConfluenceAutomation()

    if await automation.connect():
        print("🚀 DTO Confluence Automation started")
        print("Monitoring events: RUN_PLANNED, TRANSFER_STARTED, VALIDATION_PASSED, SLO_BREACH, RUN_COMPLETED")
        print("Generating automated post-run reports in Confluence")
        print("Press Ctrl+C to stop\n")

        await automation.start_automation()
    else:
        print("❌ Failed to start Confluence automation")
|
|
# Script entry point: start the async event loop and run the automation.
if __name__ == "__main__":
    asyncio.run(main())