# adaptai/platform/dataops/dto/integrations/confluence_automation.py
# Uploaded by ADAPT-Chase using the upload-large-folder tool (commit fd357f4, verified)
#!/usr/bin/env python3
"""
DTO Confluence Automation - Event-driven report generation
Listens to NATS events and automatically creates post-run reports in Confluence
"""
import asyncio
import json
from typing import Dict, Any, Optional, List
from datetime import datetime
from nats.aio.client import Client as NATS
from confluence_client import DTOConfluenceClient
class DTOConfluenceAutomation:
    """Event-driven Confluence report generator for DTO runs.

    Subscribes to DTO lifecycle events on NATS, accumulates per-run state in
    memory, and automatically creates (and later updates) post-run report
    pages in Confluence. Confluence being unavailable is non-fatal: the
    service then runs in monitoring-only mode.
    """

    def __init__(self, nats_servers: Optional[List[str]] = None):
        """Initialize NATS/Confluence clients and in-memory caches.

        Args:
            nats_servers: NATS server URLs. Defaults to a single local server.
        """
        # Default is materialized here rather than in the signature to avoid
        # the shared mutable-default-argument pitfall.
        self.nats_servers = nats_servers if nats_servers is not None else ["nats://localhost:4222"]
        self.nats_client = NATS()
        self.confluence_client = DTOConfluenceClient()
        self.run_cache: Dict[str, Dict[str, Any]] = {}  # Cache run data for report generation
        self.report_cache: Dict[str, str] = {}  # Cache report page IDs for updates

    async def connect(self) -> bool:
        """Connect to NATS and probe Confluence.

        Returns:
            True when the NATS connection succeeds (an unreachable Confluence
            only degrades to monitoring mode); False when NATS fails.
        """
        try:
            # Connect to NATS
            await self.nats_client.connect(servers=self.nats_servers)
            print("✅ Connected to NATS for Confluence automation")
            # Test Confluence connection - failure here is deliberately non-fatal
            if not self.confluence_client.test_connection():
                print("⚠️ Confluence not available - running in monitoring mode only")
            return True
        except Exception as e:
            print(f"❌ Confluence automation connection failed: {e}")
            return False

    async def handle_run_planned(self, event_data: Dict[str, Any]) -> None:
        """Handle RUN_PLANNED event - cache run data for report"""
        run_id = event_data.get('run_id')
        if not run_id:
            # Without a run_id, later events could never be correlated to this run.
            print("⚠️ RUN_PLANNED event missing run_id - ignoring")
            return
        # Cache run data for later report generation
        self.run_cache[run_id] = {
            'run_id': run_id,
            'job_id': event_data.get('job_id'),
            'data_class': event_data.get('data_class'),
            'environment': event_data.get('environment'),
            'manifest_path': event_data.get('manifest_path'),
            'initiated_by': event_data.get('initiated_by'),
            'start_time': event_data.get('timestamp'),
            'data_size_bytes': event_data.get('data_size_bytes'),
            'estimated_duration': event_data.get('estimated_duration')
        }
        print(f"📊 Cached run data for report generation: {run_id}")

    async def handle_transfer_started(self, event_data: Dict[str, Any]) -> None:
        """Handle TRANSFER_STARTED event - update cached run data"""
        run_id = event_data.get('run_id')
        if run_id in self.run_cache:
            self.run_cache[run_id].update({
                'source_host': event_data.get('source_host'),
                'target_host': event_data.get('target_host'),
                'transfer_method': event_data.get('transfer_method'),
                'transfer_start_time': event_data.get('timestamp')
            })
            print(f"📊 Updated transfer details for report: {run_id}")

    async def handle_validation_passed(self, event_data: Dict[str, Any]) -> None:
        """Handle VALIDATION_PASSED event - cache validation results"""
        run_id = event_data.get('run_id')
        if run_id in self.run_cache:
            self.run_cache[run_id]['validation_results'] = event_data.get('validation_results', {})
            print(f"📊 Cached validation results for report: {run_id}")

    async def handle_slo_breach(self, event_data: Dict[str, Any]) -> None:
        """Handle SLO_BREACH event - add to run issues"""
        run_id = event_data.get('run_id')
        if run_id in self.run_cache:
            # Lazily create the issues list on the first breach for this run
            issues = self.run_cache[run_id].setdefault('issues', [])
            issues.append({
                'type': 'SLO_BREACH',
                'sli_name': event_data.get('sli_name'),
                'expected_value': event_data.get('expected_value'),
                'actual_value': event_data.get('actual_value'),
                'breach_duration': event_data.get('breach_duration_seconds'),
                'timestamp': event_data.get('timestamp')
            })
            print(f"⚠️ Added SLO breach to report data: {run_id}")

    async def handle_run_completed(self, event_data: Dict[str, Any]) -> None:
        """Handle RUN_COMPLETED event - generate final report.

        Builds the consolidated run/metrics payloads, asks the Confluence
        client to create the report page, publishes a
        CONFLUENCE_REPORT_GENERATED event, and attaches artifacts. The cached
        run data is always dropped afterwards, success or not.
        """
        run_id = event_data.get('run_id')
        final_status = event_data.get('final_status')
        if run_id not in self.run_cache:
            print(f"⚠️ No cached data for run: {run_id}")
            return
        print(f"📄 Generating Confluence report for completed run: {run_id}")
        try:
            # Prepare comprehensive run data
            run_data = self.run_cache[run_id].copy()
            run_data.update({
                'final_status': final_status,
                'end_time': event_data.get('timestamp'),
                'total_duration_seconds': event_data.get('total_duration_seconds'),
                'error_message': event_data.get('error_message')
            })
            # Prepare metrics for report. `data_size_bytes` is always present
            # in the cache but may be None, so `or 0` guards the division.
            metrics = {
                'average_throughput_mbps': event_data.get('average_throughput_mbps', 0),
                'total_duration_seconds': event_data.get('total_duration_seconds', 0),
                'data_size_gb': (run_data.get('data_size_bytes') or 0) / (1024**3),
                'transfer_method': run_data.get('transfer_method', 'unknown'),
                'source_host': run_data.get('source_host', 'unknown'),
                'target_host': run_data.get('target_host', 'unknown'),
                'validation_results': run_data.get('validation_results', {}),
                'artifacts': event_data.get('artifacts', []),
                'errors': self.extract_errors_from_run(run_data),
                'issues': run_data.get('issues', [])
            }
            # Generate report
            report_url = self.confluence_client.create_run_report(run_data, metrics)
            if report_url:
                # Extract page ID from URL for future updates (lineage, attachments)
                page_id = self.extract_page_id_from_url(report_url)
                if page_id:
                    self.report_cache[run_id] = page_id
                print(f"✅ Generated Confluence report: {report_url}")
                # Publish report completion event
                report_event = {
                    'event_id': f"confluence-report-{run_id}",
                    'event_type': 'CONFLUENCE_REPORT_GENERATED',
                    'timestamp': datetime.now().isoformat(),
                    'run_id': run_id,
                    'report_url': report_url,
                    'page_id': page_id
                }
                await self.nats_client.publish(
                    "dto.events.confluence.report_generated",
                    json.dumps(report_event).encode()
                )
                # Attach log files if available
                await self.attach_artifacts_to_report(page_id, metrics.get('artifacts', []))
            else:
                print(f"❌ Failed to generate report for run: {run_id}")
        except Exception as e:
            print(f"❌ Error generating report: {e}")
        finally:
            # Clean up cache regardless of report outcome
            self.run_cache.pop(run_id, None)

    async def handle_lineage_completed(self, event_data: Dict[str, Any]) -> None:
        """Handle LINEAGE_COMPLETED event - update report with lineage info"""
        run_id = event_data.get('run_id')
        page_id = self.report_cache.get(run_id)
        if not page_id:
            print(f"ℹ️ No report page found for lineage update: {run_id}")
            return
        try:
            # Update report with lineage information
            lineage_metrics = {
                'lineage_tracking': 'Completed',
                'integrity_validation': 'Passed',
                'lineage_timestamp': event_data.get('timestamp')
            }
            # Add integrity report if available
            integrity_report = event_data.get('integrity_report', {})
            if integrity_report:
                lineage_metrics.update({
                    'datasets_tracked': integrity_report.get('vertex_counts', {}).get('datasets', 0),
                    'lineage_edges': integrity_report.get('edge_counts', {}).get('derived_from', 0)
                })
            success = self.confluence_client.update_run_report(page_id, lineage_metrics)
            if success:
                print(f"✅ Updated report with lineage info: {run_id}")
            else:
                print(f"❌ Failed to update report with lineage: {run_id}")
        except Exception as e:
            print(f"❌ Error updating report with lineage: {e}")

    def extract_errors_from_run(self, run_data: Dict[str, Any]) -> List[str]:
        """Extract human-readable error strings from cached run data.

        Combines formatted SLO-breach issues with the run's final
        error_message (if any), in that order.
        """
        errors = []
        # Add errors from issues
        for issue in run_data.get('issues', []):
            if issue['type'] == 'SLO_BREACH':
                errors.append(f"SLO breach: {issue['sli_name']} - expected {issue['expected_value']}, got {issue['actual_value']}")
        # Add final error message if available
        if run_data.get('error_message'):
            errors.append(run_data['error_message'])
        return errors

    def extract_page_id_from_url(self, url: str) -> Optional[str]:
        """Extract the numeric page ID from a Confluence page URL.

        Returns None when the URL does not contain a '/pages/<id>/' segment.
        """
        try:
            # URL format: https://domain.atlassian.net/wiki/spaces/SPACE/pages/123456/Page+Title
            if '/pages/' in url:
                parts = url.split('/pages/')
                if len(parts) > 1:
                    return parts[1].split('/')[0]
        except Exception:
            pass
        return None

    async def attach_artifacts_to_report(self, page_id: str, artifacts: List[str]) -> None:
        """Attach artifact files to a Confluence report page.

        Skips missing files and anything >= 10MB (Confluence upload limits);
        a failure on one artifact does not stop the others.
        """
        if not page_id or not artifacts:
            return
        # Import hoisted out of the loop - it is loop-invariant.
        from pathlib import Path
        try:
            for artifact_path in artifacts:
                # Only attach small files (< 10MB) to avoid Confluence limits
                try:
                    artifact_file = Path(artifact_path)
                    if artifact_file.exists() and artifact_file.stat().st_size < 10 * 1024 * 1024:
                        self.confluence_client.attach_file_to_page(
                            page_id,
                            str(artifact_file),
                            f"DTO transfer artifact: {artifact_file.name}"
                        )
                except Exception as e:
                    print(f"⚠️ Could not attach artifact {artifact_path}: {e}")
        except Exception as e:
            print(f"❌ Error attaching artifacts: {e}")

    async def generate_summary_report(self) -> None:
        """Generate weekly/monthly summary reports (placeholder).

        TODO: aggregate metrics across recent completed runs and create a
        summary page; currently only logs.
        """
        try:
            # This would be called periodically to generate summary reports
            # Implementation would aggregate metrics across multiple runs
            print("📊 Generating summary report...")
            # Get recent completed runs
            # Aggregate performance metrics
            # Create summary page
            print("✅ Summary report generation completed")
        except Exception as e:
            print(f"❌ Error generating summary report: {e}")

    async def process_event(self, msg) -> None:
        """Process an incoming NATS message and dispatch it by event_type.

        Unknown event types are logged and ignored; any handler error is
        caught so one bad event cannot stop the subscription.
        """
        try:
            event_data = json.loads(msg.data.decode())
            event_type = event_data.get('event_type')
            # Route to appropriate handler
            handlers = {
                'RUN_PLANNED': self.handle_run_planned,
                'TRANSFER_STARTED': self.handle_transfer_started,
                'VALIDATION_PASSED': self.handle_validation_passed,
                'SLO_BREACH': self.handle_slo_breach,
                'RUN_COMPLETED': self.handle_run_completed,
                'LINEAGE_COMPLETED': self.handle_lineage_completed
            }
            handler = handlers.get(event_type)
            if handler:
                await handler(event_data)
            else:
                # Log other events for debugging
                print(f"ℹ️ Confluence automation ignoring event: {event_type}")
        except Exception as e:
            print(f"❌ Error processing Confluence event: {e}")

    async def start_automation(self) -> None:
        """Subscribe to all DTO events and run until interrupted."""
        try:
            # Subscribe to DTO events (wildcard covers the whole dto.events tree)
            await self.nats_client.subscribe("dto.events.>", cb=self.process_event)
            print("✅ Started Confluence automation - listening for DTO events")
            # Keep running
            while True:
                await asyncio.sleep(1)
        except KeyboardInterrupt:
            print("\n🛑 Stopping Confluence automation...")
        except Exception as e:
            print(f"❌ Confluence automation error: {e}")
        finally:
            await self.nats_client.close()
# CLI entry point
async def main():
    """Run the DTO Confluence automation service until interrupted.

    Connects NATS/Confluence, then blocks in the event-subscription loop.
    Exits early with an error message when the connection fails.
    """
    automation = DTOConfluenceAutomation()
    # Guard clause: nothing to do without a working NATS connection.
    if not await automation.connect():
        print("❌ Failed to start Confluence automation")
        return
    print("📄 DTO Confluence Automation started")
    print("Monitoring events: RUN_PLANNED, TRANSFER_STARTED, VALIDATION_PASSED, SLO_BREACH, RUN_COMPLETED")
    print("Generating automated post-run reports in Confluence")
    print("Press Ctrl+C to stop\n")
    await automation.start_automation()


if __name__ == "__main__":
    asyncio.run(main())