File size: 14,161 Bytes
fd357f4
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
#!/usr/bin/env python3
"""
DTO Confluence Automation - Event-driven report generation
Listens to NATS events and automatically creates post-run reports in Confluence
"""

import asyncio
import json
from typing import Dict, Any, Optional, List
from datetime import datetime
from nats.aio.client import Client as NATS
from confluence_client import DTOConfluenceClient

class DTOConfluenceAutomation:
    """Event-driven Confluence report generator for DTO transfer runs.

    Subscribes to DTO events on NATS, accumulates per-run details in an
    in-memory cache, and creates/updates Confluence report pages when a
    run completes.
    """

    def __init__(self, nats_servers: Optional[List[str]] = None):
        """Initialize NATS/Confluence clients and per-run caches.

        Args:
            nats_servers: NATS server URLs; defaults to a single local
                server. A ``None`` sentinel is used instead of a list
                default because a mutable default argument would be
                shared across every instance of this class.
        """
        self.nats_servers = nats_servers if nats_servers is not None else ["nats://localhost:4222"]
        self.nats_client = NATS()
        self.confluence_client = DTOConfluenceClient()
        self.run_cache: Dict[str, Dict[str, Any]] = {}  # run_id -> accumulated run data for report generation
        self.report_cache: Dict[str, str] = {}  # run_id -> Confluence page ID for later updates

    async def connect(self) -> bool:
        """Connect to NATS and verify Confluence availability.

        Returns:
            True when the NATS connection succeeds. A Confluence outage
            is non-fatal: the service keeps running in monitoring mode.
        """
        try:
            # Connect to NATS
            await self.nats_client.connect(servers=self.nats_servers)
            print("βœ… Connected to NATS for Confluence automation")

            # Test Confluence connection; failure is tolerated (monitoring mode)
            if not self.confluence_client.test_connection():
                print("⚠️ Confluence not available - running in monitoring mode only")

            return True

        except Exception as e:
            print(f"❌ Confluence automation connection failed: {e}")
            return False

    async def handle_run_planned(self, event_data: Dict[str, Any]):
        """Handle RUN_PLANNED event - cache run data for report"""
        run_id = event_data.get('run_id')

        # Cache run data for later report generation
        self.run_cache[run_id] = {
            'run_id': run_id,
            'job_id': event_data.get('job_id'),
            'data_class': event_data.get('data_class'),
            'environment': event_data.get('environment'),
            'manifest_path': event_data.get('manifest_path'),
            'initiated_by': event_data.get('initiated_by'),
            'start_time': event_data.get('timestamp'),
            'data_size_bytes': event_data.get('data_size_bytes'),
            'estimated_duration': event_data.get('estimated_duration')
        }

        print(f"πŸ“Š Cached run data for report generation: {run_id}")

    async def handle_transfer_started(self, event_data: Dict[str, Any]):
        """Handle TRANSFER_STARTED event - update cached run data"""
        run_id = event_data.get('run_id')

        # Silently ignore events for runs we never saw planned
        if run_id in self.run_cache:
            self.run_cache[run_id].update({
                'source_host': event_data.get('source_host'),
                'target_host': event_data.get('target_host'),
                'transfer_method': event_data.get('transfer_method'),
                'transfer_start_time': event_data.get('timestamp')
            })

            print(f"πŸ“Š Updated transfer details for report: {run_id}")

    async def handle_validation_passed(self, event_data: Dict[str, Any]):
        """Handle VALIDATION_PASSED event - cache validation results"""
        run_id = event_data.get('run_id')

        if run_id in self.run_cache:
            self.run_cache[run_id]['validation_results'] = event_data.get('validation_results', {})
            print(f"πŸ“Š Cached validation results for report: {run_id}")

    async def handle_slo_breach(self, event_data: Dict[str, Any]):
        """Handle SLO_BREACH event - add to run issues"""
        run_id = event_data.get('run_id')

        if run_id in self.run_cache:
            # Lazily create the issues list on first breach
            if 'issues' not in self.run_cache[run_id]:
                self.run_cache[run_id]['issues'] = []

            issue = {
                'type': 'SLO_BREACH',
                'sli_name': event_data.get('sli_name'),
                'expected_value': event_data.get('expected_value'),
                'actual_value': event_data.get('actual_value'),
                'breach_duration': event_data.get('breach_duration_seconds'),
                'timestamp': event_data.get('timestamp')
            }

            self.run_cache[run_id]['issues'].append(issue)
            print(f"⚠️ Added SLO breach to report data: {run_id}")

    async def handle_run_completed(self, event_data: Dict[str, Any]):
        """Handle RUN_COMPLETED event - generate final report.

        Builds the consolidated run record and metrics, asks the
        Confluence client to create the report page, publishes a
        CONFLUENCE_REPORT_GENERATED event, and attaches artifacts.
        The run's cache entry is always released afterwards.
        """
        run_id = event_data.get('run_id')
        final_status = event_data.get('final_status')

        if run_id not in self.run_cache:
            print(f"⚠️ No cached data for run: {run_id}")
            return

        print(f"πŸ“„ Generating Confluence report for completed run: {run_id}")

        try:
            # Prepare comprehensive run data (copy so the cache entry
            # isn't mutated if report generation fails midway)
            run_data = self.run_cache[run_id].copy()
            run_data.update({
                'final_status': final_status,
                'end_time': event_data.get('timestamp'),
                'total_duration_seconds': event_data.get('total_duration_seconds'),
                'error_message': event_data.get('error_message')
            })

            # Prepare metrics for report.
            # NOTE: 'data_size_bytes' may be present but None (it is
            # cached verbatim from RUN_PLANNED), so coalesce with `or 0`
            # instead of relying on dict.get's default.
            metrics = {
                'average_throughput_mbps': event_data.get('average_throughput_mbps', 0),
                'total_duration_seconds': event_data.get('total_duration_seconds', 0),
                'data_size_gb': (run_data.get('data_size_bytes') or 0) / (1024**3),
                'transfer_method': run_data.get('transfer_method', 'unknown'),
                'source_host': run_data.get('source_host', 'unknown'),
                'target_host': run_data.get('target_host', 'unknown'),
                'validation_results': run_data.get('validation_results', {}),
                'artifacts': event_data.get('artifacts', []),
                'errors': self.extract_errors_from_run(run_data),
                'issues': run_data.get('issues', [])
            }

            # Generate report
            report_url = self.confluence_client.create_run_report(run_data, metrics)

            if report_url:
                # Extract page ID from URL for future updates
                page_id = self.extract_page_id_from_url(report_url)
                if page_id:
                    self.report_cache[run_id] = page_id

                print(f"βœ… Generated Confluence report: {report_url}")

                # Publish report completion event
                report_event = {
                    'event_id': f"confluence-report-{run_id}",
                    'event_type': 'CONFLUENCE_REPORT_GENERATED',
                    'timestamp': datetime.now().isoformat(),
                    'run_id': run_id,
                    'report_url': report_url,
                    'page_id': page_id
                }

                await self.nats_client.publish(
                    "dto.events.confluence.report_generated",
                    json.dumps(report_event).encode()
                )

                # Attach log files if available
                await self.attach_artifacts_to_report(page_id, metrics.get('artifacts', []))

            else:
                print(f"❌ Failed to generate report for run: {run_id}")

        except Exception as e:
            print(f"❌ Error generating report: {e}")
        finally:
            # Clean up cache regardless of success so memory doesn't grow
            if run_id in self.run_cache:
                del self.run_cache[run_id]

    async def handle_lineage_completed(self, event_data: Dict[str, Any]):
        """Handle LINEAGE_COMPLETED event - update report with lineage info"""
        run_id = event_data.get('run_id')
        page_id = self.report_cache.get(run_id)

        if not page_id:
            print(f"ℹ️ No report page found for lineage update: {run_id}")
            return

        try:
            # Update report with lineage information
            lineage_metrics = {
                'lineage_tracking': 'Completed',
                'integrity_validation': 'Passed',
                'lineage_timestamp': event_data.get('timestamp')
            }

            # Add integrity report if available
            integrity_report = event_data.get('integrity_report', {})
            if integrity_report:
                lineage_metrics.update({
                    'datasets_tracked': integrity_report.get('vertex_counts', {}).get('datasets', 0),
                    'lineage_edges': integrity_report.get('edge_counts', {}).get('derived_from', 0)
                })

            success = self.confluence_client.update_run_report(page_id, lineage_metrics)

            if success:
                print(f"βœ… Updated report with lineage info: {run_id}")
            else:
                print(f"❌ Failed to update report with lineage: {run_id}")

        except Exception as e:
            print(f"❌ Error updating report with lineage: {e}")

    def extract_errors_from_run(self, run_data: Dict[str, Any]) -> List[str]:
        """Extract human-readable error strings from accumulated run data.

        Combines SLO-breach issues and the run's final error message (if
        any) into a flat list for the report's error section.
        """
        errors = []

        # Add errors from issues
        for issue in run_data.get('issues', []):
            if issue['type'] == 'SLO_BREACH':
                errors.append(f"SLO breach: {issue['sli_name']} - expected {issue['expected_value']}, got {issue['actual_value']}")

        # Add final error message if available
        if run_data.get('error_message'):
            errors.append(run_data['error_message'])

        return errors

    def extract_page_id_from_url(self, url: str) -> Optional[str]:
        """Extract the numeric page ID from a Confluence page URL.

        Returns None when the URL does not contain a '/pages/' segment.
        """
        try:
            # URL format: https://domain.atlassian.net/wiki/spaces/SPACE/pages/123456/Page+Title
            if '/pages/' in url:
                parts = url.split('/pages/')
                if len(parts) > 1:
                    page_part = parts[1].split('/')[0]
                    return page_part
        except Exception:
            pass
        return None

    async def attach_artifacts_to_report(self, page_id: str, artifacts: List[str]):
        """Attach artifact files to a Confluence report page.

        Skips missing files and anything >= 10MB (Confluence attachment
        limits); per-file failures are logged and do not stop the loop.
        """
        if not page_id or not artifacts:
            return

        # Hoisted out of the loop (was previously re-imported per artifact)
        from pathlib import Path

        try:
            for artifact_path in artifacts:
                # Only attach small files (< 10MB) to avoid Confluence limits
                try:
                    artifact_file = Path(artifact_path)

                    if artifact_file.exists() and artifact_file.stat().st_size < 10 * 1024 * 1024:
                        self.confluence_client.attach_file_to_page(
                            page_id,
                            str(artifact_file),
                            f"DTO transfer artifact: {artifact_file.name}"
                        )
                except Exception as e:
                    print(f"⚠️ Could not attach artifact {artifact_path}: {e}")

        except Exception as e:
            print(f"❌ Error attaching artifacts: {e}")

    async def generate_summary_report(self):
        """Generate weekly/monthly summary reports (placeholder).

        NOTE(review): aggregation is not implemented yet; this only
        logs progress messages.
        """
        try:
            # This would be called periodically to generate summary reports
            # Implementation would aggregate metrics across multiple runs
            print("πŸ“Š Generating summary report...")

            # Get recent completed runs
            # Aggregate performance metrics
            # Create summary page

            print("βœ… Summary report generation completed")

        except Exception as e:
            print(f"❌ Error generating summary report: {e}")

    async def process_event(self, msg):
        """Process incoming NATS event for Confluence automation.

        Decodes the JSON payload and dispatches on 'event_type'; unknown
        event types are logged and ignored.
        """
        try:
            event_data = json.loads(msg.data.decode())
            event_type = event_data.get('event_type')

            # Route to appropriate handler
            handlers = {
                'RUN_PLANNED': self.handle_run_planned,
                'TRANSFER_STARTED': self.handle_transfer_started,
                'VALIDATION_PASSED': self.handle_validation_passed,
                'SLO_BREACH': self.handle_slo_breach,
                'RUN_COMPLETED': self.handle_run_completed,
                'LINEAGE_COMPLETED': self.handle_lineage_completed
            }

            handler = handlers.get(event_type)
            if handler:
                await handler(event_data)
            else:
                # Log other events for debugging
                print(f"ℹ️ Confluence automation ignoring event: {event_type}")

        except Exception as e:
            print(f"❌ Error processing Confluence event: {e}")

    async def start_automation(self):
        """Subscribe to all DTO events and run until interrupted.

        The NATS connection is always closed on exit (the finally block
        also runs when the task is cancelled).
        """
        try:
            # Subscribe to DTO events
            await self.nats_client.subscribe("dto.events.>", cb=self.process_event)
            print("βœ… Started Confluence automation - listening for DTO events")

            # Keep running; the subscription callback does the real work
            while True:
                await asyncio.sleep(1)

        except KeyboardInterrupt:
            # NOTE(review): under asyncio.run, Ctrl+C usually surfaces as
            # task cancellation rather than KeyboardInterrupt here; the
            # finally block still guarantees cleanup in both cases.
            print("\nπŸ›‘ Stopping Confluence automation...")
        except Exception as e:
            print(f"❌ Confluence automation error: {e}")
        finally:
            await self.nats_client.close()

# CLI entry point
async def main():
    """Bootstrap the Confluence automation service and run until stopped."""
    automation = DTOConfluenceAutomation()

    connected = await automation.connect()
    if not connected:
        # Guard clause: bail out early instead of nesting the happy path
        print("❌ Failed to start Confluence automation")
        return

    print("πŸ“„ DTO Confluence Automation started")
    print("Monitoring events: RUN_PLANNED, TRANSFER_STARTED, VALIDATION_PASSED, SLO_BREACH, RUN_COMPLETED")
    print("Generating automated post-run reports in Confluence")
    print("Press Ctrl+C to stop\n")

    await automation.start_automation()

if __name__ == "__main__":
    asyncio.run(main())