# adaptai/platform/dataops/dto/lineage/lineage_event_handler.py
#!/usr/bin/env python3
"""
DTO Lineage Event Handler - Processes NATS events to build data lineage
Listens for transfer events and automatically updates lineage graph
"""
import asyncio
import json
from typing import Dict, Any, Optional
from datetime import datetime
from nats.aio.client import Client as NATS
from dto_lineage_client import DTOLineageClient
class DTOLineageEventHandler:
    """Builds a data-provenance graph in JanusGraph from DTO NATS events.

    Subscribes to the ``dto.events.>`` subject tree and routes each event
    type (RUN_PLANNED, MANIFEST_LOADED, TRANSFER_STARTED, VALIDATION_PASSED,
    RUN_COMPLETED, DATA_CORRUPTION_DETECTED) to a handler that records
    run / dataset / host vertices via ``DTOLineageClient``.
    """

    def __init__(self, nats_servers: Optional[list] = None):
        """Initialize the handler.

        Args:
            nats_servers: NATS server URLs. Defaults to
                ``["nats://localhost:4222"]``. (A ``None`` sentinel replaces
                the previous mutable default list, which was shared across
                instances; callers are unaffected.)
        """
        self.nats_servers = nats_servers if nats_servers is not None else ["nats://localhost:4222"]
        self.nats_client = NATS()
        self.lineage_client = DTOLineageClient()
        # Per-run scratch state keyed by run_id. Every entry is ALWAYS a
        # dict (may hold 'vertex_id', 'source_host', 'target_host',
        # 'transfer_method') so handlers can .get()/.update() it safely.
        # Previously handle_run_planned stored a bare vertex-id string here,
        # which broke handle_transfer_started's .update() on the same key.
        self.run_cache: Dict[str, Dict[str, Any]] = {}

    async def connect(self) -> bool:
        """Connect to NATS and JanusGraph.

        Returns:
            True only when both connections succeed. NOTE(review): a failed
            JanusGraph connection leaves the NATS connection open.
        """
        try:
            # Connect to NATS
            await self.nats_client.connect(servers=self.nats_servers)
            print("βœ… Connected to NATS for lineage tracking")
            # Connect to JanusGraph
            if not self.lineage_client.connect():
                print("⚠️ JanusGraph not available - lineage tracking disabled")
                return False
            return True
        except Exception as e:
            print(f"❌ Lineage handler connection failed: {e}")
            return False

    async def handle_run_planned(self, event_data: Dict[str, Any]) -> None:
        """Handle RUN_PLANNED event - create run vertex."""
        run_id = event_data.get('run_id')
        job_id = event_data.get('job_id')
        manifest_path = event_data.get('manifest_path')
        environment = event_data.get('environment')
        initiated_by = event_data.get('initiated_by')
        print(f"πŸ“Š Recording run lineage: {run_id}")
        # Record run vertex
        run_vertex_id = self.lineage_client.record_run(
            run_id, job_id, manifest_path, environment, initiated_by
        )
        if run_vertex_id:
            # BUG FIX: store the vertex id inside the per-run dict instead of
            # replacing the cache entry with a raw id string (which made
            # handle_transfer_started's dict .update() fail for the same run).
            self.run_cache.setdefault(run_id, {})['vertex_id'] = run_vertex_id
            print(f"βœ… Run vertex created: {run_id} -> {run_vertex_id}")
        else:
            print(f"❌ Failed to create run vertex: {run_id}")

    async def handle_manifest_loaded(self, event_data: Dict[str, Any]) -> None:
        """Handle MANIFEST_LOADED event - record source datasets."""
        run_id = event_data.get('run_id')
        manifest_data = event_data.get('manifest_data', {})
        datasets = manifest_data.get('datasets', [])
        environment = event_data.get('environment', 'unknown')
        data_class = event_data.get('data_class', 'unknown')
        print(f"πŸ“Š Recording source datasets for run: {run_id}")
        for dataset in datasets:
            dataset_path = dataset.get('path')
            checksum = dataset.get('checksum')
            size_bytes = dataset.get('size_bytes', 0)
            # Skip manifest entries missing the identifying fields.
            if dataset_path and checksum:
                # Record source dataset
                dataset_vertex_id = self.lineage_client.record_dataset(
                    dataset_path, checksum, size_bytes, data_class, environment
                )
                if dataset_vertex_id:
                    print(f"βœ… Source dataset recorded: {dataset_path}")
                    # NOTE: proper graph traversal to create the run<-input
                    # edge is not implemented yet; the relationship is only
                    # logged for future linking.
                    print(f"πŸ”— Linked {dataset_path} as input to run {run_id}")

    async def handle_transfer_started(self, event_data: Dict[str, Any]) -> None:
        """Handle TRANSFER_STARTED event - record hosts and transfer details."""
        run_id = event_data.get('run_id')
        source_host = event_data.get('source_host')
        target_host = event_data.get('target_host')
        transfer_method = event_data.get('transfer_method')
        print(f"πŸ“Š Recording transfer hosts for run: {run_id}")
        # Record hosts (best effort; a falsy id means the write failed).
        if source_host:
            if self.lineage_client.record_host(source_host, "source"):
                print(f"βœ… Source host recorded: {source_host}")
        if target_host:
            if self.lineage_client.record_host(target_host, "target"):
                print(f"βœ… Target host recorded: {target_host}")
        # Stash transfer details for handle_validation_passed.
        self.run_cache.setdefault(run_id, {}).update({
            'source_host': source_host,
            'target_host': target_host,
            'transfer_method': transfer_method
        })

    async def handle_validation_passed(self, event_data: Dict[str, Any]) -> None:
        """Handle VALIDATION_PASSED event - record target datasets."""
        run_id = event_data.get('run_id')
        validation_results = event_data.get('validation_results', {})
        print(f"πŸ“Š Recording validated target datasets for run: {run_id}")
        # Get run details (hosts, transfer method) cached by earlier events.
        run_details = self.run_cache.get(run_id, {})
        for file_result in validation_results.get('files', []):
            target_path = file_result.get('target_path')
            checksum = file_result.get('checksum')
            size_bytes = file_result.get('size_bytes', 0)
            source_path = file_result.get('source_path')
            # Only record files that actually passed validation.
            if target_path and checksum and file_result.get('validation_passed'):
                # Record target dataset
                target_dataset_id = self.lineage_client.record_dataset(
                    target_path, checksum, size_bytes,
                    event_data.get('data_class', 'unknown'),
                    event_data.get('environment', 'unknown')
                )
                if target_dataset_id and source_path:
                    print(f"βœ… Target dataset recorded: {target_path}")
                    # TODO: look up the source dataset vertex by path+checksum
                    # via graph traversal; until then "unknown" is linked.
                    source_dataset_id = None
                    try:
                        print(f"πŸ” Looking up source dataset: {source_path}")
                        # Create lineage link with the information we have.
                        if all([run_details.get('source_host'), run_details.get('target_host')]):
                            # Link transfer lineage
                            self.lineage_client.link_transfer_lineage(
                                source_dataset_id or "unknown",
                                target_dataset_id,
                                run_id,
                                run_details['source_host'],
                                run_details['target_host'],
                                run_details.get('transfer_method', 'unknown'),
                                event_data.get('average_throughput_mbps', 0.0)
                            )
                            print(f"πŸ”— Linked transfer lineage: {source_path} -> {target_path}")
                    except Exception as e:
                        print(f"❌ Error linking lineage: {e}")

    async def handle_run_completed(self, event_data: Dict[str, Any]) -> None:
        """Handle RUN_COMPLETED event - finalize lineage and generate reports."""
        run_id = event_data.get('run_id')
        final_status = event_data.get('final_status')
        print(f"πŸ“Š Finalizing lineage for completed run: {run_id}")
        if final_status == 'SUCCESS':
            try:
                # TODO: gather all datasets processed in this run once graph
                # traversal support exists.
                print(f"πŸ“ˆ Generating lineage impact report for run: {run_id}")
                # Validate lineage integrity
                integrity_report = self.lineage_client.validate_lineage_integrity()
                if integrity_report:
                    print(f"βœ… Lineage integrity validated for run: {run_id}")
                    # Publish lineage completion event
                    lineage_event = {
                        'event_id': f"lineage-completed-{run_id}",
                        'event_type': 'LINEAGE_COMPLETED',
                        'timestamp': datetime.now().isoformat(),
                        'run_id': run_id,
                        'integrity_report': integrity_report
                    }
                    await self.nats_client.publish(
                        "dto.events.lineage.completed",
                        json.dumps(lineage_event).encode()
                    )
            except Exception as e:
                print(f"❌ Error finalizing lineage: {e}")
        # Clean up run cache regardless of final status.
        self.run_cache.pop(run_id, None)

    async def handle_data_corruption_detected(self, event_data: Dict[str, Any]) -> None:
        """Handle data corruption - trace impact through lineage."""
        corrupted_dataset = event_data.get('dataset_path')
        corruption_type = event_data.get('corruption_type')
        print(f"🚨 Tracing corruption impact: {corrupted_dataset}")
        try:
            # Find all impacted datasets downstream
            impacted_datasets = self.lineage_client.find_data_impact(corrupted_dataset)
            if impacted_datasets:
                print(f"⚠️ Corruption may impact {len(impacted_datasets)} datasets")
                # Publish impact analysis event
                impact_event = {
                    'event_id': f"corruption-impact-{datetime.now().timestamp()}",
                    'event_type': 'CORRUPTION_IMPACT_ANALYSIS',
                    'timestamp': datetime.now().isoformat(),
                    'corrupted_dataset': corrupted_dataset,
                    'corruption_type': corruption_type,
                    'impacted_datasets': impacted_datasets,
                    'impact_count': len(impacted_datasets)
                }
                await self.nats_client.publish(
                    "dto.events.lineage.corruption_impact",
                    json.dumps(impact_event).encode()
                )
                print(f"πŸ“Š Published corruption impact analysis")
        except Exception as e:
            print(f"❌ Error tracing corruption impact: {e}")

    async def process_event(self, msg) -> None:
        """Process an incoming NATS message and dispatch by event_type."""
        try:
            event_data = json.loads(msg.data.decode())
            event_type = event_data.get('event_type')
            # Dispatch table: event_type -> coroutine handler.
            handlers = {
                'RUN_PLANNED': self.handle_run_planned,
                'MANIFEST_LOADED': self.handle_manifest_loaded,
                'TRANSFER_STARTED': self.handle_transfer_started,
                'VALIDATION_PASSED': self.handle_validation_passed,
                'RUN_COMPLETED': self.handle_run_completed,
                'DATA_CORRUPTION_DETECTED': self.handle_data_corruption_detected
            }
            handler = handlers.get(event_type)
            if handler:
                await handler(event_data)
            else:
                # Log other events for debugging
                print(f"ℹ️ Lineage handler ignoring event: {event_type}")
        except Exception as e:
            # Top-level boundary: never let one bad event kill the subscriber.
            print(f"❌ Error processing lineage event: {e}")

    async def start_lineage_tracking(self) -> None:
        """Subscribe to all DTO events and run until interrupted."""
        try:
            # Subscribe to all DTO events
            await self.nats_client.subscribe("dto.events.>", cb=self.process_event)
            print("βœ… Started lineage tracking - listening for DTO events")
            # Keep the task alive; NATS callbacks do the actual work.
            while True:
                await asyncio.sleep(1)
        except KeyboardInterrupt:
            print("\nπŸ›‘ Stopping lineage tracking...")
        except Exception as e:
            print(f"❌ Lineage tracking error: {e}")
        finally:
            # Always release both connections on the way out.
            await self.nats_client.close()
            self.lineage_client.close()
# CLI entry point
async def main():
    """Bootstrap the lineage handler and run it until interrupted."""
    event_handler = DTOLineageEventHandler()
    connected = await event_handler.connect()
    if not connected:
        print("❌ Failed to start lineage tracking")
        return
    print("πŸ“Š DTO Lineage Tracking started")
    print("Monitoring events: RUN_PLANNED, MANIFEST_LOADED, TRANSFER_STARTED, VALIDATION_PASSED, RUN_COMPLETED")
    print("Building data provenance graph in JanusGraph")
    print("Press Ctrl+C to stop\n")
    await event_handler.start_lineage_tracking()
# Run the async entry point when executed as a script.
if __name__ == "__main__":
    asyncio.run(main())