|
|
|
|
|
""" |
|
|
DTO Lineage Event Handler - Processes NATS events to build data lineage |
|
|
Listens for transfer events and automatically updates lineage graph |
|
|
""" |
|
|
|
|
|
import asyncio |
|
|
import json |
|
|
from typing import Dict, Any, Optional |
|
|
from datetime import datetime |
|
|
from nats.aio.client import Client as NATS |
|
|
from dto_lineage_client import DTOLineageClient |
|
|
|
|
|
class DTOLineageEventHandler:
    """Builds a data-lineage graph in JanusGraph from DTO events on NATS.

    Subscribes to ``dto.events.>`` and dispatches each event type to a
    dedicated handler that records run / dataset / host vertices (and the
    edges between them) through ``DTOLineageClient``.

    NOTE(review): the status-emoji prefixes in the log messages appear
    mojibake-corrupted in the source; they are preserved as-is. Only the
    corruption that split string literals across lines has been repaired.
    """

    def __init__(self, nats_servers: Optional[list] = None):
        """Create the handler.

        Args:
            nats_servers: NATS server URLs; defaults to the local server.
                (Default is ``None`` rather than a list literal to avoid the
                shared-mutable-default pitfall.)
        """
        self.nats_servers = nats_servers if nats_servers is not None else ["nats://localhost:4222"]
        self.nats_client = NATS()
        self.lineage_client = DTOLineageClient()
        # Per-run scratch state keyed by run_id. Always holds a dict so that
        # handlers arriving in any order can safely .update() it
        # (e.g. {'vertex_id': ..., 'source_host': ..., 'transfer_method': ...}).
        self.run_cache: Dict[str, Dict[str, Any]] = {}

    async def connect(self) -> bool:
        """Connect to NATS and JanusGraph.

        Returns:
            True only if BOTH backends are reachable; lineage tracking is
            useless without the graph store, so a missing JanusGraph is a
            hard failure here.
        """
        try:
            await self.nats_client.connect(servers=self.nats_servers)
            print("β Connected to NATS for lineage tracking")

            if not self.lineage_client.connect():
                print("β οΈ JanusGraph not available - lineage tracking disabled")
                return False

            return True

        except Exception as e:
            print(f"β Lineage handler connection failed: {e}")
            return False

    async def handle_run_planned(self, event_data: Dict[str, Any]):
        """Handle RUN_PLANNED event - create run vertex."""
        run_id = event_data.get('run_id')
        job_id = event_data.get('job_id')
        manifest_path = event_data.get('manifest_path')
        environment = event_data.get('environment')
        initiated_by = event_data.get('initiated_by')

        print(f"π Recording run lineage: {run_id}")

        run_vertex_id = self.lineage_client.record_run(
            run_id, job_id, manifest_path, environment, initiated_by
        )

        if run_vertex_id:
            # Store a dict (not the bare vertex id): handle_transfer_started
            # later calls .update() on this entry and would crash on a scalar.
            self.run_cache[run_id] = {'vertex_id': run_vertex_id}
            print(f"β Run vertex created: {run_id} -> {run_vertex_id}")
        else:
            print(f"β Failed to create run vertex: {run_id}")

    async def handle_manifest_loaded(self, event_data: Dict[str, Any]):
        """Handle MANIFEST_LOADED event - record source datasets."""
        run_id = event_data.get('run_id')
        manifest_data = event_data.get('manifest_data', {})

        datasets = manifest_data.get('datasets', [])
        environment = event_data.get('environment', 'unknown')
        data_class = event_data.get('data_class', 'unknown')

        print(f"π Recording source datasets for run: {run_id}")

        for dataset in datasets:
            dataset_path = dataset.get('path')
            checksum = dataset.get('checksum')
            size_bytes = dataset.get('size_bytes', 0)

            # Both path and checksum are required to identify a dataset vertex.
            if dataset_path and checksum:
                dataset_vertex_id = self.lineage_client.record_dataset(
                    dataset_path, checksum, size_bytes, data_class, environment
                )

                if dataset_vertex_id:
                    print(f"β Source dataset recorded: {dataset_path}")

                    try:
                        # TODO(review): no edge is actually created here — the
                        # call linking the dataset vertex to the run vertex is
                        # missing; the message below overstates what happened.
                        print(f"π Linked {dataset_path} as input to run {run_id}")
                    except Exception as e:
                        print(f"β Error linking dataset to run: {e}")

    async def handle_transfer_started(self, event_data: Dict[str, Any]):
        """Handle TRANSFER_STARTED event - record hosts and transfer details."""
        run_id = event_data.get('run_id')
        source_host = event_data.get('source_host')
        target_host = event_data.get('target_host')
        transfer_method = event_data.get('transfer_method')

        print(f"π Recording transfer hosts for run: {run_id}")

        if source_host:
            source_host_id = self.lineage_client.record_host(source_host, "source")
            if source_host_id:
                print(f"β Source host recorded: {source_host}")

        if target_host:
            target_host_id = self.lineage_client.record_host(target_host, "target")
            if target_host_id:
                print(f"β Target host recorded: {target_host}")

        # Normalize the cache entry to a dict before updating; events may
        # arrive out of order, so the entry may be absent entirely.
        if not isinstance(self.run_cache.get(run_id), dict):
            self.run_cache[run_id] = {}

        self.run_cache[run_id].update({
            'source_host': source_host,
            'target_host': target_host,
            'transfer_method': transfer_method
        })

    async def handle_validation_passed(self, event_data: Dict[str, Any]):
        """Handle VALIDATION_PASSED event - record target datasets."""
        run_id = event_data.get('run_id')
        validation_results = event_data.get('validation_results', {})

        print(f"π Recording validated target datasets for run: {run_id}")

        # Hosts / transfer method captured earlier by handle_transfer_started.
        run_details = self.run_cache.get(run_id, {})

        for file_result in validation_results.get('files', []):
            target_path = file_result.get('target_path')
            checksum = file_result.get('checksum')
            size_bytes = file_result.get('size_bytes', 0)
            source_path = file_result.get('source_path')

            # Only record datasets that actually passed validation.
            if target_path and checksum and file_result.get('validation_passed'):
                target_dataset_id = self.lineage_client.record_dataset(
                    target_path, checksum, size_bytes,
                    event_data.get('data_class', 'unknown'),
                    event_data.get('environment', 'unknown')
                )

                if target_dataset_id and source_path:
                    print(f"β Target dataset recorded: {target_path}")

                    # TODO(review): the source-dataset lookup is not
                    # implemented, so the edge below is always created with
                    # "unknown" as its source vertex.
                    source_dataset_id = None
                    try:
                        print(f"π Looking up source dataset: {source_path}")

                        # Link only when both endpoints of the transfer are known.
                        if all([run_details.get('source_host'), run_details.get('target_host')]):
                            self.lineage_client.link_transfer_lineage(
                                source_dataset_id or "unknown",
                                target_dataset_id,
                                run_id,
                                run_details['source_host'],
                                run_details['target_host'],
                                run_details.get('transfer_method', 'unknown'),
                                event_data.get('average_throughput_mbps', 0.0)
                            )

                            print(f"π Linked transfer lineage: {source_path} -> {target_path}")

                    except Exception as e:
                        print(f"β Error linking lineage: {e}")

    async def handle_run_completed(self, event_data: Dict[str, Any]):
        """Handle RUN_COMPLETED event - finalize lineage and generate reports."""
        run_id = event_data.get('run_id')
        final_status = event_data.get('final_status')

        print(f"π Finalizing lineage for completed run: {run_id}")

        if final_status == 'SUCCESS':
            try:
                print(f"π Generating lineage impact report for run: {run_id}")

                integrity_report = self.lineage_client.validate_lineage_integrity()

                if integrity_report:
                    print(f"β Lineage integrity validated for run: {run_id}")

                # Announce completion downstream so consumers can react.
                lineage_event = {
                    'event_id': f"lineage-completed-{run_id}",
                    'event_type': 'LINEAGE_COMPLETED',
                    'timestamp': datetime.now().isoformat(),
                    'run_id': run_id,
                    'integrity_report': integrity_report
                }

                await self.nats_client.publish(
                    "dto.events.lineage.completed",
                    json.dumps(lineage_event).encode()
                )

            except Exception as e:
                print(f"β Error finalizing lineage: {e}")

        # Drop per-run scratch state regardless of final status.
        if run_id in self.run_cache:
            del self.run_cache[run_id]

    async def handle_data_corruption_detected(self, event_data: Dict[str, Any]):
        """Handle data corruption - trace impact through lineage."""
        corrupted_dataset = event_data.get('dataset_path')
        corruption_type = event_data.get('corruption_type')

        print(f"π¨ Tracing corruption impact: {corrupted_dataset}")

        try:
            # Walk the lineage graph downstream from the corrupted dataset.
            impacted_datasets = self.lineage_client.find_data_impact(corrupted_dataset)

            if impacted_datasets:
                print(f"β οΈ Corruption may impact {len(impacted_datasets)} datasets")

                impact_event = {
                    'event_id': f"corruption-impact-{datetime.now().timestamp()}",
                    'event_type': 'CORRUPTION_IMPACT_ANALYSIS',
                    'timestamp': datetime.now().isoformat(),
                    'corrupted_dataset': corrupted_dataset,
                    'corruption_type': corruption_type,
                    'impacted_datasets': impacted_datasets,
                    'impact_count': len(impacted_datasets)
                }

                await self.nats_client.publish(
                    "dto.events.lineage.corruption_impact",
                    json.dumps(impact_event).encode()
                )

                print(f"π Published corruption impact analysis")

        except Exception as e:
            print(f"β Error tracing corruption impact: {e}")

    async def process_event(self, msg):
        """Process incoming NATS event for lineage tracking.

        Decodes the JSON payload and dispatches on ``event_type``; unknown
        event types are logged and ignored. All errors are contained here so
        a bad message never kills the subscription callback.
        """
        try:
            event_data = json.loads(msg.data.decode())
            event_type = event_data.get('event_type')

            # Dispatch table: one coroutine handler per recognized event type.
            handlers = {
                'RUN_PLANNED': self.handle_run_planned,
                'MANIFEST_LOADED': self.handle_manifest_loaded,
                'TRANSFER_STARTED': self.handle_transfer_started,
                'VALIDATION_PASSED': self.handle_validation_passed,
                'RUN_COMPLETED': self.handle_run_completed,
                'DATA_CORRUPTION_DETECTED': self.handle_data_corruption_detected
            }

            handler = handlers.get(event_type)
            if handler:
                await handler(event_data)
            else:
                print(f"βΉοΈ Lineage handler ignoring event: {event_type}")

        except Exception as e:
            print(f"β Error processing lineage event: {e}")

    async def start_lineage_tracking(self):
        """Start listening for DTO events to build lineage.

        Subscribes to the wildcard subject and then idles forever; always
        closes both client connections on the way out.
        """
        try:
            await self.nats_client.subscribe("dto.events.>", cb=self.process_event)
            print("β Started lineage tracking - listening for DTO events")

            # Keep the task alive; the subscription callback does the work.
            while True:
                await asyncio.sleep(1)

        except KeyboardInterrupt:
            print("\nπ Stopping lineage tracking...")
        except Exception as e:
            print(f"β Lineage tracking error: {e}")
        finally:
            await self.nats_client.close()
            self.lineage_client.close()
|
|
|
|
|
|
|
|
async def main():
    """Entry point: wire up the lineage handler and run it until interrupted."""
    handler = DTOLineageEventHandler()

    # Guard clause: bail out early if either backend is unreachable.
    connected = await handler.connect()
    if not connected:
        print("β Failed to start lineage tracking")
        return

    print("π DTO Lineage Tracking started")
    print("Monitoring events: RUN_PLANNED, MANIFEST_LOADED, TRANSFER_STARTED, VALIDATION_PASSED, RUN_COMPLETED")
    print("Building data provenance graph in JanusGraph")
    print("Press Ctrl+C to stop\n")

    await handler.start_lineage_tracking()


if __name__ == "__main__":
    asyncio.run(main())