| | |
| | """ |
| | DTO Cache Client - Interface to Dragonfly for live status caching |
| | """ |
| |
|
| | import json |
| | import redis |
| | from typing import Dict, Any, Optional, List |
| | from datetime import datetime, timezone |
| |
|
class DTOCacheClient:
    """Client for caching DTO run status, progress, artifacts, and alerts.

    Dragonfly speaks the Redis wire protocol, so the standard ``redis``
    client library is used underneath. Connections are established lazily:
    each public method ensures a connection exists before touching the cache.

    All write methods return True on success and False on any failure
    (connection or command error); read methods return None / [] on failure.
    Errors are reported via print and never propagated to callers.
    """

    # TTLs for the different key families (seconds).
    _STATUS_TTL = 3600      # run status and progress: 1 hour
    _ARTIFACT_TTL = 86400   # artifact references: 1 day
    _ALERT_TTL = 172800     # alerts: 2 days

    def __init__(self, host: str = "localhost", port: int = 18000,
                 password: Optional[str] = None):
        """Store connection settings; no connection is made until connect()."""
        self.host = host
        self.port = port
        self.password = password
        self.redis_client: Optional[redis.Redis] = None

    def connect(self) -> bool:
        """Connect to the Dragonfly server and verify the link with PING.

        Returns:
            True if the connection was established and PING succeeded,
            False otherwise.
        """
        try:
            self.redis_client = redis.Redis(
                host=self.host,
                port=self.port,
                password=self.password,
                decode_responses=True,  # return str instead of bytes
                socket_timeout=5,
                socket_connect_timeout=5
            )
            # Constructing Redis() does not touch the network; PING does.
            self.redis_client.ping()
            print(f"✅ Connected to Dragonfly at {self.host}:{self.port}")
            return True
        except redis.ConnectionError as e:
            print(f"❌ Failed to connect to Dragonfly: {e}")
            return False
        except Exception as e:
            print(f"❌ Error connecting to Dragonfly: {e}")
            return False

    def _ensure_connected(self) -> bool:
        """Lazily establish a connection; True when a client is available."""
        if self.redis_client:
            return True
        return self.connect()

    @staticmethod
    def _utc_timestamp() -> int:
        """Current Unix time in whole seconds, derived from aware UTC."""
        return int(datetime.now(timezone.utc).timestamp())

    def update_run_status(self, run_id: str, status: str,
                          metadata: Optional[Dict[str, Any]] = None) -> bool:
        """Update run status hash in the cache.

        Args:
            run_id: Unique run identifier.
            status: Status string (e.g. "IN_PROGRESS").
            metadata: Optional extra fields merged into the status hash.
                NOTE: keys here can overwrite the base fields
                ("status", "updated_at", "run_id").

        Returns:
            True on success, False on any error.
        """
        if not self._ensure_connected():
            return False

        try:
            key = f"dto:run:{run_id}:status"
            status_data: Dict[str, Any] = {
                "status": status,
                "updated_at": self._utc_timestamp(),
                "run_id": run_id,
            }
            if metadata:
                status_data.update(metadata)

            self.redis_client.hset(key, mapping=status_data)
            self.redis_client.expire(key, self._STATUS_TTL)

            print(f"✅ Updated run status: {run_id} -> {status}")
            return True
        except Exception as e:
            print(f"❌ Error updating run status: {e}")
            return False

    def get_run_status(self, run_id: str) -> Optional[Dict[str, Any]]:
        """Fetch run status hash from the cache.

        Returns:
            The status dict with "updated_at" coerced to int and
            "progress_percent" to float (Redis stores hash values as
            strings), or None when the key is absent or an error occurs.
        """
        if not self._ensure_connected():
            return None

        try:
            key = f"dto:run:{run_id}:status"
            status_data = self.redis_client.hgetall(key)
            if not status_data:
                return None

            # Restore numeric types lost by Redis string storage.
            if 'updated_at' in status_data:
                status_data['updated_at'] = int(status_data['updated_at'])
            if 'progress_percent' in status_data:
                status_data['progress_percent'] = float(status_data['progress_percent'])
            return status_data
        except Exception as e:
            print(f"❌ Error getting run status: {e}")
            return None

    def update_run_progress(self, run_id: str, progress_percent: float,
                            transferred_bytes: int, total_bytes: int,
                            throughput_mbps: float) -> bool:
        """Update run progress metrics hash (1 h TTL).

        Returns:
            True on success, False on any error.
        """
        if not self._ensure_connected():
            return False

        try:
            key = f"dto:run:{run_id}:progress"
            progress_data = {
                "progress_percent": progress_percent,
                "transferred_bytes": transferred_bytes,
                "total_bytes": total_bytes,
                "throughput_mbps": throughput_mbps,
                "updated_at": self._utc_timestamp(),
            }

            self.redis_client.hset(key, mapping=progress_data)
            self.redis_client.expire(key, self._STATUS_TTL)

            print(f"✅ Updated run progress: {run_id} -> {progress_percent}%")
            return True
        except Exception as e:
            print(f"❌ Error updating run progress: {e}")
            return False

    def add_artifact(self, run_id: str, artifact_type: str, artifact_path: str) -> bool:
        """Add an artifact path to the run's artifact set (1 day TTL).

        Returns:
            True on success, False on any error.
        """
        if not self._ensure_connected():
            return False

        try:
            key = f"dto:run:{run_id}:artifacts:{artifact_type}"
            self.redis_client.sadd(key, artifact_path)
            self.redis_client.expire(key, self._ARTIFACT_TTL)

            print(f"✅ Added artifact: {run_id} -> {artifact_type}: {artifact_path}")
            return True
        except Exception as e:
            print(f"❌ Error adding artifact: {e}")
            return False

    def get_artifacts(self, run_id: str, artifact_type: str) -> List[str]:
        """Return artifact paths for a run/type, or [] when absent or on error."""
        if not self._ensure_connected():
            return []

        try:
            key = f"dto:run:{run_id}:artifacts:{artifact_type}"
            artifacts = self.redis_client.smembers(key)
            return list(artifacts) if artifacts else []
        except Exception as e:
            print(f"❌ Error getting artifacts: {e}")
            return []

    def publish_alert(self, job_id: str, alert_type: str, message: str,
                      severity: str = "warning") -> bool:
        """Store an alert hash (2 day TTL) and publish it on "dto:alerts".

        Returns:
            True on success, False on any error.
        """
        if not self._ensure_connected():
            return False

        try:
            key = f"dto:alerts:{job_id}"
            alert_data = {
                "type": alert_type,
                "message": message,
                "severity": severity,
                # Was datetime.utcnow() — deprecated and naive; now matches
                # the aware-UTC timestamps used by the other methods.
                "timestamp": self._utc_timestamp(),
            }

            self.redis_client.hset(key, mapping=alert_data)
            self.redis_client.expire(key, self._ALERT_TTL)

            # Fan out to live subscribers as well as storing the record.
            self.redis_client.publish("dto:alerts", json.dumps(alert_data))

            print(f"✅ Published alert: {job_id} -> {alert_type}: {message}")
            return True
        except Exception as e:
            print(f"❌ Error publishing alert: {e}")
            return False
| |
|
| | |
def test_cache_connectivity():
    """Smoke-test Dragonfly cache connectivity end to end.

    Connects with default settings and exercises every DTOCacheClient
    operation against a throwaway run id. Returns True if the initial
    connection succeeds, False otherwise (individual operation failures
    are reported by the client itself, not by this function).
    """
    client = DTOCacheClient()

    if not client.connect():
        print("❌ Cache connectivity test failed")
        return False

    test_run_id = "test-run-001"

    # Status write with extra metadata fields.
    client.update_run_status(test_run_id, "IN_PROGRESS", {
        "manifest_path": "/test/manifest.yaml",
        "data_class": "CLASS_A"
    })

    # Progress: 25.5% of 100 GiB at ~604 Mbps.
    client.update_run_progress(test_run_id, 25.5, 26843545600, 107374182400, 604.0)

    # Artifact reference.
    client.add_artifact(test_run_id, "logs", "/data/adaptai/platform/dataops/dto/logs/test-run-001.log")

    # Read back what we wrote.
    status = client.get_run_status(test_run_id)
    print(f"Retrieved status: {status}")

    artifacts = client.get_artifacts(test_run_id, "logs")
    print(f"Retrieved artifacts: {artifacts}")

    # Alert storage + pub/sub fan-out.
    client.publish_alert("test-job-001", "SLO_BREACH", "Throughput below expected 500 Mbps", "critical")

    print("✅ All cache operations completed successfully")
    return True
| |
|
if __name__ == "__main__":
    # Script entry point: run the connectivity smoke test.
    banner = "=" * 50
    print("Testing DTO Cache Client...")
    print(banner)

    test_cache_connectivity()