# adaptai/platform/dataops/dto/database/cassandra_client.py
#!/usr/bin/env python3
"""
DTO Cassandra Client - Interface to Cassandra for durable run history
"""
import json
from datetime import datetime
from typing import Dict, Any, Optional, List
from cassandra.cluster import Cluster
from cassandra.auth import PlainTextAuthProvider
from cassandra.query import dict_factory
class DTOCassandraClient:
    """Client for persisting DTO run history, metrics, and events in Cassandra.

    Connects lazily: every store/get method opens a session on first use via
    ``_ensure_session``. All operations are best-effort — failures are printed
    and reported through the return value instead of raising, so callers can
    treat Cassandra persistence as optional.
    """

    def __init__(self, hosts: Optional[List[str]] = None, port: int = 9042,
                 username: Optional[str] = None, password: Optional[str] = None):
        """Configure (but do not open) the Cassandra connection.

        Args:
            hosts: Contact points; defaults to ``["localhost"]`` when omitted.
            port: CQL native-transport port.
            username: Optional username for PlainText authentication.
            password: Optional password for PlainText authentication.
        """
        # NOTE: the previous signature used a mutable default (["localhost"]),
        # which is shared across all calls; use a None sentinel and copy the
        # caller's list so each instance owns its own host list.
        self.hosts = list(hosts) if hosts else ["localhost"]
        self.port = port
        self.username = username
        self.password = password
        self.cluster = None   # cassandra.cluster.Cluster once connected
        self.session = None   # live Session bound to the dto_operations keyspace

    def connect(self) -> bool:
        """Connect to Cassandra cluster.

        Returns:
            True on success, False on any connection failure.
        """
        try:
            auth_provider = None
            if self.username and self.password:
                auth_provider = PlainTextAuthProvider(
                    username=self.username,
                    password=self.password
                )
            self.cluster = Cluster(
                contact_points=self.hosts,
                port=self.port,
                auth_provider=auth_provider,
                protocol_version=4
            )
            self.session = self.cluster.connect()
            self.session.set_keyspace('dto_operations')
            # Return rows as dicts so callers get Dict[str, Any] results.
            self.session.row_factory = dict_factory
            print(f"βœ… Connected to Cassandra at {self.hosts}:{self.port}")
            return True
        except Exception as e:
            print(f"❌ Failed to connect to Cassandra: {e}")
            return False

    def _ensure_session(self) -> bool:
        """Lazily connect; return True when a live session is available."""
        if self.session:
            return True
        return self.connect()

    def store_run(self, run_data: Dict[str, Any]) -> bool:
        """Store run metadata in Cassandra.

        Missing keys in ``run_data`` are inserted as NULL (collection fields
        default to empty set/list/map).

        Returns:
            True when the INSERT succeeded, False otherwise.
        """
        if not self._ensure_session():
            return False
        try:
            query = """
                INSERT INTO runs (
                    run_id, job_id, manifest_path, data_class, environment, status,
                    start_time, end_time, initiated_by, approvers, data_size_bytes,
                    estimated_duration, final_status, total_duration_seconds,
                    average_throughput_mbps, artifacts, metadata
                ) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
            """
            self.session.execute(query, (
                run_data.get('run_id'),
                run_data.get('job_id'),
                run_data.get('manifest_path'),
                run_data.get('data_class'),
                run_data.get('environment'),
                run_data.get('status'),
                run_data.get('start_time'),
                run_data.get('end_time'),
                run_data.get('initiated_by'),
                run_data.get('approvers', set()),
                run_data.get('data_size_bytes'),
                run_data.get('estimated_duration'),
                run_data.get('final_status'),
                run_data.get('total_duration_seconds'),
                run_data.get('average_throughput_mbps'),
                run_data.get('artifacts', []),
                run_data.get('metadata', {})
            ))
            print(f"βœ… Stored run: {run_data.get('run_id')}")
            return True
        except Exception as e:
            print(f"❌ Error storing run: {e}")
            return False

    def store_metric(self, run_id: str, metric_name: str, value: float,
                     labels: Optional[Dict[str, str]] = None) -> bool:
        """Store performance metric.

        The metric is timestamped "now" and placed in a minute-precision
        bucket (part of the metrics table's partitioning scheme).

        Returns:
            True when the INSERT succeeded, False otherwise.
        """
        if not self._ensure_session():
            return False
        try:
            # Create minute-precision bucket
            now = datetime.now()
            bucket = datetime(now.year, now.month, now.day, now.hour, now.minute)
            query = """
                INSERT INTO metrics (run_id, metric_name, bucket, timestamp, value, labels)
                VALUES (%s, %s, %s, %s, %s, %s)
            """
            self.session.execute(query, (
                run_id,
                metric_name,
                bucket,
                now,
                value,
                labels or {}
            ))
            print(f"βœ… Stored metric: {run_id} -> {metric_name}: {value}")
            return True
        except Exception as e:
            print(f"❌ Error storing metric: {e}")
            return False

    def store_event(self, event_data: Dict[str, Any]) -> bool:
        """Store event in durable event store.

        The ``payload`` value is JSON-serialized before insertion; all other
        fields are passed through as-is.

        Returns:
            True when the INSERT succeeded, False otherwise.
        """
        if not self._ensure_session():
            return False
        try:
            query = """
                INSERT INTO events (event_id, event_type, timestamp, run_id, job_id, payload)
                VALUES (%s, %s, %s, %s, %s, %s)
            """
            self.session.execute(query, (
                event_data.get('event_id'),
                event_data.get('event_type'),
                event_data.get('timestamp'),
                event_data.get('run_id'),
                event_data.get('job_id'),
                json.dumps(event_data.get('payload', {}))
            ))
            print(f"βœ… Stored event: {event_data.get('event_type')} for {event_data.get('run_id')}")
            return True
        except Exception as e:
            print(f"❌ Error storing event: {e}")
            return False

    def get_run(self, run_id: str) -> Optional[Dict[str, Any]]:
        """Get run by ID.

        Returns:
            The run row as a dict, or None when not found / on error.
        """
        if not self._ensure_session():
            return None
        try:
            query = "SELECT * FROM runs WHERE run_id = %s LIMIT 1"
            result = self.session.execute(query, (run_id,))
            return result.one()
        except Exception as e:
            print(f"❌ Error getting run: {e}")
            return None

    def get_run_metrics(self, run_id: str, metric_name: Optional[str] = None) -> List[Dict[str, Any]]:
        """Get metrics for a run, optionally filtered to one metric name.

        Returns:
            A list of metric rows (empty on error or no matches).
        """
        if not self._ensure_session():
            return []
        try:
            if metric_name:
                query = "SELECT * FROM metrics WHERE run_id = %s AND metric_name = %s"
                result = self.session.execute(query, (run_id, metric_name))
            else:
                query = "SELECT * FROM metrics WHERE run_id = %s"
                result = self.session.execute(query, (run_id,))
            return list(result)
        except Exception as e:
            print(f"❌ Error getting metrics: {e}")
            return []

    def get_run_events(self, run_id: str, event_type: Optional[str] = None) -> List[Dict[str, Any]]:
        """Get events for a run, optionally filtered to one event type.

        Returns:
            A list of event rows (empty on error or no matches).
        """
        if not self._ensure_session():
            return []
        try:
            if event_type:
                query = "SELECT * FROM events WHERE run_id = %s AND event_type = %s"
                result = self.session.execute(query, (run_id, event_type))
            else:
                query = "SELECT * FROM events WHERE run_id = %s"
                result = self.session.execute(query, (run_id,))
            return list(result)
        except Exception as e:
            print(f"❌ Error getting events: {e}")
            return []

    def close(self):
        """Close Cassandra connection (safe to call multiple times)."""
        if self.cluster:
            self.cluster.shutdown()
            # Drop stale references so a later call can reconnect cleanly.
            self.cluster = None
            self.session = None
# Test function
def test_cassandra_connectivity():
    """Smoke-test the DTO Cassandra client end to end.

    Connects with defaults, writes a sample run, metric, and event, reads
    each back, and closes the connection.

    Returns:
        True when the cluster was reachable and all operations ran;
        False when the initial connection failed.
    """
    client = DTOCassandraClient()

    # Bail out early if no cluster is reachable.
    if not client.connect():
        print("❌ Cassandra connectivity test failed")
        return False

    # --- run storage ---
    sample_run = {
        'run_id': 'test-run-001',
        'job_id': 'test-job-001',
        'manifest_path': '/manifests/class_a/test.yaml',
        'data_class': 'CLASS_A',
        'environment': 'staging',
        'status': 'completed',
        'start_time': datetime.now(),
        'initiated_by': 'prometheus',
        'data_size_bytes': 107374182400,
        'estimated_duration': '2h',
        'final_status': 'SUCCESS',
        'total_duration_seconds': 7200,
        'average_throughput_mbps': 604.0,
        'artifacts': ['/logs/test-run-001.log', '/reports/test-run-001.pdf'],
        'metadata': {'transfer_method': 'ssh+dd', 'compression': 'none'},
    }
    client.store_run(sample_run)

    # --- metric storage ---
    metric_labels = {
        'source_host': 'vast2',
        'target_host': 'vast1',
        'transfer_method': 'ssh+dd',
    }
    client.store_metric('test-run-001', 'throughput_mbps', 604.0, metric_labels)

    # --- event storage ---
    sample_event = {
        'event_id': 'test-event-001',
        'event_type': 'RUN_COMPLETED',
        'timestamp': datetime.now(),
        'run_id': 'test-run-001',
        'job_id': 'test-job-001',
        'payload': {
            'success': True,
            'final_status': 'SUCCESS',
            'artifacts': ['/logs/test-run-001.log'],
        },
    }
    client.store_event(sample_event)

    # --- read everything back ---
    run = client.get_run('test-run-001')
    print(f"Retrieved run: {run}")
    metrics = client.get_run_metrics('test-run-001')
    print(f"Retrieved metrics: {metrics}")
    events = client.get_run_events('test-run-001')
    print(f"Retrieved events: {events}")

    client.close()
    print("βœ… All Cassandra operations completed successfully")
    return True
if __name__ == "__main__":
    # Run the connectivity smoke test when executed as a script.
    banner = "=" * 50
    print("Testing DTO Cassandra Client...")
    print(banner)
    test_cassandra_connectivity()