# adaptai/platform/aiml/mlops/elizabeth_tools.py
# (Hugging Face page residue removed: uploader "ADAPT-Chase",
#  "Add files using upload-large-folder tool", commit 42bba47 verified)
#!/usr/bin/env python3
"""
Elizabeth Enhanced Tools - Comprehensive tool set for unrestricted AI operations
"""
import os
import json
import sqlite3
import requests
import psutil
import subprocess
import datetime
import hashlib
import asyncio
import glob
from typing import Dict, List, Any, Optional
import logging
import shutil
class ElizabethTools:
    """Comprehensive tool set for Elizabeth AI assistant"""

    def __init__(self):
        # Registry mapping tool names to bound methods; populated by
        # initialize_tools() so execute_tool() can dispatch by name.
        self.tool_registry = {}
        self.setup_logging()
        self.initialize_tools()

    def setup_logging(self):
        # NOTE(review): basicConfig mutates the process-wide root logger,
        # which is a global side effect -- confirm this is intended when the
        # class is used as a library rather than an entry point.
        logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
        self.logger = logging.getLogger(__name__)
def initialize_tools(self):
"""Initialize all available tools"""
self.tool_registry = {
'database_query': self.database_query,
'api_call': self.api_call,
'file_operations': self.file_operations,
'system_monitor': self.system_monitor,
'cloud_operations': self.cloud_operations,
'network_scan': self.network_scan,
'process_manager': self.process_manager,
'data_analysis': self.data_analysis,
'code_execution': self.code_execution,
'memory_operations': self.memory_operations
}
def database_query(self, query: str, db_path: str = None) -> Dict[str, Any]:
"""Execute SQL queries on databases"""
try:
if not db_path:
db_path = "/data/adaptai/platform/aiml/mlops/backend/mlflow.db"
conn = sqlite3.connect(db_path)
cursor = conn.cursor()
cursor.execute(query)
if query.strip().upper().startswith(('SELECT', 'PRAGMA')):
columns = [desc[0] for desc in cursor.description]
rows = cursor.fetchall()
result = {
'columns': columns,
'rows': rows,
'row_count': len(rows)
}
else:
conn.commit()
result = {
'affected_rows': cursor.rowcount,
'last_row_id': cursor.lastrowid
}
conn.close()
return {'success': True, 'data': result}
except Exception as e:
return {'success': False, 'error': str(e)}
def api_call(self, method: str, url: str, headers: Dict = None, data: Any = None, params: Dict = None) -> Dict[str, Any]:
"""Make HTTP API calls with full control"""
try:
headers = headers or {}
response = requests.request(
method=method.upper(),
url=url,
headers=headers,
json=data if isinstance(data, dict) else None,
data=data if not isinstance(data, dict) else None,
params=params,
timeout=30
)
return {
'success': True,
'status_code': response.status_code,
'headers': dict(response.headers),
'data': response.json() if response.headers.get('content-type', '').startswith('application/json') else response.text
}
except Exception as e:
return {'success': False, 'error': str(e)}
    def file_operations(self, operation: str, path: str, content: str = None, recursive: bool = False, allow_overwrite: bool = False) -> Dict[str, Any]:
        """Perform file system operations with safety mechanisms.

        Supported operations: 'read', 'write', 'append', 'copy', 'list',
        'delete', 'info'. Writes never clobber an existing file unless
        allow_overwrite is True: the payload goes to a timestamped sibling
        and (for 'write') a backup copy of the original is made first.
        Deletes always back up to /tmp/elizabeth_backups before removing.

        Args:
            operation: Which operation to perform (see list above).
            path: Target path; normalized to an absolute path.
            content: Text payload for 'write'/'append'.
            recursive: For 'list', walk subdirectories (files only).
            allow_overwrite: Permit in-place writes over existing files.

        Returns:
            A dict with 'success' plus operation-specific keys, or
            {'success': False, 'error': str} on failure.
        """
        try:
            # Ensure absolute path
            path = os.path.abspath(path)
            # System file protection: refuse anything under OS-owned trees.
            system_paths = ['/etc', '/usr', '/bin', '/sbin', '/lib', '/proc', '/sys', '/dev']
            if any(path.startswith(sys_path) for sys_path in system_paths):
                return {'success': False, 'error': f'System files protection: Cannot access {path}'}
            # Safety check for write operations: redirect to a timestamped
            # filename instead of overwriting, and back up the original.
            if operation in ['write', 'append'] and os.path.exists(path) and not allow_overwrite:
                # Generate new filename with timestamp
                timestamp = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
                base, ext = os.path.splitext(path)
                new_path = f"{base}_{timestamp}{ext}"
                # Also create backup of original (only for 'write'; 'append'
                # does not copy because the original content is preserved).
                backup_path = f"{base}_backup_{timestamp}{ext}"
                if operation == 'write':
                    shutil.copy2(path, backup_path)
                actual_path = new_path
                message = f'File exists. Created new file: {new_path} (backup: {backup_path})'
            else:
                actual_path = path
                message = None
            if operation == 'read':
                # NOTE(review): opens with the platform default encoding --
                # consider encoding='utf-8' explicitly; verify with callers.
                with open(path, 'r') as f:
                    return {'success': True, 'content': f.read()}
            elif operation == 'write':
                os.makedirs(os.path.dirname(actual_path), exist_ok=True)
                with open(actual_path, 'w') as f:
                    f.write(content)
                return {'success': True, 'message': message or f'Safely written to {actual_path}'}
            elif operation == 'append':
                os.makedirs(os.path.dirname(actual_path), exist_ok=True)
                with open(actual_path, 'a') as f:
                    f.write(content)
                return {'success': True, 'message': message or f'Safely appended to {actual_path}'}
            elif operation == 'copy':
                # NOTE(review): there is no destination parameter -- at this
                # point actual_path == path, so the copy always lands next to
                # the source under a *_copy_<timestamp> name unless
                # allow_overwrite is set (in which case it copies onto itself).
                # Confirm this is the intended contract.
                if not os.path.exists(path):
                    return {'success': False, 'error': f'Source file does not exist: {path}'}
                if os.path.exists(actual_path) and not allow_overwrite:
                    timestamp = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
                    base, ext = os.path.splitext(actual_path)
                    actual_path = f"{base}_copy_{timestamp}{ext}"
                shutil.copy2(path, actual_path)
                return {'success': True, 'message': f'Safely copied to {actual_path}'}
            elif operation == 'list':
                items = []
                if recursive:
                    # Recursive listing returns file paths only; directories
                    # themselves are not included.
                    for root, dirs, files in os.walk(path):
                        for file in files:
                            items.append(os.path.join(root, file))
                else:
                    items = os.listdir(path) if os.path.exists(path) else []
                return {'success': True, 'items': items}
            elif operation == 'delete':
                if not os.path.exists(path):
                    return {'success': False, 'error': f'File does not exist: {path}'}
                # Create backup before deletion so deletes are recoverable.
                timestamp = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
                backup_dir = "/tmp/elizabeth_backups"
                os.makedirs(backup_dir, exist_ok=True)
                backup_path = os.path.join(backup_dir, f"{os.path.basename(path)}_{timestamp}")
                if os.path.isdir(path):
                    shutil.copytree(path, backup_path)
                else:
                    shutil.copy2(path, backup_path)
                if os.path.isdir(path):
                    shutil.rmtree(path)
                else:
                    os.remove(path)
                return {'success': True, 'message': f'Safely deleted {path} (backup: {backup_path})'}
            elif operation == 'info':
                if not os.path.exists(path):
                    return {'success': False, 'error': f'File does not exist: {path}'}
                stat = os.stat(path)
                return {'success': True, 'info': {
                    'size': stat.st_size,
                    'modified': datetime.datetime.fromtimestamp(stat.st_mtime).isoformat(),
                    'is_dir': os.path.isdir(path),
                    'permissions': oct(stat.st_mode)[-3:],
                    'exists': os.path.exists(path),
                    'absolute_path': os.path.abspath(path)
                }}
            else:
                return {'success': False, 'error': f'Unsupported operation: {operation}'}
        except Exception as e:
            return {'success': False, 'error': str(e)}
def system_monitor(self, metric: str = 'all') -> Dict[str, Any]:
"""Monitor system resources and performance"""
try:
result = {}
if metric in ['cpu', 'all']:
result['cpu'] = {
'percent': psutil.cpu_percent(interval=1),
'count': psutil.cpu_count(),
'freq': psutil.cpu_freq()._asdict() if psutil.cpu_freq() else None
}
if metric in ['memory', 'all']:
mem = psutil.virtual_memory()
result['memory'] = {
'total': mem.total,
'available': mem.available,
'percent': mem.percent,
'used': mem.used,
'free': mem.free
}
if metric in ['disk', 'all']:
disk = psutil.disk_usage('/')
result['disk'] = {
'total': disk.total,
'used': disk.used,
'free': disk.free,
'percent': (disk.used / disk.total) * 100
}
if metric in ['gpu', 'all']:
try:
import subprocess
gpu_info = subprocess.check_output(['nvidia-smi', '--query-gpu=memory.used,memory.total,utilization.gpu,temperature.gpu', '--format=csv,noheader,nounits'],
stderr=subprocess.DEVNULL).decode().strip().split('\n')
gpu_data = []
for i, gpu in enumerate(gpu_info):
if gpu.strip():
used, total, util, temp = gpu.split(', ')
gpu_data.append({
'index': i,
'memory_used': int(used),
'memory_total': int(total),
'memory_percent': (int(used) / int(total)) * 100,
'gpu_util': int(util),
'temperature': int(temp)
})
result['gpu'] = gpu_data
except:
result['gpu'] = 'NVIDIA GPU not available'
return {'success': True, 'data': result}
except Exception as e:
return {'success': False, 'error': str(e)}
def cloud_operations(self, operation: str, service: str, **kwargs) -> Dict[str, Any]:
"""Perform cloud operations (AWS, GCP, Azure compatible)"""
try:
if service == 'aws':
return self.aws_operations(operation, **kwargs)
elif service == 'gcp':
return self.gcp_operations(operation, **kwargs)
elif service == 'azure':
return self.azure_operations(operation, **kwargs)
else:
return {'success': False, 'error': f'Unsupported cloud service: {service}'}
except Exception as e:
return {'success': False, 'error': str(e)}
def aws_operations(self, operation: str, **kwargs) -> Dict[str, Any]:
"""AWS-specific operations"""
try:
import boto3
if operation == 'list_instances':
ec2 = boto3.client('ec2')
response = ec2.describe_instances()
instances = []
for reservation in response['Reservations']:
for instance in reservation['Instances']:
instances.append({
'id': instance['InstanceId'],
'state': instance['State']['Name'],
'type': instance['InstanceType']
})
return {'success': True, 'instances': instances}
elif operation == 'list_buckets':
s3 = boto3.client('s3')
response = s3.list_buckets()
return {'success': True, 'buckets': [b['Name'] for b in response['Buckets']]}
return {'success': False, 'error': f'Unsupported AWS operation: {operation}'}
except Exception as e:
return {'success': False, 'error': str(e)}
def gcp_operations(self, operation: str, **kwargs) -> Dict[str, Any]:
"""GCP-specific operations"""
try:
from google.cloud import compute_v1, storage
if operation == 'list_instances':
client = compute_v1.InstancesClient()
project = kwargs.get('project', 'default')
zone = kwargs.get('zone', 'us-central1-a')
instances = client.list(project=project, zone=zone)
result = []
for instance in instances:
result.append({
'name': instance.name,
'status': instance.status,
'machine_type': instance.machine_type.split('/')[-1]
})
return {'success': True, 'instances': result}
return {'success': False, 'error': f'Unsupported GCP operation: {operation}'}
except Exception as e:
return {'success': False, 'error': str(e)}
def azure_operations(self, operation: str, **kwargs) -> Dict[str, Any]:
"""Azure-specific operations"""
try:
from azure.mgmt.compute import ComputeManagementClient
from azure.identity import DefaultAzureCredential
if operation == 'list_instances':
credential = DefaultAzureCredential()
subscription_id = kwargs.get('subscription_id')
compute_client = ComputeManagementClient(credential, subscription_id)
vms = compute_client.virtual_machines.list_all()
instances = []
for vm in vms:
instances.append({
'name': vm.name,
'resource_group': vm.id.split('/')[4],
'location': vm.location
})
return {'success': True, 'instances': instances}
return {'success': False, 'error': f'Unsupported Azure operation: {operation}'}
except Exception as e:
return {'success': False, 'error': str(e)}
def network_scan(self, target: str, ports: List[int] = None) -> Dict[str, Any]:
"""Perform network scanning and monitoring"""
try:
if not ports:
ports = [21, 22, 23, 25, 53, 80, 110, 143, 443, 993, 995]
import socket
open_ports = []
for port in ports:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.settimeout(1)
result = sock.connect_ex((target, port))
if result == 0:
open_ports.append(port)
sock.close()
return {'success': True, 'target': target, 'open_ports': open_ports}
except Exception as e:
return {'success': False, 'error': str(e)}
def process_manager(self, operation: str, pid: int = None, name: str = None) -> Dict[str, Any]:
"""Manage system processes"""
try:
if operation == 'list':
processes = []
for proc in psutil.process_iter(['pid', 'name', 'cpu_percent', 'memory_percent']):
try:
processes.append(proc.info)
except (psutil.NoSuchProcess, psutil.AccessDenied):
pass
return {'success': True, 'processes': processes}
elif operation == 'kill' and pid:
process = psutil.Process(pid)
process.terminate()
return {'success': True, 'message': f'Killed process {pid}'}
elif operation == 'find' and name:
processes = []
for proc in psutil.process_iter(['pid', 'name']):
if name.lower() in proc.info['name'].lower():
processes.append(proc.info)
return {'success': True, 'processes': processes}
return {'success': False, 'error': f'Unsupported process operation: {operation}'}
except Exception as e:
return {'success': False, 'error': str(e)}
def data_analysis(self, operation: str, data: Any, **kwargs) -> Dict[str, Any]:
"""Perform data analysis operations"""
try:
import pandas as pd
import numpy as np
if operation == 'describe':
if isinstance(data, str):
df = pd.read_csv(data)
else:
df = pd.DataFrame(data)
return {'success': True, 'description': df.describe().to_dict()}
elif operation == 'correlation':
df = pd.DataFrame(data)
return {'success': True, 'correlation': df.corr().to_dict()}
elif operation == 'summary':
df = pd.DataFrame(data)
return {'success': True, 'summary': {
'shape': df.shape,
'columns': list(df.columns),
'dtypes': df.dtypes.to_dict(),
'null_counts': df.isnull().sum().to_dict()
}}
return {'success': False, 'error': f'Unsupported data operation: {operation}'}
except Exception as e:
return {'success': False, 'error': str(e)}
def code_execution(self, code: str, language: str = 'python') -> Dict[str, Any]:
"""Execute code snippets safely"""
try:
if language == 'python':
import io
import sys
from contextlib import redirect_stdout, redirect_stderr
stdout = io.StringIO()
stderr = io.StringIO()
with redirect_stdout(stdout), redirect_stderr(stderr):
exec(code)
return {
'success': True,
'stdout': stdout.getvalue(),
'stderr': stderr.getvalue()
}
elif language == 'bash':
result = subprocess.run(code, shell=True, capture_output=True, text=True)
return {
'success': result.returncode == 0,
'stdout': result.stdout,
'stderr': result.stderr,
'return_code': result.returncode
}
return {'success': False, 'error': f'Unsupported language: {language}'}
except Exception as e:
return {'success': False, 'error': str(e)}
    def memory_operations(self, operation: str, key: str = None, value: Any = None) -> Dict[str, Any]:
        """Manage persistent memory storage with session persistence and chat history.

        Tries a local DragonflyDB (redis-compatible) instance first; when the
        connection fails, degrades to file-based storage via
        _file_based_memory().

        Args:
            operation: One of 'store', 'retrieve', 'list', 'store_session',
                'get_chat_history'.
            key: Storage key for 'store'/'retrieve'.
            value: Payload for 'store'/'store_session'; must be JSON-serializable.

        Returns:
            A dict with 'success' plus operation-specific data.
        """
        try:
            # Use DragonflyDB for persistence
            # NOTE(review): if the `redis` package is not installed, the
            # ImportError is caught by the OUTER except and returns an error
            # instead of falling back to file storage -- confirm whether the
            # import should live inside the inner try as well.
            import redis
            try:
                # NOTE(review): connection credentials are hard-coded here;
                # consider moving host/port/password to configuration.
                redis_client = redis.Redis(
                    host='localhost',
                    port=18000,
                    password='elizabeth-secret-2025',
                    decode_responses=True
                )
                redis_client.ping()
            except:
                # Bare except is deliberate best-effort: ANY connection
                # failure degrades to the file-based store.
                # Fallback to file-based memory
                return self._file_based_memory(operation, key, value)
            if operation == 'store':
                session_data = {
                    'value': value,
                    'timestamp': datetime.datetime.now().isoformat(),
                    'session_id': os.getpid(),
                    # Keys containing "chat" are tagged as chat history.
                    'type': 'chat_history' if 'chat' in str(key) else 'memory'
                }
                redis_client.setex(key, 86400 * 7, json.dumps(session_data)) # 7 days TTL
                return {'success': True, 'message': f'Stored {key} to DragonflyDB'}
            elif operation == 'retrieve':
                data = redis_client.get(key)
                if data:
                    return {'success': True, 'data': json.loads(data)}
                else:
                    return {'success': False, 'error': f'Key {key} not found'}
            elif operation == 'list':
                keys = redis_client.keys('*')
                return {'success': True, 'keys': keys}
            elif operation == 'store_session':
                # Store entire session with timestamp
                session_key = f"elizabeth_session_{datetime.datetime.now().strftime('%Y%m%d_%H%M%S')}"
                session_data = {
                    'session_data': value,
                    'timestamp': datetime.datetime.now().isoformat(),
                    'pid': os.getpid(),
                    'type': 'full_session'
                }
                redis_client.setex(session_key, 86400 * 30, json.dumps(session_data)) # 30 days TTL
                return {'success': True, 'message': f'Session stored as {session_key}'}
            elif operation == 'get_chat_history':
                # Retrieve all chat history
                # NOTE(review): filters on 'chat_' while 'store' tags keys
                # containing 'chat' -- keys like "mychat" would be stored as
                # chat_history yet missed here; verify the naming convention.
                chat_keys = [k for k in redis_client.keys('*') if 'chat_' in str(k)]
                history = []
                for key in sorted(chat_keys):
                    data = redis_client.get(key)
                    if data:
                        history.append(json.loads(data))
                return {'success': True, 'history': history}
            return {'success': False, 'error': f'Unsupported memory operation: {operation}'}
        except Exception as e:
            return {'success': False, 'error': str(e)}
def _file_based_memory(self, operation: str, key: str = None, value: Any = None) -> Dict[str, Any]:
"""Fallback file-based memory for session persistence"""
try:
memory_dir = "/data/adaptai/platform/aiml/mlops/elizabeth_sessions"
os.makedirs(memory_dir, exist_ok=True)
if operation == 'store':
session_file = f"{memory_dir}/session_{datetime.datetime.now().strftime('%Y%m%d_%H%M%S')}.json"
session_data = {
'key': key,
'value': value,
'timestamp': datetime.datetime.now().isoformat(),
'type': 'chat_history' if 'chat' in str(key) else 'memory'
}
with open(session_file, 'w') as f:
json.dump(session_data, f, indent=2)
return {'success': True, 'message': f'Stored to file: {session_file}'}
elif operation == 'retrieve':
# Find most recent file with key
files = glob.glob(f"{memory_dir}/*.json")
for file in sorted(files, reverse=True):
with open(file, 'r') as f:
data = json.load(f)
if data.get('key') == key:
return {'success': True, 'data': data}
return {'success': False, 'error': f'Key {key} not found'}
elif operation == 'list':
files = glob.glob(f"{memory_dir}/*.json")
return {'success': True, 'keys': [os.path.basename(f) for f in files]}
elif operation == 'get_chat_history':
files = glob.glob(f"{memory_dir}/*.json")
history = []
for file in sorted(files):
with open(file, 'r') as f:
data = json.load(f)
if data.get('type') == 'chat_history':
history.append(data)
return {'success': True, 'history': history}
return {'success': False, 'error': f'Unsupported operation'}
except Exception as e:
return {'success': False, 'error': str(e)}
def execute_tool(self, tool_name: str, **kwargs) -> Dict[str, Any]:
"""Execute a specific tool by name"""
if tool_name not in self.tool_registry:
return {'success': False, 'error': f'Tool {tool_name} not found'}
return self.tool_registry[tool_name](**kwargs)
def list_tools(self) -> List[str]:
"""List all available tools"""
return list(self.tool_registry.keys())
def get_tool_info(self, tool_name: str) -> Dict[str, Any]:
"""Get detailed information about a specific tool"""
if tool_name not in self.tool_registry:
return {'success': False, 'error': f'Tool {tool_name} not found'}
tool_func = self.tool_registry[tool_name]
return {
'name': tool_name,
'description': tool_func.__doc__,
'parameters': self._extract_parameters(tool_func)
}
def _extract_parameters(self, func) -> Dict[str, str]:
"""Extract parameter information from function signature"""
import inspect
signature = inspect.signature(func)
params = {}
for name, param in signature.parameters.items():
if name != 'self':
params[name] = str(param.annotation) if param.annotation != inspect.Parameter.empty else 'Any'
return params
# Global tool instance
# NOTE(review): instantiating at import time runs setup_logging() (which calls
# logging.basicConfig, a process-wide side effect) and builds the tool
# registry -- confirm eager construction is intended for importers.
elizabeth_tools = ElizabethTools()