| |
| """ |
| DTO CI Validation Script |
| Implements validation gates required by OPERATING_AGREEMENT.md |
| """ |
|
|
| import os |
| import sys |
| import yaml |
| import json |
| import subprocess |
| from pathlib import Path |
| from typing import Dict, Any, List, Tuple |
|
|
class DTOCIValidator:
    """CI validation gates for the DTO service manifest.

    Each ``validate_*`` method is one independent gate: it appends
    human-readable findings to ``self.errors`` (fatal) or ``self.warnings``
    (advisory) and returns ``True`` when that gate passes.
    ``run_validation`` executes every gate and reports the aggregate result.
    """

    def __init__(self):
        # All paths are resolved relative to the directory of this script.
        self.root_path = Path(__file__).parent
        self.manifest_path = self.root_path / "dto_manifest.yaml"
        self.codeowners_path = self.root_path / "CODEOWNERS"
        self.errors: List[str] = []    # fatal findings -> CI gate fails
        self.warnings: List[str] = []  # advisory findings, reported only
        self._manifest_cache = None    # memoized parsed manifest (dict)

    def load_manifest(self) -> Dict[str, Any]:
        """Load and parse the DTO manifest, memoizing a successful parse.

        The original re-read and re-parsed the file for every gate and
        re-recorded "not found" each time; a successful parse is now cached.

        Returns:
            The manifest mapping, or ``{}`` when the file is missing or
            unparseable (an error is recorded in those cases).
        """
        if self._manifest_cache is not None:
            return self._manifest_cache

        if not self.manifest_path.exists():
            self.errors.append("DTO manifest not found")
            return {}

        try:
            with open(self.manifest_path, 'r') as f:
                # Guard against an empty YAML document parsing to None.
                self._manifest_cache = yaml.safe_load(f) or {}
        except Exception as e:
            self.errors.append(f"Failed to parse manifest: {e}")
            return {}
        return self._manifest_cache

    def _finish(self, baseline: int, fail_msg: str, pass_msg: str) -> bool:
        """Report a gate's outcome from errors added since *baseline*.

        Bug fix: the original tested the *global* ``self.errors`` list, so a
        single failing gate made every subsequent gate report failure too.
        Counting only the errors recorded by the current gate keeps the
        gates independent.
        """
        if len(self.errors) > baseline:
            print(f"\u274c {fail_msg}")
            return False
        print(f"\u2705 {pass_msg}")
        return True

    def validate_schema(self) -> bool:
        """Gate: the manifest has the required top-level structure."""
        print("\U0001f50d Validating DTO manifest schema...")
        baseline = len(self.errors)

        manifest = self.load_manifest()
        if not manifest:
            return False

        required_fields = [
            'apiVersion', 'kind', 'metadata', 'services',
            'slo_specifications', 'port_registry', 'validation'
        ]
        for field in required_fields:
            if field not in manifest:
                self.errors.append(f"Missing required field: {field}")

        # The manifest format is versioned; only dto/v1 is understood here.
        if manifest.get('apiVersion') != 'dto/v1':
            self.errors.append(f"Invalid API version: {manifest.get('apiVersion')}")

        if manifest.get('kind') != 'ServiceManifest':
            self.errors.append(f"Invalid kind: {manifest.get('kind')}")

        metadata = manifest.get('metadata', {})
        for field in ['name', 'namespace', 'version', 'owner']:
            if field not in metadata:
                self.errors.append(f"Missing metadata field: {field}")

        return self._finish(baseline, "Schema validation failed",
                            "Schema validation passed")

    def validate_port_conflicts(self) -> bool:
        """Gate: no port is claimed by more than one service."""
        print("\U0001f50d Checking for port conflicts...")
        baseline = len(self.errors)

        manifest = self.load_manifest()
        if not manifest:
            return False

        used_ports: Dict[Any, str] = {}  # port -> first service that claimed it
        for services in manifest.get('services', {}).values():
            for service in services:
                service_name = service['name']
                for port in service.get('ports', []):
                    if port in used_ports:
                        self.errors.append(
                            f"Port conflict: {port} used by both {used_ports[port]} and {service_name}"
                        )
                    else:
                        used_ports[port] = service_name

        return self._finish(baseline, "Port conflict validation failed",
                            "No port conflicts found")

    def validate_dependencies(self) -> bool:
        """Gate: every declared dependency names a defined service."""
        print("\U0001f50d Validating service dependencies...")
        baseline = len(self.errors)

        manifest = self.load_manifest()
        if not manifest:
            return False

        # First pass: collect every declared service name.
        all_services = {
            service['name']
            for services in manifest.get('services', {}).values()
            for service in services
        }

        # Second pass: each dependency must resolve to one of those names.
        for services in manifest.get('services', {}).values():
            for service in services:
                for dep in service.get('dependencies', []):
                    if dep not in all_services:
                        self.errors.append(
                            f"Service {service['name']} depends on non-existent service: {dep}"
                        )

        return self._finish(baseline, "Dependency validation failed",
                            "All dependencies validated")

    def validate_slo_compliance(self) -> bool:
        """Gate: services reference valid SLO tiers and tiers are complete."""
        print("\U0001f50d Validating SLO compliance...")
        baseline = len(self.errors)

        manifest = self.load_manifest()
        if not manifest:
            return False

        slo_specs = manifest.get('slo_specifications', {})
        valid_tiers = set(slo_specs.keys())

        for services in manifest.get('services', {}).values():
            for service in services:
                slo_tier = service.get('slo_tier')
                if not slo_tier:
                    self.errors.append(f"Service {service['name']} missing SLO tier")
                elif slo_tier not in valid_tiers:
                    self.errors.append(
                        f"Service {service['name']} has invalid SLO tier: {slo_tier}"
                    )

        # Every tier definition must spell out the full SLO contract.
        required_slo_fields = ['availability', 'latency_p99', 'recovery_time', 'max_downtime']
        for tier, spec in slo_specs.items():
            for field in required_slo_fields:
                if field not in spec:
                    self.errors.append(f"SLO tier {tier} missing field: {field}")

        return self._finish(baseline, "SLO compliance validation failed",
                            "SLO compliance validated")

    def validate_codeowners(self) -> bool:
        """Gate: CODEOWNERS exists and covers the required DTO paths."""
        print("\U0001f50d Validating CODEOWNERS...")
        baseline = len(self.errors)

        if not self.codeowners_path.exists():
            self.errors.append("CODEOWNERS file not found")
            return False

        try:
            with open(self.codeowners_path, 'r') as f:
                codeowners_content = f.read()
        except Exception as e:
            self.errors.append(f"Failed to read CODEOWNERS: {e}")
            return False

        # Substring match only: ownership patterns must at least be present.
        required_patterns = [
            '/data/adaptai/platform/dataops/dto/',
            '/data/adaptai/platform/dataops/dto/dto_manifest.yaml',
            '/data/adaptai/platform/dataops/dto/CODEOWNERS',
            '/data/adaptai/platform/dataops/dto/services/',
            '/data/adaptai/platform/dataops/dto/generated/'
        ]
        for pattern in required_patterns:
            if pattern not in codeowners_content:
                self.errors.append(f"CODEOWNERS missing required pattern: {pattern}")

        return self._finish(baseline, "CODEOWNERS validation failed",
                            "CODEOWNERS validation passed")

    def validate_generated_artifacts(self) -> bool:
        """Gate: generated config and runbook artifacts are present."""
        print("\U0001f50d Validating generated artifacts...")
        baseline = len(self.errors)

        generated_path = self.root_path / "generated"
        if not generated_path.exists():
            self.errors.append("Generated artifacts directory not found")
            return False

        required_files = [
            "supervisord-production.conf",
        ]
        for file_name in required_files:
            if not (generated_path / file_name).exists():
                self.errors.append(f"Missing generated file: {file_name}")

        # Runbooks live under docs/runbooks, named *-runbook.md by convention.
        runbooks_path = self.root_path / "docs" / "runbooks"
        if not runbooks_path.exists():
            self.errors.append("Runbooks directory not found")
        elif not list(runbooks_path.glob("*-runbook.md")):
            self.errors.append("No runbooks found")

        return self._finish(baseline, "Generated artifacts validation failed",
                            "Generated artifacts validated")

    def validate_syncthing_policy(self) -> bool:
        """Gate: Syncthing (when enabled) obeys the narrow-usage policy."""
        print("\U0001f50d Validating Syncthing policy...")
        baseline = len(self.errors)

        manifest = self.load_manifest()
        if not manifest:
            return False

        syncthing_config = manifest.get('syncthing', {})
        if syncthing_config.get('enabled'):
            # Policy: Syncthing may only carry CLASS_B / CLASS_C data.
            allowed_classes = syncthing_config.get('data_classes', [])
            if not set(allowed_classes).issubset({'CLASS_B', 'CLASS_C'}):
                self.errors.append("Syncthing can only be used for CLASS_B and CLASS_C data")

            # Advisory only: recommend the standard exclusion set.
            excluded_paths = syncthing_config.get('excluded_paths', [])
            for exclusion in ['/.git/*', '/logs/*', '*.tmp', '*.log']:
                if exclusion not in excluded_paths:
                    self.warnings.append(f"Consider excluding path: {exclusion}")

            # Advisory only: unlimited sync bandwidth is usually unintended.
            if not syncthing_config.get('bandwidth_limits', {}):
                self.warnings.append("Bandwidth limits not specified for Syncthing")

        return self._finish(baseline, "Syncthing policy validation failed",
                            "Syncthing policy validated")

    def validate_environment_overlays(self) -> bool:
        """Gate: every declared environment has an overlay config file."""
        print("\U0001f50d Validating environment overlays...")
        baseline = len(self.errors)

        manifest = self.load_manifest()
        if not manifest:
            return False

        overlays_path = self.root_path / "overlays"
        for env_name in manifest.get('environments', {}):
            if not (overlays_path / env_name / "config.yaml").exists():
                self.errors.append(f"Missing overlay for environment: {env_name}")

        return self._finish(baseline, "Environment overlay validation failed",
                            "Environment overlays validated")

    def run_validation(self) -> Tuple[bool, List[str], List[str]]:
        """Run every gate and report the aggregate result.

        Returns:
            ``(all_passed, errors, warnings)`` — the accumulated findings
            from all gates.
        """
        print("\U0001f680 Running DTO CI validation...")
        print("=" * 50)

        validations = [
            self.validate_schema,
            self.validate_port_conflicts,
            self.validate_dependencies,
            self.validate_slo_compliance,
            self.validate_codeowners,
            self.validate_generated_artifacts,
            self.validate_syncthing_policy,
            self.validate_environment_overlays
        ]

        all_passed = True
        for validation in validations:
            if not validation():
                all_passed = False

        print("=" * 50)

        if all_passed:
            print("\u2705 All CI validation checks passed!")
        else:
            print("\u274c CI validation failed!")
            print("\nErrors:")
            for error in self.errors:
                print(f"  - {error}")

        if self.warnings:
            print("\nWarnings:")
            for warning in self.warnings:
                print(f"  - {warning}")

        return all_passed, self.errors, self.warnings
|
|
def main():
    """Command-line entry point.

    Runs every CI gate via DTOCIValidator and exits with status 0 on
    success, 1 on any failure (findings are printed by run_validation).
    """
    passed, _errors, _warnings = DTOCIValidator().run_validation()
    sys.exit(0 if passed else 1)
|
|
# Run the CLI only when executed directly (not when imported as a module);
# main() exits the process with the validation result as the status code.
if __name__ == "__main__":
    main()