File size: 3,559 Bytes
fbf3c28
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
#!/usr/bin/env python3
# Generator Freshness Checker
# Ensures generated artifacts are up-to-date with DTOs

import argparse
import hashlib
import json
import os
import sys
from pathlib import Path
from typing import Dict, List

class FreshnessChecker:
    """Verifies that generated artifacts are at least as new as the DTO files
    they are derived from (freshness is judged by file mtime).

    Relatedness between a DTO and a generated file is a name heuristic only;
    see _is_related.
    """

    def __init__(self, platform_dir=None, generated_dirs=None):
        """Initialize the checker.

        Args:
            platform_dir: Root directory containing one sub-directory per
                domain, each optionally holding a "dto" folder.
                Defaults to /data/adaptai/platform (original hard-coded path).
            generated_dirs: Iterable of directories holding generated
                artifacts. Defaults to the original hard-coded locations.
        """
        self.platform_dir = (
            Path(platform_dir) if platform_dir is not None
            else Path("/data/adaptai/platform")
        )
        if generated_dirs is None:
            generated_dirs = [
                Path("/data/adaptai/systemd"),
                Path("/data/adaptai/platform") / "signalcore" / "ops" / "supervisor",
                Path("/data/adaptai/deployment") / "environments",
            ]
        self.generated_dirs = [Path(d) for d in generated_dirs]

    def calculate_hash(self, file_path: Path) -> str:
        """Return the SHA-256 hex digest of *file_path*'s contents.

        Reads in 4 KiB chunks so large files are not loaded into memory.
        """
        hasher = hashlib.sha256()
        with open(file_path, 'rb') as f:
            for chunk in iter(lambda: f.read(4096), b""):
                hasher.update(chunk)
        return hasher.hexdigest()

    def find_dto_files(self) -> List[Path]:
        """Find all DTO files (*.yaml / *.yml) under <platform>/<domain>/dto.

        Returns an empty list when the platform root is missing instead of
        raising FileNotFoundError (robustness fix; generated dirs were
        already guarded the same way).
        """
        dto_files: List[Path] = []
        if not self.platform_dir.is_dir():
            return dto_files
        for domain_dir in self.platform_dir.iterdir():
            if domain_dir.is_dir():
                dto_dir = domain_dir / "dto"
                if dto_dir.exists():
                    dto_files.extend(dto_dir.glob("*.yaml"))
                    dto_files.extend(dto_dir.glob("*.yml"))
        return dto_files

    def check_generated_freshness(self) -> bool:
        """Check every DTO; print a per-DTO status line.

        Returns:
            True iff all DTOs have fresh (non-stale) generated artifacts.
            Vacuously True when no DTO files are found.
        """
        dto_files = self.find_dto_files()
        all_fresh = True
        for dto_file in dto_files:
            fresh = self._check_dto_freshness(dto_file)
            if not fresh:
                all_fresh = False
                print(f"❌ Stale artifacts for {dto_file}")
            else:
                print(f"✅ Artifacts fresh for {dto_file}")
        return all_fresh

    def _check_dto_freshness(self, dto_file: Path) -> bool:
        """Return True iff every related generated file is at least as new
        as *dto_file* (by mtime).

        Note: the original also computed a SHA-256 of the DTO here but never
        used it; that full-file read has been removed.
        """
        dto_mtime = dto_file.stat().st_mtime
        for generated_dir in self.generated_dirs:
            if not generated_dir.exists():
                continue
            for generated_file in generated_dir.rglob("*"):
                if not generated_file.is_file():
                    continue
                # Only compare files the heuristic ties to this DTO.
                if self._is_related(generated_file, dto_file):
                    gen_mtime = generated_file.stat().st_mtime
                    if gen_mtime < dto_mtime:
                        print(f"Generated file {generated_file} is older than DTO {dto_file}")
                        return False
        return True

    def _is_related(self, generated_file: Path, dto_file: Path) -> bool:
        """Heuristic relatedness: the DTO's domain (the directory two levels
        above the DTO file, i.e. <platform>/<domain>/dto/x.yaml) appears as a
        substring of the generated file's path. Coarse by design — may
        over-match when one domain name is a substring of another.
        """
        dto_domain = dto_file.parent.parent.name
        return dto_domain in str(generated_file)

def main():
    """CLI entry point: run the freshness check, or print usage and exit 1."""
    arg_parser = argparse.ArgumentParser(description="Generator Freshness Checker")
    arg_parser.add_argument("--check", action="store_true", help="Check all generated artifacts")
    parsed = arg_parser.parse_args()

    # Guard clause: without --check there is nothing to do.
    if not parsed.check:
        arg_parser.print_help()
        sys.exit(1)

    checker = FreshnessChecker()
    # Exit 0 only when every generated artifact is fresh.
    sys.exit(0 if checker.check_generated_freshness() else 1)

# Standard script entry guard: run the CLI only when executed directly,
# not when imported as a module.
if __name__ == "__main__":
    main()