#!/usr/bin/env python3
"""Idempotently apply a fixed set of configuration updates to project files.

For each file named in UPDATES, the matching handler checks whether the
file already satisfies the rule; if not, it takes a timestamped backup and
rewrites/appends as needed. Designed to be safe to re-run.
"""
import os
import re
import shutil
import subprocess
from datetime import datetime
import json

# --- CONFIGURATION: Define update rules ---
# Each key is a filename (relative to the scanned directory); "type" selects
# the handler in FileUpdater; the remaining keys are handler-specific.
UPDATES = {
    "requirements.txt": {
        "type": "lines",
        "entries": [
            "fastapi",
            "gunicorn",
            "uvicorn",
            "flask",
            "pandas==2.1.4",
            "numpy==1.25.2",
            "python-dotenv"
        ],
        "comment": "# Required for agentic services"
    },
    ".env": {
        "type": "keyval",
        "entries": {
            "STATIC_FILES_DIR": "/app/static",
            "FLASK_APP": "backend.app",
            "FLASK_ENV": "production"
        }
    },
    "Dockerfile": {
        "type": "replace_section",
        "pattern": r"^CMD\s+.*",
        "replacement": 'CMD ["gunicorn", "--bind", "0.0.0.0:5000", "--workers", "4", "backend.app:app"]'
    },
    "angular.json": {
        "type": "json_property",
        "path": "projects.agentic-dashboard.architect.build.options",
        "updates": {
            "outputPath": "dist",
            "baseHref": "/static/"
        }
    }
}


# --- MAIN UPDATE HANDLERS ---
class FileUpdater:
    """Stateless collection of file-update strategies.

    Every handler returns ``(updated, backup_path)`` where ``updated`` is
    True only if the file was modified, and ``backup_path`` is the path of
    the pre-modification copy (or None when no change was made or the file
    did not previously exist).
    """

    @staticmethod
    def backup_file(filepath):
        """Copy *filepath* to a timestamped ``.bak_*`` sibling; return its path.

        Returns None if the file does not exist (nothing to back up).
        """
        if not os.path.exists(filepath):
            return None
        timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
        backup_path = f"{filepath}.bak_{timestamp}"
        shutil.copy2(filepath, backup_path)  # copy2 preserves mtime/permissions
        return backup_path

    @staticmethod
    def update_lines_file(path, entries, comment=None):
        """Append any of *entries* not already present (case-insensitive).

        NOTE: comparison is whole-line, so e.g. an existing ``pandas==2.0``
        line does not suppress appending ``pandas==2.1.4``.
        """
        existing = set()
        if os.path.exists(path):
            with open(path, "r") as f:
                existing = {line.strip().lower() for line in f if line.strip()}
        needed = [e for e in entries if e.lower() not in existing]
        if not needed:
            return False, None
        backup = FileUpdater.backup_file(path)
        with open(path, "a") as f:
            if comment:
                f.write(f"\n{comment}\n")
            for entry in needed:
                f.write(f"{entry}\n")
        return True, backup

    @staticmethod
    def update_keyval_file(path, kvs):
        """Ensure every ``KEY=VALUE`` pair in *kvs* is set in *path*.

        Rewrites the whole file: desired pairs first, then any pre-existing
        keys not being updated. NOTE: comments and blank lines in the
        original file are not preserved by this rewrite.
        """
        existing = {}
        if os.path.exists(path):
            with open(path, "r") as f:
                for line in f:
                    # Skip comments; split on the first '=' only so values
                    # containing '=' survive intact.
                    if "=" in line and not line.startswith("#"):
                        k, v = line.strip().split("=", 1)
                        existing[k.strip()] = v.strip()
        needs_update = any(existing.get(k) != v for k, v in kvs.items())
        if not needs_update:
            return False, None
        backup = FileUpdater.backup_file(path)
        with open(path, "w") as f:
            for k, v in kvs.items():
                f.write(f"{k}={v}\n")
            for k, v in existing.items():
                if k not in kvs:
                    f.write(f"{k}={v}\n")
        return True, backup

    @staticmethod
    def update_regex_file(path, pattern, replacement):
        """Apply ``re.sub(pattern, replacement)`` (MULTILINE) to the file.

        No-op (and no backup) when the substitution changes nothing or the
        file is missing.
        """
        if not os.path.exists(path):
            return False, None
        with open(path, "r") as f:
            content = f.read()
        updated = re.sub(pattern, replacement, content, flags=re.MULTILINE)
        if content == updated:
            return False, None
        backup = FileUpdater.backup_file(path)
        with open(path, "w") as f:
            f.write(updated)
        return True, backup

    @staticmethod
    def update_json_file(path, json_path, updates):
        """Merge *updates* into the object at dotted *json_path* in a JSON file.

        Intermediate objects are created as needed; existing keys at the
        target are preserved unless overridden by *updates*. NOTE: this
        handler always rewrites (and backs up) when the file exists — it
        does not detect a no-op merge.
        """
        if not os.path.exists(path):
            return False, None
        backup = FileUpdater.backup_file(path)
        with open(path, "r") as f:
            data = json.load(f)
        keys = json_path.split(".")
        ref = data
        for k in keys[:-1]:
            ref = ref.setdefault(k, {})
        ref[keys[-1]] = {**ref.get(keys[-1], {}), **updates}
        with open(path, "w") as f:
            json.dump(data, f, indent=2)
        return True, backup


# --- MAIN EXECUTION ---
def smart_append(base_dir="."):
    """Run every rule in UPDATES against files under *base_dir*.

    Errors from a single file are reported and do not stop the remaining
    rules (best-effort batch).
    """
    print(f"\n🔧 Scanning: {os.path.abspath(base_dir)}")
    for filename, rule in UPDATES.items():
        path = os.path.join(base_dir, filename)
        # FIX: the original printed a literal placeholder instead of the
        # filename being processed.
        print(f"\n⚙️ Updating: {filename}")
        try:
            if rule["type"] == "lines":
                updated, backup = FileUpdater.update_lines_file(
                    path, rule["entries"], rule.get("comment"))
            elif rule["type"] == "keyval":
                updated, backup = FileUpdater.update_keyval_file(
                    path, rule["entries"])
            elif rule["type"] == "replace_section":
                updated, backup = FileUpdater.update_regex_file(
                    path, rule["pattern"], rule["replacement"])
            elif rule["type"] == "json_property":
                updated, backup = FileUpdater.update_json_file(
                    path, rule["path"], rule["updates"])
            else:
                print("⚠️ Unknown update type.")
                continue
            if updated:
                print(f"✅ Updated {filename} (Backup: {backup})")
            else:
                print(f"✓ No changes needed")
        except Exception as e:
            # Boundary handler: report and continue with the next rule.
            print(f"❌ Error: {e}")


if __name__ == "__main__":
    import sys
    directory = sys.argv[1] if len(sys.argv) > 1 else "."
    smart_append(directory)
    print("\n✨ All updates complete.")