# Copyright 2025 - EWAAST Project
# Adapted from Google's appoint-ready architecture
# Licensed under the Apache License, Version 2.0

"""Disk-based caching for expensive LLM calls.

Adapted directly from appoint-ready's cache.py
"""

from diskcache import Cache
import os
import shutil
import tempfile
import zipfile
import logging

# Module-wide cache instance; location is overridable via the CACHE_DIR
# environment variable (defaults to ".cache" in the working directory).
cache = Cache(os.environ.get("CACHE_DIR", ".cache"))

# Best-effort startup diagnostics: report cache size without ever failing
# the import (the broad except is deliberate — stats are informational only).
try:
    item_count = len(cache)
    size_bytes = cache.volume()
    print(f"EWAAST Cache loaded: {item_count} items, approx {size_bytes} bytes")
except Exception as e:
    print(f"Could not retrieve cache statistics: {e}")


def create_cache_zip():
    """Creates a zip file of the cache for backup/download.

    Returns:
        tuple: ``(archive_path, None)`` on success, where ``archive_path``
        is the path of the created zip in the system temp directory, or
        ``(None, error_message)`` on failure.
    """
    temp_dir = tempfile.gettempdir()
    base_name = os.path.join(temp_dir, "ewaast_cache_archive")
    archive_path = base_name + ".zip"
    cache_directory = os.environ.get("CACHE_DIR", ".cache")

    if not os.path.isdir(cache_directory):
        logging.error("Cache directory not found at %s", cache_directory)
        return None, f"Cache directory not found: {cache_directory}"

    logging.info("Forcing a cache checkpoint for safe backup...")
    try:
        # Opening (and letting the context manager close) a second handle
        # forces diskcache to flush its SQLite state so the on-disk files
        # are consistent before archiving. The original called close()
        # explicitly inside the `with`, which double-closed the handle;
        # the context manager's __exit__ already does it.
        with Cache(cache_directory):
            pass

        # Drop diskcache's scratch directory so stale temporaries are not
        # captured in the backup.
        tmp_path = os.path.join(cache_directory, 'tmp')
        if os.path.isdir(tmp_path):
            logging.info("Removing temporary cache directory: %s", tmp_path)
            shutil.rmtree(tmp_path)

        logging.info(
            "Creating zip archive of %s to %s", cache_directory, archive_path
        )
        with zipfile.ZipFile(
            archive_path, 'w', zipfile.ZIP_DEFLATED, compresslevel=9
        ) as zipf:
            for root, _, files in os.walk(cache_directory):
                for file in files:
                    file_path = os.path.join(root, file)
                    # Store paths relative to the cache root so the archive
                    # unpacks into a clean directory tree.
                    arcname = os.path.relpath(file_path, cache_directory)
                    zipf.write(file_path, arcname)

        logging.info("Zip archive created successfully.")
        return archive_path, None
    except Exception as e:
        logging.error("Error creating zip archive: %s", e, exc_info=True)
        return None, f"Error creating zip archive: {e}"