# Persistence layer for a Hugging Face Space: saves/loads game state to a
# HF dataset repo. (Header reconstructed from a web-scraped "Spaces: Sleeping"
# status banner that was not part of the source.)
import json
import os
import shutil
import tempfile
import time

from huggingface_hub import HfApi, hf_hub_download
# Both must be set for any remote persistence operation to run; every helper
# below degrades to a no-op/empty result when they are missing.
HF_TOKEN = os.getenv("HF_TOKEN")
DATASET_ID = os.getenv("DATASET_ID")  # e.g. "username/grim-fable-data"

api = HfApi(token=HF_TOKEN)

MANIFEST_FILE = "manifest.json"

# In-memory manifest cache to reduce API calls.
_manifest_cache = {"data": None, "last_fetch": 0}
MANIFEST_CACHE_TTL = 30  # seconds
def get_manifest(force_refresh: bool = False):
    """Download and parse manifest.json from the dataset repo, with a TTL cache.

    Args:
        force_refresh: Skip the in-memory cache and re-fetch the manifest.

    Returns:
        The parsed manifest dict; ``{"saves": {}}`` when persistence is not
        configured or the manifest cannot be fetched/parsed.
    """
    # Persistence disabled without credentials and a target repo.
    if not HF_TOKEN or not DATASET_ID:
        return {"saves": {}}

    now = time.time()
    # BUGFIX: compare against None instead of truthiness — a falsy cached
    # manifest (e.g. an empty dict) previously defeated the cache on every
    # call and forced a re-download.
    if (
        not force_refresh
        and _manifest_cache["data"] is not None
        and (now - _manifest_cache["last_fetch"]) < MANIFEST_CACHE_TTL
    ):
        return _manifest_cache["data"]

    try:
        downloaded_path = hf_hub_download(
            repo_id=DATASET_ID,
            filename=MANIFEST_FILE,
            repo_type="dataset",
            token=HF_TOKEN,
        )
        with open(downloaded_path, "r") as f:
            data = json.load(f)
        _manifest_cache["data"] = data
        _manifest_cache["last_fetch"] = now
        return data
    except Exception as e:
        # Best-effort: a missing manifest is expected on first run.
        print(f"Notice: manifest.json not found or could not be read: {e}")
        return {"saves": {}}
def update_manifest(save_name: str, description: str = None, deleted: bool = False):
    """Update manifest.json in the dataset repo with save metadata.

    Args:
        save_name: Key of the save slot to add/update/remove.
        description: New description; ``None`` keeps the existing one.
        deleted: When True, remove the entry instead of upserting it.
    """
    if not HF_TOKEN or not DATASET_ID:
        return
    manifest = get_manifest()
    if "saves" not in manifest:
        manifest["saves"] = {}

    if deleted:
        # Removing a save that isn't listed is a no-op.
        manifest["saves"].pop(save_name, None)
    else:
        existing = manifest["saves"].get(save_name, {})
        manifest["saves"][save_name] = {
            # .get() guards against a manifest entry missing "description"
            # (previously a direct index that could raise KeyError).
            "description": description if description is not None else existing.get("description", ""),
            "timestamp": time.time(),
        }

    temp_path = None
    try:
        with tempfile.NamedTemporaryFile(mode="w", delete=False) as tf:
            json.dump(manifest, tf)
            temp_path = tf.name
        api.upload_file(
            path_or_fileobj=temp_path,
            path_in_repo=MANIFEST_FILE,
            repo_id=DATASET_ID,
            repo_type="dataset",
        )
        # Invalidate cache so the next read re-fetches the fresh manifest.
        _manifest_cache["data"] = None
    except Exception as e:
        print(f"Error updating manifest: {e}")
    finally:
        # BUGFIX: previously the temp file was only removed on the success
        # path, leaking a file every time upload_file raised.
        if temp_path and os.path.exists(temp_path):
            os.remove(temp_path)
def save_to_dataset(save_name: str, db_path: str, description: str = None):
    """Upload the world database for *save_name* and record it in the manifest.

    Returns:
        True on success; False when persistence is unconfigured or the
        upload/manifest update fails.
    """
    if not HF_TOKEN or not DATASET_ID:
        return False
    try:
        api.upload_file(
            path_or_fileobj=db_path,
            path_in_repo=f"{save_name}/world.db",
            repo_id=DATASET_ID,
            repo_type="dataset",
        )
        update_manifest(save_name, description)
        return True
    except Exception as exc:
        print(f"Error saving to dataset: {exc}")
        return False
def load_from_dataset(save_name: str, db_path: str):
    """Download a save's world.db from the dataset repo into *db_path*.

    Returns:
        True when the file was fetched and copied; False otherwise.
    """
    if not HF_TOKEN or not DATASET_ID:
        return False
    remote_path = f"{save_name}/world.db"
    try:
        local_copy = hf_hub_download(
            repo_id=DATASET_ID,
            filename=remote_path,
            repo_type="dataset",
            token=HF_TOKEN,
        )
        shutil.copy(local_copy, db_path)
        return True
    except Exception as exc:
        print(f"Error loading from dataset: {exc}")
        return False
def delete_save(save_name: str):
    """Remove a save slot: drop its manifest entry, then delete its files.

    Returns:
        True on success; False when unconfigured or a repo API call fails.

    NOTE(review): the manifest entry is removed before the files — if file
    deletion then fails, orphaned files may remain in the repo (the original
    code has the same ordering).
    """
    if not HF_TOKEN or not DATASET_ID:
        return False
    try:
        update_manifest(save_name, deleted=True)
        prefix = f"{save_name}/"
        for repo_file in api.list_repo_files(repo_id=DATASET_ID, repo_type="dataset"):
            if repo_file.startswith(prefix):
                api.delete_file(path_in_repo=repo_file, repo_id=DATASET_ID, repo_type="dataset")
        return True
    except Exception as exc:
        print(f"Error deleting save: {exc}")
        return False
def list_saves():
    """Return a list of {"name", "description", "timestamp"} dicts, one per save."""
    if not HF_TOKEN or not DATASET_ID:
        return []
    saves = get_manifest().get("saves", {})
    return [
        {
            "name": name,
            "description": info.get("description", ""),
            "timestamp": info.get("timestamp", 0),
        }
        for name, info in saves.items()
    ]
def get_cached_media(save_name: str, entity_id: str, media_type: str):
    """Retrieve cached media bytes from the dataset repo, if present.

    Args:
        save_name: Save slot the media belongs to.
        entity_id: Identifier used as the media file's basename.
        media_type: "image" maps to .webp; anything else maps to .mp3.

    Returns:
        Raw file bytes, or None when unconfigured or not cached.
    """
    # Consistency fix: every other repo helper guards on config; previously
    # an unconfigured environment fell through to the exception path below.
    if not HF_TOKEN or not DATASET_ID:
        return None
    ext = "webp" if media_type == "image" else "mp3"
    path_in_repo = f"{save_name}/media/{entity_id}.{ext}"
    try:
        downloaded_path = hf_hub_download(
            repo_id=DATASET_ID,
            filename=path_in_repo,
            repo_type="dataset",
            token=HF_TOKEN,
        )
        with open(downloaded_path, "rb") as f:
            return f.read()
    except Exception:
        # Cache miss (or transient error) — caller regenerates the media.
        return None
def save_cached_media(save_name: str, entity_id: str, media_type: str, content: bytes):
    """Persist media bytes to the dataset repo under the save's media folder.

    Args:
        save_name: Save slot the media belongs to.
        entity_id: Identifier used as the media file's basename.
        media_type: "image" maps to .webp; anything else maps to .mp3.
        content: Raw media bytes to store.

    Returns:
        True on success, False otherwise.
    """
    # Consistency fix: guard on config like the other repo helpers.
    if not HF_TOKEN or not DATASET_ID:
        return False
    ext = "webp" if media_type == "image" else "mp3"
    path_in_repo = f"{save_name}/media/{entity_id}.{ext}"
    temp_path = None
    try:
        with tempfile.NamedTemporaryFile(mode="wb", delete=False) as tf:
            tf.write(content)
            temp_path = tf.name
        api.upload_file(
            path_or_fileobj=temp_path,
            path_in_repo=path_in_repo,
            repo_id=DATASET_ID,
            repo_type="dataset",
        )
        return True
    except Exception as e:
        print(f"Error caching media: {e}")
        return False
    finally:
        # BUGFIX: previously the temp file leaked whenever upload_file raised.
        if temp_path and os.path.exists(temp_path):
            os.remove(temp_path)
class PersistenceLoop:
    """Tracks play activity and decides when the world state should autosave."""

    def __init__(self):
        now = time.time()
        self.last_interaction_time = now
        self.last_save_time = now
        self.current_save_name = "default_save"
        self.needs_save = False
        self.turn_counter = 0

    def update_interaction(self):
        """Record a player interaction: mark state dirty and count the turn."""
        self.last_interaction_time = time.time()
        self.needs_save = True
        self.turn_counter += 1

    def should_autosave(self, interaction_happened=False):
        """Return True when an autosave should run right now.

        Rules, in order:
          1. Every 5th interaction triggers an immediate save.
          2. During active play, save anyway if more than an hour has
             elapsed since the last save and there are unsaved changes.
          3. When idle, save once 30 minutes pass since the last
             interaction with unsaved changes.
        """
        now = time.time()
        if interaction_happened:
            if self.turn_counter >= 5:
                self.turn_counter = 0
                return True
            # Safety save for long uninterrupted sessions.
            return self.needs_save and (now - self.last_save_time) > 3600
        # Idle path: flush pending changes after 30 minutes of quiet.
        return self.needs_save and (now - self.last_interaction_time) > 1800


persistence_manager = PersistenceLoop()