| """ |
| Doctor command for hermes CLI. |
| |
| Diagnoses issues with Hermes Agent setup. |
| """ |
|
|
| import os |
| import sys |
| import subprocess |
| import shutil |
| from pathlib import Path |
|
|
| from hermes_cli.config import get_project_root, get_hermes_home, get_env_path |
|
|
PROJECT_ROOT = get_project_root()  # repo checkout root (fallback location for .env / config)
HERMES_HOME = get_hermes_home()    # per-user state directory (normally ~/.hermes)


# Load environment variables BEFORE importing modules that may read them at
# import time. The user-level ~/.hermes/.env is loaded first (takes effect
# first); the project-local .env below only fills in values still unset.
from dotenv import load_dotenv
_env_path = get_env_path()
if _env_path.exists():
    try:
        load_dotenv(_env_path, encoding="utf-8")
    except UnicodeDecodeError:
        # Some editors save .env as latin-1/cp1252; retry so a bad encoding
        # does not break the CLI entirely.
        load_dotenv(_env_path, encoding="latin-1")

# override=False: project .env never clobbers values already set above.
load_dotenv(PROJECT_ROOT / ".env", override=False, encoding="utf-8")
|
|
| from hermes_cli.colors import Colors, color |
| from hermes_constants import OPENROUTER_MODELS_URL |
|
|
|
|
| _PROVIDER_ENV_HINTS = ( |
| "OPENROUTER_API_KEY", |
| "OPENAI_API_KEY", |
| "ANTHROPIC_API_KEY", |
| "ANTHROPIC_TOKEN", |
| "OPENAI_BASE_URL", |
| "GLM_API_KEY", |
| "ZAI_API_KEY", |
| "Z_AI_API_KEY", |
| "KIMI_API_KEY", |
| "MINIMAX_API_KEY", |
| "MINIMAX_CN_API_KEY", |
| "KILOCODE_API_KEY", |
| ) |
|
|
|
|
| def _has_provider_env_config(content: str) -> bool: |
| """Return True when ~/.hermes/.env contains provider auth/base URL settings.""" |
| return any(key in content for key in _PROVIDER_ENV_HINTS) |
|
|
|
|
| def _honcho_is_configured_for_doctor() -> bool: |
| """Return True when Honcho is configured, even if this process has no active session.""" |
| try: |
| from honcho_integration.client import HonchoClientConfig |
|
|
| cfg = HonchoClientConfig.from_global_config() |
| return bool(cfg.enabled and cfg.api_key) |
| except Exception: |
| return False |
|
|
|
|
def _apply_doctor_tool_availability_overrides(available: list[str], unavailable: list[dict]) -> tuple[list[str], list[dict]]:
    """Adjust runtime-gated tool availability for doctor diagnostics.

    The honcho toolset is gated on an active session at runtime, but for
    doctor purposes "configured" is good enough: when Honcho is configured,
    move it from the unavailable list into the available list.
    """
    if not _honcho_is_configured_for_doctor():
        # Nothing to override; hand back the inputs untouched.
        return available, unavailable

    promoted = list(available)
    remaining = [entry for entry in unavailable if entry.get("name") != "honcho"]
    # Only promote when a honcho entry was actually filtered out, and avoid
    # duplicating it if it is somehow already listed as available.
    if len(remaining) != len(unavailable) and "honcho" not in promoted:
        promoted.append("honcho")
    return promoted, remaining
|
|
|
|
def check_ok(text: str, detail: str = ""):
    """Print a passing (green check) diagnostic line, with optional dim detail."""
    line = f"  {color('β', Colors.GREEN)} {text}"
    if detail:
        line += f" {color(detail, Colors.DIM)}"
    print(line)
|
|
def check_warn(text: str, detail: str = ""):
    """Print a warning (yellow) diagnostic line, with optional dim detail."""
    line = f"  {color('β ', Colors.YELLOW)} {text}"
    if detail:
        line += f" {color(detail, Colors.DIM)}"
    print(line)
|
|
def check_fail(text: str, detail: str = ""):
    """Print a failing (red) diagnostic line, with optional dim detail."""
    line = f"  {color('β', Colors.RED)} {text}"
    if detail:
        line += f" {color(detail, Colors.DIM)}"
    print(line)
|
|
def check_info(text: str):
    """Print an informational (cyan) diagnostic line."""
    print(f"  {color('β', Colors.CYAN)} {text}")
|
|
|
|
def _check_gateway_service_linger(issues: list[str]) -> None:
    """Warn when a systemd user gateway service will stop after logout.

    Appends an actionable item to ``issues`` when linger is disabled.
    Silently does nothing on non-Linux hosts or when no unit is installed.
    """
    try:
        from hermes_cli.gateway import (
            get_systemd_linger_status,
            get_systemd_unit_path,
            is_linux,
        )
    except Exception as exc:
        check_warn("Gateway service linger", f"(could not import gateway helpers: {exc})")
        return

    # Linger is a systemd user-session concept, so this check is Linux-only
    # and only meaningful when a unit file has actually been installed.
    if not is_linux():
        return
    if not get_systemd_unit_path().exists():
        return

    print()
    print(color("β Gateway Service", Colors.CYAN, Colors.BOLD))

    enabled, detail = get_systemd_linger_status()
    if enabled is True:
        check_ok("Systemd linger enabled", "(gateway service survives logout)")
        return
    if enabled is False:
        check_warn("Systemd linger disabled", "(gateway may stop after logout)")
        check_info("Run: sudo loginctl enable-linger $USER")
        issues.append("Enable linger for the gateway user service: sudo loginctl enable-linger $USER")
        return
    # Tri-state: None means the status could not be determined at all.
    check_warn("Could not verify systemd linger", f"({detail})")
|
|
|
|
def run_doctor(args) -> None:
    """Run diagnostic checks.

    Walks through every part of the Hermes setup (Python version, packages,
    config files, auth providers, directory layout, external tools, API
    connectivity, submodules, tool availability, Skills Hub, Honcho memory),
    printing one pass/warn/fail line per check, then a summary of remaining
    issues. With ``--fix``, safe repairs (creating missing files and
    directories) are applied automatically.

    Args:
        args: Parsed CLI namespace; only the optional ``fix`` flag is read.
    """
    should_fix = getattr(args, 'fix', False)

    # Doctor is always a human-facing command; mark the process interactive
    # for any downstream helper that branches on this.
    os.environ.setdefault("HERMES_INTERACTIVE", "1")

    issues = []         # actionable problems found (shown in the summary)
    manual_issues = []  # problems --fix cannot repair automatically
    fixed_count = 0     # repairs performed by --fix during this run

    print()
    print(color("βββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ", Colors.CYAN))
    print(color("β                    π©Ί Hermes Doctor                      β", Colors.CYAN))
    print(color("βββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ", Colors.CYAN))

    # --- Python environment -------------------------------------------------
    print()
    print(color("β Python Environment", Colors.CYAN, Colors.BOLD))

    py_version = sys.version_info
    if py_version >= (3, 11):
        check_ok(f"Python {py_version.major}.{py_version.minor}.{py_version.micro}")
    elif py_version >= (3, 10):
        check_ok(f"Python {py_version.major}.{py_version.minor}.{py_version.micro}")
        check_warn("Python 3.11+ recommended for RL Training tools (tinker requires >= 3.11)")
    elif py_version >= (3, 8):
        check_warn(f"Python {py_version.major}.{py_version.minor}.{py_version.micro}", "(3.10+ recommended)")
    else:
        check_fail(f"Python {py_version.major}.{py_version.minor}.{py_version.micro}", "(3.10+ required)")
        issues.append("Upgrade Python to 3.10+")

    # sys.prefix differs from base_prefix inside a venv/virtualenv.
    in_venv = sys.prefix != sys.base_prefix
    if in_venv:
        check_ok("Virtual environment active")
    else:
        check_warn("Not in virtual environment", "(recommended)")

    # --- Required / optional packages ---------------------------------------
    print()
    print(color("β Required Packages", Colors.CYAN, Colors.BOLD))

    # (importable module name, human-readable package name)
    required_packages = [
        ("openai", "OpenAI SDK"),
        ("rich", "Rich (terminal UI)"),
        ("dotenv", "python-dotenv"),
        ("yaml", "PyYAML"),
        ("httpx", "HTTPX"),
    ]

    optional_packages = [
        ("croniter", "Croniter (cron expressions)"),
        ("telegram", "python-telegram-bot"),
        ("discord", "discord.py"),
    ]

    for module, name in required_packages:
        try:
            __import__(module)
            check_ok(name)
        except ImportError:
            check_fail(name, "(missing)")
            issues.append(f"Install {name}: uv pip install {module}")

    for module, name in optional_packages:
        try:
            __import__(module)
            check_ok(name, "(optional)")
        except ImportError:
            check_warn(name, "(optional, not installed)")

    # --- Configuration files ------------------------------------------------
    print()
    print(color("β Configuration Files", Colors.CYAN, Colors.BOLD))

    env_path = HERMES_HOME / '.env'
    if env_path.exists():
        check_ok("~/.hermes/.env file exists")

        # NOTE(review): read_text() uses the platform default encoding; the
        # dotenv bootstrap at module import falls back to latin-1 on decode
        # errors -- confirm this read cannot crash on the same files.
        content = env_path.read_text()
        if _has_provider_env_config(content):
            check_ok("API key or custom endpoint configured")
        else:
            check_warn("No API key found in ~/.hermes/.env")
            issues.append("Run 'hermes setup' to configure API keys")
    else:
        # Fall back to a project-local .env before declaring failure.
        fallback_env = PROJECT_ROOT / '.env'
        if fallback_env.exists():
            check_ok(".env file exists (in project directory)")
        else:
            check_fail("~/.hermes/.env file missing")
            if should_fix:
                env_path.parent.mkdir(parents=True, exist_ok=True)
                env_path.touch()
                check_ok("Created empty ~/.hermes/.env")
                check_info("Run 'hermes setup' to configure API keys")
                fixed_count += 1
            else:
                check_info("Run 'hermes setup' to create one")
                issues.append("Run 'hermes setup' to create .env")

    config_path = HERMES_HOME / 'config.yaml'
    if config_path.exists():
        check_ok("~/.hermes/config.yaml exists")
    else:
        fallback_config = PROJECT_ROOT / 'cli-config.yaml'
        if fallback_config.exists():
            check_ok("cli-config.yaml exists (in project directory)")
        else:
            example_config = PROJECT_ROOT / 'cli-config.yaml.example'
            if should_fix and example_config.exists():
                config_path.parent.mkdir(parents=True, exist_ok=True)
                shutil.copy2(str(example_config), str(config_path))
                check_ok("Created ~/.hermes/config.yaml from cli-config.yaml.example")
                fixed_count += 1
            elif should_fix:
                # --fix requested but there is nothing to copy from.
                check_warn("config.yaml not found and no example to copy from")
                manual_issues.append("Create ~/.hermes/config.yaml manually")
            else:
                check_warn("config.yaml not found", "(using defaults)")

    # --- Auth providers -----------------------------------------------------
    print()
    print(color("β Auth Providers", Colors.CYAN, Colors.BOLD))

    try:
        from hermes_cli.auth import get_nous_auth_status, get_codex_auth_status

        nous_status = get_nous_auth_status()
        if nous_status.get("logged_in"):
            check_ok("Nous Portal auth", "(logged in)")
        else:
            check_warn("Nous Portal auth", "(not logged in)")

        codex_status = get_codex_auth_status()
        if codex_status.get("logged_in"):
            check_ok("OpenAI Codex auth", "(logged in)")
        else:
            check_warn("OpenAI Codex auth", "(not logged in)")
            if codex_status.get("error"):
                check_info(codex_status["error"])
    except Exception as e:
        check_warn("Auth provider status", f"(could not check: {e})")

    if shutil.which("codex"):
        check_ok("codex CLI")
    else:
        check_warn("codex CLI not found", "(required for openai-codex login)")

    # --- Directory structure ------------------------------------------------
    print()
    print(color("β Directory Structure", Colors.CYAN, Colors.BOLD))

    hermes_home = HERMES_HOME
    if hermes_home.exists():
        check_ok("~/.hermes directory exists")
    else:
        if should_fix:
            hermes_home.mkdir(parents=True, exist_ok=True)
            check_ok("Created ~/.hermes directory")
            fixed_count += 1
        else:
            check_warn("~/.hermes not found", "(will be created on first use)")

    expected_subdirs = ["cron", "sessions", "logs", "skills", "memories"]
    for subdir_name in expected_subdirs:
        subdir_path = hermes_home / subdir_name
        if subdir_path.exists():
            check_ok(f"~/.hermes/{subdir_name}/ exists")
        else:
            if should_fix:
                subdir_path.mkdir(parents=True, exist_ok=True)
                check_ok(f"Created ~/.hermes/{subdir_name}/")
                fixed_count += 1
            else:
                check_warn(f"~/.hermes/{subdir_name}/ not found", "(will be created on first use)")

    # SOUL.md persona file: "configured" means at least one line of content
    # that is not a markdown header or HTML comment.
    soul_path = hermes_home / "SOUL.md"
    if soul_path.exists():
        content = soul_path.read_text(encoding="utf-8").strip()

        lines = [l for l in content.splitlines() if l.strip() and not l.strip().startswith(("<!--", "-->", "#"))]
        if lines:
            check_ok("~/.hermes/SOUL.md exists (persona configured)")
        else:
            check_info("~/.hermes/SOUL.md exists but is empty β edit it to customize personality")
    else:
        check_warn("~/.hermes/SOUL.md not found", "(create it to give Hermes a custom personality)")
        if should_fix:
            soul_path.parent.mkdir(parents=True, exist_ok=True)
            soul_path.write_text(
                "# Hermes Agent Persona\n\n"
                "<!-- Edit this file to customize how Hermes communicates. -->\n\n"
                "You are Hermes, a helpful AI assistant.\n",
                encoding="utf-8",
            )
            check_ok("Created ~/.hermes/SOUL.md with basic template")
            fixed_count += 1

    memories_dir = hermes_home / "memories"
    if memories_dir.exists():
        check_ok("~/.hermes/memories/ directory exists")
        memory_file = memories_dir / "MEMORY.md"
        user_file = memories_dir / "USER.md"
        if memory_file.exists():
            size = len(memory_file.read_text(encoding="utf-8").strip())
            check_ok(f"MEMORY.md exists ({size} chars)")
        else:
            check_info("MEMORY.md not created yet (will be created when the agent first writes a memory)")
        if user_file.exists():
            size = len(user_file.read_text(encoding="utf-8").strip())
            check_ok(f"USER.md exists ({size} chars)")
        else:
            check_info("USER.md not created yet (will be created when the agent first writes a memory)")
    else:
        check_warn("~/.hermes/memories/ not found", "(will be created on first use)")
        if should_fix:
            memories_dir.mkdir(parents=True, exist_ok=True)
            check_ok("Created ~/.hermes/memories/")
            fixed_count += 1

    # Session database: presence + a cheap query to confirm it is readable.
    state_db_path = hermes_home / "state.db"
    if state_db_path.exists():
        try:
            import sqlite3
            conn = sqlite3.connect(str(state_db_path))
            cursor = conn.execute("SELECT COUNT(*) FROM sessions")
            count = cursor.fetchone()[0]
            conn.close()
            check_ok(f"~/.hermes/state.db exists ({count} sessions)")
        except Exception as e:
            check_warn(f"~/.hermes/state.db exists but has issues: {e}")
    else:
        check_info("~/.hermes/state.db not created yet (will be created on first session)")

    # Prints its own section header only when relevant (Linux + unit file).
    _check_gateway_service_linger(issues)

    # --- External tools -----------------------------------------------------
    print()
    print(color("β External Tools", Colors.CYAN, Colors.BOLD))

    if shutil.which("git"):
        check_ok("git")
    else:
        check_warn("git not found", "(optional)")

    if shutil.which("rg"):
        check_ok("ripgrep (rg)", "(faster file search)")
    else:
        check_warn("ripgrep (rg) not found", "(file search uses grep fallback)")
        check_info("Install for faster search: sudo apt install ripgrep")

    # Docker is required only when the terminal backend is docker.
    terminal_env = os.getenv("TERMINAL_ENV", "local")
    if terminal_env == "docker":
        if shutil.which("docker"):
            # `docker info` fails when the daemon is not running.
            result = subprocess.run(["docker", "info"], capture_output=True)
            if result.returncode == 0:
                check_ok("docker", "(daemon running)")
            else:
                check_fail("docker daemon not running")
                issues.append("Start Docker daemon")
        else:
            check_fail("docker not found", "(required for TERMINAL_ENV=docker)")
            issues.append("Install Docker or change TERMINAL_ENV")
    else:
        if shutil.which("docker"):
            check_ok("docker", "(optional)")
        else:
            check_warn("docker not found", "(optional)")

    if terminal_env == "ssh":
        ssh_host = os.getenv("TERMINAL_SSH_HOST")
        if ssh_host:
            # BatchMode prevents an interactive password prompt from hanging
            # the doctor run; ConnectTimeout bounds the wait.
            result = subprocess.run(
                ["ssh", "-o", "ConnectTimeout=5", "-o", "BatchMode=yes", ssh_host, "echo ok"],
                capture_output=True,
                text=True
            )
            if result.returncode == 0:
                check_ok(f"SSH connection to {ssh_host}")
            else:
                check_fail(f"SSH connection to {ssh_host}")
                issues.append(f"Check SSH configuration for {ssh_host}")
        else:
            check_fail("TERMINAL_SSH_HOST not set", "(required for TERMINAL_ENV=ssh)")
            issues.append("Set TERMINAL_SSH_HOST in .env")

    if terminal_env == "daytona":
        daytona_key = os.getenv("DAYTONA_API_KEY")
        if daytona_key:
            check_ok("Daytona API key", "(configured)")
        else:
            check_fail("DAYTONA_API_KEY not set", "(required for TERMINAL_ENV=daytona)")
            issues.append("Set DAYTONA_API_KEY environment variable")
        try:
            from daytona import Daytona
            check_ok("daytona SDK", "(installed)")
        except ImportError:
            check_fail("daytona SDK not installed", "(pip install daytona)")
            issues.append("Install daytona SDK: pip install daytona")

    # Node.js is only needed for the browser-automation toolchain.
    if shutil.which("node"):
        check_ok("Node.js")

        agent_browser_path = PROJECT_ROOT / "node_modules" / "agent-browser"
        if agent_browser_path.exists():
            check_ok("agent-browser (Node.js)", "(browser automation)")
        else:
            check_warn("agent-browser not installed", "(run: npm install)")
    else:
        check_warn("Node.js not found", "(optional, needed for browser tools)")

    # Best-effort npm vulnerability audit of installed JS dependency trees.
    if shutil.which("npm"):
        npm_dirs = [
            (PROJECT_ROOT, "Browser tools (agent-browser)"),
            (PROJECT_ROOT / "scripts" / "whatsapp-bridge", "WhatsApp bridge"),
        ]
        for npm_dir, label in npm_dirs:
            if not (npm_dir / "node_modules").exists():
                continue
            try:
                audit_result = subprocess.run(
                    ["npm", "audit", "--json"],
                    cwd=str(npm_dir),
                    capture_output=True, text=True, timeout=30,
                )
                import json as _json
                audit_data = _json.loads(audit_result.stdout) if audit_result.stdout.strip() else {}
                vuln_count = audit_data.get("metadata", {}).get("vulnerabilities", {})
                critical = vuln_count.get("critical", 0)
                high = vuln_count.get("high", 0)
                moderate = vuln_count.get("moderate", 0)
                total = critical + high + moderate
                if total == 0:
                    check_ok(f"{label} deps", "(no known vulnerabilities)")
                elif critical > 0 or high > 0:
                    check_warn(
                        f"{label} deps",
                        f"({critical} critical, {high} high, {moderate} moderate β run: cd {npm_dir} && npm audit fix)"
                    )
                    issues.append(f"{label} has {total} npm vulnerability(ies)")
                else:
                    check_ok(f"{label} deps", f"({moderate} moderate vulnerability(ies))")
            except Exception:
                # Audit is purely advisory; never fail doctor because of it.
                pass

    # --- API connectivity ---------------------------------------------------
    # Each provider check prints a "Checking..." progress line, then
    # overwrites it with \r plus trailing-space padding.
    print()
    print(color("β API Connectivity", Colors.CYAN, Colors.BOLD))

    openrouter_key = os.getenv("OPENROUTER_API_KEY")
    if openrouter_key:
        print("  Checking OpenRouter API...", end="", flush=True)
        try:
            import httpx
            response = httpx.get(
                OPENROUTER_MODELS_URL,
                headers={"Authorization": f"Bearer {openrouter_key}"},
                timeout=10
            )
            if response.status_code == 200:
                print(f"\r  {color('β', Colors.GREEN)} OpenRouter API                ")
            elif response.status_code == 401:
                print(f"\r  {color('β', Colors.RED)} OpenRouter API {color('(invalid API key)', Colors.DIM)}                ")
                issues.append("Check OPENROUTER_API_KEY in .env")
            else:
                print(f"\r  {color('β', Colors.RED)} OpenRouter API {color(f'(HTTP {response.status_code})', Colors.DIM)}                ")
        except Exception as e:
            print(f"\r  {color('β', Colors.RED)} OpenRouter API {color(f'({e})', Colors.DIM)}                ")
            issues.append("Check network connectivity")
    else:
        check_warn("OpenRouter API", "(not configured)")

    anthropic_key = os.getenv("ANTHROPIC_TOKEN") or os.getenv("ANTHROPIC_API_KEY")
    if anthropic_key:
        print("  Checking Anthropic API...", end="", flush=True)
        try:
            import httpx
            from agent.anthropic_adapter import _is_oauth_token, _COMMON_BETAS, _OAUTH_ONLY_BETAS

            headers = {"anthropic-version": "2023-06-01"}
            if _is_oauth_token(anthropic_key):
                # OAuth tokens authenticate via Bearer header plus beta flags;
                # plain API keys use the x-api-key header instead.
                headers["Authorization"] = f"Bearer {anthropic_key}"
                headers["anthropic-beta"] = ",".join(_COMMON_BETAS + _OAUTH_ONLY_BETAS)
            else:
                headers["x-api-key"] = anthropic_key
            response = httpx.get(
                "https://api.anthropic.com/v1/models",
                headers=headers,
                timeout=10
            )
            if response.status_code == 200:
                print(f"\r  {color('β', Colors.GREEN)} Anthropic API                ")
            elif response.status_code == 401:
                print(f"\r  {color('β', Colors.RED)} Anthropic API {color('(invalid API key)', Colors.DIM)}                ")
            else:
                msg = "(couldn't verify)"
                print(f"\r  {color('β ', Colors.YELLOW)} Anthropic API {color(msg, Colors.DIM)}                ")
        except Exception as e:
            print(f"\r  {color('β ', Colors.YELLOW)} Anthropic API {color(f'({e})', Colors.DIM)}                ")

    # Generic API-key providers:
    # (display name, candidate env vars, default /models URL,
    #  base-URL override env var, whether a /models health check is supported)
    _apikey_providers = [
        ("Z.AI / GLM", ("GLM_API_KEY", "ZAI_API_KEY", "Z_AI_API_KEY"), "https://api.z.ai/api/paas/v4/models", "GLM_BASE_URL", True),
        ("Kimi / Moonshot", ("KIMI_API_KEY",), "https://api.moonshot.ai/v1/models", "KIMI_BASE_URL", True),

        ("MiniMax", ("MINIMAX_API_KEY",), None, "MINIMAX_BASE_URL", False),
        ("MiniMax (China)", ("MINIMAX_CN_API_KEY",), None, "MINIMAX_CN_BASE_URL", False),
        ("AI Gateway", ("AI_GATEWAY_API_KEY",), "https://ai-gateway.vercel.sh/v1/models", "AI_GATEWAY_BASE_URL", True),
        ("Kilo Code", ("KILOCODE_API_KEY",), "https://api.kilo.ai/api/gateway/models", "KILOCODE_BASE_URL", True),
    ]
    for _pname, _env_vars, _default_url, _base_env, _supports_health_check in _apikey_providers:
        # First env var with a non-empty value wins.
        _key = ""
        for _ev in _env_vars:
            _key = os.getenv(_ev, "")
            if _key:
                break
        if _key:
            _label = _pname.ljust(20)

            if not _supports_health_check:
                # No queryable endpoint; presence of a key is all we can verify.
                print(f"  {color('β', Colors.GREEN)} {_label} {color('(key configured)', Colors.DIM)}")
                continue
            print(f"  Checking {_pname} API...", end="", flush=True)
            try:
                import httpx
                _base = os.getenv(_base_env, "")

                # Kimi "coding" keys (sk-kimi- prefix) use a dedicated host.
                if not _base and _key.startswith("sk-kimi-"):
                    _base = "https://api.kimi.com/coding/v1"
                _url = (_base.rstrip("/") + "/models") if _base else _default_url
                _headers = {"Authorization": f"Bearer {_key}"}
                if "api.kimi.com" in _url.lower():
                    _headers["User-Agent"] = "KimiCLI/1.0"
                _resp = httpx.get(
                    _url,
                    headers=_headers,
                    timeout=10,
                )
                if _resp.status_code == 200:
                    print(f"\r  {color('β', Colors.GREEN)} {_label}                ")
                elif _resp.status_code == 401:
                    print(f"\r  {color('β', Colors.RED)} {_label} {color('(invalid API key)', Colors.DIM)}                ")
                    issues.append(f"Check {_env_vars[0]} in .env")
                else:
                    print(f"\r  {color('β ', Colors.YELLOW)} {_label} {color(f'(HTTP {_resp.status_code})', Colors.DIM)}                ")
            except Exception as _e:
                print(f"\r  {color('β ', Colors.YELLOW)} {_label} {color(f'({_e})', Colors.DIM)}                ")

    # --- Submodules ---------------------------------------------------------
    print()
    print(color("β Submodules", Colors.CYAN, Colors.BOLD))

    tinker_dir = PROJECT_ROOT / "tinker-atropos"
    if tinker_dir.exists() and (tinker_dir / "pyproject.toml").exists():
        if py_version >= (3, 11):
            try:
                __import__("tinker_atropos")
                check_ok("tinker-atropos", "(RL training backend)")
            except ImportError:
                check_warn("tinker-atropos found but not installed", "(run: uv pip install -e ./tinker-atropos)")
                issues.append("Install tinker-atropos: uv pip install -e ./tinker-atropos")
        else:
            check_warn("tinker-atropos requires Python 3.11+", f"(current: {py_version.major}.{py_version.minor})")
    else:
        check_warn("tinker-atropos not found", "(run: git submodule update --init --recursive)")

    # --- Tool availability --------------------------------------------------
    print()
    print(color("β Tool Availability", Colors.CYAN, Colors.BOLD))

    try:
        # Ensure the project package is importable however doctor was launched.
        sys.path.insert(0, str(PROJECT_ROOT))
        from model_tools import check_tool_availability, TOOLSET_REQUIREMENTS

        available, unavailable = check_tool_availability()
        available, unavailable = _apply_doctor_tool_availability_overrides(available, unavailable)

        for tid in available:
            info = TOOLSET_REQUIREMENTS.get(tid, {})
            check_ok(info.get("name", tid))

        for item in unavailable:
            env_vars = item.get("missing_vars") or item.get("env_vars") or []
            if env_vars:
                vars_str = ", ".join(env_vars)
                check_warn(item["name"], f"(missing {vars_str})")
            else:
                check_warn(item["name"], "(system dependency not met)")

        # Tools disabled purely by missing env vars are fixable via setup.
        api_disabled = [u for u in unavailable if (u.get("missing_vars") or u.get("env_vars"))]
        if api_disabled:
            issues.append("Run 'hermes setup' to configure missing API keys for full tool access")
    except Exception as e:
        check_warn("Could not check tool availability", f"({e})")

    # --- Skills Hub ---------------------------------------------------------
    print()
    print(color("β Skills Hub", Colors.CYAN, Colors.BOLD))

    hub_dir = HERMES_HOME / "skills" / ".hub"
    if hub_dir.exists():
        check_ok("Skills Hub directory exists")
        lock_file = hub_dir / "lock.json"
        if lock_file.exists():
            try:
                import json
                lock_data = json.loads(lock_file.read_text())
                count = len(lock_data.get("installed", {}))
                check_ok(f"Lock file OK ({count} hub-installed skill(s))")
            except Exception:
                check_warn("Lock file", "(corrupted or unreadable)")
        # Quarantined skills are counted by subdirectory, one per skill.
        quarantine = hub_dir / "quarantine"
        q_count = sum(1 for d in quarantine.iterdir() if d.is_dir()) if quarantine.exists() else 0
        if q_count > 0:
            check_warn(f"{q_count} skill(s) in quarantine", "(pending review)")
    else:
        check_warn("Skills Hub directory not initialized", "(run: hermes skills list)")

    from hermes_cli.config import get_env_value
    github_token = get_env_value("GITHUB_TOKEN") or get_env_value("GH_TOKEN")
    if github_token:
        check_ok("GitHub token configured (authenticated API access)")
    else:
        check_warn("No GITHUB_TOKEN", "(60 req/hr rate limit β set in ~/.hermes/.env for better rates)")

    # --- Honcho memory ------------------------------------------------------
    print()
    print(color("β Honcho Memory", Colors.CYAN, Colors.BOLD))

    try:
        from honcho_integration.client import HonchoClientConfig, resolve_config_path
        hcfg = HonchoClientConfig.from_global_config()
        _honcho_cfg_path = resolve_config_path()

        if not _honcho_cfg_path.exists():
            check_warn("Honcho config not found", f"run: hermes honcho setup")
        elif not hcfg.enabled:
            check_info(f"Honcho disabled (set enabled: true in {_honcho_cfg_path} to activate)")
        elif not hcfg.api_key:
            check_fail("Honcho API key not set", "run: hermes honcho setup")
            issues.append("No Honcho API key β run 'hermes honcho setup'")
        else:
            from honcho_integration.client import get_honcho_client, reset_honcho_client
            # Reset any cached client so this connection attempt is fresh.
            reset_honcho_client()
            try:
                get_honcho_client(hcfg)
                check_ok(
                    "Honcho connected",
                    f"workspace={hcfg.workspace_id} mode={hcfg.memory_mode} freq={hcfg.write_frequency}",
                )
            except Exception as _e:
                check_fail("Honcho connection failed", str(_e))
                issues.append(f"Honcho unreachable: {_e}")
    except ImportError:
        check_warn("honcho-ai not installed", "pip install honcho-ai")
    except Exception as _e:
        check_warn("Honcho check failed", str(_e))

    # --- Summary ------------------------------------------------------------
    print()
    remaining_issues = issues + manual_issues
    if should_fix and fixed_count > 0:
        print(color("β" * 60, Colors.GREEN))
        print(color(f"  Fixed {fixed_count} issue(s).", Colors.GREEN, Colors.BOLD), end="")
        if remaining_issues:
            print(color(f" {len(remaining_issues)} issue(s) require manual intervention.", Colors.YELLOW, Colors.BOLD))
        else:
            print()
        print()
        if remaining_issues:
            for i, issue in enumerate(remaining_issues, 1):
                print(f"  {i}. {issue}")
            print()
    elif remaining_issues:
        print(color("β" * 60, Colors.YELLOW))
        print(color(f"  Found {len(remaining_issues)} issue(s) to address:", Colors.YELLOW, Colors.BOLD))
        print()
        for i, issue in enumerate(remaining_issues, 1):
            print(f"  {i}. {issue}")
        print()
        if not should_fix:
            print(color("  Tip: run 'hermes doctor --fix' to auto-fix what's possible.", Colors.DIM))
    else:
        print(color("β" * 60, Colors.GREEN))
        print(color("  All checks passed! π", Colors.GREEN, Colors.BOLD))

    print()
|
|