| import asyncio
|
| import os
|
| import shutil
|
| import time
|
| from pathlib import Path
|
| from unittest.mock import MagicMock, AsyncMock
|
| import json
|
|
|
| from loguru import logger
|
|
|
|
|
| import sys
|
|
|
# Make the repository root importable so `mnemocore` resolves when this
# script is executed directly from its own directory (rather than installed
# as a package or run via a test runner that sets PYTHONPATH).
project_root = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))

sys.path.append(project_root)
|
|
|
| from mnemocore.core.engine import HAIMEngine
|
| from mnemocore.core.config import get_config, SubconsciousAIConfig, HAIMConfig
|
| from mnemocore.core.subconscious_ai import SubconsciousAIWorker, ModelClient
|
| from mnemocore.core.node import MemoryNode
|
| from mnemocore.core.binary_hdv import BinaryHDV
|
|
|
|
|
class MockModelClient(ModelClient):
    """Canned-response stand-in for a real model client.

    Routes each prompt to a pre-registered JSON payload based on keyword
    markers and tracks how many times ``generate`` has been invoked.
    """

    def __init__(self, responses):
        # Maps a response kind ("sorting" / "dreaming") to its JSON payload.
        self.responses = responses
        # Count of generate() invocations observed so far.
        self.call_count = 0
        self.model_name = "mock-model"
        self.model_url = "mock-url"

    async def generate(self, prompt: str, **kwargs) -> str:
        """Return the canned response whose marker appears in *prompt*.

        Falls back to an empty JSON object for unrecognised prompts or
        when no payload was registered for the matched kind.
        """
        self.call_count += 1
        logger.info(f"MockModelClient received prompt: {prompt[:50]}...")

        if "Categorize" in prompt:
            kind = "sorting"
        elif "Analyze these memories" in prompt:
            logger.info("Generating dreaming response...")
            kind = "dreaming"
        else:
            # Unknown prompt: empty JSON keeps downstream parsers happy.
            return "{}"

        return self.responses.get(kind, "{}")
|
|
|
async def run_verification():
    """Verify that _enhanced_dreaming_cycle actually binds memories.

    End-to-end smoke test: builds an isolated on-disk environment, seeds a
    weak memory plus a semantically related memory, stubs out the LLM client
    and the tier search, runs one enhanced dreaming cycle, then checks that
    a synapse was created in the index and persisted to disk.

    The scratch directory and the engine are always cleaned up, even when a
    step fails (previously an early exception leaked both).
    """
    import dataclasses

    # Scratch directory for this run; wipe any leftovers from a prior run.
    test_dir = Path("./test_env_dream").resolve()
    if test_dir.exists():
        shutil.rmtree(test_dir)
    test_dir.mkdir()

    logger.info(f"Test Environment: {test_dir}")

    cfg = get_config()

    # Redirect every persistence path into the scratch directory so the
    # test cannot touch real data.
    new_paths = dataclasses.replace(
        cfg.paths,
        data_dir=str(test_dir),
        memory_file=str(test_dir / "memories.pkl"),
        synapses_file=str(test_dir / "synapses.jsonl"),
    )

    # Enable the subconscious worker with enhanced dreaming; dry_run=False
    # so the cycle actually mutates engine state.
    sub_cfg = SubconsciousAIConfig(
        enabled=True,
        enhanced_dreaming_enabled=True,
        pulse_interval_seconds=1,
        model_provider="mock",
        model_name="mock-model",
        dry_run=False,
    )

    cfg = dataclasses.replace(cfg, paths=new_paths, subconscious_ai=sub_cfg)

    logger.info("Initializing Engine...")
    try:
        engine = HAIMEngine(config=cfg)
        try:
            await engine.initialize()

            logger.info("Creating dummy memories...")

            # Weak memory: low LTP strength so the dreaming cycle selects it
            # as a reinforcement candidate.
            weak_content = "The cat sat on the mat."
            weak_vec = engine.binary_encoder.encode(weak_content)
            weak_node = MemoryNode(
                id="weak-memory-1",
                hdv=weak_vec,
                content=weak_content,
                metadata={"source": "user", "ltp_strength": 0.1},
            )
            weak_node.ltp_strength = 0.2
            # Make sure the cycle does not skip it as already analyzed.
            weak_node.metadata.pop("dream_analyzed", None)
            await engine.tier_manager.add_memory(weak_node)

            # Related memory: the mocked search below returns it as the
            # nearest neighbour of the weak memory.
            related_content = "Felines enjoy resting on rugs."
            related_vec = engine.binary_encoder.encode(related_content)
            related_node = MemoryNode(
                id="related-memory-2",
                hdv=related_vec,
                content=related_content,
                metadata={"source": "user"},
            )
            await engine.tier_manager.add_memory(related_node)

            logger.info(f"Weak Memory ID: {weak_node.id}")
            logger.info(f"Related Memory ID: {related_node.id}")

            logger.info("Initializing Subconscious Worker...")
            worker = SubconsciousAIWorker(engine, sub_cfg)

            # Canned LLM output: the dreaming prompt enumerates memories by
            # index, and "1" maps to the weak memory's bridge concepts.
            mock_response = {
                "bridges": {
                    "1": ["feline_concept"]
                }
            }
            worker._model_client = MockModelClient({
                "dreaming": json.dumps(mock_response)
            })

            # Force the similarity search to return the related node so the
            # cycle is deterministic regardless of encoder behaviour.
            async def mock_search(query_vec, top_k=5, time_range=None):
                logger.info(f"Mock Search invoked. Returning {related_node.id}")
                return [(related_node.id, 0.95)]

            engine.tier_manager.search = mock_search

            logger.info("Starting Dream Cycle...")
            try:
                result = await worker._enhanced_dreaming_cycle()
                logger.info(f"Cycle Result: {result.output}")
            except Exception:
                logger.exception("Error during dream cycle")

            logger.info("Verifying Synapse Creation...")
            synapse = engine._synapse_index.get(weak_node.id, related_node.id)
            if synapse:
                logger.success(f"SUCCESS: Synapse found between {weak_node.id} and {related_node.id}")
                logger.info(f"Synapse Strength: {synapse.strength}")
            else:
                logger.error(f"FAILURE: No synapse found between {weak_node.id} and {related_node.id}")

            logger.info(f"Total synapses in index: {len(engine._synapse_index)}")

            # Check on-disk persistence; fall back to an explicit save in
            # case synapse writes are only flushed on demand.
            if os.path.exists(cfg.paths.synapses_file):
                with open(cfg.paths.synapses_file, "r", encoding="utf-8") as f:
                    content = f.read()
                if weak_node.id in content and related_node.id in content:
                    logger.success("SUCCESS: Synapse persisted to file.")
                else:
                    logger.warning("WARNING: Synapse file exists but IDs not found (might require a save trigger).")
                    await engine._save_synapses()
                    with open(cfg.paths.synapses_file, "r", encoding="utf-8") as f2:
                        content2 = f2.read()
                    if weak_node.id in content2:
                        logger.success("SUCCESS: Synapse persisted after explicit save.")
                    else:
                        logger.error("FAILURE: Synapse still not in file.")
            else:
                logger.error("FAILURE: Synapse file not created.")
        finally:
            # Always release engine resources, even if a step above failed.
            await engine.close()
    finally:
        # Always remove the scratch directory so reruns start clean.
        if test_dir.exists():
            shutil.rmtree(test_dir)
|
|
|
if __name__ == "__main__":
    # Script entry point: drive the async verification under a fresh event loop.
    asyncio.run(run_verification())
|
|
|