File size: 2,808 Bytes
1d32142
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
import uuid
from abc import ABC, abstractmethod
from datetime import datetime, timezone

from langchain_core.runnables import RunnableConfig
from langgraph.store.base import BaseStore


class BaseMemoryAgent(ABC):
    """Base class for agents with memory capabilities.

    Extracts shared logic from therapist_agent and logical_agent:
    - Memory retrieval from store
    - Automatic storage of all conversations (user + assistant messages)
    - Message construction with system prompt + memories
    - LLM invocation and response formatting
    """

    def __init__(self, llm):
        """Initialize the agent.

        Args:
            llm: A LangChain chat model. Must support ``ainvoke`` — the
                agent runs entirely on the async API so it never blocks
                the event loop.
        """
        self.llm = llm

    @property
    @abstractmethod
    def system_prompt(self) -> str:
        """Each agent defines its own personality/system prompt."""
        pass

    async def retrieve_memories(self, store: BaseStore, user_id: str, query: str) -> str:
        """Fetch relevant memories for this user.

        Args:
            store: The LangGraph store instance.
            user_id: User identifier for namespacing.
            query: Free-text query used for semantic search.

        Returns:
            Matching memory texts joined with newlines. Entries whose
            ``"data"`` field is missing or empty are skipped so the
            prompt is not padded with blank lines.
        """
        namespace = ("memories", user_id)
        memories = await store.asearch(namespace, query=query)
        return "\n".join(
            d.value.get("data", "") for d in memories if d.value.get("data")
        )

    async def store_message(self, store: BaseStore, user_id: str, content: str, role: str):
        """Store every message to Supabase automatically.

        Args:
            store: The LangGraph store instance
            user_id: User identifier for namespacing
            content: The message content
            role: Either 'user' or 'assistant'
        """
        memory_id = str(uuid.uuid4())
        namespace = ("memories", user_id)
        await store.aput(namespace, memory_id, {
            "data": content,
            "role": role,
            # Timezone-aware UTC timestamp so records are comparable
            # regardless of server locale (naive datetime.now() is not).
            "timestamp": datetime.now(timezone.utc).isoformat()
        })

    async def __call__(self, state: dict, config: RunnableConfig, *, store: BaseStore) -> dict:
        """Make the agent callable for LangGraph node compatibility.

        Args:
            state: Graph state; expects a non-empty ``state["messages"]``
                list whose last item carries the user's message.
            config: Runnable config; ``configurable.user_id`` selects the
                memory namespace (defaults to "default_user").
            store: The LangGraph store used for memory read/write.

        Returns:
            A state update of the form ``{"messages": [assistant_message]}``.
        """
        last_message = state["messages"][-1]
        user_id = config["configurable"].get("user_id", "default_user")

        # Normalize once: .content may be a non-str payload (e.g. multimodal
        # parts); use the same string for retrieval, storage and the LLM call.
        user_text = str(last_message.content)

        # Get memories relevant to the incoming message
        memory_info = await self.retrieve_memories(store, user_id, user_text)

        # Build prompt with memories injected. Assembled piecewise so the
        # source indentation does not leak into the prompt text.
        full_prompt = (
            f"{self.system_prompt}\n\n"
            "User information from previous sessions:\n"
            f"{memory_info}"
        )

        messages = [
            {"role": "system", "content": full_prompt},
            {"role": "user", "content": user_text}
        ]

        # Store user message automatically
        await self.store_message(store, user_id, user_text, "user")

        # Get response from LLM — ainvoke, not invoke: a synchronous call
        # here would block the event loop for the duration of the request.
        reply = await self.llm.ainvoke(messages)

        # Store assistant response automatically
        await self.store_message(store, user_id, reply.content, "assistant")

        return {"messages": [{"role": "assistant", "content": reply.content}]}