File size: 3,161 Bytes
e706de2
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
import {defineChatSessionFunction, getLlama, LlamaChatSession} from "node-llama-cpp";
import {fileURLToPath} from "url";
import path from "path";
import {MemoryManager} from "./memory-manager.js";

// ESM has no built-in __dirname; derive it from this module's URL.
const __dirname = path.dirname(fileURLToPath(import.meta.url));

// Local GGUF model file, resolved relative to this module
// (two directories up, inside ./models).
const modelPath = path.join(__dirname, '..', '..', 'models', 'Qwen3-1.7B-Q8_0.gguf');

// Bring up the llama.cpp runtime, load the model, and create an
// inference context with a 2000-token window.
const llama = await getLlama({debug: false});
const model = await llama.loadModel({modelPath});
const context = await model.createContext({contextSize: 2000});

// Initialize memory manager — persists agent memories to a JSON file.
const memoryManager = new MemoryManager('./agent-memory.json');

// Load existing memories and embed them in the system prompt, so facts
// saved in previous runs survive a process restart.
const memorySummary = await memoryManager.getMemorySummary();

// System prompt: tells the model to compare new statements against the
// embedded memory summary and only call saveMemory for new or updated
// information. NOTE(review): this template literal is model-facing
// runtime text — do not reformat or "clean up" its whitespace.
const systemPrompt = `

You are a helpful assistant with long-term memory.



Before calling any function, always follow this reasoning process:



1. **Compare** new user statements against existing memories below.

2. **If the same key and value already exist**, do NOT call saveMemory again.

   - Instead, simply acknowledge the known information.

   - Example: if the user says "My name is Malua" and memory already says "user_name: Malua", reply "Yes, I remember your name is Malua."

3. **If the user provides an updated value** (e.g., "I actually prefer sushi now"), 

   then call saveMemory once to update the value.

4. **Only call saveMemory for genuinely new information.**



When saving new data, call saveMemory with structured fields:

- type: "fact" or "preference"

- key: short descriptive identifier (e.g., "user_name", "favorite_food")

- value: the specific information (e.g., "Malua", "chinua")



Examples:

saveMemory({ type: "fact", key: "user_name", value: "Malua" })

saveMemory({ type: "preference", key: "favorite_food", value: "chinua" })



${memorySummary}

`;

// Chat session bound to a fresh sequence from the shared context and
// seeded with the memory-aware system prompt.
const sequence = context.getSequence();
const session = new LlamaChatSession({
    systemPrompt,
    contextSequence: sequence,
});

// JSON schema for the saveMemory tool's arguments: a typed
// (fact | preference) key/value record; all three fields required.
const saveMemorySchema = {
    type: "object",
    properties: {
        type: {type: "string", enum: ["fact", "preference"]},
        key: {type: "string"},
        value: {type: "string"}
    },
    required: ["type", "key", "value"]
};

// Tool the model can invoke to persist information via MemoryManager.
// Returns a confirmation string that is fed back to the model.
const saveMemory = defineChatSessionFunction({
    description: "Save important information to long-term memory (user preferences, facts, personal details)",
    params: saveMemorySchema,
    handler: async ({type, key, value}) => {
        await memoryManager.addMemory({type, key, value});
        return `Memory saved: ${key} = ${value}`;
    }
});

const functions = {saveMemory};

// Example conversation. Wrapped in try/finally so native resources are
// released even if a prompt throws.
try {
    const prompt1 = "Hi! My name is Alex and I love pizza.";
    const response1 = await session.prompt(prompt1, {functions});
    console.log("AI: " + response1);

    // Later conversation (even after restarting the script)
    const prompt2 = "What's my favorite food?";
    const response2 = await session.prompt(prompt2, {functions});
    console.log("AI: " + response2);
} finally {
    // Clean up in reverse order of creation. dispose() is async for
    // context/model/llama in node-llama-cpp — await each so the process
    // doesn't exit before native memory is freed.
    await session.dispose();
    await context.dispose();
    await model.dispose();
    await llama.dispose();
}