Chris4K commited on
Commit
ca376d8
Β·
verified Β·
1 Parent(s): 02ca344

Upload 17 files

Browse files
memory/__pycache__/vector.cpython-313.pyc CHANGED
Binary files a/memory/__pycache__/vector.cpython-313.pyc and b/memory/__pycache__/vector.cpython-313.pyc differ
 
memory/vector.py CHANGED
@@ -51,6 +51,10 @@ class _SentenceTransformerEmbedder:
51
  embeddings = self.model.encode(input, show_progress_bar=False)
52
  return embeddings.tolist()
53
 
 
 
 
 
54
 
55
  class SemanticMemory:
56
  """ChromaDB-backed vector store with Markdown file mirror."""
 
51
  embeddings = self.model.encode(input, show_progress_bar=False)
52
  return embeddings.tolist()
53
 
54
    def name(self) -> str:
        """Required by ChromaDB EmbeddingFunction protocol.

        Returns a stable identifier for this embedder so ChromaDB can
        detect embedding-function mismatches across collection reloads.
        """
        # model_name is set by the enclosing embedder's constructor —
        # NOTE(review): not visible in this diff hunk, confirm attribute name.
        return f"sentence-transformers_{self.model_name}"
58
 
59
  class SemanticMemory:
60
  """ChromaDB-backed vector store with Markdown file mirror."""
memory_server.py ADDED
@@ -0,0 +1,572 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python3
2
+ """
3
+ Memory System MCP Server
4
+ =========================
5
+ A three-tier memory architecture exposed as MCP tools for AI agents.
6
+
7
+ Tiers
8
+ -----
9
+ 1. **Session** (short-term) – conversation context, auto-expiring
10
+ 2. **Episodic** (mid-term) – past tasks & events, searchable timeline
11
+ 3. **Semantic** (long-term) – vector-backed RAG knowledge base
12
+
13
+ Every entry is also persisted as a human-readable Markdown file.
14
+
15
+ Usage
16
+ -----
17
+ python memory_server.py # stdio transport (for MCP clients)
18
+ python memory_server.py --sse 8765 # SSE transport on port 8765
19
+
20
+ Transport is auto-detected via MCP protocol when run from an MCP host.
21
+ """
22
+
23
+ from __future__ import annotations
24
+
25
+ import json
26
+ import logging
27
+ import os
28
+ import sys
29
+ from pathlib import Path
30
+ from typing import Any, Dict, List, Optional
31
+
32
+ from mcp.server.fastmcp import FastMCP
33
+
34
+ # ── local imports ────────────────────────────────────────────
35
+ from memory.session import SessionMemory
36
+ from memory.events import EpisodicMemory
37
+ from memory.vector import SemanticMemory
38
+ from memory.models import MemoryEntry, MemoryTier
39
+
40
# ── logging ──────────────────────────────────────────────────
logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s %(levelname)-8s %(name)s %(message)s",
)
logger = logging.getLogger("memory-mcp")

# ── resolve data root ───────────────────────────────────────
# MEMORY_DATA_ROOT overrides the default ./data directory next to this file.
DATA_ROOT = Path(os.environ.get("MEMORY_DATA_ROOT", Path(__file__).parent / "data"))
DATA_ROOT.mkdir(parents=True, exist_ok=True)

# Sentence-transformers model used for semantic (vector) embeddings.
EMBEDDING_MODEL = os.environ.get("MEMORY_EMBEDDING_MODEL", "all-MiniLM-L6-v2")
# Session entries auto-expire after this many seconds (default: 1 hour).
SESSION_TTL = int(os.environ.get("MEMORY_SESSION_TTL", "3600"))

# ── initialise stores ───────────────────────────────────────
# One store per memory tier; each mirrors its entries to Markdown files.
session_store = SessionMemory(
    base_dir=str(DATA_ROOT / "session"),
    ttl=SESSION_TTL,
)
episodic_store = EpisodicMemory(
    base_dir=str(DATA_ROOT / "events"),
)
semantic_store = SemanticMemory(
    vector_dir=str(DATA_ROOT / "vector"),
    md_dir=str(DATA_ROOT / "vector" / "docs"),
    model_name=EMBEDDING_MODEL,
)

logger.info("🧠 Memory stores initialised – data_root=%s", DATA_ROOT)

# ── MCP server ───────────────────────────────────────────────
mcp = FastMCP("memory")
72
+
73
+
74
+ # =====================================================================
75
+ # RESOURCES – browse memory state
76
+ # =====================================================================
77
+
78
@mcp.resource("memory://status")
def memory_status() -> str:
    """Return a JSON overview of all three memory tiers."""
    session_ids = session_store.list_sessions()
    overview = {
        "session": {
            "sessions": session_ids,
            "total_entries": sum(
                len(session_store.list_entries(sid)) for sid in session_ids
            ),
        },
        "episodic": {
            "total_entries": episodic_store.count(),
        },
        "semantic": {
            "total_entries": semantic_store.count(),
            "embedding_model": EMBEDDING_MODEL,
        },
    }
    return json.dumps(overview, indent=2)
97
+
98
+
99
@mcp.resource("memory://session/{session_id}")
def session_resource(session_id: str) -> str:
    """Return the entries of one session, serialised as JSON."""
    serialised = [item.to_dict() for item in session_store.list_entries(session_id)]
    return json.dumps(serialised, indent=2)
104
+
105
+
106
@mcp.resource("memory://events/recent")
def recent_events_resource() -> str:
    """Return the 20 most recent episodic events as JSON."""
    latest = episodic_store.recent(20)
    return json.dumps([item.to_dict() for item in latest], indent=2)
111
+
112
+
113
+ # =====================================================================
114
+ # PROMPTS
115
+ # =====================================================================
116
+
117
@mcp.prompt()
def memory_context_prompt(query: str = "", session_id: str = "default") -> str:
    """Assemble a memory-context block (session, episodic, semantic) for an LLM prompt."""
    sections: List[str] = ["# Agent Memory Context\n"]

    # Short-term: last 5 entries of the requested session.
    conversation = session_store.list_entries(session_id)
    if conversation:
        sections.append("## Recent Conversation (Session)")
        sections.extend(
            f"- [{item.created_at}] {item.title}: {item.content[:200]}"
            for item in conversation[-5:]
        )
        sections.append("")

    # Mid-term: 5 most recent episodic events.
    timeline = episodic_store.recent(5)
    if timeline:
        sections.append("## Recent Tasks (Episodic)")
        sections.extend(
            f"- [{item.created_at}] {item.title}: {item.content[:200]}"
            for item in timeline
        )
        sections.append("")

    # Long-term: vector search only when a query was supplied.
    if query:
        matches = semantic_store.search(query, limit=3)
        if matches:
            sections.append("## Relevant Knowledge (Semantic / RAG)")
            sections.extend(
                f"- [score={m.score:.2f}] {m.entry.title}: {m.entry.content[:300]}"
                for m in matches
            )
            sections.append("")

    return "\n".join(sections)
148
+
149
+
150
+ # =====================================================================
151
+ # TOOLS – full CRUD for each tier
152
+ # =====================================================================
153
+
154
+ # ─── Session (short-term) ───────────────────────────────────
155
+
156
@mcp.tool()
def session_create(
    content: str,
    title: str = "",
    tags: str = "",
    session_id: str = "default",
    importance: float = 0.5,
) -> Dict[str, Any]:
    """
    Create a new short-term / session memory entry.

    Stores conversation context that auto-expires after the configured TTL.
    Persisted as a Markdown file under data/session/<session_id>/.
    """
    # Comma-separated tag string -> clean list ("" yields []).
    parsed_tags = [part.strip() for part in tags.split(",") if part.strip()]
    new_entry = MemoryEntry(
        content=content,
        title=title if title else content[:60],  # fall back to a content excerpt
        tags=parsed_tags,
        importance=importance,
    )
    stored = session_store.create(new_entry, session_id=session_id)
    return {"status": "created", "entry": stored.to_dict()}
178
+
179
+
180
@mcp.tool()
def session_read(entry_id: str, session_id: str = "default") -> Dict[str, Any]:
    """Read a single session memory entry by ID."""
    found = session_store.read(entry_id, session_id)
    if not found:
        return {"status": "not_found", "entry_id": entry_id}
    return {"status": "ok", "entry": found.to_dict()}
187
+
188
+
189
@mcp.tool()
def session_update(
    entry_id: str,
    session_id: str = "default",
    content: str = "",
    title: str = "",
    tags: str = "",
    importance: float = -1,
) -> Dict[str, Any]:
    """Update a session memory entry. Only provided (non-empty) fields are changed."""
    changes: Dict[str, Any] = {}
    for field, value in (("content", content), ("title", title)):
        if value:
            changes[field] = value
    if tags:
        changes["tags"] = [part.strip() for part in tags.split(",") if part.strip()]
    if importance >= 0:  # -1 sentinel means "leave importance untouched"
        changes["importance"] = importance

    updated = session_store.update(entry_id, session_id, **changes)
    return (
        {"status": "updated", "entry": updated.to_dict()}
        if updated
        else {"status": "not_found", "entry_id": entry_id}
    )
212
+
213
+
214
@mcp.tool()
def session_delete(entry_id: str, session_id: str = "default") -> Dict[str, Any]:
    """Delete a session memory entry."""
    outcome = "deleted" if session_store.delete(entry_id, session_id) else "not_found"
    return {"status": outcome, "entry_id": entry_id}
219
+
220
+
221
@mcp.tool()
def session_list(session_id: str = "default", tag: str = "") -> Dict[str, Any]:
    """List all entries in a session, optionally filtered by tag."""
    found = session_store.list_entries(session_id, tag=tag or None)
    return {"count": len(found), "entries": [item.to_dict() for item in found]}
226
+
227
+
228
@mcp.tool()
def session_search(query: str, session_id: str = "", limit: int = 10) -> Dict[str, Any]:
    """Keyword search across session memories (all sessions when session_id is empty)."""
    matches = session_store.search(query, session_id=session_id or None, limit=limit)
    return {"count": len(matches), "entries": [item.to_dict() for item in matches]}
233
+
234
+
235
@mcp.tool()
def session_clear(session_id: str = "default") -> Dict[str, Any]:
    """Remove every entry from the given session."""
    deleted_count = session_store.clear_session(session_id)
    return {"status": "cleared", "session_id": session_id, "deleted": deleted_count}
240
+
241
+
242
@mcp.tool()
def session_gc() -> Dict[str, Any]:
    """Garbage-collect expired session entries across all sessions."""
    removed_count = session_store.gc()
    return {"status": "gc_complete", "removed": removed_count}
247
+
248
+
249
+ # ─── Episodic (mid-term) ────────────────────────────────────
250
+
251
@mcp.tool()
def episodic_create(
    content: str,
    title: str = "",
    tags: str = "",
    importance: float = 0.5,
    source: str = "",
) -> Dict[str, Any]:
    """
    Record a new episodic memory (task completion, event, interaction).

    Stored as a timestamped Markdown file under data/events/.
    """
    parsed_tags = [part.strip() for part in tags.split(",") if part.strip()]
    created = episodic_store.create(
        content=content,
        title=title,
        tags=parsed_tags,
        importance=importance,
        source=source,
    )
    return {"status": "created", "entry": created.to_dict()}
273
+
274
+
275
@mcp.tool()
def episodic_read(entry_id: str) -> Dict[str, Any]:
    """Read a single episodic memory by ID."""
    found = episodic_store.read(entry_id)
    if not found:
        return {"status": "not_found", "entry_id": entry_id}
    return {"status": "ok", "entry": found.to_dict()}
282
+
283
+
284
@mcp.tool()
def episodic_update(
    entry_id: str,
    content: str = "",
    title: str = "",
    tags: str = "",
    importance: float = -1,
) -> Dict[str, Any]:
    """Update an episodic memory entry; only non-empty fields are applied."""
    changes: Dict[str, Any] = {}
    for field, value in (("content", content), ("title", title)):
        if value:
            changes[field] = value
    if tags:
        changes["tags"] = [part.strip() for part in tags.split(",") if part.strip()]
    if importance >= 0:  # -1 sentinel means "leave importance untouched"
        changes["importance"] = importance

    updated = episodic_store.update(entry_id, **changes)
    return (
        {"status": "updated", "entry": updated.to_dict()}
        if updated
        else {"status": "not_found", "entry_id": entry_id}
    )
306
+
307
+
308
@mcp.tool()
def episodic_delete(entry_id: str) -> Dict[str, Any]:
    """Delete an episodic memory entry."""
    outcome = "deleted" if episodic_store.delete(entry_id) else "not_found"
    return {"status": outcome, "entry_id": entry_id}
313
+
314
+
315
@mcp.tool()
def episodic_list(
    tag: str = "",
    since: str = "",
    until: str = "",
    limit: int = 50,
) -> Dict[str, Any]:
    """List episodic memories, optionally filtered by tag and/or time range (ISO format)."""
    # Empty strings mean "no filter"; normalise them to None for the store.
    found = episodic_store.list_entries(
        tag=tag or None,
        since=since or None,
        until=until or None,
        limit=limit,
    )
    return {"count": len(found), "entries": [item.to_dict() for item in found]}
330
+
331
+
332
@mcp.tool()
def episodic_search(query: str, limit: int = 10) -> Dict[str, Any]:
    """Keyword search across episodic memories."""
    matches = episodic_store.search(query, limit=limit)
    return {"count": len(matches), "entries": [item.to_dict() for item in matches]}
337
+
338
+
339
@mcp.tool()
def episodic_recent(n: int = 10) -> Dict[str, Any]:
    """Return the N most recent episodic events."""
    latest = episodic_store.recent(n)
    return {"count": len(latest), "entries": [item.to_dict() for item in latest]}
344
+
345
+
346
+ # ─── Semantic / RAG (long-term) ─────────────────────────────
347
+
348
@mcp.tool()
def semantic_create(
    content: str,
    title: str = "",
    tags: str = "",
    importance: float = 0.5,
    source: str = "",
) -> Dict[str, Any]:
    """
    Add a document to the semantic / RAG knowledge base.

    The content is embedded via sentence-transformers and stored in ChromaDB
    for similarity search. Also persisted as a Markdown file.
    """
    parsed_tags = [part.strip() for part in tags.split(",") if part.strip()]
    created = semantic_store.create(
        content=content,
        title=title,
        tags=parsed_tags,
        importance=importance,
        source=source,
    )
    return {"status": "created", "entry": created.to_dict()}
371
+
372
+
373
@mcp.tool()
def semantic_read(entry_id: str) -> Dict[str, Any]:
    """Read a single semantic memory by ID."""
    found = semantic_store.read(entry_id)
    if not found:
        return {"status": "not_found", "entry_id": entry_id}
    return {"status": "ok", "entry": found.to_dict()}
380
+
381
+
382
@mcp.tool()
def semantic_update(
    entry_id: str,
    content: str = "",
    title: str = "",
    tags: str = "",
    importance: float = -1,
) -> Dict[str, Any]:
    """Update a semantic memory entry. Re-embeds automatically if content changes."""
    changes: Dict[str, Any] = {}
    for field, value in (("content", content), ("title", title)):
        if value:
            changes[field] = value
    if tags:
        changes["tags"] = [part.strip() for part in tags.split(",") if part.strip()]
    if importance >= 0:  # -1 sentinel means "leave importance untouched"
        changes["importance"] = importance

    updated = semantic_store.update(entry_id, **changes)
    return (
        {"status": "updated", "entry": updated.to_dict()}
        if updated
        else {"status": "not_found", "entry_id": entry_id}
    )
404
+
405
+
406
@mcp.tool()
def semantic_delete(entry_id: str) -> Dict[str, Any]:
    """Delete a semantic memory entry from vector store and disk."""
    outcome = "deleted" if semantic_store.delete(entry_id) else "not_found"
    return {"status": outcome, "entry_id": entry_id}
411
+
412
+
413
@mcp.tool()
def semantic_search(query: str, limit: int = 5) -> Dict[str, Any]:
    """
    Semantic similarity search (RAG retrieval).

    Finds the most relevant documents in the knowledge base using
    vector cosine similarity. This is the primary RAG endpoint.
    """
    hits = semantic_store.search(query, limit=limit)
    payload = [
        {
            "score": round(hit.score, 4),
            "distance": round(hit.distance, 4),
            "entry": hit.entry.to_dict(),
        }
        for hit in hits
    ]
    return {"count": len(hits), "results": payload}
433
+
434
+
435
@mcp.tool()
def semantic_list(limit: int = 100, tag: str = "") -> Dict[str, Any]:
    """List all entries in the semantic knowledge base."""
    found = semantic_store.list_entries(limit=limit, tag=tag or None)
    return {"count": len(found), "entries": [item.to_dict() for item in found]}
440
+
441
+
442
+ # ─── Cross-tier utilities ───────────────────────────────────
443
+
444
@mcp.tool()
def memory_search_all(query: str, limit: int = 5) -> Dict[str, Any]:
    """
    Search across ALL memory tiers (session + episodic + semantic).

    Combines keyword search from session & episodic with
    semantic vector search. Returns unified results sorted by relevance.
    """
    session_hits = session_store.search(query, limit=limit)
    episodic_hits = episodic_store.search(query, limit=limit)
    vector_hits = semantic_store.search(query, limit=limit)

    return {
        "session": [item.to_dict() for item in session_hits],
        "episodic": [item.to_dict() for item in episodic_hits],
        "semantic": [
            {"score": round(hit.score, 4), "entry": hit.entry.to_dict()}
            for hit in vector_hits
        ],
        "total": len(session_hits) + len(episodic_hits) + len(vector_hits),
    }
471
+
472
+
473
@mcp.tool()
def memory_promote(entry_id: str, from_tier: str, to_tier: str) -> Dict[str, Any]:
    """
    Promote a memory entry from one tier to another.

    E.g. promote a session memory to episodic, or episodic to semantic.
    The entry is copied to the target tier (source is kept).

    Parameters: entry_id is looked up in *from_tier*; *to_tier* must be one
    of "session", "episodic", "semantic". Returns a status dict; unknown
    source tiers surface as "not_found", unknown target tiers as "error".
    """
    # read from source
    source_entry: Optional[MemoryEntry] = None
    if from_tier == "session":
        # NOTE(review): no session_id passed here, unlike session_read —
        # presumably session_store.read defaults to the "default" session; confirm.
        source_entry = session_store.read(entry_id)
    elif from_tier == "episodic":
        source_entry = episodic_store.read(entry_id)
    elif from_tier == "semantic":
        source_entry = semantic_store.read(entry_id)

    if not source_entry:
        return {"status": "not_found", "entry_id": entry_id, "tier": from_tier}

    # Fix: shallow-copy the mutable containers. Passing source_entry.tags /
    # source_entry.metadata directly would make the promoted entry alias the
    # source entry's list/dict, so a later update of one would silently
    # mutate the other.
    copied_tags = list(source_entry.tags)
    copied_meta = (
        dict(source_entry.metadata) if source_entry.metadata is not None else None
    )
    provenance = f"promoted from {from_tier}:{entry_id}"

    # write to target
    if to_tier == "session":
        new_entry = MemoryEntry(
            content=source_entry.content,
            title=source_entry.title,
            tags=copied_tags,
            importance=source_entry.importance,
            metadata=copied_meta,
            source=provenance,
        )
        result = session_store.create(new_entry)
    elif to_tier == "episodic":
        result = episodic_store.create(
            content=source_entry.content,
            title=source_entry.title,
            tags=copied_tags,
            importance=source_entry.importance,
            metadata=copied_meta,
            source=provenance,
        )
    elif to_tier == "semantic":
        result = semantic_store.create(
            content=source_entry.content,
            title=source_entry.title,
            tags=copied_tags,
            importance=source_entry.importance,
            metadata=copied_meta,
            source=provenance,
        )
    else:
        return {"status": "error", "message": f"Unknown target tier: {to_tier}"}

    return {
        "status": "promoted",
        "from": from_tier,
        "to": to_tier,
        "original_id": entry_id,
        "new_entry": result.to_dict(),
    }
532
+
533
+
534
@mcp.tool()
def memory_stats() -> Dict[str, Any]:
    """Get statistics about all memory tiers."""
    session_ids = session_store.list_sessions()
    entry_total = sum(len(session_store.list_entries(sid)) for sid in session_ids)
    return {
        "session": {
            "sessions": len(session_ids),
            "total_entries": entry_total,
            "ttl_seconds": SESSION_TTL,
        },
        "episodic": {
            "total_entries": episodic_store.count(),
        },
        "semantic": {
            "total_entries": semantic_store.count(),
            "embedding_model": EMBEDDING_MODEL,
        },
        "data_root": str(DATA_ROOT),
    }
554
+
555
+
556
+ # =====================================================================
557
+ # ENTRY POINT
558
+ # =====================================================================
559
+
560
if __name__ == "__main__":
    import argparse

    parser = argparse.ArgumentParser(description="Memory System MCP Server")
    parser.add_argument("--sse", type=int, default=0, help="Run SSE transport on this port")
    args = parser.parse_args()

    if args.sse:
        logger.info("🚀 Starting Memory MCP server (SSE) on port %d", args.sse)
        # Fix: FastMCP.run() accepts only a transport name — the previous
        # sse_params={"port": ...} keyword does not exist in the MCP Python
        # SDK and raised a TypeError. The SSE port is configured through the
        # server's settings object before starting.
        mcp.settings.port = args.sse
        mcp.run(transport="sse")
    else:
        logger.info("🚀 Starting Memory MCP server (stdio)")
        mcp.run(transport="stdio")