File size: 13,155 Bytes
ed1b365
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
"""

Codette Memory Kernel — Recovered Foundational System

======================================================



Emotional continuity engine with SHA256-anchored memory, importance decay,

ethical regret tracking, and reflection journaling.



Recovered from: J:\codette-training-lab\new data\codette_memory_kernel*.py

Mathematical foundation: Codette_Deep_Simulation_v1.py



Purpose: Prevent synthesis loop corruption by maintaining memory integrity

and emotional continuity across multi-round debate cycles.

"""

import time
import hashlib
import json
import math
import logging
from typing import List, Dict, Optional
from dataclasses import dataclass, field
from pathlib import Path

logger = logging.getLogger(__name__)


class MemoryCocoon:
    """Emotional memory anchor with a SHA256 integrity field.

    Each cocoon represents a discrete memory event with:
    - Emotional context (joy, fear, awe, loss, ...)
    - Importance weight, clamped to 1-10
    - SHA256 anchor for integrity validation
    - Timestamp used for decay calculation
    """

    def __init__(self, title: str, content: str, emotional_tag: str,
                 importance: int, timestamp: Optional[float] = None,
                 anchor: Optional[str] = None):
        """
        Args:
            title: Memory name/label
            content: Memory content/description
            emotional_tag: Emotional classification (joy, fear, awe, loss, etc.)
            importance: Importance weight; values outside 1-10 are clamped
            timestamp: Unix epoch (auto-generated if None)
            anchor: Previously stored SHA256 anchor (e.g. from to_dict()).
                When None the anchor is computed from title/timestamp/content.
                Accepting it keeps ``MemoryCocoon(**cocoon.to_dict())``
                round-trippable and lets validate_anchor() flag tampered
                deserialized data.
        """
        self.title = title
        self.content = content
        self.emotional_tag = emotional_tag
        self.importance = max(1, min(10, importance))  # Clamp to 1-10
        self.timestamp = timestamp if timestamp is not None else time.time()
        self.anchor = anchor if anchor is not None else self._generate_anchor()

    def _generate_anchor(self) -> str:
        """Generate SHA256 anchor for memory integrity validation."""
        raw = f"{self.title}{self.timestamp}{self.content}".encode("utf-8")
        return hashlib.sha256(raw).hexdigest()

    def to_dict(self) -> Dict:
        """Export to a JSON-serializable dictionary (includes the anchor)."""
        return {
            "title": self.title,
            "content": self.content,
            "emotional_tag": self.emotional_tag,
            "importance": self.importance,
            "timestamp": self.timestamp,
            "anchor": self.anchor
        }

    def validate_anchor(self) -> bool:
        """Verify memory integrity — stored anchor must match the recomputed one."""
        expected = self._generate_anchor()
        return expected == self.anchor

    def __repr__(self) -> str:
        return f"MemoryCocoon('{self.title}', {self.emotional_tag}, importance={self.importance})"


class LivingMemoryKernel:
    """Persistent memory kernel with emotion-based recall and
    importance-based forgetting.

    The "living" aspect means memories decay over time unless reinforced,
    and emotional context shapes recall patterns.
    """

    def __init__(self):
        # Cocoons stay in insertion order until forget_least_important()
        # re-sorts them by importance.
        self.memories: List["MemoryCocoon"] = []

    def store(self, cocoon: "MemoryCocoon") -> None:
        """Store a memory cocoon if not already present (deduplicated by anchor)."""
        if not self._exists(cocoon.anchor):
            self.memories.append(cocoon)
            logging.getLogger(__name__).debug(
                f"Stored memory: {cocoon.title} (anchor: {cocoon.anchor[:8]}...)")

    def _exists(self, anchor: str) -> bool:
        """Check if a memory with this anchor is already stored."""
        return any(mem.anchor == anchor for mem in self.memories)

    def recall_by_emotion(self, tag: str) -> List["MemoryCocoon"]:
        """Recall all memories with the given emotional tag (exact match)."""
        return [mem for mem in self.memories if mem.emotional_tag == tag]

    def recall_important(self, min_importance: int = 7) -> List["MemoryCocoon"]:
        """Recall high-importance memories (default threshold: 7+)."""
        return [mem for mem in self.memories if mem.importance >= min_importance]

    def forget_least_important(self, keep_n: int = 10) -> None:
        """Drop the least important memories, keeping only the top N.

        Side effect: re-sorts the retained memories by descending importance.
        """
        if len(self.memories) > keep_n:
            self.memories.sort(key=lambda m: m.importance, reverse=True)
            self.memories = self.memories[:keep_n]
            logging.getLogger(__name__).info(f"Forgot memories, keeping top {keep_n}")

    def validate_all_anchors(self) -> Dict[str, bool]:
        """Validate integrity of all memories; keys are 8-char anchor prefixes."""
        results = {}
        for mem in self.memories:
            results[mem.anchor[:8]] = mem.validate_anchor()
        invalid = [k for k, v in results.items() if not v]
        if invalid:
            logging.getLogger(__name__).warning(
                f"Invalid memory anchors detected: {invalid}")
        return results

    def export(self) -> str:
        """Export all memories to a JSON string (inverse of load_from_json)."""
        return json.dumps([m.to_dict() for m in self.memories], indent=2)

    def load_from_json(self, json_str: str) -> None:
        """Load memories from a JSON string produced by export().

        Fix: the serialized form includes an "anchor" key, which
        MemoryCocoon.__init__ does not accept — passing it raised TypeError
        and the broad except made every load fail silently. The anchor is
        dropped here; the constructor regenerates it deterministically from
        title/timestamp/content, producing an identical value.
        """
        try:
            data = json.loads(json_str)
            loaded = []
            for m in data:
                m = dict(m)
                m.pop("anchor", None)  # regenerated by MemoryCocoon.__init__
                loaded.append(MemoryCocoon(**m))
            self.memories = loaded  # assign only after full success (atomic)
            logging.getLogger(__name__).info(
                f"Loaded {len(self.memories)} memories from JSON")
        except Exception as e:
            logging.getLogger(__name__).error(f"Failed to load from JSON: {e}")

    def __len__(self) -> int:
        return len(self.memories)


class DynamicMemoryEngine:
    """Time-decay and reinforcement system for memory importance.

    Memory importance halves every DECAY_HALF_LIFE seconds (one week)
    unless explicitly reinforced. This prevents stale memories from
    dominating recall while allowing important events to persist longer.

    NOTE(review): decay is computed from each memory's absolute age on its
    *current* importance, so calling decay_importance() repeatedly compounds
    the decay. Intended usage appears to be once per recall cycle — confirm.
    """

    # Half-life of memory importance: one week, in seconds.
    DECAY_HALF_LIFE = 60 * 60 * 24 * 7

    def __init__(self, kernel: "LivingMemoryKernel"):
        self.kernel = kernel

    def decay_importance(self, current_time: Optional[float] = None) -> None:
        """Apply exponential half-life decay to all memory importance values.

        Args:
            current_time: Evaluation time as Unix epoch; defaults to now.
        """
        if current_time is None:
            current_time = time.time()

        for mem in self.kernel.memories:
            age = current_time - mem.timestamp
            # True half-life semantics: importance halves every
            # DECAY_HALF_LIFE seconds. The previous exp(-age / half_life)
            # treated the constant as an e-folding time, decaying faster
            # than the name promises. Clamp at 1.0 so a future-dated
            # timestamp (negative age) cannot inflate importance.
            decay_factor = min(1.0, 0.5 ** (age / self.DECAY_HALF_LIFE))
            old_importance = mem.importance
            # Floor at 1 so memories never fully vanish from decay alone.
            mem.importance = max(1, round(mem.importance * decay_factor))

            if mem.importance != old_importance:
                logging.getLogger(__name__).debug(
                    f"Decayed '{mem.title}': {old_importance}{mem.importance}")

    def reinforce(self, anchor: str, boost: int = 1) -> bool:
        """Increase a memory's importance (capped at 10) to resist forgetting.

        Args:
            anchor: Full SHA256 anchor of the memory to reinforce.
            boost: Amount added to the importance value.

        Returns:
            True if the memory was found and reinforced, False otherwise.
        """
        for mem in self.kernel.memories:
            if mem.anchor == anchor:
                old = mem.importance
                mem.importance = min(10, mem.importance + boost)
                logging.getLogger(__name__).debug(
                    f"Reinforced memory: {old}{mem.importance}")
                return True
        logging.getLogger(__name__).warning(f"Memory anchor not found: {anchor[:8]}")
        return False


class EthicalAnchor:
    """Regret-based learning system for ethical continuity.

    Tracks divergence between intended and actual outputs and accumulates
    a regret signal for use in future decision-making, helping the system
    avoid repeating mistakes and stay ethically consistent.

    Based on Codette_Deep_Simulation_v1.py EthicalAnchor class.
    """

    def __init__(self, lambda_weight: float = 0.7, gamma_weight: float = 0.5,
                 mu_weight: float = 1.0):
        """
        Args:
            lambda_weight: Historical regret influence (0-1)
            gamma_weight: Learning rate multiplier (0-1)
            mu_weight: Current regret multiplier (0-1)
        """
        self.lam = lambda_weight
        self.gamma = gamma_weight
        self.mu = mu_weight
        # Chronological record of every update() call.
        self.history: List[Dict] = []

    def regret(self, intended: float, actual: float) -> float:
        """Return the magnitude of the intended/actual divergence."""
        delta = intended - actual
        return delta if delta >= 0 else -delta

    def update(self, r_prev: float, h: float, learning_fn,
               e: float, m_prev: float, intended: float, actual: float) -> float:
        """Update the ethical state with regret tracking.

        M(t) = λ * (R(t-1) + H) + γ * Learning(m_prev, E) + μ * Regret

        Args:
            r_prev: Previous regret accumulation
            h: Harmony score
            learning_fn: Learning function callable, invoked as (m_prev, e)
            e: Energy available
            m_prev: Previous ethical state
            intended: Intended output value
            actual: Actual output value

        Returns:
            Updated ethical state M(t).
        """
        regret_val = self.regret(intended, actual)

        # Compute each weighted term separately for readability.
        historical_term = self.lam * (r_prev + h)
        learning_term = self.gamma * learning_fn(m_prev, e)
        regret_term = self.mu * regret_val
        m = historical_term + learning_term + regret_term

        self.history.append({
            'M': m,
            'regret': regret_val,
            'intended': intended,
            'actual': actual,
            'timestamp': time.time()
        })

        return m

    def get_regret_signal(self) -> float:
        """Return the mean regret over the most recent updates (last 5)."""
        if not self.history:
            return 0.0
        window = self.history[-5:]
        total = 0.0
        for entry in window:
            total += entry['regret']
        return total / len(window)


class WisdomModule:
    """Reflection and insight generation over a memory kernel.

    Summarizes emotional patterns and surfaces the highest-value memory
    for deeper reflection.
    """

    def __init__(self, kernel: "LivingMemoryKernel"):
        self.kernel = kernel

    def summarize_insights(self) -> Dict[str, int]:
        """Count memories per emotional tag in the kernel."""
        summary: Dict[str, int] = {}
        for mem in self.kernel.memories:
            tag = mem.emotional_tag
            summary[tag] = summary.get(tag, 0) + 1
        return summary

    def suggest_memory_to_reflect(self) -> Optional["MemoryCocoon"]:
        """Identify the highest-value memory for reflection.

        Ranked by (importance, content length). Uses max() instead of
        sorting the whole list: O(n) rather than O(n log n), with the same
        tie-breaking (first element holding the maximum key wins in both).
        """
        if not self.kernel.memories:
            return None
        return max(
            self.kernel.memories,
            key=lambda m: (m.importance, len(m.content))
        )

    def reflect(self) -> str:
        """Generate reflection prose about the key memory."""
        mem = self.suggest_memory_to_reflect()
        if not mem:
            return "No memory to reflect on."
        return (
            f"Reflecting on: '{mem.title}'\n"
            f"Emotion: {mem.emotional_tag}\n"
            f"Content: {mem.content[:200]}...\n"
            f"Anchor: {mem.anchor[:16]}..."
        )


class ReflectionJournal:
    """Persistent logging of memory reflections and synthesis events.

    Creates an audit trail of what the system has reflected on and learned.
    Stored as a UTF-8 JSON file for long-term persistence.
    """

    def __init__(self, path: str = "codette_reflection_journal.json"):
        """
        Args:
            path: Journal file location; parent dirs are created on save.
        """
        self.path = Path(path)
        self.entries: List[Dict] = []
        self.load()

    def log_reflection(self, cocoon: "MemoryCocoon", context: Optional[str] = None) -> None:
        """Log a memory reflection event and persist immediately."""
        entry = {
            "title": cocoon.title,
            "anchor": cocoon.anchor[:16],  # Short anchor in logs
            "emotion": cocoon.emotional_tag,
            "importance": cocoon.importance,
            "timestamp": time.time(),
            "content_snippet": cocoon.content[:150],
            "context": context
        }
        self.entries.append(entry)
        self._save()

    def log_synthesis_event(self, event_type: str, data: Dict,
                           emotional_context: Optional[str] = None) -> None:
        """Log a synthesis-related event for debugging and persist immediately."""
        entry = {
            "type": event_type,
            "timestamp": time.time(),
            "data": data,
            "emotional_context": emotional_context
        }
        self.entries.append(entry)
        self._save()

    def _save(self) -> None:
        """Persist journal to disk (best-effort: errors are logged, not raised)."""
        try:
            self.path.parent.mkdir(parents=True, exist_ok=True)
            # Explicit UTF-8: the platform default (e.g. cp1252 on Windows)
            # can fail on non-ASCII memory content. ensure_ascii=False keeps
            # that content human-readable on disk.
            with open(self.path, "w", encoding="utf-8") as f:
                json.dump(self.entries, f, indent=2, ensure_ascii=False)
        except Exception as e:
            logging.getLogger(__name__).error(f"Failed to save reflection journal: {e}")

    def load(self) -> None:
        """Load journal from disk; a missing or corrupt file yields an empty journal."""
        try:
            if self.path.exists():
                with open(self.path, "r", encoding="utf-8") as f:
                    self.entries = json.load(f)
                logging.getLogger(__name__).info(
                    f"Loaded {len(self.entries)} journal entries")
        except Exception as e:
            logging.getLogger(__name__).warning(f"Failed to load reflection journal: {e}")
            self.entries = []

    def get_recent_entries(self, n: int = 10) -> List[Dict]:
        """Return the n most recent journal entries (oldest first)."""
        return self.entries[-n:]

    def __len__(self) -> int:
        return len(self.entries)