# LOGOS-SPCW-Matroska — logos/stream_interpreter.py
# (from commit ac73ca8: "Refactor: Restructure into Machine Shop protocol
#  (logos package, gradio ui)")
"""
LOGOS Stream Interpreter - SPCW Cake/Bake Protocol
Implements 512-byte Atom architecture with Heat Code extraction and Prime Modulo Harmonization
"""
import numpy as np
from collections import deque
from enum import Enum
import logging
# Global Scalar Wave Prime used for Prime Modulo Harmonization: a Heat Code
# whose integer value is divisible by this prime is classified as META,
# otherwise DELTA.
# NOTE(review): StreamInterpreter.__init__ defaults to the literal 9973
# instead of referencing this constant — keep the two values in sync.
GLOBAL_SCALAR_PRIME = 9973
class ChunkType(Enum):
    """Chunk classification produced by Prime Modulo Harmonization.

    META:  Harmonized Wave — structural/geometric content.
    DELTA: Phase Hole / Heat — correctional/thermal content.
    """

    META = "META"
    DELTA = "DELTA"
class RenderFrame:
    """Plain record bundling one rendered frame with its SPCW metadata.

    Attributes:
        rgb_buffer: Pixel data for the frame (numpy array, (H, W, 3)).
        heat_signature: 8-character hex Heat Code of the source Atom.
        chunk_type: Classification of the source chunk (META/DELTA).
        render_buffer_size: Render buffer dimension in effect for this frame.
    """

    def __init__(self, rgb_buffer, heat_signature, chunk_type, render_buffer_size):
        # Store the frame payload and its provenance verbatim; this class
        # performs no validation or conversion of its own.
        self.rgb_buffer = rgb_buffer
        self.heat_signature = heat_signature
        self.chunk_type = chunk_type
        self.render_buffer_size = render_buffer_size
class StreamInterpreter:
    """
    Implements SPCW Cake/Bake Protocol:
    - Ingest: 512-byte fixed chunks ("Atoms")
    - Heat Code: First 4 bytes (8 hex digits)
    - Wave Payload: Remaining 508 bytes
    - Harmonization: Prime modulo classification (META vs DELTA)
    """
    def __init__(self, min_fidelity=256, max_fidelity=1024, global_scalar_prime=9973):
        """
        Initialize the Stream Interpreter with SPCW protocol.

        Args:
            min_fidelity: Minimum render buffer dimension
            max_fidelity: Maximum render buffer dimension
            global_scalar_prime: Prime for harmonization modulo
                (default mirrors module-level GLOBAL_SCALAR_PRIME)
        """
        self.min_fidelity = min_fidelity
        self.max_fidelity = max_fidelity
        self.global_scalar_prime = global_scalar_prime
        self.ATOM_SIZE = 512  # Fixed 512-byte chunk size
        # Start at the lowest fidelity; META complexity raises it over time.
        self.render_buffer_size = min_fidelity
        self.meta_markers = deque(maxlen=100)  # Track recent META markers
        self.chunk_history = deque(maxlen=50)  # Rolling log of processed chunks
        # Monotonic count of META chunks seen so far. Used to give each META
        # marker a stable, unique 'index' even after the bounded deque starts
        # evicting old entries (len(self.meta_markers) saturates at maxlen,
        # so it cannot serve as an index).
        self._meta_count = 0
        # Setup logging
        self.logger = logging.getLogger('StreamInterpreter')

    def ingest_stream(self, binary_data):
        """
        Extract a 512-byte Atom from binary stream data.

        Args:
            binary_data: bytes object (must be exactly 512 bytes)

        Returns:
            dict with:
            - heat_signature: 8-char hex string (first 4 bytes)
            - wave_payload: bytes (remaining 508 bytes)
            - raw_chunk: the original 512-byte chunk, unchanged

        Raises:
            ValueError: if binary_data is not exactly ATOM_SIZE bytes.
        """
        if len(binary_data) != self.ATOM_SIZE:
            raise ValueError(
                f"Chunk must be exactly {self.ATOM_SIZE} bytes, got {len(binary_data)}"
            )
        # Extract Heat Code (first 4 bytes → 8 hex digits)
        heat_code_bytes = binary_data[0:4]
        heat_signature = heat_code_bytes.hex()
        # Extract Wave Payload (remaining 508 bytes)
        wave_payload = binary_data[4:]
        return {
            'heat_signature': heat_signature,
            'wave_payload': wave_payload,
            'raw_chunk': binary_data
        }

    def analyze_chunk(self, atom_data):
        """
        Analyze a chunk using Prime Modulo Harmonization.

        Args:
            atom_data: dict from ingest_stream()

        Returns:
            dict with:
            - chunk_type: ChunkType.META or ChunkType.DELTA
            - residue: Modulo residue value
            - harmonized: Boolean indicating harmonization
            - heat_signature / heat_signature_int: hex and integer forms
        """
        heat_signature_hex = atom_data['heat_signature']
        # Convert 8-hex signature to integer
        heat_signature_int = int(heat_signature_hex, 16)
        # Prime Modulo Harmonization
        residue = heat_signature_int % self.global_scalar_prime
        # Classification: a zero residue means the Heat Code "fits" the
        # global scalar wave; anything else is treated as a phase hole.
        if residue == 0:
            # Harmonized: Fits the wave structure
            chunk_type = ChunkType.META
            harmonized = True
        else:
            # Phase Hole: Noise/Gap requiring correction
            chunk_type = ChunkType.DELTA
            harmonized = False
        return {
            'chunk_type': chunk_type,
            'residue': residue,
            'harmonized': harmonized,
            'heat_signature': heat_signature_hex,
            'heat_signature_int': heat_signature_int
        }

    def calculate_meta_complexity(self, wave_payload):
        """
        Calculate complexity from a META wave payload for fidelity scaling.

        Args:
            wave_payload: bytes (typically 508 bytes; empty input allowed)

        Returns:
            complexity: float in [0.0, 1.0] representing structural complexity
        """
        # Empty/None payloads carry no structure.
        if not wave_payload:
            return 0.0
        payload_array = np.frombuffer(wave_payload, dtype=np.uint8)
        # Complexity factors:
        # 1. Byte value variance (structure variation), normalized so the
        #    maximum possible uint8 variance maps well below 1.0.
        byte_variance = np.var(payload_array) / (255.0 ** 2)
        # 2. Pattern regularity (low variance = more regular = higher structure)
        #    For META, higher structure = higher fidelity needed.
        pattern_regularity = 1.0 - min(byte_variance, 1.0)
        # 3. Spatial coherence: fraction of adjacent byte pairs that differ.
        transitions = np.sum(np.diff(payload_array) != 0)
        transition_rate = transitions / len(payload_array)
        # Combine: Regular patterns (META) indicate structural complexity.
        complexity = (0.5 * pattern_regularity + 0.5 * transition_rate)
        # Clamp and return a builtin float (not np.float64) for callers.
        return float(min(max(complexity, 0.0), 1.0))

    def update_fidelity(self, complexity, chunk_type):
        """
        Dynamically adjust render_buffer_size based on META complexity.

        DELTA chunks leave the fidelity untouched; only META (structural)
        chunks drive resolution changes.

        Args:
            complexity: Complexity metric from calculate_meta_complexity
            chunk_type: ChunkType.META or ChunkType.DELTA
        """
        if chunk_type == ChunkType.META:
            # META chunks determine resolution (Structure drives fidelity)
            target_fidelity = self.min_fidelity + int(
                (self.max_fidelity - self.min_fidelity) * complexity
            )
            # Smooth transition using exponential moving average
            alpha = 0.3  # Smoothing factor
            self.render_buffer_size = int(
                alpha * target_fidelity + (1 - alpha) * self.render_buffer_size
            )
            # Clamp to bounds
            self.render_buffer_size = max(
                self.min_fidelity,
                min(self.max_fidelity, self.render_buffer_size)
            )

    def process_chunk(self, binary_chunk):
        """
        Process a 512-byte chunk through the full SPCW pipeline.

        Args:
            binary_chunk: bytes object (exactly 512 bytes)

        Returns:
            dict with:
            - heat_signature: 8-char hex string
            - wave_payload: bytes (508 bytes)
            - chunk_type: META or DELTA
            - residue: Modulo residue
            - complexity: Complexity metric (0.0 for DELTA chunks)
            - render_buffer_size: Current render buffer size
            - atom_data: Full atom structure

        Raises:
            ValueError: if binary_chunk is not exactly 512 bytes.
        """
        # Step 1: Ingest and extract Heat Code + Wave Payload
        atom_data = self.ingest_stream(binary_chunk)
        # Step 2: Analyze via Prime Modulo Harmonization
        analysis = self.analyze_chunk(atom_data)
        chunk_type = analysis['chunk_type']
        # Step 3: Calculate complexity (for META chunks only)
        complexity = 0.0
        if chunk_type == ChunkType.META:
            complexity = self.calculate_meta_complexity(atom_data['wave_payload'])
        # Step 4: Update fidelity based on META complexity
        self.update_fidelity(complexity, chunk_type)
        # Track META markers for harmonization.
        # BUG FIX: 'index' previously used len(self.meta_markers), which
        # saturates at the deque's maxlen (100) and then repeats forever;
        # a monotonic counter keeps marker indices unique.
        if chunk_type == ChunkType.META:
            self.meta_markers.append({
                'index': self._meta_count,
                'heat_signature': analysis['heat_signature'],
                'complexity': complexity,
                'fidelity': self.render_buffer_size
            })
            self._meta_count += 1
        # Store chunk history
        self.chunk_history.append({
            'heat_signature': analysis['heat_signature'],
            'residue': analysis['residue'],
            'type': chunk_type,
            'complexity': complexity
        })
        # Log processing information
        self.logger.info(
            f"Input Chunk Size: [{len(binary_chunk)}] -> "
            f"Heat Code: [{analysis['heat_signature']}] -> "
            f"Residue: [{analysis['residue']}] -> "
            f"Type: [{chunk_type.value}] -> "
            f"Calculated Fidelity: [{self.render_buffer_size}] -> "
            f"Render Buffer: [{self.render_buffer_size}x{self.render_buffer_size}]"
        )
        return {
            'heat_signature': analysis['heat_signature'],
            'wave_payload': atom_data['wave_payload'],
            'chunk_type': chunk_type,
            'residue': analysis['residue'],
            'complexity': complexity,
            'render_buffer_size': self.render_buffer_size,
            'atom_data': atom_data
        }

    def get_synchronization_markers(self):
        """
        Get META markers for StreamHarmonization.

        Returns:
            List of META marker dicts (index, heat_signature, complexity,
            fidelity), most recent 100 at most.
        """
        return list(self.meta_markers)

    def get_render_buffer_size(self):
        """Get current render buffer size (square dimension in use)."""
        return self.render_buffer_size