# gladius-training / kernel / temporal.py
# GLADIUS training package: kernel + omega + synthase + checkpoint (step 529)
# commit 63e99b4 (verified)
"""
GLADIUS v2.0 β€” Time Engine
Dual-clock temporal encoding:
Absolute Clock: Time2Vec β€” wall-clock time as learned periodic functions.
Relative Clock: Event-anchored with exponential decay.
Injected ADDITIVELY into hidden states (like a bias), not concatenated.
Does not interfere with RoPE (rotational) β€” operates in different subspace.
argmax_memory S(relevance | query, time_decay) β€” time modulates every scoring function.
"""
import torch
import torch.nn as nn
import math
import time as time_module
from .config import KernelConfig
class AbsoluteClock(nn.Module):
    """
    Time2Vec: learned periodic encoding of wall-clock time.

    Maps a scalar timestamp to a feature vector whose first component is
    linear in t (captures trend) and whose remaining components are
    sinusoids with learned frequency and phase (capture periodic
    patterns such as daily/weekly cycles):

        t -> [omega_1*t + phi_1, sin(omega_2*t + phi_2), sin(omega_3*t + phi_3), ...]
    """

    def __init__(self, num_frequencies: int = 16):
        super().__init__()
        self.num_frequencies = num_frequencies
        # One learnable frequency/phase pair per output component.
        self.omega = nn.Parameter(torch.randn(num_frequencies) * 0.01)
        self.phi = nn.Parameter(torch.zeros(num_frequencies))

    def forward(self, timestamp: torch.Tensor) -> torch.Tensor:
        """
        Args:
            timestamp: (batch,) — seconds since epoch, normalized
        Returns:
            encoding: (batch, num_frequencies)
        """
        t = timestamp.unsqueeze(-1)  # (B, 1)
        # Affine features for every component; component 0 stays linear
        # (trend), the rest pass through sin (periodic patterns).
        features = self.omega * t + self.phi  # (B, num_frequencies)
        trend = features[..., :1]
        cycles = torch.sin(features[..., 1:])
        return torch.cat([trend, cycles], dim=-1)  # (B, num_frequencies)
class RelativeClock(nn.Module):
    """
    Event-anchored temporal encoding with exponential decay.

    Keeps a ring buffer of recent event timestamps and encodes
    "time since event X" with learned decay: recent events produce a
    strong encoding, old events fade. This gives GLADIUS a sense of
    "how long ago" something happened.
    """

    def __init__(self, config: KernelConfig):
        super().__init__()
        self.max_events = config.time_max_events
        self.num_frequencies = config.time_num_frequencies
        # Runtime state, not learnable: ring buffer of event timestamps
        # plus the monotonically increasing write cursor.
        self.register_buffer('event_times', torch.zeros(config.time_max_events))
        self.register_buffer('event_head', torch.tensor(0, dtype=torch.long))
        # Learned per-frequency decay rates.
        self.decay = nn.Parameter(torch.ones(config.time_num_frequencies) * 0.01)
        # Projects per-event features down to the encoding dimension.
        self.proj = nn.Linear(config.time_max_events, config.time_num_frequencies, bias=False)

    def record_event(self, timestamp: float):
        """Store a new event timestamp, overwriting the oldest slot when full."""
        with torch.no_grad():
            slot = int(self.event_head.item()) % self.max_events
            self.event_times[slot] = timestamp
            self.event_head += 1

    def forward(self, current_time: torch.Tensor) -> torch.Tensor:
        """
        Args:
            current_time: (batch,) — current timestamp
        Returns:
            encoding: (batch, num_frequencies)
        """
        # Elapsed time since each recorded event; clamp so apparent
        # "future" events contribute a zero delta.
        elapsed = (current_time.unsqueeze(-1) - self.event_times.unsqueeze(0)).clamp(min=0)  # (B, max_events)
        # log1p compresses large time ranges and is stable near zero.
        compressed = torch.log1p(elapsed)
        # Project to the encoding dim, then apply exponential decay with
        # non-negative (abs) learned rates.
        return torch.exp(-self.decay.abs().unsqueeze(0) * self.proj(compressed))  # (B, num_frequencies)
class TemporalFusion(nn.Module):
    """
    Combines the absolute and relative clock features into a single
    temporal embedding, projected to hidden_dim so it can be injected
    additively into hidden states.
    """

    def __init__(self, config: KernelConfig):
        super().__init__()
        # Concatenated absolute + relative features feed a small MLP.
        fused_dim = config.time_num_frequencies * 2
        self.proj = nn.Sequential(
            nn.Linear(fused_dim, config.time_dim),
            nn.SiLU(),
            nn.Linear(config.time_dim, config.hidden_dim),
        )

    def forward(self, absolute: torch.Tensor, relative: torch.Tensor) -> torch.Tensor:
        """
        Args:
            absolute: (batch, num_frequencies)
            relative: (batch, num_frequencies)
        Returns:
            temporal_embedding: (batch, hidden_dim)
        """
        fused = torch.cat((absolute, relative), dim=-1)
        return self.proj(fused)
class TimeEngine(nn.Module):
    """
    Complete time engine. Produces temporal embeddings for additive injection.

    Supports two modes:
      - 'continuous' (default): Time2Vec — learned periodic functions
      - 'lattice': LatticeClock — discrete quantized positions

    Usage:
        time_embed = time_engine(timestamp)        # (B, hidden_dim)
        hidden = hidden + time_embed.unsqueeze(1)  # Broadcast across seq_len
    """

    def __init__(self, config: KernelConfig):
        super().__init__()
        # Determine clock mode from config ('continuous' when absent).
        self.clock_mode = getattr(config, 'clock_mode', 'continuous')
        if self.clock_mode == 'lattice':
            from .temporal_lattice import LatticeClock
            self.lattice = LatticeClock(config)
        else:
            self.absolute = AbsoluteClock(config.time_num_frequencies)
            self.fusion = TemporalFusion(config)
        # Relative clock is used in both modes
        self.relative = RelativeClock(config)
        # Reference point for timestamp normalization.
        # FIX: 1735689600.0 is 2025-01-01T00:00:00Z, not 2026-01-01 as the
        # original comment claimed (2026-01-01 would be 1767225600.0). The
        # numeric value is kept unchanged so checkpoints that serialized
        # this buffer stay consistent.
        self.register_buffer('epoch_offset', torch.tensor(1735689600.0))  # 2025-01-01 UTC

    def normalize_timestamp(self, timestamp: torch.Tensor) -> torch.Tensor:
        """Normalize raw epoch seconds to hours since `epoch_offset`,
        a reasonable range for the learned frequencies."""
        # Ensure device alignment with the incoming tensor.
        epoch = self.epoch_offset.to(timestamp.device)
        return (timestamp - epoch) / 3600.0

    def forward(self, timestamp: torch.Tensor | int | float | None = None) -> torch.Tensor:
        """
        Args:
            timestamp: (batch,) seconds since Unix epoch, a scalar number,
                or None to use the current wall-clock time.
        Returns:
            temporal_embedding: (batch, hidden_dim)
        """
        if timestamp is None:
            timestamp = torch.tensor([time_module.time()])
        if isinstance(timestamp, (int, float)):
            # Scalar convenience path: wrap as a batch of one.
            timestamp = torch.tensor([timestamp], dtype=torch.float32)
        # Ensure timestamp is on same device as model parameters.
        target_device = next(self.parameters()).device
        timestamp = timestamp.to(target_device)
        t_norm = self.normalize_timestamp(timestamp)
        if self.clock_mode == 'lattice':
            # Lattice mode: discrete quantized temporal encoding.
            return self.lattice(t_norm)
        else:
            # Continuous mode: Time2Vec (normalized time) fused with the
            # relative clock (raw epoch seconds, matching recorded events).
            abs_enc = self.absolute(t_norm)
            rel_enc = self.relative(timestamp)
            return self.fusion(abs_enc, rel_enc)

    def record_event(self, timestamp: float | None = None):
        """Record an event in the relative clock (now, if no timestamp given)."""
        if timestamp is None:
            timestamp = time_module.time()
        self.relative.record_event(timestamp)