| """ |
| GLADIUS v2.0 β Time Engine |
| |
| Dual-clock temporal encoding: |
| Absolute Clock: Time2Vec β wall-clock time as learned periodic functions. |
| Relative Clock: Event-anchored with exponential decay. |
| |
| Injected ADDITIVELY into hidden states (like a bias), not concatenated. |
| Does not interfere with RoPE (rotational) β operates in different subspace. |
| |
| argmax_memory S(relevance | query, time_decay) β time modulates every scoring function. |
| """ |
|
|
| import torch |
| import torch.nn as nn |
| import math |
| import time as time_module |
|
|
| from .config import KernelConfig |
|
|
|
|
class AbsoluteClock(nn.Module):
    """
    Time2Vec-style learned periodic encoding of wall-clock time.

    Maps a scalar timestamp to a feature vector whose first component is a
    linear trend term and whose remaining components are sinusoids with
    learned frequencies and phases:

        t -> [w0*t + p0, sin(w1*t + p1), sin(w2*t + p2), ...]
    """

    def __init__(self, num_frequencies: int = 16):
        super().__init__()
        self.num_frequencies = num_frequencies

        # Small random frequencies so the encoding starts near-constant
        # and the trend/periods are learned during training.
        self.omega = nn.Parameter(torch.randn(num_frequencies) * 0.01)
        self.phi = nn.Parameter(torch.zeros(num_frequencies))

    def forward(self, timestamp: torch.Tensor) -> torch.Tensor:
        """
        Args:
            timestamp: (batch,) -- seconds since epoch, normalized.
        Returns:
            encoding: (batch, num_frequencies)
        """
        t = timestamp[..., None]

        # One affine projection against every (frequency, phase) pair.
        raw = self.omega * t + self.phi

        # Feature 0 stays linear (captures trend); the rest are periodic.
        return torch.cat([raw[..., :1], torch.sin(raw[..., 1:])], dim=-1)
|
|
|
|
class RelativeClock(nn.Module):
    """
    Event-anchored temporal encoding with learned exponential decay.

    Keeps a fixed-size ring buffer of recent event timestamps and encodes
    "time since each event": ages are log-compressed, mixed down by a
    linear layer, and passed through a per-feature exponential decay so
    recent events produce a strong signal while old ones fade.

    This gives GLADIUS a sense of "how long ago" something happened.
    """

    def __init__(self, config: KernelConfig):
        super().__init__()
        self.max_events = config.time_max_events
        self.num_frequencies = config.time_num_frequencies

        # Ring buffer of raw event timestamps plus its write cursor.
        self.register_buffer('event_times', torch.zeros(config.time_max_events))
        self.register_buffer('event_head', torch.tensor(0, dtype=torch.long))

        # Per-feature decay rates; kept positive via abs() in forward().
        self.decay = nn.Parameter(torch.ones(config.time_num_frequencies) * 0.01)

        # Mixes per-event log-ages down to num_frequencies features.
        self.proj = nn.Linear(config.time_max_events, config.time_num_frequencies, bias=False)

    def record_event(self, timestamp: float):
        """Store a new event timestamp, overwriting the oldest slot when full."""
        with torch.no_grad():
            slot = int(self.event_head) % self.max_events
            self.event_times[slot] = timestamp
            self.event_head += 1

    def forward(self, current_time: torch.Tensor) -> torch.Tensor:
        """
        Args:
            current_time: (batch,) -- current timestamp.
        Returns:
            encoding: (batch, num_frequencies)
        """
        # Non-negative age of every buffered event relative to "now".
        ages = (current_time[..., None] - self.event_times[None, :]).clamp(min=0)

        # Log-compress so very old events don't dominate numerically.
        log_ages = torch.log1p(ages)

        # Project to feature space, then apply per-feature exponential decay.
        return torch.exp(-self.decay.abs()[None, :] * self.proj(log_ages))
|
|
|
|
class TemporalFusion(nn.Module):
    """
    Fuses the absolute and relative clock encodings into one temporal
    embedding of size hidden_dim, ready for additive injection.
    """

    def __init__(self, config: KernelConfig):
        super().__init__()
        fused_dim = config.time_num_frequencies * 2
        # Two-layer MLP: concatenated clocks -> time_dim -> hidden_dim.
        self.proj = nn.Sequential(
            nn.Linear(fused_dim, config.time_dim),
            nn.SiLU(),
            nn.Linear(config.time_dim, config.hidden_dim),
        )

    def forward(self, absolute: torch.Tensor, relative: torch.Tensor) -> torch.Tensor:
        """
        Args:
            absolute: (batch, num_frequencies) from the absolute clock.
            relative: (batch, num_frequencies) from the relative clock.
        Returns:
            temporal_embedding: (batch, hidden_dim)
        """
        fused = torch.cat((absolute, relative), dim=-1)
        return self.proj(fused)
|
|
|
|
class TimeEngine(nn.Module):
    """
    Complete time engine. Produces temporal embeddings for additive injection.

    Supports two modes:
        - 'continuous' (default): Time2Vec -- learned periodic functions
        - 'lattice': LatticeClock -- discrete quantized positions

    Usage:
        time_embed = time_engine(timestamp)        # (B, hidden_dim)
        hidden = hidden + time_embed.unsqueeze(1)  # Broadcast across seq_len
    """

    def __init__(self, config: KernelConfig):
        super().__init__()

        self.clock_mode = getattr(config, 'clock_mode', 'continuous')

        if self.clock_mode == 'lattice':
            from .temporal_lattice import LatticeClock
            self.lattice = LatticeClock(config)
        else:
            self.absolute = AbsoluteClock(config.time_num_frequencies)
            self.fusion = TemporalFusion(config)

        # Relative clock exists in both modes so record_event() always works.
        self.relative = RelativeClock(config)

        # Reference epoch (2025-01-01 UTC) so normalized times stay small.
        self.register_buffer('epoch_offset', torch.tensor(1735689600.0))

    def normalize_timestamp(self, timestamp: torch.Tensor) -> torch.Tensor:
        """Shift to the reference epoch and rescale seconds -> hours.

        The subtraction runs in the input's dtype. Callers should pass
        float64 for Unix-epoch timestamps: float32 resolution near 1.7e9
        is roughly two minutes, so subtracting the epoch in float32 would
        destroy sub-minute precision (catastrophic cancellation).
        """
        epoch = self.epoch_offset.to(device=timestamp.device, dtype=timestamp.dtype)
        return (timestamp - epoch) / 3600.0

    def forward(self, timestamp: torch.Tensor | float | None = None) -> torch.Tensor:
        """
        Args:
            timestamp: (batch,) seconds since Unix epoch, or None for current time
        Returns:
            temporal_embedding: (batch, hidden_dim)
        """
        if timestamp is None:
            timestamp = time_module.time()
        if isinstance(timestamp, (int, float)):
            # float64 keeps sub-second precision at epoch scale; float32
            # would round present-day timestamps to ~2-minute granularity.
            timestamp = torch.tensor([timestamp], dtype=torch.float64)

        target_device = next(self.parameters()).device
        timestamp = timestamp.to(target_device)

        # Do the epoch subtraction in float64 (it cancels ~9 significant
        # digits), then drop to float32 for the learned modules.
        t_norm = self.normalize_timestamp(timestamp.to(torch.float64)).to(torch.float32)

        if self.clock_mode == 'lattice':
            return self.lattice(t_norm)
        else:
            abs_enc = self.absolute(t_norm)
            # Relative clock consumes raw (un-normalized) timestamps; cast
            # so float64 inputs match its float32 parameters.
            rel_enc = self.relative(timestamp.to(torch.float32))
            return self.fusion(abs_enc, rel_enc)

    def record_event(self, timestamp: float | None = None):
        """Record an event in the relative clock (defaults to now)."""
        if timestamp is None:
            timestamp = time_module.time()
        self.relative.record_event(timestamp)
|
|