"""
GLADIUS v2.0 — Time Engine
Dual-clock temporal encoding:
Absolute Clock: Time2Vec — wall-clock time as learned periodic functions.
Relative Clock: Event-anchored with exponential decay.
Injected ADDITIVELY into hidden states (like a bias), not concatenated.
Does not interfere with RoPE (rotational) — operates in a different subspace.
argmax_memory S(relevance | query, time_decay) — time modulates every scoring function.
"""
import torch
import torch.nn as nn
import math
import time as time_module
from .config import KernelConfig
class AbsoluteClock(nn.Module):
    """
    Time2Vec-style periodic encoding of wall-clock time.

    Maps a scalar timestamp to a learned temporal feature vector:
    component 0 is linear in t (captures trend), components 1..F-1 are
    sin(omega*t + phi) (capture periodic patterns such as daily or weekly
    cycles). Frequencies and phases are learned parameters.
    """

    def __init__(self, num_frequencies: int = 16):
        super().__init__()
        self.num_frequencies = num_frequencies
        # One learnable frequency/phase pair per output component.
        self.omega = nn.Parameter(torch.randn(num_frequencies) * 0.01)
        self.phi = nn.Parameter(torch.zeros(num_frequencies))

    def forward(self, timestamp: torch.Tensor) -> torch.Tensor:
        """
        Args:
            timestamp: (batch,) — seconds since epoch, normalized.
        Returns:
            encoding: (batch, num_frequencies)
        """
        # Affine map applied once for every component: omega * t + phi.
        phase = self.omega * timestamp.unsqueeze(-1) + self.phi  # (B, F)
        # Component 0 stays linear (trend); the rest pass through sin (patterns).
        return torch.cat([phase[..., :1], torch.sin(phase[..., 1:])], dim=-1)
class RelativeClock(nn.Module):
    """
    Event-anchored temporal encoding with learned exponential decay.

    Keeps a fixed-size ring buffer of recent event timestamps; at query
    time it encodes "time elapsed since each event" as decay-weighted
    features, so recent events contribute strongly and old ones fade.
    This gives GLADIUS a sense of "how long ago" something happened.
    """

    def __init__(self, config: "KernelConfig"):
        super().__init__()
        self.max_events = config.time_max_events
        self.num_frequencies = config.time_num_frequencies
        # Runtime state, not learned: ring buffer of timestamps + write cursor.
        self.register_buffer('event_times', torch.zeros(config.time_max_events))
        self.register_buffer('event_head', torch.tensor(0, dtype=torch.long))
        # One learnable decay rate per output frequency.
        self.decay = nn.Parameter(torch.ones(config.time_num_frequencies) * 0.01)
        # Mixes per-event age features down to the encoding width.
        self.proj = nn.Linear(config.time_max_events, config.time_num_frequencies, bias=False)

    def record_event(self, timestamp: float):
        """Append a timestamp to the ring buffer, overwriting the oldest slot."""
        with torch.no_grad():
            slot = int(self.event_head) % self.max_events
            self.event_times[slot] = timestamp
            self.event_head += 1

    def forward(self, current_time: torch.Tensor) -> torch.Tensor:
        """
        Args:
            current_time: (batch,) — current timestamp.
        Returns:
            encoding: (batch, num_frequencies)
        """
        # Elapsed time per recorded event; clamp ignores "future" events.
        ages = (current_time.unsqueeze(-1) - self.event_times.unsqueeze(0)).clamp(min=0)
        # log1p compresses large time ranges and stays finite at age 0.
        features = self.proj(torch.log1p(ages))  # (B, num_frequencies)
        # Exponential decay with always-positive learned rates.
        return torch.exp(-self.decay.abs().unsqueeze(0) * features)
class TemporalFusion(nn.Module):
    """
    Combines the absolute and relative clock encodings into a single
    temporal embedding of size hidden_dim, suitable for additive
    injection into hidden states.
    """

    def __init__(self, config: "KernelConfig"):
        super().__init__()
        # Absolute and relative encodings are concatenated before projection.
        fused_dim = config.time_num_frequencies * 2
        self.proj = nn.Sequential(
            nn.Linear(fused_dim, config.time_dim),
            nn.SiLU(),
            nn.Linear(config.time_dim, config.hidden_dim),
        )

    def forward(self, absolute: torch.Tensor, relative: torch.Tensor) -> torch.Tensor:
        """
        Args:
            absolute: (batch, num_frequencies) — Time2Vec encoding.
            relative: (batch, num_frequencies) — event-decay encoding.
        Returns:
            temporal_embedding: (batch, hidden_dim)
        """
        return self.proj(torch.cat((absolute, relative), dim=-1))
class TimeEngine(nn.Module):
    """
    Complete time engine. Produces temporal embeddings for additive injection.

    Supports two modes:
        - 'continuous' (default): Time2Vec — learned periodic functions
        - 'lattice': LatticeClock — discrete quantized positions

    Usage:
        time_embed = time_engine(timestamp)        # (B, hidden_dim)
        hidden = hidden + time_embed.unsqueeze(1)  # Broadcast across seq_len
    """

    def __init__(self, config: "KernelConfig"):
        super().__init__()
        # Determine clock mode from config; missing attribute -> continuous.
        self.clock_mode = getattr(config, 'clock_mode', 'continuous')
        if self.clock_mode == 'lattice':
            from .temporal_lattice import LatticeClock
            self.lattice = LatticeClock(config)
        else:
            self.absolute = AbsoluteClock(config.time_num_frequencies)
            self.fusion = TemporalFusion(config)
        # Relative clock is used in both modes
        self.relative = RelativeClock(config)
        # Normalization anchor (seconds since 2026-01-01 UTC). Stored in
        # float64: Unix-epoch seconds (~1.7e9) exceed float32's precision
        # (~128 s resolution at that magnitude), which would quantize the
        # normalized time by ~0.036 h.
        self.register_buffer('epoch_offset', torch.tensor(1735689600.0, dtype=torch.float64))

    def normalize_timestamp(self, timestamp: torch.Tensor) -> torch.Tensor:
        """Normalize to hours since epoch_offset, returned as float32.

        The subtraction runs in float64 so epoch-scale seconds keep full
        resolution; only the (small) normalized result is cast to float32.
        """
        epoch = self.epoch_offset.to(timestamp.device)  # ensure device alignment
        hours = (timestamp.to(torch.float64) - epoch) / 3600.0
        return hours.to(torch.float32)

    def forward(self, timestamp: torch.Tensor | float | None = None) -> torch.Tensor:
        """
        Args:
            timestamp: (batch,) seconds since Unix epoch, a scalar,
                or None for the current wall-clock time.
        Returns:
            temporal_embedding: (batch, hidden_dim)
        """
        if timestamp is None:
            timestamp = time_module.time()
        if isinstance(timestamp, (int, float)):
            # Build in float64: float32 cannot represent epoch-scale
            # seconds without ~128 s rounding error.
            timestamp = torch.tensor([float(timestamp)], dtype=torch.float64)
        # Move input to the model's device; fall back to the buffer's device
        # when the module (unusually) holds no parameters, instead of letting
        # next() raise StopIteration.
        param = next(self.parameters(), None)
        target_device = param.device if param is not None else self.epoch_offset.device
        timestamp = timestamp.to(target_device)
        t_norm = self.normalize_timestamp(timestamp)
        if self.clock_mode == 'lattice':
            # Lattice mode: discrete quantized temporal encoding
            return self.lattice(t_norm)
        else:
            # Continuous mode: Time2Vec + RelativeClock fusion. The relative
            # clock subtracts against float32 buffers, so cast at its boundary.
            abs_enc = self.absolute(t_norm)
            rel_enc = self.relative(timestamp.to(torch.float32))
            return self.fusion(abs_enc, rel_enc)

    def record_event(self, timestamp: float | None = None):
        """Record an event in the relative clock (None -> current time)."""
        if timestamp is None:
            timestamp = time_module.time()
        self.relative.record_event(timestamp)
|