# symbolic_mutations / stage3.py
# (Hugging Face page header: uploaded by RFTSystems, verified commit 79b17c6, "Create stage3.py")
# stage3.py
# Author: Liam Grinstead
# Purpose: Unified Telemetry and Energy Tracking Validation (Stage Three of Twelve)
import torch, time, json, random, math, argparse
import torch.nn as nn
# ---------------- Determinism ----------------
def set_seed(seed=1234):
    """Seed Python's and PyTorch's RNGs (CPU and every CUDA device) so runs repeat exactly."""
    for seeder in (random.seed, torch.manual_seed, torch.cuda.manual_seed_all):
        seeder(seed)
# ---------------- Telemetry ------------------
class Telemetry:
    """Append-only JSONL event logger with a run-relative timestamp.

    Every emit() stamps the record with "t" (seconds since construction,
    ms precision), prints it, and writes it to the log file, flushing
    immediately so telemetry survives a crash mid-run.

    Also usable as a context manager so the file is closed even when the
    caller raises: ``with Telemetry(path) as tm: ...``.
    """

    def __init__(self, log_path="stage3_telemetry.jsonl"):
        self.t0 = time.time()
        # explicit encoding keeps the JSONL portable across platforms
        self.f = open(log_path, "w", encoding="utf-8")

    def emit(self, **k):
        """Serialize the keyword arguments as one compact JSON line and log it."""
        k["t"] = round(time.time() - self.t0, 3)
        line = json.dumps(k, separators=(",", ":"))
        print(line)
        self.f.write(line + "\n")
        # flush per event: the log doubles as a live progress stream
        self.f.flush()

    def close(self):
        """Close the underlying log file. Safe to call more than once."""
        if not self.f.closed:
            self.f.close()

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc, tb):
        self.close()
        return False
# ---------------- Orbital Coupler ------------
class Orbital:
def __init__(self, g=0.006, floor=0.2):
self.a = 0.0; self.b = math.pi/3; self.g = g; self.floor = floor
def step(self):
d = (self.b - self.a + math.pi) % (2*math.pi) - math.pi
if abs(d) < self.floor:
d = self.floor * (1 if d >= 0 else -1)
s = math.sin(d)
self.a = (self.a + self.g * s) % (2*math.pi)
self.b = (self.b - self.g * s) % (2*math.pi)
drift = abs((self.a - self.b + math.pi) % (2*math.pi) - math.pi)
return drift, abs(s)
# ---------------- DCLR Optimiser -------------
class DCLR(torch.optim.Optimizer):
    """Adam-like optimizer with a per-weight "coherence" damping term.

    Keeps three EMA buffers per parameter:
      m   — first moment of the gradient (decay `beta`),
      v   — second moment of the gradient (decay `gamma`),
      coh — EMA of |grad - m|, a noise/incoherence estimate.
    The effective learning rate is lr / (1 + cg * coh), so noisier
    weights take smaller steps.

    step() returns a 2-tuple ``(loss, tot_J)`` where loss is the closure's
    result (None if no closure was given) and tot_J is the summed squared
    update magnitude — a proxy for "energy" spent this step.
    """

    def __init__(self, params, lr=5e-4, beta=0.9, gamma=0.999, eps=1e-8, cg=0.05):
        super().__init__(params, dict(lr=lr, beta=beta, gamma=gamma, eps=eps, cg=cg))

    @torch.no_grad()
    def step(self, closure=None):
        loss = None
        if closure is not None:
            # standard Optimizer contract: re-evaluate the model under grad
            with torch.enable_grad():
                loss = closure()
        tot_J = 0.0
        for g in self.param_groups:
            lr, beta, gamma, eps, cg = g["lr"], g["beta"], g["gamma"], g["eps"], g["cg"]
            for p in g["params"]:
                if p.grad is None:
                    continue
                st = self.state[p]
                if not st:
                    # lazy per-parameter state initialization
                    st["m"] = torch.zeros_like(p)
                    st["v"] = torch.zeros_like(p)
                    st["coh"] = torch.zeros_like(p)
                m, v, h = st["m"], st["v"], st["coh"]
                grad = p.grad
                m.mul_(beta).add_(grad, alpha=1 - beta)
                v.mul_(gamma).addcmul_(grad, grad, value=1 - gamma)
                # coherence: EMA of the gradient's deviation from its own mean
                h.mul_(0.9).add_((grad - m).abs(), alpha=0.1)
                lr_eff = lr / (1 + cg * h)
                update = lr_eff * m / (v.sqrt() + eps)
                p.sub_(update)
                tot_J += (update * update).sum().item()
        return loss, tot_J
# ---------------- Tiny Network ---------------
class TinyNet(nn.Module):
    """Two-layer MLP: Linear(dim→dim) → ReLU → Linear(dim→classes)."""

    def __init__(self, dim=128, classes=10):
        super().__init__()
        self.fc1 = nn.Linear(dim, dim)
        self.fc2 = nn.Linear(dim, classes)

    def forward(self, x):
        hidden = self.fc1(x).relu()
        return self.fc2(hidden)
# ---------------- Runner ---------------------
def train(mode="RFT", steps=200, batch=256, log_path="stage3_telemetry.jsonl"):
    """Run the Stage-3 validation loop on random data, logging per-step telemetry.

    mode     : "RFT" selects the DCLR optimizer; anything else uses Adam(lr=5e-4).
    steps    : number of training steps.
    batch    : samples per step (random features, random labels in [0, 10)).
    log_path : JSONL file the Telemetry logger writes to.

    Returns a human-readable completion message. The telemetry file is closed
    even if the loop raises (try/finally), so partial runs are still readable.
    """
    set_seed(1234)
    tm = Telemetry(log_path)
    orb = Orbital()
    dev = "cuda" if torch.cuda.is_available() else "cpu"
    net = TinyNet().to(dev)
    if mode == "RFT":
        opt = DCLR(net.parameters())
    else:
        opt = torch.optim.Adam(net.parameters(), lr=5e-4)
    loss_fn = nn.CrossEntropyLoss()
    try:
        for s in range(1, steps + 1):
            # synthetic batch: 128-dim features, 10-class random labels
            x = torch.randn(batch, 128, device=dev)
            y = torch.randint(0, 10, (batch,), device=dev)
            drift, flux = orb.step()
            opt.zero_grad(set_to_none=True)
            out = net(x)
            loss = loss_fn(out, y)
            loss.backward()
            if isinstance(opt, DCLR):
                _, J = opt.step()  # DCLR reports per-step energy J
            else:
                opt.step()
                J = 0.0  # Adam path has no energy telemetry
            acc = (out.argmax(1) == y).float().mean().item()
            # NOTE(review): E_ret and coh are hard-coded placeholders here,
            # not measured quantities — confirm against later stages.
            tm.emit(mode=mode, step=s, drift=round(drift, 3), flux=round(flux, 3),
                    E_ret=0.992, coh=0.999, loss=round(float(loss.item()), 4),
                    acc=round(float(acc), 3),
                    J_step=round(float(J * 1e-6), 6))
    finally:
        tm.close()
    return f"Stage 3 complete. Telemetry saved to {log_path}"