# Provenance: Hugging Face upload by Fabuilds ("Upload 23 files", commit d68c0f8, verified).
import torch
import torch.optim as optim
from hybrid_transformer import HybridResonanceTransformer
from hyperchaos_loss import HyperchaosLoss
def verify_training_step():
    """Run one full training step on dummy data to validate the architecture.

    Exercises the entire pipeline end to end — model construction, forward
    pass with hidden-state capture, hyperchaos loss computation, backward
    pass, and one optimizer step — printing diagnostics along the way.
    Raises whatever the underlying model/loss raises if the wiring is broken.
    """
    print("=== VERIFYING HYBRID RESONANCE TRAINING (PHASE 2) ===")

    # Config — deliberately tiny so the check runs in seconds on CPU.
    vocab_size = 100
    hidden_dim = 64
    seq_len = 10
    batch_size = 2

    # Initialize model, loss, and optimizer.
    model = HybridResonanceTransformer(vocab_size, hidden_dim)
    loss_fn = HyperchaosLoss()
    optimizer = optim.Adam(model.parameters(), lr=1e-3)

    # Dummy data: random token ids for both inputs and targets.
    input_ids = torch.randint(0, vocab_size, (batch_size, seq_len))
    targets = torch.randint(0, vocab_size, (batch_size, seq_len))

    print("\n[INIT] Model initialized.")
    print(f" Hidden Dim: {hidden_dim}")
    print(f" Layers: {len(model.layers)}")

    # Forward pass — hidden states are captured because the loss consumes
    # per-layer activations, not just the final logits.
    print("\n[FORWARD] Running forward pass...")
    logits, hidden_states = model(input_ids, output_hidden_states=True)
    print(f" Logits Shape: {logits.shape}")
    print(f" Hidden States Captured: {len(hidden_states)}")

    # Loss — assumes loss_fn returns a dict with 'total', 'task',
    # 'decoherence', and 'instability' tensor components (TODO: confirm
    # against HyperchaosLoss's definition).
    print("\n[LOSS] Computing Hyperchaos Loss...")
    losses = loss_fn(logits, targets, hidden_states)
    print(f" Total Loss: {losses['total'].item():.4f}")
    print(f" Task Loss: {losses['task'].item():.4f}")
    print(f" Decoherence Loss: {losses['decoherence'].item():.4f}")
    print(f" Instability Loss: {losses['instability'].item():.4f}")

    # Backward pass + optimizer step: proves gradients flow through every
    # component without shape/graph errors.
    print("\n[BACKWARD] Propagating gradients...")
    optimizer.zero_grad()
    losses['total'].backward()
    optimizer.step()
    print("[PASS] Gradient step successful. Architecture is valid.")
if __name__ == "__main__":
    # Script entry point: run the one-step architecture verification.
    verify_training_step()