# aithing / change_length.py
# Provenance: Aobangaming's Hugging Face upload ("Upload 8 files", commit 6b8206f, verified).
import torch
def resize_pe(pe, new_len):
    """Return `pe` resized to `new_len` rows along dim 0.

    Expanding repeats the last position's weights for every new slot;
    trimming slices the tensor down. `pe` is assumed to be 3-D,
    shaped (seq_len, 1, d_model) — TODO confirm against the model.

    Args:
        pe: positional-encoding tensor, indexed (position, batch, dim).
        new_len: desired sequence length (must be positive).

    Returns:
        A new tensor of shape (new_len, *pe.shape[1:]).
    """
    old_len = pe.shape[0]
    if new_len > old_len:
        # EXPANDING: copy the last known position weights into the new slots.
        extra_padding = pe[-1:, :, :].repeat(new_len - old_len, 1, 1)
        return torch.cat([pe, extra_padding], dim=0)
    # TRIMMING (or no-op when equal): cut off the extra rows.
    return pe[:new_len, :, :]


def main():
    """Interactively resize the positional encoding stored in aoban_weights.pth.

    Loads the checkpoint, prompts for a new max sequence length, resizes
    `pos_encoder.pe` via `resize_pe`, and saves the checkpoint back in place.
    NOTE(review): this overwrites the original file with no backup — consider
    writing to a temp file first.
    """
    # 1. Load the current brain.
    checkpoint = torch.load("aoban_weights.pth", map_location='cpu')
    old_pe = checkpoint['pos_encoder.pe']
    old_len = old_pe.shape[0]
    print(f"[System] Current Max Sequence Length is: {old_len}")

    new_len_input = input("WHAT SEQ LENGTH WOULD YOU LIKE? ")
    try:
        new_len = int(new_len_input)
    except ValueError:
        print("[System] Error: Please enter a valid number!")
        return

    # Guard against 0/negative lengths, which would silently destroy the
    # positional encoding and overwrite the weights file.
    if new_len <= 0:
        print("[System] Error: Please enter a valid number!")
        return

    # 2. Resize (or leave untouched when the length already matches).
    if new_len > old_len:
        checkpoint['pos_encoder.pe'] = resize_pe(old_pe, new_len)
        print(f"[System] Expanded from {old_len} to {new_len}.")
    elif new_len < old_len:
        checkpoint['pos_encoder.pe'] = resize_pe(old_pe, new_len)
        print(f"[System] Trimmed from {old_len} back to {new_len}.")
    else:
        print("[System] No change needed. Length is already correct.")

    # 3. Save the modified brain (original behavior: saved even when unchanged).
    torch.save(checkpoint, "aoban_weights.pth")
    print("[System] Surgery Complete. You can now restart your AI.")


if __name__ == "__main__":
    main()