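# Layer-freezing presets for fine-tuning net_g: selectively sets
# requires_grad=False on parts of the generator (phone embeddings, text
# encoder attention layers, posterior encoder, decoder upsample blocks)
# depending on freeze_preset_selector. Assumes net_g has already been built
# (optionally wrapped in DataParallel/DistributedDataParallel).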
freeze_preset_selector = 5  # Change this to pick a preset
# 0: no freeze
# 1: freeze phone embeddings, first text encoder attention layer, posterior encoder pre-processing layer
# 2: freeze phone embeddings, first 2 text encoder attention layers, posterior encoder pre-processing + first 2 WaveNet layers, and first decoder upsample block
# 3: freeze only phone embeddings
# 4: aggressive - freeze phone embeddings, the entire text encoder, posterior encoder pre-processing, and first 4 WaveNet layers; adapts the decoder, flow, and later posterior encoder layers
# 5: freeze phone embeddings, first 2 text encoder attention layers, posterior encoder pre-processing, first 3 WaveNet layers, and first decoder upsample block
# Unwrap DataParallel/DistributedDataParallel if needed
net_g_mod = net_g.module if hasattr(net_g, 'module') else net_g

# Default all parameters to trainable, then selectively freeze
for param in net_g_mod.parameters():
    param.requires_grad = True

active_freezing = False

if freeze_preset_selector == 0:
    print("no layer freeze")
    active_freezing = False
elif freeze_preset_selector == 1:
    print("freeze: phone embeddings, first text encoder attention layer, posterior encoder pre-processing")
    active_freezing = True
    # phone embeddings
    for param in net_g_mod.enc_p.emb_phone.parameters():
        param.requires_grad = False
    # first text encoder attention layer
    for i, layer in enumerate(net_g_mod.enc_p.encoder.attn_layers):
        if i < 1:  # only freeze the first layer
            for param in layer.parameters():
                param.requires_grad = False
    # posterior encoder pre-processing layer
    for param in net_g_mod.enc_q.pre.parameters():
        param.requires_grad = False
elif freeze_preset_selector == 2:
    print("freeze: phone embeddings, first 2 text encoder attention layers, posterior encoder pre-processing + first 2 WaveNet layers, first decoder upsample block")
    active_freezing = True
    # phone embeddings
    for param in net_g_mod.enc_p.emb_phone.parameters():
        param.requires_grad = False
    # first 2 text encoder attention layers
    for i, layer in enumerate(net_g_mod.enc_p.encoder.attn_layers):
        if i < 2:  # freeze the first two layers
            for param in layer.parameters():
                param.requires_grad = False
    # posterior encoder pre-processing layer
    for param in net_g_mod.enc_q.pre.parameters():
        param.requires_grad = False
    # first layers of the posterior encoder's WaveNet
    wavenet_module = net_g_mod.enc_q.enc
    num_wavenet_layers_to_freeze = 2
    for i, layer in enumerate(wavenet_module.in_layers):
        if i < num_wavenet_layers_to_freeze:
            for param in layer.parameters():
                param.requires_grad = False
    for i, layer in enumerate(wavenet_module.res_skip_layers):
        if i < num_wavenet_layers_to_freeze:
            for param in layer.parameters():
                param.requires_grad = False
    # first upsample block of the decoder
    for i, upsample_layer in enumerate(net_g_mod.dec.ups):
        if i < 1:  # only the first upsampling layer
            for param in upsample_layer.parameters():
                param.requires_grad = False
elif freeze_preset_selector == 3:
    print("freezing only phone embeddings")
    active_freezing = True
    # only freeze the phone embeddings
    for param in net_g_mod.enc_p.emb_phone.parameters():
        param.requires_grad = False
elif freeze_preset_selector == 4:
    print("freezing phone embeddings, entire text encoder, posterior encoder pre-processing, first 4 WaveNet layers in posterior encoder")
    active_freezing = True
    # phone embeddings
    for param in net_g_mod.enc_p.emb_phone.parameters():
        param.requires_grad = False
    # entire text encoder main encoder
    for param in net_g_mod.enc_p.encoder.parameters():
        param.requires_grad = False
    # posterior encoder pre-processing layer
    for param in net_g_mod.enc_q.pre.parameters():
        param.requires_grad = False
    # first 4 layers of the posterior encoder's WaveNet
    wavenet_module_p4 = net_g_mod.enc_q.enc
    num_wavenet_layers_to_freeze_p4 = 4
    for i, layer in enumerate(wavenet_module_p4.in_layers):
        if i < num_wavenet_layers_to_freeze_p4:
            for param in layer.parameters():
                param.requires_grad = False
    for i, layer in enumerate(wavenet_module_p4.res_skip_layers):
        if i < num_wavenet_layers_to_freeze_p4:
            for param in layer.parameters():
                param.requires_grad = False
elif freeze_preset_selector == 5:
    print("freeze: phone embeddings, first 2 text encoder attention layers, posterior encoder pre-processing, first 3 WaveNet layers in posterior encoder, first decoder upsample block")
    active_freezing = True
    # phone embeddings
    for param in net_g_mod.enc_p.emb_phone.parameters():
        param.requires_grad = False
    # first 2 text encoder attention layers
    for i, layer in enumerate(net_g_mod.enc_p.encoder.attn_layers):
        if i < 2:
            for param in layer.parameters():
                param.requires_grad = False
    # posterior encoder pre-processing layer
    for param in net_g_mod.enc_q.pre.parameters():
        param.requires_grad = False
    # first 3 layers of the posterior encoder's WaveNet
    wavenet_module_p5 = net_g_mod.enc_q.enc
    num_wavenet_layers_to_freeze_p5 = 3
    for i, layer in enumerate(wavenet_module_p5.in_layers):
        if i < num_wavenet_layers_to_freeze_p5:
            for param in layer.parameters():
                param.requires_grad = False
    for i, layer in enumerate(wavenet_module_p5.res_skip_layers):
        if i < num_wavenet_layers_to_freeze_p5:
            for param in layer.parameters():
                param.requires_grad = False
    # first upsample block of the decoder
    for i, upsample_layer in enumerate(net_g_mod.dec.ups):
        if i < 1:
            for param in upsample_layer.parameters():
                param.requires_grad = False
else:
    raise ValueError(f"Invalid freeze_preset_selector: {freeze_preset_selector}")
if active_freezing:
    total_params = 0
    frozen_params = 0
    for name, param in net_g_mod.named_parameters():
        total_params += param.numel()
        if not param.requires_grad:
            frozen_params += param.numel()
    print(f"Freezing applied (Preset {freeze_preset_selector}): {frozen_params:,}/{total_params:,} parameters frozen.")
else:
    total_params = sum(p.numel() for p in net_g_mod.parameters())
    print(f"No freezing applied; all {total_params:,} parameters remain trainable.")