File size: 6,679 Bytes
fa2e79e
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
    # Select which fine-tuning freeze preset to apply (change this value):
    #   0: no freeze — train everything
    #   1: freeze phone embeddings, first text enc attention layer,
    #      pos encoder pre-processing layer
    #   2: freeze phone embeddings, first 2 text enc attention layers,
    #      pos encoder pre-processing & first few layers, and initial decoder layers
    #   3: freeze only phone embeddings
    #   4: aggressive — freeze phone, all of text enc main encoder, pos encoder
    #      pre-processing, first 4 pos encoder layers; adapts decoder, flow,
    #      and later pos encoder layers
    #   5: freeze phone embeddings, first 2 text enc attention layers, pos encoder
    #      pre-processing, first pos encoder layers, and first decoder upsample block
    freeze_preset_selector = 5

    # Unwrap the inner model when net_g is wrapped (DataParallel/DDP-style
    # wrappers expose the real network as `.module`).
    net_g_mod = getattr(net_g, 'module', net_g)

    # Start from a fully trainable model; the chosen preset then selectively
    # freezes parts of it.
    for param in net_g_mod.parameters():
        param.requires_grad = True

    active_freezing = False

    def _freeze(module):
        # Disable gradient updates for every parameter of *module*.
        for param in module.parameters():
            param.requires_grad = False

    def _freeze_first(layers, count):
        # Freeze only the first *count* sub-modules of an indexed container.
        for index, layer in enumerate(layers):
            if index < count:
                _freeze(layer)

    def _freeze_posterior_enc(wavenet, count):
        # Freeze the first *count* in/res-skip layer pairs of the posterior
        # encoder's WaveNet stack (enc_q.enc).
        _freeze_first(wavenet.in_layers, count)
        _freeze_first(wavenet.res_skip_layers, count)

    # Apply the selected preset. Each branch announces itself, flags that
    # freezing is active, and freezes its subset of the generator.
    if freeze_preset_selector == 0:
        print("no layer freeze")
        active_freezing = False
    elif freeze_preset_selector == 1:
        print("freeze: phone embeddings, first text enc attention layer, pos encoder pre-processing")
        active_freezing = True
        _freeze(net_g_mod.enc_p.emb_phone)                     # phone embeddings
        _freeze_first(net_g_mod.enc_p.encoder.attn_layers, 1)  # first text enc attention layer
        _freeze(net_g_mod.enc_q.pre)                           # pos encoder pre-processing
    elif freeze_preset_selector == 2:
        print("freeze: phone, first 2 text enc attention layers, pos encoder pre-processing & first few layers, and initial layers of decoder")
        active_freezing = True
        _freeze(net_g_mod.enc_p.emb_phone)
        _freeze_first(net_g_mod.enc_p.encoder.attn_layers, 2)
        _freeze(net_g_mod.enc_q.pre)
        _freeze_posterior_enc(net_g_mod.enc_q.enc, 2)
        _freeze_first(net_g_mod.dec.ups, 1)                    # first decoder upsample block
    elif freeze_preset_selector == 3:
        print("freezing only phone embeddings")
        active_freezing = True
        _freeze(net_g_mod.enc_p.emb_phone)
    elif freeze_preset_selector == 4:
        print("freezing phone embeddings, all text enc main layers, pos encoder pre-processing, first 4 layers in pos encoder")
        active_freezing = True
        _freeze(net_g_mod.enc_p.emb_phone)
        _freeze(net_g_mod.enc_p.encoder)                       # entire text enc main encoder
        _freeze(net_g_mod.enc_q.pre)
        _freeze_posterior_enc(net_g_mod.enc_q.enc, 4)
    elif freeze_preset_selector == 5:
        print("freeze phone embedding, first 2 text enc attention layers, pos encoder pre-processing, first 3 layers in pos encoder, decoder upsample block")
        active_freezing = True
        _freeze(net_g_mod.enc_p.emb_phone)
        _freeze_first(net_g_mod.enc_p.encoder.attn_layers, 2)
        _freeze(net_g_mod.enc_q.pre)
        _freeze_posterior_enc(net_g_mod.enc_q.enc, 3)
        _freeze_first(net_g_mod.dec.ups, 1)
    else:
        # Include the offending value so a bad config is diagnosable from the log.
        raise ValueError(f"invalid preset: {freeze_preset_selector!r}")

    if active_freezing:
        # Report how much of the generator ended up frozen under the chosen preset.
        total_params = sum(p.numel() for p in net_g_mod.parameters())
        frozen_params = sum(
            p.numel() for p in net_g_mod.parameters() if not p.requires_grad
        )
        print(f"Freezing applied (Preset {freeze_preset_selector}): {frozen_params:,}/{total_params:,} parameters frozen.")
    else:
        # No preset active: nothing to count, everything stays trainable.
        print("no freezing applied")