dr87 committed
Commit fa2e79e · verified · 1 parent: 885c4af

Upload freezestuff.py

Files changed (1): freezestuff.py +155 -0
freezestuff.py ADDED
@@ -0,0 +1,155 @@
+ freeze_preset_selector = 5  # change this to pick a preset
+ # 0: no freeze
+ # 1: freeze phone embeddings, first text enc attention layer, pos encoder pre-processing layer
+ # 2: freeze phone embeddings, first 2 text enc attention layers, pos encoder pre-processing & first few layers, and initial layers of decoder
+ # 3: freeze only phone embeddings
+ # 4: aggressive - freeze phone embeddings, all of the text enc main encoder, pos encoder pre-processing, first 4 layers in pos encoder; adapts decoder, flow, and later pos encoder layers
+ # 5: freeze phone embeddings, first 2 text enc attention layers, pos encoder pre-processing, first 3 layers in pos encoder, and first decoder upsample block
+
+ # Unwrap DataParallel / DistributedDataParallel if present
+ net_g_mod = net_g.module if hasattr(net_g, 'module') else net_g
+
+ # Default all parameters to trainable, then selectively freeze
+ for param in net_g_mod.parameters():
+     param.requires_grad = True
+
+ active_freezing = False
+
+ if freeze_preset_selector == 0:
+     print("no layer freeze")
+     active_freezing = False
+ elif freeze_preset_selector == 1:
+     print("freeze: phone embeddings, first text enc attention layer, pos encoder pre-processing")
+     active_freezing = True
+     # phone embeddings
+     for param in net_g_mod.enc_p.emb_phone.parameters():
+         param.requires_grad = False
+
+     # first text enc attention layer
+     for i, layer in enumerate(net_g_mod.enc_p.encoder.attn_layers):
+         if i < 1:  # only freeze the first layer
+             for param in layer.parameters():
+                 param.requires_grad = False
+
+     # pre-processing layer of pos encoder
+     for param in net_g_mod.enc_q.pre.parameters():
+         param.requires_grad = False
+
+ elif freeze_preset_selector == 2:
+     print("freeze: phone embeddings, first 2 text enc attention layers, pos encoder pre-processing & first few layers, and initial layers of decoder")
+     active_freezing = True
+     # phone embeddings
+     for param in net_g_mod.enc_p.emb_phone.parameters():
+         param.requires_grad = False
+
+     # first 2 text enc attention layers
+     for i, layer in enumerate(net_g_mod.enc_p.encoder.attn_layers):
+         if i < 2:  # freeze first two layers
+             for param in layer.parameters():
+                 param.requires_grad = False
+
+     # pos encoder pre-processing layer
+     for param in net_g_mod.enc_q.pre.parameters():
+         param.requires_grad = False
+
+     # first few layers of the PosteriorEncoder's WaveNet
+     wavenet_module = net_g_mod.enc_q.enc
+     num_wavenet_layers_to_freeze = 2  # number of WaveNet layers to freeze
+
+     for i, layer in enumerate(wavenet_module.in_layers):
+         if i < num_wavenet_layers_to_freeze:
+             for param in layer.parameters():
+                 param.requires_grad = False
+
+     for i, layer in enumerate(wavenet_module.res_skip_layers):
+         if i < num_wavenet_layers_to_freeze:
+             for param in layer.parameters():
+                 param.requires_grad = False
+
+     # freeze initial layers of the decoder
+     for i, upsample_layer in enumerate(net_g_mod.dec.ups):
+         if i < 1:  # only the first upsampling block
+             for param in upsample_layer.parameters():
+                 param.requires_grad = False
+
+ elif freeze_preset_selector == 3:
+     print("freezing only phone embeddings")
+     active_freezing = True
+     # only freeze phone embeddings
+     for param in net_g_mod.enc_p.emb_phone.parameters():
+         param.requires_grad = False
+
+ elif freeze_preset_selector == 4:
+     print("freezing phone embeddings, all text enc main layers, pos encoder pre-processing, first 4 layers in pos encoder")
+     active_freezing = True
+     # phone embeddings
+     for param in net_g_mod.enc_p.emb_phone.parameters():
+         param.requires_grad = False
+
+     # entire text enc main encoder
+     for param in net_g_mod.enc_p.encoder.parameters():
+         param.requires_grad = False
+
+     # pos encoder pre-processing layer
+     for param in net_g_mod.enc_q.pre.parameters():
+         param.requires_grad = False
+
+     # first 4 layers of the PosteriorEncoder's WaveNet
+     wavenet_module_p4 = net_g_mod.enc_q.enc
+     num_wavenet_layers_to_freeze_p4 = 4
+     for i, layer in enumerate(wavenet_module_p4.in_layers):
+         if i < num_wavenet_layers_to_freeze_p4:
+             for param in layer.parameters():
+                 param.requires_grad = False
+     for i, layer in enumerate(wavenet_module_p4.res_skip_layers):
+         if i < num_wavenet_layers_to_freeze_p4:
+             for param in layer.parameters():
+                 param.requires_grad = False
+
+ elif freeze_preset_selector == 5:
+     print("freeze: phone embeddings, first 2 text enc attention layers, pos encoder pre-processing, first 3 layers in pos encoder, first decoder upsample block")
+     active_freezing = True
+     # phone embeddings
+     for param in net_g_mod.enc_p.emb_phone.parameters():
+         param.requires_grad = False
+
+     # first 2 text enc attention layers
+     for i, layer in enumerate(net_g_mod.enc_p.encoder.attn_layers):
+         if i < 2:
+             for param in layer.parameters():
+                 param.requires_grad = False
+
+     # pos encoder pre-processing layer
+     for param in net_g_mod.enc_q.pre.parameters():
+         param.requires_grad = False
+
+     # first 3 layers of the PosteriorEncoder's WaveNet
+     wavenet_module_p5 = net_g_mod.enc_q.enc
+     num_wavenet_layers_to_freeze_p5 = 3
+     for i, layer in enumerate(wavenet_module_p5.in_layers):
+         if i < num_wavenet_layers_to_freeze_p5:
+             for param in layer.parameters():
+                 param.requires_grad = False
+     for i, layer in enumerate(wavenet_module_p5.res_skip_layers):
+         if i < num_wavenet_layers_to_freeze_p5:
+             for param in layer.parameters():
+                 param.requires_grad = False
+
+     # first decoder upsample block
+     for i, upsample_layer in enumerate(net_g_mod.dec.ups):
+         if i < 1:
+             for param in upsample_layer.parameters():
+                 param.requires_grad = False
+
+ else:
+     raise ValueError(f"invalid freeze_preset_selector: {freeze_preset_selector}")
+
+ if active_freezing:
+     total_params = 0
+     frozen_params = 0
+     for name, param in net_g_mod.named_parameters():
+         total_params += param.numel()
+         if not param.requires_grad:
+             frozen_params += param.numel()
+     print(f"Freezing applied (Preset {freeze_preset_selector}): {frozen_params:,}/{total_params:,} parameters frozen.")
+ else:
+     total_params = sum(p.numel() for p in net_g_mod.parameters())
+     print(f"No freezing applied: all {total_params:,} parameters remain trainable.")