from torch import nn


# pylint: disable=dangerous-default-value
class ResStack(nn.Module):
    """Residual stack of dilated 1D convolutions.

    Each dilation contributes a (LeakyReLU, pad, dilated conv, LeakyReLU,
    pad, 1x1 conv) group, and a weight-normalised 1x1 shortcut carries the
    input around the whole stack.
    """

    def __init__(self, kernel, channel, padding, dilations=[1, 3, 5]):
        super().__init__()
        resstack = []
        for dilation in dilations:
            resstack += [
                nn.LeakyReLU(0.2),
                # Padding by the dilation keeps the length for a kernel-3
                # conv; larger kernels rely on `padding` below to catch up.
                nn.ReflectionPad1d(dilation),
                nn.utils.weight_norm(
                    nn.Conv1d(channel, channel, kernel_size=kernel, dilation=dilation)
                ),
                nn.LeakyReLU(0.2),
                nn.ReflectionPad1d(padding),
                nn.utils.weight_norm(nn.Conv1d(channel, channel, kernel_size=1)),
            ]
        self.resstack = nn.Sequential(*resstack)

        self.shortcut = nn.utils.weight_norm(nn.Conv1d(channel, channel, kernel_size=1))

    def forward(self, x):
        x1 = self.shortcut(x)
        x2 = self.resstack(x)
        return x1 + x2
    def remove_weight_norm(self):
        nn.utils.remove_weight_norm(self.shortcut)
        # Strip weight norm from every convolution in the stack (modules
        # 2, 5, 8, 11, 14, 17 for the default three dilations), without
        # hardcoding indices that break for other `dilations` lengths.
        for layer in self.resstack:
            if isinstance(layer, nn.Conv1d):
                nn.utils.remove_weight_norm(layer)


class MRF(nn.Module):
    """Multi-Receptive Field fusion: sums ResStacks with different kernels.

    The fixed paddings (0, 6, 12) make each stack length-preserving when
    the kernels are HiFi-GAN's defaults [3, 7, 11] with dilations [1, 3, 5].
    """

    def __init__(self, kernels, channel, dilations=[1, 3, 5]):  # pylint: disable=dangerous-default-value
        super().__init__()
        self.resblock1 = ResStack(kernels[0], channel, 0, dilations)
        self.resblock2 = ResStack(kernels[1], channel, 6, dilations)
        self.resblock3 = ResStack(kernels[2], channel, 12, dilations)

    def forward(self, x):
        x1 = self.resblock1(x)
        x2 = self.resblock2(x)
        x3 = self.resblock3(x)
        return x1 + x2 + x3

    def remove_weight_norm(self):
        self.resblock1.remove_weight_norm()
        self.resblock2.remove_weight_norm()
        self.resblock3.remove_weight_norm()
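

# --- Usage sketch (not in the original file) --------------------------------
# A quick shape check under the configuration the hardcoded paddings assume:
# HiFi-GAN's default kernels [3, 7, 11] and dilations [1, 3, 5]. The channel
# count and input length are arbitrary choices for the test.
if __name__ == "__main__":
    import torch

    mrf = MRF(kernels=[3, 7, 11], channel=32)
    x = torch.randn(1, 32, 100)  # (batch, channels, time)
    assert mrf(x).shape == x.shape  # each ResStack is length-preserving here
    mrf.remove_weight_norm()  # fold weight norm into plain weights for inference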