saeki committed
Commit: e23163d
Parent(s): a9849ea
fix
Browse files
- hifigan/LICENSE +21 -0
- hifigan/__init__.py +7 -0
- hifigan/__pycache__/__init__.cpython-38.pyc +0 -0
- hifigan/__pycache__/models.cpython-38.pyc +0 -0
- hifigan/config_melspec.json +39 -0
- hifigan/config_vocfeats.json +39 -0
- hifigan/models.py +133 -0
hifigan/LICENSE
ADDED
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2020 Jungil Kong
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
hifigan/__init__.py
ADDED
@@ -0,0 +1,7 @@
+from .models import Generator
+
+
+class AttrDict(dict):
+    def __init__(self, *args, **kwargs):
+        super(AttrDict, self).__init__(*args, **kwargs)
+        self.__dict__ = self
hifigan/__pycache__/__init__.cpython-38.pyc
ADDED
Binary file (588 Bytes).
hifigan/__pycache__/models.cpython-38.pyc
ADDED
Binary file (4.54 kB).
hifigan/config_melspec.json
ADDED
@@ -0,0 +1,39 @@
+{
+    "resblock": "1",
+    "num_gpus": 0,
+    "batch_size": 16,
+    "learning_rate": 0.0002,
+    "adam_b1": 0.8,
+    "adam_b2": 0.99,
+    "lr_decay": 0.999,
+    "seed": 1234,
+
+    "upsample_rates": [8,8,2,2],
+    "upsample_kernel_sizes": [16,16,4,4],
+    "upsample_initial_channel": 512,
+    "resblock_kernel_sizes": [3,7,11],
+    "resblock_dilation_sizes": [[1,3,5], [1,3,5], [1,3,5]],
+
+    "segment_size": 8192,
+    "num_mels": 80,
+    "num_freq": 1025,
+    "n_fft": 1024,
+    "hop_size": 256,
+    "win_size": 1024,
+
+    "sampling_rate": 22050,
+
+    "feat_order": 80,
+
+    "fmin": 0,
+    "fmax": 8000,
+    "fmax_for_loss": null,
+
+    "num_workers": 4,
+
+    "dist_config": {
+        "dist_backend": "nccl",
+        "dist_url": "tcp://localhost:54321",
+        "world_size": 1
+    }
+}
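One sanity check worth noting (not part of the commit itself): the product of upsample_rates must equal hop_size, since the Generator has to stretch each feature frame back into one hop of waveform. Here 8 * 8 * 2 * 2 = 256 = hop_size at 22050 Hz. A tiny sketch of the check:

import math

upsample_rates = [8, 8, 2, 2]   # from config_melspec.json
hop_size = 256

# Each ConvTranspose1d stage multiplies the time axis by its rate,
# so the product is the total upsampling factor per feature frame.
assert math.prod(upsample_rates) == hop_size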
hifigan/config_vocfeats.json
ADDED
@@ -0,0 +1,39 @@
+{
+    "resblock": "1",
+    "num_gpus": 0,
+    "batch_size": 16,
+    "learning_rate": 0.0002,
+    "adam_b1": 0.8,
+    "adam_b2": 0.99,
+    "lr_decay": 0.999,
+    "seed": 1234,
+
+    "upsample_rates": [8,8,2,2],
+    "upsample_kernel_sizes": [16,16,4,4],
+    "upsample_initial_channel": 512,
+    "resblock_kernel_sizes": [3,7,11],
+    "resblock_dilation_sizes": [[1,3,5], [1,3,5], [1,3,5]],
+
+    "segment_size": 8192,
+    "num_mels": 80,
+    "num_freq": 1025,
+    "n_fft": 1024,
+    "hop_size": 256,
+    "win_size": 1024,
+
+    "sampling_rate": 22050,
+
+    "feat_order": 42,
+
+    "fmin": 0,
+    "fmax": 8000,
+    "fmax_for_loss": null,
+
+    "num_workers": 4,
+
+    "dist_config": {
+        "dist_backend": "nccl",
+        "dist_url": "tcp://localhost:54321",
+        "world_size": 1
+    }
+}
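The two configs are identical except for feat_order: 80 for the 80-bin mel-spectrogram input versus 42 for the vocoder-feature input, which changes only the input channel count of Generator.conv_pre. A quick way to confirm this, assuming both files are on disk:

import json

with open("hifigan/config_melspec.json") as f:
    mel = json.load(f)
with open("hifigan/config_vocfeats.json") as f:
    voc = json.load(f)

# Expect exactly one differing key: feat_order (80 vs. 42).
print({k for k in mel if mel[k] != voc[k]})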
hifigan/models.py
ADDED
@@ -0,0 +1,133 @@
+import torch
+import torch.nn as nn
+import torch.nn.functional as F
+from torch.nn import Conv1d, ConvTranspose1d
+from torch.nn.utils import weight_norm, remove_weight_norm
+
+LRELU_SLOPE = 0.1
+
+def init_weights(m, mean=0.0, std=0.01):
+    classname = m.__class__.__name__
+    if classname.find("Conv") != -1:
+        m.weight.data.normal_(mean, std)
+
+
+def get_padding(kernel_size, dilation=1):
+    return int((kernel_size * dilation - dilation) / 2)
+
+
+class ResBlock1(torch.nn.Module):
+    def __init__(self, h, channels, kernel_size=3, dilation=(1, 3, 5)):
+        super(ResBlock1, self).__init__()
+        self.h = h
+        self.convs1 = nn.ModuleList([
+            weight_norm(Conv1d(channels, channels, kernel_size, 1, dilation=dilation[0],
+                               padding=get_padding(kernel_size, dilation[0]))),
+            weight_norm(Conv1d(channels, channels, kernel_size, 1, dilation=dilation[1],
+                               padding=get_padding(kernel_size, dilation[1]))),
+            weight_norm(Conv1d(channels, channels, kernel_size, 1, dilation=dilation[2],
+                               padding=get_padding(kernel_size, dilation[2])))
+        ])
+        self.convs1.apply(init_weights)
+
+        self.convs2 = nn.ModuleList([
+            weight_norm(Conv1d(channels, channels, kernel_size, 1, dilation=1,
+                               padding=get_padding(kernel_size, 1))),
+            weight_norm(Conv1d(channels, channels, kernel_size, 1, dilation=1,
+                               padding=get_padding(kernel_size, 1))),
+            weight_norm(Conv1d(channels, channels, kernel_size, 1, dilation=1,
+                               padding=get_padding(kernel_size, 1)))
+        ])
+        self.convs2.apply(init_weights)
+
+    def forward(self, x):
+        for c1, c2 in zip(self.convs1, self.convs2):
+            xt = F.leaky_relu(x, LRELU_SLOPE)
+            xt = c1(xt)
+            xt = F.leaky_relu(xt, LRELU_SLOPE)
+            xt = c2(xt)
+            x = xt + x
+        return x
+
+    def remove_weight_norm(self):
+        for l in self.convs1:
+            remove_weight_norm(l)
+        for l in self.convs2:
+            remove_weight_norm(l)
+
+
+class ResBlock2(torch.nn.Module):
+    def __init__(self, h, channels, kernel_size=3, dilation=(1, 3)):
+        super(ResBlock2, self).__init__()
+        self.h = h
+        self.convs = nn.ModuleList([
+            weight_norm(Conv1d(channels, channels, kernel_size, 1, dilation=dilation[0],
+                               padding=get_padding(kernel_size, dilation[0]))),
+            weight_norm(Conv1d(channels, channels, kernel_size, 1, dilation=dilation[1],
+                               padding=get_padding(kernel_size, dilation[1])))
+        ])
+        self.convs.apply(init_weights)
+
+    def forward(self, x):
+        for c in self.convs:
+            xt = F.leaky_relu(x, LRELU_SLOPE)
+            xt = c(xt)
+            x = xt + x
+        return x
+
+    def remove_weight_norm(self):
+        for l in self.convs:
+            remove_weight_norm(l)
+
+
+class Generator(torch.nn.Module):
+    def __init__(self, h):
+        super(Generator, self).__init__()
+        self.h = h
+        self.num_kernels = len(h.resblock_kernel_sizes)
+        self.num_upsamples = len(h.upsample_rates)
+        self.conv_pre = weight_norm(Conv1d(h.feat_order, h.upsample_initial_channel, 7, 1, padding=3))
+        resblock = ResBlock1 if h.resblock == '1' else ResBlock2
+
+        self.ups = nn.ModuleList()
+        for i, (u, k) in enumerate(zip(h.upsample_rates, h.upsample_kernel_sizes)):
+            self.ups.append(weight_norm(
+                ConvTranspose1d(h.upsample_initial_channel//(2**i), h.upsample_initial_channel//(2**(i+1)),
+                                k, u, padding=(k-u)//2)))
+
+        self.resblocks = nn.ModuleList()
+        for i in range(len(self.ups)):
+            ch = h.upsample_initial_channel//(2**(i+1))
+            for j, (k, d) in enumerate(zip(h.resblock_kernel_sizes, h.resblock_dilation_sizes)):
+                self.resblocks.append(resblock(h, ch, k, d))
+
+        self.conv_post = weight_norm(Conv1d(ch, 1, 7, 1, padding=3))
+        self.ups.apply(init_weights)
+        self.conv_post.apply(init_weights)
+
+    def forward(self, x):
+        x = self.conv_pre(x)
+        for i in range(self.num_upsamples):
+            x = F.leaky_relu(x, LRELU_SLOPE)
+            x = self.ups[i](x)
+            xs = None
+            for j in range(self.num_kernels):
+                if xs is None:
+                    xs = self.resblocks[i*self.num_kernels+j](x)
+                else:
+                    xs += self.resblocks[i*self.num_kernels+j](x)
+            x = xs / self.num_kernels
+        x = F.leaky_relu(x)
+        x = self.conv_post(x)
+        x = torch.tanh(x)
+
+        return x
+
+    def remove_weight_norm(self):
+        print('Removing weight norm...')
+        for l in self.ups:
+            remove_weight_norm(l)
+        for l in self.resblocks:
+            l.remove_weight_norm()
+        remove_weight_norm(self.conv_pre)
+        remove_weight_norm(self.conv_post)
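A hedged end-to-end sketch of how the added pieces fit together; the random input stands in for real features and no trained checkpoint is loaded, so the output is noise, but the shapes show the contract: (batch, feat_order, frames) in, (batch, 1, frames * 256) samples out at 22050 Hz.

import json

import torch

from hifigan import AttrDict, Generator

with open("hifigan/config_melspec.json") as f:
    h = AttrDict(json.load(f))

generator = Generator(h)
generator.eval()
generator.remove_weight_norm()  # fold weight norm into the conv weights for inference

mel = torch.randn(1, h.feat_order, 100)  # 100 frames of 80-dim mel features
with torch.no_grad():
    wav = generator(mel)
print(wav.shape)  # torch.Size([1, 1, 25600]) == 100 frames * 256-sample hop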