allow_cache: true
batch_max_steps: 24000
batch_size: 6
config: conf/parallel_wavegan.v1.long.yaml
dev_dumpdir: dump/dev_all/norm
dev_feats_scp: null
dev_segments: null
dev_wav_scp: null
discriminator_grad_norm: 1
discriminator_optimizer_params:
  eps: 1.0e-06
  lr: 5.0e-05
  weight_decay: 0.0
discriminator_params:
  bias: true
  conv_channels: 64
  in_channels: 1
  kernel_size: 3
  layers: 10
  nonlinear_activation: LeakyReLU
  nonlinear_activation_params:
    negative_slope: 0.2
  out_channels: 1
  use_weight_norm: true
discriminator_scheduler_params:
  gamma: 0.5
  step_size: 200000
discriminator_train_start_steps: 100000
distributed: false
eval_interval_steps: 1000
fft_size: 2048
fmax: 7600
fmin: 80
format: hdf5
generator_grad_norm: 10
generator_optimizer_params:
  eps: 1.0e-06
  lr: 0.0001
  weight_decay: 0.0
generator_params:
  aux_channels: 80
  aux_context_window: 2
  dropout: 0.0
  gate_channels: 128
  in_channels: 1
  kernel_size: 3
  layers: 30
  out_channels: 1
  residual_channels: 64
  skip_channels: 64
  stacks: 3
  upsample_net: ConvInUpsampleNetwork
  upsample_params:
    upsample_scales:
    - 4
    - 5
    - 3
    - 5
  use_weight_norm: true
generator_scheduler_params:
  gamma: 0.5
  step_size: 200000
global_gain_scale: 1.0
hop_size: 300
lambda_adv: 4.0
log_interval_steps: 100
num_mels: 80
num_save_intermediate_results: 4
num_workers: 2
outdir: exp/train_nodev_all_vctk_parallel_wavegan.v1.long
pin_memory: true
pretrain: ''
rank: 0
remove_short_samples: true
resume: exp/train_nodev_all_vctk_parallel_wavegan.v1.long/checkpoint-970000steps.pkl
sampling_rate: 24000
save_interval_steps: 5000
stft_loss_params:
  fft_sizes:
  - 1024
  - 2048
  - 512
  hop_sizes:
  - 120
  - 240
  - 50
  win_lengths:
  - 600
  - 1200
  - 240
  window: hann_window
train_dumpdir: dump/train_nodev_all/norm
train_feats_scp: null
train_max_steps: 1000000
train_segments: null
train_wav_scp: null
trim_frame_size: 1024
trim_hop_size: 256
trim_silence: false
trim_threshold_in_db: 20
verbose: 1
version: 0.3.4
win_length: 1200
window: hann
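
The values above are the dumped training configuration for the Parallel WaveGAN v1 "long" VCTK recipe. As a quick sanity check, the sketch below (a minimal illustration, assuming PyYAML is installed and using a hypothetical path to the dumped file) loads the config and verifies that the product of the generator's upsample_scales (4 * 5 * 3 * 5 = 300) matches hop_size (300), since the upsampling network has to expand each mel frame to exactly one hop of waveform samples.

import yaml

# Hypothetical location of the dumped config; adjust to wherever this file lives.
CONFIG_PATH = "exp/train_nodev_all_vctk_parallel_wavegan.v1.long/config.yml"

with open(CONFIG_PATH) as f:
    config = yaml.safe_load(f)

# Product of the upsample scales must equal hop_size so that one auxiliary
# (mel) frame is expanded to exactly hop_size waveform samples.
upsample_factor = 1
for scale in config["generator_params"]["upsample_params"]["upsample_scales"]:
    upsample_factor *= scale

assert upsample_factor == config["hop_size"], (
    f"upsample factor {upsample_factor} != hop_size {config['hop_size']}"
)

# With sampling_rate 24000 and hop_size 300, the frame rate is 80 frames/s.
print(f"sampling_rate: {config['sampling_rate']} Hz, hop_size: {config['hop_size']}")
print(f"frame rate: {config['sampling_rate'] / config['hop_size']:.1f} frames/s")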