import torch.nn as nn

from utils.utils import instantiate_from_config


def disabled_train(self, mode=True):
    """Overwrite model.train with this function to make sure train/eval mode
    does not change anymore."""
    return self
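
# Usage sketch (assumption, not part of this module): bind disabled_train as a
# frozen module's `train` method so later .train() calls become no-ops, e.g.:
#
#   import types
#   frozen = nn.Linear(4, 4).eval()
#   frozen.train = types.MethodType(disabled_train, frozen)
#   frozen.train()  # no-op: the module stays in eval mode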


def zero_module(module):
    """
    Zero out the parameters of a module and return it.
    """
    for p in module.parameters():
        p.detach().zero_()
    return module
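
# Usage sketch (assumption): diffusion U-Nets commonly zero-initialize the last
# projection of a residual or attention block so the block starts as an
# identity mapping, e.g.:
#
#   out_proj = zero_module(conv_nd(2, 64, 64, 3, padding=1))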


def scale_module(module, scale):
    """
    Scale the parameters of a module and return it.
    """
    for p in module.parameters():
        p.detach().mul_(scale)
    return module
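
# Usage sketch (assumption; the 1/sqrt(2) factor is a hypothetical choice for
# damping a residual branch at init):
#
#   import math
#   branch = scale_module(nn.Conv2d(64, 64, 3, padding=1), 1 / math.sqrt(2))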


def conv_nd(dims, *args, **kwargs):
    """
    Create a 1D, 2D, or 3D convolution module.
    """
    if dims == 1:
        return nn.Conv1d(*args, **kwargs)
    elif dims == 2:
        return nn.Conv2d(*args, **kwargs)
    elif dims == 3:
        return nn.Conv3d(*args, **kwargs)
    raise ValueError(f"unsupported dimensions: {dims}")
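
# Usage sketch: one call site can serve 1D/2D/3D models by threading `dims`
# through; remaining args are forwarded to the underlying nn.Conv{1,2,3}d, e.g.:
#
#   conv = conv_nd(2, 3, 16, 3, padding=1)  # same as nn.Conv2d(3, 16, 3, padding=1)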


def linear(*args, **kwargs):
    """
    Create a linear module.
    """
    return nn.Linear(*args, **kwargs)


def avg_pool_nd(dims, *args, **kwargs):
    """
    Create a 1D, 2D, or 3D average pooling module.
    """
    if dims == 1:
        return nn.AvgPool1d(*args, **kwargs)
    elif dims == 2:
        return nn.AvgPool2d(*args, **kwargs)
    elif dims == 3:
        return nn.AvgPool3d(*args, **kwargs)
    raise ValueError(f"unsupported dimensions: {dims}")
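
# Usage sketch (assumption): dimension-agnostic downsampling; `dims` would come
# from the surrounding model config, e.g.:
#
#   pool = avg_pool_nd(dims, kernel_size=2, stride=2)  # halves each spatial dim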


def nonlinearity(type='silu'):
    """Create an activation module by name: 'silu' or 'leaky_relu'."""
    if type == 'silu':
        return nn.SiLU()
    elif type == 'leaky_relu':
        return nn.LeakyReLU()
    raise ValueError(f"unsupported nonlinearity: {type}")
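
# Usage sketch: the activation can then be chosen from a config string, e.g.:
#
#   act = nonlinearity('silu')  # returns nn.SiLU()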


class GroupNormSpecific(nn.GroupNorm):
    """GroupNorm that computes in float32 and casts back to the input dtype."""

    def forward(self, x):
        return super().forward(x.float()).type(x.dtype)


def normalization(channels, num_groups=32):
    """
    Make a standard normalization layer.

    :param channels: number of input channels.
    :param num_groups: number of groups for GroupNorm (default 32).
    :return: an nn.Module for normalization.
    """
    return GroupNormSpecific(num_groups, channels)
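
# Usage sketch (assumption): the float32 forward in GroupNormSpecific keeps the
# normalization statistics numerically stable under mixed precision, e.g.:
#
#   norm = normalization(64)   # GroupNorm with 32 groups over 64 channels
#   y = norm(x.half())         # computed in fp32, returned as float16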


class HybridConditioner(nn.Module):
    """Builds a concat conditioner and a cross-attention conditioner from
    configs and routes each input through its own encoder."""

    def __init__(self, c_concat_config, c_crossattn_config):
        super().__init__()
        self.concat_conditioner = instantiate_from_config(c_concat_config)
        self.crossattn_conditioner = instantiate_from_config(c_crossattn_config)

    def forward(self, c_concat, c_crossattn):
        c_concat = self.concat_conditioner(c_concat)
        c_crossattn = self.crossattn_conditioner(c_crossattn)
        return {'c_concat': [c_concat], 'c_crossattn': [c_crossattn]}
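
# Usage sketch (assumption): configs follow the usual instantiate_from_config
# convention {'target': 'dotted.path.to.Class', 'params': {...}}; nn.Identity
# stands in for real encoders, and `mask` / `text_emb` are hypothetical inputs:
#
#   conditioner = HybridConditioner(
#       c_concat_config={'target': 'torch.nn.Identity'},
#       c_crossattn_config={'target': 'torch.nn.Identity'},
#   )
#   cond = conditioner(c_concat=mask, c_crossattn=text_emb)
#   # -> {'c_concat': [mask], 'c_crossattn': [text_emb]}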