mirror of
https://github.com/brycedrennan/imaginAIry
synced 2024-11-17 09:25:47 +00:00
35ac8d64d7
Trying to get rid of tb-nightly dependency and any other unnecessary dependencies.
78 lines
2.5 KiB
Python
78 lines
2.5 KiB
Python
# from https://github.com/XPixelGroup/BasicSR/blob/b0ee3c8414bd39da34f0216cd6bfd8110b85da60/basicsr/archs/arch_util.py
|
|
# with removals and slight modifications
|
|
import logging
|
|
|
|
import torch
|
|
from torch import nn
|
|
from torch.nn import init
|
|
from torch.nn.modules.batchnorm import _BatchNorm
|
|
|
|
logger = logging.getLogger(__name__)
|
|
|
|
|
|
@torch.no_grad()
def default_init_weights(module_list, scale=1, bias_fill=0, **kwargs):
    """Initialize network weights in place.

    Conv2d and Linear weights get Kaiming-normal init scaled by ``scale``;
    BatchNorm weights are set to 1. All biases are filled with ``bias_fill``.

    Args:
        module_list (list[nn.Module] | nn.Module): Modules to be initialized.
        scale (float): Scale initialized weights, especially for residual
            blocks. Default: 1.
        bias_fill (float): The value to fill bias. Default: 0
        kwargs (dict): Other arguments for the initialization function
            (forwarded to ``init.kaiming_normal_``).
    """
    modules = module_list if isinstance(module_list, list) else [module_list]
    for root in modules:
        for layer in root.modules():
            # Conv2d and Linear share the exact same treatment.
            if isinstance(layer, (nn.Conv2d, nn.Linear)):
                init.kaiming_normal_(layer.weight, **kwargs)
                layer.weight.data *= scale
            elif isinstance(layer, _BatchNorm):
                init.constant_(layer.weight, 1)
            else:
                continue  # leave every other layer type untouched
            if layer.bias is not None:
                layer.bias.data.fill_(bias_fill)
|
|
|
|
|
|
def make_layer(basic_block, num_basic_block, **kwarg):
    """Make layers by stacking the same blocks.

    Args:
        basic_block (nn.module): nn.module class for basic block.
        num_basic_block (int): number of blocks.

    Returns:
        nn.Sequential: Stacked blocks in nn.Sequential.
    """
    # Each block is constructed independently with the same kwargs.
    blocks = (basic_block(**kwarg) for _ in range(num_basic_block))
    return nn.Sequential(*blocks)
|
|
|
|
|
|
# TODO: may write a cpp file
|
|
def pixel_unshuffle(x, scale):
    """Pixel unshuffle: fold spatial resolution into channels.

    Each non-overlapping ``scale x scale`` spatial patch is moved into the
    channel dimension (the inverse of pixel shuffle).

    Args:
        x (Tensor): Input feature with shape (b, c, hh, hw).
        scale (int): Downsample ratio.

    Returns:
        Tensor: the pixel unshuffled feature with shape
            (b, c * scale**2, hh // scale, hw // scale).

    Raises:
        ValueError: If either spatial dimension is not divisible by ``scale``.
    """
    b, c, hh, hw = x.size()
    # Explicit exception instead of a bare `assert`, which is silently
    # stripped under `python -O` and carries no diagnostic message.
    if hh % scale != 0 or hw % scale != 0:
        raise ValueError(
            f"Spatial dims ({hh}, {hw}) must be divisible by scale={scale}"
        )
    out_channel = c * (scale**2)
    h = hh // scale
    w = hw // scale
    # (b, c, h, s, w, s) -> permute to (b, c, s, s, h, w) so each patch's
    # pixels become contiguous channels after the final reshape.
    x_view = x.view(b, c, h, scale, w, scale)
    return x_view.permute(0, 1, 3, 5, 2, 4).reshape(b, out_channel, h, w)
|