import importlib
import logging
import os
from contextlib import contextmanager
from functools import lru_cache
from typing import List, Optional

import torch
from torch import Tensor

logger = logging.getLogger(__name__)


@lru_cache()
def get_device():
    """Return the best available torch device name: 'cuda', 'mps', or 'cpu'."""
    if torch.cuda.is_available():
        return "cuda"
    elif torch.backends.mps.is_available():
        return "mps"
    else:
        return "cpu"
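
# Example (illustrative): place a model on whichever backend get_device() detects.
#
#     model = model.to(get_device())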


def log_params(model):
    """Log the model's total parameter count in millions."""
    total_params = sum(p.numel() for p in model.parameters())
    logger.info(f"{model.__class__.__name__} has {total_params * 1.e-6:.2f} M params.")


def instantiate_from_config(config):
    """Build the object named by config["target"], passing config["params"] as kwargs."""
    if "target" not in config:
        if config == "__is_first_stage__":
            return None
        elif config == "__is_unconditional__":
            return None
        raise KeyError("Expected key `target` to instantiate.")
    return get_obj_from_str(config["target"])(**config.get("params", dict()))
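
# Example (illustrative sketch): a config naming an importable class is resolved and
# constructed with its params. `my_pkg.models.MyModel` is a hypothetical target here.
#
#     config = {"target": "my_pkg.models.MyModel", "params": {"hidden_dim": 128}}
#     model = instantiate_from_config(config)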


def get_obj_from_str(string, reload=False):
    """Import and return the object named by a dotted path such as "package.module.ClassName"."""
    module, cls = string.rsplit(".", 1)
    if reload:
        module_imp = importlib.import_module(module)
        importlib.reload(module_imp)
    return getattr(importlib.import_module(module, package=None), cls)
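
# Example (illustrative): resolve a dotted path from the standard library to the class itself.
#
#     cls = get_obj_from_str("collections.OrderedDict")
#     ordered = cls(a=1, b=2)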


from torch.overrides import has_torch_function_variadic, handle_torch_function


def _fixed_layer_norm(
    input: Tensor,
    normalized_shape: List[int],
    weight: Optional[Tensor] = None,
    bias: Optional[Tensor] = None,
    eps: float = 1e-5,
) -> Tensor:
    r"""Apply Layer Normalization over the last dimensions given by `normalized_shape`.

    Identical to :func:`torch.nn.functional.layer_norm` except that the input is made
    contiguous first, as a workaround for layer_norm failing on non-contiguous tensors
    (see ``fix_torch_nn_layer_norm``). See :class:`~torch.nn.LayerNorm` for details.
    """
    if has_torch_function_variadic(input, weight, bias):
        return handle_torch_function(
            _fixed_layer_norm,
            (input, weight, bias),
            input,
            normalized_shape,
            weight=weight,
            bias=bias,
            eps=eps,
        )
    # The only change from torch.nn.functional.layer_norm: call .contiguous() on the input.
    return torch.layer_norm(
        input.contiguous(),
        normalized_shape,
        weight,
        bias,
        eps,
        torch.backends.cudnn.enabled,
    )


@contextmanager
def fix_torch_nn_layer_norm():
    """Temporarily replace `torch.nn.functional.layer_norm` with `_fixed_layer_norm`.

    https://github.com/CompVis/stable-diffusion/issues/25#issuecomment-1221416526
    """
    from torch.nn import functional

    orig_function = functional.layer_norm
    functional.layer_norm = _fixed_layer_norm
    try:
        yield
    finally:
        functional.layer_norm = orig_function
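
# Example usage (illustrative): patch layer_norm only for the duration of a forward pass;
# the original function is restored afterwards even if an exception is raised.
#
#     with fix_torch_nn_layer_norm():
#         output = model(input_tensor)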