From d4d3a16689b2ae1076b347d190b941bfbb90c129 Mon Sep 17 00:00:00 2001
From: blessedcoolant <54517381+blessedcoolant@users.noreply.github.com>
Date: Fri, 10 May 2024 20:18:54 +0530
Subject: [PATCH 1/2] fix: fix seamless

---
 invokeai/backend/stable_diffusion/seamless.py | 92 ++++++-------------
 1 file changed, 28 insertions(+), 64 deletions(-)

diff --git a/invokeai/backend/stable_diffusion/seamless.py b/invokeai/backend/stable_diffusion/seamless.py
index 2e22c19d0ef..a004a4a9d14 100644
--- a/invokeai/backend/stable_diffusion/seamless.py
+++ b/invokeai/backend/stable_diffusion/seamless.py
@@ -1,89 +1,53 @@
 from __future__ import annotations
 
 from contextlib import contextmanager
-from typing import Callable, List, Union
+from typing import Callable, List, Optional, Tuple, Union
 
+import torch
 import torch.nn as nn
 from diffusers.models.autoencoders.autoencoder_kl import AutoencoderKL
 from diffusers.models.autoencoders.autoencoder_tiny import AutoencoderTiny
+from diffusers.models.lora import LoRACompatibleConv
 from diffusers.models.unets.unet_2d_condition import UNet2DConditionModel
 
 
-def _conv_forward_asymmetric(self, input, weight, bias):
-    """
-    Patch for Conv2d._conv_forward that supports asymmetric padding
-    """
-    working = nn.functional.pad(input, self.asymmetric_padding["x"], mode=self.asymmetric_padding_mode["x"])
-    working = nn.functional.pad(working, self.asymmetric_padding["y"], mode=self.asymmetric_padding_mode["y"])
-    return nn.functional.conv2d(
-        working,
-        weight,
-        bias,
-        self.stride,
-        nn.modules.utils._pair(0),
-        self.dilation,
-        self.groups,
-    )
-
-
 @contextmanager
 def set_seamless(model: Union[UNet2DConditionModel, AutoencoderKL, AutoencoderTiny], seamless_axes: List[str]):
     if not seamless_axes:
         yield
         return
 
-    # Callable: (input: Tensor, weight: Tensor, bias: Optional[Tensor]) -> Tensor
-    to_restore: list[tuple[nn.Conv2d | nn.ConvTranspose2d, Callable]] = []
-    try:
-        # Hard coded to skip down block layers, allowing for seamless tiling at the expense of prompt adherence
-        skipped_layers = 1
-        for m_name, m in model.named_modules():
-            if not isinstance(m, (nn.Conv2d, nn.ConvTranspose2d)):
-                continue
+    # override conv_forward
+    # https://github.com/huggingface/diffusers/issues/556#issuecomment-1993287019
+    def _conv_forward_asymmetric(self, input: torch.Tensor, weight: torch.Tensor, bias: Optional[torch.Tensor] = None):
+        self.paddingX = (self._reversed_padding_repeated_twice[0], self._reversed_padding_repeated_twice[1], 0, 0)
+        self.paddingY = (0, 0, self._reversed_padding_repeated_twice[2], self._reversed_padding_repeated_twice[3])
+        working = torch.nn.functional.pad(input, self.paddingX, mode=x_mode)
+        working = torch.nn.functional.pad(working, self.paddingY, mode=y_mode)
+        return torch.nn.functional.conv2d(
+            working, weight, bias, self.stride, torch.nn.modules.utils._pair(0), self.dilation, self.groups
+        )
 
-            if isinstance(model, UNet2DConditionModel) and m_name.startswith("down_blocks.") and ".resnets." in m_name:
-                # down_blocks.1.resnets.1.conv1
-                _, block_num, _, resnet_num, submodule_name = m_name.split(".")
-                block_num = int(block_num)
-                resnet_num = int(resnet_num)
+    original_layers: List[Tuple[nn.Conv2d | nn.ConvTranspose2d, Callable]] = []
 
-                if block_num >= len(model.down_blocks) - skipped_layers:
-                    continue
-
-                # Skip the second resnet (could be configurable)
-                if resnet_num > 0:
-                    continue
+    try:
+        x_mode = "circular" if "x" in seamless_axes else "constant"
+        y_mode = "circular" if "y" in seamless_axes else "constant"
 
-                # Skip Conv2d layers (could be configurable)
-                if submodule_name == "conv2":
-                    continue
+        conv_layers: List[torch.nn.Conv2d] = []
 
-            m.asymmetric_padding_mode = {}
-            m.asymmetric_padding = {}
-            m.asymmetric_padding_mode["x"] = "circular" if ("x" in seamless_axes) else "constant"
-            m.asymmetric_padding["x"] = (
-                m._reversed_padding_repeated_twice[0],
-                m._reversed_padding_repeated_twice[1],
-                0,
-                0,
-            )
-            m.asymmetric_padding_mode["y"] = "circular" if ("y" in seamless_axes) else "constant"
-            m.asymmetric_padding["y"] = (
-                0,
-                0,
-                m._reversed_padding_repeated_twice[2],
-                m._reversed_padding_repeated_twice[3],
-            )
+        for module in model.modules():
+            if isinstance(module, torch.nn.Conv2d):
+                conv_layers.append(module)
 
-            to_restore.append((m, m._conv_forward))
-            m._conv_forward = _conv_forward_asymmetric.__get__(m, nn.Conv2d)
+        for layer in conv_layers:
+            if isinstance(layer, LoRACompatibleConv) and layer.lora_layer is None:
+                layer.lora_layer = lambda *x: 0
+            original_layers.append((layer, layer._conv_forward))
+            layer._conv_forward = _conv_forward_asymmetric.__get__(layer, torch.nn.Conv2d)
 
         yield
 
     finally:
-        for module, orig_conv_forward in to_restore:
-            module._conv_forward = orig_conv_forward
-            if hasattr(module, "asymmetric_padding_mode"):
-                del module.asymmetric_padding_mode
-            if hasattr(module, "asymmetric_padding"):
-                del module.asymmetric_padding
+        for layer, orig_conv_forward in original_layers:
+            layer._conv_forward = orig_conv_forward

From 83803ca8d69d45dc4ffb42745d97468503d14f36 Mon Sep 17 00:00:00 2001
From: blessedcoolant <54517381+blessedcoolant@users.noreply.github.com>
Date: Fri, 10 May 2024 21:26:52 +0530
Subject: [PATCH 2/2] cleanup: seamless unused older code cleanup

---
 invokeai/app/invocations/latent.py            |  7 ---
 invokeai/backend/image_util/__init__.py       |  1 -
 invokeai/backend/image_util/seamless.py       | 52 -------------------
 invokeai/backend/stable_diffusion/seamless.py |  4 +-
 4 files changed, 1 insertion(+), 63 deletions(-)
 delete mode 100644 invokeai/backend/image_util/seamless.py

diff --git a/invokeai/app/invocations/latent.py b/invokeai/app/invocations/latent.py
index 3d1439f7db4..b3ac3973bf3 100644
--- a/invokeai/app/invocations/latent.py
+++ b/invokeai/app/invocations/latent.py
@@ -586,13 +586,6 @@ def create_pipeline(
         unet: UNet2DConditionModel,
         scheduler: Scheduler,
     ) -> StableDiffusionGeneratorPipeline:
-        # TODO:
-        # configure_model_padding(
-        #    unet,
-        #    self.seamless,
-        #    self.seamless_axes,
-        # )
-
         class FakeVae:
             class FakeVaeConfig:
                 def __init__(self) -> None:
diff --git a/invokeai/backend/image_util/__init__.py b/invokeai/backend/image_util/__init__.py
index dec2a921504..f45af9feb47 100644
--- a/invokeai/backend/image_util/__init__.py
+++ b/invokeai/backend/image_util/__init__.py
@@ -4,5 +4,4 @@
 
 from .infill_methods.patchmatch import PatchMatch  # noqa: F401
 from .pngwriter import PngWriter, PromptFormatter, retrieve_metadata, write_metadata  # noqa: F401
-from .seamless import configure_model_padding  # noqa: F401
 from .util import InitImageResizer, make_grid  # noqa: F401
diff --git a/invokeai/backend/image_util/seamless.py b/invokeai/backend/image_util/seamless.py
deleted file mode 100644
index 8a2580bfcc4..00000000000
--- a/invokeai/backend/image_util/seamless.py
+++ /dev/null
@@ -1,52 +0,0 @@
-import torch.nn as nn
-
-
-def _conv_forward_asymmetric(self, input, weight, bias):
-    """
-    Patch for Conv2d._conv_forward that supports asymmetric padding
-    """
-    working = nn.functional.pad(input, self.asymmetric_padding["x"], mode=self.asymmetric_padding_mode["x"])
-    working = nn.functional.pad(working, self.asymmetric_padding["y"], mode=self.asymmetric_padding_mode["y"])
-    return nn.functional.conv2d(
-        working,
-        weight,
-        bias,
-        self.stride,
-        nn.modules.utils._pair(0),
-        self.dilation,
-        self.groups,
-    )
-
-
-def configure_model_padding(model, seamless, seamless_axes):
-    """
-    Modifies the 2D convolution layers to use a circular padding mode based on
-    the `seamless` and `seamless_axes` options.
-    """
-    # TODO: get an explicit interface for this in diffusers: https://github.com/huggingface/diffusers/issues/556
-    for m in model.modules():
-        if isinstance(m, (nn.Conv2d, nn.ConvTranspose2d)):
-            if seamless:
-                m.asymmetric_padding_mode = {}
-                m.asymmetric_padding = {}
-                m.asymmetric_padding_mode["x"] = "circular" if ("x" in seamless_axes) else "constant"
-                m.asymmetric_padding["x"] = (
-                    m._reversed_padding_repeated_twice[0],
-                    m._reversed_padding_repeated_twice[1],
-                    0,
-                    0,
-                )
-                m.asymmetric_padding_mode["y"] = "circular" if ("y" in seamless_axes) else "constant"
-                m.asymmetric_padding["y"] = (
-                    0,
-                    0,
-                    m._reversed_padding_repeated_twice[2],
-                    m._reversed_padding_repeated_twice[3],
-                )
-                m._conv_forward = _conv_forward_asymmetric.__get__(m, nn.Conv2d)
-            else:
-                m._conv_forward = nn.Conv2d._conv_forward.__get__(m, nn.Conv2d)
-                if hasattr(m, "asymmetric_padding_mode"):
-                    del m.asymmetric_padding_mode
-                if hasattr(m, "asymmetric_padding"):
-                    del m.asymmetric_padding
diff --git a/invokeai/backend/stable_diffusion/seamless.py b/invokeai/backend/stable_diffusion/seamless.py
index a004a4a9d14..23ed978c6d0 100644
--- a/invokeai/backend/stable_diffusion/seamless.py
+++ b/invokeai/backend/stable_diffusion/seamless.py
@@ -1,5 +1,3 @@
-from __future__ import annotations
-
 from contextlib import contextmanager
 from typing import Callable, List, Optional, Tuple, Union
 
@@ -28,7 +26,7 @@ def _conv_forward_asymmetric(self, input: torch.Tensor, weight: torch.Tensor, bi
             working, weight, bias, self.stride, torch.nn.modules.utils._pair(0), self.dilation, self.groups
         )
 
-    original_layers: List[Tuple[nn.Conv2d | nn.ConvTranspose2d, Callable]] = []
+    original_layers: List[Tuple[nn.Conv2d, Callable]] = []
 
     try:
         x_mode = "circular" if "x" in seamless_axes else "constant"
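
A minimal usage sketch of the patched set_seamless context manager (illustration only, not part of the patches above; the model instance, tensor sizes, and the direct encoder/decoder calls are arbitrary choices for the example):

    import torch
    from diffusers.models.autoencoders.autoencoder_tiny import AutoencoderTiny

    from invokeai.backend.stable_diffusion.seamless import set_seamless

    # Any of the supported model types works the same way; a default-configured
    # AutoencoderTiny keeps the example small (model choice and sizes are arbitrary).
    vae = AutoencoderTiny()
    sample = torch.randn(1, 3, 64, 64)

    with torch.no_grad(), set_seamless(vae, ["x", "y"]):
        # Inside the context every Conv2d pads circularly on the requested axes,
        # so opposite edges of the reconstruction line up when tiled.
        # (Calling the encoder/decoder submodules directly skips latent scaling,
        # which is irrelevant to the padding behaviour being demonstrated.)
        tiled = vae.decoder(vae.encoder(sample))

    # On exit the original Conv2d._conv_forward of every patched layer is restored.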