Compare commits

..

4 Commits

Author SHA1 Message Date
sayakpaul
7d52558c15 Release: v0.26.2-patch 2024-02-06 07:36:31 +05:30
YiYi Xu
3efe355d52 add self.use_ada_layer_norm_* params back to BasicTransformerBlock (#6841)
fix sd reference community pipeline

Co-authored-by: yiyixuxu <yixu310@gmail.com>
2024-02-06 07:34:36 +05:30
sayakpaul
08e6558ab8 Release: v0.26.1-patch 2024-02-02 14:42:23 +05:30
YiYi Xu
1547720209 add is_torchvision_available (#6800)
* add

* remove transformer

---------

Co-authored-by: yiyixuxu <yixu310@gmail.com>
2024-02-02 14:40:36 +05:30
8 changed files with 29 additions and 7 deletions

View File

@@ -538,7 +538,7 @@ class StableDiffusionReferencePipeline(StableDiffusionPipeline):
return hidden_states, output_states return hidden_states, output_states
def hacked_DownBlock2D_forward(self, hidden_states, temb=None): def hacked_DownBlock2D_forward(self, hidden_states, temb=None, **kwargs):
eps = 1e-6 eps = 1e-6
output_states = () output_states = ()
@@ -634,7 +634,9 @@ class StableDiffusionReferencePipeline(StableDiffusionPipeline):
return hidden_states return hidden_states
def hacked_UpBlock2D_forward(self, hidden_states, res_hidden_states_tuple, temb=None, upsample_size=None): def hacked_UpBlock2D_forward(
self, hidden_states, res_hidden_states_tuple, temb=None, upsample_size=None, **kwargs
):
eps = 1e-6 eps = 1e-6
for i, resnet in enumerate(self.resnets): for i, resnet in enumerate(self.resnets):
# pop res hidden states # pop res hidden states

View File

@@ -507,7 +507,7 @@ class StableDiffusionXLReferencePipeline(StableDiffusionXLPipeline):
return hidden_states, output_states return hidden_states, output_states
def hacked_DownBlock2D_forward(self, hidden_states, temb=None): def hacked_DownBlock2D_forward(self, hidden_states, temb=None, **kwargs):
eps = 1e-6 eps = 1e-6
output_states = () output_states = ()
@@ -603,7 +603,9 @@ class StableDiffusionXLReferencePipeline(StableDiffusionXLPipeline):
return hidden_states return hidden_states
def hacked_UpBlock2D_forward(self, hidden_states, res_hidden_states_tuple, temb=None, upsample_size=None): def hacked_UpBlock2D_forward(
self, hidden_states, res_hidden_states_tuple, temb=None, upsample_size=None, **kwargs
):
eps = 1e-6 eps = 1e-6
for i, resnet in enumerate(self.resnets): for i, resnet in enumerate(self.resnets):
# pop res hidden states # pop res hidden states

View File

@@ -249,7 +249,7 @@ version_range_max = max(sys.version_info[1], 10) + 1
setup( setup(
name="diffusers", name="diffusers",
version="0.26.0", # expected format is one of x.y.z.dev0, or x.y.z.rc1 or x.y.z (no to dashes, yes to dots) version="0.26.2", # expected format is one of x.y.z.dev0, or x.y.z.rc1 or x.y.z (no to dashes, yes to dots)
description="State-of-the-art diffusion in PyTorch and JAX.", description="State-of-the-art diffusion in PyTorch and JAX.",
long_description=open("README.md", "r", encoding="utf-8").read(), long_description=open("README.md", "r", encoding="utf-8").read(),
long_description_content_type="text/markdown", long_description_content_type="text/markdown",

View File

@@ -1,4 +1,4 @@
__version__ = "0.26.0" __version__ = "0.26.2"
from typing import TYPE_CHECKING from typing import TYPE_CHECKING

View File

@@ -158,6 +158,12 @@ class BasicTransformerBlock(nn.Module):
super().__init__() super().__init__()
self.only_cross_attention = only_cross_attention self.only_cross_attention = only_cross_attention
self.use_ada_layer_norm_zero = (num_embeds_ada_norm is not None) and norm_type == "ada_norm_zero"
self.use_ada_layer_norm = (num_embeds_ada_norm is not None) and norm_type == "ada_norm"
self.use_ada_layer_norm_single = norm_type == "ada_norm_single"
self.use_layer_norm = norm_type == "layer_norm"
self.use_ada_layer_norm_continuous = norm_type == "ada_norm_continuous"
if norm_type in ("ada_norm", "ada_norm_zero") and num_embeds_ada_norm is None: if norm_type in ("ada_norm", "ada_norm_zero") and num_embeds_ada_norm is None:
raise ValueError( raise ValueError(
f"`norm_type` is set to {norm_type}, but `num_embeds_ada_norm` is not defined. Please make sure to" f"`norm_type` is set to {norm_type}, but `num_embeds_ada_norm` is not defined. Please make sure to"

View File

@@ -5,7 +5,6 @@ from typing import Any, Dict, Iterable, List, Optional, Union
import numpy as np import numpy as np
import torch import torch
from transformers import is_torchvision_available
from .models import UNet2DConditionModel from .models import UNet2DConditionModel
from .utils import ( from .utils import (
@@ -13,6 +12,7 @@ from .utils import (
convert_state_dict_to_peft, convert_state_dict_to_peft,
deprecate, deprecate,
is_peft_available, is_peft_available,
is_torchvision_available,
is_transformers_available, is_transformers_available,
) )

View File

@@ -75,6 +75,7 @@ from .import_utils import (
is_torch_version, is_torch_version,
is_torch_xla_available, is_torch_xla_available,
is_torchsde_available, is_torchsde_available,
is_torchvision_available,
is_transformers_available, is_transformers_available,
is_transformers_version, is_transformers_version,
is_unidecode_available, is_unidecode_available,

View File

@@ -278,6 +278,13 @@ try:
except importlib_metadata.PackageNotFoundError: except importlib_metadata.PackageNotFoundError:
_peft_available = False _peft_available = False
_torchvision_available = importlib.util.find_spec("torchvision") is not None
try:
_torchvision_version = importlib_metadata.version("torchvision")
logger.debug(f"Successfully imported torchvision version {_torchvision_version}")
except importlib_metadata.PackageNotFoundError:
_torchvision_available = False
def is_torch_available(): def is_torch_available():
return _torch_available return _torch_available
@@ -367,6 +374,10 @@ def is_peft_available():
return _peft_available return _peft_available
def is_torchvision_available():
return _torchvision_available
# docstyle-ignore # docstyle-ignore
FLAX_IMPORT_ERROR = """ FLAX_IMPORT_ERROR = """
{0} requires the FLAX library but it was not found in your environment. Checkout the instructions on the {0} requires the FLAX library but it was not found in your environment. Checkout the instructions on the