Compare commits

2 Commits

Author: yiyixuxu
SHA1: 1fbaaf3b64
Message: up
Date: 2026-02-14 20:14:54 +01:00

Author: YiYi Xu
SHA1: 6141ae2348
Message: [Modular] add different pipeline blocks to init (#13145)
    * up
    * style + copies
    * fix
    ---------
    Co-authored-by: yiyi@huggingface.co <yiyi@ip-26-0-160-103.ec2.internal>
Date: 2026-02-13 18:36:47 -10:00
4 changed files with 129 additions and 3 deletions

File 1 of 4 · View File

@@ -294,10 +294,17 @@ else:
     )
     _import_structure["modular_pipelines"].extend(
         [
+            "AutoPipelineBlocks",
             "ComponentsManager",
             "ComponentSpec",
+            "ConditionalPipelineBlocks",
+            "ConfigSpec",
+            "InputParam",
+            "LoopSequentialPipelineBlocks",
             "ModularPipeline",
             "ModularPipelineBlocks",
+            "OutputParam",
+            "SequentialPipelineBlocks",
         ]
     )
     _import_structure["optimization"] = [
@@ -1063,7 +1070,19 @@ if TYPE_CHECKING or DIFFUSERS_SLOW_IMPORT:
             ZImageTransformer2DModel,
             attention_backend,
         )
-        from .modular_pipelines import ComponentsManager, ComponentSpec, ModularPipeline, ModularPipelineBlocks
+        from .modular_pipelines import (
+            AutoPipelineBlocks,
+            ComponentsManager,
+            ComponentSpec,
+            ConditionalPipelineBlocks,
+            ConfigSpec,
+            InputParam,
+            LoopSequentialPipelineBlocks,
+            ModularPipeline,
+            ModularPipelineBlocks,
+            OutputParam,
+            SequentialPipelineBlocks,
+        )
         from .optimization import (
             get_constant_schedule,
             get_constant_schedule_with_warmup,

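The two hunks above have to register each new block class twice: once in the _import_structure mapping that drives lazy imports at runtime, and once in the TYPE_CHECKING / DIFFUSERS_SLOW_IMPORT branch that type checkers resolve eagerly. Below is a minimal, generic sketch of that two-place pattern using the PEP 562 module __getattr__ idiom; it is illustrative only, not diffusers' actual _LazyModule implementation, and is meant to live in a package __init__.py.

# Generic sketch of the lazy-import pattern these hunks extend (names are illustrative).
import importlib
from typing import TYPE_CHECKING

_import_structure = {
    "modular_pipelines": [
        "AutoPipelineBlocks",
        "ConditionalPipelineBlocks",
        "SequentialPipelineBlocks",
    ],
}

if TYPE_CHECKING:
    # Type checkers (and an eager "slow import" mode) see the real symbols directly.
    from .modular_pipelines import (  # noqa: F401
        AutoPipelineBlocks,
        ConditionalPipelineBlocks,
        SequentialPipelineBlocks,
    )
else:
    # At runtime, a submodule is imported only when one of its names is first accessed.
    _name_to_module = {
        name: module for module, names in _import_structure.items() for name in names
    }

    def __getattr__(name):
        if name in _name_to_module:
            module = importlib.import_module(f".{_name_to_module[name]}", __name__)
            return getattr(module, name)
        raise AttributeError(f"module {__name__!r} has no attribute {name!r}")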
File 2 of 4 · View File

@@ -33,6 +33,7 @@ else:
         "ModularPipeline",
         "AutoPipelineBlocks",
         "SequentialPipelineBlocks",
+        "ConditionalPipelineBlocks",
         "LoopSequentialPipelineBlocks",
         "PipelineState",
         "BlockState",
@@ -105,6 +106,7 @@ if TYPE_CHECKING or DIFFUSERS_SLOW_IMPORT:
         from .modular_pipeline import (
             AutoPipelineBlocks,
             BlockState,
+            ConditionalPipelineBlocks,
             LoopSequentialPipelineBlocks,
             ModularPipeline,
             ModularPipelineBlocks,

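With the subpackage __init__ updated as well, ConditionalPipelineBlocks resolves to the same class whether it is imported from the top-level diffusers package or from diffusers.modular_pipelines. A quick hedged sanity check (assumes a build containing this change and an available torch backend, so the real class is loaded rather than the dummy placeholder):

# Hedged sanity check: both import paths are expected to yield the same class object.
from diffusers import ConditionalPipelineBlocks as TopLevelBlocks
from diffusers.modular_pipelines import ConditionalPipelineBlocks as SubpackageBlocks

assert TopLevelBlocks is SubpackageBlocks
print(TopLevelBlocks.__module__)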
File 3 of 4 · View File

@@ -17,7 +17,7 @@ from typing import Callable
 import numpy as np
 import torch
-from transformers import BertModel, BertTokenizer, CLIPImageProcessor, MT5Tokenizer, T5EncoderModel
+from transformers import BertModel, BertTokenizer, CLIPImageProcessor, T5EncoderModel, T5Tokenizer
 from diffusers.pipelines.stable_diffusion import StableDiffusionPipelineOutput
@@ -208,7 +208,7 @@ class HunyuanDiTPAGPipeline(DiffusionPipeline, PAGMixin):
         feature_extractor: CLIPImageProcessor | None = None,
         requires_safety_checker: bool = True,
         text_encoder_2: T5EncoderModel | None = None,
-        tokenizer_2: MT5Tokenizer | None = None,
+        tokenizer_2: T5Tokenizer | None = None,
         pag_applied_layers: str | list[str] = "blocks.1",  # "blocks.16.attn1", "blocks.16", "16", 16
     ):
         super().__init__()

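The two hunks above only swap the tokenizer_2 annotation from MT5Tokenizer to T5Tokenizer; loading and using the pipeline is unchanged. A hedged usage sketch follows; the checkpoint id is an assumption, and torch plus transformers must be installed.

# Hedged sketch: load the PAG-enabled HunyuanDiT pipeline and inspect tokenizer_2.
# The checkpoint id "Tencent-Hunyuan/HunyuanDiT-v1.2-Diffusers" is an assumption.
import torch
from diffusers import HunyuanDiTPAGPipeline

pipe = HunyuanDiTPAGPipeline.from_pretrained(
    "Tencent-Hunyuan/HunyuanDiT-v1.2-Diffusers",
    torch_dtype=torch.float16,
)
# After this change, tokenizer_2 is annotated as transformers.T5Tokenizer.
print(type(pipe.tokenizer_2))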
File 4 of 4 · View File

@@ -1905,6 +1905,21 @@ def attention_backend(*args, **kwargs):
     requires_backends(attention_backend, ["torch"])


+class AutoPipelineBlocks(metaclass=DummyObject):
+    _backends = ["torch"]
+
+    def __init__(self, *args, **kwargs):
+        requires_backends(self, ["torch"])
+
+    @classmethod
+    def from_config(cls, *args, **kwargs):
+        requires_backends(cls, ["torch"])
+
+    @classmethod
+    def from_pretrained(cls, *args, **kwargs):
+        requires_backends(cls, ["torch"])
+
+
 class ComponentsManager(metaclass=DummyObject):
     _backends = ["torch"]

@@ -1935,6 +1950,66 @@ class ComponentSpec(metaclass=DummyObject):
         requires_backends(cls, ["torch"])


+class ConditionalPipelineBlocks(metaclass=DummyObject):
+    _backends = ["torch"]
+
+    def __init__(self, *args, **kwargs):
+        requires_backends(self, ["torch"])
+
+    @classmethod
+    def from_config(cls, *args, **kwargs):
+        requires_backends(cls, ["torch"])
+
+    @classmethod
+    def from_pretrained(cls, *args, **kwargs):
+        requires_backends(cls, ["torch"])
+
+
+class ConfigSpec(metaclass=DummyObject):
+    _backends = ["torch"]
+
+    def __init__(self, *args, **kwargs):
+        requires_backends(self, ["torch"])
+
+    @classmethod
+    def from_config(cls, *args, **kwargs):
+        requires_backends(cls, ["torch"])
+
+    @classmethod
+    def from_pretrained(cls, *args, **kwargs):
+        requires_backends(cls, ["torch"])
+
+
+class InputParam(metaclass=DummyObject):
+    _backends = ["torch"]
+
+    def __init__(self, *args, **kwargs):
+        requires_backends(self, ["torch"])
+
+    @classmethod
+    def from_config(cls, *args, **kwargs):
+        requires_backends(cls, ["torch"])
+
+    @classmethod
+    def from_pretrained(cls, *args, **kwargs):
+        requires_backends(cls, ["torch"])
+
+
+class LoopSequentialPipelineBlocks(metaclass=DummyObject):
+    _backends = ["torch"]
+
+    def __init__(self, *args, **kwargs):
+        requires_backends(self, ["torch"])
+
+    @classmethod
+    def from_config(cls, *args, **kwargs):
+        requires_backends(cls, ["torch"])
+
+    @classmethod
+    def from_pretrained(cls, *args, **kwargs):
+        requires_backends(cls, ["torch"])
+
+
 class ModularPipeline(metaclass=DummyObject):
     _backends = ["torch"]

@@ -1965,6 +2040,36 @@ class ModularPipelineBlocks(metaclass=DummyObject):
         requires_backends(cls, ["torch"])


+class OutputParam(metaclass=DummyObject):
+    _backends = ["torch"]
+
+    def __init__(self, *args, **kwargs):
+        requires_backends(self, ["torch"])
+
+    @classmethod
+    def from_config(cls, *args, **kwargs):
+        requires_backends(cls, ["torch"])
+
+    @classmethod
+    def from_pretrained(cls, *args, **kwargs):
+        requires_backends(cls, ["torch"])
+
+
+class SequentialPipelineBlocks(metaclass=DummyObject):
+    _backends = ["torch"]
+
+    def __init__(self, *args, **kwargs):
+        requires_backends(self, ["torch"])
+
+    @classmethod
+    def from_config(cls, *args, **kwargs):
+        requires_backends(cls, ["torch"])
+
+    @classmethod
+    def from_pretrained(cls, *args, **kwargs):
+        requires_backends(cls, ["torch"])
+
+
 def get_constant_schedule(*args, **kwargs):
     requires_backends(get_constant_schedule, ["torch"])
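The placeholder classes above exist so that importing these names from diffusers still succeeds when torch is not installed, and only fails at use time with an explicit message. Below is a simplified, self-contained sketch of the dummy-object pattern; the real DummyObject and requires_backends helpers live in diffusers.utils and differ in detail.

# Simplified sketch of the dummy-object pattern (illustrative, not diffusers' exact code).
def requires_backends(obj, backends):
    # The real helper checks whether each backend is importable; here we always raise
    # to show the failure mode when the backend is missing.
    name = getattr(obj, "__name__", obj.__class__.__name__)
    raise ImportError(f"{name} requires the following backend(s): {', '.join(backends)}")


class DummyObject(type):
    """Metaclass for placeholders: class-level attribute access that falls through
    normal lookup raises the backend error instead of an AttributeError."""

    def __getattr__(cls, key):
        requires_backends(cls, cls._backends)


class SequentialPipelineBlocks(metaclass=DummyObject):
    _backends = ["torch"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])


# Instantiating the placeholder raises a descriptive ImportError instead of a
# confusing failure deep inside user code.
try:
    SequentialPipelineBlocks()
except ImportError as err:
    print(err)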