Compare commits

..

2 Commits

Author SHA1 Message Date
YiYi Xu
6141ae2348 [Modular] add different pipeline blocks to init (#13145)
* up

* style + copies

* fix

---------

Co-authored-by: yiyi@huggingface.co <yiyi@ip-26-0-160-103.ec2.internal>
2026-02-13 18:36:47 -10:00
Sayak Paul
3c1c62ec9d [docs] fix ltx2 i2v docstring. (#13135)
* fix ltx2 i2v docstring.

* up
2026-02-14 08:40:16 +05:30
4 changed files with 131 additions and 5 deletions

View File

@@ -294,10 +294,17 @@ else:
)
_import_structure["modular_pipelines"].extend(
[
"AutoPipelineBlocks",
"ComponentsManager",
"ComponentSpec",
"ConditionalPipelineBlocks",
"ConfigSpec",
"InputParam",
"LoopSequentialPipelineBlocks",
"ModularPipeline",
"ModularPipelineBlocks",
"OutputParam",
"SequentialPipelineBlocks",
]
)
_import_structure["optimization"] = [
@@ -1063,7 +1070,19 @@ if TYPE_CHECKING or DIFFUSERS_SLOW_IMPORT:
ZImageTransformer2DModel,
attention_backend,
)
from .modular_pipelines import ComponentsManager, ComponentSpec, ModularPipeline, ModularPipelineBlocks
from .modular_pipelines import (
AutoPipelineBlocks,
ComponentsManager,
ComponentSpec,
ConditionalPipelineBlocks,
ConfigSpec,
InputParam,
LoopSequentialPipelineBlocks,
ModularPipeline,
ModularPipelineBlocks,
OutputParam,
SequentialPipelineBlocks,
)
from .optimization import (
get_constant_schedule,
get_constant_schedule_with_warmup,

View File

@@ -33,6 +33,7 @@ else:
"ModularPipeline",
"AutoPipelineBlocks",
"SequentialPipelineBlocks",
"ConditionalPipelineBlocks",
"LoopSequentialPipelineBlocks",
"PipelineState",
"BlockState",
@@ -105,6 +106,7 @@ if TYPE_CHECKING or DIFFUSERS_SLOW_IMPORT:
from .modular_pipeline import (
AutoPipelineBlocks,
BlockState,
ConditionalPipelineBlocks,
LoopSequentialPipelineBlocks,
ModularPipeline,
ModularPipelineBlocks,

View File

@@ -22,7 +22,7 @@ from transformers import Gemma3ForConditionalGeneration, GemmaTokenizer, GemmaTo
from ...callbacks import MultiPipelineCallbacks, PipelineCallback
from ...image_processor import PipelineImageInput
from ...loaders import FromSingleFileMixin, LTX2LoraLoaderMixin
from ...loaders import FromSingleFileMixin, LTXVideoLoraLoaderMixin
from ...models.autoencoders import AutoencoderKLLTX2Audio, AutoencoderKLLTX2Video
from ...models.transformers import LTX2VideoTransformer3DModel
from ...schedulers import FlowMatchEulerDiscreteScheduler
@@ -48,7 +48,7 @@ EXAMPLE_DOC_STRING = """
Examples:
```py
>>> import torch
>>> from diffusers import LTX2Pipeline
>>> from diffusers import LTX2ImageToVideoPipeline
>>> from diffusers.pipelines.ltx2.export_utils import encode_video
>>> from diffusers.utils import load_image
@@ -62,7 +62,7 @@ EXAMPLE_DOC_STRING = """
>>> negative_prompt = "worst quality, inconsistent motion, blurry, jittery, distorted"
>>> frame_rate = 24.0
>>> video = pipe(
>>> video, audio = pipe(
... image=image,
... prompt=prompt,
... negative_prompt=negative_prompt,
@@ -202,7 +202,7 @@ def rescale_noise_cfg(noise_cfg, noise_pred_text, guidance_rescale=0.0):
return noise_cfg
class LTX2ImageToVideoPipeline(DiffusionPipeline, FromSingleFileMixin, LTX2LoraLoaderMixin):
class LTX2ImageToVideoPipeline(DiffusionPipeline, FromSingleFileMixin, LTXVideoLoraLoaderMixin):
r"""
Pipeline for image-to-video generation.

View File

@@ -1905,6 +1905,21 @@ def attention_backend(*args, **kwargs):
requires_backends(attention_backend, ["torch"])
class AutoPipelineBlocks(metaclass=DummyObject):
_backends = ["torch"]
def __init__(self, *args, **kwargs):
requires_backends(self, ["torch"])
@classmethod
def from_config(cls, *args, **kwargs):
requires_backends(cls, ["torch"])
@classmethod
def from_pretrained(cls, *args, **kwargs):
requires_backends(cls, ["torch"])
class ComponentsManager(metaclass=DummyObject):
_backends = ["torch"]
@@ -1935,6 +1950,66 @@ class ComponentSpec(metaclass=DummyObject):
requires_backends(cls, ["torch"])
class ConditionalPipelineBlocks(metaclass=DummyObject):
_backends = ["torch"]
def __init__(self, *args, **kwargs):
requires_backends(self, ["torch"])
@classmethod
def from_config(cls, *args, **kwargs):
requires_backends(cls, ["torch"])
@classmethod
def from_pretrained(cls, *args, **kwargs):
requires_backends(cls, ["torch"])
class ConfigSpec(metaclass=DummyObject):
_backends = ["torch"]
def __init__(self, *args, **kwargs):
requires_backends(self, ["torch"])
@classmethod
def from_config(cls, *args, **kwargs):
requires_backends(cls, ["torch"])
@classmethod
def from_pretrained(cls, *args, **kwargs):
requires_backends(cls, ["torch"])
class InputParam(metaclass=DummyObject):
_backends = ["torch"]
def __init__(self, *args, **kwargs):
requires_backends(self, ["torch"])
@classmethod
def from_config(cls, *args, **kwargs):
requires_backends(cls, ["torch"])
@classmethod
def from_pretrained(cls, *args, **kwargs):
requires_backends(cls, ["torch"])
class LoopSequentialPipelineBlocks(metaclass=DummyObject):
_backends = ["torch"]
def __init__(self, *args, **kwargs):
requires_backends(self, ["torch"])
@classmethod
def from_config(cls, *args, **kwargs):
requires_backends(cls, ["torch"])
@classmethod
def from_pretrained(cls, *args, **kwargs):
requires_backends(cls, ["torch"])
class ModularPipeline(metaclass=DummyObject):
_backends = ["torch"]
@@ -1965,6 +2040,36 @@ class ModularPipelineBlocks(metaclass=DummyObject):
requires_backends(cls, ["torch"])
class OutputParam(metaclass=DummyObject):
_backends = ["torch"]
def __init__(self, *args, **kwargs):
requires_backends(self, ["torch"])
@classmethod
def from_config(cls, *args, **kwargs):
requires_backends(cls, ["torch"])
@classmethod
def from_pretrained(cls, *args, **kwargs):
requires_backends(cls, ["torch"])
class SequentialPipelineBlocks(metaclass=DummyObject):
_backends = ["torch"]
def __init__(self, *args, **kwargs):
requires_backends(self, ["torch"])
@classmethod
def from_config(cls, *args, **kwargs):
requires_backends(cls, ["torch"])
@classmethod
def from_pretrained(cls, *args, **kwargs):
requires_backends(cls, ["torch"])
def get_constant_schedule(*args, **kwargs):
requires_backends(get_constant_schedule, ["torch"])