Compare commits

1 commit

Author          SHA1         Message             Date
Álvaro Somoza   84718e0d6c   change lora mixin   2026-02-13 23:57:20 -03:00
4 changed files with 5 additions and 131 deletions

View File

@@ -294,17 +294,10 @@ else:
     )
     _import_structure["modular_pipelines"].extend(
         [
-            "AutoPipelineBlocks",
             "ComponentsManager",
             "ComponentSpec",
-            "ConditionalPipelineBlocks",
-            "ConfigSpec",
-            "InputParam",
-            "LoopSequentialPipelineBlocks",
             "ModularPipeline",
             "ModularPipelineBlocks",
-            "OutputParam",
-            "SequentialPipelineBlocks",
         ]
     )
     _import_structure["optimization"] = [

@@ -1070,19 +1063,7 @@ if TYPE_CHECKING or DIFFUSERS_SLOW_IMPORT:
             ZImageTransformer2DModel,
             attention_backend,
         )
-        from .modular_pipelines import (
-            AutoPipelineBlocks,
-            ComponentsManager,
-            ComponentSpec,
-            ConditionalPipelineBlocks,
-            ConfigSpec,
-            InputParam,
-            LoopSequentialPipelineBlocks,
-            ModularPipeline,
-            ModularPipelineBlocks,
-            OutputParam,
-            SequentialPipelineBlocks,
-        )
+        from .modular_pipelines import ComponentsManager, ComponentSpec, ModularPipeline, ModularPipelineBlocks
         from .optimization import (
             get_constant_schedule,
             get_constant_schedule_with_warmup,

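Note: only the top-level re-export surface shrinks here. ComponentsManager, ComponentSpec, ModularPipeline, and ModularPipelineBlocks stay importable from `diffusers`, while the other block classes remain listed in the `diffusers.modular_pipelines` subpackage (next file in this diff). A minimal sketch of how imports would look after this commit, assuming the subpackage keeps re-exporting those names:

```python
# Sketch only; assumes diffusers.modular_pipelines keeps re-exporting the block
# classes that this commit drops from the top-level diffusers namespace.

# Still available at the top level after this change:
from diffusers import ComponentsManager, ComponentSpec, ModularPipeline, ModularPipelineBlocks

# No longer re-exported from `diffusers`; import from the subpackage instead:
from diffusers.modular_pipelines import AutoPipelineBlocks, SequentialPipelineBlocks
```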
View File

@@ -33,7 +33,6 @@ else:
         "ModularPipeline",
         "AutoPipelineBlocks",
         "SequentialPipelineBlocks",
-        "ConditionalPipelineBlocks",
         "LoopSequentialPipelineBlocks",
         "PipelineState",
         "BlockState",

@@ -106,7 +105,6 @@ if TYPE_CHECKING or DIFFUSERS_SLOW_IMPORT:
     from .modular_pipeline import (
         AutoPipelineBlocks,
         BlockState,
-        ConditionalPipelineBlocks,
         LoopSequentialPipelineBlocks,
         ModularPipeline,
         ModularPipelineBlocks,

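ConditionalPipelineBlocks is the one name dropped from the subpackage namespace as well. Its class definition in `modular_pipeline.py` is not touched by this diff, so, assuming it still exists there, callers would need to import it from the defining module; a hedged sketch:

```python
# Sketch, assuming ConditionalPipelineBlocks still lives in
# diffusers.modular_pipelines.modular_pipeline and only its re-export was removed.
try:
    from diffusers.modular_pipelines import ConditionalPipelineBlocks  # gone after this commit
except ImportError:
    from diffusers.modular_pipelines.modular_pipeline import ConditionalPipelineBlocks
```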
View File

@@ -22,7 +22,7 @@ from transformers import Gemma3ForConditionalGeneration, GemmaTokenizer, GemmaTo
 from ...callbacks import MultiPipelineCallbacks, PipelineCallback
 from ...image_processor import PipelineImageInput
-from ...loaders import FromSingleFileMixin, LTXVideoLoraLoaderMixin
+from ...loaders import FromSingleFileMixin, LTX2LoraLoaderMixin
 from ...models.autoencoders import AutoencoderKLLTX2Audio, AutoencoderKLLTX2Video
 from ...models.transformers import LTX2VideoTransformer3DModel
 from ...schedulers import FlowMatchEulerDiscreteScheduler

@@ -48,7 +48,7 @@ EXAMPLE_DOC_STRING = """
     Examples:
         ```py
         >>> import torch
-        >>> from diffusers import LTX2ImageToVideoPipeline
+        >>> from diffusers import LTX2Pipeline
         >>> from diffusers.pipelines.ltx2.export_utils import encode_video
         >>> from diffusers.utils import load_image

@@ -62,7 +62,7 @@ EXAMPLE_DOC_STRING = """
         >>> negative_prompt = "worst quality, inconsistent motion, blurry, jittery, distorted"
         >>> frame_rate = 24.0
-        >>> video, audio = pipe(
+        >>> video = pipe(
         ... image=image,
         ... prompt=prompt,
         ... negative_prompt=negative_prompt,

@@ -202,7 +202,7 @@ def rescale_noise_cfg(noise_cfg, noise_pred_text, guidance_rescale=0.0):
     return noise_cfg


-class LTX2ImageToVideoPipeline(DiffusionPipeline, FromSingleFileMixin, LTXVideoLoraLoaderMixin):
+class LTX2ImageToVideoPipeline(DiffusionPipeline, FromSingleFileMixin, LTX2LoraLoaderMixin):
     r"""
     Pipeline for image-to-video generation.

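The substantive change in this file is the base-class swap from LTXVideoLoraLoaderMixin to LTX2LoraLoaderMixin; the docstring hunks only adjust the usage example. A rough sketch of loading a LoRA through the new mixin, assuming it exposes the standard diffusers loader-mixin entry points (the model and adapter repo ids below are placeholders):

```python
import torch
from diffusers import LTX2ImageToVideoPipeline

# Placeholder repo ids, for illustration only.
pipe = LTX2ImageToVideoPipeline.from_pretrained(
    "some-org/ltx2-image-to-video", torch_dtype=torch.bfloat16
)
pipe.to("cuda")

# LTX2LoraLoaderMixin is assumed to provide the usual LoRA-loading surface
# shared by the diffusers loader mixins.
pipe.load_lora_weights("some-org/example-ltx2-lora", adapter_name="example")
pipe.set_adapters(["example"], adapter_weights=[0.8])
```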
View File

@@ -1905,21 +1905,6 @@ def attention_backend(*args, **kwargs):
     requires_backends(attention_backend, ["torch"])


-class AutoPipelineBlocks(metaclass=DummyObject):
-    _backends = ["torch"]
-
-    def __init__(self, *args, **kwargs):
-        requires_backends(self, ["torch"])
-
-    @classmethod
-    def from_config(cls, *args, **kwargs):
-        requires_backends(cls, ["torch"])
-
-    @classmethod
-    def from_pretrained(cls, *args, **kwargs):
-        requires_backends(cls, ["torch"])
-
-
 class ComponentsManager(metaclass=DummyObject):
     _backends = ["torch"]

@@ -1950,66 +1935,6 @@ class ComponentSpec(metaclass=DummyObject):
         requires_backends(cls, ["torch"])


-class ConditionalPipelineBlocks(metaclass=DummyObject):
-    _backends = ["torch"]
-
-    def __init__(self, *args, **kwargs):
-        requires_backends(self, ["torch"])
-
-    @classmethod
-    def from_config(cls, *args, **kwargs):
-        requires_backends(cls, ["torch"])
-
-    @classmethod
-    def from_pretrained(cls, *args, **kwargs):
-        requires_backends(cls, ["torch"])
-
-
-class ConfigSpec(metaclass=DummyObject):
-    _backends = ["torch"]
-
-    def __init__(self, *args, **kwargs):
-        requires_backends(self, ["torch"])
-
-    @classmethod
-    def from_config(cls, *args, **kwargs):
-        requires_backends(cls, ["torch"])
-
-    @classmethod
-    def from_pretrained(cls, *args, **kwargs):
-        requires_backends(cls, ["torch"])
-
-
-class InputParam(metaclass=DummyObject):
-    _backends = ["torch"]
-
-    def __init__(self, *args, **kwargs):
-        requires_backends(self, ["torch"])
-
-    @classmethod
-    def from_config(cls, *args, **kwargs):
-        requires_backends(cls, ["torch"])
-
-    @classmethod
-    def from_pretrained(cls, *args, **kwargs):
-        requires_backends(cls, ["torch"])
-
-
-class LoopSequentialPipelineBlocks(metaclass=DummyObject):
-    _backends = ["torch"]
-
-    def __init__(self, *args, **kwargs):
-        requires_backends(self, ["torch"])
-
-    @classmethod
-    def from_config(cls, *args, **kwargs):
-        requires_backends(cls, ["torch"])
-
-    @classmethod
-    def from_pretrained(cls, *args, **kwargs):
-        requires_backends(cls, ["torch"])
-
-
 class ModularPipeline(metaclass=DummyObject):
     _backends = ["torch"]

@@ -2040,36 +1965,6 @@ class ModularPipelineBlocks(metaclass=DummyObject):
         requires_backends(cls, ["torch"])


-class OutputParam(metaclass=DummyObject):
-    _backends = ["torch"]
-
-    def __init__(self, *args, **kwargs):
-        requires_backends(self, ["torch"])
-
-    @classmethod
-    def from_config(cls, *args, **kwargs):
-        requires_backends(cls, ["torch"])
-
-    @classmethod
-    def from_pretrained(cls, *args, **kwargs):
-        requires_backends(cls, ["torch"])
-
-
-class SequentialPipelineBlocks(metaclass=DummyObject):
-    _backends = ["torch"]
-
-    def __init__(self, *args, **kwargs):
-        requires_backends(self, ["torch"])
-
-    @classmethod
-    def from_config(cls, *args, **kwargs):
-        requires_backends(cls, ["torch"])
-
-    @classmethod
-    def from_pretrained(cls, *args, **kwargs):
-        requires_backends(cls, ["torch"])
-
-
 def get_constant_schedule(*args, **kwargs):
     requires_backends(get_constant_schedule, ["torch"])
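All of the deleted blocks above are DummyObject placeholders: when torch is unavailable, diffusers exposes these stand-ins instead of the real classes, and requires_backends raises an informative ImportError as soon as one is instantiated or its from_config/from_pretrained is called. Since this commit removes the classes from the torch-gated import structure, their dummies go with them. A simplified, schematic sketch of the pattern (not the library's exact implementation):

```python
# Schematic illustration of the DummyObject / requires_backends pattern seen in
# the deleted blocks; error text and checks are simplified for the sketch.

class DummyObject(type):
    """Metaclass for placeholder classes that require an unavailable backend."""

    def __getattr__(cls, name):
        raise ImportError(f"{cls.__name__} requires the torch backend, which is not installed.")


def requires_backends(obj, backends):
    # The real helper checks whether each backend is importable; here we assume it is not.
    name = obj.__name__ if hasattr(obj, "__name__") else obj.__class__.__name__
    raise ImportError(f"{name} requires the following backends: {', '.join(backends)}.")


class SequentialPipelineBlocks(metaclass=DummyObject):
    _backends = ["torch"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch"])  # raises immediately when torch is missing
```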