@@ -36,7 +36,7 @@ from diffusers.loaders.single_file_utils import convert_ldm_vae_checkpoint
 from diffusers.utils.import_utils import is_accelerate_available
 
 
-CTX = init_empty_weights if is_accelerate_available else nullcontext
+CTX = init_empty_weights if is_accelerate_available() else nullcontext
 
 TOKENIZER_MAX_LENGTH = 224
 
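Each hunk applies the same one-character fix: the old conditional tested the function object `is_accelerate_available`, which is always truthy, so `CTX` resolved to `init_empty_weights` even in environments without accelerate; adding the call makes the check reflect whether accelerate is actually installed. A minimal sketch of the difference, assuming only that `diffusers` is importable:

from diffusers.utils.import_utils import is_accelerate_available

# Bug: a function object is always truthy, so the old conditional
# `init_empty_weights if is_accelerate_available else nullcontext`
# took the accelerate branch in every environment.
print(bool(is_accelerate_available))    # True, unconditionally

# Fix: call the function so the result reflects the environment.
print(bool(is_accelerate_available()))  # True only when accelerate is installed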
@@ -31,7 +31,7 @@ python scripts/convert_flux_to_diffusers.py \
 --vae
 """
 
-CTX = init_empty_weights if is_accelerate_available else nullcontext
+CTX = init_empty_weights if is_accelerate_available() else nullcontext
 
 parser = argparse.ArgumentParser()
 parser.add_argument("--original_state_dict_repo_id", default=None, type=str)
@@ -10,7 +10,7 @@ from diffusers import AutoencoderKLMochi, FlowMatchEulerDiscreteScheduler, Mochi
 from diffusers.utils.import_utils import is_accelerate_available
 
 
-CTX = init_empty_weights if is_accelerate_available else nullcontext
+CTX = init_empty_weights if is_accelerate_available() else nullcontext
 
 TOKENIZER_MAX_LENGTH = 256
 
@@ -11,7 +11,7 @@ from diffusers.models.modeling_utils import load_model_dict_into_meta
 from diffusers.utils.import_utils import is_accelerate_available
 
 
-CTX = init_empty_weights if is_accelerate_available else nullcontext
+CTX = init_empty_weights if is_accelerate_available() else nullcontext
 
 parser = argparse.ArgumentParser()
 parser.add_argument("--checkpoint_path", type=str)