Mirror of https://github.com/huggingface/diffusers.git (synced 2025-12-06 20:44:33 +08:00)

Compare commits: fbc-refact ... fast-gpu-t (6 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 93bf3bda52 | |
| | 1e27fa56d2 | |
| | 062125d0e5 | |
| | 36d4cd4075 | |
| | 08620d80e4 | |
| | d69121118c | |
```diff
@@ -157,11 +157,12 @@ class StableDiffusionLoRATests(PeftLoraLoaderMixinTests, unittest.TestCase):
             if ("adapter-1" in n or "adapter-2" in n) and not isinstance(m, (nn.Dropout, nn.Identity)):
                 self.assertTrue(m.weight.device != torch.device("cpu"))

     @slow
     @require_torch_gpu
     def test_integration_move_lora_dora_cpu(self):
         from peft import LoraConfig

-        path = "runwayml/stable-diffusion-v1-5"
+        path = "Lykon/dreamshaper-8"
         unet_lora_config = LoraConfig(
             init_lora_weights="gaussian",
             target_modules=["to_k", "to_q", "to_v", "to_out.0"],
```
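For orientation, here is a minimal sketch, assuming diffusers' PEFT integration (`add_adapter`) and the `Lykon/dreamshaper-8` checkpoint that the hunk swaps in for `runwayml/stable-diffusion-v1-5`. It mirrors the `LoraConfig` shown above but is not the actual body of `test_integration_move_lora_dora_cpu`.

```python
# Hedged sketch only: illustrates the LoraConfig and checkpoint swap from the
# hunk above, not the real test implementation.
import torch
from peft import LoraConfig
from diffusers import StableDiffusionPipeline

path = "Lykon/dreamshaper-8"  # replaces runwayml/stable-diffusion-v1-5 per the hunk
pipe = StableDiffusionPipeline.from_pretrained(path, torch_dtype=torch.float16)

unet_lora_config = LoraConfig(
    init_lora_weights="gaussian",
    target_modules=["to_k", "to_q", "to_v", "to_out.0"],
)
pipe.unet.add_adapter(unet_lora_config)  # diffusers' PEFT integration, requires peft

pipe.to("cuda")
# Echoes the device check visible at the top of the hunk: after moving the
# pipeline, LoRA submodules should no longer sit on the CPU.
for name, module in pipe.unet.named_modules():
    if "lora" in name and hasattr(module, "weight"):
        assert module.weight.device != torch.device("cpu")
```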
```diff
@@ -528,6 +528,10 @@ class AutoencoderOobleckTests(ModelTesterMixin, UNetTesterMixin, unittest.TestCa
     def test_forward_with_norm_groups(self):
         pass

+    @unittest.skip("No attention module used in this model")
+    def test_set_attn_processor_for_determinism(self):
+        return
+

 @slow
 class AutoencoderTinyIntegrationTests(unittest.TestCase):
```
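The added test above follows a pattern this suite uses throughout: a shared mixin defines a check, and a model-specific class overrides it with `@unittest.skip` when the check does not apply. A self-contained illustration with hypothetical class names:

```python
import unittest


class SharedTesterMixin:
    """Stand-in for a ModelTesterMixin-style shared check (hypothetical names)."""

    def test_set_attn_processor_for_determinism(self):
        # In the real suite this would swap attention processors and compare outputs.
        raise AssertionError("would fail for a model without attention modules")


class OobleckStyleTests(SharedTesterMixin, unittest.TestCase):
    # Opt out of the inherited check, mirroring the hunk above.
    @unittest.skip("No attention module used in this model")
    def test_set_attn_processor_for_determinism(self):
        return


if __name__ == "__main__":
    unittest.main()  # the overridden test is reported as skipped, not failed
```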
```diff
@@ -220,6 +220,7 @@ class ModelTesterMixin:
     base_precision = 1e-3
     forward_requires_fresh_args = False
     model_split_percents = [0.5, 0.7, 0.9]
+    uses_custom_attn_processor = False

     def check_device_map_is_respected(self, model, device_map):
         for param_name, param in model.named_parameters():
```
```diff
@@ -32,6 +32,7 @@ enable_full_determinism()
 class CogVideoXTransformerTests(ModelTesterMixin, unittest.TestCase):
     model_class = CogVideoXTransformer3DModel
     main_input_name = "hidden_states"
+    uses_custom_attn_processor = True

     @property
     def dummy_input(self):
```
```diff
@@ -32,6 +32,7 @@ enable_full_determinism()
 class LuminaNextDiT2DModelTransformerTests(ModelTesterMixin, unittest.TestCase):
     model_class = LuminaNextDiT2DModel
     main_input_name = "hidden_states"
+    uses_custom_attn_processor = True

     @property
     def dummy_input(self):
```
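The last three hunks add a class-level `uses_custom_attn_processor` flag to `ModelTesterMixin` (default `False`) and flip it to `True` for the CogVideoX and Lumina transformer tests. How the flag is consumed is not shown in this compare view; the sketch below, with hypothetical names, is only one plausible way a shared test could branch on it.

```python
import unittest


class ModelTesterMixinSketch:
    """Hypothetical stand-in for the shared tester; not diffusers' actual code."""

    # Default mirrors the ModelTesterMixin hunk: assume standard attention processors.
    uses_custom_attn_processor = False

    def test_set_attn_processor_for_determinism(self):
        if self.uses_custom_attn_processor:
            # Models that ship their own processors skip the default-processor check.
            self.skipTest("model defines custom attention processors")
        # ... otherwise swap in standard processors and compare outputs ...


class CogVideoXStyleTests(ModelTesterMixinSketch, unittest.TestCase):
    uses_custom_attn_processor = True  # mirrors the CogVideoX/Lumina hunks


if __name__ == "__main__":
    unittest.main()
```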