Compare commits

...

8 Commits

Author     SHA1        Message                                                Date
sayakpaul  98bf2703e9  add a comment.                                         2024-01-04 11:23:47 +05:30
sayakpaul  521eb56ece  alright then.                                          2024-01-04 11:22:44 +05:30
sayakpaul  6d5860e43a  let's see again                                        2024-01-04 11:21:20 +05:30
sayakpaul  66bece9a80  let's see.                                             2024-01-04 11:13:03 +05:30
sayakpaul  c0be882abb  place it right.                                        2024-01-04 10:55:58 +05:30
sayakpaul  75c16ac610  use the right method.                                  2024-01-04 10:53:04 +05:30
sayakpaul  1cb35e9403  debug test_with_different_scales_fusion_equivalence    2024-01-04 10:48:20 +05:30
sayakpaul  df675b904a  debug                                                  2024-01-04 10:40:04 +05:30


@@ -317,9 +317,9 @@ class LoraLoaderMixinTests(unittest.TestCase):
         text_encoder_lora_params = LoraLoaderMixin._modify_text_encoder(
             text_encoder, dtype=torch.float32, rank=self.lora_rank
         )
-        text_encoder_lora_params = set_lora_weights(
-            text_encoder_lora_state_dict(text_encoder), randn_weight=True, var=0.1
-        )
+        text_encoder_lora_params = text_encoder_lora_state_dict(text_encoder)
+        # We call this to ensure that the effects of the in-place `_modify_text_encoder` have been erased.
+        LoraLoaderMixin._remove_text_encoder_monkey_patch_classmethod(text_encoder)
         pipeline_components = {
             "unet": unet,
@@ -937,18 +937,17 @@ class SDXLLoraLoaderMixinTests(unittest.TestCase):
         _, unet_lora_params = create_unet_lora_layers(unet, rank=self.lora_rank)
         if modify_text_encoder:
-            text_encoder_lora_params = StableDiffusionXLLoraLoaderMixin._modify_text_encoder(
+            _ = StableDiffusionXLLoraLoaderMixin._modify_text_encoder(
                 text_encoder, dtype=torch.float32, rank=self.lora_rank
             )
-            text_encoder_lora_params = set_lora_weights(
-                text_encoder_lora_state_dict(text_encoder), randn_weight=True, var=0.1
-            )
-            text_encoder_two_lora_params = StableDiffusionXLLoraLoaderMixin._modify_text_encoder(
+            text_encoder_lora_params = text_encoder_lora_state_dict(text_encoder)
+            StableDiffusionXLLoraLoaderMixin._remove_text_encoder_monkey_patch_classmethod(text_encoder)
+            _ = StableDiffusionXLLoraLoaderMixin._modify_text_encoder(
                 text_encoder_2, dtype=torch.float32, rank=self.lora_rank
             )
-            text_encoder_two_lora_params = set_lora_weights(
-                text_encoder_lora_state_dict(text_encoder_2), randn_weight=True, var=0.1
-            )
+            text_encoder_two_lora_params = text_encoder_lora_state_dict(text_encoder_2)
+            StableDiffusionXLLoraLoaderMixin._remove_text_encoder_monkey_patch_classmethod(text_encoder_2)
         else:
             text_encoder_lora_params = None
             text_encoder_two_lora_params = None
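
Both hunks above switch the text-encoder LoRA parameters from randomly re-initialized weights (the `set_lora_weights(..., randn_weight=True, var=0.1)` calls) to the state dict actually produced by the in-place patch, and then undo that patch before the pipeline is assembled. A minimal sketch of the resulting pattern, assuming the private helpers shown in the diff (`LoraLoaderMixin._modify_text_encoder`, `_remove_text_encoder_monkey_patch_classmethod`); `text_encoder_lora_state_dict` is passed in explicitly because its import location depends on the diffusers version, and the helper name `extract_text_encoder_lora_params` is illustrative, not part of the test file:

import torch
from diffusers.loaders import LoraLoaderMixin


def extract_text_encoder_lora_params(text_encoder, rank, text_encoder_lora_state_dict):
    # Inject LoRA layers into the text encoder in place (private loader helper,
    # called exactly as in the hunks above).
    _ = LoraLoaderMixin._modify_text_encoder(text_encoder, dtype=torch.float32, rank=rank)
    # Snapshot the freshly created LoRA weights as a plain state dict.
    lora_params = text_encoder_lora_state_dict(text_encoder)
    # Undo the in-place monkey patch so the encoder handed to the pipeline no
    # longer carries the injected LoRA layers.
    LoraLoaderMixin._remove_text_encoder_monkey_patch_classmethod(text_encoder)
    return lora_params

In the SDXL test the same three steps run twice, once per text encoder, via `StableDiffusionXLLoraLoaderMixin`, yielding `text_encoder_lora_params` and `text_encoder_two_lora_params`.
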
@@ -1446,7 +1445,7 @@ class SDXLLoraLoaderMixinTests(unittest.TestCase):
         with tempfile.TemporaryDirectory() as tmpdirname:
             sd_pipe.save_pretrained(tmpdirname)
-            sd_pipe_loaded = StableDiffusionXLPipeline.from_pretrained(tmpdirname)
+            sd_pipe_loaded = StableDiffusionXLPipeline.from_pretrained(tmpdirname).to(torch_device)
             loaded_lora_images = sd_pipe_loaded(**pipeline_inputs, generator=torch.manual_seed(0)).images
             loaded_lora_image_slice = loaded_lora_images[0, -3:, -3:, -1]
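
The last hunk moves the reloaded pipeline onto `torch_device`; `from_pretrained` returns a CPU pipeline by default, so without `.to(torch_device)` the reloaded copy would not run on the same device as the original `sd_pipe`. A minimal sketch of the save/reload round trip, assuming `sd_pipe`, `pipeline_inputs`, `torch_device`, and an original `lora_image_slice` exist as in the surrounding test; the tolerance is illustrative:

import tempfile

import numpy as np
import torch
from diffusers import StableDiffusionXLPipeline

with tempfile.TemporaryDirectory() as tmpdirname:
    # Serialize the current pipeline state to disk.
    sd_pipe.save_pretrained(tmpdirname)
    # Reload and move to the same device as the original pipeline;
    # from_pretrained places the pipeline on CPU by default.
    sd_pipe_loaded = StableDiffusionXLPipeline.from_pretrained(tmpdirname).to(torch_device)
    loaded_lora_images = sd_pipe_loaded(**pipeline_inputs, generator=torch.manual_seed(0)).images
    loaded_lora_image_slice = loaded_lora_images[0, -3:, -3:, -1]

# With a fixed seed the reloaded pipeline should reproduce the original slice
# up to numerical tolerance.
assert np.allclose(lora_image_slice, loaded_lora_image_slice, atol=1e-3)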