Compare commits

...

2 Commits

Author      SHA1        Message                                     Date
Dhruv Nair  373ea245eb  Merge branch 'main' into push-test-fixes    2024-09-02 11:14:33 +05:30
Dhruv Nair  377dbb302c  update                                      2024-08-28 12:02:52 +00:00
5 changed files with 12 additions and 0 deletions


@@ -417,6 +417,9 @@ class ModelTesterMixin:
 
     @require_torch_gpu
     def test_set_attn_processor_for_determinism(self):
+        if self.uses_custom_attn_processor:
+            return
+
         torch.use_deterministic_algorithms(False)
         if self.forward_requires_fresh_args:
             model = self.model_class(**self.init_dict)
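The new guard depends on a uses_custom_attn_processor flag exposed at class level by the tester mixin and overridden by individual model test classes (as the Flux transformer hunk below does). A minimal sketch of that opt-out pattern, assuming the mixin defaults the flag to False and eliding the actual determinism checks:

import unittest


class ModelTesterMixin:
    # Assumed default: models using the standard AttnProcessor still run the check.
    uses_custom_attn_processor = False

    def test_set_attn_processor_for_determinism(self):
        # Models that ship a custom attention processor skip the
        # default-AttnProcessor determinism comparison entirely.
        if self.uses_custom_attn_processor:
            return
        # ... determinism checks with the default attention processors would follow ...


class DummyModelTests(ModelTesterMixin, unittest.TestCase):
    # Hypothetical subclass opting out, mirroring the Flux transformer change below.
    uses_custom_attn_processor = True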


@@ -32,6 +32,9 @@ class FluxTransformerTests(ModelTesterMixin, unittest.TestCase):
     # We override the items here because the transformer under consideration is small.
     model_split_percents = [0.7, 0.6, 0.6]
 
+    # Skip setting testing with default: AttnProcessor
+    uses_custom_attn_processor = True
+
     @property
     def dummy_input(self):
         batch_size = 1


@@ -25,6 +25,9 @@ class FluxPipelineFastTests(unittest.TestCase, PipelineTesterMixin):
     params = frozenset(["prompt", "height", "width", "guidance_scale", "prompt_embeds", "pooled_prompt_embeds"])
     batch_params = frozenset(["prompt"])
 
+    # there is no xformers processor for Flux
+    test_xformers_attention = False
+
     def get_dummy_components(self):
         torch.manual_seed(0)
         transformer = FluxTransformer2DModel(
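This hunk and the two that follow (SD3 PAG and StableAudio) all set the same PipelineTesterMixin flag. A minimal sketch of how such a class-level flag would typically gate the shared xformers test; the flag name comes from the diff, while the default value, test name, and skip mechanics are assumptions:

import unittest


class PipelineTesterMixin:
    # Assumed default: pipelines with standard attention processors exercise the xformers path.
    test_xformers_attention = True

    def test_xformers_attention_forward_pass(self):
        # Pipelines without an xformers-compatible attention processor opt out via the flag.
        if not self.test_xformers_attention:
            self.skipTest("xformers attention is not available for this pipeline")
        # ... enable xformers attention and compare outputs against the default processor ...


class DummyPipelineFastTests(unittest.TestCase, PipelineTesterMixin):
    # Hypothetical pipeline test opting out, mirroring the three hunks in this compare.
    test_xformers_attention = False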


@@ -37,6 +37,7 @@ class StableDiffusion3PAGPipelineFastTests(unittest.TestCase, PipelineTesterMixi
         ]
     )
     batch_params = frozenset(["prompt", "negative_prompt"])
+    test_xformers_attention = False
 
     def get_dummy_components(self):
         torch.manual_seed(0)


@@ -68,6 +68,8 @@ class StableAudioPipelineFastTests(PipelineTesterMixin, unittest.TestCase):
             "callback_steps",
         ]
     )
+    # There is not xformers version of the StableAudioPipeline custom attention processor
+    test_xformers_attention = False
 
     def get_dummy_components(self):
         torch.manual_seed(0)