Mirror of https://github.com/huggingface/diffusers.git (synced 2025-12-12 23:44:30 +08:00)

Compare commits: cp-fixes-a ... pr-tests-f (3 commits)
| Author | SHA1 | Date |
|---|---|---|
| | b3ab9c3f24 | |
| | a0119978f8 | |
| | a4f7080bb4 | |
```diff
@@ -21,11 +21,9 @@ import numpy as np
 import pytest
 import torch
 from transformers import (
-    ClapAudioConfig,
     ClapConfig,
     ClapFeatureExtractor,
     ClapModel,
-    ClapTextConfig,
     GPT2Config,
     GPT2LMHeadModel,
     RobertaTokenizer,
@@ -111,33 +109,33 @@ class AudioLDM2PipelineFastTests(PipelineTesterMixin, unittest.TestCase):
             latent_channels=4,
         )
         torch.manual_seed(0)
-        text_branch_config = ClapTextConfig(
-            bos_token_id=0,
-            eos_token_id=2,
-            hidden_size=8,
-            intermediate_size=37,
-            layer_norm_eps=1e-05,
-            num_attention_heads=1,
-            num_hidden_layers=1,
-            pad_token_id=1,
-            vocab_size=1000,
-            projection_dim=8,
-        )
-        audio_branch_config = ClapAudioConfig(
-            spec_size=8,
-            window_size=4,
-            num_mel_bins=8,
-            intermediate_size=37,
-            layer_norm_eps=1e-05,
-            depths=[1, 1],
-            num_attention_heads=[1, 1],
-            num_hidden_layers=1,
-            hidden_size=192,
-            projection_dim=8,
-            patch_size=2,
-            patch_stride=2,
-            patch_embed_input_channels=4,
-        )
+        text_branch_config = {
+            "bos_token_id": 0,
+            "eos_token_id": 2,
+            "hidden_size": 8,
+            "intermediate_size": 37,
+            "layer_norm_eps": 1e-05,
+            "num_attention_heads": 1,
+            "num_hidden_layers": 1,
+            "pad_token_id": 1,
+            "vocab_size": 1000,
+            "projection_dim": 8,
+        }
+        audio_branch_config = {
+            "spec_size": 8,
+            "window_size": 4,
+            "num_mel_bins": 8,
+            "intermediate_size": 37,
+            "layer_norm_eps": 1e-05,
+            "depths": [1, 1],
+            "num_attention_heads": [1, 1],
+            "num_hidden_layers": 1,
+            "hidden_size": 192,
+            "projection_dim": 8,
+            "patch_size": 2,
+            "patch_stride": 2,
+            "patch_embed_input_channels": 4,
+        }
         text_encoder_config = ClapConfig(
             text_config=text_branch_config, audio_config=audio_branch_config, projection_dim=16
         )
```
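The hunk above drops the `ClapTextConfig`/`ClapAudioConfig` objects and passes plain dicts straight to `ClapConfig`. As a quick illustration of why that works (a minimal sketch, assuming a recent `transformers` release; the variable names and the specific keys shown are illustrative, not copied from the test file):

```python
# Minimal sketch: ClapConfig promotes plain dicts into its sub-config classes,
# so the test no longer needs to import ClapTextConfig/ClapAudioConfig directly.
from transformers import ClapConfig

text_branch_config = {"hidden_size": 8, "num_attention_heads": 1, "num_hidden_layers": 1}
audio_branch_config = {"spec_size": 8, "window_size": 4, "num_mel_bins": 8}

config = ClapConfig(
    text_config=text_branch_config,
    audio_config=audio_branch_config,
    projection_dim=16,
)

# The dicts come back as fully-fledged config objects.
print(type(config.text_config).__name__)   # ClapTextConfig
print(type(config.audio_config).__name__)  # ClapAudioConfig
```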
```diff
@@ -23,7 +23,7 @@ from diffusers import (
     KandinskyV22InpaintCombinedPipeline,
 )
 
-from ...testing_utils import enable_full_determinism, require_torch_accelerator, torch_device
+from ...testing_utils import enable_full_determinism, require_accelerator, require_torch_accelerator, torch_device
 from ..test_pipelines_common import PipelineTesterMixin
 from .test_kandinsky import Dummies
 from .test_kandinsky_img2img import Dummies as Img2ImgDummies
@@ -402,6 +402,7 @@ class KandinskyV22PipelineInpaintCombinedFastTests(PipelineTesterMixin, unittest
     def test_save_load_optional_components(self):
         super().test_save_load_optional_components(expected_max_difference=5e-4)
 
+    @require_accelerator
     def test_sequential_cpu_offload_forward_pass(self):
         super().test_sequential_cpu_offload_forward_pass(expected_max_diff=5e-4)
 
```
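This hunk and the remaining ones gate the sequential CPU-offload tests behind `require_accelerator`, so CPU-only runners skip them instead of failing. Roughly, such a marker is a skip decorator keyed on the selected torch device; the sketch below is an assumed, simplified stand-in (the `torch_device` helper and the decorator body are illustrative, not diffusers' actual `testing_utils` implementation):

```python
# Hypothetical, simplified stand-in for a skip marker like require_accelerator.
# The device-selection logic below is an assumption for this sketch only.
import unittest

import torch

torch_device = "cuda" if torch.cuda.is_available() else "cpu"


def require_accelerator(test_case):
    """Skip the decorated test when only the CPU is available."""
    return unittest.skipUnless(torch_device != "cpu", "test requires a hardware accelerator")(test_case)


class ExampleOffloadTests(unittest.TestCase):
    @require_accelerator
    def test_sequential_cpu_offload_forward_pass(self):
        # Sequential CPU offload shuttles submodules between the accelerator
        # and the CPU, so running it on a CPU-only machine proves nothing.
        self.assertNotEqual(torch_device, "cpu")


if __name__ == "__main__":
    unittest.main()
```

On a CPU-only machine the decorated test is reported as skipped rather than failing partway through the offload hooks.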
```diff
@@ -37,6 +37,7 @@ from ...testing_utils import (
     load_image,
     load_numpy,
     numpy_cosine_similarity_distance,
+    require_accelerator,
     require_torch_accelerator,
     slow,
     torch_device,
@@ -254,6 +255,7 @@ class KandinskyV22InpaintPipelineFastTests(PipelineTesterMixin, unittest.TestCas
     def test_save_load_optional_components(self):
         super().test_save_load_optional_components(expected_max_difference=5e-4)
 
+    @require_accelerator
     def test_sequential_cpu_offload_forward_pass(self):
         super().test_sequential_cpu_offload_forward_pass(expected_max_diff=5e-4)
 
```
```diff
@@ -37,6 +37,7 @@ from ...testing_utils import (
     floats_tensor,
     load_image,
     load_numpy,
+    require_accelerator,
     require_torch_accelerator,
     slow,
     torch_device,
@@ -222,6 +223,7 @@ class StableDiffusionLatentUpscalePipelineFastTests(
     def test_attention_slicing_forward_pass(self):
         super().test_attention_slicing_forward_pass(expected_max_diff=7e-3)
 
+    @require_accelerator
     def test_sequential_cpu_offload_forward_pass(self):
         super().test_sequential_cpu_offload_forward_pass(expected_max_diff=3e-3)
 
```