Add decorator for compile tests (#8703)

* update

* update

---------

Co-authored-by: Sayak Paul <spsayakpaul@gmail.com>

Author: Dhruv Nair
Date:   2024-06-26 11:26:47 +05:30 (committed by GitHub)
Commit: 0f0b531827
Parent: e8284281c1

9 changed files with 27 additions and 23 deletions


@@ -330,6 +330,7 @@ jobs:
       - name: Run example tests on GPU
         env:
           HF_TOKEN: ${{ secrets.HF_TOKEN }}
+          RUN_COMPILE: yes
         run: |
           python -m pytest -n 1 --max-worker-restart=0 --dist=loadfile -s -v -k "compile" --make-reports=tests_torch_compile_cuda tests/
       - name: Failure short reports
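
The workflow change above only exports RUN_COMPILE: yes; the gating itself lives in the test suite, which reads the variable through parse_flag_from_env (the next file in this commit adds _run_compile_tests = parse_flag_from_env("RUN_COMPILE", default=False)). As a rough sketch of what that kind of helper does (not part of this commit, and the real implementation in diffusers.utils.testing_utils may differ in detail), assuming yes/no-style parsing:

import os


def parse_compile_flag(key="RUN_COMPILE", default=False):
    # Sketch only: an unset variable falls back to the default, common
    # truthy/falsy spellings are accepted, anything else is rejected loudly.
    value = os.environ.get(key)
    if value is None:
        return default
    value = value.strip().lower()
    if value in ("yes", "true", "t", "y", "1"):
        return True
    if value in ("no", "false", "f", "n", "0"):
        return False
    raise ValueError(f"If set, {key} must be yes or no.")


_run_compile_tests = parse_compile_flag()  # True under this CI job

Locally, the same subset can be exercised by exporting RUN_COMPILE=yes before running the pytest command from the step above.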


@@ -187,6 +187,7 @@ def parse_flag_from_env(key, default=False):
 _run_slow_tests = parse_flag_from_env("RUN_SLOW", default=False)
 _run_nightly_tests = parse_flag_from_env("RUN_NIGHTLY", default=False)
+_run_compile_tests = parse_flag_from_env("RUN_COMPILE", default=False)


 def floats_tensor(shape, scale=1.0, rng=None, name=None):

@@ -225,6 +226,16 @@ def nightly(test_case):
     return unittest.skipUnless(_run_nightly_tests, "test is nightly")(test_case)


+def is_torch_compile(test_case):
+    """
+    Decorator marking a test that runs compile tests in the diffusers CI.
+
+    Compile tests are skipped by default. Set the RUN_COMPILE environment variable to a truthy value to run them.
+    """
+    return unittest.skipUnless(_run_compile_tests, "test is torch compile")(test_case)
+
+
 def require_torch(test_case):
     """
     Decorator marking a test that requires PyTorch. These tests are skipped when PyTorch isn't installed.

@@ -390,14 +401,6 @@ def get_python_version():
     return major, minor


-def require_python39_or_higher(test_case):
-    def python39_available():
-        major, minor = get_python_version()
-        return major == 3 and minor >= 9
-
-    return unittest.skipUnless(python39_available(), "test requires Python 3.9 or higher")(test_case)
-
-
 def load_numpy(arry: Union[str, np.ndarray], local_path: Optional[str] = None) -> np.ndarray:
     if isinstance(arry, str):
         if local_path is not None:
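
With the decorator in place, gating a compile test is a matter of stacking it on the test method, exactly as the pipeline and model test diffs below do. A minimal usage sketch (not part of this commit; the test class and method names are hypothetical, only is_torch_compile and require_torch_2 come from diffusers.utils.testing_utils as shown in these hunks):

import unittest

from diffusers.utils.testing_utils import is_torch_compile, require_torch_2


class ExampleCompileTests(unittest.TestCase):
    # Hypothetical test case, shown only to illustrate how the new marker
    # composes with the existing require_torch_2 one.
    @is_torch_compile
    @require_torch_2
    def test_model_compiles(self):
        # Always collected, but reported as skipped ("test is torch compile")
        # unless RUN_COMPILE is set to a truthy value; require_torch_2
        # additionally requires PyTorch >= 2.0.
        ...

Compared with the removed require_python39_or_higher gate, the compile tests are now opted in explicitly through the environment rather than keyed off the Python version.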


@@ -43,7 +43,7 @@ from diffusers.utils import SAFE_WEIGHTS_INDEX_NAME, is_torch_npu_available, is_
 from diffusers.utils.testing_utils import (
     CaptureLogger,
     get_python_version,
-    require_python39_or_higher,
+    is_torch_compile,
     require_torch_2,
     require_torch_accelerator_with_training,
     require_torch_gpu,

@@ -512,7 +512,7 @@ class ModelTesterMixin:
         max_diff = (image - new_image).abs().max().item()
         self.assertLessEqual(max_diff, expected_max_diff, "Models give different forward passes")

-    @require_python39_or_higher
+    @is_torch_compile
     @require_torch_2
     @unittest.skipIf(
         get_python_version == (3, 12),


@@ -36,9 +36,9 @@ from diffusers.utils.import_utils import is_xformers_available
 from diffusers.utils.testing_utils import (
     enable_full_determinism,
     get_python_version,
+    is_torch_compile,
     load_image,
     load_numpy,
-    require_python39_or_higher,
     require_torch_2,
     require_torch_gpu,
     run_test_in_subprocess,

@@ -1022,7 +1022,7 @@ class ControlNetPipelineSlowTests(unittest.TestCase):
         expected_slice = np.array([0.1655, 0.1721, 0.1623, 0.1685, 0.1711, 0.1646, 0.1651, 0.1631, 0.1494])
         assert np.abs(image_slice.flatten() - expected_slice).max() < 1e-2

-    @require_python39_or_higher
+    @is_torch_compile
     @require_torch_2
     @unittest.skipIf(
         get_python_version == (3, 12),


@@ -35,9 +35,9 @@ from diffusers import (
 from diffusers.utils.import_utils import is_xformers_available
 from diffusers.utils.testing_utils import (
     enable_full_determinism,
+    is_torch_compile,
     load_image,
     load_numpy,
-    require_python39_or_higher,
     require_torch_2,
     require_torch_gpu,
     run_test_in_subprocess,

@@ -392,7 +392,7 @@ class ControlNetXSPipelineSlowTests(unittest.TestCase):
         expected_image = np.array([0.4844, 0.4937, 0.4956, 0.4663, 0.5039, 0.5044, 0.4565, 0.4883, 0.4941])
         assert np.allclose(original_image, expected_image, atol=1e-04)

-    @require_python39_or_higher
+    @is_torch_compile
     @require_torch_2
     def test_stable_diffusion_compile(self):
         run_test_in_subprocess(test_case=self, target_func=_test_stable_diffusion_compile, inputs=None)


@@ -45,12 +45,12 @@ from diffusers import (
 from diffusers.utils.testing_utils import (
     CaptureLogger,
     enable_full_determinism,
+    is_torch_compile,
     load_image,
     load_numpy,
     nightly,
     numpy_cosine_similarity_distance,
     require_accelerate_version_greater,
-    require_python39_or_higher,
     require_torch_2,
     require_torch_gpu,
     require_torch_multi_gpu,

@@ -1282,7 +1282,7 @@ class StableDiffusionPipelineSlowTests(unittest.TestCase):
         max_diff = np.abs(expected_image - image).max()
         assert max_diff < 8e-1

-    @require_python39_or_higher
+    @is_torch_compile
     @require_torch_2
     def test_stable_diffusion_compile(self):
         seed = 0


@@ -37,10 +37,10 @@ from diffusers import (
 from diffusers.utils.testing_utils import (
     enable_full_determinism,
     floats_tensor,
+    is_torch_compile,
     load_image,
     load_numpy,
     nightly,
-    require_python39_or_higher,
     require_torch_2,
     require_torch_gpu,
     run_test_in_subprocess,

@@ -643,7 +643,7 @@ class StableDiffusionImg2ImgPipelineSlowTests(unittest.TestCase):
         assert out.nsfw_content_detected[0], f"Safety checker should work for prompt: {inputs['prompt']}"
         assert np.abs(out.images[0]).sum() < 1e-5  # should be all zeros

-    @require_python39_or_higher
+    @is_torch_compile
     @require_torch_2
     def test_img2img_compile(self):
         seed = 0


@@ -39,10 +39,10 @@ from diffusers import (
 from diffusers.utils.testing_utils import (
     enable_full_determinism,
     floats_tensor,
+    is_torch_compile,
     load_image,
     load_numpy,
     nightly,
-    require_python39_or_higher,
     require_torch_2,
     require_torch_gpu,
     run_test_in_subprocess,

@@ -715,7 +715,7 @@ class StableDiffusionInpaintPipelineSlowTests(unittest.TestCase):
         # make sure that less than 2.2 GB is allocated
         assert mem_bytes < 2.2 * 10**9

-    @require_python39_or_higher
+    @is_torch_compile
     @require_torch_2
     def test_inpaint_compile(self):
         seed = 0

@@ -920,7 +920,7 @@ class StableDiffusionInpaintPipelineAsymmetricAutoencoderKLSlowTests(unittest.Te
         # make sure that less than 2.45 GB is allocated
         assert mem_bytes < 2.45 * 10**9

-    @require_python39_or_higher
+    @is_torch_compile
     @require_torch_2
     def test_inpaint_compile(self):
         pass


@@ -69,12 +69,12 @@ from diffusers.utils.testing_utils import (
     floats_tensor,
     get_python_version,
     get_tests_dir,
+    is_torch_compile,
     load_numpy,
     nightly,
     require_compel,
     require_flax,
     require_onnxruntime,
-    require_python39_or_higher,
     require_torch_2,
     require_torch_gpu,
     run_test_in_subprocess,

@@ -1761,7 +1761,7 @@ class PipelineSlowTests(unittest.TestCase):
         assert np.abs(image - new_image).max() < 1e-5, "Models don't give the same forward pass"

-    @require_python39_or_higher
+    @is_torch_compile
     @require_torch_2
     @unittest.skipIf(
         get_python_version == (3, 12),