From db2d7e7bc455fb4fb11708ffacb2d63856938ca8 Mon Sep 17 00:00:00 2001
From: Dhruv Nair
Date: Fri, 20 Feb 2026 09:01:20 +0530
Subject: [PATCH] [CI] Fix new LoRAHotswap tests (#13163)

update

Co-authored-by: Sayak Paul
---
 tests/models/testing_utils/lora.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/tests/models/testing_utils/lora.py b/tests/models/testing_utils/lora.py
index 994aaed55c..dfdc4835ee 100644
--- a/tests/models/testing_utils/lora.py
+++ b/tests/models/testing_utils/lora.py
@@ -375,7 +375,7 @@ class LoraHotSwappingForModelTesterMixin:
         # additionally check if dynamic compilation works.
         if different_shapes is not None:
             for height, width in different_shapes:
-                new_inputs_dict = self.prepare_dummy_input(height=height, width=width)
+                new_inputs_dict = self.get_dummy_inputs(height=height, width=width)
                 _ = model(**new_inputs_dict)
         else:
             output0_after = model(**inputs_dict)["sample"]
@@ -390,7 +390,7 @@ class LoraHotSwappingForModelTesterMixin:
         with torch.inference_mode():
             if different_shapes is not None:
                 for height, width in different_shapes:
-                    new_inputs_dict = self.prepare_dummy_input(height=height, width=width)
+                    new_inputs_dict = self.get_dummy_inputs(height=height, width=width)
                     _ = model(**new_inputs_dict)
             else:
                 output1_after = model(**inputs_dict)["sample"]