
Commit e126a82

[Tests] Speed up panorama tests (#3067)
* fix: norm group test for UNet3D.
* chore: speed up the panorama tests (fast).
* set default value of _test_inference_batch_single_identical.
* fix: batch_sizes default value.
1 parent: e753454

2 files changed: +19 -10 lines changed

tests/pipelines/stable_diffusion/test_stable_diffusion_panorama.py

Lines changed: 13 additions & 5 deletions
@@ -49,7 +49,7 @@ def get_dummy_components(self):
         torch.manual_seed(0)
         unet = UNet2DConditionModel(
             block_out_channels=(32, 64),
-            layers_per_block=2,
+            layers_per_block=1,
             sample_size=32,
             in_channels=4,
             out_channels=4,
@@ -101,7 +101,7 @@ def get_dummy_inputs(self, device, seed=0):
             # Setting height and width to None to prevent OOMs on CPU.
             "height": None,
             "width": None,
-            "num_inference_steps": 2,
+            "num_inference_steps": 1,
             "guidance_scale": 6.0,
             "output_type": "numpy",
         }
@@ -119,10 +119,18 @@ def test_stable_diffusion_panorama_default_case(self):
         image_slice = image[0, -3:, -3:, -1]
         assert image.shape == (1, 64, 64, 3)

-        expected_slice = np.array([0.4794, 0.5084, 0.4992, 0.3941, 0.3555, 0.4754, 0.5248, 0.5224, 0.4839])
+        expected_slice = np.array([0.6186, 0.5374, 0.4915, 0.4135, 0.4114, 0.4563, 0.5128, 0.4977, 0.4757])

         assert np.abs(image_slice.flatten() - expected_slice).max() < 1e-2

+    # override to speed the overall test timing up.
+    def test_inference_batch_consistent(self):
+        super().test_inference_batch_consistent(batch_sizes=[1, 2])
+
+    # override to speed the overall test timing up.
+    def test_inference_batch_single_identical(self):
+        super().test_inference_batch_single_identical(batch_size=2)
+
     def test_stable_diffusion_panorama_negative_prompt(self):
         device = "cpu"  # ensure determinism for the device-dependent torch.Generator
         components = self.get_dummy_components()
@@ -138,7 +146,7 @@ def test_stable_diffusion_panorama_negative_prompt(self):

         assert image.shape == (1, 64, 64, 3)

-        expected_slice = np.array([0.5029, 0.5075, 0.5002, 0.3965, 0.3584, 0.4746, 0.5271, 0.5273, 0.4877])
+        expected_slice = np.array([0.6187, 0.5375, 0.4915, 0.4136, 0.4114, 0.4563, 0.5128, 0.4976, 0.4757])

         assert np.abs(image_slice.flatten() - expected_slice).max() < 1e-2
@@ -158,7 +166,7 @@ def test_stable_diffusion_panorama_euler(self):

         assert image.shape == (1, 64, 64, 3)

-        expected_slice = np.array([0.4934, 0.5455, 0.4847, 0.5022, 0.5572, 0.4833, 0.5207, 0.4952, 0.5051])
+        expected_slice = np.array([0.4886, 0.5586, 0.4476, 0.5053, 0.6013, 0.4737, 0.5538, 0.5100, 0.4927])

         assert np.abs(image_slice.flatten() - expected_slice).max() < 1e-2
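The two overrides added above rely on the base test methods accepting their batch sizes as parameters (the second file in this commit). A minimal, runnable sketch of that pattern; the mixin's internals and the FastPanoramaTests/run_pipeline names are hypothetical stand-ins, not actual diffusers code:

import unittest


class PipelineTesterMixin:
    # the mixin's batch test takes its batch sizes as a parameter,
    # so subclasses can shrink the workload without rewriting the test
    def test_inference_batch_consistent(self, batch_sizes=[2, 4, 13]):
        for batch_size in batch_sizes:
            # stand-in for the real batch-consistency check
            self.assertEqual(len(self.run_pipeline(batch_size)), batch_size)


class FastPanoramaTests(PipelineTesterMixin, unittest.TestCase):
    def run_pipeline(self, batch_size):
        # hypothetical: pretend one image per prompt was generated
        return ["image"] * batch_size

    # override to run fewer, smaller batches, as the commit above does
    def test_inference_batch_consistent(self):
        super().test_inference_batch_consistent(batch_sizes=[1, 2])


if __name__ == "__main__":
    unittest.main()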
tests/test_pipelines_common.py

Lines changed: 6 additions & 5 deletions
@@ -175,8 +175,8 @@ def test_pipeline_call_signature(self):
             f"Required optional parameters not present: {remaining_required_optional_parameters}",
         )

-    def test_inference_batch_consistent(self):
-        self._test_inference_batch_consistent()
+    def test_inference_batch_consistent(self, batch_sizes=[2, 4, 13]):
+        self._test_inference_batch_consistent(batch_sizes=batch_sizes)

     def _test_inference_batch_consistent(
         self, batch_sizes=[2, 4, 13], additional_params_copy_to_batched_inputs=["num_inference_steps"]
@@ -235,11 +235,12 @@ def _test_inference_batch_consistent(

         logger.setLevel(level=diffusers.logging.WARNING)

-    def test_inference_batch_single_identical(self):
-        self._test_inference_batch_single_identical()
+    def test_inference_batch_single_identical(self, batch_size=3):
+        self._test_inference_batch_single_identical(batch_size=batch_size)

     def _test_inference_batch_single_identical(
         self,
+        batch_size=3,
         test_max_difference=None,
         test_mean_pixel_difference=None,
         relax_max_difference=False,
@@ -267,7 +268,7 @@ def _test_inference_batch_single_identical(

         # batchify inputs
         batched_inputs = {}
-        batch_size = 3
+        batch_size = batch_size
         for name, value in inputs.items():
             if name in self.batch_params:
                 # prompt is string
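For context, the "batchify inputs" step that the new batch_size parameter feeds can be sketched as follows. This is a simplified stand-in, not the real fixture logic: batchify, batch_params, and the example inputs are hypothetical, assuming scalar entries listed in batch_params are simply replicated batch_size times:

import copy


def batchify(inputs, batch_params, batch_size=3):
    batched_inputs = {}
    for name, value in inputs.items():
        if name in batch_params:
            # prompt is a string: repeat it batch_size times to form a batch
            batched_inputs[name] = batch_size * [value]
        else:
            # non-batched params (e.g. num_inference_steps) are copied over
            batched_inputs[name] = copy.copy(value)
    return batched_inputs


# e.g. the fast panorama override runs with batch_size=2
print(batchify({"prompt": "a photo", "num_inference_steps": 1}, {"prompt"}, batch_size=2))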
