diff --git a/.github/workflows/test_models.yml b/.github/workflows/test_models.yml
index 67c7054a..a46fc9fe 100644
--- a/.github/workflows/test_models.yml
+++ b/.github/workflows/test_models.yml
@@ -77,8 +77,8 @@ jobs:
           source turbine_venv/bin/activate

           pytest -v models/turbine_models/tests/sd_test.py
-          pytest -v models/turbine_models/tests/sdxl_test.py --device cpu --rt_device local-task --iree_target_triple x86_64-linux-gnu --num_inference_steps 2 -x
-          pytest -v models/turbine_models/tests/sdxl_test.py --device rocm --rt_device hip --iree_target_triple gfx90a --precision fp16 --attn_spec default -x
-          pytest -v models/turbine_models/tests/sdxl_test.py --device rocm --rt_device hip --iree_target_triple gfx90a --precision fp16 --clip_spec None --unet_spec None --vae_spec None --batch_size 2 -x
+          pytest -v models/turbine_models/tests/sdxl_test.py --device cpu --rt_device local-task --iree_target_triple x86_64-linux-gnu --num_inference_steps 2 -x -s
+          pytest -v models/turbine_models/tests/sdxl_test.py --device rocm --rt_device hip --iree_target_triple gfx90a --precision fp16 -x -s
+          pytest -v models/turbine_models/tests/sdxl_test.py --device rocm --rt_device hip --iree_target_triple gfx90a --precision fp16 --batch_size 2 -x
           pytest -v models/turbine_models/tests/sd3_test.py --device cpu --rt_device local-task --iree_target_triple x86_64-linux-gnu --num_inference_steps 2 -x

diff --git a/models/turbine_models/tests/sdxl_test.py b/models/turbine_models/tests/sdxl_test.py
index 6eba5b8e..3868f919 100644
--- a/models/turbine_models/tests/sdxl_test.py
+++ b/models/turbine_models/tests/sdxl_test.py
@@ -93,11 +93,7 @@ def test00_sdxl_pipe(self):
         decomp_attn = {
             "text_encoder": True,
             "unet": False,
-            "vae": (
-                False
-                if any(x in arguments["device"] for x in ["hip", "rocm"])
-                else True
-            ),
+            "vae": False,
         }
         self.pipe = SharkSDPipeline(
             arguments["hf_model_name"],
@@ -135,8 +131,12 @@ def test00_sdxl_pipe(self):
             True,  # return_img
         )
         assert output is not None
+        del output
+        del self.pipe

     def test01_sdxl_pipe_i8_punet(self):
+        if arguments["device"] not in ["rocm", "hip"]:
+            self.skipTest("Currently unimplemented/pending validation")
         from turbine_models.custom_models.sd_inference.sd_pipeline import (
             SharkSDPipeline,
         )
@@ -187,6 +187,8 @@ def test01_sdxl_pipe_i8_punet(self):
             True,  # return_img
         )
         assert output is not None
+        del output
+        del self.pipe

     def test02_PromptEncoder(self):
         if arguments["device"] in ["vulkan", "cuda"]: