Skip to content

Commit

Permalink
Remove skips on submodels for rocm.
Browse files Browse the repository at this point in the history
  • Loading branch information
monorimet committed Apr 11, 2024
1 parent bb2c7e0 commit 12b91f4
Showing 1 changed file with 3 additions and 3 deletions.
6 changes: 3 additions & 3 deletions models/turbine_models/tests/sdxl_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -93,7 +93,7 @@ def setUp(self):
)

def test01_ExportClipModels(self):
if arguments["device"] in ["vulkan", "rocm", "cuda"]:
if arguments["device"] in ["vulkan", "cuda"]:
self.skipTest(

This comment has been minimized.

Copy link
@IanNod

IanNod Apr 11, 2024

Contributor

Instead of skipping these tests, can we just xfail them?

This comment has been minimized.

Copy link
@monorimet

monorimet Apr 11, 2024

Author Contributor

We can set up a runner for vulkan in a follow-up, but since we don't have runners for these backends I think it's better to skip them unless we want to test compilation?

"Compilation error on vulkan; Runtime error on rocm; To be tested on cuda."
)
Expand Down Expand Up @@ -215,7 +215,7 @@ def test01_ExportClipModels(self):
np.testing.assert_allclose(torch_output_2, turbine_2[0], rtol, atol)

def test02_ExportUnetModel(self):
if arguments["device"] in ["vulkan", "rocm", "cuda"]:
if arguments["device"] in ["vulkan", "cuda"]:
self.skipTest(
"Unknown error on vulkan; Runtime error on rocm; To be tested on cuda."
)
Expand Down Expand Up @@ -325,7 +325,7 @@ def test02_ExportUnetModel(self):
np.testing.assert_allclose(torch_output, turbine, rtol, atol)

def test03_ExportVaeModelDecode(self):
if arguments["device"] in ["vulkan", "cuda", "rocm"]:
if arguments["device"] in ["vulkan", "cuda"]:
self.skipTest(
"Compilation error on vulkan; Runtime error on rocm; To be tested on cuda."
)
Expand Down

0 comments on commit 12b91f4

Please sign in to comment.