From 9fc1c7888267f74bb918881f9073846460bfadaa Mon Sep 17 00:00:00 2001
From: Tom Fogal <60981+tfogal@users.noreply.github.com>
Date: Thu, 17 Oct 2024 15:22:36 -0700
Subject: [PATCH] Temporarily disable some inductor tests on windows

These tests require that the `cl` compiler is available and this may take
some setup.
---
 thunder/tests/test_autocast.py               | 5 ++++-
 thunder/tests/test_torch_compile_executor.py | 5 ++++-
 2 files changed, 8 insertions(+), 2 deletions(-)

diff --git a/thunder/tests/test_autocast.py b/thunder/tests/test_autocast.py
index 5e0c345ee7..2d02ca92d4 100644
--- a/thunder/tests/test_autocast.py
+++ b/thunder/tests/test_autocast.py
@@ -1,4 +1,5 @@
 import itertools
+import platform
 
 import pytest
 import torch
@@ -140,7 +141,9 @@ def func(a, b):
     assert output.dtype == (torch.float16 if torch_device.type == "cuda" else torch.bfloat16)
 
 
-@pytest.mark.skipif(not is_inductor_supported(), reason="inductor unsupported")
+# Disabling on windows temporarily, until our windows runners source the
+# appropriate visual studio config.
+@pytest.mark.skipif(not is_inductor_supported() or platform.system() == "Windows", reason="inductor unsupported")
 def test_torch_compile_autocast():
     """Checks if our autocast decorator plays well with ``torch.compile``"""
 
diff --git a/thunder/tests/test_torch_compile_executor.py b/thunder/tests/test_torch_compile_executor.py
index eac99957e3..c0bd1b351d 100644
--- a/thunder/tests/test_torch_compile_executor.py
+++ b/thunder/tests/test_torch_compile_executor.py
@@ -1,3 +1,4 @@
+import platform
 import pytest
 import torch
 from torch._dynamo import is_inductor_supported
@@ -15,7 +16,9 @@ def test_supported_ops_are_in_pytorch_executor():
     assert supported_ops - pytorch_ex.implmap.keys() == set()
 
 
-@pytest.mark.skipif(not is_inductor_supported(), reason="inductor unsupported")
+# Disabling on windows temporarily, until our windows runners source the
+# appropriate visual studio config.
+@pytest.mark.skipif(not is_inductor_supported() or platform.system() == "Windows", reason="inductor unsupported")
 def test_torch_compile_litgpt():
     from litgpt.model import GPT