Enable some tests on Windows (#146243)
Fixes #ISSUE_NUMBER

Pull Request resolved: https://github.com/pytorch/pytorch/pull/146243
Approved by: https://github.com/albanD
@@ -2329,12 +2329,10 @@ class TestImports(TestCase):
                 raise RuntimeError(f"Failed to import {mod_name}: {e}") from e
             self.assertTrue(inspect.ismodule(mod))
 
-    @unittest.skipIf(IS_WINDOWS, "TODO enable on Windows")
     def test_lazy_imports_are_lazy(self) -> None:
         out = self._check_python_output("import sys;import torch;print(all(x not in sys.modules for x in torch._lazy_modules))")
         self.assertEqual(out.strip(), "True")
 
-    @unittest.skipIf(IS_WINDOWS, "importing torch+CUDA on CPU results in warning")
     def test_no_warning_on_import(self) -> None:
         out = self._check_python_output("import torch")
         self.assertEqual(out, "")
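Both re-enabled tests drive a fresh interpreter through self._check_python_output. A minimal standalone sketch of that pattern (check_python_output below is a hypothetical stand-in, not the suite's actual helper):

import subprocess
import sys

def check_python_output(program: str) -> str:
    # Run the snippet in a clean child interpreter so the parent process's
    # imports and warning filters cannot influence the result.
    return subprocess.check_output(
        [sys.executable, "-W", "always", "-c", program],
        stderr=subprocess.STDOUT,
    ).decode("utf-8")

# The lazy-import probe: a plain `import torch` should not materialize any
# of the lazily-declared submodules listed in torch._lazy_modules.
out = check_python_output(
    "import sys;import torch;"
    "print(all(x not in sys.modules for x in torch._lazy_modules))"
)
assert out.strip() == "True"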
@@ -2350,7 +2348,6 @@ class TestImports(TestCase):
                            " - Use TYPE_CHECKING if you are using sympy + strings if you are using sympy on type annotations\n"
                            " - Import things that depend on SymPy locally")
 
-    @unittest.skipIf(IS_WINDOWS, "importing torch+CUDA on CPU results in warning")
     @parametrize('path', ['torch', 'functorch'])
     def test_no_mutate_global_logging_on_import(self, path) -> None:
         # Calling logging.basicConfig, among other things, modifies the global
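The parametrized test above guards against import-time mutation of global logging state. A rough standalone analogue of that check, assuming the property under test is "the root logger is untouched after import" (torch here could equally be functorch; this is not the test's exact body):

import logging
import subprocess
import sys

# Probe a fresh interpreter: after importing the library, the root logger
# must have no handlers and still sit at the default WARNING level.
probe = (
    "import logging, torch;"
    "print(len(logging.root.handlers), logging.root.level)"
)
out = subprocess.check_output([sys.executable, "-c", probe]).decode().split()
assert out == ["0", str(logging.WARNING)], f"global logging was mutated: {out}"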
@@ -36,7 +36,7 @@ from torch.testing._internal.common_optimizers import (
 
 from torch.testing._internal.common_utils import ( # type: ignore[attr-defined]
     MI300_ARCH, TEST_WITH_TORCHINDUCTOR, TEST_WITH_ROCM, run_tests, IS_JETSON,
-    IS_WINDOWS, IS_FILESYSTEM_UTF8_ENCODING, NO_MULTIPROCESSING_SPAWN,
+    IS_FILESYSTEM_UTF8_ENCODING, NO_MULTIPROCESSING_SPAWN,
     IS_SANDCASTLE, IS_FBCODE, IS_REMOTE_GPU, skipIfRocmArch, skipIfTorchInductor, load_tests, slowTest, slowTestIf,
     skipIfCrossRef, TEST_WITH_CROSSREF, skipIfTorchDynamo, skipRocmIfTorchInductor, set_default_dtype,
     skipCUDAMemoryLeakCheckIf, BytesIOContext,
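Dropping IS_WINDOWS from this import list is only safe if no references remain in the file. A quick AST-based sanity check one could run from the repo root (a hypothetical helper, not part of the PR):

import ast

def name_is_referenced(source: str, name: str) -> bool:
    # Every remaining use of the name shows up as an ast.Name node; the
    # import alias itself is an ast.alias, so it causes no false positive.
    return any(
        isinstance(node, ast.Name) and node.id == name
        for node in ast.walk(ast.parse(source))
    )

with open("test/test_torch.py") as f:
    assert not name_is_referenced(f.read(), "IS_WINDOWS")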
@@ -6111,7 +6111,6 @@ else:
         self._run_scaling_case(device.type, run, unskipped=3, skipped=1)
 
     @onlyNativeDeviceTypes
-    @unittest.skipIf(IS_WINDOWS, 'FIXME: fix this test for Windows')
     def test_grad_scaling_penalty(self, device):
         device = torch.device(device)
 
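For context, test_grad_scaling_penalty exercises gradient penalties under GradScaler, where intermediate gradients come back scaled and must be unscaled by hand before entering the loss. A minimal sketch loosely following the documented torch.amp gradient-penalty recipe, not the test's actual body (CPU with scaling disabled so it runs anywhere; on CUDA one would pass enabled=True):

import torch

model = torch.nn.Linear(4, 1)
opt = torch.optim.SGD(model.parameters(), lr=0.1)
scaler = torch.amp.GradScaler("cpu", enabled=False)

x = torch.randn(8, 4)
loss = model(x).pow(2).mean()

# create_graph=True keeps the penalty term differentiable; the grads are
# scaled, so divide by the current scale before building the penalty.
scaled_grads = torch.autograd.grad(
    scaler.scale(loss), model.parameters(), create_graph=True
)
inv_scale = 1.0 / scaler.get_scale()
penalty = sum((g * inv_scale).pow(2).sum() for g in scaled_grads)

scaler.scale(loss + penalty).backward()
scaler.step(opt)
scaler.update()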
@@ -9591,11 +9590,10 @@ tensor([[[1.+1.j, 1.+1.j, 1.+1.j, ..., 1.+1.j, 1.+1.j, 1.+1.j],
         self.assertNotEqual(output, None)
         self.assertIn('Unhandled exception caught in c10/util/AbortHandler.h', output)
 
-    # FIXME: port to a distributed test suite -- also... how could this be OOMing on Windows CUDA?
+    # FIXME: port to a distributed test suite
     @slowTest
     @unittest.skipIf(NO_MULTIPROCESSING_SPAWN, "Disabled for environments that \
 don't support multiprocessing with spawn start method")
-    @unittest.skipIf(IS_WINDOWS, 'FIXME: CUDA OOM error on Windows')
     def test_multinomial_invalid_probs(self):
         def _spawn_method(self, method, arg):
             try:
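The test re-enabled above feeds invalid probability vectors to torch.multinomial in spawned subprocesses (on CUDA the failure surfaces as a device-side assert, which would poison the parent process). A rough CPU-only analogue of the property being checked, without the spawn machinery:

import torch

for bad in (
    torch.tensor([1.0, float("nan")]),
    torch.tensor([1.0, float("inf")]),
    torch.tensor([1.0, -1.0]),
):
    try:
        torch.multinomial(bad, 2)
        raise AssertionError(f"{bad.tolist()} should have been rejected")
    except RuntimeError as e:
        # Invalid distributions must raise, never sample or crash.
        print(f"rejected {bad.tolist()}: {e}")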
@@ -41,7 +41,6 @@ from torch.testing._internal.common_methods_invocations import (
 from torch.testing._internal.common_utils import (
     gradcheck,
     is_iterable_of_tensors,
-    IS_WINDOWS,
     numpy_to_torch_dtype_dict,
     run_tests,
     skipIfNoSciPy,
@@ -549,9 +548,7 @@ class TestUnaryUfuncs(TestCase):
             x = torch.tensor(0.0 - 1.0e20j, dtype=dtype, device=device)
             self.compare_with_numpy(torch.sqrt, np.sqrt, x)
             # acos test reference: https://github.com/pytorch/pytorch/issue/42952
-            # Skip on Windows, as CUDA acos returns conjugate value
-            # see https://github.com/pytorch/pytorch/issues/52299
-            if not (IS_WINDOWS and dtype == torch.cdouble and "cuda" in device):
+            if not (dtype == torch.cdouble and "cuda" in device):
                 self.compare_with_numpy(torch.acos, np.arccos, x)
 
             x = torch.tensor(
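compare_with_numpy evaluates a torch op and its NumPy reference on the same input and asserts the results match; the remaining cdouble+CUDA exclusion reflects the acos branch-cut mismatch tracked in https://github.com/pytorch/pytorch/issues/52299. A minimal standalone version of the comparison (CPU, using the sqrt case that runs unconditionally):

import numpy as np
import torch

x = torch.tensor(0.0 - 1.0e20j, dtype=torch.cdouble)
torch_result = torch.sqrt(x)
numpy_result = torch.from_numpy(np.asarray(np.sqrt(x.numpy())))
# Both libraries should agree on the principal square root on CPU.
torch.testing.assert_close(torch_result, numpy_result)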