fix sigmoid for torch.complex datatypes on CPU (#140391)
Fixes https://github.com/pytorch/pytorch/issues/135777. The vectorized CPU implementation of `reciprocal` for complex dtypes lacks special handling for inputs whose real or imaginary component is 0, Inf, or NaN, which caused `sigmoid` to return incorrect results for such values. For correctness, this change temporarily falls back from the vectorized `reciprocal` to the scalar implementation.

Pull Request resolved: https://github.com/pytorch/pytorch/pull/140391
Approved by: https://github.com/mingfeima, https://github.com/Skylion007
ghstack dependencies: #140358
Committed by: PyTorch MergeBot
Parent: 507bf65c6a
Commit: c922ccb7c4
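Why the vectorized path goes wrong, in a minimal sketch (the helper below is illustrative, not the PR's actual kernel): the textbook componentwise formula 1/(a+bi) = (a - bi)/(a^2 + b^2) yields NaN whenever a or b is Inf, because a^2 + b^2 overflows to Inf and Inf/Inf is NaN, while a scalar division routine with special-value handling returns the correct limit.

import torch

# Illustrative only: a plain componentwise reciprocal with no special-value
# handling, standing in for what a straightforward vectorized kernel computes.
def naive_reciprocal(z: complex) -> complex:
    d = z.real * z.real + z.imag * z.imag
    return complex(z.real / d, -z.imag / d)

z = complex(float("inf"), 1.0)
print(naive_reciprocal(z))   # (nan-0j): Inf/Inf = NaN in the naive formula
print(1 / z)                 # ~0j: scalar complex division handles the Inf case

# sigmoid(z) = 1 / (1 + exp(-z)) goes through reciprocal, so complex CPU
# tensors with Inf/NaN components hit the affected path:
t = torch.tensor([complex(float("inf"), 1.0)], dtype=torch.complex64)
print(torch.sigmoid(t))      # expected tensor([1.+0.j]) with the scalar fallback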
@@ -20081,9 +20081,9 @@ op_db: List[OpInfo] = [
            skips=(
                # Reference: https://github.com/pytorch/pytorch/issues/56012
                DecorateInfo(unittest.skip("Skipped!"), 'TestUnaryUfuncs', 'test_reference_numerics_extremal',
-                            dtypes=[torch.complex64, torch.cdouble]),
+                            dtypes=[torch.complex64, torch.cdouble], device_type='cuda'),
                DecorateInfo(unittest.skip("Skipped!"), 'TestUnaryUfuncs', 'test_reference_numerics_large',
-                            dtypes=[torch.chalf, torch.complex64, torch.cdouble])),
+                            dtypes=[torch.chalf, torch.complex64, torch.cdouble], device_type='cuda')),
            dtypes=all_types_and_complex_and(torch.bool, torch.float16, torch.bfloat16),
            dtypesIfCUDA=all_types_and_complex_and(torch.complex32, torch.bool, torch.half, torch.bfloat16),
            supports_forward_ad=True,
@@ -22579,10 +22579,10 @@ python_ref_db = [
                # Reference: https://github.com/pytorch/pytorch/issues/56012
                DecorateInfo(unittest.skip("Skipped!"), 'TestUnaryUfuncs',
                             'test_reference_numerics_extremal',
-                            dtypes=[torch.complex64, torch.cdouble]),
+                            dtypes=[torch.complex64, torch.cdouble], device_type='cuda'),
                DecorateInfo(unittest.skip("Skipped!"), 'TestUnaryUfuncs',
                             'test_reference_numerics_large',
-                            dtypes=[torch.chalf, torch.complex64, torch.cdouble])
+                            dtypes=[torch.chalf, torch.complex64, torch.cdouble], device_type='cuda')
            ),
        ),
        ElementwiseUnaryPythonRefInfo(
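For readers less familiar with the OpInfo test infrastructure: adding `device_type='cuda'` narrows each skip to the CUDA backend, so the CPU backend now runs `test_reference_numerics_extremal` and `test_reference_numerics_large` for these complex dtypes and exercises the fixed code path. A minimal sketch of the pattern, assuming the usual import location (it may differ across PyTorch versions):

import unittest

import torch
from torch.testing._internal.common_methods_invocations import DecorateInfo

# A skip scoped to one device type: CUDA runs are skipped for these dtypes,
# while CPU (and other backends) still execute the test.
cuda_only_skip = DecorateInfo(
    unittest.skip("Skipped!"),
    'TestUnaryUfuncs',
    'test_reference_numerics_extremal',
    dtypes=[torch.complex64, torch.cdouble],
    device_type='cuda',  # without this, the skip applies everywhere, including CPU
)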