mirror of
https://github.com/pytorch/pytorch.git
synced 2025-10-20 21:14:14 +08:00
Allow int vals to go down the fastpath for _foreach_max (#127303)
Pull Request resolved: https://github.com/pytorch/pytorch/pull/127303 Approved by: https://github.com/albanD ghstack dependencies: #127187
This commit is contained in:
committed by
PyTorch MergeBot
parent
601c5e085d
commit
05e99154ee
@@ -1015,7 +1015,7 @@ class TestForeach(TestCase):
     def test_foreach_reduce_large_input(self, device, dtype, op):
         # test inputs larger than kChunkSize = 65536
         N = 65536 * 2
-        disable_fastpath = dtype in (torch.int8, torch.int16, torch.bool)
+        disable_fastpath = False
         kwargs = {}
         if op.name == "_foreach_norm":
             ord = 2
|
Reference in New Issue
Block a user