[hop] add supports_higher_order_operators flag to TorchDispatchMode (#158077)
Pull Request resolved: https://github.com/pytorch/pytorch/pull/158077
Approved by: https://github.com/zou3519
committed by PyTorch MergeBot
parent a369350065
commit 82b1c48292
@@ -68,6 +68,12 @@ class TorchDispatchMode:
     API self-referential (beware of infinite loops, in this case!)
     """
 
+    # - When False, custom torch dispatch mode will error out explicitly when a hop
+    #   is called under the mode.
+    # - When True, custom torch dispatch mode's __torch_dispatch__ will be triggered.
+    #   Mode authors can implement how the mode interacts with higher order operators.
+    supports_higher_order_operators = False
+
     def __init__(self, _dispatch_key=None):
         if _dispatch_key is not None:
             assert isinstance(_dispatch_key, torch._C.DispatchKey)
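The flag is exercised by subclassing TorchDispatchMode: with supports_higher_order_operators = True, a higher order operator (hop) dispatched under the mode reaches __torch_dispatch__ instead of erroring out, and the mode author decides how to handle it. Below is a minimal sketch, not part of this commit; the class name LoggingHopMode and the pass-through handling inside __torch_dispatch__ are illustrative assumptions, not PyTorch's reference behavior.

# Sketch only: the mode name and handling logic are hypothetical.
import torch
from torch.utils._python_dispatch import TorchDispatchMode


class LoggingHopMode(TorchDispatchMode):
    # Opt in: hops dispatched under this mode will reach __torch_dispatch__
    # instead of raising an explicit error.
    supports_higher_order_operators = True

    def __torch_dispatch__(self, func, types, args=(), kwargs=None):
        kwargs = kwargs or {}
        print(f"dispatch: {func}")
        # Pass-through handling: re-run the op (or hop) with unchanged arguments.
        return func(*args, **kwargs)


# Usage: ops (and, with the flag above, hops) executed inside the context
# manager are routed through __torch_dispatch__.
with LoggingHopMode():
    y = torch.ones(3) * 2

With the flag left at its default of False, the same hop call under a custom mode would instead error out explicitly, as described in the comment added by this commit.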