[BE][CI] bump ruff to 0.9.2: multiline assert statements (#144546)
Reference: https://docs.astral.sh/ruff/formatter/black/#assert-statements

> Unlike Black, Ruff prefers breaking the message over breaking the assertion, similar to how both Ruff and Black prefer breaking the assignment value over breaking the assignment target:
>
> ```python
> # Input
> assert (
>     len(policy_types) >= priority + num_duplicates
> ), f"This tests needs at least {priority+num_duplicates} many types."
>
>
> # Black
> assert (
>     len(policy_types) >= priority + num_duplicates
> ), f"This tests needs at least {priority+num_duplicates} many types."
>
> # Ruff
> assert len(policy_types) >= priority + num_duplicates, (
>     f"This tests needs at least {priority + num_duplicates} many types."
> )
> ```

Pull Request resolved: https://github.com/pytorch/pytorch/pull/144546
Approved by: https://github.com/malfet
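The quoted restyling is behavior-preserving: the parentheses group only the message expression, so the assertion itself is unchanged. A short sketch (not from the PR; variable names are illustrative) showing the equivalence, and the classic pitfall it should not be confused with:

```python
items = "not-a-list"

# Ruff >= 0.9 style: parentheses wrap only the message expression.
# Semantically identical to `assert isinstance(items, list), f"..."`.
try:
    assert isinstance(items, list), (
        f"expected a list, but got: {items} (of type {type(items)})"
    )
except AssertionError as e:
    print(e)  # expected a list, but got: not-a-list (of type <class 'str'>)

# Anti-pattern: wrapping the condition AND the message in one pair of
# parentheses builds a two-element tuple, which is always truthy, so the
# assertion can never fail (CPython warns "assertion is always true").
# assert (isinstance(items, list), "this never fires")
```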
committed by PyTorch MergeBot
parent f0d00421cf
commit c73a92fbf5
```diff
@@ -80,34 +80,34 @@ def parse_backend_yaml(
 
     # Mostly just defaulting to false to stick with LazyTensor convention.
     use_out_as_primary = yaml_values.pop("use_out_as_primary", False)
-    assert isinstance(
-        use_out_as_primary, bool
-    ), f"You must provide either True or False for use_out_as_primary. Provided: {use_out_as_primary}"
+    assert isinstance(use_out_as_primary, bool), (
+        f"You must provide either True or False for use_out_as_primary. Provided: {use_out_as_primary}"
+    )
 
     use_device_guard = yaml_values.pop("device_guard", False)
-    assert isinstance(
-        use_device_guard, bool
-    ), f"You must provide either True or False for device_guard. Provided: {use_device_guard}"
+    assert isinstance(use_device_guard, bool), (
+        f"You must provide either True or False for device_guard. Provided: {use_device_guard}"
+    )
 
     supported = yaml_values.pop("supported", [])
     if supported is None:
         supported = []  # Allow an empty list of supported ops
-    assert isinstance(
-        supported, list
-    ), f'expected "supported" to be a list, but got: {supported} (of type {type(supported)})'
+    assert isinstance(supported, list), (
+        f'expected "supported" to be a list, but got: {supported} (of type {type(supported)})'
+    )
 
     symint = yaml_values.pop("symint", [])
     if symint is None:
         symint = []  # Allow an empty list of symint ops
-    assert isinstance(
-        symint, list
-    ), f'expected "symint" to be a list, but got: {supported} (of type {type(supported)})'
+    assert isinstance(symint, list), (
+        f'expected "symint" to be a list, but got: {supported} (of type {type(supported)})'
+    )
     symint_set = set(symint)
 
     supported_autograd = yaml_values.pop("autograd", [])
-    assert isinstance(
-        supported_autograd, list
-    ), f'expected "autograd" to be a list, but got: {supported_autograd}'
+    assert isinstance(supported_autograd, list), (
+        f'expected "autograd" to be a list, but got: {supported_autograd}'
+    )
 
     # full_codegen is ignored by parse_backend_yaml, and re-parsed in gen_lazy_tensor.py
     full_codegen = yaml_values.pop("full_codegen", [])
@@ -135,9 +135,9 @@ def parse_backend_yaml(
     metadata: dict[OperatorName, BackendMetadata] = {}
     for op in backend_ops:
         op_name = OperatorName.parse(op)
-        assert (
-            op_name in native_functions_map
-        ), f"Found an invalid operator name: {op_name}"
+        assert op_name in native_functions_map, (
+            f"Found an invalid operator name: {op_name}"
+        )
         # See Note [External Backends Follow Dispatcher API]
         kernel_name = dispatcher.name(native_functions_map[op_name].func)
         if op in symint_ops:
@@ -238,11 +238,11 @@ the behavior of autograd for some operators on your backend. However "Autograd{b
 
     forward_kernels = [f for f in forward_kernels if f is not None]
     backward_kernels = [f for f in backward_kernels if f is not None]
-    assert (
-        len(forward_kernels) == 0 or len(backward_kernels) == 0
-    ), f'Currently, all variants of an op must either be registered to a backend key, or to a backend\'s \
+    assert len(forward_kernels) == 0 or len(backward_kernels) == 0, (
+        f'Currently, all variants of an op must either be registered to a backend key, or to a backend\'s \
 autograd key. They cannot be mix and matched. If this is something you need, feel free to create an issue! \
 {forward_kernels[0].kernel} is listed under "supported", but {backward_kernels[0].kernel} is listed under "autograd".'
+    )
 
     return ParsedExternalYaml(
         backend_key, autograd_key, class_name, cpp_namespace, backend_indices
```
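And what the reformatted checks do at runtime: a minimal standalone sketch, where `yaml_values` is a hypothetical stand-in for the parsed backend YAML mapping that `parse_backend_yaml` receives:

```python
# Hypothetical stand-in for the parsed backend YAML mapping.
yaml_values = {"use_out_as_primary": "yes"}  # deliberately the wrong type

use_out_as_primary = yaml_values.pop("use_out_as_primary", False)
assert isinstance(use_out_as_primary, bool), (
    f"You must provide either True or False for use_out_as_primary. Provided: {use_out_as_primary}"
)
# AssertionError: You must provide either True or False for
# use_out_as_primary. Provided: yes
```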