[BE] Fix SIM109 compare-with-tuple (#100337)

Use `value in (a, b)` membership tests instead of multiple equality comparisons

Pull Request resolved: https://github.com/pytorch/pytorch/pull/100337
Approved by: https://github.com/Skylion007
This commit is contained in:
Justin Chu
2023-04-30 10:16:52 -07:00
committed by PyTorch MergeBot
parent 01abbfbaae
commit e779a30d50
6 changed files with 6 additions and 10 deletions

View File

@ -20,7 +20,7 @@ ignore =
# these ignores are from flake8-logging-format; please fix!
G100,G101,G200,G201,G202
# these ignores are from flake8-simplify. please fix or ignore with commented reason
SIM105,SIM108,SIM109,SIM110,SIM111,SIM113,SIM114,SIM115,SIM116,SIM117,SIM118,SIM119,SIM12,
SIM105,SIM108,SIM110,SIM111,SIM113,SIM114,SIM115,SIM116,SIM117,SIM118,SIM119,SIM12,
# flake8-simplify code styles
SIM102,SIM103,SIM106,SIM112,
per-file-ignores =

View File

@ -250,10 +250,7 @@ def remove_disabled_jobs(
)
return filtered_test_matrix
if (
disabled_job_cfg == TEST_JOB_NAME
or disabled_job_cfg == BUILD_AND_TEST_JOB_NAME
):
if disabled_job_cfg in (TEST_JOB_NAME, BUILD_AND_TEST_JOB_NAME):
print(
f"Issue {disabled_url} created by {author} has disabled all the test jobs for {workflow} / {job_name}"
)
@ -263,7 +260,7 @@ def remove_disabled_jobs(
if m:
disabled_job = m.group("job")
# Make sure that the job name is a valid test job name first before checking the config
if disabled_job == TEST_JOB_NAME or disabled_job == BUILD_AND_TEST_JOB_NAME:
if disabled_job in (TEST_JOB_NAME, BUILD_AND_TEST_JOB_NAME):
disabled_cfg = m.group("cfg")
# Remove the disabled config from the test matrix
filtered_test_matrix["include"] = [

View File

@ -48,7 +48,6 @@ ignore = [
"SIM102", "SIM103", "SIM112", # flake8-simplify code styles
"SIM105", # these ignores are from flake8-simplify. please fix or ignore with commented reason
"SIM108",
"SIM109",
"SIM110",
"SIM114", # Combine `if` branches using logical `or` operator
"SIM115",

View File

@ -5018,7 +5018,7 @@ class TestTransformFailure(TestCase):
def f(x):
return Test.apply(x)
if transform == grad or transform == grad_and_value:
if transform in (grad, grad_and_value):
input = torch.tensor(4.)
else:
input = torch.randn(5)

View File

@ -1625,7 +1625,7 @@ def meta_embedding_bag(
max_indices = indices.new_empty(0)
else:
fast_path_sum = is_fast_path(weight, per_sample_weights, output, padding_idx)
if mode == MODE_MEAN or mode == MODE_MAX or not fast_path_sum:
if mode in (MODE_MEAN, MODE_MAX) or not fast_path_sum:
offset2bag = offsets.new_empty(indices.size(0))
else:
offset2bag = offsets.new_empty(0)

View File

@ -278,7 +278,7 @@ def processKernelLaunches(string, stats):
char = string[i]
# Handle Templating Arguments
if status == START or status == AT_TEMPLATE:
if status in (START, AT_TEMPLATE):
if char == ">":
if status == START:
status = AT_TEMPLATE