[BE]: Apply FURB145 to make code more readable and idiomatic. (#112990)

Testing out some new rules that are in beta; I think I will apply this one codebase-wide once it's out of preview. Replaces the hack of using `[:]` to make copies of lists with the proper `copy` method. More efficient and more readable.
Pull Request resolved: https://github.com/pytorch/pytorch/pull/112990
Approved by: https://github.com/ezyang
This commit is contained in:
Aaron Gokaslan
2023-11-06 13:15:04 +00:00
committed by PyTorch MergeBot
parent fa9045a872
commit 376217cc0b
3 changed files with 3 additions and 3 deletions

View File

@@ -277,7 +277,7 @@ class TestDatasetRandomSplit(TestCase):
subset1, subset2 = random_split(dataset, [4, 1])
subset_of_subset1, subset_of_subset2 = random_split(subset1, [3, 1])
idx = [subset1.indices[i] for i in subset_of_subset1.indices]
self.assertEqual(subset_of_subset1[:], dataset[idx[:]])
self.assertEqual(subset_of_subset1[:], dataset[idx.copy()])
self.assertEqual(subset_of_subset1[0:2], dataset[idx[0:2]])
self.assertEqual(subset_of_subset1[0:-1:2], dataset[idx[0:-1:2]])

View File

@@ -48,7 +48,7 @@ def gen_transitive_closure(
train: bool = False,
) -> List[str]:
result = set(root_ops)
queue = root_ops[:]
queue = root_ops.copy()
# The dependency graph might contain a special entry with key = `__BASE__`
# and value = (set of `base` ops to always include in custom build).

View File

@@ -3334,7 +3334,7 @@ def meta_cdist_backward(grad, x1, x2, p, cdist):
batch_tensor1 = x1.shape[:-2]
batch_tensor2 = x2.shape[:-2]
expand_batch_portion = list(torch.broadcast_shapes(batch_tensor1, batch_tensor2))
tensor1_expand_size = expand_batch_portion[:]
tensor1_expand_size = expand_batch_portion.copy()
tensor1_expand_size.extend([r1, c1])
batch_product = math.prod(expand_batch_portion)
if r1 == 0 or r2 == 0 or c1 == 0 or batch_product == 0: