[BE][Ez]: Use interned hardcoded string FURB156 (#138330)
Uses string constants from the standard-library string module.

Pull Request resolved: https://github.com/pytorch/pytorch/pull/138330
Approved by: https://github.com/albanD
Committed by: PyTorch MergeBot
Parent: 9c2a80322a
Commit: 195d0a666b
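The change is mechanical: wherever a lowercase-alphabet or digit literal was spelled out by hand, the equivalent interned constant from the standard-library string module is used instead. A minimal sketch of the pattern (illustrative, not part of the diff):

    import string

    # The constants are exactly the literals they replace.
    assert string.ascii_lowercase == "abcdefghijklmnopqrstuvwxyz"
    assert string.digits == "0123456789"

    # Before: alphabet = "abcdefghijklmnopqrstuvwxyz"
    # After:
    alphabet = string.ascii_lowercase

FURB156 is the lint rule (from refurb, also implemented in Ruff) that flags such hardcoded copies of string-module constants.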
@@ -30,6 +30,7 @@ from torch.testing._internal.common_dtype import all_types_and_complex_and, floa
 from torch.testing._internal.common_modules import modules, module_db, ModuleInfo
 from torch.testing._internal.opinfo.core import SampleInput, DecorateInfo, OpInfo
 import operator
+import string

 # For testing TestCase methods and torch.testing functions
 class TestTesting(TestCase):
@@ -2299,7 +2300,7 @@ class TestImports(TestCase):
         # Calling logging.basicConfig, among other things, modifies the global
         # logging state. It is not OK to modify the global logging state on
         # `import torch` (or other submodules we own) because users do not expect it.
-        expected = 'abcdefghijklmnopqrstuvwxyz'
+        expected = string.ascii_lowercase
         commands = [
             'import logging',
             f'import {path}',
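For context, the surrounding test asserts that importing torch leaves the global logging state untouched. A hedged sketch of that shape (the actual test body is truncated in this hunk; the commands and assertion here are illustrative):

    import string
    import subprocess
    import sys

    expected = string.ascii_lowercase
    commands = [
        "import logging",
        "import torch",
        f"logging.getLogger().info({expected!r})",
    ]
    out = subprocess.check_output(
        [sys.executable, "-c", "; ".join(commands)],
        stderr=subprocess.STDOUT,
    )
    # An unconfigured root logger does not emit INFO records, so the message
    # must be absent if importing torch left logging alone.
    assert expected.encode() not in out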
@@ -33,6 +33,8 @@ HAS_REFCOUNT = True
 IS_WASM = False
 IS_PYPY = False

+import string
+
 # FIXME: make from torch._numpy
 # These are commented, as if they are imported, some of the tests pass for the wrong reasons
 # from numpy lib import digitize, piecewise, trapz, select, trim_zeros, interp
@@ -1528,7 +1530,7 @@ class TestVectorize(TestCase):
     def test_string_ticket_1892(self):
         # Test vectorization over strings: issue 1892.
         f = np.vectorize(lambda x: x)
-        s = "0123456789" * 10
+        s = string.digits * 10
         assert_equal(s, f(s))

     def test_cache(self):
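The test is a regression check for NumPy issue 1892, where np.vectorize mishandled string inputs. A standalone sketch of the same check, shown here with stock NumPy for illustration (the test file itself runs against torch._numpy):

    import string
    import numpy as np

    # An identity function vectorized over a plain Python string must return
    # the string unchanged rather than truncating or mangling it.
    f = np.vectorize(lambda x: x)
    s = string.digits * 10  # "0123456789" repeated ten times
    assert f(s) == s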
@@ -1,4 +1,5 @@
 # Copyright (c) Meta Platforms, Inc. and affiliates
+import string
 from typing import cast, Dict, List, Optional, Tuple

 import torch
@@ -234,7 +235,7 @@ def pointwise_rule(op_schema: OpSchema, linearity: bool = False) -> OutputShardi
     ij,ij->ij - addition/mul
     ij,j->ij - broadcasted addition
     """
-    alphabet = "abcdefghijklmnopqrstuvwxyz"
+    alphabet = string.ascii_lowercase
     # find the max_dim first in case we need to broadcasting
     input_specs = op_schema.args_spec
     max_dim = max(input.ndim for input in input_specs)
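The alphabet feeds an einsum-style notation for pointwise ops, as hinted by the docstring's "ij,j->ij" example. A hedged sketch of how such a notation can be derived by right-aligning each input's rank against the broadcasted output (illustrative only; the real pointwise_rule goes on to propagate DTensor sharding specs):

    import string

    def broadcast_dims_map(input_ndims: list[int]) -> str:
        alphabet = string.ascii_lowercase
        max_dim = max(input_ndims)
        out_dims = alphabet[:max_dim]
        # Each input maps to the trailing slice of the output dims, which is
        # exactly how NumPy-style broadcasting aligns shapes.
        inputs_fmt = ",".join(out_dims[max_dim - ndim:] for ndim in input_ndims)
        return f"{inputs_fmt}->{out_dims}"

    assert broadcast_dims_map([2, 2]) == "ab,ab->ab"  # addition/mul
    assert broadcast_dims_map([2, 1]) == "ab,b->ab"   # broadcasted addition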
@@ -1,5 +1,6 @@
 from __future__ import annotations

+import string
 from collections import defaultdict
 from typing import Sequence

@@ -194,9 +195,7 @@ def generate_out_args_from_schema(
         lambda a: [] if a.annotation is None else a.annotation.alias_set,
         func.arguments.flat_all,
     )
-    valid_annotations = [
-        x for x in "abcdefghijklmnopqrstuvwxyz" if x not in used_annotations
-    ]
+    valid_annotations = [x for x in string.ascii_lowercase if x not in used_annotations]

     all_rets_are_tensors = all(r.type == BaseType(BaseTy.Tensor) for r in func.returns)

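Here the free letters become alias annotations for the out= arguments being generated. A minimal sketch of that selection step with hypothetical inputs (the real code derives used_annotations from a torchgen FunctionSchema rather than a literal set):

    import string

    # Alias letters already taken in the schema, e.g. the "a" in "self(a!)".
    used_annotations = {"a", "b"}
    valid_annotations = [x for x in string.ascii_lowercase if x not in used_annotations]
    # The first free letters annotate the newly generated out tensors.
    assert valid_annotations[:2] == ["c", "d"]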