Mirror of https://github.com/pytorch/pytorch.git, synced 2025-10-21 05:34:18 +08:00
Fixes #112633

Fixed pydocstyle errors in the following files. The remaining errors are not covered in this issue. `torch/utils/dlpack.py` was not modified, as its errors relate to the function signature in the first line of the docstring, which must be maintained as is for proper Sphinx interpretation:

```python
def from_dlpack(ext_tensor: Any) -> 'torch.Tensor':
    """from_dlpack(ext_tensor) -> Tensor
    .....
    """
```

`pydocstyle torch/utils/_contextlib.py --count`
before: 4
after: 0

`pydocstyle torch/backends/mps/__init__.py --count`
before: 8
after: 1

**remaining errors**
```
torch/backends/mps/__init__.py:1 at module level:
        D104: Missing docstring in public package
```

`pydocstyle torch/backends/xeon/run_cpu.py --count`
before: 13
after: 1

**remaining errors**
```
torch/backends/xeon/run_cpu.py:864 in public function `main`:
        D103: Missing docstring in public function
```

`pydocstyle torch/backends/cpu/__init__.py --count`
before: 2
after: 1

**remaining errors**
```
torch/backends/cpu/__init__.py:1 at module level:
        D104: Missing docstring in public package
```

`pydocstyle torch/utils/cpp_backtrace.py --count`
before: 4
after: 1

**remaining errors**
```
torch/utils/cpp_backtrace.py:1 at module level:
        D100: Missing docstring in public module
```

`pydocstyle torch/utils/bundled_inputs.py --count`
before: 8
after: 1

**remaining errors**
```
torch/utils/bundled_inputs.py:1 at module level:
        D100: Missing docstring in public module
```

`pydocstyle torch/utils/file_baton.py --count`
before: 8
after: 1

**remaining errors**
```
torch/utils/file_baton.py:1 at module level:
        D100: Missing docstring in public module
```

`pydocstyle torch/utils/mobile_optimizer.py --count`
before: 6
after: 1

**remaining errors**
```
torch/utils/mobile_optimizer.py:8 in public class `LintCode`:
        D101: Missing docstring in public class
```

`pydocstyle torch/backends/opt_einsum/__init__.py --count`
before: 7
after: 5

**remaining errors**
```
torch/backends/opt_einsum/__init__.py:1 at module level:
        D104: Missing docstring in public package
torch/backends/opt_einsum/__init__.py:67 in public function `set_flags`:
        D103: Missing docstring in public function
torch/backends/opt_einsum/__init__.py:77 in public function `flags`:
        D103: Missing docstring in public function
torch/backends/opt_einsum/__init__.py:93 in public class `OptEinsumModule`:
        D101: Missing docstring in public class
torch/backends/opt_einsum/__init__.py:94 in public method `__init__`:
        D107: Missing docstring in __init__
```

`pydocstyle torch/utils/_device.py --count`
before: 9
after: 6

**remaining errors**
```
torch/utils/_device.py:58 in public class `DeviceContext`:
        D101: Missing docstring in public class
torch/utils/_device.py:59 in public method `__init__`:
        D107: Missing docstring in __init__
torch/utils/_device.py:62 in public method `__enter__`:
        D105: Missing docstring in magic method
torch/utils/_device.py:68 in public method `__exit__`:
        D105: Missing docstring in magic method
torch/utils/_device.py:73 in public method `__torch_function__`:
        D105: Missing docstring in magic method
torch/utils/_device.py:80 in public function `device_decorator`:
        D103: Missing docstring in public function
```

`pydocstyle torch/utils/_freeze.py --count`
before: 15
after: 7

**remaining errors**
```
torch/utils/_freeze.py:77 in public function `indent_msg`:
        D103: Missing docstring in public function
torch/utils/_freeze.py:89 in public class `FrozenModule`:
        D101: Missing docstring in public class
torch/utils/_freeze.py:100 in public class `Freezer`:
        D101: Missing docstring in public class
torch/utils/_freeze.py:101 in public method `__init__`:
        D107: Missing docstring in __init__
torch/utils/_freeze.py:106 in public method `msg`:
        D102: Missing docstring in public method
torch/utils/_freeze.py:185 in public method `get_module_qualname`:
        D102: Missing docstring in public method
torch/utils/_freeze.py:206 in public method `compile_string`:
        D102: Missing docstring in public method
```

`pydocstyle torch/utils/throughput_benchmark.py --count`
before: 25
after: 8

**remaining errors**
```
torch/utils/throughput_benchmark.py:1 at module level:
        D100: Missing docstring in public module
torch/utils/throughput_benchmark.py:27 in public class `ExecutionStats`:
        D101: Missing docstring in public class
torch/utils/throughput_benchmark.py:28 in public method `__init__`:
        D107: Missing docstring in __init__
torch/utils/throughput_benchmark.py:33 in public method `latency_avg_ms`:
        D102: Missing docstring in public method
torch/utils/throughput_benchmark.py:37 in public method `num_iters`:
        D102: Missing docstring in public method
torch/utils/throughput_benchmark.py:46 in public method `total_time_seconds`:
        D102: Missing docstring in public method
torch/utils/throughput_benchmark.py:50 in public method `__str__`:
        D105: Missing docstring in magic method
torch/utils/throughput_benchmark.py:94 in public method `__init__`:
        D107: Missing docstring in __init__
```

`pydocstyle torch/utils/hooks.py --count`
before: 14
after: 11

**remaining errors**
```
torch/utils/hooks.py:1 at module level:
        D100: Missing docstring in public module
torch/utils/hooks.py:23 in public method `__init__`:
        D107: Missing docstring in __init__
torch/utils/hooks.py:34 in public method `remove`:
        D102: Missing docstring in public method
torch/utils/hooks.py:44 in public method `__getstate__`:
        D105: Missing docstring in magic method
torch/utils/hooks.py:50 in public method `__setstate__`:
        D105: Missing docstring in magic method
torch/utils/hooks.py:64 in public method `__enter__`:
        D105: Missing docstring in magic method
torch/utils/hooks.py:67 in public method `__exit__`:
        D105: Missing docstring in magic method
torch/utils/hooks.py:82 in public function `warn_if_has_hooks`:
        D103: Missing docstring in public function
torch/utils/hooks.py:103 in public method `__init__`:
        D107: Missing docstring in __init__
torch/utils/hooks.py:188 in public method `setup_input_hook`:
        D102: Missing docstring in public method
torch/utils/hooks.py:197 in public method `setup_output_hook`:
        D102: Missing docstring in public method
```

`pydocstyle torch/utils/_traceback.py --count`
before: 19
after: 14

**remaining errors**
```
torch/utils/_traceback.py:47 in public function `report_compile_source_on_error`:
        D103: Missing docstring in public function
torch/utils/_traceback.py:160 in public class `CapturedTraceback`:
        D101: Missing docstring in public class
torch/utils/_traceback.py:163 in public method `__init__`:
        D107: Missing docstring in __init__
torch/utils/_traceback.py:167 in public method `cleanup`:
        D102: Missing docstring in public method
torch/utils/_traceback.py:170 in public method `summary`:
        D102: Missing docstring in public method
torch/utils/_traceback.py:182 in public method `__getstate__`:
        D105: Missing docstring in magic method
torch/utils/_traceback.py:190 in public method `extract`:
        D205: 1 blank line required between summary line and description (found 0)
torch/utils/_traceback.py:190 in public method `extract`:
        D400: First line should end with a period (not 't')
torch/utils/_traceback.py:213 in public method `format`:
        D205: 1 blank line required between summary line and description (found 0)
torch/utils/_traceback.py:213 in public method `format`:
        D400: First line should end with a period (not 'f')
torch/utils/_traceback.py:213 in public method `format`:
        D401: First line should be in imperative mood (perhaps 'Format', not 'Formats')
torch/utils/_traceback.py:224 in public method `format_all`:
        D200: One-line docstring should fit on one line with quotes (found 3)
torch/utils/_traceback.py:247 in private function `_extract_symbolized_tb`:
        D205: 1 blank line required between summary line and description (found 0)
torch/utils/_traceback.py:247 in private function `_extract_symbolized_tb`:
        D400: First line should end with a period (not 'f')
```

`pydocstyle torch/utils/mkldnn.py --count`
before: 28
after: 26

**remaining errors**
```
torch/utils/mkldnn.py:1 at module level:
        D100: Missing docstring in public module
torch/utils/mkldnn.py:4 in public class `MkldnnLinear`:
        D101: Missing docstring in public class
torch/utils/mkldnn.py:5 in public method `__init__`:
        D107: Missing docstring in __init__
torch/utils/mkldnn.py:19 in public method `__getstate__`:
        D105: Missing docstring in magic method
torch/utils/mkldnn.py:23 in public method `__setstate__`:
        D105: Missing docstring in magic method
torch/utils/mkldnn.py:29 in public method `forward`:
        D102: Missing docstring in public method
torch/utils/mkldnn.py:75 in public class `MkldnnConv1d`:
        D101: Missing docstring in public class
torch/utils/mkldnn.py:76 in public method `__init__`:
        D107: Missing docstring in __init__
torch/utils/mkldnn.py:82 in public method `__setstate__`:
        D105: Missing docstring in magic method
torch/utils/mkldnn.py:88 in public class `MkldnnConv2d`:
        D101: Missing docstring in public class
torch/utils/mkldnn.py:89 in public method `__init__`:
        D107: Missing docstring in __init__
torch/utils/mkldnn.py:100 in public method `__setstate__`:
        D105: Missing docstring in magic method
torch/utils/mkldnn.py:110 in public class `MkldnnConv3d`:
        D101: Missing docstring in public class
torch/utils/mkldnn.py:111 in public method `__init__`:
        D107: Missing docstring in __init__
torch/utils/mkldnn.py:122 in public method `__setstate__`:
        D105: Missing docstring in magic method
torch/utils/mkldnn.py:133 in public class `MkldnnBatchNorm`:
        D101: Missing docstring in public class
torch/utils/mkldnn.py:136 in public method `__init__`:
        D107: Missing docstring in __init__
torch/utils/mkldnn.py:155 in public method `__getstate__`:
        D105: Missing docstring in magic method
torch/utils/mkldnn.py:163 in public method `__setstate__`:
        D105: Missing docstring in magic method
torch/utils/mkldnn.py:171 in public method `forward`:
        D102: Missing docstring in public method
torch/utils/mkldnn.py:184 in public class `MkldnnPrelu`:
        D101: Missing docstring in public class
torch/utils/mkldnn.py:185 in public method `__init__`:
        D107: Missing docstring in __init__
torch/utils/mkldnn.py:190 in public method `__getstate__`:
        D105: Missing docstring in magic method
torch/utils/mkldnn.py:194 in public method `__setstate__`:
        D105: Missing docstring in magic method
torch/utils/mkldnn.py:199 in public method `forward`:
        D102: Missing docstring in public method
torch/utils/mkldnn.py:205 in public function `to_mkldnn`:
        D103: Missing docstring in public function
```

`pydocstyle torch/utils/weak.py --count`
before: 32
after: 30

**remaining errors**
```
torch/utils/weak.py:1 at module level:
        D100: Missing docstring in public module
torch/utils/weak.py:42 in public class `WeakIdRef`:
        D101: Missing docstring in public class
torch/utils/weak.py:45 in public method `__init__`:
        D107: Missing docstring in __init__
torch/utils/weak.py:54 in public method `__call__`:
        D102: Missing docstring in public method
torch/utils/weak.py:61 in public method `__hash__`:
        D105: Missing docstring in magic method
torch/utils/weak.py:64 in public method `__eq__`:
        D105: Missing docstring in magic method
torch/utils/weak.py:84 in public class `WeakIdKeyDictionary`:
        D101: Missing docstring in public class
torch/utils/weak.py:87 in public method `__init__`:
        D107: Missing docstring in __init__
torch/utils/weak.py:131 in public method `__delitem__`:
        D105: Missing docstring in magic method
torch/utils/weak.py:135 in public method `__getitem__`:
        D105: Missing docstring in magic method
torch/utils/weak.py:138 in public method `__len__`:
        D105: Missing docstring in magic method
torch/utils/weak.py:145 in public method `__repr__`:
        D105: Missing docstring in magic method
torch/utils/weak.py:148 in public method `__setitem__`:
        D105: Missing docstring in magic method
torch/utils/weak.py:151 in public method `copy`:
        D102: Missing docstring in public method
torch/utils/weak.py:162 in public method `__deepcopy__`:
        D105: Missing docstring in magic method
torch/utils/weak.py:172 in public method `get`:
        D102: Missing docstring in public method
torch/utils/weak.py:175 in public method `__contains__`:
        D105: Missing docstring in magic method
torch/utils/weak.py:182 in public method `items`:
        D102: Missing docstring in public method
torch/utils/weak.py:189 in public method `keys`:
        D102: Missing docstring in public method
torch/utils/weak.py:198 in public method `values`:
        D102: Missing docstring in public method
torch/utils/weak.py:216 in public method `popitem`:
        D102: Missing docstring in public method
torch/utils/weak.py:224 in public method `pop`:
        D102: Missing docstring in public method
torch/utils/weak.py:228 in public method `setdefault`:
        D102: Missing docstring in public method
torch/utils/weak.py:231 in public method `update`:
        D102: Missing docstring in public method
torch/utils/weak.py:241 in public method `__ior__`:
        D105: Missing docstring in magic method
torch/utils/weak.py:245 in public method `__or__`:
        D105: Missing docstring in magic method
torch/utils/weak.py:252 in public method `__ror__`:
        D105: Missing docstring in magic method
torch/utils/weak.py:262 in public method `__eq__`:
        D105: Missing docstring in magic method
torch/utils/weak.py:276 in public method `__init__`:
        D107: Missing docstring in __init__
torch/utils/weak.py:280 in public method `__call__`:
        D102: Missing docstring in public method
```

@mikaylagawarecki @jbschlosser @svekars

Pull Request resolved: https://github.com/pytorch/pytorch/pull/113311
Approved by: https://github.com/ezyang
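Most of the remaining D205/D400/D401-style errors call for the same mechanical docstring fix. A minimal before/after sketch, using a hypothetical function rather than any of the files above:

```python
# Before: trips D205 (no blank line between summary and description)
# and D400 (first line does not end with a period).
def load_config(path):
    """Load a config file
    and validate its contents"""


# After: a one-line summary ending with a period, a blank line, then the description.
def load_config(path):
    """Load a config file.

    The contents are validated before being returned.
    """
```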
torch/utils/throughput_benchmark.py · 160 lines · 6.3 KiB · Python
import torch._C


def format_time(time_us=None, time_ms=None, time_s=None):
    """Define time formatting."""
    assert sum([time_us is not None, time_ms is not None, time_s is not None]) == 1

    US_IN_SECOND = 1e6
    US_IN_MS = 1e3

    if time_us is None:
        if time_ms is not None:
            time_us = time_ms * US_IN_MS
        elif time_s is not None:
            time_us = time_s * US_IN_SECOND
        else:
            raise AssertionError("Shouldn't reach here :)")

    if time_us >= US_IN_SECOND:
        return f'{time_us / US_IN_SECOND:.3f}s'
    if time_us >= US_IN_MS:
        return f'{time_us / US_IN_MS:.3f}ms'
    return f'{time_us:.3f}us'


class ExecutionStats:
    def __init__(self, c_stats, benchmark_config):
        self._c_stats = c_stats
        self.benchmark_config = benchmark_config

    @property
    def latency_avg_ms(self):
        return self._c_stats.latency_avg_ms

    @property
    def num_iters(self):
        return self._c_stats.num_iters

    @property
    def iters_per_second(self):
        """Return total number of iterations per second across all calling threads."""
        return self.num_iters / self.total_time_seconds

    @property
    def total_time_seconds(self):
        return self.num_iters * (
            self.latency_avg_ms / 1000.0) / self.benchmark_config.num_calling_threads

    def __str__(self):
        return '\n'.join([
            "Average latency per example: " + format_time(time_ms=self.latency_avg_ms),
            f"Total number of iterations: {self.num_iters}",
            f"Total number of iterations per second (across all threads): {self.iters_per_second:.2f}",
            "Total time: " + format_time(time_s=self.total_time_seconds)
        ])


class ThroughputBenchmark:
    """
    This class is a wrapper around a C++ component, throughput_benchmark::ThroughputBenchmark.

    This wrapper on the throughput_benchmark::ThroughputBenchmark component is responsible
    for executing a PyTorch module (nn.Module or ScriptModule) under an inference
    server-like load. It can emulate multiple calling threads to a single provided
    module. In the future we plan to enhance this component to support inter- and
    intra-op parallelism as well as multiple models running in a single process.

    Please note that even though nn.Module is supported, it might incur an overhead
    from the need to hold the GIL every time we execute Python code or pass around
    inputs as Python objects. As soon as you have a ScriptModule version of your
    model for inference deployment, it is better to switch to using it in this
    benchmark.

    Example::

        >>> # xdoctest: +SKIP("undefined vars")
        >>> from torch.utils import ThroughputBenchmark
        >>> bench = ThroughputBenchmark(my_module)
        >>> # Pre-populate benchmark's data set with the inputs
        >>> for input in inputs:
        ...     # Both args and kwargs work, same as any PyTorch Module / ScriptModule
        ...     bench.add_input(input[0], x2=input[1])
        >>> # Inputs supplied above are randomly used during the execution
        >>> stats = bench.benchmark(
        ...     num_calling_threads=4,
        ...     num_warmup_iters=100,
        ...     num_iters=1000,
        ... )
        >>> print("Avg latency (ms): {}".format(stats.latency_avg_ms))
        >>> print("Number of iterations: {}".format(stats.num_iters))
    """

    def __init__(self, module):
        if isinstance(module, torch.jit.ScriptModule):
            self._benchmark = torch._C.ThroughputBenchmark(module._c)
        else:
            self._benchmark = torch._C.ThroughputBenchmark(module)

    def run_once(self, *args, **kwargs):
        """
        Given an input id (input_idx), run the benchmark once and return the prediction.

        This is useful for testing that the benchmark actually runs the module you
        want it to run. input_idx here is an index into the inputs array populated
        by calling the add_input() method.
        """
        return self._benchmark.run_once(*args, **kwargs)

    def add_input(self, *args, **kwargs):
        """
        Store a single input to a module into the benchmark memory and keep it there.

        During the benchmark execution every thread is going to pick up a
        random input from all the inputs ever supplied to the benchmark via
        this function.
        """
        self._benchmark.add_input(*args, **kwargs)

    def benchmark(
            self,
            num_calling_threads=1,
            num_warmup_iters=10,
            num_iters=100,
            profiler_output_path=""):
        """
        Run a benchmark on the module.

        Args:
            num_warmup_iters (int): Warmup iterations are used to make sure we run a module
                a few times before actually measuring things. This way we avoid cold
                caches and any other similar problems. This is the number of warmup
                iterations run separately by each of the threads.

            num_iters (int): Number of iterations the benchmark should run with.
                This number is separate from the warmup iterations and is shared
                across all the threads. Once num_iters iterations have been reached
                across all the threads, we will stop execution, though the total
                number of iterations might be slightly larger; it is reported as
                stats.num_iters, where stats is the result of this function.

            profiler_output_path (str): Location to save the Autograd Profiler trace.
                If not empty, the Autograd Profiler will be enabled for the main benchmark
                execution (but not the warmup phase). The full trace will be saved
                into the file path provided by this argument.

        This function returns a BenchmarkExecutionStats object, which is defined via pybind11.
        It currently has two fields:

        - num_iters - the number of actual iterations the benchmark has made
        - avg_latency_ms - the average time it took to infer on one input example, in milliseconds
        """
        config = torch._C.BenchmarkConfig()
        config.num_calling_threads = num_calling_threads
        config.num_warmup_iters = num_warmup_iters
        config.num_iters = num_iters
        config.profiler_output_path = profiler_output_path
        c_stats = self._benchmark.benchmark(config)
        return ExecutionStats(c_stats, config)
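For readers who want to try the module end to end, here is a minimal sketch beyond the docstring example above; the model, input shape, and iteration counts are illustrative assumptions, not values taken from this file:

```python
import torch
from torch.utils import ThroughputBenchmark

# Script the module first: as the class docstring notes, a ScriptModule avoids
# the per-call GIL overhead an eager nn.Module would incur.
model = torch.jit.script(torch.nn.Linear(16, 4).eval())

bench = ThroughputBenchmark(model)
for _ in range(8):
    # Every stored input can be picked up at random by any calling thread.
    bench.add_input(torch.randn(2, 16))

stats = bench.benchmark(num_calling_threads=2, num_warmup_iters=10, num_iters=100)
print(stats)  # ExecutionStats.__str__ reports latency, iteration count, and throughput
```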