mirror of
https://github.com/pytorch/pytorch.git
synced 2025-10-26 08:34:52 +08:00
Summary: Pull Request resolved: https://github.com/pytorch/pytorch/pull/49957 Test Plan: Sandcastle Reviewed By: xush6528 Differential Revision: D25729745 fbshipit-source-id: 85810e2c18ca6856480bef81217da1359b63d8a3
35 lines
987 B
Python
import collections.abc
from itertools import repeat
from typing import List

from torch._six import container_abcs
|
|
|
|
|
|
def _ntuple(n):
|
|
def parse(x):
|
|
if isinstance(x, container_abcs.Iterable):
|
|
return x
|
|
return tuple(repeat(x, n))
|
|
return parse
|
|
|
|
# Commonly used specializations of _ntuple: normalize scalar-or-iterable
# arguments (e.g. kernel_size, stride, padding, dilation) for 1-D through
# 4-D convolution and pooling modules.
_single = _ntuple(1)
_pair = _ntuple(2)
_triple = _ntuple(3)
_quadruple = _ntuple(4)
|
|
|
|
|
|
def _reverse_repeat_tuple(t, n):
|
|
r"""Reverse the order of `t` and repeat each element for `n` times.
|
|
|
|
This can be used to translate padding arg used by Conv and Pooling modules
|
|
to the ones used by `F.pad`.
|
|
"""
|
|
return tuple(x for x in reversed(t) for _ in range(n))
|
|
|
|
|
|
def _list_with_default(out_size: List[int], defaults: List[int]) -> List[int]:
|
|
if isinstance(out_size, int):
|
|
return out_size
|
|
if len(defaults) <= len(out_size):
|
|
raise ValueError('Input dimension should be at least {}'.format(len(out_size) + 1))
|
|
return [v if v is not None else d for v, d in zip(out_size, defaults[-len(out_size):])]
|