Mirror of https://github.com/pytorch/pytorch.git, synced 2025-10-20 21:14:14 +08:00

Summary: Pull Request resolved: https://github.com/pytorch/pytorch/pull/61608. See #61544 for an example of the issues created by functional wrappers. In this case, the wrappers directly wrap the native function with no added functionality. One exception was `bilinear`, which was only missing the default argument in C++ but was otherwise the same. I've kept the symbol `torch.functional.istft` because it looks like public API, but it could just as easily be moved to `_torch_docs.py`.

Test Plan: Imported from OSS
Reviewed By: ngimel
Differential Revision: D31401361
Pulled By: albanD
fbshipit-source-id: 162b74d0b2d4f2e5c4834687a94541960cefdd52
(cherry picked from commit 700cd73ca121d903f04f539af171d3f768565921)
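
As a hedged illustration only (not the PR's actual diff), the kind of wrapper the commit message describes is a Python function that simply forwards its arguments to the already-bound native op. The sketch below uses `bilinear`, the example named above; `torch.bilinear` is the native op it forwards to.

    # Minimal sketch of a "functional wrapper with no added functionality":
    # the Python layer only supplies the bias=None default and forwards
    # everything to the native op, so callers can bind to the native function
    # directly once the default is declared on the C++ signature.
    from typing import Optional

    import torch
    from torch import Tensor

    def bilinear(input1: Tensor, input2: Tensor, weight: Tensor,
                 bias: Optional[Tensor] = None) -> Tensor:
        # Pure pass-through to the native implementation.
        return torch.bilinear(input1, input2, weight, bias)
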
node {
  name: "input/x"
  op: "IO Node"
  attr {
    key: "_output_shapes"
    value {
      list {
        shape {
          dim {
            size: 2
          }
          dim {
            size: 3
          }
        }
      }
    }
  }
  attr {
    key: "attr"
    value {
      s: ""
    }
  }
}
node {
  name: "output/output.1"
  op: "IO Node"
  input: "OuterNNSquential/Sequential[outer_nn_squential]/InnerNNSquential[1]/Sequential[inner_nn_squential]/Linear[1]/98"
  attr {
    key: "_output_shapes"
    value {
      list {
        shape {
          dim {
            size: 2
          }
          dim {
            size: 3
          }
        }
      }
    }
  }
  attr {
    key: "attr"
    value {
      s: ""
    }
  }
}
node {
  name: "OuterNNSquential/Sequential[outer_nn_squential]/InnerNNSquential[0]/Sequential[inner_nn_squential]/Linear[0]/bias/bias.1"
  op: "prim::GetAttr"
  input: "OuterNNSquential/Sequential[outer_nn_squential]/InnerNNSquential[0]/Sequential[inner_nn_squential]/Linear[0]/weight/_0.1"
  attr {
    key: "attr"
    value {
      s: "{ name : bias }"
    }
  }
}
node {
  name: "OuterNNSquential/Sequential[outer_nn_squential]/InnerNNSquential[0]/Sequential[inner_nn_squential]/Linear[0]/weight/weight.1"
  op: "prim::GetAttr"
  input: "OuterNNSquential/Sequential[outer_nn_squential]/InnerNNSquential[0]/Sequential[inner_nn_squential]/Linear[0]/weight/_0.1"
  attr {
    key: "attr"
    value {
      s: "{ name : weight }"
    }
  }
}
node {
  name: "OuterNNSquential/Sequential[outer_nn_squential]/InnerNNSquential[0]/Sequential[inner_nn_squential]/Linear[0]/input.1"
  op: "aten::linear"
  input: "input/x"
  input: "OuterNNSquential/Sequential[outer_nn_squential]/InnerNNSquential[0]/Sequential[inner_nn_squential]/Linear[0]/weight/weight.1"
  input: "OuterNNSquential/Sequential[outer_nn_squential]/InnerNNSquential[0]/Sequential[inner_nn_squential]/Linear[0]/bias/bias.1"
  attr {
    key: "_output_shapes"
    value {
      list {
        shape {
          dim {
            size: 2
          }
          dim {
            size: 4
          }
        }
      }
    }
  }
  attr {
    key: "attr"
    value {
      s: "{}"
    }
  }
}
node {
  name: "OuterNNSquential/Sequential[outer_nn_squential]/InnerNNSquential[0]/Sequential[inner_nn_squential]/Linear[1]/bias/bias.3"
  op: "prim::GetAttr"
  input: "OuterNNSquential/Sequential[outer_nn_squential]/InnerNNSquential[0]/Sequential[inner_nn_squential]/Linear[1]/weight/_1.1"
  attr {
    key: "attr"
    value {
      s: "{ name : bias }"
    }
  }
}
node {
  name: "OuterNNSquential/Sequential[outer_nn_squential]/InnerNNSquential[0]/Sequential[inner_nn_squential]/Linear[1]/weight/weight.3"
  op: "prim::GetAttr"
  input: "OuterNNSquential/Sequential[outer_nn_squential]/InnerNNSquential[0]/Sequential[inner_nn_squential]/Linear[1]/weight/_1.1"
  attr {
    key: "attr"
    value {
      s: "{ name : weight }"
    }
  }
}
node {
  name: "OuterNNSquential/Sequential[outer_nn_squential]/InnerNNSquential[0]/Sequential[inner_nn_squential]/Linear[1]/input.3"
  op: "aten::linear"
  input: "OuterNNSquential/Sequential[outer_nn_squential]/InnerNNSquential[0]/Sequential[inner_nn_squential]/Linear[0]/input.1"
  input: "OuterNNSquential/Sequential[outer_nn_squential]/InnerNNSquential[0]/Sequential[inner_nn_squential]/Linear[1]/weight/weight.3"
  input: "OuterNNSquential/Sequential[outer_nn_squential]/InnerNNSquential[0]/Sequential[inner_nn_squential]/Linear[1]/bias/bias.3"
  attr {
    key: "_output_shapes"
    value {
      list {
        shape {
          dim {
            size: 2
          }
          dim {
            size: 3
          }
        }
      }
    }
  }
  attr {
    key: "attr"
    value {
      s: "{}"
    }
  }
}
node {
  name: "OuterNNSquential/Sequential[outer_nn_squential]/InnerNNSquential[1]/Sequential[inner_nn_squential]/Linear[0]/bias/bias.5"
  op: "prim::GetAttr"
  input: "OuterNNSquential/Sequential[outer_nn_squential]/InnerNNSquential[1]/Sequential[inner_nn_squential]/Linear[0]/weight/_0"
  attr {
    key: "attr"
    value {
      s: "{ name : bias }"
    }
  }
}
node {
  name: "OuterNNSquential/Sequential[outer_nn_squential]/InnerNNSquential[1]/Sequential[inner_nn_squential]/Linear[0]/weight/weight.5"
  op: "prim::GetAttr"
  input: "OuterNNSquential/Sequential[outer_nn_squential]/InnerNNSquential[1]/Sequential[inner_nn_squential]/Linear[0]/weight/_0"
  attr {
    key: "attr"
    value {
      s: "{ name : weight }"
    }
  }
}
node {
  name: "OuterNNSquential/Sequential[outer_nn_squential]/InnerNNSquential[1]/Sequential[inner_nn_squential]/Linear[0]/input"
  op: "aten::linear"
  input: "OuterNNSquential/Sequential[outer_nn_squential]/InnerNNSquential[0]/Sequential[inner_nn_squential]/Linear[1]/input.3"
  input: "OuterNNSquential/Sequential[outer_nn_squential]/InnerNNSquential[1]/Sequential[inner_nn_squential]/Linear[0]/weight/weight.5"
  input: "OuterNNSquential/Sequential[outer_nn_squential]/InnerNNSquential[1]/Sequential[inner_nn_squential]/Linear[0]/bias/bias.5"
  attr {
    key: "_output_shapes"
    value {
      list {
        shape {
          dim {
            size: 2
          }
          dim {
            size: 4
          }
        }
      }
    }
  }
  attr {
    key: "attr"
    value {
      s: "{}"
    }
  }
}
node {
  name: "OuterNNSquential/Sequential[outer_nn_squential]/InnerNNSquential[1]/Sequential[inner_nn_squential]/Linear[1]/bias/bias"
  op: "prim::GetAttr"
  input: "OuterNNSquential/Sequential[outer_nn_squential]/InnerNNSquential[1]/Sequential[inner_nn_squential]/Linear[1]/weight/_1.3"
  attr {
    key: "attr"
    value {
      s: "{ name : bias }"
    }
  }
}
node {
  name: "OuterNNSquential/Sequential[outer_nn_squential]/InnerNNSquential[1]/Sequential[inner_nn_squential]/Linear[1]/weight/weight"
  op: "prim::GetAttr"
  input: "OuterNNSquential/Sequential[outer_nn_squential]/InnerNNSquential[1]/Sequential[inner_nn_squential]/Linear[1]/weight/_1.3"
  attr {
    key: "attr"
    value {
      s: "{ name : weight }"
    }
  }
}
node {
  name: "OuterNNSquential/Sequential[outer_nn_squential]/InnerNNSquential[1]/Sequential[inner_nn_squential]/Linear[1]/98"
  op: "aten::linear"
  input: "OuterNNSquential/Sequential[outer_nn_squential]/InnerNNSquential[1]/Sequential[inner_nn_squential]/Linear[0]/input"
  input: "OuterNNSquential/Sequential[outer_nn_squential]/InnerNNSquential[1]/Sequential[inner_nn_squential]/Linear[1]/weight/weight"
  input: "OuterNNSquential/Sequential[outer_nn_squential]/InnerNNSquential[1]/Sequential[inner_nn_squential]/Linear[1]/bias/bias"
  attr {
    key: "_output_shapes"
    value {
      list {
        shape {
          dim {
            size: 2
          }
          dim {
            size: 3
          }
        }
      }
    }
  }
  attr {
    key: "attr"
    value {
      s: "{}"
    }
  }
}
versions {
  producer: 22
}
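
For context, a minimal sketch of a model that would trace to a graph with this structure. This is an assumption reconstructed from the node names and recorded `_output_shapes` above (nested `nn.Sequential` containers, two `Linear` layers per inner block, 2x3 input, 2x4 intermediate, 2x3 output), not the test suite's exact code; the class names, dimensions, and `log_dir` are illustrative.

    import torch
    import torch.nn as nn
    from torch.utils.tensorboard import SummaryWriter

    class InnerNNSquential(nn.Module):
        def __init__(self, dim1, dim2):
            super().__init__()
            # Two stacked Linear layers: dim1 -> dim2 -> dim1 (3 -> 4 -> 3 above).
            self.inner_nn_squential = nn.Sequential(
                nn.Linear(dim1, dim2), nn.Linear(dim2, dim1))

        def forward(self, x):
            return self.inner_nn_squential(x)

    class OuterNNSquential(nn.Module):
        def __init__(self, dim1=3, dim2=4, depth=2):
            super().__init__()
            # Two inner blocks, matching InnerNNSquential[0] / InnerNNSquential[1] above.
            self.outer_nn_squential = nn.Sequential(
                *[InnerNNSquential(dim1, dim2) for _ in range(depth)])

        def forward(self, x):
            return self.outer_nn_squential(x)

    # Tracing a (2, 3) input reproduces the shapes recorded above:
    # 2x4 then 2x3 inside each inner block, 2x3 at the graph output.
    with SummaryWriter(log_dir="/tmp/nested_sequential_graph") as writer:
        writer.add_graph(OuterNNSquential(), torch.rand(2, 3))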