mirror of
https://github.com/pytorch/pytorch.git
synced 2025-10-21 05:34:18 +08:00
[JIT] clean up (#60390)
Summary: * Minor: spelling, grammar. * Add calls to `GRAPH_DUMP()` where they were missing. * Add or expand a few comments. * Move a few comments to seemingly more appropriate spots. * In canonicalize_graph_fuser_ops.cpp inline `runnableInputs()` since it was only called in one place and had a misleading comment and confusing name. * In `PeepholeOptimizeImpl::optimizeBlock()`, set `changed = true;` when removing `aten::is_complex`. Pretty sure its absence was a bug. * Delete unused `_jit_pass_remove_inplace_ops` and its implementation `RemoveInplaceOps()`. * In `preprocessCaffe2Ops()`, remove redundant check for nested optional types. It was already checked in `checkONNXCompatibility()`. * In `EncoderBase::AddAttribute`, log the unexpected attribute kind. I don't remember the repro case now but I did hit this error at some point and this additional logging made it easier to understand. * In `fuseConvBatchNorm()` in eval_peephole.cpp, consistently use camelCase instead of snake_case for local variables. * Add curly braces around the bodies of if statements and loops. Pull Request resolved: https://github.com/pytorch/pytorch/pull/60390 Reviewed By: Krovatkin Differential Revision: D29523283 Pulled By: SplitInfinity fbshipit-source-id: 4e16c5648616f53da07d68dab7fdf252e06a0752
This commit is contained in:
committed by
Facebook GitHub Bot
parent
54ea7d33ba
commit
dec5aa2260
@ -68,18 +68,18 @@ void flatten_rec(PyObject* obj, ParsedArgs& args) {
|
||||
args.desc.structure.push_back(D::Variable);
|
||||
} else if (strcmp(THPUtils_typename(obj), "NoneType") == 0) {
|
||||
args.desc.structure.push_back(D::NoneType);
|
||||
} else if (PyBool_Check(obj)) { // Wrap integers in bool tensors
|
||||
} else if (PyBool_Check(obj)) { // Wrap bools in Bool tensors
|
||||
at::Tensor var = scalar_to_tensor(at::Scalar(THPUtils_unpackBool(obj)));
|
||||
args.vars.push_back(var);
|
||||
args.desc.metadata.emplace_back(var);
|
||||
args.desc.structure.push_back(D::Bool);
|
||||
} else if (PyLong_Check(obj)) { // Wrap integers in long tensors
|
||||
} else if (PyLong_Check(obj)) { // Wrap longs in Long tensors
|
||||
at::Tensor var = scalar_to_tensor(
|
||||
at::Scalar(static_cast<int64_t>(THPUtils_unpackLong(obj))));
|
||||
args.vars.push_back(var);
|
||||
args.desc.metadata.emplace_back(var);
|
||||
args.desc.structure.push_back(D::Long);
|
||||
} else if (PyFloat_Check(obj)) { // Wrap floating points in double tensors
|
||||
} else if (PyFloat_Check(obj)) { // Wrap floats in Double tensors
|
||||
at::Tensor var = scalar_to_tensor(THPUtils_unpackDouble(obj));
|
||||
args.vars.push_back(var);
|
||||
args.desc.metadata.emplace_back(var);
|
||||
|
Reference in New Issue
Block a user