Summary:
Pull Request resolved: https://github.com/pytorch/pytorch/pull/24801

This is to fix the ODR violations in fbcode static builds, which have been broken for several months. This PR is unfortunately quite large, but the changes are only mechanical:
1. Tests defined in header files -> tests defined in cpp files.
2. Remove the `torch::jit::testing` namespace -> `torch::jit`.
3. Single `test.h` file that aggregates all tests.
4. Separate files for the gtest and Python versions of the tests instead of using a build flag.
5. Add a README explaining how to add a new test, and a bit about why the cpp tests are the way they are.

Test Plan: Imported from OSS

Differential Revision: D16878605

Pulled By: suo

fbshipit-source-id: 27b5c077dadd990a5f74e25d01731f9c1f491603
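The file shown below is one example of point 1: the test body is an ordinary function in a .cpp file, directly in the torch::jit namespace. A minimal sketch of how points 3 and 4 might fit together follows; the TH_FORALL_TESTS and JIT_GTEST names and file paths are assumptions for illustration only, not necessarily the exact identifiers used in this PR.

// Hypothetical test/cpp/jit/test.h -- the single aggregation point.
// Each test function is listed once in a macro so that both the gtest
// and the Python entry points can expand the same list.
#pragma once
namespace torch {
namespace jit {

#define TH_FORALL_TESTS(_) \
  _(PeepholeOptimize)      \
  _(Attributes)

// Declare void testPeepholeOptimize(); void testAttributes(); etc.
#define DECLARE_JIT_TEST(name) void test##name();
TH_FORALL_TESTS(DECLARE_JIT_TEST)
#undef DECLARE_JIT_TEST

} // namespace jit
} // namespace torch

// Hypothetical test/cpp/jit/gtest.cpp -- the gtest entry point expands the
// same list into one TEST case per function, instead of switching on a
// build flag.
#include <gtest/gtest.h>
#include <test/cpp/jit/test.h>
namespace torch {
namespace jit {

#define JIT_GTEST(name)  \
  TEST(JitTest, name) {  \
    test##name();        \
  }
TH_FORALL_TESTS(JIT_GTEST)
#undef JIT_GTEST

} // namespace jit
} // namespace torch

A separate file for the Python bindings can expand the same TH_FORALL_TESTS list into calls made from a single entry point, which is what replaces the old build-flag switch.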
#include <test/cpp/jit/test_base.h>
#include <test/cpp/jit/test_utils.h>

#include <torch/csrc/jit/irparser.h>
#include <torch/csrc/jit/passes/peephole.h>

namespace torch {
namespace jit {

using namespace script;

void testPeepholeOptimize() {
  // test is / is not none optimization
  {
    auto graph = std::make_shared<Graph>();
    parseIR(
        R"IR(
graph(%0 : int):
  %1 : None = prim::Constant()
  %2 : bool = aten::__is__(%0, %1)
  %3 : bool = aten::__isnot__(%0, %1)
  return (%2, %3)
  )IR",
        graph.get());
    PeepholeOptimize(graph);
    testing::FileCheck()
        .check_not("aten::__is__")
        ->check_not("aten::__isnot__")
        ->run(*graph);
  }
  {
    auto graph = std::make_shared<Graph>();
    parseIR(
        R"IR(
graph(%0 : int?):
  %1 : None = prim::Constant()
  %2 : bool = aten::__is__(%0, %1)
  %3 : bool = aten::__isnot__(%0, %1)
  return (%2, %3)
  )IR",
        graph.get());
    PeepholeOptimize(graph);
    testing::FileCheck()
        .check("aten::__is__")
        ->check("aten::__isnot__")
        ->run(*graph);
  }

  {
    auto graph = std::make_shared<Graph>();
    parseIR(
        R"IR(
graph(%0 : int?):
  %1 : Tensor = prim::AutogradZero()
  %2 : None = prim::Constant()
  %4 : bool = aten::__is__(%0, %1)
  %5 : bool = aten::__isnot__(%1, %2)
  return (%4, %5)
  )IR",
        graph.get());
    PeepholeOptimize(graph);
    testing::FileCheck()
        .check("aten::__is__")
        ->check_not("aten::__isnot__")
        ->run(*graph);
  }

  // test unwrap optional
  {
    auto graph = std::make_shared<Graph>();
    parseIR(
        R"IR(
graph():
  %1 : Float(*, *, *) = prim::Constant()
  %2 : bool = aten::_unwrap_optional(%1)
  %3 : bool = prim::unchecked_unwrap_optional(%1)
  return (%2, %3)
  )IR",
        graph.get());
    PeepholeOptimize(graph);
    testing::FileCheck().check_not("unwrap")->run(*graph);
  }
  {
    auto graph = std::make_shared<Graph>();
    parseIR(
        R"IR(
graph(%1 : Float(*, *, *)?):
  %2 : bool = aten::_unwrap_optional(%1)
  %3 : bool = prim::unchecked_unwrap_optional(%1)
  return (%2, %3)
  )IR",
        graph.get());
    PeepholeOptimize(graph);
    testing::FileCheck().check_count("unwrap", 2)->run(*graph);
  }
}

} // namespace jit
} // namespace torch