mirror of
https://github.com/pytorch/pytorch.git
synced 2025-10-21 05:34:18 +08:00
Summary: Pull Request resolved: https://github.com/pytorch/pytorch/pull/35190 The following are the main changes: - The main logic of C++ API parity test mechanism is moved from `test/test_cpp_api_parity.py` to `test/cpp_api_parity/module_impl_check.py` and `test/cpp_api_parity/functional_impl_check.py`, so that there is a clear separation between module tests and functional tests, although they still share a lot of common utility functions which are all in `test/cpp_api_parity/utils.py`. - Module init tests (i.e. testing whether C++ module accepts the same constructor options as the corresponding Python module) is removed and will be added again in the future. - `cpp_constructor_args` / `cpp_options_args` / `cpp_function_call` are added as appropriate to all test params dict in `torch/testing/_internal/common_nn.py`, to indicate how to run C++ API parity test for this test params dict. Test Plan: Imported from OSS Differential Revision: D20588198 Pulled By: yf225 fbshipit-source-id: 11238c560c8247129584b9b49df73fff40c4d81d
104 lines
3.4 KiB
Python
import torch
|
|
|
|
'''
|
|
`SampleModule` is used by `test_cpp_api_parity.py` to test that Python / C++ API
|
|
parity test harness works for `torch.nn.Module` subclasses.
|
|
|
|
When `SampleModule.has_parity` is true, behavior of `forward` / `backward`
|
|
is the same as the C++ equivalent.
|
|
|
|
When `SampleModule.has_parity` is false, behavior of `forward` / `backward`
|
|
is different from the C++ equivalent.
|
|
'''
|
|
|
|
class SampleModule(torch.nn.Module):
    """Toy module for exercising the Python / C++ API parity test harness.

    Mirrors the C++ ``SampleModuleImpl`` defined in ``SAMPLE_MODULE_CPP_SOURCE``.
    With ``has_parity=True`` the forward computation matches the C++ side
    (``x + param * 2 + submodule(x)``); with ``has_parity=False`` it diverges
    on purpose (extra scale and offset) so that a parity mismatch is produced.
    """

    def __init__(self, has_parity, has_submodule):
        super().__init__()
        self.has_parity = has_parity
        # Register the child module first so the registration order
        # (`submodule` before `param`) matches the original layout.
        if has_submodule:
            self.submodule = SampleModule(self.has_parity, False)
        self.has_submodule = has_submodule
        # Assigning an nn.Parameter attribute registers it under the name
        # 'param', same as register_parameter('param', ...).
        self.param = torch.nn.Parameter(torch.empty(3, 4))
        self.reset_parameters()

    def reset_parameters(self):
        """Fill ``param`` with ones, without recording autograd history."""
        with torch.no_grad():
            self.param.fill_(1)

    def forward(self, x):
        """Return a value that matches the C++ module only when ``has_parity``."""
        if hasattr(self, 'submodule'):
            child_out = self.submodule(x)
        else:
            child_out = 0
        if not self.has_parity:
            # Intentionally wrong scale (4 vs 2) and offset (+3) relative to
            # the C++ implementation.
            return x + self.param * 4 + child_out + 3
        return x + self.param * 2 + child_out
|
|
|
|
# Expose the sample module on the `torch.nn` namespace — presumably so the
# parity test harness can resolve it by name the same way it resolves real
# modules (e.g. via `getattr(torch.nn, module_name)`); verify against the
# cpp_api_parity utilities.
torch.nn.SampleModule = SampleModule
|
|
|
|
SAMPLE_MODULE_CPP_SOURCE = """\n
|
|
namespace torch {
|
|
namespace nn {
|
|
struct C10_EXPORT SampleModuleOptions {
|
|
SampleModuleOptions(bool has_parity, bool has_submodule) : has_parity_(has_parity), has_submodule_(has_submodule) {}
|
|
|
|
TORCH_ARG(bool, has_parity);
|
|
TORCH_ARG(bool, has_submodule);
|
|
};
|
|
|
|
struct C10_EXPORT SampleModuleImpl : public torch::nn::Cloneable<SampleModuleImpl> {
|
|
explicit SampleModuleImpl(SampleModuleOptions options) : options(std::move(options)) {
|
|
if (options.has_submodule()) {
|
|
submodule = register_module(
|
|
"submodule",
|
|
std::make_shared<SampleModuleImpl>(SampleModuleOptions(options.has_parity(), false)));
|
|
}
|
|
reset();
|
|
}
|
|
void reset() {
|
|
param = register_parameter("param", torch::ones({3, 4}));
|
|
}
|
|
torch::Tensor forward(torch::Tensor x) {
|
|
return x + param * 2 + (submodule ? submodule->forward(x) : torch::zeros_like(x));
|
|
}
|
|
SampleModuleOptions options;
|
|
torch::Tensor param;
|
|
std::shared_ptr<SampleModuleImpl> submodule{nullptr};
|
|
};
|
|
|
|
TORCH_MODULE(SampleModule);
|
|
} // namespace nn
|
|
} // namespace torch
|
|
"""
|
|
|
|
# Test-params dicts consumed by the C++ API parity test harness. Each entry
# pairs a Python `SampleModule` construction with the equivalent C++
# expressions (`cpp_constructor_args` / `cpp_input_args`) used on the C++
# side of the comparison.
module_tests = [
    {
        'module_name': 'SampleModule',
        'desc': 'has_parity',
        'constructor_args': (True, True),
        'cpp_constructor_args': 'torch::nn::SampleModuleOptions(true, true)',
        'input_size': (3, 4),
        'cpp_input_args': ['torch::randn({3, 4})'],
        'has_parity': True,
    },
    {
        'fullname': 'SampleModule_no_parity',
        'constructor': lambda: SampleModule(has_parity=False, has_submodule=True),
        'cpp_constructor_args': 'torch::nn::SampleModuleOptions(false, true)',
        'input_size': (3, 4),
        'cpp_input_args': ['torch::randn({3, 4})'],
        'has_parity': False,
    },
    # This is to test that setting the `test_cpp_api_parity=False` flag skips
    # the C++ API parity test accordingly (otherwise this test would run and
    # throw a parity error).
    {
        'fullname': 'SampleModule_THIS_TEST_SHOULD_BE_SKIPPED',
        'constructor': lambda: SampleModule(False, True),
        'cpp_constructor_args': 'torch::nn::SampleModuleOptions(false, true)',
        'input_size': (3, 4),
        'cpp_input_args': ['torch::randn({3, 4})'],
        'test_cpp_api_parity': False,
    },
]
|