mirror of
https://github.com/pytorch/pytorch.git
synced 2025-11-06 00:54:56 +08:00
[PyTorch] Avoid initializing storage for empty Optionals
Pull Request resolved: https://github.com/pytorch/pytorch/pull/78947. We don't need to initialize for the non-constexpr case ever, or in the constexpr case after C++20. Differential Revision: [D36519379](https://our.internmc.facebook.com/intern/diff/D36519379/) Approved by: https://github.com/ezyang, https://github.com/malfet
This commit is contained in:
committed by
PyTorch MergeBot
parent
65fd0cdddb
commit
cf3ce329b5
@@ -5,20 +5,13 @@ namespace torch {
|
||||
namespace distributed {
|
||||
namespace rpc {
|
||||
|
||||
namespace {
|
||||
// WorkerInfo needs to be registered exactly once. Since the op registration
|
||||
// happens in libtorch_python we wrap the class registration in a helper to make
|
||||
// sure that if there's multiple copies of Python such as used in torch::deploy
|
||||
// we only ever register it once.
|
||||
static std::once_flag workerInfoFlag;
|
||||
static c10::optional<torch::class_<WorkerInfo>> workerInfo;
|
||||
} // namespace
|
||||
|
||||
// Registers the WorkerInfo TorchScript class exactly once.
//
// NOTE(review): the scraped diff interleaved the removed body (std::call_once
// + assignment into a c10::optional) with the added body; only the committed
// version is kept here. Since the op registration happens in libtorch_python,
// the registration is wrapped in this helper so that if there are multiple
// copies of Python (such as used in torch::deploy) we only ever register it
// once.
RegisterWorkerInfoOnce::RegisterWorkerInfoOnce() {
  // A function-local static is initialized exactly once, thread-safely, on
  // first entry ([stmt.dcl]); this replaces the once_flag/optional pair and
  // avoids initializing static storage for an empty c10::optional.
  static auto workerInfo = torch::class_<WorkerInfo>("dist_rpc", "WorkerInfo")
      .def(torch::init<std::string, int64_t>());
}
|
||||
|
||||
// Out-of-line definition for the in-class-initialized static constexpr
// member. Required for ODR-uses prior to C++17 (where constexpr static data
// members became implicitly inline); redundant but harmless in C++17+.
constexpr size_t WorkerInfo::MAX_NAME_LEN;
|
||||
|
||||
Reference in New Issue
Block a user