Files
pytorch/torch/csrc/distributed/rpc/message.cpp
cyy f7c0c230b0 Fix compile errors (#148758)
Fixes the following build error (Clang against GCC 14.2.1 libstdc++), caused by instantiating the destructor of a `std::unique_ptr` whose pointee type `torch::jit::AliasDb::WriteRegistry` is still only forward-declared at that point:
```
  /usr/bin/../lib64/gcc/x86_64-pc-linux-gnu/14.2.1/../../../../include/c++/14.2.1/bits/unique_ptr.h:91:16: error: invalid application of 'sizeof' to an incomplete type 'torch::jit::AliasDb::WriteRegistry'
     91 |         static_assert(sizeof(_Tp)>0,
        |                       ^~~~~~~~~~~
  /usr/bin/../lib64/gcc/x86_64-pc-linux-gnu/14.2.1/../../../../include/c++/14.2.1/bits/unique_ptr.h:399:4: note: in instantiation of member function 'std::default_delete<torch::jit::AliasDb::WriteRegistry>::operator()' requested here
    399 |           get_deleter()(std::move(__ptr));
        |           ^
  ../torch/csrc/jit/ir/alias_analysis.cpp:200:10: note: in instantiation of member function 'std::unique_ptr<torch::jit::AliasDb::WriteRegistry>::~unique_ptr' requested here
    200 | AliasDb::~AliasDb() = default;
        |          ^
  ../torch/csrc/jit/ir/alias_analysis.cpp:200:23: note: in defaulted destructor for 'torch::jit::AliasDb' first required here
    200 | AliasDb::~AliasDb() = default;
        |                       ^
  ../torch/csrc/jit/ir/alias_analysis.h:298:10: note: forward declaration of 'torch::jit::AliasDb::WriteRegistry'
    298 |   struct WriteRegistry;
        |          ^
  1 error generated.
```

Pull Request resolved: https://github.com/pytorch/pytorch/pull/148758
Approved by: https://github.com/Skylion007
2025-03-08 04:56:42 +00:00

117 lines
3.0 KiB
C++

#include <torch/csrc/distributed/rpc/message.h>
#include <torch/custom_class.h>
namespace torch::distributed::rpc {
// Default-constructed Message: empty payload, no tensors; type_ and id_ take
// the in-class member initializers declared in message.h (not visible here).
Message::Message() = default;
// Construct a Message without an id (used for messages that don't need
// request/response matching). Takes ownership of the serialized payload
// bytes and the tensor list by moving them into the members.
Message::Message(
std::vector<char>&& payload,
std::vector<torch::Tensor>&& tensors,
MessageType type)
: payload_(std::move(payload)), tensors_(std::move(tensors)), type_(type) {}
// Construct a Message that also carries a request/response id. Delegates to
// the id-less constructor for payload/tensors/type, then records the id.
Message::Message(
    std::vector<char>&& payload,
    std::vector<torch::Tensor>&& tensors,
    MessageType type,
    int64_t id)
    : Message(std::move(payload), std::move(tensors), type) {
  id_ = id;
}
// Rvalue-qualified: steals the payload from a Message that is about to be
// discarded. Callable only on an rvalue Message; leaves payload_ moved-from.
std::vector<char>&& Message::movePayload() && {
return std::move(payload_);
}
// Mutable access to the serialized payload bytes.
std::vector<char>& Message::payload() {
return payload_;
}
// Read-only access to the serialized payload bytes.
const std::vector<char>& Message::payload() const {
return payload_;
}
// Rvalue-qualified: steals the tensor list from a Message that is about to
// be discarded. Callable only on an rvalue Message; leaves tensors_ moved-from.
std::vector<torch::Tensor>&& Message::moveTensors() && {
return std::move(tensors_);
}
// Mutable access to the tensors carried alongside the payload.
std::vector<torch::Tensor>& Message::tensors() {
return tensors_;
}
// Read-only access to the tensors carried alongside the payload.
const std::vector<torch::Tensor>& Message::tensors() const {
return tensors_;
}
// The message's type tag (request/response/exception/etc.).
MessageType Message::type() const {
return type_;
}
// True iff this message's type encodes the REQUEST_TYPE flag bit.
// The comparison against 0 makes the int -> bool conversion explicit
// instead of relying on the implicit narrowing in the return statement.
bool Message::isRequest() const {
  return (static_cast<int>(MessageTypeFlags::REQUEST_TYPE) &
          static_cast<int>(type_)) != 0;
}
// True iff this message's type encodes the RESPONSE_TYPE flag bit.
// The comparison against 0 makes the int -> bool conversion explicit
// instead of relying on the implicit narrowing in the return statement.
bool Message::isResponse() const {
  return (static_cast<int>(MessageTypeFlags::RESPONSE_TYPE) &
          static_cast<int>(type_)) != 0;
}
// The id used to match a response to its originating request.
int64_t Message::id() const {
return id_;
}
// Set the request/response matching id.
void Message::setId(int64_t id) {
id_ = id;
}
std::vector<c10::weak_intrusive_ptr<c10::StorageImpl>> Message::getStorages()
const {
// Sparse tensors do not have storage. Instead, a sparse tensor
// contains two tensors indices and values, and both contain storage.
std::vector<c10::weak_intrusive_ptr<c10::StorageImpl>> storages;
storages.reserve(2 * tensors_.size());
for (const auto& tensor : tensors_) {
if (tensor.is_sparse()) {
storages.emplace_back(tensor._indices().storage().getWeakStorageImpl());
storages.emplace_back(tensor._values().storage().getWeakStorageImpl());
} else {
storages.emplace_back(tensor.storage().getWeakStorageImpl());
}
}
return storages;
}
// Convenience overload: build an EXCEPTION response from a caught exception
// by forwarding its what() text to the string overload below.
c10::intrusive_ptr<Message> createExceptionResponse(
const std::exception& e,
int64_t id) {
return createExceptionResponse(e.what(), id);
}
// Build an EXCEPTION response carrying `exceptionStr` as the raw payload
// bytes (no tensors), tagged with the given request id.
c10::intrusive_ptr<Message> createExceptionResponse(
    const std::string& exceptionStr,
    int64_t id) {
  std::vector<char> body;
  body.assign(exceptionStr.begin(), exceptionStr.end());
  return c10::make_intrusive<Message>(
      std::move(body),
      std::vector<torch::Tensor>{},
      MessageType::EXCEPTION,
      id);
}
namespace {
// NB: need to call torch::class_ to register Message in the map returned by
// c10::getCustomClassTypeMap(). Otherwise, Message cannot be wrapped within
// an IValue.
// NB: add this line here instead of in rpc/init.cpp because 1) we have C++
// only tests that won't run rpc/init.cpp; 2) Message is not meant to be
// visible from Python.
// Note: no `static` — entities in an anonymous namespace already have
// internal linkage, so the keyword was redundant.
const auto message = torch::class_<Message>("rpc", "_Message");
} // namespace
} // namespace torch::distributed::rpc