Add API usage logging for several other RPC APIs. (#67722)
Summary: Pull Request resolved: https://github.com/pytorch/pytorch/pull/67722

ghstack-source-id: 142259452

Test Plan: waitforbuildbot

Reviewed By: jaceyca, fduwjj

Differential Revision: D32118872

fbshipit-source-id: 041ab5601221b1846c56ce4bb63364bec9ad28b0
committed by Facebook GitHub Bot
parent 5fd93fb5f8
commit 05e17e7ff6
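The pattern applied throughout this change is a one-time usage event per process: the C++ entry points call C10_LOG_API_USAGE_ONCE(...) and the Python entry points call torch._C._log_api_usage_once(...). Below is a minimal Python sketch of that mechanism; the PYTORCH_API_USAGE_STDERR debug switch is an assumption about the default logger, not something introduced by this diff.

# Minimal sketch: emit a usage event with the same call the Python entry
# points in this diff use. The event goes to whatever API-usage logger the
# hosting process has registered; PYTORCH_API_USAGE_STDERR=1 (assumed debug
# switch) echoes events to stderr.
import os

os.environ.setdefault("PYTORCH_API_USAGE_STDERR", "1")

import torch

torch._C._log_api_usage_once("torch.distributed.init_rpc")
torch._C._log_api_usage_once("torch.distributed.init_rpc")  # same key: not logged again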
@@ -12,6 +12,7 @@ void backward(
    int64_t context_id,
    const variable_list& roots,
    bool retain_graph) {
  C10_LOG_API_USAGE_ONCE("torch.distributed.autograd.backward");
  RECORD_FUNCTION(
      kDistAutogradBackwardProfilingKey, std::vector<c10::IValue>());
  try {
@@ -121,6 +121,7 @@ TypePtr tryInferTypeWithTypeHint(
PyRRef::PyRRef(c10::intrusive_ptr<RRef> rref)
    : rref_(std::move(rref)), profilingFuture_(c10::nullopt) {
  TORCH_CHECK(rref_, "PyRRef must not wrap nullptr");
  C10_LOG_API_USAGE_ONCE("torch.distributed.rref");
}

PyRRef::PyRRef(const py::object& value, const py::object& type_hint)
@@ -181,6 +182,7 @@ std::string PyRRef::ownerName() const {
}

py::object PyRRef::toHere(const float timeoutSeconds) const {
  C10_LOG_API_USAGE_ONCE("torch.distributed.rref.to_here");
  if (rref_->isOwner()) {
    return localValue();
  } else {
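For the RRef hooks above, here is a hedged, single-process sketch of the Python calls that would reach them: creating an RRef goes through the PyRRef constructor ("torch.distributed.rref") and to_here() goes through PyRRef::toHere ("torch.distributed.rref.to_here"). The worker name, port, and single-rank setup are illustrative choices, not part of the change.

# Hedged single-worker sketch of the calls that exercise the instrumented paths.
import os

import torch
import torch.distributed.rpc as rpc

os.environ.setdefault("MASTER_ADDR", "localhost")  # illustrative rendezvous settings
os.environ.setdefault("MASTER_PORT", "29500")

rpc.init_rpc("worker0", rank=0, world_size=1)  # logs "torch.distributed.init_rpc"

# rpc.remote returns an RRef, whose C++ side is the PyRRef constructed above.
rref = rpc.remote("worker0", torch.add, args=(torch.ones(2), 1))

# Owner-side to_here() just returns the local value, per the C++ branch shown above.
print(rref.to_here())  # tensor([2., 2.])

rpc.shutdown()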
@@ -208,6 +208,7 @@ class _RemoteModule(nn.Module):
            >>> rpc.shutdown()
        """
        super().__init__()
        torch._C._log_api_usage_once("torch.distributed.nn.api.remote_module")

        enable_moving_cpu_tensors_to_cuda = self._prepare_init(remote_device)
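The _RemoteModule constructor hook above fires whenever a RemoteModule is built. A hedged usage sketch, assuming an already-initialized RPC group containing "worker1" (worker names and layer sizes are illustrative):

# Hedged sketch: constructing a RemoteModule instantiates nn.Linear on the
# remote worker and hits the "torch.distributed.nn.api.remote_module" event
# added above. Assumes rpc.init_rpc(...) has already run on every worker.
import torch
import torch.nn as nn
from torch.distributed.nn import RemoteModule

remote_linear = RemoteModule(
    "worker1/cpu",   # "<workername>/<device>" placement string
    nn.Linear,       # module class created on the remote worker
    args=(20, 30),
)

# forward() is proxied over RPC; forward_async() returns a Future instead.
out = remote_linear.forward(torch.randn(16, 20))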
@@ -1,6 +1,7 @@
from typing import List, Optional
import logging

import torch
import torch.distributed.rpc as rpc
import torch.jit as jit
import torch.nn as nn
@@ -186,6 +187,7 @@ class DistributedOptimizer:
    """

    def __init__(self, optimizer_class, params_rref, *args, **kwargs):
        torch._C._log_api_usage_once("torch.distributed.optim.DistributedOptimizer")
        per_worker_params_rref = defaultdict(list)
        for param in params_rref:
            per_worker_params_rref[param.owner()].append(param)
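The DistributedOptimizer hook above fires once per constructed optimizer. The sketch below is essentially the pattern from the class docstring, run from an initialized worker in a group that also contains "worker1"; it also exercises the dist_autograd.backward logging added in the first hunk.

# Hedged, docstring-style sketch: distributed forward/backward plus a
# DistributedOptimizer step. Assumes an already-initialized RPC group.
import torch
import torch.distributed.autograd as dist_autograd
import torch.distributed.rpc as rpc
from torch import optim
from torch.distributed.optim import DistributedOptimizer

with dist_autograd.context() as context_id:
    # Forward pass: build RRefs to tensors owned by worker1.
    rref1 = rpc.remote("worker1", torch.add, args=(torch.ones(2), 3))
    rref2 = rpc.remote("worker1", torch.add, args=(torch.ones(2), 1))
    loss = rref1.to_here() + rref2.to_here()

    # Backward pass: logs "torch.distributed.autograd.backward" (first hunk).
    dist_autograd.backward(context_id, [loss.sum()])

    # Optimizer: the constructor logs "torch.distributed.optim.DistributedOptimizer".
    dist_optim = DistributedOptimizer(optim.SGD, [rref1, rref2], lr=0.05)
    dist_optim.step(context_id)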
@@ -110,7 +110,7 @@ if is_available():
                :ref:`rpc-backends` for more information and find which options
                are available.
        """

        torch._C._log_api_usage_once("torch.distributed.init_rpc")
        if backend is not None and not isinstance(
            backend, backend_registry.BackendType
        ):