mirror of
https://github.com/pytorch/pytorch.git
synced 2025-10-20 21:14:14 +08:00
Document dispatch trace build flag (#145517)
OK — the build flag seems to have been broken for a while, since the function it calls no longer exists. Repurposed it to enable dispatcher printing (which otherwise requires a full — and slow — debug build). Pull Request resolved: https://github.com/pytorch/pytorch/pull/145517 Approved by: https://github.com/bdhirsh
This commit is contained in:
@ -763,7 +763,7 @@ C10_ALWAYS_INLINE_UNLESS_MOBILE Return Dispatcher::call(
|
||||
auto dispatchKeySet =
|
||||
op.operatorDef_->op.dispatchKeyExtractor()
|
||||
.template getDispatchKeySetUnboxed<Args...>(args...);
|
||||
#ifndef NDEBUG
|
||||
#if defined(HAS_TORCH_SHOW_DISPATCH_TRACE) || !defined(NDEBUG)
|
||||
DispatchTraceNestingGuard debug_guard;
|
||||
if (show_dispatch_trace()) {
|
||||
detail::_print_dispatch_trace(
|
||||
@ -816,7 +816,7 @@ inline Return Dispatcher::redispatch(
|
||||
DispatchKeySet currentDispatchKeySet,
|
||||
Args... args) const {
|
||||
// do not use RecordFunction on redispatch
|
||||
#ifndef NDEBUG
|
||||
#if defined(HAS_TORCH_SHOW_DISPATCH_TRACE) || !defined(NDEBUG)
|
||||
DispatchTraceNestingGuard debug_guard;
|
||||
if (show_dispatch_trace()) {
|
||||
detail::_print_dispatch_trace(
|
||||
@ -836,7 +836,7 @@ inline void Dispatcher::callBoxed(const OperatorHandle& op, Stack* stack)
|
||||
const auto& entry = op.operatorDef_->op;
|
||||
auto dispatchKeySet =
|
||||
entry.dispatchKeyExtractor().getDispatchKeySetBoxed(stack);
|
||||
#ifndef NDEBUG
|
||||
#if defined(HAS_TORCH_SHOW_DISPATCH_TRACE) || !defined(NDEBUG)
|
||||
DispatchTraceNestingGuard debug_guard;
|
||||
if (show_dispatch_trace()) {
|
||||
detail::_print_dispatch_trace(
|
||||
@ -904,7 +904,7 @@ inline void Dispatcher::redispatchBoxed(
|
||||
// note: this doesn't need the mutex because write operations on the list keep
|
||||
// iterators intact.
|
||||
const auto& entry = op.operatorDef_->op;
|
||||
#ifndef NDEBUG
|
||||
#if defined(HAS_TORCH_SHOW_DISPATCH_TRACE) || !defined(NDEBUG)
|
||||
DispatchTraceNestingGuard debug_guard;
|
||||
if (show_dispatch_trace()) {
|
||||
detail::_print_dispatch_trace(
|
||||
|
@ -114,9 +114,6 @@ static void autogradBasedTransformSendToNext(
|
||||
if (!tensor.defined()) {
|
||||
return tensor;
|
||||
}
|
||||
// if (c10::show_dispatch_trace_enabled()) {
|
||||
// std::cout << "wrap " << current_level << std::endl;
|
||||
// }
|
||||
return makeTensorWrapper(tensor, interpreter, is_immutable);
|
||||
};
|
||||
|
||||
|
@ -223,11 +223,6 @@ DynamicLayer popDynamicLayer() {
|
||||
dynamicLayerStack.pop_back();
|
||||
|
||||
if (dynamicLayerStack.empty()) {
|
||||
#ifdef HAS_TORCH_SHOW_DISPATCH_TRACE
|
||||
if (c10::show_dispatch_trace_enabled()) {
|
||||
std::cout << "DynamicLayer off" << std::endl;
|
||||
}
|
||||
#endif
|
||||
setDynamicLayerFrontBackKeysIncluded(false);
|
||||
}
|
||||
|
||||
@ -242,11 +237,6 @@ int64_t pushDynamicLayer(DynamicLayer&& dynamic_layer) {
|
||||
|
||||
if (layerId == 1) {
|
||||
setDynamicLayerFrontBackKeysIncluded(true);
|
||||
#ifdef HAS_TORCH_SHOW_DISPATCH_TRACE
|
||||
if (c10::show_dispatch_trace_enabled()) {
|
||||
std::cout << "DynamicLayer on" << std::endl;
|
||||
}
|
||||
#endif
|
||||
}
|
||||
|
||||
return layerId;
|
||||
@ -398,14 +388,6 @@ std::optional<size_t> findAliasedOutput(const FunctionSchema& schema, const int6
|
||||
return std::nullopt;
|
||||
}
|
||||
|
||||
#ifdef HAS_TORCH_SHOW_DISPATCH_TRACE
|
||||
static void dump_local_tls() {
|
||||
auto tls = c10::impl::tls_local_dispatch_key_set();
|
||||
std::cout << "[Local Include] " << tls.included_ << std::endl;
|
||||
std::cout << "[Local Exclude] " << tls.excluded_ << std::endl;
|
||||
}
|
||||
#endif
|
||||
|
||||
struct WithoutTop {
|
||||
WithoutTop();
|
||||
WithoutTop(WithoutTop&& other) = delete;
|
||||
@ -451,12 +433,6 @@ static void dynamicLayerFrontFallback(
|
||||
torch::jit::Stack* stack) {
|
||||
auto& dynamicLayerStack = dynamicLayerStackAccessor();
|
||||
TORCH_INTERNAL_ASSERT(!dynamicLayerStack.empty());
|
||||
#ifdef HAS_TORCH_SHOW_DISPATCH_TRACE
|
||||
if (c10::show_dispatch_trace_enabled()) {
|
||||
std::cout << dynamicLayerStack << std::endl;
|
||||
dump_local_tls();
|
||||
}
|
||||
#endif
|
||||
// Save the current LocalDispatchKeySet (to the current DynamicLayer).
|
||||
// Upon exiting the current scope, that LocalDispatchKeySet gets restored.
|
||||
// When the current DynamicLayer dispatches to the next (inner) DynamicLayer,
|
||||
|
setup.py (5 lines changed)
@ -22,6 +22,11 @@
|
||||
# also applies to C++ files (unless CXXFLAGS is set), in contrast to the
|
||||
# default behavior of autogoo and cmake build systems.)
|
||||
#
|
||||
# A specific flag that can be used is
|
||||
# -DHAS_TORCH_SHOW_DISPATCH_TRACE
|
||||
# build with dispatch trace that can be enabled with
|
||||
# TORCH_SHOW_DISPATCH_TRACE=1 at runtime.
|
||||
#
|
||||
# CC
|
||||
# the C/C++ compiler to use
|
||||
#
|
||||
|
Reference in New Issue
Block a user