Mirror of https://github.com/pytorch/pytorch.git (synced 2025-10-20 21:14:14 +08:00).

This change does a few things that were originally separate PRs (the author was on a new machine without ghstack; it can be re-split if that makes review too hard):
- Clean up context-manager use in test_flop_counter.
- Remove the need for the `mod` argument in FlopCounterMode, warning about it.
- Re-implement a Module tracker from scratch using a global forward Module hook and `multi_grad_hook` (a global backward Module hook cannot be used because it does not look for nested Tensors and is custom-Function based instead of `multi_grad_hook` based).
- Update FlopCounterMode to use the new ModuleTracker.

The entire existing test suite passes as-is (the only changes there are the new tests and the refactoring mentioned above).
Pull Request resolved: https://github.com/pytorch/pytorch/pull/125352
Approved by: https://github.com/mikaylagawarecki

File: 66 lines, 1.8 KiB, Python.
# Owner(s): ["module: unknown"]
|
|
|
|
from copy import copy
|
|
|
|
import torch
|
|
from torch.testing._internal.common_utils import run_tests, TestCase
|
|
from torch.utils.module_tracker import ModuleTracker
|
|
|
|
|
|
class TestModuleTracker(TestCase):
|
|
def test_module_hierarchy(self):
|
|
seen_fw = []
|
|
seen_bw = []
|
|
|
|
class Foo(torch.nn.Module):
|
|
def forward(self, x):
|
|
x = x["a"].relu_()
|
|
seen_fw.append((copy(tracker.parents), tracker.is_bw))
|
|
x.register_hook(
|
|
lambda grad: seen_bw.append((copy(tracker.parents), tracker.is_bw))
|
|
)
|
|
return {"a": torch.mm(x, x)}
|
|
|
|
class Mod(torch.nn.Module):
|
|
def __init__(self):
|
|
super().__init__()
|
|
self.a = Foo()
|
|
self.b = Foo()
|
|
|
|
def forward(self, x):
|
|
return self.b(self.a(x))
|
|
|
|
mod = Mod()
|
|
|
|
with ModuleTracker() as tracker:
|
|
mod({"a": torch.randn(10, 10, requires_grad=True).clone()})[
|
|
"a"
|
|
].sum().backward()
|
|
mod({"a": torch.randn(10, 10, requires_grad=True).clone()})[
|
|
"a"
|
|
].sum().backward()
|
|
|
|
self.assertEqual(
|
|
seen_fw,
|
|
[
|
|
({"Global", "Mod", "Mod.a"}, False),
|
|
({"Global", "Mod", "Mod.b"}, False),
|
|
({"Global", "Mod", "Mod.a"}, False),
|
|
({"Global", "Mod", "Mod.b"}, False),
|
|
],
|
|
)
|
|
|
|
self.assertEqual(
|
|
seen_bw,
|
|
[
|
|
({"Global", "Mod", "Mod.b"}, True),
|
|
({"Global", "Mod", "Mod.a"}, True),
|
|
({"Global", "Mod", "Mod.b"}, True),
|
|
({"Global", "Mod", "Mod.a"}, True),
|
|
],
|
|
)
|
|
|
|
|
|
# Standard PyTorch test-suite entry point: dispatch to the shared runner
# so this file can be executed directly.
if __name__ == "__main__":
    run_tests()
|