Files
pytorch/test/cpp/jit/tests_setup.py
Xuehai Pan 758a0a88a2 [BE][Easy] enable ruff rule PIE790: unnecessary pass statement (#133200)
This PR removes unnecessary `pass` statements. This is semantically safe because the bytecode for the Python code does not change.

Note that if there is a docstring in the function, an empty function does not need a `pass` statement as a placeholder.

Pull Request resolved: https://github.com/pytorch/pytorch/pull/133200
Approved by: https://github.com/malfet, https://github.com/eqy, https://github.com/kit1980
2024-08-15 15:50:19 +00:00

119 lines
2.5 KiB
Python

import os
import sys
import torch
class Setup:
    """Abstract interface for a test fixture: create state in setup() and
    remove it again in shutdown()."""

    def setup(self):
        # Subclasses must create whatever artifact the C++ test expects.
        raise NotImplementedError

    def shutdown(self):
        # Subclasses must remove anything setup() created.
        raise NotImplementedError
class FileSetup:
    """Fixture whose artifact is a single file located at ``path``.

    Subclasses set ``path`` and implement ``setup()``; this base class
    provides the common cleanup of deleting that file.
    """

    path = None

    def shutdown(self):
        """Remove the artifact file, doing nothing if it was never created."""
        if not os.path.exists(self.path):
            return
        os.remove(self.path)
class EvalModeForLoadedModule(FileSetup):
    """Write a scripted model containing a Dropout layer, saved while still
    in training mode, so the C++ side can load it and switch it to eval."""

    path = "dropout_model.pt"

    def setup(self):
        class Model(torch.jit.ScriptModule):
            def __init__(self) -> None:
                super().__init__()
                self.dropout = torch.nn.Dropout(0.1)

            @torch.jit.script_method
            def forward(self, x):
                return self.dropout(x)

        # Save in train mode so that flipping to eval() on load has an
        # observable effect on the dropout layer.
        Model().train().save(self.path)
class SerializationInterop(FileSetup):
    """Save a tuple of tensors using the zipfile serialization format so the
    C++ serialization-interop test can read it back."""

    path = "ivalue.pt"

    def setup(self):
        payload = (torch.ones(2, 2), torch.ones(3, 5) * 2)
        torch.save(payload, self.path, _use_new_zipfile_serialization=True)
# See testTorchSaveError in test/cpp/jit/tests.h for usage
class TorchSaveError(FileSetup):
    """Save a tuple of tensors in the legacy (non-zipfile) format; the C++
    test uses this file to exercise its error path."""

    path = "eager_value.pt"

    def setup(self):
        payload = (torch.ones(2, 2), torch.ones(3, 5) * 2)
        torch.save(payload, self.path, _use_new_zipfile_serialization=False)
class TorchSaveJitStream_CUDA(FileSetup):
    # Produces a scripted model that exercises torch.cuda.Stream inside
    # TorchScript; consumed by the C++ CUDA-stream JIT test.
    path = "saved_stream_model.pt"

    def setup(self):
        # Skip silently on CPU-only machines; the matching C++ test is
        # expected to handle the missing file / be skipped there as well.
        if not torch.cuda.is_available():
            return

        class Model(torch.nn.Module):
            def forward(self):
                s = torch.cuda.Stream()
                a = torch.rand(3, 4, device="cuda")
                b = torch.rand(3, 4, device="cuda")

                with torch.cuda.stream(s):
                    # True iff the context manager made `s` the current
                    # stream on its device.
                    is_stream_s = (
                        torch.cuda.current_stream(s.device_index()).id() == s.id()
                    )
                    c = torch.cat((a, b), 0).to("cuda")
                s.synchronize()
                return is_stream_s, a, b, c

        model = Model()

        # Script the model and save
        script_model = torch.jit.script(model)
        torch.jit.save(script_model, self.path)
# Fixture instances driven by setup()/shutdown() below; each one writes
# (and later removes) a single artifact file used by the C++ JIT tests.
tests = [
    EvalModeForLoadedModule(),
    SerializationInterop(),
    TorchSaveError(),
    TorchSaveJitStream_CUDA(),
]
def setup():
    """Create the artifact file for every registered fixture."""
    for fixture in tests:
        fixture.setup()
def shutdown():
    """Remove the artifact file of every registered fixture."""
    for fixture in tests:
        fixture.shutdown()
if __name__ == "__main__":
    # Invoked by the C++ JIT test harness as:
    #   python tests_setup.py setup|shutdown
    if len(sys.argv) < 2:
        # Previously this raised a bare IndexError; fail with a usage message.
        sys.exit("usage: tests_setup.py {setup|shutdown}")
    command = sys.argv[1]
    if command == "setup":
        setup()
    elif command == "shutdown":
        shutdown()
    else:
        # Previously an unknown command was silently ignored; fail loudly so
        # a harness typo is caught instead of tests running without fixtures.
        sys.exit(f"unknown command: {command!r} (expected 'setup' or 'shutdown')")