[CI] Removed tests for torch.utils.tensorboard.summary.hparams (#122556)

Partially addresses #122160

In the module `torch.utils.tensorboard.summary`, the `hparams` function does not depend on any utilities from PyTorch; it uses only utilities from `tensorboard`. Thus, I think it is safe to delete the tests for the `hparams` function, since they do not exercise any PyTorch code.

Pull Request resolved: https://github.com/pytorch/pytorch/pull/122556
Approved by: https://github.com/huydhn
This commit is contained in:
Arun Pa
2024-03-29 21:43:58 +00:00
committed by PyTorch MergeBot
parent 482d8bf1ea
commit 05e54536fb

View File

@ -448,60 +448,6 @@ class TestTensorBoardSummary(BaseTestCase):
}
summary.custom_scalars(layout) # only smoke test. Because protobuf in python2/3 serialize dictionary differently.
def test_hparams_smoke(self):
    """Smoke-test summary.hparams with numeric, bool/str, and tensor values.

    Only a smoke test: protobuf serializes dictionaries in a
    nondeterministic order, so comparing exact serialized output
    across interpreter versions is fragile.
    """
    # Numeric hyperparameters and metrics.
    params = {'lr': 0.1, 'bsize': 4}
    metrics = {'accuracy': 0.1, 'loss': 10}
    summary.hparams(params, metrics)
    # Boolean and string hyperparameter values.
    params = {'use_magic': True, 'init_string': "42"}
    metrics = {'accuracy': 0.1, 'loss': 10}
    summary.hparams(params, metrics)
    # Tensor-valued metrics.
    metrics = {'accuracy': torch.zeros(1), 'loss': torch.zeros(1)}
    summary.hparams(params, metrics)
def test_hparams_wrong_parameter(self):
    """Verify summary.hparams rejects invalid argument types.

    Checks that non-dict arguments raise TypeError, that an
    unsupported hyperparameter value type raises ValueError, and
    that an unsupported metric value type surfaces as
    NotImplementedError via the writer path.
    """
    # hparam_dict and metric_dict must both be dicts.
    with self.assertRaises(TypeError):
        summary.hparams([], {})
    with self.assertRaises(TypeError):
        summary.hparams({}, [])
    # A list is not a supported hyperparameter value type.
    # (Fixed: dropped the dead `res =` assignment — the call raises,
    # so the result was never bound or used.)
    with self.assertRaises(ValueError):
        summary.hparams({'pytorch': [1, 2]}, {'accuracy': 2.0})
    # metric data is used in writer.py so the code path is different,
    # which leads to a different exception type.
    with self.assertRaises(NotImplementedError):
        with self.createSummaryWriter() as writer:
            writer.add_hparams({'pytorch': 1.0}, {'accuracy': [1, 2]})
def test_hparams_number(self):
    """A float hyperparameter serializes to the expected proto."""
    self.assertTrue(
        compare_proto(summary.hparams({'lr': 0.1}, {'accuracy': 0.1}), self)
    )
def test_hparams_bool(self):
    """A boolean hyperparameter serializes to the expected proto."""
    self.assertTrue(
        compare_proto(summary.hparams({'bool_var': True}, {'accuracy': 0.1}), self)
    )
def test_hparams_string(self):
    """A string hyperparameter serializes to the expected proto."""
    self.assertTrue(
        compare_proto(summary.hparams({'string_var': "hi"}, {'accuracy': 0.1}), self)
    )
def test_hparams_domain_discrete(self):
    """Validate the hparam_domain_discrete argument of summary.hparams."""
    hparams = {"lr": 0.1, "bool_var": True, "string_var": "hi"}
    metrics = {"accuracy": 0.1}
    domain = {"lr": [0.1], "bool_var": [True], "string_var": ["hi"]}

    # Every hparam_domain_discrete key must also appear in hparam_dict.
    with self.assertRaises(TypeError):
        summary.hparams(hparams, metrics, hparam_domain_discrete={"wrong_key": []})

    # Domain values must match the type of the corresponding hparam value.
    with self.assertRaises(TypeError):
        summary.hparams(hparams, metrics, hparam_domain_discrete={"lr": [True]})

    # Valid domain: smoke test only, since protobuf map serialization
    # is nondeterministic.
    summary.hparams(hparams, metrics, hparam_domain_discrete=domain)
@unittest.skipIf(IS_MACOS, "Skipping on mac, see https://github.com/pytorch/pytorch/pull/109349 ")
def test_mesh(self):