Mirror of https://github.com/pytorch/pytorch.git (synced 2025-10-20 21:14:14 +08:00)
Add aten::softmax NNAPI converter (#58539)
Summary: Add support for aten::softmax op in the NNAPI model converter with flexible size Pull Request resolved: https://github.com/pytorch/pytorch/pull/58539 Test Plan: pytest test/test_nnapi.py::TestNNAPI::test_softmax Reviewed By: anshuljain1 Differential Revision: D28531946 fbshipit-source-id: 8633f3e3f7f52795f9866ff16ad0867ea36a19e8
Committed by: Facebook GitHub Bot
Parent: 45ce26c397
Commit: 14d604a13e
@@ -227,6 +227,17 @@ class TestNNAPI(TestCase):
         with self.assertRaisesRegex(Exception, "hardtanh with args"):
             self.check(torch.nn.Hardtanh(0.0, 5.0), inp)
 
+    def test_softmax(self):
+        inp = torch.tensor([[-2.0, -0.5], [0.5, 2.0]])
+        self.check(torch.nn.Softmax(), inp)
+        self.check(torch.nn.Softmax(dim=0), inp)
+        # Test flexible size
+        self.check(
+            torch.nn.Softmax(),
+            inp,
+            convert_args=[torch.zeros(0, 0)],
+        )
+
     def test_mean(self):
         class MeanModule(torch.nn.Module):
             def __init__(self, dim, keep=False):
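The check helper used above lives elsewhere in test/test_nnapi.py and is not shown in this hunk. Roughly, and this is a simplification under assumption rather than the actual helper, it traces the module, converts the trace for NNAPI using convert_args when given (otherwise the real input), runs both the eager module and the NNAPI module, and compares the outputs. A minimal sketch of that flow for the softmax case:

import torch
from torch.backends._nnapi.prepare import convert_model_to_nnapi

def check_softmax(module, inp, convert_args=None):
    # Hypothetical stand-in for TestNNAPI.check, simplified for illustration.
    # Trace with the real input so aten::softmax appears in the graph.
    traced = torch.jit.trace(module, inp)
    # Convert with either the real input or a flexible-size template tensor.
    template = convert_args[0] if convert_args else inp
    nnapi_module = convert_model_to_nnapi(traced, template)
    # Compare NNAPI output against eager execution; the NNAPI side only
    # actually runs on an Android device with libneuralnetworks available.
    expected = module(inp)
    actual = nnapi_module(inp)
    torch.testing.assert_allclose(actual, expected, rtol=1e-3, atol=1e-5)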