Add aten::softmax NNAPI converter (#58539)

Summary:
Add support for the aten::softmax op in the NNAPI model converter, including flexible-size (variable-dimension) inputs.

Pull Request resolved: https://github.com/pytorch/pytorch/pull/58539

Test Plan: pytest test/test_nnapi.py::TestNNAPI::test_softmax

Reviewed By: anshuljain1

Differential Revision: D28531946

fbshipit-source-id: 8633f3e3f7f52795f9866ff16ad0867ea36a19e8
Author: Akshit Khurana
Date: 2021-07-07 12:37:51 -07:00
Committer: Facebook GitHub Bot
Parent: 45ce26c397
Commit: 14d604a13e
2 changed files with 34 additions and 0 deletions

test/test_nnapi.py

@@ -227,6 +227,17 @@ class TestNNAPI(TestCase):
         with self.assertRaisesRegex(Exception, "hardtanh with args"):
             self.check(torch.nn.Hardtanh(0.0, 5.0), inp)
 
+    def test_softmax(self):
+        inp = torch.tensor([[-2.0, -0.5], [0.5, 2.0]])
+        self.check(torch.nn.Softmax(), inp)
+        self.check(torch.nn.Softmax(dim=0), inp)
+        # Test flexible size
+        self.check(
+            torch.nn.Softmax(),
+            inp,
+            convert_args=[torch.zeros(0, 0)],
+        )
+
     def test_mean(self):
         class MeanModule(torch.nn.Module):
             def __init__(self, dim, keep=False):
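
For context, here is a minimal usage sketch (not part of this commit) of the path the new test exercises, assuming the convert_model_to_nnapi entry point from torch.backends._nnapi.prepare that test_nnapi.py's check() helper drives. The conversion step runs on the host; executing the returned module requires a device with NNAPI.

import torch
from torch.backends._nnapi.prepare import convert_model_to_nnapi

# Trace a Softmax module; the NNAPI converter consumes TorchScript.
model = torch.nn.Softmax(dim=1).eval()
example = torch.tensor([[-2.0, -0.5], [0.5, 2.0]])
traced = torch.jit.trace(model, example)

# Fixed-size conversion: the example input pins every operand shape.
nnapi_fixed = convert_model_to_nnapi(traced, [example])

# Flexible-size conversion: a zero-sized dimension in the template marks that
# dimension as variable, mirroring convert_args=[torch.zeros(0, 0)] in the new
# test. Concrete shapes are supplied when the module is run on device.
nnapi_flex = convert_model_to_nnapi(traced, [torch.zeros(0, 0)])

When a device with NNAPI is available, the test's check() helper runs both the eager module and the converted module on the same input and compares their outputs.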