Add mish activation function (#58648)

Summary:
See issue: https://github.com/pytorch/pytorch/issues/58375

Pull Request resolved: https://github.com/pytorch/pytorch/pull/58648

Reviewed By: gchanan

Differential Revision: D28625390

Pulled By: jbschlosser

fbshipit-source-id: 23ea2eb7d5b3dc89c6809ff6581b90ee742149f4
This commit is contained in:
Adnios
2021-05-25 10:34:50 -07:00
committed by Facebook GitHub Bot
parent bf269fdc98
commit 09a8f22bf9
37 changed files with 434 additions and 2 deletions

View File

@ -1760,6 +1760,15 @@ TEST_F(FunctionalTest, Softsign) {
ASSERT_TRUE(torch::allclose(y, y_exp));
}
// NOLINTNEXTLINE(cppcoreguidelines-avoid-non-const-global-variables)
TEST_F(FunctionalTest, Mish) {
  const auto input = torch::randn(100) * 10;
  const auto actual = F::mish(input);
  // Reference definition: mish(x) = x * tanh(softplus(x)) = x * tanh(log(1 + exp(x)))
  const auto expected = input * torch::tanh(torch::log1p(torch::exp(input)));
  ASSERT_TRUE(torch::allclose(actual, expected));
}
// NOLINTNEXTLINE(cppcoreguidelines-avoid-non-const-global-variables)
TEST_F(FunctionalTest, Tanhshrink) {
auto x = torch::randn(100) * 10;