Enable Gelu fp32/bf16 in CPU path using Mkldnn implementation (#58525)
Summary: Enable GELU bf16/fp32 in the CPU path using the MKL-DNN implementation. Users no longer need to call to_mkldnn() explicitly, and the new GELU fp32 kernel performs better than the original one. Also adds the GELU backward pass for https://github.com/pytorch/pytorch/pull/53615.

Pull Request resolved: https://github.com/pytorch/pytorch/pull/58525
Reviewed By: ejguan
Differential Revision: D29940369
Pulled By: ezyang
fbshipit-source-id: df9598262ec50e5d7f6e96490562aa1b116948bf
Committed by: Facebook GitHub Bot
Parent: fd8004b42e
Commit: c7a7c2b62f
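As a quick illustration of the user-facing behavior the summary describes, here is a minimal sketch (hypothetical, not part of the commit) that runs GELU on a plain dense CPU tensor in fp32 and bf16 through the libtorch API; no explicit to_mkldnn() conversion is involved. Whether the MKL-DNN kernel is actually dispatched depends on the build having MKL-DNN enabled, which this sketch assumes.

    #include <torch/torch.h>
    #include <iostream>

    int main() {
      // Plain dense CPU tensor; per this commit, the MKL-DNN GELU kernel
      // is used internally with no explicit to_mkldnn() call required
      // (assuming an MKL-DNN-enabled CPU build).
      const auto x = torch::randn({64, 128});
      const auto y_fp32 = torch::gelu(x);

      // bf16 path: compare against the fp32 result within bf16 precision.
      const auto y_bf16 = torch::gelu(x.to(torch::kBFloat16));
      std::cout << torch::allclose(y_bf16.to(torch::kFloat), y_fp32,
                                   /*rtol=*/1e-2, /*atol=*/1e-2)
                << std::endl;
    }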
@@ -2819,7 +2819,7 @@ TEST_F(ModulesTest, GELU) {
   const auto x = torch::linspace(-3.0, 3.0, 100);
   const auto y_exp = x * 0.5 * (1.0 + torch::erf(x / std::sqrt(2.0)));
   const auto y = model(x);
-  ASSERT_TRUE(torch::allclose(y, y_exp));
+  ASSERT_TRUE(torch::allclose(y, y_exp, 1.4e-06, 1e-05));
 }

 TEST_F(ModulesTest, Mish) {
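The loosened tolerances above presumably absorb the small numerical differences between the MKL-DNN GELU kernel and the erf-based reference computed in the test. Since the commit also adds the GELU backward pass, a minimal gradient sanity check in the same style (hypothetical, not taken from the commit) could compare the autograd result against the analytic derivative d/dx [x * Phi(x)] = Phi(x) + x * phi(x), where Phi is the standard normal CDF and phi its density:

    #include <torch/torch.h>
    #include <cmath>
    #include <iostream>

    int main() {
      // Exercise the newly added GELU backward on CPU.
      auto x = torch::linspace(-3.0, 3.0, 100).requires_grad_(true);
      auto y = torch::gelu(x);
      y.sum().backward();

      // Analytic GELU gradient: Phi(x) + x * phi(x).
      const auto xd = x.detach();
      const auto cdf = 0.5 * (1.0 + torch::erf(xd / std::sqrt(2.0)));
      const auto pdf = torch::exp(-0.5 * xd * xd) / std::sqrt(2.0 * M_PI);

      // Tolerances chosen loosely here, mirroring the relaxed check above.
      std::cout << torch::allclose(x.grad(), cdf + xd * pdf, 1e-4, 1e-6)
                << std::endl;
    }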