mirror of
https://github.com/pytorch/pytorch.git
synced 2025-10-21 05:34:18 +08:00
Enable alternative LayerNorm impl in FisherGan (#12178)
Summary: Pull Request resolved: https://github.com/pytorch/pytorch/pull/12178 Fisher GAN calls processor_util.add_mlp, which injects the layer norm through the normalizer. We allow using an alternative implementation of LayerNorm in the normalizer. Reviewed By: Wakeupbuddy Differential Revision: D9235528 fbshipit-source-id: 88c126c658102926613242ef84a481f6de1676ed
This commit is contained in:
committed by
Facebook Github Bot
parent
8ac8b823c2
commit
93a4b76114
@ -31,11 +31,12 @@ class BatchNormalizer(Normalizer):
|
||||
|
||||
|
||||
class LayerNormalizer(Normalizer):
|
||||
def __init__(self, epsilon):
|
||||
def __init__(self, epsilon, use_layer_norm_op=True):
|
||||
super(LayerNormalizer, self).__init__()
|
||||
self._epsilon = float(epsilon)
|
||||
self._use_layer_norm_op = use_layer_norm_op
|
||||
|
||||
def _run(self, layer_model, param):
|
||||
return layer_model.LayerNormalization(
|
||||
param, epsilon=self._epsilon
|
||||
param, epsilon=self._epsilon, use_layer_norm_op=self._use_layer_norm_op
|
||||
)
|
||||
|
Reference in New Issue
Block a user