Files
pytorch/caffe2/python/normalizer.py
Bugra Akyildiz 27c7158166 Remove __future__ imports for legacy Python 2 support (#45033)
Summary:
The `2to3` tool has a `future` fixer that can be targeted specifically to remove these imports; the `caffe2` directory has the most of them:

```
2to3 -f future -w caffe2
```
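For context, a sketch of what the `future` fixer removes: `from __future__ import ...` statements, which are no-ops on Python 3. The header below is a typical example, not a line quoted from this commit.

```python
# Before: a common legacy Python 2 compatibility header.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

# After `2to3 -f future -w <dir>`, the four lines above are deleted;
# Python 3 enables all of these behaviors by default.
```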

Pull Request resolved: https://github.com/pytorch/pytorch/pull/45033

Reviewed By: seemethere

Differential Revision: D23808648

Pulled By: bugra

fbshipit-source-id: 38971900f0fe43ab44a9168e57f2307580d36a38
2020-09-23 17:57:02 -07:00


# @package optimizer
# Module caffe2.python.normalizer


class Normalizer(object):
    """
    Adds normalization to train_net for the given parameter. The factor
    applied ahead of regularization is given at initialization.
    The param should be a BlobReference.
    """

    def __init__(self):
        pass

    def __call__(self, net, param):
        return self._run(net, param)

    def _run(self, net, param):
        raise NotImplementedError("Subclasses must implement _run")


class BatchNormalizer(Normalizer):
    def __init__(self, momentum, scale_init_value=1.0):
        super(BatchNormalizer, self).__init__()
        self._momentum = float(momentum)
        self._scale_init_value = float(scale_init_value)

    def _run(self, layer_model, param):
        return layer_model.BatchNormalization(
            param, momentum=self._momentum, scale_init_value=self._scale_init_value
        )


class LayerNormalizer(Normalizer):
    def __init__(self, epsilon, use_layer_norm_op=True, scale_init_value=1.0):
        super(LayerNormalizer, self).__init__()
        self._epsilon = float(epsilon)
        self._use_layer_norm_op = use_layer_norm_op
        self._scale_init_value = float(scale_init_value)

    def _run(self, layer_model, param):
        return layer_model.LayerNormalization(
            param,
            epsilon=self._epsilon,
            use_layer_norm_op=self._use_layer_norm_op,
            scale_init_value=self._scale_init_value,
        )
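
A minimal usage sketch: `__call__` delegates to `_run`, which calls a `BatchNormalization` or `LayerNormalization` method on the object passed in. The `FakeLayerModel` and the blob name below are stand-ins, assumed for illustration, for caffe2's layer-model helper and a BlobReference.

```python
class FakeLayerModel:
    # Stand-in for the layer-model helper that _run expects; the real
    # methods add normalization ops to the net and return a blob.
    def BatchNormalization(self, param, momentum, scale_init_value):
        print("BatchNormalization(momentum=%r)" % momentum)
        return param

    def LayerNormalization(self, param, epsilon, use_layer_norm_op,
                           scale_init_value):
        print("LayerNormalization(epsilon=%r)" % epsilon)
        return param


model = FakeLayerModel()

# Each normalizer is configured once, then applied per parameter.
bn = BatchNormalizer(momentum=0.9)
bn(model, "fc_w")   # -> BatchNormalization(momentum=0.9)

ln = LayerNormalizer(epsilon=1e-5)
ln(model, "fc_w")   # -> LayerNormalization(epsilon=1e-05)
```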