pytorch/caffe2/python/normalizer.py
Xuehai Pan 8d45f555d7 [BE] [1/3] Rewrite super() calls in caffe2 and benchmarks (#94587)
Rewrite calls to the Python built-in `super()`. Only non-semantic changes are applied.

- #94587
- #94588
- #94592
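
A minimal sketch of the kind of call-site change the rewrite applies (the `MyModuleOld` / `MyModuleNew` names are illustrative, not classes from the PR):

```python
import torch.nn as nn


class MyModuleOld(nn.Module):
    def __init__(self):
        # Python 2 compatible two-argument form that the rewrite targets
        super(MyModuleOld, self).__init__()


class MyModuleNew(nn.Module):
    def __init__(self):
        # equivalent zero-argument form produced by the rewrite
        super().__init__()
```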

Also, methods that contain only a `super()` call are removed, since they are redundant: the parent implementation is inherited automatically.

```diff
class MyModule(nn.Module):
-   def __init__(self):
-       super().__init__()
-
    def forward(self, ...):
        ...
```

Cases where the rewrite would change the semantics are kept unchanged, e.g.:

f152a79be9/caffe2/python/net_printer.py (L184-L190)

f152a79be9/test/test_jit_fuser_te.py (L2628-L2635)
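
The linked snippets are the cases this PR leaves alone. As a generic illustration of why the rewrite cannot be applied blindly (this example is not taken from those files), an explicit `super(B, self)` that deliberately skips a class in the MRO would change behavior if converted to the zero-argument form:

```python
class A:
    def name(self):
        return "A"


class B(A):
    def name(self):
        return "B->" + super().name()


class C(B):
    def skip_b(self):
        # super(B, self) starts the MRO search after B, so this resolves to
        # A.name and returns "A". The zero-argument super() would start after
        # C and hit B.name instead, returning "B->A", so calls like this one
        # must be kept unchanged.
        return super(B, self).name()


assert C().skip_b() == "A"
```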

Pull Request resolved: https://github.com/pytorch/pytorch/pull/94587
Approved by: https://github.com/ezyang
2023-02-11 18:19:48 +00:00

# @package normalizer
# Module caffe2.python.normalizer


class Normalizer:
    """
    Adds normalization to train_net for the given parameter. The factor ahead
    of regularization is given at initialization.
    The param should be a BlobReference.
    """

    def __init__(self):
        pass

    def __call__(self, net, param):
        return self._run(net, param)

    def _run(self, net, param):
        raise NotImplementedError("Subclasses must implement _run")


class BatchNormalizer(Normalizer):
    def __init__(self, momentum, scale_init_value=1.0):
        super().__init__()
        self._momentum = float(momentum)
        self._scale_init_value = float(scale_init_value)

    def _run(self, layer_model, param):
        return layer_model.BatchNormalization(
            param, momentum=self._momentum, scale_init_value=self._scale_init_value
        )


class LayerNormalizer(Normalizer):
    def __init__(self, epsilon, use_layer_norm_op=True, scale_init_value=1.0):
        super().__init__()
        self._epsilon = float(epsilon)
        self._use_layer_norm_op = use_layer_norm_op
        self._scale_init_value = float(scale_init_value)

    def _run(self, layer_model, param):
        return layer_model.LayerNormalization(
            param,
            epsilon=self._epsilon,
            use_layer_norm_op=self._use_layer_norm_op,
            scale_init_value=self._scale_init_value,
        )
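
For context, a minimal sketch of how one of these normalizers would be applied. `FakeLayerModel` is a hypothetical stand-in defined only so the snippet runs on its own; in caffe2 the object passed in would be a real layer model exposing `BatchNormalization`:

```python
class FakeLayerModel:
    # Hypothetical stand-in for a caffe2 layer model, for illustration only.
    def BatchNormalization(self, param, momentum, scale_init_value):
        print(f"BatchNormalization({param!r}, momentum={momentum}, "
              f"scale_init_value={scale_init_value})")
        return param


normalizer = BatchNormalizer(momentum=0.9)
# Normalizer.__call__ forwards to the subclass _run, which calls into the model.
normalizer(FakeLayerModel(), "fc_w")
```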