Revert D30279364: [codemod][lint][fbcode/c*] Enable BLACK by default

Test Plan: revert-hammer

Differential Revision: D30279364 (b004307252)

Original commit changeset: c1ed77dfe43a

fbshipit-source-id: eab50857675c51e0088391af06ec0ecb14e2347e
Shen Li
2021-08-12 11:39:31 -07:00
committed by Facebook GitHub Bot
parent ed0b8a3e83
commit 1022443168
188 changed files with 28697 additions and 56828 deletions
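The revert below is mechanical. Black prefers double-quoted string literals and adds a trailing comma after the last entry of a collection it splits across lines, so undoing D30279364 restores the pre-Black spellings. A minimal before/after sketch, using lines taken from the hunks shown in this commit:

    # After D30279364 (Black-formatted):
    "Kaiming_Uniform": lambda w: torch.nn.init.kaiming_uniform(w),
    print("// @{} from {}".format("generated", __file__))

    # After this revert (pre-Black style):
    "Kaiming_Uniform": lambda w: torch.nn.init.kaiming_uniform(w)
    print("// @{} from {}".format('generated', __file__))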


@@ -1,7 +1,6 @@
 """Script to generate baseline values from PyTorch initialization algorithms"""
-
 import sys
 
 import torch
 
 HEADER = """
@@ -20,13 +19,13 @@ INITIALIZERS = {
     "Xavier_Uniform": lambda w: torch.nn.init.xavier_uniform(w),
     "Xavier_Normal": lambda w: torch.nn.init.xavier_normal(w),
     "Kaiming_Normal": lambda w: torch.nn.init.kaiming_normal(w),
-    "Kaiming_Uniform": lambda w: torch.nn.init.kaiming_uniform(w),
+    "Kaiming_Uniform": lambda w: torch.nn.init.kaiming_uniform(w)
 }
 
 
 def emit(initializer_parameter_map):
     # Don't write generated with an @ in front, else this file is recognized as generated.
-    print("// @{} from {}".format("generated", __file__))
+    print("// @{} from {}".format('generated', __file__))
     print(HEADER)
     for initializer_name, weights in initializer_parameter_map.items():
         print(PARAMETERS.format(initializer_name))
@@ -64,11 +63,10 @@ def run(initializer):
 def main():
     initializer_parameter_map = {}
     for initializer in INITIALIZERS.keys():
-        sys.stderr.write("Evaluating {} ...\n".format(initializer))
+        sys.stderr.write('Evaluating {} ...\n'.format(initializer))
         initializer_parameter_map[initializer] = run(initializer)
 
     emit(initializer_parameter_map)
-
 
 if __name__ == "__main__":
     main()
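Judging from emit(), the script prints a generated C++ source to stdout (the "// @generated from ..." header is what marks the output as generated), so it is meant to be redirected into a baseline file. A plausible invocation, with the paths being assumptions since the diff viewer above does not show the file name:

    # hypothetical paths, not confirmed by this page
    python test/cpp/api/init_baseline.py > test/cpp/api/init_baseline.h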