mirror of
https://github.com/pytorch/pytorch.git
synced 2025-10-20 12:54:11 +08:00
[BE] fix ruff rule E226: add missing whitespace around operator in f-strings (#144415)
The fixes are generated by:

```bash
ruff check --fix --preview --unsafe-fixes --select=E226 .
lintrunner -a --take "RUFF,PYFMT" --all-files
```

Pull Request resolved: https://github.com/pytorch/pytorch/pull/144415 Approved by: https://github.com/huydhn, https://github.com/Skylion007
This commit is contained in:
committed by
PyTorch MergeBot
parent
a742859fc2
commit
dcc3cf7066
@ -203,7 +203,9 @@ class NNModuleToString:
|
||||
)
|
||||
if buffer.is_cuda:
|
||||
tensor_str = f"{tensor_str}.cuda()"
|
||||
model_str += f"{tab*2}self.register_buffer('{buffer_name}', {tensor_str})\n"
|
||||
model_str += (
|
||||
f"{tab * 2}self.register_buffer('{buffer_name}', {tensor_str})\n"
|
||||
)
|
||||
|
||||
for param_name, param in gm._parameters.items():
|
||||
if param is None:
|
||||
|
@ -2288,9 +2288,7 @@ def check_free_memory(free_bytes):
|
||||
)
|
||||
mem_free = -1
|
||||
else:
|
||||
msg = (
|
||||
f"{free_bytes/1e9} GB memory required, but {mem_free/1e9} GB available"
|
||||
)
|
||||
msg = f"{free_bytes / 1e9} GB memory required, but {mem_free / 1e9} GB available"
|
||||
|
||||
return msg if mem_free < free_bytes else None
|
||||
|
||||
|
@ -243,13 +243,16 @@ def train_convnext_example():
|
||||
max_reserved = torch.cuda.max_memory_reserved()
|
||||
max_allocated = torch.cuda.max_memory_allocated()
|
||||
print(
|
||||
f"rank {rank}, {ITER_TIME} iterations, average latency {(end - start)/ITER_TIME*1000:10.2f} ms"
|
||||
f"rank {rank}, {ITER_TIME} iterations, "
|
||||
f"average latency {(end - start) / ITER_TIME * 1000:10.2f} ms"
|
||||
)
|
||||
print(
|
||||
f"rank {rank}, forward {forward_time/ITER_TIME*1000:10.2f} ms, backward {backward_time/ITER_TIME*1000:10.2f} ms"
|
||||
f"rank {rank}, forward {forward_time / ITER_TIME * 1000:10.2f} ms, "
|
||||
f"backward {backward_time / ITER_TIME * 1000:10.2f} ms"
|
||||
)
|
||||
print(
|
||||
f"rank {rank}, max reserved {max_reserved/1024/1024/1024:8.2f} GiB, max allocated {max_allocated/1024/1024/1024:8.2f} GiB"
|
||||
f"rank {rank}, max reserved {max_reserved / 1024 / 1024 / 1024:8.2f} GiB, "
|
||||
f"max allocated {max_allocated / 1024 / 1024 / 1024:8.2f} GiB"
|
||||
)
|
||||
dist.destroy_process_group()
|
||||
|
||||
|
Reference in New Issue
Block a user