PEP585 update - torch/distributed/elastic torch/distributed/checkpoint (#145163)

See #145101 for details.
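
For context, PEP 585 lets the builtin container types be used directly as generic annotations, so the `typing.Dict`/`typing.List` aliases imported from `typing` can be dropped. A minimal before/after sketch of the style this PR applies (the function and parameter names here are illustrative, not from the changed files):

```python
from typing import Optional

# Before (typing aliases, deprecated since Python 3.9):
#   from typing import Dict, List
#   def summarize(log_files: Dict[int, str]) -> List[str]: ...

# After (PEP 585 builtin generics):
def summarize(log_files: dict[int, str], prefix: Optional[str] = None) -> list[str]:
    # dict and list are subscriptable at runtime on Python 3.9+,
    # so no typing import is needed for these annotations.
    return [f"{prefix or ''}{rank}: {path}" for rank, path in log_files.items()]
```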

Pull Request resolved: https://github.com/pytorch/pytorch/pull/145163
Approved by: https://github.com/Skylion007
Author: Aaron Orenstein
Date: 2025-01-18 14:58:05 -08:00
Committed by: PyTorch MergeBot
Parent: c64e657632
Commit: 316808e4e9
47 changed files with 311 additions and 344 deletions


@@ -12,7 +12,7 @@ import os
 import time
 from concurrent.futures.thread import ThreadPoolExecutor
 from threading import Event
-from typing import Dict, List, Optional, TextIO, TYPE_CHECKING


 if TYPE_CHECKING:
@@ -89,9 +89,9 @@ class TailLog:
     def __init__(
         self,
         name: str,
-        log_files: Dict[int, str],
+        log_files: dict[int, str],
         dst: TextIO,
-        log_line_prefixes: Optional[Dict[int, str]] = None,
+        log_line_prefixes: Optional[dict[int, str]] = None,
         interval_sec: float = 0.1,
     ):
         n = len(log_files)
@@ -106,10 +106,10 @@ class TailLog:
         self._dst = dst
         self._log_files = log_files
         self._log_line_prefixes = log_line_prefixes
-        self._finished_events: Dict[int, Event] = {
+        self._finished_events: dict[int, Event] = {
             local_rank: Event() for local_rank in log_files.keys()
         }
-        self._futs: List[Future] = []
+        self._futs: list[Future] = []
         self._interval_sec = interval_sec
         self._stopped = False
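
As a sanity check of the updated annotations, a minimal sketch of constructing `TailLog` with the new builtin-generic types. The constructor arguments are taken from the diff above; the module path `torch.distributed.elastic.multiprocessing.tail_log` and the file paths are assumptions for illustration:

```python
import sys

from torch.distributed.elastic.multiprocessing.tail_log import TailLog

# dict[int, str] maps local rank -> log file path (illustrative paths).
log_files: dict[int, str] = {0: "/tmp/rank0.log", 1: "/tmp/rank1.log"}

tail = TailLog(
    name="trainer",
    log_files=log_files,
    dst=sys.stderr,
    log_line_prefixes={0: "[rank0] ", 1: "[rank1] "},
)
```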