Revert "Add print statements to debug sharding error (#102713)"

This reverts commit c7873522c2ceefbc3b747224da1d26d566115c9a.

Reverted https://github.com/pytorch/pytorch/pull/102713 on behalf of https://github.com/clee2000 due to issue should be resolved now ([comment](https://github.com/pytorch/pytorch/pull/102713#issuecomment-1583334560))
This commit is contained in:
PyTorch MergeBot
2023-06-08 21:02:17 +00:00
parent cea899cd57
commit b52ee80cdc
2 changed files with 1 addition and 17 deletions

View File

@ -97,16 +97,9 @@ def calculate_shards(
tests: List[str],
test_file_times: Dict[str, float],
must_serial: Optional[Callable[[str], bool]] = None,
debug: bool = False,
) -> List[Tuple[float, List[ShardedTest]]]:
must_serial = must_serial or (lambda x: True)
if debug:
print(test_file_times)
print(tests)
print(num_shards)
print([x for x in tests if must_serial(x)])
known_tests = [x for x in tests if x in test_file_times]
unknown_tests: List[str] = [x for x in tests if x not in known_tests]
@ -130,11 +123,6 @@ def calculate_shards(
for unknown_test in unknown_tests:
sharded_jobs[index].serial.append(ShardedTest(unknown_test, 1, 1, None))
index = (index + 1) % num_shards
if debug:
for j in sharded_jobs:
print(j.convert_to_tuple()[1])
return [job.convert_to_tuple() for job in sharded_jobs]