Revert "Update round size with 1 division behavior (#162203)"
This reverts commit 12d2ef557f6e127100267c31a31572d8ab5cc788. Reverted https://github.com/pytorch/pytorch/pull/162203 on behalf of https://github.com/izaitsevfb due to Diff reverted internally ([comment](https://github.com/pytorch/pytorch/pull/162203#issuecomment-3398622898))
@@ -2502,8 +2502,6 @@ class DeviceCachingAllocator {
       auto divisions = CUDAAllocatorConfig::roundup_power2_divisions(size);
       if (divisions > 1 && size > (kMinBlockSize * divisions)) {
         return roundup_power2_next_division(size, divisions);
-      } else if (divisions == 1) {
-        return llvm::PowerOf2Ceil(size);
       } else {
         return kMinBlockSize * ((size + kMinBlockSize - 1) / kMinBlockSize);
       }
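For context, the rounding behavior this revert toggles can be sketched as below. This is a simplified, hypothetical model rather than the allocator's actual code: `kMinBlockSize = 512` matches the CUDA caching allocator's minimum block size, `power_of_2_ceil` stands in for `llvm::PowerOf2Ceil`, and the `with_reverted_change` flag models whether the reverted `divisions == 1` branch is present.

```cpp
#include <cstddef>
#include <cstdio>

// Assumption: the CUDA caching allocator rounds all sizes to multiples of
// 512 bytes at minimum.
constexpr size_t kMinBlockSize = 512;

// Round n up to the next power of two (stand-in for llvm::PowerOf2Ceil).
size_t power_of_2_ceil(size_t n) {
  size_t p = 1;
  while (p < n) {
    p <<= 1;
  }
  return p;
}

// Hypothetical model of the divisions == 1 case touched by this revert.
// With the reverted change, divisions == 1 rounded the request straight up
// to a power of two; after the revert, it falls through to rounding up to
// the next 512-byte multiple.
size_t round_size(size_t size, size_t divisions, bool with_reverted_change) {
  if (with_reverted_change && divisions == 1) {
    return power_of_2_ceil(size);  // e.g. 1300 -> 2048
  }
  return kMinBlockSize * ((size + kMinBlockSize - 1) / kMinBlockSize);  // e.g. 1300 -> 1536
}

int main() {
  printf("%zu\n", round_size(1300, 1, true));   // 2048: power-of-two rounding
  printf("%zu\n", round_size(1300, 1, false));  // 1536: 512-byte multiple
  return 0;
}
```

In other words, with the branch present a `roundup_power2_divisions` setting of 1 rounds every request up to a full power of two, which can overallocate by nearly 2x; removing it restores the 512-byte-multiple fallback for that case.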