mirror of
https://github.com/pytorch/pytorch.git
synced 2025-10-20 21:14:14 +08:00
[Inductor XPU] Add XPU check for is_big_gpu(). (#143491)
Fix #143472 Pull Request resolved: https://github.com/pytorch/pytorch/pull/143491 Approved by: https://github.com/desertfire, https://github.com/jansel, https://github.com/EikanWang
This commit is contained in:
committed by
PyTorch MergeBot
parent
0da004f3dd
commit
af0e159740
@@ -1129,7 +1129,7 @@ def is_big_gpu(index_or_device: Union[int, torch.device] = 0) -> bool:
     if isinstance(index_or_device, torch.device):
         device = index_or_device
     else:
-        device = torch.device("cuda", index_or_device)
+        device = torch.device(get_gpu_type(), index_or_device)

     prop = DeviceProperties.create(device)
@@ -1142,7 +1142,7 @@ def is_big_gpu(index_or_device: Union[int, torch.device] = 0) -> bool:
             return False
         return True

-    min_sms = 68  # 3080
+    min_sms = 16 if device.type == "xpu" else 68  # 3080
     avail_sms = prop.multi_processor_count
     if avail_sms < min_sms:
         log.warning(
Reference in New Issue
Block a user