Simplify BFLOAT16_AVAILABLE (#163445)

Simplify `BFLOAT16_AVAILABLE` by using `torch.cuda.is_bf16_supported()` and `torch.xpu.is_bf16_supported()`. Outdated comments are also removed.
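
For reference, a minimal sketch of the new check in isolation (assuming a PyTorch build that exposes both `torch.cuda.is_bf16_supported()` and `torch.xpu.is_bf16_supported()`; the standalone script wrapper is illustrative only and not part of this change):

```python
import torch

# Each accelerator backend reports bfloat16 support directly, so there is no
# need to inspect torch.version.cuda / torch.version.hip as the old check did.
BFLOAT16_AVAILABLE = torch.cuda.is_bf16_supported() or torch.xpu.is_bf16_supported()

if __name__ == "__main__":
    print(f"bfloat16 available: {BFLOAT16_AVAILABLE}")
```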

Pull Request resolved: https://github.com/pytorch/pytorch/pull/163445
Approved by: https://github.com/Skylion007, https://github.com/kwen2501
Yuanyuan Chen
2025-09-22 07:31:43 +00:00
committed by PyTorch MergeBot
parent edafc902d7
commit 96a3afb8ec
2 changed files with 1 addition and 6 deletions


@@ -34,11 +34,7 @@ device_type = (
     acc.type if (acc := torch.accelerator.current_accelerator(True)) else "cpu"
 )
 
-# bfloat16 is only supported by CUDA 11+ or XPU
-BFLOAT16_AVAILABLE = (
-    torch.cuda.is_available()
-    and (torch.version.cuda is not None or torch.version.hip is not None)
-) or torch.xpu.is_available()
+BFLOAT16_AVAILABLE = torch.cuda.is_bf16_supported() or torch.xpu.is_bf16_supported()
 
 
 class Net(nn.Module):


@@ -83,7 +83,6 @@ if TEST_WITH_DEV_DBG_ASAN:
     )
     sys.exit(0)
 
-# bfloat16 is only supported by CUDA 11+
 BFLOAT16_AVAILABLE = torch.cuda.is_available() and (
     torch.version.cuda is not None or torch.version.hip is not None
 )