Remove require_torch_bf16_gpu (#40979)

* More cleanup

Signed-off-by: Yuanyuan Chen <cyyever@outlook.com>

* Remove more functions

Signed-off-by: Yuanyuan Chen <cyyever@outlook.com>

* More fixes

Signed-off-by: Yuanyuan Chen <cyyever@outlook.com>

---------

Signed-off-by: Yuanyuan Chen <cyyever@outlook.com>
Co-authored-by: Yih-Dar <2521628+ydshieh@users.noreply.github.com>
Author: Yuanyuan Chen
Date: 2025-10-17 18:35:19 +08:00
Committed by: GitHub
Parent commit: 252d7cd952
Commit: 151d6adc86

@@ -142,7 +142,6 @@ from .utils import (
     is_tokenizers_available,
     is_torch_available,
     is_torch_bf16_available_on_device,
-    is_torch_bf16_gpu_available,
     is_torch_fp16_available_on_device,
     is_torch_greater_or_equal,
     is_torch_hpu_available,
@@ -1098,14 +1097,6 @@ def require_torch_bf16(test_case):
     )(test_case)
 
 
-def require_torch_bf16_gpu(test_case):
-    """Decorator marking a test that requires torch>=1.10, using Ampere GPU or newer arch with cuda>=11.0"""
-    return unittest.skipUnless(
-        is_torch_bf16_gpu_available(),
-        "test requires torch>=1.10, using Ampere GPU or newer arch with cuda>=11.0",
-    )(test_case)
-
-
 def require_deterministic_for_xpu(test_case):
     @wraps(test_case)
     def wrapper(*args, **kwargs):
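
The hunk context shows the device-agnostic require_torch_bf16 decorator is kept, so tests that previously used the removed require_torch_bf16_gpu can presumably be gated with it instead. Below is a minimal sketch, not part of this commit; the test class and method names are hypothetical, and it assumes require_torch_bf16 remains importable from transformers.testing_utils (the module touched by this diff).

    # Minimal sketch (not from this commit): gate a test on bf16 support with the
    # remaining `require_torch_bf16` decorator rather than the removed
    # `require_torch_bf16_gpu`. Class and method names here are hypothetical.
    import unittest

    import torch

    from transformers.testing_utils import require_torch_bf16


    class Bf16SmokeTest(unittest.TestCase):  # hypothetical example test
        @require_torch_bf16  # skips unless bf16 is usable on the selected device
        def test_bf16_matmul(self):
            a = torch.randn(4, 4, dtype=torch.bfloat16)
            b = torch.randn(4, 4, dtype=torch.bfloat16)
            self.assertEqual((a @ b).dtype, torch.bfloat16)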