mirror of
https://github.com/huggingface/transformers.git
synced 2025-10-20 17:13:56 +08:00
Remove require_torch_bf16_gpu (#40979)
* More cleanup Signed-off-by: Yuanyuan Chen <cyyever@outlook.com> * Remove more functions Signed-off-by: Yuanyuan Chen <cyyever@outlook.com> * More fixes Signed-off-by: Yuanyuan Chen <cyyever@outlook.com> --------- Signed-off-by: Yuanyuan Chen <cyyever@outlook.com> Co-authored-by: Yih-Dar <2521628+ydshieh@users.noreply.github.com>
This commit is contained in:
@ -142,7 +142,6 @@ from .utils import (
|
||||
is_tokenizers_available,
|
||||
is_torch_available,
|
||||
is_torch_bf16_available_on_device,
|
||||
is_torch_bf16_gpu_available,
|
||||
is_torch_fp16_available_on_device,
|
||||
is_torch_greater_or_equal,
|
||||
is_torch_hpu_available,
|
||||
@ -1098,14 +1097,6 @@ def require_torch_bf16(test_case):
|
||||
)(test_case)
|
||||
|
||||
|
||||
def require_torch_bf16_gpu(test_case):
    """Decorator marking a test that requires torch>=1.10, using Ampere GPU or newer arch with cuda>=11.0"""
    # Skip (rather than fail) when the runtime lacks CUDA bf16 support; the
    # availability probe is evaluated once, at decoration time.
    reason = "test requires torch>=1.10, using Ampere GPU or newer arch with cuda>=11.0"
    skip_decorator = unittest.skipUnless(is_torch_bf16_gpu_available(), reason)
    return skip_decorator(test_case)
|
||||
|
||||
|
||||
def require_deterministic_for_xpu(test_case):
|
||||
@wraps(test_case)
|
||||
def wrapper(*args, **kwargs):
|
||||
|
Reference in New Issue
Block a user