mirror of
https://github.com/vllm-project/vllm.git
synced 2025-10-20 14:53:52 +08:00
[Bugfix] Remove faulty test for oot attention backend (#22286)
Signed-off-by: mgoin <mgoin64@gmail.com>
This commit is contained in:
@ -4,9 +4,7 @@
|
||||
import pytest
|
||||
import torch
|
||||
|
||||
from vllm.attention.selector import get_attn_backend
|
||||
from vllm.plugins import load_general_plugins
|
||||
from vllm.utils import STR_BACKEND_ENV_VAR, STR_INVALID_VAL
|
||||
|
||||
|
||||
def test_platform_plugins():
|
||||
@ -27,14 +25,6 @@ def test_platform_plugins():
|
||||
f" is loaded. The first import:\n{_init_trace}")
|
||||
|
||||
|
||||
def test_oot_attention_backend(monkeypatch: pytest.MonkeyPatch):
    """Check that the out-of-tree dummy attention backend gets selected.

    The backend env variable is deliberately clobbered with an invalid
    value so it cannot influence backend selection; the plugin-provided
    backend is then expected to win.
    """
    with monkeypatch.context() as patched_env:
        # ignore the backend env variable if it is set
        patched_env.setenv(STR_BACKEND_ENV_VAR, STR_INVALID_VAL)
        selected = get_attn_backend(16, torch.float16, "auto", 16, False)
        assert selected.get_name() == "Dummy_Backend"
|
||||
|
||||
|
||||
def test_oot_custom_op(monkeypatch: pytest.MonkeyPatch):
|
||||
# simulate workload by running an example
|
||||
load_general_plugins()
|
||||
|
Reference in New Issue
Block a user