[CI] Fix tensorizer test assertion (#24545)

Signed-off-by: Peter Schuurman <psch@google.com>
Author: pwschuurman
Date: 2025-09-10 06:57:36 -07:00
Committed by: GitHub
Parent: 736569da8d
Commit: fcc0a3130a

@@ -161,11 +161,11 @@ def test_load_without_tensorizer_load_format(vllm_runner, capfd, model_ref):
         model = vllm_runner(
             model_ref,
             model_loader_extra_config=TensorizerConfig(tensorizer_uri="test"))
+        pytest.fail("Expected RuntimeError for extra config keys")
     except RuntimeError:
         out, err = capfd.readouterr()
         combined_output = out + err
-        assert ("ValueError: Model loader extra config "
-                "is not supported for load "
+        assert ("ValueError: Unexpected extra config keys for load "
                 "format auto") in combined_output
     finally:
         del model
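
The new message suggests the loader now rejects unknown extra-config keys up front for the selected load format. A minimal, hypothetical sketch of that kind of validation, purely for illustration (the LoadConfig class and verify_extra_config method names are assumptions, not vLLM's actual implementation):

from dataclasses import dataclass, field


@dataclass
class LoadConfig:
    # Hypothetical stand-in for the loader configuration being validated.
    load_format: str = "auto"
    model_loader_extra_config: dict = field(default_factory=dict)

    def verify_extra_config(self, allowed_keys: set) -> None:
        # Reject any extra-config key the chosen load format does not accept,
        # producing the message the updated assertion looks for.
        unexpected = set(self.model_loader_extra_config) - allowed_keys
        if unexpected:
            raise ValueError(
                "Unexpected extra config keys for load format "
                f"{self.load_format}: {sorted(unexpected)}")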
@@ -181,11 +181,12 @@ def test_raise_value_error_on_invalid_load_format(vllm_runner, capfd,
         model = vllm_runner(
             model_ref,
             load_format="safetensors",
             model_loader_extra_config=TensorizerConfig(tensorizer_uri="test"))
+        pytest.fail("Expected RuntimeError for extra config keys")
     except RuntimeError:
         out, err = capfd.readouterr()
         combined_output = out + err
-        assert ("ValueError: Model loader extra config is not supported "
+        assert ("ValueError: Unexpected extra config keys "
                 "for load format safetensors") in combined_output
     finally:
         del model
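
Both hunks rely on the same pattern: engine startup surfaces a RuntimeError to the caller while the underlying ValueError is only visible in the captured stdout/stderr, so the test matches the message text in the capfd output rather than the exception type. A standalone sketch of that pattern, assuming a stand-in worker function (_init_engine_with_bad_config is illustrative, not vLLM code):

import sys

import pytest


def _init_engine_with_bad_config() -> None:
    # Illustrative stand-in for engine startup: the real ValueError is printed
    # (e.g. by a worker process) and a generic RuntimeError reaches the caller.
    print("ValueError: Unexpected extra config keys for load format auto",
          file=sys.stderr)
    raise RuntimeError("Engine core initialization failed")


def test_error_message_reaches_captured_output(capfd):
    try:
        _init_engine_with_bad_config()
        pytest.fail("Expected RuntimeError for extra config keys")
    except RuntimeError:
        # capfd captures at the file-descriptor level, so output written by
        # child processes shows up here as well.
        out, err = capfd.readouterr()
        combined_output = out + err
        assert ("ValueError: Unexpected extra config keys for load "
                "format auto") in combined_output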