Mirror of https://github.com/vllm-project/vllm.git
[ci][test] fix RemoteOpenAIServer (#7838)
@@ -68,7 +68,7 @@ class RemoteOpenAIServer:
         if not model.startswith("/"):
             # download the model if it's not a local path
             # to exclude the model download time from the server start time
-            model = snapshot_download(model)
+            snapshot_download(model)
         if auto_port:
             if "-p" in cli_args or "--port" in cli_args:
                 raise ValueError("You have manually specified the port"
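The fix drops the reassignment: snapshot_download (presumably huggingface_hub.snapshot_download) is now called only for its side effect of pre-populating the local cache, so the download time stays out of the measured server start time while the server is still launched with the original model ID rather than a snapshot path. A minimal sketch of that pattern, assuming huggingface_hub and using a hypothetical helper name prepare_model:

from huggingface_hub import snapshot_download


def prepare_model(model: str) -> str:
    # Hypothetical helper, not vLLM code: warm the Hugging Face cache so
    # that model download time is not counted in server start-up time.
    if not model.startswith("/"):
        # Download for the side effect only; the returned local path is
        # discarded so the caller keeps passing the original model ID.
        snapshot_download(model)
    return model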
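The surrounding context also shows the auto_port guard: when auto_port is enabled, manually passing -p or --port raises a ValueError. The diff does not show how the free port is then chosen; one common approach, given here purely as an illustration rather than vLLM's implementation, is to bind to port 0 and let the OS pick an unused port:

import socket


def find_free_port() -> int:
    # Illustration only: binding to port 0 asks the OS for any free port.
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
        s.bind(("127.0.0.1", 0))
        return s.getsockname()[1]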