[Frontend] Disallow passing model as both argument and option (#7347)

Cyrus Leung
2024-08-12 20:58:34 +08:00
committed by GitHub
parent e6e42e4b17
commit 24154f8618
2 changed files with 9 additions and 2 deletions

vllm/engine/arg_utils.py

@@ -32,7 +32,7 @@ def nullable_str(val: str):
 @dataclass
 class EngineArgs:
     """Arguments for vLLM engine."""
-    model: str
+    model: str = 'facebook/opt-125m'
     served_model_name: Optional[Union[List[str]]] = None
     tokenizer: Optional[str] = None
     skip_tokenizer_init: bool = False
@@ -133,7 +133,7 @@ class EngineArgs:
         parser.add_argument(
             '--model',
             type=str,
-            default='facebook/opt-125m',
+            default=EngineArgs.model,
             help='Name or path of the huggingface model to use.')
         parser.add_argument(
             '--tokenizer',
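
The pattern in this file is to define the default once, on the dataclass, and have argparse reference it, so the CLI default and `EngineArgs.model` can never drift apart; it also gives the `serve` command a well-known sentinel to compare against (see the second file below). A minimal, self-contained sketch of the same pattern, using a hypothetical `Args` class in place of the real `EngineArgs`:

import argparse
from dataclasses import dataclass


@dataclass
class Args:
    # Single source of truth for the default (hypothetical stand-in
    # for EngineArgs).
    model: str = 'facebook/opt-125m'


parser = argparse.ArgumentParser()
# The option's default is read from the dataclass, not repeated inline.
parser.add_argument('--model', type=str, default=Args.model)

args = parser.parse_args([])
assert args.model == Args.model  # the default flows from one place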

vllm/scripts.py

@@ -9,6 +9,7 @@ from typing import List, Optional
 from openai import OpenAI
 from openai.types.chat import ChatCompletionMessageParam

+from vllm.engine.arg_utils import EngineArgs
 from vllm.entrypoints.openai.api_server import run_server
 from vllm.entrypoints.openai.cli_args import make_arg_parser
 from vllm.utils import FlexibleArgumentParser
@@ -24,6 +25,12 @@ def register_signal_handlers():
 def serve(args: argparse.Namespace) -> None:
+    # The default value of `--model`
+    if args.model != EngineArgs.model:
+        raise ValueError(
+            "With `vllm serve`, you should provide the model as a "
+            "positional argument instead of via the `--model` option.")
+
     # EngineArgs expects the model name to be passed as --model.
     args.model = args.model_tag
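
Taken together, the guard works because `--model` now defaults to `EngineArgs.model`: any other value can only mean the user passed the option explicitly alongside the positional model tag. A minimal sketch of the behavior, with a hypothetical cut-down parser standing in for the real `vllm serve` one:

import argparse
from dataclasses import dataclass


@dataclass
class EngineArgs:
    # Hypothetical stand-in mirroring the dataclass default above.
    model: str = 'facebook/opt-125m'


# Hypothetical stand-in for the real `vllm serve` argument parser.
parser = argparse.ArgumentParser(prog='vllm serve')
parser.add_argument('model_tag')
parser.add_argument('--model', type=str, default=EngineArgs.model)


def serve(args: argparse.Namespace) -> None:
    # A non-default value means `--model` was passed explicitly.
    if args.model != EngineArgs.model:
        raise ValueError(
            "With `vllm serve`, you should provide the model as a "
            "positional argument instead of via the `--model` option.")
    args.model = args.model_tag


serve(parser.parse_args(['facebook/opt-350m']))  # OK
serve(parser.parse_args(
    ['facebook/opt-350m', '--model', 'facebook/opt-350m']))  # raises ValueError

One edge the comparison cannot catch: explicitly passing `--model facebook/opt-125m` is indistinguishable from omitting the option, so that particular duplicate slips through.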