[misc][frontend] log all available endpoints (#6195)

Co-authored-by: Cody Yu <hao.yu.cody@gmail.com>
Author: youkaichao
Date: 2024-07-07 15:11:12 -07:00 (committed by GitHub)
Commit: 3b08fe2b13
Parent: abfe705a02
2 changed files with 19 additions and 0 deletions

vllm/entrypoints/api_server.py

@@ -16,10 +16,13 @@ from fastapi.responses import JSONResponse, Response, StreamingResponse
 from vllm.engine.arg_utils import AsyncEngineArgs
 from vllm.engine.async_llm_engine import AsyncLLMEngine
+from vllm.logger import init_logger
 from vllm.sampling_params import SamplingParams
 from vllm.usage.usage_lib import UsageContext
 from vllm.utils import FlexibleArgumentParser, random_uuid
 
+logger = init_logger("vllm.entrypoints.api_server")
+
 TIMEOUT_KEEP_ALIVE = 5  # seconds.
 
 app = FastAPI()
 engine = None
@@ -107,6 +110,14 @@ if __name__ == "__main__":
         engine_args, usage_context=UsageContext.API_SERVER)
 
     app.root_path = args.root_path
+
+    logger.info("Available routes are:")
+    for route in app.routes:
+        if not hasattr(route, 'methods'):
+            continue
+        methods = ', '.join(route.methods)
+        logger.info("Route: %s, Methods: %s", route.path, methods)
+
     uvicorn.run(app,
                 host=args.host,
                 port=args.port,
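
The hasattr() guard matters because app.routes yields Starlette route objects of several kinds, and not all of them carry an HTTP method set (Mount and WebSocketRoute entries, for example, have no methods attribute). Below is a minimal standalone sketch of the same enumeration outside vLLM; the /health handler is a made-up example, not part of the commit:

from fastapi import FastAPI

app = FastAPI()

@app.get("/health")
async def health():
    # Hypothetical endpoint, only here so app.routes has a user-defined entry.
    return {"status": "ok"}

# app.routes contains FastAPI's built-in routes (/openapi.json, /docs,
# /docs/oauth2-redirect, /redoc) plus user-defined ones. Entries without
# a methods attribute are skipped, mirroring the check in this commit.
for route in app.routes:
    if not hasattr(route, 'methods'):
        continue
    print("Route: %s, Methods: %s" % (route.path, ', '.join(route.methods)))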

vllm/entrypoints/openai/api_server.py

@@ -240,6 +240,14 @@ if __name__ == "__main__":
     openai_serving_embedding = OpenAIServingEmbedding(engine, model_config,
                                                       served_model_names)
     app.root_path = args.root_path
+
+    logger.info("Available routes are:")
+    for route in app.routes:
+        if not hasattr(route, 'methods'):
+            continue
+        methods = ', '.join(route.methods)
+        logger.info("Route: %s, Methods: %s", route.path, methods)
+
     uvicorn.run(app,
                 host=args.host,
                 port=args.port,
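
With this change, both servers print their full route table at startup, before uvicorn begins serving, which makes it easy to see which endpoints a given deployment actually exposes. Illustrative, abbreviated output for the OpenAI-compatible server (the exact route set depends on the vLLM version, method order can vary because route.methods is a set, and Starlette pairs HEAD with GET automatically):

Available routes are:
Route: /openapi.json, Methods: GET, HEAD
Route: /docs, Methods: GET, HEAD
Route: /health, Methods: GET, HEAD
Route: /v1/models, Methods: GET, HEAD
Route: /v1/chat/completions, Methods: POST
Route: /v1/completions, Methods: POST
Route: /v1/embeddings, Methods: POST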