[CI]: Fix doctest ci for main release (#3451)

### What this PR does / why we need it?
Fix the doctest CI for the main release.
### Does this PR introduce _any_ user-facing change?
No.
### How was this patch tested?

- vLLM version: v0.11.0rc3
- vLLM main: https://github.com/vllm-project/vllm/commit/v0.11.0

Signed-off-by: menogrey <1299267905@qq.com>
commit 9ff6b0b862 (parent b0ae203e72)
Author: menogrey
Date: 2025-10-16 14:38:11 +08:00
Committed via GitHub
2 changed files with 21 additions and 5 deletions


@@ -141,8 +141,12 @@ Then you can install `vllm` and `vllm-ascend` from **pre-built wheel**:
 ```{code-block} bash
 :substitutions:
-# Install vllm-project/vllm from pypi
-pip install vllm==|pip_vllm_version|
+# Install vllm-project/vllm. The newest supported version is |vllm_version|.
+# The version |vllm_version| has not been published to PyPI yet, so you need to install it from source.
+git clone --depth 1 --branch |vllm_version| https://github.com/vllm-project/vllm
+cd vllm
+VLLM_TARGET_DEVICE=empty pip install -v -e .
+cd ..
 # Install vllm-project/vllm-ascend from pypi.
 pip install vllm-ascend==|pip_vllm_ascend_version|
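For readers applying the updated instructions by hand, here is a minimal sketch of the flow with the substitutions resolved, assuming `|vllm_version|` expands to `v0.11.0rc3` (the release noted above); the final `pip show` check is illustrative, not part of the documented steps:

```bash
# Resolved form of the documented source install (assumes |vllm_version| = v0.11.0rc3).
git clone --depth 1 --branch v0.11.0rc3 https://github.com/vllm-project/vllm
cd vllm
# VLLM_TARGET_DEVICE=empty installs vllm without building device-specific kernels;
# the NPU backend comes from the separate vllm-ascend package.
VLLM_TARGET_DEVICE=empty pip install -v -e .
cd ..
# Illustrative check that the editable install is visible to pip.
pip show vllm | head -n 2
```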


@@ -40,21 +40,33 @@ function install_binary_test() {
 create_vllm_venv
 PIP_VLLM_VERSION=$(get_version pip_vllm_version)
 VLLM_VERSION=$(get_version vllm_version)
 PIP_VLLM_ASCEND_VERSION=$(get_version pip_vllm_ascend_version)
 _info "====> Install vllm==${PIP_VLLM_VERSION} and vllm-ascend ${PIP_VLLM_ASCEND_VERSION}"
 # Setup extra-index-url for x86 & torch_npu dev version
 pip config set global.extra-index-url "https://download.pytorch.org/whl/cpu/ https://mirrors.huaweicloud.com/ascend/repos/pypi"
-pip install vllm=="$(get_version pip_vllm_version)"
-pip install vllm-ascend=="$(get_version pip_vllm_ascend_version)"
+if [[ "${VLLM_VERSION}" != "v0.11.0rc3" ]]; then
+    # The vLLM version is already on PyPI, so install it from PyPI.
+    pip install vllm=="${PIP_VLLM_VERSION}"
+else
+    # The vLLM version is not on PyPI, so install it from source at the release tag.
+    git clone --depth 1 --branch "${VLLM_VERSION}" https://github.com/vllm-project/vllm
+    cd vllm
+    VLLM_TARGET_DEVICE=empty pip install -v -e .
+    cd ..
+fi
+pip install vllm-ascend=="${PIP_VLLM_ASCEND_VERSION}"
 pip list | grep vllm
+# Verify the installation
 _info "====> Run offline example test"
+pip install modelscope
-python3 "${SCRIPT_DIR}/../../examples/offline_inference_npu.py"
+cd "${SCRIPT_DIR}/../../examples" && python3 ./offline_inference_npu.py
+cd -
 }
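One design note: the branch condition is pinned to the literal tag `v0.11.0rc3`, so every future release that skips PyPI will need another script edit. A fallback-based variant (a sketch, not part of this commit; `install_vllm` is a hypothetical helper, not a function from this repository) would try PyPI first and build from source only when the requested version is unpublished:

```bash
# Sketch: try PyPI first, fall back to a source build at the matching tag.
install_vllm() {
    local pip_version="$1"   # PyPI version string, e.g. "0.11.0"
    local tag="$2"           # git tag to build when PyPI lacks the version, e.g. "v0.11.0rc3"
    if pip install vllm=="${pip_version}"; then
        return 0             # the requested version exists on PyPI
    fi
    # PyPI install failed; build an editable install without device-specific kernels.
    git clone --depth 1 --branch "${tag}" https://github.com/vllm-project/vllm
    (cd vllm && VLLM_TARGET_DEVICE=empty pip install -v -e .)
}
```

The subshell around `cd vllm` keeps the caller's working directory intact even if the build fails.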