File tree Expand file tree Collapse file tree 2 files changed +21
-5
lines changed
Expand file tree Collapse file tree 2 files changed +21
-5
lines changed Original file line number Diff line number Diff line change @@ -141,8 +141,12 @@ Then you can install `vllm` and `vllm-ascend` from **pre-built wheel**:
141141``` {code-block} bash
142142 :substitutions:
143143
144- # Install vllm-project/vllm from pypi
145- pip install vllm==|pip_vllm_version|
144+ # Install vllm-project/vllm. The newest supported version is |vllm_version|.
145+ # The version |vllm_version| has not yet been published to PyPI, so you need to install it from source.
146+ git clone --depth 1 --branch |vllm_version| https://github.com/vllm-project/vllm
147+ cd vllm
148+ VLLM_TARGET_DEVICE=empty pip install -v -e .
149+ cd ..
146150
147151# Install vllm-project/vllm-ascend from pypi.
148152pip install vllm-ascend==|pip_vllm_ascend_version|
Original file line number Diff line number Diff line change @@ -40,21 +40,33 @@ function install_binary_test() {
4040 create_vllm_venv
4141
4242 PIP_VLLM_VERSION=$( get_version pip_vllm_version)
43+ VLLM_VERSION=$( get_version vllm_version)
4344 PIP_VLLM_ASCEND_VERSION=$( get_version pip_vllm_ascend_version)
4445 _info " ====> Install vllm==${PIP_VLLM_VERSION} and vllm-ascend ${PIP_VLLM_ASCEND_VERSION} "
4546
4647 # Setup extra-index-url for x86 & torch_npu dev version
4748 pip config set global.extra-index-url " https://download.pytorch.org/whl/cpu/ https://mirrors.huaweicloud.com/ascend/repos/pypi"
4849
49- pip install vllm==" $( get_version pip_vllm_version) "
50- pip install vllm-ascend==" $( get_version pip_vllm_ascend_version) "
50+ if [[ " ${VLLM_VERSION} " != " v0.11.0rc3" ]]; then
51+ # This vLLM version is already available on PyPI, so install it from PyPI.
52+ pip install vllm==" ${PIP_VLLM_VERSION} "
53+ else
54+ # This vLLM version is not on PyPI, so install it from source at the specific tag.
55+ git clone --depth 1 --branch " ${VLLM_VERSION} " https://github.com/vllm-project/vllm
56+ cd vllm
57+ VLLM_TARGET_DEVICE=empty pip install -v -e .
58+ cd ..
59+ fi
60+
61+ pip install vllm-ascend==" ${PIP_VLLM_ASCEND_VERSION} "
5162
5263 pip list | grep vllm
5364
5465 # Verify the installation
5566 _info " ====> Run offline example test"
5667 pip install modelscope
57- python3 " ${SCRIPT_DIR} /../../examples/offline_inference_npu.py"
68+ cd ${SCRIPT_DIR} /../../examples && python3 ./offline_inference_npu.py
69+ cd -
5870
5971}
6072
You can’t perform that action at this time.
0 commit comments