Pin vLLM HPU to a stable release (#1156)

The vllm-fork main branch carries stability risks; pin builds to the latest stable release, v0.6.4.post2+Gaudi-1.19.0.

Signed-off-by: Xinyao Wang <xinyao.wang@intel.com>
Author:    XinyaoWa
Date:      2025-01-16 17:48:06 +08:00
Committed: GitHub
Parent:    2c42941a31
Commit:    d6312a73b9

5 changed files with 5 additions and 0 deletions
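
For context, the same pin could be applied at clone time rather than as a separate checkout; a minimal sketch, not part of this commit (git clone --branch accepts tags as well as branches):

    # Sketch only: clone the pinned tag directly and shallowly,
    # so the unstable main branch is never fetched.
    git clone --branch v0.6.4.post2+Gaudi-1.19.0 --depth 1 \
        https://github.com/HabanaAI/vllm-fork.git vllm-fork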

@@ -65,6 +65,7 @@ jobs:
       fi
       if [[ $(grep -c "vllm-gaudi:" ${docker_compose_yml}) != 0 ]]; then
         git clone https://github.com/HabanaAI/vllm-fork.git vllm-fork
+        cd vllm-fork && git checkout v0.6.4.post2+Gaudi-1.19.0 && cd ../
       fi
   - name: Get build list
     id: get-build-list

@@ -37,6 +37,7 @@ fi
if [ "$hw_mode" = "hpu" ]; then
git clone https://github.com/HabanaAI/vllm-fork.git
cd ./vllm-fork/
git checkout v0.6.4.post2+Gaudi-1.19.0
docker build -f Dockerfile.hpu -t opea/vllm-gaudi:latest --shm-size=128g . --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy
cd ..
rm -rf vllm-fork
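
As a quick sanity check (not part of the diff), the script could confirm the checkout landed exactly on the pinned tag before building:

    # Prints the tag name when HEAD sits exactly on a tag, errors otherwise.
    git describe --tags --exact-match   # expect: v0.6.4.post2+Gaudi-1.19.0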

@@ -12,6 +12,7 @@ function build_docker_images() {
     cd $WORKPATH
     git clone https://github.com/HabanaAI/vllm-fork.git
     cd vllm-fork/
+    git checkout v0.6.4.post2+Gaudi-1.19.0
     docker build --no-cache -f Dockerfile.hpu -t opea/vllm-gaudi:latest --shm-size=128g .
     if [ $? -ne 0 ]; then
         echo "opea/vllm-gaudi built fail"

@@ -12,6 +12,7 @@ function build_docker_images() {
     cd $WORKPATH
     git clone https://github.com/HabanaAI/vllm-fork.git
     cd vllm-fork/
+    git checkout v0.6.4.post2+Gaudi-1.19.0
     docker build --no-cache -f Dockerfile.hpu -t opea/vllm-gaudi:latest --shm-size=128g .
     if [ $? -ne 0 ]; then
         echo "opea/vllm-gaudi built fail"

@@ -12,6 +12,7 @@ function build_docker_images() {
     cd $WORKPATH
     git clone https://github.com/HabanaAI/vllm-fork.git
     cd vllm-fork/
+    git checkout v0.6.4.post2+Gaudi-1.19.0
     docker build --no-cache -f Dockerfile.hpu -t opea/vllm-gaudi:comps --shm-size=128g .
     if [ $? -ne 0 ]; then
         echo "opea/vllm-gaudi built fail"