diff --git a/.github/workflows/_example-workflow.yml b/.github/workflows/_example-workflow.yml
index 010eece64..f3b717a28 100644
--- a/.github/workflows/_example-workflow.yml
+++ b/.github/workflows/_example-workflow.yml
@@ -78,16 +78,18 @@ jobs:
           cd ${{ github.workspace }}/${{ inputs.example }}/docker_image_build
           docker_compose_path=${{ github.workspace }}/${{ inputs.example }}/docker_image_build/build.yaml
           if [[ $(grep -c "vllm:" ${docker_compose_path}) != 0 ]]; then
-            git clone https://github.com/vllm-project/vllm.git && cd vllm
+            git clone https://github.com/vllm-project/vllm.git && cd vllm
             # Get the latest tag
-            VLLM_VER="$(git describe --tags "$(git rev-list --tags --max-count=1)" )"
+            VLLM_VER=$(git describe --tags "$(git rev-list --tags --max-count=1)")
             echo "Check out vLLM tag ${VLLM_VER}"
-            git checkout ${VLLM_VER} &> /dev/null
-            # make sure do not change the pwd
-            git rev-parse HEAD && cd ../
+            git checkout ${VLLM_VER} &> /dev/null && cd ../
           fi
           if [[ $(grep -c "vllm-gaudi:" ${docker_compose_path}) != 0 ]]; then
-            git clone --depth 1 --branch v0.6.4.post2+Gaudi-1.19.0 https://github.com/HabanaAI/vllm-fork.git
+            git clone https://github.com/HabanaAI/vllm-fork.git && cd vllm-fork
+            # Get the latest tag
+            VLLM_VER=$(git describe --tags "$(git rev-list --tags --max-count=1)")
+            echo "Check out vLLM tag ${VLLM_VER}"
+            git checkout ${VLLM_VER} &> /dev/null && cd ../
           fi
           git clone --depth 1 --branch ${{ inputs.opea_branch }} https://github.com/opea-project/GenAIComps.git
           cd GenAIComps && git rev-parse HEAD && cd ../
diff --git a/AgentQnA/tests/step1_build_images.sh b/AgentQnA/tests/step1_build_images.sh
index 4cb8a2e4d..aa8352144 100644
--- a/AgentQnA/tests/step1_build_images.sh
+++ b/AgentQnA/tests/step1_build_images.sh
@@ -42,7 +42,8 @@ function build_vllm_docker_image() {
         git clone https://github.com/HabanaAI/vllm-fork.git
     fi
     cd ./vllm-fork
-    git checkout v0.6.4.post2+Gaudi-1.19.0
+    VLLM_VER=$(git describe --tags "$(git rev-list --tags --max-count=1)")
+    git checkout ${VLLM_VER} &> /dev/null
     docker build --no-cache -f Dockerfile.hpu -t opea/vllm-gaudi:ci --shm-size=128g . --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy
     if [ $? -ne 0 ]; then
         echo "opea/vllm-gaudi:ci failed"
diff --git a/ChatQnA/tests/test_compose_guardrails_on_gaudi.sh b/ChatQnA/tests/test_compose_guardrails_on_gaudi.sh
index d667a89f3..c882a7ef7 100644
--- a/ChatQnA/tests/test_compose_guardrails_on_gaudi.sh
+++ b/ChatQnA/tests/test_compose_guardrails_on_gaudi.sh
@@ -29,7 +29,9 @@ function build_docker_images() {
     fi
     cd $WORKPATH/docker_image_build
     git clone --depth 1 --branch ${opea_branch} https://github.com/opea-project/GenAIComps.git
-    git clone --depth 1 --branch v0.6.4.post2+Gaudi-1.19.0 https://github.com/HabanaAI/vllm-fork.git
+    git clone https://github.com/HabanaAI/vllm-fork.git && cd vllm-fork
+    VLLM_VER=$(git describe --tags "$(git rev-list --tags --max-count=1)")
+    git checkout ${VLLM_VER} &> /dev/null && cd ../
 
     echo "Build all the images with --no-cache, check docker_image_build.log for details..."
     service_list="chatqna-guardrails chatqna-ui dataprep retriever vllm-gaudi guardrails nginx"
diff --git a/ChatQnA/tests/test_compose_on_gaudi.sh b/ChatQnA/tests/test_compose_on_gaudi.sh
index 885890014..7f64e3b0d 100644
--- a/ChatQnA/tests/test_compose_on_gaudi.sh
+++ b/ChatQnA/tests/test_compose_on_gaudi.sh
@@ -29,7 +29,9 @@ function build_docker_images() {
     fi
     cd $WORKPATH/docker_image_build
     git clone --depth 1 --branch ${opea_branch} https://github.com/opea-project/GenAIComps.git
-    git clone --depth 1 --branch v0.6.4.post2+Gaudi-1.19.0 https://github.com/HabanaAI/vllm-fork.git
+    git clone https://github.com/HabanaAI/vllm-fork.git && cd vllm-fork
+    VLLM_VER=$(git describe --tags "$(git rev-list --tags --max-count=1)")
+    git checkout ${VLLM_VER} &> /dev/null && cd ../
 
     echo "Build all the images with --no-cache, check docker_image_build.log for details..."
     service_list="chatqna chatqna-ui dataprep retriever vllm-gaudi nginx"
diff --git a/ChatQnA/tests/test_compose_without_rerank_on_gaudi.sh b/ChatQnA/tests/test_compose_without_rerank_on_gaudi.sh
index 9e9d7df73..c9dc86a0b 100644
--- a/ChatQnA/tests/test_compose_without_rerank_on_gaudi.sh
+++ b/ChatQnA/tests/test_compose_without_rerank_on_gaudi.sh
@@ -29,7 +29,9 @@ function build_docker_images() {
     fi
     cd $WORKPATH/docker_image_build
     git clone --depth 1 --branch ${opea_branch} https://github.com/opea-project/GenAIComps.git
-    git clone --depth 1 --branch v0.6.4.post2+Gaudi-1.19.0 https://github.com/HabanaAI/vllm-fork.git
+    git clone https://github.com/HabanaAI/vllm-fork.git && cd vllm-fork
+    VLLM_VER=$(git describe --tags "$(git rev-list --tags --max-count=1)")
+    git checkout ${VLLM_VER} &> /dev/null && cd ../
 
     echo "Build all the images with --no-cache, check docker_image_build.log for details..."
     service_list="chatqna-without-rerank chatqna-ui dataprep retriever vllm-gaudi nginx"
diff --git a/CodeTrans/tests/test_compose_on_gaudi.sh b/CodeTrans/tests/test_compose_on_gaudi.sh
index 9c78ea597..39bf47252 100644
--- a/CodeTrans/tests/test_compose_on_gaudi.sh
+++ b/CodeTrans/tests/test_compose_on_gaudi.sh
@@ -30,7 +30,9 @@ function build_docker_images() {
 
     cd $WORKPATH/docker_image_build
     git clone --depth 1 --branch ${opea_branch} https://github.com/opea-project/GenAIComps.git
-    git clone --depth 1 --branch v0.6.4.post2+Gaudi-1.19.0 https://github.com/HabanaAI/vllm-fork.git
+    git clone https://github.com/HabanaAI/vllm-fork.git && cd vllm-fork
+    VLLM_VER=$(git describe --tags "$(git rev-list --tags --max-count=1)")
+    git checkout ${VLLM_VER} &> /dev/null && cd ../
 
     echo "Build all the images with --no-cache, check docker_image_build.log for details..."
     service_list="codetrans codetrans-ui llm-textgen vllm-gaudi nginx"