.github/workflows/_example-workflow.yml (vendored): 2 changed lines (1 addition, 1 deletion)
@@ -75,7 +75,7 @@ jobs:
           docker_compose_path=${{ github.workspace }}/${{ inputs.example }}/docker_image_build/build.yaml
           if [[ $(grep -c "vllm:" ${docker_compose_path}) != 0 ]]; then
             git clone https://github.com/vllm-project/vllm.git
-            cd vllm && git checkout 446c780 && cd ../
+            cd vllm && git rev-parse HEAD && cd ../
           fi
           if [[ $(grep -c "vllm-gaudi:" ${docker_compose_path}) != 0 ]]; then
             git clone https://github.com/HabanaAI/vllm-fork.git
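For reference, the check this step performs can be reproduced locally. A minimal sketch, assuming the example's docker_image_build/build.yaml is the compose file being inspected (the ChatQnA path below is illustrative, not taken from the diff):

    docker_compose_path=./ChatQnA/docker_image_build/build.yaml   # illustrative path
    if [[ $(grep -c "vllm:" ${docker_compose_path}) != 0 ]]; then
      # a vllm: service is declared, so clone vllm and print the commit that will be built
      git clone https://github.com/vllm-project/vllm.git
      cd vllm && git rev-parse HEAD && cd ../
    fi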
@@ -113,12 +113,12 @@ services:
       dockerfile: comps/guardrails/llama_guard/langchain/Dockerfile
     extends: chatqna
     image: ${REGISTRY:-opea}/guardrails-tgi:${TAG:-latest}
-  # vllm:
-  #   build:
-  #     context: vllm
-  #     dockerfile: Dockerfile.cpu
-  #   extends: chatqna
-  #   image: ${REGISTRY:-opea}/vllm:${TAG:-latest}
+  vllm:
+    build:
+      context: vllm
+      dockerfile: Dockerfile.cpu
+    extends: chatqna
+    image: ${REGISTRY:-opea}/vllm:${TAG:-latest}
   vllm-gaudi:
     build:
       context: vllm-fork
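Once the vllm service is uncommented, its image can be built directly from the compose build file. A minimal sketch, assuming the file is the example's docker_image_build/build.yaml and that REGISTRY and TAG fall back to the defaults shown above:

    # build only the vllm service defined in the compose build file
    docker compose -f docker_image_build/build.yaml build vllm
    # with REGISTRY and TAG unset, the image is tagged opea/vllm:latest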