Enhance CI/CD infrastructure (#593)
Signed-off-by: chensuyue <suyue.chen@intel.com>
Signed-off-by: Sun, Xuehao <xuehao.sun@intel.com>
SearchQnA/docker/docker_build_compose.yaml
@@ -0,0 +1,48 @@
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

services:
  searchqna:
    build:
      args:
        http_proxy: ${http_proxy}
        https_proxy: ${https_proxy}
        no_proxy: ${no_proxy}
      dockerfile: ./Dockerfile
    image: ${REGISTRY:-opea}/searchqna:${TAG:-latest}
  searchqna-ui:
    build:
      context: ui
      dockerfile: ./docker/Dockerfile
    extends: searchqna
    image: ${REGISTRY:-opea}/searchqna-ui:${TAG:-latest}
  embedding-tei:
    build:
      context: GenAIComps
      dockerfile: comps/embeddings/langchain/docker/Dockerfile
    extends: searchqna
    image: ${REGISTRY:-opea}/embedding-tei:${TAG:-latest}
  web-retriever-chroma:
    build:
      context: GenAIComps
      dockerfile: comps/web_retrievers/langchain/chroma/docker/Dockerfile
    extends: searchqna
    image: ${REGISTRY:-opea}/web-retriever-chroma:${TAG:-latest}
  reranking-tei:
    build:
      context: GenAIComps
      dockerfile: comps/reranks/tei/docker/Dockerfile
    extends: searchqna
    image: ${REGISTRY:-opea}/reranking-tei:${TAG:-latest}
  llm-tgi:
    build:
      context: GenAIComps
      dockerfile: comps/llms/text-generation/tgi/Dockerfile
    extends: searchqna
    image: ${REGISTRY:-opea}/llm-tgi:${TAG:-latest}
  tei-gaudi:
    build:
      context: tei-gaudi
      dockerfile: Dockerfile-hpu
    extends: searchqna
    image: ${REGISTRY:-opea}/tei-gaudi:${TAG:-latest}
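The CI test scripts below drive this build file through docker compose. As a rough usage sketch (not part of the commit; the registry and tag values are placeholders, and GenAIComps/tei-gaudi are assumed to be cloned next to the file):

# Run from SearchQnA/docker, where docker_build_compose.yaml lives.
git clone https://github.com/opea-project/GenAIComps.git
git clone https://github.com/huggingface/tei-gaudi            # only needed for the tei-gaudi service
export REGISTRY=registry.example.com/opea                      # placeholder; defaults to "opea" if unset
export TAG=ci-test                                             # placeholder; defaults to "latest" if unset
docker compose -f docker_build_compose.yaml build --no-cache searchqna searchqna-ui embedding-tei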
SearchQnA/docker/gaudi/compose.yaml
@@ -1,12 +1,9 @@
-
 # Copyright (C) 2024 Intel Corporation
 # SPDX-License-Identifier: Apache-2.0
 
-version: "3.8"
-
 services:
   tei-embedding-service:
-    image: opea/tei-gaudi:latest
+    image: ${REGISTRY:-opea}/tei-gaudi:${TAG:-latest}
     container_name: tei-embedding-gaudi-server
     ports:
       - "3001:80"
@@ -27,7 +24,7 @@ services:
       ENABLE_EXPERIMENTAL_FLAGS: true
     command: --model-id ${EMBEDDING_MODEL_ID} --auto-truncate
   embedding:
-    image: opea/embedding-tei:latest
+    image: ${REGISTRY:-opea}/embedding-tei:${TAG:-latest}
     container_name: embedding-tei-server
     depends_on:
       - tei-embedding-service
@@ -41,7 +38,7 @@ services:
       TEI_EMBEDDING_ENDPOINT: ${TEI_EMBEDDING_ENDPOINT}
     restart: unless-stopped
   web-retriever:
-    image: opea/web-retriever-chroma:latest
+    image: ${REGISTRY:-opea}/web-retriever-chroma:${TAG:-latest}
     container_name: web-retriever-chroma-server
     ports:
       - "3003:7077"
@@ -68,7 +65,7 @@ services:
       https_proxy: ${https_proxy}
     command: --model-id ${RERANK_MODEL_ID} --auto-truncate
   reranking:
-    image: opea/reranking-tei:latest
+    image: ${REGISTRY:-opea}/reranking-tei:${TAG:-latest}
     container_name: reranking-tei-xeon-server
     depends_on:
       - tei-reranking-service
@@ -104,7 +101,7 @@ services:
     ipc: host
     command: --model-id ${LLM_MODEL_ID} --max-input-length 1024 --max-total-tokens 2048
   llm:
-    image: opea/llm-tgi:latest
+    image: ${REGISTRY:-opea}/llm-tgi:${TAG:-latest}
     container_name: llm-tgi-gaudi-server
     depends_on:
       - tgi-service
@@ -121,7 +118,7 @@ services:
       HF_HUB_ENABLE_HF_TRANSFER: 0
     restart: unless-stopped
   searchqna-gaudi-backend-server:
-    image: opea/searchqna:latest
+    image: ${REGISTRY:-opea}/searchqna:${TAG:-latest}
     container_name: searchqna-gaudi-backend-server
     depends_on:
       - tei-embedding-service
@@ -149,7 +146,7 @@ services:
     ipc: host
     restart: always
   searchqna-gaudi-ui-server:
-    image: opea/searchqna-ui:latest
+    image: ${REGISTRY:-opea}/searchqna-ui:${TAG:-latest}
     container_name: searchqna-gaudi-ui-server
     depends_on:
       - searchqna-gaudi-backend-server
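The deployment compose files now resolve every OPEA image through Compose variable substitution: ${REGISTRY:-opea} and ${TAG:-latest} fall back to the old hard-coded values when the variables are unset, so local runs behave exactly as before while CI can point at its own registry and per-build tag. The same ${VAR:-default} rule works in plain shell, which is a quick way to see what a reference will resolve to (values are placeholders):

unset REGISTRY TAG
echo "${REGISTRY:-opea}/searchqna:${TAG:-latest}"        # -> opea/searchqna:latest
export REGISTRY=registry.example.com/opea TAG=ci-test     # placeholder values
echo "${REGISTRY:-opea}/searchqna:${TAG:-latest}"        # -> registry.example.com/opea/searchqna:ci-test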
SearchQnA/docker/xeon/compose.yaml
@@ -1,9 +1,6 @@
-
 # Copyright (C) 2024 Intel Corporation
 # SPDX-License-Identifier: Apache-2.0
 
-version: "3.8"
-
 services:
   tei-embedding-service:
     image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.5
@@ -19,7 +16,7 @@ services:
       https_proxy: ${https_proxy}
     command: --model-id ${EMBEDDING_MODEL_ID} --auto-truncate
   embedding:
-    image: opea/embedding-tei:latest
+    image: ${REGISTRY:-opea}/embedding-tei:${TAG:-latest}
     container_name: embedding-tei-server
     depends_on:
       - tei-embedding-service
@@ -34,7 +31,7 @@ services:
       HF_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
     restart: unless-stopped
   web-retriever:
-    image: opea/web-retriever-chroma:latest
+    image: ${REGISTRY:-opea}/web-retriever-chroma:${TAG:-latest}
     container_name: web-retriever-chroma-server
     ports:
       - "3003:7077"
@@ -61,7 +58,7 @@ services:
       https_proxy: ${https_proxy}
     command: --model-id ${RERANK_MODEL_ID} --auto-truncate
   reranking:
-    image: opea/reranking-tei:latest
+    image: ${REGISTRY:-opea}/reranking-tei:${TAG:-latest}
     container_name: reranking-tei-xeon-server
     depends_on:
       - tei-reranking-service
@@ -90,7 +87,7 @@ services:
       HF_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
     command: --model-id ${LLM_MODEL_ID}
   llm:
-    image: opea/llm-tgi:latest
+    image: ${REGISTRY:-opea}/llm-tgi:${TAG:-latest}
     container_name: llm-tgi-server
     depends_on:
       - tgi-service
@@ -105,7 +102,7 @@ services:
       HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
     restart: unless-stopped
   searchqna-xeon-backend-server:
-    image: opea/searchqna:latest
+    image: ${REGISTRY:-opea}/searchqna:${TAG:-latest}
     container_name: searchqna-xeon-backend-server
     depends_on:
       - tei-embedding-service
@@ -133,7 +130,7 @@ services:
     ipc: host
     restart: always
   searchqna-gaudi-ui-server:
-    image: opea/searchqna-ui:latest
+    image: ${REGISTRY:-opea}/searchqna-ui:${TAG:-latest}
     container_name: searchqna-xeon-ui-server
     depends_on:
       - searchqna-xeon-backend-server
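To verify which images a stack will actually use after substitution, the rendered configuration can be inspected before anything is started. A small check along these lines, run from the directory that holds the Gaudi or Xeon compose.yaml (registry and tag are placeholders):

export REGISTRY=registry.example.com/opea TAG=ci-test   # placeholders; omit to get the opea/...:latest defaults
docker compose -f compose.yaml config | grep 'image:'   # prints the fully resolved image references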
SearchQnA/tests/test_searchqna_on_gaudi.sh
@@ -2,46 +2,33 @@
 # Copyright (C) 2024 Intel Corporation
 # SPDX-License-Identifier: Apache-2.0
 
 # for test
-
 set -xe
+IMAGE_REPO=${IMAGE_REPO:-"opea"}
+IMAGE_TAG=${IMAGE_TAG:-"latest"}
+echo "REGISTRY=IMAGE_REPO=${IMAGE_REPO}"
+echo "TAG=IMAGE_TAG=${IMAGE_TAG}"
+export REGISTRY=${IMAGE_REPO}
+export TAG=${IMAGE_TAG}
 
 WORKPATH=$(dirname "$PWD")
 LOG_PATH="$WORKPATH/tests"
 ip_address=$(hostname -I | awk '{print $1}')
 
 function build_docker_images() {
-    cd $WORKPATH
+    cd $WORKPATH/docker
     git clone https://github.com/opea-project/GenAIComps.git
-    cd GenAIComps
+    git clone https://github.com/huggingface/tei-gaudi
 
-    docker build --no-cache -t opea/embedding-tei:latest -f comps/embeddings/langchain/docker/Dockerfile .
-    docker build --no-cache -t opea/web-retriever-chroma:latest -f comps/web_retrievers/langchain/chroma/docker/Dockerfile .
-    docker build --no-cache -t opea/reranking-tei:latest -f comps/reranks/tei/docker/Dockerfile .
-    docker build --no-cache -t opea/llm-tgi:latest -f comps/llms/text-generation/tgi/Dockerfile .
-
-    # cd ..
-    # git clone https://github.com/huggingface/tei-gaudi
-    # cd tei-gaudi/
-    # docker build --no-cache -f Dockerfile-hpu -t opea/tei-gaudi:latest .
+    echo "Build all the images with --no-cache, check docker_image_build.log for details..."
+    service_list="searchqna searchqna-ui embedding-tei web-retriever-chroma reranking-tei llm-tgi tei-gaudi"
+    docker compose -f docker_build_compose.yaml build ${service_list} --no-cache > ${LOG_PATH}/docker_image_build.log
 
     docker pull ghcr.io/huggingface/text-embeddings-inference:cpu-1.5
     docker pull ghcr.io/huggingface/tgi-gaudi:2.0.1
-    cd $WORKPATH/docker
-    docker build --no-cache -t opea/searchqna:latest -f Dockerfile .
-
-    cd $WORKPATH/docker/ui
-    docker build --no-cache -t opea/searchqna-ui:latest -f docker/Dockerfile .
-
     docker images
 }
 
 function start_services() {
-    # build tei-gaudi for each test instead of pull from local registry
-    cd $WORKPATH
-    git clone https://github.com/huggingface/tei-gaudi
-    cd tei-gaudi/
-    docker build --no-cache -f Dockerfile-hpu -t opea/tei-gaudi:latest .
-
     cd $WORKPATH/docker/gaudi
     export GOOGLE_CSE_ID=$GOOGLE_CSE_ID
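With the per-image docker build calls replaced by a single compose build, the test script takes its registry and tag from IMAGE_REPO and IMAGE_TAG (defaulting to "opea" and "latest") and re-exports them as REGISTRY and TAG for compose. A hedged sketch of how a CI job might invoke the Gaudi test against prebuilt images (script name and registry are assumptions, not confirmed by the diff):

export IMAGE_REPO=registry.example.com/opea   # assumed CI registry
export IMAGE_TAG=pr-593                       # assumed per-PR tag
bash test_searchqna_on_gaudi.sh               # assumed script name; with a non-default IMAGE_REPO the local build step is skipped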
@@ -71,17 +58,6 @@ function start_services() {
 
     sed -i "s/backend_address/$ip_address/g" $WORKPATH/docker/ui/svelte/.env
 
-    if [[ "$IMAGE_REPO" != "" ]]; then
-        # Replace the container name with a test-specific name
-        echo "using image repository $IMAGE_REPO and image tag $IMAGE_TAG"
-        sed -i "s#image: opea/searchqna:latest#image: opea/searchqna:${IMAGE_TAG}#g" compose.yaml
-        sed -i "s#image: opea/searchqna-ui:latest#image: opea/searchqna-ui:${IMAGE_TAG}#g" compose.yaml
-        sed -i "s#image: opea/*#image: ${IMAGE_REPO}opea/#g" compose.yaml
-        sed -i "s#image: ${IMAGE_REPO}opea/tei-gaudi:latest#image: opea/tei-gaudi:latest#g" compose.yaml
-        echo "cat compose.yaml"
-        cat compose.yaml
-    fi
-
     # Start Docker Containers
     docker compose up -d
     n=0
@@ -118,13 +94,16 @@ function validate_frontend() {
     local conda_env_name="OPEA_e2e"
 
     export PATH=${HOME}/miniforge3/bin/:$PATH
-    # conda remove -n ${conda_env_name} --all -y
-    # conda create -n ${conda_env_name} python=3.12 -y
+    if conda info --envs | grep -q "$conda_env_name"; then
+        echo "$conda_env_name exist!"
+    else
+        conda create -n ${conda_env_name} python=3.12 -y
+    fi
     source activate ${conda_env_name}
 
     sed -i "s/localhost/$ip_address/g" playwright.config.ts
 
-    # conda install -c conda-forge nodejs -y
+    conda install -c conda-forge nodejs -y
     npm install && npm ci && npx playwright install --with-deps
     node -v && npm -v && pip list
 
@@ -147,7 +126,7 @@ function stop_docker() {
 function main() {
 
     stop_docker
-    if [[ "$IMAGE_REPO" == "" ]]; then build_docker_images; fi
+    if [[ "$IMAGE_REPO" == "opea" ]]; then build_docker_images; fi
     start_services
 
     validate_megaservice
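The main() change flips the build condition: previously images were rebuilt only when IMAGE_REPO was empty; now they are rebuilt when IMAGE_REPO is the default "opea" and skipped whenever CI supplies a real registry. Spelled out as an illustrative snippet (not taken from the script verbatim):

IMAGE_REPO=${IMAGE_REPO:-"opea"}
if [[ "$IMAGE_REPO" == "opea" ]]; then
    echo "default repo: build images locally via docker_build_compose.yaml"
else
    echo "external repo ($IMAGE_REPO): use prebuilt images, skip build_docker_images"
fi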
SearchQnA/tests/test_searchqna_on_xeon.sh
@@ -3,28 +3,27 @@
 # SPDX-License-Identifier: Apache-2.0
 
 set -xe
+IMAGE_REPO=${IMAGE_REPO:-"opea"}
+IMAGE_TAG=${IMAGE_TAG:-"latest"}
+echo "REGISTRY=IMAGE_REPO=${IMAGE_REPO}"
+echo "TAG=IMAGE_TAG=${IMAGE_TAG}"
+export REGISTRY=${IMAGE_REPO}
+export TAG=${IMAGE_TAG}
 
 WORKPATH=$(dirname "$PWD")
 LOG_PATH="$WORKPATH/tests"
 ip_address=$(hostname -I | awk '{print $1}')
 
 function build_docker_images() {
-    cd $WORKPATH
+    cd $WORKPATH/docker
     git clone https://github.com/opea-project/GenAIComps.git
-    cd GenAIComps
 
-    docker build -t opea/embedding-tei:latest -f comps/embeddings/langchain/docker/Dockerfile .
-    docker build -t opea/web-retriever-chroma:latest -f comps/web_retrievers/langchain/chroma/docker/Dockerfile .
-    docker build -t opea/reranking-tei:latest -f comps/reranks/tei/docker/Dockerfile .
-    docker build -t opea/llm-tgi:latest -f comps/llms/text-generation/tgi/Dockerfile .
+    echo "Build all the images with --no-cache, check docker_image_build.log for details..."
+    service_list="searchqna searchqna-ui embedding-tei web-retriever-chroma reranking-tei llm-tgi"
+    docker compose -f docker_build_compose.yaml build ${service_list} --no-cache > ${LOG_PATH}/docker_image_build.log
 
     docker pull ghcr.io/huggingface/text-embeddings-inference:cpu-1.5
     docker pull ghcr.io/huggingface/text-generation-inference:1.4
-    cd $WORKPATH/docker
-    docker build -t opea/searchqna:latest -f Dockerfile .
-
-    cd $WORKPATH/docker/ui
-    docker build --no-cache -t opea/searchqna-ui:latest -f docker/Dockerfile .
 
     docker images
 }
@@ -57,25 +56,15 @@ function start_services() {
 
     sed -i "s/backend_address/$ip_address/g" $WORKPATH/docker/ui/svelte/.env
 
-    if [[ "$IMAGE_REPO" != "" ]]; then
-        # Replace the container name with a test-specific name
-        echo "using image repository $IMAGE_REPO and image tag $IMAGE_TAG"
-        sed -i "s#image: opea/searchqna:latest#image: opea/searchqna:${IMAGE_TAG}#g" compose.yaml
-        sed -i "s#image: opea/searchqna-ui:latest#image: opea/searchqna-ui:${IMAGE_TAG}#g" compose.yaml
-        sed -i "s#image: opea/*#image: ${IMAGE_REPO}opea/#g" compose.yaml
-        echo "cat compose.yaml"
-        cat compose.yaml
-    fi
-
     # Start Docker Containers
     docker compose up -d
     n=0
-    until [[ "$n" -ge 200 ]]; do
+    until [[ "$n" -ge 100 ]]; do
         docker logs tgi-service > $LOG_PATH/tgi_service_start.log
         if grep -q Connected $LOG_PATH/tgi_service_start.log; then
             break
         fi
-        sleep 1s
+        sleep 5s
         n=$((n+1))
     done
 }
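The readiness loop now polls the TGI container log for "Connected" up to 100 times with a 5s sleep instead of 200 times with a 1s sleep, stretching the worst-case wait from roughly 200s to roughly 500s while issuing far fewer docker calls. The same condition can be checked by hand when a start looks stalled (service name as in the compose file):

docker logs tgi-service 2>&1 | grep -q Connected && echo "TGI ready" || echo "TGI still warming up"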
@@ -97,28 +86,31 @@ function validate_megaservice() {
 }
 
 function validate_frontend() {
     cd $WORKPATH/docker/ui/svelte
     local conda_env_name="OPEA_e2e"
     export PATH=${HOME}/miniforge3/bin/:$PATH
-    # conda remove -n ${conda_env_name} --all -y
-    # conda create -n ${conda_env_name} python=3.12 -y
+    if conda info --envs | grep -q "$conda_env_name"; then
+        echo "$conda_env_name exist!"
+    else
+        conda create -n ${conda_env_name} python=3.12 -y
+    fi
     source activate ${conda_env_name}
 
     sed -i "s/localhost/$ip_address/g" playwright.config.ts
 
-    # conda install -c conda-forge nodejs -y
+    conda install -c conda-forge nodejs -y
     npm install && npm ci && npx playwright install --with-deps
     node -v && npm -v && pip list
 
     exit_status=0
     npx playwright test || exit_status=$?
 
     if [ $exit_status -ne 0 ]; then
         echo "[TEST INFO]: ---------frontend test failed---------"
         exit $exit_status
     else
         echo "[TEST INFO]: ---------frontend test passed---------"
     fi
 }
 
 function stop_docker() {
@@ -129,7 +121,7 @@ function stop_docker() {
 function main() {
 
     stop_docker
-    if [[ "$IMAGE_REPO" == "" ]]; then build_docker_images; fi
+    if [[ "$IMAGE_REPO" == "opea" ]]; then build_docker_images; fi
     start_services
 
     validate_megaservice