diff --git a/Text2Image/docker_compose/intel/cpu/xeon/compose.yaml b/Text2Image/docker_compose/intel/cpu/xeon/compose.yaml
new file mode 100644
index 000000000..4a8450a2b
--- /dev/null
+++ b/Text2Image/docker_compose/intel/cpu/xeon/compose.yaml
@@ -0,0 +1,53 @@
+# Copyright (C) 2024 Intel Corporation
+# SPDX-License-Identifier: Apache-2.0
+
+services:
+  text2image-server:
+    image: ${REGISTRY:-opea}/text2image:${TAG:-latest}
+    container_name: text2image-server
+    ports:
+      - "9379:9379"
+    environment:
+      - no_proxy=${no_proxy}
+      - https_proxy=${https_proxy}
+      - http_proxy=${http_proxy}
+      - MODEL=${MODEL}
+      - HF_TOKEN=${HF_TOKEN}
+    ipc: host
+    restart: always
+  text2image-ui-server:
+    image: ${REGISTRY:-opea}/text2image-ui:${TAG:-latest}
+    container_name: text2image-ui-server
+    depends_on:
+      - text2image-server
+    ports:
+      - "5173:5173"
+    environment:
+      - no_proxy=${no_proxy}
+      - https_proxy=${https_proxy}
+      - http_proxy=${http_proxy}
+    ipc: host
+    restart: always
+  text2image-nginx-server:
+    image: ${REGISTRY:-opea}/nginx:${TAG:-latest}
+    container_name: text2image-nginx-server
+    depends_on:
+      - text2image-server
+      - text2image-ui-server
+    ports:
+      - "${NGINX_PORT:-80}:80"
+    environment:
+      - no_proxy=${no_proxy}
+      - https_proxy=${https_proxy}
+      - http_proxy=${http_proxy}
+      - FRONTEND_SERVICE_IP=text2image-ui-server
+      - FRONTEND_SERVICE_PORT=5173
+      - BACKEND_SERVICE_NAME=text2image
+      - BACKEND_SERVICE_IP=text2image-server
+      - BACKEND_SERVICE_PORT=9379
+    ipc: host
+    restart: always
+
+networks:
+  default:
+    driver: bridge
diff --git a/Text2Image/docker_compose/intel/hpu/gaudi/compose.yaml b/Text2Image/docker_compose/intel/hpu/gaudi/compose.yaml
new file mode 100644
index 000000000..4e41f4d44
--- /dev/null
+++ b/Text2Image/docker_compose/intel/hpu/gaudi/compose.yaml
@@ -0,0 +1,58 @@
+# Copyright (C) 2024 Intel Corporation
+# SPDX-License-Identifier: Apache-2.0
+
+services:
+  text2image-gaudi-server:
+    image: ${REGISTRY:-opea}/text2image-gaudi:${TAG:-latest}
+    container_name: text2image-gaudi-server
+    ports:
+      - "9379:9379"
+    environment:
+      - no_proxy=${no_proxy}
+      - https_proxy=${https_proxy}
+      - http_proxy=${http_proxy}
+      - MODEL=${MODEL}
+      - HF_TOKEN=${HF_TOKEN}
+      - HABANA_VISIBLE_DEVICES=all
+      - OMPI_MCA_btl_vader_single_copy_mechanism=none
+    ipc: host
+    runtime: habana
+    cap_add:
+      - SYS_NICE
+    restart: always
+  text2image-gaudi-ui-server:
+    image: ${REGISTRY:-opea}/text2image-ui:${TAG:-latest}
+    container_name: text2image-gaudi-ui-server
+    depends_on:
+      - text2image-gaudi-server
+    ports:
+      - "5173:5173"
+    environment:
+      - no_proxy=${no_proxy}
+      - https_proxy=${https_proxy}
+      - http_proxy=${http_proxy}
+    ipc: host
+    restart: always
+  text2image-gaudi-nginx-server:
+    image: ${REGISTRY:-opea}/nginx:${TAG:-latest}
+    container_name: text2image-gaudi-nginx-server
+    depends_on:
+      - text2image-gaudi-server
+      - text2image-gaudi-ui-server
+    ports:
+      - "${NGINX_PORT:-80}:80"
+    environment:
+      - no_proxy=${no_proxy}
+      - https_proxy=${https_proxy}
+      - http_proxy=${http_proxy}
+      - FRONTEND_SERVICE_IP=text2image-gaudi-ui-server
+      - FRONTEND_SERVICE_PORT=5173
+      - BACKEND_SERVICE_NAME=text2image
+      - BACKEND_SERVICE_IP=text2image-gaudi-server
+      - BACKEND_SERVICE_PORT=9379
+    ipc: host
+    restart: always
+
+networks:
+  default:
+    driver: bridge
diff --git a/Text2Image/docker_image_build/build.yaml b/Text2Image/docker_image_build/build.yaml
index 1fe6f18d4..4fd4c5723 100644
--- a/Text2Image/docker_image_build/build.yaml
+++ b/Text2Image/docker_image_build/build.yaml
@@ -25,3 +25,9 @@ services:
       context: ../ui
       dockerfile: ./docker/Dockerfile
     image: ${REGISTRY:-opea}/text2image-ui:${TAG:-latest}
+  nginx:
+    build:
+      context: GenAIComps
+      dockerfile: comps/third_parties/nginx/src/Dockerfile
+    extends: text2image
+    image: ${REGISTRY:-opea}/nginx:${TAG:-latest}
diff --git a/Text2Image/tests/test_compose_on_gaudi.sh b/Text2Image/tests/test_compose_on_gaudi.sh
index 6414a2c59..2e4e70c84 100644
--- a/Text2Image/tests/test_compose_on_gaudi.sh
+++ b/Text2Image/tests/test_compose_on_gaudi.sh
@@ -14,18 +14,22 @@ LOG_PATH="$WORKPATH/tests"
 ip_address=$(hostname -I | awk '{print $1}')
 text2image_service_port=9379
 MODEL=stabilityai/stable-diffusion-2-1
+export MODEL=${MODEL}
 
 function build_docker_images() {
     cd $WORKPATH/docker_image_build
     if [ ! -d "GenAIComps" ] ; then
         git clone --depth 1 --branch ${opea_branch:-"main"} https://github.com/opea-project/GenAIComps.git
     fi
-    docker compose -f build.yaml build --no-cache > ${LOG_PATH}/docker_image_build.log
+    service_list="text2image-gaudi text2image-ui nginx"
+    docker compose -f build.yaml build ${service_list} --no-cache > ${LOG_PATH}/docker_image_build.log
 }
 
 function start_service() {
+    cd $WORKPATH/docker_compose/intel/hpu/gaudi
     export no_proxy="localhost,127.0.0.1,"${ip_address}
-    docker run -d --name="text2image-server" -p $text2image_service_port:$text2image_service_port --runtime=habana -e HABANA_VISIBLE_DEVICES=all -e OMPI_MCA_btl_vader_single_copy_mechanism=none --cap-add=sys_nice --ipc=host -e http_proxy=$http_proxy -e https_proxy=$https_proxy -e MODEL=$MODEL -e no_proxy=$no_proxy ${IMAGE_REPO}/text2image-gaudi:${IMAGE_TAG}
+    # Start Docker Containers
+    docker compose -f compose.yaml up -d > ${LOG_PATH}/start_services_with_compose.log
 
     sleep 30s
 }
@@ -38,11 +42,11 @@ function validate_microservice() {
     HTTP_RESPONSE=$(curl --silent --write-out "HTTPSTATUS:%{http_code}" -X POST -d '{"prompt":"An astronaut riding a green horse", "num_images_per_prompt":1}' -H 'Content-Type: application/json' "$URL")
     HTTP_STATUS=$(echo $HTTP_RESPONSE | tr -d '\n' | sed -e 's/.*HTTPSTATUS://')
     RESPONSE_BODY=$(echo $HTTP_RESPONSE | sed -e 's/HTTPSTATUS\:.*//g')
-    SERVICE_NAME="text2image-server - generate image"
+    SERVICE_NAME="text2image-gaudi-server - generate image"
 
     if [ "$HTTP_STATUS" -ne "200" ]; then
         echo "[ $SERVICE_NAME ] HTTP status is not 200. Received status was $HTTP_STATUS"
-        docker logs text2image-server >> ${LOG_PATH}/text2image-server_generate_image.log
+        docker logs text2image-gaudi-server >> ${LOG_PATH}/text2image-gaudi-server_generate_image.log
         exit 1
     else
         echo "[ $SERVICE_NAME ] HTTP status is 200. Checking content..."
@@ -52,14 +56,14 @@ function validate_microservice() {
         echo "Content correct."
     else
         echo "Content wrong."
-        docker logs text2image-server >> ${LOG_PATH}/text2image-server_generate_image.log
+        docker logs text2image-gaudi-server >> ${LOG_PATH}/text2image-gaudi-server_generate_image.log
         exit 1
     fi
 }
 
 function stop_docker() {
-    cid=$(docker ps -aq --filter "name=text2image-server*")
-    if [[ ! -z "$cid" ]]; then docker stop $cid && docker rm $cid && sleep 1s; fi
+    cd $WORKPATH/docker_compose/intel/hpu/gaudi
+    docker compose -f compose.yaml down
 }
 
 function main() {
diff --git a/Text2Image/tests/test_compose_on_xeon.sh b/Text2Image/tests/test_compose_on_xeon.sh
index 44eee453b..ac2f34448 100644
--- a/Text2Image/tests/test_compose_on_xeon.sh
+++ b/Text2Image/tests/test_compose_on_xeon.sh
@@ -14,18 +14,21 @@ LOG_PATH="$WORKPATH/tests"
 ip_address=$(hostname -I | awk '{print $1}')
 text2image_service_port=9379
 MODEL=stabilityai/stable-diffusion-2-1
+export MODEL=${MODEL}
 
 function build_docker_images() {
     cd $WORKPATH/docker_image_build
     if [ ! -d "GenAIComps" ] ; then
         git clone --depth 1 --branch ${opea_branch:-"main"} https://github.com/opea-project/GenAIComps.git
     fi
-    docker compose -f build.yaml build --no-cache > ${LOG_PATH}/docker_image_build.log
+    service_list="text2image text2image-ui nginx"
+    docker compose -f build.yaml build ${service_list} --no-cache > ${LOG_PATH}/docker_image_build.log
 }
 
 function start_service() {
+    cd $WORKPATH/docker_compose/intel/cpu/xeon
     export no_proxy="localhost,127.0.0.1,"${ip_address}
-    docker run -d --name="text2image-server" -p $text2image_service_port:$text2image_service_port --runtime=runc --ipc=host -e http_proxy=$http_proxy -e https_proxy=$https_proxy -e MODEL=$MODEL -e no_proxy=$no_proxy ${IMAGE_REPO}/text2image:${IMAGE_TAG}
+    docker compose -f compose.yaml up -d > ${LOG_PATH}/start_services_with_compose.log
 
     sleep 30s
 }
@@ -58,8 +61,8 @@ function validate_microservice() {
 }
 
 function stop_docker() {
-    cid=$(docker ps -aq --filter "name=text2image-server*")
-    if [[ ! -z "$cid" ]]; then docker stop $cid && docker rm $cid && sleep 1s; fi
+    cd $WORKPATH/docker_compose/intel/cpu/xeon
+    docker compose -f compose.yaml down
 }
 
 function main() {