From 8a9f3f43510f23fb8cb1f666b9e1ec39a6d07d5e Mon Sep 17 00:00:00 2001
From: ZePan110
Date: Tue, 20 May 2025 10:05:00 +0800
Subject: [PATCH] Organize set_env.sh paths and update README.md (#1920)

Signed-off-by: ZePan110
Co-authored-by: chen, suyue
Co-authored-by: Ying Hu
---
 .github/workflows/pr-link-path-scan.yml              | 6 ++++--
 CodeTrans/docker_compose/intel/cpu/xeon/README.md    | 2 +-
 CodeTrans/docker_compose/intel/hpu/gaudi/README.md   | 2 +-
 CodeTrans/docker_compose/{ => intel}/set_env.sh      | 2 +-
 CodeTrans/tests/test_compose_on_gaudi.sh             | 4 ++--
 CodeTrans/tests/test_compose_on_xeon.sh              | 4 ++--
 CodeTrans/tests/test_compose_tgi_on_gaudi.sh         | 4 ++--
 CodeTrans/tests/test_compose_tgi_on_xeon.sh          | 4 ++--
 DocSum/docker_compose/intel/cpu/xeon/README.md       | 5 +++--
 DocSum/docker_compose/intel/hpu/gaudi/README.md      | 5 +++--
 DocSum/docker_compose/{ => intel}/set_env.sh         | 0
 DocSum/tests/test_compose_on_gaudi.sh                | 2 +-
 DocSum/tests/test_compose_on_xeon.sh                 | 2 +-
 DocSum/tests/test_compose_tgi_on_gaudi.sh            | 2 +-
 DocSum/tests/test_compose_tgi_on_xeon.sh             | 2 +-
 Translation/docker_compose/intel/cpu/xeon/README.md  | 4 ++--
 Translation/docker_compose/intel/hpu/gaudi/README.md | 4 ++--
 Translation/docker_compose/{ => intel}/set_env.sh    | 2 +-
 Translation/tests/test_compose_on_gaudi.sh           | 4 ++--
 Translation/tests/test_compose_on_xeon.sh            | 4 ++--
 20 files changed, 34 insertions(+), 30 deletions(-)
 rename CodeTrans/docker_compose/{ => intel}/set_env.sh (95%)
 rename DocSum/docker_compose/{ => intel}/set_env.sh (100%)
 rename Translation/docker_compose/{ => intel}/set_env.sh (95%)

diff --git a/.github/workflows/pr-link-path-scan.yml b/.github/workflows/pr-link-path-scan.yml
index 30040bc8b..3b147af24 100644
--- a/.github/workflows/pr-link-path-scan.yml
+++ b/.github/workflows/pr-link-path-scan.yml
@@ -80,6 +80,7 @@ jobs:
       - name: Checking Relative Path Validity
         run: |
           cd ${{github.workspace}}
+          delay=15
           fail="FALSE"
           repo_name=${{ github.event.pull_request.head.repo.full_name }}
           branch="https://github.com/$repo_name/blob/${{ github.event.pull_request.head.ref }}"
@@ -111,14 +112,15 @@
             if [[ "$png_line" == *#* ]]; then
               if [ -n "changed_files" ] && echo "$changed_files" | grep -q "^${refer_path}$"; then
                 url_dev=$branch$(echo "$real_path" | sed 's|.*/GenAIExamples||')$png_path
+                sleep $delay
                 response=$(curl -I -L -s -o /dev/null -w "%{http_code}" "$url_dev")
                 if [ "$response" -ne 200 ]; then
-                  echo "**********Validation failed, try again**********"
+                  echo "**********Validation failed ($response), try again**********"
                   response_retry=$(curl -s -o /dev/null -w "%{http_code}" "$url_dev")
                   if [ "$response_retry" -eq 200 ]; then
                     echo "*****Retry successfully*****"
                   else
-                    echo "Invalid path from ${{github.workspace}}/$refer_path: $png_path"
+                    echo "Invalid path ($response_retry) from ${{github.workspace}}/$refer_path: $png_path"
                     fail="TRUE"
                   fi
                 else
diff --git a/CodeTrans/docker_compose/intel/cpu/xeon/README.md b/CodeTrans/docker_compose/intel/cpu/xeon/README.md
index 2e2cb44c0..a4061fd42 100755
--- a/CodeTrans/docker_compose/intel/cpu/xeon/README.md
+++ b/CodeTrans/docker_compose/intel/cpu/xeon/README.md
@@ -46,7 +46,7 @@ export http_proxy="Your_HTTP_Proxy"          # http proxy if any
 export https_proxy="Your_HTTPs_Proxy"        # https proxy if any
 export no_proxy=localhost,127.0.0.1,$host_ip # additional no proxies if needed
 export NGINX_PORT=${your_nginx_port}         # your usable port for nginx, 80 for example
-source ./set_env.sh
+source docker_compose/intel/set_env.sh
 ```
 
 Consult the section on [CodeTrans Service configuration](#codetrans-configuration) for information on how service specific configuration parameters affect deployments.
diff --git a/CodeTrans/docker_compose/intel/hpu/gaudi/README.md b/CodeTrans/docker_compose/intel/hpu/gaudi/README.md
index d43f12bcf..9d6bc8ae4 100755
--- a/CodeTrans/docker_compose/intel/hpu/gaudi/README.md
+++ b/CodeTrans/docker_compose/intel/hpu/gaudi/README.md
@@ -46,7 +46,7 @@ export http_proxy="Your_HTTP_Proxy"          # http proxy if any
 export https_proxy="Your_HTTPs_Proxy"        # https proxy if any
 export no_proxy=localhost,127.0.0.1,$host_ip # additional no proxies if needed
 export NGINX_PORT=${your_nginx_port}         # your usable port for nginx, 80 for example
-source ./set_env.sh
+source docker_compose/intel/set_env.sh
 ```
 
 Consult the section on [CodeTrans Service configuration](#codetrans-configuration) for information on how service specific configuration parameters affect deployments.
diff --git a/CodeTrans/docker_compose/set_env.sh b/CodeTrans/docker_compose/intel/set_env.sh
similarity index 95%
rename from CodeTrans/docker_compose/set_env.sh
rename to CodeTrans/docker_compose/intel/set_env.sh
index d24bc1c20..075b62e89 100644
--- a/CodeTrans/docker_compose/set_env.sh
+++ b/CodeTrans/docker_compose/intel/set_env.sh
@@ -2,7 +2,7 @@
 # Copyright (C) 2024 Intel Corporation
 # SPDX-License-Identifier: Apache-2.0
 
-pushd "../../" > /dev/null
+pushd "../../../" > /dev/null
 source .set_env.sh
 popd > /dev/null
 
diff --git a/CodeTrans/tests/test_compose_on_gaudi.sh b/CodeTrans/tests/test_compose_on_gaudi.sh
index 7b0baa660..600c20a0c 100644
--- a/CodeTrans/tests/test_compose_on_gaudi.sh
+++ b/CodeTrans/tests/test_compose_on_gaudi.sh
@@ -37,12 +37,12 @@ function build_docker_images() {
 }
 
 function start_services() {
-    cd $WORKPATH/docker_compose
+    cd $WORKPATH/docker_compose/intel
     export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN}
     export NGINX_PORT=80
     export host_ip=${ip_address}
     source set_env.sh
-    cd intel/hpu/gaudi
+    cd hpu/gaudi
 
     sed -i "s/backend_address/$ip_address/g" $WORKPATH/ui/svelte/.env
 
diff --git a/CodeTrans/tests/test_compose_on_xeon.sh b/CodeTrans/tests/test_compose_on_xeon.sh
index 54ae5ee0c..42f80469e 100644
--- a/CodeTrans/tests/test_compose_on_xeon.sh
+++ b/CodeTrans/tests/test_compose_on_xeon.sh
@@ -39,13 +39,13 @@ function build_docker_images() {
 }
 
 function start_services() {
-    cd $WORKPATH/docker_compose
+    cd $WORKPATH/docker_compose/intel
     export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN}
     export NGINX_PORT=80
     export host_ip=${ip_address}
     source set_env.sh
 
-    cd intel/cpu/xeon/
+    cd cpu/xeon/
 
     sed -i "s/backend_address/$ip_address/g" $WORKPATH/ui/svelte/.env
 
diff --git a/CodeTrans/tests/test_compose_tgi_on_gaudi.sh b/CodeTrans/tests/test_compose_tgi_on_gaudi.sh
index 5914dc29c..051afce9d 100644
--- a/CodeTrans/tests/test_compose_tgi_on_gaudi.sh
+++ b/CodeTrans/tests/test_compose_tgi_on_gaudi.sh
@@ -34,13 +34,13 @@ function build_docker_images() {
 }
 
 function start_services() {
-    cd $WORKPATH/docker_compose
+    cd $WORKPATH/docker_compose/intel
     export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN}
     export NGINX_PORT=80
     export host_ip=${ip_address}
     source set_env.sh
 
-    cd intel/hpu/gaudi/
+    cd hpu/gaudi/
 
     sed -i "s/backend_address/$ip_address/g" $WORKPATH/ui/svelte/.env
 
diff --git a/CodeTrans/tests/test_compose_tgi_on_xeon.sh b/CodeTrans/tests/test_compose_tgi_on_xeon.sh
index 99a4f6a7d..00da9bde7 100644
--- a/CodeTrans/tests/test_compose_tgi_on_xeon.sh
+++ b/CodeTrans/tests/test_compose_tgi_on_xeon.sh
@@ -34,13 +34,13 @@ function build_docker_images() {
 }
 
 function start_services() {
-    cd $WORKPATH/docker_compose
+    cd $WORKPATH/docker_compose/intel
     export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN}
     export NGINX_PORT=80
     export host_ip=${ip_address}
     source set_env.sh
 
-    cd intel/cpu/xeon/
+    cd cpu/xeon/
 
     sed -i "s/backend_address/$ip_address/g" $WORKPATH/ui/svelte/.env
 
diff --git a/DocSum/docker_compose/intel/cpu/xeon/README.md b/DocSum/docker_compose/intel/cpu/xeon/README.md
index b06d6007e..f62a62c1e 100644
--- a/DocSum/docker_compose/intel/cpu/xeon/README.md
+++ b/DocSum/docker_compose/intel/cpu/xeon/README.md
@@ -27,9 +27,9 @@ Clone the GenAIExample repository and access the ChatQnA Intel Xeon platform Doc
 
 ```bash
 git clone https://github.com/opea-project/GenAIExamples.git
-cd GenAIExamples/DocSum/docker_compose
+cd GenAIExamples/DocSum/docker_compose/intel
 source set_env.sh
-cd intel/cpu/xeon/
+cd cpu/xeon/
 ```
 
 NOTE: by default vLLM does "warmup" at start, to optimize its performance for the specified model and the underlying platform, which can take long time. For development (and e.g. autoscaling) it can be skipped with `export VLLM_SKIP_WARMUP=true`.
@@ -49,6 +49,7 @@ Some HuggingFace resources, such as some models, are only accessible if you have
 To deploy the DocSum services, execute the `docker compose up` command with the appropriate arguments. For a default deployment, execute:
 
 ```bash
+cd cpu/xeon/
 docker compose up -d
 ```
 
diff --git a/DocSum/docker_compose/intel/hpu/gaudi/README.md b/DocSum/docker_compose/intel/hpu/gaudi/README.md
index 2edd6934f..98a0b132a 100644
--- a/DocSum/docker_compose/intel/hpu/gaudi/README.md
+++ b/DocSum/docker_compose/intel/hpu/gaudi/README.md
@@ -29,9 +29,9 @@ Clone the GenAIExample repository and access the DocSum Intel® Gaudi® platform
 
 ```bash
 git clone https://github.com/opea-project/GenAIExamples.git
-cd GenAIExamples/DocSum/docker_compose
+cd GenAIExamples/DocSum/docker_compose/intel
 source set_env.sh
-cd intel/hpu/gaudi/
+cd hpu/gaudi/
 ```
 
 NOTE: by default vLLM does "warmup" at start, to optimize its performance for the specified model and the underlying platform, which can take long time. For development (and e.g. autoscaling) it can be skipped with `export VLLM_SKIP_WARMUP=true`.
@@ -51,6 +51,7 @@ Some HuggingFace resources, such as some models, are only accessible if you have
 To deploy the DocSum services, execute the `docker compose up` command with the appropriate arguments. For a default deployment, execute:
 
 ```bash
+cd hpu/gaudi/
 docker compose up -d
 ```
 
diff --git a/DocSum/docker_compose/set_env.sh b/DocSum/docker_compose/intel/set_env.sh
similarity index 100%
rename from DocSum/docker_compose/set_env.sh
rename to DocSum/docker_compose/intel/set_env.sh
diff --git a/DocSum/tests/test_compose_on_gaudi.sh b/DocSum/tests/test_compose_on_gaudi.sh
index 64d306387..a75a16c5d 100644
--- a/DocSum/tests/test_compose_on_gaudi.sh
+++ b/DocSum/tests/test_compose_on_gaudi.sh
@@ -16,7 +16,7 @@ echo "REGISTRY=IMAGE_REPO=${IMAGE_REPO}"
 echo "TAG=IMAGE_TAG=${IMAGE_TAG}"
 export REGISTRY=${IMAGE_REPO}
 export TAG=${IMAGE_TAG}
-source $WORKPATH/docker_compose/set_env.sh
+source $WORKPATH/docker_compose/intel/set_env.sh
 
 export MODEL_CACHE=${model_cache:-"./data"}
 
diff --git a/DocSum/tests/test_compose_on_xeon.sh b/DocSum/tests/test_compose_on_xeon.sh
index c231e7264..ee2ed3025 100644
--- a/DocSum/tests/test_compose_on_xeon.sh
+++ b/DocSum/tests/test_compose_on_xeon.sh
@@ -17,7 +17,7 @@ echo "TAG=IMAGE_TAG=${IMAGE_TAG}"
 export REGISTRY=${IMAGE_REPO}
 export TAG=${IMAGE_TAG}
 
-source $WORKPATH/docker_compose/set_env.sh
+source $WORKPATH/docker_compose/intel/set_env.sh
 export MODEL_CACHE=${model_cache:-"./data"}
 export MAX_INPUT_TOKENS=2048
diff --git a/DocSum/tests/test_compose_tgi_on_gaudi.sh b/DocSum/tests/test_compose_tgi_on_gaudi.sh
index 06dd9b729..301a6f234 100644
--- a/DocSum/tests/test_compose_tgi_on_gaudi.sh
+++ b/DocSum/tests/test_compose_tgi_on_gaudi.sh
@@ -16,7 +16,7 @@ echo "TAG=IMAGE_TAG=${IMAGE_TAG}"
 export REGISTRY=${IMAGE_REPO}
 export TAG=${IMAGE_TAG}
 
-source $WORKPATH/docker_compose/set_env.sh
+source $WORKPATH/docker_compose/intel/set_env.sh
 export MODEL_CACHE=${model_cache:-"./data"}
 export MAX_INPUT_TOKENS=2048
diff --git a/DocSum/tests/test_compose_tgi_on_xeon.sh b/DocSum/tests/test_compose_tgi_on_xeon.sh
index 52edea31f..4ac895d7a 100644
--- a/DocSum/tests/test_compose_tgi_on_xeon.sh
+++ b/DocSum/tests/test_compose_tgi_on_xeon.sh
@@ -16,7 +16,7 @@ echo "TAG=IMAGE_TAG=${IMAGE_TAG}"
 export REGISTRY=${IMAGE_REPO}
 export TAG=${IMAGE_TAG}
 
-source $WORKPATH/docker_compose/set_env.sh
+source $WORKPATH/docker_compose/intel/set_env.sh
 export MODEL_CACHE=${model_cache:-"./data"}
 export MAX_INPUT_TOKENS=2048
diff --git a/Translation/docker_compose/intel/cpu/xeon/README.md b/Translation/docker_compose/intel/cpu/xeon/README.md
index 1af360be8..095ca54c3 100644
--- a/Translation/docker_compose/intel/cpu/xeon/README.md
+++ b/Translation/docker_compose/intel/cpu/xeon/README.md
@@ -42,9 +42,9 @@ Some HuggingFace resources, such as some models, are only accessible if you have
 To set up environment variables for deploying Translation service, source the set_env.sh script in this directory:
 
 ```
-cd ../../../
+cd ../../
 source set_env.sh
-cd intel/cpu/xeon
+cd cpu/xeon
 ```
 
 The set_env.sh script will prompt for required and optional environment variables used to configure the Translation service. If a value is not entered, the script will use a default value for the same. It will also generate a env file defining the desired configuration. Consult the section on [Translation Service configuration](#translation-service-configuration) for information on how service specific configuration parameters affect deployments.
diff --git a/Translation/docker_compose/intel/hpu/gaudi/README.md b/Translation/docker_compose/intel/hpu/gaudi/README.md
index 005504a1a..097cb42c8 100644
--- a/Translation/docker_compose/intel/hpu/gaudi/README.md
+++ b/Translation/docker_compose/intel/hpu/gaudi/README.md
@@ -42,9 +42,9 @@ Some HuggingFace resources, such as some models, are only accessible if you have
 To set up environment variables for deploying Translation service, source the _set_env.sh_ script in this directory:
 
 ```
-cd ../../../
+cd ../../
 source set_env.sh
-cd intel/hpu/gaudi/
+cd hpu/gaudi/
 ```
 
 The set_env.sh script will prompt for required and optional environment variables used to configure the Translation service. If a value is not entered, the script will use a default value for the same. It will also generate a env file defining the desired configuration. Consult the section on [Translation Service configuration](#translation-service-configuration) for information on how service specific configuration parameters affect deployments.
diff --git a/Translation/docker_compose/set_env.sh b/Translation/docker_compose/intel/set_env.sh
similarity index 95%
rename from Translation/docker_compose/set_env.sh
rename to Translation/docker_compose/intel/set_env.sh
index 25edfd402..37762fbd5 100644
--- a/Translation/docker_compose/set_env.sh
+++ b/Translation/docker_compose/intel/set_env.sh
@@ -2,7 +2,7 @@
 # Copyright (C) 2024 Intel Corporation
 # SPDX-License-Identifier: Apache-2.0
 
-pushd "../../" > /dev/null
+pushd "../../../" > /dev/null
 source .set_env.sh
 popd > /dev/null
 
diff --git a/Translation/tests/test_compose_on_gaudi.sh b/Translation/tests/test_compose_on_gaudi.sh
index 2f39c3628..afd33981a 100644
--- a/Translation/tests/test_compose_on_gaudi.sh
+++ b/Translation/tests/test_compose_on_gaudi.sh
@@ -40,10 +40,10 @@ function build_docker_images() {
 }
 
 function start_services() {
-    cd $WORKPATH/docker_compose
+    cd $WORKPATH/docker_compose/intel
     export host_ip=${ip_address}
     source set_env.sh
-    cd intel/hpu/gaudi
+    cd hpu/gaudi
 
     sed -i "s/backend_address/$ip_address/g" $WORKPATH/ui/svelte/.env
 
diff --git a/Translation/tests/test_compose_on_xeon.sh b/Translation/tests/test_compose_on_xeon.sh
index 6b06eacdb..7eeec8c7a 100644
--- a/Translation/tests/test_compose_on_xeon.sh
+++ b/Translation/tests/test_compose_on_xeon.sh
@@ -40,10 +40,10 @@ function build_docker_images() {
 }
 
 function start_services() {
-    cd $WORKPATH/docker_compose
+    cd $WORKPATH/docker_compose/intel
     export host_ip=${ip_address}
     source set_env.sh
-    cd intel/cpu/xeon
+    cd cpu/xeon
 
     sed -i "s/backend_address/$ip_address/g" $WORKPATH/ui/svelte/.env
 