Integrate set_env into UT scripts for CodeTrans. (#1868)

Signed-off-by: ZePan110 <ze.pan@intel.com>
Author: ZePan110
Committed: 2025-04-28 13:53:50 +08:00 (via GitHub)
Parent: 13c4749ca3
Commit: 04d527d3b0
9 changed files with 76 additions and 97 deletions
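
The same pattern is applied across every test script in the diff below: the long per-test `export` blocks are dropped and each `start_services` now sources the shared `set_env.sh` (or `set_env_vllm.sh` on ROCm) from the compose directory instead. A minimal sketch of the resulting shape on the Intel targets, assembled from the hunks that follow (the comments are editorial; the remaining lines of the function are simply not shown in these hunks):

```bash
function start_services() {
    cd $WORKPATH/docker_compose
    export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN}
    export host_ip=${ip_address}
    source set_env.sh          # shared defaults: model ID, ports, endpoints
    cd intel/cpu/xeon          # or intel/hpu/gaudi
    sed -i "s/backend_address/$ip_address/g" $WORKPATH/ui/svelte/.env
    # ... rest of the function (compose startup, readiness waits) is outside these hunks
}
```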


@@ -8,14 +8,14 @@
# which can be used to connect to the server from the Internet. It must be specified in the EXTERNAL_HOST_IP variable.
# If the server is used only on the internal network or has a direct external address,
# specify it in HOST_IP and in EXTERNAL_HOST_IP.
export HOST_IP=''
export EXTERNAL_HOST_IP=''
export HOST_IP=${ip_address}
export EXTERNAL_HOST_IP=${ip_address}
### Model ID
export CODETRANS_LLM_MODEL_ID="Qwen/Qwen2.5-Coder-7B-Instruct"
### The port of the TGI service. On this port, the TGI service will accept connections
export CODETRANS_TGI_SERVICE_PORT=18156
export CODETRANS_TGI_SERVICE_PORT=8008
### The endpoint of the TGI service to which requests to this service will be sent (formed from previously set variables)
export CODETRANS_TGI_LLM_ENDPOINT="http://${HOST_IP}:${CODETRANS_TGI_SERVICE_PORT}"
@@ -24,7 +24,7 @@ export CODETRANS_TGI_LLM_ENDPOINT="http://${HOST_IP}:${CODETRANS_TGI_SERVICE_POR
export CODETRANS_HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN}
### The port of the LLM service. On this port, the LLM service will accept connections
export CODETRANS_LLM_SERVICE_PORT=18157
export CODETRANS_LLM_SERVICE_PORT=9000
### The IP address or domain name of the server for CodeTrans MegaService
export CODETRANS_MEGA_SERVICE_HOST_IP=${HOST_IP}
@@ -36,7 +36,7 @@ export CODETRANS_LLM_SERVICE_HOST_IP=${HOST_IP}
export CODETRANS_FRONTEND_SERVICE_IP=${HOST_IP}
### The port of the frontend service
export CODETRANS_FRONTEND_SERVICE_PORT=18155
export CODETRANS_FRONTEND_SERVICE_PORT=5173
### Name of GenAI service for route requests to application
export CODETRANS_BACKEND_SERVICE_NAME=codetrans
@@ -45,10 +45,10 @@ export CODETRANS_BACKEND_SERVICE_NAME=codetrans
export CODETRANS_BACKEND_SERVICE_IP=${HOST_IP}
### The port of the backend service
export CODETRANS_BACKEND_SERVICE_PORT=18154
export CODETRANS_BACKEND_SERVICE_PORT=7777
### The port of the Nginx reverse proxy for application
export CODETRANS_NGINX_PORT=18153
export CODETRANS_NGINX_PORT=8088
### Endpoint of the backend service
export CODETRANS_BACKEND_SERVICE_URL="http://${EXTERNAL_HOST_IP}:${CODETRANS_BACKEND_SERVICE_PORT}/v1/codetrans"
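
The functional change in this set_env file is that `HOST_IP`/`EXTERNAL_HOST_IP` no longer need to be filled in by hand and the service ports move to the values the E2E scripts already probe. A minimal usage sketch; how `ip_address` is derived is an assumption for illustration, not something this PR specifies:

```bash
# Hypothetical caller: ip_address must be exported before sourcing, because
# HOST_IP and EXTERNAL_HOST_IP now expand from it.
export ip_address=$(hostname -I | awk '{print $1}')   # assumed way to pick the host IP
source set_env.sh
echo "Backend will be reachable at ${CODETRANS_BACKEND_SERVICE_URL}"
```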


@@ -8,14 +8,14 @@
# which can be used to connect to the server from the Internet. It must be specified in the EXTERNAL_HOST_IP variable.
# If the server is used only on the internal network or has a direct external address,
# specify it in HOST_IP and in EXTERNAL_HOST_IP.
export HOST_IP=''
export EXTERNAL_HOST_IP=''
export HOST_IP=${ip_address}
export EXTERNAL_HOST_IP=${ip_address}
### Model ID
export CODETRANS_LLM_MODEL_ID="Qwen/Qwen2.5-Coder-7B-Instruct"
### The port of the vLLM service. On this port, the vLLM service will accept connections
export CODETRANS_VLLM_SERVICE_PORT=18156
export CODETRANS_VLLM_SERVICE_PORT=8008
### The endpoint of the vLLM service to which requests to this service will be sent (formed from previously set variables)
export CODETRANS_LLM_ENDPOINT="http://${HOST_IP}:${CODETRANS_VLLM_SERVICE_PORT}"
@@ -24,7 +24,7 @@ export CODETRANS_LLM_ENDPOINT="http://${HOST_IP}:${CODETRANS_VLLM_SERVICE_PORT}"
export CODETRANS_HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN}
### The port of the LLM service. On this port, the LLM service will accept connections
export CODETRANS_LLM_SERVICE_PORT=18157
export CODETRANS_LLM_SERVICE_PORT=9000
### The IP address or domain name of the server for CodeTrans MegaService
export CODETRANS_MEGA_SERVICE_HOST_IP=${HOST_IP}
@@ -36,7 +36,7 @@ export CODETRANS_LLM_SERVICE_HOST_IP=${HOST_IP}
export CODETRANS_FRONTEND_SERVICE_IP=${HOST_IP}
### The port of the frontend service
export CODETRANS_FRONTEND_SERVICE_PORT=18155
export CODETRANS_FRONTEND_SERVICE_PORT=5173
### Name of GenAI service for route requests to application
export CODETRANS_BACKEND_SERVICE_NAME=codetrans
@@ -45,10 +45,10 @@ export CODETRANS_BACKEND_SERVICE_NAME=codetrans
export CODETRANS_BACKEND_SERVICE_IP=${HOST_IP}
### The port of the backend service
export CODETRANS_BACKEND_SERVICE_PORT=18154
export CODETRANS_BACKEND_SERVICE_PORT=7777
### The port of the Nginx reverse proxy for application
export CODETRANS_NGINX_PORT=18153
export CODETRANS_NGINX_PORT=8088
### Endpoint of the backend service
export CODETRANS_BACKEND_SERVICE_URL="http://${EXTERNAL_HOST_IP}:${CODETRANS_BACKEND_SERVICE_PORT}/v1/codetrans"
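
Beyond the same host and port defaults, the vLLM variant exposes `CODETRANS_LLM_ENDPOINT` for the model server. A hedged readiness check a test could run before issuing requests, assuming `ip_address` is already exported as in the previous sketch and that the vLLM OpenAI-compatible server answers on `/health` (that route is an assumption about vLLM, not part of this diff):

```bash
# Wait for the model server behind CODETRANS_LLM_ENDPOINT to report healthy;
# /health returning 200 is assumed vLLM behaviour, adjust if your server differs.
source set_env_vllm.sh
until curl -sf "${CODETRANS_LLM_ENDPOINT}/health" > /dev/null; do
    echo "waiting for vLLM at ${CODETRANS_LLM_ENDPOINT} ..."
    sleep 10
done
```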

CodeTrans/tests/README.md (new file, +45 lines)

@@ -0,0 +1,45 @@
# CodeTrans E2E test scripts
## Set the required environment variable
```bash
export HUGGINGFACEHUB_API_TOKEN="Your_Huggingface_API_Token"
```
## Run test
On Intel Xeon with TGI:
```bash
bash test_compose_tgi_on_xeon.sh
```
On Intel Xeon with vLLM:
```bash
bash test_compose_on_xeon.sh
```
On Intel Gaudi with TGI:
```bash
bash test_compose_tgi_on_gaudi.sh
```
On Intel Gaudi with vLLM:
```bash
bash test_compose_on_gaudi.sh
```
On AMD ROCm with TGI:
```bash
bash test_compose_on_rocm.sh
```
On AMD ROCm with vLLM:
```bash
bash test_compose_vllm_on_rocm.sh
```
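
Once one of these stacks is up, the scripts exercise the backend gateway at `/v1/codetrans` on port 7777 (see the hunks below). A manual smoke test could look like the following; the request fields are an assumption about the CodeTrans API, not something this PR adds:

```bash
# Hypothetical manual check of the CodeTrans gateway; language_from /
# language_to / source_code are assumed request fields.
curl -sS "http://${ip_address}:7777/v1/codetrans" \
  -H 'Content-Type: application/json' \
  -d '{"language_from": "Golang", "language_to": "Python", "source_code": "fmt.Println(\"Hello\")"}'
```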


@@ -42,25 +42,12 @@ function build_docker_images() {
}
function start_services() {
cd $WORKPATH/docker_compose/intel/hpu/gaudi
export LLM_MODEL_ID="mistralai/Mistral-7B-Instruct-v0.3"
export LLM_ENDPOINT="http://${ip_address}:8008"
export LLM_COMPONENT_NAME="OpeaTextGenService"
export NUM_CARDS=1
export BLOCK_SIZE=128
export MAX_NUM_SEQS=256
export MAX_SEQ_LEN_TO_CAPTURE=2048
cd $WORKPATH/docker_compose
export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN}
export MEGA_SERVICE_HOST_IP=${ip_address}
export LLM_SERVICE_HOST_IP=${ip_address}
export BACKEND_SERVICE_ENDPOINT="http://${ip_address}:7777/v1/codetrans"
export FRONTEND_SERVICE_IP=${ip_address}
export FRONTEND_SERVICE_PORT=5173
export BACKEND_SERVICE_NAME=codetrans
export BACKEND_SERVICE_IP=${ip_address}
export BACKEND_SERVICE_PORT=7777
export NGINX_PORT=80
export host_ip=${ip_address}
source set_env.sh
cd intel/hpu/gaudi
sed -i "s/backend_address/$ip_address/g" $WORKPATH/ui/svelte/.env


@@ -42,21 +42,7 @@ function build_docker_images() {
function start_services() {
cd $WORKPATH/docker_compose/amd/gpu/rocm/
export CODETRANS_TGI_SERVICE_PORT=8008
export CODETRANS_LLM_SERVICE_PORT=9000
export CODETRANS_LLM_MODEL_ID="Qwen/Qwen2.5-Coder-7B-Instruct"
export CODETRANS_TGI_LLM_ENDPOINT="http://${ip_address}:${CODETRANS_TGI_SERVICE_PORT}"
export CODETRANS_HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN}
export CODETRANS_MEGA_SERVICE_HOST_IP=${ip_address}
export CODETRANS_LLM_SERVICE_HOST_IP=${ip_address}
export CODETRANS_FRONTEND_SERVICE_IP=${ip_address}
export CODETRANS_FRONTEND_SERVICE_PORT=5173
export CODETRANS_BACKEND_SERVICE_NAME=codetrans
export CODETRANS_BACKEND_SERVICE_IP=${ip_address}
export CODETRANS_BACKEND_SERVICE_PORT=7777
export CODETRANS_NGINX_PORT=8088
export CODETRANS_BACKEND_SERVICE_URL="http://${ip_address}:${CODETRANS_BACKEND_SERVICE_PORT}/v1/codetrans"
export HOST_IP=${ip_address}
source set_env.sh
sed -i "s/backend_address/$ip_address/g" $WORKPATH/ui/svelte/.env


@@ -44,21 +44,13 @@ function build_docker_images() {
}
function start_services() {
cd $WORKPATH/docker_compose/intel/cpu/xeon/
export LLM_MODEL_ID="mistralai/Mistral-7B-Instruct-v0.3"
export LLM_ENDPOINT="http://${ip_address}:8008"
export LLM_COMPONENT_NAME="OpeaTextGenService"
cd $WORKPATH/docker_compose
export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN}
export MEGA_SERVICE_HOST_IP=${ip_address}
export LLM_SERVICE_HOST_IP=${ip_address}
export BACKEND_SERVICE_ENDPOINT="http://${ip_address}:7777/v1/codetrans"
export FRONTEND_SERVICE_IP=${ip_address}
export FRONTEND_SERVICE_PORT=5173
export BACKEND_SERVICE_NAME=codetrans
export BACKEND_SERVICE_IP=${ip_address}
export BACKEND_SERVICE_PORT=7777
export NGINX_PORT=80
export host_ip=${ip_address}
source set_env.sh
cd intel/cpu/xeon/
sed -i "s/backend_address/$ip_address/g" $WORKPATH/ui/svelte/.env


@@ -40,21 +40,13 @@ function build_docker_images() {
}
function start_services() {
cd $WORKPATH/docker_compose/intel/hpu/gaudi/
export LLM_MODEL_ID="mistralai/Mistral-7B-Instruct-v0.3"
export LLM_ENDPOINT="http://${ip_address}:8008"
export LLM_COMPONENT_NAME="OpeaTextGenService"
cd $WORKPATH/docker_compose
export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN}
export MEGA_SERVICE_HOST_IP=${ip_address}
export LLM_SERVICE_HOST_IP=${ip_address}
export BACKEND_SERVICE_ENDPOINT="http://${ip_address}:7777/v1/codetrans"
export FRONTEND_SERVICE_IP=${ip_address}
export FRONTEND_SERVICE_PORT=5173
export BACKEND_SERVICE_NAME=codetrans
export BACKEND_SERVICE_IP=${ip_address}
export BACKEND_SERVICE_PORT=7777
export NGINX_PORT=80
export host_ip=${ip_address}
source set_env.sh
cd intel/hpu/gaudi/
sed -i "s/backend_address/$ip_address/g" $WORKPATH/ui/svelte/.env


@@ -40,21 +40,13 @@ function build_docker_images() {
}
function start_services() {
cd $WORKPATH/docker_compose/intel/cpu/xeon/
export LLM_MODEL_ID="mistralai/Mistral-7B-Instruct-v0.3"
export LLM_ENDPOINT="http://${ip_address}:8008"
export LLM_COMPONENT_NAME="OpeaTextGenService"
cd $WORKPATH/docker_compose
export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN}
export MEGA_SERVICE_HOST_IP=${ip_address}
export LLM_SERVICE_HOST_IP=${ip_address}
export BACKEND_SERVICE_ENDPOINT="http://${ip_address}:7777/v1/codetrans"
export FRONTEND_SERVICE_IP=${ip_address}
export FRONTEND_SERVICE_PORT=5173
export BACKEND_SERVICE_NAME=codetrans
export BACKEND_SERVICE_IP=${ip_address}
export BACKEND_SERVICE_PORT=7777
export NGINX_PORT=80
export host_ip=${ip_address}
source set_env.sh
cd intel/cpu/xeon/
sed -i "s/backend_address/$ip_address/g" $WORKPATH/ui/svelte/.env


@@ -40,22 +40,7 @@ function build_docker_images() {
function start_services() {
cd $WORKPATH/docker_compose/amd/gpu/rocm/
export HOST_IP=${ip_address}
export CODETRANS_VLLM_SERVICE_PORT=8008
export CODETRANS_LLM_SERVICE_PORT=9000
export CODETRANS_LLM_MODEL_ID="Qwen/Qwen2.5-Coder-7B-Instruct"
export CODETRANS_LLM_ENDPOINT="http://${ip_address}:${CODETRANS_VLLM_SERVICE_PORT}"
export CODETRANS_HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN}
export CODETRANS_MEGA_SERVICE_HOST_IP=${ip_address}
export CODETRANS_LLM_SERVICE_HOST_IP=${ip_address}
export CODETRANS_FRONTEND_SERVICE_IP=${ip_address}
export CODETRANS_FRONTEND_SERVICE_PORT=5173
export CODETRANS_BACKEND_SERVICE_NAME=codetrans
export CODETRANS_BACKEND_SERVICE_IP=${ip_address}
export CODETRANS_BACKEND_SERVICE_PORT=7777
export CODETRANS_NGINX_PORT=8088
export CODETRANS_BACKEND_SERVICE_URL="http://${ip_address}:${CODETRANS_BACKEND_SERVICE_PORT}/v1/codetrans"
export HOST_IP=${ip_address}
source set_env_vllm.sh
sed -i "s/backend_address/$ip_address/g" $WORKPATH/ui/svelte/.env