#!/bin/bash
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
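
# End-to-end test for the CodeTrans example on Xeon: build the images, bring up
# the docker compose stack, validate the TGI and LLM services plus the megaservice,
# then tear everything down.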
set -xe
WORKPATH=$(dirname "$PWD")
LOG_PATH="$WORKPATH/tests"
ip_address=$(hostname -I | awk '{print $1}')
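
# Build the TGI LLM microservice, CodeTrans megaservice, and UI images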
function build_docker_images() {
    cd $WORKPATH
    git clone https://github.com/opea-project/GenAIComps.git
    cd GenAIComps
    docker build -t opea/gen-ai-comps:llm-tgi-server -f comps/llms/text-generation/tgi/Dockerfile .

    cd $WORKPATH
    docker build --no-cache -t opea/gen-ai-comps:codetrans-megaservice-server -f Dockerfile .

    cd $WORKPATH/ui
    docker build --no-cache -t opea/gen-ai-comps:codetrans-ui-server -f docker/Dockerfile .

    docker images
}
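
# Export the environment expected by docker_compose.yaml and bring the stack up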
function start_services() {
    cd $WORKPATH/docker-composer/xeon

    export http_proxy=${http_proxy}
    export https_proxy=${https_proxy}
    export LLM_MODEL_ID="HuggingFaceH4/mistral-7b-grok"
    export TGI_LLM_ENDPOINT="http://${ip_address}:8008"
    export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN}
    export MEGA_SERVICE_HOST_IP=${ip_address}
    export LLM_SERVICE_HOST_IP=${ip_address}
    export BACKEND_SERVICE_ENDPOINT="http://${ip_address}:7777/v1/codetrans"

    # Start Docker Containers
    # TODO: Replace the container name with a test-specific name
    docker compose -f docker_compose.yaml up -d

    # Give the services time to come up (TGI has to load the model)
    sleep 2m
}
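
# Sanity-check the TGI service and the LLM microservice individually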
function validate_microservices() {
    # TGI service
    curl http://${ip_address}:8008/generate \
        -X POST \
        -d '{"inputs":"What is Deep Learning?","parameters":{"max_new_tokens":17, "do_sample": true}}' \
        -H 'Content-Type: application/json' > ${LOG_PATH}/generate.log
    sleep 5s

    # LLM microservice
    curl http://${ip_address}:9000/v1/chat/completions \
        -X POST \
        -d '{"query":" ### System: Please translate the following Golang codes into Python codes. ### Original codes: '\'''\'''\''Golang \npackage main\n\nimport \"fmt\"\nfunc main() {\n fmt.Println(\"Hello, World!\");\n '\'''\'''\'' ### Translated codes:"}' \
        -H 'Content-Type: application/json' > ${LOG_PATH}/completions.log
    sleep 5s
}
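
# End-to-end check: translate a Golang "Hello, World!" program into Python via the megaservice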
function validate_megaservice() {
    # Curl the Mega Service
    curl http://${ip_address}:7777/v1/codetrans \
        -H "Content-Type: application/json" \
        -d '{"language_from": "Golang","language_to": "Python","source_code": "package main\n\nimport \"fmt\"\nfunc main() {\n fmt.Println(\"Hello, World!\");\n}"}' > ${LOG_PATH}/curl_megaservice.log

    echo "Checking response results, make sure the output is reasonable."
    local status=false
    # The translated code should at least contain a Python print call
    if [[ -f $LOG_PATH/curl_megaservice.log ]] && \
       [[ $(grep -c "print" $LOG_PATH/curl_megaservice.log) != 0 ]]; then
        status=true
    fi
    if [[ $status == false ]]; then
        echo "Response check failed, please check the logs in artifacts!"
        exit 1
    else
        echo "Response check succeeded!"
    fi

    echo "Checking response format, make sure the output format is acceptable for UI."
    # TODO
}
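
# Stop and remove every container declared in docker_compose.yaml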
function stop_docker() {
    cd $WORKPATH/docker-composer/xeon

    container_list=$(grep container_name docker_compose.yaml | cut -d':' -f2)
    for container_name in $container_list; do
        cid=$(docker ps -aq --filter "name=$container_name")
        if [[ ! -z "$cid" ]]; then docker stop $cid && docker rm $cid && sleep 1s; fi
    done
}
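
# Clean up leftovers, run the full build/start/validate cycle, then tear down and prune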
function main() {
    stop_docker

    build_docker_images
    start_services

    validate_microservices
    validate_megaservice

    stop_docker
    echo y | docker system prune
}
main