GenAIExamples/Translation/docker_compose/amd/gpu/rocm/set_env_vllm.sh
Commit 340fa075bd by chyundunovDatamonsters: Adding files to deploy Translation application on ROCm vLLM (#1648)
Signed-off-by: Chingis Yundunov <YundunovCN@sibedge.com>
Signed-off-by: Artem Astafev <a.astafev@datamonsters.com>
Date: 2025-03-31 13:49:33 +08:00

#!/usr/bin/env bash
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
# HOST_IP is the host's internal IP used for service-to-service endpoints;
# EXTERNAL_HOST_IP is the address clients use to reach the backend endpoint.
# Fill in both before sourcing this script.
export HOST_IP=''
export EXTERNAL_HOST_IP=''
# vLLM serving: model, service port, and the endpoint consumed by the LLM microservice
export TRANSLATION_LLM_MODEL_ID="haoranxu/ALMA-13B"
export TRANSLATION_VLLM_SERVICE_PORT=8088
export TRANSLATION_LLM_ENDPOINT="http://${HOST_IP}:${TRANSLATION_VLLM_SERVICE_PORT}"
# LLM microservice port and the Hugging Face token (inherited from the environment)
export TRANSLATION_LLM_PORT=9088
export TRANSLATION_HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN}
# Host IPs for the translation megaservice and LLM microservice
export TRANSLATION_MEGA_SERVICE_HOST_IP=${HOST_IP}
export TRANSLATION_LLM_SERVICE_HOST_IP=${HOST_IP}
# Frontend UI, backend gateway, and NGINX exposure
export TRANSLATION_FRONTEND_SERVICE_IP=${HOST_IP}
export TRANSLATION_FRONTEND_SERVICE_PORT=18122
export TRANSLATION_BACKEND_SERVICE_NAME=translation
export TRANSLATION_BACKEND_SERVICE_IP=${HOST_IP}
export TRANSLATION_BACKEND_SERVICE_PORT=18121
export TRANSLATION_BACKEND_SERVICE_ENDPOINT="http://${EXTERNAL_HOST_IP}:${TRANSLATION_BACKEND_SERVICE_PORT}/v1/translation"
export TRANSLATION_NGINX_PORT=18123
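
A minimal usage sketch follows. It assumes the accompanying compose file in this directory is named compose_vllm.yaml and that the Translation gateway accepts the language_from/language_to/source_language JSON payload used elsewhere in GenAIExamples; both details are assumptions, not stated in this script.

#!/usr/bin/env bash
# Hypothetical walkthrough: edit HOST_IP and EXTERNAL_HOST_IP in set_env_vllm.sh first.
cd GenAIExamples/Translation/docker_compose/amd/gpu/rocm
source ./set_env_vllm.sh                     # load the variables defined above
docker compose -f compose_vllm.yaml up -d    # compose file name is an assumption

# Probe the backend endpoint exported above (payload schema is an assumption):
curl "${TRANSLATION_BACKEND_SERVICE_ENDPOINT}" \
  -H "Content-Type: application/json" \
  -d '{"language_from": "Chinese", "language_to": "English", "source_language": "我爱机器翻译。"}'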