GenAIExamples/CodeTrans/docker/gaudi/docker_compose.yaml
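This compose file defines the CodeTrans stack on Intel Gaudi: a tgi-gaudi model server (host port 8008), the opea/llm-tgi microservice that fronts it (9000), the opea/codetrans backend server (7777), and the opea/codetrans-ui frontend (5173), all joined on the default bridge network.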

# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
version: "3.8"
services:
  # Text Generation Inference server running on Habana Gaudi accelerators
  tgi_service:
    image: ghcr.io/huggingface/tgi-gaudi:2.0.1
    container_name: codetrans-tgi-service
    ports:
      - "8008:80"
    volumes:
      - "./data:/data"
    environment:
      no_proxy: ${no_proxy}
      http_proxy: ${http_proxy}
      https_proxy: ${https_proxy}
      HABANA_VISIBLE_DEVICES: all # expose all Gaudi devices to the container
      OMPI_MCA_btl_vader_single_copy_mechanism: none
      HF_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
    runtime: habana
    cap_add:
      - SYS_NICE
    ipc: host
    # serve ${LLM_MODEL_ID} with a 1024-token input cap and 2048-token total cap
    command: --model-id ${LLM_MODEL_ID} --max-input-length 1024 --max-total-tokens 2048
  # OPEA LLM microservice that relays requests to the TGI endpoint
  llm:
    image: opea/llm-tgi:latest
    container_name: llm-tgi-gaudi-server
    ports:
      - "9000:9000"
    ipc: host
    environment:
      no_proxy: ${no_proxy}
      http_proxy: ${http_proxy}
      https_proxy: ${https_proxy}
      TGI_LLM_ENDPOINT: ${TGI_LLM_ENDPOINT}
      HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
      LANGCHAIN_API_KEY: ${LANGCHAIN_API_KEY}
      LANGCHAIN_TRACING_V2: ${LANGCHAIN_TRACING_V2}
      LANGCHAIN_PROJECT: "opea-llm-service"
    restart: unless-stopped
  # CodeTrans backend (mega-service) that composes the microservices above
  codetrans-gaudi-backend-server:
    image: opea/codetrans:latest
    container_name: codetrans-gaudi-backend-server
    depends_on:
      - tgi_service
      - llm
    ports:
      - "7777:7777"
    environment:
      - no_proxy=${no_proxy}
      - https_proxy=${https_proxy}
      - http_proxy=${http_proxy}
      - MEGA_SERVICE_HOST_IP=${MEGA_SERVICE_HOST_IP}
      - LLM_SERVICE_HOST_IP=${LLM_SERVICE_HOST_IP}
    ipc: host
    restart: always
  # Web UI, which calls the backend via BASE_URL
  codetrans-gaudi-ui-server:
    image: opea/codetrans-ui:latest
    container_name: codetrans-gaudi-ui-server
    depends_on:
      - codetrans-gaudi-backend-server
    ports:
      - "5173:5173"
    environment:
      - no_proxy=${no_proxy}
      - https_proxy=${https_proxy}
      - http_proxy=${http_proxy}
      - BASE_URL=${BACKEND_SERVICE_ENDPOINT}
    ipc: host
    restart: always
networks:
  default:
    driver: bridge
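
A minimal launch sketch, assuming the repository layout above and a single-host deployment; the exported values (model id, host IP, endpoint route) are illustrative placeholders, not prescribed by this file:

cd GenAIExamples/CodeTrans/docker/gaudi
# Illustrative values; substitute your own. host_ip assumes the first reported address.
export host_ip=$(hostname -I | awk '{print $1}')
export HUGGINGFACEHUB_API_TOKEN=<your-hf-token>
export LLM_MODEL_ID=<a-tgi-compatible-model-id>
export TGI_LLM_ENDPOINT=http://${host_ip}:8008          # tgi_service host port above
export MEGA_SERVICE_HOST_IP=${host_ip}
export LLM_SERVICE_HOST_IP=${host_ip}
export BACKEND_SERVICE_ENDPOINT=http://${host_ip}:7777/v1/codetrans  # assumed route; confirm in the CodeTrans README
docker compose -f docker_compose.yaml up -d

Once up, the UI listens on host port 5173, the backend on 7777, the LLM microservice on 9000, and TGI on 8008, per the port mappings above.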