# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

services:
  whisper-service:
    image: ${REGISTRY:-opea}/whisper:${TAG:-latest}
    container_name: whisper-service
    ports:
      - ${WHISPER_SERVER_PORT:-7066}:7066
    ipc: host
    environment:
      no_proxy: ${no_proxy}
      http_proxy: ${http_proxy}
      https_proxy: ${https_proxy}
    restart: unless-stopped

  speecht5-service:
    image: ${REGISTRY:-opea}/speecht5:${TAG:-latest}
    container_name: speecht5-service
    ports:
      - ${SPEECHT5_SERVER_PORT:-7055}:7055
    ipc: host
    environment:
      no_proxy: ${no_proxy}
      http_proxy: ${http_proxy}
      https_proxy: ${https_proxy}
    restart: unless-stopped

  vllm-service:
    image: ${REGISTRY:-opea}/vllm:${TAG:-latest}
    container_name: vllm-service
    ports:
      - ${LLM_SERVER_PORT:-3006}:80
    volumes:
      - "${MODEL_CACHE:-./data}:/root/.cache/huggingface/hub"
    shm_size: 128g
    environment:
      no_proxy: ${no_proxy}
      http_proxy: ${http_proxy}
      https_proxy: ${https_proxy}
      HF_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
      LLM_MODEL_ID: ${LLM_MODEL_ID}
      VLLM_TORCH_PROFILER_DIR: "/mnt"
      LLM_SERVER_PORT: ${LLM_SERVER_PORT}
    healthcheck:
      test: ["CMD-SHELL", "curl -f http://$host_ip:${LLM_SERVER_PORT}/health || exit 1"]
      interval: 10s
      timeout: 10s
      retries: 100
    command: --model ${LLM_MODEL_ID} --host 0.0.0.0 --port 80

  audioqna-xeon-backend-server:
    image: ${REGISTRY:-opea}/audioqna:${TAG:-latest}
    container_name: audioqna-xeon-backend-server
    depends_on:
      - whisper-service
      - vllm-service
      - speecht5-service
    ports:
      - "3008:8888"
    environment:
      - no_proxy=${no_proxy}
      - https_proxy=${https_proxy}
      - http_proxy=${http_proxy}
      - MEGA_SERVICE_HOST_IP=${MEGA_SERVICE_HOST_IP}
      - WHISPER_SERVER_HOST_IP=${WHISPER_SERVER_HOST_IP}
      - WHISPER_SERVER_PORT=${WHISPER_SERVER_PORT}
      - LLM_SERVER_HOST_IP=${LLM_SERVER_HOST_IP}
      - LLM_SERVER_PORT=${LLM_SERVER_PORT}
      - LLM_MODEL_ID=${LLM_MODEL_ID}
      - SPEECHT5_SERVER_HOST_IP=${SPEECHT5_SERVER_HOST_IP}
      - SPEECHT5_SERVER_PORT=${SPEECHT5_SERVER_PORT}
    ipc: host
    restart: always

  audioqna-xeon-ui-server:
    image: ${REGISTRY:-opea}/audioqna-ui:${TAG:-latest}
    container_name: audioqna-xeon-ui-server
    depends_on:
      - audioqna-xeon-backend-server
    ports:
      - "5173:5173"
    environment:
      - no_proxy=${no_proxy}
      - https_proxy=${https_proxy}
      - http_proxy=${http_proxy}
      - CHAT_URL=${BACKEND_SERVICE_ENDPOINT}
    ipc: host
    restart: always

networks:
  default:
    driver: bridge
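
# Usage sketch (an assumption, not part of the upstream file): the variables referenced
# above (host_ip, REGISTRY, TAG, HUGGINGFACEHUB_API_TOKEN, LLM_MODEL_ID, MODEL_CACHE,
# the *_SERVER_HOST_IP / *_SERVER_PORT pairs, MEGA_SERVICE_HOST_IP, and
# BACKEND_SERVICE_ENDPOINT for the UI) are expected to be exported in the shell or
# defined in a .env file alongside this compose file. With those set, a typical launch is:
#
#   docker compose up -d
#
# The vLLM health check polls ${LLM_SERVER_PORT}/health, so the backend becomes usable
# once vllm-service reports healthy.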