Commit 9180f1066d by Letong Han: Enable vllm for CodeTrans (#1626)
Date: 2025-03-07 10:56:21 +08:00

Set vLLM as the default LLM serving backend, and add the related Docker Compose files, READMEs, and test scripts.

Issue: https://github.com/opea-project/GenAIExamples/issues/1436

Signed-off-by: letonghan <letong.han@intel.com>
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>


# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
services:
  codetrans:
    build:
      args:
        http_proxy: ${http_proxy}
        https_proxy: ${https_proxy}
        no_proxy: ${no_proxy}
      context: ../
      dockerfile: ./Dockerfile
    image: ${REGISTRY:-opea}/codetrans:${TAG:-latest}
  codetrans-ui:
    build:
      context: ../ui
      dockerfile: ./docker/Dockerfile
    extends: codetrans
    image: ${REGISTRY:-opea}/codetrans-ui:${TAG:-latest}
  llm-textgen:
    build:
      context: GenAIComps
      dockerfile: comps/llms/src/text-generation/Dockerfile
    extends: codetrans
    image: ${REGISTRY:-opea}/llm-textgen:${TAG:-latest}
  vllm:
    build:
      context: vllm
      dockerfile: Dockerfile.cpu
    extends: codetrans
    image: ${REGISTRY:-opea}/vllm:${TAG:-latest}
  vllm-gaudi:
    build:
      context: vllm-fork
      dockerfile: Dockerfile.hpu
    extends: codetrans
    image: ${REGISTRY:-opea}/vllm-gaudi:${TAG:-latest}
  nginx:
    build:
      context: GenAIComps
      dockerfile: comps/third_parties/nginx/src/Dockerfile
    extends: codetrans
    image: ${REGISTRY:-opea}/nginx:${TAG:-latest}
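
Once these images are built, a deployment compose file references them by the same ${REGISTRY:-opea}/<name>:${TAG:-latest} tags. The snippet below is only a minimal sketch of how the vllm image built above could be wired up; it is not taken from the compose files in this PR, and the service name, port mapping, and environment variables are illustrative assumptions.

# Hypothetical deployment snippet (illustrative only, not part of the build file above)
services:
  vllm-service:                                    # assumed service name
    image: ${REGISTRY:-opea}/vllm:${TAG:-latest}   # image produced by the vllm build target above
    ports:
      - "9009:80"                                  # assumed host:container port mapping
    environment:
      HF_TOKEN: ${HF_TOKEN}                        # Hugging Face token for gated models (assumed)
      LLM_MODEL_ID: ${LLM_MODEL_ID}                # model to serve (assumed)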