Check duplicated dockerfile (#1289)

Signed-off-by: ZePan110 <ze.pan@intel.com>
This commit is contained in:
ZePan110
2025-01-06 17:30:12 +08:00
committed by GitHub
parent b88d09e23f
commit aa5c91d7ee
77 changed files with 195 additions and 198 deletions

View File

@@ -27,7 +27,7 @@ docker build --no-cache -t opea/reranking-tei:latest --build-arg https_proxy=$ht
### 4. Build LLM Image
```bash
-docker build --no-cache -t opea/llm-tgi:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/llms/src/text-generation/Dockerfile .
+docker build --no-cache -t opea/llm-textgen:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/llms/src/text-generation/Dockerfile .
```
### 5. Build MegaService Docker Image
@@ -54,7 +54,7 @@ Then run the command `docker images`, you will have following images ready:
1. `opea/embedding-tei:latest`
2. `opea/web-retriever-chroma:latest`
3. `opea/reranking-tei:latest`
-4. `opea/llm-tgi:latest`
+4. `opea/llm-textgen:latest`
5. `opea/searchqna:latest`
6. `opea/searchqna-ui:latest`

View File

@@ -111,8 +111,8 @@ services:
retries: 100
command: --model-id ${LLM_MODEL_ID} --cuda-graphs 0
llm:
-    image: ${REGISTRY:-opea}/llm-tgi:${TAG:-latest}
-    container_name: llm-tgi-server
+    image: ${REGISTRY:-opea}/llm-textgen:${TAG:-latest}
+    container_name: llm-textgen-server
depends_on:
tgi-service:
condition: service_healthy

View File

@@ -29,7 +29,7 @@ docker build --no-cache -t opea/reranking-tei:latest --build-arg https_proxy=$ht
### 4. Build LLM Image
```bash
-docker build --no-cache -t opea/llm-tgi:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/llms/src/text-generation/Dockerfile .
+docker build --no-cache -t opea/llm-textgen:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/llms/src/text-generation/Dockerfile .
```
### 5. Build MegaService Docker Image
@@ -54,7 +54,7 @@ Then run the command `docker images`, you will have
1. `opea/embedding-tei:latest`
2. `opea/web-retriever-chroma:latest`
3. `opea/reranking-tei:latest`
-4. `opea/llm-tgi:latest`
+4. `opea/llm-textgen:latest`
5. `opea/searchqna:latest`
## 🚀 Set the environment variables

View File

@@ -128,8 +128,8 @@ services:
ipc: host
command: --model-id ${LLM_MODEL_ID} --max-input-length 2048 --max-total-tokens 4096
llm:
-    image: ${REGISTRY:-opea}/llm-tgi:${TAG:-latest}
-    container_name: llm-tgi-gaudi-server
+    image: ${REGISTRY:-opea}/llm-textgen:${TAG:-latest}
+    container_name: llm-textgen-gaudi-server
depends_on:
tgi-service:
condition: service_healthy