Refactor Retrievers-related Examples (#1387)

Delete the redundant retrievers Docker image entry from docker_images_list.md.
Refactor the retrievers-related Examples READMEs.
Change every comps/retrievers/xxx/xxx/Dockerfile path to comps/retrievers/src/Dockerfile.

Fix the Examples CI issues for PR opea-project/GenAIComps#1138.
Signed-off-by: letonghan <letong.han@intel.com>
Letong Han
2025-01-16 14:21:48 +08:00
committed by GitHub
parent 698a06edbf
commit 4cabd55778
61 changed files with 138 additions and 124 deletions
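
The practical upshot for the Examples is that every deployment now builds one retriever image from the consolidated Dockerfile instead of a per-backend image. A representative build command after this change, taken from the README diffs below:

```bash
# Single retriever image; the vector-store backend is chosen at deploy time
# via RETRIEVER_COMPONENT_NAME rather than baked into the image name.
docker build --no-cache -t opea/retriever:latest \
  --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy \
  -f comps/retrievers/src/Dockerfile .
```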

View File

@@ -21,7 +21,7 @@ export https_proxy="Your_HTTPs_Proxy"
### 1. Build Retriever Image
```bash
-docker build --no-cache -t opea/retriever-redis:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/retrievers/redis/langchain/Dockerfile .
+docker build --no-cache -t opea/retriever:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/retrievers/src/Dockerfile .
```
### 2. Build Dataprep Image
@@ -61,7 +61,7 @@ docker build -t opea/nginx:latest --build-arg https_proxy=$https_proxy --build-a
Then run the command `docker images`, you will have the following Docker Images:
1. `opea/dataprep-redis:latest`
-2. `opea/retriever-redis:latest`
+2. `opea/retriever:latest`
3. `opea/chatqna:latest`
4. `opea/chatqna-ui:latest`
5. `opea/nginx:latest`

View File

@@ -39,7 +39,7 @@ services:
https_proxy: ${https_proxy}
command: --model-id ${EMBEDDING_MODEL_ID} --auto-truncate
retriever:
-image: ${REGISTRY:-opea}/retriever-redis:${TAG:-latest}
+image: ${REGISTRY:-opea}/retriever:${TAG:-latest}
container_name: retriever-redis-server
depends_on:
- redis-vector-db
@@ -55,6 +55,8 @@ services:
INDEX_NAME: ${INDEX_NAME}
TEI_EMBEDDING_ENDPOINT: http://tei-embedding-service:80
HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
+LOGFLAG: ${LOGFLAG}
+RETRIEVER_COMPONENT_NAME: "OPEA_RETRIEVER_REDIS"
restart: unless-stopped
tei-reranking-service:
image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.5
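
Alongside the image rename, each compose file in this change adds two environment variables to the retriever service: `LOGFLAG` to toggle debug logging and `RETRIEVER_COMPONENT_NAME` to select which backend the unified image loads (`OPEA_RETRIEVER_REDIS` here; the Pinecone and Qdrant files below set their own values). A quick smoke test of a running retriever might look like the sketch below; port 7000 and the `/v1/retrieval` route are assumptions based on the standard OPEA retriever microservice, not part of this diff, so adjust them to your deployment.

```bash
# Illustrative check only: send a dummy 768-dimensional embedding to the
# retriever and expect a JSON list of retrieved documents back.
test_embedding=$(python3 -c "print([0.1] * 768)")
curl http://localhost:7000/v1/retrieval \
  -X POST \
  -H "Content-Type: application/json" \
  -d "{\"text\":\"What is the revenue of Nike in 2023?\",\"embedding\":${test_embedding}}"
```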

View File

@@ -105,7 +105,7 @@ cd GenAIComps
### 1. Build Retriever Image
```bash
-docker build --no-cache -t opea/retriever-redis:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/retrievers/redis/langchain/Dockerfile .
+docker build --no-cache -t opea/retriever:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/retrievers/src/Dockerfile .
```
### 2. Build Dataprep Image
@@ -167,7 +167,7 @@ docker build -t opea/nginx:latest --build-arg https_proxy=$https_proxy --build-a
Then run the command `docker images`, you will have the following 5 Docker Images:
1. `opea/dataprep-redis:latest`
-2. `opea/retriever-redis:latest`
+2. `opea/retriever:latest`
3. `opea/chatqna:latest` or `opea/chatqna-without-rerank:latest`
4. `opea/chatqna-ui:latest`
5. `opea/nginx:latest`

View File

@@ -108,7 +108,7 @@ cd GenAIComps
### 1. Build Retriever Image
```bash
-docker build --no-cache -t opea/retriever-pinecone:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/retrievers/pinecone/langchain/Dockerfile .
+docker build --no-cache -t opea/retriever:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/retrievers/src/Dockerfile .
```
### 2. Build Dataprep Image
@@ -170,7 +170,7 @@ docker build -t opea/nginx:latest --build-arg https_proxy=$https_proxy --build-a
Then run the command `docker images`, you will have the following 5 Docker Images:
1. `opea/dataprep-pinecone:latest`
-2. `opea/retriever-pinecone:latest`
+2. `opea/retriever:latest`
3. `opea/chatqna:latest` or `opea/chatqna-without-rerank:latest`
4. `opea/chatqna-ui:latest`
5. `opea/nginx:latest`
@@ -352,7 +352,7 @@ click [here](https://raw.githubusercontent.com/opea-project/GenAIComps/v1.1/comp
Or run this command to get the file on a terminal.
```bash
-wget https://raw.githubusercontent.com/opea-project/GenAIComps/main/comps/retrievers/redis/data/nke-10k-2023.pdf
+wget https://raw.githubusercontent.com/opea-project/GenAIComps/v1.1/comps/retrievers/redis/data/nke-10k-2023.pdf
```

View File

@@ -73,7 +73,7 @@ cd GenAIComps
### 1. Build Retriever Image
```bash
-docker build --no-cache -t opea/retriever-qdrant:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/retrievers/qdrant/haystack/Dockerfile .
+docker build --no-cache -t opea/retriever:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/retrievers/src/Dockerfile .
```
### 2. Build Dataprep Image
@@ -128,7 +128,7 @@ docker build -t opea/nginx:latest --build-arg https_proxy=$https_proxy --build-a
Then run the command `docker images`, you will have the following 5 Docker Images:
1. `opea/dataprep-qdrant:latest`
-2. `opea/retriever-qdrant:latest`
+2. `opea/retriever:latest`
3. `opea/chatqna:latest`
4. `opea/chatqna-ui:latest`
5. `opea/nginx:latest`

View File

@@ -39,7 +39,7 @@ services:
https_proxy: ${https_proxy}
command: --model-id ${EMBEDDING_MODEL_ID} --auto-truncate
retriever:
-image: ${REGISTRY:-opea}/retriever-redis:${TAG:-latest}
+image: ${REGISTRY:-opea}/retriever:${TAG:-latest}
container_name: retriever-redis-server
depends_on:
- redis-vector-db
@@ -55,6 +55,8 @@ services:
INDEX_NAME: ${INDEX_NAME}
TEI_EMBEDDING_ENDPOINT: http://tei-embedding-service:80
HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
+LOGFLAG: ${LOGFLAG}
+RETRIEVER_COMPONENT_NAME: "OPEA_RETRIEVER_REDIS"
restart: unless-stopped
tei-reranking-service:
image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.5

View File

@@ -12,8 +12,6 @@ services:
- tei-embedding-service
ports:
- "6007:6007"
- "6008:6008"
- "6009:6009"
environment:
no_proxy: ${no_proxy}
http_proxy: ${http_proxy}
@@ -37,7 +35,7 @@ services:
https_proxy: ${https_proxy}
command: --model-id ${EMBEDDING_MODEL_ID} --auto-truncate
retriever:
-image: ${REGISTRY:-opea}/retriever-pinecone:${TAG:-latest}
+image: ${REGISTRY:-opea}/retriever:${TAG:-latest}
container_name: retriever-pinecone-server
ports:
- "7000:7000"
@@ -51,6 +49,8 @@ services:
LANGCHAIN_API_KEY: ${LANGCHAIN_API_KEY}
TEI_EMBEDDING_ENDPOINT: http://tei-embedding-service:80
HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
+LOGFLAG: ${LOGFLAG}
+RETRIEVER_COMPONENT_NAME: "OPEA_RETRIEVER_PINECONE"
restart: unless-stopped
tei-reranking-service:
image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.5

View File

@@ -22,8 +22,8 @@ services:
https_proxy: ${https_proxy}
QDRANT_HOST: qdrant-vector-db
QDRANT_PORT: 6333
-COLLECTION_NAME: ${INDEX_NAME}
-TEI_ENDPOINT: http://tei-embedding-service:80
+QDRANT_INDEX_NAME: ${INDEX_NAME}
+TEI_EMBEDDING_ENDPOINT: http://tei-embedding-service:80
HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
tei-embedding-service:
image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.5
@@ -39,7 +39,7 @@ services:
https_proxy: ${https_proxy}
command: --model-id ${EMBEDDING_MODEL_ID} --auto-truncate
retriever:
-image: ${REGISTRY:-opea}/retriever-qdrant:${TAG:-latest}
+image: ${REGISTRY:-opea}/retriever:${TAG:-latest}
container_name: retriever-qdrant-server
depends_on:
- qdrant-vector-db
@@ -54,6 +54,8 @@ services:
QDRANT_PORT: 6333
INDEX_NAME: ${INDEX_NAME}
TEI_EMBEDDING_ENDPOINT: ${TEI_EMBEDDING_ENDPOINT}
+LOGFLAG: ${LOGFLAG}
+RETRIEVER_COMPONENT_NAME: "OPEA_RETRIEVER_QDRANT"
restart: unless-stopped
tei-reranking-service:
image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.5

View File

@@ -39,7 +39,7 @@ services:
https_proxy: ${https_proxy}
command: --model-id ${EMBEDDING_MODEL_ID} --auto-truncate
retriever:
-image: ${REGISTRY:-opea}/retriever-redis:${TAG:-latest}
+image: ${REGISTRY:-opea}/retriever:${TAG:-latest}
container_name: retriever-redis-server
depends_on:
- redis-vector-db
@@ -55,6 +55,8 @@ services:
INDEX_NAME: ${INDEX_NAME}
TEI_EMBEDDING_ENDPOINT: http://tei-embedding-service:80
HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
+LOGFLAG: ${LOGFLAG}
+RETRIEVER_COMPONENT_NAME: "OPEA_RETRIEVER_REDIS"
restart: unless-stopped
tei-reranking-service:
image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.5

View File

@@ -39,7 +39,7 @@ services:
https_proxy: ${https_proxy}
command: --model-id ${EMBEDDING_MODEL_ID} --auto-truncate
retriever:
-image: ${REGISTRY:-opea}/retriever-redis:${TAG:-latest}
+image: ${REGISTRY:-opea}/retriever:${TAG:-latest}
container_name: retriever-redis-server
depends_on:
- redis-vector-db
@@ -55,6 +55,8 @@ services:
INDEX_NAME: ${INDEX_NAME}
TEI_EMBEDDING_ENDPOINT: http://tei-embedding-service:80
HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
+LOGFLAG: ${LOGFLAG}
+RETRIEVER_COMPONENT_NAME: "OPEA_RETRIEVER_REDIS"
restart: unless-stopped
tgi-service:
image: ghcr.io/huggingface/text-generation-inference:2.4.0-intel-cpu