From 3ca78867eb5d35b4b410ee13a6de2a26ce0a2c95 Mon Sep 17 00:00:00 2001 From: Liang Lv Date: Fri, 10 Jan 2025 15:36:58 +0800 Subject: [PATCH] Update example code for embedding dependency moving to 3rd_party (#1368) Signed-off-by: lvliang-intel --- .../docker_compose/intel/cpu/xeon/README.md | 2 +- .../docker_compose/intel/hpu/gaudi/README.md | 2 +- AvatarChatbot/docker_image_build/build.yaml | 4 +- ChatQnA/docker_compose/amd/gpu/rocm/README.md | 2 +- .../docker_compose/intel/cpu/aipc/README.md | 2 +- .../docker_compose/intel/cpu/xeon/README.md | 2 +- .../intel/cpu/xeon/README_pinecone.md | 2 +- .../intel/cpu/xeon/README_qdrant.md | 2 +- .../docker_compose/intel/hpu/gaudi/README.md | 2 +- ChatQnA/docker_compose/nvidia/gpu/README.md | 2 +- ChatQnA/docker_image_build/build.yaml | 2 +- .../docker_compose/intel/cpu/xeon/README.md | 2 +- .../docker_compose/intel/hpu/gaudi/README.md | 2 +- CodeTrans/docker_image_build/build.yaml | 2 +- GraphRAG/docker_image_build/build.yaml | 2 +- .../docker_compose/amd/gpu/rocm/README.md | 2 +- .../docker_compose/intel/cpu/xeon/README.md | 2 +- .../docker_compose/intel/hpu/gaudi/README.md | 2 +- MultimodalQnA/docker_image_build/build.yaml | 2 +- .../docker_compose/intel/cpu/xeon/README.md | 2 +- .../docker_compose/intel/hpu/gaudi/README.md | 2 +- Translation/docker_image_build/build.yaml | 2 +- .../docker_compose/intel/cpu/xeon/README.md | 2 +- VideoQnA/docker_image_build/build.yaml | 2 +- .../docker_compose/amd/gpu/rocm/README.md | 2 +- .../docker_compose/intel/cpu/xeon/README.md | 2 +- .../docker_compose/intel/hpu/gaudi/README.md | 2 +- VisualQnA/docker_image_build/build.yaml | 2 +- docker_images_list.md | 118 +++++++++--------- 29 files changed, 88 insertions(+), 88 deletions(-) diff --git a/AvatarChatbot/docker_compose/intel/cpu/xeon/README.md b/AvatarChatbot/docker_compose/intel/cpu/xeon/README.md index c4b9122a7..32b8c19ae 100644 --- a/AvatarChatbot/docker_compose/intel/cpu/xeon/README.md +++ b/AvatarChatbot/docker_compose/intel/cpu/xeon/README.md @@ -30,7 +30,7 @@ docker build -t opea/speecht5:latest --build-arg https_proxy=$https_proxy --buil ### 5. Build Animation Image ```bash -docker build -t opea/wav2lip:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/3rd_parties/wav2lip/src/Dockerfile . +docker build -t opea/wav2lip:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/third_parties/wav2lip/src/Dockerfile . docker build -t opea/animation:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/animation/src/Dockerfile . ``` diff --git a/AvatarChatbot/docker_compose/intel/hpu/gaudi/README.md b/AvatarChatbot/docker_compose/intel/hpu/gaudi/README.md index 8c421e1c4..68ae44fb2 100644 --- a/AvatarChatbot/docker_compose/intel/hpu/gaudi/README.md +++ b/AvatarChatbot/docker_compose/intel/hpu/gaudi/README.md @@ -30,7 +30,7 @@ docker build -t opea/speecht5-gaudi:latest --build-arg https_proxy=$https_proxy ### 5. Build Animation Image ```bash -docker build -t opea/wav2lip-gaudi:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/3rd_parties/wav2lip/src/Dockerfile.intel_hpu . +docker build -t opea/wav2lip-gaudi:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/third_parties/wav2lip/src/Dockerfile.intel_hpu . docker build -t opea/animation:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/animation/src/Dockerfile . 
``` diff --git a/AvatarChatbot/docker_image_build/build.yaml b/AvatarChatbot/docker_image_build/build.yaml index 677b6f86c..3b47269d3 100644 --- a/AvatarChatbot/docker_image_build/build.yaml +++ b/AvatarChatbot/docker_image_build/build.yaml @@ -56,13 +56,13 @@ services: wav2lip-gaudi: build: context: GenAIComps - dockerfile: comps/3rd_parties/wav2lip/src/Dockerfile.intel_hpu + dockerfile: comps/third_parties/wav2lip/src/Dockerfile.intel_hpu extends: avatarchatbot image: ${REGISTRY:-opea}/wav2lip-gaudi:${TAG:-latest} wav2lip: build: context: GenAIComps - dockerfile: comps/3rd_parties/wav2lip/src/Dockerfile + dockerfile: comps/third_parties/wav2lip/src/Dockerfile extends: avatarchatbot image: ${REGISTRY:-opea}/wav2lip:${TAG:-latest} animation: diff --git a/ChatQnA/docker_compose/amd/gpu/rocm/README.md b/ChatQnA/docker_compose/amd/gpu/rocm/README.md index bde630795..400cf325d 100644 --- a/ChatQnA/docker_compose/amd/gpu/rocm/README.md +++ b/ChatQnA/docker_compose/amd/gpu/rocm/README.md @@ -138,7 +138,7 @@ cd ../../../.. ```bash cd GenAIComps -docker build -t opea/nginx:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/3rd_parties/nginx/src/Dockerfile . +docker build -t opea/nginx:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/third_parties/nginx/src/Dockerfile . ``` Then run the command `docker images`, you will have the following 5 Docker Images: diff --git a/ChatQnA/docker_compose/intel/cpu/aipc/README.md b/ChatQnA/docker_compose/intel/cpu/aipc/README.md index cb167337d..860629fa4 100644 --- a/ChatQnA/docker_compose/intel/cpu/aipc/README.md +++ b/ChatQnA/docker_compose/intel/cpu/aipc/README.md @@ -55,7 +55,7 @@ docker build --no-cache -t opea/chatqna-ui:latest --build-arg https_proxy=$https ```bash cd GenAIComps -docker build -t opea/nginx:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/3rd_parties/nginx/src/Dockerfile . +docker build -t opea/nginx:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/third_parties/nginx/src/Dockerfile . ``` Then run the command `docker images`, you will have the following 6 Docker Images: diff --git a/ChatQnA/docker_compose/intel/cpu/xeon/README.md b/ChatQnA/docker_compose/intel/cpu/xeon/README.md index 349807e67..91aa86789 100644 --- a/ChatQnA/docker_compose/intel/cpu/xeon/README.md +++ b/ChatQnA/docker_compose/intel/cpu/xeon/README.md @@ -161,7 +161,7 @@ docker build --no-cache -t opea/chatqna-conversation-ui:latest --build-arg https ```bash cd GenAIComps -docker build -t opea/nginx:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/3rd_parties/nginx/src/Dockerfile . +docker build -t opea/nginx:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/third_parties/nginx/src/Dockerfile . ``` Then run the command `docker images`, you will have the following 5 Docker Images: diff --git a/ChatQnA/docker_compose/intel/cpu/xeon/README_pinecone.md b/ChatQnA/docker_compose/intel/cpu/xeon/README_pinecone.md index 07a94db5b..cd1737d42 100644 --- a/ChatQnA/docker_compose/intel/cpu/xeon/README_pinecone.md +++ b/ChatQnA/docker_compose/intel/cpu/xeon/README_pinecone.md @@ -164,7 +164,7 @@ docker build --no-cache -t opea/chatqna-conversation-ui:latest --build-arg https ```bash cd GenAIComps -docker build -t opea/nginx:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/3rd_parties/nginx/src/Dockerfile . 
+docker build -t opea/nginx:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/third_parties/nginx/src/Dockerfile . ``` Then run the command `docker images`, you will have the following 5 Docker Images: diff --git a/ChatQnA/docker_compose/intel/cpu/xeon/README_qdrant.md b/ChatQnA/docker_compose/intel/cpu/xeon/README_qdrant.md index 3426353d9..7cb4241ee 100644 --- a/ChatQnA/docker_compose/intel/cpu/xeon/README_qdrant.md +++ b/ChatQnA/docker_compose/intel/cpu/xeon/README_qdrant.md @@ -122,7 +122,7 @@ cd ../../../.. ```bash cd GenAIComps -docker build -t opea/nginx:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/3rd_parties/nginx/src/Dockerfile . +docker build -t opea/nginx:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/third_parties/nginx/src/Dockerfile . ``` Then run the command `docker images`, you will have the following 5 Docker Images: diff --git a/ChatQnA/docker_compose/intel/hpu/gaudi/README.md b/ChatQnA/docker_compose/intel/hpu/gaudi/README.md index a0b0df5b2..5276321e6 100644 --- a/ChatQnA/docker_compose/intel/hpu/gaudi/README.md +++ b/ChatQnA/docker_compose/intel/hpu/gaudi/README.md @@ -151,7 +151,7 @@ docker build --no-cache -t opea/chatqna-conversation-ui:latest --build-arg https ```bash cd GenAIComps -docker build -t opea/nginx:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/3rd_parties/nginx/src/Dockerfile . +docker build -t opea/nginx:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/third_parties/nginx/src/Dockerfile . ``` Then run the command `docker images`, you will have the following 5 Docker Images: diff --git a/ChatQnA/docker_compose/nvidia/gpu/README.md b/ChatQnA/docker_compose/nvidia/gpu/README.md index 2129766bf..4b21130f1 100644 --- a/ChatQnA/docker_compose/nvidia/gpu/README.md +++ b/ChatQnA/docker_compose/nvidia/gpu/README.md @@ -148,7 +148,7 @@ cd ../../.. ```bash cd GenAIComps -docker build -t opea/nginx:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/3rd_parties/nginx/src/Dockerfile . +docker build -t opea/nginx:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/third_parties/nginx/src/Dockerfile . ``` Then run the command `docker images`, you will have the following 5 Docker Images: diff --git a/ChatQnA/docker_image_build/build.yaml b/ChatQnA/docker_image_build/build.yaml index a93a4b6a5..ac85d0ab0 100644 --- a/ChatQnA/docker_image_build/build.yaml +++ b/ChatQnA/docker_image_build/build.yaml @@ -116,6 +116,6 @@ services: nginx: build: context: GenAIComps - dockerfile: comps/3rd_parties/nginx/src/Dockerfile + dockerfile: comps/third_parties/nginx/src/Dockerfile extends: chatqna image: ${REGISTRY:-opea}/nginx:${TAG:-latest} diff --git a/CodeTrans/docker_compose/intel/cpu/xeon/README.md b/CodeTrans/docker_compose/intel/cpu/xeon/README.md index 3c878adaa..b5aebe869 100755 --- a/CodeTrans/docker_compose/intel/cpu/xeon/README.md +++ b/CodeTrans/docker_compose/intel/cpu/xeon/README.md @@ -41,7 +41,7 @@ docker build -t opea/codetrans-ui:latest --build-arg https_proxy=$https_proxy -- ```bash cd GenAIComps -docker build -t opea/nginx:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/3rd_parties/nginx/src/Dockerfile . 
+docker build -t opea/nginx:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/third_parties/nginx/src/Dockerfile . ``` Then run the command `docker images`, you will have the following Docker Images: diff --git a/CodeTrans/docker_compose/intel/hpu/gaudi/README.md b/CodeTrans/docker_compose/intel/hpu/gaudi/README.md index 668af444e..00241d6ac 100755 --- a/CodeTrans/docker_compose/intel/hpu/gaudi/README.md +++ b/CodeTrans/docker_compose/intel/hpu/gaudi/README.md @@ -33,7 +33,7 @@ docker build -t opea/codetrans-ui:latest --build-arg https_proxy=$https_proxy -- ```bash cd GenAIComps -docker build -t opea/nginx:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/3rd_parties/nginx/src/Dockerfile . +docker build -t opea/nginx:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/third_parties/nginx/src/Dockerfile . ``` Then run the command `docker images`, you will have the following Docker Images: diff --git a/CodeTrans/docker_image_build/build.yaml b/CodeTrans/docker_image_build/build.yaml index d9a565fcd..bfc007061 100644 --- a/CodeTrans/docker_image_build/build.yaml +++ b/CodeTrans/docker_image_build/build.yaml @@ -26,6 +26,6 @@ services: nginx: build: context: GenAIComps - dockerfile: comps/3rd_parties/nginx/src/Dockerfile + dockerfile: comps/third_parties/nginx/src/Dockerfile extends: codetrans image: ${REGISTRY:-opea}/nginx:${TAG:-latest} diff --git a/GraphRAG/docker_image_build/build.yaml b/GraphRAG/docker_image_build/build.yaml index d0363ffc9..0be2bcb52 100644 --- a/GraphRAG/docker_image_build/build.yaml +++ b/GraphRAG/docker_image_build/build.yaml @@ -36,7 +36,7 @@ services: https_proxy: ${https_proxy} no_proxy: ${no_proxy} context: GenAIComps - dockerfile: comps/3rd_parties/nginx/src/Dockerfile + dockerfile: comps/third_parties/nginx/src/Dockerfile image: ${REGISTRY:-opea}/nginx:${TAG:-latest} graphrag-ui: build: diff --git a/MultimodalQnA/docker_compose/amd/gpu/rocm/README.md b/MultimodalQnA/docker_compose/amd/gpu/rocm/README.md index 7fd71fa11..bf5b7e494 100644 --- a/MultimodalQnA/docker_compose/amd/gpu/rocm/README.md +++ b/MultimodalQnA/docker_compose/amd/gpu/rocm/README.md @@ -25,7 +25,7 @@ Build embedding-multimodal-bridgetower docker image ```bash git clone https://github.com/opea-project/GenAIComps.git cd GenAIComps -docker build --no-cache -t opea/embedding-multimodal-bridgetower:latest --build-arg EMBEDDER_PORT=$EMBEDDER_PORT --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/embeddings/src/integrations/dependency/bridgetower/Dockerfile . +docker build --no-cache -t opea/embedding-multimodal-bridgetower:latest --build-arg EMBEDDER_PORT=$EMBEDDER_PORT --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/third_parties/bridgetower/src/Dockerfile . 
``` Build embedding microservice image diff --git a/MultimodalQnA/docker_compose/intel/cpu/xeon/README.md b/MultimodalQnA/docker_compose/intel/cpu/xeon/README.md index 25ace0196..2306f0499 100644 --- a/MultimodalQnA/docker_compose/intel/cpu/xeon/README.md +++ b/MultimodalQnA/docker_compose/intel/cpu/xeon/README.md @@ -112,7 +112,7 @@ Build embedding-multimodal-bridgetower docker image ```bash git clone https://github.com/opea-project/GenAIComps.git cd GenAIComps -docker build --no-cache -t opea/embedding-multimodal-bridgetower:latest --build-arg EMBEDDER_PORT=$EMBEDDER_PORT --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/embeddings/src/integrations/dependency/bridgetower/Dockerfile . +docker build --no-cache -t opea/embedding-multimodal-bridgetower:latest --build-arg EMBEDDER_PORT=$EMBEDDER_PORT --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/third_parties/bridgetower/src/Dockerfile . ``` Build embedding microservice image diff --git a/MultimodalQnA/docker_compose/intel/hpu/gaudi/README.md b/MultimodalQnA/docker_compose/intel/hpu/gaudi/README.md index 2262f34e9..a8fc93bb3 100644 --- a/MultimodalQnA/docker_compose/intel/hpu/gaudi/README.md +++ b/MultimodalQnA/docker_compose/intel/hpu/gaudi/README.md @@ -63,7 +63,7 @@ Build embedding-multimodal-bridgetower docker image ```bash git clone https://github.com/opea-project/GenAIComps.git cd GenAIComps -docker build --no-cache -t opea/embedding-multimodal-bridgetower:latest --build-arg EMBEDDER_PORT=$EMBEDDER_PORT --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/embeddings/src/integrations/dependency/bridgetower/Dockerfile . +docker build --no-cache -t opea/embedding-multimodal-bridgetower:latest --build-arg EMBEDDER_PORT=$EMBEDDER_PORT --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/third_parties/bridgetower/src/Dockerfile . ``` Build embedding microservice image diff --git a/MultimodalQnA/docker_image_build/build.yaml b/MultimodalQnA/docker_image_build/build.yaml index 192ee933a..7a02452d2 100644 --- a/MultimodalQnA/docker_image_build/build.yaml +++ b/MultimodalQnA/docker_image_build/build.yaml @@ -20,7 +20,7 @@ services: embedding-multimodal-bridgetower: build: context: GenAIComps - dockerfile: comps/embeddings/src/integrations/dependency/bridgetower/Dockerfile + dockerfile: comps/third_parties/bridgetower/src/Dockerfile extends: multimodalqna image: ${REGISTRY:-opea}/embedding-multimodal-bridgetower:${TAG:-latest} embedding: diff --git a/Translation/docker_compose/intel/cpu/xeon/README.md b/Translation/docker_compose/intel/cpu/xeon/README.md index f1f109a17..4a41cb538 100644 --- a/Translation/docker_compose/intel/cpu/xeon/README.md +++ b/Translation/docker_compose/intel/cpu/xeon/README.md @@ -60,7 +60,7 @@ docker build -t opea/translation-ui:latest --build-arg https_proxy=$https_proxy ```bash cd GenAIComps -docker build -t opea/nginx:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/3rd_parties/nginx/src/Dockerfile . +docker build -t opea/nginx:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/third_parties/nginx/src/Dockerfile . 
``` Then run the command `docker images`, you will have the following Docker Images: diff --git a/Translation/docker_compose/intel/hpu/gaudi/README.md b/Translation/docker_compose/intel/hpu/gaudi/README.md index 432d42a81..31ed7da04 100644 --- a/Translation/docker_compose/intel/hpu/gaudi/README.md +++ b/Translation/docker_compose/intel/hpu/gaudi/README.md @@ -52,7 +52,7 @@ docker build -t opea/translation-ui:latest --build-arg https_proxy=$https_proxy ```bash cd GenAIComps -docker build -t opea/nginx:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/3rd_parties/nginx/src/Dockerfile . +docker build -t opea/nginx:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/third_parties/nginx/src/Dockerfile . ``` Then run the command `docker images`, you will have the following four Docker Images: diff --git a/Translation/docker_image_build/build.yaml b/Translation/docker_image_build/build.yaml index faf758555..1dad29cb7 100644 --- a/Translation/docker_image_build/build.yaml +++ b/Translation/docker_image_build/build.yaml @@ -26,6 +26,6 @@ services: nginx: build: context: GenAIComps - dockerfile: comps/3rd_parties/nginx/src/Dockerfile + dockerfile: comps/third_parties/nginx/src/Dockerfile extends: translation image: ${REGISTRY:-opea}/nginx:${TAG:-latest} diff --git a/VideoQnA/docker_compose/intel/cpu/xeon/README.md b/VideoQnA/docker_compose/intel/cpu/xeon/README.md index 2d8a7a967..06bffd797 100644 --- a/VideoQnA/docker_compose/intel/cpu/xeon/README.md +++ b/VideoQnA/docker_compose/intel/cpu/xeon/README.md @@ -53,7 +53,7 @@ First of all, you need to build Docker Images locally and install the python pac ```bash git clone https://github.com/opea-project/GenAIComps.git cd GenAIComps -docker build -t opea/embedding-multimodal-clip:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/embeddings/src/integrations/dependency/clip/Dockerfile . +docker build -t opea/embedding-multimodal-clip:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/third_parties/clip/src/Dockerfile . ``` ### 2. Build Retriever Image diff --git a/VideoQnA/docker_image_build/build.yaml b/VideoQnA/docker_image_build/build.yaml index 5305c450c..45f0dd92a 100644 --- a/VideoQnA/docker_image_build/build.yaml +++ b/VideoQnA/docker_image_build/build.yaml @@ -26,7 +26,7 @@ services: embedding-multimodal-clip: build: context: GenAIComps - dockerfile: comps/embeddings/src/integrations/dependency/clip/Dockerfile + dockerfile: comps/third_parties/clip/src/Dockerfile extends: videoqna image: ${REGISTRY:-opea}/embedding-multimodal-clip:${TAG:-latest} retriever-vdms: diff --git a/VisualQnA/docker_compose/amd/gpu/rocm/README.md b/VisualQnA/docker_compose/amd/gpu/rocm/README.md index 9039d5326..7e66471e2 100644 --- a/VisualQnA/docker_compose/amd/gpu/rocm/README.md +++ b/VisualQnA/docker_compose/amd/gpu/rocm/README.md @@ -12,7 +12,7 @@ First of all, you need to build Docker Images locally and install the python pac git clone https://github.com/opea-project/GenAIComps.git cd GenAIComps docker build --no-cache -t opea/lvm-tgi:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/lvms/tgi-llava/Dockerfile . -docker build --no-cache -t opea/nginx:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/3rd_parties/nginx/src/Dockerfile . 
+docker build --no-cache -t opea/nginx:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/third_parties/nginx/src/Dockerfile . ``` ### 2. Build MegaService Docker Image diff --git a/VisualQnA/docker_compose/intel/cpu/xeon/README.md b/VisualQnA/docker_compose/intel/cpu/xeon/README.md index 74e37860c..62af68fcb 100644 --- a/VisualQnA/docker_compose/intel/cpu/xeon/README.md +++ b/VisualQnA/docker_compose/intel/cpu/xeon/README.md @@ -42,7 +42,7 @@ First of all, you need to build Docker Images locally and install the python pac git clone https://github.com/opea-project/GenAIComps.git cd GenAIComps docker build --no-cache -t opea/lvm-tgi:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/lvms/tgi-llava/Dockerfile . -docker build --no-cache -t opea/nginx:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/3rd_parties/nginx/src/Dockerfile . +docker build --no-cache -t opea/nginx:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/third_parties/nginx/src/Dockerfile . ``` ### 2. Build MegaService Docker Image diff --git a/VisualQnA/docker_compose/intel/hpu/gaudi/README.md b/VisualQnA/docker_compose/intel/hpu/gaudi/README.md index 86fa1161d..5b4d6313e 100644 --- a/VisualQnA/docker_compose/intel/hpu/gaudi/README.md +++ b/VisualQnA/docker_compose/intel/hpu/gaudi/README.md @@ -12,7 +12,7 @@ First of all, you need to build Docker Images locally. This step can be ignored git clone https://github.com/opea-project/GenAIComps.git cd GenAIComps docker build --no-cache -t opea/lvm-tgi:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/lvms/tgi-llava/Dockerfile . -docker build --no-cache -t opea/nginx:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/3rd_parties/nginx/src/Dockerfile . +docker build --no-cache -t opea/nginx:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/third_parties/nginx/src/Dockerfile . ``` ### 2. Pull TGI Gaudi Image diff --git a/VisualQnA/docker_image_build/build.yaml b/VisualQnA/docker_image_build/build.yaml index f6b5e9e2c..9aacda7f5 100644 --- a/VisualQnA/docker_image_build/build.yaml +++ b/VisualQnA/docker_image_build/build.yaml @@ -26,6 +26,6 @@ services: nginx: build: context: GenAIComps - dockerfile: comps/3rd_parties/nginx/src/Dockerfile + dockerfile: comps/third_parties/nginx/src/Dockerfile extends: visualqna image: ${REGISTRY:-opea}/nginx:${TAG:-latest} diff --git a/docker_images_list.md b/docker_images_list.md index 27e5a4bf3..216ddf8fa 100644 --- a/docker_images_list.md +++ b/docker_images_list.md @@ -40,62 +40,62 @@ Take ChatQnA for example. 
ChatQnA is a chatbot application service based on the ## Microservice images -| Microservice Images | Dockerfile | Description | -| ------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------ | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| [opea/agent]() | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/agent/src/Dockerfile) | The docker image exposed the OPEA agent microservice for GenAI application use | -| [opea/asr](https://hub.docker.com/r/opea/asr) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/asr/src/Dockerfile) | The docker image exposed the OPEA Audio-Speech-Recognition microservice for GenAI application use | -| [opea/chathistory-mongo-server](https://hub.docker.com/r/opea/chathistory-mongo-server) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/chathistory/mongo/Dockerfile) | The docker image exposes OPEA Chat History microservice which based on MongoDB database, designed to allow user to store, retrieve and manage chat conversations | -| [opea/dataprep-milvus](https://hub.docker.com/r/opea/dataprep-milvus) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/dataprep/milvus/langchain/Dockerfile) | The docker image exposed the OPEA dataprep microservice based on milvus vectordb for GenAI application use | -| [opea/dataprep-multimodal-vdms](https://hub.docker.com/r/opea/dataprep-multimodal-vdms) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/dataprep/vdms/multimodal_langchain/Dockerfile) | This docker image exposes an OPEA dataprep microservice based on a multi-modal VDMS for use by GenAI applications. | -| [opea/dataprep-multimodal-redis](https://hub.docker.com/r/opea/dataprep-multimodal-redis) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/dataprep/redis/langchain/Dockerfile) | This docker image exposes an OPEA dataprep microservice based on a multi-modal redis for use by GenAI applications. 
| -| [opea/dataprep-on-ray-redis](https://hub.docker.com/r/opea/dataprep-on-ray-redis) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/dataprep/redis/langchain_ray/Dockerfile) | The docker image exposed the OPEA dataprep microservice based on redis vectordb and optimized ray for GenAI application use | -| [opea/dataprep-pgvector](https://hub.docker.com/r/opea/dataprep-pgvector) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/dataprep/pgvector/langchain/Dockerfile) | The docker image exposed the OPEA dataprep microservice based on pgvector vectordb for GenAI application use | -| [opea/dataprep-pinecone](https://hub.docker.com/r/opea/dataprep-pinecone) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/dataprep/pinecone/langchain/Dockerfile) | The docker image exposed the OPEA dataprep microservice based on pincone vectordb for GenAI application use | -| [opea/dataprep-qdrant](https://hub.docker.com/r/opea/dataprep-qdrant) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/dataprep/qdrant/langchain/Dockerfile) | The docker image exposed the OPEA dataprep microservice based on qdrant vectordb for GenAI application use | -| [opea/dataprep-redis](https://hub.docker.com/r/opea/dataprep-redis) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/dataprep/redis/langchain/Dockerfile) | The docker image exposed the OPEA dataprep microservice based on redis vectordb Langchain framework for GenAI application use | -| [opea/dataprep-redis-llama-index](https://hub.docker.com/r/opea/dataprep-redis-llama-index) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/dataprep/redis/llama_index/Dockerfile) | The docker image exposed the OPEA dataprep microservice based on redis vectordb LlamaIndex framework for GenAI application use | -| [opea/dataprep-vdms](https://hub.docker.com/r/opea/dataprep-vdms) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/dataprep/vdms/langchain/Dockerfile) | This docker image exposes an OPEA dataprep microservice based on VDMS vectordb for use by GenAI applications. 
| -| [opea/embedding-langchain-mosec](https://hub.docker.com/r/opea/embedding-langchain-mosec) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/3rd_parties/nginx/src/Dockerfile) | The docker image exposed the OPEA mosec embedding microservice base on Langchain framework for GenAI application use | -| [opea/embedding-multimodal-clip](https://hub.docker.com/r/opea/embedding-multimodal-clip) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/embeddings/src/integrations/dependency/clip/Dockerfile) | The docker image exposes OPEA multimodal CLIP-based embedded microservices for use by GenAI applications | -| [opea/embedding](https://hub.docker.com/r/opea/embedding) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/embeddings/src/Dockerfile) | The docker image exposes OPEA multimodal embedded microservices for use by GenAI applications | -| [opea/embedding-multimodal-bridgetower](https://hub.docker.com/r/opea/embedding-multimodal-bridgetower) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/embeddings/src/integrations/dependency/bridgetower/Dockerfile) | The docker image exposes OPEA multimodal embedded microservices based on bridgetower for use by GenAI applications | -| [opea/embedding-multimodal-bridgetower-gaudi](https://hub.docker.com/r/opea/embedding-multimodal-bridgetower-gaudi) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/embeddings/src/integrations/dependency/bridgetower/Dockerfile.intel_hpu) | The docker image exposes OPEA multimodal embedded microservices based on bridgetower for use by GenAI applications on the Gaudi | -| [opea/feedbackmanagement](https://hub.docker.com/r/opea/feedbackmanagement) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/feedback_management/src/Dockerfile) | The docker image exposes that the OPEA feedback management microservice uses a MongoDB database for GenAI applications. 
| -| [opea/finetuning](https://hub.docker.com/r/opea/finetuning) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/finetuning/src/Dockerfile) | The docker image exposed the OPEA Fine-tuning microservice for GenAI application use | -| [opea/finetuning-gaudi](https://hub.docker.com/r/opea/finetuning-gaudi) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/finetuning/src/Dockerfile.intel_hpu) | The docker image exposed the OPEA Fine-tuning microservice for GenAI application use on the Gaudi | -| [opea/gmcrouter](https://hub.docker.com/r/opea/gmcrouter) | [Link](https://github.com/opea-project/GenAIInfra/blob/main/microservices-connector/Dockerfile.manager) | The docker image served as one of key parts of the OPEA GenAI Microservice Connector(GMC) to route the traffic among the microservices defined in GMC | -| [opea/gmcmanager](https://hub.docker.com/r/opea/gmcmanager) | [Link](https://github.com/opea-project/GenAIInfra/blob/main/microservices-connector/Dockerfile.router) | The docker image served as one of key parts of the OPEA GenAI Microservice Connector(GMC) to be controller manager to handle GMC CRD | -| [opea/guardrails]() | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/guardrails/src/guardrails/Dockerfile) | The docker image exposed the OPEA guardrail microservice to provide content review for GenAI application use | -| [opea/guardrails-toxicity-detection](https://hub.docker.com/r/opea/guardrails-toxicity-detection) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/guardrails/src/toxicity_detection/Dockerfile) | The docker image exposed the OPEA guardrail microservice to provide toxicity detection for GenAI application use | -| [opea/guardrails-pii-detection](https://hub.docker.com/r/opea/guardrails-pii-detection) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/guardrails/src/pii_detection/Dockerfile) | The docker image exposed the OPEA guardrail microservice to provide PII detection for GenAI application use | -| [opea/llm-docsum-tgi](https://hub.docker.com/r/opea/llm-docsum-tgi) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/llms/summarization/tgi/langchain/Dockerfile) | This docker image is designed to build a document summarization microservice using the HuggingFace Text Generation Inference(TGI) framework. The microservice accepts document input and generates a document summary. | -| [opea/llm-faqgen-tgi](https://hub.docker.com/r/opea/llm-faqgen-tgi) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/llms/faq-generation/tgi/langchain/Dockerfile) | This docker image is designed to build a frequently asked questions microservice using the HuggingFace Text Generation Inference(TGI) framework. The microservice accepts document input and generates a FAQ. 
| -| [opea/llm-textgen](https://hub.docker.com/r/opea/llm-textgen) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/llms/src/text-generation/Dockerfile) | The docker image exposed the OPEA LLM microservice upon TGI docker image for GenAI application use | -| [opea/llava-gaudi](https://hub.docker.com/r/opea/llava-hpu) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/lvms/llava/dependency/Dockerfile.intel_hpu) | The docker image exposed the OPEA microservice running LLaVA as a large visual model (LVM) service for GenAI application use on the Gaudi | -| [opea/lvm-tgi](https://hub.docker.com/r/opea/lvm-tgi) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/lvms/tgi-llava/Dockerfile) | This docker image is designed to build a large visual model (LVM) microservice using the HuggingFace Text Generation Inference(TGI) framework. The microservice accepts document input and generates a answer to question. | -| [opea/lvm-llava](https://hub.docker.com/r/opea/lvm-llava) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/lvms/llava/dependency/Dockerfile) | The docker image exposed the OPEA microservice running LLaVA as a large visual model (LVM) server for GenAI application use | -| [opea/lvm-llava-svc](https://hub.docker.com/r/opea/lvm-llava-svc) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/lvms/llava/Dockerfile) | The docker image exposed the OPEA microservice running LLaVA as a large visual model (LVM) service for GenAI application use | -| [opea/lvm-video-llama](https://hub.docker.com/r/opea/lvm-video-llama) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/lvms/video-llama/Dockerfile) | The docker image exposed the OPEA microservice running Video-Llama as a large visual model (LVM) for GenAI application use | -| [opea/nginx](https://hub.docker.com/r/opea/nginx) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/3rd_parties/nginx/src/Dockerfile) | The docker image exposed the OPEA nginx microservice for GenAI application use | -| [opea/promptregistry-mongo-server](https://hub.docker.com/r/opea/promptregistry-mongo-server) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/prompt_registry/src/Dockerfile) | The docker image exposes the OPEA Prompt Registry microservices which based on MongoDB database, designed to store and retrieve user's preferred prompts | -| [opea/reranking]() | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/rerankings/src/Dockerfile) | The docker image exposed the OPEA reranking microservice based on tei docker image for GenAI application use | -| [opea/retriever-milvus](https://hub.docker.com/r/opea/retriever-milvus) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/retrievers/milvus/langchain/Dockerfile) | The docker image exposed the OPEA retrieval microservice based on milvus vectordb for GenAI application use | -| [opea/retriever-pathway](https://hub.docker.com/r/opea/retriever-pathway) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/retrievers/pathway/langchain/Dockerfile) | The docker image exposed the OPEA retrieval microservice with pathway for GenAI application use | -| [opea/retriever-pgvector](https://hub.docker.com/r/opea/retriever-pgvector) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/retrievers/pgvector/langchain/Dockerfile) | The docker image exposed the OPEA retrieval microservice based on pgvector vectordb for GenAI application use | -| 
[opea/retriever-pinecone](https://hub.docker.com/r/opea/retriever-pinecone) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/retrievers/pinecone/langchain/Dockerfile) | The docker image exposed the OPEA retrieval microservice based on pinecone vectordb for GenAI application use | -| [opea/retriever-qdrant](https://hub.docker.com/r/opea/retriever-qdrant) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/retrievers/qdrant/haystack/Dockerfile) | The docker image exposed the OPEA retrieval microservice based on qdrant vectordb for GenAI application use | -| [opea/retriever-redis](https://hub.docker.com/r/opea/retriever-redis) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/retrievers/redis/langchain/Dockerfile) | The docker image exposed the OPEA retrieval microservice based on redis vectordb for GenAI application use | -| [opea/retriever-redis-llamaindex](https://hub.docker.com/r/opea/retriever-redis-llamaindex) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/retrievers/redis/llama_index/Dockerfile) | The docker image exposed the OPEA retriever service based on LlamaIndex for GenAI application use | -| [opea/retriever-vdms](https://hub.docker.com/r/opea/retriever-vdms) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/retrievers/vdms/langchain/Dockerfile) | The docker image exposed the OPEA retriever service based on Visual Data Management System for GenAI application use | -| [opea/speecht5](https://hub.docker.com/r/opea/speecht5) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/tts/src/integrations/dependency/speecht5/Dockerfile) | The docker image exposed the OPEA SpeechT5 service for GenAI application use | -| [opea/speecht5-gaudi](https://hub.docker.com/r/opea/speecht5-gaudi) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/tts/src/integrations/dependency/speecht5/Dockerfile.intel_hpu) | The docker image exposed the OPEA SpeechT5 service on Gaudi2 for GenAI application use | -| [opea/tei-gaudi](https://hub.docker.com/r/opea/tei-gaudi/tags) | [Link](https://github.com/huggingface/tei-gaudi/blob/habana-main/Dockerfile-hpu) | The docker image powered by HuggingFace Text Embedding Inference (TEI) on Gaudi2 for deploying and serving Embedding Models | -| [opea/vectorstore-pathway](https://hub.docker.com/r/opea/vectorstore-pathway) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/vectorstores/pathway/Dockerfile) | The docker image exposed the OPEA Vectorstores microservice with Pathway for GenAI application use | -| [opea/video-llama-lvm-server](https://hub.docker.com/r/opea/video-llama-lvm-server) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/lvms/video-llama/dependency/Dockerfile) | The docker image exposed the OPEA microservice running Video-Llama as a large visual model (LVM) server for GenAI application use | -| [opea/tts](https://hub.docker.com/r/opea/tts) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/tts/src/Dockerfile) | The docker image exposed the OPEA Text-To-Speech microservice for GenAI application use | -| [opea/vllm](https://hub.docker.com/r/opea/vllm) | [Link](https://github.com/vllm-project/vllm/blob/main/Dockerfile.cpu) | The docker image powered by vllm-project for deploying and serving vllm Models | -| [opea/vllm-gaudi]() | [Link](https://github.com/HabanaAI/vllm-fork/blob/habana_main/Dockerfile.hpu) | The docker image powered by vllm-fork for deploying and serving vllm-gaudi 
Models |
-| [opea/vllm-openvino](https://hub.docker.com/r/opea/vllm-openvino) | [Link](https://github.com/vllm-project/vllm/blob/main/Dockerfile.openvino) | The docker image powered by vllm-project for deploying and serving vllm Models of the Openvino Framework |
-| [opea/web-retriever]() | [Link](https://github.com/opea-project/GenAIComps/tree/main/comps/web_retrievers/src/Dockerfile) | The docker image exposed the OPEA web retrieval microservice based on a search engine and vector DB |
-| [opea/whisper](https://hub.docker.com/r/opea/whisper) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/asr/src/integrations/dependency/whisper/Dockerfile) | The docker image exposed the OPEA Whisper service for GenAI application use |
-| [opea/whisper-gaudi](https://hub.docker.com/r/opea/whisper-gaudi) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/asr/src/integrations/dependency/whisper/Dockerfile.intel_hpu) | The docker image exposed the OPEA Whisper service on Gaudi2 for GenAI application use |
+| Microservice Images | Dockerfile | Description |
+| ------------------- | ---------- | ----------- |
+| [opea/agent]() | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/agent/src/Dockerfile) | The docker image exposed the OPEA agent microservice for GenAI application use |
+| [opea/asr](https://hub.docker.com/r/opea/asr) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/asr/src/Dockerfile) | The docker image exposed the OPEA Audio-Speech-Recognition microservice for GenAI application use |
+| [opea/chathistory-mongo-server](https://hub.docker.com/r/opea/chathistory-mongo-server) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/chathistory/mongo/Dockerfile) | The docker image exposes the OPEA Chat History microservice, which is based on a MongoDB database and designed to allow users to store, retrieve and manage chat conversations |
+| [opea/dataprep-milvus](https://hub.docker.com/r/opea/dataprep-milvus) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/dataprep/milvus/langchain/Dockerfile) | The docker image exposed the OPEA dataprep microservice based on milvus vectordb for GenAI application use |
+| [opea/dataprep-multimodal-vdms](https://hub.docker.com/r/opea/dataprep-multimodal-vdms) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/dataprep/vdms/multimodal_langchain/Dockerfile) | This docker image exposes an OPEA dataprep microservice based on a multi-modal VDMS for use by GenAI applications. |
+| [opea/dataprep-multimodal-redis](https://hub.docker.com/r/opea/dataprep-multimodal-redis) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/dataprep/redis/langchain/Dockerfile) | This docker image exposes an OPEA dataprep microservice based on a multi-modal redis for use by GenAI applications. |
+| [opea/dataprep-on-ray-redis](https://hub.docker.com/r/opea/dataprep-on-ray-redis) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/dataprep/redis/langchain_ray/Dockerfile) | The docker image exposed the OPEA dataprep microservice based on redis vectordb and optimized ray for GenAI application use |
+| [opea/dataprep-pgvector](https://hub.docker.com/r/opea/dataprep-pgvector) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/dataprep/pgvector/langchain/Dockerfile) | The docker image exposed the OPEA dataprep microservice based on pgvector vectordb for GenAI application use |
+| [opea/dataprep-pinecone](https://hub.docker.com/r/opea/dataprep-pinecone) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/dataprep/pinecone/langchain/Dockerfile) | The docker image exposed the OPEA dataprep microservice based on pinecone vectordb for GenAI application use |
+| [opea/dataprep-qdrant](https://hub.docker.com/r/opea/dataprep-qdrant) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/dataprep/qdrant/langchain/Dockerfile) | The docker image exposed the OPEA dataprep microservice based on qdrant vectordb for GenAI application use |
+| [opea/dataprep-redis](https://hub.docker.com/r/opea/dataprep-redis) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/dataprep/redis/langchain/Dockerfile) | The docker image exposed the OPEA dataprep microservice based on redis vectordb and the Langchain framework for GenAI application use |
+| [opea/dataprep-redis-llama-index](https://hub.docker.com/r/opea/dataprep-redis-llama-index) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/dataprep/redis/llama_index/Dockerfile) | The docker image exposed the OPEA dataprep microservice based on redis vectordb and the LlamaIndex framework for GenAI application use |
+| [opea/dataprep-vdms](https://hub.docker.com/r/opea/dataprep-vdms) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/dataprep/vdms/langchain/Dockerfile) | This docker image exposes an OPEA dataprep microservice based on VDMS vectordb for use by GenAI applications. |
+| [opea/embedding-langchain-mosec](https://hub.docker.com/r/opea/embedding-langchain-mosec) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/third_parties/nginx/src/Dockerfile) | The docker image exposed the OPEA mosec embedding microservice based on the Langchain framework for GenAI application use |
+| [opea/embedding-multimodal-clip](https://hub.docker.com/r/opea/embedding-multimodal-clip) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/third_parties/clip/src/Dockerfile) | The docker image exposes OPEA multimodal CLIP-based embedding microservices for use by GenAI applications |
+| [opea/embedding](https://hub.docker.com/r/opea/embedding) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/embeddings/src/Dockerfile) | The docker image exposes OPEA multimodal embedding microservices for use by GenAI applications |
+| [opea/embedding-multimodal-bridgetower](https://hub.docker.com/r/opea/embedding-multimodal-bridgetower) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/third_parties/bridgetower/src/Dockerfile) | The docker image exposes OPEA multimodal embedding microservices based on bridgetower for use by GenAI applications |
+| [opea/embedding-multimodal-bridgetower-gaudi](https://hub.docker.com/r/opea/embedding-multimodal-bridgetower-gaudi) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/third_parties/bridgetower/src/Dockerfile.intel_hpu) | The docker image exposes OPEA multimodal embedding microservices based on bridgetower for use by GenAI applications on the Gaudi |
+| [opea/feedbackmanagement](https://hub.docker.com/r/opea/feedbackmanagement) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/feedback_management/src/Dockerfile) | The docker image exposes the OPEA feedback management microservice, which uses a MongoDB database, for GenAI applications. |
+| [opea/finetuning](https://hub.docker.com/r/opea/finetuning) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/finetuning/src/Dockerfile) | The docker image exposed the OPEA Fine-tuning microservice for GenAI application use |
+| [opea/finetuning-gaudi](https://hub.docker.com/r/opea/finetuning-gaudi) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/finetuning/src/Dockerfile.intel_hpu) | The docker image exposed the OPEA Fine-tuning microservice for GenAI application use on the Gaudi |
+| [opea/gmcrouter](https://hub.docker.com/r/opea/gmcrouter) | [Link](https://github.com/opea-project/GenAIInfra/blob/main/microservices-connector/Dockerfile.manager) | The docker image serves as one of the key parts of the OPEA GenAI Microservice Connector (GMC), routing traffic among the microservices defined in GMC |
+| [opea/gmcmanager](https://hub.docker.com/r/opea/gmcmanager) | [Link](https://github.com/opea-project/GenAIInfra/blob/main/microservices-connector/Dockerfile.router) | The docker image serves as one of the key parts of the OPEA GenAI Microservice Connector (GMC), acting as the controller manager that handles the GMC CRD |
+| [opea/guardrails]() | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/guardrails/src/guardrails/Dockerfile) | The docker image exposed the OPEA guardrail microservice to provide content review for GenAI application use |
+| [opea/guardrails-toxicity-detection](https://hub.docker.com/r/opea/guardrails-toxicity-detection) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/guardrails/src/toxicity_detection/Dockerfile) | The docker image exposed the OPEA guardrail microservice to provide toxicity detection for GenAI application use |
+| [opea/guardrails-pii-detection](https://hub.docker.com/r/opea/guardrails-pii-detection) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/guardrails/src/pii_detection/Dockerfile) | The docker image exposed the OPEA guardrail microservice to provide PII detection for GenAI application use |
+| [opea/llm-docsum-tgi](https://hub.docker.com/r/opea/llm-docsum-tgi) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/llms/summarization/tgi/langchain/Dockerfile) | This docker image is designed to build a document summarization microservice using the HuggingFace Text Generation Inference (TGI) framework. The microservice accepts document input and generates a document summary. |
+| [opea/llm-faqgen-tgi](https://hub.docker.com/r/opea/llm-faqgen-tgi) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/llms/faq-generation/tgi/langchain/Dockerfile) | This docker image is designed to build a frequently asked questions microservice using the HuggingFace Text Generation Inference (TGI) framework. The microservice accepts document input and generates a FAQ. |
+| [opea/llm-textgen](https://hub.docker.com/r/opea/llm-textgen) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/llms/src/text-generation/Dockerfile) | The docker image exposed the OPEA LLM microservice built upon the TGI docker image for GenAI application use |
+| [opea/llava-gaudi](https://hub.docker.com/r/opea/llava-hpu) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/lvms/llava/dependency/Dockerfile.intel_hpu) | The docker image exposed the OPEA microservice running LLaVA as a large visual model (LVM) service for GenAI application use on the Gaudi |
+| [opea/lvm-tgi](https://hub.docker.com/r/opea/lvm-tgi) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/lvms/tgi-llava/Dockerfile) | This docker image is designed to build a large visual model (LVM) microservice using the HuggingFace Text Generation Inference (TGI) framework. The microservice accepts document input and generates an answer to the question. |
+| [opea/lvm-llava](https://hub.docker.com/r/opea/lvm-llava) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/lvms/llava/dependency/Dockerfile) | The docker image exposed the OPEA microservice running LLaVA as a large visual model (LVM) server for GenAI application use |
+| [opea/lvm-llava-svc](https://hub.docker.com/r/opea/lvm-llava-svc) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/lvms/llava/Dockerfile) | The docker image exposed the OPEA microservice running LLaVA as a large visual model (LVM) service for GenAI application use |
+| [opea/lvm-video-llama](https://hub.docker.com/r/opea/lvm-video-llama) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/lvms/video-llama/Dockerfile) | The docker image exposed the OPEA microservice running Video-Llama as a large visual model (LVM) for GenAI application use |
+| [opea/nginx](https://hub.docker.com/r/opea/nginx) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/third_parties/nginx/src/Dockerfile) | The docker image exposed the OPEA nginx microservice for GenAI application use |
+| [opea/promptregistry-mongo-server](https://hub.docker.com/r/opea/promptregistry-mongo-server) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/prompt_registry/src/Dockerfile) | The docker image exposes the OPEA Prompt Registry microservice, which is based on a MongoDB database and designed to store and retrieve users' preferred prompts |
+| [opea/reranking]() | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/rerankings/src/Dockerfile) | The docker image exposed the OPEA reranking microservice based on the TEI docker image for GenAI application use |
+| [opea/retriever-milvus](https://hub.docker.com/r/opea/retriever-milvus) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/retrievers/milvus/langchain/Dockerfile) | The docker image exposed the OPEA retrieval microservice based on milvus vectordb for GenAI application use |
+| [opea/retriever-pathway](https://hub.docker.com/r/opea/retriever-pathway) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/retrievers/pathway/langchain/Dockerfile) | The docker image exposed the OPEA retrieval microservice with Pathway for GenAI application use |
+| [opea/retriever-pgvector](https://hub.docker.com/r/opea/retriever-pgvector) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/retrievers/pgvector/langchain/Dockerfile) | The docker image exposed the OPEA retrieval microservice based on pgvector vectordb for GenAI application use |
+| [opea/retriever-pinecone](https://hub.docker.com/r/opea/retriever-pinecone) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/retrievers/pinecone/langchain/Dockerfile) | The docker image exposed the OPEA retrieval microservice based on pinecone vectordb for GenAI application use |
+| [opea/retriever-qdrant](https://hub.docker.com/r/opea/retriever-qdrant) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/retrievers/qdrant/haystack/Dockerfile) | The docker image exposed the OPEA retrieval microservice based on qdrant vectordb for GenAI application use |
+| [opea/retriever-redis](https://hub.docker.com/r/opea/retriever-redis) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/retrievers/redis/langchain/Dockerfile) | The docker image exposed the OPEA retrieval microservice based on redis vectordb for GenAI application use |
+| [opea/retriever-redis-llamaindex](https://hub.docker.com/r/opea/retriever-redis-llamaindex) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/retrievers/redis/llama_index/Dockerfile) | The docker image exposed the OPEA retriever service based on LlamaIndex for GenAI application use |
+| [opea/retriever-vdms](https://hub.docker.com/r/opea/retriever-vdms) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/retrievers/vdms/langchain/Dockerfile) | The docker image exposed the OPEA retriever service based on Visual Data Management System for GenAI application use |
+| [opea/speecht5](https://hub.docker.com/r/opea/speecht5) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/tts/src/integrations/dependency/speecht5/Dockerfile) | The docker image exposed the OPEA SpeechT5 service for GenAI application use |
+| [opea/speecht5-gaudi](https://hub.docker.com/r/opea/speecht5-gaudi) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/tts/src/integrations/dependency/speecht5/Dockerfile.intel_hpu) | The docker image exposed the OPEA SpeechT5 service on Gaudi2 for GenAI application use |
+| [opea/tei-gaudi](https://hub.docker.com/r/opea/tei-gaudi/tags) | [Link](https://github.com/huggingface/tei-gaudi/blob/habana-main/Dockerfile-hpu) | The docker image powered by HuggingFace Text Embedding Inference (TEI) on Gaudi2 for deploying and serving embedding models |
+| [opea/vectorstore-pathway](https://hub.docker.com/r/opea/vectorstore-pathway) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/vectorstores/pathway/Dockerfile) | The docker image exposed the OPEA Vectorstores microservice with Pathway for GenAI application use |
+| [opea/video-llama-lvm-server](https://hub.docker.com/r/opea/video-llama-lvm-server) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/lvms/video-llama/dependency/Dockerfile) | The docker image exposed the OPEA microservice running Video-Llama as a large visual model (LVM) server for GenAI application use |
+| [opea/tts](https://hub.docker.com/r/opea/tts) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/tts/src/Dockerfile) | The docker image exposed the OPEA Text-To-Speech microservice for GenAI application use |
+| [opea/vllm](https://hub.docker.com/r/opea/vllm) | [Link](https://github.com/vllm-project/vllm/blob/main/Dockerfile.cpu) | The docker image powered by vllm-project for deploying and serving vLLM models |
+| [opea/vllm-gaudi]() | [Link](https://github.com/HabanaAI/vllm-fork/blob/habana_main/Dockerfile.hpu) | The docker image powered by vllm-fork for deploying and serving vLLM models on Gaudi |
+| [opea/vllm-openvino](https://hub.docker.com/r/opea/vllm-openvino) | [Link](https://github.com/vllm-project/vllm/blob/main/Dockerfile.openvino) | The docker image powered by vllm-project for deploying and serving vLLM models with the OpenVINO framework |
+| [opea/web-retriever]() | [Link](https://github.com/opea-project/GenAIComps/tree/main/comps/web_retrievers/src/Dockerfile) | The docker image exposed the OPEA web retrieval microservice based on a search engine and vector DB |
+| [opea/whisper](https://hub.docker.com/r/opea/whisper) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/asr/src/integrations/dependency/whisper/Dockerfile) | The docker image exposed the OPEA Whisper service for GenAI application use |
+| [opea/whisper-gaudi](https://hub.docker.com/r/opea/whisper-gaudi) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/asr/src/integrations/dependency/whisper/Dockerfile.intel_hpu) | The docker image exposed the OPEA Whisper service on Gaudi2 for GenAI application use |
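
Reviewer note: every path change in this patch follows a single pattern, so a hand-applied fix is easy to verify. The sketch below is illustrative only; nginx stands in for any component whose Dockerfile moved under `comps/third_parties/<component>/src/`, and it assumes a local GenAIComps checkout with the usual proxy variables exported (or empty).

```bash
# Illustrative sketch of the renamed build path; nginx is a stand-in for any
# migrated component. Assumes GenAIComps is cloned locally and that
# http_proxy/https_proxy are set (or empty) in the environment.
git clone https://github.com/opea-project/GenAIComps.git
cd GenAIComps

# Before this patch: -f comps/3rd_parties/nginx/src/Dockerfile
# After this patch:  -f comps/third_parties/nginx/src/Dockerfile
docker build -t opea/nginx:latest \
  --build-arg https_proxy=$https_proxy \
  --build-arg http_proxy=$http_proxy \
  -f comps/third_parties/nginx/src/Dockerfile .
```

The build.yaml hunks are the same rename applied to each service's `dockerfile:` key, with no other keys touched. The embedding dependencies (bridgetower, clip) additionally move from `comps/embeddings/src/integrations/dependency/<name>/` to `comps/third_parties/<name>/src/`, which the MultimodalQnA and VideoQnA hunks above reflect.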