Refactor lvm related examples (#1333)

Sihan Chen
2025-01-13 13:42:06 +08:00
committed by GitHub
parent f48bd8e74f
commit ca15fe9bdb
25 changed files with 162 additions and 155 deletions

View File

@@ -36,7 +36,7 @@ lvm-llava
================
Port 8399 - Open to 0.0.0.0/0
-lvm-llava-svc
+lvm
===
Port 9399 - Open to 0.0.0.0/0
@@ -132,13 +132,13 @@ docker build --no-cache -t opea/retriever-redis:latest --build-arg https_proxy=$
Build lvm-llava image
```bash
-docker build --no-cache -t opea/lvm-llava:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/lvms/llava/dependency/Dockerfile .
+docker build --no-cache -t opea/lvm-llava:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/lvms/src/integrations/dependency/llava/Dockerfile .
```
-Build lvm-llava-svc microservice image
+Build lvm microservice image
```bash
-docker build --no-cache -t opea/lvm-llava-svc:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/lvms/llava/Dockerfile .
+docker build --no-cache -t opea/lvm:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/lvms/src/Dockerfile .
```
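After both images are built, a quick filter on the local image list (a convenience check, not part of the original README) confirms that the renamed `opea/lvm` image and the `opea/lvm-llava` dependency image are present:
```bash
# Show only the LVM-related images built above; both repositories should be listed.
docker images | grep 'opea/lvm'
```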
### 4. Build dataprep-multimodal-redis Image
@@ -179,7 +179,7 @@ cd ../../../
Then run the command `docker images`, and you will see the following 11 Docker images:
1. `opea/dataprep-multimodal-redis:latest`
-2. `opea/lvm-llava-svc:latest`
+2. `opea/lvm:latest`
3. `opea/lvm-llava:latest`
4. `opea/retriever-multimodal-redis:latest`
5. `opea/whisper:latest`
@@ -271,7 +271,7 @@ curl http://${host_ip}:${LLAVA_SERVER_PORT}/generate \
-d '{"prompt":"Describe the image please.", "img_b64_str": "iVBORw0KGgoAAAANSUhEUgAAAAoAAAAKCAYAAACNMs+9AAAAFUlEQVR42mP8/5+hnoEIwDiqkL4KAcT9GO0U4BxoAAAAAElFTkSuQmCC"}'
```
-6. lvm-llava-svc
+6. lvm
```bash
curl http://${host_ip}:9399/v1/lvm \

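The request body for step 6 is cut off by the hunk above. For reference only, a complete call to the renamed lvm wrapper might look like the sketch below; the payload fields (`image`, `prompt`, `max_new_tokens`) are assumptions based on the OPEA LVM request schema and are not shown in this diff.
```bash
# Hypothetical request to the unified lvm microservice on its documented port 9399.
# The JSON fields are assumed; check the LVM schema in GenAIComps before relying on them.
curl http://${host_ip}:9399/v1/lvm \
  -X POST \
  -H 'Content-Type: application/json' \
  -d '{"image": "iVBORw0KGgoAAAANSUhEUgAAAAoAAAAKCAYAAACNMs+9AAAAFUlEQVR42mP8/5+hnoEIwDiqkL4KAcT9GO0U4BxoAAAAAElFTkSuQmCC", "prompt": "What is in this image?", "max_new_tokens": 32}'
```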
View File

@@ -100,9 +100,9 @@ services:
https_proxy: ${https_proxy}
entrypoint: ["python", "llava_server.py", "--device", "cpu", "--model_name_or_path", $LVM_MODEL_ID]
restart: unless-stopped
-lvm-llava-svc:
-image: ${REGISTRY:-opea}/lvm-llava-svc:${TAG:-latest}
-container_name: lvm-llava-svc
+lvm:
+image: ${REGISTRY:-opea}/lvm:${TAG:-latest}
+container_name: lvm
depends_on:
- lvm-llava
ports:
@@ -112,6 +112,7 @@ services:
no_proxy: ${no_proxy}
http_proxy: ${http_proxy}
https_proxy: ${https_proxy}
+LVM_COMPONENT_NAME: "OPEA_LLAVA_LVM"
LVM_ENDPOINT: ${LVM_ENDPOINT}
restart: unless-stopped
multimodalqna:
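Assembled from the fragments above, the refactored lvm service entry reads roughly as follows. This is a reconstruction, not a line from the commit: the port mapping is assumed from the README's "Port 9399" entry, and the indentation follows standard compose layout.
```yaml
  lvm:
    image: ${REGISTRY:-opea}/lvm:${TAG:-latest}
    container_name: lvm
    depends_on:
      - lvm-llava
    ports:
      - "9399:9399"  # assumed from the README port table
    environment:
      no_proxy: ${no_proxy}
      http_proxy: ${http_proxy}
      https_proxy: ${https_proxy}
      LVM_COMPONENT_NAME: "OPEA_LLAVA_LVM"
      LVM_ENDPOINT: ${LVM_ENDPOINT}
    restart: unless-stopped
```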
@@ -122,7 +123,7 @@ services:
- dataprep-multimodal-redis
- embedding
- retriever-redis
-- lvm-llava-svc
+- lvm
ports:
- "8888:8888"
environment:

View File

@@ -86,10 +86,10 @@ Build TGI Gaudi image
docker pull ghcr.io/huggingface/tgi-gaudi:2.0.6
```
-Build lvm-tgi microservice image
+Build lvm microservice image
```bash
-docker build --no-cache -t opea/lvm-tgi:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/lvms/tgi-llava/Dockerfile .
+docker build --no-cache -t opea/lvm:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/lvms/src/Dockerfile .
```
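This is the same `opea/lvm:latest` image and Dockerfile used in the Xeon example; the backend is chosen at runtime through `LVM_COMPONENT_NAME`. As a rough sketch (not part of this commit), running the wrapper on its own against the tgi-gaudi server could look like this, with `LVM_ENDPOINT` pointing at the TGI endpoint:
```bash
# Hypothetical standalone run of the unified lvm image; the environment variable
# names follow the compose files in this commit and the port follows the README.
docker run -d --name lvm -p 9399:9399 \
  -e LVM_COMPONENT_NAME="OPEA_TGI_LLAVA_LVM" \
  -e LVM_ENDPOINT=${LVM_ENDPOINT} \
  opea/lvm:latest
```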
### 4. Build dataprep-multimodal-redis Image
@@ -128,7 +128,7 @@ docker build --no-cache -t opea/multimodalqna-ui:latest --build-arg https_proxy=
Then run the command `docker images`, and you will see the following 11 Docker images:
1. `opea/dataprep-multimodal-redis:latest`
-2. `opea/lvm-tgi:latest`
+2. `opea/lvm:latest`
3. `ghcr.io/huggingface/tgi-gaudi:2.0.6`
4. `opea/retriever-multimodal-redis:latest`
5. `opea/whisper:latest`
@@ -220,7 +220,7 @@ curl http://${host_ip}:${LLAVA_SERVER_PORT}/generate \
-H 'Content-Type: application/json'
```
-6. lvm-tgi
+6. lvm
```bash
curl http://${host_ip}:9399/v1/lvm \
@@ -274,7 +274,7 @@ curl --silent --write-out "HTTPSTATUS:%{http_code}" \
-F "files=@./${audio_fn}"
```
-Also, test dataprep microservice with generating an image caption using lvm-tgi
+Also, test dataprep microservice with generating an image caption using lvm
```bash
curl --silent --write-out "HTTPSTATUS:%{http_code}" \

View File

@@ -24,7 +24,7 @@ services:
container_name: dataprep-multimodal-redis
depends_on:
- redis-vector-db
-- lvm-tgi
+- lvm
ports:
- "6007:6007"
environment:
@@ -115,9 +115,9 @@ services:
ipc: host
command: --model-id ${LVM_MODEL_ID} --max-input-tokens 3048 --max-total-tokens 4096
restart: unless-stopped
-lvm-tgi:
-image: ${REGISTRY:-opea}/lvm-tgi:${TAG:-latest}
-container_name: lvm-tgi
+lvm:
+image: ${REGISTRY:-opea}/lvm:${TAG:-latest}
+container_name: lvm
depends_on:
- tgi-gaudi
ports:
@@ -127,6 +127,7 @@ services:
no_proxy: ${no_proxy}
http_proxy: ${http_proxy}
https_proxy: ${https_proxy}
+LVM_COMPONENT_NAME: "OPEA_TGI_LLAVA_LVM"
LVM_ENDPOINT: ${LVM_ENDPOINT}
HF_HUB_DISABLE_PROGRESS_BARS: 1
HF_HUB_ENABLE_HF_TRANSFER: 0
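As with the Xeon compose file, the renamed service consolidates to roughly the entry below. This is a reconstruction from the hunks above, not a line from the commit; the port mapping is assumed from the README.
```yaml
  lvm:
    image: ${REGISTRY:-opea}/lvm:${TAG:-latest}
    container_name: lvm
    depends_on:
      - tgi-gaudi
    ports:
      - "9399:9399"  # assumed from the README port table
    environment:
      LVM_COMPONENT_NAME: "OPEA_TGI_LLAVA_LVM"
      LVM_ENDPOINT: ${LVM_ENDPOINT}
      HF_HUB_DISABLE_PROGRESS_BARS: 1
      HF_HUB_ENABLE_HF_TRANSFER: 0
```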
@@ -139,7 +140,7 @@ services:
- dataprep-multimodal-redis
- embedding
- retriever-redis
-- lvm-tgi
+- lvm
ports:
- "8888:8888"
environment:
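Once either stack is up, a quick smoke test (a sketch, not part of this commit) is to check that the renamed service is running and answering; the `/v1/health_check` path is an assumption based on the common OPEA microservice layout.
```bash
# Confirm the renamed lvm service is up, then hit its assumed health endpoint.
docker compose ps lvm
curl http://${host_ip}:9399/v1/health_check
```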