Exclude dockerfile under tests and exclude check Dockerfile under tests. (#1354)
Signed-off-by: ZePan110 <ze.pan@intel.com>
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
@@ -28,10 +28,10 @@ cd GenAIComps
 docker build --no-cache -t opea/embedding-multimodal-bridgetower:latest --build-arg EMBEDDER_PORT=$EMBEDDER_PORT --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/embeddings/src/integrations/dependency/bridgetower/Dockerfile .
 ```
 
-Build embedding-tei microservice image
+Build embedding microservice image
 
 ```bash
-docker build --no-cache -t opea/embedding-tei:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/embeddings/src/Dockerfile .
+docker build --no-cache -t opea/embedding:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/embeddings/src/Dockerfile .
 ```
 
 ### 2. Build LVM Images
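
As a quick sanity check on the rename above, the new tag can be confirmed locally once both builds finish. This is a minimal sketch, assuming it runs on the same host that executed the `docker build` steps:

```bash
# Verify that the dependency image and the renamed embedding image both exist
# (both repository names contain "opea/embedding", so a single filter covers them).
docker images | grep 'opea/embedding'
```
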
@@ -87,7 +87,7 @@ Then run the command `docker images`, you will have the following 8 Docker Image
 2. `ghcr.io/huggingface/text-generation-inference:2.4.1-rocm`
 3. `opea/lvm-tgi:latest`
 4. `opea/retriever-multimodal-redis:latest`
-5. `opea/embedding-tei:latest`
+5. `opea/embedding:latest`
 6. `opea/embedding-multimodal-bridgetower:latest`
 7. `opea/multimodalqna:latest`
 8. `opea/multimodalqna-ui:latest`
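
If desired, the presence of the listed tags (items 2–8 above; item 1 lies outside this hunk) can be checked in one pass. A minimal sketch, assuming all images were built or pulled on this host:

```bash
# Report which of the expected images from the list above are present locally.
for img in \
  ghcr.io/huggingface/text-generation-inference:2.4.1-rocm \
  opea/lvm-tgi:latest \
  opea/retriever-multimodal-redis:latest \
  opea/embedding:latest \
  opea/embedding-multimodal-bridgetower:latest \
  opea/multimodalqna:latest \
  opea/multimodalqna-ui:latest
do
  docker image inspect "$img" >/dev/null 2>&1 && echo "OK      $img" || echo "MISSING $img"
done
```
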
@@ -98,11 +98,11 @@ Then run the command `docker images`, you will have the following 8 Docker Image
 
 By default, the multimodal-embedding and LVM models are set to a default value as listed below:
 
-| Service       | Model                                       |
-| ------------- | ------------------------------------------- |
-| embedding-tei | BridgeTower/bridgetower-large-itm-mlm-gaudi |
-| LVM           | llava-hf/llava-1.5-7b-hf                    |
-| LVM           | Xkev/Llama-3.2V-11B-cot                     |
+| Service   | Model                                       |
+| --------- | ------------------------------------------- |
+| embedding | BridgeTower/bridgetower-large-itm-mlm-gaudi |
+| LVM       | llava-hf/llava-1.5-7b-hf                    |
+| LVM       | Xkev/Llama-3.2V-11B-cot                     |
 
 Note:
 
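
These defaults are wired through environment variables rather than hard-coded in the images; the compose hunk further down, for example, passes `$EMBEDDING_MODEL_ID` to the BridgeTower server. Below is a minimal sketch of overriding the embedding default before deployment; whether the LVM defaults are overridable the same way is an assumption to verify against the example's environment setup:

```bash
# Point the multimodal embedding at an explicit model before starting the stack.
# EMBEDDING_MODEL_ID is the variable consumed by the BridgeTower entrypoint in
# the compose file below; substitute another checkpoint here if needed.
export EMBEDDING_MODEL_ID="BridgeTower/bridgetower-large-itm-mlm-gaudi"
```
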
@@ -158,7 +158,7 @@ curl http://${host_ip}:${EMBEDDER_PORT}/v1/encode \
 -d '{"text":"This is example", "img_b64_str": "iVBORw0KGgoAAAANSUhEUgAAAAoAAAAKCAYAAACNMs+9AAAAFUlEQVR42mP8/5+hnoEIwDiqkL4KAcT9GO0U4BxoAAAAAElFTkSuQmCC"}'
 ```
 
-2. embedding-tei
+2. embedding
 
 ```bash
 curl http://${host_ip}:$MM_EMBEDDING_PORT_MICROSERVICE/v1/embeddings \
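
After the rename, item 2 still targets the same wrapper port; below is a minimal request sketch against the renamed service. The JSON body is an assumed placeholder, not taken from this diff (the hunk cuts off before the README's actual payload), so check the full README for the exact request format:

```bash
# Query the renamed embedding microservice. The request body here is an assumed
# placeholder; the authoritative payload is in the README lines this hunk truncates.
curl http://${host_ip}:$MM_EMBEDDING_PORT_MICROSERVICE/v1/embeddings \
  -X POST \
  -H "Content-Type: application/json" \
  -d '{"text":"This is example"}'
```
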
@@ -55,9 +55,9 @@ services:
       start_period: 30s
     entrypoint: ["python", "bridgetower_server.py", "--device", "cpu", "--model_name_or_path", $EMBEDDING_MODEL_ID]
     restart: unless-stopped
-  embedding-tei:
-    image: ${REGISTRY:-opea}/embedding-tei:${TAG:-latest}
-    container_name: embedding-tei
+  embedding:
+    image: ${REGISTRY:-opea}/embedding:${TAG:-latest}
+    container_name: embedding
     depends_on:
       embedding-multimodal-bridgetower:
         condition: service_healthy
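
Because the service key, image, and container name all change together here, a quick check of the rendered configuration helps catch any stale references left elsewhere in the file. A minimal sketch, assuming it is run from the directory containing this compose file:

```bash
# Confirm the service is now named "embedding" and that no "embedding-tei"
# references remain anywhere in the rendered configuration.
docker compose config --services | grep -x embedding
docker compose config | grep -n 'embedding-tei' && echo "stale embedding-tei reference found" || echo "no stale references"
```
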
@@ -138,7 +138,7 @@ services:
     depends_on:
       - redis-vector-db
      - dataprep-multimodal-redis
-      - embedding-tei
+      - embedding
       - retriever-redis
       - lvm-tgi
     ports:
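
Since `depends_on` entries must match service keys exactly, the renamed dependency is easy to verify once the stack is running. A minimal sketch, assuming the stack is started from this compose file:

```bash
# Start the stack and check that the renamed dependency comes up.
docker compose up -d
docker compose ps embedding
```
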