HUGGINGFACEHUB_API_TOKEN environment variable is changed to HF_TOKEN (#1503)

Signed-off-by: Wang, Xigui <xigui.wang@intel.com>
Author: xiguiw
Date: 2025-02-09 20:33:06 +08:00
Committed by: GitHub
Parent: 1b3291a1c8
Commit: 45d5da2ddd
69 changed files with 263 additions and 113 deletions


@@ -72,7 +72,7 @@ Change the `LLM_MODEL_ID` below for your needs.
 export host_ip="External_Public_IP"
 # Example: no_proxy="localhost, 127.0.0.1, 192.168.1.1"
 export no_proxy="Your_No_Proxy"
-export HUGGINGFACEHUB_API_TOKEN="Your_Huggingface_API_Token"
+export HF_TOKEN="Your_Huggingface_API_Token"
 # Example: NGINX_PORT=80
 export NGINX_PORT=${your_nginx_port}
 ```
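With the README now exporting `HF_TOKEN`, it can be useful to confirm the token is valid before starting the services. A minimal sketch (not part of this commit; it assumes `curl` is available and that the Hub's `whoami-v2` endpoint is reachable):

```bash
# Sanity-check the exported token against the Hugging Face Hub.
# A valid token returns account details as JSON; an invalid one
# returns an "Invalid credentials" error. (Assumed helper, not in the diff.)
curl -s -H "Authorization: Bearer ${HF_TOKEN}" \
  https://huggingface.co/api/whoami-v2
```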


@@ -14,7 +14,7 @@ services:
       no_proxy: ${no_proxy}
       http_proxy: ${http_proxy}
       https_proxy: ${https_proxy}
-      HF_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
+      HF_TOKEN: ${HF_TOKEN}
       host_ip: ${host_ip}
     healthcheck:
       test: ["CMD-SHELL", "curl -f http://$host_ip:8008/health || exit 1"]
@@ -37,7 +37,8 @@ services:
       https_proxy: ${https_proxy}
       LLM_ENDPOINT: ${TGI_LLM_ENDPOINT}
       LLM_MODEL_ID: ${LLM_MODEL_ID}
-      HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
+      HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN}
+      HF_TOKEN: ${HF_TOKEN}
     restart: unless-stopped
   codetrans-xeon-backend-server:
     image: ${REGISTRY:-opea}/codetrans:${TAG:-latest}
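Both in-container variables (`HUGGINGFACEHUB_API_TOKEN` for backward compatibility and the new `HF_TOKEN`) are now fed from the host's `HF_TOKEN`. One way to verify the interpolation, as a hedged sketch rather than part of this change, is to render the resolved compose file:

```bash
# Render compose.yaml with environment variables interpolated and check
# that both token entries resolve from the host's HF_TOKEN.
# (Verification sketch only; not part of this commit.)
export HF_TOKEN="Your_Huggingface_API_Token"
docker compose config | grep -iE 'hf_token|huggingfacehub_api_token'
```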


@@ -64,7 +64,7 @@ Change the `LLM_MODEL_ID` below for your needs.
 export host_ip="External_Public_IP"
 # Example: no_proxy="localhost, 127.0.0.1, 192.168.1.1"
 export no_proxy="Your_No_Proxy"
-export HUGGINGFACEHUB_API_TOKEN="Your_Huggingface_API_Token"
+export HF_TOKEN="Your_Huggingface_API_Token"
 # Example: NGINX_PORT=80
 export NGINX_PORT=${your_nginx_port}
 ```
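Existing scripts that still export only the legacy variable name keep working with a small shim; this is a compatibility sketch, not something the commit adds:

```bash
# Fall back to the legacy HUGGINGFACEHUB_API_TOKEN if HF_TOKEN is unset.
# (Compatibility sketch; not part of this commit.)
export HF_TOKEN="${HF_TOKEN:-${HUGGINGFACEHUB_API_TOKEN}}"
```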


@@ -3,7 +3,7 @@
 services:
   tgi-service:
-    image: ghcr.io/huggingface/tgi-gaudi:2.0.6
+    image: ghcr.io/huggingface/tgi-gaudi:2.3.1
     container_name: codetrans-tgi-service
     ports:
       - "8008:80"
@@ -15,7 +15,8 @@ services:
       https_proxy: ${https_proxy}
       HABANA_VISIBLE_DEVICES: all
       OMPI_MCA_btl_vader_single_copy_mechanism: none
-      HUGGING_FACE_HUB_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
+      HUGGING_FACE_HUB_TOKEN: ${HF_TOKEN}
+      HF_TOKEN: ${HF_TOKEN}
       ENABLE_HPU_GRAPH: true
       LIMIT_HPU_GRAPH: true
       USE_FLASH_ATTENTION: true
@@ -45,7 +46,8 @@ services:
       https_proxy: ${https_proxy}
       LLM_ENDPOINT: ${TGI_LLM_ENDPOINT}
       LLM_MODEL_ID: ${LLM_MODEL_ID}
-      HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
+      HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN}
+      HF_TOKEN: ${HF_TOKEN}
     restart: unless-stopped
   codetrans-gaudi-backend-server:
     image: ${REGISTRY:-opea}/codetrans:${TAG:-latest}
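Because this file also bumps the Gaudi TGI image from 2.0.6 to 2.3.1, it is worth pulling the new image and checking that the service comes back healthy. A sketch, assuming the Gaudi compose file is in the current directory and that the `/health` route used in the Xeon healthcheck is also served here:

```bash
# Pull the updated Gaudi TGI image, restart only that service, and poll the
# health endpoint exposed on port 8008. (Sketch; not part of this commit.)
docker pull ghcr.io/huggingface/tgi-gaudi:2.3.1
docker compose up -d tgi-service
curl -sf "http://${host_ip}:8008/health" && echo "tgi-service is ready"
```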