HUGGINGFACEHUB_API_TOKEN environment variable is changed to HF_TOKEN (#1503)
Signed-off-by: Wang, Xigui <xigui.wang@intel.com>
@@ -101,7 +101,7 @@ export http_proxy=${your_http_proxy}
 export https_proxy=${your_http_proxy}
 export LLM_MODEL_ID="Qwen/Qwen2.5-Coder-7B-Instruct"
 export TGI_LLM_ENDPOINT="http://${host_ip}:8028"
-export HUGGINGFACEHUB_API_TOKEN=${your_hf_api_token}
+export HF_TOKEN=${your_hf_api_token}
 export MEGA_SERVICE_HOST_IP=${host_ip}
 export LLM_SERVICE_HOST_IP=${host_ip}
 export BACKEND_SERVICE_ENDPOINT="http://${host_ip}:7778/v1/codegen"

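For anyone updating a local environment after this rename, a minimal sketch of the new setup; the token value is a placeholder, and the mirrored legacy export is only needed for scripts that still read the old name:

# Placeholder value; substitute a real Hugging Face access token.
export HF_TOKEN="hf_xxxxxxxxxxxxxxxx"
# Optional: keep the legacy name populated for tooling that has not migrated yet.
export HUGGINGFACEHUB_API_TOKEN=${HF_TOKEN}
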
@@ -14,7 +14,7 @@ services:
       no_proxy: ${no_proxy}
       http_proxy: ${http_proxy}
       https_proxy: ${https_proxy}
-      HF_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
+      HF_TOKEN: ${HF_TOKEN}
       host_ip: ${host_ip}
     healthcheck:
       test: ["CMD-SHELL", "curl -f http://$host_ip:8028/health || exit 1"]

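The healthcheck above simply curls the TGI health route, so readiness can also be confirmed from the host once the containers are up; a sketch using the endpoint configured earlier:

# Exits non-zero until the TGI service on port 8028 reports healthy.
curl -f http://${host_ip}:8028/health
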
@@ -37,7 +37,8 @@ services:
       https_proxy: ${https_proxy}
       LLM_ENDPOINT: ${TGI_LLM_ENDPOINT}
       LLM_MODEL_ID: ${LLM_MODEL_ID}
-      HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
+      HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN}
+      HF_TOKEN: ${HF_TOKEN}
     restart: unless-stopped
   codegen-xeon-backend-server:
     image: ${REGISTRY:-opea}/codegen:${TAG:-latest}

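Because the LLM service now receives the token under both names, the resolved compose configuration is an easy place to confirm that both keys interpolate to the same value; a sketch, run from the directory containing this compose file:

# Render the compose file with environment interpolation applied and filter token keys.
docker compose config | grep -i token
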
@@ -87,7 +87,7 @@ export http_proxy=${your_http_proxy}
 export https_proxy=${your_http_proxy}
 export LLM_MODEL_ID="Qwen/Qwen2.5-Coder-7B-Instruct"
 export TGI_LLM_ENDPOINT="http://${host_ip}:8028"
-export HUGGINGFACEHUB_API_TOKEN=${your_hf_api_token}
+export HF_TOKEN=${your_hf_api_token}
 export MEGA_SERVICE_HOST_IP=${host_ip}
 export LLM_SERVICE_HOST_IP=${host_ip}
 export BACKEND_SERVICE_ENDPOINT="http://${host_ip}:7778/v1/codegen"

@@ -15,7 +15,8 @@ services:
       https_proxy: ${https_proxy}
       HABANA_VISIBLE_DEVICES: all
       OMPI_MCA_btl_vader_single_copy_mechanism: none
-      HUGGING_FACE_HUB_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
+      HUGGING_FACE_HUB_TOKEN: ${HF_TOKEN}
+      HF_TOKEN: ${HF_TOKEN}
       ENABLE_HPU_GRAPH: true
       LIMIT_HPU_GRAPH: true
       USE_FLASH_ATTENTION: true

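The Gaudi TGI service keeps HUGGING_FACE_HUB_TOKEN but now fills it from HF_TOKEN; a sketch for inspecting what a running container actually received (the service name tgi-service is an assumption, not shown in this hunk):

# List token-related variables inside the running TGI container (hypothetical service name).
docker compose exec tgi-service printenv | grep -iE 'hf_token|hugging'
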
@@ -45,7 +46,8 @@ services:
       https_proxy: ${https_proxy}
       LLM_ENDPOINT: ${TGI_LLM_ENDPOINT}
       LLM_MODEL_ID: ${LLM_MODEL_ID}
-      HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
+      HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN}
+      HF_TOKEN: ${HF_TOKEN}
     restart: unless-stopped
   codegen-gaudi-backend-server:
     image: ${REGISTRY:-opea}/codegen:${TAG:-latest}

@@ -6,7 +6,12 @@ pushd "../../" > /dev/null
 source .set_env.sh
 popd > /dev/null

+if [ -z "$HF_TOKEN" ]; then
+    echo "Error: The HF_TOKEN environment variable is **NOT** set. Please set it"
+    return -1
+fi
+
 export host_ip=$(hostname -I | awk '{print $1}')
 export LLM_MODEL_ID="Qwen/Qwen2.5-Coder-7B-Instruct"
 export TGI_LLM_ENDPOINT="http://${host_ip}:8028"
 export MEGA_SERVICE_HOST_IP=${host_ip}
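Because the new guard uses return rather than exit, this script is intended to be sourced in the current shell; a minimal usage sketch, assuming a set_env.sh filename that this hunk does not actually show:

# Set the token first, then source the script so the exports land in the current shell.
export HF_TOKEN="hf_xxxxxxxxxxxxxxxx"   # placeholder value
source ./set_env.sh                     # hypothetical filename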