diff --git a/CodeGen/docker_compose/amd/gpu/rocm/compose.yaml b/CodeGen/docker_compose/amd/gpu/rocm/compose.yaml index a4fa74f19f..7d72e38699 100644 --- a/CodeGen/docker_compose/amd/gpu/rocm/compose.yaml +++ b/CodeGen/docker_compose/amd/gpu/rocm/compose.yaml @@ -15,7 +15,7 @@ services: http_proxy: ${http_proxy} https_proxy: ${https_proxy} HUGGING_FACE_HUB_TOKEN: ${CODEGEN_HUGGINGFACEHUB_API_TOKEN} - HUGGINGFACEHUB_API_TOKEN: ${CODEGEN_HUGGINGFACEHUB_API_TOKEN} + HF_TOKEN: ${CODEGEN_HUGGINGFACEHUB_API_TOKEN} host_ip: ${HOST_IP} healthcheck: test: ["CMD-SHELL", "curl -f http://${HOST_IP}:${CODEGEN_TGI_SERVICE_PORT:-8028}/health || exit 1"] @@ -49,7 +49,6 @@ services: https_proxy: ${https_proxy} LLM_ENDPOINT: ${CODEGEN_TGI_LLM_ENDPOINT} LLM_MODEL_ID: ${CODEGEN_LLM_MODEL_ID} - HUGGINGFACEHUB_API_TOKEN: ${CODEGEN_HUGGINGFACEHUB_API_TOKEN} HF_TOKEN: ${CODEGEN_HUGGINGFACEHUB_API_TOKEN} LLM_COMPONENT_NAME: "OpeaTextGenService" restart: unless-stopped diff --git a/CodeGen/docker_compose/amd/gpu/rocm/compose_vllm.yaml b/CodeGen/docker_compose/amd/gpu/rocm/compose_vllm.yaml index f63aca7455..e9fdc44ffe 100644 --- a/CodeGen/docker_compose/amd/gpu/rocm/compose_vllm.yaml +++ b/CodeGen/docker_compose/amd/gpu/rocm/compose_vllm.yaml @@ -12,7 +12,6 @@ services: no_proxy: ${no_proxy} http_proxy: ${http_proxy} https_proxy: ${https_proxy} - HUGGINGFACEHUB_API_TOKEN: ${CODEGEN_HUGGINGFACEHUB_API_TOKEN} HF_TOKEN: ${CODEGEN_HUGGINGFACEHUB_API_TOKEN} HF_HUB_DISABLE_PROGRESS_BARS: 1 HF_HUB_ENABLE_HF_TRANSFER: 0 @@ -53,7 +52,6 @@ services: https_proxy: ${https_proxy} LLM_ENDPOINT: ${CODEGEN_VLLM_ENDPOINT} LLM_MODEL_ID: ${CODEGEN_LLM_MODEL_ID} - HUGGINGFACEHUB_API_TOKEN: ${CODEGEN_HUGGINGFACEHUB_API_TOKEN} HF_TOKEN: ${CODEGEN_HUGGINGFACEHUB_API_TOKEN} LLM_COMPONENT_NAME: "OpeaTextGenService" restart: unless-stopped diff --git a/CodeGen/docker_compose/intel/cpu/xeon/compose.yaml b/CodeGen/docker_compose/intel/cpu/xeon/compose.yaml index 774c6a34a5..f20825e7ea 100644 --- 
a/CodeGen/docker_compose/intel/cpu/xeon/compose.yaml +++ b/CodeGen/docker_compose/intel/cpu/xeon/compose.yaml @@ -32,7 +32,7 @@ services: https_proxy: ${https_proxy} LLM_ENDPOINT: ${LLM_ENDPOINT} LLM_MODEL_ID: ${LLM_MODEL_ID} - HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} + HF_TOKEN: ${HF_TOKEN} restart: unless-stopped llm-vllm-service: extends: llm-base @@ -103,7 +103,7 @@ services: REDIS_URL: ${REDIS_URL} REDIS_HOST: ${host_ip} INDEX_NAME: ${INDEX_NAME} - HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} + HF_TOKEN: ${HF_TOKEN} LOGFLAG: true healthcheck: test: ["CMD-SHELL", "curl -f http://localhost:5000/v1/health_check || exit 1"] @@ -165,7 +165,7 @@ services: REDIS_RETRIEVER_PORT: ${REDIS_RETRIEVER_PORT} INDEX_NAME: ${INDEX_NAME} TEI_EMBEDDING_ENDPOINT: ${TEI_EMBEDDING_ENDPOINT} - HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} + HF_TOKEN: ${HF_TOKEN} LOGFLAG: ${LOGFLAG} RETRIEVER_COMPONENT_NAME: ${RETRIEVER_COMPONENT_NAME:-OPEA_RETRIEVER_REDIS} restart: unless-stopped diff --git a/CodeGen/docker_compose/intel/cpu/xeon/compose_remote.yaml b/CodeGen/docker_compose/intel/cpu/xeon/compose_remote.yaml index 23b8af1959..edec62f315 100644 --- a/CodeGen/docker_compose/intel/cpu/xeon/compose_remote.yaml +++ b/CodeGen/docker_compose/intel/cpu/xeon/compose_remote.yaml @@ -59,7 +59,7 @@ services: REDIS_URL: ${REDIS_URL} REDIS_HOST: ${host_ip} INDEX_NAME: ${INDEX_NAME} - HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} + HF_TOKEN: ${HF_TOKEN} LOGFLAG: true restart: unless-stopped tei-embedding-serving: @@ -116,7 +116,7 @@ services: REDIS_RETRIEVER_PORT: ${REDIS_RETRIEVER_PORT} INDEX_NAME: ${INDEX_NAME} TEI_EMBEDDING_ENDPOINT: ${TEI_EMBEDDING_ENDPOINT} - HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} + HF_TOKEN: ${HF_TOKEN} LOGFLAG: ${LOGFLAG} RETRIEVER_COMPONENT_NAME: ${RETRIEVER_COMPONENT_NAME:-OPEA_RETRIEVER_REDIS} restart: unless-stopped diff --git a/CodeGen/docker_compose/intel/cpu/xeon/compose_tgi.yaml b/CodeGen/docker_compose/intel/cpu/xeon/compose_tgi.yaml index a66d3258a8..ed4ed93225 100644 --- 
a/CodeGen/docker_compose/intel/cpu/xeon/compose_tgi.yaml +++ b/CodeGen/docker_compose/intel/cpu/xeon/compose_tgi.yaml @@ -32,7 +32,7 @@ services: https_proxy: ${https_proxy} LLM_ENDPOINT: ${LLM_ENDPOINT} LLM_MODEL_ID: ${LLM_MODEL_ID} - HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} + HF_TOKEN: ${HF_TOKEN} restart: unless-stopped llm-tgi-service: extends: llm-base @@ -103,7 +103,7 @@ services: REDIS_URL: ${REDIS_URL} REDIS_HOST: ${host_ip} INDEX_NAME: ${INDEX_NAME} - HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} + HF_TOKEN: ${HF_TOKEN} LOGFLAG: true healthcheck: test: ["CMD-SHELL", "curl -f http://localhost:5000/v1/health_check || exit 1"] @@ -165,7 +165,7 @@ services: REDIS_RETRIEVER_PORT: ${REDIS_RETRIEVER_PORT} INDEX_NAME: ${INDEX_NAME} TEI_EMBEDDING_ENDPOINT: ${TEI_EMBEDDING_ENDPOINT} - HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} + HF_TOKEN: ${HF_TOKEN} LOGFLAG: ${LOGFLAG} RETRIEVER_COMPONENT_NAME: ${RETRIEVER_COMPONENT_NAME:-OPEA_RETRIEVER_REDIS} restart: unless-stopped diff --git a/CodeGen/docker_compose/intel/hpu/gaudi/compose.yaml b/CodeGen/docker_compose/intel/hpu/gaudi/compose.yaml index ab8c059bd3..59640f0dca 100644 --- a/CodeGen/docker_compose/intel/hpu/gaudi/compose.yaml +++ b/CodeGen/docker_compose/intel/hpu/gaudi/compose.yaml @@ -39,7 +39,7 @@ services: https_proxy: ${https_proxy} LLM_ENDPOINT: ${LLM_ENDPOINT} LLM_MODEL_ID: ${LLM_MODEL_ID} - HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} + HF_TOKEN: ${HF_TOKEN} restart: unless-stopped llm-vllm-service: extends: llm-base @@ -111,7 +111,7 @@ services: REDIS_URL: ${REDIS_URL} REDIS_HOST: ${host_ip} INDEX_NAME: ${INDEX_NAME} - HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} + HF_TOKEN: ${HF_TOKEN} LOGFLAG: true healthcheck: test: ["CMD-SHELL", "curl -f http://localhost:5000/v1/health_check || exit 1"] @@ -173,7 +173,7 @@ services: REDIS_RETRIEVER_PORT: ${REDIS_RETRIEVER_PORT} INDEX_NAME: ${INDEX_NAME} TEI_EMBEDDING_ENDPOINT: ${TEI_EMBEDDING_ENDPOINT} - HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} + HF_TOKEN: ${HF_TOKEN} LOGFLAG: ${LOGFLAG} 
RETRIEVER_COMPONENT_NAME: ${RETRIEVER_COMPONENT_NAME:-OPEA_RETRIEVER_REDIS} restart: unless-stopped diff --git a/CodeGen/docker_compose/intel/hpu/gaudi/compose_tgi.yaml b/CodeGen/docker_compose/intel/hpu/gaudi/compose_tgi.yaml index 1dbe9358e8..78d4323667 100644 --- a/CodeGen/docker_compose/intel/hpu/gaudi/compose_tgi.yaml +++ b/CodeGen/docker_compose/intel/hpu/gaudi/compose_tgi.yaml @@ -39,7 +39,7 @@ services: https_proxy: ${https_proxy} LLM_ENDPOINT: ${LLM_ENDPOINT} LLM_MODEL_ID: ${LLM_MODEL_ID} - HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} + HF_TOKEN: ${HF_TOKEN} restart: unless-stopped llm-tgi-service: extends: llm-base @@ -111,7 +111,7 @@ services: REDIS_URL: ${REDIS_URL} REDIS_HOST: ${host_ip} INDEX_NAME: ${INDEX_NAME} - HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} + HF_TOKEN: ${HF_TOKEN} LOGFLAG: true healthcheck: test: ["CMD-SHELL", "curl -f http://localhost:5000/v1/health_check || exit 1"] @@ -173,7 +173,7 @@ services: REDIS_RETRIEVER_PORT: ${REDIS_RETRIEVER_PORT} INDEX_NAME: ${INDEX_NAME} TEI_EMBEDDING_ENDPOINT: ${TEI_EMBEDDING_ENDPOINT} - HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} + HF_TOKEN: ${HF_TOKEN} LOGFLAG: ${LOGFLAG} RETRIEVER_COMPONENT_NAME: ${RETRIEVER_COMPONENT_NAME:-OPEA_RETRIEVER_REDIS} restart: unless-stopped diff --git a/CodeTrans/docker_compose/amd/gpu/rocm/compose.yaml b/CodeTrans/docker_compose/amd/gpu/rocm/compose.yaml index f73a891a10..910df3d71a 100644 --- a/CodeTrans/docker_compose/amd/gpu/rocm/compose.yaml +++ b/CodeTrans/docker_compose/amd/gpu/rocm/compose.yaml @@ -17,7 +17,7 @@ services: https_proxy: ${https_proxy} TGI_LLM_ENDPOINT: ${CODETRANS_TGI_LLM_ENDPOINT} HUGGING_FACE_HUB_TOKEN: ${CODEGEN_HUGGINGFACEHUB_API_TOKEN} - HUGGINGFACEHUB_API_TOKEN: ${CODEGEN_HUGGINGFACEHUB_API_TOKEN} + HF_TOKEN: ${CODETRANS_HUGGINGFACEHUB_API_TOKEN} host_ip: ${host_ip} healthcheck: test: ["CMD-SHELL", "curl -f http://${HOST_IP}:${CODETRANS_TGI_SERVICE_PORT}/health || exit 1"] @@ -50,7 +50,7 @@ services: https_proxy: ${https_proxy} LLM_ENDPOINT: 
"http://codetrans-tgi-service" LLM_MODEL_ID: ${CODETRANS_LLM_MODEL_ID} - HUGGINGFACEHUB_API_TOKEN: ${CODETRANS_HUGGINGFACEHUB_API_TOKEN} + HF_TOKEN: ${CODETRANS_HUGGINGFACEHUB_API_TOKEN} restart: unless-stopped codetrans-backend-server: image: ${REGISTRY:-opea}/codetrans:${TAG:-latest} diff --git a/CodeTrans/docker_compose/amd/gpu/rocm/compose_vllm.yaml b/CodeTrans/docker_compose/amd/gpu/rocm/compose_vllm.yaml index dc39695792..e04465e42b 100644 --- a/CodeTrans/docker_compose/amd/gpu/rocm/compose_vllm.yaml +++ b/CodeTrans/docker_compose/amd/gpu/rocm/compose_vllm.yaml @@ -12,7 +12,6 @@ services: no_proxy: ${no_proxy} http_proxy: ${http_proxy} https_proxy: ${https_proxy} - HUGGINGFACEHUB_API_TOKEN: ${CODETRANS_HUGGINGFACEHUB_API_TOKEN} HF_TOKEN: ${CODETRANS_HUGGINGFACEHUB_API_TOKEN} HF_HUB_DISABLE_PROGRESS_BARS: 1 HF_HUB_ENABLE_HF_TRANSFER: 0 @@ -53,7 +52,6 @@ services: https_proxy: ${https_proxy} LLM_ENDPOINT: ${CODETRANS_LLM_ENDPOINT} LLM_MODEL_ID: ${CODETRANS_LLM_MODEL_ID} - HUGGINGFACEHUB_API_TOKEN: ${CODETRANS_HUGGINGFACEHUB_API_TOKEN} HF_TOKEN: ${CODETRANS_HUGGINGFACEHUB_API_TOKEN} LLM_COMPONENT_NAME: "OpeaTextGenService" restart: unless-stopped diff --git a/CodeTrans/docker_compose/intel/cpu/xeon/compose_tgi.yaml b/CodeTrans/docker_compose/intel/cpu/xeon/compose_tgi.yaml index 1eda99bccc..5b920bd326 100644 --- a/CodeTrans/docker_compose/intel/cpu/xeon/compose_tgi.yaml +++ b/CodeTrans/docker_compose/intel/cpu/xeon/compose_tgi.yaml @@ -38,7 +38,7 @@ services: LLM_ENDPOINT: ${LLM_ENDPOINT} LLM_MODEL_ID: ${LLM_MODEL_ID} LLM_COMPONENT_NAME: ${LLM_COMPONENT_NAME} - HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} + HF_TOKEN: ${HF_TOKEN} restart: unless-stopped codetrans-xeon-backend-server: image: ${REGISTRY:-opea}/codetrans:${TAG:-latest} diff --git a/CodeTrans/docker_compose/intel/hpu/gaudi/compose_tgi.yaml b/CodeTrans/docker_compose/intel/hpu/gaudi/compose_tgi.yaml index b2b4c268c8..f06bedbf0a 100644 --- a/CodeTrans/docker_compose/intel/hpu/gaudi/compose_tgi.yaml +++ 
b/CodeTrans/docker_compose/intel/hpu/gaudi/compose_tgi.yaml @@ -42,7 +42,7 @@ services: LLM_ENDPOINT: ${LLM_ENDPOINT} LLM_MODEL_ID: ${LLM_MODEL_ID} LLM_COMPONENT_NAME: ${LLM_COMPONENT_NAME} - HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} + HF_TOKEN: ${HF_TOKEN} restart: unless-stopped codetrans-gaudi-backend-server: image: ${REGISTRY:-opea}/codetrans:${TAG:-latest} diff --git a/DBQnA/docker_compose/amd/gpu/rocm/compose.yaml b/DBQnA/docker_compose/amd/gpu/rocm/compose.yaml index 1d5e6f6b32..6af0afac39 100644 --- a/DBQnA/docker_compose/amd/gpu/rocm/compose.yaml +++ b/DBQnA/docker_compose/amd/gpu/rocm/compose.yaml @@ -20,7 +20,7 @@ services: TGI_SERVICE_PORT: ${DBQNA_TGI_SERVICE_PORT} MODEL_ID: ${DBQNA_LLM_MODEL_ID} HUGGING_FACE_HUB_TOKEN: ${DBQNA_HUGGINGFACEHUB_API_TOKEN} - HUGGINGFACEHUB_API_TOKEN: ${DBQNA_HUGGINGFACEHUB_API_TOKEN} + HF_TOKEN: ${DBQNA_HUGGINGFACEHUB_API_TOKEN} shm_size: 1g devices: - /dev/kfd:/dev/kfd diff --git a/DBQnA/docker_compose/intel/cpu/xeon/compose.yaml b/DBQnA/docker_compose/intel/cpu/xeon/compose.yaml index f8a948a234..cb9339b834 100644 --- a/DBQnA/docker_compose/intel/cpu/xeon/compose.yaml +++ b/DBQnA/docker_compose/intel/cpu/xeon/compose.yaml @@ -14,7 +14,6 @@ services: http_proxy: ${http_proxy} https_proxy: ${https_proxy} HF_TOKEN: ${HF_TOKEN} - HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} shm_size: 1g command: --model-id ${LLM_MODEL_ID} diff --git a/DBQnA/docker_compose/intel/cpu/xeon/set_env.sh b/DBQnA/docker_compose/intel/cpu/xeon/set_env.sh index 3990c7b114..909ca15dfc 100755 --- a/DBQnA/docker_compose/intel/cpu/xeon/set_env.sh +++ b/DBQnA/docker_compose/intel/cpu/xeon/set_env.sh @@ -9,7 +9,6 @@ popd > /dev/null export host_ip=${ip_address} export no_proxy=$no_proxy,$host_ip,dbqna-xeon-react-ui-server,text2sql-service,tgi-service,postgres-container -export HUGGINGFACEHUB_API_TOKEN=${HF_TOKEN} export HF_TOKEN=${HF_TOKEN} export POSTGRES_USER=postgres export POSTGRES_PASSWORD=testpwd diff --git 
a/DocIndexRetriever/docker_compose/intel/cpu/xeon/compose.yaml b/DocIndexRetriever/docker_compose/intel/cpu/xeon/compose.yaml index 457afaae46..bc3d65e282 100644 --- a/DocIndexRetriever/docker_compose/intel/cpu/xeon/compose.yaml +++ b/DocIndexRetriever/docker_compose/intel/cpu/xeon/compose.yaml @@ -28,7 +28,7 @@ services: REDIS_HOST: ${REDIS_HOST} INDEX_NAME: ${INDEX_NAME} TEI_EMBEDDING_ENDPOINT: ${TEI_EMBEDDING_ENDPOINT} - HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} + HF_TOKEN: ${HF_TOKEN} LOGFLAG: ${LOGFLAG} healthcheck: test: ["CMD-SHELL", "curl -f http://localhost:5000/v1/health_check || exit 1"] @@ -49,7 +49,7 @@ services: no_proxy: ${no_proxy} http_proxy: ${http_proxy} https_proxy: ${https_proxy} - HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} + HF_TOKEN: ${HF_TOKEN} host_ip: ${host_ip} healthcheck: test: ["CMD-SHELL", "curl -f http://localhost:80/health || exit 1"] @@ -87,7 +87,7 @@ services: https_proxy: ${https_proxy} REDIS_URL: ${REDIS_URL} INDEX_NAME: ${INDEX_NAME} - HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} + HF_TOKEN: ${HF_TOKEN} TEI_EMBEDDING_ENDPOINT: ${TEI_EMBEDDING_ENDPOINT} LOGFLAG: ${LOGFLAG} RETRIEVER_COMPONENT_NAME: "OPEA_RETRIEVER_REDIS" @@ -105,7 +105,7 @@ services: no_proxy: ${no_proxy} http_proxy: ${http_proxy} https_proxy: ${https_proxy} - HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} + HF_TOKEN: ${HF_TOKEN} HF_HUB_DISABLE_PROGRESS_BARS: 1 HF_HUB_ENABLE_HF_TRANSFER: 0 host_ip: ${host_ip} @@ -129,7 +129,7 @@ services: https_proxy: ${https_proxy} RERANK_TYPE: ${RERANK_TYPE} TEI_RERANKING_ENDPOINT: ${TEI_RERANKING_ENDPOINT} - HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} + HF_TOKEN: ${HF_TOKEN} HF_HUB_DISABLE_PROGRESS_BARS: 1 HF_HUB_ENABLE_HF_TRANSFER: 0 LOGFLAG: ${LOGFLAG} diff --git a/DocIndexRetriever/docker_compose/intel/cpu/xeon/compose_milvus.yaml b/DocIndexRetriever/docker_compose/intel/cpu/xeon/compose_milvus.yaml index eeacffa17b..3585585568 100644 --- a/DocIndexRetriever/docker_compose/intel/cpu/xeon/compose_milvus.yaml +++ 
b/DocIndexRetriever/docker_compose/intel/cpu/xeon/compose_milvus.yaml @@ -76,7 +76,7 @@ services: DATAPREP_COMPONENT_NAME: "OPEA_DATAPREP_MILVUS" MILVUS_HOST: ${MILVUS_HOST} TEI_EMBEDDING_ENDPOINT: ${TEI_EMBEDDING_ENDPOINT} - HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} + HF_TOKEN: ${HF_TOKEN} LOGFLAG: ${LOGFLAG} healthcheck: test: ["CMD-SHELL", "curl -f http://localhost:5000/v1/health_check || exit 1"] @@ -107,7 +107,7 @@ services: no_proxy: ${no_proxy} http_proxy: ${http_proxy} https_proxy: ${https_proxy} - HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} + HF_TOKEN: ${HF_TOKEN} host_ip: ${host_ip} healthcheck: test: ["CMD-SHELL", "curl -f http://localhost:80/health || exit 1"] @@ -148,7 +148,7 @@ services: http_proxy: ${http_proxy} https_proxy: ${https_proxy} MILVUS_HOST: ${host_ip} - HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} + HF_TOKEN: ${HF_TOKEN} TEI_EMBEDDING_ENDPOINT: ${TEI_EMBEDDING_ENDPOINT} LOGFLAG: ${LOGFLAG} RETRIEVER_COMPONENT_NAME: "OPEA_RETRIEVER_MILVUS" @@ -167,7 +167,7 @@ services: no_proxy: ${no_proxy} http_proxy: ${http_proxy} https_proxy: ${https_proxy} - HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} + HF_TOKEN: ${HF_TOKEN} HF_HUB_DISABLE_PROGRESS_BARS: 1 HF_HUB_ENABLE_HF_TRANSFER: 0 host_ip: ${host_ip} @@ -194,7 +194,7 @@ services: https_proxy: ${https_proxy} RERANK_TYPE: ${RERANK_TYPE} TEI_RERANKING_ENDPOINT: ${TEI_RERANKING_ENDPOINT} - HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} + HF_TOKEN: ${HF_TOKEN} HF_HUB_DISABLE_PROGRESS_BARS: 1 HF_HUB_ENABLE_HF_TRANSFER: 0 LOGFLAG: ${LOGFLAG} diff --git a/DocIndexRetriever/docker_compose/intel/cpu/xeon/compose_without_rerank.yaml b/DocIndexRetriever/docker_compose/intel/cpu/xeon/compose_without_rerank.yaml index 99af075420..c26c426bec 100644 --- a/DocIndexRetriever/docker_compose/intel/cpu/xeon/compose_without_rerank.yaml +++ b/DocIndexRetriever/docker_compose/intel/cpu/xeon/compose_without_rerank.yaml @@ -25,7 +25,7 @@ services: REDIS_HOST: redis-vector-db INDEX_NAME: ${INDEX_NAME:-rag-redis} TEI_ENDPOINT: 
http://tei-embedding-service:80 - HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} + HF_TOKEN: ${HF_TOKEN} LOGFLAG: ${LOGFLAG} healthcheck: test: ["CMD-SHELL", "curl -f http://localhost:5000/v1/health_check || exit 1"] @@ -46,7 +46,7 @@ services: no_proxy: ${no_proxy} http_proxy: ${http_proxy} https_proxy: ${https_proxy} - HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} + HF_TOKEN: ${HF_TOKEN} host_ip: ${host_ip} healthcheck: test: ["CMD-SHELL", "curl -f http://localhost:80/health || exit 1"] @@ -84,7 +84,7 @@ services: https_proxy: ${https_proxy} REDIS_URL: redis://redis-vector-db:6379 INDEX_NAME: ${INDEX_NAME:-rag-redis} - HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} + HF_TOKEN: ${HF_TOKEN} TEI_EMBEDDING_ENDPOINT: http://tei-embedding-service:80 LOGFLAG: ${LOGFLAG} RETRIEVER_COMPONENT_NAME: "OPEA_RETRIEVER_REDIS" diff --git a/DocIndexRetriever/docker_compose/intel/hpu/gaudi/compose.yaml b/DocIndexRetriever/docker_compose/intel/hpu/gaudi/compose.yaml index ded82b6c1b..5d24f476a4 100644 --- a/DocIndexRetriever/docker_compose/intel/hpu/gaudi/compose.yaml +++ b/DocIndexRetriever/docker_compose/intel/hpu/gaudi/compose.yaml @@ -28,7 +28,7 @@ services: REDIS_URL: ${REDIS_URL} INDEX_NAME: ${INDEX_NAME} TEI_EMBEDDING_ENDPOINT: ${TEI_EMBEDDING_ENDPOINT} - HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} + HF_TOKEN: ${HF_TOKEN} healthcheck: test: ["CMD-SHELL", "curl -f http://localhost:5000/v1/health_check || exit 1"] interval: 10s @@ -96,7 +96,7 @@ services: LOGFLAG: ${LOGFLAG} RETRIEVER_COMPONENT_NAME: "OPEA_RETRIEVER_REDIS" TEI_EMBEDDING_ENDPOINT: ${TEI_EMBEDDING_ENDPOINT} - HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} + HF_TOKEN: ${HF_TOKEN} restart: unless-stopped tei-reranking-service: image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.6 @@ -111,7 +111,7 @@ services: no_proxy: ${no_proxy} http_proxy: ${http_proxy} https_proxy: ${https_proxy} - HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} + HF_TOKEN: ${HF_TOKEN} HF_HUB_DISABLE_PROGRESS_BARS: 1 HF_HUB_ENABLE_HF_TRANSFER: 0 host_ip: ${host_ip} @@ -135,7 +135,7 @@ 
services: https_proxy: ${https_proxy} RERANK_TYPE: ${RERANK_TYPE} TEI_RERANKING_ENDPOINT: ${TEI_RERANKING_ENDPOINT} - HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} + HF_TOKEN: ${HF_TOKEN} HF_HUB_DISABLE_PROGRESS_BARS: 1 HF_HUB_ENABLE_HF_TRANSFER: 0 LOGFLAG: ${LOGFLAG} diff --git a/DocIndexRetriever/docker_compose/intel/hpu/gaudi/compose_milvus.yaml b/DocIndexRetriever/docker_compose/intel/hpu/gaudi/compose_milvus.yaml index c05ec1cce4..80e2d988b6 100644 --- a/DocIndexRetriever/docker_compose/intel/hpu/gaudi/compose_milvus.yaml +++ b/DocIndexRetriever/docker_compose/intel/hpu/gaudi/compose_milvus.yaml @@ -76,7 +76,7 @@ services: DATAPREP_COMPONENT_NAME: "OPEA_DATAPREP_MILVUS" MILVUS_HOST: ${MILVUS_HOST} TEI_EMBEDDING_ENDPOINT: ${TEI_EMBEDDING_ENDPOINT} - HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} + HF_TOKEN: ${HF_TOKEN} LOGFLAG: ${LOGFLAG} healthcheck: test: ["CMD-SHELL", "curl -f http://localhost:5000/v1/health_check || exit 1"] @@ -154,7 +154,7 @@ services: http_proxy: ${http_proxy} https_proxy: ${https_proxy} MILVUS_HOST: ${host_ip} - HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} + HF_TOKEN: ${HF_TOKEN} TEI_EMBEDDING_ENDPOINT: ${TEI_EMBEDDING_ENDPOINT} LOGFLAG: ${LOGFLAG} RETRIEVER_COMPONENT_NAME: "OPEA_RETRIEVER_MILVUS" @@ -173,7 +173,7 @@ services: no_proxy: ${no_proxy} http_proxy: ${http_proxy} https_proxy: ${https_proxy} - HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} + HF_TOKEN: ${HF_TOKEN} HF_HUB_DISABLE_PROGRESS_BARS: 1 HF_HUB_ENABLE_HF_TRANSFER: 0 host_ip: ${host_ip} @@ -200,7 +200,7 @@ services: https_proxy: ${https_proxy} RERANK_TYPE: ${RERANK_TYPE} TEI_RERANKING_ENDPOINT: ${TEI_RERANKING_ENDPOINT} - HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} + HF_TOKEN: ${HF_TOKEN} HF_HUB_DISABLE_PROGRESS_BARS: 1 HF_HUB_ENABLE_HF_TRANSFER: 0 LOGFLAG: ${LOGFLAG} diff --git a/DocSum/docker_compose/amd/gpu/rocm/compose.yaml b/DocSum/docker_compose/amd/gpu/rocm/compose.yaml index 14f00f78c0..563f1468f1 100644 --- a/DocSum/docker_compose/amd/gpu/rocm/compose.yaml +++ 
b/DocSum/docker_compose/amd/gpu/rocm/compose.yaml @@ -12,7 +12,7 @@ services: http_proxy: ${http_proxy} https_proxy: ${https_proxy} TGI_LLM_ENDPOINT: "http://${HOST_IP}:${DOCSUM_TGI_SERVICE_PORT}" - HUGGINGFACEHUB_API_TOKEN: ${DOCSUM_HUGGINGFACEHUB_API_TOKEN} + HF_TOKEN: ${DOCSUM_HUGGINGFACEHUB_API_TOKEN} host_ip: ${host_ip} DOCSUM_TGI_SERVICE_PORT: ${DOCSUM_TGI_SERVICE_PORT} volumes: @@ -49,7 +49,7 @@ services: http_proxy: ${http_proxy} https_proxy: ${https_proxy} LLM_ENDPOINT: ${DOCSUM_TGI_LLM_ENDPOINT} - HUGGINGFACEHUB_API_TOKEN: ${DOCSUM_HUGGINGFACEHUB_API_TOKEN} + HF_TOKEN: ${DOCSUM_HUGGINGFACEHUB_API_TOKEN} MAX_INPUT_TOKENS: ${DOCSUM_MAX_INPUT_TOKENS} MAX_TOTAL_TOKENS: ${DOCSUM_MAX_TOTAL_TOKENS} LLM_MODEL_ID: ${DOCSUM_LLM_MODEL_ID} diff --git a/DocSum/docker_compose/amd/gpu/rocm/compose_vllm.yaml b/DocSum/docker_compose/amd/gpu/rocm/compose_vllm.yaml index 2378a8b2ce..d8a678f695 100644 --- a/DocSum/docker_compose/amd/gpu/rocm/compose_vllm.yaml +++ b/DocSum/docker_compose/amd/gpu/rocm/compose_vllm.yaml @@ -11,7 +11,6 @@ services: no_proxy: ${no_proxy} http_proxy: ${http_proxy} https_proxy: ${https_proxy} - HUGGINGFACEHUB_API_TOKEN: ${DOCSUM_HUGGINGFACEHUB_API_TOKEN} HF_TOKEN: ${DOCSUM_HUGGINGFACEHUB_API_TOKEN} HF_HUB_DISABLE_PROGRESS_BARS: 1 HF_HUB_ENABLE_HF_TRANSFER: 0 @@ -52,7 +51,7 @@ services: http_proxy: ${http_proxy} https_proxy: ${https_proxy} LLM_ENDPOINT: ${DOCSUM_LLM_ENDPOINT} - HUGGINGFACEHUB_API_TOKEN: ${DOCSUM_HUGGINGFACEHUB_API_TOKEN} + HF_TOKEN: ${DOCSUM_HUGGINGFACEHUB_API_TOKEN} MAX_INPUT_TOKENS: ${DOCSUM_MAX_INPUT_TOKENS} MAX_TOTAL_TOKENS: ${DOCSUM_MAX_TOTAL_TOKENS} LLM_MODEL_ID: ${DOCSUM_LLM_MODEL_ID} diff --git a/DocSum/docker_compose/intel/cpu/xeon/compose.yaml b/DocSum/docker_compose/intel/cpu/xeon/compose.yaml index 5489325d50..f60a63e47a 100644 --- a/DocSum/docker_compose/intel/cpu/xeon/compose.yaml +++ b/DocSum/docker_compose/intel/cpu/xeon/compose.yaml @@ -40,7 +40,6 @@ services: https_proxy: ${https_proxy} LLM_ENDPOINT: 
${LLM_ENDPOINT} LLM_MODEL_ID: ${LLM_MODEL_ID} - HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} HF_TOKEN: ${HF_TOKEN} MAX_INPUT_TOKENS: ${MAX_INPUT_TOKENS} MAX_TOTAL_TOKENS: ${MAX_TOTAL_TOKENS} diff --git a/DocSum/docker_compose/intel/cpu/xeon/compose_tgi.yaml b/DocSum/docker_compose/intel/cpu/xeon/compose_tgi.yaml index 4b799bd4cc..b2d643ae55 100644 --- a/DocSum/docker_compose/intel/cpu/xeon/compose_tgi.yaml +++ b/DocSum/docker_compose/intel/cpu/xeon/compose_tgi.yaml @@ -39,7 +39,6 @@ services: https_proxy: ${https_proxy} LLM_ENDPOINT: ${LLM_ENDPOINT} LLM_MODEL_ID: ${LLM_MODEL_ID} - HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} HF_TOKEN: ${HF_TOKEN} MAX_INPUT_TOKENS: ${MAX_INPUT_TOKENS} MAX_TOTAL_TOKENS: ${MAX_TOTAL_TOKENS} diff --git a/DocSum/docker_compose/intel/hpu/gaudi/compose.yaml b/DocSum/docker_compose/intel/hpu/gaudi/compose.yaml index 739a41feba..e53b8064c7 100644 --- a/DocSum/docker_compose/intel/hpu/gaudi/compose.yaml +++ b/DocSum/docker_compose/intel/hpu/gaudi/compose.yaml @@ -44,7 +44,6 @@ services: no_proxy: ${no_proxy} http_proxy: ${http_proxy} https_proxy: ${https_proxy} - HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} HF_TOKEN: ${HF_TOKEN} MAX_INPUT_TOKENS: ${MAX_INPUT_TOKENS} MAX_TOTAL_TOKENS: ${MAX_TOTAL_TOKENS} diff --git a/DocSum/docker_compose/intel/hpu/gaudi/compose_tgi.yaml b/DocSum/docker_compose/intel/hpu/gaudi/compose_tgi.yaml index 987706b0ee..b7c008c63d 100644 --- a/DocSum/docker_compose/intel/hpu/gaudi/compose_tgi.yaml +++ b/DocSum/docker_compose/intel/hpu/gaudi/compose_tgi.yaml @@ -48,7 +48,6 @@ services: no_proxy: ${no_proxy} http_proxy: ${http_proxy} https_proxy: ${https_proxy} - HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} HF_TOKEN: ${HF_TOKEN} MAX_INPUT_TOKENS: ${MAX_INPUT_TOKENS} MAX_TOTAL_TOKENS: ${MAX_TOTAL_TOKENS}