Compare commits

..

11 Commits

Author SHA1 Message Date
pre-commit-ci[bot]
97d277cd1d [pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
2025-01-24 02:30:47 +00:00
letonghan
3f918422c9 refine script for hardcodes variables and test codes
Signed-off-by: letonghan <letong.han@intel.com>
2025-01-24 10:30:14 +08:00
letonghan
53e15bfb79 fix merge conflict
Signed-off-by: letonghan <letong.han@intel.com>
2025-01-23 15:13:19 +08:00
letonghan
bbe649c44c fix preci issues of variable names conflicts
Signed-off-by: letonghan <letong.han@intel.com>
2025-01-23 15:12:08 +08:00
pre-commit-ci[bot]
6e26d4615a [pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
2025-01-23 06:44:39 +00:00
letonghan
500fcdb975 fix merge conflicts
Signed-off-by: letonghan <letong.han@intel.com>
2025-01-23 14:44:09 +08:00
letonghan
4825420f04 Merge branch 'main' of https://github.com/opea-project/GenAIExamples into refactor_benchmark 2025-01-23 14:42:10 +08:00
letonghan
78a1efd7f0 refactor python script into deploy_and_benchmark.py
Signed-off-by: letonghan <letong.han@intel.com>
2025-01-23 14:41:11 +08:00
Letong Han
9b9314b062 Merge branch 'main' into refactor_benchmark 2025-01-21 15:06:19 +08:00
pre-commit-ci[bot]
8b85e8c793 [pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
2025-01-21 07:05:57 +00:00
letonghan
eba1c300b3 Support ChatQnA benchmark pipeline on pubmed dataset.
Add file benchmark.py, benchmark.yaml, and benchmark_requirements.txt.
Related PR in GenAIEval: https://github.com/opea-project/GenAIEval/pull/228

Signed-off-by: letonghan <letong.han@intel.com>
2025-01-21 15:02:30 +08:00
733 changed files with 18042 additions and 58954 deletions

View File

@@ -1,3 +1,2 @@
ModelIn
modelin
pressEnter

View File

@@ -1,65 +0,0 @@
# Copyright (C) 2025 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
name: Build Comps Base Image
permissions: read-all
on:
workflow_call:
inputs:
node:
required: true
type: string
build:
default: true
required: false
type: boolean
tag:
default: "latest"
required: false
type: string
opea_branch:
default: "main"
required: false
type: string
inject_commit:
default: false
required: false
type: boolean
jobs:
pre-build-image-check:
runs-on: ubuntu-latest
outputs:
should_skip: ${{ steps.check-skip.outputs.should_skip }}
steps:
- name: Check if job should be skipped
id: check-skip
run: |
should_skip=false
if [[ "${{ inputs.node }}" == "gaudi3" || "${{ inputs.node }}" == "rocm" || "${{ inputs.node }}" == "arc" ]]; then
should_skip=true
fi
echo "should_skip=$should_skip"
echo "should_skip=$should_skip" >> $GITHUB_OUTPUT
build-images:
needs: [ pre-build-image-check ]
if: ${{ needs.pre-build-image-check.outputs.should_skip == 'false' && fromJSON(inputs.build) }}
runs-on: "docker-build-${{ inputs.node }}"
steps:
- name: Clean Up Working Directory
run: sudo rm -rf ${{github.workspace}}/*
- name: Clone Required Repo
run: |
git clone --depth 1 --branch ${{ inputs.opea_branch }} https://github.com/opea-project/GenAIComps.git
cd GenAIComps && git rev-parse HEAD && cd ../ && ls -l
- name: Build Image
uses: opea-project/validation/actions/image-build@main
with:
work_dir: ${{ github.workspace }}/GenAIComps
docker_compose_path: ${{ github.workspace }}/GenAIComps/.github/workflows/docker/compose/base-compose.yaml
registry: ${OPEA_IMAGE_REPO}opea
inject_commit: ${{ inputs.inject_commit }}
tag: ${{ inputs.tag }}

View File

@@ -1,103 +0,0 @@
# Copyright (C) 2025 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
name: Build Images
permissions: read-all
on:
workflow_call:
inputs:
node:
required: true
type: string
build:
default: true
required: false
type: boolean
example:
required: true
type: string
services:
default: ""
required: false
type: string
tag:
default: "latest"
required: false
type: string
opea_branch:
default: "main"
required: false
type: string
inject_commit:
default: false
required: false
type: boolean
jobs:
pre-build-image-check:
runs-on: ubuntu-latest
outputs:
should_skip: ${{ steps.check-skip.outputs.should_skip }}
steps:
- name: Check if job should be skipped
id: check-skip
run: |
should_skip=false
if [[ "${{ inputs.node }}" == "gaudi3" || "${{ inputs.node }}" == "rocm" || "${{ inputs.node }}" == "arc" ]]; then
should_skip=true
fi
echo "should_skip=$should_skip"
echo "should_skip=$should_skip" >> $GITHUB_OUTPUT
build-images:
needs: [ pre-build-image-check ]
if: ${{ needs.pre-build-image-check.outputs.should_skip == 'false' && fromJSON(inputs.build) }}
runs-on: "docker-build-${{ inputs.node }}"
steps:
- name: Clean Up Working Directory
run: sudo rm -rf ${{github.workspace}}/*
- name: Get Checkout Ref
run: |
if [ "${{ github.event_name }}" == "pull_request" ] || [ "${{ github.event_name }}" == "pull_request_target" ]; then
echo "CHECKOUT_REF=refs/pull/${{ github.event.number }}/merge" >> $GITHUB_ENV
else
echo "CHECKOUT_REF=${{ github.ref }}" >> $GITHUB_ENV
fi
- name: Checkout out GenAIExamples
uses: actions/checkout@v4
with:
ref: ${{ env.CHECKOUT_REF }}
fetch-depth: 0
- name: Clone Required Repo
run: |
cd ${{ github.workspace }}/${{ inputs.example }}/docker_image_build
docker_compose_path=${{ github.workspace }}/${{ inputs.example }}/docker_image_build/build.yaml
if [[ $(grep -c "vllm:" ${docker_compose_path}) != 0 ]]; then
git clone https://github.com/vllm-project/vllm.git && cd vllm
# Get the latest tag
VLLM_VER=$(git describe --tags "$(git rev-list --tags --max-count=1)")
echo "Check out vLLM tag ${VLLM_VER}"
git checkout ${VLLM_VER} &> /dev/null && cd ../
fi
if [[ $(grep -c "vllm-gaudi:" ${docker_compose_path}) != 0 ]]; then
git clone https://github.com/HabanaAI/vllm-fork.git && cd vllm-fork
# Get the latest tag
VLLM_VER=$(git describe --tags "$(git rev-list --tags --max-count=1)")
echo "Check out vLLM tag ${VLLM_VER}"
git checkout ${VLLM_VER} &> /dev/null && cd ../
fi
git clone --depth 1 --branch ${{ inputs.opea_branch }} https://github.com/opea-project/GenAIComps.git
cd GenAIComps && git rev-parse HEAD && cd ../
- name: Build Image
uses: opea-project/validation/actions/image-build@main
with:
work_dir: ${{ github.workspace }}/${{ inputs.example }}/docker_image_build
docker_compose_path: ${{ github.workspace }}/${{ inputs.example }}/docker_image_build/build.yaml
service_list: ${{ inputs.services }}
registry: ${OPEA_IMAGE_REPO}opea
inject_commit: ${{ inputs.inject_commit }}
tag: ${{ inputs.tag }}

View File

@@ -43,39 +43,68 @@ on:
inject_commit:
default: false
required: false
type: boolean
use_model_cache:
default: false
required: false
type: boolean
type: string
jobs:
####################################################################################################
# Image Build
####################################################################################################
build-images:
uses: ./.github/workflows/_build_image.yml
with:
node: ${{ inputs.node }}
build: ${{ fromJSON(inputs.build) }}
example: ${{ inputs.example }}
services: ${{ inputs.services }}
tag: ${{ inputs.tag }}
opea_branch: ${{ inputs.opea_branch }}
inject_commit: ${{ inputs.inject_commit }}
runs-on: "docker-build-${{ inputs.node }}"
steps:
- name: Clean Up Working Directory
run: sudo rm -rf ${{github.workspace}}/*
- name: Get Checkout Ref
run: |
if [ "${{ github.event_name }}" == "pull_request" ] || [ "${{ github.event_name }}" == "pull_request_target" ]; then
echo "CHECKOUT_REF=refs/pull/${{ github.event.number }}/merge" >> $GITHUB_ENV
else
echo "CHECKOUT_REF=${{ github.ref }}" >> $GITHUB_ENV
fi
- name: Checkout out GenAIExamples
uses: actions/checkout@v4
with:
ref: ${{ env.CHECKOUT_REF }}
fetch-depth: 0
- name: Clone Required Repo
run: |
cd ${{ github.workspace }}/${{ inputs.example }}/docker_image_build
docker_compose_path=${{ github.workspace }}/${{ inputs.example }}/docker_image_build/build.yaml
if [[ $(grep -c "vllm:" ${docker_compose_path}) != 0 ]]; then
git clone --depth 1 https://github.com/vllm-project/vllm.git
cd vllm && git rev-parse HEAD && cd ../
fi
if [[ $(grep -c "vllm-gaudi:" ${docker_compose_path}) != 0 ]]; then
git clone --depth 1 --branch v0.6.4.post2+Gaudi-1.19.0 https://github.com/HabanaAI/vllm-fork.git
fi
git clone --depth 1 --branch ${{ inputs.opea_branch }} https://github.com/opea-project/GenAIComps.git
cd GenAIComps && git rev-parse HEAD && cd ../
- name: Build Image
if: ${{ fromJSON(inputs.build) }}
uses: opea-project/validation/actions/image-build@main
with:
work_dir: ${{ github.workspace }}/${{ inputs.example }}/docker_image_build
docker_compose_path: ${{ github.workspace }}/${{ inputs.example }}/docker_image_build/build.yaml
service_list: ${{ inputs.services }}
registry: ${OPEA_IMAGE_REPO}opea
inject_commit: ${{ inputs.inject_commit }}
tag: ${{ inputs.tag }}
####################################################################################################
# Docker Compose Test
####################################################################################################
test-example-compose:
needs: [build-images]
if: ${{ inputs.test_compose }}
if: ${{ fromJSON(inputs.test_compose) }}
uses: ./.github/workflows/_run-docker-compose.yml
with:
tag: ${{ inputs.tag }}
example: ${{ inputs.example }}
hardware: ${{ inputs.node }}
use_model_cache: ${{ inputs.use_model_cache }}
secrets: inherit
@@ -97,7 +126,7 @@ jobs:
####################################################################################################
test-gmc-pipeline:
needs: [build-images]
if: false # ${{ fromJSON(inputs.test_gmc) }}
if: ${{ fromJSON(inputs.test_gmc) }}
uses: ./.github/workflows/_gmc-e2e.yml
with:
example: ${{ inputs.example }}

View File

@@ -97,7 +97,6 @@ jobs:
helm-test:
needs: [get-test-case]
if: ${{ needs.get-test-case.outputs.value_files != '[]' }}
strategy:
matrix:
value_file: ${{ fromJSON(needs.get-test-case.outputs.value_files) }}
@@ -134,7 +133,7 @@ jobs:
CHART_NAME="${example,,}" # CodeGen
echo "CHART_NAME=$CHART_NAME" >> $GITHUB_ENV
echo "RELEASE_NAME=${CHART_NAME}$(date +%Y%m%d%H%M%S)" >> $GITHUB_ENV
echo "NAMESPACE=${CHART_NAME}-$(head -c 4 /dev/urandom | xxd -p)" >> $GITHUB_ENV
echo "NAMESPACE=${CHART_NAME}-$(date +%Y%m%d%H%M%S)" >> $GITHUB_ENV
echo "ROLLOUT_TIMEOUT_SECONDS=600s" >> $GITHUB_ENV
echo "TEST_TIMEOUT_SECONDS=600s" >> $GITHUB_ENV
echo "KUBECTL_TIMEOUT_SECONDS=60s" >> $GITHUB_ENV
@@ -173,7 +172,7 @@ jobs:
$RELEASE_NAME \
oci://ghcr.io/opea-project/charts/${CHART_NAME} \
--set global.HUGGINGFACEHUB_API_TOKEN=${HFTOKEN} \
--set global.modelUseHostPath=/data2/hf_model \
--set global.modelUseHostPath=/home/sdp/.cache/huggingface/hub \
--set GOOGLE_API_KEY=${{ env.GOOGLE_API_KEY}} \
--set GOOGLE_CSE_ID=${{ env.GOOGLE_CSE_ID}} \
--set web-retriever.GOOGLE_API_KEY=${{ env.GOOGLE_API_KEY}} \

View File

@@ -28,10 +28,6 @@ on:
required: false
type: string
default: ""
use_model_cache:
required: false
type: boolean
default: false
jobs:
get-test-case:
runs-on: ubuntu-latest
@@ -64,14 +60,9 @@ jobs:
cd ${{ github.workspace }}/${{ inputs.example }}/tests
run_test_cases=""
if [ "${{ inputs.hardware }}" == "gaudi2" ] || [ "${{ inputs.hardware }}" == "gaudi3" ]; then
hardware="gaudi"
else
hardware="${{ inputs.hardware }}"
fi
default_test_case=$(find . -type f -name "test_compose_on_$hardware.sh" | cut -d/ -f2)
default_test_case=$(find . -type f -name "test_compose_on_${{ inputs.hardware }}.sh" | cut -d/ -f2)
if [ "$default_test_case" ]; then run_test_cases="$default_test_case"; fi
other_test_cases=$(find . -type f -name "test_compose_*_on_$hardware.sh" | cut -d/ -f2)
other_test_cases=$(find . -type f -name "test_compose_*_on_${{ inputs.hardware }}.sh" | cut -d/ -f2)
echo "default_test_case=$default_test_case"
echo "other_test_cases=$other_test_cases"
@@ -94,17 +85,12 @@ jobs:
fi
done
if [ -z "$run_test_cases" ] && [[ $(printf '%s\n' "${changed_files[@]}" | grep ${{ inputs.example }} | grep /tests/) ]]; then
run_test_cases=$other_test_cases
fi
test_cases=$(echo $run_test_cases | tr ' ' '\n' | sort -u | jq -R '.' | jq -sc '.')
echo "test_cases=$test_cases"
echo "test_cases=$test_cases" >> $GITHUB_OUTPUT
compose-test:
needs: [get-test-case]
if: ${{ needs.get-test-case.outputs.test_cases != '[""]' }}
strategy:
matrix:
test_case: ${{ fromJSON(needs.get-test-case.outputs.test_cases) }}
@@ -115,11 +101,6 @@ jobs:
- name: Clean up Working Directory
run: |
sudo rm -rf ${{github.workspace}}/* || true
# clean up containers use ports
cid=$(docker ps --format '{{.Names}} : {{.Ports}}' | grep -v ' : $' | grep -v 5000 | awk -F' : ' '{print $1}')
if [[ ! -z "$cid" ]]; then docker stop $cid && docker rm $cid && sleep 1s; fi
docker system prune -f
docker rmi $(docker images --filter reference="*/*/*:latest" -q) || true
docker rmi $(docker images --filter reference="*/*:ci" -q) || true
@@ -145,7 +126,6 @@ jobs:
shell: bash
env:
HUGGINGFACEHUB_API_TOKEN: ${{ secrets.HUGGINGFACEHUB_API_TOKEN }}
HF_TOKEN: ${{ secrets.HUGGINGFACEHUB_API_TOKEN }}
GOOGLE_CSE_ID: ${{ secrets.GOOGLE_CSE_ID }}
GOOGLE_API_KEY: ${{ secrets.GOOGLE_API_KEY }}
PINECONE_KEY: ${{ secrets.PINECONE_KEY }}
@@ -158,19 +138,10 @@ jobs:
example: ${{ inputs.example }}
hardware: ${{ inputs.hardware }}
test_case: ${{ matrix.test_case }}
use_model_cache: ${{ inputs.use_model_cache }}
run: |
cd ${{ github.workspace }}/$example/tests
if [[ "$IMAGE_REPO" == "" ]]; then export IMAGE_REPO="${OPEA_IMAGE_REPO}opea"; fi
if [[ "$use_model_cache" == "true" ]]; then
if [ -d "/data2/hf_model" ]; then
export model_cache="/data2/hf_model"
else
echo "Model cache directory /data2/hf_model does not exist"
export model_cache="~/.cache/huggingface/hub"
fi
fi
if [ -f "${test_case}" ]; then timeout 60m bash "${test_case}"; else echo "Test script {${test_case}} not found, skip test!"; fi
if [ -f ${test_case} ]; then timeout 30m bash ${test_case}; else echo "Test script {${test_case}} not found, skip test!"; fi
- name: Clean up container after test
shell: bash
@@ -180,11 +151,6 @@ jobs:
export test_case=${{ matrix.test_case }}
export hardware=${{ inputs.hardware }}
bash ${{ github.workspace }}/.github/workflows/scripts/docker_compose_clean_up.sh "containers"
# clean up containers use ports
cid=$(docker ps --format '{{.Names}} : {{.Ports}}' | grep -v ' : $' | grep -v 5000 | awk -F' : ' '{print $1}')
if [[ ! -z "$cid" ]]; then docker stop $cid && docker rm $cid && sleep 1s; fi
docker system prune -f
docker rmi $(docker images --filter reference="*:5000/*/*" -q) || true

View File

@@ -1,28 +0,0 @@
# Copyright (C) 2025 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
name: Check stale issue and pr
on:
schedule:
- cron: "30 22 * * *"
jobs:
close-issues:
runs-on: ubuntu-latest
permissions:
issues: write
pull-requests: write
steps:
- uses: actions/stale@v9
with:
days-before-issue-stale: 30
days-before-pr-stale: 30
days-before-issue-close: 7
days-before-pr-close: 7
stale-issue-message: "This issue is stale because it has been open 30 days with no activity. Remove stale label or comment or this will be closed in 7 days."
stale-pr-message: "This PR is stale because it has been open 30 days with no activity. Remove stale label or comment or this will be closed in 7 days."
close-issue-message: "This issue was closed because it has been stalled for 7 days with no activity."
close-pr-message: "This PR was closed because it has been stalled for 7 days with no activity."
repo-token: ${{ secrets.ACTION_TOKEN }}
start-date: "2025-03-01T00:00:00Z"

View File

@@ -1,984 +0,0 @@
# Copyright (C) 2025 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
name: Update Docker Hub Description
on:
schedule:
- cron: "0 0 * * 0"
workflow_dispatch:
jobs:
dockerHubDescription:
runs-on: ubuntu-latest
steps:
- name: Checkout current repository
uses: actions/checkout@v4
- name: Description for audioqna
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/audioqna
short-description: "The docker image served as an audioqna gateway and using language modeling to generate answers to user queries by converting audio input to text, and then using text-to-speech (TTS) to convert those answers back to speech for interaction."
readme-filepath: AudioQnA/README.md
enable-url-completion: true
- name: Description for audioqna-ui
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/audioqna-ui
short-description: "The docker image acted as the audioqna UI entry for enabling seamless interaction with users"
readme-filepath: AudioQnA/ui/svelte/README.md
enable-url-completion: false
- name: Description for audioqna-multilang
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/audioqna-multilang
short-description: "The docker image served as an audioqna gateway and using language modeling to generate answers to user queries by converting multilingual audio input to text, and then use multilingual text-to-speech (TTS) to convert those answers back to speech for interaction."
readme-filepath: AudioQnA/README.md
enable-url-completion: false
- name: Description for avatarchatbot
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/avatarchatbot
short-description: "The docker image served as a avatarchatbot gateway and interacted with users by understanding their questions and providing relevant answers."
readme-filepath: AvatarChatbot/README.md
enable-url-completion: false
- name: Description for chatqna
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/chatqna
short-description: "The docker image served as a chatqna gateway and interacted with users by understanding their questions and providing relevant answers."
readme-filepath: ChatQnA/README.md
enable-url-completion: false
- name: Description for chatqna-ui
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/chatqna-ui
short-description: "The docker image acted as the chatqna UI entry for facilitating interaction with users for question answering"
readme-filepath: ChatQnA/ui/svelte/README.md
enable-url-completion: false
- name: Description for chatqna-conversation-ui
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/chatqna-conversation-ui
short-description: "The purpose of the docker image is to provide a user interface for chat-based Q&A using React. It allows for interaction with users and supports continuing conversations with a history that is stored in the browser's local storage."
readme-filepath: ChatQnA/ui/react/README.md
enable-url-completion: false
- name: Description for codegen
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/codegen
short-description: "The docker image served as the codegen gateway to provide service of the automatic creation of source code from a higher-level representation"
readme-filepath: CodeGen/README.md
enable-url-completion: false
- name: Description for codegen-ui
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/codegen-ui
short-description: "The docker image acted as the codegen UI entry for facilitating interaction with users for automatically generating code from user's description"
readme-filepath: CodeGen/ui/svelte/README.md
enable-url-completion: false
- name: Description for codegen-react-ui
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/codegen-react-ui
short-description: "The purpose of the docker image is to provide a user interface for Codegen using React. It allows generating the appropriate code based on the current user input."
readme-filepath: CodeGen/ui/react/README.md
enable-url-completion: false
- name: Description for codetrans
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/codetrans
short-description: "The docker image served as a codetrans gateway to provide service of converting source code written in one programming language into an equivalent version in another programming language"
readme-filepath: CodeTrans/README.md
enable-url-completion: false
- name: Description for codetrans-ui
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/codetrans-ui
short-description: "The docker image acted as the codetrans UI entry for facilitating interaction with users for translating one programming language to another one"
readme-filepath: CodeTrans/ui/svelte/README.md
enable-url-completion: false
- name: Description for doc-index-retriever
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/doc-index-retriever
short-description: "The docker image acts as a DocRetriever gateway, It uses different methods to match user queries with a set of free text records."
readme-filepath: DocIndexRetriever/README.md
enable-url-completion: false
- name: Description for docsum
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/docsum
short-description: "The docker image served as a docsum gateway to provide service of capturing the main points and essential details of the original text"
readme-filepath: Docsum/README.md
enable-url-completion: false
- name: Description for docsum-ui
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/docsum-ui
short-description: "The docker image acted as the docsum UI entry for facilitating interaction with users for document summarization"
readme-filepath: Docsum/ui/svelte/README.md
enable-url-completion: false
- name: Description for docsum-react-ui
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/docsum-react-ui
short-description: "The purpose of the docker image is to provide a user interface for document summary using React. It allows upload a file or paste text and then click on “Generate Summary” to get a condensed summary of the generated content and automatically scroll to the bottom of the summary."
readme-filepath: Docsum/ui/react/README.md
enable-url-completion: false
- name: Description for docsum-gradio-ui
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/docsum-gradio-ui
short-description: "The purpose of the docker image is to provides a user interface for summarizing documents and text using a Dockerized frontend application. Users can upload files or paste text to generate summaries."
readme-filepath: Docsum/ui/gradio/README.md
enable-url-completion: false
- name: Description for edgecraftrag
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/edgecraftrag
short-description: "The docker image served as an Edge Craft RAG (EC-RAG) gateway, delivering a customizable and production-ready Retrieval-Augmented Generation system optimized for edge solutions."
readme-filepath: EdgeCraftRag/README.md
enable-url-completion: false
- name: Description for edgecraftrag-ui
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/edgecraftrag-ui
short-description: "The docker image acted as the Edge Craft RAG (EC-RAG) UI entry. It ensuring high-quality, performant interactions tailored for edge environments."
readme-filepath: EdgeCraftRag/README.md
enable-url-completion: false
- name: Description for edgecraftrag-server
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/edgecraftrag-server
short-description: "The docker image served as an Edge Craft RAG (EC-RAG) server, delivering a customizable and production-ready Retrieval-Augmented Generation system optimized for edge solutions."
readme-filepath: EdgeCraftRag/README.md
enable-url-completion: false
- name: Description for graphrag
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/graphrag
short-description: "The docker image served as a GraphRAG gateway, leveraging a knowledge graph derived from source documents to address both local and global queries."
readme-filepath: GraphRag/README.md
enable-url-completion: false
- name: Description for graphrag-ui
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/graphrag-ui
short-description: "The docker image acted as the GraphRAG UI entry for facilitating interaction with users"
readme-filepath: GraphRag/ui/svelte/README.md
enable-url-completion: false
- name: Description for graphrag-react-ui
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/graphrag-react-ui
short-description: "The purpose of the docker image is to provide a user interface for GraphRAG using React."
readme-filepath: GraphRag/ui/react/README.md
enable-url-completion: false
- name: Description for multimodalqna
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/multimodalqna
short-description: "The docker image served as a multimodalqna gateway and dynamically fetches the most relevant multimodal information (frames, transcripts, and/or subtitles) from the user's video collection to solve the problem."
readme-filepath: MultimodalQnA/README.md
enable-url-completion: false
- name: Description for multimodalqna-ui
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/multimodalqna-ui
short-description: "The docker image serves as the multimodalqna UI entry point for easy interaction with users. Answers to questions are generated from videos uploaded by users."
readme-filepath: MultimodalQnA/README.md
enable-url-completion: false
- name: Description for productivity-suite-react-ui-server
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/productivity-suite-react-ui-server
short-description: "The purpose of the docker image is to provide a user interface for Productivity Suite Application using React. It allows interaction by uploading documents and inputs."
readme-filepath: ProductivitySuite/ui/react/README.md
enable-url-completion: false
- name: Description for searchqna
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/searchqna
short-description: "The docker image served as the searchqna gateway to provide service of retrieving accurate and relevant answers to user queries from a knowledge base or dataset"
readme-filepath: SearchQnA/README.md
enable-url-completion: false
- name: Description for searchqna-ui
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/searchqna-ui
short-description: "The docker image acted as the searchqna UI entry for facilitating interaction with users for question answering"
readme-filepath: SearchQnA/ui/svelte/README.md
enable-url-completion: false
- name: Description for translation
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/translation
short-description: "The docker image served as the translation gateway to provide service of language translation"
readme-filepath: Translation/README.md
enable-url-completion: false
- name: Description for translation-ui
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/translation-ui
short-description: "The docker image acted as the translation UI entry for facilitating interaction with users for language translation"
readme-filepath: Translation/ui/svelte/README.md
enable-url-completion: false
- name: Description for videoqna
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/videoqna
short-description: "The docker image acts as videoqna gateway, interacting with the user by retrieving videos based on user prompts"
readme-filepath: VideoQnA/README.md
enable-url-completion: false
- name: Description for videoqna-ui
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/videoqna-ui
short-description: "The docker image serves as the user interface entry point for the videoqna, facilitating interaction with the user and retrieving the video based on user prompts."
readme-filepath: VideoQnA/README.md
enable-url-completion: false
- name: Description for visualqna
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/visualqna
short-description: "The docker image acts as a videoqna gateway, outputting answers in natural language based on a combination of images and questions"
readme-filepath: VisualQnA/README.md
enable-url-completion: false
- name: Description for visualqna-ui
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/visualqna-ui
short-description: "The docker image serves as the user interface portal for VisualQnA, facilitating interaction with the user and outputting answers in natural language based on a combination of images and questions from the user."
readme-filepath: VisualQnA/README.md
enable-url-completion: false
- name: Description for
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/agent-ui
short-description: "The docker image exposed the OPEA agent microservice UI entry for GenAI application use."
readme-filepath: AgentQnA/README.md
enable-url-completion: false
- name: Description for text2image-ui
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/text2image-ui
short-description: "The docker image exposed the OPEA text-to-image microservice UI entry for GenAI application use."
readme-filepath: Text2Image/README.md
enable-url-completion: false
- name: Description for text2sql-react-ui
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/text2sql-react-ui
short-description: "The docker image exposed the OPEA text to Structured Query Language microservice react UI entry for GenAI application use."
readme-filepath: DBQnA/README.md
enable-url-completion: false
- name: Checkout GenAIComps
uses: actions/checkout@v4
with:
repository: opea-project/GenAIComps
path: GenAIComps
- name: Description for agent
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/agent
short-description: "The docker image exposed the OPEA agent microservice for GenAI application use."
readme-filepath: GenAIComps/comps/agent/src/README.md
enable-url-completion: false
- name: Description for asr
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/asr
short-description: "The docker image exposed the OPEA Audio-Speech-Recognition microservice for GenAI application use."
readme-filepath: GenAIComps/comps/asr/src/README.md
enable-url-completion: false
- name: Description for animation
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/animation
short-description: "The purpose of the Docker image is to expose the OPEA Avatar Animation microservice for GenAI application use."
readme-filepath: GenAIComps/comps/animation/src/README.md
enable-url-completion: false
- name: Description for chathistory-mongo
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/chathistory-mongo
short-description: "The docker image exposes OPEA Chat History microservice which based on MongoDB database, designed to allow user to store, retrieve and manage chat conversations"
readme-filepath: GenAIComps/comps/chathistory/src/README.md
enable-url-completion: false
- name: Description for dataprep
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/dataprep
short-description: "The docker image exposed the OPEA dataprep microservice for GenAI application use."
readme-filepath: GenAIComps/comps/dataprep/README.md
enable-url-completion: false
- name: Description for embedding
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/embedding
short-description: "The docker image exposed the OPEA mosec embedding microservice for GenAI application use."
readme-filepath: GenAIComps/comps/embeddings/src/README.md
enable-url-completion: false
- name: Description for embedding-multimodal-clip
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/embedding-multimodal-clip
short-description: "The docker image exposed the OPEA mosec embedding microservice base on Langchain framework for GenAI application use."
readme-filepath: GenAIComps/comps/third_parties/clip/src/README.md
enable-url-completion: false
- name: Description for embedding-multimodal-bridgetower
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/embedding-multimodal-bridgetower
short-description: "The docker image exposes OPEA multimodal embedded microservices based on bridgetower for use by GenAI applications."
readme-filepath: GenAIComps/comps/third_parties/bridgetower/src/README.md
enable-url-completion: false
- name: Description for embedding-multimodal-bridgetower-gaudi
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/embedding-multimodal-bridgetower-gaudi
short-description: "The docker image exposes OPEA multimodal embedded microservices based on bridgetower for use by GenAI applications on the Gaudi."
readme-filepath: GenAIComps/comps/third_parties/bridgetower/src/README.md
enable-url-completion: false
- name: Description for feedbackmanagement-mongo
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/feedbackmanagement-mongo
short-description: "The docker image exposes that the OPEA feedback management microservice uses a MongoDB database for GenAI applications."
readme-filepath: GenAIComps/comps/feedback_management/src/README.md
enable-url-completion: false
- name: Description for finetuning
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/finetuning
short-description: "The docker image exposed the OPEA Fine-tuning microservice for GenAI application use."
readme-filepath: GenAIComps/comps/finetuning/src/README.md
enable-url-completion: false
- name: Description for finetuning-gaudi
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/finetuning-gaudi
short-description: "The docker image exposed the OPEA Fine-tuning microservice for GenAI application use on the Gaudi."
readme-filepath: GenAIComps/comps/finetuning/src/README.md
enable-url-completion: false
- name: Description for gpt-sovits
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/gpt-sovits
short-description: "The docker image exposed the OPEA GPT-SoVITS service for GenAI application use."
readme-filepath: GenAIComps/comps/tts/src/integrations/dependency/gpt-sovits/README.md
enable-url-completion: false
- name: Description for guardrails
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/guardrails
short-description: "The docker image exposed the OPEA guardrail microservice for GenAI application use."
readme-filepath: GenAIComps/comps/guardrails/src/guardrails/README.md
enable-url-completion: false
- name: Description for guardrails-toxicity-predictionguard
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/guardrails-toxicity-predictionguard
short-description: "The docker image exposed the OPEA guardrail microservice to provide toxicity detection for GenAI application use."
readme-filepath: GenAIComps/comps/guardrails/src/toxicity_detection/README.md
enable-url-completion: false
- name: Description for guardrails-pii-predictionguard
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/guardrails-pii-predictionguard
short-description: "The docker image exposed the OPEA guardrail microservice to provide PII detection for GenAI application use."
readme-filepath: GenAIComps/comps/guardrails/src/pii_detection/README.md
enable-url-completion: false
- name: Description for guardrails-injection-predictionguard
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/guardrails-injection-predictionguard
short-description: "The docker image exposed the OPEA guardrail microservice to provide injection predictionguard for GenAI application use."
readme-filepath: GenAIComps/comps/guardrails/src/prompt_injection/README.md
enable-url-completion: false
- name: Description for guardrails-hallucination-detection
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/guardrails-hallucination-detection
short-description: "The docker image exposed the OPEA guardrail microservice to provide hallucination detection for GenAI application use."
readme-filepath: GenAIComps/comps/guardrails/src/hallucination_detection/README.md
enable-url-completion: false
- name: Description for guardrails-factuality-predictionguard
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/guardrails-factuality-predictionguard
short-description: "The docker image exposed the OPEA guardrail microservice to provide factuality predictionguard for GenAI application use."
readme-filepath: GenAIComps/comps/guardrails/src/factuality_alignment/README.md
enable-url-completion: false
- name: Description for guardrails-bias-detection
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/guardrails-bias-detection
short-description: "The docker image exposed the OPEA guardrail microservice to provide bias detection for GenAI application use."
readme-filepath: GenAIComps/comps/guardrails/src/bias_detection/README.md
enable-url-completion: false
- name: Description for image2image-gaudi
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/image2image-gaudi
short-description: "The purpose of the Docker image is to expose the OPEA Image-to-Image microservice for GenAI application use on the Gaudi."
readme-filepath: GenAIComps/comps/image2image/src/README.md
enable-url-completion: false
- name: Description for image2image
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/image2image
short-description: "The purpose of the Docker image is to expose the OPEA Image-to-Image microservice for GenAI application use."
readme-filepath: GenAIComps/comps/image2image/src/README.md
enable-url-completion: false
- name: Description for image2video-gaudi
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/image2video-gaudi
short-description: "The purpose of the Docker image is to expose the OPEA image-to-video microservice for GenAI application use on the Gaudi."
readme-filepath: GenAIComps/comps/image2video/src/README.md
enable-url-completion: false
- name: Description for image2video
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/image2video
short-description: "The purpose of the Docker image is to expose the OPEA image-to-video microservice for GenAI application use."
readme-filepath: GenAIComps/comps/image2video/src/README.md
enable-url-completion: false
- name: Description for llm-textgen
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/llm-textgen
short-description: "The docker image exposed the OPEA LLM microservice upon textgen docker image for GenAI application use."
readme-filepath: GenAIComps/comps/llms/src/text-generation/README.md
enable-url-completion: false
- name: Description for llm-textgen-gaudi
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/llm-textgen-gaudi
short-description: "The docker image exposed the OPEA LLM microservice upon textgen docker image for GenAI application use on the Gaudi2."
readme-filepath: GenAIComps/comps/llms/src/text-generation/README.md
enable-url-completion: false
- name: Description for llm-eval
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/llm-eval
short-description: "The docker image exposed the OPEA LLM microservice upon eval docker image for GenAI application use."
readme-filepath: GenAIComps/comps/llms/utils/lm-eval/README.md
enable-url-completion: false
- name: Description for llm-docsum
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/llm-docsum
short-description: "The docker image exposed the OPEA LLM microservice upon docsum docker image for GenAI application use."
readme-filepath: GenAIComps/comps/llms/src/doc-summarization/README.md
enable-url-completion: false
- name: Description for llm-faqgen
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/llm-faqgen
short-description: "This docker image is designed to build a frequently asked questions microservice using the HuggingFace Text Generation Inference(TGI) framework. The microservice accepts document input and generates a FAQ."
readme-filepath: GenAIComps/comps/llms/src/faq-generation/README.md
enable-url-completion: false
- name: Description for lvm
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/lvm
short-description: "The docker image exposed the OPEA large visual model (LVM) microservice for GenAI application use."
readme-filepath: GenAIComps/comps/lvms/src/README.md
enable-url-completion: false
- name: Description for lvm-llava
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/lvm-llava
short-description: "The docker image exposed the OPEA microservice running LLaVA as a large visual model (LVM) server for GenAI application use."
readme-filepath: GenAIComps/comps/lvms/src/integrations/dependency/llava/README.md
enable-url-completion: false
- name: Description for lvm-video-llama
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/lvm-video-llama
short-description: "The docker image exposed the OPEA microservice running Video-Llama as a large visual model (LVM) for GenAI application use."
readme-filepath: GenAIComps/comps/lvms/src/integrations/dependency/video-llama/README.md
enable-url-completion: false
- name: Description for lvm-predictionguard
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/lvm-predictionguard
short-description: "The docker image exposed the OPEA microservice running predictionguard as a large visual model (LVM) server for GenAI application use."
readme-filepath: GenAIComps/comps/lvms/src/integrations/dependency/predictionguard/README.md
enable-url-completion: false
- name: Description for llava-gaudi
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/llava-gaudi
short-description: "The docker image exposed the OPEA microservice running LLaVA as a large visual model (LVM) service for GenAI application use on the Gaudi2."
readme-filepath: GenAIComps/comps/lvms/src/integrations/dependency/llava/README.md
enable-url-completion: false
- name: Description for lvm-llama-vision
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/lvm-llama-vision
short-description: "The docker image exposed the OPEA microservice running Llama Vision as the base large visual model service for GenAI application use."
readme-filepath: GenAIComps/comps/lvms/src/integrations/dependency/llama-vision/README.md
enable-url-completion: false
- name: Description for lvm-llama-vision-tp
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/lvm-llama-vision-tp
short-description: "The docker image exposed the OPEA microservice running Llama Vision with deepspeed as the base large visual model service for GenAI application use."
readme-filepath: GenAIComps/comps/lvms/src/integrations/dependency/llama-vision/README.md
enable-url-completion: false
- name: Description for lvm-llama-vision-guard
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/lvm-llama-vision-guard
short-description: "The docker image exposed the OPEA microservice running Llama Vision Guard as the base large visual model service for GenAI application use."
readme-filepath: GenAIComps/comps/lvms/src/integrations/dependency/llama-vision/README.md
enable-url-completion: false
- name: Description for promptregistry-mongo
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/promptregistry-mongo
short-description: "The docker image exposes the OPEA Prompt Registry microservices which based on MongoDB database, designed to store and retrieve user's preferred prompts."
readme-filepath: GenAIComps/comps/prompt_registry/src/README.md
enable-url-completion: false
- name: Description for reranking
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/reranking
short-description: "The docker image exposed the OPEA reranking microservice for GenAI application use."
readme-filepath: GenAIComps/comps/rerankings/src/README.md
enable-url-completion: false
- name: Description for retriever
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/retriever
short-description: "The docker image exposed the OPEA retrieval microservice for GenAI application use."
readme-filepath: GenAIComps/comps/retrievers/README.md
enable-url-completion: false
- name: Description for text2image
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/text2image
short-description: "The docker image exposed the OPEA text-to-image microservice for GenAI application use."
readme-filepath: GenAIComps/comps/text2image/src/README.md
enable-url-completion: false
- name: Description for text2image-gaudi
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/text2image-gaudi
short-description: "The docker image exposed the OPEA text-to-image microservice for GenAI application use on the Gaudi."
readme-filepath: GenAIComps/comps/text2image/src/README.md
enable-url-completion: false
- name: Description for text2sql
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/text2sql
short-description: "The docker image exposed the OPEA text to Structured Query Language microservice for GenAI application use."
readme-filepath: GenAIComps/comps/text2sql/src/README.md
enable-url-completion: false
- name: Description for tts
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/tts
short-description: "The docker image exposed the OPEA Text-To-Speech microservice for GenAI application use."
readme-filepath: GenAIComps/comps/tts/src/README.md
enable-url-completion: false
- name: Description for speecht5
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/speecht5
short-description: "The docker image exposed the OPEA SpeechT5 service for GenAI application use."
readme-filepath: GenAIComps/comps/tts/src/README.md
enable-url-completion: false
- name: Description for speecht5-gaudi
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/speecht5-gaudi
short-description: "The docker image exposed the OPEA SpeechT5 service on Gaudi2 for GenAI application use."
readme-filepath: GenAIComps/comps/tts/src/README.md
enable-url-completion: false
- name: Description for gpt-sovits
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/gpt-sovits
short-description: "The docker image exposed the OPEA gpt-sovits service for GenAI application use."
readme-filepath: GenAIComps/comps/tts/src/integrations/dependency/gpt-sovits/README.md
enable-url-completion: false
- name: Description for nginx
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/nginx
short-description: "The docker image exposed the OPEA nginx microservice for GenAI application use."
readme-filepath: GenAIComps/comps/third_parties/nginx/deployment/kubernetes/README.md
enable-url-completion: false
- name: Description for vectorstore-pathway
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/vectorstore-pathway
short-description: "The docker image exposed the OPEA Vectorstores microservice with Pathway for GenAI application use."
readme-filepath: GenAIComps/comps/third_parties/pathway/src/README.md
enable-url-completion: false
- name: Description for wav2lip
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/wav2lip
short-description: "The docker image exposed the OPEA Generate lip movements from audio files microservice with Pathway for GenAI application use."
readme-filepath: GenAIComps/comps/third_parties/wav2lip/deployment/kubernetes/README.md
enable-url-completion: false
- name: Description for wav2lip-gaudi
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/wav2lip-gaudi
short-description: "The docker image exposed the OPEA Generate lip movements from audio files microservice with Pathway for GenAI application use on the Gaudi2."
readme-filepath: GenAIComps/comps/third_parties/wav2lip/deployment/kubernetes/README.md
enable-url-completion: false
- name: Description for vllm-arc
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/vllm-arc
short-description: "The docker image powered by vllm-project for deploying and serving vllm Models on Arc."
readme-filepath: GenAIComps/comps/third_parties/vllm/README.md
enable-url-completion: false
- name: Description for whisper-gaudi
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/whisper-gaudi
short-description: "The docker image exposed the OPEA Whisper service on Gaudi2 for GenAI application use."
readme-filepath: GenAIComps/comps/asr/src/README.md
enable-url-completion: false
- name: Description for whisper
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/whisper
short-description: "The docker image exposed the OPEA Whisper service for GenAI application use."
readme-filepath: GenAIComps/comps/asr/src/README.md
enable-url-completion: false
- name: Description for web-retriever
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/web-retriever
short-description: "The docker image exposed the OPEA retrieval microservice based on chroma vectordb for GenAI application use."
readme-filepath: GenAIComps/comps/web_retrievers/src/README.md
enable-url-completion: false
- name: Checkout vllm-openvino
uses: actions/checkout@v4
with:
repository: vllm-project/vllm
path: vllm
- name: Description for vllm-openvino
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/vllm-openvino
short-description: "The docker image powered by vllm-project for deploying and serving vllm Models of the Openvino Framework."
readme-filepath: vllm/README.md
enable-url-completion: false
- name: Checkout vllm-gaudi
uses: actions/checkout@v4
with:
repository: HabanaAI/vllm-fork
ref: habana_main
path: vllm-fork
- name: Description for vllm-gaudi
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/vllm-gaudi
short-description: "The docker image powered by vllm-project for deploying and serving vllm Models on Gaudi2."
readme-filepath: vllm-fork/README.md
enable-url-completion: false
- name: Description for vllm
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: opea/vllm
short-description: "The docker image powered by vllm-project for deploying and serving vllm Models."
readme-filepath: vllm-fork/README.md
enable-url-completion: false

View File

@@ -41,11 +41,9 @@ jobs:
publish:
needs: [get-image-list]
if: ${{ needs.get-image-list.outputs.matrix != '' }}
strategy:
matrix:
image: ${{ fromJSON(needs.get-image-list.outputs.matrix) }}
fail-fast: false
runs-on: "docker-build-${{ inputs.node }}"
steps:
- uses: docker/login-action@v3.2.0

View File

@@ -12,7 +12,7 @@ on:
type: string
examples:
default: ""
description: 'List of examples to publish "AgentQnA,AudioQnA,ChatQnA,CodeGen,CodeTrans,DocIndexRetriever,DocSum,InstructionTuning,MultimodalQnA,ProductivitySuite,RerankFinetuning,SearchQnA,Translation,VideoQnA,VisualQnA"'
description: 'List of examples to publish "AgentQnA,AudioQnA,ChatQnA,CodeGen,CodeTrans,DocIndexRetriever,DocSum,FaqGen,InstructionTuning,MultimodalQnA,ProductivitySuite,RerankFinetuning,SearchQnA,Translation,VideoQnA,VisualQnA"'
required: false
type: string
images:
@@ -47,7 +47,6 @@ jobs:
scan-docker:
needs: get-image-list
runs-on: "docker-build-${{ inputs.node }}"
if: ${{ needs.get-image-list.outputs.matrix != '' }}
strategy:
matrix:
image: ${{ fromJson(needs.get-image-list.outputs.matrix) }}

View File

@@ -7,7 +7,7 @@ on:
inputs:
nodes:
default: "gaudi,xeon"
description: "Hardware to run test gaudi,gaudi3,xeon,rocm,arc"
description: "Hardware to run test"
required: true
type: string
examples:
@@ -20,6 +20,11 @@ on:
description: "Tag to apply to images"
required: true
type: string
deploy_gmc:
default: false
description: 'Whether to deploy gmc'
required: true
type: boolean
build:
default: true
description: 'Build test required images for Examples'
@@ -35,6 +40,11 @@ on:
description: 'Test examples with helm charts'
required: false
type: boolean
test_gmc:
default: false
description: 'Test examples with gmc'
required: false
type: boolean
opea_branch:
default: "main"
description: 'OPEA branch for image build'
@@ -42,14 +52,9 @@ on:
type: string
inject_commit:
default: false
description: "inject commit to docker images"
description: "inject commit to docker images true or false"
required: false
type: boolean
use_model_cache:
default: false
description: "use model cache"
required: false
type: boolean
type: string
permissions: read-all
jobs:
@@ -69,20 +74,23 @@ jobs:
nodes_json=$(printf '%s\n' "${nodes[@]}" | sort -u | jq -R '.' | jq -sc '.')
echo "nodes=$nodes_json" >> $GITHUB_OUTPUT
build-comps-base:
build-deploy-gmc:
needs: [get-test-matrix]
if: ${{ fromJSON(inputs.deploy_gmc) }}
strategy:
matrix:
node: ${{ fromJson(needs.get-test-matrix.outputs.nodes) }}
uses: ./.github/workflows/_build_comps_base_image.yml
fail-fast: false
uses: ./.github/workflows/_gmc-workflow.yml
with:
node: ${{ matrix.node }}
build: ${{ fromJSON(inputs.build) }}
tag: ${{ inputs.tag }}
opea_branch: ${{ inputs.opea_branch }}
secrets: inherit
run-examples:
needs: [get-test-matrix, build-comps-base]
needs: [get-test-matrix, build-deploy-gmc]
if: always()
strategy:
matrix:
example: ${{ fromJson(needs.get-test-matrix.outputs.examples) }}
@@ -96,7 +104,7 @@ jobs:
build: ${{ fromJSON(inputs.build) }}
test_compose: ${{ fromJSON(inputs.test_compose) }}
test_helmchart: ${{ fromJSON(inputs.test_helmchart) }}
test_gmc: ${{ fromJSON(inputs.test_gmc) }}
opea_branch: ${{ inputs.opea_branch }}
inject_commit: ${{ inputs.inject_commit }}
use_model_cache: ${{ inputs.use_model_cache }}
secrets: inherit

View File

@@ -25,9 +25,9 @@ jobs:
- name: Set up Git
run: |
git config --global user.name "CICD-at-OPEA"
git config --global user.email "CICD@opea.dev"
git remote set-url origin https://CICD-at-OPEA:"${{ secrets.ACTION_TOKEN }}"@github.com/opea-project/GenAIExamples.git
git config --global user.name "NeuralChatBot"
git config --global user.email "grp_neural_chat_bot@intel.com"
git remote set-url origin https://NeuralChatBot:"${{ secrets.ACTION_TOKEN }}"@github.com/opea-project/GenAIExamples.git
- name: Run script
run: |

View File

@@ -32,9 +32,9 @@ on:
type: string
inject_commit:
default: false
description: "inject commit to docker images"
description: "inject commit to docker images true or false"
required: false
type: boolean
type: string
jobs:
get-test-matrix:
@@ -51,7 +51,6 @@ jobs:
image-build:
needs: get-test-matrix
if: ${{ needs.get-test-matrix.outputs.nodes != '' }}
strategy:
matrix:
node: ${{ fromJson(needs.get-test-matrix.outputs.nodes) }}

View File

@@ -33,7 +33,6 @@ jobs:
clean-up:
needs: get-build-matrix
if: ${{ needs.get-image-list.outputs.matrix != '' }}
strategy:
matrix:
node: ${{ fromJson(needs.get-build-matrix.outputs.nodes) }}
@@ -48,7 +47,6 @@ jobs:
build:
needs: [get-build-matrix, clean-up]
if: ${{ needs.get-image-list.outputs.matrix != '' }}
strategy:
matrix:
example: ${{ fromJson(needs.get-build-matrix.outputs.examples) }}

View File

@@ -32,15 +32,8 @@ jobs:
echo "TAG=$TAG" >> $GITHUB_OUTPUT
echo "PUBLISH_TAGS=$PUBLISH_TAGS" >> $GITHUB_OUTPUT
build-comps-base:
needs: [get-build-matrix]
uses: ./.github/workflows/_build_comps_base_image.yml
with:
node: gaudi
build-and-test:
needs: get-build-matrix
if: ${{ needs.get-build-matrix.outputs.examples_json != '' }}
strategy:
matrix:
example: ${{ fromJSON(needs.get-build-matrix.outputs.examples_json) }}
@@ -50,7 +43,6 @@ jobs:
node: gaudi
example: ${{ matrix.example }}
test_compose: true
inject_commit: true
secrets: inherit
get-image-list:
@@ -61,11 +53,9 @@ jobs:
publish:
needs: [get-build-matrix, get-image-list, build-and-test]
if: always() && ${{ needs.get-image-list.outputs.matrix != '' }}
strategy:
matrix:
image: ${{ fromJSON(needs.get-image-list.outputs.matrix) }}
fail-fast: false
runs-on: "docker-build-gaudi"
steps:
- uses: docker/login-action@v3.2.0

View File

@@ -65,7 +65,7 @@ jobs:
helm-chart-test:
needs: [job1]
if: always() && ${{ fromJSON(needs.job1.outputs.run_matrix).length != 0 }}
if: always() && ${{ needs.job1.outputs.run_matrix.example.length > 0 }}
uses: ./.github/workflows/_helm-e2e.yml
strategy:
matrix: ${{ fromJSON(needs.job1.outputs.run_matrix) }}

View File

@@ -32,16 +32,15 @@ jobs:
example-test:
needs: [get-test-matrix]
if: ${{ needs.get-test-matrix.outputs.run_matrix != '' }}
strategy:
matrix: ${{ fromJSON(needs.get-test-matrix.outputs.run_matrix) }}
fail-fast: false
if: ${{ !github.event.pull_request.draft }}
uses: ./.github/workflows/_run-docker-compose.yml
with:
registry: "opea"
tag: "ci"
example: ${{ matrix.example }}
hardware: ${{ matrix.hardware }}
use_model_cache: true
diff_excluded_files: '\.github|\.md|\.txt|kubernetes|gmc|assets|benchmark'
secrets: inherit

View File

@@ -76,7 +76,13 @@ jobs:
cd ${{github.workspace}}
fail="FALSE"
repo_name=${{ github.event.pull_request.head.repo.full_name }}
branch="https://github.com/$repo_name/blob/${{ github.event.pull_request.head.ref }}"
if [ "$(echo "$repo_name"|cut -d'/' -f1)" != "opea-project" ]; then
owner=$(echo "${{ github.event.pull_request.head.repo.full_name }}" |cut -d'/' -f1)
branch="https://github.com/$owner/GenAIExamples/tree/${{ github.event.pull_request.head.ref }}"
else
branch="https://github.com/opea-project/GenAIExamples/blob/${{ github.event.pull_request.head.ref }}"
fi
link_head="https://github.com/opea-project/GenAIExamples/blob/main"
merged_commit=$(git log -1 --format='%H')
changed_files="$(git diff --name-status --diff-filter=ARM ${{ github.event.pull_request.base.sha }} ${merged_commit} | awk '/\.md$/ {print $NF}')"

View File

@@ -24,7 +24,6 @@ jobs:
image-build:
needs: job1
if: ${{ needs.job1.outputs.run_matrix != '{"include":[]}' }}
strategy:
matrix: ${{ fromJSON(needs.job1.outputs.run_matrix) }}
fail-fast: false

View File

@@ -54,6 +54,6 @@ jobs:
${{ env.changed_files }}
Please verify if the helm charts need to be changed accordingly.
Please verify if the helm charts and manifests need to be changed accordingly.
> This issue was created automatically by CI.

View File

@@ -30,20 +30,13 @@ case "$1" in
echo "$ports"
for port in $ports; do
if [[ $port =~ [a-zA-Z_-] ]]; then
echo "Search port value $port from the test case..."
port_fix=$(grep -E "export $port=" tests/$test_case | cut -d'=' -f2)
if [[ "$port_fix" == "" ]]; then
echo "Can't find the port value from the test case, use the default value in yaml..."
port_fix=$(yq '.services[].ports[]' $yaml_file | grep $port | cut -d':' -f2 | grep -o '[0-9a-zA-Z]\+')
fi
port=$port_fix
port=$(grep -E "export $port=" tests/$test_case | cut -d'=' -f2)
fi
if [[ $port =~ [0-9] ]]; then
if [[ $port == 5000 ]]; then
echo "Error: Port 5000 is used by local docker registry, please DO NOT use it in docker compose deployment!!!"
exit 1
fi
echo "Check port $port..."
cid=$(docker ps --filter "publish=${port}" --format "{{.ID}}")
if [[ ! -z "$cid" ]]; then docker stop $cid && docker rm $cid && echo "release $port"; fi
fi

View File

@@ -12,7 +12,6 @@ run_matrix="{\"include\":["
examples=$(printf '%s\n' "${changed_files[@]}" | grep '/' | cut -d'/' -f1 | sort -u)
for example in ${examples}; do
if [[ ! -d $WORKSPACE/$example ]]; then continue; fi
cd $WORKSPACE/$example
if [[ ! $(find . -type f | grep ${test_mode}) ]]; then continue; fi
cd tests
@@ -27,10 +26,7 @@ for example in ${examples}; do
run_hardware=""
if [[ $(printf '%s\n' "${changed_files[@]}" | grep ${example} | cut -d'/' -f2 | grep -E '\.py|Dockerfile*|ui|docker_image_build' ) ]]; then
echo "run test on all hardware if megaservice or ui code change..."
run_hardware=$hardware_list
elif [[ $(printf '%s\n' "${changed_files[@]}" | grep ${example} | grep 'tests'| cut -d'/' -f3 | grep -vE '^test_|^_test' ) ]]; then
echo "run test on all hardware if common test scripts change..."
# run test on all hardware if megaservice or ui code change
run_hardware=$hardware_list
else
for hardware in ${hardware_list}; do

View File

@@ -1,9 +1,11 @@
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
name: Weekly update 3rd party images
name: Weekly update base images and 3rd party images
on:
schedule:
- cron: "0 0 * * 0"
workflow_dispatch:
permissions:
@@ -14,8 +16,8 @@ jobs:
freeze-images:
runs-on: ubuntu-latest
env:
USER_NAME: "CICD-at-OPEA"
USER_EMAIL: "CICD@opea.dev"
USER_NAME: "NeuralChatBot"
USER_EMAIL: "grp_neural_chat_bot@intel.com"
BRANCH_NAME: "update_images_tag"
steps:
- name: Checkout repository

View File

@@ -1,16 +1,8 @@
# Agents for Question Answering
## Table of contents
1. [Overview](#overview)
2. [Deploy with Docker](#deploy-with-docker)
3. [Launch the UI](#launch-the-ui)
4. [Validate Services](#validate-services)
5. [Register Tools](#how-to-register-other-tools-with-the-ai-agent)
## Overview
This example showcases a hierarchical multi-agent system for question-answering applications. The architecture diagram below shows a supervisor agent that interfaces with the user and dispatches tasks to two worker agents to gather information and come up with answers. The worker RAG agent uses the retrieval tool to retrieve relevant documents from a knowledge base - a vector database. The worker SQL agent retrieves relevant data from a SQL database. Although not included in this example by default, other tools such as a web search tool or a knowledge graph query tool can be used by the supervisor agent to gather information from additional sources.
This example showcases a hierarchical multi-agent system for question-answering applications. The architecture diagram is shown below. The supervisor agent interfaces with the user and dispatch tasks to two worker agents to gather information and come up with answers. The worker RAG agent uses the retrieval tool to retrieve relevant documents from the knowledge base (a vector database). The worker SQL agent retrieve relevant data from the SQL database. Although not included in this example, but other tools such as a web search tool or a knowledge graph query tool can be used by the supervisor agent to gather information from additional sources.
![Architecture Overview](assets/img/agent_qna_arch.png)
The AgentQnA example is implemented using the component-level microservices defined in [GenAIComps](https://github.com/opea-project/GenAIComps). The flow chart below shows the information flow between different microservices for this example.
@@ -83,169 +75,201 @@ flowchart LR
```
### Why should AI Agents be used for question-answering?
### Why Agent for question answering?
1. **Improve relevancy of retrieved context.**
RAG agents can rephrase user queries, decompose user queries, and iterate to get the most relevant context for answering a user's question. Compared to conventional RAG, RAG agents significantly improve the correctness and relevancy of the answer because of the iterations it goes through.
2. **Expand scope of skills.**
The supervisor agent interacts with multiple worker agents that specialize in different skills (e.g., retrieve documents, write SQL queries, etc.). Thus, it can answer questions with different methods.
3. **Hierarchical multi-agents improve performance.**
Expert worker agents, such as RAG agents and SQL agents, can provide high-quality output for different aspects of a complex query, and the supervisor agent can aggregate the information to provide a comprehensive answer. If only one agent is used and all tools are provided to this single agent, it can lead to large overhead or not use the best tool to provide accurate answers.
1. Improve relevancy of retrieved context.
RAG agent can rephrase user queries, decompose user queries, and iterate to get the most relevant context for answering user's questions. Compared to conventional RAG, RAG agent can significantly improve the correctness and relevancy of the answer.
2. Expand scope of the agent.
The supervisor agent can interact with multiple worker agents that specialize in different domains with different skills (e.g., retrieve documents, write SQL queries, etc.), and thus can answer questions in multiple domains.
3. Hierarchical multi-agents can improve performance.
Expert worker agents, such as RAG agent and SQL agent, can provide high-quality output for different aspects of a complex query, and the supervisor agent can aggregate the information together to provide a comprehensive answer. If we only use one agent and provide all the tools to this single agent, it may get overwhelmed and not able to provide accurate answers.
## Deploy with docker
## Deployment with docker
### 1. Set up environment </br>
1. Build agent docker image [Optional]
#### First, clone the `GenAIExamples` repo.
> [!NOTE]
> the step is optional. The docker images will be automatically pulled when running the docker compose commands. This step is only needed if pulling images failed.
First, clone the opea GenAIComps repo.
```
export WORKDIR=<your-work-directory>
cd $WORKDIR
git clone https://github.com/opea-project/GenAIExamples.git
git clone https://github.com/opea-project/GenAIComps.git
```
#### Second, set up environment variables.
##### For proxy environments only
Then build the agent docker image. Both the supervisor agent and the worker agent will use the same docker image, but when we launch the two agents we will specify different strategies and register different tools.
```
export http_proxy="Your_HTTP_Proxy"
export https_proxy="Your_HTTPs_Proxy"
# Example: no_proxy="localhost, 127.0.0.1, 192.168.1.1"
export no_proxy="Your_No_Proxy"
cd GenAIComps
docker build -t opea/agent:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/agent/src/Dockerfile .
```
##### For using open-source llms
2. Set up environment for this example </br>
```
export HUGGINGFACEHUB_API_TOKEN=<your-HF-token>
export HF_CACHE_DIR=<directory-where-llms-are-downloaded> #so that no need to redownload every time
```
First, clone this repo.
##### [Optional] OPANAI_API_KEY to use OpenAI models
```
export WORKDIR=<your-work-directory>
cd $WORKDIR
git clone https://github.com/opea-project/GenAIExamples.git
```
```
export OPENAI_API_KEY=<your-openai-key>
```
Second, set up env vars.
#### Third, set up environment variables for the selected hardware using the corresponding `set_env.sh`
```
# Example: host_ip="192.168.1.1" or export host_ip="External_Public_IP"
export host_ip=$(hostname -I | awk '{print $1}')
# if you are in a proxy environment, also set the proxy-related environment variables
export http_proxy="Your_HTTP_Proxy"
export https_proxy="Your_HTTPs_Proxy"
# Example: no_proxy="localhost, 127.0.0.1, 192.168.1.1"
export no_proxy="Your_No_Proxy"
##### Gaudi
export TOOLSET_PATH=$WORKDIR/GenAIExamples/AgentQnA/tools/
# for using open-source llms
export HUGGINGFACEHUB_API_TOKEN=<your-HF-token>
export HF_CACHE_DIR=<directory-where-llms-are-downloaded> #so that no need to redownload every time
```
source $WORKDIR/GenAIExamples/AgentQnA/docker_compose/intel/hpu/gaudi/set_env.sh
```
# optional: OPANAI_API_KEY if you want to use OpenAI models
export OPENAI_API_KEY=<your-openai-key>
```
##### Xeon
3. Deploy the retrieval tool (i.e., DocIndexRetriever mega-service)
```
source $WORKDIR/GenAIExamples/AgentQnA/docker_compose/intel/cpu/xeon/set_env.sh
```
First, launch the mega-service.
### 2. Launch the multi-agent system. </br>
```
cd $WORKDIR/GenAIExamples/AgentQnA/retrieval_tool
bash launch_retrieval_tool.sh
```
Two options are provided for the `llm_engine` of the agents: 1. open-source LLMs on Gaudi, 2. OpenAI models via API calls.
Then, ingest data into the vector database. Here we provide an example. You can ingest your own data.
#### Gaudi
```
bash run_ingest_data.sh
```
On Gaudi, `meta-llama/Meta-Llama-3.1-70B-Instruct` will be served using vllm.
By default, both the RAG agent and SQL agent will be launched to support the React Agent.
The React Agent requires the DocIndexRetriever's [`compose.yaml`](../DocIndexRetriever/docker_compose/intel/cpu/xeon/compose.yaml) file, so two `compose.yaml` files need to be run with docker compose to start the multi-agent system.
4. Prepare SQL database
In this example, we will use the Chinook SQLite database. Run the commands below.
> **Note**: To enable the web search tool, skip this step and proceed to the "[Optional] Web Search Tool Support" section.
```
# Download data
cd $WORKDIR
git clone https://github.com/lerocha/chinook-database.git
cp chinook-database/ChinookDatabase/DataSources/Chinook_Sqlite.sqlite $WORKDIR/GenAIExamples/AgentQnA/tests/
```
```bash
cd $WORKDIR/GenAIExamples/AgentQnA/docker_compose/intel/hpu/gaudi/
docker compose -f $WORKDIR/GenAIExamples/DocIndexRetriever/docker_compose/intel/cpu/xeon/compose.yaml -f compose.yaml up -d
```
5. Launch other tools. </br>
In this example, we will use some of the mock APIs provided in the Meta CRAG KDD Challenge to demonstrate the benefits of gaining additional context from mock knowledge graphs.
To enable Open Telemetry Tracing, compose.telemetry.yaml file need to be merged along with default compose.yaml file.
Gaudi example with Open Telemetry feature:
```
docker run -d -p=8080:8000 docker.io/aicrowd/kdd-cup-24-crag-mock-api:v0
```
```bash
cd $WORKDIR/GenAIExamples/AgentQnA/docker_compose/intel/hpu/gaudi/
docker compose -f $WORKDIR/GenAIExamples/DocIndexRetriever/docker_compose/intel/cpu/xeon/compose.yaml -f compose.yaml -f compose.telemetry.yaml up -d
```
6. Launch multi-agent system. </br>
We provide two options for `llm_engine` of the agents: 1. open-source LLMs on Intel Gaudi2, 2. OpenAI models via API calls.
##### [Optional] Web Search Tool Support
::::{tab-set}
:::{tab-item} Gaudi
:sync: Gaudi
<details>
<summary> Instructions </summary>
A web search tool is supported in this example and can be enabled by running docker compose with the `compose.webtool.yaml` file.
The Google Search API is used. Follow the [instructions](https://python.langchain.com/docs/integrations/tools/google_search) to create an API key and enable the Custom Search API on a Google account. The environment variables `GOOGLE_CSE_ID` and `GOOGLE_API_KEY` need to be set.
On Gaudi2 we will serve `meta-llama/Meta-Llama-3.1-70B-Instruct` using vllm.
```bash
cd $WORKDIR/GenAIExamples/AgentQnA/docker_compose/intel/hpu/gaudi/
export GOOGLE_CSE_ID="YOUR_ID"
export GOOGLE_API_KEY="YOUR_API_KEY"
docker compose -f $WORKDIR/GenAIExamples/DocIndexRetriever/docker_compose/intel/cpu/xeon/compose.yaml -f compose.yaml -f compose.webtool.yaml up -d
```
First build vllm-gaudi docker image.
</details>
```bash
cd $WORKDIR
git clone https://github.com/vllm-project/vllm.git
cd ./vllm
git checkout v0.6.6
docker build --no-cache -f Dockerfile.hpu -t opea/vllm-gaudi:latest --shm-size=128g . --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy
```
#### Xeon
Then launch vllm on Gaudi2 with the command below.
On Xeon, only OpenAI models are supported.
By default, both the RAG Agent and SQL Agent will be launched to support the React Agent.
The React Agent requires the DocIndexRetriever's [`compose.yaml`](../DocIndexRetriever/docker_compose/intel/cpu/xeon/compose.yaml) file, so two `compose yaml` files need to be run with docker compose to start the multi-agent system.
```bash
vllm_port=8086
model="meta-llama/Meta-Llama-3.1-70B-Instruct"
docker run -d --runtime=habana --rm --name "vllm-gaudi-server" -e HABANA_VISIBLE_DEVICES=0,1,2,3 -p $vllm_port:8000 -v $vllm_volume:/data -e HF_TOKEN=$HF_TOKEN -e HUGGING_FACE_HUB_TOKEN=$HF_TOKEN -e HF_HOME=/data -e OMPI_MCA_btl_vader_single_copy_mechanism=none -e PT_HPU_ENABLE_LAZY_COLLECTIVES=true -e http_proxy=$http_proxy -e https_proxy=$https_proxy -e no_proxy=$no_proxy -e VLLM_SKIP_WARMUP=true --cap-add=sys_nice --ipc=host opea/vllm-gaudi:latest --model ${model} --max-seq-len-to-capture 16384 --tensor-parallel-size 4
```
```bash
export OPENAI_API_KEY=<your-openai-key>
cd $WORKDIR/GenAIExamples/AgentQnA/docker_compose/intel/cpu/xeon
docker compose -f $WORKDIR/GenAIExamples/DocIndexRetriever/docker_compose/intel/cpu/xeon/compose.yaml -f compose_openai.yaml up -d
```
Then launch Agent microservices.
### 3. Ingest Data into the vector database
```bash
cd $WORKDIR/GenAIExamples/AgentQnA/docker_compose/intel/hpu/gaudi/
bash launch_agent_service_gaudi.sh
```
The `run_ingest_data.sh` script will use an example jsonl file to ingest example documents into a vector database. Other ways to ingest data and other types of documents supported can be found in the OPEA dataprep microservice located in the opea-project/GenAIComps repo.
:::
:::{tab-item} Xeon
:sync: Xeon
```bash
cd $WORKDIR/GenAIExamples/AgentQnA/retrieval_tool/
bash run_ingest_data.sh
```
To use OpenAI models, run commands below.
> **Note**: This is a one-time operation.
```
export OPENAI_API_KEY=<your-openai-key>
cd $WORKDIR/GenAIExamples/AgentQnA/docker_compose/intel/cpu/xeon
bash launch_agent_service_openai.sh
```
## Launch the UI
:::
::::
Open a web browser to http://localhost:5173 to access the UI. Ensure the environment variable `AGENT_URL` is set to http://$ip_address:9090/v1/chat/completions in [ui/svelte/.env](./ui/svelte/.env) or else the UI may not work properly.
The AgentQnA UI can be deployed locally or using Docker. To customize deployment, refer to the [AgentQnA UI Guide](./ui/svelte/README.md).
## [Optional] Deploy using Helm Charts
## Deploy using Helm Chart
Refer to the [AgentQnA helm chart](./kubernetes/helm/README.md) for instructions on deploying AgentQnA on Kubernetes.
## Validate Services
## Validate services
1. First look at logs for each of the agent docker containers:
First look at logs of the agent docker containers:
```bash
```
# worker RAG agent
docker logs rag-agent-endpoint
# worker SQL agent
docker logs sql-agent-endpoint
```
```
# supervisor agent
docker logs react-agent-endpoint
```
Look for the message "HTTP server setup successful" to confirm the agent docker container has started successfully.</p>
You should see something like "HTTP server setup successful" if the docker containers are started successfully.</p>
2. Use python to validate each agent is working properly:
Second, validate worker RAG agent:
```bash
# RAG worker agent
python $WORKDIR/GenAIExamples/AgentQnA/tests/test.py --prompt "Tell me about Michael Jackson song Thriller" --agent_role "worker" --ext_port 9095
# SQL agent
python $WORKDIR/GenAIExamples/AgentQnA/tests/test.py --prompt "How many employees in company" --agent_role "worker" --ext_port 9096
# supervisor agent: this will test a two-turn conversation
python $WORKDIR/GenAIExamples/AgentQnA/tests/test.py --agent_role "supervisor" --ext_port 9090
```
curl http://${host_ip}:9095/v1/chat/completions -X POST -H "Content-Type: application/json" -d '{
"messages": "Michael Jackson song Thriller"
}'
```
## How to register other tools with the AI agent
Third, validate worker SQL agent:
The [tools](./tools) folder contains YAML and Python files for additional tools for the supervisor and worker agents. Refer to the "Provide your own tools" section in the instructions [here](https://github.com/opea-project/GenAIComps/tree/main/comps/agent/src/README.md) to add tools and customize the AI agents.
```
curl http://${host_ip}:9096/v1/chat/completions -X POST -H "Content-Type: application/json" -d '{
"messages": "How many employees are in the company"
}'
```
Finally, validate supervisor agent:
```
curl http://${host_ip}:9090/v1/chat/completions -X POST -H "Content-Type: application/json" -d '{
"messages": "How many albums does Iron Maiden have?"
}'
```
## Deploy AgentQnA UI
The AgentQnA UI can be deployed locally or using Docker.
For detailed instructions on deploying AgentQnA UI, refer to the [AgentQnA UI Guide](./ui/svelte/README.md).
## How to register your own tools with agent
You can take a look at the tools yaml and python files in this example. For more details, please refer to the "Provide your own tools" section in the instructions [here](https://github.com/opea-project/GenAIComps/tree/main/comps/agent/src/README.md).

View File

@@ -1,342 +1,101 @@
# Build Mega Service of AgentQnA on AMD ROCm GPU
# Single node on-prem deployment with Docker Compose on AMD GPU
## Build Docker Images
This example showcases a hierarchical multi-agent system for question-answering applications. We deploy the example on Xeon. For LLMs, we use OpenAI models via API calls. For instructions on using open-source LLMs, please refer to the deployment guide [here](../../../../README.md).
### 1. Build Docker Image
## Deployment with docker
- #### Create application install directory and go to it:
1. First, clone this repo.
```
export WORKDIR=<your-work-directory>
cd $WORKDIR
git clone https://github.com/opea-project/GenAIExamples.git
```
2. Set up environment for this example </br>
```bash
mkdir ~/agentqna-install && cd agentqna-install
```
```
# Example: host_ip="192.168.1.1" or export host_ip="External_Public_IP"
export host_ip=$(hostname -I | awk '{print $1}')
# if you are in a proxy environment, also set the proxy-related environment variables
export http_proxy="Your_HTTP_Proxy"
export https_proxy="Your_HTTPs_Proxy"
# Example: no_proxy="localhost, 127.0.0.1, 192.168.1.1"
export no_proxy="Your_No_Proxy"
- #### Clone the repository GenAIExamples (the default repository branch "main" is used here):
export TOOLSET_PATH=$WORKDIR/GenAIExamples/AgentQnA/tools/
#OPANAI_API_KEY if you want to use OpenAI models
export OPENAI_API_KEY=<your-openai-key>
# Set AMD GPU settings
export AGENTQNA_CARD_ID="card1"
export AGENTQNA_RENDER_ID="renderD136"
```
```bash
git clone https://github.com/opea-project/GenAIExamples.git
```
3. Deploy the retrieval tool (i.e., DocIndexRetriever mega-service)
If you need to use a specific branch/tag of the GenAIExamples repository, then (v1.3 replace with its own value):
First, launch the mega-service.
```bash
git clone https://github.com/opea-project/GenAIExamples.git && cd GenAIExamples && git checkout v1.3
```
```
cd $WORKDIR/GenAIExamples/AgentQnA/retrieval_tool
bash launch_retrieval_tool.sh
```
We remind you that when using a specific version of the code, you need to use the README from this version:
Then, ingest data into the vector database. Here we provide an example. You can ingest your own data.
- #### Go to build directory:
```
bash run_ingest_data.sh
```
```bash
cd ~/agentqna-install/GenAIExamples/AgentQnA/docker_image_build
```
4. Launch Tool service
In this example, we will use some of the mock APIs provided in the Meta CRAG KDD Challenge to demonstrate the benefits of gaining additional context from mock knowledge graphs.
```
docker run -d -p=8080:8000 docker.io/aicrowd/kdd-cup-24-crag-mock-api:v0
```
5. Launch `Agent` service
- Cleaning up the GenAIComps repository if it was previously cloned in this directory.
This is necessary if the build was performed earlier and the GenAIComps folder exists and is not empty:
```
cd $WORKDIR/GenAIExamples/AgentQnA/docker_compose/amd/gpu/rocm
bash launch_agent_service_tgi_rocm.sh
```
```bash
echo Y | rm -R GenAIComps
```
6. [Optional] Build `Agent` docker image if pulling images failed.
- #### Clone the repository GenAIComps (the default repository branch "main" is used here):
```
git clone https://github.com/opea-project/GenAIComps.git
cd GenAIComps
docker build -t opea/agent:latest -f comps/agent/src/Dockerfile .
```
```bash
git clone https://github.com/opea-project/GenAIComps.git
## Validate services
First look at logs of the agent docker containers:
```
# worker agent
docker logs rag-agent-endpoint
```
We remind you that when using a specific version of the code, you need to use the README from this version.
- #### Setting the list of images for the build (from the build file.yaml)
If you want to deploy a vLLM-based or TGI-based application, then the set of services is installed as follows:
#### vLLM-based application
```bash
service_list="vllm-rocm agent agent-ui"
```
#### TGI-based application
```bash
service_list="agent agent-ui"
```
- #### Optional. Pull TGI Docker Image (Do this if you want to use TGI)
```bash
docker pull ghcr.io/huggingface/text-generation-inference:2.3.1-rocm
```
- #### Build Docker Images
```bash
docker compose -f build.yaml build ${service_list} --no-cache
```
- #### Build DocIndexRetriever Docker Images
```bash
cd ~/agentqna-install/GenAIExamples/DocIndexRetriever/docker_image_build/
git clone https://github.com/opea-project/GenAIComps.git
service_list="doc-index-retriever dataprep embedding retriever reranking"
docker compose -f build.yaml build ${service_list} --no-cache
```
- #### Pull DocIndexRetriever Docker Images
```bash
docker pull redis/redis-stack:7.2.0-v9
docker pull ghcr.io/huggingface/text-embeddings-inference:cpu-1.5
```
After the build, we check the list of images with the command:
```bash
docker image ls
```
The list of images should include:
##### vLLM-based application:
- opea/vllm-rocm:latest
- opea/agent:latest
- redis/redis-stack:7.2.0-v9
- ghcr.io/huggingface/text-embeddings-inference:cpu-1.5
- opea/embedding:latest
- opea/retriever:latest
- opea/reranking:latest
- opea/doc-index-retriever:latest
##### TGI-based application:
- ghcr.io/huggingface/text-generation-inference:2.3.1-rocm
- opea/agent:latest
- redis/redis-stack:7.2.0-v9
- ghcr.io/huggingface/text-embeddings-inference:cpu-1.5
- opea/embedding:latest
- opea/retriever:latest
- opea/reranking:latest
- opea/doc-index-retriever:latest
---
## Deploy the AgentQnA Application
### Docker Compose Configuration for AMD GPUs
To enable GPU support for AMD GPUs, the following configuration is added to the Docker Compose file:
- compose_vllm.yaml - for vLLM-based application
- compose.yaml - for TGI-based
```yaml
shm_size: 1g
devices:
- /dev/kfd:/dev/kfd
- /dev/dri:/dev/dri
cap_add:
- SYS_PTRACE
group_add:
- video
security_opt:
- seccomp:unconfined
```
# supervisor agent
docker logs react-agent-endpoint
```
This configuration forwards all available GPUs to the container. To use a specific GPU, specify its `cardN` and `renderN` device IDs. For example:
You should see something like "HTTP server setup successful" if the docker containers are started successfully.</p>
```yaml
shm_size: 1g
devices:
- /dev/kfd:/dev/kfd
- /dev/dri/card0:/dev/dri/card0
- /dev/dri/render128:/dev/dri/render128
cap_add:
- SYS_PTRACE
group_add:
- video
security_opt:
- seccomp:unconfined
Second, validate worker agent:
```
curl http://${host_ip}:9095/v1/chat/completions -X POST -H "Content-Type: application/json" -d '{
"query": "Most recent album by Taylor Swift"
}'
```
**How to Identify GPU Device IDs:**
Use AMD GPU driver utilities to determine the correct `cardN` and `renderN` IDs for your GPU.
Third, validate supervisor agent:
### Set deploy environment variables
#### Setting variables in the operating system environment:
```bash
### Replace the string 'server_address' with your local server IP address
export host_ip='server_address'
### Replace the string 'your_huggingfacehub_token' with your HuggingFacehub repository access token.
export HUGGINGFACEHUB_API_TOKEN='your_huggingfacehub_token'
### Replace the string 'your_langchain_api_key' with your LANGCHAIN API KEY.
export LANGCHAIN_API_KEY='your_langchain_api_key'
export LANGCHAIN_TRACING_V2=""
```
curl http://${host_ip}:9090/v1/chat/completions -X POST -H "Content-Type: application/json" -d '{
"query": "Most recent album by Taylor Swift"
}'
```
### Start the services:
## How to register your own tools with agent
#### If you use vLLM
```bash
cd ~/agentqna-install/GenAIExamples/AgentQnA/docker_compose/amd/gpu/rocm
bash launch_agent_service_vllm_rocm.sh
```
#### If you use TGI
```bash
cd ~/agentqna-install/GenAIExamples/AgentQnA/docker_compose/amd/gpu/rocm
bash launch_agent_service_tgi_rocm.sh
```
All containers should be running and should not restart:
##### If you use vLLM:
- dataprep-redis-server
- doc-index-retriever-server
- embedding-server
- rag-agent-endpoint
- react-agent-endpoint
- redis-vector-db
- reranking-tei-xeon-server
- retriever-redis-server
- sql-agent-endpoint
- tei-embedding-server
- tei-reranking-server
- vllm-service
##### If you use TGI:
- dataprep-redis-server
- doc-index-retriever-server
- embedding-server
- rag-agent-endpoint
- react-agent-endpoint
- redis-vector-db
- reranking-tei-xeon-server
- retriever-redis-server
- sql-agent-endpoint
- tei-embedding-server
- tei-reranking-server
- tgi-service
---
## Validate the Services
### 1. Validate the vLLM/TGI Service
#### If you use vLLM:
```bash
DATA='{"model": "Intel/neural-chat-7b-v3-3t", '\
'"messages": [{"role": "user", "content": "What is Deep Learning?"}], "max_tokens": 256}'
curl http://${HOST_IP}:${VLLM_SERVICE_PORT}/v1/chat/completions \
-X POST \
-d "$DATA" \
-H 'Content-Type: application/json'
```
Checking the response from the service. The response should be similar to JSON:
```json
{
"id": "chatcmpl-142f34ef35b64a8db3deedd170fed951",
"object": "chat.completion",
"created": 1742270316,
"model": "Intel/neural-chat-7b-v3-3",
"choices": [
{
"index": 0,
"message": {
"role": "assistant",
"content": "",
"tool_calls": []
},
"logprobs": null,
"finish_reason": "length",
"stop_reason": null
}
],
"usage": { "prompt_tokens": 66, "total_tokens": 322, "completion_tokens": 256, "prompt_tokens_details": null },
"prompt_logprobs": null
}
```
If the service response has a meaningful response in the value of the "choices.message.content" key,
then we consider the vLLM service to be successfully launched
#### If you use TGI:
```bash
DATA='{"inputs":"What is Deep Learning?",'\
'"parameters":{"max_new_tokens":256,"do_sample": true}}'
curl http://${HOST_IP}:${TGI_SERVICE_PORT}/generate \
-X POST \
-d "$DATA" \
-H 'Content-Type: application/json'
```
Checking the response from the service. The response should be similar to JSON:
```json
{
"generated_text": " "
}
```
If the service response has a meaningful response in the value of the "generated_text" key,
then we consider the TGI service to be successfully launched
### 2. Validate Agent Services
#### Validate Rag Agent Service
```bash
export agent_port=${WORKER_RAG_AGENT_PORT}
prompt="Tell me about Michael Jackson song Thriller"
python3 ~/agentqna-install/GenAIExamples/AgentQnA/tests/test.py --prompt "$prompt" --agent_role "worker" --ext_port $agent_port
```
The response must contain the meaningful text of the response to the request from the "prompt" variable
#### Validate Sql Agent Service
```bash
export agent_port=${WORKER_SQL_AGENT_PORT}
prompt="How many employees are there in the company?"
python3 ~/agentqna-install/GenAIExamples/AgentQnA/tests/test.py --prompt "$prompt" --agent_role "worker" --ext_port $agent_port
```
The answer should make sense - "8 employees in the company"
#### Validate React (Supervisor) Agent Service
```bash
export agent_port=${SUPERVISOR_REACT_AGENT_PORT}
python3 ~/agentqna-install/GenAIExamples/AgentQnA/tests/test.py --agent_role "supervisor" --ext_port $agent_port --stream
```
The response should contain "Iron Maiden"
### 3. Stop application
#### If you use vLLM
```bash
cd ~/agentqna-install/GenAIExamples/AgentQnA/docker_compose/amd/gpu/rocm
bash stop_agent_service_vllm_rocm.sh
```
#### If you use TGI
```bash
cd ~/agentqna-install/GenAIExamples/AgentQnA/docker_compose/amd/gpu/rocm
bash stop_agent_service_tgi_rocm.sh
```
You can take a look at the tools yaml and python files in this example. For more details, please refer to the "Provide your own tools" section in the instructions [here](https://github.com/opea-project/GenAIComps/tree/main/comps/agent/src/README.md).

View File

@@ -1,24 +1,26 @@
# Copyright (C) 2025 Advanced Micro Devices, Inc.
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
services:
tgi-service:
image: ghcr.io/huggingface/text-generation-inference:3.0.0-rocm
container_name: tgi-service
agent-tgi-server:
image: ${AGENTQNA_TGI_IMAGE}
container_name: agent-tgi-server
ports:
- "${TGI_SERVICE_PORT-8085}:80"
- "${AGENTQNA_TGI_SERVICE_PORT-8085}:80"
volumes:
- "${MODEL_CACHE:-./data}:/data"
- /var/opea/agent-service/:/data
environment:
no_proxy: ${no_proxy}
http_proxy: ${http_proxy}
https_proxy: ${https_proxy}
TGI_LLM_ENDPOINT: "http://${ip_address}:${TGI_SERVICE_PORT}"
TGI_LLM_ENDPOINT: "http://${HOST_IP}:${AGENTQNA_TGI_SERVICE_PORT}"
HUGGING_FACE_HUB_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
shm_size: 32g
shm_size: 1g
devices:
- /dev/kfd:/dev/kfd
- /dev/dri:/dev/dri
- /dev/dri/${AGENTQNA_CARD_ID}:/dev/dri/${AGENTQNA_CARD_ID}
- /dev/dri/${AGENTQNA_RENDER_ID}:/dev/dri/${AGENTQNA_RENDER_ID}
cap_add:
- SYS_PTRACE
group_add:
@@ -32,14 +34,14 @@ services:
image: opea/agent:latest
container_name: rag-agent-endpoint
volumes:
- "${TOOLSET_PATH}:/home/user/tools/"
# - ${WORKDIR}/GenAIExamples/AgentQnA/docker_image_build/GenAIComps/comps/agent/langchain/:/home/user/comps/agent/langchain/
- ${TOOLSET_PATH}:/home/user/tools/
ports:
- "${WORKER_RAG_AGENT_PORT:-9095}:9095"
- "9095:9095"
ipc: host
environment:
ip_address: ${ip_address}
strategy: rag_agent_llama
with_memory: false
recursion_limit: ${recursion_limit_worker}
llm_engine: tgi
HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
@@ -59,49 +61,21 @@ services:
LANGCHAIN_PROJECT: "opea-worker-agent-service"
port: 9095
worker-sql-agent:
image: opea/agent:latest
container_name: sql-agent-endpoint
volumes:
- "${WORKDIR}/tests/Chinook_Sqlite.sqlite:/home/user/chinook-db/Chinook_Sqlite.sqlite:rw"
ports:
- "${WORKER_SQL_AGENT_PORT:-9096}:9096"
ipc: host
environment:
ip_address: ${ip_address}
strategy: sql_agent_llama
with_memory: false
db_name: ${db_name}
db_path: ${db_path}
use_hints: false
recursion_limit: ${recursion_limit_worker}
llm_engine: vllm
HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
llm_endpoint_url: ${LLM_ENDPOINT_URL}
model: ${LLM_MODEL_ID}
temperature: ${temperature}
max_new_tokens: ${max_new_tokens}
stream: false
require_human_feedback: false
no_proxy: ${no_proxy}
http_proxy: ${http_proxy}
https_proxy: ${https_proxy}
port: 9096
supervisor-react-agent:
image: opea/agent:latest
container_name: react-agent-endpoint
depends_on:
- agent-tgi-server
- worker-rag-agent
volumes:
- "${TOOLSET_PATH}:/home/user/tools/"
# - ${WORKDIR}/GenAIExamples/AgentQnA/docker_image_build/GenAIComps/comps/agent/langchain/:/home/user/comps/agent/langchain/
- ${TOOLSET_PATH}:/home/user/tools/
ports:
- "${SUPERVISOR_REACT_AGENT_PORT:-9090}:9090"
- "${AGENTQNA_FRONTEND_PORT}:9090"
ipc: host
environment:
ip_address: ${ip_address}
strategy: react_llama
with_memory: true
strategy: react_langgraph
recursion_limit: ${recursion_limit_supervisor}
llm_engine: tgi
HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
@@ -109,7 +83,7 @@ services:
model: ${LLM_MODEL_ID}
temperature: ${temperature}
max_new_tokens: ${max_new_tokens}
stream: true
stream: false
tools: /home/user/tools/supervisor_agent_tools.yaml
require_human_feedback: false
no_proxy: ${no_proxy}
@@ -118,7 +92,6 @@ services:
LANGCHAIN_API_KEY: ${LANGCHAIN_API_KEY}
LANGCHAIN_TRACING_V2: ${LANGCHAIN_TRACING_V2}
LANGCHAIN_PROJECT: "opea-supervisor-agent-service"
CRAG_SERVER: ${CRAG_SERVER}
WORKER_AGENT_URL: ${WORKER_AGENT_URL}
SQL_AGENT_URL: ${SQL_AGENT_URL}
CRAG_SERVER: $CRAG_SERVER
WORKER_AGENT_URL: $WORKER_AGENT_URL
port: 9090

View File

@@ -1,128 +0,0 @@
# Copyright (C) 2025 Advanced Micro Devices, Inc.
services:
vllm-service:
image: ${REGISTRY:-opea}/vllm-rocm:${TAG:-latest}
container_name: vllm-service
ports:
- "${VLLM_SERVICE_PORT:-8081}:8011"
environment:
no_proxy: ${no_proxy}
http_proxy: ${http_proxy}
https_proxy: ${https_proxy}
HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
HF_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
HF_HUB_DISABLE_PROGRESS_BARS: 1
HF_HUB_ENABLE_HF_TRANSFER: 0
WILM_USE_TRITON_FLASH_ATTENTION: 0
PYTORCH_JIT: 0
volumes:
- "${MODEL_CACHE:-./data}:/data"
shm_size: 20G
devices:
- /dev/kfd:/dev/kfd
- /dev/dri/:/dev/dri/
cap_add:
- SYS_PTRACE
group_add:
- video
security_opt:
- seccomp:unconfined
- apparmor=unconfined
command: "--model ${VLLM_LLM_MODEL_ID} --swap-space 16 --disable-log-requests --dtype float16 --tensor-parallel-size 4 --host 0.0.0.0 --port 8011 --num-scheduler-steps 1 --distributed-executor-backend \"mp\""
ipc: host
worker-rag-agent:
image: opea/agent:latest
container_name: rag-agent-endpoint
volumes:
- ${TOOLSET_PATH}:/home/user/tools/
ports:
- "${WORKER_RAG_AGENT_PORT:-9095}:9095"
ipc: host
environment:
ip_address: ${ip_address}
strategy: rag_agent_llama
with_memory: false
recursion_limit: ${recursion_limit_worker}
llm_engine: vllm
HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
llm_endpoint_url: ${LLM_ENDPOINT_URL}
model: ${LLM_MODEL_ID}
temperature: ${temperature}
max_new_tokens: ${max_new_tokens}
stream: false
tools: /home/user/tools/worker_agent_tools.yaml
require_human_feedback: false
RETRIEVAL_TOOL_URL: ${RETRIEVAL_TOOL_URL}
no_proxy: ${no_proxy}
http_proxy: ${http_proxy}
https_proxy: ${https_proxy}
LANGCHAIN_API_KEY: ${LANGCHAIN_API_KEY}
LANGCHAIN_TRACING_V2: ${LANGCHAIN_TRACING_V2}
LANGCHAIN_PROJECT: "opea-worker-agent-service"
port: 9095
worker-sql-agent:
image: opea/agent:latest
container_name: sql-agent-endpoint
volumes:
- "${WORKDIR}/tests/Chinook_Sqlite.sqlite:/home/user/chinook-db/Chinook_Sqlite.sqlite:rw"
ports:
- "${WORKER_SQL_AGENT_PORT:-9096}:9096"
ipc: host
environment:
ip_address: ${ip_address}
strategy: sql_agent_llama
with_memory: false
db_name: ${db_name}
db_path: ${db_path}
use_hints: false
recursion_limit: ${recursion_limit_worker}
llm_engine: vllm
HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
llm_endpoint_url: ${LLM_ENDPOINT_URL}
model: ${LLM_MODEL_ID}
temperature: ${temperature}
max_new_tokens: ${max_new_tokens}
stream: false
require_human_feedback: false
no_proxy: ${no_proxy}
http_proxy: ${http_proxy}
https_proxy: ${https_proxy}
port: 9096
supervisor-react-agent:
image: opea/agent:latest
container_name: react-agent-endpoint
depends_on:
- worker-rag-agent
volumes:
- ${TOOLSET_PATH}:/home/user/tools/
ports:
- "${SUPERVISOR_REACT_AGENT_PORT:-9090}:9090"
ipc: host
environment:
ip_address: ${ip_address}
strategy: react_llama
with_memory: true
recursion_limit: ${recursion_limit_supervisor}
llm_engine: vllm
HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
llm_endpoint_url: ${LLM_ENDPOINT_URL}
model: ${LLM_MODEL_ID}
temperature: ${temperature}
max_new_tokens: ${max_new_tokens}
stream: true
tools: /home/user/tools/supervisor_agent_tools.yaml
require_human_feedback: false
no_proxy: ${no_proxy}
http_proxy: ${http_proxy}
https_proxy: ${https_proxy}
LANGCHAIN_API_KEY: ${LANGCHAIN_API_KEY}
LANGCHAIN_TRACING_V2: ${LANGCHAIN_TRACING_V2}
LANGCHAIN_PROJECT: "opea-supervisor-agent-service"
CRAG_SERVER: ${CRAG_SERVER}
WORKER_AGENT_URL: ${WORKER_AGENT_URL}
SQL_AGENT_URL: ${SQL_AGENT_URL}
port: 9090

View File

@@ -1,87 +1,47 @@
# Copyright (C) 2024 Advanced Micro Devices, Inc.
# SPDX-License-Identifier: Apache-2.0
# Before start script:
# export host_ip="your_host_ip_or_host_name"
# export HUGGINGFACEHUB_API_TOKEN="your_huggingface_api_token"
# export LANGCHAIN_API_KEY="your_langchain_api_key"
# export LANGCHAIN_TRACING_V2=""
# Set server hostname or IP address
WORKPATH=$(dirname "$PWD")/..
export ip_address=${host_ip}
export HUGGINGFACEHUB_API_TOKEN=${your_hf_api_token}
export AGENTQNA_TGI_IMAGE=ghcr.io/huggingface/text-generation-inference:2.3.1-rocm
export AGENTQNA_TGI_SERVICE_PORT="8085"
# Set services IP ports
export TGI_SERVICE_PORT="18110"
export WORKER_RAG_AGENT_PORT="18111"
export WORKER_SQL_AGENT_PORT="18112"
export SUPERVISOR_REACT_AGENT_PORT="18113"
export CRAG_SERVER_PORT="18114"
export WORKPATH=$(dirname "$PWD")
export WORKDIR=${WORKPATH}/../../../
export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN}
export LLM_MODEL_ID="Intel/neural-chat-7b-v3-3"
export HF_CACHE_DIR="./data"
export MODEL_CACHE="./data"
export TOOLSET_PATH=${WORKPATH}/../../../tools/
export recursion_limit_worker=12
export LLM_ENDPOINT_URL=http://${ip_address}:${TGI_SERVICE_PORT}
# LLM related environment variables
export AGENTQNA_CARD_ID="card1"
export AGENTQNA_RENDER_ID="renderD136"
export HF_CACHE_DIR=${HF_CACHE_DIR}
ls $HF_CACHE_DIR
export LLM_MODEL_ID="meta-llama/Meta-Llama-3-8B-Instruct"
#export NUM_SHARDS=4
export LLM_ENDPOINT_URL="http://${ip_address}:${AGENTQNA_TGI_SERVICE_PORT}"
export temperature=0.01
export max_new_tokens=512
export RETRIEVAL_TOOL_URL="http://${ip_address}:8889/v1/retrievaltool"
export LANGCHAIN_API_KEY=${LANGCHAIN_API_KEY}
export LANGCHAIN_TRACING_V2=${LANGCHAIN_TRACING_V2}
export db_name=Chinook
export db_path="sqlite:////home/user/chinook-db/Chinook_Sqlite.sqlite"
# agent related environment variables
export AGENTQNA_WORKER_AGENT_SERVICE_PORT="9095"
export TOOLSET_PATH=/home/huggingface/datamonsters/amd-opea/GenAIExamples/AgentQnA/tools/
echo "TOOLSET_PATH=${TOOLSET_PATH}"
export recursion_limit_worker=12
export recursion_limit_supervisor=10
export CRAG_SERVER=http://${ip_address}:${CRAG_SERVER_PORT}
export WORKER_AGENT_URL="http://${ip_address}:${WORKER_RAG_AGENT_PORT}/v1/chat/completions"
export SQL_AGENT_URL="http://${ip_address}:${WORKER_SQL_AGENT_PORT}/v1/chat/completions"
export HF_CACHE_DIR=${HF_CACHE_DIR}
export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN}
export no_proxy=${no_proxy}
export http_proxy=${http_proxy}
export https_proxy=${https_proxy}
export EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5"
export RERANK_MODEL_ID="BAAI/bge-reranker-base"
export WORKER_AGENT_URL="http://${ip_address}:${AGENTQNA_WORKER_AGENT_SERVICE_PORT}/v1/chat/completions"
export RETRIEVAL_TOOL_URL="http://${ip_address}:8889/v1/retrievaltool"
export CRAG_SERVER=http://${ip_address}:18881
export AGENTQNA_FRONTEND_PORT="9090"
#retrieval_tool
export TEI_EMBEDDING_ENDPOINT="http://${host_ip}:6006"
export TEI_RERANKING_ENDPOINT="http://${host_ip}:8808"
export REDIS_URL="redis://${host_ip}:6379"
export REDIS_URL="redis://${host_ip}:26379"
export INDEX_NAME="rag-redis"
export RERANK_TYPE="tei"
export MEGA_SERVICE_HOST_IP=${host_ip}
export EMBEDDING_SERVICE_HOST_IP=${host_ip}
export RETRIEVER_SERVICE_HOST_IP=${host_ip}
export RERANK_SERVICE_HOST_IP=${host_ip}
export BACKEND_SERVICE_ENDPOINT="http://${host_ip}:8889/v1/retrievaltool"
export DATAPREP_SERVICE_ENDPOINT="http://${host_ip}:6007/v1/dataprep/ingest"
export DATAPREP_GET_FILE_ENDPOINT="http://${host_ip}:6008/v1/dataprep/get"
export DATAPREP_DELETE_FILE_ENDPOINT="http://${host_ip}:6009/v1/dataprep/delete"
export DATAPREP_GET_FILE_ENDPOINT="http://${host_ip}:6007/v1/dataprep/get"
export DATAPREP_DELETE_FILE_ENDPOINT="http://${host_ip}:6007/v1/dataprep/delete"
echo ${WORKER_RAG_AGENT_PORT} > ${WORKPATH}/WORKER_RAG_AGENT_PORT_tmp
echo ${WORKER_SQL_AGENT_PORT} > ${WORKPATH}/WORKER_SQL_AGENT_PORT_tmp
echo ${SUPERVISOR_REACT_AGENT_PORT} > ${WORKPATH}/SUPERVISOR_REACT_AGENT_PORT_tmp
echo ${CRAG_SERVER_PORT} > ${WORKPATH}/CRAG_SERVER_PORT_tmp
echo "Downloading chinook data..."
echo Y | rm -R chinook-database
git clone https://github.com/lerocha/chinook-database.git
echo Y | rm -R ../../../../../AgentQnA/tests/Chinook_Sqlite.sqlite
cp chinook-database/ChinookDatabase/DataSources/Chinook_Sqlite.sqlite ../../../../../AgentQnA/tests
docker compose -f ../../../../../DocIndexRetriever/docker_compose/intel/cpu/xeon/compose.yaml up -d
docker compose -f compose.yaml up -d
n=0
until [[ "$n" -ge 100 ]]; do
docker logs tgi-service > ${WORKPATH}/tgi_service_start.log
if grep -q Connected ${WORKPATH}/tgi_service_start.log; then
break
fi
sleep 10s
n=$((n+1))
done
echo "Starting CRAG server"
docker run -d --runtime=runc --name=kdd-cup-24-crag-service -p=${CRAG_SERVER_PORT}:8000 docker.io/aicrowd/kdd-cup-24-crag-mock-api:v0

View File

@@ -1,88 +0,0 @@
# Copyright (C) 2024 Advanced Micro Devices, Inc.
# SPDX-License-Identifier: Apache-2.0
# Before start script:
# export host_ip="your_host_ip_or_host_name"
# export HUGGINGFACEHUB_API_TOKEN="your_huggingface_api_token"
# export LANGCHAIN_API_KEY="your_langchain_api_key"
# export LANGCHAIN_TRACING_V2=""
# Set server hostname or IP address
export ip_address=${host_ip}
# Set services IP ports
export VLLM_SERVICE_PORT="18110"
export WORKER_RAG_AGENT_PORT="18111"
export WORKER_SQL_AGENT_PORT="18112"
export SUPERVISOR_REACT_AGENT_PORT="18113"
export CRAG_SERVER_PORT="18114"
export WORKPATH=$(dirname "$PWD")
export WORKDIR=${WORKPATH}/../../../
export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN}
export VLLM_LLM_MODEL_ID="Intel/neural-chat-7b-v3-3"
export HF_CACHE_DIR="./data"
export MODEL_CACHE="./data"
export TOOLSET_PATH=${WORKPATH}/../../../tools/
export recursion_limit_worker=12
export LLM_ENDPOINT_URL=http://${ip_address}:${VLLM_SERVICE_PORT}
export LLM_MODEL_ID=${VLLM_LLM_MODEL_ID}
export temperature=0.01
export max_new_tokens=512
export RETRIEVAL_TOOL_URL="http://${ip_address}:8889/v1/retrievaltool"
export LANGCHAIN_API_KEY=${LANGCHAIN_API_KEY}
export LANGCHAIN_TRACING_V2=${LANGCHAIN_TRACING_V2}
export db_name=Chinook
export db_path="sqlite:////home/user/chinook-db/Chinook_Sqlite.sqlite"
export recursion_limit_worker=12
export recursion_limit_supervisor=10
export CRAG_SERVER=http://${ip_address}:${CRAG_SERVER_PORT}
export WORKER_AGENT_URL="http://${ip_address}:${WORKER_RAG_AGENT_PORT}/v1/chat/completions"
export SQL_AGENT_URL="http://${ip_address}:${WORKER_SQL_AGENT_PORT}/v1/chat/completions"
export HF_CACHE_DIR=${HF_CACHE_DIR}
export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN}
export no_proxy=${no_proxy}
export http_proxy=${http_proxy}
export https_proxy=${https_proxy}
export EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5"
export RERANK_MODEL_ID="BAAI/bge-reranker-base"
export TEI_EMBEDDING_ENDPOINT="http://${host_ip}:6006"
export TEI_RERANKING_ENDPOINT="http://${host_ip}:8808"
export REDIS_URL="redis://${host_ip}:6379"
export INDEX_NAME="rag-redis"
export RERANK_TYPE="tei"
export MEGA_SERVICE_HOST_IP=${host_ip}
export EMBEDDING_SERVICE_HOST_IP=${host_ip}
export RETRIEVER_SERVICE_HOST_IP=${host_ip}
export RERANK_SERVICE_HOST_IP=${host_ip}
export BACKEND_SERVICE_ENDPOINT="http://${host_ip}:8889/v1/retrievaltool"
export DATAPREP_SERVICE_ENDPOINT="http://${host_ip}:6007/v1/dataprep/ingest"
export DATAPREP_GET_FILE_ENDPOINT="http://${host_ip}:6008/v1/dataprep/get"
export DATAPREP_DELETE_FILE_ENDPOINT="http://${host_ip}:6009/v1/dataprep/delete"
echo ${WORKER_RAG_AGENT_PORT} > ${WORKPATH}/WORKER_RAG_AGENT_PORT_tmp
echo ${WORKER_SQL_AGENT_PORT} > ${WORKPATH}/WORKER_SQL_AGENT_PORT_tmp
echo ${SUPERVISOR_REACT_AGENT_PORT} > ${WORKPATH}/SUPERVISOR_REACT_AGENT_PORT_tmp
echo ${CRAG_SERVER_PORT} > ${WORKPATH}/CRAG_SERVER_PORT_tmp
echo "Downloading chinook data..."
echo Y | rm -R chinook-database
git clone https://github.com/lerocha/chinook-database.git
echo Y | rm -R ../../../../../AgentQnA/tests/Chinook_Sqlite.sqlite
cp chinook-database/ChinookDatabase/DataSources/Chinook_Sqlite.sqlite ../../../../../AgentQnA/tests
docker compose -f ../../../../../DocIndexRetriever/docker_compose/intel/cpu/xeon/compose.yaml up -d
docker compose -f compose_vllm.yaml up -d
n=0
until [[ "$n" -ge 500 ]]; do
docker logs vllm-service >& "${WORKPATH}"/vllm-service_start.log
if grep -q "Application startup complete" "${WORKPATH}"/vllm-service_start.log; then
break
fi
sleep 20s
n=$((n+1))
done
echo "Starting CRAG server"
docker run -d --runtime=runc --name=kdd-cup-24-crag-service -p=${CRAG_SERVER_PORT}:8000 docker.io/aicrowd/kdd-cup-24-crag-mock-api:v0

View File

@@ -6,7 +6,7 @@
WORKPATH=$(dirname "$PWD")/..
export ip_address=${host_ip}
export HUGGINGFACEHUB_API_TOKEN=${your_hf_api_token}
export AGENTQNA_TGI_IMAGE=ghcr.io/huggingface/text-generation-inference:2.4.1-rocm
export AGENTQNA_TGI_IMAGE=ghcr.io/huggingface/text-generation-inference:2.3.1-rocm
export AGENTQNA_TGI_SERVICE_PORT="19001"
# LLM related environment variables
@@ -14,7 +14,7 @@ export AGENTQNA_CARD_ID="card1"
export AGENTQNA_RENDER_ID="renderD136"
export HF_CACHE_DIR=${HF_CACHE_DIR}
ls $HF_CACHE_DIR
export LLM_MODEL_ID="Intel/neural-chat-7b-v3-3"
export LLM_MODEL_ID="meta-llama/Meta-Llama-3-8B-Instruct"
export NUM_SHARDS=4
export LLM_ENDPOINT_URL="http://${ip_address}:${AGENTQNA_TGI_SERVICE_PORT}"
export temperature=0.01
@@ -44,19 +44,3 @@ export BACKEND_SERVICE_ENDPOINT="http://${host_ip}:8889/v1/retrievaltool"
export DATAPREP_SERVICE_ENDPOINT="http://${host_ip}:6007/v1/dataprep/ingest"
export DATAPREP_GET_FILE_ENDPOINT="http://${host_ip}:6007/v1/dataprep/get"
export DATAPREP_DELETE_FILE_ENDPOINT="http://${host_ip}:6007/v1/dataprep/delete"
echo "Removing chinook data..."
echo Y | rm -R chinook-database
if [ -d "chinook-database" ]; then
rm -rf chinook-database
fi
echo "Chinook data removed!"
echo "Stopping CRAG server"
docker rm kdd-cup-24-crag-service --force
echo "Stopping Agent services"
docker compose -f compose.yaml down
echo "Stopping Retrieval services"
docker compose -f ../../../../../DocIndexRetriever/docker_compose/intel/cpu/xeon/compose.yaml down

View File

@@ -1,84 +0,0 @@
# Copyright (C) 2024 Advanced Micro Devices, Inc.
# SPDX-License-Identifier: Apache-2.0
# Before start script:
# export host_ip="your_host_ip_or_host_name"
# export HUGGINGFACEHUB_API_TOKEN="your_huggingface_api_token"
# export LANGCHAIN_API_KEY="your_langchain_api_key"
# export LANGCHAIN_TRACING_V2=""
# Set server hostname or IP address
export ip_address=${host_ip}
# Set services IP ports
export VLLM_SERVICE_PORT="18110"
export WORKER_RAG_AGENT_PORT="18111"
export WORKER_SQL_AGENT_PORT="18112"
export SUPERVISOR_REACT_AGENT_PORT="18113"
export CRAG_SERVER_PORT="18114"
export WORKPATH=$(dirname "$PWD")
export WORKDIR=${WORKPATH}/../../../
export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN}
export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN}
export VLLM_LLM_MODEL_ID="Intel/neural-chat-7b-v3-3"
export HF_CACHE_DIR="./data"
export MODEL_CACHE="./data"
export TOOLSET_PATH=${WORKPATH}/../../../tools/
export recursion_limit_worker=12
export LLM_ENDPOINT_URL=http://${ip_address}:${VLLM_SERVICE_PORT}
export LLM_MODEL_ID=${VLLM_LLM_MODEL_ID}
export temperature=0.01
export max_new_tokens=512
export RETRIEVAL_TOOL_URL="http://${ip_address}:8889/v1/retrievaltool"
export LANGCHAIN_API_KEY=${LANGCHAIN_API_KEY}
export LANGCHAIN_TRACING_V2=${LANGCHAIN_TRACING_V2}
export db_name=Chinook
export db_path="sqlite:////home/user/chinook-db/Chinook_Sqlite.sqlite"
export recursion_limit_worker=12
export recursion_limit_supervisor=10
export CRAG_SERVER=http://${ip_address}:${CRAG_SERVER_PORT}
export WORKER_AGENT_URL="http://${ip_address}:${WORKER_RAG_AGENT_PORT}/v1/chat/completions"
export SQL_AGENT_URL="http://${ip_address}:${WORKER_SQL_AGENT_PORT}/v1/chat/completions"
export HF_CACHE_DIR=${HF_CACHE_DIR}
export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN}
export no_proxy=${no_proxy}
export http_proxy=${http_proxy}
export https_proxy=${https_proxy}
export EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5"
export RERANK_MODEL_ID="BAAI/bge-reranker-base"
export TEI_EMBEDDING_ENDPOINT="http://${host_ip}:6006"
export TEI_RERANKING_ENDPOINT="http://${host_ip}:8808"
export REDIS_URL="redis://${host_ip}:6379"
export INDEX_NAME="rag-redis"
export RERANK_TYPE="tei"
export MEGA_SERVICE_HOST_IP=${host_ip}
export EMBEDDING_SERVICE_HOST_IP=${host_ip}
export RETRIEVER_SERVICE_HOST_IP=${host_ip}
export RERANK_SERVICE_HOST_IP=${host_ip}
export BACKEND_SERVICE_ENDPOINT="http://${host_ip}:8889/v1/retrievaltool"
export DATAPREP_SERVICE_ENDPOINT="http://${host_ip}:6007/v1/dataprep/ingest"
export DATAPREP_GET_FILE_ENDPOINT="http://${host_ip}:6008/v1/dataprep/get"
export DATAPREP_DELETE_FILE_ENDPOINT="http://${host_ip}:6009/v1/dataprep/delete"
echo ${WORKER_RAG_AGENT_PORT} > ${WORKPATH}/WORKER_RAG_AGENT_PORT_tmp
echo ${WORKER_SQL_AGENT_PORT} > ${WORKPATH}/WORKER_SQL_AGENT_PORT_tmp
echo ${SUPERVISOR_REACT_AGENT_PORT} > ${WORKPATH}/SUPERVISOR_REACT_AGENT_PORT_tmp
echo ${CRAG_SERVER_PORT} > ${WORKPATH}/CRAG_SERVER_PORT_tmp
echo "Removing chinook data..."
echo Y | rm -R chinook-database
if [ -d "chinook-database" ]; then
rm -rf chinook-database
fi
echo "Chinook data removed!"
echo "Stopping CRAG server"
docker rm kdd-cup-24-crag-service --force
echo "Stopping Agent services"
docker compose -f compose_vllm.yaml down
echo "Stopping Retrieval services"
docker compose -f ../../../../../DocIndexRetriever/docker_compose/intel/cpu/xeon/compose.yaml down

View File

@@ -1,3 +1,123 @@
# Single node on-prem deployment with Docker Compose on Xeon Scalable processors
This example showcases a hierarchical multi-agent system for question-answering applications. To deploy the example on Xeon, OpenAI LLM models via API calls are used. For instructions, refer to the deployment guide [here](../../../../README.md).
This example showcases a hierarchical multi-agent system for question-answering applications. We deploy the example on Xeon. For LLMs, we use OpenAI models via API calls. For instructions on using open-source LLMs, please refer to the deployment guide [here](../../../../README.md).
## Deployment with docker
1. First, clone this repo.
```
export WORKDIR=<your-work-directory>
cd $WORKDIR
git clone https://github.com/opea-project/GenAIExamples.git
```
2. Set up environment for this example </br>
```
# Example: host_ip="192.168.1.1" or export host_ip="External_Public_IP"
export host_ip=$(hostname -I | awk '{print $1}')
# if you are in a proxy environment, also set the proxy-related environment variables
export http_proxy="Your_HTTP_Proxy"
export https_proxy="Your_HTTPs_Proxy"
# Example: no_proxy="localhost, 127.0.0.1, 192.168.1.1"
export no_proxy="Your_No_Proxy"
export TOOLSET_PATH=$WORKDIR/GenAIExamples/AgentQnA/tools/
#OPANAI_API_KEY if you want to use OpenAI models
export OPENAI_API_KEY=<your-openai-key>
```
3. Deploy the retrieval tool (i.e., DocIndexRetriever mega-service)
First, launch the mega-service.
```
cd $WORKDIR/GenAIExamples/AgentQnA/retrieval_tool
bash launch_retrieval_tool.sh
```
Then, ingest data into the vector database. Here we provide an example. You can ingest your own data.
```
bash run_ingest_data.sh
```
4. Prepare SQL database
In this example, we will use the SQLite database provided in the [TAG-Bench](https://github.com/TAG-Research/TAG-Bench/tree/main). Run the commands below.
```
# Download data
cd $WORKDIR
git clone https://github.com/TAG-Research/TAG-Bench.git
cd TAG-Bench/setup
chmod +x get_dbs.sh
./get_dbs.sh
```
5. Launch Tool service
In this example, we will use some of the mock APIs provided in the Meta CRAG KDD Challenge to demonstrate the benefits of gaining additional context from mock knowledge graphs.
```
docker run -d -p=8080:8000 docker.io/aicrowd/kdd-cup-24-crag-mock-api:v0
```
6. Launch multi-agent system
The configurations of the supervisor agent and the worker agents are defined in the docker-compose yaml file. We currently use openAI GPT-4o-mini as LLM.
```
cd $WORKDIR/GenAIExamples/AgentQnA/docker_compose/intel/cpu/xeon
bash launch_agent_service_openai.sh
```
7. [Optional] Build `Agent` docker image if pulling images failed.
```
git clone https://github.com/opea-project/GenAIComps.git
cd GenAIComps
docker build -t opea/agent:latest -f comps/agent/src/Dockerfile .
```
## Validate services
First look at logs of the agent docker containers:
```
# worker RAG agent
docker logs rag-agent-endpoint
# worker SQL agent
docker logs sql-agent-endpoint
```
```
# supervisor agent
docker logs react-agent-endpoint
```
You should see something like "HTTP server setup successful" if the docker containers are started successfully.</p>
Second, validate worker RAG agent:
```
curl http://${host_ip}:9095/v1/chat/completions -X POST -H "Content-Type: application/json" -d '{
"messages": "Michael Jackson song Thriller"
}'
```
Third, validate worker SQL agent:
```
curl http://${host_ip}:9095/v1/chat/completions -X POST -H "Content-Type: application/json" -d '{
"messages": "How many employees are in the company?"
}'
```
Finally, validate supervisor agent:
```
curl http://${host_ip}:9090/v1/chat/completions -X POST -H "Content-Type: application/json" -d '{
"messages": "How many albums does Iron Maiden have?"
}'
```
## How to register your own tools with agent
You can take a look at the tools yaml and python files in this example. For more details, please refer to the "Provide your own tools" section in the instructions [here](https://github.com/opea-project/GenAIComps/tree/main/comps/agent/src/README.md).

View File

@@ -13,7 +13,6 @@ services:
environment:
ip_address: ${ip_address}
strategy: rag_agent
with_memory: false
recursion_limit: ${recursion_limit_worker}
llm_engine: openai
OPENAI_API_KEY: ${OPENAI_API_KEY}
@@ -36,17 +35,17 @@ services:
image: opea/agent:latest
container_name: sql-agent-endpoint
volumes:
- ${WORKDIR}/GenAIExamples/AgentQnA/tests:/home/user/chinook-db # SQL database
- ${WORKDIR}/TAG-Bench/:/home/user/TAG-Bench # SQL database
ports:
- "9096:9096"
ipc: host
environment:
ip_address: ${ip_address}
strategy: sql_agent
with_memory: false
db_name: ${db_name}
db_path: ${db_path}
use_hints: false
hints_file: /home/user/TAG-Bench/${db_name}_hints.csv
recursion_limit: ${recursion_limit_worker}
llm_engine: openai
OPENAI_API_KEY: ${OPENAI_API_KEY}
@@ -65,7 +64,6 @@ services:
container_name: react-agent-endpoint
depends_on:
- worker-rag-agent
- worker-sql-agent
volumes:
- ${TOOLSET_PATH}:/home/user/tools/
ports:
@@ -73,15 +71,14 @@ services:
ipc: host
environment:
ip_address: ${ip_address}
strategy: react_llama
with_memory: true
strategy: react_langgraph
recursion_limit: ${recursion_limit_supervisor}
llm_engine: openai
OPENAI_API_KEY: ${OPENAI_API_KEY}
model: ${model}
temperature: ${temperature}
max_new_tokens: ${max_new_tokens}
stream: true
stream: false
tools: /home/user/tools/supervisor_agent_tools.yaml
require_human_feedback: false
no_proxy: ${no_proxy}
@@ -92,23 +89,4 @@ services:
LANGCHAIN_PROJECT: "opea-supervisor-agent-service"
CRAG_SERVER: $CRAG_SERVER
WORKER_AGENT_URL: $WORKER_AGENT_URL
SQL_AGENT_URL: $SQL_AGENT_URL
port: 9090
mock-api:
image: docker.io/aicrowd/kdd-cup-24-crag-mock-api:v0
container_name: mock-api
ports:
- "8080:8000"
ipc: host
agent-ui:
image: opea/agent-ui
container_name: agent-ui
volumes:
- ${WORKDIR}/GenAIExamples/AgentQnA/ui/svelte/.env:/home/user/svelte/.env # test db
ports:
- "5173:5173"
ipc: host
networks:
default:
driver: bridge

View File

@@ -0,0 +1,22 @@
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
pushd "../../../../../" > /dev/null
source .set_env.sh
popd > /dev/null
export TOOLSET_PATH=$WORKDIR/GenAIExamples/AgentQnA/tools/
export ip_address=$(hostname -I | awk '{print $1}')
export recursion_limit_worker=12
export recursion_limit_supervisor=10
export model="gpt-4o-mini-2024-07-18"
export temperature=0
export max_new_tokens=4096
export OPENAI_API_KEY=${OPENAI_API_KEY}
export WORKER_AGENT_URL="http://${ip_address}:9095/v1/chat/completions"
export SQL_AGENT_URL="http://${ip_address}:9096/v1/chat/completions"
export RETRIEVAL_TOOL_URL="http://${ip_address}:8889/v1/retrievaltool"
export CRAG_SERVER=http://${ip_address}:8080
export db_name=california_schools
export db_path="sqlite:////home/user/TAG-Bench/dev_folder/dev_databases/${db_name}/${db_name}.sqlite"
docker compose -f compose_openai.yaml up -d

View File

@@ -1,57 +0,0 @@
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
pushd "../../../../../" > /dev/null
source .set_env.sh
popd > /dev/null
if [[ -z "${WORKDIR}" ]]; then
echo "Please set WORKDIR environment variable"
exit 0
fi
echo "WORKDIR=${WORKDIR}"
export TOOLSET_PATH=$WORKDIR/GenAIExamples/AgentQnA/tools/
export ip_address=$(hostname -I | awk '{print $1}')
export recursion_limit_worker=12
export recursion_limit_supervisor=10
export model="gpt-4o-mini-2024-07-18"
export temperature=0
export max_new_tokens=4096
export OPENAI_API_KEY=${OPENAI_API_KEY}
export WORKER_AGENT_URL="http://${ip_address}:9095/v1/chat/completions"
export SQL_AGENT_URL="http://${ip_address}:9096/v1/chat/completions"
export RETRIEVAL_TOOL_URL="http://${ip_address}:8889/v1/retrievaltool"
export CRAG_SERVER=http://${ip_address}:8080
export db_name=Chinook
export db_path="sqlite:////home/user/chinook-db/Chinook_Sqlite.sqlite"
if [ ! -f $WORKDIR/GenAIExamples/AgentQnA/tests/Chinook_Sqlite.sqlite ]; then
echo "Download Chinook_Sqlite!"
wget -O $WORKDIR/GenAIExamples/AgentQnA/tests/Chinook_Sqlite.sqlite https://github.com/lerocha/chinook-database/releases/download/v1.4.5/Chinook_Sqlite.sqlite
fi
# retriever
export host_ip=$(hostname -I | awk '{print $1}')
export HF_CACHE_DIR=${HF_CACHE_DIR}
export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN}
export no_proxy=${no_proxy}
export http_proxy=${http_proxy}
export https_proxy=${https_proxy}
export EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5"
export RERANK_MODEL_ID="BAAI/bge-reranker-base"
export TEI_EMBEDDING_ENDPOINT="http://${host_ip}:6006"
export TEI_RERANKING_ENDPOINT="http://${host_ip}:8808"
export REDIS_URL="redis://${host_ip}:6379"
export INDEX_NAME="rag-redis"
export RERANK_TYPE="tei"
export MEGA_SERVICE_HOST_IP=${host_ip}
export EMBEDDING_SERVICE_HOST_IP=${host_ip}
export RETRIEVER_SERVICE_HOST_IP=${host_ip}
export RERANK_SERVICE_HOST_IP=${host_ip}
export BACKEND_SERVICE_ENDPOINT="http://${host_ip}:8889/v1/retrievaltool"
export DATAPREP_SERVICE_ENDPOINT="http://${host_ip}:6007/v1/dataprep/ingest"
export DATAPREP_GET_FILE_ENDPOINT="http://${host_ip}:6008/v1/dataprep/get"
export DATAPREP_DELETE_FILE_ENDPOINT="http://${host_ip}:6009/v1/dataprep/delete"
export no_proxy="$no_proxy,rag-agent-endpoint,sql-agent-endpoint,react-agent-endpoint,agent-ui"

View File

@@ -1,3 +1,147 @@
# Single node on-prem deployment AgentQnA on Gaudi
This example showcases a hierarchical multi-agent system for question-answering applications. To deploy the example on Gaudi using open-source LLMs, refer to the deployment guide [here](../../../../README.md).
This example showcases a hierarchical multi-agent system for question-answering applications. We deploy the example on Gaudi using open-source LLMs.
For more details, please refer to the deployment guide [here](../../../../README.md).
## Deployment with docker
1. First, clone this repo.
```
export WORKDIR=<your-work-directory>
cd $WORKDIR
git clone https://github.com/opea-project/GenAIExamples.git
```
2. Set up environment for this example </br>
```
# Example: host_ip="192.168.1.1" or export host_ip="External_Public_IP"
export host_ip=$(hostname -I | awk '{print $1}')
# if you are in a proxy environment, also set the proxy-related environment variables
export http_proxy="Your_HTTP_Proxy"
export https_proxy="Your_HTTPs_Proxy"
# Example: no_proxy="localhost, 127.0.0.1, 192.168.1.1"
export no_proxy="Your_No_Proxy"
export TOOLSET_PATH=$WORKDIR/GenAIExamples/AgentQnA/tools/
# for using open-source llms
export HUGGINGFACEHUB_API_TOKEN=<your-HF-token>
# Example export HF_CACHE_DIR=$WORKDIR so that no need to redownload every time
export HF_CACHE_DIR=<directory-where-llms-are-downloaded>
```
3. Deploy the retrieval tool (i.e., DocIndexRetriever mega-service)
First, launch the mega-service.
```
cd $WORKDIR/GenAIExamples/AgentQnA/retrieval_tool
bash launch_retrieval_tool.sh
```
Then, ingest data into the vector database. Here we provide an example. You can ingest your own data.
```
bash run_ingest_data.sh
```
4. Prepare SQL database
In this example, we will use the Chinook SQLite database. Run the commands below.
```
# Download data
cd $WORKDIR
git clone https://github.com/lerocha/chinook-database.git
cp chinook-database/ChinookDatabase/DataSources/Chinook_Sqlite.sqlite $WORKDIR/GenAIExamples/AgentQnA/tests/
```
5. Launch Tool service
In this example, we will use some of the mock APIs provided in the Meta CRAG KDD Challenge to demonstrate the benefits of gaining additional context from mock knowledge graphs.
```
docker run -d -p=8080:8000 docker.io/aicrowd/kdd-cup-24-crag-mock-api:v0
```
6. Launch multi-agent system
On Gaudi2 we will serve `meta-llama/Meta-Llama-3.1-70B-Instruct` using vllm.
First build vllm-gaudi docker image.
```bash
cd $WORKDIR
git clone https://github.com/vllm-project/vllm.git
cd ./vllm
git checkout v0.6.6
docker build --no-cache -f Dockerfile.hpu -t opea/vllm-gaudi:latest --shm-size=128g . --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy
```
Then launch vllm on Gaudi2 with the command below.
```bash
vllm_port=8086
model="meta-llama/Meta-Llama-3.1-70B-Instruct"
docker run -d --runtime=habana --rm --name "vllm-gaudi-server" -e HABANA_VISIBLE_DEVICES=0,1,2,3 -p $vllm_port:8000 -v $vllm_volume:/data -e HF_TOKEN=$HF_TOKEN -e HUGGING_FACE_HUB_TOKEN=$HF_TOKEN -e HF_HOME=/data -e OMPI_MCA_btl_vader_single_copy_mechanism=none -e PT_HPU_ENABLE_LAZY_COLLECTIVES=true -e http_proxy=$http_proxy -e https_proxy=$https_proxy -e no_proxy=$no_proxy -e VLLM_SKIP_WARMUP=true --cap-add=sys_nice --ipc=host opea/vllm-gaudi:latest --model ${model} --max-seq-len-to-capture 16384 --tensor-parallel-size 4
```
Then launch Agent microservices.
```bash
cd $WORKDIR/GenAIExamples/AgentQnA/docker_compose/intel/hpu/gaudi/
bash launch_agent_service_gaudi.sh
```
7. [Optional] Build `Agent` docker image if pulling images failed.
If docker image pulling failed in Step 6 above, build the agent docker image with the commands below. After image build, try Step 6 again.
```
git clone https://github.com/opea-project/GenAIComps.git
cd GenAIComps
docker build -t opea/agent:latest -f comps/agent/src/Dockerfile .
```
## Validate services
First look at logs of the agent docker containers:
```
# worker RAG agent
docker logs rag-agent-endpoint
# worker SQL agent
docker logs sql-agent-endpoint
```
```
# supervisor agent
docker logs react-agent-endpoint
```
You should see something like "HTTP server setup successful" if the docker containers are started successfully.</p>
Second, validate worker RAG agent:
```
curl http://${host_ip}:9095/v1/chat/completions -X POST -H "Content-Type: application/json" -d '{
"messages": "Michael Jackson song Thriller"
}'
```
Third, validate worker SQL agent:
```
curl http://${host_ip}:9095/v1/chat/completions -X POST -H "Content-Type: application/json" -d '{
"messages": "How many employees are in the company?"
}'
```
Finally, validate supervisor agent:
```
curl http://${host_ip}:9090/v1/chat/completions -X POST -H "Content-Type: application/json" -d '{
"messages": "How many albums does Iron Maiden have?"
}'
```
## How to register your own tools with agent
You can take a look at the tools yaml and python files in this example. For more details, please refer to the "Provide your own tools" section in the instructions [here](https://github.com/opea-project/GenAIComps/tree/main/comps/agent/src/README.md).

View File

@@ -1,93 +0,0 @@
# Copyright (C) 2025 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
services:
tei-embedding-service:
command: --model-id ${EMBEDDING_MODEL_ID} --auto-truncate --otlp-endpoint $OTEL_EXPORTER_OTLP_TRACES_ENDPOINT
tei-reranking-service:
command: --model-id ${RERANK_MODEL_ID} --auto-truncate --otlp-endpoint $OTEL_EXPORTER_OTLP_TRACES_ENDPOINT
jaeger:
image: jaegertracing/all-in-one:1.67.0
container_name: jaeger
ports:
- "16686:16686"
- "4317:4317"
- "4318:4318"
- "9411:9411"
ipc: host
environment:
no_proxy: ${no_proxy}
http_proxy: ${http_proxy}
https_proxy: ${https_proxy}
COLLECTOR_ZIPKIN_HOST_PORT: 9411
restart: unless-stopped
prometheus:
image: prom/prometheus:v2.52.0
container_name: prometheus
user: root
volumes:
- ./prometheus.yaml:/etc/prometheus/prometheus.yaml
- ./prometheus_data:/prometheus
command:
- '--config.file=/etc/prometheus/prometheus.yaml'
ports:
- '9091:9090'
ipc: host
restart: unless-stopped
grafana:
image: grafana/grafana:11.0.0
container_name: grafana
volumes:
- ./grafana_data:/var/lib/grafana
- ./grafana/dashboards:/var/lib/grafana/dashboards
- ./grafana/provisioning:/etc/grafana/provisioning
user: root
environment:
GF_SECURITY_ADMIN_PASSWORD: admin
GF_RENDERING_CALLBACK_URL: http://grafana:3000/
GF_LOG_FILTERS: rendering:debug
depends_on:
- prometheus
ports:
- '3000:3000'
ipc: host
restart: unless-stopped
node-exporter:
image: prom/node-exporter
container_name: node-exporter
volumes:
- /proc:/host/proc:ro
- /sys:/host/sys:ro
- /:/rootfs:ro
command:
- '--path.procfs=/host/proc'
- '--path.sysfs=/host/sys'
- --collector.filesystem.ignored-mount-points
- "^/(sys|proc|dev|host|etc|rootfs/var/lib/docker/containers|rootfs/var/lib/docker/overlay2|rootfs/run/docker/netns|rootfs/var/lib/docker/aufs)($$|/)"
ports:
- 9100:9100
restart: always
deploy:
mode: global
gaudi-exporter:
image: vault.habana.ai/gaudi-metric-exporter/metric-exporter:1.19.2-32
container_name: gaudi-exporter
volumes:
- /proc:/host/proc:ro
- /sys:/host/sys:ro
- /:/rootfs:ro
- /dev:/dev
ports:
- 41612:41611
restart: always
deploy:
mode: global
worker-rag-agent:
environment:
- TELEMETRY_ENDPOINT=${TELEMETRY_ENDPOINT}
worker-sql-agent:
environment:
- TELEMETRY_ENDPOINT=${TELEMETRY_ENDPOINT}
supervisor-react-agent:
environment:
- TELEMETRY_ENDPOINT=${TELEMETRY_ENDPOINT}

View File

@@ -1,9 +0,0 @@
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
services:
supervisor-react-agent:
environment:
- tools=/home/user/tools/supervisor_agent_webtools.yaml
- GOOGLE_CSE_ID=${GOOGLE_CSE_ID}
- GOOGLE_API_KEY=${GOOGLE_API_KEY}

View File

@@ -13,7 +13,6 @@ services:
environment:
ip_address: ${ip_address}
strategy: rag_agent_llama
with_memory: false
recursion_limit: ${recursion_limit_worker}
llm_engine: vllm
HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
@@ -44,7 +43,6 @@ services:
environment:
ip_address: ${ip_address}
strategy: sql_agent_llama
with_memory: false
db_name: ${db_name}
db_path: ${db_path}
use_hints: false
@@ -76,7 +74,6 @@ services:
environment:
ip_address: ${ip_address}
strategy: react_llama
with_memory: true
recursion_limit: ${recursion_limit_supervisor}
llm_engine: vllm
HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
@@ -84,7 +81,7 @@ services:
model: ${LLM_MODEL_ID}
temperature: ${temperature}
max_new_tokens: ${max_new_tokens}
stream: true
stream: false
tools: /home/user/tools/supervisor_agent_tools.yaml
require_human_feedback: false
no_proxy: ${no_proxy}
@@ -97,47 +94,3 @@ services:
WORKER_AGENT_URL: $WORKER_AGENT_URL
SQL_AGENT_URL: $SQL_AGENT_URL
port: 9090
mock-api:
image: docker.io/aicrowd/kdd-cup-24-crag-mock-api:v0
container_name: mock-api
ports:
- "8080:8000"
ipc: host
agent-ui:
image: opea/agent-ui
container_name: agent-ui
volumes:
- ${WORKDIR}/GenAIExamples/AgentQnA/ui/svelte/.env:/home/user/svelte/.env
environment:
host_ip: ${host_ip}
ports:
- "5173:5173"
ipc: host
vllm-service:
image: ${REGISTRY:-opea}/vllm-gaudi:${TAG:-latest}
container_name: vllm-gaudi-server
ports:
- "8086:8000"
volumes:
- "./data:/data"
environment:
no_proxy: ${no_proxy}
http_proxy: ${http_proxy}
https_proxy: ${https_proxy}
HF_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
HABANA_VISIBLE_DEVICES: all
OMPI_MCA_btl_vader_single_copy_mechanism: none
LLM_MODEL_ID: ${LLM_MODEL_ID}
VLLM_TORCH_PROFILER_DIR: "/mnt"
VLLM_SKIP_WARMUP: true
PT_HPU_ENABLE_LAZY_COLLECTIVES: true
healthcheck:
test: ["CMD-SHELL", "curl -f http://$host_ip:8086/health || exit 1"]
interval: 10s
timeout: 10s
retries: 100
runtime: habana
cap_add:
- SYS_NICE
ipc: host
command: --model $LLM_MODEL_ID --tensor-parallel-size 4 --host 0.0.0.0 --port 8000 --block-size 128 --max-num-seqs 256 --max-seq_len-to-capture 16384

View File

@@ -1,10 +0,0 @@
# Copyright (C) 2025 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
rm *.json
wget https://raw.githubusercontent.com/opea-project/GenAIEval/refs/heads/main/evals/benchmark/grafana/chatqna_megaservice_grafana.json
mv chatqna_megaservice_grafana.json agentqna_microervices_grafana.json
wget https://raw.githubusercontent.com/opea-project/GenAIEval/refs/heads/main/evals/benchmark/grafana/vllm_grafana.json
wget https://raw.githubusercontent.com/opea-project/GenAIEval/refs/heads/main/evals/benchmark/grafana/tgi_grafana.json
wget https://raw.githubusercontent.com/opea-project/GenAIEval/refs/heads/main/evals/benchmark/grafana/node_grafana.json
wget https://raw.githubusercontent.com/opea-project/GenAIEval/refs/heads/main/evals/benchmark/grafana/gaudi_grafana.json

View File

@@ -1,14 +0,0 @@
# Copyright (C) 2025 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
apiVersion: 1
providers:
- name: 'default'
orgId: 1
folder: ''
type: file
disableDeletion: false
updateIntervalSeconds: 10 #how often Grafana will scan for changed dashboards
options:
path: /var/lib/grafana/dashboards

View File

@@ -1,54 +0,0 @@
# Copyright (C) 2025 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
# config file version
apiVersion: 1
# list of datasources that should be deleted from the database
deleteDatasources:
- name: Prometheus
orgId: 1
# list of datasources to insert/update depending
# what's available in the database
datasources:
# <string, required> name of the datasource. Required
- name: Prometheus
# <string, required> datasource type. Required
type: prometheus
# <string, required> access mode. direct or proxy. Required
access: proxy
# <int> org id. will default to orgId 1 if not specified
orgId: 1
# <string> url
url: http://prometheus:9090
# <string> database password, if used
password:
# <string> database user, if used
user:
# <string> database name, if used
database:
# <bool> enable/disable basic auth
basicAuth: false
# <string> basic auth username, if used
basicAuthUser:
# <string> basic auth password, if used
basicAuthPassword:
# <bool> enable/disable with credentials headers
withCredentials:
# <bool> mark as default datasource. Max one per org
isDefault: true
# <map> fields that will be converted to json and stored in json_data
jsonData:
httpMethod: GET
graphiteVersion: "1.1"
tlsAuth: false
tlsAuthWithCACert: false
# <string> json object of data that will be encrypted.
secureJsonData:
tlsCACert: "..."
tlsClientCert: "..."
tlsClientKey: "..."
version: 1
# <bool> allow users to edit datasources from the UI.
editable: true

View File

@@ -0,0 +1,36 @@
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
pushd "../../../../../" > /dev/null
source .set_env.sh
popd > /dev/null
WORKPATH=$(dirname "$PWD")/..
# export WORKDIR=$WORKPATH/../../
echo "WORKDIR=${WORKDIR}"
export ip_address=$(hostname -I | awk '{print $1}')
export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN}
# LLM related environment variables
export HF_CACHE_DIR=${HF_CACHE_DIR}
ls $HF_CACHE_DIR
export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN}
export LLM_MODEL_ID="meta-llama/Meta-Llama-3.1-70B-Instruct"
export NUM_SHARDS=4
export LLM_ENDPOINT_URL="http://${ip_address}:8086"
export temperature=0
export max_new_tokens=4096
# agent related environment variables
export TOOLSET_PATH=$WORKDIR/GenAIExamples/AgentQnA/tools/
echo "TOOLSET_PATH=${TOOLSET_PATH}"
export recursion_limit_worker=12
export recursion_limit_supervisor=10
export WORKER_AGENT_URL="http://${ip_address}:9095/v1/chat/completions"
export SQL_AGENT_URL="http://${ip_address}:9096/v1/chat/completions"
export RETRIEVAL_TOOL_URL="http://${ip_address}:8889/v1/retrievaltool"
export CRAG_SERVER=http://${ip_address}:8080
export db_name=Chinook
export db_path="sqlite:////home/user/chinook-db/Chinook_Sqlite.sqlite"
docker compose -f compose.yaml up -d

View File

@@ -0,0 +1,25 @@
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
# LLM related environment variables
export HF_CACHE_DIR=${HF_CACHE_DIR}
ls $HF_CACHE_DIR
export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN}
export LLM_MODEL_ID="meta-llama/Meta-Llama-3.1-70B-Instruct"
export NUM_SHARDS=4
docker compose -f tgi_gaudi.yaml up -d
sleep 5s
echo "Waiting tgi gaudi ready"
n=0
until [[ "$n" -ge 100 ]] || [[ $ready == true ]]; do
docker logs tgi-server &> tgi-gaudi-service.log
n=$((n+1))
if grep -q Connected tgi-gaudi-service.log; then
break
fi
sleep 5s
done
sleep 5s
echo "Service started successfully"

View File

@@ -1,55 +0,0 @@
# Copyright (C) 2025 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
global:
scrape_interval: 5s
external_labels:
monitor: "my-monitor"
scrape_configs:
- job_name: "prometheus"
static_configs:
- targets: ["prometheus:9090"]
- job_name: "vllm"
metrics_path: /metrics
static_configs:
- targets: ["vllm-gaudi-server:8000"]
- job_name: "tgi"
metrics_path: /metrics
static_configs:
- targets: ["tgi-gaudi-server:80"]
- job_name: "tei-embedding"
metrics_path: /metrics
static_configs:
- targets: ["tei-embedding-server:80"]
- job_name: "tei-reranking"
metrics_path: /metrics
static_configs:
- targets: ["tei-reranking-server:80"]
- job_name: "retriever"
metrics_path: /metrics
static_configs:
- targets: ["retriever:7000"]
- job_name: "dataprep-redis-service"
metrics_path: /metrics
static_configs:
- targets: ["dataprep-redis-service:5000"]
- job_name: "prometheus-node-exporter"
metrics_path: /metrics
static_configs:
- targets: ["node-exporter:9100"]
- job_name: "prometheus-gaudi-exporter"
metrics_path: /metrics
static_configs:
- targets: ["gaudi-exporter:41611"]
- job_name: "supervisor-react-agent"
metrics_path: /metrics
static_configs:
- targets: ["react-agent-endpoint:9090"]
- job_name: "worker-rag-agent"
metrics_path: /metrics
static_configs:
- targets: ["rag-agent-endpoint:9095"]
- job_name: "worker-sql-agent"
metrics_path: /metrics
static_configs:
- targets: ["sql-agent-endpoint:9096"]

View File

@@ -1,72 +0,0 @@
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
pushd "../../../../../" > /dev/null
source .set_env.sh
popd > /dev/null
WORKPATH=$(dirname "$PWD")/..
# export WORKDIR=$WORKPATH/../../
if [[ -z "${WORKDIR}" ]]; then
echo "Please set WORKDIR environment variable"
exit 0
fi
echo "WORKDIR=${WORKDIR}"
export ip_address=$(hostname -I | awk '{print $1}')
# LLM related environment variables
export HF_CACHE_DIR=${HF_CACHE_DIR}
ls $HF_CACHE_DIR
export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN}
export HF_TOKEN=${HUGGINGFACEHUB_API_TOKEN}
export LLM_MODEL_ID="meta-llama/Llama-3.3-70B-Instruct"
export NUM_SHARDS=4
export LLM_ENDPOINT_URL="http://${ip_address}:8086"
export temperature=0
export max_new_tokens=4096
# agent related environment variables
export TOOLSET_PATH=$WORKDIR/GenAIExamples/AgentQnA/tools/
echo "TOOLSET_PATH=${TOOLSET_PATH}"
export recursion_limit_worker=12
export recursion_limit_supervisor=10
export WORKER_AGENT_URL="http://${ip_address}:9095/v1/chat/completions"
export SQL_AGENT_URL="http://${ip_address}:9096/v1/chat/completions"
export RETRIEVAL_TOOL_URL="http://${ip_address}:8889/v1/retrievaltool"
export CRAG_SERVER=http://${ip_address}:8080
export db_name=Chinook
export db_path="sqlite:////home/user/chinook-db/Chinook_Sqlite.sqlite"
if [ ! -f $WORKDIR/GenAIExamples/AgentQnA/tests/Chinook_Sqlite.sqlite ]; then
echo "Download Chinook_Sqlite!"
wget -O $WORKDIR/GenAIExamples/AgentQnA/tests/Chinook_Sqlite.sqlite https://github.com/lerocha/chinook-database/releases/download/v1.4.5/Chinook_Sqlite.sqlite
fi
# configure agent ui
echo "AGENT_URL = 'http://$ip_address:9090/v1/chat/completions'" | tee ${WORKDIR}/GenAIExamples/AgentQnA/ui/svelte/.env
# retriever
export host_ip=$(hostname -I | awk '{print $1}')
export no_proxy=${no_proxy}
export http_proxy=${http_proxy}
export https_proxy=${https_proxy}
export EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5"
export RERANK_MODEL_ID="BAAI/bge-reranker-base"
export TEI_EMBEDDING_ENDPOINT="http://${host_ip}:6006"
export TEI_RERANKING_ENDPOINT="http://${host_ip}:8808"
export REDIS_URL="redis://${host_ip}:6379"
export INDEX_NAME="rag-redis"
export RERANK_TYPE="tei"
export MEGA_SERVICE_HOST_IP=${host_ip}
export EMBEDDING_SERVICE_HOST_IP=${host_ip}
export RETRIEVER_SERVICE_HOST_IP=${host_ip}
export RERANK_SERVICE_HOST_IP=${host_ip}
export BACKEND_SERVICE_ENDPOINT="http://${host_ip}:8889/v1/retrievaltool"
export DATAPREP_SERVICE_ENDPOINT="http://${host_ip}:6007/v1/dataprep/ingest"
export DATAPREP_GET_FILE_ENDPOINT="http://${host_ip}:6008/v1/dataprep/get"
export DATAPREP_DELETE_FILE_ENDPOINT="http://${host_ip}:6009/v1/dataprep/delete"
# Set OpenTelemetry Tracing Endpoint
export JAEGER_IP=$(ip route get 8.8.8.8 | grep -oP 'src \K[^ ]+')
export OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=grpc://$JAEGER_IP:4317
export TELEMETRY_ENDPOINT=http://$JAEGER_IP:4318/v1/traces
export no_proxy="$no_proxy,rag-agent-endpoint,sql-agent-endpoint,react-agent-endpoint,agent-ui,vllm-gaudi-server,jaeger,grafana,prometheus,node-exporter,gaudi-exporter,127.0.0.1,localhost,0.0.0.0,$host_ip,,$JAEGER_IP"

View File

@@ -3,7 +3,7 @@
services:
tgi-server:
image: ghcr.io/huggingface/tgi-gaudi:2.3.1
image: ghcr.io/huggingface/tgi-gaudi:2.0.6
container_name: tgi-server
ports:
- "8085:80"

View File

@@ -17,12 +17,3 @@ services:
dockerfile: ./docker/Dockerfile
extends: agent
image: ${REGISTRY:-opea}/agent-ui:${TAG:-latest}
vllm-rocm:
build:
args:
http_proxy: ${http_proxy}
https_proxy: ${https_proxy}
no_proxy: ${no_proxy}
context: GenAIComps
dockerfile: comps/third_parties/vllm/src/Dockerfile.amd_gpu
image: ${REGISTRY:-opea}/vllm-rocm:${TAG:-latest}

View File

@@ -20,30 +20,23 @@ function stop_agent_and_api_server() {
function stop_retrieval_tool() {
echo "Stopping Retrieval tool"
local RETRIEVAL_TOOL_PATH=$WORKPATH/../DocIndexRetriever
cd $RETRIEVAL_TOOL_PATH/docker_compose/intel/cpu/xeon/
container_list=$(cat compose.yaml | grep container_name | cut -d':' -f2)
for container_name in $container_list; do
cid=$(docker ps -aq --filter "name=$container_name")
echo "Stopping container $container_name"
if [[ ! -z "$cid" ]]; then docker rm $cid -f && sleep 1s; fi
done
docker compose -f $WORKDIR/GenAIExamples/AgentQnA/retrieval_tool/docker/docker-compose-retrieval-tool.yaml down
}
echo "=================== #1 Building docker images===================="
bash step1_build_images.sh
bash 1_build_images.sh
echo "=================== #1 Building docker images completed===================="
echo "=================== #2 Start retrieval tool===================="
bash step2_start_retrieval_tool.sh
bash 2_start_retrieval_tool.sh
echo "=================== #2 Retrieval tool started===================="
echo "=================== #3 Ingest data and validate retrieval===================="
bash step3_ingest_data_and_validate_retrieval.sh
bash 3_ingest_data_and_validate_retrieval.sh
echo "=================== #3 Data ingestion and validation completed===================="
echo "=================== #4 Start agent and API server===================="
bash step4_launch_and_validate_agent_openai.sh
bash 4_launch_and_validate_agent_openai.sh
echo "=================== #4 Agent test passed ===================="
echo "=================== #5 Stop agent and API server===================="

View File

@@ -22,7 +22,7 @@ function build_docker_images_for_retrieval_tool(){
echo "Build all the images with --no-cache..."
service_list="doc-index-retriever dataprep embedding retriever reranking"
docker compose -f build.yaml build ${service_list} --no-cache
docker pull ghcr.io/huggingface/text-embeddings-inference:cpu-1.6
docker pull ghcr.io/huggingface/text-embeddings-inference:cpu-1.5
docker images && sleep 1s
}
@@ -42,8 +42,7 @@ function build_vllm_docker_image() {
git clone https://github.com/HabanaAI/vllm-fork.git
fi
cd ./vllm-fork
VLLM_VER=$(git describe --tags "$(git rev-list --tags --max-count=1)")
git checkout ${VLLM_VER} &> /dev/null
git checkout v0.6.4.post2+Gaudi-1.19.0
docker build --no-cache -f Dockerfile.hpu -t opea/vllm-gaudi:ci --shm-size=128g . --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy
if [ $? -ne 0 ]; then
echo "opea/vllm-gaudi:ci failed"

View File

@@ -1,64 +0,0 @@
#!/bin/bash
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
set -e
export WORKPATH=$(dirname "$PWD")
export WORKDIR=${WORKPATH}/../../
echo "WORKDIR=${WORKDIR}"
export ip_address=$(hostname -I | awk '{print $1}')
function get_genai_comps() {
if [ ! -d "GenAIComps" ] ; then
git clone --depth 1 --branch ${opea_branch:-"main"} https://github.com/opea-project/GenAIComps.git
fi
}
function build_docker_images_for_retrieval_tool(){
cd $WORKPATH/../DocIndexRetriever/docker_image_build/
get_genai_comps
echo "Build all the images with --no-cache..."
service_list="doc-index-retriever dataprep embedding retriever reranking"
docker compose -f build.yaml build ${service_list} --no-cache
docker pull ghcr.io/huggingface/text-embeddings-inference:cpu-1.5
docker images && sleep 3s
}
function build_agent_docker_image() {
cd $WORKPATH/docker_image_build/
get_genai_comps
echo "Build agent image with --no-cache..."
docker compose -f build.yaml build --no-cache
docker images && sleep 3s
}
#function build_vllm_docker_image() {
# echo "Building the vllm docker image"
# cd $WORKPATH/
# docker build --no-cache -t opea/llm-vllm-rocm:ci -f Dockerfile-vllm-rocm .
#
# docker images && sleep 3s
#}
function main() {
echo "==================== Build docker images for retrieval tool ===================="
build_docker_images_for_retrieval_tool
echo "==================== Build docker images for retrieval tool completed ===================="
echo "==================== Build agent docker image ===================="
build_agent_docker_image
echo "==================== Build agent docker image completed ===================="
# echo "==================== Build vllm docker image ===================="
# build_vllm_docker_image
# echo "==================== Build vllm docker image completed ===================="
docker image ls | grep vllm
}
main

View File

@@ -9,7 +9,7 @@ echo "WORKDIR=${WORKDIR}"
export ip_address=$(hostname -I | awk '{print $1}')
export host_ip=${ip_address}
export HF_CACHE_DIR=${model_cache:-"$WORKDIR/hf_cache"}
export HF_CACHE_DIR=$WORKDIR/hf_cache
if [ ! -d "$HF_CACHE_DIR" ]; then
echo "Creating HF_CACHE directory"
mkdir -p "$HF_CACHE_DIR"

View File

@@ -1,49 +0,0 @@
#!/bin/bash
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
set -e
WORKPATH=$(dirname "$PWD")
export WORKDIR=$WORKPATH/../
echo "WORKDIR=${WORKDIR}"
export ip_address=$(hostname -I | awk '{print $1}')
export host_ip=${ip_address}
export HF_CACHE_DIR=$WORKPATH/hf_cache
if [ ! -d "$HF_CACHE_DIR" ]; then
echo "Creating HF_CACHE directory"
mkdir -p "$HF_CACHE_DIR"
fi
function start_retrieval_tool() {
echo "Starting Retrieval tool"
cd $WORKPATH/../DocIndexRetriever/docker_compose/intel/cpu/xeon
host_ip=$(hostname -I | awk '{print $1}')
export HF_CACHE_DIR=${HF_CACHE_DIR}
export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN}
export no_proxy=${no_proxy}
export http_proxy=${http_proxy}
export https_proxy=${https_proxy}
export EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5"
export RERANK_MODEL_ID="BAAI/bge-reranker-base"
export TEI_EMBEDDING_ENDPOINT="http://${host_ip}:6006"
export TEI_RERANKING_ENDPOINT="http://${host_ip}:8808"
export REDIS_URL="redis://${host_ip}:6379"
export INDEX_NAME="rag-redis"
export RERANK_TYPE="tei"
export MEGA_SERVICE_HOST_IP=${host_ip}
export EMBEDDING_SERVICE_HOST_IP=${host_ip}
export RETRIEVER_SERVICE_HOST_IP=${host_ip}
export RERANK_SERVICE_HOST_IP=${host_ip}
export BACKEND_SERVICE_ENDPOINT="http://${host_ip}:8889/v1/retrievaltool"
export DATAPREP_SERVICE_ENDPOINT="http://${host_ip}:6007/v1/dataprep/ingest"
export DATAPREP_GET_FILE_ENDPOINT="http://${host_ip}:6008/v1/dataprep/get"
export DATAPREP_DELETE_FILE_ENDPOINT="http://${host_ip}:6009/v1/dataprep/delete"
docker compose -f compose.yaml up -d
}
echo "==================== Start retrieval tool ===================="
start_retrieval_tool
sleep 20 # needed for downloading the models
echo "==================== Retrieval tool started ===================="

View File

@@ -1,68 +0,0 @@
#!/bin/bash
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
set -e
WORKPATH=$(dirname "$PWD")
export WORKDIR=$WORKPATH/../../
echo "WORKDIR=${WORKDIR}"
export ip_address=$(hostname -I | awk '{print $1}')
export host_ip=$ip_address
echo "ip_address=${ip_address}"
function validate() {
local CONTENT="$1"
local EXPECTED_RESULT="$2"
local SERVICE_NAME="$3"
if echo "$CONTENT" | grep -q "$EXPECTED_RESULT"; then
echo "[ $SERVICE_NAME ] Content is as expected: $CONTENT"
echo 0
else
echo "[ $SERVICE_NAME ] Content does not match the expected result: $CONTENT"
echo 1
fi
}
function ingest_data_and_validate() {
echo "Ingesting data"
cd $WORKPATH/retrieval_tool/
echo $PWD
local CONTENT=$(bash run_ingest_data.sh)
local EXIT_CODE=$(validate "$CONTENT" "Data preparation succeeded" "dataprep-redis-server")
echo "$EXIT_CODE"
local EXIT_CODE="${EXIT_CODE:0-1}"
echo "return value is $EXIT_CODE"
if [ "$EXIT_CODE" == "1" ]; then
docker logs dataprep-redis-server
return 1
fi
}
function validate_retrieval_tool() {
echo "----------------Test retrieval tool ----------------"
local CONTENT=$(http_proxy="" curl http://${ip_address}:8889/v1/retrievaltool -X POST -H "Content-Type: application/json" -d '{
"text": "Who sang Thriller"
}')
local EXIT_CODE=$(validate "$CONTENT" "Thriller" "retrieval-tool")
if [ "$EXIT_CODE" == "1" ]; then
docker logs retrievaltool-xeon-backend-server
exit 1
fi
}
function main(){
echo "==================== Ingest data ===================="
ingest_data_and_validate
echo "==================== Data ingestion completed ===================="
echo "==================== Validate retrieval tool ===================="
validate_retrieval_tool
echo "==================== Retrieval tool validated ===================="
}
main

View File

@@ -11,22 +11,13 @@ echo "WORKDIR=${WORKDIR}"
export ip_address=$(hostname -I | awk '{print $1}')
export TOOLSET_PATH=$WORKDIR/GenAIExamples/AgentQnA/tools/
function download_chinook_data(){
echo "Downloading chinook data..."
cd $WORKDIR
git clone https://github.com/lerocha/chinook-database.git
cp chinook-database/ChinookDatabase/DataSources/Chinook_Sqlite.sqlite $WORKDIR/GenAIExamples/AgentQnA/tests/
}
function start_agent_and_api_server() {
echo "Starting CRAG server"
docker run -d --runtime=runc --name=kdd-cup-24-crag-service -p=8080:8000 docker.io/aicrowd/kdd-cup-24-crag-mock-api:v0
echo "Starting Agent services"
cd $WORKDIR/GenAIExamples/AgentQnA/docker_compose/intel/cpu/xeon/
cd $WORKDIR/GenAIExamples/AgentQnA/docker_compose/intel/cpu/xeon
bash launch_agent_service_openai.sh
sleep 2m
}
function validate() {
@@ -44,64 +35,19 @@ function validate() {
}
function validate_agent_service() {
# # test worker rag agent
echo "======================Testing worker rag agent======================"
export agent_port="9095"
prompt="Tell me about Michael Jackson song Thriller"
local CONTENT=$(python3 $WORKDIR/GenAIExamples/AgentQnA/tests/test.py --prompt "$prompt" --agent_role "worker" --ext_port $agent_port)
# echo $CONTENT
local EXIT_CODE=$(validate "$CONTENT" "Thriller" "rag-agent-endpoint")
echo $EXIT_CODE
local EXIT_CODE="${EXIT_CODE:0-1}"
echo "----------------Test agent ----------------"
local CONTENT=$(http_proxy="" curl http://${ip_address}:9090/v1/chat/completions -X POST -H "Content-Type: application/json" -d '{
"query": "Tell me about Michael Jackson song thriller"
}')
local EXIT_CODE=$(validate "$CONTENT" "Thriller" "react-agent-endpoint")
docker logs react-agent-endpoint
if [ "$EXIT_CODE" == "1" ]; then
docker logs rag-agent-endpoint
exit 1
fi
# # test worker sql agent
echo "======================Testing worker sql agent======================"
export agent_port="9096"
prompt="How many employees are there in the company?"
local CONTENT=$(python3 $WORKDIR/GenAIExamples/AgentQnA/tests/test.py --prompt "$prompt" --agent_role "worker" --ext_port $agent_port)
local EXIT_CODE=$(validate "$CONTENT" "8" "sql-agent-endpoint")
echo $CONTENT
# echo $EXIT_CODE
local EXIT_CODE="${EXIT_CODE:0-1}"
if [ "$EXIT_CODE" == "1" ]; then
docker logs sql-agent-endpoint
exit 1
fi
# test supervisor react agent
echo "======================Testing supervisor react agent======================"
export agent_port="9090"
local CONTENT=$(python3 $WORKDIR/GenAIExamples/AgentQnA/tests/test.py --agent_role "supervisor" --ext_port $agent_port --stream)
local EXIT_CODE=$(validate "$CONTENT" "Iron" "react-agent-endpoint")
# echo $CONTENT
echo $EXIT_CODE
local EXIT_CODE="${EXIT_CODE:0-1}"
if [ "$EXIT_CODE" == "1" ]; then
docker logs react-agent-endpoint
exit 1
fi
}
function remove_chinook_data(){
echo "Removing chinook data..."
cd $WORKDIR
if [ -d "chinook-database" ]; then
rm -rf chinook-database
fi
echo "Chinook data removed!"
}
function main() {
echo "==================== Prepare data ===================="
download_chinook_data
echo "==================== Data prepare done ===================="
echo "==================== Start agent ===================="
start_agent_and_api_server
echo "==================== Agent started ===================="
@@ -111,9 +57,4 @@ function main() {
echo "==================== Agent service validated ===================="
}
remove_chinook_data
main
remove_chinook_data

View File

@@ -1,120 +0,0 @@
#!/bin/bash
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
set -e
WORKPATH=$(dirname "$PWD")
export LOG_PATH=${WORKPATH}
export WORKDIR=$WORKPATH/../../
echo "WORKDIR=${WORKDIR}"
export ip_address=$(hostname -I | awk '{print $1}')
export host_ip=${ip_address}
export TOOLSET_PATH=$WORKPATH/tools/
export HF_CACHE_DIR=$WORKPATH/data2/huggingface
if [ ! -d "$HF_CACHE_DIR" ]; then
HF_CACHE_DIR=$WORKDIR/hf_cache
mkdir -p "$HF_CACHE_DIR"
fi
function download_chinook_data(){
echo "Downloading chinook data..."
cd $WORKDIR
git clone https://github.com/lerocha/chinook-database.git
cp chinook-database/ChinookDatabase/DataSources/Chinook_Sqlite.sqlite ${WORKPATH}/tests/
}
function start_agent_and_api_server() {
echo "Starting Agent services"
cd $WORKDIR/GenAIExamples/AgentQnA/docker_compose/amd/gpu/rocm
bash launch_agent_service_vllm_rocm.sh
}
function validate() {
local CONTENT="$1"
local EXPECTED_RESULT="$2"
local SERVICE_NAME="$3"
if echo "$CONTENT" | grep -q "$EXPECTED_RESULT"; then
echo "[ $SERVICE_NAME ] Content is as expected: $CONTENT"
echo 0
else
echo "[ $SERVICE_NAME ] Content does not match the expected result: $CONTENT"
echo 1
fi
}
function validate_agent_service() {
# # test worker rag agent
echo "======================Testing worker rag agent======================"
export agent_port=$(cat ${WORKPATH}/docker_compose/amd/gpu/WORKER_RAG_AGENT_PORT_tmp)
prompt="Tell me about Michael Jackson song Thriller"
local CONTENT=$(python3 $WORKDIR/GenAIExamples/AgentQnA/tests/test.py --prompt "$prompt" --agent_role "worker" --ext_port $agent_port)
# echo $CONTENT
local EXIT_CODE=$(validate "$CONTENT" "Thriller" "rag-agent-endpoint")
echo $EXIT_CODE
local EXIT_CODE="${EXIT_CODE:0-1}"
if [ "$EXIT_CODE" == "1" ]; then
docker logs rag-agent-endpoint
exit 1
fi
# test worker sql agent
echo "======================Testing worker sql agent======================"
export agent_port=$(cat ${WORKPATH}/docker_compose/amd/gpu/WORKER_SQL_AGENT_PORT_tmp)
prompt="How many employees are there in the company?"
local CONTENT=$(python3 $WORKDIR/GenAIExamples/AgentQnA/tests/test.py --prompt "$prompt" --agent_role "worker" --ext_port $agent_port)
local EXIT_CODE=$(validate "$CONTENT" "8" "sql-agent-endpoint")
echo $CONTENT
# echo $EXIT_CODE
local EXIT_CODE="${EXIT_CODE:0-1}"
if [ "$EXIT_CODE" == "1" ]; then
docker logs sql-agent-endpoint
exit 1
fi
# test supervisor react agent
echo "======================Testing supervisor react agent======================"
export agent_port=$(cat ${WORKPATH}/docker_compose/amd/gpu/SUPERVISOR_REACT_AGENT_PORT_tmp)
local CONTENT=$(python3 $WORKDIR/GenAIExamples/AgentQnA/tests/test.py --agent_role "supervisor" --ext_port $agent_port --stream)
local EXIT_CODE=$(validate "$CONTENT" "Iron" "react-agent-endpoint")
# echo $CONTENT
echo $EXIT_CODE
local EXIT_CODE="${EXIT_CODE:0-1}"
if [ "$EXIT_CODE" == "1" ]; then
docker logs react-agent-endpoint
exit 1
fi
}
function remove_chinook_data(){
echo "Removing chinook data..."
cd $WORKDIR
if [ -d "chinook-database" ]; then
rm -rf chinook-database
fi
echo "Chinook data removed!"
}
function main() {
echo "==================== Prepare data ===================="
download_chinook_data
echo "==================== Data prepare done ===================="
echo "==================== Start agent ===================="
start_agent_and_api_server
echo "==================== Agent started ===================="
echo "==================== Validate agent service ===================="
validate_agent_service
echo "==================== Agent service validated ===================="
}
remove_chinook_data
main
remove_chinook_data

View File

@@ -11,9 +11,9 @@ export ip_address=$(hostname -I | awk '{print $1}')
export TOOLSET_PATH=$WORKPATH/tools/
export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN}
HF_TOKEN=${HUGGINGFACEHUB_API_TOKEN}
model="meta-llama/Llama-3.3-70B-Instruct" #"meta-llama/Meta-Llama-3.1-70B-Instruct"
model="meta-llama/Meta-Llama-3.1-70B-Instruct"
export HF_CACHE_DIR=${model_cache:-"/data2/huggingface"}
export HF_CACHE_DIR=/data2/huggingface
if [ ! -d "$HF_CACHE_DIR" ]; then
HF_CACHE_DIR=$WORKDIR/hf_cache
mkdir -p "$HF_CACHE_DIR"
@@ -27,20 +27,18 @@ vllm_volume=${HF_CACHE_DIR}
function start_tgi(){
echo "Starting tgi-gaudi server"
cd $WORKDIR/GenAIExamples/AgentQnA/docker_compose/intel/hpu/gaudi
source set_env.sh
docker compose -f $WORKDIR/GenAIExamples/DocIndexRetriever/docker_compose/intel/cpu/xeon/compose.yaml -f compose.yaml tgi_gaudi.yaml -f compose.telemetry.yaml up -d
bash launch_tgi_gaudi.sh
}
function start_all_services() {
function start_vllm_service_70B() {
echo "token is ${HF_TOKEN}"
echo "start vllm gaudi service"
echo "**************model is $model**************"
cd $WORKDIR/GenAIExamples/AgentQnA/docker_compose/intel/hpu/gaudi
source set_env.sh
docker compose -f $WORKDIR/GenAIExamples/DocIndexRetriever/docker_compose/intel/cpu/xeon/compose.yaml -f compose.yaml -f compose.telemetry.yaml up -d
vllm_image=opea/vllm-gaudi:ci
docker run -d --runtime=habana --rm --name "vllm-gaudi-server" -e HABANA_VISIBLE_DEVICES=0,1,2,3 -p $vllm_port:8000 -v $vllm_volume:/data -e HF_TOKEN=$HF_TOKEN -e HUGGING_FACE_HUB_TOKEN=$HF_TOKEN -e HF_HOME=/data -e OMPI_MCA_btl_vader_single_copy_mechanism=none -e PT_HPU_ENABLE_LAZY_COLLECTIVES=true -e http_proxy=$http_proxy -e https_proxy=$https_proxy -e no_proxy=$no_proxy -e VLLM_SKIP_WARMUP=true --cap-add=sys_nice --ipc=host $vllm_image --model ${model} --max-seq-len-to-capture 16384 --tensor-parallel-size 4
sleep 5s
echo "Waiting vllm gaudi ready"
n=0
@@ -62,6 +60,23 @@ function start_all_services() {
echo "Service started successfully"
}
function prepare_data() {
cd $WORKDIR
echo "Downloading data..."
git clone https://github.com/TAG-Research/TAG-Bench.git
cd TAG-Bench/setup
chmod +x get_dbs.sh
./get_dbs.sh
echo "Split data..."
cd $WORKPATH/tests/sql_agent_test
bash run_data_split.sh
echo "Data preparation done!"
}
function download_chinook_data(){
echo "Downloading chinook data..."
cd $WORKDIR
@@ -69,6 +84,15 @@ function download_chinook_data(){
cp chinook-database/ChinookDatabase/DataSources/Chinook_Sqlite.sqlite $WORKDIR/GenAIExamples/AgentQnA/tests/
}
# Launch the mock CRAG API container and the AgentQnA agent services on Gaudi.
function start_agent_and_api_server() {
    echo "Starting CRAG server"
    # Mock KDD-Cup CRAG API; host port 8080 -> container port 8000.
    docker run -d --runtime=runc --name=kdd-cup-24-crag-service -p=8080:8000 docker.io/aicrowd/kdd-cup-24-crag-mock-api:v0
    echo "Starting Agent services"
    cd $WORKDIR/GenAIExamples/AgentQnA/docker_compose/intel/hpu/gaudi
    bash launch_agent_service_gaudi.sh
    # Give the agent services time to come up before the validation steps run.
    sleep 2m
}
function validate() {
local CONTENT="$1"
@@ -88,9 +112,8 @@ function validate_agent_service() {
# # test worker rag agent
echo "======================Testing worker rag agent======================"
export agent_port="9095"
export agent_ip="127.0.0.1"
prompt="Tell me about Michael Jackson song Thriller"
local CONTENT=$(python3 $WORKDIR/GenAIExamples/AgentQnA/tests/test.py --prompt "$prompt" --agent_role "worker" --ip_addr $agent_ip --ext_port $agent_port)
local CONTENT=$(python3 $WORKDIR/GenAIExamples/AgentQnA/tests/test.py --prompt "$prompt")
# echo $CONTENT
local EXIT_CODE=$(validate "$CONTENT" "Thriller" "rag-agent-endpoint")
echo $EXIT_CODE
@@ -104,7 +127,7 @@ function validate_agent_service() {
echo "======================Testing worker sql agent======================"
export agent_port="9096"
prompt="How many employees are there in the company?"
local CONTENT=$(python3 $WORKDIR/GenAIExamples/AgentQnA/tests/test.py --prompt "$prompt" --agent_role "worker" --ip_addr $agent_ip --ext_port $agent_port)
local CONTENT=$(python3 $WORKDIR/GenAIExamples/AgentQnA/tests/test.py --prompt "$prompt")
local EXIT_CODE=$(validate "$CONTENT" "8" "sql-agent-endpoint")
echo $CONTENT
# echo $EXIT_CODE
@@ -117,8 +140,9 @@ function validate_agent_service() {
# test supervisor react agent
echo "======================Testing supervisor react agent======================"
export agent_port="9090"
local CONTENT=$(python3 $WORKDIR/GenAIExamples/AgentQnA/tests/test.py --agent_role "supervisor" --ip_addr $agent_ip --ext_port $agent_port --stream)
local EXIT_CODE=$(validate "$CONTENT" "Iron" "react-agent-endpoint")
prompt="How many albums does Iron Maiden have?"
local CONTENT=$(python3 $WORKDIR/GenAIExamples/AgentQnA/tests/test.py --prompt "$prompt")
local EXIT_CODE=$(validate "$CONTENT" "21" "react-agent-endpoint")
# echo $CONTENT
echo $EXIT_CODE
local EXIT_CODE="${EXIT_CODE:0-1}"
@@ -129,6 +153,15 @@ function validate_agent_service() {
}
# Delete the TAG-Bench checkout from $WORKDIR; a no-op when it is absent.
function remove_data() {
    echo "Removing data..."
    cd "$WORKDIR"
    # Only remove the directory when it actually exists.
    if test -d "TAG-Bench"; then
        rm -rf TAG-Bench
    fi
    echo "Data removed!"
}
function remove_chinook_data(){
echo "Removing chinook data..."
cd $WORKDIR
@@ -138,77 +171,26 @@ function remove_chinook_data(){
echo "Chinook data removed!"
}
export host_ip=$ip_address
echo "ip_address=${ip_address}"
# Check whether $1 (content) contains $2 (expected pattern, a grep BRE).
# Prints a human-readable status line followed by "0" (match) or "1" (no
# match); callers slice off the trailing digit as the result code.
function validate() {
    local content="$1"
    local expected="$2"
    local service="$3"
    # Here-string avoids the echo | grep pipeline; -q suppresses match output.
    if grep -q "$expected" <<< "$content"; then
        echo "[ $service ] Content is as expected: $content"
        echo 0
    else
        echo "[ $service ] Content does not match the expected result: $content"
        echo 1
    fi
}
# Run the retrieval-tool ingestion script and verify it reported success.
# Uses the sibling validate() helper; on failure, dumps the dataprep
# container logs and returns 1.
function ingest_data_and_validate() {
    echo "Ingesting data"
    cd $WORKDIR/GenAIExamples/AgentQnA/retrieval_tool/
    echo $PWD
    local CONTENT=$(bash run_ingest_data.sh)
    local EXIT_CODE=$(validate "$CONTENT" "Data preparation succeeded" "dataprep-redis-server")
    echo "$EXIT_CODE"
    # validate() prints status text plus a trailing 0/1 digit; keep only the last char.
    local EXIT_CODE="${EXIT_CODE:0-1}"
    echo "return value is $EXIT_CODE"
    if [ "$EXIT_CODE" == "1" ]; then
        docker logs dataprep-redis-server
        return 1
    fi
}
# Smoke-test the retrieval-tool endpoint with a fixed query; exits the whole
# script (after dumping backend logs) when the response lacks the keyword.
function validate_retrieval_tool() {
    echo "----------------Test retrieval tool ----------------"
    # http_proxy is cleared for this one call so the request to the local
    # endpoint bypasses any configured proxy.
    local CONTENT=$(http_proxy="" curl http://${ip_address}:8889/v1/retrievaltool -X POST -H "Content-Type: application/json" -d '{
"text": "Who sang Thriller"
}')
    local EXIT_CODE=$(validate "$CONTENT" "Thriller" "retrieval-tool")
    if [ "$EXIT_CODE" == "1" ]; then
        docker logs retrievaltool-xeon-backend-server
        exit 1
    fi
}
# End-to-end driver: prepare data, start services and the vLLM backend,
# ingest documents, then validate the retrieval tool and the agents.
function main() {
    echo "==================== Prepare data ===================="
    download_chinook_data
    echo "==================== Data prepare done ===================="

    echo "==================== Start all services ===================="
    start_all_services
    echo "==================== all services started ===================="

    echo "==================== Start VLLM service ===================="
    start_vllm_service_70B
    echo "==================== VLLM service started ===================="

    echo "==================== Ingest data ===================="
    ingest_data_and_validate
    echo "==================== Data ingestion completed ===================="

    echo "==================== Validate retrieval tool ===================="
    validate_retrieval_tool
    echo "==================== Retrieval tool validated ===================="

    echo "==================== Start agent ===================="
    start_agent_and_api_server
    echo "==================== Agent started ===================="

    echo "==================== Validate agent service ===================="
    validate_agent_service
    echo "==================== Agent service validated ===================="
}
remove_data
remove_chinook_data
main
remove_data
remove_chinook_data

View File

@@ -2,30 +2,26 @@
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
set -e
set -ex
WORKPATH=$(dirname "$PWD")
export LOG_PATH=${WORKPATH}
export WORKDIR=$WORKPATH/../../
echo "WORKDIR=${WORKDIR}"
export ip_address=$(hostname -I | awk '{print $1}')
export host_ip=${ip_address}
export TOOLSET_PATH=$WORKPATH/tools/
export TOOLSET_PATH=$WORKDIR/GenAIExamples/AgentQnA/tools/
export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN}
export HF_CACHE_DIR=$WORKPATH/data2/huggingface
export HF_CACHE_DIR=$WORKDIR/hf_cache
if [ ! -d "$HF_CACHE_DIR" ]; then
HF_CACHE_DIR=$WORKDIR/hf_cache
mkdir -p "$HF_CACHE_DIR"
fi
ls $HF_CACHE_DIR
function download_chinook_data(){
echo "Downloading chinook data..."
cd $WORKDIR
git clone https://github.com/lerocha/chinook-database.git
cp chinook-database/ChinookDatabase/DataSources/Chinook_Sqlite.sqlite ${WORKPATH}/tests/
}
function start_agent_and_api_server() {
echo "Starting CRAG server"
docker run -d --runtime=runc --name=kdd-cup-24-crag-service -p=8080:8000 docker.io/aicrowd/kdd-cup-24-crag-mock-api:v0
echo "Starting Agent services"
cd $WORKDIR/GenAIExamples/AgentQnA/docker_compose/amd/gpu/rocm
bash launch_agent_service_tgi_rocm.sh
@@ -46,63 +42,28 @@ function validate() {
}
function validate_agent_service() {
# # test worker rag agent
echo "======================Testing worker rag agent======================"
export agent_port=$(cat ${WORKPATH}/docker_compose/amd/gpu/WORKER_RAG_AGENT_PORT_tmp)
prompt="Tell me about Michael Jackson song Thriller"
local CONTENT=$(python3 $WORKDIR/GenAIExamples/AgentQnA/tests/test.py --prompt "$prompt" --agent_role "worker" --ext_port $agent_port)
# echo $CONTENT
local EXIT_CODE=$(validate "$CONTENT" "Thriller" "rag-agent-endpoint")
echo $EXIT_CODE
local EXIT_CODE="${EXIT_CODE:0-1}"
echo "----------------Test agent ----------------"
local CONTENT=$(http_proxy="" curl http://${ip_address}:9095/v1/chat/completions -X POST -H "Content-Type: application/json" -d '{
"query": "Tell me about Michael Jackson song thriller"
}')
local EXIT_CODE=$(validate "$CONTENT" "Thriller" "react-agent-endpoint")
docker logs rag-agent-endpoint
if [ "$EXIT_CODE" == "1" ]; then
docker logs rag-agent-endpoint
exit 1
fi
# test worker sql agent
echo "======================Testing worker sql agent======================"
export agent_port=$(cat ${WORKPATH}/docker_compose/amd/gpu/WORKER_SQL_AGENT_PORT_tmp)
prompt="How many employees are there in the company?"
local CONTENT=$(python3 $WORKDIR/GenAIExamples/AgentQnA/tests/test.py --prompt "$prompt" --agent_role "worker" --ext_port $agent_port)
local EXIT_CODE=$(validate "$CONTENT" "8" "sql-agent-endpoint")
echo $CONTENT
# echo $EXIT_CODE
local EXIT_CODE="${EXIT_CODE:0-1}"
local CONTENT=$(http_proxy="" curl http://${ip_address}:9090/v1/chat/completions -X POST -H "Content-Type: application/json" -d '{
"query": "Tell me about Michael Jackson song thriller"
}')
local EXIT_CODE=$(validate "$CONTENT" "Thriller" "react-agent-endpoint")
docker logs react-agent-endpoint
if [ "$EXIT_CODE" == "1" ]; then
docker logs sql-agent-endpoint
exit 1
fi
# test supervisor react agent
echo "======================Testing supervisor react agent======================"
export agent_port=$(cat ${WORKPATH}/docker_compose/amd/gpu/SUPERVISOR_REACT_AGENT_PORT_tmp)
local CONTENT=$(python3 $WORKDIR/GenAIExamples/AgentQnA/tests/test.py --agent_role "supervisor" --ext_port $agent_port --stream)
local EXIT_CODE=$(validate "$CONTENT" "Iron" "react-agent-endpoint")
# echo $CONTENT
echo $EXIT_CODE
local EXIT_CODE="${EXIT_CODE:0-1}"
if [ "$EXIT_CODE" == "1" ]; then
docker logs react-agent-endpoint
exit 1
fi
}
function remove_chinook_data(){
echo "Removing chinook data..."
cd $WORKDIR
if [ -d "chinook-database" ]; then
rm -rf chinook-database
fi
echo "Chinook data removed!"
}
function main() {
echo "==================== Prepare data ===================="
download_chinook_data
echo "==================== Data prepare done ===================="
echo "==================== Start agent ===================="
start_agent_and_api_server
echo "==================== Agent started ===================="
@@ -112,9 +73,4 @@ function main() {
echo "==================== Agent service validated ===================="
}
remove_chinook_data
main
remove_chinook_data

View File

@@ -1,20 +1,34 @@
# Copyright (C) 2025 Intel Corporation
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
import argparse
import json
import uuid
import os
import requests
def generate_answer_agent_api(url, prompt):
    """POST the prompt to the agent endpoint and return the "text" field of the JSON reply."""
    # Empty http proxy entry so the request to the local endpoint bypasses any proxy.
    no_proxy = {"http": ""}
    resp = requests.post(url, json={"messages": prompt}, proxies=no_proxy)
    return resp.json()["text"]
def process_request(url, query, is_stream=False):
proxies = {"http": ""}
content = json.dumps(query) if query is not None else None
payload = {
"messages": query,
}
try:
resp = requests.post(url=url, data=content, proxies=proxies, stream=is_stream)
resp = requests.post(url=url, json=payload, proxies=proxies, stream=is_stream)
if not is_stream:
ret = resp.json()["text"]
print(ret)
else:
for line in resp.iter_lines(decode_unicode=True):
print(line)
@@ -24,54 +38,19 @@ def process_request(url, query, is_stream=False):
return ret
except requests.exceptions.RequestException as e:
ret = f"An error occurred:{e}"
return None
def test_worker_agent(args):
    """Send one non-streaming prompt to a worker agent endpoint and print the reply."""
    endpoint = f"http://{args.ip_addr}:{args.ext_port}/v1/chat/completions"
    # Worker agents take a plain chat-completion payload; streaming is disabled.
    payload = {"role": "user", "messages": args.prompt, "stream": "false"}
    print("Response: ", process_request(endpoint, payload))
def add_message_and_run(url, user_message, thread_id, stream=False):
    """Post one user turn to a multi-turn agent thread and print the response."""
    print("User message: ", user_message)
    request_body = {
        "role": "user",
        "messages": user_message,
        "thread_id": thread_id,
        "stream": stream,
    }
    response = process_request(url, request_body, is_stream=stream)
    print("Response: ", response)
def test_chat_completion_multi_turn(args):
    """Drive a two-turn conversation against the supervisor agent on one thread.

    The second turn refers back to the first ("the artist's albums"),
    exercising the server-side conversation memory keyed by thread_id.
    """
    url = f"http://{args.ip_addr}:{args.ext_port}/v1/chat/completions"
    # A fresh UUID groups both turns into one conversation thread.
    thread_id = f"{uuid.uuid4()}"
    # first turn
    print("===============First turn==================")
    user_message = "Which artist has the most albums in the database?"
    add_message_and_run(url, user_message, thread_id, stream=args.stream)
    print("===============End of first turn==================")
    # second turn
    print("===============Second turn==================")
    user_message = "Give me a few examples of the artist's albums?"
    add_message_and_run(url, user_message, thread_id, stream=args.stream)
    print("===============End of second turn==================")
print(ret)
return False
if __name__ == "__main__":
    # The merged diff left two conflicting CLI versions here (duplicate
    # --prompt/--stream definitions, args parsed twice, and a dead
    # agent_role dispatch). This is the reconstructed, current version:
    # the endpoint comes from the ip_address/agent_port env vars and only
    # the prompt/stream flags are parsed.
    parser = argparse.ArgumentParser()
    parser.add_argument("--prompt", type=str, help="prompt message")
    parser.add_argument("--stream", action="store_true", help="streaming mode")
    args = parser.parse_args()
    print(args)

    # Defaults match the supervisor agent's local deployment.
    ip_address = os.getenv("ip_address", "localhost")
    agent_port = os.getenv("agent_port", "9090")
    url = f"http://{ip_address}:{agent_port}/v1/chat/completions"
    process_request(url, args.prompt, args.stream)

View File

@@ -1,6 +1,7 @@
#!/bin/bash
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
set -xe
WORKPATH=$(dirname "$PWD")
@@ -9,22 +10,6 @@ echo "WORKDIR=${WORKDIR}"
export ip_address=$(hostname -I | awk '{print $1}')
export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN}
export TOOLSET_PATH=$WORKDIR/GenAIExamples/AgentQnA/tools/
export no_proxy="$no_proxy,rag-agent-endpoint,sql-agent-endpoint,react-agent-endpoint,agent-ui,vllm-gaudi-server,jaeger,grafana,prometheus,127.0.0.1,localhost,0.0.0.0,$ip_address"
function get_genai_comps() {
if [ ! -d "GenAIComps" ] ; then
git clone --depth 1 --branch ${opea_branch:-"main"} https://github.com/opea-project/GenAIComps.git
fi
}
function build_agent_docker_image() {
cd $WORKDIR/GenAIExamples/AgentQnA/docker_image_build/
get_genai_comps
echo "Build agent image with --no-cache..."
docker compose -f build.yaml build --no-cache
}
function stop_crag() {
cid=$(docker ps -aq --filter "name=kdd-cup-24-crag-service")
@@ -34,7 +19,12 @@ function stop_crag() {
function stop_agent_docker() {
cd $WORKPATH/docker_compose/intel/hpu/gaudi/
docker compose -f $WORKDIR/GenAIExamples/DocIndexRetriever/docker_compose/intel/cpu/xeon/compose.yaml -f compose.yaml down
container_list=$(cat compose.yaml | grep container_name | cut -d':' -f2)
for container_name in $container_list; do
cid=$(docker ps -aq --filter "name=$container_name")
echo "Stopping container $container_name"
if [[ ! -z "$cid" ]]; then docker rm $cid -f && sleep 1s; fi
done
}
function stop_llm(){
@@ -70,21 +60,33 @@ function stop_retrieval_tool() {
echo "workpath: $WORKPATH"
echo "=================== Stop containers ===================="
stop_crag
stop_llm
stop_agent_docker
stop_retrieval_tool
cd $WORKPATH/tests
echo "=================== #1 Building docker images===================="
build_agent_docker_image
bash step1_build_images.sh
echo "=================== #1 Building docker images completed===================="
echo "=================== #4 Start agent, API server, retrieval, and ingest data===================="
bash $WORKPATH/tests/step4_launch_and_validate_agent_gaudi.sh
echo "=================== #4 Agent, retrieval test passed ===================="
echo "=================== #2 Start retrieval tool===================="
bash step2_start_retrieval_tool.sh
echo "=================== #2 Retrieval tool started===================="
echo "=================== #3 Ingest data and validate retrieval===================="
bash step3_ingest_data_and_validate_retrieval.sh
echo "=================== #3 Data ingestion and validation completed===================="
echo "=================== #4 Start agent and API server===================="
bash step4_launch_and_validate_agent_tgi.sh
echo "=================== #4 Agent test passed ===================="
echo "=================== #5 Stop agent and API server===================="
stop_crag
stop_agent_docker
stop_retrieval_tool
stop_llm
echo "=================== #5 Agent and API server stopped===================="
echo y | docker system prune

View File

@@ -5,13 +5,11 @@
set -xe
WORKPATH=$(dirname "$PWD")
ls $WORKPATH
export WORKDIR=$WORKPATH/../../
echo "WORKDIR=${WORKDIR}"
export ip_address=$(hostname -I | awk '{print $1}')
export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN}
export TOOLSET_PATH=$WORKPATH/tools/
export MODEL_CACHE="./data"
export TOOLSET_PATH=$WORKDIR/GenAIExamples/AgentQnA/tools/
function stop_crag() {
cid=$(docker ps -aq --filter "name=kdd-cup-24-crag-service")
@@ -21,7 +19,13 @@ function stop_crag() {
function stop_agent_docker() {
cd $WORKPATH/docker_compose/amd/gpu/rocm
bash stop_agent_service_tgi_rocm.sh
# docker compose -f compose.yaml down
container_list=$(cat compose.yaml | grep container_name | cut -d':' -f2)
for container_name in $container_list; do
cid=$(docker ps -aq --filter "name=$container_name")
echo "Stopping container $container_name"
if [[ ! -z "$cid" ]]; then docker rm $cid -f && sleep 1s; fi
done
}
function stop_retrieval_tool() {

View File

@@ -1,66 +0,0 @@
#!/bin/bash
# Copyright (C) 2024 Advanced Micro Devices, Inc.
# SPDX-License-Identifier: Apache-2.0
set -e
WORKPATH=$(dirname "$PWD")
export LOG_PATH=${WORKPATH}
export WORKDIR=${WORKPATH}/../../
echo "WORKDIR=${WORKDIR}"
export ip_address=$(hostname -I | awk '{print $1}')
export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN}
export TOOLSET_PATH=$WORKPATH/tools/
export MODEL_CACHE="./data"
# Force-remove the mock CRAG API container if one is running.
function stop_crag() {
    cid=$(docker ps -aq --filter "name=kdd-cup-24-crag-service")
    echo "Stopping container kdd-cup-24-crag-service with cid $cid"
    # -n is the positive form of "! -z": only act when an id was found.
    if [[ -n "$cid" ]]; then
        docker rm $cid -f && sleep 1s
    fi
}
# Tear down the AgentQnA ROCm agent stack via its helper script.
function stop_agent_docker() {
    cd $WORKPATH/docker_compose/amd/gpu/rocm
    bash stop_agent_service_vllm_rocm.sh
}
# Stop the DocIndexRetriever compose stack used as the retrieval tool.
function stop_retrieval_tool() {
    echo "Stopping Retrieval tool"
    local RETRIEVAL_TOOL_PATH=$WORKDIR/GenAIExamples/DocIndexRetriever
    cd $RETRIEVAL_TOOL_PATH/docker_compose/intel/cpu/xeon/
    docker compose -f compose.yaml down
}
echo "workpath: $WORKPATH"
echo "=================== Stop containers ===================="
stop_crag
stop_agent_docker
stop_retrieval_tool
cd $WORKPATH/tests
echo "=================== #1 Building docker images===================="
bash step1_build_images_rocm_vllm.sh
echo "=================== #1 Building docker images completed===================="
echo "=================== #2 Start retrieval tool===================="
bash step2_start_retrieval_tool_rocm_vllm.sh
echo "=================== #2 Retrieval tool started===================="
echo "=================== #3 Ingest data and validate retrieval===================="
bash step3_ingest_data_and_validate_retrieval_rocm_vllm.sh
echo "=================== #3 Data ingestion and validation completed===================="
echo "=================== #4 Start agent and API server===================="
bash step4_launch_and_validate_agent_rocm_vllm.sh
echo "=================== #4 Agent test passed ===================="
echo "=================== #5 Stop agent and API server===================="
stop_crag
stop_agent_docker
stop_retrieval_tool
echo "=================== #5 Agent and API server stopped===================="
echo y | docker system prune
echo "ALL DONE!!"

View File

@@ -1,77 +0,0 @@
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
# Tool registry for the AgentQnA supervisor: each top-level key names a tool,
# mapping to its callable, argument schema, and output binding.
# NOTE(review): indentation restored — the flattened form was invalid YAML
# (duplicate top-level keys such as args_schema/query/type).
search_web_base:
  description: Search a web base for a given query. Returns text related to the query.
  callable_api: tools.py:search_web_base
  args_schema:
    query:
      type: str
      description: query
  return_output: retrieved_data

search_knowledge_base:
  description: Search a knowledge base for a given query. Returns text related to the query.
  callable_api: tools.py:search_knowledge_base
  args_schema:
    query:
      type: str
      description: query
  return_output: retrieved_data

search_artist_database:
  description: Search a SQL database on artists and their music with a natural language query. Returns text related to the query.
  callable_api: tools.py:search_sql_database
  args_schema:
    query:
      type: str
      description: natural language query
  return_output: retrieved_data

get_artist_birth_place:
  description: Get the birth place of an artist.
  callable_api: tools.py:get_artist_birth_place
  args_schema:
    artist_name:
      type: str
      description: artist name
  return_output: birth_place

get_billboard_rank_date:
  description: Get Billboard ranking for a specific rank and date.
  callable_api: tools.py:get_billboard_rank_date
  args_schema:
    rank:
      type: int
      description: the rank of interest, for example 1 for top 1
    date:
      type: str
      description: date
  return_output: billboard_info

get_song_release_date:
  description: Get the release date of a song.
  callable_api: tools.py:get_song_release_date
  args_schema:
    song_name:
      type: str
      description: song name
  return_output: release_date

get_members:
  description: Get the member list of a band.
  callable_api: tools.py:get_members
  args_schema:
    band_name:
      type: str
      description: band name
  return_output: members

get_grammy_best_artist_by_year:
  description: Get the Grammy Best New Artist for a specific year.
  callable_api: tools.py:get_grammy_best_artist_by_year
  args_schema:
    year:
      type: int
      description: year
  return_output: grammy_best_new_artist

View File

@@ -4,30 +4,9 @@
import os
import requests
from comps.cores.telemetry.opea_telemetry import opea_telemetry, tracer
from tools.pycragapi import CRAG
@opea_telemetry
def search_web_base(query: str) -> str:
import os
from langchain_core.tools import Tool
from langchain_google_community import GoogleSearchAPIWrapper
search = GoogleSearchAPIWrapper()
tool = Tool(
name="google_search",
description="Search Google for recent results.",
func=search.run,
)
response = tool.run(query)
return response
@opea_telemetry
def search_knowledge_base(query: str) -> str:
"""Search a knowledge base about music and singers for a given query.
@@ -43,7 +22,6 @@ def search_knowledge_base(query: str) -> str:
return response.json()["text"]
@opea_telemetry
def search_sql_database(query: str) -> str:
"""Search a SQL database on artists and their music with a natural language query.
@@ -59,7 +37,6 @@ def search_sql_database(query: str) -> str:
return response.json()["text"]
@opea_telemetry
def get_grammy_best_artist_by_year(year: int) -> dict:
"""Get the Grammy Best New Artist for a specific year."""
api = CRAG()
@@ -67,21 +44,18 @@ def get_grammy_best_artist_by_year(year: int) -> dict:
return api.music_grammy_get_best_artist_by_year(year)
@opea_telemetry
def get_members(band_name: str) -> dict:
"""Get the member list of a band."""
api = CRAG()
return api.music_get_members(band_name)
@opea_telemetry
def get_artist_birth_place(artist_name: str) -> dict:
"""Get the birthplace of an artist."""
api = CRAG()
return api.music_get_artist_birth_place(artist_name)
@opea_telemetry
def get_billboard_rank_date(rank: int, date: str = None) -> dict:
"""Get Billboard ranking for a specific rank and date."""
api = CRAG()
@@ -89,7 +63,6 @@ def get_billboard_rank_date(rank: int, date: str = None) -> dict:
return api.music_get_billboard_rank_date(rank, date)
@opea_telemetry
def get_song_release_date(song_name: str) -> dict:
"""Get the release date of a song."""
api = CRAG()

View File

@@ -1,77 +0,0 @@
From 799dcc304b3aecf2e2969df47c8dcac16d2267b0 Mon Sep 17 00:00:00 2001
From: lkk12014402 <kaokao.lv@intel.com>
Date: Fri, 4 Apr 2025 07:40:30 +0000
Subject: [PATCH] deal opea agent tool content.
---
backend/open_webui/utils/middleware.py | 54 ++++++++++++++++++++++++++
1 file changed, 54 insertions(+)
diff --git a/backend/open_webui/utils/middleware.py b/backend/open_webui/utils/middleware.py
index 289d887df..afa0edf1e 100644
--- a/backend/open_webui/utils/middleware.py
+++ b/backend/open_webui/utils/middleware.py
@@ -1486,6 +1486,60 @@ async def process_chat_response(
try:
data = json.loads(data)
+ tool_content_block = []
+ if data.get("tool_name"):
+ sources.append(
+ {
+ "source": {
+ "name": f"TOOL:{data.get('tool_name')}"},
+ "document": [data.get("tool_content")],
+ "metadata": [{
+ "source": f"TOOL:{data.get('tool_name')}"}],
+ }
+ )
+ events.append({"sources": sources})
+
+ await event_emitter(
+ {
+ "type": "chat:completion",
+ "data": {"sources": sources},
+ }
+ )
+ tool_content_block = [
+ {
+ "type": "tool_calls",
+ "content": [
+ {"id": data.get('tool_name'), "function": {"name": data.get('tool_name')}}
+ ]
+ }
+ ]
+
+ await event_emitter(
+ {
+ "type": "chat:completion",
+ "data": {
+ "content": serialize_content_blocks(tool_content_block),
+ },
+ }
+ )
+
+ tool_content_block = [
+ {
+ "type": "tool_calls",
+ "content": [
+ {"id": data.get('tool_name'), "function": {"name": data.get('tool_name')}}
+ ],
+ "results": [
+ {"tool_call_id": data.get('tool_name'), "content": data.get("tool_content")}
+ ]
+ },
+ {
+ "type": "text",
+ "content": "",
+ }
+ ]
+ content_blocks.extend(tool_content_block)
+
data, _ = await process_filter_functions(
request=request,
filter_functions=filter_functions,
--
2.34.1

View File

@@ -0,0 +1,10 @@
[*]
indent_style = tab
[package.json]
indent_style = space
indent_size = 2
[*.md]
indent_style = space
indent_size = 2

1
AgentQnA/ui/svelte/.env Normal file
View File

@@ -0,0 +1 @@
AGENT_URL = '/v1/chat/completions'

View File

@@ -0,0 +1,13 @@
.DS_Store
node_modules
/build
/.svelte-kit
/package
.env
.env.*
!.env.example
# Ignore files for PNPM, NPM and YARN
pnpm-lock.yaml
package-lock.json
yarn.lock

View File

@@ -0,0 +1,20 @@
module.exports = {
root: true,
parser: "@typescript-eslint/parser",
extends: ["eslint:recommended", "plugin:@typescript-eslint/recommended", "prettier"],
plugins: ["svelte3", "@typescript-eslint", "neverthrow"],
ignorePatterns: ["*.cjs"],
overrides: [{ files: ["*.svelte"], processor: "svelte3/svelte3" }],
settings: {
"svelte3/typescript": () => require("typescript"),
},
parserOptions: {
sourceType: "module",
ecmaVersion: 2020,
},
env: {
browser: true,
es2017: true,
node: true,
},
};

View File

@@ -0,0 +1,13 @@
.DS_Store
node_modules
/build
/.svelte-kit
/package
.env
.env.*
!.env.example
# Ignore files for PNPM, NPM and YARN
pnpm-lock.yaml
package-lock.json
yarn.lock

View File

@@ -0,0 +1,13 @@
{
"pluginSearchDirs": [
"."
],
"overrides": [
{
"files": "*.svelte",
"options": {
"parser": "svelte"
}
}
]
}

View File

@@ -0,0 +1,60 @@
# AgentQnA
## 📸 Project Screenshots
![project-screenshot](../../assets/img/agent_ui.png)
![project-screenshot](../../assets/img/agent_ui_result.png)
## 🧐 Features
Here're some of the project's features:
- Create Agent: Provide more precise answers based on user queries, showcase the high-quality output process of complex queries across different dimensions, and consolidate information to present comprehensive answers.
## 🛠️ Get it Running
1. Clone the repo.
2. cd command to the current folder.
```
cd AgentQnA/ui
```
3. Modify the required .env variables.
```
AGENT_URL = ''
```
4. **For Local Development:**
- Install the dependencies:
```
npm install
```
- Start the development server:
```
npm run dev
```
- The application will be available at `http://localhost:3000`.
5. **For Docker Setup:**
- Build the Docker image:
```
docker build -t opea:agent-ui .
```
- Run the Docker container:
```
docker run -d -p 3000:3000 --name agent-ui opea:agent-ui
```
- The application will be available at `http://localhost:3000`.

View File

@@ -0,0 +1,60 @@
{
"name": "agent-example",
"version": "0.0.1",
"private": true,
"scripts": {
"dev": "vite dev --host 0.0.0.0",
"build": "vite build",
"preview": "vite preview",
"check": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json",
"check:watch": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json --watch",
"lint": "prettier --check . && eslint .",
"format": "prettier --write ."
},
"devDependencies": {
"@fortawesome/free-solid-svg-icons": "6.2.0",
"@sveltejs/adapter-auto": "1.0.0-next.75",
"@sveltejs/kit": "^1.20.1",
"@tailwindcss/typography": "0.5.7",
"@types/debug": "4.1.7",
"@typescript-eslint/eslint-plugin": "^5.27.0",
"@typescript-eslint/parser": "^5.27.0",
"autoprefixer": "^10.4.7",
"daisyui": "^2.52.0",
"debug": "4.3.4",
"eslint": "^8.16.0",
"eslint-config-prettier": "^8.3.0",
"eslint-plugin-neverthrow": "1.1.4",
"eslint-plugin-svelte3": "^4.0.0",
"neverthrow": "5.0.0",
"pocketbase": "0.7.0",
"postcss": "^8.4.23",
"postcss-load-config": "^4.0.1",
"postcss-preset-env": "^8.3.2",
"prettier": "^2.8.8",
"prettier-plugin-svelte": "^2.7.0",
"prettier-plugin-tailwindcss": "^0.3.0",
"svelte": "^3.59.1",
"svelte-check": "^2.7.1",
"svelte-fa": "3.0.3",
"svelte-preprocess": "^4.10.7",
"tailwindcss": "^3.1.5",
"ts-pattern": "4.0.5",
"tslib": "^2.3.1",
"typescript": "^4.7.4",
"vite": "^4.3.9"
},
"type": "module",
"dependencies": {
"@heroicons/vue": "^2.1.5",
"echarts": "^5.4.2",
"flowbite-svelte": "^0.38.5",
"flowbite-svelte-icons": "^0.3.6",
"fuse.js": "^6.6.2",
"marked": "^15.0.0",
"ramda": "^0.29.0",
"sjcl": "^1.0.8",
"sse.js": "^0.6.1",
"svelte-notifications": "^0.9.98"
}
}

View File

@@ -0,0 +1,13 @@
const tailwindcss = require("tailwindcss");
const autoprefixer = require("autoprefixer");
const config = {
plugins: [
//Some plugins, like tailwindcss/nesting, need to run before Tailwind,
tailwindcss(),
//But others, like autoprefixer, need to run after,
autoprefixer,
],
};
module.exports = config;

50
AgentQnA/ui/svelte/src/app.d.ts vendored Normal file
View File

@@ -0,0 +1,50 @@
// Copyright (C) 2025 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
// See: https://kit.svelte.dev/docs/types#app
// import { Result} from "neverthrow";
declare namespace App {
interface Locals {
user?: User;
}
// interface PageData { }
// interface PageError {}
// interface Platform {}
}
interface User {
id?: string;
email: string;
password?: string;
token?: string;
[key: string]: any;
}
type AuthResponse = Result<User>;
interface AuthAdapter {
login(props: { email: string; password: string }): Promise<AuthResponse>;
signup(props: { email: string; password: string; password_confirm: string }): Promise<AuthResponse>;
validate_session(props: { token: string }): Promise<AuthResponse>;
logout(props: { token: string; email: string }): Promise<Result<void>>;
forgotPassword(props: { email: string; password: string }): Promise<Result<void>>;
}
interface ChatAdapter {
modelList(props: {}): Promise<Result<void>>;
txt2img(props: {}): Promise<Result<void>>;
}
interface ChatMessage {
role: string;
content: string;
}
interface ChatMessageType {
model: string;
knowledge: string;
temperature: string;
max_new_tokens: string;
topk: string;
}

View File

@@ -0,0 +1,17 @@
<!--
Copyright (C) 2025 Intel Corporation
SPDX-License-Identifier: Apache-2.0
-->
<!doctype html>
<html lang="en">
<head>
<meta charset="utf-8" />
<link rel="icon" href="%sveltekit.assets%/favicon.png" />
<meta name="viewport" content="width=device-width" />
%sveltekit.head%
</head>
<body>
<div>%sveltekit.body%</div>
</body>
</html>

View File

@@ -0,0 +1,82 @@
/* Write your global styles here, in PostCSS syntax */
@tailwind base;
@tailwind components;
@tailwind utilities;
.btn {
@apply flex-nowrap;
}
a.btn {
@apply no-underline;
}
.input {
@apply text-base;
}
.bg-dark-blue {
background-color: #004a86;
}
.bg-light-blue {
background-color: #0068b5;
}
.bg-turquoise {
background-color: #00a3f6;
}
.bg-header {
background-color: #ffffff;
}
.bg-button {
background-color: #0068b5;
}
.bg-title {
background-color: #f7f7f7;
}
.text-header {
color: #0068b5;
}
.text-button {
color: #0071c5;
}
.text-title-color {
color: rgb(38,38,38);
}
.font-intel {
font-family: "intel-clear","tahoma",Helvetica,"helvetica",Arial,sans-serif;
}
.font-title-intel {
font-family: "intel-one","intel-clear",Helvetica,Arial,sans-serif;
}
.bg-footer {
background-color: #e7e7e7;
}
.bg-light-green {
background-color: #d7f3a1;
}
.bg-purple {
  background-color: #653171;
}
/* NOTE(review): .bg-dark-blue is also defined earlier in this stylesheet
   with #004a86; this later declaration wins in the cascade — confirm which
   color is intended and remove the duplicate. */
.bg-dark-blue {
  background-color: #224678;
}
.border-input-color {
border-color: #605e5c;
}
.w-12\/12 {
width: 100%
}

View File

@@ -0,0 +1,25 @@
<!--
Copyright (C) 2025 Intel Corporation
SPDX-License-Identifier: Apache-2.0
-->
<svg
t="1731984271860"
class="w-8 h-8"
viewBox="0 0 1024 1024"
version="1.1"
xmlns="http://www.w3.org/2000/svg"
p-id="11418"
width="200"
height="200"
><path
d="M0 0m170.666667 0l682.666666 0q170.666667 0 170.666667 170.666667l0 682.666666q0 170.666667-170.666667 170.666667l-682.666666 0q-170.666667 0-170.666667-170.666667l0-682.666666q0-170.666667 170.666667-170.666667Z"
fill="#1890FF"
fill-opacity=".1"
p-id="11419"
/><path
d="M404.352 552.661333a63.018667 63.018667 0 1 0 0-125.994666 63.018667 63.018667 0 0 0 0 125.994666z m0 213.333334a63.018667 63.018667 0 1 0 0-125.994667 63.018667 63.018667 0 0 0 0 125.994667z m-213.333333-426.666667a63.018667 63.018667 0 1 0 0-125.994667 63.018667 63.018667 0 0 0 0 125.994667z m669.653333-10.88H376.362667a35.669333 35.669333 0 0 1-35.114667-36.096c0-19.882667 15.786667-36.096 35.114667-36.096h484.394666c19.370667 0 35.157333 16.213333 35.157334 36.096a35.669333 35.669333 0 0 1-35.242667 36.096z m16.384 213.034667h-260.821333c-10.410667 0-18.901333-16.213333-18.901334-36.096 0-19.925333 8.490667-36.138667 18.901334-36.138667h260.864c10.410667 0 18.901333 16.213333 18.901333 36.138667-0.042667 19.882667-8.490667 36.096-18.944 36.096z m0 212.992h-260.821333c-10.410667 0-18.901333-16.213333-18.901334-36.096 0-19.925333 8.490667-36.096 18.901334-36.096h260.864c10.410667 0 18.901333 16.213333 18.901333 36.096-0.042667 19.882667-8.490667 36.096-18.944 36.096z"
fill="#1890FF"
p-id="11420"
/></svg
>

After

Width:  |  Height:  |  Size: 1.5 KiB

View File

@@ -0,0 +1,9 @@
<!--
Copyright (C) 2025 Intel Corporation
SPDX-License-Identifier: Apache-2.0
-->
<svg class="w-3.5 h-3.5 me-2.5" aria-hidden="true" xmlns="http://www.w3.org/2000/svg" fill="currentColor" viewBox="0 0 20 20">
<path d="M14.707 7.793a1 1 0 0 0-1.414 0L11 10.086V1.5a1 1 0 0 0-2 0v8.586L6.707 7.793a1 1 0 1 0-1.414 1.414l4 4a1 1 0 0 0 1.416 0l4-4a1 1 0 0 0-.002-1.414Z"/>
<path d="M18 12h-2.55l-2.975 2.975a3.5 3.5 0 0 1-4.95 0L4.55 12H2a2 2 0 0 0-2 2v4a2 2 0 0 0 2 2h16a2 2 0 0 0 2-2v-4a2 2 0 0 0-2-2Zm-3 5a1 1 0 1 1 0-2 1 1 0 0 1 0 2Z"/>
</svg>

After

Width:  |  Height:  |  Size: 559 B

View File

@@ -0,0 +1,16 @@
<!--
Copyright (C) 2025 Intel Corporation
SPDX-License-Identifier: Apache-2.0
-->
<svg
class="me-2 h-3 w-3"
aria-hidden="true"
xmlns="http://www.w3.org/2000/svg"
fill="currentColor"
viewBox="0 0 20 14"
>
<path
d="M10 0C4.612 0 0 5.336 0 7c0 1.742 3.546 7 10 7 6.454 0 10-5.258 10-7 0-1.664-4.612-7-10-7Zm0 10a3 3 0 1 1 0-6 3 3 0 0 1 0 6Z"
/>
</svg>

After

Width:  |  Height:  |  Size: 413 B

View File

@@ -0,0 +1,97 @@
<!--
  Copyright (C) 2025 Intel Corporation
  SPDX-License-Identifier: Apache-2.0
-->
<!-- NOTE(review): removed a large commented-out previous version of this icon
     that was kept above the live markup; rendered output is unchanged. -->
<svg
  t="1731984480564"
  class="h-10 w-10"
  viewBox="0 0 1114 1024"
  version="1.1"
  xmlns="http://www.w3.org/2000/svg"
  p-id="29550"
  width="200"
  height="200"
  ><path
    d="M1081.916235 788.781176H909.312v172.634353a24.696471 24.696471 0 0 1-49.332706 0V788.781176H687.314824a24.696471 24.696471 0 0 1 0-49.362823H859.949176V566.814118a24.696471 24.696471 0 0 1 49.332706 0v172.634353h172.664471a24.696471 24.696471 0 0 1 0 49.362823z"
    fill="#0972E7"
    p-id="29551"
  /><path
    d="M174.772706 143.028706h509.831529c43.550118 0 78.516706 35.689412 78.516706 80.173176v280.576c0 44.453647-34.966588 80.173176-78.516706 80.173177H174.772706c-43.550118 0-78.516706-35.719529-78.516706-80.173177V223.171765c0-43.851294 34.966588-80.173176 78.516706-80.173177z"
    fill="#CAE4FF"
    p-id="29552"
  /><path
    d="M335.600941 910.637176H104.899765c-24.545882 0-43.550118-20.028235-43.550118-45.086117V107.098353c0-25.057882 19.636706-45.086118 44.182588-45.086118h742.912c23.913412 0 44.182588 20.028235 44.182589 44.453647V282.503529c0 16.896 13.492706 31.322353 30.659764 31.322353a30.72 30.72 0 0 0 30.689883-31.322353V106.465882C953.976471 47.585882 906.721882 0 849.046588 0H104.899765C47.224471 0 0 48.218353 0 107.098353v758.452706c0 58.88 46.622118 107.098353 104.297412 107.098353h230.671059c16.564706 0 30.659765-13.793882 30.659764-31.322353a30.027294 30.027294 0 0 0-30.057411-30.689883z"
    fill="#0972E7"
    p-id="29553"
  /><path
    d="M709.180235 219.196235c0-16.896-13.492706-31.322353-30.659764-31.322353H171.760941c-16.564706 0-30.659765 13.793882-30.659765 31.322353 0 16.926118 13.492706 31.322353 30.659765 31.322353h506.75953a30.72 30.72 0 0 0 30.659764-31.322353zM171.760941 436.525176c-16.564706 0-30.659765 13.793882-30.659765 31.322353 0 16.896 13.492706 31.322353 30.659765 31.322353h344.786824c16.564706 0 30.689882-13.793882 30.689882-31.322353 0-16.926118-13.522824-31.322353-30.689882-31.322353H171.760941z"
    fill="#0972E7"
    p-id="29554"
  /></svg
>

View File

@@ -0,0 +1,8 @@
<!--
Copyright (C) 2025 Intel Corporation
SPDX-License-Identifier: Apache-2.0
-->
<svg class="h-5 w-5 flex-shrink-0 text-[#1d4dd5]" viewBox="0 0 20 20" fill="currentColor" aria-hidden="true" data-slot="icon">
<path fill-rule="evenodd" d="M15.621 4.379a3 3 0 0 0-4.242 0l-7 7a3 3 0 0 0 4.241 4.243h.001l.497-.5a.75.75 0 0 1 1.064 1.057l-.498.501-.002.002a4.5 4.5 0 0 1-6.364-6.364l7-7a4.5 4.5 0 0 1 6.368 6.36l-3.455 3.553A2.625 2.625 0 1 1 9.52 9.52l3.45-3.451a.75.75 0 1 1 1.061 1.06l-3.45 3.451a1.125 1.125 0 0 0 1.587 1.595l3.454-3.553a3 3 0 0 0 0-4.242Z" clip-rule="evenodd"></path>
</svg>

After

Width:  |  Height:  |  Size: 601 B

View File

@@ -0,0 +1,13 @@
<!--
Copyright (C) 2025 Intel Corporation
SPDX-License-Identifier: Apache-2.0
-->
<svg
class="pointer-events-none absolute left-0 ml-4 hidden h-4 w-4 fill-current text-gray-500 group-hover:text-gray-400 sm:block"
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 20 20"
><path
d="M12.9 14.32a8 8 0 1 1 1.41-1.41l5.35 5.33-1.42 1.42-5.33-5.34zM8 14A6 6 0 1 0 8 2a6 6 0 0 0 0 12z"
/></svg
>

After

Width:  |  Height:  |  Size: 413 B

View File

@@ -0,0 +1,17 @@
<!--
Copyright (C) 2025 Intel Corporation
SPDX-License-Identifier: Apache-2.0
-->
<svg
fill="none"
class="relative h-5 w-5"
stroke-linecap="round"
stroke-linejoin="round"
stroke-width="2"
stroke="currentColor"
viewBox="0 0 24 24"
><path
d="M10 14l2-2m0 0l2-2m-2 2l-2-2m2 2l2 2m7-2a9 9 0 11-18 0 9 9 0 0118 0z"
/></svg
>

After

Width:  |  Height:  |  Size: 369 B

View File

@@ -0,0 +1,20 @@
<!--
Copyright (C) 2025 Intel Corporation
SPDX-License-Identifier: Apache-2.0
-->
<svg
t="1731987484014"
class="w-5 h-5"
viewBox="0 0 1267 1024"
version="1.1"
xmlns="http://www.w3.org/2000/svg"
p-id="49311"
width="200"
height="200"
><path
d="M56.880762 910.214095H1194.666667a57.051429 57.051429 0 0 1 56.880762 56.905143A57.051429 57.051429 0 0 1 1194.666667 1024H56.880762A57.051429 57.051429 0 0 1 0 967.119238a57.051429 57.051429 0 0 1 56.880762-56.905143z m1024-56.880762H170.666667a114.102857 114.102857 0 0 1-113.785905-113.785904V113.785905A114.102857 114.102857 0 0 1 170.666667 0h910.214095A114.102857 114.102857 0 0 1 1194.666667 113.785905l-0.560762 625.761524C1194.105905 802.133333 1143.466667 853.333333 1080.880762 853.333333zM495.006476 227.328a198.948571 198.948571 0 0 0-63.219809 59.977143c-43.227429 63.707429-45.519238 150.747429-3.974096 215.600762 63.146667 99.547429 187.733333 120.027429 277.040762 63.146666l88.185905 88.161524a42.910476 42.910476 0 0 0 60.294095 0 42.910476 42.910476 0 0 0 0-60.294095l-88.746666-88.185905c49.493333-77.360762 40.399238-180.906667-26.745905-248.027428a198.92419 198.92419 0 0 0-242.834286-30.378667z m216.112762 170.910476a113.785905 113.785905 0 1 1-227.571809 0 113.785905 113.785905 0 0 1 227.571809 0z"
fill="#0377FF"
p-id="49312"
/></svg
>

After

Width:  |  Height:  |  Size: 1.3 KiB

View File

@@ -0,0 +1,22 @@
<!--
Copyright (C) 2025 Intel Corporation
SPDX-License-Identifier: Apache-2.0
-->
<svg
t="1730766012593"
viewBox="0 0 1024 1024"
version="1.1"
xmlns="http://www.w3.org/2000/svg"
p-id="11065"
class="w-4 h-4"
><path
d="M996.693333 494.933333l-341.333333-126.293333-126.293333-341.333333c-3.413333-13.653333-27.306667-13.653333-30.72 0l-126.293334 341.333333-341.333333 126.293333c-6.826667 3.413333-10.24 10.24-10.24 17.066667s3.413333 13.653333 10.24 17.066667l341.333333 126.293333 126.293334 341.333333c3.413333 6.826667 10.24 10.24 17.066666 10.24s13.653333-3.413333 17.066667-10.24l126.293333-341.333333 341.333334-126.293333c6.826667-3.413333 10.24-10.24 10.24-17.066667s-6.826667-13.653333-13.653334-17.066667z m-314.026666 34.133334h-153.6V682.666667c0 10.24-6.826667 17.066667-17.066667 17.066666s-17.066667-6.826667-17.066667-17.066666v-153.6H341.333333c-10.24 0-17.066667-6.826667-17.066666-17.066667s6.826667-17.066667 17.066666-17.066667h153.6V341.333333c0-10.24 6.826667-17.066667 17.066667-17.066666s17.066667 6.826667 17.066667 17.066666v153.6H682.666667c10.24 0 17.066667 6.826667 17.066666 17.066667s-6.826667 17.066667-17.066666 17.066667z"
fill="#ffffff"
p-id="11066"
/><path
d="M293.546667 703.146667l-136.533334 136.533333c-6.826667 6.826667-6.826667 17.066667 0 23.893333 3.413333 3.413333 6.826667 3.413333 13.653334 3.413334s10.24 0 13.653333-3.413334l136.533333-136.533333c6.826667-6.826667 6.826667-17.066667 0-23.893333s-20.48-6.826667-27.306666 0zM716.8 324.266667c3.413333 0 10.24 0 13.653333-3.413334l136.533334-136.533333c6.826667-6.826667 6.826667-17.066667 0-23.893333s-17.066667-6.826667-23.893334 0l-136.533333 136.533333c-6.826667 6.826667-6.826667 17.066667 0 23.893333 0 0 6.826667 3.413333 10.24 3.413334zM293.546667 317.44c3.413333 3.413333 10.24 6.826667 13.653333 6.826667s10.24 0 13.653333-3.413334c6.826667-6.826667 6.826667-17.066667 0-23.893333l-136.533333-136.533333c-6.826667-6.826667-17.066667-6.826667-23.893333 0s-6.826667 17.066667 0 23.893333l133.12 133.12zM730.453333 703.146667c-6.826667-6.826667-17.066667-6.826667-23.893333 0s-6.826667 17.066667 0 23.893333l136.533333 136.533333c3.413333 3.413333 6.826667 3.413333 13.653334 3.413334s10.24 0 13.653333-3.413334c6.826667-6.826667 6.826667-17.066667 0-23.893333l-139.946667-136.533333z"
fill="#ffffff"
p-id="11067"
/></svg
>

After

Width:  |  Height:  |  Size: 2.3 KiB

View File

@@ -0,0 +1,44 @@
<!--
Copyright (C) 2025 Intel Corporation
SPDX-License-Identifier: Apache-2.0
-->
<svg
t="1731984744752"
class="w-12 h-12"
viewBox="0 0 1024 1024"
version="1.1"
xmlns="http://www.w3.org/2000/svg"
p-id="31753"
width="200"
height="200"
><path
d="M244.224 370.78016h526.336c48.64 0 87.552 39.424 87.552 87.552v292.352c0 48.64-39.424 87.552-87.552 87.552H244.224c-48.64 0-87.552-39.424-87.552-87.552v-292.352c-0.512-48.128 38.912-87.552 87.552-87.552z"
fill="#CAE4FF"
p-id="31754"
/><path
d="M760.832 983.30624H245.76c-114.176 0-206.848-92.672-206.848-206.848v-357.888c0-114.176 92.672-206.336 206.848-206.848h515.072c114.176 0 206.336 92.672 206.848 206.848v357.888c0 114.176-92.672 206.848-206.848 206.848zM245.76 270.09024c-81.92 0-148.48 66.56-148.48 148.48v357.888c0 81.92 66.56 148.48 148.48 148.48h515.072c81.92 0 148.48-66.56 148.48-148.48v-357.888c0-81.92-66.56-148.48-148.48-148.48H245.76z"
fill="#0972E7"
p-id="31755"
/><path
d="M303.616 748.29824c0.512 14.848-11.264 27.648-26.112 28.16-14.848 0.512-27.648-11.264-28.16-26.112v-291.328c0.512-14.848 13.312-26.624 28.16-26.112 14.336 0.512 25.6 11.776 26.112 26.112v289.28z"
fill="#0972E7"
p-id="31756"
/><path
d="M742.912 758.53824c0 13.824-11.264 25.088-25.088 25.088H274.432c-13.824 0.512-25.6-9.728-26.112-23.552-0.512-13.824 9.728-25.6 23.552-26.112h446.464c13.312 0 24.576 11.264 24.576 24.576z m-261.12-224.768c-9.728-10.24-26.112-10.24-36.352-0.512l-78.848 79.36c-10.24 10.24-10.24 26.624 0 36.864 9.728 10.24 26.112 10.24 36.352 0.512l79.36-78.848c9.728-10.752 9.728-27.136-0.512-37.376z"
fill="#0972E7"
p-id="31757"
/><path
d="M564.736 648.97024c10.24-9.728 10.24-26.112 0-36.352l-79.36-78.848c-10.24-10.24-26.624-10.24-36.864 0-10.24 9.728-10.24 26.112 0 36.352l78.848 78.848c10.752 10.24 27.136 10.24 37.376 0z"
fill="#0972E7"
p-id="31758"
/><path
d="M649.216 533.77024c-9.728-10.24-26.112-10.24-36.352-0.512l-79.36 78.848c-10.24 10.24-10.24 26.624 0 36.864 9.728 10.24 26.112 10.24 36.352 0.512l79.36-78.848c9.728-10.24 9.728-26.624 0-36.864z"
fill="#0972E7"
p-id="31759"
/><path
d="M714.24 468.74624c-9.728-10.24-26.112-10.24-36.352-0.512l-79.36 78.848c-10.24 10.24-10.24 26.624 0 36.864 9.728 10.24 26.112 10.24 36.352 0.512l79.36-78.848c10.24-10.24 10.24-27.136 0-36.864zM97.792 404.74624H39.936c0-51.2-0.512-120.832-0.512-120.832 0-112.128 91.136-203.264 203.264-203.264h136.704c123.392 0 194.56 66.56 194.56 182.784h-57.856c0-83.968-44.544-124.928-136.192-124.928H242.688c-80.384 0-145.408 65.024-145.408 145.408 0 0 0.512 69.632 0.512 120.832z"
fill="#0972E7"
p-id="31760"
/></svg
>

After

Width:  |  Height:  |  Size: 2.6 KiB

View File

@@ -0,0 +1,24 @@
<!--
Copyright (C) 2025 Intel Corporation
SPDX-License-Identifier: Apache-2.0
-->
<svg
t="1731987065328"
class="w-5 h-5"
viewBox="0 0 1024 1024"
version="1.1"
xmlns="http://www.w3.org/2000/svg"
p-id="35111"
width="200"
height="200"
><path
d="M740.565333 112c63.146667 0 114.304 51.2 114.304 114.304v457.130667H169.130667V226.304c0-63.146667 51.2-114.304 114.304-114.304h457.130666z m-219.434666 326.826667H331.434667c-32 0-48 16.042667-48 48.042666l0.213333 6.186667c2.005333 27.861333 17.92 41.813333 47.786667 41.813333h189.696c32 0 48-16 48-48l-0.213334-6.186666c-1.962667-27.904-17.92-41.813333-47.786666-41.813334z m171.434666-212.522667H331.434667c-32 0-48 16-48 48l0.213333 6.186667c2.005333 27.861333 17.92 41.813333 47.786667 41.813333h361.130666c32 0 48-16 48-48l-0.213333-6.186667c-2.005333-27.904-17.92-41.813333-47.786667-41.813333z"
fill="#93C0FB"
p-id="35112"
/><path
d="M154.752 422.101333l343.68 196.096a28.586667 28.586667 0 0 0 28.330667 0l342.485333-196.010666a28.586667 28.586667 0 0 1 42.752 24.789333v350.72c0 63.146667-51.2 114.304-114.304 114.304H226.261333c-63.104 0-114.261333-51.2-114.261333-114.304v-350.805333a28.586667 28.586667 0 0 1 42.752-24.789334z"
fill="#4B96F9"
p-id="35113"
/></svg
>

After

Width:  |  Height:  |  Size: 1.2 KiB

View File

@@ -0,0 +1,60 @@
<!--
Copyright (C) 2025 Intel Corporation
SPDX-License-Identifier: Apache-2.0
-->
<svg
t="1731987759041"
class="w-7 h-7"
viewBox="0 0 1230 1024"
version="1.1"
xmlns="http://www.w3.org/2000/svg"
p-id="50480"
width="200"
height="200"
><path
d="M455.756297 168.055915a69.477515 69.477515 0 0 1-1.687986 16.069629c-2.768298 18.837927-9.385204 26.062508-9.385204 31.058948s5.536595 6.076751 10.465515 6.07675c30.991428 0 98.51088-32.679414 196.414084-32.679414 28.695767 0 112.554925 3.848609 112.554926 32.139259C764.117632 276.154556 533.403666 351.101147 533.403666 351.101147a0.540156 0.540156 0 0 0 0.540156 0.540156h15.461954c40.511671-2.228142 140.643017-39.36384 321.662667-39.36384 118.024001 0 291.819069 21.606224 291.819069 62.590531 0 23.901886-56.783859 73.731241-116.94369 121.535013-51.7199 40.849268-122.885402 77.51233-122.885402 118.024001 0 27.007781 30.856389 31.599103 59.012001 31.599103 55.703547 0 139.02255-19.378083 193.645786-19.378083 23.766847 0 54.623236 3.848609 54.623236 27.007781 0 37.743373-262.042991 370.344191-652.710536 370.344191-44.090202 0-102.021891-6.751945-102.021891-49.896875s55.163392-108.031122 103.169721-145.301859c38.080971-29.303442 67.519451-38.216009 67.519452-43.752605 0-3.308453-11.07319-3.916128-14.921799-3.916128-32.00422 0-110.866939 16.137149-183.720427 60.767507-55.703547 34.367401-71.165502 48.208888-121.535013 73.73124-31.396545 15.529474-63.94092 37.135698-110.326783 37.135699-62.320454 0-93.784518-37.743373-93.784518-93.716999 0-63.738362 60.767506-81.495978 60.767506-143.006198C183.180272 657.571937 47.263616 698.556244 47.263616 629.821443c0-29.911117 27.007781-65.966504 33.759726-74.811552 114.242912-152.458921 226.190162-143.613873 253.805618-157.995517 31.464064-16.069629-2.768298-25.522353-25.387314-25.522352-20.998549 0-45.778188 8.304893-67.519452 17.21746-9.385204 3.848609-22.078861 16.069629-40.51167 16.069629-14.921799 0-19.378083-14.989318-19.378083-29.978636C180.95213 282.231307 374.057761 143.681393 434.757748 143.681393c15.461954 0 20.998549 8.845048 20.998549 24.374522z"
fill="#E1EBFF"
p-id="50481"
/><path
d="M898.751418 191.350125H341.513385A24.712119 24.712119 0 0 0 316.666227 216.062244v671.886061a24.712119 24.712119 0 0 0 24.847158 24.374522h557.238033a25.049716 25.049716 0 0 0 24.847158-24.374522V216.062244a24.712119 24.712119 0 0 0-24.847158-24.374521"
fill="#335DFF"
p-id="50482"
/><path
d="M847.436635 853.783463h-458.457075a25.049716 25.049716 0 0 1-24.847159-24.374522V279.463009a24.6446 24.6446 0 0 1 24.847159-24.374522h458.457075a25.049716 25.049716 0 0 1 24.847158 24.374522v549.945932a24.712119 24.712119 0 0 1-24.847158 24.374522z"
fill="#FFFFFF"
p-id="50483"
/><path
d="M58.471845 792.273243H41.389424v-17.21746a8.507451 8.507451 0 0 0-8.304893-8.304893 8.439931 8.439931 0 0 0-8.237373 8.304893v17.21746H8.304893a8.304893 8.304893 0 1 0 0 16.609785h17.082421v17.21746A8.507451 8.507451 0 0 0 33.759726 834.40538a8.57497 8.57497 0 0 0 8.237373-8.304892v-17.21746h17.082421a8.304893 8.304893 0 1 0-0.607675-16.609785z"
fill="#D2DFFF"
p-id="50484"
/><path
d="M809.355664 225.717526h-371.356983a16.204668 16.204668 0 0 1-16.542265-16.069629v-39.903996a16.204668 16.204668 0 0 1 16.542265-16.137149h371.356983a16.204668 16.204668 0 0 1 16.609785 16.069629v40.511671a16.137149 16.137149 0 0 1-16.609785 15.529474z"
fill="#8FAFFF"
p-id="50485"
/><path
d="M677.490175 181.357246H570.471845A16.542266 16.542266 0 0 1 553.659502 165.287617v-55.973625a16.137149 16.137149 0 0 1 16.542265-16.06963h107.018331a16.542266 16.542266 0 0 1 16.609785 16.06963v56.513781a16.474746 16.474746 0 0 1-16.339708 15.529473z"
fill="#8FAFFF"
p-id="50486"
/><path
d="M459.13227 688.02321h280.205723a16.272188 16.272188 0 0 1 16.542265 16.609785v8.912567a16.272188 16.272188 0 0 1-16.542265 16.609785H459.13227a16.272188 16.272188 0 0 1-16.542266-16.002109v-9.520243A17.014902 17.014902 0 0 1 459.13227 688.02321z m0-111.947251h224.569695a16.272188 16.272188 0 0 1 16.542266 16.609785v8.845049a16.272188 16.272188 0 0 1-16.542266 16.677304H459.13227a16.272188 16.272188 0 0 1-16.542266-16.069629 1.890545 1.890545 0 0 1 0-0.607675v-8.845049A16.609785 16.609785 0 0 1 459.13227 576.075959z m0-112.014769h224.569695a16.272188 16.272188 0 0 1 16.542266 16.609785v8.912567a16.272188 16.272188 0 0 1-16.677305 16.812344H459.13227a16.204668 16.204668 0 0 1-16.542266-16.00211 1.890545 1.890545 0 0 1 0-0.607676v-9.115125A16.677304 16.677304 0 0 1 459.13227 464.06119z m0-111.947251h280.205723a16.272188 16.272188 0 0 1 16.879863 16.609785v8.912568a16.272188 16.272188 0 0 1-16.542266 16.609785H459.13227A16.339707 16.339707 0 0 1 442.454965 378.108928v-9.452723A16.609785 16.609785 0 0 1 459.13227 352.113939zM247.526309 0.810233l-10.465515 18.905447a39.093762 39.093762 0 0 1-14.921799 14.921799l-18.230252 10.533034a2.160622 2.160622 0 0 0 0 3.375973l18.230252 10.465515A39.296321 39.296321 0 0 1 237.060794 74.271397l10.465515 18.837926a2.025584 2.025584 0 0 0 3.308453 0L261.300277 74.271397a38.823685 38.823685 0 0 1 14.921799-14.989319l18.230252-10.465515a2.160622 2.160622 0 0 0 0-3.375972l-18.230252-10.533035a38.621126 38.621126 0 0 1-14.921799-15.191876L250.834762 0.810233c-0.540156-0.810233-2.228142-0.810233-3.308453 0zM1057.624687 183.585388a22.754055 22.754055 0 1 1-22.011341 22.686536 21.606224 21.606224 0 0 1 22.011341-22.686536z m0-11.07319a33.759726 33.759726 0 0 0-33.084531 33.759726 33.152051 33.152051 0 1 0 66.236581 0 33.354609 33.354609 0 0 0-33.15205-33.759726z"
fill="#D2DFFF"
p-id="50487"
/><path
d="M642.785177 138.144798a22.821575 22.821575 0 0 1-22.686535 22.686535 22.281419 22.281419 0 0 1-22.551497-22.213899 1.147831 1.147831 0 0 1 0-0.472636 22.821575 22.821575 0 0 1 22.619016-22.754056 22.416458 22.416458 0 0 1 22.686536 22.2139z m68.059607 445.628379A178.791507 178.791507 0 1 0 762.969801 456.971647a179.66926 179.66926 0 0 0-52.125017 126.869049z"
fill="#FFFFFF"
p-id="50488"
/><path
d="M889.298694 436.91837a145.706976 145.706976 0 0 0-145.504417 145.909535v1.012791a145.571937 145.571937 0 1 0 291.076355 0.742714v-0.742714a146.787287 146.787287 0 0 0-145.571938-146.989845z"
fill="#2ED073"
p-id="50489"
/><path
d="M856.230925 638.373959m5.681472-5.681472l95.534667-95.534667q5.681472-5.681472 11.362944 0l0 0q5.681472 5.681472 0 11.362943l-95.534667 95.534668q-5.681472 5.681472-11.362944 0l0 0q-5.681472-5.681472 0-11.362944Z"
fill="#FFFFFF"
p-id="50490"
/><path
d="M804.217647 586.365756m5.681472-5.681472l0 0q5.681472-5.681472 11.362944 0l51.944886 51.944887q5.681472 5.681472 0 11.362944l0 0q-5.681472 5.681472-11.362944 0l-51.944886-51.944887q-5.681472-5.681472 0-11.362944Z"
fill="#FFFFFF"
p-id="50491"
/></svg
>

After

Width:  |  Height:  |  Size: 6.6 KiB

View File

@@ -0,0 +1,8 @@
<!--
Copyright (C) 2025 Intel Corporation
SPDX-License-Identifier: Apache-2.0
-->
<svg class="w-2.5 h-2.5 text-blue-800 dark:text-blue-300" aria-hidden="true" xmlns="http://www.w3.org/2000/svg" fill="currentColor" viewBox="0 0 20 20">
<path d="M20 4a2 2 0 0 0-2-2h-2V1a1 1 0 0 0-2 0v1h-3V1a1 1 0 0 0-2 0v1H6V1a1 1 0 0 0-2 0v1H2a2 2 0 0 0-2 2v2h20V4ZM0 18a2 2 0 0 0 2 2h16a2 2 0 0 0 2-2V8H0v10Zm5-8h10a1 1 0 0 1 0 2H5a1 1 0 0 1 0-2Z"/>
</svg>

After

Width:  |  Height:  |  Size: 451 B

View File

@@ -0,0 +1,36 @@
<!--
Copyright (C) 2025 Intel Corporation
SPDX-License-Identifier: Apache-2.0
-->
<svg
t="1731987374334"
class="w-4 h-4"
viewBox="0 0 1024 1024"
version="1.1"
xmlns="http://www.w3.org/2000/svg"
p-id="47097"
width="200"
height="200"
><path
d="M210.488889 246.670222m35.043555 0l349.923556 0q35.043556 0 35.043556 35.043556l0 0.056889q0 35.043556-35.043556 35.043555l-349.923556 0q-35.043556 0-35.043555-35.043555l0-0.056889q0-35.043556 35.043555-35.043556Z"
fill="#89BAF7"
p-id="47098"
/><path
d="M210.488889 471.210667m35.043555 0l349.923556 0q35.043556 0 35.043556 35.043555l0 0.056889q0 35.043556-35.043556 35.043556l-349.923556 0q-35.043556 0-35.043555-35.043556l0-0.056889q0-35.043556 35.043555-35.043555Z"
fill="#89BAF7"
p-id="47099"
/><path
d="M210.488889 695.296m35.043555 0l140.344889 0q35.043556 0 35.043556 35.043556l0 0.056888q0 35.043556-35.043556 35.043556l-140.344889 0q-35.043556 0-35.043555-35.043556l0-0.056888q0-35.043556 35.043555-35.043556Z"
fill="#89BAF7"
p-id="47100"
/><path
d="M436.565333 982.186667h-261.176889a175.559111 175.559111 0 0 1-175.331555-175.388445v-631.466666a175.559111 175.559111 0 0 1 175.331555-175.388445h490.951112a175.559111 175.559111 0 0 1 175.331555 175.388445v278.016a35.100444 35.100444 0 1 1-70.144 0v-278.016a105.358222 105.358222 0 0 0-105.187555-105.244445h-490.951112a105.358222 105.358222 0 0 0-105.187555 105.244445v631.466666a105.358222 105.358222 0 0 0 105.187555 105.244445h261.176889a35.100444 35.100444 0 0 1 0 70.144z"
fill="#0A71EF"
p-id="47101"
/><path
d="M1008.184889 628.167111l-5.688889-11.889778-2.104889-2.616889a19.683556 19.683556 0 0 0-24.519111-2.616888h-0.910222l-97.28 97.336888-49.265778-49.265777 101.489778-101.717334-1.080889-1.422222a18.090667 18.090667 0 0 0-4.039111-18.944 16.668444 16.668444 0 0 0-5.688889-3.868444l-10.695111-4.721778a192.056889 192.056889 0 0 0-258.958222 235.292444l-105.927112 105.927111a87.608889 87.608889 0 0 0 0 123.619556 87.608889 87.608889 0 0 0 123.448889 0l105.927111-106.097778a188.757333 188.757333 0 0 0 59.278223 9.500445 192.056889 192.056889 0 0 0 176.355555-268.288z m-176.355556 215.836445a137.728 137.728 0 0 1-55.409777-11.377778l-16.327112-6.997334-130.446222 130.446223a35.669333 35.669333 0 0 1-49.265778 0 34.702222 34.702222 0 0 1 0-49.265778l130.446223-130.446222-6.997334-16.497778a136.192 136.192 0 0 1-11.377777-55.239111 139.719111 139.719111 0 0 1 139.548444-139.548445 111.502222 111.502222 0 0 1 15.303111 0.853334l-79.985778 79.985777a20.650667 20.650667 0 0 0-3.356444 21.219556l-0.512 1.251556 101.489778 101.546666a19.569778 19.569778 0 0 0 24.746666 0.341334l81.009778-81.009778a151.210667 151.210667 0 0 1 0.853333 15.416889 139.719111 139.719111 0 0 1-139.605333 139.320889z"
fill="#FD7733"
p-id="47102"
/></svg
>

After

Width:  |  Height:  |  Size: 2.7 KiB

View File

@@ -0,0 +1,28 @@
<!--
Copyright (C) 2025 Intel Corporation
SPDX-License-Identifier: Apache-2.0
-->
<svg
t="1699532005309"
class="icon"
viewBox="0 0 1024 1024"
version="1.1"
xmlns="http://www.w3.org/2000/svg"
p-id="31791"
width="1rem"
height="1rem"
><path
d="M505.088 513.1264m-450.816 0a450.816 450.816 0 1 0 901.632 0 450.816 450.816 0 1 0-901.632 0Z"
fill="#e02424"
p-id="31792"
data-spm-anchor-id="a313x.search_index.0.i28.33343a81AAN1qI"
class="selected"
/><path
d="M356.6592 575.0784c0-54.5792 0.3584-109.1584-0.2048-163.6864-0.1536-15.872 5.5296-24.2176 20.992-29.5424 58.88-20.2752 93.7472-63.1296 110.848-121.9072 5.9392-20.4288 11.4176-41.216 19.7632-60.672 13.4656-31.5904 38.2464-42.7008 72.6528-35.328 26.5216 5.6832 43.3152 28.3648 43.5712 60.16 0.3584 40.4992 0.0512 80.9984 0.1536 121.4976 0.0512 22.2208 3.9424 26.7264 26.5728 26.9824 45.568 0.512 91.1872 1.536 136.704-0.256 40.5504-1.5872 69.9392 24.832 59.7504 69.9904-12.2368 54.0672-27.648 107.4688-42.7008 160.8704-9.2672 32.9216-20.1728 65.4336-30.8736 97.9456-14.1312 43.008-40.448 62.0544-84.8896 62.0544H390.2976c-32.1024 0-33.6384-1.536-33.6384-32.8704v-155.2384zM307.8656 573.9008c0 52.8896 0.1024 105.7792-0.0512 158.6688-0.1024 26.0096-4.9152 30.6176-30.3616 30.6688-7.3216 0-14.6432 0.0512-21.9648 0-29.8496-0.1536-44.032-14.08-44.2368-44.6976-0.3072-55.1424-0.1024-110.2848-0.1024-165.4272 0-40.4992-0.1536-81.0496 0.0512-121.5488 0.2048-32.2048 15.7696-47.616 47.5136-47.7184 49.1008-0.2048 49.152-0.2048 49.152 48.2304 0.0512 47.2576 0.0512 94.5152 0 141.824z"
fill="#ffffff"
p-id="31793"
data-spm-anchor-id="a313x.search_index.0.i26.33343a81AAN1qI"
class=""
/></svg
>

After

Width:  |  Height:  |  Size: 1.6 KiB

View File

@@ -0,0 +1,24 @@
<!--
Copyright (C) 2025 Intel Corporation
SPDX-License-Identifier: Apache-2.0
-->
<svg
t="1699531880178"
class="icon"
viewBox="0 0 1024 1024"
version="1.1"
xmlns="http://www.w3.org/2000/svg"
p-id="26552"
width="1rem"
height="1rem"
><path
d="M505.088 513.1264m-450.816 0a450.816 450.816 0 1 0 901.632 0 450.816 450.816 0 1 0-901.632 0Z"
fill="#FFBC42"
p-id="26553"
/><path
d="M356.6592 575.0784c0-54.5792 0.3584-109.1584-0.2048-163.6864-0.1536-15.872 5.5296-24.2176 20.992-29.5424 58.88-20.2752 93.7472-63.1296 110.848-121.9072 5.9392-20.4288 11.4176-41.216 19.7632-60.672 13.4656-31.5904 38.2464-42.7008 72.6528-35.328 26.5216 5.6832 43.3152 28.3648 43.5712 60.16 0.3584 40.4992 0.0512 80.9984 0.1536 121.4976 0.0512 22.2208 3.9424 26.7264 26.5728 26.9824 45.568 0.512 91.1872 1.536 136.704-0.256 40.5504-1.5872 69.9392 24.832 59.7504 69.9904-12.2368 54.0672-27.648 107.4688-42.7008 160.8704-9.2672 32.9216-20.1728 65.4336-30.8736 97.9456-14.1312 43.008-40.448 62.0544-84.8896 62.0544H390.2976c-32.1024 0-33.6384-1.536-33.6384-32.8704v-155.2384zM307.8656 573.9008c0 52.8896 0.1024 105.7792-0.0512 158.6688-0.1024 26.0096-4.9152 30.6176-30.3616 30.6688-7.3216 0-14.6432 0.0512-21.9648 0-29.8496-0.1536-44.032-14.08-44.2368-44.6976-0.3072-55.1424-0.1024-110.2848-0.1024-165.4272 0-40.4992-0.1536-81.0496 0.0512-121.5488 0.2048-32.2048 15.7696-47.616 47.5136-47.7184 49.1008-0.2048 49.152-0.2048 49.152 48.2304 0.0512 47.2576 0.0512 94.5152 0 141.824z"
fill="#FFFFFF"
p-id="26554"
/></svg
>

After

Width:  |  Height:  |  Size: 1.5 KiB

View File

@@ -0,0 +1,28 @@
<!--
Copyright (C) 2025 Intel Corporation
SPDX-License-Identifier: Apache-2.0
-->
<svg
t="1697617760586"
class="h-10 w-10"
viewBox="0 0 1024 1024"
version="1.1"
xmlns="http://www.w3.org/2000/svg"
p-id="5216"
width="200"
height="200"
><path
d="M826.9 461.6h-723v391.5c0 21.7 17.7 39.4 39.4 39.4h677c4.1-6.4 6.6-14.1 6.6-22.3V461.6zM826.9 422.3v-64.7c0-22.9-18.6-41.6-41.6-41.6H340l-32.7-61.2c-7.2-13.5-21.3-22-36.7-22H134.9c-12.4 0-23.4 5.5-31 14.2v175.2l723 0.1z"
fill="#FFD524"
p-id="5217"
/><path
d="M687 252.6h11.4c10.8 0 19.6-8.8 19.6-19.6s-8.8-19.6-19.6-19.6H687c-10.8 0-19.6 8.8-19.6 19.6s8.8 19.6 19.6 19.6zM753.3 252.6h18c10.8 0 19.6-8.8 19.6-19.6s-8.8-19.6-19.6-19.6h-18c-10.8 0-19.6 8.8-19.6 19.6s8.7 19.6 19.6 19.6z"
fill="#6B400D"
p-id="5218"
/><path
d="M881.6 213.3h-44.9c-10.8 0-19.6 8.8-19.6 19.6s8.8 19.6 19.6 19.6h44.9c21.7 0 39.4 17.7 39.4 39.4v130.3H103.8V173.9c0-21.7 17.7-39.4 39.4-39.4h193c14.6 0 27.9 8 34.7 20.8l46.5 86.9c3.4 6.4 10.1 10.4 17.3 10.4h84.5c10.8 0 19.6-8.8 19.6-19.6s-8.8-19.6-19.6-19.6h-72.7l-40.9-76.5c-13.7-25.7-40.3-41.6-69.4-41.6H143.3c-43.4 0-78.7 35.3-78.7 78.7v679.3c0 43.4 35.3 78.7 78.7 78.7h738.3c43.4 0 78.7-35.3 78.7-78.7V292c0-43.4-35.3-78.7-78.7-78.7z m39.5 639.8c0 21.7-17.7 39.4-39.4 39.4H143.4c-21.7 0-39.4-17.7-39.4-39.4V461.6h817.2v391.5z"
fill="#6B400D"
p-id="5219"
/></svg
>

After

Width:  |  Height:  |  Size: 1.3 KiB

View File

@@ -0,0 +1,38 @@
<!--
Copyright (C) 2025 Intel Corporation
SPDX-License-Identifier: Apache-2.0
-->
<svg
class="h-10 w-10"
viewBox="0 0 1024 1024"
version="1.1"
xmlns="http://www.w3.org/2000/svg"
width="200"
height="200"
><path
d="M192.028 0.008c-47.06 0-85.34 38.282-85.34 85.342V853.34c0 11.782 9.53 21.312 21.342 21.312h106.654V0.008H192.028z"
fill="#656D78"
/><path
d="M192.028 789.34c-35.374 0-63.998 28.656-63.998 64s28.624 63.996 63.998 63.996H896V789.34H192.028z"
fill="#E6E9ED"
/><path
d="M917.312 21.352c0-11.782-9.562-21.344-21.312-21.344H234.684v810.644H896c11.75 0 21.312-9.532 21.312-21.312V21.352z"
fill="#5D9CEC"
/><path
d="M384.026 874.652H213.34c-11.75 0-21.312-9.532-21.312-21.312s9.562-21.344 21.312-21.344h170.686c11.782 0 21.312 9.562 21.312 21.344s-9.532 21.312-21.312 21.312zM725.316 831.996c-11.75 0-21.312 9.562-21.312 21.344s9.562 21.312 21.312 21.312H896v-42.656h-170.684z"
fill="#CCD1D9"
/><path d="M320.026 149.35h490.662v255.996H320.026z" fill="#E6E9ED" /><path
d="M128.03 170.678c11.782 0 21.312-9.546 21.312-21.328s-9.532-21.344-21.312-21.344H106.686v42.672h21.344zM128.03 298.676c11.782 0 21.312-9.546 21.312-21.328 0-11.78-9.532-21.342-21.312-21.342H106.686v42.67h21.344zM128.03 426.672c11.782 0 21.312-9.546 21.312-21.328 0-11.78-9.532-21.342-21.312-21.342H106.686v42.67h21.344zM128.03 554.656c11.782 0 21.312-9.532 21.312-21.312S139.81 512 128.03 512H106.686v42.656h21.344zM128.03 682.656c11.782 0 21.312-9.532 21.312-21.312S139.81 640 128.03 640H106.686v42.656h21.344zM725.316 298.676h-319.98c-11.75 0-21.312-9.546-21.312-21.328 0-11.78 9.562-21.342 21.312-21.342h319.98c11.812 0 21.376 9.562 21.376 21.342s-9.564 21.328-21.376 21.328z"
fill="#434A54"
/><path
d="M917.312 917.336c0-11.782-9.562-21.344-21.312-21.344H234.684v42.656H896c11.75 0 21.312-9.53 21.312-21.312z"
fill="#5D9CEC"
/><path
d="M234.684 895.992H192.028c-23.53 0-42.686-19.152-42.686-42.652 0-23.532 19.156-42.688 42.686-42.688h42.656v-42.656H192.028c-47.06 0-85.34 38.282-85.34 85.344 0 47.028 38.28 85.308 85.34 85.308h42.656v-42.656z"
fill="#656D78"
/><path
d="M362.682 853.34H213.34v149.308c0 6.876 3.312 13.312 8.876 17.312s12.718 5.094 19.218 2.938l57.25-19.094 57.25 19.094c2.218 0.718 4.468 1.094 6.75 1.094 4.406 0 8.782-1.376 12.468-4.032 5.562-4 8.876-10.438 8.876-17.312V853.34h-21.346z"
fill="#ED5564"
/></svg
>

After

Width:  |  Height:  |  Size: 2.3 KiB

View File

@@ -0,0 +1,32 @@
<!--
Copyright (C) 2025 Intel Corporation
SPDX-License-Identifier: Apache-2.0
-->
<svg
t="1699516160889"
class="icon"
viewBox="0 0 1024 1024"
version="1.1"
xmlns="http://www.w3.org/2000/svg"
p-id="1937"
width="14"
height="14"
><path
d="M226.94 618.816L191.296 751.88l-130.278 2.756C22.084 682.422 0 599.8 0 512c0-84.902 20.648-164.966 57.248-235.464h0.028L173.26 297.8l50.808 115.288c-10.634 31.002-16.43 64.282-16.43 98.912 0.004 37.584 6.812 73.594 19.302 106.816z"
fill="#FBBB00"
p-id="1938"
/><path
d="M1015.054 416.352C1020.934 447.324 1024 479.31 1024 512c0 36.656-3.854 72.412-11.196 106.902-24.924 117.366-90.05 219.85-180.268 292.374l-0.028-0.028-146.088-7.454-20.676-129.07c59.864-35.108 106.648-90.05 131.292-155.822h-273.78V416.352h491.798z"
fill="#518EF8"
p-id="1939"
/><path
d="M832.506 911.248l0.028 0.028C744.792 981.802 633.332 1024 512 1024c-194.982 0-364.504-108.982-450.982-269.362l165.922-135.82c43.238 115.396 154.556 197.542 285.06 197.542 56.094 0 108.646-15.164 153.74-41.636l166.766 136.524z"
fill="#28B446"
p-id="1940"
/><path
d="M838.808 117.872l-165.866 135.792C626.272 224.492 571.104 207.64 512 207.64c-133.458 0-246.858 85.914-287.93 205.448l-166.794-136.552h-0.028C142.46 112.246 314.12 0 512 0c124.23 0 238.136 44.252 326.808 117.872z"
fill="#F14336"
p-id="1941"
/></svg
>

After

Width:  |  Height:  |  Size: 1.3 KiB

View File

@@ -0,0 +1,41 @@
<!--
Copyright (C) 2025 Intel Corporation
SPDX-License-Identifier: Apache-2.0
-->
<svg
t="1689149335476"
class="icon"
viewBox="0 0 1024 1024"
version="1.1"
xmlns="http://www.w3.org/2000/svg"
p-id="1964"
width="32"
height="32"
data-spm-anchor-id="a313x.7781069.0.i17"
><path
d="M949.333333 96v704H614.4l-21.333333 25.6v2.133333L512 906.666667l-78.933333-78.933334-2.133334-2.133333-21.333333-25.6H74.666667v-704h874.666666M960 21.333333H64C27.733333 21.333333 0 49.066667 0 85.333333v725.333334c0 36.266667 27.733333 64 64 64h311.466667v2.133333l121.6 121.6c4.266667 4.266667 10.666667 6.4 14.933333 6.4s10.666667-2.133333 14.933333-6.4l121.6-121.6V874.666667H960c36.266667 0 64-27.733333 64-64V85.333333c0-36.266667-27.733333-64-64-64z"
fill="#0071c5"
p-id="1965"
data-spm-anchor-id="a313x.7781069.0.i8"
class=""
/><path
d="M512 601.6c-21.333333 0-38.4-17.066667-38.4-36.266667v-27.733333c0-21.333333 17.066667-36.266667 36.266667-36.266667 21.333333 0 36.266667 17.066667 36.266666 36.266667v27.733333c4.266667 19.2-12.8 36.266667-34.133333 36.266667z"
fill="#0071c5"
p-id="1966"
data-spm-anchor-id="a313x.7781069.0.i9"
class=""
/><path
d="M512 742.4c-23.466667 0-42.666667-19.2-42.666667-42.666667s19.2-42.666667 42.666667-42.666666 42.666667 19.2 42.666667 42.666666-19.2 42.666667-42.666667 42.666667z"
fill="#0071c5"
p-id="1967"
data-spm-anchor-id="a313x.7781069.0.i7"
class="selected"
/><path
d="M512 576c-21.333333 0-38.4-17.066667-38.4-38.4s17.066667-38.4 38.4-38.4c68.266667 0 121.6-55.466667 121.6-123.733333S580.266667 256 512 256c-64 0-119.466667 51.2-121.6 115.2-2.133333 21.333333-19.2 36.266667-38.4 34.133333-21.333333-2.133333-36.266667-19.2-34.133333-38.4 6.4-104.533333 91.733333-185.6 196.266666-185.6 108.8 0 196.266667 87.466667 196.266667 196.266667S620.8 576 512 576z"
fill="#0071c5"
p-id="1968"
data-spm-anchor-id="a313x.7781069.0.i10"
class="selected"
/></svg
>

After

Width:  |  Height:  |  Size: 1.9 KiB

View File

@@ -0,0 +1,6 @@
<!--
Copyright (C) 2025 Intel Corporation
SPDX-License-Identifier: Apache-2.0
-->
<svg t="1689232479587" class="icon" viewBox="0 0 1024 1024" version="1.1" xmlns="http://www.w3.org/2000/svg" p-id="20682" width="32" height="32"><path d="M512 512m-384 0a384 384 0 1 0 768 0 384 384 0 1 0-768 0Z" fill="#2196F3" p-id="20683"></path><path d="M465.28 613.333333c0-134.4 109.013333-138.453333 109.013333-217.386666a58.88 58.88 0 0 0-61.013333-64 59.093333 59.093333 0 0 0-64 59.946666h-81.066667c0-21.333333 10.24-128 144.64-128 142.08 0 142.08 108.586667 142.08 128 0 106.666667-114.346667 119.04-114.346666 220.373334z m-5.546667 103.68a42.666667 42.666667 0 0 1 45.653334-43.946666 42.666667 42.666667 0 0 1 45.866666 43.946666 42.666667 42.666667 0 0 1-45.866666 42.666667 42.666667 42.666667 0 0 1-45.653334-42.666667z" fill="#FFFFFF" p-id="20684"></path></svg>

After

Width:  |  Height:  |  Size: 866 B

View File

@@ -0,0 +1,92 @@
<!--
Copyright (C) 2025 Intel Corporation
SPDX-License-Identifier: Apache-2.0
-->
<svg
t="1710303075816"
class="icon"
viewBox="0 0 1024 1024"
version="1.1"
xmlns="http://www.w3.org/2000/svg"
p-id="32606"
width="6rem"
height="6rem"
><path
d="M241.0496 404.48a31.470933 31.470933 0 0 1-31.1296-28.2624 31.061333 31.061333 0 1 1 33.5872 28.2624z m0-54.613333H238.933333A23.415467 23.415467 0 0 0 217.7024 375.466667a23.7568 23.7568 0 0 0 25.3952 21.2992 23.415467 23.415467 0 0 0-2.048-46.6944z m572.6208 331.3664a22.254933 22.254933 0 0 1-21.777067-19.933867 21.504 21.504 0 0 1 20.0704-23.7568 22.596267 22.596267 0 0 1 15.9744 4.9152 21.162667 21.162667 0 0 1 7.7824 15.1552 21.504 21.504 0 0 1-20.0704 23.7568z m0-36.4544H812.373333a14.882133 14.882133 0 0 0-13.1072 15.5648 14.609067 14.609067 0 0 0 15.5648 13.1072 14.882133 14.882133 0 0 0 13.1072-15.5648 14.609067 14.609067 0 0 0-14.267733-12.970667z m-493.568-375.466667a16.1792 16.1792 0 0 1-11.0592-4.096 17.8176 17.8176 0 0 1-6.144-11.8784 17.271467 17.271467 0 0 1 34.133333-3.2768 16.452267 16.452267 0 0 1-4.096 12.6976 16.1792 16.1792 0 0 1-11.8784 6.144 1.501867 1.501867 0 0 1-0.955733 0.4096z m0-27.306667h-0.8192A9.284267 9.284267 0 0 0 310.6816 252.586667a10.103467 10.103467 0 0 0 3.345067 6.826666 8.8064 8.8064 0 0 0 6.826666 2.048 10.103467 10.103467 0 0 0 6.826667-3.2768 8.8064 8.8064 0 0 0 2.048-6.826666 9.352533 9.352533 0 0 0-9.6256-9.079467z"
fill="#82A0F6"
p-id="32607"
/><path
d="M254.1568 719.0528h-32.3584a3.6864 3.6864 0 1 1 0-7.3728h32.3584a3.6864 3.6864 0 0 1 0 7.3728z"
fill="#2B77EF"
p-id="32608"
/><path
d="M237.7728 735.0272a3.6864 3.6864 0 0 1-3.6864-3.6864v-32.3584a3.6864 3.6864 0 1 1 7.3728 0v32.3584c0.4096 2.048-1.2288 3.6864-3.6864 3.6864z"
fill="#2B77EF"
p-id="32609"
/><path
d="M800.9728 430.6944h-32.3584a3.6864 3.6864 0 1 1 0-7.3728h32.3584a3.6864 3.6864 0 1 1 0 7.3728z"
fill="#82A0F6"
p-id="32610"
/><path
d="M785.066667 446.6688a3.6864 3.6864 0 0 1-3.6864-3.6864v-32.3584a3.6864 3.6864 0 0 1 7.3728 0v32.3584a3.6864 3.6864 0 0 1-3.6864 3.6864z"
fill="#82A0F6"
p-id="32611"
/><path
d="M531.8656 645.7344h-315.392a31.9488 31.9488 0 0 1 0-63.8976h315.392a31.9488 31.9488 0 1 1 0 63.8976z"
fill="#E3EEFF"
p-id="32612"
/><path
d="M533.0944 759.6032h-4.5056a56.9344 56.9344 0 0 1 0-113.8688h4.5056v-63.8976H237.7728v63.8976h107.3152a56.9344 56.9344 0 1 1 0 113.8688H237.7728v63.8976H532.48v-63.8976z"
fill="#E3EEFF"
p-id="32613"
/><path
d="M550.7072 484.693333a21.640533 21.640533 0 0 0-20.8896-21.2992h-211.626667a21.2992 21.2992 0 0 0 0 42.5984h86.016a38.0928 38.0928 0 1 1 0 76.1856H332.8v43.008h197.0176v-43.008h-3.2768a38.0928 38.0928 0 1 1 0-76.1856h3.2768A21.640533 21.640533 0 0 0 550.7072 484.693333zM669.013333 759.6032H216.4736a31.9488 31.9488 0 1 0 0 63.8976H669.013333z m11.8784 0h27.306667v63.8976h-27.306667z m59.8016 0h17.066667v63.8976h-17.6128z m67.1744 0h-40.5504v63.8976h40.5504a31.9488 31.9488 0 0 0 0-63.8976z"
fill="#E3EEFF"
p-id="32614"
/><path
d="M803.4304 273.408a3.6864 3.6864 0 0 1-3.6864-3.6864V245.76l-16.7936 16.7936a3.754667 3.754667 0 0 1-5.3248-5.3248l16.7936-16.7936h-24.1664a3.6864 3.6864 0 1 1 0-7.3728h23.7568l-16.7936-16.7936a3.754667 3.754667 0 0 1 5.3248-5.3248l16.7936 16.7936v-23.552a3.6864 3.6864 0 0 1 7.3728 0v23.7568l16.7936-16.7936a3.754667 3.754667 0 0 1 5.3248 5.3248L812.373333 233.2672h23.7568a3.6864 3.6864 0 0 1 0 7.3728H812.373333l16.7936 16.7936a3.754667 3.754667 0 0 1-5.3248 5.3248L806.7072 245.76v23.7568a3.140267 3.140267 0 0 1-3.2768 3.8912z"
fill="#82A0F6"
p-id="32615"
/><path
d="M669.013333 735.8464c0 20.0704-13.653333 36.4544-30.72 36.4544 16.7936 0 30.72 16.384 30.72 36.4544 0-20.0704 13.653333-36.4544 30.72-36.4544-17.066667 0-30.72-16.384-30.72-36.4544z"
fill="#76A6EF"
p-id="32616"
/><path
d="M644.5056 269.312a22.254933 22.254933 0 0 1-13.653333-4.9152 20.48 20.48 0 0 1-7.3728-14.7456 21.572267 21.572267 0 0 1 4.573866-15.5648 20.48 20.48 0 0 1 14.7456-7.3728 21.572267 21.572267 0 0 1 15.5648 4.9152 20.48 20.48 0 0 1 7.3728 14.7456 21.572267 21.572267 0 0 1-4.9152 15.5648 20.48 20.48 0 0 1-14.7456 7.3728z m0-35.2256h-1.2288a13.653333 13.653333 0 0 0-7.7824 24.1664 15.018667 15.018667 0 0 0 9.8304 3.2768 13.653333 13.653333 0 0 0 7.7824-24.1664 12.424533 12.424533 0 0 0-8.6016-3.2768z"
fill="#82A0F6"
p-id="32617"
/><path
d="M740.7616 442.9824H612.5568A12.970667 12.970667 0 0 1 599.4496 430.08V301.2608a12.970667 12.970667 0 0 0-13.1072-13.1072H437.6576a12.970667 12.970667 0 0 0-13.1072 13.1072v128.2048a12.970667 12.970667 0 0 1-13.1072 13.1072H282.8288a12.970667 12.970667 0 0 0-13.1072 13.1072v148.2752a12.970667 12.970667 0 0 0 13.1072 13.1072h128.2048a12.970667 12.970667 0 0 1 13.1072 13.1072v128.2048a12.970667 12.970667 0 0 0 13.1072 13.038933h148.6848a12.970667 12.970667 0 0 0 13.1072-13.1072V630.5792a12.970667 12.970667 0 0 1 13.1072-13.1072h128.2048a12.970667 12.970667 0 0 0 13.1072-13.1072V456.0896a12.356267 12.356267 0 0 0-12.6976-13.1072z"
fill="#C7DDFF"
p-id="32618"
/><path
d="M399.9744 452.8128h18.432c15.9744 0 29.0816-11.0592 29.0816-24.9856v-137.216h-22.9376v147.456z m2.048 158.1056h18.432c15.9744 0 29.0816 11.0592 29.0816 24.9856v136.533333h-22.9376v-146.773333z"
fill="#FFFFFF"
p-id="32619"
/><path
d="M740.7616 442.9824H612.5568A12.970667 12.970667 0 0 1 599.4496 430.08V301.2608a12.970667 12.970667 0 0 0-13.1072-13.1072H437.6576a12.970667 12.970667 0 0 0-13.1072 13.1072v2.4576h134.7584a12.970667 12.970667 0 0 1 13.1072 13.1072v128.2048a12.970667 12.970667 0 0 0 13.1072 13.1072h128.2048a12.970667 12.970667 0 0 1 13.1072 12.9024v146.0224h13.1072a12.970667 12.970667 0 0 0 13.1072-13.1072V456.0896a11.810133 11.810133 0 0 0-12.288-13.1072z"
fill="#76A6EF"
p-id="32620"
/><path
d="M295.1168 447.0784v166.2976h-12.6976L269.7216 600.746667V459.776l12.6976-12.6976z"
fill="#FFFFFF"
p-id="32621"
/><path
d="M269.7216 536.3712a6.280533 6.280533 0 0 1-6.144-6.144v-19.2512a6.144 6.144 0 1 1 12.288 0v19.2512a6.280533 6.280533 0 0 1-6.144 6.144z m0-47.104a6.280533 6.280533 0 0 1-6.144-6.144v-27.306667A19.2512 19.2512 0 0 1 282.8288 436.906667h128.2048a7.168 7.168 0 0 0 6.826667-6.826667V352.0512a6.144 6.144 0 1 1 12.288 0V430.08a19.2512 19.2512 0 0 1-19.2512 19.2512H282.8288a7.168 7.168 0 0 0-6.826667 6.826667v27.306666a6.280533 6.280533 0 0 1-6.280533 5.802667z"
fill="#2B77EF"
p-id="32622"
/><path
d="M585.9328 778.24H437.6576a19.2512 19.2512 0 0 1-19.2512-19.2512V630.5792a7.168 7.168 0 0 0-6.826667-6.826667H282.8288a19.2512 19.2512 0 0 1-19.2512-19.2512v-45.056a6.144 6.144 0 0 1 12.288 0v45.056a7.168 7.168 0 0 0 6.826667 6.826667h128.341333A19.2512 19.2512 0 0 1 430.08 630.5792v128.2048a7.168 7.168 0 0 0 6.826667 6.826667h149.026133a7.168 7.168 0 0 0 6.826667-6.826667V630.5792a19.2512 19.2512 0 0 1 19.2512-19.2512h128.2048a7.168 7.168 0 0 0 6.826666-6.826667V456.0896a7.168 7.168 0 0 0-6.826666-6.826667H612.1472A19.2512 19.2512 0 0 1 592.896 430.08V301.2608a7.168 7.168 0 0 0-6.826667-6.826667H437.6576a7.168 7.168 0 0 0-6.826667 6.826667v23.3472a6.144 6.144 0 1 1-12.288 0v-23.3472a19.2512 19.2512 0 0 1 19.2512-19.2512h148.138667a19.2512 19.2512 0 0 1 19.2512 19.2512v128.2048a7.168 7.168 0 0 0 6.826667 6.826667h128.2048a19.2512 19.2512 0 0 1 19.2512 19.2512v148.411733a19.2512 19.2512 0 0 1-19.2512 19.2512H612.1472a7.168 7.168 0 0 0-6.826667 6.826667v128.2048a19.456 19.456 0 0 1-19.387733 20.002133z"
fill="#2B77EF"
p-id="32623"
/><path
d="M556.4416 540.4672H474.9312a6.826667 6.826667 0 0 1 0-13.653333h81.5104a6.826667 6.826667 0 0 1 0 13.653333z"
fill="#F68282"
p-id="32624"
/><path
d="M508.928 574.0544V492.9536a6.826667 6.826667 0 0 1 13.653333 0v81.5104a7.168 7.168 0 0 1-6.826666 6.826667 7.509333 7.509333 0 0 1-6.826667-7.236267z"
fill="#F68282"
p-id="32625"
/></svg
>

After

Width:  |  Height:  |  Size: 7.7 KiB

View File

@@ -0,0 +1,229 @@
<!--
Copyright (C) 2025 Intel Corporation
SPDX-License-Identifier: Apache-2.0
-->
<svg
t="1710303147323"
class="icon"
viewBox="0 0 1024 1024"
version="1.1"
xmlns="http://www.w3.org/2000/svg"
p-id="33782"
width="5rem"
height="5rem"
><path
d="M617.813333 701.44C597.333333 659.911111 554.666667 631.466667 505.173333 631.466667c-66.56 0-120.604444 51.2-125.155555 116.622222-48.924444 13.653333-84.195556 56.32-84.195556 106.951111 0 59.733333 49.493333 108.088889 111.502222 111.502222h209.351112c69.404444 0 125.724444-59.164444 125.724444-132.551111 0-70.542222-55.182222-128-124.586667-132.551111z"
fill="#4598F0"
p-id="33783"
/><path
d="M529.066667 797.013333h59.164444c2.275556 0 3.413333-2.275556 3.413333-4.551111v-14.222222c0-2.275556-1.137778-4.551111-3.413333-4.551111H518.257778c-3.413333 0-4.551111 4.551111-2.275556 7.395555l10.808889 15.36c0 0.568889 1.137778 0.568889 2.275556 0.568889z m61.44 9.671111H529.066667c-2.275556 0-6.257778-1.137778-7.395556-4.551111l-23.893333-30.72c-2.275556-2.275556-4.551111-3.413333-8.533334-3.413333h-44.373333c-6.257778 0-10.808889 5.688889-10.808889 11.946667v116.622222c0 7.395556 4.551111 11.946667 10.808889 11.946667h145.635556c6.257778 0 10.808889-5.688889 10.808889-11.946667V819.2c0-7.964444-5.12-12.515556-10.808889-12.515556z m-50.631111 65.422223c0 3.413333-2.275556 5.688889-6.257778 5.688889h-73.955556c-3.413333 0-6.257778-2.275556-6.257778-5.688889s2.275556-5.688889 6.257778-5.688889h75.662222c2.275556 0 4.551111 2.275556 4.551112 5.688889z m31.288888 0c0 3.413333-2.275556 5.688889-6.257777 5.688889h-4.551111c-3.413333 0-6.257778-2.275556-6.257778-5.688889s2.275556-5.688889 6.257778-5.688889h4.551111c3.982222 0 6.257778 2.275556 6.257777 5.688889z"
fill="#FFFFFF"
p-id="33784"
/><path
d="M694.044444 466.488889h73.955556c12.515556 0 22.755556 10.24 22.755556 22.755555v73.955556c0 12.515556-10.24 22.755556-22.755556 22.755556h-73.955556c-12.515556 0-22.755556-10.24-22.755555-22.755556V489.244444c0-12.515556 10.24-22.755556 22.755555-22.755555z"
fill="#4598F0"
p-id="33785"
/><path
d="M704.853333 553.528889c1.137778 0 2.275556-1.137778 2.275556-2.275556v-48.355555c0-1.137778-1.137778-2.275556-2.275556-2.275556s-2.275556 1.137778-2.275555 2.275556v48.355555c0 1.137778 1.137778 2.275556 2.275555 2.275556"
fill="#FFFFFF"
p-id="33786"
/><path
d="M754.915556 553.528889c1.137778 0 2.275556-1.137778 2.275555-2.275556v-48.355555c0-1.137778-1.137778-2.275556-2.275555-2.275556s-2.275556 1.137778-2.275556 2.275556v48.355555c0 1.137778 1.137778 2.275556 2.275556 2.275556"
fill="#FFFFFF"
p-id="33787"
/><path
d="M757.191111 502.897778c0-1.137778-1.137778-2.275556-2.275555-2.275556h-50.062223c-1.137778 0-2.275556 1.137778-2.275555 2.275556s1.137778 2.275556 2.275555 2.275555h50.062223c1.137778 0 2.275556-1.137778 2.275555-2.275555"
fill="#FFFFFF"
p-id="33788"
/><path
d="M746.382222 551.253333c0-1.137778-1.137778-2.275556-2.275555-2.275555H705.422222c-1.137778 0-2.275556 1.137778-2.275555 2.275555s1.137778 2.275556 2.275555 2.275556h38.684445c1.137778 0 2.275556-1.137778 2.275555-2.275556"
fill="#FFFFFF"
p-id="33789"
/><path
d="M725.902222 522.24c-1.137778-0.568889-2.275556-0.568889-3.413333 0.568889l-19.342222 27.306667c-0.568889 1.137778-0.568889 2.275556 0.568889 3.413333 1.137778 0.568889 2.275556 0.568889 3.413333-0.568889l19.342222-27.306667c0.568889-1.137778 0.568889-2.844444-0.568889-3.413333"
fill="#FFFFFF"
p-id="33790"
/><path
d="M723.057778 522.24c1.137778-0.568889 2.275556-0.568889 3.413333 0.568889l19.342222 27.306667c0.568889 1.137778 0.568889 2.275556-0.568889 3.413333-1.137778 0.568889-2.275556 0.568889-3.413333-0.568889l-19.342222-27.306667c-0.568889-1.137778-0.568889-2.844444 0.568889-3.413333"
fill="#FFFFFF"
p-id="33791"
/><path
d="M738.986667 529.635556c1.137778-0.568889 2.275556-0.568889 3.413333 0.568888l14.222222 19.911112c0.568889 1.137778 0.568889 2.275556-0.568889 3.413333-1.137778 0.568889-2.275556 0.568889-3.413333-0.568889l-14.222222-19.911111c-0.568889-1.706667-0.568889-2.844444 0.568889-3.413333"
fill="#FFFFFF"
p-id="33792"
/><path
d="M861.866667 676.977778h74.524444c12.515556 0 22.755556 10.24 22.755556 22.755555v73.955556c0 12.515556-10.24 22.755556-22.755556 22.755555h-74.524444c-12.515556 0-22.755556-10.24-22.755556-22.755555v-73.955556c0-12.515556 10.24-22.755556 22.755556-22.755555z"
fill="#4598F0"
p-id="33793"
/><path
d="M888.604444 718.506667v38.115555c0 5.688889-4.551111 10.24-10.24 10.24s-10.24-4.551111-10.24-10.24 4.551111-10.24 10.24-10.24c1.137778 0 2.275556 1.137778 2.275556 2.275556s-1.137778 2.275556-2.275556 2.275555c-2.844444 0-5.688889 2.275556-5.688888 5.688889s2.275556 5.688889 5.688888 5.688889c2.844444 0 5.688889-2.275556 5.688889-5.688889v-38.115555c0-0.568889 0-1.137778 0.568889-1.706667 0.568889-0.568889 1.137778-0.568889 1.706667-0.568889 1.137778 0 2.275556 1.137778 2.275555 2.275556"
fill="#FFFFFF"
p-id="33794"
/><path
d="M926.72 714.524444v38.115556c0 5.688889-4.551111 10.24-10.24 10.24s-10.24-4.551111-10.24-10.24 4.551111-10.24 10.24-10.24c1.137778 0 2.275556 1.137778 2.275556 2.275556s-1.137778 2.275556-2.275556 2.275555c-2.844444 0-5.688889 2.275556-5.688889 5.688889s2.275556 5.688889 5.688889 5.688889c2.844444 0 5.688889-2.275556 5.688889-5.688889v-38.115556c0-0.568889 0-1.137778 0.568889-1.706666 0.568889-0.568889 1.137778-0.568889 1.706666-0.568889h0.568889c1.137778 0 1.706667 1.137778 1.706667 2.275555"
fill="#FFFFFF"
p-id="33795"
/><path
d="M926.72 713.955556c0-1.137778-1.137778-2.275556-2.844444-2.275556l-38.115556 4.551111c-1.137778 0-2.275556 1.137778-2.275556 2.844445 0 1.137778 1.137778 2.275556 2.844445 2.275555l38.115555-4.551111c1.706667-0.568889 2.844444-1.706667 2.275556-2.844444"
fill="#FFFFFF"
p-id="33796"
/><path
d="M880.64 748.657778c0 1.137778-1.137778 2.275556-2.275556 2.275555s-2.275556-1.137778-2.275555-2.275555 1.137778-2.275556 2.275555-2.275556 2.275556 1.137778 2.275556 2.275556M918.755556 744.106667c0 1.137778-1.137778 2.275556-2.275556 2.275555s-2.275556-1.137778-2.275556-2.275555 1.137778-2.275556 2.275556-2.275556 2.275556 1.137778 2.275556 2.275556"
fill="#FFFFFF"
p-id="33797"
/><path
d="M22.755556 483.555556h73.955555c12.515556 0 22.755556 10.24 22.755556 22.755555v73.955556c0 12.515556-10.24 22.755556-22.755556 22.755555H22.755556c-12.515556 0-22.755556-10.24-22.755556-22.755555V506.311111c0-12.515556 10.24-22.755556 22.755556-22.755555z"
fill="#4598F0"
p-id="33798"
/><path
d="M50.062222 557.511111c-0.568889 1.137778-1.706667 1.706667-2.844444 1.137778-1.137778-0.568889-1.706667-1.706667-1.137778-2.844445 4.551111-11.377778 5.12-23.893333 2.275556-36.408888-0.568889-1.137778 0.568889-2.844444 1.706666-2.844445 1.137778-0.568889 2.275556 0.568889 2.844445 1.706667 2.844444 13.084444 2.275556 26.737778-2.844445 39.253333z"
fill="#FFFFFF"
p-id="33799"
/><path
d="M69.404444 548.408889c1.137778 0 2.275556 1.137778 2.275556 2.275555s-1.137778 2.275556-2.275556 2.275556c-3.413333 0-6.826667 0.568889-10.808888 1.706667l-10.24 3.413333c-1.137778 0.568889-2.275556 0-2.844445-1.137778-0.568889-1.137778 0-2.844444 1.137778-3.413333 3.413333-1.706667 7.395556-2.844444 10.808889-3.982222 4.551111 0 8.533333-0.568889 11.946666-1.137778z"
fill="#FFFFFF"
p-id="33800"
/><path
d="M57.457778 539.875556c-0.568889-1.137778-0.568889-2.844444 0.568889-3.413334 1.137778-0.568889 2.275556-0.568889 3.413333 0.568889 2.844444 4.551111 6.257778 8.533333 10.24 11.946667 1.137778 1.137778 1.137778 2.275556 0 3.413333-0.568889 1.137778-2.275556 1.137778-3.413333 0-3.982222-3.413333-7.964444-7.395556-10.808889-12.515555zM36.977778 566.044444z m0 0c-0.568889 0.568889-0.568889 1.706667 0 2.275556 1.137778 1.137778 1.137778 2.275556 0 3.413333s-2.275556 1.137778-3.413334 0c-2.275556-2.275556-2.844444-6.257778-0.568888-9.102222l0.568888-0.568889c1.706667-1.706667 3.982222-2.844444 6.257778-3.982222l6.826667-3.413333c1.137778-0.568889 2.275556 0 2.844444 1.137777s0 2.844444-1.137777 3.413334c-2.275556 1.137778-3.982222 2.275556-6.257778 3.413333-1.137778 0.568889-2.844444 1.706667-5.12 3.413333 0.568889 0 0.568889 0 0 0 0.568889 0 0.568889 0 0 0z m0 0c0.568889 0 0.568889 0 0 0z m-2.275556-3.982222z"
fill="#FFFFFF"
p-id="33801"
/><path
d="M39.253333 568.32c2.844444-3.982222 5.12-7.964444 6.257778-12.515556 0.568889-1.137778 1.706667-1.706667 2.844445-1.137777 1.137778 0.568889 1.706667 1.706667 1.137777 2.844444-1.706667 4.551111-3.982222 9.102222-6.826666 13.084445v-0.568889 0.568889s0 0.568889-0.568889 0.568888c-2.275556 2.844444-6.257778 2.844444-8.533334 0.568889-1.137778-1.137778-1.137778-2.275556 0-3.413333 1.137778-1.137778 2.275556-1.137778 3.413334 0 0.568889 0.568889 1.706667 0.568889 2.275555 0z m0.568889 0z m3.413334 2.275556v-0.568889c0.568889 0.568889 0 0.568889 0 0.568889z m0 0.568888z m0 0l-1.706667-1.706666 1.706667 1.706666z m0-0.568888zM84.764444 556.942222c1.137778-1.137778 2.275556-1.137778 3.413334 0 1.137778 1.137778 1.137778 2.275556 0 3.413334-1.706667 2.275556-5.12 2.844444-7.395556 1.137777-4.551111-2.275556-8.533333-5.12-12.515555-8.533333-1.137778-1.137778-1.137778-2.275556 0-3.413333 0.568889-1.137778 2.275556-1.137778 3.413333 0 3.413333 2.844444 7.395556 5.688889 11.377778 7.964444 0 0 1.137778 0 1.706666-0.568889z"
fill="#FFFFFF"
p-id="33802"
/><path
d="M69.973333 553.528889c-1.137778 0-2.275556-1.137778-2.275555-2.275556s1.137778-2.275556 2.275555-2.275555c5.12-0.568889 10.24 0 14.791111 1.137778 1.137778 0 2.275556 0.568889 2.844445 1.706666 2.275556 2.275556 2.844444 6.257778 0.568889 9.102222-1.137778 1.137778-2.275556 1.137778-3.413334 0-1.137778-1.137778-1.137778-2.275556 0-3.413333 0.568889-0.568889 0.568889-1.706667 0-2.275555l-0.568888-0.568889c-5.12-1.137778-9.671111-1.706667-14.222223-1.137778z"
fill="#FFFFFF"
p-id="33803"
/><path
d="M460.8 409.6h73.955556c12.515556 0 22.755556 10.24 22.755555 22.755556v73.955555c0 12.515556-10.24 22.755556-22.755555 22.755556H460.8c-12.515556 0-22.755556-10.24-22.755556-22.755556V432.355556c0-12.515556 10.24-22.755556 22.755556-22.755556z"
fill="#4598F0"
p-id="33804"
/><path
d="M498.346667 454.542222c1.137778 0 2.275556-1.137778 2.275555-2.275555v-3.413334c0-1.137778-1.137778-2.275556-2.275555-2.275555s-2.275556 1.137778-2.275556 2.275555v3.413334c0 1.137778 1.137778 2.275556 2.275556 2.275555M498.346667 465.351111c1.137778 0 2.275556-1.137778 2.275555-2.275555v-3.413334c0-1.137778-1.137778-2.275556-2.275555-2.275555s-2.275556 1.137778-2.275556 2.275555v3.413334c0 1.137778 1.137778 2.275556 2.275556 2.275555M498.346667 476.728889c1.137778 0 2.275556-1.137778 2.275555-2.275556v-3.413333c0-1.137778-1.137778-2.275556-2.275555-2.275556s-2.275556 1.137778-2.275556 2.275556v3.413333c0 1.137778 1.137778 2.275556 2.275556 2.275556M508.017778 485.831111H489.244444c-1.137778 0-2.275556-1.137778-2.275555-2.275555s1.137778-2.275556 2.275555-2.275556h18.773334c1.137778 0 2.275556 1.137778 2.275555 2.275556s-1.137778 2.275556-2.275555 2.275555"
fill="#FFFFFF"
p-id="33805"
/><path
d="M510.293333 483.555556v2.844444c0 3.413333-1.137778 6.257778-3.413333 7.964444-2.275556 2.275556-5.12 3.413333-8.533333 3.413334-6.826667 0-11.946667-5.12-11.946667-11.377778V483.555556v-0.568889c0.568889-0.568889 1.137778-1.137778 2.275556-1.137778s2.275556 1.137778 2.275555 2.275555v2.844445c0 3.982222 3.413333 6.826667 7.395556 6.826667 3.982222 0 7.395556-2.844444 7.395555-6.826667V483.555556c0-1.137778 1.137778-2.275556 2.275556-2.275556s1.706667 0.568889 2.275555 1.137778v1.137778c0-0.568889 0-0.568889 0 0"
fill="#FFFFFF"
p-id="33806"
/><path
d="M490.951111 483.555556c0 1.137778-1.137778 2.275556-2.275555 2.275555s-2.275556-1.137778-2.275556-2.275555v-0.568889c0.568889-0.568889 1.137778-1.137778 2.275556-1.137778 1.706667-1.137778 2.275556 0 2.275555 1.706667"
fill="#FFFFFF"
p-id="33807"
/><path
d="M510.293333 483.555556c0 1.137778-1.137778 2.275556-2.275555 2.275555s-2.275556-1.137778-2.275556-2.275555 1.137778-2.275556 2.275556-2.275556 1.706667 0.568889 2.275555 1.137778v1.137778c0-0.568889 0-0.568889 0 0"
fill="#FFFFFF"
p-id="33808"
/><path
d="M341.333333 136.533333h73.955556c12.515556 0 22.755556 10.24 22.755555 22.755556v73.955555c0 12.515556-10.24 22.755556-22.755555 22.755556H341.333333c-12.515556 0-22.755556-10.24-22.755555-22.755556V159.288889c0-12.515556 10.24-22.755556 22.755555-22.755556z"
fill="#4598F0"
p-id="33809"
/><path
d="M349.866667 197.404444c0-1.137778 1.137778-2.275556 2.275555-2.275555h49.493334c1.137778 0 2.275556 1.137778 2.275555 2.275555s-1.137778 2.275556-2.275555 2.275556h-49.493334c-1.137778 0-2.275556-1.137778-2.275555-2.275556"
fill="#FFFFFF"
p-id="33810"
/><path
d="M349.866667 219.022222c0-1.137778 1.137778-2.275556 2.275555-2.275555h49.493334c1.137778 0 2.275556 1.137778 2.275555 2.275555s-1.137778 2.275556-2.275555 2.275556h-49.493334c-1.137778 0-2.275556-1.137778-2.275555-2.275556"
fill="#FFFFFF"
p-id="33811"
/><path
d="M387.413333 175.786667c0-1.137778-1.137778-2.275556-2.275555-2.275556h-22.755556c-1.137778 0-2.275556 1.137778-2.275555 2.275556s1.137778 2.275556 2.275555 2.275555h22.755556c1.137778 0.568889 2.275556-0.568889 2.275555-2.275555M354.986667 175.786667c0-1.137778-1.137778-2.275556-2.275556-2.275556s-2.275556 1.137778-2.275555 2.275556 1.137778 2.275556 2.275555 2.275555c1.137778 0.568889 2.275556-0.568889 2.275556-2.275555"
fill="#FFFFFF"
p-id="33812"
/><path
d="M153.6 676.977778h73.955556c12.515556 0 22.755556 10.24 22.755555 22.755555v73.955556c0 12.515556-10.24 22.755556-22.755555 22.755555H153.6c-12.515556 0-22.755556-10.24-22.755556-22.755555v-73.955556c0-12.515556 10.24-22.755556 22.755556-22.755555z"
fill="#4598F0"
p-id="33813"
/><path
d="M178.062222 714.524444v50.062223c0 1.137778-1.137778 2.275556-2.275555 2.275555s-2.275556-1.137778-2.275556-2.275555v-50.062223c0-1.137778 1.137778-2.275556 2.275556-2.275555 0.568889 0 1.137778 0 1.706666 0.568889 0.568889 0 0.568889 0.568889 0.568889 1.706666"
fill="#FFFFFF"
p-id="33814"
/><path
d="M209.351111 729.884444c0 9.671111-7.964444 18.204444-18.204444 18.204445-3.413333 0-6.257778-1.137778-9.102223-2.275556-0.568889-0.568889-1.137778-1.137778-1.137777-1.706666 0-1.137778 1.137778-2.275556 2.275555-2.275556h1.137778c1.706667 1.137778 4.551111 1.706667 6.826667 1.706667 7.395556 0 13.084444-5.688889 13.084444-13.084445 0-7.395556-5.688889-13.084444-13.084444-13.084444s-13.084444 5.688889-13.084445 13.084444v11.946667c-2.844444-3.413333-4.551111-7.395556-4.551111-11.946667s1.706667-9.102222 4.551111-11.946666c3.413333-3.413333 7.964444-5.688889 13.084445-5.688889 10.24-1.137778 18.204444 7.395556 18.204444 17.066666"
fill="#FFFFFF"
p-id="33815"
/><path
d="M185.457778 743.537778c0 1.137778-1.137778 2.275556-2.275556 2.275555H182.044444c-0.568889-0.568889-1.137778-1.137778-1.137777-1.706666 0-1.137778 1.137778-2.275556 2.275555-2.275556h1.137778c1.137778 0 1.137778 0.568889 1.137778 1.706667"
fill="#FFFFFF"
p-id="33816"
/><path
d="M238.933333 375.466667h73.955556c12.515556 0 22.755556 10.24 22.755555 22.755555v73.955556c0 12.515556-10.24 22.755556-22.755555 22.755555H238.933333c-12.515556 0-22.755556-10.24-22.755555-22.755555V398.222222c0-12.515556 10.24-22.755556 22.755555-22.755555z"
fill="#4598F0"
p-id="33817"
/><path
d="M250.311111 417.564444c-1.137778 0.568889-2.844444-0.568889-2.844444-1.706666-0.568889-1.137778 0.568889-2.275556 1.706666-2.844445 1.137778-0.568889 2.844444 0.568889 2.844445 1.706667 0.568889 1.706667-0.568889 2.844444-1.706667 2.844444"
fill="#FFFFFF"
p-id="33818"
/><path
d="M286.72 463.644444c1.137778 0.568889 2.844444-0.568889 2.844444-1.706666l12.515556-45.511111c0.568889-1.137778-0.568889-2.275556-1.706667-2.844445-1.137778-0.568889-2.844444 0.568889-2.844444 1.706667l-12.515556 45.511111c-0.568889 1.137778 0.568889 2.275556 1.706667 2.844444"
fill="#FFFFFF"
p-id="33819"
/><path
d="M261.688889 463.644444c1.137778 0.568889 2.844444-0.568889 2.844444-1.706666l12.515556-45.511111c0.568889-1.137778-0.568889-2.275556-1.706667-2.844445-1.137778-0.568889-2.844444 0.568889-2.844444 1.706667l-12.515556 45.511111c-0.568889 1.137778 0.568889 2.275556 1.706667 2.844444"
fill="#FFFFFF"
p-id="33820"
/><path
d="M287.857778 463.644444c-1.137778 0.568889-2.844444-0.568889-2.844445-1.706666l-12.515555-45.511111c-0.568889-1.137778 0.568889-2.275556 1.706666-2.844445 1.137778-0.568889 2.844444 0.568889 2.844445 1.706667l12.515555 45.511111c0.568889 1.137778-0.568889 2.275556-1.706666 2.844444"
fill="#FFFFFF"
p-id="33821"
/><path
d="M262.826667 463.644444c-1.137778 0.568889-2.844444-0.568889-2.844445-1.706666l-10.24-36.977778c-0.568889-1.137778 0.568889-2.275556 1.706667-2.844444 1.137778-0.568889 2.844444 0.568889 2.844444 1.706666l10.24 36.977778c0.568889 1.137778-0.568889 2.275556-1.706666 2.844444"
fill="#FFFFFF"
p-id="33822"
/><path
d="M739.555556 45.511111h73.955555c12.515556 0 22.755556 10.24 22.755556 22.755556v73.955555c0 12.515556-10.24 22.755556-22.755556 22.755556h-73.955555c-12.515556 0-22.755556-10.24-22.755556-22.755556V68.266667c0-12.515556 10.24-22.755556 22.755556-22.755556z"
fill="#4598F0"
p-id="33823"
/><path
d="M754.346667 131.413333c-1.137778-1.137778-1.137778-2.275556 0-3.413333l44.373333-43.804444c1.137778-1.137778 2.275556-1.137778 3.413333 0 1.137778 1.137778 1.137778 2.275556 0 3.413333l-44.373333 43.804444c-0.568889 0.568889-2.275556 0.568889-3.413333 0"
fill="#FFFFFF"
p-id="33824"
/><path
d="M802.133333 131.413333c1.137778-1.137778 1.137778-2.275556 0-3.413333l-22.186666-22.186667c-1.137778-1.137778-2.275556-1.137778-3.413334 0-1.137778 1.137778-1.137778 2.275556 0 3.413334l22.186667 22.186666c1.137778 0.568889 2.844444 0.568889 3.413333 0"
fill="#FFFFFF"
p-id="33825"
/><path
d="M774.257778 102.968889c1.137778-1.137778 1.137778-2.275556 0-3.413333l-15.928889-15.928889c-1.137778-1.137778-2.275556-1.137778-3.413333 0-1.137778 1.137778-1.137778 2.275556 0 3.413333l15.928888 15.928889c0.568889 1.137778 2.275556 1.137778 3.413334 0"
fill="#FFFFFF"
p-id="33826"
/><path
d="M927.288889 375.466667h73.955555c12.515556 0 22.755556 10.24 22.755556 22.755555v73.955556c0 12.515556-10.24 22.755556-22.755556 22.755555h-73.955555c-12.515556 0-22.755556-10.24-22.755556-22.755555V398.222222c0-12.515556 10.24-22.755556 22.755556-22.755555z"
fill="#4598F0"
p-id="33827"
/><path
d="M943.786667 409.6h41.528889c1.137778 0 2.275556 1.137778 2.275555 2.275556s-1.137778 2.275556-2.275555 2.275555h-41.528889c-1.137778 0-2.275556-1.137778-2.275556-2.275555s1.137778-2.275556 2.275556-2.275556z"
fill="#FFFFFF"
fill-opacity=".8"
p-id="33828"
/><path
d="M946.631111 411.875556v49.493333c0 1.137778-1.137778 2.275556-2.275555 2.275555s-2.275556-1.137778-2.275556-2.275555v-49.493333c0-1.137778 1.137778-2.275556 2.275556-2.275556s2.275556 1.137778 2.275555 2.275556zM987.591111 411.875556v34.133333c0 1.137778-1.137778 2.275556-2.275555 2.275555s-2.275556-1.137778-2.275556-2.275555v-34.133333c0-1.137778 1.137778-2.275556 2.275556-2.275556s2.275556 1.137778 2.275555 2.275556z"
fill="#FFFFFF"
fill-opacity=".8"
p-id="33829"
/><path
d="M943.786667 459.662222h15.36c1.137778 0 2.275556 1.137778 2.275555 2.275556s-1.137778 2.275556-2.275555 2.275555h-15.36c-1.137778 0-2.275556-1.137778-2.275556-2.275555s1.137778-2.275556 2.275556-2.275556zM955.733333 421.546667h15.36c1.137778 0 2.275556 1.137778 2.275556 2.275555s-1.137778 2.275556-2.275556 2.275556H955.733333c-1.137778 0-2.275556-1.137778-2.275555-2.275556s1.137778-2.275556 2.275555-2.275555zM955.733333 431.786667h15.36c1.137778 0 2.275556 1.137778 2.275556 2.275555s-1.137778 2.275556-2.275556 2.275556H955.733333c-1.137778 0-2.275556-1.137778-2.275555-2.275556s1.137778-2.275556 2.275555-2.275555zM968.248889 460.231111l15.36-15.36c1.137778-1.137778 2.275556-1.137778 3.413333 0 1.137778 1.137778 1.137778 2.275556 0 3.413333l-15.36 15.36c-1.137778 1.137778-2.275556 1.137778-3.413333 0-1.137778-1.137778-1.137778-2.844444 0-3.413333z"
fill="#FFFFFF"
fill-opacity=".8"
p-id="33830"
/><path
d="M969.386667 443.733333h15.36c1.137778 0 2.275556 1.137778 2.275555 2.275556s-1.137778 2.275556-2.275555 2.275555h-15.36c-1.137778 0-2.275556-1.137778-2.275556-2.275555s1.137778-2.275556 2.275556-2.275556z"
fill="#FFFFFF"
fill-opacity=".8"
p-id="33831"
/><path
d="M972.231111 446.008889v15.36c0 1.137778-1.137778 2.275556-2.275555 2.275555s-2.275556-1.137778-2.275556-2.275555v-15.36c0-1.137778 1.137778-2.275556 2.275556-2.275556s2.275556 1.137778 2.275555 2.275556z"
fill="#FFFFFF"
fill-opacity=".8"
p-id="33832"
/><path
d="M585.955556 216.177778h73.955555c12.515556 0 22.755556 10.24 22.755556 22.755555v73.955556c0 12.515556-10.24 22.755556-22.755556 22.755555h-73.955555c-12.515556 0-22.755556-10.24-22.755556-22.755555V238.933333c0-12.515556 10.24-22.755556 22.755556-22.755555z"
fill="#4598F0"
p-id="33833"
/><path
d="M593.351111 267.946667V284.444444c0 4.551111 3.982222 8.533333 8.533333 8.533334h26.737778c4.551111 0 8.533333-3.982222 8.533334-8.533334v-16.497777c0-4.551111-3.982222-8.533333-8.533334-8.533334h-26.737778c-4.551111 0-8.533333 3.982222-8.533333 8.533334zM652.515556 267.946667c2.275556-1.706667 4.551111-1.137778 4.551111 2.275555v11.946667c0 3.413333-2.275556 3.982222-4.551111 2.275555l-6.257778-4.551111c-2.275556-1.706667-2.275556-5.12 0-6.826666l6.257778-5.12z"
fill="#4598F0"
p-id="33834"
/></svg
>

After

Width:  |  Height:  |  Size: 21 KiB

Some files were not shown because too many files have changed in this diff Show More