Compare commits: r0.8.1...llama3.2_s
213 commits
Commit SHA1s (213):

e6fde1456d 954a22051b 6f4b00f829 3fb60608b3 c35fe0b429 28f5e4a268
d55a33dda1 daf2a4fad7 3ce395582b 7eaab93d0b bc817700b9 bd811bd622
05f9828e77 6c364487d3 21e215c5d5 a09395e4a4 f04f061f8c 872e93e4bd
2f03a3a894 372d78c2ac 933c3d3445 88829c9381 d85ec0947c dc94026d98
1e130314d9 b205dc7571 3b70fb0d42 412a0b00c3 96d5cd9127 0bb0abb0d3
e0b3b579a3 e5affb93ab e2a74f7463 79e947e44f bceacdc804 375ea7a90c
06117077b0 b6cce35a93 06696c8e58 bc4bbfa849 edcc50fe97 2d28bebac6
b84c98983d ba17031198 f990f7966e 87e51d5c36 e1b8ce053b 558ea3bb7f
4112fd0b69 e1022911b6 3f2e7b73ac 264759d85a d42292967c a3fa0d6469
5ab27b63ff 62dbb6daee 10fe3c6066 262a6f6055 e48532e750 be8e283f6b
7b2194f71c 2dd69dcf73 1a934afb3a 5c67204734 d2bab99835 d97882ec8e
63406dc050 ff6f841ec0 88fde629ad 1144fae248 504228eea2 d73129cbf0
ba94e0130d aebc23f5ae 36fb9a987d 0869029ef2 fa12083e35 a2745b22a7
def19b449e ebe6b473e9 0629696333 4bd7841f17 1d1e1f90b6 67394b88fa
947936ed7b 4b0bc263ab 758d236463 ac3486038c e0bc5f2a4d 71857f50c5
6b617d6743 43b2ae59a1 6730b242cc 4a51874e4d 995a62c9d9 9cf1d88b6d
a8244c40ea 55d287dfcf 3563f5db6b 8c40204eda afc3341156 e5ec38c796
4c78f8cbbb adb157f2e7 07baa8f922 7f897979c4 22d066a8d7 c6d811ab11
efa4a5aaa4 2ef83fc67b fbaa0243ee 675ea4a383 2a6af6491a f5f1e323bb
d487093d10 e6f5d13ecc dba908aa22 814164dc4f cc84847082 f4f4da2dca
1e47444559 2a2ff45e2b 6a679ba80f 84a781a1a3 32afb6501c 035f39f0d9
6f3e54a22a 1874dfd148 7a0fca73e6 beda609b4b 993688ac91 5fde666c43
4133757642 10c81f1c57 dad8eb4b82 af21e94a29 f78aa9ee2f c25063f4bb
4f3be23efa 9657f7bc83 ac324a9ec2 dfaf47978d acbaaf8ff0 06cb308611
e6b4fff05c a54ffd2c1e f3ffcd50b3 947cbe39b8 fbb81b67db 5d39506c5c
566cf93c34 771975510a 6674832162 67df2804de 46af6f3bc4 343d614591
87617e761c db2d2bd1a1 4fa37e7842 c73e4e0f26 ba78b4c994 01c1b7504f
c016d8264a 4fd3517f23 503a1a9844 08f57fa54a 5a9c109e35 c327972776
f45e4c6956 5dcadf3d3f 3363a37197 b2771ad3f2 e81e0e557c 71363a6b9d
a39f23a16e c9f9acab61 040d2b7fd9 6296e9f2fb c86cf8536d 039014fbbf
1c07a38457 e93146b33e a6385bc6fd c26d0f62b8 e71aba0080 cfcac3f0ec
d68be058f5 45cf553d36 1c23d87aa2 64bfea9054 0a6bad0ab9 4f7fc39d66
80e3e2a2d3 8c384e0314 3c9e2aaffd acdd712929 c297155bea 923cf69e63
7a67298f19 a5ed2233b5 e12baca3b8 939502dba1 a072441c06 ed483719a8
14621f8492 2390920b1d 02a15366bc f08d4115db 5ac77f78da ebc165a6aa
ad8ca8886e 88eeb0d7e6 e22d41362d
.github/CODEOWNERS (vendored): 10 changed lines, Normal file → Executable file
@@ -1,9 +1,13 @@
/AgentQnA/ xuhui.ren@intel.com
/AudioQnA/ sihan.chen@intel.com
/ChatQnA/ liang1.lv@intel.com
/CodeGen/ liang1.lv@intel.com
/CodeTrans/ sihan.chen@intel.com
/DocSum/ sihan.chen@intel.com
/FaqGen/ letong.han@intel.com
/SearchQnA/ letong.han@intel.com
/DocSum/ letong.han@intel.com
/DocIndexRetriever/ xuhui.ren@intel.com chendi.xue@intel.com
/FaqGen/ xinyao.wang@intel.com
/SearchQnA/ sihan.chen@intel.com
/Translation/ liang1.lv@intel.com
/VisualQnA/ liang1.lv@intel.com
/ProductivitySuite/ hoong.tee.yeoh@intel.com
/*/ liang1.lv@intel.com
.github/ISSUE_TEMPLATE/1_bug_template.yml (vendored, Normal file): 128 changed lines
@@ -0,0 +1,128 @@
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

name: Report Bug
description: Used to report bug
title: "[Bug]"
body:
  - type: dropdown
    id: priority
    attributes:
      label: Priority
      options:
        - Undecided
        - P1-Stopper
        - P2-High
        - P3-Medium
        - P4-Low
      default: 0
    validations:
      required: true

  - type: dropdown
    id: os
    attributes:
      label: OS type
      options:
        - Ubuntu
        - RedHat
        - SUSE
        - Windows
        - Mac
        - BSD
        - Other (Please let us know in description)
    validations:
      required: true

  - type: dropdown
    id: hardware
    attributes:
      label: Hardware type
      options:
        - Xeon-GNR
        - Xeon-EMR
        - Xeon-SPR
        - Xeon-ICX
        - Xeon-other (Please let us know in description)
        - Gaudi3
        - Gaudi2
        - AI-PC (Please let us know in description)
        - CPU-other (Please let us know in description)
        - GPU-PVC
        - GPU-Flex
        - GPU-Arc
        - GPU-Arc-MTL
        - GPU-Nvidia
        - GPU-AMD
        - GPU-other (Please let us know in description)
    validations:
      required: true

  - type: checkboxes
    id: install
    attributes:
      label: Installation method
      options:
        - label: Pull docker images from hub.docker.com
        - label: Build docker images from source
    validations:
      required: true

  - type: checkboxes
    id: deploy
    attributes:
      label: Deploy method
      options:
        - label: Docker compose
        - label: Docker
        - label: Kubernetes
        - label: Helm
    validations:
      required: true

  - type: dropdown
    id: node
    attributes:
      label: Running nodes
      options:
        - Single Node
        - Multiple Nodes
      default: 0
    validations:
      required: true

  - type: textarea
    id: version
    attributes:
      label: What's the version?
      description: Docker ID in hub.docker.com or commit ID of Dockerfile.
      placeholder:
    validations:
      required: true

  - type: textarea
    id: problem
    attributes:
      label: Description
      description: What is the problem, question, or error?
    validations:
      required: true

  - type: textarea
    id: reproduce
    attributes:
      label: Reproduce steps
      description:
    validations:
      required: true

  - type: textarea
    id: log
    attributes:
      label: Raw log
      description: Provide the log of RESTFul API or docker for micro-service by `docker logs CONTAINER`
      render: shell
    validations:
      required: false
.github/ISSUE_TEMPLATE/2_feaure_template.yml (vendored, Normal file): 79 changed lines
@@ -0,0 +1,79 @@
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

name: Report Feature
description: Used to report feature
title: "[Feature]"
body:
  - type: dropdown
    id: priority
    attributes:
      label: Priority
      options:
        - Undecided
        - P1-Stopper
        - P2-High
        - P3-Medium
        - P4-Low
      default: 0
    validations:
      required: true

  - type: dropdown
    id: os
    attributes:
      label: OS type
      options:
        - Ubuntu
        - RedHat
        - SUSE
        - Windows
        - Mac
        - BSD
        - Other (Please let us know in description)
    validations:
      required: true

  - type: dropdown
    id: hardware
    attributes:
      label: Hardware type
      options:
        - Xeon-GNR
        - Xeon-EMR
        - Xeon-SPR
        - Xeon-ICX
        - Xeon-other (Please let us know in description)
        - Gaudi3
        - Gaudi2
        - AI-PC (Please let us know in description)
        - CPU-other (Please let us know in description)
        - GPU-PVC
        - GPU-Flex
        - GPU-Arc
        - GPU-Arc-MTL
        - GPU-Nvidia
        - GPU-AMD
        - GPU-other (Please let us know in description)
    validations:
      required: true

  - type: dropdown
    id: node
    attributes:
      label: Running nodes
      options:
        - Single Node
        - Multiple Nodes
      default: 0
    validations:
      required: true

  - type: textarea
    id: problem
    attributes:
      label: Description
      description: Describe your problem and expectation, avoid to ask solution (we would provide better solution).
      placeholder: As cloud/AI PC user, I want xxxx, so that I/customer can xxx.
    validations:
      required: true
.github/ISSUE_TEMPLATE/config.yml (vendored, Normal file): 4 changed lines
@@ -0,0 +1,4 @@
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

blank_issues_enabled: true
.github/workflows/VisualQnA.yml (vendored): 50 changed lines (file deleted)
@@ -1,50 +0,0 @@
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

name: VisualQnA-test

on:
  pull_request_target:
    branches: [main]
    types: [opened, reopened, ready_for_review, synchronize] # added `ready_for_review` since draft is skipped
    paths:
      - VisualQnA/**
      - "!**.md"
      - "!**/ui/**"
      - .github/workflows/VisualQnA.yml
  workflow_dispatch:

# If there is a new commit, the previous jobs will be canceled
concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true

jobs:
  VisualQnA:
    runs-on: aise-cluster
    strategy:
      matrix:
        job_name: ["basic"]
      fail-fast: false
    steps:
      - name: Clean Up Working Directory
        run: sudo rm -rf ${{github.workspace}}/*

      - name: Checkout out Repo
        uses: actions/checkout@v4
        with:
          ref: "refs/pull/${{ github.event.number }}/merge"

      - name: Run Test
        env:
          HUGGINGFACEHUB_API_TOKEN: ${{ secrets.HUGGINGFACEHUB_API_TOKEN }}
        run: |
          cd ${{ github.workspace }}/VisualQnA/tests
          bash test_${{ matrix.job_name }}_inference.sh

      - name: Publish pipeline artifact
        if: ${{ !cancelled() }}
        uses: actions/upload-artifact@v4
        with:
          name: ${{ matrix.job_name }}
          path: ${{ github.workspace }}/VisualQnA/tests/*.log
.github/workflows/_example-workflow.yml (vendored, Normal file): 121 changed lines
@@ -0,0 +1,121 @@
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

name: Example jobs
permissions: read-all
on:
  workflow_call:
    inputs:
      node:
        required: true
        type: string
      example:
        required: true
        type: string
      tag:
        default: "latest"
        required: false
        type: string
      build:
        default: true
        required: false
        type: boolean
      test_compose:
        default: false
        required: false
        type: boolean
      test_k8s:
        default: false
        required: false
        type: boolean
      test_gmc:
        default: false
        required: false
        type: boolean
      opea_branch:
        default: "main"
        required: false
        type: string
jobs:
  ####################################################################################################
  # Image Build
  ####################################################################################################
  build-images:
    runs-on: "docker-build-${{ inputs.node }}"
    steps:
      - name: Clean Up Working Directory
        run: sudo rm -rf ${{github.workspace}}/*

      - name: Get Checkout Ref
        run: |
          if [ "${{ github.event_name }}" == "pull_request" ] || [ "${{ github.event_name }}" == "pull_request_target" ]; then
            echo "CHECKOUT_REF=refs/pull/${{ github.event.number }}/merge" >> $GITHUB_ENV
          else
            echo "CHECKOUT_REF=${{ github.ref }}" >> $GITHUB_ENV
          fi

      - name: Checkout out GenAIExamples
        uses: actions/checkout@v4
        with:
          ref: ${{ env.CHECKOUT_REF }}
          fetch-depth: 0

      - name: Clone Required Repo
        run: |
          cd ${{ github.workspace }}/${{ inputs.example }}/docker_image_build
          docker_compose_path=${{ github.workspace }}/${{ inputs.example }}/docker_image_build/build.yaml
          if [[ $(grep -c "vllm:" ${docker_compose_path}) != 0 ]]; then
            git clone https://github.com/vllm-project/vllm.git
            cd vllm && git rev-parse HEAD && cd ../
          fi
          git clone https://github.com/opea-project/GenAIComps.git
          cd GenAIComps && git checkout ${{ inputs.opea_branch }} && git rev-parse HEAD && cd ../

      - name: Build Image
        if: ${{ fromJSON(inputs.build) }}
        uses: opea-project/validation/actions/image-build@main
        with:
          work_dir: ${{ github.workspace }}/${{ inputs.example }}/docker_image_build
          docker_compose_path: ${{ github.workspace }}/${{ inputs.example }}/docker_image_build/build.yaml
          registry: ${OPEA_IMAGE_REPO}opea
          tag: ${{ inputs.tag }}

  ####################################################################################################
  # Docker Compose Test
  ####################################################################################################
  test-example-compose:
    needs: [build-images]
    if: ${{ fromJSON(inputs.test_compose) }}
    uses: ./.github/workflows/_run-docker-compose.yml
    with:
      tag: ${{ inputs.tag }}
      example: ${{ inputs.example }}
      hardware: ${{ inputs.node }}
    secrets: inherit

  ####################################################################################################
  # K8S Test
  ####################################################################################################
  test-k8s-manifest:
    needs: [build-images]
    if: ${{ fromJSON(inputs.test_k8s) }}
    uses: ./.github/workflows/_manifest-e2e.yml
    with:
      example: ${{ inputs.example }}
      hardware: ${{ inputs.node }}
      tag: ${{ inputs.tag }}
      context: "CD"
    secrets: inherit

  ####################################################################################################
  # GMC Test
  ####################################################################################################
  test-gmc-pipeline:
    needs: [build-images]
    if: ${{ fromJSON(inputs.test_gmc) }}
    uses: ./.github/workflows/_gmc-e2e.yml
    with:
      example: ${{ inputs.example }}
      hardware: ${{ inputs.node }}
    secrets: inherit
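For orientation, a minimal sketch of how a per-example workflow might call this reusable workflow. The caller name, trigger, and the `ChatQnA`/`gaudi`/`ci` values below are illustrative assumptions, not part of this diff:

```yaml
# Hypothetical caller of _example-workflow.yml (illustrative only).
name: ChatQnA CI
on:
  pull_request:
jobs:
  chatqna:
    uses: ./.github/workflows/_example-workflow.yml
    with:
      node: gaudi          # selects the docker-build-gaudi / test runners
      example: ChatQnA     # example directory name in the repo
      tag: ci              # image tag used by the build and compose tests
      test_compose: true   # enables the _run-docker-compose.yml job
    secrets: inherit       # forwards HUGGINGFACEHUB_API_TOKEN and friends
```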
.github/workflows/_get-image-list.yml (vendored, Normal file): 54 changed lines
@@ -0,0 +1,54 @@
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

name: Get Image List
permissions: read-all
on:
  workflow_call:
    inputs:
      examples:
        default: ""
        required: false
        type: string
      images:
        default: ""
        required: false
        type: string
    outputs:
      matrix:
        description: "Image List"
        value: ${{ jobs.get-image-list.outputs.matrix }}

jobs:
  get-image-list:
    runs-on: ubuntu-latest
    outputs:
      matrix: ${{ steps.get-matrix.outputs.matrix }}
    steps:
      - name: Checkout out Repo
        uses: actions/checkout@v4

      - name: Set Matrix
        id: get-matrix
        run: |
          image_list=[]
          if [[ ! -z "${{ inputs.examples }}" ]]; then
            pip install yq
            examples=($(echo ${{ inputs.examples }} | tr ',' ' '))
            for example in ${examples[@]}
            do
              images=$(cat ${{ github.workspace }}/${example}/docker_image_build/build.yaml | yq -r '.[]' | jq 'keys' | jq -c '.')
              image_list=$(echo ${image_list} | jq -s '.[0] + .[1] | unique' - <(echo ${images}))
            done
          fi

          if [[ ! -z "${{ inputs.images }}" ]]; then
            images=($(echo ${{ inputs.images }} | tr ',' ' '))
            input_image_list=$(printf '%s\n' "${images[@]}" | sort -u | jq -R '.' | jq -sc '.')
            image_list=$(echo ${image_list} | jq -s '.[0] + .[1] | unique' - <(echo ${input_image_list}))
          fi

          echo "print image list..."
          echo "$image_list" | jq . | jq -r '.[]'
          echo "end of image list..."
          echo "matrix=$(echo ${image_list} | jq -c '.')" >> $GITHUB_OUTPUT
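The `matrix` output above is a JSON array of image names (for example `["chatqna","chatqna-ui","gmcrouter"]`; values illustrative). The manual publish and scan workflows later in this diff consume it with `fromJSON`, roughly as in this sketch:

```yaml
# Hypothetical consumer of _get-image-list.yml (illustrative only).
jobs:
  get-image-list:
    uses: ./.github/workflows/_get-image-list.yml
    with:
      examples: "ChatQnA"
      images: "gmcrouter"
  per-image:
    needs: get-image-list
    strategy:
      matrix:
        image: ${{ fromJSON(needs.get-image-list.outputs.matrix) }}
    runs-on: ubuntu-latest
    steps:
      - run: echo "processing ${{ matrix.image }}"   # one job per image name
```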
@@ -11,14 +11,10 @@ on:
        required: false
        type: string
        default: '.github|README.md|*.txt'
      xeon_server_label:
      test_mode:
        required: false
        type: string
        default: 'xeon'
      gaudi_server_label:
        required: false
        type: string
        default: 'gaudi'
        default: 'docker_compose'
    outputs:
      run_matrix:
        description: "The matrix string"
@@ -49,29 +45,19 @@ jobs:
      - name: Get test matrix
        id: get-test-matrix
        run: |
          set -xe
          if [ "${{ github.event_name }}" == "pull_request" ] || [ "${{ github.event_name }}" == "pull_request_target" ]; then
            base_commit=${{ github.event.pull_request.base.sha }}
            LATEST_COMMIT_SHA=$(curl -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" \
              "https://api.github.com/repos/opea-project/GenAIExamples/commits?sha=${{ github.event.pull_request.base.ref }}" | jq -r '.[0].sha')
            echo "Latest commit SHA is $LATEST_COMMIT_SHA"
            base_commit=$LATEST_COMMIT_SHA
          else
            base_commit=$(git rev-parse HEAD~1) # push event
          fi
          merged_commit=$(git log -1 --format='%H')
          changed_files="$(git diff --name-only ${base_commit} ${merged_commit} | \
            grep -vE '${{ inputs.diff_excluded_files }}')" || true
          examples=$(printf '%s\n' "${changed_files[@]}" | grep '/' | cut -d'/' -f1 | sort -u)
          run_matrix="{\"include\":["
          for example in ${examples}; do
            run_hardware=""
            if [ $(printf '%s\n' "${changed_files[@]}" | grep ${example} | grep -c gaudi) != 0 ]; then run_hardware="gaudi"; fi
            if [ $(printf '%s\n' "${changed_files[@]}" | grep ${example} | grep -c xeon) != 0 ]; then run_hardware="xeon ${run_hardware}"; fi
            if [ "$run_hardware" == "" ]; then run_hardware="gaudi"; fi
            for hw in ${run_hardware}; do
              if [ "$hw" == "gaudi" ] && [ "${{ inputs.gaudi_server_label }}" != "" ]; then
                run_matrix="${run_matrix}{\"example\":\"${example}\",\"hardware\":\"${{ inputs.gaudi_server_label }}\"},"
              elif [ "${{ inputs.xeon_server_label }}" != "" ]; then
                run_matrix="${run_matrix}{\"example\":\"${example}\",\"hardware\":\"${{ inputs.xeon_server_label }}\"},"
              fi
            done
          done
          run_matrix=$run_matrix"]}"
          echo "run_matrix=${run_matrix}" >> $GITHUB_OUTPUT
          echo "changed_files=$changed_files"
          export changed_files=$changed_files
          export test_mode=${{ inputs.test_mode }}
          export WORKSPACE=${{ github.workspace }}
          bash .github/workflows/scripts/get_test_matrix.sh
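For reference, the `run_matrix` value assembled by the old inline loop (and by `get_test_matrix.sh` in the new version) is a JSON string shaped like `{"include":[{"example":"ChatQnA","hardware":"gaudi"},{"example":"DocSum","hardware":"xeon"}]}` (example and hardware values illustrative); downstream jobs expand it with `fromJSON` into a job matrix, as the GMC and manifest test jobs in this diff do.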
@@ -1,57 +1,51 @@
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

name: E2E test with GMC
# This workflow will only test GMC pipeline and will not install GMC any more
name: Single GMC E2e Test For CD Workflow Call

on:
  pull_request_target:
    branches: [main]
    types: [opened, reopened, ready_for_review, synchronize] # added `ready_for_review` since draft is skipped
    paths:
      - "**/kubernetes/**"
      - "**/tests/test_gmc**"
      - "!**.md"
      - "!**.txt"
      - "!**/kubernetes/manifests/**"
  workflow_dispatch:

concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true
  workflow_call:
    inputs:
      example:
        default: "ChatQnA"
        description: "The example to test on K8s"
        required: true
        type: string
      hardware:
        default: "xeon"
        description: "Nodes to run the test, xeon or gaudi"
        required: true
        type: string

jobs:
  job1:
    uses: ./.github/workflows/reuse-get-test-matrix.yml
    with:
      diff_excluded_files: '.github|deprecated|docker|assets|*.md|*.txt'
      xeon_server_label: 'xeon'
      gaudi_server_label: 'gaudi'

  gmc-test:
    needs: [job1]
    strategy:
      matrix: ${{ fromJSON(needs.job1.outputs.run_matrix) }}
    runs-on: "k8s-${{ matrix.hardware }}"
    runs-on: "k8s-${{ inputs.hardware }}"
    continue-on-error: true
    steps:
      - name: E2e test gmc
        run: |
          echo "Matrix - gmc: ${{ matrix.example }}"

      - name: Clean Up Working Directory
        run: sudo rm -rf ${{github.workspace}}/*

      - name: Get checkout ref
        run: |
          if [ "${{ github.event_name }}" == "pull_request" ] || [ "${{ github.event_name }}" == "pull_request_target" ]; then
            echo "CHECKOUT_REF=refs/pull/${{ github.event.number }}/merge" >> $GITHUB_ENV
          else
            echo "CHECKOUT_REF=${{ github.ref }}" >> $GITHUB_ENV
          fi
          echo "checkout ref ${{ env.CHECKOUT_REF }}"

      - name: Checkout out Repo
        uses: actions/checkout@v4
        with:
          ref: "refs/pull/${{ github.event.number }}/merge"
          ref: ${{ env.CHECKOUT_REF }}
          fetch-depth: 0

      - name: Set variables
        run: |
          if [ ${{ matrix.hardware }} == "gaudi" ]; then IMAGE_REPO=${{ vars.IMAGE_REPO_GAUDI }}; else IMAGE_REPO=${{ vars.IMAGE_REPO_XEON }}; fi
          echo "IMAGE_REPO=$OPEA_IMAGE_REPO" >> $GITHUB_ENV
          lower_example=$(echo "${{ matrix.example }}" | tr '[:upper:]' '[:lower:]')
          echo "APP_NAMESPACE=$lower_example-$(date +%Y%m%d%H%M%S)" >> $GITHUB_ENV
          lower_example=$(echo "${{ inputs.example }}" | tr '[:upper:]' '[:lower:]')
          echo "APP_NAMESPACE=$lower_example-$(tr -dc a-z0-9 </dev/urandom | head -c 16)" >> $GITHUB_ENV
          echo "ROLLOUT_TIMEOUT_SECONDS=1800s" >> $GITHUB_ENV
          echo "KUBECTL_TIMEOUT_SECONDS=60s" >> $GITHUB_ENV
          echo "continue_test=true" >> $GITHUB_ENV
@@ -65,16 +59,16 @@ jobs:
          GOOGLE_CSE_ID: ${{ secrets.GOOGLE_CSE_ID }}
          GOOGLE_API_KEY: ${{ secrets.GOOGLE_API_KEY }}
        run: |
          if [[ ! -f ${{ github.workspace }}/${{ matrix.example }}/tests/test_gmc_on_${{ matrix.hardware }}.sh ]]; then
          if [[ ! -f ${{ github.workspace }}/${{ inputs.example }}/tests/test_gmc_on_${{ inputs.hardware }}.sh ]]; then
            echo "No test script found, exist test!"
            exit 0
          else
            echo "should_cleanup=true" >> $GITHUB_ENV
            ${{ github.workspace }}/${{ matrix.example }}/tests/test_gmc_on_${{ matrix.hardware }}.sh install_${{ matrix.example }}
            echo "Testing ${{ matrix.example }}, waiting for pod ready..."
            ${{ github.workspace }}/${{ inputs.example }}/tests/test_gmc_on_${{ inputs.hardware }}.sh install_${{ inputs.example }}
            echo "Testing ${{ inputs.example }}, waiting for pod ready..."
            if kubectl rollout status deployment --namespace "$APP_NAMESPACE" --timeout "$ROLLOUT_TIMEOUT_SECONDS"; then
              echo "Testing gmc ${{ matrix.example }}, running validation test..."
              ${{ github.workspace }}/${{ matrix.example }}/tests/test_gmc_on_${{ matrix.hardware }}.sh validate_${{ matrix.example }}
              echo "Testing gmc ${{ inputs.example }}, running validation test..."
              ${{ github.workspace }}/${{ inputs.example }}/tests/test_gmc_on_${{ inputs.hardware }}.sh validate_${{ inputs.example }}
            else
              echo "Timeout waiting for pods in namespace $APP_NAMESPACE to be ready!"
              exit 1
.github/workflows/_gmc-workflow.yml (vendored, Normal file): 146 changed lines
@@ -0,0 +1,146 @@
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

name: Build and deploy GMC system on call and manual

on:
  workflow_dispatch:
    inputs:
      tag:
        default: "latest"
        required: true
        type: string
        description: "Tag to apply to images"
      node:
        default: "xeon"
        required: true
        type: string
        description: "Hardware to run test"
      opea_branch:
        default: "main"
        required: false
        type: string
        description: 'OPEA branch for image build'
  workflow_call:
    inputs:
      tag:
        default: "latest"
        required: true
        type: string
        description: "Tag to apply to images"
      node:
        default: "xeon"
        required: true
        type: string
        description: "Hardware to run test"
      opea_branch:
        default: "main"
        required: false
        type: string
        description: 'OPEA branch for image build'

jobs:
  ####################################################################################################
  # Image Build and Scan
  ####################################################################################################
  image-build:
    runs-on: "docker-build-${{ inputs.node }}"
    steps:
      - name: Checkout GenAIInfra repository
        uses: actions/checkout@v4
        with:
          repository: opea-project/GenAIInfra
          ref: ${{ inputs.opea_branch }}
          path: GenAIInfra

      - name: Set variables
        id: set_variables
        run: |
          echo "DOCKER_REGISTRY=${OPEA_IMAGE_REPO}opea" >> $GITHUB_ENV
          echo "IMAGE_REPO=${OPEA_IMAGE_REPO}" >> $GITHUB_OUTPUT
          echo "VERSION=${{ inputs.tag }}" >> $GITHUB_ENV
          echo "VERSION=${{ inputs.tag }}" >> $GITHUB_OUTPUT

      - name: Build image and push
        run: |
          cd ${{github.workspace}}/GenAIInfra/microservices-connector
          make docker.build
          make docker.push

      - name: Scan gmcmanager
        if: ${{ inputs.node == 'gaudi' }}
        uses: opea-project/validation/actions/trivy-scan@main
        with:
          image-ref: ${{ env.DOCKER_REGISTRY }}/gmcmanager:${{ env.VERSION }}
          output: gmcmanager-scan.txt

      - name: Upload gmcmanager scan result
        if: ${{ inputs.node == 'gaudi' }}
        uses: actions/upload-artifact@v4.3.4
        with:
          name: gmcmanager-scan
          path: gmcmanager-scan.txt
          overwrite: true

      - name: Scan gmcrouter
        if: ${{ inputs.node == 'gaudi' }}
        uses: opea-project/validation/actions/trivy-scan@main
        with:
          image-ref: ${{ env.DOCKER_REGISTRY }}/gmcrouter:${{ env.VERSION }}
          output: gmcrouter-scan.txt

      - name: Upload gmcrouter scan result
        if: ${{ inputs.node == 'gaudi' }}
        uses: actions/upload-artifact@v4.3.4
        with:
          name: gmcrouter-scan
          path: gmcrouter-scan.txt
          overwrite: true

      - name: Clean up images
        if: always()
        run: |
          docker rmi ${{ env.DOCKER_REGISTRY }}/gmcrouter:${{ env.VERSION }}
          docker rmi ${{ env.DOCKER_REGISTRY }}/gmcmanager:${{ env.VERSION }}

      - name: Clean up GenAIInfra source codes
        if: always()
        run: |
          rm -rf ${{github.workspace}}/GenAIInfra

  ####################################################################################################
  # GMC Install
  ####################################################################################################
  gmc-install:
    needs: image-build
    runs-on: "k8s-${{ inputs.node }}"
    steps:
      - name: Checkout GenAIInfra repository
        uses: actions/checkout@v4
        with:
          repository: opea-project/GenAIInfra
          ref: ${{ inputs.opea_branch }}
          path: GenAIInfra

      - name: Set variables
        run: |
          echo "SYSTEM_NAMESPACE=opea-system" >> $GITHUB_ENV
          echo "VERSION=${{ inputs.tag }}" >> $GITHUB_ENV
          echo "SET_VERSION=true" >> $GITHUB_ENV # to change the tag of microservice images

      - name: Cleanup existing GMC
        run: |
          cd GenAIInfra
          .github/workflows/scripts/e2e/gmc_install.sh cleanup_gmc
          cd ..

      - name: Install GMC
        run: |
          cd GenAIInfra
          .github/workflows/scripts/e2e/gmc_install.sh install_gmc
          cd ..

      - name: Clean up GenAIInfra source codes
        if: always()
        run: |
          rm -rf ${{github.workspace}}/GenAIInfra
.github/workflows/_manifest-e2e.yml (vendored, Normal file): 111 changed lines
@@ -0,0 +1,111 @@
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

name: Single Kubernetes Manifest E2e Test For Call
on:
  workflow_call:
    inputs:
      example:
        default: "ChatQnA"
        description: "The example to test on K8s"
        required: true
        type: string
      hardware:
        default: "xeon"
        description: "Nodes to run the test, xeon or gaudi"
        required: true
        type: string
      tag:
        default: "latest"
        description: "Tag to apply to images, default is latest"
        required: false
        type: string
      context:
        default: "CI"
        description: "CI or CD"
        required: false
        type: string

jobs:
  manifest-test:
    runs-on: "k8s-${{ inputs.hardware }}"
    continue-on-error: true
    steps:
      - name: Clean Up Working Directory
        run: sudo rm -rf ${{github.workspace}}/*

      - name: Get checkout ref
        run: |
          if [ "${{ github.event_name }}" == "pull_request" ] || [ "${{ github.event_name }}" == "pull_request_target" ]; then
            echo "CHECKOUT_REF=refs/pull/${{ github.event.number }}/merge" >> $GITHUB_ENV
          else
            echo "CHECKOUT_REF=${{ github.ref }}" >> $GITHUB_ENV
          fi
          echo "checkout ref ${{ env.CHECKOUT_REF }}"

      - name: Checkout out Repo
        uses: actions/checkout@v4
        with:
          ref: ${{ env.CHECKOUT_REF }}
          fetch-depth: 0

      - name: Set variables
        run: |
          echo "IMAGE_REPO=$OPEA_IMAGE_REPO" >> $GITHUB_ENV
          echo "IMAGE_TAG=${{ inputs.tag }}" >> $GITHUB_ENV
          lower_example=$(echo "${{ inputs.example }}" | tr '[:upper:]' '[:lower:]')
          echo "NAMESPACE=$lower_example-$(tr -dc a-z0-9 </dev/urandom | head -c 16)" >> $GITHUB_ENV
          echo "ROLLOUT_TIMEOUT_SECONDS=1800s" >> $GITHUB_ENV
          echo "KUBECTL_TIMEOUT_SECONDS=60s" >> $GITHUB_ENV
          echo "continue_test=true" >> $GITHUB_ENV
          echo "should_cleanup=false" >> $GITHUB_ENV
          echo "skip_validate=true" >> $GITHUB_ENV
          echo "CONTEXT=${{ inputs.context }}" >> $GITHUB_ENV
          echo "NAMESPACE=$NAMESPACE"

      - name: Kubectl install
        id: install
        run: |
          if [[ ! -f ${{ github.workspace }}/${{ inputs.example }}/tests/test_manifest_on_${{ inputs.hardware }}.sh ]]; then
            echo "No test script found, exist test!"
            exit 0
          else
            ${{ github.workspace }}/${{ inputs.example }}/tests/test_manifest_on_${{ inputs.hardware }}.sh init_${{ inputs.example }}
            echo "should_cleanup=true" >> $GITHUB_ENV
            kubectl create ns $NAMESPACE
            ${{ github.workspace }}/${{ inputs.example }}/tests/test_manifest_on_${{ inputs.hardware }}.sh install_${{ inputs.example }} $NAMESPACE
            echo "Testing ${{ inputs.example }}, waiting for pod ready..."
            if kubectl rollout status deployment --namespace "$NAMESPACE" --timeout "$ROLLOUT_TIMEOUT_SECONDS"; then
              echo "Testing manifests ${{ inputs.example }}, waiting for pod ready done!"
              echo "skip_validate=false" >> $GITHUB_ENV
            else
              echo "Timeout waiting for pods in namespace $NAMESPACE to be ready!"
              .github/workflows/scripts/k8s-utils.sh dump_pods_status $NAMESPACE
              exit 1
            fi
            sleep 60
          fi

      - name: Validate e2e test
        if: always()
        run: |
          if $skip_validate; then
            echo "Skip validate"
          else
            if ${{ github.workspace }}/${{ inputs.example }}/tests/test_manifest_on_${{ inputs.hardware }}.sh validate_${{ inputs.example }} $NAMESPACE ; then
              echo "Validate ${{ inputs.example }} successful!"
            else
              echo "Validate ${{ inputs.example }} failure!!!"
              .github/workflows/scripts/k8s-utils.sh dump_all_pod_logs $NAMESPACE
            fi
          fi

      - name: Kubectl uninstall
        if: always()
        run: |
          if $should_cleanup; then
            if ! kubectl delete ns $NAMESPACE --timeout=$KUBECTL_TIMEOUT_SECONDS; then
              kubectl delete pods --namespace $NAMESPACE --force --grace-period=0 --all
              kubectl delete ns $NAMESPACE --force --grace-period=0 --timeout=$KUBECTL_TIMEOUT_SECONDS
            fi
          fi
.github/workflows/_run-docker-compose.yml (vendored, Normal file): 150 changed lines
@@ -0,0 +1,150 @@
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

name: Image Build
permissions: read-all
on:
  workflow_call:
    inputs:
      registry:
        description: Container Registry URL
        required: false
        default: ""
        type: string
      tag:
        description: Container Tag
        required: false
        default: "latest"
        type: string
      example:
        description: Example to test
        required: true
        type: string
      hardware:
        description: Hardware to run the test on
        required: true
        type: string
      diff_excluded_files:
        required: false
        type: string
        default: ""
jobs:
  get-test-case:
    runs-on: ubuntu-latest
    outputs:
      test_cases: ${{ steps.test-case-matrix.outputs.test_cases }}
      CHECKOUT_REF: ${{ steps.get-checkout-ref.outputs.CHECKOUT_REF }}
    steps:
      - name: Get checkout ref
        id: get-checkout-ref
        run: |
          if [ "${{ github.event_name }}" == "pull_request" ] || [ "${{ github.event_name }}" == "pull_request_target" ]; then
            CHECKOUT_REF=refs/pull/${{ github.event.number }}/merge
          else
            CHECKOUT_REF=${{ github.ref }}
          fi
          echo "CHECKOUT_REF=${CHECKOUT_REF}" >> $GITHUB_OUTPUT
          echo "checkout ref ${CHECKOUT_REF}"

      - name: Checkout out Repo
        uses: actions/checkout@v4
        with:
          ref: ${{ steps.get-checkout-ref.outputs.CHECKOUT_REF }}
          fetch-depth: 0

      - name: Get test matrix
        shell: bash
        id: test-case-matrix
        run: |
          example_l=$(echo ${{ inputs.example }} | tr '[:upper:]' '[:lower:]')
          cd ${{ github.workspace }}/${{ inputs.example }}/tests
          run_test_cases=""

          default_test_case=$(find . -type f -name "test_compose_on_${{ inputs.hardware }}.sh" | cut -d/ -f2)
          if [ "$default_test_case" ]; then run_test_cases="$default_test_case"; fi
          other_test_cases=$(find . -type f -name "test_compose_*_on_${{ inputs.hardware }}.sh" | cut -d/ -f2)
          echo "default_test_case=$default_test_case"
          echo "other_test_cases=$other_test_cases"

          if [ "${{ inputs.tag }}" == "ci" ]; then
            base_commit=$(curl -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" \
              "https://api.github.com/repos/opea-project/GenAIExamples/commits?sha=${{ github.event.pull_request.base.ref }}" | jq -r '.[0].sha')
            merged_commit=$(git log -1 --format='%H')
            changed_files="$(git diff --name-only ${base_commit} ${merged_commit} | grep -vE '${{ inputs.diff_excluded_files }}')" || true
          fi

          for test_case in $other_test_cases; do
            if [ "${{ inputs.tag }}" == "ci" ]; then
              flag=${test_case%_on_*}
              flag=${flag#test_compose_}
              if [[ $(printf '%s\n' "${changed_files[@]}" | grep ${{ inputs.example }} | grep ${flag}) ]]; then
                run_test_cases="$run_test_cases $test_case"
              fi
            else
              run_test_cases="$run_test_cases $test_case"
            fi
          done

          test_cases=$(echo $run_test_cases | tr ' ' '\n' | sort -u | jq -R '.' | jq -sc '.')
          echo "test_cases=$test_cases"
          echo "test_cases=$test_cases" >> $GITHUB_OUTPUT

  run-test:
    needs: [get-test-case]
    strategy:
      matrix:
        test_case: ${{ fromJSON(needs.get-test-case.outputs.test_cases) }}
      fail-fast: false
    runs-on: ${{ inputs.hardware }}
    continue-on-error: true
    steps:
      - name: Clean up Working Directory
        run: |
          sudo rm -rf ${{github.workspace}}/* || true
          docker system prune -f
          docker rmi $(docker images --filter reference="*/*/*:latest" -q) || true
          docker rmi $(docker images --filter reference="*/*:ci" -q) || true

      - name: Checkout out Repo
        uses: actions/checkout@v4
        with:
          ref: ${{ needs.get-test-case.outputs.CHECKOUT_REF }}
          fetch-depth: 0

      - name: Run test
        shell: bash
        env:
          HUGGINGFACEHUB_API_TOKEN: ${{ secrets.HUGGINGFACEHUB_API_TOKEN }}
          GOOGLE_CSE_ID: ${{ secrets.GOOGLE_CSE_ID }}
          GOOGLE_API_KEY: ${{ secrets.GOOGLE_API_KEY }}
          PINECONE_KEY: ${{ secrets.PINECONE_KEY }}
          IMAGE_REPO: ${{ inputs.registry }}
          IMAGE_TAG: ${{ inputs.tag }}
          example: ${{ inputs.example }}
          hardware: ${{ inputs.hardware }}
          test_case: ${{ matrix.test_case }}
        run: |
          cd ${{ github.workspace }}/$example/tests
          if [[ "$IMAGE_REPO" == "" ]]; then export IMAGE_REPO="${OPEA_IMAGE_REPO}opea"; fi
          if [ -f ${test_case} ]; then timeout 30m bash ${test_case}; else echo "Test script {${test_case}} not found, skip test!"; fi

      - name: Clean up container
        shell: bash
        if: cancelled() || failure()
        run: |
          cd ${{ github.workspace }}/${{ inputs.example }}/docker_compose
          test_case=${{ matrix.test_case }}
          flag=${test_case%_on_*}
          flag=${flag#test_}
          yaml_file=$(find . -type f -wholename "*${{ inputs.hardware }}/${flag}.yaml")
          echo $yaml_file
          docker compose -f $yaml_file stop && docker compose -f $yaml_file rm -f || true
          docker system prune -f
          docker rmi $(docker images --filter reference="*:5000/*/*" -q) || true

      - name: Publish pipeline artifact
        if: ${{ !cancelled() }}
        uses: actions/upload-artifact@v4
        with:
          name: ${{ inputs.example }}_${{ matrix.test_case }}
          path: ${{ github.workspace }}/${{ inputs.example }}/tests/*.log
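As a concrete reading of the test-case selection and cleanup logic above: a script named `test_compose_pinecone_on_xeon.sh` (name illustrative) reduces to `flag=pinecone` in the matrix step, so under the `ci` tag it only runs when a changed file for that example mentions `pinecone`; in the cleanup step the same name reduces to `compose_pinecone`, which is used to locate a matching `*xeon/compose_pinecone.yaml` compose file to stop and remove.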
.github/workflows/chatqna_benchmark.yml (vendored): 78 changed lines (file deleted)
@@ -1,78 +0,0 @@
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

name: benchmark test with chatqna_benchmark

on:
  # pull_request:
  #   branches: [main]
  #   types: [opened, reopened, ready_for_review, synchronize]
  # #   inputs:
  # #   variables:
  # #     hardware:
  # #       description: 'Enter your param' #gaudi or xeon
  # #       required: true
  # #       default: xeon
  schedule:
    - cron: "35 0 * * 6"
  workflow_dispatch:
    inputs:
      hardware:
        description: 'Enter your hardware' #gaudi or xeon
        required: true
        default: gaudi

jobs:
  Example-test:
    runs-on: ${{ github.event.inputs.hardware || 'gaudi' }} #xeon #gaudi
    steps:
      - name: Clean Up Working Directory
        run: sudo rm -rf ${{github.workspace}}/*

      - name: Checkout out Repo
        uses: actions/checkout@v4

      - name: Clone repo GenAIEval
        run: |
          git clone https://github.com/opea-project/GenAIEval.git
          cd GenAIEval && git checkout v0.6

      - name: Run test
        env:
          HUGGINGFACEHUB_API_TOKEN: ${{ secrets.HUGGINGFACEHUB_API_TOKEN }}
          GOOGLE_CSE_ID: ${{ secrets.GOOGLE_CSE_ID }}
          GOOGLE_API_KEY: ${{ secrets.GOOGLE_API_KEY }}
          hardware: ${{ github.event.inputs.hardware || 'gaudi' }} #xeon
          mode: perf
          IMAGE_TAG: latest
          IMAGE_REPO_GAUDI: ${{ vars.IMAGE_REPO_GAUDI }}
          IMAGE_REPO_XEON: ${{ vars.IMAGE_REPO_XEON }}
        run: |
          # cd ${{ github.workspace }}/$example/tests
          cd ${{ github.workspace }}/ChatQnA/tests
          cp ../../GenAIEval/evals/benchmark/chatqna_benchmark.py .
          cp ../../GenAIEval/evals/benchmark/data.json ${{ github.workspace }}/ChatQnA/docker/${hardware}/

          if [ "$hardware" == "gaudi" ]; then IMAGE_REPO=$IMAGE_REPO_GAUDI; else IMAGE_REPO=$IMAGE_REPO_XEON; fi
          export IMAGE_REPO=${IMAGE_REPO}
          # example_l=$(echo $example | tr '[:upper:]' '[:lower:]')
          if [ -f test_chatqna_on_${hardware}.sh ]; then timeout 30m bash test_chatqna_on_${hardware}.sh > ${hardware}_output.log; else echo "Test script not found, skip test!"; fi

      - name: Process log and save to JSON
        env:
          hardware: ${{ github.event.inputs.hardware || 'gaudi' }} #xeon
        run: |
          cd ${{ github.workspace }}/ChatQnA/tests
          echo '{}' > ${hardware}_output.json
          echo $(grep -a 'Total Requests:' ${hardware}_output.log | awk '{print "{\"total_requests\": \""$3 "\"}"}') > ${hardware}_output.json
          echo $(grep -a 'P50 latency is' ${hardware}_output.log | awk '{print "{\"p50_latency\": \""$4 "\"}"}') >> ${hardware}_output.json
          echo $(grep -a 'P99 latency is' ${hardware}_output.log | awk '{print "{\"p99_latency\": \""$4 "\"}"}') >> ${hardware}_output.json
          jq -s 'add' ${hardware}_output.json > ${hardware}_final_output.json && mv ${hardware}_final_output.json ${hardware}_output.json

      - name: Publish pipeline artifact
        if: ${{ !cancelled() }}
        uses: actions/upload-artifact@v4
        with:
          path: |
            ${{ github.workspace }}/ChatQnA/tests/*.log
            ${{ github.workspace }}/ChatQnA/tests/*.json
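For reference, the log-processing step in this deleted workflow greps three lines from the benchmark log (`Total Requests:`, `P50 latency is`, `P99 latency is`) and merges them with `jq -s 'add'`, so the published `<hardware>_output.json` artifact ends up shaped like `{"total_requests": "...", "p50_latency": "...", "p99_latency": "..."}`.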
.github/workflows/container-build.yml (vendored): 44 changed lines (file deleted)
@@ -1,44 +0,0 @@
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

name: Container Build
permissions: read-all
on:
  workflow_dispatch:
  schedule:
    - cron: "35 1 * * 5"
jobs:
  # https://github.com/intel/ai-containers/blob/main/.github/action.yml
  build-containers:
    runs-on: docker
    env:
      REGISTRY: ${{ secrets.REGISTRY }}
      REPO: ${{ secrets.REPO }}
    steps:
      - uses: step-security/harden-runner@v2
        with:
          egress-policy: audit
      - uses: actions/checkout@v4
      - uses: docker/login-action@v3
        with:
          registry: ${{ secrets.REGISTRY }}
          username: ${{ secrets.REGISTRY_USER }}
          password: ${{ secrets.REGISTRY_TOKEN }}
      - name: Build Containers
        run: |
          docker compose -p ${GITHUB_RUN_NUMBER} build --no-cache
        working-directory: .github/workflows/docker
      - name: Print Containers to Summary
        run: |
          docker compose -p ${GITHUB_RUN_NUMBER} images --format json | jq -r --arg registry "$REGISTRY" '.[] | select(.Repository | contains($registry)) | .Tag' >> $GITHUB_STEP_SUMMARY
      - name: Push Containers
        run: |
          docker compose -p ${GITHUB_RUN_NUMBER} push
        working-directory: .github/workflows/docker
      - name: Un-Tag Containers
        run: |
          docker compose -p ${GITHUB_RUN_NUMBER} down --rmi all
        working-directory: .github/workflows/docker
      - name: Remove Containers
        if: always()
        run: docker system prune --force
.github/workflows/dependency-review.yml (vendored, Normal file): 23 changed lines
@@ -0,0 +1,23 @@
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

name: "Dependency Review"
on: [pull_request]

permissions:
  contents: read
jobs:
  dependency-review:
    runs-on: ubuntu-latest
    permissions:
      pull-requests: write
    steps:
      - name: "Checkout Repository"
        uses: actions/checkout@v4
      - name: Dependency Review
        uses: actions/dependency-review-action@v4
        with:
          comment-summary-in-pr: "always"
          fail-on-severity: "low"
          warn-only: true
          show-openssf-scorecard: false
.github/workflows/docker-compose-e2e.yml (vendored): 91 changed lines (file deleted)
@@ -1,91 -0,0 @@
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

name: E2E test with docker compose

on:
  pull_request_target:
    branches: [main]
    types: [opened, reopened, ready_for_review, synchronize] # added `ready_for_review` since draft is skipped
    paths:
      - "**/docker/**"
      - "**/tests/**"
      - "**/ui/**"
      - "!**.md"
      - "!**.txt"
      - .github/workflows/docker-compose-e2e.yml
  workflow_dispatch:

# If there is a new commit, the previous jobs will be canceled
concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true

jobs:
  job1:
    uses: ./.github/workflows/reuse-get-test-matrix.yml
    with:
      diff_excluded_files: '.github|README.md|*.txt|deprecate|kubernetes|manifest|gmc|assets'

  mega-image-build:
    needs: job1
    strategy:
      matrix: ${{ fromJSON(needs.job1.outputs.run_matrix) }}
    uses: ./.github/workflows/reuse-image-build.yml
    with:
      image_tag: ${{ github.event.pull_request.head.sha }}
      mega_service: "${{ matrix.example }}"
      runner_label: "docker-build-${{ matrix.hardware }}"

  Example-test:
    needs: [job1, mega-image-build]
    strategy:
      matrix: ${{ fromJSON(needs.job1.outputs.run_matrix) }}
    runs-on: ${{ matrix.hardware }}
    continue-on-error: true
    steps:
      - name: Test example
        run: |
          echo "Matrix - example ${{ matrix.example }}, hardware ${{ matrix.hardware }}"

      - name: Clean Up Working Directory
        run: sudo rm -rf ${{github.workspace}}/*

      - name: Checkout out Repo
        uses: actions/checkout@v4
        with:
          ref: "refs/pull/${{ github.event.number }}/merge"

      - name: Run test
        env:
          HUGGINGFACEHUB_API_TOKEN: ${{ secrets.HUGGINGFACEHUB_API_TOKEN }}
          GOOGLE_CSE_ID: ${{ secrets.GOOGLE_CSE_ID }}
          GOOGLE_API_KEY: ${{ secrets.GOOGLE_API_KEY }}
          example: ${{ matrix.example }}
          hardware: ${{ matrix.hardware }}
          IMAGE_TAG: ${{ needs.mega-image-build.outputs.image_tag }}
          IMAGE_REPO_GAUDI: ${{ vars.IMAGE_REPO_GAUDI }}
          IMAGE_REPO_XEON: ${{ vars.IMAGE_REPO_XEON }}
        run: |
          cd ${{ github.workspace }}/$example/tests
          if [ "$hardware" == "gaudi" ]; then IMAGE_REPO=$IMAGE_REPO_GAUDI; else IMAGE_REPO=$IMAGE_REPO_XEON; fi
          export IMAGE_REPO=${IMAGE_REPO}
          example_l=$(echo $example | tr '[:upper:]' '[:lower:]')
          if [ -f test_${example_l}_on_${hardware}.sh ]; then timeout 30m bash test_${example_l}_on_${hardware}.sh; else echo "Test script not found, skip test!"; fi

      - name: Clean up container
        env:
          example: ${{ matrix.example }}
          hardware: ${{ matrix.hardware }}
        if: cancelled() || failure()
        run: |
          cd ${{ github.workspace }}/$example/docker/$hardware
          docker compose stop && docker compose rm -f
          echo y | docker system prune

      - name: Publish pipeline artifact
        if: ${{ !cancelled() }}
        uses: actions/upload-artifact@v4
        with:
          name: ${{ matrix.example }}-${{ matrix.hardware }}
          path: ${{ github.workspace }}/${{ matrix.example }}/tests/*.log
.github/workflows/docker/docker-compose.yaml (vendored): 29 changed lines (file deleted)
@@ -1,29 +0,0 @@
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

services:
  chatqna-megaservice-server:
    build:
      args:
        http_proxy: ${http_proxy}
        https_proxy: ${https_proxy}
        no_proxy: ${no_proxy}
      context: ../../../ChatQnA/microservice/xeon
      dockerfile: docker/Dockerfile
    image: ${REGISTRY}/${REPO}:chatqna-megaservice-server
    pull_policy: always
  chatqna-ui-server:
    build:
      context: ../../../ChatQnA/ui
    extends: chatqna-megaservice-server
    image: ${REGISTRY}/${REPO}:chatqna-ui-server
  codegen-megaservice-server:
    build:
      context: ../../../CodeGen/microservice/xeon
    extends: chatqna-megaservice-server
    image: ${REGISTRY}/${REPO}:codegen-megaservice-server
  codegen-ui-server:
    build:
      context: ../../../CodeGen/ui
    extends: chatqna-megaservice-server
    image: ${REGISTRY}/${REPO}:codegen-ui-server
.github/workflows/image-build-on-push.yml (vendored): 33 changed lines (file deleted)
@@ -1,33 +0,0 @@
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
# Test
name: Build latest images on push event

on:
  push:
    branches: [ 'main' ]
    paths:
      - "**/docker/*.py"
      - "**/docker/Dockerfile"
      - "**/docker/ui/**"
  workflow_dispatch:

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}-on-push
  cancel-in-progress: true

jobs:
  job1:
    uses: ./.github/workflows/reuse-get-test-matrix.yml

  mega-image-build:
    needs: job1
    strategy:
      matrix:
        workload: ${{ fromJSON(needs.job1.outputs.run_matrix).include.*.example }}
        hardware: ["gaudi","xeon"]
    uses: ./.github/workflows/reuse-image-build.yml
    with:
      image_tag: latest
      mega_service: "${{ matrix.workload }}"
      runner_label: docker-build-${{ matrix.hardware }}
111
.github/workflows/manifest-e2e.yml
vendored
111
.github/workflows/manifest-e2e.yml
vendored
@@ -1,111 +0,0 @@
|
||||
# Copyright (C) 2024 Intel Corporation
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
name: E2E test with manifests
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
branches: [main]
|
||||
types: [opened, reopened, ready_for_review, synchronize] # added `ready_for_review` since draft is skipped
|
||||
paths:
|
||||
- "**/kubernetes/manifests/**"
|
||||
- "**/tests/test_manifest**"
|
||||
- "!**.md"
|
||||
- "!**.txt"
|
||||
workflow_dispatch:
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
job1:
|
||||
uses: ./.github/workflows/reuse-get-test-matrix.yml
|
||||
with:
|
||||
diff_excluded_files: '.github|deprecated|docker|assets|*.md|*.txt'
|
||||
xeon_server_label: 'xeon'
|
||||
gaudi_server_label: 'gaudi'
|
||||
|
||||
mega-image-build:
|
||||
needs: job1
|
||||
strategy:
|
||||
matrix: ${{ fromJSON(needs.job1.outputs.run_matrix) }}
|
||||
uses: ./.github/workflows/reuse-image-build.yml
|
||||
with:
|
||||
image_tag: ${{ github.event.pull_request.head.sha }}
|
||||
mega_service: "${{ matrix.example }}"
|
||||
runner_label: "docker-build-${{ matrix.hardware }}"
|
||||
|
||||
manifest-test:
|
||||
needs: [job1, mega-image-build]
|
||||
strategy:
|
||||
matrix: ${{ fromJSON(needs.job1.outputs.run_matrix) }}
|
||||
runs-on: "k8s-${{ matrix.hardware }}"
|
||||
continue-on-error: true
|
||||
steps:
|
||||
- name: E2e test manifest
|
||||
run: |
|
||||
echo "Matrix - manifest: ${{ matrix.example }}"
|
||||
|
||||
- name: Clean Up Working Directory
|
||||
run: sudo rm -rf ${{github.workspace}}/*
|
||||
|
||||
- name: Checkout out Repo
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Set variables
|
||||
run: |
|
||||
if [ ${{ matrix.hardware }} == "gaudi" ]; then IMAGE_REPO=${{ vars.IMAGE_REPO_GAUDI }}; else IMAGE_REPO=${{ vars.IMAGE_REPO_XEON }}; fi
|
||||
echo "IMAGE_REPO=$OPEA_IMAGE_REPO" >> $GITHUB_ENV
|
||||
echo "IMAGE_TAG=${{needs.mega-image-build.outputs.image_tag}}" >> $GITHUB_ENV
|
||||
lower_example=$(echo "${{ matrix.example }}" | tr '[:upper:]' '[:lower:]')
|
||||
echo "NAMESPACE=$lower_example-$(date +%Y%m%d%H%M%S)" >> $GITHUB_ENV
|
||||
echo "ROLLOUT_TIMEOUT_SECONDS=1800s" >> $GITHUB_ENV
|
||||
echo "KUBECTL_TIMEOUT_SECONDS=60s" >> $GITHUB_ENV
|
||||
echo "continue_test=true" >> $GITHUB_ENV
|
||||
echo "should_cleanup=false" >> $GITHUB_ENV
|
||||
echo "skip_validate=true" >> $GITHUB_ENV
|
||||
echo "NAMESPACE=$NAMESPACE"
|
||||
|
||||
- name: Kubectl install
|
||||
id: install
|
||||
run: |
|
||||
if [[ ! -f ${{ github.workspace }}/${{ matrix.example }}/tests/test_manifest_on_${{ matrix.hardware }}.sh ]]; then
|
||||
echo "No test script found, exist test!"
|
||||
exit 0
|
||||
else
|
||||
${{ github.workspace }}/${{ matrix.example }}/tests/test_manifest_on_${{ matrix.hardware }}.sh init_${{ matrix.example }}
|
||||
echo "should_cleanup=true" >> $GITHUB_ENV
|
||||
kubectl create ns $NAMESPACE
|
||||
${{ github.workspace }}/${{ matrix.example }}/tests/test_manifest_on_${{ matrix.hardware }}.sh install_${{ matrix.example }} $NAMESPACE
|
||||
echo "Testing ${{ matrix.example }}, waiting for pod ready..."
|
||||
if kubectl rollout status deployment --namespace "$NAMESPACE" --timeout "$ROLLOUT_TIMEOUT_SECONDS"; then
|
||||
echo "Testing manifests ${{ matrix.example }}, waiting for pod ready done!"
|
||||
echo "skip_validate=false" >> $GITHUB_ENV
|
||||
else
|
||||
echo "Timeout waiting for pods in namespace $NAMESPACE to be ready!"
|
||||
exit 1
|
||||
fi
|
||||
sleep 60
|
||||
fi
|
||||
|
||||
- name: Validate e2e test
|
||||
if: always()
|
||||
run: |
|
||||
if $skip_validate; then
|
||||
echo "Skip validate"
|
||||
else
|
||||
${{ github.workspace }}/${{ matrix.example }}/tests/test_manifest_on_${{ matrix.hardware }}.sh validate_${{ matrix.example }} $NAMESPACE
|
||||
fi
|
||||
|
||||
- name: Kubectl uninstall
|
||||
if: always()
|
||||
run: |
|
||||
if $should_cleanup; then
|
||||
if ! kubectl delete ns $NAMESPACE --timeout=$KUBECTL_TIMEOUT_SECONDS; then
|
||||
kubectl delete pods --namespace $NAMESPACE --force --grace-period=0 --all
|
||||
kubectl delete ns $NAMESPACE --force --grace-period=0 --timeout=$KUBECTL_TIMEOUT_SECONDS
|
||||
fi
|
||||
fi
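For orientation, the `run_matrix` value produced by the `_get-test-matrix.yml` job and expanded above with `fromJSON(needs.job1.outputs.run_matrix)` is a JSON include-matrix. A hypothetical value (the example and hardware names are illustrative, not taken from a real run) looks like this:
```
# Hypothetical contents written to $GITHUB_OUTPUT by the get-test-matrix job;
# strategy.matrix expands each {example, hardware} pair into one job.
run_matrix={"include":[{"example":"ChatQnA","hardware":"xeon"},{"example":"ChatQnA","hardware":"gaudi"}]}
```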
|
||||
59
.github/workflows/manual-docker-publish.yml
vendored
Normal file
@@ -0,0 +1,59 @@
|
||||
# Copyright (C) 2024 Intel Corporation
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
name: Examples publish docker image on manual event
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
node:
|
||||
default: "gaudi"
|
||||
description: "Hardware to run test"
|
||||
required: true
|
||||
type: string
|
||||
examples:
|
||||
default: "Translation"
|
||||
description: 'List of examples to publish [AudioQnA,ChatQnA,CodeGen,CodeTrans,DocSum,FaqGen,SearchQnA,Translation]'
|
||||
required: false
|
||||
type: string
|
||||
images:
|
||||
default: "gmcmanager,gmcrouter"
|
||||
description: 'List of images to publish [gmcmanager,gmcrouter, ...]'
|
||||
required: false
|
||||
type: string
|
||||
tag:
|
||||
default: "v0.9"
|
||||
description: "Tag to publish"
|
||||
required: true
|
||||
type: string
|
||||
publish_tags:
|
||||
default: "latest,v0.9"
|
||||
description: 'Tag list apply to publish images'
|
||||
required: false
|
||||
type: string
|
||||
|
||||
permissions: read-all
|
||||
jobs:
|
||||
get-image-list:
|
||||
uses: ./.github/workflows/_get-image-list.yml
|
||||
with:
|
||||
examples: ${{ inputs.examples }}
|
||||
images: ${{ inputs.images }}
|
||||
|
||||
publish:
|
||||
needs: [get-image-list]
|
||||
strategy:
|
||||
matrix:
|
||||
image: ${{ fromJSON(needs.get-image-list.outputs.matrix) }}
|
||||
runs-on: "docker-build-${{ inputs.node }}"
|
||||
steps:
|
||||
- uses: docker/login-action@v3.2.0
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USER }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Image Publish
|
||||
uses: opea-project/validation/actions/image-publish@main
|
||||
with:
|
||||
local_image_ref: ${OPEA_IMAGE_REPO}opea/${{ matrix.image }}:${{ inputs.tag }}
|
||||
image_name: opea/${{ matrix.image }}
|
||||
publish_tags: ${{ inputs.publish_tags }}
|
||||
113
.github/workflows/manual-docker-scan.yml
vendored
Normal file
@@ -0,0 +1,113 @@
|
||||
# Copyright (C) 2024 Intel Corporation
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
name: Examples docker images BoM/CVE scan on manual event
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
node:
|
||||
default: "gaudi"
|
||||
description: "Hardware to run scan"
|
||||
required: true
|
||||
type: string
|
||||
examples:
|
||||
default: "ChatQnA"
|
||||
description: 'List of examples to scan [AudioQnA,ChatQnA,CodeGen,CodeTrans,DocSum,FaqGen,SearchQnA,Translation]'
|
||||
required: false
|
||||
type: string
|
||||
images:
|
||||
default: "gmcmanager,gmcrouter"
|
||||
description: 'List of images to scan [gmcmanager,gmcrouter, ...]'
|
||||
required: false
|
||||
type: string
|
||||
tag:
|
||||
default: "latest"
|
||||
description: "Tag for images to scan"
|
||||
required: true
|
||||
type: string
|
||||
sbom_scan:
|
||||
default: true
|
||||
description: 'Scan images for BoM'
|
||||
required: false
|
||||
type: boolean
|
||||
trivy_scan:
|
||||
default: true
|
||||
description: 'Scan images for CVE'
|
||||
required: false
|
||||
type: boolean
|
||||
|
||||
permissions: read-all
|
||||
jobs:
|
||||
get-image-list:
|
||||
uses: ./.github/workflows/_get-image-list.yml
|
||||
with:
|
||||
examples: ${{ inputs.examples }}
|
||||
images: ${{ inputs.images }}
|
||||
|
||||
scan-docker:
|
||||
needs: get-image-list
|
||||
runs-on: "docker-build-${{ inputs.node }}"
|
||||
strategy:
|
||||
matrix:
|
||||
image: ${{ fromJson(needs.get-image-list.outputs.matrix) }}
|
||||
fail-fast: false
|
||||
steps:
|
||||
- name: Clean up Working Directory
|
||||
run: |
|
||||
sudo rm -rf ${{github.workspace}}/* || true
|
||||
docker system prune -f
|
||||
|
||||
- name: Pull Image
|
||||
run: |
|
||||
docker pull ${OPEA_IMAGE_REPO}opea/${{ matrix.image }}:${{ inputs.tag }}
|
||||
echo "OPEA_IMAGE_REPO=${OPEA_IMAGE_REPO}" >> $GITHUB_ENV
|
||||
|
||||
- name: SBOM Scan Container
|
||||
uses: anchore/sbom-action@v0.17.1
|
||||
if: ${{ inputs.sbom_scan }}
|
||||
with:
|
||||
image: ${{ env.OPEA_IMAGE_REPO }}opea/${{ matrix.image }}:${{ inputs.tag }}
|
||||
output-file: ${{ matrix.image }}-sbom-scan.txt
|
||||
format: 'spdx-json'
|
||||
|
||||
- name: Security Scan Container
|
||||
uses: aquasecurity/trivy-action@0.24.0
|
||||
if: ${{ inputs.trivy_scan }}
|
||||
with:
|
||||
image-ref: ${{ env.OPEA_IMAGE_REPO }}opea/${{ matrix.image }}:${{ inputs.tag }}
|
||||
output: ${{ matrix.image }}-trivy-scan.txt
|
||||
format: 'table'
|
||||
exit-code: '1'
|
||||
ignore-unfixed: true
|
||||
vuln-type: 'os,library'
|
||||
severity: 'CRITICAL,HIGH'
|
||||
|
||||
- name: Cleanup
|
||||
if: always()
|
||||
run: docker rmi -f ${OPEA_IMAGE_REPO}opea/${{ matrix.image }}:${{ inputs.tag }} || true
|
||||
|
||||
- name: Collect Logs
|
||||
if: always()
|
||||
run: |
|
||||
mkdir -p /tmp/scan-${{ inputs.tag }}-${{ github.run_number }}
|
||||
mv ${{ matrix.image }}-*-scan.txt /tmp/scan-${{ inputs.tag }}-${{ github.run_number }}
|
||||
|
||||
upload-artifacts:
|
||||
needs: scan-docker
|
||||
runs-on: "docker-build-${{ inputs.node }}"
|
||||
if: always()
|
||||
steps:
|
||||
- uses: actions/upload-artifact@v4.3.4
|
||||
with:
|
||||
name: sbom-scan-${{ inputs.tag }}-${{ github.run_number }}
|
||||
path: /tmp/scan-${{ inputs.tag }}-${{ github.run_number }}/*-sbom-scan.txt
|
||||
overwrite: true
|
||||
|
||||
- uses: actions/upload-artifact@v4.3.4
|
||||
with:
|
||||
name: trivy-scan-${{ inputs.tag }}-${{ github.run_number }}
|
||||
path: /tmp/scan-${{ inputs.tag }}-${{ github.run_number }}/*-trivy-scan.txt
|
||||
overwrite: true
|
||||
|
||||
- name: Remove Logs
|
||||
run: rm -rf /tmp/scan-${{ inputs.tag }}-${{ github.run_number }} && rm -rf /tmp/sbom-action-*
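For a rough local equivalent of the two scan steps above, assuming the syft and trivy CLIs are installed (the image reference is a placeholder, not a published OPEA tag):
```
# Hypothetical local equivalents of the SBOM and CVE scan steps; adjust the image ref.
IMAGE=opea/chatqna:latest
syft "$IMAGE" -o spdx-json > chatqna-sbom-scan.txt                      # SBOM, as in anchore/sbom-action
trivy image --vuln-type os,library --severity CRITICAL,HIGH \
      --ignore-unfixed --exit-code 1 "$IMAGE" > chatqna-trivy-scan.txt  # CVE scan, as in trivy-action
```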
|
||||
104
.github/workflows/manual-example-workflow.yml
vendored
Normal file
@@ -0,0 +1,104 @@
|
||||
# Copyright (C) 2024 Intel Corporation
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
name: Examples CD workflow on manual event
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
nodes:
|
||||
default: "gaudi,xeon"
|
||||
description: "Hardware to run test"
|
||||
required: true
|
||||
type: string
|
||||
examples:
|
||||
default: "ChatQnA"
|
||||
description: 'List of examples to test [AudioQnA,ChatQnA,CodeGen,CodeTrans,DocSum,FaqGen,SearchQnA,Translation]'
|
||||
required: true
|
||||
type: string
|
||||
tag:
|
||||
default: "latest"
|
||||
description: "Tag to apply to images"
|
||||
required: true
|
||||
type: string
|
||||
deploy_gmc:
|
||||
default: false
|
||||
description: 'Whether to deploy gmc'
|
||||
required: true
|
||||
type: boolean
|
||||
build:
|
||||
default: true
|
||||
description: 'Build test required images for Examples'
|
||||
required: false
|
||||
type: boolean
|
||||
test_compose:
|
||||
default: true
|
||||
description: 'Test examples with docker compose'
|
||||
required: false
|
||||
type: boolean
|
||||
test_k8s:
|
||||
default: false
|
||||
description: 'Test examples with k8s'
|
||||
required: false
|
||||
type: boolean
|
||||
test_gmc:
|
||||
default: false
|
||||
description: 'Test examples with gmc'
|
||||
required: false
|
||||
type: boolean
|
||||
opea_branch:
|
||||
default: "main"
|
||||
description: 'OPEA branch for image build'
|
||||
required: false
|
||||
type: string
|
||||
|
||||
permissions: read-all
|
||||
jobs:
|
||||
get-test-matrix:
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
examples: ${{ steps.get-matrix.outputs.examples }}
|
||||
nodes: ${{ steps.get-matrix.outputs.nodes }}
|
||||
steps:
|
||||
- name: Create Matrix
|
||||
id: get-matrix
|
||||
run: |
|
||||
examples=($(echo ${{ inputs.examples }} | tr ',' ' '))
|
||||
examples_json=$(printf '%s\n' "${examples[@]}" | sort -u | jq -R '.' | jq -sc '.')
|
||||
echo "examples=$examples_json" >> $GITHUB_OUTPUT
|
||||
nodes=($(echo ${{ inputs.nodes }} | tr ',' ' '))
|
||||
nodes_json=$(printf '%s\n' "${nodes[@]}" | sort -u | jq -R '.' | jq -sc '.')
|
||||
echo "nodes=$nodes_json" >> $GITHUB_OUTPUT
|
||||
|
||||
build-deploy-gmc:
|
||||
needs: [get-test-matrix]
|
||||
if: ${{ fromJSON(inputs.deploy_gmc) }}
|
||||
strategy:
|
||||
matrix:
|
||||
node: ${{ fromJson(needs.get-test-matrix.outputs.nodes) }}
|
||||
fail-fast: false
|
||||
uses: ./.github/workflows/_gmc-workflow.yml
|
||||
with:
|
||||
node: ${{ matrix.node }}
|
||||
tag: ${{ inputs.tag }}
|
||||
opea_branch: ${{ inputs.opea_branch }}
|
||||
secrets: inherit
|
||||
|
||||
run-examples:
|
||||
needs: [get-test-matrix, build-deploy-gmc]
|
||||
if: always()
|
||||
strategy:
|
||||
matrix:
|
||||
example: ${{ fromJson(needs.get-test-matrix.outputs.examples) }}
|
||||
node: ${{ fromJson(needs.get-test-matrix.outputs.nodes) }}
|
||||
fail-fast: false
|
||||
uses: ./.github/workflows/_example-workflow.yml
|
||||
with:
|
||||
node: ${{ matrix.node }}
|
||||
example: ${{ matrix.example }}
|
||||
tag: ${{ inputs.tag }}
|
||||
build: ${{ fromJSON(inputs.build) }}
|
||||
test_compose: ${{ fromJSON(inputs.test_compose) }}
|
||||
test_k8s: ${{ fromJSON(inputs.test_k8s) }}
|
||||
test_gmc: ${{ fromJSON(inputs.test_gmc) }}
|
||||
opea_branch: ${{ inputs.opea_branch }}
|
||||
secrets: inherit
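A hypothetical manual trigger of this workflow from the GitHub CLI; all input values below are illustrative and can be adjusted per run:
```
# Dispatch the CD workflow with explicit inputs (values shown are examples only).
gh workflow run manual-example-workflow.yml \
  -f nodes=gaudi,xeon \
  -f examples=ChatQnA,CodeGen \
  -f tag=v0.9 \
  -f deploy_gmc=false \
  -f build=true \
  -f test_compose=true
```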
|
||||
46
.github/workflows/manual-freeze-tag.yml
vendored
Normal file
@@ -0,0 +1,46 @@
|
||||
# Copyright (C) 2024 Intel Corporation
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
name: Freeze OPEA images release tag in readme on manual event
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
tag:
|
||||
default: "latest"
|
||||
description: "Tag to apply to images"
|
||||
required: true
|
||||
type: string
|
||||
|
||||
jobs:
|
||||
freeze-tag:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
ref: ${{ github.ref }}
|
||||
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: "3.10"
|
||||
|
||||
- name: Set up Git
|
||||
run: |
|
||||
git config --global user.name "NeuralChatBot"
|
||||
git config --global user.email "grp_neural_chat_bot@intel.com"
|
||||
git remote set-url origin https://NeuralChatBot:"${{ secrets.ACTION_TOKEN }}"@github.com/opea-project/GenAIExamples.git
|
||||
|
||||
- name: Run script
|
||||
run: |
|
||||
find . -name "*.md" | xargs sed -i "s|^docker\ compose|TAG=${{ github.event.inputs.tag }}\ docker\ compose|g"
|
||||
find . -type f -name "*.yaml" \( -path "*/benchmark/*" -o -path "*/kubernetes/*" \) | xargs sed -i -E 's/(opea\/[A-Za-z0-9\-]*:)latest/\1${{ github.event.inputs.tag }}/g'
|
||||
find . -type f -name "*.md" \( -path "*/benchmark/*" -o -path "*/kubernetes/*" \) | xargs sed -i -E 's/(opea\/[A-Za-z0-9\-]*:)latest/\1${{ github.event.inputs.tag }}/g'
|
||||
|
||||
- name: Commit changes
|
||||
run: |
|
||||
git add .
|
||||
git commit -s -m "Freeze OPEA images tag"
|
||||
git push
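As an illustration of the Run script step above (file contents and tag are hypothetical), the first sed pins README compose commands to the release tag, and the later ones rewrite `opea/*:latest` image references:
```
# Hypothetical effect of the sed commands above with inputs.tag=v0.9:
#   README.md       : "docker compose up -d"        -> "TAG=v0.9 docker compose up -d"
#   kubernetes yaml : "image: opea/chatqna:latest"  -> "image: opea/chatqna:v0.9"
```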
|
||||
44
.github/workflows/path_detection.yml
vendored
@@ -1,44 +0,0 @@
|
||||
# Copyright (C) 2024 Intel Corporation
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
name: Check for missing Dockerfile paths in repo comps
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
branches: [main]
|
||||
types: [opened, reopened, ready_for_review, synchronize]
|
||||
|
||||
jobs:
|
||||
check-dockerfile-paths:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Clean Up Working Directory
|
||||
run: sudo rm -rf ${{github.workspace}}/*
|
||||
|
||||
- name: Checkout repo GenAIExamples
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Clone repo GenAIComps
|
||||
run: |
|
||||
cd ..
|
||||
git clone https://github.com/opea-project/GenAIComps.git
|
||||
|
||||
- name: Check for missing Dockerfile paths in GenAIComps
|
||||
run: |
|
||||
cd ${{github.workspace}}
|
||||
miss="FALSE"
|
||||
while IFS=: read -r file line content; do
|
||||
dockerfile_path=$(echo "$content" | awk -F '-f ' '{print $2}' | awk '{print $1}')
|
||||
if [[ ! -f "../GenAIComps/${dockerfile_path}" ]]; then
|
||||
miss="TRUE"
|
||||
echo "Missing Dockerfile: GenAIComps/${dockerfile_path} (Referenced in GenAIExamples/${file}:${line})"
|
||||
fi
|
||||
done < <(grep -Ern 'docker build .* -f comps/.+/Dockerfile' --include='*.md' .)
|
||||
|
||||
|
||||
if [[ "$miss" == "TRUE" ]]; then
|
||||
exit 1
|
||||
fi
|
||||
|
||||
shell: bash
|
||||
46
.github/workflows/pr-docker-compose-e2e.yml
vendored
Normal file
@@ -0,0 +1,46 @@
|
||||
# Copyright (C) 2024 Intel Corporation
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
name: E2E test with docker compose
|
||||
|
||||
on:
|
||||
pull_request_target:
|
||||
branches: ["main", "*rc"]
|
||||
types: [opened, reopened, ready_for_review, synchronize] # added `ready_for_review` since draft is skipped
|
||||
paths:
|
||||
- "**/Dockerfile**"
|
||||
- "**.py"
|
||||
- "**/docker_compose/**"
|
||||
- "**/docker_image_build/**"
|
||||
- "**/tests/test_compose**"
|
||||
- "**/ui/**"
|
||||
- "!**.md"
|
||||
- "!**.txt"
|
||||
- .github/workflows/pr-docker-compose-e2e.yml
|
||||
|
||||
# If there is a new commit, the previous jobs will be canceled
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
get-test-matrix:
|
||||
if: ${{ !github.event.pull_request.draft }}
|
||||
uses: ./.github/workflows/_get-test-matrix.yml
|
||||
with:
|
||||
diff_excluded_files: '.github|*.md|*.txt|kubernetes|manifest|gmc|assets|benchmark'
|
||||
|
||||
example-test:
|
||||
needs: [get-test-matrix]
|
||||
strategy:
|
||||
matrix: ${{ fromJSON(needs.get-test-matrix.outputs.run_matrix) }}
|
||||
fail-fast: false
|
||||
if: ${{ !github.event.pull_request.draft }}
|
||||
uses: ./.github/workflows/_run-docker-compose.yml
|
||||
with:
|
||||
registry: "opea"
|
||||
tag: "ci"
|
||||
example: ${{ matrix.example }}
|
||||
hardware: ${{ matrix.hardware }}
|
||||
diff_excluded_files: '.github|*.md|*.txt|kubernetes|manifest|gmc|assets|benchmark'
|
||||
secrets: inherit
|
||||
36
.github/workflows/pr-gmc-e2e.yaml
vendored
Normal file
@@ -0,0 +1,36 @@
|
||||
# Copyright (C) 2024 Intel Corporation
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
name: E2E test with GMC
|
||||
|
||||
on:
|
||||
pull_request_target:
|
||||
branches: ["main", "*rc"]
|
||||
types: [opened, reopened, ready_for_review, synchronize] # added `ready_for_review` since draft is skipped
|
||||
paths:
|
||||
- "**/kubernetes/**/gmc/**"
|
||||
- "**/tests/test_gmc**"
|
||||
- "!**.md"
|
||||
- "!**.txt"
|
||||
- "!**/kubernetes/**/manifests/**"
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
job1:
|
||||
uses: ./.github/workflows/_get-test-matrix.yml
|
||||
with:
|
||||
diff_excluded_files: '.github|docker_compose|manifest|assets|*.md|*.txt'
|
||||
test_mode: "gmc"
|
||||
|
||||
gmc-test:
|
||||
needs: [job1]
|
||||
strategy:
|
||||
matrix: ${{ fromJSON(needs.job1.outputs.run_matrix) }}
|
||||
uses: ./.github/workflows/_gmc-e2e.yml
|
||||
with:
|
||||
example: ${{ matrix.example }}
|
||||
hardware: ${{ matrix.hardware }}
|
||||
secrets: inherit
|
||||
40
.github/workflows/pr-manifest-e2e.yml
vendored
Normal file
@@ -0,0 +1,40 @@
|
||||
# Copyright (C) 2024 Intel Corporation
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
name: E2E test with manifests
|
||||
|
||||
on:
|
||||
pull_request_target:
|
||||
branches: ["main", "*rc"]
|
||||
types: [opened, reopened, ready_for_review, synchronize] # added `ready_for_review` since draft is skipped
|
||||
paths:
|
||||
- "**/kubernetes/**/manifests/**"
|
||||
- "**/tests/test_manifest**"
|
||||
- "!**.md"
|
||||
- "!**.txt"
|
||||
- "!**/kubernetes/**/gmc/**"
|
||||
workflow_dispatch:
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
job1:
|
||||
uses: ./.github/workflows/_get-test-matrix.yml
|
||||
with:
|
||||
diff_excluded_files: '.github|docker_compose|gmc|assets|*.md|*.txt|benchmark'
|
||||
test_mode: "manifest"
|
||||
|
||||
run-example:
|
||||
needs: job1
|
||||
strategy:
|
||||
matrix: ${{ fromJSON(needs.job1.outputs.run_matrix) }}
|
||||
fail-fast: false
|
||||
uses: ./.github/workflows/_example-workflow.yml
|
||||
with:
|
||||
node: ${{ matrix.hardware }}
|
||||
example: ${{ matrix.example }}
|
||||
tag: ${{ github.event.pull_request.head.sha }}
|
||||
test_k8s: true
|
||||
secrets: inherit
|
||||
156
.github/workflows/pr-path-detection.yml
vendored
Normal file
@@ -0,0 +1,156 @@
|
||||
# Copyright (C) 2024 Intel Corporation
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
name: Check Paths and Hyperlinks
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
branches: [main]
|
||||
types: [opened, reopened, ready_for_review, synchronize]
|
||||
|
||||
jobs:
|
||||
check-dockerfile-paths:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Clean Up Working Directory
|
||||
run: sudo rm -rf ${{github.workspace}}/*
|
||||
|
||||
- name: Checkout Repo GenAIExamples
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Clone Repo GenAIComps
|
||||
run: |
|
||||
cd ..
|
||||
git clone https://github.com/opea-project/GenAIComps.git
|
||||
|
||||
- name: Check for Missing Dockerfile Paths in GenAIComps
|
||||
run: |
|
||||
cd ${{github.workspace}}
|
||||
miss="FALSE"
|
||||
while IFS=: read -r file line content; do
|
||||
dockerfile_path=$(echo "$content" | awk -F '-f ' '{print $2}' | awk '{print $1}')
|
||||
if [[ ! -f "../GenAIComps/${dockerfile_path}" ]]; then
|
||||
miss="TRUE"
|
||||
echo "Missing Dockerfile: GenAIComps/${dockerfile_path} (Referenced in GenAIExamples/${file}:${line})"
|
||||
fi
|
||||
done < <(grep -Ern 'docker build .* -f comps/.+/Dockerfile' --include='*.md' .)
|
||||
|
||||
|
||||
if [[ "$miss" == "TRUE" ]]; then
|
||||
exit 1
|
||||
fi
|
||||
|
||||
shell: bash
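The grep in the step above keys on README build commands of the following form (the image name and comps path are illustrative); the check fails when the referenced Dockerfile is missing from the cloned GenAIComps tree:
```
# Example of a README line the path check matches; the comps path shown is hypothetical.
docker build -t opea/embedding-tei:latest --build-arg https_proxy=$https_proxy -f comps/embeddings/langchain/Dockerfile .
```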
|
||||
|
||||
check-the-validity-of-hyperlinks-in-README:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Clean Up Working Directory
|
||||
run: sudo rm -rf ${{github.workspace}}/*
|
||||
|
||||
- name: Checkout Repo GenAIExamples
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Check the Validity of Hyperlinks
|
||||
run: |
|
||||
cd ${{github.workspace}}
|
||||
fail="FALSE"
|
||||
url_lines=$(grep -Eo '\]\(http[s]?://[^)]+\)' --include='*.md' -r .)
|
||||
if [ -n "$url_lines" ]; then
|
||||
for url_line in $url_lines; do
|
||||
url=$(echo "$url_line"|cut -d '(' -f2 | cut -d ')' -f1|sed 's/\.git$//')
|
||||
path=$(echo "$url_line"|cut -d':' -f1 | cut -d'/' -f2-)
|
||||
response=$(curl -L -s -o /dev/null -w "%{http_code}" "$url")
|
||||
if [ "$response" -ne 200 ]; then
|
||||
echo "**********Validation failed, try again**********"
|
||||
response_retry=$(curl -s -o /dev/null -w "%{http_code}" "$url")
|
||||
if [ "$response_retry" -eq 200 ]; then
|
||||
echo "*****Retry successfully*****"
|
||||
else
|
||||
echo "Invalid link from ${{github.workspace}}/$path: $url"
|
||||
fail="TRUE"
|
||||
fi
|
||||
fi
|
||||
done
|
||||
fi
|
||||
|
||||
if [[ "$fail" == "TRUE" ]]; then
|
||||
exit 1
|
||||
else
|
||||
echo "All hyperlinks are valid."
|
||||
fi
|
||||
shell: bash
|
||||
|
||||
check-the-validity-of-relative-path:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Clean up Working Directory
|
||||
run: sudo rm -rf ${{github.workspace}}/*
|
||||
|
||||
- name: Checkout Repo GenAIExamples
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Checking Relative Path Validity
|
||||
run: |
|
||||
cd ${{github.workspace}}
|
||||
fail="FALSE"
|
||||
repo_name=${{ github.event.pull_request.head.repo.full_name }}
|
||||
if [ "$(echo "$repo_name"|cut -d'/' -f1)" != "opea-project" ]; then
|
||||
owner=$(echo "${{ github.event.pull_request.head.repo.full_name }}" |cut -d'/' -f1)
|
||||
branch="https://github.com/$owner/GenAIExamples/tree/${{ github.event.pull_request.head.ref }}"
|
||||
else
|
||||
branch="https://github.com/opea-project/GenAIExamples/blob/${{ github.event.pull_request.head.ref }}"
|
||||
fi
|
||||
link_head="https://github.com/opea-project/GenAIExamples/blob/main"
|
||||
png_lines=$(grep -Eo '\]\([^)]+\)' --include='*.md' -r .|grep -Ev 'http')
|
||||
if [ -n "$png_lines" ]; then
|
||||
for png_line in $png_lines; do
|
||||
refer_path=$(echo "$png_line"|cut -d':' -f1 | cut -d'/' -f2-)
|
||||
png_path=$(echo "$png_line"|cut -d '(' -f2 | cut -d ')' -f1)
|
||||
if [[ "${png_path:0:1}" == "/" ]]; then
|
||||
check_path=${{github.workspace}}$png_path
|
||||
elif [[ "${png_path:0:1}" == "#" ]]; then
|
||||
check_path=${{github.workspace}}/$refer_path$png_path
|
||||
else
|
||||
check_path=${{github.workspace}}/$(dirname "$refer_path")/$png_path
|
||||
fi
|
||||
real_path=$(realpath $check_path)
|
||||
if [ $? -ne 0 ]; then
|
||||
echo "Path $png_path in file ${{github.workspace}}/$refer_path does not exist"
|
||||
fail="TRUE"
|
||||
else
|
||||
url=$link_head$(echo "$real_path" | sed 's|.*/GenAIExamples||')
|
||||
response=$(curl -I -L -s -o /dev/null -w "%{http_code}" "$url")
|
||||
if [ "$response" -ne 200 ]; then
|
||||
echo "**********Validation failed, try again**********"
|
||||
response_retry=$(curl -s -o /dev/null -w "%{http_code}" "$url")
|
||||
if [ "$response_retry" -eq 200 ]; then
|
||||
echo "*****Retry successfully*****"
|
||||
else
|
||||
echo "Retry failed. Check branch ${{ github.event.pull_request.head.ref }}"
|
||||
url_dev=$branch$(echo "$real_path" | sed 's|.*/GenAIExamples||')
|
||||
response=$(curl -I -L -s -o /dev/null -w "%{http_code}" "$url_dev")
|
||||
if [ "$response" -ne 200 ]; then
|
||||
echo "**********Validation failed, try again**********"
|
||||
response_retry=$(curl -s -o /dev/null -w "%{http_code}" "$url_dev")
|
||||
if [ "$response_retry" -eq 200 ]; then
|
||||
echo "*****Retry successfully*****"
|
||||
else
|
||||
echo "Invalid path from ${{github.workspace}}/$refer_path: $png_path"
|
||||
fail="TRUE"
|
||||
fi
|
||||
else
|
||||
echo "Check branch ${{ github.event.pull_request.head.ref }} successfully."
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
done
|
||||
fi
|
||||
|
||||
if [[ "$fail" == "TRUE" ]]; then
|
||||
exit 1
|
||||
else
|
||||
echo "All hyperlinks are valid."
|
||||
fi
|
||||
shell: bash
|
||||
35
.github/workflows/push-image-build.yml
vendored
Normal file
@@ -0,0 +1,35 @@
|
||||
# Copyright (C) 2024 Intel Corporation
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
# Test
|
||||
name: Build latest images on push event
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [ 'main' ]
|
||||
paths:
|
||||
- "**.py"
|
||||
- "**Dockerfile"
|
||||
workflow_dispatch:
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}-on-push
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
job1:
|
||||
uses: ./.github/workflows/_get-test-matrix.yml
|
||||
with:
|
||||
test_mode: "docker_image_build/build.yaml"
|
||||
|
||||
image-build:
|
||||
needs: job1
|
||||
strategy:
|
||||
matrix:
|
||||
example: ${{ fromJSON(needs.job1.outputs.run_matrix).include.*.example }}
|
||||
node: ["gaudi","xeon"]
|
||||
fail-fast: false
|
||||
uses: ./.github/workflows/_example-workflow.yml
|
||||
with:
|
||||
node: ${{ matrix.node }}
|
||||
example: ${{ matrix.example }}
|
||||
secrets: inherit
|
||||
@@ -5,7 +5,7 @@ on:
|
||||
push:
|
||||
branches: [ 'main','issue' ]
|
||||
paths:
|
||||
- "**/docker/*/compose.yaml"
|
||||
- "**/docker_compose/**/compose*.yaml"
|
||||
|
||||
name: Create an issue to GenAIInfra on push
|
||||
jobs:
|
||||
@@ -25,7 +25,7 @@ jobs:
|
||||
base_commit=$(git rev-parse HEAD~1)
|
||||
merged_commit=$(git log -1 --format='%H')
|
||||
changed_files="$(git diff --name-only ${base_commit} ${merged_commit} | \
|
||||
grep -E '.*/docker/.*/compose.yaml')" || true
|
||||
grep -E '.*/docker_compose/.*/compose.*.yaml')" || true
|
||||
|
||||
examples=$(printf '%s\n' "${changed_files[@]}" | grep '/' | cut -d'/' -f1 | sort -u)
|
||||
format_examples=$(echo "$examples" | tr '\n' ',')
|
||||
64
.github/workflows/reuse-image-build.yml
vendored
@@ -1,64 +0,0 @@
|
||||
# Copyright (C) 2024 Intel Corporation
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
name: Image Build
|
||||
permissions: read-all
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
image_repo:
|
||||
required: false
|
||||
type: string
|
||||
image_tag:
|
||||
required: true
|
||||
type: string
|
||||
mega_service:
|
||||
required: true
|
||||
type: string
|
||||
runner_label:
|
||||
required: false
|
||||
type: string
|
||||
default: 'docker-build-xeon'
|
||||
outputs:
|
||||
image_repo:
|
||||
description: "The image repository used for the image build"
|
||||
value: ${{ jobs.mega-image-build.outputs.image_repo }}
|
||||
image_tag:
|
||||
description: "The image tag used for the image build"
|
||||
value: ${{ jobs.mega-image-build.outputs.image_tag }}
|
||||
|
||||
jobs:
|
||||
mega-image-build:
|
||||
runs-on: ${{ inputs.runner_label }}
|
||||
outputs:
|
||||
image_repo: ${{ steps.build-megaservice-image.outputs.image_repo }}
|
||||
image_tag: ${{ steps.build-megaservice-image.outputs.image_tag }}
|
||||
steps:
|
||||
- name: Get checkout ref
|
||||
run: |
|
||||
if [ "${{ github.event_name }}" == "pull_request" ] || [ "${{ github.event_name }}" == "pull_request_target" ]; then
|
||||
echo "CHECKOUT_REF=refs/pull/${{ github.event.number }}/merge" >> $GITHUB_ENV
|
||||
else
|
||||
echo "CHECKOUT_REF=${{ github.ref }}" >> $GITHUB_ENV
|
||||
fi
|
||||
echo "checkout ref ${{ env.CHECKOUT_REF }}"
|
||||
|
||||
- name: Checkout Repo
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ env.CHECKOUT_REF }}
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Building MegaService Docker Image
|
||||
id: build-megaservice-image
|
||||
env:
|
||||
IMAGE_REPO: ${{ inputs.image_repo }}
|
||||
IMAGE_TAG: ${{ inputs.image_tag }}
|
||||
MEGA_SERVICE: ${{ inputs.mega_service }}
|
||||
run: |
|
||||
.github/workflows/scripts/build_push.sh ${{ env.MEGA_SERVICE}}
|
||||
if [ -z "${{ env.IMAGE_REPO }}" ]; then
|
||||
IMAGE_REPO=$OPEA_IMAGE_REPO
|
||||
fi
|
||||
echo "IMAGE_TAG=${IMAGE_TAG}"
|
||||
echo "image_tag=$IMAGE_TAG" >> $GITHUB_OUTPUT
|
||||
72
.github/workflows/scripts/build_push.sh
vendored
@@ -1,72 +0,0 @@
|
||||
#!/bin/bash
|
||||
# Copyright (C) 2024 Intel Corporation
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
set -xe
|
||||
|
||||
IMAGE_REPO=${IMAGE_REPO:-$OPEA_IMAGE_REPO}
|
||||
IMAGE_TAG=${IMAGE_TAG:-latest}
|
||||
|
||||
function getImagenameFromMega() {
|
||||
echo $(echo "$1" | tr '[:upper:]' '[:lower:]')
|
||||
}
|
||||
|
||||
function checkExist() {
|
||||
IMAGE_NAME=$1
|
||||
if [ $(curl -X GET http://localhost:5000/v2/opea/${IMAGE_NAME}/tags/list | grep -c ${IMAGE_TAG}) -ne 0 ]; then
|
||||
echo "true"
|
||||
else
|
||||
echo "false"
|
||||
fi
|
||||
}
|
||||
|
||||
function docker_build() {
|
||||
# check if IMAGE_TAG is not "latest" and the image already exists in the registry
|
||||
if [ "$IMAGE_TAG" != "latest" ] && [ "$(checkExist $1)" == "true" ]; then
|
||||
echo "Image ${IMAGE_REPO}opea/$1:$IMAGE_TAG already exists in the registry"
|
||||
return
|
||||
fi
|
||||
# docker_build <service_name> <dockerfile>
|
||||
if [ -z "$2" ]; then
|
||||
DOCKERFILE_PATH=Dockerfile
|
||||
else
|
||||
DOCKERFILE_PATH=$2
|
||||
fi
|
||||
echo "Building ${IMAGE_REPO}opea/$1:$IMAGE_TAG using Dockerfile $DOCKERFILE_PATH"
|
||||
# if https_proxy and http_proxy are set, pass them to docker build
|
||||
if [ -z "$https_proxy" ]; then
|
||||
docker build --no-cache -t ${IMAGE_REPO}opea/$1:$IMAGE_TAG -f $DOCKERFILE_PATH .
|
||||
else
|
||||
docker build --no-cache -t ${IMAGE_REPO}opea/$1:$IMAGE_TAG --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f $DOCKERFILE_PATH .
|
||||
fi
|
||||
docker push ${IMAGE_REPO}opea/$1:$IMAGE_TAG
|
||||
docker rmi ${IMAGE_REPO}opea/$1:$IMAGE_TAG
|
||||
}
|
||||
|
||||
# $1 is like "apple orange pear"
|
||||
for MEGA_SVC in $1; do
|
||||
case $MEGA_SVC in
|
||||
"ChatQnA"|"CodeGen"|"CodeTrans"|"DocSum"|"Translation"|"AudioQnA"|"SearchQnA"|"FaqGen")
|
||||
cd $MEGA_SVC/docker
|
||||
IMAGE_NAME="$(getImagenameFromMega $MEGA_SVC)"
|
||||
docker_build ${IMAGE_NAME}
|
||||
cd ui
|
||||
docker_build ${IMAGE_NAME}-ui docker/Dockerfile
|
||||
if [ "$MEGA_SVC" == "ChatQnA" ];then
|
||||
docker_build ${IMAGE_NAME}-conversation-ui docker/Dockerfile.react
|
||||
fi
|
||||
if [ "$MEGA_SVC" == "DocSum" ];then
|
||||
docker_build ${IMAGE_NAME}-react-ui docker/Dockerfile.react
|
||||
fi
|
||||
if [ "$MEGA_SVC" == "CodeGen" ];then
|
||||
docker_build ${IMAGE_NAME}-react-ui docker/Dockerfile.react
|
||||
fi
|
||||
;;
|
||||
"VisualQnA")
|
||||
echo "Not supported yet"
|
||||
;;
|
||||
*)
|
||||
echo "Unknown function: $MEGA_SVC"
|
||||
;;
|
||||
esac
|
||||
done
|
||||
12
.github/workflows/scripts/codeScan/hadolint.sh
vendored
@@ -5,16 +5,24 @@
|
||||
|
||||
source /GenAIExamples/.github/workflows/scripts/change_color
|
||||
log_dir=/GenAIExamples/.github/workflows/scripts/codeScan
|
||||
ERROR_WARN=false
|
||||
|
||||
find . -type f \( -name "Dockerfile*" \) -print -exec hadolint --ignore DL3006 --ignore DL3007 --ignore DL3008 --ignore DL3013 {} \; 2>&1 | tee ${log_dir}/hadolint.log
|
||||
find . -type f \( -name "Dockerfile*" \) -print -exec hadolint --ignore DL3006 --ignore DL3007 --ignore DL3008 --ignore DL3013 {} \; > ${log_dir}/hadolint.log
|
||||
|
||||
if [[ $(grep -c "error" ${log_dir}/hadolint.log) != 0 ]]; then
|
||||
$BOLD_RED && echo "Error!! Please Click on the artifact button to download and check error details." && $RESET
|
||||
exit 1
|
||||
echo $(grep "error" ${log_dir}/hadolint.log)
|
||||
ERROR_WARN=true
|
||||
fi
|
||||
|
||||
if [[ $(grep -c "warning" ${log_dir}/hadolint.log) != 0 ]]; then
|
||||
$BOLD_RED && echo "Warning!! Please Click on the artifact button to download and check warning details." && $RESET
|
||||
echo $(grep "warning" ${log_dir}/hadolint.log)
|
||||
ERROR_WARN=true
|
||||
fi
|
||||
|
||||
if [ "$ERROR_WARN" = true ]; then
|
||||
echo $ERROR_WARN
|
||||
exit 1
|
||||
fi
|
||||
$BOLD_PURPLE && echo "Congratulations, Hadolint check passed!" && $LIGHT_PURPLE && echo " You can click on the artifact button to see the log details." && $RESET
|
||||
|
||||
37
.github/workflows/scripts/get_test_matrix.sh
vendored
Normal file
@@ -0,0 +1,37 @@
|
||||
#!/bin/bash
|
||||
# Copyright (C) 2024 Intel Corporation
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
# example: 'ChatQnA', 'CodeGen', ...
|
||||
# hardware: 'xeon', 'gaudi', ...
|
||||
|
||||
set -e
|
||||
changed_files=$changed_files
|
||||
test_mode=$test_mode
|
||||
run_matrix="{\"include\":["
|
||||
hardware_list="xeon gaudi" # current support hardware list
|
||||
|
||||
examples=$(printf '%s\n' "${changed_files[@]}" | grep '/' | cut -d'/' -f1 | sort -u)
|
||||
for example in ${examples}; do
|
||||
cd $WORKSPACE/$example
|
||||
if [[ ! $(find . -type f | grep ${test_mode}) ]]; then continue; fi
|
||||
|
||||
run_hardware=""
|
||||
if [[ $(printf '%s\n' "${changed_files[@]}" | grep ${example} | cut -d'/' -f2 | grep -E '*.py|Dockerfile*|ui|docker_image_build' ) ]]; then
|
||||
# run tests on all hardware if megaservice or ui code changes
|
||||
run_hardware=$hardware_list
|
||||
else
|
||||
for hardware in ${hardware_list}; do
|
||||
if [[ $(printf '%s\n' "${changed_files[@]}" | grep ${example} | grep -c ${hardware}) != 0 ]]; then
|
||||
run_hardware="${hardware} ${run_hardware}"
|
||||
fi
|
||||
done
|
||||
fi
|
||||
for hw in ${run_hardware}; do
|
||||
run_matrix="${run_matrix}{\"example\":\"${example}\",\"hardware\":\"${hw}\"},"
|
||||
done
|
||||
done
|
||||
|
||||
run_matrix=$run_matrix"]}"
|
||||
echo "run_matrix=${run_matrix}"
|
||||
echo "run_matrix=${run_matrix}" >> $GITHUB_OUTPUT
|
||||
71
.github/workflows/scripts/k8s-utils.sh
vendored
Executable file
@@ -0,0 +1,71 @@
|
||||
#!/bin/bash
|
||||
# Copyright (C) 2024 Intel Corporation
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
#set -xe
|
||||
|
||||
function dump_pod_log() {
|
||||
pod_name=$1
|
||||
namespace=$2
|
||||
echo "-----------Pod: $pod_name---------"
|
||||
echo "#kubectl describe pod $pod_name -n $namespace"
|
||||
kubectl describe pod $pod_name -n $namespace
|
||||
echo "-----------------------------------"
|
||||
echo "#kubectl logs $pod_name -n $namespace"
|
||||
kubectl logs $pod_name -n $namespace
|
||||
echo "-----------------------------------"
|
||||
}
|
||||
|
||||
function dump_pods_status() {
|
||||
namespace=$1
|
||||
echo "-----DUMP POD STATUS in NS $namespace------"
|
||||
kubectl get pods -n $namespace -o wide
|
||||
echo "-----------------------------------"
|
||||
|
||||
# Get all pods in the namespace and their statuses
|
||||
pods=$(kubectl get pods -n $namespace --no-headers)
|
||||
|
||||
# Loop through each pod
|
||||
echo "$pods" | while read -r line; do
|
||||
pod_name=$(echo $line | awk '{print $1}')
|
||||
ready=$(echo $line | awk '{print $2}')
|
||||
status=$(echo $line | awk '{print $3}')
|
||||
|
||||
# Extract the READY count
|
||||
ready_count=$(echo $ready | cut -d'/' -f1)
|
||||
required_count=$(echo $ready | cut -d'/' -f2)
|
||||
|
||||
# Check if the pod is not in "Running" status or READY count is less than required
|
||||
if [[ "$status" != "Running" || "$ready_count" -lt "$required_count" ]]; then
|
||||
dump_pod_log $pod_name $namespace
|
||||
fi
|
||||
done
|
||||
}
|
||||
|
||||
function dump_all_pod_logs() {
|
||||
namespace=$1
|
||||
echo "-----DUMP POD STATUS AND LOG in NS $namespace------"
|
||||
|
||||
pods=$(kubectl get pods -n $namespace -o jsonpath='{.items[*].metadata.name}')
|
||||
for pod_name in $pods
|
||||
do
|
||||
dump_pod_log $pod_name $namespace
|
||||
done
|
||||
}
|
||||
|
||||
if [ $# -eq 0 ]; then
|
||||
echo "Usage: $0 <function_name>"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
case "$1" in
|
||||
dump_pods_status)
|
||||
dump_pods_status $2
|
||||
;;
|
||||
dump_all_pod_logs)
|
||||
dump_all_pod_logs $2
|
||||
;;
|
||||
*)
|
||||
echo "Unknown function: $1"
|
||||
;;
|
||||
esac
|
||||
44
.github/workflows/scripts/update_images_tag.sh
vendored
Normal file
@@ -0,0 +1,44 @@
|
||||
#!/bin/bash
|
||||
|
||||
# Copyright (C) 2024 Intel Corporation
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
declare -A dict
|
||||
dict["ghcr.io/huggingface/text-generation-inference"]="docker://ghcr.io/huggingface/text-generation-inference:latest-intel-cpu"
|
||||
|
||||
function get_latest_version() {
|
||||
repo_image=$1
|
||||
if [[ $repo_image == *"huggingface"* ]]; then
|
||||
revision=$(skopeo inspect --config ${dict[$repo_image]} | jq -r '.config.Labels["org.opencontainers.image.revision"][:7]')
|
||||
latest_version="sha-$revision-intel-cpu"
|
||||
else
|
||||
versions=$(skopeo list-tags ${dict[$repo_image]} | jq -r '.Tags[]')
|
||||
printf "version list:\n$versions\n"
|
||||
latest_version=$(printf "%s\n" "${versions[@]}" | grep -E '^[\.0-9\-]+$' | sort -V | tail -n 1)
|
||||
fi
|
||||
echo "latest version: $latest_version"
|
||||
replace_image_version $repo_image $latest_version
|
||||
}
|
||||
|
||||
function replace_image_version() {
|
||||
repo_image=$1
|
||||
version=$2
|
||||
if [[ -z "$version" ]]; then
|
||||
echo "version is empty"
|
||||
else
|
||||
echo "replace $repo_image:tag with $repo_image:$version"
|
||||
find . -name "Dockerfile" | xargs sed -i "s|$repo_image:sha[A-Za-z0-9\-]*|$repo_image:$version|g"
|
||||
find . -name "*.yaml" | xargs sed -i "s|$repo_image:sha[A-Za-z0-9\-]*|$repo_image:$version|g"
|
||||
find . -name "*.md" | xargs sed -i "s|$repo_image:sha[A-Za-z0-9\-]*|$repo_image:$version|g"
|
||||
fi
|
||||
}
|
||||
|
||||
function main() {
|
||||
for repo_image in "${!dict[@]}"; do
|
||||
echo "::group::check $repo_image"
|
||||
get_latest_version $repo_image
|
||||
echo "::endgroup::"
|
||||
done
|
||||
}
|
||||
|
||||
main
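A hypothetical local dry run of this script (it needs skopeo and jq on the PATH), followed by a quick review of what it rewrote:
```
# Run the tag-update script from the repo root and review the rewritten tags.
bash .github/workflows/scripts/update_images_tag.sh
git diff --stat   # see which Dockerfiles, yaml and md files were touched
```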
|
||||
54
.github/workflows/weekly-update-images.yml
vendored
Normal file
@@ -0,0 +1,54 @@
|
||||
# Copyright (C) 2024 Intel Corporation
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
name: Weekly update base images and 3rd party images
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: "0 0 * * 0"
|
||||
workflow_dispatch:
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
|
||||
jobs:
|
||||
freeze-images:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
USER_NAME: "NeuralChatBot"
|
||||
USER_EMAIL: "grp_neural_chat_bot@intel.com"
|
||||
BRANCH_NAME: "update_images_tag"
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
ref: "main"
|
||||
|
||||
- name: Install skopeo
|
||||
run: |
|
||||
sudo apt update
|
||||
sudo apt -y install skopeo
|
||||
|
||||
- name: Set up Git
|
||||
run: |
|
||||
git config --global user.name ${{ env.USER_NAME }}
|
||||
git config --global user.email ${{ env.USER_EMAIL }}
|
||||
git remote set-url origin https://${{ env.USER_NAME }}:"${{ secrets.ACTION_TOKEN }}"@github.com/opea-project/GenAIExamples.git
|
||||
git checkout -b ${{ env.BRANCH_NAME }}
|
||||
|
||||
- name: Run script
|
||||
run: |
|
||||
bash .github/workflows/scripts/update_images_tag.sh
|
||||
|
||||
- name: Commit changes
|
||||
run: |
|
||||
git add .
|
||||
git commit -s -m "Update third party images tag"
|
||||
git push --set-upstream origin update_images_tag
|
||||
|
||||
- name: create pull request
|
||||
run: gh pr create -B main -H ${{ env.BRANCH_NAME }} --title 'Update ghcr.io/huggingface/text-generation-inference image tag' --body 'Created by Github action'
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.ACTION_TOKEN }}
|
||||
@@ -14,9 +14,9 @@ repos:
|
||||
- id: check-json
|
||||
exclude: |
|
||||
(?x)^(
|
||||
ChatQnA/docker/ui/svelte/tsconfig.json|
|
||||
ChatQnA/ui/svelte/tsconfig.json|
|
||||
SearchQnA/ui/svelte/tsconfig.json|
|
||||
DocSum/docker/ui/svelte/tsconfig.json
|
||||
DocSum/ui/svelte/tsconfig.json
|
||||
)$
|
||||
- id: check-yaml
|
||||
args: [--allow-multiple-documents]
|
||||
|
||||
106
AgentQnA/README.md
Normal file
@@ -0,0 +1,106 @@
|
||||
# Agents for Question Answering
|
||||
|
||||
## Overview
|
||||
|
||||
This example showcases a hierarchical multi-agent system for question-answering applications. The architecture diagram is shown below. The supervisor agent interfaces with the user and dispatches tasks to the worker agent and other tools to gather information and come up with answers. The worker agent uses the retrieval tool to generate answers to the queries posted by the supervisor agent. Other tools used by the supervisor agent may include APIs to interface with knowledge graphs, SQL databases, external knowledge bases, etc.
|
||||

|
||||
|
||||
### Why agents for question answering?
|
||||
|
||||
1. Improve the relevancy of retrieved context.
An agent can rephrase user queries, decompose user queries, and iterate to get the most relevant context for answering the user's questions. Compared to conventional RAG, a RAG agent can significantly improve the correctness and relevancy of the answer.
|
||||
2. Use tools to get additional knowledge.
|
||||
For example, knowledge graphs and SQL databases can be exposed as APIs for agents to gather knowledge that may be missing from the retrieval vector database.
|
||||
3. Hierarchical agents can further improve performance.
Expert worker agents, such as a retrieval agent, a knowledge graph agent, or a SQL agent, can provide high-quality output for different aspects of a complex query, and the supervisor agent can aggregate that information to provide a comprehensive answer.
|
||||
|
||||
### Roadmap
|
||||
|
||||
- v0.9: Worker agent uses an open-source web search tool (DuckDuckGo); agents use OpenAI GPT-4o-mini as the LLM backend.
- v1.0: Worker agent uses the OPEA retrieval megaservice as a tool.
- v1.0 or later: agents use an open-source LLM backend.
- v1.1 or later: add safeguards
|
||||
|
||||
## Getting started
|
||||
|
||||
1. Build agent docker image </br>
|
||||
First, clone the OPEA GenAIComps repo:
|
||||
|
||||
```
|
||||
export WORKDIR=<your-work-directory>
|
||||
cd $WORKDIR
|
||||
git clone https://github.com/opea-project/GenAIComps.git
|
||||
```
|
||||
|
||||
Then build the agent docker image. Both the supervisor agent and the worker agent will use the same docker image, but when we launch the two agents we will specify different strategies and register different tools.
|
||||
|
||||
```
|
||||
cd GenAIComps
|
||||
docker build -t opea/agent-langchain:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/agent/langchain/Dockerfile .
|
||||
```
|
||||
|
||||
2. Launch tool services </br>
|
||||
In this example, we will use some of the mock APIs provided in the Meta CRAG KDD Challenge to demonstrate the benefits of gaining additional context from mock knowledge graphs.
|
||||
|
||||
```
|
||||
docker run -d -p=8080:8000 docker.io/aicrowd/kdd-cup-24-crag-mock-api:v0
|
||||
```
|
||||
|
||||
3. Set up environment for this example </br>
|
||||
First, clone this repo
|
||||
|
||||
```
|
||||
cd $WORKDIR
|
||||
git clone https://github.com/opea-project/GenAIExamples.git
|
||||
```
|
||||
|
||||
Second, set up env vars
|
||||
|
||||
```
|
||||
export TOOLSET_PATH=$WORKDIR/GenAIExamples/AgentQnA/tools/
|
||||
# optional: OPENAI_API_KEY
|
||||
export OPENAI_API_KEY=<your-openai-key>
|
||||
```
|
||||
|
||||
4. Launch agent services</br>
|
||||
The configurations of the supervisor agent and the worker agent are defined in the docker compose yaml file. We currently use OpenAI GPT-4o-mini as the LLM, and we plan to add support for llama3.1-70B-instruct (served by TGI-Gaudi) in a subsequent release.
To use the OpenAI LLM, run the commands below.
|
||||
|
||||
```
|
||||
cd docker_compose/intel/cpu/xeon
|
||||
bash launch_agent_service_openai.sh
|
||||
```
|
||||
|
||||
## Validate services
|
||||
|
||||
First, look at the logs of the agent docker containers:
|
||||
|
||||
```
|
||||
docker logs docgrader-agent-endpoint
|
||||
```
|
||||
|
||||
```
|
||||
docker logs react-agent-endpoint
|
||||
```
|
||||
|
||||
You should see something like "HTTP server setup successful" if the docker containers started successfully.
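For a non-interactive check, you can grep the logs for the message quoted above (the exact wording may differ slightly between versions):
```
docker logs docgrader-agent-endpoint 2>&1 | grep -q "HTTP server setup successful" && echo "worker agent is up"
docker logs react-agent-endpoint 2>&1 | grep -q "HTTP server setup successful" && echo "supervisor agent is up"
```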
|
||||
|
||||
Second, validate the worker agent:
|
||||
|
||||
```
|
||||
curl http://${ip_address}:9095/v1/chat/completions -X POST -H "Content-Type: application/json" -d '{
|
||||
"query": "Most recent album by Taylor Swift"
|
||||
}'
|
||||
```
|
||||
|
||||
Third, validate the supervisor agent:
|
||||
|
||||
```
|
||||
curl http://${ip_address}:9090/v1/chat/completions -X POST -H "Content-Type: application/json" -d '{
|
||||
"query": "Most recent album by Taylor Swift"
|
||||
}'
|
||||
```
|
||||
|
||||
## How to register your own tools with agent
|
||||
|
||||
You can take a look at the tools yaml and python files in this example. For more details, please refer to the "Provide your own tools" section in the instructions [here](https://github.com/opea-project/GenAIComps/tree/main/comps/agent/langchain#5-customize-agent-strategy).
|
||||
BIN
AgentQnA/assets/agent_qna_arch.png
Normal file
After Width: | Height: | Size: 69 KiB
65
AgentQnA/docker_compose/intel/cpu/xeon/compose_openai.yaml
Normal file
@@ -0,0 +1,65 @@
|
||||
# Copyright (C) 2024 Intel Corporation
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
services:
|
||||
worker-docgrader-agent:
|
||||
image: opea/agent-langchain:latest
|
||||
container_name: docgrader-agent-endpoint
|
||||
volumes:
|
||||
- ${WORKDIR}/GenAIComps/comps/agent/langchain/:/home/user/comps/agent/langchain/
|
||||
- ${TOOLSET_PATH}:/home/user/tools/
|
||||
ports:
|
||||
- "9095:9095"
|
||||
ipc: host
|
||||
environment:
|
||||
ip_address: ${ip_address}
|
||||
strategy: rag_agent
|
||||
recursion_limit: ${recursion_limit_worker}
|
||||
llm_engine: openai
|
||||
OPENAI_API_KEY: ${OPENAI_API_KEY}
|
||||
model: ${model}
|
||||
temperature: ${temperature}
|
||||
max_new_tokens: ${max_new_tokens}
|
||||
streaming: false
|
||||
tools: /home/user/tools/worker_agent_tools.yaml
|
||||
require_human_feedback: false
|
||||
RETRIEVAL_TOOL_URL: ${RETRIEVAL_TOOL_URL}
|
||||
no_proxy: ${no_proxy}
|
||||
http_proxy: ${http_proxy}
|
||||
https_proxy: ${https_proxy}
|
||||
LANGCHAIN_API_KEY: ${LANGCHAIN_API_KEY}
|
||||
LANGCHAIN_TRACING_V2: ${LANGCHAIN_TRACING_V2}
|
||||
LANGCHAIN_PROJECT: "opea-worker-agent-service"
|
||||
port: 9095
|
||||
|
||||
|
||||
supervisor-react-agent:
|
||||
image: opea/agent-langchain:latest
|
||||
container_name: react-agent-endpoint
|
||||
volumes:
|
||||
- ${WORKDIR}/GenAIComps/comps/agent/langchain/:/home/user/comps/agent/langchain/
|
||||
- ${TOOLSET_PATH}:/home/user/tools/
|
||||
ports:
|
||||
- "9090:9090"
|
||||
ipc: host
|
||||
environment:
|
||||
ip_address: ${ip_address}
|
||||
strategy: react_langgraph
|
||||
recursion_limit: ${recursion_limit_supervisor}
|
||||
llm_engine: openai
|
||||
OPENAI_API_KEY: ${OPENAI_API_KEY}
|
||||
model: ${model}
|
||||
temperature: ${temperature}
|
||||
max_new_tokens: ${max_new_tokens}
|
||||
streaming: false
|
||||
tools: /home/user/tools/supervisor_agent_tools.yaml
|
||||
require_human_feedback: false
|
||||
no_proxy: ${no_proxy}
|
||||
http_proxy: ${http_proxy}
|
||||
https_proxy: ${https_proxy}
|
||||
LANGCHAIN_API_KEY: ${LANGCHAIN_API_KEY}
|
||||
LANGCHAIN_TRACING_V2: ${LANGCHAIN_TRACING_V2}
|
||||
LANGCHAIN_PROJECT: "opea-supervisor-agent-service"
|
||||
CRAG_SERVER: $CRAG_SERVER
|
||||
WORKER_AGENT_URL: $WORKER_AGENT_URL
|
||||
port: 9090
|
||||
@@ -0,0 +1,16 @@
|
||||
# Copyright (C) 2024 Intel Corporation
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
export TOOLSET_PATH=$WORKDIR/GenAIExamples/AgentQnA/tools/
|
||||
export ip_address=$(hostname -I | awk '{print $1}')
|
||||
export recursion_limit_worker=12
|
||||
export recursion_limit_supervisor=10
|
||||
export model="gpt-4o-mini-2024-07-18"
|
||||
export temperature=0
|
||||
export max_new_tokens=512
|
||||
export OPENAI_API_KEY=${OPENAI_API_KEY}
|
||||
export WORKER_AGENT_URL="http://${ip_address}:9095/v1/chat/completions"
|
||||
export RETRIEVAL_TOOL_URL="http://${ip_address}:8889/v1/retrievaltool"
|
||||
export CRAG_SERVER=http://${ip_address}:8080
|
||||
|
||||
docker compose -f compose_openai.yaml up -d
|
||||
98
AgentQnA/docker_compose/intel/hpu/gaudi/compose.yaml
Normal file
@@ -0,0 +1,98 @@
|
||||
# Copyright (C) 2024 Intel Corporation
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
services:
|
||||
tgi-server:
|
||||
image: ghcr.io/huggingface/tgi-gaudi:2.0.5
|
||||
container_name: tgi-server
|
||||
ports:
|
||||
- "8085:80"
|
||||
volumes:
|
||||
- ${HF_CACHE_DIR}:/data
|
||||
environment:
|
||||
no_proxy: ${no_proxy}
|
||||
http_proxy: ${http_proxy}
|
||||
https_proxy: ${https_proxy}
|
||||
HUGGING_FACE_HUB_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
|
||||
HF_HUB_DISABLE_PROGRESS_BARS: 1
|
||||
HF_HUB_ENABLE_HF_TRANSFER: 0
|
||||
HABANA_VISIBLE_DEVICES: all
|
||||
OMPI_MCA_btl_vader_single_copy_mechanism: none
|
||||
PT_HPU_ENABLE_LAZY_COLLECTIVES: true
|
||||
ENABLE_HPU_GRAPH: true
|
||||
LIMIT_HPU_GRAPH: true
|
||||
USE_FLASH_ATTENTION: true
|
||||
FLASH_ATTENTION_RECOMPUTE: true
|
||||
runtime: habana
|
||||
cap_add:
|
||||
- SYS_NICE
|
||||
ipc: host
|
||||
command: --model-id ${LLM_MODEL_ID} --max-input-length 4096 --max-total-tokens 8192 --sharded true --num-shard ${NUM_SHARDS}
|
||||
worker-docgrader-agent:
|
||||
image: opea/agent-langchain:latest
|
||||
container_name: docgrader-agent-endpoint
|
||||
depends_on:
|
||||
- tgi-server
|
||||
volumes:
|
||||
# - ${WORKDIR}/GenAIExamples/AgentQnA/docker_image_build/GenAIComps/comps/agent/langchain/:/home/user/comps/agent/langchain/
|
||||
- ${TOOLSET_PATH}:/home/user/tools/
|
||||
ports:
|
||||
- "9095:9095"
|
||||
ipc: host
|
||||
environment:
|
||||
ip_address: ${ip_address}
|
||||
strategy: rag_agent
|
||||
recursion_limit: ${recursion_limit_worker}
|
||||
llm_engine: tgi
|
||||
HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
|
||||
llm_endpoint_url: ${LLM_ENDPOINT_URL}
|
||||
model: ${LLM_MODEL_ID}
|
||||
temperature: ${temperature}
|
||||
max_new_tokens: ${max_new_tokens}
|
||||
streaming: false
|
||||
tools: /home/user/tools/worker_agent_tools.yaml
|
||||
require_human_feedback: false
|
||||
RETRIEVAL_TOOL_URL: ${RETRIEVAL_TOOL_URL}
|
||||
no_proxy: ${no_proxy}
|
||||
http_proxy: ${http_proxy}
|
||||
https_proxy: ${https_proxy}
|
||||
LANGCHAIN_API_KEY: ${LANGCHAIN_API_KEY}
|
||||
LANGCHAIN_TRACING_V2: ${LANGCHAIN_TRACING_V2}
|
||||
LANGCHAIN_PROJECT: "opea-worker-agent-service"
|
||||
port: 9095
|
||||
|
||||
|
||||
supervisor-react-agent:
|
||||
image: opea/agent-langchain:latest
|
||||
container_name: react-agent-endpoint
|
||||
depends_on:
|
||||
- tgi-server
|
||||
- worker-docgrader-agent
|
||||
volumes:
|
||||
# - ${WORKDIR}/GenAIExamples/AgentQnA/docker_image_build/GenAIComps/comps/agent/langchain/:/home/user/comps/agent/langchain/
|
||||
- ${TOOLSET_PATH}:/home/user/tools/
|
||||
ports:
|
||||
- "9090:9090"
|
||||
ipc: host
|
||||
environment:
|
||||
ip_address: ${ip_address}
|
||||
strategy: react_langgraph
|
||||
recursion_limit: ${recursion_limit_supervisor}
|
||||
llm_engine: tgi
|
||||
HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
|
||||
llm_endpoint_url: ${LLM_ENDPOINT_URL}
|
||||
model: ${LLM_MODEL_ID}
|
||||
temperature: ${temperature}
|
||||
max_new_tokens: ${max_new_tokens}
|
||||
streaming: false
|
||||
tools: /home/user/tools/supervisor_agent_tools.yaml
|
||||
require_human_feedback: false
|
||||
no_proxy: ${no_proxy}
|
||||
http_proxy: ${http_proxy}
|
||||
https_proxy: ${https_proxy}
|
||||
LANGCHAIN_API_KEY: ${LANGCHAIN_API_KEY}
|
||||
LANGCHAIN_TRACING_V2: ${LANGCHAIN_TRACING_V2}
|
||||
LANGCHAIN_PROJECT: "opea-supervisor-agent-service"
|
||||
CRAG_SERVER: $CRAG_SERVER
|
||||
WORKER_AGENT_URL: $WORKER_AGENT_URL
|
||||
port: 9090
|
||||
@@ -0,0 +1,43 @@
|
||||
# Copyright (C) 2024 Intel Corporation
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
WORKPATH=$(dirname "$PWD")/..
|
||||
# export WORKDIR=$WORKPATH/../../
|
||||
echo "WORKDIR=${WORKDIR}"
|
||||
export ip_address=$(hostname -I | awk '{print $1}')
|
||||
export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN}
|
||||
|
||||
# LLM related environment variables
|
||||
export HF_CACHE_DIR=${HF_CACHE_DIR}
|
||||
ls $HF_CACHE_DIR
|
||||
export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN}
|
||||
export LLM_MODEL_ID="meta-llama/Meta-Llama-3.1-70B-Instruct"
|
||||
export NUM_SHARDS=4
|
||||
export LLM_ENDPOINT_URL="http://${ip_address}:8085"
|
||||
export temperature=0.01
|
||||
export max_new_tokens=512
|
||||
|
||||
# agent related environment variables
|
||||
export TOOLSET_PATH=$WORKDIR/GenAIExamples/AgentQnA/tools/
|
||||
echo "TOOLSET_PATH=${TOOLSET_PATH}"
|
||||
export recursion_limit_worker=12
|
||||
export recursion_limit_supervisor=10
|
||||
export WORKER_AGENT_URL="http://${ip_address}:9095/v1/chat/completions"
|
||||
export RETRIEVAL_TOOL_URL="http://${ip_address}:8889/v1/retrievaltool"
|
||||
export CRAG_SERVER=http://${ip_address}:8080
|
||||
|
||||
docker compose -f compose.yaml up -d
|
||||
|
||||
sleep 5s
|
||||
echo "Waiting tgi gaudi ready"
|
||||
n=0
|
||||
until [[ "$n" -ge 100 ]] || [[ $ready == true ]]; do
|
||||
docker logs tgi-server &> tgi-gaudi-service.log
|
||||
n=$((n+1))
|
||||
if grep -q Connected tgi-gaudi-service.log; then
|
||||
break
|
||||
fi
|
||||
sleep 5s
|
||||
done
|
||||
sleep 5s
|
||||
echo "Service started successfully"
|
||||
13
AgentQnA/docker_image_build/build.yaml
Normal file
@@ -0,0 +1,13 @@
|
||||
# Copyright (C) 2024 Intel Corporation
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
services:
|
||||
agent-langchain:
|
||||
build:
|
||||
context: GenAIComps
|
||||
dockerfile: comps/agent/langchain/Dockerfile
|
||||
args:
|
||||
http_proxy: ${http_proxy}
|
||||
https_proxy: ${https_proxy}
|
||||
no_proxy: ${no_proxy}
|
||||
image: ${REGISTRY:-opea}/agent-langchain:${TAG:-latest}
|
||||
27
AgentQnA/example_data/test_docs_music.jsonl
Normal file
@@ -0,0 +1,27 @@
|
||||
{"query": "who sang the hit song \"thriller\"?", "domain": "music", "doc": "Thriller (song) - Wikipedia\nJump to content\nMain menu\nMain menu\nmove to sidebar\nhide\nNavigation\nMain pageContentsCurrent eventsRandom articleAbout WikipediaContact usDonate\nContribute\nHelpLearn to editCommunity portalRecent changesUpload file\nSearch\nSearch\nCreate account\nLog in\nPersonal tools\nCreate account Log in\nPages for logged out editors learn more\nContributionsTalk\nContents\nmove to sidebar\nhide\n(Top)\n1Composition\n2Writing\n3Recording\n4Release\n5Music video\n6Chart performance\n7Critical reception\n8Personnel\n9Charts\nToggle Charts subsection\n9.1Weekly charts\n9.2Year-end charts\n10Certifications\n11See also\n12References\nToggle the table of contents\nThriller (song)\n33 languages\n\u0627\u0644\u0639\u0631\u0628\u064a\u0629Az\u0259rbaycancaDanskDeutsch\u0395\u03bb\u03bb\u03b7\u03bd\u03b9\u03ba\u03acEspa\u00f1ol\u0641\u0627\u0631\u0633\u06ccFran\u00e7aisGalego\ud55c\uad6d\uc5b4HrvatskiItaliano\u05e2\u05d1\u05e8\u05d9\u05ea\u10e5\u10d0\u10e0\u10d7\u10e3\u10da\u10d8KiswahiliMagyar\u0d2e\u0d32\u0d2f\u0d3e\u0d33\u0d02Nederlands\u65e5\u672c\u8a9eNorsk bokm\u00e5lPolskiPortugu\u00eas\u0420\u0443\u0441\u0441\u043a\u0438\u0439ShqipSimple English\u0421\u0440\u043f\u0441\u043a\u0438 / srpskiSuomiSvenska\u0ba4\u0bae\u0bbf\u0bb4\u0bcd\u0e44\u0e17\u0e22T\u00fcrk\u00e7e\u0423\u043a\u0440\u0430\u0457\u043d\u0441\u044c\u043a\u0430Ti\u1ebfng Vi\u1ec7t\nEdit links\nArticleTalk\nEnglish\nReadEditView history\nTools\nTools\nmove to sidebar\nhide\nActions\nReadEditView history\nGeneral\nWhat links hereRelated changesUpload fileSpecial pagesPermanent linkPage informationCite this pageGet shortened URLDownload QR codeWikidata item\nPrint/export\nDownload as PDFPrintable version\nFrom Wikipedia, the free encyclopedia\n1983 single by Michael Jackson\nFor other songs, see Thriller (disambiguation) \u00a7\u00a0Music.\n\"Thriller\"US 12-inch singleSingle by Michael Jacksonfrom the album\nThriller B-side\"Things I Do for You\"Released\nNovember\u00a01983\u00a0(1983-11) (UK)[1]\nJanuary\u00a023,\u00a01984\u00a0(1984-01-23) (US)[2]\nRecorded1982StudioWestlake (Los Angeles, California)Genre\nDisco\nfunk\nLength\n5:57 (album version)\n4:37 (special edit)\n4:05 (remixed short version)\n5:04 (\"Starlight\" version)\nLabelEpicSongwriter(s)Rod TempertonProducer(s)Quincy JonesMichael Jackson singles chronology\n\"Say Say Say\" (1983)\n\"Thriller\" (1983)\n\"Farewell My Summer Love\" (1984)\nMusic video\"Thriller\" on YouTube"}
|
||||
{"query": "who sang the hit song \"thriller\"?", "domain": "music", "doc": "Recorded1982StudioWestlake (Los Angeles, California)Genre\nDisco\nfunk\nLength\n5:57 (album version)\n4:37 (special edit)\n4:05 (remixed short version)\n5:04 (\"Starlight\" version)\nLabelEpicSongwriter(s)Rod TempertonProducer(s)Quincy JonesMichael Jackson singles chronology\n\"Say Say Say\" (1983)\n\"Thriller\" (1983)\n\"Farewell My Summer Love\" (1984)\nMusic video\"Thriller\" on YouTube\n\"Thriller\" is a song by the American singer Michael Jackson. It was released by Epic Records in November 1983 in the UK and on January 23, 1984, in the US, as the seventh and final single from his sixth studio album, Thriller.[3]\n\"Thriller\" is a funk song featuring a repeating synthesizer bassline and lyrics and sound effects evoking horror films. It ends with a spoken-word sequence performed by the horror actor Vincent Price. It was produced by Quincy Jones and written by Rod Temperton, who wanted to write a theatrical song to suit Jackson's love of film.\nJackson decided to release \"Thriller\" as a single after Thriller left the top of the Billboard 200 chart. The\n\"Thriller\" music video, directed by John Landis, has Jackson dancing with a horde of zombies. It has been named the greatest music video of all time by various publications and readers' polls, and doubled sales of Thriller, helping it become the best-selling album in history."}
|
||||
{"query": "who sang the hit song \"thriller\"?", "domain": "music", "doc": "Jackson decided to release \"Thriller\" as a single after Thriller left the top of the Billboard 200 chart. The\n\"Thriller\" music video, directed by John Landis, has Jackson dancing with a horde of zombies. It has been named the greatest music video of all time by various publications and readers' polls, and doubled sales of Thriller, helping it become the best-selling album in history.\nIt was the album's seventh top-ten single on the Billboard Hot 100, reaching number four. It reached number one in Belgium, France and Spain, and the top ten in many other countries. In the week of Jackson's death in 2009, it was Jackson's bestselling track in the US, with sales of 167,000 copies on the Billboard Hot Digital Tracks chart. It entered the Billboard Hot Digital Singles Chart at number two, and remained in the charts' top ten for three consecutive weeks. \"Thriller\" is certified Diamond by the Recording Industry Association of America. It appears on several of Jackson's greatest-hits albums and has been covered by numerous artists. The song has returned to the Billboard Hot 100 chart multiple times due to its popularity around Halloween.\nComposition[edit]\n\"Thriller\"\nJackson's song \"Thriller\", released as a single in 1984; Nelson George wrote that it uses cinematic sound effects, horror film motifs, and vocal trickery to convey a sense of danger.[4]\nProblems playing this file? See media help.\n\"Thriller\" is a disco-funk song[5] The introduction features sound effects such as a creaking door, thunder, feet walking on wooden planks, winds and howling wolves.[6]\nWriting[edit]\nHorror actor Vincent Price provided the spoken-word sequence at the end of \"Thriller\"."}
|
||||
{"query": "who sang the hit song \"thriller\"?", "domain": "music", "doc": "Problems playing this file? See media help.\n\"Thriller\" is a disco-funk song[5] The introduction features sound effects such as a creaking door, thunder, feet walking on wooden planks, winds and howling wolves.[6]\nWriting[edit]\nHorror actor Vincent Price provided the spoken-word sequence at the end of \"Thriller\".\n\"Thriller\" was written by the English songwriter Rod Temperton, who had previously written \"Rock with You\" and \"Off the Wall\" for Jackson's 1979 album Off the Wall.[7] Temperton wanted to write something theatrical to suit Jackson's love of film.[8] He improvised with bass and drum patterns until he developed the bassline that runs through the song, then wrote a chord progression that built to a climax.[8] He recalled: \"I wanted it to build and build \u2013 a bit like stretching an elastic band throughout the tune to heighten suspense.\"[8]\nTemperton's first version was titled \"Starlight\", with the chorus lyric: \"Give me some starlight / Starlight sun\".[9] The production team, led by Quincy Jones, felt the song should be the title track, but that \"Starlight\" was not a strong album title. Instead, they wanted something \"mysterious\" to match Jackson's \"evolving persona\".[8] Temperton considered several titles, including \"Midnight Man\", which Jones felt was \"going in the right direction\". Finally, he conceived \"Thriller\", but worried that it was \"a crap word to sing ... It sounded terrible! However, we got Michael to spit it into the microphone a few times and it worked.\"[8]\nWith the title decided, Temperton wrote lyrics within \"a couple of hours\".[8] He envisioned a spoken-word sequence for the ending, but did not know what form it should take. It was decided to have a famous voice from the horror genre perform it, and Jones' then-wife, Peggy Lipton, suggested her friend Vincent Price.[6] Temperton composed the words for Price's part in a taxi on the way to the studio on the day of recording.[6]\nRecording[edit]\nQuincy Jones produced \"Thriller\"."}
|
||||
{"query": "who sang the hit song \"thriller\"?", "domain": "music", "doc": "Recording[edit]\nQuincy Jones produced \"Thriller\".\nAlong with the rest of the album, \"Thriller\" was recorded over eight weeks in 1982.[10] It was recorded at Westlake Recording Studios on Santa Monica Boulevard in Los Angeles, California.[6] The engineer Bruce Swedien had Jackson record his vocals in different approaches, doubling takes and recording at different distances from the microphone. Some background vocals were recorded in the Westlake shower stall.[6]\nThe bassline was performed on an ARP 2600 synthesizer, and the verse pads were performed on a Roland Jupiter-8 layered with a Sequential Circuits Prophet 5 and a Yamaha CS-80.[11] The percussion was created with a LinnDrum drum machine modified with sound chips from two other drum machines: a snare hi-hat and congas from an LM-1 and a clap from a TR-808. \"Thriller\" also features Rhodes piano performed by Greg Phillinganes and guitar performed by David Williams.[12]\nTo record the wolf howls, Swedien set up tape recorders up around his Great Dane in a barn overnight, but the dog never howled. Instead, Jackson recorded the howls himself.[13] For the creaking doors, Swedien rented doors designed for sound effects from the Universal Studios Lot and recorded the hinges.[13] Price recorded his part in two takes; Jones, acknowledging that doing a voice-over for a song is difficult, praised Price and described his takes as \"fabulous\".[6]\nRelease[edit]\nThe album Thriller was released in November 1982 on Epic Records and spent months at the top of the Billboard 200.[14] \"Thriller\" was not initially planned for release as a single, as Epic saw it as a novelty song.[15] The Epic executive Walter Yetnikoff asked: \"Who wants a single about monsters?\"[14]"}
|
||||
{"query": "who sang the hit song \"thriller\"?", "domain": "music", "doc": "Release[edit]\nThe album Thriller was released in November 1982 on Epic Records and spent months at the top of the Billboard 200.[14] \"Thriller\" was not initially planned for release as a single, as Epic saw it as a novelty song.[15] The Epic executive Walter Yetnikoff asked: \"Who wants a single about monsters?\"[14]\nBy mid-1983, sales of the album had begun to decline. Jackson, who was \"obsessive\" about his sales figures,[14] urged Yetnikoff and another Epic executive, Larry Stessel, to help conceive a plan to return the album to the top of the charts. Jackson's manager Frank DiLeo suggested releasing \"Thriller\", backed by a new music video.[14][16] It was the final single from the album, released in January 1984.[15]\nAlternative versions of \"Thriller\", including the \"Starlight\" demo, were released on the anniversary reissue Thriller 40 (2022).[17]\nMusic video[edit]\nMain article: Michael Jackson's Thriller (music video)\nThe music video for \"Thriller\" references numerous horror films,[14] and stars Jackson performing a dance routine with a horde of the undead.[14] It was directed by the horror director John Landis and written by Landis and Jackson. Jackson contacted Landis after seeing his film An American Werewolf in London. The pair conceived a 13-minute short film with a budget much larger than previous music videos. Jackson's record company refused to finance it, believing Thriller had peaked, so a making-of documentary, Making Michael Jackson's Thriller, was produced to receive financing from television networks.[14]\nMichael Jackson's Thriller premiered on MTV on December 2, 1983.[18] It was launched to great anticipation and played regularly on MTV.[18]\nIt doubled sales of Thriller, and the documentary sold over a million copies, becoming the best-selling videotape at the time.[14] It is credited for transforming music videos into a serious art form, breaking down racial barriers in popular entertainment, and popularizing the making-of documentary format.[19]"}
|
||||
{"query": "who sang the hit song \"thriller\"?", "domain": "music", "doc": "It doubled sales of Thriller, and the documentary sold over a million copies, becoming the best-selling videotape at the time.[14] It is credited for transforming music videos into a serious art form, breaking down racial barriers in popular entertainment, and popularizing the making-of documentary format.[19]\nMany elements have had a lasting impact on popular culture, such as the zombie dance and Jackson's red jacket, designed by Landis' wife Deborah Nadoolman.[19] Fans worldwide re-enact its zombie dance and it remains popular on YouTube. The Library of Congress described it as \"the most famous music video of all time\". In 2009, it became the first music video inducted into the National Film Registry as \"culturally, historically or aesthetically\" significant.[14]\nChart performance[edit]\n\"Thriller\" entered the Billboard Hot 100 charts at number 20.[20] It reached number seven the following week,[21] number five the next, and peaked the next week at number four, where it stayed for two weeks.[22][23] It finished as the #78 single on Billboard's Hot 100 for the 1984.[24]\n\"Thriller\" charted at number 19 on the Hot R&B/Hip-Hop Songs Chart.[25] On March 10, 1984, it reached its peak at number 3.[26] \"Thriller\" debuted on the UK Singles Chart on November 19, 1983, at number 24, and the following week peaked at number ten; it appeared on the chart for 52 weeks.[27] Beginning on February 5, 1984, \"Thriller\" peaked on the French Singles Chart at number one and topped the chart for four consecutive weeks.[28] \"Thriller\" also topped the Belgian VRT Top 30 Chart for two weeks in January 1984.[29]"}
|
||||
{"query": "who sang the hit song \"thriller\"?", "domain": "music", "doc": "Following Jackson's death in 2009, his music surged in popularity.[30] In the week of his death, \"Thriller\" was Jackson's best-selling track in the US, with sales of 167,000 copies on the Billboard Hot Digital Singles Chart.[30] On July 11, 2009, \"Thriller\" charted on the Billboard Hot Digital Singles Chart at number two (its peak), and the song remained in the charts' top ten for three consecutive weeks.[31] In the United Kingdom, the song charted at number 23 the week of Jackson's death.[32] The following week, the song reached its peak at number 12 on the UK Single Chart.[27] On July 12, 2009, \"Thriller\" peaked at number two on the Italian Singles Chart[33] and was later certified gold by the Federation of the Italian Music Industry.[34] \"Thriller\" reached at number three on the Australian ARIA Chart and Swiss Singles Chart and topped the Spanish Singles Charts for one week.[35] The song also placed within the top ten on the German Singles Chart, Norwegian Singles Chart and Irish Singles Chart, at number nine, number seven and number eight respectively.[35] \"Thriller\" also landed at number 25 on the Danish Singles Chart.[36] In the third week of July \"Thriller\" peaked at number 11 in Finland.[37]\n\"Thriller\"\nhas returned to the Billboard Hot 100 chart multiple times due to its popularity around Halloween. It re-entered the Billboard Hot 100 in October 2013 at number 42,[38] number 31 in November 2018,[39] and number 19 in November 2021, its highest placement since 1984.[40] This gave Jackson at least one top-20 hit across seven consecutive decades from 1969 on the Billboard Hot 100.[40]"}
|
||||
{"query": "who sang the hit song \"thriller\"?", "domain": "music", "doc": "\"Thriller\"\nhas returned to the Billboard Hot 100 chart multiple times due to its popularity around Halloween. It re-entered the Billboard Hot 100 in October 2013 at number 42,[38] number 31 in November 2018,[39] and number 19 in November 2021, its highest placement since 1984.[40] This gave Jackson at least one top-20 hit across seven consecutive decades from 1969 on the Billboard Hot 100.[40]\n\"Thriller\" was certified platinum by the Recording Industry Association of America on December 4, 1989, for sales of over one million physical units in the US[41][42] As of August 2016, the song had sold 4,024,398 copies in the US.[43] The song was later certified Diamond by RIAA for sales over 10 million equivalent-units.[44][45] \"Thriller\" reached number one on three different Billboard charts the week of November 8, 2023, more than a decade after Jackson's death. Those charts included: R&B/Hip-Hop Streaming Songs, R&B Streaming Songs and R&B Digital Song Sales charts.[46]\nCritical reception[edit]\nAshley Lasimone, of AOL's Spinner.com, noted that it \"became a signature for Jackson\" and described \"the groove of its bassline, paired with Michael's killer vocals and sleek moves\" as having \"produced a frighteningly great single.\"[47] Jon Pareles of The New York Times noted that \"'Billie Jean', 'Beat It', 'Wanna Be Startin' Somethin' ' and \"the movie in the song 'Thriller'\", were the songs, unlike the \"fluff\" \"P.Y.T.\", that were \"the hits that made Thriller a world-beater; along with Mr. Jackson's stage and video presence, listeners must have identified with his willingness to admit terror.\"[48] Ann Powers of the Los Angeles Times described \"Thriller\" as \"adequately groovy\" with a \"funked-out beat\" and lyrics \"seemingly lifted from some little kid's 'scary storybook'\".[49][50]\nPersonnel[edit]\nWritten and composed by Rod Temperton\nProduced by Quincy Jones\nMichael Jackson: lead and background vocals, LinnDrum drum machine\nRod Temperton and Brian Banks: synthesizers"}
|
||||
{"query": "who sang the hit song \"thriller\"?", "domain": "music", "doc": "Personnel[edit]\nWritten and composed by Rod Temperton\nProduced by Quincy Jones\nMichael Jackson: lead and background vocals, LinnDrum drum machine\nRod Temperton and Brian Banks: synthesizers\nGreg Phillinganes: synthesizers, Rhodes piano\nAnthony Marinelli: synthesizer programming\nDavid Williams: guitar\nJerry Hey, Gary Grant: trumpets, flugelhorns\nLarry Williams: saxophone, flute\nBill Reichenbach: trombone\nVocal, rhythm and synthesizer arrangement by Rod Temperton\nHorn arrangement by Jerry Hey\nEffects by Bruce Cannon and Bruce Swedien\nFeaturing: Narration by Vincent Price (Not featured on original edited single version)\nCharts[edit]\nWeekly charts[edit]\nChart (1983\u20131985)\nPeakposition\nAustralia (Kent Music Report)[51]\n4\nBelgium (Ultratop 50 Flanders)[52]\n1\nCanadian RPM Top Singles[53]\n3\nFinland (Suomen virallinen singlelista)[54]\n7\nFinland Jukebox (Suomen virallinen singlelista)[54]\n3\nFrance (SNEP)[28]\n1\nIreland (IRMA)[55]\n4\nNetherlands (Dutch Top 40)[56]\n3\nNetherlands (Single Top 100)[57]\n4\nNew Zealand (Recorded Music NZ)[58]\n6\nPortugal (AFP)[59]\n1\nSouth Africa (Springbok)[60]\n26\nSpain (AFYVE)[61]\n1\nUK Singles (OCC)[27]\n10\nUS Cashbox[62]\n4\nUS Billboard Hot 100[63]\n4\nUS Billboard Hot Black Singles[64][26]\n3\nUS Billboard Adult Contemporary[65]\n24\nUS Billboard Album Rock Tracks[64][26]\n42\nUS Radio & Records CHR/Pop Airplay Chart[66]\n1\nWest Germany (Official German Charts)[67]\n9\nChart (2006)\nPeakposition\nFrance (SNEP)[68]\n35\nGermany (Media Control Charts)[35]\n9\nIreland (IRMA)[55]\n8\nItaly (FIMI)[69]\n5\nNetherlands (Single Top 100)[57]\n34\nSpain (PROMUSICAE)[35]\n1\nSwitzerland (Schweizer Hitparade)[35]\n3\nChart (2007)\nPeakposition\nSpain (PROMUSICAE)[70]\n20\nUK Singles (OCC)[27]\n57\nChart (2008)\nPeakposition\nAustria (\u00d63 Austria Top 40)[71]\n55\nNorway (VG-lista)[72]\n13\nSwitzerland (Schweizer Hitparade)[73]\n53\nUK Singles (OCC)[27]\n35\nChart (2009)\nPeakposition\nAustralia (ARIA)[74]\n3\nAustria (\u00d63 Austria Top 40)[71]\n5\nBelgium (Ultratop 50 Back Catalogue Singles Flanders)[75]\n3"}
|
||||
{"query": "who sang the hit song \"thriller\"?", "domain": "music", "doc": "3\nChart (2007)\nPeakposition\nSpain (PROMUSICAE)[70]\n20\nUK Singles (OCC)[27]\n57\nChart (2008)\nPeakposition\nAustria (\u00d63 Austria Top 40)[71]\n55\nNorway (VG-lista)[72]\n13\nSwitzerland (Schweizer Hitparade)[73]\n53\nUK Singles (OCC)[27]\n35\nChart (2009)\nPeakposition\nAustralia (ARIA)[74]\n3\nAustria (\u00d63 Austria Top 40)[71]\n5\nBelgium (Ultratop 50 Back Catalogue Singles Flanders)[75]\n3\nBelgium (Ultratop 30 Back Catalogue Singles Wallonia)[76]\n2\nDenmark (Tracklisten)[36]\n25\nEurope (European Hot 100 Singles)[77]\n16\nFinland (Suomen virallinen lista)[78]\n11\nFrance (SNEP)[79]\n3\nIreland (IRMA)[35]\n8\nItaly (FIMI)[69]\n2\nJapan Singles Top 100 (Oricon)[35]\n41\nNetherlands (Single Top 100)[57]\n9\nNew Zealand (RIANZ)[35]\n12\nNorway (VG-lista)[72]\n7\nSpain (PROMUSICAE)[70]\n1\nSweden (Sverigetopplistan)[80]\n10\nSwitzerland (Schweizer Hitparade)[73]\n3\nUK Singles (OCC)[27]\n12\nUS Digital Song Sales (Billboard)[81]\n2\nChart (2010)\nPeakposition\nSpain (PROMUSICAE)[70]\n12\nSwitzerland (Schweizer Hitparade)[73]\n68\nUK Singles (OCC)[27]\n68\nChart (2012)\nPeakposition\nFrance (SNEP)[68]\n143\nIreland (IRMA)[55]\n30\nUK Singles (OCC)[27]\n49\nChart (2013)\nPeakposition\nFrance (SNEP)[68]\n159\nUK Singles (OCC)[27]\n48\nUS Billboard Hot 100[82]\n42\nChart (2014)\nPeakposition\nFrance (SNEP)[68]\n152\nSpain (PROMUSICAE)[70]\n38\nUK Singles (OCC)[27]\n57\nUS Billboard Hot 100[83]\n35\nChart (2015)\nPeakposition\nFrance (SNEP)[68]\n145\nSpain (PROMUSICAE)[70]\n48\nUK Singles (OCC)[27]\n61\nUS Billboard Hot 100[84]\n45\nChart (2016)\nPeakposition\nFrance (SNEP)[68]\n164\nUK Singles (OCC)[27]\n62\nChart (2017)\nPeakposition\nFrance (SNEP)[68]\n46\nSpain (PROMUSICAE)[70]\n32\nUK Singles (OCC)[27]\n34\nChart (2018)\nPeakposition\nCanada (Canadian Hot 100)[85]\n25\nUK Singles (OCC)[27]\n63\nUS Billboard Hot 100[86][87]\n31\nChart (2019)\nPeakposition\nUS Billboard Hot 100[88]\n44\nChart (2020)\nPeakposition\nGlobal 200[89]\n51\nUK Singles (OCC)[27]\n57\nUS Billboard Hot 100[90]\n48\nChart (2021)\nPeakposition\nCanada (Canadian Hot 100)[91]\n16\nGlobal 200 (Billboard)[92]\n28\nUK Singles (OCC)[93]\n40"}
|
||||
{"query": "who sang the hit song \"thriller\"?", "domain": "music", "doc": "34\nChart (2018)\nPeakposition\nCanada (Canadian Hot 100)[85]\n25\nUK Singles (OCC)[27]\n63\nUS Billboard Hot 100[86][87]\n31\nChart (2019)\nPeakposition\nUS Billboard Hot 100[88]\n44\nChart (2020)\nPeakposition\nGlobal 200[89]\n51\nUK Singles (OCC)[27]\n57\nUS Billboard Hot 100[90]\n48\nChart (2021)\nPeakposition\nCanada (Canadian Hot 100)[91]\n16\nGlobal 200 (Billboard)[92]\n28\nUK Singles (OCC)[93]\n40\nUK Hip Hop/R&B (OCC)[94]\n3\nUS Billboard Hot 100[95][96]\n19\nUS Billboard Digital Songs Sales[97]\n9\nChart (2022)\nPeakposition\nCanada (Canadian Hot 100)[98]\n25\nGlobal 200[99]\n37\nUK Singles (OCC)[27]\n41\nUS Billboard Hot 100[100]\n26\nChart (2023)\nPeakposition\nCanada (Canadian Hot 100)[101]\n22\nGlobal 200[102]\n39\nUK Singles (OCC)[103]\n20\nUS Billboard Hot 100[104]\n21\nYear-end charts[edit]\nChart (1984)\nPosition\nAustralia (Kent Music Report)[105]\n17\nBelgium (Ultratop Flanders)[106]\n26\nUS Billboard Hot 100[24]\n78\nChart (2009)\nPosition\nSweden (Sverigetopplistan)[107]\n88\nSwitzerland (Schweizer Hitparade)[108]\n81\nUK Singles (Official Charts Company)[109]\n143\nCertifications[edit]\nRegion\nCertification\nCertified units/sales\nAustralia (ARIA)[110]\n6\u00d7 Platinum\n420,000\u2021\nDenmark (IFPI Danmark)[111]\nPlatinum\n90,000\u2021\nFrance (SNEP)[112]\nPlatinum\n1,000,000*\nGermany (BVMI)[113]\nGold\n250,000\u2021\nItaly (FIMI)[114]\nPlatinum\n30,000\u2021\nJapan (RIAJ)[115] Full-length ringtone\nPlatinum\n250,000*\nMexico (AMPROFON)[116]\n4\u00d7 Platinum+Gold\n270,000\u2021\nSpain (PROMUSICAE)[117]\n2\u00d7 Platinum\n100,000*\nUnited Kingdom (BPI)[118] Digital sales since 2004\n2\u00d7 Platinum\n1,200,000\u2021\nUnited Kingdom (BPI)[119] other release\nGold\n500,000\u2021\nUnited States (RIAA)[120]\nDiamond\n10,000,000\u2021\nUnited States (RIAA)[121] Mastertone\nGold\n500,000*\n* Sales figures based on certification alone.\u2021 Sales+streaming figures based on certification alone.\nSee also[edit]\nList of best-selling singles\nList of best-selling singles in the United States\nList of most expensive music videos\nMichael Jackson's Thriller\nThriller (viral video)\nThrill the World\nReferences[edit]"}
|
||||
{"query": "who sang the hit song \"thriller\"?", "domain": "music", "doc": "Diamond\n10,000,000\u2021\nUnited States (RIAA)[121] Mastertone\nGold\n500,000*\n* Sales figures based on certification alone.\u2021 Sales+streaming figures based on certification alone.\nSee also[edit]\nList of best-selling singles\nList of best-selling singles in the United States\nList of most expensive music videos\nMichael Jackson's Thriller\nThriller (viral video)\nThrill the World\nReferences[edit]\n^ \"New Singles (for the week ending November 11, 1983)\" (PDF). Music Week: 30. November 5, 1983.\n^ Semigran, Aly (February 7, 2011). \"Michael Jackson's 'Thriller': Story Behind the 'Glee' Cover\". MTV. Retrieved September 17, 2023.\n^ McPhate, Tim (November 2, 2017). \"Michael Jackson's \"Thriller\": For The Record\". The Recording Academy. Retrieved November 17, 2019.\n^ George 2004, p.\u00a023.\n^ Jones, Jel D. Lewis (2005). Michael Jackson, the King of Pop: The Big Picture \u2013 The Music! The Man! The Legend! The Interviews: An Anthology. Amber Books Publishing. p.\u00a06. ISBN\u00a00-9749779-0-X. Retrieved July 22, 2010.\n^ a b c d e f Lyle, Peter (November 25, 2007). \"Michael Jackson's monster smash\". The Daily Telegraph. Archived from the original on January 12, 2022. Retrieved January 24, 2010.\n^ Kreps, Daniel (October 5, 2016). \"Rod Temperton, 'Thriller' songwriter, dead at 66\". Rolling Stone. Retrieved July 25, 2022.\n^ a b c d e f \"Revealed: the story behind Jacko's Thriller\". M magazine. October 31, 2012. Archived from the original on November 3, 2012. Retrieved October 24, 2018.\n^ Glazer, Eliot (September 25, 2009). \"Top 1984 Songs\". AOLRadioBlog.com. AOL Inc. Retrieved January 24, 2010.\n^ Vozick-Levinson, Simon (February 18, 2008). \"Quincy Jones' 'Thriller' Memories\". EW.com. Retrieved January 24, 2010.\n^ Carr, Dan (November 30, 2022). \"The synth sounds of Michael Jackson's Thriller (and how to recreate them in your DAW)\". MusicRadar. Retrieved March 19, 2023."}
|
||||
{"query": "who sang the hit song \"thriller\"?", "domain": "music", "doc": "^ Glazer, Eliot (September 25, 2009). \"Top 1984 Songs\". AOLRadioBlog.com. AOL Inc. Retrieved January 24, 2010.\n^ Vozick-Levinson, Simon (February 18, 2008). \"Quincy Jones' 'Thriller' Memories\". EW.com. Retrieved January 24, 2010.\n^ Carr, Dan (November 30, 2022). \"The synth sounds of Michael Jackson's Thriller (and how to recreate them in your DAW)\". MusicRadar. Retrieved March 19, 2023.\n^ Rogerson, Ben (February 10, 2023). \"Watch Greg Phillinganes recreate Michael Jackson's Thriller using the original synths\". MusicRadar. Retrieved March 27, 2023.\n^ a b \"The making of Michael Jackson's Thriller\". MusicRadar. Retrieved October 25, 2018.\n^ a b c d e f g h i Griffin, Nancy (July 2010). \"The \"Thriller\" Diaries\". Vanity Fair. Retrieved January 2, 2011.\n^ a b Romano, Aja (October 31, 2018). \"Michael Jackson's \"Thriller\" is the eternal Halloween bop \u2014 and so much more\". Vox. Retrieved October 25, 2021.\n^ Eagan, Daniel (November 24, 2011). America's Film Legacy, 2009\u20132010: A Viewer's Guide to the 50 Landmark Movies Added To The National Film Registry in 2009\u201310. Bloomsbury Publishing. p.\u00a0175. ISBN\u00a0978-1-4411-9328-5. Retrieved May 14, 2016.\n^ Miles Marshall Lewis (November 30, 2022). \"#Thriller40: Cultural Critics Celebrate Michael Jackson's Impact\". BET.\n^ a b Richin, Leslie (December 2, 2014). \"On This Day In 1983, Michael Jackson's 'Thriller' Premiered On MTV\". Billboard.\n^ a b Hebblethwaite, Phil (November 21, 2013). \"How Michael Jackson's Thriller changed music videos for ever\". The Guardian. Retrieved October 29, 2018.\n^ \"Week of February 11, 1984\". Billboard. Nielsen Business Media, Inc. Retrieved October 10, 2015.\n^ \"Week of February 18, 1984\". Billboard. Nielsen Business Media, Inc. January 2, 2013. Retrieved October 10, 2015.\n^ \"Week of March 3, 1984\". Billboard. Nielsen Business Media, Inc. January 2, 2013. Retrieved October 10, 2015.\n^ \"Week of March 10, 1984\". Billboard. Nielsen Business Media, Inc. Retrieved October 10, 2015."}
|
||||
{"query": "who sang the hit song \"thriller\"?", "domain": "music", "doc": "^ \"Week of February 18, 1984\". Billboard. Nielsen Business Media, Inc. January 2, 2013. Retrieved October 10, 2015.\n^ \"Week of March 3, 1984\". Billboard. Nielsen Business Media, Inc. January 2, 2013. Retrieved October 10, 2015.\n^ \"Week of March 10, 1984\". Billboard. Nielsen Business Media, Inc. Retrieved October 10, 2015.\n^ a b \"Billboard Top 100 \u2013 1984\". billboard. Retrieved March 29, 2020.\n^ \"Week of March 3, 1984\". Billboard. Nielsen Business Media, Inc. Archived from the original on January 21, 2010. Retrieved January 23, 2010.\n^ a b c \"Week of March 10, 1984\". Billboard. Nielsen Business Media, Inc. Retrieved January 23, 2010.\n^ a b c d e f g h i j k l m n o p \"Michael Jackson\". Official Charts Company. Retrieved October 10, 2015.\n^ a b \"Toutes les Chansons N\u00b0 1 des Ann\u00e9es 80\". Infodisc.fr. Dominic Durand / InfoDisc. Archived from the original on November 20, 2012. Retrieved January 23, 2010.\n^ \"Michael Jackson \u2013 Thriller\". Top30-3.radio2.be (in Dutch). VRT \u2013 Auguste Reyerslaan. Archived from the original on February 22, 2012. Retrieved January 24, 2010.\n^ a b Ed Christman, Antony Bruno (July 2, 2009). \"Michael Jackson Music Sales Surge Could Last For Months\". Billboard. Nielsen Business Media, Inc. Retrieved January 23, 2010.\n^ \"July 11, 2009\". Billboard. Nielsen Business Media. Retrieved January 23, 2010.\n^ \"Chart For Week Up To 04/07/2009\". Official Charts Company. Retrieved January 23, 2010.\n^ \"Thriller in Italian Chart\". Hung Medien. Retrieved June 21, 2013.\n^ \"Certificazioni Download FIMI\" (PDF) (in Italian). Federation of the Italian Music Industry. Archived from the original (PDF) on June 5, 2012. Retrieved January 2, 2012.\n^ a b c d e f g h \"Michael Jackson \u2013 Thriller \u2013 Music Charts\". Acharts.us. Retrieved January 23, 2010.\n^ a b \"Track Top 40 \u2013 July 10, 2009\". Hitlisterne.dk. IFPI Danmark & Nielsen Music Control. Archived from the original on September 30, 2011. Retrieved January 23, 2010."}
|
||||
{"query": "who sang the hit song \"thriller\"?", "domain": "music", "doc": "^ a b c d e f g h \"Michael Jackson \u2013 Thriller \u2013 Music Charts\". Acharts.us. Retrieved January 23, 2010.\n^ a b \"Track Top 40 \u2013 July 10, 2009\". Hitlisterne.dk. IFPI Danmark & Nielsen Music Control. Archived from the original on September 30, 2011. Retrieved January 23, 2010.\n^ \"Thriller in Finnish Chart\". Hung Medien. Retrieved January 23, 2010.\n^ \"Eminem Debuts at No. 1 on Hot R&B/Hip-Hop Songs Chart; Michael Jackson's 'Thriller' Returns\". Billboard. November 8, 2013. Retrieved October 2, 2016.\n^ \"Top 100 Songs | Billboard Hot 100 Chart\". Billboard. Retrieved November 11, 2018.\n^ a b \"Michael Jackson's 'Thriller' Leads Halloween Treats on Billboard Hot 100\". Billboard. Retrieved November 9, 2021.\n^ \"American\nsingle\ncertifications \u2013 Michael Jackson \u2013 Thriller\". Recording Industry Association of America.\n^ \"Rock Music, etc., Terms\". Georgetown College. October 26, 1999. Archived from the original on May 18, 2011. Retrieved January 8, 2010.\n^ \"Hip Hop Single Sales: The Weeknd, Zay Hilfigerrr & Drake\". Hip Hop DX. November 13, 2016. Retrieved November 14, 2016.\n^ \"Michael Jackson's Catalogue Garners Major New Gold & Platinum Awards\". RIAA. August 23, 2018. Retrieved December 21, 2018.\n^ Appel, Rich (October 30, 2014). \"Revisionist History, Part 3: Michael Jackson Gets Revenge on Prince! Year-End Hits of the Past, Re-Analyzed\". Billboard. Prometheus Global Media. Retrieved October 30, 2014.\n^ \"Michael Jackson Scores Three No. 1 Hits On The Billboard Charts This Week\". Forbes. Retrieved November 10, 2023.\n^ Lasimone, Ashley (October 28, 2009). \"Clash of the Cover Songs: Michael Jackson vs. Imogen Heap\". Spinner.com. AOL Inc. Retrieved January 23, 2010.\n^ Pareles, Jon (September 3, 1987). \"Critic's Notebook; How Good Is Jackson's 'Bad'?\". The New York Times. Retrieved January 25, 2010.\n^ Powers, Ann (February 15, 2008). \"Nine reasons why Jackson masterpiece remains a 'Thriller'\". SouthCoastToday.com. Dow Jones Local Media Group. Retrieved February 6, 2010."}
|
||||
{"query": "who sang the hit song \"thriller\"?", "domain": "music", "doc": "^ Pareles, Jon (September 3, 1987). \"Critic's Notebook; How Good Is Jackson's 'Bad'?\". The New York Times. Retrieved January 25, 2010.\n^ Powers, Ann (February 15, 2008). \"Nine reasons why Jackson masterpiece remains a 'Thriller'\". SouthCoastToday.com. Dow Jones Local Media Group. Retrieved February 6, 2010.\n^ Comstock, Miriam Marcus and Courtney. \"Thriller Chiller For Jackson\". Forbes. Retrieved November 4, 2019.\n^ Kent, David (2003). Australian Chart Book 1970\u20131992. Australian Chart Book. ISBN\u00a00-646-11917-6.\n^ \"Michael Jackson \u2013 Thriller\" (in Dutch). Ultratop 50.\nRetrieved December 14, 2021.\n^ \"Top Singles \u2013 Volume 40, No. 1, March 10, 1984\". RPM. Archived from the original on October 17, 2012. Retrieved August 3, 2010.\n^ a b Pennanen, Timo (2021). \"Michael Jackson\". Sis\u00e4lt\u00e4\u00e4 hitin - 2. laitos Levyt ja esitt\u00e4j\u00e4t Suomen musiikkilistoilla 1.1.1960\u201330.6.2021 (PDF). Helsinki: Kustannusosakeyhti\u00f6 Otava. p.\u00a0113. Retrieved May 29, 2022.\n^ a b c \"Search Results: Thriller\". IrishCharts.ie. Irish Recorded Music Association. Retrieved January 25, 2010.\n^\n\"Nederlandse Top 40 \u2013 week 2, 1984\" (in Dutch). Dutch Top 40.\n^ a b c \"Michael Jackson \u2013 Thriller\" (in Dutch). Single Top 100.\n^ \"Michael Jackson \u2013 Thriller\". Top 40 Singles.\n^ \"Top 3 in Europe\" (PDF). Music & Media. May 14, 1984. p.\u00a012. Retrieved October 29, 2021.\n^ \"SA Charts 1965\u2013March 1989\". Retrieved September 5, 2018.\n^ Salaverri, Fernando (September 2005). S\u00f3lo \u00e9xitos: a\u00f1o a a\u00f1o, 1959\u20132002 (in Spanish) (1st\u00a0ed.). Spain: Fundaci\u00f3n Autor-SGAE. ISBN\u00a084-8048-639-2.\n^ Whitburn, Joel (2014). Cash Box Pop Hits 1952-1996. Sheridan Books, Inc. ISBN\u00a0978-0-89820-209-0.\n^ \"Michael Jackson Chart History (Hot 100)\". Billboard. September 9, 2021. Retrieved September 9, 2021.\n^ a b \"Allmusic (Thriller > Charts & Awards > Billboard Singles)\". Allmusic.com. Rovi Corporation. Retrieved January 23, 2010.\n^ Whitburn, Joel (1993). Top Adult Contemporary: 1961\u20131993. Record Research. p.\u00a0118.\n^ \"Michael Jackson\"."}
|
||||
{"query": "who sang the hit song \"thriller\"?", "domain": "music", "doc": "^ \"Michael Jackson Chart History (Hot 100)\". Billboard. September 9, 2021. Retrieved September 9, 2021.\n^ a b \"Allmusic (Thriller > Charts & Awards > Billboard Singles)\". Allmusic.com. Rovi Corporation. Retrieved January 23, 2010.\n^ Whitburn, Joel (1993). Top Adult Contemporary: 1961\u20131993. Record Research. p.\u00a0118.\n^ \"Michael Jackson\".\n^ \"Offiziellecharts.de \u2013 Michael Jackson \u2013 Thriller\" (in German). GfK Entertainment charts.\nRetrieved March 18, 2019.\n^ a b c d e f g \"Michael Jackson \u2013 Thriller\" (in French). Les classement single.\n^ a b \"Michael Jackson \u2013 Thriller\". Top Digital Download.\n^ a b c d e f \"Michael Jackson \u2013 Thriller\" Canciones Top 50.\n^ a b \"Michael Jackson \u2013 Thriller\" (in German). \u00d63 Austria Top 40.\n^ a b \"Michael Jackson \u2013 Thriller\". VG-lista.\n^ a b c \"Michael Jackson \u2013 Thriller\". Swiss Singles Chart.\n^ \"Michael Jackson \u2013 Thriller\". ARIA Top 50 Singles.\n^ \"30 Back Catalogue Singles \u2013 July 18, 2009\". UltraTop.be. Hung Medien. Retrieved January 24, 2010.\n^ \"30 Back Catalogue Singles \u2013 July 4, 2009\". UltraTop.be. Hung Medien. Retrieved January 24, 2010.\n^ \"Michael Jackson Album & Song Chart History\". Billboard.com. Nielsen Business Media. Retrieved October 29, 2011.\n^ \"Michael Jackson: Thriller\" (in Finnish). Musiikkituottajat.\n^ \"Download Single Top 50 \u2013 04/07/2009\". Lescharts.com. Hung Medien. Retrieved January 23, 2010.\n^ \"Michael Jackson \u2013 Thriller\". Singles Top 100.\n^ \"Michael Jackson Chart History (Digital Song Sales)\". Billboard.\n^ \"The Hot 100, Week of November 16, 2013\". Billboard. Prometheus Global Media. Retrieved November 12, 2015.\n^ \"The Hot 100, Week of November 15, 2014\". Billboard. Prometheus Global Media.\n^ \"The Hot 100, Week of November 21, 2015\". Billboard. Prometheus Global Media. Retrieved November 12, 2015.\n^ \"Michael Jackson Chart History (Canadian Hot 100)\". Billboard.\nRetrieved November 6, 2018.\n^ Zellner, Xander. \"Michael Jackson's 'Thriller' Returns to Hot 100, Thanks to Halloween Gains\". Billboard. Retrieved November 6, 2018."}
|
||||
{"query": "who sang the hit song \"thriller\"?", "domain": "music", "doc": "^ \"The Hot 100, Week of November 21, 2015\". Billboard. Prometheus Global Media. Retrieved November 12, 2015.\n^ \"Michael Jackson Chart History (Canadian Hot 100)\". Billboard.\nRetrieved November 6, 2018.\n^ Zellner, Xander. \"Michael Jackson's 'Thriller' Returns to Hot 100, Thanks to Halloween Gains\". Billboard. Retrieved November 6, 2018.\n^ \"The Hot 100, Week of November 10, 2018\". Billboard. Retrieved September 9, 2021.\n^ \"The Hot 100, Week of November 9, 2019\". Billboard. Retrieved March 20, 2021.\n^ \"The Global 200, Week of November 14, 2020\". Billboard. Retrieved November 8, 2023.\n^ \"The Hot 100, Week of November 14, 2020\". Billboard. Retrieved November 14, 2020.\n^ \"Canadian Hot 100, Week of November 13, 2021\". Billboard. Retrieved November 23, 2021.\n^ \"Michael Jackson Chart History (Global 200)\". Billboard.\nRetrieved November 9, 2021.\n^ \"Official Singles Chart Top 100\". Official Charts Company.\nRetrieved November 8, 2023.\n^ \"Official Hip Hop and R&B Singles Chart Top 40\". Official Charts Company.\nRetrieved November 5, 2021.\n^ \"Michael Jackson's 'Thriller' Leads Halloween Treats on Billboard Hot 100\". Billboard. Retrieved November 8, 2021.\n^ \"The Hot 100, Week of November 13, 2021\". Billboard. Retrieved November 5, 2022.\n^ \"Digital Song Sales Chart, Week of November 13, 2021\". Billboard. Retrieved November 11, 2021.\n^ \"Canadian Hot 100, Week of November 12, 2022\". Billboard. Retrieved November 8, 2022.\n^ \"The Global 200, Week of November 12, 2022\". Billboard. Retrieved November 8, 2022.\n^ \"The Hot 100, Week of November 12, 2022\". Billboard. Retrieved November 8, 2022.\n^ \"Canadian Hot 100, Week of November 11, 2023\". Billboard. Retrieved November 8, 2023.\n^ \"The Global 200, Week of November 11, 2023\". Billboard. Retrieved November 8, 2023.\n^ \"Official Singles Chart Top 100\". Official Charts Company.\nRetrieved November 8, 2023.\n^ \"The Hot 100, Week of November 11, 2023\". Billboard. Retrieved November 8, 2023."}
|
||||
{"query": "who sang the hit song \"thriller\"?", "domain": "music", "doc": "^ \"Canadian Hot 100, Week of November 11, 2023\". Billboard. Retrieved November 8, 2023.\n^ \"The Global 200, Week of November 11, 2023\". Billboard. Retrieved November 8, 2023.\n^ \"Official Singles Chart Top 100\". Official Charts Company.\nRetrieved November 8, 2023.\n^ \"The Hot 100, Week of November 11, 2023\". Billboard. Retrieved November 8, 2023.\n^ \"Kent Music Report No 548 \u2013 31 December 1984 > National Top 100 Singles for 1984\". Kent Music Report. Retrieved January 23, 2023 \u2013 via Imgur.com.\n^ \"Jaaroverzichten 1984\". Ultratop. Retrieved December 14, 2021.\n^ \"\u00c5rslista Singular \u2013 \u00c5r 2009\" (in Swedish). Sverigetopplistan. Retrieved March 29, 2020.\n^ \"Schweizer Jahreshitparade 2009 \u2013 hitparade.ch\". Hung Medien. Retrieved March 29, 2020.\n^ \"Charts Plus Year end 2009\" (PDF). Charts Plus. Retrieved May 16, 2020.\n^ \"ARIA Charts \u2013 Accreditations \u2013 2021 Singles\" (PDF). Australian Recording Industry Association.\n^ \"Danish\nsingle\ncertifications \u2013 Michael Jackson \u2013 Thriller\". IFPI Danmark. Retrieved July 2, 2023.\n^ \"French\nsingle\ncertifications \u2013 Michael Jackson \u2013 Thriller\" (in French). InfoDisc. Retrieved November 28, 2022. Select MICHAEL JACKSON and click OK.\n^ \"Gold-/Platin-Datenbank (Michael Jackson;\u00a0'Thriller')\" (in German). Bundesverband Musikindustrie. Retrieved February 18, 2023.\n^ \"Italian\nsingle\ncertifications \u2013 Michael Jackson \u2013 Thriller\" (in Italian). Federazione Industria Musicale Italiana. Select \"2014\" in the \"Anno\" drop-down menu. Select \"Thriller\" in the \"Filtra\" field. Select \"Singoli\" under \"Sezione\".\n^ \"Japanese\nringtone\ncertifications \u2013 Michael Jackson \u2013 Thriller\" (in Japanese). Recording Industry Association of Japan. Retrieved December 30, 2020. Select 2009\u5e7411\u6708 on the drop-down menu\n^ \"Certificaciones\" (in Spanish). Asociaci\u00f3n Mexicana de Productores de Fonogramas y Videogramas. Retrieved November 28, 2022. Type Michael Jackson in the box under the ARTISTA column heading\u00a0and Thriller in the box under the T\u00cdTULO column heading.\n^ \"Spanish\nsingle"}
|
||||
{"query": "who sang the hit song \"thriller\"?", "domain": "music", "doc": "^ \"Certificaciones\" (in Spanish). Asociaci\u00f3n Mexicana de Productores de Fonogramas y Videogramas. Retrieved November 28, 2022. Type Michael Jackson in the box under the ARTISTA column heading\u00a0and Thriller in the box under the T\u00cdTULO column heading.\n^ \"Spanish\nsingle\ncertifications \u2013 Michael Jackson \u2013 Thriller\". El portal de M\u00fasica. Productores de M\u00fasica de Espa\u00f1a.\n^ \"British\nsingle\ncertifications \u2013 Michael Jackson \u2013 Thriller\". British Phonographic Industry. Retrieved March 17, 2023.\n^ \"British\nsingle\ncertifications \u2013 Michael Jackson \u2013 Thriller\". British Phonographic Industry. Retrieved March 17, 2023.\n^ \"American\nsingle\ncertifications \u2013 Michael Jackson \u2013 Thriller\". Recording Industry Association of America. Retrieved August 29, 2022.\n^ \"American\nringtone\ncertifications \u2013 Michael Jackson \u2013 Thriller\". Recording Industry Association of America.\nBibliography\nBrooks, Darren (2002). Michael Jackson: An Exceptional Journey. Chrome Dreams. ISBN\u00a01-84240-178-5.\nGeorge, Nelson (2004). Michael Jackson: The Ultimate Collection (booklet). Sony BMG.\nGrant, Adrian (2009). Michael Jackson: The Visual Documentary. Omnibus Press. ISBN\u00a0978-1-84938-261-8.\nJones, Jel (2005). Michael Jackson, the King of Pop: The Big Picture: the Music! the Man! the Legend! the Interviews!. Amber Books Publishing. ISBN\u00a00-9749779-0-X.\nTaraborrelli, J. Randy (2004). The Magic and the Madness. Terra Alta, WV: Headline. ISBN\u00a00-330-42005-4.\nHalstead, Craig (2003). Michael Jackson The Solo Years. On-Line Ltd. ISBN\u00a0978-0-7552-0091-7.\nvteMichael Jackson: ThrillerSide one\n\"Wanna Be Startin' Somethin'\"\n\"Baby Be Mine\"\n\"The Girl Is Mine\"\n\"Thriller\"\nSide two\n\"Beat It\"\n\"Billie Jean\"\n\"Human Nature\"\n\"P.Y.T. (Pretty Young Thing)\"\n\"The Lady in My Life\"\nRelated articles\nE.T. the Extra-Terrestrial (audiobook)\nFarewell My Summer Love\nVictory\nVictory Tour\nMichael Jackson's Thriller\nThriller jacket\nThriller 25\nThriller 40\nThriller 40 (film)\nThrill the World\nThriller viral video\nDonga\nThriller \u2013 Live"}
|
||||
{"query": "who sang the hit song \"thriller\"?", "domain": "music", "doc": "\"Baby Be Mine\"\n\"The Girl Is Mine\"\n\"Thriller\"\nSide two\n\"Beat It\"\n\"Billie Jean\"\n\"Human Nature\"\n\"P.Y.T. (Pretty Young Thing)\"\n\"The Lady in My Life\"\nRelated articles\nE.T. the Extra-Terrestrial (audiobook)\nFarewell My Summer Love\nVictory\nVictory Tour\nMichael Jackson's Thriller\nThriller jacket\nThriller 25\nThriller 40\nThriller 40 (film)\nThrill the World\nThriller viral video\nDonga\nThriller \u2013 Live\nMichael Jackson albums discography\nvteMichael Jackson songs\nSingles\nSongs\nUnreleased songs\n1970s\n\"Got to Be There\"\n\"Ain't No Sunshine\"\n\"I Wanna Be Where You Are\"\n\"Rockin' Robin\"\n\"Love Is Here and Now You're Gone\"\n\"You've Got a Friend\"\n\"Ben\"\n\"Everybody's Somebody's Fool\"\n\"My Girl\"\n\"Shoo-Be-Doo-Be-Doo-Da-Day\"\n\"We've Got a Good Thing Going\"\n\"With a Child's Heart\"\n\"Morning Glow\"\n\"All the Things You Are\"\n\"Happy\"\n\"Too Young\"\n\"Music and Me\"\n\"We're Almost There\"\n\"Just a Little Bit of You\"\n\"You Can't Win\"\n\"Don't Stop 'Til You Get Enough\"\n\"Rock with You\"\n\"Working Day and Night\"\n\"It's the Falling in Love\"\n1980s\n\"Off the Wall\"\n\"Girlfriend\"\n\"She's Out of My Life\"\n\"One Day in Your Life\"\n\"The Girl Is Mine\"\n\"Billie Jean\"\n\"Beat It\"\n\"Wanna Be Startin' Somethin'\"\n\"Human Nature\"\n\"P.Y.T. (Pretty Young Thing)\"\n\"Thriller\"\n\"You've Really Got a Hold on Me\"\n\"Here I Am (Come and Take Me)\"\n\"Lonely Teardrops\"\n\"That's What Love Is Made Of\"\n\"Farewell My Summer Love\"\n\"Girl You're So Together\"\n\"I Just Can't Stop Loving You\"\n\"Bad\"\n\"The Way You Make Me Feel\"\n\"Speed Demon\"\n\"Liberian Girl\"\n\"Just Good Friends\"\n\"Another Part of Me\"\n\"Man in the Mirror\"\n\"Dirty Diana\"\n\"Smooth Criminal\"\n\"Leave Me Alone\"\n\"Twenty-Five Miles\"\n1990s\n\"Black or White\"\n\"Jam\"\n\"In the Closet\"\n\"Remember the Time\"\n\"Heal the World\"\n\"Who Is It\"\n\"Give In to Me\"\n\"Will You Be There\"\n\"Gone Too Soon\"\n\"Dangerous\"\n\"Come Together\"\n\"Scream\"\n\"Childhood\"\n\"They Don't Care About Us\"\n\"Stranger in Moscow\"\n\"This Time Around\"\n\"Earth Song\"\n\"D.S.\"\n\"You Are Not Alone\"\n\"Tabloid Junkie\"\n\"HIStory\"\n\"Smile\"\n\"Blood on the Dance Floor\"\n\"Ghosts\"\n\"Is It Scary\"\n\"On the Line\"\n2000s"}
|
||||
{"query": "who sang the hit song \"thriller\"?", "domain": "music", "doc": "\"Jam\"\n\"In the Closet\"\n\"Remember the Time\"\n\"Heal the World\"\n\"Who Is It\"\n\"Give In to Me\"\n\"Will You Be There\"\n\"Gone Too Soon\"\n\"Dangerous\"\n\"Come Together\"\n\"Scream\"\n\"Childhood\"\n\"They Don't Care About Us\"\n\"Stranger in Moscow\"\n\"This Time Around\"\n\"Earth Song\"\n\"D.S.\"\n\"You Are Not Alone\"\n\"Tabloid Junkie\"\n\"HIStory\"\n\"Smile\"\n\"Blood on the Dance Floor\"\n\"Ghosts\"\n\"Is It Scary\"\n\"On the Line\"\n2000s\n\"Speechless\"\n\"You Rock My World\"\n\"Heaven Can Wait\"\n\"Butterflies\"\n\"Cry\"\n\"One More Chance\"\n\"Cheater\"\n\"(I Like) The Way You Love Me\"\n\"Fall Again\"\n\"This Is It\"\n2010s\n\"Hold My Hand\"\n\"Hollywood Tonight\"\n\"(I Can't Make It) Another Day\"\n\"Behind the Mask\"\n\"Don't Be Messin' 'Round\"\n\"I'm So Blue\"\n\"Price of Fame\"\n\"Love Never Felt So Good\"\n\"Chicago\"\n\"Loving You\"\n\"A Place with No Name\"\n\"Slave to the Rhythm\"\n\"Blue Gangsta\"\n2020s\n\"She's Trouble\"\nOther\n\"We Are the World\"\n\"Mind Is the Magic\"\n\"What More Can I Give\"\n\"We Are the World 25 for Haiti\"\n\"Blood on the Dance Floor x Dangerous\"\n\"Diamonds Are Invincible\"\nFeatured\n\"Ease on Down the Road\"\n\"A Brand New Day\"\n\"Night Time Lover\"\n\"Papa Was a Rollin' Stone\"\n\"State of Independence\"\n\"Muscles\"\n\"Say Say Say\"\n\"Somebody's Watching Me\"\n\"Don't Stand Another Chance\"\n\"Centipede\"\n\"Tell Me I'm Not Dreamin' (Too Good to Be True)\"\n\"Eaten Alive\"\n\"Get It\"\n\"2300 Jackson Street\"\n\"Do the Bartman\"\n\"Whatzupwitu\"\n\"Why\"\n\"I Need You\"\n\"We Be Ballin'\"\n\"Girls, Girls, Girls\"\n\"All in Your Name\"\n\"There Must Be More to Life Than This\"\n\"Low\"\n\"Don't Matter to Me\"\nCategory\nAuthority control databases\nMusicBrainz work\nRetrieved from \"https://en.wikipedia.org/w/index.php?title=Thriller_(song)&oldid=1212467768\""}
|
||||
{"query": "who sang the hit song \"thriller\"?", "domain": "music", "doc": "\nCategories: 1982 songs1983 singles1984 singlesCBS Records singlesColumbia Records singlesCompositions with a narratorEpic Records singlesHalloween songsMichael Jackson songsNumber-one singles in SpainSNEP Top Singles number-one singlesSong recordings produced by Quincy JonesSongs about monstersSongs written by Rod TempertonHidden categories: CS1 Dutch-language sources (nl)CS1 Italian-language sources (it)CS1 Spanish-language sources (es)Articles with German-language sources (de)CS1 Swedish-language sources (sv)CS1 French-language sources (fr)CS1 German-language sources (de)Cite certification used for Italy without IDCS1 Japanese-language sources (ja)Articles with short descriptionShort description is different from WikidataGood articlesUse American English from November 2021All Wikipedia articles written in American EnglishUse mdy dates from November 2014Articles with hAudio microformatsCertification Cite Ref usages outside Certification Table EntrySingle chart usages for FlandersSingle chart usages for Dutch40Single chart called without artistSingle chart called without songSingle chart usages for Dutch100Single chart usages for New ZealandSingle chart usages for West GermanySingle chart usages for FranceSingle chart usages for ItalySingle chart making named refSingle chart usages for SpainSingle chart usages for AustriaSingle chart usages for NorwaySingle chart usages for SwissSingle chart usages for AustraliaSingle chart usages for FinlandSingle chart usages for SwedenSingle chart usages for BillboarddigitalsongsSingle chart usages for CanadaSingle chart usages for Billboardglobal200Single chart usages for UKSingle chart usages for UKrandbCertification Table Entry usages for AustraliaPages using certification Table Entry with streaming figuresCertification Table Entry usages for DenmarkCertification Table Entry usages for FrancePages using certification Table Entry with sales figuresCertification Table Entry usages for GermanyCertification Table Entry usages for ItalyCertification Table Entry usages for JapanCertification Table Entry usages for MexicoCertification Table Entry usages for SpainCertification Table Entry usages for United KingdomCertification Table Entry usages for United StatesPages using certification Table Entry with sales footnotePages using certification Table Entry with streaming footnoteArticles with MusicBrainz work identifiers"}
|
||||
{"query": "who sang the hit song \"thriller\"?", "domain": "music", "doc": "This page was last edited on 8 March 2024, at 01:12\u00a0(UTC).\nText is available under the Creative Commons Attribution-ShareAlike License 4.0;\nadditional terms may apply. By using this site, you agree to the Terms of Use and Privacy Policy. Wikipedia\u00ae is a registered trademark of the Wikimedia Foundation, Inc., a non-profit organization.\nPrivacy policy\nAbout Wikipedia\nDisclaimers\nContact Wikipedia\nCode of Conduct\nDevelopers\nStatistics\nCookie statement\nMobile view\nToggle limited content width"}
|
||||
{"query": "who sang the hit song \"thriller\"?", "domain": "music", "doc": "Jon Pareles of The New York Times noted that "'Billie Jean', 'Beat It', 'Wanna Be Starting' Something' ' and "the movie in the song 'Thriller'", were the songs, unlike the "fluff" "P.Y.T.", that were "the hits that made Thriller a world-beater; along with Mr. Jackson's stage and video presence, ...Jon Pareles of The New York Times noted that \"'Billie Jean', 'Beat It', 'Wanna Be Starting' Something' ' and \"the movie in the song 'Thriller'\", were the songs, unlike the \"fluff\" \"P.Y.T.\", that were \"the hits that made Thriller a world-beater; along with Mr. Jackson's stage and video presence, listeners must have identified with his willingness to admit terror.\" It appears on several of Jackson's greatest-hits albums and has been covered by numerous artists. The song has returned to the Billboard Hot 100 chart multiple times due to its popularity around Halloween. \"Thriller\" is a disco-funk song The introduction features sound effects such as a creaking door, thunder, feet walking on wooden planks, winds and howling wolves. This gave Jackson at least one top-20 hit across seven consecutive decades from 1969 on the Billboard Hot 100. \"Thriller\" was certified platinum by the Recording Industry Association of America on December 4, 1989, for sales of over one million physical units in the US As of August 2016, the song had sold 4,024,398 copies in the US. \"Thriller\" is certified Diamond by the Recording Industry Association of America. It appears on several of Jackson's greatest-hits albums and has been covered by numerous artists. The song has returned to the Billboard Hot 100 chart multiple times due to its popularity around Halloween. \"Thriller\" has returned to the Billboard Hot 100 chart multiple times due to its popularity around Halloween"}
|
||||
{"query": "who sang the hit song \"thriller\"?", "domain": "music", "doc": ". \"Thriller\" is certified Diamond by the Recording Industry Association of America. It appears on several of Jackson's greatest-hits albums and has been covered by numerous artists. The song has returned to the Billboard Hot 100 chart multiple times due to its popularity around Halloween. \"Thriller\" has returned to the Billboard Hot 100 chart multiple times due to its popularity around Halloween. It re-entered the Billboard Hot 100 in October 2013 at number 42, number 31 in November 2018, and number 19 in November 2021, its highest placement since 1984. This gave Jackson at least one top-20 hit across seven consecutive decades from 1969 on the Billboard Hot 100."}
|
||||
37
AgentQnA/retrieval_tool/README.md
Normal file
@@ -0,0 +1,37 @@
|
||||
# Retrieval tool for agent
|
||||
|
||||
The retrieval tool in this example is an OPEA megaservice composed of a query embedder, a document retriever, and a document reranker.
|
||||
|
||||
## Launch microservices
|
||||
|
||||
```
|
||||
bash launch_retrieval_tool.sh
|
||||
```
|
||||
|
||||
## Index data into vector database
|
||||
|
||||
In this example, we ingest the documents from an example JSONL file into the vector database; a sample line is shown below. For other ways to ingest data and the document types supported by the OPEA dataprep microservices, please refer to the documentation in the opea-project/GenAIComps repo.
|
||||
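Each line of `test_docs_music.jsonl` is a JSON object. Only the `doc` field is sent to the dataprep service for chunking and indexing; the `query` and `domain` fields are not used during ingestion. An abridged sample line (the `doc` text is truncated here) looks like:

```
{"query": "who sang the hit song \"thriller\"?", "domain": "music", "doc": "\"Thriller\" is a song by the American singer Michael Jackson. ..."}
```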
|
||||
1. Create a conda environment and install the Python dependencies used by `index_data.py` (`requests` and `tqdm`)
|
||||
2. Run the commands below
|
||||
|
||||
```
|
||||
bash run_ingest_data.sh
|
||||
```
|
||||
|
||||
## Validate services
|
||||
|
||||
```
|
||||
export ip_address=$(hostname -I | awk '{print $1}')
|
||||
curl http://${ip_address}:8889/v1/retrievaltool -X POST -H "Content-Type: application/json" -d '{
|
||||
"text": "Taylor Swift hometown"
|
||||
}'
|
||||
```
|
||||
|
||||
## Consume retrieval tool
|
||||
|
||||
The endpoint for the retrieval tool is
|
||||
|
||||
```
|
||||
http://${ip_address}:8889/v1/retrievaltool
|
||||
```
|
||||
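For programmatic access, a minimal Python sketch such as the one below can POST a query to this endpoint. It assumes the services launched above are running on `host_ip` and the example data has been ingested; the response format is determined by the DocIndexRetriever megaservice.

```
import requests

# Placeholder: replace with the IP address of the machine running the retrieval tool.
host_ip = "localhost"
url = f"http://{host_ip}:8889/v1/retrievaltool"

# Same request shape as the curl example in the "Validate services" section.
response = requests.post(url, json={"text": "Who sang Thriller"}, timeout=120)
response.raise_for_status()
print(response.text)
```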
77
AgentQnA/retrieval_tool/index_data.py
Normal file
@@ -0,0 +1,77 @@
|
||||
# Copyright (C) 2024 Intel Corporation
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
import argparse
|
||||
import json
|
||||
import os
|
||||
|
||||
import requests
|
||||
import tqdm
|
||||
|
||||
|
||||
def get_args():
|
||||
parser = argparse.ArgumentParser(description="Index data")
|
||||
parser.add_argument("--host_ip", type=str, default="localhost", help="Host IP")
|
||||
parser.add_argument("--port", type=int, default=6007, help="Port")
|
||||
parser.add_argument("--filedir", type=str, default=None, help="file directory")
|
||||
parser.add_argument("--filename", type=str, default=None, help="file name")
|
||||
parser.add_argument("--chunk_size", type=int, default=10000, help="Chunk size")
|
||||
parser.add_argument("--chunk_overlap", type=int, default=0, help="Chunk overlap")
|
||||
args = parser.parse_args()
|
||||
return args
|
||||
|
||||
|
||||
def split_jsonl_into_txts(jsonl_file):
|
||||
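# Read the JSONL file and collect the "doc" field of every line into a list of raw document texts.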
docs = []
|
||||
with open(jsonl_file, "r") as f:
|
||||
for line in f:
|
||||
data = json.loads(line)
|
||||
docs.append(data["doc"])
|
||||
return docs
|
||||
|
||||
|
||||
def write_docs_to_disk(docs, output_folder):
|
||||
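# Write each document to <output_folder>/<index>.txt and return the list of file paths for upload.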
output_files = []
|
||||
for i, text in enumerate(docs):
|
||||
output = os.path.join(output_folder, str(i) + ".txt")
|
||||
output_files.append(output)
|
||||
with open(output, "w") as f:
|
||||
f.write(text)
|
||||
return output_files
|
||||
|
||||
|
||||
def delete_files(files):
|
||||
for file in files:
|
||||
os.remove(file)
|
||||
|
||||
|
||||
def main():
|
||||
args = get_args()
|
||||
print(args)
|
||||
|
||||
host_ip = args.host_ip
|
||||
port = args.port
|
||||
proxies = {"http": ""}
|
||||
url = "http://{host_ip}:{port}/v1/dataprep".format(host_ip=host_ip, port=port)
|
||||
|
||||
# Split the jsonl file into individual txt files, one per document
|
||||
files = split_jsonl_into_txts(os.path.join(args.filedir, args.filename))
|
||||
file_list = write_docs_to_disk(files, args.filedir)
|
||||
|
||||
print(file_list)
|
||||
|
||||
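# Upload each temporary text file to the dataprep endpoint, one POST request per file.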
for file in tqdm.tqdm(file_list):
|
||||
print("Indexing file: ", file)
|
||||
files = [("files", (f, open(f, "rb"))) for f in [file]]
|
||||
payload = {"chunk_size": args.chunk_size, "chunk_overlap": args.chunk_overlap}
|
||||
resp = requests.request("POST", url=url, headers={}, files=files, data=payload, proxies=proxies)
|
||||
print(resp.text)
|
||||
|
||||
print("Removing temp files....")
|
||||
delete_files(file_list)
|
||||
print("ALL DONE!")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
25
AgentQnA/retrieval_tool/launch_retrieval_tool.sh
Normal file
@@ -0,0 +1,25 @@
|
||||
# Copyright (C) 2024 Intel Corporation
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
host_ip=$(hostname -I | awk '{print $1}')
|
||||
export HF_CACHE_DIR=${HF_CACHE_DIR}
|
||||
export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN}
|
||||
export no_proxy=${no_proxy}
|
||||
export http_proxy=${http_proxy}
|
||||
export https_proxy=${https_proxy}
|
||||
export EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5"
|
||||
export RERANK_MODEL_ID="BAAI/bge-reranker-base"
|
||||
export TEI_EMBEDDING_ENDPOINT="http://${host_ip}:6006"
|
||||
export TEI_RERANKING_ENDPOINT="http://${host_ip}:8808"
|
||||
export REDIS_URL="redis://${host_ip}:6379"
|
||||
export INDEX_NAME="rag-redis"
|
||||
export MEGA_SERVICE_HOST_IP=${host_ip}
|
||||
export EMBEDDING_SERVICE_HOST_IP=${host_ip}
|
||||
export RETRIEVER_SERVICE_HOST_IP=${host_ip}
|
||||
export RERANK_SERVICE_HOST_IP=${host_ip}
|
||||
export BACKEND_SERVICE_ENDPOINT="http://${host_ip}:8889/v1/retrievaltool"
|
||||
export DATAPREP_SERVICE_ENDPOINT="http://${host_ip}:6007/v1/dataprep"
|
||||
export DATAPREP_GET_FILE_ENDPOINT="http://${host_ip}:6008/v1/dataprep/get_file"
|
||||
export DATAPREP_DELETE_FILE_ENDPOINT="http://${host_ip}:6009/v1/dataprep/delete_file"
|
||||
|
||||
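# Bring up the DocIndexRetriever megaservice (embedding, retriever, reranker, dataprep) on Xeon with Docker Compose.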
docker compose -f $WORKDIR/GenAIExamples/DocIndexRetriever/docker_compose/intel/cpu/xeon/compose.yaml up -d
|
||||
7
AgentQnA/retrieval_tool/run_ingest_data.sh
Normal file
@@ -0,0 +1,7 @@
|
||||
# Copyright (C) 2024 Intel Corporation
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
FILEDIR=${WORKDIR}/GenAIExamples/AgentQnA/example_data/
|
||||
FILENAME=test_docs_music.jsonl
|
||||
|
||||
host_ip=${host_ip:-$(hostname -I | awk '{print $1}')}  # default to the local IP if host_ip is not already set
python3 index_data.py --filedir ${FILEDIR} --filename ${FILENAME} --host_ip $host_ip
|
||||
48
AgentQnA/tests/1_build_images.sh
Normal file
@@ -0,0 +1,48 @@
|
||||
#!/bin/bash
|
||||
# Copyright (C) 2024 Intel Corporation
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
set -e
|
||||
WORKPATH=$(dirname "$PWD")
|
||||
export WORKDIR=$WORKPATH/../../
|
||||
echo "WORKDIR=${WORKDIR}"
|
||||
export ip_address=$(hostname -I | awk '{print $1}')
|
||||
|
||||
|
||||
function get_genai_comps() {
|
||||
if [ ! -d "GenAIComps" ] ; then
|
||||
git clone https://github.com/opea-project/GenAIComps.git && cd GenAIComps && git checkout "${opea_branch:-"main"}" && cd ../
|
||||
fi
|
||||
}
|
||||
|
||||
|
||||
function build_docker_images_for_retrieval_tool(){
|
||||
cd $WORKDIR/GenAIExamples/DocIndexRetriever/docker_image_build/
|
||||
# git clone https://github.com/opea-project/GenAIComps.git && cd GenAIComps && git checkout "${opea_branch:-"main"}" && cd ../
|
||||
get_genai_comps
|
||||
echo "Build all the images with --no-cache..."
|
||||
service_list="doc-index-retriever dataprep-redis embedding-tei retriever-redis reranking-tei"
|
||||
docker compose -f build.yaml build ${service_list} --no-cache
|
||||
docker pull ghcr.io/huggingface/text-embeddings-inference:cpu-1.5
|
||||
|
||||
docker images && sleep 1s
|
||||
}
|
||||
|
||||
function build_agent_docker_image() {
|
||||
cd $WORKDIR/GenAIExamples/AgentQnA/docker_image_build/
|
||||
get_genai_comps
|
||||
echo "Build agent image with --no-cache..."
|
||||
docker compose -f build.yaml build --no-cache
|
||||
}
|
||||
|
||||
function main() {
|
||||
echo "==================== Build docker images for retrieval tool ===================="
|
||||
build_docker_images_for_retrieval_tool
|
||||
echo "==================== Build docker images for retrieval tool completed ===================="
|
||||
|
||||
echo "==================== Build agent docker image ===================="
|
||||
build_agent_docker_image
|
||||
echo "==================== Build agent docker image completed ===================="
|
||||
}
|
||||
|
||||
main
|
||||
26
AgentQnA/tests/2_start_retrieval_tool.sh
Normal file
@@ -0,0 +1,26 @@
|
||||
#!/bin/bash
|
||||
# Copyright (C) 2024 Intel Corporation
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
set -e
|
||||
WORKPATH=$(dirname "$PWD")
|
||||
export WORKDIR=$WORKPATH/../../
|
||||
echo "WORKDIR=${WORKDIR}"
|
||||
export ip_address=$(hostname -I | awk '{print $1}')
|
||||
|
||||
export HF_CACHE_DIR=$WORKDIR/hf_cache
|
||||
if [ ! -d "$HF_CACHE_DIR" ]; then
|
||||
echo "Creating HF_CACHE directory"
|
||||
mkdir -p "$HF_CACHE_DIR"
|
||||
fi
|
||||
|
||||
function start_retrieval_tool() {
|
||||
echo "Starting Retrieval tool"
|
||||
cd $WORKDIR/GenAIExamples/AgentQnA/retrieval_tool/
|
||||
bash launch_retrieval_tool.sh
|
||||
}
|
||||
|
||||
echo "==================== Start retrieval tool ===================="
|
||||
start_retrieval_tool
|
||||
sleep 20 # needed for downloading the models
|
||||
echo "==================== Retrieval tool started ===================="
|
||||
68
AgentQnA/tests/3_ingest_data_and_validate_retrieval.sh
Normal file
@@ -0,0 +1,68 @@
|
||||
#!/bin/bash
|
||||
# Copyright (C) 2024 Intel Corporation
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
set -e
|
||||
|
||||
WORKPATH=$(dirname "$PWD")
|
||||
export WORKDIR=$WORKPATH/../../
|
||||
echo "WORKDIR=${WORKDIR}"
|
||||
export ip_address=$(hostname -I | awk '{print $1}')
|
||||
export host_ip=$ip_address
|
||||
echo "ip_address=${ip_address}"
|
||||
|
||||
|
||||
function validate() {
|
||||
local CONTENT="$1"
|
||||
local EXPECTED_RESULT="$2"
|
||||
local SERVICE_NAME="$3"
|
||||
|
||||
if echo "$CONTENT" | grep -q "$EXPECTED_RESULT"; then
|
||||
echo "[ $SERVICE_NAME ] Content is as expected: $CONTENT"
|
||||
echo 0
|
||||
else
|
||||
echo "[ $SERVICE_NAME ] Content does not match the expected result: $CONTENT"
|
||||
echo 1
|
||||
fi
|
||||
}
|
||||
|
||||
function ingest_data_and_validate() {
|
||||
echo "Ingesting data"
|
||||
cd $WORKDIR/GenAIExamples/AgentQnA/retrieval_tool/
|
||||
echo $PWD
|
||||
local CONTENT=$(bash run_ingest_data.sh)
|
||||
local EXIT_CODE=$(validate "$CONTENT" "Data preparation succeeded" "dataprep-redis-server")
|
||||
echo "$EXIT_CODE"
|
||||
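# validate() echoes its log message followed by 0 or 1, so keep only the last character as the status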
local EXIT_CODE="${EXIT_CODE:0-1}"
|
||||
echo "return value is $EXIT_CODE"
|
||||
if [ "$EXIT_CODE" == "1" ]; then
|
||||
docker logs dataprep-redis-server
|
||||
return 1
|
||||
fi
|
||||
}
|
||||
|
||||
function validate_retrieval_tool() {
|
||||
echo "----------------Test retrieval tool ----------------"
|
||||
local CONTENT=$(http_proxy="" curl http://${ip_address}:8889/v1/retrievaltool -X POST -H "Content-Type: application/json" -d '{
|
||||
"text": "Who sang Thriller"
|
||||
}')
|
||||
local EXIT_CODE=$(validate "$CONTENT" "Thriller" "retrieval-tool")
|
||||
|
||||
if [ "$EXIT_CODE" == "1" ]; then
|
||||
docker logs retrievaltool-xeon-backend-server
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
function main(){
|
||||
|
||||
echo "==================== Ingest data ===================="
|
||||
ingest_data_and_validate
|
||||
echo "==================== Data ingestion completed ===================="
|
||||
|
||||
echo "==================== Validate retrieval tool ===================="
|
||||
validate_retrieval_tool
|
||||
echo "==================== Retrieval tool validated ===================="
|
||||
}
|
||||
|
||||
main
|
||||
60
AgentQnA/tests/4_launch_and_validate_agent_openai.sh
Normal file
@@ -0,0 +1,60 @@
|
||||
#!/bin/bash
|
||||
# Copyright (C) 2024 Intel Corporation
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
set -e
|
||||
echo "OPENAI_API_KEY=${OPENAI_API_KEY}"
|
||||
|
||||
WORKPATH=$(dirname "$PWD")
|
||||
export WORKDIR=$WORKPATH/../../
|
||||
echo "WORKDIR=${WORKDIR}"
|
||||
export ip_address=$(hostname -I | awk '{print $1}')
|
||||
export TOOLSET_PATH=$WORKDIR/GenAIExamples/AgentQnA/tools/
|
||||
|
||||
function start_agent_and_api_server() {
|
||||
echo "Starting CRAG server"
|
||||
docker run -d --runtime=runc --name=kdd-cup-24-crag-service -p=8080:8000 docker.io/aicrowd/kdd-cup-24-crag-mock-api:v0
|
||||
|
||||
echo "Starting Agent services"
|
||||
cd $WORKDIR/GenAIExamples/AgentQnA/docker_compose/intel/cpu/xeon
|
||||
bash launch_agent_service_openai.sh
|
||||
}
|
||||
|
||||
function validate() {
|
||||
local CONTENT="$1"
|
||||
local EXPECTED_RESULT="$2"
|
||||
local SERVICE_NAME="$3"
|
||||
|
||||
if echo "$CONTENT" | grep -q "$EXPECTED_RESULT"; then
|
||||
echo "[ $SERVICE_NAME ] Content is as expected: $CONTENT"
|
||||
echo 0
|
||||
else
|
||||
echo "[ $SERVICE_NAME ] Content does not match the expected result: $CONTENT"
|
||||
echo 1
|
||||
fi
|
||||
}
|
||||
|
||||
function validate_agent_service() {
|
||||
echo "----------------Test agent ----------------"
|
||||
local CONTENT=$(http_proxy="" curl http://${ip_address}:9090/v1/chat/completions -X POST -H "Content-Type: application/json" -d '{
|
||||
"query": "Tell me about Michael Jackson song thriller"
|
||||
}')
|
||||
local EXIT_CODE=$(validate "$CONTENT" "Thriller" "react-agent-endpoint")
|
||||
docker logs react-agent-endpoint
|
||||
if [ "$EXIT_CODE" == "1" ]; then
|
||||
exit 1
|
||||
fi
|
||||
|
||||
}
|
||||
|
||||
function main() {
|
||||
echo "==================== Start agent ===================="
|
||||
start_agent_and_api_server
|
||||
echo "==================== Agent started ===================="
|
||||
|
||||
echo "==================== Validate agent service ===================="
|
||||
validate_agent_service
|
||||
echo "==================== Agent service validated ===================="
|
||||
}
|
||||
|
||||
main
|
||||
76
AgentQnA/tests/4_launch_and_validate_agent_tgi.sh
Normal file
@@ -0,0 +1,76 @@
|
||||
#!/bin/bash
|
||||
# Copyright (C) 2024 Intel Corporation
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
set -e
|
||||
|
||||
WORKPATH=$(dirname "$PWD")
|
||||
export WORKDIR=$WORKPATH/../../
|
||||
echo "WORKDIR=${WORKDIR}"
|
||||
export ip_address=$(hostname -I | awk '{print $1}')
|
||||
export TOOLSET_PATH=$WORKDIR/GenAIExamples/AgentQnA/tools/
|
||||
export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN}
|
||||
|
||||
export HF_CACHE_DIR=$WORKDIR/hf_cache
|
||||
if [ ! -d "$HF_CACHE_DIR" ]; then
|
||||
mkdir -p "$HF_CACHE_DIR"
|
||||
fi
|
||||
ls $HF_CACHE_DIR
|
||||
|
||||
|
||||
function start_agent_and_api_server() {
|
||||
echo "Starting CRAG server"
|
||||
docker run -d --runtime=runc --name=kdd-cup-24-crag-service -p=8080:8000 docker.io/aicrowd/kdd-cup-24-crag-mock-api:v0
|
||||
|
||||
echo "Starting Agent services"
|
||||
cd $WORKDIR/GenAIExamples/AgentQnA/docker_compose/intel/hpu/gaudi
|
||||
bash launch_agent_service_tgi_gaudi.sh
|
||||
}
|
||||
|
||||
function validate() {
|
||||
local CONTENT="$1"
|
||||
local EXPECTED_RESULT="$2"
|
||||
local SERVICE_NAME="$3"
|
||||
|
||||
if echo "$CONTENT" | grep -q "$EXPECTED_RESULT"; then
|
||||
echo "[ $SERVICE_NAME ] Content is as expected: $CONTENT"
|
||||
echo 0
|
||||
else
|
||||
echo "[ $SERVICE_NAME ] Content does not match the expected result: $CONTENT"
|
||||
echo 1
|
||||
fi
|
||||
}
|
||||
|
||||
function validate_agent_service() {
|
||||
echo "----------------Test agent ----------------"
|
||||
local CONTENT=$(http_proxy="" curl http://${ip_address}:9095/v1/chat/completions -X POST -H "Content-Type: application/json" -d '{
|
||||
"query": "Tell me about Michael Jackson song thriller"
|
||||
}')
|
||||
local EXIT_CODE=$(validate "$CONTENT" "Thriller" "docgrader-agent-endpoint")
|
||||
docker logs docgrader-agent-endpoint
|
||||
if [ "$EXIT_CODE" == "1" ]; then
|
||||
exit 1
|
||||
fi
|
||||
|
||||
local CONTENT=$(http_proxy="" curl http://${ip_address}:9090/v1/chat/completions -X POST -H "Content-Type: application/json" -d '{
|
||||
"query": "Tell me about Michael Jackson song thriller"
|
||||
}')
|
||||
local EXIT_CODE=$(validate "$CONTENT" "Thriller" "react-agent-endpoint")
|
||||
docker logs react-agent-endpoint
|
||||
if [ "$EXIT_CODE" == "1" ]; then
|
||||
exit 1
|
||||
fi
|
||||
|
||||
}
|
||||
|
||||
function main() {
|
||||
echo "==================== Start agent ===================="
|
||||
start_agent_and_api_server
|
||||
echo "==================== Agent started ===================="
|
||||
|
||||
echo "==================== Validate agent service ===================="
|
||||
validate_agent_service
|
||||
echo "==================== Agent service validated ===================="
|
||||
}
|
||||
|
||||
main
|
||||
50
AgentQnA/tests/_test_compose_openai_on_xeon.sh
Normal file
@@ -0,0 +1,50 @@
|
||||
#!/bin/bash
|
||||
# Copyright (C) 2024 Intel Corporation
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
set -e
|
||||
|
||||
echo "OPENAI_API_KEY=${OPENAI_API_KEY}"
|
||||
WORKPATH=$(dirname "$PWD")
|
||||
export WORKDIR=$WORKPATH/../../
|
||||
echo "WORKDIR=${WORKDIR}"
|
||||
export ip_address=$(hostname -I | awk '{print $1}')
|
||||
export TOOLSET_PATH=$WORKDIR/GenAIExamples/AgentQnA/tools/
|
||||
|
||||
function stop_agent_and_api_server() {
|
||||
echo "Stopping CRAG server"
|
||||
docker stop $(docker ps -q --filter ancestor=docker.io/aicrowd/kdd-cup-24-crag-mock-api:v0)
|
||||
echo "Stopping Agent services"
|
||||
docker stop $(docker ps -q --filter ancestor=opea/agent-langchain:latest)
|
||||
}
|
||||
|
||||
function stop_retrieval_tool() {
|
||||
echo "Stopping Retrieval tool"
|
||||
docker compose -f $WORKDIR/GenAIExamples/AgentQnA/retrieval_tool/docker/docker-compose-retrieval-tool.yaml down
|
||||
}
|
||||
|
||||
echo "=================== #1 Building docker images===================="
|
||||
bash 1_build_images.sh
|
||||
echo "=================== #1 Building docker images completed===================="
|
||||
|
||||
echo "=================== #2 Start retrieval tool===================="
|
||||
bash 2_start_retrieval_tool.sh
|
||||
echo "=================== #2 Retrieval tool started===================="
|
||||
|
||||
echo "=================== #3 Ingest data and validate retrieval===================="
|
||||
bash 3_ingest_data_and_validate_retrieval.sh
|
||||
echo "=================== #3 Data ingestion and validation completed===================="
|
||||
|
||||
echo "=================== #4 Start agent and API server===================="
|
||||
bash 4_launch_and_validate_agent_openai.sh
|
||||
echo "=================== #4 Agent test passed ===================="
|
||||
|
||||
echo "=================== #5 Stop agent and API server===================="
|
||||
stop_agent_and_api_server
|
||||
echo "=================== #5 Agent and API server stopped===================="
|
||||
|
||||
echo "=================== #6 Stop retrieval tool===================="
|
||||
stop_retrieval_tool
|
||||
echo "=================== #6 Retrieval tool stopped===================="
|
||||
|
||||
echo "ALL DONE!"
|
||||
73
AgentQnA/tests/test_compose_on_gaudi.sh
Normal file
@@ -0,0 +1,73 @@
|
||||
#!/bin/bash
|
||||
# Copyright (C) 2024 Intel Corporation
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
set -e
|
||||
|
||||
WORKPATH=$(dirname "$PWD")
|
||||
export WORKDIR=$WORKPATH/../../
|
||||
echo "WORKDIR=${WORKDIR}"
|
||||
export ip_address=$(hostname -I | awk '{print $1}')
|
||||
export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN}
|
||||
export TOOLSET_PATH=$WORKDIR/GenAIExamples/AgentQnA/tools/
|
||||
|
||||
function stop_crag() {
|
||||
cid=$(docker ps -aq --filter "name=kdd-cup-24-crag-service")
|
||||
echo "Stopping container kdd-cup-24-crag-service with cid $cid"
|
||||
if [[ ! -z "$cid" ]]; then docker rm $cid -f && sleep 1s; fi
|
||||
}
|
||||
|
||||
function stop_agent_docker() {
|
||||
cd $WORKPATH/docker_compose/intel/hpu/gaudi/
|
||||
# docker compose -f compose.yaml down
|
||||
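# Stop and remove each container declared via container_name in compose.yaml (instead of docker compose down)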
container_list=$(cat compose.yaml | grep container_name | cut -d':' -f2)
|
||||
for container_name in $container_list; do
|
||||
cid=$(docker ps -aq --filter "name=$container_name")
|
||||
echo "Stopping container $container_name"
|
||||
if [[ ! -z "$cid" ]]; then docker rm $cid -f && sleep 1s; fi
|
||||
done
|
||||
}
|
||||
|
||||
function stop_retrieval_tool() {
|
||||
echo "Stopping Retrieval tool"
|
||||
local RETRIEVAL_TOOL_PATH=$WORKPATH/../DocIndexRetriever
|
||||
cd $RETRIEVAL_TOOL_PATH/docker_compose/intel/cpu/xeon/
|
||||
# docker compose -f compose.yaml down
|
||||
container_list=$(cat compose.yaml | grep container_name | cut -d':' -f2)
|
||||
for container_name in $container_list; do
|
||||
cid=$(docker ps -aq --filter "name=$container_name")
|
||||
echo "Stopping container $container_name"
|
||||
if [[ ! -z "$cid" ]]; then docker rm $cid -f && sleep 1s; fi
|
||||
done
|
||||
}
|
||||
echo "workpath: $WORKPATH"
|
||||
echo "=================== Stop containers ===================="
|
||||
stop_crag
|
||||
stop_agent_docker
|
||||
stop_retrieval_tool
|
||||
|
||||
cd $WORKPATH/tests
|
||||
|
||||
echo "=================== #1 Building docker images===================="
|
||||
bash 1_build_images.sh
|
||||
echo "=================== #1 Building docker images completed===================="
|
||||
|
||||
echo "=================== #2 Start retrieval tool===================="
|
||||
bash 2_start_retrieval_tool.sh
|
||||
echo "=================== #2 Retrieval tool started===================="
|
||||
|
||||
echo "=================== #3 Ingest data and validate retrieval===================="
|
||||
bash 3_ingest_data_and_validate_retrieval.sh
|
||||
echo "=================== #3 Data ingestion and validation completed===================="
|
||||
|
||||
echo "=================== #4 Start agent and API server===================="
|
||||
bash 4_launch_and_validate_agent_tgi.sh
|
||||
echo "=================== #4 Agent test passed ===================="
|
||||
|
||||
echo "=================== #5 Stop agent and API server===================="
|
||||
stop_crag
|
||||
stop_agent_docker
|
||||
stop_retrieval_tool
|
||||
echo "=================== #5 Agent and API server stopped===================="
|
||||
|
||||
echo "ALL DONE!"
|
||||
330
AgentQnA/tools/pycragapi.py
Normal file
@@ -0,0 +1,330 @@
|
||||
# Copyright (c) Meta Platforms, Inc. and affiliates.
|
||||
# All rights reserved.
|
||||
|
||||
# This source code is licensed under the license found in the
|
||||
# LICENSE file in the root directory of this source tree.
|
||||
|
||||
import json
|
||||
import os
|
||||
from typing import List
|
||||
|
||||
import requests
|
||||
|
||||
|
||||
class CRAG(object):
|
||||
"""A client for interacting with the CRAG server, offering methods to query various domains such as Open, Movie, Finance, Music, and Sports. Each method corresponds to an API endpoint on the CRAG server.
|
||||
|
||||
Attributes:
|
||||
server (str): The base URL of the CRAG server. Defaults to "http://127.0.0.1:8080".
|
||||
|
||||
Methods:
|
||||
open_search_entity_by_name(query: str) -> dict: Search for entities by name in the Open domain.
|
||||
open_get_entity(entity: str) -> dict: Retrieve detailed information about an entity in the Open domain.
|
||||
movie_get_person_info(person_name: str) -> dict: Get information about a person related to movies.
|
||||
movie_get_movie_info(movie_name: str) -> dict: Get information about a movie.
|
||||
movie_get_year_info(year: str) -> dict: Get information about movies released in a specific year.
|
||||
movie_get_movie_info_by_id(movie_id: int) -> dict: Get movie information by its unique ID.
|
||||
movie_get_person_info_by_id(person_id: int) -> dict: Get person information by their unique ID.
|
||||
finance_get_company_name(query: str) -> dict: Search for company names in the finance domain.
|
||||
finance_get_ticker_by_name(query: str) -> dict: Retrieve the ticker symbol for a given company name.
|
||||
finance_get_price_history(ticker_name: str) -> dict: Get the price history for a given ticker symbol.
|
||||
finance_get_detailed_price_history(ticker_name: str) -> dict: Get detailed price history for a ticker symbol.
|
||||
finance_get_dividends_history(ticker_name: str) -> dict: Get dividend history for a ticker symbol.
|
||||
finance_get_market_capitalization(ticker_name: str) -> dict: Retrieve market capitalization for a ticker symbol.
|
||||
finance_get_eps(ticker_name: str) -> dict: Get earnings per share (EPS) for a ticker symbol.
|
||||
finance_get_pe_ratio(ticker_name: str) -> dict: Get the price-to-earnings (PE) ratio for a ticker symbol.
|
||||
finance_get_info(ticker_name: str) -> dict: Get financial information for a ticker symbol.
|
||||
music_search_artist_entity_by_name(artist_name: str) -> dict: Search for music artists by name.
|
||||
music_search_song_entity_by_name(song_name: str) -> dict: Search for songs by name.
|
||||
music_get_billboard_rank_date(rank: int, date: str = None) -> dict: Get Billboard ranking for a specific rank and date.
|
||||
music_get_billboard_attributes(date: str, attribute: str, song_name: str) -> dict: Get attributes of a song from Billboard rankings.
|
||||
music_grammy_get_best_artist_by_year(year: int) -> dict: Get the Grammy Best New Artist for a specific year.
|
||||
music_grammy_get_award_count_by_artist(artist_name: str) -> dict: Get the total Grammy awards won by an artist.
|
||||
music_grammy_get_award_count_by_song(song_name: str) -> dict: Get the total Grammy awards won by a song.
|
||||
music_grammy_get_best_song_by_year(year: int) -> dict: Get the Grammy Song of the Year for a specific year.
|
||||
music_grammy_get_award_date_by_artist(artist_name: str) -> dict: Get the years an artist won a Grammy award.
|
||||
music_grammy_get_best_album_by_year(year: int) -> dict: Get the Grammy Album of the Year for a specific year.
|
||||
music_grammy_get_all_awarded_artists() -> dict: Get all artists awarded the Grammy Best New Artist.
|
||||
music_get_artist_birth_place(artist_name: str) -> dict: Get the birthplace of an artist.
|
||||
music_get_artist_birth_date(artist_name: str) -> dict: Get the birth date of an artist.
|
||||
music_get_members(band_name: str) -> dict: Get the member list of a band.
|
||||
music_get_lifespan(artist_name: str) -> dict: Get the lifespan of an artist.
|
||||
music_get_song_author(song_name: str) -> dict: Get the author of a song.
|
||||
music_get_song_release_country(song_name: str) -> dict: Get the release country of a song.
|
||||
music_get_song_release_date(song_name: str) -> dict: Get the release date of a song.
|
||||
music_get_artist_all_works(artist_name: str) -> dict: Get all works by an artist.
|
||||
sports_soccer_get_games_on_date(team_name: str, date: str) -> dict: Get soccer games on a specific date.
|
||||
sports_nba_get_games_on_date(team_name: str, date: str) -> dict: Get NBA games on a specific date.
|
||||
sports_nba_get_play_by_play_data_by_game_ids(game_ids: List[str]) -> dict: Get NBA play by play data for a set of game ids.
|
||||
|
||||
Note:
|
||||
Each method performs a POST request to the corresponding API endpoint and returns the response as a JSON dictionary.
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
self.server = os.environ.get("CRAG_SERVER", "http://127.0.0.1:8080")
|
||||
|
||||
def open_search_entity_by_name(self, query: str):
|
||||
url = self.server + "/open/search_entity_by_name"
|
||||
headers = {"accept": "application/json"}
|
||||
data = {"query": query}
|
||||
result = requests.post(url, json=data, headers=headers)
|
||||
return json.loads(result.text)
|
||||
|
||||
def open_get_entity(self, entity: str):
|
||||
url = self.server + "/open/get_entity"
|
||||
headers = {"accept": "application/json"}
|
||||
data = {"query": entity}
|
||||
result = requests.post(url, json=data, headers=headers)
|
||||
return json.loads(result.text)
|
||||
|
||||
def movie_get_person_info(self, person_name: str):
|
||||
url = self.server + "/movie/get_person_info"
|
||||
headers = {"accept": "application/json"}
|
||||
data = {"query": person_name}
|
||||
result = requests.post(url, json=data, headers=headers)
|
||||
return json.loads(result.text)
|
||||
|
||||
def movie_get_movie_info(self, movie_name: str):
|
||||
url = self.server + "/movie/get_movie_info"
|
||||
headers = {"accept": "application/json"}
|
||||
data = {"query": movie_name}
|
||||
result = requests.post(url, json=data, headers=headers)
|
||||
return json.loads(result.text)
|
||||
|
||||
def movie_get_year_info(self, year: str):
|
||||
url = self.server + "/movie/get_year_info"
|
||||
headers = {"accept": "application/json"}
|
||||
data = {"query": year}
|
||||
result = requests.post(url, json=data, headers=headers)
|
||||
return json.loads(result.text)
|
||||
|
||||
def movie_get_movie_info_by_id(self, movie_id: int):
|
||||
url = self.server + "/movie/get_movie_info_by_id"
|
||||
headers = {"accept": "application/json"}
|
||||
data = {"query": movid_id}
|
||||
result = requests.post(url, json=data, headers=headers)
|
||||
return json.loads(result.text)
|
||||
|
||||
def movie_get_person_info_by_id(self, person_id: int):
|
||||
url = self.server + "/movie/get_person_info_by_id"
|
||||
headers = {"accept": "application/json"}
|
||||
data = {"query": person_id}
|
||||
result = requests.post(url, json=data, headers=headers)
|
||||
return json.loads(result.text)
|
||||
|
||||
def finance_get_company_name(self, query: str):
|
||||
url = self.server + "/finance/get_company_name"
|
||||
headers = {"accept": "application/json"}
|
||||
data = {"query": query}
|
||||
result = requests.post(url, json=data, headers=headers)
|
||||
return json.loads(result.text)
|
||||
|
||||
def finance_get_ticker_by_name(self, query: str):
|
||||
url = self.server + "/finance/get_ticker_by_name"
|
||||
headers = {"accept": "application/json"}
|
||||
data = {"query": query}
|
||||
result = requests.post(url, json=data, headers=headers)
|
||||
return json.loads(result.text)
|
||||
|
||||
def finance_get_price_history(self, ticker_name: str):
|
||||
url = self.server + "/finance/get_price_history"
|
||||
headers = {"accept": "application/json"}
|
||||
data = {"query": ticker_name}
|
||||
result = requests.post(url, json=data, headers=headers)
|
||||
return json.loads(result.text)
|
||||
|
||||
def finance_get_detailed_price_history(self, ticker_name: str):
|
||||
url = self.server + "/finance/get_detailed_price_history"
|
||||
headers = {"accept": "application/json"}
|
||||
data = {"query": ticker_name}
|
||||
result = requests.post(url, json=data, headers=headers)
|
||||
return json.loads(result.text)
|
||||
|
||||
def finance_get_dividends_history(self, ticker_name: str):
|
||||
url = self.server + "/finance/get_dividends_history"
|
||||
headers = {"accept": "application/json"}
|
||||
data = {"query": ticker_name}
|
||||
result = requests.post(url, json=data, headers=headers)
|
||||
return json.loads(result.text)
|
||||
|
||||
def finance_get_market_capitalization(self, ticker_name: str):
|
||||
url = self.server + "/finance/get_market_capitalization"
|
||||
headers = {"accept": "application/json"}
|
||||
data = {"query": ticker_name}
|
||||
result = requests.post(url, json=data, headers=headers)
|
||||
return json.loads(result.text)
|
||||
|
||||
def finance_get_eps(self, ticker_name: str):
|
||||
url = self.server + "/finance/get_eps"
|
||||
headers = {"accept": "application/json"}
|
||||
data = {"query": ticker_name}
|
||||
result = requests.post(url, json=data, headers=headers)
|
||||
return json.loads(result.text)
|
||||
|
||||
def finance_get_pe_ratio(self, ticker_name: str):
|
||||
url = self.server + "/finance/get_pe_ratio"
|
||||
headers = {"accept": "application/json"}
|
||||
data = {"query": ticker_name}
|
||||
result = requests.post(url, json=data, headers=headers)
|
||||
return json.loads(result.text)
|
||||
|
||||
def finance_get_info(self, ticker_name: str):
|
||||
url = self.server + "/finance/get_info"
|
||||
headers = {"accept": "application/json"}
|
||||
data = {"query": ticker_name}
|
||||
result = requests.post(url, json=data, headers=headers)
|
||||
return json.loads(result.text)
|
||||
|
||||
def music_search_artist_entity_by_name(self, artist_name: str):
|
||||
url = self.server + "/music/search_artist_entity_by_name"
|
||||
headers = {"accept": "application/json"}
|
||||
data = {"query": artist_name}
|
||||
result = requests.post(url, json=data, headers=headers)
|
||||
return json.loads(result.text)
|
||||
|
||||
def music_search_song_entity_by_name(self, song_name: str):
|
||||
url = self.server + "/music/search_song_entity_by_name"
|
||||
headers = {"accept": "application/json"}
|
||||
data = {"query": song_name}
|
||||
result = requests.post(url, json=data, headers=headers)
|
||||
return json.loads(result.text)
|
||||
|
||||
def music_get_billboard_rank_date(self, rank: int, date: str = None):
|
||||
url = self.server + "/music/get_billboard_rank_date"
|
||||
headers = {"accept": "application/json"}
|
||||
data = {"rank": rank, "date": date}
|
||||
result = requests.post(url, json=data, headers=headers)
|
||||
return json.loads(result.text)
|
||||
|
||||
def music_get_billboard_attributes(self, date: str, attribute: str, song_name: str):
|
||||
url = self.server + "/music/get_billboard_attributes"
|
||||
headers = {"accept": "application/json"}
|
||||
data = {"date": date, "attribute": attribute, "song_name": song_name}
|
||||
result = requests.post(url, json=data, headers=headers)
|
||||
return json.loads(result.text)
|
||||
|
||||
def music_grammy_get_best_artist_by_year(self, year: int):
|
||||
url = self.server + "/music/grammy_get_best_artist_by_year"
|
||||
headers = {"accept": "application/json"}
|
||||
data = {"query": year}
|
||||
result = requests.post(url, json=data, headers=headers)
|
||||
return json.loads(result.text)
|
||||
|
||||
def music_grammy_get_award_count_by_artist(self, artist_name: str):
|
||||
url = self.server + "/music/grammy_get_award_count_by_artist"
|
||||
headers = {"accept": "application/json"}
|
||||
data = {"query": artist_name}
|
||||
result = requests.post(url, json=data, headers=headers)
|
||||
return json.loads(result.text)
|
||||
|
||||
def music_grammy_get_award_count_by_song(self, song_name: str):
|
||||
url = self.server + "/music/grammy_get_award_count_by_song"
|
||||
headers = {"accept": "application/json"}
|
||||
data = {"query": song_name}
|
||||
result = requests.post(url, json=data, headers=headers)
|
||||
return json.loads(result.text)
|
||||
|
||||
def music_grammy_get_best_song_by_year(self, year: int):
|
||||
url = self.server + "/music/grammy_get_best_song_by_year"
|
||||
headers = {"accept": "application/json"}
|
||||
data = {"query": year}
|
||||
result = requests.post(url, json=data, headers=headers)
|
||||
return json.loads(result.text)
|
||||
|
||||
def music_grammy_get_award_date_by_artist(self, artist_name: str):
|
||||
url = self.server + "/music/grammy_get_award_date_by_artist"
|
||||
headers = {"accept": "application/json"}
|
||||
data = {"query": artist_name}
|
||||
result = requests.post(url, json=data, headers=headers)
|
||||
return json.loads(result.text)
|
||||
|
||||
def music_grammy_get_best_album_by_year(self, year: int):
|
||||
url = self.server + "/music/grammy_get_best_album_by_year"
|
||||
headers = {"accept": "application/json"}
|
||||
data = {"query": year}
|
||||
result = requests.post(url, json=data, headers=headers)
|
||||
return json.loads(result.text)
|
||||
|
||||
def music_grammy_get_all_awarded_artists(self):
|
||||
url = self.server + "/music/grammy_get_all_awarded_artists"
|
||||
headers = {"accept": "application/json"}
|
||||
result = requests.post(url, headers=headers)
|
||||
return json.loads(result.text)
|
||||
|
||||
def music_get_artist_birth_place(self, artist_name: str):
|
||||
url = self.server + "/music/get_artist_birth_place"
|
||||
headers = {"accept": "application/json"}
|
||||
data = {"query": artist_name}
|
||||
result = requests.post(url, json=data, headers=headers)
|
||||
return json.loads(result.text)
|
||||
|
||||
def music_get_artist_birth_date(self, artist_name: str):
|
||||
url = self.server + "/music/get_artist_birth_date"
|
||||
headers = {"accept": "application/json"}
|
||||
data = {"query": artist_name}
|
||||
result = requests.post(url, json=data, headers=headers)
|
||||
return json.loads(result.text)
|
||||
|
||||
def music_get_members(self, band_name: str):
|
||||
url = self.server + "/music/get_members"
|
||||
headers = {"accept": "application/json"}
|
||||
data = {"query": band_name}
|
||||
result = requests.post(url, json=data, headers=headers)
|
||||
return json.loads(result.text)
|
||||
|
||||
def music_get_lifespan(self, artist_name: str):
|
||||
url = self.server + "/music/get_lifespan"
|
||||
headers = {"accept": "application/json"}
|
||||
data = {"query": artist_name}
|
||||
result = requests.post(url, json=data, headers=headers)
|
||||
return json.loads(result.text)
|
||||
|
||||
def music_get_song_author(self, song_name: str):
|
||||
url = self.server + "/music/get_song_author"
|
||||
headers = {"accept": "application/json"}
|
||||
data = {"query": song_name}
|
||||
result = requests.post(url, json=data, headers=headers)
|
||||
return json.loads(result.text)
|
||||
|
||||
def music_get_song_release_country(self, song_name: str):
|
||||
url = self.server + "/music/get_song_release_country"
|
||||
headers = {"accept": "application/json"}
|
||||
data = {"query": song_name}
|
||||
result = requests.post(url, json=data, headers=headers)
|
||||
return json.loads(result.text)
|
||||
|
||||
def music_get_song_release_date(self, song_name: str):
|
||||
url = self.server + "/music/get_song_release_date"
|
||||
headers = {"accept": "application/json"}
|
||||
data = {"query": song_name}
|
||||
result = requests.post(url, json=data, headers=headers)
|
||||
return json.loads(result.text)
|
||||
|
||||
def music_get_artist_all_works(self, artist_name: str):
|
||||
url = self.server + "/music/get_artist_all_works"
|
||||
headers = {"accept": "application/json"}
|
||||
data = {"query": song_name}
|
||||
result = requests.post(url, json=data, headers=headers)
|
||||
return json.loads(result.text)
|
||||
|
||||
def sports_soccer_get_games_on_date(self, date: str, team_name: str = None):
|
||||
url = self.server + "/sports/soccer/get_games_on_date"
|
||||
headers = {"accept": "application/json"}
|
||||
data = {"team_name": team_name, "date": date}
|
||||
result = requests.post(url, json=data, headers=headers)
|
||||
return json.loads(result.text)
|
||||
|
||||
def sports_nba_get_games_on_date(self, date: str, team_name: str = None):
|
||||
url = self.server + "/sports/nba/get_games_on_date"
|
||||
headers = {"accept": "application/json"}
|
||||
data = {"team_name": team_name, "date": date}
|
||||
result = requests.post(url, json=data, headers=headers)
|
||||
return json.loads(result.text)
|
||||
|
||||
def sports_nba_get_play_by_play_data_by_game_ids(self, game_ids: List[str]):
|
||||
url = self.server + "/sports/nba/get_play_by_play_data_by_game_ids"
|
||||
headers = {"accept": "application/json"}
|
||||
data = {"game_ids": game_ids}
|
||||
result = requests.post(url, json=data, headers=headers)
|
||||
return json.loads(result.text)
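
A minimal usage sketch of the client above, assuming the CRAG mock API container is already running locally (as in the test scripts) and that `CRAG_SERVER` points at it; the queries themselves are made-up examples:

```python
# Hypothetical usage sketch for the CRAG client defined in pycragapi.py.
import os

os.environ.setdefault("CRAG_SERVER", "http://127.0.0.1:8080")  # matches the client's default

from tools.pycragapi import CRAG

api = CRAG()
print(api.music_get_song_author("Thriller"))                 # author of a song
print(api.music_get_billboard_rank_date(1, "2024-06-01"))    # Billboard #1 entry for a date (format assumed)
```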
|
||||
59
AgentQnA/tools/supervisor_agent_tools.yaml
Normal file
@@ -0,0 +1,59 @@
|
||||
# Copyright (C) 2024 Intel Corporation
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
search_knowledge_base:
|
||||
description: Search knowledge base for a given query. Returns text related to the query.
|
||||
callable_api: tools.py:search_knowledge_base
|
||||
args_schema:
|
||||
query:
|
||||
type: str
|
||||
description: query
|
||||
return_output: retrieved_data
|
||||
|
||||
get_artist_birth_place:
|
||||
description: Get the birth place of an artist.
|
||||
callable_api: tools.py:get_artist_birth_place
|
||||
args_schema:
|
||||
artist_name:
|
||||
type: str
|
||||
description: artist name
|
||||
return_output: birth_place
|
||||
|
||||
get_billboard_rank_date:
|
||||
description: Get Billboard ranking for a specific rank and date.
|
||||
callable_api: tools.py:get_billboard_rank_date
|
||||
args_schema:
|
||||
rank:
|
||||
type: int
|
||||
description: rank
|
||||
date:
|
||||
type: str
|
||||
description: date
|
||||
return_output: billboard_info
|
||||
|
||||
get_song_release_date:
|
||||
description: Get the release date of a song.
|
||||
callable_api: tools.py:get_song_release_date
|
||||
args_schema:
|
||||
song_name:
|
||||
type: str
|
||||
description: song name
|
||||
return_output: release_date
|
||||
|
||||
get_members:
|
||||
description: Get the member list of a band.
|
||||
callable_api: tools.py:get_members
|
||||
args_schema:
|
||||
band_name:
|
||||
type: str
|
||||
description: band name
|
||||
return_output: members
|
||||
|
||||
get_grammy_best_artist_by_year:
|
||||
description: Get the Grammy Best New Artist for a specific year.
|
||||
callable_api: tools.py:get_grammy_best_artist_by_year
|
||||
args_schema:
|
||||
year:
|
||||
type: int
|
||||
description: year
|
||||
return_output: grammy_best_new_artist
|
||||
52
AgentQnA/tools/tools.py
Normal file
@@ -0,0 +1,52 @@
|
||||
# Copyright (C) 2024 Intel Corporation
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
import os
|
||||
|
||||
import requests
|
||||
from tools.pycragapi import CRAG
|
||||
|
||||
|
||||
def search_knowledge_base(query: str) -> str:
|
||||
"""Search the knowledge base for a specific query."""
|
||||
# use worker agent (DocGrader) to search the knowledge base
|
||||
url = os.environ.get("WORKER_AGENT_URL")
|
||||
print(url)
|
||||
proxies = {"http": ""}
|
||||
payload = {
|
||||
"query": query,
|
||||
}
|
||||
response = requests.post(url, json=payload, proxies=proxies)
|
||||
return response.json()["text"]
|
||||
|
||||
|
||||
def get_grammy_best_artist_by_year(year: int) -> dict:
|
||||
"""Get the Grammy Best New Artist for a specific year."""
|
||||
api = CRAG()
|
||||
year = int(year)
|
||||
return api.music_grammy_get_best_artist_by_year(year)
|
||||
|
||||
|
||||
def get_members(band_name: str) -> dict:
|
||||
"""Get the member list of a band."""
|
||||
api = CRAG()
|
||||
return api.music_get_members(band_name)
|
||||
|
||||
|
||||
def get_artist_birth_place(artist_name: str) -> dict:
|
||||
"""Get the birthplace of an artist."""
|
||||
api = CRAG()
|
||||
return api.music_get_artist_birth_place(artist_name)
|
||||
|
||||
|
||||
def get_billboard_rank_date(rank: int, date: str = None) -> dict:
|
||||
"""Get Billboard ranking for a specific rank and date."""
|
||||
api = CRAG()
|
||||
rank = int(rank)
|
||||
return api.music_get_billboard_rank_date(rank, date)
|
||||
|
||||
|
||||
def get_song_release_date(song_name: str) -> dict:
|
||||
"""Get the release date of a song."""
|
||||
api = CRAG()
|
||||
return api.music_get_song_release_date(song_name)
|
||||
27
AgentQnA/tools/worker_agent_tools.py
Normal file
@@ -0,0 +1,27 @@
|
||||
# Copyright (C) 2024 Intel Corporation
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
import os
|
||||
|
||||
import requests
|
||||
|
||||
|
||||
def search_knowledge_base(query: str) -> str:
|
||||
"""Search the knowledge base for a specific query."""
|
||||
url = os.environ.get("RETRIEVAL_TOOL_URL")
|
||||
print(url)
|
||||
proxies = {"http": ""}
|
||||
payload = {
|
||||
"text": query,
|
||||
}
|
||||
response = requests.post(url, json=payload, proxies=proxies)
|
||||
print(response)
|
||||
docs = response.json()["documents"]
|
||||
context = ""
|
||||
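# Join the retrieved documents into a single newline-separated context string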
for i, doc in enumerate(docs):
|
||||
if i == 0:
|
||||
context = doc
|
||||
else:
|
||||
context += "\n" + doc
|
||||
print(context)
|
||||
return context
|
||||
11
AgentQnA/tools/worker_agent_tools.yaml
Normal file
@@ -0,0 +1,11 @@
|
||||
# Copyright (C) 2024 Intel Corporation
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
search_knowledge_base:
|
||||
description: Search knowledge base for a given query. Returns text related to the query.
|
||||
callable_api: worker_agent_tools.py:search_knowledge_base
|
||||
args_schema:
|
||||
query:
|
||||
type: str
|
||||
description: query
|
||||
return_output: retrieved_data
|
||||
@@ -8,7 +8,6 @@ FROM python:3.11-slim
|
||||
RUN apt-get update -y && apt-get install -y --no-install-recommends --fix-missing \
|
||||
libgl1-mesa-glx \
|
||||
libjemalloc-dev \
|
||||
vim \
|
||||
git
|
||||
|
||||
RUN useradd -m -s /bin/bash user && \
|
||||
34
AudioQnA/README.md
Normal file
@@ -0,0 +1,34 @@
# AudioQnA Application

AudioQnA is an example that demonstrates the integration of Generative AI (GenAI) models for performing question-answering (QnA) on audio files, with the added functionality of Text-to-Speech (TTS) for generating spoken responses. The example showcases how to convert audio input to text using Automatic Speech Recognition (ASR), generate answers to user queries using a language model, and then convert those answers back to speech using Text-to-Speech (TTS).
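
Once the service is deployed (see the sections below), the backend exposes a single `/v1/audioqna` endpoint that accepts base64-encoded audio and returns the spoken answer. The sketch below assumes the request schema and port 3008 shown in the Kubernetes guide's curl example; adjust the host, port, and response handling to match your deployment.

```python
# Minimal request sketch (assumptions: port 3008 and the {"audio", "max_tokens"} schema
# from the Kubernetes guide; response handling is deployment-specific).
import base64
import requests

host_ip = "localhost"  # replace with the machine running the AudioQnA backend
with open("question.wav", "rb") as f:  # any short spoken question
    audio_b64 = base64.b64encode(f.read()).decode("utf-8")

resp = requests.post(
    f"http://{host_ip}:3008/v1/audioqna",
    json={"audio": audio_b64, "max_tokens": 64},
)
print(resp.status_code)
print(resp.text[:80])  # body carries the generated speech; decode per your deployment's schema
```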

## Deploy AudioQnA Service

The AudioQnA service can be deployed on either Intel Gaudi2 or an Intel Xeon Scalable Processor.

### Deploy AudioQnA on Gaudi

Refer to the [Gaudi Guide](./docker_compose/intel/hpu/gaudi/README.md) for instructions on deploying AudioQnA on Gaudi.

### Deploy AudioQnA on Xeon

Refer to the [Xeon Guide](./docker_compose/intel/cpu/xeon/README.md) for instructions on deploying AudioQnA on Xeon.

## Supported Models

### ASR

The default model is [openai/whisper-small](https://huggingface.co/openai/whisper-small). All models in the Whisper family are also supported, such as `openai/whisper-large-v3`, `openai/whisper-medium`, `openai/whisper-base`, `openai/whisper-tiny`, etc.

To replace the model, edit `compose.yaml` and add a `command` line that passes the name of the model you want to use:

```yaml
services:
  whisper-service:
    ...
    command: --model_name_or_path openai/whisper-tiny
```

### TTS

The default model is [microsoft/SpeechT5](https://huggingface.co/microsoft/speecht5_tts). Replacing the model is not currently supported; more models under commercial licenses will be added in the future.
51
AudioQnA/benchmark/accuracy/README.md
Normal file
@@ -0,0 +1,51 @@
# AudioQnA Accuracy Evaluation

AudioQnA is an example that demonstrates the integration of Generative AI (GenAI) models for performing question-answering (QnA) on audio, combining Automatic Speech Recognition (ASR) and Text-to-Speech (TTS). The following is the pipeline for evaluating the ASR accuracy.

## Dataset

We evaluate the ASR accuracy on the test set of the LibriSpeech [dataset](https://huggingface.co/datasets/andreagasparini/librispeech_test_only), which contains 2,620 records of audio and text.

## Metrics

We evaluate the WER (Word Error Rate) metric of the ASR microservice.
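
WER counts word-level substitutions, insertions, and deletions against the reference transcript, divided by the number of reference words. The evaluation scripts in this folder compute it with the `evaluate` library; the minimal sketch below uses two made-up sentences just to show the call.

```python
# Standalone WER sketch using the same `evaluate` library as local_eval.py / online_eval.py.
from evaluate import load

wer = load("wer")
references = ["the quick brown fox jumps over the lazy dog"]   # ground-truth transcript (made up)
predictions = ["the quick brown fox jumped over a lazy dog"]   # ASR output (made up)

# Reported as a percentage, matching the table under "Performance Result".
print(100 * wer.compute(references=references, predictions=predictions))
```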

## Evaluation

### Launch ASR microservice

Launch the ASR microservice with the following commands. For more details, please refer to the [doc](https://github.com/opea-project/GenAIComps/tree/main/comps/asr).

```bash
git clone https://github.com/opea-project/GenAIComps
cd GenAIComps
docker build -t opea/whisper:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/asr/whisper/Dockerfile .
# change the model to evaluate by editing --model_name_or_path
docker run -p 7066:7066 --ipc=host -e http_proxy=$http_proxy -e https_proxy=$https_proxy opea/whisper:latest --model_name_or_path "openai/whisper-tiny"
```

### Evaluate

Install dependencies:

```
pip install -r requirements.txt
```

Evaluate the ASR accuracy:

```py
# validate the local (offline) model
# python local_eval.py
# validate the online ASR microservice
python online_eval.py
```

### Performance Result

Here are the tested results for reference:

| Model | WER (%) |
| --- | ---- |
| whisper-large-v2 | 2.87 |
| whisper-large | 2.7 |
| whisper-medium | 3.45 |
35
AudioQnA/benchmark/accuracy/local_eval.py
Normal file
@@ -0,0 +1,35 @@
|
||||
# Copyright (C) 2024 Intel Corporation
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
import torch
|
||||
from datasets import load_dataset
|
||||
from evaluate import load
|
||||
from transformers import WhisperForConditionalGeneration, WhisperProcessor
|
||||
|
||||
device = "cuda" if torch.cuda.is_available() else "cpu"
|
||||
|
||||
MODEL_NAME = "openai/whisper-large-v2"
|
||||
|
||||
librispeech_test_clean = load_dataset(
|
||||
"andreagasparini/librispeech_test_only", "clean", split="test", trust_remote_code=True
|
||||
)
|
||||
processor = WhisperProcessor.from_pretrained(MODEL_NAME)
|
||||
model = WhisperForConditionalGeneration.from_pretrained(MODEL_NAME).to(device)
|
||||
|
||||
|
||||
def map_to_pred(batch):
|
||||
audio = batch["audio"]
|
||||
input_features = processor(audio["array"], sampling_rate=audio["sampling_rate"], return_tensors="pt").input_features
|
||||
batch["reference"] = processor.tokenizer._normalize(batch["text"])
|
||||
|
||||
with torch.no_grad():
|
||||
predicted_ids = model.generate(input_features.to(device))[0]
|
||||
transcription = processor.decode(predicted_ids)
|
||||
batch["prediction"] = processor.tokenizer._normalize(transcription)
|
||||
return batch
|
||||
|
||||
|
||||
result = librispeech_test_clean.map(map_to_pred)
|
||||
|
||||
wer = load("wer")
|
||||
print(100 * wer.compute(references=result["reference"], predictions=result["prediction"]))
|
||||
56
AudioQnA/benchmark/accuracy/online_eval.py
Normal file
@@ -0,0 +1,56 @@
|
||||
# Copyright (C) 2024 Intel Corporation
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
import base64
|
||||
import json
|
||||
|
||||
import requests
|
||||
import torch
|
||||
from datasets import load_dataset
|
||||
from evaluate import load
|
||||
from pydub import AudioSegment
|
||||
from transformers import WhisperForConditionalGeneration, WhisperProcessor
|
||||
|
||||
MODEL_NAME = "openai/whisper-large-v2"
|
||||
processor = WhisperProcessor.from_pretrained(MODEL_NAME)
|
||||
|
||||
librispeech_test_clean = load_dataset(
|
||||
"andreagasparini/librispeech_test_only", "clean", split="test", trust_remote_code=True
|
||||
)
|
||||
|
||||
|
||||
def map_to_pred(batch):
|
||||
batch["reference"] = processor.tokenizer._normalize(batch["text"])
|
||||
|
||||
file_path = batch["file"]
|
||||
# process the file_path
|
||||
pidx = file_path.rfind("/")
|
||||
sidx = file_path.rfind(".")
|
||||
|
||||
file_path_prefix = file_path[: pidx + 1]
|
||||
file_path_suffix = file_path[sidx:]
|
||||
file_path_mid = file_path[pidx + 1 : sidx]
|
||||
splits = file_path_mid.split("-")
|
||||
file_path_mid = f"LibriSpeech/test-clean/{splits[0]}/{splits[1]}/{file_path_mid}"
|
||||
|
||||
file_path = file_path_prefix + file_path_mid + file_path_suffix
|
||||
|
||||
audio = AudioSegment.from_file(file_path)
|
||||
audio.export("tmp.wav")
|
||||
with open("tmp.wav", "rb") as f:
|
||||
test_audio_base64_str = base64.b64encode(f.read()).decode("utf-8")
|
||||
|
||||
inputs = {"audio": test_audio_base64_str}
|
||||
endpoint = "http://localhost:7066/v1/asr"
|
||||
response = requests.post(url=endpoint, data=json.dumps(inputs), proxies={"http": None})
|
||||
|
||||
result_str = response.json()["asr_result"]
|
||||
|
||||
batch["prediction"] = processor.tokenizer._normalize(result_str)
|
||||
return batch
|
||||
|
||||
|
||||
result = librispeech_test_clean.map(map_to_pred)
|
||||
|
||||
wer = load("wer")
|
||||
print(100 * wer.compute(references=result["reference"], predictions=result["prediction"]))
|
||||
8
AudioQnA/benchmark/accuracy/requirements.txt
Normal file
@@ -0,0 +1,8 @@
|
||||
datasets
|
||||
evaluate
|
||||
jiwer
|
||||
librosa
|
||||
pydub
|
||||
soundfile
|
||||
torch
|
||||
transformers
|
||||
@@ -14,10 +14,10 @@ cd GenAIComps
|
||||
### 2. Build ASR Image
|
||||
|
||||
```bash
|
||||
docker build -t opea/whisper:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/asr/whisper/Dockerfile .
|
||||
docker build -t opea/whisper:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/asr/whisper/dependency/Dockerfile .
|
||||
|
||||
|
||||
docker build -t opea/asr:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/asr/Dockerfile .
|
||||
docker build -t opea/asr:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/asr/whisper/Dockerfile .
|
||||
```
|
||||
|
||||
### 3. Build LLM Image
|
||||
@@ -29,9 +29,9 @@ docker build --no-cache -t opea/llm-tgi:latest --build-arg https_proxy=$https_pr
|
||||
### 4. Build TTS Image
|
||||
|
||||
```bash
|
||||
docker build -t opea/speecht5:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/tts/speecht5/Dockerfile .
|
||||
docker build -t opea/speecht5:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/tts/speecht5/dependency/Dockerfile .
|
||||
|
||||
docker build -t opea/tts:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/tts/Dockerfile .
|
||||
docker build -t opea/tts:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/tts/speecht5/Dockerfile .
|
||||
```
|
||||
|
||||
### 6. Build MegaService Docker Image
|
||||
@@ -40,7 +40,7 @@ To construct the Mega Service, we utilize the [GenAIComps](https://github.com/op
|
||||
|
||||
```bash
|
||||
git clone https://github.com/opea-project/GenAIExamples.git
|
||||
cd GenAIExamples/AudioQnA/docker
|
||||
cd GenAIExamples/AudioQnA/
|
||||
docker build --no-cache -t opea/audioqna:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f Dockerfile .
|
||||
```
|
||||
|
||||
@@ -80,7 +80,7 @@ export LLM_SERVICE_PORT=3007
|
||||
## 🚀 Start the MegaService
|
||||
|
||||
```bash
|
||||
cd GenAIExamples/AudioQnA/docker/xeon/
|
||||
cd GenAIExamples/AudioQnA/docker_compose/intel/cpu/xeon/
|
||||
docker compose up -d
|
||||
```
|
||||
|
||||
@@ -108,7 +108,7 @@ curl http://${host_ip}:3006/generate \
|
||||
# llm microservice
|
||||
curl http://${host_ip}:3007/v1/chat/completions\
|
||||
-X POST \
|
||||
-d '{"query":"What is Deep Learning?","max_new_tokens":17,"top_k":10,"top_p":0.95,"typical_p":0.95,"temperature":0.01,"repetition_penalty":1.03,"streaming":false}' \
|
||||
-d '{"query":"What is Deep Learning?","max_tokens":17,"top_k":10,"top_p":0.95,"typical_p":0.95,"temperature":0.01,"repetition_penalty":1.03,"streaming":false}' \
|
||||
-H 'Content-Type: application/json'
|
||||
|
||||
# speecht5 service
|
||||
@@ -1,12 +1,9 @@
|
||||
|
||||
# Copyright (C) 2024 Intel Corporation
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
version: "3.8"
|
||||
|
||||
services:
|
||||
whisper-service:
|
||||
image: opea/whisper:latest
|
||||
image: ${REGISTRY:-opea}/whisper:${TAG:-latest}
|
||||
container_name: whisper-service
|
||||
ports:
|
||||
- "7066:7066"
|
||||
@@ -17,7 +14,7 @@ services:
|
||||
https_proxy: ${https_proxy}
|
||||
restart: unless-stopped
|
||||
asr:
|
||||
image: opea/asr:latest
|
||||
image: ${REGISTRY:-opea}/asr:${TAG:-latest}
|
||||
container_name: asr-service
|
||||
ports:
|
||||
- "3001:9099"
|
||||
@@ -25,7 +22,7 @@ services:
|
||||
environment:
|
||||
ASR_ENDPOINT: ${ASR_ENDPOINT}
|
||||
speecht5-service:
|
||||
image: opea/speecht5:latest
|
||||
image: ${REGISTRY:-opea}/speecht5:${TAG:-latest}
|
||||
container_name: speecht5-service
|
||||
ports:
|
||||
- "7055:7055"
|
||||
@@ -36,7 +33,7 @@ services:
|
||||
https_proxy: ${https_proxy}
|
||||
restart: unless-stopped
|
||||
tts:
|
||||
image: opea/tts:latest
|
||||
image: ${REGISTRY:-opea}/tts:${TAG:-latest}
|
||||
container_name: tts-service
|
||||
ports:
|
||||
- "3002:9088"
|
||||
@@ -44,7 +41,7 @@ services:
|
||||
environment:
|
||||
TTS_ENDPOINT: ${TTS_ENDPOINT}
|
||||
tgi-service:
|
||||
image: ghcr.io/huggingface/text-generation-inference:1.4
|
||||
image: ghcr.io/huggingface/text-generation-inference:sha-e4201f4-intel-cpu
|
||||
container_name: tgi-service
|
||||
ports:
|
||||
- "3006:80"
|
||||
@@ -56,9 +53,9 @@ services:
|
||||
http_proxy: ${http_proxy}
|
||||
https_proxy: ${https_proxy}
|
||||
HF_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
|
||||
command: --model-id ${LLM_MODEL_ID}
|
||||
command: --model-id ${LLM_MODEL_ID} --cuda-graphs 0
|
||||
llm:
|
||||
image: opea/llm-tgi:latest
|
||||
image: ${REGISTRY:-opea}/llm-tgi:${TAG:-latest}
|
||||
container_name: llm-tgi-server
|
||||
depends_on:
|
||||
- tgi-service
|
||||
@@ -73,7 +70,7 @@ services:
|
||||
HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
|
||||
restart: unless-stopped
|
||||
audioqna-xeon-backend-server:
|
||||
image: opea/audioqna:latest
|
||||
image: ${REGISTRY:-opea}/audioqna:${TAG:-latest}
|
||||
container_name: audioqna-xeon-backend-server
|
||||
depends_on:
|
||||
- asr
|
||||
@@ -14,10 +14,10 @@ cd GenAIComps
|
||||
### 2. Build ASR Image
|
||||
|
||||
```bash
|
||||
docker build -t opea/whisper-gaudi:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/asr/whisper/Dockerfile_hpu .
|
||||
docker build -t opea/whisper-gaudi:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/asr/whisper/dependency/Dockerfile.intel_hpu .
|
||||
|
||||
|
||||
docker build -t opea/asr:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/asr/Dockerfile .
|
||||
docker build -t opea/asr:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/asr/whisper/Dockerfile .
|
||||
```
|
||||
|
||||
### 3. Build LLM Image
|
||||
@@ -29,9 +29,9 @@ docker build --no-cache -t opea/llm-tgi:latest --build-arg https_proxy=$https_pr
|
||||
### 4. Build TTS Image
|
||||
|
||||
```bash
|
||||
docker build -t opea/speecht5-gaudi:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/tts/speecht5/Dockerfile_hpu .
|
||||
docker build -t opea/speecht5-gaudi:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/tts/speecht5/dependency/Dockerfile.intel_hpu .
|
||||
|
||||
docker build -t opea/tts:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/tts/Dockerfile .
|
||||
docker build -t opea/tts:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/tts/speecht5/Dockerfile .
|
||||
```
|
||||
|
||||
### 6. Build MegaService Docker Image
|
||||
@@ -40,7 +40,7 @@ To construct the Mega Service, we utilize the [GenAIComps](https://github.com/op
|
||||
|
||||
```bash
|
||||
git clone https://github.com/opea-project/GenAIExamples.git
|
||||
cd GenAIExamples/AudioQnA/docker
|
||||
cd GenAIExamples/AudioQnA/
|
||||
docker build --no-cache -t opea/audioqna:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f Dockerfile .
|
||||
```
|
||||
|
||||
@@ -80,7 +80,7 @@ export LLM_SERVICE_PORT=3007
|
||||
## 🚀 Start the MegaService
|
||||
|
||||
```bash
|
||||
cd GenAIExamples/AudioQnA/docker/gaudi/
|
||||
cd GenAIExamples/AudioQnA/docker_compose/intel/hpu/gaudi/
|
||||
docker compose up -d
|
||||
```
|
||||
|
||||
@@ -108,7 +108,7 @@ curl http://${host_ip}:3006/generate \
|
||||
# llm microservice
|
||||
curl http://${host_ip}:3007/v1/chat/completions\
|
||||
-X POST \
|
||||
-d '{"query":"What is Deep Learning?","max_new_tokens":17,"top_k":10,"top_p":0.95,"typical_p":0.95,"temperature":0.01,"repetition_penalty":1.03,"streaming":false}' \
|
||||
-d '{"query":"What is Deep Learning?","max_tokens":17,"top_k":10,"top_p":0.95,"typical_p":0.95,"temperature":0.01,"repetition_penalty":1.03,"streaming":false}' \
|
||||
-H 'Content-Type: application/json'
|
||||
|
||||
# speecht5 service
|
||||
@@ -1,12 +1,9 @@
|
||||
|
||||
# Copyright (C) 2024 Intel Corporation
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
version: "3.8"
|
||||
|
||||
services:
|
||||
whisper-service:
|
||||
image: opea/whisper-gaudi:latest
|
||||
image: ${REGISTRY:-opea}/whisper-gaudi:${TAG:-latest}
|
||||
container_name: whisper-service
|
||||
ports:
|
||||
- "7066:7066"
|
||||
@@ -22,7 +19,7 @@ services:
|
||||
- SYS_NICE
|
||||
restart: unless-stopped
|
||||
asr:
|
||||
image: opea/asr:latest
|
||||
image: ${REGISTRY:-opea}/asr:${TAG:-latest}
|
||||
container_name: asr-service
|
||||
ports:
|
||||
- "3001:9099"
|
||||
@@ -30,7 +27,7 @@ services:
|
||||
environment:
|
||||
ASR_ENDPOINT: ${ASR_ENDPOINT}
|
||||
speecht5-service:
|
||||
image: opea/speecht5-gaudi:latest
|
||||
image: ${REGISTRY:-opea}/speecht5-gaudi:${TAG:-latest}
|
||||
container_name: speecht5-service
|
||||
ports:
|
||||
- "7055:7055"
|
||||
@@ -46,7 +43,7 @@ services:
|
||||
- SYS_NICE
|
||||
restart: unless-stopped
|
||||
tts:
|
||||
image: opea/tts:latest
|
||||
image: ${REGISTRY:-opea}/tts:${TAG:-latest}
|
||||
container_name: tts-service
|
||||
ports:
|
||||
- "3002:9088"
|
||||
@@ -54,7 +51,7 @@ services:
|
||||
environment:
|
||||
TTS_ENDPOINT: ${TTS_ENDPOINT}
|
||||
tgi-service:
|
||||
image: ghcr.io/huggingface/tgi-gaudi:2.0.1
|
||||
image: ghcr.io/huggingface/tgi-gaudi:2.0.5
|
||||
container_name: tgi-gaudi-server
|
||||
ports:
|
||||
- "3006:80"
|
||||
@@ -64,18 +61,22 @@ services:
|
||||
no_proxy: ${no_proxy}
|
||||
http_proxy: ${http_proxy}
|
||||
https_proxy: ${https_proxy}
|
||||
HF_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
|
||||
HUGGING_FACE_HUB_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
|
||||
HF_HUB_DISABLE_PROGRESS_BARS: 1
|
||||
HF_HUB_ENABLE_HF_TRANSFER: 0
|
||||
HABANA_VISIBLE_DEVICES: all
|
||||
OMPI_MCA_btl_vader_single_copy_mechanism: none
|
||||
ENABLE_HPU_GRAPH: true
|
||||
LIMIT_HPU_GRAPH: true
|
||||
USE_FLASH_ATTENTION: true
|
||||
FLASH_ATTENTION_RECOMPUTE: true
|
||||
runtime: habana
|
||||
cap_add:
|
||||
- SYS_NICE
|
||||
ipc: host
|
||||
command: --model-id ${LLM_MODEL_ID} --max-input-length 1024 --max-total-tokens 2048
|
||||
llm:
|
||||
image: opea/llm-tgi:latest
|
||||
image: ${REGISTRY:-opea}/llm-tgi:${TAG:-latest}
|
||||
container_name: llm-tgi-gaudi-server
|
||||
depends_on:
|
||||
- tgi-service
|
||||
@@ -90,7 +91,7 @@ services:
|
||||
HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
|
||||
restart: unless-stopped
|
||||
audioqna-gaudi-backend-server:
|
||||
image: opea/audioqna:latest
|
||||
image: ${REGISTRY:-opea}/audioqna:${TAG:-latest}
|
||||
container_name: audioqna-gaudi-backend-server
|
||||
depends_on:
|
||||
- asr
|
||||
55
AudioQnA/docker_image_build/build.yaml
Normal file
@@ -0,0 +1,55 @@
|
||||
# Copyright (C) 2024 Intel Corporation
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
services:
|
||||
audioqna:
|
||||
build:
|
||||
args:
|
||||
http_proxy: ${http_proxy}
|
||||
https_proxy: ${https_proxy}
|
||||
no_proxy: ${no_proxy}
|
||||
context: ../
|
||||
dockerfile: ./Dockerfile
|
||||
image: ${REGISTRY:-opea}/audioqna:${TAG:-latest}
|
||||
whisper-gaudi:
|
||||
build:
|
||||
context: GenAIComps
|
||||
dockerfile: comps/asr/whisper/dependency/Dockerfile.intel_hpu
|
||||
extends: audioqna
|
||||
image: ${REGISTRY:-opea}/whisper-gaudi:${TAG:-latest}
|
||||
whisper:
|
||||
build:
|
||||
context: GenAIComps
|
||||
dockerfile: comps/asr/whisper/dependency/Dockerfile
|
||||
extends: audioqna
|
||||
image: ${REGISTRY:-opea}/whisper:${TAG:-latest}
|
||||
asr:
|
||||
build:
|
||||
context: GenAIComps
|
||||
dockerfile: comps/asr/whisper/Dockerfile
|
||||
extends: audioqna
|
||||
image: ${REGISTRY:-opea}/asr:${TAG:-latest}
|
||||
llm-tgi:
|
||||
build:
|
||||
context: GenAIComps
|
||||
dockerfile: comps/llms/text-generation/tgi/Dockerfile
|
||||
extends: audioqna
|
||||
image: ${REGISTRY:-opea}/llm-tgi:${TAG:-latest}
|
||||
speecht5-gaudi:
|
||||
build:
|
||||
context: GenAIComps
|
||||
dockerfile: comps/tts/speecht5/dependency/Dockerfile.intel_hpu
|
||||
extends: audioqna
|
||||
image: ${REGISTRY:-opea}/speecht5-gaudi:${TAG:-latest}
|
||||
speecht5:
|
||||
build:
|
||||
context: GenAIComps
|
||||
dockerfile: comps/tts/speecht5/dependency/Dockerfile
|
||||
extends: audioqna
|
||||
image: ${REGISTRY:-opea}/speecht5:${TAG:-latest}
|
||||
tts:
|
||||
build:
|
||||
context: GenAIComps
|
||||
dockerfile: comps/tts/speecht5/Dockerfile
|
||||
extends: audioqna
|
||||
image: ${REGISTRY:-opea}/tts:${TAG:-latest}
|
||||
32
AudioQnA/kubernetes/intel/README.md
Normal file
@@ -0,0 +1,32 @@
# Deploy AudioQnA in a Kubernetes Cluster

> [NOTE]
> The following value must be set before you can deploy: HUGGINGFACEHUB_API_TOKEN.
> You can also customize the "MODEL_ID" and "model-volume" settings; a sketch follows below.

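For example, a minimal sketch of customizing the model and the "model-volume" host path before applying the manifest; the replacement model ID and cache path are placeholders, and the default hostPath differs between the Xeon and Gaudi manifests, so check the value in your copy of audioqna.yaml first:

```bash
# Swap the default LLM (Intel/neural-chat-7b-v3-3) for another Hugging Face model ID
sed -i "s#Intel/neural-chat-7b-v3-3#your-org/your-model#g" audioqna.yaml

# Point the "model-volume" hostPath at the node directory holding your model cache
# (/mnt/models is the Gaudi manifest default; adjust the pattern to match your manifest)
sed -i "s#path: /mnt/models#path: /path/to/your/model/cache#g" audioqna.yaml
```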
## Deploy On Xeon

```bash
cd GenAIExamples/AudioQnA/kubernetes/intel/cpu/xeon/manifest
export HUGGINGFACEHUB_API_TOKEN="YourOwnToken"
sed -i "s/insert-your-huggingface-token-here/${HUGGINGFACEHUB_API_TOKEN}/g" audioqna.yaml
kubectl apply -f audioqna.yaml
```

## Deploy On Gaudi

```bash
cd GenAIExamples/AudioQnA/kubernetes/intel/hpu/gaudi/manifest
export HUGGINGFACEHUB_API_TOKEN="YourOwnToken"
sed -i "s/insert-your-huggingface-token-here/${HUGGINGFACEHUB_API_TOKEN}/g" audioqna.yaml
kubectl apply -f audioqna.yaml
```

## Verify Services

Make sure all the pods are running, and restart the audioqna-xxxx pod if necessary.

```bash
kubectl get pods

curl http://${host_ip}:3008/v1/audioqna -X POST -d '{"audio": "UklGRigAAABXQVZFZm10IBIAAAABAAEARKwAAIhYAQACABAAAABkYXRhAgAAAAEA", "max_tokens":64}' -H 'Content-Type: application/json'
```
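The response body is base64-encoded WAV audio. A minimal sketch for saving the reply to a playable file, assuming the endpoint returns the raw base64 string in the response body (if the reply is wrapped in JSON instead, extract the field with `jq -r` first; the `.byte_str` field name below is an assumption):

```bash
# Query the megaservice and decode the returned base64 audio into a WAV file
curl -s http://${host_ip}:3008/v1/audioqna -X POST \
  -d '{"audio": "UklGRigAAABXQVZFZm10IBIAAAABAAEARKwAAIhYAQACABAAAABkYXRhAgAAAAEA", "max_tokens":64}' \
  -H 'Content-Type: application/json' | base64 -d > reply.wav

# JSON-wrapped variant (hypothetical field name):
# curl -s ... | jq -r '.byte_str' | base64 -d > reply.wav
```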
74
AudioQnA/kubernetes/intel/README_gmc.md
Normal file
@@ -0,0 +1,74 @@
# Deploy AudioQnA in a Kubernetes Cluster on Xeon and Gaudi

This document outlines the deployment process for an AudioQnA application using the [GenAIComps](https://github.com/opea-project/GenAIComps.git) microservice pipeline components on Intel Xeon servers and Gaudi machines.

The AudioQnA service leverages a Kubernetes operator called genai-microservices-connector (GMC). GMC connects microservices into pipelines based on the specification in a pipeline yaml file, and also allows the user to dynamically control which model is used by a service such as an LLM or embedder. The underlying pipeline language also supports external services that may be running elsewhere in a public or private cloud.

If you have not already done so, install GMC in your Kubernetes cluster by following the steps in the "Getting Started" section of [GMC Install](https://github.com/opea-project/GenAIInfra/tree/main/microservices-connector). We will soon publish the images to Docker Hub, at which point no builds will be required, simplifying installation.

The AudioQnA application is defined as a Custom Resource (CR) file that the GMC operator acts upon. It first checks whether the microservices listed in the CR yaml file are running, starts them if they are not, and then connects them. When the AudioQnA pipeline is ready, the service endpoint details are returned, letting you use the application. If you run `kubectl get pods`, you will see all the component microservices, in particular `asr`, `tts`, and `llm`.

## Using prebuilt images

AudioQnA uses the following prebuilt images for a Xeon deployment:

- tgi-service: ghcr.io/huggingface/text-generation-inference:1.4
- llm: opea/llm-tgi:latest
- asr: opea/asr:latest
- whisper: opea/whisper:latest
- tts: opea/tts:latest
- speecht5: opea/speecht5:latest

If you use the Gaudi accelerator, alternate images are used for the TGI, Whisper, and SpeechT5 services.
For Gaudi:

- tgi-service: ghcr.io/huggingface/tgi-gaudi:2.0.5
- whisper-gaudi: opea/whisper-gaudi:latest
- speecht5-gaudi: opea/speecht5-gaudi:latest

> [NOTE]
> Please refer to the [Xeon README](https://github.com/opea-project/GenAIExamples/blob/main/AudioQnA/docker_compose/intel/cpu/xeon/README.md) or [Gaudi README](https://github.com/opea-project/GenAIExamples/blob/main/AudioQnA/docker_compose/intel/hpu/gaudi/README.md) to build the OPEA images. These too will be available on Docker Hub soon to simplify use.

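Optionally, you can pre-pull these images on your worker nodes so the pipeline does not stall on image downloads during deployment. A minimal sketch for a Xeon node, assuming Docker is the container runtime on the node (with containerd, use `crictl pull` instead); swap in the Gaudi images listed above where appropriate:

```sh
# Pre-pull the AudioQnA component images used by the Xeon pipeline (optional)
for img in \
  ghcr.io/huggingface/text-generation-inference:1.4 \
  opea/llm-tgi:latest \
  opea/asr:latest \
  opea/whisper:latest \
  opea/tts:latest \
  opea/speecht5:latest; do
  docker pull "$img"
done
```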
## Deploy AudioQnA pipeline

This involves deploying the AudioQnA custom resource. Use audioQnA_xeon.yaml, or audioQnA_gaudi.yaml if you have a Gaudi cluster.

1. Create the namespace and deploy the application

```sh
kubectl create ns audioqa
kubectl apply -f $(pwd)/audioQnA_xeon.yaml
```

2. GMC will reconcile the AudioQnA custom resource and get all related components/services ready. Check that the services are up.

```sh
kubectl get service -n audioqa
```

3. Retrieve the application access URL

```sh
kubectl get gmconnectors.gmc.opea.io -n audioqa
NAME      URL                                                    READY     AGE
audioqa   http://router-service.audioqa.svc.cluster.local:8080   6/0/6     5m
```

4. Deploy a client pod to test the application

```sh
kubectl create deployment client-test -n audioqa --image=python:3.8.13 -- sleep infinity
```

5. Access the application from the client pod using the URL retrieved above

```sh
export CLIENT_POD=$(kubectl get pod -n audioqa -l app=client-test -o jsonpath={.items..metadata.name})
export accessUrl=$(kubectl get gmc -n audioqa -o jsonpath="{.items[?(@.metadata.name=='audioqa')].status.accessUrl}")
kubectl exec "$CLIENT_POD" -n audioqa -- curl -s --no-buffer $accessUrl -X POST -d '{"byte_str": "UklGRigAAABXQVZFZm10IBIAAAABAAEARKwAAIhYAQACABAAAABkYXRhAgAAAAEA", "parameters":{"max_new_tokens":64, "do_sample": true, "streaming":false}}' -H 'Content-Type: application/json'
```

> [NOTE]
> You can remove your AudioQnA pipeline by executing standard Kubernetes kubectl commands to remove the custom resource, as sketched below. Verify it was removed by executing `kubectl get pods` in the audioqa namespace.
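A minimal sketch of the teardown, assuming the CR was applied from audioQnA_xeon.yaml into the audioqa namespace as in step 1:

```sh
# Delete the AudioQnA custom resource; GMC tears down the component services
kubectl delete -f $(pwd)/audioQnA_xeon.yaml

# Verify the component pods are gone (only the client-test pod should remain)
kubectl get pods -n audioqa

# Optionally remove the test client and the namespace as well
kubectl delete deployment client-test -n audioqa
kubectl delete ns audioqa
```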
58
AudioQnA/kubernetes/intel/cpu/xeon/gmc/audioQnA_xeon.yaml
Normal file
@@ -0,0 +1,58 @@
|
||||
# Copyright (C) 2024 Intel Corporation
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
apiVersion: gmc.opea.io/v1alpha3
|
||||
kind: GMConnector
|
||||
metadata:
|
||||
labels:
|
||||
app.kubernetes.io/name: gmconnector
|
||||
app.kubernetes.io/managed-by: kustomize
|
||||
gmc/platform: xeon
|
||||
name: audioqa
|
||||
namespace: audioqa
|
||||
spec:
|
||||
routerConfig:
|
||||
name: router
|
||||
serviceName: router-service
|
||||
nodes:
|
||||
root:
|
||||
routerType: Sequence
|
||||
steps:
|
||||
- name: Asr
|
||||
internalService:
|
||||
serviceName: asr-svc
|
||||
config:
|
||||
endpoint: /v1/audio/transcriptions
|
||||
ASR_ENDPOINT: whisper-svc
|
||||
- name: Whisper
|
||||
internalService:
|
||||
serviceName: whisper-svc
|
||||
config:
|
||||
endpoint: /v1/asr
|
||||
isDownstreamService: true
|
||||
- name: Llm
|
||||
data: $response
|
||||
internalService:
|
||||
serviceName: llm-svc
|
||||
config:
|
||||
endpoint: /v1/chat/completions
|
||||
TGI_LLM_ENDPOINT: tgi-svc
|
||||
- name: Tgi
|
||||
internalService:
|
||||
serviceName: tgi-svc
|
||||
config:
|
||||
endpoint: /generate
|
||||
isDownstreamService: true
|
||||
- name: Tts
|
||||
data: $response
|
||||
internalService:
|
||||
serviceName: tts-svc
|
||||
config:
|
||||
endpoint: /v1/audio/speech
|
||||
TTS_ENDPOINT: speecht5-svc
|
||||
- name: SpeechT5
|
||||
internalService:
|
||||
serviceName: speecht5-svc
|
||||
config:
|
||||
endpoint: /v1/tts
|
||||
isDownstreamService: true
|
||||
395
AudioQnA/kubernetes/intel/cpu/xeon/manifest/audioqna.yaml
Normal file
@@ -0,0 +1,395 @@
|
||||
# Copyright (C) 2024 Intel Corporation
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
apiVersion: v1
|
||||
kind: ConfigMap
|
||||
metadata:
|
||||
name: audio-qna-config
|
||||
namespace: default
|
||||
data:
|
||||
ASR_ENDPOINT: http://whisper-svc.default.svc.cluster.local:7066
|
||||
TTS_ENDPOINT: http://speecht5-svc.default.svc.cluster.local:7055
|
||||
LLM_MODEL_ID: Intel/neural-chat-7b-v3-3
|
||||
HUGGINGFACEHUB_API_TOKEN: "insert-your-huggingface-token-here"
|
||||
TGI_LLM_ENDPOINT: http://llm-dependency-svc.default.svc.cluster.local:3006
|
||||
MEGA_SERVICE_HOST_IP: audioqna-backend-server-svc
|
||||
ASR_SERVICE_HOST_IP: asr-svc
|
||||
ASR_SERVICE_PORT: "3001"
|
||||
LLM_SERVICE_HOST_IP: llm-svc
|
||||
LLM_SERVICE_PORT: "3007"
|
||||
TTS_SERVICE_HOST_IP: tts-svc
|
||||
TTS_SERVICE_PORT: "3002"
|
||||
|
||||
---
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: asr-deploy
|
||||
namespace: default
|
||||
spec:
|
||||
replicas: 1
|
||||
selector:
|
||||
matchLabels:
|
||||
app: asr-deploy
|
||||
template:
|
||||
metadata:
|
||||
annotations:
|
||||
sidecar.istio.io/rewriteAppHTTPProbers: 'true'
|
||||
labels:
|
||||
app: asr-deploy
|
||||
spec:
|
||||
topologySpreadConstraints:
|
||||
- maxSkew: 1
|
||||
topologyKey: kubernetes.io/hostname
|
||||
whenUnsatisfiable: ScheduleAnyway
|
||||
labelSelector:
|
||||
matchLabels:
|
||||
app: asr-deploy
|
||||
hostIPC: true
|
||||
containers:
|
||||
- envFrom:
|
||||
- configMapRef:
|
||||
name: audio-qna-config
|
||||
image: opea/asr:latest
|
||||
imagePullPolicy: IfNotPresent
|
||||
name: asr-deploy
|
||||
args: null
|
||||
ports:
|
||||
- containerPort: 9099
|
||||
serviceAccountName: default
|
||||
---
|
||||
kind: Service
|
||||
apiVersion: v1
|
||||
metadata:
|
||||
name: asr-svc
|
||||
spec:
|
||||
type: ClusterIP
|
||||
selector:
|
||||
app: asr-deploy
|
||||
ports:
|
||||
- name: service
|
||||
port: 3001
|
||||
targetPort: 9099
|
||||
---
|
||||
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: whisper-deploy
|
||||
namespace: default
|
||||
spec:
|
||||
replicas: 1
|
||||
selector:
|
||||
matchLabels:
|
||||
app: whisper-deploy
|
||||
template:
|
||||
metadata:
|
||||
annotations:
|
||||
sidecar.istio.io/rewriteAppHTTPProbers: 'true'
|
||||
labels:
|
||||
app: whisper-deploy
|
||||
spec:
|
||||
topologySpreadConstraints:
|
||||
- maxSkew: 1
|
||||
topologyKey: kubernetes.io/hostname
|
||||
whenUnsatisfiable: ScheduleAnyway
|
||||
labelSelector:
|
||||
matchLabels:
|
||||
app: whisper-deploy
|
||||
hostIPC: true
|
||||
containers:
|
||||
- envFrom:
|
||||
- configMapRef:
|
||||
name: audio-qna-config
|
||||
image: opea/whisper:latest
|
||||
imagePullPolicy: IfNotPresent
|
||||
name: whisper-deploy
|
||||
args: null
|
||||
ports:
|
||||
- containerPort: 7066
|
||||
serviceAccountName: default
|
||||
---
|
||||
kind: Service
|
||||
apiVersion: v1
|
||||
metadata:
|
||||
name: whisper-svc
|
||||
spec:
|
||||
type: ClusterIP
|
||||
selector:
|
||||
app: whisper-deploy
|
||||
ports:
|
||||
- name: service
|
||||
port: 7066
|
||||
targetPort: 7066
|
||||
|
||||
---
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: tts-deploy
|
||||
namespace: default
|
||||
spec:
|
||||
replicas: 1
|
||||
selector:
|
||||
matchLabels:
|
||||
app: tts-deploy
|
||||
template:
|
||||
metadata:
|
||||
annotations:
|
||||
sidecar.istio.io/rewriteAppHTTPProbers: 'true'
|
||||
labels:
|
||||
app: tts-deploy
|
||||
spec:
|
||||
topologySpreadConstraints:
|
||||
- maxSkew: 1
|
||||
topologyKey: kubernetes.io/hostname
|
||||
whenUnsatisfiable: ScheduleAnyway
|
||||
labelSelector:
|
||||
matchLabels:
|
||||
app: tts-deploy
|
||||
hostIPC: true
|
||||
containers:
|
||||
- envFrom:
|
||||
- configMapRef:
|
||||
name: audio-qna-config
|
||||
image: opea/tts:latest
|
||||
imagePullPolicy: IfNotPresent
|
||||
name: tts-deploy
|
||||
args: null
|
||||
ports:
|
||||
- containerPort: 9088
|
||||
serviceAccountName: default
|
||||
---
|
||||
kind: Service
|
||||
apiVersion: v1
|
||||
metadata:
|
||||
name: tts-svc
|
||||
spec:
|
||||
type: ClusterIP
|
||||
selector:
|
||||
app: tts-deploy
|
||||
ports:
|
||||
- name: service
|
||||
port: 3002
|
||||
targetPort: 9088
|
||||
|
||||
---
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: speecht5-deploy
|
||||
namespace: default
|
||||
spec:
|
||||
replicas: 1
|
||||
selector:
|
||||
matchLabels:
|
||||
app: speecht5-deploy
|
||||
template:
|
||||
metadata:
|
||||
annotations:
|
||||
sidecar.istio.io/rewriteAppHTTPProbers: 'true'
|
||||
labels:
|
||||
app: speecht5-deploy
|
||||
spec:
|
||||
topologySpreadConstraints:
|
||||
- maxSkew: 1
|
||||
topologyKey: kubernetes.io/hostname
|
||||
whenUnsatisfiable: ScheduleAnyway
|
||||
labelSelector:
|
||||
matchLabels:
|
||||
app: speecht5-deploy
|
||||
hostIPC: true
|
||||
containers:
|
||||
- envFrom:
|
||||
- configMapRef:
|
||||
name: audio-qna-config
|
||||
image: opea/speecht5:latest
|
||||
imagePullPolicy: IfNotPresent
|
||||
name: speecht5-deploy
|
||||
args: null
|
||||
ports:
|
||||
- containerPort: 7055
|
||||
serviceAccountName: default
|
||||
---
|
||||
kind: Service
|
||||
apiVersion: v1
|
||||
metadata:
|
||||
name: speecht5-svc
|
||||
spec:
|
||||
type: ClusterIP
|
||||
selector:
|
||||
app: speecht5-deploy
|
||||
ports:
|
||||
- name: service
|
||||
port: 7055
|
||||
targetPort: 7055
|
||||
|
||||
---
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: llm-dependency-deploy
|
||||
namespace: default
|
||||
spec:
|
||||
replicas: 1
|
||||
selector:
|
||||
matchLabels:
|
||||
app: llm-dependency-deploy
|
||||
template:
|
||||
metadata:
|
||||
annotations:
|
||||
sidecar.istio.io/rewriteAppHTTPProbers: 'true'
|
||||
labels:
|
||||
app: llm-dependency-deploy
|
||||
spec:
|
||||
hostIPC: true
|
||||
containers:
|
||||
- envFrom:
|
||||
- configMapRef:
|
||||
name: audio-qna-config
|
||||
image: "ghcr.io/huggingface/text-generation-inference:sha-e4201f4-intel-cpu"
|
||||
name: llm-dependency-deploy-demo
|
||||
securityContext:
|
||||
capabilities:
|
||||
add:
|
||||
- SYS_NICE
|
||||
args:
|
||||
- --model-id
|
||||
- $(LLM_MODEL_ID)
|
||||
- --max-input-length
|
||||
- '2048'
|
||||
- --max-total-tokens
|
||||
- '4096'
|
||||
volumeMounts:
|
||||
- mountPath: /data
|
||||
name: model-volume
|
||||
- mountPath: /dev/shm
|
||||
name: shm
|
||||
ports:
|
||||
- containerPort: 80
|
||||
serviceAccountName: default
|
||||
volumes:
|
||||
- name: model-volume
|
||||
hostPath:
|
||||
path: /home/sdp/cesg
|
||||
type: Directory
|
||||
- name: shm
|
||||
emptyDir:
|
||||
medium: Memory
|
||||
sizeLimit: 1Gi
|
||||
---
|
||||
kind: Service
|
||||
apiVersion: v1
|
||||
metadata:
|
||||
name: llm-dependency-svc
|
||||
spec:
|
||||
type: ClusterIP
|
||||
selector:
|
||||
app: llm-dependency-deploy
|
||||
ports:
|
||||
- name: service
|
||||
port: 3006
|
||||
targetPort: 80
|
||||
|
||||
---
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: llm-deploy
|
||||
namespace: default
|
||||
spec:
|
||||
replicas: 1
|
||||
selector:
|
||||
matchLabels:
|
||||
app: llm-deploy
|
||||
template:
|
||||
metadata:
|
||||
annotations:
|
||||
sidecar.istio.io/rewriteAppHTTPProbers: 'true'
|
||||
labels:
|
||||
app: llm-deploy
|
||||
spec:
|
||||
topologySpreadConstraints:
|
||||
- maxSkew: 1
|
||||
topologyKey: kubernetes.io/hostname
|
||||
whenUnsatisfiable: ScheduleAnyway
|
||||
labelSelector:
|
||||
matchLabels:
|
||||
app: llm-deploy
|
||||
hostIPC: true
|
||||
containers:
|
||||
- envFrom:
|
||||
- configMapRef:
|
||||
name: audio-qna-config
|
||||
image: opea/llm-tgi:latest
|
||||
imagePullPolicy: IfNotPresent
|
||||
name: llm-deploy
|
||||
args: null
|
||||
ports:
|
||||
- containerPort: 9000
|
||||
serviceAccountName: default
|
||||
---
|
||||
kind: Service
|
||||
apiVersion: v1
|
||||
metadata:
|
||||
name: llm-svc
|
||||
spec:
|
||||
type: ClusterIP
|
||||
selector:
|
||||
app: llm-deploy
|
||||
ports:
|
||||
- name: service
|
||||
port: 3007
|
||||
targetPort: 9000
|
||||
|
||||
---
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: audioqna-backend-server-deploy
|
||||
namespace: default
|
||||
spec:
|
||||
replicas: 1
|
||||
selector:
|
||||
matchLabels:
|
||||
app: audioqna-backend-server-deploy
|
||||
template:
|
||||
metadata:
|
||||
annotations:
|
||||
sidecar.istio.io/rewriteAppHTTPProbers: 'true'
|
||||
labels:
|
||||
app: audioqna-backend-server-deploy
|
||||
spec:
|
||||
topologySpreadConstraints:
|
||||
- maxSkew: 1
|
||||
topologyKey: kubernetes.io/hostname
|
||||
whenUnsatisfiable: ScheduleAnyway
|
||||
labelSelector:
|
||||
matchLabels:
|
||||
app: audioqna-backend-server-deploy
|
||||
hostIPC: true
|
||||
containers:
|
||||
- envFrom:
|
||||
- configMapRef:
|
||||
name: audio-qna-config
|
||||
image: opea/audioqna:latest
|
||||
imagePullPolicy: IfNotPresent
|
||||
name: audioqna-backend-server-deploy
|
||||
args: null
|
||||
ports:
|
||||
- containerPort: 8888
|
||||
serviceAccountName: default
|
||||
---
|
||||
kind: Service
|
||||
apiVersion: v1
|
||||
metadata:
|
||||
name: audioqna-backend-server-svc
|
||||
spec:
|
||||
type: NodePort
|
||||
selector:
|
||||
app: audioqna-backend-server-deploy
|
||||
ports:
|
||||
- name: service
|
||||
port: 3008
|
||||
targetPort: 8888
|
||||
nodePort: 30666
|
||||
58
AudioQnA/kubernetes/intel/hpu/gaudi/gmc/audioQnA_gaudi.yaml
Normal file
@@ -0,0 +1,58 @@
|
||||
# Copyright (C) 2024 Intel Corporation
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
apiVersion: gmc.opea.io/v1alpha3
|
||||
kind: GMConnector
|
||||
metadata:
|
||||
labels:
|
||||
app.kubernetes.io/name: gmconnector
|
||||
app.kubernetes.io/managed-by: kustomize
|
||||
gmc/platform: gaudi
|
||||
name: audioqa
|
||||
namespace: audioqa
|
||||
spec:
|
||||
routerConfig:
|
||||
name: router
|
||||
serviceName: router-service
|
||||
nodes:
|
||||
root:
|
||||
routerType: Sequence
|
||||
steps:
|
||||
- name: Asr
|
||||
internalService:
|
||||
serviceName: asr-svc
|
||||
config:
|
||||
endpoint: /v1/audio/transcriptions
|
||||
ASR_ENDPOINT: whisper-gaudi-svc
|
||||
- name: WhisperGaudi
|
||||
internalService:
|
||||
serviceName: whisper-gaudi-svc
|
||||
config:
|
||||
endpoint: /v1/asr
|
||||
isDownstreamService: true
|
||||
- name: Llm
|
||||
data: $response
|
||||
internalService:
|
||||
serviceName: llm-svc
|
||||
config:
|
||||
endpoint: /v1/chat/completions
|
||||
TGI_LLM_ENDPOINT: tgi-gaudi-svc
|
||||
- name: TgiGaudi
|
||||
internalService:
|
||||
serviceName: tgi-gaudi-svc
|
||||
config:
|
||||
endpoint: /generate
|
||||
isDownstreamService: true
|
||||
- name: Tts
|
||||
data: $response
|
||||
internalService:
|
||||
serviceName: tts-svc
|
||||
config:
|
||||
endpoint: /v1/audio/speech
|
||||
TTS_ENDPOINT: speecht5-gaudi-svc
|
||||
- name: SpeechT5Gaudi
|
||||
internalService:
|
||||
serviceName: speecht5-gaudi-svc
|
||||
config:
|
||||
endpoint: /v1/tts
|
||||
isDownstreamService: true
|
||||
447
AudioQnA/kubernetes/intel/hpu/gaudi/manifest/audioqna.yaml
Normal file
@@ -0,0 +1,447 @@
|
||||
# Copyright (C) 2024 Intel Corporation
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
apiVersion: v1
|
||||
kind: ConfigMap
|
||||
metadata:
|
||||
name: audio-qna-config
|
||||
namespace: default
|
||||
data:
|
||||
ASR_ENDPOINT: http://whisper-svc.default.svc.cluster.local:7066
|
||||
TTS_ENDPOINT: http://speecht5-svc.default.svc.cluster.local:7055
|
||||
LLM_MODEL_ID: Intel/neural-chat-7b-v3-3
|
||||
HUGGINGFACEHUB_API_TOKEN: "insert-your-huggingface-token-here"
|
||||
TGI_LLM_ENDPOINT: http://llm-dependency-svc.default.svc.cluster.local:3006
|
||||
MEGA_SERVICE_HOST_IP: audioqna-backend-server-svc
|
||||
ASR_SERVICE_HOST_IP: asr-svc
|
||||
ASR_SERVICE_PORT: "3001"
|
||||
LLM_SERVICE_HOST_IP: llm-svc
|
||||
LLM_SERVICE_PORT: "3007"
|
||||
TTS_SERVICE_HOST_IP: tts-svc
|
||||
TTS_SERVICE_PORT: "3002"
|
||||
|
||||
---
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: asr-deploy
|
||||
namespace: default
|
||||
spec:
|
||||
replicas: 1
|
||||
selector:
|
||||
matchLabels:
|
||||
app: asr-deploy
|
||||
template:
|
||||
metadata:
|
||||
annotations:
|
||||
sidecar.istio.io/rewriteAppHTTPProbers: 'true'
|
||||
labels:
|
||||
app: asr-deploy
|
||||
spec:
|
||||
topologySpreadConstraints:
|
||||
- maxSkew: 1
|
||||
topologyKey: kubernetes.io/hostname
|
||||
whenUnsatisfiable: ScheduleAnyway
|
||||
labelSelector:
|
||||
matchLabels:
|
||||
app: asr-deploy
|
||||
hostIPC: true
|
||||
containers:
|
||||
- envFrom:
|
||||
- configMapRef:
|
||||
name: audio-qna-config
|
||||
image: opea/asr:latest
|
||||
imagePullPolicy: IfNotPresent
|
||||
name: asr-deploy
|
||||
args: null
|
||||
ports:
|
||||
- containerPort: 9099
|
||||
serviceAccountName: default
|
||||
---
|
||||
kind: Service
|
||||
apiVersion: v1
|
||||
metadata:
|
||||
name: asr-svc
|
||||
spec:
|
||||
type: ClusterIP
|
||||
selector:
|
||||
app: asr-deploy
|
||||
ports:
|
||||
- name: service
|
||||
port: 3001
|
||||
targetPort: 9099
|
||||
---
|
||||
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: whisper-deploy
|
||||
namespace: default
|
||||
spec:
|
||||
replicas: 1
|
||||
selector:
|
||||
matchLabels:
|
||||
app: whisper-deploy
|
||||
template:
|
||||
metadata:
|
||||
annotations:
|
||||
sidecar.istio.io/rewriteAppHTTPProbers: 'true'
|
||||
labels:
|
||||
app: whisper-deploy
|
||||
spec:
|
||||
topologySpreadConstraints:
|
||||
- maxSkew: 1
|
||||
topologyKey: kubernetes.io/hostname
|
||||
whenUnsatisfiable: ScheduleAnyway
|
||||
labelSelector:
|
||||
matchLabels:
|
||||
app: whisper-deploy
|
||||
hostIPC: true
|
||||
containers:
|
||||
- envFrom:
|
||||
- configMapRef:
|
||||
name: audio-qna-config
|
||||
image: opea/whisper-gaudi:latest
|
||||
imagePullPolicy: IfNotPresent
|
||||
name: whisper-deploy
|
||||
args: null
|
||||
ports:
|
||||
- containerPort: 7066
|
||||
resources:
|
||||
limits:
|
||||
habana.ai/gaudi: 1
|
||||
env:
|
||||
- name: OMPI_MCA_btl_vader_single_copy_mechanism
|
||||
value: none
|
||||
- name: PT_HPU_ENABLE_LAZY_COLLECTIVES
|
||||
value: 'true'
|
||||
- name: runtime
|
||||
value: habana
|
||||
- name: HABANA_VISIBLE_DEVICES
|
||||
value: all
|
||||
serviceAccountName: default
|
||||
---
|
||||
kind: Service
|
||||
apiVersion: v1
|
||||
metadata:
|
||||
name: whisper-svc
|
||||
spec:
|
||||
type: ClusterIP
|
||||
selector:
|
||||
app: whisper-deploy
|
||||
ports:
|
||||
- name: service
|
||||
port: 7066
|
||||
targetPort: 7066
|
||||
|
||||
---
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: tts-deploy
|
||||
namespace: default
|
||||
spec:
|
||||
replicas: 1
|
||||
selector:
|
||||
matchLabels:
|
||||
app: tts-deploy
|
||||
template:
|
||||
metadata:
|
||||
annotations:
|
||||
sidecar.istio.io/rewriteAppHTTPProbers: 'true'
|
||||
labels:
|
||||
app: tts-deploy
|
||||
spec:
|
||||
topologySpreadConstraints:
|
||||
- maxSkew: 1
|
||||
topologyKey: kubernetes.io/hostname
|
||||
whenUnsatisfiable: ScheduleAnyway
|
||||
labelSelector:
|
||||
matchLabels:
|
||||
app: tts-deploy
|
||||
hostIPC: true
|
||||
containers:
|
||||
- envFrom:
|
||||
- configMapRef:
|
||||
name: audio-qna-config
|
||||
image: opea/tts:latest
|
||||
imagePullPolicy: IfNotPresent
|
||||
name: tts-deploy
|
||||
args: null
|
||||
ports:
|
||||
- containerPort: 9088
|
||||
serviceAccountName: default
|
||||
---
|
||||
kind: Service
|
||||
apiVersion: v1
|
||||
metadata:
|
||||
name: tts-svc
|
||||
spec:
|
||||
type: ClusterIP
|
||||
selector:
|
||||
app: tts-deploy
|
||||
ports:
|
||||
- name: service
|
||||
port: 3002
|
||||
targetPort: 9088
|
||||
|
||||
---
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: speecht5-deploy
|
||||
namespace: default
|
||||
spec:
|
||||
replicas: 1
|
||||
selector:
|
||||
matchLabels:
|
||||
app: speecht5-deploy
|
||||
template:
|
||||
metadata:
|
||||
annotations:
|
||||
sidecar.istio.io/rewriteAppHTTPProbers: 'true'
|
||||
labels:
|
||||
app: speecht5-deploy
|
||||
spec:
|
||||
topologySpreadConstraints:
|
||||
- maxSkew: 1
|
||||
topologyKey: kubernetes.io/hostname
|
||||
whenUnsatisfiable: ScheduleAnyway
|
||||
labelSelector:
|
||||
matchLabels:
|
||||
app: speecht5-deploy
|
||||
hostIPC: true
|
||||
containers:
|
||||
- envFrom:
|
||||
- configMapRef:
|
||||
name: audio-qna-config
|
||||
image: opea/speecht5-gaudi:latest
|
||||
imagePullPolicy: IfNotPresent
|
||||
name: speecht5-deploy
|
||||
args: null
|
||||
ports:
|
||||
- containerPort: 7055
|
||||
resources:
|
||||
limits:
|
||||
habana.ai/gaudi: 1
|
||||
env:
|
||||
- name: OMPI_MCA_btl_vader_single_copy_mechanism
|
||||
value: none
|
||||
- name: PT_HPU_ENABLE_LAZY_COLLECTIVES
|
||||
value: 'true'
|
||||
- name: runtime
|
||||
value: habana
|
||||
- name: HABANA_VISIBLE_DEVICES
|
||||
value: all
|
||||
serviceAccountName: default
|
||||
---
|
||||
kind: Service
|
||||
apiVersion: v1
|
||||
metadata:
|
||||
name: speecht5-svc
|
||||
spec:
|
||||
type: ClusterIP
|
||||
selector:
|
||||
app: speecht5-deploy
|
||||
ports:
|
||||
- name: service
|
||||
port: 7055
|
||||
targetPort: 7055
|
||||
|
||||
---
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: llm-dependency-deploy
|
||||
namespace: default
|
||||
spec:
|
||||
replicas: 1
|
||||
selector:
|
||||
matchLabels:
|
||||
app: llm-dependency-deploy
|
||||
template:
|
||||
metadata:
|
||||
annotations:
|
||||
sidecar.istio.io/rewriteAppHTTPProbers: 'true'
|
||||
labels:
|
||||
app: llm-dependency-deploy
|
||||
spec:
|
||||
hostIPC: true
|
||||
containers:
|
||||
- envFrom:
|
||||
- configMapRef:
|
||||
name: audio-qna-config
|
||||
image: ghcr.io/huggingface/tgi-gaudi:2.0.5
|
||||
name: llm-dependency-deploy-demo
|
||||
securityContext:
|
||||
capabilities:
|
||||
add:
|
||||
- SYS_NICE
|
||||
args:
|
||||
- --model-id
|
||||
- $(LLM_MODEL_ID)
|
||||
- --max-input-length
|
||||
- '2048'
|
||||
- --max-total-tokens
|
||||
- '4096'
|
||||
- --max-batch-total-tokens
|
||||
- '65536'
|
||||
- --max-batch-prefill-tokens
|
||||
- '4096'
|
||||
volumeMounts:
|
||||
- mountPath: /data
|
||||
name: model-volume
|
||||
- mountPath: /dev/shm
|
||||
name: shm
|
||||
ports:
|
||||
- containerPort: 80
|
||||
resources:
|
||||
limits:
|
||||
habana.ai/gaudi: 1
|
||||
env:
|
||||
- name: OMPI_MCA_btl_vader_single_copy_mechanism
|
||||
value: none
|
||||
- name: PT_HPU_ENABLE_LAZY_COLLECTIVES
|
||||
value: 'true'
|
||||
- name: ENABLE_HPU_GRAPH
|
||||
value: 'true'
|
||||
- name: LIMIT_HPU_GRAPH
|
||||
value: 'true'
|
||||
- name: USE_FLASH_ATTENTION
|
||||
value: 'true'
|
||||
- name: FLASH_ATTENTION_RECOMPUTE
|
||||
value: 'true'
|
||||
- name: runtime
|
||||
value: habana
|
||||
- name: HABANA_VISIBLE_DEVICES
|
||||
value: all
|
||||
- name: PREFILL_BATCH_BUCKET_SIZE
|
||||
value: "1"
|
||||
- name: BATCH_BUCKET_SIZE
|
||||
value: "8"
|
||||
serviceAccountName: default
|
||||
volumes:
|
||||
- name: model-volume
|
||||
hostPath:
|
||||
path: /mnt/models
|
||||
type: Directory
|
||||
- name: shm
|
||||
emptyDir:
|
||||
medium: Memory
|
||||
sizeLimit: 1Gi
|
||||
---
|
||||
kind: Service
|
||||
apiVersion: v1
|
||||
metadata:
|
||||
name: llm-dependency-svc
|
||||
spec:
|
||||
type: ClusterIP
|
||||
selector:
|
||||
app: llm-dependency-deploy
|
||||
ports:
|
||||
- name: service
|
||||
port: 3006
|
||||
targetPort: 80
|
||||
|
||||
---
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: llm-deploy
|
||||
namespace: default
|
||||
spec:
|
||||
replicas: 1
|
||||
selector:
|
||||
matchLabels:
|
||||
app: llm-deploy
|
||||
template:
|
||||
metadata:
|
||||
annotations:
|
||||
sidecar.istio.io/rewriteAppHTTPProbers: 'true'
|
||||
labels:
|
||||
app: llm-deploy
|
||||
spec:
|
||||
topologySpreadConstraints:
|
||||
- maxSkew: 1
|
||||
topologyKey: kubernetes.io/hostname
|
||||
whenUnsatisfiable: ScheduleAnyway
|
||||
labelSelector:
|
||||
matchLabels:
|
||||
app: llm-deploy
|
||||
hostIPC: true
|
||||
containers:
|
||||
- envFrom:
|
||||
- configMapRef:
|
||||
name: audio-qna-config
|
||||
image: opea/llm-tgi:latest
|
||||
imagePullPolicy: IfNotPresent
|
||||
name: llm-deploy
|
||||
args: null
|
||||
ports:
|
||||
- containerPort: 9000
|
||||
serviceAccountName: default
|
||||
---
|
||||
kind: Service
|
||||
apiVersion: v1
|
||||
metadata:
|
||||
name: llm-svc
|
||||
spec:
|
||||
type: ClusterIP
|
||||
selector:
|
||||
app: llm-deploy
|
||||
ports:
|
||||
- name: service
|
||||
port: 3007
|
||||
targetPort: 9000
|
||||
|
||||
---
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: audioqna-backend-server-deploy
|
||||
namespace: default
|
||||
spec:
|
||||
replicas: 1
|
||||
selector:
|
||||
matchLabels:
|
||||
app: audioqna-backend-server-deploy
|
||||
template:
|
||||
metadata:
|
||||
annotations:
|
||||
sidecar.istio.io/rewriteAppHTTPProbers: 'true'
|
||||
labels:
|
||||
app: audioqna-backend-server-deploy
|
||||
spec:
|
||||
topologySpreadConstraints:
|
||||
- maxSkew: 1
|
||||
topologyKey: kubernetes.io/hostname
|
||||
whenUnsatisfiable: ScheduleAnyway
|
||||
labelSelector:
|
||||
matchLabels:
|
||||
app: audioqna-backend-server-deploy
|
||||
hostIPC: true
|
||||
containers:
|
||||
- envFrom:
|
||||
- configMapRef:
|
||||
name: audio-qna-config
|
||||
image: opea/audioqna:latest
|
||||
imagePullPolicy: IfNotPresent
|
||||
name: audioqna-backend-server-deploy
|
||||
args: null
|
||||
ports:
|
||||
- containerPort: 8888
|
||||
serviceAccountName: default
|
||||
---
|
||||
kind: Service
|
||||
apiVersion: v1
|
||||
metadata:
|
||||
name: audioqna-backend-server-svc
|
||||
spec:
|
||||
type: NodePort
|
||||
selector:
|
||||
app: audioqna-backend-server-deploy
|
||||
ports:
|
||||
- name: service
|
||||
port: 3008
|
||||
targetPort: 8888
|
||||
nodePort: 30666
|
||||
@@ -3,39 +3,31 @@
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
set -e
|
||||
echo "IMAGE_REPO=${IMAGE_REPO}"
|
||||
IMAGE_REPO=${IMAGE_REPO:-"opea"}
|
||||
IMAGE_TAG=${IMAGE_TAG:-"latest"}
|
||||
echo "REGISTRY=IMAGE_REPO=${IMAGE_REPO}"
|
||||
echo "TAG=IMAGE_TAG=${IMAGE_TAG}"
|
||||
export REGISTRY=${IMAGE_REPO}
|
||||
export TAG=${IMAGE_TAG}
|
||||
|
||||
WORKPATH=$(dirname "$PWD")
|
||||
LOG_PATH="$WORKPATH/tests"
|
||||
ip_address=$(hostname -I | awk '{print $1}')
|
||||
|
||||
function build_docker_images() {
|
||||
cd $WORKPATH
|
||||
git clone https://github.com/opea-project/GenAIComps.git
|
||||
cd GenAIComps
|
||||
cd $WORKPATH/docker_image_build
|
||||
git clone https://github.com/opea-project/GenAIComps.git && cd GenAIComps && git checkout "${opea_branch:-"main"}" && cd ../
|
||||
|
||||
docker build -t opea/whisper-gaudi:latest -f comps/asr/whisper/Dockerfile_hpu .
|
||||
echo "Build all the images with --no-cache, check docker_image_build.log for details..."
|
||||
service_list="audioqna whisper-gaudi asr llm-tgi speecht5-gaudi tts"
|
||||
docker compose -f build.yaml build ${service_list} --no-cache > ${LOG_PATH}/docker_image_build.log
|
||||
|
||||
docker build -t opea/asr:latest -f comps/asr/Dockerfile .
|
||||
docker build -t opea/llm-tgi:latest -f comps/llms/text-generation/tgi/Dockerfile .
|
||||
docker build -t opea/speecht5-gaudi:latest -f comps/tts/speecht5/Dockerfile_hpu .
|
||||
docker build -t opea/tts:latest -f comps/tts/Dockerfile .
|
||||
|
||||
docker pull ghcr.io/huggingface/tgi-gaudi:2.0.1
|
||||
|
||||
cd ..
|
||||
|
||||
cd $WORKPATH/docker
|
||||
docker build --no-cache -t opea/audioqna:latest -f Dockerfile .
|
||||
|
||||
# cd $WORKPATH/docker/ui
|
||||
# docker build --no-cache -t opea/audioqna-ui:latest -f docker/Dockerfile .
|
||||
|
||||
docker images
|
||||
docker pull ghcr.io/huggingface/tgi-gaudi:2.0.5
|
||||
docker images && sleep 1s
|
||||
}
|
||||
|
||||
function start_services() {
|
||||
cd $WORKPATH/docker/gaudi
|
||||
cd $WORKPATH/docker_compose/intel/hpu/gaudi
|
||||
export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN}
|
||||
|
||||
export TGI_LLM_ENDPOINT=http://$ip_address:3006
|
||||
@@ -53,27 +45,27 @@ function start_services() {
|
||||
export TTS_SERVICE_PORT=3002
|
||||
export LLM_SERVICE_PORT=3007
|
||||
|
||||
# sed -i "s/backend_address/$ip_address/g" $WORKPATH/docker/ui/svelte/.env
|
||||
|
||||
if [[ "$IMAGE_REPO" != "" ]]; then
|
||||
# Replace the container name with a test-specific name
|
||||
echo "using image repository $IMAGE_REPO and image tag $IMAGE_TAG"
|
||||
sed -i "s#image: opea/audioqna:latest#image: opea/audioqna:${IMAGE_TAG}#g" compose.yaml
|
||||
sed -i "s#image: opea/audioqna-ui:latest#image: opea/audioqna-ui:${IMAGE_TAG}#g" compose.yaml
|
||||
sed -i "s#image: opea/*#image: ${IMAGE_REPO}opea/#g" compose.yaml
|
||||
echo "cat compose.yaml"
|
||||
cat compose.yaml
|
||||
fi
|
||||
# sed -i "s/backend_address/$ip_address/g" $WORKPATH/ui/svelte/.env
|
||||
|
||||
# Start Docker Containers
|
||||
docker compose up -d
|
||||
docker compose up -d > ${LOG_PATH}/start_services_with_compose.log
|
||||
n=0
|
||||
until [[ "$n" -ge 500 ]]; do
|
||||
until [[ "$n" -ge 100 ]]; do
|
||||
docker logs tgi-gaudi-server > $LOG_PATH/tgi_service_start.log
|
||||
if grep -q Connected $LOG_PATH/tgi_service_start.log; then
|
||||
break
|
||||
fi
|
||||
sleep 1s
|
||||
sleep 5s
|
||||
n=$((n+1))
|
||||
done
|
||||
|
||||
n=0
|
||||
until [[ "$n" -ge 100 ]]; do
|
||||
docker logs whisper-service > $LOG_PATH/whisper_service_start.log
|
||||
if grep -q "Uvicorn server setup on port" $LOG_PATH/whisper_service_start.log; then
|
||||
break
|
||||
fi
|
||||
sleep 5s
|
||||
n=$((n+1))
|
||||
done
|
||||
}
|
||||
@@ -99,7 +91,7 @@ function validate_megaservice() {
|
||||
}
|
||||
|
||||
#function validate_frontend() {
|
||||
# cd $WORKPATH/docker/ui/svelte
|
||||
# cd $WORKPATH/ui/svelte
|
||||
# local conda_env_name="OPEA_e2e"
|
||||
# export PATH=${HOME}/miniforge3/bin/:$PATH
|
||||
## conda remove -n ${conda_env_name} --all -y
|
||||
@@ -124,14 +116,14 @@ function validate_megaservice() {
|
||||
#}
|
||||
|
||||
function stop_docker() {
|
||||
cd $WORKPATH/docker/gaudi
|
||||
cd $WORKPATH/docker_compose/intel/hpu/gaudi
|
||||
docker compose stop && docker compose rm -f
|
||||
}
|
||||
|
||||
function main() {
|
||||
|
||||
stop_docker
|
||||
if [[ "$IMAGE_REPO" == "" ]]; then build_docker_images; fi
|
||||
if [[ "$IMAGE_REPO" == "opea" ]]; then build_docker_images; fi
|
||||
start_services
|
||||
|
||||
# validate_microservices
|
||||
@@ -3,36 +3,31 @@
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
set -e
|
||||
echo "IMAGE_REPO=${IMAGE_REPO}"
|
||||
IMAGE_REPO=${IMAGE_REPO:-"opea"}
|
||||
IMAGE_TAG=${IMAGE_TAG:-"latest"}
|
||||
echo "REGISTRY=IMAGE_REPO=${IMAGE_REPO}"
|
||||
echo "TAG=IMAGE_TAG=${IMAGE_TAG}"
|
||||
export REGISTRY=${IMAGE_REPO}
|
||||
export TAG=${IMAGE_TAG}
|
||||
|
||||
WORKPATH=$(dirname "$PWD")
|
||||
LOG_PATH="$WORKPATH/tests"
|
||||
ip_address=$(hostname -I | awk '{print $1}')
|
||||
|
||||
function build_docker_images() {
|
||||
cd $WORKPATH
|
||||
git clone https://github.com/opea-project/GenAIComps.git
|
||||
cd GenAIComps
|
||||
cd $WORKPATH/docker_image_build
|
||||
git clone https://github.com/opea-project/GenAIComps.git && cd GenAIComps && git checkout "${opea_branch:-"main"}" && cd ../
|
||||
|
||||
docker build -t opea/whisper:latest -f comps/asr/whisper/Dockerfile .
|
||||
docker build -t opea/asr:latest -f comps/asr/Dockerfile .
|
||||
docker build -t opea/llm-tgi:latest -f comps/llms/text-generation/tgi/Dockerfile .
|
||||
docker build -t opea/speecht5:latest -f comps/tts/speecht5/Dockerfile .
|
||||
docker build -t opea/tts:latest -f comps/tts/Dockerfile .
|
||||
echo "Build all the images with --no-cache, check docker_image_build.log for details..."
|
||||
service_list="audioqna whisper asr llm-tgi speecht5 tts"
|
||||
docker compose -f build.yaml build ${service_list} --no-cache > ${LOG_PATH}/docker_image_build.log
|
||||
|
||||
docker pull ghcr.io/huggingface/tgi-gaudi:2.0.1
|
||||
|
||||
cd $WORKPATH/docker
|
||||
docker build --no-cache -t opea/audioqna:latest -f Dockerfile .
|
||||
|
||||
# cd $WORKPATH/docker/ui
|
||||
# docker build --no-cache -t opea/audioqna-ui:latest -f docker/Dockerfile .
|
||||
|
||||
docker images
|
||||
docker pull ghcr.io/huggingface/tgi-gaudi:2.0.5
|
||||
docker images && sleep 1s
|
||||
}
|
||||
|
||||
function start_services() {
|
||||
cd $WORKPATH/docker/xeon
|
||||
cd $WORKPATH/docker_compose/intel/cpu/xeon/
|
||||
export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN}
|
||||
export TGI_LLM_ENDPOINT=http://$ip_address:3006
|
||||
export LLM_MODEL_ID=Intel/neural-chat-7b-v3-3
|
||||
@@ -49,27 +44,17 @@ function start_services() {
|
||||
export TTS_SERVICE_PORT=3002
|
||||
export LLM_SERVICE_PORT=3007
|
||||
|
||||
# sed -i "s/backend_address/$ip_address/g" $WORKPATH/docker/ui/svelte/.env
|
||||
|
||||
if [[ "$IMAGE_REPO" != "" ]]; then
|
||||
# Replace the container name with a test-specific name
|
||||
echo "using image repository $IMAGE_REPO and image tag $IMAGE_TAG"
|
||||
sed -i "s#image: opea/audioqna:latest#image: opea/audioqna:${IMAGE_TAG}#g" compose.yaml
|
||||
sed -i "s#image: opea/audioqna-ui:latest#image: opea/audioqna-ui:${IMAGE_TAG}#g" compose.yaml
|
||||
sed -i "s#image: opea/*#image: ${IMAGE_REPO}opea/#g" compose.yaml
|
||||
echo "cat compose.yaml"
|
||||
cat compose.yaml
|
||||
fi
|
||||
# sed -i "s/backend_address/$ip_address/g" $WORKPATH/ui/svelte/.env
|
||||
|
||||
# Start Docker Containers
|
||||
docker compose up -d
|
||||
docker compose up -d > ${LOG_PATH}/start_services_with_compose.log
|
||||
n=0
|
||||
until [[ "$n" -ge 500 ]]; do
|
||||
until [[ "$n" -ge 100 ]]; do
|
||||
docker logs tgi-service > $LOG_PATH/tgi_service_start.log
|
||||
if grep -q Connected $LOG_PATH/tgi_service_start.log; then
|
||||
break
|
||||
fi
|
||||
sleep 1s
|
||||
sleep 5s
|
||||
n=$((n+1))
|
||||
done
|
||||
}
|
||||
@@ -96,7 +81,7 @@ function validate_megaservice() {
|
||||
}
|
||||
|
||||
#function validate_frontend() {
|
||||
# cd $WORKPATH/docker/ui/svelte
|
||||
# cd $WORKPATH/ui/svelte
|
||||
# local conda_env_name="OPEA_e2e"
|
||||
# export PATH=${HOME}/miniforge3/bin/:$PATH
|
||||
## conda remove -n ${conda_env_name} --all -y
|
||||
@@ -121,14 +106,14 @@ function validate_megaservice() {
|
||||
#}
|
||||
|
||||
function stop_docker() {
|
||||
cd $WORKPATH/docker/xeon
|
||||
cd $WORKPATH/docker_compose/intel/cpu/xeon/
|
||||
docker compose stop && docker compose rm -f
|
||||
}
|
||||
|
||||
function main() {
|
||||
|
||||
stop_docker
|
||||
if [[ "$IMAGE_REPO" == "" ]]; then build_docker_images; fi
|
||||
if [[ "$IMAGE_REPO" == "opea" ]]; then build_docker_images; fi
|
||||
start_services
|
||||
|
||||
validate_megaservice
|
||||
111
AudioQnA/tests/test_gmc_on_gaudi.sh
Executable file
@@ -0,0 +1,111 @@
|
||||
#!/bin/bash
|
||||
# Copyright (C) 2024 Intel Corporation
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
set -xe
|
||||
USER_ID=$(whoami)
|
||||
LOG_PATH=/home/$(whoami)/logs
|
||||
MOUNT_DIR=/home/$USER_ID/.cache/huggingface/hub
|
||||
IMAGE_REPO=${IMAGE_REPO:-}
|
||||
|
||||
function install_audioqa() {
|
||||
kubectl create ns $APP_NAMESPACE
|
||||
sed -i "s|namespace: audioqa|namespace: $APP_NAMESPACE|g" ./audioQnA_gaudi.yaml
|
||||
kubectl apply -f ./audioQnA_gaudi.yaml
|
||||
|
||||
# Wait until the router service is ready
|
||||
echo "Waiting for the audioqa router service to be ready..."
|
||||
wait_until_pod_ready "audioqa router" $APP_NAMESPACE "router-service"
|
||||
output=$(kubectl get pods -n $APP_NAMESPACE)
|
||||
echo $output
|
||||
}
|
||||
|
||||
function validate_audioqa() {
|
||||
# deploy client pod for testing
|
||||
kubectl create deployment client-test -n $APP_NAMESPACE --image=python:3.8.13 -- sleep infinity
|
||||
|
||||
# wait for client pod ready
|
||||
wait_until_pod_ready "client-test" $APP_NAMESPACE "client-test"
|
||||
# giving time to populating data
|
||||
sleep 60
|
||||
|
||||
kubectl get pods -n $APP_NAMESPACE
|
||||
# send request to audioqa
|
||||
export CLIENT_POD=$(kubectl get pod -n $APP_NAMESPACE -l app=client-test -o jsonpath={.items..metadata.name})
|
||||
echo "$CLIENT_POD"
|
||||
accessUrl=$(kubectl get gmc -n $APP_NAMESPACE -o jsonpath="{.items[?(@.metadata.name=='audioqa')].status.accessUrl}")
|
||||
byte_str=$(kubectl exec "$CLIENT_POD" -n $APP_NAMESPACE -- curl $accessUrl -s -X POST -d '{"byte_str": "UklGRigAAABXQVZFZm10IBIAAAABAAEARKwAAIhYAQACABAAAABkYXRhAgAAAAEA", "parameters":{"max_tokens":64, "do_sample": true, "streaming":false}}' -H 'Content-Type: application/json' | jq .byte_str)
|
||||
echo "$byte_str" > $LOG_PATH/curl_audioqa.log
|
||||
if [ -z "$byte_str" ]; then
|
||||
echo "audioqa failed, please check the logs in ${LOG_PATH}!"
|
||||
exit 1
|
||||
fi
|
||||
echo "Audioqa response check succeed!"
|
||||
}
|
||||
|
||||
function wait_until_pod_ready() {
|
||||
echo "Waiting for the $1 to be ready..."
|
||||
max_retries=30
|
||||
retry_count=0
|
||||
while ! is_pod_ready $2 $3; do
|
||||
if [ $retry_count -ge $max_retries ]; then
|
||||
echo "$1 is not ready after waiting for a significant amount of time"
|
||||
get_gmc_controller_logs
|
||||
exit 1
|
||||
fi
|
||||
echo "$1 is not ready yet. Retrying in 10 seconds..."
|
||||
sleep 10
|
||||
output=$(kubectl get pods -n $2)
|
||||
echo $output
|
||||
retry_count=$((retry_count + 1))
|
||||
done
|
||||
}
|
||||
|
||||
function is_pod_ready() {
|
||||
if [ "$2" == "gmc-controller" ]; then
|
||||
pod_status=$(kubectl get pods -n $1 -o jsonpath='{.items[].status.conditions[?(@.type=="Ready")].status}')
|
||||
else
|
||||
pod_status=$(kubectl get pods -n $1 -l app=$2 -o jsonpath='{.items[].status.conditions[?(@.type=="Ready")].status}')
|
||||
fi
|
||||
if [ "$pod_status" == "True" ]; then
|
||||
return 0
|
||||
else
|
||||
return 1
|
||||
fi
|
||||
}
|
||||
|
||||
function get_gmc_controller_logs() {
|
||||
# Fetch the name of the pod with the app-name gmc-controller in the specified namespace
|
||||
pod_name=$(kubectl get pods -n $SYSTEM_NAMESPACE -l control-plane=gmc-controller -o jsonpath='{.items[0].metadata.name}')
|
||||
|
||||
# Check if the pod name was found
|
||||
if [ -z "$pod_name" ]; then
|
||||
echo "No pod found with app-name gmc-controller in namespace $SYSTEM_NAMESPACE"
|
||||
return 1
|
||||
fi
|
||||
|
||||
# Get the logs of the found pod
|
||||
echo "Fetching logs for pod $pod_name in namespace $SYSTEM_NAMESPACE..."
|
||||
kubectl logs $pod_name -n $SYSTEM_NAMESPACE
|
||||
}
|
||||
|
||||
if [ $# -eq 0 ]; then
|
||||
echo "Usage: $0 <function_name>"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
case "$1" in
|
||||
install_AudioQnA)
|
||||
pushd AudioQnA/kubernetes/intel/hpu/gaudi/gmc
|
||||
install_audioqa
|
||||
popd
|
||||
;;
|
||||
validate_AudioQnA)
|
||||
pushd AudioQnA/kubernetes/intel/hpu/gaudi/gmc
|
||||
validate_audioqa
|
||||
popd
|
||||
;;
|
||||
*)
|
||||
echo "Unknown function: $1"
|
||||
;;
|
||||
esac
|
||||
111
AudioQnA/tests/test_gmc_on_xeon.sh
Executable file
@@ -0,0 +1,111 @@
|
||||
#!/bin/bash
|
||||
# Copyright (C) 2024 Intel Corporation
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
set -xe
|
||||
USER_ID=$(whoami)
|
||||
LOG_PATH=/home/$(whoami)/logs
|
||||
MOUNT_DIR=/home/$USER_ID/.cache/huggingface/hub
|
||||
IMAGE_REPO=${IMAGE_REPO:-}
|
||||
|
||||
function install_audioqa() {
|
||||
kubectl create ns $APP_NAMESPACE
|
||||
sed -i "s|namespace: audioqa|namespace: $APP_NAMESPACE|g" ./audioQnA_xeon.yaml
|
||||
kubectl apply -f ./audioQnA_xeon.yaml
|
||||
|
||||
# Wait until the router service is ready
|
||||
echo "Waiting for the audioqa router service to be ready..."
|
||||
wait_until_pod_ready "audioqa router" $APP_NAMESPACE "router-service"
|
||||
output=$(kubectl get pods -n $APP_NAMESPACE)
|
||||
echo $output
|
||||
}
|
||||
|
||||
function validate_audioqa() {
|
||||
# deploy client pod for testing
|
||||
kubectl create deployment client-test -n $APP_NAMESPACE --image=python:3.8.13 -- sleep infinity
|
||||
|
||||
# wait for client pod ready
|
||||
wait_until_pod_ready "client-test" $APP_NAMESPACE "client-test"
|
||||
# giving time to populating data
|
||||
sleep 60
|
||||
|
||||
kubectl get pods -n $APP_NAMESPACE
|
||||
# send request to audioqa
|
||||
export CLIENT_POD=$(kubectl get pod -n $APP_NAMESPACE -l app=client-test -o jsonpath={.items..metadata.name})
|
||||
echo "$CLIENT_POD"
|
||||
accessUrl=$(kubectl get gmc -n $APP_NAMESPACE -o jsonpath="{.items[?(@.metadata.name=='audioqa')].status.accessUrl}")
|
||||
byte_str=$(kubectl exec "$CLIENT_POD" -n $APP_NAMESPACE -- curl $accessUrl -s -X POST -d '{"byte_str": "UklGRigAAABXQVZFZm10IBIAAAABAAEARKwAAIhYAQACABAAAABkYXRhAgAAAAEA", "parameters":{"max_tokens":64, "do_sample": true, "streaming":false}}' -H 'Content-Type: application/json' | jq .byte_str)
|
||||
echo "$byte_str" > $LOG_PATH/curl_audioqa.log
|
||||
if [ -z "$byte_str" ]; then
|
||||
echo "audioqa failed, please check the logs in ${LOG_PATH}!"
|
||||
exit 1
|
||||
fi
|
||||
echo "Audioqa response check succeed!"
|
||||
}
|
||||
|
||||
function wait_until_pod_ready() {
|
||||
echo "Waiting for the $1 to be ready..."
|
||||
max_retries=30
|
||||
retry_count=0
|
||||
while ! is_pod_ready $2 $3; do
|
||||
if [ $retry_count -ge $max_retries ]; then
|
||||
echo "$1 is not ready after waiting for a significant amount of time"
|
||||
get_gmc_controller_logs
|
||||
exit 1
|
||||
fi
|
||||
echo "$1 is not ready yet. Retrying in 10 seconds..."
|
||||
sleep 10
|
||||
output=$(kubectl get pods -n $2)
|
||||
echo $output
|
||||
retry_count=$((retry_count + 1))
|
||||
done
|
||||
}
|
||||
|
||||
function is_pod_ready() {
|
||||
if [ "$2" == "gmc-controller" ]; then
|
||||
pod_status=$(kubectl get pods -n $1 -o jsonpath='{.items[].status.conditions[?(@.type=="Ready")].status}')
|
||||
else
|
||||
pod_status=$(kubectl get pods -n $1 -l app=$2 -o jsonpath='{.items[].status.conditions[?(@.type=="Ready")].status}')
|
||||
fi
|
||||
if [ "$pod_status" == "True" ]; then
|
||||
return 0
|
||||
else
|
||||
return 1
|
||||
fi
|
||||
}
|
||||
|
||||
function get_gmc_controller_logs() {
|
||||
# Fetch the name of the pod with the app-name gmc-controller in the specified namespace
|
||||
pod_name=$(kubectl get pods -n $SYSTEM_NAMESPACE -l control-plane=gmc-controller -o jsonpath='{.items[0].metadata.name}')
|
||||
|
||||
# Check if the pod name was found
|
||||
if [ -z "$pod_name" ]; then
|
||||
echo "No pod found with app-name gmc-controller in namespace $SYSTEM_NAMESPACE"
|
||||
return 1
|
||||
fi
|
||||
|
||||
# Get the logs of the found pod
|
||||
echo "Fetching logs for pod $pod_name in namespace $SYSTEM_NAMESPACE..."
|
||||
kubectl logs $pod_name -n $SYSTEM_NAMESPACE
|
||||
}
|
||||
|
||||
if [ $# -eq 0 ]; then
|
||||
echo "Usage: $0 <function_name>"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
case "$1" in
|
||||
install_AudioQnA)
|
||||
pushd AudioQnA/kubernetes/intel/cpu/xeon/gmc
|
||||
install_audioqa
|
||||
popd
|
||||
;;
|
||||
validate_AudioQnA)
|
||||
pushd AudioQnA/kubernetes/intel/cpu/xeon/gmc
|
||||
validate_audioqa
|
||||
popd
|
||||
;;
|
||||
*)
|
||||
echo "Unknown function: $1"
|
||||
;;
|
||||
esac
|
||||
@@ -2,8 +2,8 @@
|
||||
|
||||
## 📸 Project Screenshots
|
||||
|
||||

|
||||

|
||||

|
||||

|
||||
|
||||
## 🧐 Features
|
||||
|
||||
Some files were not shown because too many files have changed in this diff.