Compare commits

777 commits: r0.8.1 ... yao531441/
SHA1:

8eac02e58b 9f80a18cb5 f2c8e0b4ff fb53c536a3 26d07019d0 bd6726c53a a0bdf8eab2
99f2f940b6 2596671d3f 7ffb4107e6 7590b055aa 4efb1e0833 ebb7c24ca8 bfefdfad34
b467a13ec3 05011ebaac 7bb05585b6 f6013b8679 505ec6d4b6 ff66600ab4 5375332fb3
df33800945 40e44dfcd6 9259ba41a5 5c7f5718ed d334f5c8fd 670d9f3d18 555c4100b3
04d527d3b0 13c4749ca3 99b62ae49e c546d96e98 be5933ad85 18b4f39f27 ef9290f245
3b0bcb80a8 ccc145ea1a bb7a675665 f90a6d2a8e 1fdab591d9 13ea13862a 1787d1ee98
db4bf1a4c3 f7002fcb70 c39c875211 c2e9a259fe 48eaf9c1c9 a39824f142 e10e6dd002
ea17b38ac5 ea9d444bbf 262ad7d6ec 608dc963c9 ef2156fbf4 52c4db2fc6 697f78ea71
e96f5a1ac5 82ef639ee3 29d449b3ca 338f81430d 87e3c0f59f 27813b3bf9 c7f06d5e54
0967fcac86 a3eba01879 bc168f1732 1eb2e36a18 1a9a2dd53c c63e2cd067 c793dd0b51
1b3f1f632a 4c05e7fd1c 90cfe89e21 62f7f5bd34 7c6189cf43 ae31e4fb75 4fc19c7d73
b80449b8ab 8aa96c6278 a7ef8333ee 71fe886ce9 1a6f821c95 1095d88c5f c73b09a758
13dd27e6d5 a222d1cfbb 1852e6bcc3 72ce335663 15d76c0889 c4763434b8 58b47c15c6
8d421b7912 e9cafb3343 1737d4b2b4 177da5e6fc 063547fb66 c3bb59a354 8c763cbe11
411bb28f41 00d7a65dd8 795c29fe87 094ca7aefe 398441a10c 892624f539 8b7cb3539e
5f4b3a6d12 0392610776 2d8a7e25f6 4d652719c2 7b7728c6c3 6917d5bdb1 46ebb78aa3
b14db6dbd3 ff8008b6d0 d4952d1e7c 12932477ee 42735d0d7d 073e5443ec e8cdf7d668
c48cd651e4 d627209ee3 c50dfb2510 4ce847cdb7 319dbdaa6b 1a0c5f03c6 bbd53443ab
2764a6dcd8 11fa7d5e99 76c088dc0b cee24a083c 5cc047ce34 46a29cc253 8fe2d5d0be
68747a9688 1bd56af994 583428c6a7 853f1302af 340fa075bd b7f24762a3 d4dcbd18ef
87baeb833d 03179296b4 139f2aeeeb 61a8befe05 4582e53b8a 566ffb2edc a04463d5e3
31b1d69e40 fe2a6674e0 60591d8d56 7636de02e4 d397e3f631 0736912c69 e8f2313e07
6d24c1c77a 5a50ae0471 fecc22719a 2204fe8e36 b50dd8f47a bf8d03425c 1b6342aa5b
527b146a80 7159ce3731 671dff7f51 8fe19291c8 35c5cf5de8 63b789ae91 d670dbf0aa
0701b8cfff effa2a28cf adcd113f53 4269669f73 12657ac945 43d0a18270 5362321d3a
eb245fd085 4cab86260f 694207f76b 555e2405b9 7a92435269 c9085c3c68 36aaed748b
9180f1066d 5aecea8e47 6723395e31 785ffb9a1e 428ba481b2 2dfcfa0436 8a5ad1fc72
24cacaaa48 6ead1b12db 8dac9d1035 c1b5ba281f 8f8d3af7c3 e4de76da78 ce38a84372
e8b07c28ec 7b3a125bdf fba0de45d2 f2a5644d9c 6cd7827365 3d8009aa91 78f8ae524d
6abf7652e8 25c1aefc27 d46df4331d 23a77df302 852bc7027c a7eced4161 caec354324
d482554a6b 2ae6871fc5 2ac5be9921 799881a3fa e5c6418c81 0c0edffc5b 9f36e84c1c
8c547c2ba5 80dd86f122 6d781f7b2b abafd5de20 970b869838 87ff149f61 c39a569ab2
81b02bb947 47069ac70c 6ce7730863 ad5523bac7 88a8235f21 63ad850052 9a0c547112
26a6da4123 45d5da2ddd 1b3291a1c8 7ac8cf517a 44a689b0bf 388d3eb5c5 ef9ad61440
4c41a5db83 9adf7a6af0 a4d028e8ea 32d4f714fd fdbc27a9b5 5f4b1828a5 39abef8be8
ed163087ba 259099d19f 9a1118730b ffce7068aa 9b0f98be8b f0fea7b706 1864fac978
94f71f2322 6600c32a9b d953332f43 cbe5805f47 27fdbcab58 f07cf1dad2 ee0e5cc8d9
5c36443b11 62cea74a23 b721c256f9 927698e23e c3e84b5ffa 6b2a041f25 842f46326b
284db982be fc96fe83e2 0316114c4b 0408453fa2 d0cd0aaf53 0ba3decb6b 3d3ac59bfb
f11ab458d8 f3562bef36 7a54064d65 0f7e5a37ac 2d5898244c 59722d2bc9 6bfd156573
528770a8d7 239995da16 f65e8d8668 a49a36cebc 742cb6ddd3 00e9da9ced 277222a922
5c68effc9f 39409d7f61 71e3c57366 5ad24af2ee 3a9a24a51a 301b5e9a69 b4269d6c4f
4cabd55778 698a06edbf 0eae391fda 23d885bf60 287f03a834 a65a1e5598 9812c2fb45
7d218b9f36 ba9892f8ee ff1310b11a ca15fe9bdb f48bd8e74f 91ff520baa 3ca78867eb
7a3dfa90ca c795ef2203 99120f4cd2 9fe480b010 113281d073 370d6928c1 2b26450bb9
81022355a7 ddacb7e86d 5128c2d650 b3c405a5f6 5638075d65 23117871c2 9970605460
28206311fd 589bfb2b7a d2b49bbc82 41374d865b 2c624e1f5f 00241d01d2 ed2b8ed983
a6e702e4d5 aa5c91d7ee b88d09e23f 464e2d3125 1f29eca288 1d7ac82979 5c7a5bd850
72f8079289 6169ea4921 75b0961a48 cc1d97f816 250ffb8b66 1e9d111982 597f17b979
b9790d809b b27b48c488 0bf1d0be65 a01729a5c2 6b6a08df78 0b23cba505 50dd959d60
05365b6140 fd706d1a70 3b9e55cb8e 7d9b34cf5e 84a6a6e9bc 89a7f9e001 236ea6bcce
67634dfd22 df7c192835 f930638844 5613add4dd e18369ba0d 2af1ea0f8e c760cac2f4
a50e4e6f9f 00b526c8e5 4c01e14642 6f9f6f0bad 893f324d07 77e640e2f3 07e47a1f38
bde285dfce f5c08d4fbb 3a371ac102 031cf6e1ff 3299e5c9f5 340796bbae 8182a83382
8192c3166f 240054ac52 c9caf1c083 a426a9a51d bb466b3791 0f8344e4f5 ed8dbaac47
e8cffc6146 907b30b7fe 545aa571bf 5422bcb970 736155ca95 39fa25e03a ac470421d0
edcd7c9d6a ef2047b070 94231584aa c5177c5e2f 006c61bcbb cc108b5a18 f70d9c3853
8808b51e42 17d4b0c97f 3a03d31f8f 179fd84362 9ba034b22d c3e6f43ece 1ac756a1c7
56f770cb28 0cdeb946e4 5648839411 eb91d1f054 2587179224 7e62175c2e 152adf8012
83172e9a99 fb514bb8ba b1bb6db52d 7949045176 cbe952ec5e 3b1a9fe9e1 e66d7fe381
6d3a017609 dbf4ba03fa 4f96d9e605 a8f4245384 096a37aacc 6f8fa6a689 39f68d5d6b
00d9bb6128 59b624c677 2b2c7ee2f5 6b9a27dd83 5720cd45c0 73879d3cec 7c9ed04132
9ff7df9202 b5f95f735e 393367e9f1 7adbba6add 0d52c2f003 1ff85f6a85 f7a7f8aa3f
e3187be819 abd9d12937 a7353bbaa4 aa314f6757 3744bb8c1b 82801d0121 f7026773b8
edc09ece5c dfed2aead2 049517f977 ee83a6d5b4 e2bdd19fd4 c9088eb824 9c3023a12e
bbc95bb708 dd9623d3d5 4c27a3d30c 40386d9bd6 fe97e88c7a 11d8b24c8a 4635a927fa
1da44d99a1 e9b164505e 6263b517b9 2de7c0ba89 944ae47948 2d9aeb3715 a0921f127f
cf86aceb18 c2b7bd25d9 78331ee678 7f7ad0e256 0306c620b5 3372b9d480 5eb3d2869f
ced68e1834 bf5c391e47 c65d7d40fb 9d124161e0 0f5a9c4a5e a65640b4a5 7197286a14
960805a57b 002f0e2b11 fde5996192 bc47930ce1 2332d22950 a2afce1675 89f4c5fb41
98f66405ac 90c2d49050 95b58b51fa d3ce6f5357 a10b4a1f1d 085d859a70 15cc457cea
cfffb4c005 41955f65ad def39cfcdc 35a4fef70d a3f9811f7e 0eedbbfce0 9438d392b4
1929dfd3a0 c7e33647ad 184e9a43b8 658867fce4 620ef76d16 23b820e740 3c164f3aa2
7669c42085 256b58c07e 3c3a5bed67 37c74b232c 4a265abb73 b0487fe92b d486bbbe10
b0f7c9cfc2 eeced9b31c b377c2b8f8 5dae713793 c930bea172 0edff26ee5 778afb50ac
40800b0848 f2f6c09a0f c6fc92d37c c0643b71e8 088ab98f31 441f8cc6ba b056ce6617
773c32b38b 619d941047 b71a12d424 12469c92d8 fbde15b40d ae10712fe8 373fa88033
e2f9037344 afc39fa4c0 e1c476c185 77920613dc 7dec00176e bf28c7f098 63bad29794
36d3ef2b17 0c6b044139 d23cd799e9 644c3a67ce ffecd182db d16c80e493 2de1bfc5bb
75df2c9979 62e06a0aff bd32b03e3c 9d0b49c2d6 75ce2a3ca6 99c10933b4 8bcd82e82d
c1038d2193 33b9d4e421 c9553c6f9a 3e796ba73d 5ed776709d 954a22051b 6f4b00f829
3fb60608b3 c35fe0b429 28f5e4a268 d55a33dda1 daf2a4fad7 3ce395582b 7eaab93d0b
bc817700b9 bd811bd622 05f9828e77 6c364487d3 21e215c5d5 a09395e4a4 f04f061f8c
872e93e4bd 2f03a3a894 372d78c2ac 933c3d3445 88829c9381 d85ec0947c dc94026d98
1e130314d9 b205dc7571 3b70fb0d42 412a0b00c3 96d5cd9127 0bb0abb0d3 e0b3b579a3
e5affb93ab e2a74f7463 79e947e44f bceacdc804 375ea7a90c 06117077b0 b6cce35a93
06696c8e58 bc4bbfa849 edcc50fe97 2d28bebac6 b84c98983d ba17031198 f990f7966e
87e51d5c36 e1b8ce053b 558ea3bb7f 4112fd0b69 e1022911b6 3f2e7b73ac 264759d85a
d42292967c a3fa0d6469 5ab27b63ff 62dbb6daee 10fe3c6066 262a6f6055 e48532e750
be8e283f6b 7b2194f71c 2dd69dcf73 1a934afb3a 5c67204734 d2bab99835 d97882ec8e
63406dc050 ff6f841ec0 88fde629ad 1144fae248 504228eea2 d73129cbf0 ba94e0130d
aebc23f5ae 36fb9a987d 0869029ef2 fa12083e35 a2745b22a7 def19b449e ebe6b473e9
0629696333 4bd7841f17 1d1e1f90b6 67394b88fa 947936ed7b 4b0bc263ab 758d236463
ac3486038c e0bc5f2a4d 71857f50c5 6b617d6743 43b2ae59a1 6730b242cc 4a51874e4d
995a62c9d9 9cf1d88b6d a8244c40ea 55d287dfcf 3563f5db6b 8c40204eda afc3341156
e5ec38c796 4c78f8cbbb adb157f2e7 07baa8f922 7f897979c4 22d066a8d7 c6d811ab11
efa4a5aaa4 2ef83fc67b fbaa0243ee 675ea4a383 2a6af6491a f5f1e323bb d487093d10
e6f5d13ecc dba908aa22 814164dc4f cc84847082 f4f4da2dca 1e47444559 2a2ff45e2b
6a679ba80f 84a781a1a3 32afb6501c 035f39f0d9 6f3e54a22a 1874dfd148 7a0fca73e6
beda609b4b 993688ac91 5fde666c43 4133757642 10c81f1c57 dad8eb4b82 af21e94a29
f78aa9ee2f c25063f4bb 4f3be23efa 9657f7bc83 ac324a9ec2 dfaf47978d acbaaf8ff0
06cb308611 e6b4fff05c a54ffd2c1e f3ffcd50b3 947cbe39b8 fbb81b67db 5d39506c5c
566cf93c34 771975510a 6674832162 67df2804de 46af6f3bc4 343d614591 87617e761c
db2d2bd1a1 4fa37e7842 c73e4e0f26 ba78b4c994 01c1b7504f c016d8264a 4fd3517f23
503a1a9844 08f57fa54a 5a9c109e35 c327972776 f45e4c6956 5dcadf3d3f 3363a37197
b2771ad3f2 e81e0e557c 71363a6b9d a39f23a16e c9f9acab61 040d2b7fd9 6296e9f2fb
c86cf8536d 039014fbbf 1c07a38457 e93146b33e a6385bc6fd c26d0f62b8 e71aba0080
cfcac3f0ec d68be058f5 45cf553d36 1c23d87aa2 64bfea9054 0a6bad0ab9 4f7fc39d66
80e3e2a2d3 8c384e0314 3c9e2aaffd acdd712929 c297155bea 923cf69e63 7a67298f19
a5ed2233b5 e12baca3b8 939502dba1 a072441c06 ed483719a8 14621f8492 2390920b1d
02a15366bc f08d4115db 5ac77f78da ebc165a6aa ad8ca8886e 88eeb0d7e6 e22d41362d
.github/CODEOWNERS (31 changes, vendored, Normal file → Executable file)

@@ -1,9 +1,26 @@
/AudioQnA/ sihan.chen@intel.com
/ChatQnA/ liang1.lv@intel.com
# Code owners will review PRs within their respective folders.

* liang1.lv@intel.com feng.tian@intel.com suyue.chen@intel.com kaokao.lv@intel.com minmin.hou@intel.com rita.brugarolas.brufau@intel.com
/.github/ suyue.chen@intel.com ze.pan@intel.com
/AgentQnA/ abolfazl.shahbazi@intel.com kaokao.lv@intel.com minmin.hou@intel.com
/AudioQnA/ sihan.chen@intel.com wenjiao.yue@intel.com
/AvatarChatbot/ chun.tao@intel.com kaokao.lv@intel.com
/ChatQnA/ liang1.lv@intel.com letong.han@intel.com
/CodeGen/ liang1.lv@intel.com
/CodeTrans/ sihan.chen@intel.com
/DocSum/ sihan.chen@intel.com
/FaqGen/ letong.han@intel.com
/SearchQnA/ letong.han@intel.com
/Translation/ liang1.lv@intel.com
/VisualQnA/ liang1.lv@intel.com
/DBQnA/ supriya.krishnamurthi@intel.com liang1.lv@intel.com
/DocIndexRetriever/ abolfazl.shahbazi@intel.com kaokao.lv@intel.com chendi.xue@intel.com
/DocSum/ letong.han@intel.com
/EdgeCraftRAG/ yongbo.zhu@intel.com mingyuan.qi@intel.com
/FinanceAgent/ abolfazl.shahbazi@intel.com kaokao.lv@intel.com minmin.hou@intel.com rita.brugarolas.brufau@intel.com
/GraphRAG/ rita.brugarolas.brufau@intel.com abolfazl.shahbazi@intel.com
/InstructionTuning/ xinyu.ye@intel.com kaokao.lv@intel.com
/MultimodalQnA/ melanie.h.buehler@intel.com tiep.le@intel.com
/ProductivitySuite/ jaswanth.karani@intel.com hoong.tee.yeoh@intel.com
/RerankFinetuning/ xinyu.ye@intel.com kaokao.lv@intel.com
/SearchQnA/ sihan.chen@intel.com letong.han@intel.com
/Text2Image/ wenjiao.yue@intel.com xinyu.ye@intel.com
/Translation/ liang1.lv@intel.com sihan.chen@intel.com
/VideoQnA/ huiling.bao@intel.com
/VisualQnA/ liang1.lv@intel.com sihan.chen@intel.com
/WorkflowExecAgent/ joshua.jian.ern.liew@intel.com kaokao.lv@intel.com
.github/ISSUE_TEMPLATE/1_bug_template.yml (146 lines, vendored, Normal file)

@@ -0,0 +1,146 @@
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

name: Report Bug
description: Used to report bug
title: "[Bug]"
labels: ["bug"]
body:
  - type: dropdown
    id: priority
    attributes:
      label: Priority
      options:
        - Undecided
        - P1-Stopper
        - P2-High
        - P3-Medium
        - P4-Low
      default: 0
    validations:
      required: true

  - type: dropdown
    id: os
    attributes:
      label: OS type
      options:
        - Ubuntu
        - RedHat
        - SUSE
        - Windows
        - Mac
        - BSD
        - Other (Please let us know in description)
        - N/A
    validations:
      required: true

  - type: dropdown
    id: hardware
    attributes:
      label: Hardware type
      options:
        - Xeon-GNR
        - Xeon-EMR
        - Xeon-SPR
        - Xeon-ICX
        - Xeon-other (Please let us know in description)
        - Gaudi3
        - Gaudi2
        - AI-PC (Please let us know in description)
        - CPU-other (Please let us know in description)
        - GPU-PVC
        - GPU-Flex
        - GPU-Arc
        - GPU-Arc-MTL
        - GPU-Nvidia
        - GPU-AMD
        - GPU-other (Please let us know in description)
        - N/A
    validations:
      required: true

  - type: checkboxes
    id: install
    attributes:
      label: Installation method
      options:
        - label: Pull docker images from hub.docker.com
        - label: Build docker images from source
        - label: Other
        - label: N/A
    validations:
      required: true

  - type: checkboxes
    id: deploy
    attributes:
      label: Deploy method
      options:
        - label: Docker
        - label: Docker Compose
        - label: Kubernetes Helm Charts
        - label: Kubernetes GMC
        - label: Other
        - label: N/A
    validations:
      required: true

  - type: dropdown
    id: node
    attributes:
      label: Running nodes
      options:
        - Single Node
        - Multiple Nodes
        - Other
        - N/A
      default: 0
    validations:
      required: true

  - type: textarea
    id: version
    attributes:
      label: What's the version?
      description: Docker ID in hub.docker.com or commit ID of Dockerfile.
      placeholder:
    validations:
      required: true

  - type: textarea
    id: problem
    attributes:
      label: Description
      description: What is the problem, question, or error?
    validations:
      required: true

  - type: textarea
    id: reproduce
    attributes:
      label: Reproduce steps
      description:
    validations:
      required: true

  - type: textarea
    id: log
    attributes:
      label: Raw log
      description: Provide the log of RESTFul API or docker for micro-service by `docker logs CONTAINER`
      render: shell
    validations:
      required: false

  - type: textarea
    id: attachments
    attributes:
      label: Attachments
      description: Attach any relevant files or screenshots.
    validations:
      required: false
.github/ISSUE_TEMPLATE/2_feature_template.yml (84 lines, vendored, Normal file)

@@ -0,0 +1,84 @@
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

name: Report Feature
description: Used to report feature
title: "[Feature]"
labels: ["feature"]
body:
  - type: dropdown
    id: priority
    attributes:
      label: Priority
      options:
        - Undecided
        - P1-Stopper
        - P2-High
        - P3-Medium
        - P4-Low
      default: 0
    validations:
      required: true

  - type: dropdown
    id: os
    attributes:
      label: OS type
      options:
        - Ubuntu
        - RedHat
        - SUSE
        - Windows
        - Mac
        - BSD
        - Other (Please let us know in description)
        - N/A
    validations:
      required: true

  - type: dropdown
    id: hardware
    attributes:
      label: Hardware type
      options:
        - Xeon-GNR
        - Xeon-EMR
        - Xeon-SPR
        - Xeon-ICX
        - Xeon-other (Please let us know in description)
        - Gaudi3
        - Gaudi2
        - AI-PC (Please let us know in description)
        - CPU-other (Please let us know in description)
        - GPU-PVC
        - GPU-Flex
        - GPU-Arc
        - GPU-Arc-MTL
        - GPU-Nvidia
        - GPU-AMD
        - GPU-other (Please let us know in description)
        - N/A
    validations:
      required: true

  - type: dropdown
    id: node
    attributes:
      label: Running nodes
      options:
        - Single Node
        - Multiple Nodes
        - Other
        - N/A
      default: 0
    validations:
      required: true

  - type: textarea
    id: problem
    attributes:
      label: Description
      description: Describe your problem and expectation, avoid to ask solution (we would provide better solution).
      placeholder: As cloud/AI PC user, I want xxxx, so that I/customer can xxx.
    validations:
      required: true
.github/ISSUE_TEMPLATE/config.yml (4 lines, vendored, Normal file)

@@ -0,0 +1,4 @@
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

blank_issues_enabled: true
.github/code_spell_ignore.txt (3 changes, vendored)

@@ -0,0 +1,3 @@
ModelIn
modelin
pressEnter
.github/env/_build_image.sh (5 lines, vendored, Normal file)

@@ -0,0 +1,5 @@
# Copyright (C) 2025 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

export VLLM_VER=v0.8.3
export VLLM_FORK_VER=v0.6.6.post1+Gaudi-1.20.0
.github/license_template.txt (4 changes, vendored)

@@ -1,2 +1,2 @@
Copyright (C) 2024 Intel Corporation
SPDX-License-Identifier: Apache-2.0
Copyright (C) 2025 Intel Corporation
SPDX-License-Identifier: Apache-2.0
.github/workflows/VisualQnA.yml (50 lines deleted, vendored)

@@ -1,50 +0,0 @@
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

name: VisualQnA-test

on:
  pull_request_target:
    branches: [main]
    types: [opened, reopened, ready_for_review, synchronize] # added `ready_for_review` since draft is skipped
    paths:
      - VisualQnA/**
      - "!**.md"
      - "!**/ui/**"
      - .github/workflows/VisualQnA.yml
  workflow_dispatch:

# If there is a new commit, the previous jobs will be canceled
concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true

jobs:
  VisualQnA:
    runs-on: aise-cluster
    strategy:
      matrix:
        job_name: ["basic"]
      fail-fast: false
    steps:
      - name: Clean Up Working Directory
        run: sudo rm -rf ${{github.workspace}}/*

      - name: Checkout out Repo
        uses: actions/checkout@v4
        with:
          ref: "refs/pull/${{ github.event.number }}/merge"

      - name: Run Test
        env:
          HUGGINGFACEHUB_API_TOKEN: ${{ secrets.HUGGINGFACEHUB_API_TOKEN }}
        run: |
          cd ${{ github.workspace }}/VisualQnA/tests
          bash test_${{ matrix.job_name }}_inference.sh

      - name: Publish pipeline artifact
        if: ${{ !cancelled() }}
        uses: actions/upload-artifact@v4
        with:
          name: ${{ matrix.job_name }}
          path: ${{ github.workspace }}/VisualQnA/tests/*.log
.github/workflows/_build_comps_base_image.yml (65 lines, vendored, Normal file)

@@ -0,0 +1,65 @@
# Copyright (C) 2025 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

name: Build Comps Base Image
permissions: read-all
on:
  workflow_call:
    inputs:
      node:
        required: true
        type: string
      build:
        default: true
        required: false
        type: boolean
      tag:
        default: "latest"
        required: false
        type: string
      opea_branch:
        default: "main"
        required: false
        type: string
      inject_commit:
        default: false
        required: false
        type: boolean

jobs:
  pre-build-image-check:
    runs-on: ubuntu-latest
    outputs:
      should_skip: ${{ steps.check-skip.outputs.should_skip }}
    steps:
      - name: Check if job should be skipped
        id: check-skip
        run: |
          should_skip=true
          if [[ "${{ inputs.node }}" == "gaudi" || "${{ inputs.node }}" == "xeon" ]]; then
            should_skip=false
          fi
          echo "should_skip=$should_skip"
          echo "should_skip=$should_skip" >> $GITHUB_OUTPUT

  build-images:
    needs: [ pre-build-image-check ]
    if: ${{ needs.pre-build-image-check.outputs.should_skip == 'false' && fromJSON(inputs.build) }}
    runs-on: "docker-build-${{ inputs.node }}"
    steps:
      - name: Clean Up Working Directory
        run: sudo rm -rf ${{github.workspace}}/*

      - name: Clone Required Repo
        run: |
          git clone --depth 1 --branch ${{ inputs.opea_branch }} https://github.com/opea-project/GenAIComps.git
          cd GenAIComps && git rev-parse HEAD && cd ../ && ls -l

      - name: Build Image
        uses: opea-project/validation/actions/image-build@main
        with:
          work_dir: ${{ github.workspace }}/GenAIComps
          docker_compose_path: ${{ github.workspace }}/GenAIComps/.github/workflows/docker/compose/base-compose.yaml
          registry: ${OPEA_IMAGE_REPO}opea
          inject_commit: ${{ inputs.inject_commit }}
          tag: ${{ inputs.tag }}
.github/workflows/_build_image.yml (96 lines, vendored, Normal file)

@@ -0,0 +1,96 @@
# Copyright (C) 2025 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

name: Build Images
permissions: read-all
on:
  workflow_call:
    inputs:
      node:
        required: true
        type: string
      build:
        default: true
        required: false
        type: boolean
      example:
        required: true
        type: string
      services:
        default: ""
        required: false
        type: string
      tag:
        default: "latest"
        required: false
        type: string
      opea_branch:
        default: "main"
        required: false
        type: string
      inject_commit:
        default: false
        required: false
        type: boolean

jobs:
  pre-build-image-check:
    runs-on: ubuntu-latest
    outputs:
      should_skip: ${{ steps.check-skip.outputs.should_skip }}
    steps:
      - name: Check if job should be skipped
        id: check-skip
        run: |
          should_skip=true
          if [[ "${{ inputs.node }}" == "gaudi" || "${{ inputs.node }}" == "xeon" ]]; then
            should_skip=false
          fi
          echo "should_skip=$should_skip"
          echo "should_skip=$should_skip" >> $GITHUB_OUTPUT

  build-images:
    needs: [ pre-build-image-check ]
    if: ${{ needs.pre-build-image-check.outputs.should_skip == 'false' && fromJSON(inputs.build) }}
    runs-on: "docker-build-${{ inputs.node }}"
    steps:
      - name: Clean Up Working Directory
        run: sudo rm -rf ${{github.workspace}}/*

      - name: Get Checkout Ref
        run: |
          if [ "${{ github.event_name }}" == "pull_request" ] || [ "${{ github.event_name }}" == "pull_request_target" ]; then
            echo "CHECKOUT_REF=refs/pull/${{ github.event.number }}/merge" >> $GITHUB_ENV
          else
            echo "CHECKOUT_REF=${{ github.ref }}" >> $GITHUB_ENV
          fi

      - name: Checkout out GenAIExamples
        uses: actions/checkout@v4
        with:
          ref: ${{ env.CHECKOUT_REF }}
          fetch-depth: 0

      - name: Clone Required Repo
        run: |
          cd ${{ github.workspace }}/${{ inputs.example }}/docker_image_build
          docker_compose_path=${{ github.workspace }}/${{ inputs.example }}/docker_image_build/build.yaml
          source ${{ github.workspace }}/.github/env/_build_image.sh
          if [[ $(grep -c "vllm:" ${docker_compose_path}) != 0 ]]; then
            git clone -b ${VLLM_VER} --single-branch https://github.com/vllm-project/vllm.git
          fi
          if [[ $(grep -c "vllm-gaudi:" ${docker_compose_path}) != 0 ]]; then
            git clone -b ${VLLM_FORK_VER} --single-branch https://github.com/HabanaAI/vllm-fork.git
          fi
          git clone --depth 1 --branch ${{ inputs.opea_branch }} https://github.com/opea-project/GenAIComps.git
          cd GenAIComps && git rev-parse HEAD && cd ../

      - name: Build Image
        uses: opea-project/validation/actions/image-build@main
        with:
          work_dir: ${{ github.workspace }}/${{ inputs.example }}/docker_image_build
          docker_compose_path: ${{ github.workspace }}/${{ inputs.example }}/docker_image_build/build.yaml
          service_list: ${{ inputs.services }}
          registry: ${OPEA_IMAGE_REPO}opea
          inject_commit: ${{ inputs.inject_commit }}
          tag: ${{ inputs.tag }}
.github/workflows/_example-workflow.yml (106 lines, vendored, Normal file)

@@ -0,0 +1,106 @@
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

name: Example jobs
permissions: read-all
on:
  workflow_call:
    inputs:
      node:
        required: true
        type: string
      example:
        required: true
        type: string
      services:
        default: ""
        required: false
        type: string
      tag:
        default: "latest"
        required: false
        type: string
      build:
        default: true
        required: false
        type: boolean
      test_compose:
        default: false
        required: false
        type: boolean
      test_helmchart:
        default: false
        required: false
        type: boolean
      test_gmc:
        default: false
        required: false
        type: boolean
      opea_branch:
        default: "main"
        required: false
        type: string
      inject_commit:
        default: false
        required: false
        type: boolean
      use_model_cache:
        default: false
        required: false
        type: boolean

jobs:
  ####################################################################################################
  # Image Build
  ####################################################################################################
  build-images:
    uses: ./.github/workflows/_build_image.yml
    with:
      node: ${{ inputs.node }}
      build: ${{ fromJSON(inputs.build) }}
      example: ${{ inputs.example }}
      services: ${{ inputs.services }}
      tag: ${{ inputs.tag }}
      opea_branch: ${{ inputs.opea_branch }}
      inject_commit: ${{ inputs.inject_commit }}

  ####################################################################################################
  # Docker Compose Test
  ####################################################################################################
  test-example-compose:
    needs: [build-images]
    if: ${{ inputs.test_compose }}
    uses: ./.github/workflows/_run-docker-compose.yml
    with:
      tag: ${{ inputs.tag }}
      example: ${{ inputs.example }}
      hardware: ${{ inputs.node }}
      use_model_cache: ${{ inputs.use_model_cache }}
      opea_branch: ${{ inputs.opea_branch }}
    secrets: inherit

  ####################################################################################################
  # helmchart Test
  ####################################################################################################
  test-helmchart:
    if: ${{ fromJSON(inputs.test_helmchart) }}
    uses: ./.github/workflows/_helm-e2e.yml
    with:
      example: ${{ inputs.example }}
      hardware: ${{ inputs.node }}
      tag: ${{ inputs.tag }}
      mode: "CD"
    secrets: inherit

  ####################################################################################################
  # GMC Test
  ####################################################################################################
  test-gmc-pipeline:
    needs: [build-images]
    if: false # ${{ fromJSON(inputs.test_gmc) }}
    uses: ./.github/workflows/_gmc-e2e.yml
    with:
      example: ${{ inputs.example }}
      hardware: ${{ inputs.node }}
    secrets: inherit
.github/workflows/_get-image-list.yml (54 lines, vendored, Normal file)

@@ -0,0 +1,54 @@
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

name: Get Image List
permissions: read-all
on:
  workflow_call:
    inputs:
      examples:
        default: ""
        required: false
        type: string
      images:
        default: ""
        required: false
        type: string
    outputs:
      matrix:
        description: "Image List"
        value: ${{ jobs.get-image-list.outputs.matrix }}

jobs:
  get-image-list:
    runs-on: ubuntu-latest
    outputs:
      matrix: ${{ steps.get-matrix.outputs.matrix }}
    steps:
      - name: Checkout out Repo
        uses: actions/checkout@v4

      - name: Set Matrix
        id: get-matrix
        run: |
          image_list=[]
          if [[ ! -z "${{ inputs.examples }}" ]]; then
            pip install yq
            examples=($(echo ${{ inputs.examples }} | tr ',' ' '))
            for example in ${examples[@]}
            do
              images=$(cat ${{ github.workspace }}/${example}/docker_image_build/build.yaml | yq -r '.[]' | jq 'keys' | jq -c '.')
              image_list=$(echo ${image_list} | jq -s '.[0] + .[1] | unique' - <(echo ${images}))
            done
          fi

          if [[ ! -z "${{ inputs.images }}" ]]; then
            images=($(echo ${{ inputs.images }} | tr ',' ' '))
            input_image_list=$(printf '%s\n' "${images[@]}" | sort -u | jq -R '.' | jq -sc '.')
            image_list=$(echo ${image_list} | jq -s '.[0] + .[1] | unique' - <(echo ${input_image_list}))
          fi

          echo "print image list..."
          echo "$image_list" | jq . | jq -r '.[]'
          echo "end of image list..."
          echo "matrix=$(echo ${image_list} | jq -c '.')" >> $GITHUB_OUTPUT
@@ -11,14 +11,10 @@ on:
        required: false
        type: string
        default: '.github|README.md|*.txt'
      xeon_server_label:
      test_mode:
        required: false
        type: string
        default: 'xeon'
      gaudi_server_label:
        required: false
        type: string
        default: 'gaudi'
        default: 'compose'
    outputs:
      run_matrix:
        description: "The matrix string"
@@ -46,32 +42,30 @@ jobs:
          ref: ${{ env.CHECKOUT_REF }}
          fetch-depth: 0

      - name: Check Dangerous Command Injection
        if: github.event_name == 'pull_request' || github.event_name == 'pull_request_target'
        uses: opea-project/validation/actions/check-cmd@main
        with:
          work_dir: ${{ github.workspace }}

      - name: Get test matrix
        id: get-test-matrix
        run: |
          set -xe
          if [ "${{ github.event_name }}" == "pull_request" ] || [ "${{ github.event_name }}" == "pull_request_target" ]; then
            base_commit=${{ github.event.pull_request.base.sha }}
            LATEST_COMMIT_SHA=$(curl -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" \
              "https://api.github.com/repos/opea-project/GenAIExamples/commits?sha=${{ github.event.pull_request.base.ref }}" | jq -r '.[0].sha')
            echo "Latest commit SHA is $LATEST_COMMIT_SHA"
            base_commit=$LATEST_COMMIT_SHA
          else
            base_commit=$(git rev-parse HEAD~1) # push event
          fi
          merged_commit=$(git log -1 --format='%H')
          echo "print all changed files..."
          git diff --name-only ${base_commit} ${merged_commit}
          changed_files="$(git diff --name-only ${base_commit} ${merged_commit} | \
            grep -vE '${{ inputs.diff_excluded_files }}')" || true
          examples=$(printf '%s\n' "${changed_files[@]}" | grep '/' | cut -d'/' -f1 | sort -u)
          run_matrix="{\"include\":["
          for example in ${examples}; do
            run_hardware=""
            if [ $(printf '%s\n' "${changed_files[@]}" | grep ${example} | grep -c gaudi) != 0 ]; then run_hardware="gaudi"; fi
            if [ $(printf '%s\n' "${changed_files[@]}" | grep ${example} | grep -c xeon) != 0 ]; then run_hardware="xeon ${run_hardware}"; fi
            if [ "$run_hardware" == "" ]; then run_hardware="gaudi"; fi
            for hw in ${run_hardware}; do
              if [ "$hw" == "gaudi" ] && [ "${{ inputs.gaudi_server_label }}" != "" ]; then
                run_matrix="${run_matrix}{\"example\":\"${example}\",\"hardware\":\"${{ inputs.gaudi_server_label }}\"},"
              elif [ "${{ inputs.xeon_server_label }}" != "" ]; then
                run_matrix="${run_matrix}{\"example\":\"${example}\",\"hardware\":\"${{ inputs.xeon_server_label }}\"},"
              fi
            done
          done
          run_matrix=$run_matrix"]}"
          echo "run_matrix=${run_matrix}" >> $GITHUB_OUTPUT
          echo "filtered changed_files=$changed_files"
          export changed_files=$changed_files
          export test_mode=${{ inputs.test_mode }}
          export WORKSPACE=${{ github.workspace }}
          bash .github/workflows/scripts/get_test_matrix.sh
@@ -1,57 +1,51 @@
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

name: E2E test with GMC
# This workflow will only test GMC pipeline and will not install GMC any more
name: Single GMC E2e Test For CD Workflow Call

on:
  pull_request_target:
    branches: [main]
    types: [opened, reopened, ready_for_review, synchronize] # added `ready_for_review` since draft is skipped
    paths:
      - "**/kubernetes/**"
      - "**/tests/test_gmc**"
      - "!**.md"
      - "!**.txt"
      - "!**/kubernetes/manifests/**"
  workflow_dispatch:

concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true
  workflow_call:
    inputs:
      example:
        default: "ChatQnA"
        description: "The example to test on K8s"
        required: true
        type: string
      hardware:
        default: "xeon"
        description: "Nodes to run the test, xeon or gaudi"
        required: true
        type: string

jobs:
  job1:
    uses: ./.github/workflows/reuse-get-test-matrix.yml
    with:
      diff_excluded_files: '.github|deprecated|docker|assets|*.md|*.txt'
      xeon_server_label: 'xeon'
      gaudi_server_label: 'gaudi'

  gmc-test:
    needs: [job1]
    strategy:
      matrix: ${{ fromJSON(needs.job1.outputs.run_matrix) }}
    runs-on: "k8s-${{ matrix.hardware }}"
    runs-on: "k8s-${{ inputs.hardware }}"
    continue-on-error: true
    steps:
      - name: E2e test gmc
        run: |
          echo "Matrix - gmc: ${{ matrix.example }}"

      - name: Clean Up Working Directory
        run: sudo rm -rf ${{github.workspace}}/*

      - name: Get checkout ref
        run: |
          if [ "${{ github.event_name }}" == "pull_request" ] || [ "${{ github.event_name }}" == "pull_request_target" ]; then
            echo "CHECKOUT_REF=refs/pull/${{ github.event.number }}/merge" >> $GITHUB_ENV
          else
            echo "CHECKOUT_REF=${{ github.ref }}" >> $GITHUB_ENV
          fi
          echo "checkout ref ${{ env.CHECKOUT_REF }}"

      - name: Checkout out Repo
        uses: actions/checkout@v4
        with:
          ref: "refs/pull/${{ github.event.number }}/merge"
          ref: ${{ env.CHECKOUT_REF }}
          fetch-depth: 0

      - name: Set variables
        run: |
          if [ ${{ matrix.hardware }} == "gaudi" ]; then IMAGE_REPO=${{ vars.IMAGE_REPO_GAUDI }}; else IMAGE_REPO=${{ vars.IMAGE_REPO_XEON }}; fi
          echo "IMAGE_REPO=$OPEA_IMAGE_REPO" >> $GITHUB_ENV
          lower_example=$(echo "${{ matrix.example }}" | tr '[:upper:]' '[:lower:]')
          echo "APP_NAMESPACE=$lower_example-$(date +%Y%m%d%H%M%S)" >> $GITHUB_ENV
          lower_example=$(echo "${{ inputs.example }}" | tr '[:upper:]' '[:lower:]')
          echo "APP_NAMESPACE=$lower_example-$(tr -dc a-z0-9 </dev/urandom | head -c 16)" >> $GITHUB_ENV
          echo "ROLLOUT_TIMEOUT_SECONDS=1800s" >> $GITHUB_ENV
          echo "KUBECTL_TIMEOUT_SECONDS=60s" >> $GITHUB_ENV
          echo "continue_test=true" >> $GITHUB_ENV
@@ -65,16 +59,16 @@ jobs:
          GOOGLE_CSE_ID: ${{ secrets.GOOGLE_CSE_ID }}
          GOOGLE_API_KEY: ${{ secrets.GOOGLE_API_KEY }}
        run: |
          if [[ ! -f ${{ github.workspace }}/${{ matrix.example }}/tests/test_gmc_on_${{ matrix.hardware }}.sh ]]; then
          if [[ ! -f ${{ github.workspace }}/${{ inputs.example }}/tests/test_gmc_on_${{ inputs.hardware }}.sh ]]; then
            echo "No test script found, exist test!"
            exit 0
          else
            echo "should_cleanup=true" >> $GITHUB_ENV
            ${{ github.workspace }}/${{ matrix.example }}/tests/test_gmc_on_${{ matrix.hardware }}.sh install_${{ matrix.example }}
            echo "Testing ${{ matrix.example }}, waiting for pod ready..."
            ${{ github.workspace }}/${{ inputs.example }}/tests/test_gmc_on_${{ inputs.hardware }}.sh install_${{ inputs.example }}
            echo "Testing ${{ inputs.example }}, waiting for pod ready..."
            if kubectl rollout status deployment --namespace "$APP_NAMESPACE" --timeout "$ROLLOUT_TIMEOUT_SECONDS"; then
              echo "Testing gmc ${{ matrix.example }}, running validation test..."
              ${{ github.workspace }}/${{ matrix.example }}/tests/test_gmc_on_${{ matrix.hardware }}.sh validate_${{ matrix.example }}
              echo "Testing gmc ${{ inputs.example }}, running validation test..."
              ${{ github.workspace }}/${{ inputs.example }}/tests/test_gmc_on_${{ inputs.hardware }}.sh validate_${{ inputs.example }}
            else
              echo "Timeout waiting for pods in namespace $APP_NAMESPACE to be ready!"
              exit 1
.github/workflows/_gmc-workflow.yml (116 lines, vendored, Normal file)

@@ -0,0 +1,116 @@
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

name: Build and deploy GMC system on call and manual

on:
  workflow_dispatch:
    inputs:
      tag:
        default: "latest"
        required: true
        type: string
        description: "Tag to apply to images"
      node:
        default: "xeon"
        required: true
        type: string
        description: "Hardware to run test"
      opea_branch:
        default: "main"
        required: false
        type: string
        description: 'OPEA branch for image build'
  workflow_call:
    inputs:
      tag:
        default: "latest"
        required: true
        type: string
        description: "Tag to apply to images"
      node:
        default: "xeon"
        required: true
        type: string
        description: "Hardware to run test"
      opea_branch:
        default: "main"
        required: false
        type: string
        description: 'OPEA branch for image build'

jobs:
  ####################################################################################################
  # Image Build and Scan
  ####################################################################################################
  image-build:
    runs-on: "docker-build-${{ inputs.node }}"
    steps:
      - name: Checkout GenAIInfra repository
        uses: actions/checkout@v4
        with:
          repository: opea-project/GenAIInfra
          ref: ${{ inputs.opea_branch }}
          path: GenAIInfra

      - name: Set variables
        id: set_variables
        run: |
          echo "DOCKER_REGISTRY=${OPEA_IMAGE_REPO}opea" >> $GITHUB_ENV
          echo "IMAGE_REPO=${OPEA_IMAGE_REPO}" >> $GITHUB_OUTPUT
          echo "VERSION=${{ inputs.tag }}" >> $GITHUB_ENV
          echo "VERSION=${{ inputs.tag }}" >> $GITHUB_OUTPUT

      - name: Build image and push
        run: |
          cd ${{github.workspace}}/GenAIInfra/microservices-connector
          make docker.build
          make docker.push

      - name: Clean up images
        if: always()
        run: |
          docker rmi ${{ env.DOCKER_REGISTRY }}/gmcrouter:${{ env.VERSION }}
          docker rmi ${{ env.DOCKER_REGISTRY }}/gmcmanager:${{ env.VERSION }}

      - name: Clean up GenAIInfra source codes
        if: always()
        run: |
          rm -rf ${{github.workspace}}/GenAIInfra

  ####################################################################################################
  # GMC Install
  ####################################################################################################
  gmc-install:
    needs: image-build
    runs-on: "k8s-${{ inputs.node }}"
    steps:
      - name: Checkout GenAIInfra repository
        uses: actions/checkout@v4
        with:
          repository: opea-project/GenAIInfra
          ref: ${{ inputs.opea_branch }}
          path: GenAIInfra

      - name: Set variables
        run: |
          echo "SYSTEM_NAMESPACE=opea-system" >> $GITHUB_ENV
          echo "VERSION=${{ inputs.tag }}" >> $GITHUB_ENV
          echo "SET_VERSION=true" >> $GITHUB_ENV # to change the tag of microservice images

      - name: Cleanup existing GMC
        run: |
          cd GenAIInfra
          .github/workflows/scripts/e2e/gmc_install.sh cleanup_gmc
          cd ..

      - name: Install GMC
        run: |
          cd GenAIInfra
          .github/workflows/scripts/e2e/gmc_install.sh install_gmc
          cd ..

      - name: Clean up GenAIInfra source codes
        if: always()
        run: |
          rm -rf ${{github.workspace}}/GenAIInfra
252
.github/workflows/_helm-e2e.yml
vendored
Normal file
252
.github/workflows/_helm-e2e.yml
vendored
Normal file
@@ -0,0 +1,252 @@
|
||||
# Copyright (C) 2024 Intel Corporation
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
name: Helm Chart E2e Test For Call
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
example:
|
||||
default: "chatqna"
|
||||
required: true
|
||||
type: string
|
||||
description: "example to test, chatqna or common/asr"
|
||||
hardware:
|
||||
default: "xeon"
|
||||
required: true
|
||||
type: string
|
||||
dockerhub:
|
||||
default: "false"
|
||||
required: false
|
||||
type: string
|
||||
description: "Set to true if you want to use released docker images at dockerhub. By default using internal docker registry."
|
||||
mode:
|
||||
default: "CD"
|
||||
description: "Whether the test range is CI, CD or CICD"
|
||||
required: false
|
||||
type: string
|
||||
tag:
|
||||
default: "latest"
|
||||
required: false
|
||||
type: string
|
||||
version:
|
||||
default: "0-latest"
|
||||
required: false
|
||||
type: string
|
||||
|
||||
jobs:
|
||||
get-test-case:
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
value_files: ${{ steps.get-test-files.outputs.value_files }}
|
||||
CHECKOUT_REF: ${{ steps.get-checkout-ref.outputs.CHECKOUT_REF }}
|
||||
steps:
|
||||
- name: Get checkout ref
|
||||
id: get-checkout-ref
|
||||
run: |
|
||||
if [ "${{ github.event_name }}" == "pull_request" ] || [ "${{ github.event_name }}" == "pull_request_target" ]; then
|
||||
CHECKOUT_REF=refs/pull/${{ github.event.number }}/merge
|
||||
else
|
||||
CHECKOUT_REF=${{ github.ref }}
|
||||
fi
|
||||
echo "CHECKOUT_REF=${CHECKOUT_REF}" >> $GITHUB_OUTPUT
|
||||
echo "checkout ref ${CHECKOUT_REF}"
|
||||
|
||||
- name: Checkout Repo
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ steps.get-checkout-ref.outputs.CHECKOUT_REF }}
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Get test Services
|
||||
id: get-test-files
|
||||
run: |
|
||||
set -x
|
||||
if [ "${{ inputs.mode }}" = "CI" ]; then
|
||||
base_commit=${{ github.event.pull_request.base.sha }}
|
||||
merged_commit=$(git log -1 --format='%H')
|
||||
values_files=$(git diff --name-only ${base_commit} ${merged_commit} | \
|
||||
grep "${{ inputs.example }}/kubernetes/helm" | \
|
||||
grep "values.yaml" |\
|
||||
sort -u)
|
||||
echo $values_files
|
||||
elif [ "${{ inputs.mode }}" = "CD" ]; then
|
||||
values_files=$(ls ${{ inputs.example }}/kubernetes/helm/*values.yaml || true)
|
||||
fi
|
||||
value_files="["
|
||||
for file in ${values_files}; do
|
||||
if [ -f "$file" ]; then
|
||||
filename=$(basename "$file")
|
||||
if [[ "$filename" == *"gaudi"* ]]; then
|
||||
if [[ "${{ inputs.hardware }}" == "gaudi" ]]; then
|
||||
value_files="${value_files}\"${filename}\","
|
||||
fi
|
||||
elif [[ "$filename" == *"rocm"* ]]; then
|
||||
if [[ "${{ inputs.hardware }}" == "rocm" ]]; then
|
||||
value_files="${value_files}\"${filename}\","
|
||||
fi
|
||||
elif [[ "$filename" == *"nv"* ]]; then
|
||||
continue
|
||||
else
|
||||
if [[ "${{ inputs.hardware }}" == "xeon" ]]; then
|
||||
value_files="${value_files}\"${filename}\","
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
done
|
||||
value_files="${value_files%,}]"
|
||||
|
||||
echo "value_files=${value_files}"
|
||||
echo "value_files=${value_files}" >> $GITHUB_OUTPUT
|
||||
|
||||
helm-test:
|
||||
needs: [get-test-case]
|
||||
if: ${{ needs.get-test-case.outputs.value_files != '[]' }}
|
||||
strategy:
|
||||
matrix:
|
||||
value_file: ${{ fromJSON(needs.get-test-case.outputs.value_files) }}
|
||||
fail-fast: false
|
||||
runs-on: k8s-${{ inputs.hardware }}
|
||||
continue-on-error: true
|
||||
steps:
|
||||
- name: Clean Up Working Directory
|
||||
run: |
|
||||
echo "value_file=${{ matrix.value_file }}"
|
||||
sudo rm -rf ${{github.workspace}}/*
|
||||
|
||||
- name: Get checkout ref
|
||||
id: get-checkout-ref
|
||||
run: |
|
||||
if [ "${{ github.event_name }}" == "pull_request" ] || [ "${{ github.event_name }}" == "pull_request_target" ]; then
|
||||
CHECKOUT_REF=refs/pull/${{ github.event.number }}/merge
|
||||
else
|
||||
CHECKOUT_REF=${{ github.ref }}
|
||||
fi
|
||||
echo "CHECKOUT_REF=${CHECKOUT_REF}" >> $GITHUB_OUTPUT
|
||||
echo "checkout ref ${CHECKOUT_REF}"
|
||||
|
||||
- name: Checkout Repo
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ steps.get-checkout-ref.outputs.CHECKOUT_REF }}
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Set variables
|
||||
env:
|
||||
example: ${{ inputs.example }}
|
||||
run: |
|
||||
if [[ ! "$example" =~ ^[a-zA-Z0-9]{1,20}$ ]] || [[ "$example" =~ \.\. ]] || [[ "$example" == -* || "$example" == *- ]]; then
|
||||
echo "Error: Invalid input - only lowercase alphanumeric and internal hyphens allowed"
|
||||
exit 1
|
||||
fi
|
||||
# SAFE_PREFIX="kb-"
|
||||
CHART_NAME="${SAFE_PREFIX}$(echo "$example" | tr '[:upper:]' '[:lower:]')"
|
||||
RAND_SUFFIX=$(openssl rand -hex 2 | tr -dc 'a-f0-9')
|
||||
|
||||
cat <<EOF >> $GITHUB_ENV
|
||||
CHART_NAME=${CHART_NAME}
|
||||
RELEASE_NAME=${CHART_NAME}-$(date +%s)
|
||||
NAMESPACE=ns-${CHART_NAME}-${RAND_SUFFIX}
|
||||
ROLLOUT_TIMEOUT_SECONDS=600s
|
||||
TEST_TIMEOUT_SECONDS=600s
|
||||
KUBECTL_TIMEOUT_SECONDS=60s
|
||||
should_cleanup=false
|
||||
skip_validate=false
|
||||
CHART_FOLDER=${example}/kubernetes/helm
|
||||
EOF
|
||||
|
||||
echo "Generated safe variables:" >> $GITHUB_STEP_SUMMARY
|
||||
echo "- CHART_NAME: ${CHART_NAME}" >> $GITHUB_STEP_SUMMARY
|
||||
|
||||
- name: Helm install
|
||||
id: install
|
||||
env:
|
||||
GOOGLE_CSE_ID: ${{ secrets.GOOGLE_CSE_ID }}
|
||||
GOOGLE_API_KEY: ${{ secrets.GOOGLE_API_KEY }}
|
||||
HUGGINGFACEHUB_API_TOKEN: ${{ secrets.HUGGINGFACEHUB_API_TOKEN }}
|
||||
HFTOKEN: ${{ secrets.HUGGINGFACEHUB_API_TOKEN }}
|
||||
value_file: ${{ matrix.value_file }}
|
||||
run: |
|
||||
set -xe
|
||||
echo "should_cleanup=true" >> $GITHUB_ENV
|
||||
if [[ ! -f ${{ github.workspace }}/${{ env.CHART_FOLDER }}/${value_file} ]]; then
|
||||
echo "No value file found, exiting test!"
|
||||
echo "skip_validate=true" >> $GITHUB_ENV
|
||||
echo "should_cleanup=false" >> $GITHUB_ENV
|
||||
exit 0
|
||||
fi
|
||||
|
||||
for img in `helm template -n $NAMESPACE $RELEASE_NAME oci://ghcr.io/opea-project/charts/${CHART_NAME} -f ${{ inputs.example }}/kubernetes/helm/${value_file} --version ${{ inputs.version }} | grep 'image:' | grep 'opea/' | awk '{print $2}' | xargs`;
|
||||
do
|
||||
# increase helm install wait for for vllm-gaudi case
|
||||
if [[ $img == *"vllm-gaudi"* ]]; then
|
||||
ROLLOUT_TIMEOUT_SECONDS=900s
|
||||
fi
|
||||
done
|
||||
if ! helm install \
|
||||
--create-namespace \
|
||||
--namespace $NAMESPACE \
|
||||
$RELEASE_NAME \
|
||||
oci://ghcr.io/opea-project/charts/${CHART_NAME} \
|
||||
--set global.HUGGINGFACEHUB_API_TOKEN=${HFTOKEN} \
|
||||
--set global.modelUseHostPath=/data2/hf_model \
|
||||
--set GOOGLE_API_KEY=${{ env.GOOGLE_API_KEY}} \
|
||||
--set GOOGLE_CSE_ID=${{ env.GOOGLE_CSE_ID}} \
|
||||
--set web-retriever.GOOGLE_API_KEY=${{ env.GOOGLE_API_KEY}} \
|
||||
--set web-retriever.GOOGLE_CSE_ID=${{ env.GOOGLE_CSE_ID}} \
|
||||
-f ${{ inputs.example }}/kubernetes/helm/${value_file} \
|
||||
--version ${{ inputs.version }} \
|
||||
--wait --timeout "$ROLLOUT_TIMEOUT_SECONDS"; then
|
||||
echo "Failed to install chart ${{ inputs.example }}"
|
||||
echo "skip_validate=true" >> $GITHUB_ENV
|
||||
.github/workflows/scripts/k8s-utils.sh dump_pods_status $NAMESPACE
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: Validate e2e test
|
||||
if: always()
|
||||
run: |
|
||||
set -xe
|
||||
if $skip_validate; then
|
||||
echo "Skip validate"
|
||||
else
|
||||
LOG_PATH=/home/$(whoami)/helm-logs
|
||||
chart=${{ env.CHART_NAME }}
|
||||
helm test -n $NAMESPACE $RELEASE_NAME --logs --timeout "$TEST_TIMEOUT_SECONDS" | tee ${LOG_PATH}/charts-${chart}.log
|
||||
exit_code=$?
|
||||
if [ $exit_code -ne 0 ]; then
|
||||
echo "Chart ${chart} test failed, please check the logs in ${LOG_PATH}!"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "Checking response results, make sure the output is reasonable. "
|
||||
teststatus=false
|
||||
if [[ -f $LOG_PATH/charts-${chart}.log ]] && \
|
||||
[[ $(grep -c "^Phase:.*Failed" $LOG_PATH/charts-${chart}.log) != 0 ]]; then
|
||||
teststatus=false
|
||||
${{ github.workspace }}/.github/workflows/scripts/k8s-utils.sh dump_all_pod_logs $NAMESPACE
|
||||
else
|
||||
teststatus=true
|
||||
fi
|
||||
|
||||
if [ $teststatus == false ]; then
|
||||
echo "Response check failed, please check the logs in artifacts!"
|
||||
exit 1
|
||||
else
|
||||
echo "Response check succeeded!"
|
||||
exit 0
|
||||
fi
|
||||
fi
|
||||
|
||||
- name: Helm uninstall
|
||||
if: always()
|
||||
run: |
|
||||
if $should_cleanup; then
|
||||
helm uninstall $RELEASE_NAME --namespace $NAMESPACE
|
||||
if ! kubectl delete ns $NAMESPACE --timeout=$KUBECTL_TIMEOUT_SECONDS; then
|
||||
kubectl delete pods --namespace $NAMESPACE --force --grace-period=0 --all
|
||||
kubectl delete ns $NAMESPACE --force --grace-period=0 --timeout=$KUBECTL_TIMEOUT_SECONDS
|
||||
fi
|
||||
fi
|
||||
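A note on the validate step above: `helm test` is piped through `tee`, so `$?` alone would report tee's exit status. A minimal bash sketch of the `PIPESTATUS` capture used there (the log path here is illustrative):

    # capture the status of the first command in a pipeline (bash-only)
    helm test -n "$NAMESPACE" "$RELEASE_NAME" --logs | tee /tmp/helm-test.log
    exit_code=${PIPESTATUS[0]}   # helm's status, not tee's
    echo "helm test exited with ${exit_code}"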
.github/workflows/_run-docker-compose.yml (vendored, new file, 223 lines)
@@ -0,0 +1,223 @@
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

name: Image Build
permissions: read-all
on:
  workflow_call:
    inputs:
      registry:
        description: Container Registry URL
        required: false
        default: ""
        type: string
      tag:
        description: Container Tag
        required: false
        default: "latest"
        type: string
      example:
        description: Example to test
        required: true
        type: string
      hardware:
        description: Hardware to run the test on
        required: true
        type: string
      diff_excluded_files:
        required: false
        type: string
        default: ""
      use_model_cache:
        required: false
        type: boolean
        default: false
      opea_branch:
        default: "main"
        required: false
        type: string
jobs:
  get-test-case:
    runs-on: ubuntu-latest
    outputs:
      test_cases: ${{ steps.test-case-matrix.outputs.test_cases }}
      CHECKOUT_REF: ${{ steps.get-checkout-ref.outputs.CHECKOUT_REF }}
    steps:
      - name: Get checkout ref
        id: get-checkout-ref
        run: |
          if [ "${{ github.event_name }}" == "pull_request" ] || [ "${{ github.event_name }}" == "pull_request_target" ]; then
            CHECKOUT_REF=refs/pull/${{ github.event.number }}/merge
          else
            CHECKOUT_REF=${{ github.ref }}
          fi
          echo "CHECKOUT_REF=${CHECKOUT_REF}" >> $GITHUB_OUTPUT
          echo "checkout ref ${CHECKOUT_REF}"

      - name: Checkout out Repo
        uses: actions/checkout@v4
        with:
          ref: ${{ steps.get-checkout-ref.outputs.CHECKOUT_REF }}
          fetch-depth: 0

      - name: Get test matrix
        shell: bash
        id: test-case-matrix
        run: |
          example_l=$(echo ${{ inputs.example }} | tr '[:upper:]' '[:lower:]')
          cd ${{ github.workspace }}/${{ inputs.example }}/tests
          run_test_cases=""

          if [[ "${{ inputs.hardware }}" == "gaudi"* ]]; then
            hardware="gaudi"
          elif [[ "${{ inputs.hardware }}" == "xeon"* ]]; then
            hardware="xeon"
          else
            hardware="${{ inputs.hardware }}"
          fi
          default_test_case=$(find . -type f -name "test_compose_on_$hardware.sh" | cut -d/ -f2)
          if [ "$default_test_case" ]; then run_test_cases="$default_test_case"; fi
          other_test_cases=$(find . -type f -name "test_compose_*_on_$hardware.sh" | cut -d/ -f2)
          echo "default_test_case=$default_test_case"
          echo "other_test_cases=$other_test_cases"

          if [ "${{ inputs.tag }}" == "ci" ]; then
            base_commit=$(curl -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" \
              "https://api.github.com/repos/opea-project/GenAIExamples/commits?sha=${{ github.event.pull_request.base.ref }}" | jq -r '.[0].sha')
            merged_commit=$(git log -1 --format='%H')
            changed_files="$(git diff --name-only ${base_commit} ${merged_commit} | grep -vE '${{ inputs.diff_excluded_files }}')" || true
          fi

          for test_case in $other_test_cases; do
            if [ "${{ inputs.tag }}" == "ci" ]; then
              flag=${test_case%_on_*}
              flag=${flag#test_compose_}
              if [[ $(printf '%s\n' "${changed_files[@]}" | grep ${{ inputs.example }} | grep ${flag}) ]]; then
                run_test_cases="$run_test_cases $test_case"
              fi
            else
              run_test_cases="$run_test_cases $test_case"
            fi
          done

          if [ -z "$run_test_cases" ] && [[ $(printf '%s\n' "${changed_files[@]}" | grep ${{ inputs.example }} | grep /tests/) ]]; then
            run_test_cases=$other_test_cases
          fi

          test_cases=$(echo $run_test_cases | tr ' ' '\n' | sort -u | jq -R '.' | jq -sc '.')
          echo "test_cases=$test_cases"
          echo "test_cases=$test_cases" >> $GITHUB_OUTPUT

  compose-test:
    needs: [get-test-case]
    if: ${{ needs.get-test-case.outputs.test_cases != '[""]' }}
    strategy:
      matrix:
        test_case: ${{ fromJSON(needs.get-test-case.outputs.test_cases) }}
      fail-fast: false
    runs-on: ${{ inputs.hardware }}
    continue-on-error: true
    steps:
      - name: Clean up Working Directory
        run: |
          sudo rm -rf ${{github.workspace}}/* || true

          echo "Cleaning up containers using ports..."
          cid=$(docker ps --format '{{.Names}} : {{.Ports}}' | grep -v ' : $' | grep -v 0.0.0.0:5000 | awk -F' : ' '{print $1}')
          if [[ ! -z "$cid" ]]; then docker stop $cid && docker rm $cid && sleep 1s; fi
          docker system prune -f

          echo "Cleaning up images ..."
          docker images --filter reference="*/*/*:latest" -q | xargs -r docker rmi && sleep 1s
          docker images --filter reference="*/*:ci" -q | xargs -r docker rmi && sleep 1s
          docker images --filter reference="*:5000/*/*" -q | xargs -r docker rmi && sleep 1s
          docker images --filter reference="opea/comps-base" -q | xargs -r docker rmi && sleep 1s
          docker images

      - name: Checkout out Repo
        uses: actions/checkout@v4
        with:
          ref: ${{ needs.get-test-case.outputs.CHECKOUT_REF }}
          fetch-depth: 0

      - name: Clean up container before test
        shell: bash
        run: |
          docker ps
          cd ${{ github.workspace }}/${{ inputs.example }}
          export test_case=${{ matrix.test_case }}
          export hardware=${{ inputs.hardware }}
          bash ${{ github.workspace }}/.github/workflows/scripts/docker_compose_clean_up.sh "containers"
          bash ${{ github.workspace }}/.github/workflows/scripts/docker_compose_clean_up.sh "ports"
          docker ps

      - name: Log in DockerHub
        uses: docker/login-action@v3.2.0
        with:
          username: ${{ secrets.DOCKERHUB_USER }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Run test
        shell: bash
        env:
          HUGGINGFACEHUB_API_TOKEN: ${{ secrets.HUGGINGFACEHUB_API_TOKEN }}
          HF_TOKEN: ${{ secrets.HUGGINGFACEHUB_API_TOKEN }}
          GOOGLE_CSE_ID: ${{ secrets.GOOGLE_CSE_ID }}
          GOOGLE_API_KEY: ${{ secrets.GOOGLE_API_KEY }}
          PINECONE_KEY: ${{ secrets.PINECONE_KEY }}
          PINECONE_KEY_LANGCHAIN_TEST: ${{ secrets.PINECONE_KEY_LANGCHAIN_TEST }}
          SDK_BASE_URL: ${{ secrets.SDK_BASE_URL }}
          SERVING_TOKEN: ${{ secrets.SERVING_TOKEN }}
          OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
          FINNHUB_API_KEY: ${{ secrets.FINNHUB_API_KEY }}
          FINANCIAL_DATASETS_API_KEY: ${{ secrets.FINANCIAL_DATASETS_API_KEY }}
          IMAGE_REPO: ${{ inputs.registry }}
          IMAGE_TAG: ${{ inputs.tag }}
          opea_branch: ${{ inputs.opea_branch }}
          example: ${{ inputs.example }}
          hardware: ${{ inputs.hardware }}
          test_case: ${{ matrix.test_case }}
          use_model_cache: ${{ inputs.use_model_cache }}
        run: |
          cd ${{ github.workspace }}/$example/tests
          if [[ "$IMAGE_REPO" == "" ]]; then export IMAGE_REPO="${OPEA_IMAGE_REPO}opea"; fi
          if [[ "$use_model_cache" == "true" ]]; then
            if [ -d "/data2/hf_model" ]; then
              export model_cache="/data2/hf_model"
            else
              echo "Model cache directory /data2/hf_model does not exist"
              export model_cache="$HOME/.cache/huggingface/hub"
            fi
            if [[ "$test_case" == *"rocm"* ]]; then
              export model_cache="/var/lib/GenAI/data"
            fi
          fi
          if [ -f "${test_case}" ]; then timeout 60m bash "${test_case}"; else echo "Test script ${test_case} not found, skip test!"; fi

      - name: Clean up container after test
        if: always()
        run: |
          set -x

          echo "Cleaning up containers using ports..."
          cid=$(docker ps --format '{{.Names}} : {{.Ports}}' | grep -v ' : $' | grep -v 0.0.0.0:5000 | awk -F' : ' '{print $1}')
          if [[ ! -z "$cid" ]]; then docker stop $cid && docker rm $cid && sleep 1s; fi

          echo "Cleaning up images ..."
          if [[ "${{ inputs.hardware }}" == "xeon"* ]]; then
            docker system prune -a -f
          else
            docker images --filter reference="*/*/*:latest" -q | xargs -r docker rmi && sleep 1s
            docker images --filter reference="*/*:ci" -q | xargs -r docker rmi && sleep 1s
            docker images --filter reference="*:5000/*/*" -q | xargs -r docker rmi && sleep 1s
            docker images --filter reference="opea/comps-base" -q | xargs -r docker rmi && sleep 1s
            docker system prune -f
          fi
          docker images

      - name: Publish pipeline artifact
        if: ${{ !cancelled() }}
        uses: actions/upload-artifact@v4
        with:
          name: ${{ inputs.hardware }}_${{ inputs.example }}_${{ matrix.test_case }}
          path: ${{ github.workspace }}/${{ inputs.example }}/tests/*.log
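The `get-test-case` job above hands its script list to the test matrix via `fromJSON`; a standalone sketch of that shell-to-JSON conversion (the file names here are illustrative):

    run_test_cases="test_compose_on_xeon.sh test_compose_vllm_on_xeon.sh"
    # one name per line -> dedupe -> quote each line -> slurp into a compact JSON array
    test_cases=$(echo $run_test_cases | tr ' ' '\n' | sort -u | jq -R '.' | jq -sc '.')
    echo "$test_cases"   # ["test_compose_on_xeon.sh","test_compose_vllm_on_xeon.sh"]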
.github/workflows/bum_list_check.yml (vendored, deleted, 50 lines)
@@ -1,50 +0,0 @@
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

name: Check Requirements

on: [pull_request]

jobs:
  check-requirements:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout PR branch
        uses: actions/checkout@v4

      - name: Save PR requirements
        run: |
          find . -name "requirements.txt" -exec cat {} \; | \
            grep -v '^\s*#' | \
            grep -v '^\s*$' | \
            grep -v '^\s*-' | \
            sed 's/^\s*//' | \
            awk -F'[>=<]' '{print $1}' | \
            sort -u > pr-requirements.txt
          cat pr-requirements.txt

      - name: Checkout main branch
        uses: actions/checkout@v4
        with:
          ref: main
          path: main-branch

      - name: Save main branch requirements
        run: |
          find ./main-branch -name "requirements.txt" -exec cat {} \; | \
            grep -v '^\s*#' | \
            grep -v '^\s*$' | \
            grep -v '^\s*-' | \
            sed 's/^\s*//' | \
            awk -F'[>=<]' '{print $1}' | \
            sort -u > main-requirements.txt
          cat main-requirements.txt

      - name: Compare requirements
        run: |
          comm -23 pr-requirements.txt main-requirements.txt > added-packages.txt
          if [ -s added-packages.txt ]; then
            echo "New packages found in PR:" && cat added-packages.txt
          else
            echo "No new packages found😊."
          fi
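The comparison step relies on `comm -23`, which prints lines unique to its first (sorted) input; a minimal illustration with made-up package names:

    printf 'numpy\npandas\nrequests\n' > pr-requirements.txt
    printf 'numpy\nrequests\n' > main-requirements.txt
    comm -23 pr-requirements.txt main-requirements.txt   # -> pandas (new in the PR)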
.github/workflows/chatqna_benchmark.yml (vendored, deleted, 78 lines)
@@ -1,78 +0,0 @@
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

name: benchmark test with chatqna_benchmark

on:
  # pull_request:
  #   branches: [main]
  #   types: [opened, reopened, ready_for_review, synchronize]
  # # inputs:
  # #   variables:
  # #     hardware:
  # #       description: 'Enter your param' #gaudi or xeon
  # #       required: true
  # #       default: xeon
  schedule:
    - cron: "35 0 * * 6"
  workflow_dispatch:
    inputs:
      hardware:
        description: 'Enter your hardware' #gaudi or xeon
        required: true
        default: gaudi

jobs:
  Example-test:
    runs-on: ${{ github.event.inputs.hardware || 'gaudi' }} #xeon #gaudi
    steps:
      - name: Clean Up Working Directory
        run: sudo rm -rf ${{github.workspace}}/*

      - name: Checkout out Repo
        uses: actions/checkout@v4

      - name: Clone repo GenAIEval
        run: |
          git clone https://github.com/opea-project/GenAIEval.git
          cd GenAIEval && git checkout v0.6

      - name: Run test
        env:
          HUGGINGFACEHUB_API_TOKEN: ${{ secrets.HUGGINGFACEHUB_API_TOKEN }}
          GOOGLE_CSE_ID: ${{ secrets.GOOGLE_CSE_ID }}
          GOOGLE_API_KEY: ${{ secrets.GOOGLE_API_KEY }}
          hardware: ${{ github.event.inputs.hardware || 'gaudi' }} #xeon
          mode: perf
          IMAGE_TAG: latest
          IMAGE_REPO_GAUDI: ${{ vars.IMAGE_REPO_GAUDI }}
          IMAGE_REPO_XEON: ${{ vars.IMAGE_REPO_XEON }}
        run: |
          # cd ${{ github.workspace }}/$example/tests
          cd ${{ github.workspace }}/ChatQnA/tests
          cp ../../GenAIEval/evals/benchmark/chatqna_benchmark.py .
          cp ../../GenAIEval/evals/benchmark/data.json ${{ github.workspace }}/ChatQnA/docker/${hardware}/

          if [ "$hardware" == "gaudi" ]; then IMAGE_REPO=$IMAGE_REPO_GAUDI; else IMAGE_REPO=$IMAGE_REPO_XEON; fi
          export IMAGE_REPO=${IMAGE_REPO}
          # example_l=$(echo $example | tr '[:upper:]' '[:lower:]')
          if [ -f test_chatqna_on_${hardware}.sh ]; then timeout 30m bash test_chatqna_on_${hardware}.sh > ${hardware}_output.log; else echo "Test script not found, skip test!"; fi

      - name: Process log and save to JSON
        env:
          hardware: ${{ github.event.inputs.hardware || 'gaudi' }} #xeon
        run: |
          cd ${{ github.workspace }}/ChatQnA/tests
          echo '{}' > ${hardware}_output.json
          echo $(grep -a 'Total Requests:' ${hardware}_output.log | awk '{print "{\"total_requests\": \""$3 "\"}"}') > ${hardware}_output.json
          echo $(grep -a 'P50 latency is' ${hardware}_output.log | awk '{print "{\"p50_latency\": \""$4 "\"}"}') >> ${hardware}_output.json
          echo $(grep -a 'P99 latency is' ${hardware}_output.log | awk '{print "{\"p99_latency\": \""$4 "\"}"}') >> ${hardware}_output.json
          jq -s 'add' ${hardware}_output.json > ${hardware}_final_output.json && mv ${hardware}_final_output.json ${hardware}_output.json

      - name: Publish pipeline artifact
        if: ${{ !cancelled() }}
        uses: actions/upload-artifact@v4
        with:
          path: |
            ${{ github.workspace }}/ChatQnA/tests/*.log
            ${{ github.workspace }}/ChatQnA/tests/*.json
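The log-processing step merges one JSON object per metric with `jq -s 'add'`, which slurps a stream of objects and combines them; a standalone sketch with sample values:

    printf '{"total_requests":"100"}\n{"p50_latency":"1.2s"}\n' | jq -s 'add'
    # -> {"total_requests":"100","p50_latency":"1.2s"}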
.github/workflows/check-online-doc-build.yml (vendored, new file, 35 lines)
@@ -0,0 +1,35 @@
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

name: Check Online Document Building
permissions: {}

on:
  pull_request:
    branches: [main]
    paths:
      - "**.md"
      - "**.rst"

jobs:
  build:
    runs-on: ubuntu-22.04
    steps:

      - name: Checkout
        uses: actions/checkout@v4
        with:
          path: GenAIExamples

      - name: Checkout docs
        uses: actions/checkout@v4
        with:
          repository: opea-project/docs
          path: docs

      - name: Build Online Document
        shell: bash
        run: |
          echo "build online doc"
          cd docs
          bash scripts/build.sh
.github/workflows/container-build.yml (vendored, deleted, 44 lines)
@@ -1,44 +0,0 @@
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

name: Container Build
permissions: read-all
on:
  workflow_dispatch:
  schedule:
    - cron: "35 1 * * 5"
jobs:
  # https://github.com/intel/ai-containers/blob/main/.github/action.yml
  build-containers:
    runs-on: docker
    env:
      REGISTRY: ${{ secrets.REGISTRY }}
      REPO: ${{ secrets.REPO }}
    steps:
      - uses: step-security/harden-runner@v2
        with:
          egress-policy: audit
      - uses: actions/checkout@v4
      - uses: docker/login-action@v3
        with:
          registry: ${{ secrets.REGISTRY }}
          username: ${{ secrets.REGISTRY_USER }}
          password: ${{ secrets.REGISTRY_TOKEN }}
      - name: Build Containers
        run: |
          docker compose -p ${GITHUB_RUN_NUMBER} build --no-cache
        working-directory: .github/workflows/docker
      - name: Print Containers to Summary
        run: |
          docker compose -p ${GITHUB_RUN_NUMBER} images --format json | jq -r --arg registry "$REGISTRY" '.[] | select(.Repository | contains($registry)) | .Tag' >> $GITHUB_STEP_SUMMARY
      - name: Push Containers
        run: |
          docker compose -p ${GITHUB_RUN_NUMBER} push
        working-directory: .github/workflows/docker
      - name: Un-Tag Containers
        run: |
          docker compose -p ${GITHUB_RUN_NUMBER} down --rmi all
        working-directory: .github/workflows/docker
      - name: Remove Containers
        if: always()
        run: docker system prune --force
.github/workflows/daily-update-vllm-version.yml (vendored, new file, 94 lines)
@@ -0,0 +1,94 @@
# Copyright (C) 2025 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

name: Daily update vLLM & vLLM-fork version

on:
  schedule:
    - cron: "30 22 * * *"
  workflow_dispatch:

env:
  BRANCH_NAME: "update"
  USER_NAME: "CICD-at-OPEA"
  USER_EMAIL: "CICD@opea.dev"

jobs:
  freeze-tag:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        include:
          - repo: vLLM
            repo_name: vllm-project/vllm
            ver_name: VLLM_VER
          - repo: vLLM-fork
            repo_name: HabanaAI/vllm-fork
            ver_name: VLLM_FORK_VER
      fail-fast: false
    permissions:
      contents: write
      pull-requests: write
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 0
          ref: ${{ github.ref }}

      - name: Set up Git
        run: |
          git config --global user.name ${{ env.USER_NAME }}
          git config --global user.email ${{ env.USER_EMAIL }}
          git remote set-url origin https://${{ env.USER_NAME }}:"${{ secrets.ACTION_TOKEN }}"@github.com/${{ github.repository }}.git
          git fetch

          if git ls-remote https://github.com/${{ github.repository }}.git "refs/heads/${{ env.BRANCH_NAME }}_${{ matrix.repo }}" | grep -q "refs/heads/${{ env.BRANCH_NAME }}_${{ matrix.repo }}"; then
            echo "branch ${{ env.BRANCH_NAME }}_${{ matrix.repo }} exists"
            git checkout ${{ env.BRANCH_NAME }}_${{ matrix.repo }}
          else
            echo "branch ${{ env.BRANCH_NAME }}_${{ matrix.repo }} does not exist"
            git checkout -b ${{ env.BRANCH_NAME }}_${{ matrix.repo }}
            git push origin ${{ env.BRANCH_NAME }}_${{ matrix.repo }}
            echo "branch ${{ env.BRANCH_NAME }}_${{ matrix.repo }} created successfully"
          fi

      - name: Run script
        run: |
          latest_vllm_ver=$(curl -s "https://api.github.com/repos/${{ matrix.repo_name }}/tags" | jq '.[0].name' -)
          latest_vllm_ver=$(echo "$latest_vllm_ver" | sed 's/"//g')
          echo "latest_vllm_ver=${latest_vllm_ver}" >> "$GITHUB_ENV"
          find . -type f -name "*.sh" -exec sed -i "s/${{ matrix.ver_name }}=.*/${{ matrix.ver_name }}=${latest_vllm_ver}/" {} \;

      - name: Commit changes
        run: |
          git add .
          if git diff-index --quiet HEAD --; then
            echo "No changes detected, skipping commit."
            exit 1
          else
            git commit -s -m "Update ${{ matrix.repo }} version to ${latest_vllm_ver}"
            git push --set-upstream origin ${{ env.BRANCH_NAME }}_${{ matrix.repo }}
          fi

      - name: Create Pull Request
        env:
          GH_TOKEN: ${{ secrets.ACTION_TOKEN }}
        run: |
          pr_count=$(curl -H "Authorization: token ${{ secrets.ACTION_TOKEN }}" -s "https://api.github.com/repos/${{ github.repository }}/pulls?state=all&head=${{ env.USER_NAME }}:${{ env.BRANCH_NAME }}_${{ matrix.repo }}" | jq '. | length')
          if [ $pr_count -gt 0 ]; then
            echo "Pull Request exists"
            pr_number=$(curl -H "Authorization: token ${{ secrets.ACTION_TOKEN }}" -s "https://api.github.com/repos/${{ github.repository }}/pulls?state=all&head=${{ env.USER_NAME }}:${{ env.BRANCH_NAME }}_${{ matrix.repo }}" | jq '.[0].number')
            gh pr edit ${pr_number} \
              --title "Update ${{ matrix.repo }} version to ${latest_vllm_ver}" \
              --body "Update ${{ matrix.repo }} version to ${latest_vllm_ver}"
            echo "Pull Request updated successfully"
          else
            echo "Pull Request does not exist..."
            gh pr create \
              -B main \
              -H ${{ env.BRANCH_NAME }}_${{ matrix.repo }} \
              --title "Update ${{ matrix.repo }} version to ${latest_vllm_ver}" \
              --body "Update ${{ matrix.repo }} version to ${latest_vllm_ver}"
            echo "Pull Request created successfully"
          fi
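The version bump above boils down to two commands: query the GitHub tags API for the newest tag, then rewrite every version assignment in the repo's shell scripts. A minimal sketch against the real vllm-project/vllm repo:

    latest=$(curl -s "https://api.github.com/repos/vllm-project/vllm/tags" | jq -r '.[0].name')
    # rewrite e.g. VLLM_VER=v0.8.0 -> VLLM_VER=<latest tag> in all shell scripts
    find . -type f -name "*.sh" -exec sed -i "s/VLLM_VER=.*/VLLM_VER=${latest}/" {} \;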
.github/workflows/daily_check_issue_and_pr.yml (vendored, new file, 29 lines)
@@ -0,0 +1,29 @@
# Copyright (C) 2025 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

name: Check stale issue and pr

on:
  schedule:
    - cron: "30 22 * * *"

jobs:
  close-issues:
    runs-on: ubuntu-latest
    permissions:
      issues: write
      pull-requests: write
    steps:
      - uses: actions/stale@v9
        with:
          days-before-issue-stale: 30
          days-before-pr-stale: 30
          days-before-issue-close: 7
          days-before-pr-close: 7
          stale-issue-message: "This issue is stale because it has been open 30 days with no activity. Remove stale label or comment or this will be closed in 7 days."
          stale-pr-message: "This PR is stale because it has been open 30 days with no activity. Remove stale label or comment or this will be closed in 7 days."
          close-issue-message: "This issue was closed because it has been stalled for 7 days with no activity."
          close-pr-message: "This PR was closed because it has been stalled for 7 days with no activity."
          repo-token: ${{ secrets.ACTION_TOKEN }}
          start-date: "2025-03-01T00:00:00Z"
          exempt-issue-labels: "Backlog"
.github/workflows/docker-compose-e2e.yml (vendored, deleted, 91 lines)
@@ -1,91 +0,0 @@
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

name: E2E test with docker compose

on:
  pull_request_target:
    branches: [main]
    types: [opened, reopened, ready_for_review, synchronize] # added `ready_for_review` since draft is skipped
    paths:
      - "**/docker/**"
      - "**/tests/**"
      - "**/ui/**"
      - "!**.md"
      - "!**.txt"
      - .github/workflows/docker-compose-e2e.yml
  workflow_dispatch:

# If there is a new commit, the previous jobs will be canceled
concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true

jobs:
  job1:
    uses: ./.github/workflows/reuse-get-test-matrix.yml
    with:
      diff_excluded_files: '.github|README.md|*.txt|deprecate|kubernetes|manifest|gmc|assets'

  mega-image-build:
    needs: job1
    strategy:
      matrix: ${{ fromJSON(needs.job1.outputs.run_matrix) }}
    uses: ./.github/workflows/reuse-image-build.yml
    with:
      image_tag: ${{ github.event.pull_request.head.sha }}
      mega_service: "${{ matrix.example }}"
      runner_label: "docker-build-${{ matrix.hardware }}"

  Example-test:
    needs: [job1, mega-image-build]
    strategy:
      matrix: ${{ fromJSON(needs.job1.outputs.run_matrix) }}
    runs-on: ${{ matrix.hardware }}
    continue-on-error: true
    steps:
      - name: Test example
        run: |
          echo "Matrix - example ${{ matrix.example }}, hardware ${{ matrix.hardware }}"

      - name: Clean Up Working Directory
        run: sudo rm -rf ${{github.workspace}}/*

      - name: Checkout out Repo
        uses: actions/checkout@v4
        with:
          ref: "refs/pull/${{ github.event.number }}/merge"

      - name: Run test
        env:
          HUGGINGFACEHUB_API_TOKEN: ${{ secrets.HUGGINGFACEHUB_API_TOKEN }}
          GOOGLE_CSE_ID: ${{ secrets.GOOGLE_CSE_ID }}
          GOOGLE_API_KEY: ${{ secrets.GOOGLE_API_KEY }}
          example: ${{ matrix.example }}
          hardware: ${{ matrix.hardware }}
          IMAGE_TAG: ${{ needs.mega-image-build.outputs.image_tag }}
          IMAGE_REPO_GAUDI: ${{ vars.IMAGE_REPO_GAUDI }}
          IMAGE_REPO_XEON: ${{ vars.IMAGE_REPO_XEON }}
        run: |
          cd ${{ github.workspace }}/$example/tests
          if [ "$hardware" == "gaudi" ]; then IMAGE_REPO=$IMAGE_REPO_GAUDI; else IMAGE_REPO=$IMAGE_REPO_XEON; fi
          export IMAGE_REPO=${IMAGE_REPO}
          example_l=$(echo $example | tr '[:upper:]' '[:lower:]')
          if [ -f test_${example_l}_on_${hardware}.sh ]; then timeout 30m bash test_${example_l}_on_${hardware}.sh; else echo "Test script not found, skip test!"; fi

      - name: Clean up container
        env:
          example: ${{ matrix.example }}
          hardware: ${{ matrix.hardware }}
        if: cancelled() || failure()
        run: |
          cd ${{ github.workspace }}/$example/docker/$hardware
          docker compose stop && docker compose rm -f
          echo y | docker system prune

      - name: Publish pipeline artifact
        if: ${{ !cancelled() }}
        uses: actions/upload-artifact@v4
        with:
          name: ${{ matrix.example }}-${{ matrix.hardware }}
          path: ${{ github.workspace }}/${{ matrix.example }}/tests/*.log
.github/workflows/docker/docker-compose.yaml (vendored, deleted, 29 lines)
@@ -1,29 +0,0 @@
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

services:
  chatqna-megaservice-server:
    build:
      args:
        http_proxy: ${http_proxy}
        https_proxy: ${https_proxy}
        no_proxy: ${no_proxy}
      context: ../../../ChatQnA/microservice/xeon
      dockerfile: docker/Dockerfile
    image: ${REGISTRY}/${REPO}:chatqna-megaservice-server
    pull_policy: always
  chatqna-ui-server:
    build:
      context: ../../../ChatQnA/ui
    extends: chatqna-megaservice-server
    image: ${REGISTRY}/${REPO}:chatqna-ui-server
  codegen-megaservice-server:
    build:
      context: ../../../CodeGen/microservice/xeon
    extends: chatqna-megaservice-server
    image: ${REGISTRY}/${REPO}:codegen-megaservice-server
  codegen-ui-server:
    build:
      context: ../../../CodeGen/ui
    extends: chatqna-megaservice-server
    image: ${REGISTRY}/${REPO}:codegen-ui-server
.github/workflows/dockerhub-description.yml (vendored, new file, 117 lines)
@@ -0,0 +1,117 @@
# Copyright (C) 2025 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

name: Update Docker Hub Description
on:
  schedule:
    - cron: "0 0 * * 0"
  workflow_dispatch:

jobs:
  get-images-matrix:
    runs-on: ubuntu-latest
    outputs:
      examples_json: ${{ steps.extract.outputs.examples_json }}
    steps:
      - name: Checkout repository
        uses: actions/checkout@v3

      - name: Extract images info and generate JSON matrix
        id: extract
        run: |
          #!/bin/bash
          set -e
          images=$(awk -F'|' '/^\| *\[opea\// {
            gsub(/^ +| +$/, "", $2);
            gsub(/^ +| +$/, "", $4);
            gsub(/^ +| +$/, "", $5);

            # Extract the path portion of the dockerHub link from the Example Images column
            match($2, /\(https:\/\/hub\.docker\.com\/r\/[^)]*\)/);
            repository = substr($2, RSTART, RLENGTH);
            # Remove the prefix and the trailing right bracket
            sub(/^\(https:\/\/hub\.docker\.com\/r\//, "", repository);
            sub(/\)$/, "", repository);

            # Description: direct assignment
            description = $4;

            # Extract the content of the github link from the Readme column
            match($5, /\(https:\/\/github\.com\/[^)]*\)/);
            readme_url = substr($5, RSTART, RLENGTH);
            # Remove the prefix and the trailing right bracket
            sub(/^\(https:\/\/github\.com\//, "", readme_url);
            sub(/\)$/, "", readme_url);
            # Remove blob information, such as "blob/main/" or "blob/habana_main/"
            gsub(/blob\/[^/]+\//, "", readme_url);
            # Remove the organization name and keep only the file path, e.g. "opea-project/GenAIExamples/AudioQnA/README.md" becomes "GenAIExamples/AudioQnA/README.md"
            sub(/^[^\/]+\//, "", readme_url);

            # Generate JSON object string
            printf "{\"repository\":\"%s\",\"short-description\":\"%s\",\"readme-filepath\":\"%s\"}\n", repository, description, readme_url;
          }' docker_images_list.md)

          # Concatenate all JSON objects into a JSON array, using paste to separate them with commas
          json="[$(echo "$images" | paste -sd, -)]"
          echo "$json"
          # Set as output variable for subsequent jobs to use
          # (written via $GITHUB_OUTPUT; the legacy ::set-output command is deprecated)
          echo "examples_json=$json" >> "$GITHUB_OUTPUT"

  check-images-matrix:
    runs-on: ubuntu-latest
    needs: get-images-matrix
    if: ${{ needs.get-images-matrix.outputs.examples_json != '' }}
    strategy:
      matrix:
        image: ${{ fromJSON(needs.get-images-matrix.outputs.examples_json) }}
      fail-fast: false
    steps:
      - name: Check dockerhub description
        run: |
          echo "dockerhub description for ${{ matrix.image.repository }}"
          echo "short-description: ${{ matrix.image.short-description }}"
          echo "readme-filepath: ${{ matrix.image.readme-filepath }}"

  dockerHubDescription:
    runs-on: ubuntu-latest
    needs: get-images-matrix
    if: ${{ needs.get-images-matrix.outputs.examples_json != '' }}
    strategy:
      matrix:
        image: ${{ fromJSON(needs.get-images-matrix.outputs.examples_json) }}
      fail-fast: false
    steps:
      - name: Checkout GenAIExamples
        uses: actions/checkout@v4
        with:
          repository: opea-project/GenAIExamples
          path: GenAIExamples

      - name: Checkout GenAIComps
        uses: actions/checkout@v4
        with:
          repository: opea-project/GenAIComps
          path: GenAIComps

      - name: Checkout vllm-openvino
        uses: actions/checkout@v4
        with:
          repository: vllm-project/vllm
          path: vllm

      - name: Checkout vllm-gaudi
        uses: actions/checkout@v4
        with:
          repository: HabanaAI/vllm-fork
          ref: habana_main
          path: vllm-fork

      - name: add dockerhub description
        uses: peter-evans/dockerhub-description@v4
        with:
          username: ${{ secrets.DOCKERHUB_USER }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
          repository: ${{ matrix.image.repository }}
          short-description: ${{ matrix.image.short-description }}
          readme-filepath: ${{ matrix.image.readme-filepath }}
          enable-url-completion: false
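The extract step publishes its JSON through `$GITHUB_OUTPUT`. For reference, the deprecated `::set-output` workflow command and the supported file-based form, side by side (variable name matches the step above):

    echo "::set-output name=examples_json::$json"   # deprecated, disabled on current runners
    echo "examples_json=$json" >> "$GITHUB_OUTPUT"  # supported replacement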
.github/workflows/image-build-on-push.yml (vendored, deleted, 33 lines)
@@ -1,33 +0,0 @@
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
# Test
name: Build latest images on push event

on:
  push:
    branches: [ 'main' ]
    paths:
      - "**/docker/*.py"
      - "**/docker/Dockerfile"
      - "**/docker/ui/**"
  workflow_dispatch:

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}-on-push
  cancel-in-progress: true

jobs:
  job1:
    uses: ./.github/workflows/reuse-get-test-matrix.yml

  mega-image-build:
    needs: job1
    strategy:
      matrix:
        workload: ${{ fromJSON(needs.job1.outputs.run_matrix).include.*.example }}
        hardware: ["gaudi","xeon"]
    uses: ./.github/workflows/reuse-image-build.yml
    with:
      image_tag: latest
      mega_service: "${{ matrix.workload }}"
      runner_label: docker-build-${{ matrix.hardware }}
.github/workflows/manifest-e2e.yml (vendored, deleted, 111 lines)
@@ -1,111 +0,0 @@
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

name: E2E test with manifests

on:
  pull_request:
    branches: [main]
    types: [opened, reopened, ready_for_review, synchronize] # added `ready_for_review` since draft is skipped
    paths:
      - "**/kubernetes/manifests/**"
      - "**/tests/test_manifest**"
      - "!**.md"
      - "!**.txt"
  workflow_dispatch:

concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true

jobs:
  job1:
    uses: ./.github/workflows/reuse-get-test-matrix.yml
    with:
      diff_excluded_files: '.github|deprecated|docker|assets|*.md|*.txt'
      xeon_server_label: 'xeon'
      gaudi_server_label: 'gaudi'

  mega-image-build:
    needs: job1
    strategy:
      matrix: ${{ fromJSON(needs.job1.outputs.run_matrix) }}
    uses: ./.github/workflows/reuse-image-build.yml
    with:
      image_tag: ${{ github.event.pull_request.head.sha }}
      mega_service: "${{ matrix.example }}"
      runner_label: "docker-build-${{ matrix.hardware }}"

  manifest-test:
    needs: [job1, mega-image-build]
    strategy:
      matrix: ${{ fromJSON(needs.job1.outputs.run_matrix) }}
    runs-on: "k8s-${{ matrix.hardware }}"
    continue-on-error: true
    steps:
      - name: E2e test manifest
        run: |
          echo "Matrix - manifest: ${{ matrix.example }}"

      - name: Clean Up Working Directory
        run: sudo rm -rf ${{github.workspace}}/*

      - name: Checkout out Repo
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Set variables
        run: |
          if [ ${{ matrix.hardware }} == "gaudi" ]; then IMAGE_REPO=${{ vars.IMAGE_REPO_GAUDI }}; else IMAGE_REPO=${{ vars.IMAGE_REPO_XEON }}; fi
          echo "IMAGE_REPO=$IMAGE_REPO" >> $GITHUB_ENV
          echo "IMAGE_TAG=${{ needs.mega-image-build.outputs.image_tag }}" >> $GITHUB_ENV
          lower_example=$(echo "${{ matrix.example }}" | tr '[:upper:]' '[:lower:]')
          echo "NAMESPACE=$lower_example-$(date +%Y%m%d%H%M%S)" >> $GITHUB_ENV
          echo "ROLLOUT_TIMEOUT_SECONDS=1800s" >> $GITHUB_ENV
          echo "KUBECTL_TIMEOUT_SECONDS=60s" >> $GITHUB_ENV
          echo "continue_test=true" >> $GITHUB_ENV
          echo "should_cleanup=false" >> $GITHUB_ENV
          echo "skip_validate=true" >> $GITHUB_ENV
          echo "NAMESPACE=$NAMESPACE"

      - name: Kubectl install
        id: install
        run: |
          if [[ ! -f ${{ github.workspace }}/${{ matrix.example }}/tests/test_manifest_on_${{ matrix.hardware }}.sh ]]; then
            echo "No test script found, exit test!"
            exit 0
          else
            ${{ github.workspace }}/${{ matrix.example }}/tests/test_manifest_on_${{ matrix.hardware }}.sh init_${{ matrix.example }}
            echo "should_cleanup=true" >> $GITHUB_ENV
            kubectl create ns $NAMESPACE
            ${{ github.workspace }}/${{ matrix.example }}/tests/test_manifest_on_${{ matrix.hardware }}.sh install_${{ matrix.example }} $NAMESPACE
            echo "Testing ${{ matrix.example }}, waiting for pod ready..."
            if kubectl rollout status deployment --namespace "$NAMESPACE" --timeout "$ROLLOUT_TIMEOUT_SECONDS"; then
              echo "Testing manifests ${{ matrix.example }}, waiting for pod ready done!"
              echo "skip_validate=false" >> $GITHUB_ENV
            else
              echo "Timeout waiting for pods in namespace $NAMESPACE to be ready!"
              exit 1
            fi
            sleep 60
          fi

      - name: Validate e2e test
        if: always()
        run: |
          if $skip_validate; then
            echo "Skip validate"
          else
            ${{ github.workspace }}/${{ matrix.example }}/tests/test_manifest_on_${{ matrix.hardware }}.sh validate_${{ matrix.example }} $NAMESPACE
          fi

      - name: Kubectl uninstall
        if: always()
        run: |
          if $should_cleanup; then
            if ! kubectl delete ns $NAMESPACE --timeout=$KUBECTL_TIMEOUT_SECONDS; then
              kubectl delete pods --namespace $NAMESPACE --force --grace-period=0 --all
              kubectl delete ns $NAMESPACE --force --grace-period=0 --timeout=$KUBECTL_TIMEOUT_SECONDS
            fi
          fi
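The install step's readiness gate is `kubectl rollout status` used as a boolean; it returns non-zero when the timeout expires before all deployments are available. A minimal standalone equivalent (namespace and timeout here are illustrative):

    if kubectl rollout status deployment --namespace demo-ns --timeout 1800s; then
      echo "all deployments ready"
    else
      echo "timed out waiting for pods" && exit 1
    fi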
.github/workflows/manifest-validate.yml (vendored, deleted, 54 lines)
@@ -1,54 +0,0 @@
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

name: Manifests Validate

on:
  pull_request:
    branches: [main]
    types: [opened, reopened, ready_for_review, synchronize] # added `ready_for_review` since draft is skipped
    paths:
      - "**/kubernetes/manifests/**"
      - .github/workflows/manifest-validate.yml
  workflow_dispatch:

# If there is a new commit, the previous jobs will be canceled
concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true

env:
  MANIFEST_DIR: "manifests"

jobs:
  manifests-validate:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout out Repo
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: changed files
        id: changed_files
        run: |
          set -xe
          changed_folder=$(git diff --name-only ${{ github.event.pull_request.base.sha }} ${{ github.event.pull_request.head.sha }} | \
            grep "kubernetes/manifests" | grep -vE '.github|README.md|*.txt|*.sh' | cut -d'/' -f1 | sort -u )
          echo "changed_folder: $changed_folder"
          if [ -z "$changed_folder" ]; then
            echo "No changes in manifests folder"
            echo "SKIP=true" >> $GITHUB_OUTPUT
            exit 0
          fi
          echo "SKIP=false" >> $GITHUB_OUTPUT
          for folder in $changed_folder; do
            folder_str="$folder_str $folder/kubernetes/manifests/"
          done
          echo "folder_str=$folder_str"
          echo "folder_str=$folder_str" >> $GITHUB_ENV

      - uses: docker://ghcr.io/yannh/kubeconform:latest
        if: steps.changed_files.outputs.SKIP == 'false'
        with:
          args: "-summary -output json ${{env.folder_str}}"
.github/workflows/manual-docker-clean.yml (vendored, new file, 31 lines)
@@ -0,0 +1,31 @@
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

name: Clean up container on manual event
on:
  workflow_dispatch:
    inputs:
      node:
        default: "rocm"
        description: "Hardware to clean"
        required: true
        type: string
      clean_list:
        default: ""
        description: "Docker containers to clean"
        required: false
        type: string

jobs:
  clean:
    runs-on: "${{ inputs.node }}"
    steps:
      - name: Clean up container
        run: |
          docker ps
          if [ "${{ inputs.clean_list }}" ]; then
            echo "----------stop and remove containers----------"
            docker stop ${{ inputs.clean_list }} && docker rm ${{ inputs.clean_list }}
            echo "----------container removed----------"
            docker ps
          fi
.github/workflows/manual-docker-publish.yml (vendored, new file, 61 lines)
@@ -0,0 +1,61 @@
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

name: Examples publish docker image on manual event
on:
  workflow_dispatch:
    inputs:
      node:
        default: "gaudi"
        description: "Hardware to run test"
        required: true
        type: string
      examples:
        default: ""
        description: 'List of examples to publish [AgentQnA,AudioQnA,ChatQnA,CodeGen,CodeTrans,DocIndexRetriever,DocSum,FaqGen,InstructionTuning,MultimodalQnA,ProductivitySuite,RerankFinetuning,SearchQnA,Translation,VideoQnA,VisualQnA]'
        required: false
        type: string
      images:
        default: ""
        description: 'List of images to publish [gmcmanager,gmcrouter]'
        required: false
        type: string
      tag:
        default: "rc"
        description: "Tag to publish, like [1.0rc]"
        required: true
        type: string
      publish_tags:
        default: "latest,1.x"
        description: "Tag list to apply to published images, like [latest,1.0]"
        required: false
        type: string

permissions: read-all
jobs:
  get-image-list:
    uses: ./.github/workflows/_get-image-list.yml
    with:
      examples: ${{ inputs.examples }}
      images: ${{ inputs.images }}

  publish:
    needs: [get-image-list]
    if: ${{ needs.get-image-list.outputs.matrix != '' }}
    strategy:
      matrix:
        image: ${{ fromJSON(needs.get-image-list.outputs.matrix) }}
      fail-fast: false
    runs-on: "docker-build-${{ inputs.node }}"
    steps:
      - uses: docker/login-action@v3.2.0
        with:
          username: ${{ secrets.DOCKERHUB_USER }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Image Publish
        uses: opea-project/validation/actions/image-publish@main
        with:
          local_image_ref: ${OPEA_IMAGE_REPO}opea/${{ matrix.image }}:${{ inputs.tag }}
          image_name: opea/${{ matrix.image }}
          publish_tags: ${{ inputs.publish_tags }}
.github/workflows/manual-docker-scan.yml (vendored, new file, 114 lines)
@@ -0,0 +1,114 @@
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

name: Examples docker images BoM/CVE scan on manual event
on:
  workflow_dispatch:
    inputs:
      node:
        default: "gaudi"
        description: "Hardware to run scan"
        required: true
        type: string
      examples:
        default: ""
        description: 'List of examples to scan "AgentQnA,AudioQnA,ChatQnA,CodeGen,CodeTrans,DocIndexRetriever,DocSum,InstructionTuning,MultimodalQnA,ProductivitySuite,RerankFinetuning,SearchQnA,Translation,VideoQnA,VisualQnA"'
        required: false
        type: string
      images:
        default: ""
        description: 'List of images to scan "gmcmanager,gmcrouter"'
        required: false
        type: string
      tag:
        default: "latest"
        description: "Tag for images to scan"
        required: true
        type: string
      sbom_scan:
        default: true
        description: 'Scan images for BoM'
        required: false
        type: boolean
      trivy_scan:
        default: true
        description: 'Scan images for CVE'
        required: false
        type: boolean

permissions: read-all
jobs:
  get-image-list:
    uses: ./.github/workflows/_get-image-list.yml
    with:
      examples: ${{ inputs.examples }}
      images: ${{ inputs.images }}

  scan-docker:
    needs: get-image-list
    runs-on: "docker-build-${{ inputs.node }}"
    if: ${{ needs.get-image-list.outputs.matrix != '' }}
    strategy:
      matrix:
        image: ${{ fromJson(needs.get-image-list.outputs.matrix) }}
      fail-fast: false
    steps:
      - name: Clean up Working Directory
        run: |
          sudo rm -rf ${{github.workspace}}/* || true
          docker system prune -f

      - name: Pull Image
        run: |
          docker pull ${OPEA_IMAGE_REPO}opea/${{ matrix.image }}:${{ inputs.tag }}
          echo "OPEA_IMAGE_REPO=${OPEA_IMAGE_REPO}" >> $GITHUB_ENV

      - name: SBOM Scan Container
        uses: anchore/sbom-action@v0.17.1
        if: ${{ inputs.sbom_scan }}
        with:
          image: ${{ env.OPEA_IMAGE_REPO }}opea/${{ matrix.image }}:${{ inputs.tag }}
          output-file: ${{ matrix.image }}-sbom-scan.txt
          format: 'spdx-json'

      - name: Security Scan Container
        uses: aquasecurity/trivy-action@0.24.0
        if: ${{ inputs.trivy_scan }}
        with:
          image-ref: ${{ env.OPEA_IMAGE_REPO }}opea/${{ matrix.image }}:${{ inputs.tag }}
          output: ${{ matrix.image }}-trivy-scan.txt
          format: 'table'
          exit-code: '1'
          ignore-unfixed: true
          vuln-type: 'os,library'
          severity: 'CRITICAL,HIGH'

      - name: Cleanup
        if: always()
        run: docker rmi -f ${OPEA_IMAGE_REPO}opea/${{ matrix.image }}:${{ inputs.tag }} || true

      - name: Collect Logs
        if: always()
        run: |
          mkdir -p /tmp/scan-${{ inputs.tag }}-${{ github.run_number }}
          mv ${{ matrix.image }}-*-scan.txt /tmp/scan-${{ inputs.tag }}-${{ github.run_number }}

  upload-artifacts:
    needs: scan-docker
    runs-on: "docker-build-${{ inputs.node }}"
    if: always()
    steps:
      - uses: actions/upload-artifact@v4.3.4
        with:
          name: sbom-scan-${{ inputs.tag }}-${{ github.run_number }}
          path: /tmp/scan-${{ inputs.tag }}-${{ github.run_number }}/*-sbom-scan.txt
          overwrite: true

      - uses: actions/upload-artifact@v4.3.4
        with:
          name: trivy-scan-${{ inputs.tag }}-${{ github.run_number }}
          path: /tmp/scan-${{ inputs.tag }}-${{ github.run_number }}/*-trivy-scan.txt
          overwrite: true

      - name: Remove Logs
        run: rm -rf /tmp/scan-${{ inputs.tag }}-${{ github.run_number }} && rm -rf /tmp/sbom-action-*
.github/workflows/manual-example-workflow.yml (vendored, new file, 102 lines)
@@ -0,0 +1,102 @@
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

name: Examples CD workflow on manual event
on:
  workflow_dispatch:
    inputs:
      nodes:
        default: "gaudi,xeon"
        description: "Hardware to run test [gaudi,xeon,rocm,arc,gaudi3,xeon-gnr]"
        required: true
        type: string
      examples:
        default: "ChatQnA"
        description: 'List of examples to test [AgentQnA,AudioQnA,ChatQnA,CodeGen,CodeTrans,DocIndexRetriever,DocSum,FaqGen,InstructionTuning,MultimodalQnA,ProductivitySuite,RerankFinetuning,SearchQnA,Translation,VideoQnA,VisualQnA,AvatarChatbot,Text2Image,WorkflowExecAgent,DBQnA,EdgeCraftRAG,GraphRAG]'
        required: true
        type: string
      tag:
        default: "latest"
        description: "Tag to apply to images"
        required: true
        type: string
      build:
        default: true
        description: 'Build test required images for Examples'
        required: false
        type: boolean
      test_compose:
        default: true
        description: 'Test examples with docker compose'
        required: false
        type: boolean
      test_helmchart:
        default: true
        description: 'Test examples with helm charts'
        required: false
        type: boolean
      opea_branch:
        default: "main"
        description: 'OPEA branch for image build'
        required: false
        type: string
      inject_commit:
        default: false
        description: "inject commit to docker images"
        required: false
        type: boolean
      use_model_cache:
        default: false
        description: "use model cache"
        required: false
        type: boolean

permissions: read-all
jobs:
  get-test-matrix:
    runs-on: ubuntu-latest
    outputs:
      examples: ${{ steps.get-matrix.outputs.examples }}
      nodes: ${{ steps.get-matrix.outputs.nodes }}
    steps:
      - name: Create Matrix
        id: get-matrix
        run: |
          examples=($(echo ${{ inputs.examples }} | tr ',' ' '))
          examples_json=$(printf '%s\n' "${examples[@]}" | sort -u | jq -R '.' | jq -sc '.')
          echo "examples=$examples_json" >> $GITHUB_OUTPUT
          nodes=($(echo ${{ inputs.nodes }} | tr ',' ' '))
          nodes_json=$(printf '%s\n' "${nodes[@]}" | sort -u | jq -R '.' | jq -sc '.')
          echo "nodes=$nodes_json" >> $GITHUB_OUTPUT

  build-comps-base:
    needs: [get-test-matrix]
    strategy:
      matrix:
        node: ${{ fromJson(needs.get-test-matrix.outputs.nodes) }}
    uses: ./.github/workflows/_build_comps_base_image.yml
    with:
      node: ${{ matrix.node }}
      build: ${{ fromJSON(inputs.build) }}
      tag: ${{ inputs.tag }}
      opea_branch: ${{ inputs.opea_branch }}

  run-examples:
    needs: [get-test-matrix, build-comps-base]
    strategy:
      matrix:
        example: ${{ fromJson(needs.get-test-matrix.outputs.examples) }}
        node: ${{ fromJson(needs.get-test-matrix.outputs.nodes) }}
      fail-fast: false
    uses: ./.github/workflows/_example-workflow.yml
    with:
      node: ${{ matrix.node }}
      example: ${{ matrix.example }}
      tag: ${{ inputs.tag }}
      build: ${{ fromJSON(inputs.build) }}
      test_compose: ${{ fromJSON(inputs.test_compose) }}
      test_helmchart: ${{ fromJSON(inputs.test_helmchart) }}
      opea_branch: ${{ inputs.opea_branch }}
      inject_commit: ${{ inputs.inject_commit }}
      use_model_cache: ${{ inputs.use_model_cache }}
    secrets: inherit
.github/workflows/manual-freeze-tag.yml (vendored, new file, 43 lines)
@@ -0,0 +1,43 @@
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

name: Freeze OPEA images release tag

on:
  workflow_dispatch:
    inputs:
      tag:
        default: "1.1.0"
        description: "Tag to apply to images"
        required: true
        type: string

jobs:
  freeze-tag:
    runs-on: ubuntu-latest

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 0
          ref: ${{ github.ref }}

      - name: Set up Git
        run: |
          git config --global user.name "CICD-at-OPEA"
          git config --global user.email "CICD@opea.dev"
          git remote set-url origin https://CICD-at-OPEA:"${{ secrets.ACTION_TOKEN }}"@github.com/opea-project/GenAIExamples.git

      - name: Run script
        run: |
          IFS='.' read -r major minor patch <<< "${{ github.event.inputs.tag }}"
          echo "VERSION_MAJOR ${major}" > version.txt
          echo "VERSION_MINOR ${minor}" >> version.txt
          echo "VERSION_PATCH ${patch}" >> version.txt

      - name: Commit changes
        run: |
          git add .
          git commit -s -m "Freeze OPEA images tag"
          git push
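The `IFS='.' read` idiom above splits a semver tag into its three fields in one step; a quick illustration:

    IFS='.' read -r major minor patch <<< "1.1.0"
    echo "major=$major minor=$minor patch=$patch"   # major=1 minor=1 patch=0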
.github/workflows/manual-image-build.yml (vendored, new file, 67 lines)
@@ -0,0 +1,67 @@
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

name: Build specific images on manual event
on:
  workflow_dispatch:
    inputs:
      nodes:
        default: "gaudi,xeon"
        description: "Hardware to run test"
        required: true
        type: string
      example:
        default: "ChatQnA"
        description: 'Build images belonging to which example? [AgentQnA,AudioQnA,ChatQnA,CodeGen,CodeTrans,DocIndexRetriever,DocSum,FaqGen,InstructionTuning,MultimodalQnA,ProductivitySuite,RerankFinetuning,SearchQnA,Translation,VideoQnA,VisualQnA,AvatarChatbot,Text2Image,WorkflowExecAgent,DBQnA,EdgeCraftRAG,GraphRAG]'
        required: true
        type: string
      services:
        default: "chatqna,chatqna-without-rerank"
        description: 'Service list to build'
        required: true
        type: string
      tag:
        default: "latest"
        description: "Tag to apply to images"
        required: true
        type: string
      opea_branch:
        default: "main"
        description: 'OPEA branch for image build'
        required: false
        type: string
      inject_commit:
        default: false
        description: "inject commit to docker images"
        required: false
        type: boolean

jobs:
  get-test-matrix:
    runs-on: ubuntu-latest
    outputs:
      nodes: ${{ steps.get-matrix.outputs.nodes }}
    steps:
      - name: Create Matrix
        id: get-matrix
        run: |
          nodes=($(echo ${{ inputs.nodes }} | tr ',' ' '))
          nodes_json=$(printf '%s\n' "${nodes[@]}" | sort -u | jq -R '.' | jq -sc '.')
          echo "nodes=$nodes_json" >> $GITHUB_OUTPUT

  image-build:
    needs: get-test-matrix
    if: ${{ needs.get-test-matrix.outputs.nodes != '' }}
    strategy:
      matrix:
        node: ${{ fromJson(needs.get-test-matrix.outputs.nodes) }}
      fail-fast: false
    uses: ./.github/workflows/_example-workflow.yml
    with:
      node: ${{ matrix.node }}
      example: ${{ inputs.example }}
      services: ${{ inputs.services }}
      tag: ${{ inputs.tag }}
      opea_branch: ${{ inputs.opea_branch }}
      inject_commit: ${{ inputs.inject_commit }}
    secrets: inherit
.github/workflows/manual-reset-local-registry.yml (vendored, new file, 61 lines)
@@ -0,0 +1,61 @@
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

name: Clean up Local Registry on manual event
on:
  workflow_dispatch:
    inputs:
      nodes:
        default: "gaudi,xeon"
        description: "Hardware to clean up"
        required: true
        type: string

env:
  EXAMPLES: ${{ vars.NIGHTLY_RELEASE_EXAMPLES }}

jobs:
  get-build-matrix:
    runs-on: ubuntu-latest
    outputs:
      examples: ${{ steps.get-matrix.outputs.examples }}
      nodes: ${{ steps.get-matrix.outputs.nodes }}
    steps:
      - name: Create Matrix
        id: get-matrix
        run: |
          examples=($(echo ${EXAMPLES} | tr ',' ' '))
          examples_json=$(printf '%s\n' "${examples[@]}" | sort -u | jq -R '.' | jq -sc '.')
          echo "examples=$examples_json" >> $GITHUB_OUTPUT
          nodes=($(echo ${{ inputs.nodes }} | tr ',' ' '))
          nodes_json=$(printf '%s\n' "${nodes[@]}" | sort -u | jq -R '.' | jq -sc '.')
          echo "nodes=$nodes_json" >> $GITHUB_OUTPUT

  clean-up:
    needs: get-build-matrix
    if: ${{ needs.get-build-matrix.outputs.nodes != '' }}
    strategy:
      matrix:
        node: ${{ fromJson(needs.get-build-matrix.outputs.nodes) }}
      fail-fast: false
    runs-on: "docker-build-${{ matrix.node }}"
    steps:
      - name: Clean Up Local Registry
        run: |
          echo "Cleaning up local registry on ${{ matrix.node }}"
          bash /home/sdp/workspace/fully_registry_cleanup.sh
          docker ps | grep registry

  build:
    needs: [get-build-matrix, clean-up]
    if: ${{ needs.get-build-matrix.outputs.examples != '' }}
    strategy:
      matrix:
        example: ${{ fromJson(needs.get-build-matrix.outputs.examples) }}
        node: ${{ fromJson(needs.get-build-matrix.outputs.nodes) }}
      fail-fast: false
    uses: ./.github/workflows/_example-workflow.yml
    with:
      node: ${{ matrix.node }}
      example: ${{ matrix.example }}
    secrets: inherit
.github/workflows/nightly-docker-build-publish.yml (vendored, new file, 95 lines)
@@ -0,0 +1,95 @@
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

name: Nightly build/publish latest docker images

on:
  schedule:
    - cron: "30 14 * * 1-5" # UTC time
  workflow_dispatch:

env:
  EXAMPLES: ${{ vars.NIGHTLY_RELEASE_EXAMPLES }}
  TAG: "latest"
  PUBLISH_TAGS: "latest"

jobs:
  get-build-matrix:
    runs-on: ubuntu-latest
    outputs:
      examples_json: ${{ steps.get-matrix.outputs.examples_json }}
      EXAMPLES: ${{ steps.get-matrix.outputs.EXAMPLES }}
      TAG: ${{ steps.get-matrix.outputs.TAG }}
      PUBLISH_TAGS: ${{ steps.get-matrix.outputs.PUBLISH_TAGS }}
    steps:
      - name: Create Matrix
        id: get-matrix
        run: |
          examples=($(echo ${EXAMPLES} | tr ',' ' '))
          examples_json=$(printf '%s\n' "${examples[@]}" | sort -u | jq -R '.' | jq -sc '.')
          echo "examples_json=$examples_json" >> $GITHUB_OUTPUT
          echo "EXAMPLES=$EXAMPLES" >> $GITHUB_OUTPUT
          echo "TAG=$TAG" >> $GITHUB_OUTPUT
          echo "PUBLISH_TAGS=$PUBLISH_TAGS" >> $GITHUB_OUTPUT

  build-comps-base:
    needs: [get-build-matrix]
    uses: ./.github/workflows/_build_comps_base_image.yml
    with:
      node: gaudi

  build-images:
    needs: [get-build-matrix, build-comps-base]
    strategy:
      matrix:
        example: ${{ fromJSON(needs.get-build-matrix.outputs.examples_json) }}
      fail-fast: false
    uses: ./.github/workflows/_build_image.yml
    with:
      node: gaudi
      example: ${{ matrix.example }}
      inject_commit: true
    secrets: inherit

  test-example:
    needs: [get-build-matrix]
    if: ${{ needs.get-build-matrix.outputs.examples_json != '' }}
    strategy:
      matrix:
        example: ${{ fromJSON(needs.get-build-matrix.outputs.examples_json) }}
      fail-fast: false
    uses: ./.github/workflows/_example-workflow.yml
    with:
      node: xeon
      build: false
      example: ${{ matrix.example }}
      test_compose: true
      inject_commit: true
    secrets: inherit

  get-image-list:
    needs: [get-build-matrix]
    uses: ./.github/workflows/_get-image-list.yml
    with:
      examples: ${{ needs.get-build-matrix.outputs.EXAMPLES }}

  publish:
    needs: [get-build-matrix, get-image-list, build-images]
    if: always()
    strategy:
      matrix:
        image: ${{ fromJSON(needs.get-image-list.outputs.matrix) }}
      fail-fast: false
    runs-on: "docker-build-gaudi"
    steps:
      - uses: docker/login-action@v3.2.0
        with:
          username: ${{ secrets.DOCKERHUB_USER }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Image Publish
        uses: opea-project/validation/actions/image-publish@main
        with:
          local_image_ref: ${OPEA_IMAGE_REPO}opea/${{ matrix.image }}:${{ needs.get-build-matrix.outputs.TAG }}
          image_name: opea/${{ matrix.image }}
          publish_tags: ${{ needs.get-build-matrix.outputs.PUBLISH_TAGS }}
.github/workflows/path_detection.yml (deleted)
@@ -1,44 +0,0 @@
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

name: Check for missing Dockerfile paths in repo comps

on:
  pull_request:
    branches: [main]
    types: [opened, reopened, ready_for_review, synchronize]

jobs:
  check-dockerfile-paths:
    runs-on: ubuntu-latest

    steps:
      - name: Clean Up Working Directory
        run: sudo rm -rf ${{github.workspace}}/*

      - name: Checkout repo GenAIExamples
        uses: actions/checkout@v4

      - name: Clone repo GenAIComps
        run: |
          cd ..
          git clone https://github.com/opea-project/GenAIComps.git

      - name: Check for missing Dockerfile paths in GenAIComps
        run: |
          cd ${{github.workspace}}
          miss="FALSE"
          while IFS=: read -r file line content; do
            dockerfile_path=$(echo "$content" | awk -F '-f ' '{print $2}' | awk '{print $1}')
            if [[ ! -f "../GenAIComps/${dockerfile_path}" ]]; then
              miss="TRUE"
              echo "Missing Dockerfile: GenAIComps/${dockerfile_path} (Referenced in GenAIExamples/${file}:${line})"
            fi
          done < <(grep -Ern 'docker build .* -f comps/.+/Dockerfile' --include='*.md' .)

          if [[ "$miss" == "TRUE" ]]; then
            exit 1
          fi
        shell: bash
.github/workflows/pr-chart-e2e.yml (new file)
@@ -0,0 +1,81 @@
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

name: E2E Test with Helm Charts

on:
  pull_request_target:
    branches: [main]
    types: [opened, reopened, ready_for_review, synchronize] # added `ready_for_review` since draft is skipped
    paths:
      - "!**.md"
      - "**/helm/**"
  workflow_dispatch:

concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true

jobs:
  job1:
    name: Get-Test-Matrix
    permissions:
      contents: read
      pull-requests: read
    runs-on: ubuntu-latest
    outputs:
      run_matrix: ${{ steps.get-test-matrix.outputs.run_matrix }}
    steps:
      - name: Checkout Repo
        uses: actions/checkout@v4
        with:
          ref: "refs/pull/${{ github.event.number }}/merge"
          fetch-depth: 0

      - name: Get Test Matrix
        id: get-test-matrix
        run: |
          set -x
          echo "base_commit=${{ github.event.pull_request.base.sha }}"
          base_commit=${{ github.event.pull_request.base.sha }}
          merged_commit=$(git log -1 --format='%H')
          values_files=$(git diff --name-only ${base_commit} ${merged_commit} | \
            grep "values.yaml" | \
            sort -u ) #CodeGen/kubernetes/helm/cpu-values.yaml
          run_matrix="{\"include\":["
          for values_file in ${values_files}; do
            if [ -f "$values_file" ]; then
              valuefile=$(basename "$values_file") # cpu-values.yaml
              example=$(echo "$values_file" | cut -d'/' -f1) # CodeGen
              if [[ "$valuefile" == *"gaudi"* ]]; then
                hardware="gaudi"
              elif [[ "$valuefile" == *"rocm"* ]]; then
                hardware="rocm"
              elif [[ "$valuefile" == *"nv"* ]]; then
                continue
              else
                hardware="xeon"
              fi
              echo "example=${example}, hardware=${hardware}, valuefile=${valuefile}"
              if [[ $(echo ${run_matrix} | grep -c "{\"example\":\"${example}\",\"hardware\":\"${hardware}\"},") == 0 ]]; then
                run_matrix="${run_matrix}{\"example\":\"${example}\",\"hardware\":\"${hardware}\"},"
                echo "------------------ add one values file ------------------"
              fi
            fi
          done
          run_matrix="${run_matrix%,}"
          run_matrix=$run_matrix"]}"
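          # Example result (illustrative): a change to CodeGen/kubernetes/helm/cpu-values.yaml
          # yields run_matrix={"include":[{"example":"CodeGen","hardware":"xeon"}]}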
          echo "run_matrix="${run_matrix}""
          echo "run_matrix="${run_matrix}"" >> $GITHUB_OUTPUT

  helm-chart-test:
    needs: [job1]
    if: always() && ${{ fromJSON(needs.job1.outputs.run_matrix).length != 0 }}
    uses: ./.github/workflows/_helm-e2e.yml
    strategy:
      matrix: ${{ fromJSON(needs.job1.outputs.run_matrix) }}
    with:
      example: ${{ matrix.example }}
      hardware: ${{ matrix.hardware }}
      mode: "CI"
    secrets: inherit
.github/workflows/pr-check-duplicated-image.yml (new file)
@@ -0,0 +1,40 @@
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

name: Check Duplicated Images

on:
  pull_request:
    branches: [main]
    types: [opened, reopened, ready_for_review, synchronize]
    paths:
      - "**/docker_image_build/*.yaml"
      - ".github/workflows/pr-check-duplicated-image.yml"
      - ".github/workflows/scripts/check_duplicated_image.py"
  workflow_dispatch:

# If there is a new commit, the previous jobs will be canceled
concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true

jobs:
  check-duplicated-image:
    runs-on: ubuntu-latest
    steps:
      - name: Clean Up Working Directory
        run: sudo rm -rf ${{github.workspace}}/*

      - name: Checkout Repo
        uses: actions/checkout@v4

      - name: Check all the docker image build files
        run: |
          pip install PyYAML
          cd ${{github.workspace}}
          build_files=""
          for f in `find . -path "*/docker_image_build/build.yaml"`; do
            build_files="$build_files $f"
          done
          python3 .github/workflows/scripts/check_duplicated_image.py $build_files
        shell: bash
@@ -34,6 +34,11 @@ jobs:
      - name: Checkout out Repo
        uses: actions/checkout@v4

+     - name: Check Dangerous Command Injection
+       uses: opea-project/validation/actions/check-cmd@main
+       with:
+         work_dir: ${{ github.workspace }}
+
      - name: Docker Build
        run: |
          docker build -f ${{ github.workspace }}/.github/workflows/docker/${{ env.DOCKER_FILE_NAME }}.dockerfile -t ${{ env.REPO_NAME }}:${{ env.REPO_TAG }} .
.github/workflows/pr-dependency-review.yml (new file)
@@ -0,0 +1,23 @@
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

name: "Dependency Review"
on: [pull_request_target]

permissions:
  contents: read
jobs:
  dependency-review:
    runs-on: ubuntu-latest
    permissions:
      pull-requests: write
    steps:
      - name: "Checkout Repository"
        uses: actions/checkout@v4
      - name: Dependency Review
        uses: actions/dependency-review-action@v4
        with:
          comment-summary-in-pr: "always"
          fail-on-severity: "low"
          warn-only: true
          show-openssf-scorecard: false
.github/workflows/pr-docker-compose-e2e.yml (new file)
@@ -0,0 +1,47 @@
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

name: E2E test with docker compose

on:
  pull_request_target:
    branches: ["main", "*rc"]
    types: [opened, reopened, ready_for_review, synchronize] # added `ready_for_review` since draft is skipped
    paths:
      - "**/Dockerfile**"
      - "**.py"
      - "**/docker_compose/**"
      - "**/docker_image_build/**"
      - "**/tests/test_compose**"
      - "**/ui/**"
      - "!**.md"
      - "!**.txt"
      - .github/workflows/pr-docker-compose-e2e.yml

# If there is a new commit, the previous jobs will be canceled
concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true

jobs:
  get-test-matrix:
    if: ${{ !github.event.pull_request.draft }}
    uses: ./.github/workflows/_get-test-matrix.yml
    with:
      diff_excluded_files: '\.github|\.md|\.txt|kubernetes|gmc|assets|benchmark'

  example-test:
    needs: [get-test-matrix]
    if: ${{ needs.get-test-matrix.outputs.run_matrix != '' }}
    strategy:
      matrix: ${{ fromJSON(needs.get-test-matrix.outputs.run_matrix) }}
      fail-fast: false
    uses: ./.github/workflows/_run-docker-compose.yml
    with:
      registry: "opea"
      tag: "ci"
      example: ${{ matrix.example }}
      hardware: ${{ matrix.hardware }}
      use_model_cache: true
      diff_excluded_files: '\.github|\.md|\.txt|kubernetes|gmc|assets|benchmark'
    secrets: inherit
.github/workflows/pr-dockerfile-path-and-build-yaml-scan.yml (new file)
@@ -0,0 +1,109 @@
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

name: Compose file and dockerfile path checking

on:
  pull_request:
    branches: [main]
    types: [opened, reopened, ready_for_review, synchronize]

jobs:
  check-dockerfile-paths-in-README:
    runs-on: ubuntu-latest
    steps:
      - name: Clean Up Working Directory
        run: sudo rm -rf ${{github.workspace}}/*

      - name: Checkout Repo GenAIExamples
        uses: actions/checkout@v4

      - name: Clone Repo GenAIComps
        run: |
          cd ..
          git clone --depth 1 https://github.com/opea-project/GenAIComps.git

      - name: Check for Missing Dockerfile Paths in GenAIComps
        run: |
          cd ${{github.workspace}}
          miss="FALSE"
          while IFS=: read -r file line content; do
            dockerfile_path=$(echo "$content" | awk -F '-f ' '{print $2}' | awk '{print $1}')
            if [[ ! -f "../GenAIComps/${dockerfile_path}" ]]; then
              miss="TRUE"
              echo "Missing Dockerfile: GenAIComps/${dockerfile_path} (Referenced in GenAIExamples/${file}:${line})"
            fi
          done < <(grep -Ern 'docker build .* -f comps/.+/Dockerfile' --include='*.md' .)

          if [[ "$miss" == "TRUE" ]]; then
            exit 1
          fi
        shell: bash

  check-Dockerfile-in-build-yamls:
    runs-on: ubuntu-latest
    steps:
      - name: Clean Up Working Directory
        run: sudo rm -rf ${{github.workspace}}/*

      - name: Checkout Repo GenAIExamples
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Check Dockerfile path included in image build yaml
        if: always()
        run: |
          set -e
          shopt -s globstar
          no_add="FALSE"
          cd ${{github.workspace}}
          Dockerfiles=$(realpath $(find ./ -name '*Dockerfile*' ! -path '*/tests/*'))
          if [ -n "$Dockerfiles" ]; then
            for dockerfile in $Dockerfiles; do
              service=$(echo "$dockerfile" | awk -F '/GenAIExamples/' '{print $2}' | awk -F '/' '{print $2}')
              cd ${{github.workspace}}/$service/docker_image_build
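              # Gather every context+dockerfile pair declared in build.yaml and
              # normalize it to a full path (what the awk one-liner below does).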
              all_paths=$(realpath $(awk ' /context:/ { context = $2 } /dockerfile:/ { dockerfile = $2; combined = context "/" dockerfile; gsub(/\/+/, "/", combined); if (index(context, ".") > 0) {print combined}}' build.yaml) 2> /dev/null || true )
              if ! echo "$all_paths" | grep -q "$dockerfile"; then
                echo "AR: Update $dockerfile to GenAIExamples/$service/docker_image_build/build.yaml. The yaml is used for release images build."
                no_add="TRUE"
              fi
            done
          fi

          if [[ "$no_add" == "TRUE" ]]; then
            exit 1
          fi

  check-image-and-service-names-in-build-yaml:
    runs-on: ubuntu-latest
    steps:
      - name: Clean Up Working Directory
        run: sudo rm -rf ${{github.workspace}}/*

      - name: Checkout Repo GenAIExamples
        uses: actions/checkout@v4

      - name: Check name agreement in build.yaml
        run: |
          pip install ruamel.yaml
          cd ${{github.workspace}}
          consistency="TRUE"
          build_yamls=$(find . -name 'build.yaml')
          for build_yaml in $build_yamls; do
            message=$(python3 .github/workflows/scripts/check-name-agreement.py "$build_yaml")
            if [[ "$message" != *"consistent"* ]]; then
              consistency="FALSE"
              echo "Inconsistent service name and image name found in file $build_yaml."
              echo "$message"
            fi
          done

          if [[ "$consistency" == "FALSE" ]]; then
            echo "Please ensure that the service and image names are consistent in build.yaml, otherwise we cannot guarantee that your image will be published correctly."
            exit 1
          fi
        shell: bash
.github/workflows/pr-gmc-e2e.yaml.disabled (new file)
@@ -0,0 +1,35 @@
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

name: E2E test with GMC

on:
  pull_request_target:
    branches: ["main", "*rc"]
    types: [opened, reopened, ready_for_review, synchronize] # added `ready_for_review` since draft is skipped
    paths:
      - "**/kubernetes/gmc/**"
      - "**/tests/test_gmc**"
      - "!**.md"
      - "!**.txt"

concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true

jobs:
  job1:
    uses: ./.github/workflows/_get-test-matrix.yml
    with:
      diff_excluded_files: '\.github|docker_compose|assets|\.md|\.txt'
      test_mode: "gmc"

  gmc-test:
    needs: [job1]
    strategy:
      matrix: ${{ fromJSON(needs.job1.outputs.run_matrix) }}
    uses: ./.github/workflows/_gmc-e2e.yml
    with:
      example: ${{ matrix.example }}
      hardware: ${{ matrix.hardware }}
    secrets: inherit
.github/workflows/pr-link-path-scan.yml (new file)
@@ -0,0 +1,141 @@
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

name: Check hyperlinks and relative path validity

on:
  pull_request:
    branches: [main]
    types: [opened, reopened, ready_for_review, synchronize]

jobs:
  check-the-validity-of-hyperlinks-in-README:
    runs-on: ubuntu-latest
    steps:
      - name: Clean Up Working Directory
        run: sudo rm -rf ${{github.workspace}}/*

      - name: Checkout Repo GenAIExamples
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Check the Validity of Hyperlinks
        run: |
          cd ${{github.workspace}}
          delay=15
          fail="FALSE"
          merged_commit=$(git log -1 --format='%H')
          changed_files="$(git diff --name-status --diff-filter=ARM ${{ github.event.pull_request.base.sha }} ${merged_commit} | awk '/\.md$/ {print $NF}')"
          if [ -n "$changed_files" ]; then
            for changed_file in $changed_files; do
              # echo $changed_file
              url_lines=$(grep -H -Eo '\]\(http[s]?://[^)]+\)' "$changed_file" | grep -Ev 'GenAIExamples/blob/main') || true
              if [ -n "$url_lines" ]; then
                for url_line in $url_lines; do
                  # echo $url_line
                  url=$(echo "$url_line"|cut -d '(' -f2 | cut -d ')' -f1|sed 's/\.git$//')
                  path=$(echo "$url_line"|cut -d':' -f1 | cut -d'/' -f2-)
                  if [[ "$url" == "https://platform.openai.com/api-keys"* ]]; then
                    echo "Link "$url" from ${{github.workspace}}/$path needs to be verified by a real person."
                  else
                    sleep $delay
                    response=$(curl -L -s -o /dev/null -w "%{http_code}" "$url")|| true
                    if [ "$response" -ne 200 ]; then
                      echo "**********Validation failed ($response), try again**********"
                      response_retry=$(curl -s -o /dev/null -w "%{http_code}" "$url")
                      if [ "$response_retry" -eq 200 ]; then
                        echo "*****Retry successfully*****"
                      else
                        echo "Invalid link ($response_retry) from ${{github.workspace}}/$path: $url"
                        fail="TRUE"
                      fi
                    fi
                  fi
                done
              fi
            done
          else
            echo "No changed .md file."
          fi

          if [[ "$fail" == "TRUE" ]]; then
            exit 1
          else
            echo "All hyperlinks are valid."
          fi
        shell: bash

  check-the-validity-of-relative-path:
    runs-on: ubuntu-latest
    steps:
      - name: Clean up Working Directory
        run: sudo rm -rf ${{github.workspace}}/*

      - name: Checkout Repo GenAIExamples
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Checking Relative Path Validity
        run: |
          cd ${{github.workspace}}
          fail="FALSE"
          repo_name=${{ github.event.pull_request.head.repo.full_name }}
          branch="https://github.com/$repo_name/blob/${{ github.event.pull_request.head.ref }}"

          merged_commit=$(git log -1 --format='%H')
          changed_files="$(git diff --name-status --diff-filter=ARM ${{ github.event.pull_request.base.sha }} ${merged_commit} | awk '/\.md$/ {print $NF}')"
          png_lines=$(grep -Eo '\]\([^)]+\)' --include='*.md' -r .|grep -Ev 'http')
          if [ -n "$png_lines" ]; then
            for png_line in $png_lines; do
              refer_path=$(echo "$png_line"|cut -d':' -f1 | cut -d'/' -f2-)
              png_path=$(echo "$png_line"|cut -d '(' -f2 | cut -d ')' -f1)

              if [[ "${png_path:0:1}" == "/" ]]; then
                check_path=$png_path
              elif [[ "$png_path" == *#* ]]; then
                relative_path=$(echo "$png_path" | cut -d '#' -f1)
                if [ -n "$relative_path" ]; then
                  check_path=$(dirname "$refer_path")/$relative_path
                  png_path=$(echo "$png_path" | awk -F'#' '{print "#" $2}')
                else
                  check_path=$refer_path
                fi
              else
                check_path=$(dirname "$refer_path")/$png_path
              fi

              if [ -e "$check_path" ]; then
                real_path=$(realpath $check_path)
                if [[ "$png_line" == *#* ]]; then
if [ -n "changed_files" ] && echo "$changed_files" | grep -q "^${refer_path}$"; then
|
||||
                    url_dev=$branch$(echo "$real_path" | sed 's|.*/GenAIExamples||')$png_path
                    response=$(curl -I -L -s -o /dev/null -w "%{http_code}" "$url_dev")
                    if [ "$response" -ne 200 ]; then
                      echo "**********Validation failed, try again**********"
                      response_retry=$(curl -s -o /dev/null -w "%{http_code}" "$url_dev")
                      if [ "$response_retry" -eq 200 ]; then
                        echo "*****Retry successfully*****"
                      else
                        echo "Invalid path from ${{github.workspace}}/$refer_path: $png_path"
                        fail="TRUE"
                      fi
                    else
                      echo "Validation succeeded $png_line"
                    fi
                  fi
                fi
              else
                echo "${{github.workspace}}/$refer_path:$png_path does not exist"
                fail="TRUE"
              fi
            done
          fi

          if [[ "$fail" == "TRUE" ]]; then
            exit 1
          else
            echo "All relative paths are valid."
          fi
        shell: bash
.github/workflows/push-image-build.yml (new file)
@@ -0,0 +1,35 @@
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
# Test
name: Build latest images on push event

on:
  push:
    branches: [ 'main' ]
    paths:
      - "**.py"
      - "**Dockerfile*"
      - "**docker_image_build/build.yaml"
      - "**/ui/**"

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}-on-push
  cancel-in-progress: true

jobs:
  job1:
    uses: ./.github/workflows/_get-test-matrix.yml
    with:
      test_mode: "docker_image_build"

  image-build:
    needs: job1
    if: ${{ needs.job1.outputs.run_matrix != '{"include":[]}' }}
    strategy:
      matrix: ${{ fromJSON(needs.job1.outputs.run_matrix) }}
      fail-fast: false
    uses: ./.github/workflows/_example-workflow.yml
    with:
      node: ${{ matrix.hardware }}
      example: ${{ matrix.example }}
    secrets: inherit
@@ -5,7 +5,7 @@ on:
  push:
    branches: [ 'main','issue' ]
    paths:
-     - "**/docker/*/compose.yaml"
+     - "**/docker_compose/**/compose*.yaml"

name: Create an issue to GenAIInfra on push
jobs:
@@ -25,7 +25,7 @@
          base_commit=$(git rev-parse HEAD~1)
          merged_commit=$(git log -1 --format='%H')
          changed_files="$(git diff --name-only ${base_commit} ${merged_commit} | \
-           grep -E '.*/docker/.*/compose.yaml')" || true
+           grep -E '.*/docker_compose/.*/compose.*.yaml')" || true

          examples=$(printf '%s\n' "${changed_files[@]}" | grep '/' | cut -d'/' -f1 | sort -u)
          format_examples=$(echo "$examples" | tr '\n' ',')
@@ -40,7 +40,7 @@
      - name: Create Issue
        uses: daisy-ycguo/create-issue-action@stable
        with:
-         token: ${{ secrets.Infra_Issue_Token }}
+         token: ${{ secrets.ACTION_TOKEN }}
          owner: opea-project
          repo: GenAIInfra
          title: |
@@ -54,6 +54,6 @@

          ${{ env.changed_files }}

-         Please verify if the helm charts and manifests need to be changed accordingly.
+         Please verify if the helm charts need to be changed accordingly.

          > This issue was created automatically by CI.
.github/workflows/reuse-image-build.yml (deleted)
@@ -1,64 +0,0 @@
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

name: Image Build
permissions: read-all
on:
  workflow_call:
    inputs:
      image_repo:
        required: false
        type: string
      image_tag:
        required: true
        type: string
      mega_service:
        required: true
        type: string
      runner_label:
        required: false
        type: string
        default: 'docker-build-xeon'
    outputs:
      image_repo:
        description: "The image repository used for the image build"
        value: ${{ jobs.mega-image-build.outputs.image_repo }}
      image_tag:
        description: "The image tag used for the image build"
        value: ${{ jobs.mega-image-build.outputs.image_tag }}

jobs:
  mega-image-build:
    runs-on: ${{ inputs.runner_label }}
    outputs:
      image_repo: ${{ steps.build-megaservice-image.outputs.image_repo }}
      image_tag: ${{ steps.build-megaservice-image.outputs.image_tag }}
    steps:
      - name: Get checkout ref
        run: |
          if [ "${{ github.event_name }}" == "pull_request" ] || [ "${{ github.event_name }}" == "pull_request_target" ]; then
            echo "CHECKOUT_REF=refs/pull/${{ github.event.number }}/merge" >> $GITHUB_ENV
          else
            echo "CHECKOUT_REF=${{ github.ref }}" >> $GITHUB_ENV
          fi
          echo "checkout ref ${{ env.CHECKOUT_REF }}"

      - name: Checkout out Repo
        uses: actions/checkout@v4
        with:
          ref: ${{ env.CHECKOUT_REF }}
          fetch-depth: 0

      - name: Building MegaService Docker Image
        id: build-megaservice-image
        env:
          IMAGE_REPO: ${{ inputs.image_repo }}
          IMAGE_TAG: ${{ inputs.image_tag }}
          MEGA_SERVICE: ${{ inputs.mega_service }}
        run: |
          .github/workflows/scripts/build_push.sh ${{ env.MEGA_SERVICE}}
          if [ -z "${{ env.IMAGE_REPO }}" ]; then
            IMAGE_REPO=$OPEA_IMAGE_REPO
          fi
          echo "IMAGE_TAG=${IMAGE_TAG}"
          echo "image_tag=$IMAGE_TAG" >> $GITHUB_OUTPUT
.github/workflows/scripts/build_push.sh (deleted)
@@ -1,72 +0,0 @@
#!/bin/bash
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

set -xe

IMAGE_REPO=${IMAGE_REPO:-$OPEA_IMAGE_REPO}
IMAGE_TAG=${IMAGE_TAG:-latest}

function getImagenameFromMega() {
    echo $(echo "$1" | tr '[:upper:]' '[:lower:]')
}

function checkExist() {
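    # Query the CI-local registry (localhost:5000) tag list to see whether
    # IMAGE_TAG has already been pushed for this image.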
    IMAGE_NAME=$1
    if [ $(curl -X GET http://localhost:5000/v2/opea/${IMAGE_NAME}/tags/list | grep -c ${IMAGE_TAG}) -ne 0 ]; then
        echo "true"
    else
        echo "false"
    fi
}

function docker_build() {
    # check if IMAGE_TAG is not "latest" and the image exists in the registry
    if [ "$IMAGE_TAG" != "latest" ] && [ "$(checkExist $1)" == "true" ]; then
        echo "Image ${IMAGE_REPO}opea/$1:$IMAGE_TAG already exists in the registry"
        return
    fi
    # docker_build <service_name> <dockerfile>
    if [ -z "$2" ]; then
        DOCKERFILE_PATH=Dockerfile
    else
        DOCKERFILE_PATH=$2
    fi
    echo "Building ${IMAGE_REPO}opea/$1:$IMAGE_TAG using Dockerfile $DOCKERFILE_PATH"
    # if https_proxy and http_proxy are set, pass them to docker build
    if [ -z "$https_proxy" ]; then
        docker build --no-cache -t ${IMAGE_REPO}opea/$1:$IMAGE_TAG -f $DOCKERFILE_PATH .
    else
        docker build --no-cache -t ${IMAGE_REPO}opea/$1:$IMAGE_TAG --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f $DOCKERFILE_PATH .
    fi
    docker push ${IMAGE_REPO}opea/$1:$IMAGE_TAG
    docker rmi ${IMAGE_REPO}opea/$1:$IMAGE_TAG
}

# $1 is like "apple orange pear"
for MEGA_SVC in $1; do
    case $MEGA_SVC in
        "ChatQnA"|"CodeGen"|"CodeTrans"|"DocSum"|"Translation"|"AudioQnA"|"SearchQnA"|"FaqGen")
            cd $MEGA_SVC/docker
            IMAGE_NAME="$(getImagenameFromMega $MEGA_SVC)"
            docker_build ${IMAGE_NAME}
            cd ui
            docker_build ${IMAGE_NAME}-ui docker/Dockerfile
            if [ "$MEGA_SVC" == "ChatQnA" ];then
                docker_build ${IMAGE_NAME}-conversation-ui docker/Dockerfile.react
            fi
            if [ "$MEGA_SVC" == "DocSum" ];then
                docker_build ${IMAGE_NAME}-react-ui docker/Dockerfile.react
            fi
            if [ "$MEGA_SVC" == "CodeGen" ];then
                docker_build ${IMAGE_NAME}-react-ui docker/Dockerfile.react
            fi
            ;;
        "VisualQnA")
            echo "Not supported yet"
            ;;
        *)
            echo "Unknown function: $MEGA_SVC"
            ;;
    esac
done
.github/workflows/scripts/check-name-agreement.py (new file)
@@ -0,0 +1,46 @@
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

import argparse

from ruamel.yaml import YAML


def parse_yaml_file(file_path):
    yaml = YAML()
    with open(file_path, "r") as file:
        data = yaml.load(file)
    return data


def check_service_image_consistency(data):
    inconsistencies = []
    for service_name, service_details in data.get("services", {}).items():
        image_name = service_details.get("image", "")
        # Extract the image name part after the last '/'
        image_name_part = image_name.split("/")[-1].split(":")[0]
        # Check if the service name is a substring of the image name part
        if service_name not in image_name_part:
            # Get the line number of the service name
            line_number = service_details.lc.line + 1
            inconsistencies.append((service_name, image_name, line_number))
    return inconsistencies


def main():
    parser = argparse.ArgumentParser(description="Check service name and image name consistency in a YAML file.")
    parser.add_argument("file_path", type=str, help="The path to the YAML file.")
    args = parser.parse_args()

    data = parse_yaml_file(args.file_path)

    inconsistencies = check_service_image_consistency(data)
    if inconsistencies:
        for service_name, image_name, line_number in inconsistencies:
            print(f"Service name: {service_name}, Image name: {image_name}, Line number: {line_number}")
    else:
        print("All consistent")


if __name__ == "__main__":
    main()
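# Usage (illustrative): python3 check-name-agreement.py ChatQnA/docker_image_build/build.yaml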
.github/workflows/scripts/check_duplicated_image.py (new file)
@@ -0,0 +1,79 @@
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

import argparse
import os.path
import subprocess
import sys

import yaml

images = {}
dockerfiles = {}
errors = []


def check_docker_compose_build_definition(file_path):
    with open(file_path, "r") as f:
        data = yaml.load(f, Loader=yaml.FullLoader)
        for service in data["services"]:
            if "build" in data["services"][service] and "image" in data["services"][service]:
                bash_command = "echo " + data["services"][service]["image"]
                image = (
                    subprocess.run(["bash", "-c", bash_command], check=True, capture_output=True)
                    .stdout.decode("utf-8")
                    .strip()
                )
                build = data["services"][service]["build"]
                context = build.get("context", "")
                dockerfile = os.path.normpath(
                    os.path.join(os.path.dirname(file_path), context, build.get("dockerfile", ""))
                )
                if not os.path.isfile(dockerfile):
                    # dockerfile does not exist in the current repo context, assume it's in a 3rd party context
                    dockerfile = os.path.normpath(os.path.join(context, build.get("dockerfile", "")))
                item = {"file_path": file_path, "service": service, "dockerfile": dockerfile, "image": image}
                if image in images and dockerfile != images[image]["dockerfile"]:
                    errors.append(
                        f"ERROR: !!! Found Conflicts !!!\n"
                        f"Image: {image}, Dockerfile: {dockerfile}, defined in Service: {service}, File: {file_path}\n"
                        f"Image: {image}, Dockerfile: {images[image]['dockerfile']}, defined in Service: {images[image]['service']}, File: {images[image]['file_path']}"
                    )
                else:
                    # print(f"Add Image: {image} Dockerfile: {dockerfile}")
                    images[image] = item

                if dockerfile in dockerfiles and image != dockerfiles[dockerfile]["image"]:
                    errors.append(
                        f"WARNING: Different images using the same Dockerfile\n"
                        f"Dockerfile: {dockerfile}, Image: {image}, defined in Service: {service}, File: {file_path}\n"
                        f"Dockerfile: {dockerfile}, Image: {dockerfiles[dockerfile]['image']}, defined in Service: {dockerfiles[dockerfile]['service']}, File: {dockerfiles[dockerfile]['file_path']}"
                    )
                else:
                    dockerfiles[dockerfile] = item


def parse_arg():
    parser = argparse.ArgumentParser(
        description="Check for conflicts in image build definition in docker-compose.yml files"
    )
    parser.add_argument("files", nargs="+", help="list of files to be checked")
    return parser.parse_args()


def main():
    args = parse_arg()
    for file_path in args.files:
        check_docker_compose_build_definition(file_path)
    if errors:
        for error in errors:
            print(error)
        sys.exit(1)
    else:
        print("SUCCESS: No Conflicts Found.")
        return 0


if __name__ == "__main__":
    main()
.github/workflows/scripts/codeScan/hadolint.sh (modified)
@@ -5,16 +5,24 @@
source /GenAIExamples/.github/workflows/scripts/change_color
log_dir=/GenAIExamples/.github/workflows/scripts/codeScan
+ERROR_WARN=false

-find . -type f \( -name "Dockerfile*" \) -print -exec hadolint --ignore DL3006 --ignore DL3007 --ignore DL3008 --ignore DL3013 {} \; 2>&1 | tee ${log_dir}/hadolint.log
+find . -type f \( -name "Dockerfile*" \) -print -exec hadolint --ignore DL3006 --ignore DL3007 --ignore DL3008 --ignore DL3013 --ignore DL3018 --ignore DL3016 {} \; > ${log_dir}/hadolint.log

if [[ $(grep -c "error" ${log_dir}/hadolint.log) != 0 ]]; then
    $BOLD_RED && echo "Error!! Please Click on the artifact button to download and check error details." && $RESET
-   exit 1
+   echo $(grep "error" ${log_dir}/hadolint.log)
+   ERROR_WARN=true
fi

+if [[ $(grep -c "warning" ${log_dir}/hadolint.log) != 0 ]]; then
+   $BOLD_RED && echo "Warning!! Please Click on the artifact button to download and check warning details." && $RESET
+   echo $(grep "warning" ${log_dir}/hadolint.log)
+   ERROR_WARN=true
+fi
+
+if [ "$ERROR_WARN" = true ]; then
+   echo $ERROR_WARN
+   exit 1
+fi
$BOLD_PURPLE && echo "Congratulations, Hadolint check passed!" && $LIGHT_PURPLE && echo " You can click on the artifact button to see the log details." && $RESET
.github/workflows/scripts/docker_compose_clean_up.sh (new file)
@@ -0,0 +1,55 @@
#!/bin/bash
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

# The test machine is shared by several OPEA projects, so the test scripts can't simply run
# `docker compose down` to tear down all of the containers, ports, and networks at once.
# This script minimizes the impact of the clean-up instead.

test_case=${test_case:-"test_compose_on_gaudi.sh"}
hardware=${hardware:-"gaudi"}
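# Derive the compose yaml name from the test case name, e.g. (illustrative):
# test_case=test_compose_on_gaudi.sh -> flag=compose -> .../gaudi/compose.yaml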
flag=${test_case%_on_*}
flag=${flag#test_}
yaml_file=$(find . -type f -wholename "*${hardware}/${flag}.yaml")
echo $yaml_file

case "$1" in
containers)
    echo "Stop and remove all containers used by the services in $yaml_file ..."
    containers=$(cat $yaml_file | grep container_name | cut -d':' -f2)
    for container_name in $containers; do
        cid=$(docker ps -aq --filter "name=$container_name")
        if [[ ! -z "$cid" ]]; then docker stop $cid && docker rm $cid && sleep 1s; fi
    done
    ;;
ports)
    echo "Release all ports used by the services in $yaml_file ..."
    pip install jq yq
    ports=$(yq '.services[].ports[] | split(":")[0]' $yaml_file | grep -o '[0-9a-zA-Z_-]\+')
    echo "All ports list..."
    echo "$ports"
    for port in $ports; do
        if [[ $port =~ [a-zA-Z_-] ]]; then
            echo "Search port value $port from the test case..."
            port_fix=$(grep -E "export $port=" tests/$test_case | cut -d'=' -f2)
            if [[ "$port_fix" == "" ]]; then
                echo "Can't find the port value from the test case, use the default value in yaml..."
                port_fix=$(yq '.services[].ports[]' $yaml_file | grep $port | cut -d':' -f2 | grep -o '[0-9a-zA-Z]\+')
            fi
            port=$port_fix
        fi
        if [[ $port =~ [0-9] ]]; then
            if [[ $port == 5000 ]]; then
                echo "Error: Port 5000 is used by local docker registry, please DO NOT use it in docker compose deployment!!!"
                exit 1
            fi
            echo "Check port $port..."
            cid=$(docker ps --filter "publish=${port}" --format "{{.ID}}")
            if [[ ! -z "$cid" ]]; then docker stop $cid && docker rm $cid && echo "release $port"; fi
        fi
    done
    ;;
*)
    echo "Unknown function: $1"
    ;;
esac
.github/workflows/scripts/get_test_matrix.sh (new file)
@@ -0,0 +1,49 @@
#!/bin/bash
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

# example: 'ChatQnA', 'CodeGen', ...
# hardware: 'xeon', 'gaudi', ...

set -e
changed_files=$changed_files
test_mode=$test_mode
run_matrix="{\"include\":["

examples=$(printf '%s\n' "${changed_files[@]}" | grep '/' | cut -d'/' -f1 | sort -u)
for example in ${examples}; do
    if [[ ! -d $WORKSPACE/$example ]]; then continue; fi
    cd $WORKSPACE/$example
    if [[ ! $(find . -type f | grep ${test_mode}) ]]; then continue; fi
    cd tests
    ls -l
    if [[ "$test_mode" == "docker_image_build" ]]; then
        hardware_list="gaudi xeon"
    else
        find_name="test_${test_mode}*_on_*.sh"
        hardware_list=$(find . -type f -name "${find_name}" | cut -d/ -f2 | cut -d. -f1 | awk -F'_on_' '{print $2}'| sort -u)
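        # e.g. tests/test_compose_on_gaudi.sh -> "gaudi" (illustrative file name)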
    fi
    echo -e "Test supported hardware list: \n${hardware_list}"

    run_hardware=""
    if [[ $(printf '%s\n' "${changed_files[@]}" | grep ${example} | cut -d'/' -f2 | grep -E '\.py|Dockerfile*|ui|docker_image_build' ) ]]; then
        echo "run test on all hardware if megaservice or ui code change..."
        run_hardware=$hardware_list
    elif [[ $(printf '%s\n' "${changed_files[@]}" | grep ${example} | grep 'tests'| cut -d'/' -f3 | grep -vE '^test_|^_test' ) ]]; then
        echo "run test on all hardware if common test scripts change..."
        run_hardware=$hardware_list
    else
        for hardware in ${hardware_list}; do
            if [[ $(printf '%s\n' "${changed_files[@]}" | grep ${example} | grep -c ${hardware}) != 0 ]]; then
                run_hardware="${hardware} ${run_hardware}"
            fi
        done
    fi
    for hw in ${run_hardware}; do
        run_matrix="${run_matrix}{\"example\":\"${example}\",\"hardware\":\"${hw}\"},"
    done
done

run_matrix=$run_matrix"]}"
echo "run_matrix=${run_matrix}"
echo "run_matrix=${run_matrix}" >> $GITHUB_OUTPUT
.github/workflows/scripts/k8s-utils.sh (new executable file)
@@ -0,0 +1,76 @@
#!/bin/bash
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

set -e

function dump_pod_log() {
    pod_name=$1
    namespace=$2
    echo "-----------Pod: $pod_name---------"
    echo "#kubectl describe pod $pod_name -n $namespace"
    kubectl describe pod $pod_name -n $namespace
    echo "-----------------------------------"
    echo "#kubectl logs $pod_name -n $namespace"
    kubectl logs $pod_name -n $namespace --all-containers --prefix=true
    echo "-----------------------------------"
}

function dump_pods_status() {
    namespace=$1
    echo "-----DUMP POD STATUS in NS $namespace------"
    kubectl get pods -n $namespace -o wide
    echo "-----------------------------------"

    # Get all pods in the namespace and their statuses
    pods=$(kubectl get pods -n $namespace --no-headers)

    # Loop through each pod
    echo "$pods" | while read -r line; do
        pod_name=$(echo $line | awk '{print $1}')
        ready=$(echo $line | awk '{print $2}')
        status=$(echo $line | awk '{print $3}')

        # Extract the READY count
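        # e.g. READY "1/2" -> ready_count=1, required_count=2 (illustrative)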
        ready_count=$(echo $ready | cut -d'/' -f1)
        required_count=$(echo $ready | cut -d'/' -f2)

        # Check if the pod is not in "Running" status or READY count is less than required
        if [[ "$status" != "Running" || "$ready_count" -lt "$required_count" ]]; then
            dump_pod_log $pod_name $namespace
        fi
    done
}

function dump_all_pod_logs() {
    namespace=$1
    echo "------SUMMARY of POD STATUS in NS $namespace------"
    kubectl get pods -n $namespace -o wide
    echo "------SUMMARY of SVC STATUS in NS $namespace------"
    kubectl get services -n $namespace -o wide
    echo "------SUMMARY of endpoint STATUS in NS $namespace------"
    kubectl get endpoints -n $namespace -o wide
    echo "-----DUMP POD STATUS AND LOG in NS $namespace------"
    pods=$(kubectl get pods -n $namespace -o jsonpath='{.items[*].metadata.name}')
    for pod_name in $pods
    do
        dump_pod_log $pod_name $namespace
    done
}

if [ $# -eq 0 ]; then
    echo "Usage: $0 <function_name>"
    exit 1
fi

case "$1" in
    dump_pods_status)
        dump_pods_status $2
        ;;
    dump_all_pod_logs)
        dump_all_pod_logs $2
        ;;
    *)
        echo "Unknown function: $1"
        ;;
esac
.github/workflows/scripts/update_images_tag.sh (new file)
@@ -0,0 +1,44 @@
#!/bin/bash

# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

declare -A dict
dict["ghcr.io/huggingface/text-generation-inference"]="docker://ghcr.io/huggingface/text-generation-inference:latest-intel-cpu"

function get_latest_version() {
    repo_image=$1
    if [[ $repo_image == *"huggingface"* ]]; then
        revision=$(skopeo inspect --config ${dict[$repo_image]} | jq -r '.config.Labels["org.opencontainers.image.revision"][:7]')
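        # e.g. a revision label of "1a2b3c4d..." yields the tag "sha-1a2b3c4-intel-cpu" (illustrative)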
        latest_version="sha-$revision-intel-cpu"
    else
        versions=$(skopeo list-tags ${dict[$repo_image]} | jq -r '.Tags[]')
        printf "version list:\n$versions\n"
        latest_version=$(printf "%s\n" "${versions[@]}" | grep -E '^[\.0-9\-]+$' | sort -V | tail -n 1)
    fi
    echo "latest version: $latest_version"
    replace_image_version $repo_image $latest_version
}

function replace_image_version() {
    repo_image=$1
    version=$2
    if [[ -z "$version" ]]; then
        echo "version is empty"
    else
        echo "replace $repo_image:tag with $repo_image:$version"
        find . -name "Dockerfile" | xargs sed -i "s|$repo_image:sha[A-Za-z0-9\-]*|$repo_image:$version|g"
        find . -name "*.yaml" | xargs sed -i "s|$repo_image:sha[A-Za-z0-9\-]*|$repo_image:$version|g"
        find . -name "*.md" | xargs sed -i "s|$repo_image:sha[A-Za-z0-9\-]*|$repo_image:$version|g"
    fi
}

function main() {
    for repo_image in "${!dict[@]}"; do
        echo "::group::check $repo_image"
        get_latest_version $repo_image
        echo "::endgroup::"
    done
}

main
.github/workflows/weekly-example-test.yml (new file)
@@ -0,0 +1,55 @@
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

name: Weekly test all examples on multiple HWs

on:
  schedule:
    - cron: "30 2 * * 6" # UTC time
  workflow_dispatch:

env:
  EXAMPLES: ${{ vars.NIGHTLY_RELEASE_EXAMPLES }}
  NODES: "gaudi,xeon,rocm,arc"

jobs:
  get-test-matrix:
    runs-on: ubuntu-latest
    outputs:
      examples: ${{ steps.get-matrix.outputs.examples }}
      nodes: ${{ steps.get-matrix.outputs.nodes }}
    steps:
      - name: Create Matrix
        id: get-matrix
        run: |
          examples=($(echo ${EXAMPLES} | tr ',' ' '))
          examples_json=$(printf '%s\n' "${examples[@]}" | sort -u | jq -R '.' | jq -sc '.')
          echo "examples=$examples_json" >> $GITHUB_OUTPUT
          nodes=($(echo ${NODES} | tr ',' ' '))
          nodes_json=$(printf '%s\n' "${nodes[@]}" | sort -u | jq -R '.' | jq -sc '.')
          echo "nodes=$nodes_json" >> $GITHUB_OUTPUT

  build-comps-base:
    needs: [get-test-matrix]
    strategy:
      matrix:
        node: ${{ fromJson(needs.get-test-matrix.outputs.nodes) }}
    uses: ./.github/workflows/_build_comps_base_image.yml
    with:
      node: ${{ matrix.node }}

  run-examples:
    needs: [get-test-matrix, build-comps-base]
    strategy:
      matrix:
        example: ${{ fromJson(needs.get-test-matrix.outputs.examples) }}
        node: ${{ fromJson(needs.get-test-matrix.outputs.nodes) }}
      fail-fast: false
    uses: ./.github/workflows/_example-workflow.yml
    with:
      node: ${{ matrix.node }}
      example: ${{ matrix.example }}
      build: true
      test_compose: true
      test_helmchart: true
    secrets: inherit
.github/workflows/weekly-update-images.yml (new file)
@@ -0,0 +1,52 @@
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

name: Weekly update 3rd party images

on:
  workflow_dispatch:

permissions:
  contents: write
  pull-requests: write

jobs:
  freeze-images:
    runs-on: ubuntu-latest
    env:
      USER_NAME: "CICD-at-OPEA"
      USER_EMAIL: "CICD@opea.dev"
      BRANCH_NAME: "update_images_tag"
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 0
          ref: "main"

      - name: Install skopeo
        run: |
          sudo apt update
          sudo apt -y install skopeo

      - name: Set up Git
        run: |
          git config --global user.name ${{ env.USER_NAME }}
          git config --global user.email ${{ env.USER_EMAIL }}
          git remote set-url origin https://${{ env.USER_NAME }}:"${{ secrets.ACTION_TOKEN }}"@github.com/opea-project/GenAIExamples.git
          git checkout -b ${{ env.BRANCH_NAME }}

      - name: Run script
        run: |
          bash .github/workflows/scripts/update_images_tag.sh

      - name: Commit changes
        run: |
          git add .
          git commit -s -m "Update third party images tag"
          git push --set-upstream origin update_images_tag

      - name: create pull request
        run: gh pr create -B main -H ${{ env.BRANCH_NAME }} --title 'Update ghcr.io/huggingface/text-generation-inference image tag' --body 'Created by Github action'
        env:
          GH_TOKEN: ${{ secrets.ACTION_TOKEN }}
.gitignore (modified)
@@ -5,4 +5,4 @@
**/playwright/.cache/
**/test-results/

-__pycache__/
+__pycache__/
@@ -7,19 +7,17 @@ ci:

repos:
  - repo: https://github.com/pre-commit/pre-commit-hooks
-   rev: v4.6.0
+   rev: v5.0.0
    hooks:
      - id: end-of-file-fixer
        files: (.*\.(py|md|rst|yaml|yml|json|ts|js|html|svelte|sh))$
      - id: check-json
        exclude: |
          (?x)^(
-           ChatQnA/docker/ui/svelte/tsconfig.json|
+           ChatQnA/ui/svelte/tsconfig.json|
            SearchQnA/ui/svelte/tsconfig.json|
-           DocSum/docker/ui/svelte/tsconfig.json
+           DocSum/ui/svelte/tsconfig.json
          )$
      - id: check-yaml
        args: [--allow-multiple-documents]
      - id: debug-statements
      - id: requirements-txt-fixer
      - id: trailing-whitespace
@@ -76,12 +74,12 @@ repos:
        name: Unused noqa

  - repo: https://github.com/pycqa/isort
-   rev: 5.13.2
+   rev: 6.0.1
    hooks:
      - id: isort

  - repo: https://github.com/PyCQA/docformatter
-   rev: v1.7.5
+   rev: 06907d0
    hooks:
      - id: docformatter
        args: [
@@ -102,21 +100,21 @@ repos:
      - prettier@3.2.5

  - repo: https://github.com/psf/black.git
-   rev: 24.4.2
+   rev: 25.1.0
    hooks:
      - id: black
        files: (.*\.py)$

  - repo: https://github.com/asottile/blacken-docs
-   rev: 1.18.0
+   rev: 1.19.1
    hooks:
      - id: blacken-docs
        args: [--line-length=120, --skip-errors]
        additional_dependencies:
-         - black==24.4.2
+         - black==24.10.0

  - repo: https://github.com/codespell-project/codespell
-   rev: v2.3.0
+   rev: v2.4.1
    hooks:
      - id: codespell
        args: [-w]
@@ -124,7 +122,7 @@ repos:
      - tomli

  - repo: https://github.com/astral-sh/ruff-pre-commit
-   rev: v0.5.0
+   rev: v0.11.4
    hooks:
      - id: ruff
        args: [--fix, --exit-non-zero-on-fix, --no-cache]

@@ -1 +1 @@
-**/kubernetes/
+**/kubernetes/
.set_env.sh (new file)
@@ -0,0 +1,16 @@
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
#
# To announce the version of the code, create a version.txt with the following format:
# VERSION_MAJOR 1
# VERSION_MINOR 0
# VERSION_PATCH 0

VERSION_FILE="version.txt"
if [ -f $VERSION_FILE ]; then
    VER_OPEA_MAJOR=$(grep "VERSION_MAJOR" $VERSION_FILE | cut -d " " -f 2)
    VER_OPEA_MINOR=$(grep "VERSION_MINOR" $VERSION_FILE | cut -d " " -f 2)
    VER_OPEA_PATCH=$(grep "VERSION_PATCH" $VERSION_FILE | cut -d " " -f 2)
    export TAG=$VER_OPEA_MAJOR.$VER_OPEA_MINOR
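    # e.g. VERSION_MAJOR 1 + VERSION_MINOR 0 -> TAG=1.0 (the patch number is not used in the tag)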
    echo OPEA Version:$TAG
fi
AgentQnA/README.md (new file)
@@ -0,0 +1,302 @@
# Agents for Question Answering

## Table of contents

1. [Overview](#overview)
2. [Deploy with Docker](#deploy-with-docker)
3. [How to interact with the agent system with UI](#how-to-interact-with-the-agent-system-with-ui)
4. [Validate Services](#validate-services)
5. [Register Tools](#how-to-register-other-tools-with-the-ai-agent)
6. [Monitoring and Tracing](#monitor-and-tracing)

## Overview

This example showcases a hierarchical multi-agent system for question-answering applications. The architecture diagram below shows a supervisor agent that interfaces with the user and dispatches tasks to two worker agents to gather information and come up with answers. The worker RAG agent uses the retrieval tool to retrieve relevant documents from a knowledge base - a vector database. The worker SQL agent retrieves relevant data from a SQL database. Although not included in this example by default, other tools such as a web search tool or a knowledge graph query tool can be used by the supervisor agent to gather information from additional sources.
![Architecture Overview]()

The AgentQnA example is implemented using the component-level microservices defined in [GenAIComps](https://github.com/opea-project/GenAIComps). The flow chart below shows the information flow between different microservices for this example.

```mermaid
---
config:
  flowchart:
    nodeSpacing: 400
    rankSpacing: 100
    curve: linear
  themeVariables:
    fontSize: 50px
---
flowchart LR
    %% Colors %%
    classDef blue fill:#ADD8E6,stroke:#ADD8E6,stroke-width:2px,fill-opacity:0.5
    classDef orange fill:#FBAA60,stroke:#ADD8E6,stroke-width:2px,fill-opacity:0.5
    classDef orchid fill:#C26DBC,stroke:#ADD8E6,stroke-width:2px,fill-opacity:0.5
    classDef invisible fill:transparent,stroke:transparent;

    %% Subgraphs %%
    subgraph DocIndexRetriever-MegaService["DocIndexRetriever MegaService "]
        direction LR
        EM([Embedding MicroService]):::blue
        RET([Retrieval MicroService]):::blue
        RER([Rerank MicroService]):::blue
    end
    subgraph UserInput[" User Input "]
        direction LR
        a([User Input Query]):::orchid
        Ingest([Ingest data]):::orchid
    end
    AG_REACT([Agent MicroService - react]):::blue
    AG_RAG([Agent MicroService - rag]):::blue
    AG_SQL([Agent MicroService - sql]):::blue
    LLM_gen{{LLM Service <br>}}
    DP([Data Preparation MicroService]):::blue
    TEI_RER{{Reranking service<br>}}
    TEI_EM{{Embedding service <br>}}
    VDB{{Vector DB<br><br>}}
    R_RET{{Retriever service <br>}}

    %% Questions interaction
    direction LR
    a[User Input Query] --> AG_REACT
    AG_REACT --> AG_RAG
    AG_REACT --> AG_SQL
    AG_RAG --> DocIndexRetriever-MegaService
    EM ==> RET
    RET ==> RER
    Ingest[Ingest data] --> DP

    %% Embedding service flow
    direction LR
    AG_RAG <-.-> LLM_gen
    AG_SQL <-.-> LLM_gen
    AG_REACT <-.-> LLM_gen
    EM <-.-> TEI_EM
    RET <-.-> R_RET
    RER <-.-> TEI_RER

    direction TB
    %% Vector DB interaction
    R_RET <-.-> VDB
    DP <-.-> VDB
```

### Why should AI Agents be used for question-answering?

1. **Improve relevancy of retrieved context.**
   RAG agents can rephrase user queries, decompose user queries, and iterate to get the most relevant context for answering a user's question. Compared to conventional RAG, RAG agents significantly improve the correctness and relevancy of the answer because of the iterations they go through.
2. **Expand scope of skills.**
   The supervisor agent interacts with multiple worker agents that specialize in different skills (e.g., retrieve documents, write SQL queries, etc.). Thus, it can answer questions with different methods.
3. **Hierarchical multi-agents improve performance.**
   Expert worker agents, such as RAG agents and SQL agents, can provide high-quality output for different aspects of a complex query, and the supervisor agent can aggregate the information to provide a comprehensive answer. If only one agent is used and all tools are provided to this single agent, it can lead to large overhead or not use the best tool to provide accurate answers.

## Deploy with Docker

### 1. Set up environment
|
||||
#### First, clone the `GenAIExamples` repo.
|
||||
|
||||
```bash
|
||||
export WORKDIR=<your-work-directory>
|
||||
cd $WORKDIR
|
||||
git clone https://github.com/opea-project/GenAIExamples.git
|
||||
```

#### Second, set up environment variables.

##### For proxy environments only

```bash
export http_proxy="Your_HTTP_Proxy"
export https_proxy="Your_HTTPs_Proxy"
# Example: no_proxy="localhost, 127.0.0.1, 192.168.1.1"
export no_proxy="Your_No_Proxy"
```
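
For a typical single-node setup behind a proxy, a concrete assignment might look like the following sketch (the proxy host and port are illustrative placeholders, not real values):

```bash
# Illustrative values only; substitute your organization's actual proxy.
export http_proxy="http://proxy.example.com:912"
export https_proxy="http://proxy.example.com:912"
# Include the host's own IP so local microservices bypass the proxy.
export no_proxy="localhost,127.0.0.1,$(hostname -I | awk '{print $1}')"
```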

##### For using open-source LLMs

Set up a [HuggingFace](https://huggingface.co/) account and generate a [user access token](https://huggingface.co/docs/transformers.js/en/guides/private#step-1-generating-a-user-access-token).

Then set an environment variable with the token and another for a directory to download the models:

```bash
export HUGGINGFACEHUB_API_TOKEN=<your-HF-token>
export HF_CACHE_DIR=<directory-where-llms-are-downloaded> # to avoid redownloading models
```
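
To confirm the token is valid before launching any services, one quick check is to query the HuggingFace `whoami` endpoint (a minimal sketch; `huggingface-cli whoami` works as well if the `huggingface_hub` package is installed):

```bash
# A valid token returns your account details; an invalid one returns a 401 error.
curl -s -H "Authorization: Bearer $HUGGINGFACEHUB_API_TOKEN" https://huggingface.co/api/whoami-v2
```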

##### [Optional] OPENAI_API_KEY to use OpenAI models or Intel® AI for Enterprise Inference

To use OpenAI models, generate a key following these [instructions](https://platform.openai.com/api-keys).

To use a remote server running Intel® AI for Enterprise Inference, contact the cloud service provider or owner of the on-prem machine for a key to access the desired model on the server.

Then set the environment variable `OPENAI_API_KEY` with the key contents:

```bash
export OPENAI_API_KEY=<your-openai-key>
```

#### Third, set up environment variables for the selected hardware using the corresponding `set_env.sh`

##### Gaudi

```bash
source $WORKDIR/GenAIExamples/AgentQnA/docker_compose/intel/hpu/gaudi/set_env.sh
```

##### Xeon

```bash
source $WORKDIR/GenAIExamples/AgentQnA/docker_compose/intel/cpu/xeon/set_env.sh
```

For running on AMD ROCm GPUs, refer to the AMD ROCm deployment README in `docker_compose/amd/gpu/rocm`.

### 2. Launch the multi-agent system

Docker compose makes it convenient to launch the whole system, which includes microservices for the LLM, agents, UI, retrieval tool, vector database, dataprep, and telemetry. There are 3 docker compose files so users can pick and choose: a different retrieval tool can be substituted for the `DocIndexRetriever` example provided in our GenAIExamples repo, and the telemetry containers can be left out.

#### Launch on Gaudi

On Gaudi, `meta-llama/Meta-Llama-3.3-70B-Instruct` will be served using vLLM. The command below launches the multi-agent system with `DocIndexRetriever` as the retrieval tool for the worker RAG agent.

```bash
cd $WORKDIR/GenAIExamples/AgentQnA/docker_compose/intel/hpu/gaudi/
docker compose -f $WORKDIR/GenAIExamples/DocIndexRetriever/docker_compose/intel/cpu/xeon/compose.yaml -f compose.yaml up -d
```

> **Note**: To enable the web search tool, skip this step and proceed to the "[Optional] Web Search Tool Support" section.

To enable OpenTelemetry tracing, the `compose.telemetry.yaml` file needs to be merged along with the default `compose.yaml` file.
Gaudi example with the OpenTelemetry feature:

```bash
cd $WORKDIR/GenAIExamples/AgentQnA/docker_compose/intel/hpu/gaudi/
docker compose -f $WORKDIR/GenAIExamples/DocIndexRetriever/docker_compose/intel/cpu/xeon/compose.yaml -f compose.yaml -f compose.telemetry.yaml up -d
```
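
Once the telemetry containers are up, traces and metrics can be inspected in the bundled observability UIs; the ports below are taken from `compose.telemetry.yaml` (Jaeger on 16686, Grafana on 3000):

```bash
# Convenience echo of the observability endpoints exposed by compose.telemetry.yaml.
echo "Jaeger UI:  http://${ip_address}:16686"
echo "Grafana UI: http://${ip_address}:3000"
```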

##### [Optional] Web Search Tool Support

<details>
<summary> Instructions </summary>
A web search tool is supported in this example and can be enabled by running docker compose with the `compose.webtool.yaml` file.
The Google Search API is used. Follow the [instructions](https://python.langchain.com/docs/integrations/tools/google_search) to create an API key and enable the Custom Search API on a Google account. The environment variables `GOOGLE_CSE_ID` and `GOOGLE_API_KEY` need to be set.

```bash
cd $WORKDIR/GenAIExamples/AgentQnA/docker_compose/intel/hpu/gaudi/
export GOOGLE_CSE_ID="YOUR_ID"
export GOOGLE_API_KEY="YOUR_API_KEY"
docker compose -f $WORKDIR/GenAIExamples/DocIndexRetriever/docker_compose/intel/cpu/xeon/compose.yaml -f compose.yaml -f compose.webtool.yaml up -d
```

</details>

#### Launch on Xeon

On Xeon, OpenAI models and models deployed on a remote server are supported. Both methods require an API key.

```bash
export OPENAI_API_KEY=<your-openai-key>
cd $WORKDIR/GenAIExamples/AgentQnA/docker_compose/intel/cpu/xeon
```

##### OpenAI Models

The command below will launch the multi-agent system with `DocIndexRetriever` as the retrieval tool for the worker RAG agent.

```bash
docker compose -f $WORKDIR/GenAIExamples/DocIndexRetriever/docker_compose/intel/cpu/xeon/compose.yaml -f compose_openai.yaml up -d
```

##### Models on Remote Server

When models are deployed on a remote server with Intel® AI for Enterprise Inference, a base URL and an API key are required to access them. To run the agent microservices on Xeon while using models deployed on a remote server, add `compose_remote.yaml` to the `docker compose` command and set the additional environment variables shown below.

###### Notes

- `OPENAI_API_KEY` is already set in a previous step.
- `model` is used to overwrite the value set for this environment variable in `set_env.sh`.
- `LLM_ENDPOINT_URL` is the base URL given by the owner of the on-prem machine or the cloud service provider. It follows the format `https://<DNS>`, for example `https://api.inference.example.com`.

```bash
export model=<name-of-model-card>
export LLM_ENDPOINT_URL=<http-endpoint-of-remote-server>
docker compose -f $WORKDIR/GenAIExamples/DocIndexRetriever/docker_compose/intel/cpu/xeon/compose.yaml -f compose_openai.yaml -f compose_remote.yaml up -d
```
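
Before launching, it can help to confirm the remote endpoint is reachable with the key. A generic check against the standard OpenAI-compatible models listing (the exact path may vary by deployment):

```bash
# Sketch: list the models served by the remote OpenAI-compatible endpoint.
curl -s -H "Authorization: Bearer $OPENAI_API_KEY" "$LLM_ENDPOINT_URL/v1/models"
```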

### 3. Ingest Data into the vector database

The `run_ingest_data.sh` script uses an example jsonl file to ingest example documents into the vector database. Other ways to ingest data, and the other supported document types, can be found in the OPEA dataprep microservice in the opea-project/GenAIComps repo.

```bash
cd $WORKDIR/GenAIExamples/AgentQnA/retrieval_tool/
bash run_ingest_data.sh
```

> **Note**: This is a one-time operation.
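
As a sketch of what the script does under the hood, a document can also be posted directly to the dataprep microservice. The endpoint below assumes the default `DATAPREP_SERVICE_ENDPOINT` port from `set_env.sh`, and `my_doc.pdf` is a hypothetical file; see the GenAIComps dataprep README for the authoritative API:

```bash
# Hypothetical direct ingestion via the OPEA dataprep endpoint (port 6007 per set_env.sh).
curl -X POST "http://${host_ip}:6007/v1/dataprep/ingest" \
  -H "Content-Type: multipart/form-data" \
  -F "files=@./my_doc.pdf"
```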

## How to interact with the agent system with UI

The UI microservice is launched in the previous step along with the other microservices.
To access the UI, open a web browser to `http://${ip_address}:5173`. Note that `ip_address` here is the host IP of the UI microservice.

1. Click on the arrow above `Get started`. Create an admin account with a name, email, and password.
2. Add an OpenAI-compatible API endpoint. In the upper right, click on the circle button with the user's initial, then go to `Admin Settings` -> `Connections`. Under `Manage OpenAI API Connections`, click on the `+` to add a connection. Fill in these fields:

   - **URL**: `http://${ip_address}:9090/v1`, do not forget the `v1`
   - **Key**: any value
   - **Model IDs**: any name, e.g. `opea-agent`, then press `+` to add it

   Click "Save".

![opea agent setting](assets/img/opea-agent-setting.png)

3. Test the OPEA agent with the UI. Return to `New Chat` and ensure the model (e.g. `opea-agent`) is selected near the upper left. Enter any prompt to interact with the agent.



## [Optional] Deploy using Helm Charts

Refer to the [AgentQnA helm chart](./kubernetes/helm/README.md) for instructions on deploying AgentQnA on Kubernetes.

## Validate Services

1. First look at the logs for each of the agent docker containers:

```bash
# worker RAG agent
docker logs rag-agent-endpoint

# worker SQL agent
docker logs sql-agent-endpoint

# supervisor agent
docker logs react-agent-endpoint
```

Look for the message "HTTP server setup successful" to confirm the agent docker container has started successfully.

2. Use Python to validate that each agent is working properly:

```bash
# RAG worker agent
python $WORKDIR/GenAIExamples/AgentQnA/tests/test.py --prompt "Tell me about Michael Jackson song Thriller" --agent_role "worker" --ext_port 9095

# SQL agent
python $WORKDIR/GenAIExamples/AgentQnA/tests/test.py --prompt "How many employees in company" --agent_role "worker" --ext_port 9096

# supervisor agent: this will test a two-turn conversation
python $WORKDIR/GenAIExamples/AgentQnA/tests/test.py --agent_role "supervisor" --ext_port 9090
```
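
Since the supervisor agent exposes an OpenAI-compatible endpoint (the same one registered in the UI above), it can also be spot-checked with a plain `curl` request; the model name `opea-agent` is an arbitrary placeholder and the default external port 9090 is assumed:

```bash
# Sketch: query the supervisor agent's OpenAI-compatible chat endpoint directly.
curl -s http://localhost:9090/v1/chat/completions \
  -H "Content-Type: application/json" \
  -d '{"model": "opea-agent", "messages": [{"role": "user", "content": "Tell me about Michael Jackson song Thriller"}]}'
```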

## How to register other tools with the AI agent

The [tools](./tools) folder contains YAML and Python files for additional tools for the supervisor and worker agents. Refer to the "Provide your own tools" section in the instructions [here](https://github.com/opea-project/GenAIComps/tree/main/comps/agent/src/README.md) to add tools and customize the AI agents.
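
As an illustrative sketch of what registering a tool can look like (the authoritative schema is in the GenAIComps agent README; the tool name, fields, and Python function below are hypothetical assumptions, not the actual format):

```bash
# Sketch only: append a hypothetical tool definition to the supervisor's tool YAML.
# Verify field names against the "Provide your own tools" section before using.
cat >> $WORKDIR/GenAIExamples/AgentQnA/tools/supervisor_agent_tools.yaml <<'EOF'
get_weather:
  description: Get the current weather for a given city.
  callable_api: tools.py:get_weather
  args_schema:
    city:
      type: str
      description: city name
  return_output: weather_info
EOF
```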

## Monitor and Tracing

Follow the [OpenTelemetry OPEA Guide](https://opea-project.github.io/latest/tutorial/OpenTelemetry/OpenTelemetry_OPEA_Guide.html) to understand how to use OpenTelemetry tracing and metrics in OPEA.
For AgentQnA-specific tracing and metrics monitoring, follow the [OpenTelemetry on AgentQnA](https://opea-project.github.io/latest/tutorial/OpenTelemetry/deploy/AgentQnA.html) section.

BIN  AgentQnA/assets/img/agent_qna_arch.png (new file; 207 KiB; binary file not shown)
BIN  AgentQnA/assets/img/agent_ui.png (new file; 56 KiB; binary file not shown)
BIN  AgentQnA/assets/img/agent_ui_result.png (new file; 57 KiB; binary file not shown)
BIN  AgentQnA/assets/img/opea-agent-setting.png (new file; 71 KiB; binary file not shown)
BIN  AgentQnA/assets/img/opea-agent-test.png (new file; 99 KiB; binary file not shown)

342  AgentQnA/docker_compose/amd/gpu/rocm/README.md (new file)
@@ -0,0 +1,342 @@

# Build Mega Service of AgentQnA on AMD ROCm GPU

## Build Docker Images

### 1. Build Docker Image

- #### Create the application install directory and go to it:

```bash
mkdir ~/agentqna-install && cd ~/agentqna-install
```

- #### Clone the GenAIExamples repository (the default repository branch "main" is used here):

```bash
git clone https://github.com/opea-project/GenAIExamples.git
```

If you need a specific branch/tag of the GenAIExamples repository, check it out as follows (replace v1.3 with the desired value):

```bash
git clone https://github.com/opea-project/GenAIExamples.git && cd GenAIExamples && git checkout v1.3
```

Keep in mind that when using a specific version of the code, you need to use the README from that version.

- #### Go to the build directory:

```bash
cd ~/agentqna-install/GenAIExamples/AgentQnA/docker_image_build
```

- Clean up the GenAIComps repository if it was previously cloned in this directory.
  This is necessary if a build was performed earlier and the GenAIComps folder exists and is not empty:

```bash
echo Y | rm -R GenAIComps
```

- #### Clone the GenAIComps repository (the default repository branch "main" is used here):

```bash
git clone https://github.com/opea-project/GenAIComps.git
```

Keep in mind that when using a specific version of the code, you need to use the README from that version.

- #### Set the list of images for the build (from the build.yaml file)

Depending on whether you want to deploy a vLLM-based or TGI-based application, set the service list as follows:

#### vLLM-based application

```bash
service_list="vllm-rocm agent agent-ui"
```

#### TGI-based application

```bash
service_list="agent agent-ui"
```

- #### Optional. Pull the TGI Docker image (do this if you want to use TGI)

```bash
docker pull ghcr.io/huggingface/text-generation-inference:2.3.1-rocm
```

- #### Build Docker Images

```bash
docker compose -f build.yaml build ${service_list} --no-cache
```

- #### Build DocIndexRetriever Docker Images

```bash
cd ~/agentqna-install/GenAIExamples/DocIndexRetriever/docker_image_build/
git clone https://github.com/opea-project/GenAIComps.git
service_list="doc-index-retriever dataprep embedding retriever reranking"
docker compose -f build.yaml build ${service_list} --no-cache
```

- #### Pull DocIndexRetriever Docker Images

```bash
docker pull redis/redis-stack:7.2.0-v9
docker pull ghcr.io/huggingface/text-embeddings-inference:cpu-1.5
```

After the build, check the list of images with the command:

```bash
docker image ls
```

The list of images should include:

##### vLLM-based application:

- opea/vllm-rocm:latest
- opea/agent:latest
- redis/redis-stack:7.2.0-v9
- ghcr.io/huggingface/text-embeddings-inference:cpu-1.5
- opea/embedding:latest
- opea/retriever:latest
- opea/reranking:latest
- opea/doc-index-retriever:latest

##### TGI-based application:

- ghcr.io/huggingface/text-generation-inference:2.3.1-rocm
- opea/agent:latest
- redis/redis-stack:7.2.0-v9
- ghcr.io/huggingface/text-embeddings-inference:cpu-1.5
- opea/embedding:latest
- opea/retriever:latest
- opea/reranking:latest
- opea/doc-index-retriever:latest

---

## Deploy the AgentQnA Application

### Docker Compose Configuration for AMD GPUs

To enable GPU support for AMD GPUs, the following configuration is added to the Docker Compose files:

- compose_vllm.yaml - for the vLLM-based application
- compose.yaml - for the TGI-based application

```yaml
shm_size: 1g
devices:
  - /dev/kfd:/dev/kfd
  - /dev/dri:/dev/dri
cap_add:
  - SYS_PTRACE
group_add:
  - video
security_opt:
  - seccomp:unconfined
```

This configuration forwards all available GPUs to the container. To use a specific GPU, specify its `cardN` and `renderDN` device IDs. For example:

```yaml
shm_size: 1g
devices:
  - /dev/kfd:/dev/kfd
  - /dev/dri/card0:/dev/dri/card0
  - /dev/dri/renderD128:/dev/dri/renderD128
cap_add:
  - SYS_PTRACE
group_add:
  - video
security_opt:
  - seccomp:unconfined
```

**How to Identify GPU Device IDs:**
Use AMD GPU driver utilities to determine the correct `cardN` and `renderDN` IDs for your GPU.
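
For example, the DRM device nodes can be listed directly, and `rocm-smi` (if the ROCm utilities are installed) shows which GPUs are detected:

```bash
# AMD GPUs typically appear as /dev/dri/cardN and /dev/dri/renderDN.
ls -l /dev/dri/
# Show detected AMD GPUs (assumes rocm-smi from the ROCm stack is installed).
rocm-smi --showproductname
```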

### Set deploy environment variables

#### Setting variables in the operating system environment:

```bash
### Replace the string 'server_address' with your local server IP address
export host_ip='server_address'
### Replace the string 'your_huggingfacehub_token' with your HuggingFace Hub repository access token.
export HUGGINGFACEHUB_API_TOKEN='your_huggingfacehub_token'
### Replace the string 'your_langchain_api_key' with your LangChain API key.
export LANGCHAIN_API_KEY='your_langchain_api_key'
export LANGCHAIN_TRACING_V2=""
```

### Start the services:

#### If you use vLLM

```bash
cd ~/agentqna-install/GenAIExamples/AgentQnA/docker_compose/amd/gpu/rocm
bash launch_agent_service_vllm_rocm.sh
```

#### If you use TGI

```bash
cd ~/agentqna-install/GenAIExamples/AgentQnA/docker_compose/amd/gpu/rocm
bash launch_agent_service_tgi_rocm.sh
```

All containers should be running and should not restart:

##### If you use vLLM:

- dataprep-redis-server
- doc-index-retriever-server
- embedding-server
- rag-agent-endpoint
- react-agent-endpoint
- redis-vector-db
- reranking-tei-xeon-server
- retriever-redis-server
- sql-agent-endpoint
- tei-embedding-server
- tei-reranking-server
- vllm-service

##### If you use TGI:

- dataprep-redis-server
- doc-index-retriever-server
- embedding-server
- rag-agent-endpoint
- react-agent-endpoint
- redis-vector-db
- reranking-tei-xeon-server
- retriever-redis-server
- sql-agent-endpoint
- tei-embedding-server
- tei-reranking-server
- tgi-service
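
A quick way to confirm this is to list the container names and their status; any container stuck in a "Restarting" state indicates a startup problem worth checking in its logs:

```bash
docker ps --format 'table {{.Names}}\t{{.Status}}'
```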

---

## Validate the Services

### 1. Validate the vLLM/TGI Service

#### If you use vLLM:

```bash
DATA='{"model": "Intel/neural-chat-7b-v3-3", '\
'"messages": [{"role": "user", "content": "What is Deep Learning?"}], "max_tokens": 256}'

curl http://${HOST_IP}:${VLLM_SERVICE_PORT}/v1/chat/completions \
  -X POST \
  -d "$DATA" \
  -H 'Content-Type: application/json'
```

Check the response from the service. It should be similar to the following JSON:

```json
{
  "id": "chatcmpl-142f34ef35b64a8db3deedd170fed951",
  "object": "chat.completion",
  "created": 1742270316,
  "model": "Intel/neural-chat-7b-v3-3",
  "choices": [
    {
      "index": 0,
      "message": {
        "role": "assistant",
        "content": "",
        "tool_calls": []
      },
      "logprobs": null,
      "finish_reason": "length",
      "stop_reason": null
    }
  ],
  "usage": { "prompt_tokens": 66, "total_tokens": 322, "completion_tokens": 256, "prompt_tokens_details": null },
  "prompt_logprobs": null
}
```

If the service response has a meaningful reply in the value of the "choices.message.content" key, then the vLLM service is considered successfully launched.

#### If you use TGI:

```bash
DATA='{"inputs":"What is Deep Learning?",'\
'"parameters":{"max_new_tokens":256,"do_sample": true}}'

curl http://${HOST_IP}:${TGI_SERVICE_PORT}/generate \
  -X POST \
  -d "$DATA" \
  -H 'Content-Type: application/json'
```

Check the response from the service. It should be similar to the following JSON:

```json
{
  "generated_text": " "
}
```

If the service response has a meaningful reply in the value of the "generated_text" key, then the TGI service is considered successfully launched.

### 2. Validate Agent Services

#### Validate the RAG Agent Service

```bash
export agent_port=${WORKER_RAG_AGENT_PORT}
prompt="Tell me about Michael Jackson song Thriller"
python3 ~/agentqna-install/GenAIExamples/AgentQnA/tests/test.py --prompt "$prompt" --agent_role "worker" --ext_port $agent_port
```

The response should contain a meaningful answer to the question in the "prompt" variable.

#### Validate the SQL Agent Service

```bash
export agent_port=${WORKER_SQL_AGENT_PORT}
prompt="How many employees are there in the company?"
python3 ~/agentqna-install/GenAIExamples/AgentQnA/tests/test.py --prompt "$prompt" --agent_role "worker" --ext_port $agent_port
```

The answer should make sense, e.g. "There are 8 employees in the company."

#### Validate the React (Supervisor) Agent Service

```bash
export agent_port=${SUPERVISOR_REACT_AGENT_PORT}
python3 ~/agentqna-install/GenAIExamples/AgentQnA/tests/test.py --agent_role "supervisor" --ext_port $agent_port --stream
```

The response should contain "Iron Maiden".

### 3. Stop application

#### If you use vLLM

```bash
cd ~/agentqna-install/GenAIExamples/AgentQnA/docker_compose/amd/gpu/rocm
bash stop_agent_service_vllm_rocm.sh
```

#### If you use TGI

```bash
cd ~/agentqna-install/GenAIExamples/AgentQnA/docker_compose/amd/gpu/rocm
bash stop_agent_service_tgi_rocm.sh
```

124  AgentQnA/docker_compose/amd/gpu/rocm/compose.yaml (new file)
@@ -0,0 +1,124 @@
# Copyright (C) 2025 Advanced Micro Devices, Inc.

services:
  tgi-service:
    image: ghcr.io/huggingface/text-generation-inference:3.0.0-rocm
    container_name: tgi-service
    ports:
      - "${TGI_SERVICE_PORT-8085}:80"
    volumes:
      - "${MODEL_CACHE:-./data}:/data"
    environment:
      no_proxy: ${no_proxy}
      http_proxy: ${http_proxy}
      https_proxy: ${https_proxy}
      TGI_LLM_ENDPOINT: "http://${ip_address}:${TGI_SERVICE_PORT}"
      HUGGING_FACE_HUB_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
      HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
    shm_size: 32g
    devices:
      - /dev/kfd:/dev/kfd
      - /dev/dri:/dev/dri
    cap_add:
      - SYS_PTRACE
    group_add:
      - video
    security_opt:
      - seccomp:unconfined
    ipc: host
    command: --model-id ${LLM_MODEL_ID} --max-input-length 4096 --max-total-tokens 8192

  worker-rag-agent:
    image: ${REGISTRY:-opea}/agent:${TAG:-latest}
    container_name: rag-agent-endpoint
    volumes:
      - "${TOOLSET_PATH}:/home/user/tools/"
    ports:
      - "${WORKER_RAG_AGENT_PORT:-9095}:9095"
    ipc: host
    environment:
      ip_address: ${ip_address}
      strategy: rag_agent_llama
      with_memory: false
      recursion_limit: ${recursion_limit_worker}
      llm_engine: tgi
      HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
      llm_endpoint_url: ${LLM_ENDPOINT_URL}
      model: ${LLM_MODEL_ID}
      temperature: ${temperature}
      max_new_tokens: ${max_new_tokens}
      stream: false
      tools: /home/user/tools/worker_agent_tools.yaml
      require_human_feedback: false
      RETRIEVAL_TOOL_URL: ${RETRIEVAL_TOOL_URL}
      no_proxy: ${no_proxy}
      http_proxy: ${http_proxy}
      https_proxy: ${https_proxy}
      LANGCHAIN_API_KEY: ${LANGCHAIN_API_KEY}
      LANGCHAIN_TRACING_V2: ${LANGCHAIN_TRACING_V2}
      LANGCHAIN_PROJECT: "opea-worker-agent-service"
      port: 9095

  worker-sql-agent:
    image: ${REGISTRY:-opea}/agent:${TAG:-latest}
    container_name: sql-agent-endpoint
    volumes:
      - "${WORKDIR}/tests/Chinook_Sqlite.sqlite:/home/user/chinook-db/Chinook_Sqlite.sqlite:rw"
    ports:
      - "${WORKER_SQL_AGENT_PORT:-9096}:9096"
    ipc: host
    environment:
      ip_address: ${ip_address}
      strategy: sql_agent_llama
      with_memory: false
      db_name: ${db_name}
      db_path: ${db_path}
      use_hints: false
      recursion_limit: ${recursion_limit_worker}
      llm_engine: tgi
      HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
      llm_endpoint_url: ${LLM_ENDPOINT_URL}
      model: ${LLM_MODEL_ID}
      temperature: ${temperature}
      max_new_tokens: ${max_new_tokens}
      stream: false
      require_human_feedback: false
      no_proxy: ${no_proxy}
      http_proxy: ${http_proxy}
      https_proxy: ${https_proxy}
      port: 9096

  supervisor-react-agent:
    image: ${REGISTRY:-opea}/agent:${TAG:-latest}
    container_name: react-agent-endpoint
    depends_on:
      - worker-rag-agent
    volumes:
      - "${TOOLSET_PATH}:/home/user/tools/"
    ports:
      - "${SUPERVISOR_REACT_AGENT_PORT:-9090}:9090"
    ipc: host
    environment:
      ip_address: ${ip_address}
      strategy: react_llama
      with_memory: true
      recursion_limit: ${recursion_limit_supervisor}
      llm_engine: tgi
      HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
      llm_endpoint_url: ${LLM_ENDPOINT_URL}
      model: ${LLM_MODEL_ID}
      temperature: ${temperature}
      max_new_tokens: ${max_new_tokens}
      stream: true
      tools: /home/user/tools/supervisor_agent_tools.yaml
      require_human_feedback: false
      no_proxy: ${no_proxy}
      http_proxy: ${http_proxy}
      https_proxy: ${https_proxy}
      LANGCHAIN_API_KEY: ${LANGCHAIN_API_KEY}
      LANGCHAIN_TRACING_V2: ${LANGCHAIN_TRACING_V2}
      LANGCHAIN_PROJECT: "opea-supervisor-agent-service"
      CRAG_SERVER: ${CRAG_SERVER}
      WORKER_AGENT_URL: ${WORKER_AGENT_URL}
      SQL_AGENT_URL: ${SQL_AGENT_URL}
      port: 9090

128  AgentQnA/docker_compose/amd/gpu/rocm/compose_vllm.yaml (new file)
@@ -0,0 +1,128 @@
# Copyright (C) 2025 Advanced Micro Devices, Inc.

services:
  vllm-service:
    image: ${REGISTRY:-opea}/vllm-rocm:${TAG:-latest}
    container_name: vllm-service
    ports:
      - "${VLLM_SERVICE_PORT:-8081}:8011"
    environment:
      no_proxy: ${no_proxy}
      http_proxy: ${http_proxy}
      https_proxy: ${https_proxy}
      HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
      HF_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
      HF_HUB_DISABLE_PROGRESS_BARS: 1
      HF_HUB_ENABLE_HF_TRANSFER: 0
      VLLM_USE_TRITON_FLASH_ATTENTION: 0
      PYTORCH_JIT: 0
    volumes:
      - "${MODEL_CACHE:-./data}:/data"
    shm_size: 20G
    devices:
      - /dev/kfd:/dev/kfd
      - /dev/dri/:/dev/dri/
    cap_add:
      - SYS_PTRACE
    group_add:
      - video
    security_opt:
      - seccomp:unconfined
      - apparmor=unconfined
    command: "--model ${VLLM_LLM_MODEL_ID} --swap-space 16 --disable-log-requests --dtype float16 --tensor-parallel-size 4 --host 0.0.0.0 --port 8011 --num-scheduler-steps 1 --distributed-executor-backend \"mp\""
    ipc: host

  worker-rag-agent:
    image: ${REGISTRY:-opea}/agent:${TAG:-latest}
    container_name: rag-agent-endpoint
    volumes:
      - ${TOOLSET_PATH}:/home/user/tools/
    ports:
      - "${WORKER_RAG_AGENT_PORT:-9095}:9095"
    ipc: host
    environment:
      ip_address: ${ip_address}
      strategy: rag_agent_llama
      with_memory: false
      recursion_limit: ${recursion_limit_worker}
      llm_engine: vllm
      HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
      llm_endpoint_url: ${LLM_ENDPOINT_URL}
      model: ${LLM_MODEL_ID}
      temperature: ${temperature}
      max_new_tokens: ${max_new_tokens}
      stream: false
      tools: /home/user/tools/worker_agent_tools.yaml
      require_human_feedback: false
      RETRIEVAL_TOOL_URL: ${RETRIEVAL_TOOL_URL}
      no_proxy: ${no_proxy}
      http_proxy: ${http_proxy}
      https_proxy: ${https_proxy}
      LANGCHAIN_API_KEY: ${LANGCHAIN_API_KEY}
      LANGCHAIN_TRACING_V2: ${LANGCHAIN_TRACING_V2}
      LANGCHAIN_PROJECT: "opea-worker-agent-service"
      port: 9095

  worker-sql-agent:
    image: ${REGISTRY:-opea}/agent:${TAG:-latest}
    container_name: sql-agent-endpoint
    volumes:
      - "${WORKDIR}/tests/Chinook_Sqlite.sqlite:/home/user/chinook-db/Chinook_Sqlite.sqlite:rw"
    ports:
      - "${WORKER_SQL_AGENT_PORT:-9096}:9096"
    ipc: host
    environment:
      ip_address: ${ip_address}
      strategy: sql_agent_llama
      with_memory: false
      db_name: ${db_name}
      db_path: ${db_path}
      use_hints: false
      recursion_limit: ${recursion_limit_worker}
      llm_engine: vllm
      HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
      llm_endpoint_url: ${LLM_ENDPOINT_URL}
      model: ${LLM_MODEL_ID}
      temperature: ${temperature}
      max_new_tokens: ${max_new_tokens}
      stream: false
      require_human_feedback: false
      no_proxy: ${no_proxy}
      http_proxy: ${http_proxy}
      https_proxy: ${https_proxy}
      port: 9096

  supervisor-react-agent:
    image: ${REGISTRY:-opea}/agent:${TAG:-latest}
    container_name: react-agent-endpoint
    depends_on:
      - worker-rag-agent
    volumes:
      - ${TOOLSET_PATH}:/home/user/tools/
    ports:
      - "${SUPERVISOR_REACT_AGENT_PORT:-9090}:9090"
    ipc: host
    environment:
      ip_address: ${ip_address}
      strategy: react_llama
      with_memory: true
      recursion_limit: ${recursion_limit_supervisor}
      llm_engine: vllm
      HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
      llm_endpoint_url: ${LLM_ENDPOINT_URL}
      model: ${LLM_MODEL_ID}
      temperature: ${temperature}
      max_new_tokens: ${max_new_tokens}
      stream: true
      tools: /home/user/tools/supervisor_agent_tools.yaml
      require_human_feedback: false
      no_proxy: ${no_proxy}
      http_proxy: ${http_proxy}
      https_proxy: ${https_proxy}
      LANGCHAIN_API_KEY: ${LANGCHAIN_API_KEY}
      LANGCHAIN_TRACING_V2: ${LANGCHAIN_TRACING_V2}
      LANGCHAIN_PROJECT: "opea-supervisor-agent-service"
      CRAG_SERVER: ${CRAG_SERVER}
      WORKER_AGENT_URL: ${WORKER_AGENT_URL}
      SQL_AGENT_URL: ${SQL_AGENT_URL}
      port: 9090

@@ -0,0 +1,87 @@
# Copyright (C) 2024 Advanced Micro Devices, Inc.
# SPDX-License-Identifier: Apache-2.0

# Before start script:
# export host_ip="your_host_ip_or_host_name"
# export HUGGINGFACEHUB_API_TOKEN="your_huggingface_api_token"
# export LANGCHAIN_API_KEY="your_langchain_api_key"
# export LANGCHAIN_TRACING_V2=""

# Set server hostname or IP address
export ip_address=${host_ip}

# Set services IP ports
export TGI_SERVICE_PORT="18110"
export WORKER_RAG_AGENT_PORT="18111"
export WORKER_SQL_AGENT_PORT="18112"
export SUPERVISOR_REACT_AGENT_PORT="18113"
export CRAG_SERVER_PORT="18114"

export WORKPATH=$(dirname "$PWD")
export WORKDIR=${WORKPATH}/../../../
export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN}
export LLM_MODEL_ID="Intel/neural-chat-7b-v3-3"
export HF_CACHE_DIR="./data"
export MODEL_CACHE="./data"
export TOOLSET_PATH=${WORKPATH}/../../../tools/
export recursion_limit_worker=12
export recursion_limit_supervisor=10
export LLM_ENDPOINT_URL=http://${ip_address}:${TGI_SERVICE_PORT}
export temperature=0.01
export max_new_tokens=512
export RETRIEVAL_TOOL_URL="http://${ip_address}:8889/v1/retrievaltool"
export LANGCHAIN_API_KEY=${LANGCHAIN_API_KEY}
export LANGCHAIN_TRACING_V2=${LANGCHAIN_TRACING_V2}
export db_name=Chinook
export db_path="sqlite:////home/user/chinook-db/Chinook_Sqlite.sqlite"
export CRAG_SERVER=http://${ip_address}:${CRAG_SERVER_PORT}
export WORKER_AGENT_URL="http://${ip_address}:${WORKER_RAG_AGENT_PORT}/v1/chat/completions"
export SQL_AGENT_URL="http://${ip_address}:${WORKER_SQL_AGENT_PORT}/v1/chat/completions"
export no_proxy=${no_proxy}
export http_proxy=${http_proxy}
export https_proxy=${https_proxy}
export EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5"
export RERANK_MODEL_ID="BAAI/bge-reranker-base"
export TEI_EMBEDDING_ENDPOINT="http://${host_ip}:6006"
export TEI_RERANKING_ENDPOINT="http://${host_ip}:8808"
export REDIS_URL="redis://${host_ip}:6379"
export INDEX_NAME="rag-redis"
export RERANK_TYPE="tei"
export MEGA_SERVICE_HOST_IP=${host_ip}
export EMBEDDING_SERVICE_HOST_IP=${host_ip}
export RETRIEVER_SERVICE_HOST_IP=${host_ip}
export RERANK_SERVICE_HOST_IP=${host_ip}
export BACKEND_SERVICE_ENDPOINT="http://${host_ip}:8889/v1/retrievaltool"
export DATAPREP_SERVICE_ENDPOINT="http://${host_ip}:6007/v1/dataprep/ingest"
export DATAPREP_GET_FILE_ENDPOINT="http://${host_ip}:6008/v1/dataprep/get"
export DATAPREP_DELETE_FILE_ENDPOINT="http://${host_ip}:6009/v1/dataprep/delete"

echo ${WORKER_RAG_AGENT_PORT} > ${WORKPATH}/WORKER_RAG_AGENT_PORT_tmp
echo ${WORKER_SQL_AGENT_PORT} > ${WORKPATH}/WORKER_SQL_AGENT_PORT_tmp
echo ${SUPERVISOR_REACT_AGENT_PORT} > ${WORKPATH}/SUPERVISOR_REACT_AGENT_PORT_tmp
echo ${CRAG_SERVER_PORT} > ${WORKPATH}/CRAG_SERVER_PORT_tmp

echo "Downloading chinook data..."
echo Y | rm -R chinook-database
git clone https://github.com/lerocha/chinook-database.git
echo Y | rm -R ../../../../../AgentQnA/tests/Chinook_Sqlite.sqlite
cp chinook-database/ChinookDatabase/DataSources/Chinook_Sqlite.sqlite ../../../../../AgentQnA/tests

docker compose -f ../../../../../DocIndexRetriever/docker_compose/intel/cpu/xeon/compose.yaml up -d
docker compose -f compose.yaml up -d

n=0
until [[ "$n" -ge 100 ]]; do
    docker logs tgi-service > ${WORKPATH}/tgi_service_start.log
    if grep -q Connected ${WORKPATH}/tgi_service_start.log; then
        break
    fi
    sleep 10s
    n=$((n+1))
done

echo "Starting CRAG server"
docker run -d --runtime=runc --name=kdd-cup-24-crag-service -p=${CRAG_SERVER_PORT}:8000 docker.io/aicrowd/kdd-cup-24-crag-mock-api:v0

@@ -0,0 +1,88 @@
# Copyright (C) 2024 Advanced Micro Devices, Inc.
# SPDX-License-Identifier: Apache-2.0

# Before start script:
# export host_ip="your_host_ip_or_host_name"
# export HUGGINGFACEHUB_API_TOKEN="your_huggingface_api_token"
# export LANGCHAIN_API_KEY="your_langchain_api_key"
# export LANGCHAIN_TRACING_V2=""

# Set server hostname or IP address
export ip_address=${host_ip}

# Set services IP ports
export VLLM_SERVICE_PORT="18110"
export WORKER_RAG_AGENT_PORT="18111"
export WORKER_SQL_AGENT_PORT="18112"
export SUPERVISOR_REACT_AGENT_PORT="18113"
export CRAG_SERVER_PORT="18114"

export WORKPATH=$(dirname "$PWD")
export WORKDIR=${WORKPATH}/../../../
export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN}
export VLLM_LLM_MODEL_ID="Intel/neural-chat-7b-v3-3"
export HF_CACHE_DIR="./data"
export MODEL_CACHE="./data"
export TOOLSET_PATH=${WORKPATH}/../../../tools/
export recursion_limit_worker=12
export recursion_limit_supervisor=10
export LLM_ENDPOINT_URL=http://${ip_address}:${VLLM_SERVICE_PORT}
export LLM_MODEL_ID=${VLLM_LLM_MODEL_ID}
export temperature=0.01
export max_new_tokens=512
export RETRIEVAL_TOOL_URL="http://${ip_address}:8889/v1/retrievaltool"
export LANGCHAIN_API_KEY=${LANGCHAIN_API_KEY}
export LANGCHAIN_TRACING_V2=${LANGCHAIN_TRACING_V2}
export db_name=Chinook
export db_path="sqlite:////home/user/chinook-db/Chinook_Sqlite.sqlite"
export CRAG_SERVER=http://${ip_address}:${CRAG_SERVER_PORT}
export WORKER_AGENT_URL="http://${ip_address}:${WORKER_RAG_AGENT_PORT}/v1/chat/completions"
export SQL_AGENT_URL="http://${ip_address}:${WORKER_SQL_AGENT_PORT}/v1/chat/completions"
export no_proxy=${no_proxy}
export http_proxy=${http_proxy}
export https_proxy=${https_proxy}
export EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5"
export RERANK_MODEL_ID="BAAI/bge-reranker-base"
export TEI_EMBEDDING_ENDPOINT="http://${host_ip}:6006"
export TEI_RERANKING_ENDPOINT="http://${host_ip}:8808"
export REDIS_URL="redis://${host_ip}:6379"
export INDEX_NAME="rag-redis"
export RERANK_TYPE="tei"
export MEGA_SERVICE_HOST_IP=${host_ip}
export EMBEDDING_SERVICE_HOST_IP=${host_ip}
export RETRIEVER_SERVICE_HOST_IP=${host_ip}
export RERANK_SERVICE_HOST_IP=${host_ip}
export BACKEND_SERVICE_ENDPOINT="http://${host_ip}:8889/v1/retrievaltool"
export DATAPREP_SERVICE_ENDPOINT="http://${host_ip}:6007/v1/dataprep/ingest"
export DATAPREP_GET_FILE_ENDPOINT="http://${host_ip}:6008/v1/dataprep/get"
export DATAPREP_DELETE_FILE_ENDPOINT="http://${host_ip}:6009/v1/dataprep/delete"

echo ${WORKER_RAG_AGENT_PORT} > ${WORKPATH}/WORKER_RAG_AGENT_PORT_tmp
echo ${WORKER_SQL_AGENT_PORT} > ${WORKPATH}/WORKER_SQL_AGENT_PORT_tmp
echo ${SUPERVISOR_REACT_AGENT_PORT} > ${WORKPATH}/SUPERVISOR_REACT_AGENT_PORT_tmp
echo ${CRAG_SERVER_PORT} > ${WORKPATH}/CRAG_SERVER_PORT_tmp

echo "Downloading chinook data..."
echo Y | rm -R chinook-database
git clone https://github.com/lerocha/chinook-database.git
echo Y | rm -R ../../../../../AgentQnA/tests/Chinook_Sqlite.sqlite
cp chinook-database/ChinookDatabase/DataSources/Chinook_Sqlite.sqlite ../../../../../AgentQnA/tests

docker compose -f ../../../../../DocIndexRetriever/docker_compose/intel/cpu/xeon/compose.yaml up -d
docker compose -f compose_vllm.yaml up -d

n=0
until [[ "$n" -ge 500 ]]; do
    docker logs vllm-service >& "${WORKPATH}"/vllm-service_start.log
    if grep -q "Application startup complete" "${WORKPATH}"/vllm-service_start.log; then
        break
    fi
    sleep 20s
    n=$((n+1))
done

echo "Starting CRAG server"
docker run -d --runtime=runc --name=kdd-cup-24-crag-service -p=${CRAG_SERVER_PORT}:8000 docker.io/aicrowd/kdd-cup-24-crag-mock-api:v0

@@ -0,0 +1,62 @@
#!/usr/bin/env bash

# Copyright (C) 2024 Advanced Micro Devices, Inc.
# SPDX-License-Identifier: Apache-2.0

WORKPATH=$(dirname "$PWD")/..
export ip_address=${host_ip}
export HUGGINGFACEHUB_API_TOKEN=${your_hf_api_token}
export AGENTQNA_TGI_IMAGE=ghcr.io/huggingface/text-generation-inference:2.4.1-rocm
export AGENTQNA_TGI_SERVICE_PORT="19001"

# LLM related environment variables
export AGENTQNA_CARD_ID="card1"
export AGENTQNA_RENDER_ID="renderD136"
export HF_CACHE_DIR=${HF_CACHE_DIR}
ls $HF_CACHE_DIR
export LLM_MODEL_ID="Intel/neural-chat-7b-v3-3"
export NUM_SHARDS=4
export LLM_ENDPOINT_URL="http://${ip_address}:${AGENTQNA_TGI_SERVICE_PORT}"
export temperature=0.01
export max_new_tokens=512

# agent related environment variables
export AGENTQNA_WORKER_AGENT_SERVICE_PORT="9095"
export TOOLSET_PATH=$WORKDIR/GenAIExamples/AgentQnA/tools/
export recursion_limit_worker=12
export recursion_limit_supervisor=10
export WORKER_AGENT_URL="http://${ip_address}:${AGENTQNA_WORKER_AGENT_SERVICE_PORT}/v1/chat/completions"
export RETRIEVAL_TOOL_URL="http://${ip_address}:8889/v1/retrievaltool"
export CRAG_SERVER=http://${ip_address}:18881

export AGENTQNA_FRONTEND_PORT="15557"

# retrieval_tool
export TEI_EMBEDDING_ENDPOINT="http://${host_ip}:6006"
export TEI_RERANKING_ENDPOINT="http://${host_ip}:8808"
export REDIS_URL="redis://${host_ip}:26379"
export INDEX_NAME="rag-redis"
export MEGA_SERVICE_HOST_IP=${host_ip}
export EMBEDDING_SERVICE_HOST_IP=${host_ip}
export RETRIEVER_SERVICE_HOST_IP=${host_ip}
export RERANK_SERVICE_HOST_IP=${host_ip}
export BACKEND_SERVICE_ENDPOINT="http://${host_ip}:8889/v1/retrievaltool"
export DATAPREP_SERVICE_ENDPOINT="http://${host_ip}:6007/v1/dataprep/ingest"
export DATAPREP_GET_FILE_ENDPOINT="http://${host_ip}:6007/v1/dataprep/get"
export DATAPREP_DELETE_FILE_ENDPOINT="http://${host_ip}:6007/v1/dataprep/delete"

echo "Removing chinook data..."
if [ -d "chinook-database" ]; then
    rm -rf chinook-database
fi
echo "Chinook data removed!"

echo "Stopping CRAG server"
docker rm kdd-cup-24-crag-service --force

echo "Stopping Agent services"
docker compose -f compose.yaml down

echo "Stopping Retrieval services"
docker compose -f ../../../../../DocIndexRetriever/docker_compose/intel/cpu/xeon/compose.yaml down

@@ -0,0 +1,84 @@
# Copyright (C) 2024 Advanced Micro Devices, Inc.
# SPDX-License-Identifier: Apache-2.0

# Before start script:
# export host_ip="your_host_ip_or_host_name"
# export HUGGINGFACEHUB_API_TOKEN="your_huggingface_api_token"
# export LANGCHAIN_API_KEY="your_langchain_api_key"
# export LANGCHAIN_TRACING_V2=""

# Set server hostname or IP address
export ip_address=${host_ip}

# Set services IP ports
export VLLM_SERVICE_PORT="18110"
export WORKER_RAG_AGENT_PORT="18111"
export WORKER_SQL_AGENT_PORT="18112"
export SUPERVISOR_REACT_AGENT_PORT="18113"
export CRAG_SERVER_PORT="18114"

export WORKPATH=$(dirname "$PWD")
export WORKDIR=${WORKPATH}/../../../
export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN}
export VLLM_LLM_MODEL_ID="Intel/neural-chat-7b-v3-3"
export HF_CACHE_DIR="./data"
export MODEL_CACHE="./data"
export TOOLSET_PATH=${WORKPATH}/../../../tools/
export recursion_limit_worker=12
export recursion_limit_supervisor=10
export LLM_ENDPOINT_URL=http://${ip_address}:${VLLM_SERVICE_PORT}
export LLM_MODEL_ID=${VLLM_LLM_MODEL_ID}
export temperature=0.01
export max_new_tokens=512
export RETRIEVAL_TOOL_URL="http://${ip_address}:8889/v1/retrievaltool"
export LANGCHAIN_API_KEY=${LANGCHAIN_API_KEY}
export LANGCHAIN_TRACING_V2=${LANGCHAIN_TRACING_V2}
export db_name=Chinook
export db_path="sqlite:////home/user/chinook-db/Chinook_Sqlite.sqlite"
export CRAG_SERVER=http://${ip_address}:${CRAG_SERVER_PORT}
export WORKER_AGENT_URL="http://${ip_address}:${WORKER_RAG_AGENT_PORT}/v1/chat/completions"
export SQL_AGENT_URL="http://${ip_address}:${WORKER_SQL_AGENT_PORT}/v1/chat/completions"
export no_proxy=${no_proxy}
export http_proxy=${http_proxy}
export https_proxy=${https_proxy}
export EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5"
export RERANK_MODEL_ID="BAAI/bge-reranker-base"
export TEI_EMBEDDING_ENDPOINT="http://${host_ip}:6006"
export TEI_RERANKING_ENDPOINT="http://${host_ip}:8808"
export REDIS_URL="redis://${host_ip}:6379"
export INDEX_NAME="rag-redis"
export RERANK_TYPE="tei"
export MEGA_SERVICE_HOST_IP=${host_ip}
export EMBEDDING_SERVICE_HOST_IP=${host_ip}
export RETRIEVER_SERVICE_HOST_IP=${host_ip}
export RERANK_SERVICE_HOST_IP=${host_ip}
export BACKEND_SERVICE_ENDPOINT="http://${host_ip}:8889/v1/retrievaltool"
export DATAPREP_SERVICE_ENDPOINT="http://${host_ip}:6007/v1/dataprep/ingest"
export DATAPREP_GET_FILE_ENDPOINT="http://${host_ip}:6008/v1/dataprep/get"
export DATAPREP_DELETE_FILE_ENDPOINT="http://${host_ip}:6009/v1/dataprep/delete"

echo ${WORKER_RAG_AGENT_PORT} > ${WORKPATH}/WORKER_RAG_AGENT_PORT_tmp
echo ${WORKER_SQL_AGENT_PORT} > ${WORKPATH}/WORKER_SQL_AGENT_PORT_tmp
echo ${SUPERVISOR_REACT_AGENT_PORT} > ${WORKPATH}/SUPERVISOR_REACT_AGENT_PORT_tmp
echo ${CRAG_SERVER_PORT} > ${WORKPATH}/CRAG_SERVER_PORT_tmp

echo "Removing chinook data..."
if [ -d "chinook-database" ]; then
    rm -rf chinook-database
fi
echo "Chinook data removed!"

echo "Stopping CRAG server"
docker rm kdd-cup-24-crag-service --force

echo "Stopping Agent services"
docker compose -f compose_vllm.yaml down

echo "Stopping Retrieval services"
docker compose -f ../../../../../DocIndexRetriever/docker_compose/intel/cpu/xeon/compose.yaml down

3  AgentQnA/docker_compose/intel/cpu/xeon/README.md (new file)
@@ -0,0 +1,3 @@
# Single node on-prem deployment with Docker Compose on Xeon Scalable processors

This example showcases a hierarchical multi-agent system for question-answering applications. To deploy the example on Xeon, OpenAI LLM models accessed via API calls are used. For instructions, refer to the deployment guide [here](../../../../README.md).

112  AgentQnA/docker_compose/intel/cpu/xeon/compose_openai.yaml (new file)
@@ -0,0 +1,112 @@
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

services:
  worker-rag-agent:
    image: opea/agent:latest
    container_name: rag-agent-endpoint
    volumes:
      - ${TOOLSET_PATH}:/home/user/tools/
    ports:
      - "9095:9095"
    ipc: host
    environment:
      ip_address: ${ip_address}
      strategy: rag_agent
      with_memory: false
      recursion_limit: ${recursion_limit_worker}
      llm_engine: openai
      OPENAI_API_KEY: ${OPENAI_API_KEY}
      model: ${model}
      temperature: ${temperature}
      max_new_tokens: ${max_new_tokens}
      stream: false
      tools: /home/user/tools/worker_agent_tools.yaml
      require_human_feedback: false
      RETRIEVAL_TOOL_URL: ${RETRIEVAL_TOOL_URL}
      no_proxy: ${no_proxy}
      http_proxy: ${http_proxy}
      https_proxy: ${https_proxy}
      LANGCHAIN_API_KEY: ${LANGCHAIN_API_KEY}
      LANGCHAIN_TRACING_V2: ${LANGCHAIN_TRACING_V2}
      LANGCHAIN_PROJECT: "opea-worker-agent-service"
      port: 9095

  worker-sql-agent:
    image: opea/agent:latest
    container_name: sql-agent-endpoint
    volumes:
      - ${WORKDIR}/GenAIExamples/AgentQnA/tests:/home/user/chinook-db # SQL database
    ports:
      - "9096:9096"
    ipc: host
    environment:
      ip_address: ${ip_address}
      strategy: sql_agent
      with_memory: false
      db_name: ${db_name}
      db_path: ${db_path}
      use_hints: false
      recursion_limit: ${recursion_limit_worker}
      llm_engine: openai
      OPENAI_API_KEY: ${OPENAI_API_KEY}
      model: ${model}
      temperature: 0
      max_new_tokens: ${max_new_tokens}
      stream: false
      require_human_feedback: false
      no_proxy: ${no_proxy}
      http_proxy: ${http_proxy}
      https_proxy: ${https_proxy}
      port: 9096

  supervisor-react-agent:
    image: opea/agent:latest
    container_name: react-agent-endpoint
    depends_on:
      - worker-rag-agent
      - worker-sql-agent
    volumes:
      - ${TOOLSET_PATH}:/home/user/tools/
    ports:
      - "9090:9090"
    ipc: host
    environment:
      ip_address: ${ip_address}
      strategy: react_llama
      with_memory: true
      recursion_limit: ${recursion_limit_supervisor}
      llm_engine: openai
      OPENAI_API_KEY: ${OPENAI_API_KEY}
      model: ${model}
      temperature: ${temperature}
      max_new_tokens: ${max_new_tokens}
      stream: true
      tools: /home/user/tools/supervisor_agent_tools.yaml
      require_human_feedback: false
      no_proxy: ${no_proxy}
      http_proxy: ${http_proxy}
      https_proxy: ${https_proxy}
      LANGCHAIN_API_KEY: ${LANGCHAIN_API_KEY}
      LANGCHAIN_TRACING_V2: ${LANGCHAIN_TRACING_V2}
      LANGCHAIN_PROJECT: "opea-supervisor-agent-service"
      CRAG_SERVER: $CRAG_SERVER
      WORKER_AGENT_URL: $WORKER_AGENT_URL
      SQL_AGENT_URL: $SQL_AGENT_URL
      port: 9090

  mock-api:
    image: docker.io/aicrowd/kdd-cup-24-crag-mock-api:v0
    container_name: mock-api
    ports:
      - "8080:8000"
    ipc: host

  agent-ui:
    image: opea/agent-ui
    container_name: agent-ui
    ports:
      - "5173:8080"
    ipc: host

networks:
  default:
    driver: bridge

18  AgentQnA/docker_compose/intel/cpu/xeon/compose_remote.yaml (new file)
@@ -0,0 +1,18 @@
# Copyright (C) 2025 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

services:
  worker-rag-agent:
    environment:
      llm_endpoint_url: ${LLM_ENDPOINT_URL}
      api_key: ${OPENAI_API_KEY}

  worker-sql-agent:
    environment:
      llm_endpoint_url: ${LLM_ENDPOINT_URL}
      api_key: ${OPENAI_API_KEY}

  supervisor-react-agent:
    environment:
      llm_endpoint_url: ${LLM_ENDPOINT_URL}
      api_key: ${OPENAI_API_KEY}

57  AgentQnA/docker_compose/intel/cpu/xeon/set_env.sh (new file)
@@ -0,0 +1,57 @@
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

pushd "../../../../../" > /dev/null
source .set_env.sh
popd > /dev/null

if [[ -z "${WORKDIR}" ]]; then
    echo "Please set WORKDIR environment variable"
    exit 0
fi
echo "WORKDIR=${WORKDIR}"
export TOOLSET_PATH=$WORKDIR/GenAIExamples/AgentQnA/tools/
export ip_address=$(hostname -I | awk '{print $1}')
export recursion_limit_worker=12
export recursion_limit_supervisor=10
export model="gpt-4o-mini-2024-07-18"
export temperature=0
export max_new_tokens=4096
export OPENAI_API_KEY=${OPENAI_API_KEY}
export WORKER_AGENT_URL="http://${ip_address}:9095/v1/chat/completions"
export SQL_AGENT_URL="http://${ip_address}:9096/v1/chat/completions"
export RETRIEVAL_TOOL_URL="http://${ip_address}:8889/v1/retrievaltool"
export CRAG_SERVER=http://${ip_address}:8080
export db_name=Chinook
export db_path="sqlite:////home/user/chinook-db/Chinook_Sqlite.sqlite"

if [ ! -f $WORKDIR/GenAIExamples/AgentQnA/tests/Chinook_Sqlite.sqlite ]; then
    echo "Download Chinook_Sqlite!"
    wget -O $WORKDIR/GenAIExamples/AgentQnA/tests/Chinook_Sqlite.sqlite https://github.com/lerocha/chinook-database/releases/download/v1.4.5/Chinook_Sqlite.sqlite
fi

# retriever
export host_ip=$(hostname -I | awk '{print $1}')
export HF_CACHE_DIR=${HF_CACHE_DIR}
export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN}
export no_proxy=${no_proxy}
export http_proxy=${http_proxy}
export https_proxy=${https_proxy}
export EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5"
export RERANK_MODEL_ID="BAAI/bge-reranker-base"
export TEI_EMBEDDING_ENDPOINT="http://${host_ip}:6006"
export TEI_RERANKING_ENDPOINT="http://${host_ip}:8808"
export REDIS_URL="redis://${host_ip}:6379"
export INDEX_NAME="rag-redis"
export RERANK_TYPE="tei"
export MEGA_SERVICE_HOST_IP=${host_ip}
export EMBEDDING_SERVICE_HOST_IP=${host_ip}
export RETRIEVER_SERVICE_HOST_IP=${host_ip}
export RERANK_SERVICE_HOST_IP=${host_ip}
export BACKEND_SERVICE_ENDPOINT="http://${host_ip}:8889/v1/retrievaltool"
export DATAPREP_SERVICE_ENDPOINT="http://${host_ip}:6007/v1/dataprep/ingest"
export DATAPREP_GET_FILE_ENDPOINT="http://${host_ip}:6008/v1/dataprep/get"
export DATAPREP_DELETE_FILE_ENDPOINT="http://${host_ip}:6009/v1/dataprep/delete"

export no_proxy="$no_proxy,rag-agent-endpoint,sql-agent-endpoint,react-agent-endpoint,agent-ui"

3  AgentQnA/docker_compose/intel/hpu/gaudi/README.md (new file)
@@ -0,0 +1,3 @@
# Single node on-prem deployment of AgentQnA on Gaudi

This example showcases a hierarchical multi-agent system for question-answering applications. To deploy the example on Gaudi using open-source LLMs, refer to the deployment guide [here](../../../../README.md).

@@ -0,0 +1,93 @@
# Copyright (C) 2025 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

services:
  tei-embedding-service:
    command: --model-id ${EMBEDDING_MODEL_ID} --auto-truncate --otlp-endpoint $OTEL_EXPORTER_OTLP_TRACES_ENDPOINT
  tei-reranking-service:
    command: --model-id ${RERANK_MODEL_ID} --auto-truncate --otlp-endpoint $OTEL_EXPORTER_OTLP_TRACES_ENDPOINT
  jaeger:
    image: jaegertracing/all-in-one:1.67.0
    container_name: jaeger
    ports:
      - "16686:16686"
      - "4317:4317"
      - "4318:4318"
      - "9411:9411"
    ipc: host
    environment:
      no_proxy: ${no_proxy}
      http_proxy: ${http_proxy}
      https_proxy: ${https_proxy}
      COLLECTOR_ZIPKIN_HOST_PORT: 9411
    restart: unless-stopped
  prometheus:
    image: prom/prometheus:v2.52.0
    container_name: prometheus
    user: root
    volumes:
      - ./prometheus.yaml:/etc/prometheus/prometheus.yaml
      - ./prometheus_data:/prometheus
    command:
      - '--config.file=/etc/prometheus/prometheus.yaml'
    ports:
      - '9091:9090'
    ipc: host
    restart: unless-stopped
  grafana:
    image: grafana/grafana:11.0.0
    container_name: grafana
    volumes:
      - ./grafana_data:/var/lib/grafana
      - ./grafana/dashboards:/var/lib/grafana/dashboards
      - ./grafana/provisioning:/etc/grafana/provisioning
    user: root
    environment:
      GF_SECURITY_ADMIN_PASSWORD: admin
      GF_RENDERING_CALLBACK_URL: http://grafana:3000/
      GF_LOG_FILTERS: rendering:debug
    depends_on:
      - prometheus
    ports:
      - '3000:3000'
    ipc: host
    restart: unless-stopped
  node-exporter:
    image: prom/node-exporter
    container_name: node-exporter
    volumes:
      - /proc:/host/proc:ro
      - /sys:/host/sys:ro
      - /:/rootfs:ro
    command:
      - '--path.procfs=/host/proc'
      - '--path.sysfs=/host/sys'
      - --collector.filesystem.ignored-mount-points
      - "^/(sys|proc|dev|host|etc|rootfs/var/lib/docker/containers|rootfs/var/lib/docker/overlay2|rootfs/run/docker/netns|rootfs/var/lib/docker/aufs)($$|/)"
    ports:
      - 9100:9100
    restart: always
    deploy:
      mode: global
  gaudi-exporter:
    image: vault.habana.ai/gaudi-metric-exporter/metric-exporter:1.19.2-32
    container_name: gaudi-exporter
    volumes:
      - /proc:/host/proc:ro
      - /sys:/host/sys:ro
      - /:/rootfs:ro
      - /dev:/dev
    ports:
      - 41612:41611
    restart: always
    deploy:
      mode: global
  worker-rag-agent:
    environment:
      - TELEMETRY_ENDPOINT=${TELEMETRY_ENDPOINT}
  worker-sql-agent:
    environment:
      - TELEMETRY_ENDPOINT=${TELEMETRY_ENDPOINT}
  supervisor-react-agent:
    environment:
      - TELEMETRY_ENDPOINT=${TELEMETRY_ENDPOINT}
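The override above adds OpenTelemetry tracing plus a Jaeger/Prometheus/Grafana stack on top of the base deployment; with Docker Compose, later `-f` files are merged over earlier ones. A minimal sketch, assuming the override is saved as compose.telemetry.yaml (the filename is not shown in this diff):

```bash
docker compose -f compose.yaml -f compose.telemetry.yaml up -d
# Jaeger UI:   http://localhost:16686
# Grafana:     http://localhost:3000  (admin password set to "admin" above)
# Prometheus:  http://localhost:9091  (host port mapped to container port 9090)
```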
@@ -0,0 +1,9 @@
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

services:
  supervisor-react-agent:
    environment:
      - tools=/home/user/tools/supervisor_agent_webtools.yaml
      - GOOGLE_CSE_ID=${GOOGLE_CSE_ID}
      - GOOGLE_API_KEY=${GOOGLE_API_KEY}
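The 9-line override above points the supervisor agent at the web-tools config and expects Google Programmable Search credentials. A sketch of enabling it (the override filename is an assumption):

```bash
export GOOGLE_CSE_ID=<your_search_engine_id>   # Programmable Search Engine ID
export GOOGLE_API_KEY=<your_api_key>           # API key with Custom Search enabled
docker compose -f compose.yaml -f compose.webtools.yaml up -d supervisor-react-agent
```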
141 AgentQnA/docker_compose/intel/hpu/gaudi/compose.yaml Normal file
@@ -0,0 +1,141 @@
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

services:
  worker-rag-agent:
    image: ${REGISTRY:-opea}/agent:${TAG:-latest}
    container_name: rag-agent-endpoint
    volumes:
      - ${TOOLSET_PATH}:/home/user/tools/
    ports:
      - "9095:9095"
    ipc: host
    environment:
      ip_address: ${ip_address}
      strategy: rag_agent_llama
      with_memory: false
      recursion_limit: ${recursion_limit_worker}
      llm_engine: vllm
      HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
      llm_endpoint_url: ${LLM_ENDPOINT_URL}
      model: ${LLM_MODEL_ID}
      temperature: ${temperature}
      max_new_tokens: ${max_new_tokens}
      stream: false
      tools: /home/user/tools/worker_agent_tools.yaml
      require_human_feedback: false
      RETRIEVAL_TOOL_URL: ${RETRIEVAL_TOOL_URL}
      no_proxy: ${no_proxy}
      http_proxy: ${http_proxy}
      https_proxy: ${https_proxy}
      LANGCHAIN_API_KEY: ${LANGCHAIN_API_KEY}
      LANGCHAIN_TRACING_V2: ${LANGCHAIN_TRACING_V2}
      LANGCHAIN_PROJECT: "opea-worker-agent-service"
      port: 9095

  worker-sql-agent:
    image: ${REGISTRY:-opea}/agent:${TAG:-latest}
    container_name: sql-agent-endpoint
    volumes:
      - ${WORKDIR}/GenAIExamples/AgentQnA/tests:/home/user/chinook-db # test db
    ports:
      - "9096:9096"
    ipc: host
    environment:
      ip_address: ${ip_address}
      strategy: sql_agent_llama
      with_memory: false
      db_name: ${db_name}
      db_path: ${db_path}
      use_hints: false
      recursion_limit: ${recursion_limit_worker}
      llm_engine: vllm
      HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
      llm_endpoint_url: ${LLM_ENDPOINT_URL}
      model: ${LLM_MODEL_ID}
      temperature: ${temperature}
      max_new_tokens: ${max_new_tokens}
      stream: false
      require_human_feedback: false
      no_proxy: ${no_proxy}
      http_proxy: ${http_proxy}
      https_proxy: ${https_proxy}
      port: 9096

  supervisor-react-agent:
    image: ${REGISTRY:-opea}/agent:${TAG:-latest}
    container_name: react-agent-endpoint
    depends_on:
      - worker-rag-agent
      - worker-sql-agent
    volumes:
      - ${TOOLSET_PATH}:/home/user/tools/
    ports:
      - "9090:9090"
    ipc: host
    environment:
      ip_address: ${ip_address}
      strategy: react_llama
      with_memory: true
      recursion_limit: ${recursion_limit_supervisor}
      llm_engine: vllm
      HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
      llm_endpoint_url: ${LLM_ENDPOINT_URL}
      model: ${LLM_MODEL_ID}
      temperature: ${temperature}
      max_new_tokens: ${max_new_tokens}
      stream: true
      tools: /home/user/tools/supervisor_agent_tools.yaml
      require_human_feedback: false
      no_proxy: ${no_proxy}
      http_proxy: ${http_proxy}
      https_proxy: ${https_proxy}
      LANGCHAIN_API_KEY: ${LANGCHAIN_API_KEY}
      LANGCHAIN_TRACING_V2: ${LANGCHAIN_TRACING_V2}
      LANGCHAIN_PROJECT: "opea-supervisor-agent-service"
      CRAG_SERVER: $CRAG_SERVER
      WORKER_AGENT_URL: $WORKER_AGENT_URL
      SQL_AGENT_URL: $SQL_AGENT_URL
      port: 9090
  mock-api:
    image: docker.io/aicrowd/kdd-cup-24-crag-mock-api:v0
    container_name: mock-api
    ports:
      - "8080:8000"
    ipc: host
  agent-ui:
    image: ${REGISTRY:-opea}/agent-ui:${TAG:-latest}
    container_name: agent-ui
    environment:
      host_ip: ${host_ip}
    ports:
      - "5173:8080"
    ipc: host
  vllm-service:
    image: ${REGISTRY:-opea}/vllm-gaudi:${TAG:-latest}
    container_name: vllm-gaudi-server
    ports:
      - "8086:8000"
    volumes:
      - "${MODEL_CACHE:-./data}:/data"
    environment:
      no_proxy: ${no_proxy}
      http_proxy: ${http_proxy}
      https_proxy: ${https_proxy}
      HF_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
      HABANA_VISIBLE_DEVICES: all
      OMPI_MCA_btl_vader_single_copy_mechanism: none
      LLM_MODEL_ID: ${LLM_MODEL_ID}
      VLLM_TORCH_PROFILER_DIR: "/mnt"
      VLLM_SKIP_WARMUP: true
      PT_HPU_ENABLE_LAZY_COLLECTIVES: true
    healthcheck:
      test: ["CMD-SHELL", "curl -f http://$host_ip:8086/health || exit 1"]
      interval: 10s
      timeout: 10s
      retries: 100
    runtime: habana
    cap_add:
      - SYS_NICE
    ipc: host
    command: --model $LLM_MODEL_ID --tensor-parallel-size 4 --host 0.0.0.0 --port 8000 --block-size 128 --max-num-seqs 256 --max-seq-len-to-capture 16384
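Once this stack is up, the supervisor agent is published on host port 9090 and the workers on 9095/9096; set_env.sh below points WORKER_AGENT_URL and SQL_AGENT_URL at /v1/chat/completions on those ports. A hedged smoke test (the OpenAI-style payload is an assumption inferred from those URL paths):

```bash
curl -s http://${host_ip}:9090/v1/chat/completions \
  -H "Content-Type: application/json" \
  -d '{"messages": [{"role": "user", "content": "Who sang the hit song Thriller?"}], "stream": true}'
```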
@@ -0,0 +1,10 @@
# Copyright (C) 2025 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

rm -f *.json
wget https://raw.githubusercontent.com/opea-project/GenAIEval/refs/heads/main/evals/benchmark/grafana/chatqna_megaservice_grafana.json
mv chatqna_megaservice_grafana.json agentqna_microservices_grafana.json
wget https://raw.githubusercontent.com/opea-project/GenAIEval/refs/heads/main/evals/benchmark/grafana/vllm_grafana.json
wget https://raw.githubusercontent.com/opea-project/GenAIEval/refs/heads/main/evals/benchmark/grafana/tgi_grafana.json
wget https://raw.githubusercontent.com/opea-project/GenAIEval/refs/heads/main/evals/benchmark/grafana/node_grafana.json
wget https://raw.githubusercontent.com/opea-project/GenAIEval/refs/heads/main/evals/benchmark/grafana/gaudi_grafana.json
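A sketch of running the download commands above (the script name and its location under grafana/dashboards are assumptions; it must run in the directory mounted at /var/lib/grafana/dashboards):

```bash
cd grafana/dashboards          # assumed path, mounted into the grafana container
bash download_dashboards.sh    # hypothetical name for the script above
```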
@@ -0,0 +1,14 @@
# Copyright (C) 2025 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

apiVersion: 1

providers:
  - name: 'default'
    orgId: 1
    folder: ''
    type: file
    disableDeletion: false
    updateIntervalSeconds: 10 # how often Grafana will scan for changed dashboards
    options:
      path: /var/lib/grafana/dashboards
@@ -0,0 +1,54 @@
# Copyright (C) 2025 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

# config file version
apiVersion: 1

# list of datasources that should be deleted from the database
deleteDatasources:
  - name: Prometheus
    orgId: 1

# list of datasources to insert/update depending on
# what's available in the database
datasources:
  # <string, required> name of the datasource. Required
  - name: Prometheus
    # <string, required> datasource type. Required
    type: prometheus
    # <string, required> access mode. direct or proxy. Required
    access: proxy
    # <int> org id. will default to orgId 1 if not specified
    orgId: 1
    # <string> url
    url: http://prometheus:9090
    # <string> database password, if used
    password:
    # <string> database user, if used
    user:
    # <string> database name, if used
    database:
    # <bool> enable/disable basic auth
    basicAuth: false
    # <string> basic auth username, if used
    basicAuthUser:
    # <string> basic auth password, if used
    basicAuthPassword:
    # <bool> enable/disable with credentials headers
    withCredentials:
    # <bool> mark as default datasource. Max one per org
    isDefault: true
    # <map> fields that will be converted to json and stored in json_data
    jsonData:
      httpMethod: GET
      graphiteVersion: "1.1"
      tlsAuth: false
      tlsAuthWithCACert: false
    # <string> json object of data that will be encrypted.
    secureJsonData:
      tlsCACert: "..."
      tlsClientCert: "..."
      tlsClientKey: "..."
    version: 1
    # <bool> allow users to edit datasources from the UI.
    editable: true
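With the provisioning file above in place, the datasource can be verified through Grafana's HTTP API once the container is healthy (password from GF_SECURITY_ADMIN_PASSWORD in the compose override; the admin username is Grafana's default):

```bash
curl -s -u admin:admin http://localhost:3000/api/datasources
```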
55 AgentQnA/docker_compose/intel/hpu/gaudi/prometheus.yaml Normal file
@@ -0,0 +1,55 @@
# Copyright (C) 2025 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

global:
  scrape_interval: 5s
  external_labels:
    monitor: "my-monitor"
scrape_configs:
  - job_name: "prometheus"
    static_configs:
      - targets: ["prometheus:9090"]
  - job_name: "vllm"
    metrics_path: /metrics
    static_configs:
      - targets: ["vllm-gaudi-server:8000"]
  - job_name: "tgi"
    metrics_path: /metrics
    static_configs:
      - targets: ["tgi-gaudi-server:80"]
  - job_name: "tei-embedding"
    metrics_path: /metrics
    static_configs:
      - targets: ["tei-embedding-server:80"]
  - job_name: "tei-reranking"
    metrics_path: /metrics
    static_configs:
      - targets: ["tei-reranking-server:80"]
  - job_name: "retriever"
    metrics_path: /metrics
    static_configs:
      - targets: ["retriever:7000"]
  - job_name: "dataprep-redis-service"
    metrics_path: /metrics
    static_configs:
      - targets: ["dataprep-redis-service:5000"]
  - job_name: "prometheus-node-exporter"
    metrics_path: /metrics
    static_configs:
      - targets: ["node-exporter:9100"]
  - job_name: "prometheus-gaudi-exporter"
    metrics_path: /metrics
    static_configs:
      - targets: ["gaudi-exporter:41611"]
  - job_name: "supervisor-react-agent"
    metrics_path: /metrics
    static_configs:
      - targets: ["react-agent-endpoint:9090"]
  - job_name: "worker-rag-agent"
    metrics_path: /metrics
    static_configs:
      - targets: ["rag-agent-endpoint:9095"]
  - job_name: "worker-sql-agent"
    metrics_path: /metrics
    static_configs:
      - targets: ["sql-agent-endpoint:9096"]
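A quick way to confirm every job above is being scraped, using the Prometheus HTTP API (host port 9091 maps to the container's 9090 in the telemetry override):

```bash
curl -s http://localhost:9091/api/v1/targets \
  | jq '.data.activeTargets[] | {job: .labels.job, health: .health}'
```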
72 AgentQnA/docker_compose/intel/hpu/gaudi/set_env.sh Normal file
@@ -0,0 +1,72 @@
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

pushd "../../../../../" > /dev/null
source .set_env.sh
popd > /dev/null
WORKPATH=$(dirname "$PWD")/..
# export WORKDIR=$WORKPATH/../../
if [[ -z "${WORKDIR}" ]]; then
    echo "Please set the WORKDIR environment variable."
    exit 1
fi
echo "WORKDIR=${WORKDIR}"
export ip_address=$(hostname -I | awk '{print $1}')

# LLM related environment variables
export HF_CACHE_DIR=${HF_CACHE_DIR}
ls $HF_CACHE_DIR
export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN}
export HF_TOKEN=${HUGGINGFACEHUB_API_TOKEN}
export LLM_MODEL_ID="meta-llama/Llama-3.3-70B-Instruct"
export NUM_SHARDS=4
export LLM_ENDPOINT_URL="http://${ip_address}:8086"
export temperature=0
export max_new_tokens=4096

# agent related environment variables
export TOOLSET_PATH=$WORKDIR/GenAIExamples/AgentQnA/tools/
echo "TOOLSET_PATH=${TOOLSET_PATH}"
export recursion_limit_worker=12
export recursion_limit_supervisor=10
export WORKER_AGENT_URL="http://${ip_address}:9095/v1/chat/completions"
export SQL_AGENT_URL="http://${ip_address}:9096/v1/chat/completions"
export RETRIEVAL_TOOL_URL="http://${ip_address}:8889/v1/retrievaltool"
export CRAG_SERVER=http://${ip_address}:8080

export db_name=Chinook
export db_path="sqlite:////home/user/chinook-db/Chinook_Sqlite.sqlite"
if [ ! -f $WORKDIR/GenAIExamples/AgentQnA/tests/Chinook_Sqlite.sqlite ]; then
    echo "Downloading Chinook_Sqlite!"
    wget -O $WORKDIR/GenAIExamples/AgentQnA/tests/Chinook_Sqlite.sqlite https://github.com/lerocha/chinook-database/releases/download/v1.4.5/Chinook_Sqlite.sqlite
fi

# configure agent ui
# echo "AGENT_URL = 'http://$ip_address:9090/v1/chat/completions'" | tee ${WORKDIR}/GenAIExamples/AgentQnA/ui/svelte/.env

# retriever
export host_ip=$(hostname -I | awk '{print $1}')
export no_proxy=${no_proxy}
export http_proxy=${http_proxy}
export https_proxy=${https_proxy}
export EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5"
export RERANK_MODEL_ID="BAAI/bge-reranker-base"
export TEI_EMBEDDING_ENDPOINT="http://${host_ip}:6006"
export TEI_RERANKING_ENDPOINT="http://${host_ip}:8808"
export REDIS_URL="redis://${host_ip}:6379"
export INDEX_NAME="rag-redis"
export RERANK_TYPE="tei"
export MEGA_SERVICE_HOST_IP=${host_ip}
export EMBEDDING_SERVICE_HOST_IP=${host_ip}
export RETRIEVER_SERVICE_HOST_IP=${host_ip}
export RERANK_SERVICE_HOST_IP=${host_ip}
export BACKEND_SERVICE_ENDPOINT="http://${host_ip}:8889/v1/retrievaltool"
export DATAPREP_SERVICE_ENDPOINT="http://${host_ip}:6007/v1/dataprep/ingest"
export DATAPREP_GET_FILE_ENDPOINT="http://${host_ip}:6008/v1/dataprep/get"
export DATAPREP_DELETE_FILE_ENDPOINT="http://${host_ip}:6009/v1/dataprep/delete"
# Set OpenTelemetry Tracing Endpoint
export JAEGER_IP=$(ip route get 8.8.8.8 | grep -oP 'src \K[^ ]+')
export OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=grpc://$JAEGER_IP:4317
export TELEMETRY_ENDPOINT=http://$JAEGER_IP:4318/v1/traces

export no_proxy="$no_proxy,rag-agent-endpoint,sql-agent-endpoint,react-agent-endpoint,agent-ui,vllm-gaudi-server,jaeger,grafana,prometheus,node-exporter,gaudi-exporter,127.0.0.1,localhost,0.0.0.0,$host_ip,$JAEGER_IP"
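The script assumes WORKDIR points at a workspace with GenAIExamples checked out beneath it. A sketch of that layout (the repository URL is assumed from the project's GitHub organization):

```bash
export WORKDIR=$HOME/opea
mkdir -p $WORKDIR && cd $WORKDIR
git clone https://github.com/opea-project/GenAIExamples.git
cd GenAIExamples/AgentQnA/docker_compose/intel/hpu/gaudi
source set_env.sh
```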
30 AgentQnA/docker_compose/intel/hpu/gaudi/tgi_gaudi.yaml Normal file
@@ -0,0 +1,30 @@
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

services:
  tgi-server:
    image: ghcr.io/huggingface/tgi-gaudi:2.3.1
    container_name: tgi-server
    ports:
      - "8085:80"
    volumes:
      - ${HF_CACHE_DIR}:/data
    environment:
      no_proxy: ${no_proxy}
      http_proxy: ${http_proxy}
      https_proxy: ${https_proxy}
      HUGGING_FACE_HUB_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
      HF_HUB_DISABLE_PROGRESS_BARS: 1
      HF_HUB_ENABLE_HF_TRANSFER: 0
      HABANA_VISIBLE_DEVICES: all
      OMPI_MCA_btl_vader_single_copy_mechanism: none
      PT_HPU_ENABLE_LAZY_COLLECTIVES: true
      ENABLE_HPU_GRAPH: true
      LIMIT_HPU_GRAPH: true
      USE_FLASH_ATTENTION: true
      FLASH_ATTENTION_RECOMPUTE: true
    runtime: habana
    cap_add:
      - SYS_NICE
    ipc: host
    command: --model-id ${LLM_MODEL_ID} --max-input-length 4096 --max-total-tokens 8192 --sharded true --num-shard ${NUM_SHARDS}
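tgi_gaudi.yaml is an alternative serving backend to the vllm-service in compose.yaml, publishing TGI on host port 8085. A hedged check once the model has loaded, using TGI's standard generate endpoint:

```bash
curl -s http://localhost:8085/generate \
  -H "Content-Type: application/json" \
  -d '{"inputs": "What is deep learning?", "parameters": {"max_new_tokens": 32}}'
```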
31 AgentQnA/docker_image_build/build.yaml Normal file
@@ -0,0 +1,31 @@
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

services:
  agent:
    build:
      context: GenAIComps
      dockerfile: comps/agent/src/Dockerfile
      args:
        http_proxy: ${http_proxy}
        https_proxy: ${https_proxy}
        no_proxy: ${no_proxy}
    image: ${REGISTRY:-opea}/agent:${TAG:-latest}
  agent-ui:
    build:
      context: ../ui
      dockerfile: ./docker/Dockerfile
    extends: agent
    image: ${REGISTRY:-opea}/agent-ui:${TAG:-latest}
  vllm-gaudi:
    build:
      context: vllm-fork
      dockerfile: Dockerfile.hpu
    extends: agent
    image: ${REGISTRY:-opea}/vllm-gaudi:${TAG:-latest}
  vllm-rocm:
    build:
      context: GenAIComps
      dockerfile: comps/third_parties/vllm/src/Dockerfile.amd_gpu
    extends: agent
    image: ${REGISTRY:-opea}/vllm-rocm:${TAG:-latest}
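build.yaml drives the image builds through Compose's build support; the GenAIComps and vllm-fork contexts must be cloned next to it first. A sketch (the vllm-fork source is assumed to be HabanaAI's Gaudi fork; only build the services you need):

```bash
cd $WORKDIR/GenAIExamples/AgentQnA/docker_image_build
git clone https://github.com/opea-project/GenAIComps.git   # context for agent and vllm-rocm
git clone https://github.com/HabanaAI/vllm-fork.git        # context for vllm-gaudi
docker compose -f build.yaml build agent agent-ui vllm-gaudi
```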
27 AgentQnA/example_data/test_docs_music.jsonl Normal file
@@ -0,0 +1,27 @@
{"query": "who sang the hit song \"thriller\"?", "domain": "music", "doc": "Thriller (song) - Wikipedia\nJump to content\nMain menu\nMain menu\nmove to sidebar\nhide\nNavigation\nMain pageContentsCurrent eventsRandom articleAbout WikipediaContact usDonate\nContribute\nHelpLearn to editCommunity portalRecent changesUpload file\nSearch\nSearch\nCreate account\nLog in\nPersonal tools\nCreate account Log in\nPages for logged out editors learn more\nContributionsTalk\nContents\nmove to sidebar\nhide\n(Top)\n1Composition\n2Writing\n3Recording\n4Release\n5Music video\n6Chart performance\n7Critical reception\n8Personnel\n9Charts\nToggle Charts subsection\n9.1Weekly charts\n9.2Year-end charts\n10Certifications\n11See also\n12References\nToggle the table of contents\nThriller (song)\n33 languages\n\u0627\u0644\u0639\u0631\u0628\u064a\u0629Az\u0259rbaycancaDanskDeutsch\u0395\u03bb\u03bb\u03b7\u03bd\u03b9\u03ba\u03acEspa\u00f1ol\u0641\u0627\u0631\u0633\u06ccFran\u00e7aisGalego\ud55c\uad6d\uc5b4HrvatskiItaliano\u05e2\u05d1\u05e8\u05d9\u05ea\u10e5\u10d0\u10e0\u10d7\u10e3\u10da\u10d8KiswahiliMagyar\u0d2e\u0d32\u0d2f\u0d3e\u0d33\u0d02Nederlands\u65e5\u672c\u8a9eNorsk bokm\u00e5lPolskiPortugu\u00eas\u0420\u0443\u0441\u0441\u043a\u0438\u0439ShqipSimple English\u0421\u0440\u043f\u0441\u043a\u0438 / srpskiSuomiSvenska\u0ba4\u0bae\u0bbf\u0bb4\u0bcd\u0e44\u0e17\u0e22T\u00fcrk\u00e7e\u0423\u043a\u0440\u0430\u0457\u043d\u0441\u044c\u043a\u0430Ti\u1ebfng Vi\u1ec7t\nEdit links\nArticleTalk\nEnglish\nReadEditView history\nTools\nTools\nmove to sidebar\nhide\nActions\nReadEditView history\nGeneral\nWhat links hereRelated changesUpload fileSpecial pagesPermanent linkPage informationCite this pageGet shortened URLDownload QR codeWikidata item\nPrint/export\nDownload as PDFPrintable version\nFrom Wikipedia, the free encyclopedia\n1983 single by Michael Jackson\nFor other songs, see Thriller (disambiguation) \u00a7\u00a0Music.\n\"Thriller\"US 12-inch singleSingle by Michael Jacksonfrom the album\nThriller B-side\"Things I Do for You\"Released\nNovember\u00a01983\u00a0(1983-11) (UK)[1]\nJanuary\u00a023,\u00a01984\u00a0(1984-01-23) (US)[2]\nRecorded1982StudioWestlake (Los Angeles, California)Genre\nDisco\nfunk\nLength\n5:57 (album version)\n4:37 (special edit)\n4:05 (remixed short version)\n5:04 (\"Starlight\" version)\nLabelEpicSongwriter(s)Rod TempertonProducer(s)Quincy JonesMichael Jackson singles chronology\n\"Say Say Say\" (1983)\n\"Thriller\" (1983)\n\"Farewell My Summer Love\" (1984)\nMusic video\"Thriller\" on YouTube"}
{"query": "who sang the hit song \"thriller\"?", "domain": "music", "doc": "Recorded1982StudioWestlake (Los Angeles, California)Genre\nDisco\nfunk\nLength\n5:57 (album version)\n4:37 (special edit)\n4:05 (remixed short version)\n5:04 (\"Starlight\" version)\nLabelEpicSongwriter(s)Rod TempertonProducer(s)Quincy JonesMichael Jackson singles chronology\n\"Say Say Say\" (1983)\n\"Thriller\" (1983)\n\"Farewell My Summer Love\" (1984)\nMusic video\"Thriller\" on YouTube\n\"Thriller\" is a song by the American singer Michael Jackson. It was released by Epic Records in November 1983 in the UK and on January 23, 1984, in the US, as the seventh and final single from his sixth studio album, Thriller.[3]\n\"Thriller\" is a funk song featuring a repeating synthesizer bassline and lyrics and sound effects evoking horror films. It ends with a spoken-word sequence performed by the horror actor Vincent Price. It was produced by Quincy Jones and written by Rod Temperton, who wanted to write a theatrical song to suit Jackson's love of film.\nJackson decided to release \"Thriller\" as a single after Thriller left the top of the Billboard 200 chart. The\n\"Thriller\" music video, directed by John Landis, has Jackson dancing with a horde of zombies. It has been named the greatest music video of all time by various publications and readers' polls, and doubled sales of Thriller, helping it become the best-selling album in history."}
{"query": "who sang the hit song \"thriller\"?", "domain": "music", "doc": "Jackson decided to release \"Thriller\" as a single after Thriller left the top of the Billboard 200 chart. The\n\"Thriller\" music video, directed by John Landis, has Jackson dancing with a horde of zombies. It has been named the greatest music video of all time by various publications and readers' polls, and doubled sales of Thriller, helping it become the best-selling album in history.\nIt was the album's seventh top-ten single on the Billboard Hot 100, reaching number four. It reached number one in Belgium, France and Spain, and the top ten in many other countries. In the week of Jackson's death in 2009, it was Jackson's bestselling track in the US, with sales of 167,000 copies on the Billboard Hot Digital Tracks chart. It entered the Billboard Hot Digital Singles Chart at number two, and remained in the charts' top ten for three consecutive weeks. \"Thriller\" is certified Diamond by the Recording Industry Association of America. It appears on several of Jackson's greatest-hits albums and has been covered by numerous artists. The song has returned to the Billboard Hot 100 chart multiple times due to its popularity around Halloween.\nComposition[edit]\n\"Thriller\"\nJackson's song \"Thriller\", released as a single in 1984; Nelson George wrote that it uses cinematic sound effects, horror film motifs, and vocal trickery to convey a sense of danger.[4]\nProblems playing this file? See media help.\n\"Thriller\" is a disco-funk song[5] The introduction features sound effects such as a creaking door, thunder, feet walking on wooden planks, winds and howling wolves.[6]\nWriting[edit]\nHorror actor Vincent Price provided the spoken-word sequence at the end of \"Thriller\"."}
{"query": "who sang the hit song \"thriller\"?", "domain": "music", "doc": "Problems playing this file? See media help.\n\"Thriller\" is a disco-funk song[5] The introduction features sound effects such as a creaking door, thunder, feet walking on wooden planks, winds and howling wolves.[6]\nWriting[edit]\nHorror actor Vincent Price provided the spoken-word sequence at the end of \"Thriller\".\n\"Thriller\" was written by the English songwriter Rod Temperton, who had previously written \"Rock with You\" and \"Off the Wall\" for Jackson's 1979 album Off the Wall.[7] Temperton wanted to write something theatrical to suit Jackson's love of film.[8] He improvised with bass and drum patterns until he developed the bassline that runs through the song, then wrote a chord progression that built to a climax.[8] He recalled: \"I wanted it to build and build \u2013 a bit like stretching an elastic band throughout the tune to heighten suspense.\"[8]\nTemperton's first version was titled \"Starlight\", with the chorus lyric: \"Give me some starlight / Starlight sun\".[9] The production team, led by Quincy Jones, felt the song should be the title track, but that \"Starlight\" was not a strong album title. Instead, they wanted something \"mysterious\" to match Jackson's \"evolving persona\".[8] Temperton considered several titles, including \"Midnight Man\", which Jones felt was \"going in the right direction\". Finally, he conceived \"Thriller\", but worried that it was \"a crap word to sing ... It sounded terrible! However, we got Michael to spit it into the microphone a few times and it worked.\"[8]\nWith the title decided, Temperton wrote lyrics within \"a couple of hours\".[8] He envisioned a spoken-word sequence for the ending, but did not know what form it should take. It was decided to have a famous voice from the horror genre perform it, and Jones' then-wife, Peggy Lipton, suggested her friend Vincent Price.[6] Temperton composed the words for Price's part in a taxi on the way to the studio on the day of recording.[6]\nRecording[edit]\nQuincy Jones produced \"Thriller\"."}
{"query": "who sang the hit song \"thriller\"?", "domain": "music", "doc": "Recording[edit]\nQuincy Jones produced \"Thriller\".\nAlong with the rest of the album, \"Thriller\" was recorded over eight weeks in 1982.[10] It was recorded at Westlake Recording Studios on Santa Monica Boulevard in Los Angeles, California.[6] The engineer Bruce Swedien had Jackson record his vocals in different approaches, doubling takes and recording at different distances from the microphone. Some background vocals were recorded in the Westlake shower stall.[6]\nThe bassline was performed on an ARP 2600 synthesizer, and the verse pads were performed on a Roland Jupiter-8 layered with a Sequential Circuits Prophet 5 and a Yamaha CS-80.[11] The percussion was created with a LinnDrum drum machine modified with sound chips from two other drum machines: a snare hi-hat and congas from an LM-1 and a clap from a TR-808. \"Thriller\" also features Rhodes piano performed by Greg Phillinganes and guitar performed by David Williams.[12]\nTo record the wolf howls, Swedien set up tape recorders up around his Great Dane in a barn overnight, but the dog never howled. Instead, Jackson recorded the howls himself.[13] For the creaking doors, Swedien rented doors designed for sound effects from the Universal Studios Lot and recorded the hinges.[13] Price recorded his part in two takes; Jones, acknowledging that doing a voice-over for a song is difficult, praised Price and described his takes as \"fabulous\".[6]\nRelease[edit]\nThe album Thriller was released in November 1982 on Epic Records and spent months at the top of the Billboard 200.[14] \"Thriller\" was not initially planned for release as a single, as Epic saw it as a novelty song.[15] The Epic executive Walter Yetnikoff asked: \"Who wants a single about monsters?\"[14]"}
{"query": "who sang the hit song \"thriller\"?", "domain": "music", "doc": "Release[edit]\nThe album Thriller was released in November 1982 on Epic Records and spent months at the top of the Billboard 200.[14] \"Thriller\" was not initially planned for release as a single, as Epic saw it as a novelty song.[15] The Epic executive Walter Yetnikoff asked: \"Who wants a single about monsters?\"[14]\nBy mid-1983, sales of the album had begun to decline. Jackson, who was \"obsessive\" about his sales figures,[14] urged Yetnikoff and another Epic executive, Larry Stessel, to help conceive a plan to return the album to the top of the charts. Jackson's manager Frank DiLeo suggested releasing \"Thriller\", backed by a new music video.[14][16] It was the final single from the album, released in January 1984.[15]\nAlternative versions of \"Thriller\", including the \"Starlight\" demo, were released on the anniversary reissue Thriller 40 (2022).[17]\nMusic video[edit]\nMain article: Michael Jackson's Thriller (music video)\nThe music video for \"Thriller\" references numerous horror films,[14] and stars Jackson performing a dance routine with a horde of the undead.[14] It was directed by the horror director John Landis and written by Landis and Jackson. Jackson contacted Landis after seeing his film An American Werewolf in London. The pair conceived a 13-minute short film with a budget much larger than previous music videos. Jackson's record company refused to finance it, believing Thriller had peaked, so a making-of documentary, Making Michael Jackson's Thriller, was produced to receive financing from television networks.[14]\nMichael Jackson's Thriller premiered on MTV on December 2, 1983.[18] It was launched to great anticipation and played regularly on MTV.[18]\nIt doubled sales of Thriller, and the documentary sold over a million copies, becoming the best-selling videotape at the time.[14] It is credited for transforming music videos into a serious art form, breaking down racial barriers in popular entertainment, and popularizing the making-of documentary format.[19]"}
{"query": "who sang the hit song \"thriller\"?", "domain": "music", "doc": "It doubled sales of Thriller, and the documentary sold over a million copies, becoming the best-selling videotape at the time.[14] It is credited for transforming music videos into a serious art form, breaking down racial barriers in popular entertainment, and popularizing the making-of documentary format.[19]\nMany elements have had a lasting impact on popular culture, such as the zombie dance and Jackson's red jacket, designed by Landis' wife Deborah Nadoolman.[19] Fans worldwide re-enact its zombie dance and it remains popular on YouTube. The Library of Congress described it as \"the most famous music video of all time\". In 2009, it became the first music video inducted into the National Film Registry as \"culturally, historically or aesthetically\" significant.[14]\nChart performance[edit]\n\"Thriller\" entered the Billboard Hot 100 charts at number 20.[20] It reached number seven the following week,[21] number five the next, and peaked the next week at number four, where it stayed for two weeks.[22][23] It finished as the #78 single on Billboard's Hot 100 for the 1984.[24]\n\"Thriller\" charted at number 19 on the Hot R&B/Hip-Hop Songs Chart.[25] On March 10, 1984, it reached its peak at number 3.[26] \"Thriller\" debuted on the UK Singles Chart on November 19, 1983, at number 24, and the following week peaked at number ten; it appeared on the chart for 52 weeks.[27] Beginning on February 5, 1984, \"Thriller\" peaked on the French Singles Chart at number one and topped the chart for four consecutive weeks.[28] \"Thriller\" also topped the Belgian VRT Top 30 Chart for two weeks in January 1984.[29]"}
{"query": "who sang the hit song \"thriller\"?", "domain": "music", "doc": "Following Jackson's death in 2009, his music surged in popularity.[30] In the week of his death, \"Thriller\" was Jackson's best-selling track in the US, with sales of 167,000 copies on the Billboard Hot Digital Singles Chart.[30] On July 11, 2009, \"Thriller\" charted on the Billboard Hot Digital Singles Chart at number two (its peak), and the song remained in the charts' top ten for three consecutive weeks.[31] In the United Kingdom, the song charted at number 23 the week of Jackson's death.[32] The following week, the song reached its peak at number 12 on the UK Single Chart.[27] On July 12, 2009, \"Thriller\" peaked at number two on the Italian Singles Chart[33] and was later certified gold by the Federation of the Italian Music Industry.[34] \"Thriller\" reached at number three on the Australian ARIA Chart and Swiss Singles Chart and topped the Spanish Singles Charts for one week.[35] The song also placed within the top ten on the German Singles Chart, Norwegian Singles Chart and Irish Singles Chart, at number nine, number seven and number eight respectively.[35] \"Thriller\" also landed at number 25 on the Danish Singles Chart.[36] In the third week of July \"Thriller\" peaked at number 11 in Finland.[37]\n\"Thriller\"\nhas returned to the Billboard Hot 100 chart multiple times due to its popularity around Halloween. It re-entered the Billboard Hot 100 in October 2013 at number 42,[38] number 31 in November 2018,[39] and number 19 in November 2021, its highest placement since 1984.[40] This gave Jackson at least one top-20 hit across seven consecutive decades from 1969 on the Billboard Hot 100.[40]"}
{"query": "who sang the hit song \"thriller\"?", "domain": "music", "doc": "\"Thriller\"\nhas returned to the Billboard Hot 100 chart multiple times due to its popularity around Halloween. It re-entered the Billboard Hot 100 in October 2013 at number 42,[38] number 31 in November 2018,[39] and number 19 in November 2021, its highest placement since 1984.[40] This gave Jackson at least one top-20 hit across seven consecutive decades from 1969 on the Billboard Hot 100.[40]\n\"Thriller\" was certified platinum by the Recording Industry Association of America on December 4, 1989, for sales of over one million physical units in the US[41][42] As of August 2016, the song had sold 4,024,398 copies in the US.[43] The song was later certified Diamond by RIAA for sales over 10 million equivalent-units.[44][45] \"Thriller\" reached number one on three different Billboard charts the week of November 8, 2023, more than a decade after Jackson's death. Those charts included: R&B/Hip-Hop Streaming Songs, R&B Streaming Songs and R&B Digital Song Sales charts.[46]\nCritical reception[edit]\nAshley Lasimone, of AOL's Spinner.com, noted that it \"became a signature for Jackson\" and described \"the groove of its bassline, paired with Michael's killer vocals and sleek moves\" as having \"produced a frighteningly great single.\"[47] Jon Pareles of The New York Times noted that \"'Billie Jean', 'Beat It', 'Wanna Be Startin' Somethin' ' and \"the movie in the song 'Thriller'\", were the songs, unlike the \"fluff\" \"P.Y.T.\", that were \"the hits that made Thriller a world-beater; along with Mr. Jackson's stage and video presence, listeners must have identified with his willingness to admit terror.\"[48] Ann Powers of the Los Angeles Times described \"Thriller\" as \"adequately groovy\" with a \"funked-out beat\" and lyrics \"seemingly lifted from some little kid's 'scary storybook'\".[49][50]\nPersonnel[edit]\nWritten and composed by Rod Temperton\nProduced by Quincy Jones\nMichael Jackson: lead and background vocals, LinnDrum drum machine\nRod Temperton and Brian Banks: synthesizers"}
{"query": "who sang the hit song \"thriller\"?", "domain": "music", "doc": "Personnel[edit]\nWritten and composed by Rod Temperton\nProduced by Quincy Jones\nMichael Jackson: lead and background vocals, LinnDrum drum machine\nRod Temperton and Brian Banks: synthesizers\nGreg Phillinganes: synthesizers, Rhodes piano\nAnthony Marinelli: synthesizer programming\nDavid Williams: guitar\nJerry Hey, Gary Grant: trumpets, flugelhorns\nLarry Williams: saxophone, flute\nBill Reichenbach: trombone\nVocal, rhythm and synthesizer arrangement by Rod Temperton\nHorn arrangement by Jerry Hey\nEffects by Bruce Cannon and Bruce Swedien\nFeaturing: Narration by Vincent Price (Not featured on original edited single version)\nCharts[edit]\nWeekly charts[edit]\nChart (1983\u20131985)\nPeakposition\nAustralia (Kent Music Report)[51]\n4\nBelgium (Ultratop 50 Flanders)[52]\n1\nCanadian RPM Top Singles[53]\n3\nFinland (Suomen virallinen singlelista)[54]\n7\nFinland Jukebox (Suomen virallinen singlelista)[54]\n3\nFrance (SNEP)[28]\n1\nIreland (IRMA)[55]\n4\nNetherlands (Dutch Top 40)[56]\n3\nNetherlands (Single Top 100)[57]\n4\nNew Zealand (Recorded Music NZ)[58]\n6\nPortugal (AFP)[59]\n1\nSouth Africa (Springbok)[60]\n26\nSpain (AFYVE)[61]\n1\nUK Singles (OCC)[27]\n10\nUS Cashbox[62]\n4\nUS Billboard Hot 100[63]\n4\nUS Billboard Hot Black Singles[64][26]\n3\nUS Billboard Adult Contemporary[65]\n24\nUS Billboard Album Rock Tracks[64][26]\n42\nUS Radio & Records CHR/Pop Airplay Chart[66]\n1\nWest Germany (Official German Charts)[67]\n9\nChart (2006)\nPeakposition\nFrance (SNEP)[68]\n35\nGermany (Media Control Charts)[35]\n9\nIreland (IRMA)[55]\n8\nItaly (FIMI)[69]\n5\nNetherlands (Single Top 100)[57]\n34\nSpain (PROMUSICAE)[35]\n1\nSwitzerland (Schweizer Hitparade)[35]\n3\nChart (2007)\nPeakposition\nSpain (PROMUSICAE)[70]\n20\nUK Singles (OCC)[27]\n57\nChart (2008)\nPeakposition\nAustria (\u00d63 Austria Top 40)[71]\n55\nNorway (VG-lista)[72]\n13\nSwitzerland (Schweizer Hitparade)[73]\n53\nUK Singles (OCC)[27]\n35\nChart (2009)\nPeakposition\nAustralia (ARIA)[74]\n3\nAustria (\u00d63 Austria Top 40)[71]\n5\nBelgium (Ultratop 50 Back Catalogue Singles Flanders)[75]\n3"}
{"query": "who sang the hit song \"thriller\"?", "domain": "music", "doc": "3\nChart (2007)\nPeakposition\nSpain (PROMUSICAE)[70]\n20\nUK Singles (OCC)[27]\n57\nChart (2008)\nPeakposition\nAustria (\u00d63 Austria Top 40)[71]\n55\nNorway (VG-lista)[72]\n13\nSwitzerland (Schweizer Hitparade)[73]\n53\nUK Singles (OCC)[27]\n35\nChart (2009)\nPeakposition\nAustralia (ARIA)[74]\n3\nAustria (\u00d63 Austria Top 40)[71]\n5\nBelgium (Ultratop 50 Back Catalogue Singles Flanders)[75]\n3\nBelgium (Ultratop 30 Back Catalogue Singles Wallonia)[76]\n2\nDenmark (Tracklisten)[36]\n25\nEurope (European Hot 100 Singles)[77]\n16\nFinland (Suomen virallinen lista)[78]\n11\nFrance (SNEP)[79]\n3\nIreland (IRMA)[35]\n8\nItaly (FIMI)[69]\n2\nJapan Singles Top 100 (Oricon)[35]\n41\nNetherlands (Single Top 100)[57]\n9\nNew Zealand (RIANZ)[35]\n12\nNorway (VG-lista)[72]\n7\nSpain (PROMUSICAE)[70]\n1\nSweden (Sverigetopplistan)[80]\n10\nSwitzerland (Schweizer Hitparade)[73]\n3\nUK Singles (OCC)[27]\n12\nUS Digital Song Sales (Billboard)[81]\n2\nChart (2010)\nPeakposition\nSpain (PROMUSICAE)[70]\n12\nSwitzerland (Schweizer Hitparade)[73]\n68\nUK Singles (OCC)[27]\n68\nChart (2012)\nPeakposition\nFrance (SNEP)[68]\n143\nIreland (IRMA)[55]\n30\nUK Singles (OCC)[27]\n49\nChart (2013)\nPeakposition\nFrance (SNEP)[68]\n159\nUK Singles (OCC)[27]\n48\nUS Billboard Hot 100[82]\n42\nChart (2014)\nPeakposition\nFrance (SNEP)[68]\n152\nSpain (PROMUSICAE)[70]\n38\nUK Singles (OCC)[27]\n57\nUS Billboard Hot 100[83]\n35\nChart (2015)\nPeakposition\nFrance (SNEP)[68]\n145\nSpain (PROMUSICAE)[70]\n48\nUK Singles (OCC)[27]\n61\nUS Billboard Hot 100[84]\n45\nChart (2016)\nPeakposition\nFrance (SNEP)[68]\n164\nUK Singles (OCC)[27]\n62\nChart (2017)\nPeakposition\nFrance (SNEP)[68]\n46\nSpain (PROMUSICAE)[70]\n32\nUK Singles (OCC)[27]\n34\nChart (2018)\nPeakposition\nCanada (Canadian Hot 100)[85]\n25\nUK Singles (OCC)[27]\n63\nUS Billboard Hot 100[86][87]\n31\nChart (2019)\nPeakposition\nUS Billboard Hot 100[88]\n44\nChart (2020)\nPeakposition\nGlobal 200[89]\n51\nUK Singles (OCC)[27]\n57\nUS Billboard Hot 100[90]\n48\nChart (2021)\nPeakposition\nCanada (Canadian Hot 100)[91]\n16\nGlobal 200 (Billboard)[92]\n28\nUK Singles (OCC)[93]\n40"}
{"query": "who sang the hit song \"thriller\"?", "domain": "music", "doc": "34\nChart (2018)\nPeakposition\nCanada (Canadian Hot 100)[85]\n25\nUK Singles (OCC)[27]\n63\nUS Billboard Hot 100[86][87]\n31\nChart (2019)\nPeakposition\nUS Billboard Hot 100[88]\n44\nChart (2020)\nPeakposition\nGlobal 200[89]\n51\nUK Singles (OCC)[27]\n57\nUS Billboard Hot 100[90]\n48\nChart (2021)\nPeakposition\nCanada (Canadian Hot 100)[91]\n16\nGlobal 200 (Billboard)[92]\n28\nUK Singles (OCC)[93]\n40\nUK Hip Hop/R&B (OCC)[94]\n3\nUS Billboard Hot 100[95][96]\n19\nUS Billboard Digital Songs Sales[97]\n9\nChart (2022)\nPeakposition\nCanada (Canadian Hot 100)[98]\n25\nGlobal 200[99]\n37\nUK Singles (OCC)[27]\n41\nUS Billboard Hot 100[100]\n26\nChart (2023)\nPeakposition\nCanada (Canadian Hot 100)[101]\n22\nGlobal 200[102]\n39\nUK Singles (OCC)[103]\n20\nUS Billboard Hot 100[104]\n21\nYear-end charts[edit]\nChart (1984)\nPosition\nAustralia (Kent Music Report)[105]\n17\nBelgium (Ultratop Flanders)[106]\n26\nUS Billboard Hot 100[24]\n78\nChart (2009)\nPosition\nSweden (Sverigetopplistan)[107]\n88\nSwitzerland (Schweizer Hitparade)[108]\n81\nUK Singles (Official Charts Company)[109]\n143\nCertifications[edit]\nRegion\nCertification\nCertified units/sales\nAustralia (ARIA)[110]\n6\u00d7 Platinum\n420,000\u2021\nDenmark (IFPI Danmark)[111]\nPlatinum\n90,000\u2021\nFrance (SNEP)[112]\nPlatinum\n1,000,000*\nGermany (BVMI)[113]\nGold\n250,000\u2021\nItaly (FIMI)[114]\nPlatinum\n30,000\u2021\nJapan (RIAJ)[115] Full-length ringtone\nPlatinum\n250,000*\nMexico (AMPROFON)[116]\n4\u00d7 Platinum+Gold\n270,000\u2021\nSpain (PROMUSICAE)[117]\n2\u00d7 Platinum\n100,000*\nUnited Kingdom (BPI)[118] Digital sales since 2004\n2\u00d7 Platinum\n1,200,000\u2021\nUnited Kingdom (BPI)[119] other release\nGold\n500,000\u2021\nUnited States (RIAA)[120]\nDiamond\n10,000,000\u2021\nUnited States (RIAA)[121] Mastertone\nGold\n500,000*\n* Sales figures based on certification alone.\u2021 Sales+streaming figures based on certification alone.\nSee also[edit]\nList of best-selling singles\nList of best-selling singles in the United States\nList of most expensive music videos\nMichael Jackson's Thriller\nThriller (viral video)\nThrill the World\nReferences[edit]"}
{"query": "who sang the hit song \"thriller\"?", "domain": "music", "doc": "Diamond\n10,000,000\u2021\nUnited States (RIAA)[121] Mastertone\nGold\n500,000*\n* Sales figures based on certification alone.\u2021 Sales+streaming figures based on certification alone.\nSee also[edit]\nList of best-selling singles\nList of best-selling singles in the United States\nList of most expensive music videos\nMichael Jackson's Thriller\nThriller (viral video)\nThrill the World\nReferences[edit]\n^ \"New Singles (for the week ending November 11, 1983)\" (PDF). Music Week: 30. November 5, 1983.\n^ Semigran, Aly (February 7, 2011). \"Michael Jackson's 'Thriller': Story Behind the 'Glee' Cover\". MTV. Retrieved September 17, 2023.\n^ McPhate, Tim (November 2, 2017). \"Michael Jackson's \"Thriller\": For The Record\". The Recording Academy. Retrieved November 17, 2019.\n^ George 2004, p.\u00a023.\n^ Jones, Jel D. Lewis (2005). Michael Jackson, the King of Pop: The Big Picture \u2013 The Music! The Man! The Legend! The Interviews: An Anthology. Amber Books Publishing. p.\u00a06. ISBN\u00a00-9749779-0-X. Retrieved July 22, 2010.\n^ a b c d e f Lyle, Peter (November 25, 2007). \"Michael Jackson's monster smash\". The Daily Telegraph. Archived from the original on January 12, 2022. Retrieved January 24, 2010.\n^ Kreps, Daniel (October 5, 2016). \"Rod Temperton, 'Thriller' songwriter, dead at 66\". Rolling Stone. Retrieved July 25, 2022.\n^ a b c d e f \"Revealed: the story behind Jacko's Thriller\". M magazine. October 31, 2012. Archived from the original on November 3, 2012. Retrieved October 24, 2018.\n^ Glazer, Eliot (September 25, 2009). \"Top 1984 Songs\". AOLRadioBlog.com. AOL Inc. Retrieved January 24, 2010.\n^ Vozick-Levinson, Simon (February 18, 2008). \"Quincy Jones' 'Thriller' Memories\". EW.com. Retrieved January 24, 2010.\n^ Carr, Dan (November 30, 2022). \"The synth sounds of Michael Jackson's Thriller (and how to recreate them in your DAW)\". MusicRadar. Retrieved March 19, 2023."}
{"query": "who sang the hit song \"thriller\"?", "domain": "music", "doc": "^ Glazer, Eliot (September 25, 2009). \"Top 1984 Songs\". AOLRadioBlog.com. AOL Inc. Retrieved January 24, 2010.\n^ Vozick-Levinson, Simon (February 18, 2008). \"Quincy Jones' 'Thriller' Memories\". EW.com. Retrieved January 24, 2010.\n^ Carr, Dan (November 30, 2022). \"The synth sounds of Michael Jackson's Thriller (and how to recreate them in your DAW)\". MusicRadar. Retrieved March 19, 2023.\n^ Rogerson, Ben (February 10, 2023). \"Watch Greg Phillinganes recreate Michael Jackson's Thriller using the original synths\". MusicRadar. Retrieved March 27, 2023.\n^ a b \"The making of Michael Jackson's Thriller\". MusicRadar. Retrieved October 25, 2018.\n^ a b c d e f g h i Griffin, Nancy (July 2010). \"The \"Thriller\" Diaries\". Vanity Fair. Retrieved January 2, 2011.\n^ a b Romano, Aja (October 31, 2018). \"Michael Jackson's \"Thriller\" is the eternal Halloween bop \u2014 and so much more\". Vox. Retrieved October 25, 2021.\n^ Eagan, Daniel (November 24, 2011). America's Film Legacy, 2009\u20132010: A Viewer's Guide to the 50 Landmark Movies Added To The National Film Registry in 2009\u201310. Bloomsbury Publishing. p.\u00a0175. ISBN\u00a0978-1-4411-9328-5. Retrieved May 14, 2016.\n^ Miles Marshall Lewis (November 30, 2022). \"#Thriller40: Cultural Critics Celebrate Michael Jackson's Impact\". BET.\n^ a b Richin, Leslie (December 2, 2014). \"On This Day In 1983, Michael Jackson's 'Thriller' Premiered On MTV\". Billboard.\n^ a b Hebblethwaite, Phil (November 21, 2013). \"How Michael Jackson's Thriller changed music videos for ever\". The Guardian. Retrieved October 29, 2018.\n^ \"Week of February 11, 1984\". Billboard. Nielsen Business Media, Inc. Retrieved October 10, 2015.\n^ \"Week of February 18, 1984\". Billboard. Nielsen Business Media, Inc. January 2, 2013. Retrieved October 10, 2015.\n^ \"Week of March 3, 1984\". Billboard. Nielsen Business Media, Inc. January 2, 2013. Retrieved October 10, 2015.\n^ \"Week of March 10, 1984\". Billboard. Nielsen Business Media, Inc. Retrieved October 10, 2015."}
{"query": "who sang the hit song \"thriller\"?", "domain": "music", "doc": "^ \"Week of February 18, 1984\". Billboard. Nielsen Business Media, Inc. January 2, 2013. Retrieved October 10, 2015.\n^ \"Week of March 3, 1984\". Billboard. Nielsen Business Media, Inc. January 2, 2013. Retrieved October 10, 2015.\n^ \"Week of March 10, 1984\". Billboard. Nielsen Business Media, Inc. Retrieved October 10, 2015.\n^ a b \"Billboard Top 100 \u2013 1984\". billboard. Retrieved March 29, 2020.\n^ \"Week of March 3, 1984\". Billboard. Nielsen Business Media, Inc. Archived from the original on January 21, 2010. Retrieved January 23, 2010.\n^ a b c \"Week of March 10, 1984\". Billboard. Nielsen Business Media, Inc. Retrieved January 23, 2010.\n^ a b c d e f g h i j k l m n o p \"Michael Jackson\". Official Charts Company. Retrieved October 10, 2015.\n^ a b \"Toutes les Chansons N\u00b0 1 des Ann\u00e9es 80\". Infodisc.fr. Dominic Durand / InfoDisc. Archived from the original on November 20, 2012. Retrieved January 23, 2010.\n^ \"Michael Jackson \u2013 Thriller\". Top30-3.radio2.be (in Dutch). VRT \u2013 Auguste Reyerslaan. Archived from the original on February 22, 2012. Retrieved January 24, 2010.\n^ a b Ed Christman, Antony Bruno (July 2, 2009). \"Michael Jackson Music Sales Surge Could Last For Months\". Billboard. Nielsen Business Media, Inc. Retrieved January 23, 2010.\n^ \"July 11, 2009\". Billboard. Nielsen Business Media. Retrieved January 23, 2010.\n^ \"Chart For Week Up To 04/07/2009\". Official Charts Company. Retrieved January 23, 2010.\n^ \"Thriller in Italian Chart\". Hung Medien. Retrieved June 21, 2013.\n^ \"Certificazioni Download FIMI\" (PDF) (in Italian). Federation of the Italian Music Industry. Archived from the original (PDF) on June 5, 2012. Retrieved January 2, 2012.\n^ a b c d e f g h \"Michael Jackson \u2013 Thriller \u2013 Music Charts\". Acharts.us. Retrieved January 23, 2010.\n^ a b \"Track Top 40 \u2013 July 10, 2009\". Hitlisterne.dk. IFPI Danmark & Nielsen Music Control. Archived from the original on September 30, 2011. Retrieved January 23, 2010."}
{"query": "who sang the hit song \"thriller\"?", "domain": "music", "doc": "^ a b c d e f g h \"Michael Jackson \u2013 Thriller \u2013 Music Charts\". Acharts.us. Retrieved January 23, 2010.\n^ a b \"Track Top 40 \u2013 July 10, 2009\". Hitlisterne.dk. IFPI Danmark & Nielsen Music Control. Archived from the original on September 30, 2011. Retrieved January 23, 2010.\n^ \"Thriller in Finnish Chart\". Hung Medien. Retrieved January 23, 2010.\n^ \"Eminem Debuts at No. 1 on Hot R&B/Hip-Hop Songs Chart; Michael Jackson's 'Thriller' Returns\". Billboard. November 8, 2013. Retrieved October 2, 2016.\n^ \"Top 100 Songs | Billboard Hot 100 Chart\". Billboard. Retrieved November 11, 2018.\n^ a b \"Michael Jackson's 'Thriller' Leads Halloween Treats on Billboard Hot 100\". Billboard. Retrieved November 9, 2021.\n^ \"American\nsingle\ncertifications \u2013 Michael Jackson \u2013 Thriller\". Recording Industry Association of America.\n^ \"Rock Music, etc., Terms\". Georgetown College. October 26, 1999. Archived from the original on May 18, 2011. Retrieved January 8, 2010.\n^ \"Hip Hop Single Sales: The Weeknd, Zay Hilfigerrr & Drake\". Hip Hop DX. November 13, 2016. Retrieved November 14, 2016.\n^ \"Michael Jackson's Catalogue Garners Major New Gold & Platinum Awards\". RIAA. August 23, 2018. Retrieved December 21, 2018.\n^ Appel, Rich (October 30, 2014). \"Revisionist History, Part 3: Michael Jackson Gets Revenge on Prince! Year-End Hits of the Past, Re-Analyzed\". Billboard. Prometheus Global Media. Retrieved October 30, 2014.\n^ \"Michael Jackson Scores Three No. 1 Hits On The Billboard Charts This Week\". Forbes. Retrieved November 10, 2023.\n^ Lasimone, Ashley (October 28, 2009). \"Clash of the Cover Songs: Michael Jackson vs. Imogen Heap\". Spinner.com. AOL Inc. Retrieved January 23, 2010.\n^ Pareles, Jon (September 3, 1987). \"Critic's Notebook; How Good Is Jackson's 'Bad'?\". The New York Times. Retrieved January 25, 2010.\n^ Powers, Ann (February 15, 2008). \"Nine reasons why Jackson masterpiece remains a 'Thriller'\". SouthCoastToday.com. Dow Jones Local Media Group. Retrieved February 6, 2010."}
{"query": "who sang the hit song \"thriller\"?", "domain": "music", "doc": "^ Pareles, Jon (September 3, 1987). \"Critic's Notebook; How Good Is Jackson's 'Bad'?\". The New York Times. Retrieved January 25, 2010.\n^ Powers, Ann (February 15, 2008). \"Nine reasons why Jackson masterpiece remains a 'Thriller'\". SouthCoastToday.com. Dow Jones Local Media Group. Retrieved February 6, 2010.\n^ Comstock, Miriam Marcus and Courtney. \"Thriller Chiller For Jackson\". Forbes. Retrieved November 4, 2019.\n^ Kent, David (2003). Australian Chart Book 1970\u20131992. Australian Chart Book. ISBN\u00a00-646-11917-6.\n^ \"Michael Jackson \u2013 Thriller\" (in Dutch). Ultratop 50.\nRetrieved December 14, 2021.\n^ \"Top Singles \u2013 Volume 40, No. 1, March 10, 1984\". RPM. Archived from the original on October 17, 2012. Retrieved August 3, 2010.\n^ a b Pennanen, Timo (2021). \"Michael Jackson\". Sis\u00e4lt\u00e4\u00e4 hitin - 2. laitos Levyt ja esitt\u00e4j\u00e4t Suomen musiikkilistoilla 1.1.1960\u201330.6.2021 (PDF). Helsinki: Kustannusosakeyhti\u00f6 Otava. p.\u00a0113. Retrieved May 29, 2022.\n^ a b c \"Search Results: Thriller\". IrishCharts.ie. Irish Recorded Music Association. Retrieved January 25, 2010.\n^\n\"Nederlandse Top 40 \u2013 week 2, 1984\" (in Dutch). Dutch Top 40.\n^ a b c \"Michael Jackson \u2013 Thriller\" (in Dutch). Single Top 100.\n^ \"Michael Jackson \u2013 Thriller\". Top 40 Singles.\n^ \"Top 3 in Europe\" (PDF). Music & Media. May 14, 1984. p.\u00a012. Retrieved October 29, 2021.\n^ \"SA Charts 1965\u2013March 1989\". Retrieved September 5, 2018.\n^ Salaverri, Fernando (September 2005). S\u00f3lo \u00e9xitos: a\u00f1o a a\u00f1o, 1959\u20132002 (in Spanish) (1st\u00a0ed.). Spain: Fundaci\u00f3n Autor-SGAE. ISBN\u00a084-8048-639-2.\n^ Whitburn, Joel (2014). Cash Box Pop Hits 1952-1996. Sheridan Books, Inc. ISBN\u00a0978-0-89820-209-0.\n^ \"Michael Jackson Chart History (Hot 100)\". Billboard. September 9, 2021. Retrieved September 9, 2021.\n^ a b \"Allmusic (Thriller > Charts & Awards > Billboard Singles)\". Allmusic.com. Rovi Corporation. Retrieved January 23, 2010.\n^ Whitburn, Joel (1993). Top Adult Contemporary: 1961\u20131993. Record Research. p.\u00a0118.\n^ \"Michael Jackson\"."}
{"query": "who sang the hit song \"thriller\"?", "domain": "music", "doc": "^ \"Michael Jackson Chart History (Hot 100)\". Billboard. September 9, 2021. Retrieved September 9, 2021.\n^ a b \"Allmusic (Thriller > Charts & Awards > Billboard Singles)\". Allmusic.com. Rovi Corporation. Retrieved January 23, 2010.\n^ Whitburn, Joel (1993). Top Adult Contemporary: 1961\u20131993. Record Research. p.\u00a0118.\n^ \"Michael Jackson\".\n^ \"Offiziellecharts.de \u2013 Michael Jackson \u2013 Thriller\" (in German). GfK Entertainment charts.\nRetrieved March 18, 2019.\n^ a b c d e f g \"Michael Jackson \u2013 Thriller\" (in French). Les classement single.\n^ a b \"Michael Jackson \u2013 Thriller\". Top Digital Download.\n^ a b c d e f \"Michael Jackson \u2013 Thriller\" Canciones Top 50.\n^ a b \"Michael Jackson \u2013 Thriller\" (in German). \u00d63 Austria Top 40.\n^ a b \"Michael Jackson \u2013 Thriller\". VG-lista.\n^ a b c \"Michael Jackson \u2013 Thriller\". Swiss Singles Chart.\n^ \"Michael Jackson \u2013 Thriller\". ARIA Top 50 Singles.\n^ \"30 Back Catalogue Singles \u2013 July 18, 2009\". UltraTop.be. Hung Medien. Retrieved January 24, 2010.\n^ \"30 Back Catalogue Singles \u2013 July 4, 2009\". UltraTop.be. Hung Medien. Retrieved January 24, 2010.\n^ \"Michael Jackson Album & Song Chart History\". Billboard.com. Nielsen Business Media. Retrieved October 29, 2011.\n^ \"Michael Jackson: Thriller\" (in Finnish). Musiikkituottajat.\n^ \"Download Single Top 50 \u2013 04/07/2009\". Lescharts.com. Hung Medien. Retrieved January 23, 2010.\n^ \"Michael Jackson \u2013 Thriller\". Singles Top 100.\n^ \"Michael Jackson Chart History (Digital Song Sales)\". Billboard.\n^ \"The Hot 100, Week of November 16, 2013\". Billboard. Prometheus Global Media. Retrieved November 12, 2015.\n^ \"The Hot 100, Week of November 15, 2014\". Billboard. Prometheus Global Media.\n^ \"The Hot 100, Week of November 21, 2015\". Billboard. Prometheus Global Media. Retrieved November 12, 2015.\n^ \"Michael Jackson Chart History (Canadian Hot 100)\". Billboard.\nRetrieved November 6, 2018.\n^ Zellner, Xander. \"Michael Jackson's 'Thriller' Returns to Hot 100, Thanks to Halloween Gains\". Billboard. Retrieved November 6, 2018."}
{"query": "who sang the hit song \"thriller\"?", "domain": "music", "doc": "^ \"The Hot 100, Week of November 21, 2015\". Billboard. Prometheus Global Media. Retrieved November 12, 2015.\n^ \"Michael Jackson Chart History (Canadian Hot 100)\". Billboard.\nRetrieved November 6, 2018.\n^ Zellner, Xander. \"Michael Jackson's 'Thriller' Returns to Hot 100, Thanks to Halloween Gains\". Billboard. Retrieved November 6, 2018.\n^ \"The Hot 100, Week of November 10, 2018\". Billboard. Retrieved September 9, 2021.\n^ \"The Hot 100, Week of November 9, 2019\". Billboard. Retrieved March 20, 2021.\n^ \"The Global 200, Week of November 14, 2020\". Billboard. Retrieved November 8, 2023.\n^ \"The Hot 100, Week of November 14, 2020\". Billboard. Retrieved November 14, 2020.\n^ \"Canadian Hot 100, Week of November 13, 2021\". Billboard. Retrieved November 23, 2021.\n^ \"Michael Jackson Chart History (Global 200)\". Billboard.\nRetrieved November 9, 2021.\n^ \"Official Singles Chart Top 100\". Official Charts Company.\nRetrieved November 8, 2023.\n^ \"Official Hip Hop and R&B Singles Chart Top 40\". Official Charts Company.\nRetrieved November 5, 2021.\n^ \"Michael Jackson's 'Thriller' Leads Halloween Treats on Billboard Hot 100\". Billboard. Retrieved November 8, 2021.\n^ \"The Hot 100, Week of November 13, 2021\". Billboard. Retrieved November 5, 2022.\n^ \"Digital Song Sales Chart, Week of November 13, 2021\". Billboard. Retrieved November 11, 2021.\n^ \"Canadian Hot 100, Week of November 12, 2022\". Billboard. Retrieved November 8, 2022.\n^ \"The Global 200, Week of November 12, 2022\". Billboard. Retrieved November 8, 2022.\n^ \"The Hot 100, Week of November 12, 2022\". Billboard. Retrieved November 8, 2022.\n^ \"Canadian Hot 100, Week of November 11, 2023\". Billboard. Retrieved November 8, 2023.\n^ \"The Global 200, Week of November 11, 2023\". Billboard. Retrieved November 8, 2023.\n^ \"Official Singles Chart Top 100\". Official Charts Company.\nRetrieved November 8, 2023.\n^ \"The Hot 100, Week of November 11, 2023\". Billboard. Retrieved November 8, 2023."}
{"query": "who sang the hit song \"thriller\"?", "domain": "music", "doc": "^ \"Canadian Hot 100, Week of November 11, 2023\". Billboard. Retrieved November 8, 2023.\n^ \"The Global 200, Week of November 11, 2023\". Billboard. Retrieved November 8, 2023.\n^ \"Official Singles Chart Top 100\". Official Charts Company.\nRetrieved November 8, 2023.\n^ \"The Hot 100, Week of November 11, 2023\". Billboard. Retrieved November 8, 2023.\n^ \"Kent Music Report No 548 \u2013 31 December 1984 > National Top 100 Singles for 1984\". Kent Music Report. Retrieved January 23, 2023 \u2013 via Imgur.com.\n^ \"Jaaroverzichten 1984\". Ultratop. Retrieved December 14, 2021.\n^ \"\u00c5rslista Singular \u2013 \u00c5r 2009\" (in Swedish). Sverigetopplistan. Retrieved March 29, 2020.\n^ \"Schweizer Jahreshitparade 2009 \u2013 hitparade.ch\". Hung Medien. Retrieved March 29, 2020.\n^ \"Charts Plus Year end 2009\" (PDF). Charts Plus. Retrieved May 16, 2020.\n^ \"ARIA Charts \u2013 Accreditations \u2013 2021 Singles\" (PDF). Australian Recording Industry Association.\n^ \"Danish\nsingle\ncertifications \u2013 Michael Jackson \u2013 Thriller\". IFPI Danmark. Retrieved July 2, 2023.\n^ \"French\nsingle\ncertifications \u2013 Michael Jackson \u2013 Thriller\" (in French). InfoDisc. Retrieved November 28, 2022. Select MICHAEL JACKSON and click OK.\n^ \"Gold-/Platin-Datenbank (Michael Jackson;\u00a0'Thriller')\" (in German). Bundesverband Musikindustrie. Retrieved February 18, 2023.\n^ \"Italian\nsingle\ncertifications \u2013 Michael Jackson \u2013 Thriller\" (in Italian). Federazione Industria Musicale Italiana. Select \"2014\" in the \"Anno\" drop-down menu. Select \"Thriller\" in the \"Filtra\" field. Select \"Singoli\" under \"Sezione\".\n^ \"Japanese\nringtone\ncertifications \u2013 Michael Jackson \u2013 Thriller\" (in Japanese). Recording Industry Association of Japan. Retrieved December 30, 2020. Select 2009\u5e7411\u6708 on the drop-down menu\n^ \"Certificaciones\" (in Spanish). Asociaci\u00f3n Mexicana de Productores de Fonogramas y Videogramas. Retrieved November 28, 2022. Type Michael Jackson in the box under the ARTISTA column heading\u00a0and Thriller in the box under the T\u00cdTULO column heading.\n^ \"Spanish\nsingle"}
{"query": "who sang the hit song \"thriller\"?", "domain": "music", "doc": "^ \"Certificaciones\" (in Spanish). Asociaci\u00f3n Mexicana de Productores de Fonogramas y Videogramas. Retrieved November 28, 2022. Type Michael Jackson in the box under the ARTISTA column heading\u00a0and Thriller in the box under the T\u00cdTULO column heading.\n^ \"Spanish\nsingle\ncertifications \u2013 Michael Jackson \u2013 Thriller\". El portal de M\u00fasica. Productores de M\u00fasica de Espa\u00f1a.\n^ \"British\nsingle\ncertifications \u2013 Michael Jackson \u2013 Thriller\". British Phonographic Industry. Retrieved March 17, 2023.\n^ \"British\nsingle\ncertifications \u2013 Michael Jackson \u2013 Thriller\". British Phonographic Industry. Retrieved March 17, 2023.\n^ \"American\nsingle\ncertifications \u2013 Michael Jackson \u2013 Thriller\". Recording Industry Association of America. Retrieved August 29, 2022.\n^ \"American\nringtone\ncertifications \u2013 Michael Jackson \u2013 Thriller\". Recording Industry Association of America.\nBibliography\nBrooks, Darren (2002). Michael Jackson: An Exceptional Journey. Chrome Dreams. ISBN\u00a01-84240-178-5.\nGeorge, Nelson (2004). Michael Jackson: The Ultimate Collection (booklet). Sony BMG.\nGrant, Adrian (2009). Michael Jackson: The Visual Documentary. Omnibus Press. ISBN\u00a0978-1-84938-261-8.\nJones, Jel (2005). Michael Jackson, the King of Pop: The Big Picture: the Music! the Man! the Legend! the Interviews!. Amber Books Publishing. ISBN\u00a00-9749779-0-X.\nTaraborrelli, J. Randy (2004). The Magic and the Madness. Terra Alta, WV: Headline. ISBN\u00a00-330-42005-4.\nHalstead, Craig (2003). Michael Jackson The Solo Years. On-Line Ltd. ISBN\u00a0978-0-7552-0091-7.\nvteMichael Jackson: ThrillerSide one\n\"Wanna Be Startin' Somethin'\"\n\"Baby Be Mine\"\n\"The Girl Is Mine\"\n\"Thriller\"\nSide two\n\"Beat It\"\n\"Billie Jean\"\n\"Human Nature\"\n\"P.Y.T. (Pretty Young Thing)\"\n\"The Lady in My Life\"\nRelated articles\nE.T. the Extra-Terrestrial (audiobook)\nFarewell My Summer Love\nVictory\nVictory Tour\nMichael Jackson's Thriller\nThriller jacket\nThriller 25\nThriller 40\nThriller 40 (film)\nThrill the World\nThriller viral video\nDonga\nThriller \u2013 Live"}
{"query": "who sang the hit song \"thriller\"?", "domain": "music", "doc": "\"Baby Be Mine\"\n\"The Girl Is Mine\"\n\"Thriller\"\nSide two\n\"Beat It\"\n\"Billie Jean\"\n\"Human Nature\"\n\"P.Y.T. (Pretty Young Thing)\"\n\"The Lady in My Life\"\nRelated articles\nE.T. the Extra-Terrestrial (audiobook)\nFarewell My Summer Love\nVictory\nVictory Tour\nMichael Jackson's Thriller\nThriller jacket\nThriller 25\nThriller 40\nThriller 40 (film)\nThrill the World\nThriller viral video\nDonga\nThriller \u2013 Live\nMichael Jackson albums discography\nvteMichael Jackson songs\nSingles\nSongs\nUnreleased songs\n1970s\n\"Got to Be There\"\n\"Ain't No Sunshine\"\n\"I Wanna Be Where You Are\"\n\"Rockin' Robin\"\n\"Love Is Here and Now You're Gone\"\n\"You've Got a Friend\"\n\"Ben\"\n\"Everybody's Somebody's Fool\"\n\"My Girl\"\n\"Shoo-Be-Doo-Be-Doo-Da-Day\"\n\"We've Got a Good Thing Going\"\n\"With a Child's Heart\"\n\"Morning Glow\"\n\"All the Things You Are\"\n\"Happy\"\n\"Too Young\"\n\"Music and Me\"\n\"We're Almost There\"\n\"Just a Little Bit of You\"\n\"You Can't Win\"\n\"Don't Stop 'Til You Get Enough\"\n\"Rock with You\"\n\"Working Day and Night\"\n\"It's the Falling in Love\"\n1980s\n\"Off the Wall\"\n\"Girlfriend\"\n\"She's Out of My Life\"\n\"One Day in Your Life\"\n\"The Girl Is Mine\"\n\"Billie Jean\"\n\"Beat It\"\n\"Wanna Be Startin' Somethin'\"\n\"Human Nature\"\n\"P.Y.T. (Pretty Young Thing)\"\n\"Thriller\"\n\"You've Really Got a Hold on Me\"\n\"Here I Am (Come and Take Me)\"\n\"Lonely Teardrops\"\n\"That's What Love Is Made Of\"\n\"Farewell My Summer Love\"\n\"Girl You're So Together\"\n\"I Just Can't Stop Loving You\"\n\"Bad\"\n\"The Way You Make Me Feel\"\n\"Speed Demon\"\n\"Liberian Girl\"\n\"Just Good Friends\"\n\"Another Part of Me\"\n\"Man in the Mirror\"\n\"Dirty Diana\"\n\"Smooth Criminal\"\n\"Leave Me Alone\"\n\"Twenty-Five Miles\"\n1990s\n\"Black or White\"\n\"Jam\"\n\"In the Closet\"\n\"Remember the Time\"\n\"Heal the World\"\n\"Who Is It\"\n\"Give In to Me\"\n\"Will You Be There\"\n\"Gone Too Soon\"\n\"Dangerous\"\n\"Come Together\"\n\"Scream\"\n\"Childhood\"\n\"They Don't Care About Us\"\n\"Stranger in Moscow\"\n\"This Time Around\"\n\"Earth Song\"\n\"D.S.\"\n\"You Are Not Alone\"\n\"Tabloid Junkie\"\n\"HIStory\"\n\"Smile\"\n\"Blood on the Dance Floor\"\n\"Ghosts\"\n\"Is It Scary\"\n\"On the Line\"\n2000s"}
{"query": "who sang the hit song \"thriller\"?", "domain": "music", "doc": "\"Jam\"\n\"In the Closet\"\n\"Remember the Time\"\n\"Heal the World\"\n\"Who Is It\"\n\"Give In to Me\"\n\"Will You Be There\"\n\"Gone Too Soon\"\n\"Dangerous\"\n\"Come Together\"\n\"Scream\"\n\"Childhood\"\n\"They Don't Care About Us\"\n\"Stranger in Moscow\"\n\"This Time Around\"\n\"Earth Song\"\n\"D.S.\"\n\"You Are Not Alone\"\n\"Tabloid Junkie\"\n\"HIStory\"\n\"Smile\"\n\"Blood on the Dance Floor\"\n\"Ghosts\"\n\"Is It Scary\"\n\"On the Line\"\n2000s\n\"Speechless\"\n\"You Rock My World\"\n\"Heaven Can Wait\"\n\"Butterflies\"\n\"Cry\"\n\"One More Chance\"\n\"Cheater\"\n\"(I Like) The Way You Love Me\"\n\"Fall Again\"\n\"This Is It\"\n2010s\n\"Hold My Hand\"\n\"Hollywood Tonight\"\n\"(I Can't Make It) Another Day\"\n\"Behind the Mask\"\n\"Don't Be Messin' 'Round\"\n\"I'm So Blue\"\n\"Price of Fame\"\n\"Love Never Felt So Good\"\n\"Chicago\"\n\"Loving You\"\n\"A Place with No Name\"\n\"Slave to the Rhythm\"\n\"Blue Gangsta\"\n2020s\n\"She's Trouble\"\nOther\n\"We Are the World\"\n\"Mind Is the Magic\"\n\"What More Can I Give\"\n\"We Are the World 25 for Haiti\"\n\"Blood on the Dance Floor x Dangerous\"\n\"Diamonds Are Invincible\"\nFeatured\n\"Ease on Down the Road\"\n\"A Brand New Day\"\n\"Night Time Lover\"\n\"Papa Was a Rollin' Stone\"\n\"State of Independence\"\n\"Muscles\"\n\"Say Say Say\"\n\"Somebody's Watching Me\"\n\"Don't Stand Another Chance\"\n\"Centipede\"\n\"Tell Me I'm Not Dreamin' (Too Good to Be True)\"\n\"Eaten Alive\"\n\"Get It\"\n\"2300 Jackson Street\"\n\"Do the Bartman\"\n\"Whatzupwitu\"\n\"Why\"\n\"I Need You\"\n\"We Be Ballin'\"\n\"Girls, Girls, Girls\"\n\"All in Your Name\"\n\"There Must Be More to Life Than This\"\n\"Low\"\n\"Don't Matter to Me\"\nCategory\nAuthority control databases\nMusicBrainz work\nRetrieved from \"https://en.wikipedia.org/w/index.php?title=Thriller_(song)&oldid=1212467768\""}
{"query": "who sang the hit song \"thriller\"?", "domain": "music", "doc": "\nCategories: 1982 songs1983 singles1984 singlesCBS Records singlesColumbia Records singlesCompositions with a narratorEpic Records singlesHalloween songsMichael Jackson songsNumber-one singles in SpainSNEP Top Singles number-one singlesSong recordings produced by Quincy JonesSongs about monstersSongs written by Rod TempertonHidden categories: CS1 Dutch-language sources (nl)CS1 Italian-language sources (it)CS1 Spanish-language sources (es)Articles with German-language sources (de)CS1 Swedish-language sources (sv)CS1 French-language sources (fr)CS1 German-language sources (de)Cite certification used for Italy without IDCS1 Japanese-language sources (ja)Articles with short descriptionShort description is different from WikidataGood articlesUse American English from November 2021All Wikipedia articles written in American EnglishUse mdy dates from November 2014Articles with hAudio microformatsCertification Cite Ref usages outside Certification Table EntrySingle chart usages for FlandersSingle chart usages for Dutch40Single chart called without artistSingle chart called without songSingle chart usages for Dutch100Single chart usages for New ZealandSingle chart usages for West GermanySingle chart usages for FranceSingle chart usages for ItalySingle chart making named refSingle chart usages for SpainSingle chart usages for AustriaSingle chart usages for NorwaySingle chart usages for SwissSingle chart usages for AustraliaSingle chart usages for FinlandSingle chart usages for SwedenSingle chart usages for BillboarddigitalsongsSingle chart usages for CanadaSingle chart usages for Billboardglobal200Single chart usages for UKSingle chart usages for UKrandbCertification Table Entry usages for AustraliaPages using certification Table Entry with streaming figuresCertification Table Entry usages for DenmarkCertification Table Entry usages for FrancePages using certification Table Entry with sales figuresCertification Table Entry usages for GermanyCertification Table Entry usages for ItalyCertification Table Entry usages for JapanCertification Table Entry usages for MexicoCertification Table Entry usages for SpainCertification Table Entry usages for United KingdomCertification Table Entry usages for United StatesPages using certification Table Entry with sales footnotePages using certification Table Entry with streaming footnoteArticles with MusicBrainz work identifiers"}
{"query": "who sang the hit song \"thriller\"?", "domain": "music", "doc": "This page was last edited on 8 March 2024, at 01:12\u00a0(UTC).\nText is available under the Creative Commons Attribution-ShareAlike License 4.0;\nadditional terms may apply. By using this site, you agree to the Terms of Use and Privacy Policy. Wikipedia\u00ae is a registered trademark of the Wikimedia Foundation, Inc., a non-profit organization.\nPrivacy policy\nAbout Wikipedia\nDisclaimers\nContact Wikipedia\nCode of Conduct\nDevelopers\nStatistics\nCookie statement\nMobile view\nToggle limited content width"}
{"query": "who sang the hit song \"thriller\"?", "domain": "music", "doc": "Jon Pareles of The New York Times noted that \"'Billie Jean', 'Beat It', 'Wanna Be Starting' Something' ' and \"the movie in the song 'Thriller'\", were the songs, unlike the \"fluff\" \"P.Y.T.\", that were \"the hits that made Thriller a world-beater; along with Mr. Jackson's stage and video presence, listeners must have identified with his willingness to admit terror.\" It appears on several of Jackson's greatest-hits albums and has been covered by numerous artists. The song has returned to the Billboard Hot 100 chart multiple times due to its popularity around Halloween. \"Thriller\" is a disco-funk song The introduction features sound effects such as a creaking door, thunder, feet walking on wooden planks, winds and howling wolves. This gave Jackson at least one top-20 hit across seven consecutive decades from 1969 on the Billboard Hot 100. \"Thriller\" was certified platinum by the Recording Industry Association of America on December 4, 1989, for sales of over one million physical units in the US As of August 2016, the song had sold 4,024,398 copies in the US. \"Thriller\" is certified Diamond by the Recording Industry Association of America. It appears on several of Jackson's greatest-hits albums and has been covered by numerous artists. The song has returned to the Billboard Hot 100 chart multiple times due to its popularity around Halloween. \"Thriller\" has returned to the Billboard Hot 100 chart multiple times due to its popularity around Halloween"}
{"query": "who sang the hit song \"thriller\"?", "domain": "music", "doc": ". \"Thriller\" is certified Diamond by the Recording Industry Association of America. It appears on several of Jackson's greatest-hits albums and has been covered by numerous artists. The song has returned to the Billboard Hot 100 chart multiple times due to its popularity around Halloween. \"Thriller\" has returned to the Billboard Hot 100 chart multiple times due to its popularity around Halloween. It re-entered the Billboard Hot 100 in October 2013 at number 42, number 31 in November 2018, and number 19 in November 2021, its highest placement since 1984. This gave Jackson at least one top-20 hit across seven consecutive decades from 1969 on the Billboard Hot 100."}
11
AgentQnA/kubernetes/helm/README.md
Normal file
@@ -0,0 +1,11 @@
# Deploy AgentQnA on Kubernetes cluster

- You should have Helm (version >= 3.15) installed. Refer to the [Helm Installation Guide](https://helm.sh/docs/intro/install/) for more information.
- For more deploy options, refer to the [helm charts README](https://github.com/opea-project/GenAIInfra/tree/main/helm-charts#readme).

## Deploy on Gaudi

```
export HFTOKEN="insert-your-huggingface-token-here"
helm install agentqna oci://ghcr.io/opea-project/charts/agentqna --set global.HUGGINGFACEHUB_API_TOKEN=${HFTOKEN} -f gaudi-values.yaml
```
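After installing, it may help to confirm that all pods come up before sending requests. A minimal sketch (the `kubectl wait` timeout is an arbitrary choice, not something prescribed by the chart):

```
kubectl get pods
kubectl wait --for=condition=Ready pod --all --timeout=15m
```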
22
AgentQnA/kubernetes/helm/cpu-values.yaml
Normal file
@@ -0,0 +1,22 @@
# Copyright (C) 2025 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

tgi:
  enabled: false
vllm:
  enabled: true
  LLM_MODEL_ID: "meta-llama/Meta-Llama-3-8B-Instruct"
  extraCmdArgs: ["--max-seq-len-to-capture", "16384", "--enable-auto-tool-choice", "--tool-call-parser", "llama3_json"]

supervisor:
  llm_endpoint_url: http://{{ .Release.Name }}-vllm
  llm_engine: vllm
  model: "meta-llama/Meta-Llama-3-8B-Instruct"
ragagent:
  llm_endpoint_url: http://{{ .Release.Name }}-vllm
  llm_engine: vllm
  model: "meta-llama/Meta-Llama-3-8B-Instruct"
sqlagent:
  llm_endpoint_url: http://{{ .Release.Name }}-vllm
  llm_engine: vllm
  model: "meta-llama/Meta-Llama-3-8B-Instruct"
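For a CPU-only cluster, the install command from the README above should presumably work with this values file swapped in for the Gaudi one; a sketch, assuming the same `agentqna` release name:

```
export HFTOKEN="insert-your-huggingface-token-here"
helm install agentqna oci://ghcr.io/opea-project/charts/agentqna --set global.HUGGINGFACEHUB_API_TOKEN=${HFTOKEN} -f cpu-values.yaml
```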
35
AgentQnA/kubernetes/helm/gaudi-values.yaml
Normal file
@@ -0,0 +1,35 @@
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

# Accelerate inferencing in heaviest components to improve performance
# by overriding their subchart values

tgi:
  enabled: false
vllm:
  enabled: true
  accelDevice: "gaudi"
  image:
    repository: opea/vllm-gaudi
  resources:
    limits:
      habana.ai/gaudi: 4
  LLM_MODEL_ID: "meta-llama/Llama-3.3-70B-Instruct"
  OMPI_MCA_btl_vader_single_copy_mechanism: none
  PT_HPU_ENABLE_LAZY_COLLECTIVES: true
  VLLM_SKIP_WARMUP: true
  shmSize: 16Gi
  extraCmdArgs: ["--tensor-parallel-size", "4", "--max-seq-len-to-capture", "16384", "--enable-auto-tool-choice", "--tool-call-parser", "llama3_json"]

supervisor:
  llm_endpoint_url: http://{{ .Release.Name }}-vllm
  llm_engine: vllm
  model: "meta-llama/Llama-3.3-70B-Instruct"
ragagent:
  llm_endpoint_url: http://{{ .Release.Name }}-vllm
  llm_engine: vllm
  model: "meta-llama/Llama-3.3-70B-Instruct"
sqlagent:
  llm_endpoint_url: http://{{ .Release.Name }}-vllm
  llm_engine: vllm
  model: "meta-llama/Llama-3.3-70B-Instruct"
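Note that `--tensor-parallel-size 4` in `extraCmdArgs` matches the `habana.ai/gaudi: 4` resource limit, so the 70B model is sharded across four Gaudi cards on a single node. A generic way to check that a node actually exposes that capacity (a sketch, not part of the chart docs):

```
kubectl describe nodes | grep -i "habana.ai/gaudi"
```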
37
AgentQnA/retrieval_tool/README.md
Normal file
@@ -0,0 +1,37 @@
# Retrieval tool for agent

The retrieval tool in this example is an OPEA megaservice composed of a query embedder, a document retriever and a document reranker.

## Launch microservices

```
bash launch_retrieval_tool.sh
```

## Index data into vector database

In this example, we ingest the documents in a sample jsonl file into the vector database. For more ways to ingest data and the types of documents supported by OPEA dataprep microservices, please refer to the documentation in the opea-project/GenAIComps repo.

1. Create a conda environment.
2. Run the commands below:

```
bash run_ingest_data.sh
```

## Validate services

```
export ip_address=$(hostname -I | awk '{print $1}')
curl http://${ip_address}:8889/v1/retrievaltool -X POST -H "Content-Type: application/json" -d '{
    "text": "Taylor Swift hometown"
}'
```

## Consume retrieval tool

The endpoint for the retrieval tool is

```
http://${ip_address}:8889/v1/retrievaltool
```
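Requests to this endpoint presumably follow the same JSON shape as the validation call above, a single `text` field carrying the query; for example, using one of the queries from the sample jsonl data:

```
curl http://${ip_address}:8889/v1/retrievaltool -X POST -H "Content-Type: application/json" -d '{
    "text": "who sang the hit song Thriller?"
}'
```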
77
AgentQnA/retrieval_tool/index_data.py
Normal file
@@ -0,0 +1,77 @@
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

import argparse
import json
import os

import requests
import tqdm


def get_args():
    parser = argparse.ArgumentParser(description="Index data")
    parser.add_argument("--host_ip", type=str, default="localhost", help="Host IP")
    parser.add_argument("--port", type=int, default=6007, help="Port")
    parser.add_argument("--filedir", type=str, default=None, help="file directory")
    parser.add_argument("--filename", type=str, default=None, help="file name")
    parser.add_argument("--chunk_size", type=int, default=10000, help="Chunk size")
    parser.add_argument("--chunk_overlap", type=int, default=0, help="Chunk overlap")
    args = parser.parse_args()
    return args


def split_jsonl_into_txts(jsonl_file):
    """Collect the 'doc' field of every line in a jsonl file."""
    docs = []
    with open(jsonl_file, "r") as f:
        for line in f:
            data = json.loads(line)
            docs.append(data["doc"])
    return docs


def write_docs_to_disk(docs, output_folder):
    """Write each document to its own numbered .txt file and return the paths."""
    output_files = []
    for i, text in enumerate(docs):
        output = os.path.join(output_folder, str(i) + ".txt")
        output_files.append(output)
        with open(output, "w") as f:
            f.write(text)
    return output_files


def delete_files(files):
    for file in files:
        os.remove(file)


def main():
    args = get_args()
    print(args)

    host_ip = args.host_ip
    port = args.port
    proxies = {"http": ""}
    url = "http://{host_ip}:{port}/v1/dataprep/ingest".format(host_ip=host_ip, port=port)

    # Split the jsonl file into individual txt files, one per document
    docs = split_jsonl_into_txts(os.path.join(args.filedir, args.filename))
    file_list = write_docs_to_disk(docs, args.filedir)

    print(file_list)

    # Upload each txt file to the dataprep microservice
    for file in tqdm.tqdm(file_list):
        print("Indexing file: ", file)
        with open(file, "rb") as f:
            files = [("files", (file, f))]
            payload = {"chunk_size": args.chunk_size, "chunk_overlap": args.chunk_overlap}
            resp = requests.request("POST", url=url, headers={}, files=files, data=payload, proxies=proxies)
        print(resp.text)

    print("Removing temp files....")
    delete_files(file_list)
    print("ALL DONE!")


if __name__ == "__main__":
    main()
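For reference, a hypothetical direct invocation of this script (the data directory and file name are placeholders; if omitted, `--host_ip` and `--port` fall back to the `localhost:6007` defaults from `get_args`):

```
python index_data.py --host_ip localhost --port 6007 --filedir ./example_data --filename sample_docs.jsonl
```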
Some files were not shown because too many files have changed in this diff.