Mirror of https://github.com/langgenius/dify.git (synced 2026-01-08 07:14:14 +00:00)

Compare commits: fix/toolti...fix/redis- (611 commits)

.github/workflows/api-tests.yml (12 lines changed)

@@ -27,19 +27,18 @@ jobs:
       - name: Checkout code
         uses: actions/checkout@v4

-      - name: Install Poetry
-        uses: abatilo/actions-poetry@v3
-
       - name: Set up Python ${{ matrix.python-version }}
         uses: actions/setup-python@v5
         with:
           python-version: ${{ matrix.python-version }}
           cache: 'poetry'
           cache-dependency-path: |
             api/pyproject.toml
             api/poetry.lock

-      - name: Poetry check
+      - name: Install Poetry
+        uses: abatilo/actions-poetry@v3
+
+      - name: Check Poetry lockfile
         run: |
           poetry check -C api --lock
           poetry show -C api

@@ -47,6 +46,9 @@ jobs:
       - name: Install dependencies
         run: poetry install -C api --with dev

+      - name: Check dependencies in pyproject.toml
+        run: poetry run -C api bash dev/pytest/pytest_artifacts.sh
+
       - name: Run Unit tests
         run: poetry run -C api bash dev/pytest/pytest_unit_tests.sh

.github/workflows/build-push.yml (3 lines changed)

@@ -5,6 +5,7 @@ on:
     branches:
       - "main"
       - "deploy/dev"
+      - "fix/redis-slow-in-gevent"
   release:
     types: [published]

@@ -125,7 +126,7 @@ jobs:
         with:
           images: ${{ env[matrix.image_name_env] }}
           tags: |
-            type=raw,value=latest,enable=${{ startsWith(github.ref, 'refs/tags/') }}
+            type=raw,value=latest,enable=${{ startsWith(github.ref, 'refs/tags/') && !contains(github.ref, '-') }}
             type=ref,event=branch
             type=sha,enable=true,priority=100,prefix=,suffix=,format=long
             type=raw,value=${{ github.ref_name }},enable=${{ startsWith(github.ref, 'refs/tags/') }}

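The practical effect of the changed `enable=` expression above: the `latest` Docker tag is now applied only to stable release tags, since pre-release tags contain a hyphen. A minimal sketch of that gate in Python (illustrative only, not part of the workflow; the version strings are made up):

```python
def should_tag_latest(github_ref: str) -> bool:
    """Mirrors the updated expression:
    startsWith(github.ref, 'refs/tags/') && !contains(github.ref, '-')"""
    return github_ref.startswith("refs/tags/") and "-" not in github_ref


assert should_tag_latest("refs/tags/0.8.3")            # stable release -> tagged latest
assert not should_tag_latest("refs/tags/0.8.3-beta1")  # pre-release -> no latest tag
assert not should_tag_latest("refs/heads/main")        # branch build -> no latest tag
```
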
.github/workflows/db-migration-test.yml (7 lines changed)

@@ -23,18 +23,17 @@ jobs:
       - name: Checkout code
         uses: actions/checkout@v4

-      - name: Install Poetry
-        uses: abatilo/actions-poetry@v3
-
       - name: Set up Python ${{ matrix.python-version }}
         uses: actions/setup-python@v5
         with:
           python-version: ${{ matrix.python-version }}
           cache: 'poetry'
           cache-dependency-path: |
             api/pyproject.toml
             api/poetry.lock

+      - name: Install Poetry
+        uses: abatilo/actions-poetry@v3
+
       - name: Install dependencies
         run: poetry install -C api

.github/workflows/style.yml (15 lines changed)

@@ -20,19 +20,20 @@ jobs:
       - name: Check changed files
         id: changed-files
-        uses: tj-actions/changed-files@v44
+        uses: tj-actions/changed-files@v45
         with:
           files: api/**

-      - name: Install Poetry
-        uses: abatilo/actions-poetry@v3
-
       - name: Set up Python
         uses: actions/setup-python@v5
         if: steps.changed-files.outputs.any_changed == 'true'
         with:
           python-version: '3.10'

+      - name: Install Poetry
+        if: steps.changed-files.outputs.any_changed == 'true'
+        uses: abatilo/actions-poetry@v3
+
       - name: Python dependencies
         if: steps.changed-files.outputs.any_changed == 'true'
         run: poetry install -C api --only lint

@@ -66,7 +67,7 @@ jobs:
       - name: Check changed files
         id: changed-files
-        uses: tj-actions/changed-files@v44
+        uses: tj-actions/changed-files@v45
         with:
           files: web/**

@@ -97,7 +98,7 @@ jobs:
       - name: Check changed files
         id: changed-files
-        uses: tj-actions/changed-files@v44
+        uses: tj-actions/changed-files@v45
         with:
           files: |
             **.sh

@@ -107,7 +108,7 @@ jobs:
             dev/**

       - name: Super-linter
-        uses: super-linter/super-linter/slim@v6
+        uses: super-linter/super-linter/slim@v7
         if: steps.changed-files.outputs.any_changed == 'true'
         env:
           BASH_SEVERITY: warning

.github/workflows/translate-i18n-base-on-english.yml (new file, 54 lines)

@@ -0,0 +1,54 @@
name: Check i18n Files and Create PR

on:
  pull_request:
    types: [closed]
    branches: [main]

jobs:
  check-and-update:
    if: github.event.pull_request.merged == true
    runs-on: ubuntu-latest
    defaults:
      run:
        working-directory: web
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 2 # last 2 commits

      - name: Check for file changes in i18n/en-US
        id: check_files
        run: |
          recent_commit_sha=$(git rev-parse HEAD)
          second_recent_commit_sha=$(git rev-parse HEAD~1)
          changed_files=$(git diff --name-only $recent_commit_sha $second_recent_commit_sha -- 'i18n/en-US/*.ts')
          echo "Changed files: $changed_files"
          if [ -n "$changed_files" ]; then
            echo "FILES_CHANGED=true" >> $GITHUB_ENV
          else
            echo "FILES_CHANGED=false" >> $GITHUB_ENV
          fi

      - name: Set up Node.js
        if: env.FILES_CHANGED == 'true'
        uses: actions/setup-node@v2
        with:
          node-version: 'lts/*'

      - name: Install dependencies
        if: env.FILES_CHANGED == 'true'
        run: yarn install --frozen-lockfile

      - name: Run npm script
        if: env.FILES_CHANGED == 'true'
        run: npm run auto-gen-i18n

      - name: Create Pull Request
        if: env.FILES_CHANGED == 'true'
        uses: peter-evans/create-pull-request@v6
        with:
          commit-message: Update i18n files based on en-US changes
          title: 'chore: translate i18n files'
          body: This PR was automatically created to update i18n files based on changes in en-US locale.
          branch: chore/automated-i18n-updates

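The heart of this new workflow is the `check_files` step: it diffs the two most recent commits (hence `fetch-depth: 2`) and only runs the translation script when an `i18n/en-US/*.ts` file changed. A rough Python equivalent of that gate (illustrative only; the workflow itself uses the shell step shown above):

```python
import subprocess


def en_us_i18n_changed() -> bool:
    """Return True if the most recent commit touched i18n/en-US/*.ts,
    mirroring the git diff gate in the workflow's check_files step."""

    def git(*args: str) -> str:
        result = subprocess.run(["git", *args], capture_output=True, text=True, check=True)
        return result.stdout.strip()

    head = git("rev-parse", "HEAD")
    prev = git("rev-parse", "HEAD~1")
    changed = git("diff", "--name-only", head, prev, "--", "i18n/en-US/*.ts")
    return bool(changed)
```
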
.github/workflows/web-tests.yml (new file, 46 lines)

@@ -0,0 +1,46 @@
name: Web Tests

on:
  pull_request:
    branches:
      - main
    paths:
      - web/**

concurrency:
  group: web-tests-${{ github.head_ref || github.run_id }}
  cancel-in-progress: true

jobs:
  test:
    name: Web Tests
    runs-on: ubuntu-latest
    defaults:
      run:
        working-directory: ./web

    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Check changed files
        id: changed-files
        uses: tj-actions/changed-files@v45
        with:
          files: web/**

      - name: Setup Node.js
        uses: actions/setup-node@v4
        if: steps.changed-files.outputs.any_changed == 'true'
        with:
          node-version: 20
          cache: yarn
          cache-dependency-path: ./web/package.json

      - name: Install dependencies
        if: steps.changed-files.outputs.any_changed == 'true'
        run: yarn install --frozen-lockfile

      - name: Run tests
        if: steps.changed-files.outputs.any_changed == 'true'
        run: yarn test

.gitignore (11 lines changed)

@@ -153,6 +153,9 @@ docker-legacy/volumes/etcd/*
 docker-legacy/volumes/minio/*
 docker-legacy/volumes/milvus/*
 docker-legacy/volumes/chroma/*
+docker-legacy/volumes/opensearch/data/*
+docker-legacy/volumes/pgvectors/data/*
+docker-legacy/volumes/pgvector/data/*

 docker/volumes/app/storage/*
 docker/volumes/certbot/*

@@ -164,8 +167,16 @@ docker/volumes/etcd/*
 docker/volumes/minio/*
 docker/volumes/milvus/*
 docker/volumes/chroma/*
+docker/volumes/opensearch/data/*
+docker/volumes/myscale/data/*
+docker/volumes/myscale/log/*
+docker/volumes/unstructured/*
+docker/volumes/pgvector/data/*
+docker/volumes/pgvecto_rs/data/*

 docker/nginx/conf.d/default.conf
 docker/nginx/ssl/*
 !docker/nginx/ssl/.gitkeep
+docker/middleware.env

 sdks/python-client/build

CONTRIBUTING.md

@@ -8,7 +8,7 @@ In terms of licensing, please take a minute to read our short [License and Contr
 ## Before you jump in

-[Find](https://github.com/langgenius/dify/issues?q=is:issue+is:closed) an existing issue, or [open](https://github.com/langgenius/dify/issues/new/choose) a new one. We categorize issues into 2 types:
+[Find](https://github.com/langgenius/dify/issues?q=is:issue+is:open) an existing issue, or [open](https://github.com/langgenius/dify/issues/new/choose) a new one. We categorize issues into 2 types:

 ### Feature requests:

CONTRIBUTING_CN.md

@@ -8,7 +8,7 @@
 ## 在开始之前

-[查找](https://github.com/langgenius/dify/issues?q=is:issue+is:closed)现有问题,或 [创建](https://github.com/langgenius/dify/issues/new/choose) 一个新问题。我们将问题分为两类:
+[查找](https://github.com/langgenius/dify/issues?q=is:issue+is:open)现有问题,或 [创建](https://github.com/langgenius/dify/issues/new/choose) 一个新问题。我们将问题分为两类:

 ### 功能请求:

@@ -36,7 +36,7 @@
 | 被团队成员标记为高优先级的功能 | 高优先级 |
 | 在 [community feedback board](https://github.com/langgenius/dify/discussions/categories/feedbacks) 内反馈的常见功能请求 | 中等优先级 |
 | 非核心功能和小幅改进 | 低优先级 |
-| 有价值当不紧急 | 未来功能 |
+| 有价值但不紧急 | 未来功能 |

 ### 其他任何事情(例如 bug 报告、性能优化、拼写错误更正):
 * 立即开始编码。

@@ -138,7 +138,7 @@ Dify 的后端使用 Python 编写,使用 [Flask](https://flask.palletsproject
 ├── models // 描述数据模型和 API 响应的形状
 ├── public // 如 favicon 等元资源
 ├── service // 定义 API 操作的形状
-├── test
+├── test
 ├── types // 函数参数和返回值的描述
 └── utils // 共享的实用函数
 ```

CONTRIBUTING_JA.md

@@ -10,7 +10,7 @@ Dify にコントリビュートしたいとお考えなのですね。それは
 ## 飛び込む前に

-[既存の Issue](https://github.com/langgenius/dify/issues?q=is:issue+is:closed) を探すか、[新しい Issue](https://github.com/langgenius/dify/issues/new/choose) を作成してください。私たちは Issue を 2 つのタイプに分類しています。
+[既存の Issue](https://github.com/langgenius/dify/issues?q=is:issue+is:open) を探すか、[新しい Issue](https://github.com/langgenius/dify/issues/new/choose) を作成してください。私たちは Issue を 2 つのタイプに分類しています。

 ### 機能リクエスト

CONTRIBUTING_VI.md

@@ -8,7 +8,7 @@ Về vấn đề cấp phép, xin vui lòng dành chút thời gian đọc qua [
 ## Trước khi bắt đầu

-[Tìm kiếm](https://github.com/langgenius/dify/issues?q=is:issue+is:closed) một vấn đề hiện có, hoặc [tạo mới](https://github.com/langgenius/dify/issues/new/choose) một vấn đề. Chúng tôi phân loại các vấn đề thành 2 loại:
+[Tìm kiếm](https://github.com/langgenius/dify/issues?q=is:issue+is:open) một vấn đề hiện có, hoặc [tạo mới](https://github.com/langgenius/dify/issues/new/choose) một vấn đề. Chúng tôi phân loại các vấn đề thành 2 loại:

 ### Yêu cầu tính năng:

LICENSE (7 lines changed)

@@ -4,10 +4,11 @@ Dify is licensed under the Apache License 2.0, with the following additional con

 1. Dify may be utilized commercially, including as a backend service for other applications or as an application development platform for enterprises. Should the conditions below be met, a commercial license must be obtained from the producer:

-a. Multi-tenant SaaS service: Unless explicitly authorized by Dify in writing, you may not use the Dify source code to operate a multi-tenant environment.
+a. Multi-tenant service: Unless explicitly authorized by Dify in writing, you may not use the Dify source code to operate a multi-tenant environment.
     - Tenant Definition: Within the context of Dify, one tenant corresponds to one workspace. The workspace provides a separated area for each tenant's data and configurations.

-b. LOGO and copyright information: In the process of using Dify's frontend components, you may not remove or modify the LOGO or copyright information in the Dify console or applications. This restriction is inapplicable to uses of Dify that do not involve its frontend components.
+b. LOGO and copyright information: In the process of using Dify's frontend, you may not remove or modify the LOGO or copyright information in the Dify console or applications. This restriction is inapplicable to uses of Dify that do not involve its frontend.
+    - Frontend Definition: For the purposes of this license, the "frontend" of Dify includes all components located in the `web/` directory when running Dify from the raw source code, or the "web" image when running Dify with Docker.

 Please contact business@dify.ai by email to inquire about licensing matters.

README.md (12 lines changed)

@@ -17,7 +17,7 @@
     alt="chat on Discord"></a>
 <a href="https://twitter.com/intent/follow?screen_name=dify_ai" target="_blank">
     <img src="https://img.shields.io/twitter/follow/dify_ai?logo=X&color=%20%23f5f5f5"
-        alt="follow on Twitter"></a>
+        alt="follow on X(Twitter)"></a>
 <a href="https://hub.docker.com/u/langgenius" target="_blank">
     <img alt="Docker Pulls" src="https://img.shields.io/docker/pulls/langgenius/dify-web?labelColor=%20%23FDB062&color=%20%23f79009"></a>
 <a href="https://github.com/langgenius/dify/graphs/commit-activity" target="_blank">

@@ -168,7 +168,7 @@ Star Dify on GitHub and be instantly notified of new releases.
 > Before installing Dify, make sure your machine meets the following minimum system requirements:
 >
 >- CPU >= 2 Core
->- RAM >= 4GB
+>- RAM >= 4 GiB

 </br>

@@ -196,10 +196,14 @@ If you'd like to configure a highly-available setup, there are community-contrib
 #### Using Terraform for Deployment

+Deploy Dify to Cloud Platform with a single click using [terraform](https://www.terraform.io/)
+
 ##### Azure Global
-Deploy Dify to Azure with a single click using [terraform](https://www.terraform.io/).
 - [Azure Terraform by @nikawang](https://github.com/nikawang/dify-azure-terraform)

+##### Google Cloud
+- [Google Cloud Terraform by @sotazum](https://github.com/DeNA/dify-google-cloud-terraform)
+
 ## Contributing

 For those who'd like to contribute code, see our [Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md).

@@ -219,7 +223,7 @@ At the same time, please consider supporting Dify by sharing it on social media
 * [Github Discussion](https://github.com/langgenius/dify/discussions). Best for: sharing feedback and asking questions.
 * [GitHub Issues](https://github.com/langgenius/dify/issues). Best for: bugs you encounter using Dify.AI, and feature proposals. See our [Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md).
 * [Discord](https://discord.gg/FngNHpbcY7). Best for: sharing your applications and hanging out with the community.
-* [Twitter](https://twitter.com/dify_ai). Best for: sharing your applications and hanging out with the community.
+* [X(Twitter)](https://twitter.com/dify_ai). Best for: sharing your applications and hanging out with the community.

 ## Star history

README_AR.md

@@ -17,7 +17,7 @@
     alt="chat on Discord"></a>
 <a href="https://twitter.com/intent/follow?screen_name=dify_ai" target="_blank">
     <img src="https://img.shields.io/twitter/follow/dify_ai?logo=X&color=%20%23f5f5f5"
-        alt="follow on Twitter"></a>
+        alt="follow on X(Twitter)"></a>
 <a href="https://hub.docker.com/u/langgenius" target="_blank">
     <img alt="Docker Pulls" src="https://img.shields.io/docker/pulls/langgenius/dify-web?labelColor=%20%23FDB062&color=%20%23f79009"></a>
 <a href="https://github.com/langgenius/dify/graphs/commit-activity" target="_blank">

@@ -179,10 +179,13 @@ docker compose up -d
 #### استخدام Terraform للتوزيع

+انشر Dify إلى منصة السحابة بنقرة واحدة باستخدام [terraform](https://www.terraform.io/)
+
 ##### Azure Global
-استخدم [terraform](https://www.terraform.io/) لنشر Dify على Azure بنقرة واحدة.
 - [Azure Terraform بواسطة @nikawang](https://github.com/nikawang/dify-azure-terraform)

+##### Google Cloud
+- [Google Cloud Terraform بواسطة @sotazum](https://github.com/DeNA/dify-google-cloud-terraform)
+
 ## المساهمة

README_CN.md (14 lines changed)

@@ -17,7 +17,7 @@
     alt="chat on Discord"></a>
 <a href="https://twitter.com/intent/follow?screen_name=dify_ai" target="_blank">
     <img src="https://img.shields.io/twitter/follow/dify_ai?logo=X&color=%20%23f5f5f5"
-        alt="follow on Twitter"></a>
+        alt="follow on X(Twitter)"></a>
 <a href="https://hub.docker.com/u/langgenius" target="_blank">
     <img alt="Docker Pulls" src="https://img.shields.io/docker/pulls/langgenius/dify-web?labelColor=%20%23FDB062&color=%20%23f79009"></a>
 <a href="https://github.com/langgenius/dify/graphs/commit-activity" target="_blank">

@@ -154,7 +154,7 @@ Dify 是一个开源的 LLM 应用开发平台。其直观的界面结合了 AI
 我们提供[ Dify 云服务](https://dify.ai),任何人都可以零设置尝试。它提供了自部署版本的所有功能,并在沙盒计划中包含 200 次免费的 GPT-4 调用。

 - **自托管 Dify 社区版</br>**
-  使用这个[入门指南](#quick-start)快速在您的环境中运行 Dify。
+  使用这个[入门指南](#快速启动)快速在您的环境中运行 Dify。
   使用我们的[文档](https://docs.dify.ai)进行进一步的参考和更深入的说明。

 - **面向企业/组织的 Dify</br>**

@@ -174,7 +174,7 @@ Dify 是一个开源的 LLM 应用开发平台。其直观的界面结合了 AI
 在安装 Dify 之前,请确保您的机器满足以下最低系统要求:

 - CPU >= 2 Core
-- RAM >= 4GB
+- RAM >= 4 GiB

 ### 快速启动

@@ -202,10 +202,14 @@ docker compose up -d
 #### 使用 Terraform 部署

+使用 [terraform](https://www.terraform.io/) 一键将 Dify 部署到云平台
+
 ##### Azure Global
-使用 [terraform](https://www.terraform.io/) 一键部署 Dify 到 Azure。
 - [Azure Terraform by @nikawang](https://github.com/nikawang/dify-azure-terraform)

+##### Google Cloud
+- [Google Cloud Terraform by @sotazum](https://github.com/DeNA/dify-google-cloud-terraform)
+
 ## Star History

 [](https://star-history.com/#langgenius/dify&Date)

@@ -232,7 +236,7 @@ docker compose up -d
 - [GitHub Issues](https://github.com/langgenius/dify/issues)。👉:使用 Dify.AI 时遇到的错误和问题,请参阅[贡献指南](CONTRIBUTING.md)。
 - [电子邮件支持](mailto:hello@dify.ai?subject=[GitHub]Questions%20About%20Dify)。👉:关于使用 Dify.AI 的问题。
 - [Discord](https://discord.gg/FngNHpbcY7)。👉:分享您的应用程序并与社区交流。
-- [Twitter](https://twitter.com/dify_ai)。👉:分享您的应用程序并与社区交流。
+- [X(Twitter)](https://twitter.com/dify_ai)。👉:分享您的应用程序并与社区交流。
 - [商业许可](mailto:business@dify.ai?subject=[GitHub]Business%20License%20Inquiry)。👉:有关商业用途许可 Dify.AI 的商业咨询。
 - [微信]() 👉:扫描下方二维码,添加微信好友,备注 Dify,我们将邀请您加入 Dify 社区。
 <img src="./images/wechat.png" alt="wechat" width="100"/>

README_ES.md

@@ -17,7 +17,7 @@
     alt="chat en Discord"></a>
 <a href="https://twitter.com/intent/follow?screen_name=dify_ai" target="_blank">
     <img src="https://img.shields.io/twitter/follow/dify_ai?logo=X&color=%20%23f5f5f5"
-        alt="seguir en Twitter"></a>
+        alt="seguir en X(Twitter)"></a>
 <a href="https://hub.docker.com/u/langgenius" target="_blank">
     <img alt="Descargas de Docker" src="https://img.shields.io/docker/pulls/langgenius/dify-web?labelColor=%20%23FDB062&color=%20%23f79009"></a>
 <a href="https://github.com/langgenius/dify/graphs/commit-activity" target="_blank">

@@ -204,10 +204,13 @@ Si desea configurar una configuración de alta disponibilidad, la comunidad prop
 #### Uso de Terraform para el despliegue

+Despliega Dify en una plataforma en la nube con un solo clic utilizando [terraform](https://www.terraform.io/)
+
 ##### Azure Global
-Utiliza [terraform](https://www.terraform.io/) para desplegar Dify en Azure con un solo clic.
 - [Azure Terraform por @nikawang](https://github.com/nikawang/dify-azure-terraform)

+##### Google Cloud
+- [Google Cloud Terraform por @sotazum](https://github.com/DeNA/dify-google-cloud-terraform)
+
 ## Contribuir

@@ -228,7 +231,7 @@ Al mismo tiempo, considera apoyar a Dify compartiéndolo en redes sociales y en
 * [Discusión en GitHub](https://github.com/langgenius/dify/discussions). Lo mejor para: compartir comentarios y hacer preguntas.
 * [Reporte de problemas en GitHub](https://github.com/langgenius/dify/issues). Lo mejor para: errores que encuentres usando Dify.AI y propuestas de características. Consulta nuestra [Guía de contribución](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md).
 * [Discord](https://discord.gg/FngNHpbcY7). Lo mejor para: compartir tus aplicaciones y pasar el rato con la comunidad.
-* [Twitter](https://twitter.com/dify_ai). Lo mejor para: compartir tus aplicaciones y pasar el rato con la comunidad.
+* [X(Twitter)](https://twitter.com/dify_ai). Lo mejor para: compartir tus aplicaciones y pasar el rato con la comunidad.

 ## Historial de Estrellas

README_FR.md

@@ -17,7 +17,7 @@
     alt="chat sur Discord"></a>
 <a href="https://twitter.com/intent/follow?screen_name=dify_ai" target="_blank">
     <img src="https://img.shields.io/twitter/follow/dify_ai?logo=X&color=%20%23f5f5f5"
-        alt="suivre sur Twitter"></a>
+        alt="suivre sur X(Twitter)"></a>
 <a href="https://hub.docker.com/u/langgenius" target="_blank">
     <img alt="Tirages Docker" src="https://img.shields.io/docker/pulls/langgenius/dify-web?labelColor=%20%23FDB062&color=%20%23f79009"></a>
 <a href="https://github.com/langgenius/dify/graphs/commit-activity" target="_blank">

@@ -202,10 +202,13 @@ Si vous souhaitez configurer une configuration haute disponibilité, la communau
 #### Utilisation de Terraform pour le déploiement

+Déployez Dify sur une plateforme cloud en un clic en utilisant [terraform](https://www.terraform.io/)
+
 ##### Azure Global
-Utilisez [terraform](https://www.terraform.io/) pour déployer Dify sur Azure en un clic.
 - [Azure Terraform par @nikawang](https://github.com/nikawang/dify-azure-terraform)

+##### Google Cloud
+- [Google Cloud Terraform par @sotazum](https://github.com/DeNA/dify-google-cloud-terraform)
+
 ## Contribuer

@@ -226,7 +229,7 @@ Dans le même temps, veuillez envisager de soutenir Dify en le partageant sur le
 * [Discussion GitHub](https://github.com/langgenius/dify/discussions). Meilleur pour: partager des commentaires et poser des questions.
 * [Problèmes GitHub](https://github.com/langgenius/dify/issues). Meilleur pour: les bogues que vous rencontrez en utilisant Dify.AI et les propositions de fonctionnalités. Consultez notre [Guide de contribution](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md).
 * [Discord](https://discord.gg/FngNHpbcY7). Meilleur pour: partager vos applications et passer du temps avec la communauté.
-* [Twitter](https://twitter.com/dify_ai). Meilleur pour: partager vos applications et passer du temps avec la communauté.
+* [X(Twitter)](https://twitter.com/dify_ai). Meilleur pour: partager vos applications et passer du temps avec la communauté.

 ## Historique des étoiles

README_JA.md (15 lines changed)

@@ -17,7 +17,7 @@
     alt="Discordでチャット"></a>
 <a href="https://twitter.com/intent/follow?screen_name=dify_ai" target="_blank">
     <img src="https://img.shields.io/twitter/follow/dify_ai?logo=X&color=%20%23f5f5f5"
-        alt="Twitterでフォロー"></a>
+        alt="X(Twitter)でフォロー"></a>
 <a href="https://hub.docker.com/u/langgenius" target="_blank">
     <img alt="Docker Pulls" src="https://img.shields.io/docker/pulls/langgenius/dify-web?labelColor=%20%23FDB062&color=%20%23f79009"></a>
 <a href="https://github.com/langgenius/dify/graphs/commit-activity" target="_blank">

@@ -68,7 +68,7 @@ DifyはオープンソースのLLMアプリケーション開発プラットフ
 プロンプトの作成、モデルパフォーマンスの比較が行え、チャットベースのアプリに音声合成などの機能も追加できます。

 **4. RAGパイプライン**:
-ドキュメントの取り込みから検索までをカバーする広範なRAG機能ができます。ほかにもPDF、PPT、その他の一般的なドキュメントフォーマットからのテキスト抽出のサーポイントも提供します。
+ドキュメントの取り込みから検索までをカバーする広範なRAG機能ができます。ほかにもPDF、PPT、その他の一般的なドキュメントフォーマットからのテキスト抽出のサポートも提供します。

 **5. エージェント機能**:
 LLM Function CallingやReActに基づくエージェントの定義が可能で、AIエージェント用のプリビルトまたはカスタムツールを追加できます。Difyには、Google検索、DALL·E、Stable Diffusion、WolframAlphaなどのAIエージェント用の50以上の組み込みツールが提供します。

@@ -201,10 +201,13 @@ docker compose up -d
 #### Terraformを使用したデプロイ

-##### Azure Global
-[terraform](https://www.terraform.io/) を使用して、AzureにDifyをワンクリックでデプロイします。
-- [nikawangのAzure Terraform](https://github.com/nikawang/dify-azure-terraform)
+[terraform](https://www.terraform.io/) を使用して、ワンクリックでDifyをクラウドプラットフォームにデプロイします
+
+##### Azure Global
+- [@nikawangによるAzure Terraform](https://github.com/nikawang/dify-azure-terraform)
+
+##### Google Cloud
+- [@sotazumによるGoogle Cloud Terraform](https://github.com/DeNA/dify-google-cloud-terraform)

 ## 貢献

@@ -225,7 +228,7 @@ docker compose up -d
 * [Github Discussion](https://github.com/langgenius/dify/discussions). 主に: フィードバックの共有や質問。
 * [GitHub Issues](https://github.com/langgenius/dify/issues). 主に: Dify.AIを使用する際に発生するエラーや問題については、[貢献ガイド](CONTRIBUTING_JA.md)を参照してください
 * [Discord](https://discord.gg/FngNHpbcY7). 主に: アプリケーションの共有やコミュニティとの交流。
-* [Twitter](https://twitter.com/dify_ai). 主に: アプリケーションの共有やコミュニティとの交流。
+* [X(Twitter)](https://twitter.com/dify_ai). 主に: アプリケーションの共有やコミュニティとの交流。

README_KL.md (13 lines changed)

@@ -17,7 +17,7 @@
     alt="chat on Discord"></a>
 <a href="https://twitter.com/intent/follow?screen_name=dify_ai" target="_blank">
     <img src="https://img.shields.io/twitter/follow/dify_ai?logo=X&color=%20%23f5f5f5"
-        alt="follow on Twitter"></a>
+        alt="follow on X(Twitter)"></a>
 <a href="https://hub.docker.com/u/langgenius" target="_blank">
     <img alt="Docker Pulls" src="https://img.shields.io/docker/pulls/langgenius/dify-web?labelColor=%20%23FDB062&color=%20%23f79009"></a>
 <a href="https://github.com/langgenius/dify/graphs/commit-activity" target="_blank">

@@ -202,10 +202,13 @@ If you'd like to configure a highly-available setup, there are community-contrib
 #### Terraform atorlugu pilersitsineq

-##### Azure Global
-Atoruk [terraform](https://www.terraform.io/) Dify-mik Azure-mut ataatsikkut ikkussuilluarlugu.
-- [Azure Terraform atorlugu @nikawang](https://github.com/nikawang/dify-azure-terraform)
+wa'logh nIqHom neH ghun deployment toy'wI' [terraform](https://www.terraform.io/) lo'laH.
+
+##### Azure Global
+- [Azure Terraform mung @nikawang](https://github.com/nikawang/dify-azure-terraform)
+
+##### Google Cloud
+- [Google Cloud Terraform qachlot @sotazum](https://github.com/DeNA/dify-google-cloud-terraform)

 ## Contributing

@@ -228,7 +231,7 @@ At the same time, please consider supporting Dify by sharing it on social media
 ). Best for: sharing feedback and asking questions.
 * [GitHub Issues](https://github.com/langgenius/dify/issues). Best for: bugs you encounter using Dify.AI, and feature proposals. See our [Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md).
 * [Discord](https://discord.gg/FngNHpbcY7). Best for: sharing your applications and hanging out with the community.
-* [Twitter](https://twitter.com/dify_ai). Best for: sharing your applications and hanging out with the community.
+* [X(Twitter)](https://twitter.com/dify_ai). Best for: sharing your applications and hanging out with the community.

 ## Star History

README_KR.md

@@ -17,7 +17,7 @@
     alt="chat on Discord"></a>
 <a href="https://twitter.com/intent/follow?screen_name=dify_ai" target="_blank">
     <img src="https://img.shields.io/twitter/follow/dify_ai?logo=X&color=%20%23f5f5f5"
-        alt="follow on Twitter"></a>
+        alt="follow on X(Twitter)"></a>
 <a href="https://hub.docker.com/u/langgenius" target="_blank">
     <img alt="Docker Pulls" src="https://img.shields.io/docker/pulls/langgenius/dify-web?labelColor=%20%23FDB062&color=%20%23f79009"></a>
 <a href="https://github.com/langgenius/dify/graphs/commit-activity" target="_blank">

@@ -39,7 +39,6 @@
 <a href="./README_AR.md"><img alt="README بالعربية" src="https://img.shields.io/badge/العربية-d9d9d9"></a>
 <a href="./README_TR.md"><img alt="Türkçe README" src="https://img.shields.io/badge/Türkçe-d9d9d9"></a>
 <a href="./README_VI.md"><img alt="README Tiếng Việt" src="https://img.shields.io/badge/Ti%E1%BA%BFng%20Vi%E1%BB%87t-d9d9d9"></a>
-
 </p>

@@ -195,10 +194,14 @@ Dify를 Kubernetes에 배포하고 프리미엄 스케일링 설정을 구성했
 #### Terraform을 사용한 배포

+[terraform](https://www.terraform.io/)을 사용하여 단 한 번의 클릭으로 Dify를 클라우드 플랫폼에 배포하십시오
+
 ##### Azure Global
-[terraform](https://www.terraform.io/)을 사용하여 Azure에 Dify를 원클릭으로 배포하세요.
 - [nikawang의 Azure Terraform](https://github.com/nikawang/dify-azure-terraform)

+##### Google Cloud
+- [sotazum의 Google Cloud Terraform](https://github.com/DeNA/dify-google-cloud-terraform)
+
 ## 기여

 코드에 기여하고 싶은 분들은 [기여 가이드](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md)를 참조하세요.

README_TR.md (12 lines changed)

@@ -17,7 +17,7 @@
     alt="Discord'da sohbet et"></a>
 <a href="https://twitter.com/intent/follow?screen_name=dify_ai" target="_blank">
     <img src="https://img.shields.io/twitter/follow/dify_ai?logo=X&color=%20%23f5f5f5"
-        alt="Twitter'da takip et"></a>
+        alt="X(Twitter)'da takip et"></a>
 <a href="https://hub.docker.com/u/langgenius" target="_blank">
     <img alt="Docker Çekmeleri" src="https://img.shields.io/docker/pulls/langgenius/dify-web?labelColor=%20%23FDB062&color=%20%23f79009"></a>
 <a href="https://github.com/langgenius/dify/graphs/commit-activity" target="_blank">

@@ -200,9 +200,13 @@ Yüksek kullanılabilirliğe sahip bir kurulum yapılandırmak isterseniz, Dify'
 #### Dağıtım için Terraform Kullanımı

+Dify'ı bulut platformuna tek tıklamayla dağıtın [terraform](https://www.terraform.io/) kullanarak
+
 ##### Azure Global
-[Terraform](https://www.terraform.io/) kullanarak Dify'ı Azure'a tek tıklamayla dağıtın.
-- [@nikawang tarafından Azure Terraform](https://github.com/nikawang/dify-azure-terraform)
+- [Azure Terraform tarafından @nikawang](https://github.com/nikawang/dify-azure-terraform)

+##### Google Cloud
+- [Google Cloud Terraform tarafından @sotazum](https://github.com/DeNA/dify-google-cloud-terraform)
+
 ## Katkıda Bulunma

@@ -222,7 +226,7 @@ Aynı zamanda, lütfen Dify'ı sosyal medyada, etkinliklerde ve konferanslarda p
 * [Github Tartışmaları](https://github.com/langgenius/dify/discussions). En uygun: geri bildirim paylaşmak ve soru sormak için.
 * [GitHub Sorunları](https://github.com/langgenius/dify/issues). En uygun: Dify.AI kullanırken karşılaştığınız hatalar ve özellik önerileri için. [Katkı Kılavuzumuza](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md) bakın.
 * [Discord](https://discord.gg/FngNHpbcY7). En uygun: uygulamalarınızı paylaşmak ve toplulukla vakit geçirmek için.
-* [Twitter](https://twitter.com/dify_ai). En uygun: uygulamalarınızı paylaşmak ve toplulukla vakit geçirmek için.
+* [X(Twitter)](https://twitter.com/dify_ai). En uygun: uygulamalarınızı paylaşmak ve toplulukla vakit geçirmek için.

 ## Star history

README_VI.md (10 lines changed)

@@ -17,7 +17,7 @@
     alt="chat trên Discord"></a>
 <a href="https://twitter.com/intent/follow?screen_name=dify_ai" target="_blank">
     <img src="https://img.shields.io/twitter/follow/dify_ai?logo=X&color=%20%23f5f5f5"
-        alt="theo dõi trên Twitter"></a>
+        alt="theo dõi trên X(Twitter)"></a>
 <a href="https://hub.docker.com/u/langgenius" target="_blank">
     <img alt="Docker Pulls" src="https://img.shields.io/docker/pulls/langgenius/dify-web?labelColor=%20%23FDB062&color=%20%23f79009"></a>
 <a href="https://github.com/langgenius/dify/graphs/commit-activity" target="_blank">

@@ -196,10 +196,14 @@ Nếu bạn muốn cấu hình một cài đặt có độ sẵn sàng cao, có
 #### Sử dụng Terraform để Triển khai

+Triển khai Dify lên nền tảng đám mây với một cú nhấp chuột bằng cách sử dụng [terraform](https://www.terraform.io/)
+
 ##### Azure Global
-Triển khai Dify lên Azure chỉ với một cú nhấp chuột bằng cách sử dụng [terraform](https://www.terraform.io/).
 - [Azure Terraform bởi @nikawang](https://github.com/nikawang/dify-azure-terraform)

+##### Google Cloud
+- [Google Cloud Terraform bởi @sotazum](https://github.com/DeNA/dify-google-cloud-terraform)
+
 ## Đóng góp

 Đối với những người muốn đóng góp mã, xem [Hướng dẫn Đóng góp](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md) của chúng tôi.

@@ -219,7 +223,7 @@ Triển khai Dify lên Azure chỉ với một cú nhấp chuột bằng cách s
 * [Thảo luận GitHub](https://github.com/langgenius/dify/discussions). Tốt nhất cho: chia sẻ phản hồi và đặt câu hỏi.
 * [Vấn đề GitHub](https://github.com/langgenius/dify/issues). Tốt nhất cho: lỗi bạn gặp phải khi sử dụng Dify.AI và đề xuất tính năng. Xem [Hướng dẫn Đóng góp](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md) của chúng tôi.
 * [Discord](https://discord.gg/FngNHpbcY7). Tốt nhất cho: chia sẻ ứng dụng của bạn và giao lưu với cộng đồng.
-* [Twitter](https://twitter.com/dify_ai). Tốt nhất cho: chia sẻ ứng dụng của bạn và giao lưu với cộng đồng.
+* [X(Twitter)](https://twitter.com/dify_ai). Tốt nhất cho: chia sẻ ứng dụng của bạn và giao lưu với cộng đồng.

 ## Lịch sử Yêu thích

api/.env.example

@@ -20,6 +20,9 @@ FILES_URL=http://127.0.0.1:5001
 # The time in seconds after the signature is rejected
 FILES_ACCESS_TIMEOUT=300

+# Access token expiration time in minutes
+ACCESS_TOKEN_EXPIRE_MINUTES=60
+
 # celery configuration
 CELERY_BROKER_URL=redis://:difyai123456@localhost:6379/1

@@ -39,7 +42,7 @@ DB_DATABASE=dify
 # Storage configuration
 # use for store upload files, private keys...
-# storage type: local, s3, azure-blob, google-storage
+# storage type: local, s3, aliyun-oss, azure-blob, baidu-obs, google-storage, huawei-obs, oci-storage, tencent-cos, volcengine-tos, supabase
 STORAGE_TYPE=local
 STORAGE_LOCAL_PATH=storage
 S3_USE_AWS_MANAGED_IAM=false

@@ -60,7 +63,8 @@ ALIYUN_OSS_SECRET_KEY=your-secret-key
 ALIYUN_OSS_ENDPOINT=your-endpoint
 ALIYUN_OSS_AUTH_VERSION=v1
 ALIYUN_OSS_REGION=your-region
+# Don't start with '/'. OSS doesn't support leading slash in object names.
 ALIYUN_OSS_PATH=your-path
 # Google Storage configuration
 GOOGLE_STORAGE_BUCKET_NAME=yout-bucket-name
 GOOGLE_STORAGE_SERVICE_ACCOUNT_JSON_BASE64=your-google-service-account-json-base64-string

@@ -72,6 +76,18 @@ TENCENT_COS_SECRET_ID=your-secret-id
 TENCENT_COS_REGION=your-region
 TENCENT_COS_SCHEME=your-scheme

+# Huawei OBS Storage Configuration
+HUAWEI_OBS_BUCKET_NAME=your-bucket-name
+HUAWEI_OBS_SECRET_KEY=your-secret-key
+HUAWEI_OBS_ACCESS_KEY=your-access-key
+HUAWEI_OBS_SERVER=your-server-url
+
+# Baidu OBS Storage Configuration
+BAIDU_OBS_BUCKET_NAME=your-bucket-name
+BAIDU_OBS_SECRET_KEY=your-secret-key
+BAIDU_OBS_ACCESS_KEY=your-access-key
+BAIDU_OBS_ENDPOINT=your-server-url
+
 # OCI Storage configuration
 OCI_ENDPOINT=your-endpoint
 OCI_BUCKET_NAME=your-bucket-name

@@ -79,11 +95,23 @@ OCI_ACCESS_KEY=your-access-key
 OCI_SECRET_KEY=your-secret-key
 OCI_REGION=your-region

+# Volcengine tos Storage configuration
+VOLCENGINE_TOS_ENDPOINT=your-endpoint
+VOLCENGINE_TOS_BUCKET_NAME=your-bucket-name
+VOLCENGINE_TOS_ACCESS_KEY=your-access-key
+VOLCENGINE_TOS_SECRET_KEY=your-secret-key
+VOLCENGINE_TOS_REGION=your-region
+
+# Supabase Storage Configuration
+SUPABASE_BUCKET_NAME=your-bucket-name
+SUPABASE_API_KEY=your-access-key
+SUPABASE_URL=your-server-url
+
 # CORS configuration
 WEB_API_CORS_ALLOW_ORIGINS=http://127.0.0.1:3000,*
 CONSOLE_CORS_ALLOW_ORIGINS=http://127.0.0.1:3000,*

-# Vector database configuration, support: weaviate, qdrant, milvus, myscale, relyt, pgvecto_rs, pgvector, pgvector, chroma, opensearch, tidb_vector
+# Vector database configuration, support: weaviate, qdrant, milvus, myscale, relyt, pgvecto_rs, pgvector, pgvector, chroma, opensearch, tidb_vector, vikingdb, upstash
 VECTOR_STORE=weaviate

 # Weaviate configuration

@@ -100,11 +128,10 @@ QDRANT_GRPC_ENABLED=false
 QDRANT_GRPC_PORT=6334

 # Milvus configuration
-MILVUS_HOST=127.0.0.1
-MILVUS_PORT=19530
+MILVUS_URI=http://127.0.0.1:19530
+MILVUS_TOKEN=
 MILVUS_USER=root
 MILVUS_PASSWORD=Milvus
-MILVUS_SECURE=false

 # MyScale configuration
 MYSCALE_HOST=127.0.0.1
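The Milvus change above replaces the host/port pair with a single `MILVUS_URI` plus `MILVUS_TOKEN`, matching the URI-based connection style of newer Milvus clients. A sketch of how a client might consume the new variables (illustrative wiring only, not Dify's actual code; assumes the `pymilvus` package is installed):

```python
import os

from pymilvus import MilvusClient  # assumed client library for this sketch

client = MilvusClient(
    uri=os.getenv("MILVUS_URI", "http://127.0.0.1:19530"),  # replaces MILVUS_HOST/MILVUS_PORT
    token=os.getenv("MILVUS_TOKEN", ""),                    # empty when auth is disabled
)
```
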
@@ -149,6 +176,8 @@ PGVECTOR_PORT=5433
 PGVECTOR_USER=postgres
 PGVECTOR_PASSWORD=postgres
 PGVECTOR_DATABASE=postgres
+PGVECTOR_MIN_CONNECTION=1
+PGVECTOR_MAX_CONNECTION=5

 # Tidb Vector configuration
 TIDB_VECTOR_HOST=xxx.eu-central-1.xxx.aws.tidbcloud.com

@@ -182,14 +211,39 @@ OPENSEARCH_USER=admin
 OPENSEARCH_PASSWORD=admin
 OPENSEARCH_SECURE=true

+# Baidu configuration
+BAIDU_VECTOR_DB_ENDPOINT=http://127.0.0.1:5287
+BAIDU_VECTOR_DB_CONNECTION_TIMEOUT_MS=30000
+BAIDU_VECTOR_DB_ACCOUNT=root
+BAIDU_VECTOR_DB_API_KEY=dify
+BAIDU_VECTOR_DB_DATABASE=dify
+BAIDU_VECTOR_DB_SHARD=1
+BAIDU_VECTOR_DB_REPLICAS=3
+
+# Upstash configuration
+UPSTASH_VECTOR_URL=your-server-url
+UPSTASH_VECTOR_TOKEN=your-access-token
+
+# ViKingDB configuration
+VIKINGDB_ACCESS_KEY=your-ak
+VIKINGDB_SECRET_KEY=your-sk
+VIKINGDB_REGION=cn-shanghai
+VIKINGDB_HOST=api-vikingdb.xxx.volces.com
+VIKINGDB_SCHEMA=http
+VIKINGDB_CONNECTION_TIMEOUT=30
+VIKINGDB_SOCKET_TIMEOUT=30
+
 # Upload configuration
 UPLOAD_FILE_SIZE_LIMIT=15
 UPLOAD_FILE_BATCH_LIMIT=5
 UPLOAD_IMAGE_FILE_SIZE_LIMIT=10
+UPLOAD_VIDEO_FILE_SIZE_LIMIT=100
+UPLOAD_AUDIO_FILE_SIZE_LIMIT=50

 # Model Configuration
 MULTIMODAL_SEND_IMAGE_FORMAT=base64
 PROMPT_GENERATION_MAX_TOKENS=512
+CODE_GENERATION_MAX_TOKENS=1024

 # Mail configuration, support: resend, smtp
 MAIL_TYPE=

@@ -250,8 +304,15 @@ HTTP_REQUEST_MAX_WRITE_TIMEOUT=600
 HTTP_REQUEST_NODE_MAX_BINARY_SIZE=10485760
 HTTP_REQUEST_NODE_MAX_TEXT_SIZE=1048576

+# Respect X-* headers to redirect clients
+RESPECT_XFORWARD_HEADERS_ENABLED=false
+
 # Log file path
 LOG_FILE=
+# Log file max size, the unit is MB
+LOG_FILE_MAX_SIZE=20
+# Log file max backup count
+LOG_FILE_BACKUP_COUNT=5

 # Indexing configuration
 INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH=1000

@@ -260,6 +321,7 @@ INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH=1000
 WORKFLOW_MAX_EXECUTION_STEPS=500
 WORKFLOW_MAX_EXECUTION_TIME=1200
 WORKFLOW_CALL_MAX_DEPTH=5
+MAX_VARIABLE_SIZE=204800

 # App configuration
 APP_MAX_EXECUTION_TIME=1200

@@ -277,3 +339,6 @@ POSITION_TOOL_EXCLUDES=
 POSITION_PROVIDER_PINS=
 POSITION_PROVIDER_INCLUDES=
 POSITION_PROVIDER_EXCLUDES=
+
+# Reset password token expiry minutes
+RESET_PASSWORD_TOKEN_EXPIRY_MINUTES=5
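Among the additions above, `LOG_FILE_MAX_SIZE` (in MB) and `LOG_FILE_BACKUP_COUNT` map directly onto Python's `RotatingFileHandler`, which the api/app.py diff further below already uses. A sketch of the wiring these variables imply (illustrative only; the real setup lives in the application code, and the "dify.log" default is a placeholder):

```python
import logging
import os
from logging.handlers import RotatingFileHandler

log_file = os.getenv("LOG_FILE") or "dify.log"  # placeholder default for this sketch
handler = RotatingFileHandler(
    filename=log_file,
    maxBytes=int(os.getenv("LOG_FILE_MAX_SIZE", "20")) * 1024 * 1024,  # MB -> bytes
    backupCount=int(os.getenv("LOG_FILE_BACKUP_COUNT", "5")),
)
logging.basicConfig(level=logging.INFO, handlers=[handler])
```
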
api/.vscode/launch.json.example (15 lines changed)

@@ -1,8 +1,15 @@
 {
     "version": "0.2.0",
+    "compounds": [
+        {
+            "name": "Launch Flask and Celery",
+            "configurations": ["Python: Flask", "Python: Celery"]
+        }
+    ],
     "configurations": [
         {
             "name": "Python: Flask",
+            "consoleName": "Flask",
             "type": "debugpy",
             "request": "launch",
             "python": "${workspaceFolder}/.venv/bin/python",

@@ -17,12 +24,12 @@
             },
             "args": [
                 "run",
-                "--host=0.0.0.0",
                 "--port=5001"
             ]
         },
         {
             "name": "Python: Celery",
+            "consoleName": "Celery",
             "type": "debugpy",
             "request": "launch",
             "python": "${workspaceFolder}/.venv/bin/python",

@@ -45,10 +52,10 @@
                 "-c",
                 "1",
                 "--loglevel",
-                "info",
+                "DEBUG",
                 "-Q",
                 "dataset,generation,mail,ops_trace,app_deletion"
             ]
-        },
+        }
     ]
 }

api/Dockerfile

@@ -55,7 +55,7 @@ RUN apt-get update \
     && echo "deb http://deb.debian.org/debian testing main" > /etc/apt/sources.list \
     && apt-get update \
     # For Security
-    && apt-get install -y --no-install-recommends zlib1g=1:1.3.dfsg+really1.3.1-1 expat=2.6.2-1 libldap-2.5-0=2.5.18+dfsg-3 perl=5.38.2-5 libsqlite3-0=3.46.0-1 \
+    && apt-get install -y --no-install-recommends zlib1g=1:1.3.dfsg+really1.3.1-1 expat=2.6.3-1 libldap-2.5-0=2.5.18+dfsg-3 perl=5.38.2-5 libsqlite3-0=3.46.1-1 \
     && apt-get autoremove -y \
     && rm -rf /var/lib/apt/lists/*

api/README.md

@@ -65,14 +65,12 @@
 8. Start Dify [web](../web) service.
 9. Setup your application by visiting `http://localhost:3000`...
-10. If you need to debug local async processing, please start the worker service.
+10. If you need to handle and debug the async tasks (e.g. dataset importing and documents indexing), please start the worker service.

    ```bash
    poetry run python -m celery -A app.celery worker -P gevent -c 1 --loglevel INFO -Q dataset,generation,mail,ops_trace,app_deletion
    ```

-   The started celery app handles the async tasks, e.g. dataset importing and documents indexing.
-
 ## Testing

 1. Install dependencies for both the backend and the test environment

@@ -87,3 +85,4 @@
    cd ../
    poetry run -C api bash dev/pytest/pytest_all_tests.sh
    ```

api/app.py (230 lines changed)

@@ -1,5 +1,7 @@
 import os

+from configs import dify_config
+
 if os.environ.get("DEBUG", "false").lower() != "true":
     from gevent import monkey

@@ -10,243 +12,37 @@ if os.environ.get("DEBUG", "false").lower() != "true":
     grpc.experimental.gevent.init_gevent()

 import json
 import logging
 import sys
 import threading
 import time
 import warnings
 from logging.handlers import RotatingFileHandler

-from flask import Flask, Response, request
-from flask_cors import CORS
-from werkzeug.exceptions import Unauthorized
+from flask import Response

-import contexts
-from commands import register_commands
-from configs import dify_config
+from app_factory import create_app

 # DO NOT REMOVE BELOW
-from events import event_handlers
-from extensions import (
-    ext_celery,
-    ext_code_based_extension,
-    ext_compress,
-    ext_database,
-    ext_hosting_provider,
-    ext_login,
-    ext_mail,
-    ext_migrate,
-    ext_redis,
-    ext_sentry,
-    ext_storage,
-)
+from events import event_handlers  # noqa: F401
-from extensions.ext_database import db
-from extensions.ext_login import login_manager
-from libs.passport import PassportService
 # TODO: Find a way to avoid importing models here
-from models import account, dataset, model, source, task, tool, tools, web
-from services.account_service import AccountService
+from models import account, dataset, model, source, task, tool, tools, web  # noqa: F401
 # DO NOT REMOVE ABOVE

 warnings.simplefilter("ignore", ResourceWarning)

-# fix windows platform
-if os.name == "nt":
-    os.system('tzutil /s "UTC"')
-else:
-    os.environ["TZ"] = "UTC"
+os.environ["TZ"] = "UTC"
+# windows platform not support tzset
+if hasattr(time, "tzset"):
     time.tzset()

-
-class DifyApp(Flask):
-    pass
-
-
-# -------------
-# Configuration
-# -------------
-
-
-config_type = os.getenv("EDITION", default="SELF_HOSTED")  # ce edition first
-
-
-# ----------------------------
-# Application Factory Function
-# ----------------------------
-
-
-def create_flask_app_with_configs() -> Flask:
-    """
-    create a raw flask app
-    with configs loaded from .env file
-    """
-    dify_app = DifyApp(__name__)
-    dify_app.config.from_mapping(dify_config.model_dump())
-
-    # populate configs into system environment variables
-    for key, value in dify_app.config.items():
-        if isinstance(value, str):
-            os.environ[key] = value
-        elif isinstance(value, int | float | bool):
-            os.environ[key] = str(value)
-        elif value is None:
-            os.environ[key] = ""
-
-    return dify_app
-
-
-def create_app() -> Flask:
-    app = create_flask_app_with_configs()
-
-    app.secret_key = app.config["SECRET_KEY"]
-
-    log_handlers = None
-    log_file = app.config.get("LOG_FILE")
-    if log_file:
-        log_dir = os.path.dirname(log_file)
-        os.makedirs(log_dir, exist_ok=True)
-        log_handlers = [
-            RotatingFileHandler(
-                filename=log_file,
-                maxBytes=1024 * 1024 * 1024,
-                backupCount=5,
-            ),
-            logging.StreamHandler(sys.stdout),
-        ]
-
-    logging.basicConfig(
-        level=app.config.get("LOG_LEVEL"),
-        format=app.config.get("LOG_FORMAT"),
-        datefmt=app.config.get("LOG_DATEFORMAT"),
-        handlers=log_handlers,
-        force=True,
-    )
-    log_tz = app.config.get("LOG_TZ")
-    if log_tz:
-        from datetime import datetime
-
-        import pytz
-
-        timezone = pytz.timezone(log_tz)
-
-        def time_converter(seconds):
-            return datetime.utcfromtimestamp(seconds).astimezone(timezone).timetuple()
-
-        for handler in logging.root.handlers:
-            handler.formatter.converter = time_converter
-    initialize_extensions(app)
-    register_blueprints(app)
-    register_commands(app)
-
-    return app
-
-
-def initialize_extensions(app):
-    # Since the application instance is now created, pass it to each Flask
-    # extension instance to bind it to the Flask application instance (app)
-    ext_compress.init_app(app)
|
||||
ext_code_based_extension.init()
|
||||
ext_database.init_app(app)
|
||||
ext_migrate.init(app, db)
|
||||
ext_redis.init_app(app)
|
||||
ext_storage.init_app(app)
|
||||
ext_celery.init_app(app)
|
||||
ext_login.init_app(app)
|
||||
ext_mail.init_app(app)
|
||||
ext_hosting_provider.init_app(app)
|
||||
ext_sentry.init_app(app)
|
||||
|
||||
|
||||
# Flask-Login configuration
|
||||
@login_manager.request_loader
|
||||
def load_user_from_request(request_from_flask_login):
|
||||
"""Load user based on the request."""
|
||||
if request.blueprint not in ["console", "inner_api"]:
|
||||
return None
|
||||
# Check if the user_id contains a dot, indicating the old format
|
||||
auth_header = request.headers.get("Authorization", "")
|
||||
if not auth_header:
|
||||
auth_token = request.args.get("_token")
|
||||
if not auth_token:
|
||||
raise Unauthorized("Invalid Authorization token.")
|
||||
else:
|
||||
if " " not in auth_header:
|
||||
raise Unauthorized("Invalid Authorization header format. Expected 'Bearer <api-key>' format.")
|
||||
auth_scheme, auth_token = auth_header.split(None, 1)
|
||||
auth_scheme = auth_scheme.lower()
|
||||
if auth_scheme != "bearer":
|
||||
raise Unauthorized("Invalid Authorization header format. Expected 'Bearer <api-key>' format.")
|
||||
|
||||
decoded = PassportService().verify(auth_token)
|
||||
user_id = decoded.get("user_id")
|
||||
|
||||
account = AccountService.load_logged_in_account(account_id=user_id, token=auth_token)
|
||||
if account:
|
||||
contexts.tenant_id.set(account.current_tenant_id)
|
||||
return account
|
||||
|
||||
|
||||
@login_manager.unauthorized_handler
|
||||
def unauthorized_handler():
|
||||
"""Handle unauthorized requests."""
|
||||
return Response(
|
||||
json.dumps({"code": "unauthorized", "message": "Unauthorized."}),
|
||||
status=401,
|
||||
content_type="application/json",
|
||||
)
|
||||
|
||||
|
||||
# register blueprint routers
|
||||
def register_blueprints(app):
|
||||
from controllers.console import bp as console_app_bp
|
||||
from controllers.files import bp as files_bp
|
||||
from controllers.inner_api import bp as inner_api_bp
|
||||
from controllers.service_api import bp as service_api_bp
|
||||
from controllers.web import bp as web_bp
|
||||
|
||||
CORS(
|
||||
service_api_bp,
|
||||
allow_headers=["Content-Type", "Authorization", "X-App-Code"],
|
||||
methods=["GET", "PUT", "POST", "DELETE", "OPTIONS", "PATCH"],
|
||||
)
|
||||
app.register_blueprint(service_api_bp)
|
||||
|
||||
CORS(
|
||||
web_bp,
|
||||
resources={r"/*": {"origins": app.config["WEB_API_CORS_ALLOW_ORIGINS"]}},
|
||||
supports_credentials=True,
|
||||
allow_headers=["Content-Type", "Authorization", "X-App-Code"],
|
||||
methods=["GET", "PUT", "POST", "DELETE", "OPTIONS", "PATCH"],
|
||||
expose_headers=["X-Version", "X-Env"],
|
||||
)
|
||||
|
||||
app.register_blueprint(web_bp)
|
||||
|
||||
CORS(
|
||||
console_app_bp,
|
||||
resources={r"/*": {"origins": app.config["CONSOLE_CORS_ALLOW_ORIGINS"]}},
|
||||
supports_credentials=True,
|
||||
allow_headers=["Content-Type", "Authorization"],
|
||||
methods=["GET", "PUT", "POST", "DELETE", "OPTIONS", "PATCH"],
|
||||
expose_headers=["X-Version", "X-Env"],
|
||||
)
|
||||
|
||||
app.register_blueprint(console_app_bp)
|
||||
|
||||
CORS(files_bp, allow_headers=["Content-Type"], methods=["GET", "PUT", "POST", "DELETE", "OPTIONS", "PATCH"])
|
||||
app.register_blueprint(files_bp)
|
||||
|
||||
app.register_blueprint(inner_api_bp)
|
||||
|
||||
|
||||
# create app
|
||||
app = create_app()
|
||||
celery = app.extensions["celery"]
|
||||
|
||||
if app.config.get("TESTING"):
|
||||
if dify_config.TESTING:
|
||||
print("App is running in TESTING mode")
|
||||
|
||||
|
||||
@@ -254,15 +50,15 @@ if app.config.get("TESTING"):
|
||||
def after_request(response):
|
||||
"""Add Version headers to the response."""
|
||||
response.set_cookie("remember_token", "", expires=0)
|
||||
response.headers.add("X-Version", app.config["CURRENT_VERSION"])
|
||||
response.headers.add("X-Env", app.config["DEPLOY_ENV"])
|
||||
response.headers.add("X-Version", dify_config.CURRENT_VERSION)
|
||||
response.headers.add("X-Env", dify_config.DEPLOY_ENV)
|
||||
return response
|
||||
|
||||
|
||||
@app.route("/health")
|
||||
def health():
|
||||
return Response(
|
||||
json.dumps({"pid": os.getpid(), "status": "ok", "version": app.config["CURRENT_VERSION"]}),
|
||||
json.dumps({"pid": os.getpid(), "status": "ok", "version": dify_config.CURRENT_VERSION}),
|
||||
status=200,
|
||||
content_type="application/json",
|
||||
)
|
||||
|
||||
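A hedged way to exercise the `/health` route above without starting a server is Flask's built-in test client. The sketch assumes the `api/` directory is on the path with a valid `.env`, since importing `app` builds the application at import time:

```python
# Minimal sketch, not part of the diff: hit /health via Flask's test client.
from app import app  # `app = create_app()` runs at import time

with app.test_client() as client:
    resp = client.get("/health")
    print(resp.status_code)  # 200
    print(resp.get_json())   # {"pid": ..., "status": "ok", "version": ...}
```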
176  api/app_factory.py (new file)

@@ -0,0 +1,176 @@
import os

if os.environ.get("DEBUG", "false").lower() != "true":
from gevent import monkey

monkey.patch_all()

import grpc.experimental.gevent

grpc.experimental.gevent.init_gevent()

import json

from flask import Flask, Response, request
from flask_cors import CORS
from werkzeug.exceptions import Unauthorized

import contexts
from commands import register_commands
from configs import dify_config
from extensions import (
ext_celery,
ext_code_based_extension,
ext_compress,
ext_database,
ext_hosting_provider,
ext_logging,
ext_login,
ext_mail,
ext_migrate,
ext_proxy_fix,
ext_redis,
ext_sentry,
ext_storage,
)
from extensions.ext_database import db
from extensions.ext_login import login_manager
from libs.passport import PassportService
from services.account_service import AccountService

class DifyApp(Flask):
pass

# ----------------------------
# Application Factory Function
# ----------------------------
def create_flask_app_with_configs() -> Flask:
"""
create a raw flask app
with configs loaded from .env file
"""
dify_app = DifyApp(__name__)
dify_app.config.from_mapping(dify_config.model_dump())

# populate configs into system environment variables
for key, value in dify_app.config.items():
if isinstance(value, str):
os.environ[key] = value
elif isinstance(value, int | float | bool):
os.environ[key] = str(value)
elif value is None:
os.environ[key] = ""

return dify_app
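The loop in `create_flask_app_with_configs` mirrors every config value into `os.environ` so that code reading raw environment variables stays consistent with `dify_config`. A standalone sketch of the same pattern, with a plain dict standing in for `dify_config.model_dump()` (values here are illustrative):

```python
import os

# Illustrative stand-in for dify_config.model_dump()
settings = {"APPLICATION_NAME": "langgenius/dify", "DEBUG": False, "LOG_FILE": None}

for key, value in settings.items():
    if isinstance(value, str):
        os.environ[key] = value
    elif isinstance(value, int | float | bool):  # PEP 604 unions need Python 3.10+
        os.environ[key] = str(value)
    elif value is None:
        os.environ[key] = ""

assert os.environ["DEBUG"] == "False"  # booleans are stringified, not lowercased
```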

def create_app() -> Flask:
app = create_flask_app_with_configs()
app.secret_key = dify_config.SECRET_KEY
initialize_extensions(app)
register_blueprints(app)
register_commands(app)

return app

def initialize_extensions(app):
# Since the application instance is now created, pass it to each Flask
# extension instance to bind it to the Flask application instance (app)
ext_logging.init_app(app)
ext_compress.init_app(app)
ext_code_based_extension.init()
ext_database.init_app(app)
ext_migrate.init(app, db)
ext_redis.init_app(app)
ext_storage.init_app(app)
ext_celery.init_app(app)
ext_login.init_app(app)
ext_mail.init_app(app)
ext_hosting_provider.init_app(app)
ext_sentry.init_app(app)
ext_proxy_fix.init_app(app)

# Flask-Login configuration
@login_manager.request_loader
def load_user_from_request(request_from_flask_login):
"""Load user based on the request."""
if request.blueprint not in {"console", "inner_api"}:
return None
# Check if the user_id contains a dot, indicating the old format
auth_header = request.headers.get("Authorization", "")
if not auth_header:
auth_token = request.args.get("_token")
if not auth_token:
raise Unauthorized("Invalid Authorization token.")
else:
if " " not in auth_header:
raise Unauthorized("Invalid Authorization header format. Expected 'Bearer <api-key>' format.")
auth_scheme, auth_token = auth_header.split(None, 1)
auth_scheme = auth_scheme.lower()
if auth_scheme != "bearer":
raise Unauthorized("Invalid Authorization header format. Expected 'Bearer <api-key>' format.")

decoded = PassportService().verify(auth_token)
user_id = decoded.get("user_id")

logged_in_account = AccountService.load_logged_in_account(account_id=user_id)
if logged_in_account:
contexts.tenant_id.set(logged_in_account.current_tenant_id)
return logged_in_account
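For reference, the loader above accepts either an `Authorization: Bearer <api-key>` header or a `_token` query parameter on `console`/`inner_api` blueprint routes. A hedged sketch of a well-formed request; the route path and token are placeholders, not taken from the diff:

```python
import requests  # assumes the requests package is available

resp = requests.get(
    "http://localhost:5001/console/api/apps",  # hypothetical console route
    headers={"Authorization": "Bearer <your-access-token>"},  # placeholder token
)
# An invalid or missing token yields the 401 JSON body from unauthorized_handler below.
print(resp.status_code)
```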

@login_manager.unauthorized_handler
def unauthorized_handler():
"""Handle unauthorized requests."""
return Response(
json.dumps({"code": "unauthorized", "message": "Unauthorized."}),
status=401,
content_type="application/json",
)

# register blueprint routers
def register_blueprints(app):
from controllers.console import bp as console_app_bp
from controllers.files import bp as files_bp
from controllers.inner_api import bp as inner_api_bp
from controllers.service_api import bp as service_api_bp
from controllers.web import bp as web_bp

CORS(
service_api_bp,
allow_headers=["Content-Type", "Authorization", "X-App-Code"],
methods=["GET", "PUT", "POST", "DELETE", "OPTIONS", "PATCH"],
)
app.register_blueprint(service_api_bp)

CORS(
web_bp,
resources={r"/*": {"origins": dify_config.WEB_API_CORS_ALLOW_ORIGINS}},
supports_credentials=True,
allow_headers=["Content-Type", "Authorization", "X-App-Code"],
methods=["GET", "PUT", "POST", "DELETE", "OPTIONS", "PATCH"],
expose_headers=["X-Version", "X-Env"],
)

app.register_blueprint(web_bp)

CORS(
console_app_bp,
resources={r"/*": {"origins": dify_config.CONSOLE_CORS_ALLOW_ORIGINS}},
supports_credentials=True,
allow_headers=["Content-Type", "Authorization"],
methods=["GET", "PUT", "POST", "DELETE", "OPTIONS", "PATCH"],
expose_headers=["X-Version", "X-Env"],
)

app.register_blueprint(console_app_bp)

CORS(files_bp, allow_headers=["Content-Type"], methods=["GET", "PUT", "POST", "DELETE", "OPTIONS", "PATCH"])
app.register_blueprint(files_bp)

app.register_blueprint(inner_api_bp)

231  api/commands.py

@@ -19,7 +19,7 @@ from extensions.ext_redis import redis_client
from libs.helper import email as email_validate
from libs.password import hash_password, password_pattern, valid_password
from libs.rsa import generate_key_pair
from models.account import Tenant
from models import Tenant
from models.dataset import Dataset, DatasetCollectionBinding, DocumentSegment
from models.dataset import Document as DatasetDocument
from models.model import Account, App, AppAnnotationSetting, AppMode, Conversation, MessageAnnotation
@@ -28,28 +28,28 @@ from services.account_service import RegisterService, TenantService

@click.command("reset-password", help="Reset the account password.")
@click.option("--email", prompt=True, help="The email address of the account whose password you need to reset")
@click.option("--new-password", prompt=True, help="the new password.")
@click.option("--password-confirm", prompt=True, help="the new password confirm.")
@click.option("--email", prompt=True, help="Account email to reset password for")
@click.option("--new-password", prompt=True, help="New password")
@click.option("--password-confirm", prompt=True, help="Confirm new password")
def reset_password(email, new_password, password_confirm):
"""
Reset password of owner account
Only available in SELF_HOSTED mode
"""
if str(new_password).strip() != str(password_confirm).strip():
click.echo(click.style("sorry. The two passwords do not match.", fg="red"))
click.echo(click.style("Passwords do not match.", fg="red"))
return

account = db.session.query(Account).filter(Account.email == email).one_or_none()

if not account:
click.echo(click.style("sorry. the account: [{}] not exist .".format(email), fg="red"))
click.echo(click.style("Account not found for email: {}".format(email), fg="red"))
return

try:
valid_password(new_password)
except:
click.echo(click.style("sorry. The passwords must match {} ".format(password_pattern), fg="red"))
click.echo(click.style("Invalid password. Must match {}".format(password_pattern), fg="red"))
return

# generate password salt
@@ -62,37 +62,37 @@ def reset_password(email, new_password, password_confirm):
account.password = base64_password_hashed
account.password_salt = base64_salt
db.session.commit()
click.echo(click.style("Congratulations! Password has been reset.", fg="green"))
click.echo(click.style("Password reset successfully.", fg="green"))
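These click commands can be driven in-process with Click's test runner, which is handy when verifying the reworked prompts and messages. A hedged sketch (the email and passwords are placeholders, and an application context is assumed to be required since the command touches `db.session`):

```python
from click.testing import CliRunner

from app_factory import create_app
from commands import reset_password  # the command defined above

app = create_app()
runner = CliRunner()
with app.app_context():  # db.session inside the command needs an app context
    result = runner.invoke(
        reset_password,
        ["--email", "owner@example.com",
         "--new-password", "NewPass123!",
         "--password-confirm", "NewPass123!"],
    )
print(result.output)  # e.g. "Password reset successfully." or a red error message
```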

@click.command("reset-email", help="Reset the account email.")
@click.option("--email", prompt=True, help="The old email address of the account whose email you need to reset")
@click.option("--new-email", prompt=True, help="the new email.")
@click.option("--email-confirm", prompt=True, help="the new email confirm.")
@click.option("--email", prompt=True, help="Current account email")
@click.option("--new-email", prompt=True, help="New email")
@click.option("--email-confirm", prompt=True, help="Confirm new email")
def reset_email(email, new_email, email_confirm):
"""
Replace account email
:return:
"""
if str(new_email).strip() != str(email_confirm).strip():
click.echo(click.style("Sorry, new email and confirm email do not match.", fg="red"))
click.echo(click.style("New emails do not match.", fg="red"))
return

account = db.session.query(Account).filter(Account.email == email).one_or_none()

if not account:
click.echo(click.style("sorry. the account: [{}] not exist .".format(email), fg="red"))
click.echo(click.style("Account not found for email: {}".format(email), fg="red"))
return

try:
email_validate(new_email)
except:
click.echo(click.style("sorry. {} is not a valid email. ".format(email), fg="red"))
click.echo(click.style("Invalid email: {}".format(new_email), fg="red"))
return

account.email = new_email
db.session.commit()
click.echo(click.style("Congratulations!, email has been reset.", fg="green"))
click.echo(click.style("Email updated successfully.", fg="green"))

@click.command(
@@ -104,7 +104,7 @@ def reset_email(email, new_email, email_confirm):
)
@click.confirmation_option(
prompt=click.style(
"Are you sure you want to reset encrypt key pair?" " this operation cannot be rolled back!", fg="red"
"Are you sure you want to reset encrypt key pair? This operation cannot be rolled back!", fg="red"
)
)
def reset_encrypt_key_pair():
@@ -114,13 +114,13 @@ def reset_encrypt_key_pair():
Only support SELF_HOSTED mode.
"""
if dify_config.EDITION != "SELF_HOSTED":
click.echo(click.style("Sorry, only support SELF_HOSTED mode.", fg="red"))
click.echo(click.style("This command is only for SELF_HOSTED installations.", fg="red"))
return

tenants = db.session.query(Tenant).all()
for tenant in tenants:
if not tenant:
click.echo(click.style("Sorry, no workspace found. Please enter /install to initialize.", fg="red"))
click.echo(click.style("No workspaces found. Run /install first.", fg="red"))
return

tenant.encrypt_public_key = generate_key_pair(tenant.id)
@@ -131,18 +131,18 @@ def reset_encrypt_key_pair():

click.echo(
click.style(
"Congratulations! " "the asymmetric key pair of workspace {} has been reset.".format(tenant.id),
"Congratulations! The asymmetric key pair of workspace {} has been reset.".format(tenant.id),
fg="green",
)
)

@click.command("vdb-migrate", help="migrate vector db.")
@click.command("vdb-migrate", help="Migrate vector db.")
@click.option("--scope", default="all", prompt=False, help="The scope of vector database to migrate, Default is All.")
def vdb_migrate(scope: str):
if scope in ["knowledge", "all"]:
if scope in {"knowledge", "all"}:
migrate_knowledge_vector_database()
if scope in ["annotation", "all"]:
if scope in {"annotation", "all"}:
migrate_annotation_vector_database()

@@ -150,7 +150,7 @@ def migrate_annotation_vector_database():
"""
Migrate annotation datas to target vector database .
"""
click.echo(click.style("Start migrate annotation data.", fg="green"))
click.echo(click.style("Starting annotation data migration.", fg="green"))
create_count = 0
skipped_count = 0
total_count = 0
@@ -174,14 +174,14 @@ def migrate_annotation_vector_database():
f"Processing the {total_count} app {app.id}. " + f"{create_count} created, {skipped_count} skipped."
)
try:
click.echo("Create app annotation index: {}".format(app.id))
click.echo("Creating app annotation index: {}".format(app.id))
app_annotation_setting = (
db.session.query(AppAnnotationSetting).filter(AppAnnotationSetting.app_id == app.id).first()
)

if not app_annotation_setting:
skipped_count = skipped_count + 1
click.echo("App annotation setting is disabled: {}".format(app.id))
click.echo("App annotation setting disabled: {}".format(app.id))
continue
# get dataset_collection_binding info
dataset_collection_binding = (
@@ -190,7 +190,7 @@ def migrate_annotation_vector_database():
.first()
)
if not dataset_collection_binding:
click.echo("App annotation collection binding is not exist: {}".format(app.id))
click.echo("App annotation collection binding not found: {}".format(app.id))
continue
annotations = db.session.query(MessageAnnotation).filter(MessageAnnotation.app_id == app.id).all()
dataset = Dataset(
@@ -211,11 +211,11 @@ def migrate_annotation_vector_database():
documents.append(document)

vector = Vector(dataset, attributes=["doc_id", "annotation_id", "app_id"])
click.echo(f"Start to migrate annotation, app_id: {app.id}.")
click.echo(f"Migrating annotations for app: {app.id}.")

try:
vector.delete()
click.echo(click.style(f"Successfully delete vector index for app: {app.id}.", fg="green"))
click.echo(click.style(f"Deleted vector index for app {app.id}.", fg="green"))
except Exception as e:
click.echo(click.style(f"Failed to delete vector index for app {app.id}.", fg="red"))
raise e
@@ -223,12 +223,12 @@
try:
click.echo(
click.style(
f"Start to created vector index with {len(documents)} annotations for app {app.id}.",
f"Creating vector index with {len(documents)} annotations for app {app.id}.",
fg="green",
)
)
vector.create(documents)
click.echo(click.style(f"Successfully created vector index for app {app.id}.", fg="green"))
click.echo(click.style(f"Created vector index for app {app.id}.", fg="green"))
except Exception as e:
click.echo(click.style(f"Failed to created vector index for app {app.id}.", fg="red"))
raise e
@@ -237,14 +237,14 @@
except Exception as e:
click.echo(
click.style(
"Create app annotation index error: {} {}".format(e.__class__.__name__, str(e)), fg="red"
"Error creating app annotation index: {} {}".format(e.__class__.__name__, str(e)), fg="red"
)
)
continue

click.echo(
click.style(
f"Congratulations! Create {create_count} app annotation indexes, and skipped {skipped_count} apps.",
f"Migration complete. Created {create_count} app annotation indexes. Skipped {skipped_count} apps.",
fg="green",
)
)
@@ -254,11 +254,31 @@ def migrate_knowledge_vector_database():
"""
Migrate vector database datas to target vector database .
"""
click.echo(click.style("Start migrate vector db.", fg="green"))
click.echo(click.style("Starting vector database migration.", fg="green"))
create_count = 0
skipped_count = 0
total_count = 0
vector_type = dify_config.VECTOR_STORE
upper_colletion_vector_types = {
VectorType.MILVUS,
VectorType.PGVECTOR,
VectorType.RELYT,
VectorType.WEAVIATE,
VectorType.ORACLE,
VectorType.ELASTICSEARCH,
}
lower_colletion_vector_types = {
VectorType.ANALYTICDB,
VectorType.CHROMA,
VectorType.MYSCALE,
VectorType.PGVECTO_RS,
VectorType.TIDB_VECTOR,
VectorType.OPENSEARCH,
VectorType.TENCENT,
VectorType.BAIDU,
VectorType.VIKINGDB,
VectorType.UPSTASH,
}
page = 1
while True:
try:
@@ -275,21 +295,18 @@ def migrate_knowledge_vector_database():
for dataset in datasets:
total_count = total_count + 1
click.echo(
f"Processing the {total_count} dataset {dataset.id}. "
+ f"{create_count} created, {skipped_count} skipped."
f"Processing the {total_count} dataset {dataset.id}. {create_count} created, {skipped_count} skipped."
)
try:
click.echo("Create dataset vdb index: {}".format(dataset.id))
click.echo("Creating dataset vector database index: {}".format(dataset.id))
if dataset.index_struct_dict:
if dataset.index_struct_dict["type"] == vector_type:
skipped_count = skipped_count + 1
continue
collection_name = ""
if vector_type == VectorType.WEAVIATE:
dataset_id = dataset.id
dataset_id = dataset.id
if vector_type in upper_colletion_vector_types:
collection_name = Dataset.gen_collection_name_by_id(dataset_id)
index_struct_dict = {"type": VectorType.WEAVIATE, "vector_store": {"class_prefix": collection_name}}
dataset.index_struct = json.dumps(index_struct_dict)
elif vector_type == VectorType.QDRANT:
if dataset.collection_binding_id:
dataset_collection_binding = (
@@ -300,66 +317,24 @@ def migrate_knowledge_vector_database():
if dataset_collection_binding:
collection_name = dataset_collection_binding.collection_name
else:
raise ValueError("Dataset Collection Bindings is not exist!")
raise ValueError("Dataset Collection Binding not found")
else:
dataset_id = dataset.id
collection_name = Dataset.gen_collection_name_by_id(dataset_id)
index_struct_dict = {"type": VectorType.QDRANT, "vector_store": {"class_prefix": collection_name}}
dataset.index_struct = json.dumps(index_struct_dict)

elif vector_type == VectorType.MILVUS:
dataset_id = dataset.id
collection_name = Dataset.gen_collection_name_by_id(dataset_id)
index_struct_dict = {"type": VectorType.MILVUS, "vector_store": {"class_prefix": collection_name}}
dataset.index_struct = json.dumps(index_struct_dict)
elif vector_type == VectorType.RELYT:
dataset_id = dataset.id
collection_name = Dataset.gen_collection_name_by_id(dataset_id)
index_struct_dict = {"type": "relyt", "vector_store": {"class_prefix": collection_name}}
dataset.index_struct = json.dumps(index_struct_dict)
elif vector_type == VectorType.TENCENT:
dataset_id = dataset.id
collection_name = Dataset.gen_collection_name_by_id(dataset_id)
index_struct_dict = {"type": VectorType.TENCENT, "vector_store": {"class_prefix": collection_name}}
dataset.index_struct = json.dumps(index_struct_dict)
elif vector_type == VectorType.PGVECTOR:
dataset_id = dataset.id
collection_name = Dataset.gen_collection_name_by_id(dataset_id)
index_struct_dict = {"type": VectorType.PGVECTOR, "vector_store": {"class_prefix": collection_name}}
dataset.index_struct = json.dumps(index_struct_dict)
elif vector_type == VectorType.OPENSEARCH:
dataset_id = dataset.id
collection_name = Dataset.gen_collection_name_by_id(dataset_id)
index_struct_dict = {
"type": VectorType.OPENSEARCH,
"vector_store": {"class_prefix": collection_name},
}
dataset.index_struct = json.dumps(index_struct_dict)
elif vector_type == VectorType.ANALYTICDB:
dataset_id = dataset.id
collection_name = Dataset.gen_collection_name_by_id(dataset_id)
index_struct_dict = {
"type": VectorType.ANALYTICDB,
"vector_store": {"class_prefix": collection_name},
}
dataset.index_struct = json.dumps(index_struct_dict)
elif vector_type == VectorType.ELASTICSEARCH:
dataset_id = dataset.id
index_name = Dataset.gen_collection_name_by_id(dataset_id)
index_struct_dict = {"type": "elasticsearch", "vector_store": {"class_prefix": index_name}}
dataset.index_struct = json.dumps(index_struct_dict)
elif vector_type in lower_colletion_vector_types:
collection_name = Dataset.gen_collection_name_by_id(dataset_id).lower()
else:
raise ValueError(f"Vector store {vector_type} is not supported.")

index_struct_dict = {"type": vector_type, "vector_store": {"class_prefix": collection_name}}
dataset.index_struct = json.dumps(index_struct_dict)
vector = Vector(dataset)
click.echo(f"Start to migrate dataset {dataset.id}.")
click.echo(f"Migrating dataset {dataset.id}.")

try:
vector.delete()
click.echo(
click.style(
f"Successfully delete vector index {collection_name} for dataset {dataset.id}.", fg="green"
)
click.style(f"Deleted vector index {collection_name} for dataset {dataset.id}.", fg="green")
)
except Exception as e:
click.echo(
@@ -411,14 +386,13 @@ def migrate_knowledge_vector_database():
try:
click.echo(
click.style(
f"Start to created vector index with {len(documents)} documents of {segments_count} segments for dataset {dataset.id}.",
f"Creating vector index with {len(documents)} documents of {segments_count}"
f" segments for dataset {dataset.id}.",
fg="green",
)
)
vector.create(documents)
click.echo(
click.style(f"Successfully created vector index for dataset {dataset.id}.", fg="green")
)
click.echo(click.style(f"Created vector index for dataset {dataset.id}.", fg="green"))
except Exception as e:
click.echo(click.style(f"Failed to created vector index for dataset {dataset.id}.", fg="red"))
raise e
@@ -429,13 +403,13 @@ def migrate_knowledge_vector_database():
except Exception as e:
db.session.rollback()
click.echo(
click.style("Create dataset index error: {} {}".format(e.__class__.__name__, str(e)), fg="red")
click.style("Error creating dataset index: {} {}".format(e.__class__.__name__, str(e)), fg="red")
)
continue

click.echo(
click.style(
f"Congratulations! Create {create_count} dataset indexes, and skipped {skipped_count} datasets.", fg="green"
f"Migration complete. Created {create_count} dataset indexes. Skipped {skipped_count} datasets.", fg="green"
)
)

@@ -445,7 +419,7 @@ def convert_to_agent_apps():
"""
Convert Agent Assistant to Agent App.
"""
click.echo(click.style("Start convert to agent apps.", fg="green"))
click.echo(click.style("Starting convert to agent apps.", fg="green"))

proceeded_app_ids = []

@@ -453,14 +427,14 @@ def convert_to_agent_apps():
# fetch first 1000 apps
sql_query = """SELECT a.id AS id FROM apps a
INNER JOIN app_model_configs am ON a.app_model_config_id=am.id
WHERE a.mode = 'chat'
AND am.agent_mode is not null
WHERE a.mode = 'chat'
AND am.agent_mode is not null
AND (
am.agent_mode like '%"strategy": "function_call"%'
am.agent_mode like '%"strategy": "function_call"%'
OR am.agent_mode like '%"strategy": "react"%'
)
)
AND (
am.agent_mode like '{"enabled": true%'
am.agent_mode like '{"enabled": true%'
OR am.agent_mode like '{"max_iteration": %'
) ORDER BY a.created_at DESC LIMIT 1000
"""
@@ -496,23 +470,23 @@ def convert_to_agent_apps():
except Exception as e:
click.echo(click.style("Convert app error: {} {}".format(e.__class__.__name__, str(e)), fg="red"))

click.echo(click.style("Congratulations! Converted {} agent apps.".format(len(proceeded_app_ids)), fg="green"))
click.echo(click.style("Conversion complete. Converted {} agent apps.".format(len(proceeded_app_ids)), fg="green"))

@click.command("add-qdrant-doc-id-index", help="add qdrant doc_id index.")
@click.option("--field", default="metadata.doc_id", prompt=False, help="index field , default is metadata.doc_id.")
@click.command("add-qdrant-doc-id-index", help="Add Qdrant doc_id index.")
@click.option("--field", default="metadata.doc_id", prompt=False, help="Index field , default is metadata.doc_id.")
def add_qdrant_doc_id_index(field: str):
click.echo(click.style("Start add qdrant doc_id index.", fg="green"))
click.echo(click.style("Starting Qdrant doc_id index creation.", fg="green"))
vector_type = dify_config.VECTOR_STORE
if vector_type != "qdrant":
click.echo(click.style("Sorry, only support qdrant vector store.", fg="red"))
click.echo(click.style("This command only supports Qdrant vector store.", fg="red"))
return
create_count = 0

try:
bindings = db.session.query(DatasetCollectionBinding).all()
if not bindings:
click.echo(click.style("Sorry, no dataset collection bindings found.", fg="red"))
click.echo(click.style("No dataset collection bindings found.", fg="red"))
return
import qdrant_client
from qdrant_client.http.exceptions import UnexpectedResponse
@@ -522,7 +496,7 @@ def add_qdrant_doc_id_index(field: str):

for binding in bindings:
if dify_config.QDRANT_URL is None:
raise ValueError("Qdrant url is required.")
raise ValueError("Qdrant URL is required.")
qdrant_config = QdrantConfig(
endpoint=dify_config.QDRANT_URL,
api_key=dify_config.QDRANT_API_KEY,
@@ -539,40 +513,39 @@ def add_qdrant_doc_id_index(field: str):
except UnexpectedResponse as e:
# Collection does not exist, so return
if e.status_code == 404:
click.echo(
click.style(f"Collection not found, collection_name:{binding.collection_name}.", fg="red")
)
click.echo(click.style(f"Collection not found: {binding.collection_name}.", fg="red"))
continue
# Some other error occurred, so re-raise the exception
else:
click.echo(
click.style(
f"Failed to create qdrant index, collection_name:{binding.collection_name}.", fg="red"
f"Failed to create Qdrant index for collection: {binding.collection_name}.", fg="red"
)
)

except Exception as e:
click.echo(click.style("Failed to create qdrant client.", fg="red"))
click.echo(click.style("Failed to create Qdrant client.", fg="red"))

click.echo(click.style(f"Congratulations! Create {create_count} collection indexes.", fg="green"))
click.echo(click.style(f"Index creation complete. Created {create_count} collection indexes.", fg="green"))

@click.command("create-tenant", help="Create account and tenant.")
@click.option("--email", prompt=True, help="The email address of the tenant account.")
@click.option("--email", prompt=True, help="Tenant account email.")
@click.option("--name", prompt=True, help="Workspace name.")
@click.option("--language", prompt=True, help="Account language, default: en-US.")
def create_tenant(email: str, language: Optional[str] = None):
def create_tenant(email: str, language: Optional[str] = None, name: Optional[str] = None):
"""
Create tenant account
"""
if not email:
click.echo(click.style("Sorry, email is required.", fg="red"))
click.echo(click.style("Email is required.", fg="red"))
return

# Create account
email = email.strip()

if "@" not in email:
click.echo(click.style("Sorry, invalid email address.", fg="red"))
click.echo(click.style("Invalid email address.", fg="red"))
return

account_name = email.split("@")[0]
@@ -580,29 +553,31 @@ def create_tenant(email: str, language: Optional[str] = None):
if language not in languages:
language = "en-US"

name = name.strip()

# generate random password
new_password = secrets.token_urlsafe(16)

# register account
account = RegisterService.register(email=email, name=account_name, password=new_password, language=language)

TenantService.create_owner_tenant_if_not_exist(account)
TenantService.create_owner_tenant_if_not_exist(account, name)

click.echo(
click.style(
"Congratulations! Account and tenant created.\n" "Account: {}\nPassword: {}".format(email, new_password),
"Account and tenant created.\nAccount: {}\nPassword: {}".format(email, new_password),
fg="green",
)
)

@click.command("upgrade-db", help="upgrade the database")
@click.command("upgrade-db", help="Upgrade the database")
def upgrade_db():
click.echo("Preparing database migration...")
lock = redis_client.lock(name="db_upgrade_lock", timeout=60)
if lock.acquire(blocking=False):
try:
click.echo(click.style("Start database migration.", fg="green"))
click.echo(click.style("Starting database migration.", fg="green"))

# run db migration
import flask_migrate
@@ -612,7 +587,7 @@ def upgrade_db():
click.echo(click.style("Database migration successful!", fg="green"))

except Exception as e:
logging.exception(f"Database migration failed, error: {e}")
logging.exception(f"Database migration failed: {e}")
finally:
lock.release()
else:
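`upgrade_db` guards the migration with a non-blocking Redis lock so that only one api container runs `flask_migrate.upgrade()` at a time. The same pattern in isolation, sketched with redis-py and a placeholder connection URL:

```python
import redis  # redis-py, which backs redis_client in extensions.ext_redis

client = redis.Redis.from_url("redis://localhost:6379/0")  # placeholder URL
lock = client.lock(name="db_upgrade_lock", timeout=60)  # auto-expires after 60s

if lock.acquire(blocking=False):
    try:
        print("lock held: safe to run the migration")
    finally:
        lock.release()
else:
    print("another instance is already migrating; skipping")
```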
@@ -624,7 +599,7 @@ def fix_app_site_missing():
"""
Fix app related site missing issue.
"""
click.echo(click.style("Start fix app related site missing issue.", fg="green"))
click.echo(click.style("Starting fix for missing app-related sites.", fg="green"))

failed_app_ids = []
while True:
@@ -647,22 +622,22 @@ where sites.id is null limit 1000"""
if tenant:
accounts = tenant.get_accounts()
if not accounts:
print("Fix app {} failed.".format(app.id))
print("Fix failed for app {}".format(app.id))
continue

account = accounts[0]
print("Fix app {} related site missing issue.".format(app.id))
print("Fixing missing site for app {}".format(app.id))
app_was_created.send(app, account=account)
except Exception as e:
failed_app_ids.append(app_id)
click.echo(click.style("Fix app {} related site missing issue failed!".format(app_id), fg="red"))
click.echo(click.style("Failed to fix missing site for app {}".format(app_id), fg="red"))
logging.exception(f"Fix app related site missing issue failed, error: {e}")
continue

if not processed_count:
break

click.echo(click.style("Congratulations! Fix app related site missing issue successful!", fg="green"))
click.echo(click.style("Fix for missing app-related sites completed successfully!", fg="green"))

def register_commands(app):

@@ -4,30 +4,30 @@ from pydantic_settings import BaseSettings

class DeploymentConfig(BaseSettings):
"""
Deployment configs
Configuration settings for application deployment
"""

APPLICATION_NAME: str = Field(
description="application name",
description="Name of the application, used for identification and logging purposes",
default="langgenius/dify",
)

DEBUG: bool = Field(
description="whether to enable debug mode.",
description="Enable debug mode for additional logging and development features",
default=False,
)

TESTING: bool = Field(
description="",
description="Enable testing mode for running automated tests",
default=False,
)

EDITION: str = Field(
description="deployment edition",
description="Deployment edition of the application (e.g., 'SELF_HOSTED', 'CLOUD')",
default="SELF_HOSTED",
)

DEPLOY_ENV: str = Field(
description="deployment environment, default to PRODUCTION.",
description="Deployment environment (e.g., 'PRODUCTION', 'DEVELOPMENT'), default to PRODUCTION",
default="PRODUCTION",
)
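The rewritten descriptions above sit on plain pydantic-settings fields, which resolve from the environment first and fall back to the declared default. A minimal sketch of that behavior (class trimmed to two fields for illustration):

```python
import os

from pydantic import Field
from pydantic_settings import BaseSettings


class DeploymentConfigSketch(BaseSettings):
    DEBUG: bool = Field(default=False)
    EDITION: str = Field(default="SELF_HOSTED")


os.environ["DEBUG"] = "true"
print(DeploymentConfigSketch().DEBUG)    # True -- read from the environment
print(DeploymentConfigSketch().EDITION)  # SELF_HOSTED -- falls back to the default
```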
@@ -4,17 +4,17 @@ from pydantic_settings import BaseSettings

class EnterpriseFeatureConfig(BaseSettings):
"""
Enterprise feature configs.
Configuration for enterprise-level features.
**Before using, please contact business@dify.ai by email to inquire about licensing matters.**
"""

ENTERPRISE_ENABLED: bool = Field(
description="whether to enable enterprise features."
description="Enable or disable enterprise-level features."
"Before using, please contact business@dify.ai by email to inquire about licensing matters.",
default=False,
)

CAN_REPLACE_LOGO: bool = Field(
description="whether to allow replacing enterprise logo.",
description="Allow customization of the enterprise logo.",
default=False,
)

@@ -6,30 +6,31 @@ from pydantic_settings import BaseSettings

class NotionConfig(BaseSettings):
"""
Notion integration configs
Configuration settings for Notion integration
"""

NOTION_CLIENT_ID: Optional[str] = Field(
description="Notion client ID",
description="Client ID for Notion API authentication. Required for OAuth 2.0 flow.",
default=None,
)

NOTION_CLIENT_SECRET: Optional[str] = Field(
description="Notion client secret key",
description="Client secret for Notion API authentication. Required for OAuth 2.0 flow.",
default=None,
)

NOTION_INTEGRATION_TYPE: Optional[str] = Field(
description="Notion integration type, default to None, available values: internal.",
description="Type of Notion integration."
" Set to 'internal' for internal integrations, or None for public integrations.",
default=None,
)

NOTION_INTERNAL_SECRET: Optional[str] = Field(
description="Notion internal secret key",
description="Secret key for internal Notion integrations. Required when NOTION_INTEGRATION_TYPE is 'internal'.",
default=None,
)

NOTION_INTEGRATION_TOKEN: Optional[str] = Field(
description="Notion integration token",
description="Integration token for Notion API access. Used for direct API calls without OAuth flow.",
default=None,
)

@@ -6,20 +6,23 @@ from pydantic_settings import BaseSettings

class SentryConfig(BaseSettings):
"""
Sentry configs
Configuration settings for Sentry error tracking and performance monitoring
"""

SENTRY_DSN: Optional[str] = Field(
description="Sentry DSN",
description="Sentry Data Source Name (DSN)."
" This is the unique identifier of your Sentry project, used to send events to the correct project.",
default=None,
)

SENTRY_TRACES_SAMPLE_RATE: NonNegativeFloat = Field(
description="Sentry trace sample rate",
description="Sample rate for Sentry performance monitoring traces."
" Value between 0.0 and 1.0, where 1.0 means 100% of traces are sent to Sentry.",
default=1.0,
)

SENTRY_PROFILES_SAMPLE_RATE: NonNegativeFloat = Field(
description="Sentry profiles sample rate",
description="Sample rate for Sentry profiling."
" Value between 0.0 and 1.0, where 1.0 means 100% of profiles are sent to Sentry.",
default=1.0,
)

@@ -1,6 +1,15 @@
from typing import Optional
from typing import Annotated, Literal, Optional

from pydantic import AliasChoices, Field, HttpUrl, NegativeInt, NonNegativeInt, PositiveInt, computed_field
from pydantic import (
AliasChoices,
Field,
HttpUrl,
NegativeInt,
NonNegativeInt,
PositiveFloat,
PositiveInt,
computed_field,
)
from pydantic_settings import BaseSettings

from configs.feature.hosted_service import HostedServiceConfig
@@ -8,145 +17,158 @@ from configs.feature.hosted_service import HostedServiceConfig

class SecurityConfig(BaseSettings):
"""
Secret Key configs
Security-related configurations for the application
"""

SECRET_KEY: Optional[str] = Field(
description="Your App secret key will be used for securely signing the session cookie"
SECRET_KEY: str = Field(
description="Secret key for secure session cookie signing."
"Make sure you are changing this key for your deployment with a strong key."
"You can generate a strong key using `openssl rand -base64 42`."
"Alternatively you can set it with `SECRET_KEY` environment variable.",
default=None,
"Generate a strong key using `openssl rand -base64 42` or set via the `SECRET_KEY` environment variable.",
default="",
)
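The description references `openssl rand -base64 42`; an equivalent using only the Python standard library, offered as a hedged convenience for environments without openssl (not part of the diff):

```python
import base64
import secrets

# Same shape as `openssl rand -base64 42`: 42 random bytes, base64-encoded.
print(base64.b64encode(secrets.token_bytes(42)).decode())
```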

RESET_PASSWORD_TOKEN_EXPIRY_HOURS: PositiveInt = Field(
description="Expiry time in hours for reset token",
default=24,
RESET_PASSWORD_TOKEN_EXPIRY_MINUTES: PositiveInt = Field(
description="Duration in minutes for which a password reset token remains valid",
default=5,
)

LOGIN_DISABLED: bool = Field(
description="Whether to disable login checks",
default=False,
)

ADMIN_API_KEY_ENABLE: bool = Field(
description="Whether to enable admin api key for authentication",
default=False,
)

ADMIN_API_KEY: Optional[str] = Field(
description="admin api key for authentication",
default=None,
)

class AppExecutionConfig(BaseSettings):
"""
App Execution configs
Configuration parameters for application execution
"""

APP_MAX_EXECUTION_TIME: PositiveInt = Field(
description="execution timeout in seconds for app execution",
description="Maximum allowed execution time for the application in seconds",
default=1200,
)
APP_MAX_ACTIVE_REQUESTS: NonNegativeInt = Field(
description="max active request per app, 0 means unlimited",
description="Maximum number of concurrent active requests per app (0 for unlimited)",
default=0,
)

class CodeExecutionSandboxConfig(BaseSettings):
"""
Code Execution Sandbox configs
Configuration for the code execution sandbox environment
"""

CODE_EXECUTION_ENDPOINT: HttpUrl = Field(
description="endpoint URL of code execution servcie",
description="URL endpoint for the code execution service",
default="http://sandbox:8194",
)

CODE_EXECUTION_API_KEY: str = Field(
description="API key for code execution service",
description="API key for accessing the code execution service",
default="dify-sandbox",
)

CODE_EXECUTION_CONNECT_TIMEOUT: Optional[float] = Field(
description="connect timeout in seconds for code execution request",
description="Connection timeout in seconds for code execution requests",
default=10.0,
)

CODE_EXECUTION_READ_TIMEOUT: Optional[float] = Field(
description="read timeout in seconds for code execution request",
description="Read timeout in seconds for code execution requests",
default=60.0,
)

CODE_EXECUTION_WRITE_TIMEOUT: Optional[float] = Field(
description="write timeout in seconds for code execution request",
description="Write timeout in seconds for code execution request",
default=10.0,
)

CODE_MAX_NUMBER: PositiveInt = Field(
description="max depth for code execution",
description="Maximum allowed numeric value in code execution",
default=9223372036854775807,
)

CODE_MIN_NUMBER: NegativeInt = Field(
description="",
description="Minimum allowed numeric value in code execution",
default=-9223372036854775807,
)

CODE_MAX_DEPTH: PositiveInt = Field(
description="max depth for code execution",
description="Maximum allowed depth for nested structures in code execution",
default=5,
)

CODE_MAX_PRECISION: PositiveInt = Field(
description="max precision digits for float type in code execution",
description="Maximum number of decimal places for floating-point numbers in code execution",
default=20,
)

CODE_MAX_STRING_LENGTH: PositiveInt = Field(
description="max string length for code execution",
description="Maximum allowed length for strings in code execution",
default=80000,
)

CODE_MAX_STRING_ARRAY_LENGTH: PositiveInt = Field(
description="",
description="Maximum allowed length for string arrays in code execution",
default=30,
)

CODE_MAX_OBJECT_ARRAY_LENGTH: PositiveInt = Field(
description="",
description="Maximum allowed length for object arrays in code execution",
default=30,
)

CODE_MAX_NUMBER_ARRAY_LENGTH: PositiveInt = Field(
description="",
description="Maximum allowed length for numeric arrays in code execution",
default=1000,
)

class EndpointConfig(BaseSettings):
"""
Module URL configs
Configuration for various application endpoints and URLs
"""

CONSOLE_API_URL: str = Field(
description="The backend URL prefix of the console API."
"used to concatenate the login authorization callback or notion integration callback.",
description="Base URL for the console API,"
"used for login authentication callback or notion integration callbacks",
default="",
)

CONSOLE_WEB_URL: str = Field(
description="The front-end URL prefix of the console web."
"used to concatenate some front-end addresses and for CORS configuration use.",
description="Base URL for the console web interface," "used for frontend references and CORS configuration",
default="",
)

SERVICE_API_URL: str = Field(
description="Service API Url prefix." "used to display Service API Base Url to the front-end.",
description="Base URL for the service API, displayed to users for API access",
default="",
)

APP_WEB_URL: str = Field(
description="WebApp Url prefix." "used to display WebAPP API Base Url to the front-end.",
description="Base URL for the web application, used for frontend references",
default="",
)

class FileAccessConfig(BaseSettings):
"""
File Access configs
Configuration for file access and handling
"""

FILES_URL: str = Field(
description="File preview or download Url prefix."
" used to display File preview or download Url to the front-end or as Multi-model inputs;"
description="Base URL for file preview or download,"
" used for frontend display and multi-model inputs"
"Url is signed and has expiration time.",
validation_alias=AliasChoices("FILES_URL", "CONSOLE_API_URL"),
alias_priority=1,
@@ -154,49 +176,59 @@ class FileAccessConfig(BaseSettings):
)

FILES_ACCESS_TIMEOUT: int = Field(
description="timeout in seconds for file accessing",
description="Expiration time in seconds for file access URLs",
default=300,
)

class FileUploadConfig(BaseSettings):
"""
File Uploading configs
Configuration for file upload limitations
"""

UPLOAD_FILE_SIZE_LIMIT: NonNegativeInt = Field(
description="size limit in Megabytes for uploading files",
description="Maximum allowed file size for uploads in megabytes",
default=15,
)

UPLOAD_FILE_BATCH_LIMIT: NonNegativeInt = Field(
description="batch size limit for uploading files",
description="Maximum number of files allowed in a single upload batch",
default=5,
)

UPLOAD_IMAGE_FILE_SIZE_LIMIT: NonNegativeInt = Field(
description="image file size limit in Megabytes for uploading files",
description="Maximum allowed image file size for uploads in megabytes",
default=10,
)

UPLOAD_VIDEO_FILE_SIZE_LIMIT: NonNegativeInt = Field(
description="video file size limit in Megabytes for uploading files",
default=100,
)

UPLOAD_AUDIO_FILE_SIZE_LIMIT: NonNegativeInt = Field(
description="audio file size limit in Megabytes for uploading files",
default=50,
)

BATCH_UPLOAD_LIMIT: NonNegativeInt = Field(
description="", # todo: to be clarified
description="Maximum number of files allowed in a batch upload operation",
default=20,
)

class HttpConfig(BaseSettings):
"""
HTTP configs
HTTP-related configurations for the application
"""

API_COMPRESSION_ENABLED: bool = Field(
description="whether to enable HTTP response compression of gzip",
description="Enable or disable gzip compression for HTTP responses",
default=False,
)

inner_CONSOLE_CORS_ALLOW_ORIGINS: str = Field(
description="",
description="Comma-separated list of allowed origins for CORS in the console",
validation_alias=AliasChoices("CONSOLE_CORS_ALLOW_ORIGINS", "CONSOLE_WEB_URL"),
default="",
)
@@ -217,363 +249,393 @@ class HttpConfig(BaseSettings):
def WEB_API_CORS_ALLOW_ORIGINS(self) -> list[str]:
return self.inner_WEB_API_CORS_ALLOW_ORIGINS.split(",")

HTTP_REQUEST_MAX_CONNECT_TIMEOUT: NonNegativeInt = Field(
description="",
default=300,
)
HTTP_REQUEST_MAX_CONNECT_TIMEOUT: Annotated[
PositiveInt, Field(ge=10, description="Maximum connection timeout in seconds for HTTP requests")
] = 10

HTTP_REQUEST_MAX_READ_TIMEOUT: NonNegativeInt = Field(
description="",
default=600,
)
HTTP_REQUEST_MAX_READ_TIMEOUT: Annotated[
PositiveInt, Field(ge=60, description="Maximum read timeout in seconds for HTTP requests")
] = 60

HTTP_REQUEST_MAX_WRITE_TIMEOUT: NonNegativeInt = Field(
description="",
default=600,
)
HTTP_REQUEST_MAX_WRITE_TIMEOUT: Annotated[
PositiveInt, Field(ge=10, description="Maximum write timeout in seconds for HTTP requests")
] = 20
|
||||
HTTP_REQUEST_NODE_MAX_BINARY_SIZE: PositiveInt = Field(
|
||||
description="",
|
||||
description="Maximum allowed size in bytes for binary data in HTTP requests",
|
||||
default=10 * 1024 * 1024,
|
||||
)
|
||||
|
||||
HTTP_REQUEST_NODE_MAX_TEXT_SIZE: PositiveInt = Field(
|
||||
description="",
|
||||
description="Maximum allowed size in bytes for text data in HTTP requests",
|
||||
default=1 * 1024 * 1024,
|
||||
)
|
||||
|
||||
SSRF_PROXY_HTTP_URL: Optional[str] = Field(
|
||||
description="HTTP URL for SSRF proxy",
|
||||
description="Proxy URL for HTTP requests to prevent Server-Side Request Forgery (SSRF)",
|
||||
default=None,
|
||||
)
|
||||
|
||||
SSRF_PROXY_HTTPS_URL: Optional[str] = Field(
|
||||
description="HTTPS URL for SSRF proxy",
|
||||
description="Proxy URL for HTTPS requests to prevent Server-Side Request Forgery (SSRF)",
|
||||
default=None,
|
||||
)
|
||||
|
||||
RESPECT_XFORWARD_HEADERS_ENABLED: bool = Field(
|
||||
description="Enable or disable the X-Forwarded-For Proxy Fix middleware from Werkzeug"
|
||||
" to respect X-* headers to redirect clients",
|
||||
default=False,
|
||||
)
|
||||
|
||||
|
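The switch from plain Field defaults to Annotated[PositiveInt, Field(ge=...)] means out-of-range timeouts now fail validation instead of being accepted silently. A reduced sketch of the effect (demo class name is illustrative):

from typing import Annotated

from pydantic import Field, PositiveInt, ValidationError
from pydantic_settings import BaseSettings

class TimeoutDemo(BaseSettings):
    HTTP_REQUEST_MAX_CONNECT_TIMEOUT: Annotated[PositiveInt, Field(ge=10)] = 10

try:
    TimeoutDemo(HTTP_REQUEST_MAX_CONNECT_TIMEOUT=5)  # below the ge=10 floor
except ValidationError as exc:
    print(exc)  # reports a greater_than_equal violation
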
class InnerAPIConfig(BaseSettings):
    """
    Inner API configs
    Configuration for internal API functionality
    """

    INNER_API: bool = Field(
        description="whether to enable the inner API",
        description="Enable or disable the internal API",
        default=False,
    )

    INNER_API_KEY: Optional[str] = Field(
        description="The inner API key is used to authenticate the inner API",
        description="API key for accessing the internal API",
        default=None,
    )


class LoggingConfig(BaseSettings):
    """
    Logging configs
    Configuration for application logging
    """

    LOG_LEVEL: str = Field(
        description="Log output level, default to INFO." "It is recommended to set it to ERROR for production.",
        description="Logging level, default to INFO. Set to ERROR for production environments.",
        default="INFO",
    )

    LOG_FILE: Optional[str] = Field(
        description="logging output file path",
        description="File path for log output.",
        default=None,
    )

    LOG_FILE_MAX_SIZE: PositiveInt = Field(
        description="Maximum file size for file rotation retention, the unit is megabytes (MB)",
        default=20,
    )

    LOG_FILE_BACKUP_COUNT: PositiveInt = Field(
        description="Maximum file backup count file rotation retention",
        default=5,
    )

    LOG_FORMAT: str = Field(
        description="log format",
        description="Format string for log messages",
        default="%(asctime)s.%(msecs)03d %(levelname)s [%(threadName)s] [%(filename)s:%(lineno)d] - %(message)s",
    )

    LOG_DATEFORMAT: Optional[str] = Field(
        description="log date format",
        description="Date format string for log timestamps",
        default=None,
    )

    LOG_TZ: Optional[str] = Field(
        description="specify log timezone, eg: America/New_York",
        description="Timezone for log timestamps (e.g., 'America/New_York')",
        default=None,
    )

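These logging settings map straightforwardly onto the standard library; the wiring below is an assumption about how an app consumes them, not code from the diff:

import logging

def setup_logging(level: str = "INFO",
                  fmt: str = "%(asctime)s %(levelname)s - %(message)s",
                  datefmt: str | None = None) -> None:
    # LOG_LEVEL, LOG_FORMAT and LOG_DATEFORMAT feed directly into basicConfig.
    logging.basicConfig(level=getattr(logging, level), format=fmt, datefmt=datefmt)
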
class ModelLoadBalanceConfig(BaseSettings):
    """
    Model load balance configs
    Configuration for model load balancing
    """

    MODEL_LB_ENABLED: bool = Field(
        description="whether to enable model load balancing",
        description="Enable or disable load balancing for models",
        default=False,
    )


class BillingConfig(BaseSettings):
    """
    Platform Billing Configurations
    Configuration for platform billing features
    """

    BILLING_ENABLED: bool = Field(
        description="whether to enable billing",
        description="Enable or disable billing functionality",
        default=False,
    )


class UpdateConfig(BaseSettings):
    """
    Update configs
    Configuration for application update checks
    """

    CHECK_UPDATE_URL: str = Field(
        description="url for checking updates",
        description="URL to check for application updates",
        default="https://updates.dify.ai",
    )


class WorkflowConfig(BaseSettings):
    """
    Workflow feature configs
    Configuration for workflow execution
    """

    WORKFLOW_MAX_EXECUTION_STEPS: PositiveInt = Field(
        description="max execution steps in single workflow execution",
        description="Maximum number of steps allowed in a single workflow execution",
        default=500,
    )

    WORKFLOW_MAX_EXECUTION_TIME: PositiveInt = Field(
        description="max execution time in seconds in single workflow execution",
        description="Maximum execution time in seconds for a single workflow",
        default=1200,
    )

    WORKFLOW_CALL_MAX_DEPTH: PositiveInt = Field(
        description="max depth of calling in single workflow execution",
        description="Maximum allowed depth for nested workflow calls",
        default=5,
    )

    MAX_VARIABLE_SIZE: PositiveInt = Field(
        description="The maximum size in bytes of a variable. default to 5KB.",
        default=5 * 1024,
        description="Maximum size in bytes for a single variable in workflows. Default to 200 KB.",
        default=200 * 1024,
    )

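Note the MAX_VARIABLE_SIZE default grows from 5 KB to 200 KB here. A sketch of the kind of guard such a limit implies, assuming variables are measured after serialization (hypothetical helper):

import json

def check_variable_size(value: object, max_variable_size: int = 200 * 1024) -> None:
    # Measure the variable as UTF-8 encoded JSON before enforcing the cap.
    encoded = json.dumps(value).encode("utf-8")
    if len(encoded) > max_variable_size:
        raise ValueError(f"variable exceeds {max_variable_size} bytes")
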
class OAuthConfig(BaseSettings):
class AuthConfig(BaseSettings):
    """
    oauth configs
    Configuration for authentication and OAuth
    """

    OAUTH_REDIRECT_PATH: str = Field(
        description="redirect path for OAuth",
        description="Redirect path for OAuth authentication callbacks",
        default="/console/api/oauth/authorize",
    )

    GITHUB_CLIENT_ID: Optional[str] = Field(
        description="GitHub client id for OAuth",
        description="GitHub OAuth client ID",
        default=None,
    )

    GITHUB_CLIENT_SECRET: Optional[str] = Field(
        description="GitHub client secret key for OAuth",
        description="GitHub OAuth client secret",
        default=None,
    )

    GOOGLE_CLIENT_ID: Optional[str] = Field(
        description="Google client id for OAuth",
        description="Google OAuth client ID",
        default=None,
    )

    GOOGLE_CLIENT_SECRET: Optional[str] = Field(
        description="Google client secret key for OAuth",
        description="Google OAuth client secret",
        default=None,
    )

    ACCESS_TOKEN_EXPIRE_MINUTES: PositiveInt = Field(
        description="Expiration time for access tokens in minutes",
        default=60,
    )


class ModerationConfig(BaseSettings):
    """
    Moderation in app configs.
    Configuration for content moderation
    """

    MODERATION_BUFFER_SIZE: PositiveInt = Field(
        description="buffer size for moderation",
        description="Size of the buffer for content moderation processing",
        default=300,
    )


class ToolConfig(BaseSettings):
    """
    Tool configs
    Configuration for tool management
    """

    TOOL_ICON_CACHE_MAX_AGE: PositiveInt = Field(
        description="max age in seconds for tool icon caching",
        description="Maximum age in seconds for caching tool icons",
        default=3600,
    )


class MailConfig(BaseSettings):
    """
    Mail Configurations
    Configuration for email services
    """

    MAIL_TYPE: Optional[str] = Field(
        description="Mail provider type name, default to None, availabile values are `smtp` and `resend`.",
        description="Email service provider type ('smtp' or 'resend'), default to None.",
        default=None,
    )

    MAIL_DEFAULT_SEND_FROM: Optional[str] = Field(
        description="default email address for sending from ",
        description="Default email address to use as the sender",
        default=None,
    )

    RESEND_API_KEY: Optional[str] = Field(
        description="API key for Resend",
        description="API key for Resend email service",
        default=None,
    )

    RESEND_API_URL: Optional[str] = Field(
        description="API URL for Resend",
        description="API URL for Resend email service",
        default=None,
    )

    SMTP_SERVER: Optional[str] = Field(
        description="smtp server host",
        description="SMTP server hostname",
        default=None,
    )

    SMTP_PORT: Optional[int] = Field(
        description="smtp server port",
        description="SMTP server port number",
        default=465,
    )

    SMTP_USERNAME: Optional[str] = Field(
        description="smtp server username",
        description="Username for SMTP authentication",
        default=None,
    )

    SMTP_PASSWORD: Optional[str] = Field(
        description="smtp server password",
        description="Password for SMTP authentication",
        default=None,
    )

    SMTP_USE_TLS: bool = Field(
        description="whether to use TLS connection to smtp server",
        description="Enable TLS encryption for SMTP connections",
        default=False,
    )

    SMTP_OPPORTUNISTIC_TLS: bool = Field(
        description="whether to use opportunistic TLS connection to smtp server",
        description="Enable opportunistic TLS for SMTP connections",
        default=False,
    )

    EMAIL_SEND_IP_LIMIT_PER_MINUTE: PositiveInt = Field(
        description="Maximum number of emails allowed to be sent from the same IP address in a minute",
        default=50,
    )

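SMTP_USE_TLS and SMTP_OPPORTUNISTIC_TLS describe two different handshakes: implicit TLS on connect versus a STARTTLS upgrade. A minimal standard-library sketch of the distinction (assumed usage, not code from the diff):

import smtplib
import ssl

def open_smtp(server: str, port: int, use_tls: bool, opportunistic_tls: bool) -> smtplib.SMTP:
    if use_tls and not opportunistic_tls:
        # Implicit TLS: the socket is encrypted from the first byte (typically port 465).
        return smtplib.SMTP_SSL(server, port, context=ssl.create_default_context())
    conn = smtplib.SMTP(server, port)
    if use_tls and opportunistic_tls:
        # Opportunistic TLS: connect in plaintext, then upgrade via STARTTLS (typically port 587).
        conn.starttls(context=ssl.create_default_context())
    return conn
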
class RagEtlConfig(BaseSettings):
    """
    RAG ETL Configurations.
    Configuration for RAG ETL processes
    """

    # TODO: This config is not only for rag etl, it is also for file upload, we should move it to file upload config
    ETL_TYPE: str = Field(
        description="RAG ETL type name, default to `dify`, available values are `dify` and `Unstructured`. ",
        description="RAG ETL type ('dify' or 'Unstructured'), default to 'dify'",
        default="dify",
    )

    KEYWORD_DATA_SOURCE_TYPE: str = Field(
        description="source type for keyword data, default to `database`, available values are `database` .",
        description="Data source type for keyword extraction"
        " ('database' or other supported types), default to 'database'",
        default="database",
    )

    UNSTRUCTURED_API_URL: Optional[str] = Field(
        description="API URL for Unstructured",
        description="API URL for Unstructured.io service",
        default=None,
    )

    UNSTRUCTURED_API_KEY: Optional[str] = Field(
        description="API key for Unstructured",
        description="API key for Unstructured.io service",
        default=None,
    )


class DataSetConfig(BaseSettings):
    """
    Dataset configs
    Configuration for dataset management
    """

    CLEAN_DAY_SETTING: PositiveInt = Field(
        description="interval in days for cleaning up dataset",
    PLAN_SANDBOX_CLEAN_DAY_SETTING: PositiveInt = Field(
        description="Interval in days for dataset cleanup operations - plan: sandbox",
        default=30,
    )

    PLAN_PRO_CLEAN_DAY_SETTING: PositiveInt = Field(
        description="Interval in days for dataset cleanup operations - plan: pro and team",
        default=7,
    )

    DATASET_OPERATOR_ENABLED: bool = Field(
        description="whether to enable dataset operator",
        description="Enable or disable dataset operator functionality",
        default=False,
    )


class WorkspaceConfig(BaseSettings):
    """
    Workspace configs
    Configuration for workspace management
    """

    INVITE_EXPIRY_HOURS: PositiveInt = Field(
        description="workspaces invitation expiration in hours",
        description="Expiration time in hours for workspace invitation links",
        default=72,
    )


class IndexingConfig(BaseSettings):
    """
    Indexing configs.
    Configuration for indexing operations
    """

    INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH: PositiveInt = Field(
        description="max segmentation token length for indexing",
        description="Maximum token length for text segmentation during indexing",
        default=1000,
    )


class ImageFormatConfig(BaseSettings):
    MULTIMODAL_SEND_IMAGE_FORMAT: str = Field(
        description="multi model send image format, support base64, url, default is base64",
    MULTIMODAL_SEND_IMAGE_FORMAT: Literal["base64", "url"] = Field(
        description="Format for sending images in multimodal contexts ('base64' or 'url'), default is base64",
        default="base64",
    )

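Typing MULTIMODAL_SEND_IMAGE_FORMAT as Literal["base64", "url"] turns an informal convention into a validated contract; any other value is rejected at load time. A small demonstration (demo class name is illustrative):

from typing import Literal

from pydantic import Field, ValidationError
from pydantic_settings import BaseSettings

class ImageFormatDemo(BaseSettings):
    MULTIMODAL_SEND_IMAGE_FORMAT: Literal["base64", "url"] = Field(default="base64")

ImageFormatDemo(MULTIMODAL_SEND_IMAGE_FORMAT="url")  # accepted
try:
    ImageFormatDemo(MULTIMODAL_SEND_IMAGE_FORMAT="jpeg")  # not one of the two literals
except ValidationError as exc:
    print(exc)
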
||||
class CeleryBeatConfig(BaseSettings):
|
||||
CELERY_BEAT_SCHEDULER_TIME: int = Field(
|
||||
description="the time of the celery scheduler, default to 1 day",
|
||||
description="Interval in days for Celery Beat scheduler execution, default to 1 day",
|
||||
default=1,
|
||||
)
|
||||
|
||||
|
||||
class PositionConfig(BaseSettings):
|
||||
POSITION_PROVIDER_PINS: str = Field(
|
||||
description="The heads of model providers",
|
||||
description="Comma-separated list of pinned model providers",
|
||||
default="",
|
||||
)
|
||||
|
||||
POSITION_PROVIDER_INCLUDES: str = Field(
|
||||
description="The included model providers",
|
||||
description="Comma-separated list of included model providers",
|
||||
default="",
|
||||
)
|
||||
|
||||
POSITION_PROVIDER_EXCLUDES: str = Field(
|
||||
description="The excluded model providers",
|
||||
description="Comma-separated list of excluded model providers",
|
||||
default="",
|
||||
)
|
||||
|
||||
POSITION_TOOL_PINS: str = Field(
|
||||
description="The heads of tools",
|
||||
description="Comma-separated list of pinned tools",
|
||||
default="",
|
||||
)
|
||||
|
||||
POSITION_TOOL_INCLUDES: str = Field(
|
||||
description="The included tools",
|
||||
description="Comma-separated list of included tools",
|
||||
default="",
|
||||
)
|
||||
|
||||
POSITION_TOOL_EXCLUDES: str = Field(
|
||||
description="The excluded tools",
|
||||
description="Comma-separated list of excluded tools",
|
||||
default="",
|
||||
)
|
||||
|
||||
@@ -602,9 +664,37 @@ class PositionConfig(BaseSettings):
|
||||
return {item.strip() for item in self.POSITION_TOOL_EXCLUDES.split(",") if item.strip() != ""}
|
||||
|
||||
|
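The computed properties in this hunk all follow the same comma-split pattern shown in the return line above, so empty entries and stray whitespace are dropped. For example:

raw = "openai, anthropic,, zhipuai "
pins = {item.strip() for item in raw.split(",") if item.strip() != ""}
assert pins == {"openai", "anthropic", "zhipuai"}
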
class LoginConfig(BaseSettings):
    ENABLE_EMAIL_CODE_LOGIN: bool = Field(
        description="whether to enable email code login",
        default=False,
    )
    ENABLE_EMAIL_PASSWORD_LOGIN: bool = Field(
        description="whether to enable email password login",
        default=True,
    )
    ENABLE_SOCIAL_OAUTH_LOGIN: bool = Field(
        description="whether to enable github/google oauth login",
        default=False,
    )
    EMAIL_CODE_LOGIN_TOKEN_EXPIRY_MINUTES: PositiveInt = Field(
        description="expiry time in minutes for email code login token",
        default=5,
    )
    ALLOW_REGISTER: bool = Field(
        description="whether to enable register",
        default=False,
    )
    ALLOW_CREATE_WORKSPACE: bool = Field(
        description="whether to enable create workspace",
        default=False,
    )


class FeatureConfig(
    # place the configs in alphabet order
    AppExecutionConfig,
    AuthConfig,  # Changed from OAuthConfig to AuthConfig
    BillingConfig,
    CodeExecutionSandboxConfig,
    DataSetConfig,
@@ -619,14 +709,14 @@ class FeatureConfig(
    MailConfig,
    ModelLoadBalanceConfig,
    ModerationConfig,
    OAuthConfig,
    PositionConfig,
    RagEtlConfig,
    SecurityConfig,
    ToolConfig,
    UpdateConfig,
    WorkflowConfig,
    WorkspaceConfig,
    PositionConfig,
    LoginConfig,
    # hosted services config
    HostedServiceConfig,
    CeleryBeatConfig,

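FeatureConfig aggregates the per-domain settings classes through multiple inheritance, so a single object exposes every field. A reduced sketch of the pattern (class names here are illustrative, not from the repo):

from pydantic import Field
from pydantic_settings import BaseSettings

class PartA(BaseSettings):
    A_FLAG: bool = Field(default=False)

class PartB(BaseSettings):
    B_LIMIT: int = Field(default=10)

class Combined(PartA, PartB):
    pass

settings = Combined()  # reads A_FLAG and B_LIMIT from the environment in one place
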
@@ -6,31 +6,31 @@ from pydantic_settings import BaseSettings

class HostedOpenAiConfig(BaseSettings):
    """
    Hosted OpenAI service config
    Configuration for hosted OpenAI service
    """

    HOSTED_OPENAI_API_KEY: Optional[str] = Field(
        description="",
        description="API key for hosted OpenAI service",
        default=None,
    )

    HOSTED_OPENAI_API_BASE: Optional[str] = Field(
        description="",
        description="Base URL for hosted OpenAI API",
        default=None,
    )

    HOSTED_OPENAI_API_ORGANIZATION: Optional[str] = Field(
        description="",
        description="Organization ID for hosted OpenAI service",
        default=None,
    )

    HOSTED_OPENAI_TRIAL_ENABLED: bool = Field(
        description="",
        description="Enable trial access to hosted OpenAI service",
        default=False,
    )

    HOSTED_OPENAI_TRIAL_MODELS: str = Field(
        description="",
        description="Comma-separated list of available models for trial access",
        default="gpt-3.5-turbo,"
        "gpt-3.5-turbo-1106,"
        "gpt-3.5-turbo-instruct,"
@@ -42,17 +42,17 @@ class HostedOpenAiConfig(BaseSettings):
    )

    HOSTED_OPENAI_QUOTA_LIMIT: NonNegativeInt = Field(
        description="",
        description="Quota limit for hosted OpenAI service usage",
        default=200,
    )

    HOSTED_OPENAI_PAID_ENABLED: bool = Field(
        description="",
        description="Enable paid access to hosted OpenAI service",
        default=False,
    )

    HOSTED_OPENAI_PAID_MODELS: str = Field(
        description="",
        description="Comma-separated list of available models for paid access",
        default="gpt-4,"
        "gpt-4-turbo-preview,"
        "gpt-4-turbo-2024-04-09,"
@@ -71,124 +71,122 @@ class HostedOpenAiConfig(BaseSettings):

class HostedAzureOpenAiConfig(BaseSettings):
    """
    Hosted OpenAI service config
    Configuration for hosted Azure OpenAI service
    """

    HOSTED_AZURE_OPENAI_ENABLED: bool = Field(
        description="",
        description="Enable hosted Azure OpenAI service",
        default=False,
    )

    HOSTED_AZURE_OPENAI_API_KEY: Optional[str] = Field(
        description="",
        description="API key for hosted Azure OpenAI service",
        default=None,
    )

    HOSTED_AZURE_OPENAI_API_BASE: Optional[str] = Field(
        description="",
        description="Base URL for hosted Azure OpenAI API",
        default=None,
    )

    HOSTED_AZURE_OPENAI_QUOTA_LIMIT: NonNegativeInt = Field(
        description="",
        description="Quota limit for hosted Azure OpenAI service usage",
        default=200,
    )


class HostedAnthropicConfig(BaseSettings):
    """
    Hosted Azure OpenAI service config
    Configuration for hosted Anthropic service
    """

    HOSTED_ANTHROPIC_API_BASE: Optional[str] = Field(
        description="",
        description="Base URL for hosted Anthropic API",
        default=None,
    )

    HOSTED_ANTHROPIC_API_KEY: Optional[str] = Field(
        description="",
        description="API key for hosted Anthropic service",
        default=None,
    )

    HOSTED_ANTHROPIC_TRIAL_ENABLED: bool = Field(
        description="",
        description="Enable trial access to hosted Anthropic service",
        default=False,
    )

    HOSTED_ANTHROPIC_QUOTA_LIMIT: NonNegativeInt = Field(
        description="",
        description="Quota limit for hosted Anthropic service usage",
        default=600000,
    )

    HOSTED_ANTHROPIC_PAID_ENABLED: bool = Field(
        description="",
        description="Enable paid access to hosted Anthropic service",
        default=False,
    )


class HostedMinmaxConfig(BaseSettings):
    """
    Hosted Minmax service config
    Configuration for hosted Minmax service
    """

    HOSTED_MINIMAX_ENABLED: bool = Field(
        description="",
        description="Enable hosted Minmax service",
        default=False,
    )


class HostedSparkConfig(BaseSettings):
    """
    Hosted Spark service config
    Configuration for hosted Spark service
    """

    HOSTED_SPARK_ENABLED: bool = Field(
        description="",
        description="Enable hosted Spark service",
        default=False,
    )


class HostedZhipuAIConfig(BaseSettings):
    """
    Hosted Minmax service config
    Configuration for hosted ZhipuAI service
    """

    HOSTED_ZHIPUAI_ENABLED: bool = Field(
        description="",
        description="Enable hosted ZhipuAI service",
        default=False,
    )


class HostedModerationConfig(BaseSettings):
    """
    Hosted Moderation service config
    Configuration for hosted Moderation service
    """

    HOSTED_MODERATION_ENABLED: bool = Field(
        description="",
        description="Enable hosted Moderation service",
        default=False,
    )

    HOSTED_MODERATION_PROVIDERS: str = Field(
        description="",
        description="Comma-separated list of moderation providers",
        default="",
    )

class HostedFetchAppTemplateConfig(BaseSettings):
    """
    Hosted Moderation service config
    Configuration for fetching app templates
    """

    HOSTED_FETCH_APP_TEMPLATES_MODE: str = Field(
        description="the mode for fetching app templates,"
        " default to remote,"
        " available values: remote, db, builtin",
        description="Mode for fetching app templates: 'remote', 'db', or 'builtin'; default to 'remote'",
        default="remote",
    )

    HOSTED_FETCH_APP_TEMPLATES_REMOTE_DOMAIN: str = Field(
        description="the domain for fetching remote app templates",
        description="Domain for fetching remote app templates",
        default="https://tmpl.dify.ai",
    )

@@ -1,16 +1,20 @@
from typing import Any, Optional
from urllib.parse import quote_plus

from pydantic import Field, NonNegativeInt, PositiveInt, computed_field
from pydantic import Field, NonNegativeInt, PositiveFloat, PositiveInt, computed_field
from pydantic_settings import BaseSettings

from configs.middleware.cache.redis_config import RedisConfig
from configs.middleware.storage.aliyun_oss_storage_config import AliyunOSSStorageConfig
from configs.middleware.storage.amazon_s3_storage_config import S3StorageConfig
from configs.middleware.storage.azure_blob_storage_config import AzureBlobStorageConfig
from configs.middleware.storage.baidu_obs_storage_config import BaiduOBSStorageConfig
from configs.middleware.storage.google_cloud_storage_config import GoogleCloudStorageConfig
from configs.middleware.storage.huawei_obs_storage_config import HuaweiCloudOBSStorageConfig
from configs.middleware.storage.oci_storage_config import OCIStorageConfig
from configs.middleware.storage.supabase_storage_config import SupabaseStorageConfig
from configs.middleware.storage.tencent_cos_storage_config import TencentCloudCOSStorageConfig
from configs.middleware.storage.volcengine_tos_storage_config import VolcengineTOSStorageConfig
from configs.middleware.vdb.analyticdb_config import AnalyticdbConfig
from configs.middleware.vdb.chroma_config import ChromaConfig
from configs.middleware.vdb.elasticsearch_config import ElasticsearchConfig
@@ -24,75 +28,79 @@ from configs.middleware.vdb.qdrant_config import QdrantConfig
from configs.middleware.vdb.relyt_config import RelytConfig
from configs.middleware.vdb.tencent_vector_config import TencentVectorDBConfig
from configs.middleware.vdb.tidb_vector_config import TiDBVectorConfig
from configs.middleware.vdb.upstash_config import UpstashConfig
from configs.middleware.vdb.vikingdb_config import VikingDBConfig
from configs.middleware.vdb.weaviate_config import WeaviateConfig


class StorageConfig(BaseSettings):
    STORAGE_TYPE: str = Field(
        description="storage type,"
        " default to `local`,"
        " available values are `local`, `s3`, `azure-blob`, `aliyun-oss`, `google-storage`.",
        description="Type of storage to use."
        " Options: 'local', 's3', 'aliyun-oss', 'azure-blob', 'baidu-obs', 'google-storage', 'huawei-obs', "
        "'oci-storage', 'tencent-cos', 'volcengine-tos', 'supabase'. Default is 'local'.",
        default="local",
    )

    STORAGE_LOCAL_PATH: str = Field(
        description="local storage path",
        description="Path for local storage when STORAGE_TYPE is set to 'local'.",
        default="storage",
    )


class VectorStoreConfig(BaseSettings):
    VECTOR_STORE: Optional[str] = Field(
        description="vector store type",
        description="Type of vector store to use for efficient similarity search."
        " Set to None if not using a vector store.",
        default=None,
    )


class KeywordStoreConfig(BaseSettings):
    KEYWORD_STORE: str = Field(
        description="keyword store type",
        description="Method for keyword extraction and storage."
        " Default is 'jieba', a Chinese text segmentation library.",
        default="jieba",
    )


class DatabaseConfig:
    DB_HOST: str = Field(
        description="db host",
        description="Hostname or IP address of the database server.",
        default="localhost",
    )

    DB_PORT: PositiveInt = Field(
        description="db port",
        description="Port number for database connection.",
        default=5432,
    )

    DB_USERNAME: str = Field(
        description="db username",
        description="Username for database authentication.",
        default="postgres",
    )

    DB_PASSWORD: str = Field(
        description="db password",
        description="Password for database authentication.",
        default="",
    )

    DB_DATABASE: str = Field(
        description="db database",
        description="Name of the database to connect to.",
        default="dify",
    )

    DB_CHARSET: str = Field(
        description="db charset",
        description="Character set for database connection.",
        default="",
    )

    DB_EXTRAS: str = Field(
        description="db extras options. Example: keepalives_idle=60&keepalives=1",
        description="Additional database connection parameters. Example: 'keepalives_idle=60&keepalives=1'",
        default="",
    )

    SQLALCHEMY_DATABASE_URI_SCHEME: str = Field(
        description="db uri scheme",
        description="Database URI scheme for SQLAlchemy connection.",
        default="postgresql",
    )

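The quote_plus import added at the top of this file suggests the connection URI is assembled from these fields with the password escaped; a sketch under that assumption (helper name is illustrative):

from urllib.parse import quote_plus

def build_db_uri(scheme: str, user: str, password: str, host: str, port: int,
                 database: str, charset: str = "") -> str:
    # Escape the password so characters like '@' or ':' cannot break the URI.
    query = f"?client_encoding={charset}" if charset else ""
    return f"{scheme}://{user}:{quote_plus(password)}@{host}:{port}/{database}{query}"

print(build_db_uri("postgresql", "postgres", "p@ss", "localhost", 5432, "dify"))
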
@@ -110,27 +118,27 @@ class DatabaseConfig:
    )

    SQLALCHEMY_POOL_SIZE: NonNegativeInt = Field(
        description="pool size of SqlAlchemy",
        description="Maximum number of database connections in the pool.",
        default=30,
    )

    SQLALCHEMY_MAX_OVERFLOW: NonNegativeInt = Field(
        description="max overflows for SqlAlchemy",
        description="Maximum number of connections that can be created beyond the pool_size.",
        default=10,
    )

    SQLALCHEMY_POOL_RECYCLE: NonNegativeInt = Field(
        description="SqlAlchemy pool recycle",
        description="Number of seconds after which a connection is automatically recycled.",
        default=3600,
    )

    SQLALCHEMY_POOL_PRE_PING: bool = Field(
        description="whether to enable pool pre-ping in SqlAlchemy",
        description="If True, enables connection pool pre-ping feature to check connections.",
        default=False,
    )

    SQLALCHEMY_ECHO: bool | str = Field(
        description="whether to enable SqlAlchemy echo",
        description="If True, SQLAlchemy will log all SQL statements.",
        default=False,
    )

@@ -148,15 +156,30 @@ class DatabaseConfig:

class CeleryConfig(DatabaseConfig):
    CELERY_BACKEND: str = Field(
        description="Celery backend, available values are `database`, `redis`",
        description="Backend for Celery task results. Options: 'database', 'redis'.",
        default="database",
    )

    CELERY_BROKER_URL: Optional[str] = Field(
        description="CELERY_BROKER_URL",
        description="URL of the message broker for Celery tasks.",
        default=None,
    )

    CELERY_USE_SENTINEL: Optional[bool] = Field(
        description="Whether to use Redis Sentinel for high availability.",
        default=False,
    )

    CELERY_SENTINEL_MASTER_NAME: Optional[str] = Field(
        description="Name of the Redis Sentinel master.",
        default=None,
    )

    CELERY_SENTINEL_SOCKET_TIMEOUT: Optional[PositiveFloat] = Field(
        description="Timeout for Redis Sentinel socket operations in seconds.",
        default=0.1,
    )

    @computed_field
    @property
    def CELERY_RESULT_BACKEND(self) -> str | None:
@@ -172,6 +195,22 @@ class CeleryConfig(DatabaseConfig):
        return self.CELERY_BROKER_URL.startswith("rediss://") if self.CELERY_BROKER_URL else False

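The new CELERY_USE_SENTINEL fields imply the broker options are branched on Sentinel mode; a hedged sketch of what consuming code might pass as Celery transport options (key names follow Celery's Redis Sentinel transport, but this helper is an assumption, not code from the diff):

def celery_transport_options(use_sentinel: bool, master_name: str | None,
                             socket_timeout: float) -> dict:
    # With Sentinel enabled, Celery needs the master name and socket timeout
    # in broker_transport_options; without it, no extra options are required.
    if use_sentinel:
        return {"master_name": master_name, "sentinel_kwargs": {"socket_timeout": socket_timeout}}
    return {}
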
class InternalTestConfig(BaseSettings):
    """
    Configuration settings for Internal Test
    """

    AWS_SECRET_ACCESS_KEY: Optional[str] = Field(
        description="Internal test AWS secret access key",
        default=None,
    )

    AWS_ACCESS_KEY_ID: Optional[str] = Field(
        description="Internal test AWS access key ID",
        default=None,
    )


class MiddlewareConfig(
    # place the configs in alphabet order
    CeleryConfig,
@@ -182,10 +221,14 @@ class MiddlewareConfig(
    StorageConfig,
    AliyunOSSStorageConfig,
    AzureBlobStorageConfig,
    BaiduOBSStorageConfig,
    GoogleCloudStorageConfig,
    TencentCloudCOSStorageConfig,
    S3StorageConfig,
    HuaweiCloudOBSStorageConfig,
    OCIStorageConfig,
    S3StorageConfig,
    SupabaseStorageConfig,
    TencentCloudCOSStorageConfig,
    VolcengineTOSStorageConfig,
    # configs of vdb and vdb providers
    VectorStoreConfig,
    AnalyticdbConfig,
@@ -202,5 +245,8 @@ class MiddlewareConfig(
    TiDBVectorConfig,
    WeaviateConfig,
    ElasticsearchConfig,
    InternalTestConfig,
    VikingDBConfig,
    UpstashConfig,
):
    pass

api/configs/middleware/cache/redis_config.py (vendored)
@@ -1,40 +1,75 @@
from typing import Optional

from pydantic import Field, NonNegativeInt, PositiveInt
from pydantic import Field, NonNegativeInt, PositiveFloat, PositiveInt
from pydantic_settings import BaseSettings


class RedisConfig(BaseSettings):
    """
    Redis configs
    Configuration settings for Redis connection
    """

    REDIS_HOST: str = Field(
        description="Redis host",
        description="Hostname or IP address of the Redis server",
        default="localhost",
    )

    REDIS_PORT: PositiveInt = Field(
        description="Redis port",
        description="Port number on which the Redis server is listening",
        default=6379,
    )

    REDIS_USERNAME: Optional[str] = Field(
        description="Redis username",
        description="Username for Redis authentication (if required)",
        default=None,
    )

    REDIS_PASSWORD: Optional[str] = Field(
        description="Redis password",
        description="Password for Redis authentication (if required)",
        default=None,
    )

    REDIS_DB: NonNegativeInt = Field(
        description="Redis database id, default to 0",
        description="Redis database number to use (0-15)",
        default=0,
    )

    REDIS_MAX_CONNECTIONS: PositiveInt = Field(
        description="Maximum number of connections to Redis",
        default=200,
    )

    REDIS_USE_SSL: bool = Field(
        description="whether to use SSL for Redis connection",
        description="Enable SSL/TLS for the Redis connection",
        default=False,
    )

    REDIS_USE_SENTINEL: Optional[bool] = Field(
        description="Enable Redis Sentinel mode for high availability",
        default=False,
    )

    REDIS_SENTINELS: Optional[str] = Field(
        description="Comma-separated list of Redis Sentinel nodes (host:port)",
        default=None,
    )

    REDIS_SENTINEL_SERVICE_NAME: Optional[str] = Field(
        description="Name of the Redis Sentinel service to monitor",
        default=None,
    )

    REDIS_SENTINEL_USERNAME: Optional[str] = Field(
        description="Username for Redis Sentinel authentication (if required)",
        default=None,
    )

    REDIS_SENTINEL_PASSWORD: Optional[str] = Field(
        description="Password for Redis Sentinel authentication (if required)",
        default=None,
    )

    REDIS_SENTINEL_SOCKET_TIMEOUT: Optional[PositiveFloat] = Field(
        description="Socket timeout in seconds for Redis Sentinel connections",
        default=0.1,
    )

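A sketch of how the new Sentinel fields map onto redis-py (assumed usage; the diff itself only declares the settings, and the helper name is illustrative):

from redis.sentinel import Sentinel

def sentinel_master(sentinels: str, service_name: str, socket_timeout: float = 0.1):
    # REDIS_SENTINELS is a comma-separated list of host:port pairs.
    nodes = [(h, int(p)) for h, p in (item.split(":") for item in sentinels.split(","))]
    sentinel = Sentinel(nodes, socket_timeout=socket_timeout)
    # Returns a client bound to the current master of the monitored service.
    return sentinel.master_for(service_name, socket_timeout=socket_timeout)
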
@@ -6,35 +6,40 @@ from pydantic_settings import BaseSettings

class AliyunOSSStorageConfig(BaseSettings):
    """
    Aliyun storage configs
    Configuration settings for Aliyun Object Storage Service (OSS)
    """

    ALIYUN_OSS_BUCKET_NAME: Optional[str] = Field(
        description="Aliyun OSS bucket name",
        description="Name of the Aliyun OSS bucket to store and retrieve objects",
        default=None,
    )

    ALIYUN_OSS_ACCESS_KEY: Optional[str] = Field(
        description="Aliyun OSS access key",
        description="Access key ID for authenticating with Aliyun OSS",
        default=None,
    )

    ALIYUN_OSS_SECRET_KEY: Optional[str] = Field(
        description="Aliyun OSS secret key",
        description="Secret access key for authenticating with Aliyun OSS",
        default=None,
    )

    ALIYUN_OSS_ENDPOINT: Optional[str] = Field(
        description="Aliyun OSS endpoint URL",
        description="URL of the Aliyun OSS endpoint for your chosen region",
        default=None,
    )

    ALIYUN_OSS_REGION: Optional[str] = Field(
        description="Aliyun OSS region",
        description="Aliyun OSS region where your bucket is located (e.g., 'oss-cn-hangzhou')",
        default=None,
    )

    ALIYUN_OSS_AUTH_VERSION: Optional[str] = Field(
        description="Aliyun OSS authentication version",
        description="Version of the authentication protocol to use with Aliyun OSS (e.g., 'v4')",
        default=None,
    )

    ALIYUN_OSS_PATH: Optional[str] = Field(
        description="Base path within the bucket to store objects (e.g., 'my-app-data/')",
        default=None,
    )

@@ -6,40 +6,40 @@ from pydantic_settings import BaseSettings

class S3StorageConfig(BaseSettings):
    """
    S3 storage configs
    Configuration settings for S3-compatible object storage
    """

    S3_ENDPOINT: Optional[str] = Field(
        description="S3 storage endpoint",
        description="URL of the S3-compatible storage endpoint (e.g., 'https://s3.amazonaws.com')",
        default=None,
    )

    S3_REGION: Optional[str] = Field(
        description="S3 storage region",
        description="Region where the S3 bucket is located (e.g., 'us-east-1')",
        default=None,
    )

    S3_BUCKET_NAME: Optional[str] = Field(
        description="S3 storage bucket name",
        description="Name of the S3 bucket to store and retrieve objects",
        default=None,
    )

    S3_ACCESS_KEY: Optional[str] = Field(
        description="S3 storage access key",
        description="Access key ID for authenticating with the S3 service",
        default=None,
    )

    S3_SECRET_KEY: Optional[str] = Field(
        description="S3 storage secret key",
        description="Secret access key for authenticating with the S3 service",
        default=None,
    )

    S3_ADDRESS_STYLE: str = Field(
        description="S3 storage address style",
        description="S3 addressing style: 'auto', 'path', or 'virtual'",
        default="auto",
    )

    S3_USE_AWS_MANAGED_IAM: bool = Field(
        description="whether to use aws managed IAM for S3",
        description="Use AWS managed IAM roles for authentication instead of access/secret keys",
        default=False,
    )

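S3_ADDRESS_STYLE corresponds to botocore's addressing_style option; a hedged example of constructing a client from these fields (the helper itself is illustrative):

import boto3
from botocore.config import Config

def s3_client(endpoint: str, region: str, access_key: str, secret_key: str,
              address_style: str = "auto"):
    # 'auto', 'path', or 'virtual' — matches the S3_ADDRESS_STYLE values above.
    return boto3.client(
        "s3",
        endpoint_url=endpoint,
        region_name=region,
        aws_access_key_id=access_key,
        aws_secret_access_key=secret_key,
        config=Config(s3={"addressing_style": address_style}),
    )
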
@@ -6,25 +6,25 @@ from pydantic_settings import BaseSettings

class AzureBlobStorageConfig(BaseSettings):
    """
    Azure Blob storage configs
    Configuration settings for Azure Blob Storage
    """

    AZURE_BLOB_ACCOUNT_NAME: Optional[str] = Field(
        description="Azure Blob account name",
        description="Name of the Azure Storage account (e.g., 'mystorageaccount')",
        default=None,
    )

    AZURE_BLOB_ACCOUNT_KEY: Optional[str] = Field(
        description="Azure Blob account key",
        description="Access key for authenticating with the Azure Storage account",
        default=None,
    )

    AZURE_BLOB_CONTAINER_NAME: Optional[str] = Field(
        description="Azure Blob container name",
        description="Name of the Azure Blob container to store and retrieve objects",
        default=None,
    )

    AZURE_BLOB_ACCOUNT_URL: Optional[str] = Field(
        description="Azure Blob account URL",
        description="URL of the Azure Blob storage endpoint (e.g., 'https://mystorageaccount.blob.core.windows.net')",
        default=None,
    )

api/configs/middleware/storage/baidu_obs_storage_config.py (new file)
@@ -0,0 +1,29 @@
from typing import Optional

from pydantic import BaseModel, Field


class BaiduOBSStorageConfig(BaseModel):
    """
    Configuration settings for Baidu Object Storage Service (OBS)
    """

    BAIDU_OBS_BUCKET_NAME: Optional[str] = Field(
        description="Name of the Baidu OBS bucket to store and retrieve objects (e.g., 'my-obs-bucket')",
        default=None,
    )

    BAIDU_OBS_ACCESS_KEY: Optional[str] = Field(
        description="Access Key ID for authenticating with Baidu OBS",
        default=None,
    )

    BAIDU_OBS_SECRET_KEY: Optional[str] = Field(
        description="Secret Access Key for authenticating with Baidu OBS",
        default=None,
    )

    BAIDU_OBS_ENDPOINT: Optional[str] = Field(
        description="URL of the Baidu OSS endpoint for your chosen region (e.g., 'https://.bj.bcebos.com')",
        default=None,
    )
@@ -6,15 +6,15 @@ from pydantic_settings import BaseSettings

class GoogleCloudStorageConfig(BaseSettings):
    """
    Google Cloud storage configs
    Configuration settings for Google Cloud Storage
    """

    GOOGLE_STORAGE_BUCKET_NAME: Optional[str] = Field(
        description="Google Cloud storage bucket name",
        description="Name of the Google Cloud Storage bucket to store and retrieve objects (e.g., 'my-gcs-bucket')",
        default=None,
    )

    GOOGLE_STORAGE_SERVICE_ACCOUNT_JSON_BASE64: Optional[str] = Field(
        description="Google Cloud storage service account json base64",
        description="Base64-encoded JSON key file for Google Cloud service account authentication",
        default=None,
    )

api/configs/middleware/storage/huawei_obs_storage_config.py (new file)
@@ -0,0 +1,29 @@
from typing import Optional

from pydantic import BaseModel, Field


class HuaweiCloudOBSStorageConfig(BaseModel):
    """
    Configuration settings for Huawei Cloud Object Storage Service (OBS)
    """

    HUAWEI_OBS_BUCKET_NAME: Optional[str] = Field(
        description="Name of the Huawei Cloud OBS bucket to store and retrieve objects (e.g., 'my-obs-bucket')",
        default=None,
    )

    HUAWEI_OBS_ACCESS_KEY: Optional[str] = Field(
        description="Access Key ID for authenticating with Huawei Cloud OBS",
        default=None,
    )

    HUAWEI_OBS_SECRET_KEY: Optional[str] = Field(
        description="Secret Access Key for authenticating with Huawei Cloud OBS",
        default=None,
    )

    HUAWEI_OBS_SERVER: Optional[str] = Field(
        description="Endpoint URL for Huawei Cloud OBS (e.g., 'https://obs.cn-north-4.myhuaweicloud.com')",
        default=None,
    )
@@ -6,30 +6,30 @@ from pydantic_settings import BaseSettings

class OCIStorageConfig(BaseSettings):
    """
    OCI storage configs
    Configuration settings for Oracle Cloud Infrastructure (OCI) Object Storage
    """

    OCI_ENDPOINT: Optional[str] = Field(
        description="OCI storage endpoint",
        description="URL of the OCI Object Storage endpoint (e.g., 'https://objectstorage.us-phoenix-1.oraclecloud.com')",
        default=None,
    )

    OCI_REGION: Optional[str] = Field(
        description="OCI storage region",
        description="OCI region where the bucket is located (e.g., 'us-phoenix-1')",
        default=None,
    )

    OCI_BUCKET_NAME: Optional[str] = Field(
        description="OCI storage bucket name",
        description="Name of the OCI Object Storage bucket to store and retrieve objects (e.g., 'my-oci-bucket')",
        default=None,
    )

    OCI_ACCESS_KEY: Optional[str] = Field(
        description="OCI storage access key",
        description="Access key (also known as API key) for authenticating with OCI Object Storage",
        default=None,
    )

    OCI_SECRET_KEY: Optional[str] = Field(
        description="OCI storage secret key",
        description="Secret key associated with the access key for authenticating with OCI Object Storage",
        default=None,
    )

api/configs/middleware/storage/supabase_storage_config.py (new file)
@@ -0,0 +1,24 @@
from typing import Optional

from pydantic import BaseModel, Field


class SupabaseStorageConfig(BaseModel):
    """
    Configuration settings for Supabase Object Storage Service
    """

    SUPABASE_BUCKET_NAME: Optional[str] = Field(
        description="Name of the Supabase bucket to store and retrieve objects (e.g., 'dify-bucket')",
        default=None,
    )

    SUPABASE_API_KEY: Optional[str] = Field(
        description="API KEY for authenticating with Supabase",
        default=None,
    )

    SUPABASE_URL: Optional[str] = Field(
        description="URL of the Supabase",
        default=None,
    )
@@ -6,30 +6,30 @@ from pydantic_settings import BaseSettings

class TencentCloudCOSStorageConfig(BaseSettings):
    """
    Tencent Cloud COS storage configs
    Configuration settings for Tencent Cloud Object Storage (COS)
    """

    TENCENT_COS_BUCKET_NAME: Optional[str] = Field(
        description="Tencent Cloud COS bucket name",
        description="Name of the Tencent Cloud COS bucket to store and retrieve objects",
        default=None,
    )

    TENCENT_COS_REGION: Optional[str] = Field(
        description="Tencent Cloud COS region",
        description="Tencent Cloud region where the COS bucket is located (e.g., 'ap-guangzhou')",
        default=None,
    )

    TENCENT_COS_SECRET_ID: Optional[str] = Field(
        description="Tencent Cloud COS secret id",
        description="SecretId for authenticating with Tencent Cloud COS (part of API credentials)",
        default=None,
    )

    TENCENT_COS_SECRET_KEY: Optional[str] = Field(
        description="Tencent Cloud COS secret key",
        description="SecretKey for authenticating with Tencent Cloud COS (part of API credentials)",
        default=None,
    )

    TENCENT_COS_SCHEME: Optional[str] = Field(
        description="Tencent Cloud COS scheme",
        description="Protocol scheme for COS requests: 'https' (recommended) or 'http'",
        default=None,
    )

@@ -0,0 +1,34 @@
from typing import Optional

from pydantic import BaseModel, Field


class VolcengineTOSStorageConfig(BaseModel):
    """
    Configuration settings for Volcengine Tinder Object Storage (TOS)
    """

    VOLCENGINE_TOS_BUCKET_NAME: Optional[str] = Field(
        description="Name of the Volcengine TOS bucket to store and retrieve objects (e.g., 'my-tos-bucket')",
        default=None,
    )

    VOLCENGINE_TOS_ACCESS_KEY: Optional[str] = Field(
        description="Access Key ID for authenticating with Volcengine TOS",
        default=None,
    )

    VOLCENGINE_TOS_SECRET_KEY: Optional[str] = Field(
        description="Secret Access Key for authenticating with Volcengine TOS",
        default=None,
    )

    VOLCENGINE_TOS_ENDPOINT: Optional[str] = Field(
        description="URL of the Volcengine TOS endpoint (e.g., 'https://tos-cn-beijing.volces.com')",
        default=None,
    )

    VOLCENGINE_TOS_REGION: Optional[str] = Field(
        description="Volcengine region where the TOS bucket is located (e.g., 'cn-beijing')",
        default=None,
    )
@@ -5,33 +5,38 @@ from pydantic import BaseModel, Field

class AnalyticdbConfig(BaseModel):
    """
    Configuration for connecting to AnalyticDB.
    Configuration for connecting to Alibaba Cloud AnalyticDB for PostgreSQL.
    Refer to the following documentation for details on obtaining credentials:
    https://www.alibabacloud.com/help/en/analyticdb-for-postgresql/getting-started/create-an-instance-instances-with-vector-engine-optimization-enabled
    """

    ANALYTICDB_KEY_ID: Optional[str] = Field(
        default=None, description="The Access Key ID provided by Alibaba Cloud for authentication."
        default=None, description="The Access Key ID provided by Alibaba Cloud for API authentication."
    )
    ANALYTICDB_KEY_SECRET: Optional[str] = Field(
        default=None, description="The Secret Access Key corresponding to the Access Key ID for secure access."
        default=None, description="The Secret Access Key corresponding to the Access Key ID for secure API access."
    )
    ANALYTICDB_REGION_ID: Optional[str] = Field(
        default=None, description="The region where the AnalyticDB instance is deployed (e.g., 'cn-hangzhou')."
        default=None,
        description="The region where the AnalyticDB instance is deployed (e.g., 'cn-hangzhou', 'ap-southeast-1').",
    )
    ANALYTICDB_INSTANCE_ID: Optional[str] = Field(
        default=None,
        description="The unique identifier of the AnalyticDB instance you want to connect to (e.g., 'gp-ab123456')..",
        description="The unique identifier of the AnalyticDB instance you want to connect to.",
    )
    ANALYTICDB_ACCOUNT: Optional[str] = Field(
        default=None, description="The account name used to log in to the AnalyticDB instance."
        default=None,
        description="The account name used to log in to the AnalyticDB instance"
        " (usually the initial account created with the instance).",
    )
    ANALYTICDB_PASSWORD: Optional[str] = Field(
        default=None, description="The password associated with the AnalyticDB account for authentication."
        default=None, description="The password associated with the AnalyticDB account for database authentication."
    )
    ANALYTICDB_NAMESPACE: Optional[str] = Field(
        default=None, description="The namespace within AnalyticDB for schema isolation."
        default=None, description="The namespace within AnalyticDB for schema isolation (if using namespace feature)."
    )
    ANALYTICDB_NAMESPACE_PASSWORD: Optional[str] = Field(
        default=None, description="The password for accessing the specified namespace within the AnalyticDB instance."
        default=None,
        description="The password for accessing the specified namespace within the AnalyticDB instance"
        " (if namespace feature is enabled).",
    )

api/configs/middleware/vdb/baidu_vector_config.py (new file)
@@ -0,0 +1,45 @@
from typing import Optional

from pydantic import Field, NonNegativeInt, PositiveInt
from pydantic_settings import BaseSettings


class BaiduVectorDBConfig(BaseSettings):
    """
    Configuration settings for Baidu Vector Database
    """

    BAIDU_VECTOR_DB_ENDPOINT: Optional[str] = Field(
        description="URL of the Baidu Vector Database service (e.g., 'http://vdb.bj.baidubce.com')",
        default=None,
    )

    BAIDU_VECTOR_DB_CONNECTION_TIMEOUT_MS: PositiveInt = Field(
        description="Timeout in milliseconds for Baidu Vector Database operations (default is 30000 milliseconds)",
        default=30000,
    )

    BAIDU_VECTOR_DB_ACCOUNT: Optional[str] = Field(
        description="Account for authenticating with the Baidu Vector Database",
        default=None,
    )

    BAIDU_VECTOR_DB_API_KEY: Optional[str] = Field(
        description="API key for authenticating with the Baidu Vector Database service",
        default=None,
    )

    BAIDU_VECTOR_DB_DATABASE: Optional[str] = Field(
        description="Name of the specific Baidu Vector Database to connect to",
        default=None,
    )

    BAIDU_VECTOR_DB_SHARD: PositiveInt = Field(
        description="Number of shards for the Baidu Vector Database (default is 1)",
        default=1,
    )

    BAIDU_VECTOR_DB_REPLICAS: NonNegativeInt = Field(
        description="Number of replicas for the Baidu Vector Database (default is 3)",
        default=3,
    )
@@ -6,35 +6,35 @@ from pydantic_settings import BaseSettings

class ChromaConfig(BaseSettings):
    """
    Chroma configs
    Configuration settings for Chroma vector database
    """

    CHROMA_HOST: Optional[str] = Field(
        description="Chroma host",
        description="Hostname or IP address of the Chroma server (e.g., 'localhost' or '192.168.1.100')",
        default=None,
    )

    CHROMA_PORT: PositiveInt = Field(
        description="Chroma port",
        description="Port number on which the Chroma server is listening (default is 8000)",
        default=8000,
    )

    CHROMA_TENANT: Optional[str] = Field(
        description="Chroma database",
        description="Tenant identifier for multi-tenancy support in Chroma",
        default=None,
    )

    CHROMA_DATABASE: Optional[str] = Field(
        description="Chroma database",
        description="Name of the Chroma database to connect to",
        default=None,
    )

    CHROMA_AUTH_PROVIDER: Optional[str] = Field(
        description="Chroma authentication provider",
        description="Authentication provider for Chroma (e.g., 'basic', 'token', or a custom provider)",
        default=None,
    )

    CHROMA_AUTH_CREDENTIALS: Optional[str] = Field(
        description="Chroma authentication credentials",
        description="Authentication credentials for Chroma (format depends on the auth provider)",
        default=None,
    )

@@ -6,25 +6,25 @@ from pydantic_settings import BaseSettings

class ElasticsearchConfig(BaseSettings):
    """
    Elasticsearch configs
    Configuration settings for Elasticsearch
    """

    ELASTICSEARCH_HOST: Optional[str] = Field(
        description="Elasticsearch host",
        description="Hostname or IP address of the Elasticsearch server (e.g., 'localhost' or '192.168.1.100')",
        default="127.0.0.1",
    )

    ELASTICSEARCH_PORT: PositiveInt = Field(
        description="Elasticsearch port",
        description="Port number on which the Elasticsearch server is listening (default is 9200)",
        default=9200,
    )

    ELASTICSEARCH_USERNAME: Optional[str] = Field(
        description="Elasticsearch username",
        description="Username for authenticating with Elasticsearch (default is 'elastic')",
        default="elastic",
    )

    ELASTICSEARCH_PASSWORD: Optional[str] = Field(
        description="Elasticsearch password",
        description="Password for authenticating with Elasticsearch (default is 'elastic')",
        default="elastic",
    )

@@ -1,40 +1,35 @@
from typing import Optional

from pydantic import Field, PositiveInt
from pydantic import Field
from pydantic_settings import BaseSettings


class MilvusConfig(BaseSettings):
    """
    Milvus configs
    Configuration settings for Milvus vector database
    """

    MILVUS_HOST: Optional[str] = Field(
        description="Milvus host",
    MILVUS_URI: Optional[str] = Field(
        description="URI for connecting to the Milvus server (e.g., 'http://localhost:19530' or 'https://milvus-instance.example.com:19530')",
        default="http://127.0.0.1:19530",
    )

    MILVUS_TOKEN: Optional[str] = Field(
        description="Authentication token for Milvus, if token-based authentication is enabled",
        default=None,
    )

    MILVUS_PORT: PositiveInt = Field(
        description="Milvus RestFul API port",
        default=9091,
    )

    MILVUS_USER: Optional[str] = Field(
        description="Milvus user",
        description="Username for authenticating with Milvus, if username/password authentication is enabled",
        default=None,
    )

    MILVUS_PASSWORD: Optional[str] = Field(
        description="Milvus password",
        description="Password for authenticating with Milvus, if username/password authentication is enabled",
        default=None,
    )

    MILVUS_SECURE: bool = Field(
        description="whether to use SSL connection for Milvus",
        default=False,
    )

    MILVUS_DATABASE: str = Field(
        description="Milvus database, default to `default`",
        description="Name of the Milvus database to connect to (default is 'default')",
        default="default",
    )

@@ -3,35 +3,35 @@ from pydantic import BaseModel, Field, PositiveInt

class MyScaleConfig(BaseModel):
    """
    MyScale configs
    Configuration settings for MyScale vector database
    """

    MYSCALE_HOST: str = Field(
        description="MyScale host",
        description="Hostname or IP address of the MyScale server (e.g., 'localhost' or 'myscale.example.com')",
        default="localhost",
    )

    MYSCALE_PORT: PositiveInt = Field(
        description="MyScale port",
        description="Port number on which the MyScale server is listening (default is 8123)",
        default=8123,
    )

    MYSCALE_USER: str = Field(
        description="MyScale user",
        description="Username for authenticating with MyScale (default is 'default')",
        default="default",
    )

    MYSCALE_PASSWORD: str = Field(
        description="MyScale password",
        description="Password for authenticating with MyScale (default is an empty string)",
        default="",
    )

    MYSCALE_DATABASE: str = Field(
        description="MyScale database name",
        description="Name of the MyScale database to connect to (default is 'default')",
        default="default",
    )

    MYSCALE_FTS_PARAMS: str = Field(
        description="MyScale fts index parameters",
        description="Additional parameters for MyScale Full Text Search index",
        default="",
    )

||||
@@ -6,30 +6,30 @@ from pydantic_settings import BaseSettings

 class OpenSearchConfig(BaseSettings):
     """
-    OpenSearch configs
+    Configuration settings for OpenSearch
     """

     OPENSEARCH_HOST: Optional[str] = Field(
-        description="OpenSearch host",
+        description="Hostname or IP address of the OpenSearch server (e.g., 'localhost' or 'opensearch.example.com')",
         default=None,
     )

     OPENSEARCH_PORT: PositiveInt = Field(
-        description="OpenSearch port",
+        description="Port number on which the OpenSearch server is listening (default is 9200)",
         default=9200,
     )

     OPENSEARCH_USER: Optional[str] = Field(
-        description="OpenSearch user",
+        description="Username for authenticating with OpenSearch",
         default=None,
     )

     OPENSEARCH_PASSWORD: Optional[str] = Field(
-        description="OpenSearch password",
+        description="Password for authenticating with OpenSearch",
         default=None,
     )

     OPENSEARCH_SECURE: bool = Field(
-        description="whether to use SSL connection for OpenSearch",
+        description="Whether to use SSL/TLS encrypted connection for OpenSearch (True for HTTPS, False for HTTP)",
         default=False,
     )
@@ -6,30 +6,30 @@ from pydantic_settings import BaseSettings

 class OracleConfig(BaseSettings):
     """
-    ORACLE configs
+    Configuration settings for Oracle database
     """

     ORACLE_HOST: Optional[str] = Field(
-        description="ORACLE host",
+        description="Hostname or IP address of the Oracle database server (e.g., 'localhost' or 'oracle.example.com')",
         default=None,
     )

-    ORACLE_PORT: Optional[PositiveInt] = Field(
-        description="ORACLE port",
+    ORACLE_PORT: PositiveInt = Field(
+        description="Port number on which the Oracle database server is listening (default is 1521)",
         default=1521,
     )

     ORACLE_USER: Optional[str] = Field(
-        description="ORACLE user",
+        description="Username for authenticating with the Oracle database",
         default=None,
     )

     ORACLE_PASSWORD: Optional[str] = Field(
-        description="ORACLE password",
+        description="Password for authenticating with the Oracle database",
         default=None,
     )

     ORACLE_DATABASE: Optional[str] = Field(
-        description="ORACLE database",
+        description="Name of the Oracle database or service to connect to (e.g., 'ORCL' or 'pdborcl')",
         default=None,
     )
@@ -6,30 +6,40 @@ from pydantic_settings import BaseSettings

 class PGVectorConfig(BaseSettings):
     """
-    PGVector configs
+    Configuration settings for PGVector (PostgreSQL with vector extension)
     """

     PGVECTOR_HOST: Optional[str] = Field(
-        description="PGVector host",
+        description="Hostname or IP address of the PostgreSQL server with PGVector extension (e.g., 'localhost')",
         default=None,
     )

-    PGVECTOR_PORT: Optional[PositiveInt] = Field(
-        description="PGVector port",
+    PGVECTOR_PORT: PositiveInt = Field(
+        description="Port number on which the PostgreSQL server is listening (default is 5433)",
         default=5433,
     )

     PGVECTOR_USER: Optional[str] = Field(
-        description="PGVector user",
+        description="Username for authenticating with the PostgreSQL database",
         default=None,
     )

     PGVECTOR_PASSWORD: Optional[str] = Field(
-        description="PGVector password",
+        description="Password for authenticating with the PostgreSQL database",
         default=None,
     )

     PGVECTOR_DATABASE: Optional[str] = Field(
-        description="PGVector database",
+        description="Name of the PostgreSQL database to connect to",
         default=None,
     )

+    PGVECTOR_MIN_CONNECTION: PositiveInt = Field(
+        description="Min connection of the PostgreSQL database",
+        default=1,
+    )
+
+    PGVECTOR_MAX_CONNECTION: PositiveInt = Field(
+        description="Max connection of the PostgreSQL database",
+        default=5,
+    )
@@ -6,30 +6,30 @@ from pydantic_settings import BaseSettings

 class PGVectoRSConfig(BaseSettings):
     """
-    PGVectoRS configs
+    Configuration settings for PGVecto.RS (Rust-based vector extension for PostgreSQL)
     """

     PGVECTO_RS_HOST: Optional[str] = Field(
-        description="PGVectoRS host",
+        description="Hostname or IP address of the PostgreSQL server with PGVecto.RS extension (e.g., 'localhost')",
         default=None,
     )

-    PGVECTO_RS_PORT: Optional[PositiveInt] = Field(
-        description="PGVectoRS port",
+    PGVECTO_RS_PORT: PositiveInt = Field(
+        description="Port number on which the PostgreSQL server with PGVecto.RS is listening (default is 5431)",
         default=5431,
     )

     PGVECTO_RS_USER: Optional[str] = Field(
-        description="PGVectoRS user",
+        description="Username for authenticating with the PostgreSQL database using PGVecto.RS",
         default=None,
     )

     PGVECTO_RS_PASSWORD: Optional[str] = Field(
-        description="PGVectoRS password",
+        description="Password for authenticating with the PostgreSQL database using PGVecto.RS",
         default=None,
     )

     PGVECTO_RS_DATABASE: Optional[str] = Field(
-        description="PGVectoRS database",
+        description="Name of the PostgreSQL database with PGVecto.RS extension to connect to",
         default=None,
     )
@@ -6,30 +6,30 @@ from pydantic_settings import BaseSettings

 class QdrantConfig(BaseSettings):
     """
-    Qdrant configs
+    Configuration settings for Qdrant vector database
     """

     QDRANT_URL: Optional[str] = Field(
-        description="Qdrant url",
+        description="URL of the Qdrant server (e.g., 'http://localhost:6333' or 'https://qdrant.example.com')",
         default=None,
     )

     QDRANT_API_KEY: Optional[str] = Field(
-        description="Qdrant api key",
+        description="API key for authenticating with the Qdrant server",
         default=None,
     )

     QDRANT_CLIENT_TIMEOUT: NonNegativeInt = Field(
-        description="Qdrant client timeout in seconds",
+        description="Timeout in seconds for Qdrant client operations (default is 20 seconds)",
         default=20,
     )

     QDRANT_GRPC_ENABLED: bool = Field(
-        description="whether enable grpc support for Qdrant connection",
+        description="Whether to enable gRPC support for Qdrant connection (True for gRPC, False for HTTP)",
         default=False,
     )

     QDRANT_GRPC_PORT: PositiveInt = Field(
-        description="Qdrant grpc port",
+        description="Port number for gRPC connection to Qdrant server (default is 6334)",
         default=6334,
     )
@@ -6,30 +6,30 @@ from pydantic_settings import BaseSettings

 class RelytConfig(BaseSettings):
     """
-    Relyt configs
+    Configuration settings for Relyt database
     """

     RELYT_HOST: Optional[str] = Field(
-        description="Relyt host",
+        description="Hostname or IP address of the Relyt server (e.g., 'localhost' or 'relyt.example.com')",
         default=None,
     )

     RELYT_PORT: PositiveInt = Field(
-        description="Relyt port",
+        description="Port number on which the Relyt server is listening (default is 9200)",
         default=9200,
     )

     RELYT_USER: Optional[str] = Field(
-        description="Relyt user",
+        description="Username for authenticating with the Relyt database",
         default=None,
     )

     RELYT_PASSWORD: Optional[str] = Field(
-        description="Relyt password",
+        description="Password for authenticating with the Relyt database",
         default=None,
     )

     RELYT_DATABASE: Optional[str] = Field(
-        description="Relyt database",
+        description="Name of the Relyt database to connect to (default is 'default')",
         default="default",
     )
@@ -6,45 +6,45 @@ from pydantic_settings import BaseSettings

 class TencentVectorDBConfig(BaseSettings):
     """
-    Tencent Vector configs
+    Configuration settings for Tencent Vector Database
     """

     TENCENT_VECTOR_DB_URL: Optional[str] = Field(
-        description="Tencent Vector URL",
+        description="URL of the Tencent Vector Database service (e.g., 'https://vectordb.tencentcloudapi.com')",
         default=None,
     )

     TENCENT_VECTOR_DB_API_KEY: Optional[str] = Field(
-        description="Tencent Vector API key",
+        description="API key for authenticating with the Tencent Vector Database service",
         default=None,
     )

     TENCENT_VECTOR_DB_TIMEOUT: PositiveInt = Field(
-        description="Tencent Vector timeout in seconds",
+        description="Timeout in seconds for Tencent Vector Database operations (default is 30 seconds)",
         default=30,
     )

     TENCENT_VECTOR_DB_USERNAME: Optional[str] = Field(
-        description="Tencent Vector username",
+        description="Username for authenticating with the Tencent Vector Database (if required)",
         default=None,
     )

     TENCENT_VECTOR_DB_PASSWORD: Optional[str] = Field(
-        description="Tencent Vector password",
+        description="Password for authenticating with the Tencent Vector Database (if required)",
         default=None,
     )

     TENCENT_VECTOR_DB_SHARD: PositiveInt = Field(
-        description="Tencent Vector sharding number",
+        description="Number of shards for the Tencent Vector Database (default is 1)",
         default=1,
     )

     TENCENT_VECTOR_DB_REPLICAS: NonNegativeInt = Field(
-        description="Tencent Vector replicas",
+        description="Number of replicas for the Tencent Vector Database (default is 2)",
         default=2,
     )

     TENCENT_VECTOR_DB_DATABASE: Optional[str] = Field(
-        description="Tencent Vector Database",
+        description="Name of the specific Tencent Vector Database to connect to",
         default=None,
     )
@@ -6,30 +6,30 @@ from pydantic_settings import BaseSettings

 class TiDBVectorConfig(BaseSettings):
     """
-    TiDB Vector configs
+    Configuration settings for TiDB Vector database
     """

     TIDB_VECTOR_HOST: Optional[str] = Field(
-        description="TiDB Vector host",
+        description="Hostname or IP address of the TiDB Vector server (e.g., 'localhost' or 'tidb.example.com')",
         default=None,
     )

     TIDB_VECTOR_PORT: Optional[PositiveInt] = Field(
-        description="TiDB Vector port",
+        description="Port number on which the TiDB Vector server is listening (default is 4000)",
         default=4000,
     )

     TIDB_VECTOR_USER: Optional[str] = Field(
-        description="TiDB Vector user",
+        description="Username for authenticating with the TiDB Vector database",
         default=None,
     )

     TIDB_VECTOR_PASSWORD: Optional[str] = Field(
-        description="TiDB Vector password",
+        description="Password for authenticating with the TiDB Vector database",
         default=None,
     )

     TIDB_VECTOR_DATABASE: Optional[str] = Field(
-        description="TiDB Vector database",
+        description="Name of the TiDB Vector database to connect to",
         default=None,
     )
api/configs/middleware/vdb/upstash_config.py (new file, 20 lines)
@@ -0,0 +1,20 @@
+from typing import Optional
+
+from pydantic import Field
+from pydantic_settings import BaseSettings
+
+
+class UpstashConfig(BaseSettings):
+    """
+    Configuration settings for Upstash vector database
+    """
+
+    UPSTASH_VECTOR_URL: Optional[str] = Field(
+        description="URL of the upstash server (e.g., 'https://vector.upstash.io')",
+        default=None,
+    )
+
+    UPSTASH_VECTOR_TOKEN: Optional[str] = Field(
+        description="Token for authenticating with the upstash server",
+        default=None,
+    )
api/configs/middleware/vdb/vikingdb_config.py (new file, 49 lines)
@@ -0,0 +1,49 @@
+from typing import Optional
+
+from pydantic import BaseModel, Field
+
+
+class VikingDBConfig(BaseModel):
+    """
+    Configuration for connecting to Volcengine VikingDB.
+    Refer to the following documentation for details on obtaining credentials:
+    https://www.volcengine.com/docs/6291/65568
+    """
+
+    VIKINGDB_ACCESS_KEY: Optional[str] = Field(
+        description="The Access Key provided by Volcengine VikingDB for API authentication."
+        "Refer to the following documentation for details on obtaining credentials:"
+        "https://www.volcengine.com/docs/6291/65568",
+        default=None,
+    )
+
+    VIKINGDB_SECRET_KEY: Optional[str] = Field(
+        description="The Secret Key provided by Volcengine VikingDB for API authentication.",
+        default=None,
+    )
+
+    VIKINGDB_REGION: str = Field(
+        description="The region of the Volcengine VikingDB service (e.g., 'cn-shanghai', 'cn-beijing').",
+        default="cn-shanghai",
+    )
+
+    VIKINGDB_HOST: str = Field(
+        description="The host of the Volcengine VikingDB service (e.g., 'api-vikingdb.volces.com', \
+            'api-vikingdb.mlp.cn-shanghai.volces.com')",
+        default="api-vikingdb.mlp.cn-shanghai.volces.com",
+    )
+
+    VIKINGDB_SCHEME: str = Field(
+        description="The scheme of the Volcengine VikingDB service (e.g., 'http', 'https').",
+        default="http",
+    )
+
+    VIKINGDB_CONNECTION_TIMEOUT: int = Field(
+        description="The connection timeout of the Volcengine VikingDB service.",
+        default=30,
+    )
+
+    VIKINGDB_SOCKET_TIMEOUT: int = Field(
+        description="The socket timeout of the Volcengine VikingDB service.",
+        default=30,
+    )
@@ -6,25 +6,25 @@ from pydantic_settings import BaseSettings

 class WeaviateConfig(BaseSettings):
     """
-    Weaviate configs
+    Configuration settings for Weaviate vector database
     """

     WEAVIATE_ENDPOINT: Optional[str] = Field(
-        description="Weaviate endpoint URL",
+        description="URL of the Weaviate server (e.g., 'http://localhost:8080' or 'https://weaviate.example.com')",
         default=None,
     )

     WEAVIATE_API_KEY: Optional[str] = Field(
-        description="Weaviate API key",
+        description="API key for authenticating with the Weaviate server",
         default=None,
     )

     WEAVIATE_GRPC_ENABLED: bool = Field(
-        description="whether to enable gRPC for Weaviate connection",
+        description="Whether to enable gRPC for Weaviate connection (True for gRPC, False for HTTP)",
         default=True,
     )

     WEAVIATE_BATCH_SIZE: PositiveInt = Field(
-        description="Weaviate batch size",
+        description="Number of objects to be processed in a single batch operation (default is 100)",
         default=100,
     )
@@ -9,7 +9,7 @@ class PackagingInfo(BaseSettings):

     CURRENT_VERSION: str = Field(
         description="Dify version",
-        default="0.7.2",
+        default="0.10.1",
     )

     COMMIT_SHA: str = Field(
@@ -1 +1,24 @@
+from configs import dify_config
+
 HIDDEN_VALUE = "[__HIDDEN__]"
+UUID_NIL = "00000000-0000-0000-0000-000000000000"
+
+IMAGE_EXTENSIONS = ["jpg", "jpeg", "png", "webp", "gif", "svg"]
+IMAGE_EXTENSIONS.extend([ext.upper() for ext in IMAGE_EXTENSIONS])
+
+VIDEO_EXTENSIONS = ["mp4", "mov", "mpeg", "mpga"]
+VIDEO_EXTENSIONS.extend([ext.upper() for ext in VIDEO_EXTENSIONS])
+
+AUDIO_EXTENSIONS = ["mp3", "m4a", "wav", "webm", "amr"]
+AUDIO_EXTENSIONS.extend([ext.upper() for ext in AUDIO_EXTENSIONS])
+
+
+if dify_config.ETL_TYPE == "Unstructured":
+    DOCUMENT_EXTENSIONS = ["txt", "markdown", "md", "pdf", "html", "htm", "xlsx", "xls"]
+    DOCUMENT_EXTENSIONS.extend(("docx", "csv", "eml", "msg", "pptx", "xml", "epub"))
+    if dify_config.UNSTRUCTURED_API_URL:
+        DOCUMENT_EXTENSIONS.append("ppt")
+    DOCUMENT_EXTENSIONS.extend([ext.upper() for ext in DOCUMENT_EXTENSIONS])
+else:
+    DOCUMENT_EXTENSIONS = ["txt", "markdown", "md", "pdf", "html", "htm", "xlsx", "xls", "docx", "csv"]
+    DOCUMENT_EXTENSIONS.extend([ext.upper() for ext in DOCUMENT_EXTENSIONS])
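Note (editorial aside): the extend-with-upper idiom above makes lookups work for all-lower or all-upper extensions. A quick sketch of how such a list is typically consumed; the helper below is hypothetical, not repository code:

# Hypothetical helper, not part of the diff.
IMAGE_EXTENSIONS = ["jpg", "jpeg", "png", "webp", "gif", "svg"]
IMAGE_EXTENSIONS.extend([ext.upper() for ext in IMAGE_EXTENSIONS])


def is_image_file(filename: str) -> bool:
    # Split on the last dot and compare against the combined list.
    return "." in filename and filename.rsplit(".", 1)[1] in IMAGE_EXTENSIONS


print(is_image_file("photo.PNG"))  # True: "PNG" is in the extended list
print(is_image_file("notes.txt"))  # False

One caveat worth noting: mixed-case extensions such as "photo.Png" are not covered by this approach, only all-lower and all-upper variants.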
File diff suppressed because one or more lines are too long
@@ -1,7 +1,9 @@
 from contextvars import ContextVar
+from typing import TYPE_CHECKING

-from core.workflow.entities.variable_pool import VariablePool
+if TYPE_CHECKING:
+    from core.workflow.entities.variable_pool import VariablePool

 tenant_id: ContextVar[str] = ContextVar("tenant_id")

-workflow_variable_pool: ContextVar[VariablePool] = ContextVar("workflow_variable_pool")
+workflow_variable_pool: ContextVar["VariablePool"] = ContextVar("workflow_variable_pool")
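Note (editorial aside): TYPE_CHECKING is False at runtime, so the guarded import is only seen by static type checkers and the annotation must become a string that is resolved lazily. A standalone sketch of the same pattern, using decimal.Decimal as a stand-in for the real VariablePool:

# Standalone sketch of the TYPE_CHECKING pattern (not repository code).
from contextvars import ContextVar
from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # Only evaluated by static type checkers; avoids a runtime import
    # (and any import cycle) for a type used purely in annotations.
    from decimal import Decimal  # stand-in for a heavyweight module

price: ContextVar["Decimal"] = ContextVar("price")  # quoted: resolved lazily

This is the usual fix when a module needs a type only for annotations and importing it eagerly would create a circular import.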
@@ -37,7 +37,16 @@ from .auth import activate, data_source_bearer_auth, data_source_oauth, forgot_p
 from .billing import billing

 # Import datasets controllers
-from .datasets import data_source, datasets, datasets_document, datasets_segments, file, hit_testing, website
+from .datasets import (
+    data_source,
+    datasets,
+    datasets_document,
+    datasets_segments,
+    external,
+    file,
+    hit_testing,
+    website,
+)

 # Import explore controllers
 from .explore import (
@@ -1,10 +1,10 @@
-import os
 from functools import wraps

 from flask import request
 from flask_restful import Resource, reqparse
 from werkzeug.exceptions import NotFound, Unauthorized

+from configs import dify_config
 from constants.languages import supported_language
 from controllers.console import api
 from controllers.console.wraps import only_edition_cloud
@@ -15,7 +15,7 @@ from models.model import App, InstalledApp, RecommendedApp
 def admin_required(view):
     @wraps(view)
     def decorated(*args, **kwargs):
-        if not os.getenv("ADMIN_API_KEY"):
+        if not dify_config.ADMIN_API_KEY:
             raise Unauthorized("API key is invalid.")

         auth_header = request.headers.get("Authorization")
@@ -31,7 +31,7 @@ def admin_required(view):
         if auth_scheme != "bearer":
             raise Unauthorized("Invalid Authorization header format. Expected 'Bearer <api-key>' format.")

-        if os.getenv("ADMIN_API_KEY") != auth_token:
+        if dify_config.ADMIN_API_KEY != auth_token:
             raise Unauthorized("API key is invalid.")

         return view(*args, **kwargs)
@@ -60,23 +60,15 @@ class InsertExploreAppListApi(Resource):

         site = app.site
         if not site:
-            desc = args["desc"] if args["desc"] else ""
-            copy_right = args["copyright"] if args["copyright"] else ""
-            privacy_policy = args["privacy_policy"] if args["privacy_policy"] else ""
-            custom_disclaimer = args["custom_disclaimer"] if args["custom_disclaimer"] else ""
+            desc = args["desc"] or ""
+            copy_right = args["copyright"] or ""
+            privacy_policy = args["privacy_policy"] or ""
+            custom_disclaimer = args["custom_disclaimer"] or ""
         else:
-            desc = site.description if site.description else args["desc"] if args["desc"] else ""
-            copy_right = site.copyright if site.copyright else args["copyright"] if args["copyright"] else ""
-            privacy_policy = (
-                site.privacy_policy if site.privacy_policy else args["privacy_policy"] if args["privacy_policy"] else ""
-            )
-            custom_disclaimer = (
-                site.custom_disclaimer
-                if site.custom_disclaimer
-                else args["custom_disclaimer"]
-                if args["custom_disclaimer"]
-                else ""
-            )
+            desc = site.description or args["desc"] or ""
+            copy_right = site.copyright or args["copyright"] or ""
+            privacy_policy = site.privacy_policy or args["privacy_policy"] or ""
+            custom_disclaimer = site.custom_disclaimer or args["custom_disclaimer"] or ""

         recommended_app = RecommendedApp.query.filter(RecommendedApp.app_id == args["app_id"]).first()

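Note (editorial aside): the simplification above rests on a standard Python identity, namely that `x if x else y` and `x or y` evaluate to the same value for any `x`, and that chained conditionals collapse into chained `or`. A quick illustrative check:

# Illustrative only: `a if a else b` is equivalent to `a or b`.
for value in ["hello", "", None]:
    assert (value if value else "") == (value or "")

# Chained fallbacks collapse the same way.
site_description, arg_desc = None, "from args"
old_style = site_description if site_description else arg_desc if arg_desc else ""
new_style = site_description or arg_desc or ""
assert old_style == new_style == "from args"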
@@ -57,7 +57,7 @@ class BaseApiKeyListResource(Resource):
     def post(self, resource_id):
         resource_id = str(resource_id)
         _get_resource(resource_id, current_user.current_tenant_id, self.resource_model)
-        if not current_user.is_admin_or_owner:
+        if not current_user.is_editor:
             raise Forbidden()

         current_key_count = (
@@ -174,6 +174,7 @@ class AppApi(Resource):
         parser.add_argument("icon", type=str, location="json")
         parser.add_argument("icon_background", type=str, location="json")
         parser.add_argument("max_active_requests", type=int, location="json")
+        parser.add_argument("use_icon_as_answer_icon", type=bool, location="json")
         args = parser.parse_args()

         app_service = AppService()
@@ -94,19 +94,15 @@ class ChatMessageTextApi(Resource):
         message_id = args.get("message_id", None)
         text = args.get("text", None)
         if (
-            app_model.mode in [AppMode.ADVANCED_CHAT.value, AppMode.WORKFLOW.value]
+            app_model.mode in {AppMode.ADVANCED_CHAT.value, AppMode.WORKFLOW.value}
             and app_model.workflow
             and app_model.workflow.features_dict
         ):
             text_to_speech = app_model.workflow.features_dict.get("text_to_speech")
-            voice = args.get("voice") if args.get("voice") else text_to_speech.get("voice")
+            voice = args.get("voice") or text_to_speech.get("voice")
         else:
             try:
-                voice = (
-                    args.get("voice")
-                    if args.get("voice")
-                    else app_model.app_model_config.text_to_speech_dict.get("voice")
-                )
+                voice = args.get("voice") or app_model.app_model_config.text_to_speech_dict.get("voice")
             except Exception:
                 voice = None
         response = AudioService.transcript_tts(app_model=app_model, text=text, message_id=message_id, voice=voice)
@@ -109,6 +109,7 @@ class ChatMessageApi(Resource):
         parser.add_argument("files", type=list, required=False, location="json")
         parser.add_argument("model_config", type=dict, required=True, location="json")
         parser.add_argument("conversation_id", type=uuid_value, location="json")
+        parser.add_argument("parent_message_id", type=uuid_value, required=False, location="json")
         parser.add_argument("response_mode", type=str, choices=["blocking", "streaming"], location="json")
         parser.add_argument("retriever_from", type=str, required=False, default="dev", location="json")
         args = parser.parse_args()
@@ -20,9 +20,10 @@ from fields.conversation_fields import (
     conversation_pagination_fields,
     conversation_with_summary_pagination_fields,
 )
-from libs.helper import datetime_string
+from libs.helper import DatetimeString
 from libs.login import login_required
-from models.model import AppMode, Conversation, EndUser, Message, MessageAnnotation
+from models import Conversation, EndUser, Message, MessageAnnotation
+from models.model import AppMode


 class CompletionConversationApi(Resource):
@@ -36,8 +37,8 @@ class CompletionConversationApi(Resource):
             raise Forbidden()
         parser = reqparse.RequestParser()
         parser.add_argument("keyword", type=str, location="args")
-        parser.add_argument("start", type=datetime_string("%Y-%m-%d %H:%M"), location="args")
-        parser.add_argument("end", type=datetime_string("%Y-%m-%d %H:%M"), location="args")
+        parser.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
+        parser.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
         parser.add_argument(
             "annotation_status", type=str, choices=["annotated", "not_annotated", "all"], default="all", location="args"
         )
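Note (editorial aside): the rename from `datetime_string` to `DatetimeString` across these hunks suggests the helper became a callable class. The actual implementation in libs/helper.py is not shown in this diff, so the following is only a plausible sketch of a reqparse-compatible type:

# Hedged sketch: a callable class usable as a flask_restful reqparse `type`.
# The real DatetimeString in libs/helper.py may differ.
from datetime import datetime


class DatetimeString:
    def __init__(self, format: str):
        self.format = format

    def __call__(self, value: str) -> str:
        # Raising ValueError on bad input lets reqparse report a 400.
        try:
            datetime.strptime(value, self.format)
        except ValueError:
            raise ValueError(f"The datetime value {value} does not match the format {self.format}")
        return value


parse_start = DatetimeString("%Y-%m-%d %H:%M")
print(parse_start("2024-09-01 12:30"))  # valid input is returned unchanged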
@@ -143,8 +144,8 @@ class ChatConversationApi(Resource):
             raise Forbidden()
         parser = reqparse.RequestParser()
         parser.add_argument("keyword", type=str, location="args")
-        parser.add_argument("start", type=datetime_string("%Y-%m-%d %H:%M"), location="args")
-        parser.add_argument("end", type=datetime_string("%Y-%m-%d %H:%M"), location="args")
+        parser.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
+        parser.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
         parser.add_argument(
             "annotation_status", type=str, choices=["annotated", "not_annotated", "all"], default="all", location="args"
         )
@@ -188,6 +189,7 @@ class ChatConversationApi(Resource):
                     subquery.c.from_end_user_session_id.ilike(keyword_filter),
                 ),
             )
+            .group_by(Conversation.id)
         )

         account = current_user
@@ -201,7 +203,11 @@ class ChatConversationApi(Resource):
             start_datetime_timezone = timezone.localize(start_datetime)
             start_datetime_utc = start_datetime_timezone.astimezone(utc_timezone)

-            query = query.where(Conversation.created_at >= start_datetime_utc)
+            match args["sort_by"]:
+                case "updated_at" | "-updated_at":
+                    query = query.where(Conversation.updated_at >= start_datetime_utc)
+                case "created_at" | "-created_at" | _:
+                    query = query.where(Conversation.created_at >= start_datetime_utc)

         if args["end"]:
             end_datetime = datetime.strptime(args["end"], "%Y-%m-%d %H:%M")
@@ -210,7 +216,11 @@ class ChatConversationApi(Resource):
             end_datetime_timezone = timezone.localize(end_datetime)
             end_datetime_utc = end_datetime_timezone.astimezone(utc_timezone)

-            query = query.where(Conversation.created_at < end_datetime_utc)
+            match args["sort_by"]:
+                case "updated_at" | "-updated_at":
+                    query = query.where(Conversation.updated_at <= end_datetime_utc)
+                case "created_at" | "-created_at" | _:
+                    query = query.where(Conversation.created_at <= end_datetime_utc)

         if args["annotation_status"] == "annotated":
             query = query.options(joinedload(Conversation.message_annotations)).join(
@@ -52,4 +52,39 @@ class RuleGenerateApi(Resource):
         return rules


+class RuleCodeGenerateApi(Resource):
+    @setup_required
+    @login_required
+    @account_initialization_required
+    def post(self):
+        parser = reqparse.RequestParser()
+        parser.add_argument("instruction", type=str, required=True, nullable=False, location="json")
+        parser.add_argument("model_config", type=dict, required=True, nullable=False, location="json")
+        parser.add_argument("no_variable", type=bool, required=True, default=False, location="json")
+        parser.add_argument("code_language", type=str, required=False, default="javascript", location="json")
+        args = parser.parse_args()
+
+        account = current_user
+        CODE_GENERATION_MAX_TOKENS = int(os.getenv("CODE_GENERATION_MAX_TOKENS", "1024"))
+        try:
+            code_result = LLMGenerator.generate_code(
+                tenant_id=account.current_tenant_id,
+                instruction=args["instruction"],
+                model_config=args["model_config"],
+                code_language=args["code_language"],
+                max_tokens=CODE_GENERATION_MAX_TOKENS,
+            )
+        except ProviderTokenNotInitError as ex:
+            raise ProviderNotInitializeError(ex.description)
+        except QuotaExceededError:
+            raise ProviderQuotaExceededError()
+        except ModelCurrentlyNotSupportError:
+            raise ProviderModelCurrentlyNotSupportError()
+        except InvokeError as e:
+            raise CompletionRequestError(e.description)
+
+        return code_result
+
+
 api.add_resource(RuleGenerateApi, "/rule-generate")
+api.add_resource(RuleCodeGenerateApi, "/rule-code-generate")
@@ -12,7 +12,7 @@ from controllers.console.wraps import account_initialization_required
 from extensions.ext_database import db
 from fields.app_fields import app_site_fields
 from libs.login import login_required
-from models.model import Site
+from models import Site


 def parse_app_site_args():
@@ -34,6 +34,7 @@ def parse_app_site_args():
     )
     parser.add_argument("prompt_public", type=bool, required=False, location="json")
     parser.add_argument("show_workflow_steps", type=bool, required=False, location="json")
+    parser.add_argument("use_icon_as_answer_icon", type=bool, required=False, location="json")
     return parser.parse_args()


@@ -68,6 +69,7 @@ class AppSite(Resource):
             "customize_token_strategy",
             "prompt_public",
             "show_workflow_steps",
+            "use_icon_as_answer_icon",
         ]:
             value = args.get(attr_name)
             if value is not None:
@@ -11,7 +11,7 @@ from controllers.console.app.wraps import get_app_model
 from controllers.console.setup import setup_required
 from controllers.console.wraps import account_initialization_required
 from extensions.ext_database import db
-from libs.helper import datetime_string
+from libs.helper import DatetimeString
 from libs.login import login_required
 from models.model import AppMode
@@ -25,14 +25,17 @@ class DailyMessageStatistic(Resource):
         account = current_user

         parser = reqparse.RequestParser()
-        parser.add_argument("start", type=datetime_string("%Y-%m-%d %H:%M"), location="args")
-        parser.add_argument("end", type=datetime_string("%Y-%m-%d %H:%M"), location="args")
+        parser.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
+        parser.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
         args = parser.parse_args()

-        sql_query = """
-        SELECT date(DATE_TRUNC('day', created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date, count(*) AS message_count
-            FROM messages where app_id = :app_id
-        """
+        sql_query = """SELECT
+    DATE(DATE_TRUNC('day', created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date,
+    COUNT(*) AS message_count
+FROM
+    messages
+WHERE
+    app_id = :app_id"""
         arg_dict = {"tz": account.timezone, "app_id": app_model.id}

         timezone = pytz.timezone(account.timezone)
@@ -45,7 +48,7 @@ class DailyMessageStatistic(Resource):
             start_datetime_timezone = timezone.localize(start_datetime)
             start_datetime_utc = start_datetime_timezone.astimezone(utc_timezone)

-            sql_query += " and created_at >= :start"
+            sql_query += " AND created_at >= :start"
             arg_dict["start"] = start_datetime_utc

         if args["end"]:
@@ -55,10 +58,10 @@ class DailyMessageStatistic(Resource):
             end_datetime_timezone = timezone.localize(end_datetime)
             end_datetime_utc = end_datetime_timezone.astimezone(utc_timezone)

-            sql_query += " and created_at < :end"
+            sql_query += " AND created_at < :end"
             arg_dict["end"] = end_datetime_utc

-        sql_query += " GROUP BY date order by date"
+        sql_query += " GROUP BY date ORDER BY date"

         response_data = []
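Note (editorial aside): the statistics endpoints above all build a `:tz`/`:app_id` parameterized string and a matching argument dict. A sketch of how such a query is typically executed, under the assumption of SQLAlchemy (which dify uses via its db extension); the connection URL and argument values below are illustrative only:

# Illustrative sketch: executing a parameterized query like the one above.
from sqlalchemy import create_engine, text

engine = create_engine("postgresql+psycopg2://user:pass@localhost/dify")  # assumed URL

sql_query = """SELECT
    DATE(DATE_TRUNC('day', created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date,
    COUNT(*) AS message_count
FROM
    messages
WHERE
    app_id = :app_id GROUP BY date ORDER BY date"""

arg_dict = {"tz": "UTC", "app_id": "some-app-uuid"}  # placeholder values
with engine.begin() as conn:
    for row in conn.execute(text(sql_query), arg_dict):
        print(row.date, row.message_count)

Binding `:start`/`:end` through the argument dict, as the code above does, keeps the query safe from SQL injection even though the string is concatenated.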
@@ -79,14 +82,17 @@ class DailyConversationStatistic(Resource):
         account = current_user

         parser = reqparse.RequestParser()
-        parser.add_argument("start", type=datetime_string("%Y-%m-%d %H:%M"), location="args")
-        parser.add_argument("end", type=datetime_string("%Y-%m-%d %H:%M"), location="args")
+        parser.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
+        parser.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
         args = parser.parse_args()

-        sql_query = """
-        SELECT date(DATE_TRUNC('day', created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date, count(distinct messages.conversation_id) AS conversation_count
-            FROM messages where app_id = :app_id
-        """
+        sql_query = """SELECT
+    DATE(DATE_TRUNC('day', created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date,
+    COUNT(DISTINCT messages.conversation_id) AS conversation_count
+FROM
+    messages
+WHERE
+    app_id = :app_id"""
         arg_dict = {"tz": account.timezone, "app_id": app_model.id}

         timezone = pytz.timezone(account.timezone)
@@ -99,7 +105,7 @@ class DailyConversationStatistic(Resource):
             start_datetime_timezone = timezone.localize(start_datetime)
             start_datetime_utc = start_datetime_timezone.astimezone(utc_timezone)

-            sql_query += " and created_at >= :start"
+            sql_query += " AND created_at >= :start"
             arg_dict["start"] = start_datetime_utc

         if args["end"]:
@@ -109,10 +115,10 @@ class DailyConversationStatistic(Resource):
             end_datetime_timezone = timezone.localize(end_datetime)
             end_datetime_utc = end_datetime_timezone.astimezone(utc_timezone)

-            sql_query += " and created_at < :end"
+            sql_query += " AND created_at < :end"
             arg_dict["end"] = end_datetime_utc

-        sql_query += " GROUP BY date order by date"
+        sql_query += " GROUP BY date ORDER BY date"

         response_data = []
@@ -133,14 +139,17 @@ class DailyTerminalsStatistic(Resource):
         account = current_user

         parser = reqparse.RequestParser()
-        parser.add_argument("start", type=datetime_string("%Y-%m-%d %H:%M"), location="args")
-        parser.add_argument("end", type=datetime_string("%Y-%m-%d %H:%M"), location="args")
+        parser.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
+        parser.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
         args = parser.parse_args()

-        sql_query = """
-        SELECT date(DATE_TRUNC('day', created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date, count(distinct messages.from_end_user_id) AS terminal_count
-            FROM messages where app_id = :app_id
-        """
+        sql_query = """SELECT
+    DATE(DATE_TRUNC('day', created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date,
+    COUNT(DISTINCT messages.from_end_user_id) AS terminal_count
+FROM
+    messages
+WHERE
+    app_id = :app_id"""
         arg_dict = {"tz": account.timezone, "app_id": app_model.id}

         timezone = pytz.timezone(account.timezone)
@@ -153,7 +162,7 @@ class DailyTerminalsStatistic(Resource):
             start_datetime_timezone = timezone.localize(start_datetime)
             start_datetime_utc = start_datetime_timezone.astimezone(utc_timezone)

-            sql_query += " and created_at >= :start"
+            sql_query += " AND created_at >= :start"
             arg_dict["start"] = start_datetime_utc

         if args["end"]:
@@ -163,10 +172,10 @@ class DailyTerminalsStatistic(Resource):
             end_datetime_timezone = timezone.localize(end_datetime)
             end_datetime_utc = end_datetime_timezone.astimezone(utc_timezone)

-            sql_query += " and created_at < :end"
+            sql_query += " AND created_at < :end"
             arg_dict["end"] = end_datetime_utc

-        sql_query += " GROUP BY date order by date"
+        sql_query += " GROUP BY date ORDER BY date"

         response_data = []
@@ -187,16 +196,18 @@ class DailyTokenCostStatistic(Resource):
         account = current_user

         parser = reqparse.RequestParser()
-        parser.add_argument("start", type=datetime_string("%Y-%m-%d %H:%M"), location="args")
-        parser.add_argument("end", type=datetime_string("%Y-%m-%d %H:%M"), location="args")
+        parser.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
+        parser.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
         args = parser.parse_args()

-        sql_query = """
-        SELECT date(DATE_TRUNC('day', created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date,
-            (sum(messages.message_tokens) + sum(messages.answer_tokens)) as token_count,
-            sum(total_price) as total_price
-            FROM messages where app_id = :app_id
-        """
+        sql_query = """SELECT
+    DATE(DATE_TRUNC('day', created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date,
+    (SUM(messages.message_tokens) + SUM(messages.answer_tokens)) AS token_count,
+    SUM(total_price) AS total_price
+FROM
+    messages
+WHERE
+    app_id = :app_id"""
         arg_dict = {"tz": account.timezone, "app_id": app_model.id}

         timezone = pytz.timezone(account.timezone)
@@ -209,7 +220,7 @@ class DailyTokenCostStatistic(Resource):
             start_datetime_timezone = timezone.localize(start_datetime)
             start_datetime_utc = start_datetime_timezone.astimezone(utc_timezone)

-            sql_query += " and created_at >= :start"
+            sql_query += " AND created_at >= :start"
             arg_dict["start"] = start_datetime_utc

         if args["end"]:
@@ -219,10 +230,10 @@ class DailyTokenCostStatistic(Resource):
             end_datetime_timezone = timezone.localize(end_datetime)
             end_datetime_utc = end_datetime_timezone.astimezone(utc_timezone)

-            sql_query += " and created_at < :end"
+            sql_query += " AND created_at < :end"
             arg_dict["end"] = end_datetime_utc

-        sql_query += " GROUP BY date order by date"
+        sql_query += " GROUP BY date ORDER BY date"

         response_data = []
@@ -245,16 +256,26 @@ class AverageSessionInteractionStatistic(Resource):
         account = current_user

         parser = reqparse.RequestParser()
-        parser.add_argument("start", type=datetime_string("%Y-%m-%d %H:%M"), location="args")
-        parser.add_argument("end", type=datetime_string("%Y-%m-%d %H:%M"), location="args")
+        parser.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
+        parser.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
         args = parser.parse_args()

-        sql_query = """SELECT date(DATE_TRUNC('day', c.created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date,
-AVG(subquery.message_count) AS interactions
-FROM (SELECT m.conversation_id, COUNT(m.id) AS message_count
-    FROM conversations c
-    JOIN messages m ON c.id = m.conversation_id
-    WHERE c.override_model_configs IS NULL AND c.app_id = :app_id"""
+        sql_query = """SELECT
+    DATE(DATE_TRUNC('day', c.created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date,
+    AVG(subquery.message_count) AS interactions
+FROM
+    (
+        SELECT
+            m.conversation_id,
+            COUNT(m.id) AS message_count
+        FROM
+            conversations c
+        JOIN
+            messages m
+            ON c.id = m.conversation_id
+        WHERE
+            c.override_model_configs IS NULL
+            AND c.app_id = :app_id"""
         arg_dict = {"tz": account.timezone, "app_id": app_model.id}

         timezone = pytz.timezone(account.timezone)
@@ -267,7 +288,7 @@ FROM (SELECT m.conversation_id, COUNT(m.id) AS message_count
             start_datetime_timezone = timezone.localize(start_datetime)
             start_datetime_utc = start_datetime_timezone.astimezone(utc_timezone)

-            sql_query += " and c.created_at >= :start"
+            sql_query += " AND c.created_at >= :start"
             arg_dict["start"] = start_datetime_utc

         if args["end"]:
@@ -277,14 +298,19 @@ FROM (SELECT m.conversation_id, COUNT(m.id) AS message_count
             end_datetime_timezone = timezone.localize(end_datetime)
             end_datetime_utc = end_datetime_timezone.astimezone(utc_timezone)

-            sql_query += " and c.created_at < :end"
+            sql_query += " AND c.created_at < :end"
             arg_dict["end"] = end_datetime_utc

         sql_query += """
-            GROUP BY m.conversation_id) subquery
-        LEFT JOIN conversations c on c.id=subquery.conversation_id
-        GROUP BY date
-        ORDER BY date"""
+        GROUP BY m.conversation_id
+    ) subquery
+LEFT JOIN
+    conversations c
+    ON c.id = subquery.conversation_id
+GROUP BY
+    date
+ORDER BY
+    date"""

         response_data = []
@@ -307,17 +333,21 @@ class UserSatisfactionRateStatistic(Resource):
         account = current_user

         parser = reqparse.RequestParser()
-        parser.add_argument("start", type=datetime_string("%Y-%m-%d %H:%M"), location="args")
-        parser.add_argument("end", type=datetime_string("%Y-%m-%d %H:%M"), location="args")
+        parser.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
+        parser.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
         args = parser.parse_args()

-        sql_query = """
-        SELECT date(DATE_TRUNC('day', m.created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date,
-            COUNT(m.id) as message_count, COUNT(mf.id) as feedback_count
-            FROM messages m
-            LEFT JOIN message_feedbacks mf on mf.message_id=m.id and mf.rating='like'
-            WHERE m.app_id = :app_id
-        """
+        sql_query = """SELECT
+    DATE(DATE_TRUNC('day', m.created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date,
+    COUNT(m.id) AS message_count,
+    COUNT(mf.id) AS feedback_count
+FROM
+    messages m
+LEFT JOIN
+    message_feedbacks mf
+    ON mf.message_id=m.id AND mf.rating='like'
+WHERE
+    m.app_id = :app_id"""
         arg_dict = {"tz": account.timezone, "app_id": app_model.id}

         timezone = pytz.timezone(account.timezone)
@@ -330,7 +360,7 @@ class UserSatisfactionRateStatistic(Resource):
             start_datetime_timezone = timezone.localize(start_datetime)
             start_datetime_utc = start_datetime_timezone.astimezone(utc_timezone)

-            sql_query += " and m.created_at >= :start"
+            sql_query += " AND m.created_at >= :start"
             arg_dict["start"] = start_datetime_utc

         if args["end"]:
@@ -340,10 +370,10 @@ class UserSatisfactionRateStatistic(Resource):
             end_datetime_timezone = timezone.localize(end_datetime)
             end_datetime_utc = end_datetime_timezone.astimezone(utc_timezone)

-            sql_query += " and m.created_at < :end"
+            sql_query += " AND m.created_at < :end"
             arg_dict["end"] = end_datetime_utc

-        sql_query += " GROUP BY date order by date"
+        sql_query += " GROUP BY date ORDER BY date"

         response_data = []
@@ -369,16 +399,17 @@ class AverageResponseTimeStatistic(Resource):
         account = current_user

         parser = reqparse.RequestParser()
-        parser.add_argument("start", type=datetime_string("%Y-%m-%d %H:%M"), location="args")
-        parser.add_argument("end", type=datetime_string("%Y-%m-%d %H:%M"), location="args")
+        parser.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
+        parser.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
         args = parser.parse_args()

-        sql_query = """
-        SELECT date(DATE_TRUNC('day', created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date,
-            AVG(provider_response_latency) as latency
-            FROM messages
-            WHERE app_id = :app_id
-        """
+        sql_query = """SELECT
+    DATE(DATE_TRUNC('day', created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date,
+    AVG(provider_response_latency) AS latency
+FROM
+    messages
+WHERE
+    app_id = :app_id"""
         arg_dict = {"tz": account.timezone, "app_id": app_model.id}

         timezone = pytz.timezone(account.timezone)
@@ -391,7 +422,7 @@ class AverageResponseTimeStatistic(Resource):
             start_datetime_timezone = timezone.localize(start_datetime)
             start_datetime_utc = start_datetime_timezone.astimezone(utc_timezone)

-            sql_query += " and created_at >= :start"
+            sql_query += " AND created_at >= :start"
             arg_dict["start"] = start_datetime_utc

         if args["end"]:
@@ -401,10 +432,10 @@ class AverageResponseTimeStatistic(Resource):
             end_datetime_timezone = timezone.localize(end_datetime)
             end_datetime_utc = end_datetime_timezone.astimezone(utc_timezone)

-            sql_query += " and created_at < :end"
+            sql_query += " AND created_at < :end"
             arg_dict["end"] = end_datetime_utc

-        sql_query += " GROUP BY date order by date"
+        sql_query += " GROUP BY date ORDER BY date"

         response_data = []
@@ -425,17 +456,20 @@ class TokensPerSecondStatistic(Resource):
         account = current_user

         parser = reqparse.RequestParser()
-        parser.add_argument("start", type=datetime_string("%Y-%m-%d %H:%M"), location="args")
-        parser.add_argument("end", type=datetime_string("%Y-%m-%d %H:%M"), location="args")
+        parser.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
+        parser.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
         args = parser.parse_args()

-        sql_query = """SELECT date(DATE_TRUNC('day', created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date,
-    CASE
+        sql_query = """SELECT
+    DATE(DATE_TRUNC('day', created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date,
+    CASE
         WHEN SUM(provider_response_latency) = 0 THEN 0
         ELSE (SUM(answer_tokens) / SUM(provider_response_latency))
     END as tokens_per_second
-FROM messages
-WHERE app_id = :app_id"""
+FROM
+    messages
+WHERE
+    app_id = :app_id"""
         arg_dict = {"tz": account.timezone, "app_id": app_model.id}

         timezone = pytz.timezone(account.timezone)
@@ -448,7 +482,7 @@ WHERE app_id = :app_id"""
             start_datetime_timezone = timezone.localize(start_datetime)
             start_datetime_utc = start_datetime_timezone.astimezone(utc_timezone)

-            sql_query += " and created_at >= :start"
+            sql_query += " AND created_at >= :start"
             arg_dict["start"] = start_datetime_utc

         if args["end"]:
@@ -458,10 +492,10 @@ WHERE app_id = :app_id"""
             end_datetime_timezone = timezone.localize(end_datetime)
             end_datetime_utc = end_datetime_timezone.astimezone(utc_timezone)

-            sql_query += " and created_at < :end"
+            sql_query += " AND created_at < :end"
             arg_dict["end"] = end_datetime_utc

-        sql_query += " GROUP BY date order by date"
+        sql_query += " GROUP BY date ORDER BY date"

         response_data = []

@@ -13,14 +13,14 @@ from controllers.console.setup import setup_required
 from controllers.console.wraps import account_initialization_required
 from core.app.apps.base_app_queue_manager import AppQueueManager
 from core.app.entities.app_invoke_entities import InvokeFrom
-from core.app.segments import factory
 from core.errors.error import AppInvokeQuotaExceededError
+from factories import variable_factory
 from fields.workflow_fields import workflow_fields
 from fields.workflow_run_fields import workflow_run_node_execution_fields
 from libs import helper
 from libs.helper import TimestampField, uuid_value
 from libs.login import current_user, login_required
-from models.model import App, AppMode
+from models import App
+from models.model import AppMode
 from services.app_dsl_service import AppDslService
 from services.app_generate_service import AppGenerateService
 from services.errors.app import WorkflowHashNotEqualError
@@ -101,9 +101,13 @@ class DraftWorkflowApi(Resource):

         try:
             environment_variables_list = args.get("environment_variables") or []
-            environment_variables = [factory.build_variable_from_mapping(obj) for obj in environment_variables_list]
+            environment_variables = [
+                variable_factory.build_variable_from_mapping(obj) for obj in environment_variables_list
+            ]
             conversation_variables_list = args.get("conversation_variables") or []
-            conversation_variables = [factory.build_variable_from_mapping(obj) for obj in conversation_variables_list]
+            conversation_variables = [
+                variable_factory.build_variable_from_mapping(obj) for obj in conversation_variables_list
+            ]
             workflow = workflow_service.sync_draft_workflow(
                 app_model=app_model,
                 graph=args["graph"],
@@ -166,6 +170,8 @@ class AdvancedChatDraftWorkflowRunApi(Resource):
         parser.add_argument("query", type=str, required=True, location="json", default="")
         parser.add_argument("files", type=list, location="json")
         parser.add_argument("conversation_id", type=uuid_value, location="json")
+        parser.add_argument("parent_message_id", type=uuid_value, required=False, location="json")
+
         args = parser.parse_args()

         try:
@@ -271,17 +277,15 @@ class DraftWorkflowRunApi(Resource):
         parser.add_argument("files", type=list, required=False, location="json")
         args = parser.parse_args()

-        try:
-            response = AppGenerateService.generate(
-                app_model=app_model, user=current_user, args=args, invoke_from=InvokeFrom.DEBUGGER, streaming=True
-            )
+        response = AppGenerateService.generate(
+            app_model=app_model,
+            user=current_user,
+            args=args,
+            invoke_from=InvokeFrom.DEBUGGER,
+            streaming=True,
+        )

-            return helper.compact_generate_response(response)
-        except (ValueError, AppInvokeQuotaExceededError) as e:
-            raise e
-        except Exception as e:
-            logging.exception("internal server error.")
-            raise InternalServerError()
+        return helper.compact_generate_response(response)


 class WorkflowTaskStopApi(Resource):
@@ -465,6 +469,6 @@ api.add_resource(
 api.add_resource(PublishedWorkflowApi, "/apps/<uuid:app_id>/workflows/publish")
 api.add_resource(DefaultBlockConfigsApi, "/apps/<uuid:app_id>/workflows/default-workflow-block-configs")
 api.add_resource(
-    DefaultBlockConfigApi, "/apps/<uuid:app_id>/workflows/default-workflow-block-configs" "/<string:block_type>"
+    DefaultBlockConfigApi, "/apps/<uuid:app_id>/workflows/default-workflow-block-configs/<string:block_type>"
 )
 api.add_resource(ConvertToWorkflowApi, "/apps/<uuid:app_id>/convert-to-workflow")
@@ -7,7 +7,8 @@ from controllers.console.setup import setup_required
 from controllers.console.wraps import account_initialization_required
 from fields.workflow_app_log_fields import workflow_app_log_pagination_fields
 from libs.login import login_required
-from models.model import App, AppMode
+from models import App
+from models.model import AppMode
 from services.workflow_app_service import WorkflowAppService

@@ -13,7 +13,8 @@ from fields.workflow_run_fields import (
 )
 from libs.helper import uuid_value
 from libs.login import login_required
-from models.model import App, AppMode
+from models import App
+from models.model import AppMode
 from services.workflow_run_service import WorkflowRunService

@@ -11,10 +11,10 @@ from controllers.console.app.wraps import get_app_model
 from controllers.console.setup import setup_required
 from controllers.console.wraps import account_initialization_required
 from extensions.ext_database import db
-from libs.helper import datetime_string
+from libs.helper import DatetimeString
 from libs.login import login_required
+from models.enums import WorkflowRunTriggeredFrom
 from models.model import AppMode
-from models.workflow import WorkflowRunTriggeredFrom


 class WorkflowDailyRunsStatistic(Resource):
@@ -26,16 +26,18 @@ class WorkflowDailyRunsStatistic(Resource):
         account = current_user

         parser = reqparse.RequestParser()
-        parser.add_argument("start", type=datetime_string("%Y-%m-%d %H:%M"), location="args")
-        parser.add_argument("end", type=datetime_string("%Y-%m-%d %H:%M"), location="args")
+        parser.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
+        parser.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
         args = parser.parse_args()

-        sql_query = """
-        SELECT date(DATE_TRUNC('day', created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date, count(id) AS runs
-            FROM workflow_runs
-            WHERE app_id = :app_id
-            AND triggered_from = :triggered_from
-        """
+        sql_query = """SELECT
+    DATE(DATE_TRUNC('day', created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date,
+    COUNT(id) AS runs
+FROM
+    workflow_runs
+WHERE
+    app_id = :app_id
+    AND triggered_from = :triggered_from"""
         arg_dict = {
             "tz": account.timezone,
             "app_id": app_model.id,
@@ -52,7 +54,7 @@ class WorkflowDailyRunsStatistic(Resource):
             start_datetime_timezone = timezone.localize(start_datetime)
             start_datetime_utc = start_datetime_timezone.astimezone(utc_timezone)

-            sql_query += " and created_at >= :start"
+            sql_query += " AND created_at >= :start"
             arg_dict["start"] = start_datetime_utc

         if args["end"]:
@@ -62,10 +64,10 @@ class WorkflowDailyRunsStatistic(Resource):
             end_datetime_timezone = timezone.localize(end_datetime)
             end_datetime_utc = end_datetime_timezone.astimezone(utc_timezone)

-            sql_query += " and created_at < :end"
+            sql_query += " AND created_at < :end"
             arg_dict["end"] = end_datetime_utc

-        sql_query += " GROUP BY date order by date"
+        sql_query += " GROUP BY date ORDER BY date"

         response_data = []
@@ -86,16 +88,18 @@ class WorkflowDailyTerminalsStatistic(Resource):
         account = current_user

         parser = reqparse.RequestParser()
-        parser.add_argument("start", type=datetime_string("%Y-%m-%d %H:%M"), location="args")
-        parser.add_argument("end", type=datetime_string("%Y-%m-%d %H:%M"), location="args")
+        parser.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
+        parser.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
         args = parser.parse_args()

-        sql_query = """
-        SELECT date(DATE_TRUNC('day', created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date, count(distinct workflow_runs.created_by) AS terminal_count
-            FROM workflow_runs
-            WHERE app_id = :app_id
-            AND triggered_from = :triggered_from
-        """
+        sql_query = """SELECT
+    DATE(DATE_TRUNC('day', created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date,
+    COUNT(DISTINCT workflow_runs.created_by) AS terminal_count
+FROM
+    workflow_runs
+WHERE
+    app_id = :app_id
+    AND triggered_from = :triggered_from"""
         arg_dict = {
             "tz": account.timezone,
             "app_id": app_model.id,
@@ -112,7 +116,7 @@ class WorkflowDailyTerminalsStatistic(Resource):
             start_datetime_timezone = timezone.localize(start_datetime)
             start_datetime_utc = start_datetime_timezone.astimezone(utc_timezone)

-            sql_query += " and created_at >= :start"
+            sql_query += " AND created_at >= :start"
             arg_dict["start"] = start_datetime_utc

         if args["end"]:
@@ -122,10 +126,10 @@ class WorkflowDailyTerminalsStatistic(Resource):
             end_datetime_timezone = timezone.localize(end_datetime)
             end_datetime_utc = end_datetime_timezone.astimezone(utc_timezone)

-            sql_query += " and created_at < :end"
+            sql_query += " AND created_at < :end"
             arg_dict["end"] = end_datetime_utc

-        sql_query += " GROUP BY date order by date"
+        sql_query += " GROUP BY date ORDER BY date"

         response_data = []
@@ -146,18 +150,18 @@ class WorkflowDailyTokenCostStatistic(Resource):
|
||||
account = current_user
|
||||
|
||||
parser = reqparse.RequestParser()
|
||||
parser.add_argument("start", type=datetime_string("%Y-%m-%d %H:%M"), location="args")
|
||||
parser.add_argument("end", type=datetime_string("%Y-%m-%d %H:%M"), location="args")
|
||||
parser.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
|
||||
parser.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
|
||||
args = parser.parse_args()
|
||||
|
||||
sql_query = """
|
||||
SELECT
|
||||
date(DATE_TRUNC('day', created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date,
|
||||
SUM(workflow_runs.total_tokens) as token_count
|
||||
FROM workflow_runs
|
||||
WHERE app_id = :app_id
|
||||
AND triggered_from = :triggered_from
|
||||
"""
|
||||
sql_query = """SELECT
|
||||
DATE(DATE_TRUNC('day', created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date,
|
||||
SUM(workflow_runs.total_tokens) AS token_count
|
||||
FROM
|
||||
workflow_runs
|
||||
WHERE
|
||||
app_id = :app_id
|
||||
AND triggered_from = :triggered_from"""
|
||||
arg_dict = {
|
||||
"tz": account.timezone,
|
||||
"app_id": app_model.id,
|
||||
@@ -174,7 +178,7 @@ class WorkflowDailyTokenCostStatistic(Resource):
|
||||
start_datetime_timezone = timezone.localize(start_datetime)
|
||||
start_datetime_utc = start_datetime_timezone.astimezone(utc_timezone)
|
||||
|
||||
sql_query += " and created_at >= :start"
|
||||
sql_query += " AND created_at >= :start"
|
||||
arg_dict["start"] = start_datetime_utc
|
||||
|
||||
if args["end"]:
|
||||
@@ -184,10 +188,10 @@ class WorkflowDailyTokenCostStatistic(Resource):
|
||||
end_datetime_timezone = timezone.localize(end_datetime)
|
||||
end_datetime_utc = end_datetime_timezone.astimezone(utc_timezone)
|
||||
|
||||
sql_query += " and created_at < :end"
|
||||
sql_query += " AND created_at < :end"
|
||||
arg_dict["end"] = end_datetime_utc
|
||||
|
||||
sql_query += " GROUP BY date order by date"
|
||||
sql_query += " GROUP BY date ORDER BY date"
|
||||
|
||||
response_data = []
|
||||
|
||||
@@ -213,27 +217,31 @@ class WorkflowAverageAppInteractionStatistic(Resource):
|
||||
account = current_user
|
||||
|
||||
parser = reqparse.RequestParser()
|
||||
parser.add_argument("start", type=datetime_string("%Y-%m-%d %H:%M"), location="args")
|
||||
parser.add_argument("end", type=datetime_string("%Y-%m-%d %H:%M"), location="args")
|
||||
parser.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
|
||||
parser.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
|
||||
args = parser.parse_args()
|
||||
|
||||
sql_query = """
|
||||
SELECT
|
||||
AVG(sub.interactions) as interactions,
|
||||
sub.date
|
||||
FROM
|
||||
(SELECT
|
||||
date(DATE_TRUNC('day', c.created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date,
|
||||
c.created_by,
|
||||
COUNT(c.id) AS interactions
|
||||
FROM workflow_runs c
|
||||
WHERE c.app_id = :app_id
|
||||
AND c.triggered_from = :triggered_from
|
||||
{{start}}
|
||||
{{end}}
|
||||
GROUP BY date, c.created_by) sub
|
||||
GROUP BY sub.date
|
||||
"""
|
||||
sql_query = """SELECT
|
||||
AVG(sub.interactions) AS interactions,
|
||||
sub.date
|
||||
FROM
|
||||
(
|
||||
SELECT
|
||||
DATE(DATE_TRUNC('day', c.created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date,
|
||||
c.created_by,
|
||||
COUNT(c.id) AS interactions
|
||||
FROM
|
||||
workflow_runs c
|
||||
WHERE
|
||||
c.app_id = :app_id
|
||||
AND c.triggered_from = :triggered_from
|
||||
{{start}}
|
||||
{{end}}
|
||||
GROUP BY
|
||||
date, c.created_by
|
||||
) sub
|
||||
GROUP BY
|
||||
sub.date"""
|
||||
arg_dict = {
|
||||
"tz": account.timezone,
|
||||
"app_id": app_model.id,
|
||||
@@ -262,7 +270,7 @@ class WorkflowAverageAppInteractionStatistic(Resource):
|
||||
end_datetime_timezone = timezone.localize(end_datetime)
|
||||
end_datetime_utc = end_datetime_timezone.astimezone(utc_timezone)
|
||||
|
||||
sql_query = sql_query.replace("{{end}}", " and c.created_at < :end")
|
||||
sql_query = sql_query.replace("{{end}}", " AND c.created_at < :end")
|
||||
arg_dict["end"] = end_datetime_utc
|
||||
else:
|
||||
sql_query = sql_query.replace("{{end}}", "")
|
||||
|
||||
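All four statistics resources above converge on the same pattern: a base SELECT with uppercase SQL keywords, optional range predicates appended as ` AND created_at ...`, and every user-supplied value bound as a named parameter rather than interpolated into the string. A minimal sketch of that pattern outside Flask, assuming SQLAlchemy 1.4+, pytz, and the project's `db` handle; the helper name `daily_runs` is illustrative, not from the diff:

from datetime import datetime

import pytz
from sqlalchemy import text

from extensions.ext_database import db


def daily_runs(app_id: str, tz: str, triggered_from: str, start: str | None = None) -> list[dict]:
    sql = """SELECT
    DATE(DATE_TRUNC('day', created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date,
    COUNT(id) AS runs
FROM
    workflow_runs
WHERE
    app_id = :app_id
    AND triggered_from = :triggered_from"""
    args = {"tz": tz, "app_id": app_id, "triggered_from": triggered_from}

    if start:
        # Interpret the naive timestamp in the account's timezone, then shift to UTC.
        local_tz = pytz.timezone(tz)
        start_utc = local_tz.localize(datetime.strptime(start, "%Y-%m-%d %H:%M")).astimezone(pytz.utc)
        sql += " AND created_at >= :start"
        args["start"] = start_utc

    sql += " GROUP BY date ORDER BY date"
    with db.engine.begin() as conn:
        # Named placeholders are bound here, so no user input reaches the SQL text.
        return [dict(row._mapping) for row in conn.execute(text(sql), args)]
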
@@ -5,7 +5,8 @@ from typing import Optional, Union
 from controllers.console.app.error import AppNotFoundError
 from extensions.ext_database import db
 from libs.login import current_user
-from models.model import App, AppMode
+from models import App
+from models.model import AppMode


 def get_app_model(view: Optional[Callable] = None, *, mode: Union[AppMode, list[AppMode]] = None):

@@ -1,17 +1,15 @@
 import base64
 import datetime
 import secrets

 from flask import request
 from flask_restful import Resource, reqparse

 from constants.languages import supported_language
 from controllers.console import api
 from controllers.console.error import AlreadyActivateError
 from extensions.ext_database import db
-from libs.helper import email, str_len, timezone
-from libs.password import hash_password, valid_password
-from models.account import AccountStatus
-from services.account_service import RegisterService
+from libs.helper import StrLen, email, extract_remote_ip, timezone
+from models.account import AccountStatus, Tenant
+from services.account_service import AccountService, RegisterService


 class ActivateCheckApi(Resource):
@@ -27,8 +25,18 @@ class ActivateCheckApi(Resource):
         token = args["token"]

         invitation = RegisterService.get_invitation_if_token_valid(workspaceId, reg_email, token)
-
-        return {"is_valid": invitation is not None, "workspace_name": invitation["tenant"].name if invitation else None}
+        if invitation:
+            data = invitation.get("data", {})
+            tenant: Tenant = invitation.get("tenant", None)
+            workspace_name = tenant.name if tenant else None
+            workspace_id = tenant.id if tenant else None
+            invitee_email = data.get("email") if data else None
+            return {
+                "is_valid": invitation is not None,
+                "data": {"workspace_name": workspace_name, "workspace_id": workspace_id, "email": invitee_email},
+            }
+        else:
+            return {"is_valid": False}


 class ActivateApi(Resource):
@@ -37,8 +45,7 @@ class ActivateApi(Resource):
         parser.add_argument("workspace_id", type=str, required=False, nullable=True, location="json")
         parser.add_argument("email", type=email, required=False, nullable=True, location="json")
         parser.add_argument("token", type=str, required=True, nullable=False, location="json")
-        parser.add_argument("name", type=str_len(30), required=True, nullable=False, location="json")
-        parser.add_argument("password", type=valid_password, required=True, nullable=False, location="json")
+        parser.add_argument("name", type=StrLen(30), required=True, nullable=False, location="json")
         parser.add_argument(
             "interface_language", type=supported_language, required=True, nullable=False, location="json"
         )
@@ -54,15 +61,6 @@ class ActivateApi(Resource):
         account = invitation["account"]
         account.name = args["name"]

-        # generate password salt
-        salt = secrets.token_bytes(16)
-        base64_salt = base64.b64encode(salt).decode()
-
-        # encrypt password with salt
-        password_hashed = hash_password(args["password"], salt)
-        base64_password_hashed = base64.b64encode(password_hashed).decode()
-        account.password = base64_password_hashed
-        account.password_salt = base64_salt
         account.interface_language = args["interface_language"]
         account.timezone = args["timezone"]
         account.interface_theme = "light"
@@ -70,7 +68,9 @@ class ActivateApi(Resource):
         account.initialized_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
         db.session.commit()

-        return {"result": "success"}
+        token_pair = AccountService.login(account, ip_address=extract_remote_ip(request))
+
+        return {"result": "success", "data": token_pair.model_dump()}


 api.add_resource(ActivateCheckApi, "/activate/check")

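ActivateApi now ends by logging the account in, so the client receives credentials immediately instead of a bare success flag. The token pair is serialized with `model_dump()`, which implies a Pydantic v2 model along these lines; the field names are inferred from the OAuth redirect later in this diff, and the class itself is a sketch, not the repository's definition:

from pydantic import BaseModel


class TokenPair(BaseModel):
    access_token: str
    refresh_token: str


# token_pair.model_dump() then yields {"access_token": "...", "refresh_token": "..."}.
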
@@ -27,5 +27,29 @@ class InvalidTokenError(BaseHTTPException):

 class PasswordResetRateLimitExceededError(BaseHTTPException):
     error_code = "password_reset_rate_limit_exceeded"
-    description = "Password reset rate limit exceeded. Try again later."
+    description = "Too many password reset emails have been sent. Please try again in 1 minute."
     code = 429


+class EmailCodeError(BaseHTTPException):
+    error_code = "email_code_error"
+    description = "Email code is invalid or expired."
+    code = 400
+
+
+class EmailOrPasswordMismatchError(BaseHTTPException):
+    error_code = "email_or_password_mismatch"
+    description = "The email or password is mismatched."
+    code = 400
+
+
+class EmailPasswordLoginLimitError(BaseHTTPException):
+    error_code = "email_code_login_limit"
+    description = "Too many incorrect password attempts. Please try again later."
+    code = 429
+
+
+class EmailCodeLoginRateLimitExceededError(BaseHTTPException):
+    error_code = "email_code_login_rate_limit_exceeded"
+    description = "Too many login emails have been sent. Please try again in 5 minutes."
+    code = 429

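Each new error class above is purely declarative: an application `error_code`, a user-facing `description`, and an HTTP status `code`. A stand-in for the base class, assuming it extends werkzeug's `HTTPException` (the real `BaseHTTPException` lives elsewhere in the codebase):

from werkzeug.exceptions import HTTPException


class BaseHTTPException(HTTPException):
    # Subclasses override error_code, description, and code.
    error_code: str = "unknown"


class EmailCodeError(BaseHTTPException):
    error_code = "email_code_error"
    description = "Email code is invalid or expired."
    code = 400


# Raised inside a flask_restful Resource, this surfaces as an HTTP 400 whose
# body carries the description; flask_restful turns HTTPException subclasses
# into JSON error responses automatically.
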
@@ -1,65 +1,82 @@
 import base64
 import logging
 import secrets

 from flask import request
 from flask_restful import Resource, reqparse

 from constants.languages import languages
 from controllers.console import api
 from controllers.console.auth.error import (
     EmailCodeError,
     InvalidEmailError,
     InvalidTokenError,
     PasswordMismatchError,
     PasswordResetRateLimitExceededError,
 )
 from controllers.console.error import EmailSendIpLimitError, NotAllowedRegister
 from controllers.console.setup import setup_required
 from events.tenant_event import tenant_was_created
 from extensions.ext_database import db
-from libs.helper import email as email_validate
+from libs.helper import email, extract_remote_ip
 from libs.password import hash_password, valid_password
 from models.account import Account
-from services.account_service import AccountService
-from services.errors.account import RateLimitExceededError
+from services.account_service import AccountService, TenantService
+from services.errors.workspace import WorkSpaceNotAllowedCreateError
+from services.feature_service import FeatureService


 class ForgotPasswordSendEmailApi(Resource):
     @setup_required
     def post(self):
         parser = reqparse.RequestParser()
-        parser.add_argument("email", type=str, required=True, location="json")
+        parser.add_argument("email", type=email, required=True, location="json")
+        parser.add_argument("language", type=str, required=False, location="json")
         args = parser.parse_args()

-        email = args["email"]
-
-        if not email_validate(email):
-            raise InvalidEmailError()
-
-        account = Account.query.filter_by(email=email).first()
-
-        if account:
-            try:
-                AccountService.send_reset_password_email(account=account)
-            except RateLimitExceededError:
-                logging.warning(f"Rate limit exceeded for email: {account.email}")
-                raise PasswordResetRateLimitExceededError()
-        else:
-            # Return success to avoid revealing email registration status
-            logging.warning(f"Attempt to reset password for unregistered email: {email}")
-
-        return {"result": "success"}
+        ip_address = extract_remote_ip(request)
+        if AccountService.is_email_send_ip_limit(ip_address):
+            raise EmailSendIpLimitError()
+
+        if args["language"] is not None and args["language"] == "zh-Hans":
+            language = "zh-Hans"
+        else:
+            language = "en-US"
+
+        account = Account.query.filter_by(email=args["email"]).first()
+        token = None
+        if account is None:
+            if FeatureService.get_system_features().is_allow_register:
+                token = AccountService.send_reset_password_email(email=args["email"], language=language)
+                return {"result": "fail", "data": token, "code": "account_not_found"}
+            else:
+                raise NotAllowedRegister()
+        else:
+            token = AccountService.send_reset_password_email(account=account, email=args["email"], language=language)
+
+        return {"result": "success", "data": token}


 class ForgotPasswordCheckApi(Resource):
     @setup_required
     def post(self):
         parser = reqparse.RequestParser()
+        parser.add_argument("email", type=str, required=True, location="json")
+        parser.add_argument("code", type=str, required=True, location="json")
         parser.add_argument("token", type=str, required=True, nullable=False, location="json")
         args = parser.parse_args()
-        token = args["token"]

-        reset_data = AccountService.get_reset_password_data(token)
+        user_email = args["email"]

-        if reset_data is None:
-            return {"is_valid": False, "email": None}
-        return {"is_valid": True, "email": reset_data.get("email")}
+        token_data = AccountService.get_reset_password_data(args["token"])
+        if token_data is None:
+            raise InvalidTokenError()
+
+        if user_email != token_data.get("email"):
+            raise InvalidEmailError()
+
+        if args["code"] != token_data.get("code"):
+            raise EmailCodeError()
+
+        return {"is_valid": True, "email": token_data.get("email")}


 class ForgotPasswordResetApi(Resource):
@@ -92,9 +109,26 @@ class ForgotPasswordResetApi(Resource):
         base64_password_hashed = base64.b64encode(password_hashed).decode()

         account = Account.query.filter_by(email=reset_data.get("email")).first()
-        account.password = base64_password_hashed
-        account.password_salt = base64_salt
-        db.session.commit()
+        if account:
+            account.password = base64_password_hashed
+            account.password_salt = base64_salt
+            db.session.commit()
+            tenant = TenantService.get_join_tenants(account)
+            if not tenant and not FeatureService.get_system_features().is_allow_create_workspace:
+                tenant = TenantService.create_tenant(f"{account.name}'s Workspace")
+                TenantService.create_tenant_member(tenant, account, role="owner")
+                account.current_tenant = tenant
+                tenant_was_created.send(tenant)
+        else:
+            try:
+                account = AccountService.create_account_and_tenant(
+                    email=reset_data.get("email"),
+                    name=reset_data.get("email"),
+                    password=password_confirm,
+                    interface_language=languages[0],
+                )
+            except WorkSpaceNotAllowedCreateError:
+                pass

         return {"result": "success"}

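Taken together, the reworked forgot-password flow is a three-step exchange: request a code, validate it, then reset. A rough client walkthrough, assuming the resources are mounted at `/forgot-password` and `/forgot-password/validity` under the console API prefix — those paths are not shown in this hunk and are assumptions:

import requests

BASE = "http://localhost:5001/console/api"  # assumed console API prefix

# 1. Request a reset email; the response's "data" token binds the email to the code.
r = requests.post(f"{BASE}/forgot-password", json={"email": "user@example.com", "language": "en-US"})
token = r.json()["data"]

# 2. Validate the emailed code against that token; mismatches raise
#    InvalidEmailError / EmailCodeError / InvalidTokenError server-side.
r = requests.post(
    f"{BASE}/forgot-password/validity",
    json={"email": "user@example.com", "code": "123456", "token": token},
)
assert r.json().get("is_valid") is True

# 3. Submit the new password to the reset resource (payload shape assumed).
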
@@ -5,12 +5,29 @@ from flask import request
 from flask_restful import Resource, reqparse

 import services
+from constants.languages import languages
 from controllers.console import api
+from controllers.console.auth.error import (
+    EmailCodeError,
+    EmailOrPasswordMismatchError,
+    EmailPasswordLoginLimitError,
+    InvalidEmailError,
+    InvalidTokenError,
+)
+from controllers.console.error import (
+    AccountBannedError,
+    EmailSendIpLimitError,
+    NotAllowedCreateWorkspace,
+    NotAllowedRegister,
+)
 from controllers.console.setup import setup_required
-from libs.helper import email, get_remote_ip
+from events.tenant_event import tenant_was_created
+from libs.helper import email, extract_remote_ip
 from libs.password import valid_password
 from models.account import Account
-from services.account_service import AccountService, TenantService
+from services.account_service import AccountService, RegisterService, TenantService
+from services.errors.workspace import WorkSpaceNotAllowedCreateError
+from services.feature_service import FeatureService


 class LoginApi(Resource):
@@ -23,15 +40,43 @@ class LoginApi(Resource):
         parser.add_argument("email", type=email, required=True, location="json")
         parser.add_argument("password", type=valid_password, required=True, location="json")
         parser.add_argument("remember_me", type=bool, required=False, default=False, location="json")
+        parser.add_argument("invite_token", type=str, required=False, default=None, location="json")
+        parser.add_argument("language", type=str, required=False, default="en-US", location="json")
         args = parser.parse_args()

-        # todo: Verify the recaptcha
+        is_login_error_rate_limit = AccountService.is_login_error_rate_limit(args["email"])
+        if is_login_error_rate_limit:
+            raise EmailPasswordLoginLimitError()
+
+        invitation = args["invite_token"]
+        if invitation:
+            invitation = RegisterService.get_invitation_if_token_valid(None, args["email"], invitation)
+
+        if args["language"] is not None and args["language"] == "zh-Hans":
+            language = "zh-Hans"
+        else:
+            language = "en-US"

         try:
-            account = AccountService.authenticate(args["email"], args["password"])
-        except services.errors.account.AccountLoginError as e:
-            return {"code": "unauthorized", "message": str(e)}, 401
+            if invitation:
+                data = invitation.get("data", {})
+                invitee_email = data.get("email") if data else None
+                if invitee_email != args["email"]:
+                    raise InvalidEmailError()
+                account = AccountService.authenticate(args["email"], args["password"], args["invite_token"])
+            else:
+                account = AccountService.authenticate(args["email"], args["password"])
+        except services.errors.account.AccountLoginError:
+            raise AccountBannedError()
+        except services.errors.account.AccountPasswordError:
+            AccountService.add_login_error_rate_limit(args["email"])
+            raise EmailOrPasswordMismatchError()
+        except services.errors.account.AccountNotFoundError:
+            if FeatureService.get_system_features().is_allow_register:
+                token = AccountService.send_reset_password_email(email=args["email"], language=language)
+                return {"result": "fail", "data": token, "code": "account_not_found"}
+            else:
+                raise NotAllowedRegister()
         # SELF_HOSTED only have one workspace
         tenants = TenantService.get_join_tenants(account)
         if len(tenants) == 0:
@@ -40,71 +85,138 @@ class LoginApi(Resource):
                 "data": "workspace not found, please contact system admin to invite you to join in a workspace",
             }

-        token = AccountService.login(account, ip_address=get_remote_ip(request))
-
-        return {"result": "success", "data": token}
+        token_pair = AccountService.login(account=account, ip_address=extract_remote_ip(request))
+        AccountService.reset_login_error_rate_limit(args["email"])
+        return {"result": "success", "data": token_pair.model_dump()}


 class LogoutApi(Resource):
     @setup_required
     def get(self):
         account = cast(Account, flask_login.current_user)
-        token = request.headers.get("Authorization", "").split(" ")[1]
-        AccountService.logout(account=account, token=token)
+        if isinstance(account, flask_login.AnonymousUserMixin):
+            return {"result": "success"}
+        AccountService.logout(account=account)
         flask_login.logout_user()
         return {"result": "success"}


-class ResetPasswordApi(Resource):
+class ResetPasswordSendEmailApi(Resource):
     @setup_required
-    def get(self):
-        # parser = reqparse.RequestParser()
-        # parser.add_argument('email', type=email, required=True, location='json')
-        # args = parser.parse_args()
-
-        # import mailchimp_transactional as MailchimpTransactional
-        # from mailchimp_transactional.api_client import ApiClientError
-
-        # account = {'email': args['email']}
-        # account = AccountService.get_by_email(args['email'])
-        # if account is None:
-        #     raise ValueError('Email not found')
-        # new_password = AccountService.generate_password()
-        # AccountService.update_password(account, new_password)
-
-        # todo: Send email
-        # MAILCHIMP_API_KEY = dify_config.MAILCHIMP_TRANSACTIONAL_API_KEY
-        # mailchimp = MailchimpTransactional(MAILCHIMP_API_KEY)
-
-        # message = {
-        #     'from_email': 'noreply@example.com',
-        #     'to': [{'email': account['email']}],
-        #     'subject': 'Reset your Dify password',
-        #     'html': """
-        #         <p>Dear User,</p>
-        #         <p>The Dify team has generated a new password for you, details as follows:</p>
-        #         <p><strong>{new_password}</strong></p>
-        #         <p>Please change your password to log in as soon as possible.</p>
-        #         <p>Regards,</p>
-        #         <p>The Dify Team</p>
-        #     """
-        # }
-
-        # response = mailchimp.messages.send({
-        #     'message': message,
-        #     # required for transactional email
-        #     ' settings': {
-        #         'sandbox_mode': dify_config.MAILCHIMP_SANDBOX_MODE,
-        #     },
-        # })
-
-        # Check if MSG was sent
-        # if response.status_code != 200:
-        #     # handle error
-        #     pass
-
-        return {"result": "success"}
+    def post(self):
+        parser = reqparse.RequestParser()
+        parser.add_argument("email", type=email, required=True, location="json")
+        parser.add_argument("language", type=str, required=False, location="json")
+        args = parser.parse_args()
+
+        if args["language"] is not None and args["language"] == "zh-Hans":
+            language = "zh-Hans"
+        else:
+            language = "en-US"
+
+        account = AccountService.get_user_through_email(args["email"])
+        if account is None:
+            if FeatureService.get_system_features().is_allow_register:
+                token = AccountService.send_reset_password_email(email=args["email"], language=language)
+            else:
+                raise NotAllowedRegister()
+        else:
+            token = AccountService.send_reset_password_email(account=account, language=language)
+
+        return {"result": "success", "data": token}
+
+
+class EmailCodeLoginSendEmailApi(Resource):
+    @setup_required
+    def post(self):
+        parser = reqparse.RequestParser()
+        parser.add_argument("email", type=email, required=True, location="json")
+        parser.add_argument("language", type=str, required=False, location="json")
+        args = parser.parse_args()
+
+        ip_address = extract_remote_ip(request)
+        if AccountService.is_email_send_ip_limit(ip_address):
+            raise EmailSendIpLimitError()
+
+        if args["language"] is not None and args["language"] == "zh-Hans":
+            language = "zh-Hans"
+        else:
+            language = "en-US"
+
+        account = AccountService.get_user_through_email(args["email"])
+        if account is None:
+            if FeatureService.get_system_features().is_allow_register:
+                token = AccountService.send_email_code_login_email(email=args["email"], language=language)
+            else:
+                raise NotAllowedRegister()
+        else:
+            token = AccountService.send_email_code_login_email(account=account, language=language)
+
+        return {"result": "success", "data": token}
+
+
+class EmailCodeLoginApi(Resource):
+    @setup_required
+    def post(self):
+        parser = reqparse.RequestParser()
+        parser.add_argument("email", type=str, required=True, location="json")
+        parser.add_argument("code", type=str, required=True, location="json")
+        parser.add_argument("token", type=str, required=True, location="json")
+        args = parser.parse_args()
+
+        user_email = args["email"]
+
+        token_data = AccountService.get_email_code_login_data(args["token"])
+        if token_data is None:
+            raise InvalidTokenError()
+
+        if token_data["email"] != args["email"]:
+            raise InvalidEmailError()
+
+        if token_data["code"] != args["code"]:
+            raise EmailCodeError()
+
+        AccountService.revoke_email_code_login_token(args["token"])
+        account = AccountService.get_user_through_email(user_email)
+        if account:
+            tenant = TenantService.get_join_tenants(account)
+            if not tenant:
+                if not FeatureService.get_system_features().is_allow_create_workspace:
+                    raise NotAllowedCreateWorkspace()
+                else:
+                    tenant = TenantService.create_tenant(f"{account.name}'s Workspace")
+                    TenantService.create_tenant_member(tenant, account, role="owner")
+                    account.current_tenant = tenant
+                    tenant_was_created.send(tenant)
+
+        if account is None:
+            try:
+                account = AccountService.create_account_and_tenant(
+                    email=user_email, name=user_email, interface_language=languages[0]
+                )
+            except WorkSpaceNotAllowedCreateError:
+                return NotAllowedCreateWorkspace()
+        token_pair = AccountService.login(account, ip_address=extract_remote_ip(request))
+        AccountService.reset_login_error_rate_limit(args["email"])
+        return {"result": "success", "data": token_pair.model_dump()}
+
+
+class RefreshTokenApi(Resource):
+    def post(self):
+        parser = reqparse.RequestParser()
+        parser.add_argument("refresh_token", type=str, required=True, location="json")
+        args = parser.parse_args()
+
+        try:
+            new_token_pair = AccountService.refresh_token(args["refresh_token"])
+            return {"result": "success", "data": new_token_pair.model_dump()}
+        except Exception as e:
+            return {"result": "fail", "data": str(e)}, 401


 api.add_resource(LoginApi, "/login")
 api.add_resource(LogoutApi, "/logout")
+api.add_resource(EmailCodeLoginSendEmailApi, "/email-code-login")
+api.add_resource(EmailCodeLoginApi, "/email-code-login/validity")
+api.add_resource(ResetPasswordSendEmailApi, "/reset-password")
+api.add_resource(RefreshTokenApi, "/refresh-token")

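The login rewrite replaces the single console token with an access/refresh pair and adds `/refresh-token` for rotation. A sketch of the client-side lifecycle, assuming the console API is served under `/console/api` (the prefix is not shown in this hunk):

import requests

BASE = "http://localhost:5001/console/api"  # assumed prefix

# Login now returns {"result": "success", "data": {"access_token": ..., "refresh_token": ...}}.
login = requests.post(f"{BASE}/login", json={"email": "user@example.com", "password": "secret"})
tokens = login.json()["data"]

# Authenticated calls carry the access token.
headers = {"Authorization": f"Bearer {tokens['access_token']}"}

# When the access token expires, exchange the refresh token for a fresh pair;
# a failed refresh also comes back as 401 with {"result": "fail"}.
refreshed = requests.post(f"{BASE}/refresh-token", json={"refresh_token": tokens["refresh_token"]})
if refreshed.status_code == 200:
    tokens = refreshed.json()["data"]
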
@@ -5,14 +5,20 @@ from typing import Optional
 import requests
 from flask import current_app, redirect, request
 from flask_restful import Resource
+from werkzeug.exceptions import Unauthorized

 from configs import dify_config
 from constants.languages import languages
+from events.tenant_event import tenant_was_created
 from extensions.ext_database import db
-from libs.helper import get_remote_ip
+from libs.helper import extract_remote_ip
 from libs.oauth import GitHubOAuth, GoogleOAuth, OAuthUserInfo
-from models.account import Account, AccountStatus
+from models import Account
+from models.account import AccountStatus
 from services.account_service import AccountService, RegisterService, TenantService
+from services.errors.account import AccountNotFoundError
+from services.errors.workspace import WorkSpaceNotAllowedCreateError, WorkSpaceNotFoundError
+from services.feature_service import FeatureService

 from .. import api

@@ -42,6 +48,7 @@ def get_oauth_providers():

 class OAuthLogin(Resource):
     def get(self, provider: str):
+        invite_token = request.args.get("invite_token") or None
         OAUTH_PROVIDERS = get_oauth_providers()
         with current_app.app_context():
             oauth_provider = OAUTH_PROVIDERS.get(provider)
@@ -49,7 +56,7 @@ class OAuthLogin(Resource):
         if not oauth_provider:
             return {"error": "Invalid provider"}, 400

-        auth_url = oauth_provider.get_authorization_url()
+        auth_url = oauth_provider.get_authorization_url(invite_token=invite_token)
         return redirect(auth_url)


@@ -62,6 +69,11 @@ class OAuthCallback(Resource):
            return {"error": "Invalid provider"}, 400

         code = request.args.get("code")
+        state = request.args.get("state")
+        invite_token = None
+        if state:
+            invite_token = state
+
         try:
             token = oauth_provider.get_access_token(code)
             user_info = oauth_provider.get_user_info(token)
@@ -69,21 +81,52 @@ class OAuthCallback(Resource):
             logging.exception(f"An error occurred during the OAuth process with {provider}: {e.response.text}")
             return {"error": "OAuth process failed"}, 400

-        account = _generate_account(provider, user_info)
+        if invite_token and RegisterService.is_valid_invite_token(invite_token):
+            invitation = RegisterService._get_invitation_by_token(token=invite_token)
+            if invitation:
+                invitation_email = invitation.get("email", None)
+                if invitation_email != user_info.email:
+                    return redirect(f"{dify_config.CONSOLE_WEB_URL}/signin?message=Invalid invitation token.")
+
+            return redirect(f"{dify_config.CONSOLE_WEB_URL}/signin/invite-settings?invite_token={invite_token}")
+
+        try:
+            account = _generate_account(provider, user_info)
+        except AccountNotFoundError:
+            return redirect(f"{dify_config.CONSOLE_WEB_URL}/signin?message=Account not found.")
+        except (WorkSpaceNotFoundError, WorkSpaceNotAllowedCreateError):
+            return redirect(
+                f"{dify_config.CONSOLE_WEB_URL}/signin"
+                "?message=Workspace not found, please contact system admin to invite you to join in a workspace."
+            )
+
         # Check account status
-        if account.status == AccountStatus.BANNED.value or account.status == AccountStatus.CLOSED.value:
-            return {"error": "Account is banned or closed."}, 403
+        if account.status == AccountStatus.BANNED.value:
+            return redirect(f"{dify_config.CONSOLE_WEB_URL}/signin?message=Account is banned.")

         if account.status == AccountStatus.PENDING.value:
             account.status = AccountStatus.ACTIVE.value
             account.initialized_at = datetime.now(timezone.utc).replace(tzinfo=None)
             db.session.commit()

-        TenantService.create_owner_tenant_if_not_exist(account)
+        try:
+            TenantService.create_owner_tenant_if_not_exist(account)
+        except Unauthorized:
+            return redirect(f"{dify_config.CONSOLE_WEB_URL}/signin?message=Workspace not found.")
+        except WorkSpaceNotAllowedCreateError:
+            return redirect(
+                f"{dify_config.CONSOLE_WEB_URL}/signin"
+                "?message=Workspace not found, please contact system admin to invite you to join in a workspace."
+            )

-        token = AccountService.login(account, ip_address=get_remote_ip(request))
+        token_pair = AccountService.login(
+            account=account,
+            ip_address=extract_remote_ip(request),
+        )

-        return redirect(f"{dify_config.CONSOLE_WEB_URL}?console_token={token}")
+        return redirect(
+            f"{dify_config.CONSOLE_WEB_URL}?access_token={token_pair.access_token}&refresh_token={token_pair.refresh_token}"
+        )


 def _get_account_by_openid_or_email(provider: str, user_info: OAuthUserInfo) -> Optional[Account]:
@@ -99,9 +142,21 @@ def _generate_account(provider: str, user_info: OAuthUserInfo):
     # Get account by openid or email.
     account = _get_account_by_openid_or_email(provider, user_info)

+    if account:
+        tenant = TenantService.get_join_tenants(account)
+        if not tenant:
+            if not FeatureService.get_system_features().is_allow_create_workspace:
+                raise WorkSpaceNotAllowedCreateError()
+            else:
+                tenant = TenantService.create_tenant(f"{account.name}'s Workspace")
+                TenantService.create_tenant_member(tenant, account, role="owner")
+                account.current_tenant = tenant
+                tenant_was_created.send(tenant)
+
     if not account:
         # Create account
-        account_name = user_info.name if user_info.name else "Dify"
+        if not FeatureService.get_system_features().is_allow_register:
+            raise AccountNotFoundError()
+        account_name = user_info.name or "Dify"
         account = RegisterService.register(
             email=user_info.email, name=account_name, password=None, open_id=user_info.id, provider=provider
         )

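The OAuth callback now hands both tokens to the web console in the redirect query string instead of a single `console_token`. Parsing that redirect on the receiving side is straightforward (a sketch; the host is illustrative):

from urllib.parse import parse_qs, urlparse

redirect_url = "https://cloud.example.com?access_token=aaa&refresh_token=bbb"  # shape from the redirect above
query = parse_qs(urlparse(redirect_url).query)
access_token = query["access_token"][0]
refresh_token = query["refresh_token"][0]
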
@@ -15,8 +15,7 @@ from core.rag.extractor.notion_extractor import NotionExtractor
 from extensions.ext_database import db
 from fields.data_source_fields import integrate_list_fields, integrate_notion_info_list_fields
 from libs.login import login_required
-from models.dataset import Document
-from models.source import DataSourceOauthBinding
+from models import DataSourceOauthBinding, Document
 from services.dataset_service import DatasetService, DocumentService
 from tasks.document_indexing_sync_task import document_indexing_sync_task

@@ -18,14 +18,14 @@ from core.model_runtime.entities.model_entities import ModelType
 from core.provider_manager import ProviderManager
 from core.rag.datasource.vdb.vector_type import VectorType
 from core.rag.extractor.entity.extract_setting import ExtractSetting
-from core.rag.retrieval.retrival_methods import RetrievalMethod
+from core.rag.retrieval.retrieval_methods import RetrievalMethod
 from extensions.ext_database import db
 from fields.app_fields import related_app_list
 from fields.dataset_fields import dataset_detail_fields, dataset_query_detail_fields
 from fields.document_fields import document_status_fields
 from libs.login import login_required
-from models.dataset import Dataset, DatasetPermissionEnum, Document, DocumentSegment
-from models.model import ApiToken, UploadFile
+from models import ApiToken, Dataset, Document, DocumentSegment, UploadFile
+from models.dataset import DatasetPermissionEnum
 from services.dataset_service import DatasetPermissionService, DatasetService, DocumentService


@@ -49,7 +49,7 @@ class DatasetListApi(Resource):
         page = request.args.get("page", default=1, type=int)
         limit = request.args.get("limit", default=20, type=int)
         ids = request.args.getlist("ids")
-        provider = request.args.get("provider", default="vendor")
+        # provider = request.args.get("provider", default="vendor")
         search = request.args.get("keyword", default=None, type=str)
         tag_ids = request.args.getlist("tag_ids")

@@ -57,7 +57,7 @@ class DatasetListApi(Resource):
             datasets, total = DatasetService.get_datasets_by_ids(ids, current_user.current_tenant_id)
         else:
             datasets, total = DatasetService.get_datasets(
-                page, limit, provider, current_user.current_tenant_id, current_user, search, tag_ids
+                page, limit, current_user.current_tenant_id, current_user, search, tag_ids
             )

         # check embedding setting
@@ -110,6 +110,26 @@ class DatasetListApi(Resource):
             nullable=True,
             help="Invalid indexing technique.",
         )
+        parser.add_argument(
+            "external_knowledge_api_id",
+            type=str,
+            nullable=True,
+            required=False,
+        )
+        parser.add_argument(
+            "provider",
+            type=str,
+            nullable=True,
+            choices=Dataset.PROVIDER_LIST,
+            required=False,
+            default="vendor",
+        )
+        parser.add_argument(
+            "external_knowledge_id",
+            type=str,
+            nullable=True,
+            required=False,
+        )
         args = parser.parse_args()

         # The role of the current user in the ta table must be admin, owner, or editor, or dataset_operator
@@ -122,6 +142,10 @@ class DatasetListApi(Resource):
                 name=args["name"],
                 indexing_technique=args["indexing_technique"],
                 account=current_user,
+                permission=DatasetPermissionEnum.ONLY_ME,
+                provider=args["provider"],
+                external_knowledge_api_id=args["external_knowledge_api_id"],
+                external_knowledge_id=args["external_knowledge_id"],
             )
         except services.errors.dataset.DatasetNameDuplicateError:
             raise DatasetNameDuplicateError()
@@ -210,6 +234,33 @@ class DatasetApi(Resource):
         )
         parser.add_argument("retrieval_model", type=dict, location="json", help="Invalid retrieval model.")
         parser.add_argument("partial_member_list", type=list, location="json", help="Invalid parent user list.")
+
+        parser.add_argument(
+            "external_retrieval_model",
+            type=dict,
+            required=False,
+            nullable=True,
+            location="json",
+            help="Invalid external retrieval model.",
+        )
+
+        parser.add_argument(
+            "external_knowledge_id",
+            type=str,
+            required=False,
+            nullable=True,
+            location="json",
+            help="Invalid external knowledge id.",
+        )
+
+        parser.add_argument(
+            "external_knowledge_api_id",
+            type=str,
+            required=False,
+            nullable=True,
+            location="json",
+            help="Invalid external knowledge api id.",
+        )
         args = parser.parse_args()
         data = request.get_json()

@@ -398,7 +449,7 @@ class DatasetIndexingEstimateApi(Resource):
             )
         except LLMBadRequestError:
             raise ProviderNotInitializeError(
-                "No Embedding Model available. Please configure a valid provider " "in the Settings -> Model Provider."
+                "No Embedding Model available. Please configure a valid provider in the Settings -> Model Provider."
             )
         except ProviderTokenNotInitError as ex:
             raise ProviderNotInitializeError(ex.description)
@@ -549,12 +600,7 @@ class DatasetApiBaseUrlApi(Resource):
     @login_required
     @account_initialization_required
     def get(self):
-        return {
-            "api_base_url": (
-                dify_config.SERVICE_API_URL if dify_config.SERVICE_API_URL else request.host_url.rstrip("/")
-            )
-            + "/v1"
-        }
+        return {"api_base_url": (dify_config.SERVICE_API_URL or request.host_url.rstrip("/")) + "/v1"}


 class DatasetRetrievalSettingApi(Resource):
@@ -567,10 +613,13 @@ class DatasetRetrievalSettingApi(Resource):
             case (
                 VectorType.MILVUS
                 | VectorType.RELYT
-                | VectorType.PGVECTOR
                 | VectorType.TIDB_VECTOR
                 | VectorType.CHROMA
                 | VectorType.TENCENT
+                | VectorType.PGVECTO_RS
+                | VectorType.BAIDU
+                | VectorType.VIKINGDB
+                | VectorType.UPSTASH
             ):
                 return {"retrieval_method": [RetrievalMethod.SEMANTIC_SEARCH.value]}
             case (
@@ -581,6 +630,7 @@ class DatasetRetrievalSettingApi(Resource):
                 | VectorType.MYSCALE
                 | VectorType.ORACLE
                 | VectorType.ELASTICSEARCH
+                | VectorType.PGVECTOR
             ):
                 return {
                     "retrieval_method": [
@@ -606,6 +656,9 @@ class DatasetRetrievalSettingMockApi(Resource):
                 | VectorType.CHROMA
                 | VectorType.TENCENT
                 | VectorType.PGVECTO_RS
+                | VectorType.BAIDU
+                | VectorType.VIKINGDB
+                | VectorType.UPSTASH
             ):
                 return {"retrieval_method": [RetrievalMethod.SEMANTIC_SEARCH.value]}
             case (

@@ -46,8 +46,7 @@ from fields.document_fields import (
     document_with_segments_fields,
 )
 from libs.login import login_required
-from models.dataset import Dataset, DatasetProcessRule, Document, DocumentSegment
-from models.model import UploadFile
+from models import Dataset, DatasetProcessRule, Document, DocumentSegment, UploadFile
 from services.dataset_service import DatasetService, DocumentService
 from tasks.add_document_to_index_task import add_document_to_index_task
 from tasks.remove_document_from_index_task import remove_document_from_index_task
@@ -302,6 +301,8 @@ class DatasetInitApi(Resource):
             "doc_language", type=str, default="English", required=False, nullable=False, location="json"
         )
         parser.add_argument("retrieval_model", type=dict, required=False, nullable=False, location="json")
+        parser.add_argument("embedding_model", type=str, required=False, nullable=True, location="json")
+        parser.add_argument("embedding_model_provider", type=str, required=False, nullable=True, location="json")
         args = parser.parse_args()

         # The role of the current user in the ta table must be admin, owner, or editor, or dataset_operator
@@ -309,6 +310,8 @@ class DatasetInitApi(Resource):
             raise Forbidden()

         if args["indexing_technique"] == "high_quality":
+            if args["embedding_model"] is None or args["embedding_model_provider"] is None:
+                raise ValueError("embedding model and embedding model provider are required for high quality indexing.")
             try:
                 model_manager = ModelManager()
                 model_manager.get_default_model_instance(
@@ -350,7 +353,7 @@ class DocumentIndexingEstimateApi(DocumentResource):
         document_id = str(document_id)
         document = self.get_document(dataset_id, document_id)

-        if document.indexing_status in ["completed", "error"]:
+        if document.indexing_status in {"completed", "error"}:
             raise DocumentAlreadyFinishedError()

         data_process_rule = document.dataset_process_rule
@@ -417,7 +420,7 @@ class DocumentBatchIndexingEstimateApi(DocumentResource):
         info_list = []
         extract_settings = []
         for document in documents:
-            if document.indexing_status in ["completed", "error"]:
+            if document.indexing_status in {"completed", "error"}:
                 raise DocumentAlreadyFinishedError()
             data_source_info = document.data_source_info_dict
             # format document files info
@@ -661,7 +664,7 @@ class DocumentProcessingApi(DocumentResource):
             db.session.commit()

         elif action == "resume":
-            if document.indexing_status not in ["paused", "error"]:
+            if document.indexing_status not in {"paused", "error"}:
                 raise InvalidActionError("Document not in paused or error state.")

             document.paused_by = None

@@ -24,7 +24,7 @@ from extensions.ext_database import db
 from extensions.ext_redis import redis_client
 from fields.segment_fields import segment_fields
 from libs.login import login_required
-from models.dataset import DocumentSegment
+from models import DocumentSegment
 from services.dataset_service import DatasetService, DocumentService, SegmentService
 from tasks.batch_create_segment_to_index_task import batch_create_segment_to_index_task
 from tasks.disable_segment_from_index_task import disable_segment_from_index_task

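The retrieval-setting endpoints above dispatch on the vector store with a `match`/`case` whose patterns are OR-ed enum members, a Python 3.10+ construct. A cut-down illustration of the idiom — the enum values here are placeholders, not the project's full `VectorType`:

from enum import Enum


class VectorType(str, Enum):
    MILVUS = "milvus"
    PGVECTOR = "pgvector"
    WEAVIATE = "weaviate"


def retrieval_methods(vector_type: VectorType) -> list[str]:
    # "|" combines alternatives in one case arm, as in DatasetRetrievalSettingApi.
    match vector_type:
        case VectorType.MILVUS:
            return ["semantic_search"]
        case VectorType.PGVECTOR | VectorType.WEAVIATE:
            return ["semantic_search", "full_text_search", "hybrid_search"]
        case _:
            raise ValueError(f"Unsupported vector db type {vector_type}.")
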
api/controllers/console/datasets/external.py (new file, 263 lines)
@@ -0,0 +1,263 @@
+from flask import request
+from flask_login import current_user
+from flask_restful import Resource, marshal, reqparse
+from werkzeug.exceptions import Forbidden, InternalServerError, NotFound
+
+import services
+from controllers.console import api
+from controllers.console.datasets.error import DatasetNameDuplicateError
+from controllers.console.setup import setup_required
+from controllers.console.wraps import account_initialization_required
+from fields.dataset_fields import dataset_detail_fields
+from libs.login import login_required
+from services.dataset_service import DatasetService
+from services.external_knowledge_service import ExternalDatasetService
+from services.hit_testing_service import HitTestingService
+from services.knowledge_service import ExternalDatasetTestService
+
+
+def _validate_name(name):
+    if not name or len(name) < 1 or len(name) > 100:
+        raise ValueError("Name must be between 1 to 100 characters.")
+    return name
+
+
+def _validate_description_length(description):
+    if description and len(description) > 400:
+        raise ValueError("Description cannot exceed 400 characters.")
+    return description
+
+
+class ExternalApiTemplateListApi(Resource):
+    @setup_required
+    @login_required
+    @account_initialization_required
+    def get(self):
+        page = request.args.get("page", default=1, type=int)
+        limit = request.args.get("limit", default=20, type=int)
+        search = request.args.get("keyword", default=None, type=str)
+
+        external_knowledge_apis, total = ExternalDatasetService.get_external_knowledge_apis(
+            page, limit, current_user.current_tenant_id, search
+        )
+        response = {
+            "data": [item.to_dict() for item in external_knowledge_apis],
+            "has_more": len(external_knowledge_apis) == limit,
+            "limit": limit,
+            "total": total,
+            "page": page,
+        }
+        return response, 200
+
+    @setup_required
+    @login_required
+    @account_initialization_required
+    def post(self):
+        parser = reqparse.RequestParser()
+        parser.add_argument(
+            "name",
+            nullable=False,
+            required=True,
+            help="Name is required. Name must be between 1 to 100 characters.",
+            type=_validate_name,
+        )
+        parser.add_argument(
+            "settings",
+            type=dict,
+            location="json",
+            nullable=False,
+            required=True,
+        )
+        args = parser.parse_args()
+
+        ExternalDatasetService.validate_api_list(args["settings"])
+
+        # The role of the current user in the ta table must be admin, owner, or editor, or dataset_operator
+        if not current_user.is_dataset_editor:
+            raise Forbidden()
+
+        try:
+            external_knowledge_api = ExternalDatasetService.create_external_knowledge_api(
+                tenant_id=current_user.current_tenant_id, user_id=current_user.id, args=args
+            )
+        except services.errors.dataset.DatasetNameDuplicateError:
+            raise DatasetNameDuplicateError()
+
+        return external_knowledge_api.to_dict(), 201
+
+
+class ExternalApiTemplateApi(Resource):
+    @setup_required
+    @login_required
+    @account_initialization_required
+    def get(self, external_knowledge_api_id):
+        external_knowledge_api_id = str(external_knowledge_api_id)
+        external_knowledge_api = ExternalDatasetService.get_external_knowledge_api(external_knowledge_api_id)
+        if external_knowledge_api is None:
+            raise NotFound("API template not found.")
+
+        return external_knowledge_api.to_dict(), 200
+
+    @setup_required
+    @login_required
+    @account_initialization_required
+    def patch(self, external_knowledge_api_id):
+        external_knowledge_api_id = str(external_knowledge_api_id)
+
+        parser = reqparse.RequestParser()
+        parser.add_argument(
+            "name",
+            nullable=False,
+            required=True,
+            help="type is required. Name must be between 1 to 100 characters.",
+            type=_validate_name,
+        )
+        parser.add_argument(
+            "settings",
+            type=dict,
+            location="json",
+            nullable=False,
+            required=True,
+        )
+        args = parser.parse_args()
+        ExternalDatasetService.validate_api_list(args["settings"])
+
+        external_knowledge_api = ExternalDatasetService.update_external_knowledge_api(
+            tenant_id=current_user.current_tenant_id,
+            user_id=current_user.id,
+            external_knowledge_api_id=external_knowledge_api_id,
+            args=args,
+        )
+
+        return external_knowledge_api.to_dict(), 200
+
+    @setup_required
+    @login_required
+    @account_initialization_required
+    def delete(self, external_knowledge_api_id):
+        external_knowledge_api_id = str(external_knowledge_api_id)
+
+        # The role of the current user in the ta table must be admin, owner, or editor
+        if not current_user.is_editor or current_user.is_dataset_operator:
+            raise Forbidden()
+
+        ExternalDatasetService.delete_external_knowledge_api(current_user.current_tenant_id, external_knowledge_api_id)
+        return {"result": "success"}, 200
+
+
+class ExternalApiUseCheckApi(Resource):
+    @setup_required
+    @login_required
+    @account_initialization_required
+    def get(self, external_knowledge_api_id):
+        external_knowledge_api_id = str(external_knowledge_api_id)
+
+        external_knowledge_api_is_using, count = ExternalDatasetService.external_knowledge_api_use_check(
+            external_knowledge_api_id
+        )
+        return {"is_using": external_knowledge_api_is_using, "count": count}, 200
+
+
+class ExternalDatasetCreateApi(Resource):
+    @setup_required
+    @login_required
+    @account_initialization_required
+    def post(self):
+        # The role of the current user in the ta table must be admin, owner, or editor
+        if not current_user.is_editor:
+            raise Forbidden()
+
+        parser = reqparse.RequestParser()
+        parser.add_argument("external_knowledge_api_id", type=str, required=True, nullable=False, location="json")
+        parser.add_argument("external_knowledge_id", type=str, required=True, nullable=False, location="json")
+        parser.add_argument(
+            "name",
+            nullable=False,
+            required=True,
+            help="name is required. Name must be between 1 to 100 characters.",
+            type=_validate_name,
+        )
+        parser.add_argument("description", type=str, required=False, nullable=True, location="json")
+        parser.add_argument("external_retrieval_model", type=dict, required=False, location="json")
+
+        args = parser.parse_args()
+
+        # The role of the current user in the ta table must be admin, owner, or editor, or dataset_operator
+        if not current_user.is_dataset_editor:
+            raise Forbidden()
+
+        try:
+            dataset = ExternalDatasetService.create_external_dataset(
+                tenant_id=current_user.current_tenant_id,
+                user_id=current_user.id,
+                args=args,
+            )
+        except services.errors.dataset.DatasetNameDuplicateError:
+            raise DatasetNameDuplicateError()
+
+        return marshal(dataset, dataset_detail_fields), 201
+
+
+class ExternalKnowledgeHitTestingApi(Resource):
+    @setup_required
+    @login_required
+    @account_initialization_required
+    def post(self, dataset_id):
+        dataset_id_str = str(dataset_id)
+        dataset = DatasetService.get_dataset(dataset_id_str)
+        if dataset is None:
+            raise NotFound("Dataset not found.")
+
+        try:
+            DatasetService.check_dataset_permission(dataset, current_user)
+        except services.errors.account.NoPermissionError as e:
+            raise Forbidden(str(e))
+
+        parser = reqparse.RequestParser()
+        parser.add_argument("query", type=str, location="json")
+        parser.add_argument("external_retrieval_model", type=dict, required=False, location="json")
+        args = parser.parse_args()
+
+        HitTestingService.hit_testing_args_check(args)
+
+        try:
+            response = HitTestingService.external_retrieve(
+                dataset=dataset,
+                query=args["query"],
+                account=current_user,
+                external_retrieval_model=args["external_retrieval_model"],
+            )
+
+            return response
+        except Exception as e:
+            raise InternalServerError(str(e))
+
+
+class BedrockRetrievalApi(Resource):
+    # this api is only for internal testing
+    def post(self):
+        parser = reqparse.RequestParser()
+        parser.add_argument("retrieval_setting", nullable=False, required=True, type=dict, location="json")
+        parser.add_argument(
+            "query",
+            nullable=False,
+            required=True,
+            type=str,
+        )
+        parser.add_argument("knowledge_id", nullable=False, required=True, type=str)
+        args = parser.parse_args()
+
+        # Call the knowledge retrieval service
+        result = ExternalDatasetTestService.knowledge_retrieval(
+            args["retrieval_setting"], args["query"], args["knowledge_id"]
+        )
+        return result, 200
+
+
+api.add_resource(ExternalKnowledgeHitTestingApi, "/datasets/<uuid:dataset_id>/external-hit-testing")
+api.add_resource(ExternalDatasetCreateApi, "/datasets/external")
+api.add_resource(ExternalApiTemplateListApi, "/datasets/external-knowledge-api")
+api.add_resource(ExternalApiTemplateApi, "/datasets/external-knowledge-api/<uuid:external_knowledge_api_id>")
+api.add_resource(ExternalApiUseCheckApi, "/datasets/external-knowledge-api/<uuid:external_knowledge_api_id>/use-check")
+# this api is only for internal test
+api.add_resource(BedrockRetrievalApi, "/test/retrieval")

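The new controller wires five routes for external knowledge. A hedged end-to-end sketch against those routes, assuming the console API prefix and a bearer token; the shape of the `settings` payload and of the `to_dict()` responses is an assumption, not taken from this file:

import requests

BASE = "http://localhost:5001/console/api"  # assumed prefix
headers = {"Authorization": "Bearer <access_token>"}

# 1. Register an external knowledge API template (settings payload shape assumed).
tpl = requests.post(
    f"{BASE}/datasets/external-knowledge-api",
    headers=headers,
    json={"name": "my-backend", "settings": {"endpoint": "https://kb.example.com", "api_key": "..."}},
).json()

# 2. Create an external dataset bound to that template.
ds = requests.post(
    f"{BASE}/datasets/external",
    headers=headers,
    json={"external_knowledge_api_id": tpl["id"], "external_knowledge_id": "kb-123", "name": "External KB"},
).json()

# 3. Hit-test retrieval through the external dataset.
hits = requests.post(
    f"{BASE}/datasets/{ds['id']}/external-hit-testing",
    headers=headers,
    json={"query": "refund policy"},
).json()
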
@@ -1,9 +1,12 @@
|
||||
import urllib.parse
|
||||
|
||||
from flask import request
|
||||
from flask_login import current_user
|
||||
from flask_restful import Resource, marshal_with
|
||||
from flask_restful import Resource, marshal_with, reqparse
|
||||
|
||||
import services
|
||||
from configs import dify_config
|
||||
from constants import DOCUMENT_EXTENSIONS
|
||||
from controllers.console import api
|
||||
from controllers.console.datasets.error import (
|
||||
FileTooLargeError,
|
||||
@@ -13,9 +16,10 @@ from controllers.console.datasets.error import (
|
||||
)
|
||||
from controllers.console.setup import setup_required
|
||||
from controllers.console.wraps import account_initialization_required, cloud_edition_billing_resource_check
|
||||
from fields.file_fields import file_fields, upload_config_fields
|
||||
from core.helper import ssrf_proxy
|
||||
from fields.file_fields import file_fields, remote_file_info_fields, upload_config_fields
|
||||
from libs.login import login_required
|
||||
from services.file_service import ALLOWED_EXTENSIONS, UNSTRUCTURED_ALLOWED_EXTENSIONS, FileService
|
||||
from services.file_service import FileService
|
||||
|
||||
PREVIEW_WORDS_LIMIT = 3000
|
||||
|
||||
@@ -26,24 +30,27 @@ class FileApi(Resource):
|
||||
@account_initialization_required
|
||||
@marshal_with(upload_config_fields)
|
||||
def get(self):
|
||||
file_size_limit = dify_config.UPLOAD_FILE_SIZE_LIMIT
|
||||
batch_count_limit = dify_config.UPLOAD_FILE_BATCH_LIMIT
|
||||
image_file_size_limit = dify_config.UPLOAD_IMAGE_FILE_SIZE_LIMIT
|
||||
return {
|
||||
"file_size_limit": file_size_limit,
|
||||
"batch_count_limit": batch_count_limit,
|
||||
"image_file_size_limit": image_file_size_limit,
|
||||
"file_size_limit": dify_config.UPLOAD_FILE_SIZE_LIMIT,
|
||||
"batch_count_limit": dify_config.UPLOAD_FILE_BATCH_LIMIT,
|
||||
"image_file_size_limit": dify_config.UPLOAD_IMAGE_FILE_SIZE_LIMIT,
|
||||
"video_file_size_limit": dify_config.UPLOAD_VIDEO_FILE_SIZE_LIMIT,
|
||||
"audio_file_size_limit": dify_config.UPLOAD_AUDIO_FILE_SIZE_LIMIT,
|
||||
}, 200
|
||||
|
||||
@setup_required
|
||||
@login_required
|
||||
@account_initialization_required
|
||||
@marshal_with(file_fields)
|
||||
@cloud_edition_billing_resource_check(resource="documents")
|
||||
@cloud_edition_billing_resource_check("documents")
|
||||
def post(self):
|
||||
# get file from request
|
||||
file = request.files["file"]
|
||||
|
||||
parser = reqparse.RequestParser()
|
||||
parser.add_argument("source", type=str, required=False, location="args")
|
||||
source = parser.parse_args().get("source")
|
||||
|
||||
# check file
|
||||
if "file" not in request.files:
|
||||
raise NoFileUploadedError()
|
||||
@@ -51,7 +58,7 @@ class FileApi(Resource):
         if len(request.files) > 1:
             raise TooManyFilesError()
         try:
-            upload_file = FileService.upload_file(file, current_user)
+            upload_file = FileService.upload_file(file=file, user=current_user, source=source)
         except services.errors.file.FileTooLargeError as file_too_large_error:
             raise FileTooLargeError(file_too_large_error.description)
         except services.errors.file.UnsupportedFileTypeError:
@@ -75,11 +82,24 @@ class FileSupportTypeApi(Resource):
     @login_required
     @account_initialization_required
     def get(self):
-        etl_type = dify_config.ETL_TYPE
-        allowed_extensions = UNSTRUCTURED_ALLOWED_EXTENSIONS if etl_type == "Unstructured" else ALLOWED_EXTENSIONS
-        return {"allowed_extensions": allowed_extensions}
+        return {"allowed_extensions": DOCUMENT_EXTENSIONS}


+class RemoteFileInfoApi(Resource):
+    @marshal_with(remote_file_info_fields)
+    def get(self, url):
+        decoded_url = urllib.parse.unquote(url)
+        try:
+            response = ssrf_proxy.head(decoded_url)
+            return {
+                "file_type": response.headers.get("Content-Type", "application/octet-stream"),
+                "file_length": int(response.headers.get("Content-Length", 0)),
+            }
+        except Exception as e:
+            return {"error": str(e)}, 400
+
+
 api.add_resource(FileApi, "/files/upload")
 api.add_resource(FilePreviewApi, "/files/<uuid:file_id>/preview")
 api.add_resource(FileSupportTypeApi, "/files/support-type")
+api.add_resource(RemoteFileInfoApi, "/remote-files/<path:url>")
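
For reference, a minimal client-side sketch (not part of this diff) of how the new RemoteFileInfoApi endpoint might be called. The server does a HEAD request via ssrf_proxy and returns the Content-Type and Content-Length; the /console/api prefix, host, port, and sample URL below are illustrative assumptions:

    import urllib.parse
    import requests  # assumed HTTP client on the caller's side

    remote_url = "https://example.com/sample.pdf"  # hypothetical remote file
    encoded = urllib.parse.quote(remote_url, safe="")  # mirrors the server-side unquote()
    resp = requests.get(f"http://localhost:5001/console/api/remote-files/{encoded}")
    print(resp.json())  # e.g. {"file_type": "application/pdf", "file_length": 12345}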

api/controllers/console/datasets/hit_testing.py
@@ -1,86 +1,24 @@
-import logging
+from flask_restful import Resource

-from flask_login import current_user
-from flask_restful import Resource, marshal, reqparse
-from werkzeug.exceptions import Forbidden, InternalServerError, NotFound
-
-import services
 from controllers.console import api
-from controllers.console.app.error import (
-    CompletionRequestError,
-    ProviderModelCurrentlyNotSupportError,
-    ProviderNotInitializeError,
-    ProviderQuotaExceededError,
-)
-from controllers.console.datasets.error import DatasetNotInitializedError
+from controllers.console.datasets.hit_testing_base import DatasetsHitTestingBase
 from controllers.console.setup import setup_required
 from controllers.console.wraps import account_initialization_required
-from core.errors.error import (
-    LLMBadRequestError,
-    ModelCurrentlyNotSupportError,
-    ProviderTokenNotInitError,
-    QuotaExceededError,
-)
-from core.model_runtime.errors.invoke import InvokeError
-from fields.hit_testing_fields import hit_testing_record_fields
 from libs.login import login_required
-from services.dataset_service import DatasetService
-from services.hit_testing_service import HitTestingService


-class HitTestingApi(Resource):
+class HitTestingApi(Resource, DatasetsHitTestingBase):
     @setup_required
     @login_required
     @account_initialization_required
     def post(self, dataset_id):
         dataset_id_str = str(dataset_id)

-        dataset = DatasetService.get_dataset(dataset_id_str)
-        if dataset is None:
-            raise NotFound("Dataset not found.")
+        dataset = self.get_and_validate_dataset(dataset_id_str)
+        args = self.parse_args()
+        self.hit_testing_args_check(args)

-        try:
-            DatasetService.check_dataset_permission(dataset, current_user)
-        except services.errors.account.NoPermissionError as e:
-            raise Forbidden(str(e))
-
-        parser = reqparse.RequestParser()
-        parser.add_argument("query", type=str, location="json")
-        parser.add_argument("retrieval_model", type=dict, required=False, location="json")
-        args = parser.parse_args()
-
-        HitTestingService.hit_testing_args_check(args)
-
-        try:
-            response = HitTestingService.retrieve(
-                dataset=dataset,
-                query=args["query"],
-                account=current_user,
-                retrieval_model=args["retrieval_model"],
-                limit=10,
-            )
-
-            return {"query": response["query"], "records": marshal(response["records"], hit_testing_record_fields)}
-        except services.errors.index.IndexNotInitializedError:
-            raise DatasetNotInitializedError()
-        except ProviderTokenNotInitError as ex:
-            raise ProviderNotInitializeError(ex.description)
-        except QuotaExceededError:
-            raise ProviderQuotaExceededError()
-        except ModelCurrentlyNotSupportError:
-            raise ProviderModelCurrentlyNotSupportError()
-        except LLMBadRequestError:
-            raise ProviderNotInitializeError(
-                "No Embedding Model or Reranking Model available. Please configure a valid provider "
-                "in the Settings -> Model Provider."
-            )
-        except InvokeError as e:
-            raise CompletionRequestError(e.description)
-        except ValueError as e:
-            raise ValueError(str(e))
-        except Exception as e:
-            logging.exception("Hit testing failed.")
-            raise InternalServerError(str(e))
+        return self.perform_hit_testing(dataset, args)


 api.add_resource(HitTestingApi, "/datasets/<uuid:dataset_id>/hit-testing")
api/controllers/console/datasets/hit_testing_base.py (Normal file, 85 lines)
@@ -0,0 +1,85 @@
+import logging
+
+from flask_login import current_user
+from flask_restful import marshal, reqparse
+from werkzeug.exceptions import Forbidden, InternalServerError, NotFound
+
+import services.dataset_service
+from controllers.console.app.error import (
+    CompletionRequestError,
+    ProviderModelCurrentlyNotSupportError,
+    ProviderNotInitializeError,
+    ProviderQuotaExceededError,
+)
+from controllers.console.datasets.error import DatasetNotInitializedError
+from core.errors.error import (
+    LLMBadRequestError,
+    ModelCurrentlyNotSupportError,
+    ProviderTokenNotInitError,
+    QuotaExceededError,
+)
+from core.model_runtime.errors.invoke import InvokeError
+from fields.hit_testing_fields import hit_testing_record_fields
+from services.dataset_service import DatasetService
+from services.hit_testing_service import HitTestingService
+
+
+class DatasetsHitTestingBase:
+    @staticmethod
+    def get_and_validate_dataset(dataset_id: str):
+        dataset = DatasetService.get_dataset(dataset_id)
+        if dataset is None:
+            raise NotFound("Dataset not found.")
+
+        try:
+            DatasetService.check_dataset_permission(dataset, current_user)
+        except services.errors.account.NoPermissionError as e:
+            raise Forbidden(str(e))
+
+        return dataset
+
+    @staticmethod
+    def hit_testing_args_check(args):
+        HitTestingService.hit_testing_args_check(args)
+
+    @staticmethod
+    def parse_args():
+        parser = reqparse.RequestParser()
+
+        parser.add_argument("query", type=str, location="json")
+        parser.add_argument("retrieval_model", type=dict, required=False, location="json")
+        parser.add_argument("external_retrieval_model", type=dict, required=False, location="json")
+        return parser.parse_args()
+
+    @staticmethod
+    def perform_hit_testing(dataset, args):
+        try:
+            response = HitTestingService.retrieve(
+                dataset=dataset,
+                query=args["query"],
+                account=current_user,
+                retrieval_model=args["retrieval_model"],
+                external_retrieval_model=args["external_retrieval_model"],
+                limit=10,
+            )
+            return {"query": response["query"], "records": marshal(response["records"], hit_testing_record_fields)}
+        except services.errors.index.IndexNotInitializedError:
+            raise DatasetNotInitializedError()
+        except ProviderTokenNotInitError as ex:
+            raise ProviderNotInitializeError(ex.description)
+        except QuotaExceededError:
+            raise ProviderQuotaExceededError()
+        except ModelCurrentlyNotSupportError:
+            raise ProviderModelCurrentlyNotSupportError()
+        except LLMBadRequestError:
+            raise ProviderNotInitializeError(
+                "No Embedding Model or Reranking Model available. Please configure a valid provider "
+                "in the Settings -> Model Provider."
+            )
+        except InvokeError as e:
+            raise CompletionRequestError(e.description)
+        except ValueError as e:
+            raise ValueError(str(e))
+        except Exception as e:
+            logging.exception("Hit testing failed.")
+            raise InternalServerError(str(e))
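
The refactored HitTestingApi above is the intended consumer of this base class; a minimal sketch of another controller reusing it might look like the following (the class name here is hypothetical, and decorators are omitted for brevity):

    from flask_restful import Resource

    from controllers.console.datasets.hit_testing_base import DatasetsHitTestingBase


    class ExampleHitTestingApi(Resource, DatasetsHitTestingBase):
        def post(self, dataset_id):
            # validate, parse, and delegate, exactly as HitTestingApi does
            dataset = self.get_and_validate_dataset(str(dataset_id))
            args = self.parse_args()
            self.hit_testing_args_check(args)
            return self.perform_hit_testing(dataset, args)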
api/controllers/console/datasets/website.py
@@ -14,7 +14,9 @@ class WebsiteCrawlApi(Resource):
     @account_initialization_required
     def post(self):
         parser = reqparse.RequestParser()
-        parser.add_argument("provider", type=str, choices=["firecrawl"], required=True, nullable=True, location="json")
+        parser.add_argument(
+            "provider", type=str, choices=["firecrawl", "jinareader"], required=True, nullable=True, location="json"
+        )
         parser.add_argument("url", type=str, required=True, nullable=True, location="json")
         parser.add_argument("options", type=dict, required=True, nullable=True, location="json")
         args = parser.parse_args()
@@ -33,7 +35,7 @@ class WebsiteCrawlStatusApi(Resource):
     @account_initialization_required
     def get(self, job_id: str):
         parser = reqparse.RequestParser()
-        parser.add_argument("provider", type=str, choices=["firecrawl"], required=True, location="args")
+        parser.add_argument("provider", type=str, choices=["firecrawl", "jinareader"], required=True, location="args")
         args = parser.parse_args()
         # get crawl status
         try:
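
After this change, a crawl request can name either provider. A sketch of a JSON body the parser above would accept; the keys under "options" are provider-specific and purely illustrative:

    payload = {
        "provider": "jinareader",      # newly accepted alongside "firecrawl"
        "url": "https://example.com",  # hypothetical site to crawl
        "options": {"limit": 10},      # assumed shape; depends on the provider
    }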
api/controllers/console/error.py
@@ -18,9 +18,7 @@ class NotSetupError(BaseHTTPException):

 class NotInitValidateError(BaseHTTPException):
     error_code = "not_init_validated"
-    description = (
-        "Init validation has not been completed yet. " "Please proceed with the init validation process first."
-    )
+    description = "Init validation has not been completed yet. Please proceed with the init validation process first."
     code = 401


@@ -40,3 +38,27 @@ class AlreadyActivateError(BaseHTTPException):
     error_code = "already_activate"
     description = "Auth Token is invalid or account already activated, please check again."
     code = 403
+
+
+class NotAllowedCreateWorkspace(BaseHTTPException):
+    error_code = "not_allowed_create_workspace"
+    description = "Workspace not found, please contact system admin to invite you to join in a workspace."
+    code = 400
+
+
+class AccountBannedError(BaseHTTPException):
+    error_code = "account_banned"
+    description = "Account is banned."
+    code = 400
+
+
+class NotAllowedRegister(BaseHTTPException):
+    error_code = "unauthorized"
+    description = "Account not found."
+    code = 400
+
+
+class EmailSendIpLimitError(BaseHTTPException):
+    error_code = "email_send_ip_limit"
+    description = "Too many emails have been sent from this IP address recently. Please try again later."
+    code = 429
api/controllers/console/explore/audio.py
@@ -81,19 +81,15 @@ class ChatTextApi(InstalledAppResource):
         message_id = args.get("message_id", None)
         text = args.get("text", None)
         if (
-            app_model.mode in [AppMode.ADVANCED_CHAT.value, AppMode.WORKFLOW.value]
+            app_model.mode in {AppMode.ADVANCED_CHAT.value, AppMode.WORKFLOW.value}
             and app_model.workflow
             and app_model.workflow.features_dict
         ):
             text_to_speech = app_model.workflow.features_dict.get("text_to_speech")
-            voice = args.get("voice") if args.get("voice") else text_to_speech.get("voice")
+            voice = args.get("voice") or text_to_speech.get("voice")
         else:
             try:
-                voice = (
-                    args.get("voice")
-                    if args.get("voice")
-                    else app_model.app_model_config.text_to_speech_dict.get("voice")
-                )
+                voice = args.get("voice") or app_model.app_model_config.text_to_speech_dict.get("voice")
             except Exception:
                 voice = None
         response = AudioService.transcript_tts(app_model=app_model, message_id=message_id, voice=voice, text=text)
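
The collapsed expressions above rely on `x if x else y` being exactly equivalent to `x or y`, including for falsy values such as None or ""; a quick check of that identity (the "default-voice" value is illustrative):

    args = {"voice": ""}
    old_style = args.get("voice") if args.get("voice") else "default-voice"
    new_style = args.get("voice") or "default-voice"
    assert old_style == new_style == "default-voice"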
api/controllers/console/explore/completion.py
@@ -92,7 +92,7 @@ class ChatApi(InstalledAppResource):
     def post(self, installed_app):
         app_model = installed_app.app
         app_mode = AppMode.value_of(app_model.mode)
-        if app_mode not in [AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT]:
+        if app_mode not in {AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT}:
             raise NotChatAppError()

         parser = reqparse.RequestParser()
@@ -100,6 +100,7 @@ class ChatApi(InstalledAppResource):
         parser.add_argument("query", type=str, required=True, location="json")
         parser.add_argument("files", type=list, required=False, location="json")
         parser.add_argument("conversation_id", type=uuid_value, location="json")
+        parser.add_argument("parent_message_id", type=uuid_value, required=False, location="json")
         parser.add_argument("retriever_from", type=str, required=False, default="explore_app", location="json")
         args = parser.parse_args()
@@ -140,7 +141,7 @@ class ChatStopApi(InstalledAppResource):
     def post(self, installed_app, task_id):
         app_model = installed_app.app
         app_mode = AppMode.value_of(app_model.mode)
-        if app_mode not in [AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT]:
+        if app_mode not in {AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT}:
             raise NotChatAppError()

         AppQueueManager.set_stop_flag(task_id, InvokeFrom.EXPLORE, current_user.id)
Some files were not shown because too many files have changed in this diff.