Compare commits

1240 Commits

Author SHA1 Message Date
jyong
85e568e114 compatible query is None 2024-04-03 18:39:29 +08:00
jyong
0026cb404f compatible query is None 2024-04-03 18:38:27 +08:00
JzoNg
62919a9ff5 app list mutation 2024-04-03 18:06:36 +08:00
StyleZhang
e13f8da9d5 fix: prompt editor 2024-04-03 18:04:46 +08:00
takatost
53aca1a922 add sys variables for start node 2024-04-03 17:53:54 +08:00
StyleZhang
464aee08a1 fix: prompt editor 2024-04-03 17:45:27 +08:00
Joel
ab56e6b6af fix: enable memory add sys query var input 2024-04-03 17:42:21 +08:00
Joel
2a2f4cd4d5 fix: label ui 2024-04-03 17:22:10 +08:00
Joel
b3a4a52a7a fix: valid tool valid empty error 2024-04-03 17:18:16 +08:00
Joel
379f9b56ad fix: start node not show sys files 2024-04-03 16:55:55 +08:00
Joel
b705041dda chore: remove usage output 2024-04-03 16:52:01 +08:00
takatost
fedcfe94ae update version to 0.6.0-preview-workflow.2 2024-04-03 16:50:11 +08:00
Joel
add7bdc877 fix: add input instance id 2024-04-03 16:46:47 +08:00
StyleZhang
088842dcdb fix: prompt editor 2024-04-03 16:13:14 +08:00
Joel
c43eaeec06 fix: can not open 2024-04-03 15:48:57 +08:00
StyleZhang
f06554a11e fix: sync draft 2024-04-03 15:37:19 +08:00
Yeuoly
3ac37e802a fix: sandbox tips 2024-04-03 15:34:05 +08:00
JzoNg
aee669f67d fix max length for paragraph 2024-04-03 15:32:59 +08:00
Joel
c8db4d8a08 fix: editor choose context would blur 2024-04-03 15:15:35 +08:00
StyleZhang
c3bb541a69 fix 2024-04-03 15:13:26 +08:00
StyleZhang
ba3039d6c9 merge feat/workflow 2024-04-03 15:00:26 +08:00
Joel
bd3b400121 chore: support click to show choose add var 2024-04-03 14:57:16 +08:00
StyleZhang
28e813f57f fix 2024-04-03 14:38:15 +08:00
Joel
3f11e11c2d chore: confirm ui 2024-04-03 14:08:36 +08:00
Yeuoly
37a282cc1c fix: add default lora 2024-04-03 13:51:18 +08:00
JzoNg
88ef220d4d fix app list cache 2024-04-03 13:49:29 +08:00
Yeuoly
ccb67bffc4 fix: tools 2024-04-03 13:46:29 +08:00
StyleZhang
20394b3231 fix 2024-04-03 13:39:15 +08:00
nite-knite
52a1c4580c feat: update chat app publishing 2024-04-03 13:12:07 +08:00
JzoNg
aca395b97d fix test run 2024-04-03 12:44:32 +08:00
StyleZhang
459b690416 fix: prompt-editor 2024-04-03 12:31:40 +08:00
Yeuoly
d48bdf3e14 chore: generic 2024-04-03 12:02:51 +08:00
Joel
9fea2fd44b feat: change from credential schema value from api 2024-04-03 11:56:54 +08:00
Joel
f291aec2cd chore: start input placeholder and bg 2024-04-03 11:41:44 +08:00
takatost
00d9c48461 fix migration version dependency 2024-04-02 22:46:38 +08:00
takatost
fef62d937d Merge branch 'main' into feat/workflow 2024-04-02 22:36:27 +08:00
takatost
6b06c5b957 optimize workflow inputs 2024-04-02 22:36:07 +08:00
takatost
5a4ea0932a add inputs for workflow_started event 2024-04-02 21:13:08 +08:00
StyleZhang
f7e4f0a988 fix: run error 2024-04-02 21:05:04 +08:00
StyleZhang
cf449b31a1 code node rename 2024-04-02 20:48:46 +08:00
StyleZhang
f7184c0e36 fix: tool node check 2024-04-02 20:19:50 +08:00
JzoNg
5df66579a8 fix crash of advanced prompt app 2024-04-02 20:12:43 +08:00
StyleZhang
d260e6b064 fix: prompt-editor 2024-04-02 19:55:58 +08:00
Yeuoly
01c6a35966 chore: encoder 2024-04-02 19:25:52 +08:00
StyleZhang
0202469254 fix: checklist 2024-04-02 19:06:22 +08:00
Joel
fb2fa625b4 fix: use sys query instead user query 2024-04-02 18:46:44 +08:00
Joel
fbdf2ba839 fix: classify default two classifies and empty check 2024-04-02 18:41:31 +08:00
Joel
716936e37a fix: remove key replicate 2024-04-02 18:30:57 +08:00
Yeuoly
1c004e0df6 optimize: sd 2024-04-02 18:21:08 +08:00
Joel
59d279fbe0 fix: remove rename check 2024-04-02 18:12:44 +08:00
Joel
2d7c43b60f feat: http panel 2024-04-02 17:45:12 +08:00
Joel
a9f7f88a9a feat: answer support render var input 2024-04-02 17:38:05 +08:00
nite-knite
56cb9ccec1 feat: update workflow app publishing 2024-04-02 17:33:35 +08:00
StyleZhang
e0a152164b fix: prompt-editor 2024-04-02 17:20:32 +08:00
JzoNg
d72524ceb0 hide result info in chatflow 2024-04-02 17:16:39 +08:00
jyong
74538fb3b2 Merge remote-tracking branch 'origin/feat/workflow' into feat/workflow 2024-04-02 16:59:04 +08:00
jyong
f832211e2e db migrate merge 2024-04-02 16:58:49 +08:00
Yeuoly
e46c3a9235 optimize: tool 2024-04-02 16:58:24 +08:00
JzoNg
36c3774fac modify test run panel 2024-04-02 16:57:28 +08:00
jyong
5e201324d6 Merge branch 'main' into feat/workflow
# Conflicts:
#	api/.env.example
#	docker/docker-compose.yaml
2024-04-02 16:55:43 +08:00
Joel
5adbcacc52 fix: end node can not selector 2024-04-02 15:52:15 +08:00
takatost
8b01796f5d fix external data convert 2024-04-02 15:30:39 +08:00
Joel
34f4f76f67 fix: handle debug run valid 2024-04-02 15:27:03 +08:00
Yeuoly
01e832a587 fix: linter 2024-04-02 15:25:15 +08:00
Yeuoly
1af2d06d29 feat: add tool benchmark 2024-04-02 15:23:54 +08:00
Joel
426abe2134 fix: variable type add missing key 2024-04-02 15:14:00 +08:00
StyleZhang
f62775bcad fix: prompt-editor 2024-04-02 14:12:52 +08:00
Joel
7a2083a6b7 fix: num support var insert 2024-04-02 14:12:34 +08:00
jyong
09650b9d47 Merge remote-tracking branch 'origin/feat/workflow' into feat/workflow 2024-04-02 14:03:02 +08:00
jyong
f19219ab8d fix knowledge retrieval 2024-04-02 14:02:49 +08:00
Joel
8be04b57f9 fix: http attr key rerender 2024-04-02 13:55:30 +08:00
StyleZhang
ef39fa3fb2 node connect 2024-04-02 13:23:36 +08:00
JzoNg
fe569559ac fix app type label 2024-04-02 12:56:11 +08:00
JzoNg
8125d8fc9f modify params of app switch 2024-04-02 12:45:33 +08:00
JzoNg
fd8ed95209 fix prompt log 2024-04-02 12:34:40 +08:00
JzoNg
cf22842554 support app creation in nav 2024-04-02 12:15:28 +08:00
takatost
0fcb746c08 add created_at for app model config 2024-04-02 12:07:30 +08:00
StyleZhang
9d7ab0400d chat error 2024-04-02 12:01:32 +08:00
Yeuoly
396a3e0456 feat: add tool parameter type converter 2024-04-02 11:58:50 +08:00
Joel
56a1d5330a chore: types 2024-04-02 11:50:03 +08:00
takatost
c5e58c713c remove not necessary error reporting 2024-04-02 11:15:50 +08:00
Joel
6e0f13f269 feat: tool new struct 2024-04-02 11:14:33 +08:00
JzoNg
00728c2b1d support type filtering for app template 2024-04-01 22:55:53 +08:00
JzoNg
9fb7100b3f modify style of app type tag 2024-04-01 22:55:53 +08:00
StyleZhang
4e31d7b64f chat 2024-04-01 21:17:39 +08:00
takatost
1ab3b73c14 add app info for workflow convert 2024-04-01 21:00:08 +08:00
StyleZhang
b5fa68fdfe node selected 2024-04-01 20:47:59 +08:00
Yeuoly
31f24e1a14 enhance: enable configurable limitation of code 2024-04-01 20:47:26 +08:00
StyleZhang
e800109c02 node selected 2024-04-01 20:33:20 +08:00
Joel
5c3162cc33 fix: http delete btn hide 2024-04-01 19:58:51 +08:00
StyleZhang
04b4be27b7 refresh history 2024-04-01 19:41:37 +08:00
takatost
5793855115 fix http single run 2024-04-01 19:40:49 +08:00
Joel
41cce464ca fix: http var inputs 2024-04-01 19:36:09 +08:00
takatost
8c55ff392d fix bugs 2024-04-01 19:33:53 +08:00
StyleZhang
45d5d259a4 fix prompt editor 2024-04-01 19:03:34 +08:00
Joel
e08d871837 fix: http other params check 2024-04-01 18:48:36 +08:00
Joel
ab2c112059 feat: reuse get vars inputs and http request url 2024-04-01 18:33:17 +08:00
Joel
a42f26d857 fix: object label not pass the right value 2024-04-01 17:52:11 +08:00
Joel
6fea18b4d0 feat: insert var key ui 2024-04-01 17:40:31 +08:00
Joel
7e259600bf fix: debugger form struct and textarea line-height 2024-04-01 17:40:31 +08:00
Yeuoly
75e95e09d3 fix: test 2024-04-01 17:07:09 +08:00
StyleZhang
51f225e567 fix 2024-04-01 16:57:42 +08:00
Joel
53c988718b fix: no var caused bugs 2024-04-01 16:57:03 +08:00
Joel
74ead43ae1 fix: query selector set sys value problem 2024-04-01 16:50:09 +08:00
StyleZhang
d0509213d1 prompt editor 2024-04-01 16:47:41 +08:00
Yeuoly
5b81234db8 fix: tool entities 2024-04-01 16:43:10 +08:00
StyleZhang
df9e2e478f workflow template 2024-04-01 16:38:37 +08:00
takatost
7c64f2cfe0 feat: use en-US recommended apps as fallback if using unmaintained language 2024-04-01 16:24:59 +08:00
takatost
3b3d19dab7 Merge branch 'main' into feat/workflow
# Conflicts:
#	api/controllers/console/explore/recommended_app.py
2024-04-01 16:22:49 +08:00
JzoNg
806f27c370 revert automatic prompt 2024-04-01 16:00:34 +08:00
Yeuoly
86a32517e5 fix: tool variable selectors 2024-04-01 15:42:00 +08:00
takatost
072967a1d3 fix node single step run of answer & http request & llm 2024-04-01 15:24:35 +08:00
StyleZhang
9147e0046f node connect 2024-04-01 14:10:05 +08:00
JzoNg
f967203012 remove automatic 2024-04-01 13:31:30 +08:00
Joel
e2d0ff4784 chore: run text font size 2024-04-01 13:19:40 +08:00
StyleZhang
85ce2e8df8 merge main 2024-04-01 13:09:40 +08:00
Joel
c330f89c77 feat: support llm single run 2024-04-01 12:55:20 +08:00
StyleZhang
ffb698922a fix: edge 2024-04-01 12:54:15 +08:00
takatost
50a7c2c92c fix bug 2024-04-01 12:51:01 +08:00
Joel
0843af2996 fix: sys var not show 2024-04-01 12:39:54 +08:00
Joel
e9985f0696 chore: enhance debug show name 2024-04-01 12:29:51 +08:00
StyleZhang
dfad42075c sys variable 2024-04-01 12:06:22 +08:00
Joel
705d765a71 feat: llm show vars 2024-04-01 11:48:42 +08:00
Joel
e03367a188 feat: get input vars 2024-04-01 10:38:12 +08:00
Yeuoly
c20685e669 fix 2024-03-29 22:26:26 +08:00
takatost
429dd11dd7 add icon for tool node execution 2024-03-29 22:22:58 +08:00
takatost
b394dd6fb0 fix convert bug 2024-03-29 21:43:44 +08:00
takatost
a30a6dda63 Merge branch 'main' into feat/workflow
# Conflicts:
#	docker/docker-compose.yaml
2024-03-29 21:18:16 +08:00
takatost
de3b7e8815 http request node support template variable 2024-03-29 20:54:17 +08:00
Yeuoly
142d1be4f8 refactor 2024-03-29 20:53:48 +08:00
Yeuoly
fb364d44d1 refactor 2024-03-29 20:12:26 +08:00
jyong
a647698c32 Merge remote-tracking branch 'origin/feat/workflow' into feat/workflow 2024-03-29 19:44:35 +08:00
jyong
75ffdc9d3f fixed single retrieval 2024-03-29 19:44:26 +08:00
StyleZhang
0d12e5c795 run history 2024-03-29 19:32:38 +08:00
jyong
bab88efda9 Merge remote-tracking branch 'origin/feat/workflow' into feat/workflow 2024-03-29 19:29:42 +08:00
jyong
ca6acf2650 fixed single retrieval 2024-03-29 19:29:27 +08:00
Yeuoly
11b428a73f feat: agent log 2024-03-29 19:23:48 +08:00
nite-knite
f43faa125b feat: add condition placeholder to if-else node 2024-03-29 19:08:35 +08:00
Joel
6b3bc789b5 fix: http text pass vars 2024-03-29 19:04:53 +08:00
Joel
586488c6a9 feat: llm output and raw text 2024-03-29 19:04:53 +08:00
jyong
704cb42869 Merge remote-tracking branch 'origin/feat/workflow' into feat/workflow
# Conflicts:
#	api/core/workflow/nodes/question_classifier/question_classifier_node.py
2024-03-29 19:00:21 +08:00
jyong
2d26c4745b add history message 2024-03-29 18:58:59 +08:00
takatost
971436d935 llm and answer node support inner variable template 2024-03-29 18:44:30 +08:00
Joel
8a2d04b305 chore: llm editor bg and not flash 2024-03-29 18:32:59 +08:00
StyleZhang
7c45f369d1 checklist 2024-03-29 18:27:41 +08:00
JzoNg
6444d94f41 fix style of app card 2024-03-29 18:24:46 +08:00
Joel
a8236a270a feat: body to json editor 2024-03-29 18:19:45 +08:00
StyleZhang
760ada399f checklist 2024-03-29 18:08:27 +08:00
Joel
815262b9a6 chore: remove input vars 2024-03-29 18:02:06 +08:00
Joel
83651a038f feat: http attr support select keys 2024-03-29 17:55:59 +08:00
Joel
589ac9b22c feat: http key value inputs 2024-03-29 17:24:36 +08:00
StyleZhang
d673b4c219 fix: prompt editor 2024-03-29 17:20:48 +08:00
Joel
4e548fff5e feat: add insert var tooltip 2024-03-29 16:58:07 +08:00
Joel
636603d5af chore: type picker 2024-03-29 16:32:36 +08:00
Joel
950a52f4fc feat: input var ui 2024-03-29 16:19:09 +08:00
StyleZhang
b50e897aa0 fix: prompt editor 2024-03-29 16:08:10 +08:00
StyleZhang
d7be9c0afc prompt editor 2024-03-29 14:59:13 +08:00
StyleZhang
06a6d398cd checklist 2024-03-29 14:56:47 +08:00
Joel
12ed31be4d feat: api support var logic 2024-03-29 14:56:32 +08:00
Joel
8d2ac8ff8f feat: ignore invalid vars keys 2024-03-29 13:56:13 +08:00
Joel
91b84d8f1e chore: http node check 2024-03-29 13:01:36 +08:00
Joel
46cc635e05 fix: error status code 2024-03-29 12:35:04 +08:00
Joel
1ea8504cf1 chore: code output var empty check 2024-03-29 11:48:45 +08:00
Joel
a32465eeb8 chore: handle key exists check 2024-03-29 11:37:16 +08:00
Joel
f930521d64 chore: start var name check 2024-03-29 11:19:24 +08:00
JzoNg
42ad622a6c fix tool icon 2024-03-28 21:49:56 +08:00
JzoNg
4eb9027510 add icon for tool node in web app 2024-03-28 21:47:10 +08:00
JzoNg
05bb65bd94 add icon for tool node 2024-03-28 21:37:23 +08:00
Yeuoly
85285931e2 feat: add agent tool invoke meta 2024-03-28 20:04:31 +08:00
JzoNg
d7c4032917 fix style of app creation 2024-03-28 19:48:45 +08:00
Yeuoly
c1466a7a4d Merge branch 'feat/merge-tool-engine' into feat/workflow 2024-03-28 18:44:12 +08:00
Yeuoly
51404f9035 refactor: tool engine 2024-03-28 18:36:58 +08:00
Joel
c1bf4c6405 chore: var picker ui 2024-03-28 18:23:05 +08:00
Joel
b8818c90b0 feat: answer use selector vars 2024-03-28 17:41:41 +08:00
Joel
ead55ce931 chore: support hide editor var search 2024-03-28 17:37:11 +08:00
takatost
0a0d9565ac add icon return for tool node in workflow event stream 2024-03-28 17:26:09 +08:00
StyleZhang
4235baf493 editor 2024-03-28 17:11:39 +08:00
Joel
82a82fff35 chore: llm remove var inputs 2024-03-28 16:54:14 +08:00
Joel
4934a655dd chore: xxx 2024-03-28 16:48:54 +08:00
Joel
615178dafa feat: get var list hooks 2024-03-28 16:44:11 +08:00
Joel
08650339d7 feat: split var reference 2024-03-28 15:27:11 +08:00
StyleZhang
12ea3af242 fix: sync draft 2024-03-28 14:47:09 +08:00
takatost
858ab8c8c4 merge main 2024-03-28 14:38:21 +08:00
takatost
63a4ddc251 add text/plain support for draft sync api 2024-03-28 14:36:24 +08:00
StyleZhang
aa4d734244 fix 2024-03-28 12:39:24 +08:00
Joel
85b45a7cd0 fix: http json value would change 2024-03-28 11:34:31 +08:00
StyleZhang
de3fd0f382 fix 2024-03-27 21:00:53 +08:00
JzoNg
db3f38bc2b workflow webapp result modification 2024-03-27 18:42:07 +08:00
StyleZhang
d239e6bf0f fix 2024-03-27 17:36:16 +08:00
jyong
9e4b39e19f fix question classifier node type 2024-03-27 17:03:49 +08:00
Yeuoly
078b10a9f0 fix: linter 2024-03-27 16:01:09 +08:00
Yeuoly
c70d0546ae sign single step files 2024-03-27 16:00:54 +08:00
JzoNg
c3d926e2ed templates filtering 2024-03-27 15:49:33 +08:00
Joel
78a851d240 fix: hide readonly tooltip 2024-03-27 14:47:23 +08:00
Yeuoly
6256a3fadb fix: missing datasets 2024-03-27 14:44:04 +08:00
jyong
8def0f8cf2 Merge remote-tracking branch 'origin/feat/workflow' into feat/workflow 2024-03-27 14:19:38 +08:00
jyong
a29d3f2400 fix question classifier issue when llm is completion mode 2024-03-27 14:19:23 +08:00
Joel
794c57b938 fix: code editor readonly can get focus 2024-03-27 13:49:22 +08:00
Joel
9eff8715fb fix: model params in question classify 2024-03-27 13:38:26 +08:00
Yeuoly
e952e01dfe fix: sql 2024-03-27 12:28:07 +08:00
Yeuoly
a20d305842 fix: missing agent 2024-03-27 12:26:00 +08:00
Yeuoly
17d1e2e5b7 fix: template transform node output length 2024-03-27 11:51:12 +08:00
Joel
1c05d2ef7f fix: question classify memory limit 2024-03-27 11:06:51 +08:00
StyleZhang
f9caa09cac fix: empty chat 2024-03-27 10:39:41 +08:00
JzoNg
829a7b0d16 Merge branch 'main' into feat/workflow 2024-03-27 10:33:28 +08:00
JzoNg
83aaacd71d app creation update 2024-03-26 18:53:02 +08:00
Joel
a56115a664 chore: message type i18n 2024-03-26 18:07:35 +08:00
Joel
b5578c754f fix: var too long 2024-03-26 17:18:23 +08:00
Joel
de00245af0 fix: not highlight query block 2024-03-26 17:02:41 +08:00
Joel
2c9d4c8dca feat: value support highlights 2024-03-26 16:47:38 +08:00
Joel
1ac96564a0 feat: http node highlight node 2024-03-26 16:35:53 +08:00
StyleZhang
c15677634f merge main 2024-03-26 15:25:02 +08:00
Joel
2dd2c8c358 feat: support url highlight 2024-03-26 15:02:34 +08:00
Joel
8e3be982eb feat: chat memory placeholder 2024-03-26 14:19:50 +08:00
Joel
46f4e61edc fix: tools value too long ui 2024-03-26 13:52:35 +08:00
StyleZhang
a35b5e4fff chat restart 2024-03-25 18:42:51 +08:00
StyleZhang
07091c9d33 node panel resize 2024-03-25 18:02:09 +08:00
Joel
b3db119146 feat: memory example 2024-03-25 16:36:33 +08:00
Joel
28206cac72 feat: move start output var to vars 2024-03-25 15:51:31 +08:00
Joel
47f2fe591d feat: default fold output 2024-03-25 15:24:33 +08:00
Joel
acd0e22b9e feat: handle limit tool var type 2024-03-25 15:00:36 +08:00
Joel
2ebd8d9fdc feat: support var search 2024-03-25 14:30:43 +08:00
Joel
b5fe1f7c46 chore: var reference support portal 2024-03-25 11:34:56 +08:00
takatost
d7b2fe1e8b update docker compose images version 2024-03-24 00:14:53 +09:00
takatost
fac9459402 add workflow image build 2024-03-23 23:19:33 +09:00
takatost
6cf0e0c242 Merge branch 'main' into feat/workflow 2024-03-23 23:09:36 +09:00
takatost
656bd9257d Merge branch 'feat/workflow-backend' into feat/workflow 2024-03-23 23:09:20 +09:00
Yeuoly
38441c930c fix: tool sort 2024-03-23 17:54:40 +08:00
StyleZhang
dafdbfa0fd fix: next step tool icon 2024-03-23 12:15:38 +08:00
JzoNg
a264973366 hide log panel in web app 2024-03-22 17:23:40 +08:00
Joel
5843b30a13 feat: support var remove in code node 2024-03-22 15:20:09 +08:00
Joel
340ae3c52f feat: remove var check in start node 2024-03-22 15:10:01 +08:00
StyleZhang
817e16493f checklist 2024-03-22 14:58:11 +08:00
Joel
66cf787755 fix: can remove struct 2024-03-22 14:35:22 +08:00
StyleZhang
4f3872277c all tools 2024-03-22 13:08:28 +08:00
Yeuoly
9b84086bac fix: tool provider icon 2024-03-22 12:43:56 +08:00
JzoNg
ce2b2755af add description for workflow 2024-03-22 10:30:20 +08:00
takatost
a91bec033d fix bug 2024-03-21 22:04:43 +08:00
JzoNg
096cc74373 hide node info in chat 2024-03-21 19:56:43 +08:00
takatost
34db42ecea fix bug 2024-03-21 18:37:37 +08:00
takatost
34e8d2f6bb add message error record 2024-03-21 18:30:23 +08:00
Joel
a771d59b1e fix: model param inner ui 2024-03-21 18:14:12 +08:00
JzoNg
3a00941125 fix style of dataset 2024-03-21 18:08:10 +08:00
StyleZhang
8e9ade14df fix: style 2024-03-21 17:49:12 +08:00
JzoNg
a1ec45fdd1 fix style of message log operation 2024-03-21 17:47:23 +08:00
Joel
9295739dc0 fix: model trigger ui 2024-03-21 17:45:45 +08:00
StyleZhang
4afb16844c chat stop 2024-03-21 17:30:59 +08:00
takatost
c4e6ed1aa2 optimize codes 2024-03-21 17:12:52 +08:00
takatost
95c5848d05 update workflow app bind datasets 2024-03-21 17:06:45 +08:00
JzoNg
8e56096f83 fix style of nav 2024-03-21 16:58:00 +08:00
Joel
c8f51dd6db chore: edit code support line wrap 2024-03-21 16:56:49 +08:00
Joel
ebbb30de44 fix: wrong inference 2024-03-21 16:48:53 +08:00
Joel
fe1168d15a feat: code var sync 2024-03-21 16:43:36 +08:00
StyleZhang
fd7fded6e5 fix: style 2024-03-21 16:34:56 +08:00
Joel
8cffbc6b2a chore: more info to workflows 2024-03-21 16:23:47 +08:00
Yeuoly
fa673f9b4c fix: raw text 2024-03-21 16:21:59 +08:00
Joel
178f1fc5d6 fix: tools var rename problem 2024-03-21 16:15:50 +08:00
Yeuoly
0c409e2b9e enhance: increase code timeout 2024-03-21 16:14:16 +08:00
StyleZhang
e366e12be0 fix: running line 2024-03-21 16:12:55 +08:00
Joel
8e0d8fdb3f feat: other nodes support rename and fix known set var bug 2024-03-21 16:06:45 +08:00
StyleZhang
524b19bb3a node style 2024-03-21 15:59:20 +08:00
JzoNg
fb2e351c08 fix icons 2024-03-21 15:56:20 +08:00
Yeuoly
260fef40c4 enhance: full tools 2024-03-21 15:40:08 +08:00
takatost
72818e946d fix llm memory 2024-03-21 15:36:25 +08:00
Joel
e673c64534 feat: llm rename 2024-03-21 15:23:38 +08:00
Joel
93bbb2694f temp publish not check valid 2024-03-21 15:18:10 +08:00
Joel
b038b7aa33 fix: can not choose var type 2024-03-21 15:14:54 +08:00
Joel
02a059bdc6 feat: var name rename struct 2024-03-21 15:07:44 +08:00
StyleZhang
267d9568c6 fix: running status 2024-03-21 15:03:49 +08:00
takatost
d71eae8f93 fix qc 2024-03-21 15:02:55 +08:00
StyleZhang
8bdaab96b1 fix 2024-03-21 14:25:11 +08:00
takatost
a05fcedd61 fix stop 2024-03-21 14:04:22 +08:00
takatost
0db67a2fd3 fix features not publish 2024-03-21 13:47:10 +08:00
JzoNg
6e56a504fd fix docs for advanced-chat 2024-03-21 12:39:46 +08:00
JzoNg
69fa8c9794 add docs for advanced-chat 2024-03-21 12:36:28 +08:00
JzoNg
8dc8650ecb fix type of workflow process 2024-03-21 09:24:09 +08:00
JzoNg
40775e27ce correct api doc of workflow 2024-03-20 23:46:53 +08:00
JzoNg
6fb294202d modify workflow web app output 2024-03-20 23:07:43 +08:00
Yeuoly
bd409a3caf enhance: code node validator 2024-03-20 23:01:24 +08:00
takatost
0d0da9a892 fix variable assigner multi route 2024-03-20 22:49:24 +08:00
takatost
a7e2f9caf0 fix variable assigner 2024-03-20 22:27:59 +08:00
Joel
3d4d60a353 feat: llm input only number and str 2024-03-20 22:00:56 +08:00
Joel
75e876b14e chore: reduce more var limit 2024-03-20 21:56:18 +08:00
Joel
66fd60bc6f fix: var objects sorts change 2024-03-20 21:56:18 +08:00
takatost
c3e7299494 fix service api blocking mode 2024-03-20 21:55:06 +08:00
StyleZhang
8fc576870d fix 2024-03-20 20:52:19 +08:00
Joel
d4f362164f fix: memory support switch 2024-03-20 20:50:16 +08:00
StyleZhang
94ca0edb68 run history 2024-03-20 20:27:52 +08:00
takatost
a0dde6e4da fix bug 2024-03-20 20:02:51 +08:00
Joel
17f572f23f feat: can not add context 2024-03-20 19:35:42 +08:00
StyleZhang
137746387d fix style 2024-03-20 19:24:47 +08:00
JzoNg
9b5deaf80a add process of workflow in web app 2024-03-20 18:56:03 +08:00
Joel
1201bef879 chore: picker width set 2024-03-20 18:18:34 +08:00
takatost
30a9b8b917 fix bug 2024-03-20 17:52:47 +08:00
takatost
77bdc6ffb1 fix bug 2024-03-20 17:36:56 +08:00
takatost
a65c99496b add extra info for workflow stream output 2024-03-20 17:34:07 +08:00
Joel
a8c86b759d fix: var02 icon show 2024-03-20 17:31:46 +08:00
StyleZhang
76081db6e4 fix: style 2024-03-20 17:27:45 +08:00
Joel
0606b6f922 fix: remove datasets problem 2024-03-20 17:23:18 +08:00
Joel
9ed2a99abf feat: var reference support readonly 2024-03-20 17:01:53 +08:00
Joel
8f311b020a feat: tools readonly 2024-03-20 16:56:09 +08:00
Yeuoly
de6cbc36bb enhance: code return type 2024-03-20 16:54:32 +08:00
Joel
0aa984219f feat: template transform support readonly 2024-03-20 16:52:01 +08:00
Joel
c9168c19cd feat: question classify support readonly 2024-03-20 16:48:52 +08:00
Joel
beca05848c feat: llm support readonly 2024-03-20 16:43:00 +08:00
StyleZhang
2e5acef1b6 fix 2024-03-20 16:42:13 +08:00
Joel
c4811f921f feat: retrieval support readonly 2024-03-20 16:36:49 +08:00
Joel
7569346943 feat: if support readonly 2024-03-20 16:27:41 +08:00
StyleZhang
2919cc9adf fix 2024-03-20 16:25:02 +08:00
JzoNg
18883d9faa fix agent config 2024-03-20 16:20:19 +08:00
Joel
e462ddb805 feat: http support readonly 2024-03-20 16:17:04 +08:00
Joel
df274416f9 feat: end support readonly 2024-03-20 16:00:41 +08:00
JzoNg
b35d9f6c36 fix style of running workflow 2024-03-20 15:54:21 +08:00
Joel
4c5737fc7f feat: code support readonly 2024-03-20 15:53:14 +08:00
Joel
7bb1decaf8 feat: support prompt readonly 2024-03-20 15:46:21 +08:00
StyleZhang
4bef2eed25 chat add workflow process 2024-03-20 15:44:51 +08:00
takatost
0d2a90adf3 fix knowledge retriever return 2024-03-20 15:43:22 +08:00
Joel
38a1ea139a feat: answer support readonly 2024-03-20 15:32:06 +08:00
Joel
698eb9671f feat: start support readonly 2024-03-20 15:09:12 +08:00
Joel
1b857eba29 chore: remove useless 2024-03-20 14:49:39 +08:00
Joel
b060b773ef fix: set default logic error 2024-03-20 14:42:35 +08:00
Joel
ae197fb2ba fix: switch provider call infinite 2024-03-20 14:42:35 +08:00
StyleZhang
2697454a8e fix 2024-03-20 14:10:43 +08:00
Joel
2a75258836 feat: not show var 2024-03-20 13:55:26 +08:00
takatost
b50f221327 fix bug 2024-03-20 12:47:36 +08:00
Joel
d984eb3648 chore: workflow editor not choose outtool in var 2024-03-20 12:05:30 +08:00
Joel
4df8fa0afb feat: if change to default operator 2024-03-20 11:42:57 +08:00
Joel
67b3ee3776 feat: ifelse check and item choose var first 2024-03-20 11:35:31 +08:00
takatost
8337e3c6ba fix lint 2024-03-20 11:23:33 +08:00
takatost
a9b8917e22 fix bug 2024-03-20 11:23:25 +08:00
JzoNg
70698b553e fix prompt log in completion debug 2024-03-20 11:09:03 +08:00
Joel
b131c5dc73 fix: code default may not switch if not load config 2024-03-20 10:50:43 +08:00
Joel
15e2ab9203 feat: ifelse not set var not change selector 2024-03-20 10:50:43 +08:00
JzoNg
d5c79e0489 fix user-inputs generation 2024-03-20 10:45:30 +08:00
jyong
884eeebe83 fix react response 2024-03-20 04:00:50 +08:00
jyong
9042db301d fix page content is empty 2024-03-20 03:50:28 +08:00
takatost
f4f8d6c652 Merge branch 'main' into feat/workflow-backend
# Conflicts:
#	api/core/model_runtime/model_providers/anthropic/llm/llm.py
2024-03-20 00:06:33 +08:00
takatost
20cd3e52d0 fix qc bug 2024-03-19 23:55:06 +08:00
takatost
53fa4ffe73 fix bug 2024-03-19 21:53:24 +08:00
takatost
8acd6f2531 fix bug 2024-03-19 21:10:19 +08:00
takatost
8d8bbc586e fix bug 2024-03-19 20:57:07 +08:00
takatost
df4e1339da fix convert bug 2024-03-19 20:51:06 +08:00
takatost
0183651cd5 fix stream output 2024-03-19 20:34:43 +08:00
StyleZhang
9f024835aa chat 2024-03-19 20:34:10 +08:00
jyong
45017f3f35 fix knowledge single retrieve when function call response is none 2024-03-19 20:08:16 +08:00
StyleZhang
089072432e chat log 2024-03-19 19:46:14 +08:00
jyong
6e600bc0dc Merge remote-tracking branch 'origin/feat/workflow-backend' into feat/workflow-backend 2024-03-19 19:41:33 +08:00
jyong
25995eb735 fix knowledge single retrieve when function call response is none 2024-03-19 19:41:18 +08:00
JzoNg
28dc089540 fix style of node tracing 2024-03-19 19:24:36 +08:00
StyleZhang
8967c4c8f6 fix 2024-03-19 19:19:47 +08:00
Yeuoly
3969ed6f69 enhance: check valid JSON 2024-03-19 19:01:09 +08:00
JzoNg
56b025ebdd fix import by DSL 2024-03-19 18:57:57 +08:00
JzoNg
aab5566d98 fix app switch 2024-03-19 18:49:51 +08:00
Yeuoly
a9e44b1fd2 fix: missing head 2024-03-19 18:38:06 +08:00
takatost
bae1bc2e4b fix 2024-03-19 18:37:27 +08:00
StyleZhang
b9f58d3c1d Merge branch 'main' into feat/workflow 2024-03-19 18:37:09 +08:00
takatost
7c7f3958ff feat: optimize ollama model default parameters (#2894) 2024-03-19 18:36:30 +08:00
Lance Mao
85da94aac4 fix incorrect exception raised by api tool which leads to incorrect L… (#2886)
Co-authored-by: OSS-MAOLONGDONG\kaihong <maolongdong@kaihong.com>
2024-03-19 18:36:30 +08:00
Su Yang
5350753905 chore: update Qwen model params (#2892) 2024-03-19 18:36:30 +08:00
crazywoola
e7895cdc53 chore: update pr template (#2893) 2024-03-19 18:36:30 +08:00
Su Yang
b84d4bdb85 chore: Update TongYi models prices (#2890) 2024-03-19 18:36:30 +08:00
呆萌闷油瓶
66538d8cbd feat:support azure openai llm 0125 version (#2889) 2024-03-19 18:36:30 +08:00
Su Yang
4e24e116aa chore: use API Key instead of APIKey (#2888) 2024-03-19 18:36:29 +08:00
Bowen Liang
3f13c47b9b Bump tiktoken to 0.6.0 to support text-embedding-3-* in encoding_for_model (#2891) 2024-03-19 18:36:29 +08:00
Su Yang
10237c99e4 fix: anthropic system prompt not working (#2885) 2024-03-19 18:36:28 +08:00
Su Yang
faf936416f fix: Fix the problem of system not working (#2884) 2024-03-19 18:36:14 +08:00
crazywoola
779f77ccd6 feat: add icons for 01.ai (#2883) 2024-03-19 18:36:14 +08:00
Su Yang
758b8bf812 i18n: update bedrock label (#2879) 2024-03-19 18:36:14 +08:00
Su Yang
c61f51dc5d feat: AWS Bedrock Claude3 (#2864)
Co-authored-by: crazywoola <427733928@qq.com>
Co-authored-by: Chenhe Gu <guchenhe@gmail.com>
2024-03-19 18:36:14 +08:00
Yeuoly
b17e30b1c2 fix: form-data 2024-03-19 18:30:13 +08:00
Joel
0756b09cf5 chore: var assigner output 2024-03-19 18:19:07 +08:00
Yeuoly
2f16b3600c fix: avoid space in http key 2024-03-19 18:13:30 +08:00
Yeuoly
55d2417906 fix: invalid http header 2024-03-19 18:12:50 +08:00
Joel
49dd5b76f1 chore: http remove blank to value 2024-03-19 18:08:09 +08:00
Joel
43429108f5 chore: http files 2024-03-19 18:05:32 +08:00
Joel
18159b1a4b feat: valid assigners 2024-03-19 18:03:54 +08:00
StyleZhang
b31da3b195 initial node position 2024-03-19 17:59:35 +08:00
takatost
17b7426cc6 fix external_data_tools bug 2024-03-19 17:58:33 +08:00
Joel
ba7b9a595b fix: tool invalid 2024-03-19 17:52:43 +08:00
takatost
7778901630 fix tool image render 2024-03-19 17:49:26 +08:00
JzoNg
8f7356cc12 fix completion log item 2024-03-19 17:37:17 +08:00
Joel
d49834ee56 feat: if node valid 2024-03-19 17:33:49 +08:00
Joel
e4fdf1730e chore: change output 2024-03-19 17:29:40 +08:00
StyleZhang
f41a619490 check before publish 2024-03-19 17:24:35 +08:00
jyong
1607fcfaa7 fix knowledge single retrieve when function call response is none 2024-03-19 17:18:29 +08:00
Yeuoly
8386abaed1 fix: file 2024-03-19 17:07:44 +08:00
JzoNg
16a1562900 fix style of tip modal 2024-03-19 17:06:33 +08:00
Joel
9b1869f521 feat: template transform code valid 2024-03-19 16:58:13 +08:00
Joel
3dfcd9ca67 feat: valid question classify 2024-03-19 16:58:13 +08:00
takatost
74408c4ced fix app convert 2024-03-19 16:44:28 +08:00
JzoNg
653917649d add beta tag and fix some style 2024-03-19 16:43:24 +08:00
JzoNg
00f51749a3 add switch operation in app list 2024-03-19 16:18:22 +08:00
Joel
8d3158a6d5 feat: tool valid 2024-03-19 16:14:25 +08:00
StyleZhang
dbaf54c93d chat 2024-03-19 16:04:34 +08:00
takatost
7762737796 optimize app list desc 2024-03-19 15:40:03 +08:00
takatost
133d52deb9 fix bug 2024-03-19 15:32:10 +08:00
StyleZhang
0ede136d67 fix: single run sync draft 2024-03-19 15:30:13 +08:00
Joel
8d82d9f7ef fix: overwrite default value in tool 2024-03-19 15:27:33 +08:00
JzoNg
1532564601 modify operations in app list 2024-03-19 15:27:14 +08:00
takatost
24ac4996c0 fix bug 2024-03-19 15:20:03 +08:00
takatost
112593119a fix suggested_questions_after_answer 2024-03-19 15:12:29 +08:00
StyleZhang
0c100ac0b1 fix node 2024-03-19 15:07:13 +08:00
Joel
45168d0e00 remove log 2024-03-19 14:55:11 +08:00
Joel
dc91b2e3df fix: retrieval output error and var ref error 2024-03-19 14:54:23 +08:00
StyleZhang
ced6a5c18b answer node 2024-03-19 14:50:33 +08:00
JzoNg
0b7cdd1e5d node collapse 2024-03-19 14:47:03 +08:00
Joel
67de047122 fix: http not pass headers and so on 2024-03-19 14:42:54 +08:00
jyong
4ec14d8d91 fix knowledge single retrieve when function call response is none 2024-03-19 14:17:22 +08:00
Joel
09516726e9 fix: overwrite template 2024-03-19 13:58:51 +08:00
Joel
6bfd61a887 feat: retrieval check valid 2024-03-19 13:58:51 +08:00
StyleZhang
a436550dff workflow info 2024-03-19 13:50:10 +08:00
StyleZhang
f5a3069913 sync draft 2024-03-19 13:18:14 +08:00
JzoNg
cf0c96e0d1 fix workflow outputs 2024-03-19 12:34:03 +08:00
Joel
978ee93df7 fix: not show sys var type 2024-03-19 11:54:47 +08:00
Joel
f3bf4c7730 feat: code default value 2024-03-19 11:36:29 +08:00
Joel
d2de16fba2 fix: var list default 2024-03-19 10:57:05 +08:00
Joel
90543c458c feat: valid before run struct 2024-03-19 10:46:28 +08:00
JzoNg
d6e655eaae fix restart button 2024-03-19 09:43:39 +08:00
JzoNg
9884466ef0 fix judgement of app configure 2024-03-19 09:33:18 +08:00
JzoNg
3b4676d8e9 fix agent configuration 2024-03-19 08:55:47 +08:00
JzoNg
3ee9f74cf8 fix style of completion creation 2024-03-19 08:02:56 +08:00
takatost
1c7573a686 add logging callback for workflow 2024-03-19 04:37:29 +08:00
takatost
2da7cc6928 fix file bugs 2024-03-19 03:56:47 +08:00
JzoNg
fda802e796 chore: remove comments 2024-03-18 22:46:19 +08:00
jyong
5a5beb5b59 Merge remote-tracking branch 'origin/feat/workflow-backend' into feat/workflow-backend 2024-03-18 22:38:27 +08:00
jyong
a0b16e541c question classifier 2024-03-18 22:38:12 +08:00
JzoNg
a67777b8e2 app overview 2024-03-18 22:32:21 +08:00
Joel
eae4c80679 fix: input text error 2024-03-18 22:12:46 +08:00
Yeuoly
ac63b5385a fix: set code execution timeout 2024-03-18 22:12:21 +08:00
StyleZhang
f61ceadec5 fix 2024-03-18 22:03:26 +08:00
Yeuoly
5ff2fbed59 fix: linter 2024-03-18 22:00:35 +08:00
Yeuoly
d24cf9e56a limit http response 2024-03-18 22:00:34 +08:00
Joel
7b9fbccf60 feat: support add files and vision 2024-03-18 21:59:51 +08:00
takatost
0b07c6914a fix bugs 2024-03-18 21:52:39 +08:00
jyong
f803fb5855 Merge remote-tracking branch 'origin/feat/workflow-backend' into feat/workflow-backend 2024-03-18 21:51:32 +08:00
jyong
cd3c2f6b00 knowledge fix 2024-03-18 21:51:23 +08:00
takatost
587ba27f8c fix bugs 2024-03-18 21:42:45 +08:00
takatost
1b0acdbe63 fix message resign url 2024-03-18 21:22:58 +08:00
jyong
3e810bc490 knowledge fix 2024-03-18 21:22:16 +08:00
Yeuoly
cc86850ad9 pure: rm file transformer 2024-03-18 21:17:13 +08:00
Yeuoly
fed19db938 feat: http download file 2024-03-18 21:16:21 +08:00
takatost
9175eb455f fix context 2024-03-18 21:11:27 +08:00
StyleZhang
a89287bf20 block icon 2024-03-18 21:03:02 +08:00
takatost
977020f580 lint fix 2024-03-18 20:59:22 +08:00
takatost
a2195c813c fix file render 2024-03-18 20:59:11 +08:00
jyong
d5a404236a knowledge fix 2024-03-18 20:54:50 +08:00
JzoNg
202492e5ac message log style modified 2024-03-18 20:54:18 +08:00
Joel
601e888fde feat: handle sys var to run 2024-03-18 20:40:38 +08:00
jyong
4a483a8754 Merge remote-tracking branch 'origin/feat/workflow-backend' into feat/workflow-backend 2024-03-18 20:35:23 +08:00
jyong
a4f367b8ff knowledge fix 2024-03-18 20:35:10 +08:00
Yeuoly
e225a3d33c linter 2024-03-18 20:22:25 +08:00
Joel
31b6383697 fix: to new sys vars 2024-03-18 20:12:43 +08:00
Yeuoly
e7d6def1e8 fix: trim file extension 2024-03-18 19:59:54 +08:00
Yeuoly
197c0bb1a3 fix: jsonable_encoder 2024-03-18 19:56:38 +08:00
Joel
d6953f28d3 chore: remove not necessary config 2024-03-18 19:52:40 +08:00
StyleZhang
249f013ca3 fix 2024-03-18 19:50:55 +08:00
takatost
387a6cfee4 remove answer as end 2024-03-18 19:25:18 +08:00
StyleZhang
81cbf2e713 node prev available nodes 2024-03-18 19:22:58 +08:00
jyong
e66c55ba9e fix enable annotation reply when collection is None 2024-03-18 19:21:36 +08:00
Joel
56044a104c remove logs 2024-03-18 19:14:21 +08:00
Joel
c409ab4c3c feat: knowledge support one single 2024-03-18 18:49:01 +08:00
Yeuoly
487efcb206 fix: support deprecated tools 2024-03-18 18:45:29 +08:00
StyleZhang
4eb7546177 workflow publish 2024-03-18 18:45:24 +08:00
Yeuoly
4b561aec93 feat: workflow statistics 2024-03-18 18:44:27 +08:00
takatost
34695f02fb add model config for conversation 2024-03-18 18:25:46 +08:00
takatost
aa421269c4 deduct llm quota use llm node func 2024-03-18 18:01:57 +08:00
takatost
09cfbe117e fix annotation bugs 2024-03-18 17:57:10 +08:00
takatost
0ea233edbe Merge branch 'main' into feat/workflow-backend 2024-03-18 17:20:25 +08:00
takatost
d69e0a79d4 fix file upload config internal err 2024-03-18 16:55:15 +08:00
Joel
7320ac41af feat: support context and other var reset 2024-03-18 16:50:52 +08:00
takatost
08b1f5d7c3 fix web app bugs 2024-03-18 16:48:31 +08:00
takatost
61b41ca04b fix retriever resource 2024-03-18 16:38:39 +08:00
JzoNg
8d4f40bc7c fix style of chat message log 2024-03-18 16:37:50 +08:00
Joel
3e9c7dccc0 feat: prompt editor set context status setter 2024-03-18 16:25:20 +08:00
StyleZhang
672b8f14f2 chat 2024-03-18 16:14:05 +08:00
StyleZhang
513d075ebc chat 2024-03-18 16:03:57 +08:00
Joel
8d34082246 feat: llm default 2024-03-18 15:56:37 +08:00
jyong
5ed181dd42 knowledge entities fix 2024-03-18 15:54:59 +08:00
JzoNg
9d8f9f6f63 fix app template list filtering 2024-03-18 15:52:02 +08:00
Joel
8e8c39a88c feat: sys var remove nodeid 2024-03-18 15:46:36 +08:00
jyong
41d9fdee50 Merge remote-tracking branch 'origin/feat/workflow-backend' into feat/workflow-backend 2024-03-18 15:40:26 +08:00
jyong
9e37021387 knowledge entities fix 2024-03-18 15:40:11 +08:00
takatost
bf06be0c75 fix migration order 2024-03-18 15:37:23 +08:00
takatost
a93a2e2e0c Merge branch 'main' into feat/workflow-backend 2024-03-18 15:35:04 +08:00
takatost
02337cbb09 fix answer message save 2024-03-18 15:07:56 +08:00
StyleZhang
c35c0fc6f4 chat upload file 2024-03-18 15:04:32 +08:00
Joel
1482eb0348 feat: generation support vars 2024-03-18 14:57:28 +08:00
JzoNg
cbe7116bb7 fix data fetching of app list 2024-03-18 14:54:01 +08:00
StyleZhang
788550affa chat upload file 2024-03-18 14:49:40 +08:00
Joel
25949338cb feat: editor history and query 2024-03-18 14:49:15 +08:00
takatost
958da42f74 fix advanced chat answer 2024-03-18 14:28:07 +08:00
JzoNg
6e8ea528c2 fix loading of run 2024-03-18 14:19:47 +08:00
JzoNg
d537efe97a refactor run data fetching 2024-03-18 14:15:29 +08:00
JzoNg
0439276866 add tracing panel 2024-03-18 13:24:27 +08:00
takatost
69c8e4ddd1 fix source handle 2024-03-18 13:11:58 +08:00
JzoNg
711f7107b4 fix merge 2024-03-18 13:11:17 +08:00
StyleZhang
ea4476ac6e init edges 2024-03-18 12:49:55 +08:00
Joel
13dbc7f0ce feat: handle question classify 2024-03-18 11:26:26 +08:00
Joel
b8ecfd859b feat: fill single run form variable with constant value first time 2024-03-18 11:03:23 +08:00
Joel
4daf93ef4f feat: form input var type 2024-03-18 10:52:45 +08:00
Joel
90b7ca1df1 chore: merge main 2024-03-18 10:34:57 +08:00
takatost
96f38b2d15 fix bug 2024-03-18 00:13:34 +08:00
takatost
8a27e51658 add Bad Request when generating 2024-03-17 21:40:59 +08:00
takatost
8ecec84dcf Merge branch 'main' into feat/workflow-backend
# Conflicts:
#	api/core/application_manager.py
2024-03-17 21:38:33 +08:00
takatost
a2b3096159 add text chunk subscribe for advanced chat blocking mode 2024-03-17 21:36:22 +08:00
takatost
80f1fbba56 add image file as markdown stream output 2024-03-17 21:27:08 +08:00
Yeuoly
d8ab611480 fix: code 2024-03-17 21:08:41 +08:00
StyleZhang
722ff7795d insert node 2024-03-17 20:19:58 +08:00
takatost
73c2b35dfe add completion app creation back 2024-03-17 16:30:04 +08:00
Yeuoly
b99eadecf6 fix: code template 2024-03-17 16:18:15 +08:00
JzoNg
843db3dbdf fix typo 2024-03-17 15:06:53 +08:00
JzoNg
3b660f1698 chat list api 2024-03-17 15:03:37 +08:00
JzoNg
a2e30e6aa9 message log 2024-03-17 14:05:56 +08:00
JzoNg
9638885a67 fix prompt log 2024-03-17 12:41:23 +08:00
StyleZhang
cd01c890e1 chat record 2024-03-17 09:53:16 +08:00
StyleZhang
552ccb058b stop & restart 2024-03-17 09:25:19 +08:00
takatost
36180b1001 add model support for kr node single_retrieval_config 2024-03-16 22:22:08 +08:00
takatost
65ed4dc91f refactor recommend app api 2024-03-16 22:13:06 +08:00
takatost
c709e339b1 fix route 2024-03-16 18:48:16 +08:00
takatost
3cf8416484 add workflow api for installed app & web api & service api 2024-03-16 16:27:39 +08:00
takatost
d2d47d0e0e fix bug 2024-03-16 15:09:47 +08:00
StyleZhang
05f97f6e06 fix chat 2024-03-16 15:00:30 +08:00
takatost
11dfdb236d lint fix 2024-03-16 14:45:39 +08:00
takatost
6df520ebc6 add skip ran node 2024-03-16 14:45:16 +08:00
takatost
a047a98462 advanced chat support 2024-03-16 14:30:53 +08:00
jyong
1df68a546e variable assigner node 2024-03-16 01:15:40 +08:00
jyong
5013ea09d5 variable assigner node 2024-03-16 00:54:29 +08:00
jyong
d92d952e76 Merge remote-tracking branch 'origin/feat/workflow-backend' into feat/workflow-backend 2024-03-16 00:37:15 +08:00
jyong
4af304e6ae question classifier 2024-03-16 00:36:58 +08:00
takatost
5c4d1c52ee add conversation_id & message_id to advanced-chat workflow-runs API 2024-03-15 22:24:00 +08:00
Joel
5ee7fc4fde feat: tools vars limit 2024-03-15 22:15:36 +08:00
takatost
b0cf8c00db add created_at return in publish workflow 2024-03-15 22:08:25 +08:00
Joel
338dd1c714 feat: http var limit 2024-03-15 22:04:07 +08:00
Joel
af9ae91934 feat: template transform default 2024-03-15 21:58:56 +08:00
takatost
d122daca87 fix conversation filter 2024-03-15 21:56:17 +08:00
Joel
ec49da073e feat: code default value 2024-03-15 21:54:16 +08:00
takatost
62846be275 refactor app generate pipeline 2024-03-15 21:42:22 +08:00
Joel
6b9cc927c0 feat: llm default value 2024-03-15 21:41:27 +08:00
StyleZhang
a577db9ddd stop run 2024-03-15 21:33:51 +08:00
JzoNg
e5c8743712 fix elapsed time 2024-03-15 21:15:44 +08:00
Joel
777cca1a09 feat: question classify init 2024-03-15 20:50:55 +08:00
StyleZhang
e3c65c072c node value init 2024-03-15 20:26:00 +08:00
StyleZhang
9b069bd3d4 node value form 2024-03-15 20:18:19 +08:00
StyleZhang
56c53d1f07 node value init 2024-03-15 20:18:19 +08:00
JzoNg
6146f24932 fix tip of workflow 2024-03-15 20:15:57 +08:00
JzoNg
9908a8bf1f prompt log 2024-03-15 19:55:33 +08:00
StyleZhang
e33260d2e2 node value init 2024-03-15 19:51:46 +08:00
jyong
5713ee5fce Merge remote-tracking branch 'origin/feat/workflow-backend' into feat/workflow-backend
# Conflicts:
#	api/constants/languages.py
#	api/controllers/console/app/app.py
#	api/controllers/console/app/model_config.py
2024-03-15 19:50:50 +08:00
StyleZhang
129a68bb06 auto layout 2024-03-15 19:34:50 +08:00
Joel
aff5ab933b feat: knowledge node var init value and limit 2024-03-15 19:22:35 +08:00
JzoNg
4835358f24 modify prompt log 2024-03-15 18:26:56 +08:00
jyong
9b57b4c6c8 dataset retrieval 2024-03-15 18:22:48 +08:00
jyong
785dfc5c00 dataset retrieval 2024-03-15 18:22:48 +08:00
takatost
12eb236364 answer stream output support 2024-03-15 18:22:48 +08:00
Yeuoly
1cfeb989f7 fix: code default output 2024-03-15 18:22:47 +08:00
Yeuoly
ede65eca4d fix: tool 2024-03-15 18:22:47 +08:00
Yeuoly
dc53362506 fix: conversation_id equals to none 2024-03-15 18:22:47 +08:00
Yeuoly
74e644be1c fix: linter 2024-03-15 18:22:47 +08:00
Yeuoly
6e51ce123c fix: null conversation id 2024-03-15 18:22:47 +08:00
takatost
737321da75 add advanced chat apis support 2024-03-15 18:22:47 +08:00
takatost
72d2f76d24 fix default configs 2024-03-15 18:22:47 +08:00
Yeuoly
87a36a1fc8 fix: linter 2024-03-15 18:22:47 +08:00
Yeuoly
c2ded79cb2 fix: node type 2024-03-15 18:22:47 +08:00
takatost
fb6e5bf4d5 fix publish route 2024-03-15 18:22:47 +08:00
Yeuoly
6633a92e1a fix: http 2024-03-15 18:22:47 +08:00
takatost
44c4d5be72 add answer output parse 2024-03-15 18:22:47 +08:00
takatost
5a67c09b48 use answer node instead of end in advanced chatbot 2024-03-15 18:22:47 +08:00
Yeuoly
0614ddde7d fix: allow None AuthorizationConfig 2024-03-15 18:22:47 +08:00
takatost
e5ff06bcb7 fix err typo 2024-03-15 18:22:47 +08:00
Yeuoly
6b19ba3bb2 enhance: sandbox-docker-compose 2024-03-15 18:22:47 +08:00
takatost
735b55e61b add if-else node 2024-03-15 18:22:47 +08:00
takatost
7e53625eae fix value type 2024-03-15 18:22:47 +08:00
takatost
5213b0aade add sequence_number for workflow_started event 2024-03-15 18:22:47 +08:00
takatost
2b4b6817a3 record inputs and process data when node failed 2024-03-15 18:22:47 +08:00
takatost
da3e1e9d14 add deduct quota for llm node 2024-03-15 18:22:47 +08:00
takatost
e4794e309a add llm node test 2024-03-15 18:22:47 +08:00
Yeuoly
e6572ef2d7 fix: linter 2024-03-15 18:22:47 +08:00
Yeuoly
2182533af8 feat: javascript code 2024-03-15 18:22:47 +08:00
takatost
d88ac6c238 add llm node 2024-03-15 18:22:47 +08:00
takatost
e8751bebfa fix single step run error 2024-03-15 18:22:47 +08:00
Yeuoly
92c1da8dbe fix: remove answer 2024-03-15 18:22:47 +08:00
Yeuoly
951aaf5161 feat: sandbox 2024-03-15 18:22:47 +08:00
Yeuoly
a420953385 feat: docker-compose 2024-03-15 18:22:47 +08:00
Yeuoly
b102562614 fix: forward-ref 2024-03-15 18:22:47 +08:00
Yeuoly
2c2b9e7389 test: template transform 2024-03-15 18:22:47 +08:00
Yeuoly
513a8655b1 test: tool 2024-03-15 18:22:47 +08:00
Yeuoly
d3385a2715 feat 2024-03-15 18:22:47 +08:00
Yeuoly
ebf9c41adb feat: http 2024-03-15 18:22:47 +08:00
jyong
7372776992 knowledge node 2024-03-15 18:22:47 +08:00
takatost
7f7269d261 remove unused params in workflow_run_for_list_fields 2024-03-15 18:22:47 +08:00
takatost
f2bb0012fd add debug code 2024-03-15 18:22:47 +08:00
takatost
33113034ea add single step run 2024-03-15 18:22:47 +08:00
Yeuoly
88c29f613f fix: typing 2024-03-15 18:22:47 +08:00
Yeuoly
f318fa058c feat: add variable selector mapping 2024-03-15 18:22:47 +08:00
Yeuoly
407bfb8182 feat: add user uid 2024-03-15 18:22:47 +08:00
Yeuoly
91845fc9f6 fix: linter 2024-03-15 18:22:47 +08:00
Yeuoly
f911b1c488 feat: support empty code output children 2024-03-15 18:22:47 +08:00
takatost
7a6fa3655f add user for node 2024-03-15 18:22:47 +08:00
Yeuoly
5eb7b4d56a feat: tool entity 2024-03-15 18:22:47 +08:00
Yeuoly
5e4bd9fc38 feat: tool node 2024-03-15 18:22:47 +08:00
Yeuoly
f8cba2679e fix: linter 2024-03-15 18:22:47 +08:00
Yeuoly
e0883302d2 feat: jinja2 2024-03-15 18:22:47 +08:00
takatost
a0a1618869 add tenant_id / app_id / workflow_id for nodes 2024-03-15 18:22:47 +08:00
takatost
be68369983 add workflow_app_log codes 2024-03-15 18:22:47 +08:00
Yeuoly
9d0a832e40 refactor: github actions 2024-03-15 18:22:47 +08:00
Yeuoly
8031262006 feat: workflow mock test 2024-03-15 18:22:47 +08:00
takatost
751489fa54 modify readme 2024-03-15 18:22:47 +08:00
Yeuoly
1e6feadc7e fix: code node does not work as expected 2024-03-15 18:22:47 +08:00
takatost
2d8497f79b add readme for db connection management in App Runner and Task Pipeline 2024-03-15 18:22:47 +08:00
takatost
61a1aadf9c optimize workflow db connections 2024-03-15 18:22:47 +08:00
takatost
8b832097de optimize db connections 2024-03-15 18:22:45 +08:00
takatost
7e4daf131e optimize db connections 2024-03-15 18:17:05 +08:00
takatost
de3978fdbb optimize db connections 2024-03-15 18:17:05 +08:00
Yeuoly
51f6ab49cf fix: linter 2024-03-15 18:17:05 +08:00
Yeuoly
2895c3bc8c feat: template transform 2024-03-15 18:17:05 +08:00
Yeuoly
3d5f9b5a1e fix: missing _extract_variable_selector_to_variable_mapping 2024-03-15 18:17:05 +08:00
Yeuoly
614bc2e075 feat: http request 2024-03-15 18:17:05 +08:00
Yeuoly
193bcce236 feat: http request 2024-03-15 18:17:05 +08:00
Yeuoly
a0fd731170 feat: mapping variables 2024-03-15 18:17:05 +08:00
takatost
2f57d090a1 refactor pipeline and remove node run run_args 2024-03-15 18:17:05 +08:00
Yeuoly
4c5822fb6e fix: transform 2024-03-15 18:17:05 +08:00
takatost
e90637f67a fix generate bug 2024-03-15 18:17:05 +08:00
Yeuoly
9b0f83f807 fix: add max number array length 2024-03-15 18:17:05 +08:00
takatost
fc573564b4 refactor workflow runner 2024-03-15 18:17:05 +08:00
Yeuoly
5596b3b00b fix: linter 2024-03-15 18:17:05 +08:00
Yeuoly
cb02b1e12e feat: code 2024-03-15 18:17:05 +08:00
Yeuoly
736e386f15 fix: bugs 2024-03-15 18:17:05 +08:00
takatost
c152d55f68 fix workflow app bugs 2024-03-15 18:17:05 +08:00
takatost
1a0b6adc2c fix stream bugs 2024-03-15 18:17:05 +08:00
takatost
1914dfea77 fix bugs 2024-03-15 18:17:05 +08:00
takatost
1f986a3abb fix bugs 2024-03-15 18:17:05 +08:00
takatost
b174f85237 fix bug 2024-03-15 18:17:05 +08:00
takatost
2ad9c76093 modify migrations 2024-03-15 18:17:05 +08:00
takatost
8684b172d2 add start, end, direct answer node 2024-03-15 18:17:05 +08:00
takatost
3e54cb26be move funcs 2024-03-15 18:17:05 +08:00
takatost
079cc082a3 use callback to filter workflow stream output 2024-03-15 18:17:05 +08:00
takatost
a1bc6b50c5 refactor workflow generate pipeline 2024-03-15 18:17:05 +08:00
takatost
7d28fe8ea5 completed workflow engine main logic 2024-03-15 18:17:05 +08:00
takatost
dd50deaa43 fix audio voice arg 2024-03-15 18:17:04 +08:00
takatost
79a10e9729 add updated_at to sync workflow api 2024-03-15 18:17:04 +08:00
takatost
a5de7b10f3 update ruff check 2024-03-15 18:17:04 +08:00
takatost
bc4edbfc2b lint fix 2024-03-15 18:17:04 +08:00
takatost
75f1355d4c add few workflow run codes 2024-03-15 18:17:04 +08:00
takatost
1a86e79d4a lint fix 2024-03-15 18:17:04 +08:00
takatost
c8a1f923f5 lint fix 2024-03-15 18:17:04 +08:00
takatost
df753e84a3 fix workflow api return 2024-03-15 18:17:04 +08:00
takatost
3086893ee7 fix typo 2024-03-15 18:17:04 +08:00
takatost
242fcf0145 fix typo 2024-03-15 18:17:04 +08:00
takatost
de40422205 lint fix 2024-03-15 18:17:04 +08:00
takatost
df809ff435 add get default node config 2024-03-15 18:17:04 +08:00
takatost
75559bcbf9 replace block type to node type 2024-03-15 18:17:04 +08:00
takatost
d9b8a938c6 use enum instead 2024-03-15 18:17:04 +08:00
takatost
e9004a06a5 lint fix 2024-03-15 18:17:04 +08:00
takatost
be709d4b84 add AdvancedChatAppGenerateTaskPipeline 2024-03-15 18:17:04 +08:00
takatost
602bc67495 lint fix 2024-03-15 18:17:04 +08:00
takatost
e498efce2d refactor app generate 2024-03-15 18:17:04 +08:00
takatost
09dfe80718 add app copy api 2024-03-15 18:17:04 +08:00
takatost
06b05163f6 update app import response 2024-03-15 18:17:04 +08:00
takatost
b80092ea12 lint fix 2024-03-15 18:17:04 +08:00
takatost
2eaae6742a lint fix 2024-03-15 18:17:04 +08:00
takatost
3f5d1a79c6 refactor apps 2024-03-15 18:17:04 +08:00
takatost
15c7e0ec2f lint fix 2024-03-15 18:17:04 +08:00
takatost
43b0440358 support workflow features 2024-03-15 18:17:03 +08:00
takatost
9651a208a9 lint fix 2024-03-15 18:15:54 +08:00
takatost
7bff65304f add features structure validate 2024-03-15 18:15:54 +08:00
takatost
8a8882ed8d move workflow_id to app 2024-03-15 18:15:54 +08:00
takatost
9467fe9aa9 lint fix 2024-03-15 18:15:54 +08:00
takatost
799db69e4f refactor app 2024-03-15 18:15:48 +08:00
takatost
896c200211 fix import problem 2024-03-15 18:15:17 +08:00
takatost
3badc4423a fix: wrong default model parameters when creating app 2024-03-15 18:15:17 +08:00
takatost
d741527ae4 lint 2024-03-15 18:15:17 +08:00
takatost
77618823a5 add features update api
refactor app model config validation
2024-03-15 18:15:17 +08:00
takatost
dd70aeff24 lint fix 2024-03-15 18:15:17 +08:00
takatost
022b7d5dd4 optimize default model exceptions 2024-03-15 18:15:17 +08:00
takatost
11337e51c5 lint fix 2024-03-15 18:15:17 +08:00
takatost
7724d010b6 add app description
add update app api
2024-03-15 18:15:16 +08:00
takatost
124aa9db08 lint fix 2024-03-15 18:15:16 +08:00
takatost
20cf075b2d add workflow runs & workflow node executions api 2024-03-15 18:15:16 +08:00
takatost
bf4a5f6b33 lint fix 2024-03-15 18:15:16 +08:00
takatost
03749917f0 add workflow app log api 2024-03-15 18:15:16 +08:00
takatost
7d51d6030b remove publish workflow when app import 2024-03-15 18:15:16 +08:00
takatost
742b87df5e lint fix 2024-03-15 18:15:16 +08:00
takatost
a457faa2bf trigger app_model_config_was_updated when app import 2024-03-15 18:15:16 +08:00
takatost
4f50f113dd lint fix 2024-03-15 18:15:16 +08:00
takatost
8b529a3ec7 refactor app api 2024-03-15 18:15:16 +08:00
takatost
84c3ec0ea7 site init move to event handler 2024-03-15 18:15:16 +08:00
takatost
c13e8077ba fix agent app converter command 2024-03-15 18:15:16 +08:00
takatost
9f42892b42 lint fix 2024-03-15 18:15:16 +08:00
takatost
27ba5a0bce refactor app mode
add app import and export
2024-03-15 18:15:13 +08:00
takatost
78afba49bf lint fix 2024-03-15 18:13:55 +08:00
takatost
a9192bc1c6 make recommended app list api public 2024-03-15 18:13:55 +08:00
takatost
77f04603b3 fix bugs 2024-03-15 18:13:55 +08:00
takatost
34ed5e428c fix bugs 2024-03-15 18:13:55 +08:00
takatost
98cb17e79e lint fix 2024-03-15 18:13:55 +08:00
takatost
fce20e483c restore completion app 2024-03-15 18:13:55 +08:00
takatost
97c4733e79 lint fix 2024-03-15 18:13:55 +08:00
takatost
748aa22ee2 add manual convert logic 2024-03-15 18:13:55 +08:00
takatost
2ba7ac8bc1 add expert mode of chatapp convert command 2024-03-15 18:13:55 +08:00
takatost
7458fde5a5 add agent app convert command 2024-03-15 18:13:55 +08:00
takatost
f11bf9153d add more tests 2024-03-15 18:13:55 +08:00
takatost
0806b3163a add to http request node convert tests 2024-03-15 18:13:55 +08:00
takatost
45621ba4d7 add api extension to http request node convert 2024-03-15 18:13:55 +08:00
takatost
6aecf42b6e fix prompt transform bugs 2024-03-15 18:13:55 +08:00
takatost
3b234febf5 fix bugs and add unit tests 2024-03-15 18:13:55 +08:00
takatost
8642354a2a lint 2024-03-15 18:13:55 +08:00
takatost
c028e5f889 add app convert codes 2024-03-15 18:13:55 +08:00
takatost
3642dd3a73 add workflow logics 2024-03-15 18:13:55 +08:00
takatost
603b1e9ed4 lint 2024-03-15 18:13:55 +08:00
takatost
b7c6cba23f add workflow models 2024-03-15 18:13:55 +08:00
takatost
d430136f65 lint 2024-03-15 18:13:55 +08:00
takatost
381b3d5016 optimize get app model to wraps 2024-03-15 18:13:55 +08:00
Joel
e3f1e143e5 feat: llm context type limit 2024-03-15 18:03:01 +08:00
Joel
9f1cbb2ee7 feat: answer node input limit 2024-03-15 18:03:01 +08:00
StyleZhang
d0ef9e672f llm mode 2024-03-15 17:57:46 +08:00
Joel
b5c212f575 feat: parse to right datatype and show parse json error 2024-03-15 17:42:00 +08:00
Joel
ff5ab43f9c feat: check before run 2024-03-15 17:42:00 +08:00
StyleZhang
68f947c7e0 stop workflow run 2024-03-15 17:24:40 +08:00
StyleZhang
e98456b025 store 2024-03-15 16:58:48 +08:00
Joel
75b332695b feat: support string num selector to single run debug 2024-03-15 16:37:58 +08:00
jyong
3e4bb695e4 dataset retrieval 2024-03-15 16:14:32 +08:00
Joel
7ba1b37a5a feat: show assigner panel 2024-03-15 15:23:27 +08:00
Joel
2886255c8b fix: can not get first var type 2024-03-15 15:10:41 +08:00
Joel
f7a9564e11 feat: can now choose selected nodes 2024-03-15 14:44:48 +08:00
jyong
c1b0f115d0 dataset retrieval 2024-03-15 14:40:53 +08:00
StyleZhang
2203d9a138 available nodes 2024-03-15 14:40:44 +08:00
Joel
5adf94bd7d feat: support filter obj select type 2024-03-15 14:01:25 +08:00
StyleZhang
5fbf8ee6c6 available nodes 2024-03-15 13:24:29 +08:00
Joel
8d9c86ac4c fix: advanced setting error 2024-03-15 11:47:53 +08:00
Joel
946ef4c685 chore: remove useless lang 2024-03-15 11:31:48 +08:00
Joel
c3773bc2d1 chore: add language placeholder 2024-03-15 11:30:39 +08:00
Joel
1b8c8b0a43 feat: node before and after run 2024-03-15 11:26:19 +08:00
StyleZhang
86d2c1184c Merge branch 'main' into feat/workflow 2024-03-15 11:17:18 +08:00
StyleZhang
817aea9f05 fix 2024-03-15 11:04:34 +08:00
StyleZhang
985c07b25b fix 2024-03-15 11:01:47 +08:00
StyleZhang
05ac27dfa8 fix 2024-03-14 21:07:59 +08:00
JzoNg
bcce53a929 web app support workflow 2024-03-14 21:02:15 +08:00
JzoNg
58922ba40b add route for workflow app 2024-03-14 21:02:15 +08:00
takatost
e6b8b13f2e answer stream output support 2024-03-14 20:50:03 +08:00
Joel
ac675c4443 feat: add checkvalid empty fn 2024-03-14 20:44:51 +08:00
Joel
ae6a558662 feat: add prev and next nodes 2024-03-14 20:29:47 +08:00
Joel
64e44d1709 chore: direct answer to answer 2024-03-14 19:58:17 +08:00
Yeuoly
f35ae2355f fix: code default output 2024-03-14 19:17:27 +08:00
Joel
3a857c83e6 chore: only show has value node in end 2024-03-14 18:46:30 +08:00
Joel
d9edcb2250 feat: change to new end node 2024-03-14 18:43:04 +08:00
StyleZhang
d129d7951c fix 2024-03-14 18:27:58 +08:00
Joel
5c246285da feat: support node type filter 2024-03-14 17:58:26 +08:00
StyleZhang
19c1722032 node default value 2024-03-14 17:27:42 +08:00
Joel
cd9a58231b fix: tool show 2024-03-14 16:40:21 +08:00
Yeuoly
d85b5b9134 fix: tool 2024-03-14 16:38:22 +08:00
Joel
8bd74d5abf feat: var picker support choose type 2024-03-14 16:04:15 +08:00
StyleZhang
2af2e2be67 node add 2024-03-14 15:14:31 +08:00
Joel
43a3b827a3 chore: stringify output 2024-03-14 13:43:13 +08:00
Joel
3c6de0bf3e feat: tool default value 2024-03-14 13:43:13 +08:00
Yeuoly
13a724864d fix: conversation_id equals to none 2024-03-14 13:24:48 +08:00
Yeuoly
baf536eb2b fix: linter 2024-03-14 12:57:14 +08:00
Yeuoly
5200668336 fix: null conversation id 2024-03-14 12:57:14 +08:00
JzoNg
7eeffb16e2 fix url of webapp 2024-03-14 12:35:12 +08:00
JzoNg
300909341e use app store in overview 2024-03-14 12:27:54 +08:00
JzoNg
277d21cccb fix webapp url 2024-03-14 12:22:08 +08:00
takatost
de184051f0 add advanced chat apis support 2024-03-14 12:17:15 +08:00
takatost
95ee72556f fix default configs 2024-03-14 12:12:38 +08:00
Yeuoly
f48364914b fix: linter 2024-03-14 11:59:43 +08:00
Yeuoly
19df70efad fix: node type 2024-03-14 11:59:43 +08:00
StyleZhang
9813609645 publish 2024-03-14 11:48:58 +08:00
takatost
975d0a1651 fix publish route 2024-03-14 11:39:18 +08:00
Yeuoly
3c3571713e fix: http 2024-03-14 11:35:51 +08:00
JzoNg
aa6254a3b4 add doc for workflow app 2024-03-14 11:31:31 +08:00
JzoNg
0b05d2939a add doc for workflow 2024-03-14 11:31:31 +08:00
JzoNg
6f33163f88 fix app siderbar hook 2024-03-14 11:31:31 +08:00
JzoNg
93101b4d9a add running state for step run 2024-03-14 11:31:31 +08:00
JzoNg
d8222a15ca remove useless comments 2024-03-14 11:31:31 +08:00
JzoNg
1728513634 fix sequence number and tokens in result panel 2024-03-14 11:31:31 +08:00
Joel
68fa81ec82 chore: change tool input types 2024-03-14 11:25:27 +08:00
Joel
c051c89176 chore: fix http i18n 2024-03-14 11:25:27 +08:00
StyleZhang
8b2a63e545 fix 2024-03-14 11:19:50 +08:00
takatost
fcd470fcac add answer output parse 2024-03-13 23:00:28 +08:00
takatost
fd8fe15d28 use answer node instead of end in advanced chatbot 2024-03-13 20:54:23 +08:00
Yeuoly
e80315f504 fix: allow None AuthorizationConfig 2024-03-13 20:40:37 +08:00
Joel
ae9e7acd77 feat: other node run 2024-03-13 18:49:22 +08:00
Joel
149eb38e84 feat: tool single run 2024-03-13 18:43:43 +08:00
Joel
d777184fd5 feat: http run 2024-03-13 18:33:08 +08:00
Joel
1653e5eebe feat: template transform 2024-03-13 18:05:17 +08:00
takatost
1f4826ca01 fix err typo 2024-03-13 18:02:19 +08:00
Yeuoly
ef700b2688 enhance: sandbox-docker-compose 2024-03-13 17:46:42 +08:00
Joel
cedc1bada2 fix: template transform query select can not choose var 2024-03-13 17:17:28 +08:00
takatost
0c709afe5c add if-else node 2024-03-13 17:10:51 +08:00
StyleZhang
1c5d07871f hooks 2024-03-13 17:07:03 +08:00
StyleZhang
e11fc8c131 init 2024-03-13 17:07:03 +08:00
Joel
2edef89a8d feat: handle system var 2024-03-13 16:38:24 +08:00
StyleZhang
cbe7de58ab backup draft 2024-03-13 16:27:04 +08:00
Joel
801160c430 feat: tool output vars 2024-03-13 16:18:46 +08:00
Joel
cb2a814296 feat: assign output 2024-03-13 15:58:03 +08:00
Joel
db78b91ec2 feat: http output 2024-03-13 15:58:02 +08:00
Joel
25a11c5bb7 feat: question classify output 2024-03-13 15:58:02 +08:00
StyleZhang
1f41521c21 workflow run 2024-03-13 15:38:56 +08:00
Joel
e686d42262 feat: support template transform output var 2024-03-13 15:21:51 +08:00
Joel
9bca3f8fd7 feat: code support output var list 2024-03-13 15:17:03 +08:00
takatost
6ef3542c6c fix value type 2024-03-13 15:08:15 +08:00
takatost
db299a876e add sequence_number for workflow_started event 2024-03-13 15:01:02 +08:00
takatost
737d04361b record inputs and process data when node failed 2024-03-13 14:55:56 +08:00
Joel
0d2366b432 feat: knowledge output var 2024-03-13 14:52:32 +08:00
Joel
b13345ceb2 chore: remove useless and node filter 2024-03-13 14:41:11 +08:00
Joel
f15dce9ee3 feat: llm output and var type 2024-03-13 14:37:58 +08:00
StyleZhang
b718e66b26 fix: drag stop & click 2024-03-13 13:59:57 +08:00
StyleZhang
a55a7603dd split hooks 2024-03-13 11:53:49 +08:00
Joel
64fa343d16 chore: remove log 2024-03-13 11:24:20 +08:00
Joel
6b02eebe36 feat: support start node vars 2024-03-13 11:22:59 +08:00
Joel
d0f5318b75 feat: code node can run 2024-03-13 10:43:18 +08:00
takatost
5fe0d50cee add deduct quota for llm node 2024-03-13 00:08:13 +08:00
takatost
4d7caa3458 add llm node test 2024-03-12 23:08:23 +08:00
Yeuoly
856466320d fix: linter 2024-03-12 22:42:28 +08:00
Yeuoly
3bd53556ca feat: javascript code 2024-03-12 22:41:59 +08:00
JzoNg
c74854aec0 icon fix 2024-03-12 22:24:23 +08:00
JzoNg
294128d43a fix tracing 2024-03-12 22:21:45 +08:00
takatost
3f59a579d7 add llm node 2024-03-12 22:12:03 +08:00
JzoNg
e5cf4ea60e fix result panel 2024-03-12 21:40:41 +08:00
JzoNg
768ca2d3f0 add panel of result 2024-03-12 21:05:15 +08:00
JzoNg
92e9b1bbb1 update style of app list 2024-03-12 21:05:15 +08:00
JzoNg
446932e076 update style of app creation 2024-03-12 21:05:15 +08:00
StyleZhang
0469edcc0c fix 2024-03-12 21:03:45 +08:00
Joel
3823ae5890 chore: prompt to prompt template 2024-03-12 20:05:42 +08:00
Joel
14d71fb598 feat: var picker get vars 2024-03-12 20:03:35 +08:00
Joel
a031507443 feat: code show result 2024-03-12 19:45:45 +08:00
takatost
4f5c052dc8 fix single step run error 2024-03-12 19:15:11 +08:00
StyleZhang
90e013554c fix 2024-03-12 18:47:24 +08:00
Joel
30ea3cb702 feat: can run code node 2024-03-12 17:59:14 +08:00
StyleZhang
a5147a382d fix 2024-03-12 17:44:45 +08:00
Joel
74bf6cd186 feat: add single run api 2024-03-12 16:26:30 +08:00
Yeuoly
15ddbb5e6f fix: remove answer 2024-03-12 16:25:07 +08:00
StyleZhang
547df0b5fe fix 2024-03-12 15:04:52 +08:00
StyleZhang
8ae46a8a14 fix 2024-03-12 13:25:57 +08:00
StyleZhang
9753077661 fix 2024-03-12 13:03:39 +08:00
StyleZhang
22e7393b9d fix 2024-03-12 11:56:15 +08:00
Yeuoly
943c676768 feat: sandbox 2024-03-11 22:14:28 +08:00
Yeuoly
4ecfe1fec5 feat: docker-compose 2024-03-11 22:12:13 +08:00
Yeuoly
5fac4f8737 fix: forward-ref 2024-03-11 21:58:54 +08:00
Yeuoly
a5394fa2ce test: template transform 2024-03-11 21:53:08 +08:00
Yeuoly
8dc4d122b9 test: tool 2024-03-11 21:53:08 +08:00
StyleZhang
0eb482f35b chat workflow run 2024-03-11 21:05:54 +08:00
StyleZhang
bd52937c88 chat workflow run 2024-03-11 20:54:29 +08:00
jyong
d5b321af3f Merge remote-tracking branch 'origin/feat/workflow-backend' into feat/workflow-backend 2024-03-11 20:06:49 +08:00
jyong
f3b46bf7e2 knowledge node 2024-03-11 20:06:38 +08:00
Yeuoly
2008986f83 feat 2024-03-11 19:51:31 +08:00
Yeuoly
1a57951d72 feat: http 2024-03-11 19:51:06 +08:00
StyleZhang
7655d7f662 run 2024-03-11 19:35:06 +08:00
takatost
373857d0f2 remove unused params in workflow_run_for_list_fields 2024-03-11 19:04:48 +08:00
takatost
6719af9ba9 add debug code 2024-03-11 18:52:24 +08:00
takatost
19c9091d5b add single step run 2024-03-11 18:49:58 +08:00
StyleZhang
84e2071a32 run 2024-03-11 18:11:01 +08:00
Joel
b3b9e1dabb feat: tools support run 2024-03-11 17:33:17 +08:00
Yeuoly
91a35ded18 fix: typing 2024-03-11 16:51:27 +08:00
Yeuoly
2d68594a86 feat: add variable selector mapping 2024-03-11 16:48:28 +08:00
Yeuoly
f3d19f9691 feat: add user uid 2024-03-11 16:46:11 +08:00
Yeuoly
94047de8b4 fix: linter 2024-03-11 16:44:36 +08:00
Yeuoly
1c450e27d3 feat: support empty code output children 2024-03-11 16:44:22 +08:00
Joel
c0ccffa1c3 chore: do not show var group when there is no var 2024-03-11 16:43:13 +08:00
takatost
bbc76cb833 add user for node 2024-03-11 16:31:43 +08:00
Yeuoly
94f3cf1a4c feat: tool entity 2024-03-11 16:13:52 +08:00
Joel
2aa8847b78 merge main 2024-03-11 14:54:29 +08:00
StyleZhang
049e858ef7 run 2024-03-11 14:43:50 +08:00
Yeuoly
8e491ace5c feat: tool node 2024-03-11 13:54:11 +08:00
Yeuoly
dcf9d85e8d fix: linter 2024-03-10 21:12:07 +08:00
Yeuoly
460c0da176 feat: jinja2 2024-03-10 20:24:16 +08:00
takatost
295a248561 add tenant_id / app_id / workflow_id for nodes 2024-03-10 20:15:49 +08:00
takatost
4630f9c746 add workflow_app_log codes 2024-03-10 20:02:19 +08:00
Yeuoly
ba66beb487 refactor: github actions 2024-03-10 18:41:49 +08:00
Yeuoly
b5cb38641a feat: workflow mock test 2024-03-10 18:41:25 +08:00
takatost
4b37d30c0d modify readme 2024-03-10 18:02:05 +08:00
Yeuoly
59ba7917c4 fix: code node does not work as expected 2024-03-10 17:55:24 +08:00
takatost
8d0ff01a59 add readme for db connection management in App Runner and Task Pipeline 2024-03-10 17:11:39 +08:00
takatost
100fb0c5d6 optimize workflow db connections 2024-03-10 16:59:17 +08:00
takatost
b75cd2514e optimize db connections 2024-03-10 16:29:55 +08:00
takatost
7693ba8797 optimize db connections 2024-03-10 15:55:14 +08:00
takatost
3d6b06696e optimize db connections 2024-03-10 15:55:14 +08:00
Yeuoly
0386061fdf fix: linter 2024-03-10 15:55:14 +08:00
Yeuoly
3407b4d8dd feat: template transform 2024-03-10 15:55:14 +08:00
Yeuoly
71ff2a8356 fix: missing _extract_variable_selector_to_variable_mapping 2024-03-10 15:55:14 +08:00
Yeuoly
8b809b8004 feat: http request 2024-03-10 15:55:14 +08:00
Yeuoly
707a3a0a66 feat: http request 2024-03-10 15:55:14 +08:00
Yeuoly
b798aa915c feat: mapping variables 2024-03-10 15:55:14 +08:00
takatost
2db67c4101 refactor pipeline and remove node run run_args 2024-03-10 15:55:14 +08:00
Yeuoly
80b4db08dc fix: transform 2024-03-10 15:55:14 +08:00
takatost
37cdee5101 fix generate bug 2024-03-10 15:55:14 +08:00
Yeuoly
b5366cba03 fix: add max number array length 2024-03-10 15:55:14 +08:00
takatost
6cfda369ef refactor workflow runner 2024-03-10 15:55:14 +08:00
Yeuoly
5a57ed2536 fix: linter 2024-03-10 15:55:14 +08:00
Yeuoly
13937fc103 feat: code 2024-03-10 15:55:14 +08:00
Yeuoly
17cd512284 fix: bugs 2024-03-10 15:55:14 +08:00
takatost
97398ff209 fix workflow app bugs 2024-03-10 15:55:14 +08:00
takatost
2ffb63ff0c fix stream bugs 2024-03-10 15:55:14 +08:00
takatost
90bcb241cc fix bugs 2024-03-10 15:55:14 +08:00
takatost
f4f7cfd45a fix bugs 2024-03-10 15:55:14 +08:00
takatost
d214c047e9 fix bug 2024-03-10 15:55:14 +08:00
takatost
fee8a86880 modify migrations 2024-03-10 15:55:14 +08:00
takatost
ea883b5e48 add start, end, direct answer node 2024-03-10 15:55:14 +08:00
takatost
46296d777c move funcs 2024-03-10 15:55:14 +08:00
takatost
79f0e894e9 use callback to filter workflow stream output 2024-03-10 15:55:14 +08:00
takatost
6372183471 refactor workflow generate pipeline 2024-03-10 15:55:14 +08:00
takatost
5963e7d1c5 completed workflow engine main logic 2024-03-10 15:55:14 +08:00
takatost
c7618fc377 fix audio voice arg 2024-03-10 15:55:14 +08:00
takatost
3fc932b041 add updated_at to sync workflow api 2024-03-10 15:55:14 +08:00
takatost
97cdc96f7c update ruff check 2024-03-10 15:55:14 +08:00
takatost
892fe927c2 lint fix 2024-03-10 15:55:14 +08:00
takatost
d51d456d80 add few workflow run codes 2024-03-10 15:55:14 +08:00
takatost
836376c6c8 lint fix 2024-03-10 15:55:14 +08:00
takatost
fa29eadb7a lint fix 2024-03-10 15:55:14 +08:00
takatost
0cc0065f8c fix workflow api return 2024-03-10 15:55:14 +08:00
takatost
c3eac450ce fix typo 2024-03-10 15:55:14 +08:00
takatost
7b738e045e fix typo 2024-03-10 15:55:14 +08:00
takatost
3f6c17247f lint fix 2024-03-10 15:55:14 +08:00
takatost
0551a9bfcd add get default node config 2024-03-10 15:55:14 +08:00
takatost
7c149ebf4f replace block type to node type 2024-03-10 15:55:14 +08:00
takatost
37b70eb73e use enum instead 2024-03-10 15:55:14 +08:00
takatost
451ea5308f lint fix 2024-03-10 15:55:14 +08:00
takatost
a4d6954d4f add AdvancedChatAppGenerateTaskPipeline 2024-03-10 15:55:14 +08:00
takatost
c786533f22 lint fix 2024-03-10 15:55:13 +08:00
takatost
406a625c98 refactor app generate 2024-03-10 15:55:13 +08:00
takatost
171b2bdc20 add app copy api 2024-03-10 15:55:13 +08:00
takatost
4266ce73cb update app import response 2024-03-10 15:55:13 +08:00
takatost
afa920cc94 lint fix 2024-03-10 15:55:13 +08:00
takatost
701f116be3 lint fix 2024-03-10 15:55:13 +08:00
takatost
5c7ea08bdf refactor apps 2024-03-10 15:55:12 +08:00
takatost
5e38996222 lint fix 2024-03-10 15:54:10 +08:00
takatost
18febeabd1 support workflow features 2024-03-10 15:54:10 +08:00
takatost
be1500bf7d lint fix 2024-03-10 15:54:10 +08:00
takatost
fea549679a add features structure validate 2024-03-10 15:54:10 +08:00
takatost
11e1b569ea move workflow_id to app 2024-03-10 15:54:10 +08:00
takatost
2bbf96d762 lint fix 2024-03-10 15:54:10 +08:00
takatost
70394bae52 refactor app 2024-03-10 15:54:08 +08:00
takatost
0c9e112f41 fix import problem 2024-03-10 15:52:45 +08:00
takatost
607b84d929 fix: wrong default model parameters when creating app 2024-03-10 15:52:45 +08:00
takatost
7a13cd1530 lint 2024-03-10 15:52:45 +08:00
takatost
9b1afb68eb add features update api
refactor app model config validation
2024-03-10 15:52:45 +08:00
takatost
cf9d2965bf lint fix 2024-03-10 15:52:45 +08:00
takatost
b1328c193b optimize default model exceptions 2024-03-10 15:52:45 +08:00
takatost
3d222caaae lint fix 2024-03-10 15:52:45 +08:00
takatost
77ac6fa356 add app description
add update app api
2024-03-10 15:52:45 +08:00
takatost
a3b46006a8 lint fix 2024-03-10 15:52:45 +08:00
takatost
ea4716d039 add workflow runs & workflow node executions api 2024-03-10 15:52:45 +08:00
takatost
db9e7a53f8 lint fix 2024-03-10 15:52:45 +08:00
takatost
4432e055be add workflow app log api 2024-03-10 15:52:45 +08:00
takatost
403c2f436d remove publish workflow when app import 2024-03-10 15:52:45 +08:00
takatost
594de43dec lint fix 2024-03-10 15:52:45 +08:00
takatost
2e68c3fc11 trigger app_model_config_was_updated when app import 2024-03-10 15:52:45 +08:00
takatost
2187f6f62e lint fix 2024-03-10 15:52:45 +08:00
takatost
9249c38bf9 refactor app api 2024-03-10 15:52:44 +08:00
takatost
67e0ba5167 move site init to event handler 2024-03-10 15:52:10 +08:00
takatost
9004d8c3cd fix agent app converter command 2024-03-10 15:52:10 +08:00
takatost
4df424438d lint fix 2024-03-10 15:52:10 +08:00
takatost
6e3cd62e31 refactor app mode
add app import and export
2024-03-10 15:52:09 +08:00
takatost
61b4bedc16 lint fix 2024-03-10 15:51:36 +08:00
takatost
4e5de036c6 make recommended app list api public 2024-03-10 15:51:36 +08:00
takatost
8e54b2e3f2 fix bugs 2024-03-10 15:51:36 +08:00
takatost
d39a51c134 fix bugs 2024-03-10 15:51:36 +08:00
takatost
6efc3d4913 lint fix 2024-03-10 15:51:35 +08:00
takatost
55c31eec31 restore completion app 2024-03-10 15:51:35 +08:00
takatost
9820dcb201 lint fix 2024-03-10 15:51:35 +08:00
takatost
9f29ce9591 add manual convert logic 2024-03-10 15:51:35 +08:00
takatost
afb0ff37bd add expert mode of chatapp convert command 2024-03-10 15:51:35 +08:00
takatost
67b6f08d89 add agent app convert command 2024-03-10 15:51:35 +08:00
takatost
892036bd7d add more tests 2024-03-10 15:51:35 +08:00
takatost
d123ddedc8 add to http request node convert tests 2024-03-10 15:51:35 +08:00
takatost
fc243982e5 add api extension to http request node convert 2024-03-10 15:51:35 +08:00
takatost
df66cd2205 fix prompt transform bugs 2024-03-10 15:51:35 +08:00
takatost
a44d3c3eda fix bugs and add unit tests 2024-03-10 15:51:35 +08:00
takatost
297b33aa41 lint 2024-03-10 15:51:35 +08:00
takatost
0d858cc036 add app convert codes 2024-03-10 15:51:35 +08:00
takatost
f067947266 add workflow logics 2024-03-10 15:51:35 +08:00
takatost
9ad6bd78f5 lint 2024-03-10 15:51:35 +08:00
takatost
b1e220f2d2 add workflow models 2024-03-10 15:51:35 +08:00
takatost
200dc56c37 lint 2024-03-10 15:51:35 +08:00
takatost
49992925e2 optimize get app model to wraps 2024-03-10 15:51:33 +08:00
JzoNg
405e99d27f fix api url of workflow run 2024-03-09 13:03:22 +08:00
JzoNg
90ee7fe201 tracing 2024-03-09 12:48:14 +08:00
JzoNg
bc90fc885f tracing node style update 2024-03-09 11:06:25 +08:00
JzoNg
5afa5fb085 app switch 2024-03-09 10:30:26 +08:00
JzoNg
93e2dc4f5f workflow log result 2024-03-08 21:10:11 +08:00
StyleZhang
08d2a4279f cache toolsmap 2024-03-08 18:36:48 +08:00
StyleZhang
d79b686992 block selector 2024-03-08 18:10:30 +08:00
Joel
1adec7ab51 feat: tool auth 2024-03-08 17:13:24 +08:00
Joel
3b029f2387 feat: tool auth 2024-03-08 17:13:24 +08:00
JzoNg
6d6afe8f52 fix app mode in logs 2024-03-08 17:11:33 +08:00
StyleZhang
e307947dd8 node control 2024-03-08 17:02:02 +08:00
JzoNg
04ad1eef79 workflow logs 2024-03-08 16:43:41 +08:00
StyleZhang
2b475b7916 help line 2024-03-08 16:02:28 +08:00
Joel
f51f4a5843 feat: tool inputs 2024-03-08 15:43:11 +08:00
JzoNg
b5f3bbead2 update cache in appNav after app info updated 2024-03-08 14:48:10 +08:00
Joel
a192ae9314 feat: remove useless file 2024-03-08 14:37:29 +08:00
Joel
7a07d8c2bc feat: tool params 2024-03-08 14:32:33 +08:00
StyleZhang
17a67e7922 remove annotation 2024-03-08 13:46:14 +08:00
StyleZhang
328a3e2e6b node about author 2024-03-08 13:24:59 +08:00
JzoNg
597053c30e fix style of app info 2024-03-08 11:46:25 +08:00
JzoNg
29bef1e3ab app sidebar auto collapse 2024-03-08 11:46:25 +08:00
JzoNg
e36d62f08c hide switch modal 2024-03-08 11:46:25 +08:00
JzoNg
50b4c7fa18 switch app 2024-03-08 11:46:25 +08:00
JzoNg
d86ef15d9a add tip for switch 2024-03-08 11:46:25 +08:00
JzoNg
fa3eb11b6a old apps do not support duplicate and export DSL 2024-03-08 11:46:25 +08:00
JzoNg
beff31b003 update style app config 2024-03-08 11:46:25 +08:00
StyleZhang
2360fb293b update data 2024-03-08 11:27:05 +08:00
Joel
1c82e3870a feat: hide model in node when none is chosen 2024-03-08 11:06:32 +08:00
Joel
49ce9d2200 feat: http support debug and remove mock init debug data 2024-03-08 10:59:49 +08:00
Joel
c20c9b53e1 feat: template transform support debug 2024-03-08 10:40:12 +08:00
Joel
89fc90ac80 chore: code support debug 2024-03-08 10:36:57 +08:00
StyleZhang
072f5caa06 init 2024-03-07 19:43:11 +08:00
Joel
783f7a9b13 feat: question classifier support run 2024-03-07 18:43:19 +08:00
Joel
425e162a91 feat: knowledge support single run 2024-03-07 18:36:45 +08:00
Joel
55b5d76e0b chore: move node run data to node hooks 2024-03-07 18:13:56 +08:00
Joel
9693d014ba feat: add llm debug 2024-03-07 17:48:18 +08:00
StyleZhang
16abcf082c node control 2024-03-07 17:09:04 +08:00
StyleZhang
173336f256 node handle 2024-03-07 15:58:46 +08:00
Joel
f37316f2a0 feat: single run modal 2024-03-07 15:15:39 +08:00
StyleZhang
e044e8efaa chat mode 2024-03-07 14:35:13 +08:00
StyleZhang
af99a55552 chat mode 2024-03-07 14:24:27 +08:00
StyleZhang
8f3d9d0149 panel 2024-03-07 13:54:02 +08:00
StyleZhang
344e30bef4 node 2024-03-07 12:15:51 +08:00
StyleZhang
45ef4059f0 block-icon 2024-03-07 11:48:42 +08:00
StyleZhang
13174aac18 debug and preview 2024-03-07 11:21:59 +08:00
StyleZhang
74f02363f4 record 2024-03-07 10:48:11 +08:00
Joel
10c421a94c Merge branch 'main' into feat/workflow 2024-03-07 10:35:05 +08:00
StyleZhang
3162227b54 features 2024-03-06 19:43:47 +08:00
JzoNg
7e647cc6e7 fix page title update 2024-03-06 19:14:25 +08:00
StyleZhang
ec710d7ffd Merge branch 'main' into feat/workflow 2024-03-06 19:05:21 +08:00
StyleZhang
36718c39dc features 2024-03-06 19:04:06 +08:00
JzoNg
fca9753140 fix app detail update 2024-03-06 18:42:37 +08:00
Joel
0529c3d5d2 feat: add role tooltip and fix add prompt error 2024-03-06 18:36:35 +08:00
Joel
5a27a95f8d feat: llm support type select 2024-03-06 18:11:14 +08:00
Joel
5ec3a967b5 feat: all other code expand 2024-03-06 17:59:58 +08:00
StyleZhang
a45ec15a56 features 2024-03-06 17:48:01 +08:00
StyleZhang
0164dec438 features 2024-03-06 17:46:42 +08:00
JzoNg
4edaa95cbf app menu 2024-03-06 17:39:27 +08:00
JzoNg
067e6b5ae7 app detail redirection 2024-03-06 17:39:27 +08:00
Joel
6adb986167 feat: expand toggle 2024-03-06 16:56:18 +08:00
Joel
cc4ca942c9 feat: prompt editor blur and focus ui 2024-03-06 15:38:45 +08:00
Joel
3202f12cb8 feat: config prompt 2024-03-06 15:17:51 +08:00
StyleZhang
6448d71ca6 draft updated at 2024-03-06 14:35:47 +08:00
StyleZhang
e3a3e07eef tool 2024-03-06 14:04:15 +08:00
Joel
8a906e2959 fix: http nodes update error and support json 2024-03-06 11:34:07 +08:00
Joel
9839b5cb53 fix: enhance code editor style 2024-03-06 11:23:42 +08:00
StyleZhang
430569d486 app detail 2024-03-05 17:38:32 +08:00
Joel
d3dfadbd9b feat: add code editor 2024-03-05 17:37:20 +08:00
StyleZhang
e474e02a50 sync workflow draft 2024-03-05 17:11:54 +08:00
StyleZhang
54d9cdaabf sync workflow draft 2024-03-05 17:11:54 +08:00
Joel
76fe3c1d76 fix: question classifier can not edit 2024-03-05 16:13:24 +08:00
StyleZhang
261e56e61d single run 2024-03-05 15:57:10 +08:00
StyleZhang
ede0bb5396 control run 2024-03-05 15:28:28 +08:00
JzoNg
186b85cd62 add store of app detail 2024-03-05 15:27:52 +08:00
Joel
eab405af5b chore: node add memo 2024-03-05 15:26:28 +08:00
Joel
93999cec56 chore: panel memo 2024-03-05 14:54:59 +08:00
StyleZhang
acacc0a4cb service 2024-03-05 14:44:13 +08:00
Joel
b2ae7089dc fix: var assigner 2024-03-05 14:40:21 +08:00
Joel
d4ab6b294a fix: llm default 2024-03-05 14:35:12 +08:00
Joel
e6d89f6756 fix: start node add 2024-03-05 14:27:52 +08:00
Joel
7ec29bbee7 feat: node add default value 2024-03-05 14:27:52 +08:00
StyleZhang
f1d44a4c87 zoom in out 2024-03-05 14:15:05 +08:00
StyleZhang
466f16eb1d node name 2024-03-05 13:05:36 +08:00
StyleZhang
04d54c0319 fix 2024-03-05 12:40:56 +08:00
StyleZhang
0367a2148a bg 2024-03-05 12:36:59 +08:00
Joel
57e9e229de temp 2024-03-05 12:35:36 +08:00
StyleZhang
90c8d9d27b service 2024-03-05 11:57:51 +08:00
Joel
a30b6acc52 fix: start node 2024-03-05 11:30:45 +08:00
Joel
0ee7f952ef fix: start node 2024-03-05 11:27:24 +08:00
Joel
86656de971 feat: classify data panel node sync 2024-03-05 11:06:35 +08:00
JzoNg
2e649c3329 add icon of yaml 2024-03-05 09:03:52 +08:00
StyleZhang
e868e44025 help line 2024-03-04 20:35:01 +08:00
Joel
4376813951 feat: get and set data use context 2024-03-04 20:14:18 +08:00
StyleZhang
ccd3e519ea edges change 2024-03-04 19:01:38 +08:00
StyleZhang
c4ca3bd34d rename data 2024-03-04 18:18:47 +08:00
StyleZhang
081baae883 operator 2024-03-04 17:45:41 +08:00
StyleZhang
a3d4befad4 service 2024-03-04 17:45:41 +08:00
Joel
2f13d2775f Merge branch 'main' into feat/workflow 2024-03-04 17:40:52 +08:00
Joel
a36a2a1080 feat: handle update logic 2024-03-04 17:04:53 +08:00
Joel
474c7865d7 feat: get and set value from store 2024-03-04 15:51:05 +08:00
StyleZhang
bd205f63cc fix: workflow route 2024-03-04 14:53:48 +08:00
Joel
ac40eb8d87 chore: add missing jp files 2024-03-04 10:44:34 +08:00
Joel
3ea06d286a merge main 2024-03-04 10:41:33 +08:00
JzoNg
8d6984e286 create app by import yaml 2024-03-03 15:37:47 +08:00
JzoNg
cd773b8cc9 app import supported 2024-03-03 14:25:50 +08:00
JzoNg
9bca69ebfb app DSL export supported 2024-03-03 14:10:11 +08:00
JzoNg
b33da6a09c duplicate app supported 2024-03-03 13:41:14 +08:00
JzoNg
7ae23d5567 add redirection 2024-03-03 13:24:58 +08:00
JzoNg
569315ee3e add tip for chatbot orchestrate 2024-03-03 12:39:07 +08:00
JzoNg
4c7941adef app creation 2024-03-02 16:47:43 +08:00
JzoNg
93d116a9d0 add tracing panel 2024-03-02 14:05:05 +08:00
JzoNg
7b2499c292 add metadata of run log 2024-03-02 11:16:23 +08:00
JzoNg
2be2bc5877 add run log status 2024-03-02 09:59:31 +08:00
JzoNg
cfb853efbf log detail panel 2024-03-01 20:32:22 +08:00
JzoNg
2691164fc4 workflow log list 2024-03-01 20:32:22 +08:00
StyleZhang
b113711a86 hooks 2024-03-01 20:22:06 +08:00
StyleZhang
68e9530507 run by single 2024-03-01 19:09:27 +08:00
StyleZhang
0ca23bb840 features 2024-03-01 18:20:49 +08:00
StyleZhang
6e3d6c4269 features 2024-03-01 18:20:49 +08:00
Joel
c2eaa32036 temp 2024-03-01 17:09:53 +08:00
Joel
604930db64 feat: support detect when to show vision config 2024-03-01 16:43:56 +08:00
Joel
c3f99779f2 feat: vision config 2024-03-01 16:30:24 +08:00
Joel
0518da1e49 feat: handle llm memory 2024-03-01 15:07:29 +08:00
Joel
6f6f032244 feat: choose context var 2024-03-01 14:35:37 +08:00
StyleZhang
0acb2db9b6 layout 2024-03-01 14:29:02 +08:00
Joel
74d26764f8 feat: knowledge retrieval dataset setting 2024-03-01 13:56:52 +08:00
Joel
cf77a89123 feat: dataset item ui 2024-03-01 11:38:24 +08:00
StyleZhang
1b73632f77 utils 2024-03-01 11:27:30 +08:00
Joel
0a7cbf6fde feat: dataset list struct 2024-02-29 20:26:51 +08:00
Joel
e4701e26c8 feat: add datasets 2024-02-29 20:03:26 +08:00
Joel
045156985a fix: not show rerank modal picker 2024-02-29 19:39:27 +08:00
Joel
257e795ca9 feat: retrieval config 2024-02-29 18:24:15 +08:00
StyleZhang
bafdc510d6 record panel 2024-02-29 17:33:49 +08:00
StyleZhang
1840d05a37 record panel 2024-02-29 17:27:08 +08:00
Joel
6d5618447e feat: knowledge retrieval output 2024-02-29 16:39:36 +08:00
Joel
b2de27b7be feat: knowledge query var 2024-02-29 15:48:48 +08:00
Joel
9c0d44fa09 feat: llm node support config memory 2024-02-29 15:36:25 +08:00
Joel
f95eb2df0d feat: field fold 2024-02-29 15:26:06 +08:00
Joel
cbb298ccb6 feat: config conversation role name 2024-02-29 15:17:20 +08:00
Joel
9e6940ed3e feat: memory size config 2024-02-29 14:50:36 +08:00
Joel
fbcc769d4e feat: instructions 2024-02-29 11:50:46 +08:00
Joel
65f0378e43 feat: classlist crud 2024-02-29 11:37:53 +08:00
Joel
f7a90f2660 merge main 2024-02-29 10:59:21 +08:00
StyleZhang
3d825dcb3e add features 2024-02-28 20:58:00 +08:00
StyleZhang
2094a554f6 multiple edge 2024-02-28 20:58:00 +08:00
Joel
4837ae4958 feat: question add class 2024-02-28 20:49:12 +08:00
Joel
6da9950b72 feat: workflow to auth 2024-02-28 20:19:22 +08:00
StyleZhang
e8921787b3 hooks 2024-02-28 20:11:24 +08:00
StyleZhang
d2d6904c9b panel-operator 2024-02-28 20:11:23 +08:00
Joel
510f0593e9 chore: question classify 2024-02-28 19:54:20 +08:00
Joel
7a438f8999 chore: assign var 2024-02-28 19:49:01 +08:00
Joel
113af85c3c chore: add required 2024-02-28 18:32:36 +08:00
Joel
916bacb60e chore: remove auto show modal 2024-02-28 18:17:39 +08:00
Joel
a98b5ca97e chore: auth i18n 2024-02-28 18:11:09 +08:00
Joel
076fe8ca3a feat: auth struct 2024-02-28 17:37:31 +08:00
Joel
b08327cb4b feat: edit body 2024-02-28 16:25:09 +08:00
StyleZhang
f1b868d5d9 next step 2024-02-28 16:19:57 +08:00
Joel
76ff004ea5 feat: bulk edit 2024-02-28 15:01:58 +08:00
Joel
df173764d2 chore: replace remove btn 2024-02-28 13:54:11 +08:00
Joel
7fa25934af feat: key value input 2024-02-28 11:30:28 +08:00
Joel
649c3d0732 feat: key value struct 2024-02-27 18:48:01 +08:00
Joel
35c56237a0 feat: url selector 2024-02-27 18:34:54 +08:00
StyleZhang
236cc6f526 hooks 2024-02-27 18:20:32 +08:00
StyleZhang
a311f88c99 compute node position 2024-02-27 18:02:49 +08:00
Joel
e92bc25216 fix: if node condition operation i18n 2024-02-27 17:10:52 +08:00
Joel
0c06d84e22 chore: add spacing and hover 2024-02-27 16:22:47 +08:00
Joel
77c8261fca chore: if not align 2024-02-27 16:19:29 +08:00
Joel
91a2e71fff feat: if comparison 2024-02-27 15:52:07 +08:00
Joel
0fb47fed9e feat: add and update condition 2024-02-27 15:20:04 +08:00
Joel
4519c6ab29 feat: conditions struct 2024-02-27 11:50:56 +08:00
Joel
32c6431dbc feat: assign node no var list tip 2024-02-27 10:58:46 +08:00
Joel
925964ac28 feat: add default and utils 2024-02-27 10:43:11 +08:00
Joel
4d4d3bb965 feat: add default values and utils and fix ts 2024-02-27 10:34:13 +08:00
StyleZhang
3d526b3a87 handleAddNextNode 2024-02-26 19:03:04 +08:00
StyleZhang
f91582e060 publish button 2024-02-26 18:43:33 +08:00
Joel
dec60fdd4c feat: finish var assigner 2024-02-26 18:19:01 +08:00
Joel
31930159b8 feat: var assigner data logic 2024-02-26 18:19:01 +08:00
StyleZhang
6e2611c86c node title desc 2024-02-26 17:10:18 +08:00
StyleZhang
7574107d8c add run-history 2024-02-26 15:38:49 +08:00
StyleZhang
58d8b0dd01 node handle connection line 2024-02-26 14:33:31 +08:00
JzoNg
49f78bacef update icons of app menu 2024-02-25 14:06:07 +08:00
JzoNg
3b190467c1 update i18n and style of creation from app template 2024-02-24 14:58:00 +08:00
JzoNg
804a090457 app templates 2024-02-24 14:21:42 +08:00
JzoNg
14cfb310e3 app creation 2024-02-24 12:45:58 +08:00
JzoNg
f607a334ac create from DSL 2024-02-24 12:45:58 +08:00
JzoNg
117b84116e app list modification 2024-02-24 12:45:58 +08:00
StyleZhang
171dd5c737 node-handle 2024-02-23 19:26:56 +08:00
Joel
b5ed4af25a feat: var assigner node 2024-02-23 18:26:42 +08:00
StyleZhang
b6c683a1b8 next step 2024-02-23 17:20:06 +08:00
Joel
5200ec0b9a feat: end node panel 2024-02-23 17:01:48 +08:00
Joel
307cbf1d9f feat: input no var tip 2024-02-23 16:15:45 +08:00
StyleZhang
e7ecdb01a6 block-selector 2024-02-23 15:20:04 +08:00
Joel
383bfd7583 feat: merge i18n 2024-02-23 15:18:48 +08:00
Joel
508ea8bc0a feat: add number type var 2024-02-23 15:02:00 +08:00
Joel
077de17cd5 feat: support config modal edit 2024-02-23 14:25:08 +08:00
StyleZhang
f6c07c996b workflow store 2024-02-23 14:16:19 +08:00
Joel
7ba0bfffa2 fix: debug set var value error 2024-02-23 11:58:28 +08:00
StyleZhang
9b577fa32c chore 2024-02-23 11:31:46 +08:00
StyleZhang
94cda3e837 chore 2024-02-22 20:10:58 +08:00
Joel
f09f91e25a hide debug 2024-02-22 18:51:25 +08:00
Joel
235bec6481 feat: new var input editor 2024-02-22 18:49:32 +08:00
StyleZhang
ee616ee6dd header 2024-02-22 16:35:54 +08:00
Joel
5153068a64 feat: start var list 2024-02-22 16:17:38 +08:00
StyleZhang
701e441349 panel 2024-02-22 15:37:33 +08:00
Joel
5817a035f9 feat: start built-in vars show 2024-02-22 15:06:27 +08:00
StyleZhang
ea76f46223 block-selector 2024-02-22 15:02:02 +08:00
Joel
0759b29ca2 feat: retrieval node 2024-02-22 14:40:16 +08:00
Joel
db6074e035 feat: tool node 2024-02-22 14:06:34 +08:00
Joel
6057ba0988 feat: var assigner node struct 2024-02-22 11:42:20 +08:00
Joel
2fdcf1756e feat: end node 2024-02-22 11:04:14 +08:00
StyleZhang
f489736e06 add debug-and-preview 2024-02-21 20:23:03 +08:00
StyleZhang
15f13209cf node handle 2024-02-21 19:02:58 +08:00
Joel
dbf3b7ad6d feat: end node types and mock 2024-02-21 18:38:52 +08:00
Joel
3341077587 feat: start node 2024-02-21 18:31:45 +08:00
Joel
e39d7021e0 feat: if else node 2024-02-21 17:45:52 +08:00
Joel
cffaf30760 feat: if types 2024-02-21 16:34:38 +08:00
Joel
bc60cf0a35 feat: http node content 2024-02-21 16:00:14 +08:00
Joel
9bb9807252 feat: http node struct 2024-02-21 15:50:36 +08:00
Joel
8b8fdb48bb feat: output var 2024-02-21 14:56:10 +08:00
Joel
b4437ccd2b chore: output lines 2024-02-21 14:11:51 +08:00
Joel
65ac4dedcc feat: template transform code tooltip 2024-02-21 13:55:17 +08:00
StyleZhang
671654da71 add node 2024-02-21 12:28:10 +08:00
Joel
31490417d1 Merge branch 'main' into feat/workflow 2024-02-21 12:05:22 +08:00
Joel
17e8c91267 feat: template transform panel content 2024-02-21 11:48:34 +08:00
Joel
db7dccf349 feat: type selector 2024-02-21 11:33:25 +08:00
Joel
71d3f71e22 feat: code editor base 2024-02-21 11:04:37 +08:00
StyleZhang
13a54c3f56 block-selector edit 2024-02-20 20:12:41 +08:00
Joel
d58a1b1359 feat: code support vars 2024-02-20 18:42:21 +08:00
Joel
bb87a350ac feat: question classify panel 2024-02-20 17:39:09 +08:00
Joel
c441a848e7 feat: question classify node 2024-02-20 17:29:06 +08:00
StyleZhang
f14a5c7346 add node-control 2024-02-20 16:58:29 +08:00
Joel
92219b5aad feat: prompt ide and finish direct answer node 2024-02-20 16:52:22 +08:00
Joel
291201db1c chore: llm use config 2024-02-20 15:34:44 +08:00
Joel
c8ea6d7bfb feat: direct answer node 2024-02-20 15:21:35 +08:00
Joel
9c70befaf6 chore: move node type to its own struct 2024-02-20 14:59:03 +08:00
Joel
fcadb807f6 feat: llm node content 2024-02-20 14:51:19 +08:00
Joel
4364775dcb feat: output vars 2024-02-20 14:27:40 +08:00
Joel
2a196e91a6 feat: default set var name 2024-02-20 14:04:24 +08:00
Joel
7a0358827a feat: finish choose var 2024-02-20 14:01:20 +08:00
Joel
62e2deafca feat: infinite choose var 2024-02-20 11:23:23 +08:00
StyleZhang
25b4e68fbb delete 2024-02-19 19:58:49 +08:00
StyleZhang
c909319413 base-node base-panel 2024-02-19 19:44:48 +08:00
StyleZhang
c7ee8ac1c7 add app-info-panel 2024-02-19 19:06:42 +08:00
Joel
2386eed703 chore: new block enum 2024-02-19 18:46:21 +08:00
Joel
ada558bedc feat: add picker shower 2024-02-19 18:33:12 +08:00
StyleZhang
6caca3aaf7 base panel 2024-02-19 17:58:54 +08:00
StyleZhang
3d3bc4c512 initial node data 2024-02-19 17:45:36 +08:00
Joel
044ed624eb feat: var picker trigger 2024-02-19 16:36:53 +08:00
Joel
4dff0c5dff feat: do not ignore var 2024-02-19 16:01:12 +08:00
StyleZhang
59d8f926c8 block-selector 2024-02-19 15:54:41 +08:00
Joel
c6f1900a93 chore: merge main 2024-02-19 15:52:51 +08:00
Joel
d94a9cd864 feat: add node icons 2024-02-19 15:44:52 +08:00
Joel
21db8e3be4 feat: add var struct 2024-02-19 11:26:25 +08:00
Joel
e05bbec879 chore: model and params select 2024-02-19 10:35:11 +08:00
StyleZhang
240e0dfa6f next-step 2024-02-18 20:26:16 +08:00
Joel
ab6a01b476 chore: handle llm model type 2024-02-18 18:31:04 +08:00
StyleZhang
dce01cf002 header 2024-02-18 17:56:00 +08:00
StyleZhang
da84ba06c7 add block-icon 2024-02-18 17:36:34 +08:00
Joel
45ba3ca07b feat: add model selector 2024-02-18 17:26:52 +08:00
StyleZhang
56407a910d add block-selector 2024-02-18 16:14:21 +08:00
StyleZhang
e624c33d51 node props 2024-02-18 16:14:21 +08:00
Joel
3666462076 feat: llm input struct 2024-02-18 14:33:44 +08:00
Joel
da0d9aab39 chore: move node code to panel 2024-02-18 14:08:08 +08:00
Joel
ace04b3ef4 feat: field and var 2024-02-18 14:01:22 +08:00
Joel
1a4c2e77c4 feat: nodes placeholder 2024-02-06 17:49:07 +08:00
StyleZhang
f3c78fe73d init 2024-02-06 17:17:29 +08:00
StyleZhang
a17c0e5bf6 init 2024-02-06 17:05:26 +08:00
StyleZhang
20d5fdea2c init 2024-02-06 12:41:34 +08:00
8019 changed files with 284210 additions and 816011 deletions


@@ -1,19 +0,0 @@
{
"permissions": {
"allow": [],
"deny": []
},
"env": {
"__comment": "Environment variables for MCP servers. Override in .claude/settings.local.json with actual values.",
"GITHUB_PERSONAL_ACCESS_TOKEN": "ghp_xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
},
"enabledMcpjsonServers": [
"context7",
"sequential-thinking",
"github",
"fetch",
"playwright",
"ide"
],
"enableAllProjectMcpServers": true
}
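The deleted settings file keeps a placeholder token and, per its __comment, expects real values in .claude/settings.local.json. As a rough sketch (the local file's schema is an assumption inferred from that comment, not something this diff shows), the override could be created from a shell:

# Hypothetical override file; only the env block is assumed to be merged.
cat > .claude/settings.local.json <<'EOF'
{
  "env": {
    "GITHUB_PERSONAL_ACCESS_TOKEN": "<your-real-token>"
  }
}
EOF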


@@ -1,4 +1,8 @@
FROM mcr.microsoft.com/devcontainers/python:3.12-bookworm
FROM mcr.microsoft.com/devcontainers/python:3.10
RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
&& apt-get -y install libgmp-dev libmpfr-dev libmpc-dev
COPY . .
# [Optional] Uncomment this section to install additional OS packages.
# RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
# && apt-get -y install --no-install-recommends <your-package-list-here>
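One side of this hunk pins mcr.microsoft.com/devcontainers/python:3.10 and installs the GMP/MPFR/MPC headers; the other moves to python:3.12-bookworm. A quick sanity check of whichever image gets built might look like this sketch (the image tag is an assumption; build from the repo root so the COPY context resolves):

docker build -f .devcontainer/Dockerfile -t dify-devcontainer .
docker run --rm dify-devcontainer python --version    # expect 3.10.x or 3.12.x
docker run --rm dify-devcontainer dpkg -s libgmp-dev  # present only on the side that installs it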


@@ -1,26 +1,23 @@
# Development with devcontainer
# Devlopment with devcontainer
This project includes a devcontainer configuration that allows you to open the project in a container with a fully configured development environment.
Both frontend and backend environments are initialized when the container is started.
## GitHub Codespaces
[![Open in GitHub Codespaces](https://github.com/codespaces/badge.svg)](https://codespaces.new/langgenius/dify)
you can simply click the button above to open this project in GitHub Codespaces.
For more info, check out the [GitHub documentation](https://docs.github.com/en/free-pro-team@latest/github/developing-online-with-codespaces/creating-a-codespace#creating-a-codespace).
## VS Code Dev Containers
## VS Code Dev Containers
[![Open in Dev Containers](https://img.shields.io/static/v1?label=Dev%20Containers&message=Open&color=blue&logo=visualstudiocode)](https://vscode.dev/redirect?url=vscode://ms-vscode-remote.remote-containers/cloneInVolume?url=https://github.com/langgenius/dify)
if you have VS Code installed, you can click the button above to open this project in VS Code Dev Containers.
You can learn more in the [Dev Containers documentation](https://code.visualstudio.com/docs/devcontainers/containers).
## Pros of Devcontainer
## Pros of Devcontainer
Unified Development Environment: By using devcontainers, you can ensure that all developers are developing in the same environment, reducing the occurrence of "it works on my machine" type of issues.
Quick Start: New developers can set up their development environment in a few simple steps, without spending a lot of time on environment configuration.
@@ -28,15 +25,13 @@ Quick Start: New developers can set up their development environment in a few si
Isolation: Devcontainers isolate your project from your host operating system, reducing the chance of OS updates or other application installations impacting the development environment.
## Cons of Devcontainer
Learning Curve: For developers unfamiliar with Docker and VS Code, using devcontainers may be somewhat complex.
Performance Impact: While usually minimal, programs running inside a devcontainer may be slightly slower than those running directly on the host.
## Troubleshooting
if you see such error message when you open this project in codespaces:
![Alt text](troubleshooting.png)
a simple workaround is change `/signin` endpoint into another one, then login with GitHub account and close the tab, then change it back to `/signin` endpoint. Then all things will be fine.
The reason is `signin` endpoint is not allowed in codespaces, details can be found [here](https://github.com/orgs/community/discussions/5204)
a simple workaround is change `/signin` endpoint into another one, then login with github account and close the tab, then change it back to `/signin` endpoint. Then all things will be fine.
The reason is `signin` endpoint is not allowed in codespaces, details can be found [here](https://github.com/orgs/community/discussions/5204)


@@ -1,8 +1,8 @@
// For format details, see https://aka.ms/devcontainer.json. For config options, see the
// README at: https://github.com/devcontainers/templates/tree/main/src/anaconda
{
"name": "Python 3.12",
"build": {
"name": "Python 3.10",
"build": {
"context": "..",
"dockerfile": "Dockerfile"
},
@@ -11,7 +11,7 @@
"nodeGypDependencies": true,
"version": "lts"
},
"ghcr.io/devcontainers-extra/features/npm-package:1": {
"ghcr.io/devcontainers-contrib/features/npm-package:1": {
"package": "typescript",
"version": "latest"
},
@@ -32,8 +32,8 @@
]
}
},
"postStartCommand": "./.devcontainer/post_start_command.sh",
"postCreateCommand": "./.devcontainer/post_create_command.sh"
"postStartCommand": "cd api && pip install -r requirements.txt",
"postCreateCommand": "cd web && npm install"
// Features to add to the dev container. More info: https://containers.dev/features.
// "features": {},


@@ -1,3 +1,3 @@
This file copied into the container along with environment.yml* from the parent
folder. This file is included to prevents the Dockerfile COPY instruction from
failing if no environment.yml is found.
folder. This file is included to prevents the Dockerfile COPY instruction from
failing if no environment.yml is found.


@@ -1,16 +0,0 @@
#!/bin/bash
WORKSPACE_ROOT=$(pwd)
corepack enable
cd web && pnpm install
pipx install uv
echo "alias start-api=\"cd $WORKSPACE_ROOT/api && uv run python -m flask run --host 0.0.0.0 --port=5001 --debug\"" >> ~/.bashrc
echo "alias start-worker=\"cd $WORKSPACE_ROOT/api && uv run python -m celery -A app.celery worker -P threads -c 1 --loglevel INFO -Q dataset,generation,mail,ops_trace,app_deletion,plugin,workflow_storage\"" >> ~/.bashrc
echo "alias start-web=\"cd $WORKSPACE_ROOT/web && pnpm dev\"" >> ~/.bashrc
echo "alias start-web-prod=\"cd $WORKSPACE_ROOT/web && pnpm build && pnpm start\"" >> ~/.bashrc
echo "alias start-containers=\"cd $WORKSPACE_ROOT/docker && docker-compose -f docker-compose.middleware.yaml -p dify --env-file middleware.env up -d\"" >> ~/.bashrc
echo "alias stop-containers=\"cd $WORKSPACE_ROOT/docker && docker-compose -f docker-compose.middleware.yaml -p dify --env-file middleware.env down\"" >> ~/.bashrc
source /home/vscode/.bashrc
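The deleted post_start_command.sh registers one alias per service. Assuming the aliases are loaded (a fresh shell, or source ~/.bashrc), a typical session inside the container would be:

start-containers   # middleware stack via docker-compose.middleware.yaml
start-api          # Flask API on port 5001 in debug mode, via uv
start-worker       # Celery worker for the listed queues
start-web          # pnpm dev server for the frontend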


@@ -1,3 +0,0 @@
#!/bin/bash
cd api && uv sync


@@ -1,39 +0,0 @@
# EditorConfig is awesome: https://EditorConfig.org
# top-most EditorConfig file
root = true
# Unix-style newlines with a newline ending every file
[*]
charset = utf-8
end_of_line = lf
insert_final_newline = true
trim_trailing_whitespace = true
[*.py]
indent_size = 4
indent_style = space
[*.{yml,yaml}]
indent_style = space
indent_size = 2
[*.toml]
indent_size = 4
indent_style = space
# Markdown and MDX are whitespace sensitive languages.
# Do not remove trailing spaces.
[*.{md,mdx}]
trim_trailing_whitespace = false
# Matches multiple files with brace expansion notation
# Set default charset
[*.{js,tsx}]
indent_style = space
indent_size = 2
# Matches the exact files package.json
[package.json]
indent_style = space
indent_size = 2

.gitattributes

@@ -1,7 +0,0 @@
# Ensure that .sh scripts use LF as line separator, even if they are checked out
# to Windows(NTFS) file-system, by a user of Docker for Windows.
# These .sh scripts will be run from the Container after `docker compose up -d`.
# If they appear to be CRLF style, Dash from the Container will fail to execute
# them.
*.sh text eol=lf
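Because the attribute only applies as files pass through the index, working trees checked out before this rule keep their old endings until renormalized. A minimal check-and-fix sketch (the .sh path below is a placeholder, not a file this diff names):

git check-attr text eol -- docker/entrypoint.sh   # hypothetical path; confirms eol=lf applies
git add --renormalize .                           # re-stage files with the LF rule applied
git status                                        # shows which .sh files actually changed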


@@ -17,25 +17,27 @@ diverse, inclusive, and healthy community.
Examples of behavior that contributes to a positive environment for our
community include:
- Demonstrating empathy and kindness toward other people
- Being respectful of differing opinions, viewpoints, and experiences
- Giving and gracefully accepting constructive feedback
- Accepting responsibility and apologizing to those affected by our mistakes,
* Demonstrating empathy and kindness toward other people
* Being respectful of differing opinions, viewpoints, and experiences
* Giving and gracefully accepting constructive feedback
* Accepting responsibility and apologizing to those affected by our mistakes,
and learning from the experience
- Focusing on what is best not just for us as individuals, but for the
* Focusing on what is best not just for us as individuals, but for the
overall community
Examples of unacceptable behavior include:
- The use of sexualized language or imagery, and sexual attention or
* The use of sexualized language or imagery, and sexual attention or
advances of any kind
- Trolling, insulting or derogatory comments, and personal or political attacks
- Public or private harassment
- Publishing others' private information, such as a physical or email
* Trolling, insulting or derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or email
address, without their explicit permission
- Other conduct which could reasonably be considered inappropriate in a
* Other conduct which could reasonably be considered inappropriate in a
professional setting
## Language Policy
To facilitate clear and effective communication, all discussions, comments, documentation, and pull requests in this project should be conducted in English. This ensures that all contributors can participate and collaborate effectively.


@@ -1,24 +0,0 @@
title: "General Discussion"
body:
- type: checkboxes
attributes:
label: Self Checks
description: "To make sure we get to you in time, please check the following :)"
options:
- label: I have searched for existing issues [search for existing issues](https://github.com/langgenius/dify/issues), including closed ones.
required: true
- label: I confirm that I am using English to submit this report (我已阅读并同意 [Language Policy](https://github.com/langgenius/dify/issues/1542)).
required: true
- label: "[FOR CHINESE USERS] 请务必使用英文提交 Issue否则会被关闭。谢谢:)"
required: true
- label: "Please do not modify this template :) and fill in all the required fields."
required: true
- type: textarea
attributes:
label: Content
placeholder: Please describe the content you would like to discuss.
validations:
required: true
- type: markdown
attributes:
value: Please limit one request per issue.


@@ -1,30 +0,0 @@
title: "Help"
body:
- type: checkboxes
attributes:
label: Self Checks
description: "To make sure we get to you in time, please check the following :)"
options:
- label: I have searched for existing issues [search for existing issues](https://github.com/langgenius/dify/issues), including closed ones.
required: true
- label: I confirm that I am using English to submit this report (我已阅读并同意 [Language Policy](https://github.com/langgenius/dify/issues/1542)).
required: true
- label: "[FOR CHINESE USERS] 请务必使用英文提交 Issue否则会被关闭。谢谢:)"
required: true
- label: "Please do not modify this template :) and fill in all the required fields."
required: true
- type: textarea
attributes:
label: 1. Is this request related to a challenge you're experiencing? Tell me about your story.
placeholder: Please describe the specific scenario or problem you're facing as clearly as possible. For instance "I was trying to use [feature] for [specific task], and [what happened]... It was frustrating because...."
validations:
required: true
- type: textarea
attributes:
label: 2. Additional context or comments
placeholder: (Any other information, comments, documentations, links, or screenshots that would provide more clarity. This is the place to add anything else not covered above.)
validations:
required: false
- type: markdown
attributes:
value: Please limit one request per issue.


@@ -1,37 +0,0 @@
title: Suggestions for New Features
body:
- type: checkboxes
attributes:
label: Self Checks
description: "To make sure we get to you in time, please check the following :)"
options:
- label: I have searched for existing issues [search for existing issues](https://github.com/langgenius/dify/issues), including closed ones.
required: true
- label: I confirm that I am using English to submit this report (我已阅读并同意 [Language Policy](https://github.com/langgenius/dify/issues/1542)).
required: true
- label: "[FOR CHINESE USERS] 请务必使用英文提交 Issue否则会被关闭。谢谢:)"
required: true
- label: "Please do not modify this template :) and fill in all the required fields."
required: true
- type: textarea
attributes:
label: 1. Is this request related to a challenge you're experiencing? Tell me about your story.
placeholder: Please describe the specific scenario or problem you're facing as clearly as possible. For instance "I was trying to use [feature] for [specific task], and [what happened]... It was frustrating because...."
validations:
required: true
- type: textarea
attributes:
label: 2. Additional context or comments
placeholder: (Any other information, comments, documentations, links, or screenshots that would provide more clarity. This is the place to add anything else not covered above.)
validations:
required: false
- type: checkboxes
attributes:
label: 3. Can you help us with this feature?
description: Let us know! This is not a commitment, but a starting point for collaboration.
options:
- label: I am interested in contributing to this feature.
required: false
- type: markdown
attributes:
value: Please limit one request per issue.


@@ -8,22 +8,17 @@ body:
label: Self Checks
description: "To make sure we get to you in time, please check the following :)"
options:
- label: I have read the [Contributing Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md) and [Language Policy](https://github.com/langgenius/dify/issues/1542).
required: true
- label: This is only for bug report, if you would like to ask a question, please head to [Discussions](https://github.com/langgenius/dify/discussions/categories/general).
required: true
- label: I have searched for existing issues [search for existing issues](https://github.com/langgenius/dify/issues), including closed ones.
required: true
- label: I confirm that I am using English to submit this report, otherwise it will be closed.
- label: I confirm that I am using English to submit this report (我已阅读并同意 [Language Policy](https://github.com/langgenius/dify/issues/1542)).
required: true
- label: 【中文用户 & Non English User】请使用英语提交否则会被关闭
required: true
- label: "Please do not modify this template :) and fill in all the required fields."
- label: "Pleas do not modify this template :) and fill in all the required fields."
required: true
- type: input
attributes:
label: Dify version
placeholder: 0.3.21
description: See about section in Dify console
validations:
required: true
@@ -43,23 +38,21 @@ body:
- type: textarea
attributes:
label: Steps to reproduce
description: We highly suggest including screenshots and a bug report log. Please use the right markdown syntax for code blocks.
placeholder: Having detailed steps helps us reproduce the bug. If you have logs, please use fenced code blocks (triple backticks ```) to format them.
description: We highly suggest including screenshots and a bug report log.
placeholder: Having detailed steps helps us reproduce the bug.
validations:
required: true
- type: textarea
attributes:
label: ✔️ Expected Behavior
description: Describe what you expected to happen.
placeholder: What were you expecting? Please do not copy and paste the steps to reproduce here.
placeholder: What were you expecting?
validations:
required: true
required: false
- type: textarea
attributes:
label: ❌ Actual Behavior
description: Describe what actually happened.
placeholder: What happened instead? Please do not copy and paste the steps to reproduce here.
placeholder: What happened instead?
validations:
required: false


@@ -1,14 +1,8 @@
blank_issues_enabled: false
contact_links:
- name: "\U0001F510 Security Vulnerabilities"
url: "https://github.com/langgenius/dify/security/advisories/new"
about: Report security vulnerabilities through GitHub Security Advisories to ensure responsible disclosure. 💡 Please do not report security vulnerabilities in public issues.
- name: "\U0001F4A1 Model Providers & Plugins"
url: "https://github.com/langgenius/dify-official-plugins/issues/new/choose"
about: Report issues with official plugins or model providers, you will need to provide the plugin version and other relevant details.
- name: "\U0001F4AC Documentation Issues"
url: "https://github.com/langgenius/dify-docs/issues/new"
about: Report issues with the documentation, such as typos, outdated information, or missing content. Please provide the specific section and details of the issue.
- name: "\U0001F4E7 Discussions"
url: https://github.com/langgenius/dify/discussions/categories/general
about: General discussions and seek help from the community
- name: "\U0001F4DA Dify user documentation"
url: https://docs.dify.ai/getting-started/readme
about: Documentation for users of Dify
- name: "\U0001F4DA Dify dev documentation"
url: https://docs.dify.ai/getting-started/install-self-hosted
about: Documentation for people interested in developing and contributing for Dify


@@ -0,0 +1,22 @@
name: "📚 Documentation Issue"
description: Report issues in our documentation
labels:
- ducumentation
body:
- type: checkboxes
attributes:
label: Self Checks
description: "To make sure we get to you in time, please check the following :)"
options:
- label: I have searched for existing issues [search for existing issues](https://github.com/langgenius/dify/issues), including closed ones.
required: true
- label: I confirm that I am using English to submit report (我已阅读并同意 [Language Policy](https://github.com/langgenius/dify/issues/1542)).
required: true
- label: "Pleas do not modify this template :) and fill in all the required fields."
required: true
- type: textarea
attributes:
label: Provide a description of requested docs changes
placeholder: Briefly describe which document needs to be corrected and why.
validations:
required: true


@@ -8,29 +8,39 @@ body:
label: Self Checks
description: "To make sure we get to you in time, please check the following :)"
options:
- label: I have read the [Contributing Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md) and [Language Policy](https://github.com/langgenius/dify/issues/1542).
required: true
- label: I have searched for existing issues [search for existing issues](https://github.com/langgenius/dify/issues), including closed ones.
required: true
- label: I confirm that I am using English to submit this report, otherwise it will be closed.
- label: I confirm that I am using English to submit this report (我已阅读并同意 [Language Policy](https://github.com/langgenius/dify/issues/1542)).
required: true
- label: "Please do not modify this template :) and fill in all the required fields."
- label: "Pleas do not modify this template :) and fill in all the required fields."
required: true
- type: textarea
attributes:
label: 1. Is this request related to a challenge you're experiencing? Tell me about your story.
label: 1. Is this request related to a challenge you're experiencing?
placeholder: Please describe the specific scenario or problem you're facing as clearly as possible. For instance "I was trying to use [feature] for [specific task], and [what happened]... It was frustrating because...."
validations:
required: true
- type: textarea
attributes:
label: 2. Additional context or comments
label: 2. Describe the feature you'd like to see
placeholder: Think about what you want to achieve and how this feature will help you. Sketches, flow diagrams, or any visual representation will be a major plus.
validations:
required: true
- type: textarea
attributes:
label: 3. How will this feature improve your workflow or experience?
placeholder: Tell us how this change will benefit your work. This helps us prioritize based on user impact.
validations:
required: true
- type: textarea
attributes:
label: 4. Additional context or comments
placeholder: (Any other information, comments, documentations, links, or screenshots that would provide more clarity. This is the place to add anything else not covered above.)
validations:
required: false
- type: checkboxes
attributes:
label: 3. Can you help us with this feature?
label: 5. Can you help us with this feature?
description: Let us know! This is not a commitment, but a starting point for collaboration.
options:
- label: I am interested in contributing to this feature.

.github/ISSUE_TEMPLATE/help_wanted.yml

@@ -0,0 +1,22 @@
name: "🤝 Help Wanted"
description: "Request help from the community [please use English :]"
labels:
- help-wanted
body:
- type: checkboxes
attributes:
label: Self Checks
description: "To make sure we get to you in time, please check the following :)"
options:
- label: I have searched for existing issues [search for existing issues](https://github.com/langgenius/dify/issues), including closed ones.
required: true
- label: I confirm that I am using English to submit this report (我已阅读并同意 [Language Policy](https://github.com/langgenius/dify/issues/1542)).
required: true
- label: "Pleas do not modify this template :) and fill in all the required fields."
required: true
- type: textarea
attributes:
label: Provide a description of the help you need
placeholder: Briefly describe what you need help with.
validations:
required: true


@@ -1,44 +0,0 @@
name: "✨ Refactor"
description: Refactor existing code for improved readability and maintainability.
title: "[Chore/Refactor] "
labels:
- refactor
body:
- type: checkboxes
attributes:
label: Self Checks
description: "To make sure we get to you in time, please check the following :)"
options:
- label: I have read the [Contributing Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md) and [Language Policy](https://github.com/langgenius/dify/issues/1542).
required: true
- label: This is only for refactoring, if you would like to ask a question, please head to [Discussions](https://github.com/langgenius/dify/discussions/categories/general).
required: true
- label: I have searched for existing issues [search for existing issues](https://github.com/langgenius/dify/issues), including closed ones.
required: true
- label: I confirm that I am using English to submit this report, otherwise it will be closed.
required: true
- label: 【中文用户 & Non English User】请使用英语提交否则会被关闭
required: true
- label: "Please do not modify this template :) and fill in all the required fields."
required: true
- type: textarea
id: description
attributes:
label: Description
placeholder: "Describe the refactor you are proposing."
validations:
required: true
- type: textarea
id: motivation
attributes:
label: Motivation
placeholder: "Explain why this refactor is necessary."
validations:
required: false
- type: textarea
id: additional-context
attributes:
label: Additional Context
placeholder: "Add any other context or screenshots about the request here."
validations:
required: false


@@ -1,13 +0,0 @@
name: "👾 Tracker"
description: For inner usages, please do not use this template.
title: "[Tracker] "
labels:
- tracker
body:
- type: textarea
id: content
attributes:
label: Blockers
placeholder: "- [ ] ..."
validations:
required: true


@@ -0,0 +1,54 @@
name: "🌐 Localization/Translation issue"
description: Report incorrect translations. [please use English :]
labels:
- translation
body:
- type: checkboxes
attributes:
label: Self Checks
description: "To make sure we get to you in time, please check the following :)"
options:
- label: I have searched for existing issues [search for existing issues](https://github.com/langgenius/dify/issues), including closed ones.
required: true
- label: I confirm that I am using English to submit this report (我已阅读并同意 [Language Policy](https://github.com/langgenius/dify/issues/1542)).
required: true
- label: "Pleas do not modify this template :) and fill in all the required fields."
required: true
- type: input
attributes:
label: Dify version
placeholder: 0.3.21
description: Hover over system tray icon or look at Settings
validations:
required: true
- type: input
attributes:
label: Utility with translation issue
placeholder: Some area
description: Please input here the utility with the translation issue
validations:
required: true
- type: input
attributes:
label: 🌐 Language affected
placeholder: "German"
validations:
required: true
- type: textarea
attributes:
label: ❌ Actual phrase(s)
placeholder: What is there? Please include a screenshot as that is extremely helpful.
validations:
required: true
- type: textarea
attributes:
label: ✔️ Expected phrase(s)
placeholder: What was expected?
validations:
required: true
- type: textarea
attributes:
label: Why is the current translation wrong
placeholder: Why do you feel this is incorrect?
validations:
required: true


@@ -1,12 +0,0 @@
version: 2
updates:
- package-ecosystem: "npm"
directory: "/web"
schedule:
interval: "weekly"
open-pull-requests-limit: 2
- package-ecosystem: "uv"
directory: "/api"
schedule:
interval: "weekly"
open-pull-requests-limit: 2


@@ -1,22 +0,0 @@
{
"Verbose": false,
"Debug": false,
"IgnoreDefaults": false,
"SpacesAfterTabs": false,
"NoColor": false,
"Exclude": [
"^web/public/vs/",
"^web/public/pdf.worker.min.mjs$",
"web/app/components/base/icons/src/vender/"
],
"AllowedContentTypes": [],
"PassedFiles": [],
"Disable": {
"EndOfLine": false,
"Indentation": false,
"IndentSize": true,
"InsertFinalNewline": false,
"TrimTrailingWhitespace": false,
"MaxLineLength": false
}
}
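This is the config consumed by editorconfig-checker: Exclude skips vendored assets, and the Disable block switches individual checks off (here IndentSize). Assuming the editorconfig-checker binary is installed, running it from the repo root picks the file up automatically:

editorconfig-checker            # lint the whole tree against .editorconfig plus this config
editorconfig-checker web/app    # or restrict the run to a subtree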


@@ -1,23 +1,32 @@
> [!IMPORTANT]
>
> 1. Make sure you have read our [contribution guidelines](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md)
> 1. Ensure there is an associated issue and you have been assigned to it
> 1. Use the correct syntax to link this PR: `Fixes #<issue number>`.
# Description
## Summary
Please include a summary of the change and which issue is fixed. Please also include relevant motivation and context. List any dependencies that are required for this change.
<!-- Please include a summary of the change and which issue is fixed. Please also include relevant motivation and context. List any dependencies that are required for this change. -->
Fixes # (issue)
## Screenshots
## Type of Change
| Before | After |
|--------|-------|
| ... | ... |
## Checklist
Please delete options that are not relevant.
- [ ] Bug fix (non-breaking change which fixes an issue)
- [ ] New feature (non-breaking change which adds functionality)
- [ ] Breaking change (fix or feature that would cause existing functionality to not work as expected)
- [ ] This change requires a documentation update, included: [Dify Document](https://github.com/langgenius/dify-docs)
- [x] I understand that this PR may be closed in case there was no previous discussion or issues. (This doesn't apply to typos!)
- [x] I've added a test for each change that was introduced, and I tried as much as possible to make a single atomic change.
- [x] I've updated the documentation accordingly.
- [x] I ran `dev/reformat`(backend) and `cd web && npx lint-staged`(frontend) to appease the lint gods
- [ ] Improvement, including but not limited to code refactoring, performance optimization, and UI/UX improvement
- [ ] Dependency upgrade
# How Has This Been Tested?
Please describe the tests that you ran to verify your changes. Provide instructions so we can reproduce. Please also list any relevant details for your test configuration.
- [ ] TODO
# Suggested Checklist:
- [ ] I have performed a self-review of my own code
- [ ] I have commented my code, particularly in hard-to-understand areas
- [ ] My changes generate no new warnings
- [ ] I ran `dev/reformat`(backend) and `cd web && npx lint-staged`(frontend) to appease the lint gods
- [ ] `optional` I have made corresponding changes to the documentation
- [ ] `optional` I have added tests that prove my fix is effective or that my feature works
- [ ] `optional` New and existing unit tests pass locally with my changes

View File

@@ -0,0 +1,45 @@
name: Run Pytest
on:
pull_request:
branches:
- main
jobs:
test:
runs-on: ubuntu-latest
env:
OPENAI_API_KEY: sk-IamNotARealKeyJustForMockTestKawaiiiiiiiiii
AZURE_OPENAI_API_BASE: https://difyai-openai.openai.azure.com
AZURE_OPENAI_API_KEY: xxxxb1707exxxxxxxxxxaaxxxxxf94
ANTHROPIC_API_KEY: sk-ant-api11-IamNotARealKeyJustForMockTestKawaiiiiiiiiii-NotBaka-ASkksz
CHATGLM_API_BASE: http://a.abc.com:11451
XINFERENCE_SERVER_URL: http://a.abc.com:11451
XINFERENCE_GENERATION_MODEL_UID: generate
XINFERENCE_CHAT_MODEL_UID: chat
XINFERENCE_EMBEDDINGS_MODEL_UID: embedding
XINFERENCE_RERANK_MODEL_UID: rerank
GOOGLE_API_KEY: abcdefghijklmnopqrstuvwxyz
HUGGINGFACE_API_KEY: hf-awuwuwuwuwuwuwuwuwuwuwuwuwuwuwuwuwu
HUGGINGFACE_TEXT_GEN_ENDPOINT_URL: a
HUGGINGFACE_TEXT2TEXT_GEN_ENDPOINT_URL: b
HUGGINGFACE_EMBEDDINGS_ENDPOINT_URL: c
MOCK_SWITCH: true
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: '3.10'
cache: 'pip'
cache-dependency-path: ./api/requirements.txt
- name: Install dependencies
run: pip install -r ./api/requirements.txt
- name: Run pytest
run: pytest api/tests/integration_tests/model_runtime/anthropic api/tests/integration_tests/model_runtime/azure_openai api/tests/integration_tests/model_runtime/openai api/tests/integration_tests/model_runtime/chatglm api/tests/integration_tests/model_runtime/google api/tests/integration_tests/model_runtime/xinference api/tests/integration_tests/model_runtime/huggingface_hub/test_llm.py

View File

@@ -1,97 +0,0 @@
name: Run Pytest
on:
workflow_call:
concurrency:
group: api-tests-${{ github.head_ref || github.run_id }}
cancel-in-progress: true
jobs:
test:
name: API Tests
runs-on: ubuntu-latest
defaults:
run:
shell: bash
strategy:
matrix:
python-version:
- "3.11"
- "3.12"
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
persist-credentials: false
- name: Setup UV and Python
uses: astral-sh/setup-uv@v6
with:
enable-cache: true
python-version: ${{ matrix.python-version }}
cache-dependency-glob: api/uv.lock
- name: Check UV lockfile
run: uv lock --project api --check
- name: Install dependencies
run: uv sync --project api --dev
- name: Run pyrefly check
run: |
cd api
uv add --dev pyrefly
uv run pyrefly check || true
- name: Run dify config tests
run: uv run --project api dev/pytest/pytest_config_tests.py
- name: Set up dotenvs
run: |
cp docker/.env.example docker/.env
cp docker/middleware.env.example docker/middleware.env
- name: Expose Service Ports
run: sh .github/workflows/expose_service_ports.sh
- name: Set up Sandbox
uses: hoverkraft-tech/compose-action@v2.0.2
with:
compose-file: |
docker/docker-compose.middleware.yaml
services: |
db
redis
sandbox
ssrf_proxy
- name: setup test config
run: |
cp api/tests/integration_tests/.env.example api/tests/integration_tests/.env
- name: Run Workflow
run: uv run --project api bash dev/pytest/pytest_workflow.sh
- name: Run Tool
run: uv run --project api bash dev/pytest/pytest_tools.sh
- name: Run TestContainers
run: uv run --project api bash dev/pytest/pytest_testcontainers.sh
- name: Run Unit tests
run: |
uv run --project api bash dev/pytest/pytest_unit_tests.sh
- name: Coverage Summary
run: |
set -x
# Extract coverage percentage and create a summary
TOTAL_COVERAGE=$(python -c 'import json; print(json.load(open("coverage.json"))["totals"]["percent_covered_display"])')
# Create a detailed coverage summary
echo "### Test Coverage Summary :test_tube:" >> $GITHUB_STEP_SUMMARY
echo "Total Coverage: ${TOTAL_COVERAGE}%" >> $GITHUB_STEP_SUMMARY
uv run --project api coverage report --format=markdown >> $GITHUB_STEP_SUMMARY

.github/workflows/api-tools-tests.yaml
View File

@@ -0,0 +1,26 @@
name: Run Tool Pytest
on:
pull_request:
branches:
- main
jobs:
test:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: '3.10'
cache: 'pip'
cache-dependency-path: ./api/requirements.txt
- name: Install dependencies
run: pip install -r ./api/requirements.txt
- name: Run pytest
run: pytest ./api/tests/integration_tests/tools/test_all_provider.py

View File

@@ -0,0 +1,31 @@
name: Run Pytest
on:
pull_request:
branches:
- main
- deploy/dev
jobs:
test:
runs-on: ubuntu-latest
env:
MOCK_SWITCH: true
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: '3.10'
cache: 'pip'
cache-dependency-path: ./api/requirements.txt
- name: Install dependencies
run: pip install -r ./api/requirements.txt
- name: Run pytest
run: pytest api/tests/integration_tests/workflow

View File

@@ -1,85 +0,0 @@
name: autofix.ci
on:
pull_request:
branches: ["main"]
push:
branches: ["main"]
permissions:
contents: read
jobs:
autofix:
if: github.repository == 'langgenius/dify'
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
# Use uv to ensure we have the same ruff version in CI and locally.
- uses: astral-sh/setup-uv@v6
with:
python-version: "3.11"
- run: |
cd api
uv sync --dev
# fmt first to avoid line too long
uv run ruff format ..
# Fix lint errors
uv run ruff check --fix .
# Format code
uv run ruff format ..
- name: ast-grep
run: |
uvx --from ast-grep-cli sg --pattern 'db.session.query($WHATEVER).filter($HERE)' --rewrite 'db.session.query($WHATEVER).where($HERE)' -l py --update-all
uvx --from ast-grep-cli sg --pattern 'session.query($WHATEVER).filter($HERE)' --rewrite 'session.query($WHATEVER).where($HERE)' -l py --update-all
uvx --from ast-grep-cli sg -p '$A = db.Column($$$B)' -r '$A = mapped_column($$$B)' -l py --update-all
uvx --from ast-grep-cli sg -p '$A : $T = db.Column($$$B)' -r '$A : $T = mapped_column($$$B)' -l py --update-all
# Convert Optional[T] to T | None (ignoring quoted types)
cat > /tmp/optional-rule.yml << 'EOF'
id: convert-optional-to-union
language: python
rule:
kind: generic_type
all:
- has:
kind: identifier
pattern: Optional
- has:
kind: type_parameter
has:
kind: type
pattern: $T
fix: $T | None
EOF
uvx --from ast-grep-cli sg scan --inline-rules "$(cat /tmp/optional-rule.yml)" --update-all
# Fix forward references that were incorrectly converted (Python doesn't support "Type" | None syntax)
find . -name "*.py" -type f -exec sed -i.bak -E 's/"([^"]+)" \| None/Optional["\1"]/g; s/'"'"'([^'"'"']+)'"'"' \| None/Optional['"'"'\1'"'"']/g' {} \;
find . -name "*.py.bak" -type f -delete
- name: mdformat
run: |
uvx mdformat .
- name: Install pnpm
uses: pnpm/action-setup@v4
with:
package_json_file: web/package.json
run_install: false
- name: Setup NodeJS
uses: actions/setup-node@v4
with:
node-version: 22
cache: pnpm
cache-dependency-path: ./web/package.json
- name: Web dependencies
working-directory: ./web
run: pnpm install --frozen-lockfile
- name: oxlint
working-directory: ./web
run: |
pnpx oxlint --fix
- uses: autofix-ci/action@635ffb0c9798bd160680f18fd73371e355b85f27

View File

@@ -4,16 +4,10 @@ on:
push:
branches:
- "main"
- "deploy/**"
- "build/**"
- "release/e-*"
- "hotfix/**"
tags:
- "*"
concurrency:
group: build-push-${{ github.head_ref || github.run_id }}
cancel-in-progress: true
- "deploy/dev"
- "feat/workflow"
release:
types: [published]
env:
DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }}
@@ -22,130 +16,50 @@ env:
DIFY_API_IMAGE_NAME: ${{ vars.DIFY_API_IMAGE_NAME || 'langgenius/dify-api' }}
jobs:
build:
runs-on: ${{ matrix.platform == 'linux/arm64' && 'arm64_runner' || 'ubuntu-latest' }}
if: github.repository == 'langgenius/dify'
build-and-push:
runs-on: ubuntu-latest
if: github.event.pull_request.draft == false
strategy:
matrix:
include:
- service_name: "build-api-amd64"
image_name_env: "DIFY_API_IMAGE_NAME"
context: "api"
platform: linux/amd64
- service_name: "build-api-arm64"
image_name_env: "DIFY_API_IMAGE_NAME"
context: "api"
platform: linux/arm64
- service_name: "build-web-amd64"
- service_name: "web"
image_name_env: "DIFY_WEB_IMAGE_NAME"
context: "web"
platform: linux/amd64
- service_name: "build-web-arm64"
image_name_env: "DIFY_WEB_IMAGE_NAME"
context: "web"
platform: linux/arm64
- service_name: "api"
image_name_env: "DIFY_API_IMAGE_NAME"
context: "api"
steps:
- name: Prepare
run: |
platform=${{ matrix.platform }}
echo "PLATFORM_PAIR=${platform//\//-}" >> $GITHUB_ENV
- name: Login to Docker Hub
uses: docker/login-action@v3
with:
username: ${{ env.DOCKERHUB_USER }}
password: ${{ env.DOCKERHUB_TOKEN }}
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Extract metadata for Docker
id: meta
uses: docker/metadata-action@v5
with:
images: ${{ env[matrix.image_name_env] }}
- name: Build Docker image
id: build
uses: docker/build-push-action@v6
with:
context: "{{defaultContext}}:${{ matrix.context }}"
platforms: ${{ matrix.platform }}
build-args: COMMIT_SHA=${{ fromJSON(steps.meta.outputs.json).labels['org.opencontainers.image.revision'] }}
labels: ${{ steps.meta.outputs.labels }}
outputs: type=image,name=${{ env[matrix.image_name_env] }},push-by-digest=true,name-canonical=true,push=true
cache-from: type=gha,scope=${{ matrix.service_name }}
cache-to: type=gha,mode=max,scope=${{ matrix.service_name }}
- name: Export digest
env:
DIGEST: ${{ steps.build.outputs.digest }}
run: |
mkdir -p /tmp/digests
sanitized_digest=${DIGEST#sha256:}
touch "/tmp/digests/${sanitized_digest}"
- name: Upload digest
uses: actions/upload-artifact@v4
with:
name: digests-${{ matrix.context }}-${{ env.PLATFORM_PAIR }}
path: /tmp/digests/*
if-no-files-found: error
retention-days: 1
create-manifest:
needs: build
runs-on: ubuntu-latest
if: github.repository == 'langgenius/dify'
strategy:
matrix:
include:
- service_name: "merge-api-images"
image_name_env: "DIFY_API_IMAGE_NAME"
context: "api"
- service_name: "merge-web-images"
image_name_env: "DIFY_WEB_IMAGE_NAME"
context: "web"
steps:
- name: Download digests
uses: actions/download-artifact@v4
with:
path: /tmp/digests
pattern: digests-${{ matrix.context }}-*
merge-multiple: true
- name: Login to Docker Hub
uses: docker/login-action@v3
uses: docker/login-action@v2
with:
username: ${{ env.DOCKERHUB_USER }}
password: ${{ env.DOCKERHUB_TOKEN }}
- name: Extract metadata for Docker
- name: Extract metadata (tags, labels) for Docker
id: meta
uses: docker/metadata-action@v5
with:
images: ${{ env[matrix.image_name_env] }}
tags: |
type=raw,value=latest,enable=${{ startsWith(github.ref, 'refs/tags/') && !contains(github.ref, '-') }}
type=raw,value=latest,enable=${{ github.ref == 'refs/heads/main' && startsWith(github.ref, 'refs/tags/') }}
type=ref,event=branch
type=sha,enable=true,priority=100,prefix=,suffix=,format=long
type=raw,value=${{ github.ref_name }},enable=${{ startsWith(github.ref, 'refs/tags/') }}
- name: Create manifest list and push
working-directory: /tmp/digests
env:
IMAGE_NAME: ${{ env[matrix.image_name_env] }}
run: |
docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
$(printf "$IMAGE_NAME@sha256:%s " *)
- name: Inspect image
env:
IMAGE_NAME: ${{ env[matrix.image_name_env] }}
IMAGE_VERSION: ${{ steps.meta.outputs.version }}
run: |
docker buildx imagetools inspect "$IMAGE_NAME:$IMAGE_VERSION"
- name: Build and push
uses: docker/build-push-action@v5
with:
context: "{{defaultContext}}:${{ matrix.context }}"
platforms: ${{ startsWith(github.ref, 'refs/tags/') && 'linux/amd64,linux/arm64' || 'linux/amd64' }}
build-args: COMMIT_SHA=${{ fromJSON(steps.meta.outputs.json).labels['org.opencontainers.image.revision'] }}
push: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
cache-from: type=gha
cache-to: type=gha,mode=max

View File

@@ -1,59 +0,0 @@
name: DB Migration Test
on:
workflow_call:
concurrency:
group: db-migration-test-${{ github.ref }}
cancel-in-progress: true
jobs:
db-migration-test:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
fetch-depth: 0
persist-credentials: false
- name: Setup UV and Python
uses: astral-sh/setup-uv@v6
with:
enable-cache: true
python-version: "3.12"
cache-dependency-glob: api/uv.lock
- name: Install dependencies
run: uv sync --project api
- name: Ensure offline migrations are supported
run: |
# upgrade
uv run --directory api flask db upgrade 'base:head' --sql
# downgrade
uv run --directory api flask db downgrade 'head:base' --sql
- name: Prepare middleware env
run: |
cd docker
cp middleware.env.example middleware.env
- name: Set up Middlewares
uses: hoverkraft-tech/compose-action@v2.0.2
with:
compose-file: |
docker/docker-compose.middleware.yaml
services: |
db
redis
- name: Prepare configs
run: |
cd api
cp .env.example .env
- name: Run DB Migration
env:
DEBUG: true
run: uv run --directory api flask upgrade-db

View File

@@ -12,8 +12,7 @@ jobs:
deploy:
runs-on: ubuntu-latest
if: |
github.event.workflow_run.conclusion == 'success' &&
github.event.workflow_run.head_branch == 'deploy/dev'
github.event.workflow_run.conclusion == 'success'
steps:
- name: Deploy to server
uses: appleboy/ssh-action@v0.1.8

View File

@@ -1,41 +0,0 @@
name: Deploy Enterprise
permissions:
contents: read
on:
workflow_run:
workflows: ["Build and Push API & Web"]
branches:
- "deploy/enterprise"
types:
- completed
jobs:
deploy:
runs-on: ubuntu-latest
if: |
github.event.workflow_run.conclusion == 'success' &&
github.event.workflow_run.head_branch == 'deploy/enterprise'
steps:
- name: trigger deployments
env:
DEV_ENV_ADDRS: ${{ vars.DEV_ENV_ADDRS }}
DEPLOY_SECRET: ${{ secrets.DEPLOY_SECRET }}
run: |
IFS=',' read -ra ENDPOINTS <<< "${DEV_ENV_ADDRS:-}"
BODY='{"project":"dify-api","tag":"deploy-enterprise"}'
for ENDPOINT in "${ENDPOINTS[@]}"; do
ENDPOINT="$(echo "$ENDPOINT" | xargs)"
[ -z "$ENDPOINT" ] && continue
API_SIGNATURE=$(printf '%s' "$BODY" | openssl dgst -sha256 -hmac "$DEPLOY_SECRET" | awk '{print "sha256="$2}')
curl -sSf -X POST \
-H "Content-Type: application/json" \
-H "X-Hub-Signature-256: $API_SIGNATURE" \
-d "$BODY" \
"$ENDPOINT"
done

View File

@@ -1,28 +0,0 @@
name: Deploy Trigger Dev
permissions:
contents: read
on:
workflow_run:
workflows: ["Build and Push API & Web"]
branches:
- "deploy/trigger-dev"
types:
- completed
jobs:
deploy:
runs-on: ubuntu-latest
if: |
github.event.workflow_run.conclusion == 'success' &&
github.event.workflow_run.head_branch == 'deploy/trigger-dev'
steps:
- name: Deploy to server
uses: appleboy/ssh-action@v0.1.8
with:
host: ${{ secrets.TRIGGER_SSH_HOST }}
username: ${{ secrets.SSH_USER }}
key: ${{ secrets.SSH_PRIVATE_KEY }}
script: |
${{ vars.SSH_SCRIPT || secrets.SSH_SCRIPT }}

View File

@@ -1,48 +0,0 @@
name: Build docker image
on:
pull_request:
branches:
- "main"
paths:
- api/Dockerfile
- web/Dockerfile
concurrency:
group: docker-build-${{ github.head_ref || github.run_id }}
cancel-in-progress: true
jobs:
build-docker:
runs-on: ubuntu-latest
strategy:
matrix:
include:
- service_name: "api-amd64"
platform: linux/amd64
context: "api"
- service_name: "api-arm64"
platform: linux/arm64
context: "api"
- service_name: "web-amd64"
platform: linux/amd64
context: "web"
- service_name: "web-arm64"
platform: linux/arm64
context: "web"
steps:
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Build Docker Image
uses: docker/build-push-action@v6
with:
push: false
context: "{{defaultContext}}:${{ matrix.context }}"
file: "${{ matrix.file }}"
platforms: ${{ matrix.platform }}
cache-from: type=gha
cache-to: type=gha,mode=max

View File

@@ -1,17 +0,0 @@
#!/bin/bash
yq eval '.services.weaviate.ports += ["8080:8080"]' -i docker/docker-compose.yaml
yq eval '.services.weaviate.ports += ["50051:50051"]' -i docker/docker-compose.yaml
yq eval '.services.qdrant.ports += ["6333:6333"]' -i docker/docker-compose.yaml
yq eval '.services.chroma.ports += ["8000:8000"]' -i docker/docker-compose.yaml
yq eval '.services["milvus-standalone"].ports += ["19530:19530"]' -i docker/docker-compose.yaml
yq eval '.services.pgvector.ports += ["5433:5432"]' -i docker/docker-compose.yaml
yq eval '.services["pgvecto-rs"].ports += ["5431:5432"]' -i docker/docker-compose.yaml
yq eval '.services["elasticsearch"].ports += ["9200:9200"]' -i docker/docker-compose.yaml
yq eval '.services.couchbase-server.ports += ["8091-8096:8091-8096"]' -i docker/docker-compose.yaml
yq eval '.services.couchbase-server.ports += ["11210:11210"]' -i docker/docker-compose.yaml
yq eval '.services.tidb.ports += ["4000:4000"]' -i docker/tidb/docker-compose.yaml
yq eval '.services.oceanbase.ports += ["2881:2881"]' -i docker/docker-compose.yaml
yq eval '.services.opengauss.ports += ["6600:6600"]' -i docker/docker-compose.yaml
echo "Ports exposed for sandbox, weaviate (HTTP 8080, gRPC 50051), tidb, qdrant, chroma, milvus, pgvector, pgvecto-rs, elasticsearch, couchbase, opengauss"

View File

@@ -1,78 +0,0 @@
name: Main CI Pipeline
on:
pull_request:
branches: ["main"]
push:
branches: ["main"]
permissions:
contents: write
pull-requests: write
checks: write
statuses: write
concurrency:
group: main-ci-${{ github.head_ref || github.run_id }}
cancel-in-progress: true
jobs:
# Check which paths were changed to determine which tests to run
check-changes:
name: Check Changed Files
runs-on: ubuntu-latest
outputs:
api-changed: ${{ steps.changes.outputs.api }}
web-changed: ${{ steps.changes.outputs.web }}
vdb-changed: ${{ steps.changes.outputs.vdb }}
migration-changed: ${{ steps.changes.outputs.migration }}
steps:
- uses: actions/checkout@v4
- uses: dorny/paths-filter@v3
id: changes
with:
filters: |
api:
- 'api/**'
- 'docker/**'
- '.github/workflows/api-tests.yml'
web:
- 'web/**'
vdb:
- 'api/core/rag/datasource/**'
- 'docker/**'
- '.github/workflows/vdb-tests.yml'
- 'api/uv.lock'
- 'api/pyproject.toml'
migration:
- 'api/migrations/**'
- '.github/workflows/db-migration-test.yml'
# Run tests in parallel
api-tests:
name: API Tests
needs: check-changes
if: needs.check-changes.outputs.api-changed == 'true'
uses: ./.github/workflows/api-tests.yml
web-tests:
name: Web Tests
needs: check-changes
if: needs.check-changes.outputs.web-changed == 'true'
uses: ./.github/workflows/web-tests.yml
style-check:
name: Style Check
uses: ./.github/workflows/style.yml
vdb-tests:
name: VDB Tests
needs: check-changes
if: needs.check-changes.outputs.vdb-changed == 'true'
uses: ./.github/workflows/vdb-tests.yml
db-migration-test:
name: DB Migration Test
needs: check-changes
if: needs.check-changes.outputs.migration-changed == 'true'
uses: ./.github/workflows/db-migration-test.yml

View File

@@ -1,17 +1,14 @@
name: Style check
on:
workflow_call:
pull_request:
branches:
- main
concurrency:
group: style-${{ github.head_ref || github.run_id }}
group: dep-${{ github.head_ref || github.run_id }}
cancel-in-progress: true
permissions:
checks: write
statuses: write
contents: read
jobs:
python-style:
name: Python Style
@@ -20,163 +17,60 @@ jobs:
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Set up Python
uses: actions/setup-python@v5
with:
persist-credentials: false
python-version: '3.10'
- name: Check changed files
id: changed-files
uses: tj-actions/changed-files@v46
with:
files: |
api/**
.github/workflows/style.yml
- name: Python dependencies
run: pip install ruff
- name: Setup UV and Python
if: steps.changed-files.outputs.any_changed == 'true'
uses: astral-sh/setup-uv@v6
with:
enable-cache: false
python-version: "3.12"
cache-dependency-glob: api/uv.lock
- name: Ruff check
run: ruff check ./api
- name: Install dependencies
if: steps.changed-files.outputs.any_changed == 'true'
run: uv sync --project api --dev
- name: Lint hints
if: failure()
run: echo "Please run 'dev/reformat' to fix the fixable linting errors."
- name: Run Import Linter
if: steps.changed-files.outputs.any_changed == 'true'
run: uv run --directory api --dev lint-imports
- name: Run Basedpyright Checks
if: steps.changed-files.outputs.any_changed == 'true'
run: dev/basedpyright-check
- name: Run Mypy Type Checks
if: steps.changed-files.outputs.any_changed == 'true'
run: uv --directory api run mypy --exclude-gitignore --exclude 'tests/' --exclude 'migrations/' --check-untyped-defs --disable-error-code=import-untyped .
- name: Dotenv check
if: steps.changed-files.outputs.any_changed == 'true'
run: uv run --project api dotenv-linter ./api/.env.example ./web/.env.example
web-style:
name: Web Style
runs-on: ubuntu-latest
defaults:
run:
working-directory: ./web
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
persist-credentials: false
- name: Check changed files
id: changed-files
uses: tj-actions/changed-files@v46
with:
files: web/**
- name: Install pnpm
uses: pnpm/action-setup@v4
with:
package_json_file: web/package.json
run_install: false
- name: Setup NodeJS
uses: actions/setup-node@v4
if: steps.changed-files.outputs.any_changed == 'true'
with:
node-version: 22
cache: pnpm
cache-dependency-path: ./web/package.json
- name: Web dependencies
if: steps.changed-files.outputs.any_changed == 'true'
working-directory: ./web
run: pnpm install --frozen-lockfile
- name: Web style check
if: steps.changed-files.outputs.any_changed == 'true'
working-directory: ./web
run: |
pnpm run lint
- name: Web type check
if: steps.changed-files.outputs.any_changed == 'true'
working-directory: ./web
run: pnpm run type-check
docker-compose-template:
name: Docker Compose Template
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
persist-credentials: false
- name: Check changed files
id: changed-files
uses: tj-actions/changed-files@v46
with:
files: |
docker/generate_docker_compose
docker/.env.example
docker/docker-compose-template.yaml
docker/docker-compose.yaml
- name: Generate Docker Compose
if: steps.changed-files.outputs.any_changed == 'true'
run: |
cd docker
./generate_docker_compose
- name: Check for changes
if: steps.changed-files.outputs.any_changed == 'true'
run: git diff --exit-code
superlinter:
name: SuperLinter
test:
name: ESLint and SuperLinter
runs-on: ubuntu-latest
needs: python-style
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
fetch-depth: 0
persist-credentials: false
- name: Check changed files
id: changed-files
uses: tj-actions/changed-files@v46
- name: Setup NodeJS
uses: actions/setup-node@v4
with:
files: |
**.sh
**.yaml
**.yml
**Dockerfile
dev/**
.editorconfig
node-version: 20
cache: yarn
cache-dependency-path: ./web/package.json
- name: Web dependencies
run: |
cd ./web
yarn install --frozen-lockfile
- name: Web style check
run: |
cd ./web
yarn run lint
- name: Super-linter
uses: super-linter/super-linter/slim@v8
if: steps.changed-files.outputs.any_changed == 'true'
uses: super-linter/super-linter/slim@v6
env:
BASH_SEVERITY: warning
DEFAULT_BRANCH: origin/main
EDITORCONFIG_FILE_NAME: editorconfig-checker.json
FILTER_REGEX_INCLUDE: pnpm-lock.yaml
DEFAULT_BRANCH: main
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
IGNORE_GENERATED_FILES: true
IGNORE_GITIGNORED_FILES: true
VALIDATE_BASH: true
VALIDATE_BASH_EXEC: true
# FIXME: temporarily disabled until api-docker.yaml's run script is fixed for shellcheck
# VALIDATE_GITHUB_ACTIONS: true
VALIDATE_GITHUB_ACTIONS: true
VALIDATE_DOCKERFILE_HADOLINT: true
VALIDATE_EDITORCONFIG: true
VALIDATE_XML: true
VALIDATE_YAML: true

View File

@@ -4,13 +4,6 @@ on:
pull_request:
branches:
- main
paths:
- sdks/**
concurrency:
group: sdk-tests-${{ github.head_ref || github.run_id }}
cancel-in-progress: true
jobs:
build:
name: unit test for Node.js SDK
@@ -18,7 +11,7 @@ jobs:
strategy:
matrix:
node-version: [16, 18, 20, 22]
node-version: [16, 18, 20]
defaults:
run:
@@ -26,18 +19,16 @@ jobs:
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- name: Use Node.js ${{ matrix.node-version }}
uses: actions/setup-node@v4
with:
node-version: ${{ matrix.node-version }}
cache: ''
cache-dependency-path: 'pnpm-lock.yaml'
cache-dependency-path: 'yarn.lock'
- name: Install Dependencies
run: pnpm install --frozen-lockfile
run: yarn install
- name: Test
run: pnpm test
run: yarn test

View File

@@ -1,88 +0,0 @@
name: Check i18n Files and Create PR
on:
push:
branches: [main]
paths:
- 'web/i18n/en-US/*.ts'
permissions:
contents: write
pull-requests: write
jobs:
check-and-update:
if: github.repository == 'langgenius/dify'
runs-on: ubuntu-latest
defaults:
run:
working-directory: web
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 2
token: ${{ secrets.GITHUB_TOKEN }}
- name: Check for file changes in i18n/en-US
id: check_files
run: |
recent_commit_sha=$(git rev-parse HEAD)
second_recent_commit_sha=$(git rev-parse HEAD~1)
changed_files=$(git diff --name-only $recent_commit_sha $second_recent_commit_sha -- 'i18n/en-US/*.ts')
echo "Changed files: $changed_files"
if [ -n "$changed_files" ]; then
echo "FILES_CHANGED=true" >> $GITHUB_ENV
file_args=""
for file in $changed_files; do
filename=$(basename "$file" .ts)
file_args="$file_args --file=$filename"
done
echo "FILE_ARGS=$file_args" >> $GITHUB_ENV
echo "File arguments: $file_args"
else
echo "FILES_CHANGED=false" >> $GITHUB_ENV
fi
- name: Install pnpm
uses: pnpm/action-setup@v4
with:
package_json_file: web/package.json
run_install: false
- name: Set up Node.js
if: env.FILES_CHANGED == 'true'
uses: actions/setup-node@v4
with:
node-version: 'lts/*'
cache: pnpm
cache-dependency-path: ./web/package.json
- name: Install dependencies
if: env.FILES_CHANGED == 'true'
working-directory: ./web
run: pnpm install --frozen-lockfile
- name: Generate i18n translations
if: env.FILES_CHANGED == 'true'
working-directory: ./web
run: pnpm run auto-gen-i18n ${{ env.FILE_ARGS }}
- name: Generate i18n type definitions
if: env.FILES_CHANGED == 'true'
working-directory: ./web
run: pnpm run gen:i18n-types
- name: Create Pull Request
if: env.FILES_CHANGED == 'true'
uses: peter-evans/create-pull-request@v6
with:
token: ${{ secrets.GITHUB_TOKEN }}
commit-message: Update i18n files and type definitions based on en-US changes
title: 'chore: translate i18n files and update type definitions'
body: |
This PR was automatically created to update i18n files and TypeScript type definitions based on changes in en-US locale.
**Changes included:**
- Updated translation files for all locales
- Regenerated TypeScript type definitions for type safety
branch: chore/automated-i18n-updates

View File

@@ -1,90 +0,0 @@
name: Run VDB Tests
on:
workflow_call:
concurrency:
group: vdb-tests-${{ github.head_ref || github.run_id }}
cancel-in-progress: true
jobs:
test:
name: VDB Tests
runs-on: ubuntu-latest
strategy:
matrix:
python-version:
- "3.11"
- "3.12"
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
persist-credentials: false
- name: Free Disk Space
uses: endersonmenezes/free-disk-space@v2
with:
remove_dotnet: true
remove_haskell: true
remove_tool_cache: true
- name: Setup UV and Python
uses: astral-sh/setup-uv@v6
with:
enable-cache: true
python-version: ${{ matrix.python-version }}
cache-dependency-glob: api/uv.lock
- name: Check UV lockfile
run: uv lock --project api --check
- name: Install dependencies
run: uv sync --project api --dev
- name: Set up dotenvs
run: |
cp docker/.env.example docker/.env
cp docker/middleware.env.example docker/middleware.env
- name: Expose Service Ports
run: sh .github/workflows/expose_service_ports.sh
- name: Set up Vector Store (TiDB)
uses: hoverkraft-tech/compose-action@v2.0.2
with:
compose-file: docker/tidb/docker-compose.yaml
services: |
tidb
tiflash
- name: Set up Vector Stores (Weaviate, Qdrant, PGVector, Milvus, PgVecto-RS, Chroma, MyScale, ElasticSearch, Couchbase, OceanBase)
uses: hoverkraft-tech/compose-action@v2.0.2
with:
compose-file: |
docker/docker-compose.yaml
services: |
weaviate
qdrant
couchbase-server
etcd
minio
milvus-standalone
pgvecto-rs
pgvector
chroma
elasticsearch
oceanbase
- name: setup test config
run: |
echo $(pwd)
ls -lah .
cp api/tests/integration_tests/.env.example api/tests/integration_tests/.env
- name: Check VDB Ready (TiDB)
run: uv run --project api python api/tests/integration_tests/vdb/tidb_vector/check_tiflash_ready.py
- name: Test Vector Stores
run: uv run --project api bash dev/pytest/pytest_vdb.sh

View File

@@ -1,58 +0,0 @@
name: Web Tests
on:
workflow_call:
concurrency:
group: web-tests-${{ github.head_ref || github.run_id }}
cancel-in-progress: true
jobs:
test:
name: Web Tests
runs-on: ubuntu-latest
defaults:
run:
working-directory: ./web
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
persist-credentials: false
- name: Check changed files
id: changed-files
uses: tj-actions/changed-files@v46
with:
files: web/**
- name: Install pnpm
if: steps.changed-files.outputs.any_changed == 'true'
uses: pnpm/action-setup@v4
with:
package_json_file: web/package.json
run_install: false
- name: Setup Node.js
uses: actions/setup-node@v4
if: steps.changed-files.outputs.any_changed == 'true'
with:
node-version: 22
cache: pnpm
cache-dependency-path: ./web/package.json
- name: Install dependencies
if: steps.changed-files.outputs.any_changed == 'true'
working-directory: ./web
run: pnpm install --frozen-lockfile
- name: Check i18n types synchronization
if: steps.changed-files.outputs.any_changed == 'true'
working-directory: ./web
run: pnpm run check:i18n-types
- name: Run tests
if: steps.changed-files.outputs.any_changed == 'true'
working-directory: ./web
run: pnpm test

.gitignore
View File

@@ -6,9 +6,6 @@ __pycache__/
# C extensions
*.so
# *db files
*.db
# Distribution / packaging
.Python
build/
@@ -49,7 +46,6 @@ htmlcov/
.cache
nosetests.xml
coverage.xml
coverage.json
*.cover
*.py,cover
.hypothesis/
@@ -100,7 +96,6 @@ __pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat-schedule.db
celerybeat.pid
# SageMath parsed files
@@ -108,7 +103,6 @@ celerybeat.pid
# Environments
.env
.env-local
.venv
env/
venv/
@@ -127,12 +121,10 @@ venv.bak/
# mkdocs documentation
/site
# type checking
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
pyrightconfig.json
!api/pyrightconfig.json
# Pyre type checker
.pyre/
@@ -142,103 +134,25 @@ pyrightconfig.json
web/.vscode/settings.json
# Intellij IDEA Files
.idea/*
!.idea/vcs.xml
!.idea/icon.png
.idea/
.ideaDataSources/
*.iml
api/.idea
api/.env
api/storage/*
docker-legacy/volumes/app/storage/*
docker-legacy/volumes/db/data/*
docker-legacy/volumes/redis/data/*
docker-legacy/volumes/weaviate/*
docker-legacy/volumes/qdrant/*
docker-legacy/volumes/etcd/*
docker-legacy/volumes/minio/*
docker-legacy/volumes/milvus/*
docker-legacy/volumes/chroma/*
docker-legacy/volumes/opensearch/data/*
docker-legacy/volumes/pgvectors/data/*
docker-legacy/volumes/pgvector/data/*
docker/volumes/app/storage/*
docker/volumes/certbot/*
docker/volumes/db/data/*
docker/volumes/redis/data/*
docker/volumes/weaviate/*
docker/volumes/qdrant/*
docker/tidb/volumes/*
docker/volumes/etcd/*
docker/volumes/minio/*
docker/volumes/milvus/*
docker/volumes/chroma/*
docker/volumes/opensearch/data/*
docker/volumes/myscale/data/*
docker/volumes/myscale/log/*
docker/volumes/unstructured/*
docker/volumes/pgvector/data/*
docker/volumes/pgvecto_rs/data/*
docker/volumes/couchbase/*
docker/volumes/oceanbase/*
docker/volumes/plugin_daemon/*
docker/volumes/matrixone/*
!docker/volumes/oceanbase/init.d
docker/nginx/conf.d/default.conf
docker/nginx/ssl/*
!docker/nginx/ssl/.gitkeep
docker/middleware.env
docker/docker-compose.override.yaml
sdks/python-client/build
sdks/python-client/dist
sdks/python-client/dify_client.egg-info
.vscode/*
!.vscode/launch.json.template
!.vscode/README.md
api/.vscode
web/.vscode
# vscode Code History Extension
.history
.idea/
# pnpm
/.pnpm-store
# plugin migrate
plugins.jsonl
# mise
mise.toml
# Next.js build output
.next/
# PWA generated files
web/public/sw.js
web/public/sw.js.map
web/public/workbox-*.js
web/public/workbox-*.js.map
web/public/fallback-*.js
# AI Assistant
.roo/
api/.env.backup
/clickzetta
# Benchmark
scripts/stress-test/setup/config/
scripts/stress-test/reports/
# mcp
.playwright-mcp/
.serena/
# settings
*.local.json
!.vscode/launch.json
pyrightconfig.json

View File

@@ -1,34 +0,0 @@
{
"mcpServers": {
"context7": {
"type": "http",
"url": "https://mcp.context7.com/mcp"
},
"sequential-thinking": {
"type": "stdio",
"command": "npx",
"args": ["-y", "@modelcontextprotocol/server-sequential-thinking"],
"env": {}
},
"github": {
"type": "stdio",
"command": "npx",
"args": ["-y", "@modelcontextprotocol/server-github"],
"env": {
"GITHUB_PERSONAL_ACCESS_TOKEN": "${GITHUB_PERSONAL_ACCESS_TOKEN}"
}
},
"fetch": {
"type": "stdio",
"command": "uvx",
"args": ["mcp-server-fetch"],
"env": {}
},
"playwright": {
"type": "stdio",
"command": "npx",
"args": ["-y", "@playwright/mcp@latest"],
"env": {}
}
}
}

.vscode/README.md
View File

@@ -1,14 +0,0 @@
# Debugging with VS Code
This `launch.json.template` file provides various debug configurations for the Dify project within VS Code / Cursor. To use these configurations, you should copy the contents of this file into a new file named `launch.json` in the same `.vscode` directory.
## How to Use
1. **Create `launch.json`**: If you don't have one, create a file named `launch.json` inside the `.vscode` directory.
1. **Copy Content**: Copy the entire content from `launch.json.template` into your newly created `launch.json` file.
1. **Select Debug Configuration**: Go to the Run and Debug view in VS Code / Cursor (Ctrl+Shift+D or Cmd+Shift+D).
1. **Start Debugging**: Select the desired configuration from the dropdown menu and click the green play button.
## Tips
- If you need to debug with the Edge browser instead of Chrome, modify the `serverReadyAction` configuration in the "Next.js: debug full stack" section: change `"debugWithChrome"` to `"debugWithEdge"` to use Microsoft Edge for debugging.
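If you prefer the terminal, the copy step is a one-liner; this is a sketch, assuming you run it from the repository root:
```bash
# Create launch.json from the provided template (run from the repo root).
cp .vscode/launch.json.template .vscode/launch.json
```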

View File

@@ -1,65 +0,0 @@
{
"version": "0.2.0",
"configurations": [
{
"name": "Python: Flask API",
"type": "debugpy",
"request": "launch",
"module": "flask",
"env": {
"FLASK_APP": "app.py",
"FLASK_ENV": "development"
},
"args": [
"run",
"--host=0.0.0.0",
"--port=5001",
"--no-debugger",
"--no-reload"
],
"jinja": true,
"justMyCode": true,
"cwd": "${workspaceFolder}/api",
"python": "${workspaceFolder}/api/.venv/bin/python"
},
{
"name": "Python: Celery Worker (Solo)",
"type": "debugpy",
"request": "launch",
"module": "celery",
"env": {},
"args": [
"-A",
"app.celery",
"worker",
"-P",
"solo",
"-c",
"1",
"-Q",
"dataset,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,priority_pipeline,pipeline",
"--loglevel",
"INFO"
],
"justMyCode": false,
"cwd": "${workspaceFolder}/api",
"python": "${workspaceFolder}/api/.venv/bin/python"
},
{
"name": "Next.js: debug full stack",
"type": "node",
"request": "launch",
"program": "${workspaceFolder}/web/node_modules/next/dist/bin/next",
"runtimeArgs": ["--inspect"],
"skipFiles": ["<node_internals>/**"],
"serverReadyAction": {
"action": "debugWithChrome",
"killOnServerStop": true,
"pattern": "- Local:.+(https?://.+)",
"uriFormat": "%s",
"webRoot": "${workspaceFolder}/web"
},
"cwd": "${workspaceFolder}/web"
}
]
}

View File

@@ -1,54 +0,0 @@
# AGENTS.md
## Project Overview
Dify is an open-source platform for developing LLM applications with an intuitive interface combining agentic AI workflows, RAG pipelines, agent capabilities, and model management.
The codebase is split into:
- **Backend API** (`/api`): Python Flask application organized with Domain-Driven Design
- **Frontend Web** (`/web`): Next.js 15 application using TypeScript and React 19
- **Docker deployment** (`/docker`): Containerized deployment configurations
## Backend Workflow
- Run backend CLI commands through `uv run --project api <command>`.
- Before submission, all backend modifications must pass local checks: `make lint`, `make type-check`, and `uv run --project api --dev dev/pytest/pytest_unit_tests.sh` (collected in the sketch below).
- Use Makefile targets for linting and formatting; `make lint` and `make type-check` cover the required checks.
- Integration tests are CI-only and are not expected to run in the local environment.
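A minimal sketch of the pre-submission loop described above, assuming `make` and `uv` are available and you run it from the repository root:
```bash
# Local backend checks expected to pass before submitting.
make lint          # lint checks via the Makefile target
make type-check    # static type checks via the Makefile target
# Unit tests only; integration tests run in CI, not locally.
uv run --project api --dev dev/pytest/pytest_unit_tests.sh
```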
## Frontend Workflow
```bash
cd web
pnpm lint
pnpm lint:fix
pnpm test
```
## Testing & Quality Practices
- Follow TDD: red → green → refactor.
- Use `pytest` for backend tests with Arrange-Act-Assert structure.
- Enforce strong typing; avoid `Any` and prefer explicit type annotations.
- Write self-documenting code; only add comments that explain intent.
## Language Style
- **Python**: Keep type hints on functions and attributes, and implement relevant special methods (e.g., `__repr__`, `__str__`).
- **TypeScript**: Use the strict config, lean on ESLint + Prettier workflows, and avoid `any` types.
## General Practices
- Prefer editing existing files; add new documentation only when requested.
- Inject dependencies through constructors and preserve clean architecture boundaries.
- Handle errors with domain-specific exceptions at the correct layer.
## Project Conventions
- Backend architecture adheres to DDD and Clean Architecture principles.
- Async work runs through Celery with Redis as the broker.
- Frontend user-facing strings must use `web/i18n/en-US/`; avoid hardcoded text.

View File

@@ -1 +0,0 @@
AGENTS.md

View File

@@ -1,97 +1,158 @@
# CONTRIBUTING
So you're looking to contribute to Dify - that's awesome, we can't wait to see what you do. As a startup with limited headcount and funding, we have grand ambitions to design the most intuitive workflow for building and managing LLM applications. Any help from the community counts, truly.
We need to be nimble and ship fast given where we are, but we also want to make sure that contributors like you get as smooth an experience at contributing as possible. We've assembled this contribution guide for that purpose, aiming at getting you familiarized with the codebase & how we work with contributors, so you could quickly jump to the fun part.
This guide, like Dify itself, is a constant work in progress. We highly appreciate your understanding if at times it lags behind the actual project, and welcome any feedback for us to improve.
In terms of licensing, please take a minute to read our short [License and Contributor Agreement](./LICENSE). The community also adheres to the [code of conduct](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md).
In terms of licensing, please take a minute to read our short [License and Contributor Agreement](./license). The community also adheres to the [code of conduct](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md).
## Before you jump in
Looking for something to tackle? Browse our [good first issues](https://github.com/langgenius/dify/issues?q=is%3Aissue%20state%3Aopen%20label%3A%22good%20first%20issue%22) and pick one to get started!
[Find](https://github.com/langgenius/dify/issues?q=is:issue+is:closed) an existing issue, or [open](https://github.com/langgenius/dify/issues/new/choose) a new one. We categorize issues into 2 types:
Got a cool new model runtime or tool to add? Open a PR in our [plugin repo](https://github.com/langgenius/dify-plugins) and show us what you've built.
### Feature requests:
Need to update an existing model runtime, tool, or squash some bugs? Head over to our [official plugin repo](https://github.com/langgenius/dify-official-plugins) and make your magic happen!
* If you're opening a new feature request, we'd like you to explain what the proposed feature achieves, and include as much context as possible. [@perzeusss](https://github.com/perzeuss) has made a solid [Feature Request Copilot](https://udify.app/chat/MK2kVSnw1gakVwMX) that helps you draft out your needs. Feel free to give it a try.
Join the fun, contribute, and let's build something awesome together! 💡✨
* If you want to pick one up from the existing issues, simply drop a comment below it saying so.
Don't forget to link an existing issue or open a new issue in the PR's description.
### Bug reports
A team member working in the related direction will be looped in. If all looks good, they will give the go-ahead for you to start coding. We ask that you hold off working on the feature until then, so none of your work goes to waste should we propose changes.
> [!IMPORTANT]
> Please make sure to include the following information when submitting a bug report:
Depending on which area the proposed feature falls under, you might talk to different team members. Here's a rundown of the areas each of our team members is working on at the moment:
- A clear and descriptive title
- A detailed description of the bug, including any error messages
- Steps to reproduce the bug
- Expected behavior
- **Logs**, if available, for backend issues, this is really important, you can find them in docker-compose logs
- Screenshots or videos, if applicable
| Member | Scope |
| ------------------------------------------------------------ | ---------------------------------------------------- |
| [@yeuoly](https://github.com/Yeuoly) | Architecting Agents |
| [@jyong](https://github.com/JohnJyong) | RAG pipeline design |
| [@GarfieldDai](https://github.com/GarfieldDai) | Building workflow orchestrations |
| [@iamjoel](https://github.com/iamjoel) & [@zxhlyh](https://github.com/zxhlyh) | Making our frontend a breeze to use |
| [@guchenhe](https://github.com/guchenhe) & [@crazywoola](https://github.com/crazywoola) | Developer experience, points of contact for anything |
| [@takatost](https://github.com/takatost) | Overall product direction and architecture |
How we prioritize:
| Issue Type | Priority |
| ------------------------------------------------------------ | --------------- |
| Bugs in core functions (cloud service, cannot login, applications not working, security loopholes) | Critical |
| Non-critical bugs, performance boosts | Medium Priority |
| Minor fixes (typos, confusing but working UI) | Low Priority |
| Feature Type | Priority |
| ------------------------------------------------------------ | --------------- |
| High-Priority Features as being labeled by a team member | High Priority |
| Popular feature requests from our [community feedback board](https://feedback.dify.ai/) | Medium Priority |
| Non-core features and minor enhancements | Low Priority |
| Valuable but not immediate | Future-Feature |
### Feature requests
### Anything else (e.g. bug report, performance optimization, typo correction):
> [!NOTE]
> Please make sure to include the following information when submitting a feature request:
* Start coding right away.
- A clear and descriptive title
- A detailed description of the feature
- A use case for the feature
- Any other context or screenshots about the feature request
How we prioritize:
| Issue Type | Priority |
| ------------------------------------------------------------ | --------------- |
| Bugs in core functions (cannot login, applications not working, security loopholes) | Critical |
| Non-critical bugs, performance boosts | Medium Priority |
| Minor fixes (typos, confusing but working UI) | Low Priority |
| Feature Type | Priority |
| ------------------------------------------------------------ | --------------- |
| High-Priority Features as being labeled by a team member | High Priority |
| Popular feature requests from our [community feedback board](https://github.com/langgenius/dify/discussions/categories/feedbacks) | Medium Priority |
| Non-core features and minor enhancements | Low Priority |
| Valuable but not immediate | Future-Feature |
## Installing
Here are the steps to set up Dify for development:
### 1. Fork this repository
### 2. Clone the repo
Clone the forked repository from your terminal:
```
git clone git@github.com:<github_username>/dify.git
```
### 3. Verify dependencies
Dify requires the following dependencies to build; make sure they're installed on your system:
- [Docker](https://www.docker.com/)
- [Docker Compose](https://docs.docker.com/compose/install/)
- [Node.js v18.x (LTS)](http://nodejs.org)
- [npm](https://www.npmjs.com/) version 8.x.x or [Yarn](https://yarnpkg.com/)
- [Python](https://www.python.org/) version 3.10.x
### 4. Installations
Dify is composed of a backend and a frontend. Navigate to the backend directory by `cd api/`, then follow the [Backend README](api/README.md) to install it. In a separate terminal, navigate to the frontend directory by `cd web/`, then follow the [Frontend README](web/README.md) to install.
Check the [installation FAQ](https://docs.dify.ai/getting-started/faq/install-faq) for a list of common issues and steps to troubleshoot.
### 5. Visit dify in your browser
To validate your setup, head over to [http://localhost:3000](http://localhost:3000) (the default, or your self-configured URL and port) in your browser. You should now see Dify up and running.
## Developing
If you are adding a model provider, [this guide](https://github.com/langgenius/dify/blob/main/api/core/model_runtime/README.md) is for you.
If you are adding a tool provider to Agent or Workflow, [this guide](./api/core/tools/README.md) is for you.
To help you quickly navigate where your contribution fits, a brief, annotated outline of Dify's backend & frontend is as follows:
### Backend
Dify's backend is written in Python using [Flask](https://flask.palletsprojects.com/en/3.0.x/). It uses [SQLAlchemy](https://www.sqlalchemy.org/) for ORM and [Celery](https://docs.celeryq.dev/en/stable/getting-started/introduction.html) for task queueing. Authorization logic goes via Flask-login.
```
[api/]
├── constants // Constant settings used throughout code base.
├── controllers // API route definitions and request handling logic.
├── core // Core application orchestration, model integrations, and tools.
├── docker // Docker & containerization related configurations.
├── events // Event handling and processing
├── extensions // Extensions with 3rd party frameworks/platforms.
├── fields // field definitions for serialization/marshalling.
├── libs // Reusable libraries and helpers.
├── migrations // Scripts for database migration.
├── models // Database models & schema definitions.
├── services // Specifies business logic.
├── storage // Private key storage.
├── tasks // Handling of async tasks and background jobs.
└── tests
```
### Frontend
The website is bootstrapped on [Next.js](https://nextjs.org/) boilerplate in Typescript and uses [Tailwind CSS](https://tailwindcss.com/) for styling. [React-i18next](https://react.i18next.com/) is used for internationalization.
```
[web/]
├── app // layouts, pages, and components
│ ├── (commonLayout) // common layout used throughout the app
│ ├── (shareLayout) // layouts specifically shared across token-specific sessions
│ ├── activate // activate page
│ ├── components // shared by pages and layouts
│ ├── install // install page
│ ├── signin // signin page
│ └── styles // globally shared styles
├── assets // Static assets
├── bin // scripts run at the build step
├── config // adjustable settings and options
├── context // shared contexts used by different portions of the app
├── dictionaries // Language-specific translation files
├── docker // container configurations
├── hooks // Reusable hooks
├── i18n // Internationalization configuration
├── models // describes data models & shapes of API responses
├── public // meta assets like favicon
├── service // specifies shapes of API actions
├── test
├── types // descriptions of function params and return values
└── utils // Shared utility functions
```
## Submitting your PR
### Pull Request Process
At last, time to open a pull request (PR) to our repo. For major features, we first merge them into the `deploy/dev` branch for testing, before they go into the `main` branch. If you run into issues like merge conflicts or don't know how to open a pull request, check out [GitHub's pull request tutorial](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests).
1. Fork the repository
1. Before you draft a PR, please create an issue to discuss the changes you want to make
1. Create a new branch for your changes (see the sketch after this list)
1. Please add tests for your changes accordingly
1. Ensure your code passes the existing tests
1. Please link the issue in the PR description, `fixes #<issue_number>`
1. Get merged!
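For reference, a command-line sketch of that flow; the branch name and issue number are placeholders, and `origin` is assumed to point at your fork:
```
# Create a topic branch, commit your change, and push it to your fork.
git checkout -b fix/issue-1234
git commit -am "fix: resolve the reported issue"
git push -u origin fix/issue-1234
# Then open a PR on GitHub and add "fixes #1234" to its description.
```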
### Setup the project
#### Frontend
For setting up the frontend service, please refer to our comprehensive [guide](https://github.com/langgenius/dify/blob/main/web/README.md) in the `web/README.md` file. This document provides detailed instructions to help you set up the frontend environment properly.
#### Backend
For setting up the backend service, kindly refer to our detailed [instructions](https://github.com/langgenius/dify/blob/main/api/README.md) in the `api/README.md` file. This document contains step-by-step guidance to help you get the backend up and running smoothly.
#### Other things to note
We recommend reviewing this document carefully before proceeding with the setup, as it contains essential information about:
- Prerequisites and dependencies
- Installation steps
- Configuration details
- Common troubleshooting tips
Feel free to reach out if you encounter any issues during the setup process.
And that's it! Once your PR is merged, you will be featured as a contributor in our [README](https://github.com/langgenius/dify/blob/main/README.md).
## Getting Help
If you ever get stuck or get a burning question while contributing, simply shoot your queries our way via the related GitHub issue, or hop onto our [Discord](https://discord.gg/8Tpq4AcN9c) for a quick chat.
If you ever get stuck or got a burning question while contributing, simply shoot your queries our way via the related GitHub issue, or hop onto our [Discord](https://discord.gg/8Tpq4AcN9c) for a quick chat.

CONTRIBUTING_CN.md
View File

@@ -0,0 +1,155 @@
So you're looking to contribute to Dify - that's awesome, we can't wait to see what you do. As a startup with limited headcount and funding, we have grand ambitions to design the most intuitive workflow for building and managing LLM applications. Any help from the community counts, truly.
We need to be nimble and ship fast given where we are, but we also want to make sure that contributors like you get as smooth an experience at contributing as possible. We've assembled this contribution guide for that purpose, aiming at getting you familiarized with the codebase & how we work with contributors, so you could quickly jump to the fun part.
This guide, like Dify itself, is a constant work in progress. We highly appreciate your understanding if at times it lags behind the actual project, and welcome any feedback for us to improve.
In terms of licensing, please take a minute to read our short [License and Contributor Agreement](./license). The community also adheres to the [code of conduct](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md).
## Before you jump in
[Find](https://github.com/langgenius/dify/issues?q=is:issue+is:closed) an existing issue, or [open](https://github.com/langgenius/dify/issues/new/choose) a new one. We categorize issues into 2 types:
### Feature requests:
* If you're opening a new feature request, we'd like you to explain what the proposed feature achieves, and include as much context as possible. [@perzeusss](https://github.com/perzeuss) has made a solid [Feature Request Copilot](https://udify.app/chat/MK2kVSnw1gakVwMX) that helps you draft out your needs. Feel free to give it a try.
* If you want to pick one up from the existing issues, simply drop a comment below it saying so.
A team member working in the related direction will be looped in. If all looks good, they will give the go-ahead for you to start coding. We ask that you hold off working on the feature until then, so none of your work goes to waste should we propose changes.
Depending on which area the proposed feature falls under, you might talk to different team members. Here's a rundown of the areas each of our team members is working on at the moment:
| Member | Scope |
| ------------------------------------------------------------ | ---------------------------------------------------- |
| [@yeuoly](https://github.com/Yeuoly) | Architecting Agents |
| [@jyong](https://github.com/JohnJyong) | RAG pipeline design |
| [@GarfieldDai](https://github.com/GarfieldDai) | Building workflow orchestrations |
| [@iamjoel](https://github.com/iamjoel) & [@zxhlyh](https://github.com/zxhlyh) | Making our frontend a breeze to use |
| [@guchenhe](https://github.com/guchenhe) & [@crazywoola](https://github.com/crazywoola) | Developer experience, points of contact for anything |
| [@takatost](https://github.com/takatost) | Overall product direction and architecture |
How we prioritize:
| Feature Type | Priority |
| ------------------------------------------------------------ | --------------- |
| High-Priority Features as being labeled by a team member | High Priority |
| Popular feature requests from our [community feedback board](https://feedback.dify.ai/) | Medium Priority |
| Non-core features and minor enhancements | Low Priority |
| Valuable but not immediate | Future-Feature |
### Anything else (e.g. bug reports, performance optimization, typo corrections):
* Start coding right away.
How we prioritize:
| Issue Type | Priority |
| ------------------------------------------------------------ | --------------- |
| Bugs in core functions (cannot login, applications not working, security loopholes) | Critical |
| Non-critical bugs, performance boosts | Medium Priority |
| Minor fixes (typos, confusing but working UI) | Low Priority |
## Installing
Here are the steps to set up Dify for development:
### 1. Fork this repository
### 2. Clone the repo
Clone the forked repository from your terminal:
```
git clone git@github.com:<github_username>/dify.git
```
### 3. Verify dependencies
Dify depends on the following tools and libraries:
- [Docker](https://www.docker.com/)
- [Docker Compose](https://docs.docker.com/compose/install/)
- [Node.js v18.x (LTS)](http://nodejs.org)
- [npm](https://www.npmjs.com/) version 8.x.x or [Yarn](https://yarnpkg.com/)
- [Python](https://www.python.org/) version 3.10.x
### 4. Installations
Dify is composed of a backend and a frontend. Navigate to the backend directory by `cd api/`, then follow the [Backend README](api/README.md) to install it. In a separate terminal, navigate to the frontend directory by `cd web/`, then follow the [Frontend README](web/README.md) to install.
Check the [installation FAQ](https://docs.dify.ai/getting-started/faq/install-faq) for a list of common issues and steps to troubleshoot.
### 5. Visit Dify in your browser
To validate your setup, head over to [http://localhost:3000](http://localhost:3000) (the default, or your self-configured URL and port) in your browser. You should now see Dify up and running.
## Developing
If you are adding a model provider, [this guide](https://github.com/langgenius/dify/blob/main/api/core/model_runtime/README.md) is for you.
If you are adding a tool provider to Agent or Workflow, [this guide](./api/core/tools/README.md) is for you.
To help you quickly navigate where your contribution fits, a brief, annotated outline of Dify's backend & frontend is as follows:
### Backend
Dify's backend is written in Python using [Flask](https://flask.palletsprojects.com/en/3.0.x/). It uses [SQLAlchemy](https://www.sqlalchemy.org/) for ORM and [Celery](https://docs.celeryq.dev/en/stable/getting-started/introduction.html) for task queueing. Authorization logic goes via Flask-login.
```
[api/]
├── constants // Constant settings used throughout code base.
├── controllers // API route definitions and request handling logic.
├── core // Core application orchestration, model integrations, and tools.
├── docker // Docker & containerization related configurations.
├── events // Event handling and processing
├── extensions // Extensions with 3rd party frameworks/platforms.
├── fields // field definitions for serialization/marshalling.
├── libs // Reusable libraries and helpers.
├── migrations // Scripts for database migration.
├── models // Database models & schema definitions.
├── services // Specifies business logic.
├── storage // Private key storage.
├── tasks // Handling of async tasks and background jobs.
└── tests
```
### Frontend
The website is bootstrapped on [Next.js](https://nextjs.org/) boilerplate in Typescript and uses [Tailwind CSS](https://tailwindcss.com/) for styling. [React-i18next](https://react.i18next.com/) is used for internationalization.
```
[web/]
├── app // layouts, pages, and components
│ ├── (commonLayout) // common layout used throughout the app
│ ├── (shareLayout) // layouts specifically shared across token-specific sessions
│ ├── activate // activate page
│ ├── components // shared by pages and layouts
│ ├── install // install page
│ ├── signin // signin page
│ └── styles // globally shared styles
├── assets // Static assets
├── bin // scripts run at the build step
├── config // adjustable settings and options
├── context // shared contexts used by different portions of the app
├── dictionaries // Language-specific translation files
├── docker // container configurations
├── hooks // Reusable hooks
├── i18n // Internationalization configuration
├── models // describes data models & shapes of API responses
├── public // meta assets like favicon
├── service // specifies shapes of API actions
├── test
├── types // descriptions of function params and return values
└── utils // Shared utility functions
```
## Submitting your PR
At last, time to open a pull request (PR) to our repo. For major features, we first merge them into the `deploy/dev` branch for testing before they go into the `main` branch. If you run into issues like merge conflicts or don't know how to open a pull request, check out [GitHub's pull request tutorial](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests).
And that's it! Once your PR is merged, you will be featured as a contributor in our [README](https://github.com/langgenius/dify/blob/main/README.md).
## Getting Help
If you ever get stuck or have a burning question while contributing, simply shoot your queries our way via the related GitHub issue, or hop onto our [Discord](https://discord.gg/8Tpq4AcN9c) for a quick chat.

LICENSE
View File

@@ -1,14 +1,15 @@
# Open Source License
Dify is licensed under the Apache License 2.0, with the following additional conditions:
1. Dify may be utilized commercially, including as a backend service for other applications or as an application development platform for enterprises. Should the conditions below be met, a commercial license must be obtained from the producer:
a. Multi-tenant SaaS service: Unless explicitly authorized by Dify in writing, you may not use the Dify source code to operate a multi-tenant environment.
- Tenant Definition: Within the context of Dify, one tenant corresponds to one workspace. The workspace provides a separated area for each tenant's data and configurations.
b. LOGO and copyright information: In the process of using Dify's frontend components, you may not remove or modify the LOGO or copyright information in the Dify console or applications. This restriction is inapplicable to uses of Dify that do not involve its frontend components.
- Frontend Definition: For the purposes of this license, the "frontend" of Dify includes all components located in the `web/` directory when running Dify from the raw source code, or the "web" image when running Dify with Docker.
Please contact business@dify.ai by email to inquire about licensing matters.
2. As a contributor, you should agree that:
@@ -19,4 +20,19 @@ Apart from the specific conditions mentioned above, all other rights and restric
The interactive design of this product is protected by appearance patent.
© 2024 LangGenius, Inc.
----------
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

Makefile

@@ -4,72 +4,6 @@ WEB_IMAGE=$(DOCKER_REGISTRY)/dify-web
API_IMAGE=$(DOCKER_REGISTRY)/dify-api
VERSION=latest
# Default target - show help
.DEFAULT_GOAL := help
# Backend Development Environment Setup
.PHONY: dev-setup prepare-docker prepare-web prepare-api
# Dev setup target
dev-setup: prepare-docker prepare-web prepare-api
@echo "✅ Backend development environment setup complete!"
# Step 1: Prepare Docker middleware
prepare-docker:
@echo "🐳 Setting up Docker middleware..."
@cp -n docker/middleware.env.example docker/middleware.env 2>/dev/null || echo "Docker middleware.env already exists"
@cd docker && docker compose -f docker-compose.middleware.yaml --env-file middleware.env -p dify-middlewares-dev up -d
@echo "✅ Docker middleware started"
# Step 2: Prepare web environment
prepare-web:
@echo "🌐 Setting up web environment..."
@cp -n web/.env.example web/.env 2>/dev/null || echo "Web .env already exists"
@cd web && pnpm install
@echo "✅ Web environment prepared (not started)"
# Step 3: Prepare API environment
prepare-api:
@echo "🔧 Setting up API environment..."
@cp -n api/.env.example api/.env 2>/dev/null || echo "API .env already exists"
@cd api && uv sync --dev
@cd api && uv run flask db upgrade
@echo "✅ API environment prepared (not started)"
# Clean dev environment
dev-clean:
@echo "⚠️ Stopping Docker containers..."
@cd docker && docker compose -f docker-compose.middleware.yaml --env-file middleware.env -p dify-middlewares-dev down
@echo "🗑️ Removing volumes..."
@rm -rf docker/volumes/db
@rm -rf docker/volumes/redis
@rm -rf docker/volumes/plugin_daemon
@rm -rf docker/volumes/weaviate
@rm -rf api/storage
@echo "✅ Cleanup complete"
# Backend Code Quality Commands
format:
@echo "🎨 Running ruff format..."
@uv run --project api --dev ruff format ./api
@echo "✅ Code formatting complete"
check:
@echo "🔍 Running ruff check..."
@uv run --project api --dev ruff check ./api
@echo "✅ Code check complete"
lint:
@echo "🔧 Running ruff format, check with fixes, and import linter..."
@uv run --project api --dev sh -c 'ruff format ./api && ruff check --fix ./api'
@uv run --directory api --dev lint-imports
@echo "✅ Linting complete"
type-check:
@echo "📝 Running type check with basedpyright..."
@uv run --directory api --dev basedpyright
@echo "✅ Type check complete"
# Build Docker images
build-web:
@echo "Building web Docker image: $(WEB_IMAGE):$(VERSION)..."
@@ -105,27 +39,5 @@ build-push-web: build-web push-web
build-push-all: build-all push-all
@echo "All Docker images have been built and pushed."
# Help target
help:
@echo "Development Setup Targets:"
@echo " make dev-setup - Run all setup steps for backend dev environment"
@echo " make prepare-docker - Set up Docker middleware"
@echo " make prepare-web - Set up web environment"
@echo " make prepare-api - Set up API environment"
@echo " make dev-clean - Stop Docker middleware containers"
@echo ""
@echo "Backend Code Quality:"
@echo " make format - Format code with ruff"
@echo " make check - Check code with ruff"
@echo " make lint - Format and fix code with ruff"
@echo " make type-check - Run type checking with basedpyright"
@echo ""
@echo "Docker Build Targets:"
@echo " make build-web - Build web Docker image"
@echo " make build-api - Build API Docker image"
@echo " make build-all - Build all Docker images"
@echo " make push-all - Push all Docker images"
@echo " make build-push-all - Build and push all Docker images"
# Phony targets
.PHONY: build-web build-api push-web push-api build-all push-all build-push-all dev-setup prepare-docker prepare-web prepare-api dev-clean help format check lint type-check
.PHONY: build-web build-api push-web push-api build-all push-all build-push-all
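Taken together, these targets give a one-command backend bootstrap. A typical flow might look like this (a sketch; it assumes Docker, uv, and pnpm are installed, as the targets above require):
```bash
make dev-setup    # middleware containers + web deps + API deps and migrations
make format       # ruff format
make lint         # ruff format, fixes, and import linter
make type-check   # basedpyright
make dev-clean    # stop middleware containers and wipe local volumes
```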

README.md

@@ -1,216 +1,163 @@
![cover-v5-optimized](./images/GitHub_README_if.png)
[![](./images/describe.png)](https://dify.ai)
<p align="center">
📌 <a href="https://dify.ai/blog/introducing-dify-workflow-file-upload-a-demo-on-ai-podcast">Introducing Dify Workflow File Upload: Recreate Google NotebookLM Podcast</a>
</p>
<p align="center">
<a href="https://cloud.dify.ai">Dify Cloud</a> ·
<a href="https://docs.dify.ai/getting-started/install-self-hosted">Self-hosting</a> ·
<a href="https://docs.dify.ai">Documentation</a> ·
<a href="https://dify.ai/pricing">Dify edition overview</a>
<a href="./README.md">English</a> |
<a href="./README_CN.md">简体中文</a> |
<a href="./README_JA.md">日本語</a> |
<a href="./README_ES.md">Español</a> |
<a href="./README_KL.md">Klingon</a> |
<a href="./README_FR.md">Français</a>
</p>
<p align="center">
<a href="https://dify.ai" target="_blank">
<img alt="Static Badge" src="https://img.shields.io/badge/Product-F04438"></a>
<a href="https://dify.ai/pricing" target="_blank">
<img alt="Static Badge" src="https://img.shields.io/badge/free-pricing?logo=free&color=%20%23155EEF&label=pricing&labelColor=%20%23528bff"></a>
<img alt="Static Badge" src="https://img.shields.io/badge/AI-Dify?logo=AI&logoColor=%20%23f5f5f5&label=Dify&labelColor=%20%23155EEF&color=%23EAECF0"></a>
<a href="https://discord.gg/FngNHpbcY7" target="_blank">
<img src="https://img.shields.io/discord/1082486657678311454?logo=discord&labelColor=%20%235462eb&logoColor=%20%23f5f5f5&color=%20%235462eb"
<img src="https://img.shields.io/discord/1082486657678311454?logo=discord"
alt="chat on Discord"></a>
<a href="https://reddit.com/r/difyai" target="_blank">
<img src="https://img.shields.io/reddit/subreddit-subscribers/difyai?style=plastic&logo=reddit&label=r%2Fdifyai&labelColor=white"
alt="join Reddit"></a>
<a href="https://twitter.com/intent/follow?screen_name=dify_ai" target="_blank">
<img src="https://img.shields.io/twitter/follow/dify_ai?logo=X&color=%20%23f5f5f5"
alt="follow on X(Twitter)"></a>
<a href="https://www.linkedin.com/company/langgenius/" target="_blank">
<img src="https://custom-icon-badges.demolab.com/badge/LinkedIn-0A66C2?logo=linkedin-white&logoColor=fff"
alt="follow on LinkedIn"></a>
<img src="https://img.shields.io/twitter/follow/dify_ai?style=social&logo=X"
alt="follow on Twitter"></a>
<a href="https://hub.docker.com/u/langgenius" target="_blank">
<img alt="Docker Pulls" src="https://img.shields.io/docker/pulls/langgenius/dify-web?labelColor=%20%23FDB062&color=%20%23f79009"></a>
<a href="https://github.com/langgenius/dify/graphs/commit-activity" target="_blank">
<img alt="Commits last month" src="https://img.shields.io/github/commit-activity/m/langgenius/dify?labelColor=%20%2332b583&color=%20%2312b76a"></a>
<a href="https://github.com/langgenius/dify/" target="_blank">
<img alt="Issues closed" src="https://img.shields.io/github/issues-search?query=repo%3Alanggenius%2Fdify%20is%3Aclosed&label=issues%20closed&labelColor=%20%237d89b0&color=%20%235d6b98"></a>
<a href="https://github.com/langgenius/dify/discussions/" target="_blank">
<img alt="Discussion posts" src="https://img.shields.io/github/discussions/langgenius/dify?labelColor=%20%239b8afb&color=%20%237a5af8"></a>
<img alt="Docker Pulls" src="https://img.shields.io/docker/pulls/langgenius/dify-web"></a>
</p>
<p align="center">
<a href="./README.md"><img alt="README in English" src="https://img.shields.io/badge/English-d9d9d9"></a>
<a href="./docs/zh-TW/README.md"><img alt="繁體中文文件" src="https://img.shields.io/badge/繁體中文-d9d9d9"></a>
<a href="./docs/zh-CN/README.md"><img alt="简体中文文件" src="https://img.shields.io/badge/简体中文-d9d9d9"></a>
<a href="./docs/ja-JP/README.md"><img alt="日本語のREADME" src="https://img.shields.io/badge/日本語-d9d9d9"></a>
<a href="./docs/es-ES/README.md"><img alt="README en Español" src="https://img.shields.io/badge/Español-d9d9d9"></a>
<a href="./docs/fr-FR/README.md"><img alt="README en Français" src="https://img.shields.io/badge/Français-d9d9d9"></a>
<a href="./docs/tlh/README.md"><img alt="README tlhIngan Hol" src="https://img.shields.io/badge/Klingon-d9d9d9"></a>
<a href="./docs/ko-KR/README.md"><img alt="README in Korean" src="https://img.shields.io/badge/한국어-d9d9d9"></a>
<a href="./docs/ar-SA/README.md"><img alt="README بالعربية" src="https://img.shields.io/badge/العربية-d9d9d9"></a>
<a href="./docs/tr-TR/README.md"><img alt="Türkçe README" src="https://img.shields.io/badge/Türkçe-d9d9d9"></a>
<a href="./docs/vi-VN/README.md"><img alt="README Tiếng Việt" src="https://img.shields.io/badge/Ti%E1%BA%BFng%20Vi%E1%BB%87t-d9d9d9"></a>
<a href="./docs/de-DE/README.md"><img alt="README in Deutsch" src="https://img.shields.io/badge/German-d9d9d9"></a>
<a href="./docs/bn-BD/README.md"><img alt="README in বাংলা" src="https://img.shields.io/badge/বাংলা-d9d9d9"></a>
<a href="https://aws.amazon.com/marketplace/pp/prodview-t22mebxzwjhu6" target="_blank">
📌 Check out Dify Premium on AWS and deploy it to your own AWS VPC with one click.
</a>
</p>
**Dify** is an open-source LLM app development platform. Dify's intuitive interface combines a RAG pipeline, AI workflow orchestration, agent capabilities, model management, observability features and more, letting you quickly go from prototype to production.
## Quick start
![](./images/demo.png)
> Before installing Dify, make sure your machine meets the following minimum system requirements:
>
> - CPU >= 2 Core
> - RAM >= 4 GiB
<br/>
The easiest way to start the Dify server is through [Docker Compose](docker/docker-compose.yaml). Before running Dify with the following commands, make sure that [Docker](https://docs.docker.com/get-docker/) and [Docker Compose](https://docs.docker.com/compose/install/) are installed on your machine:
## Using our Cloud Services
You can try out [Dify.AI Cloud](https://dify.ai) now. It provides all the capabilities of the self-deployed version, and includes 200 free requests to OpenAI GPT-3.5.
### Looking to purchase via AWS?
Check out [Dify Premium on AWS](https://aws.amazon.com/marketplace/pp/prodview-t22mebxzwjhu6) and deploy it to your own AWS VPC with one-click.
## Dify vs. LangChain vs. Assistants API
| Feature | Dify.AI | Assistants API | LangChain |
|---------|---------|----------------|-----------|
| **Programming Approach** | API-oriented | API-oriented | Python Code-oriented |
| **Ecosystem Strategy** | Open Source | Closed Source | Open Source |
| **RAG Engine** | Supported | Supported | Not Supported |
| **Prompt IDE** | Included | Included | None |
| **Supported LLMs** | Rich Variety | OpenAI-only | Rich Variety |
| **Local Deployment** | Supported | Not Supported | Not Applicable |
## Features
![](./images/models.png)
**1. LLM Support**: Integration with OpenAI's GPT family of models, or the open-source Llama2 family models. In fact, Dify supports mainstream commercial models and open-source models (locally deployed or based on MaaS).
**2. Prompt IDE**: Visual orchestration of applications and services based on LLMs with your team.
**3. RAG Engine**: Includes various RAG capabilities based on full-text indexing or vector database embeddings, allowing direct upload of PDFs, TXTs, and other text formats.
**4. AI Agent**: Based on Function Calling and ReAct, the Agent inference framework lets users customize their own tools in a what-you-see-is-what-you-get way. Dify provides more than a dozen built-in tool-calling capabilities, such as Google Search, DALL·E, Stable Diffusion, and WolframAlpha.
**5. Continuous Operations**: Monitor and analyze application logs and performance, continuously improving Prompts, datasets, or models using production data.
## Before You Start
**Star us on GitHub, and be instantly notified of new releases!**
![star-us](https://github.com/langgenius/dify/assets/100913391/95f37259-7370-4456-a9f0-0bc01ef8642f)
- [Website](https://dify.ai)
- [Docs](https://docs.dify.ai)
- [Deployment Docs](https://docs.dify.ai/getting-started/install-self-hosted)
- [FAQ](https://docs.dify.ai/getting-started/faq)
## Install the Community Edition
### System Requirements
Before installing Dify, make sure your machine meets the following minimum system requirements:
- CPU >= 2 Core
- RAM >= 4GB
### Quick Start
The easiest way to start the Dify server is to run our [docker-compose.yml](docker/docker-compose.yaml) file. Before running the installation command, make sure that [Docker](https://docs.docker.com/get-docker/) and [Docker Compose](https://docs.docker.com/compose/install/) are installed on your machine:
```bash
cd dify
cd docker
cp .env.example .env
docker compose up -d
```
After running, you can access the Dify dashboard in your browser at [http://localhost/install](http://localhost/install) and start the initialization process.
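If the dashboard does not come up, the standard Docker Compose tooling is the quickest way to inspect the stack (service names below are illustrative; list the real ones with `docker compose ps`):
```bash
docker compose ps            # every service should report "running" or "healthy"
docker compose logs -f api   # tail a specific service, e.g. the API container
```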
#### Seeking help
Please refer to our [FAQ](https://docs.dify.ai/getting-started/install-self-hosted/faqs) if you encounter problems setting up Dify. Reach out to [the community and us](#community--contact) if you are still having issues.
> If you'd like to contribute to Dify or do additional development, refer to our [guide to deploying from source code](https://docs.dify.ai/getting-started/install-self-hosted/local-source-code)
#### Deploy with Helm Chart
## Key features
**1. Workflow**:
Build and test powerful AI workflows on a visual canvas, leveraging all the following features and beyond.
**2. Comprehensive model support**:
Seamless integration with hundreds of proprietary / open-source LLMs from dozens of inference providers and self-hosted solutions, covering GPT, Mistral, Llama3, and any OpenAI API-compatible models. A full list of supported model providers can be found [here](https://docs.dify.ai/getting-started/readme/model-providers).
![providers-v5](https://github.com/langgenius/dify/assets/13230914/5a17bdbe-097a-4100-8363-40255b70f6e3)
**3. Prompt IDE**:
Intuitive interface for crafting prompts, comparing model performance, and adding additional features such as text-to-speech to a chat-based app.
**4. RAG Pipeline**:
Extensive RAG capabilities that cover everything from document ingestion to retrieval, with out-of-box support for text extraction from PDFs, PPTs, and other common document formats.
**5. Agent capabilities**:
You can define agents based on LLM Function Calling or ReAct, and add pre-built or custom tools for the agent. Dify provides 50+ built-in tools for AI agents, such as Google Search, DALL·E, Stable Diffusion and WolframAlpha.
**6. LLMOps**:
Monitor and analyze application logs and performance over time. You could continuously improve prompts, datasets, and models based on production data and annotations.
**7. Backend-as-a-Service**:
All of Dify's offerings come with corresponding APIs, so you could effortlessly integrate Dify into your own business logic.
## Using Dify
- **Cloud <br/>**
We host a [Dify Cloud](https://dify.ai) service for anyone to try with zero setup. It provides all the capabilities of the self-deployed version, and includes 200 free GPT-4 calls in the sandbox plan.
- **Self-hosting Dify Community Edition<br/>**
Quickly get Dify running in your environment with this [starter guide](#quick-start).
Use our [documentation](https://docs.dify.ai) for further references and more in-depth instructions.
- **Dify for enterprise / organizations<br/>**
We provide additional enterprise-centric features. [Send us an email](mailto:business@dify.ai?subject=%5BGitHub%5DBusiness%20License%20Inquiry) to discuss your enterprise needs. <br/>
> For startups and small businesses using AWS, check out [Dify Premium on AWS Marketplace](https://aws.amazon.com/marketplace/pp/prodview-t22mebxzwjhu6) and deploy it to your own AWS VPC with one click. It's an affordable AMI offering with the option to create apps with custom logo and branding.
## Staying ahead
Star Dify on GitHub and be instantly notified of new releases.
![star-us](https://github.com/langgenius/dify/assets/13230914/b823edc1-6388-4e25-ad45-2f6b187adbb4)
## Advanced Setup
### Custom configurations
If you need to customize the configuration, please refer to the comments in our [.env.example](docker/.env.example) file and update the corresponding values in your `.env` file. Additionally, you might need to make adjustments to the `docker-compose.yaml` file itself, such as changing image versions, port mappings, or volume mounts, based on your specific deployment environment and requirements. After making any changes, please re-run `docker-compose up -d`. You can find the full list of available environment variables [here](https://docs.dify.ai/getting-started/install-self-hosted/environments).
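As a concrete sketch of that loop (the variable names you change come from `.env.example` itself; these steps are generic):
```bash
cd docker
cp .env.example .env    # once, if you haven't created it yet
$EDITOR .env            # adjust values, e.g. exposed ports or DB credentials
docker compose up -d    # re-create containers so the changes take effect
```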
### Metrics Monitoring with Grafana
Import the dashboard into Grafana, using Dify's PostgreSQL database as the data source, to monitor metrics at the granularity of apps, tenants, messages, and more.
- [Grafana Dashboard by @bowenliang123](https://github.com/bowenliang123/dify-grafana-dashboard)
### Deployment with Kubernetes
If you'd like to configure a highly-available setup, there are community-contributed [Helm Charts](https://helm.sh/) and YAML files which allow Dify to be deployed on Kubernetes.
- [Helm Chart by @LeoQuote](https://github.com/douban/charts/tree/master/charts/dify)
- [Helm Chart by @BorisPolonsky](https://github.com/BorisPolonsky/dify-helm)
- [Helm Chart by @magicsong](https://github.com/magicsong/ai-charts)
- [YAML file by @Winson-030](https://github.com/Winson-030/dify-kubernetes)
- [YAML file by @wyy-holding](https://github.com/wyy-holding/dify-k8s)
- [🚀 NEW! YAML files (Supports Dify v1.6.0) by @Zhoneym](https://github.com/Zhoneym/DifyAI-Kubernetes)
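Installing one of the community charts follows the usual Helm flow. A sketch (the repository URL, release name, and values are assumptions; each chart's README is authoritative):
```bash
helm repo add dify https://borispolonsky.github.io/dify-helm   # assumed chart repo URL
helm repo update
helm install dify dify/dify --namespace dify --create-namespace
```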
### Configuration
If you need to customize the configuration, please refer to the comments in our [docker-compose.yml](docker/docker-compose.yaml) file and manually set the environment configuration. After making the changes, please run `docker-compose up -d` again. You can see the full list of environment variables in our [docs](https://docs.dify.ai/getting-started/install-self-hosted/environments).
#### Using Terraform for Deployment
Deploy Dify to a cloud platform with a single click using [terraform](https://www.terraform.io/)
##### Azure Global
- [Azure Terraform by @nikawang](https://github.com/nikawang/dify-azure-terraform)
##### Google Cloud
- [Google Cloud Terraform by @sotazum](https://github.com/DeNA/dify-google-cloud-terraform)
#### Using AWS CDK for Deployment
Deploy Dify to AWS with [CDK](https://aws.amazon.com/cdk/)
##### AWS
- [AWS CDK by @KevinZhao (EKS based)](https://github.com/aws-samples/solution-for-deploying-dify-on-aws)
- [AWS CDK by @tmokmss (ECS based)](https://github.com/aws-samples/dify-self-hosted-on-aws)
#### Using Alibaba Cloud Computing Nest
Quickly deploy Dify to Alibaba Cloud with [Alibaba Cloud Computing Nest](https://computenest.console.aliyun.com/service/instance/create/default?type=user&ServiceName=Dify%E7%A4%BE%E5%8C%BA%E7%89%88)
#### Using Alibaba Cloud Data Management
One-click deploy Dify to Alibaba Cloud with [Alibaba Cloud Data Management](https://www.alibabacloud.com/help/en/dms/dify-in-invitational-preview/)
#### Deploy to AKS with Azure DevOps Pipeline
One-click deploy Dify to AKS with the [Azure DevOps Pipeline Helm Chart by @LeoZhang](https://github.com/Ruiruiz30/Dify-helm-chart-AKS)
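The Terraform modules above all follow the standard workflow once their variables are filled in. A generic sketch, using the Azure module as an example (paths and variables differ per repo):
```bash
git clone https://github.com/nikawang/dify-azure-terraform
cd dify-azure-terraform
terraform init    # download providers and modules
terraform plan    # review the resources before creating them
terraform apply   # provision Dify's infrastructure
```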
## Star History
[![Star History Chart](https://api.star-history.com/svg?repos=langgenius/dify&type=Date)](https://star-history.com/#langgenius/dify&Date)
## Contributing
For those who'd like to contribute code, see our [Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md).
At the same time, please consider supporting Dify by sharing it on social media and at events and conferences.
> We are looking for contributors to help translate Dify into languages other than Mandarin or English. If you are interested in helping, please see the [i18n README](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md) for more information, and leave us a comment in the `global-users` channel of our [Discord Community Server](https://discord.gg/8Tpq4AcN9c).
### Projects made by community
- [Chatbot Chrome Extension by @charli117](https://github.com/langgenius/chatbot-chrome-extension)
## Community & contact
- [GitHub Discussion](https://github.com/langgenius/dify/discussions). Best for: sharing feedback and asking questions.
- [GitHub Issues](https://github.com/langgenius/dify/issues). Best for: bugs you encounter using Dify.AI, and feature proposals. See our [Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md).
- [Discord](https://discord.gg/FngNHpbcY7). Best for: sharing your applications and hanging out with the community.
- [X(Twitter)](https://twitter.com/dify_ai). Best for: sharing your applications and hanging out with the community.
### Contributors
<a href="https://github.com/langgenius/dify/graphs/contributors">
  <img src="https://contrib.rocks/image?repo=langgenius/dify" />
</a>
### Translations
We are looking for contributors to help translate Dify into languages other than Mandarin or English. If you are interested in helping, please see the [i18n README](https://github.com/langgenius/dify/blob/main/web/i18n/README.md) for more information, and leave us a comment in the `global-users` channel of our [Discord Community Server](https://discord.gg/8Tpq4AcN9c).
## Star history
[![Star History Chart](https://api.star-history.com/svg?repos=langgenius/dify&type=Date)](https://star-history.com/#langgenius/dify&Date)
## Community & Support
* [Canny](https://feedback.dify.ai/). Best for: sharing feedback and checking out our feature roadmap.
* [GitHub Issues](https://github.com/langgenius/dify/issues). Best for: bugs you encounter using Dify.AI, and feature proposals. See our [Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md).
* [Email Support](mailto:hello@dify.ai?subject=[GitHub]Questions%20About%20Dify). Best for: questions you have about using Dify.AI.
* [Discord](https://discord.gg/FngNHpbcY7). Best for: sharing your applications and hanging out with the community.
* [Twitter](https://twitter.com/dify_ai). Best for: sharing your applications and hanging out with the community.
* [Business Contact](mailto:business@dify.ai?subject=[GitHub]Business%20License%20Inquiry). Best for: business inquiries about licensing Dify.AI for commercial use.
### Direct Meetings
**Help us make Dify better. Reach out to us directly.**
| Point of Contact | Purpose |
| :----------------------------------------------------------: | :----------------------------------------------------------: |
| <a href='https://cal.com/guchenhe/15min' target='_blank'><img src='https://i.postimg.cc/fWBqSmjP/Git-Hub-README-Button-3x.png' border='0' alt='Git-Hub-README-Button-3x' height="60" width="214"/></a> | Product design feedback, user experience discussions, feature planning and roadmaps. |
| <a href='https://cal.com/pinkbanana' target='_blank'><img src='https://i.postimg.cc/LsRTh87D/Git-Hub-README-Button-2x.png' border='0' alt='Git-Hub-README-Button-2x' height="60" width="225"/></a> | Technical support, issues, or feature requests. |
## Security Disclosure
To protect your privacy, please avoid posting security issues on GitHub. Instead, send your questions to security@dify.ai, and our team will respond with a detailed answer.
## License
This repository is licensed under the [Dify Open Source License](LICENSE), based on Apache 2.0 with additional conditions.

README_CN.md

@@ -0,0 +1,131 @@
[![](./images/describe.png)](https://dify.ai)
<p align="center">
<a href="./README.md">English</a> |
<a href="./README_CN.md">简体中文</a> |
<a href="./README_JA.md">日本語</a> |
<a href="./README_ES.md">Español</a> |
<a href="./README_KL.md">Klingon</a> |
<a href="./README_FR.md">Français</a>
</p>
<p align="center">
<a href="https://dify.ai" target="_blank">
<img alt="Static Badge" src="https://img.shields.io/badge/AI-Dify?logo=AI&logoColor=%20%23f5f5f5&label=Dify&labelColor=%20%23155EEF&color=%23EAECF0"></a>
<a href="https://discord.gg/FngNHpbcY7" target="_blank">
<img src="https://img.shields.io/discord/1082486657678311454?logo=discord"
alt="chat on Discord"></a>
<a href="https://twitter.com/intent/follow?screen_name=dify_ai" target="_blank">
<img src="https://img.shields.io/twitter/follow/dify_ai?style=social&logo=X"
alt="follow on Twitter"></a>
<a href="https://hub.docker.com/u/langgenius" target="_blank">
<img alt="Docker Pulls" src="https://img.shields.io/docker/pulls/langgenius/dify-web"></a>
</p>
<p align="center">
<a href="https://mp.weixin.qq.com/s/TnyfIuH-tPi9o1KNjwVArw" target="_blank">
Dify launches AI Agent capabilities: build GPTs and Assistants on top of different large language models
</a>
</p>
Dify is an LLM application development platform, with more than 100,000 applications already built on Dify.AI. It integrates the concepts of Backend as a Service and LLMOps, covering the core tech stack required to build generative-AI-native applications, including a built-in RAG engine. With Dify, you can self-deploy capabilities similar to the Assistants API and GPTs on top of any model.
![](./images/demo.png)
## Using Cloud Services
[Dify.AI Cloud](https://dify.ai) provides all the capabilities of the open-source version, and includes 200 free GPT trial credits.
## Why Dify
Dify is model-neutral. Compared with hard-coded development libraries such as LangChain, it is a complete, engineered tech stack; and unlike OpenAI's Assistants API, it lets you deploy the services fully on your own infrastructure.
| Feature | Dify.AI | Assistants API | LangChain |
| --- | --- | --- | --- |
| Programming Approach | API-oriented | API-oriented | Python Code-oriented |
| Ecosystem Strategy | Open Source | Closed and Commercial | Open Source |
| RAG Engine | Supported | Supported | Not Supported |
| Prompt IDE | Included | Included | None |
| Supported LLMs | Rich Variety | GPT only | Rich Variety |
| Local Deployment | Supported | Not Supported | Not Applicable |
## Features
![](./images/models.png)
**1. LLM Support**: Integration with OpenAI's GPT family of models, or the open-source Llama2 family. In fact, Dify supports mainstream commercial models as well as open-source models (locally deployed or MaaS-based).
**2. Prompt IDE**: Collaborate with your team in Dify, developing AI applications with visual prompt and application orchestration tools, with seamless switching between multiple large language models.
**3. RAG Engine**: Includes various RAG capabilities based on full-text indexing or vector database embeddings, allowing direct upload of PDFs, TXTs, and other text formats.
**4. AI Agent**: Based on Function Calling and ReAct, the Agent inference framework lets users customize their own tools in a what-you-see-is-what-you-get way. Dify provides more than a dozen built-in tool-calling capabilities, such as Google Search, DALL·E, Stable Diffusion, and WolframAlpha.
**5. Continuous Operations**: Monitor and analyze application logs and performance, and continuously improve prompts, datasets, or models using production data.
## Before You Start
**Follow us, and you will be instantly notified of every new release on GitHub!**
![star-us](https://github.com/langgenius/dify/assets/100913391/95f37259-7370-4456-a9f0-0bc01ef8642f)
- [Website](https://dify.ai)
- [Docs](https://docs.dify.ai)
- [Deployment Docs](https://docs.dify.ai/getting-started/install-self-hosted)
- [FAQ](https://docs.dify.ai/getting-started/faq)
## Install the Community Edition
### System Requirements
Before installing Dify, make sure your machine meets the following minimum system requirements:
- CPU >= 2 Core
- RAM >= 4GB
### Quick Start
The easiest way to start the Dify server is to run our [docker-compose.yml](docker/docker-compose.yaml) file. Before running the installation command, make sure [Docker](https://docs.docker.com/get-docker/) and [Docker Compose](https://docs.docker.com/compose/install/) are installed on your machine:
```bash
cd docker
docker compose up -d
```
After running, you can access the Dify console in your browser at [http://localhost/install](http://localhost/install) and begin the initialization process.
#### Deploy with Helm Chart
With the [Helm Chart](https://helm.sh/) version, Dify can be deployed on Kubernetes.
- [Helm Chart by @LeoQuote](https://github.com/douban/charts/tree/master/charts/dify)
- [Helm Chart by @BorisPolonsky](https://github.com/BorisPolonsky/dify-helm)
### Configuration
If you need to customize the configuration, please refer to the comments in our [docker-compose.yml](docker/docker-compose.yaml) file and set the environment configuration manually. After making the changes, run `docker-compose up -d` again. You can find the full list of environment variables in our [documentation](https://docs.dify.ai/getting-started/install-self-hosted/environments).
## Star History
[![Star History Chart](https://api.star-history.com/svg?repos=langgenius/dify&type=Date)](https://star-history.com/#langgenius/dify&Date)
## Community & Support
We welcome you to contribute to Dify to help make it better: submit code, report issues, propose new ideas, or share the interesting and useful AI applications you have built on Dify. We also welcome you to share Dify at events, conferences, and on social media.
- [GitHub Issues](https://github.com/langgenius/dify/issues). 👉: Bugs and errors you encounter using Dify.AI; see the [Contribution Guide](CONTRIBUTING.md).
- [Email Support](mailto:hello@dify.ai?subject=[GitHub]Questions%20About%20Dify). 👉: Questions about using Dify.AI.
- [Discord](https://discord.gg/FngNHpbcY7). 👉: Sharing your applications and hanging out with the community.
- [Twitter](https://twitter.com/dify_ai). 👉: Sharing your applications and hanging out with the community.
- [Business License](mailto:business@dify.ai?subject=[GitHub]Business%20License%20Inquiry). 👉: Business inquiries about licensing Dify.AI for commercial use.
- [WeChat]() 👉: Scan the QR code below to add us on WeChat, mention "Dify" in your note, and we will invite you to the Dify community.
<img src="./images/wechat.png" alt="wechat" width="100"/>
## Security Issues
To protect your privacy, please avoid posting security issues on GitHub. Instead, send your questions to security@dify.ai and we will provide you with a more detailed answer.
## License
This repository is available under the [Dify Open Source License](LICENSE), which is essentially Apache 2.0 with a few additional restrictions.

README_ES.md

@@ -0,0 +1,124 @@
[![](./images/describe.png)](https://dify.ai)
<p align="center">
<a href="./README.md">English</a> |
<a href="./README_CN.md">简体中文</a> |
<a href="./README_JA.md">日本語</a> |
<a href="./README_ES.md">Español</a> |
<a href="./README_KL.md">Klingon</a> |
<a href="./README_FR.md">Français</a>
</p>
<p align="center">
<a href="https://dify.ai" target="_blank">
<img alt="Static Badge" src="https://img.shields.io/badge/AI-Dify?logo=AI&logoColor=%20%23f5f5f5&label=Dify&labelColor=%20%23155EEF&color=%23EAECF0"></a>
<a href="https://discord.gg/FngNHpbcY7" target="_blank">
<img src="https://img.shields.io/discord/1082486657678311454?logo=discord"
alt="chat on Discord"></a>
<a href="https://twitter.com/intent/follow?screen_name=dify_ai" target="_blank">
<img src="https://img.shields.io/twitter/follow/dify_ai?style=social&logo=X"
alt="follow on Twitter"></a>
<a href="https://hub.docker.com/u/langgenius" target="_blank">
<img alt="Docker Pulls" src="https://img.shields.io/docker/pulls/langgenius/dify-web"></a>
</p>
<p align="center">
<a href="https://dify.ai/blog/dify-ai-unveils-ai-agent-creating-gpts-and-assistants-with-various-llms" target="_blank">
Dify.AI Unveils AI Agent: Creating GPTs and Assistants with Various LLMs
</a>
</p>
**Dify** is an LLM application development platform that has already seen more than **100,000** applications built on Dify.AI. It integrates the concepts of Backend as a Service and LLMOps, covering the core tech stack required to build generative-AI-native applications, including a built-in RAG engine. With Dify, **you can self-deploy capabilities similar to the Assistants API and GPTs on top of any LLM.**
![](./images/demo.png)
## Using Cloud Services
Using [Dify.AI Cloud](https://dify.ai) provides all the capabilities of the open-source version, and includes 200 free GPT trial credits.
## Why Dify
Dify is model-neutral and, compared with hard-coded development libraries such as LangChain, is a complete, engineered tech stack. Unlike OpenAI's Assistants API, Dify allows fully local deployment of the services.
| Feature | Dify.AI | Assistants API | LangChain |
|---------|---------|----------------|-----------|
| **Programming Approach** | API-oriented | API-oriented | Python Code-oriented |
| **Ecosystem Strategy** | Open Source | Closed and Commercial | Open Source |
| **RAG Engine** | Supported | Supported | Not Supported |
| **Prompt IDE** | Included | Included | None |
| **Supported LLMs** | Rich Variety | GPT only | Rich Variety |
| **Local Deployment** | Supported | Not Supported | Not Applicable |
## Features
![](./images/models.png)
**1. LLM Support**: Integration with OpenAI's GPT family of models, or the open-source Llama2 family. In fact, Dify supports mainstream commercial models as well as open-source models (locally deployed or MaaS-based).
**2. Prompt IDE**: Visual orchestration of applications and services based on LLMs with your team.
**3. RAG Engine**: Includes various RAG capabilities based on full-text indexing or vector database embeddings, allowing direct upload of PDFs, TXTs, and other text formats.
**4. AI Agent**: Based on Function Calling and ReAct, the Agent inference framework lets users customize their own tools in a what-you-see-is-what-you-get way. Dify provides more than a dozen built-in tool-calling capabilities, such as Google Search, DALL·E, Stable Diffusion, and WolframAlpha.
**5. Continuous Operations**: Monitor and analyze application logs and performance, continuously improving prompts, datasets, or models using production data.
## Before You Start
**Star us, and you will be instantly notified of every new release on GitHub!**
![star-us](https://github.com/langgenius/dify/assets/100913391/95f37259-7370-4456-a9f0-0bc01ef8642f)
- [Website](https://dify.ai)
- [Documentation](https://docs.dify.ai)
- [Deployment Docs](https://docs.dify.ai/getting-started/install-self-hosted)
- [FAQ](https://docs.dify.ai/getting-started/faq)
## Install the Community Edition
### System Requirements
Before installing Dify, make sure your machine meets the following minimum system requirements:
- CPU >= 2 cores
- RAM >= 4GB
### Quick Start
The easiest way to start the Dify server is to run our [docker-compose.yml](docker/docker-compose.yaml) file. Before running the installation command, make sure [Docker](https://docs.docker.com/get-docker/) and [Docker Compose](https://docs.docker.com/compose/install/) are installed on your machine:
```bash
cd docker
docker compose up -d
```
After running, you can access the Dify dashboard in your browser at [http://localhost/install](http://localhost/install) and start the initialization process.
### Helm Chart
Big thanks to @BorisPolonsky for providing us with a [Helm Chart](https://helm.sh/) version, which allows Dify to be deployed on Kubernetes. You can visit https://github.com/BorisPolonsky/dify-helm for deployment information.
### Configuration
If you need to customize the configuration, please refer to the comments in our [docker-compose.yml](docker/docker-compose.yaml) file and set the environment configuration manually. After making the changes, run `docker-compose up -d` again. You can find the full list of environment variables in our [documentation](https://docs.dify.ai/getting-started/install-self-hosted/environments).
## Star History
[![Star History Chart](https://api.star-history.com/svg?repos=langgenius/dify&type=Date)](https://star-history.com/#langgenius/dify&Date)
## Community & Support
We welcome you to contribute to Dify to help make it better in many ways: submitting code, reporting issues, proposing new ideas, or sharing the interesting and useful AI applications you have built on Dify. We also invite you to share Dify at events, conferences, and on social media.
- [GitHub Issues](https://github.com/langgenius/dify/issues). Best for: bugs and errors you encounter using Dify.AI; see the [Contribution Guide](CONTRIBUTING.md).
- [Email Support](mailto:hello@dify.ai?subject=[GitHub]Preguntas%20sobre%20Dify). Best for: questions you have about using Dify.AI.
- [Discord](https://discord.gg/FngNHpbcY7). Best for: sharing your applications and hanging out with the community.
- [Twitter](https://twitter.com/dify_ai). Best for: sharing your applications and hanging out with the community.
- [Business License](mailto:business@dify.ai?subject=[GitHub]Consulta%20de%20Licencia%20Comercial). Best for: business inquiries about licensing Dify.AI for commercial use.
## Security Disclosure
To protect your privacy, please avoid posting security issues on GitHub. Instead, send your questions to security@dify.ai and we will provide you with a more detailed answer.
## License
This repository is available under the [Dify Open Source License](LICENSE), which is essentially Apache 2.0 with a few additional restrictions.

README_FR.md

@@ -0,0 +1,127 @@
[![](./images/describe.png)](https://dify.ai)
<p align="center">
<a href="./README.md">English</a> |
<a href="./README_CN.md">简体中文</a> |
<a href="./README_JA.md">日本語</a> |
<a href="./README_ES.md">Español</a> |
<a href="./README_KL.md">Klingon</a> |
<a href="./README_FR.md">Français</a>
</p>
<p align="center">
<a href="https://dify.ai" target="_blank">
<img alt="Static Badge" src="https://img.shields.io/badge/AI-Dify?logo=AI&logoColor=%20%23f5f5f5&label=Dify&labelColor=%20%23155EEF&color=%23EAECF0"></a>
<a href="https://discord.gg/FngNHpbcY7" target="_blank">
<img src="https://img.shields.io/discord/1082486657678311454?logo=discord"
alt="chat on Discord"></a>
<a href="https://twitter.com/intent/follow?screen_name=dify_ai" target="_blank">
<img src="https://img.shields.io/twitter/follow/dify_ai?style=social&logo=X"
alt="follow on Twitter"></a>
<a href="https://hub.docker.com/u/langgenius" target="_blank">
<img alt="Docker Pulls" src="https://img.shields.io/docker/pulls/langgenius/dify-web"></a>
</p>
<p align="center">
<a href="https://dify.ai/blog/dify-ai-unveils-ai-agent-creating-gpts-and-assistants-with-various-llms" target="_blank">
Dify.AI Unveils AI Agent: Creating GPTs and Assistants with Various LLMs
</a>
</p>
**Dify** is an LLM application development platform that has already seen more than **100,000** applications built on Dify.AI. It integrates the concepts of Backend as a Service and LLMOps, covering the core tech stack required to build generative-AI-native applications, including a built-in RAG engine. With Dify, **you can self-deploy capabilities similar to the Assistants API and GPTs on top of any LLMs.**
![](./images/demo.png)
## Using Cloud Services
Using [Dify.AI Cloud](https://dify.ai) provides all the capabilities of the open-source version, and includes a free trial of 200 GPT credits.
## Why Dify
Dify is model-neutral and is a complete, engineered tech stack compared with hard-coded development libraries such as LangChain. Unlike OpenAI's Assistants API, Dify allows fully local deployment of the services.
| Feature | Dify.AI | Assistants API | LangChain |
|---------------|----------|-----------------|------------|
| **Programming Approach** | API-oriented | API-oriented | Python Code-oriented |
| **Ecosystem Strategy** | Open Source | Closed and Commercial | Open Source |
| **RAG Engine** | Supported | Supported | Not Supported |
| **Prompt IDE** | Included | Included | None |
| **Supported LLMs** | Rich Variety | GPT only | Rich Variety |
| **Local Deployment** | Supported | Not Supported | Not Applicable |
## Features
![](./images/models.png)
**1. LLM Support**: Integration with OpenAI's GPT family of models, or the open-source Llama2 family. In fact, Dify supports mainstream commercial models as well as open-source models (locally deployed or MaaS-based).
**2. Prompt IDE**: Visual orchestration of applications and services based on LLMs with your team.
**3. RAG Engine**: Includes various RAG capabilities based on full-text indexing or vector database embeddings, allowing direct upload of PDFs, TXTs, and other text formats.
**4. AI Agent**: Based on Function Calling and ReAct, the Agent inference framework lets users customize their own tools in a what-you-see-is-what-you-get way. Dify provides more than a dozen built-in tool-calling capabilities, such as Google Search, DALL·E, Stable Diffusion, and WolframAlpha.
**5. Continuous Operations**: Monitor and analyze application logs and performance, continuously improving prompts, datasets, or models using production data.
## Before You Start
**Star us, and you will receive instant notifications for all new releases on GitHub!**
![star-us](https://github.com/langgenius/dify/assets/100913391/95f37259-7370-4456-a9f0-0bc01ef8642f)
- [Website](https://dify.ai)
- [Documentation](https://docs.dify.ai)
- [Deployment Docs](https://docs.dify.ai/getting-started/install-self-hosted)
- [FAQ](https://docs.dify.ai/getting-started/faq)
## Install the Community Edition
### System Requirements
Before installing Dify, make sure your machine meets the following minimum requirements:
- CPU >= 2 cores
- RAM >= 4 GB
### Quick Start
The easiest way to start the Dify server is to run our [docker-compose.yml](docker/docker-compose.yaml) file. Before running the installation command, make sure [Docker](https://docs.docker.com/get-docker/) and [Docker Compose](https://docs.docker.com/compose/install/) are installed on your machine:
```bash
cd docker
docker compose up -d
```
After running, you can access the Dify dashboard in your browser at [http://localhost/install](http://localhost/install) and start the initial installation process.
### Helm Chart
Big thanks to @BorisPolonsky for providing us with a [Helm Chart](https://helm.sh/) version that allows Dify to be deployed on Kubernetes.
You can visit https://github.com/BorisPolonsky/dify-helm for deployment information.
### Configuration
If you need to customize the configuration, please refer to the comments in our [docker-compose.yml](docker/docker-compose.yaml) file and set the environment configuration manually. After making the changes, run `docker-compose up -d` again. You can find the full list of environment variables in our [documentation](https://docs.dify.ai/getting-started/install-self-hosted/environments).
## Star History
[![Star History Chart](https://api.star-history.com/svg?repos=langgenius/dify&type=Date)](https://star-history.com/#langgenius/dify&Date)
## Community & Support
We invite you to contribute to Dify to help make it better in many ways: submitting code, reporting issues, proposing new ideas, or sharing the interesting and useful AI applications you have built on Dify. We also invite you to share Dify at events, conferences, and on social media.
- [GitHub Issues](https://github.com/langgenius/dify/issues). Best for: bugs and errors you encounter using Dify.AI; see the [Contribution Guide](CONTRIBUTING.md).
- [Email Support](mailto:hello@dify.ai?subject=[GitHub]Questions%20About%20Dify). Best for: questions you have about using Dify.AI.
- [Discord](https://discord.gg/FngNHpbcY7). Best for: sharing your applications and hanging out with the community.
- [Twitter](https://twitter.com/dify_ai). Best for: sharing your applications and hanging out with the community.
- [Business License](mailto:business@dify.ai?subject=[GitHub]Business%20License%20Inquiry). Best for: business inquiries about licensing Dify.AI for commercial use.
## Security Disclosure
To protect your privacy, please avoid posting security issues on GitHub. Instead, send your questions to security@dify.ai and we will provide you with a more detailed answer.
## License
This repository is available under the [Dify Open Source License](LICENSE), which is essentially Apache 2.0 with a few additional restrictions.

README_JA.md

@@ -0,0 +1,130 @@
[![](./images/describe.png)](https://dify.ai)
<p align="center">
<a href="./README.md">English</a> |
<a href="./README_CN.md">简体中文</a> |
<a href="./README_JA.md">日本語</a> |
<a href="./README_ES.md">Español</a> |
<a href="./README_KL.md">Klingon</a> |
<a href="./README_FR.md">Français</a>
</p>
<p align="center">
<a href="https://dify.ai" target="_blank">
<img alt="Static Badge" src="https://img.shields.io/badge/AI-Dify?logo=AI&logoColor=%20%23f5f5f5&label=Dify&labelColor=%20%23155EEF&color=%23EAECF0"></a>
<a href="https://discord.gg/FngNHpbcY7" target="_blank">
<img src="https://img.shields.io/discord/1082486657678311454?logo=discord"
alt="chat on Discord"></a>
<a href="https://twitter.com/intent/follow?screen_name=dify_ai" target="_blank">
<img src="https://img.shields.io/twitter/follow/dify_ai?style=social&logo=X"
alt="follow on Twitter"></a>
<a href="https://hub.docker.com/u/langgenius" target="_blank">
<img alt="Docker Pulls" src="https://img.shields.io/docker/pulls/langgenius/dify-web"></a>
</p>
<p align="center">
<a href="https://dify.ai/blog/dify-ai-unveils-ai-agent-creating-gpts-and-assistants-with-various-llms" target="_blank">
Dify.AI Unveils AI Agent: Creating GPTs and Assistants with Various LLMs
</a>
</p>
"Difyは、既にDify.AI上で10万以上のアプリケーションが構築されているLLMアプリケーション開発プラットフォームです。バックエンド・アズ・ア・サービスとLLMOpsの概念を統合し、組み込みのRAGエンジンを含む、生成AIネイティブアプリケーションを構築するためのコアテックスタックをカバーしています。Difyを使用すると、どのLLMに基づいても、Assistants APIやGPTのような機能を自己デプロイすることができます。"
Please note that translating complex technical terms can sometimes result in slight variations in meaning due to differences in language nuances.
![](./images/demo.png)
## Using Cloud Services
With [Dify.AI Cloud](https://dify.ai), you get all the features of the open-source version, plus 200 free GPT trial credits.
## Why Dify
Dify is model-neutral and features a fully engineered tech stack, in contrast to hard-coded development libraries like LangChain. Unlike OpenAI's Assistants API, Dify enables complete local deployment of the services.
| Feature | Dify.AI | Assistants API | LangChain |
|---------|---------|----------------|-----------|
| **Programming Approach** | API-oriented | API-oriented | Python Code-oriented |
| **Ecosystem Strategy** | Open Source | Closed and Commercial | Open Source |
| **RAG Engine** | Supported | Supported | Not Supported |
| **Prompt IDE** | Included | Included | None |
| **Supported LLMs** | Rich Variety | GPT only | Rich Variety |
| **Local Deployment** | Supported | Not Supported | Not Applicable |
## Features
![](./images/models.png)
**1. LLM Support**: Integration with OpenAI's GPT family of models, or the open-source Llama2 family. In fact, Dify supports mainstream commercial models as well as open-source models (locally deployed or MaaS-based).
**2. Prompt IDE**: Visual orchestration of LLM-based applications and services with your team.
**3. RAG Engine**: Includes various RAG capabilities based on full-text indexing or vector database embeddings, allowing direct upload of PDFs, TXTs, and other text formats.
**4. AI Agent**: Based on Function Calling and ReAct, the Agent inference framework lets users customize their own tools in a what-you-see-is-what-you-get way. Dify provides more than a dozen built-in tool-calling capabilities, such as Google Search, DALL·E, Stable Diffusion, and WolframAlpha.
**5. Continuous Operations**: Monitor and analyze application logs and performance, and continuously improve prompts, datasets, or models using production data.
## Before You Start
**Star us, and you will be instantly notified of every new release on GitHub!**
![star-us](https://github.com/langgenius/dify/assets/100913391/95f37259-7370-4456-a9f0-0bc01ef8642f)
- [Website](https://dify.ai)
- [Docs](https://docs.dify.ai)
- [Deployment Docs](https://docs.dify.ai/getting-started/install-self-hosted)
- [FAQ](https://docs.dify.ai/getting-started/faq)
## Install the Community Edition
### System Requirements
Before installing Dify, make sure your machine meets the following minimum system requirements:
- CPU >= 2 cores
- RAM >= 4GB
### Quick Start
The easiest way to start the Dify server is to run the [docker-compose.yml](docker/docker-compose.yaml) file. Before running the installation command, make sure [Docker](https://docs.docker.com/get-docker/) and [Docker Compose](https://docs.docker.com/compose/install/) are installed on your machine:
```bash
cd docker
docker compose up -d
```
After running, you can access [http://localhost/install](http://localhost/install) in your browser and start the initialization process.
### Helm Chart
Big thanks to @BorisPolonsky for providing a [Helm Chart](https://helm.sh/) version that allows Dify to be deployed on Kubernetes.
Visit https://github.com/BorisPolonsky/dify-helm for deployment information.
### Configuration
If you need to customize the configuration, please refer to the comments in the [docker-compose.yml](docker/docker-compose.yaml) file and set the environment configuration manually. After making the changes, run `docker-compose up -d` again. The full list of environment variables is available in the [documentation](https://docs.dify.ai/getting-started/install-self-hosted/environments).
## Star History
[![Star History Chart](https://api.star-history.com/svg?repos=langgenius/dify&type=Date)](https://star-history.com/#langgenius/dify&Date)
## Community & Support
We welcome contributions to Dify to help make it better: submit code, report issues, propose new ideas, or share the interesting and useful AI applications you have built on Dify. We also welcome you to share Dify at events, conferences, and on social media.
- [GitHub Issues](https://github.com/langgenius/dify/issues). Best for: bugs and errors you encounter using Dify.AI; see the [Contribution Guide](CONTRIBUTING.md).
- [Email Support](mailto:hello@dify.ai?subject=[GitHub]Questions%20About%20Dify). Best for: questions about using Dify.AI.
- [Discord](https://discord.gg/FngNHpbcY7). Best for: sharing your applications and hanging out with the community.
- [Twitter](https://twitter.com/dify_ai). Best for: sharing your applications and hanging out with the community.
- [Business License](mailto:business@dify.ai?subject=[GitHub]Business%20License%20Inquiry). Best for: business inquiries about commercial use of Dify.AI.
## Security
To protect your privacy, please avoid posting security issues on GitHub. Instead, send your questions to security@dify.ai and we will provide you with a more detailed answer.
## License
This repository is available under the [Dify Open Source License](LICENSE), which is essentially Apache 2.0 with some additional restrictions.

README_KL.md

@@ -0,0 +1,119 @@
[![](./images/describe.png)](https://dify.ai)
<p align="center">
<a href="./README.md">English</a> |
<a href="./README_CN.md">简体中文</a> |
<a href="./README_JA.md">日本語</a> |
<a href="./README_ES.md">Español</a> |
<a href="./README_KL.md">Klingon</a> |
<a href="./README_FR.md">Français</a>
</p>
<p align="center">
<a href="https://dify.ai" target="_blank">
<img alt="Static Badge" src="https://img.shields.io/badge/AI-Dify?logo=AI&logoColor=%20%23f5f5f5&label=Dify&labelColor=%20%23155EEF&color=%23EAECF0"></a>
<a href="https://discord.gg/FngNHpbcY7" target="_blank">
<img src="https://img.shields.io/discord/1082486657678311454?logo=discord"
alt="chat on Discord"></a>
<a href="https://twitter.com/intent/follow?screen_name=dify_ai" target="_blank">
<img src="https://img.shields.io/twitter/follow/dify_ai?style=social&logo=X"
alt="follow on Twitter"></a>
<a href="https://hub.docker.com/u/langgenius" target="_blank">
<img alt="Docker Pulls" src="https://img.shields.io/docker/pulls/langgenius/dify-web"></a>
</p>
**Dify** is an LLM application development platform that has already seen more than **100,000** applications built on Dify.AI. It blends Backend as a Service and LLMOps, covering the core tech stack for generative-AI-native applications, including a built-in RAG engine. With Dify, **you can self-deploy capabilities similar to the Assistants API and GPTs on top of any LLMs.**
![](./images/demo.png)
## Using Cloud Services
[Dify.AI Cloud](https://dify.ai) provides all the capabilities of the open-source version, and includes 200 free GPT trial credits.
## Why Dify
Dify is model-neutral and a complete, engineered tech stack, unlike hard-coded development libraries such as LangChain. Unlike OpenAI's Assistants API, Dify allows fully local deployment of the services.
| Feature | Dify.AI | Assistants API | LangChain |
|---------|---------|----------------|-----------|
| **Programming Approach** | API-oriented | API-oriented | Python Code-oriented |
| **Ecosystem Strategy** | Open Source | Closed and Commercial | Open Source |
| **RAG Engine** | Supported | Supported | Not Supported |
| **Prompt IDE** | Included | Included | None |
| **Supported LLMs** | Rich Variety | GPT only | Rich Variety |
| **Local Deployment** | Supported | Not Supported | Not Applicable |
## Features
![](./images/models.png)
**1. LLM Support**: Integration with OpenAI's GPT family of models, or the open-source Llama2 family. In fact, Dify supports mainstream commercial models as well as open-source models (locally deployed or MaaS-based).
**2. Prompt IDE**: Visual orchestration of applications and services based on LLMs with your team.
**3. RAG Engine**: Includes various RAG capabilities based on full-text indexing or vector database embeddings, allowing direct upload of PDFs, TXTs, and other text formats.
**4. AI Agent**: Based on Function Calling and ReAct, the Agent inference framework lets users customize their own tools in a what-you-see-is-what-you-get way. Dify provides more than a dozen built-in tool-calling capabilities, such as Google Search, DALL·E, Stable Diffusion, and WolframAlpha.
**5. Continuous Operations**: Monitor and analyze application logs and performance, continuously improving prompts, datasets, or models using production data.
## Before You Start
**Star us, and you will be instantly notified of every new release on GitHub!**
![star-us](https://github.com/langgenius/dify/assets/100913391/95f37259-7370-4456-a9f0-0bc01ef8642f)
- [Website](https://dify.ai)
- [Docs](https://docs.dify.ai)
- [Deployment Docs](https://docs.dify.ai/getting-started/install-self-hosted)
- [FAQ](https://docs.dify.ai/getting-started/faq)
## Install the Community Edition
### System Requirements
Before installing Dify, make sure your machine meets the following minimum system requirements:
- CPU >= 2 Cores
- RAM >= 4GB
### Quick Start
The easiest way to start the Dify server is to run our [docker-compose.yml](docker/docker-compose.yaml) file. Before running the installation command, make sure [Docker](https://docs.docker.com/get-docker/) and [Docker Compose](https://docs.docker.com/compose/install/) are installed on your machine:
```bash
cd docker
docker compose up -d
```
After running, you can access the Dify dashboard in your browser at [http://localhost/install](http://localhost/install) and start the initialization process.
### Helm Chart
Big thanks to @BorisPolonsky for providing a [Helm Chart](https://helm.sh/) version that allows Dify to be deployed on Kubernetes. You can visit [https://github.com/BorisPolonsky/dify-helm](https://github.com/BorisPolonsky/dify-helm) for deployment information.
### Configuration
If you need to customize the configuration, please refer to the comments in our [docker-compose.yml](docker/docker-compose.yaml) file and set the environment configuration manually. After making the changes, run `docker-compose up -d` again. You can find the full list of environment variables in our [docs](https://docs.dify.ai/getting-started/install-self-hosted/environments).
## Star History
[![Star History Chart](https://api.star-history.com/svg?repos=langgenius/dify&type=Date)](https://star-history.com/#langgenius/dify&Date)
## Community & Support
We welcome you to contribute to Dify to help make it better: submit code, report issues, propose new ideas, or share the interesting and useful AI applications you have built on Dify. We also welcome you to share Dify at events, conferences, and on social media.
- [GitHub Issues](https://github.com/langgenius/dify/issues). Best for: bugs and errors you encounter using Dify.AI; see the [Contribution Guide](CONTRIBUTING.md).
- [Email Support](mailto:hello@dify.ai?subject=[GitHub]Questions%20About%20Dify). Best for: questions about using Dify.AI.
- [Discord](https://discord.gg/FngNHpbcY7). Best for: sharing your applications and hanging out with the community.
- [Twitter](https://twitter.com/dify_ai). Best for: sharing your applications and hanging out with the community.
- [Business License](mailto:business@dify.ai?subject=[GitHub]Business%20License%20Inquiry). Best for: business inquiries about licensing Dify.AI for commercial use.
## Security Disclosure
To protect your privacy, please avoid posting security issues on GitHub. Instead, send your questions to security@dify.ai and we will provide a more detailed answer.
## License
This repository is available under the [Dify Open Source License](LICENSE), which is essentially Apache 2.0 with a few additional restrictions.

View File

@@ -1,19 +1,11 @@
.env
*.env.*
storage/generate_files/*
storage/privkeys/*
storage/tools/*
storage/upload_files/*
# Logs
logs
*.log*
# jetbrains
.idea
.mypy_cache
.ruff_cache
# venv
.venv
.idea

View File

@@ -1,173 +1,64 @@
# Server Edition
EDITION=SELF_HOSTED
# Your App secret key will be used for securely signing the session cookie
# Make sure you are changing this key for your deployment with a strong key.
# You can generate a strong key using `openssl rand -base64 42`.
# Alternatively you can set it with `SECRET_KEY` environment variable.
SECRET_KEY=
# Ensure UTF-8 encoding
LANG=en_US.UTF-8
LC_ALL=en_US.UTF-8
PYTHONIOENCODING=utf-8
# Console API base URL
CONSOLE_API_URL=http://localhost:5001
CONSOLE_WEB_URL=http://localhost:3000
CONSOLE_API_URL=http://127.0.0.1:5001
CONSOLE_WEB_URL=http://127.0.0.1:3000
# Service API base URL
SERVICE_API_URL=http://localhost:5001
SERVICE_API_URL=http://127.0.0.1:5001
# Web APP base URL
APP_WEB_URL=http://localhost:3000
APP_WEB_URL=http://127.0.0.1:3000
# Files URL
FILES_URL=http://localhost:5001
FILES_URL=http://127.0.0.1:5001
# INTERNAL_FILES_URL is used for plugin daemon communication within Docker network.
# Set this to the internal Docker service URL for proper plugin file access.
# Example: INTERNAL_FILES_URL=http://api:5001
INTERNAL_FILES_URL=http://127.0.0.1:5001
# TRIGGER URL
TRIGGER_URL=http://localhost:5001
# The time in seconds after the signature is rejected
FILES_ACCESS_TIMEOUT=300
# Access token expiration time in minutes
ACCESS_TOKEN_EXPIRE_MINUTES=60
# Refresh token expiration time in days
REFRESH_TOKEN_EXPIRE_DAYS=30
# celery configuration
CELERY_BROKER_URL=redis://:difyai123456@localhost:6379/1
# redis configuration
REDIS_HOST=localhost
REDIS_PORT=6379
REDIS_USERNAME=
REDIS_PASSWORD=difyai123456
REDIS_USE_SSL=false
# SSL configuration for Redis (when REDIS_USE_SSL=true)
REDIS_SSL_CERT_REQS=CERT_NONE
# Options: CERT_NONE, CERT_OPTIONAL, CERT_REQUIRED
REDIS_SSL_CA_CERTS=
# Path to CA certificate file for SSL verification
REDIS_SSL_CERTFILE=
# Path to client certificate file for SSL authentication
REDIS_SSL_KEYFILE=
# Path to client private key file for SSL authentication
REDIS_DB=0
# redis Sentinel configuration.
REDIS_USE_SENTINEL=false
REDIS_SENTINELS=
REDIS_SENTINEL_SERVICE_NAME=
REDIS_SENTINEL_USERNAME=
REDIS_SENTINEL_PASSWORD=
REDIS_SENTINEL_SOCKET_TIMEOUT=0.1
# redis Cluster configuration.
REDIS_USE_CLUSTERS=false
REDIS_CLUSTERS=
REDIS_CLUSTERS_PASSWORD=
# celery configuration
CELERY_BROKER_URL=redis://:difyai123456@localhost:${REDIS_PORT}/1
CELERY_BACKEND=redis
# PostgreSQL database configuration
DB_USERNAME=postgres
DB_PASSWORD=difyai123456
DB_HOST=localhost
DB_PORT=5432
DB_DATABASE=dify
SQLALCHEMY_POOL_PRE_PING=true
SQLALCHEMY_POOL_TIMEOUT=30
# Storage configuration
# use for store upload files, private keys...
# storage type: opendal, s3, aliyun-oss, azure-blob, baidu-obs, google-storage, huawei-obs, oci-storage, tencent-cos, volcengine-tos, supabase
STORAGE_TYPE=opendal
# Apache OpenDAL storage configuration, refer to https://github.com/apache/opendal
OPENDAL_SCHEME=fs
OPENDAL_FS_ROOT=storage
# S3 Storage configuration
S3_USE_AWS_MANAGED_IAM=false
S3_ENDPOINT=https://your-bucket-name.storage.s3.cloudflare.com
# storage type: local, s3, azure-blob
STORAGE_TYPE=local
STORAGE_LOCAL_PATH=storage
S3_ENDPOINT=https://your-bucket-name.storage.s3.cloudflare.com
S3_BUCKET_NAME=your-bucket-name
S3_ACCESS_KEY=your-access-key
S3_SECRET_KEY=your-secret-key
S3_REGION=your-region
# Azure Blob Storage configuration
AZURE_BLOB_ACCOUNT_NAME=your-account-name
AZURE_BLOB_ACCOUNT_KEY=your-account-key
AZURE_BLOB_CONTAINER_NAME=your-container-name
AZURE_BLOB_CONTAINER_NAME=your-container-name
AZURE_BLOB_ACCOUNT_URL=https://<your_account_name>.blob.core.windows.net
# Aliyun oss Storage configuration
ALIYUN_OSS_BUCKET_NAME=your-bucket-name
ALIYUN_OSS_ACCESS_KEY=your-access-key
ALIYUN_OSS_SECRET_KEY=your-secret-key
ALIYUN_OSS_ENDPOINT=your-endpoint
ALIYUN_OSS_AUTH_VERSION=v1
ALIYUN_OSS_REGION=your-region
# Don't start with '/'. OSS doesn't support leading slash in object names.
ALIYUN_OSS_PATH=your-path
# Google Storage configuration
GOOGLE_STORAGE_BUCKET_NAME=your-bucket-name
GOOGLE_STORAGE_SERVICE_ACCOUNT_JSON_BASE64=your-google-service-account-json-base64-string
# Tencent COS Storage configuration
TENCENT_COS_BUCKET_NAME=your-bucket-name
TENCENT_COS_SECRET_KEY=your-secret-key
TENCENT_COS_SECRET_ID=your-secret-id
TENCENT_COS_REGION=your-region
TENCENT_COS_SCHEME=your-scheme
# Huawei OBS Storage Configuration
HUAWEI_OBS_BUCKET_NAME=your-bucket-name
HUAWEI_OBS_SECRET_KEY=your-secret-key
HUAWEI_OBS_ACCESS_KEY=your-access-key
HUAWEI_OBS_SERVER=your-server-url
# Baidu OBS Storage Configuration
BAIDU_OBS_BUCKET_NAME=your-bucket-name
BAIDU_OBS_SECRET_KEY=your-secret-key
BAIDU_OBS_ACCESS_KEY=your-access-key
BAIDU_OBS_ENDPOINT=your-server-url
# OCI Storage configuration
OCI_ENDPOINT=your-endpoint
OCI_BUCKET_NAME=your-bucket-name
OCI_ACCESS_KEY=your-access-key
OCI_SECRET_KEY=your-secret-key
OCI_REGION=your-region
# Volcengine tos Storage configuration
VOLCENGINE_TOS_ENDPOINT=your-endpoint
VOLCENGINE_TOS_BUCKET_NAME=your-bucket-name
VOLCENGINE_TOS_ACCESS_KEY=your-access-key
VOLCENGINE_TOS_SECRET_KEY=your-secret-key
VOLCENGINE_TOS_REGION=your-region
# Supabase Storage Configuration
SUPABASE_BUCKET_NAME=your-bucket-name
SUPABASE_API_KEY=your-access-key
SUPABASE_URL=your-server-url
# CORS configuration
WEB_API_CORS_ALLOW_ORIGINS=http://localhost:3000,*
CONSOLE_CORS_ALLOW_ORIGINS=http://localhost:3000,*
# Set COOKIE_DOMAIN when the console frontend and API are on different subdomains.
# Provide the registrable domain (e.g. example.com); leading dots are optional.
COOKIE_DOMAIN=
WEB_API_CORS_ALLOW_ORIGINS=http://127.0.0.1:3000,*
CONSOLE_CORS_ALLOW_ORIGINS=http://127.0.0.1:3000,*
# Vector database configuration
# Supported values are `weaviate`, `qdrant`, `milvus`, `myscale`, `relyt`, `pgvector`, `pgvecto-rs`, `chroma`, `opensearch`, `oracle`, `tencent`, `elasticsearch`, `elasticsearch-ja`, `analyticdb`, `couchbase`, `vikingdb`, `oceanbase`, `opengauss`, `tablestore`,`vastbase`,`tidb`,`tidb_on_qdrant`,`baidu`,`lindorm`,`huawei_cloud`,`upstash`, `matrixone`.
# Vector database configuration, support: weaviate, qdrant, milvus
VECTOR_STORE=weaviate
# Prefix used to create collection name in vector database
VECTOR_INDEX_NAME_PREFIX=Vector_index
# Weaviate configuration
WEAVIATE_ENDPOINT=http://localhost:8080
@@ -179,235 +70,39 @@ WEAVIATE_BATCH_SIZE=100
QDRANT_URL=http://localhost:6333
QDRANT_API_KEY=difyai123456
QDRANT_CLIENT_TIMEOUT=20
QDRANT_GRPC_ENABLED=false
QDRANT_GRPC_PORT=6334
QDRANT_REPLICATION_FACTOR=1
#Couchbase configuration
COUCHBASE_CONNECTION_STRING=127.0.0.1
COUCHBASE_USER=Administrator
COUCHBASE_PASSWORD=password
COUCHBASE_BUCKET_NAME=Embeddings
COUCHBASE_SCOPE_NAME=_default
# Milvus configuration
MILVUS_URI=http://127.0.0.1:19530
MILVUS_TOKEN=
MILVUS_HOST=127.0.0.1
MILVUS_PORT=19530
MILVUS_USER=root
MILVUS_PASSWORD=Milvus
MILVUS_ANALYZER_PARAMS=
# MyScale configuration
MYSCALE_HOST=127.0.0.1
MYSCALE_PORT=8123
MYSCALE_USER=default
MYSCALE_PASSWORD=
MYSCALE_DATABASE=default
MYSCALE_FTS_PARAMS=
# Relyt configuration
RELYT_HOST=127.0.0.1
RELYT_PORT=5432
RELYT_USER=postgres
RELYT_PASSWORD=postgres
RELYT_DATABASE=postgres
# Tencent configuration
TENCENT_VECTOR_DB_URL=http://127.0.0.1
TENCENT_VECTOR_DB_API_KEY=dify
TENCENT_VECTOR_DB_TIMEOUT=30
TENCENT_VECTOR_DB_USERNAME=dify
TENCENT_VECTOR_DB_DATABASE=dify
TENCENT_VECTOR_DB_SHARD=1
TENCENT_VECTOR_DB_REPLICAS=2
TENCENT_VECTOR_DB_ENABLE_HYBRID_SEARCH=false
# ElasticSearch configuration
ELASTICSEARCH_HOST=127.0.0.1
ELASTICSEARCH_PORT=9200
ELASTICSEARCH_USERNAME=elastic
ELASTICSEARCH_PASSWORD=elastic
# PGVECTO_RS configuration
PGVECTO_RS_HOST=localhost
PGVECTO_RS_PORT=5431
PGVECTO_RS_USER=postgres
PGVECTO_RS_PASSWORD=difyai123456
PGVECTO_RS_DATABASE=postgres
# PGVector configuration
PGVECTOR_HOST=127.0.0.1
PGVECTOR_PORT=5433
PGVECTOR_USER=postgres
PGVECTOR_PASSWORD=postgres
PGVECTOR_DATABASE=postgres
PGVECTOR_MIN_CONNECTION=1
PGVECTOR_MAX_CONNECTION=5
# TableStore Vector configuration
TABLESTORE_ENDPOINT=https://instance-name.cn-hangzhou.ots.aliyuncs.com
TABLESTORE_INSTANCE_NAME=instance-name
TABLESTORE_ACCESS_KEY_ID=xxx
TABLESTORE_ACCESS_KEY_SECRET=xxx
TABLESTORE_NORMALIZE_FULLTEXT_BM25_SCORE=false
# Tidb Vector configuration
TIDB_VECTOR_HOST=xxx.eu-central-1.xxx.aws.tidbcloud.com
TIDB_VECTOR_PORT=4000
TIDB_VECTOR_USER=xxx.root
TIDB_VECTOR_PASSWORD=xxxxxx
TIDB_VECTOR_DATABASE=dify
# Tidb on qdrant configuration
TIDB_ON_QDRANT_URL=http://127.0.0.1
TIDB_ON_QDRANT_API_KEY=dify
TIDB_ON_QDRANT_CLIENT_TIMEOUT=20
TIDB_ON_QDRANT_GRPC_ENABLED=false
TIDB_ON_QDRANT_GRPC_PORT=6334
TIDB_PUBLIC_KEY=dify
TIDB_PRIVATE_KEY=dify
TIDB_API_URL=http://127.0.0.1
TIDB_IAM_API_URL=http://127.0.0.1
TIDB_REGION=regions/aws-us-east-1
TIDB_PROJECT_ID=dify
TIDB_SPEND_LIMIT=100
# Chroma configuration
CHROMA_HOST=127.0.0.1
CHROMA_PORT=8000
CHROMA_TENANT=default_tenant
CHROMA_DATABASE=default_database
CHROMA_AUTH_PROVIDER=chromadb.auth.token_authn.TokenAuthenticationServerProvider
CHROMA_AUTH_CREDENTIALS=difyai123456
# AnalyticDB configuration
ANALYTICDB_KEY_ID=your-ak
ANALYTICDB_KEY_SECRET=your-sk
ANALYTICDB_REGION_ID=cn-hangzhou
ANALYTICDB_INSTANCE_ID=gp-ab123456
ANALYTICDB_ACCOUNT=testaccount
ANALYTICDB_PASSWORD=testpassword
ANALYTICDB_NAMESPACE=dify
ANALYTICDB_NAMESPACE_PASSWORD=difypassword
ANALYTICDB_HOST=gp-test.aliyuncs.com
ANALYTICDB_PORT=5432
ANALYTICDB_MIN_CONNECTION=1
ANALYTICDB_MAX_CONNECTION=5
# OpenSearch configuration
OPENSEARCH_HOST=127.0.0.1
OPENSEARCH_PORT=9200
OPENSEARCH_USER=admin
OPENSEARCH_PASSWORD=admin
OPENSEARCH_SECURE=true
OPENSEARCH_VERIFY_CERTS=true
# Baidu configuration
BAIDU_VECTOR_DB_ENDPOINT=http://127.0.0.1:5287
BAIDU_VECTOR_DB_CONNECTION_TIMEOUT_MS=30000
BAIDU_VECTOR_DB_ACCOUNT=root
BAIDU_VECTOR_DB_API_KEY=dify
BAIDU_VECTOR_DB_DATABASE=dify
BAIDU_VECTOR_DB_SHARD=1
BAIDU_VECTOR_DB_REPLICAS=3
BAIDU_VECTOR_DB_INVERTED_INDEX_ANALYZER=DEFAULT_ANALYZER
BAIDU_VECTOR_DB_INVERTED_INDEX_PARSER_MODE=COARSE_MODE
# Upstash configuration
UPSTASH_VECTOR_URL=your-server-url
UPSTASH_VECTOR_TOKEN=your-access-token
# ViKingDB configuration
VIKINGDB_ACCESS_KEY=your-ak
VIKINGDB_SECRET_KEY=your-sk
VIKINGDB_REGION=cn-shanghai
VIKINGDB_HOST=api-vikingdb.xxx.volces.com
VIKINGDB_SCHEMA=http
VIKINGDB_CONNECTION_TIMEOUT=30
VIKINGDB_SOCKET_TIMEOUT=30
# Matrixone configuration
MATRIXONE_HOST=127.0.0.1
MATRIXONE_PORT=6001
MATRIXONE_USER=dump
MATRIXONE_PASSWORD=111
MATRIXONE_DATABASE=dify
# Lindorm configuration
LINDORM_URL=http://ld-*******************-proxy-search-pub.lindorm.aliyuncs.com:30070
LINDORM_USERNAME=admin
LINDORM_PASSWORD=admin
LINDORM_USING_UGC=True
LINDORM_QUERY_TIMEOUT=1
# OceanBase Vector configuration
OCEANBASE_VECTOR_HOST=127.0.0.1
OCEANBASE_VECTOR_PORT=2881
OCEANBASE_VECTOR_USER=root@test
OCEANBASE_VECTOR_PASSWORD=difyai123456
OCEANBASE_VECTOR_DATABASE=test
OCEANBASE_MEMORY_LIMIT=6G
OCEANBASE_ENABLE_HYBRID_SEARCH=false
# AlibabaCloud MySQL Vector configuration
ALIBABACLOUD_MYSQL_HOST=127.0.0.1
ALIBABACLOUD_MYSQL_PORT=3306
ALIBABACLOUD_MYSQL_USER=root
ALIBABACLOUD_MYSQL_PASSWORD=root
ALIBABACLOUD_MYSQL_DATABASE=dify
ALIBABACLOUD_MYSQL_MAX_CONNECTION=5
ALIBABACLOUD_MYSQL_HNSW_M=6
# openGauss configuration
OPENGAUSS_HOST=127.0.0.1
OPENGAUSS_PORT=6600
OPENGAUSS_USER=postgres
OPENGAUSS_PASSWORD=Dify@123
OPENGAUSS_DATABASE=dify
OPENGAUSS_MIN_CONNECTION=1
OPENGAUSS_MAX_CONNECTION=5
MILVUS_SECURE=false
# Upload configuration
UPLOAD_FILE_SIZE_LIMIT=15
UPLOAD_FILE_BATCH_LIMIT=5
UPLOAD_IMAGE_FILE_SIZE_LIMIT=10
UPLOAD_VIDEO_FILE_SIZE_LIMIT=100
UPLOAD_AUDIO_FILE_SIZE_LIMIT=50
# Comma-separated list of file extensions blocked from upload for security reasons.
# Extensions should be lowercase without dots (e.g., exe,bat,sh,dll).
# Empty by default to allow all file types.
# Recommended: exe,bat,cmd,com,scr,vbs,ps1,msi,dll
UPLOAD_FILE_EXTENSION_BLACKLIST=
# Model Configuration
MULTIMODAL_SEND_IMAGE_FORMAT=base64
# Model configuration
MULTIMODAL_SEND_FORMAT=base64
PROMPT_GENERATION_MAX_TOKENS=512
CODE_GENERATION_MAX_TOKENS=1024
PLUGIN_BASED_TOKEN_COUNTING_ENABLED=false
# Mail configuration, support: resend, smtp, sendgrid
# Mail configuration, support: resend, smtp
MAIL_TYPE=
# If using SendGrid, use the 'from' field for authentication if necessary.
MAIL_DEFAULT_SEND_FROM=no-reply <no-reply@dify.ai>
# resend configuration
RESEND_API_KEY=
RESEND_API_URL=https://api.resend.com
# smtp configuration
SMTP_SERVER=smtp.gmail.com
SMTP_PORT=465
SMTP_PORT=587
SMTP_USERNAME=123
SMTP_PASSWORD=abc
SMTP_USE_TLS=true
SMTP_OPPORTUNISTIC_TLS=false
# SendGrid configuration
SENDGRID_API_KEY=
SMTP_USE_TLS=false
# Sentry configuration
SENTRY_DSN=
# DEBUG
DEBUG=false
ENABLE_REQUEST_LOGGING=False
SQLALCHEMY_ECHO=false
# Notion import configuration, support public and internal
@@ -416,219 +111,41 @@ NOTION_CLIENT_SECRET=you-client-secret
NOTION_CLIENT_ID=you-client-id
NOTION_INTERNAL_SECRET=you-internal-secret
# Hosted Model Credentials
HOSTED_OPENAI_API_KEY=
HOSTED_OPENAI_API_BASE=
HOSTED_OPENAI_API_ORGANIZATION=
HOSTED_OPENAI_TRIAL_ENABLED=false
HOSTED_OPENAI_QUOTA_LIMIT=200
HOSTED_OPENAI_PAID_ENABLED=false
HOSTED_AZURE_OPENAI_ENABLED=false
HOSTED_AZURE_OPENAI_API_KEY=
HOSTED_AZURE_OPENAI_API_BASE=
HOSTED_AZURE_OPENAI_QUOTA_LIMIT=200
HOSTED_ANTHROPIC_API_BASE=
HOSTED_ANTHROPIC_API_KEY=
HOSTED_ANTHROPIC_TRIAL_ENABLED=false
HOSTED_ANTHROPIC_QUOTA_LIMIT=600000
HOSTED_ANTHROPIC_PAID_ENABLED=false
ETL_TYPE=dify
UNSTRUCTURED_API_URL=
UNSTRUCTURED_API_KEY=
SCARF_NO_ANALYTICS=true
#ssrf
SSRF_PROXY_HTTP_URL=
SSRF_PROXY_HTTPS_URL=
SSRF_DEFAULT_MAX_RETRIES=3
SSRF_DEFAULT_TIME_OUT=5
SSRF_DEFAULT_CONNECT_TIME_OUT=5
SSRF_DEFAULT_READ_TIME_OUT=5
SSRF_DEFAULT_WRITE_TIME_OUT=5
SSRF_POOL_MAX_CONNECTIONS=100
SSRF_POOL_MAX_KEEPALIVE_CONNECTIONS=20
SSRF_POOL_KEEPALIVE_EXPIRY=5.0
BATCH_UPLOAD_LIMIT=10
KEYWORD_DATA_SOURCE_TYPE=database
# Workflow file upload limit
WORKFLOW_FILE_UPLOAD_LIMIT=10
# CODE EXECUTION CONFIGURATION
CODE_EXECUTION_ENDPOINT=http://127.0.0.1:8194
CODE_EXECUTION_API_KEY=dify-sandbox
CODE_EXECUTION_SSL_VERIFY=True
CODE_EXECUTION_POOL_MAX_CONNECTIONS=100
CODE_EXECUTION_POOL_MAX_KEEPALIVE_CONNECTIONS=20
CODE_EXECUTION_POOL_KEEPALIVE_EXPIRY=5.0
CODE_EXECUTION_CONNECT_TIMEOUT=10
CODE_EXECUTION_READ_TIMEOUT=60
CODE_EXECUTION_WRITE_TIMEOUT=10
CODE_MAX_NUMBER=9223372036854775807
CODE_MIN_NUMBER=-9223372036854775808
CODE_MAX_STRING_LENGTH=400000
TEMPLATE_TRANSFORM_MAX_LENGTH=400000
CODE_MAX_STRING_LENGTH=80000
TEMPLATE_TRANSFORM_MAX_LENGTH=80000
CODE_MAX_STRING_ARRAY_LENGTH=30
CODE_MAX_OBJECT_ARRAY_LENGTH=30
CODE_MAX_NUMBER_ARRAY_LENGTH=1000
# API Tool configuration
API_TOOL_DEFAULT_CONNECT_TIMEOUT=10
API_TOOL_DEFAULT_READ_TIMEOUT=60
# HTTP Node configuration
HTTP_REQUEST_MAX_CONNECT_TIMEOUT=300
HTTP_REQUEST_MAX_READ_TIMEOUT=600
HTTP_REQUEST_MAX_WRITE_TIMEOUT=600
HTTP_REQUEST_NODE_MAX_BINARY_SIZE=10485760
HTTP_REQUEST_NODE_MAX_TEXT_SIZE=1048576
HTTP_REQUEST_NODE_SSL_VERIFY=True
# Webhook request configuration
WEBHOOK_REQUEST_BODY_MAX_SIZE=10485760
# Respect X-* headers to redirect clients
RESPECT_XFORWARD_HEADERS_ENABLED=false
# Log file path
LOG_FILE=
# Log file max size, the unit is MB
LOG_FILE_MAX_SIZE=20
# Log file max backup count
LOG_FILE_BACKUP_COUNT=5
# Log dateformat
LOG_DATEFORMAT=%Y-%m-%d %H:%M:%S
# Log Timezone
LOG_TZ=UTC
# Log format
LOG_FORMAT=%(asctime)s,%(msecs)d %(levelname)-2s [%(filename)s:%(lineno)d] %(req_id)s %(message)s
# Indexing configuration
INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH=4000
# Workflow runtime configuration
WORKFLOW_MAX_EXECUTION_STEPS=500
WORKFLOW_MAX_EXECUTION_TIME=1200
WORKFLOW_CALL_MAX_DEPTH=5
MAX_VARIABLE_SIZE=204800
# GraphEngine Worker Pool Configuration
# Minimum number of workers per GraphEngine instance (default: 1)
GRAPH_ENGINE_MIN_WORKERS=1
# Maximum number of workers per GraphEngine instance (default: 10)
GRAPH_ENGINE_MAX_WORKERS=10
# Queue depth threshold that triggers worker scale up (default: 3)
GRAPH_ENGINE_SCALE_UP_THRESHOLD=3
# Seconds of idle time before scaling down workers (default: 5.0)
GRAPH_ENGINE_SCALE_DOWN_IDLE_TIME=5.0
# Workflow storage configuration
# Options: rdbms, hybrid
# rdbms: Use only the relational database (default)
# hybrid: Save new data to object storage, read from both object storage and RDBMS
WORKFLOW_NODE_EXECUTION_STORAGE=rdbms
# Repository configuration
# Core workflow execution repository implementation
CORE_WORKFLOW_EXECUTION_REPOSITORY=core.repositories.sqlalchemy_workflow_execution_repository.SQLAlchemyWorkflowExecutionRepository
# Core workflow node execution repository implementation
CORE_WORKFLOW_NODE_EXECUTION_REPOSITORY=core.repositories.sqlalchemy_workflow_node_execution_repository.SQLAlchemyWorkflowNodeExecutionRepository
# API workflow node execution repository implementation
API_WORKFLOW_NODE_EXECUTION_REPOSITORY=repositories.sqlalchemy_api_workflow_node_execution_repository.DifyAPISQLAlchemyWorkflowNodeExecutionRepository
# API workflow run repository implementation
API_WORKFLOW_RUN_REPOSITORY=repositories.sqlalchemy_api_workflow_run_repository.DifyAPISQLAlchemyWorkflowRunRepository
# Workflow log cleanup configuration
# Enable automatic cleanup of workflow run logs to manage database size
WORKFLOW_LOG_CLEANUP_ENABLED=false
# Number of days to retain workflow run logs (default: 30 days)
WORKFLOW_LOG_RETENTION_DAYS=30
# Batch size for workflow log cleanup operations (default: 100)
WORKFLOW_LOG_CLEANUP_BATCH_SIZE=100
# App configuration
APP_MAX_EXECUTION_TIME=1200
APP_MAX_ACTIVE_REQUESTS=0
# Celery beat configuration
CELERY_BEAT_SCHEDULER_TIME=1
# Celery schedule tasks configuration
ENABLE_CLEAN_EMBEDDING_CACHE_TASK=false
ENABLE_CLEAN_UNUSED_DATASETS_TASK=false
ENABLE_CREATE_TIDB_SERVERLESS_TASK=false
ENABLE_UPDATE_TIDB_SERVERLESS_STATUS_TASK=false
ENABLE_CLEAN_MESSAGES=false
ENABLE_MAIL_CLEAN_DOCUMENT_NOTIFY_TASK=false
ENABLE_DATASETS_QUEUE_MONITOR=false
ENABLE_CHECK_UPGRADABLE_PLUGIN_TASK=true
ENABLE_WORKFLOW_SCHEDULE_POLLER_TASK=true
# Interval time in minutes for polling scheduled workflows(default: 1 min)
WORKFLOW_SCHEDULE_POLLER_INTERVAL=1
WORKFLOW_SCHEDULE_POLLER_BATCH_SIZE=100
# Maximum number of scheduled workflows to dispatch per tick (0 for unlimited)
WORKFLOW_SCHEDULE_MAX_DISPATCH_PER_TICK=0
# Position configuration
POSITION_TOOL_PINS=
POSITION_TOOL_INCLUDES=
POSITION_TOOL_EXCLUDES=
POSITION_PROVIDER_PINS=
POSITION_PROVIDER_INCLUDES=
POSITION_PROVIDER_EXCLUDES=
# Plugin configuration
PLUGIN_DAEMON_KEY=lYkiYYT6owG+71oLerGzA7GXCgOT++6ovaezWAjpCjf+Sjc3ZtU+qUEi
PLUGIN_DAEMON_URL=http://127.0.0.1:5002
PLUGIN_REMOTE_INSTALL_PORT=5003
PLUGIN_REMOTE_INSTALL_HOST=localhost
PLUGIN_MAX_PACKAGE_SIZE=15728640
INNER_API_KEY_FOR_PLUGIN=QaHbTe77CtuXmsfyhR7+vRjI/+XbV1AaFy691iy+kGDv2Jvy0/eAh8Y1
# Marketplace configuration
MARKETPLACE_ENABLED=true
MARKETPLACE_API_URL=https://marketplace.dify.ai
# Endpoint configuration
ENDPOINT_URL_TEMPLATE=http://localhost:5002/e/{hook_id}
# Reset password token expiry minutes
RESET_PASSWORD_TOKEN_EXPIRY_MINUTES=5
EMAIL_REGISTER_TOKEN_EXPIRY_MINUTES=5
CHANGE_EMAIL_TOKEN_EXPIRY_MINUTES=5
OWNER_TRANSFER_TOKEN_EXPIRY_MINUTES=5
CREATE_TIDB_SERVICE_JOB_ENABLED=false
# Maximum number of submitted thread count in a ThreadPool for parallel node execution
MAX_SUBMIT_COUNT=100
# Lockout duration in seconds
LOGIN_LOCKOUT_DURATION=86400
# Enable OpenTelemetry
ENABLE_OTEL=false
OTLP_TRACE_ENDPOINT=
OTLP_METRIC_ENDPOINT=
OTLP_BASE_ENDPOINT=http://localhost:4318
OTLP_API_KEY=
OTEL_EXPORTER_OTLP_PROTOCOL=
OTEL_EXPORTER_TYPE=otlp
OTEL_SAMPLING_RATE=0.1
OTEL_BATCH_EXPORT_SCHEDULE_DELAY=5000
OTEL_MAX_QUEUE_SIZE=2048
OTEL_MAX_EXPORT_BATCH_SIZE=512
OTEL_METRIC_EXPORT_INTERVAL=60000
OTEL_BATCH_EXPORT_TIMEOUT=10000
OTEL_METRIC_EXPORT_TIMEOUT=30000
# Prevent Clickjacking
ALLOW_EMBED=false
# Dataset queue monitor configuration
QUEUE_MONITOR_THRESHOLD=200
# You can configure multiple ones, separated by commas. eg: test1@dify.ai,test2@dify.ai
QUEUE_MONITOR_ALERT_EMAILS=
# Monitor interval in minutes, default is 30 minutes
QUEUE_MONITOR_INTERVAL=30
# Swagger UI configuration
SWAGGER_UI_ENABLED=true
SWAGGER_UI_PATH=/swagger-ui.html
# Whether to encrypt dataset IDs when exporting DSL files (default: true)
# Set to false to export dataset IDs as plain text for easier cross-environment import
DSL_EXPORT_ENCRYPT_DATASET_ID=true
# Tenant isolated task queue configuration
TENANT_ISOLATED_TASK_CONCURRENCY=1
# Maximum number of segments for dataset segments API (0 for unlimited)
DATASET_MAX_SEGMENTS_PER_REQUEST=0

BIN
api/.idea/icon.png generated

Binary file not shown.

Before

Width:  |  Height:  |  Size: 1.7 KiB

17
api/.idea/vcs.xml generated
View File

@@ -1,17 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="IssueNavigationConfiguration">
<option name="links">
<list>
<IssueNavigationLink>
<option name="issueRegexp" value="#(\d+)" />
<option name="linkRegexp" value="https://github.com/langgenius/dify/issues/$1" />
</IssueNavigationLink>
</list>
</option>
</component>
<component name="VcsDirectoryMappings">
<mapping directory="" vcs="Git" />
<mapping directory="$PROJECT_DIR$/.." vcs="Git" />
</component>
</project>

View File

@@ -1,105 +0,0 @@
[importlinter]
root_packages =
core
configs
controllers
models
tasks
services
[importlinter:contract:workflow]
name = Workflow
type=layers
layers =
graph_engine
graph_events
graph
nodes
node_events
entities
containers =
core.workflow
ignore_imports =
core.workflow.nodes.base.node -> core.workflow.graph_events
core.workflow.nodes.iteration.iteration_node -> core.workflow.graph_events
core.workflow.nodes.loop.loop_node -> core.workflow.graph_events
core.workflow.nodes.node_factory -> core.workflow.graph
core.workflow.nodes.iteration.iteration_node -> core.workflow.graph_engine
core.workflow.nodes.iteration.iteration_node -> core.workflow.graph
core.workflow.nodes.iteration.iteration_node -> core.workflow.graph_engine.command_channels
core.workflow.nodes.loop.loop_node -> core.workflow.graph_engine
core.workflow.nodes.loop.loop_node -> core.workflow.graph
core.workflow.nodes.loop.loop_node -> core.workflow.graph_engine.command_channels
[importlinter:contract:rsc]
name = RSC
type = layers
layers =
graph_engine
response_coordinator
containers =
core.workflow.graph_engine
[importlinter:contract:worker]
name = Worker
type = layers
layers =
graph_engine
worker
containers =
core.workflow.graph_engine
[importlinter:contract:graph-engine-architecture]
name = Graph Engine Architecture
type = layers
layers =
graph_engine
orchestration
command_processing
event_management
error_handler
graph_traversal
graph_state_manager
worker_management
domain
containers =
core.workflow.graph_engine
[importlinter:contract:domain-isolation]
name = Domain Model Isolation
type = forbidden
source_modules =
core.workflow.graph_engine.domain
forbidden_modules =
core.workflow.graph_engine.worker_management
core.workflow.graph_engine.command_channels
core.workflow.graph_engine.layers
core.workflow.graph_engine.protocols
[importlinter:contract:worker-management]
name = Worker Management
type = forbidden
source_modules =
core.workflow.graph_engine.worker_management
forbidden_modules =
core.workflow.graph_engine.orchestration
core.workflow.graph_engine.command_processing
core.workflow.graph_engine.event_management
[importlinter:contract:graph-traversal-components]
name = Graph Traversal Components
type = layers
layers =
edge_processor
skip_propagator
containers =
core.workflow.graph_engine.graph_traversal
[importlinter:contract:command-channels]
name = Command Channels Independence
type = independence
modules =
core.workflow.graph_engine.command_channels.in_memory_channel
core.workflow.graph_engine.command_channels.redis_channel

View File

@@ -1,113 +0,0 @@
exclude = ["migrations/*"]
line-length = 120
[format]
quote-style = "double"
[lint]
preview = true
select = [
"B", # flake8-bugbear rules
"C4", # flake8-comprehensions
"E", # pycodestyle E rules
"F", # pyflakes rules
"FURB", # refurb rules
"I", # isort rules
"N", # pep8-naming
"PT", # flake8-pytest-style rules
"PLC0208", # iteration-over-set
"PLC0414", # useless-import-alias
"PLE0604", # invalid-all-object
"PLE0605", # invalid-all-format
"PLR0402", # manual-from-import
"PLR1711", # useless-return
"PLR1714", # repeated-equality-comparison
"RUF013", # implicit-optional
"RUF019", # unnecessary-key-check
"RUF100", # unused-noqa
"RUF101", # redirected-noqa
"RUF200", # invalid-pyproject-toml
"RUF022", # unsorted-dunder-all
"S506", # unsafe-yaml-load
"SIM", # flake8-simplify rules
"T201", # print-found
"TRY400", # error-instead-of-exception
"TRY401", # verbose-log-message
"UP", # pyupgrade rules
"W191", # tab-indentation
"W605", # invalid-escape-sequence
# security related linting rules
# RCE protection (sort of)
"S102", # exec-builtin, disallow use of `exec`
"S307", # suspicious-eval-usage, disallow use of `eval` and `ast.literal_eval`
"S301", # suspicious-pickle-usage, disallow use of `pickle` and its wrappers.
"S302", # suspicious-marshal-usage, disallow use of `marshal` module
"S311", # suspicious-non-cryptographic-random-usage
"G001", # don't use str format to logging messages
"G003", # don't use + in logging messages
"G004", # don't use f-strings to format logging messages
"UP042", # use StrEnum
]
ignore = [
"E402", # module-import-not-at-top-of-file
"E711", # none-comparison
"E712", # true-false-comparison
"E721", # type-comparison
"E722", # bare-except
"F821", # undefined-name
"F841", # unused-variable
"FURB113", # repeated-append
"FURB152", # math-constant
"UP007", # non-pep604-annotation
"UP032", # f-string
"UP045", # non-pep604-annotation-optional
"B005", # strip-with-multi-characters
"B006", # mutable-argument-default
"B007", # unused-loop-control-variable
"B026", # star-arg-unpacking-after-keyword-arg
"B901", # allow return in yield
"B903", # class-as-data-structure
"B904", # raise-without-from-inside-except
"B905", # zip-without-explicit-strict
"N806", # non-lowercase-variable-in-function
"N815", # mixed-case-variable-in-class-scope
"PT011", # pytest-raises-too-broad
"SIM102", # collapsible-if
"SIM103", # needless-bool
"SIM105", # suppressible-exception
"SIM107", # return-in-try-except-finally
"SIM108", # if-else-block-instead-of-if-exp
"SIM113", # enumerate-for-loop
"SIM117", # multiple-with-statements
"SIM210", # if-expr-with-true-false
]
[lint.per-file-ignores]
"__init__.py" = [
"F401", # unused-import
"F811", # redefined-while-unused
]
"configs/*" = [
"N802", # invalid-function-name
]
"core/model_runtime/callbacks/base_callback.py" = [
"T201",
]
"core/workflow/callbacks/workflow_logging_callback.py" = [
"T201",
]
"libs/gmpy2_pkcs10aep_cipher.py" = [
"N803", # invalid-argument-name
]
"tests/*" = [
"F811", # redefined-while-unused
"T201", # allow print in tests
]
[lint.pyflakes]
allowed-unused-imports = [
"_pytest.monkeypatch",
"tests.integration_tests",
"tests.unit_tests",
]

42
api/.vscode/launch.json vendored Normal file
View File

@@ -0,0 +1,42 @@
{
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"name": "Python: Celery",
"type": "python",
"request": "launch",
"module": "celery",
"justMyCode": true,
"args": ["-A", "app.celery", "worker", "-P", "gevent", "-c", "1", "--loglevel", "info", "-Q", "dataset,generation,mail"],
"envFile": "${workspaceFolder}/.env",
"env": {
"FLASK_APP": "app.py",
"FLASK_DEBUG": "1",
"GEVENT_SUPPORT": "True"
},
"console": "integratedTerminal"
},
{
"name": "Python: Flask",
"type": "python",
"request": "launch",
"module": "flask",
"env": {
"FLASK_APP": "app.py",
"FLASK_DEBUG": "1",
"GEVENT_SUPPORT": "True"
},
"args": [
"run",
"--host=0.0.0.0",
"--port=5001",
"--debug"
],
"jinja": true,
"justMyCode": true
}
]
}

View File

@@ -1,61 +0,0 @@
{
"version": "0.2.0",
"compounds": [
{
"name": "Launch Flask and Celery",
"configurations": ["Python: Flask", "Python: Celery"]
}
],
"configurations": [
{
"name": "Python: Flask",
"consoleName": "Flask",
"type": "debugpy",
"request": "launch",
"python": "${workspaceFolder}/.venv/bin/python",
"cwd": "${workspaceFolder}",
"envFile": ".env",
"module": "flask",
"justMyCode": true,
"jinja": true,
"env": {
"FLASK_APP": "app.py",
"GEVENT_SUPPORT": "True"
},
"args": [
"run",
"--port=5001"
]
},
{
"name": "Python: Celery",
"consoleName": "Celery",
"type": "debugpy",
"request": "launch",
"python": "${workspaceFolder}/.venv/bin/python",
"cwd": "${workspaceFolder}",
"module": "celery",
"justMyCode": true,
"envFile": ".env",
"console": "integratedTerminal",
"env": {
"FLASK_APP": "app.py",
"FLASK_DEBUG": "1",
"GEVENT_SUPPORT": "True"
},
"args": [
"-A",
"app.celery",
"worker",
"-P",
"gevent",
"-c",
"1",
"--loglevel",
"DEBUG",
"-Q",
"dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor"
]
}
]
}

View File

@@ -1,62 +0,0 @@
# Agent Skill Index
Start with the section that best matches your need. Each entry lists the problems it solves plus key files/concepts so you know what to expect before opening it.
______________________________________________________________________
## Platform Foundations
- **[Infrastructure Overview](agent_skills/infra.md)**\
When to read this:
- You need to understand where a feature belongs in the architecture.
- You're wiring storage, Redis, vector stores, or OTEL.
- You're about to add CLI commands or async jobs.\
What it covers: configuration stack (`configs/app_config.py`, remote settings), storage entry points (`extensions/ext_storage.py`, `core/file/file_manager.py`), Redis conventions (`extensions/ext_redis.py`), plugin runtime topology, vector-store factory (`core/rag/datasource/vdb/*`), observability hooks, SSRF proxy usage, and core CLI commands.
- **[Coding Style](agent_skills/coding_style.md)**\
When to read this:
- You're writing or reviewing backend code and need the authoritative checklist.
- You're unsure about Pydantic validators, SQLAlchemy session usage, or logging patterns.
- You want the exact lint/type/test commands used in PRs.\
Includes: Ruff & BasedPyright commands, no-annotation policy, session examples (`with Session(db.engine, ...)`), `@field_validator` usage, logging expectations, and the rule set for file size, helpers, and package management.
______________________________________________________________________
## Plugin & Extension Development
- **[Plugin Systems](agent_skills/plugin.md)**\
When to read this:
- You're building or debugging a marketplace plugin.
- You need to know how manifests, providers, daemons, and migrations fit together.\
What it covers: plugin manifests (`core/plugin/entities/plugin.py`), installation/upgrade flows (`services/plugin/plugin_service.py`, CLI commands), runtime adapters (`core/plugin/impl/*` for tool/model/datasource/trigger/endpoint/agent), daemon coordination (`core/plugin/entities/plugin_daemon.py`), and how provider registries surface capabilities to the rest of the platform.
- **[Plugin OAuth](agent_skills/plugin_oauth.md)**\
When to read this:
- You must integrate OAuth for a plugin or datasource.
- You're handling credential encryption or refresh flows.\
Topics: credential storage, encryption helpers (`core/helper/provider_encryption.py`), OAuth client bootstrap (`services/plugin/oauth_service.py`, `services/plugin/plugin_parameter_service.py`), and how console/API layers expose the flows.
______________________________________________________________________
## Workflow Entry & Execution
- **[Trigger Concepts](agent_skills/trigger.md)**\
When to read this:
- You're debugging why a workflow didn't start.
- You're adding a new trigger type or hook.
- You need to trace async execution, draft debugging, or webhook/schedule pipelines.\
Details: Start-node taxonomy, webhook & schedule internals (`core/workflow/nodes/trigger_*`, `services/trigger/*`), async orchestration (`services/async_workflow_service.py`, Celery queues), debug event bus, and storage/logging interactions.
______________________________________________________________________
## Additional Notes for Agents
- All skill docs assume you follow the coding style guide—run Ruff/BasedPyright/tests listed there before submitting changes.
- When you cannot find an answer in these briefs, search the codebase using the paths referenced (e.g., `core/plugin/impl/tool.py`, `services/dataset_service.py`).
- If you run into cross-cutting concerns (tenancy, configuration, storage), check the infrastructure guide first; it links to most supporting modules.
- Keep multi-tenancy and configuration central: everything flows through `configs.dify_config` and `tenant_id`.
- When touching plugins or triggers, consult both the system overview and the specialised doc to ensure you adjust lifecycle, storage, and observability consistently.

View File

@@ -1,92 +1,48 @@
# base image
FROM python:3.12-slim-bookworm AS base
FROM python:3.10-slim-bookworm AS base
WORKDIR /app/api
LABEL maintainer="takatost@gmail.com"
# Install uv
ENV UV_VERSION=0.8.9
RUN pip install --no-cache-dir uv==${UV_VERSION}
FROM base AS packages
# if you located in China, you can use aliyun mirror to speed up
# RUN sed -i 's@deb.debian.org@mirrors.aliyun.com@g' /etc/apt/sources.list.d/debian.sources
# install packages
FROM base as packages
RUN apt-get update \
&& apt-get install -y --no-install-recommends \
# basic environment
g++ \
# for building gmpy2
libmpfr-dev libmpc-dev
&& apt-get install -y --no-install-recommends gcc g++ libc-dev libffi-dev libgmp-dev libmpfr-dev libmpc-dev
# Install Python dependencies
COPY pyproject.toml uv.lock ./
RUN uv sync --locked --no-dev
COPY requirements.txt /requirements.txt
RUN pip install --prefix=/pkg -r requirements.txt
# production stage
FROM base AS production
ENV FLASK_APP=app.py
ENV EDITION=SELF_HOSTED
ENV DEPLOY_ENV=PRODUCTION
ENV CONSOLE_API_URL=http://127.0.0.1:5001
ENV CONSOLE_WEB_URL=http://127.0.0.1:3000
ENV SERVICE_API_URL=http://127.0.0.1:5001
ENV APP_WEB_URL=http://127.0.0.1:3000
ENV FLASK_APP app.py
ENV EDITION SELF_HOSTED
ENV DEPLOY_ENV PRODUCTION
ENV CONSOLE_API_URL http://127.0.0.1:5001
ENV CONSOLE_WEB_URL http://127.0.0.1:3000
ENV SERVICE_API_URL http://127.0.0.1:5001
ENV APP_WEB_URL http://127.0.0.1:3000
EXPOSE 5001
# set timezone
ENV TZ=UTC
# Set UTF-8 locale
ENV LANG=en_US.UTF-8
ENV LC_ALL=en_US.UTF-8
ENV PYTHONIOENCODING=utf-8
ENV TZ UTC
WORKDIR /app/api
RUN \
apt-get update \
# Install dependencies
&& apt-get install -y --no-install-recommends \
# basic environment
curl nodejs \
# for gmpy2 \
libgmp-dev libmpfr-dev libmpc-dev \
# For Security
expat libldap-2.5-0 perl libsqlite3-0 zlib1g \
# install fonts to support the use of tools like pypdfium2
fonts-noto-cjk \
# install a package to improve the accuracy of guessing mime type and file extension
media-types \
# install libmagic to support the use of python-magic guess MIMETYPE
libmagic1 \
&& apt-get autoremove -y \
RUN apt-get update \
&& apt-get install -y --no-install-recommends curl wget vim nodejs ffmpeg libgmp-dev libmpfr-dev libmpc-dev \
&& apt-get autoremove \
&& rm -rf /var/lib/apt/lists/*
# Copy Python environment and packages
ENV VIRTUAL_ENV=/app/api/.venv
COPY --from=packages ${VIRTUAL_ENV} ${VIRTUAL_ENV}
ENV PATH="${VIRTUAL_ENV}/bin:${PATH}"
# Download nltk data
RUN python -c "import nltk; nltk.download('punkt'); nltk.download('averaged_perceptron_tagger')"
ENV TIKTOKEN_CACHE_DIR=/app/api/.tiktoken_cache
RUN python -c "import tiktoken; tiktoken.encoding_for_model('gpt2')"
# Copy source code
COPY --from=packages /pkg /usr/local
COPY . /app/api/
# Copy entrypoint
COPY docker/entrypoint.sh /entrypoint.sh
RUN chmod +x /entrypoint.sh
ARG COMMIT_SHA
ENV COMMIT_SHA=${COMMIT_SHA}
ENV COMMIT_SHA ${COMMIT_SHA}
ENTRYPOINT ["/bin/bash", "/entrypoint.sh"]
ENTRYPOINT ["/bin/bash", "/entrypoint.sh"]

View File

@@ -2,111 +2,54 @@
## Usage
> [!IMPORTANT]
>
> In the v1.3.0 release, `poetry` has been replaced with
> [`uv`](https://docs.astral.sh/uv/) as the package manager
> for Dify API backend service.
1. Start the docker-compose stack
The backend requires some middleware, including PostgreSQL, Redis, and Weaviate, which can be started together using `docker-compose`.
```bash
cd ../docker
cp middleware.env.example middleware.env
# change the profile to other vector database if you are not using weaviate
docker compose -f docker-compose.middleware.yaml --profile weaviate -p dify up -d
docker-compose -f docker-compose.middleware.yaml -p dify up -d
cd ../api
```
2. Copy `.env.example` to `.env`
3. Generate a `SECRET_KEY` in the `.env` file.
1. Copy `.env.example` to `.env`
```cli
cp .env.example .env
```
1. Generate a `SECRET_KEY` in the `.env` file.
bash for Linux
```bash for Linux
```bash
sed -i "/^SECRET_KEY=/c\SECRET_KEY=$(openssl rand -base64 42)" .env
```
bash for Mac
```bash for Mac
secret_key=$(openssl rand -base64 42)
sed -i '' "/^SECRET_KEY=/c\\
SECRET_KEY=${secret_key}" .env
```
1. Create environment.
Dify API service uses [UV](https://docs.astral.sh/uv/) to manage dependencies.
First, you need to add the uv package manager, if you don't have it already.
3.5 If you use Anaconda, create a new environment and activate it
```bash
pip install uv
# Or on macOS
brew install uv
conda create --name dify python=3.10
conda activate dify
```
1. Install dependencies
4. Install dependencies
```bash
uv sync --dev
pip install -r requirements.txt
```
1. Run migrate
5. Run migrate
Before the first launch, migrate the database to the latest version.
```bash
uv run flask db upgrade
flask db upgrade
```
1. Start backend
⚠️ If you encounter problems with jieba, for example
```
> flask db upgrade
Error: While importing 'app', an ImportError was raised:
```
Please run the following command instead.
```
pip install -r requirements.txt --upgrade --force-reinstall
```
6. Start backend:
```bash
uv run flask run --host 0.0.0.0 --port=5001 --debug
```
1. Start Dify [web](../web) service.
1. Setup your application by visiting `http://localhost:3000`.
1. If you need to handle and debug the async tasks (e.g. dataset importing and documents indexing), please start the worker service.
```bash
uv run celery -A app.celery worker -P threads -c 2 --loglevel INFO -Q dataset,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,priority_pipeline,pipeline
```
Additionally, if you want to debug the celery scheduled tasks, you can run the following command in another terminal to start the beat service:
```bash
uv run celery -A app.celery beat
```
## Testing
1. Install dependencies for both the backend and the test environment
```bash
uv sync --dev
```
1. Run the tests locally with mocked system environment variables from the `tool.pytest_env` section in `pyproject.toml`; see [Claude.md](../CLAUDE.md) for more
```bash
uv run pytest # Run all tests
uv run pytest tests/unit_tests/ # Unit tests only
uv run pytest tests/integration_tests/ # Integration tests
# Code quality
../dev/reformat # Run all formatters and linters
uv run ruff check --fix ./ # Fix linting issues
uv run ruff format ./ # Format code
uv run basedpyright . # Type checking
flask run --host 0.0.0.0 --port=5001 --debug
```
7. Set up your application by visiting http://localhost:5001/console/api/setup or other APIs...
8. If you need to debug local async processing, run `celery -A app.celery worker -P gevent -c 1 --loglevel INFO -Q dataset,generation,mail`; Celery handles dataset importing and other async tasks.

View File

@@ -1,115 +0,0 @@
## Linter
- Always follow `.ruff.toml`.
- Run `uv run ruff check --fix --unsafe-fixes`.
- Keep each line under 100 characters (including spaces).
## Code Style
- `snake_case` for variables and functions.
- `PascalCase` for classes.
- `UPPER_CASE` for constants.
## Rules
- Use Pydantic v2 standard.
- Use `uv` for package management.
- Do not override dunder methods like `__init__`, `__iadd__`, etc.
- Never launch services (`uv run app.py`, `flask run`, etc.); running tests under `tests/` is allowed.
- Prefer simple functions over classes for lightweight helpers.
- Keep files below 800 lines; split when necessary.
- Keep code readable—no clever hacks.
- Never use `print`; log with `logger = logging.getLogger(__name__)`.
## Guiding Principles
- Mirror the project's layered architecture: controller → service → core/domain.
- Reuse existing helpers in `core/`, `services/`, and `libs/` before creating new abstractions.
- Optimise for observability: deterministic control flow, clear logging, actionable errors.
## SQLAlchemy Patterns
- Models inherit from `models.base.Base`; never create ad-hoc metadata or engines.
- Open sessions with context managers:
```python
from sqlalchemy.orm import Session
with Session(db.engine, expire_on_commit=False) as session:
stmt = select(Workflow).where(
Workflow.id == workflow_id,
Workflow.tenant_id == tenant_id,
)
workflow = session.execute(stmt).scalar_one_or_none()
```
- Use SQLAlchemy expressions; avoid raw SQL unless necessary.
- Introduce repository abstractions only for very large tables (e.g., workflow executions) to support alternative storage strategies.
- Always scope queries by `tenant_id` and protect write paths with safeguards (`FOR UPDATE`, row counts, etc.); see the sketch below.
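A minimal sketch of a guarded write path, reusing the session pattern above (the `rename_workflow` helper and the `models.workflow` import path are illustrative assumptions, not existing code):
```python
from sqlalchemy import select
from sqlalchemy.orm import Session

from extensions.ext_database import db
from models.workflow import Workflow  # import path assumed for illustration

def rename_workflow(tenant_id: str, workflow_id: str, new_name: str) -> bool:
    with Session(db.engine, expire_on_commit=False) as session:
        stmt = (
            select(Workflow)
            .where(Workflow.id == workflow_id, Workflow.tenant_id == tenant_id)  # tenant scoping
            .with_for_update()  # lock the row for the duration of the transaction
        )
        workflow = session.execute(stmt).scalar_one_or_none()
        if workflow is None:
            return False
        workflow.name = new_name
        session.commit()
        return True
```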
## Storage & External IO
- Access storage via `extensions.ext_storage.storage`.
- Use `core.helper.ssrf_proxy` for outbound HTTP fetches.
- Background tasks that touch storage must be idempotent and log the relevant object identifiers.
## Pydantic Usage
- Define DTOs with Pydantic v2 models and forbid extras by default.
- Use `@field_validator` / `@model_validator` for domain rules.
- Example:
```python
from pydantic import BaseModel, ConfigDict, HttpUrl, field_validator
class TriggerConfig(BaseModel):
endpoint: HttpUrl
secret: str
model_config = ConfigDict(extra="forbid")
@field_validator("secret")
def ensure_secret_prefix(cls, value: str) -> str:
if not value.startswith("dify_"):
raise ValueError("secret must start with dify_")
return value
```
## Generics & Protocols
- Use `typing.Protocol` to define behavioural contracts (e.g., cache interfaces).
- Apply generics (`TypeVar`, `Generic`) for reusable utilities like caches or providers.
- Validate dynamic inputs at runtime when generics cannot enforce safety alone; a minimal sketch follows.
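A minimal sketch combining both ideas (the `Cache` protocol and `InMemoryCache` are illustrative, not an existing Dify interface):
```python
from typing import Generic, Protocol, TypeVar

T = TypeVar("T")

class Cache(Protocol[T]):
    """Behavioural contract: anything with get/set of T satisfies it."""

    def get(self, key: str) -> T | None: ...

    def set(self, key: str, value: T) -> None: ...

class InMemoryCache(Generic[T]):
    """One possible implementation; a Redis-backed cache could satisfy the same protocol."""

    def __init__(self) -> None:
        self._data: dict[str, T] = {}

    def get(self, key: str) -> T | None:
        return self._data.get(key)

    def set(self, key: str, value: T) -> None:
        self._data[key] = value

def warm(cache: Cache[str]) -> None:
    cache.set("greeting", "hello")  # accepts any conforming cache

warm(InMemoryCache[str]())
```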
## Error Handling & Logging
- Raise domain-specific exceptions (`services/errors`, `core/errors`) and translate to HTTP responses in controllers.
- Declare `logger = logging.getLogger(__name__)` at module top.
- Include tenant/app/workflow identifiers in log context.
- Log retryable events at `warning`, terminal failures at `error`; see the sketch below.
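A short sketch of that convention (the webhook-delivery task is hypothetical and exists only to illustrate the logging levels and identifier context):
```python
import logging

logger = logging.getLogger(__name__)  # declared at module top, per the rule above

def send_webhook() -> None:
    """Stand-in for a real delivery call; may raise TimeoutError."""
    raise TimeoutError

def deliver(tenant_id: str, app_id: str, attempt: int, max_attempts: int = 3) -> None:
    try:
        send_webhook()
    except TimeoutError:
        if attempt < max_attempts:
            # retryable event: warning, with tenant/app identifiers in context
            logger.warning("webhook timeout tenant=%s app=%s attempt=%d", tenant_id, app_id, attempt)
        else:
            # terminal failure: error, then surface to the caller
            logger.error("webhook failed tenant=%s app=%s", tenant_id, app_id)
            raise
```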
## Tooling & Checks
- Format/lint: `uv run --project api --dev ruff format ./api` and `uv run --project api --dev ruff check --fix --unsafe-fixes ./api`.
- Type checks: `uv run --directory api --dev basedpyright`.
- Tests: `uv run --project api --dev dev/pytest/pytest_unit_tests.sh`.
- Run all of the above before submitting your work.
## Controllers & Services
- Controllers: parse input via Pydantic, invoke services, return serialised responses; no business logic.
- Services: coordinate repositories, providers, background tasks; keep side effects explicit.
- Avoid repositories unless necessary; direct SQLAlchemy usage is preferred for typical tables.
- Document non-obvious behaviour with concise comments.
## Miscellaneous
- Use `configs.dify_config` for configuration—never read environment variables directly.
- Maintain tenant awareness end-to-end; `tenant_id` must flow through every layer touching shared resources.
- Queue async work through `services/async_workflow_service`; implement tasks under `tasks/` with explicit queue selection.
- Keep experimental scripts under `dev/`; do not ship them in production builds.

View File

@@ -1,96 +0,0 @@
## Configuration
- Import `configs.dify_config` for every runtime toggle. Do not read environment variables directly.
- Add new settings to the proper mixin inside `configs/` (deployment, feature, middleware, etc.) so they load through `DifyConfig`.
- Remote overrides come from the optional providers in `configs/remote_settings_sources`; keep defaults in code safe when the value is missing.
- Example: logging pulls targets from `extensions/ext_logging.py`, and model provider URLs are assembled in `services/entities/model_provider_entities.py`. A minimal usage sketch follows.
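A minimal sketch of the convention (`ENABLE_OTEL` is borrowed from the environment list earlier in this changeset; the exact attribute names live in `configs/`):
```python
from configs import dify_config

def tracing_enabled() -> bool:
    # read the typed config object; never os.environ directly
    return bool(dify_config.ENABLE_OTEL)
```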
## Dependencies
- Runtime dependencies live in `[project].dependencies` inside `pyproject.toml`. Optional clients go into the `storage`, `tools`, or `vdb` groups under `[dependency-groups]`.
- Always pin versions and keep the list alphabetised. Shared tooling (lint, typing, pytest) belongs in the `dev` group.
- When code needs a new package, explain why in the PR and run `uv lock` so the lockfile stays current.
## Storage & Files
- Use `extensions.ext_storage.storage` for all blob IO; it already respects the configured backend.
- Convert files for workflows with helpers in `core/file/file_manager.py`; they handle signed URLs and multimodal payloads.
- When writing controller logic, delegate upload quotas and metadata to `services/file_service.py` instead of touching storage directly.
- All outbound HTTP fetches (webhooks, remote files) must go through the SSRF-safe client in `core/helper/ssrf_proxy.py`; it wraps `httpx` with the allow/deny rules configured for the platform. A usage sketch follows.
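A hedged usage sketch, assuming the module exposes httpx-style helpers (check `core/helper/ssrf_proxy.py` for the exact signatures):
```python
from core.helper import ssrf_proxy

# fetch a remote payload through the SSRF-safe client instead of raw httpx
response = ssrf_proxy.get("https://example.com/payload.json", timeout=5)
response.raise_for_status()
data = response.json()
```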
## Redis & Shared State
- Access Redis through `extensions.ext_redis.redis_client`. For locking, reuse `redis_client.lock`.
- Prefer higher-level helpers when available: rate limits use `libs.helper.RateLimiter`, provider metadata uses caches in `core/helper/provider_cache.py`. A locking sketch follows.
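A small locking sketch (the key name and timeout are illustrative; `redis_client.lock` is the redis-py lock mentioned above):
```python
from extensions.ext_redis import redis_client

def sync_tenant_state(tenant_id: str) -> None:
    # serialize concurrent writers on a per-tenant key
    with redis_client.lock(f"tenant:{tenant_id}:sync", timeout=30):
        ...  # perform the guarded update here
```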
## Models
- SQLAlchemy models sit in `models/` and inherit from the shared declarative `Base` defined in `models/base.py` (metadata configured via `models/engine.py`).
- `models/__init__.py` exposes grouped aggregates: account/tenant models, app and conversation tables, datasets, providers, workflow runs, triggers, etc. Import from there to avoid deep path churn.
- Follow the DDD boundary: persistence objects live in `models/`, repositories under `repositories/` translate them into domain entities, and services consume those repositories.
- When adding a table, create the model class, register it in `models/__init__.py`, wire a repository if needed, and generate an Alembic migration as described below. A sketch of such a model follows.
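A hypothetical model sketch, shown only to illustrate the pattern (the table and columns do not exist in the codebase):
```python
from sqlalchemy import String
from sqlalchemy.orm import Mapped, mapped_column

from models.base import Base  # shared declarative base described above

class WorkflowLabel(Base):
    __tablename__ = "workflow_labels"

    id: Mapped[str] = mapped_column(String(36), primary_key=True)
    tenant_id: Mapped[str] = mapped_column(String(36), index=True)  # tenancy on every table
    name: Mapped[str] = mapped_column(String(255))
```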
## Vector Stores
- Vector client implementations live in `core/rag/datasource/vdb/<provider>`, with a common factory in `core/rag/datasource/vdb/vector_factory.py` and enums in `core/rag/datasource/vdb/vector_type.py`.
- Retrieval pipelines call these providers through `core/rag/datasource/retrieval_service.py` and dataset ingestion flows in `services/dataset_service.py`.
- The CLI helper `flask vdb-migrate` orchestrates bulk migrations using routines in `commands.py`; reuse that pattern when adding new backend transitions.
- To add another store, mirror the provider layout, register it with the factory, and include any schema changes in Alembic migrations.
## Observability & OTEL
- OpenTelemetry settings live under the observability mixin in `configs/observability`. Toggle exporters and sampling via `dify_config`, not ad-hoc env reads.
- HTTP, Celery, Redis, SQLAlchemy, and httpx instrumentation is initialised in `extensions/ext_app_metrics.py` and `extensions/ext_request_logging.py`; reuse these hooks when adding new workers or entrypoints.
- When creating background tasks or external calls, propagate tracing context with helpers in the existing instrumented clients (e.g. use the shared `httpx` session from `core/helper/http_client_pooling.py`).
- If you add a new external integration, ensure spans and metrics are emitted by wiring the appropriate OTEL instrumentation package in `pyproject.toml` and configuring it in `extensions/`.
## Ops Integrations
- Langfuse support and other tracing bridges live under `core/ops/opik_trace`. Config toggles sit in `configs/observability`, while exporters are initialised in the OTEL extensions mentioned above.
- External monitoring services should follow this pattern: keep client code in `core/ops`, expose switches via `dify_config`, and hook initialisation in `extensions/ext_app_metrics.py` or sibling modules.
- Before instrumenting new code paths, check whether existing context helpers (e.g. `extensions/ext_request_logging.py`) already capture the necessary metadata.
## Controllers, Services, Core
- Controllers only parse HTTP input and call a service method. Keep business rules in `services/`.
- Services enforce tenant rules, quotas, and orchestration, then call into `core/` engines (workflow execution, tools, LLMs).
- When adding a new endpoint, search for an existing service to extend before introducing a new layer. Example: workflow APIs pipe through `services/workflow_service.py` into `core/workflow`.
## Plugins, Tools, Providers
- In Dify a plugin is a tenant-installable bundle that declares one or more providers (tool, model, datasource, trigger, endpoint, agent strategy) plus its resource needs and version metadata. The manifest (`core/plugin/entities/plugin.py`) mirrors what you see in the marketplace documentation.
- Installation, upgrades, and migrations are orchestrated by `services/plugin/plugin_service.py` together with helpers such as `services/plugin/plugin_migration.py`.
- Runtime loading happens through the implementations under `core/plugin/impl/*` (tool/model/datasource/trigger/endpoint/agent). These modules normalise plugin providers so that downstream systems (`core/tools/tool_manager.py`, `services/model_provider_service.py`, `services/trigger/*`) can treat builtin and plugin capabilities the same way.
- For remote execution, plugin daemons (`core/plugin/entities/plugin_daemon.py`, `core/plugin/impl/plugin.py`) manage lifecycle hooks, credential forwarding, and background workers that keep plugin processes in sync with the main application.
- Acquire tool implementations through `core/tools/tool_manager.py`; it resolves builtin, plugin, and workflow-as-tool providers uniformly, injecting the right context (tenant, credentials, runtime config).
- To add a new plugin capability, extend the relevant `core/plugin/entities` schema and register the implementation in the matching `core/plugin/impl` module rather than importing the provider directly.
## Async Workloads
See `agent_skills/trigger.md` for more detailed documentation.
- Enqueue background work through `services/async_workflow_service.py`. It routes jobs to the tiered Celery queues defined in `tasks/`.
- Workers boot from `celery_entrypoint.py` and execute functions in `tasks/workflow_execution_tasks.py`, `tasks/trigger_processing_tasks.py`, etc.
- Scheduled workflows poll from `schedule/workflow_schedule_tasks.py`. Follow the same pattern if you need new periodic jobs.
## Database & Migrations
- SQLAlchemy models live under `models/` and map directly to migration files in `migrations/versions`.
- Generate migrations with `uv run --project api flask db revision --autogenerate -m "<summary>"`, then review the diff; never hand-edit the database outside Alembic.
- Apply migrations locally using `uv run --project api flask db upgrade`; production deploys expect the same history.
- If you add tenant-scoped data, confirm the upgrade includes tenant filters or defaults consistent with the service logic touching those tables.
## CLI Commands
- Maintenance commands from `commands.py` are registered on the Flask CLI. Run them via `uv run --project api flask <command>`.
- Use the built-in `db` commands from Flask-Migrate for schema operations (`flask db upgrade`, `flask db stamp`, etc.). Only fall back to custom helpers if you need their extra behaviour.
- Custom entries such as `flask reset-password`, `flask reset-email`, and `flask vdb-migrate` handle self-hosted account recovery and vector database migrations.
- Before adding a new command, check whether an existing service can be reused and ensure the command guards edition-specific behaviour (many enforce `SELF_HOSTED`). Document any additions in the PR.
- Ruff helpers are run directly with `uv`: `uv run --project api --dev ruff format ./api` for formatting and `uv run --project api --dev ruff check ./api` (add `--fix` if you want automatic fixes).
## When You Add Features
- Check for an existing helper or service before writing a new util.
- Uphold tenancy: every service method should receive the tenant ID from controller wrappers such as `controllers/console/wraps.py`.
- Update or create tests alongside behaviour changes (`tests/unit_tests` for fast coverage, `tests/integration_tests` when touching orchestrations).
- Run `uv run --project api --dev ruff check ./api`, `uv run --directory api --dev basedpyright`, and `uv run --project api --dev dev/pytest/pytest_unit_tests.sh` before submitting changes.

View File

@@ -1 +0,0 @@
// TBD

View File

@@ -1 +0,0 @@
// TBD

View File

@@ -1,53 +0,0 @@
## Overview
Trigger is a collection of nodes that we call `Start` nodes; the concept of `Start` is the same as `RootNode` in the workflow engine `core/workflow/graph_engine`. A `Start` node is the entry point of a workflow: every workflow run always starts from a `Start` node.
## Trigger nodes
- `UserInput`
- `Trigger Webhook`
- `Trigger Schedule`
- `Trigger Plugin`
### UserInput
Before the `Trigger` concept was introduced, this was what we called the `Start` node; to avoid confusion it has been renamed to the `UserInput` node. It has a strong relation with `ServiceAPI` in `controllers/service_api/app`.
1. The `UserInput` node declares a list of arguments that must be provided by the user; these are ultimately converted into variables in the workflow variable pool.
1. `ServiceAPI` accepts those arguments and passes them through to the `UserInput` node.
1. For its detailed implementation, please refer to `core/workflow/nodes/start`
### Trigger Webhook
Inside the Webhook node, Dify provides a UI panel that lets the user define an HTTP manifest (`core/workflow/nodes/trigger_webhook/entities.py`.`WebhookData`). Dify also generates a random webhook id for each `Trigger Webhook` node; the implementation lives in `core/trigger/utils/endpoint.py`. `webhook-debug` is a debug mode for webhooks, which you can find in `controllers/trigger/webhook.py`.
Finally, requests to the `webhook` endpoint are converted into variables in the workflow variable pool during workflow execution.
### Trigger Schedule
The `Trigger Schedule` node lets the user define a schedule that triggers the workflow; the detailed manifest is in `core/workflow/nodes/trigger_schedule/entities.py`. We have a poller and an executor to handle millions of schedules; see `docker/entrypoint.sh` and `schedule/workflow_schedule_task.py` for help.
To achieve this, a `WorkflowSchedulePlan` model was introduced in `models/trigger.py`, and `events/event_handlers/sync_workflow_schedule_when_app_published.py` syncs workflow schedule plans when an app is published.
### Trigger Plugin
The `Trigger Plugin` node lets users define their own distributed trigger plugins: whenever a request is received, Dify forwards it to the plugin and waits for the parsed variables.
1. Requests are saved in storage by `services/trigger/trigger_request_service.py`, referenced by `services/trigger/trigger_service.py`.`TriggerService`.`process_endpoint`.
1. Plugins accept those requests and parse variables from them; see `core/plugin/impl/trigger.py` for details.
Dify also introduces a `subscription` concept: an endpoint address from Dify is bound to a third-party webhook service such as `Github`, `Slack`, `Linear`, `GoogleDrive`, or `Gmail`. Once a subscription is created, Dify continually receives requests from those platforms and handles them one by one.
## Worker Pool / Async Task
Every event that triggers a new workflow run is handled in async mode; the unified entrypoint is `services/async_workflow_service.py`.`AsyncWorkflowService`.`trigger_workflow_async`.
The infrastructure we use is `celery`, already configured in `docker/entrypoint.sh`; the consumers are in `tasks/async_workflow_tasks.py`. Three queues handle different tiers of users: `PROFESSIONAL_QUEUE`, `TEAM_QUEUE`, and `SANDBOX_QUEUE`. A sketch of this shape follows.
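A hedged sketch of the queue routing (queue names and the broker URL echo the descriptions above; the real task definitions live in `tasks/async_workflow_tasks.py`):
```python
from celery import Celery

# broker URL mirrors the CELERY_BROKER_URL default shown earlier in this changeset
celery_app = Celery("dify", broker="redis://:difyai123456@localhost:6379/1")

SANDBOX_QUEUE = "sandbox"  # illustrative name; one queue per user tier

@celery_app.task
def run_workflow(app_id: str, inputs: dict) -> None:
    """Consumer-side sketch; the real worker executes the workflow graph."""

if __name__ == "__main__":
    # the producer side picks the queue by the tenant's tier
    run_workflow.apply_async(args=("app-id", {}), queue=SANDBOX_QUEUE)
```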
## Debug Strategy
Dify divides users into two groups: builders and end users.
Builders are the users who create workflows, and at that stage debugging is a critical part of workflow development. As the start nodes of a workflow, trigger nodes can `listen` for events from `WebhookDebug`, `Schedule`, and `Plugin`; the debugging flow is implemented in `DraftWorkflowTriggerNodeApi` in `controllers/console/app/workflow.py`.
A polling process can be seen as a series of single `poll` operations, each of which fetches events cached in `Redis` and returns `None` if no event is found. In more detail, `core/trigger/debug/event_bus.py` handles the polling process and `core/trigger/debug/event_selectors.py` selects the event poller based on the trigger type.
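A sketch of that single `poll` operation against Redis — the key naming and list-based delivery are assumptions, not the actual event bus in `core/trigger/debug/event_bus.py`:

```python
import json
import time

import redis

r = redis.Redis()


def poll(node_id: str) -> dict | None:
    """A single poll operation: return one cached debug event, or None."""
    raw = r.lpop(f"trigger:debug:{node_id}")
    return json.loads(raw) if raw else None


def poll_until_event(node_id: str, attempts: int = 30, interval: float = 1.0) -> dict | None:
    """A polling process is just repeated single polls with a small sleep."""
    for _ in range(attempts):
        event = poll(node_id)
        if event is not None:
            return event
        time.sleep(interval)
    return None
```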


@@ -1,29 +1,254 @@
import sys
import os

from werkzeug.exceptions import Unauthorized

if not os.environ.get("DEBUG") or os.environ.get("DEBUG").lower() != 'true':
    from gevent import monkey
    monkey.patch_all()
    # if os.environ.get("VECTOR_STORE") == 'milvus':
    import grpc.experimental.gevent
    grpc.experimental.gevent.init_gevent()

import langchain
langchain.verbose = True

import json
import logging
import threading
import time
import warnings

from flask import Flask, Response, request
from flask_cors import CORS

from commands import register_commands
from config import CloudEditionConfig, Config
from extensions import (
    ext_celery,
    ext_code_based_extension,
    ext_compress,
    ext_database,
    ext_hosting_provider,
    ext_login,
    ext_mail,
    ext_migrate,
    ext_redis,
    ext_sentry,
    ext_storage,
)
from extensions.ext_database import db
from extensions.ext_login import login_manager
from libs.passport import PassportService
from services.account_service import AccountService

# DO NOT REMOVE BELOW
from events import event_handlers
from models import account, dataset, model, source, task, tool, tools, web
# DO NOT REMOVE ABOVE


def is_db_command() -> bool:
    if len(sys.argv) > 1 and sys.argv[0].endswith("flask") and sys.argv[1] == "db":
        return True
    return False


warnings.simplefilter("ignore", ResourceWarning)

# fix windows platform
if os.name == "nt":
    os.system('tzutil /s "UTC"')
else:
    os.environ['TZ'] = 'UTC'
    time.tzset()


class DifyApp(Flask):
    pass


# -------------
# Configuration
# -------------
config_type = os.getenv('EDITION', default='SELF_HOSTED')  # ce edition first


# ----------------------------
# Application Factory Function
# ----------------------------
def create_app(test_config=None) -> Flask:
    app = DifyApp(__name__)

    if test_config:
        app.config.from_object(test_config)
    else:
        if config_type == "CLOUD":
            app.config.from_object(CloudEditionConfig())
        else:
            app.config.from_object(Config())

    app.secret_key = app.config['SECRET_KEY']

    logging.basicConfig(level=app.config.get('LOG_LEVEL', 'INFO'))

    initialize_extensions(app)
    register_blueprints(app)
    register_commands(app)

    return app


def initialize_extensions(app):
    # Since the application instance is now created, pass it to each Flask
    # extension instance to bind it to the Flask application instance (app)
    ext_compress.init_app(app)
    ext_code_based_extension.init()
    ext_database.init_app(app)
    ext_migrate.init(app, db)
    ext_redis.init_app(app)
    ext_storage.init_app(app)
    ext_celery.init_app(app)
    ext_login.init_app(app)
    ext_mail.init_app(app)
    ext_hosting_provider.init_app(app)
    ext_sentry.init_app(app)


# Flask-Login configuration
@login_manager.request_loader
def load_user_from_request(request_from_flask_login):
    """Load user based on the request."""
    if request.blueprint == 'console':
        # Check if the user_id contains a dot, indicating the old format
        auth_header = request.headers.get('Authorization', '')
        if not auth_header:
            auth_token = request.args.get('_token')
            if not auth_token:
                raise Unauthorized('Invalid Authorization token.')
        else:
            if ' ' not in auth_header:
                raise Unauthorized('Invalid Authorization header format. Expected \'Bearer <api-key>\' format.')
            auth_scheme, auth_token = auth_header.split(None, 1)
            auth_scheme = auth_scheme.lower()
            if auth_scheme != 'bearer':
                raise Unauthorized('Invalid Authorization header format. Expected \'Bearer <api-key>\' format.')

        decoded = PassportService().verify(auth_token)
        user_id = decoded.get('user_id')

        return AccountService.load_user(user_id)
    else:
        return None


@login_manager.unauthorized_handler
def unauthorized_handler():
    """Handle unauthorized requests."""
    return Response(json.dumps({
        'code': 'unauthorized',
        'message': "Unauthorized."
    }), status=401, content_type="application/json")


# register blueprint routers
def register_blueprints(app):
    from controllers.console import bp as console_app_bp
    from controllers.files import bp as files_bp
    from controllers.service_api import bp as service_api_bp
    from controllers.web import bp as web_bp

    CORS(service_api_bp,
         allow_headers=['Content-Type', 'Authorization', 'X-App-Code'],
         methods=['GET', 'PUT', 'POST', 'DELETE', 'OPTIONS', 'PATCH']
         )
    app.register_blueprint(service_api_bp)

    CORS(web_bp,
         resources={
             r"/*": {"origins": app.config['WEB_API_CORS_ALLOW_ORIGINS']}},
         supports_credentials=True,
         allow_headers=['Content-Type', 'Authorization', 'X-App-Code'],
         methods=['GET', 'PUT', 'POST', 'DELETE', 'OPTIONS', 'PATCH'],
         expose_headers=['X-Version', 'X-Env']
         )
    app.register_blueprint(web_bp)

    CORS(console_app_bp,
         resources={
             r"/*": {"origins": app.config['CONSOLE_CORS_ALLOW_ORIGINS']}},
         supports_credentials=True,
         allow_headers=['Content-Type', 'Authorization'],
         methods=['GET', 'PUT', 'POST', 'DELETE', 'OPTIONS', 'PATCH'],
         expose_headers=['X-Version', 'X-Env']
         )
    app.register_blueprint(console_app_bp)

    CORS(files_bp,
         allow_headers=['Content-Type'],
         methods=['GET', 'PUT', 'POST', 'DELETE', 'OPTIONS', 'PATCH']
         )
    app.register_blueprint(files_bp)


# create app
if is_db_command():
    from app_factory import create_migrations_app

    app = create_app()
    celery = app.extensions["celery"]
    app = create_migrations_app()
else:
    # Gunicorn and Celery handle monkey patching automatically in production by
    # specifying the `gevent` worker class. Manual monkey patching is not required here.
    #
    # See `api/docker/entrypoint.sh` (lines 33 and 47) for details.
    #
    # For third-party library patching, refer to `gunicorn.conf.py` and `celery_entrypoint.py`.
    from app_factory import create_app

    if app.config['TESTING']:
        print("App is running in TESTING mode")

    app = create_app()
    celery = app.extensions["celery"]

if __name__ == "__main__":
    app.run(host="0.0.0.0", port=5001)


@app.after_request
def after_request(response):
    """Add Version headers to the response."""
    response.set_cookie('remember_token', '', expires=0)
    response.headers.add('X-Version', app.config['CURRENT_VERSION'])
    response.headers.add('X-Env', app.config['DEPLOY_ENV'])
    return response


@app.route('/health')
def health():
    return Response(json.dumps({
        'status': 'ok',
        'version': app.config['CURRENT_VERSION']
    }), status=200, content_type="application/json")


@app.route('/threads')
def threads():
    num_threads = threading.active_count()
    threads = threading.enumerate()

    thread_list = []
    for thread in threads:
        thread_name = thread.name
        thread_id = thread.ident
        is_alive = thread.is_alive()

        thread_list.append({
            'name': thread_name,
            'id': thread_id,
            'is_alive': is_alive
        })

    return {
        'thread_num': num_threads,
        'threads': thread_list
    }


@app.route('/db-pool-stat')
def pool_stat():
    engine = db.engine
    return {
        'pool_size': engine.pool.size(),
        'checked_in_connections': engine.pool.checkedin(),
        'checked_out_connections': engine.pool.checkedout(),
        'overflow_connections': engine.pool.overflow(),
        'connection_timeout': engine.pool.timeout(),
        'recycle_time': db.engine.pool._recycle
    }


if __name__ == '__main__':
    app.run(host='0.0.0.0', port=5001)


@@ -1,119 +0,0 @@
import logging
import time

from configs import dify_config
from contexts.wrapper import RecyclableContextVar
from dify_app import DifyApp

logger = logging.getLogger(__name__)


# ----------------------------
# Application Factory Function
# ----------------------------
def create_flask_app_with_configs() -> DifyApp:
    """
    create a raw flask app
    with configs loaded from .env file
    """
    dify_app = DifyApp(__name__)
    dify_app.config.from_mapping(dify_config.model_dump())

    # add before request hook
    @dify_app.before_request
    def before_request():
        # add an unique identifier to each request
        RecyclableContextVar.increment_thread_recycles()

    # Capture the decorator's return value to avoid pyright reportUnusedFunction
    _ = before_request

    return dify_app


def create_app() -> DifyApp:
    start_time = time.perf_counter()
    app = create_flask_app_with_configs()
    initialize_extensions(app)
    end_time = time.perf_counter()
    if dify_config.DEBUG:
        logger.info("Finished create_app (%s ms)", round((end_time - start_time) * 1000, 2))
    return app


def initialize_extensions(app: DifyApp):
    from extensions import (
        ext_app_metrics,
        ext_blueprints,
        ext_celery,
        ext_code_based_extension,
        ext_commands,
        ext_compress,
        ext_database,
        ext_hosting_provider,
        ext_import_modules,
        ext_logging,
        ext_login,
        ext_mail,
        ext_migrate,
        ext_orjson,
        ext_otel,
        ext_proxy_fix,
        ext_redis,
        ext_request_logging,
        ext_sentry,
        ext_set_secretkey,
        ext_storage,
        ext_timezone,
        ext_warnings,
    )

    extensions = [
        ext_timezone,
        ext_logging,
        ext_warnings,
        ext_import_modules,
        ext_orjson,
        ext_set_secretkey,
        ext_compress,
        ext_code_based_extension,
        ext_database,
        ext_app_metrics,
        ext_migrate,
        ext_redis,
        ext_storage,
        ext_celery,
        ext_login,
        ext_mail,
        ext_hosting_provider,
        ext_sentry,
        ext_proxy_fix,
        ext_blueprints,
        ext_commands,
        ext_otel,
        ext_request_logging,
    ]
    for ext in extensions:
        short_name = ext.__name__.split(".")[-1]
        is_enabled = ext.is_enabled() if hasattr(ext, "is_enabled") else True
        if not is_enabled:
            if dify_config.DEBUG:
                logger.info("Skipped %s", short_name)
            continue

        start_time = time.perf_counter()
        ext.init_app(app)
        end_time = time.perf_counter()
        if dify_config.DEBUG:
            logger.info("Loaded %s (%s ms)", short_name, round((end_time - start_time) * 1000, 2))


def create_migrations_app():
    app = create_flask_app_with_configs()
    from extensions import ext_database, ext_migrate

    # Initialize only required extensions
    ext_database.init_app(app)
    ext_migrate.init_app(app)

    return app


@@ -1,13 +0,0 @@
import psycogreen.gevent as pscycogreen_gevent # type: ignore
from grpc.experimental import gevent as grpc_gevent # type: ignore
# grpc gevent
grpc_gevent.init_gevent()
print("gRPC patched with gevent.", flush=True) # noqa: T201
pscycogreen_gevent.patch_psycopg()
print("psycopg2 patched with gevent.", flush=True) # noqa: T201
from app import app, celery
__all__ = ["app", "celery"]

File diff suppressed because it is too large.

api/config.py (new file, 333 lines)

@@ -0,0 +1,333 @@
import os

import dotenv

dotenv.load_dotenv()

DEFAULTS = {
    'DB_USERNAME': 'postgres',
    'DB_PASSWORD': '',
    'DB_HOST': 'localhost',
    'DB_PORT': '5432',
    'DB_DATABASE': 'dify',
    'DB_CHARSET': '',
    'REDIS_HOST': 'localhost',
    'REDIS_PORT': '6379',
    'REDIS_DB': '0',
    'REDIS_USE_SSL': 'False',
    'OAUTH_REDIRECT_PATH': '/console/api/oauth/authorize',
    'OAUTH_REDIRECT_INDEX_PATH': '/',
    'CONSOLE_WEB_URL': 'https://cloud.dify.ai',
    'CONSOLE_API_URL': 'https://cloud.dify.ai',
    'SERVICE_API_URL': 'https://api.dify.ai',
    'APP_WEB_URL': 'https://udify.app',
    'FILES_URL': '',
    'S3_ADDRESS_STYLE': 'auto',
    'STORAGE_TYPE': 'local',
    'STORAGE_LOCAL_PATH': 'storage',
    'CHECK_UPDATE_URL': 'https://updates.dify.ai',
    'DEPLOY_ENV': 'PRODUCTION',
    'SQLALCHEMY_POOL_SIZE': 30,
    'SQLALCHEMY_MAX_OVERFLOW': 10,
    'SQLALCHEMY_POOL_RECYCLE': 3600,
    'SQLALCHEMY_ECHO': 'False',
    'SENTRY_TRACES_SAMPLE_RATE': 1.0,
    'SENTRY_PROFILES_SAMPLE_RATE': 1.0,
    'WEAVIATE_GRPC_ENABLED': 'True',
    'WEAVIATE_BATCH_SIZE': 100,
    'QDRANT_CLIENT_TIMEOUT': 20,
    'CELERY_BACKEND': 'database',
    'LOG_LEVEL': 'INFO',
    'HOSTED_OPENAI_QUOTA_LIMIT': 200,
    'HOSTED_OPENAI_TRIAL_ENABLED': 'False',
    'HOSTED_OPENAI_TRIAL_MODELS': 'gpt-3.5-turbo,gpt-3.5-turbo-1106,gpt-3.5-turbo-instruct,gpt-3.5-turbo-16k,gpt-3.5-turbo-16k-0613,gpt-3.5-turbo-0613,gpt-3.5-turbo-0125,text-davinci-003',
    'HOSTED_OPENAI_PAID_ENABLED': 'False',
    'HOSTED_OPENAI_PAID_MODELS': 'gpt-4,gpt-4-turbo-preview,gpt-4-1106-preview,gpt-4-0125-preview,gpt-3.5-turbo,gpt-3.5-turbo-16k,gpt-3.5-turbo-16k-0613,gpt-3.5-turbo-1106,gpt-3.5-turbo-0613,gpt-3.5-turbo-0125,gpt-3.5-turbo-instruct,text-davinci-003',
    'HOSTED_AZURE_OPENAI_ENABLED': 'False',
    'HOSTED_AZURE_OPENAI_QUOTA_LIMIT': 200,
    'HOSTED_ANTHROPIC_QUOTA_LIMIT': 600000,
    'HOSTED_ANTHROPIC_TRIAL_ENABLED': 'False',
    'HOSTED_ANTHROPIC_PAID_ENABLED': 'False',
    'HOSTED_MODERATION_ENABLED': 'False',
    'HOSTED_MODERATION_PROVIDERS': '',
    'HOSTED_FETCH_APP_TEMPLATES_MODE': 'remote',
    'HOSTED_FETCH_APP_TEMPLATES_REMOTE_DOMAIN': 'https://tmpl.dify.ai',
    'CLEAN_DAY_SETTING': 30,
    'UPLOAD_FILE_SIZE_LIMIT': 15,
    'UPLOAD_FILE_BATCH_LIMIT': 5,
    'UPLOAD_IMAGE_FILE_SIZE_LIMIT': 10,
    'OUTPUT_MODERATION_BUFFER_SIZE': 300,
    'MULTIMODAL_SEND_IMAGE_FORMAT': 'base64',
    'INVITE_EXPIRY_HOURS': 72,
    'BILLING_ENABLED': 'False',
    'CAN_REPLACE_LOGO': 'False',
    'ETL_TYPE': 'dify',
    'KEYWORD_STORE': 'jieba',
    'BATCH_UPLOAD_LIMIT': 20,
    'CODE_EXECUTION_ENDPOINT': '',
    'CODE_EXECUTION_API_KEY': '',
    'TOOL_ICON_CACHE_MAX_AGE': 3600,
    'KEYWORD_DATA_SOURCE_TYPE': 'database',
}


def get_env(key):
    return os.environ.get(key, DEFAULTS.get(key))


def get_bool_env(key):
    value = get_env(key)
    return value.lower() == 'true' if value is not None else False


def get_cors_allow_origins(env, default):
    cors_allow_origins = []
    if get_env(env):
        for origin in get_env(env).split(','):
            cors_allow_origins.append(origin)
    else:
        cors_allow_origins = [default]
    return cors_allow_origins


class Config:
    """Application configuration class."""

    def __init__(self):
        # ------------------------
        # General Configurations.
        # ------------------------
        self.CURRENT_VERSION = "0.6.0-preview-workflow.2"
        self.COMMIT_SHA = get_env('COMMIT_SHA')
        self.EDITION = "SELF_HOSTED"
        self.DEPLOY_ENV = get_env('DEPLOY_ENV')
        self.TESTING = False
        self.LOG_LEVEL = get_env('LOG_LEVEL')

        # The backend URL prefix of the console API.
        # used to concatenate the login authorization callback or notion integration callback.
        self.CONSOLE_API_URL = get_env('CONSOLE_API_URL')

        # The front-end URL prefix of the console web.
        # used to concatenate some front-end addresses and for CORS configuration use.
        self.CONSOLE_WEB_URL = get_env('CONSOLE_WEB_URL')

        # WebApp Url prefix.
        # used to display WebAPP API Base Url to the front-end.
        self.APP_WEB_URL = get_env('APP_WEB_URL')

        # Service API Url prefix.
        # used to display Service API Base Url to the front-end.
        self.SERVICE_API_URL = get_env('SERVICE_API_URL')

        # File preview or download Url prefix.
        # used to display File preview or download Url to the front-end or as Multi-model inputs;
        # Url is signed and has expiration time.
        self.FILES_URL = get_env('FILES_URL') if get_env('FILES_URL') else self.CONSOLE_API_URL

        # Your App secret key will be used for securely signing the session cookie
        # Make sure you are changing this key for your deployment with a strong key.
        # You can generate a strong key using `openssl rand -base64 42`.
        # Alternatively you can set it with `SECRET_KEY` environment variable.
        self.SECRET_KEY = get_env('SECRET_KEY')

        # cors settings
        self.CONSOLE_CORS_ALLOW_ORIGINS = get_cors_allow_origins(
            'CONSOLE_CORS_ALLOW_ORIGINS', self.CONSOLE_WEB_URL)
        self.WEB_API_CORS_ALLOW_ORIGINS = get_cors_allow_origins(
            'WEB_API_CORS_ALLOW_ORIGINS', '*')

        # check update url
        self.CHECK_UPDATE_URL = get_env('CHECK_UPDATE_URL')

        # ------------------------
        # Database Configurations.
        # ------------------------
        db_credentials = {
            key: get_env(key) for key in
            ['DB_USERNAME', 'DB_PASSWORD', 'DB_HOST', 'DB_PORT', 'DB_DATABASE', 'DB_CHARSET']
        }
        db_extras = f"?client_encoding={db_credentials['DB_CHARSET']}" if db_credentials['DB_CHARSET'] else ""

        self.SQLALCHEMY_DATABASE_URI = f"postgresql://{db_credentials['DB_USERNAME']}:{db_credentials['DB_PASSWORD']}@{db_credentials['DB_HOST']}:{db_credentials['DB_PORT']}/{db_credentials['DB_DATABASE']}{db_extras}"
        self.SQLALCHEMY_ENGINE_OPTIONS = {
            'pool_size': int(get_env('SQLALCHEMY_POOL_SIZE')),
            'max_overflow': int(get_env('SQLALCHEMY_MAX_OVERFLOW')),
            'pool_recycle': int(get_env('SQLALCHEMY_POOL_RECYCLE'))
        }

        self.SQLALCHEMY_ECHO = get_bool_env('SQLALCHEMY_ECHO')

        # ------------------------
        # Redis Configurations.
        # ------------------------
        self.REDIS_HOST = get_env('REDIS_HOST')
        self.REDIS_PORT = get_env('REDIS_PORT')
        self.REDIS_USERNAME = get_env('REDIS_USERNAME')
        self.REDIS_PASSWORD = get_env('REDIS_PASSWORD')
        self.REDIS_DB = get_env('REDIS_DB')
        self.REDIS_USE_SSL = get_bool_env('REDIS_USE_SSL')

        # ------------------------
        # Celery worker Configurations.
        # ------------------------
        self.CELERY_BROKER_URL = get_env('CELERY_BROKER_URL')
        self.CELERY_BACKEND = get_env('CELERY_BACKEND')
        self.CELERY_RESULT_BACKEND = 'db+{}'.format(self.SQLALCHEMY_DATABASE_URI) \
            if self.CELERY_BACKEND == 'database' else self.CELERY_BROKER_URL
        self.BROKER_USE_SSL = self.CELERY_BROKER_URL.startswith('rediss://')

        # ------------------------
        # File Storage Configurations.
        # ------------------------
        self.STORAGE_TYPE = get_env('STORAGE_TYPE')
        self.STORAGE_LOCAL_PATH = get_env('STORAGE_LOCAL_PATH')
        self.S3_ENDPOINT = get_env('S3_ENDPOINT')
        self.S3_BUCKET_NAME = get_env('S3_BUCKET_NAME')
        self.S3_ACCESS_KEY = get_env('S3_ACCESS_KEY')
        self.S3_SECRET_KEY = get_env('S3_SECRET_KEY')
        self.S3_REGION = get_env('S3_REGION')
        self.S3_ADDRESS_STYLE = get_env('S3_ADDRESS_STYLE')
        self.AZURE_BLOB_ACCOUNT_NAME = get_env('AZURE_BLOB_ACCOUNT_NAME')
        self.AZURE_BLOB_ACCOUNT_KEY = get_env('AZURE_BLOB_ACCOUNT_KEY')
        self.AZURE_BLOB_CONTAINER_NAME = get_env('AZURE_BLOB_CONTAINER_NAME')
        self.AZURE_BLOB_ACCOUNT_URL = get_env('AZURE_BLOB_ACCOUNT_URL')

        # ------------------------
        # Vector Store Configurations.
        # Currently, only support: qdrant, milvus, zilliz, weaviate
        # ------------------------
        self.VECTOR_STORE = get_env('VECTOR_STORE')
        self.KEYWORD_STORE = get_env('KEYWORD_STORE')

        # qdrant settings
        self.QDRANT_URL = get_env('QDRANT_URL')
        self.QDRANT_API_KEY = get_env('QDRANT_API_KEY')
        self.QDRANT_CLIENT_TIMEOUT = get_env('QDRANT_CLIENT_TIMEOUT')

        # milvus / zilliz setting
        self.MILVUS_HOST = get_env('MILVUS_HOST')
        self.MILVUS_PORT = get_env('MILVUS_PORT')
        self.MILVUS_USER = get_env('MILVUS_USER')
        self.MILVUS_PASSWORD = get_env('MILVUS_PASSWORD')
        self.MILVUS_SECURE = get_env('MILVUS_SECURE')

        # weaviate settings
        self.WEAVIATE_ENDPOINT = get_env('WEAVIATE_ENDPOINT')
        self.WEAVIATE_API_KEY = get_env('WEAVIATE_API_KEY')
        self.WEAVIATE_GRPC_ENABLED = get_bool_env('WEAVIATE_GRPC_ENABLED')
        self.WEAVIATE_BATCH_SIZE = int(get_env('WEAVIATE_BATCH_SIZE'))

        # ------------------------
        # Mail Configurations.
        # ------------------------
        self.MAIL_TYPE = get_env('MAIL_TYPE')
        self.MAIL_DEFAULT_SEND_FROM = get_env('MAIL_DEFAULT_SEND_FROM')
        self.RESEND_API_KEY = get_env('RESEND_API_KEY')
        self.RESEND_API_URL = get_env('RESEND_API_URL')
        # SMTP settings
        self.SMTP_SERVER = get_env('SMTP_SERVER')
        self.SMTP_PORT = get_env('SMTP_PORT')
        self.SMTP_USERNAME = get_env('SMTP_USERNAME')
        self.SMTP_PASSWORD = get_env('SMTP_PASSWORD')
        self.SMTP_USE_TLS = get_bool_env('SMTP_USE_TLS')

        # ------------------------
        # Workpace Configurations.
        # ------------------------
        self.INVITE_EXPIRY_HOURS = int(get_env('INVITE_EXPIRY_HOURS'))

        # ------------------------
        # Sentry Configurations.
        # ------------------------
        self.SENTRY_DSN = get_env('SENTRY_DSN')
        self.SENTRY_TRACES_SAMPLE_RATE = float(get_env('SENTRY_TRACES_SAMPLE_RATE'))
        self.SENTRY_PROFILES_SAMPLE_RATE = float(get_env('SENTRY_PROFILES_SAMPLE_RATE'))

        # ------------------------
        # Business Configurations.
        # ------------------------
        # multi model send image format, support base64, url, default is base64
        self.MULTIMODAL_SEND_IMAGE_FORMAT = get_env('MULTIMODAL_SEND_IMAGE_FORMAT')

        # Dataset Configurations.
        self.CLEAN_DAY_SETTING = get_env('CLEAN_DAY_SETTING')

        # File upload Configurations.
        self.UPLOAD_FILE_SIZE_LIMIT = int(get_env('UPLOAD_FILE_SIZE_LIMIT'))
        self.UPLOAD_FILE_BATCH_LIMIT = int(get_env('UPLOAD_FILE_BATCH_LIMIT'))
        self.UPLOAD_IMAGE_FILE_SIZE_LIMIT = int(get_env('UPLOAD_IMAGE_FILE_SIZE_LIMIT'))

        # Moderation in app Configurations.
        self.OUTPUT_MODERATION_BUFFER_SIZE = int(get_env('OUTPUT_MODERATION_BUFFER_SIZE'))

        # Notion integration setting
        self.NOTION_CLIENT_ID = get_env('NOTION_CLIENT_ID')
        self.NOTION_CLIENT_SECRET = get_env('NOTION_CLIENT_SECRET')
        self.NOTION_INTEGRATION_TYPE = get_env('NOTION_INTEGRATION_TYPE')
        self.NOTION_INTERNAL_SECRET = get_env('NOTION_INTERNAL_SECRET')
        self.NOTION_INTEGRATION_TOKEN = get_env('NOTION_INTEGRATION_TOKEN')

        # ------------------------
        # Platform Configurations.
        # ------------------------
        self.HOSTED_OPENAI_API_KEY = get_env('HOSTED_OPENAI_API_KEY')
        self.HOSTED_OPENAI_API_BASE = get_env('HOSTED_OPENAI_API_BASE')
        self.HOSTED_OPENAI_API_ORGANIZATION = get_env('HOSTED_OPENAI_API_ORGANIZATION')
        self.HOSTED_OPENAI_TRIAL_ENABLED = get_bool_env('HOSTED_OPENAI_TRIAL_ENABLED')
        self.HOSTED_OPENAI_TRIAL_MODELS = get_env('HOSTED_OPENAI_TRIAL_MODELS')
        self.HOSTED_OPENAI_QUOTA_LIMIT = int(get_env('HOSTED_OPENAI_QUOTA_LIMIT'))
        self.HOSTED_OPENAI_PAID_ENABLED = get_bool_env('HOSTED_OPENAI_PAID_ENABLED')
        self.HOSTED_OPENAI_PAID_MODELS = get_env('HOSTED_OPENAI_PAID_MODELS')

        self.HOSTED_AZURE_OPENAI_ENABLED = get_bool_env('HOSTED_AZURE_OPENAI_ENABLED')
        self.HOSTED_AZURE_OPENAI_API_KEY = get_env('HOSTED_AZURE_OPENAI_API_KEY')
        self.HOSTED_AZURE_OPENAI_API_BASE = get_env('HOSTED_AZURE_OPENAI_API_BASE')
        self.HOSTED_AZURE_OPENAI_QUOTA_LIMIT = int(get_env('HOSTED_AZURE_OPENAI_QUOTA_LIMIT'))

        self.HOSTED_ANTHROPIC_API_BASE = get_env('HOSTED_ANTHROPIC_API_BASE')
        self.HOSTED_ANTHROPIC_API_KEY = get_env('HOSTED_ANTHROPIC_API_KEY')
        self.HOSTED_ANTHROPIC_TRIAL_ENABLED = get_bool_env('HOSTED_ANTHROPIC_TRIAL_ENABLED')
        self.HOSTED_ANTHROPIC_QUOTA_LIMIT = int(get_env('HOSTED_ANTHROPIC_QUOTA_LIMIT'))
        self.HOSTED_ANTHROPIC_PAID_ENABLED = get_bool_env('HOSTED_ANTHROPIC_PAID_ENABLED')

        self.HOSTED_MINIMAX_ENABLED = get_bool_env('HOSTED_MINIMAX_ENABLED')
        self.HOSTED_SPARK_ENABLED = get_bool_env('HOSTED_SPARK_ENABLED')
        self.HOSTED_ZHIPUAI_ENABLED = get_bool_env('HOSTED_ZHIPUAI_ENABLED')

        self.HOSTED_MODERATION_ENABLED = get_bool_env('HOSTED_MODERATION_ENABLED')
        self.HOSTED_MODERATION_PROVIDERS = get_env('HOSTED_MODERATION_PROVIDERS')

        # fetch app templates mode, remote, builtin, db(only for dify SaaS), default: remote
        self.HOSTED_FETCH_APP_TEMPLATES_MODE = get_env('HOSTED_FETCH_APP_TEMPLATES_MODE')
        self.HOSTED_FETCH_APP_TEMPLATES_REMOTE_DOMAIN = get_env('HOSTED_FETCH_APP_TEMPLATES_REMOTE_DOMAIN')

        self.ETL_TYPE = get_env('ETL_TYPE')
        self.UNSTRUCTURED_API_URL = get_env('UNSTRUCTURED_API_URL')
        self.BILLING_ENABLED = get_bool_env('BILLING_ENABLED')
        self.CAN_REPLACE_LOGO = get_bool_env('CAN_REPLACE_LOGO')
        self.BATCH_UPLOAD_LIMIT = get_env('BATCH_UPLOAD_LIMIT')
        self.CODE_EXECUTION_ENDPOINT = get_env('CODE_EXECUTION_ENDPOINT')
        self.CODE_EXECUTION_API_KEY = get_env('CODE_EXECUTION_API_KEY')
        self.API_COMPRESSION_ENABLED = get_bool_env('API_COMPRESSION_ENABLED')
        self.TOOL_ICON_CACHE_MAX_AGE = get_env('TOOL_ICON_CACHE_MAX_AGE')
        self.KEYWORD_DATA_SOURCE_TYPE = get_env('KEYWORD_DATA_SOURCE_TYPE')


class CloudEditionConfig(Config):

    def __init__(self):
        super().__init__()

        self.EDITION = "CLOUD"

        self.GITHUB_CLIENT_ID = get_env('GITHUB_CLIENT_ID')
        self.GITHUB_CLIENT_SECRET = get_env('GITHUB_CLIENT_SECRET')
        self.GOOGLE_CLIENT_ID = get_env('GOOGLE_CLIENT_ID')
        self.GOOGLE_CLIENT_SECRET = get_env('GOOGLE_CLIENT_SECRET')
        self.OAUTH_REDIRECT_PATH = get_env('OAUTH_REDIRECT_PATH')


@@ -1,3 +0,0 @@
from .app_config import DifyConfig
dify_config = DifyConfig() # type: ignore


@@ -1,113 +0,0 @@
import logging
from pathlib import Path
from typing import Any

from pydantic.fields import FieldInfo
from pydantic_settings import BaseSettings, PydanticBaseSettingsSource, SettingsConfigDict, TomlConfigSettingsSource

from libs.file_utils import search_file_upwards

from .deploy import DeploymentConfig
from .enterprise import EnterpriseFeatureConfig
from .extra import ExtraServiceConfig
from .feature import FeatureConfig
from .middleware import MiddlewareConfig
from .observability import ObservabilityConfig
from .packaging import PackagingInfo
from .remote_settings_sources import RemoteSettingsSource, RemoteSettingsSourceConfig, RemoteSettingsSourceName
from .remote_settings_sources.apollo import ApolloSettingsSource
from .remote_settings_sources.nacos import NacosSettingsSource

logger = logging.getLogger(__name__)


class RemoteSettingsSourceFactory(PydanticBaseSettingsSource):
    def __init__(self, settings_cls: type[BaseSettings]):
        super().__init__(settings_cls)

    def get_field_value(self, field: FieldInfo, field_name: str) -> tuple[Any, str, bool]:
        raise NotImplementedError

    def __call__(self) -> dict[str, Any]:
        current_state = self.current_state
        remote_source_name = current_state.get("REMOTE_SETTINGS_SOURCE_NAME")
        if not remote_source_name:
            return {}

        remote_source: RemoteSettingsSource | None = None
        match remote_source_name:
            case RemoteSettingsSourceName.APOLLO:
                remote_source = ApolloSettingsSource(current_state)
            case RemoteSettingsSourceName.NACOS:
                remote_source = NacosSettingsSource(current_state)
            case _:
                logger.warning("Unsupported remote source: %s", remote_source_name)
                return {}

        d: dict[str, Any] = {}

        for field_name, field in self.settings_cls.model_fields.items():
            field_value, field_key, value_is_complex = remote_source.get_field_value(field, field_name)
            field_value = remote_source.prepare_field_value(field_name, field, field_value, value_is_complex)
            if field_value is not None:
                d[field_key] = field_value

        return d


class DifyConfig(
    # Packaging info
    PackagingInfo,
    # Deployment configs
    DeploymentConfig,
    # Feature configs
    FeatureConfig,
    # Middleware configs
    MiddlewareConfig,
    # Extra service configs
    ExtraServiceConfig,
    # Observability configs
    ObservabilityConfig,
    # Remote source configs
    RemoteSettingsSourceConfig,
    # Enterprise feature configs
    # **Before using, please contact business@dify.ai by email to inquire about licensing matters.**
    EnterpriseFeatureConfig,
):
    model_config = SettingsConfigDict(
        # read from dotenv format config file
        env_file=".env",
        env_file_encoding="utf-8",
        # ignore extra attributes
        extra="ignore",
    )

    # Before adding any config,
    # please consider to arrange it in the proper config group of existed or added
    # for better readability and maintainability.
    # Thanks for your concentration and consideration.

    @classmethod
    def settings_customise_sources(
        cls,
        settings_cls: type[BaseSettings],
        init_settings: PydanticBaseSettingsSource,
        env_settings: PydanticBaseSettingsSource,
        dotenv_settings: PydanticBaseSettingsSource,
        file_secret_settings: PydanticBaseSettingsSource,
    ) -> tuple[PydanticBaseSettingsSource, ...]:
        return (
            init_settings,
            env_settings,
            RemoteSettingsSourceFactory(settings_cls),
            dotenv_settings,
            file_secret_settings,
            TomlConfigSettingsSource(
                settings_cls=settings_cls,
                toml_file=search_file_upwards(
                    base_dir_path=Path(__file__).parent,
                    target_file_name="pyproject.toml",
                    max_search_parent_depth=2,
                ),
            ),
        )


@@ -1,34 +0,0 @@
from pydantic import Field
from pydantic_settings import BaseSettings


class DeploymentConfig(BaseSettings):
    """
    Configuration settings for application deployment
    """

    APPLICATION_NAME: str = Field(
        description="Name of the application, used for identification and logging purposes",
        default="langgenius/dify",
    )

    DEBUG: bool = Field(
        description="Enable debug mode for additional logging and development features",
        default=False,
    )

    # Request logging configuration
    ENABLE_REQUEST_LOGGING: bool = Field(
        description="Enable request and response body logging",
        default=False,
    )

    EDITION: str = Field(
        description="Deployment edition of the application (e.g., 'SELF_HOSTED', 'CLOUD')",
        default="SELF_HOSTED",
    )

    DEPLOY_ENV: str = Field(
        description="Deployment environment (e.g., 'PRODUCTION', 'DEVELOPMENT'), default to PRODUCTION",
        default="PRODUCTION",
    )


@@ -1,20 +0,0 @@
from pydantic import Field
from pydantic_settings import BaseSettings


class EnterpriseFeatureConfig(BaseSettings):
    """
    Configuration for enterprise-level features.
    **Before using, please contact business@dify.ai by email to inquire about licensing matters.**
    """

    ENTERPRISE_ENABLED: bool = Field(
        description="Enable or disable enterprise-level features."
        "Before using, please contact business@dify.ai by email to inquire about licensing matters.",
        default=False,
    )

    CAN_REPLACE_LOGO: bool = Field(
        description="Allow customization of the enterprise logo.",
        default=False,
    )


@@ -1,10 +0,0 @@
from configs.extra.notion_config import NotionConfig
from configs.extra.sentry_config import SentryConfig


class ExtraServiceConfig(
    # place the configs in alphabet order
    NotionConfig,
    SentryConfig,
):
    pass


@@ -1,34 +0,0 @@
from pydantic import Field
from pydantic_settings import BaseSettings


class NotionConfig(BaseSettings):
    """
    Configuration settings for Notion integration
    """

    NOTION_CLIENT_ID: str | None = Field(
        description="Client ID for Notion API authentication. Required for OAuth 2.0 flow.",
        default=None,
    )

    NOTION_CLIENT_SECRET: str | None = Field(
        description="Client secret for Notion API authentication. Required for OAuth 2.0 flow.",
        default=None,
    )

    NOTION_INTEGRATION_TYPE: str | None = Field(
        description="Type of Notion integration."
        " Set to 'internal' for internal integrations, or None for public integrations.",
        default=None,
    )

    NOTION_INTERNAL_SECRET: str | None = Field(
        description="Secret key for internal Notion integrations. Required when NOTION_INTEGRATION_TYPE is 'internal'.",
        default=None,
    )

    NOTION_INTEGRATION_TOKEN: str | None = Field(
        description="Integration token for Notion API access. Used for direct API calls without OAuth flow.",
        default=None,
    )


@@ -1,26 +0,0 @@
from pydantic import Field, NonNegativeFloat
from pydantic_settings import BaseSettings


class SentryConfig(BaseSettings):
    """
    Configuration settings for Sentry error tracking and performance monitoring
    """

    SENTRY_DSN: str | None = Field(
        description="Sentry Data Source Name (DSN)."
        " This is the unique identifier of your Sentry project, used to send events to the correct project.",
        default=None,
    )

    SENTRY_TRACES_SAMPLE_RATE: NonNegativeFloat = Field(
        description="Sample rate for Sentry performance monitoring traces."
        " Value between 0.0 and 1.0, where 1.0 means 100% of traces are sent to Sentry.",
        default=1.0,
    )

    SENTRY_PROFILES_SAMPLE_RATE: NonNegativeFloat = Field(
        description="Sample rate for Sentry profiling."
        " Value between 0.0 and 1.0, where 1.0 means 100% of profiles are sent to Sentry.",
        default=1.0,
    )

File diff suppressed because it is too large.


@@ -1,254 +0,0 @@
from pydantic import Field, NonNegativeInt
from pydantic_settings import BaseSettings


class HostedCreditConfig(BaseSettings):
    HOSTED_MODEL_CREDIT_CONFIG: str = Field(
        description="Model credit configuration in format 'model:credits,model:credits', e.g., 'gpt-4:20,gpt-4o:10'",
        default="",
    )

    def get_model_credits(self, model_name: str) -> int:
        """
        Get credit value for a specific model name.
        Returns 1 if model is not found in configuration (default credit).

        :param model_name: The name of the model to search for
        :return: The credit value for the model
        """
        if not self.HOSTED_MODEL_CREDIT_CONFIG:
            return 1

        try:
            credit_map = dict(
                item.strip().split(":", 1) for item in self.HOSTED_MODEL_CREDIT_CONFIG.split(",") if ":" in item
            )

            # Search for matching model pattern
            for pattern, credit in credit_map.items():
                if pattern.strip() == model_name:
                    return int(credit)
            return 1  # Default quota if no match found
        except (ValueError, AttributeError):
            return 1  # Return default quota if parsing fails


class HostedOpenAiConfig(BaseSettings):
    """
    Configuration for hosted OpenAI service
    """

    HOSTED_OPENAI_API_KEY: str | None = Field(
        description="API key for hosted OpenAI service",
        default=None,
    )

    HOSTED_OPENAI_API_BASE: str | None = Field(
        description="Base URL for hosted OpenAI API",
        default=None,
    )

    HOSTED_OPENAI_API_ORGANIZATION: str | None = Field(
        description="Organization ID for hosted OpenAI service",
        default=None,
    )

    HOSTED_OPENAI_TRIAL_ENABLED: bool = Field(
        description="Enable trial access to hosted OpenAI service",
        default=False,
    )

    HOSTED_OPENAI_TRIAL_MODELS: str = Field(
        description="Comma-separated list of available models for trial access",
        default="gpt-3.5-turbo,"
        "gpt-3.5-turbo-1106,"
        "gpt-3.5-turbo-instruct,"
        "gpt-3.5-turbo-16k,"
        "gpt-3.5-turbo-16k-0613,"
        "gpt-3.5-turbo-0613,"
        "gpt-3.5-turbo-0125,"
        "text-davinci-003",
    )

    HOSTED_OPENAI_QUOTA_LIMIT: NonNegativeInt = Field(
        description="Quota limit for hosted OpenAI service usage",
        default=200,
    )

    HOSTED_OPENAI_PAID_ENABLED: bool = Field(
        description="Enable paid access to hosted OpenAI service",
        default=False,
    )

    HOSTED_OPENAI_PAID_MODELS: str = Field(
        description="Comma-separated list of available models for paid access",
        default="gpt-4,"
        "gpt-4-turbo-preview,"
        "gpt-4-turbo-2024-04-09,"
        "gpt-4-1106-preview,"
        "gpt-4-0125-preview,"
        "gpt-3.5-turbo,"
        "gpt-3.5-turbo-16k,"
        "gpt-3.5-turbo-16k-0613,"
        "gpt-3.5-turbo-1106,"
        "gpt-3.5-turbo-0613,"
        "gpt-3.5-turbo-0125,"
        "gpt-3.5-turbo-instruct,"
        "text-davinci-003",
    )


class HostedAzureOpenAiConfig(BaseSettings):
    """
    Configuration for hosted Azure OpenAI service
    """

    HOSTED_AZURE_OPENAI_ENABLED: bool = Field(
        description="Enable hosted Azure OpenAI service",
        default=False,
    )

    HOSTED_AZURE_OPENAI_API_KEY: str | None = Field(
        description="API key for hosted Azure OpenAI service",
        default=None,
    )

    HOSTED_AZURE_OPENAI_API_BASE: str | None = Field(
        description="Base URL for hosted Azure OpenAI API",
        default=None,
    )

    HOSTED_AZURE_OPENAI_QUOTA_LIMIT: NonNegativeInt = Field(
        description="Quota limit for hosted Azure OpenAI service usage",
        default=200,
    )


class HostedAnthropicConfig(BaseSettings):
    """
    Configuration for hosted Anthropic service
    """

    HOSTED_ANTHROPIC_API_BASE: str | None = Field(
        description="Base URL for hosted Anthropic API",
        default=None,
    )

    HOSTED_ANTHROPIC_API_KEY: str | None = Field(
        description="API key for hosted Anthropic service",
        default=None,
    )

    HOSTED_ANTHROPIC_TRIAL_ENABLED: bool = Field(
        description="Enable trial access to hosted Anthropic service",
        default=False,
    )

    HOSTED_ANTHROPIC_QUOTA_LIMIT: NonNegativeInt = Field(
        description="Quota limit for hosted Anthropic service usage",
        default=600000,
    )

    HOSTED_ANTHROPIC_PAID_ENABLED: bool = Field(
        description="Enable paid access to hosted Anthropic service",
        default=False,
    )


class HostedMinmaxConfig(BaseSettings):
    """
    Configuration for hosted Minmax service
    """

    HOSTED_MINIMAX_ENABLED: bool = Field(
        description="Enable hosted Minmax service",
        default=False,
    )


class HostedSparkConfig(BaseSettings):
    """
    Configuration for hosted Spark service
    """

    HOSTED_SPARK_ENABLED: bool = Field(
        description="Enable hosted Spark service",
        default=False,
    )


class HostedZhipuAIConfig(BaseSettings):
    """
    Configuration for hosted ZhipuAI service
    """

    HOSTED_ZHIPUAI_ENABLED: bool = Field(
        description="Enable hosted ZhipuAI service",
        default=False,
    )


class HostedModerationConfig(BaseSettings):
    """
    Configuration for hosted Moderation service
    """

    HOSTED_MODERATION_ENABLED: bool = Field(
        description="Enable hosted Moderation service",
        default=False,
    )

    HOSTED_MODERATION_PROVIDERS: str = Field(
        description="Comma-separated list of moderation providers",
        default="",
    )


class HostedFetchAppTemplateConfig(BaseSettings):
    """
    Configuration for fetching app templates
    """

    HOSTED_FETCH_APP_TEMPLATES_MODE: str = Field(
        description="Mode for fetching app templates: remote, db, or builtin default to remote,",
        default="remote",
    )

    HOSTED_FETCH_APP_TEMPLATES_REMOTE_DOMAIN: str = Field(
        description="Domain for fetching remote app templates",
        default="https://tmpl.dify.ai",
    )


class HostedFetchPipelineTemplateConfig(BaseSettings):
    """
    Configuration for fetching pipeline templates
    """

    HOSTED_FETCH_PIPELINE_TEMPLATES_MODE: str = Field(
        description="Mode for fetching pipeline templates: remote, db, or builtin default to remote,",
        default="remote",
    )

    HOSTED_FETCH_PIPELINE_TEMPLATES_REMOTE_DOMAIN: str = Field(
        description="Domain for fetching remote pipeline templates",
        default="https://tmpl.dify.ai",
    )


class HostedServiceConfig(
    # place the configs in alphabet order
    HostedAnthropicConfig,
    HostedAzureOpenAiConfig,
    HostedFetchAppTemplateConfig,
    HostedFetchPipelineTemplateConfig,
    HostedMinmaxConfig,
    HostedOpenAiConfig,
    HostedSparkConfig,
    HostedZhipuAIConfig,
    # moderation
    HostedModerationConfig,
    # credit config
    HostedCreditConfig,
):
    pass


@@ -1,360 +0,0 @@
import os
from typing import Any, Literal
from urllib.parse import parse_qsl, quote_plus

from pydantic import Field, NonNegativeFloat, NonNegativeInt, PositiveFloat, PositiveInt, computed_field
from pydantic_settings import BaseSettings

from .cache.redis_config import RedisConfig
from .storage.aliyun_oss_storage_config import AliyunOSSStorageConfig
from .storage.amazon_s3_storage_config import S3StorageConfig
from .storage.azure_blob_storage_config import AzureBlobStorageConfig
from .storage.baidu_obs_storage_config import BaiduOBSStorageConfig
from .storage.clickzetta_volume_storage_config import ClickZettaVolumeStorageConfig
from .storage.google_cloud_storage_config import GoogleCloudStorageConfig
from .storage.huawei_obs_storage_config import HuaweiCloudOBSStorageConfig
from .storage.oci_storage_config import OCIStorageConfig
from .storage.opendal_storage_config import OpenDALStorageConfig
from .storage.supabase_storage_config import SupabaseStorageConfig
from .storage.tencent_cos_storage_config import TencentCloudCOSStorageConfig
from .storage.volcengine_tos_storage_config import VolcengineTOSStorageConfig
from .vdb.alibabacloud_mysql_config import AlibabaCloudMySQLConfig
from .vdb.analyticdb_config import AnalyticdbConfig
from .vdb.baidu_vector_config import BaiduVectorDBConfig
from .vdb.chroma_config import ChromaConfig
from .vdb.clickzetta_config import ClickzettaConfig
from .vdb.couchbase_config import CouchbaseConfig
from .vdb.elasticsearch_config import ElasticsearchConfig
from .vdb.huawei_cloud_config import HuaweiCloudConfig
from .vdb.lindorm_config import LindormConfig
from .vdb.matrixone_config import MatrixoneConfig
from .vdb.milvus_config import MilvusConfig
from .vdb.myscale_config import MyScaleConfig
from .vdb.oceanbase_config import OceanBaseVectorConfig
from .vdb.opengauss_config import OpenGaussConfig
from .vdb.opensearch_config import OpenSearchConfig
from .vdb.oracle_config import OracleConfig
from .vdb.pgvector_config import PGVectorConfig
from .vdb.pgvectors_config import PGVectoRSConfig
from .vdb.qdrant_config import QdrantConfig
from .vdb.relyt_config import RelytConfig
from .vdb.tablestore_config import TableStoreConfig
from .vdb.tencent_vector_config import TencentVectorDBConfig
from .vdb.tidb_on_qdrant_config import TidbOnQdrantConfig
from .vdb.tidb_vector_config import TiDBVectorConfig
from .vdb.upstash_config import UpstashConfig
from .vdb.vastbase_vector_config import VastbaseVectorConfig
from .vdb.vikingdb_config import VikingDBConfig
from .vdb.weaviate_config import WeaviateConfig


class StorageConfig(BaseSettings):
    STORAGE_TYPE: Literal[
        "opendal",
        "s3",
        "aliyun-oss",
        "azure-blob",
        "baidu-obs",
        "clickzetta-volume",
        "google-storage",
        "huawei-obs",
        "oci-storage",
        "tencent-cos",
        "volcengine-tos",
        "supabase",
        "local",
    ] = Field(
        description="Type of storage to use."
        " Options: 'opendal', '(deprecated) local', 's3', 'aliyun-oss', 'azure-blob', 'baidu-obs', "
        "'clickzetta-volume', 'google-storage', 'huawei-obs', 'oci-storage', 'tencent-cos', "
        "'volcengine-tos', 'supabase'. Default is 'opendal'.",
        default="opendal",
    )

    STORAGE_LOCAL_PATH: str = Field(
        description="Path for local storage when STORAGE_TYPE is set to 'local'.",
        default="storage",
        deprecated=True,
    )


class VectorStoreConfig(BaseSettings):
    VECTOR_STORE: str | None = Field(
        description="Type of vector store to use for efficient similarity search."
        " Set to None if not using a vector store.",
        default=None,
    )

    VECTOR_STORE_WHITELIST_ENABLE: bool | None = Field(
        description="Enable whitelist for vector store.",
        default=False,
    )

    VECTOR_INDEX_NAME_PREFIX: str | None = Field(
        description="Prefix used to create collection name in vector database",
        default="Vector_index",
    )


class KeywordStoreConfig(BaseSettings):
    KEYWORD_STORE: str = Field(
        description="Method for keyword extraction and storage."
        " Default is 'jieba', a Chinese text segmentation library.",
        default="jieba",
    )


class DatabaseConfig(BaseSettings):
    DB_HOST: str = Field(
        description="Hostname or IP address of the database server.",
        default="localhost",
    )

    DB_PORT: PositiveInt = Field(
        description="Port number for database connection.",
        default=5432,
    )

    DB_USERNAME: str = Field(
        description="Username for database authentication.",
        default="postgres",
    )

    DB_PASSWORD: str = Field(
        description="Password for database authentication.",
        default="",
    )

    DB_DATABASE: str = Field(
        description="Name of the database to connect to.",
        default="dify",
    )

    DB_CHARSET: str = Field(
        description="Character set for database connection.",
        default="",
    )

    DB_EXTRAS: str = Field(
        description="Additional database connection parameters. Example: 'keepalives_idle=60&keepalives=1'",
        default="",
    )

    SQLALCHEMY_DATABASE_URI_SCHEME: str = Field(
        description="Database URI scheme for SQLAlchemy connection.",
        default="postgresql",
    )

    @computed_field  # type: ignore[prop-decorator]
    @property
    def SQLALCHEMY_DATABASE_URI(self) -> str:
        db_extras = (
            f"{self.DB_EXTRAS}&client_encoding={self.DB_CHARSET}" if self.DB_CHARSET else self.DB_EXTRAS
        ).strip("&")
        db_extras = f"?{db_extras}" if db_extras else ""
        return (
            f"{self.SQLALCHEMY_DATABASE_URI_SCHEME}://"
            f"{quote_plus(self.DB_USERNAME)}:{quote_plus(self.DB_PASSWORD)}@{self.DB_HOST}:{self.DB_PORT}/{self.DB_DATABASE}"
            f"{db_extras}"
        )

    SQLALCHEMY_POOL_SIZE: NonNegativeInt = Field(
        description="Maximum number of database connections in the pool.",
        default=30,
    )

    SQLALCHEMY_MAX_OVERFLOW: NonNegativeInt = Field(
        description="Maximum number of connections that can be created beyond the pool_size.",
        default=10,
    )

    SQLALCHEMY_POOL_RECYCLE: NonNegativeInt = Field(
        description="Number of seconds after which a connection is automatically recycled.",
        default=3600,
    )

    SQLALCHEMY_POOL_USE_LIFO: bool = Field(
        description="If True, SQLAlchemy will use last-in-first-out way to retrieve connections from pool.",
        default=False,
    )

    SQLALCHEMY_POOL_PRE_PING: bool = Field(
        description="If True, enables connection pool pre-ping feature to check connections.",
        default=False,
    )

    SQLALCHEMY_ECHO: bool | str = Field(
        description="If True, SQLAlchemy will log all SQL statements.",
        default=False,
    )

    SQLALCHEMY_POOL_TIMEOUT: NonNegativeInt = Field(
        description="Number of seconds to wait for a connection from the pool before raising a timeout error.",
        default=30,
    )

    RETRIEVAL_SERVICE_EXECUTORS: NonNegativeInt = Field(
        description="Number of processes for the retrieval service, default to CPU cores.",
        default=os.cpu_count() or 1,
    )

    @computed_field  # type: ignore[prop-decorator]
    @property
    def SQLALCHEMY_ENGINE_OPTIONS(self) -> dict[str, Any]:
        # Parse DB_EXTRAS for 'options'
        db_extras_dict = dict(parse_qsl(self.DB_EXTRAS))
        options = db_extras_dict.get("options", "")
        # Always include timezone
        timezone_opt = "-c timezone=UTC"
        if options:
            # Merge user options and timezone
            merged_options = f"{options} {timezone_opt}"
        else:
            merged_options = timezone_opt

        connect_args = {"options": merged_options}

        return {
            "pool_size": self.SQLALCHEMY_POOL_SIZE,
            "max_overflow": self.SQLALCHEMY_MAX_OVERFLOW,
            "pool_recycle": self.SQLALCHEMY_POOL_RECYCLE,
            "pool_pre_ping": self.SQLALCHEMY_POOL_PRE_PING,
            "connect_args": connect_args,
            "pool_use_lifo": self.SQLALCHEMY_POOL_USE_LIFO,
            "pool_reset_on_return": None,
            "pool_timeout": self.SQLALCHEMY_POOL_TIMEOUT,
        }


class CeleryConfig(DatabaseConfig):
    CELERY_BACKEND: str = Field(
        description="Backend for Celery task results. Options: 'database', 'redis', 'rabbitmq'.",
        default="redis",
    )

    CELERY_BROKER_URL: str | None = Field(
        description="URL of the message broker for Celery tasks.",
        default=None,
    )

    CELERY_USE_SENTINEL: bool | None = Field(
        description="Whether to use Redis Sentinel for high availability.",
        default=False,
    )

    CELERY_SENTINEL_MASTER_NAME: str | None = Field(
        description="Name of the Redis Sentinel master.",
        default=None,
    )

    CELERY_SENTINEL_PASSWORD: str | None = Field(
        description="Password of the Redis Sentinel master.",
        default=None,
    )

    CELERY_SENTINEL_SOCKET_TIMEOUT: PositiveFloat | None = Field(
        description="Timeout for Redis Sentinel socket operations in seconds.",
        default=0.1,
    )

    @computed_field
    def CELERY_RESULT_BACKEND(self) -> str | None:
        if self.CELERY_BACKEND in ("database", "rabbitmq"):
            return f"db+{self.SQLALCHEMY_DATABASE_URI}"
        elif self.CELERY_BACKEND == "redis":
            return self.CELERY_BROKER_URL
        else:
            return None

    @property
    def BROKER_USE_SSL(self) -> bool:
        return self.CELERY_BROKER_URL.startswith("rediss://") if self.CELERY_BROKER_URL else False


class InternalTestConfig(BaseSettings):
    """
    Configuration settings for Internal Test
    """

    AWS_SECRET_ACCESS_KEY: str | None = Field(
        description="Internal test AWS secret access key",
        default=None,
    )

    AWS_ACCESS_KEY_ID: str | None = Field(
        description="Internal test AWS access key ID",
        default=None,
    )


class DatasetQueueMonitorConfig(BaseSettings):
    """
    Configuration settings for Dataset Queue Monitor
    """

    QUEUE_MONITOR_THRESHOLD: NonNegativeInt | None = Field(
        description="Threshold for dataset queue monitor",
        default=200,
    )

    QUEUE_MONITOR_ALERT_EMAILS: str | None = Field(
        description="Emails for dataset queue monitor alert, separated by commas",
        default=None,
    )

    QUEUE_MONITOR_INTERVAL: NonNegativeFloat | None = Field(
        description="Interval for dataset queue monitor in minutes",
        default=30,
    )


class MiddlewareConfig(
    # place the configs in alphabet order
    CeleryConfig,  # Note: CeleryConfig already inherits from DatabaseConfig
    KeywordStoreConfig,
    RedisConfig,
    # configs of storage and storage providers
    StorageConfig,
    AliyunOSSStorageConfig,
    AzureBlobStorageConfig,
    BaiduOBSStorageConfig,
    ClickZettaVolumeStorageConfig,
    GoogleCloudStorageConfig,
    HuaweiCloudOBSStorageConfig,
    OCIStorageConfig,
    OpenDALStorageConfig,
    S3StorageConfig,
    SupabaseStorageConfig,
    TencentCloudCOSStorageConfig,
    VolcengineTOSStorageConfig,
    # configs of vdb and vdb providers
    VectorStoreConfig,
    AnalyticdbConfig,
    ChromaConfig,
    ClickzettaConfig,
    HuaweiCloudConfig,
    MilvusConfig,
    AlibabaCloudMySQLConfig,
    MyScaleConfig,
    OpenSearchConfig,
    OracleConfig,
    PGVectorConfig,
    VastbaseVectorConfig,
    PGVectoRSConfig,
    QdrantConfig,
    RelytConfig,
    TencentVectorDBConfig,
    TiDBVectorConfig,
    WeaviateConfig,
    ElasticsearchConfig,
    CouchbaseConfig,
    InternalTestConfig,
    VikingDBConfig,
    UpstashConfig,
    TidbOnQdrantConfig,
    LindormConfig,
    OceanBaseVectorConfig,
    BaiduVectorDBConfig,
    OpenGaussConfig,
    TableStoreConfig,
    DatasetQueueMonitorConfig,
    MatrixoneConfig,
):
    pass


@@ -1,113 +0,0 @@
from pydantic import Field, NonNegativeInt, PositiveFloat, PositiveInt
from pydantic_settings import BaseSettings


class RedisConfig(BaseSettings):
    """
    Configuration settings for Redis connection
    """

    REDIS_HOST: str = Field(
        description="Hostname or IP address of the Redis server",
        default="localhost",
    )

    REDIS_PORT: PositiveInt = Field(
        description="Port number on which the Redis server is listening",
        default=6379,
    )

    REDIS_USERNAME: str | None = Field(
        description="Username for Redis authentication (if required)",
        default=None,
    )

    REDIS_PASSWORD: str | None = Field(
        description="Password for Redis authentication (if required)",
        default=None,
    )

    REDIS_DB: NonNegativeInt = Field(
        description="Redis database number to use (0-15)",
        default=0,
    )

    REDIS_USE_SSL: bool = Field(
        description="Enable SSL/TLS for the Redis connection",
        default=False,
    )

    REDIS_SSL_CERT_REQS: str = Field(
        description="SSL certificate requirements (CERT_NONE, CERT_OPTIONAL, CERT_REQUIRED)",
        default="CERT_NONE",
    )

    REDIS_SSL_CA_CERTS: str | None = Field(
        description="Path to the CA certificate file for SSL verification",
        default=None,
    )

    REDIS_SSL_CERTFILE: str | None = Field(
        description="Path to the client certificate file for SSL authentication",
        default=None,
    )

    REDIS_SSL_KEYFILE: str | None = Field(
        description="Path to the client private key file for SSL authentication",
        default=None,
    )

    REDIS_USE_SENTINEL: bool | None = Field(
        description="Enable Redis Sentinel mode for high availability",
        default=False,
    )

    REDIS_SENTINELS: str | None = Field(
        description="Comma-separated list of Redis Sentinel nodes (host:port)",
        default=None,
    )

    REDIS_SENTINEL_SERVICE_NAME: str | None = Field(
        description="Name of the Redis Sentinel service to monitor",
        default=None,
    )

    REDIS_SENTINEL_USERNAME: str | None = Field(
        description="Username for Redis Sentinel authentication (if required)",
        default=None,
    )

    REDIS_SENTINEL_PASSWORD: str | None = Field(
        description="Password for Redis Sentinel authentication (if required)",
        default=None,
    )

    REDIS_SENTINEL_SOCKET_TIMEOUT: PositiveFloat | None = Field(
        description="Socket timeout in seconds for Redis Sentinel connections",
        default=0.1,
    )

    REDIS_USE_CLUSTERS: bool = Field(
        description="Enable Redis Clusters mode for high availability",
        default=False,
    )

    REDIS_CLUSTERS: str | None = Field(
        description="Comma-separated list of Redis Clusters nodes (host:port)",
        default=None,
    )

    REDIS_CLUSTERS_PASSWORD: str | None = Field(
        description="Password for Redis Clusters authentication (if required)",
        default=None,
    )

    REDIS_SERIALIZATION_PROTOCOL: int = Field(
        description="Redis serialization protocol (RESP) version",
        default=3,
    )

    REDIS_ENABLE_CLIENT_SIDE_CACHE: bool = Field(
        description="Enable client side cache in redis",
        default=False,
    )


@@ -1,43 +0,0 @@
from pydantic import Field
from pydantic_settings import BaseSettings


class AliyunOSSStorageConfig(BaseSettings):
    """
    Configuration settings for Aliyun Object Storage Service (OSS)
    """

    ALIYUN_OSS_BUCKET_NAME: str | None = Field(
        description="Name of the Aliyun OSS bucket to store and retrieve objects",
        default=None,
    )

    ALIYUN_OSS_ACCESS_KEY: str | None = Field(
        description="Access key ID for authenticating with Aliyun OSS",
        default=None,
    )

    ALIYUN_OSS_SECRET_KEY: str | None = Field(
        description="Secret access key for authenticating with Aliyun OSS",
        default=None,
    )

    ALIYUN_OSS_ENDPOINT: str | None = Field(
        description="URL of the Aliyun OSS endpoint for your chosen region",
        default=None,
    )

    ALIYUN_OSS_REGION: str | None = Field(
        description="Aliyun OSS region where your bucket is located (e.g., 'oss-cn-hangzhou')",
        default=None,
    )

    ALIYUN_OSS_AUTH_VERSION: str | None = Field(
        description="Version of the authentication protocol to use with Aliyun OSS (e.g., 'v4')",
        default=None,
    )

    ALIYUN_OSS_PATH: str | None = Field(
        description="Base path within the bucket to store objects (e.g., 'my-app-data/')",
        default=None,
    )


@@ -1,45 +0,0 @@
from typing import Literal

from pydantic import Field
from pydantic_settings import BaseSettings


class S3StorageConfig(BaseSettings):
    """
    Configuration settings for S3-compatible object storage
    """

    S3_ENDPOINT: str | None = Field(
        description="URL of the S3-compatible storage endpoint (e.g., 'https://s3.amazonaws.com')",
        default=None,
    )

    S3_REGION: str | None = Field(
        description="Region where the S3 bucket is located (e.g., 'us-east-1')",
        default=None,
    )

    S3_BUCKET_NAME: str | None = Field(
        description="Name of the S3 bucket to store and retrieve objects",
        default=None,
    )

    S3_ACCESS_KEY: str | None = Field(
        description="Access key ID for authenticating with the S3 service",
        default=None,
    )

    S3_SECRET_KEY: str | None = Field(
        description="Secret access key for authenticating with the S3 service",
        default=None,
    )

    S3_ADDRESS_STYLE: Literal["auto", "virtual", "path"] = Field(
        description="S3 addressing style: 'auto', 'path', or 'virtual'",
        default="auto",
    )

    S3_USE_AWS_MANAGED_IAM: bool = Field(
        description="Use AWS managed IAM roles for authentication instead of access/secret keys",
        default=False,
    )


@@ -1,28 +0,0 @@
from pydantic import Field
from pydantic_settings import BaseSettings


class AzureBlobStorageConfig(BaseSettings):
    """
    Configuration settings for Azure Blob Storage
    """

    AZURE_BLOB_ACCOUNT_NAME: str | None = Field(
        description="Name of the Azure Storage account (e.g., 'mystorageaccount')",
        default=None,
    )

    AZURE_BLOB_ACCOUNT_KEY: str | None = Field(
        description="Access key for authenticating with the Azure Storage account",
        default=None,
    )

    AZURE_BLOB_CONTAINER_NAME: str | None = Field(
        description="Name of the Azure Blob container to store and retrieve objects",
        default=None,
    )

    AZURE_BLOB_ACCOUNT_URL: str | None = Field(
        description="URL of the Azure Blob storage endpoint (e.g., 'https://mystorageaccount.blob.core.windows.net')",
        default=None,
    )


@@ -1,28 +0,0 @@
from pydantic import Field
from pydantic_settings import BaseSettings


class BaiduOBSStorageConfig(BaseSettings):
    """
    Configuration settings for Baidu Object Storage Service (OBS)
    """

    BAIDU_OBS_BUCKET_NAME: str | None = Field(
        description="Name of the Baidu OBS bucket to store and retrieve objects (e.g., 'my-obs-bucket')",
        default=None,
    )

    BAIDU_OBS_ACCESS_KEY: str | None = Field(
        description="Access Key ID for authenticating with Baidu OBS",
        default=None,
    )

    BAIDU_OBS_SECRET_KEY: str | None = Field(
        description="Secret Access Key for authenticating with Baidu OBS",
        default=None,
    )

    BAIDU_OBS_ENDPOINT: str | None = Field(
        description="URL of the Baidu OSS endpoint for your chosen region (e.g., 'https://.bj.bcebos.com')",
        default=None,
    )


@@ -1,63 +0,0 @@
"""ClickZetta Volume Storage Configuration"""
from pydantic import Field
from pydantic_settings import BaseSettings
class ClickZettaVolumeStorageConfig(BaseSettings):
"""Configuration for ClickZetta Volume storage."""
CLICKZETTA_VOLUME_USERNAME: str | None = Field(
description="Username for ClickZetta Volume authentication",
default=None,
)
CLICKZETTA_VOLUME_PASSWORD: str | None = Field(
description="Password for ClickZetta Volume authentication",
default=None,
)
CLICKZETTA_VOLUME_INSTANCE: str | None = Field(
description="ClickZetta instance identifier",
default=None,
)
CLICKZETTA_VOLUME_SERVICE: str = Field(
description="ClickZetta service endpoint",
default="api.clickzetta.com",
)
CLICKZETTA_VOLUME_WORKSPACE: str = Field(
description="ClickZetta workspace name",
default="quick_start",
)
CLICKZETTA_VOLUME_VCLUSTER: str = Field(
description="ClickZetta virtual cluster name",
default="default_ap",
)
CLICKZETTA_VOLUME_SCHEMA: str = Field(
description="ClickZetta schema name",
default="dify",
)
CLICKZETTA_VOLUME_TYPE: str = Field(
description="ClickZetta volume type (table|user|external)",
default="user",
)
CLICKZETTA_VOLUME_NAME: str | None = Field(
description="ClickZetta volume name for external volumes",
default=None,
)
CLICKZETTA_VOLUME_TABLE_PREFIX: str = Field(
description="Prefix for ClickZetta volume table names",
default="dataset_",
)
CLICKZETTA_VOLUME_DIFY_PREFIX: str = Field(
description="Directory prefix for User Volume to organize Dify files",
default="dify_km",
)


@@ -1,18 +0,0 @@
from pydantic import Field
from pydantic_settings import BaseSettings


class GoogleCloudStorageConfig(BaseSettings):
    """
    Configuration settings for Google Cloud Storage
    """

    GOOGLE_STORAGE_BUCKET_NAME: str | None = Field(
        description="Name of the Google Cloud Storage bucket to store and retrieve objects (e.g., 'my-gcs-bucket')",
        default=None,
    )

    GOOGLE_STORAGE_SERVICE_ACCOUNT_JSON_BASE64: str | None = Field(
        description="Base64-encoded JSON key file for Google Cloud service account authentication",
        default=None,
    )


@@ -1,28 +0,0 @@
from pydantic import Field
from pydantic_settings import BaseSettings


class HuaweiCloudOBSStorageConfig(BaseSettings):
    """
    Configuration settings for Huawei Cloud Object Storage Service (OBS)
    """

    HUAWEI_OBS_BUCKET_NAME: str | None = Field(
        description="Name of the Huawei Cloud OBS bucket to store and retrieve objects (e.g., 'my-obs-bucket')",
        default=None,
    )

    HUAWEI_OBS_ACCESS_KEY: str | None = Field(
        description="Access Key ID for authenticating with Huawei Cloud OBS",
        default=None,
    )

    HUAWEI_OBS_SECRET_KEY: str | None = Field(
        description="Secret Access Key for authenticating with Huawei Cloud OBS",
        default=None,
    )

    HUAWEI_OBS_SERVER: str | None = Field(
        description="Endpoint URL for Huawei Cloud OBS (e.g., 'https://obs.cn-north-4.myhuaweicloud.com')",
        default=None,
    )


@@ -1,33 +0,0 @@
from pydantic import Field
from pydantic_settings import BaseSettings


class OCIStorageConfig(BaseSettings):
    """
    Configuration settings for Oracle Cloud Infrastructure (OCI) Object Storage
    """

    OCI_ENDPOINT: str | None = Field(
        description="URL of the OCI Object Storage endpoint (e.g., 'https://objectstorage.us-phoenix-1.oraclecloud.com')",
        default=None,
    )

    OCI_REGION: str | None = Field(
        description="OCI region where the bucket is located (e.g., 'us-phoenix-1')",
        default=None,
    )

    OCI_BUCKET_NAME: str | None = Field(
        description="Name of the OCI Object Storage bucket to store and retrieve objects (e.g., 'my-oci-bucket')",
        default=None,
    )

    OCI_ACCESS_KEY: str | None = Field(
        description="Access key (also known as API key) for authenticating with OCI Object Storage",
        default=None,
    )

    OCI_SECRET_KEY: str | None = Field(
        description="Secret key associated with the access key for authenticating with OCI Object Storage",
        default=None,
    )


@@ -1,9 +0,0 @@
from pydantic import Field
from pydantic_settings import BaseSettings


class OpenDALStorageConfig(BaseSettings):
    OPENDAL_SCHEME: str = Field(
        default="fs",
        description="OpenDAL scheme.",
    )


@@ -1,23 +0,0 @@
from pydantic import Field
from pydantic_settings import BaseSettings


class SupabaseStorageConfig(BaseSettings):
    """
    Configuration settings for Supabase Object Storage Service
    """

    SUPABASE_BUCKET_NAME: str | None = Field(
        description="Name of the Supabase bucket to store and retrieve objects (e.g., 'dify-bucket')",
        default=None,
    )

    SUPABASE_API_KEY: str | None = Field(
        description="API KEY for authenticating with Supabase",
        default=None,
    )

    SUPABASE_URL: str | None = Field(
        description="URL of the Supabase",
        default=None,
    )

Some files were not shown because too many files have changed in this diff.