Compare commits

..

282 Commits

Author SHA1 Message Date
twwu
f815310fda Merge branch 'main' into feat/hitl-frontend
2026-02-05 10:08:24 +08:00
JzoNg
2ca03d80f9 Merge branch 'main' into jzh
2026-02-04 12:53:07 +08:00
JzoNg
52b9299e43 Merge branch 'main' into jzh
2026-02-03 16:17:41 +08:00
JzoNg
757317f965 chore: add feat/hitl-frontend to build-push workflow triggers 2026-02-03 15:11:38 +08:00
JzoNg
e119d1a16c fix: do not stop when workflow paused event received 2026-02-03 15:08:59 +08:00
JzoNg
0187838a54 Merge branch 'main' into jzh 2026-02-03 14:42:30 +08:00
twwu
68c36655a7 chore: remove feat/hitl-frontend from build-push workflow triggers 2026-01-30 09:46:24 +08:00
twwu
0357530468 Merge branch 'main' into feat/hitl-frontend
2026-01-29 18:55:13 +08:00
twwu
966e308e8b test(billing, rag-pipeline): enhance tests by adding act wrapper and mocking download utility 2026-01-29 18:42:33 +08:00
twwu
e405e801ce test(billing): add human_input_email_delivery_enabled to billing utils tests; remove obsolete text generation result tests 2026-01-29 18:20:26 +08:00
twwu
3170e12b0a Merge branch 'main' into feat/hitl-frontend 2026-01-29 17:27:25 +08:00
twwu
8bbf3ef261 Merge branch 'main' into feat/hitl-frontend
2026-01-29 10:58:26 +08:00
twwu
e09bd3e977 refactor(Result): update setWorkflowProcessData to use useCallback and reset workflow process data on completion 2026-01-29 10:39:40 +08:00
twwu
b1f7602dc8 feat(humanInput): update email configuration defaults and remove validation tooltip
2026-01-28 22:43:58 +08:00
twwu
2ac20ced26 feat(humanInput): add email configuration validation and tooltip for delivery method 2026-01-28 22:22:01 +08:00
twwu
4e34fa3d70 feat(humanInput): enhance web app delivery method handling in trigger mode 2026-01-28 22:03:47 +08:00
twwu
bf161d01a8 Merge branch 'main' into feat/hitl-frontend
2026-01-28 18:29:13 +08:00
twwu
fe5a65b21c test(shortcuts-popup-plugin): add JSDOM mocks for Range methods to improve test reliability 2026-01-28 13:24:55 +08:00
twwu
b3be035c64 fix(userActions): update error messages to include dynamic max length and improve validation logic 2026-01-28 11:51:08 +08:00
twwu
91f118c82c fix(varReferencePicker): update trigger condition to use double negation for clarity 2026-01-28 11:38:52 +08:00
twwu
3eaafccff2 chore(mock): remove unused mock data file for human input form 2026-01-28 11:31:14 +08:00
twwu
9d3b4749ae fix(humanInput): wrap rate limit exceeded error message in return statement for proper rendering 2026-01-28 11:15:10 +08:00
twwu
67bbfd972b feat(humanInput): add rate limit exceeded error handling and localization support
2026-01-28 10:43:13 +08:00
twwu
1968589fdf Merge branch 'main' into feat/hitl-frontend 2026-01-28 10:19:52 +08:00
twwu
fc2acc2c53 refactor(form): streamline error handling by removing redundant expired state management
2026-01-27 21:58:15 +08:00
twwu
e945f3d439 feat(log): add paused status count to log rendering and simplify status rendering logic 2026-01-27 21:20:47 +08:00
twwu
1b0abdf642 feat(humanInput): improve form handling with site info integration and error management
2026-01-27 18:14:24 +08:00
JzoNg
441104ac3c fix: lint error 2026-01-27 17:55:43 +08:00
twwu
8897633e42 feat(workflow): add human input node formatting to retain latest status 2026-01-27 17:09:00 +08:00
twwu
8a20a59d72 Merge branch 'feat/hitl-frontend' of https://github.com/langgenius/dify into feat/hitl-frontend 2026-01-27 15:57:42 +08:00
twwu
7cd4a7c1de feat: implement human input form timeout handling and enhance expiration time display 2026-01-27 15:57:13 +08:00
JzoNg
b503da0942 fix: validation of inputs in email sender 2026-01-27 15:35:53 +08:00
twwu
94671d42c1 refactor(workflow): optimize input variable generation in useSingleRunFormParams hook 2026-01-27 15:19:45 +08:00
JzoNg
8cf817a5fd fix: empty result judgement 2026-01-27 15:02:45 +08:00
JzoNg
3e0b3fae37 fix: test running tab switch 2026-01-27 14:45:46 +08:00
twwu
19664daeef feat(chat): enhance file handling in chat by converting file types to MIME format 2026-01-27 11:19:07 +08:00
twwu
fe5b5bc4c3 Merge branch 'main' into feat/hitl-frontend 2026-01-27 11:06:50 +08:00
twwu
07d8eaae82 refactor: simplify filteredHumanInputFormDataList computation by removing useMemo
2026-01-26 15:16:55 +08:00
JzoNg
e8130fc509 enhancement: add error style for wrong member in email configure of delivery methods 2026-01-26 14:38:17 +08:00
twwu
4ea3cf46fa fix: update key references in HumanInputFormList and adjust resume URL in useChat hook 2026-01-26 14:23:45 +08:00
JzoNg
824f139b46 fix: variable display in form content preview 2026-01-26 13:52:21 +08:00
twwu
f2d98e832f Merge branch 'feat/hitl-frontend' of https://github.com/langgenius/dify into feat/hitl-frontend 2026-01-26 12:00:37 +08:00
twwu
5f73fed1e7 fix(input-field): update regex for name validation to ensure correct format 2026-01-26 12:00:18 +08:00
JzoNg
2ccef3ac84 fix: human input field dragging 2026-01-26 11:47:51 +08:00
twwu
9341227cf1 fix(i18n): update Chinese translations for human input terminology 2026-01-26 11:11:39 +08:00
twwu
d9982b8dc4 Merge branch 'main' into feat/hitl-frontend 2026-01-26 10:50:25 +08:00
Wu Tianwei
d3d42e3a8e refactor: rename placeholder to default in form input (#31452)
2026-01-23 18:01:44 +08:00
twwu
f73db70cec Merge branch 'main' into feat/hitl-frontend 2026-01-23 15:51:58 +08:00
twwu
bcd6e22735 fix: update key prop in UserActionItem to use index for consistent rendering
2026-01-22 17:15:58 +08:00
twwu
9aa674a04f feat: add jumpToEmailConfigModal function to enhance email configuration navigation in delivery method component 2026-01-22 16:48:29 +08:00
twwu
a4b87be5f4 Merge branch 'feat/hitl-frontend' of https://github.com/langgenius/dify into feat/hitl-frontend 2026-01-22 16:16:52 +08:00
twwu
c8c98519ad refactor: update expiration time handling in human input component with new utility functions and localization support 2026-01-22 16:16:34 +08:00
JzoNg
6aa452d4e3 fix: add default value for form in HITL 2026-01-22 15:41:42 +08:00
JzoNg
45c2167e0f fix: default value of input field in form content editor 2026-01-22 15:35:52 +08:00
JzoNg
b242578b86 enhancement: add email address when input blur 2026-01-22 15:35:52 +08:00
JzoNg
d65523aa0b save draft when delivery method changed 2026-01-22 15:35:52 +08:00
twwu
ac46cf499f refactor: update human input form handling with new hooks and improve error management
2026-01-22 14:52:06 +08:00
twwu
8f780cad4c Merge branch 'feat/hitl-frontend' of https://github.com/langgenius/dify into feat/hitl-frontend
2026-01-21 17:33:15 +08:00
twwu
0389dd459d style: update class names for consistent width handling in chat answer component 2026-01-21 17:31:46 +08:00
JzoNg
a19ae24adb fix: fix typo 2026-01-21 17:27:26 +08:00
JzoNg
42e80659b6 fix: name validation for output variable 2026-01-21 17:02:17 +08:00
JzoNg
59cb447e05 enhancement: add keyboard events handle 2026-01-21 16:52:49 +08:00
twwu
e63f7b2249 fix: include state snapshot in workflow event URLs for chat hooks 2026-01-21 16:45:38 +08:00
twwu
307f0d5827 fix: initialize requiredInputs state to an empty object and improve requestParamsObj handling in useSingleRunFormParams hook 2026-01-21 16:23:42 +08:00
JzoNg
757b1c7190 fix: display node name in form content preview 2026-01-21 16:05:03 +08:00
JzoNg
c42b78f2d2 fix: lint err 2026-01-21 16:05:03 +08:00
twwu
645793c48c refactor: streamline input variable handling and enhance type definitions in human input components 2026-01-21 15:44:14 +08:00
twwu
528e3400da fix: add HumanInput block type to available blocks filter logic
2026-01-21 13:54:19 +08:00
twwu
c69a26b1ca fix: correct spacing in expiration time message in Chinese localization 2026-01-21 12:51:21 +08:00
twwu
663f320d86 style: adjust layout and styling in HITL input component for improved responsiveness 2026-01-21 12:19:12 +08:00
twwu
6e9facd9b5 Merge branch 'feat/hitl-frontend' of https://github.com/langgenius/dify into feat/hitl-frontend 2026-01-21 11:32:20 +08:00
twwu
52e9342af5 feat: refactor user action handling in human input panel and update error messaging 2026-01-21 11:31:09 +08:00
JzoNg
0138dc45b6 fix lint error 2026-01-21 11:03:53 +08:00
JzoNg
aaf6e8f978 use action id for executed action text 2026-01-21 11:03:53 +08:00
JzoNg
55be933342 fix email input handle & email test sender UI 2026-01-21 11:03:51 +08:00
twwu
711cca01b8 Merge branch 'main' into feat/hitl-frontend 2026-01-21 10:44:23 +08:00
twwu
2b28074f4c feat: enhance email configuration and body input components with support for dynamic node variables 2026-01-21 10:30:48 +08:00
twwu
c5721184e9 refactor: implement workflow resumption logic in embedded chat and update chat wrapper functionality
2026-01-20 17:03:06 +08:00
Wu Tianwei
f3ec6ad53c feat: enhance chat functionality with workflow resumption and support regeneration (#31281) 2026-01-20 16:52:04 +08:00
twwu
1014852ebd Merge branch 'main' into feat/hitl-frontend
2026-01-20 10:45:27 +08:00
JzoNg
5e644315e4 add tip modal for email type
2026-01-19 16:40:54 +08:00
JzoNg
e3a22e5027 hide workflow as tool button when DSL contains human input nodes 2026-01-19 14:16:05 +08:00
twwu
977cef5a22 Merge branch 'main' into feat/hitl-frontend 2026-01-19 13:46:07 +08:00
twwu
1353eec9ca Merge branch 'main' into feat/hitl-frontend
2026-01-19 12:40:11 +08:00
twwu
73343f03c1 Merge branch 'main' into feat/hitl-frontend 2026-01-19 10:16:27 +08:00
twwu
d401a29bd9 feat: update test email sender mutation to include inputs and adjust API endpoint for better data handling
2026-01-16 17:52:33 +08:00
twwu
3bf8f19874 fix: reduce BUTTON_TEXT_MAX_LENGTH to 20 in user action component for consistency 2026-01-16 17:12:58 +08:00
twwu
51a7ddba81 refactor: rename placeholder_values to resolved_placeholder_values and adjust expiration_time calculation in human input form 2026-01-16 16:46:09 +08:00
twwu
bd634b165d feat: enhance user action validation in human input form by adding checks for duplicate IDs, empty IDs, and empty titles; update translations accordingly 2026-01-16 15:31:13 +08:00
twwu
a298140d8f Merge branch 'main' into feat/hitl-frontend 2026-01-16 13:49:15 +08:00
twwu
3db3a18eff refactor: move getButtonStyle function to utils for better code organization in human input form 2026-01-16 13:47:34 +08:00
twwu
91c35c2f0a feat: enhance human input form handling by adding placeholder values and new workflow event handlers
2026-01-15 18:22:11 +08:00
twwu
61c7fdc614 feat: add humanInputEmailDeliveryEnabled to provider context and update related components for email delivery handling 2026-01-15 16:13:01 +08:00
twwu
88c2483192 refactor: simplify dependencies in DetailPanel and enhance workflow run components with new props for better handling of paused states 2026-01-15 11:52:33 +08:00
twwu
ca58055a39 Merge branch 'main' into feat/hitl-frontend
2026-01-15 09:58:09 +08:00
twwu
368e38d593 fix(chat): add tracing for workflow process when node status is running
2026-01-14 17:24:16 +08:00
twwu
7463bc9199 feat: integrate workflow hook to handle output variable renaming in useFormContent 2026-01-14 16:36:37 +08:00
twwu
35a707199f refactor: update email configuration modal to use selector for user email and enhance tooltip functionality in delivery method item 2026-01-14 16:28:22 +08:00
twwu
dfb25df5ec Merge branch 'main' into feat/hitl-frontend 2026-01-14 13:24:56 +08:00
JzoNg
cdc24696e4 add validation for output name & add correct placeholder
2026-01-13 14:41:46 +08:00
JzoNg
8f6a0e2d8e step run of human input params 2026-01-13 10:39:35 +08:00
twwu
20ba0de47d Merge branch 'main' into feat/hitl-frontend 2026-01-13 09:43:47 +08:00
twwu
f13ace9dd9 fix(use-available-nodes-meta-data): temporarily exclude human-input nodes from available nodes
2026-01-12 16:10:04 +08:00
twwu
23f5427349 feat(elk-layout): add support for Human Input nodes with ELK ports and enhance edge sorting 2026-01-12 16:00:48 +08:00
JzoNg
7deaab116a fix display of preview 2026-01-12 15:56:09 +08:00
JzoNg
b6db6d9305 revert regex for output variable name 2026-01-12 15:56:09 +08:00
twwu
2fa688cefd refactor(use-checklist, variable-utils): update dependencies in hooks and enhance human-input node support 2026-01-12 15:41:25 +08:00
JzoNg
b24fafe901 fix variable name of input field in chinese 2026-01-12 15:33:37 +08:00
JzoNg
c837def205 Revert "add check valid for human input"
This reverts commit d21cf87bb6.
2026-01-12 15:17:29 +08:00
QuantumGhost
1a7eac192c ci: remove unused GitHub Action file
The `workflow_run` event requires the executed workflow files exist in
the default branch.
2026-01-12 15:14:49 +08:00
JzoNg
d21cf87bb6 add check valid for human input 2026-01-12 15:12:28 +08:00
twwu
ef41325ad1 Merge branch 'feat/hitl-frontend' of https://github.com/langgenius/dify into feat/hitl-frontend 2026-01-12 14:52:09 +08:00
twwu
57c33d5869 refactor(draggable-plugin, hitl-input, human-input): update styles and improve layout for better UI consistency 2026-01-12 14:51:48 +08:00
JzoNg
4484261023 fix judgement of generate button display 2026-01-12 14:21:32 +08:00
JzoNg
72bc396646 step run of human input 2026-01-12 14:16:53 +08:00
twwu
68885afac6 Merge branch 'main' into feat/hitl-frontend 2026-01-12 13:47:36 +08:00
twwu
18e57096d2 Merge branch 'feat/hitl-frontend' of https://github.com/langgenius/dify into feat/hitl-frontend 2026-01-12 13:46:48 +08:00
twwu
b6c6d52725 feat(hitl-input): add readonly prop to HITL input components for enhanced user interaction control 2026-01-12 13:46:04 +08:00
QuantumGhost
bd104557ef chore(web): fix incorrect update to deploy actions 2026-01-12 11:40:38 +08:00
QuantumGhost
55b18bb79e chore(web): update deployment action 2026-01-12 11:38:23 +08:00
QuantumGhost
3f8f158b04 ci(web): add automatically deployment for HITL frontend 2026-01-12 09:43:06 +08:00
JzoNg
471d14f882 step run of human input
2026-01-09 18:31:56 +08:00
twwu
a280df2c07 feat(chat): implement workflow event handling and audio player management in chat hooks 2026-01-09 17:05:38 +08:00
twwu
b479a36273 feat(human-input): add formContent prop to delivery method components for enhanced email configuration
2026-01-09 15:20:07 +08:00
twwu
9136bf48f5 Merge branch 'main' into feat/hitl-frontend 2026-01-09 10:49:24 +08:00
twwu
ac70069847 Merge branch 'feat/hitl-frontend' of https://github.com/langgenius/dify into feat/hitl-frontend 2026-01-09 10:47:52 +08:00
twwu
27da1e72eb refactor: remove unused webAppLogout dependency from effect hooks and enhance chat item structure with extra content handling 2026-01-08 16:18:44 +08:00
JzoNg
467119d186 fix loading state in batch run
2026-01-08 15:24:02 +08:00
JzoNg
1cdbfa2539 human input in workflow apps
2026-01-08 14:45:05 +08:00
twwu
d3299db915 feat: add human input output structure and enhance filtering in human input form components
2026-01-07 18:07:30 +08:00
twwu
ebb816b90b fix: update condition for rendering human input form in Answer component 2026-01-07 15:46:47 +08:00
twwu
0b794ad9cc feat: extend log status to include 'paused' and update related components for improved status rendering
2026-01-07 11:48:42 +08:00
twwu
a19c0023f9 Merge branch 'main' into feat/hitl-frontend 2026-01-07 10:24:51 +08:00
twwu
c7fa7009b1 refactor: update form submission parameter from formID to formToken across chat components
2026-01-06 18:13:42 +08:00
JzoNg
3e1a96ad64 expand filled form content as default 2026-01-06 17:57:35 +08:00
JzoNg
d500a631f0 add node title in content wrapper 2026-01-06 17:57:34 +08:00
twwu
607e77e0a7 feat: add human input form divider background color and enhance chat answer component layout 2026-01-06 15:18:15 +08:00
twwu
46ec24cf8a Merge branch 'main' into feat/hitl-frontend
2026-01-06 10:41:22 +08:00
twwu
f654a7f704 feat: implement human input form handling and display components
2026-01-05 17:05:10 +08:00
twwu
cf9b72d574 Merge branch 'main' into feat/hitl-frontend 2026-01-05 15:31:08 +08:00
twwu
5a92bbdab5 feat: enhance chat component to manage multiple human input forms and their submissions 2026-01-05 15:30:17 +08:00
twwu
d0a713e117 feat: integrate human input form submission handling in chat components
2026-01-04 15:27:40 +08:00
twwu
9ec127ea8f feat: enhance human input handling by adding filled data support
2026-01-04 11:31:09 +08:00
twwu
d478822fe1 refactor: update locale hook in ExpirationTime component 2026-01-04 09:50:08 +08:00
twwu
4eac271adc Merge branch 'main' into feat/hitl-frontend 2026-01-04 09:38:08 +08:00
twwu
0f13b6b26d fix: include workflowStore in dependency array for useChat hook to ensure proper state updates
2025-12-31 16:52:22 +08:00
twwu
52ce46c364 refactor: optimize workflow variable handling and enhance node state management for resumption scenarios 2025-12-31 16:44:43 +08:00
twwu
f0f1ae0b49 fix: enhance workflow node handling by including paused state and improving human input management 2025-12-31 13:45:16 +08:00
twwu
0c69466b0f feat: add workflow resume functionality and enhance handling of HumanInput node in workflow events
2025-12-30 18:30:32 +08:00
twwu
44a688cb81 feat: implement edge source handle change functionality and enhance node interactions for HumanInput node 2025-12-30 16:05:33 +08:00
twwu
0e2b59d661 fix: update placeholder handling in ContentItem and remove unused error messages from HumanInputNode
2025-12-30 10:45:59 +08:00
twwu
501c3bcc94 chore: add i18n namespace to various components in the workflow for consistency 2025-12-30 10:18:31 +08:00
twwu
bf6a2c22eb Merge branch 'main' into feat/hitl-frontend 2025-12-30 09:34:30 +08:00
twwu
8c0365c71e Merge branch 'main' into feat/hitl-frontend
2025-12-29 18:36:35 +08:00
twwu
c0a4f3b715 refactor(human-input): reorganize hooks and improve structure for better maintainability
2025-12-29 13:57:34 +08:00
twwu
d308c34842 refactor(use-common): simplify current workspace query function by removing unnecessary body parameter 2025-12-29 13:37:33 +08:00
twwu
68615eec04 Merge branch 'main' into feat/hitl-frontend 2025-12-29 13:36:39 +08:00
WTW0313
eca3e23af0 Merge branch 'main' into feat/hitl-frontend 2025-12-29 12:54:17 +08:00
twwu
c716c4ccbe Merge branch 'main' into feat/hitl-frontend 2025-12-29 10:35:51 +08:00
QuantumGhost
32366774a1 chore(web): remove temporary workaround for CurrentWorkspace query
2025-12-29 09:55:29 +08:00
twwu
640661983a refactor: update feature branch naming in build-push workflow
2025-12-26 16:40:50 +08:00
twwu
f528f2eafc refactor: streamline chat operation handling and enhance workflow event management 2025-12-26 16:39:17 +08:00
twwu
0994953728 refactor: enhance email delivery method handling and improve debug mode display in HumanInputContent 2025-12-26 15:43:20 +08:00
twwu
d80167d9ec refactor: update UserActionItem component props and improve dependency management in useFormContent 2025-12-26 15:09:04 +08:00
twwu
2969a77b15 refactor: rename 'suspended' status to 'paused' across workflow components and update related styles 2025-12-26 14:50:47 +08:00
twwu
062896cb9c refactor: enhance Markdown component and update FormContentPreview props 2025-12-26 12:55:46 +08:00
twwu
5a1e6269d5 Merge branch 'main' into feat/hitl-frontend 2025-12-26 12:32:29 +08:00
twwu
e744d4de80 feat: replace timeout handling with expiration time in HumanInput form and add ExpirationTime component 2025-12-25 18:16:32 +08:00
twwu
be0f493e61 feat: enhance HumanInput node by adding a Timeout action and restructuring user action rendering 2025-12-25 16:16:24 +08:00
twwu
b8d4f60782 feat: add HumanInput block support with output variable handling and integration 2025-12-25 15:51:19 +08:00
twwu
8b65b689f7 feat: enhance human input panel with structured output and variable handling improvements 2025-12-25 14:53:19 +08:00
twwu
8b9846f52b Merge branch 'main' into feat/hitl-frontend 2025-12-25 13:43:27 +08:00
twwu
1c4c1b5cb1 refactor: streamline HITL input components by consolidating props and enhancing variable handling 2025-12-25 10:50:20 +08:00
twwu
ec0c144eb2 refactor: rename isHideNodeLabel to isShowNodeLabel for clarity in variable label component 2025-12-24 16:21:07 +08:00
twwu
afddc56bb4 refactor: replace GeneratedFormInputItem with FormInputItem across components for consistency 2025-12-24 16:01:05 +08:00
twwu
57dde4a4d8 refactor: update dependency arrays in HITL input components for improved stability 2025-12-24 13:06:52 +08:00
twwu
3c0fd213bf refactor: update human input form handling to support async submission and improve placeholder resolution 2025-12-24 12:13:32 +08:00
twwu
ddfd1cb1f5 refactor: update classnames import style across multiple components 2025-12-22 18:09:11 +08:00
twwu
138922f3f4 Merge branch 'main' into feat/hitl-frontend 2025-12-22 18:07:16 +08:00
twwu
aa9d0bd655 Merge branch 'main' into feat/hitl-frontend 2025-12-15 10:56:43 +08:00
twwu
ccef15aafa Merge branch 'main' into feat/hitl-frontend 2025-12-12 17:59:08 +08:00
JzoNg
202f924d99 fix type error of placeholder 2025-12-03 19:48:24 +08:00
JzoNg
882d2f5b4c Merge branch 'main' into tp 2025-12-02 15:33:41 +08:00
JzoNg
93ec42344c add info panel 2025-12-02 15:33:32 +08:00
JzoNg
330585ec9b merge main 2025-11-24 10:46:20 +08:00
JzoNg
8f2660cea8 fix markdown props 2025-11-17 14:56:45 +08:00
JzoNg
87f6ac78c5 Merge branch 'main' into tp 2025-11-17 10:55:31 +08:00
JzoNg
6c7d58aa11 merge main 2025-11-14 10:51:26 +08:00
JzoNg
8bca22a94b merge main 2025-10-28 19:41:33 +08:00
JzoNg
17aa11da79 fix workflow preview crash 2025-10-28 19:40:10 +08:00
JzoNg
63bbcff496 test email sender 2025-10-27 14:07:23 +08:00
JzoNg
f71a632cdc delivery methods config update 2025-10-27 11:55:57 +08:00
JzoNg
4e3bded902 Merge branch 'main' into tp 2025-10-27 10:54:00 +08:00
JzoNg
e75bbdbc0d add other delivery methods 2025-10-26 18:01:05 +08:00
JzoNg
331a5edbf9 fix: drag icon position 2025-10-20 15:57:39 +08:00
JzoNg
6afc99a5ad fix immer import 2025-10-20 14:25:14 +08:00
JzoNg
a4e2ef6b0c merge main 2025-10-20 14:21:09 +08:00
JzoNg
3632b473df fix: merge error 2025-10-20 11:29:52 +08:00
JzoNg
e35dd14c59 merge main 2025-10-13 14:41:31 +08:00
JzoNg
495f901798 add validation for form content 2025-09-08 11:43:51 +08:00
JzoNg
8703515153 human input step run when no variables 2025-09-06 12:34:25 +08:00
JzoNg
be3c6da654 human input step run 2025-09-05 19:59:33 +08:00
JzoNg
d51db3afb3 single run form data 2025-09-05 18:37:28 +08:00
JzoNg
22683fba3f single run form of human input 2025-09-05 16:14:48 +08:00
JzoNg
ed16265eee form inputs in email sender 2025-09-05 15:13:44 +08:00
JzoNg
463ea14d44 email sender modal 2025-09-05 15:13:44 +08:00
JzoNg
527736b8e4 email debug switch 2025-09-05 15:13:44 +08:00
Joel
f22dcee6d9 fix: preview i18n and ui problem 2025-09-05 14:45:46 +08:00
Joel
9bdd7e5465 fix: note match and render problem 2025-09-05 14:25:52 +08:00
Joel
ad0e79372f feat: can show notes 2025-09-05 14:19:13 +08:00
Joel
a362114486 chore: can render note 2025-09-05 11:26:13 +08:00
Joel
79ab253c26 fix: match variable error 2025-09-05 11:06:35 +08:00
Joel
783b78cc0a chore: add custom variable 2025-09-04 18:41:48 +08:00
Joel
b1e123c3aa fix: can not choose vars 2025-09-04 16:46:43 +08:00
Joel
84709a7941 temp 2025-09-02 10:06:57 +08:00
Joel
ec07636ce9 feat: preview wrap 2025-08-29 16:21:39 +08:00
Joel
4d4c8b21ac chore: some tiny ui fix 2025-08-29 14:35:37 +08:00
Joel
ec8754173f feat: copy and expand 2025-08-29 14:30:57 +08:00
Joel
7920b89714 chore: hotkey tip i18n 2025-08-29 11:07:34 +08:00
Joel
19e152fd0c chore: prompt editor ui 2025-08-28 16:57:12 +08:00
yessenia
076a8ecff4 feat: portal position 2025-08-27 18:46:09 +08:00
Joel
40591b2196 fix: not choose vars 2025-08-26 17:35:25 +08:00
Joel
5156b8f9c9 fix: not auto focus and popup var hide 2025-08-26 16:54:00 +08:00
Joel
286ab0d468 feat: insert the hitl config 2025-08-26 16:23:16 +08:00
Joel
71a511a470 feat: can insert hitl node by / 2025-08-26 15:51:03 +08:00
JzoNg
6b11973151 add workflow run events 2025-08-26 14:27:27 +08:00
JzoNg
949a894f03 preview tip of human input delivery methods 2025-08-26 14:27:27 +08:00
Joel
305b5da764 chore: pre-populate placeholder ui 2025-08-25 17:47:45 +08:00
Joel
fda19d3f0e feat: in placeholder choose var 2025-08-25 16:29:58 +08:00
JzoNg
e5a2172a85 human input form display & submit in preview 2025-08-25 16:17:40 +08:00
Joel
2d89d59d74 main 2025-08-25 16:08:24 +08:00
Joel
4f56acd432 chore: fix variable change ux error 2025-08-25 15:36:40 +08:00
Joel
c1b7412465 chore: handle textarea to ui 2025-08-25 15:31:21 +08:00
Joel
62b9a20115 chore: fix placeholder ux 2025-08-25 14:40:51 +08:00
Joel
5392401e60 chore: btn text 2025-08-22 18:10:53 +08:00
Joel
baa77d3cda chore: can show popup 2025-08-22 16:59:04 +08:00
Joel
a41176b66d chore: pre-populate field 2025-08-22 16:24:45 +08:00
yessenia
465e978209 feat: modify shortcutsplugin 2025-08-22 16:13:54 +08:00
yessenia
a9ea8cfd1c feat: shortcut popup unit test 2025-08-21 16:30:19 +08:00
yessenia
c771f4dbc7 feat: shortcut popup 2025-08-20 18:18:42 +08:00
JzoNg
ebbed8f863 form field 2025-08-12 14:17:08 +08:00
JzoNg
bdf1e9ed3b form content 2025-08-12 13:59:28 +08:00
JzoNg
36acd0b9dd form submit 2025-08-12 10:58:23 +08:00
JzoNg
a4049e1ea7 api of human input form 2025-08-12 10:21:02 +08:00
JzoNg
114dfe038c human input form 2025-08-11 17:58:03 +08:00
Joel
81f6344aaa feat: can update var 2025-08-08 16:16:52 +08:00
Joel
89963ecf59 feat: can remove 2025-08-08 15:08:02 +08:00
Joel
a18bcf3957 feat: pass current value to form input 2025-08-08 14:22:30 +08:00
JzoNg
c28720529e add suspended status 2025-08-08 11:27:11 +08:00
JzoNg
242826013e update validation for human input node 2025-08-08 10:38:32 +08:00
JzoNg
05453cb22f user action validation 2025-08-08 10:14:29 +08:00
JzoNg
da211d3009 timeout value validation 2025-08-08 09:42:21 +08:00
JzoNg
f8a249de03 email config validation 2025-08-07 20:03:53 +08:00
Joel
e2e5dedceb feat: add form content editor 2025-08-07 18:28:17 +08:00
JzoNg
bd7ba85471 fix input focus 2025-08-07 18:21:43 +08:00
JzoNg
7d5f6bc255 email input 2025-08-07 18:13:21 +08:00
JzoNg
fcc8789cc3 member selector 2025-08-07 18:13:21 +08:00
Joel
6da1a48cad chore: notes add form content holder 2025-08-07 17:56:37 +08:00
Joel
465ff7838a fix: move style 2025-08-07 17:25:15 +08:00
Joel
a4bf493343 chore: handle drag ui 2025-08-07 16:56:56 +08:00
Joel
c27f20b4b7 chore: hide tree view 2025-08-07 15:06:13 +08:00
Joel
a9e6140dc6 feat: can drag and drop hitl block 2025-08-07 15:05:44 +08:00
JzoNg
792f28451c timeout new data structure 2025-08-07 10:44:25 +08:00
JzoNg
82530df38f mail body input 2025-08-06 17:04:46 +08:00
JzoNg
ce8325c83c update data structure 2025-08-06 17:02:40 +08:00
JzoNg
3ed561d943 delivery method item 2025-08-06 17:02:40 +08:00
Joel
bb8d54c48b chore: fix hitl not full 2025-08-06 16:59:46 +08:00
Joel
922aeb7c21 feat: node hitl render 2025-08-06 16:56:54 +08:00
Joel
177be06d09 files 2025-08-05 18:31:32 +08:00
Joel
736ec55f86 feat: hitl block 2025-08-05 18:31:18 +08:00
Joel
ab373197f9 feat: i18n 2025-08-05 15:14:17 +08:00
JzoNg
a6d2392c6c method selector 2025-08-01 15:36:02 +08:00
JzoNg
3371989572 method selector 2025-08-01 15:14:27 +08:00
JzoNg
f04daf056d delivery methods 2025-08-01 14:37:48 +08:00
JzoNg
fb6c8fa01f user action add 2025-08-01 14:26:21 +08:00
JzoNg
b02199145e user actions 2025-07-27 16:46:40 +08:00
Joel
e257455c9c feat: input field form 2025-07-25 16:59:09 +08:00
Joel
63af5305e6 chore: template 2025-07-25 15:47:35 +08:00
JzoNg
95b88a0621 add timeout of human input node 2025-07-25 11:42:50 +08:00
JzoNg
1ab02c6e9a human input node style complete 2025-07-25 10:45:56 +08:00
JzoNg
1099ab5d91 update human input node 2025-07-25 08:54:43 +08:00
JzoNg
6485adae35 add human input node 2025-07-25 08:54:43 +08:00
281 changed files with 12636 additions and 8471 deletions

View File

@@ -0,0 +1 @@
../../.agents/skills/component-refactoring

View File

@@ -0,0 +1 @@
../../.agents/skills/frontend-code-review

View File

@@ -0,0 +1 @@
../../.agents/skills/frontend-testing

View File

@@ -0,0 +1 @@
../../.agents/skills/orpc-contract-first

View File

@@ -79,6 +79,29 @@ jobs:
find . -name "*.py" -type f -exec sed -i.bak -E 's/"([^"]+)" \| None/Optional["\1"]/g; s/'"'"'([^'"'"']+)'"'"' \| None/Optional['"'"'\1'"'"']/g' {} \;
find . -name "*.py.bak" -type f -delete
- name: Install pnpm
uses: pnpm/action-setup@v4
with:
package_json_file: web/package.json
run_install: false
- name: Setup Node.js
uses: actions/setup-node@v6
with:
node-version: 24
cache: pnpm
cache-dependency-path: ./web/pnpm-lock.yaml
- name: Install web dependencies
run: |
cd web
pnpm install --frozen-lockfile
- name: ESLint autofix
run: |
cd web
pnpm lint:fix || true
# mdformat breaks YAML front matter in markdown files. Add --exclude for directories containing YAML front matter.
- name: mdformat
run: |
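As an aside, the `sed` step near the top of this hunk rewrites quoted union-with-None annotations into `typing.Optional` form. A rough Python rendering of that rewrite, for illustration only (not part of the workflow):

```python
import re

# Hypothetical Python equivalent of the sed expressions above: turn
# `"X" | None` / `'X' | None` into `Optional["X"]` / `Optional['X']`.
DOUBLE_QUOTED = re.compile(r'"([^"]+)" \| None')
SINGLE_QUOTED = re.compile(r"'([^']+)' \| None")

def rewrite_optionals(source: str) -> str:
    source = DOUBLE_QUOTED.sub(r'Optional["\1"]', source)
    return SINGLE_QUOTED.sub(r"Optional['\1']", source)

print(rewrite_optionals('def load(cfg: "Config" | None): ...'))
# -> def load(cfg: Optional["Config"]): ...
```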

View File

@@ -8,6 +8,7 @@ on:
- "build/**"
- "release/e-*"
- "hotfix/**"
- "feat/hitl-frontend"
tags:
- "*"

View File

@@ -4,7 +4,8 @@ on:
workflow_run:
workflows: ["Build and Push API & Web"]
branches:
- "build/feat/hitl"
- "feat/hitl-frontend"
- "feat/hitl-backend"
types:
- completed
@@ -13,7 +14,10 @@ jobs:
runs-on: ubuntu-latest
if: |
github.event.workflow_run.conclusion == 'success' &&
github.event.workflow_run.head_branch == 'build/feat/hitl'
(
github.event.workflow_run.head_branch == 'feat/hitl-frontend' ||
github.event.workflow_run.head_branch == 'feat/hitl-backend'
)
steps:
- name: Deploy to server
uses: appleboy/ssh-action@v1

View File

@@ -39,7 +39,7 @@ jobs:
run: pnpm install --frozen-lockfile
- name: Run tests
run: pnpm test:ci
run: pnpm test:coverage
- name: Coverage Summary
if: always()

View File

@@ -136,6 +136,7 @@ ignore_imports =
core.workflow.nodes.llm.llm_utils -> models.provider
core.workflow.nodes.llm.llm_utils -> services.credit_pool_service
core.workflow.nodes.llm.node -> core.tools.signature
core.workflow.nodes.template_transform.template_transform_node -> configs
core.workflow.nodes.tool.tool_node -> core.callback_handler.workflow_tool_callback_handler
core.workflow.nodes.tool.tool_node -> core.tools.tool_engine
core.workflow.nodes.tool.tool_node -> core.tools.tool_manager

View File

@@ -122,7 +122,7 @@ These commands assume you start from the repository root.
```bash
cd api
uv run celery -A app.celery worker -P threads -c 2 --loglevel INFO -Q api_token,dataset,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor,retention
uv run celery -A app.celery worker -P threads -c 2 --loglevel INFO -Q dataset,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor,retention
```
1. Optional: start Celery Beat (scheduled tasks, in a new terminal).

View File

@@ -739,10 +739,8 @@ def upgrade_db():
click.echo(click.style("Database migration successful!", fg="green"))
except Exception as e:
except Exception:
logger.exception("Failed to execute database migration")
click.echo(click.style(f"Database migration failed: {e}", fg="red"))
raise SystemExit(1)
finally:
lock.release()
else:

View File

@@ -1155,16 +1155,6 @@ class CeleryScheduleTasksConfig(BaseSettings):
default=0,
)
# API token last_used_at batch update
ENABLE_API_TOKEN_LAST_USED_UPDATE_TASK: bool = Field(
description="Enable periodic batch update of API token last_used_at timestamps",
default=True,
)
API_TOKEN_LAST_USED_UPDATE_INTERVAL: int = Field(
description="Interval in minutes for batch updating API token last_used_at (default 30)",
default=30,
)
# Trigger provider refresh (simple version)
ENABLE_TRIGGER_PROVIDER_REFRESH_TASK: bool = Field(
description="Enable trigger provider refresh poller",

View File

@@ -10,7 +10,6 @@ from libs.helper import TimestampField
from libs.login import current_account_with_tenant, login_required
from models.dataset import Dataset
from models.model import ApiToken, App
from services.api_token_service import ApiTokenCache
from . import console_ns
from .wraps import account_initialization_required, edit_permission_required, setup_required
@@ -132,11 +131,6 @@ class BaseApiKeyResource(Resource):
if key is None:
flask_restx.abort(HTTPStatus.NOT_FOUND, message="API key not found")
# Invalidate cache before deleting from database
# Type assertion: key is guaranteed to be non-None here because abort() raises
assert key is not None # nosec - for type checker only
ApiTokenCache.delete(key.token, key.type)
db.session.query(ApiToken).where(ApiToken.id == api_key_id).delete()
db.session.commit()

View File

@@ -1,4 +1,3 @@
import logging
import uuid
from datetime import datetime
from typing import Any, Literal, TypeAlias
@@ -55,8 +54,6 @@ ALLOW_CREATE_APP_MODES = ["chat", "agent-chat", "advanced-chat", "workflow", "co
register_enum_models(console_ns, IconType)
_logger = logging.getLogger(__name__)
class AppListQuery(BaseModel):
page: int = Field(default=1, ge=1, le=99999, description="Page number (1-99999)")
@@ -502,7 +499,6 @@ class AppListApi(Resource):
select(Workflow).where(
Workflow.version == Workflow.VERSION_DRAFT,
Workflow.app_id.in_(workflow_capable_app_ids),
Workflow.tenant_id == current_tenant_id,
)
)
.scalars()
@@ -514,14 +510,12 @@ class AppListApi(Resource):
NodeType.TRIGGER_PLUGIN,
}
for workflow in draft_workflows:
node_id = None
try:
for node_id, node_data in workflow.walk_nodes():
for _, node_data in workflow.walk_nodes():
if node_data.get("type") in trigger_node_types:
draft_trigger_app_ids.add(str(workflow.app_id))
break
except Exception:
_logger.exception("error while walking nodes, workflow_id=%s, node_id=%s", workflow.id, node_id)
continue
for app in app_pagination.items:

View File

@@ -55,7 +55,6 @@ from libs.login import current_account_with_tenant, login_required
from models import ApiToken, Dataset, Document, DocumentSegment, UploadFile
from models.dataset import DatasetPermissionEnum
from models.provider_ids import ModelProviderID
from services.api_token_service import ApiTokenCache
from services.dataset_service import DatasetPermissionService, DatasetService, DocumentService
# Register models for flask_restx to avoid dict type issues in Swagger
@@ -821,11 +820,6 @@ class DatasetApiDeleteApi(Resource):
if key is None:
console_ns.abort(404, message="API key not found")
# Invalidate cache before deleting from database
# Type assertion: key is guaranteed to be non-None here because abort() raises
assert key is not None # nosec - for type checker only
ApiTokenCache.delete(key.token, key.type)
db.session.query(ApiToken).where(ApiToken.id == api_key_id).delete()
db.session.commit()

View File

@@ -120,7 +120,7 @@ class TagUpdateDeleteApi(Resource):
TagService.delete_tag(tag_id)
return "", 204
return 204
@console_ns.route("/tag-bindings/create")

View File

@@ -396,7 +396,7 @@ class DatasetApi(DatasetApiResource):
try:
if DatasetService.delete_dataset(dataset_id_str, current_user):
DatasetPermissionService.clear_partial_member_list(dataset_id_str)
return "", 204
return 204
else:
raise NotFound("Dataset not found.")
except services.errors.dataset.DatasetInUseError:
@@ -557,7 +557,7 @@ class DatasetTagsApi(DatasetApiResource):
payload = TagDeletePayload.model_validate(service_api_ns.payload or {})
TagService.delete_tag(payload.tag_id)
return "", 204
return 204
@service_api_ns.route("/datasets/tags/binding")
@@ -581,7 +581,7 @@ class DatasetTagBindingApi(DatasetApiResource):
payload = TagBindingPayload.model_validate(service_api_ns.payload or {})
TagService.save_tag_binding({"tag_ids": payload.tag_ids, "target_id": payload.target_id, "type": "knowledge"})
return "", 204
return 204
@service_api_ns.route("/datasets/tags/unbinding")
@@ -605,7 +605,7 @@ class DatasetTagUnbindingApi(DatasetApiResource):
payload = TagUnbindingPayload.model_validate(service_api_ns.payload or {})
TagService.delete_tag_binding({"tag_id": payload.tag_id, "target_id": payload.target_id, "type": "knowledge"})
return "", 204
return 204
@service_api_ns.route("/datasets/<uuid:dataset_id>/tags")

View File

@@ -746,4 +746,4 @@ class DocumentApi(DatasetApiResource):
except services.errors.document.DocumentIndexingError:
raise DocumentIndexingError("Cannot delete document during indexing.")
return "", 204
return 204

View File

@@ -128,7 +128,7 @@ class DatasetMetadataServiceApi(DatasetApiResource):
DatasetService.check_dataset_permission(dataset, current_user)
MetadataService.delete_metadata(dataset_id_str, metadata_id_str)
return "", 204
return 204
@service_api_ns.route("/datasets/<uuid:dataset_id>/metadata/built-in")

View File

@@ -233,7 +233,7 @@ class DatasetSegmentApi(DatasetApiResource):
if not segment:
raise NotFound("Segment not found.")
SegmentService.delete_segment(segment, document, dataset)
return "", 204
return 204
@service_api_ns.expect(service_api_ns.models[SegmentUpdatePayload.__name__])
@service_api_ns.doc("update_segment")
@@ -499,7 +499,7 @@ class DatasetChildChunkApi(DatasetApiResource):
except ChildChunkDeleteIndexServiceError as e:
raise ChildChunkDeleteIndexError(str(e))
return "", 204
return 204
@service_api_ns.expect(service_api_ns.models[ChildChunkUpdatePayload.__name__])
@service_api_ns.doc("update_child_chunk")

View File

@@ -1,24 +1,27 @@
import logging
import time
from collections.abc import Callable
from datetime import timedelta
from enum import StrEnum, auto
from functools import wraps
from typing import Concatenate, ParamSpec, TypeVar, cast
from typing import Concatenate, ParamSpec, TypeVar
from flask import current_app, request
from flask_login import user_logged_in
from flask_restx import Resource
from pydantic import BaseModel
from sqlalchemy import select, update
from sqlalchemy.orm import Session
from werkzeug.exceptions import Forbidden, NotFound, Unauthorized
from enums.cloud_plan import CloudPlan
from extensions.ext_database import db
from extensions.ext_redis import redis_client
from libs.datetime_utils import naive_utc_now
from libs.login import current_user
from models import Account, Tenant, TenantAccountJoin, TenantStatus
from models.dataset import Dataset, RateLimitLog
from models.model import ApiToken, App
from services.api_token_service import ApiTokenCache, fetch_token_with_single_flight, record_token_usage
from services.end_user_service import EndUserService
from services.feature_service import FeatureService
@@ -293,14 +296,7 @@ def validate_dataset_token(view: Callable[Concatenate[T, P], R] | None = None):
def validate_and_get_api_token(scope: str | None = None):
"""
Validate and get API token with Redis caching.
This function uses a two-tier approach:
1. First checks Redis cache for the token
2. If not cached, queries database and caches the result
The last_used_at field is updated asynchronously via Celery task
to avoid blocking the request.
Validate and get API token.
"""
auth_header = request.headers.get("Authorization")
if auth_header is None or " " not in auth_header:
@@ -312,18 +308,29 @@ def validate_and_get_api_token(scope: str | None = None):
if auth_scheme != "bearer":
raise Unauthorized("Authorization scheme must be 'Bearer'")
# Try to get token from cache first
# Returns a CachedApiToken (plain Python object), not a SQLAlchemy model
cached_token = ApiTokenCache.get(auth_token, scope)
if cached_token is not None:
logger.debug("Token validation served from cache for scope: %s", scope)
# Record usage in Redis for later batch update (no Celery task per request)
record_token_usage(auth_token, scope)
return cast(ApiToken, cached_token)
current_time = naive_utc_now()
cutoff_time = current_time - timedelta(minutes=1)
with Session(db.engine, expire_on_commit=False) as session:
update_stmt = (
update(ApiToken)
.where(
ApiToken.token == auth_token,
(ApiToken.last_used_at.is_(None) | (ApiToken.last_used_at < cutoff_time)),
ApiToken.type == scope,
)
.values(last_used_at=current_time)
)
stmt = select(ApiToken).where(ApiToken.token == auth_token, ApiToken.type == scope)
result = session.execute(update_stmt)
api_token = session.scalar(stmt)
# Cache miss - use Redis lock for single-flight mode
# This ensures only one request queries DB for the same token concurrently
return fetch_token_with_single_flight(auth_token, scope)
if hasattr(result, "rowcount") and result.rowcount > 0:
session.commit()
if not api_token:
raise Unauthorized("Access token is invalid")
return api_token
class DatasetApiResource(Resource):
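A distilled sketch of the write-throttling idea used by the new validation path above: `last_used_at` is only written when the stored value is missing or older than a one-minute cutoff, so hot tokens cost at most one UPDATE per minute (names assumed for illustration):

```python
from datetime import datetime, timedelta

from sqlalchemy import select, update
from sqlalchemy.orm import Session

def validate_token(engine, ApiToken, auth_token: str, scope: str | None):
    now = datetime.utcnow()
    cutoff = now - timedelta(minutes=1)
    with Session(engine, expire_on_commit=False) as session:
        # Conditional UPDATE: a no-op for tokens already touched within the last minute.
        throttled_touch = (
            update(ApiToken)
            .where(
                ApiToken.token == auth_token,
                ApiToken.type == scope,
                ApiToken.last_used_at.is_(None) | (ApiToken.last_used_at < cutoff),
            )
            .values(last_used_at=now)
        )
        result = session.execute(throttled_touch)
        token = session.scalar(
            select(ApiToken).where(ApiToken.token == auth_token, ApiToken.type == scope)
        )
        if result.rowcount > 0:
            session.commit()  # persist the timestamp only when a row actually changed
        return token  # None means the credential is invalid
```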

View File

@@ -47,7 +47,6 @@ class DifyNodeFactory(NodeFactory):
code_providers: Sequence[type[CodeNodeProvider]] | None = None,
code_limits: CodeNodeLimits | None = None,
template_renderer: Jinja2TemplateRenderer | None = None,
template_transform_max_output_length: int | None = None,
http_request_http_client: HttpClientProtocol | None = None,
http_request_tool_file_manager_factory: Callable[[], ToolFileManager] = ToolFileManager,
http_request_file_manager: FileManagerProtocol | None = None,
@@ -69,9 +68,6 @@ class DifyNodeFactory(NodeFactory):
max_object_array_length=dify_config.CODE_MAX_OBJECT_ARRAY_LENGTH,
)
self._template_renderer = template_renderer or CodeExecutorJinja2TemplateRenderer()
self._template_transform_max_output_length = (
template_transform_max_output_length or dify_config.TEMPLATE_TRANSFORM_MAX_LENGTH
)
self._http_request_http_client = http_request_http_client or ssrf_proxy
self._http_request_tool_file_manager_factory = http_request_tool_file_manager_factory
self._http_request_file_manager = http_request_file_manager or file_manager
@@ -126,7 +122,6 @@ class DifyNodeFactory(NodeFactory):
graph_init_params=self.graph_init_params,
graph_runtime_state=self.graph_runtime_state,
template_renderer=self._template_renderer,
max_output_length=self._template_transform_max_output_length,
)
if node_type == NodeType.HTTP_REQUEST:

View File

@@ -6,8 +6,7 @@ from yarl import URL
from configs import dify_config
from core.helper.download import download_with_size_limit
from core.plugin.entities.marketplace import MarketplacePluginDeclaration, MarketplacePluginSnapshot
from extensions.ext_redis import redis_client
from core.plugin.entities.marketplace import MarketplacePluginDeclaration
marketplace_api_url = URL(str(dify_config.MARKETPLACE_API_URL))
logger = logging.getLogger(__name__)
@@ -44,37 +43,28 @@ def batch_fetch_plugin_by_ids(plugin_ids: list[str]) -> list[dict]:
return data.get("data", {}).get("plugins", [])
def batch_fetch_plugin_manifests_ignore_deserialization_error(
plugin_ids: list[str],
) -> Sequence[MarketplacePluginDeclaration]:
if len(plugin_ids) == 0:
return []
url = str(marketplace_api_url / "api/v1/plugins/batch")
response = httpx.post(url, json={"plugin_ids": plugin_ids}, headers={"X-Dify-Version": dify_config.project.version})
response.raise_for_status()
result: list[MarketplacePluginDeclaration] = []
for plugin in response.json()["data"]["plugins"]:
try:
result.append(MarketplacePluginDeclaration.model_validate(plugin))
except Exception:
logger.exception(
"Failed to deserialize marketplace plugin manifest for %s", plugin.get("plugin_id", "unknown")
)
return result
def record_install_plugin_event(plugin_unique_identifier: str):
url = str(marketplace_api_url / "api/v1/stats/plugins/install_count")
response = httpx.post(url, json={"unique_identifier": plugin_unique_identifier})
response.raise_for_status()
def fetch_global_plugin_manifest(cache_key_prefix: str, cache_ttl: int) -> None:
"""
Fetch all plugin manifests from marketplace and cache them in Redis.
This should be called once per check cycle to populate the instance-level cache.
Args:
cache_key_prefix: Redis key prefix for caching plugin manifests
cache_ttl: Cache TTL in seconds
Raises:
httpx.HTTPError: If the HTTP request fails
Exception: If any other error occurs during fetching or caching
"""
url = str(marketplace_api_url / "api/v1/dist/plugins/manifest.json")
response = httpx.get(url, headers={"X-Dify-Version": dify_config.project.version}, timeout=30)
response.raise_for_status()
raw_json = response.json()
plugins_data = raw_json.get("plugins", [])
# Parse and cache all plugin snapshots
for plugin_data in plugins_data:
plugin_snapshot = MarketplacePluginSnapshot.model_validate(plugin_data)
redis_client.setex(
name=f"{cache_key_prefix}{plugin_snapshot.plugin_id}",
time=cache_ttl,
value=plugin_snapshot.model_dump_json(),
)

View File

@@ -1,4 +1,4 @@
from pydantic import BaseModel, Field, computed_field, model_validator
from pydantic import BaseModel, Field, model_validator
from core.model_runtime.entities.provider_entities import ProviderEntity
from core.plugin.entities.endpoint import EndpointProviderDeclaration
@@ -48,15 +48,3 @@ class MarketplacePluginDeclaration(BaseModel):
if "tool" in data and not data["tool"]:
del data["tool"]
return data
class MarketplacePluginSnapshot(BaseModel):
org: str
name: str
latest_version: str
latest_package_identifier: str
latest_package_url: str
@computed_field
def plugin_id(self) -> str:
return f"{self.org}/{self.name}"

View File

@@ -112,7 +112,7 @@ class ArrayBooleanVariable(ArrayBooleanSegment, ArrayVariable):
class RAGPipelineVariable(BaseModel):
belong_to_node_id: str = Field(description="belong to which node id, shared means public")
type: str = Field(description="variable type, text-input, paragraph, select, number, file, file-list")
type: str = Field(description="variable type, text-input, paragraph, select, number, file, file-list")
label: str = Field(description="label")
description: str | None = Field(description="description", default="")
variable: str = Field(description="variable key", default="")

View File

@@ -1,6 +1,7 @@
from collections.abc import Mapping, Sequence
from typing import TYPE_CHECKING, Any
from configs import dify_config
from core.workflow.enums import NodeType, WorkflowNodeExecutionStatus
from core.workflow.node_events import NodeRunResult
from core.workflow.nodes.base.node import Node
@@ -15,13 +16,12 @@ if TYPE_CHECKING:
from core.workflow.entities import GraphInitParams
from core.workflow.runtime import GraphRuntimeState
DEFAULT_TEMPLATE_TRANSFORM_MAX_OUTPUT_LENGTH = 400_000
MAX_TEMPLATE_TRANSFORM_OUTPUT_LENGTH = dify_config.TEMPLATE_TRANSFORM_MAX_LENGTH
class TemplateTransformNode(Node[TemplateTransformNodeData]):
node_type = NodeType.TEMPLATE_TRANSFORM
_template_renderer: Jinja2TemplateRenderer
_max_output_length: int
def __init__(
self,
@@ -31,7 +31,6 @@ class TemplateTransformNode(Node[TemplateTransformNodeData]):
graph_runtime_state: "GraphRuntimeState",
*,
template_renderer: Jinja2TemplateRenderer | None = None,
max_output_length: int | None = None,
) -> None:
super().__init__(
id=id,
@@ -41,10 +40,6 @@ class TemplateTransformNode(Node[TemplateTransformNodeData]):
)
self._template_renderer = template_renderer or CodeExecutorJinja2TemplateRenderer()
if max_output_length is not None and max_output_length <= 0:
raise ValueError("max_output_length must be a positive integer")
self._max_output_length = max_output_length or DEFAULT_TEMPLATE_TRANSFORM_MAX_OUTPUT_LENGTH
@classmethod
def get_default_config(cls, filters: Mapping[str, object] | None = None) -> Mapping[str, object]:
"""
@@ -74,11 +69,11 @@ class TemplateTransformNode(Node[TemplateTransformNodeData]):
except TemplateRenderError as e:
return NodeRunResult(inputs=variables, status=WorkflowNodeExecutionStatus.FAILED, error=str(e))
if len(rendered) > self._max_output_length:
if len(rendered) > MAX_TEMPLATE_TRANSFORM_OUTPUT_LENGTH:
return NodeRunResult(
inputs=variables,
status=WorkflowNodeExecutionStatus.FAILED,
error=f"Output length exceeds {self._max_output_length} characters",
error=f"Output length exceeds {MAX_TEMPLATE_TRANSFORM_OUTPUT_LENGTH} characters",
)
return NodeRunResult(

View File

@@ -35,10 +35,10 @@ if [[ "${MODE}" == "worker" ]]; then
if [[ -z "${CELERY_QUEUES}" ]]; then
if [[ "${EDITION}" == "CLOUD" ]]; then
# Cloud edition: separate queues for dataset and trigger tasks
DEFAULT_QUEUES="api_token,dataset,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow_professional,workflow_team,workflow_sandbox,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor,retention"
DEFAULT_QUEUES="dataset,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow_professional,workflow_team,workflow_sandbox,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor,retention"
else
# Community edition (SELF_HOSTED): dataset, pipeline and workflow have separate queues
DEFAULT_QUEUES="api_token,dataset,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor,retention"
DEFAULT_QUEUES="dataset,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor,retention"
fi
else
DEFAULT_QUEUES="${CELERY_QUEUES}"

View File

@@ -184,14 +184,6 @@ def init_app(app: DifyApp) -> Celery:
"task": "schedule.trigger_provider_refresh_task.trigger_provider_refresh",
"schedule": timedelta(minutes=dify_config.TRIGGER_PROVIDER_REFRESH_INTERVAL),
}
if dify_config.ENABLE_API_TOKEN_LAST_USED_UPDATE_TASK:
imports.append("schedule.update_api_token_last_used_task")
beat_schedule["batch_update_api_token_last_used"] = {
"task": "schedule.update_api_token_last_used_task.batch_update_api_token_last_used",
"schedule": timedelta(minutes=dify_config.API_TOKEN_LAST_USED_UPDATE_INTERVAL),
}
celery_app.conf.update(beat_schedule=beat_schedule, imports=imports)
return celery_app

View File

@@ -10,10 +10,6 @@ import models as models
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '7df29de0f6be'
down_revision = '03ea244985ce'
@@ -23,31 +19,16 @@ depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
if _is_pg(conn):
op.create_table('tenant_credit_pools',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('pool_type', sa.String(length=40), server_default='trial', nullable=False),
sa.Column('quota_limit', sa.BigInteger(), nullable=False),
sa.Column('quota_used', sa.BigInteger(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.PrimaryKeyConstraint('id', name='tenant_credit_pool_pkey')
)
else:
# For MySQL and other databases, UUID should be generated at application level
op.create_table('tenant_credit_pools',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('pool_type', sa.String(length=40), server_default='trial', nullable=False),
sa.Column('quota_limit', sa.BigInteger(), nullable=False),
sa.Column('quota_used', sa.BigInteger(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.PrimaryKeyConstraint('id', name='tenant_credit_pool_pkey')
)
op.create_table('tenant_credit_pools',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('pool_type', sa.String(length=40), server_default='trial', nullable=False),
sa.Column('quota_limit', sa.BigInteger(), nullable=False),
sa.Column('quota_used', sa.BigInteger(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.PrimaryKeyConstraint('id', name='tenant_credit_pool_pkey')
)
with op.batch_alter_table('tenant_credit_pools', schema=None) as batch_op:
batch_op.create_index('tenant_credit_pool_pool_type_idx', ['pool_type'], unique=False)
batch_op.create_index('tenant_credit_pool_tenant_id_idx', ['tenant_id'], unique=False)

View File

@@ -2166,9 +2166,7 @@ class TenantCreditPool(TypeBase):
sa.Index("tenant_credit_pool_pool_type_idx", "pool_type"),
)
id: Mapped[str] = mapped_column(
StringUUID, insert_default=lambda: str(uuid4()), default_factory=lambda: str(uuid4()), init=False
)
id: Mapped[str] = mapped_column(StringUUID, primary_key=True, server_default=text("uuid_generate_v4()"), init=False)
tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
pool_type: Mapped[str] = mapped_column(String(40), nullable=False, default="trial", server_default="trial")
quota_limit: Mapped[int] = mapped_column(BigInteger, nullable=False, default=0)
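The `id` column change above swaps a Python-side UUID default for a database-side one. A minimal, hypothetical sketch of the two styles side by side (table names invented for illustration):

```python
import uuid

from sqlalchemy import text
from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column

class Base(DeclarativeBase):
    pass

class ClientSideDefault(Base):
    __tablename__ = "client_side_default_demo"
    # UUID generated in Python just before the INSERT is emitted.
    id: Mapped[str] = mapped_column(
        UUID(as_uuid=False), primary_key=True, default=lambda: str(uuid.uuid4())
    )

class ServerSideDefault(Base):
    __tablename__ = "server_side_default_demo"
    # UUID generated by PostgreSQL itself (requires the uuid-ossp extension).
    id: Mapped[str] = mapped_column(
        UUID(as_uuid=False), primary_key=True, server_default=text("uuid_generate_v4()")
    )
```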

View File

@@ -1,24 +1,16 @@
import logging
import math
import time
import click
import app
from core.helper.marketplace import fetch_global_plugin_manifest
from extensions.ext_database import db
from models.account import TenantPluginAutoUpgradeStrategy
from tasks import process_tenant_plugin_autoupgrade_check_task as check_task
logger = logging.getLogger(__name__)
AUTO_UPGRADE_MINIMAL_CHECKING_INTERVAL = 15 * 60 # 15 minutes
MAX_CONCURRENT_CHECK_TASKS = 20
# Import cache constants from the task module
CACHE_REDIS_KEY_PREFIX = check_task.CACHE_REDIS_KEY_PREFIX
CACHE_REDIS_TTL = check_task.CACHE_REDIS_TTL
@app.celery.task(queue="plugin")
def check_upgradable_plugin_task():
@@ -48,22 +40,6 @@ def check_upgradable_plugin_task():
) # make sure all strategies are checked in this interval
batch_interval_time = (AUTO_UPGRADE_MINIMAL_CHECKING_INTERVAL / batch_chunk_count) if batch_chunk_count > 0 else 0
if total_strategies == 0:
click.echo(click.style("no strategies to process, skipping plugin manifest fetch.", fg="green"))
return
# Fetch and cache all plugin manifests before processing tenants
# This reduces load on marketplace from 300k requests to 1 request per check cycle
logger.info("fetching global plugin manifest from marketplace")
try:
fetch_global_plugin_manifest(CACHE_REDIS_KEY_PREFIX, CACHE_REDIS_TTL)
logger.info("successfully fetched and cached global plugin manifest")
except Exception as e:
logger.exception("failed to fetch global plugin manifest")
click.echo(click.style(f"failed to fetch global plugin manifest: {e}", fg="red"))
click.echo(click.style("skipping plugin upgrade check for this cycle", fg="yellow"))
return
for i in range(0, total_strategies, MAX_CONCURRENT_CHECK_TASKS):
batch_strategies = strategies[i : i + MAX_CONCURRENT_CHECK_TASKS]
for strategy in batch_strategies:

View File

@@ -1,114 +0,0 @@
"""
Scheduled task to batch-update API token last_used_at timestamps.
Instead of updating the database on every request, token usage is recorded
in Redis as lightweight SET keys (api_token_active:{scope}:{token}).
This task runs periodically (default every 30 minutes) to flush those
records into the database in a single batch operation.
"""
import logging
import time
from datetime import datetime
import click
from sqlalchemy import update
from sqlalchemy.orm import Session
import app
from extensions.ext_database import db
from extensions.ext_redis import redis_client
from models.model import ApiToken
from services.api_token_service import ACTIVE_TOKEN_KEY_PREFIX
logger = logging.getLogger(__name__)
@app.celery.task(queue="api_token")
def batch_update_api_token_last_used():
"""
Batch update last_used_at for all recently active API tokens.
Scans Redis for api_token_active:* keys, parses the token and scope
from each key, and performs a batch database update.
"""
click.echo(click.style("batch_update_api_token_last_used: start.", fg="green"))
start_at = time.perf_counter()
updated_count = 0
scanned_count = 0
try:
# Collect all active token keys and their values (the actual usage timestamps)
token_entries: list[tuple[str, str | None, datetime]] = [] # (token, scope, usage_time)
keys_to_delete: list[str | bytes] = []
for key in redis_client.scan_iter(match=f"{ACTIVE_TOKEN_KEY_PREFIX}*", count=200):
if isinstance(key, bytes):
key = key.decode("utf-8")
scanned_count += 1
# Read the value (ISO timestamp recorded at actual request time)
value = redis_client.get(key)
if not value:
keys_to_delete.append(key)
continue
if isinstance(value, bytes):
value = value.decode("utf-8")
try:
usage_time = datetime.fromisoformat(value)
except (ValueError, TypeError):
logger.warning("Invalid timestamp in key %s: %s", key, value)
keys_to_delete.append(key)
continue
# Parse token info from key: api_token_active:{scope}:{token}
suffix = key[len(ACTIVE_TOKEN_KEY_PREFIX) :]
parts = suffix.split(":", 1)
if len(parts) == 2:
scope_str, token = parts
scope = None if scope_str == "None" else scope_str
token_entries.append((token, scope, usage_time))
keys_to_delete.append(key)
if not token_entries:
click.echo(click.style("batch_update_api_token_last_used: no active tokens found.", fg="yellow"))
# Still clean up any invalid keys
if keys_to_delete:
redis_client.delete(*keys_to_delete)
return
# Update each token in its own short transaction to avoid long transactions
for token, scope, usage_time in token_entries:
with Session(db.engine, expire_on_commit=False) as session, session.begin():
stmt = (
update(ApiToken)
.where(
ApiToken.token == token,
ApiToken.type == scope,
(ApiToken.last_used_at.is_(None) | (ApiToken.last_used_at < usage_time)),
)
.values(last_used_at=usage_time)
)
result = session.execute(stmt)
rowcount = getattr(result, "rowcount", 0)
if rowcount > 0:
updated_count += 1
# Delete processed keys from Redis
if keys_to_delete:
redis_client.delete(*keys_to_delete)
except Exception:
logger.exception("batch_update_api_token_last_used failed")
elapsed = time.perf_counter() - start_at
click.echo(
click.style(
f"batch_update_api_token_last_used: done. "
f"scanned={scanned_count}, updated={updated_count}, elapsed={elapsed:.2f}s",
fg="green",
)
)
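For reference, the removed task above pairs with a per-request recorder: each request writes one lightweight Redis key, and the scheduled task later flushes those keys into the database. A distilled sketch of that pattern (key prefix taken from the file, everything else assumed):

```python
from datetime import datetime, timezone

import redis

r = redis.Redis()
PREFIX = "api_token_active:"

def record_usage(token: str, scope: str | None) -> None:
    # Per request: one cheap SET with a TTL instead of a database write.
    r.set(f"{PREFIX}{scope}:{token}", datetime.now(timezone.utc).isoformat(), ex=3600)

def flush_usage() -> list[tuple[str, str, datetime]]:
    # Periodically: scan the keys, parse scope/token/timestamp, then batch-update
    # last_used_at in the database (database part omitted here).
    entries: list[tuple[str, str, datetime]] = []
    for key in r.scan_iter(match=f"{PREFIX}*", count=200):
        key = key.decode() if isinstance(key, bytes) else key
        value = r.get(key)
        if not value:
            continue
        scope, token = key[len(PREFIX):].split(":", 1)
        entries.append((token, scope, datetime.fromisoformat(value.decode())))
        r.delete(key)
    return entries
```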

View File

@@ -1,330 +0,0 @@
"""
API Token Service
Handles all API token caching, validation, and usage recording.
Includes Redis cache operations, database queries, and single-flight concurrency control.
"""
import logging
from datetime import datetime
from typing import Any
from pydantic import BaseModel
from sqlalchemy import select
from sqlalchemy.orm import Session
from werkzeug.exceptions import Unauthorized
from extensions.ext_database import db
from extensions.ext_redis import redis_client, redis_fallback
from libs.datetime_utils import naive_utc_now
from models.model import ApiToken
logger = logging.getLogger(__name__)
# ---------------------------------------------------------------------
# Pydantic DTO
# ---------------------------------------------------------------------
class CachedApiToken(BaseModel):
"""
Pydantic model for cached API token data.
This is NOT a SQLAlchemy model instance, but a plain Pydantic model
that mimics the ApiToken model interface for read-only access.
"""
id: str
app_id: str | None
tenant_id: str | None
type: str
token: str
last_used_at: datetime | None
created_at: datetime | None
def __repr__(self) -> str:
return f"<CachedApiToken id={self.id} type={self.type}>"
# ---------------------------------------------------------------------
# Cache configuration
# ---------------------------------------------------------------------
CACHE_KEY_PREFIX = "api_token"
CACHE_TTL_SECONDS = 600 # 10 minutes
CACHE_NULL_TTL_SECONDS = 60 # 1 minute for non-existent tokens
ACTIVE_TOKEN_KEY_PREFIX = "api_token_active:"
# ---------------------------------------------------------------------
# Cache class
# ---------------------------------------------------------------------
class ApiTokenCache:
"""
Redis cache wrapper for API tokens.
Handles serialization, deserialization, and cache invalidation.
"""
@staticmethod
def make_active_key(token: str, scope: str | None = None) -> str:
"""Generate Redis key for recording token usage."""
return f"{ACTIVE_TOKEN_KEY_PREFIX}{scope}:{token}"
@staticmethod
def _make_tenant_index_key(tenant_id: str) -> str:
"""Generate Redis key for tenant token index."""
return f"tenant_tokens:{tenant_id}"
@staticmethod
def _make_cache_key(token: str, scope: str | None = None) -> str:
"""Generate cache key for the given token and scope."""
scope_str = scope or "any"
return f"{CACHE_KEY_PREFIX}:{scope_str}:{token}"
@staticmethod
def _serialize_token(api_token: Any) -> bytes:
"""Serialize ApiToken object to JSON bytes."""
if isinstance(api_token, CachedApiToken):
return api_token.model_dump_json().encode("utf-8")
cached = CachedApiToken(
id=str(api_token.id),
app_id=str(api_token.app_id) if api_token.app_id else None,
tenant_id=str(api_token.tenant_id) if api_token.tenant_id else None,
type=api_token.type,
token=api_token.token,
last_used_at=api_token.last_used_at,
created_at=api_token.created_at,
)
return cached.model_dump_json().encode("utf-8")
@staticmethod
def _deserialize_token(cached_data: bytes | str) -> Any:
"""Deserialize JSON bytes/string back to a CachedApiToken Pydantic model."""
if cached_data in {b"null", "null"}:
return None
try:
if isinstance(cached_data, bytes):
cached_data = cached_data.decode("utf-8")
return CachedApiToken.model_validate_json(cached_data)
except (ValueError, Exception) as e:
logger.warning("Failed to deserialize token from cache: %s", e)
return None
@staticmethod
@redis_fallback(default_return=None)
def get(token: str, scope: str | None) -> Any | None:
"""Get API token from cache."""
cache_key = ApiTokenCache._make_cache_key(token, scope)
cached_data = redis_client.get(cache_key)
if cached_data is None:
logger.debug("Cache miss for token key: %s", cache_key)
return None
logger.debug("Cache hit for token key: %s", cache_key)
return ApiTokenCache._deserialize_token(cached_data)
@staticmethod
def _add_to_tenant_index(tenant_id: str | None, cache_key: str) -> None:
"""Add cache key to tenant index for efficient invalidation."""
if not tenant_id:
return
try:
index_key = ApiTokenCache._make_tenant_index_key(tenant_id)
redis_client.sadd(index_key, cache_key)
redis_client.expire(index_key, CACHE_TTL_SECONDS + 60)
except Exception as e:
logger.warning("Failed to update tenant index: %s", e)
@staticmethod
def _remove_from_tenant_index(tenant_id: str | None, cache_key: str) -> None:
"""Remove cache key from tenant index."""
if not tenant_id:
return
try:
index_key = ApiTokenCache._make_tenant_index_key(tenant_id)
redis_client.srem(index_key, cache_key)
except Exception as e:
logger.warning("Failed to remove from tenant index: %s", e)
@staticmethod
@redis_fallback(default_return=False)
def set(token: str, scope: str | None, api_token: Any | None, ttl: int = CACHE_TTL_SECONDS) -> bool:
"""Set API token in cache."""
cache_key = ApiTokenCache._make_cache_key(token, scope)
if api_token is None:
cached_value = b"null"
ttl = CACHE_NULL_TTL_SECONDS
else:
cached_value = ApiTokenCache._serialize_token(api_token)
try:
redis_client.setex(cache_key, ttl, cached_value)
if api_token is not None and hasattr(api_token, "tenant_id"):
ApiTokenCache._add_to_tenant_index(api_token.tenant_id, cache_key)
logger.debug("Cached token with key: %s, ttl: %ss", cache_key, ttl)
return True
except Exception as e:
logger.warning("Failed to cache token: %s", e)
return False
@staticmethod
@redis_fallback(default_return=False)
def delete(token: str, scope: str | None = None) -> bool:
"""Delete API token from cache."""
if scope is None:
pattern = f"{CACHE_KEY_PREFIX}:*:{token}"
try:
keys_to_delete = list(redis_client.scan_iter(match=pattern))
if keys_to_delete:
redis_client.delete(*keys_to_delete)
logger.info("Deleted %d cache entries for token", len(keys_to_delete))
return True
except Exception as e:
logger.warning("Failed to delete token cache with pattern: %s", e)
return False
else:
cache_key = ApiTokenCache._make_cache_key(token, scope)
try:
tenant_id = None
try:
cached_data = redis_client.get(cache_key)
if cached_data and cached_data != b"null":
cached_token = ApiTokenCache._deserialize_token(cached_data)
if cached_token:
tenant_id = cached_token.tenant_id
except Exception as e:
logger.debug("Failed to get tenant_id for cache cleanup: %s", e)
redis_client.delete(cache_key)
if tenant_id:
ApiTokenCache._remove_from_tenant_index(tenant_id, cache_key)
logger.info("Deleted cache for key: %s", cache_key)
return True
except Exception as e:
logger.warning("Failed to delete token cache: %s", e)
return False
@staticmethod
@redis_fallback(default_return=False)
def invalidate_by_tenant(tenant_id: str) -> bool:
"""Invalidate all API token caches for a specific tenant via tenant index."""
try:
index_key = ApiTokenCache._make_tenant_index_key(tenant_id)
cache_keys = redis_client.smembers(index_key)
if cache_keys:
deleted_count = 0
for cache_key in cache_keys:
if isinstance(cache_key, bytes):
cache_key = cache_key.decode("utf-8")
redis_client.delete(cache_key)
deleted_count += 1
redis_client.delete(index_key)
logger.info(
"Invalidated %d token cache entries for tenant: %s",
deleted_count,
tenant_id,
)
else:
logger.info(
"No tenant index found for %s, relying on TTL expiration",
tenant_id,
)
return True
except Exception as e:
logger.warning("Failed to invalidate tenant token cache: %s", e)
return False
# ---------------------------------------------------------------------
# Token usage recording (for batch update)
# ---------------------------------------------------------------------
def record_token_usage(auth_token: str, scope: str | None) -> None:
"""
Record token usage in Redis for later batch update by a scheduled job.
Instead of dispatching a Celery task per request, we simply SET a key in Redis.
A Celery Beat scheduled task will periodically scan these keys and batch-update
last_used_at in the database.
"""
try:
key = ApiTokenCache.make_active_key(auth_token, scope)
redis_client.set(key, naive_utc_now().isoformat(), ex=3600)
except Exception as e:
logger.warning("Failed to record token usage: %s", e)
# ---------------------------------------------------------------------
# Database query + single-flight
# ---------------------------------------------------------------------
def query_token_from_db(auth_token: str, scope: str | None) -> ApiToken:
"""
Query API token from database and cache the result.
Raises Unauthorized if token is invalid.
"""
with Session(db.engine, expire_on_commit=False) as session:
stmt = select(ApiToken).where(ApiToken.token == auth_token, ApiToken.type == scope)
api_token = session.scalar(stmt)
if not api_token:
ApiTokenCache.set(auth_token, scope, None)
raise Unauthorized("Access token is invalid")
ApiTokenCache.set(auth_token, scope, api_token)
record_token_usage(auth_token, scope)
return api_token
def fetch_token_with_single_flight(auth_token: str, scope: str | None) -> ApiToken | Any:
"""
Fetch token from DB with single-flight pattern using Redis lock.
Ensures only one concurrent request queries the database for the same token.
Falls back to direct query if lock acquisition fails.
"""
logger.debug("Token cache miss, attempting to acquire query lock for scope: %s", scope)
lock_key = f"api_token_query_lock:{scope}:{auth_token}"
lock = redis_client.lock(lock_key, timeout=10, blocking_timeout=5)
try:
if lock.acquire(blocking=True):
try:
cached_token = ApiTokenCache.get(auth_token, scope)
if cached_token is not None:
logger.debug("Token cached by concurrent request, using cached version")
return cached_token
return query_token_from_db(auth_token, scope)
finally:
lock.release()
else:
logger.warning("Lock timeout for token: %s, proceeding with direct query", auth_token[:10])
return query_token_from_db(auth_token, scope)
except Unauthorized:
raise
except Exception as e:
logger.warning("Redis lock failed for token query: %s, proceeding anyway", e)
return query_token_from_db(auth_token, scope)
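The removed service above also wraps cache misses in a single-flight guard. A minimal sketch of just that idea, with the cache lookup and database query passed in as callables (assumed for illustration):

```python
import redis

r = redis.Redis()

def fetch_with_single_flight(token: str, scope: str | None, get_cached, query_db):
    # Only one concurrent request per token hits the database; the others wait
    # on a Redis lock and then reuse whatever that request cached.
    lock = r.lock(f"api_token_query_lock:{scope}:{token}", timeout=10, blocking_timeout=5)
    if lock.acquire(blocking=True):
        try:
            cached = get_cached(token, scope)  # a concurrent request may have filled the cache
            if cached is not None:
                return cached
            return query_db(token, scope)      # query_db is expected to populate the cache
        finally:
            lock.release()
    # Could not obtain the lock in time: degrade to a direct query.
    return query_db(token, scope)
```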

View File

@@ -14,9 +14,6 @@ from models.model import UploadFile
logger = logging.getLogger(__name__)
# Batch size for database operations to keep transactions short
BATCH_SIZE = 1000
@shared_task(queue="dataset")
def batch_clean_document_task(document_ids: list[str], dataset_id: str, doc_form: str | None, file_ids: list[str]):
@@ -34,179 +31,63 @@ def batch_clean_document_task(document_ids: list[str], dataset_id: str, doc_form
if not doc_form:
raise ValueError("doc_form is required")
storage_keys_to_delete: list[str] = []
index_node_ids: list[str] = []
segment_ids: list[str] = []
total_image_upload_file_ids: list[str] = []
with session_factory.create_session() as session:
try:
dataset = session.query(Dataset).where(Dataset.id == dataset_id).first()
if not dataset:
raise Exception("Document has no dataset")
session.query(DatasetMetadataBinding).where(
DatasetMetadataBinding.dataset_id == dataset_id,
DatasetMetadataBinding.document_id.in_(document_ids),
).delete(synchronize_session=False)
try:
# ============ Step 1: Query segment and file data (short read-only transaction) ============
with session_factory.create_session() as session:
# Get segments info
segments = session.scalars(
select(DocumentSegment).where(DocumentSegment.document_id.in_(document_ids))
).all()
# check segment is exist
if segments:
index_node_ids = [segment.index_node_id for segment in segments]
segment_ids = [segment.id for segment in segments]
index_processor = IndexProcessorFactory(doc_form).init_index_processor()
index_processor.clean(
dataset, index_node_ids, with_keywords=True, delete_child_chunks=True, delete_summaries=True
)
# Collect image file IDs from segment content
for segment in segments:
image_upload_file_ids = get_image_upload_file_ids(segment.content)
total_image_upload_file_ids.extend(image_upload_file_ids)
# Query storage keys for image files
if total_image_upload_file_ids:
image_files = session.scalars(
select(UploadFile).where(UploadFile.id.in_(total_image_upload_file_ids))
).all()
storage_keys_to_delete.extend([f.key for f in image_files if f and f.key])
# Query storage keys for document files
image_files = session.query(UploadFile).where(UploadFile.id.in_(image_upload_file_ids)).all()
for image_file in image_files:
try:
if image_file and image_file.key:
storage.delete(image_file.key)
except Exception:
logger.exception(
"Delete image_files failed when storage deleted, \
image_upload_file_is: %s",
image_file.id,
)
stmt = delete(UploadFile).where(UploadFile.id.in_(image_upload_file_ids))
session.execute(stmt)
session.delete(segment)
if file_ids:
files = session.scalars(select(UploadFile).where(UploadFile.id.in_(file_ids))).all()
storage_keys_to_delete.extend([f.key for f in files if f and f.key])
for file in files:
try:
storage.delete(file.key)
except Exception:
logger.exception("Delete file failed when document deleted, file_id: %s", file.id)
stmt = delete(UploadFile).where(UploadFile.id.in_(file_ids))
session.execute(stmt)
# ============ Step 2: Clean vector index (external service, fresh session for dataset) ============
if index_node_ids:
try:
# Fetch dataset in a fresh session to avoid DetachedInstanceError
with session_factory.create_session() as session:
dataset = session.query(Dataset).where(Dataset.id == dataset_id).first()
if not dataset:
logger.warning("Dataset not found for vector index cleanup, dataset_id: %s", dataset_id)
else:
index_processor = IndexProcessorFactory(doc_form).init_index_processor()
index_processor.clean(
dataset, index_node_ids, with_keywords=True, delete_child_chunks=True, delete_summaries=True
)
except Exception:
logger.exception(
"Failed to clean vector index for dataset_id: %s, document_ids: %s, index_node_ids count: %d",
dataset_id,
document_ids,
len(index_node_ids),
)
session.commit()
# ============ Step 3: Delete metadata binding (separate short transaction) ============
try:
with session_factory.create_session() as session:
deleted_count = (
session.query(DatasetMetadataBinding)
.where(
DatasetMetadataBinding.dataset_id == dataset_id,
DatasetMetadataBinding.document_id.in_(document_ids),
)
.delete(synchronize_session=False)
end_at = time.perf_counter()
logger.info(
click.style(
f"Cleaned documents when documents deleted latency: {end_at - start_at}",
fg="green",
)
session.commit()
logger.debug("Deleted %d metadata bindings for dataset_id: %s", deleted_count, dataset_id)
)
except Exception:
logger.exception(
"Failed to delete metadata bindings for dataset_id: %s, document_ids: %s",
dataset_id,
document_ids,
)
# ============ Step 4: Batch delete UploadFile records (multiple short transactions) ============
if total_image_upload_file_ids:
failed_batches = 0
total_batches = (len(total_image_upload_file_ids) + BATCH_SIZE - 1) // BATCH_SIZE
for i in range(0, len(total_image_upload_file_ids), BATCH_SIZE):
batch = total_image_upload_file_ids[i : i + BATCH_SIZE]
try:
with session_factory.create_session() as session:
stmt = delete(UploadFile).where(UploadFile.id.in_(batch))
session.execute(stmt)
session.commit()
except Exception:
failed_batches += 1
logger.exception(
"Failed to delete image UploadFile batch %d-%d for dataset_id: %s",
i,
i + len(batch),
dataset_id,
)
if failed_batches > 0:
logger.warning(
"Image UploadFile deletion: %d/%d batches failed for dataset_id: %s",
failed_batches,
total_batches,
dataset_id,
)
# ============ Step 5: Batch delete DocumentSegment records (multiple short transactions) ============
if segment_ids:
failed_batches = 0
total_batches = (len(segment_ids) + BATCH_SIZE - 1) // BATCH_SIZE
for i in range(0, len(segment_ids), BATCH_SIZE):
batch = segment_ids[i : i + BATCH_SIZE]
try:
with session_factory.create_session() as session:
segment_delete_stmt = delete(DocumentSegment).where(DocumentSegment.id.in_(batch))
session.execute(segment_delete_stmt)
session.commit()
except Exception:
failed_batches += 1
logger.exception(
"Failed to delete DocumentSegment batch %d-%d for dataset_id: %s, document_ids: %s",
i,
i + len(batch),
dataset_id,
document_ids,
)
if failed_batches > 0:
logger.warning(
"DocumentSegment deletion: %d/%d batches failed, document_ids: %s",
failed_batches,
total_batches,
document_ids,
)
# ============ Step 6: Delete document-associated files (separate short transaction) ============
if file_ids:
try:
with session_factory.create_session() as session:
stmt = delete(UploadFile).where(UploadFile.id.in_(file_ids))
session.execute(stmt)
session.commit()
except Exception:
logger.exception(
"Failed to delete document UploadFile records for dataset_id: %s, file_ids: %s",
dataset_id,
file_ids,
)
# ============ Step 7: Delete storage files (I/O operations, no DB transaction) ============
storage_delete_failures = 0
for storage_key in storage_keys_to_delete:
try:
storage.delete(storage_key)
except Exception:
storage_delete_failures += 1
logger.exception("Failed to delete file from storage, key: %s", storage_key)
if storage_delete_failures > 0:
logger.warning(
"Storage file deletion completed with %d failures out of %d total files for dataset_id: %s",
storage_delete_failures,
len(storage_keys_to_delete),
dataset_id,
)
end_at = time.perf_counter()
logger.info(
click.style(
f"Cleaned documents when documents deleted latency: {end_at - start_at:.2f}s, "
f"dataset_id: {dataset_id}, document_ids: {document_ids}, "
f"segments: {len(segment_ids)}, image_files: {len(total_image_upload_file_ids)}, "
f"storage_files: {len(storage_keys_to_delete)}",
fg="green",
)
)
except Exception:
logger.exception(
"Batch clean documents failed for dataset_id: %s, document_ids: %s",
dataset_id,
document_ids,
)
logger.exception("Cleaned documents when documents deleted failed")

View File

@@ -3,7 +3,6 @@ import time
import click
from celery import shared_task
from sqlalchemy import delete
from core.db.session_factory import session_factory
from core.rag.index_processor.index_processor_factory import IndexProcessorFactory
@@ -68,14 +67,8 @@ def delete_segment_from_index_task(
if segment_attachment_bindings:
attachment_ids = [binding.attachment_id for binding in segment_attachment_bindings]
index_processor.clean(dataset=dataset, node_ids=attachment_ids, with_keywords=False)
segment_attachment_bind_ids = [i.id for i in segment_attachment_bindings]
for i in range(0, len(segment_attachment_bind_ids), 1000):
segment_attachment_bind_delete_stmt = delete(SegmentAttachmentBinding).where(
SegmentAttachmentBinding.id.in_(segment_attachment_bind_ids[i : i + 1000])
)
session.execute(segment_attachment_bind_delete_stmt)
for binding in segment_attachment_bindings:
session.delete(binding)
# delete upload file
session.query(UploadFile).where(UploadFile.id.in_(attachment_ids)).delete(synchronize_session=False)
session.commit()

View File

@@ -28,7 +28,7 @@ def document_indexing_sync_task(dataset_id: str, document_id: str):
logger.info(click.style(f"Start sync document: {document_id}", fg="green"))
start_at = time.perf_counter()
with session_factory.create_session() as session, session.begin():
with session_factory.create_session() as session:
document = session.query(Document).where(Document.id == document_id, Document.dataset_id == dataset_id).first()
if not document:
@@ -68,6 +68,7 @@ def document_indexing_sync_task(dataset_id: str, document_id: str):
document.indexing_status = "error"
document.error = "Datasource credential not found. Please reconnect your Notion workspace."
document.stopped_at = naive_utc_now()
session.commit()
return
loader = NotionExtractor(
@@ -84,6 +85,7 @@ def document_indexing_sync_task(dataset_id: str, document_id: str):
if last_edited_time != page_edited_time:
document.indexing_status = "parsing"
document.processing_started_at = naive_utc_now()
session.commit()
# delete all document segment and index
try:

View File

@@ -8,6 +8,7 @@ from sqlalchemy import delete, select
from core.db.session_factory import session_factory
from core.indexing_runner import DocumentIsPausedError, IndexingRunner
from core.rag.index_processor.index_processor_factory import IndexProcessorFactory
from extensions.ext_database import db
from libs.datetime_utils import naive_utc_now
from models.dataset import Dataset, Document, DocumentSegment
@@ -26,7 +27,7 @@ def document_indexing_update_task(dataset_id: str, document_id: str):
logger.info(click.style(f"Start update document: {document_id}", fg="green"))
start_at = time.perf_counter()
with session_factory.create_session() as session, session.begin():
with session_factory.create_session() as session:
document = session.query(Document).where(Document.id == document_id, Document.dataset_id == dataset_id).first()
if not document:
@@ -35,6 +36,7 @@ def document_indexing_update_task(dataset_id: str, document_id: str):
document.indexing_status = "parsing"
document.processing_started_at = naive_utc_now()
session.commit()
# delete all document segment and index
try:
@@ -54,7 +56,7 @@ def document_indexing_update_task(dataset_id: str, document_id: str):
segment_ids = [segment.id for segment in segments]
segment_delete_stmt = delete(DocumentSegment).where(DocumentSegment.id.in_(segment_ids))
session.execute(segment_delete_stmt)
db.session.commit()
end_at = time.perf_counter()
logger.info(
click.style(
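Both indexing tasks in this area trade the `session.begin()` block for explicit `session.commit()` calls, so intermediate status changes become visible before the long-running work continues. A small sketch of the difference, with assumed names:

```python
from sqlalchemy.orm import Session

def with_begin(engine, Document, doc_id: str) -> None:
    # session.begin(): everything commits once, when the block exits cleanly.
    with Session(engine) as session, session.begin():
        doc = session.get(Document, doc_id)
        doc.indexing_status = "parsing"
        # no explicit commit; rollback happens automatically on exception

def with_explicit_commits(engine, Document, doc_id: str) -> None:
    # Plain session: each intermediate state is committed when you choose.
    with Session(engine) as session:
        doc = session.get(Document, doc_id)
        doc.indexing_status = "parsing"
        session.commit()  # visible to other transactions before later steps run
```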

View File

@@ -6,8 +6,8 @@ import typing
import click
from celery import shared_task
from core.helper.marketplace import record_install_plugin_event
from core.plugin.entities.marketplace import MarketplacePluginSnapshot
from core.helper import marketplace
from core.helper.marketplace import MarketplacePluginDeclaration
from core.plugin.entities.plugin import PluginInstallationSource
from core.plugin.impl.plugin import PluginInstaller
from extensions.ext_redis import redis_client
@@ -16,7 +16,7 @@ from models.account import TenantPluginAutoUpgradeStrategy
logger = logging.getLogger(__name__)
RETRY_TIMES_OF_ONE_PLUGIN_IN_ONE_TENANT = 3
CACHE_REDIS_KEY_PREFIX = "plugin_autoupgrade_check_task:cached_plugin_snapshot:"
CACHE_REDIS_KEY_PREFIX = "plugin_autoupgrade_check_task:cached_plugin_manifests:"
CACHE_REDIS_TTL = 60 * 60 # 1 hour
@@ -25,11 +25,11 @@ def _get_redis_cache_key(plugin_id: str) -> str:
return f"{CACHE_REDIS_KEY_PREFIX}{plugin_id}"
def _get_cached_manifest(plugin_id: str) -> typing.Union[MarketplacePluginSnapshot, None, bool]:
def _get_cached_manifest(plugin_id: str) -> typing.Union[MarketplacePluginDeclaration, None, bool]:
"""
Get cached plugin manifest from Redis.
Returns:
- MarketplacePluginSnapshot: if found in cache
- MarketplacePluginDeclaration: if found in cache
- None: if cached as not found (marketplace returned no result)
- False: if not in cache at all
"""
@@ -43,31 +43,76 @@ def _get_cached_manifest(plugin_id: str) -> typing.Union[MarketplacePluginSnapsh
if cached_json is None:
return None
return MarketplacePluginSnapshot.model_validate(cached_json)
return MarketplacePluginDeclaration.model_validate(cached_json)
except Exception:
logger.exception("Failed to get cached manifest for plugin %s", plugin_id)
return False
def _set_cached_manifest(plugin_id: str, manifest: typing.Union[MarketplacePluginDeclaration, None]) -> None:
"""
Cache plugin manifest in Redis.
Args:
plugin_id: The plugin ID
manifest: The manifest to cache, or None if not found in marketplace
"""
try:
key = _get_redis_cache_key(plugin_id)
if manifest is None:
# Cache the fact that this plugin was not found
redis_client.setex(key, CACHE_REDIS_TTL, json.dumps(None))
else:
# Cache the manifest data
redis_client.setex(key, CACHE_REDIS_TTL, manifest.model_dump_json())
except Exception:
# If Redis fails, continue without caching
# traceback.print_exc()
logger.exception("Failed to set cached manifest for plugin %s", plugin_id)
def marketplace_batch_fetch_plugin_manifests(
plugin_ids_plain_list: list[str],
) -> list[MarketplacePluginSnapshot]:
"""
Fetch plugin manifests from Redis cache only.
This function assumes fetch_global_plugin_manifest() has been called
to pre-populate the cache with all marketplace plugins.
"""
result: list[MarketplacePluginSnapshot] = []
) -> list[MarketplacePluginDeclaration]:
"""Fetch plugin manifests with Redis caching support."""
cached_manifests: dict[str, typing.Union[MarketplacePluginDeclaration, None]] = {}
not_cached_plugin_ids: list[str] = []
# Check Redis cache for each plugin
for plugin_id in plugin_ids_plain_list:
cached_result = _get_cached_manifest(plugin_id)
if not isinstance(cached_result, MarketplacePluginSnapshot):
# cached_result is False (not in cache) or None (cached as not found)
logger.warning("plugin %s not found in cache, skipping", plugin_id)
continue
if cached_result is False:
# Not in cache, need to fetch
not_cached_plugin_ids.append(plugin_id)
else:
# Either found manifest or cached as None (not found in marketplace)
# At this point, cached_result is either MarketplacePluginDeclaration or None
if isinstance(cached_result, bool):
# This should never happen due to the if condition above, but for type safety
continue
cached_manifests[plugin_id] = cached_result
result.append(cached_result)
# Fetch uncached plugins from marketplace
if not_cached_plugin_ids:
manifests = marketplace.batch_fetch_plugin_manifests_ignore_deserialization_error(not_cached_plugin_ids)
# Cache the fetched manifests
for manifest in manifests:
cached_manifests[manifest.plugin_id] = manifest
_set_cached_manifest(manifest.plugin_id, manifest)
# Cache plugins that were not found in marketplace
fetched_plugin_ids = {manifest.plugin_id for manifest in manifests}
for plugin_id in not_cached_plugin_ids:
if plugin_id not in fetched_plugin_ids:
cached_manifests[plugin_id] = None
_set_cached_manifest(plugin_id, None)
# Build result list from cached manifests
result: list[MarketplacePluginDeclaration] = []
for plugin_id in plugin_ids_plain_list:
cached_manifest: typing.Union[MarketplacePluginDeclaration, None] = cached_manifests.get(plugin_id)
if cached_manifest is not None:
result.append(cached_manifest)
return result
@@ -166,7 +211,7 @@ def process_tenant_plugin_autoupgrade_check_task(
# execute upgrade
new_unique_identifier = manifest.latest_package_identifier
record_install_plugin_event(new_unique_identifier)
marketplace.record_install_plugin_event(new_unique_identifier)
click.echo(
click.style(
f"Upgrade plugin: {original_unique_identifier} -> {new_unique_identifier}",

View File

@@ -48,7 +48,6 @@ from models.workflow import (
WorkflowArchiveLog,
)
from repositories.factory import DifyAPIRepositoryFactory
from services.api_token_service import ApiTokenCache
logger = logging.getLogger(__name__)
@@ -135,12 +134,6 @@ def _delete_app_mcp_servers(tenant_id: str, app_id: str):
def _delete_app_api_tokens(tenant_id: str, app_id: str):
def del_api_token(session, api_token_id: str):
# Fetch token details for cache invalidation
token_obj = session.query(ApiToken).where(ApiToken.id == api_token_id).first()
if token_obj:
# Invalidate cache before deletion
ApiTokenCache.delete(token_obj.token, token_obj.type)
session.query(ApiToken).where(ApiToken.id == api_token_id).delete(synchronize_session=False)
_delete_records(

View File

@@ -1,375 +0,0 @@
"""
Integration tests for API Token Cache with Redis.
These tests require:
- Redis server running
- Test database configured
"""
import time
from datetime import datetime, timedelta
from unittest.mock import patch
import pytest
from extensions.ext_redis import redis_client
from models.model import ApiToken
from services.api_token_service import ApiTokenCache, CachedApiToken
class TestApiTokenCacheRedisIntegration:
"""Integration tests with real Redis."""
def setup_method(self):
"""Setup test fixtures and clean Redis."""
self.test_token = "test-integration-token-123"
self.test_scope = "app"
self.cache_key = f"api_token:{self.test_scope}:{self.test_token}"
# Clean up any existing test data
self._cleanup()
def teardown_method(self):
"""Cleanup test data from Redis."""
self._cleanup()
def _cleanup(self):
"""Remove test data from Redis."""
try:
redis_client.delete(self.cache_key)
redis_client.delete(ApiTokenCache._make_tenant_index_key("test-tenant-id"))
redis_client.delete(ApiTokenCache.make_active_key(self.test_token, self.test_scope))
except Exception:
pass # Ignore cleanup errors
def test_cache_set_and_get_with_real_redis(self):
"""Test cache set and get operations with real Redis."""
from unittest.mock import MagicMock
mock_token = MagicMock()
mock_token.id = "test-id-123"
mock_token.app_id = "test-app-456"
mock_token.tenant_id = "test-tenant-789"
mock_token.type = "app"
mock_token.token = self.test_token
mock_token.last_used_at = datetime.now()
mock_token.created_at = datetime.now() - timedelta(days=30)
# Set in cache
result = ApiTokenCache.set(self.test_token, self.test_scope, mock_token)
assert result is True
# Verify in Redis
cached_data = redis_client.get(self.cache_key)
assert cached_data is not None
# Get from cache
cached_token = ApiTokenCache.get(self.test_token, self.test_scope)
assert cached_token is not None
assert isinstance(cached_token, CachedApiToken)
assert cached_token.id == "test-id-123"
assert cached_token.app_id == "test-app-456"
assert cached_token.tenant_id == "test-tenant-789"
assert cached_token.type == "app"
assert cached_token.token == self.test_token
def test_cache_ttl_with_real_redis(self):
"""Test cache TTL is set correctly."""
from unittest.mock import MagicMock
mock_token = MagicMock()
mock_token.id = "test-id"
mock_token.app_id = "test-app"
mock_token.tenant_id = "test-tenant"
mock_token.type = "app"
mock_token.token = self.test_token
mock_token.last_used_at = None
mock_token.created_at = datetime.now()
ApiTokenCache.set(self.test_token, self.test_scope, mock_token)
ttl = redis_client.ttl(self.cache_key)
assert 595 <= ttl <= 600 # Should be around 600 seconds (10 minutes)
def test_cache_null_value_for_invalid_token(self):
"""Test caching null value for invalid tokens."""
result = ApiTokenCache.set(self.test_token, self.test_scope, None)
assert result is True
cached_data = redis_client.get(self.cache_key)
assert cached_data == b"null"
cached_token = ApiTokenCache.get(self.test_token, self.test_scope)
assert cached_token is None
ttl = redis_client.ttl(self.cache_key)
assert 55 <= ttl <= 60
def test_cache_delete_with_real_redis(self):
"""Test cache deletion with real Redis."""
from unittest.mock import MagicMock
mock_token = MagicMock()
mock_token.id = "test-id"
mock_token.app_id = "test-app"
mock_token.tenant_id = "test-tenant"
mock_token.type = "app"
mock_token.token = self.test_token
mock_token.last_used_at = None
mock_token.created_at = datetime.now()
ApiTokenCache.set(self.test_token, self.test_scope, mock_token)
assert redis_client.exists(self.cache_key) == 1
result = ApiTokenCache.delete(self.test_token, self.test_scope)
assert result is True
assert redis_client.exists(self.cache_key) == 0
def test_tenant_index_creation(self):
"""Test tenant index is created when caching token."""
from unittest.mock import MagicMock
tenant_id = "test-tenant-id"
mock_token = MagicMock()
mock_token.id = "test-id"
mock_token.app_id = "test-app"
mock_token.tenant_id = tenant_id
mock_token.type = "app"
mock_token.token = self.test_token
mock_token.last_used_at = None
mock_token.created_at = datetime.now()
ApiTokenCache.set(self.test_token, self.test_scope, mock_token)
index_key = ApiTokenCache._make_tenant_index_key(tenant_id)
assert redis_client.exists(index_key) == 1
members = redis_client.smembers(index_key)
cache_keys = [m.decode("utf-8") if isinstance(m, bytes) else m for m in members]
assert self.cache_key in cache_keys
def test_invalidate_by_tenant_via_index(self):
"""Test tenant-wide cache invalidation using index (fast path)."""
from unittest.mock import MagicMock
tenant_id = "test-tenant-id"
for i in range(3):
token_value = f"test-token-{i}"
mock_token = MagicMock()
mock_token.id = f"test-id-{i}"
mock_token.app_id = "test-app"
mock_token.tenant_id = tenant_id
mock_token.type = "app"
mock_token.token = token_value
mock_token.last_used_at = None
mock_token.created_at = datetime.now()
ApiTokenCache.set(token_value, "app", mock_token)
for i in range(3):
key = f"api_token:app:test-token-{i}"
assert redis_client.exists(key) == 1
result = ApiTokenCache.invalidate_by_tenant(tenant_id)
assert result is True
for i in range(3):
key = f"api_token:app:test-token-{i}"
assert redis_client.exists(key) == 0
assert redis_client.exists(ApiTokenCache._make_tenant_index_key(tenant_id)) == 0
def test_concurrent_cache_access(self):
"""Test concurrent cache access doesn't cause issues."""
import concurrent.futures
from unittest.mock import MagicMock
mock_token = MagicMock()
mock_token.id = "test-id"
mock_token.app_id = "test-app"
mock_token.tenant_id = "test-tenant"
mock_token.type = "app"
mock_token.token = self.test_token
mock_token.last_used_at = None
mock_token.created_at = datetime.now()
ApiTokenCache.set(self.test_token, self.test_scope, mock_token)
def get_from_cache():
return ApiTokenCache.get(self.test_token, self.test_scope)
with concurrent.futures.ThreadPoolExecutor(max_workers=10) as executor:
futures = [executor.submit(get_from_cache) for _ in range(50)]
results = [f.result() for f in concurrent.futures.as_completed(futures)]
assert len(results) == 50
assert all(r is not None for r in results)
assert all(isinstance(r, CachedApiToken) for r in results)
class TestTokenUsageRecording:
"""Tests for recording token usage in Redis (batch update approach)."""
def setup_method(self):
self.test_token = "test-usage-token"
self.test_scope = "app"
self.active_key = ApiTokenCache.make_active_key(self.test_token, self.test_scope)
def teardown_method(self):
try:
redis_client.delete(self.active_key)
except Exception:
pass
def test_record_token_usage_sets_redis_key(self):
"""Test that record_token_usage writes an active key to Redis."""
from services.api_token_service import record_token_usage
record_token_usage(self.test_token, self.test_scope)
# Key should exist
assert redis_client.exists(self.active_key) == 1
# Value should be an ISO timestamp
value = redis_client.get(self.active_key)
if isinstance(value, bytes):
value = value.decode("utf-8")
datetime.fromisoformat(value) # Should not raise
def test_record_token_usage_has_ttl(self):
"""Test that active keys have a TTL as safety net."""
from services.api_token_service import record_token_usage
record_token_usage(self.test_token, self.test_scope)
ttl = redis_client.ttl(self.active_key)
assert 3595 <= ttl <= 3600 # ~1 hour
def test_record_token_usage_overwrites(self):
"""Test that repeated calls overwrite the same key (no accumulation)."""
from services.api_token_service import record_token_usage
record_token_usage(self.test_token, self.test_scope)
first_value = redis_client.get(self.active_key)
time.sleep(0.01) # Tiny delay so timestamp differs
record_token_usage(self.test_token, self.test_scope)
second_value = redis_client.get(self.active_key)
# Key count should still be 1 (overwritten, not accumulated)
assert redis_client.exists(self.active_key) == 1
class TestEndToEndCacheFlow:
"""End-to-end integration test for complete cache flow."""
@pytest.mark.usefixtures("db_session")
def test_complete_flow_cache_miss_then_hit(self, db_session):
"""
Test complete flow:
1. First request (cache miss) -> query DB -> cache result
2. Second request (cache hit) -> return from cache
3. Verify Redis state
"""
test_token_value = "test-e2e-token"
test_scope = "app"
test_token = ApiToken()
test_token.id = "test-e2e-id"
test_token.token = test_token_value
test_token.type = test_scope
test_token.app_id = "test-app"
test_token.tenant_id = "test-tenant"
test_token.last_used_at = None
test_token.created_at = datetime.now()
db_session.add(test_token)
db_session.commit()
try:
# Step 1: Cache miss - set token in cache
ApiTokenCache.set(test_token_value, test_scope, test_token)
cache_key = f"api_token:{test_scope}:{test_token_value}"
assert redis_client.exists(cache_key) == 1
# Step 2: Cache hit - get from cache
cached_token = ApiTokenCache.get(test_token_value, test_scope)
assert cached_token is not None
assert cached_token.id == test_token.id
assert cached_token.token == test_token_value
# Step 3: Verify tenant index
index_key = ApiTokenCache._make_tenant_index_key(test_token.tenant_id)
assert redis_client.exists(index_key) == 1
assert cache_key.encode() in redis_client.smembers(index_key)
# Step 4: Delete and verify cleanup
ApiTokenCache.delete(test_token_value, test_scope)
assert redis_client.exists(cache_key) == 0
assert cache_key.encode() not in redis_client.smembers(index_key)
finally:
db_session.delete(test_token)
db_session.commit()
redis_client.delete(f"api_token:{test_scope}:{test_token_value}")
redis_client.delete(ApiTokenCache._make_tenant_index_key(test_token.tenant_id))
def test_high_concurrency_simulation(self):
"""Simulate high concurrency access to cache."""
import concurrent.futures
from unittest.mock import MagicMock
test_token_value = "test-concurrent-token"
test_scope = "app"
mock_token = MagicMock()
mock_token.id = "concurrent-id"
mock_token.app_id = "test-app"
mock_token.tenant_id = "test-tenant"
mock_token.type = test_scope
mock_token.token = test_token_value
mock_token.last_used_at = datetime.now()
mock_token.created_at = datetime.now()
ApiTokenCache.set(test_token_value, test_scope, mock_token)
try:
def read_cache():
return ApiTokenCache.get(test_token_value, test_scope)
start_time = time.time()
with concurrent.futures.ThreadPoolExecutor(max_workers=20) as executor:
futures = [executor.submit(read_cache) for _ in range(100)]
results = [f.result() for f in concurrent.futures.as_completed(futures)]
elapsed = time.time() - start_time
assert len(results) == 100
assert all(r is not None for r in results)
assert elapsed < 1.0, f"Too slow: {elapsed}s for 100 cache reads"
finally:
ApiTokenCache.delete(test_token_value, test_scope)
redis_client.delete(ApiTokenCache._make_tenant_index_key(mock_token.tenant_id))
class TestRedisFailover:
"""Test behavior when Redis is unavailable."""
@patch("services.api_token_service.redis_client")
def test_graceful_degradation_when_redis_fails(self, mock_redis):
"""Test system degrades gracefully when Redis is unavailable."""
from redis import RedisError
mock_redis.get.side_effect = RedisError("Connection failed")
mock_redis.setex.side_effect = RedisError("Connection failed")
result_get = ApiTokenCache.get("test-token", "app")
assert result_get is None
result_set = ApiTokenCache.set("test-token", "app", None)
assert result_set is False

View File

@@ -1,182 +0,0 @@
from unittest.mock import MagicMock, patch
import pytest
from faker import Faker
from models import Account, Tenant, TenantAccountJoin, TenantAccountRole
from models.dataset import Dataset, Document, DocumentSegment
from tasks.document_indexing_update_task import document_indexing_update_task
class TestDocumentIndexingUpdateTask:
@pytest.fixture
def mock_external_dependencies(self):
"""Patch external collaborators used by the update task.
- IndexProcessorFactory.init_index_processor().clean(...)
- IndexingRunner.run([...])
"""
with (
patch("tasks.document_indexing_update_task.IndexProcessorFactory") as mock_factory,
patch("tasks.document_indexing_update_task.IndexingRunner") as mock_runner,
):
processor_instance = MagicMock()
mock_factory.return_value.init_index_processor.return_value = processor_instance
runner_instance = MagicMock()
mock_runner.return_value = runner_instance
yield {
"factory": mock_factory,
"processor": processor_instance,
"runner": mock_runner,
"runner_instance": runner_instance,
}
def _create_dataset_document_with_segments(self, db_session_with_containers, *, segment_count: int = 2):
fake = Faker()
# Account and tenant
account = Account(
email=fake.email(),
name=fake.name(),
interface_language="en-US",
status="active",
)
db_session_with_containers.add(account)
db_session_with_containers.commit()
tenant = Tenant(name=fake.company(), status="normal")
db_session_with_containers.add(tenant)
db_session_with_containers.commit()
join = TenantAccountJoin(
tenant_id=tenant.id,
account_id=account.id,
role=TenantAccountRole.OWNER,
current=True,
)
db_session_with_containers.add(join)
db_session_with_containers.commit()
# Dataset and document
dataset = Dataset(
tenant_id=tenant.id,
name=fake.company(),
description=fake.text(max_nb_chars=64),
data_source_type="upload_file",
indexing_technique="high_quality",
created_by=account.id,
)
db_session_with_containers.add(dataset)
db_session_with_containers.commit()
document = Document(
tenant_id=tenant.id,
dataset_id=dataset.id,
position=0,
data_source_type="upload_file",
batch="test_batch",
name=fake.file_name(),
created_from="upload_file",
created_by=account.id,
indexing_status="waiting",
enabled=True,
doc_form="text_model",
)
db_session_with_containers.add(document)
db_session_with_containers.commit()
# Segments
node_ids = []
for i in range(segment_count):
node_id = f"node-{i + 1}"
seg = DocumentSegment(
tenant_id=tenant.id,
dataset_id=dataset.id,
document_id=document.id,
position=i,
content=fake.text(max_nb_chars=32),
answer=None,
word_count=10,
tokens=5,
index_node_id=node_id,
status="completed",
created_by=account.id,
)
db_session_with_containers.add(seg)
node_ids.append(node_id)
db_session_with_containers.commit()
# Refresh to ensure ORM state
db_session_with_containers.refresh(dataset)
db_session_with_containers.refresh(document)
return dataset, document, node_ids
def test_cleans_segments_and_reindexes(self, db_session_with_containers, mock_external_dependencies):
dataset, document, node_ids = self._create_dataset_document_with_segments(db_session_with_containers)
# Act
document_indexing_update_task(dataset.id, document.id)
# Ensure we see committed changes from another session
db_session_with_containers.expire_all()
# Assert document status updated before reindex
updated = db_session_with_containers.query(Document).where(Document.id == document.id).first()
assert updated.indexing_status == "parsing"
assert updated.processing_started_at is not None
# Segments should be deleted
remaining = (
db_session_with_containers.query(DocumentSegment).where(DocumentSegment.document_id == document.id).count()
)
assert remaining == 0
# Assert index processor clean was called with expected args
clean_call = mock_external_dependencies["processor"].clean.call_args
assert clean_call is not None
args, kwargs = clean_call
# args[0] is a Dataset instance (from another session) — validate by id
assert getattr(args[0], "id", None) == dataset.id
# args[1] should contain our node_ids
assert set(args[1]) == set(node_ids)
assert kwargs.get("with_keywords") is True
assert kwargs.get("delete_child_chunks") is True
# Assert indexing runner invoked with the updated document
run_call = mock_external_dependencies["runner_instance"].run.call_args
assert run_call is not None
run_docs = run_call[0][0]
assert len(run_docs) == 1
first = run_docs[0]
assert getattr(first, "id", None) == document.id
def test_clean_error_is_logged_and_indexing_continues(self, db_session_with_containers, mock_external_dependencies):
dataset, document, node_ids = self._create_dataset_document_with_segments(db_session_with_containers)
# Force clean to raise; task should continue to indexing
mock_external_dependencies["processor"].clean.side_effect = Exception("boom")
document_indexing_update_task(dataset.id, document.id)
# Ensure we see committed changes from another session
db_session_with_containers.expire_all()
# Indexing should still be triggered
mock_external_dependencies["runner_instance"].run.assert_called_once()
# Segments should remain (since clean failed before DB delete)
remaining = (
db_session_with_containers.query(DocumentSegment).where(DocumentSegment.document_id == document.id).count()
)
assert remaining > 0
def test_document_not_found_noop(self, db_session_with_containers, mock_external_dependencies):
fake = Faker()
# Act with non-existent document id
document_indexing_update_task(dataset_id=fake.uuid4(), document_id=fake.uuid4())
# Neither processor nor runner should be called
mock_external_dependencies["processor"].clean.assert_not_called()
mock_external_dependencies["runner_instance"].run.assert_not_called()

View File

@@ -217,6 +217,7 @@ class TestTemplateTransformNode:
@patch(
"core.workflow.nodes.template_transform.template_transform_node.CodeExecutorJinja2TemplateRenderer.render_template"
)
@patch("core.workflow.nodes.template_transform.template_transform_node.MAX_TEMPLATE_TRANSFORM_OUTPUT_LENGTH", 10)
def test_run_output_length_exceeds_limit(
self, mock_execute, basic_node_data, mock_graph, mock_graph_runtime_state, graph_init_params
):
@@ -230,7 +231,6 @@ class TestTemplateTransformNode:
graph_init_params=graph_init_params,
graph=mock_graph,
graph_runtime_state=mock_graph_runtime_state,
max_output_length=10,
)
result = node._run()
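The hunk above moves the 10-character limit from a constructor argument onto the module constant, patched for the duration of the test. A minimal sketch of that pattern, assuming only that the Dify API package is importable and that MAX_TEMPLATE_TRANSFORM_OUTPUT_LENGTH is a plain module-level integer (the decorator form in the diff is equivalent):

from unittest.mock import patch

# Override the module-level limit for one test instead of passing
# max_output_length into the node constructor.
with patch(
    "core.workflow.nodes.template_transform.template_transform_node"
    ".MAX_TEMPLATE_TRANSFORM_OUTPUT_LENGTH",
    10,
):
    ...  # build the node and call _run(); any rendered output over 10 chars now exceeds the limit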

View File

@@ -132,8 +132,6 @@ class TestCelerySSLConfiguration:
mock_config.WORKFLOW_SCHEDULE_MAX_DISPATCH_PER_TICK = 0
mock_config.ENABLE_TRIGGER_PROVIDER_REFRESH_TASK = False
mock_config.TRIGGER_PROVIDER_REFRESH_INTERVAL = 15
mock_config.ENABLE_API_TOKEN_LAST_USED_UPDATE_TASK = False
mock_config.API_TOKEN_LAST_USED_UPDATE_INTERVAL = 30
with patch("extensions.ext_celery.dify_config", mock_config):
from dify_app import DifyApp

View File

@@ -1,250 +0,0 @@
"""
Unit tests for API Token Cache module.
"""
import json
from datetime import datetime
from unittest.mock import MagicMock, patch
from services.api_token_service import (
CACHE_KEY_PREFIX,
CACHE_NULL_TTL_SECONDS,
CACHE_TTL_SECONDS,
ApiTokenCache,
CachedApiToken,
)
class TestApiTokenCache:
"""Test cases for ApiTokenCache class."""
def setup_method(self):
"""Setup test fixtures."""
self.mock_token = MagicMock()
self.mock_token.id = "test-token-id-123"
self.mock_token.app_id = "test-app-id-456"
self.mock_token.tenant_id = "test-tenant-id-789"
self.mock_token.type = "app"
self.mock_token.token = "test-token-value-abc"
self.mock_token.last_used_at = datetime(2026, 2, 3, 10, 0, 0)
self.mock_token.created_at = datetime(2026, 1, 1, 0, 0, 0)
def test_make_cache_key(self):
"""Test cache key generation."""
# Test with scope
key = ApiTokenCache._make_cache_key("my-token", "app")
assert key == f"{CACHE_KEY_PREFIX}:app:my-token"
# Test without scope
key = ApiTokenCache._make_cache_key("my-token", None)
assert key == f"{CACHE_KEY_PREFIX}:any:my-token"
def test_serialize_token(self):
"""Test token serialization."""
serialized = ApiTokenCache._serialize_token(self.mock_token)
data = json.loads(serialized)
assert data["id"] == "test-token-id-123"
assert data["app_id"] == "test-app-id-456"
assert data["tenant_id"] == "test-tenant-id-789"
assert data["type"] == "app"
assert data["token"] == "test-token-value-abc"
assert data["last_used_at"] == "2026-02-03T10:00:00"
assert data["created_at"] == "2026-01-01T00:00:00"
def test_serialize_token_with_nulls(self):
"""Test token serialization with None values."""
mock_token = MagicMock()
mock_token.id = "test-id"
mock_token.app_id = None
mock_token.tenant_id = None
mock_token.type = "dataset"
mock_token.token = "test-token"
mock_token.last_used_at = None
mock_token.created_at = datetime(2026, 1, 1, 0, 0, 0)
serialized = ApiTokenCache._serialize_token(mock_token)
data = json.loads(serialized)
assert data["app_id"] is None
assert data["tenant_id"] is None
assert data["last_used_at"] is None
def test_deserialize_token(self):
"""Test token deserialization."""
cached_data = json.dumps(
{
"id": "test-id",
"app_id": "test-app",
"tenant_id": "test-tenant",
"type": "app",
"token": "test-token",
"last_used_at": "2026-02-03T10:00:00",
"created_at": "2026-01-01T00:00:00",
}
)
result = ApiTokenCache._deserialize_token(cached_data)
assert isinstance(result, CachedApiToken)
assert result.id == "test-id"
assert result.app_id == "test-app"
assert result.tenant_id == "test-tenant"
assert result.type == "app"
assert result.token == "test-token"
assert result.last_used_at == datetime(2026, 2, 3, 10, 0, 0)
assert result.created_at == datetime(2026, 1, 1, 0, 0, 0)
def test_deserialize_null_token(self):
"""Test deserialization of null token (cached miss)."""
result = ApiTokenCache._deserialize_token("null")
assert result is None
def test_deserialize_invalid_json(self):
"""Test deserialization with invalid JSON."""
result = ApiTokenCache._deserialize_token("invalid-json{")
assert result is None
@patch("services.api_token_service.redis_client")
def test_get_cache_hit(self, mock_redis):
"""Test cache hit scenario."""
cached_data = json.dumps(
{
"id": "test-id",
"app_id": "test-app",
"tenant_id": "test-tenant",
"type": "app",
"token": "test-token",
"last_used_at": "2026-02-03T10:00:00",
"created_at": "2026-01-01T00:00:00",
}
).encode("utf-8")
mock_redis.get.return_value = cached_data
result = ApiTokenCache.get("test-token", "app")
assert result is not None
assert isinstance(result, CachedApiToken)
assert result.app_id == "test-app"
mock_redis.get.assert_called_once_with(f"{CACHE_KEY_PREFIX}:app:test-token")
@patch("services.api_token_service.redis_client")
def test_get_cache_miss(self, mock_redis):
"""Test cache miss scenario."""
mock_redis.get.return_value = None
result = ApiTokenCache.get("test-token", "app")
assert result is None
mock_redis.get.assert_called_once()
@patch("services.api_token_service.redis_client")
def test_set_valid_token(self, mock_redis):
"""Test setting a valid token in cache."""
result = ApiTokenCache.set("test-token", "app", self.mock_token)
assert result is True
mock_redis.setex.assert_called_once()
args = mock_redis.setex.call_args[0]
assert args[0] == f"{CACHE_KEY_PREFIX}:app:test-token"
assert args[1] == CACHE_TTL_SECONDS
@patch("services.api_token_service.redis_client")
def test_set_null_token(self, mock_redis):
"""Test setting a null token (cache penetration prevention)."""
result = ApiTokenCache.set("invalid-token", "app", None)
assert result is True
mock_redis.setex.assert_called_once()
args = mock_redis.setex.call_args[0]
assert args[0] == f"{CACHE_KEY_PREFIX}:app:invalid-token"
assert args[1] == CACHE_NULL_TTL_SECONDS
assert args[2] == b"null"
@patch("services.api_token_service.redis_client")
def test_delete_with_scope(self, mock_redis):
"""Test deleting token cache with specific scope."""
result = ApiTokenCache.delete("test-token", "app")
assert result is True
mock_redis.delete.assert_called_once_with(f"{CACHE_KEY_PREFIX}:app:test-token")
@patch("services.api_token_service.redis_client")
def test_delete_without_scope(self, mock_redis):
"""Test deleting token cache without scope (delete all)."""
# Mock scan_iter to return an iterator of keys
mock_redis.scan_iter.return_value = iter(
[
b"api_token:app:test-token",
b"api_token:dataset:test-token",
]
)
result = ApiTokenCache.delete("test-token", None)
assert result is True
# Verify scan_iter was called with the correct pattern
mock_redis.scan_iter.assert_called_once()
call_args = mock_redis.scan_iter.call_args
assert call_args[1]["match"] == f"{CACHE_KEY_PREFIX}:*:test-token"
# Verify delete was called with all matched keys
mock_redis.delete.assert_called_once_with(
b"api_token:app:test-token",
b"api_token:dataset:test-token",
)
@patch("services.api_token_service.redis_client")
def test_redis_fallback_on_exception(self, mock_redis):
"""Test Redis fallback when Redis is unavailable."""
from redis import RedisError
mock_redis.get.side_effect = RedisError("Connection failed")
result = ApiTokenCache.get("test-token", "app")
# Should return None (fallback) instead of raising exception
assert result is None
class TestApiTokenCacheIntegration:
"""Integration test scenarios."""
@patch("services.api_token_service.redis_client")
def test_full_cache_lifecycle(self, mock_redis):
"""Test complete cache lifecycle: set -> get -> delete."""
# Setup mock token
mock_token = MagicMock()
mock_token.id = "id-123"
mock_token.app_id = "app-456"
mock_token.tenant_id = "tenant-789"
mock_token.type = "app"
mock_token.token = "token-abc"
mock_token.last_used_at = datetime(2026, 2, 3, 10, 0, 0)
mock_token.created_at = datetime(2026, 1, 1, 0, 0, 0)
# 1. Set token in cache
ApiTokenCache.set("token-abc", "app", mock_token)
assert mock_redis.setex.called
# 2. Simulate cache hit
cached_data = ApiTokenCache._serialize_token(mock_token)
mock_redis.get.return_value = cached_data # bytes from model_dump_json().encode()
retrieved = ApiTokenCache.get("token-abc", "app")
assert retrieved is not None
assert isinstance(retrieved, CachedApiToken)
# 3. Delete from cache
ApiTokenCache.delete("token-abc", "app")
assert mock_redis.delete.called
@patch("services.api_token_service.redis_client")
def test_cache_penetration_prevention(self, mock_redis):
"""Test that non-existent tokens are cached as null."""
# Set null token (cache miss)
ApiTokenCache.set("non-existent-token", "app", None)
args = mock_redis.setex.call_args[0]
assert args[2] == b"null"
assert args[1] == CACHE_NULL_TTL_SECONDS # Shorter TTL for null values

View File

@@ -114,21 +114,6 @@ def mock_db_session():
session = MagicMock()
# Ensure tests can observe session.close() via context manager teardown
session.close = MagicMock()
session.commit = MagicMock()
# Mock session.begin() context manager to auto-commit on exit
begin_cm = MagicMock()
begin_cm.__enter__.return_value = session
def _begin_exit_side_effect(*args, **kwargs):
# session.begin().__exit__() should commit if no exception
if args[0] is None: # No exception
session.commit()
begin_cm.__exit__.side_effect = _begin_exit_side_effect
session.begin.return_value = begin_cm
# Mock create_session() context manager
cm = MagicMock()
cm.__enter__.return_value = session

View File

@@ -35,6 +35,7 @@ export const baseProviderContextValue: ProviderContextState = {
refreshLicenseLimit: noop,
isAllowTransferWorkspace: false,
isAllowPublishAsCustomKnowledgePipelineTemplate: false,
humanInputEmailDeliveryEnabled: false,
}
export const createMockProviderContextValue = (overrides: Partial<ProviderContextState> = {}): ProviderContextState => {

View File

@@ -8,7 +8,6 @@ describe('SVG Attribute Error Reproduction', () => {
// Capture console errors
const originalError = console.error
let errorMessages: string[] = []
beforeEach(() => {
errorMessages = []
console.error = vi.fn((message) => {

View File

@@ -0,0 +1,289 @@
'use client'
import type { ButtonProps } from '@/app/components/base/button'
import type { FormInputItem, UserAction } from '@/app/components/workflow/nodes/human-input/types'
import type { SiteInfo } from '@/models/share'
import type { HumanInputFormError } from '@/service/use-share'
import {
RiCheckboxCircleFill,
RiErrorWarningFill,
RiInformation2Fill,
} from '@remixicon/react'
import { produce } from 'immer'
import { useParams } from 'next/navigation'
import * as React from 'react'
import { useEffect, useMemo, useState } from 'react'
import { useTranslation } from 'react-i18next'
import AppIcon from '@/app/components/base/app-icon'
import Button from '@/app/components/base/button'
import ContentItem from '@/app/components/base/chat/chat/answer/human-input-content/content-item'
import ExpirationTime from '@/app/components/base/chat/chat/answer/human-input-content/expiration-time'
import { getButtonStyle } from '@/app/components/base/chat/chat/answer/human-input-content/utils'
import Loading from '@/app/components/base/loading'
import DifyLogo from '@/app/components/base/logo/dify-logo'
import useDocumentTitle from '@/hooks/use-document-title'
import { useGetHumanInputForm, useSubmitHumanInputForm } from '@/service/use-share'
import { cn } from '@/utils/classnames'
export type FormData = {
site: { site: SiteInfo }
form_content: string
inputs: FormInputItem[]
resolved_default_values: Record<string, string>
user_actions: UserAction[]
expiration_time: number
}
const FormContent = () => {
const { t } = useTranslation()
const { token } = useParams<{ token: string }>()
useDocumentTitle('')
const [inputs, setInputs] = useState<Record<string, string>>({})
const [success, setSuccess] = useState(false)
const { mutate: submitForm, isPending: isSubmitting } = useSubmitHumanInputForm()
const { data: formData, isLoading, error } = useGetHumanInputForm(token)
const expired = (error as HumanInputFormError | null)?.code === 'human_input_form_expired'
const submitted = (error as HumanInputFormError | null)?.code === 'human_input_form_submitted'
const rateLimitExceeded = (error as HumanInputFormError | null)?.code === 'web_form_rate_limit_exceeded'
const splitByOutputVar = (content: string): string[] => {
const outputVarRegex = /(\{\{#\$output\.[^#]+#\}\})/g
const parts = content.split(outputVarRegex)
return parts.filter(part => part.length > 0)
}
const contentList = useMemo(() => {
if (!formData?.form_content)
return []
return splitByOutputVar(formData.form_content)
}, [formData?.form_content])
useEffect(() => {
if (!formData?.inputs)
return
const initialInputs: Record<string, string> = {}
formData.inputs.forEach((item) => {
initialInputs[item.output_variable_name] = item.default.type === 'variable' ? formData.resolved_default_values[item.output_variable_name] || '' : item.default.value
})
setInputs(initialInputs)
}, [formData?.inputs, formData?.resolved_default_values])
// Update inputs immutably via immer
const handleInputsChange = (name: string, value: string) => {
const newInputs = produce(inputs, (draft) => {
draft[name] = value
})
setInputs(newInputs)
}
const submit = (actionID: string) => {
submitForm(
{ token, data: { inputs, action: actionID } },
{
onSuccess: () => {
setSuccess(true)
},
},
)
}
if (isLoading) {
return (
<Loading type="app" />
)
}
if (success) {
return (
<div className={cn('flex h-full w-full flex-col items-center justify-center')}>
<div className="min-w-[480px] max-w-[640px]">
<div className="border-components-divider-subtle flex h-[320px] flex-col gap-4 rounded-[20px] border bg-chat-bubble-bg p-10 pb-9 shadow-lg backdrop-blur-sm">
<div className="h-[56px] w-[56px] shrink-0 rounded-2xl border border-components-panel-border-subtle bg-background-default-dodge p-3">
<RiCheckboxCircleFill className="h-8 w-8 text-text-success" />
</div>
<div className="grow">
<div className="title-4xl-semi-bold text-text-primary">{t('humanInput.thanks', { ns: 'share' })}</div>
<div className="title-4xl-semi-bold text-text-primary">{t('humanInput.recorded', { ns: 'share' })}</div>
</div>
<div className="system-2xs-regular-uppercase shrink-0 text-text-tertiary">{t('humanInput.submissionID', { id: token, ns: 'share' })}</div>
</div>
<div className="flex flex-row-reverse px-2 py-3">
<div className={cn(
'flex shrink-0 items-center gap-1.5 px-1',
)}
>
<div className="system-2xs-medium-uppercase text-text-tertiary">{t('chat.poweredBy', { ns: 'share' })}</div>
<DifyLogo size="small" />
</div>
</div>
</div>
</div>
)
}
if (expired) {
return (
<div className={cn('flex h-full w-full flex-col items-center justify-center')}>
<div className="min-w-[480px] max-w-[640px]">
<div className="border-components-divider-subtle flex h-[320px] flex-col gap-4 rounded-[20px] border bg-chat-bubble-bg p-10 pb-9 shadow-lg backdrop-blur-sm">
<div className="flex h-14 w-14 shrink-0 items-center justify-center rounded-2xl border border-components-panel-border-subtle bg-background-default-dodge p-3">
<RiInformation2Fill className="h-8 w-8 text-text-accent" />
</div>
<div className="grow">
<div className="title-4xl-semi-bold text-text-primary">{t('humanInput.sorry', { ns: 'share' })}</div>
<div className="title-4xl-semi-bold text-text-primary">{t('humanInput.expired', { ns: 'share' })}</div>
</div>
<div className="system-2xs-regular-uppercase shrink-0 text-text-tertiary">{t('humanInput.submissionID', { id: token, ns: 'share' })}</div>
</div>
<div className="flex flex-row-reverse px-2 py-3">
<div className={cn(
'flex shrink-0 items-center gap-1.5 px-1',
)}
>
<div className="system-2xs-medium-uppercase text-text-tertiary">{t('chat.poweredBy', { ns: 'share' })}</div>
<DifyLogo size="small" />
</div>
</div>
</div>
</div>
)
}
if (submitted) {
return (
<div className={cn('flex h-full w-full flex-col items-center justify-center')}>
<div className="min-w-[480px] max-w-[640px]">
<div className="border-components-divider-subtle flex h-[320px] flex-col gap-4 rounded-[20px] border bg-chat-bubble-bg p-10 pb-9 shadow-lg backdrop-blur-sm">
<div className="flex h-14 w-14 shrink-0 items-center justify-center rounded-2xl border border-components-panel-border-subtle bg-background-default-dodge p-3">
<RiInformation2Fill className="h-8 w-8 text-text-accent" />
</div>
<div className="grow">
<div className="title-4xl-semi-bold text-text-primary">{t('humanInput.sorry', { ns: 'share' })}</div>
<div className="title-4xl-semi-bold text-text-primary">{t('humanInput.completed', { ns: 'share' })}</div>
</div>
<div className="system-2xs-regular-uppercase shrink-0 text-text-tertiary">{t('humanInput.submissionID', { id: token, ns: 'share' })}</div>
</div>
<div className="flex flex-row-reverse px-2 py-3">
<div className={cn(
'flex shrink-0 items-center gap-1.5 px-1',
)}
>
<div className="system-2xs-medium-uppercase text-text-tertiary">{t('chat.poweredBy', { ns: 'share' })}</div>
<DifyLogo size="small" />
</div>
</div>
</div>
</div>
)
}
if (rateLimitExceeded) {
return (
<div className={cn('flex h-full w-full flex-col items-center justify-center')}>
<div className="min-w-[480px] max-w-[640px]">
<div className="border-components-divider-subtle flex h-[320px] flex-col gap-4 rounded-[20px] border bg-chat-bubble-bg p-10 pb-9 shadow-lg backdrop-blur-sm">
<div className="flex h-14 w-14 shrink-0 items-center justify-center rounded-2xl border border-components-panel-border-subtle bg-background-default-dodge p-3">
<RiErrorWarningFill className="h-8 w-8 text-text-destructive" />
</div>
<div className="grow">
<div className="title-4xl-semi-bold text-text-primary">{t('humanInput.rateLimitExceeded', { ns: 'share' })}</div>
</div>
</div>
<div className="flex flex-row-reverse px-2 py-3">
<div className={cn(
'flex shrink-0 items-center gap-1.5 px-1',
)}
>
<div className="system-2xs-medium-uppercase text-text-tertiary">{t('chat.poweredBy', { ns: 'share' })}</div>
<DifyLogo size="small" />
</div>
</div>
</div>
</div>
)
}
if (!formData) {
return (
<div className={cn('flex h-full w-full flex-col items-center justify-center')}>
<div className="min-w-[480px] max-w-[640px]">
<div className="border-components-divider-subtle flex h-[320px] flex-col gap-4 rounded-[20px] border bg-chat-bubble-bg p-10 pb-9 shadow-lg backdrop-blur-sm">
<div className="flex h-14 w-14 shrink-0 items-center justify-center rounded-2xl border border-components-panel-border-subtle bg-background-default-dodge p-3">
<RiErrorWarningFill className="h-8 w-8 text-text-destructive" />
</div>
<div className="grow">
<div className="title-4xl-semi-bold text-text-primary">{t('humanInput.formNotFound', { ns: 'share' })}</div>
</div>
</div>
<div className="flex flex-row-reverse px-2 py-3">
<div className={cn(
'flex shrink-0 items-center gap-1.5 px-1',
)}
>
<div className="system-2xs-medium-uppercase text-text-tertiary">{t('chat.poweredBy', { ns: 'share' })}</div>
<DifyLogo size="small" />
</div>
</div>
</div>
</div>
)
}
const site = formData.site.site
return (
<div className={cn('mx-auto flex h-full w-full max-w-[720px] flex-col items-center')}>
<div className="mt-4 flex w-full shrink-0 items-center gap-3 py-3">
<AppIcon
size="large"
iconType={site.icon_type}
icon={site.icon}
background={site.icon_background}
imageUrl={site.icon_url}
/>
<div className="system-xl-semibold grow text-text-primary">{site.title}</div>
</div>
<div className="h-0 w-full grow overflow-y-auto">
<div className="border-components-divider-subtle rounded-[20px] border bg-chat-bubble-bg p-4 shadow-lg backdrop-blur-sm">
{contentList.map((content, index) => (
<ContentItem
key={index}
content={content}
formInputFields={formData.inputs}
inputs={inputs}
onInputChange={handleInputsChange}
/>
))}
<div className="flex flex-wrap gap-1 py-1">
{formData.user_actions.map((action: UserAction) => (
<Button
key={action.id}
disabled={isSubmitting}
variant={getButtonStyle(action.button_style) as ButtonProps['variant']}
onClick={() => submit(action.id)}
>
{action.title}
</Button>
))}
</div>
<ExpirationTime expirationTime={formData.expiration_time * 1000} />
</div>
<div className="flex flex-row-reverse px-2 py-3">
<div className={cn(
'flex shrink-0 items-center gap-1.5 px-1',
)}
>
<div className="system-2xs-medium-uppercase text-text-tertiary">{t('chat.poweredBy', { ns: 'share' })}</div>
<DifyLogo size="small" />
</div>
</div>
</div>
</div>
)
}
export default React.memo(FormContent)

View File

@@ -0,0 +1,13 @@
'use client'
import * as React from 'react'
import FormContent from './form'
const FormPage = () => {
return (
<div className="h-full min-w-[300px] bg-chatbot-bg pb-[env(safe-area-inset-bottom)]">
<FormContent />
</div>
)
}
export default React.memo(FormPage)

View File

@@ -47,7 +47,7 @@ const AuthenticatedLayout = ({ children }: { children: React.ReactNode }) => {
await webAppLogout(shareCode!)
const url = getSigninUrl()
router.replace(url)
}, [getSigninUrl, router, webAppLogout, shareCode])
}, [getSigninUrl, router, shareCode])
if (appInfoError) {
return (

View File

@@ -31,7 +31,7 @@ const Splash: FC<PropsWithChildren> = ({ children }) => {
await webAppLogout(shareCode!)
const url = getSigninUrl()
router.replace(url)
}, [getSigninUrl, router, webAppLogout, shareCode])
}, [getSigninUrl, router, shareCode])
const [isLoading, setIsLoading] = useState(true)
useEffect(() => {

View File

@@ -115,6 +115,7 @@ export type AppPublisherProps = {
missingStartNode?: boolean
hasTriggerNode?: boolean // Whether the workflow currently contains any trigger nodes (used to hide the missing-start CTA when triggers exist).
startNodeLimitExceeded?: boolean
hasHumanInputNode?: boolean
}
const PUBLISH_SHORTCUT = ['ctrl', '⇧', 'P']
@@ -138,13 +139,14 @@ const AppPublisher = ({
missingStartNode = false,
hasTriggerNode = false,
startNodeLimitExceeded = false,
hasHumanInputNode = false,
}: AppPublisherProps) => {
const { t } = useTranslation()
const [published, setPublished] = useState(false)
const [open, setOpen] = useState(false)
const [showAppAccessControl, setShowAppAccessControl] = useState(false)
const [isAppAccessSet, setIsAppAccessSet] = useState(true)
const [embeddingModalOpen, setEmbeddingModalOpen] = useState(false)
const appDetail = useAppStore(state => state.appDetail)
@@ -161,6 +163,13 @@ const AppPublisher = ({
const { data: appAccessSubjects, isLoading: isGettingAppWhiteListSubjects } = useAppWhiteListSubjects(appDetail?.id, open && systemFeatures.webapp_auth.enabled && appDetail?.access_mode === AccessMode.SPECIFIC_GROUPS_MEMBERS)
const openAsyncWindow = useAsyncWindowOpen()
const isAppAccessSet = useMemo(() => {
if (appDetail && appAccessSubjects) {
return !(appDetail.access_mode === AccessMode.SPECIFIC_GROUPS_MEMBERS && appAccessSubjects.groups?.length === 0 && appAccessSubjects.members?.length === 0)
}
return true
}, [appAccessSubjects, appDetail])
const noAccessPermission = useMemo(() => systemFeatures.webapp_auth.enabled && appDetail && appDetail.access_mode !== AccessMode.EXTERNAL_MEMBERS && !userCanAccessApp?.result, [systemFeatures, appDetail, userCanAccessApp])
const disabledFunctionButton = useMemo(() => (!publishedAt || missingStartNode || noAccessPermission), [publishedAt, missingStartNode, noAccessPermission])
@@ -171,25 +180,13 @@ const AppPublisher = ({
return t('noUserInputNode', { ns: 'app' })
if (noAccessPermission)
return t('noAccessPermission', { ns: 'app' })
}, [missingStartNode, noAccessPermission, publishedAt])
}, [missingStartNode, noAccessPermission, publishedAt, t])
useEffect(() => {
if (systemFeatures.webapp_auth.enabled && open && appDetail)
refetch()
}, [open, appDetail, refetch, systemFeatures])
useEffect(() => {
if (appDetail && appAccessSubjects) {
if (appDetail.access_mode === AccessMode.SPECIFIC_GROUPS_MEMBERS && appAccessSubjects.groups?.length === 0 && appAccessSubjects.members?.length === 0)
setIsAppAccessSet(false)
else
setIsAppAccessSet(true)
}
else {
setIsAppAccessSet(true)
}
}, [appAccessSubjects, appDetail])
const handlePublish = useCallback(async (params?: ModelAndParameter | PublishWorkflowParams) => {
try {
await onPublish?.(params)
@@ -461,7 +458,7 @@ const AppPublisher = ({
{t('common.accessAPIReference', { ns: 'workflow' })}
</SuggestedAction>
</Tooltip>
{appDetail?.mode === AppModeEnum.WORKFLOW && (
{appDetail?.mode === AppModeEnum.WORKFLOW && !hasHumanInputNode && (
<WorkflowToolConfigureButton
disabled={workflowToolDisabled}
published={!!toolPublished}

View File

@@ -109,7 +109,6 @@ const AgentTools: FC = () => {
tool_parameters: paramsWithDefaultValue,
notAuthor: !tool.is_team_authorization,
enabled: true,
type: tool.provider_type as CollectionType,
}
}
const handleSelectTool = (tool: ToolDefaultValue) => {

View File

@@ -68,6 +68,7 @@ type IDrawerContext = {
}
type StatusCount = {
paused: number
success: number
failed: number
partial_success: number
@@ -93,7 +94,15 @@ const statusTdRender = (statusCount: StatusCount) => {
if (!statusCount)
return null
if (statusCount.partial_success + statusCount.failed === 0) {
if (statusCount.paused > 0) {
return (
<div className="system-xs-semibold-uppercase inline-flex items-center gap-1">
<Indicator color="yellow" />
<span className="text-util-colors-warning-warning-600">Pending</span>
</div>
)
}
else if (statusCount.partial_success + statusCount.failed === 0) {
return (
<div className="system-xs-semibold-uppercase inline-flex items-center gap-1">
<Indicator color="green" />
@@ -296,7 +305,7 @@ function DetailPanel({ detail, onFeedback }: IDetailPanel) {
if (abortControllerRef.current === controller)
abortControllerRef.current = null
}
}, [detail.id, hasMore, timezone, t, appDetail, detail?.model_config?.configs?.introduction])
}, [detail.id, hasMore, timezone, t, appDetail])
// Derive chatItemTree, threadChatItems, and oldestAnswerIdRef from allChatItems
useEffect(() => {
@@ -411,7 +420,7 @@ function DetailPanel({ detail, onFeedback }: IDetailPanel) {
notify({ type: 'error', message: t('actionMsg.modifiedUnsuccessfully', { ns: 'common' }) })
return false
}
}, [allChatItems, appDetail?.id, t])
}, [allChatItems, appDetail?.id, notify, t])
const fetchInitiated = useRef(false)
@@ -504,7 +513,7 @@ function DetailPanel({ detail, onFeedback }: IDetailPanel) {
finally {
setIsLoading(false)
}
}, [detail.id, hasMore, isLoading, timezone, t, appDetail, detail?.model_config?.configs?.introduction])
}, [detail.id, hasMore, isLoading, timezone, t, appDetail])
const handleScroll = useCallback(() => {
const scrollableDiv = document.getElementById('scrollableDiv')

View File

@@ -53,6 +53,7 @@ const defaultProviderContext = {
refreshLicenseLimit: noop,
isAllowTransferWorkspace: false,
isAllowPublishAsCustomKnowledgePipelineTemplate: false,
humanInputEmailDeliveryEnabled: false,
}
const defaultModalContext: ModalContextState = {

View File

@@ -8,7 +8,7 @@ import {
RiClipboardLine,
RiFileList3Line,
RiPlayList2Line,
RiReplay15Line,
RiResetLeftLine,
RiSparklingFill,
RiSparklingLine,
RiThumbDownLine,
@@ -18,10 +18,12 @@ import { useBoolean } from 'ahooks'
import copy from 'copy-to-clipboard'
import { useParams } from 'next/navigation'
import * as React from 'react'
import { useEffect, useState } from 'react'
import { useCallback, useEffect, useState } from 'react'
import { useTranslation } from 'react-i18next'
import { useStore as useAppStore } from '@/app/components/app/store'
import ActionButton, { ActionButtonState } from '@/app/components/base/action-button'
import HumanInputFilledFormList from '@/app/components/base/chat/chat/answer/human-input-filled-form-list'
import HumanInputFormList from '@/app/components/base/chat/chat/answer/human-input-form-list'
import WorkflowProcessItem from '@/app/components/base/chat/chat/answer/workflow-process'
import { useChatContext } from '@/app/components/base/chat/chat/context'
import Loading from '@/app/components/base/loading'
@@ -29,7 +31,8 @@ import { Markdown } from '@/app/components/base/markdown'
import NewAudioButton from '@/app/components/base/new-audio-button'
import Toast from '@/app/components/base/toast'
import { fetchTextGenerationMessage } from '@/service/debug'
import { AppSourceType, fetchMoreLikeThis, updateFeedback } from '@/service/share'
import { AppSourceType, fetchMoreLikeThis, submitHumanInputForm, updateFeedback } from '@/service/share'
import { submitHumanInputForm as submitHumanInputFormService } from '@/service/workflow'
import { cn } from '@/utils/classnames'
import ResultTab from './result-tab'
@@ -121,7 +124,7 @@ const GenerationItem: FC<IGenerationItemProps> = ({
const [isQuerying, { setTrue: startQuerying, setFalse: stopQuerying }] = useBoolean(false)
const childProps = {
isInWebApp: true,
isInWebApp,
content: completionRes,
messageId: childMessageId,
depth: depth + 1,
@@ -202,16 +205,22 @@ const GenerationItem: FC<IGenerationItemProps> = ({
}
const [currentTab, setCurrentTab] = useState<string>('DETAIL')
const showResultTabs = !!workflowProcessData?.resultText || !!workflowProcessData?.files?.length
const showResultTabs = !!workflowProcessData?.resultText || !!workflowProcessData?.files?.length || (workflowProcessData?.humanInputFormDataList && workflowProcessData?.humanInputFormDataList.length > 0) || (workflowProcessData?.humanInputFilledFormDataList && workflowProcessData?.humanInputFilledFormDataList.length > 0)
const switchTab = async (tab: string) => {
setCurrentTab(tab)
}
useEffect(() => {
if (workflowProcessData?.resultText || !!workflowProcessData?.files?.length)
if (workflowProcessData?.resultText || !!workflowProcessData?.files?.length || (workflowProcessData?.humanInputFormDataList && workflowProcessData?.humanInputFormDataList.length > 0) || (workflowProcessData?.humanInputFilledFormDataList && workflowProcessData?.humanInputFilledFormDataList.length > 0))
switchTab('RESULT')
else
switchTab('DETAIL')
}, [workflowProcessData?.files?.length, workflowProcessData?.resultText])
}, [workflowProcessData?.files?.length, workflowProcessData?.resultText, workflowProcessData?.humanInputFormDataList, workflowProcessData?.humanInputFilledFormDataList])
const handleSubmitHumanInputForm = useCallback(async (formToken: string, formData: { inputs: Record<string, string>, action: string }) => {
if (appSourceType === AppSourceType.installedApp)
await submitHumanInputFormService(formToken, formData)
else
await submitHumanInputForm(formToken, formData)
}, [appSourceType])
return (
<>
@@ -275,7 +284,24 @@ const GenerationItem: FC<IGenerationItemProps> = ({
)}
</div>
{!isError && (
<ResultTab data={workflowProcessData} content={content} currentTab={currentTab} />
<>
{currentTab === 'RESULT' && workflowProcessData.humanInputFormDataList && workflowProcessData.humanInputFormDataList.length > 0 && (
<div className="px-4 pt-4">
<HumanInputFormList
humanInputFormDataList={workflowProcessData.humanInputFormDataList}
onHumanInputFormSubmit={handleSubmitHumanInputForm}
/>
</div>
)}
{currentTab === 'RESULT' && workflowProcessData.humanInputFilledFormDataList && workflowProcessData.humanInputFilledFormDataList.length > 0 && (
<div className="px-4 pt-4">
<HumanInputFilledFormList
humanInputFilledFormDataList={workflowProcessData.humanInputFilledFormDataList}
/>
</div>
)}
<ResultTab data={workflowProcessData} content={content} currentTab={currentTab} />
</>
)}
</>
)}
@@ -348,7 +374,7 @@ const GenerationItem: FC<IGenerationItemProps> = ({
)}
{isInWebApp && isError && (
<ActionButton onClick={onRetry}>
<RiReplay15Line className="h-4 w-4" />
<RiResetLeftLine className="h-4 w-4" />
</ActionButton>
)}
{isInWebApp && !isWorkflow && !isTryApp && (

View File

@@ -81,6 +81,14 @@ const WorkflowAppLogList: FC<ILogs> = ({ logs, appDetail, onRefresh }) => {
</div>
)
}
if (status === 'paused') {
return (
<div className="system-xs-semibold-uppercase inline-flex items-center gap-1">
<Indicator color="yellow" />
<span className="text-util-colors-warning-warning-600">Pending</span>
</div>
)
}
if (status === 'running') {
return (
<div className="system-xs-semibold-uppercase inline-flex items-center gap-1">

View File

@@ -26,6 +26,10 @@
@apply p-0.5 w-6 h-6 rounded-lg
}
.action-btn-s {
@apply w-5 h-5 rounded-[6px]
}
.action-btn-xs {
@apply p-0 w-4 h-4 rounded
}

View File

@@ -18,6 +18,7 @@ const actionButtonVariants = cva(
variants: {
size: {
xs: 'action-btn-xs',
s: 'action-btn-s',
m: 'action-btn-m',
l: 'action-btn-l',
xl: 'action-btn-xl',

View File

@@ -2,6 +2,7 @@ import type { FileEntity } from '../../file-uploader/types'
import type {
ChatConfig,
ChatItem,
ChatItemInTree,
OnSend,
} from '../types'
import { useCallback, useEffect, useMemo, useState } from 'react'
@@ -16,7 +17,9 @@ import {
fetchSuggestedQuestions,
getUrl,
stopChatMessageResponding,
submitHumanInputForm,
} from '@/service/share'
import { submitHumanInputForm as submitHumanInputFormService } from '@/service/workflow'
import { TransferMethod } from '@/types/app'
import { cn } from '@/utils/classnames'
import { formatBooleanInputs } from '@/utils/model-config'
@@ -73,9 +76,9 @@ const ChatWrapper = () => {
}, [appParams, currentConversationItem?.introduction])
const {
chatList,
setTargetMessageId,
handleSend,
handleStop,
handleSwitchSibling,
isResponding: respondingState,
suggestedQuestions,
} = useChat(
@@ -122,8 +125,11 @@ const ChatWrapper = () => {
if (fileIsUploading)
return true
if (chatList.some(item => item.isAnswer && item.humanInputFormDataList && item.humanInputFormDataList.length > 0))
return true
return false
}, [inputsFormValue, inputsForms, allInputsHidden])
}, [allInputsHidden, inputsForms, chatList, inputsFormValue])
useEffect(() => {
if (currentChatInstanceRef.current)
@@ -134,6 +140,40 @@ const ChatWrapper = () => {
setIsResponding(respondingState)
}, [respondingState, setIsResponding])
// Resume paused workflows when chat history is loaded
useEffect(() => {
if (!appPrevChatTree || appPrevChatTree.length === 0)
return
// Find the last answer item with workflow_run_id that needs resumption (DFS - find deepest first)
let lastPausedNode: ChatItemInTree | undefined
const findLastPausedWorkflow = (nodes: ChatItemInTree[]) => {
nodes.forEach((node) => {
// DFS: recurse to children first
if (node.children && node.children.length > 0)
findLastPausedWorkflow(node.children)
// Track the last node with humanInputFormDataList
if (node.isAnswer && node.workflow_run_id && node.humanInputFormDataList && node.humanInputFormDataList.length > 0)
lastPausedNode = node
})
}
findLastPausedWorkflow(appPrevChatTree)
// Only resume the last paused workflow
if (lastPausedNode) {
handleSwitchSibling(
lastPausedNode.id,
{
onGetSuggestedQuestions: responseItemId => fetchSuggestedQuestions(responseItemId, appSourceType, appId),
onConversationComplete: currentConversationId ? undefined : handleNewConversationCompleted,
isPublicAPI: appSourceType === AppSourceType.webApp,
},
)
}
}, [])
const doSend: OnSend = useCallback((message, files, isRegenerate = false, parentAnswer: ChatItem | null = null) => {
const data: any = {
query: message,
@@ -149,10 +189,10 @@ const ChatWrapper = () => {
{
onGetSuggestedQuestions: responseItemId => fetchSuggestedQuestions(responseItemId, appSourceType, appId),
onConversationComplete: isHistoryConversation ? undefined : handleNewConversationCompleted,
isPublicAPI: !isInstalledApp,
isPublicAPI: appSourceType === AppSourceType.webApp,
},
)
}, [chatList, handleNewConversationCompleted, handleSend, currentConversationId, currentConversationInputs, newConversationInputs, isInstalledApp, appId])
}, [inputsForms, currentConversationId, currentConversationInputs, newConversationInputs, chatList, handleSend, appSourceType, appId, isHistoryConversation, handleNewConversationCompleted])
const doRegenerate = useCallback((chatItem: ChatItem, editedQuestion?: { message: string, files?: FileEntity[] }) => {
const question = editedQuestion ? chatItem : chatList.find(item => item.id === chatItem.parentMessageId)!
@@ -160,12 +200,27 @@ const ChatWrapper = () => {
doSend(editedQuestion ? editedQuestion.message : question.content, editedQuestion ? editedQuestion.files : question.message_files, true, isValidGeneratedAnswer(parentAnswer) ? parentAnswer : null)
}, [chatList, doSend])
const doSwitchSibling = useCallback((siblingMessageId: string) => {
handleSwitchSibling(siblingMessageId, {
onGetSuggestedQuestions: responseItemId => fetchSuggestedQuestions(responseItemId, appSourceType, appId),
onConversationComplete: currentConversationId ? undefined : handleNewConversationCompleted,
isPublicAPI: appSourceType === AppSourceType.webApp,
})
}, [handleSwitchSibling, currentConversationId, handleNewConversationCompleted, appSourceType, appId])
const messageList = useMemo(() => {
if (currentConversationId || chatList.length > 1)
return chatList
// Without messages we are in the welcome screen, so hide the opening statement from chatlist
return chatList.filter(item => !item.isOpeningStatement)
}, [chatList])
}, [chatList, currentConversationId])
const handleSubmitHumanInputForm = useCallback(async (formToken: string, formData: any) => {
if (isInstalledApp)
await submitHumanInputFormService(formToken, formData)
else
await submitHumanInputForm(formToken, formData)
}, [isInstalledApp])
const [collapsed, setCollapsed] = useState(!!currentConversationId)
@@ -274,6 +329,7 @@ const ChatWrapper = () => {
inputsForm={inputsForms}
onRegenerate={doRegenerate}
onStopResponding={handleStop}
onHumanInputFormSubmit={handleSubmitHumanInputForm}
chatNode={(
<>
{chatNode}
@@ -286,7 +342,7 @@ const ChatWrapper = () => {
answerIcon={answerIcon}
hideProcessDetail
themeBuilder={themeBuilder}
switchSibling={siblingMessageId => setTargetMessageId(siblingMessageId)}
switchSibling={doSwitchSibling}
inputDisabled={inputDisabled}
sidebarCollapseState={sidebarCollapseState}
questionIcon={

View File

@@ -1,3 +1,4 @@
import type { ExtraContent } from '../chat/type'
import type {
Callback,
ChatConfig,
@@ -9,6 +10,7 @@ import type {
AppData,
ConversationItem,
} from '@/models/share'
import type { HumanInputFilledFormData, HumanInputFormData } from '@/types/workflow'
import { useLocalStorageState } from 'ahooks'
import { noop } from 'es-toolkit/function'
import { produce } from 'immer'
@@ -57,6 +59,24 @@ function getFormattedChatList(messages: any[]) {
parentMessageId: item.parent_message_id || undefined,
})
const answerFiles = item.message_files?.filter((file: any) => file.belongs_to === 'assistant') || []
const humanInputFormDataList: HumanInputFormData[] = []
const humanInputFilledFormDataList: HumanInputFilledFormData[] = []
let workflowRunId = ''
if (item.status === 'paused') {
item.extra_contents?.forEach((content: ExtraContent) => {
if (content.type === 'human_input' && !content.submitted) {
humanInputFormDataList.push(content.form_definition)
workflowRunId = content.workflow_run_id
}
})
}
else if (item.status === 'normal') {
item.extra_contents?.forEach((content: ExtraContent) => {
if (content.type === 'human_input' && content.submitted) {
humanInputFilledFormDataList.push(content.form_submission_data)
}
})
}
newChatList.push({
id: item.id,
content: item.answer,
@@ -66,6 +86,9 @@ function getFormattedChatList(messages: any[]) {
citation: item.retriever_resources,
message_files: getProcessedFilesFromResponse(answerFiles.map((item: any) => ({ ...item, related_id: item.id, upload_file_id: item.upload_file_id }))),
parentMessageId: `question-${item.id}`,
humanInputFormDataList,
humanInputFilledFormDataList,
workflow_run_id: workflowRunId,
})
})
return newChatList
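A minimal sketch of the message shape this parsing expects; the field values below are hypothetical and not taken from this diff. A paused message carries unsubmitted human_input entries plus a workflow_run_id, while a completed one carries submitted entries.
// Illustrative payloads only, not part of this change.
const pausedMessage = {
id: 'msg-1',
status: 'paused',
extra_contents: [
{ type: 'human_input', submitted: false, form_definition: { /* HumanInputFormData */ }, workflow_run_id: 'run-1' },
],
}
// -> humanInputFormDataList = [form_definition], workflow_run_id = 'run-1'
const submittedMessage = {
id: 'msg-2',
status: 'normal',
extra_contents: [
{ type: 'human_input', submitted: true, form_submission_data: { /* HumanInputFilledFormData */ } },
],
}
// -> humanInputFilledFormDataList = [form_submission_data]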

View File

@@ -0,0 +1,54 @@
import type { ContentItemProps } from './type'
import * as React from 'react'
import { useMemo } from 'react'
import { Markdown } from '@/app/components/base/markdown'
import Textarea from '@/app/components/base/textarea'
const ContentItem = ({
content,
formInputFields,
inputs,
onInputChange,
}: ContentItemProps) => {
const isInputField = (field: string) => {
const outputVarRegex = /\{\{#\$output\.[^#]+#\}\}/
return outputVarRegex.test(field)
}
const extractFieldName = (str: string): string => {
const outputVarRegex = /\{\{#\$output\.([^#]+)#\}\}/
const match = str.match(outputVarRegex)
return match ? match[1] : ''
}
const fieldName = useMemo(() => {
return extractFieldName(content)
}, [content])
const formInputField = useMemo(() => {
return formInputFields.find(field => field.output_variable_name === fieldName)
}, [formInputFields, fieldName])
if (!isInputField(content)) {
return (
<Markdown content={content} />
)
}
if (!formInputField)
return null
return (
<div className="py-3">
{formInputField.type === 'paragraph' && (
<Textarea
className="h-[104px] sm:text-xs"
value={inputs[fieldName]}
onChange={(e) => { onInputChange(fieldName, e.target.value) }}
/>
)}
</div>
)
}
export default React.memo(ContentItem)
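A quick sketch of how the output-variable detection above behaves; the variable name 'feedback' is hypothetical.
const outputVarRegex = /\{\{#\$output\.([^#]+)#\}\}/
'{{#$output.feedback#}}'.match(outputVarRegex)?.[1] // 'feedback', rendered as an input field
'Please review the draft below.'.match(outputVarRegex) // null, rendered as Markdown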

View File

@@ -0,0 +1,64 @@
import { RiArrowDownSLine, RiArrowRightSLine } from '@remixicon/react'
import { useCallback, useState } from 'react'
import BlockIcon from '@/app/components/workflow/block-icon'
import { BlockEnum } from '@/app/components/workflow/types'
import { cn } from '@/utils/classnames'
type ContentWrapperProps = {
nodeTitle: string
children: React.ReactNode
showExpandIcon?: boolean
className?: string
expanded?: boolean
}
const ContentWrapper = ({
nodeTitle,
children,
showExpandIcon = false,
className,
expanded = false,
}: ContentWrapperProps) => {
const [isExpanded, setIsExpanded] = useState(expanded)
const handleToggleExpand = useCallback(() => {
setIsExpanded(!isExpanded)
}, [isExpanded])
return (
<div className={cn('rounded-2xl border-[0.5px] border-components-panel-border bg-background-section p-2 shadow-md', className)}>
<div className="flex items-center gap-2 p-2">
{/* node icon */}
<BlockIcon type={BlockEnum.HumanInput} className="shrink-0" />
{/* node name */}
<div
className="system-sm-semibold-uppercase grow truncate text-text-primary"
title={nodeTitle}
>
{nodeTitle}
</div>
{showExpandIcon && (
<div className="shrink-0 cursor-pointer" onClick={handleToggleExpand}>
{
isExpanded
? (
<RiArrowDownSLine className="size-4" />
)
: (
<RiArrowRightSLine className="size-4" />
)
}
</div>
)}
</div>
{(!showExpandIcon || isExpanded) && (
<div className="px-2 py-1">
{/* human input form content */}
{children}
</div>
)}
</div>
)
}
export default ContentWrapper

View File

@@ -0,0 +1,30 @@
import type { ExecutedAction as ExecutedActionType } from './type'
import { memo } from 'react'
import { Trans } from 'react-i18next'
import Divider from '@/app/components/base/divider'
import { TriggerAll } from '@/app/components/base/icons/src/vender/workflow'
type ExecutedActionProps = {
executedAction: ExecutedActionType
}
const ExecutedAction = ({
executedAction,
}: ExecutedActionProps) => {
return (
<div className="flex flex-col gap-y-1 py-1">
<Divider className="mb-2 mt-1 w-[30px]" />
<div className="system-xs-regular flex items-center gap-x-1 text-text-tertiary">
<TriggerAll className="size-3.5 shrink-0" />
<Trans
i18nKey="nodes.humanInput.userActions.triggered"
ns="workflow"
components={{ strong: <span className="system-xs-medium text-text-secondary"></span> }}
values={{ actionName: executedAction.id }}
/>
</div>
</div>
)
}
export default memo(ExecutedAction)

View File

@@ -0,0 +1,46 @@
'use client'
import { RiAlertFill, RiTimeLine } from '@remixicon/react'
import { useTranslation } from 'react-i18next'
import { useLocale } from '@/context/i18n'
import { cn } from '@/utils/classnames'
import { getRelativeTime, isRelativeTimeSameOrAfter } from './utils'
type ExpirationTimeProps = {
expirationTime: number
}
const ExpirationTime = ({
expirationTime,
}: ExpirationTimeProps) => {
const { t } = useTranslation()
const locale = useLocale()
const relativeTime = getRelativeTime(expirationTime, locale)
const isSameOrAfter = isRelativeTimeSameOrAfter(expirationTime)
return (
<div
className={cn(
'system-xs-regular mt-1 flex items-center gap-x-1 text-text-tertiary',
!isSameOrAfter && 'text-text-warning',
)}
>
{
isSameOrAfter
? (
<>
<RiTimeLine className="size-3.5" />
<span>{t('humanInput.expirationTimeNowOrFuture', { relativeTime, ns: 'share' })}</span>
</>
)
: (
<>
<RiAlertFill className="size-3.5" />
<span>{t('humanInput.expiredTip', { ns: 'share' })}</span>
</>
)
}
</div>
)
}
export default ExpirationTime

View File

@@ -0,0 +1,61 @@
'use client'
import type { HumanInputFormProps } from './type'
import type { ButtonProps } from '@/app/components/base/button'
import type { UserAction } from '@/app/components/workflow/nodes/human-input/types'
import * as React from 'react'
import { useCallback, useState } from 'react'
import Button from '@/app/components/base/button'
import ContentItem from './content-item'
import { getButtonStyle, initializeInputs, splitByOutputVar } from './utils'
const HumanInputForm = ({
formData,
onSubmit,
}: HumanInputFormProps) => {
const formToken = formData.form_token
const defaultInputs = initializeInputs(formData.inputs, formData.resolved_default_values || {})
const contentList = splitByOutputVar(formData.form_content)
const [inputs, setInputs] = useState(defaultInputs)
const [isSubmitting, setIsSubmitting] = useState(false)
const handleInputsChange = useCallback((name: string, value: string) => {
setInputs(prev => ({
...prev,
[name]: value,
}))
}, [])
const submit = async (formToken: string, actionID: string, inputs: Record<string, string>) => {
setIsSubmitting(true)
try {
await onSubmit?.(formToken, { inputs, action: actionID })
}
finally {
// reset even if onSubmit throws, so the action buttons are not left disabled
setIsSubmitting(false)
}
}
return (
<>
{contentList.map((content, index) => (
<ContentItem
key={index}
content={content}
formInputFields={formData.inputs}
inputs={inputs}
onInputChange={handleInputsChange}
/>
))}
<div className="flex flex-wrap gap-1 py-1">
{formData.actions.map((action: UserAction) => (
<Button
key={action.id}
disabled={isSubmitting}
variant={getButtonStyle(action.button_style) as ButtonProps['variant']}
onClick={() => submit(formToken, action.id, inputs)}
>
{action.title}
</Button>
))}
</div>
</>
)
}
export default React.memo(HumanInputForm)
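For reference, the handler above ends up calling onSubmit roughly like this; the token, field name and action id are hypothetical.
// onSubmit('form-token-abc', { inputs: { comment: 'Looks good' }, action: 'approve' })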

View File

@@ -0,0 +1,16 @@
import * as React from 'react'
import { Markdown } from '@/app/components/base/markdown'
type SubmittedContentProps = {
content: string
}
const SubmittedContent = ({
content,
}: SubmittedContentProps) => {
return (
<Markdown content={content} />
)
}
export default React.memo(SubmittedContent)

View File

@@ -0,0 +1,25 @@
import type { SubmittedHumanInputContentProps } from './type'
import { useMemo } from 'react'
import ExecutedAction from './executed-action'
import SubmittedContent from './submitted-content'
export const SubmittedHumanInputContent = ({
formData,
}: SubmittedHumanInputContentProps) => {
const { rendered_content, action_id, action_text } = formData
const executedAction = useMemo(() => {
return {
id: action_id,
title: action_text,
}
}, [action_id, action_text])
return (
<>
<SubmittedContent content={rendered_content} />
{/* Executed Action */}
<ExecutedAction executedAction={executedAction} />
</>
)
}

View File

@@ -0,0 +1,43 @@
import { memo } from 'react'
import { Trans, useTranslation } from 'react-i18next'
import Divider from '@/app/components/base/divider'
import { useSelector as useAppSelector } from '@/context/app-context'
type TipsProps = {
showEmailTip: boolean
isEmailDebugMode: boolean
showDebugModeTip: boolean
}
const Tips = ({
showEmailTip,
isEmailDebugMode,
showDebugModeTip,
}: TipsProps) => {
const { t } = useTranslation()
const email = useAppSelector(s => s.userProfile.email)
return (
<>
<Divider className="!my-2 w-[30px]" />
<div className="space-y-1 pt-1">
{showEmailTip && !isEmailDebugMode && (
<div className="system-xs-regular text-text-secondary">{t('common.humanInputEmailTip', { ns: 'workflow' })}</div>
)}
{showEmailTip && isEmailDebugMode && (
<div className="system-xs-regular text-text-secondary">
<Trans
i18nKey="common.humanInputEmailTipInDebugMode"
ns="workflow"
components={{ email: <span className="system-xs-semibold"></span> }}
values={{ email }}
/>
</div>
)}
{showDebugModeTip && <div className="system-xs-medium text-text-warning">{t('common.humanInputWebappTip', { ns: 'workflow' })}</div>}
</div>
</>
)
}
export default memo(Tips)

View File

@@ -0,0 +1,31 @@
import type { FormInputItem } from '@/app/components/workflow/nodes/human-input/types'
import type { HumanInputFilledFormData, HumanInputFormData } from '@/types/workflow'
export type ExecutedAction = {
id: string
title: string
}
export type UnsubmittedHumanInputContentProps = {
formData: HumanInputFormData
showEmailTip?: boolean
isEmailDebugMode?: boolean
showDebugModeTip?: boolean
onSubmit?: (formToken: string, data: { inputs: Record<string, string>, action: string }) => Promise<void>
}
export type SubmittedHumanInputContentProps = {
formData: HumanInputFilledFormData
}
export type HumanInputFormProps = {
formData: HumanInputFormData
onSubmit?: (formToken: string, data: { inputs: Record<string, string>, action: string }) => Promise<void>
}
export type ContentItemProps = {
content: string
formInputFields: FormInputItem[]
inputs: Record<string, string>
onInputChange: (name: string, value: string) => void
}

View File

@@ -0,0 +1,36 @@
import type { UnsubmittedHumanInputContentProps } from './type'
import ExpirationTime from './expiration-time'
import HumanInputForm from './human-input-form'
import Tips from './tips'
export const UnsubmittedHumanInputContent = ({
formData,
showEmailTip = false,
isEmailDebugMode = false,
showDebugModeTip = false,
onSubmit,
}: UnsubmittedHumanInputContentProps) => {
const { expiration_time } = formData
return (
<>
{/* Form */}
<HumanInputForm
formData={formData}
onSubmit={onSubmit}
/>
{/* Tips */}
{(showEmailTip || showDebugModeTip) && (
<Tips
showEmailTip={showEmailTip}
isEmailDebugMode={isEmailDebugMode}
showDebugModeTip={showDebugModeTip}
/>
)}
{/* Expiration Time */}
{typeof expiration_time === 'number' && (
<ExpirationTime expirationTime={expiration_time * 1000} />
)}
</>
)
}

View File

@@ -0,0 +1,64 @@
import type { FormInputItem } from '@/app/components/workflow/nodes/human-input/types'
import type { Locale } from '@/i18n-config'
import dayjs from 'dayjs'
import isSameOrAfter from 'dayjs/plugin/isSameOrAfter'
import relativeTime from 'dayjs/plugin/relativeTime'
import utc from 'dayjs/plugin/utc'
import { UserActionButtonType } from '@/app/components/workflow/nodes/human-input/types'
import 'dayjs/locale/en'
import 'dayjs/locale/zh-cn'
import 'dayjs/locale/ja'
dayjs.extend(utc)
dayjs.extend(relativeTime)
dayjs.extend(isSameOrAfter)
export const getButtonStyle = (style: UserActionButtonType) => {
if (style === UserActionButtonType.Primary)
return 'primary'
if (style === UserActionButtonType.Default)
return 'secondary'
if (style === UserActionButtonType.Accent)
return 'secondary-accent'
if (style === UserActionButtonType.Ghost)
return 'ghost'
}
export const splitByOutputVar = (content: string): string[] => {
const outputVarRegex = /(\{\{#\$output\.[^#]+#\}\})/g
const parts = content.split(outputVarRegex)
return parts.filter(part => part.length > 0)
}
export const initializeInputs = (formInputs: FormInputItem[], defaultValues: Record<string, string> = {}) => {
const initialInputs: Record<string, any> = {}
formInputs.forEach((item) => {
if (item.type === 'text-input' || item.type === 'paragraph')
initialInputs[item.output_variable_name] = item.default.type === 'variable' ? defaultValues[item.output_variable_name] || '' : item.default.value
else
initialInputs[item.output_variable_name] = undefined
})
return initialInputs
}
const localeMap: Record<string, string> = {
'en-US': 'en',
'zh-Hans': 'zh-cn',
'ja-JP': 'ja',
}
export const getRelativeTime = (
utcTimestamp: string | number,
locale: Locale = 'en-US',
) => {
const dayjsLocale = localeMap[locale] ?? 'en'
return dayjs
.utc(utcTimestamp)
.locale(dayjsLocale)
.fromNow()
}
export const isRelativeTimeSameOrAfter = (utcTimestamp: string | number) => {
return dayjs.utc(utcTimestamp).isSameOrAfter(dayjs())
}
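A rough usage sketch of the helpers above; the form content, field names, default type and timestamp are hypothetical.
splitByOutputVar('Approve the summary?\n{{#$output.comment#}}\nThanks!')
// -> ['Approve the summary?\n', '{{#$output.comment#}}', '\nThanks!']
initializeInputs(
[{ type: 'paragraph', output_variable_name: 'comment', default: { type: 'constant', value: '' } }] as any,
{ comment: 'Looks good' },
)
// -> { comment: '' }, since the resolved default is only consulted when default.type === 'variable'
getRelativeTime(Date.now() + 60 * 60 * 1000, 'en-US') // e.g. 'in an hour'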

View File

@@ -0,0 +1,32 @@
import type { HumanInputFilledFormData } from '@/types/workflow'
import ContentWrapper from './human-input-content/content-wrapper'
import { SubmittedHumanInputContent } from './human-input-content/submitted'
type HumanInputFilledFormListProps = {
humanInputFilledFormDataList: HumanInputFilledFormData[]
}
const HumanInputFilledFormList = ({
humanInputFilledFormDataList,
}: HumanInputFilledFormListProps) => {
return (
<div className="mt-2 flex flex-col gap-y-2">
{
humanInputFilledFormDataList.map(formData => (
<ContentWrapper
key={formData.node_id}
nodeTitle={formData.node_title}
showExpandIcon
>
<SubmittedHumanInputContent
key={formData.node_id}
formData={formData}
/>
</ContentWrapper>
))
}
</div>
)
}
export default HumanInputFilledFormList

View File

@@ -0,0 +1,70 @@
import type { DeliveryMethod, HumanInputNodeType } from '@/app/components/workflow/nodes/human-input/types'
import type { Node } from '@/app/components/workflow/types'
import type { HumanInputFormData } from '@/types/workflow'
import { useMemo } from 'react'
import { DeliveryMethodType } from '@/app/components/workflow/nodes/human-input/types'
import ContentWrapper from './human-input-content/content-wrapper'
import { UnsubmittedHumanInputContent } from './human-input-content/unsubmitted'
type HumanInputFormListProps = {
humanInputFormDataList: HumanInputFormData[]
onHumanInputFormSubmit?: (formToken: string, formData: { inputs: Record<string, string>, action: string }) => Promise<void>
getHumanInputNodeData?: (nodeID: string) => Node<HumanInputNodeType> | undefined
}
const HumanInputFormList = ({
humanInputFormDataList,
onHumanInputFormSubmit,
getHumanInputNodeData,
}: HumanInputFormListProps) => {
const deliveryMethodsConfig = useMemo((): Record<string, { showEmailTip: boolean, isEmailDebugMode: boolean, showDebugModeTip: boolean }> => {
if (!humanInputFormDataList.length)
return {}
return humanInputFormDataList.reduce((acc, formData) => {
const deliveryMethodsConfig = getHumanInputNodeData?.(formData.node_id)?.data.delivery_methods || []
if (!deliveryMethodsConfig.length) {
acc[formData.node_id] = {
showEmailTip: false,
isEmailDebugMode: false,
showDebugModeTip: false,
}
return acc
}
const isWebappEnabled = deliveryMethodsConfig.some((method: DeliveryMethod) => method.type === DeliveryMethodType.WebApp && method.enabled)
const isEmailEnabled = deliveryMethodsConfig.some((method: DeliveryMethod) => method.type === DeliveryMethodType.Email && method.enabled)
const isEmailDebugMode = deliveryMethodsConfig.some((method: DeliveryMethod) => method.type === DeliveryMethodType.Email && method.config?.debug_mode)
acc[formData.node_id] = {
showEmailTip: isEmailEnabled,
isEmailDebugMode,
showDebugModeTip: !isWebappEnabled,
}
return acc
}, {} as Record<string, { showEmailTip: boolean, isEmailDebugMode: boolean, showDebugModeTip: boolean }>)
}, [getHumanInputNodeData, humanInputFormDataList])
const filteredHumanInputFormDataList = humanInputFormDataList.filter(formData => formData.display_in_ui)
return (
<div className="mt-2 flex flex-col gap-y-2">
{
filteredHumanInputFormDataList.map(formData => (
<ContentWrapper
key={formData.form_id}
nodeTitle={formData.node_title}
>
<UnsubmittedHumanInputContent
key={formData.form_id}
formData={formData}
showEmailTip={!!deliveryMethodsConfig[formData.node_id]?.showEmailTip}
isEmailDebugMode={!!deliveryMethodsConfig[formData.node_id]?.isEmailDebugMode}
showDebugModeTip={!!deliveryMethodsConfig[formData.node_id]?.showDebugModeTip}
onSubmit={onHumanInputFormSubmit}
/>
</ContentWrapper>
))
}
</div>
)
}
export default HumanInputFormList
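A hypothetical delivery_methods configuration and the flags the memo above would derive from it; the values are illustrative, not from this diff.
// const deliveryMethods = [
// { type: DeliveryMethodType.WebApp, enabled: false },
// { type: DeliveryMethodType.Email, enabled: true, config: { debug_mode: true } },
// ]
// -> showEmailTip: true (email delivery enabled)
// -> isEmailDebugMode: true (email delivery configured with debug_mode)
// -> showDebugModeTip: true (web app delivery not enabled)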

View File

@@ -16,8 +16,11 @@ import LoadingAnim from '@/app/components/base/chat/chat/loading-anim'
import { FileList } from '@/app/components/base/file-uploader'
import { cn } from '@/utils/classnames'
import ContentSwitch from '../content-switch'
import { useChatContext } from '../context'
import AgentContent from './agent-content'
import BasicContent from './basic-content'
import HumanInputFilledFormList from './human-input-filled-form-list'
import HumanInputFormList from './human-input-form-list'
import More from './more'
import Operation from './operation'
import SuggestedQuestions from './suggested-questions'
@@ -36,6 +39,8 @@ type AnswerProps = {
appData?: AppData
noChatInput?: boolean
switchSibling?: (siblingMessageId: string) => void
hideAvatar?: boolean
onHumanInputFormSubmit?: (formToken: string, formData: any) => Promise<void>
}
const Answer: FC<AnswerProps> = ({
item,
@@ -50,6 +55,8 @@ const Answer: FC<AnswerProps> = ({
appData,
noChatInput,
switchSibling,
hideAvatar,
onHumanInputFormSubmit,
}) => {
const { t } = useTranslation()
const {
@@ -61,13 +68,22 @@ const Answer: FC<AnswerProps> = ({
workflowProcess,
allFiles,
message_files,
humanInputFormDataList,
humanInputFilledFormDataList,
} = item
const hasAgentThoughts = !!agent_thoughts?.length
const hasHumanInputs = !!humanInputFormDataList?.length || !!humanInputFilledFormDataList?.length
const [containerWidth, setContainerWidth] = useState(0)
const [contentWidth, setContentWidth] = useState(0)
const [humanInputFormContainerWidth, setHumanInputFormContainerWidth] = useState(0)
const containerRef = useRef<HTMLDivElement>(null)
const contentRef = useRef<HTMLDivElement>(null)
const humanInputFormContainerRef = useRef<HTMLDivElement>(null)
const {
getHumanInputNodeData,
} = useChatContext()
const getContainerWidth = () => {
if (containerRef.current)
@@ -87,12 +103,23 @@ const Answer: FC<AnswerProps> = ({
getContentWidth()
}, [responding])
const getHumanInputFormContainerWidth = () => {
if (humanInputFormContainerRef.current)
setHumanInputFormContainerWidth(humanInputFormContainerRef.current?.clientWidth)
}
useEffect(() => {
if (hasHumanInputs)
getHumanInputFormContainerWidth()
}, [hasHumanInputs])
// Recalculate contentWidth when content changes (e.g., SVG preview/source toggle)
useEffect(() => {
if (!containerRef.current)
return
const resizeObserver = new ResizeObserver(() => {
getContentWidth()
getHumanInputFormContainerWidth()
})
resizeObserver.observe(containerRef.current)
return () => {
@@ -115,115 +142,285 @@ const Answer: FC<AnswerProps> = ({
return (
<div className="mb-2 flex last:mb-0">
<div className="relative h-10 w-10 shrink-0">
{answerIcon || <AnswerIcon />}
{responding && (
<div className="absolute left-[-3px] top-[-3px] flex h-4 w-4 items-center rounded-full border-[0.5px] border-divider-subtle bg-background-section-burn pl-[6px] shadow-xs">
<LoadingAnim type="avatar" />
{!hideAvatar && (
<div className="relative h-10 w-10 shrink-0">
{answerIcon || <AnswerIcon />}
{responding && (
<div className="absolute left-[-3px] top-[-3px] flex h-4 w-4 items-center rounded-full border-[0.5px] border-divider-subtle bg-background-section-burn pl-[6px] shadow-xs">
<LoadingAnim type="avatar" />
</div>
)}
</div>
)}
<div className="chat-answer-container group ml-4 w-0 grow pb-4" ref={containerRef}>
{/* Block 1: Workflow Process + Human Input Forms */}
{hasHumanInputs && (
<div className={cn('group relative pr-10', chatAnswerContainerInner)}>
<div
ref={humanInputFormContainerRef}
className={cn('body-lg-regular relative inline-block w-full max-w-full rounded-2xl bg-chat-bubble-bg px-4 py-3 text-text-primary')}
>
{
!responding && contentIsEmpty && !hasAgentThoughts && (
<Operation
hasWorkflowProcess={!!workflowProcess}
maxSize={containerWidth - humanInputFormContainerWidth - 4}
contentWidth={humanInputFormContainerWidth}
item={item}
question={question}
index={index}
showPromptLog={showPromptLog}
noChatInput={noChatInput}
/>
)
}
{/** Render workflow process */}
{
workflowProcess && (
<WorkflowProcessItem
data={workflowProcess}
item={item}
hideProcessDetail={hideProcessDetail}
readonly={hideProcessDetail && appData ? !appData.site.show_workflow_steps : undefined}
/>
)
}
{
humanInputFormDataList && humanInputFormDataList.length > 0 && (
<HumanInputFormList
humanInputFormDataList={humanInputFormDataList}
onHumanInputFormSubmit={onHumanInputFormSubmit}
getHumanInputNodeData={getHumanInputNodeData}
/>
)
}
{
humanInputFilledFormDataList && humanInputFilledFormDataList.length > 0 && (
<HumanInputFilledFormList
humanInputFilledFormDataList={humanInputFilledFormDataList}
/>
)
}
{
typeof item.siblingCount === 'number'
&& item.siblingCount > 1
&& !responding
&& contentIsEmpty
&& !hasAgentThoughts
&& (
<ContentSwitch
count={item.siblingCount}
currentIndex={item.siblingIndex}
prevDisabled={!item.prevSibling}
nextDisabled={!item.nextSibling}
switchSibling={handleSwitchSibling}
/>
)
}
</div>
</div>
)}
</div>
<div className="chat-answer-container group ml-4 w-0 grow pb-4" ref={containerRef}>
<div className={cn('group relative pr-10', chatAnswerContainerInner)}>
<div
ref={contentRef}
className={cn('body-lg-regular relative inline-block max-w-full rounded-2xl bg-chat-bubble-bg px-4 py-3 text-text-primary', workflowProcess && 'w-full')}
>
{
!responding && (
<Operation
hasWorkflowProcess={!!workflowProcess}
maxSize={containerWidth - contentWidth - 4}
contentWidth={contentWidth}
item={item}
question={question}
index={index}
showPromptLog={showPromptLog}
noChatInput={noChatInput}
/>
)
}
{/** Render workflow process */}
{
workflowProcess && (
<WorkflowProcessItem
data={workflowProcess}
item={item}
hideProcessDetail={hideProcessDetail}
readonly={hideProcessDetail && appData ? !appData.site?.show_workflow_steps : undefined}
/>
)
}
{
responding && contentIsEmpty && !hasAgentThoughts && (
<div className="flex h-5 w-6 items-center justify-center">
<LoadingAnim type="text" />
</div>
)
}
{
!contentIsEmpty && !hasAgentThoughts && (
<BasicContent item={item} />
)
}
{
(hasAgentThoughts) && (
<AgentContent
item={item}
responding={responding}
content={content}
/>
)
}
{
!!allFiles?.length && (
<FileList
className="my-1"
files={allFiles}
showDeleteAction={false}
showDownloadAction
canPreview
/>
)
}
{
!!message_files?.length && (
<FileList
className="my-1"
files={message_files}
showDeleteAction={false}
showDownloadAction
canPreview
/>
)
}
{
annotation?.id && annotation.authorName && (
<EditTitle
className="mt-1"
title={t('editBy', { ns: 'appAnnotation', author: annotation.authorName })}
/>
)
}
<SuggestedQuestions item={item} />
{
!!citation?.length && !responding && (
<Citation data={citation} showHitInfo={config?.supportCitationHitInfo} />
)
}
{
!!(item.siblingCount && item.siblingCount > 1 && item.siblingIndex !== undefined) && (
<ContentSwitch
count={item.siblingCount}
currentIndex={item.siblingIndex}
prevDisabled={!item.prevSibling}
nextDisabled={!item.nextSibling}
switchSibling={handleSwitchSibling}
/>
)
}
{/* Block 2: Response Content (when human inputs exist) */}
{hasHumanInputs && (responding || !contentIsEmpty || hasAgentThoughts) && (
<div className={cn('group relative mt-2 pr-10', chatAnswerContainerInner)}>
<div className="absolute -top-2 left-6 h-3 w-0.5 bg-chat-answer-human-input-form-divider-bg" />
<div
ref={contentRef}
className="body-lg-regular relative inline-block w-full max-w-full rounded-2xl bg-chat-bubble-bg px-4 py-3 text-text-primary"
>
{
!responding && (
<Operation
hasWorkflowProcess={!!workflowProcess}
maxSize={containerWidth - contentWidth - 4}
contentWidth={contentWidth}
item={item}
question={question}
index={index}
showPromptLog={showPromptLog}
noChatInput={noChatInput}
/>
)
}
{
responding && contentIsEmpty && !hasAgentThoughts && (
<div className="flex h-5 w-6 items-center justify-center">
<LoadingAnim type="text" />
</div>
)
}
{
!contentIsEmpty && !hasAgentThoughts && (
<BasicContent item={item} />
)
}
{
hasAgentThoughts && (
<AgentContent
item={item}
responding={responding}
content={content}
/>
)
}
{
!!allFiles?.length && (
<FileList
className="my-1"
files={allFiles}
showDeleteAction={false}
showDownloadAction
canPreview
/>
)
}
{
!!message_files?.length && (
<FileList
className="my-1"
files={message_files}
showDeleteAction={false}
showDownloadAction
canPreview
/>
)
}
{
annotation?.id && annotation.authorName && (
<EditTitle
className="mt-1"
title={t('editBy', { ns: 'appAnnotation', author: annotation.authorName })}
/>
)
}
<SuggestedQuestions item={item} />
{
!!citation?.length && !responding && (
<Citation data={citation} showHitInfo={config?.supportCitationHitInfo} />
)
}
{
typeof item.siblingCount === 'number'
&& item.siblingCount > 1
&& (
<ContentSwitch
count={item.siblingCount}
currentIndex={item.siblingIndex}
prevDisabled={!item.prevSibling}
nextDisabled={!item.nextSibling}
switchSibling={handleSwitchSibling}
/>
)
}
</div>
</div>
</div>
)}
{/* Original single block layout (when no human inputs) */}
{!hasHumanInputs && (
<div className={cn('group relative pr-10', chatAnswerContainerInner)}>
<div
ref={contentRef}
className={cn('body-lg-regular relative inline-block max-w-full rounded-2xl bg-chat-bubble-bg px-4 py-3 text-text-primary', workflowProcess && 'w-full')}
>
{
!responding && (
<Operation
hasWorkflowProcess={!!workflowProcess}
maxSize={containerWidth - contentWidth - 4}
contentWidth={contentWidth}
item={item}
question={question}
index={index}
showPromptLog={showPromptLog}
noChatInput={noChatInput}
/>
)
}
{/** Render workflow process */}
{
workflowProcess && (
<WorkflowProcessItem
data={workflowProcess}
item={item}
hideProcessDetail={hideProcessDetail}
readonly={hideProcessDetail && appData ? !appData.site?.show_workflow_steps : undefined}
/>
)
}
{
responding && contentIsEmpty && !hasAgentThoughts && (
<div className="flex h-5 w-6 items-center justify-center">
<LoadingAnim type="text" />
</div>
)
}
{
!contentIsEmpty && !hasAgentThoughts && (
<BasicContent item={item} />
)
}
{
hasAgentThoughts && (
<AgentContent
item={item}
responding={responding}
content={content}
/>
)
}
{
!!allFiles?.length && (
<FileList
className="my-1"
files={allFiles}
showDeleteAction={false}
showDownloadAction
canPreview
/>
)
}
{
!!message_files?.length && (
<FileList
className="my-1"
files={message_files}
showDeleteAction={false}
showDownloadAction
canPreview
/>
)
}
{
annotation?.id && annotation.authorName && (
<EditTitle
className="mt-1"
title={t('editBy', { ns: 'appAnnotation', author: annotation.authorName })}
/>
)
}
<SuggestedQuestions item={item} />
{
!!citation?.length && !responding && (
<Citation data={citation} showHitInfo={config?.supportCitationHitInfo} />
)
}
{
typeof item.siblingCount === 'number'
&& item.siblingCount > 1 && (
<ContentSwitch
count={item.siblingCount}
currentIndex={item.siblingIndex}
prevDisabled={!item.prevSibling}
nextDisabled={!item.nextSibling}
switchSibling={handleSwitchSibling}
/>
)
}
</div>
</div>
)}
<More more={more} />
</div>
</div>

View File

@@ -69,6 +69,7 @@ const Operation: FC<OperationProps> = ({
feedback,
adminFeedback,
agent_thoughts,
humanInputFormDataList,
} = item
const [userLocalFeedback, setUserLocalFeedback] = useState(feedback)
const [adminLocalFeedback, setAdminLocalFeedback] = useState(adminFeedback)
@@ -186,7 +187,7 @@ const Operation: FC<OperationProps> = ({
)}
style={(!hasWorkflowProcess && positionRight) ? { left: contentWidth + 8 } : {}}
>
{shouldShowUserFeedbackBar && (
{shouldShowUserFeedbackBar && !humanInputFormDataList?.length && (
<div className={cn(
'ml-1 items-center gap-0.5 rounded-[10px] border-[0.5px] border-components-actionbar-border bg-components-actionbar-bg p-0.5 shadow-md backdrop-blur-sm',
hasUserFeedback ? 'flex' : 'hidden group-hover:flex',
@@ -226,7 +227,7 @@ const Operation: FC<OperationProps> = ({
)}
</div>
)}
{shouldShowAdminFeedbackBar && (
{shouldShowAdminFeedbackBar && !humanInputFormDataList?.length && (
<div className={cn(
'ml-1 items-center gap-0.5 rounded-[10px] border-[0.5px] border-components-actionbar-border bg-components-actionbar-bg p-0.5 shadow-md backdrop-blur-sm',
(hasAdminFeedback || hasUserFeedback) ? 'flex' : 'hidden group-hover:flex',
@@ -305,26 +306,28 @@ const Operation: FC<OperationProps> = ({
)}
{!isOpeningStatement && (
<div className="ml-1 hidden items-center gap-0.5 rounded-[10px] border-[0.5px] border-components-actionbar-border bg-components-actionbar-bg p-0.5 shadow-md backdrop-blur-sm group-hover:flex">
{(config?.text_to_speech?.enabled) && (
{(config?.text_to_speech?.enabled && !humanInputFormDataList?.length) && (
<NewAudioButton
id={id}
value={content}
voice={config?.text_to_speech?.voice}
/>
)}
<ActionButton onClick={() => {
copy(content)
Toast.notify({ type: 'success', message: t('actionMsg.copySuccessfully', { ns: 'common' }) })
}}
>
<RiClipboardLine className="h-4 w-4" />
</ActionButton>
{!humanInputFormDataList?.length && (
<ActionButton onClick={() => {
copy(content)
Toast.notify({ type: 'success', message: t('actionMsg.copySuccessfully', { ns: 'common' }) })
}}
>
<RiClipboardLine className="h-4 w-4" />
</ActionButton>
)}
{!noChatInput && (
<ActionButton onClick={() => onRegenerate?.(item)}>
<RiResetLeftLine className="h-4 w-4" />
</ActionButton>
)}
{(config?.supportAnnotation && config.annotation_reply?.enabled) && (
{config?.supportAnnotation && config.annotation_reply?.enabled && !humanInputFormDataList?.length && (
<AnnotationCtrlButton
appId={config?.appId || ''}
messageId={id}

View File

@@ -3,6 +3,7 @@ import {
RiArrowRightSLine,
RiErrorWarningFill,
RiLoader2Line,
RiPauseCircleFill,
} from '@remixicon/react'
import {
useEffect,
@@ -34,6 +35,8 @@ const WorkflowProcessItem = ({
const running = data.status === WorkflowRunningStatus.Running
const succeeded = data.status === WorkflowRunningStatus.Succeeded
const failed = data.status === WorkflowRunningStatus.Failed || data.status === WorkflowRunningStatus.Stopped
const paused = data.status === WorkflowRunningStatus.Paused
const latestNode = data.tracing[data.tracing.length - 1]
useEffect(() => {
setCollapse(!expand)
@@ -50,7 +53,10 @@ const WorkflowProcessItem = ({
running && !collapse && 'bg-background-section-burn',
succeeded && !collapse && 'bg-state-success-hover',
failed && !collapse && 'bg-state-destructive-hover',
collapse && 'bg-workflow-process-bg',
paused && !collapse && 'bg-state-warning-hover',
collapse && !failed && !paused && 'bg-workflow-process-bg',
collapse && paused && 'bg-workflow-process-paused-bg',
collapse && failed && 'bg-workflow-process-failed-bg',
)}
>
<div
@@ -72,8 +78,13 @@ const WorkflowProcessItem = ({
<RiErrorWarningFill className="mr-1 h-3.5 w-3.5 shrink-0 text-text-destructive" />
)
}
{
paused && (
<RiPauseCircleFill className="mr-1 h-3.5 w-3.5 shrink-0 text-text-warning-secondary" />
)
}
<div className={cn('system-xs-medium text-text-secondary', !collapse && 'grow')}>
{t('common.workflowProcess', { ns: 'workflow' })}
{!collapse ? t('common.workflowProcess', { ns: 'workflow' }) : latestNode?.title}
</div>
<RiArrowRightSLine className={cn('ml-1 h-4 w-4 text-text-tertiary', !collapse && 'rotate-90')} />
</div>

View File

@@ -16,7 +16,8 @@ export type ChatContextValue = Pick<ChatProps, 'config'
| 'onAnnotationAdded'
| 'onAnnotationRemoved'
| 'disableFeedback'
| 'onFeedback'> & {
| 'onFeedback'
| 'getHumanInputNodeData'> & {
readonly?: boolean
}
@@ -45,6 +46,7 @@ export const ChatContextProvider = ({
onAnnotationRemoved,
disableFeedback,
onFeedback,
getHumanInputNodeData,
}: ChatContextProviderProps) => {
return (
<ChatContext.Provider value={{
@@ -62,6 +64,7 @@ export const ChatContextProvider = ({
onAnnotationRemoved,
disableFeedback,
onFeedback,
getHumanInputNodeData,
}}
>
{children}

File diff suppressed because it is too large

View File

@@ -75,6 +75,9 @@ export type ChatProps = {
noSpacing?: boolean
inputDisabled?: boolean
sidebarCollapseState?: boolean
hideAvatar?: boolean
onHumanInputFormSubmit?: (formToken: string, formData: any) => Promise<void>
getHumanInputNodeData?: (nodeID: string) => any
}
const Chat: FC<ChatProps> = ({
@@ -116,6 +119,9 @@ const Chat: FC<ChatProps> = ({
noSpacing,
inputDisabled,
sidebarCollapseState,
hideAvatar,
onHumanInputFormSubmit,
getHumanInputNodeData,
}) => {
const { t } = useTranslation()
const { currentLogItem, setCurrentLogItem, showPromptLogModal, setShowPromptLogModal, showAgentLogModal, setShowAgentLogModal } = useAppStore(useShallow(state => ({
@@ -265,6 +271,7 @@ const Chat: FC<ChatProps> = ({
onAnnotationRemoved={onAnnotationRemoved}
disableFeedback={disableFeedback}
onFeedback={onFeedback}
getHumanInputNodeData={getHumanInputNodeData}
>
<div className={cn('relative h-full', isTryApp && 'flex flex-col')}>
<div
@@ -295,6 +302,8 @@ const Chat: FC<ChatProps> = ({
hideProcessDetail={hideProcessDetail}
noChatInput={noChatInput}
switchSibling={switchSibling}
hideAvatar={hideAvatar}
onHumanInputFormSubmit={onHumanInputFormSubmit}
/>
)
}
@@ -306,6 +315,7 @@ const Chat: FC<ChatProps> = ({
theme={themeBuilder?.theme}
enableEdit={config?.questionEditEnable}
switchSibling={switchSibling}
hideAvatar={hideAvatar}
/>
)
})

View File

@@ -32,6 +32,7 @@ type QuestionProps = {
theme: Theme | null | undefined
enableEdit?: boolean
switchSibling?: (siblingMessageId: string) => void
hideAvatar?: boolean
}
const Question: FC<QuestionProps> = ({
@@ -40,6 +41,7 @@ const Question: FC<QuestionProps> = ({
theme,
enableEdit = true,
switchSibling,
hideAvatar,
}) => {
const { t } = useTranslation()
@@ -174,15 +176,17 @@ const Question: FC<QuestionProps> = ({
</div>
<div className="mt-1 h-[18px]" />
</div>
<div className="h-10 w-10 shrink-0">
{
questionIcon || (
<div className="h-full w-full rounded-full border-[0.5px] border-black/5">
<User className="h-full w-full" />
</div>
)
}
</div>
{!hideAvatar && (
<div className="h-10 w-10 shrink-0">
{
questionIcon || (
<div className="h-full w-full rounded-full border-[0.5px] border-black/5">
<User className="h-full w-full" />
</div>
)
}
</div>
)}
</div>
)
}

View File

@@ -2,7 +2,11 @@ import type { FileEntity } from '@/app/components/base/file-uploader/types'
import type { TypeWithI18N } from '@/app/components/header/account-setting/model-provider-page/declarations'
import type { InputVarType } from '@/app/components/workflow/types'
import type { Annotation, MessageRating } from '@/models/log'
import type { FileResponse } from '@/types/workflow'
import type {
FileResponse,
HumanInputFilledFormData,
HumanInputFormData,
} from '@/types/workflow'
export type MessageMore = {
time: string
@@ -64,6 +68,19 @@ export type CitationItem = {
word_count: number
}
export type ExtraContent
= {
type: 'human_input'
submitted: false
form_definition: HumanInputFormData
workflow_run_id: string
}
| {
type: 'human_input'
submitted: true
form_submission_data: HumanInputFilledFormData
}
export type IChatItem = {
id: string
content: string
@@ -104,6 +121,10 @@ export type IChatItem = {
siblingIndex?: number
prevSibling?: string
nextSibling?: string
// for human input
humanInputFormDataList?: HumanInputFormData[]
humanInputFilledFormDataList?: HumanInputFilledFormData[]
extra_contents?: ExtraContent[]
}
export type Metadata = {

View File

@@ -2,6 +2,7 @@ import type { FileEntity } from '../../file-uploader/types'
import type {
ChatConfig,
ChatItem,
ChatItemInTree,
OnSend,
} from '../types'
import { useCallback, useEffect, useMemo, useState } from 'react'
@@ -17,7 +18,9 @@ import {
fetchSuggestedQuestions,
getUrl,
stopChatMessageResponding,
submitHumanInputForm,
} from '@/service/share'
import { submitHumanInputForm as submitHumanInputFormService } from '@/service/workflow'
import { TransferMethod } from '@/types/app'
import { cn } from '@/utils/classnames'
import Avatar from '../../avatar'
@@ -70,9 +73,9 @@ const ChatWrapper = () => {
}, [appParams, currentConversationItem?.introduction])
const {
chatList,
setTargetMessageId,
handleSend,
handleStop,
handleSwitchSibling,
isResponding: respondingState,
suggestedQuestions,
} = useChat(
@@ -130,6 +133,40 @@ const ChatWrapper = () => {
setIsResponding(respondingState)
}, [respondingState, setIsResponding])
// Resume paused workflows when chat history is loaded
useEffect(() => {
if (!appPrevChatList || appPrevChatList.length === 0)
return
// Find the last answer item with workflow_run_id that needs resumption (DFS - find deepest first)
let lastPausedNode: ChatItemInTree | undefined
const findLastPausedWorkflow = (nodes: ChatItemInTree[]) => {
nodes.forEach((node) => {
// DFS: recurse to children first
if (node.children && node.children.length > 0)
findLastPausedWorkflow(node.children)
// Track the last node with humanInputFormDataList
if (node.isAnswer && node.workflow_run_id && node.humanInputFormDataList && node.humanInputFormDataList.length > 0)
lastPausedNode = node
})
}
findLastPausedWorkflow(appPrevChatList)
// Only resume the last paused workflow
if (lastPausedNode) {
handleSwitchSibling(
lastPausedNode.id,
{
onGetSuggestedQuestions: responseItemId => fetchSuggestedQuestions(responseItemId, appSourceType, appId),
onConversationComplete: currentConversationId ? undefined : handleNewConversationCompleted,
isPublicAPI: appSourceType === AppSourceType.webApp,
},
)
}
}, [])
const doSend: OnSend = useCallback((message, files, isRegenerate = false, parentAnswer: ChatItem | null = null) => {
const data: any = {
query: message,
@@ -147,7 +184,7 @@ const ChatWrapper = () => {
isPublicAPI: appSourceType === AppSourceType.webApp,
},
)
}, [currentConversationId, currentConversationInputs, newConversationInputs, chatList, handleSend, isInstalledApp, appId, handleNewConversationCompleted])
}, [currentConversationId, currentConversationInputs, newConversationInputs, chatList, handleSend, appSourceType, appId, handleNewConversationCompleted])
const doRegenerate = useCallback((chatItem: ChatItem, editedQuestion?: { message: string, files?: FileEntity[] }) => {
const question = editedQuestion ? chatItem : chatList.find(item => item.id === chatItem.parentMessageId)!
@@ -155,6 +192,14 @@ const ChatWrapper = () => {
doSend(editedQuestion ? editedQuestion.message : question.content, editedQuestion ? editedQuestion.files : question.message_files, true, isValidGeneratedAnswer(parentAnswer) ? parentAnswer : null)
}, [chatList, doSend])
const doSwitchSibling = useCallback((siblingMessageId: string) => {
handleSwitchSibling(siblingMessageId, {
onGetSuggestedQuestions: responseItemId => fetchSuggestedQuestions(responseItemId, appSourceType, appId),
onConversationComplete: currentConversationId ? undefined : handleNewConversationCompleted,
isPublicAPI: appSourceType === AppSourceType.webApp,
})
}, [handleSwitchSibling, appSourceType, appId, currentConversationId, handleNewConversationCompleted])
const messageList = useMemo(() => {
if (currentConversationId || chatList.length > 1)
return chatList
@@ -178,6 +223,13 @@ const ChatWrapper = () => {
}
}, [inputsForms.length, isMobile, currentConversationId, collapsed, allInputsHidden])
const handleSubmitHumanInputForm = useCallback(async (formToken: string, formData: any) => {
if (isInstalledApp)
await submitHumanInputFormService(formToken, formData)
else
await submitHumanInputForm(formToken, formData)
}, [isInstalledApp])
const welcome = useMemo(() => {
const welcomeMessage = chatList.find(item => item.isOpeningStatement)
if (respondingState)
@@ -223,7 +275,7 @@ const ChatWrapper = () => {
</div>
</div>
)
}, [appData?.site, chatList, collapsed, currentConversationId, inputsForms.length, respondingState, allInputsHidden])
}, [chatList, respondingState, currentConversationId, collapsed, inputsForms.length, allInputsHidden, appData?.site, isMobile])
const answerIcon = isDify()
? <LogoAvatar className="relative shrink-0" />
@@ -253,6 +305,7 @@ const ChatWrapper = () => {
inputsForm={inputsForms}
onRegenerate={doRegenerate}
onStopResponding={handleStop}
onHumanInputFormSubmit={handleSubmitHumanInputForm}
chatNode={(
<>
{chatNode}
@@ -266,7 +319,7 @@ const ChatWrapper = () => {
answerIcon={answerIcon}
hideProcessDetail
themeBuilder={themeBuilder}
switchSibling={siblingMessageId => setTargetMessageId(siblingMessageId)}
switchSibling={doSwitchSibling}
inputDisabled={inputDisabled}
questionIcon={
initUserVariables?.avatar_url

View File

@@ -5,7 +5,7 @@ import type {
ModelConfig,
VisionSettings,
} from '@/types/app'
import type { NodeTracing } from '@/types/workflow'
import type { HumanInputFilledFormData, HumanInputFormData, NodeTracing } from '@/types/workflow'
export type {
Inputs,
@@ -67,6 +67,8 @@ export type WorkflowProcess = {
expand?: boolean // for UI
resultText?: string
files?: FileEntity[]
humanInputFormDataList?: HumanInputFormData[]
humanInputFilledFormDataList?: HumanInputFilledFormData[]
}
export type ChatItem = IChatItem & {

View File

@@ -1,3 +1,4 @@
/* eslint-disable tailwindcss/classnames-order */
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import Effect from '.'
@@ -28,8 +29,8 @@ type Story = StoryObj<typeof meta>
export const Playground: Story = {
render: () => (
<div className="relative h-40 w-72 overflow-hidden rounded-2xl border border-divider-subtle bg-background-default-subtle">
<Effect className="left-8 top-6" />
<Effect className="bg-util-colors-purple-brand-purple-brand-500 right-10 top-14" />
<Effect className="top-6 left-8" />
<Effect className="top-14 right-10 bg-util-colors-purple-brand-purple-brand-500" />
<div className="absolute inset-x-0 bottom-4 flex justify-center text-xs text-text-secondary">
Accent glow
</div>

View File

@@ -0,0 +1,6 @@
<svg width="27" height="27" viewBox="0 0 27 27" fill="none" xmlns="http://www.w3.org/2000/svg">
<path fill-rule="evenodd" clip-rule="evenodd" d="M9.625 0C9.27992 0.000320434 8.93828 0.0686147 8.6196 0.200983C8.30091 0.333351 8.01142 0.527199 7.76766 0.771458C7.5239 1.01572 7.33064 1.3056 7.19893 1.62456C7.06721 1.94351 6.99962 2.28529 7 2.63037C6.99968 2.97541 7.06732 3.31714 7.19907 3.63603C7.33081 3.95493 7.52408 4.24476 7.76783 4.48897C8.01159 4.73317 8.30105 4.92698 8.61971 5.05932C8.93836 5.19165 9.27996 5.25993 9.625 5.26025H12.25V2.63037C12.2504 2.28529 12.1828 1.94351 12.0511 1.62456C11.9194 1.3056 11.7261 1.01572 11.4823 0.771458C11.2386 0.527199 10.9491 0.333351 10.6304 0.200983C10.3117 0.0686147 9.97008 0.000320434 9.625 0ZM9.625 7.01416H2.625C2.27996 7.01448 1.93836 7.08276 1.61971 7.21509C1.30106 7.34743 1.01159 7.54124 0.767835 7.78544C0.524081 8.02965 0.330814 8.31948 0.199069 8.63838C0.0673241 8.95728 -0.000319124 9.299 1.63476e-06 9.64404C-0.000383292 9.98912 0.0672126 10.3309 0.198929 10.6499C0.330645 10.9688 0.523902 11.2587 0.767662 11.503C1.01142 11.7472 1.30091 11.9411 1.6196 12.0734C1.93828 12.2058 2.27992 12.2741 2.625 12.2744H9.625C9.97008 12.2741 10.3117 12.2058 10.6304 12.0734C10.9491 11.9411 11.2386 11.7472 11.4823 11.503C11.7261 11.2587 11.9194 10.9688 12.0511 10.6499C12.1828 10.3309 12.2504 9.98912 12.25 9.64404C12.2503 9.299 12.1827 8.95728 12.0509 8.63838C11.9192 8.31948 11.7259 8.02965 11.4822 7.78544C11.2384 7.54124 10.9489 7.34743 10.6303 7.21509C10.3116 7.08276 9.97004 7.01448 9.625 7.01416Z" fill="#44BEDF"/>
<path fill-rule="evenodd" clip-rule="evenodd" d="M26.25 9.64404C26.2503 9.299 26.1827 8.95728 26.0509 8.63838C25.9192 8.31948 25.7259 8.02965 25.4822 7.78544C25.2384 7.54124 24.9489 7.34743 24.6303 7.21509C24.3116 7.08276 23.97 7.01448 23.625 7.01416C23.28 7.01448 22.9384 7.08276 22.6197 7.21509C22.3011 7.34743 22.0116 7.54124 21.7678 7.78544C21.5241 8.02965 21.3308 8.31948 21.1991 8.63838C21.0673 8.95728 20.9997 9.299 21 9.64404V12.2744H23.625C23.9701 12.2741 24.3117 12.2058 24.6304 12.0734C24.9491 11.9411 25.2386 11.7472 25.4823 11.503C25.7261 11.2587 25.9194 10.9688 26.0511 10.6499C26.1828 10.3309 26.2504 9.98912 26.25 9.64404ZM19.25 9.64404V2.63037C19.2504 2.28529 19.1828 1.94351 19.0511 1.62456C18.9194 1.3056 18.7261 1.01572 18.4823 0.771458C18.2386 0.527199 17.9491 0.333351 17.6304 0.200983C17.3117 0.0686147 16.9701 0.000320434 16.625 0C16.2799 0.000320434 15.9383 0.0686147 15.6196 0.200983C15.3009 0.333351 15.0114 0.527199 14.7677 0.771458C14.5239 1.01572 14.3306 1.3056 14.1989 1.62456C14.0672 1.94351 13.9996 2.28529 14 2.63037V9.64404C13.9996 9.98912 14.0672 10.3309 14.1989 10.6499C14.3306 10.9688 14.5239 11.2587 14.7677 11.503C15.0114 11.7472 15.3009 11.9411 15.6196 12.0734C15.9383 12.2058 16.2799 12.2741 16.625 12.2744C16.9701 12.2741 17.3117 12.2058 17.6304 12.0734C17.9491 11.9411 18.2386 11.7472 18.4823 11.503C18.7261 11.2587 18.9194 10.9688 19.0511 10.6499C19.1828 10.3309 19.2504 9.98912 19.25 9.64404Z" fill="#2EB67D"/>
<path fill-rule="evenodd" clip-rule="evenodd" d="M16.625 26.302C16.9701 26.3017 17.3117 26.2334 17.6304 26.101C17.9491 25.9686 18.2386 25.7748 18.4823 25.5305C18.7261 25.2863 18.9194 24.9964 19.0511 24.6774C19.1828 24.3585 19.2504 24.0167 19.25 23.6716C19.2503 23.3266 19.1827 22.9849 19.0509 22.666C18.9192 22.3471 18.7259 22.0572 18.4822 21.813C18.2384 21.5688 17.9489 21.375 17.6303 21.2427C17.3116 21.1103 16.97 21.0421 16.625 21.0417H14V23.6716C13.9996 24.0167 14.0672 24.3585 14.1989 24.6774C14.3306 24.9964 14.5239 25.2863 14.7677 25.5305C15.0114 25.7748 15.3009 25.9686 15.6196 26.101C15.9383 26.2334 16.2799 26.3017 16.625 26.302ZM16.625 19.2878H23.625C23.97 19.2875 24.3116 19.2192 24.6303 19.0869C24.9489 18.9546 25.2384 18.7608 25.4822 18.5166C25.7259 18.2723 25.9192 17.9825 26.0509 17.6636C26.1827 17.3447 26.2503 17.003 26.25 16.658C26.2504 16.3129 26.1828 15.9711 26.0511 15.6521C25.9194 15.3332 25.7261 15.0433 25.4823 14.799C25.2386 14.5548 24.9491 14.3609 24.6304 14.2286C24.3117 14.0962 23.9701 14.0279 23.625 14.0276H16.625C16.2799 14.0279 15.9383 14.0962 15.6196 14.2286C15.3009 14.3609 15.0114 14.5548 14.7677 14.799C14.5239 15.0433 14.3306 15.3332 14.1989 15.6521C14.0672 15.9711 13.9996 16.3129 14 16.658C13.9997 17.003 14.0673 17.3447 14.1991 17.6636C14.3308 17.9825 14.5241 18.2723 14.7678 18.5166C15.0116 18.7608 15.3011 18.9546 15.6197 19.0869C15.9384 19.2192 16.28 19.2875 16.625 19.2878Z" fill="#ECB22E"/>
<path fill-rule="evenodd" clip-rule="evenodd" d="M2.22485e-06 16.658C-0.000318534 17.003 0.0673247 17.3447 0.19907 17.6636C0.330815 17.9825 0.524081 18.2723 0.767835 18.5166C1.01159 18.7608 1.30106 18.9546 1.61971 19.0869C1.93836 19.2192 2.27996 19.2875 2.625 19.2878C2.97004 19.2875 3.31164 19.2192 3.63029 19.0869C3.94895 18.9546 4.23841 18.7608 4.48217 18.5166C4.72592 18.2723 4.91919 17.9825 5.05093 17.6636C5.18268 17.3447 5.25032 17.003 5.25 16.658V14.0276H2.625C2.27988 14.0279 1.9382 14.0962 1.61948 14.2286C1.30077 14.361 1.01126 14.5549 0.76749 14.7992C0.523723 15.0435 0.330477 15.3335 0.198789 15.6525C0.0671016 15.9715 -0.000446885 16.3128 2.22485e-06 16.658ZM7 16.658V23.6716C6.99962 24.0167 7.06721 24.3585 7.19893 24.6774C7.33064 24.9964 7.5239 25.2863 7.76766 25.5305C8.01142 25.7748 8.30091 25.9686 8.6196 26.101C8.93828 26.2334 9.27992 26.3017 9.625 26.302C9.97008 26.3017 10.3117 26.2334 10.6304 26.101C10.9491 25.9686 11.2386 25.7748 11.4823 25.5305C11.7261 25.2863 11.9194 24.9964 12.0511 24.6774C12.1828 24.3585 12.2504 24.0167 12.25 23.6716V16.6584C12.2504 16.3134 12.1828 15.9716 12.0511 15.6526C11.9194 15.3337 11.7261 15.0438 11.4823 14.7995C11.2386 14.5553 10.9491 14.3614 10.6304 14.2291C10.3117 14.0967 9.97008 14.0284 9.625 14.0281C9.27992 14.0284 8.93828 14.0967 8.6196 14.2291C8.30091 14.3614 8.01142 14.5553 7.76766 14.7995C7.5239 15.0438 7.33064 15.3337 7.19893 15.6526C7.06721 15.9716 6.99962 16.3129 7 16.658Z" fill="#E01E5A"/>
</svg>


View File

@@ -0,0 +1,19 @@
<svg width="28" height="28" viewBox="0 0 28 28" fill="none" xmlns="http://www.w3.org/2000/svg">
<g clip-path="url(#clip0_20307_29670)">
<path opacity="0.1" d="M9.33332 11.8067V20.4167C9.33204 21.432 9.57619 22.4327 10.045 23.3333H15.8783C16.2912 23.305 16.6807 23.1312 16.9776 22.8428C17.2745 22.5545 17.4596 22.1702 17.5 21.7583V10.5H17.1733H10.64C10.2934 10.5 9.96108 10.6377 9.71603 10.8827C9.47098 11.1278 9.33332 11.4601 9.33332 11.8067Z" fill="black"/>
<path opacity="0.1" d="M16.135 7H12.635C12.8805 7.78672 13.3726 8.47355 14.0386 8.9589C14.7047 9.44425 15.5093 9.70234 16.3333 9.695C16.7301 9.68885 17.1235 9.62197 17.5 9.49667V8.33C17.488 7.97504 17.3392 7.63847 17.0849 7.39061C16.8305 7.14276 16.4902 7.00281 16.135 7Z" fill="black"/>
<path d="M26.6817 10.5H20.8484L19.2267 11.8183V18.34C19.2846 19.4628 19.7715 20.5204 20.5867 21.2946C21.4019 22.0689 22.4833 22.5005 23.6075 22.5005C24.7318 22.5005 25.8131 22.0689 26.6283 21.2946C27.4436 20.5204 27.9304 19.4628 27.9884 18.34V11.8183C27.9899 11.6458 27.9572 11.4746 27.8923 11.3147C27.8273 11.1548 27.7313 11.0094 27.6098 10.8868C27.4883 10.7643 27.3437 10.667 27.1844 10.6006C27.0251 10.5342 26.8543 10.5 26.6817 10.5Z" fill="#5059C9"/>
<path d="M23.9167 9.33333C25.5275 9.33333 26.8333 8.0275 26.8333 6.41667C26.8333 4.80584 25.5275 3.5 23.9167 3.5C22.3058 3.5 21 4.80584 21 6.41667C21 8.0275 22.3058 9.33333 23.9167 9.33333Z" fill="#5059C9"/>
<path d="M10.64 10.5H20.86C21.2065 10.5 21.5389 10.6377 21.7839 10.8827C22.029 11.1278 22.1666 11.4601 22.1666 11.8067V20.4167C22.1666 22.1185 21.4906 23.7506 20.2872 24.9539C19.0839 26.1573 17.4518 26.8333 15.75 26.8333C14.0482 26.8333 12.4161 26.1573 11.2127 24.9539C10.0094 23.7506 9.33331 22.1185 9.33331 20.4167V11.8067C9.33331 11.4601 9.47098 11.1278 9.71603 10.8827C9.96107 10.6377 10.2934 10.5 10.64 10.5Z" fill="#7B83EB"/>
<path d="M16.3333 9.69477C18.4661 9.69477 20.195 7.96584 20.195 5.8331C20.195 3.70036 18.4661 1.97144 16.3333 1.97144C14.2006 1.97144 12.4717 3.70036 12.4717 5.8331C12.4717 7.96584 14.2006 9.69477 16.3333 9.69477Z" fill="#7B83EB"/>
<path opacity="0.5" d="M9.33332 11.8067V20.4167C9.33204 21.432 9.57619 22.4327 10.045 23.3333H15.8783C16.2912 23.305 16.6807 23.1312 16.9776 22.8428C17.2745 22.5545 17.4596 22.1702 17.5 21.7583V10.5H17.1733H10.64C10.2934 10.5 9.96108 10.6377 9.71603 10.8827C9.47098 11.1278 9.33332 11.4601 9.33332 11.8067Z" fill="black"/>
<path opacity="0.5" d="M16.135 7H12.635C12.8805 7.78672 13.3726 8.47355 14.0386 8.9589C14.7047 9.44425 15.5093 9.70234 16.3333 9.695C16.7301 9.68885 17.1235 9.62197 17.5 9.49667V8.33C17.488 7.97504 17.3392 7.63847 17.0849 7.39061C16.8305 7.14276 16.4902 7.00281 16.135 7Z" fill="black"/>
<path d="M14.9683 5.83325H1.365C0.611131 5.83325 0 6.44438 0 7.19825V20.8016C0 21.5555 0.611131 22.1666 1.365 22.1666H14.9683C15.7222 22.1666 16.3333 21.5555 16.3333 20.8016V7.19825C16.3333 6.44438 15.7222 5.83325 14.9683 5.83325Z" fill="#4B53BC"/>
<path d="M11.8767 11.1766H9.08833V18.6666H7.25666V11.1766H4.45667V9.33325H11.8767V11.1766Z" fill="white"/>
</g>
<defs>
<clipPath id="clip0_20307_29670">
<rect width="28" height="28" fill="white"/>
</clipPath>
</defs>
</svg>


View File

@@ -0,0 +1,5 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M3.66634 2.66675C3.66634 2.29856 3.36787 2.00008 2.99967 2.00008C2.63149 2.00008 2.33301 2.29855 2.33301 2.66675L3.66634 2.66675ZM2.99967 5.33341H2.33301C2.33301 5.51023 2.40325 5.6798 2.52827 5.80482C2.65329 5.92984 2.82286 6.00008 2.99967 6.00008V5.33341ZM5.66641 6.00008C6.03459 6.00008 6.33307 5.7016 6.33307 5.33341C6.33307 4.96523 6.03459 4.66675 5.66641 4.66675L5.66641 6.00008ZM2.41183 5.01659C2.23816 5.34125 2.36056 5.74523 2.68522 5.91889C3.00988 6.09256 3.41385 5.97016 3.58752 5.6455L2.41183 5.01659ZM3.03395 8.58915C2.99109 8.22348 2.6599 7.96175 2.29421 8.00461C1.92853 8.04748 1.66682 8.37868 1.70967 8.74435L3.03395 8.58915ZM12.0439 5.05931C12.2607 5.3569 12.6777 5.42238 12.9753 5.20557C13.2729 4.98876 13.3383 4.57176 13.1215 4.27417L12.0439 5.05931ZM5.02145 13.5907C5.34627 13.7641 5.75013 13.6413 5.92349 13.3165C6.09685 12.9917 5.97407 12.5878 5.64925 12.4145L5.02145 13.5907ZM2.33301 2.66675L2.33301 5.33341H3.66634L3.66634 2.66675L2.33301 2.66675ZM2.99967 6.00008L5.66641 6.00008L5.66641 4.66675H2.99967L2.99967 6.00008ZM3.58752 5.6455C4.43045 4.06972 6.09066 3.00008 7.99968 3.00008V1.66675C5.57951 1.66675 3.47747 3.02445 2.41183 5.01659L3.58752 5.6455ZM7.99968 3.00008C9.66128 3.00008 11.1336 3.80991 12.0439 5.05931L13.1215 4.27417C11.9711 2.69513 10.1055 1.66675 7.99968 1.66675V3.00008ZM5.64925 12.4145C4.23557 11.6599 3.22828 10.2474 3.03395 8.58915L1.70967 8.74435C1.95639 10.8495 3.23403 12.6367 5.02145 13.5907L5.64925 12.4145Z" fill="white"/>
<path fill-rule="evenodd" clip-rule="evenodd" d="M13.1946 8.13826C12.9027 7.84637 12.4294 7.84638 12.1375 8.13829L9.11842 11.1574C9.01642 11.2594 8.95025 11.3917 8.92985 11.5345C8.92985 11.5345 8.92985 11.5345 8.92985 11.5345L8.78508 12.5479L9.79846 12.4031C9.94127 12.3827 10.0736 12.3165 10.1756 12.2145L13.1947 9.19548C13.4866 8.90359 13.4866 8.43027 13.1946 8.13826C13.1947 8.13827 13.1946 8.13825 13.1946 8.13826ZM11.1947 7.19548C12.0073 6.38286 13.3249 6.38286 14.1375 7.19548C14.95 8.00814 14.9501 9.32565 14.1375 10.1383L11.1184 13.1574C10.8124 13.4633 10.4154 13.6618 9.98703 13.723L8.09369 13.9935C7.88596 14.0232 7.67639 13.9533 7.52801 13.805C7.37963 13.6566 7.30977 13.447 7.33945 13.2393L7.60991 11.3459C7.67111 10.9175 7.86961 10.5205 8.17561 10.2145L11.1947 7.19548Z" fill="white"/>
<path fill-rule="evenodd" clip-rule="evenodd" d="M12.528 10.8048L10.528 8.80482L11.4708 7.86201L13.4708 9.86201L12.528 10.8048Z" fill="white"/>
</svg>


View File

@@ -0,0 +1,61 @@
{
"icon": {
"type": "element",
"isRootNode": true,
"name": "svg",
"attributes": {
"width": "27",
"height": "27",
"viewBox": "0 0 27 27",
"fill": "none",
"xmlns": "http://www.w3.org/2000/svg"
},
"children": [
{
"type": "element",
"name": "path",
"attributes": {
"fill-rule": "evenodd",
"clip-rule": "evenodd",
"d": "M9.625 0C9.27992 0.000320434 8.93828 0.0686147 8.6196 0.200983C8.30091 0.333351 8.01142 0.527199 7.76766 0.771458C7.5239 1.01572 7.33064 1.3056 7.19893 1.62456C7.06721 1.94351 6.99962 2.28529 7 2.63037C6.99968 2.97541 7.06732 3.31714 7.19907 3.63603C7.33081 3.95493 7.52408 4.24476 7.76783 4.48897C8.01159 4.73317 8.30105 4.92698 8.61971 5.05932C8.93836 5.19165 9.27996 5.25993 9.625 5.26025H12.25V2.63037C12.2504 2.28529 12.1828 1.94351 12.0511 1.62456C11.9194 1.3056 11.7261 1.01572 11.4823 0.771458C11.2386 0.527199 10.9491 0.333351 10.6304 0.200983C10.3117 0.0686147 9.97008 0.000320434 9.625 0ZM9.625 7.01416H2.625C2.27996 7.01448 1.93836 7.08276 1.61971 7.21509C1.30106 7.34743 1.01159 7.54124 0.767835 7.78544C0.524081 8.02965 0.330814 8.31948 0.199069 8.63838C0.0673241 8.95728 -0.000319124 9.299 1.63476e-06 9.64404C-0.000383292 9.98912 0.0672126 10.3309 0.198929 10.6499C0.330645 10.9688 0.523902 11.2587 0.767662 11.503C1.01142 11.7472 1.30091 11.9411 1.6196 12.0734C1.93828 12.2058 2.27992 12.2741 2.625 12.2744H9.625C9.97008 12.2741 10.3117 12.2058 10.6304 12.0734C10.9491 11.9411 11.2386 11.7472 11.4823 11.503C11.7261 11.2587 11.9194 10.9688 12.0511 10.6499C12.1828 10.3309 12.2504 9.98912 12.25 9.64404C12.2503 9.299 12.1827 8.95728 12.0509 8.63838C11.9192 8.31948 11.7259 8.02965 11.4822 7.78544C11.2384 7.54124 10.9489 7.34743 10.6303 7.21509C10.3116 7.08276 9.97004 7.01448 9.625 7.01416Z",
"fill": "#44BEDF"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"fill-rule": "evenodd",
"clip-rule": "evenodd",
"d": "M26.25 9.64404C26.2503 9.299 26.1827 8.95728 26.0509 8.63838C25.9192 8.31948 25.7259 8.02965 25.4822 7.78544C25.2384 7.54124 24.9489 7.34743 24.6303 7.21509C24.3116 7.08276 23.97 7.01448 23.625 7.01416C23.28 7.01448 22.9384 7.08276 22.6197 7.21509C22.3011 7.34743 22.0116 7.54124 21.7678 7.78544C21.5241 8.02965 21.3308 8.31948 21.1991 8.63838C21.0673 8.95728 20.9997 9.299 21 9.64404V12.2744H23.625C23.9701 12.2741 24.3117 12.2058 24.6304 12.0734C24.9491 11.9411 25.2386 11.7472 25.4823 11.503C25.7261 11.2587 25.9194 10.9688 26.0511 10.6499C26.1828 10.3309 26.2504 9.98912 26.25 9.64404ZM19.25 9.64404V2.63037C19.2504 2.28529 19.1828 1.94351 19.0511 1.62456C18.9194 1.3056 18.7261 1.01572 18.4823 0.771458C18.2386 0.527199 17.9491 0.333351 17.6304 0.200983C17.3117 0.0686147 16.9701 0.000320434 16.625 0C16.2799 0.000320434 15.9383 0.0686147 15.6196 0.200983C15.3009 0.333351 15.0114 0.527199 14.7677 0.771458C14.5239 1.01572 14.3306 1.3056 14.1989 1.62456C14.0672 1.94351 13.9996 2.28529 14 2.63037V9.64404C13.9996 9.98912 14.0672 10.3309 14.1989 10.6499C14.3306 10.9688 14.5239 11.2587 14.7677 11.503C15.0114 11.7472 15.3009 11.9411 15.6196 12.0734C15.9383 12.2058 16.2799 12.2741 16.625 12.2744C16.9701 12.2741 17.3117 12.2058 17.6304 12.0734C17.9491 11.9411 18.2386 11.7472 18.4823 11.503C18.7261 11.2587 18.9194 10.9688 19.0511 10.6499C19.1828 10.3309 19.2504 9.98912 19.25 9.64404Z",
"fill": "#2EB67D"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"fill-rule": "evenodd",
"clip-rule": "evenodd",
"d": "M16.625 26.302C16.9701 26.3017 17.3117 26.2334 17.6304 26.101C17.9491 25.9686 18.2386 25.7748 18.4823 25.5305C18.7261 25.2863 18.9194 24.9964 19.0511 24.6774C19.1828 24.3585 19.2504 24.0167 19.25 23.6716C19.2503 23.3266 19.1827 22.9849 19.0509 22.666C18.9192 22.3471 18.7259 22.0572 18.4822 21.813C18.2384 21.5688 17.9489 21.375 17.6303 21.2427C17.3116 21.1103 16.97 21.0421 16.625 21.0417H14V23.6716C13.9996 24.0167 14.0672 24.3585 14.1989 24.6774C14.3306 24.9964 14.5239 25.2863 14.7677 25.5305C15.0114 25.7748 15.3009 25.9686 15.6196 26.101C15.9383 26.2334 16.2799 26.3017 16.625 26.302ZM16.625 19.2878H23.625C23.97 19.2875 24.3116 19.2192 24.6303 19.0869C24.9489 18.9546 25.2384 18.7608 25.4822 18.5166C25.7259 18.2723 25.9192 17.9825 26.0509 17.6636C26.1827 17.3447 26.2503 17.003 26.25 16.658C26.2504 16.3129 26.1828 15.9711 26.0511 15.6521C25.9194 15.3332 25.7261 15.0433 25.4823 14.799C25.2386 14.5548 24.9491 14.3609 24.6304 14.2286C24.3117 14.0962 23.9701 14.0279 23.625 14.0276H16.625C16.2799 14.0279 15.9383 14.0962 15.6196 14.2286C15.3009 14.3609 15.0114 14.5548 14.7677 14.799C14.5239 15.0433 14.3306 15.3332 14.1989 15.6521C14.0672 15.9711 13.9996 16.3129 14 16.658C13.9997 17.003 14.0673 17.3447 14.1991 17.6636C14.3308 17.9825 14.5241 18.2723 14.7678 18.5166C15.0116 18.7608 15.3011 18.9546 15.6197 19.0869C15.9384 19.2192 16.28 19.2875 16.625 19.2878Z",
"fill": "#ECB22E"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"fill-rule": "evenodd",
"clip-rule": "evenodd",
"d": "M2.22485e-06 16.658C-0.000318534 17.003 0.0673247 17.3447 0.19907 17.6636C0.330815 17.9825 0.524081 18.2723 0.767835 18.5166C1.01159 18.7608 1.30106 18.9546 1.61971 19.0869C1.93836 19.2192 2.27996 19.2875 2.625 19.2878C2.97004 19.2875 3.31164 19.2192 3.63029 19.0869C3.94895 18.9546 4.23841 18.7608 4.48217 18.5166C4.72592 18.2723 4.91919 17.9825 5.05093 17.6636C5.18268 17.3447 5.25032 17.003 5.25 16.658V14.0276H2.625C2.27988 14.0279 1.9382 14.0962 1.61948 14.2286C1.30077 14.361 1.01126 14.5549 0.76749 14.7992C0.523723 15.0435 0.330477 15.3335 0.198789 15.6525C0.0671016 15.9715 -0.000446885 16.3128 2.22485e-06 16.658ZM7 16.658V23.6716C6.99962 24.0167 7.06721 24.3585 7.19893 24.6774C7.33064 24.9964 7.5239 25.2863 7.76766 25.5305C8.01142 25.7748 8.30091 25.9686 8.6196 26.101C8.93828 26.2334 9.27992 26.3017 9.625 26.302C9.97008 26.3017 10.3117 26.2334 10.6304 26.101C10.9491 25.9686 11.2386 25.7748 11.4823 25.5305C11.7261 25.2863 11.9194 24.9964 12.0511 24.6774C12.1828 24.3585 12.2504 24.0167 12.25 23.6716V16.6584C12.2504 16.3134 12.1828 15.9716 12.0511 15.6526C11.9194 15.3337 11.7261 15.0438 11.4823 14.7995C11.2386 14.5553 10.9491 14.3614 10.6304 14.2291C10.3117 14.0967 9.97008 14.0284 9.625 14.0281C9.27992 14.0284 8.93828 14.0967 8.6196 14.2291C8.30091 14.3614 8.01142 14.5553 7.76766 14.7995C7.5239 15.0438 7.33064 15.3337 7.19893 15.6526C7.06721 15.9716 6.99962 16.3129 7 16.658Z",
"fill": "#E01E5A"
},
"children": []
}
]
},
"name": "Slack"
}

@@ -0,0 +1,20 @@
// GENERATE BY script
// DON NOT EDIT IT MANUALLY
import type { IconData } from '@/app/components/base/icons/IconBase'
import * as React from 'react'
import IconBase from '@/app/components/base/icons/IconBase'
import data from './Slack.json'
const Icon = (
{
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>
},
) => <IconBase {...props} ref={ref} data={data as IconData} />
Icon.displayName = 'Slack'
export default Icon
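
The wrapper above follows the repository's generated-icon convention: the SVG tree lives in the sibling Slack.json file and IconBase renders it at runtime, so the component only forwards SVG props and a ref. A minimal usage sketch; the relative import location and the badge component are assumptions, not part of this diff:

// Illustrative only. The relative import assumes a file sitting next to the
// generated Slack.tsx above; the icon is also re-exported from the icons
// index further down in this diff.
import * as React from 'react'
import Slack from './Slack'

// The generated component accepts the usual SVG props (className, onClick,
// ref, ...) and passes them, together with the JSON icon data, to IconBase.
const SlackBadge = () => <Slack className='h-4 w-4' />

export default SlackBadge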

@@ -0,0 +1,146 @@
{
"icon": {
"type": "element",
"isRootNode": true,
"name": "svg",
"attributes": {
"width": "28",
"height": "28",
"viewBox": "0 0 28 28",
"fill": "none",
"xmlns": "http://www.w3.org/2000/svg"
},
"children": [
{
"type": "element",
"name": "g",
"attributes": {
"clip-path": "url(#clip0_20307_29670)"
},
"children": [
{
"type": "element",
"name": "path",
"attributes": {
"opacity": "0.1",
"d": "M9.33332 11.8067V20.4167C9.33204 21.432 9.57619 22.4327 10.045 23.3333H15.8783C16.2912 23.305 16.6807 23.1312 16.9776 22.8428C17.2745 22.5545 17.4596 22.1702 17.5 21.7583V10.5H17.1733H10.64C10.2934 10.5 9.96108 10.6377 9.71603 10.8827C9.47098 11.1278 9.33332 11.4601 9.33332 11.8067Z",
"fill": "black"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"opacity": "0.1",
"d": "M16.135 7H12.635C12.8805 7.78672 13.3726 8.47355 14.0386 8.9589C14.7047 9.44425 15.5093 9.70234 16.3333 9.695C16.7301 9.68885 17.1235 9.62197 17.5 9.49667V8.33C17.488 7.97504 17.3392 7.63847 17.0849 7.39061C16.8305 7.14276 16.4902 7.00281 16.135 7Z",
"fill": "black"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"d": "M26.6817 10.5H20.8484L19.2267 11.8183V18.34C19.2846 19.4628 19.7715 20.5204 20.5867 21.2946C21.4019 22.0689 22.4833 22.5005 23.6075 22.5005C24.7318 22.5005 25.8131 22.0689 26.6283 21.2946C27.4436 20.5204 27.9304 19.4628 27.9884 18.34V11.8183C27.9899 11.6458 27.9572 11.4746 27.8923 11.3147C27.8273 11.1548 27.7313 11.0094 27.6098 10.8868C27.4883 10.7643 27.3437 10.667 27.1844 10.6006C27.0251 10.5342 26.8543 10.5 26.6817 10.5Z",
"fill": "#5059C9"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"d": "M23.9167 9.33333C25.5275 9.33333 26.8333 8.0275 26.8333 6.41667C26.8333 4.80584 25.5275 3.5 23.9167 3.5C22.3058 3.5 21 4.80584 21 6.41667C21 8.0275 22.3058 9.33333 23.9167 9.33333Z",
"fill": "#5059C9"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"d": "M10.64 10.5H20.86C21.2065 10.5 21.5389 10.6377 21.7839 10.8827C22.029 11.1278 22.1666 11.4601 22.1666 11.8067V20.4167C22.1666 22.1185 21.4906 23.7506 20.2872 24.9539C19.0839 26.1573 17.4518 26.8333 15.75 26.8333C14.0482 26.8333 12.4161 26.1573 11.2127 24.9539C10.0094 23.7506 9.33331 22.1185 9.33331 20.4167V11.8067C9.33331 11.4601 9.47098 11.1278 9.71603 10.8827C9.96107 10.6377 10.2934 10.5 10.64 10.5Z",
"fill": "#7B83EB"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"d": "M16.3333 9.69477C18.4661 9.69477 20.195 7.96584 20.195 5.8331C20.195 3.70036 18.4661 1.97144 16.3333 1.97144C14.2006 1.97144 12.4717 3.70036 12.4717 5.8331C12.4717 7.96584 14.2006 9.69477 16.3333 9.69477Z",
"fill": "#7B83EB"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"opacity": "0.5",
"d": "M9.33332 11.8067V20.4167C9.33204 21.432 9.57619 22.4327 10.045 23.3333H15.8783C16.2912 23.305 16.6807 23.1312 16.9776 22.8428C17.2745 22.5545 17.4596 22.1702 17.5 21.7583V10.5H17.1733H10.64C10.2934 10.5 9.96108 10.6377 9.71603 10.8827C9.47098 11.1278 9.33332 11.4601 9.33332 11.8067Z",
"fill": "black"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"opacity": "0.5",
"d": "M16.135 7H12.635C12.8805 7.78672 13.3726 8.47355 14.0386 8.9589C14.7047 9.44425 15.5093 9.70234 16.3333 9.695C16.7301 9.68885 17.1235 9.62197 17.5 9.49667V8.33C17.488 7.97504 17.3392 7.63847 17.0849 7.39061C16.8305 7.14276 16.4902 7.00281 16.135 7Z",
"fill": "black"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"d": "M14.9683 5.83325H1.365C0.611131 5.83325 0 6.44438 0 7.19825V20.8016C0 21.5555 0.611131 22.1666 1.365 22.1666H14.9683C15.7222 22.1666 16.3333 21.5555 16.3333 20.8016V7.19825C16.3333 6.44438 15.7222 5.83325 14.9683 5.83325Z",
"fill": "#4B53BC"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"d": "M11.8767 11.1766H9.08833V18.6666H7.25666V11.1766H4.45667V9.33325H11.8767V11.1766Z",
"fill": "white"
},
"children": []
}
]
},
{
"type": "element",
"name": "defs",
"attributes": {},
"children": [
{
"type": "element",
"name": "clipPath",
"attributes": {
"id": "clip0_20307_29670"
},
"children": [
{
"type": "element",
"name": "rect",
"attributes": {
"width": "28",
"height": "28",
"fill": "white"
},
"children": []
}
]
}
]
}
]
},
"name": "Teams"
}

@@ -0,0 +1,20 @@
// GENERATE BY script
// DON NOT EDIT IT MANUALLY
import type { IconData } from '@/app/components/base/icons/IconBase'
import * as React from 'react'
import IconBase from '@/app/components/base/icons/IconBase'
import data from './Teams.json'
const Icon = (
{
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>
},
) => <IconBase {...props} ref={ref} data={data as IconData} />
Icon.displayName = 'Teams'
export default Icon

@@ -2,3 +2,5 @@ export { default as DefaultToolIcon } from './DefaultToolIcon'
export { default as Icon3Dots } from './Icon3Dots'
export { default as Message3Fill } from './Message3Fill'
export { default as RowStruct } from './RowStruct'
export { default as Slack } from './Slack'
export { default as Teams } from './Teams'

@@ -0,0 +1,48 @@
{
"icon": {
"type": "element",
"isRootNode": true,
"name": "svg",
"attributes": {
"width": "16",
"height": "16",
"viewBox": "0 0 16 16",
"fill": "none",
"xmlns": "http://www.w3.org/2000/svg"
},
"children": [
{
"type": "element",
"name": "path",
"attributes": {
"d": "M3.66634 2.66675C3.66634 2.29856 3.36787 2.00008 2.99967 2.00008C2.63149 2.00008 2.33301 2.29855 2.33301 2.66675L3.66634 2.66675ZM2.99967 5.33341H2.33301C2.33301 5.51023 2.40325 5.6798 2.52827 5.80482C2.65329 5.92984 2.82286 6.00008 2.99967 6.00008V5.33341ZM5.66641 6.00008C6.03459 6.00008 6.33307 5.7016 6.33307 5.33341C6.33307 4.96523 6.03459 4.66675 5.66641 4.66675L5.66641 6.00008ZM2.41183 5.01659C2.23816 5.34125 2.36056 5.74523 2.68522 5.91889C3.00988 6.09256 3.41385 5.97016 3.58752 5.6455L2.41183 5.01659ZM3.03395 8.58915C2.99109 8.22348 2.6599 7.96175 2.29421 8.00461C1.92853 8.04748 1.66682 8.37868 1.70967 8.74435L3.03395 8.58915ZM12.0439 5.05931C12.2607 5.3569 12.6777 5.42238 12.9753 5.20557C13.2729 4.98876 13.3383 4.57176 13.1215 4.27417L12.0439 5.05931ZM5.02145 13.5907C5.34627 13.7641 5.75013 13.6413 5.92349 13.3165C6.09685 12.9917 5.97407 12.5878 5.64925 12.4145L5.02145 13.5907ZM2.33301 2.66675L2.33301 5.33341H3.66634L3.66634 2.66675L2.33301 2.66675ZM2.99967 6.00008L5.66641 6.00008L5.66641 4.66675H2.99967L2.99967 6.00008ZM3.58752 5.6455C4.43045 4.06972 6.09066 3.00008 7.99968 3.00008V1.66675C5.57951 1.66675 3.47747 3.02445 2.41183 5.01659L3.58752 5.6455ZM7.99968 3.00008C9.66128 3.00008 11.1336 3.80991 12.0439 5.05931L13.1215 4.27417C11.9711 2.69513 10.1055 1.66675 7.99968 1.66675V3.00008ZM5.64925 12.4145C4.23557 11.6599 3.22828 10.2474 3.03395 8.58915L1.70967 8.74435C1.95639 10.8495 3.23403 12.6367 5.02145 13.5907L5.64925 12.4145Z",
"fill": "currentColor"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"fill-rule": "evenodd",
"clip-rule": "evenodd",
"d": "M13.1946 8.13826C12.9027 7.84637 12.4294 7.84638 12.1375 8.13829L9.11842 11.1574C9.01642 11.2594 8.95025 11.3917 8.92985 11.5345C8.92985 11.5345 8.92985 11.5345 8.92985 11.5345L8.78508 12.5479L9.79846 12.4031C9.94127 12.3827 10.0736 12.3165 10.1756 12.2145L13.1947 9.19548C13.4866 8.90359 13.4866 8.43027 13.1946 8.13826C13.1947 8.13827 13.1946 8.13825 13.1946 8.13826ZM11.1947 7.19548C12.0073 6.38286 13.3249 6.38286 14.1375 7.19548C14.95 8.00814 14.9501 9.32565 14.1375 10.1383L11.1184 13.1574C10.8124 13.4633 10.4154 13.6618 9.98703 13.723L8.09369 13.9935C7.88596 14.0232 7.67639 13.9533 7.52801 13.805C7.37963 13.6566 7.30977 13.447 7.33945 13.2393L7.60991 11.3459C7.67111 10.9175 7.86961 10.5205 8.17561 10.2145L11.1947 7.19548Z",
"fill": "currentColor"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"fill-rule": "evenodd",
"clip-rule": "evenodd",
"d": "M12.528 10.8048L10.528 8.80482L11.4708 7.86201L13.4708 9.86201L12.528 10.8048Z",
"fill": "currentColor"
},
"children": []
}
]
},
"name": "HumanInLoop"
}

@@ -0,0 +1,20 @@
// GENERATE BY script
// DON NOT EDIT IT MANUALLY
import type { IconData } from '@/app/components/base/icons/IconBase'
import * as React from 'react'
import IconBase from '@/app/components/base/icons/IconBase'
import data from './HumanInLoop.json'
const Icon = (
{
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.MutableRefObject<HTMLOrSVGElement>>
},
) => <IconBase {...props} ref={ref} data={data as IconData} />
Icon.displayName = 'HumanInLoop'
export default Icon

@@ -10,6 +10,7 @@ export { default as DocsExtractor } from './DocsExtractor'
export { default as End } from './End'
export { default as Home } from './Home'
export { default as Http } from './Http'
export { default as HumanInLoop } from './HumanInLoop'
export { default as IfElse } from './IfElse'
export { default as Iteration } from './Iteration'
export { default as IterationStart } from './IterationStart'

@@ -18,7 +18,7 @@ export type MarkdownProps = {
content: string
className?: string
pluginInfo?: SimplePluginInfo
} & Pick<ReactMarkdownWrapperProps, 'customComponents' | 'customDisallowedElements'>
} & Pick<ReactMarkdownWrapperProps, 'customComponents' | 'customDisallowedElements' | 'rehypePlugins'>
export const Markdown = (props: MarkdownProps) => {
const { customComponents = {}, pluginInfo } = props
@@ -29,7 +29,13 @@ export const Markdown = (props: MarkdownProps) => {
return (
<div className={cn('markdown-body', '!text-text-primary', props.className)}>
<ReactMarkdown pluginInfo={pluginInfo} latexContent={latexContent} customComponents={customComponents} customDisallowedElements={props.customDisallowedElements} />
<ReactMarkdown
pluginInfo={pluginInfo}
latexContent={latexContent}
customComponents={customComponents}
customDisallowedElements={props.customDisallowedElements}
rehypePlugins={props.rehypePlugins}
/>
</div>
)
}
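
The new rehypePlugins prop is forwarded to ReactMarkdownWrapper (next file), which appends it to its built-in rehype plugin list. A minimal sketch of how a caller could use it; the import path and the hand-rolled plugin below are illustrative assumptions, not part of this diff:

// Import path is an assumption; Markdown is the named export shown above.
import { Markdown } from '@/app/components/base/markdown'

// Tiny ad-hoc rehype plugin: walks the hast tree and forces anchor elements
// to open in a new tab. Any standard rehype plugin can be passed the same way.
const openLinksInNewTab = () => (tree: any) => {
  const iterate = (node: any) => {
    if (node.type === 'element' && node.tagName === 'a')
      node.properties = { ...node.properties, target: '_blank', rel: 'noopener noreferrer' }
    if (node.children)
      node.children.forEach(iterate)
  }
  iterate(tree)
}

const Example = () => (
  <Markdown
    content='[docs](https://example.com)'
    rehypePlugins={[openLinksInNewTab]}
  />
)

export default Example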

@@ -22,6 +22,7 @@ export type ReactMarkdownWrapperProps = {
customDisallowedElements?: string[]
customComponents?: Record<string, React.ComponentType<any>>
pluginInfo?: SimplePluginInfo
rehypePlugins?: any// js: PluggableList[]
}
export const ReactMarkdownWrapper: FC<ReactMarkdownWrapperProps> = (props) => {
@@ -55,6 +56,7 @@ export const ReactMarkdownWrapper: FC<ReactMarkdownWrapperProps> = (props) => {
tree.children.forEach(iterate)
}
},
...(props.rehypePlugins || []),
]}
urlTransform={customUrlTransform}
disallowedElements={['iframe', 'head', 'html', 'meta', 'link', 'style', 'body', ...(props.customDisallowedElements || [])]}

@@ -4,6 +4,7 @@ import { SupportUploadFileTypes } from '../../workflow/types'
export const CONTEXT_PLACEHOLDER_TEXT = '{{#context#}}'
export const HISTORY_PLACEHOLDER_TEXT = '{{#histories#}}'
export const QUERY_PLACEHOLDER_TEXT = '{{#query#}}'
export const REQUEST_URL_PLACEHOLDER_TEXT = '{{#url#}}'
export const CURRENT_PLACEHOLDER_TEXT = '{{#current#}}'
export const ERROR_MESSAGE_PLACEHOLDER_TEXT = '{{#error_message#}}'
export const LAST_RUN_PLACEHOLDER_TEXT = '{{#last_run#}}'
@@ -30,6 +31,12 @@ export const checkHasQueryBlock = (text: string) => {
return text.includes(QUERY_PLACEHOLDER_TEXT)
}
export const checkHasRequestURLBlock = (text: string) => {
if (!text)
return false
return text.includes(REQUEST_URL_PLACEHOLDER_TEXT)
}
/*
* {{#1711617514996.name#}} => [1711617514996, name]
* {{#1711617514996.sys.query#}} => [sys, query]
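
Like the existing query and context checks above it, the new helper is a plain substring test for the {{#url#}} placeholder. A small usage sketch; the import path is an assumption, while the constant and function names come from the diff above:

// Illustrative only; adjust the import path to wherever these constants live.
import { REQUEST_URL_PLACEHOLDER_TEXT, checkHasRequestURLBlock } from '@/app/components/base/prompt-editor/constants'

const prompt = `Summarize the page at ${REQUEST_URL_PLACEHOLDER_TEXT}.`

checkHasRequestURLBlock(prompt) // true: the prompt contains {{#url#}}
checkHasRequestURLBlock('')     // false: empty text is handled explicitly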

Some files were not shown because too many files have changed in this diff.