Compare commits

...

42 Commits

Author SHA1 Message Date
copilot-swe-agent[bot]
f5528f2030 Initial plan 2025-11-18 16:22:22 +00:00
-LAN-
6efdc94661 refactor: consume events after pause/abort and improve API clarity (#28328)
Co-authored-by: QuantumGhost <obelisk.reg+git@gmail.com>
2025-11-18 19:04:11 +08:00
github-actions[bot]
68526c09fc chore: translate i18n files and update type definitions (#28284)
Co-authored-by: zhsama <33454514+zhsama@users.noreply.github.com>
Co-authored-by: lyzno1 <92089059+lyzno1@users.noreply.github.com>
2025-11-18 18:52:36 +08:00
kenwoodjw
a78bc507c0 fix: dataset metadata counts when documents are deleted (#28305)
Signed-off-by: kenwoodjw <blackxin55+@gmail.com>
2025-11-18 17:36:07 +08:00
Joel
e83c7438cb doc: add doc for env config when site and backend are in different domains (#28318)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com>
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-11-18 17:29:54 +08:00
Jyong
82068a6918 add vdb-test workflow run filter (#28336) 2025-11-18 17:22:15 +08:00
Asuka Minato
108bcbeb7c add cnt script and one more example (#28272)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com>
2025-11-18 16:44:14 +09:00
非法操作
c4b02be6d3 fix: published webhook can't receive inputs (#28205)
2025-11-18 11:14:26 +08:00
lyzno1
30eebf804f chore: remove unused style.module.css from app-icon component (#28302) 2025-11-18 10:36:39 +08:00
Yessenia-d
ad7fdd18d0 fix: update currentTriggerPlugin check in BasePanel component (#28287)
2025-11-17 17:19:35 +08:00
zhsama
5d2fbf5215 Perf/mutual node UI (#28282) 2025-11-17 16:23:04 +08:00
非法操作
4a89403566 fix: click log panel of log page cause whole page crash (#28218)
2025-11-14 16:38:43 +09:00
crazywoola
e0c05b2123 add icon for forum (#28164) 2025-11-14 16:38:19 +09:00
lyzno1
85b99580ea fix: card view render (#28189) 2025-11-14 14:16:11 +08:00
lyzno1
15fbedfcad feat: add icon gallery stories (#28214)
Signed-off-by: lyzno1 <yuanyouhuilyz@gmail.com>
2025-11-14 13:34:23 +08:00
非法操作
1e6d0de48b fix: knowledge pipeline can not published (#28203)
2025-11-14 09:47:37 +08:00
Anubhav Singh
cad751c00c Upgrade weave version to fix weave configuration failure (#28197) 2025-11-14 09:47:21 +08:00
Maries
a47276ac24 chore: bump to 1.10.0 (#28186)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-11-13 22:36:04 +08:00
yangzheli
20403c69b2 refactor(web): remove redundant add-tool-modal components and related code (#27996) 2025-11-13 20:21:04 +08:00
hoffer
ffc04f2a9b fix: StreamableHTTPTransport got invalid json exception when receive a ping event from mcp server #28111 (#28116) 2025-11-13 20:19:48 +08:00
Asuka Minato
d1580791e4 TypedBase + TypedDict (#28137)
Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com>
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-11-13 20:18:51 +08:00
NeatGuyCoding
c74eb4fcf3 minor fix(rag): return early when pushing empty tasks to avoid Redis DataError (#28027)
Signed-off-by: NeatGuyCoding <15627489+NeatGuyCoding@users.noreply.github.com>
2025-11-13 20:18:11 +08:00
NeatGuyCoding
a798534337 fix(web): fix unit promotion in formatNumberAbbreviated (#27918)
Signed-off-by: NeatGuyCoding <15627489+NeatGuyCoding@users.noreply.github.com>
2025-11-13 20:17:26 +08:00
GuanMu
470883858e fix: adjust padding in AgentNode and NodeComponent for consistent layout (#28175) 2025-11-13 20:16:56 +08:00
GuanMu
4f4911686d fix: update start-worker alias to include additional queues for bette… (#28179) 2025-11-13 20:16:44 +08:00
GuanMu
6d479dcdbb fix: update package manager version to 10.22.0 (#28181) 2025-11-13 20:16:00 +08:00
zhsama
24348c40a6 feat: enhance start node metadata to be undeletable in chat mode (#28173)
2025-11-13 18:11:15 +08:00
yihong
a39b50adbb fix: skip tests if no database run (#28102)
Signed-off-by: yihong0618 <zouzou0208@gmail.com>
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com>
2025-11-13 15:57:13 +08:00
李龙飞
81832c14ee Fix: Correctly handle merged cells in DOCX tables to prevent content duplication and loss (#27871)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com>
2025-11-13 15:56:24 +08:00
zhsama
b86022c64a feat: add draft trigger detection to app model and UI (#28163)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-11-13 15:43:58 +08:00
breath57
45e816a9f6 fix(knowledge-base): regenerate child chunks not working completely (#27934) 2025-11-13 15:36:27 +08:00
Joel
667b1c37a3 fix: can still invite when api is pending (#28161) 2025-11-13 15:28:32 +08:00
Chen Yu
b75d533f9b fix(moderation): change OpenAI moderation model to omni-moderation-la… (#28119)
Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com>
2025-11-13 15:21:44 +08:00
CrabSAMA
aece55d82f fix: fixed error when clear value of INTEGER and FLOAT type (#27954)
Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com>
2025-11-13 15:21:34 +08:00
kenwoodjw
c432b398f4 fix: missing pipeline_templates.json when HOSTED_FETCH_PIPELINE_TEMPLATES_MODE is builtin (#27946)
Signed-off-by: kenwoodjw <blackxin55+@gmail.com>
Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com>
2025-11-13 15:04:35 +08:00
katakyo
9cb2645793 fix: update input field width for retry configuration in RetryOnPanel (#28142) 2025-11-13 15:00:22 +08:00
ye4241
6ac61bd585 fix: correct spelling of "模板" in translation files (#28151) 2025-11-13 14:58:10 +08:00
非法操作
b02165ffe6 fix: inconsistent behaviour of zoom in button and shortcut (#27944) 2025-11-13 14:37:27 +08:00
Asuka Minato
6c576e2c66 add doc (#28016)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com>
2025-11-13 13:38:45 +09:00
yangzheli
b0e7e7752f refactor(web): reuse the same edit-custom-collection-modal component, and fix the pop up error (#28003) 2025-11-13 11:44:21 +08:00
mnasrautinno
2799b79e8c fix: app's ai site text to speech api (#28091) 2025-11-13 11:44:04 +08:00
Maries
805a1479f9 fix: simplify graph structure validation in WorkflowService (#28146)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com>
2025-11-13 10:59:31 +08:00
163 changed files with 10670 additions and 3433 deletions


@@ -6,11 +6,10 @@ cd web && pnpm install
pipx install uv
echo "alias start-api=\"cd $WORKSPACE_ROOT/api && uv run python -m flask run --host 0.0.0.0 --port=5001 --debug\"" >> ~/.bashrc
echo "alias start-worker=\"cd $WORKSPACE_ROOT/api && uv run python -m celery -A app.celery worker -P threads -c 1 --loglevel INFO -Q dataset,generation,mail,ops_trace,app_deletion,plugin,workflow_storage\"" >> ~/.bashrc
echo "alias start-worker=\"cd $WORKSPACE_ROOT/api && uv run python -m celery -A app.celery worker -P threads -c 1 --loglevel INFO -Q dataset,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor\"" >> ~/.bashrc
echo "alias start-web=\"cd $WORKSPACE_ROOT/web && pnpm dev\"" >> ~/.bashrc
echo "alias start-web-prod=\"cd $WORKSPACE_ROOT/web && pnpm build && pnpm start\"" >> ~/.bashrc
echo "alias start-containers=\"cd $WORKSPACE_ROOT/docker && docker-compose -f docker-compose.middleware.yaml -p dify --env-file middleware.env up -d\"" >> ~/.bashrc
echo "alias stop-containers=\"cd $WORKSPACE_ROOT/docker && docker-compose -f docker-compose.middleware.yaml -p dify --env-file middleware.env down\"" >> ~/.bashrc
source /home/vscode/.bashrc


@@ -28,6 +28,11 @@ jobs:
# Format code
uv run ruff format ..
- name: count migration progress
run: |
cd api
./cnt_base.sh
- name: ast-grep
run: |
uvx --from ast-grep-cli sg --pattern 'db.session.query($WHATEVER).filter($HERE)' --rewrite 'db.session.query($WHATEVER).where($HERE)' -l py --update-all
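
The ast-grep step above mass-migrates legacy SQLAlchemy query calls to the 2.0-style API. A minimal sketch of the rewrite it performs (the Account model and its column are hypothetical; only the .filter() -> .where() call changes):

# Before: matched by the pattern db.session.query($WHATEVER).filter($HERE)
active_accounts = db.session.query(Account).filter(Account.status == "active").all()

# After: rewritten in place to the equivalent 2.0-style .where() form
active_accounts = db.session.query(Account).where(Account.status == "active").all()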


@@ -1,7 +1,10 @@
name: Run VDB Tests
on:
workflow_call:
push:
branches: [main]
paths:
- 'api/core/rag/*.py'
concurrency:
group: vdb-tests-${{ github.head_ref || github.run_id }}


@@ -159,8 +159,7 @@ SUPABASE_URL=your-server-url
# CORS configuration
WEB_API_CORS_ALLOW_ORIGINS=http://localhost:3000,*
CONSOLE_CORS_ALLOW_ORIGINS=http://localhost:3000,*
# Set COOKIE_DOMAIN when the console frontend and API are on different subdomains.
# Provide the registrable domain (e.g. example.com); leading dots are optional.
# When the frontend and backend run on different subdomains, set COOKIE_DOMAIN to the site's top-level domain (e.g., `example.com`). Leading dots are optional.
COOKIE_DOMAIN=
# Vector database configuration


@@ -26,6 +26,10 @@
cp .env.example .env
```
> [!IMPORTANT]
>
> When the frontend and backend run on different subdomains, set COOKIE_DOMAIN to the site's top-level domain (e.g., `example.com`). The frontend and backend must be under the same top-level domain in order to share authentication cookies.
1. Generate a `SECRET_KEY` in the `.env` file.
bash for Linux
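
To make the cross-subdomain note above concrete, a minimal sketch of the relevant `.env` values for a deployment serving the console from console.example.com and the API from api.example.com (the hostnames are hypothetical; the CONSOLE_* variable names are assumed from Dify's standard console settings):

CONSOLE_WEB_URL=https://console.example.com
CONSOLE_API_URL=https://api.example.com
# Registrable domain shared by both subdomains; a leading dot is optional
COOKIE_DOMAIN=example.com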

api/cnt_base.sh (new executable file, 7 lines added)

@@ -0,0 +1,7 @@
#!/bin/bash
set -euxo pipefail
for pattern in "Base" "TypeBase"; do
printf "%s " "$pattern"
grep "($pattern):" -r --include='*.py' --exclude-dir=".venv" --exclude-dir="tests" . | wc -l
done
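
The new cnt_base.sh script reports, for each of the two patterns, how many Python class definitions outside .venv and tests inherit from that base; the "count migration progress" step name suggests it tracks the move from one base class to the other. A hypothetical illustration of the class declarations each grep pattern counts (model names are illustrative):

# Matched by grep "(Base):"
class Account(Base):
    __tablename__ = "accounts"

# Matched by grep "(TypeBase):"
class Workflow(TypeBase):
    __tablename__ = "workflows"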

File diff suppressed because one or more lines are too long


@@ -5,18 +5,20 @@ from controllers.console.wraps import account_initialization_required, setup_req
from libs.login import login_required
from services.advanced_prompt_template_service import AdvancedPromptTemplateService
parser = (
reqparse.RequestParser()
.add_argument("app_mode", type=str, required=True, location="args", help="Application mode")
.add_argument("model_mode", type=str, required=True, location="args", help="Model mode")
.add_argument("has_context", type=str, required=False, default="true", location="args", help="Whether has context")
.add_argument("model_name", type=str, required=True, location="args", help="Model name")
)
@console_ns.route("/app/prompt-templates")
class AdvancedPromptTemplateList(Resource):
@api.doc("get_advanced_prompt_templates")
@api.doc(description="Get advanced prompt templates based on app mode and model configuration")
@api.expect(
api.parser()
.add_argument("app_mode", type=str, required=True, location="args", help="Application mode")
.add_argument("model_mode", type=str, required=True, location="args", help="Model mode")
.add_argument("has_context", type=str, default="true", location="args", help="Whether has context")
.add_argument("model_name", type=str, required=True, location="args", help="Model name")
)
@api.expect(parser)
@api.response(
200, "Prompt templates retrieved successfully", fields.List(fields.Raw(description="Prompt template data"))
)
@@ -25,13 +27,6 @@ class AdvancedPromptTemplateList(Resource):
@login_required
@account_initialization_required
def get(self):
parser = (
reqparse.RequestParser()
.add_argument("app_mode", type=str, required=True, location="args")
.add_argument("model_mode", type=str, required=True, location="args")
.add_argument("has_context", type=str, required=False, default="true", location="args")
.add_argument("model_name", type=str, required=True, location="args")
)
args = parser.parse_args()
return AdvancedPromptTemplateService.get_prompt(args)
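
The hunk above shows the pattern repeated throughout the controller diffs below: the RequestParser is hoisted from the request handler to module scope so a single definition backs both the @api.expect() documentation decorator and parse_args() at runtime, instead of maintaining two slightly different copies. A condensed sketch of the before/after shape (resource and argument names are illustrative; `api` is the flask_restx namespace these controllers already import):

from flask_restx import Resource, reqparse

# Before: docs declared with a throwaway api.parser(), runtime parser rebuilt on every request
class ExampleApi(Resource):
    @api.expect(api.parser().add_argument("q", type=str, location="args"))
    def get(self):
        parser = reqparse.RequestParser().add_argument("q", type=str, location="args")
        args = parser.parse_args()
        return {"q": args["q"]}

# After: one module-level parser shared by the docs decorator and the handler
parser = reqparse.RequestParser().add_argument("q", type=str, location="args")

class ExampleApi(Resource):
    @api.expect(parser)
    def get(self):
        args = parser.parse_args()
        return {"q": args["q"]}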


@@ -8,17 +8,19 @@ from libs.login import login_required
from models.model import AppMode
from services.agent_service import AgentService
parser = (
reqparse.RequestParser()
.add_argument("message_id", type=uuid_value, required=True, location="args", help="Message UUID")
.add_argument("conversation_id", type=uuid_value, required=True, location="args", help="Conversation UUID")
)
@console_ns.route("/apps/<uuid:app_id>/agent/logs")
class AgentLogApi(Resource):
@api.doc("get_agent_logs")
@api.doc(description="Get agent execution logs for an application")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.parser()
.add_argument("message_id", type=str, required=True, location="args", help="Message UUID")
.add_argument("conversation_id", type=str, required=True, location="args", help="Conversation UUID")
)
@api.expect(parser)
@api.response(200, "Agent logs retrieved successfully", fields.List(fields.Raw(description="Agent log entries")))
@api.response(400, "Invalid request parameters")
@setup_required
@@ -27,12 +29,6 @@ class AgentLogApi(Resource):
@get_app_model(mode=[AppMode.AGENT_CHAT])
def get(self, app_model):
"""Get agent logs"""
parser = (
reqparse.RequestParser()
.add_argument("message_id", type=uuid_value, required=True, location="args")
.add_argument("conversation_id", type=uuid_value, required=True, location="args")
)
args = parser.parse_args()
return AgentService.get_agent_logs(app_model, args["conversation_id"], args["message_id"])


@@ -251,6 +251,13 @@ class AnnotationExportApi(Resource):
return response, 200
parser = (
reqparse.RequestParser()
.add_argument("question", required=True, type=str, location="json")
.add_argument("answer", required=True, type=str, location="json")
)
@console_ns.route("/apps/<uuid:app_id>/annotations/<uuid:annotation_id>")
class AnnotationUpdateDeleteApi(Resource):
@api.doc("update_delete_annotation")
@@ -259,6 +266,7 @@ class AnnotationUpdateDeleteApi(Resource):
@api.response(200, "Annotation updated successfully", annotation_fields)
@api.response(204, "Annotation deleted successfully")
@api.response(403, "Insufficient permissions")
@api.expect(parser)
@setup_required
@login_required
@account_initialization_required
@@ -268,11 +276,6 @@ class AnnotationUpdateDeleteApi(Resource):
def post(self, app_id, annotation_id):
app_id = str(app_id)
annotation_id = str(annotation_id)
parser = (
reqparse.RequestParser()
.add_argument("question", required=True, type=str, location="json")
.add_argument("answer", required=True, type=str, location="json")
)
args = parser.parse_args()
annotation = AppAnnotationService.update_app_annotation_directly(args, app_id, annotation_id)
return annotation


@@ -15,11 +15,12 @@ from controllers.console.wraps import (
setup_required,
)
from core.ops.ops_trace_manager import OpsTraceManager
from core.workflow.enums import NodeType
from extensions.ext_database import db
from fields.app_fields import app_detail_fields, app_detail_fields_with_site, app_pagination_fields
from libs.login import current_account_with_tenant, login_required
from libs.validators import validate_description_length
from models import App
from models import App, Workflow
from services.app_dsl_service import AppDslService, ImportMode
from services.app_service import AppService
from services.enterprise.enterprise_service import EnterpriseService
@@ -106,6 +107,35 @@ class AppListApi(Resource):
if str(app.id) in res:
app.access_mode = res[str(app.id)].access_mode
workflow_capable_app_ids = [
str(app.id) for app in app_pagination.items if app.mode in {"workflow", "advanced-chat"}
]
draft_trigger_app_ids: set[str] = set()
if workflow_capable_app_ids:
draft_workflows = (
db.session.execute(
select(Workflow).where(
Workflow.version == Workflow.VERSION_DRAFT,
Workflow.app_id.in_(workflow_capable_app_ids),
)
)
.scalars()
.all()
)
trigger_node_types = {
NodeType.TRIGGER_WEBHOOK,
NodeType.TRIGGER_SCHEDULE,
NodeType.TRIGGER_PLUGIN,
}
for workflow in draft_workflows:
for _, node_data in workflow.walk_nodes():
if node_data.get("type") in trigger_node_types:
draft_trigger_app_ids.add(str(workflow.app_id))
break
for app in app_pagination.items:
app.has_draft_trigger = str(app.id) in draft_trigger_app_ids
return marshal(app_pagination, app_pagination_fields), 200
@api.doc("create_app")
@@ -353,12 +383,15 @@ class AppExportApi(Resource):
}
parser = reqparse.RequestParser().add_argument("name", type=str, required=True, location="json", help="Name to check")
@console_ns.route("/apps/<uuid:app_id>/name")
class AppNameApi(Resource):
@api.doc("check_app_name")
@api.doc(description="Check if app name is available")
@api.doc(params={"app_id": "Application ID"})
@api.expect(api.parser().add_argument("name", type=str, required=True, location="args", help="Name to check"))
@api.expect(parser)
@api.response(200, "Name availability checked")
@setup_required
@login_required
@@ -367,7 +400,6 @@ class AppNameApi(Resource):
@marshal_with(app_detail_fields)
@edit_permission_required
def post(self, app_model):
parser = reqparse.RequestParser().add_argument("name", type=str, required=True, location="json")
args = parser.parse_args()
app_service = AppService()


@@ -1,6 +1,7 @@
from flask_restx import Resource, marshal_with, reqparse
from sqlalchemy.orm import Session
from controllers.console import api
from controllers.console.app.wraps import get_app_model
from controllers.console.wraps import (
account_initialization_required,
@@ -18,9 +19,23 @@ from services.feature_service import FeatureService
from .. import console_ns
parser = (
reqparse.RequestParser()
.add_argument("mode", type=str, required=True, location="json")
.add_argument("yaml_content", type=str, location="json")
.add_argument("yaml_url", type=str, location="json")
.add_argument("name", type=str, location="json")
.add_argument("description", type=str, location="json")
.add_argument("icon_type", type=str, location="json")
.add_argument("icon", type=str, location="json")
.add_argument("icon_background", type=str, location="json")
.add_argument("app_id", type=str, location="json")
)
@console_ns.route("/apps/imports")
class AppImportApi(Resource):
@api.expect(parser)
@setup_required
@login_required
@account_initialization_required
@@ -30,18 +45,6 @@ class AppImportApi(Resource):
def post(self):
# Check user role first
current_user, _ = current_account_with_tenant()
parser = (
reqparse.RequestParser()
.add_argument("mode", type=str, required=True, location="json")
.add_argument("yaml_content", type=str, location="json")
.add_argument("yaml_url", type=str, location="json")
.add_argument("name", type=str, location="json")
.add_argument("description", type=str, location="json")
.add_argument("icon_type", type=str, location="json")
.add_argument("icon", type=str, location="json")
.add_argument("icon_background", type=str, location="json")
.add_argument("app_id", type=str, location="json")
)
args = parser.parse_args()
# Create service with session


@@ -80,16 +80,19 @@ WHERE
return jsonify({"data": response_data})
parser = (
reqparse.RequestParser()
.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args", help="Start date (YYYY-MM-DD HH:MM)")
.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args", help="End date (YYYY-MM-DD HH:MM)")
)
@console_ns.route("/apps/<uuid:app_id>/statistics/daily-conversations")
class DailyConversationStatistic(Resource):
@api.doc("get_daily_conversation_statistics")
@api.doc(description="Get daily conversation statistics for an application")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.parser()
.add_argument("start", type=str, location="args", help="Start date (YYYY-MM-DD HH:MM)")
.add_argument("end", type=str, location="args", help="End date (YYYY-MM-DD HH:MM)")
)
@api.expect(parser)
@api.response(
200,
"Daily conversation statistics retrieved successfully",
@@ -102,11 +105,6 @@ class DailyConversationStatistic(Resource):
def get(self, app_model):
account, _ = current_account_with_tenant()
parser = (
reqparse.RequestParser()
.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
)
args = parser.parse_args()
assert account.timezone is not None
@@ -148,11 +146,7 @@ class DailyTerminalsStatistic(Resource):
@api.doc("get_daily_terminals_statistics")
@api.doc(description="Get daily terminal/end-user statistics for an application")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.parser()
.add_argument("start", type=str, location="args", help="Start date (YYYY-MM-DD HH:MM)")
.add_argument("end", type=str, location="args", help="End date (YYYY-MM-DD HH:MM)")
)
@api.expect(parser)
@api.response(
200,
"Daily terminal statistics retrieved successfully",
@@ -165,11 +159,6 @@ class DailyTerminalsStatistic(Resource):
def get(self, app_model):
account, _ = current_account_with_tenant()
parser = (
reqparse.RequestParser()
.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
)
args = parser.parse_args()
sql_query = """SELECT
@@ -213,11 +202,7 @@ class DailyTokenCostStatistic(Resource):
@api.doc("get_daily_token_cost_statistics")
@api.doc(description="Get daily token cost statistics for an application")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.parser()
.add_argument("start", type=str, location="args", help="Start date (YYYY-MM-DD HH:MM)")
.add_argument("end", type=str, location="args", help="End date (YYYY-MM-DD HH:MM)")
)
@api.expect(parser)
@api.response(
200,
"Daily token cost statistics retrieved successfully",
@@ -230,11 +215,6 @@ class DailyTokenCostStatistic(Resource):
def get(self, app_model):
account, _ = current_account_with_tenant()
parser = (
reqparse.RequestParser()
.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
)
args = parser.parse_args()
sql_query = """SELECT
@@ -281,11 +261,7 @@ class AverageSessionInteractionStatistic(Resource):
@api.doc("get_average_session_interaction_statistics")
@api.doc(description="Get average session interaction statistics for an application")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.parser()
.add_argument("start", type=str, location="args", help="Start date (YYYY-MM-DD HH:MM)")
.add_argument("end", type=str, location="args", help="End date (YYYY-MM-DD HH:MM)")
)
@api.expect(parser)
@api.response(
200,
"Average session interaction statistics retrieved successfully",
@@ -298,11 +274,6 @@ class AverageSessionInteractionStatistic(Resource):
def get(self, app_model):
account, _ = current_account_with_tenant()
parser = (
reqparse.RequestParser()
.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
)
args = parser.parse_args()
sql_query = """SELECT
@@ -365,11 +336,7 @@ class UserSatisfactionRateStatistic(Resource):
@api.doc("get_user_satisfaction_rate_statistics")
@api.doc(description="Get user satisfaction rate statistics for an application")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.parser()
.add_argument("start", type=str, location="args", help="Start date (YYYY-MM-DD HH:MM)")
.add_argument("end", type=str, location="args", help="End date (YYYY-MM-DD HH:MM)")
)
@api.expect(parser)
@api.response(
200,
"User satisfaction rate statistics retrieved successfully",
@@ -382,11 +349,6 @@ class UserSatisfactionRateStatistic(Resource):
def get(self, app_model):
account, _ = current_account_with_tenant()
parser = (
reqparse.RequestParser()
.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
)
args = parser.parse_args()
sql_query = """SELECT
@@ -439,11 +401,7 @@ class AverageResponseTimeStatistic(Resource):
@api.doc("get_average_response_time_statistics")
@api.doc(description="Get average response time statistics for an application")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.parser()
.add_argument("start", type=str, location="args", help="Start date (YYYY-MM-DD HH:MM)")
.add_argument("end", type=str, location="args", help="End date (YYYY-MM-DD HH:MM)")
)
@api.expect(parser)
@api.response(
200,
"Average response time statistics retrieved successfully",
@@ -456,11 +414,6 @@ class AverageResponseTimeStatistic(Resource):
def get(self, app_model):
account, _ = current_account_with_tenant()
parser = (
reqparse.RequestParser()
.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
)
args = parser.parse_args()
sql_query = """SELECT
@@ -504,11 +457,7 @@ class TokensPerSecondStatistic(Resource):
@api.doc("get_tokens_per_second_statistics")
@api.doc(description="Get tokens per second statistics for an application")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.parser()
.add_argument("start", type=str, location="args", help="Start date (YYYY-MM-DD HH:MM)")
.add_argument("end", type=str, location="args", help="End date (YYYY-MM-DD HH:MM)")
)
@api.expect(parser)
@api.response(
200,
"Tokens per second statistics retrieved successfully",
@@ -520,12 +469,6 @@ class TokensPerSecondStatistic(Resource):
@account_initialization_required
def get(self, app_model):
account, _ = current_account_with_tenant()
parser = (
reqparse.RequestParser()
.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
)
args = parser.parse_args()
sql_query = """SELECT


@@ -586,6 +586,13 @@ class DraftWorkflowNodeRunApi(Resource):
return workflow_node_execution
parser_publish = (
reqparse.RequestParser()
.add_argument("marked_name", type=str, required=False, default="", location="json")
.add_argument("marked_comment", type=str, required=False, default="", location="json")
)
@console_ns.route("/apps/<uuid:app_id>/workflows/publish")
class PublishedWorkflowApi(Resource):
@api.doc("get_published_workflow")
@@ -610,6 +617,7 @@ class PublishedWorkflowApi(Resource):
# return workflow, if not found, return None
return workflow
@api.expect(parser_publish)
@setup_required
@login_required
@account_initialization_required
@@ -620,12 +628,8 @@ class PublishedWorkflowApi(Resource):
Publish workflow
"""
current_user, _ = current_account_with_tenant()
parser = (
reqparse.RequestParser()
.add_argument("marked_name", type=str, required=False, default="", location="json")
.add_argument("marked_comment", type=str, required=False, default="", location="json")
)
args = parser.parse_args()
args = parser_publish.parse_args()
# Validate name and comment length
if args.marked_name and len(args.marked_name) > 20:
@@ -680,6 +684,9 @@ class DefaultBlockConfigsApi(Resource):
return workflow_service.get_default_block_configs()
parser_block = reqparse.RequestParser().add_argument("q", type=str, location="args")
@console_ns.route("/apps/<uuid:app_id>/workflows/default-workflow-block-configs/<string:block_type>")
class DefaultBlockConfigApi(Resource):
@api.doc("get_default_block_config")
@@ -687,6 +694,7 @@ class DefaultBlockConfigApi(Resource):
@api.doc(params={"app_id": "Application ID", "block_type": "Block type"})
@api.response(200, "Default block configuration retrieved successfully")
@api.response(404, "Block type not found")
@api.expect(parser_block)
@setup_required
@login_required
@account_initialization_required
@@ -696,8 +704,7 @@ class DefaultBlockConfigApi(Resource):
"""
Get default block config
"""
parser = reqparse.RequestParser().add_argument("q", type=str, location="args")
args = parser.parse_args()
args = parser_block.parse_args()
q = args.get("q")
@@ -713,8 +720,18 @@ class DefaultBlockConfigApi(Resource):
return workflow_service.get_default_block_config(node_type=block_type, filters=filters)
parser_convert = (
reqparse.RequestParser()
.add_argument("name", type=str, required=False, nullable=True, location="json")
.add_argument("icon_type", type=str, required=False, nullable=True, location="json")
.add_argument("icon", type=str, required=False, nullable=True, location="json")
.add_argument("icon_background", type=str, required=False, nullable=True, location="json")
)
@console_ns.route("/apps/<uuid:app_id>/convert-to-workflow")
class ConvertToWorkflowApi(Resource):
@api.expect(parser_convert)
@api.doc("convert_to_workflow")
@api.doc(description="Convert application to workflow mode")
@api.doc(params={"app_id": "Application ID"})
@@ -735,14 +752,7 @@ class ConvertToWorkflowApi(Resource):
current_user, _ = current_account_with_tenant()
if request.data:
parser = (
reqparse.RequestParser()
.add_argument("name", type=str, required=False, nullable=True, location="json")
.add_argument("icon_type", type=str, required=False, nullable=True, location="json")
.add_argument("icon", type=str, required=False, nullable=True, location="json")
.add_argument("icon_background", type=str, required=False, nullable=True, location="json")
)
args = parser.parse_args()
args = parser_convert.parse_args()
else:
args = {}
@@ -756,8 +766,18 @@ class ConvertToWorkflowApi(Resource):
}
parser_workflows = (
reqparse.RequestParser()
.add_argument("page", type=inputs.int_range(1, 99999), required=False, default=1, location="args")
.add_argument("limit", type=inputs.int_range(1, 100), required=False, default=10, location="args")
.add_argument("user_id", type=str, required=False, location="args")
.add_argument("named_only", type=inputs.boolean, required=False, default=False, location="args")
)
@console_ns.route("/apps/<uuid:app_id>/workflows")
class PublishedAllWorkflowApi(Resource):
@api.expect(parser_workflows)
@api.doc("get_all_published_workflows")
@api.doc(description="Get all published workflows for an application")
@api.doc(params={"app_id": "Application ID"})
@@ -774,16 +794,9 @@ class PublishedAllWorkflowApi(Resource):
"""
current_user, _ = current_account_with_tenant()
parser = (
reqparse.RequestParser()
.add_argument("page", type=inputs.int_range(1, 99999), required=False, default=1, location="args")
.add_argument("limit", type=inputs.int_range(1, 100), required=False, default=20, location="args")
.add_argument("user_id", type=str, required=False, location="args")
.add_argument("named_only", type=inputs.boolean, required=False, default=False, location="args")
)
args = parser.parse_args()
page = int(args.get("page", 1))
limit = int(args.get("limit", 10))
args = parser_workflows.parse_args()
page = args["page"]
limit = args["limit"]
user_id = args.get("user_id")
named_only = args.get("named_only", False)


@@ -30,23 +30,25 @@ def _parse_workflow_run_list_args():
Returns:
Parsed arguments containing last_id, limit, status, and triggered_from filters
"""
parser = reqparse.RequestParser()
parser.add_argument("last_id", type=uuid_value, location="args")
parser.add_argument("limit", type=int_range(1, 100), required=False, default=20, location="args")
parser.add_argument(
"status",
type=str,
choices=WORKFLOW_RUN_STATUS_CHOICES,
location="args",
required=False,
)
parser.add_argument(
"triggered_from",
type=str,
choices=["debugging", "app-run"],
location="args",
required=False,
help="Filter by trigger source: debugging or app-run",
parser = (
reqparse.RequestParser()
.add_argument("last_id", type=uuid_value, location="args")
.add_argument("limit", type=int_range(1, 100), required=False, default=20, location="args")
.add_argument(
"status",
type=str,
choices=WORKFLOW_RUN_STATUS_CHOICES,
location="args",
required=False,
)
.add_argument(
"triggered_from",
type=str,
choices=["debugging", "app-run"],
location="args",
required=False,
help="Filter by trigger source: debugging or app-run",
)
)
return parser.parse_args()
@@ -58,28 +60,30 @@ def _parse_workflow_run_count_args():
Returns:
Parsed arguments containing status, time_range, and triggered_from filters
"""
parser = reqparse.RequestParser()
parser.add_argument(
"status",
type=str,
choices=WORKFLOW_RUN_STATUS_CHOICES,
location="args",
required=False,
)
parser.add_argument(
"time_range",
type=time_duration,
location="args",
required=False,
help="Time range filter (e.g., 7d, 4h, 30m, 30s)",
)
parser.add_argument(
"triggered_from",
type=str,
choices=["debugging", "app-run"],
location="args",
required=False,
help="Filter by trigger source: debugging or app-run",
parser = (
reqparse.RequestParser()
.add_argument(
"status",
type=str,
choices=WORKFLOW_RUN_STATUS_CHOICES,
location="args",
required=False,
)
.add_argument(
"time_range",
type=time_duration,
location="args",
required=False,
help="Time range filter (e.g., 7d, 4h, 30m, 30s)",
)
.add_argument(
"triggered_from",
type=str,
choices=["debugging", "app-run"],
location="args",
required=False,
help="Filter by trigger source: debugging or app-run",
)
)
return parser.parse_args()


@@ -3,7 +3,7 @@ from flask_restx import Resource, reqparse
from werkzeug.exceptions import Forbidden, NotFound
from configs import dify_config
from controllers.console import console_ns
from controllers.console import api, console_ns
from controllers.console.wraps import account_initialization_required, edit_permission_required, setup_required
from core.model_runtime.errors.validate import CredentialsValidateFailedError
from core.model_runtime.utils.encoders import jsonable_encoder
@@ -121,8 +121,16 @@ class DatasourceOAuthCallback(Resource):
return redirect(f"{dify_config.CONSOLE_WEB_URL}/oauth-callback")
parser_datasource = (
reqparse.RequestParser()
.add_argument("name", type=StrLen(max_length=100), required=False, nullable=True, location="json", default=None)
.add_argument("credentials", type=dict, required=True, nullable=False, location="json")
)
@console_ns.route("/auth/plugin/datasource/<path:provider_id>")
class DatasourceAuth(Resource):
@api.expect(parser_datasource)
@setup_required
@login_required
@account_initialization_required
@@ -130,14 +138,7 @@ class DatasourceAuth(Resource):
def post(self, provider_id: str):
_, current_tenant_id = current_account_with_tenant()
parser = (
reqparse.RequestParser()
.add_argument(
"name", type=StrLen(max_length=100), required=False, nullable=True, location="json", default=None
)
.add_argument("credentials", type=dict, required=True, nullable=False, location="json")
)
args = parser.parse_args()
args = parser_datasource.parse_args()
datasource_provider_id = DatasourceProviderID(provider_id)
datasource_provider_service = DatasourceProviderService()
@@ -168,8 +169,14 @@ class DatasourceAuth(Resource):
return {"result": datasources}, 200
parser_datasource_delete = reqparse.RequestParser().add_argument(
"credential_id", type=str, required=True, nullable=False, location="json"
)
@console_ns.route("/auth/plugin/datasource/<path:provider_id>/delete")
class DatasourceAuthDeleteApi(Resource):
@api.expect(parser_datasource_delete)
@setup_required
@login_required
@account_initialization_required
@@ -181,10 +188,7 @@ class DatasourceAuthDeleteApi(Resource):
plugin_id = datasource_provider_id.plugin_id
provider_name = datasource_provider_id.provider_name
parser = reqparse.RequestParser().add_argument(
"credential_id", type=str, required=True, nullable=False, location="json"
)
args = parser.parse_args()
args = parser_datasource_delete.parse_args()
datasource_provider_service = DatasourceProviderService()
datasource_provider_service.remove_datasource_credentials(
tenant_id=current_tenant_id,
@@ -195,8 +199,17 @@ class DatasourceAuthDeleteApi(Resource):
return {"result": "success"}, 200
parser_datasource_update = (
reqparse.RequestParser()
.add_argument("credentials", type=dict, required=False, nullable=True, location="json")
.add_argument("name", type=StrLen(max_length=100), required=False, nullable=True, location="json")
.add_argument("credential_id", type=str, required=True, nullable=False, location="json")
)
@console_ns.route("/auth/plugin/datasource/<path:provider_id>/update")
class DatasourceAuthUpdateApi(Resource):
@api.expect(parser_datasource_update)
@setup_required
@login_required
@account_initialization_required
@@ -205,13 +218,7 @@ class DatasourceAuthUpdateApi(Resource):
_, current_tenant_id = current_account_with_tenant()
datasource_provider_id = DatasourceProviderID(provider_id)
parser = (
reqparse.RequestParser()
.add_argument("credentials", type=dict, required=False, nullable=True, location="json")
.add_argument("name", type=StrLen(max_length=100), required=False, nullable=True, location="json")
.add_argument("credential_id", type=str, required=True, nullable=False, location="json")
)
args = parser.parse_args()
args = parser_datasource_update.parse_args()
datasource_provider_service = DatasourceProviderService()
datasource_provider_service.update_datasource_credentials(
@@ -251,8 +258,16 @@ class DatasourceHardCodeAuthListApi(Resource):
return {"result": jsonable_encoder(datasources)}, 200
parser_datasource_custom = (
reqparse.RequestParser()
.add_argument("client_params", type=dict, required=False, nullable=True, location="json")
.add_argument("enable_oauth_custom_client", type=bool, required=False, nullable=True, location="json")
)
@console_ns.route("/auth/plugin/datasource/<path:provider_id>/custom-client")
class DatasourceAuthOauthCustomClient(Resource):
@api.expect(parser_datasource_custom)
@setup_required
@login_required
@account_initialization_required
@@ -260,12 +275,7 @@ class DatasourceAuthOauthCustomClient(Resource):
def post(self, provider_id: str):
_, current_tenant_id = current_account_with_tenant()
parser = (
reqparse.RequestParser()
.add_argument("client_params", type=dict, required=False, nullable=True, location="json")
.add_argument("enable_oauth_custom_client", type=bool, required=False, nullable=True, location="json")
)
args = parser.parse_args()
args = parser_datasource_custom.parse_args()
datasource_provider_id = DatasourceProviderID(provider_id)
datasource_provider_service = DatasourceProviderService()
datasource_provider_service.setup_oauth_custom_client_params(
@@ -291,8 +301,12 @@ class DatasourceAuthOauthCustomClient(Resource):
return {"result": "success"}, 200
parser_default = reqparse.RequestParser().add_argument("id", type=str, required=True, nullable=False, location="json")
@console_ns.route("/auth/plugin/datasource/<path:provider_id>/default")
class DatasourceAuthDefaultApi(Resource):
@api.expect(parser_default)
@setup_required
@login_required
@account_initialization_required
@@ -300,8 +314,7 @@ class DatasourceAuthDefaultApi(Resource):
def post(self, provider_id: str):
_, current_tenant_id = current_account_with_tenant()
parser = reqparse.RequestParser().add_argument("id", type=str, required=True, nullable=False, location="json")
args = parser.parse_args()
args = parser_default.parse_args()
datasource_provider_id = DatasourceProviderID(provider_id)
datasource_provider_service = DatasourceProviderService()
datasource_provider_service.set_default_datasource_provider(
@@ -312,8 +325,16 @@ class DatasourceAuthDefaultApi(Resource):
return {"result": "success"}, 200
parser_update_name = (
reqparse.RequestParser()
.add_argument("name", type=StrLen(max_length=100), required=True, nullable=False, location="json")
.add_argument("credential_id", type=str, required=True, nullable=False, location="json")
)
@console_ns.route("/auth/plugin/datasource/<path:provider_id>/update-name")
class DatasourceUpdateProviderNameApi(Resource):
@api.expect(parser_update_name)
@setup_required
@login_required
@account_initialization_required
@@ -321,12 +342,7 @@ class DatasourceUpdateProviderNameApi(Resource):
def post(self, provider_id: str):
_, current_tenant_id = current_account_with_tenant()
parser = (
reqparse.RequestParser()
.add_argument("name", type=StrLen(max_length=100), required=True, nullable=False, location="json")
.add_argument("credential_id", type=str, required=True, nullable=False, location="json")
)
args = parser.parse_args()
args = parser_update_name.parse_args()
datasource_provider_id = DatasourceProviderID(provider_id)
datasource_provider_service = DatasourceProviderService()
datasource_provider_service.update_datasource_provider_name(


@@ -9,7 +9,7 @@ from sqlalchemy.orm import Session
from werkzeug.exceptions import Forbidden, InternalServerError, NotFound
import services
from controllers.console import console_ns
from controllers.console import api, console_ns
from controllers.console.app.error import (
ConversationCompletedError,
DraftWorkflowNotExist,
@@ -148,8 +148,12 @@ class DraftRagPipelineApi(Resource):
}
parser_run = reqparse.RequestParser().add_argument("inputs", type=dict, location="json")
@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/draft/iteration/nodes/<string:node_id>/run")
class RagPipelineDraftRunIterationNodeApi(Resource):
@api.expect(parser_run)
@setup_required
@login_required
@account_initialization_required
@@ -162,8 +166,7 @@ class RagPipelineDraftRunIterationNodeApi(Resource):
# The role of the current user in the ta table must be admin, owner, or editor
current_user, _ = current_account_with_tenant()
parser = reqparse.RequestParser().add_argument("inputs", type=dict, location="json")
args = parser.parse_args()
args = parser_run.parse_args()
try:
response = PipelineGenerateService.generate_single_iteration(
@@ -184,6 +187,7 @@ class RagPipelineDraftRunIterationNodeApi(Resource):
@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/draft/loop/nodes/<string:node_id>/run")
class RagPipelineDraftRunLoopNodeApi(Resource):
@api.expect(parser_run)
@setup_required
@login_required
@account_initialization_required
@@ -197,8 +201,7 @@ class RagPipelineDraftRunLoopNodeApi(Resource):
if not current_user.has_edit_permission:
raise Forbidden()
parser = reqparse.RequestParser().add_argument("inputs", type=dict, location="json")
args = parser.parse_args()
args = parser_run.parse_args()
try:
response = PipelineGenerateService.generate_single_loop(
@@ -217,8 +220,18 @@ class RagPipelineDraftRunLoopNodeApi(Resource):
raise InternalServerError()
parser_draft_run = (
reqparse.RequestParser()
.add_argument("inputs", type=dict, required=True, nullable=False, location="json")
.add_argument("datasource_type", type=str, required=True, location="json")
.add_argument("datasource_info_list", type=list, required=True, location="json")
.add_argument("start_node_id", type=str, required=True, location="json")
)
@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/draft/run")
class DraftRagPipelineRunApi(Resource):
@api.expect(parser_draft_run)
@setup_required
@login_required
@account_initialization_required
@@ -232,14 +245,7 @@ class DraftRagPipelineRunApi(Resource):
if not current_user.has_edit_permission:
raise Forbidden()
parser = (
reqparse.RequestParser()
.add_argument("inputs", type=dict, required=True, nullable=False, location="json")
.add_argument("datasource_type", type=str, required=True, location="json")
.add_argument("datasource_info_list", type=list, required=True, location="json")
.add_argument("start_node_id", type=str, required=True, location="json")
)
args = parser.parse_args()
args = parser_draft_run.parse_args()
try:
response = PipelineGenerateService.generate(
@@ -255,8 +261,21 @@ class DraftRagPipelineRunApi(Resource):
raise InvokeRateLimitHttpError(ex.description)
parser_published_run = (
reqparse.RequestParser()
.add_argument("inputs", type=dict, required=True, nullable=False, location="json")
.add_argument("datasource_type", type=str, required=True, location="json")
.add_argument("datasource_info_list", type=list, required=True, location="json")
.add_argument("start_node_id", type=str, required=True, location="json")
.add_argument("is_preview", type=bool, required=True, location="json", default=False)
.add_argument("response_mode", type=str, required=True, location="json", default="streaming")
.add_argument("original_document_id", type=str, required=False, location="json")
)
@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/published/run")
class PublishedRagPipelineRunApi(Resource):
@api.expect(parser_published_run)
@setup_required
@login_required
@account_initialization_required
@@ -270,17 +289,7 @@ class PublishedRagPipelineRunApi(Resource):
if not current_user.has_edit_permission:
raise Forbidden()
parser = (
reqparse.RequestParser()
.add_argument("inputs", type=dict, required=True, nullable=False, location="json")
.add_argument("datasource_type", type=str, required=True, location="json")
.add_argument("datasource_info_list", type=list, required=True, location="json")
.add_argument("start_node_id", type=str, required=True, location="json")
.add_argument("is_preview", type=bool, required=True, location="json", default=False)
.add_argument("response_mode", type=str, required=True, location="json", default="streaming")
.add_argument("original_document_id", type=str, required=False, location="json")
)
args = parser.parse_args()
args = parser_published_run.parse_args()
streaming = args["response_mode"] == "streaming"
@@ -381,8 +390,17 @@ class PublishedRagPipelineRunApi(Resource):
#
# return result
#
parser_rag_run = (
reqparse.RequestParser()
.add_argument("inputs", type=dict, required=True, nullable=False, location="json")
.add_argument("datasource_type", type=str, required=True, location="json")
.add_argument("credential_id", type=str, required=False, location="json")
)
@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/published/datasource/nodes/<string:node_id>/run")
class RagPipelinePublishedDatasourceNodeRunApi(Resource):
@api.expect(parser_rag_run)
@setup_required
@login_required
@account_initialization_required
@@ -396,13 +414,7 @@ class RagPipelinePublishedDatasourceNodeRunApi(Resource):
if not current_user.has_edit_permission:
raise Forbidden()
parser = (
reqparse.RequestParser()
.add_argument("inputs", type=dict, required=True, nullable=False, location="json")
.add_argument("datasource_type", type=str, required=True, location="json")
.add_argument("credential_id", type=str, required=False, location="json")
)
args = parser.parse_args()
args = parser_rag_run.parse_args()
inputs = args.get("inputs")
if inputs is None:
@@ -429,6 +441,7 @@ class RagPipelinePublishedDatasourceNodeRunApi(Resource):
@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/draft/datasource/nodes/<string:node_id>/run")
class RagPipelineDraftDatasourceNodeRunApi(Resource):
@api.expect(parser_rag_run)
@setup_required
@login_required
@account_initialization_required
@@ -442,13 +455,7 @@ class RagPipelineDraftDatasourceNodeRunApi(Resource):
if not current_user.has_edit_permission:
raise Forbidden()
parser = (
reqparse.RequestParser()
.add_argument("inputs", type=dict, required=True, nullable=False, location="json")
.add_argument("datasource_type", type=str, required=True, location="json")
.add_argument("credential_id", type=str, required=False, location="json")
)
args = parser.parse_args()
args = parser_rag_run.parse_args()
inputs = args.get("inputs")
if inputs is None:
@@ -473,8 +480,14 @@ class RagPipelineDraftDatasourceNodeRunApi(Resource):
)
parser_run_api = reqparse.RequestParser().add_argument(
"inputs", type=dict, required=True, nullable=False, location="json"
)
@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/draft/nodes/<string:node_id>/run")
class RagPipelineDraftNodeRunApi(Resource):
@api.expect(parser_run_api)
@setup_required
@login_required
@account_initialization_required
@@ -489,10 +502,7 @@ class RagPipelineDraftNodeRunApi(Resource):
if not current_user.has_edit_permission:
raise Forbidden()
parser = reqparse.RequestParser().add_argument(
"inputs", type=dict, required=True, nullable=False, location="json"
)
args = parser.parse_args()
args = parser_run_api.parse_args()
inputs = args.get("inputs")
if inputs == None:
@@ -607,8 +617,12 @@ class DefaultRagPipelineBlockConfigsApi(Resource):
return rag_pipeline_service.get_default_block_configs()
parser_default = reqparse.RequestParser().add_argument("q", type=str, location="args")
@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/default-workflow-block-configs/<string:block_type>")
class DefaultRagPipelineBlockConfigApi(Resource):
@api.expect(parser_default)
@setup_required
@login_required
@account_initialization_required
@@ -622,8 +636,7 @@ class DefaultRagPipelineBlockConfigApi(Resource):
if not current_user.has_edit_permission:
raise Forbidden()
parser = reqparse.RequestParser().add_argument("q", type=str, location="args")
args = parser.parse_args()
args = parser_default.parse_args()
q = args.get("q")
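Note: every hunk above applies the same refactor — the RequestParser is hoisted to module scope so a single object backs both the @api.expect declaration (for the generated OpenAPI docs) and the parse_args() call inside the handler. A minimal, self-contained sketch of that pattern, with hypothetical namespace, route, and argument names (only the flask_restx calls already used in the diff are assumed):

from flask import Flask
from flask_restx import Api, Namespace, Resource, reqparse

app = Flask(__name__)
api = Api(app)
demo_ns = Namespace("demo")

# Module-level parser: defined once, shared by the docs decorator and the handler.
parser_run = (
    reqparse.RequestParser()
    .add_argument("inputs", type=dict, required=True, nullable=False, location="json")
    .add_argument("credential_id", type=str, required=False, location="json")
)

@demo_ns.route("/example/run")
class ExampleRunApi(Resource):
    @api.expect(parser_run)  # documents the expected JSON body in Swagger
    def post(self):
        args = parser_run.parse_args()  # validates the request against the same parser
        return {"received": args["inputs"]}, 200

api.add_namespace(demo_ns)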
@@ -639,8 +652,18 @@ class DefaultRagPipelineBlockConfigApi(Resource):
return rag_pipeline_service.get_default_block_config(node_type=block_type, filters=filters)
parser_wf = (
reqparse.RequestParser()
.add_argument("page", type=inputs.int_range(1, 99999), required=False, default=1, location="args")
.add_argument("limit", type=inputs.int_range(1, 100), required=False, default=10, location="args")
.add_argument("user_id", type=str, required=False, location="args")
.add_argument("named_only", type=inputs.boolean, required=False, default=False, location="args")
)
@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows")
class PublishedAllRagPipelineApi(Resource):
@api.expect(parser_wf)
@setup_required
@login_required
@account_initialization_required
@@ -654,16 +677,9 @@ class PublishedAllRagPipelineApi(Resource):
if not current_user.has_edit_permission:
raise Forbidden()
parser = (
reqparse.RequestParser()
.add_argument("page", type=inputs.int_range(1, 99999), required=False, default=1, location="args")
.add_argument("limit", type=inputs.int_range(1, 100), required=False, default=20, location="args")
.add_argument("user_id", type=str, required=False, location="args")
.add_argument("named_only", type=inputs.boolean, required=False, default=False, location="args")
)
args = parser.parse_args()
page = int(args.get("page", 1))
limit = int(args.get("limit", 10))
args = parser_wf.parse_args()
page = args["page"]
limit = args["limit"]
user_id = args.get("user_id")
named_only = args.get("named_only", False)
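Note: this hunk also drops the manual int() casts — inputs.int_range returns validated integers and the parser supplies the defaults, so args["page"] and args["limit"] are already typed. A tiny sketch of that behavior (hypothetical parser name; only the reqparse/int_range semantics shown in the diff are assumed):

from flask_restx import inputs, reqparse

parser_page = (
    reqparse.RequestParser()
    .add_argument("page", type=inputs.int_range(1, 99999), required=False, default=1, location="args")
    .add_argument("limit", type=inputs.int_range(1, 100), required=False, default=10, location="args")
)

# Inside a handler, parser_page.parse_args() yields ints that are already
# range-checked and defaulted, so no further int() conversion is needed.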
@@ -691,8 +707,16 @@ class PublishedAllRagPipelineApi(Resource):
}
parser_wf_id = (
reqparse.RequestParser()
.add_argument("marked_name", type=str, required=False, location="json")
.add_argument("marked_comment", type=str, required=False, location="json")
)
@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/<string:workflow_id>")
class RagPipelineByIdApi(Resource):
@api.expect(parser_wf_id)
@setup_required
@login_required
@account_initialization_required
@@ -707,19 +731,13 @@ class RagPipelineByIdApi(Resource):
if not current_user.has_edit_permission:
raise Forbidden()
parser = (
reqparse.RequestParser()
.add_argument("marked_name", type=str, required=False, location="json")
.add_argument("marked_comment", type=str, required=False, location="json")
)
args = parser.parse_args()
args = parser_wf_id.parse_args()
# Validate name and comment length
if args.marked_name and len(args.marked_name) > 20:
raise ValueError("Marked name cannot exceed 20 characters")
if args.marked_comment and len(args.marked_comment) > 100:
raise ValueError("Marked comment cannot exceed 100 characters")
args = parser.parse_args()
# Prepare update data
update_data = {}
@@ -752,8 +770,12 @@ class RagPipelineByIdApi(Resource):
return workflow
parser_parameters = reqparse.RequestParser().add_argument("node_id", type=str, required=True, location="args")
@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/published/processing/parameters")
class PublishedRagPipelineSecondStepApi(Resource):
@api.expect(parser_parameters)
@setup_required
@login_required
@account_initialization_required
@@ -763,8 +785,7 @@ class PublishedRagPipelineSecondStepApi(Resource):
"""
Get second step parameters of rag pipeline
"""
parser = reqparse.RequestParser().add_argument("node_id", type=str, required=True, location="args")
args = parser.parse_args()
args = parser_parameters.parse_args()
node_id = args.get("node_id")
if not node_id:
raise ValueError("Node ID is required")
@@ -777,6 +798,7 @@ class PublishedRagPipelineSecondStepApi(Resource):
@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/published/pre-processing/parameters")
class PublishedRagPipelineFirstStepApi(Resource):
@api.expect(parser_parameters)
@setup_required
@login_required
@account_initialization_required
@@ -786,8 +808,7 @@ class PublishedRagPipelineFirstStepApi(Resource):
"""
Get first step parameters of rag pipeline
"""
parser = reqparse.RequestParser().add_argument("node_id", type=str, required=True, location="args")
args = parser.parse_args()
args = parser_parameters.parse_args()
node_id = args.get("node_id")
if not node_id:
raise ValueError("Node ID is required")
@@ -800,6 +821,7 @@ class PublishedRagPipelineFirstStepApi(Resource):
@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/draft/pre-processing/parameters")
class DraftRagPipelineFirstStepApi(Resource):
@api.expect(parser_parameters)
@setup_required
@login_required
@account_initialization_required
@@ -809,8 +831,7 @@ class DraftRagPipelineFirstStepApi(Resource):
"""
Get first step parameters of rag pipeline
"""
parser = reqparse.RequestParser().add_argument("node_id", type=str, required=True, location="args")
args = parser.parse_args()
args = parser_parameters.parse_args()
node_id = args.get("node_id")
if not node_id:
raise ValueError("Node ID is required")
@@ -823,6 +844,7 @@ class DraftRagPipelineFirstStepApi(Resource):
@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/draft/processing/parameters")
class DraftRagPipelineSecondStepApi(Resource):
@api.expect(parser_parameters)
@setup_required
@login_required
@account_initialization_required
@@ -832,8 +854,7 @@ class DraftRagPipelineSecondStepApi(Resource):
"""
Get second step parameters of rag pipeline
"""
parser = reqparse.RequestParser().add_argument("node_id", type=str, required=True, location="args")
args = parser.parse_args()
args = parser_parameters.parse_args()
node_id = args.get("node_id")
if not node_id:
raise ValueError("Node ID is required")
@@ -845,8 +866,16 @@ class DraftRagPipelineSecondStepApi(Resource):
}
parser_wf_run = (
reqparse.RequestParser()
.add_argument("last_id", type=uuid_value, location="args")
.add_argument("limit", type=int_range(1, 100), required=False, default=20, location="args")
)
@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflow-runs")
class RagPipelineWorkflowRunListApi(Resource):
@api.expect(parser_wf_run)
@setup_required
@login_required
@account_initialization_required
@@ -856,12 +885,7 @@ class RagPipelineWorkflowRunListApi(Resource):
"""
Get workflow run list
"""
parser = (
reqparse.RequestParser()
.add_argument("last_id", type=uuid_value, location="args")
.add_argument("limit", type=int_range(1, 100), required=False, default=20, location="args")
)
args = parser.parse_args()
args = parser_wf_run.parse_args()
rag_pipeline_service = RagPipelineService()
result = rag_pipeline_service.get_rag_pipeline_paginate_workflow_runs(pipeline=pipeline, args=args)
@@ -961,8 +985,18 @@ class RagPipelineTransformApi(Resource):
return result
parser_var = (
reqparse.RequestParser()
.add_argument("datasource_type", type=str, required=True, location="json")
.add_argument("datasource_info", type=dict, required=True, location="json")
.add_argument("start_node_id", type=str, required=True, location="json")
.add_argument("start_node_title", type=str, required=True, location="json")
)
@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/draft/datasource/variables-inspect")
class RagPipelineDatasourceVariableApi(Resource):
@api.expect(parser_var)
@setup_required
@login_required
@account_initialization_required
@@ -974,14 +1008,7 @@ class RagPipelineDatasourceVariableApi(Resource):
Set datasource variables
"""
current_user, _ = current_account_with_tenant()
parser = (
reqparse.RequestParser()
.add_argument("datasource_type", type=str, required=True, location="json")
.add_argument("datasource_info", type=dict, required=True, location="json")
.add_argument("start_node_id", type=str, required=True, location="json")
.add_argument("start_node_title", type=str, required=True, location="json")
)
args = parser.parse_args()
args = parser_var.parse_args()
rag_pipeline_service = RagPipelineService()
workflow_node_execution = rag_pipeline_service.set_datasource_variables(

View File

@@ -1,7 +1,7 @@
from flask_restx import Resource, fields, marshal_with, reqparse
from constants.languages import languages
from controllers.console import console_ns
from controllers.console import api, console_ns
from controllers.console.wraps import account_initialization_required
from libs.helper import AppIconUrlField
from libs.login import current_user, login_required
@@ -35,15 +35,18 @@ recommended_app_list_fields = {
}
parser_apps = reqparse.RequestParser().add_argument("language", type=str, location="args")
@console_ns.route("/explore/apps")
class RecommendedAppListApi(Resource):
@api.expect(parser_apps)
@login_required
@account_initialization_required
@marshal_with(recommended_app_list_fields)
def get(self):
# language args
parser = reqparse.RequestParser().add_argument("language", type=str, location="args")
args = parser.parse_args()
args = parser_apps.parse_args()
language = args.get("language")
if language and language in languages:

View File

@@ -10,6 +10,7 @@ from controllers.common.errors import (
RemoteFileUploadError,
UnsupportedFileTypeError,
)
from controllers.console import api
from core.file import helpers as file_helpers
from core.helper import ssrf_proxy
from extensions.ext_database import db
@@ -36,12 +37,15 @@ class RemoteFileInfoApi(Resource):
}
parser_upload = reqparse.RequestParser().add_argument("url", type=str, required=True, help="URL is required")
@console_ns.route("/remote-files/upload")
class RemoteFileUploadApi(Resource):
@api.expect(parser_upload)
@marshal_with(file_fields_with_signed_url)
def post(self):
parser = reqparse.RequestParser().add_argument("url", type=str, required=True, help="URL is required")
args = parser.parse_args()
args = parser_upload.parse_args()
url = args["url"]

View File

@@ -49,6 +49,7 @@ class SetupApi(Resource):
"email": fields.String(required=True, description="Admin email address"),
"name": fields.String(required=True, description="Admin name (max 30 characters)"),
"password": fields.String(required=True, description="Admin password"),
"language": fields.String(required=False, description="Admin language"),
},
)
)

View File

@@ -2,7 +2,7 @@ from flask import request
from flask_restx import Resource, marshal_with, reqparse
from werkzeug.exceptions import Forbidden
from controllers.console import console_ns
from controllers.console import api, console_ns
from controllers.console.wraps import account_initialization_required, setup_required
from fields.tag_fields import dataset_tag_fields
from libs.login import current_account_with_tenant, login_required
@@ -16,6 +16,19 @@ def _validate_name(name):
return name
parser_tags = (
reqparse.RequestParser()
.add_argument(
"name",
nullable=False,
required=True,
help="Name must be between 1 to 50 characters.",
type=_validate_name,
)
.add_argument("type", type=str, location="json", choices=Tag.TAG_TYPE_LIST, nullable=True, help="Invalid tag type.")
)
@console_ns.route("/tags")
class TagListApi(Resource):
@setup_required
@@ -30,6 +43,7 @@ class TagListApi(Resource):
return tags, 200
@api.expect(parser_tags)
@setup_required
@login_required
@account_initialization_required
@@ -39,20 +53,7 @@ class TagListApi(Resource):
if not (current_user.has_edit_permission or current_user.is_dataset_editor):
raise Forbidden()
parser = (
reqparse.RequestParser()
.add_argument(
"name",
nullable=False,
required=True,
help="Name must be between 1 to 50 characters.",
type=_validate_name,
)
.add_argument(
"type", type=str, location="json", choices=Tag.TAG_TYPE_LIST, nullable=True, help="Invalid tag type."
)
)
args = parser.parse_args()
args = parser_tags.parse_args()
tag = TagService.save_tags(args)
response = {"id": tag.id, "name": tag.name, "type": tag.type, "binding_count": 0}
@@ -60,8 +61,14 @@ class TagListApi(Resource):
return response, 200
parser_tag_id = reqparse.RequestParser().add_argument(
"name", nullable=False, required=True, help="Name must be between 1 to 50 characters.", type=_validate_name
)
@console_ns.route("/tags/<uuid:tag_id>")
class TagUpdateDeleteApi(Resource):
@api.expect(parser_tag_id)
@setup_required
@login_required
@account_initialization_required
@@ -72,10 +79,7 @@ class TagUpdateDeleteApi(Resource):
if not (current_user.has_edit_permission or current_user.is_dataset_editor):
raise Forbidden()
parser = reqparse.RequestParser().add_argument(
"name", nullable=False, required=True, help="Name must be between 1 to 50 characters.", type=_validate_name
)
args = parser.parse_args()
args = parser_tag_id.parse_args()
tag = TagService.update_tags(args, tag_id)
binding_count = TagService.get_tag_binding_count(tag_id)
@@ -99,8 +103,17 @@ class TagUpdateDeleteApi(Resource):
return 204
parser_create = (
reqparse.RequestParser()
.add_argument("tag_ids", type=list, nullable=False, required=True, location="json", help="Tag IDs is required.")
.add_argument("target_id", type=str, nullable=False, required=True, location="json", help="Target ID is required.")
.add_argument("type", type=str, location="json", choices=Tag.TAG_TYPE_LIST, nullable=True, help="Invalid tag type.")
)
@console_ns.route("/tag-bindings/create")
class TagBindingCreateApi(Resource):
@api.expect(parser_create)
@setup_required
@login_required
@account_initialization_required
@@ -110,26 +123,23 @@ class TagBindingCreateApi(Resource):
if not (current_user.has_edit_permission or current_user.is_dataset_editor):
raise Forbidden()
parser = (
reqparse.RequestParser()
.add_argument(
"tag_ids", type=list, nullable=False, required=True, location="json", help="Tag IDs is required."
)
.add_argument(
"target_id", type=str, nullable=False, required=True, location="json", help="Target ID is required."
)
.add_argument(
"type", type=str, location="json", choices=Tag.TAG_TYPE_LIST, nullable=True, help="Invalid tag type."
)
)
args = parser.parse_args()
args = parser_create.parse_args()
TagService.save_tag_binding(args)
return {"result": "success"}, 200
parser_remove = (
reqparse.RequestParser()
.add_argument("tag_id", type=str, nullable=False, required=True, help="Tag ID is required.")
.add_argument("target_id", type=str, nullable=False, required=True, help="Target ID is required.")
.add_argument("type", type=str, location="json", choices=Tag.TAG_TYPE_LIST, nullable=True, help="Invalid tag type.")
)
@console_ns.route("/tag-bindings/remove")
class TagBindingDeleteApi(Resource):
@api.expect(parser_remove)
@setup_required
@login_required
@account_initialization_required
@@ -139,15 +149,7 @@ class TagBindingDeleteApi(Resource):
if not (current_user.has_edit_permission or current_user.is_dataset_editor):
raise Forbidden()
parser = (
reqparse.RequestParser()
.add_argument("tag_id", type=str, nullable=False, required=True, help="Tag ID is required.")
.add_argument("target_id", type=str, nullable=False, required=True, help="Target ID is required.")
.add_argument(
"type", type=str, location="json", choices=Tag.TAG_TYPE_LIST, nullable=True, help="Invalid tag type."
)
)
args = parser.parse_args()
args = parser_remove.parse_args()
TagService.delete_tag_binding(args)
return {"result": "success"}, 200

View File

@@ -11,16 +11,16 @@ from . import api, console_ns
logger = logging.getLogger(__name__)
parser = reqparse.RequestParser().add_argument(
"current_version", type=str, required=True, location="args", help="Current application version"
)
@console_ns.route("/version")
class VersionApi(Resource):
@api.doc("check_version_update")
@api.doc(description="Check for application version updates")
@api.expect(
api.parser().add_argument(
"current_version", type=str, required=True, location="args", help="Current application version"
)
)
@api.expect(parser)
@api.response(
200,
"Success",
@@ -37,7 +37,6 @@ class VersionApi(Resource):
)
def get(self):
"""Check for application version updates"""
parser = reqparse.RequestParser().add_argument("current_version", type=str, required=True, location="args")
args = parser.parse_args()
check_update_url = dify_config.CHECK_UPDATE_URL

View File

@@ -8,7 +8,7 @@ from sqlalchemy.orm import Session
from configs import dify_config
from constants.languages import supported_language
from controllers.console import console_ns
from controllers.console import api, console_ns
from controllers.console.auth.error import (
EmailAlreadyInUseError,
EmailChangeLimitError,
@@ -43,8 +43,19 @@ from services.billing_service import BillingService
from services.errors.account import CurrentPasswordIncorrectError as ServiceCurrentPasswordIncorrectError
def _init_parser():
parser = reqparse.RequestParser()
if dify_config.EDITION == "CLOUD":
parser.add_argument("invitation_code", type=str, location="json")
parser.add_argument("interface_language", type=supported_language, required=True, location="json").add_argument(
"timezone", type=timezone, required=True, location="json"
)
return parser
@console_ns.route("/account/init")
class AccountInitApi(Resource):
@api.expect(_init_parser())
@setup_required
@login_required
def post(self):
@@ -53,14 +64,7 @@ class AccountInitApi(Resource):
if account.status == "active":
raise AccountAlreadyInitedError()
parser = reqparse.RequestParser()
if dify_config.EDITION == "CLOUD":
parser.add_argument("invitation_code", type=str, location="json")
parser.add_argument("interface_language", type=supported_language, required=True, location="json").add_argument(
"timezone", type=timezone, required=True, location="json"
)
args = parser.parse_args()
args = _init_parser().parse_args()
if dify_config.EDITION == "CLOUD":
if not args["invitation_code"]:
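Note: the account-init hunk uses a factory function instead of a module-level constant because one argument depends on the deployment edition. A minimal sketch of that variant (EDITION is a stand-in for dify_config.EDITION; argument types are simplified to str):

from flask_restx import reqparse

EDITION = "CLOUD"  # stand-in for dify_config.EDITION

def _build_init_parser():
    # Built per call so the config-dependent argument is added conditionally.
    parser = reqparse.RequestParser()
    if EDITION == "CLOUD":
        parser.add_argument("invitation_code", type=str, location="json")
    parser.add_argument("interface_language", type=str, required=True, location="json")
    parser.add_argument("timezone", type=str, required=True, location="json")
    return parser

# As in the diff: decorate with @api.expect(_build_init_parser()) and call
# _build_init_parser().parse_args() inside the handler.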
@@ -106,16 +110,19 @@ class AccountProfileApi(Resource):
return current_user
parser_name = reqparse.RequestParser().add_argument("name", type=str, required=True, location="json")
@console_ns.route("/account/name")
class AccountNameApi(Resource):
@api.expect(parser_name)
@setup_required
@login_required
@account_initialization_required
@marshal_with(account_fields)
def post(self):
current_user, _ = current_account_with_tenant()
parser = reqparse.RequestParser().add_argument("name", type=str, required=True, location="json")
args = parser.parse_args()
args = parser_name.parse_args()
# Validate account name length
if len(args["name"]) < 3 or len(args["name"]) > 30:
@@ -126,68 +133,80 @@ class AccountNameApi(Resource):
return updated_account
parser_avatar = reqparse.RequestParser().add_argument("avatar", type=str, required=True, location="json")
@console_ns.route("/account/avatar")
class AccountAvatarApi(Resource):
@api.expect(parser_avatar)
@setup_required
@login_required
@account_initialization_required
@marshal_with(account_fields)
def post(self):
current_user, _ = current_account_with_tenant()
parser = reqparse.RequestParser().add_argument("avatar", type=str, required=True, location="json")
args = parser.parse_args()
args = parser_avatar.parse_args()
updated_account = AccountService.update_account(current_user, avatar=args["avatar"])
return updated_account
parser_interface = reqparse.RequestParser().add_argument(
"interface_language", type=supported_language, required=True, location="json"
)
@console_ns.route("/account/interface-language")
class AccountInterfaceLanguageApi(Resource):
@api.expect(parser_interface)
@setup_required
@login_required
@account_initialization_required
@marshal_with(account_fields)
def post(self):
current_user, _ = current_account_with_tenant()
parser = reqparse.RequestParser().add_argument(
"interface_language", type=supported_language, required=True, location="json"
)
args = parser.parse_args()
args = parser_interface.parse_args()
updated_account = AccountService.update_account(current_user, interface_language=args["interface_language"])
return updated_account
parser_theme = reqparse.RequestParser().add_argument(
"interface_theme", type=str, choices=["light", "dark"], required=True, location="json"
)
@console_ns.route("/account/interface-theme")
class AccountInterfaceThemeApi(Resource):
@api.expect(parser_theme)
@setup_required
@login_required
@account_initialization_required
@marshal_with(account_fields)
def post(self):
current_user, _ = current_account_with_tenant()
parser = reqparse.RequestParser().add_argument(
"interface_theme", type=str, choices=["light", "dark"], required=True, location="json"
)
args = parser.parse_args()
args = parser_theme.parse_args()
updated_account = AccountService.update_account(current_user, interface_theme=args["interface_theme"])
return updated_account
parser_timezone = reqparse.RequestParser().add_argument("timezone", type=str, required=True, location="json")
@console_ns.route("/account/timezone")
class AccountTimezoneApi(Resource):
@api.expect(parser_timezone)
@setup_required
@login_required
@account_initialization_required
@marshal_with(account_fields)
def post(self):
current_user, _ = current_account_with_tenant()
parser = reqparse.RequestParser().add_argument("timezone", type=str, required=True, location="json")
args = parser.parse_args()
args = parser_timezone.parse_args()
# Validate timezone string, e.g. America/New_York, Asia/Shanghai
if args["timezone"] not in pytz.all_timezones:
@@ -198,21 +217,24 @@ class AccountTimezoneApi(Resource):
return updated_account
parser_pw = (
reqparse.RequestParser()
.add_argument("password", type=str, required=False, location="json")
.add_argument("new_password", type=str, required=True, location="json")
.add_argument("repeat_new_password", type=str, required=True, location="json")
)
@console_ns.route("/account/password")
class AccountPasswordApi(Resource):
@api.expect(parser_pw)
@setup_required
@login_required
@account_initialization_required
@marshal_with(account_fields)
def post(self):
current_user, _ = current_account_with_tenant()
parser = (
reqparse.RequestParser()
.add_argument("password", type=str, required=False, location="json")
.add_argument("new_password", type=str, required=True, location="json")
.add_argument("repeat_new_password", type=str, required=True, location="json")
)
args = parser.parse_args()
args = parser_pw.parse_args()
if args["new_password"] != args["repeat_new_password"]:
raise RepeatPasswordNotMatchError()
@@ -294,20 +316,23 @@ class AccountDeleteVerifyApi(Resource):
return {"result": "success", "data": token}
parser_delete = (
reqparse.RequestParser()
.add_argument("token", type=str, required=True, location="json")
.add_argument("code", type=str, required=True, location="json")
)
@console_ns.route("/account/delete")
class AccountDeleteApi(Resource):
@api.expect(parser_delete)
@setup_required
@login_required
@account_initialization_required
def post(self):
account, _ = current_account_with_tenant()
parser = (
reqparse.RequestParser()
.add_argument("token", type=str, required=True, location="json")
.add_argument("code", type=str, required=True, location="json")
)
args = parser.parse_args()
args = parser_delete.parse_args()
if not AccountService.verify_account_deletion_code(args["token"], args["code"]):
raise InvalidAccountDeletionCodeError()
@@ -317,16 +342,19 @@ class AccountDeleteApi(Resource):
return {"result": "success"}
parser_feedback = (
reqparse.RequestParser()
.add_argument("email", type=str, required=True, location="json")
.add_argument("feedback", type=str, required=True, location="json")
)
@console_ns.route("/account/delete/feedback")
class AccountDeleteUpdateFeedbackApi(Resource):
@api.expect(parser_feedback)
@setup_required
def post(self):
parser = (
reqparse.RequestParser()
.add_argument("email", type=str, required=True, location="json")
.add_argument("feedback", type=str, required=True, location="json")
)
args = parser.parse_args()
args = parser_feedback.parse_args()
BillingService.update_account_deletion_feedback(args["email"], args["feedback"])
@@ -351,6 +379,14 @@ class EducationVerifyApi(Resource):
return BillingService.EducationIdentity.verify(account.id, account.email)
parser_edu = (
reqparse.RequestParser()
.add_argument("token", type=str, required=True, location="json")
.add_argument("institution", type=str, required=True, location="json")
.add_argument("role", type=str, required=True, location="json")
)
@console_ns.route("/account/education")
class EducationApi(Resource):
status_fields = {
@@ -360,6 +396,7 @@ class EducationApi(Resource):
"allow_refresh": fields.Boolean,
}
@api.expect(parser_edu)
@setup_required
@login_required
@account_initialization_required
@@ -368,13 +405,7 @@ class EducationApi(Resource):
def post(self):
account, _ = current_account_with_tenant()
parser = (
reqparse.RequestParser()
.add_argument("token", type=str, required=True, location="json")
.add_argument("institution", type=str, required=True, location="json")
.add_argument("role", type=str, required=True, location="json")
)
args = parser.parse_args()
args = parser_edu.parse_args()
return BillingService.EducationIdentity.activate(account, args["token"], args["institution"], args["role"])
@@ -394,6 +425,14 @@ class EducationApi(Resource):
return res
parser_autocomplete = (
reqparse.RequestParser()
.add_argument("keywords", type=str, required=True, location="args")
.add_argument("page", type=int, required=False, location="args", default=0)
.add_argument("limit", type=int, required=False, location="args", default=20)
)
@console_ns.route("/account/education/autocomplete")
class EducationAutoCompleteApi(Resource):
data_fields = {
@@ -402,6 +441,7 @@ class EducationAutoCompleteApi(Resource):
"has_next": fields.Boolean,
}
@api.expect(parser_autocomplete)
@setup_required
@login_required
@account_initialization_required
@@ -409,33 +449,30 @@ class EducationAutoCompleteApi(Resource):
@cloud_edition_billing_enabled
@marshal_with(data_fields)
def get(self):
parser = (
reqparse.RequestParser()
.add_argument("keywords", type=str, required=True, location="args")
.add_argument("page", type=int, required=False, location="args", default=0)
.add_argument("limit", type=int, required=False, location="args", default=20)
)
args = parser.parse_args()
args = parser_autocomplete.parse_args()
return BillingService.EducationIdentity.autocomplete(args["keywords"], args["page"], args["limit"])
parser_change_email = (
reqparse.RequestParser()
.add_argument("email", type=email, required=True, location="json")
.add_argument("language", type=str, required=False, location="json")
.add_argument("phase", type=str, required=False, location="json")
.add_argument("token", type=str, required=False, location="json")
)
@console_ns.route("/account/change-email")
class ChangeEmailSendEmailApi(Resource):
@api.expect(parser_change_email)
@enable_change_email
@setup_required
@login_required
@account_initialization_required
def post(self):
current_user, _ = current_account_with_tenant()
parser = (
reqparse.RequestParser()
.add_argument("email", type=email, required=True, location="json")
.add_argument("language", type=str, required=False, location="json")
.add_argument("phase", type=str, required=False, location="json")
.add_argument("token", type=str, required=False, location="json")
)
args = parser.parse_args()
args = parser_change_email.parse_args()
ip_address = extract_remote_ip(request)
if AccountService.is_email_send_ip_limit(ip_address):
@@ -470,20 +507,23 @@ class ChangeEmailSendEmailApi(Resource):
return {"result": "success", "data": token}
parser_validity = (
reqparse.RequestParser()
.add_argument("email", type=email, required=True, location="json")
.add_argument("code", type=str, required=True, location="json")
.add_argument("token", type=str, required=True, nullable=False, location="json")
)
@console_ns.route("/account/change-email/validity")
class ChangeEmailCheckApi(Resource):
@api.expect(parser_validity)
@enable_change_email
@setup_required
@login_required
@account_initialization_required
def post(self):
parser = (
reqparse.RequestParser()
.add_argument("email", type=email, required=True, location="json")
.add_argument("code", type=str, required=True, location="json")
.add_argument("token", type=str, required=True, nullable=False, location="json")
)
args = parser.parse_args()
args = parser_validity.parse_args()
user_email = args["email"]
@@ -514,20 +554,23 @@ class ChangeEmailCheckApi(Resource):
return {"is_valid": True, "email": token_data.get("email"), "token": new_token}
parser_reset = (
reqparse.RequestParser()
.add_argument("new_email", type=email, required=True, location="json")
.add_argument("token", type=str, required=True, nullable=False, location="json")
)
@console_ns.route("/account/change-email/reset")
class ChangeEmailResetApi(Resource):
@api.expect(parser_reset)
@enable_change_email
@setup_required
@login_required
@account_initialization_required
@marshal_with(account_fields)
def post(self):
parser = (
reqparse.RequestParser()
.add_argument("new_email", type=email, required=True, location="json")
.add_argument("token", type=str, required=True, nullable=False, location="json")
)
args = parser.parse_args()
args = parser_reset.parse_args()
if AccountService.is_account_in_freeze(args["new_email"]):
raise AccountInFreezeError()
@@ -555,12 +598,15 @@ class ChangeEmailResetApi(Resource):
return updated_account
parser_check = reqparse.RequestParser().add_argument("email", type=email, required=True, location="json")
@console_ns.route("/account/change-email/check-email-unique")
class CheckEmailUnique(Resource):
@api.expect(parser_check)
@setup_required
def post(self):
parser = reqparse.RequestParser().add_argument("email", type=email, required=True, location="json")
args = parser.parse_args()
args = parser_check.parse_args()
if AccountService.is_account_in_freeze(args["email"]):
raise AccountInFreezeError()
if not AccountService.check_email_unique(args["email"]):

View File

@@ -5,7 +5,7 @@ from flask_restx import Resource, marshal_with, reqparse
import services
from configs import dify_config
from controllers.console import console_ns
from controllers.console import api, console_ns
from controllers.console.auth.error import (
CannotTransferOwnerToSelfError,
EmailCodeError,
@@ -48,22 +48,25 @@ class MemberListApi(Resource):
return {"result": "success", "accounts": members}, 200
parser_invite = (
reqparse.RequestParser()
.add_argument("emails", type=list, required=True, location="json")
.add_argument("role", type=str, required=True, default="admin", location="json")
.add_argument("language", type=str, required=False, location="json")
)
@console_ns.route("/workspaces/current/members/invite-email")
class MemberInviteEmailApi(Resource):
"""Invite a new member by email."""
@api.expect(parser_invite)
@setup_required
@login_required
@account_initialization_required
@cloud_edition_billing_resource_check("members")
def post(self):
parser = (
reqparse.RequestParser()
.add_argument("emails", type=list, required=True, location="json")
.add_argument("role", type=str, required=True, default="admin", location="json")
.add_argument("language", type=str, required=False, location="json")
)
args = parser.parse_args()
args = parser_invite.parse_args()
invitee_emails = args["emails"]
invitee_role = args["role"]
@@ -143,16 +146,19 @@ class MemberCancelInviteApi(Resource):
}, 200
parser_update = reqparse.RequestParser().add_argument("role", type=str, required=True, location="json")
@console_ns.route("/workspaces/current/members/<uuid:member_id>/update-role")
class MemberUpdateRoleApi(Resource):
"""Update member role."""
@api.expect(parser_update)
@setup_required
@login_required
@account_initialization_required
def put(self, member_id):
parser = reqparse.RequestParser().add_argument("role", type=str, required=True, location="json")
args = parser.parse_args()
args = parser_update.parse_args()
new_role = args["role"]
if not TenantAccountRole.is_valid_role(new_role):
@@ -191,17 +197,20 @@ class DatasetOperatorMemberListApi(Resource):
return {"result": "success", "accounts": members}, 200
parser_send = reqparse.RequestParser().add_argument("language", type=str, required=False, location="json")
@console_ns.route("/workspaces/current/members/send-owner-transfer-confirm-email")
class SendOwnerTransferEmailApi(Resource):
"""Send owner transfer email."""
@api.expect(parser_send)
@setup_required
@login_required
@account_initialization_required
@is_allow_transfer_owner
def post(self):
parser = reqparse.RequestParser().add_argument("language", type=str, required=False, location="json")
args = parser.parse_args()
args = parser_send.parse_args()
ip_address = extract_remote_ip(request)
if AccountService.is_email_send_ip_limit(ip_address):
raise EmailSendIpLimitError()
@@ -229,19 +238,22 @@ class SendOwnerTransferEmailApi(Resource):
return {"result": "success", "data": token}
parser_owner = (
reqparse.RequestParser()
.add_argument("code", type=str, required=True, location="json")
.add_argument("token", type=str, required=True, nullable=False, location="json")
)
@console_ns.route("/workspaces/current/members/owner-transfer-check")
class OwnerTransferCheckApi(Resource):
@api.expect(parser_owner)
@setup_required
@login_required
@account_initialization_required
@is_allow_transfer_owner
def post(self):
parser = (
reqparse.RequestParser()
.add_argument("code", type=str, required=True, location="json")
.add_argument("token", type=str, required=True, nullable=False, location="json")
)
args = parser.parse_args()
args = parser_owner.parse_args()
# check if the current user is the owner of the workspace
current_user, _ = current_account_with_tenant()
if not current_user.current_tenant:
@@ -276,17 +288,20 @@ class OwnerTransferCheckApi(Resource):
return {"is_valid": True, "email": token_data.get("email"), "token": new_token}
parser_owner_transfer = reqparse.RequestParser().add_argument(
"token", type=str, required=True, nullable=False, location="json"
)
@console_ns.route("/workspaces/current/members/<uuid:member_id>/owner-transfer")
class OwnerTransfer(Resource):
@api.expect(parser_owner_transfer)
@setup_required
@login_required
@account_initialization_required
@is_allow_transfer_owner
def post(self, member_id):
parser = reqparse.RequestParser().add_argument(
"token", type=str, required=True, nullable=False, location="json"
)
args = parser.parse_args()
args = parser_owner_transfer.parse_args()
# check if the current user is the owner of the workspace
current_user, _ = current_account_with_tenant()

View File

@@ -4,7 +4,7 @@ from flask import send_file
from flask_restx import Resource, reqparse
from werkzeug.exceptions import Forbidden
from controllers.console import console_ns
from controllers.console import api, console_ns
from controllers.console.wraps import account_initialization_required, setup_required
from core.model_runtime.entities.model_entities import ModelType
from core.model_runtime.errors.validate import CredentialsValidateFailedError
@@ -14,9 +14,19 @@ from libs.login import current_account_with_tenant, login_required
from services.billing_service import BillingService
from services.model_provider_service import ModelProviderService
parser_model = reqparse.RequestParser().add_argument(
"model_type",
type=str,
required=False,
nullable=True,
choices=[mt.value for mt in ModelType],
location="args",
)
@console_ns.route("/workspaces/current/model-providers")
class ModelProviderListApi(Resource):
@api.expect(parser_model)
@setup_required
@login_required
@account_initialization_required
@@ -24,15 +34,7 @@ class ModelProviderListApi(Resource):
_, current_tenant_id = current_account_with_tenant()
tenant_id = current_tenant_id
parser = reqparse.RequestParser().add_argument(
"model_type",
type=str,
required=False,
nullable=True,
choices=[mt.value for mt in ModelType],
location="args",
)
args = parser.parse_args()
args = parser_model.parse_args()
model_provider_service = ModelProviderService()
provider_list = model_provider_service.get_provider_list(tenant_id=tenant_id, model_type=args.get("model_type"))
@@ -40,8 +42,30 @@ class ModelProviderListApi(Resource):
return jsonable_encoder({"data": provider_list})
parser_cred = reqparse.RequestParser().add_argument(
"credential_id", type=uuid_value, required=False, nullable=True, location="args"
)
parser_post_cred = (
reqparse.RequestParser()
.add_argument("credentials", type=dict, required=True, nullable=False, location="json")
.add_argument("name", type=StrLen(30), required=False, nullable=True, location="json")
)
parser_put_cred = (
reqparse.RequestParser()
.add_argument("credential_id", type=uuid_value, required=True, nullable=False, location="json")
.add_argument("credentials", type=dict, required=True, nullable=False, location="json")
.add_argument("name", type=StrLen(30), required=False, nullable=True, location="json")
)
parser_delete_cred = reqparse.RequestParser().add_argument(
"credential_id", type=uuid_value, required=True, nullable=False, location="json"
)
@console_ns.route("/workspaces/current/model-providers/<path:provider>/credentials")
class ModelProviderCredentialApi(Resource):
@api.expect(parser_cred)
@setup_required
@login_required
@account_initialization_required
@@ -49,10 +73,7 @@ class ModelProviderCredentialApi(Resource):
_, current_tenant_id = current_account_with_tenant()
tenant_id = current_tenant_id
# if credential_id is not provided, return current used credential
parser = reqparse.RequestParser().add_argument(
"credential_id", type=uuid_value, required=False, nullable=True, location="args"
)
args = parser.parse_args()
args = parser_cred.parse_args()
model_provider_service = ModelProviderService()
credentials = model_provider_service.get_provider_credential(
@@ -61,6 +82,7 @@ class ModelProviderCredentialApi(Resource):
return {"credentials": credentials}
@api.expect(parser_post_cred)
@setup_required
@login_required
@account_initialization_required
@@ -69,12 +91,7 @@ class ModelProviderCredentialApi(Resource):
if not current_user.is_admin_or_owner:
raise Forbidden()
parser = (
reqparse.RequestParser()
.add_argument("credentials", type=dict, required=True, nullable=False, location="json")
.add_argument("name", type=StrLen(30), required=False, nullable=True, location="json")
)
args = parser.parse_args()
args = parser_post_cred.parse_args()
model_provider_service = ModelProviderService()
@@ -90,6 +107,7 @@ class ModelProviderCredentialApi(Resource):
return {"result": "success"}, 201
@api.expect(parser_put_cred)
@setup_required
@login_required
@account_initialization_required
@@ -98,13 +116,7 @@ class ModelProviderCredentialApi(Resource):
if not current_user.is_admin_or_owner:
raise Forbidden()
parser = (
reqparse.RequestParser()
.add_argument("credential_id", type=uuid_value, required=True, nullable=False, location="json")
.add_argument("credentials", type=dict, required=True, nullable=False, location="json")
.add_argument("name", type=StrLen(30), required=False, nullable=True, location="json")
)
args = parser.parse_args()
args = parser_put_cred.parse_args()
model_provider_service = ModelProviderService()
@@ -121,6 +133,7 @@ class ModelProviderCredentialApi(Resource):
return {"result": "success"}
@api.expect(parser_delete_cred)
@setup_required
@login_required
@account_initialization_required
@@ -128,10 +141,8 @@ class ModelProviderCredentialApi(Resource):
current_user, current_tenant_id = current_account_with_tenant()
if not current_user.is_admin_or_owner:
raise Forbidden()
parser = reqparse.RequestParser().add_argument(
"credential_id", type=uuid_value, required=True, nullable=False, location="json"
)
args = parser.parse_args()
args = parser_delete_cred.parse_args()
model_provider_service = ModelProviderService()
model_provider_service.remove_provider_credential(
@@ -141,8 +152,14 @@ class ModelProviderCredentialApi(Resource):
return {"result": "success"}, 204
parser_switch = reqparse.RequestParser().add_argument(
"credential_id", type=str, required=True, nullable=False, location="json"
)
@console_ns.route("/workspaces/current/model-providers/<path:provider>/credentials/switch")
class ModelProviderCredentialSwitchApi(Resource):
@api.expect(parser_switch)
@setup_required
@login_required
@account_initialization_required
@@ -150,10 +167,7 @@ class ModelProviderCredentialSwitchApi(Resource):
current_user, current_tenant_id = current_account_with_tenant()
if not current_user.is_admin_or_owner:
raise Forbidden()
parser = reqparse.RequestParser().add_argument(
"credential_id", type=str, required=True, nullable=False, location="json"
)
args = parser.parse_args()
args = parser_switch.parse_args()
service = ModelProviderService()
service.switch_active_provider_credential(
@@ -164,17 +178,20 @@ class ModelProviderCredentialSwitchApi(Resource):
return {"result": "success"}
parser_validate = reqparse.RequestParser().add_argument(
"credentials", type=dict, required=True, nullable=False, location="json"
)
@console_ns.route("/workspaces/current/model-providers/<path:provider>/credentials/validate")
class ModelProviderValidateApi(Resource):
@api.expect(parser_validate)
@setup_required
@login_required
@account_initialization_required
def post(self, provider: str):
_, current_tenant_id = current_account_with_tenant()
parser = reqparse.RequestParser().add_argument(
"credentials", type=dict, required=True, nullable=False, location="json"
)
args = parser.parse_args()
args = parser_validate.parse_args()
tenant_id = current_tenant_id
@@ -218,8 +235,19 @@ class ModelProviderIconApi(Resource):
return send_file(io.BytesIO(icon), mimetype=mimetype)
parser_preferred = reqparse.RequestParser().add_argument(
"preferred_provider_type",
type=str,
required=True,
nullable=False,
choices=["system", "custom"],
location="json",
)
@console_ns.route("/workspaces/current/model-providers/<path:provider>/preferred-provider-type")
class PreferredProviderTypeUpdateApi(Resource):
@api.expect(parser_preferred)
@setup_required
@login_required
@account_initialization_required
@@ -230,15 +258,7 @@ class PreferredProviderTypeUpdateApi(Resource):
tenant_id = current_tenant_id
parser = reqparse.RequestParser().add_argument(
"preferred_provider_type",
type=str,
required=True,
nullable=False,
choices=["system", "custom"],
location="json",
)
args = parser.parse_args()
args = parser_preferred.parse_args()
model_provider_service = ModelProviderService()
model_provider_service.switch_preferred_provider(

View File

@@ -3,7 +3,7 @@ import logging
from flask_restx import Resource, reqparse
from werkzeug.exceptions import Forbidden
from controllers.console import console_ns
from controllers.console import api, console_ns
from controllers.console.wraps import account_initialization_required, setup_required
from core.model_runtime.entities.model_entities import ModelType
from core.model_runtime.errors.validate import CredentialsValidateFailedError
@@ -16,23 +16,29 @@ from services.model_provider_service import ModelProviderService
logger = logging.getLogger(__name__)
parser_get_default = reqparse.RequestParser().add_argument(
"model_type",
type=str,
required=True,
nullable=False,
choices=[mt.value for mt in ModelType],
location="args",
)
parser_post_default = reqparse.RequestParser().add_argument(
"model_settings", type=list, required=True, nullable=False, location="json"
)
@console_ns.route("/workspaces/current/default-model")
class DefaultModelApi(Resource):
@api.expect(parser_get_default)
@setup_required
@login_required
@account_initialization_required
def get(self):
_, tenant_id = current_account_with_tenant()
parser = reqparse.RequestParser().add_argument(
"model_type",
type=str,
required=True,
nullable=False,
choices=[mt.value for mt in ModelType],
location="args",
)
args = parser.parse_args()
args = parser_get_default.parse_args()
model_provider_service = ModelProviderService()
default_model_entity = model_provider_service.get_default_model_of_model_type(
@@ -41,6 +47,7 @@ class DefaultModelApi(Resource):
return jsonable_encoder({"data": default_model_entity})
@api.expect(parser_post_default)
@setup_required
@login_required
@account_initialization_required
@@ -50,10 +57,7 @@ class DefaultModelApi(Resource):
if not current_user.is_admin_or_owner:
raise Forbidden()
parser = reqparse.RequestParser().add_argument(
"model_settings", type=list, required=True, nullable=False, location="json"
)
args = parser.parse_args()
args = parser_post_default.parse_args()
model_provider_service = ModelProviderService()
model_settings = args["model_settings"]
for model_setting in model_settings:
@@ -84,6 +88,35 @@ class DefaultModelApi(Resource):
return {"result": "success"}
parser_post_models = (
reqparse.RequestParser()
.add_argument("model", type=str, required=True, nullable=False, location="json")
.add_argument(
"model_type",
type=str,
required=True,
nullable=False,
choices=[mt.value for mt in ModelType],
location="json",
)
.add_argument("load_balancing", type=dict, required=False, nullable=True, location="json")
.add_argument("config_from", type=str, required=False, nullable=True, location="json")
.add_argument("credential_id", type=uuid_value, required=False, nullable=True, location="json")
)
parser_delete_models = (
reqparse.RequestParser()
.add_argument("model", type=str, required=True, nullable=False, location="json")
.add_argument(
"model_type",
type=str,
required=True,
nullable=False,
choices=[mt.value for mt in ModelType],
location="json",
)
)
@console_ns.route("/workspaces/current/model-providers/<path:provider>/models")
class ModelProviderModelApi(Resource):
@setup_required
@@ -97,6 +130,7 @@ class ModelProviderModelApi(Resource):
return jsonable_encoder({"data": models})
@api.expect(parser_post_models)
@setup_required
@login_required
@account_initialization_required
@@ -106,23 +140,7 @@ class ModelProviderModelApi(Resource):
if not current_user.is_admin_or_owner:
raise Forbidden()
parser = (
reqparse.RequestParser()
.add_argument("model", type=str, required=True, nullable=False, location="json")
.add_argument(
"model_type",
type=str,
required=True,
nullable=False,
choices=[mt.value for mt in ModelType],
location="json",
)
.add_argument("load_balancing", type=dict, required=False, nullable=True, location="json")
.add_argument("config_from", type=str, required=False, nullable=True, location="json")
.add_argument("credential_id", type=uuid_value, required=False, nullable=True, location="json")
)
args = parser.parse_args()
args = parser_post_models.parse_args()
if args.get("config_from", "") == "custom-model":
if not args.get("credential_id"):
@@ -160,6 +178,7 @@ class ModelProviderModelApi(Resource):
return {"result": "success"}, 200
@api.expect(parser_delete_models)
@setup_required
@login_required
@account_initialization_required
@@ -169,19 +188,7 @@ class ModelProviderModelApi(Resource):
if not current_user.is_admin_or_owner:
raise Forbidden()
parser = (
reqparse.RequestParser()
.add_argument("model", type=str, required=True, nullable=False, location="json")
.add_argument(
"model_type",
type=str,
required=True,
nullable=False,
choices=[mt.value for mt in ModelType],
location="json",
)
)
args = parser.parse_args()
args = parser_delete_models.parse_args()
model_provider_service = ModelProviderService()
model_provider_service.remove_model(
@@ -191,29 +198,76 @@ class ModelProviderModelApi(Resource):
return {"result": "success"}, 204
parser_get_credentials = (
reqparse.RequestParser()
.add_argument("model", type=str, required=True, nullable=False, location="args")
.add_argument(
"model_type",
type=str,
required=True,
nullable=False,
choices=[mt.value for mt in ModelType],
location="args",
)
.add_argument("config_from", type=str, required=False, nullable=True, location="args")
.add_argument("credential_id", type=uuid_value, required=False, nullable=True, location="args")
)
parser_post_cred = (
reqparse.RequestParser()
.add_argument("model", type=str, required=True, nullable=False, location="json")
.add_argument(
"model_type",
type=str,
required=True,
nullable=False,
choices=[mt.value for mt in ModelType],
location="json",
)
.add_argument("name", type=StrLen(30), required=False, nullable=True, location="json")
.add_argument("credentials", type=dict, required=True, nullable=False, location="json")
)
parser_put_cred = (
reqparse.RequestParser()
.add_argument("model", type=str, required=True, nullable=False, location="json")
.add_argument(
"model_type",
type=str,
required=True,
nullable=False,
choices=[mt.value for mt in ModelType],
location="json",
)
.add_argument("credential_id", type=uuid_value, required=True, nullable=False, location="json")
.add_argument("credentials", type=dict, required=True, nullable=False, location="json")
.add_argument("name", type=StrLen(30), required=False, nullable=True, location="json")
)
parser_delete_cred = (
reqparse.RequestParser()
.add_argument("model", type=str, required=True, nullable=False, location="json")
.add_argument(
"model_type",
type=str,
required=True,
nullable=False,
choices=[mt.value for mt in ModelType],
location="json",
)
.add_argument("credential_id", type=uuid_value, required=True, nullable=False, location="json")
)
@console_ns.route("/workspaces/current/model-providers/<path:provider>/models/credentials")
class ModelProviderModelCredentialApi(Resource):
@api.expect(parser_get_credentials)
@setup_required
@login_required
@account_initialization_required
def get(self, provider: str):
_, tenant_id = current_account_with_tenant()
parser = (
reqparse.RequestParser()
.add_argument("model", type=str, required=True, nullable=False, location="args")
.add_argument(
"model_type",
type=str,
required=True,
nullable=False,
choices=[mt.value for mt in ModelType],
location="args",
)
.add_argument("config_from", type=str, required=False, nullable=True, location="args")
.add_argument("credential_id", type=uuid_value, required=False, nullable=True, location="args")
)
args = parser.parse_args()
args = parser_get_credentials.parse_args()
model_provider_service = ModelProviderService()
current_credential = model_provider_service.get_model_credential(
@@ -257,6 +311,7 @@ class ModelProviderModelCredentialApi(Resource):
}
)
@api.expect(parser_post_cred)
@setup_required
@login_required
@account_initialization_required
@@ -266,21 +321,7 @@ class ModelProviderModelCredentialApi(Resource):
if not current_user.is_admin_or_owner:
raise Forbidden()
parser = (
reqparse.RequestParser()
.add_argument("model", type=str, required=True, nullable=False, location="json")
.add_argument(
"model_type",
type=str,
required=True,
nullable=False,
choices=[mt.value for mt in ModelType],
location="json",
)
.add_argument("name", type=StrLen(30), required=False, nullable=True, location="json")
.add_argument("credentials", type=dict, required=True, nullable=False, location="json")
)
args = parser.parse_args()
args = parser_post_cred.parse_args()
model_provider_service = ModelProviderService()
@@ -304,6 +345,7 @@ class ModelProviderModelCredentialApi(Resource):
return {"result": "success"}, 201
@api.expect(parser_put_cred)
@setup_required
@login_required
@account_initialization_required
@@ -313,22 +355,7 @@ class ModelProviderModelCredentialApi(Resource):
if not current_user.is_admin_or_owner:
raise Forbidden()
parser = (
reqparse.RequestParser()
.add_argument("model", type=str, required=True, nullable=False, location="json")
.add_argument(
"model_type",
type=str,
required=True,
nullable=False,
choices=[mt.value for mt in ModelType],
location="json",
)
.add_argument("credential_id", type=uuid_value, required=True, nullable=False, location="json")
.add_argument("credentials", type=dict, required=True, nullable=False, location="json")
.add_argument("name", type=StrLen(30), required=False, nullable=True, location="json")
)
args = parser.parse_args()
args = parser_put_cred.parse_args()
model_provider_service = ModelProviderService()
@@ -347,6 +374,7 @@ class ModelProviderModelCredentialApi(Resource):
return {"result": "success"}
@api.expect(parser_delete_cred)
@setup_required
@login_required
@account_initialization_required
@@ -355,20 +383,7 @@ class ModelProviderModelCredentialApi(Resource):
if not current_user.is_admin_or_owner:
raise Forbidden()
parser = (
reqparse.RequestParser()
.add_argument("model", type=str, required=True, nullable=False, location="json")
.add_argument(
"model_type",
type=str,
required=True,
nullable=False,
choices=[mt.value for mt in ModelType],
location="json",
)
.add_argument("credential_id", type=uuid_value, required=True, nullable=False, location="json")
)
args = parser.parse_args()
args = parser_delete_cred.parse_args()
model_provider_service = ModelProviderService()
model_provider_service.remove_model_credential(
@@ -382,8 +397,24 @@ class ModelProviderModelCredentialApi(Resource):
return {"result": "success"}, 204
parser_switch = (
reqparse.RequestParser()
.add_argument("model", type=str, required=True, nullable=False, location="json")
.add_argument(
"model_type",
type=str,
required=True,
nullable=False,
choices=[mt.value for mt in ModelType],
location="json",
)
.add_argument("credential_id", type=str, required=True, nullable=False, location="json")
)
@console_ns.route("/workspaces/current/model-providers/<path:provider>/models/credentials/switch")
class ModelProviderModelCredentialSwitchApi(Resource):
@api.expect(parser_switch)
@setup_required
@login_required
@account_initialization_required
@@ -392,20 +423,7 @@ class ModelProviderModelCredentialSwitchApi(Resource):
if not current_user.is_admin_or_owner:
raise Forbidden()
parser = (
reqparse.RequestParser()
.add_argument("model", type=str, required=True, nullable=False, location="json")
.add_argument(
"model_type",
type=str,
required=True,
nullable=False,
choices=[mt.value for mt in ModelType],
location="json",
)
.add_argument("credential_id", type=str, required=True, nullable=False, location="json")
)
args = parser.parse_args()
args = parser_switch.parse_args()
service = ModelProviderService()
service.add_model_credential_to_model_list(
@@ -418,29 +436,32 @@ class ModelProviderModelCredentialSwitchApi(Resource):
return {"result": "success"}
parser_model_enable_disable = (
reqparse.RequestParser()
.add_argument("model", type=str, required=True, nullable=False, location="json")
.add_argument(
"model_type",
type=str,
required=True,
nullable=False,
choices=[mt.value for mt in ModelType],
location="json",
)
)
@console_ns.route(
"/workspaces/current/model-providers/<path:provider>/models/enable", endpoint="model-provider-model-enable"
)
class ModelProviderModelEnableApi(Resource):
@api.expect(parser_model_enable_disable)
@setup_required
@login_required
@account_initialization_required
def patch(self, provider: str):
_, tenant_id = current_account_with_tenant()
parser = (
reqparse.RequestParser()
.add_argument("model", type=str, required=True, nullable=False, location="json")
.add_argument(
"model_type",
type=str,
required=True,
nullable=False,
choices=[mt.value for mt in ModelType],
location="json",
)
)
args = parser.parse_args()
args = parser_model_enable_disable.parse_args()
model_provider_service = ModelProviderService()
model_provider_service.enable_model(
@@ -454,25 +475,14 @@ class ModelProviderModelEnableApi(Resource):
"/workspaces/current/model-providers/<path:provider>/models/disable", endpoint="model-provider-model-disable"
)
class ModelProviderModelDisableApi(Resource):
@api.expect(parser_model_enable_disable)
@setup_required
@login_required
@account_initialization_required
def patch(self, provider: str):
_, tenant_id = current_account_with_tenant()
parser = (
reqparse.RequestParser()
.add_argument("model", type=str, required=True, nullable=False, location="json")
.add_argument(
"model_type",
type=str,
required=True,
nullable=False,
choices=[mt.value for mt in ModelType],
location="json",
)
)
args = parser.parse_args()
args = parser_model_enable_disable.parse_args()
model_provider_service = ModelProviderService()
model_provider_service.disable_model(
@@ -482,28 +492,31 @@ class ModelProviderModelDisableApi(Resource):
return {"result": "success"}
parser_validate = (
reqparse.RequestParser()
.add_argument("model", type=str, required=True, nullable=False, location="json")
.add_argument(
"model_type",
type=str,
required=True,
nullable=False,
choices=[mt.value for mt in ModelType],
location="json",
)
.add_argument("credentials", type=dict, required=True, nullable=False, location="json")
)
@console_ns.route("/workspaces/current/model-providers/<path:provider>/models/credentials/validate")
class ModelProviderModelValidateApi(Resource):
@api.expect(parser_validate)
@setup_required
@login_required
@account_initialization_required
def post(self, provider: str):
_, tenant_id = current_account_with_tenant()
parser = (
reqparse.RequestParser()
.add_argument("model", type=str, required=True, nullable=False, location="json")
.add_argument(
"model_type",
type=str,
required=True,
nullable=False,
choices=[mt.value for mt in ModelType],
location="json",
)
.add_argument("credentials", type=dict, required=True, nullable=False, location="json")
)
args = parser.parse_args()
args = parser_validate.parse_args()
model_provider_service = ModelProviderService()
@@ -530,16 +543,19 @@ class ModelProviderModelValidateApi(Resource):
return response
parser_parameter = reqparse.RequestParser().add_argument(
"model", type=str, required=True, nullable=False, location="args"
)
@console_ns.route("/workspaces/current/model-providers/<path:provider>/models/parameter-rules")
class ModelProviderModelParameterRuleApi(Resource):
@api.expect(parser_parameter)
@setup_required
@login_required
@account_initialization_required
def get(self, provider: str):
parser = reqparse.RequestParser().add_argument(
"model", type=str, required=True, nullable=False, location="args"
)
args = parser.parse_args()
args = parser_parameter.parse_args()
_, tenant_id = current_account_with_tenant()
model_provider_service = ModelProviderService()
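
The hunks above all apply the same refactor: the RequestParser that used to be built inside each handler is hoisted to module level, referenced by the docs decorator, and reused in the handler via parse_args(). A minimal, hypothetical sketch of the pattern (an illustrative namespace and route, not one of the real Dify endpoints; in the real code the expect decorator comes from the shared api object in controllers.console):

from flask_restx import Namespace, Resource, reqparse

# Hypothetical namespace standing in for console_ns, for illustration only.
example_ns = Namespace("example")

# Built once at module level so the docs decorator and the handler share it.
parser_example = reqparse.RequestParser().add_argument(
    "model", type=str, required=True, nullable=False, location="json"
)


@example_ns.route("/example")
class ExampleApi(Resource):
    @example_ns.expect(parser_example)  # documents the JSON payload in Swagger
    def post(self):
        args = parser_example.parse_args()  # the same parser validates the request
        return {"model": args["model"]}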

View File

@@ -5,7 +5,7 @@ from flask_restx import Resource, reqparse
from werkzeug.exceptions import Forbidden
from configs import dify_config
from controllers.console import console_ns
from controllers.console import api, console_ns
from controllers.console.workspace import plugin_permission_required
from controllers.console.wraps import account_initialization_required, setup_required
from core.model_runtime.utils.encoders import jsonable_encoder
@@ -37,19 +37,22 @@ class PluginDebuggingKeyApi(Resource):
raise ValueError(e)
parser_list = (
reqparse.RequestParser()
.add_argument("page", type=int, required=False, location="args", default=1)
.add_argument("page_size", type=int, required=False, location="args", default=256)
)
@console_ns.route("/workspaces/current/plugin/list")
class PluginListApi(Resource):
@api.expect(parser_list)
@setup_required
@login_required
@account_initialization_required
def get(self):
_, tenant_id = current_account_with_tenant()
parser = (
reqparse.RequestParser()
.add_argument("page", type=int, required=False, location="args", default=1)
.add_argument("page_size", type=int, required=False, location="args", default=256)
)
args = parser.parse_args()
args = parser_list.parse_args()
try:
plugins_with_total = PluginService.list_with_total(tenant_id, args["page"], args["page_size"])
except PluginDaemonClientSideError as e:
@@ -58,14 +61,17 @@ class PluginListApi(Resource):
return jsonable_encoder({"plugins": plugins_with_total.list, "total": plugins_with_total.total})
parser_latest = reqparse.RequestParser().add_argument("plugin_ids", type=list, required=True, location="json")
@console_ns.route("/workspaces/current/plugin/list/latest-versions")
class PluginListLatestVersionsApi(Resource):
@api.expect(parser_latest)
@setup_required
@login_required
@account_initialization_required
def post(self):
req = reqparse.RequestParser().add_argument("plugin_ids", type=list, required=True, location="json")
args = req.parse_args()
args = parser_latest.parse_args()
try:
versions = PluginService.list_latest_versions(args["plugin_ids"])
@@ -75,16 +81,19 @@ class PluginListLatestVersionsApi(Resource):
return jsonable_encoder({"versions": versions})
parser_ids = reqparse.RequestParser().add_argument("plugin_ids", type=list, required=True, location="json")
@console_ns.route("/workspaces/current/plugin/list/installations/ids")
class PluginListInstallationsFromIdsApi(Resource):
@api.expect(parser_ids)
@setup_required
@login_required
@account_initialization_required
def post(self):
_, tenant_id = current_account_with_tenant()
parser = reqparse.RequestParser().add_argument("plugin_ids", type=list, required=True, location="json")
args = parser.parse_args()
args = parser_ids.parse_args()
try:
plugins = PluginService.list_installations_from_ids(tenant_id, args["plugin_ids"])
@@ -94,16 +103,19 @@ class PluginListInstallationsFromIdsApi(Resource):
return jsonable_encoder({"plugins": plugins})
parser_icon = (
reqparse.RequestParser()
.add_argument("tenant_id", type=str, required=True, location="args")
.add_argument("filename", type=str, required=True, location="args")
)
@console_ns.route("/workspaces/current/plugin/icon")
class PluginIconApi(Resource):
@api.expect(parser_icon)
@setup_required
def get(self):
req = (
reqparse.RequestParser()
.add_argument("tenant_id", type=str, required=True, location="args")
.add_argument("filename", type=str, required=True, location="args")
)
args = req.parse_args()
args = parser_icon.parse_args()
try:
icon_bytes, mimetype = PluginService.get_asset(args["tenant_id"], args["filename"])
@@ -157,8 +169,17 @@ class PluginUploadFromPkgApi(Resource):
return jsonable_encoder(response)
parser_github = (
reqparse.RequestParser()
.add_argument("repo", type=str, required=True, location="json")
.add_argument("version", type=str, required=True, location="json")
.add_argument("package", type=str, required=True, location="json")
)
@console_ns.route("/workspaces/current/plugin/upload/github")
class PluginUploadFromGithubApi(Resource):
@api.expect(parser_github)
@setup_required
@login_required
@account_initialization_required
@@ -166,13 +187,7 @@ class PluginUploadFromGithubApi(Resource):
def post(self):
_, tenant_id = current_account_with_tenant()
parser = (
reqparse.RequestParser()
.add_argument("repo", type=str, required=True, location="json")
.add_argument("version", type=str, required=True, location="json")
.add_argument("package", type=str, required=True, location="json")
)
args = parser.parse_args()
args = parser_github.parse_args()
try:
response = PluginService.upload_pkg_from_github(tenant_id, args["repo"], args["version"], args["package"])
@@ -206,19 +221,21 @@ class PluginUploadFromBundleApi(Resource):
return jsonable_encoder(response)
parser_pkg = reqparse.RequestParser().add_argument(
"plugin_unique_identifiers", type=list, required=True, location="json"
)
@console_ns.route("/workspaces/current/plugin/install/pkg")
class PluginInstallFromPkgApi(Resource):
@api.expect(parser_pkg)
@setup_required
@login_required
@account_initialization_required
@plugin_permission_required(install_required=True)
def post(self):
_, tenant_id = current_account_with_tenant()
parser = reqparse.RequestParser().add_argument(
"plugin_unique_identifiers", type=list, required=True, location="json"
)
args = parser.parse_args()
args = parser_pkg.parse_args()
# check that all plugin_unique_identifiers are valid strings
for plugin_unique_identifier in args["plugin_unique_identifiers"]:
@@ -233,8 +250,18 @@ class PluginInstallFromPkgApi(Resource):
return jsonable_encoder(response)
parser_githubapi = (
reqparse.RequestParser()
.add_argument("repo", type=str, required=True, location="json")
.add_argument("version", type=str, required=True, location="json")
.add_argument("package", type=str, required=True, location="json")
.add_argument("plugin_unique_identifier", type=str, required=True, location="json")
)
@console_ns.route("/workspaces/current/plugin/install/github")
class PluginInstallFromGithubApi(Resource):
@api.expect(parser_githubapi)
@setup_required
@login_required
@account_initialization_required
@@ -242,14 +269,7 @@ class PluginInstallFromGithubApi(Resource):
def post(self):
_, tenant_id = current_account_with_tenant()
parser = (
reqparse.RequestParser()
.add_argument("repo", type=str, required=True, location="json")
.add_argument("version", type=str, required=True, location="json")
.add_argument("package", type=str, required=True, location="json")
.add_argument("plugin_unique_identifier", type=str, required=True, location="json")
)
args = parser.parse_args()
args = parser_githubapi.parse_args()
try:
response = PluginService.install_from_github(
@@ -265,8 +285,14 @@ class PluginInstallFromGithubApi(Resource):
return jsonable_encoder(response)
parser_marketplace = reqparse.RequestParser().add_argument(
"plugin_unique_identifiers", type=list, required=True, location="json"
)
@console_ns.route("/workspaces/current/plugin/install/marketplace")
class PluginInstallFromMarketplaceApi(Resource):
@api.expect(parser_marketplace)
@setup_required
@login_required
@account_initialization_required
@@ -274,10 +300,7 @@ class PluginInstallFromMarketplaceApi(Resource):
def post(self):
_, tenant_id = current_account_with_tenant()
parser = reqparse.RequestParser().add_argument(
"plugin_unique_identifiers", type=list, required=True, location="json"
)
args = parser.parse_args()
args = parser_marketplace.parse_args()
# check that all plugin_unique_identifiers are valid strings
for plugin_unique_identifier in args["plugin_unique_identifiers"]:
@@ -292,19 +315,21 @@ class PluginInstallFromMarketplaceApi(Resource):
return jsonable_encoder(response)
parser_pkgapi = reqparse.RequestParser().add_argument(
"plugin_unique_identifier", type=str, required=True, location="args"
)
@console_ns.route("/workspaces/current/plugin/marketplace/pkg")
class PluginFetchMarketplacePkgApi(Resource):
@api.expect(parser_pkgapi)
@setup_required
@login_required
@account_initialization_required
@plugin_permission_required(install_required=True)
def get(self):
_, tenant_id = current_account_with_tenant()
parser = reqparse.RequestParser().add_argument(
"plugin_unique_identifier", type=str, required=True, location="args"
)
args = parser.parse_args()
args = parser_pkgapi.parse_args()
try:
return jsonable_encoder(
@@ -319,8 +344,14 @@ class PluginFetchMarketplacePkgApi(Resource):
raise ValueError(e)
parser_fetch = reqparse.RequestParser().add_argument(
"plugin_unique_identifier", type=str, required=True, location="args"
)
@console_ns.route("/workspaces/current/plugin/fetch-manifest")
class PluginFetchManifestApi(Resource):
@api.expect(parser_fetch)
@setup_required
@login_required
@account_initialization_required
@@ -328,10 +359,7 @@ class PluginFetchManifestApi(Resource):
def get(self):
_, tenant_id = current_account_with_tenant()
parser = reqparse.RequestParser().add_argument(
"plugin_unique_identifier", type=str, required=True, location="args"
)
args = parser.parse_args()
args = parser_fetch.parse_args()
try:
return jsonable_encoder(
@@ -345,8 +373,16 @@ class PluginFetchManifestApi(Resource):
raise ValueError(e)
parser_tasks = (
reqparse.RequestParser()
.add_argument("page", type=int, required=True, location="args")
.add_argument("page_size", type=int, required=True, location="args")
)
@console_ns.route("/workspaces/current/plugin/tasks")
class PluginFetchInstallTasksApi(Resource):
@api.expect(parser_tasks)
@setup_required
@login_required
@account_initialization_required
@@ -354,12 +390,7 @@ class PluginFetchInstallTasksApi(Resource):
def get(self):
_, tenant_id = current_account_with_tenant()
parser = (
reqparse.RequestParser()
.add_argument("page", type=int, required=True, location="args")
.add_argument("page_size", type=int, required=True, location="args")
)
args = parser.parse_args()
args = parser_tasks.parse_args()
try:
return jsonable_encoder(
@@ -429,8 +460,16 @@ class PluginDeleteInstallTaskItemApi(Resource):
raise ValueError(e)
parser_marketplace_api = (
reqparse.RequestParser()
.add_argument("original_plugin_unique_identifier", type=str, required=True, location="json")
.add_argument("new_plugin_unique_identifier", type=str, required=True, location="json")
)
@console_ns.route("/workspaces/current/plugin/upgrade/marketplace")
class PluginUpgradeFromMarketplaceApi(Resource):
@api.expect(parser_marketplace_api)
@setup_required
@login_required
@account_initialization_required
@@ -438,12 +477,7 @@ class PluginUpgradeFromMarketplaceApi(Resource):
def post(self):
_, tenant_id = current_account_with_tenant()
parser = (
reqparse.RequestParser()
.add_argument("original_plugin_unique_identifier", type=str, required=True, location="json")
.add_argument("new_plugin_unique_identifier", type=str, required=True, location="json")
)
args = parser.parse_args()
args = parser_marketplace_api.parse_args()
try:
return jsonable_encoder(
@@ -455,8 +489,19 @@ class PluginUpgradeFromMarketplaceApi(Resource):
raise ValueError(e)
parser_github_post = (
reqparse.RequestParser()
.add_argument("original_plugin_unique_identifier", type=str, required=True, location="json")
.add_argument("new_plugin_unique_identifier", type=str, required=True, location="json")
.add_argument("repo", type=str, required=True, location="json")
.add_argument("version", type=str, required=True, location="json")
.add_argument("package", type=str, required=True, location="json")
)
@console_ns.route("/workspaces/current/plugin/upgrade/github")
class PluginUpgradeFromGithubApi(Resource):
@api.expect(parser_github_post)
@setup_required
@login_required
@account_initialization_required
@@ -464,15 +509,7 @@ class PluginUpgradeFromGithubApi(Resource):
def post(self):
_, tenant_id = current_account_with_tenant()
parser = (
reqparse.RequestParser()
.add_argument("original_plugin_unique_identifier", type=str, required=True, location="json")
.add_argument("new_plugin_unique_identifier", type=str, required=True, location="json")
.add_argument("repo", type=str, required=True, location="json")
.add_argument("version", type=str, required=True, location="json")
.add_argument("package", type=str, required=True, location="json")
)
args = parser.parse_args()
args = parser_github_post.parse_args()
try:
return jsonable_encoder(
@@ -489,15 +526,20 @@ class PluginUpgradeFromGithubApi(Resource):
raise ValueError(e)
parser_uninstall = reqparse.RequestParser().add_argument(
"plugin_installation_id", type=str, required=True, location="json"
)
@console_ns.route("/workspaces/current/plugin/uninstall")
class PluginUninstallApi(Resource):
@api.expect(parser_uninstall)
@setup_required
@login_required
@account_initialization_required
@plugin_permission_required(install_required=True)
def post(self):
req = reqparse.RequestParser().add_argument("plugin_installation_id", type=str, required=True, location="json")
args = req.parse_args()
args = parser_uninstall.parse_args()
_, tenant_id = current_account_with_tenant()
@@ -507,8 +549,16 @@ class PluginUninstallApi(Resource):
raise ValueError(e)
parser_change_post = (
reqparse.RequestParser()
.add_argument("install_permission", type=str, required=True, location="json")
.add_argument("debug_permission", type=str, required=True, location="json")
)
@console_ns.route("/workspaces/current/plugin/permission/change")
class PluginChangePermissionApi(Resource):
@api.expect(parser_change_post)
@setup_required
@login_required
@account_initialization_required
@@ -518,12 +568,7 @@ class PluginChangePermissionApi(Resource):
if not user.is_admin_or_owner:
raise Forbidden()
req = (
reqparse.RequestParser()
.add_argument("install_permission", type=str, required=True, location="json")
.add_argument("debug_permission", type=str, required=True, location="json")
)
args = req.parse_args()
args = parser_change_post.parse_args()
install_permission = TenantPluginPermission.InstallPermission(args["install_permission"])
debug_permission = TenantPluginPermission.DebugPermission(args["debug_permission"])
@@ -558,8 +603,20 @@ class PluginFetchPermissionApi(Resource):
)
parser_dynamic = (
reqparse.RequestParser()
.add_argument("plugin_id", type=str, required=True, location="args")
.add_argument("provider", type=str, required=True, location="args")
.add_argument("action", type=str, required=True, location="args")
.add_argument("parameter", type=str, required=True, location="args")
.add_argument("credential_id", type=str, required=False, location="args")
.add_argument("provider_type", type=str, required=True, location="args")
)
@console_ns.route("/workspaces/current/plugin/parameters/dynamic-options")
class PluginFetchDynamicSelectOptionsApi(Resource):
@api.expect(parser_dynamic)
@setup_required
@login_required
@account_initialization_required
@@ -571,16 +628,7 @@ class PluginFetchDynamicSelectOptionsApi(Resource):
user_id = current_user.id
parser = (
reqparse.RequestParser()
.add_argument("plugin_id", type=str, required=True, location="args")
.add_argument("provider", type=str, required=True, location="args")
.add_argument("action", type=str, required=True, location="args")
.add_argument("parameter", type=str, required=True, location="args")
.add_argument("credential_id", type=str, required=False, location="args")
.add_argument("provider_type", type=str, required=True, location="args")
)
args = parser.parse_args()
args = parser_dynamic.parse_args()
try:
options = PluginParameterService.get_dynamic_select_options(
@@ -599,8 +647,16 @@ class PluginFetchDynamicSelectOptionsApi(Resource):
return jsonable_encoder({"options": options})
parser_change = (
reqparse.RequestParser()
.add_argument("permission", type=dict, required=True, location="json")
.add_argument("auto_upgrade", type=dict, required=True, location="json")
)
@console_ns.route("/workspaces/current/plugin/preferences/change")
class PluginChangePreferencesApi(Resource):
@api.expect(parser_change)
@setup_required
@login_required
@account_initialization_required
@@ -609,12 +665,7 @@ class PluginChangePreferencesApi(Resource):
if not user.is_admin_or_owner:
raise Forbidden()
req = (
reqparse.RequestParser()
.add_argument("permission", type=dict, required=True, location="json")
.add_argument("auto_upgrade", type=dict, required=True, location="json")
)
args = req.parse_args()
args = parser_change.parse_args()
permission = args["permission"]
@@ -694,8 +745,12 @@ class PluginFetchPreferencesApi(Resource):
return jsonable_encoder({"permission": permission_dict, "auto_upgrade": auto_upgrade_dict})
parser_exclude = reqparse.RequestParser().add_argument("plugin_id", type=str, required=True, location="json")
@console_ns.route("/workspaces/current/plugin/preferences/autoupgrade/exclude")
class PluginAutoUpgradeExcludePluginApi(Resource):
@api.expect(parser_exclude)
@setup_required
@login_required
@account_initialization_required
@@ -703,8 +758,7 @@ class PluginAutoUpgradeExcludePluginApi(Resource):
# exclude a single plugin
_, tenant_id = current_account_with_tenant()
req = reqparse.RequestParser().add_argument("plugin_id", type=str, required=True, location="json")
args = req.parse_args()
args = parser_exclude.parse_args()
return jsonable_encoder({"success": PluginAutoUpgradeService.exclude_plugin(tenant_id, args["plugin_id"])})

View File

@@ -10,7 +10,7 @@ from sqlalchemy.orm import Session
from werkzeug.exceptions import Forbidden
from configs import dify_config
from controllers.console import console_ns
from controllers.console import api, console_ns
from controllers.console.wraps import (
account_initialization_required,
enterprise_license_required,
@@ -52,8 +52,19 @@ def is_valid_url(url: str) -> bool:
return False
parser_tool = reqparse.RequestParser().add_argument(
"type",
type=str,
choices=["builtin", "model", "api", "workflow", "mcp"],
required=False,
nullable=True,
location="args",
)
@console_ns.route("/workspaces/current/tool-providers")
class ToolProviderListApi(Resource):
@api.expect(parser_tool)
@setup_required
@login_required
@account_initialization_required
@@ -62,15 +73,7 @@ class ToolProviderListApi(Resource):
user_id = user.id
req = reqparse.RequestParser().add_argument(
"type",
type=str,
choices=["builtin", "model", "api", "workflow", "mcp"],
required=False,
nullable=True,
location="args",
)
args = req.parse_args()
args = parser_tool.parse_args()
return ToolCommonService.list_tool_providers(user_id, tenant_id, args.get("type", None))
@@ -102,8 +105,14 @@ class ToolBuiltinProviderInfoApi(Resource):
return jsonable_encoder(BuiltinToolManageService.get_builtin_tool_provider_info(tenant_id, provider))
parser_delete = reqparse.RequestParser().add_argument(
"credential_id", type=str, required=True, nullable=False, location="json"
)
@console_ns.route("/workspaces/current/tool-provider/builtin/<path:provider>/delete")
class ToolBuiltinProviderDeleteApi(Resource):
@api.expect(parser_delete)
@setup_required
@login_required
@account_initialization_required
@@ -112,10 +121,7 @@ class ToolBuiltinProviderDeleteApi(Resource):
if not user.is_admin_or_owner:
raise Forbidden()
req = reqparse.RequestParser().add_argument(
"credential_id", type=str, required=True, nullable=False, location="json"
)
args = req.parse_args()
args = parser_delete.parse_args()
return BuiltinToolManageService.delete_builtin_tool_provider(
tenant_id,
@@ -124,8 +130,17 @@ class ToolBuiltinProviderDeleteApi(Resource):
)
parser_add = (
reqparse.RequestParser()
.add_argument("credentials", type=dict, required=True, nullable=False, location="json")
.add_argument("name", type=StrLen(30), required=False, nullable=False, location="json")
.add_argument("type", type=str, required=True, nullable=False, location="json")
)
@console_ns.route("/workspaces/current/tool-provider/builtin/<path:provider>/add")
class ToolBuiltinProviderAddApi(Resource):
@api.expect(parser_add)
@setup_required
@login_required
@account_initialization_required
@@ -134,13 +149,7 @@ class ToolBuiltinProviderAddApi(Resource):
user_id = user.id
parser = (
reqparse.RequestParser()
.add_argument("credentials", type=dict, required=True, nullable=False, location="json")
.add_argument("name", type=StrLen(30), required=False, nullable=False, location="json")
.add_argument("type", type=str, required=True, nullable=False, location="json")
)
args = parser.parse_args()
args = parser_add.parse_args()
if args["type"] not in CredentialType.values():
raise ValueError(f"Invalid credential type: {args['type']}")
@@ -155,8 +164,17 @@ class ToolBuiltinProviderAddApi(Resource):
)
parser_update = (
reqparse.RequestParser()
.add_argument("credential_id", type=str, required=True, nullable=False, location="json")
.add_argument("credentials", type=dict, required=False, nullable=True, location="json")
.add_argument("name", type=StrLen(30), required=False, nullable=True, location="json")
)
@console_ns.route("/workspaces/current/tool-provider/builtin/<path:provider>/update")
class ToolBuiltinProviderUpdateApi(Resource):
@api.expect(parser_update)
@setup_required
@login_required
@account_initialization_required
@@ -168,14 +186,7 @@ class ToolBuiltinProviderUpdateApi(Resource):
user_id = user.id
parser = (
reqparse.RequestParser()
.add_argument("credential_id", type=str, required=True, nullable=False, location="json")
.add_argument("credentials", type=dict, required=False, nullable=True, location="json")
.add_argument("name", type=StrLen(30), required=False, nullable=True, location="json")
)
args = parser.parse_args()
args = parser_update.parse_args()
result = BuiltinToolManageService.update_builtin_tool_provider(
user_id=user_id,
@@ -213,8 +224,22 @@ class ToolBuiltinProviderIconApi(Resource):
return send_file(io.BytesIO(icon_bytes), mimetype=mimetype, max_age=icon_cache_max_age)
parser_api_add = (
reqparse.RequestParser()
.add_argument("credentials", type=dict, required=True, nullable=False, location="json")
.add_argument("schema_type", type=str, required=True, nullable=False, location="json")
.add_argument("schema", type=str, required=True, nullable=False, location="json")
.add_argument("provider", type=str, required=True, nullable=False, location="json")
.add_argument("icon", type=dict, required=True, nullable=False, location="json")
.add_argument("privacy_policy", type=str, required=False, nullable=True, location="json")
.add_argument("labels", type=list[str], required=False, nullable=True, location="json", default=[])
.add_argument("custom_disclaimer", type=str, required=False, nullable=True, location="json")
)
@console_ns.route("/workspaces/current/tool-provider/api/add")
class ToolApiProviderAddApi(Resource):
@api.expect(parser_api_add)
@setup_required
@login_required
@account_initialization_required
@@ -226,19 +251,7 @@ class ToolApiProviderAddApi(Resource):
user_id = user.id
parser = (
reqparse.RequestParser()
.add_argument("credentials", type=dict, required=True, nullable=False, location="json")
.add_argument("schema_type", type=str, required=True, nullable=False, location="json")
.add_argument("schema", type=str, required=True, nullable=False, location="json")
.add_argument("provider", type=str, required=True, nullable=False, location="json")
.add_argument("icon", type=dict, required=True, nullable=False, location="json")
.add_argument("privacy_policy", type=str, required=False, nullable=True, location="json")
.add_argument("labels", type=list[str], required=False, nullable=True, location="json", default=[])
.add_argument("custom_disclaimer", type=str, required=False, nullable=True, location="json")
)
args = parser.parse_args()
args = parser_api_add.parse_args()
return ApiToolManageService.create_api_tool_provider(
user_id,
@@ -254,8 +267,12 @@ class ToolApiProviderAddApi(Resource):
)
parser_remote = reqparse.RequestParser().add_argument("url", type=str, required=True, nullable=False, location="args")
@console_ns.route("/workspaces/current/tool-provider/api/remote")
class ToolApiProviderGetRemoteSchemaApi(Resource):
@api.expect(parser_remote)
@setup_required
@login_required
@account_initialization_required
@@ -264,9 +281,7 @@ class ToolApiProviderGetRemoteSchemaApi(Resource):
user_id = user.id
parser = reqparse.RequestParser().add_argument("url", type=str, required=True, nullable=False, location="args")
args = parser.parse_args()
args = parser_remote.parse_args()
return ApiToolManageService.get_api_tool_provider_remote_schema(
user_id,
@@ -275,8 +290,14 @@ class ToolApiProviderGetRemoteSchemaApi(Resource):
)
parser_tools = reqparse.RequestParser().add_argument(
"provider", type=str, required=True, nullable=False, location="args"
)
@console_ns.route("/workspaces/current/tool-provider/api/tools")
class ToolApiProviderListToolsApi(Resource):
@api.expect(parser_tools)
@setup_required
@login_required
@account_initialization_required
@@ -285,11 +306,7 @@ class ToolApiProviderListToolsApi(Resource):
user_id = user.id
parser = reqparse.RequestParser().add_argument(
"provider", type=str, required=True, nullable=False, location="args"
)
args = parser.parse_args()
args = parser_tools.parse_args()
return jsonable_encoder(
ApiToolManageService.list_api_tool_provider_tools(
@@ -300,8 +317,23 @@ class ToolApiProviderListToolsApi(Resource):
)
parser_api_update = (
reqparse.RequestParser()
.add_argument("credentials", type=dict, required=True, nullable=False, location="json")
.add_argument("schema_type", type=str, required=True, nullable=False, location="json")
.add_argument("schema", type=str, required=True, nullable=False, location="json")
.add_argument("provider", type=str, required=True, nullable=False, location="json")
.add_argument("original_provider", type=str, required=True, nullable=False, location="json")
.add_argument("icon", type=dict, required=True, nullable=False, location="json")
.add_argument("privacy_policy", type=str, required=True, nullable=True, location="json")
.add_argument("labels", type=list[str], required=False, nullable=True, location="json")
.add_argument("custom_disclaimer", type=str, required=True, nullable=True, location="json")
)
@console_ns.route("/workspaces/current/tool-provider/api/update")
class ToolApiProviderUpdateApi(Resource):
@api.expect(parser_api_update)
@setup_required
@login_required
@account_initialization_required
@@ -313,20 +345,7 @@ class ToolApiProviderUpdateApi(Resource):
user_id = user.id
parser = (
reqparse.RequestParser()
.add_argument("credentials", type=dict, required=True, nullable=False, location="json")
.add_argument("schema_type", type=str, required=True, nullable=False, location="json")
.add_argument("schema", type=str, required=True, nullable=False, location="json")
.add_argument("provider", type=str, required=True, nullable=False, location="json")
.add_argument("original_provider", type=str, required=True, nullable=False, location="json")
.add_argument("icon", type=dict, required=True, nullable=False, location="json")
.add_argument("privacy_policy", type=str, required=True, nullable=True, location="json")
.add_argument("labels", type=list[str], required=False, nullable=True, location="json")
.add_argument("custom_disclaimer", type=str, required=True, nullable=True, location="json")
)
args = parser.parse_args()
args = parser_api_update.parse_args()
return ApiToolManageService.update_api_tool_provider(
user_id,
@@ -343,8 +362,14 @@ class ToolApiProviderUpdateApi(Resource):
)
parser_api_delete = reqparse.RequestParser().add_argument(
"provider", type=str, required=True, nullable=False, location="json"
)
@console_ns.route("/workspaces/current/tool-provider/api/delete")
class ToolApiProviderDeleteApi(Resource):
@api.expect(parser_api_delete)
@setup_required
@login_required
@account_initialization_required
@@ -356,11 +381,7 @@ class ToolApiProviderDeleteApi(Resource):
user_id = user.id
parser = reqparse.RequestParser().add_argument(
"provider", type=str, required=True, nullable=False, location="json"
)
args = parser.parse_args()
args = parser_api_delete.parse_args()
return ApiToolManageService.delete_api_tool_provider(
user_id,
@@ -369,8 +390,12 @@ class ToolApiProviderDeleteApi(Resource):
)
parser_get = reqparse.RequestParser().add_argument("provider", type=str, required=True, nullable=False, location="args")
@console_ns.route("/workspaces/current/tool-provider/api/get")
class ToolApiProviderGetApi(Resource):
@api.expect(parser_get)
@setup_required
@login_required
@account_initialization_required
@@ -379,11 +404,7 @@ class ToolApiProviderGetApi(Resource):
user_id = user.id
parser = reqparse.RequestParser().add_argument(
"provider", type=str, required=True, nullable=False, location="args"
)
args = parser.parse_args()
args = parser_get.parse_args()
return ApiToolManageService.get_api_tool_provider(
user_id,
@@ -407,40 +428,44 @@ class ToolBuiltinProviderCredentialsSchemaApi(Resource):
)
parser_schema = reqparse.RequestParser().add_argument(
"schema", type=str, required=True, nullable=False, location="json"
)
@console_ns.route("/workspaces/current/tool-provider/api/schema")
class ToolApiProviderSchemaApi(Resource):
@api.expect(parser_schema)
@setup_required
@login_required
@account_initialization_required
def post(self):
parser = reqparse.RequestParser().add_argument(
"schema", type=str, required=True, nullable=False, location="json"
)
args = parser.parse_args()
args = parser_schema.parse_args()
return ApiToolManageService.parser_api_schema(
schema=args["schema"],
)
parser_pre = (
reqparse.RequestParser()
.add_argument("tool_name", type=str, required=True, nullable=False, location="json")
.add_argument("provider_name", type=str, required=False, nullable=False, location="json")
.add_argument("credentials", type=dict, required=True, nullable=False, location="json")
.add_argument("parameters", type=dict, required=True, nullable=False, location="json")
.add_argument("schema_type", type=str, required=True, nullable=False, location="json")
.add_argument("schema", type=str, required=True, nullable=False, location="json")
)
@console_ns.route("/workspaces/current/tool-provider/api/test/pre")
class ToolApiProviderPreviousTestApi(Resource):
@api.expect(parser_pre)
@setup_required
@login_required
@account_initialization_required
def post(self):
parser = (
reqparse.RequestParser()
.add_argument("tool_name", type=str, required=True, nullable=False, location="json")
.add_argument("provider_name", type=str, required=False, nullable=False, location="json")
.add_argument("credentials", type=dict, required=True, nullable=False, location="json")
.add_argument("parameters", type=dict, required=True, nullable=False, location="json")
.add_argument("schema_type", type=str, required=True, nullable=False, location="json")
.add_argument("schema", type=str, required=True, nullable=False, location="json")
)
args = parser.parse_args()
args = parser_pre.parse_args()
_, current_tenant_id = current_account_with_tenant()
return ApiToolManageService.test_api_tool_preview(
current_tenant_id,
@@ -453,8 +478,22 @@ class ToolApiProviderPreviousTestApi(Resource):
)
parser_create = (
reqparse.RequestParser()
.add_argument("workflow_app_id", type=uuid_value, required=True, nullable=False, location="json")
.add_argument("name", type=alphanumeric, required=True, nullable=False, location="json")
.add_argument("label", type=str, required=True, nullable=False, location="json")
.add_argument("description", type=str, required=True, nullable=False, location="json")
.add_argument("icon", type=dict, required=True, nullable=False, location="json")
.add_argument("parameters", type=list[dict], required=True, nullable=False, location="json")
.add_argument("privacy_policy", type=str, required=False, nullable=True, location="json", default="")
.add_argument("labels", type=list[str], required=False, nullable=True, location="json")
)
@console_ns.route("/workspaces/current/tool-provider/workflow/create")
class ToolWorkflowProviderCreateApi(Resource):
@api.expect(parser_create)
@setup_required
@login_required
@account_initialization_required
@@ -466,19 +505,7 @@ class ToolWorkflowProviderCreateApi(Resource):
user_id = user.id
reqparser = (
reqparse.RequestParser()
.add_argument("workflow_app_id", type=uuid_value, required=True, nullable=False, location="json")
.add_argument("name", type=alphanumeric, required=True, nullable=False, location="json")
.add_argument("label", type=str, required=True, nullable=False, location="json")
.add_argument("description", type=str, required=True, nullable=False, location="json")
.add_argument("icon", type=dict, required=True, nullable=False, location="json")
.add_argument("parameters", type=list[dict], required=True, nullable=False, location="json")
.add_argument("privacy_policy", type=str, required=False, nullable=True, location="json", default="")
.add_argument("labels", type=list[str], required=False, nullable=True, location="json")
)
args = reqparser.parse_args()
args = parser_create.parse_args()
return WorkflowToolManageService.create_workflow_tool(
user_id=user_id,
@@ -494,8 +521,22 @@ class ToolWorkflowProviderCreateApi(Resource):
)
parser_workflow_update = (
reqparse.RequestParser()
.add_argument("workflow_tool_id", type=uuid_value, required=True, nullable=False, location="json")
.add_argument("name", type=alphanumeric, required=True, nullable=False, location="json")
.add_argument("label", type=str, required=True, nullable=False, location="json")
.add_argument("description", type=str, required=True, nullable=False, location="json")
.add_argument("icon", type=dict, required=True, nullable=False, location="json")
.add_argument("parameters", type=list[dict], required=True, nullable=False, location="json")
.add_argument("privacy_policy", type=str, required=False, nullable=True, location="json", default="")
.add_argument("labels", type=list[str], required=False, nullable=True, location="json")
)
@console_ns.route("/workspaces/current/tool-provider/workflow/update")
class ToolWorkflowProviderUpdateApi(Resource):
@api.expect(parser_workflow_update)
@setup_required
@login_required
@account_initialization_required
@@ -507,19 +548,7 @@ class ToolWorkflowProviderUpdateApi(Resource):
user_id = user.id
reqparser = (
reqparse.RequestParser()
.add_argument("workflow_tool_id", type=uuid_value, required=True, nullable=False, location="json")
.add_argument("name", type=alphanumeric, required=True, nullable=False, location="json")
.add_argument("label", type=str, required=True, nullable=False, location="json")
.add_argument("description", type=str, required=True, nullable=False, location="json")
.add_argument("icon", type=dict, required=True, nullable=False, location="json")
.add_argument("parameters", type=list[dict], required=True, nullable=False, location="json")
.add_argument("privacy_policy", type=str, required=False, nullable=True, location="json", default="")
.add_argument("labels", type=list[str], required=False, nullable=True, location="json")
)
args = reqparser.parse_args()
args = parser_workflow_update.parse_args()
if not args["workflow_tool_id"]:
raise ValueError("incorrect workflow_tool_id")
@@ -538,8 +567,14 @@ class ToolWorkflowProviderUpdateApi(Resource):
)
parser_workflow_delete = reqparse.RequestParser().add_argument(
"workflow_tool_id", type=uuid_value, required=True, nullable=False, location="json"
)
@console_ns.route("/workspaces/current/tool-provider/workflow/delete")
class ToolWorkflowProviderDeleteApi(Resource):
@api.expect(parser_workflow_delete)
@setup_required
@login_required
@account_initialization_required
@@ -551,11 +586,7 @@ class ToolWorkflowProviderDeleteApi(Resource):
user_id = user.id
reqparser = reqparse.RequestParser().add_argument(
"workflow_tool_id", type=uuid_value, required=True, nullable=False, location="json"
)
args = reqparser.parse_args()
args = parser_workflow_delete.parse_args()
return WorkflowToolManageService.delete_workflow_tool(
user_id,
@@ -564,8 +595,16 @@ class ToolWorkflowProviderDeleteApi(Resource):
)
parser_wf_get = (
reqparse.RequestParser()
.add_argument("workflow_tool_id", type=uuid_value, required=False, nullable=True, location="args")
.add_argument("workflow_app_id", type=uuid_value, required=False, nullable=True, location="args")
)
@console_ns.route("/workspaces/current/tool-provider/workflow/get")
class ToolWorkflowProviderGetApi(Resource):
@api.expect(parser_wf_get)
@setup_required
@login_required
@account_initialization_required
@@ -574,13 +613,7 @@ class ToolWorkflowProviderGetApi(Resource):
user_id = user.id
parser = (
reqparse.RequestParser()
.add_argument("workflow_tool_id", type=uuid_value, required=False, nullable=True, location="args")
.add_argument("workflow_app_id", type=uuid_value, required=False, nullable=True, location="args")
)
args = parser.parse_args()
args = parser_wf_get.parse_args()
if args.get("workflow_tool_id"):
tool = WorkflowToolManageService.get_workflow_tool_by_tool_id(
@@ -600,8 +633,14 @@ class ToolWorkflowProviderGetApi(Resource):
return jsonable_encoder(tool)
parser_wf_tools = reqparse.RequestParser().add_argument(
"workflow_tool_id", type=uuid_value, required=True, nullable=False, location="args"
)
@console_ns.route("/workspaces/current/tool-provider/workflow/tools")
class ToolWorkflowProviderListToolApi(Resource):
@api.expect(parser_wf_tools)
@setup_required
@login_required
@account_initialization_required
@@ -610,11 +649,7 @@ class ToolWorkflowProviderListToolApi(Resource):
user_id = user.id
parser = reqparse.RequestParser().add_argument(
"workflow_tool_id", type=uuid_value, required=True, nullable=False, location="args"
)
args = parser.parse_args()
args = parser_wf_tools.parse_args()
return jsonable_encoder(
WorkflowToolManageService.list_single_workflow_tools(
@@ -790,32 +825,40 @@ class ToolOAuthCallback(Resource):
return redirect(f"{dify_config.CONSOLE_WEB_URL}/oauth-callback")
parser_default_cred = reqparse.RequestParser().add_argument(
"id", type=str, required=True, nullable=False, location="json"
)
@console_ns.route("/workspaces/current/tool-provider/builtin/<path:provider>/default-credential")
class ToolBuiltinProviderSetDefaultApi(Resource):
@api.expect(parser_default_cred)
@setup_required
@login_required
@account_initialization_required
def post(self, provider):
current_user, current_tenant_id = current_account_with_tenant()
parser = reqparse.RequestParser().add_argument("id", type=str, required=True, nullable=False, location="json")
args = parser.parse_args()
args = parser_default_cred.parse_args()
return BuiltinToolManageService.set_default_provider(
tenant_id=current_tenant_id, user_id=current_user.id, provider=provider, id=args["id"]
)
parser_custom = (
reqparse.RequestParser()
.add_argument("client_params", type=dict, required=False, nullable=True, location="json")
.add_argument("enable_oauth_custom_client", type=bool, required=False, nullable=True, location="json")
)
@console_ns.route("/workspaces/current/tool-provider/builtin/<path:provider>/oauth/custom-client")
class ToolOAuthCustomClient(Resource):
@api.expect(parser_custom)
@setup_required
@login_required
@account_initialization_required
def post(self, provider):
parser = (
reqparse.RequestParser()
.add_argument("client_params", type=dict, required=False, nullable=True, location="json")
.add_argument("enable_oauth_custom_client", type=bool, required=False, nullable=True, location="json")
)
args = parser.parse_args()
args = parser_custom.parse_args()
user, tenant_id = current_account_with_tenant()
@@ -878,25 +921,44 @@ class ToolBuiltinProviderGetCredentialInfoApi(Resource):
)
parser_mcp = (
reqparse.RequestParser()
.add_argument("server_url", type=str, required=True, nullable=False, location="json")
.add_argument("name", type=str, required=True, nullable=False, location="json")
.add_argument("icon", type=str, required=True, nullable=False, location="json")
.add_argument("icon_type", type=str, required=True, nullable=False, location="json")
.add_argument("icon_background", type=str, required=False, nullable=True, location="json", default="")
.add_argument("server_identifier", type=str, required=True, nullable=False, location="json")
.add_argument("configuration", type=dict, required=False, nullable=True, location="json", default={})
.add_argument("headers", type=dict, required=False, nullable=True, location="json", default={})
.add_argument("authentication", type=dict, required=False, nullable=True, location="json", default={})
)
parser_mcp_put = (
reqparse.RequestParser()
.add_argument("server_url", type=str, required=True, nullable=False, location="json")
.add_argument("name", type=str, required=True, nullable=False, location="json")
.add_argument("icon", type=str, required=True, nullable=False, location="json")
.add_argument("icon_type", type=str, required=True, nullable=False, location="json")
.add_argument("icon_background", type=str, required=False, nullable=True, location="json")
.add_argument("provider_id", type=str, required=True, nullable=False, location="json")
.add_argument("server_identifier", type=str, required=True, nullable=False, location="json")
.add_argument("configuration", type=dict, required=False, nullable=True, location="json", default={})
.add_argument("headers", type=dict, required=False, nullable=True, location="json", default={})
.add_argument("authentication", type=dict, required=False, nullable=True, location="json", default={})
)
parser_mcp_delete = reqparse.RequestParser().add_argument(
"provider_id", type=str, required=True, nullable=False, location="json"
)
@console_ns.route("/workspaces/current/tool-provider/mcp")
class ToolProviderMCPApi(Resource):
@api.expect(parser_mcp)
@setup_required
@login_required
@account_initialization_required
def post(self):
parser = (
reqparse.RequestParser()
.add_argument("server_url", type=str, required=True, nullable=False, location="json")
.add_argument("name", type=str, required=True, nullable=False, location="json")
.add_argument("icon", type=str, required=True, nullable=False, location="json")
.add_argument("icon_type", type=str, required=True, nullable=False, location="json")
.add_argument("icon_background", type=str, required=False, nullable=True, location="json", default="")
.add_argument("server_identifier", type=str, required=True, nullable=False, location="json")
.add_argument("configuration", type=dict, required=False, nullable=True, location="json", default={})
.add_argument("headers", type=dict, required=False, nullable=True, location="json", default={})
.add_argument("authentication", type=dict, required=False, nullable=True, location="json", default={})
)
args = parser.parse_args()
args = parser_mcp.parse_args()
user, tenant_id = current_account_with_tenant()
# Parse and validate models
@@ -921,24 +983,12 @@ class ToolProviderMCPApi(Resource):
)
return jsonable_encoder(result)
@api.expect(parser_mcp_put)
@setup_required
@login_required
@account_initialization_required
def put(self):
parser = (
reqparse.RequestParser()
.add_argument("server_url", type=str, required=True, nullable=False, location="json")
.add_argument("name", type=str, required=True, nullable=False, location="json")
.add_argument("icon", type=str, required=True, nullable=False, location="json")
.add_argument("icon_type", type=str, required=True, nullable=False, location="json")
.add_argument("icon_background", type=str, required=False, nullable=True, location="json")
.add_argument("provider_id", type=str, required=True, nullable=False, location="json")
.add_argument("server_identifier", type=str, required=True, nullable=False, location="json")
.add_argument("configuration", type=dict, required=False, nullable=True, location="json", default={})
.add_argument("headers", type=dict, required=False, nullable=True, location="json", default={})
.add_argument("authentication", type=dict, required=False, nullable=True, location="json", default={})
)
args = parser.parse_args()
args = parser_mcp_put.parse_args()
configuration = MCPConfiguration.model_validate(args["configuration"])
authentication = MCPAuthentication.model_validate(args["authentication"]) if args["authentication"] else None
_, current_tenant_id = current_account_with_tenant()
@@ -972,14 +1022,12 @@ class ToolProviderMCPApi(Resource):
)
return {"result": "success"}
@api.expect(parser_mcp_delete)
@setup_required
@login_required
@account_initialization_required
def delete(self):
parser = reqparse.RequestParser().add_argument(
"provider_id", type=str, required=True, nullable=False, location="json"
)
args = parser.parse_args()
args = parser_mcp_delete.parse_args()
_, current_tenant_id = current_account_with_tenant()
with Session(db.engine) as session, session.begin():
@@ -988,18 +1036,21 @@ class ToolProviderMCPApi(Resource):
return {"result": "success"}
parser_auth = (
reqparse.RequestParser()
.add_argument("provider_id", type=str, required=True, nullable=False, location="json")
.add_argument("authorization_code", type=str, required=False, nullable=True, location="json")
)
@console_ns.route("/workspaces/current/tool-provider/mcp/auth")
class ToolMCPAuthApi(Resource):
@api.expect(parser_auth)
@setup_required
@login_required
@account_initialization_required
def post(self):
parser = (
reqparse.RequestParser()
.add_argument("provider_id", type=str, required=True, nullable=False, location="json")
.add_argument("authorization_code", type=str, required=False, nullable=True, location="json")
)
args = parser.parse_args()
args = parser_auth.parse_args()
provider_id = args["provider_id"]
_, tenant_id = current_account_with_tenant()
@@ -1097,15 +1148,18 @@ class ToolMCPUpdateApi(Resource):
return jsonable_encoder(tools)
parser_cb = (
reqparse.RequestParser()
.add_argument("code", type=str, required=True, nullable=False, location="args")
.add_argument("state", type=str, required=True, nullable=False, location="args")
)
@console_ns.route("/mcp/oauth/callback")
class ToolMCPCallbackApi(Resource):
@api.expect(parser_cb)
def get(self):
parser = (
reqparse.RequestParser()
.add_argument("code", type=str, required=True, nullable=False, location="args")
.add_argument("state", type=str, required=True, nullable=False, location="args")
)
args = parser.parse_args()
args = parser_cb.parse_args()
state_key = args["state"]
authorization_code = args["code"]

View File

@@ -13,7 +13,7 @@ from controllers.common.errors import (
TooManyFilesError,
UnsupportedFileTypeError,
)
from controllers.console import console_ns
from controllers.console import api, console_ns
from controllers.console.admin import admin_required
from controllers.console.error import AccountNotLinkTenantError
from controllers.console.wraps import (
@@ -150,15 +150,18 @@ class TenantApi(Resource):
return WorkspaceService.get_tenant_info(tenant), 200
parser_switch = reqparse.RequestParser().add_argument("tenant_id", type=str, required=True, location="json")
@console_ns.route("/workspaces/switch")
class SwitchWorkspaceApi(Resource):
@api.expect(parser_switch)
@setup_required
@login_required
@account_initialization_required
def post(self):
current_user, _ = current_account_with_tenant()
parser = reqparse.RequestParser().add_argument("tenant_id", type=str, required=True, location="json")
args = parser.parse_args()
args = parser_switch.parse_args()
# check if tenant_id is valid, 403 if not
try:
@@ -242,16 +245,19 @@ class WebappLogoWorkspaceApi(Resource):
return {"id": upload_file.id}, 201
parser_info = reqparse.RequestParser().add_argument("name", type=str, required=True, location="json")
@console_ns.route("/workspaces/info")
class WorkspaceInfoApi(Resource):
@api.expect(parser_info)
@setup_required
@login_required
@account_initialization_required
# Change workspace name
def post(self):
_, current_tenant_id = current_account_with_tenant()
parser = reqparse.RequestParser().add_argument("name", type=str, required=True, location="json")
args = parser.parse_args()
args = parser_info.parse_args()
if not current_tenant_id:
raise ValueError("No current tenant")

View File

@@ -88,12 +88,6 @@ class AudioApi(WebApiResource):
@web_ns.route("/text-to-audio")
class TextApi(WebApiResource):
text_to_audio_response_fields = {
"audio_url": fields.String,
"duration": fields.Float,
}
@marshal_with(text_to_audio_response_fields)
@web_ns.doc("Text to Audio")
@web_ns.doc(description="Convert text to audio using text-to-speech service.")
@web_ns.doc(

View File

@@ -138,6 +138,10 @@ class StreamableHTTPTransport:
) -> bool:
"""Handle an SSE event, returning True if the response is complete."""
if sse.event == "message":
# a ping event sent by the server is recognized as a message event with empty data by httpx-sse's SSEDecoder
if not sse.data.strip():
return False
try:
message = JSONRPCMessage.model_validate_json(sse.data)
logger.debug("SSE message: %s", message)
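
The new guard above drops keep-alive pings, which httpx-sse's decoder surfaces as "message" events with an empty data field, before they reach the JSON-RPC parser. A small self-contained sketch of that check, using a stand-in event type rather than the real httpx-sse class:

from dataclasses import dataclass


@dataclass
class FakeSSE:
    """Stand-in for httpx-sse's ServerSentEvent, for illustration only."""
    event: str
    data: str


def should_parse(sse: FakeSSE) -> bool:
    # Mirrors the new check: a "message" event with blank data is a keep-alive
    # ping, not a JSON-RPC payload, so it is skipped instead of being parsed.
    return sse.event == "message" and bool(sse.data.strip())


assert should_parse(FakeSSE("message", '{"jsonrpc": "2.0", "id": 1}')) is True
assert should_parse(FakeSSE("message", "")) is False      # keep-alive ping
assert should_parse(FakeSSE("message", "  \n")) is False   # whitespace-only ping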

View File

@@ -52,7 +52,7 @@ class OpenAIModeration(Moderation):
text = "\n".join(str(inputs.values()))
model_manager = ModelManager()
model_instance = model_manager.get_model_instance(
tenant_id=self.tenant_id, provider="openai", model_type=ModelType.MODERATION, model="text-moderation-stable"
tenant_id=self.tenant_id, provider="openai", model_type=ModelType.MODERATION, model="omni-moderation-latest"
)
openai_moderation = model_instance.invoke_moderation(text=text)

View File

@@ -152,13 +152,15 @@ class WordExtractor(BaseExtractor):
# Initialize a row with all cells empty by default
row_cells = [""] * total_cols
col_index = 0
for cell in row.cells:
while col_index < len(row.cells):
# make sure the col_index is not out of range
while col_index < total_cols and row_cells[col_index] != "":
while col_index < len(row.cells) and row_cells[col_index] != "":
col_index += 1
# if col_index is out of range, exit the loop
if col_index >= total_cols:
if col_index >= len(row.cells):
break
# get the correct cell
cell = row.cells[col_index]
cell_content = self._parse_cell(cell, image_map).strip()
cell_colspan = cell.grid_span or 1
for i in range(cell_colspan):

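The rewritten loop above walks the row by cell index so that a cell spanning several grid columns (grid_span) fills all of its columns before the index advances, instead of shifting later cells out of place. A rough, hypothetical sketch of that filling strategy, with plain tuples standing in for python-docx cells:

def fill_row(cells: list[tuple[str, int]], total_cols: int) -> list[str]:
    """Hypothetical illustration of the colspan-aware fill, not the real extractor.

    Each (text, colspan) tuple stands in for a python-docx cell and its grid_span.
    """
    row_cells = [""] * total_cols
    col_index = 0
    for text, colspan in cells:
        # Skip columns already filled by an earlier cell's span.
        while col_index < total_cols and row_cells[col_index] != "":
            col_index += 1
        if col_index >= total_cols:
            break
        for i in range(colspan):
            if col_index + i < total_cols:
                row_cells[col_index + i] = text
        col_index += colspan
    return row_cells


# First cell spans two of the three grid columns; the second lands in column 3.
print(fill_row([("merged", 2), ("tail", 1)], total_cols=3))  # ['merged', 'merged', 'tail']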
View File

@@ -54,6 +54,9 @@ class TenantIsolatedTaskQueue:
serialized_data = wrapper.serialize()
serialized_tasks.append(serialized_data)
if not serialized_tasks:
return
redis_client.lpush(self._queue, *serialized_tasks)
def pull_tasks(self, count: int = 1) -> Sequence[Any]:

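The early return above keeps the queue from issuing LPUSH with an empty batch, which would fail because LPUSH requires at least one value. A tiny hedged sketch of the same guard as a standalone helper (hypothetical names, not the real TenantIsolatedTaskQueue):

def push_serialized_tasks(redis_client, queue_key: str, serialized_tasks: list[str]) -> None:
    """Hypothetical helper mirroring the new guard."""
    if not serialized_tasks:
        # Nothing to enqueue; LPUSH with zero values would be rejected by Redis.
        return
    redis_client.lpush(queue_key, *serialized_tasks)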
View File

@@ -202,6 +202,35 @@ class SegmentType(StrEnum):
raise ValueError(f"element_type is only supported by array type, got {self}")
return _ARRAY_ELEMENT_TYPES_MAPPING.get(self)
@staticmethod
def get_zero_value(t: "SegmentType"):
# Lazy import to avoid circular dependency
from factories import variable_factory
match t:
case (
SegmentType.ARRAY_OBJECT
| SegmentType.ARRAY_ANY
| SegmentType.ARRAY_STRING
| SegmentType.ARRAY_NUMBER
| SegmentType.ARRAY_BOOLEAN
):
return variable_factory.build_segment_with_type(t, [])
case SegmentType.OBJECT:
return variable_factory.build_segment({})
case SegmentType.STRING:
return variable_factory.build_segment("")
case SegmentType.INTEGER:
return variable_factory.build_segment(0)
case SegmentType.FLOAT:
return variable_factory.build_segment(0.0)
case SegmentType.NUMBER:
return variable_factory.build_segment(0)
case SegmentType.BOOLEAN:
return variable_factory.build_segment(False)
case _:
raise ValueError(f"unsupported variable type: {t}")
_ARRAY_ELEMENT_TYPES_MAPPING: Mapping[SegmentType, SegmentType] = {
# ARRAY_ANY does not have corresponding element type.

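SegmentType.get_zero_value replaces the two ad-hoc implementations removed further down (the module-level get_zero_value helper in the variable assigner and EMPTY_VALUE_MAPPING), so callers derive the empty value from the type itself. A hedged usage sketch, assuming the import path used elsewhere in this diff:

from core.variables import SegmentType

# Zero values come back as segments; to_object() unwraps them to plain Python values.
print(SegmentType.get_zero_value(SegmentType.STRING).to_object())        # ""
print(SegmentType.get_zero_value(SegmentType.NUMBER).to_object())        # 0
print(SegmentType.get_zero_value(SegmentType.ARRAY_STRING).to_object())  # []
print(SegmentType.get_zero_value(SegmentType.OBJECT).to_object())        # {}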
View File

@@ -192,7 +192,6 @@ class GraphEngine:
self._dispatcher = Dispatcher(
event_queue=self._event_queue,
event_handler=self._event_handler_registry,
event_collector=self._event_manager,
execution_coordinator=self._execution_coordinator,
event_emitter=self._event_manager,
)

View File

@@ -43,7 +43,6 @@ class Dispatcher:
self,
event_queue: queue.Queue[GraphNodeEventBase],
event_handler: "EventHandler",
event_collector: EventManager,
execution_coordinator: ExecutionCoordinator,
event_emitter: EventManager | None = None,
) -> None:
@@ -53,13 +52,11 @@ class Dispatcher:
Args:
event_queue: Queue of events from workers
event_handler: Event handler registry for processing events
event_collector: Event manager for collecting unhandled events
execution_coordinator: Coordinator for execution flow
event_emitter: Optional event manager to signal completion
"""
self._event_queue = event_queue
self._event_handler = event_handler
self._event_collector = event_collector
self._execution_coordinator = execution_coordinator
self._event_emitter = event_emitter
@@ -86,37 +83,31 @@ class Dispatcher:
def _dispatcher_loop(self) -> None:
"""Main dispatcher loop."""
try:
self._process_commands()
while not self._stop_event.is_set():
commands_checked = False
should_check_commands = False
should_break = False
if (
self._execution_coordinator.aborted
or self._execution_coordinator.paused
or self._execution_coordinator.execution_complete
):
break
if self._execution_coordinator.is_execution_complete():
should_check_commands = True
should_break = True
else:
# Check for scaling
self._execution_coordinator.check_scaling()
self._execution_coordinator.check_scaling()
try:
event = self._event_queue.get(timeout=0.1)
self._event_handler.dispatch(event)
self._event_queue.task_done()
self._process_commands(event)
except queue.Empty:
time.sleep(0.1)
# Process events
try:
event = self._event_queue.get(timeout=0.1)
# Route to the event handler
self._event_handler.dispatch(event)
should_check_commands = self._should_check_commands(event)
self._event_queue.task_done()
except queue.Empty:
# Process commands even when no new events arrive so abort requests are not missed
should_check_commands = True
time.sleep(0.1)
if should_check_commands and not commands_checked:
self._execution_coordinator.check_commands()
commands_checked = True
if should_break:
if not commands_checked:
self._execution_coordinator.check_commands()
self._process_commands()
while True:
try:
event = self._event_queue.get(block=False)
self._event_handler.dispatch(event)
self._event_queue.task_done()
except queue.Empty:
break
except Exception as e:
@@ -129,6 +120,6 @@ class Dispatcher:
if self._event_emitter:
self._event_emitter.mark_complete()
def _should_check_commands(self, event: GraphNodeEventBase) -> bool:
"""Return True if the event represents a node completion."""
return isinstance(event, self._COMMAND_TRIGGER_EVENTS)
def _process_commands(self, event: GraphNodeEventBase | None = None):
if event is None or isinstance(event, self._COMMAND_TRIGGER_EVENTS):
self._execution_coordinator.process_commands()
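
Taken with the coordinator changes below, the loop above now processes any commands queued before start, breaks as soon as the execution is aborted, paused, or complete, re-checks commands after node-completion events via _process_commands(event), and finally drains the remaining events so nothing queued is lost after a pause or abort. A condensed, hypothetical restatement of that control flow (not the actual Dify classes):

import queue
import time


def dispatcher_loop(event_queue, handler, coordinator, process_commands):
    """Hypothetical, condensed restatement of the reworked loop.

    `coordinator` is assumed to expose aborted / paused / execution_complete and
    check_scaling(); `process_commands` stands in for Dispatcher._process_commands.
    """
    process_commands(None)  # consume commands that were queued before the loop started
    while not (coordinator.aborted or coordinator.paused or coordinator.execution_complete):
        coordinator.check_scaling()
        try:
            event = event_queue.get(timeout=0.1)
            handler.dispatch(event)
            event_queue.task_done()
            process_commands(event)  # only node-completion events actually trigger a check
        except queue.Empty:
            time.sleep(0.1)
    # After pause/abort/completion, consume whatever is still queued so events are not lost.
    process_commands(None)
    while True:
        try:
            event = event_queue.get(block=False)
            handler.dispatch(event)
            event_queue.task_done()
        except queue.Empty:
            break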

View File

@@ -40,7 +40,7 @@ class ExecutionCoordinator:
self._command_processor = command_processor
self._worker_pool = worker_pool
def check_commands(self) -> None:
def process_commands(self) -> None:
"""Process any pending commands."""
self._command_processor.process_commands()
@@ -48,24 +48,16 @@ class ExecutionCoordinator:
"""Check and perform worker scaling if needed."""
self._worker_pool.check_and_scale()
def is_execution_complete(self) -> bool:
"""
Check if execution is complete.
Returns:
True if execution is complete
"""
# Treat paused, aborted, or failed executions as terminal states
if self._graph_execution.is_paused:
return True
if self._graph_execution.aborted or self._graph_execution.has_error:
return True
@property
def execution_complete(self):
return self._state_manager.is_execution_complete()
@property
def is_paused(self) -> bool:
def aborted(self):
return self._graph_execution.aborted or self._graph_execution.has_error
@property
def paused(self) -> bool:
"""Expose whether the underlying graph execution is paused."""
return self._graph_execution.is_paused

View File

@@ -2,7 +2,6 @@ from collections.abc import Callable, Mapping, Sequence
from typing import TYPE_CHECKING, Any, TypeAlias
from core.variables import SegmentType, Variable
from core.variables.segments import BooleanSegment
from core.workflow.constants import CONVERSATION_VARIABLE_NODE_ID
from core.workflow.conversation_variable_updater import ConversationVariableUpdater
from core.workflow.entities import GraphInitParams
@@ -12,7 +11,6 @@ from core.workflow.nodes.base.entities import BaseNodeData, RetryConfig
from core.workflow.nodes.base.node import Node
from core.workflow.nodes.variable_assigner.common import helpers as common_helpers
from core.workflow.nodes.variable_assigner.common.exc import VariableOperatorNodeError
from factories import variable_factory
from ..common.impl import conversation_variable_updater_factory
from .node_data import VariableAssignerData, WriteMode
@@ -116,7 +114,7 @@ class VariableAssignerNode(Node):
updated_variable = original_variable.model_copy(update={"value": updated_value})
case WriteMode.CLEAR:
income_value = get_zero_value(original_variable.value_type)
income_value = SegmentType.get_zero_value(original_variable.value_type)
updated_variable = original_variable.model_copy(update={"value": income_value.to_object()})
# Over write the variable.
@@ -143,24 +141,3 @@ class VariableAssignerNode(Node):
process_data=common_helpers.set_updated_variables({}, updated_variables),
outputs={},
)
def get_zero_value(t: SegmentType):
# TODO(QuantumGhost): this should be a method of `SegmentType`.
match t:
case SegmentType.ARRAY_OBJECT | SegmentType.ARRAY_STRING | SegmentType.ARRAY_NUMBER | SegmentType.ARRAY_BOOLEAN:
return variable_factory.build_segment_with_type(t, [])
case SegmentType.OBJECT:
return variable_factory.build_segment({})
case SegmentType.STRING:
return variable_factory.build_segment("")
case SegmentType.INTEGER:
return variable_factory.build_segment(0)
case SegmentType.FLOAT:
return variable_factory.build_segment(0.0)
case SegmentType.NUMBER:
return variable_factory.build_segment(0)
case SegmentType.BOOLEAN:
return BooleanSegment(value=False)
case _:
raise VariableOperatorNodeError(f"unsupported variable type: {t}")

View File

@@ -1,14 +0,0 @@
from core.variables import SegmentType
# Note: This mapping is duplicated with `get_zero_value`. Consider refactoring to avoid redundancy.
EMPTY_VALUE_MAPPING = {
SegmentType.STRING: "",
SegmentType.NUMBER: 0,
SegmentType.BOOLEAN: False,
SegmentType.OBJECT: {},
SegmentType.ARRAY_ANY: [],
SegmentType.ARRAY_STRING: [],
SegmentType.ARRAY_NUMBER: [],
SegmentType.ARRAY_OBJECT: [],
SegmentType.ARRAY_BOOLEAN: [],
}

View File

@@ -16,7 +16,6 @@ from core.workflow.nodes.variable_assigner.common.exc import VariableOperatorNod
from core.workflow.nodes.variable_assigner.common.impl import conversation_variable_updater_factory
from . import helpers
from .constants import EMPTY_VALUE_MAPPING
from .entities import VariableAssignerNodeData, VariableOperationItem
from .enums import InputType, Operation
from .exc import (
@@ -249,7 +248,7 @@ class VariableAssignerNode(Node):
case Operation.OVER_WRITE:
return value
case Operation.CLEAR:
return EMPTY_VALUE_MAPPING[variable.value_type]
return SegmentType.get_zero_value(variable.value_type).to_object()
case Operation.APPEND:
return variable.value + [value]
case Operation.EXTEND:

View File

@@ -3,7 +3,7 @@ import io
import json
from collections.abc import Generator
from google.cloud import storage as google_cloud_storage
from google.cloud import storage as google_cloud_storage # type: ignore
from configs import dify_config
from extensions.storage.base_storage import BaseStorage

View File

@@ -116,6 +116,7 @@ app_partial_fields = {
"access_mode": fields.String,
"create_user_name": fields.String,
"author_name": fields.String,
"has_draft_trigger": fields.Boolean,
}

View File

@@ -21,6 +21,7 @@ from configs import dify_config
from core.rag.index_processor.constant.built_in_field import BuiltInField, MetadataDataSource
from core.rag.retrieval.retrieval_methods import RetrievalMethod
from extensions.ext_storage import storage
from models.base import TypeBase
from services.entities.knowledge_entities.knowledge_entities import ParentMode, Rule
from .account import Account
@@ -224,7 +225,7 @@ class Dataset(Base):
ExternalKnowledgeApis.id == external_knowledge_binding.external_knowledge_api_id
)
)
if not external_knowledge_api:
if external_knowledge_api is None or external_knowledge_api.settings is None:
return None
return {
"external_knowledge_id": external_knowledge_binding.external_knowledge_id,
@@ -906,17 +907,21 @@ class ChildChunk(Base):
return db.session.query(DocumentSegment).where(DocumentSegment.id == self.segment_id).first()
class AppDatasetJoin(Base):
class AppDatasetJoin(TypeBase):
__tablename__ = "app_dataset_joins"
__table_args__ = (
sa.PrimaryKeyConstraint("id", name="app_dataset_join_pkey"),
sa.Index("app_dataset_join_app_dataset_idx", "dataset_id", "app_id"),
)
id = mapped_column(StringUUID, primary_key=True, nullable=False, server_default=sa.text("uuid_generate_v4()"))
app_id = mapped_column(StringUUID, nullable=False)
dataset_id = mapped_column(StringUUID, nullable=False)
created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=sa.func.current_timestamp())
id: Mapped[str] = mapped_column(
StringUUID, primary_key=True, nullable=False, server_default=sa.text("uuid_generate_v4()"), init=False
)
app_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
dataset_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
created_at: Mapped[datetime] = mapped_column(
DateTime, nullable=False, server_default=sa.func.current_timestamp(), init=False
)
@property
def app(self):
@@ -940,18 +945,20 @@ class DatasetQuery(Base):
created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=sa.func.current_timestamp())
class DatasetKeywordTable(Base):
class DatasetKeywordTable(TypeBase):
__tablename__ = "dataset_keyword_tables"
__table_args__ = (
sa.PrimaryKeyConstraint("id", name="dataset_keyword_table_pkey"),
sa.Index("dataset_keyword_table_dataset_id_idx", "dataset_id"),
)
id = mapped_column(StringUUID, primary_key=True, server_default=sa.text("uuid_generate_v4()"))
dataset_id = mapped_column(StringUUID, nullable=False, unique=True)
keyword_table = mapped_column(sa.Text, nullable=False)
data_source_type = mapped_column(
String(255), nullable=False, server_default=sa.text("'database'::character varying")
id: Mapped[str] = mapped_column(
StringUUID, primary_key=True, server_default=sa.text("uuid_generate_v4()"), init=False
)
dataset_id: Mapped[str] = mapped_column(StringUUID, nullable=False, unique=True)
keyword_table: Mapped[str] = mapped_column(sa.Text, nullable=False)
data_source_type: Mapped[str] = mapped_column(
String(255), nullable=False, server_default=sa.text("'database'::character varying"), default="database"
)
@property
@@ -1049,19 +1056,23 @@ class TidbAuthBinding(Base):
created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=func.current_timestamp())
class Whitelist(Base):
class Whitelist(TypeBase):
__tablename__ = "whitelists"
__table_args__ = (
sa.PrimaryKeyConstraint("id", name="whitelists_pkey"),
sa.Index("whitelists_tenant_idx", "tenant_id"),
)
id = mapped_column(StringUUID, primary_key=True, server_default=sa.text("uuid_generate_v4()"))
tenant_id = mapped_column(StringUUID, nullable=True)
id: Mapped[str] = mapped_column(
StringUUID, primary_key=True, server_default=sa.text("uuid_generate_v4()"), init=False
)
tenant_id: Mapped[str | None] = mapped_column(StringUUID, nullable=True)
category: Mapped[str] = mapped_column(String(255), nullable=False)
created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=func.current_timestamp())
created_at: Mapped[datetime] = mapped_column(
DateTime, nullable=False, server_default=func.current_timestamp(), init=False
)
class DatasetPermission(Base):
class DatasetPermission(TypeBase):
__tablename__ = "dataset_permissions"
__table_args__ = (
sa.PrimaryKeyConstraint("id", name="dataset_permission_pkey"),
@@ -1070,15 +1081,21 @@ class DatasetPermission(Base):
sa.Index("idx_dataset_permissions_tenant_id", "tenant_id"),
)
id = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"), primary_key=True)
dataset_id = mapped_column(StringUUID, nullable=False)
account_id = mapped_column(StringUUID, nullable=False)
tenant_id = mapped_column(StringUUID, nullable=False)
has_permission: Mapped[bool] = mapped_column(sa.Boolean, nullable=False, server_default=sa.text("true"))
created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=func.current_timestamp())
id: Mapped[str] = mapped_column(
StringUUID, server_default=sa.text("uuid_generate_v4()"), primary_key=True, init=False
)
dataset_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
account_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
has_permission: Mapped[bool] = mapped_column(
sa.Boolean, nullable=False, server_default=sa.text("true"), default=True
)
created_at: Mapped[datetime] = mapped_column(
DateTime, nullable=False, server_default=func.current_timestamp(), init=False
)
class ExternalKnowledgeApis(Base):
class ExternalKnowledgeApis(TypeBase):
__tablename__ = "external_knowledge_apis"
__table_args__ = (
sa.PrimaryKeyConstraint("id", name="external_knowledge_apis_pkey"),
@@ -1086,16 +1103,20 @@ class ExternalKnowledgeApis(Base):
sa.Index("external_knowledge_apis_name_idx", "name"),
)
id = mapped_column(StringUUID, nullable=False, server_default=sa.text("uuid_generate_v4()"))
id: Mapped[str] = mapped_column(
StringUUID, nullable=False, server_default=sa.text("uuid_generate_v4()"), init=False
)
name: Mapped[str] = mapped_column(String(255), nullable=False)
description: Mapped[str] = mapped_column(String(255), nullable=False)
tenant_id = mapped_column(StringUUID, nullable=False)
settings = mapped_column(sa.Text, nullable=True)
created_by = mapped_column(StringUUID, nullable=False)
created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=func.current_timestamp())
updated_by = mapped_column(StringUUID, nullable=True)
tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
settings: Mapped[str | None] = mapped_column(sa.Text, nullable=True)
created_by: Mapped[str] = mapped_column(StringUUID, nullable=False)
created_at: Mapped[datetime] = mapped_column(
DateTime, nullable=False, server_default=func.current_timestamp(), init=False
)
updated_by: Mapped[str | None] = mapped_column(StringUUID, nullable=True)
updated_at: Mapped[datetime] = mapped_column(
DateTime, nullable=False, server_default=func.current_timestamp(), onupdate=func.current_timestamp()
DateTime, nullable=False, server_default=func.current_timestamp(), onupdate=func.current_timestamp(), init=False
)
def to_dict(self) -> dict[str, Any]:
@@ -1173,7 +1194,7 @@ class DatasetAutoDisableLog(Base):
)
class RateLimitLog(Base):
class RateLimitLog(TypeBase):
__tablename__ = "rate_limit_logs"
__table_args__ = (
sa.PrimaryKeyConstraint("id", name="rate_limit_log_pkey"),
@@ -1181,12 +1202,12 @@ class RateLimitLog(Base):
sa.Index("rate_limit_log_operation_idx", "operation"),
)
id = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"))
tenant_id = mapped_column(StringUUID, nullable=False)
id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"), init=False)
tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
subscription_plan: Mapped[str] = mapped_column(String(255), nullable=False)
operation: Mapped[str] = mapped_column(String(255), nullable=False)
created_at: Mapped[datetime] = mapped_column(
DateTime, nullable=False, server_default=sa.text("CURRENT_TIMESTAMP(0)")
DateTime, nullable=False, server_default=sa.text("CURRENT_TIMESTAMP(0)"), init=False
)

View File

@@ -14,7 +14,7 @@ from core.trigger.entities.api_entities import TriggerProviderSubscriptionApiEnt
from core.trigger.entities.entities import Subscription
from core.trigger.utils.endpoint import generate_plugin_trigger_endpoint_url, generate_webhook_trigger_endpoint
from libs.datetime_utils import naive_utc_now
from models.base import Base
from models.base import Base, TypeBase
from models.engine import db
from models.enums import AppTriggerStatus, AppTriggerType, CreatorUserRole, WorkflowTriggerStatus
from models.model import Account
@@ -399,7 +399,7 @@ class AppTrigger(Base):
)
class WorkflowSchedulePlan(Base):
class WorkflowSchedulePlan(TypeBase):
"""
Workflow Schedule Configuration
@@ -425,7 +425,7 @@ class WorkflowSchedulePlan(Base):
sa.Index("workflow_schedule_plan_next_idx", "next_run_at"),
)
id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuidv7()"))
id: Mapped[str] = mapped_column(StringUUID, primary_key=True, server_default=sa.text("uuidv7()"), init=False)
app_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
node_id: Mapped[str] = mapped_column(String(64), nullable=False)
tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
@@ -436,9 +436,11 @@ class WorkflowSchedulePlan(Base):
# Schedule control
next_run_at: Mapped[datetime | None] = mapped_column(DateTime, nullable=True)
created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=func.current_timestamp())
created_at: Mapped[datetime] = mapped_column(
DateTime, nullable=False, server_default=func.current_timestamp(), init=False
)
updated_at: Mapped[datetime] = mapped_column(
DateTime, nullable=False, server_default=func.current_timestamp(), onupdate=func.current_timestamp()
DateTime, nullable=False, server_default=func.current_timestamp(), onupdate=func.current_timestamp(), init=False
)
def to_dict(self) -> dict[str, Any]:

View File

@@ -1,6 +1,6 @@
[project]
name = "dify-api"
version = "1.9.2"
version = "1.10.0"
requires-python = ">=3.11,<3.13"
dependencies = [
@@ -37,7 +37,7 @@ dependencies = [
"numpy~=1.26.4",
"openpyxl~=3.1.5",
"opik~=1.8.72",
"litellm==1.77.1", # Pinned to avoid madoka dependency issue
"litellm==1.77.1", # Pinned to avoid madoka dependency issue
"opentelemetry-api==1.27.0",
"opentelemetry-distro==0.48b0",
"opentelemetry-exporter-otlp==1.27.0",
@@ -79,7 +79,6 @@ dependencies = [
"tiktoken~=0.9.0",
"transformers~=4.56.1",
"unstructured[docx,epub,md,ppt,pptx]~=0.16.1",
"weave~=0.51.0",
"yarl~=1.18.3",
"webvtt-py~=0.5.1",
"sseclient-py~=1.8.0",
@@ -90,6 +89,7 @@ dependencies = [
"croniter>=6.0.0",
"weaviate-client==4.17.0",
"apscheduler>=3.11.0",
"weave>=0.52.16",
]
# Before adding new dependency, consider place it in
# alphabet order (a-z) and suitable group.

View File

@@ -62,7 +62,7 @@ class ExternalDatasetService:
tenant_id=tenant_id,
created_by=user_id,
updated_by=user_id,
name=args.get("name"),
name=str(args.get("name")),
description=args.get("description", ""),
settings=json.dumps(args.get("settings"), ensure_ascii=False),
)
@@ -163,7 +163,7 @@ class ExternalDatasetService:
external_knowledge_api = (
db.session.query(ExternalKnowledgeApis).filter_by(id=external_knowledge_api_id, tenant_id=tenant_id).first()
)
if external_knowledge_api is None:
if external_knowledge_api is None or external_knowledge_api.settings is None:
raise ValueError("api template not found")
settings = json.loads(external_knowledge_api.settings)
for setting in settings:
@@ -290,7 +290,7 @@ class ExternalDatasetService:
.filter_by(id=external_knowledge_binding.external_knowledge_api_id)
.first()
)
if not external_knowledge_api:
if external_knowledge_api is None or external_knowledge_api.settings is None:
raise ValueError("external api template not found")
settings = json.loads(external_knowledge_api.settings)

View File

@@ -1,5 +1,5 @@
import json
from typing import Any
from typing import Any, TypedDict
from core.app.app_config.entities import (
DatasetEntity,
@@ -28,6 +28,12 @@ from models.model import App, AppMode, AppModelConfig
from models.workflow import Workflow, WorkflowType
class _NodeType(TypedDict):
id: str
position: None
data: dict[str, Any]
class WorkflowConverter:
"""
App Convert to Workflow Mode
@@ -217,7 +223,7 @@ class WorkflowConverter:
return app_config
def _convert_to_start_node(self, variables: list[VariableEntity]):
def _convert_to_start_node(self, variables: list[VariableEntity]) -> _NodeType:
"""
Convert to Start Node
:param variables: list of variables
@@ -235,7 +241,7 @@ class WorkflowConverter:
def _convert_to_http_request_node(
self, app_model: App, variables: list[VariableEntity], external_data_variables: list[ExternalDataVariableEntity]
) -> tuple[list[dict], dict[str, str]]:
) -> tuple[list[_NodeType], dict[str, str]]:
"""
Convert API Based Extension to HTTP Request Node
:param app_model: App instance
@@ -285,7 +291,7 @@ class WorkflowConverter:
request_body_json = json.dumps(request_body)
request_body_json = request_body_json.replace(r"\{\{", "{{").replace(r"\}\}", "}}")
http_request_node = {
http_request_node: _NodeType = {
"id": f"http_request_{index}",
"position": None,
"data": {
@@ -303,7 +309,7 @@ class WorkflowConverter:
nodes.append(http_request_node)
# append code node for response body parsing
code_node: dict[str, Any] = {
code_node: _NodeType = {
"id": f"code_{index}",
"position": None,
"data": {
@@ -326,7 +332,7 @@ class WorkflowConverter:
def _convert_to_knowledge_retrieval_node(
self, new_app_mode: AppMode, dataset_config: DatasetEntity, model_config: ModelConfigEntity
) -> dict | None:
) -> _NodeType | None:
"""
Convert datasets to Knowledge Retrieval Node
:param new_app_mode: new app mode
@@ -384,7 +390,7 @@ class WorkflowConverter:
prompt_template: PromptTemplateEntity,
file_upload: FileUploadConfig | None = None,
external_data_variable_node_mapping: dict[str, str] | None = None,
):
) -> _NodeType:
"""
Convert to LLM Node
:param original_app_mode: original app mode
@@ -561,7 +567,7 @@ class WorkflowConverter:
return template
def _convert_to_end_node(self):
def _convert_to_end_node(self) -> _NodeType:
"""
Convert to End Node
:return:
@@ -577,7 +583,7 @@ class WorkflowConverter:
},
}
def _convert_to_answer_node(self):
def _convert_to_answer_node(self) -> _NodeType:
"""
Convert to Answer Node
:return:
@@ -598,7 +604,7 @@ class WorkflowConverter:
"""
return {"id": f"{source}-{target}", "source": source, "target": target}
def _append_node(self, graph: dict, node: dict):
def _append_node(self, graph: dict[str, Any], node: _NodeType):
"""
Append Node to Graph

View File

@@ -10,20 +10,17 @@ from sqlalchemy.orm import Session, sessionmaker
from core.app.app_config.entities import VariableEntityType
from core.app.apps.advanced_chat.app_config_manager import AdvancedChatAppConfigManager
from core.app.apps.workflow.app_config_manager import WorkflowAppConfigManager
from core.app.entities.app_invoke_entities import InvokeFrom
from core.file import File
from core.repositories import DifyCoreRepositoryFactory
from core.variables import Variable
from core.variables.variables import VariableUnion
from core.workflow.entities import GraphInitParams, GraphRuntimeState, VariablePool, WorkflowNodeExecution
from core.workflow.entities import VariablePool, WorkflowNodeExecution
from core.workflow.enums import ErrorStrategy, WorkflowNodeExecutionMetadataKey, WorkflowNodeExecutionStatus
from core.workflow.errors import WorkflowNodeRunFailedError
from core.workflow.graph.graph import Graph
from core.workflow.graph_events import GraphNodeEventBase, NodeRunFailedEvent, NodeRunSucceededEvent
from core.workflow.node_events import NodeRunResult
from core.workflow.nodes import NodeType
from core.workflow.nodes.base.node import Node
from core.workflow.nodes.node_factory import DifyNodeFactory
from core.workflow.nodes.node_mapping import LATEST_VERSION, NODE_TYPE_CLASSES_MAPPING
from core.workflow.nodes.start.entities import StartNodeData
from core.workflow.system_variable import SystemVariable
@@ -34,7 +31,6 @@ from extensions.ext_storage import storage
from factories.file_factory import build_from_mapping, build_from_mappings
from libs.datetime_utils import naive_utc_now
from models import Account
from models.enums import UserFrom
from models.model import App, AppMode
from models.tools import WorkflowToolProvider
from models.workflow import Workflow, WorkflowNodeExecutionModel, WorkflowNodeExecutionTriggeredFrom, WorkflowType
@@ -215,7 +211,7 @@ class WorkflowService:
self.validate_features_structure(app_model=app_model, features=features)
# validate graph structure
self.validate_graph_structure(user_id=account.id, app_model=app_model, graph=graph)
self.validate_graph_structure(graph=graph)
# create draft workflow if not found
if not workflow:
@@ -274,7 +270,7 @@ class WorkflowService:
self._validate_workflow_credentials(draft_workflow)
# validate graph structure
self.validate_graph_structure(user_id=account.id, app_model=app_model, graph=draft_workflow.graph_dict)
self.validate_graph_structure(graph=draft_workflow.graph_dict)
# create new workflow
workflow = Workflow.new(
@@ -905,42 +901,30 @@ class WorkflowService:
return new_app
def validate_graph_structure(self, user_id: str, app_model: App, graph: Mapping[str, Any]):
def validate_graph_structure(self, graph: Mapping[str, Any]):
"""
Validate workflow graph structure by instantiating the Graph object.
Validate workflow graph structure.
This leverages the built-in graph validators (including trigger/UserInput exclusivity)
and raises any structural errors before persisting the workflow.
This performs a lightweight validation on the graph, checking for structural
inconsistencies such as the coexistence of start and trigger nodes.
"""
node_configs = graph.get("nodes", [])
node_configs = cast(list[dict[str, object]], node_configs)
node_configs = cast(list[dict[str, Any]], node_configs)
# is empty graph
if not node_configs:
return
workflow_id = app_model.workflow_id or "UNKNOWN"
Graph.init(
graph_config=graph,
# TODO(Mairuis): Add root node id
root_node_id=None,
node_factory=DifyNodeFactory(
graph_init_params=GraphInitParams(
tenant_id=app_model.tenant_id,
app_id=app_model.id,
workflow_id=workflow_id,
graph_config=graph,
user_id=user_id,
user_from=UserFrom.ACCOUNT,
invoke_from=InvokeFrom.VALIDATION,
call_depth=0,
),
graph_runtime_state=GraphRuntimeState(
variable_pool=VariablePool(),
start_at=time.perf_counter(),
),
),
)
node_types: set[NodeType] = set()
for node in node_configs:
node_type = node.get("data", {}).get("type")
if node_type:
node_types.add(NodeType(node_type))
# start node and trigger node cannot coexist
if NodeType.START in node_types:
if any(nt.is_trigger_node for nt in node_types):
raise ValueError("Start node and trigger nodes cannot coexist in the same workflow")
def validate_features_structure(self, app_model: App, features: dict):
if app_model.mode == AppMode.ADVANCED_CHAT:

View File

@@ -13,13 +13,13 @@ from sqlalchemy import select
from sqlalchemy.orm import Session, sessionmaker
from configs import dify_config
from core.app.apps.workflow.app_generator import WorkflowAppGenerator
from core.app.apps.workflow.app_generator import SKIP_PREPARE_USER_INPUTS_KEY, WorkflowAppGenerator
from core.app.entities.app_invoke_entities import InvokeFrom
from core.app.layers.timeslice_layer import TimeSliceLayer
from core.app.layers.trigger_post_layer import TriggerPostLayer
from extensions.ext_database import db
from models.account import Account
from models.enums import CreatorUserRole, WorkflowTriggerStatus
from models.enums import AppTriggerType, CreatorUserRole, WorkflowTriggerStatus
from models.model import App, EndUser, Tenant
from models.trigger import WorkflowTriggerLog
from models.workflow import Workflow
@@ -81,6 +81,19 @@ def execute_workflow_sandbox(task_data_dict: dict[str, Any]):
)
def _build_generator_args(trigger_data: TriggerData) -> dict[str, Any]:
"""Build args passed into WorkflowAppGenerator.generate for Celery executions."""
args: dict[str, Any] = {
"inputs": dict(trigger_data.inputs),
"files": list(trigger_data.files),
}
if trigger_data.trigger_type == AppTriggerType.TRIGGER_WEBHOOK:
args[SKIP_PREPARE_USER_INPUTS_KEY] = True # Webhooks already provide structured inputs
return args
def _execute_workflow_common(
task_data: WorkflowTaskData,
cfs_plan_scheduler: AsyncWorkflowCFSPlanScheduler,
@@ -128,7 +141,7 @@ def _execute_workflow_common(
generator = WorkflowAppGenerator()
# Prepare args matching AppGenerateService.generate format
args: dict[str, Any] = {"inputs": dict(trigger_data.inputs), "files": list(trigger_data.files)}
args = _build_generator_args(trigger_data)
# If workflow_id was specified, add it to args
if trigger_data.workflow_id:

View File

@@ -9,7 +9,7 @@ from core.rag.index_processor.index_processor_factory import IndexProcessorFacto
from core.tools.utils.web_reader_tool import get_image_upload_file_ids
from extensions.ext_database import db
from extensions.ext_storage import storage
from models.dataset import Dataset, DocumentSegment
from models.dataset import Dataset, DatasetMetadataBinding, DocumentSegment
from models.model import UploadFile
logger = logging.getLogger(__name__)
@@ -37,6 +37,11 @@ def batch_clean_document_task(document_ids: list[str], dataset_id: str, doc_form
if not dataset:
raise Exception("Document has no dataset")
db.session.query(DatasetMetadataBinding).where(
DatasetMetadataBinding.dataset_id == dataset_id,
DatasetMetadataBinding.document_id.in_(document_ids),
).delete(synchronize_session=False)
segments = db.session.scalars(
select(DocumentSegment).where(DocumentSegment.document_id.in_(document_ids))
).all()
@@ -71,7 +76,8 @@ def batch_clean_document_task(document_ids: list[str], dataset_id: str, doc_form
except Exception:
logger.exception("Delete file failed when document deleted, file_id: %s", file.id)
db.session.delete(file)
db.session.commit()
db.session.commit()
end_at = time.perf_counter()
logger.info(

View File

@@ -0,0 +1,49 @@
"""Primarily used for testing merged cell scenarios"""
from docx import Document
from core.rag.extractor.word_extractor import WordExtractor
def _generate_table_with_merged_cells():
doc = Document()
"""
The table looks like this:
+-----+-----+-----+
| 1-1 & 1-2 | 1-3 |
+-----+-----+-----+
| 2-1 | 2-2 | 2-3 |
| & |-----+-----+
| 3-1 | 3-2 | 3-3 |
+-----+-----+-----+
"""
table = doc.add_table(rows=3, cols=3)
table.style = "Table Grid"
for i in range(3):
for j in range(3):
cell = table.cell(i, j)
cell.text = f"{i + 1}-{j + 1}"
# Merge cells
cell_0_0 = table.cell(0, 0)
cell_0_1 = table.cell(0, 1)
merged_cell_1 = cell_0_0.merge(cell_0_1)
merged_cell_1.text = "1-1 & 1-2"
cell_1_0 = table.cell(1, 0)
cell_2_0 = table.cell(2, 0)
merged_cell_2 = cell_1_0.merge(cell_2_0)
merged_cell_2.text = "2-1 & 3-1"
ground_truth = [["1-1 & 1-2", "", "1-3"], ["2-1 & 3-1", "2-2", "2-3"], ["2-1 & 3-1", "3-2", "3-3"]]
return doc.tables[0], ground_truth
def test_parse_row():
table, gt = _generate_table_with_merged_cells()
extractor = object.__new__(WordExtractor)
for idx, row in enumerate(table.rows):
assert extractor._parse_row(row, {}, 3) == gt[idx]

View File

@@ -179,7 +179,7 @@ class TestTenantIsolatedTaskQueue:
"""Test pushing empty task list."""
sample_queue.push_tasks([])
mock_redis.lpush.assert_called_once_with("tenant_self_test-key_task_queue:tenant-123")
mock_redis.lpush.assert_not_called()
@patch("core.rag.pipeline.queue.redis_client")
def test_pull_tasks_default_count(self, mock_redis, sample_queue):

View File

@@ -1,3 +1,5 @@
import pytest
from core.variables.types import ArrayValidation, SegmentType
@@ -83,3 +85,81 @@ class TestSegmentTypeIsValidArrayValidation:
value = [1, 2, 3]
# validation is None, skip
assert SegmentType.ARRAY_STRING.is_valid(value, array_validation=ArrayValidation.NONE)
class TestSegmentTypeGetZeroValue:
"""
Test class for SegmentType.get_zero_value static method.
Provides comprehensive coverage of all supported SegmentType values to ensure
correct zero value generation for each type.
"""
def test_array_types_return_empty_list(self):
"""Test that all array types return empty list segments."""
array_types = [
SegmentType.ARRAY_ANY,
SegmentType.ARRAY_STRING,
SegmentType.ARRAY_NUMBER,
SegmentType.ARRAY_OBJECT,
SegmentType.ARRAY_BOOLEAN,
]
for seg_type in array_types:
result = SegmentType.get_zero_value(seg_type)
assert result.value == []
assert result.value_type == seg_type
def test_object_returns_empty_dict(self):
"""Test that OBJECT type returns empty dictionary segment."""
result = SegmentType.get_zero_value(SegmentType.OBJECT)
assert result.value == {}
assert result.value_type == SegmentType.OBJECT
def test_string_returns_empty_string(self):
"""Test that STRING type returns empty string segment."""
result = SegmentType.get_zero_value(SegmentType.STRING)
assert result.value == ""
assert result.value_type == SegmentType.STRING
def test_integer_returns_zero(self):
"""Test that INTEGER type returns zero segment."""
result = SegmentType.get_zero_value(SegmentType.INTEGER)
assert result.value == 0
assert result.value_type == SegmentType.INTEGER
def test_float_returns_zero_point_zero(self):
"""Test that FLOAT type returns 0.0 segment."""
result = SegmentType.get_zero_value(SegmentType.FLOAT)
assert result.value == 0.0
assert result.value_type == SegmentType.FLOAT
def test_number_returns_zero(self):
"""Test that NUMBER type returns zero segment."""
result = SegmentType.get_zero_value(SegmentType.NUMBER)
assert result.value == 0
# NUMBER type with integer value returns INTEGER segment type
# (NUMBER is a union type that can be INTEGER or FLOAT)
assert result.value_type == SegmentType.INTEGER
# Verify that exposed_type returns NUMBER for frontend compatibility
assert result.value_type.exposed_type() == SegmentType.NUMBER
def test_boolean_returns_false(self):
"""Test that BOOLEAN type returns False segment."""
result = SegmentType.get_zero_value(SegmentType.BOOLEAN)
assert result.value is False
assert result.value_type == SegmentType.BOOLEAN
def test_unsupported_types_raise_value_error(self):
"""Test that unsupported types raise ValueError."""
unsupported_types = [
SegmentType.SECRET,
SegmentType.FILE,
SegmentType.NONE,
SegmentType.GROUP,
SegmentType.ARRAY_FILE,
]
for seg_type in unsupported_types:
with pytest.raises(ValueError, match="unsupported variable type"):
SegmentType.get_zero_value(seg_type)

View File

@@ -0,0 +1,189 @@
"""Tests for dispatcher command checking behavior."""
from __future__ import annotations
import queue
from datetime import datetime
from unittest import mock
from core.workflow.entities.pause_reason import SchedulingPause
from core.workflow.enums import NodeType, WorkflowNodeExecutionStatus
from core.workflow.graph_engine.event_management.event_handlers import EventHandler
from core.workflow.graph_engine.orchestration.dispatcher import Dispatcher
from core.workflow.graph_engine.orchestration.execution_coordinator import ExecutionCoordinator
from core.workflow.graph_events import (
GraphNodeEventBase,
NodeRunPauseRequestedEvent,
NodeRunStartedEvent,
NodeRunSucceededEvent,
)
from core.workflow.node_events import NodeRunResult
def test_dispatcher_should_consume_remains_events_after_pause():
event_queue = queue.Queue()
event_queue.put(
GraphNodeEventBase(
id="test",
node_id="test",
node_type=NodeType.START,
)
)
event_handler = mock.Mock(spec=EventHandler)
execution_coordinator = mock.Mock(spec=ExecutionCoordinator)
execution_coordinator.paused.return_value = True
dispatcher = Dispatcher(
event_queue=event_queue,
event_handler=event_handler,
execution_coordinator=execution_coordinator,
)
dispatcher._dispatcher_loop()
assert event_queue.empty()
class _StubExecutionCoordinator:
"""Stub execution coordinator that tracks command checks."""
def __init__(self) -> None:
self.command_checks = 0
self.scaling_checks = 0
self.execution_complete = False
self.failed = False
self._paused = False
def process_commands(self) -> None:
self.command_checks += 1
def check_scaling(self) -> None:
self.scaling_checks += 1
@property
def paused(self) -> bool:
return self._paused
@property
def aborted(self) -> bool:
return False
def mark_complete(self) -> None:
self.execution_complete = True
def mark_failed(self, error: Exception) -> None: # pragma: no cover - defensive, not triggered in tests
self.failed = True
class _StubEventHandler:
"""Minimal event handler that marks execution complete after handling an event."""
def __init__(self, coordinator: _StubExecutionCoordinator) -> None:
self._coordinator = coordinator
self.events = []
def dispatch(self, event) -> None:
self.events.append(event)
self._coordinator.mark_complete()
def _run_dispatcher_for_event(event) -> int:
"""Run the dispatcher loop for a single event and return command check count."""
event_queue: queue.Queue = queue.Queue()
event_queue.put(event)
coordinator = _StubExecutionCoordinator()
event_handler = _StubEventHandler(coordinator)
dispatcher = Dispatcher(
event_queue=event_queue,
event_handler=event_handler,
execution_coordinator=coordinator,
)
dispatcher._dispatcher_loop()
return coordinator.command_checks
def _make_started_event() -> NodeRunStartedEvent:
return NodeRunStartedEvent(
id="start-event",
node_id="node-1",
node_type=NodeType.CODE,
node_title="Test Node",
start_at=datetime.utcnow(),
)
def _make_succeeded_event() -> NodeRunSucceededEvent:
return NodeRunSucceededEvent(
id="success-event",
node_id="node-1",
node_type=NodeType.CODE,
node_title="Test Node",
start_at=datetime.utcnow(),
node_run_result=NodeRunResult(status=WorkflowNodeExecutionStatus.SUCCEEDED),
)
def test_dispatcher_checks_commands_during_idle_and_on_completion() -> None:
"""Dispatcher polls commands when idle and after completion events."""
started_checks = _run_dispatcher_for_event(_make_started_event())
succeeded_checks = _run_dispatcher_for_event(_make_succeeded_event())
assert started_checks == 2
assert succeeded_checks == 3
class _PauseStubEventHandler:
"""Minimal event handler that marks execution complete after handling an event."""
def __init__(self, coordinator: _StubExecutionCoordinator) -> None:
self._coordinator = coordinator
self.events = []
def dispatch(self, event) -> None:
self.events.append(event)
if isinstance(event, NodeRunPauseRequestedEvent):
self._coordinator.mark_complete()
def test_dispatcher_drain_event_queue():
events = [
NodeRunStartedEvent(
id="start-event",
node_id="node-1",
node_type=NodeType.CODE,
node_title="Code",
start_at=datetime.utcnow(),
),
NodeRunPauseRequestedEvent(
id="pause-event",
node_id="node-1",
node_type=NodeType.CODE,
reason=SchedulingPause(message="test pause"),
),
NodeRunSucceededEvent(
id="success-event",
node_id="node-1",
node_type=NodeType.CODE,
start_at=datetime.utcnow(),
node_run_result=NodeRunResult(status=WorkflowNodeExecutionStatus.SUCCEEDED),
),
]
event_queue: queue.Queue = queue.Queue()
for e in events:
event_queue.put(e)
coordinator = _StubExecutionCoordinator()
event_handler = _PauseStubEventHandler(coordinator)
dispatcher = Dispatcher(
event_queue=event_queue,
event_handler=event_handler,
execution_coordinator=coordinator,
)
dispatcher._dispatcher_loop()
# ensure all events are drained.
assert event_queue.empty()

View File

@@ -3,13 +3,17 @@
import time
from unittest.mock import MagicMock
from core.app.entities.app_invoke_entities import InvokeFrom
from core.workflow.entities.graph_init_params import GraphInitParams
from core.workflow.entities.pause_reason import SchedulingPause
from core.workflow.graph import Graph
from core.workflow.graph_engine import GraphEngine
from core.workflow.graph_engine.command_channels import InMemoryChannel
from core.workflow.graph_engine.entities.commands import AbortCommand, CommandType, PauseCommand
from core.workflow.graph_events import GraphRunAbortedEvent, GraphRunPausedEvent, GraphRunStartedEvent
from core.workflow.nodes.start.start_node import StartNode
from core.workflow.runtime import GraphRuntimeState, VariablePool
from models.enums import UserFrom
def test_abort_command():
@@ -26,11 +30,23 @@ def test_abort_command():
mock_graph.root_node.id = "start"
# Create mock nodes with required attributes - using shared runtime state
mock_start_node = MagicMock()
mock_start_node.state = None
mock_start_node.id = "start"
mock_start_node.graph_runtime_state = shared_runtime_state # Use shared instance
mock_graph.nodes["start"] = mock_start_node
start_node = StartNode(
id="start",
config={"id": "start"},
graph_init_params=GraphInitParams(
tenant_id="test_tenant",
app_id="test_app",
workflow_id="test_workflow",
graph_config={},
user_id="test_user",
user_from=UserFrom.ACCOUNT,
invoke_from=InvokeFrom.DEBUGGER,
call_depth=0,
),
graph_runtime_state=shared_runtime_state,
)
start_node.init_node_data({"title": "start", "variables": []})
mock_graph.nodes["start"] = start_node
# Mock graph methods
mock_graph.get_outgoing_edges = MagicMock(return_value=[])
@@ -124,11 +140,23 @@ def test_pause_command():
mock_graph.root_node = MagicMock()
mock_graph.root_node.id = "start"
mock_start_node = MagicMock()
mock_start_node.state = None
mock_start_node.id = "start"
mock_start_node.graph_runtime_state = shared_runtime_state
mock_graph.nodes["start"] = mock_start_node
start_node = StartNode(
id="start",
config={"id": "start"},
graph_init_params=GraphInitParams(
tenant_id="test_tenant",
app_id="test_app",
workflow_id="test_workflow",
graph_config={},
user_id="test_user",
user_from=UserFrom.ACCOUNT,
invoke_from=InvokeFrom.DEBUGGER,
call_depth=0,
),
graph_runtime_state=shared_runtime_state,
)
start_node.init_node_data({"title": "start", "variables": []})
mock_graph.nodes["start"] = start_node
mock_graph.get_outgoing_edges = MagicMock(return_value=[])
mock_graph.get_incoming_edges = MagicMock(return_value=[])
@@ -153,5 +181,5 @@ def test_pause_command():
assert pause_events[0].reason == SchedulingPause(message="User requested pause")
graph_execution = engine.graph_runtime_state.graph_execution
assert graph_execution.is_paused
assert graph_execution.paused
assert graph_execution.pause_reason == SchedulingPause(message="User requested pause")

View File

@@ -0,0 +1,46 @@
"""
Utilities for detecting whether the database service is available for workflow tests.
"""
import psycopg2
import pytest
from configs import dify_config
def is_database_available() -> bool:
"""
Check if the database service is available by attempting to connect to it.
Returns:
True if database is available, False otherwise.
"""
try:
# Try to establish a database connection using a context manager
with psycopg2.connect(
host=dify_config.DB_HOST,
port=dify_config.DB_PORT,
database=dify_config.DB_DATABASE,
user=dify_config.DB_USERNAME,
password=dify_config.DB_PASSWORD,
connect_timeout=2, # 2 second timeout
) as conn:
pass # Connection established and will be closed automatically
return True
except (psycopg2.OperationalError, psycopg2.Error):
return False
def skip_if_database_unavailable():
"""
Pytest skip decorator that skips tests when database service is unavailable.
Usage:
@skip_if_database_unavailable()
def test_my_workflow():
...
"""
return pytest.mark.skipif(
not is_database_available(),
reason="Database service is not available (connection refused or authentication failed)",
)

View File

@@ -1,109 +0,0 @@
"""Tests for dispatcher command checking behavior."""
from __future__ import annotations
import queue
from datetime import datetime
from core.workflow.enums import NodeType, WorkflowNodeExecutionStatus
from core.workflow.graph_engine.event_management.event_manager import EventManager
from core.workflow.graph_engine.orchestration.dispatcher import Dispatcher
from core.workflow.graph_events import NodeRunStartedEvent, NodeRunSucceededEvent
from core.workflow.node_events import NodeRunResult
class _StubExecutionCoordinator:
"""Stub execution coordinator that tracks command checks."""
def __init__(self) -> None:
self.command_checks = 0
self.scaling_checks = 0
self._execution_complete = False
self.mark_complete_called = False
self.failed = False
self._paused = False
def check_commands(self) -> None:
self.command_checks += 1
def check_scaling(self) -> None:
self.scaling_checks += 1
@property
def is_paused(self) -> bool:
return self._paused
def is_execution_complete(self) -> bool:
return self._execution_complete
def mark_complete(self) -> None:
self.mark_complete_called = True
def mark_failed(self, error: Exception) -> None: # pragma: no cover - defensive, not triggered in tests
self.failed = True
def set_execution_complete(self) -> None:
self._execution_complete = True
class _StubEventHandler:
"""Minimal event handler that marks execution complete after handling an event."""
def __init__(self, coordinator: _StubExecutionCoordinator) -> None:
self._coordinator = coordinator
self.events = []
def dispatch(self, event) -> None:
self.events.append(event)
self._coordinator.set_execution_complete()
def _run_dispatcher_for_event(event) -> int:
"""Run the dispatcher loop for a single event and return command check count."""
event_queue: queue.Queue = queue.Queue()
event_queue.put(event)
coordinator = _StubExecutionCoordinator()
event_handler = _StubEventHandler(coordinator)
event_manager = EventManager()
dispatcher = Dispatcher(
event_queue=event_queue,
event_handler=event_handler,
event_collector=event_manager,
execution_coordinator=coordinator,
)
dispatcher._dispatcher_loop()
return coordinator.command_checks
def _make_started_event() -> NodeRunStartedEvent:
return NodeRunStartedEvent(
id="start-event",
node_id="node-1",
node_type=NodeType.CODE,
node_title="Test Node",
start_at=datetime.utcnow(),
)
def _make_succeeded_event() -> NodeRunSucceededEvent:
return NodeRunSucceededEvent(
id="success-event",
node_id="node-1",
node_type=NodeType.CODE,
node_title="Test Node",
start_at=datetime.utcnow(),
node_run_result=NodeRunResult(status=WorkflowNodeExecutionStatus.SUCCEEDED),
)
def test_dispatcher_checks_commands_during_idle_and_on_completion() -> None:
"""Dispatcher polls commands when idle and after completion events."""
started_checks = _run_dispatcher_for_event(_make_started_event())
succeeded_checks = _run_dispatcher_for_event(_make_succeeded_event())
assert started_checks == 1
assert succeeded_checks == 2

View File

@@ -48,15 +48,3 @@ def test_handle_pause_noop_when_execution_running() -> None:
worker_pool.stop.assert_not_called()
state_manager.clear_executing.assert_not_called()
def test_is_execution_complete_when_paused() -> None:
"""Paused execution should be treated as complete."""
graph_execution = GraphExecution(workflow_id="workflow")
graph_execution.start()
graph_execution.pause("Awaiting input")
coordinator, state_manager, _worker_pool = _build_coordinator(graph_execution)
state_manager.is_execution_complete.return_value = False
assert coordinator.is_execution_complete()

View File

@@ -6,9 +6,11 @@ This module tests the iteration node's ability to:
2. Preserve nested array structure when flatten_output=False
"""
from .test_database_utils import skip_if_database_unavailable
from .test_table_runner import TableTestRunner, WorkflowTestCase
@skip_if_database_unavailable()
def test_iteration_with_flatten_output_enabled():
"""
Test iteration node with flatten_output=True (default behavior).
@@ -37,6 +39,7 @@ def test_iteration_with_flatten_output_enabled():
)
@skip_if_database_unavailable()
def test_iteration_with_flatten_output_disabled():
"""
Test iteration node with flatten_output=False.
@@ -65,6 +68,7 @@ def test_iteration_with_flatten_output_disabled():
)
@skip_if_database_unavailable()
def test_iteration_flatten_output_comparison():
"""
Run both flatten_output configurations in parallel to verify the difference.

View File

@@ -199,6 +199,7 @@ def test__convert_to_knowledge_retrieval_node_for_chatbot():
node = WorkflowConverter()._convert_to_knowledge_retrieval_node(
new_app_mode=new_app_mode, dataset_config=dataset_config, model_config=model_config
)
assert node is not None
assert node["data"]["type"] == "knowledge-retrieval"
assert node["data"]["query_variable_selector"] == ["sys", "query"]
@@ -231,6 +232,7 @@ def test__convert_to_knowledge_retrieval_node_for_workflow_app():
node = WorkflowConverter()._convert_to_knowledge_retrieval_node(
new_app_mode=new_app_mode, dataset_config=dataset_config, model_config=model_config
)
assert node is not None
assert node["data"]["type"] == "knowledge-retrieval"
assert node["data"]["query_variable_selector"] == ["start", dataset_config.retrieve_config.query_variable]

View File

@@ -0,0 +1,37 @@
from core.app.apps.workflow.app_generator import SKIP_PREPARE_USER_INPUTS_KEY
from models.enums import AppTriggerType, WorkflowRunTriggeredFrom
from services.workflow.entities import TriggerData, WebhookTriggerData
from tasks import async_workflow_tasks
def test_build_generator_args_sets_skip_flag_for_webhook():
trigger_data = WebhookTriggerData(
app_id="app",
tenant_id="tenant",
workflow_id="workflow",
root_node_id="node",
inputs={"webhook_data": {"body": {"foo": "bar"}}},
)
args = async_workflow_tasks._build_generator_args(trigger_data)
assert args[SKIP_PREPARE_USER_INPUTS_KEY] is True
assert args["inputs"]["webhook_data"]["body"]["foo"] == "bar"
def test_build_generator_args_keeps_validation_for_other_triggers():
trigger_data = TriggerData(
app_id="app",
tenant_id="tenant",
workflow_id="workflow",
root_node_id="node",
inputs={"foo": "bar"},
files=[],
trigger_type=AppTriggerType.TRIGGER_SCHEDULE,
trigger_from=WorkflowRunTriggeredFrom.SCHEDULE,
)
args = async_workflow_tasks._build_generator_args(trigger_data)
assert SKIP_PREPARE_USER_INPUTS_KEY not in args
assert args["inputs"] == {"foo": "bar"}

api/uv.lock generated

File diff suppressed because it is too large

View File

@@ -365,10 +365,9 @@ WEB_API_CORS_ALLOW_ORIGINS=*
# Specifies the allowed origins for cross-origin requests to the console API,
# e.g. https://cloud.dify.ai or * for all origins.
CONSOLE_CORS_ALLOW_ORIGINS=*
# Set COOKIE_DOMAIN when the console frontend and API are on different subdomains.
# Provide the registrable domain (e.g. example.com); leading dots are optional.
# When the frontend and backend run on different subdomains, set COOKIE_DOMAIN to the site's top-level domain (e.g., `example.com`). Leading dots are optional.
COOKIE_DOMAIN=
# The frontend reads NEXT_PUBLIC_COOKIE_DOMAIN to align cookie handling with the API.
# When the frontend and backend run on different subdomains, set NEXT_PUBLIC_COOKIE_DOMAIN=1.
NEXT_PUBLIC_COOKIE_DOMAIN=
# ------------------------------

View File
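The COOKIE_DOMAIN comments above describe the cross-subdomain cookie setup in prose; a minimal sketch of the two values for a split deployment, assuming hypothetical hosts console.example.com (frontend) and api.example.com (backend):

```
# Share the auth cookie across subdomains of example.com
COOKIE_DOMAIN=example.com
# Tell the frontend to align its cookie handling with the API
NEXT_PUBLIC_COOKIE_DOMAIN=1
```

Both hosts sit under the same registrable domain, which is what allows the authentication cookie to be shared.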

@@ -2,7 +2,7 @@ x-shared-env: &shared-api-worker-env
services:
# API service
api:
image: langgenius/dify-api:1.10.0-rc1
image: langgenius/dify-api:1.10.0
restart: always
environment:
# Use the shared environment variables.
@@ -31,7 +31,7 @@ services:
# worker service
# The Celery worker for processing all queues (dataset, workflow, mail, etc.)
worker:
image: langgenius/dify-api:1.10.0-rc1
image: langgenius/dify-api:1.10.0
restart: always
environment:
# Use the shared environment variables.
@@ -58,7 +58,7 @@ services:
# worker_beat service
# Celery beat for scheduling periodic tasks.
worker_beat:
image: langgenius/dify-api:1.10.0-rc1
image: langgenius/dify-api:1.10.0
restart: always
environment:
# Use the shared environment variables.
@@ -76,7 +76,7 @@ services:
# Frontend web application.
web:
image: langgenius/dify-web:1.10.0-rc1
image: langgenius/dify-web:1.10.0
restart: always
environment:
CONSOLE_API_URL: ${CONSOLE_API_URL:-}
@@ -182,7 +182,7 @@ services:
# plugin daemon
plugin_daemon:
image: langgenius/dify-plugin-daemon:0.4.0-local
image: langgenius/dify-plugin-daemon:0.4.1-local
restart: always
environment:
# Use the shared environment variables.

View File

@@ -625,7 +625,7 @@ x-shared-env: &shared-api-worker-env
services:
# API service
api:
image: langgenius/dify-api:1.10.0-rc1
image: langgenius/dify-api:1.10.0
restart: always
environment:
# Use the shared environment variables.
@@ -654,7 +654,7 @@ services:
# worker service
# The Celery worker for processing all queues (dataset, workflow, mail, etc.)
worker:
image: langgenius/dify-api:1.10.0-rc1
image: langgenius/dify-api:1.10.0
restart: always
environment:
# Use the shared environment variables.
@@ -681,7 +681,7 @@ services:
# worker_beat service
# Celery beat for scheduling periodic tasks.
worker_beat:
image: langgenius/dify-api:1.10.0-rc1
image: langgenius/dify-api:1.10.0
restart: always
environment:
# Use the shared environment variables.
@@ -699,7 +699,7 @@ services:
# Frontend web application.
web:
image: langgenius/dify-web:1.10.0-rc1
image: langgenius/dify-web:1.10.0
restart: always
environment:
CONSOLE_API_URL: ${CONSOLE_API_URL:-}
@@ -805,7 +805,7 @@ services:
# plugin daemon
plugin_daemon:
image: langgenius/dify-plugin-daemon:0.4.0-local
image: langgenius/dify-plugin-daemon:0.4.1-local
restart: always
environment:
# Use the shared environment variables.

View File

@@ -12,6 +12,9 @@ NEXT_PUBLIC_API_PREFIX=http://localhost:5001/console/api
# console or api domain.
# example: http://udify.app/api
NEXT_PUBLIC_PUBLIC_API_PREFIX=http://localhost:5001/api
# When the frontend and backend run on different subdomains, set NEXT_PUBLIC_COOKIE_DOMAIN=1.
NEXT_PUBLIC_COOKIE_DOMAIN=
# The API PREFIX for MARKETPLACE
NEXT_PUBLIC_MARKETPLACE_API_PREFIX=https://marketplace.dify.ai/api/v1
# The URL for MARKETPLACE
@@ -34,9 +37,6 @@ NEXT_PUBLIC_CSP_WHITELIST=
# Default is not allow to embed into iframe to prevent Clickjacking: https://owasp.org/www-community/attacks/Clickjacking
NEXT_PUBLIC_ALLOW_EMBED=
# Shared cookie domain when console UI and API use different subdomains (e.g. example.com)
NEXT_PUBLIC_COOKIE_DOMAIN=
# Allow rendering unsafe URLs which have "data:" scheme.
NEXT_PUBLIC_ALLOW_UNSAFE_DATA_SCHEME=false

View File

@@ -32,6 +32,7 @@ NEXT_PUBLIC_EDITION=SELF_HOSTED
# different from api or web app domain.
# example: http://cloud.dify.ai/console/api
NEXT_PUBLIC_API_PREFIX=http://localhost:5001/console/api
NEXT_PUBLIC_COOKIE_DOMAIN=
# The URL for Web APP, refers to the Web App base URL of WEB service if web app domain is different from
# console or api domain.
# example: http://udify.app/api
@@ -41,6 +42,11 @@ NEXT_PUBLIC_PUBLIC_API_PREFIX=http://localhost:5001/api
NEXT_PUBLIC_SENTRY_DSN=
```
> [!IMPORTANT]
>
> 1. When the frontend and backend run on different subdomains, set NEXT_PUBLIC_COOKIE_DOMAIN=1. The frontend and backend must be under the same top-level domain in order to share authentication cookies.
> 1. It's necessary to set NEXT_PUBLIC_API_PREFIX and NEXT_PUBLIC_PUBLIC_API_PREFIX to the correct backend API URL.
Finally, run the development server:
```bash

View File
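For the important note above, a sketch of the corresponding web-side values for a split deployment; the hostnames are placeholders (assuming console.example.com for the frontend and api.example.com for the backend), only the /console/api and /api path suffixes come from the example file:

```
# Point the console UI at the backend API
NEXT_PUBLIC_API_PREFIX=https://api.example.com/console/api
NEXT_PUBLIC_PUBLIC_API_PREFIX=https://api.example.com/api
# Frontend and backend run on different subdomains of the same domain
NEXT_PUBLIC_COOKIE_DOMAIN=1
```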

@@ -1,6 +1,6 @@
'use client'
import type { FC } from 'react'
import React, { useMemo } from 'react'
import React, { useCallback, useMemo } from 'react'
import { useTranslation } from 'react-i18next'
import { useContext } from 'use-context-selector'
import AppCard from '@/app/components/app/overview/app-card'
@@ -24,6 +24,7 @@ import { useStore as useAppStore } from '@/app/components/app/store'
import { useAppWorkflow } from '@/service/use-workflow'
import type { BlockEnum } from '@/app/components/workflow/types'
import { isTriggerNode } from '@/app/components/workflow/types'
import { useDocLink } from '@/context/i18n'
export type ICardViewProps = {
appId: string
@@ -33,22 +34,56 @@ export type ICardViewProps = {
const CardView: FC<ICardViewProps> = ({ appId, isInPanel, className }) => {
const { t } = useTranslation()
const docLink = useDocLink()
const { notify } = useContext(ToastContext)
const appDetail = useAppStore(state => state.appDetail)
const setAppDetail = useAppStore(state => state.setAppDetail)
const isWorkflowApp = appDetail?.mode === AppModeEnum.WORKFLOW
const showMCPCard = isInPanel
const showTriggerCard = isInPanel && appDetail?.mode === AppModeEnum.WORKFLOW
const { data: currentWorkflow } = useAppWorkflow(appDetail?.mode === AppModeEnum.WORKFLOW ? appDetail.id : '')
const hasTriggerNode = useMemo(() => {
if (appDetail?.mode !== AppModeEnum.WORKFLOW)
const showTriggerCard = isInPanel && isWorkflowApp
const { data: currentWorkflow } = useAppWorkflow(isWorkflowApp ? appDetail.id : '')
const hasTriggerNode = useMemo<boolean | null>(() => {
if (!isWorkflowApp)
return false
const nodes = currentWorkflow?.graph?.nodes || []
if (!currentWorkflow)
return null
const nodes = currentWorkflow.graph?.nodes || []
return nodes.some((node) => {
const nodeType = node.data?.type as BlockEnum | undefined
return !!nodeType && isTriggerNode(nodeType)
})
}, [appDetail?.mode, currentWorkflow])
}, [isWorkflowApp, currentWorkflow])
const shouldRenderAppCards = !isWorkflowApp || hasTriggerNode === false
const disableAppCards = !shouldRenderAppCards
const triggerDocUrl = docLink('/guides/workflow/node/start')
const buildTriggerModeMessage = useCallback((featureName: string) => (
<div className='flex flex-col gap-1'>
<div className='text-xs text-text-secondary'>
{t('appOverview.overview.disableTooltip.triggerMode', { feature: featureName })}
</div>
<div
className='cursor-pointer text-xs font-medium text-text-accent hover:underline'
onClick={(event) => {
event.stopPropagation()
window.open(triggerDocUrl, '_blank')
}}
>
{t('appOverview.overview.appInfo.enableTooltip.learnMore')}
</div>
</div>
), [t, triggerDocUrl])
const disableWebAppTooltip = disableAppCards
? buildTriggerModeMessage(t('appOverview.overview.appInfo.title'))
: null
const disableApiTooltip = disableAppCards
? buildTriggerModeMessage(t('appOverview.overview.apiInfo.title'))
: null
const disableMcpTooltip = disableAppCards
? buildTriggerModeMessage(t('tools.mcp.server.title'))
: null
const updateAppDetail = async () => {
try {
@@ -120,39 +155,48 @@ const CardView: FC<ICardViewProps> = ({ appId, isInPanel, className }) => {
if (!appDetail)
return <Loading />
return (
<div className={className || 'mb-6 grid w-full grid-cols-1 gap-6 xl:grid-cols-2'}>
{
!hasTriggerNode && (
<>
<AppCard
appInfo={appDetail}
cardType="webapp"
isInPanel={isInPanel}
onChangeStatus={onChangeSiteStatus}
onGenerateCode={onGenerateCode}
onSaveSiteConfig={onSaveSiteConfig}
/>
<AppCard
cardType="api"
appInfo={appDetail}
isInPanel={isInPanel}
onChangeStatus={onChangeApiStatus}
/>
{showMCPCard && (
<MCPServiceCard
appInfo={appDetail}
/>
)}
</>
)
}
{showTriggerCard && (
<TriggerCard
const appCards = (
<>
<AppCard
appInfo={appDetail}
cardType="webapp"
isInPanel={isInPanel}
triggerModeDisabled={disableAppCards}
triggerModeMessage={disableWebAppTooltip}
onChangeStatus={onChangeSiteStatus}
onGenerateCode={onGenerateCode}
onSaveSiteConfig={onSaveSiteConfig}
/>
<AppCard
cardType="api"
appInfo={appDetail}
isInPanel={isInPanel}
triggerModeDisabled={disableAppCards}
triggerModeMessage={disableApiTooltip}
onChangeStatus={onChangeApiStatus}
/>
{showMCPCard && (
<MCPServiceCard
appInfo={appDetail}
onToggleResult={handleCallbackResult}
triggerModeDisabled={disableAppCards}
triggerModeMessage={disableMcpTooltip}
/>
)}
</>
)
const triggerCardNode = showTriggerCard ? (
<TriggerCard
appInfo={appDetail}
onToggleResult={handleCallbackResult}
/>
) : null
return (
<div className={className || 'mb-6 grid w-full grid-cols-1 gap-6 xl:grid-cols-2'}>
{disableAppCards && triggerCardNode}
{appCards}
{!disableAppCards && triggerCardNode}
</div>
)
}

View File

@@ -42,6 +42,7 @@ import { getProcessedFilesFromResponse } from '@/app/components/base/file-upload
import cn from '@/utils/classnames'
import { noop } from 'lodash-es'
import PromptLogModal from '../../base/prompt-log-modal'
import { WorkflowContextProvider } from '@/app/components/workflow/context'
type AppStoreState = ReturnType<typeof useAppStore.getState>
type ConversationListItem = ChatConversationGeneralDetail | CompletionConversationGeneralDetail
@@ -779,15 +780,17 @@ function DetailPanel({ detail, onFeedback }: IDetailPanel) {
}
</div>
{showMessageLogModal && (
<MessageLogModal
width={width}
currentLogItem={currentLogItem}
onCancel={() => {
setCurrentLogItem()
setShowMessageLogModal(false)
}}
defaultTab={currentLogModalActiveTab}
/>
<WorkflowContextProvider>
<MessageLogModal
width={width}
currentLogItem={currentLogItem}
onCancel={() => {
setCurrentLogItem()
setShowMessageLogModal(false)
}}
defaultTab={currentLogModalActiveTab}
/>
</WorkflowContextProvider>
)}
{!isChatMode && showPromptLogModal && (
<PromptLogModal

View File

@@ -51,6 +51,8 @@ export type IAppCardProps = {
isInPanel?: boolean
cardType?: 'api' | 'webapp'
customBgColor?: string
triggerModeDisabled?: boolean // true when Trigger Node mode needs the UI locked to avoid conflicting actions
triggerModeMessage?: React.ReactNode // contextual copy explaining why the card is disabled in trigger mode
onChangeStatus: (val: boolean) => Promise<void>
onSaveSiteConfig?: (params: ConfigParams) => Promise<void>
onGenerateCode?: () => Promise<void>
@@ -61,6 +63,8 @@ function AppCard({
isInPanel,
cardType = 'webapp',
customBgColor,
triggerModeDisabled = false,
triggerModeMessage = '',
onChangeStatus,
onSaveSiteConfig,
onGenerateCode,
@@ -111,7 +115,7 @@ function AppCard({
const hasStartNode = currentWorkflow?.graph?.nodes?.some(node => node.data.type === BlockEnum.Start)
const missingStartNode = isWorkflowApp && !hasStartNode
const hasInsufficientPermissions = isApp ? !isCurrentWorkspaceEditor : !isCurrentWorkspaceManager
const toggleDisabled = hasInsufficientPermissions || appUnpublished || missingStartNode
const toggleDisabled = hasInsufficientPermissions || appUnpublished || missingStartNode || triggerModeDisabled
const runningStatus = (appUnpublished || missingStartNode) ? false : (isApp ? appInfo.enable_site : appInfo.enable_api)
const isMinimalState = appUnpublished || missingStartNode
const { app_base_url, access_token } = appInfo.site ?? {}
@@ -189,7 +193,20 @@ function AppCard({
className={
`${isInPanel ? 'border-l-[0.5px] border-t' : 'border-[0.5px] shadow-xs'} w-full max-w-full rounded-xl border-effects-highlight ${className ?? ''} ${isMinimalState ? 'h-12' : ''}`}
>
<div className={`${customBgColor ?? 'bg-background-default'} rounded-xl`}>
<div className={`${customBgColor ?? 'bg-background-default'} relative rounded-xl ${triggerModeDisabled ? 'opacity-60' : ''}`}>
{triggerModeDisabled && (
triggerModeMessage
? (
<Tooltip
popupContent={triggerModeMessage}
popupClassName="max-w-64 rounded-xl bg-components-panel-bg px-3 py-2 text-xs text-text-secondary shadow-lg"
position="right"
>
<div className='absolute inset-0 z-10 cursor-not-allowed rounded-xl' aria-hidden="true"></div>
</Tooltip>
)
: <div className='absolute inset-0 z-10 cursor-not-allowed rounded-xl' aria-hidden="true"></div>
)}
<div className={`flex w-full flex-col items-start justify-center gap-3 self-stretch p-3 ${isMinimalState ? 'border-0' : 'border-b-[0.5px] border-divider-subtle'}`}>
<div className='flex w-full items-center gap-3 self-stretch'>
<AppBasic
@@ -214,18 +231,23 @@ function AppCard({
</div>
<Tooltip
popupContent={
toggleDisabled && (appUnpublished || missingStartNode) ? (
<>
<div className="mb-1 text-xs font-normal text-text-secondary">
{t('appOverview.overview.appInfo.enableTooltip.description')}
</div>
<div
className="cursor-pointer text-xs font-normal text-text-accent hover:underline"
onClick={() => window.open(docLink('/guides/workflow/node/user-input'), '_blank')}
>
{t('appOverview.overview.appInfo.enableTooltip.learnMore')}
</div>
</>
toggleDisabled ? (
triggerModeDisabled && triggerModeMessage
? triggerModeMessage
: (appUnpublished || missingStartNode) ? (
<>
<div className="mb-1 text-xs font-normal text-text-secondary">
{t('appOverview.overview.appInfo.enableTooltip.description')}
</div>
<div
className="cursor-pointer text-xs font-normal text-text-accent hover:underline"
onClick={() => window.open(docLink('/guides/workflow/node/user-input'), '_blank')}
>
{t('appOverview.overview.appInfo.enableTooltip.learnMore')}
</div>
</>
)
: ''
) : ''
}
position="right"
@@ -329,9 +351,11 @@ function AppCard({
{!isApp && <SecretKeyButton appId={appInfo.id} />}
{OPERATIONS_MAP[cardType].map((op) => {
const disabled
= op.opName === t('appOverview.overview.appInfo.settings.entry')
? false
: !runningStatus
= triggerModeDisabled
? true
: op.opName === t('appOverview.overview.appInfo.settings.entry')
? false
: !runningStatus
return (
<Button
className="mr-1 min-w-[88px]"

View File

@@ -282,21 +282,23 @@ const AppCard = ({ app, onRefresh }: AppCardProps) => {
</>
)}
{
(!systemFeatures.webapp_auth.enabled)
? <>
<Divider className="my-1" />
<button type="button" className='mx-1 flex h-8 cursor-pointer items-center gap-2 rounded-lg px-3 hover:bg-state-base-hover' onClick={onClickInstalledApp}>
<span className='system-sm-regular text-text-secondary'>{t('app.openInExplore')}</span>
</button>
</>
: !(isGettingUserCanAccessApp || !userCanAccessApp?.result) && (
<>
!app.has_draft_trigger && (
(!systemFeatures.webapp_auth.enabled)
? <>
<Divider className="my-1" />
<button type="button" className='mx-1 flex h-8 cursor-pointer items-center gap-2 rounded-lg px-3 hover:bg-state-base-hover' onClick={onClickInstalledApp}>
<span className='system-sm-regular text-text-secondary'>{t('app.openInExplore')}</span>
</button>
</>
)
: !(isGettingUserCanAccessApp || !userCanAccessApp?.result) && (
<>
<Divider className="my-1" />
<button type="button" className='mx-1 flex h-8 cursor-pointer items-center gap-2 rounded-lg px-3 hover:bg-state-base-hover' onClick={onClickInstalledApp}>
<span className='system-sm-regular text-text-secondary'>{t('app.openInExplore')}</span>
</button>
</>
)
)
}
<Divider className="my-1" />
{

View File

@@ -1,6 +1,6 @@
import React from 'react'
import Link from 'next/link'
import { RiDiscordFill, RiGithubFill } from '@remixicon/react'
import { RiDiscordFill, RiDiscussLine, RiGithubFill } from '@remixicon/react'
import { useTranslation } from 'react-i18next'
type CustomLinkProps = {
@@ -38,6 +38,9 @@ const Footer = () => {
<CustomLink href='https://discord.gg/FngNHpbcY7'>
<RiDiscordFill className='h-5 w-5 text-text-tertiary' />
</CustomLink>
<CustomLink href='https://forum.dify.ai'>
<RiDiscussLine className='h-5 w-5 text-text-tertiary' />
</CustomLink>
</div>
</footer>
)

View File

@@ -1,23 +0,0 @@
.appIcon {
@apply flex items-center justify-center relative w-9 h-9 text-lg rounded-lg grow-0 shrink-0;
}
.appIcon.large {
@apply w-10 h-10;
}
.appIcon.small {
@apply w-8 h-8;
}
.appIcon.tiny {
@apply w-6 h-6 text-base;
}
.appIcon.xs {
@apply w-5 h-5 text-base;
}
.appIcon.rounded {
@apply rounded-full;
}

View File

@@ -1,5 +0,0 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<g id="apps-2-line">
<path id="Vector" d="M4.66602 7.6665C3.00916 7.6665 1.66602 6.32336 1.66602 4.6665C1.66602 3.00965 3.00916 1.6665 4.66602 1.6665C6.32287 1.6665 7.66602 3.00965 7.66602 4.6665C7.66602 6.32336 6.32287 7.6665 4.66602 7.6665ZM4.66602 14.3332C3.00916 14.3332 1.66602 12.99 1.66602 11.3332C1.66602 9.6763 3.00916 8.33317 4.66602 8.33317C6.32287 8.33317 7.66602 9.6763 7.66602 11.3332C7.66602 12.99 6.32287 14.3332 4.66602 14.3332ZM11.3327 7.6665C9.67582 7.6665 8.33268 6.32336 8.33268 4.6665C8.33268 3.00965 9.67582 1.6665 11.3327 1.6665C12.9895 1.6665 14.3327 3.00965 14.3327 4.6665C14.3327 6.32336 12.9895 7.6665 11.3327 7.6665ZM11.3327 14.3332C9.67582 14.3332 8.33268 12.99 8.33268 11.3332C8.33268 9.6763 9.67582 8.33317 11.3327 8.33317C12.9895 8.33317 14.3327 9.6763 14.3327 11.3332C14.3327 12.99 12.9895 14.3332 11.3327 14.3332ZM4.66602 6.33317C5.58649 6.33317 6.33268 5.58698 6.33268 4.6665C6.33268 3.74603 5.58649 2.99984 4.66602 2.99984C3.74554 2.99984 2.99935 3.74603 2.99935 4.6665C2.99935 5.58698 3.74554 6.33317 4.66602 6.33317ZM4.66602 12.9998C5.58649 12.9998 6.33268 12.2536 6.33268 11.3332C6.33268 10.4127 5.58649 9.6665 4.66602 9.6665C3.74554 9.6665 2.99935 10.4127 2.99935 11.3332C2.99935 12.2536 3.74554 12.9998 4.66602 12.9998ZM11.3327 6.33317C12.2531 6.33317 12.9993 5.58698 12.9993 4.6665C12.9993 3.74603 12.2531 2.99984 11.3327 2.99984C10.4122 2.99984 9.66602 3.74603 9.66602 4.6665C9.66602 5.58698 10.4122 6.33317 11.3327 6.33317ZM11.3327 12.9998C12.2531 12.9998 12.9993 12.2536 12.9993 11.3332C12.9993 10.4127 12.2531 9.6665 11.3327 9.6665C10.4122 9.6665 9.66602 10.4127 9.66602 11.3332C9.66602 12.2536 10.4122 12.9998 11.3327 12.9998Z" fill="#155EEF"/>
</g>
</svg>

Before: 1.8 KiB

View File

@@ -1,3 +0,0 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M4.66602 14.3334C3.00916 14.3334 1.66602 12.9903 1.66602 11.3334C1.66602 9.67655 3.00916 8.33342 4.66602 8.33342C6.32287 8.33342 7.66602 9.67655 7.66602 11.3334C7.66602 12.9903 6.32287 14.3334 4.66602 14.3334ZM11.3327 7.66675C9.67582 7.66675 8.33268 6.3236 8.33268 4.66675C8.33268 3.00989 9.67582 1.66675 11.3327 1.66675C12.9895 1.66675 14.3327 3.00989 14.3327 4.66675C14.3327 6.3236 12.9895 7.66675 11.3327 7.66675ZM4.66602 13.0001C5.58649 13.0001 6.33268 12.2539 6.33268 11.3334C6.33268 10.4129 5.58649 9.66675 4.66602 9.66675C3.74554 9.66675 2.99935 10.4129 2.99935 11.3334C2.99935 12.2539 3.74554 13.0001 4.66602 13.0001ZM11.3327 6.33342C12.2531 6.33342 12.9993 5.58722 12.9993 4.66675C12.9993 3.74627 12.2531 3.00008 11.3327 3.00008C10.4122 3.00008 9.66602 3.74627 9.66602 4.66675C9.66602 5.58722 10.4122 6.33342 11.3327 6.33342ZM1.99935 5.33341C1.99935 3.49247 3.49174 2.00008 5.33268 2.00008H7.33268V3.33341H5.33268C4.22812 3.33341 3.33268 4.22885 3.33268 5.33341V7.33342H1.99935V5.33341ZM13.9993 8.66675H12.666V10.6667C12.666 11.7713 11.7706 12.6667 10.666 12.6667H8.66602V14.0001H10.666C12.5069 14.0001 13.9993 12.5077 13.9993 10.6667V8.66675Z" fill="#344054"/>
</svg>

Before: 1.3 KiB

View File

@@ -1,3 +0,0 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M10 2.66659H3.33333V13.3333H12.6667V5.33325H10V2.66659ZM2 1.99445C2 1.62929 2.29833 1.33325 2.66567 1.33325H10.6667L13.9998 4.66658L14 13.9949C14 14.3659 13.7034 14.6666 13.3377 14.6666H2.66227C2.29651 14.6666 2 14.3631 2 14.0054V1.99445ZM11.7713 7.99992L9.4142 10.3569L8.4714 9.41412L9.8856 7.99992L8.4714 6.58571L9.4142 5.6429L11.7713 7.99992ZM4.22877 7.99992L6.58579 5.6429L7.5286 6.58571L6.11438 7.99992L7.5286 9.41412L6.58579 10.3569L4.22877 7.99992Z" fill="#344054"/>
</svg>

Before: 586 B

View File

@@ -0,0 +1,258 @@
import type { Meta, StoryObj } from '@storybook/nextjs'
import React from 'react'
declare const require: any
type IconComponent = React.ComponentType<Record<string, unknown>>
type IconEntry = {
name: string
category: string
path: string
Component: IconComponent
}
const iconContext = require.context('./src', true, /\.tsx$/)
const iconEntries: IconEntry[] = iconContext
.keys()
.filter((key: string) => !key.endsWith('.stories.tsx') && !key.endsWith('.spec.tsx'))
.map((key: string) => {
const mod = iconContext(key)
const Component = mod.default as IconComponent | undefined
if (!Component)
return null
const relativePath = key.replace(/^\.\//, '')
const path = `app/components/base/icons/src/${relativePath}`
const parts = relativePath.split('/')
const fileName = parts.pop() || ''
const category = parts.length ? parts.join('/') : '(root)'
const name = Component.displayName || fileName.replace(/\.tsx$/, '')
return {
name,
category,
path,
Component,
}
})
.filter(Boolean) as IconEntry[]
const sortedEntries = [...iconEntries].sort((a, b) => {
if (a.category === b.category)
return a.name.localeCompare(b.name)
return a.category.localeCompare(b.category)
})
const filterEntries = (entries: IconEntry[], query: string) => {
const normalized = query.trim().toLowerCase()
if (!normalized)
return entries
return entries.filter(entry =>
entry.name.toLowerCase().includes(normalized)
|| entry.path.toLowerCase().includes(normalized)
|| entry.category.toLowerCase().includes(normalized),
)
}
const groupByCategory = (entries: IconEntry[]) => entries.reduce((acc, entry) => {
if (!acc[entry.category])
acc[entry.category] = []
acc[entry.category].push(entry)
return acc
}, {} as Record<string, IconEntry[]>)
const containerStyle: React.CSSProperties = {
padding: 24,
display: 'flex',
flexDirection: 'column',
gap: 24,
}
const headerStyle: React.CSSProperties = {
display: 'flex',
flexDirection: 'column',
gap: 8,
}
const controlsStyle: React.CSSProperties = {
display: 'flex',
alignItems: 'center',
gap: 12,
flexWrap: 'wrap',
}
const searchInputStyle: React.CSSProperties = {
padding: '8px 12px',
minWidth: 280,
borderRadius: 6,
border: '1px solid #d0d0d5',
}
const toggleButtonStyle: React.CSSProperties = {
padding: '8px 12px',
borderRadius: 6,
border: '1px solid #d0d0d5',
background: '#fff',
cursor: 'pointer',
}
const emptyTextStyle: React.CSSProperties = { color: '#5f5f66' }
const sectionStyle: React.CSSProperties = {
display: 'flex',
flexDirection: 'column',
gap: 12,
}
const gridStyle: React.CSSProperties = {
display: 'grid',
gap: 12,
gridTemplateColumns: 'repeat(auto-fill, minmax(200px, 1fr))',
}
const cardStyle: React.CSSProperties = {
border: '1px solid #e1e1e8',
borderRadius: 8,
padding: 12,
display: 'flex',
flexDirection: 'column',
gap: 8,
minHeight: 140,
}
const previewBaseStyle: React.CSSProperties = {
display: 'flex',
justifyContent: 'center',
alignItems: 'center',
minHeight: 48,
borderRadius: 6,
}
const nameButtonBaseStyle: React.CSSProperties = {
display: 'inline-flex',
padding: 0,
border: 'none',
background: 'transparent',
font: 'inherit',
cursor: 'pointer',
textAlign: 'left',
fontWeight: 600,
}
const PREVIEW_SIZE = 40
const IconGalleryStory = () => {
const [query, setQuery] = React.useState('')
const [copiedPath, setCopiedPath] = React.useState<string | null>(null)
const [previewTheme, setPreviewTheme] = React.useState<'light' | 'dark'>('light')
const filtered = React.useMemo(() => filterEntries(sortedEntries, query), [query])
const grouped = React.useMemo(() => groupByCategory(filtered), [filtered])
const categoryOrder = React.useMemo(
() => Object.keys(grouped).sort((a, b) => a.localeCompare(b)),
[grouped],
)
React.useEffect(() => {
if (!copiedPath)
return undefined
const timerId = window.setTimeout(() => {
setCopiedPath(null)
}, 1200)
return () => window.clearTimeout(timerId)
}, [copiedPath])
const handleCopy = React.useCallback((text: string) => {
navigator.clipboard?.writeText(text)
.then(() => {
setCopiedPath(text)
})
.catch((err) => {
console.error('Failed to copy icon path:', err)
})
}, [])
return (
<div style={containerStyle}>
<header style={headerStyle}>
<h1 style={{ margin: 0 }}>Icon Gallery</h1>
<p style={{ margin: 0, color: '#5f5f66' }}>
Browse all icon components sourced from <code>app/components/base/icons/src</code>. Use the search bar
to filter by name or path.
</p>
<div style={controlsStyle}>
<input
style={searchInputStyle}
placeholder="Search icons"
value={query}
onChange={event => setQuery(event.target.value)}
/>
<span style={{ color: '#5f5f66' }}>{filtered.length} icons</span>
<button
type="button"
onClick={() => setPreviewTheme(prev => (prev === 'light' ? 'dark' : 'light'))}
style={toggleButtonStyle}
>
Toggle {previewTheme === 'light' ? 'dark' : 'light'} preview
</button>
</div>
</header>
{categoryOrder.length === 0 && (
<p style={emptyTextStyle}>No icons match the current filter.</p>
)}
{categoryOrder.map(category => (
<section key={category} style={sectionStyle}>
<h2 style={{ margin: 0, fontSize: 18 }}>{category}</h2>
<div style={gridStyle}>
{grouped[category].map(entry => (
<div key={entry.path} style={cardStyle}>
<div
style={{
...previewBaseStyle,
background: previewTheme === 'dark' ? '#1f2024' : '#fff',
}}
>
<entry.Component style={{ width: PREVIEW_SIZE, height: PREVIEW_SIZE }} />
</div>
<button
type="button"
onClick={() => handleCopy(entry.path)}
style={{
...nameButtonBaseStyle,
color: copiedPath === entry.path ? '#00754a' : '#24262c',
}}
>
{copiedPath === entry.path ? 'Copied!' : entry.name}
</button>
</div>
))}
</div>
</section>
))}
</div>
)
}
const meta: Meta<typeof IconGalleryStory> = {
title: 'Base/Icons/Icon Gallery',
component: IconGalleryStory,
parameters: {
layout: 'fullscreen',
},
}
export default meta
type Story = StoryObj<typeof IconGalleryStory>
export const All: Story = {
render: () => <IconGalleryStory />,
}

View File

@@ -1,36 +0,0 @@
{
"icon": {
"type": "element",
"isRootNode": true,
"name": "svg",
"attributes": {
"width": "16",
"height": "16",
"viewBox": "0 0 16 16",
"fill": "none",
"xmlns": "http://www.w3.org/2000/svg"
},
"children": [
{
"type": "element",
"name": "g",
"attributes": {
"id": "apps-2-line"
},
"children": [
{
"type": "element",
"name": "path",
"attributes": {
"id": "Vector",
"d": "M4.66602 7.6665C3.00916 7.6665 1.66602 6.32336 1.66602 4.6665C1.66602 3.00965 3.00916 1.6665 4.66602 1.6665C6.32287 1.6665 7.66602 3.00965 7.66602 4.6665C7.66602 6.32336 6.32287 7.6665 4.66602 7.6665ZM4.66602 14.3332C3.00916 14.3332 1.66602 12.99 1.66602 11.3332C1.66602 9.6763 3.00916 8.33317 4.66602 8.33317C6.32287 8.33317 7.66602 9.6763 7.66602 11.3332C7.66602 12.99 6.32287 14.3332 4.66602 14.3332ZM11.3327 7.6665C9.67582 7.6665 8.33268 6.32336 8.33268 4.6665C8.33268 3.00965 9.67582 1.6665 11.3327 1.6665C12.9895 1.6665 14.3327 3.00965 14.3327 4.6665C14.3327 6.32336 12.9895 7.6665 11.3327 7.6665ZM11.3327 14.3332C9.67582 14.3332 8.33268 12.99 8.33268 11.3332C8.33268 9.6763 9.67582 8.33317 11.3327 8.33317C12.9895 8.33317 14.3327 9.6763 14.3327 11.3332C14.3327 12.99 12.9895 14.3332 11.3327 14.3332ZM4.66602 6.33317C5.58649 6.33317 6.33268 5.58698 6.33268 4.6665C6.33268 3.74603 5.58649 2.99984 4.66602 2.99984C3.74554 2.99984 2.99935 3.74603 2.99935 4.6665C2.99935 5.58698 3.74554 6.33317 4.66602 6.33317ZM4.66602 12.9998C5.58649 12.9998 6.33268 12.2536 6.33268 11.3332C6.33268 10.4127 5.58649 9.6665 4.66602 9.6665C3.74554 9.6665 2.99935 10.4127 2.99935 11.3332C2.99935 12.2536 3.74554 12.9998 4.66602 12.9998ZM11.3327 6.33317C12.2531 6.33317 12.9993 5.58698 12.9993 4.6665C12.9993 3.74603 12.2531 2.99984 11.3327 2.99984C10.4122 2.99984 9.66602 3.74603 9.66602 4.6665C9.66602 5.58698 10.4122 6.33317 11.3327 6.33317ZM11.3327 12.9998C12.2531 12.9998 12.9993 12.2536 12.9993 11.3332C12.9993 10.4127 12.2531 9.6665 11.3327 9.6665C10.4122 9.6665 9.66602 10.4127 9.66602 11.3332C9.66602 12.2536 10.4122 12.9998 11.3327 12.9998Z",
"fill": "currentColor"
},
"children": []
}
]
}
]
},
"name": "Apps02"
}

View File

@@ -1,20 +0,0 @@
// GENERATE BY script
// DON NOT EDIT IT MANUALLY
import * as React from 'react'
import data from './Apps02.json'
import IconBase from '@/app/components/base/icons/IconBase'
import type { IconData } from '@/app/components/base/icons/IconBase'
const Icon = (
{
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
},
) => <IconBase {...props} ref={ref} data={data as IconData} />
Icon.displayName = 'Apps02'
export default Icon

View File

@@ -1,26 +0,0 @@
{
"icon": {
"type": "element",
"isRootNode": true,
"name": "svg",
"attributes": {
"width": "16",
"height": "16",
"viewBox": "0 0 16 16",
"fill": "none",
"xmlns": "http://www.w3.org/2000/svg"
},
"children": [
{
"type": "element",
"name": "path",
"attributes": {
"d": "M4.66602 14.3334C3.00916 14.3334 1.66602 12.9903 1.66602 11.3334C1.66602 9.67655 3.00916 8.33342 4.66602 8.33342C6.32287 8.33342 7.66602 9.67655 7.66602 11.3334C7.66602 12.9903 6.32287 14.3334 4.66602 14.3334ZM11.3327 7.66675C9.67582 7.66675 8.33268 6.3236 8.33268 4.66675C8.33268 3.00989 9.67582 1.66675 11.3327 1.66675C12.9895 1.66675 14.3327 3.00989 14.3327 4.66675C14.3327 6.3236 12.9895 7.66675 11.3327 7.66675ZM4.66602 13.0001C5.58649 13.0001 6.33268 12.2539 6.33268 11.3334C6.33268 10.4129 5.58649 9.66675 4.66602 9.66675C3.74554 9.66675 2.99935 10.4129 2.99935 11.3334C2.99935 12.2539 3.74554 13.0001 4.66602 13.0001ZM11.3327 6.33342C12.2531 6.33342 12.9993 5.58722 12.9993 4.66675C12.9993 3.74627 12.2531 3.00008 11.3327 3.00008C10.4122 3.00008 9.66602 3.74627 9.66602 4.66675C9.66602 5.58722 10.4122 6.33342 11.3327 6.33342ZM1.99935 5.33341C1.99935 3.49247 3.49174 2.00008 5.33268 2.00008H7.33268V3.33341H5.33268C4.22812 3.33341 3.33268 4.22885 3.33268 5.33341V7.33342H1.99935V5.33341ZM13.9993 8.66675H12.666V10.6667C12.666 11.7713 11.7706 12.6667 10.666 12.6667H8.66602V14.0001H10.666C12.5069 14.0001 13.9993 12.5077 13.9993 10.6667V8.66675Z",
"fill": "currentColor"
},
"children": []
}
]
},
"name": "Exchange02"
}

View File

@@ -1,20 +0,0 @@
// GENERATE BY script
// DON NOT EDIT IT MANUALLY
import * as React from 'react'
import data from './Exchange02.json'
import IconBase from '@/app/components/base/icons/IconBase'
import type { IconData } from '@/app/components/base/icons/IconBase'
const Icon = (
{
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
},
) => <IconBase {...props} ref={ref} data={data as IconData} />
Icon.displayName = 'Exchange02'
export default Icon

View File

@@ -1,26 +0,0 @@
{
"icon": {
"type": "element",
"isRootNode": true,
"name": "svg",
"attributes": {
"width": "16",
"height": "16",
"viewBox": "0 0 16 16",
"fill": "none",
"xmlns": "http://www.w3.org/2000/svg"
},
"children": [
{
"type": "element",
"name": "path",
"attributes": {
"d": "M10 2.66659H3.33333V13.3333H12.6667V5.33325H10V2.66659ZM2 1.99445C2 1.62929 2.29833 1.33325 2.66567 1.33325H10.6667L13.9998 4.66658L14 13.9949C14 14.3659 13.7034 14.6666 13.3377 14.6666H2.66227C2.29651 14.6666 2 14.3631 2 14.0054V1.99445ZM11.7713 7.99992L9.4142 10.3569L8.4714 9.41412L9.8856 7.99992L8.4714 6.58571L9.4142 5.6429L11.7713 7.99992ZM4.22877 7.99992L6.58579 5.6429L7.5286 6.58571L6.11438 7.99992L7.5286 9.41412L6.58579 10.3569L4.22877 7.99992Z",
"fill": "currentColor"
},
"children": []
}
]
},
"name": "FileCode"
}

View File

@@ -1,20 +0,0 @@
// GENERATE BY script
// DON NOT EDIT IT MANUALLY
import * as React from 'react'
import data from './FileCode.json'
import IconBase from '@/app/components/base/icons/IconBase'
import type { IconData } from '@/app/components/base/icons/IconBase'
const Icon = (
{
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
},
) => <IconBase {...props} ref={ref} data={data as IconData} />
Icon.displayName = 'FileCode'
export default Icon

View File

@@ -1,10 +1,7 @@
export { default as Apps02 } from './Apps02'
export { default as BubbleX } from './BubbleX'
export { default as Colors } from './Colors'
export { default as DragHandle } from './DragHandle'
export { default as Env } from './Env'
export { default as Exchange02 } from './Exchange02'
export { default as FileCode } from './FileCode'
export { default as GlobalVariable } from './GlobalVariable'
export { default as Icon3Dots } from './Icon3Dots'
export { default as LongArrowLeft } from './LongArrowLeft'

View File

@@ -6,6 +6,7 @@ import { useStore } from '@/app/components/app/store'
import type { WorkflowRunDetailResponse } from '@/models/log'
import type { NodeTracing, NodeTracingListResponse } from '@/types/workflow'
import { BlockEnum } from '@/app/components/workflow/types'
import { WorkflowContextProvider } from '@/app/components/workflow/context'
const SAMPLE_APP_DETAIL = {
id: 'app-demo-1',
@@ -143,10 +144,12 @@ const MessageLogPreview = (props: MessageLogModalProps) => {
return (
<div className="relative min-h-[640px] w-full bg-background-default-subtle p-6">
<MessageLogModal
{...props}
currentLogItem={mockCurrentLogItem}
/>
<WorkflowContextProvider>
<MessageLogModal
{...props}
currentLogItem={mockCurrentLogItem}
/>
</WorkflowContextProvider>
</div>
)
}

View File

@@ -121,7 +121,7 @@ const RegenerationModal: FC<IRegenerationModalProps> = ({
})
return (
<Modal isShow={isShow} onClose={noop} className='!max-w-[480px] !rounded-2xl'>
<Modal isShow={isShow} onClose={noop} className='!max-w-[480px] !rounded-2xl' wrapperClassName='!z-[10000]'>
{!loading && !updateSucceeded && <DefaultContent onCancel={onCancel} onConfirm={onConfirm} />}
{loading && !updateSucceeded && <RegeneratingContent />}
{!loading && updateSucceeded && <RegenerationCompletedContent onClose={onClose} />}

View File

@@ -124,6 +124,7 @@ const Completed: FC<ICompletedProps> = ({
const [limit, setLimit] = useState(DEFAULT_LIMIT)
const [fullScreen, setFullScreen] = useState(false)
const [showNewChildSegmentModal, setShowNewChildSegmentModal] = useState(false)
const [isRegenerationModalOpen, setIsRegenerationModalOpen] = useState(false)
const segmentListRef = useRef<HTMLDivElement>(null)
const childSegmentListRef = useRef<HTMLDivElement>(null)
@@ -669,6 +670,7 @@ const Completed: FC<ICompletedProps> = ({
onClose={onCloseSegmentDetail}
showOverlay={false}
needCheckChunks
modal={isRegenerationModalOpen}
>
<SegmentDetail
key={currSegment.segInfo?.id}
@@ -677,6 +679,7 @@ const Completed: FC<ICompletedProps> = ({
isEditMode={currSegment.isEditMode}
onUpdate={handleUpdateSegment}
onCancel={onCloseSegmentDetail}
onModalStateChange={setIsRegenerationModalOpen}
/>
</FullScreenDrawer>
{/* Create New Segment */}

View File

@@ -27,6 +27,7 @@ type ISegmentDetailProps = {
onCancel: () => void
isEditMode?: boolean
docForm: ChunkingMode
onModalStateChange?: (isOpen: boolean) => void
}
/**
@@ -38,6 +39,7 @@ const SegmentDetail: FC<ISegmentDetailProps> = ({
onCancel,
isEditMode,
docForm,
onModalStateChange,
}) => {
const { t } = useTranslation()
const [question, setQuestion] = useState(isEditMode ? segInfo?.content || '' : segInfo?.sign_content || '')
@@ -68,11 +70,19 @@ const SegmentDetail: FC<ISegmentDetailProps> = ({
const handleRegeneration = useCallback(() => {
setShowRegenerationModal(true)
}, [])
onModalStateChange?.(true)
}, [onModalStateChange])
const onCancelRegeneration = useCallback(() => {
setShowRegenerationModal(false)
}, [])
onModalStateChange?.(false)
}, [onModalStateChange])
const onCloseAfterRegeneration = useCallback(() => {
setShowRegenerationModal(false)
onModalStateChange?.(false)
onCancel() // Close the edit drawer
}, [onCancel, onModalStateChange])
const onConfirmRegeneration = useCallback(() => {
onUpdate(segInfo?.id || '', question, answer, keywords, true)
@@ -161,7 +171,7 @@ const SegmentDetail: FC<ISegmentDetailProps> = ({
isShow={showRegenerationModal}
onConfirm={onConfirmRegeneration}
onCancel={onCancelRegeneration}
onClose={onCancelRegeneration}
onClose={onCloseAfterRegeneration}
/>
)
}
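
The change above lifts the regeneration modal's open state to the parent through an optional onModalStateChange callback, so the surrounding drawer can switch into modal mode while the inner dialog is showing. A minimal sketch of the same callback-lifting idea, with hypothetical ChunkEditor/ParentDrawer names standing in for SegmentDetail and its drawer:

import React, { useCallback, useState } from 'react'

type ChunkEditorProps = {
  onModalStateChange?: (isOpen: boolean) => void
}

// Hypothetical child: reports every open/close of its inner modal upward.
const ChunkEditor = ({ onModalStateChange }: ChunkEditorProps) => {
  const [showModal, setShowModal] = useState(false)

  const openModal = useCallback(() => {
    setShowModal(true)
    onModalStateChange?.(true)
  }, [onModalStateChange])

  const closeModal = useCallback(() => {
    setShowModal(false)
    onModalStateChange?.(false)
  }, [onModalStateChange])

  return (
    <div>
      <button type='button' onClick={openModal}>Regenerate</button>
      {showModal && <button type='button' onClick={closeModal}>Close</button>}
    </div>
  )
}

// Hypothetical parent: keeps the lifted flag and forwards it to whatever
// container needs to know a modal is open (a data attribute here).
const ParentDrawer = () => {
  const [isChildModalOpen, setIsChildModalOpen] = useState(false)
  return (
    <div data-modal-open={isChildModalOpen}>
      <ChunkEditor onModalStateChange={setIsChildModalOpen} />
    </div>
  )
}

export { ChunkEditor, ParentDrawer }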

View File

@@ -17,8 +17,9 @@ import type { InvitationResult } from '@/models/common'
import I18n from '@/context/i18n'
import 'react-multi-email/dist/style.css'
import { noop } from 'lodash-es'
import { useProviderContextSelector } from '@/context/provider-context'
import { useBoolean } from 'ahooks'
type IInviteModalProps = {
isEmailSetup: boolean
onCancel: () => void
@@ -49,9 +50,15 @@ const InviteModal = ({
const { locale } = useContext(I18n)
const [role, setRole] = useState<string>('normal')
const [isSubmitting, {
setTrue: setIsSubmitting,
setFalse: setIsSubmitted,
}] = useBoolean(false)
const handleSend = useCallback(async () => {
if (isLimitExceeded)
if (isLimitExceeded || isSubmitting)
return
setIsSubmitting()
if (emails.map((email: string) => emailRegex.test(email)).every(Boolean)) {
try {
const { result, invitation_results } = await inviteMember({
@@ -70,7 +77,8 @@ const InviteModal = ({
else {
notify({ type: 'error', message: t('common.members.emailInvalid') })
}
}, [isLimitExceeded, emails, role, locale, onCancel, onSend, notify, t])
setIsSubmitted()
}, [isLimitExceeded, emails, role, locale, onCancel, onSend, notify, t, isSubmitting])
return (
<div className={cn(s.wrap)}>
@@ -133,7 +141,7 @@ const InviteModal = ({
tabIndex={0}
className='w-full'
onClick={handleSend}
disabled={!emails.length || isLimitExceeded}
disabled={!emails.length || isLimitExceeded || isSubmitting}
variant='primary'
>
{t('common.members.sendInvite')}
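
A stripped-down sketch of the double-submit guard added above, assuming only ahooks' useBoolean and a placeholder sendInvites request; the real handler also validates emails and notifies on errors:

import { useCallback } from 'react'
import { useBoolean } from 'ahooks'

// Placeholder for the real invite request.
declare function sendInvites(emails: string[]): Promise<void>

export const useGuardedSend = (emails: string[]) => {
  // useBoolean returns the flag plus stable setTrue/setFalse actions.
  const [isSubmitting, { setTrue: startSubmitting, setFalse: finishSubmitting }] = useBoolean(false)

  const handleSend = useCallback(async () => {
    if (isSubmitting)
      return // ignore re-entrant clicks while a request is in flight
    startSubmitting()
    try {
      await sendInvites(emails)
    }
    finally {
      finishSubmitting()
    }
  }, [emails, isSubmitting, startSubmitting, finishSubmitting])

  return { handleSend, isSubmitting }
}

The same isSubmitting flag also feeds the button's disabled prop, as in the diff.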

View File

@@ -24,8 +24,8 @@ import { debounce } from 'lodash-es'
import React, { useCallback, useEffect, useMemo, useRef, useState } from 'react'
import { useTranslation } from 'react-i18next'
import LogViewer from '../log-viewer'
import { usePluginSubscriptionStore } from '../store'
import { usePluginStore } from '../../store'
import { useSubscriptionList } from '../use-subscription-list'
type Props = {
onClose: () => void
@@ -91,7 +91,7 @@ const MultiSteps = ({ currentStep }: { currentStep: ApiKeyStep }) => {
export const CommonCreateModal = ({ onClose, createType, builder }: Props) => {
const { t } = useTranslation()
const detail = usePluginStore(state => state.detail)
const { refresh } = usePluginSubscriptionStore()
const { refetch } = useSubscriptionList()
const [currentStep, setCurrentStep] = useState<ApiKeyStep>(createType === SupportedCreationMethods.APIKEY ? ApiKeyStep.Verify : ApiKeyStep.Configuration)
@@ -295,7 +295,7 @@ export const CommonCreateModal = ({ onClose, createType, builder }: Props) => {
message: t('pluginTrigger.subscription.createSuccess'),
})
onClose()
refresh?.()
refetch?.()
},
onError: async (error: any) => {
const errorMessage = await parsePluginErrorMessage(error) || t('pluginTrigger.subscription.createFailed')

View File

@@ -4,7 +4,7 @@ import Toast from '@/app/components/base/toast'
import { useDeleteTriggerSubscription } from '@/service/use-triggers'
import { useState } from 'react'
import { useTranslation } from 'react-i18next'
import { usePluginSubscriptionStore } from './store'
import { useSubscriptionList } from './use-subscription-list'
type Props = {
onClose: (deleted: boolean) => void
@@ -18,7 +18,7 @@ const tPrefix = 'pluginTrigger.subscription.list.item.actions.deleteConfirm'
export const DeleteConfirm = (props: Props) => {
const { onClose, isShow, currentId, currentName, workflowsInUse } = props
const { refresh } = usePluginSubscriptionStore()
const { refetch } = useSubscriptionList()
const { mutate: deleteSubscription, isPending: isDeleting } = useDeleteTriggerSubscription()
const { t } = useTranslation()
const [inputName, setInputName] = useState('')
@@ -40,7 +40,7 @@ export const DeleteConfirm = (props: Props) => {
message: t(`${tPrefix}.success`, { name: currentName }),
className: 'z-[10000001]',
})
refresh?.()
refetch?.()
onClose(true)
},
onError: (error: any) => {

View File

@@ -1,11 +0,0 @@
import { create } from 'zustand'
type ShapeSubscription = {
refresh?: () => void
setRefresh: (refresh: () => void) => void
}
export const usePluginSubscriptionStore = create<ShapeSubscription>(set => ({
refresh: undefined,
setRefresh: (refresh: () => void) => set({ refresh }),
}))

View File

@@ -1,19 +1,11 @@
import { useEffect } from 'react'
import { useTriggerSubscriptions } from '@/service/use-triggers'
import { usePluginStore } from '../store'
import { usePluginSubscriptionStore } from './store'
export const useSubscriptionList = () => {
const detail = usePluginStore(state => state.detail)
const { setRefresh } = usePluginSubscriptionStore()
const { data: subscriptions, isLoading, refetch } = useTriggerSubscriptions(detail?.provider || '')
useEffect(() => {
if (refetch)
setRefresh(refetch)
}, [refetch, setRefresh])
return {
detail,
subscriptions,

Binary file not shown (before: 1.1 KiB)

View File

@@ -1,80 +0,0 @@
'use client'
import { useRef } from 'react'
import { useTranslation } from 'react-i18next'
import { useContext } from 'use-context-selector'
import { useMount } from 'ahooks'
import cn from '@/utils/classnames'
import { Apps02 } from '@/app/components/base/icons/src/vender/line/others'
import I18n from '@/context/i18n'
import { getLanguage } from '@/i18n-config/language'
import { useStore as useLabelStore } from '@/app/components/tools/labels/store'
import { fetchLabelList } from '@/service/tools'
import { renderI18nObject } from '@/i18n-config'
type Props = {
value: string
onSelect: (type: string) => void
}
const Icon = ({ svgString, active }: { svgString: string; active: boolean }) => {
const svgRef = useRef<SVGSVGElement | null>(null)
const SVGParser = (svg: string) => {
if (!svg)
return null
const parser = new DOMParser()
const doc = parser.parseFromString(svg, 'image/svg+xml')
return doc.documentElement
}
useMount(() => {
const svgElement = SVGParser(svgString)
if (svgRef.current && svgElement)
svgRef.current.appendChild(svgElement)
})
return <svg className={cn('h-4 w-4 text-gray-700', active && '!text-primary-600')} ref={svgRef} />
}
const Category = ({
value,
onSelect,
}: Props) => {
const { t } = useTranslation()
const { locale } = useContext(I18n)
const language = getLanguage(locale)
const labelList = useLabelStore(s => s.labelList)
const setLabelList = useLabelStore(s => s.setLabelList)
useMount(() => {
fetchLabelList().then((res) => {
setLabelList(res)
})
})
return (
<div className='mb-3'>
<div className='px-3 py-0.5 text-xs font-medium leading-[18px] text-gray-500'>{t('tools.addToolModal.category').toLocaleUpperCase()}</div>
<div className={cn('mb-0.5 flex cursor-pointer items-center rounded-lg p-1 pl-3 text-sm leading-5 text-gray-700 hover:bg-white', value === '' && '!bg-white font-medium !text-primary-600')} onClick={() => onSelect('')}>
<Apps02 className='mr-2 h-4 w-4 shrink-0' />
{t('tools.type.all')}
</div>
{labelList.map((label) => {
const labelText = typeof label.label === 'string'
? label.label
: (label.label ? renderI18nObject(label.label, language) : '')
return (
<div
key={label.name}
title={labelText}
className={cn('mb-0.5 flex cursor-pointer items-center overflow-hidden truncate rounded-lg p-1 pl-3 text-sm leading-5 text-gray-700 hover:bg-white', value === label.name && '!bg-white font-medium !text-primary-600')}
onClick={() => onSelect(label.name)}
>
<div className='mr-2 h-4 w-4 shrink-0'>
<Icon active={value === label.name} svgString={label.icon || ''} />
</div>
{labelText}
</div>
)
})}
</div>
)
}
export default Category

Binary file not shown (before: 30 KiB)

View File

@@ -1,258 +0,0 @@
'use client'
import type { FC } from 'react'
import React, { useMemo, useState } from 'react'
import { useTranslation } from 'react-i18next'
import { useContext } from 'use-context-selector'
import { produce } from 'immer'
import {
RiAddLine,
RiCloseLine,
} from '@remixicon/react'
import { useMount } from 'ahooks'
import type { Collection, CustomCollectionBackend, Tool } from '../types'
import type { CollectionType } from '../types'
import Type from './type'
import Category from './category'
import Tools from './tools'
import cn from '@/utils/classnames'
import { basePath } from '@/utils/var'
import I18n from '@/context/i18n'
import Drawer from '@/app/components/base/drawer'
import Button from '@/app/components/base/button'
import Loading from '@/app/components/base/loading'
import Input from '@/app/components/base/input'
import EditCustomToolModal from '@/app/components/tools/edit-custom-collection-modal'
import ConfigCredential from '@/app/components/tools/setting/build-in/config-credentials'
import {
createCustomCollection,
fetchAllBuiltInTools,
fetchAllCustomTools,
fetchAllWorkflowTools,
removeBuiltInToolCredential,
updateBuiltInToolCredential,
} from '@/service/tools'
import type { ToolWithProvider } from '@/app/components/workflow/types'
import Toast from '@/app/components/base/toast'
import ConfigContext from '@/context/debug-configuration'
import type { ModelConfig } from '@/models/debug'
type Props = {
onHide: () => void
}
// Add and Edit
const AddToolModal: FC<Props> = ({
onHide,
}) => {
const { t } = useTranslation()
const { locale } = useContext(I18n)
const [currentType, setCurrentType] = useState('builtin')
const [currentCategory, setCurrentCategory] = useState('')
const [keywords, setKeywords] = useState<string>('')
const handleKeywordsChange = (value: string) => {
setKeywords(value)
}
const isMatchingKeywords = (text: string, keywords: string) => {
return text.toLowerCase().includes(keywords.toLowerCase())
}
const [toolList, setToolList] = useState<ToolWithProvider[]>([])
const [listLoading, setListLoading] = useState(true)
const getAllTools = async () => {
setListLoading(true)
const buildInTools = await fetchAllBuiltInTools()
if (basePath) {
buildInTools.forEach((item) => {
if (typeof item.icon == 'string' && !item.icon.includes(basePath))
item.icon = `${basePath}${item.icon}`
})
}
const customTools = await fetchAllCustomTools()
const workflowTools = await fetchAllWorkflowTools()
const mergedToolList = [
...buildInTools,
...customTools,
...workflowTools.filter((toolWithProvider) => {
return !toolWithProvider.tools.some((tool) => {
return !!tool.parameters.find(item => item.name === '__image')
})
}),
]
setToolList(mergedToolList)
setListLoading(false)
}
const filteredList = useMemo(() => {
return toolList.filter((toolWithProvider) => {
if (currentType === 'all')
return true
else
return toolWithProvider.type === currentType
}).filter((toolWithProvider) => {
if (!currentCategory)
return true
else
return toolWithProvider.labels.includes(currentCategory)
}).filter((toolWithProvider) => {
return (
isMatchingKeywords(toolWithProvider.name, keywords)
|| toolWithProvider.tools.some((tool) => {
return Object.values(tool.label).some((label) => {
return isMatchingKeywords(label, keywords)
})
})
)
})
}, [currentType, currentCategory, toolList, keywords])
const {
modelConfig,
setModelConfig,
} = useContext(ConfigContext)
const [isShowEditCollectionToolModal, setIsShowEditCustomCollectionModal] = useState(false)
const doCreateCustomToolCollection = async (data: CustomCollectionBackend) => {
await createCustomCollection(data)
Toast.notify({
type: 'success',
message: t('common.api.actionSuccess'),
})
setIsShowEditCustomCollectionModal(false)
getAllTools()
}
const [showSettingAuth, setShowSettingAuth] = useState(false)
const [collection, setCollection] = useState<Collection>()
const toolSelectHandle = (collection: Collection, tool: Tool) => {
const parameters: Record<string, string> = {}
if (tool.parameters) {
tool.parameters.forEach((item) => {
parameters[item.name] = ''
})
}
const nexModelConfig = produce(modelConfig, (draft: ModelConfig) => {
draft.agentConfig.tools.push({
provider_id: collection.id || collection.name,
provider_type: collection.type as CollectionType,
provider_name: collection.name,
tool_name: tool.name,
tool_label: tool.label[locale] || tool.label[locale.replaceAll('-', '_')],
tool_parameters: parameters,
enabled: true,
})
})
setModelConfig(nexModelConfig)
}
const authSelectHandle = (provider: Collection) => {
setCollection(provider)
setShowSettingAuth(true)
}
const updateBuiltinAuth = async (value: Record<string, any>) => {
if (!collection)
return
await updateBuiltInToolCredential(collection.name, value)
Toast.notify({
type: 'success',
message: t('common.api.actionSuccess'),
})
await getAllTools()
setShowSettingAuth(false)
}
const removeBuiltinAuth = async () => {
if (!collection)
return
await removeBuiltInToolCredential(collection.name)
Toast.notify({
type: 'success',
message: t('common.api.actionSuccess'),
})
await getAllTools()
setShowSettingAuth(false)
}
useMount(() => {
getAllTools()
})
return (
<>
<Drawer
isOpen
mask
clickOutsideNotOpen
onClose={onHide}
footer={null}
panelClassName={cn('mx-2 mb-3 mt-16 rounded-xl !p-0 sm:mr-2', 'mt-2 !w-[640px]', '!max-w-[640px]')}
>
<div
className='flex w-full rounded-xl border-[0.5px] border-gray-200 bg-white shadow-xl'
style={{
height: 'calc(100vh - 16px)',
}}
>
<div className='relative w-[200px] shrink-0 overflow-y-auto rounded-l-xl border-r-[0.5px] border-black/2 bg-gray-100 pb-3'>
<div className='sticky left-0 right-0 top-0'>
<div className='text-md sticky left-0 right-0 top-0 px-5 py-3 font-semibold text-gray-900'>{t('tools.addTool')}</div>
<div className='px-3 pb-4 pt-2'>
<Button variant='primary' className='w-[176px]' onClick={() => setIsShowEditCustomCollectionModal(true)}>
<RiAddLine className='mr-1 h-4 w-4' />
{t('tools.createCustomTool')}
</Button>
</div>
</div>
<div className='px-2 py-1'>
<Type value={currentType} onSelect={setCurrentType} />
<Category value={currentCategory} onSelect={setCurrentCategory} />
</div>
</div>
<div className='relative grow overflow-y-auto rounded-r-xl bg-white'>
<div className='sticky left-0 right-0 top-0 z-10 flex items-center gap-1 bg-white p-2'>
<div className='grow'>
<Input
showLeftIcon
showClearIcon
value={keywords}
onChange={e => handleKeywordsChange(e.target.value)}
onClear={() => handleKeywordsChange('')}
/>
</div>
<div className='ml-2 mr-1 h-4 w-[1px] bg-gray-200'></div>
<div className='cursor-pointer p-2' onClick={onHide}>
<RiCloseLine className='h-4 w-4 text-gray-500' />
</div>
</div>
{listLoading && (
<div className='flex h-[200px] items-center justify-center bg-white'>
<Loading />
</div>
)}
{!listLoading && (
<Tools
showWorkflowEmpty={currentType === 'workflow'}
tools={filteredList}
addedTools={(modelConfig?.agentConfig?.tools as any) || []}
onSelect={toolSelectHandle}
onAuthSetup={authSelectHandle}
/>
)}
</div>
</div>
</Drawer>
{isShowEditCollectionToolModal && (
<EditCustomToolModal
positionLeft
payload={null}
onHide={() => setIsShowEditCustomCollectionModal(false)}
onAdd={doCreateCustomToolCollection}
/>
)}
{showSettingAuth && collection && (
<ConfigCredential
collection={collection}
onCancel={() => setShowSettingAuth(false)}
onSaved={updateBuiltinAuth}
onRemove={removeBuiltinAuth}
/>
)}
</>
)
}
export default React.memo(AddToolModal)

View File

@@ -1,158 +0,0 @@
import {
memo,
useCallback,
} from 'react'
import { basePath } from '@/utils/var'
import { useTranslation } from 'react-i18next'
import {
RiAddLine,
} from '@remixicon/react'
import cn from '@/utils/classnames'
import { ArrowUpRight } from '@/app/components/base/icons/src/vender/line/arrows'
import { Check } from '@/app/components/base/icons/src/vender/line/general'
import { Tag01 } from '@/app/components/base/icons/src/vender/line/financeAndECommerce'
import type { ToolWithProvider } from '@/app/components/workflow/types'
import { BlockEnum } from '@/app/components/workflow/types'
import BlockIcon from '@/app/components/workflow/block-icon'
import Tooltip from '@/app/components/base/tooltip'
import Button from '@/app/components/base/button'
import { useGetLanguage } from '@/context/i18n'
import { useStore as useLabelStore } from '@/app/components/tools/labels/store'
import Empty from '@/app/components/tools/add-tool-modal/empty'
import type { Tool } from '@/app/components/tools/types'
import { CollectionType } from '@/app/components/tools/types'
import type { AgentTool } from '@/types/app'
import { MAX_TOOLS_NUM } from '@/config'
import type { TypeWithI18N } from '@/app/components/header/account-setting/model-provider-page/declarations'
import { renderI18nObject } from '@/i18n-config'
const resolveI18nText = (value: TypeWithI18N | string | undefined, language: string): string => {
if (!value)
return ''
return typeof value === 'string' ? value : renderI18nObject(value, language)
}
type ToolsProps = {
showWorkflowEmpty: boolean
tools: ToolWithProvider[]
addedTools: AgentTool[]
onSelect: (provider: ToolWithProvider, tool: Tool) => void
onAuthSetup: (provider: ToolWithProvider) => void
}
const Blocks = ({
showWorkflowEmpty,
tools,
addedTools,
onSelect,
onAuthSetup,
}: ToolsProps) => {
const { t } = useTranslation()
const language = useGetLanguage()
const labelList = useLabelStore(s => s.labelList)
const addable = addedTools.length < MAX_TOOLS_NUM
const renderGroup = useCallback((toolWithProvider: ToolWithProvider) => {
const list = toolWithProvider.tools
const needAuth = toolWithProvider.allow_delete && !toolWithProvider.is_team_authorization && toolWithProvider.type === CollectionType.builtIn
return (
<div
key={toolWithProvider.id}
className='group mb-1 last-of-type:mb-0'
>
<div className='flex h-[22px] w-full items-center justify-between pl-3 pr-1 text-xs font-medium text-gray-500'>
{resolveI18nText(toolWithProvider.label, language)}
<a className='hidden cursor-pointer items-center group-hover:flex' href={`${basePath}/tools?category=${toolWithProvider.type}`} target='_blank'>{t('tools.addToolModal.manageInTools')}<ArrowUpRight className='ml-0.5 h-3 w-3' /></a>
</div>
{list.map((tool) => {
const labelContent = (() => {
if (!tool.labels)
return ''
return tool.labels.map((name) => {
const label = labelList.find(item => item.name === name)
return resolveI18nText(label?.label, language)
}).filter(Boolean).join(', ')
})()
const added = !!addedTools?.find(v => v.provider_id === toolWithProvider.id && v.provider_type === toolWithProvider.type && v.tool_name === tool.name)
return (
<Tooltip
key={tool.name}
position='bottom'
popupClassName='!p-0 !px-3 !py-2.5 !w-[210px] !leading-[18px] !text-xs !text-gray-700 !border-[0.5px] !border-black/5 !bg-transparent !rounded-xl !shadow-lg translate-x-[108px]'
popupContent={(
<div>
<BlockIcon
size='md'
className='mb-2'
type={BlockEnum.Tool}
toolIcon={toolWithProvider.icon}
/>
<div className='mb-1 text-sm leading-5 text-gray-900'>{resolveI18nText(tool.label, language)}</div>
<div className='text-xs leading-[18px] text-gray-700'>{resolveI18nText(tool.description, language)}</div>
{tool.labels?.length > 0 && (
<div className='mt-1 flex shrink-0 items-center'>
<div className='relative flex w-full items-center gap-1 rounded-md py-1 text-gray-500' title={labelContent}>
<Tag01 className='h-3 w-3 shrink-0 text-gray-500' />
<div className='grow truncate text-start text-xs font-normal leading-[18px]'>{labelContent}</div>
</div>
</div>
)}
</div>
)}
>
<div className='group/item flex h-8 w-full cursor-pointer items-center rounded-lg pl-3 pr-1 hover:bg-gray-50'>
<BlockIcon
className={cn('mr-2 shrink-0', needAuth && 'opacity-30')}
type={BlockEnum.Tool}
toolIcon={toolWithProvider.icon}
/>
<div className={cn('grow truncate text-sm text-gray-900', needAuth && 'opacity-30')}>{resolveI18nText(tool.label, language)}</div>
{!needAuth && added && (
<div className='flex items-center gap-1 rounded-[6px] border border-gray-100 bg-white px-2 py-[3px] text-xs font-medium leading-[18px] text-gray-300'>
<Check className='h-3 w-3' />
{t('tools.addToolModal.added').toLocaleUpperCase()}
</div>
)}
{!needAuth && !added && addable && (
<Button
variant='secondary-accent'
size='small'
className={cn('hidden shrink-0 items-center group-hover/item:flex')}
onClick={() => onSelect(toolWithProvider, tool)}
>
<RiAddLine className='h-3 w-3' />
{t('tools.addToolModal.add').toLocaleUpperCase()}
</Button>
)}
{needAuth && (
<Button
variant='secondary-accent'
size='small'
className={cn('hidden shrink-0 group-hover/item:flex')}
onClick={() => onAuthSetup(toolWithProvider)}
>{t('tools.auth.setup')}</Button>
)}
</div>
</Tooltip>
)
})}
</div>
)
}, [addable, language, t, labelList, addedTools, onAuthSetup, onSelect])
return (
<div className='max-w-[440px] p-1 pb-6'>
{!tools.length && !showWorkflowEmpty && (
<div className='flex h-[22px] items-center px-3 text-xs font-medium text-gray-500'>{t('workflow.tabs.noResult')}</div>
)}
{!tools.length && showWorkflowEmpty && (
<div className='pt-[280px]'>
<Empty />
</div>
)}
{!!tools.length && tools.map(renderGroup)}
</div>
)
}
export default memo(Blocks)

View File

@@ -1,34 +0,0 @@
'use client'
import { useTranslation } from 'react-i18next'
import cn from '@/utils/classnames'
import { Exchange02, FileCode } from '@/app/components/base/icons/src/vender/line/others'
type Props = {
value: string
onSelect: (type: string) => void
}
const Types = ({
value,
onSelect,
}: Props) => {
const { t } = useTranslation()
return (
<div className='mb-3'>
<div className={cn('mb-0.5 flex cursor-pointer items-center rounded-lg p-1 pl-3 text-sm leading-5 hover:bg-white', value === 'builtin' && '!bg-white font-medium')} onClick={() => onSelect('builtin')}>
<div className="mr-2 h-4 w-4 shrink-0 bg-[url('~@/app/components/tools/add-tool-modal/D.png')] bg-cover bg-no-repeat" />
<span className={cn('text-gray-700', value === 'builtin' && '!text-primary-600')}>{t('tools.type.builtIn')}</span>
</div>
<div className={cn('mb-0.5 flex cursor-pointer items-center rounded-lg p-1 pl-3 text-sm leading-5 text-gray-700 hover:bg-white', value === 'api' && '!bg-white font-medium !text-primary-600')} onClick={() => onSelect('api')}>
<FileCode className='mr-2 h-4 w-4 shrink-0' />
{t('tools.type.custom')}
</div>
<div className={cn('mb-0.5 flex cursor-pointer items-center rounded-lg p-1 pl-3 text-sm leading-5 text-gray-700 hover:bg-white', value === 'workflow' && '!bg-white font-medium !text-primary-600')} onClick={() => onSelect('workflow')}>
<Exchange02 className='mr-2 h-4 w-4 shrink-0' />
{t('tools.type.workflow')}
</div>
</div>
)
}
export default Types

Some files were not shown because too many files have changed in this diff.