Mirror of https://github.com/langgenius/dify.git (synced 2026-02-05 07:34:13 +00:00)

Compare commits: 1 commit, 2-5-eslint ... refactor/t

Commit: 73a386c443
.github/CODEOWNERS
vendored
2
.github/CODEOWNERS
vendored
@@ -239,7 +239,7 @@
/web/app/components/base/ @iamjoel @zxhlyh

# Frontend - Base Components Tests
/web/app/components/base/**/*.spec.tsx @hyoban @CodingOnStar
/web/app/components/base/**/__tests__/ @hyoban @CodingOnStar

# Frontend - Utils and Hooks
/web/utils/classnames.ts @iamjoel @zxhlyh
.github/workflows/autofix.yml (vendored, 23 changed lines)
@@ -79,6 +79,29 @@ jobs:
          find . -name "*.py" -type f -exec sed -i.bak -E 's/"([^"]+)" \| None/Optional["\1"]/g; s/'"'"'([^'"'"']+)'"'"' \| None/Optional['"'"'\1'"'"']/g' {} \;
          find . -name "*.py.bak" -type f -delete

+      - name: Install pnpm
+        uses: pnpm/action-setup@v4
+        with:
+          package_json_file: web/package.json
+          run_install: false
+
+      - name: Setup Node.js
+        uses: actions/setup-node@v6
+        with:
+          node-version: 24
+          cache: pnpm
+          cache-dependency-path: ./web/pnpm-lock.yaml
+
+      - name: Install web dependencies
+        run: |
+          cd web
+          pnpm install --frozen-lockfile
+
+      - name: ESLint autofix
+        run: |
+          cd web
+          pnpm lint:fix || true
+
      # mdformat breaks YAML front matter in markdown files. Add --exclude for directories containing YAML front matter.
      - name: mdformat
        run: |
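For context on the sed rewrite above: a quoted forward reference combined with `| None` fails at definition time, because the union operator is applied to a plain string, while typing.Optional accepts string forward references. A minimal sketch of the failure mode it targets (the User and load names are hypothetical, not from this repo):

from typing import Optional

# Broken: "User" | None is evaluated when the function is defined and raises
#   TypeError: unsupported operand type(s) for |: 'str' and 'NoneType'
# def load(user: "User" | None): ...

# Safe: Optional accepts a string forward reference and resolves it lazily.
def load(user: Optional["User"]) -> None:
    ...

class User:
    ...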
@@ -136,7 +136,6 @@ ignore_imports =
    core.workflow.nodes.llm.llm_utils -> models.provider
    core.workflow.nodes.llm.llm_utils -> services.credit_pool_service
    core.workflow.nodes.llm.node -> core.tools.signature
-    core.workflow.nodes.template_transform.template_transform_node -> configs
    core.workflow.nodes.tool.tool_node -> core.callback_handler.workflow_tool_callback_handler
    core.workflow.nodes.tool.tool_node -> core.tools.tool_engine
    core.workflow.nodes.tool.tool_node -> core.tools.tool_manager
@@ -47,6 +47,7 @@ class DifyNodeFactory(NodeFactory):
        code_providers: Sequence[type[CodeNodeProvider]] | None = None,
        code_limits: CodeNodeLimits | None = None,
        template_renderer: Jinja2TemplateRenderer | None = None,
+        template_transform_max_output_length: int | None = None,
        http_request_http_client: HttpClientProtocol | None = None,
        http_request_tool_file_manager_factory: Callable[[], ToolFileManager] = ToolFileManager,
        http_request_file_manager: FileManagerProtocol | None = None,

@@ -68,6 +69,11 @@ class DifyNodeFactory(NodeFactory):
            max_object_array_length=dify_config.CODE_MAX_OBJECT_ARRAY_LENGTH,
        )
        self._template_renderer = template_renderer or CodeExecutorJinja2TemplateRenderer()
+        self._template_transform_max_output_length = (
+            template_transform_max_output_length
+            if template_transform_max_output_length is not None
+            else dify_config.TEMPLATE_TRANSFORM_MAX_LENGTH
+        )
        self._http_request_http_client = http_request_http_client or ssrf_proxy
        self._http_request_tool_file_manager_factory = http_request_tool_file_manager_factory
        self._http_request_file_manager = http_request_file_manager or file_manager

@@ -122,6 +128,7 @@ class DifyNodeFactory(NodeFactory):
                graph_init_params=self.graph_init_params,
                graph_runtime_state=self.graph_runtime_state,
                template_renderer=self._template_renderer,
+                max_output_length=self._template_transform_max_output_length,
            )

        if node_type == NodeType.HTTP_REQUEST:
@@ -1,7 +1,6 @@
from collections.abc import Mapping, Sequence
from typing import TYPE_CHECKING, Any

-from configs import dify_config
from core.workflow.enums import NodeType, WorkflowNodeExecutionStatus
from core.workflow.node_events import NodeRunResult
from core.workflow.nodes.base.node import Node

@@ -16,12 +15,13 @@ if TYPE_CHECKING:
    from core.workflow.entities import GraphInitParams
    from core.workflow.runtime import GraphRuntimeState

-MAX_TEMPLATE_TRANSFORM_OUTPUT_LENGTH = dify_config.TEMPLATE_TRANSFORM_MAX_LENGTH
+DEFAULT_TEMPLATE_TRANSFORM_MAX_OUTPUT_LENGTH = 400_000


class TemplateTransformNode(Node[TemplateTransformNodeData]):
    node_type = NodeType.TEMPLATE_TRANSFORM
    _template_renderer: Jinja2TemplateRenderer
+    _max_output_length: int

    def __init__(
        self,

@@ -31,6 +31,7 @@ class TemplateTransformNode(Node[TemplateTransformNodeData]):
        graph_runtime_state: "GraphRuntimeState",
        *,
        template_renderer: Jinja2TemplateRenderer | None = None,
+        max_output_length: int | None = None,
    ) -> None:
        super().__init__(
            id=id,

@@ -39,6 +40,9 @@ class TemplateTransformNode(Node[TemplateTransformNodeData]):
            graph_runtime_state=graph_runtime_state,
        )
        self._template_renderer = template_renderer or CodeExecutorJinja2TemplateRenderer()
+        self._max_output_length = (
+            max_output_length if max_output_length is not None else DEFAULT_TEMPLATE_TRANSFORM_MAX_OUTPUT_LENGTH
+        )

    @classmethod
    def get_default_config(cls, filters: Mapping[str, object] | None = None) -> Mapping[str, object]:

@@ -69,11 +73,11 @@ class TemplateTransformNode(Node[TemplateTransformNodeData]):
        except TemplateRenderError as e:
            return NodeRunResult(inputs=variables, status=WorkflowNodeExecutionStatus.FAILED, error=str(e))

-        if len(rendered) > MAX_TEMPLATE_TRANSFORM_OUTPUT_LENGTH:
+        if len(rendered) > self._max_output_length:
            return NodeRunResult(
                inputs=variables,
                status=WorkflowNodeExecutionStatus.FAILED,
-                error=f"Output length exceeds {MAX_TEMPLATE_TRANSFORM_OUTPUT_LENGTH} characters",
+                error=f"Output length exceeds {self._max_output_length} characters",
            )

        return NodeRunResult(
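The hunks above replace the module-level MAX_TEMPLATE_TRANSFORM_OUTPUT_LENGTH (read from dify_config at import time) with a per-instance _max_output_length resolved at construction time, so callers and tests can inject a small limit instead of patching a constant. A minimal sketch of the same default-injection pattern in isolation (Transformer and DEFAULT_MAX are simplified stand-ins, not the node's real API):

DEFAULT_MAX = 400_000  # stand-in for DEFAULT_TEMPLATE_TRANSFORM_MAX_OUTPUT_LENGTH

class Transformer:
    def __init__(self, *, max_output_length: int | None = None) -> None:
        # `None` means "use the default"; 0 would be a valid explicit limit,
        # which is why the check is `is not None` rather than truthiness.
        self._max_output_length = (
            max_output_length if max_output_length is not None else DEFAULT_MAX
        )

    def check(self, rendered: str) -> bool:
        return len(rendered) <= self._max_output_length

assert Transformer().check("x" * 400_000)
assert not Transformer(max_output_length=10).check("x" * 11)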
@@ -1,6 +1,6 @@
[project]
name = "dify-api"
-version = "1.12.1"
+version = "1.12.0"
requires-python = ">=3.11,<3.13"

dependencies = [
@@ -327,17 +327,6 @@ class AccountService:
    @staticmethod
    def delete_account(account: Account):
        """Delete account. This method only adds a task to the queue for deletion."""
-        # Queue account deletion sync tasks for all workspaces BEFORE account deletion (enterprise only)
-        from services.enterprise.account_deletion_sync import sync_account_deletion
-
-        sync_success = sync_account_deletion(account_id=account.id, source="account_deleted")
-        if not sync_success:
-            logger.warning(
-                "Enterprise account deletion sync failed for account %s; proceeding with local deletion.",
-                account.id,
-            )
-
-        # Now proceed with async account deletion
        delete_account_task.delay(account.id)

    @staticmethod

@@ -1241,19 +1230,6 @@ class TenantService:
        if dify_config.BILLING_ENABLED:
            BillingService.clean_billing_info_cache(tenant.id)

-        # Queue account deletion sync task for enterprise backend to reassign resources (enterprise only)
-        from services.enterprise.account_deletion_sync import sync_workspace_member_removal
-
-        sync_success = sync_workspace_member_removal(
-            workspace_id=tenant.id, member_id=account.id, source="workspace_member_removed"
-        )
-        if not sync_success:
-            logger.warning(
-                "Enterprise workspace member removal sync failed: workspace_id=%s, member_id=%s",
-                tenant.id,
-                account.id,
-            )
-
    @staticmethod
    def update_member_role(tenant: Tenant, member: Account, new_role: str, operator: Account):
        """Update member role"""
Deleted file (services/enterprise/account_deletion_sync.py, path per the imports above):

@@ -1,115 +0,0 @@
import json
import logging
import uuid
from datetime import UTC, datetime

from redis import RedisError

from configs import dify_config
from extensions.ext_database import db
from extensions.ext_redis import redis_client
from models.account import TenantAccountJoin

logger = logging.getLogger(__name__)

ACCOUNT_DELETION_SYNC_QUEUE = "enterprise:member:sync:queue"
ACCOUNT_DELETION_SYNC_TASK_TYPE = "sync_member_deletion_from_workspace"


def _queue_task(workspace_id: str, member_id: str, *, source: str) -> bool:
    """
    Queue an account deletion sync task to Redis.

    Internal helper function. Do not call directly - use the public functions instead.

    Args:
        workspace_id: The workspace/tenant ID to sync
        member_id: The member/account ID that was removed
        source: Source of the sync request (for debugging/tracking)

    Returns:
        bool: True if task was queued successfully, False otherwise
    """
    try:
        task = {
            "task_id": str(uuid.uuid4()),
            "workspace_id": workspace_id,
            "member_id": member_id,
            "retry_count": 0,
            "created_at": datetime.now(UTC).isoformat(),
            "source": source,
            "type": ACCOUNT_DELETION_SYNC_TASK_TYPE,
        }

        # Push to Redis list (queue) - LPUSH adds to the head, worker consumes from tail with RPOP
        redis_client.lpush(ACCOUNT_DELETION_SYNC_QUEUE, json.dumps(task))

        logger.info(
            "Queued account deletion sync task for workspace %s, member %s, task_id: %s, source: %s",
            workspace_id,
            member_id,
            task["task_id"],
            source,
        )
        return True

    except (RedisError, TypeError) as e:
        logger.error(
            "Failed to queue account deletion sync for workspace %s, member %s: %s",
            workspace_id,
            member_id,
            str(e),
            exc_info=True,
        )
        # Don't raise - we don't want to fail member deletion if queueing fails
        return False


def sync_workspace_member_removal(workspace_id: str, member_id: str, *, source: str) -> bool:
    """
    Sync a single workspace member removal (enterprise only).

    Queues a task for the enterprise backend to reassign resources from the removed member.
    Handles enterprise edition check internally. Safe to call in community edition (no-op).

    Args:
        workspace_id: The workspace/tenant ID
        member_id: The member/account ID that was removed
        source: Source of the sync request (e.g., "workspace_member_removed")

    Returns:
        bool: True if task was queued (or skipped in community), False if queueing failed
    """
    if not dify_config.ENTERPRISE_ENABLED:
        return True

    return _queue_task(workspace_id=workspace_id, member_id=member_id, source=source)


def sync_account_deletion(account_id: str, *, source: str) -> bool:
    """
    Sync full account deletion across all workspaces (enterprise only).

    Fetches all workspace memberships for the account and queues a sync task for each.
    Handles enterprise edition check internally. Safe to call in community edition (no-op).

    Args:
        account_id: The account ID being deleted
        source: Source of the sync request (e.g., "account_deleted")

    Returns:
        bool: True if all tasks were queued (or skipped in community), False if any queueing failed
    """
    if not dify_config.ENTERPRISE_ENABLED:
        return True

    # Fetch all workspaces the account belongs to
    workspace_joins = db.session.query(TenantAccountJoin).filter_by(account_id=account_id).all()

    # Queue sync task for each workspace
    success = True
    for join in workspace_joins:
        if not _queue_task(workspace_id=join.tenant_id, member_id=account_id, source=source):
            success = False

    return success
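The module's comments pin down the queue protocol: producers LPUSH JSON-encoded tasks to the head of enterprise:member:sync:queue, and a worker consumes from the tail with RPOP. The enterprise-side worker is not part of this repo; a minimal consumer sketch that assumes only what the comments state:

import json
import time

from redis import Redis

QUEUE = "enterprise:member:sync:queue"

def consume(redis_client: Redis) -> None:
    """Hypothetical worker loop matching the LPUSH/RPOP contract above."""
    while True:
        raw = redis_client.rpop(QUEUE)  # tasks were LPUSHed to the head
        if raw is None:
            time.sleep(1.0)  # queue drained; poll again
            continue
        task = json.loads(raw)
        # Each task carries: task_id, workspace_id, member_id, retry_count,
        # created_at, source, type.
        handle(task)

def handle(task: dict) -> None:
    ...  # placeholder for enterprise-side processing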
@@ -259,8 +259,8 @@ def _delete_app_workflow_app_logs(tenant_id: str, app_id: str):


def _delete_app_workflow_archive_logs(tenant_id: str, app_id: str):
-    def del_workflow_archive_log(session, workflow_archive_log_id: str):
-        session.query(WorkflowArchiveLog).where(WorkflowArchiveLog.id == workflow_archive_log_id).delete(
+    def del_workflow_archive_log(workflow_archive_log_id: str):
+        db.session.query(WorkflowArchiveLog).where(WorkflowArchiveLog.id == workflow_archive_log_id).delete(
            synchronize_session=False
        )

@@ -420,7 +420,7 @@ def delete_draft_variables_batch(app_id: str, batch_size: int = 1000) -> int:
    total_files_deleted = 0

    while True:
-        with session_factory.create_session() as session, session.begin():
+        with session_factory.create_session() as session:
            # Get a batch of draft variable IDs along with their file_ids
            query_sql = """
                SELECT id, file_id FROM workflow_draft_variables
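The two context-manager variants above differ in who commits: with session.begin(), the transaction commits automatically when the block exits cleanly (and rolls back on an exception), while the bare session relies on an explicit session.commit(). A minimal SQLAlchemy sketch of the one-transaction-per-batch pattern (generic model placeholder, not the task's actual SQL):

from sqlalchemy import delete, select
from sqlalchemy.orm import sessionmaker

def delete_in_batches(session_factory: sessionmaker, model, batch_size: int = 1000) -> int:
    """Delete all rows of `model`, committing one transaction per batch."""
    if batch_size <= 0:
        raise ValueError("batch_size must be positive")
    total = 0
    while True:
        with session_factory() as session, session.begin():  # commits on success
            ids = session.scalars(select(model.id).limit(batch_size)).all()
            if not ids:
                return total
            session.execute(
                delete(model)
                .where(model.id.in_(ids))
                .execution_options(synchronize_session=False)
            )
            total += len(ids)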
@@ -10,10 +10,7 @@ from models import Tenant
from models.enums import CreatorUserRole
from models.model import App, UploadFile
from models.workflow import WorkflowDraftVariable, WorkflowDraftVariableFile
-from tasks.remove_app_and_related_data_task import (
-    _delete_draft_variables,
-    delete_draft_variables_batch,
-)
+from tasks.remove_app_and_related_data_task import _delete_draft_variables, delete_draft_variables_batch


@pytest.fixture
@@ -300,18 +297,12 @@ class TestDeleteDraftVariablesWithOffloadIntegration:
    def test_delete_draft_variables_with_offload_data(self, mock_storage, setup_offload_test_data):
        data = setup_offload_test_data
        app_id = data["app"].id
-        upload_file_ids = [uf.id for uf in data["upload_files"]]
-        variable_file_ids = [vf.id for vf in data["variable_files"]]
        mock_storage.delete.return_value = None

        with session_factory.create_session() as session:
            draft_vars_before = session.query(WorkflowDraftVariable).filter_by(app_id=app_id).count()
-            var_files_before = (
-                session.query(WorkflowDraftVariableFile)
-                .where(WorkflowDraftVariableFile.id.in_(variable_file_ids))
-                .count()
-            )
-            upload_files_before = session.query(UploadFile).where(UploadFile.id.in_(upload_file_ids)).count()
+            var_files_before = session.query(WorkflowDraftVariableFile).count()
+            upload_files_before = session.query(UploadFile).count()
            assert draft_vars_before == 3
            assert var_files_before == 2
            assert upload_files_before == 2

@@ -324,12 +315,8 @@ class TestDeleteDraftVariablesWithOffloadIntegration:
        assert draft_vars_after == 0

        with session_factory.create_session() as session:
-            var_files_after = (
-                session.query(WorkflowDraftVariableFile)
-                .where(WorkflowDraftVariableFile.id.in_(variable_file_ids))
-                .count()
-            )
-            upload_files_after = session.query(UploadFile).where(UploadFile.id.in_(upload_file_ids)).count()
+            var_files_after = session.query(WorkflowDraftVariableFile).count()
+            upload_files_after = session.query(UploadFile).count()
            assert var_files_after == 0
            assert upload_files_after == 0

@@ -342,8 +329,6 @@ class TestDeleteDraftVariablesWithOffloadIntegration:
    def test_delete_draft_variables_storage_failure_continues_cleanup(self, mock_storage, setup_offload_test_data):
        data = setup_offload_test_data
        app_id = data["app"].id
-        upload_file_ids = [uf.id for uf in data["upload_files"]]
-        variable_file_ids = [vf.id for vf in data["variable_files"]]
        mock_storage.delete.side_effect = [Exception("Storage error"), None]

        deleted_count = delete_draft_variables_batch(app_id, batch_size=10)

@@ -354,12 +339,8 @@ class TestDeleteDraftVariablesWithOffloadIntegration:
        assert draft_vars_after == 0

        with session_factory.create_session() as session:
-            var_files_after = (
-                session.query(WorkflowDraftVariableFile)
-                .where(WorkflowDraftVariableFile.id.in_(variable_file_ids))
-                .count()
-            )
-            upload_files_after = session.query(UploadFile).where(UploadFile.id.in_(upload_file_ids)).count()
+            var_files_after = session.query(WorkflowDraftVariableFile).count()
+            upload_files_after = session.query(UploadFile).count()
            assert var_files_after == 0
            assert upload_files_after == 0
@@ -414,275 +395,3 @@ class TestDeleteDraftVariablesWithOffloadIntegration:
            if app2_obj:
                session.delete(app2_obj)
            session.commit()


class TestDeleteDraftVariablesSessionCommit:
    """Test suite to verify session commit behavior in delete_draft_variables_batch."""

    @pytest.fixture
    def setup_offload_test_data(self, app_and_tenant):
        """Create test data with offload files for session commit tests."""
        from core.variables.types import SegmentType
        from libs.datetime_utils import naive_utc_now

        tenant, app = app_and_tenant

        with session_factory.create_session() as session:
            upload_file1 = UploadFile(
                tenant_id=tenant.id,
                storage_type="local",
                key="test/file1.json",
                name="file1.json",
                size=1024,
                extension="json",
                mime_type="application/json",
                created_by_role=CreatorUserRole.ACCOUNT,
                created_by=str(uuid.uuid4()),
                created_at=naive_utc_now(),
                used=False,
            )
            upload_file2 = UploadFile(
                tenant_id=tenant.id,
                storage_type="local",
                key="test/file2.json",
                name="file2.json",
                size=2048,
                extension="json",
                mime_type="application/json",
                created_by_role=CreatorUserRole.ACCOUNT,
                created_by=str(uuid.uuid4()),
                created_at=naive_utc_now(),
                used=False,
            )
            session.add(upload_file1)
            session.add(upload_file2)
            session.flush()

            var_file1 = WorkflowDraftVariableFile(
                tenant_id=tenant.id,
                app_id=app.id,
                user_id=str(uuid.uuid4()),
                upload_file_id=upload_file1.id,
                size=1024,
                length=10,
                value_type=SegmentType.STRING,
            )
            var_file2 = WorkflowDraftVariableFile(
                tenant_id=tenant.id,
                app_id=app.id,
                user_id=str(uuid.uuid4()),
                upload_file_id=upload_file2.id,
                size=2048,
                length=20,
                value_type=SegmentType.OBJECT,
            )
            session.add(var_file1)
            session.add(var_file2)
            session.flush()

            draft_var1 = WorkflowDraftVariable.new_node_variable(
                app_id=app.id,
                node_id="node_1",
                name="large_var_1",
                value=StringSegment(value="truncated..."),
                node_execution_id=str(uuid.uuid4()),
                file_id=var_file1.id,
            )
            draft_var2 = WorkflowDraftVariable.new_node_variable(
                app_id=app.id,
                node_id="node_2",
                name="large_var_2",
                value=StringSegment(value="truncated..."),
                node_execution_id=str(uuid.uuid4()),
                file_id=var_file2.id,
            )
            draft_var3 = WorkflowDraftVariable.new_node_variable(
                app_id=app.id,
                node_id="node_3",
                name="regular_var",
                value=StringSegment(value="regular_value"),
                node_execution_id=str(uuid.uuid4()),
            )
            session.add(draft_var1)
            session.add(draft_var2)
            session.add(draft_var3)
            session.commit()

        data = {
            "app": app,
            "tenant": tenant,
            "upload_files": [upload_file1, upload_file2],
            "variable_files": [var_file1, var_file2],
            "draft_variables": [draft_var1, draft_var2, draft_var3],
        }

        yield data

        with session_factory.create_session() as session:
            for table, ids in [
                (WorkflowDraftVariable, [v.id for v in data["draft_variables"]]),
                (WorkflowDraftVariableFile, [vf.id for vf in data["variable_files"]]),
                (UploadFile, [uf.id for uf in data["upload_files"]]),
            ]:
                cleanup_query = delete(table).where(table.id.in_(ids)).execution_options(synchronize_session=False)
                session.execute(cleanup_query)
            session.commit()

    @pytest.fixture
    def setup_commit_test_data(self, app_and_tenant):
        """Create test data for session commit tests."""
        tenant, app = app_and_tenant
        variable_ids: list[str] = []

        with session_factory.create_session() as session:
            variables = []
            for i in range(10):
                var = WorkflowDraftVariable.new_node_variable(
                    app_id=app.id,
                    node_id=f"node_{i}",
                    name=f"var_{i}",
                    value=StringSegment(value="test_value"),
                    node_execution_id=str(uuid.uuid4()),
                )
                session.add(var)
                variables.append(var)
            session.commit()
            variable_ids = [v.id for v in variables]

        yield {
            "app": app,
            "tenant": tenant,
            "variable_ids": variable_ids,
        }

        with session_factory.create_session() as session:
            cleanup_query = (
                delete(WorkflowDraftVariable)
                .where(WorkflowDraftVariable.id.in_(variable_ids))
                .execution_options(synchronize_session=False)
            )
            session.execute(cleanup_query)
            session.commit()

    def test_session_commit_is_called_after_each_batch(self, setup_commit_test_data):
        """Test that session.begin() is used for automatic transaction management."""
        data = setup_commit_test_data
        app_id = data["app"].id

        # Since session.begin() is used, the transaction is automatically committed
        # when the with block exits successfully. We verify this by checking that
        # data is actually persisted.
        deleted_count = delete_draft_variables_batch(app_id, batch_size=3)

        # Verify all data was deleted (proves transaction was committed)
        with session_factory.create_session() as session:
            remaining_count = session.query(WorkflowDraftVariable).filter_by(app_id=app_id).count()

        assert deleted_count == 10
        assert remaining_count == 0

    def test_data_persisted_after_batch_deletion(self, setup_commit_test_data):
        """Test that data is actually persisted to database after batch deletion with commits."""
        data = setup_commit_test_data
        app_id = data["app"].id
        variable_ids = data["variable_ids"]

        # Verify initial state
        with session_factory.create_session() as session:
            initial_count = session.query(WorkflowDraftVariable).filter_by(app_id=app_id).count()
            assert initial_count == 10

        # Perform deletion with small batch size to force multiple commits
        deleted_count = delete_draft_variables_batch(app_id, batch_size=3)

        assert deleted_count == 10

        # Verify all data is deleted in a new session (proves commits worked)
        with session_factory.create_session() as session:
            final_count = session.query(WorkflowDraftVariable).filter_by(app_id=app_id).count()
            assert final_count == 0

        # Verify specific IDs are deleted
        with session_factory.create_session() as session:
            remaining_vars = (
                session.query(WorkflowDraftVariable).where(WorkflowDraftVariable.id.in_(variable_ids)).count()
            )
            assert remaining_vars == 0

    def test_session_commit_with_empty_dataset(self, setup_commit_test_data):
        """Test session behavior when deleting from an empty dataset."""
        nonexistent_app_id = str(uuid.uuid4())

        # Should not raise any errors and should return 0
        deleted_count = delete_draft_variables_batch(nonexistent_app_id, batch_size=10)
        assert deleted_count == 0

    def test_session_commit_with_single_batch(self, setup_commit_test_data):
        """Test that commit happens correctly when all data fits in a single batch."""
        data = setup_commit_test_data
        app_id = data["app"].id

        with session_factory.create_session() as session:
            initial_count = session.query(WorkflowDraftVariable).filter_by(app_id=app_id).count()
            assert initial_count == 10

        # Delete all in a single batch
        deleted_count = delete_draft_variables_batch(app_id, batch_size=100)
        assert deleted_count == 10

        # Verify data is persisted
        with session_factory.create_session() as session:
            final_count = session.query(WorkflowDraftVariable).filter_by(app_id=app_id).count()
            assert final_count == 0

    def test_invalid_batch_size_raises_error(self, setup_commit_test_data):
        """Test that invalid batch size raises ValueError."""
        data = setup_commit_test_data
        app_id = data["app"].id

        with pytest.raises(ValueError, match="batch_size must be positive"):
            delete_draft_variables_batch(app_id, batch_size=0)

        with pytest.raises(ValueError, match="batch_size must be positive"):
            delete_draft_variables_batch(app_id, batch_size=-1)

    @patch("extensions.ext_storage.storage")
    def test_session_commit_with_offload_data_cleanup(self, mock_storage, setup_offload_test_data):
        """Test that session commits correctly when cleaning up offload data."""
        data = setup_offload_test_data
        app_id = data["app"].id
        upload_file_ids = [uf.id for uf in data["upload_files"]]
        mock_storage.delete.return_value = None

        # Verify initial state
        with session_factory.create_session() as session:
            draft_vars_before = session.query(WorkflowDraftVariable).filter_by(app_id=app_id).count()
            var_files_before = (
                session.query(WorkflowDraftVariableFile)
                .where(WorkflowDraftVariableFile.id.in_([vf.id for vf in data["variable_files"]]))
                .count()
            )
            upload_files_before = session.query(UploadFile).where(UploadFile.id.in_(upload_file_ids)).count()
            assert draft_vars_before == 3
            assert var_files_before == 2
            assert upload_files_before == 2

        # Delete variables with offload data
        deleted_count = delete_draft_variables_batch(app_id, batch_size=10)
        assert deleted_count == 3

        # Verify all data is persisted (deleted) in new session
        with session_factory.create_session() as session:
            draft_vars_after = session.query(WorkflowDraftVariable).filter_by(app_id=app_id).count()
            var_files_after = (
                session.query(WorkflowDraftVariableFile)
                .where(WorkflowDraftVariableFile.id.in_([vf.id for vf in data["variable_files"]]))
                .count()
            )
            upload_files_after = session.query(UploadFile).where(UploadFile.id.in_(upload_file_ids)).count()
            assert draft_vars_after == 0
            assert var_files_after == 0
            assert upload_files_after == 0

        # Verify storage cleanup was called
        assert mock_storage.delete.call_count == 2
@@ -1016,7 +1016,7 @@ class TestAccountService:

    def test_delete_account(self, db_session_with_containers, mock_external_service_dependencies):
        """
-        Test account deletion (should add task to queue and sync to enterprise).
+        Test account deletion (should add task to queue).
        """
        fake = Faker()
        email = fake.email()

@@ -1034,18 +1034,10 @@ class TestAccountService:
            password=password,
        )

-        with (
-            patch("services.account_service.delete_account_task") as mock_delete_task,
-            patch("services.enterprise.account_deletion_sync.sync_account_deletion") as mock_sync,
-        ):
-            mock_sync.return_value = True
-
+        with patch("services.account_service.delete_account_task") as mock_delete_task:
            # Delete account
            AccountService.delete_account(account)

-            # Verify sync was called
-            mock_sync.assert_called_once_with(account_id=account.id, source="account_deleted")
-
            # Verify task was added to queue
            mock_delete_task.delay.assert_called_once_with(account.id)

@@ -1724,7 +1716,7 @@ class TestTenantService:

    def test_remove_member_from_tenant_success(self, db_session_with_containers, mock_external_service_dependencies):
        """
-        Test successful member removal from tenant (should sync to enterprise).
+        Test successful member removal from tenant.
        """
        fake = Faker()
        tenant_name = fake.company()

@@ -1759,15 +1751,7 @@ class TestTenantService:
        TenantService.create_tenant_member(tenant, member_account, role="normal")

        # Remove member
-        with patch("services.enterprise.account_deletion_sync.sync_workspace_member_removal") as mock_sync:
-            mock_sync.return_value = True
-
-            TenantService.remove_member_from_tenant(tenant, member_account, owner_account)
-
-            # Verify sync was called
-            mock_sync.assert_called_once_with(
-                workspace_id=tenant.id, member_id=member_account.id, source="workspace_member_removed"
-            )
+        TenantService.remove_member_from_tenant(tenant, member_account, owner_account)

        # Verify member was removed
        from extensions.ext_database import db
@@ -217,7 +217,6 @@ class TestTemplateTransformNode:
    @patch(
        "core.workflow.nodes.template_transform.template_transform_node.CodeExecutorJinja2TemplateRenderer.render_template"
    )
-    @patch("core.workflow.nodes.template_transform.template_transform_node.MAX_TEMPLATE_TRANSFORM_OUTPUT_LENGTH", 10)
    def test_run_output_length_exceeds_limit(
        self, mock_execute, basic_node_data, mock_graph, mock_graph_runtime_state, graph_init_params
    ):

@@ -231,6 +230,7 @@ class TestTemplateTransformNode:
            graph_init_params=graph_init_params,
            graph=mock_graph,
            graph_runtime_state=mock_graph_runtime_state,
+            max_output_length=10,
        )

        result = node._run()
Deleted file (unit tests for the account deletion sync module):

@@ -1,276 +0,0 @@
"""Unit tests for account deletion synchronization.

This test module verifies the enterprise account deletion sync functionality,
including Redis queuing, error handling, and community vs enterprise behavior.
"""

from unittest.mock import MagicMock, patch

import pytest
from redis import RedisError

from services.enterprise.account_deletion_sync import (
    _queue_task,
    sync_account_deletion,
    sync_workspace_member_removal,
)


class TestQueueTask:
    """Unit tests for the _queue_task helper function."""

    @pytest.fixture
    def mock_redis_client(self):
        """Mock redis_client for testing."""
        with patch("services.enterprise.account_deletion_sync.redis_client") as mock_redis:
            yield mock_redis

    @pytest.fixture
    def mock_uuid(self):
        """Mock UUID generation for predictable task IDs."""
        with patch("services.enterprise.account_deletion_sync.uuid.uuid4") as mock_uuid_gen:
            mock_uuid_gen.return_value = MagicMock(hex="test-task-id-1234")
            yield mock_uuid_gen

    def test_queue_task_success(self, mock_redis_client, mock_uuid):
        """Test successful task queueing to Redis."""
        # Arrange
        workspace_id = "ws-123"
        member_id = "member-456"
        source = "test_source"

        # Act
        result = _queue_task(workspace_id=workspace_id, member_id=member_id, source=source)

        # Assert
        assert result is True
        mock_redis_client.lpush.assert_called_once()

        # Verify the task payload structure
        call_args = mock_redis_client.lpush.call_args[0]
        assert call_args[0] == "enterprise:member:sync:queue"

        import json

        task_data = json.loads(call_args[1])
        assert task_data["workspace_id"] == workspace_id
        assert task_data["member_id"] == member_id
        assert task_data["source"] == source
        assert task_data["type"] == "sync_member_deletion_from_workspace"
        assert task_data["retry_count"] == 0
        assert "task_id" in task_data
        assert "created_at" in task_data

    def test_queue_task_redis_error(self, mock_redis_client, caplog):
        """Test handling of Redis connection errors."""
        # Arrange
        mock_redis_client.lpush.side_effect = RedisError("Connection failed")

        # Act
        result = _queue_task(workspace_id="ws-123", member_id="member-456", source="test_source")

        # Assert
        assert result is False
        assert "Failed to queue account deletion sync" in caplog.text

    def test_queue_task_type_error(self, mock_redis_client, caplog):
        """Test handling of JSON serialization errors."""
        # Arrange
        mock_redis_client.lpush.side_effect = TypeError("Cannot serialize")

        # Act
        result = _queue_task(workspace_id="ws-123", member_id="member-456", source="test_source")

        # Assert
        assert result is False
        assert "Failed to queue account deletion sync" in caplog.text


class TestSyncWorkspaceMemberRemoval:
    """Unit tests for sync_workspace_member_removal function."""

    @pytest.fixture
    def mock_queue_task(self):
        """Mock _queue_task for testing."""
        with patch("services.enterprise.account_deletion_sync._queue_task") as mock_queue:
            mock_queue.return_value = True
            yield mock_queue

    def test_sync_workspace_member_removal_enterprise_enabled(self, mock_queue_task):
        """Test sync when ENTERPRISE_ENABLED is True."""
        # Arrange
        workspace_id = "ws-123"
        member_id = "member-456"
        source = "workspace_member_removed"

        with patch("services.enterprise.account_deletion_sync.dify_config") as mock_config:
            mock_config.ENTERPRISE_ENABLED = True

            # Act
            result = sync_workspace_member_removal(workspace_id=workspace_id, member_id=member_id, source=source)

            # Assert
            assert result is True
            mock_queue_task.assert_called_once_with(workspace_id=workspace_id, member_id=member_id, source=source)

    def test_sync_workspace_member_removal_enterprise_disabled(self, mock_queue_task):
        """Test sync when ENTERPRISE_ENABLED is False (community edition)."""
        # Arrange
        with patch("services.enterprise.account_deletion_sync.dify_config") as mock_config:
            mock_config.ENTERPRISE_ENABLED = False

            # Act
            result = sync_workspace_member_removal(workspace_id="ws-123", member_id="member-456", source="test_source")

            # Assert
            assert result is True
            mock_queue_task.assert_not_called()

    def test_sync_workspace_member_removal_queue_failure(self, mock_queue_task):
        """Test handling of queue task failures."""
        # Arrange
        mock_queue_task.return_value = False

        with patch("services.enterprise.account_deletion_sync.dify_config") as mock_config:
            mock_config.ENTERPRISE_ENABLED = True

            # Act
            result = sync_workspace_member_removal(workspace_id="ws-123", member_id="member-456", source="test_source")

            # Assert
            assert result is False


class TestSyncAccountDeletion:
    """Unit tests for sync_account_deletion function."""

    @pytest.fixture
    def mock_db_session(self):
        """Mock database session for testing."""
        with patch("services.enterprise.account_deletion_sync.db.session") as mock_session:
            yield mock_session

    @pytest.fixture
    def mock_queue_task(self):
        """Mock _queue_task for testing."""
        with patch("services.enterprise.account_deletion_sync._queue_task") as mock_queue:
            mock_queue.return_value = True
            yield mock_queue

    def test_sync_account_deletion_enterprise_disabled(self, mock_db_session, mock_queue_task):
        """Test sync when ENTERPRISE_ENABLED is False (community edition)."""
        # Arrange
        with patch("services.enterprise.account_deletion_sync.dify_config") as mock_config:
            mock_config.ENTERPRISE_ENABLED = False

            # Act
            result = sync_account_deletion(account_id="acc-123", source="account_deleted")

            # Assert
            assert result is True
            mock_db_session.query.assert_not_called()
            mock_queue_task.assert_not_called()

    def test_sync_account_deletion_multiple_workspaces(self, mock_db_session, mock_queue_task):
        """Test sync for account with multiple workspace memberships."""
        # Arrange
        account_id = "acc-123"

        # Mock workspace joins
        mock_join1 = MagicMock()
        mock_join1.tenant_id = "tenant-1"
        mock_join2 = MagicMock()
        mock_join2.tenant_id = "tenant-2"
        mock_join3 = MagicMock()
        mock_join3.tenant_id = "tenant-3"

        mock_query = MagicMock()
        mock_query.filter_by.return_value.all.return_value = [mock_join1, mock_join2, mock_join3]
        mock_db_session.query.return_value = mock_query

        with patch("services.enterprise.account_deletion_sync.dify_config") as mock_config:
            mock_config.ENTERPRISE_ENABLED = True

            # Act
            result = sync_account_deletion(account_id=account_id, source="account_deleted")

            # Assert
            assert result is True
            assert mock_queue_task.call_count == 3

            # Verify each workspace was queued
            mock_queue_task.assert_any_call(workspace_id="tenant-1", member_id=account_id, source="account_deleted")
            mock_queue_task.assert_any_call(workspace_id="tenant-2", member_id=account_id, source="account_deleted")
            mock_queue_task.assert_any_call(workspace_id="tenant-3", member_id=account_id, source="account_deleted")

    def test_sync_account_deletion_no_workspaces(self, mock_db_session, mock_queue_task):
        """Test sync for account with no workspace memberships."""
        # Arrange
        mock_query = MagicMock()
        mock_query.filter_by.return_value.all.return_value = []
        mock_db_session.query.return_value = mock_query

        with patch("services.enterprise.account_deletion_sync.dify_config") as mock_config:
            mock_config.ENTERPRISE_ENABLED = True

            # Act
            result = sync_account_deletion(account_id="acc-123", source="account_deleted")

            # Assert
            assert result is True
            mock_queue_task.assert_not_called()

    def test_sync_account_deletion_partial_failure(self, mock_db_session, mock_queue_task):
        """Test sync when some tasks fail to queue."""
        # Arrange
        account_id = "acc-123"

        # Mock workspace joins
        mock_join1 = MagicMock()
        mock_join1.tenant_id = "tenant-1"
        mock_join2 = MagicMock()
        mock_join2.tenant_id = "tenant-2"
        mock_join3 = MagicMock()
        mock_join3.tenant_id = "tenant-3"

        mock_query = MagicMock()
        mock_query.filter_by.return_value.all.return_value = [mock_join1, mock_join2, mock_join3]
        mock_db_session.query.return_value = mock_query

        # Mock queue_task to fail for second workspace
        def queue_side_effect(workspace_id, member_id, source):
            return workspace_id != "tenant-2"

        mock_queue_task.side_effect = queue_side_effect

        with patch("services.enterprise.account_deletion_sync.dify_config") as mock_config:
            mock_config.ENTERPRISE_ENABLED = True

            # Act
            result = sync_account_deletion(account_id=account_id, source="account_deleted")

            # Assert
            assert result is False  # Should return False if any task fails
            assert mock_queue_task.call_count == 3

    def test_sync_account_deletion_all_failures(self, mock_db_session, mock_queue_task):
        """Test sync when all tasks fail to queue."""
        # Arrange
        mock_join = MagicMock()
        mock_join.tenant_id = "tenant-1"

        mock_query = MagicMock()
        mock_query.filter_by.return_value.all.return_value = [mock_join]
        mock_db_session.query.return_value = mock_query

        mock_queue_task.return_value = False

        with patch("services.enterprise.account_deletion_sync.dify_config") as mock_config:
            mock_config.ENTERPRISE_ENABLED = True

            # Act
            result = sync_account_deletion(account_id="acc-123", source="account_deleted")

            # Assert
            assert result is False
            mock_queue_task.assert_called_once()
@@ -350,7 +350,7 @@ class TestDeleteWorkflowArchiveLogs:
        mock_query.where.return_value = mock_delete_query
        mock_db.session.query.return_value = mock_query

-        delete_func(mock_db.session, "log-1")
+        delete_func("log-1")

        mock_db.session.query.assert_called_once_with(WorkflowArchiveLog)
        mock_query.where.assert_called_once()
api/uv.lock (generated, 2 changed lines)
@@ -1368,7 +1368,7 @@ wheels = [

[[package]]
name = "dify-api"
-version = "1.12.1"
+version = "1.12.0"
source = { virtual = "." }
dependencies = [
    { name = "aliyun-log-python-sdk" },
@@ -21,7 +21,7 @@ services:

  # API service
  api:
-    image: langgenius/dify-api:1.12.1
+    image: langgenius/dify-api:1.12.0
    restart: always
    environment:
      # Use the shared environment variables.

@@ -63,7 +63,7 @@ services:
  # worker service
  # The Celery worker for processing all queues (dataset, workflow, mail, etc.)
  worker:
-    image: langgenius/dify-api:1.12.1
+    image: langgenius/dify-api:1.12.0
    restart: always
    environment:
      # Use the shared environment variables.

@@ -102,7 +102,7 @@ services:
  # worker_beat service
  # Celery beat for scheduling periodic tasks.
  worker_beat:
-    image: langgenius/dify-api:1.12.1
+    image: langgenius/dify-api:1.12.0
    restart: always
    environment:
      # Use the shared environment variables.

@@ -132,7 +132,7 @@ services:

  # Frontend web application.
  web:
-    image: langgenius/dify-web:1.12.1
+    image: langgenius/dify-web:1.12.0
    restart: always
    environment:
      CONSOLE_API_URL: ${CONSOLE_API_URL:-}

@@ -707,7 +707,7 @@ services:

  # API service
  api:
-    image: langgenius/dify-api:1.12.1
+    image: langgenius/dify-api:1.12.0
    restart: always
    environment:
      # Use the shared environment variables.

@@ -749,7 +749,7 @@ services:
  # worker service
  # The Celery worker for processing all queues (dataset, workflow, mail, etc.)
  worker:
-    image: langgenius/dify-api:1.12.1
+    image: langgenius/dify-api:1.12.0
    restart: always
    environment:
      # Use the shared environment variables.

@@ -788,7 +788,7 @@ services:
  # worker_beat service
  # Celery beat for scheduling periodic tasks.
  worker_beat:
-    image: langgenius/dify-api:1.12.1
+    image: langgenius/dify-api:1.12.0
    restart: always
    environment:
      # Use the shared environment variables.

@@ -818,7 +818,7 @@ services:

  # Frontend web application.
  web:
-    image: langgenius/dify-web:1.12.1
+    image: langgenius/dify-web:1.12.0
    restart: always
    environment:
      CONSOLE_API_URL: ${CONSOLE_API_URL:-}
@@ -1,3 +1,4 @@
+/* eslint-disable tailwindcss/classnames-order */
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import Effect from '.'

@@ -28,8 +29,8 @@ type Story = StoryObj<typeof meta>
export const Playground: Story = {
  render: () => (
    <div className="relative h-40 w-72 overflow-hidden rounded-2xl border border-divider-subtle bg-background-default-subtle">
-      <Effect className="left-8 top-6" />
-      <Effect className="bg-util-colors-purple-brand-purple-brand-500 right-10 top-14" />
+      <Effect className="top-6 left-8" />
+      <Effect className="top-14 right-10 bg-util-colors-purple-brand-purple-brand-500" />
      <div className="absolute inset-x-0 bottom-4 flex justify-center text-xs text-text-secondary">
        Accent glow
      </div>
@@ -98,46 +98,31 @@ export const useNodesSyncDraft = () => {
  ) => {
    if (getNodesReadOnly())
      return
-
-    // Get base params without hash
-    const baseParams = getPostParams()
-    if (!baseParams)
-      return
-
-    const {
-      setSyncWorkflowDraftHash,
-      setDraftUpdatedAt,
-    } = workflowStore.getState()
-
-    try {
-      // IMPORTANT: Get the LATEST hash right before sending the request
-      // This ensures that even if queued, each request uses the most recent hash
-      const latestHash = workflowStore.getState().syncWorkflowDraftHash
-
-      const postParams = {
-        ...baseParams,
-        params: {
-          ...baseParams.params,
-          hash: latestHash || null, // null for first-time, otherwise use latest hash
-        },
-      }
-
-      const res = await syncWorkflowDraft(postParams)
-      setSyncWorkflowDraftHash(res.hash)
-      setDraftUpdatedAt(res.updated_at)
-      callback?.onSuccess?.()
-    }
-    catch (error: any) {
-      if (error && error.json && !error.bodyUsed) {
-        error.json().then((err: any) => {
-          if (err.code === 'draft_workflow_not_sync' && !notRefreshWhenSyncError)
-            handleRefreshWorkflowDraft()
-        })
-      }
-      callback?.onError?.()
-    }
-    finally {
-      callback?.onSettled?.()
-    }
+    const postParams = getPostParams()
+
+    if (postParams) {
+      const {
+        setSyncWorkflowDraftHash,
+        setDraftUpdatedAt,
+      } = workflowStore.getState()
+      try {
+        const res = await syncWorkflowDraft(postParams)
+        setSyncWorkflowDraftHash(res.hash)
+        setDraftUpdatedAt(res.updated_at)
+        callback?.onSuccess?.()
+      }
+      catch (error: any) {
+        if (error && error.json && !error.bodyUsed) {
+          error.json().then((err: any) => {
+            if (err.code === 'draft_workflow_not_sync' && !notRefreshWhenSyncError)
+              handleRefreshWorkflowDraft()
+          })
+        }
+        callback?.onError?.()
+      }
+      finally {
+        callback?.onSettled?.()
+      }
+    }
  }, [workflowStore, getPostParams, getNodesReadOnly, handleRefreshWorkflowDraft])
Deleted file (trigger oRPC contracts):

@@ -1,119 +0,0 @@
import type {
  TriggerLogEntity,
  TriggerOAuthClientParams,
  TriggerOAuthConfig,
  TriggerProviderApiEntity,
  TriggerSubscription,
  TriggerSubscriptionBuilder,
} from '@/app/components/workflow/block-selector/types'
import { type } from '@orpc/contract'
import { base } from '../base'

export const triggersContract = base
  .route({ path: '/workspaces/current/triggers', method: 'GET' })
  .input(type<{ query?: { type?: string } }>())
  .output(type<TriggerProviderApiEntity[]>())

export const triggerProviderInfoContract = base
  .route({ path: '/workspaces/current/trigger-provider/{provider}/info', method: 'GET' })
  .input(type<{ params: { provider: string } }>())
  .output(type<TriggerProviderApiEntity>())

export const triggerSubscriptionsContract = base
  .route({ path: '/workspaces/current/trigger-provider/{provider}/subscriptions/list', method: 'GET' })
  .input(type<{ params: { provider: string } }>())
  .output(type<TriggerSubscription[]>())

export const triggerSubscriptionBuilderCreateContract = base
  .route({ path: '/workspaces/current/trigger-provider/{provider}/subscriptions/builder/create', method: 'POST' })
  .input(type<{
    params: { provider: string }
    body?: { credential_type?: string }
  }>())
  .output(type<{ subscription_builder: TriggerSubscriptionBuilder }>())

export const triggerSubscriptionBuilderUpdateContract = base
  .route({ path: '/workspaces/current/trigger-provider/{provider}/subscriptions/builder/update/{subscriptionBuilderId}', method: 'POST' })
  .input(type<{
    params: { provider: string, subscriptionBuilderId: string }
    body?: {
      name?: string
      properties?: Record<string, unknown>
      parameters?: Record<string, unknown>
      credentials?: Record<string, unknown>
    }
  }>())
  .output(type<TriggerSubscriptionBuilder>())

export const triggerSubscriptionBuilderVerifyUpdateContract = base
  .route({ path: '/workspaces/current/trigger-provider/{provider}/subscriptions/builder/verify-and-update/{subscriptionBuilderId}', method: 'POST' })
  .input(type<{
    params: { provider: string, subscriptionBuilderId: string }
    body?: { credentials?: Record<string, unknown> }
  }>())
  .output(type<{ verified: boolean }>())

export const triggerSubscriptionVerifyContract = base
  .route({ path: '/workspaces/current/trigger-provider/{provider}/subscriptions/verify/{subscriptionId}', method: 'POST' })
  .input(type<{
    params: { provider: string, subscriptionId: string }
    body?: { credentials?: Record<string, unknown> }
  }>())
  .output(type<{ verified: boolean }>())

export const triggerSubscriptionBuildContract = base
  .route({ path: '/workspaces/current/trigger-provider/{provider}/subscriptions/builder/build/{subscriptionBuilderId}', method: 'POST' })
  .input(type<{
    params: { provider: string, subscriptionBuilderId: string }
    body?: {
      name?: string
      parameters?: Record<string, unknown>
    }
  }>())
  .output(type<unknown>())

export const triggerSubscriptionDeleteContract = base
  .route({ path: '/workspaces/current/trigger-provider/{subscriptionId}/subscriptions/delete', method: 'POST' })
  .input(type<{ params: { subscriptionId: string } }>())
  .output(type<{ result: string }>())

export const triggerSubscriptionUpdateContract = base
  .route({ path: '/workspaces/current/trigger-provider/{subscriptionId}/subscriptions/update', method: 'POST' })
  .input(type<{
    params: { subscriptionId: string }
    body?: {
      name?: string
      properties?: Record<string, unknown>
      parameters?: Record<string, unknown>
      credentials?: Record<string, unknown>
    }
  }>())
  .output(type<{ result: string, id: string }>())

export const triggerSubscriptionBuilderLogsContract = base
  .route({ path: '/workspaces/current/trigger-provider/{provider}/subscriptions/builder/logs/{subscriptionBuilderId}', method: 'GET' })
  .input(type<{ params: { provider: string, subscriptionBuilderId: string } }>())
  .output(type<{ logs: TriggerLogEntity[] }>())

export const triggerOAuthConfigContract = base
  .route({ path: '/workspaces/current/trigger-provider/{provider}/oauth/client', method: 'GET' })
  .input(type<{ params: { provider: string } }>())
  .output(type<TriggerOAuthConfig>())

export const triggerOAuthConfigureContract = base
  .route({ path: '/workspaces/current/trigger-provider/{provider}/oauth/client', method: 'POST' })
  .input(type<{
    params: { provider: string }
    body: { client_params?: TriggerOAuthClientParams, enabled: boolean }
  }>())
  .output(type<{ result: string }>())

export const triggerOAuthDeleteContract = base
  .route({ path: '/workspaces/current/trigger-provider/{provider}/oauth/client', method: 'DELETE' })
  .input(type<{ params: { provider: string } }>())
  .output(type<{ result: string }>())

export const triggerOAuthInitiateContract = base
  .route({ path: '/workspaces/current/trigger-provider/{provider}/subscriptions/oauth/authorize', method: 'GET' })
  .input(type<{ params: { provider: string } }>())
  .output(type<{ authorization_url: string, subscription_builder: TriggerSubscriptionBuilder }>())
@@ -1,23 +1,6 @@
import type { InferContractRouterInputs } from '@orpc/contract'
import { bindPartnerStackContract, invoicesContract } from './console/billing'
import { systemFeaturesContract } from './console/system'
-import {
-  triggerOAuthConfigContract,
-  triggerOAuthConfigureContract,
-  triggerOAuthDeleteContract,
-  triggerOAuthInitiateContract,
-  triggerProviderInfoContract,
-  triggersContract,
-  triggerSubscriptionBuildContract,
-  triggerSubscriptionBuilderCreateContract,
-  triggerSubscriptionBuilderLogsContract,
-  triggerSubscriptionBuilderUpdateContract,
-  triggerSubscriptionBuilderVerifyUpdateContract,
-  triggerSubscriptionDeleteContract,
-  triggerSubscriptionsContract,
-  triggerSubscriptionUpdateContract,
-  triggerSubscriptionVerifyContract,
-} from './console/trigger'
import { trialAppDatasetsContract, trialAppInfoContract, trialAppParametersContract, trialAppWorkflowsContract } from './console/try-app'
import { collectionPluginsContract, collectionsContract, searchAdvancedContract } from './marketplace'

@@ -41,23 +24,6 @@ export const consoleRouterContract = {
    invoices: invoicesContract,
    bindPartnerStack: bindPartnerStackContract,
  },
-  triggers: {
-    list: triggersContract,
-    providerInfo: triggerProviderInfoContract,
-    subscriptions: triggerSubscriptionsContract,
-    subscriptionBuilderCreate: triggerSubscriptionBuilderCreateContract,
-    subscriptionBuilderUpdate: triggerSubscriptionBuilderUpdateContract,
-    subscriptionBuilderVerifyUpdate: triggerSubscriptionBuilderVerifyUpdateContract,
-    subscriptionVerify: triggerSubscriptionVerifyContract,
-    subscriptionBuild: triggerSubscriptionBuildContract,
-    subscriptionDelete: triggerSubscriptionDeleteContract,
-    subscriptionUpdate: triggerSubscriptionUpdateContract,
-    subscriptionBuilderLogs: triggerSubscriptionBuilderLogsContract,
-    oauthConfig: triggerOAuthConfigContract,
-    oauthConfigure: triggerOAuthConfigureContract,
-    oauthDelete: triggerOAuthDeleteContract,
-    oauthInitiate: triggerOAuthInitiateContract,
-  },
}

export type ConsoleInputs = InferContractRouterInputs<typeof consoleRouterContract>
@@ -38,11 +38,6 @@ pnpm lint:tss

This command lints the entire project and is intended for final verification before committing or pushing changes.

-### Introducing New Plugins or Rules
-
-If a new rule causes many existing code errors or automatic fixes generate too many diffs, do not use the `--fix` option for automatic fixes.
-You can introduce the rule first, then use the `--suppress-all` option to temporarily suppress these errors, and gradually fix them in subsequent changes.
-
## Type Check

You should be able to see suggestions from TypeScript in your editor for all open files.
[File diff suppressed because it is too large]
@@ -1,9 +1,9 @@
// @ts-check
import antfu from '@antfu/eslint-config'
import pluginQuery from '@tanstack/eslint-plugin-query'
-import tailwindcss from 'eslint-plugin-better-tailwindcss'
import sonar from 'eslint-plugin-sonarjs'
import storybook from 'eslint-plugin-storybook'
+import tailwind from 'eslint-plugin-tailwindcss'
import dify from './eslint-rules/index.js'

export default antfu(

@@ -66,16 +66,42 @@ export default antfu(
      sonarjs: sonar,
    },
  },
+  tailwind.configs['flat/recommended'],
  {
    files: ['**/*.{ts,tsx}'],
-    plugins: {
-      tailwindcss,
-    },
-    rules: {
-      'tailwindcss/enforce-consistent-class-order': 'error',
-      'tailwindcss/no-duplicate-classes': 'error',
-      'tailwindcss/no-unnecessary-whitespace': 'error',
-      'tailwindcss/no-unknown-classes': 'warn',
-    },
+    settings: {
+      tailwindcss: {
+        // These are the default values but feel free to customize
+        callees: ['classnames', 'clsx', 'ctl', 'cn', 'classNames'],
+        config: 'tailwind.config.js', // returned from `loadConfig()` utility if not provided
+        cssFiles: [
+          '**/*.css',
+          '!**/node_modules',
+          '!**/.*',
+          '!**/dist',
+          '!**/build',
+          '!**/.storybook',
+          '!**/.next',
+          '!**/.public',
+        ],
+        cssFilesRefreshRate: 5_000,
+        removeDuplicates: true,
+        skipClassAttribute: false,
+        whitelist: [],
+        tags: [], // can be set to e.g. ['tw'] for use in tw`bg-blue`
+        classRegex: '^class(Name)?$', // can be modified to support custom attributes. E.g. "^tw$" for `twin.macro`
+      },
+    },
+    rules: {
+      // due to 1k lines of tailwind config, these rule have performance issue
+      'tailwindcss/no-contradicting-classname': 'off',
+      'tailwindcss/enforces-shorthand': 'off',
+      'tailwindcss/no-custom-classname': 'off',
+      'tailwindcss/no-unnecessary-arbitrary-value': 'off',
+
+      'tailwindcss/no-arbitrary-value': 'off',
+      'tailwindcss/classnames-order': 'warn',
+      'tailwindcss/enforces-negative-arbitrary-values': 'warn',
+      'tailwindcss/migration-from-tailwind-2': 'warn',
+    },
  },
  {
@@ -1,7 +1,7 @@
{
  "name": "dify-web",
  "type": "module",
  "version": "1.12.1",
  "version": "1.12.0",
  "private": true,
  "packageManager": "pnpm@10.27.0+sha512.72d699da16b1179c14ba9e64dc71c9a40988cbdc65c264cb0e489db7de917f20dcf4d64d8723625f2969ba52d4b7e2a1170682d9ac2a5dcaeaab732b7e16f04a",
  "imports": {
@@ -153,7 +153,7 @@
    "sharp": "0.33.5",
    "sortablejs": "1.15.6",
    "string-ts": "2.3.1",
    "tailwind-merge": "2.6.1",
    "tailwind-merge": "2.6.0",
    "tldts": "7.0.17",
    "use-context-selector": "2.0.0",
    "uuid": "10.0.0",
@@ -211,11 +211,11 @@
    "cross-env": "10.1.0",
    "esbuild-wasm": "0.27.2",
    "eslint": "9.39.2",
    "eslint-plugin-better-tailwindcss": "4.1.1",
    "eslint-plugin-react-hooks": "7.0.1",
    "eslint-plugin-react-refresh": "0.4.26",
    "eslint-plugin-sonarjs": "3.0.6",
    "eslint-plugin-storybook": "10.2.1",
    "eslint-plugin-tailwindcss": "3.18.2",
    "husky": "9.1.7",
    "jsdom": "27.3.0",
    "jsdom-testing-mocks": "1.16.0",
@@ -227,7 +227,7 @@
    "sass": "1.93.2",
    "serwist": "9.5.0",
    "storybook": "10.2.0",
    "tailwindcss": "3.4.19",
    "tailwindcss": "3.4.18",
    "tsx": "4.21.0",
    "typescript": "5.9.3",
    "uglify-js": "3.19.3",
web/pnpm-lock.yaml (generated, 125 lines changed)
@@ -127,7 +127,7 @@ importers:
        version: 3.2.5
      '@tailwindcss/typography':
        specifier: 0.5.19
        version: 0.5.19(tailwindcss@3.4.19(tsx@4.21.0)(yaml@2.8.2))
        version: 0.5.19(tailwindcss@3.4.18(tsx@4.21.0)(yaml@2.8.2))
      '@tanstack/react-form':
        specifier: 1.23.7
        version: 1.23.7(react-dom@19.2.4(react@19.2.4))(react@19.2.4)
@@ -342,8 +342,8 @@ importers:
        specifier: 2.3.1
        version: 2.3.1
      tailwind-merge:
        specifier: 2.6.1
        version: 2.6.1
        specifier: 2.6.0
        version: 2.6.0
      tldts:
        specifier: 7.0.17
        version: 7.0.17
@@ -510,9 +510,6 @@ importers:
      eslint:
        specifier: 9.39.2
        version: 9.39.2(jiti@1.21.7)
      eslint-plugin-better-tailwindcss:
        specifier: 4.1.1
        version: 4.1.1(eslint@9.39.2(jiti@1.21.7))(tailwindcss@3.4.19(tsx@4.21.0)(yaml@2.8.2))(typescript@5.9.3)
      eslint-plugin-react-hooks:
        specifier: 7.0.1
        version: 7.0.1(eslint@9.39.2(jiti@1.21.7))
@@ -525,6 +522,9 @@ importers:
      eslint-plugin-storybook:
        specifier: 10.2.1
        version: 10.2.1(eslint@9.39.2(jiti@1.21.7))(storybook@10.2.0(@testing-library/dom@10.4.1)(react-dom@19.2.4(react@19.2.4))(react@19.2.4))(typescript@5.9.3)
      eslint-plugin-tailwindcss:
        specifier: 3.18.2
        version: 3.18.2(tailwindcss@3.4.18(tsx@4.21.0)(yaml@2.8.2))
      husky:
        specifier: 9.1.7
        version: 9.1.7
@@ -559,8 +559,8 @@ importers:
        specifier: 10.2.0
        version: 10.2.0(@testing-library/dom@10.4.1)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)
      tailwindcss:
        specifier: 3.4.19
        version: 3.4.19(tsx@4.21.0)(yaml@2.8.2)
        specifier: 3.4.18
        version: 3.4.18(tsx@4.21.0)(yaml@2.8.2)
      tsx:
        specifier: 4.21.0
        version: 4.21.0
@@ -1202,10 +1202,6 @@ packages:
    resolution: {integrity: sha512-r18fEAj9uCk+VjzGt2thsbOmychS+4kxI14spVNibUO2vqKX7obOG+ymZljAwuPZl+S3clPGwCwTDtrdqTiY6Q==}
    engines: {node: ^20.19.0 || ^22.13.0 || >=24}

  '@eslint/css-tree@3.6.8':
    resolution: {integrity: sha512-s0f40zY7dlMp8i0Jf0u6l/aSswS0WRAgkhgETgiCJRcxIWb4S/Sp9uScKHWbkM3BnoFLbJbmOYk5AZUDFVxaLA==}
    engines: {node: ^10 || ^12.20.0 || ^14.13.0 || >=15.0.0}

  '@eslint/eslintrc@3.3.3':
    resolution: {integrity: sha512-Kr+LPIUVKz2qkx1HAMH8q1q6azbqBAsXJUxBl/ODDuVPX45Z9DfwB8tPjTi6nNZ8BuM3nbJxC5zCAg5elnBUTQ==}
    engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0}
@@ -3366,11 +3362,6 @@ packages:
  '@ungap/structured-clone@1.3.0':
    resolution: {integrity: sha512-WmoN8qaIAo7WTYWbAZuG8PYEhn5fkz7dZrqTBZ7dtt//lL2Gwms1IcnQ5yHqjDfX8Ft5j4YzDM23f87zBfDe9g==}

  '@valibot/to-json-schema@1.5.0':
    resolution: {integrity: sha512-GE7DmSr1C2UCWPiV0upRH6mv0cCPsqYGs819fb6srCS1tWhyXrkGGe+zxUiwzn/L1BOfADH4sNjY/YHCuP8phQ==}
    peerDependencies:
      valibot: ^1.2.0

  '@vitejs/plugin-react@5.1.2':
    resolution: {integrity: sha512-EcA07pHJouywpzsoTUqNh5NwGayl2PPVEJKUSinGGSxFGYn+shYbqMGBg6FXDqgXum9Ou/ecb+411ssw8HImJQ==}
    engines: {node: ^20.19.0 || >=22.12.0}
@@ -4436,19 +4427,6 @@ packages:
    peerDependencies:
      eslint: '*'

  eslint-plugin-better-tailwindcss@4.1.1:
    resolution: {integrity: sha512-ctw461TGJi8iM0P01mNVjSW7jeUAdyUgmrrd59np5/VxqX50nayMbwKZkfmjWpP1PWOqlh4CSMOH/WW6ICWmJw==}
    engines: {node: ^20.19.0 || ^22.12.0 || >=23.0.0}
    peerDependencies:
      eslint: ^7.0.0 || ^8.0.0 || ^9.0.0
      oxlint: ^1.35.0
      tailwindcss: ^3.3.0 || ^4.1.17
    peerDependenciesMeta:
      eslint:
        optional: true
      oxlint:
        optional: true

  eslint-plugin-command@3.4.0:
    resolution: {integrity: sha512-EW4eg/a7TKEhG0s5IEti72kh3YOTlnhfFNuctq5WnB1fst37/IHTd5OkD+vnlRf3opTvUcSRihAateP6bT5ZcA==}
    peerDependencies:
@@ -4562,6 +4540,12 @@ packages:
      eslint: '>=8'
      storybook: ^10.2.1

  eslint-plugin-tailwindcss@3.18.2:
    resolution: {integrity: sha512-QbkMLDC/OkkjFQ1iz/5jkMdHfiMu/uwujUHLAJK5iwNHD8RTxVTlsUezE0toTZ6VhybNBsk+gYGPDq2agfeRNA==}
    engines: {node: '>=18.12.0'}
    peerDependencies:
      tailwindcss: ^3.4.0

  eslint-plugin-toml@1.0.3:
    resolution: {integrity: sha512-GlCBX+R313RvFY2Tj0ZmvzCEv8FDp1z2itvTFTV4bW/Bkbl3xEp9inWNsRWH3SiDUlxo8Pew31ILEp/3J0WxaA==}
    engines: {node: ^20.19.0 || ^22.13.0 || >=24}
@@ -5583,9 +5567,6 @@ packages:
  mdn-data@2.12.2:
    resolution: {integrity: sha512-IEn+pegP1aManZuckezWCO+XZQDplx1366JoVhTpMpBB1sPey/SbveZQUosKiKiGYjg1wH4pMlNgXbCiYgihQA==}

  mdn-data@2.23.0:
    resolution: {integrity: sha512-786vq1+4079JSeu2XdcDjrhi/Ry7BWtjDl9WtGPWLiIHb2T66GvIVflZTBoSNZ5JqTtJGYEVMuFA/lbQlMOyDQ==}

  memoize-one@5.2.1:
    resolution: {integrity: sha512-zYiwtZUcYyXKo/np96AGZAckk+FWWsUdJ3cHGGmld7+AhvcWmQyGCYUh1hc4Q/pkOhb65dQR/pqCyK0cOaHz4Q==}
@@ -6851,15 +6832,11 @@ packages:
    resolution: {integrity: sha512-yEFYrVhod+hdNyx7g5Bnkkb0G6si8HJurOoOEgC8B/O0uXLHlaey/65KRv6cuWBNhBgHKAROVpc7QyYqE5gFng==}
    engines: {node: '>=20'}

  tailwind-csstree@0.1.4:
    resolution: {integrity: sha512-FzD187HuFIZEyeR7Xy6sJbJll2d4SybS90satC8SKIuaNRC05CxMvdzN7BUsfDQffcnabckRM5OIcfArjsZ0mg==}
    engines: {node: '>=18.18'}
  tailwind-merge@2.6.0:
    resolution: {integrity: sha512-P+Vu1qXfzediirmHOC3xKGAYeZtPcV9g76X+xg2FD4tYgR71ewMA35Y3sCz3zhiN/dwefRpJX0yBcgwi1fXNQA==}

  tailwind-merge@2.6.1:
    resolution: {integrity: sha512-Oo6tHdpZsGpkKG88HJ8RR1rg/RdnEkQEfMoEk2x1XRI3F1AxeU+ijRXpiVUF4UbLfcxxRGw6TbUINKYdWVsQTQ==}

  tailwindcss@3.4.19:
    resolution: {integrity: sha512-3ofp+LL8E+pK/JuPLPggVAIaEuhvIz4qNcf3nA1Xn2o/7fb7s/TYpHhwGDv1ZU3PkBluUVaF8PyCHcm48cKLWQ==}
  tailwindcss@3.4.18:
    resolution: {integrity: sha512-6A2rnmW5xZMdw11LYjhcI5846rt9pbLSabY5XPxo+XWdxwZaFEn47Go4NzFiHu9sNNmr/kXivP1vStfvMaK1GQ==}
    engines: {node: '>=14.0.0'}
    hasBin: true

@@ -7008,10 +6985,6 @@ packages:
      typescript:
        optional: true

  tsconfig-paths-webpack-plugin@4.2.0:
    resolution: {integrity: sha512-zbem3rfRS8BgeNK50Zz5SIQgXzLafiHjOwUAvk/38/o1jHn/V5QAgVUcz884or7WYcPaH3N2CIfUc2u0ul7UcA==}
    engines: {node: '>=10.13.0'}

  tsconfig-paths@4.2.0:
    resolution: {integrity: sha512-NoZ4roiN7LnbKn9QqE1amc9DJfzvZXxF4xDavcOWt1BPkdx+m+0gJuPM+S0vCe7zTJMYUP0R8pO2XMr+Y8oLIg==}
    engines: {node: '>=6'}
@@ -7180,14 +7153,6 @@ packages:
    resolution: {integrity: sha512-0/A9rDy9P7cJ+8w1c9WD9V//9Wj15Ce2MPz8Ri6032usz+NfePxx5AcN3bN+r6ZL6jEo066/yNYB3tn4pQEx+A==}
    hasBin: true

  valibot@1.2.0:
    resolution: {integrity: sha512-mm1rxUsmOxzrwnX5arGS+U4T25RdvpPjPN4yR0u9pUBov9+zGVtO84tif1eY4r6zWxVxu3KzIyknJy3rxfRZZg==}
    peerDependencies:
      typescript: '>=5'
    peerDependenciesMeta:
      typescript:
        optional: true

  vfile-location@5.0.3:
    resolution: {integrity: sha512-5yXvWDEgqeiYiBe1lbxYF7UMAIm/IcopxMHrMQDq3nvKcjPKIhZklUKL+AE7J7uApI4kwe2snsK+eI6UTj9EHg==}

@@ -8272,11 +8237,6 @@ snapshots:
    dependencies:
      '@types/json-schema': 7.0.15

  '@eslint/css-tree@3.6.8':
    dependencies:
      mdn-data: 2.23.0
      source-map-js: 1.2.1

  '@eslint/eslintrc@3.3.3':
    dependencies:
      ajv: 6.12.6
@@ -9913,10 +9873,10 @@ snapshots:
    dependencies:
      '@swc/counter': 0.1.3

  '@tailwindcss/typography@0.5.19(tailwindcss@3.4.19(tsx@4.21.0)(yaml@2.8.2))':
  '@tailwindcss/typography@0.5.19(tailwindcss@3.4.18(tsx@4.21.0)(yaml@2.8.2))':
    dependencies:
      postcss-selector-parser: 6.0.10
      tailwindcss: 3.4.19(tsx@4.21.0)(yaml@2.8.2)
      tailwindcss: 3.4.18(tsx@4.21.0)(yaml@2.8.2)

  '@tanstack/devtools-client@0.0.5':
    dependencies:
@@ -10604,10 +10564,6 @@ snapshots:

  '@ungap/structured-clone@1.3.0': {}

  '@valibot/to-json-schema@1.5.0(valibot@1.2.0(typescript@5.9.3))':
    dependencies:
      valibot: 1.2.0(typescript@5.9.3)

  '@vitejs/plugin-react@5.1.2(vite@7.3.1(@types/node@18.15.0)(jiti@1.21.7)(sass@1.93.2)(terser@5.46.0)(tsx@4.21.0)(yaml@2.8.2))':
    dependencies:
      '@babel/core': 7.28.6
@@ -11761,22 +11717,6 @@ snapshots:
    dependencies:
      eslint: 9.39.2(jiti@1.21.7)

  eslint-plugin-better-tailwindcss@4.1.1(eslint@9.39.2(jiti@1.21.7))(tailwindcss@3.4.19(tsx@4.21.0)(yaml@2.8.2))(typescript@5.9.3):
    dependencies:
      '@eslint/css-tree': 3.6.8
      '@valibot/to-json-schema': 1.5.0(valibot@1.2.0(typescript@5.9.3))
      enhanced-resolve: 5.18.4
      jiti: 2.6.1
      synckit: 0.11.12
      tailwind-csstree: 0.1.4
      tailwindcss: 3.4.19(tsx@4.21.0)(yaml@2.8.2)
      tsconfig-paths-webpack-plugin: 4.2.0
      valibot: 1.2.0(typescript@5.9.3)
    optionalDependencies:
      eslint: 9.39.2(jiti@1.21.7)
    transitivePeerDependencies:
      - typescript

  eslint-plugin-command@3.4.0(eslint@9.39.2(jiti@1.21.7)):
    dependencies:
      '@es-joy/jsdoccomment': 0.78.0
@@ -12008,6 +11948,12 @@ snapshots:
      - supports-color
      - typescript

  eslint-plugin-tailwindcss@3.18.2(tailwindcss@3.4.18(tsx@4.21.0)(yaml@2.8.2)):
    dependencies:
      fast-glob: 3.3.3
      postcss: 8.5.6
      tailwindcss: 3.4.18(tsx@4.21.0)(yaml@2.8.2)

  eslint-plugin-toml@1.0.3(eslint@9.39.2(jiti@1.21.7)):
    dependencies:
      '@eslint/core': 1.0.1
@@ -13274,8 +13220,6 @@ snapshots:

  mdn-data@2.12.2: {}

  mdn-data@2.23.0: {}

  memoize-one@5.2.1: {}

  merge-stream@2.0.0: {}
@@ -14887,11 +14831,9 @@ snapshots:

  tagged-tag@1.0.0: {}

  tailwind-csstree@0.1.4: {}
  tailwind-merge@2.6.0: {}

  tailwind-merge@2.6.1: {}

  tailwindcss@3.4.19(tsx@4.21.0)(yaml@2.8.2):
  tailwindcss@3.4.18(tsx@4.21.0)(yaml@2.8.2):
    dependencies:
      '@alloc/quick-lru': 5.2.0
      arg: 5.0.2
@@ -15046,13 +14988,6 @@ snapshots:
    optionalDependencies:
      typescript: 5.9.3

  tsconfig-paths-webpack-plugin@4.2.0:
    dependencies:
      chalk: 4.1.2
      enhanced-resolve: 5.18.4
      tapable: 2.3.0
      tsconfig-paths: 4.2.0

  tsconfig-paths@4.2.0:
    dependencies:
      json5: 2.2.3
@@ -15218,10 +15153,6 @@ snapshots:

  uuid@11.1.0: {}

  valibot@1.2.0(typescript@5.9.3):
    optionalDependencies:
      typescript: 5.9.3

  vfile-location@5.0.3:
    dependencies:
      '@types/unist': 3.0.3
@@ -10,14 +10,17 @@ import type {
} from '@/app/components/workflow/block-selector/types'
import { useMutation, useQuery, useQueryClient } from '@tanstack/react-query'
import { CollectionType } from '@/app/components/tools/types'
import { consoleClient, consoleQuery } from '@/service/client'
import { get, post } from './base'
import { del, get, post } from './base'
import { useInvalid } from './use-base'

const NAME_SPACE = 'triggers'

// Trigger Provider Service - Provider ID Format: plugin_id/provider_name

// Convert backend API response to frontend ToolWithProvider format
const convertToTriggerWithProvider = (provider: TriggerProviderApiEntity): TriggerWithProvider => {
  return {
    // Collection fields
    id: provider.plugin_id || provider.name,
    name: provider.name,
    author: provider.author,
@@ -55,9 +58,12 @@ const convertToTriggerWithProvider = (provider: TriggerProviderApiEntity): Trigg
      labels: provider.tags || [],
      output_schema: event.output_schema || {},
    })),

    // Trigger-specific schema fields
    subscription_constructor: provider.subscription_constructor,
    subscription_schema: provider.subscription_schema,
    supported_creation_methods: provider.supported_creation_methods,

    meta: {
      version: '1.0',
    },
@@ -66,20 +72,22 @@ const convertToTriggerWithProvider = (provider: TriggerProviderApiEntity): Trigg

export const useAllTriggerPlugins = (enabled = true) => {
  return useQuery<TriggerWithProvider[]>({
    queryKey: consoleQuery.triggers.list.queryKey({ input: {} }),
    queryKey: [NAME_SPACE, 'all'],
    queryFn: async () => {
      const response = await consoleClient.triggers.list({})
      const response = await get<TriggerProviderApiEntity[]>('/workspaces/current/triggers')
      return response.map(convertToTriggerWithProvider)
    },
    enabled,
    staleTime: 0,
    gcTime: 0,
  })
}

export const useTriggerPluginsByType = (triggerType: string, enabled = true) => {
  return useQuery<TriggerWithProvider[]>({
    queryKey: consoleQuery.triggers.list.queryKey({ input: { query: { type: triggerType } } }),
    queryKey: [NAME_SPACE, 'byType', triggerType],
    queryFn: async () => {
      const response = await consoleClient.triggers.list({ query: { type: triggerType } })
      const response = await get<TriggerProviderApiEntity[]>(`/workspaces/current/triggers?type=${triggerType}`)
      return response.map(convertToTriggerWithProvider)
    },
    enabled: enabled && !!triggerType,
@@ -87,23 +95,25 @@ export const useTriggerPluginsByType = (triggerType: string, enabled = true) =>
}

export const useInvalidateAllTriggerPlugins = () => {
  return useInvalid(consoleQuery.triggers.list.queryKey({ input: {} }))
  return useInvalid([NAME_SPACE, 'all'])
}

// ===== Trigger Subscriptions Management =====

export const useTriggerProviderInfo = (provider: string, enabled = true) => {
  return useQuery<TriggerProviderApiEntity>({
    queryKey: consoleQuery.triggers.providerInfo.queryKey({ input: { params: { provider } } }),
    queryFn: () => consoleClient.triggers.providerInfo({ params: { provider } }),
    queryKey: [NAME_SPACE, 'provider-info', provider],
    queryFn: () => get<TriggerProviderApiEntity>(`/workspaces/current/trigger-provider/${provider}/info`),
    enabled: enabled && !!provider,
    staleTime: 0,
    gcTime: 0,
  })
}

export const useTriggerSubscriptions = (provider: string, enabled = true) => {
  return useQuery<TriggerSubscription[]>({
    queryKey: consoleQuery.triggers.subscriptions.queryKey({ input: { params: { provider } } }),
    queryFn: () => consoleClient.triggers.subscriptions({ params: { provider } }),
    queryKey: [NAME_SPACE, 'list-subscriptions', provider],
    queryFn: () => get<TriggerSubscription[]>(`/workspaces/current/trigger-provider/${provider}/subscriptions/list`),
    enabled: enabled && !!provider,
  })
}
@@ -112,30 +122,30 @@ export const useInvalidateTriggerSubscriptions = () => {
  const queryClient = useQueryClient()
  return (provider: string) => {
    queryClient.invalidateQueries({
      queryKey: consoleQuery.triggers.subscriptions.queryKey({ input: { params: { provider } } }),
      queryKey: [NAME_SPACE, 'subscriptions', provider],
    })
  }
}

export const useCreateTriggerSubscriptionBuilder = () => {
  return useMutation({
    mutationKey: consoleQuery.triggers.subscriptionBuilderCreate.mutationKey(),
    mutationKey: [NAME_SPACE, 'create-subscription-builder'],
    mutationFn: (payload: {
      provider: string
      credential_type?: string
    }) => {
      const { provider, ...body } = payload
      return consoleClient.triggers.subscriptionBuilderCreate({
        params: { provider },
        body,
      })
      return post<{ subscription_builder: TriggerSubscriptionBuilder }>(
        `/workspaces/current/trigger-provider/${provider}/subscriptions/builder/create`,
        { body },
      )
    },
  })
}

export const useUpdateTriggerSubscriptionBuilder = () => {
  return useMutation({
    mutationKey: consoleQuery.triggers.subscriptionBuilderUpdate.mutationKey(),
    mutationKey: [NAME_SPACE, 'update-subscription-builder'],
    mutationFn: (payload: {
      provider: string
      subscriptionBuilderId: string
@@ -145,17 +155,17 @@ export const useUpdateTriggerSubscriptionBuilder = () => {
      credentials?: Record<string, unknown>
    }) => {
      const { provider, subscriptionBuilderId, ...body } = payload
      return consoleClient.triggers.subscriptionBuilderUpdate({
        params: { provider, subscriptionBuilderId },
        body,
      })
      return post<TriggerSubscriptionBuilder>(
        `/workspaces/current/trigger-provider/${provider}/subscriptions/builder/update/${subscriptionBuilderId}`,
        { body },
      )
    },
  })
}

export const useVerifyAndUpdateTriggerSubscriptionBuilder = () => {
  return useMutation({
    mutationKey: consoleQuery.triggers.subscriptionBuilderVerifyUpdate.mutationKey(),
    mutationKey: [NAME_SPACE, 'verify-and-update-subscription-builder'],
    mutationFn: (payload: {
      provider: string
      subscriptionBuilderId: string
@@ -173,7 +183,7 @@ export const useVerifyAndUpdateTriggerSubscriptionBuilder = () => {

export const useVerifyTriggerSubscription = () => {
  return useMutation({
    mutationKey: consoleQuery.triggers.subscriptionVerify.mutationKey(),
    mutationKey: [NAME_SPACE, 'verify-subscription'],
    mutationFn: (payload: {
      provider: string
      subscriptionId: string
@@ -198,24 +208,24 @@ export type BuildTriggerSubscriptionPayload = {

export const useBuildTriggerSubscription = () => {
  return useMutation({
    mutationKey: consoleQuery.triggers.subscriptionBuild.mutationKey(),
    mutationKey: [NAME_SPACE, 'build-subscription'],
    mutationFn: (payload: BuildTriggerSubscriptionPayload) => {
      const { provider, subscriptionBuilderId, ...body } = payload
      return consoleClient.triggers.subscriptionBuild({
        params: { provider, subscriptionBuilderId },
        body,
      })
      return post(
        `/workspaces/current/trigger-provider/${provider}/subscriptions/builder/build/${subscriptionBuilderId}`,
        { body },
      )
    },
  })
}

export const useDeleteTriggerSubscription = () => {
  return useMutation({
    mutationKey: consoleQuery.triggers.subscriptionDelete.mutationKey(),
    mutationKey: [NAME_SPACE, 'delete-subscription'],
    mutationFn: (subscriptionId: string) => {
      return consoleClient.triggers.subscriptionDelete({
        params: { subscriptionId },
      })
      return post<{ result: string }>(
        `/workspaces/current/trigger-provider/${subscriptionId}/subscriptions/delete`,
      )
    },
  })
}
@@ -230,13 +240,13 @@ export type UpdateTriggerSubscriptionPayload = {

export const useUpdateTriggerSubscription = () => {
  return useMutation({
    mutationKey: consoleQuery.triggers.subscriptionUpdate.mutationKey(),
    mutationKey: [NAME_SPACE, 'update-subscription'],
    mutationFn: (payload: UpdateTriggerSubscriptionPayload) => {
      const { subscriptionId, ...body } = payload
      return consoleClient.triggers.subscriptionUpdate({
        params: { subscriptionId },
        body,
      })
      return post<{ result: string, id: string }>(
        `/workspaces/current/trigger-provider/${subscriptionId}/subscriptions/update`,
        { body },
      )
    },
  })
}
@@ -252,8 +262,10 @@ export const useTriggerSubscriptionBuilderLogs = (
  const { enabled = true, refetchInterval = false } = options

  return useQuery<{ logs: TriggerLogEntity[] }>({
    queryKey: consoleQuery.triggers.subscriptionBuilderLogs.queryKey({ input: { params: { provider, subscriptionBuilderId } } }),
    queryFn: () => consoleClient.triggers.subscriptionBuilderLogs({ params: { provider, subscriptionBuilderId } }),
    queryKey: [NAME_SPACE, 'subscription-builder-logs', provider, subscriptionBuilderId],
    queryFn: () => get(
      `/workspaces/current/trigger-provider/${provider}/subscriptions/builder/logs/${subscriptionBuilderId}`,
    ),
    enabled: enabled && !!provider && !!subscriptionBuilderId,
    refetchInterval,
  })
@@ -262,8 +274,8 @@ export const useTriggerSubscriptionBuilderLogs = (

// ===== OAuth Management =====
export const useTriggerOAuthConfig = (provider: string, enabled = true) => {
  return useQuery<TriggerOAuthConfig>({
    queryKey: consoleQuery.triggers.oauthConfig.queryKey({ input: { params: { provider } } }),
    queryFn: () => consoleClient.triggers.oauthConfig({ params: { provider } }),
    queryKey: [NAME_SPACE, 'oauth-config', provider],
    queryFn: () => get<TriggerOAuthConfig>(`/workspaces/current/trigger-provider/${provider}/oauth/client`),
    enabled: enabled && !!provider,
  })
}
@@ -276,31 +288,31 @@ export type ConfigureTriggerOAuthPayload = {

export const useConfigureTriggerOAuth = () => {
  return useMutation({
    mutationKey: consoleQuery.triggers.oauthConfigure.mutationKey(),
    mutationKey: [NAME_SPACE, 'configure-oauth'],
    mutationFn: (payload: ConfigureTriggerOAuthPayload) => {
      const { provider, ...body } = payload
      return consoleClient.triggers.oauthConfigure({
        params: { provider },
        body,
      })
      return post<{ result: string }>(
        `/workspaces/current/trigger-provider/${provider}/oauth/client`,
        { body },
      )
    },
  })
}

export const useDeleteTriggerOAuth = () => {
  return useMutation({
    mutationKey: consoleQuery.triggers.oauthDelete.mutationKey(),
    mutationKey: [NAME_SPACE, 'delete-oauth'],
    mutationFn: (provider: string) => {
      return consoleClient.triggers.oauthDelete({
        params: { provider },
      })
      return del<{ result: string }>(
        `/workspaces/current/trigger-provider/${provider}/oauth/client`,
      )
    },
  })
}

export const useInitiateTriggerOAuth = () => {
  return useMutation({
    mutationKey: consoleQuery.triggers.oauthInitiate.mutationKey(),
    mutationKey: [NAME_SPACE, 'initiate-oauth'],
    mutationFn: (provider: string) => {
      return get<{ authorization_url: string, subscription_builder: TriggerSubscriptionBuilder }>(
        `/workspaces/current/trigger-provider/${provider}/subscriptions/oauth/authorize`,
@@ -324,6 +336,7 @@ export const useTriggerPluginDynamicOptions = (payload: {
  return useQuery<{ options: FormOption[] }>({
    queryKey: [NAME_SPACE, 'dynamic-options', payload.plugin_id, payload.provider, payload.action, payload.parameter, payload.credential_id, payload.credentials, payload.extra],
    queryFn: () => {
      // Use new endpoint with POST when credentials provided (for edit mode)
      if (payload.credentials) {
        return post<{ options: FormOption[] }>(
          '/workspaces/current/plugin/parameters/dynamic-options-with-credentials',
@@ -340,6 +353,7 @@ export const useTriggerPluginDynamicOptions = (payload: {
          { silent: true },
        )
      }
      // Use original GET endpoint for normal cases
      return get<{ options: FormOption[] }>(
        '/workspaces/current/plugin/parameters/dynamic-options',
        {
@@ -358,6 +372,7 @@ export const useTriggerPluginDynamicOptions = (payload: {
    enabled: enabled && !!payload.plugin_id && !!payload.provider && !!payload.action && !!payload.parameter && !!payload.credential_id,
    retry: 0,
    staleTime: 0,
    gcTime: 0,
  })
}

@@ -367,7 +382,7 @@ export const useInvalidateTriggerOAuthConfig = () => {
  const queryClient = useQueryClient()
  return (provider: string) => {
    queryClient.invalidateQueries({
      queryKey: consoleQuery.triggers.oauthConfig.queryKey({ input: { params: { provider } } }),
      queryKey: [NAME_SPACE, 'oauth-config', provider],
    })
  }
}
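For orientation, here is a minimal sketch of how a component might consume the refactored hooks above. The hook name and the `id`/`name` fields come from this file; the component itself and the `@/service/use-triggers` import path are illustrative assumptions:

```tsx
// Hypothetical consumer — the component and import path are assumptions;
// useAllTriggerPlugins and the id/name fields come from the hooks above.
import { useAllTriggerPlugins } from '@/service/use-triggers'

const TriggerPluginList = () => {
  // Re-fetches on every mount (the hook sets staleTime/gcTime to 0)
  const { data: plugins, isLoading } = useAllTriggerPlugins()

  if (isLoading)
    return <div>Loading...</div>

  return (
    <ul>
      {plugins?.map(plugin => (
        <li key={plugin.id}>{plugin.name}</li>
      ))}
    </ul>
  )
}

export default TriggerPluginList
```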
web/types/i18n.d.ts (vendored, 2 lines changed)
@@ -27,3 +27,5 @@ export type I18nKeysWithPrefix<
> = Prefix extends ''
  ? keyof Resources[NS]
  : Extract<keyof Resources[NS], `${Prefix}${string}`>

type A = I18nKeysWithPrefix<'billing'>
web/utils/classnames.spec.ts (new file, 157 lines)
@@ -0,0 +1,157 @@
/**
 * Test suite for the classnames utility function
 * This utility combines the classnames library with tailwind-merge
 * to handle conditional CSS classes and merge conflicting Tailwind classes
 */
import { cn } from './classnames'

describe('classnames', () => {
  /**
   * Tests basic classnames library features:
   * - String concatenation
   * - Array handling
   * - Falsy value filtering
   * - Object-based conditional classes
   */
  it('classnames libs feature', () => {
    expect(cn('foo')).toBe('foo')
    expect(cn('foo', 'bar')).toBe('foo bar')
    expect(cn(['foo', 'bar'])).toBe('foo bar')

    expect(cn(undefined)).toBe('')
    expect(cn(null)).toBe('')
    expect(cn(false)).toBe('')

    expect(cn({
      foo: true,
      bar: false,
      baz: true,
    })).toBe('foo baz')
  })

  /**
   * Tests tailwind-merge functionality:
   * - Conflicting class resolution (last one wins)
   * - Modifier handling (hover, focus, etc.)
   * - Important prefix (!)
   * - Custom color classes
   * - Arbitrary values
   */
  it('tailwind-merge', () => {
    /* eslint-disable tailwindcss/classnames-order */
    expect(cn('p-0')).toBe('p-0')
    expect(cn('text-right text-center text-left')).toBe('text-left')
    expect(cn('pl-4 p-8')).toBe('p-8')
    expect(cn('m-[2px] m-[4px]')).toBe('m-[4px]')
    expect(cn('m-1 m-[4px]')).toBe('m-[4px]')
    expect(cn('overflow-x-auto hover:overflow-x-hidden overflow-x-scroll')).toBe(
      'hover:overflow-x-hidden overflow-x-scroll',
    )
    expect(cn('h-10 h-min')).toBe('h-min')
    expect(cn('bg-grey-5 bg-hotpink')).toBe('bg-hotpink')

    expect(cn('hover:block hover:inline')).toBe('hover:inline')

    expect(cn('font-medium !font-bold')).toBe('font-medium !font-bold')
    expect(cn('!font-medium !font-bold')).toBe('!font-bold')

    expect(cn('text-gray-100 text-primary-200')).toBe('text-primary-200')
    expect(cn('text-some-unknown-color text-components-input-bg-disabled text-primary-200')).toBe('text-primary-200')
    expect(cn('bg-some-unknown-color bg-components-input-bg-disabled bg-primary-200')).toBe('bg-primary-200')

    expect(cn('border-t border-white/10')).toBe('border-t border-white/10')
    expect(cn('border-t border-white')).toBe('border-t border-white')
    expect(cn('text-3.5xl text-black')).toBe('text-3.5xl text-black')
  })

  /**
   * Tests the integration of classnames and tailwind-merge:
   * - Object-based conditional classes with Tailwind conflict resolution
   */
  it('classnames combined with tailwind-merge', () => {
    expect(cn('text-right', {
      'text-center': true,
    })).toBe('text-center')

    expect(cn('text-right', {
      'text-center': false,
    })).toBe('text-right')
  })

  /**
   * Tests handling of multiple mixed argument types:
   * - Strings, arrays, and objects in a single call
   * - Tailwind merge working across different argument types
   */
  it('multiple mixed argument types', () => {
    expect(cn('foo', ['bar', 'baz'], { qux: true, quux: false })).toBe('foo bar baz qux')
    expect(cn('p-4', ['p-2', 'm-4'], { 'text-left': true, 'text-right': true })).toBe('p-2 m-4 text-right')
  })

  /**
   * Tests nested array handling:
   * - Deep array flattening
   * - Tailwind merge with nested structures
   */
  it('nested arrays', () => {
    expect(cn(['foo', ['bar', 'baz']])).toBe('foo bar baz')
    expect(cn(['p-4', ['p-2', 'text-center']])).toBe('p-2 text-center')
  })

  /**
   * Tests empty input handling:
   * - Empty strings, arrays, and objects
   * - Mixed empty and non-empty values
   */
  it('empty inputs', () => {
    expect(cn('')).toBe('')
    expect(cn([])).toBe('')
    expect(cn({})).toBe('')
    expect(cn('', [], {})).toBe('')
    expect(cn('foo', '', 'bar')).toBe('foo bar')
  })

  /**
   * Tests number input handling:
   * - Truthy numbers converted to strings
   * - Zero treated as falsy
   */
  it('numbers as inputs', () => {
    expect(cn(1)).toBe('1')
    expect(cn(0)).toBe('')
    expect(cn('foo', 1, 'bar')).toBe('foo 1 bar')
  })

  /**
   * Tests multiple object arguments:
   * - Object merging
   * - Tailwind conflict resolution across objects
   */
  it('multiple objects', () => {
    expect(cn({ foo: true }, { bar: true })).toBe('foo bar')
    expect(cn({ foo: true, bar: false }, { bar: true, baz: true })).toBe('foo bar baz')
    expect(cn({ 'p-4': true }, { 'p-2': true })).toBe('p-2')
  })

  /**
   * Tests complex edge cases:
   * - Mixed falsy values
   * - Nested arrays with falsy values
   * - Multiple conflicting Tailwind classes
   */
  it('complex edge cases', () => {
    expect(cn('foo', null, undefined, false, 'bar', 0, 1, '')).toBe('foo bar 1')
    expect(cn(['foo', null, ['bar', undefined, 'baz']])).toBe('foo bar baz')
    expect(cn('text-sm', { 'text-lg': false, 'text-xl': true }, 'text-2xl')).toBe('text-2xl')
  })

  /**
   * Tests important (!) modifier behavior:
   * - Important modifiers in objects
   * - Conflict resolution with important prefix
   */
  it('important modifier with objects', () => {
    expect(cn({ '!font-medium': true }, { '!font-bold': true })).toBe('!font-bold')
    expect(cn('font-normal', { '!font-bold': true })).toBe('font-normal !font-bold')
  })
})
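The implementation under test is not shown in this diff; a minimal sketch consistent with the behavior the suite documents (classnames flattens and filters the arguments, tailwind-merge then resolves conflicting Tailwind classes with last-one-wins) would be:

```ts
// Assumed shape of web/utils/classnames.ts — a sketch, not the actual file.
// classNames handles strings/arrays/objects and falsy filtering; twMerge then
// drops the earlier of any two conflicting Tailwind classes.
import classNames from 'classnames'
import { twMerge } from 'tailwind-merge'

export const cn = (...args: Parameters<typeof classNames>): string =>
  twMerge(classNames(...args))
```

Note that the real file may extend tailwind-merge for the project's custom scale (e.g. `text-3.5xl`); plain `twMerge` leaves unrecognized classes untouched, which is consistent with the expectations above.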