Mirror of https://github.com/langgenius/dify.git (synced 2026-03-28 19:26:47 +00:00)

Compare commits: fix/fix-te...build/mess (13 commits)
| Author | SHA1 | Date |
|---|---|---|
|  | f07f844d39 |  |
|  | d1cdc85a2e |  |
|  | b4414901d1 |  |
|  | 34caf19f5b |  |
|  | 8c31b69c8e |  |
|  | b886b3f6c8 |  |
|  | ef0d18bb61 |  |
|  | c56ad8e323 |  |
|  | 365f749ed5 |  |
|  | f686197589 |  |
|  | f584be9cf0 |  |
|  | 3bd228ddb7 |  |
|  | 0dfa59b1db |  |
.github/CODEOWNERS (vendored, 2 changes)
@@ -239,7 +239,7 @@
 /web/app/components/base/ @iamjoel @zxhlyh

 # Frontend - Base Components Tests
 /web/app/components/base/**/__tests__/ @hyoban @CodingOnStar
 /web/app/components/base/**/*.spec.tsx @hyoban @CodingOnStar

 # Frontend - Utils and Hooks
 /web/utils/classnames.ts @iamjoel @zxhlyh
.github/workflows/web-tests.yml (vendored, 2 changes)
@@ -39,7 +39,7 @@ jobs:
         run: pnpm install --frozen-lockfile

       - name: Run tests
-        run: pnpm test:coverage
+        run: pnpm test:ci

       - name: Coverage Summary
         if: always()
@@ -715,5 +715,6 @@ ANNOTATION_IMPORT_MAX_CONCURRENT=5

 # Sandbox expired records clean configuration
 SANDBOX_EXPIRED_RECORDS_CLEAN_GRACEFUL_PERIOD=21
 SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_SIZE=1000
+SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_MAX_INTERVAL=200
 SANDBOX_EXPIRED_RECORDS_RETENTION_DAYS=30
 SANDBOX_EXPIRED_RECORDS_CLEAN_TASK_LOCK_TTL=90000
@@ -1309,6 +1309,10 @@ class SandboxExpiredRecordsCleanConfig(BaseSettings):
         description="Maximum number of records to process in each batch",
         default=1000,
     )
+    SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_MAX_INTERVAL: PositiveInt = Field(
+        description="Maximum interval in milliseconds between batches",
+        default=200,
+    )
     SANDBOX_EXPIRED_RECORDS_RETENTION_DAYS: PositiveInt = Field(
         description="Retention days for sandbox expired workflow_run records and message records",
         default=30,
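For context, the new field follows the same pydantic-settings pattern as its neighbors: the field name doubles as the environment variable name, and the default applies when the variable is unset. A minimal self-contained sketch of that behavior (the standalone class below is illustrative, not the real SandboxExpiredRecordsCleanConfig):

```python
from pydantic import Field, PositiveInt
from pydantic_settings import BaseSettings


class SandboxCleanSettings(BaseSettings):
    # pydantic-settings resolves this from the environment variable of the
    # same name; PositiveInt rejects zero and negative values at load time.
    SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_MAX_INTERVAL: PositiveInt = Field(
        description="Maximum interval in milliseconds between batches",
        default=200,
    )


print(SandboxCleanSettings().SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_MAX_INTERVAL)  # 200 unless overridden
```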
@@ -1038,7 +1038,6 @@ class Message(Base):
         Index("message_end_user_idx", "app_id", "from_source", "from_end_user_id"),
         Index("message_account_idx", "app_id", "from_source", "from_account_id"),
         Index("message_workflow_run_id_idx", "conversation_id", "workflow_run_id"),
-        Index("message_created_at_idx", "created_at"),
         Index("message_app_mode_idx", "app_mode"),
         Index("message_created_at_id_idx", "created_at", "id"),
     )
@@ -16,6 +16,7 @@ class SavedMessage(TypeBase):
     __table_args__ = (
         sa.PrimaryKeyConstraint("id", name="saved_message_pkey"),
         sa.Index("saved_message_message_idx", "app_id", "message_id", "created_by_role", "created_by"),
+        sa.Index("saved_message_message_id_idx", "message_id"),
     )

     id: Mapped[str] = mapped_column(
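The composite index above leads on app_id, so it cannot serve lookups that filter on message_id alone; that is the gap the new single-column index closes. A quick way to see the leading-column rule, sketched with SQLite's query planner (the production database is PostgreSQL, but the rule is the same):

```python
import sqlite3

con = sqlite3.connect(":memory:")
con.execute("CREATE TABLE saved_message (id TEXT, app_id TEXT, message_id TEXT)")
con.execute("CREATE INDEX saved_message_message_idx ON saved_message (app_id, message_id)")

# Filtering on message_id alone cannot use an index that leads on app_id:
print(con.execute(
    "EXPLAIN QUERY PLAN SELECT * FROM saved_message WHERE message_id = 'm1'"
).fetchall())  # ... SCAN saved_message

con.execute("CREATE INDEX saved_message_message_id_idx ON saved_message (message_id)")
print(con.execute(
    "EXPLAIN QUERY PLAN SELECT * FROM saved_message WHERE message_id = 'm1'"
).fetchall())  # ... SEARCH saved_message USING INDEX saved_message_message_id_idx
```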
@@ -1,6 +1,6 @@
 [project]
 name = "dify-api"
-version = "1.12.0"
+version = "1.12.1"
 requires-python = ">=3.11,<3.13"

 dependencies = [
@@ -327,6 +327,17 @@ class AccountService:
     @staticmethod
     def delete_account(account: Account):
         """Delete account. This method only adds a task to the queue for deletion."""
+        # Queue account deletion sync tasks for all workspaces BEFORE account deletion (enterprise only)
+        from services.enterprise.account_deletion_sync import sync_account_deletion
+
+        sync_success = sync_account_deletion(account_id=account.id, source="account_deleted")
+        if not sync_success:
+            logger.warning(
+                "Enterprise account deletion sync failed for account %s; proceeding with local deletion.",
+                account.id,
+            )
+
+        # Now proceed with async account deletion
         delete_account_task.delay(account.id)

     @staticmethod
@@ -1230,6 +1241,19 @@ class TenantService:
         if dify_config.BILLING_ENABLED:
             BillingService.clean_billing_info_cache(tenant.id)

+        # Queue account deletion sync task for enterprise backend to reassign resources (enterprise only)
+        from services.enterprise.account_deletion_sync import sync_workspace_member_removal
+
+        sync_success = sync_workspace_member_removal(
+            workspace_id=tenant.id, member_id=account.id, source="workspace_member_removed"
+        )
+        if not sync_success:
+            logger.warning(
+                "Enterprise workspace member removal sync failed: workspace_id=%s, member_id=%s",
+                tenant.id,
+                account.id,
+            )
+
     @staticmethod
     def update_member_role(tenant: Tenant, member: Account, new_role: str, operator: Account):
         """Update member role"""
api/services/enterprise/account_deletion_sync.py (new file, 115 lines)
@@ -0,0 +1,115 @@
import json
import logging
import uuid
from datetime import UTC, datetime

from redis import RedisError

from configs import dify_config
from extensions.ext_database import db
from extensions.ext_redis import redis_client
from models.account import TenantAccountJoin

logger = logging.getLogger(__name__)

ACCOUNT_DELETION_SYNC_QUEUE = "enterprise:member:sync:queue"
ACCOUNT_DELETION_SYNC_TASK_TYPE = "sync_member_deletion_from_workspace"


def _queue_task(workspace_id: str, member_id: str, *, source: str) -> bool:
    """
    Queue an account deletion sync task to Redis.

    Internal helper function. Do not call directly - use the public functions instead.

    Args:
        workspace_id: The workspace/tenant ID to sync
        member_id: The member/account ID that was removed
        source: Source of the sync request (for debugging/tracking)

    Returns:
        bool: True if task was queued successfully, False otherwise
    """
    try:
        task = {
            "task_id": str(uuid.uuid4()),
            "workspace_id": workspace_id,
            "member_id": member_id,
            "retry_count": 0,
            "created_at": datetime.now(UTC).isoformat(),
            "source": source,
            "type": ACCOUNT_DELETION_SYNC_TASK_TYPE,
        }

        # Push to Redis list (queue) - LPUSH adds to the head, worker consumes from tail with RPOP
        redis_client.lpush(ACCOUNT_DELETION_SYNC_QUEUE, json.dumps(task))

        logger.info(
            "Queued account deletion sync task for workspace %s, member %s, task_id: %s, source: %s",
            workspace_id,
            member_id,
            task["task_id"],
            source,
        )
        return True

    except (RedisError, TypeError) as e:
        logger.error(
            "Failed to queue account deletion sync for workspace %s, member %s: %s",
            workspace_id,
            member_id,
            str(e),
            exc_info=True,
        )
        # Don't raise - we don't want to fail member deletion if queueing fails
        return False


def sync_workspace_member_removal(workspace_id: str, member_id: str, *, source: str) -> bool:
    """
    Sync a single workspace member removal (enterprise only).

    Queues a task for the enterprise backend to reassign resources from the removed member.
    Handles enterprise edition check internally. Safe to call in community edition (no-op).

    Args:
        workspace_id: The workspace/tenant ID
        member_id: The member/account ID that was removed
        source: Source of the sync request (e.g., "workspace_member_removed")

    Returns:
        bool: True if task was queued (or skipped in community), False if queueing failed
    """
    if not dify_config.ENTERPRISE_ENABLED:
        return True

    return _queue_task(workspace_id=workspace_id, member_id=member_id, source=source)


def sync_account_deletion(account_id: str, *, source: str) -> bool:
    """
    Sync full account deletion across all workspaces (enterprise only).

    Fetches all workspace memberships for the account and queues a sync task for each.
    Handles enterprise edition check internally. Safe to call in community edition (no-op).

    Args:
        account_id: The account ID being deleted
        source: Source of the sync request (e.g., "account_deleted")

    Returns:
        bool: True if all tasks were queued (or skipped in community), False if any queueing failed
    """
    if not dify_config.ENTERPRISE_ENABLED:
        return True

    # Fetch all workspaces the account belongs to
    workspace_joins = db.session.query(TenantAccountJoin).filter_by(account_id=account_id).all()

    # Queue sync task for each workspace
    success = True
    for join in workspace_joins:
        if not _queue_task(workspace_id=join.tenant_id, member_id=account_id, source=source):
            success = False

    return success
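Only the producer side lives in this repository; the consumer is the enterprise backend, which is not part of this diff. A hedged sketch of what a compatible consumer could look like (the queue name and payload fields come from the module above; everything else is assumed):

```python
import json

import redis  # assumed: any redis-py compatible client

r = redis.Redis()


def consume_one(timeout: int = 5) -> dict | None:
    # LPUSH at the head plus (B)RPOP at the tail yields FIFO ordering;
    # BRPOP blocks up to `timeout` seconds waiting for a task.
    item = r.brpop("enterprise:member:sync:queue", timeout=timeout)
    if item is None:
        return None
    _queue_name, raw = item
    task = json.loads(raw)
    # Expected fields per the producer: task_id, workspace_id, member_id,
    # retry_count, created_at, source, and
    # type == "sync_member_deletion_from_workspace".
    return task
```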
@@ -1,10 +1,12 @@
 import datetime
 import logging
 import os
 import random
 import time
 from collections.abc import Sequence
 from typing import cast

-from sqlalchemy import delete, select
+from sqlalchemy import delete, select, tuple_
 from sqlalchemy.engine import CursorResult
 from sqlalchemy.orm import Session
@@ -193,11 +195,15 @@ class MessagesCleanService:
             self._end_before,
         )

+        max_batch_interval_ms = int(os.environ.get("SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_MAX_INTERVAL", 200))
+
         while True:
             stats["batches"] += 1
+            batch_start = time.monotonic()

             # Step 1: Fetch a batch of messages using cursor
             with Session(db.engine, expire_on_commit=False) as session:
+                fetch_messages_start = time.monotonic()
                 msg_stmt = (
                     select(Message.id, Message.app_id, Message.created_at)
                     .where(Message.created_at < self._end_before)
@@ -209,13 +215,9 @@ class MessagesCleanService:
                     msg_stmt = msg_stmt.where(Message.created_at >= self._start_from)

                 # Apply cursor condition: (created_at, id) > (last_created_at, last_message_id)
-                # This translates to:
-                # created_at > last_created_at OR (created_at = last_created_at AND id > last_message_id)
                 if _cursor:
-                    # Continuing from previous batch
                     msg_stmt = msg_stmt.where(
-                        (Message.created_at > _cursor[0])
-                        | ((Message.created_at == _cursor[0]) & (Message.id > _cursor[1]))
+                        tuple_(Message.created_at, Message.id) > tuple_(_cursor[0], _cursor[1])
                     )

                 raw_messages = list(session.execute(msg_stmt).all())
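Both predicates implement the same keyset (seek) pagination condition; tuple_() simply emits it as a single SQL row-value comparison, which PostgreSQL can match directly against the composite (created_at, id) index on Message. A small standalone sketch of the equivalence (column names mirror the diff; the literal cursor values are illustrative):

```python
import datetime

from sqlalchemy import column, tuple_

created_at, id_ = column("created_at"), column("id")
cursor = (datetime.datetime(2025, 1, 1), "0194fa3e")

# Row-value form, as in the new code; renders approximately as
#   (created_at, id) > (:param_1, :param_2)
row_value = tuple_(created_at, id_) > tuple_(cursor[0], cursor[1])

# Hand-expanded form the old code used; logically identical:
expanded = (created_at > cursor[0]) | ((created_at == cursor[0]) & (id_ > cursor[1]))

print(row_value)
```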
@@ -223,6 +225,12 @@ class MessagesCleanService:
                     SimpleMessage(id=msg_id, app_id=app_id, created_at=msg_created_at)
                     for msg_id, app_id, msg_created_at in raw_messages
                 ]
+                logger.info(
+                    "clean_messages (batch %s): fetched %s messages in %sms",
+                    stats["batches"],
+                    len(messages),
+                    int((time.monotonic() - fetch_messages_start) * 1000),
+                )

             # Track total messages fetched across all batches
             stats["total_messages"] += len(messages)
@@ -241,8 +249,16 @@ class MessagesCleanService:
                 logger.info("clean_messages (batch %s): no app_ids found, skip", stats["batches"])
                 continue

+            fetch_apps_start = time.monotonic()
             app_stmt = select(App.id, App.tenant_id).where(App.id.in_(app_ids))
             apps = list(session.execute(app_stmt).all())
+            logger.info(
+                "clean_messages (batch %s): fetched %s apps for %s app_ids in %sms",
+                stats["batches"],
+                len(apps),
+                len(app_ids),
+                int((time.monotonic() - fetch_apps_start) * 1000),
+            )

             if not apps:
                 logger.info("clean_messages (batch %s): no apps found, skip", stats["batches"])
@@ -252,7 +268,15 @@ class MessagesCleanService:
             app_to_tenant: dict[str, str] = {app.id: app.tenant_id for app in apps}

             # Step 3: Delegate to policy to determine which messages to delete
+            policy_start = time.monotonic()
             message_ids_to_delete = self._policy.filter_message_ids(messages, app_to_tenant)
+            logger.info(
+                "clean_messages (batch %s): policy selected %s/%s messages in %sms",
+                stats["batches"],
+                len(message_ids_to_delete),
+                len(messages),
+                int((time.monotonic() - policy_start) * 1000),
+            )

             if not message_ids_to_delete:
                 logger.info("clean_messages (batch %s): no messages to delete, skip", stats["batches"])
@@ -263,14 +287,20 @@ class MessagesCleanService:
             # Step 4: Batch delete messages and their relations
             if not self._dry_run:
                 with Session(db.engine, expire_on_commit=False) as session:
+                    delete_relations_start = time.monotonic()
                     # Delete related records first
                     self._batch_delete_message_relations(session, message_ids_to_delete)
+                    delete_relations_ms = int((time.monotonic() - delete_relations_start) * 1000)

                     # Delete messages
+                    delete_messages_start = time.monotonic()
                     delete_stmt = delete(Message).where(Message.id.in_(message_ids_to_delete))
                     delete_result = cast(CursorResult, session.execute(delete_stmt))
                     messages_deleted = delete_result.rowcount
+                    delete_messages_ms = int((time.monotonic() - delete_messages_start) * 1000)
+                    commit_start = time.monotonic()
                     session.commit()
+                    commit_ms = int((time.monotonic() - commit_start) * 1000)

                     stats["total_deleted"] += messages_deleted
@@ -280,6 +310,19 @@ class MessagesCleanService:
                         len(messages),
                         messages_deleted,
                     )
+                    logger.info(
+                        "clean_messages (batch %s): relations %sms, messages %sms, commit %sms, batch total %sms",
+                        stats["batches"],
+                        delete_relations_ms,
+                        delete_messages_ms,
+                        commit_ms,
+                        int((time.monotonic() - batch_start) * 1000),
+                    )
+
+                    # Random sleep between batches to avoid overwhelming the database
+                    sleep_ms = random.uniform(0, max_batch_interval_ms)  # noqa: S311
+                    logger.info("clean_messages (batch %s): sleeping for %.2fms", stats["batches"], sleep_ms)
+                    time.sleep(sleep_ms / 1000)
             else:
                 # Log random sample of message IDs that would be deleted (up to 10)
                 sample_size = min(10, len(message_ids_to_delete))
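The jittered sleep is uniform on [0, max_batch_interval_ms], so it adds about half the configured maximum per batch on average (roughly 100 ms with the default of 200). A quick sanity check of that expectation:

```python
import random

max_batch_interval_ms = 200
samples = [random.uniform(0, max_batch_interval_ms) for _ in range(100_000)]
print(sum(samples) / len(samples))  # ~100.0: mean pacing per batch, in ms
```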
@@ -1,5 +1,8 @@
 import datetime
 import logging
+import os
+import random
+import time
 from collections.abc import Iterable, Sequence

 import click
@@ -72,7 +75,12 @@ class WorkflowRunCleanup:
         batch_index = 0
         last_seen: tuple[datetime.datetime, str] | None = None

+        max_batch_interval_ms = int(os.environ.get("SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_MAX_INTERVAL", 200))
+
         while True:
+            batch_start = time.monotonic()
+
+            fetch_start = time.monotonic()
             run_rows = self.workflow_run_repo.get_runs_batch_by_time_range(
                 start_from=self.window_start,
                 end_before=self.window_end,
@@ -80,12 +88,30 @@ class WorkflowRunCleanup:
                 batch_size=self.batch_size,
             )
             if not run_rows:
                 logger.info("workflow_run_cleanup (batch #%s): no more rows to process", batch_index + 1)
                 break

             batch_index += 1
             last_seen = (run_rows[-1].created_at, run_rows[-1].id)
+            logger.info(
+                "workflow_run_cleanup (batch #%s): fetched %s rows in %sms",
+                batch_index,
+                len(run_rows),
+                int((time.monotonic() - fetch_start) * 1000),
+            )

             tenant_ids = {row.tenant_id for row in run_rows}

+            filter_start = time.monotonic()
             free_tenants = self._filter_free_tenants(tenant_ids)
+            logger.info(
+                "workflow_run_cleanup (batch #%s): filtered %s free tenants from %s tenants in %sms",
+                batch_index,
+                len(free_tenants),
+                len(tenant_ids),
+                int((time.monotonic() - filter_start) * 1000),
+            )

             free_runs = [row for row in run_rows if row.tenant_id in free_tenants]
             paid_or_skipped = len(run_rows) - len(free_runs)
@@ -104,11 +130,17 @@ class WorkflowRunCleanup:
             total_runs_targeted += len(free_runs)

             if self.dry_run:
+                count_start = time.monotonic()
                 batch_counts = self.workflow_run_repo.count_runs_with_related(
                     free_runs,
                     count_node_executions=self._count_node_executions,
                     count_trigger_logs=self._count_trigger_logs,
                 )
+                logger.info(
+                    "workflow_run_cleanup (batch #%s, dry_run): counted related records in %sms",
+                    batch_index,
+                    int((time.monotonic() - count_start) * 1000),
+                )
                 if related_totals is not None:
                     for key in related_totals:
                         related_totals[key] += batch_counts.get(key, 0)
@@ -120,14 +152,21 @@ class WorkflowRunCleanup:
                         fg="yellow",
                     )
                 )
+                logger.info(
+                    "workflow_run_cleanup (batch #%s, dry_run): batch total %sms",
+                    batch_index,
+                    int((time.monotonic() - batch_start) * 1000),
+                )
                 continue

             try:
+                delete_start = time.monotonic()
                 counts = self.workflow_run_repo.delete_runs_with_related(
                     free_runs,
                     delete_node_executions=self._delete_node_executions,
                     delete_trigger_logs=self._delete_trigger_logs,
                 )
+                delete_ms = int((time.monotonic() - delete_start) * 1000)
             except Exception:
                 logger.exception("Failed to delete workflow runs batch ending at %s", last_seen[0])
                 raise
@@ -143,6 +182,17 @@ class WorkflowRunCleanup:
                         fg="green",
                     )
                 )
+                logger.info(
+                    "workflow_run_cleanup (batch #%s): delete %sms, batch total %sms",
+                    batch_index,
+                    delete_ms,
+                    int((time.monotonic() - batch_start) * 1000),
+                )
+
+            # Random sleep between batches to avoid overwhelming the database
+            sleep_ms = random.uniform(0, max_batch_interval_ms)  # noqa: S311
+            logger.info("workflow_run_cleanup (batch #%s): sleeping for %.2fms", batch_index, sleep_ms)
+            time.sleep(sleep_ms / 1000)

         if self.dry_run:
             if self.window_start:
@@ -8,7 +8,6 @@ from sqlalchemy import delete, select
 from core.db.session_factory import session_factory
 from core.indexing_runner import DocumentIsPausedError, IndexingRunner
 from core.rag.index_processor.index_processor_factory import IndexProcessorFactory
-from extensions.ext_database import db
 from libs.datetime_utils import naive_utc_now
 from models.dataset import Dataset, Document, DocumentSegment
@@ -27,7 +26,7 @@ def document_indexing_update_task(dataset_id: str, document_id: str):
     logger.info(click.style(f"Start update document: {document_id}", fg="green"))
     start_at = time.perf_counter()

-    with session_factory.create_session() as session:
+    with session_factory.create_session() as session, session.begin():
         document = session.query(Document).where(Document.id == document_id, Document.dataset_id == dataset_id).first()

         if not document:
@@ -36,7 +35,6 @@ def document_indexing_update_task(dataset_id: str, document_id: str):

         document.indexing_status = "parsing"
         document.processing_started_at = naive_utc_now()
-        session.commit()

         # delete all document segment and index
         try:
@@ -56,7 +54,7 @@ def document_indexing_update_task(dataset_id: str, document_id: str):
             segment_ids = [segment.id for segment in segments]
             segment_delete_stmt = delete(DocumentSegment).where(DocumentSegment.id.in_(segment_ids))
             session.execute(segment_delete_stmt)
-            db.session.commit()

             end_at = time.perf_counter()
             logger.info(
                 click.style(
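Both hunks in this task swap explicit commits for session.begin(), whose context manager commits when the block exits cleanly and rolls back on an exception. A self-contained sketch of those semantics (the toy model and in-memory SQLite engine are illustrative):

```python
from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()


class Item(Base):
    __tablename__ = "item"
    id = Column(Integer, primary_key=True)
    name = Column(String)


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

# session.begin() commits on clean exit and rolls back on exception,
# so no explicit session.commit() is needed inside the block.
with Session(engine) as session, session.begin():
    session.add(Item(name="a"))

with Session(engine) as session:
    assert session.query(Item).count() == 1  # the insert was committed
```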
@@ -259,8 +259,8 @@ def _delete_app_workflow_app_logs(tenant_id: str, app_id: str):


 def _delete_app_workflow_archive_logs(tenant_id: str, app_id: str):
-    def del_workflow_archive_log(workflow_archive_log_id: str):
-        db.session.query(WorkflowArchiveLog).where(WorkflowArchiveLog.id == workflow_archive_log_id).delete(
+    def del_workflow_archive_log(session, workflow_archive_log_id: str):
+        session.query(WorkflowArchiveLog).where(WorkflowArchiveLog.id == workflow_archive_log_id).delete(
             synchronize_session=False
         )
@@ -420,7 +420,7 @@ def delete_draft_variables_batch(app_id: str, batch_size: int = 1000) -> int:
     total_files_deleted = 0

     while True:
-        with session_factory.create_session() as session:
+        with session_factory.create_session() as session, session.begin():
             # Get a batch of draft variable IDs along with their file_ids
             query_sql = """
                 SELECT id, file_id FROM workflow_draft_variables
@@ -10,7 +10,10 @@ from models import Tenant
 from models.enums import CreatorUserRole
 from models.model import App, UploadFile
 from models.workflow import WorkflowDraftVariable, WorkflowDraftVariableFile
-from tasks.remove_app_and_related_data_task import _delete_draft_variables, delete_draft_variables_batch
+from tasks.remove_app_and_related_data_task import (
+    _delete_draft_variables,
+    delete_draft_variables_batch,
+)


 @pytest.fixture
@@ -297,12 +300,18 @@ class TestDeleteDraftVariablesWithOffloadIntegration:
     def test_delete_draft_variables_with_offload_data(self, mock_storage, setup_offload_test_data):
         data = setup_offload_test_data
         app_id = data["app"].id
+        upload_file_ids = [uf.id for uf in data["upload_files"]]
+        variable_file_ids = [vf.id for vf in data["variable_files"]]
         mock_storage.delete.return_value = None

         with session_factory.create_session() as session:
             draft_vars_before = session.query(WorkflowDraftVariable).filter_by(app_id=app_id).count()
-            var_files_before = session.query(WorkflowDraftVariableFile).count()
-            upload_files_before = session.query(UploadFile).count()
+            var_files_before = (
+                session.query(WorkflowDraftVariableFile)
+                .where(WorkflowDraftVariableFile.id.in_(variable_file_ids))
+                .count()
+            )
+            upload_files_before = session.query(UploadFile).where(UploadFile.id.in_(upload_file_ids)).count()
             assert draft_vars_before == 3
             assert var_files_before == 2
             assert upload_files_before == 2
@@ -315,8 +324,12 @@ class TestDeleteDraftVariablesWithOffloadIntegration:
         assert draft_vars_after == 0

         with session_factory.create_session() as session:
-            var_files_after = session.query(WorkflowDraftVariableFile).count()
-            upload_files_after = session.query(UploadFile).count()
+            var_files_after = (
+                session.query(WorkflowDraftVariableFile)
+                .where(WorkflowDraftVariableFile.id.in_(variable_file_ids))
+                .count()
+            )
+            upload_files_after = session.query(UploadFile).where(UploadFile.id.in_(upload_file_ids)).count()
             assert var_files_after == 0
             assert upload_files_after == 0
@@ -329,6 +342,8 @@ class TestDeleteDraftVariablesWithOffloadIntegration:
     def test_delete_draft_variables_storage_failure_continues_cleanup(self, mock_storage, setup_offload_test_data):
         data = setup_offload_test_data
         app_id = data["app"].id
+        upload_file_ids = [uf.id for uf in data["upload_files"]]
+        variable_file_ids = [vf.id for vf in data["variable_files"]]
         mock_storage.delete.side_effect = [Exception("Storage error"), None]

         deleted_count = delete_draft_variables_batch(app_id, batch_size=10)
@@ -339,8 +354,12 @@ class TestDeleteDraftVariablesWithOffloadIntegration:
         assert draft_vars_after == 0

         with session_factory.create_session() as session:
-            var_files_after = session.query(WorkflowDraftVariableFile).count()
-            upload_files_after = session.query(UploadFile).count()
+            var_files_after = (
+                session.query(WorkflowDraftVariableFile)
+                .where(WorkflowDraftVariableFile.id.in_(variable_file_ids))
+                .count()
+            )
+            upload_files_after = session.query(UploadFile).where(UploadFile.id.in_(upload_file_ids)).count()
             assert var_files_after == 0
             assert upload_files_after == 0
@@ -395,3 +414,275 @@ class TestDeleteDraftVariablesWithOffloadIntegration: (all lines below the context are additions)

        if app2_obj:
            session.delete(app2_obj)
        session.commit()


class TestDeleteDraftVariablesSessionCommit:
    """Test suite to verify session commit behavior in delete_draft_variables_batch."""

    @pytest.fixture
    def setup_offload_test_data(self, app_and_tenant):
        """Create test data with offload files for session commit tests."""
        from core.variables.types import SegmentType
        from libs.datetime_utils import naive_utc_now

        tenant, app = app_and_tenant

        with session_factory.create_session() as session:
            upload_file1 = UploadFile(
                tenant_id=tenant.id,
                storage_type="local",
                key="test/file1.json",
                name="file1.json",
                size=1024,
                extension="json",
                mime_type="application/json",
                created_by_role=CreatorUserRole.ACCOUNT,
                created_by=str(uuid.uuid4()),
                created_at=naive_utc_now(),
                used=False,
            )
            upload_file2 = UploadFile(
                tenant_id=tenant.id,
                storage_type="local",
                key="test/file2.json",
                name="file2.json",
                size=2048,
                extension="json",
                mime_type="application/json",
                created_by_role=CreatorUserRole.ACCOUNT,
                created_by=str(uuid.uuid4()),
                created_at=naive_utc_now(),
                used=False,
            )
            session.add(upload_file1)
            session.add(upload_file2)
            session.flush()

            var_file1 = WorkflowDraftVariableFile(
                tenant_id=tenant.id,
                app_id=app.id,
                user_id=str(uuid.uuid4()),
                upload_file_id=upload_file1.id,
                size=1024,
                length=10,
                value_type=SegmentType.STRING,
            )
            var_file2 = WorkflowDraftVariableFile(
                tenant_id=tenant.id,
                app_id=app.id,
                user_id=str(uuid.uuid4()),
                upload_file_id=upload_file2.id,
                size=2048,
                length=20,
                value_type=SegmentType.OBJECT,
            )
            session.add(var_file1)
            session.add(var_file2)
            session.flush()

            draft_var1 = WorkflowDraftVariable.new_node_variable(
                app_id=app.id,
                node_id="node_1",
                name="large_var_1",
                value=StringSegment(value="truncated..."),
                node_execution_id=str(uuid.uuid4()),
                file_id=var_file1.id,
            )
            draft_var2 = WorkflowDraftVariable.new_node_variable(
                app_id=app.id,
                node_id="node_2",
                name="large_var_2",
                value=StringSegment(value="truncated..."),
                node_execution_id=str(uuid.uuid4()),
                file_id=var_file2.id,
            )
            draft_var3 = WorkflowDraftVariable.new_node_variable(
                app_id=app.id,
                node_id="node_3",
                name="regular_var",
                value=StringSegment(value="regular_value"),
                node_execution_id=str(uuid.uuid4()),
            )
            session.add(draft_var1)
            session.add(draft_var2)
            session.add(draft_var3)
            session.commit()

        data = {
            "app": app,
            "tenant": tenant,
            "upload_files": [upload_file1, upload_file2],
            "variable_files": [var_file1, var_file2],
            "draft_variables": [draft_var1, draft_var2, draft_var3],
        }

        yield data

        with session_factory.create_session() as session:
            for table, ids in [
                (WorkflowDraftVariable, [v.id for v in data["draft_variables"]]),
                (WorkflowDraftVariableFile, [vf.id for vf in data["variable_files"]]),
                (UploadFile, [uf.id for uf in data["upload_files"]]),
            ]:
                cleanup_query = delete(table).where(table.id.in_(ids)).execution_options(synchronize_session=False)
                session.execute(cleanup_query)
            session.commit()

    @pytest.fixture
    def setup_commit_test_data(self, app_and_tenant):
        """Create test data for session commit tests."""
        tenant, app = app_and_tenant
        variable_ids: list[str] = []

        with session_factory.create_session() as session:
            variables = []
            for i in range(10):
                var = WorkflowDraftVariable.new_node_variable(
                    app_id=app.id,
                    node_id=f"node_{i}",
                    name=f"var_{i}",
                    value=StringSegment(value="test_value"),
                    node_execution_id=str(uuid.uuid4()),
                )
                session.add(var)
                variables.append(var)
            session.commit()
            variable_ids = [v.id for v in variables]

        yield {
            "app": app,
            "tenant": tenant,
            "variable_ids": variable_ids,
        }

        with session_factory.create_session() as session:
            cleanup_query = (
                delete(WorkflowDraftVariable)
                .where(WorkflowDraftVariable.id.in_(variable_ids))
                .execution_options(synchronize_session=False)
            )
            session.execute(cleanup_query)
            session.commit()

    def test_session_commit_is_called_after_each_batch(self, setup_commit_test_data):
        """Test that session.begin() is used for automatic transaction management."""
        data = setup_commit_test_data
        app_id = data["app"].id

        # Since session.begin() is used, the transaction is automatically committed
        # when the with block exits successfully. We verify this by checking that
        # data is actually persisted.
        deleted_count = delete_draft_variables_batch(app_id, batch_size=3)

        # Verify all data was deleted (proves transaction was committed)
        with session_factory.create_session() as session:
            remaining_count = session.query(WorkflowDraftVariable).filter_by(app_id=app_id).count()

        assert deleted_count == 10
        assert remaining_count == 0

    def test_data_persisted_after_batch_deletion(self, setup_commit_test_data):
        """Test that data is actually persisted to database after batch deletion with commits."""
        data = setup_commit_test_data
        app_id = data["app"].id
        variable_ids = data["variable_ids"]

        # Verify initial state
        with session_factory.create_session() as session:
            initial_count = session.query(WorkflowDraftVariable).filter_by(app_id=app_id).count()
            assert initial_count == 10

        # Perform deletion with small batch size to force multiple commits
        deleted_count = delete_draft_variables_batch(app_id, batch_size=3)

        assert deleted_count == 10

        # Verify all data is deleted in a new session (proves commits worked)
        with session_factory.create_session() as session:
            final_count = session.query(WorkflowDraftVariable).filter_by(app_id=app_id).count()
            assert final_count == 0

        # Verify specific IDs are deleted
        with session_factory.create_session() as session:
            remaining_vars = (
                session.query(WorkflowDraftVariable).where(WorkflowDraftVariable.id.in_(variable_ids)).count()
            )
            assert remaining_vars == 0

    def test_session_commit_with_empty_dataset(self, setup_commit_test_data):
        """Test session behavior when deleting from an empty dataset."""
        nonexistent_app_id = str(uuid.uuid4())

        # Should not raise any errors and should return 0
        deleted_count = delete_draft_variables_batch(nonexistent_app_id, batch_size=10)
        assert deleted_count == 0

    def test_session_commit_with_single_batch(self, setup_commit_test_data):
        """Test that commit happens correctly when all data fits in a single batch."""
        data = setup_commit_test_data
        app_id = data["app"].id

        with session_factory.create_session() as session:
            initial_count = session.query(WorkflowDraftVariable).filter_by(app_id=app_id).count()
            assert initial_count == 10

        # Delete all in a single batch
        deleted_count = delete_draft_variables_batch(app_id, batch_size=100)
        assert deleted_count == 10

        # Verify data is persisted
        with session_factory.create_session() as session:
            final_count = session.query(WorkflowDraftVariable).filter_by(app_id=app_id).count()
            assert final_count == 0

    def test_invalid_batch_size_raises_error(self, setup_commit_test_data):
        """Test that invalid batch size raises ValueError."""
        data = setup_commit_test_data
        app_id = data["app"].id

        with pytest.raises(ValueError, match="batch_size must be positive"):
            delete_draft_variables_batch(app_id, batch_size=0)

        with pytest.raises(ValueError, match="batch_size must be positive"):
            delete_draft_variables_batch(app_id, batch_size=-1)

    @patch("extensions.ext_storage.storage")
    def test_session_commit_with_offload_data_cleanup(self, mock_storage, setup_offload_test_data):
        """Test that session commits correctly when cleaning up offload data."""
        data = setup_offload_test_data
        app_id = data["app"].id
        upload_file_ids = [uf.id for uf in data["upload_files"]]
        mock_storage.delete.return_value = None

        # Verify initial state
        with session_factory.create_session() as session:
            draft_vars_before = session.query(WorkflowDraftVariable).filter_by(app_id=app_id).count()
            var_files_before = (
                session.query(WorkflowDraftVariableFile)
                .where(WorkflowDraftVariableFile.id.in_([vf.id for vf in data["variable_files"]]))
                .count()
            )
            upload_files_before = session.query(UploadFile).where(UploadFile.id.in_(upload_file_ids)).count()
            assert draft_vars_before == 3
            assert var_files_before == 2
            assert upload_files_before == 2

        # Delete variables with offload data
        deleted_count = delete_draft_variables_batch(app_id, batch_size=10)
        assert deleted_count == 3

        # Verify all data is persisted (deleted) in new session
        with session_factory.create_session() as session:
            draft_vars_after = session.query(WorkflowDraftVariable).filter_by(app_id=app_id).count()
            var_files_after = (
                session.query(WorkflowDraftVariableFile)
                .where(WorkflowDraftVariableFile.id.in_([vf.id for vf in data["variable_files"]]))
                .count()
            )
            upload_files_after = session.query(UploadFile).where(UploadFile.id.in_(upload_file_ids)).count()
            assert draft_vars_after == 0
            assert var_files_after == 0
            assert upload_files_after == 0

        # Verify storage cleanup was called
        assert mock_storage.delete.call_count == 2
@@ -1016,7 +1016,7 @@ class TestAccountService:

     def test_delete_account(self, db_session_with_containers, mock_external_service_dependencies):
         """
-        Test account deletion (should add task to queue).
+        Test account deletion (should add task to queue and sync to enterprise).
         """
         fake = Faker()
         email = fake.email()
@@ -1034,10 +1034,18 @@ class TestAccountService:
             password=password,
         )

-        with patch("services.account_service.delete_account_task") as mock_delete_task:
+        with (
+            patch("services.account_service.delete_account_task") as mock_delete_task,
+            patch("services.enterprise.account_deletion_sync.sync_account_deletion") as mock_sync,
+        ):
+            mock_sync.return_value = True
+
             # Delete account
             AccountService.delete_account(account)

+            # Verify sync was called
+            mock_sync.assert_called_once_with(account_id=account.id, source="account_deleted")
+
             # Verify task was added to queue
             mock_delete_task.delay.assert_called_once_with(account.id)
@@ -1716,7 +1724,7 @@ class TestTenantService:

     def test_remove_member_from_tenant_success(self, db_session_with_containers, mock_external_service_dependencies):
         """
-        Test successful member removal from tenant.
+        Test successful member removal from tenant (should sync to enterprise).
         """
         fake = Faker()
         tenant_name = fake.company()
@@ -1751,7 +1759,15 @@ class TestTenantService:
         TenantService.create_tenant_member(tenant, member_account, role="normal")

         # Remove member
-        TenantService.remove_member_from_tenant(tenant, member_account, owner_account)
+        with patch("services.enterprise.account_deletion_sync.sync_workspace_member_removal") as mock_sync:
+            mock_sync.return_value = True
+
+            TenantService.remove_member_from_tenant(tenant, member_account, owner_account)
+
+            # Verify sync was called
+            mock_sync.assert_called_once_with(
+                workspace_id=tenant.id, member_id=member_account.id, source="workspace_member_removed"
+            )

         # Verify member was removed
         from extensions.ext_database import db
@@ -0,0 +1,182 @@ (new test file)

from unittest.mock import MagicMock, patch

import pytest
from faker import Faker

from models import Account, Tenant, TenantAccountJoin, TenantAccountRole
from models.dataset import Dataset, Document, DocumentSegment
from tasks.document_indexing_update_task import document_indexing_update_task


class TestDocumentIndexingUpdateTask:
    @pytest.fixture
    def mock_external_dependencies(self):
        """Patch external collaborators used by the update task.

        - IndexProcessorFactory.init_index_processor().clean(...)
        - IndexingRunner.run([...])
        """
        with (
            patch("tasks.document_indexing_update_task.IndexProcessorFactory") as mock_factory,
            patch("tasks.document_indexing_update_task.IndexingRunner") as mock_runner,
        ):
            processor_instance = MagicMock()
            mock_factory.return_value.init_index_processor.return_value = processor_instance

            runner_instance = MagicMock()
            mock_runner.return_value = runner_instance

            yield {
                "factory": mock_factory,
                "processor": processor_instance,
                "runner": mock_runner,
                "runner_instance": runner_instance,
            }

    def _create_dataset_document_with_segments(self, db_session_with_containers, *, segment_count: int = 2):
        fake = Faker()

        # Account and tenant
        account = Account(
            email=fake.email(),
            name=fake.name(),
            interface_language="en-US",
            status="active",
        )
        db_session_with_containers.add(account)
        db_session_with_containers.commit()

        tenant = Tenant(name=fake.company(), status="normal")
        db_session_with_containers.add(tenant)
        db_session_with_containers.commit()

        join = TenantAccountJoin(
            tenant_id=tenant.id,
            account_id=account.id,
            role=TenantAccountRole.OWNER,
            current=True,
        )
        db_session_with_containers.add(join)
        db_session_with_containers.commit()

        # Dataset and document
        dataset = Dataset(
            tenant_id=tenant.id,
            name=fake.company(),
            description=fake.text(max_nb_chars=64),
            data_source_type="upload_file",
            indexing_technique="high_quality",
            created_by=account.id,
        )
        db_session_with_containers.add(dataset)
        db_session_with_containers.commit()

        document = Document(
            tenant_id=tenant.id,
            dataset_id=dataset.id,
            position=0,
            data_source_type="upload_file",
            batch="test_batch",
            name=fake.file_name(),
            created_from="upload_file",
            created_by=account.id,
            indexing_status="waiting",
            enabled=True,
            doc_form="text_model",
        )
        db_session_with_containers.add(document)
        db_session_with_containers.commit()

        # Segments
        node_ids = []
        for i in range(segment_count):
            node_id = f"node-{i + 1}"
            seg = DocumentSegment(
                tenant_id=tenant.id,
                dataset_id=dataset.id,
                document_id=document.id,
                position=i,
                content=fake.text(max_nb_chars=32),
                answer=None,
                word_count=10,
                tokens=5,
                index_node_id=node_id,
                status="completed",
                created_by=account.id,
            )
            db_session_with_containers.add(seg)
            node_ids.append(node_id)
        db_session_with_containers.commit()

        # Refresh to ensure ORM state
        db_session_with_containers.refresh(dataset)
        db_session_with_containers.refresh(document)

        return dataset, document, node_ids

    def test_cleans_segments_and_reindexes(self, db_session_with_containers, mock_external_dependencies):
        dataset, document, node_ids = self._create_dataset_document_with_segments(db_session_with_containers)

        # Act
        document_indexing_update_task(dataset.id, document.id)

        # Ensure we see committed changes from another session
        db_session_with_containers.expire_all()

        # Assert document status updated before reindex
        updated = db_session_with_containers.query(Document).where(Document.id == document.id).first()
        assert updated.indexing_status == "parsing"
        assert updated.processing_started_at is not None

        # Segments should be deleted
        remaining = (
            db_session_with_containers.query(DocumentSegment).where(DocumentSegment.document_id == document.id).count()
        )
        assert remaining == 0

        # Assert index processor clean was called with expected args
        clean_call = mock_external_dependencies["processor"].clean.call_args
        assert clean_call is not None
        args, kwargs = clean_call
        # args[0] is a Dataset instance (from another session) - validate by id
        assert getattr(args[0], "id", None) == dataset.id
        # args[1] should contain our node_ids
        assert set(args[1]) == set(node_ids)
        assert kwargs.get("with_keywords") is True
        assert kwargs.get("delete_child_chunks") is True

        # Assert indexing runner invoked with the updated document
        run_call = mock_external_dependencies["runner_instance"].run.call_args
        assert run_call is not None
        run_docs = run_call[0][0]
        assert len(run_docs) == 1
        first = run_docs[0]
        assert getattr(first, "id", None) == document.id

    def test_clean_error_is_logged_and_indexing_continues(self, db_session_with_containers, mock_external_dependencies):
        dataset, document, node_ids = self._create_dataset_document_with_segments(db_session_with_containers)

        # Force clean to raise; task should continue to indexing
        mock_external_dependencies["processor"].clean.side_effect = Exception("boom")

        document_indexing_update_task(dataset.id, document.id)

        # Ensure we see committed changes from another session
        db_session_with_containers.expire_all()

        # Indexing should still be triggered
        mock_external_dependencies["runner_instance"].run.assert_called_once()

        # Segments should remain (since clean failed before DB delete)
        remaining = (
            db_session_with_containers.query(DocumentSegment).where(DocumentSegment.document_id == document.id).count()
        )
        assert remaining > 0

    def test_document_not_found_noop(self, db_session_with_containers, mock_external_dependencies):
        fake = Faker()
        # Act with non-existent document id
        document_indexing_update_task(dataset_id=fake.uuid4(), document_id=fake.uuid4())

        # Neither processor nor runner should be called
        mock_external_dependencies["processor"].clean.assert_not_called()
        mock_external_dependencies["runner_instance"].run.assert_not_called()
@@ -0,0 +1,276 @@ (new test file)

"""Unit tests for account deletion synchronization.

This test module verifies the enterprise account deletion sync functionality,
including Redis queuing, error handling, and community vs enterprise behavior.
"""

from unittest.mock import MagicMock, patch

import pytest
from redis import RedisError

from services.enterprise.account_deletion_sync import (
    _queue_task,
    sync_account_deletion,
    sync_workspace_member_removal,
)


class TestQueueTask:
    """Unit tests for the _queue_task helper function."""

    @pytest.fixture
    def mock_redis_client(self):
        """Mock redis_client for testing."""
        with patch("services.enterprise.account_deletion_sync.redis_client") as mock_redis:
            yield mock_redis

    @pytest.fixture
    def mock_uuid(self):
        """Mock UUID generation for predictable task IDs."""
        with patch("services.enterprise.account_deletion_sync.uuid.uuid4") as mock_uuid_gen:
            mock_uuid_gen.return_value = MagicMock(hex="test-task-id-1234")
            yield mock_uuid_gen

    def test_queue_task_success(self, mock_redis_client, mock_uuid):
        """Test successful task queueing to Redis."""
        # Arrange
        workspace_id = "ws-123"
        member_id = "member-456"
        source = "test_source"

        # Act
        result = _queue_task(workspace_id=workspace_id, member_id=member_id, source=source)

        # Assert
        assert result is True
        mock_redis_client.lpush.assert_called_once()

        # Verify the task payload structure
        call_args = mock_redis_client.lpush.call_args[0]
        assert call_args[0] == "enterprise:member:sync:queue"

        import json

        task_data = json.loads(call_args[1])
        assert task_data["workspace_id"] == workspace_id
        assert task_data["member_id"] == member_id
        assert task_data["source"] == source
        assert task_data["type"] == "sync_member_deletion_from_workspace"
        assert task_data["retry_count"] == 0
        assert "task_id" in task_data
        assert "created_at" in task_data

    def test_queue_task_redis_error(self, mock_redis_client, caplog):
        """Test handling of Redis connection errors."""
        # Arrange
        mock_redis_client.lpush.side_effect = RedisError("Connection failed")

        # Act
        result = _queue_task(workspace_id="ws-123", member_id="member-456", source="test_source")

        # Assert
        assert result is False
        assert "Failed to queue account deletion sync" in caplog.text

    def test_queue_task_type_error(self, mock_redis_client, caplog):
        """Test handling of JSON serialization errors."""
        # Arrange
        mock_redis_client.lpush.side_effect = TypeError("Cannot serialize")

        # Act
        result = _queue_task(workspace_id="ws-123", member_id="member-456", source="test_source")

        # Assert
        assert result is False
        assert "Failed to queue account deletion sync" in caplog.text


class TestSyncWorkspaceMemberRemoval:
    """Unit tests for sync_workspace_member_removal function."""

    @pytest.fixture
    def mock_queue_task(self):
        """Mock _queue_task for testing."""
        with patch("services.enterprise.account_deletion_sync._queue_task") as mock_queue:
            mock_queue.return_value = True
            yield mock_queue

    def test_sync_workspace_member_removal_enterprise_enabled(self, mock_queue_task):
        """Test sync when ENTERPRISE_ENABLED is True."""
        # Arrange
        workspace_id = "ws-123"
        member_id = "member-456"
        source = "workspace_member_removed"

        with patch("services.enterprise.account_deletion_sync.dify_config") as mock_config:
            mock_config.ENTERPRISE_ENABLED = True

            # Act
            result = sync_workspace_member_removal(workspace_id=workspace_id, member_id=member_id, source=source)

            # Assert
            assert result is True
            mock_queue_task.assert_called_once_with(workspace_id=workspace_id, member_id=member_id, source=source)

    def test_sync_workspace_member_removal_enterprise_disabled(self, mock_queue_task):
        """Test sync when ENTERPRISE_ENABLED is False (community edition)."""
        # Arrange
        with patch("services.enterprise.account_deletion_sync.dify_config") as mock_config:
            mock_config.ENTERPRISE_ENABLED = False

            # Act
            result = sync_workspace_member_removal(workspace_id="ws-123", member_id="member-456", source="test_source")

            # Assert
            assert result is True
            mock_queue_task.assert_not_called()

    def test_sync_workspace_member_removal_queue_failure(self, mock_queue_task):
        """Test handling of queue task failures."""
        # Arrange
        mock_queue_task.return_value = False

        with patch("services.enterprise.account_deletion_sync.dify_config") as mock_config:
            mock_config.ENTERPRISE_ENABLED = True

            # Act
            result = sync_workspace_member_removal(workspace_id="ws-123", member_id="member-456", source="test_source")

            # Assert
            assert result is False


class TestSyncAccountDeletion:
    """Unit tests for sync_account_deletion function."""

    @pytest.fixture
    def mock_db_session(self):
        """Mock database session for testing."""
        with patch("services.enterprise.account_deletion_sync.db.session") as mock_session:
            yield mock_session

    @pytest.fixture
    def mock_queue_task(self):
        """Mock _queue_task for testing."""
        with patch("services.enterprise.account_deletion_sync._queue_task") as mock_queue:
            mock_queue.return_value = True
            yield mock_queue

    def test_sync_account_deletion_enterprise_disabled(self, mock_db_session, mock_queue_task):
        """Test sync when ENTERPRISE_ENABLED is False (community edition)."""
        # Arrange
        with patch("services.enterprise.account_deletion_sync.dify_config") as mock_config:
            mock_config.ENTERPRISE_ENABLED = False

            # Act
            result = sync_account_deletion(account_id="acc-123", source="account_deleted")

            # Assert
            assert result is True
            mock_db_session.query.assert_not_called()
            mock_queue_task.assert_not_called()

    def test_sync_account_deletion_multiple_workspaces(self, mock_db_session, mock_queue_task):
        """Test sync for account with multiple workspace memberships."""
        # Arrange
        account_id = "acc-123"

        # Mock workspace joins
        mock_join1 = MagicMock()
        mock_join1.tenant_id = "tenant-1"
        mock_join2 = MagicMock()
        mock_join2.tenant_id = "tenant-2"
        mock_join3 = MagicMock()
        mock_join3.tenant_id = "tenant-3"

        mock_query = MagicMock()
        mock_query.filter_by.return_value.all.return_value = [mock_join1, mock_join2, mock_join3]
        mock_db_session.query.return_value = mock_query

        with patch("services.enterprise.account_deletion_sync.dify_config") as mock_config:
            mock_config.ENTERPRISE_ENABLED = True

            # Act
            result = sync_account_deletion(account_id=account_id, source="account_deleted")

            # Assert
            assert result is True
            assert mock_queue_task.call_count == 3

            # Verify each workspace was queued
            mock_queue_task.assert_any_call(workspace_id="tenant-1", member_id=account_id, source="account_deleted")
            mock_queue_task.assert_any_call(workspace_id="tenant-2", member_id=account_id, source="account_deleted")
            mock_queue_task.assert_any_call(workspace_id="tenant-3", member_id=account_id, source="account_deleted")

    def test_sync_account_deletion_no_workspaces(self, mock_db_session, mock_queue_task):
        """Test sync for account with no workspace memberships."""
        # Arrange
        mock_query = MagicMock()
        mock_query.filter_by.return_value.all.return_value = []
        mock_db_session.query.return_value = mock_query

        with patch("services.enterprise.account_deletion_sync.dify_config") as mock_config:
            mock_config.ENTERPRISE_ENABLED = True

            # Act
            result = sync_account_deletion(account_id="acc-123", source="account_deleted")

            # Assert
            assert result is True
            mock_queue_task.assert_not_called()

    def test_sync_account_deletion_partial_failure(self, mock_db_session, mock_queue_task):
        """Test sync when some tasks fail to queue."""
        # Arrange
        account_id = "acc-123"

        # Mock workspace joins
        mock_join1 = MagicMock()
        mock_join1.tenant_id = "tenant-1"
        mock_join2 = MagicMock()
        mock_join2.tenant_id = "tenant-2"
        mock_join3 = MagicMock()
        mock_join3.tenant_id = "tenant-3"

        mock_query = MagicMock()
        mock_query.filter_by.return_value.all.return_value = [mock_join1, mock_join2, mock_join3]
        mock_db_session.query.return_value = mock_query

        # Mock queue_task to fail for second workspace
        def queue_side_effect(workspace_id, member_id, source):
            return workspace_id != "tenant-2"

        mock_queue_task.side_effect = queue_side_effect

        with patch("services.enterprise.account_deletion_sync.dify_config") as mock_config:
            mock_config.ENTERPRISE_ENABLED = True

            # Act
            result = sync_account_deletion(account_id=account_id, source="account_deleted")

            # Assert
            assert result is False  # Should return False if any task fails
            assert mock_queue_task.call_count == 3

    def test_sync_account_deletion_all_failures(self, mock_db_session, mock_queue_task):
        """Test sync when all tasks fail to queue."""
        # Arrange
        mock_join = MagicMock()
        mock_join.tenant_id = "tenant-1"

        mock_query = MagicMock()
        mock_query.filter_by.return_value.all.return_value = [mock_join]
        mock_db_session.query.return_value = mock_query

        mock_queue_task.return_value = False

        with patch("services.enterprise.account_deletion_sync.dify_config") as mock_config:
            mock_config.ENTERPRISE_ENABLED = True

            # Act
            result = sync_account_deletion(account_id="acc-123", source="account_deleted")

            # Assert
            assert result is False
            mock_queue_task.assert_called_once()
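Note the patch target used throughout these tests: the service code imports sync_account_deletion and sync_workspace_member_removal inside the function bodies, so the names are resolved on the defining module at call time, and patching that module is visible to the service. A stdlib-only sketch of this late-binding behavior (the helper function below is illustrative):

```python
from unittest.mock import patch


def dumps_via_late_import(obj):
    # Mirrors the service pattern: importing inside the function body means
    # the name is looked up on the defining module at call time.
    from json import dumps

    return dumps(obj)


with patch("json.dumps", return_value="patched") as mock_dumps:
    assert dumps_via_late_import({}) == "patched"
    mock_dumps.assert_called_once_with({})
```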
@@ -350,7 +350,7 @@ class TestDeleteWorkflowArchiveLogs:
         mock_query.where.return_value = mock_delete_query
         mock_db.session.query.return_value = mock_query

-        delete_func("log-1")
+        delete_func(mock_db.session, "log-1")

         mock_db.session.query.assert_called_once_with(WorkflowArchiveLog)
         mock_query.where.assert_called_once()
api/uv.lock (generated, 2 changes)
@@ -1368,7 +1368,7 @@ wheels = [

 [[package]]
 name = "dify-api"
-version = "1.12.0"
+version = "1.12.1"
 source = { virtual = "." }
 dependencies = [
     { name = "aliyun-log-python-sdk" },
@@ -1518,5 +1518,6 @@ AMPLITUDE_API_KEY=

 # Sandbox expired records clean configuration
 SANDBOX_EXPIRED_RECORDS_CLEAN_GRACEFUL_PERIOD=21
 SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_SIZE=1000
+SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_MAX_INTERVAL=200
 SANDBOX_EXPIRED_RECORDS_RETENTION_DAYS=30
 SANDBOX_EXPIRED_RECORDS_CLEAN_TASK_LOCK_TTL=90000
@@ -21,7 +21,7 @@ services:

   # API service
   api:
-    image: langgenius/dify-api:1.12.0
+    image: langgenius/dify-api:1.12.1
     restart: always
     environment:
       # Use the shared environment variables.
@@ -63,7 +63,7 @@ services:
   # worker service
   # The Celery worker for processing all queues (dataset, workflow, mail, etc.)
   worker:
-    image: langgenius/dify-api:1.12.0
+    image: langgenius/dify-api:1.12.1
     restart: always
     environment:
       # Use the shared environment variables.
@@ -102,7 +102,7 @@ services:
   # worker_beat service
   # Celery beat for scheduling periodic tasks.
   worker_beat:
-    image: langgenius/dify-api:1.12.0
+    image: langgenius/dify-api:1.12.1
     restart: always
     environment:
       # Use the shared environment variables.
@@ -132,7 +132,7 @@ services:

   # Frontend web application.
   web:
-    image: langgenius/dify-web:1.12.0
+    image: langgenius/dify-web:1.12.1
     restart: always
     environment:
       CONSOLE_API_URL: ${CONSOLE_API_URL:-}
@@ -682,6 +682,7 @@ x-shared-env: &shared-api-worker-env
   AMPLITUDE_API_KEY: ${AMPLITUDE_API_KEY:-}
   SANDBOX_EXPIRED_RECORDS_CLEAN_GRACEFUL_PERIOD: ${SANDBOX_EXPIRED_RECORDS_CLEAN_GRACEFUL_PERIOD:-21}
   SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_SIZE: ${SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_SIZE:-1000}
+  SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_MAX_INTERVAL: ${SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_MAX_INTERVAL:-200}
   SANDBOX_EXPIRED_RECORDS_RETENTION_DAYS: ${SANDBOX_EXPIRED_RECORDS_RETENTION_DAYS:-30}
   SANDBOX_EXPIRED_RECORDS_CLEAN_TASK_LOCK_TTL: ${SANDBOX_EXPIRED_RECORDS_CLEAN_TASK_LOCK_TTL:-90000}
@@ -707,7 +708,7 @@ services:

   # API service
   api:
-    image: langgenius/dify-api:1.12.0
+    image: langgenius/dify-api:1.12.1
     restart: always
     environment:
       # Use the shared environment variables.
@@ -749,7 +750,7 @@ services:
   # worker service
   # The Celery worker for processing all queues (dataset, workflow, mail, etc.)
   worker:
-    image: langgenius/dify-api:1.12.0
+    image: langgenius/dify-api:1.12.1
     restart: always
     environment:
       # Use the shared environment variables.
@@ -788,7 +789,7 @@ services:
   # worker_beat service
   # Celery beat for scheduling periodic tasks.
   worker_beat:
-    image: langgenius/dify-api:1.12.0
+    image: langgenius/dify-api:1.12.1
     restart: always
     environment:
       # Use the shared environment variables.
@@ -818,7 +819,7 @@ services:

   # Frontend web application.
   web:
-    image: langgenius/dify-web:1.12.0
+    image: langgenius/dify-web:1.12.1
     restart: always
     environment:
       CONSOLE_API_URL: ${CONSOLE_API_URL:-}
@@ -4,7 +4,7 @@ import type { FC } from 'react'
import { RiQuestionLine } from '@remixicon/react'
import { useBoolean } from 'ahooks'
import * as React from 'react'
import { useEffect, useRef, useState } from 'react'
import { useCallback, useEffect, useRef, useState } from 'react'
import { PortalToFollowElem, PortalToFollowElemContent, PortalToFollowElemTrigger } from '@/app/components/base/portal-to-follow-elem'
import { cn } from '@/utils/classnames'
import { tooltipManager } from './TooltipManager'

@@ -61,6 +61,20 @@ const Tooltip: FC<TooltipProps> = ({
    isHoverTriggerRef.current = isHoverTrigger
  }, [isHoverTrigger])

  const closeTimeoutRef = useRef<ReturnType<typeof setTimeout> | null>(null)
  const clearCloseTimeout = useCallback(() => {
    if (closeTimeoutRef.current) {
      clearTimeout(closeTimeoutRef.current)
      closeTimeoutRef.current = null
    }
  }, [])

  useEffect(() => {
    return () => {
      clearCloseTimeout()
    }
  }, [clearCloseTimeout])

  const close = () => setOpen(false)

  const handleLeave = (isTrigger: boolean) => {

@@ -71,7 +85,9 @@ const Tooltip: FC<TooltipProps> = ({

    // give time to move to the popup
    if (needsDelay) {
      setTimeout(() => {
      clearCloseTimeout()
      closeTimeoutRef.current = setTimeout(() => {
        closeTimeoutRef.current = null
        if (!isHoverPopupRef.current && !isHoverTriggerRef.current) {
          setOpen(false)
          tooltipManager.clear(close)

@@ -79,6 +95,7 @@ const Tooltip: FC<TooltipProps> = ({
      }, 300)
    }
    else {
      clearCloseTimeout()
      setOpen(false)
      tooltipManager.clear(close)
    }

@@ -95,6 +112,7 @@ const Tooltip: FC<TooltipProps> = ({
        onClick={() => triggerMethod === 'click' && setOpen(v => !v)}
        onMouseEnter={() => {
          if (triggerMethod === 'hover') {
            clearCloseTimeout()
            setHoverTrigger()
            tooltipManager.register(close)
            setOpen(true)

@@ -115,7 +133,12 @@ const Tooltip: FC<TooltipProps> = ({
            !noDecoration && 'system-xs-regular relative max-w-[300px] break-words rounded-md bg-components-panel-bg px-3 py-2 text-left text-text-tertiary shadow-lg',
            popupClassName,
          )}
          onMouseEnter={() => triggerMethod === 'hover' && setHoverPopup()}
          onMouseEnter={() => {
            if (triggerMethod === 'hover') {
              clearCloseTimeout()
              setHoverPopup()
            }
          }}
          onMouseLeave={() => triggerMethod === 'hover' && handleLeave(false)}
        >
          {popupContent}
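The tooltip fix replaces a fire-and-forget `setTimeout` with a ref-tracked timer, so a pending close can be cancelled when the pointer re-enters the trigger or popup and is always cleared on unmount. A minimal sketch of the same pattern as a standalone hook (names simplified; not the component's actual API):

```tsx
import { useCallback, useEffect, useRef } from 'react'

// One tracked timeout: re-scheduling cancels the previous one, and unmount clears it.
function useCancellableTimeout() {
  const timeoutRef = useRef<ReturnType<typeof setTimeout> | null>(null)

  const cancel = useCallback(() => {
    if (timeoutRef.current) {
      clearTimeout(timeoutRef.current)
      timeoutRef.current = null
    }
  }, [])

  const schedule = useCallback((fn: () => void, delay: number) => {
    cancel() // never stack two pending closes
    timeoutRef.current = setTimeout(() => {
      timeoutRef.current = null
      fn()
    }, delay)
  }, [cancel])

  useEffect(() => cancel, [cancel]) // clear any pending timeout on unmount

  return { schedule, cancel }
}
```

Without the `clearCloseTimeout()` calls on mouse-enter, a close scheduled by an earlier leave could still fire while the pointer is back over the trigger, which is the flicker this change removes.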
@@ -159,69 +159,74 @@ const Apps = ({

  return (
    <div className={cn(
      'flex h-full flex-col border-l-[0.5px] border-divider-regular',
      'flex h-full min-h-0 flex-col overflow-hidden border-l-[0.5px] border-divider-regular',
    )}
    >
      {systemFeatures.enable_explore_banner && (
        <div className="mt-4 px-12">
          <Banner />
        </div>
      )}
      <div className={cn(
        'mt-6 flex items-center justify-between px-12',
      )}
      >
        <div className="flex items-center">
          <div className="system-xl-semibold grow truncate text-text-primary">{!hasFilterCondition ? t('apps.title', { ns: 'explore' }) : t('apps.resultNum', { num: searchFilteredList.length, ns: 'explore' })}</div>
          {hasFilterCondition && (
            <>
              <div className="mx-3 h-4 w-px bg-divider-regular"></div>
              <Button size="medium" onClick={handleResetFilter}>{t('apps.resetFilter', { ns: 'explore' })}</Button>
            </>
          )}
        </div>
        <Input
          showLeftIcon
          showClearIcon
          wrapperClassName="w-[200px] self-start"
          value={keywords}
          onChange={e => handleKeywordsChange(e.target.value)}
          onClear={() => handleKeywordsChange('')}
        />
      </div>
      <div className="flex flex-1 flex-col overflow-y-auto">
        {systemFeatures.enable_explore_banner && (
          <div className="mt-4 px-12">
            <Banner />
          </div>
        )}

        <div className="mt-2 px-12">
          <Category
            list={categories}
            value={currCategory}
            onChange={setCurrCategory}
            allCategoriesEn={allCategoriesEn}
          />
        </div>

        <div className={cn(
          'relative mt-4 flex flex-1 shrink-0 grow flex-col overflow-auto pb-6',
        )}
        >
          <nav
            className={cn(
              s.appList,
              'grid shrink-0 content-start gap-4 px-6 sm:px-12',
        <div className="sticky top-0 z-10 bg-background-body">
          <div className={cn(
            'flex items-center justify-between px-12 pt-6',
          )}
          >
            {searchFilteredList.map(app => (
              <AppCard
                key={app.app_id}
                isExplore
                app={app}
                canCreate={hasEditPermission}
                onCreate={() => {
                  setCurrApp(app)
                  setIsShowCreateModal(true)
                }}
              >
            <div className="flex items-center">
              <div className="system-xl-semibold grow truncate text-text-primary">{!hasFilterCondition ? t('apps.title', { ns: 'explore' }) : t('apps.resultNum', { num: searchFilteredList.length, ns: 'explore' })}</div>
              {hasFilterCondition && (
                <>
                  <div className="mx-3 h-4 w-px bg-divider-regular"></div>
                  <Button size="medium" onClick={handleResetFilter}>{t('apps.resetFilter', { ns: 'explore' })}</Button>
                </>
              )}
            </div>
            <Input
              showLeftIcon
              showClearIcon
              wrapperClassName="w-[200px] self-start"
              value={keywords}
              onChange={e => handleKeywordsChange(e.target.value)}
              onClear={() => handleKeywordsChange('')}
            />
            ))}
          </nav>
          </div>

          <div className="px-12 pb-4 pt-2">
            <Category
              list={categories}
              value={currCategory}
              onChange={setCurrCategory}
              allCategoriesEn={allCategoriesEn}
            />
          </div>
        </div>

        <div className={cn(
          'relative flex flex-1 shrink-0 grow flex-col pb-6',
        )}
        >
          <nav
            className={cn(
              s.appList,
              'grid shrink-0 content-start gap-4 px-6 sm:px-12',
            )}
          >
            {searchFilteredList.map(app => (
              <AppCard
                key={app.app_id}
                isExplore
                app={app}
                canCreate={hasEditPermission}
                onCreate={() => {
                  setCurrApp(app)
                  setIsShowCreateModal(true)
                }}
              />
            ))}
          </nav>
        </div>
      </div>
      {isShowCreateModal && (
        <CreateAppModal
@@ -71,7 +71,7 @@ const Explore: FC<IExploreProps> = ({
      }
    >
      <Sidebar controlUpdateInstalledApps={controlUpdateInstalledApps} />
      <div className="w-0 grow">
      <div className="h-full min-h-0 w-0 grow">
        {children}
      </div>
    </ExploreContext.Provider>
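Both explore changes serve the same layout goal: one scroll container owns the page, the title/search and category rows sit in a `sticky top-0` block inside it, and `min-h-0` on the flex child lets the inner `overflow-y-auto` region actually shrink and scroll (flex children default to `min-height: auto`). A stripped-down sketch of the structure, with class names abbreviated:

```tsx
// Sketch of the scroll structure above; not the component's full markup.
const ExploreLayout = () => (
  <div className="flex h-full min-h-0 flex-col overflow-hidden">
    <div className="flex flex-1 flex-col overflow-y-auto">
      {/* pinned while the grid below scrolls underneath */}
      <div className="sticky top-0 z-10 bg-background-body">
        {/* title, search input, category filter */}
      </div>
      <nav className="grid gap-4">
        {/* app cards */}
      </nav>
    </div>
  </div>
)
```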
@@ -599,20 +599,30 @@ describe('CommonCreateModal', () => {
        },
      })
      mockUsePluginStore.mockReturnValue(detailWithCredentials)
      const existingBuilder = createMockSubscriptionBuilder()
      mockVerifyCredentials.mockImplementation((params, { onSuccess }) => {
        onSuccess()
      })

      render(<CommonCreateModal {...defaultProps} />)

      await waitFor(() => {
        expect(mockCreateBuilder).toHaveBeenCalled()
      })
      render(<CommonCreateModal {...defaultProps} builder={existingBuilder} />)

      fireEvent.click(screen.getByTestId('modal-confirm'))

      await waitFor(() => {
        expect(mockVerifyCredentials).toHaveBeenCalled()
        expect(mockVerifyCredentials).toHaveBeenCalledWith(
          expect.objectContaining({
            provider: 'test-provider',
            subscriptionBuilderId: existingBuilder.id,
          }),
          expect.objectContaining({
            onSuccess: expect.any(Function),
            onError: expect.any(Function),
          }),
        )
      })

      await waitFor(() => {
        expect(screen.getByTestId('modal-confirm')).toHaveTextContent('pluginTrigger.modal.common.create')
      })
    })

@@ -629,15 +639,12 @@ describe('CommonCreateModal', () => {
        },
      })
      mockUsePluginStore.mockReturnValue(detailWithCredentials)
      const existingBuilder = createMockSubscriptionBuilder()
      mockVerifyCredentials.mockImplementation((params, { onError }) => {
        onError(new Error('Verification failed'))
      })

      render(<CommonCreateModal {...defaultProps} />)

      await waitFor(() => {
        expect(mockCreateBuilder).toHaveBeenCalled()
      })
      render(<CommonCreateModal {...defaultProps} builder={existingBuilder} />)

      fireEvent.click(screen.getByTestId('modal-confirm'))
@@ -4,6 +4,17 @@ import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import { DSLImportStatus } from '@/models/app'
import UpdateDSLModal from './update-dsl-modal'

class MockFileReader {
  onload: ((this: FileReader, event: ProgressEvent<FileReader>) => void) | null = null

  readAsText(_file: Blob) {
    const event = { target: { result: 'test content' } } as unknown as ProgressEvent<FileReader>
    this.onload?.call(this as unknown as FileReader, event)
  }
}

vi.stubGlobal('FileReader', MockFileReader as unknown as typeof FileReader)

// Mock react-i18next
vi.mock('react-i18next', () => ({
  useTranslation: () => ({
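The stubbed `FileReader` fires `onload` synchronously with a fixed payload, so the component's read path resolves without real file I/O. For reference, this is the standard consumption pattern the mock stands in for (a generic sketch, not the modal's exact code):

```ts
// Standard FileReader usage; under the mock above, resolve() fires synchronously
// with 'test content' instead of the file's real contents.
function readDslFile(file: Blob): Promise<string> {
  return new Promise((resolve) => {
    const reader = new FileReader()
    reader.onload = (event) => {
      resolve(event.target?.result as string)
    }
    reader.readAsText(file)
  })
}
```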
@@ -98,31 +98,46 @@ export const useNodesSyncDraft = () => {
  ) => {
    if (getNodesReadOnly())
      return
    const postParams = getPostParams()

    if (postParams) {
      const {
        setSyncWorkflowDraftHash,
        setDraftUpdatedAt,
      } = workflowStore.getState()
      try {
        const res = await syncWorkflowDraft(postParams)
        setSyncWorkflowDraftHash(res.hash)
        setDraftUpdatedAt(res.updated_at)
        callback?.onSuccess?.()
    // Get base params without hash
    const baseParams = getPostParams()
    if (!baseParams)
      return

    const {
      setSyncWorkflowDraftHash,
      setDraftUpdatedAt,
    } = workflowStore.getState()

    try {
      // IMPORTANT: Get the LATEST hash right before sending the request
      // This ensures that even if queued, each request uses the most recent hash
      const latestHash = workflowStore.getState().syncWorkflowDraftHash

      const postParams = {
        ...baseParams,
        params: {
          ...baseParams.params,
          hash: latestHash || null, // null for first-time, otherwise use latest hash
        },
      }
      catch (error: any) {
        if (error && error.json && !error.bodyUsed) {
          error.json().then((err: any) => {
            if (err.code === 'draft_workflow_not_sync' && !notRefreshWhenSyncError)
              handleRefreshWorkflowDraft()
          })
        }
        callback?.onError?.()
      }
      finally {
        callback?.onSettled?.()

      const res = await syncWorkflowDraft(postParams)
      setSyncWorkflowDraftHash(res.hash)
      setDraftUpdatedAt(res.updated_at)
      callback?.onSuccess?.()
    }
    catch (error: any) {
      if (error && error.json && !error.bodyUsed) {
        error.json().then((err: any) => {
          if (err.code === 'draft_workflow_not_sync' && !notRefreshWhenSyncError)
            handleRefreshWorkflowDraft()
        })
      }
      callback?.onError?.()
    }
    finally {
      callback?.onSettled?.()
    }
  }, [workflowStore, getPostParams, getNodesReadOnly, handleRefreshWorkflowDraft])
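The rewrite defers reading `syncWorkflowDraftHash` until just before the request is sent. Previously the hash was baked into `postParams` when the save was scheduled, so if two saves were queued, the second carried a hash the first save's response had already superseded, triggering `draft_workflow_not_sync`. A stripped-down illustration of the difference (the types are assumptions, not the real store shape; `store` stands in for the Zustand workflow store):

```ts
// Stripped-down illustration of the fix, under assumed types.
type DraftStore = { hash: string | null, setHash: (hash: string) => void }

async function saveDraft(
  store: { getState: () => DraftStore },
  send: (hash: string | null) => Promise<{ hash: string }>,
) {
  // Read the hash at send time, not at schedule time: a queued second save
  // then picks up the hash returned by the first save's response.
  const latestHash = store.getState().hash
  const res = await send(latestHash)
  store.getState().setHash(res.hash)
}
```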
119
web/contract/console/trigger.ts
Normal file
@@ -0,0 +1,119 @@
import type {
  TriggerLogEntity,
  TriggerOAuthClientParams,
  TriggerOAuthConfig,
  TriggerProviderApiEntity,
  TriggerSubscription,
  TriggerSubscriptionBuilder,
} from '@/app/components/workflow/block-selector/types'
import { type } from '@orpc/contract'
import { base } from '../base'

export const triggersContract = base
  .route({ path: '/workspaces/current/triggers', method: 'GET' })
  .input(type<{ query?: { type?: string } }>())
  .output(type<TriggerProviderApiEntity[]>())

export const triggerProviderInfoContract = base
  .route({ path: '/workspaces/current/trigger-provider/{provider}/info', method: 'GET' })
  .input(type<{ params: { provider: string } }>())
  .output(type<TriggerProviderApiEntity>())

export const triggerSubscriptionsContract = base
  .route({ path: '/workspaces/current/trigger-provider/{provider}/subscriptions/list', method: 'GET' })
  .input(type<{ params: { provider: string } }>())
  .output(type<TriggerSubscription[]>())

export const triggerSubscriptionBuilderCreateContract = base
  .route({ path: '/workspaces/current/trigger-provider/{provider}/subscriptions/builder/create', method: 'POST' })
  .input(type<{
    params: { provider: string }
    body?: { credential_type?: string }
  }>())
  .output(type<{ subscription_builder: TriggerSubscriptionBuilder }>())

export const triggerSubscriptionBuilderUpdateContract = base
  .route({ path: '/workspaces/current/trigger-provider/{provider}/subscriptions/builder/update/{subscriptionBuilderId}', method: 'POST' })
  .input(type<{
    params: { provider: string, subscriptionBuilderId: string }
    body?: {
      name?: string
      properties?: Record<string, unknown>
      parameters?: Record<string, unknown>
      credentials?: Record<string, unknown>
    }
  }>())
  .output(type<TriggerSubscriptionBuilder>())

export const triggerSubscriptionBuilderVerifyUpdateContract = base
  .route({ path: '/workspaces/current/trigger-provider/{provider}/subscriptions/builder/verify-and-update/{subscriptionBuilderId}', method: 'POST' })
  .input(type<{
    params: { provider: string, subscriptionBuilderId: string }
    body?: { credentials?: Record<string, unknown> }
  }>())
  .output(type<{ verified: boolean }>())

export const triggerSubscriptionVerifyContract = base
  .route({ path: '/workspaces/current/trigger-provider/{provider}/subscriptions/verify/{subscriptionId}', method: 'POST' })
  .input(type<{
    params: { provider: string, subscriptionId: string }
    body?: { credentials?: Record<string, unknown> }
  }>())
  .output(type<{ verified: boolean }>())

export const triggerSubscriptionBuildContract = base
  .route({ path: '/workspaces/current/trigger-provider/{provider}/subscriptions/builder/build/{subscriptionBuilderId}', method: 'POST' })
  .input(type<{
    params: { provider: string, subscriptionBuilderId: string }
    body?: {
      name?: string
      parameters?: Record<string, unknown>
    }
  }>())
  .output(type<unknown>())

export const triggerSubscriptionDeleteContract = base
  .route({ path: '/workspaces/current/trigger-provider/{subscriptionId}/subscriptions/delete', method: 'POST' })
  .input(type<{ params: { subscriptionId: string } }>())
  .output(type<{ result: string }>())

export const triggerSubscriptionUpdateContract = base
  .route({ path: '/workspaces/current/trigger-provider/{subscriptionId}/subscriptions/update', method: 'POST' })
  .input(type<{
    params: { subscriptionId: string }
    body?: {
      name?: string
      properties?: Record<string, unknown>
      parameters?: Record<string, unknown>
      credentials?: Record<string, unknown>
    }
  }>())
  .output(type<{ result: string, id: string }>())

export const triggerSubscriptionBuilderLogsContract = base
  .route({ path: '/workspaces/current/trigger-provider/{provider}/subscriptions/builder/logs/{subscriptionBuilderId}', method: 'GET' })
  .input(type<{ params: { provider: string, subscriptionBuilderId: string } }>())
  .output(type<{ logs: TriggerLogEntity[] }>())

export const triggerOAuthConfigContract = base
  .route({ path: '/workspaces/current/trigger-provider/{provider}/oauth/client', method: 'GET' })
  .input(type<{ params: { provider: string } }>())
  .output(type<TriggerOAuthConfig>())

export const triggerOAuthConfigureContract = base
  .route({ path: '/workspaces/current/trigger-provider/{provider}/oauth/client', method: 'POST' })
  .input(type<{
    params: { provider: string }
    body: { client_params?: TriggerOAuthClientParams, enabled: boolean }
  }>())
  .output(type<{ result: string }>())

export const triggerOAuthDeleteContract = base
  .route({ path: '/workspaces/current/trigger-provider/{provider}/oauth/client', method: 'DELETE' })
  .input(type<{ params: { provider: string } }>())
  .output(type<{ result: string }>())

export const triggerOAuthInitiateContract = base
  .route({ path: '/workspaces/current/trigger-provider/{provider}/subscriptions/oauth/authorize', method: 'GET' })
  .input(type<{ params: { provider: string } }>())
  .output(type<{ authorization_url: string, subscription_builder: TriggerSubscriptionBuilder }>())
@@ -1,6 +1,23 @@
import type { InferContractRouterInputs } from '@orpc/contract'
import { bindPartnerStackContract, invoicesContract } from './console/billing'
import { systemFeaturesContract } from './console/system'
import {
  triggerOAuthConfigContract,
  triggerOAuthConfigureContract,
  triggerOAuthDeleteContract,
  triggerOAuthInitiateContract,
  triggerProviderInfoContract,
  triggersContract,
  triggerSubscriptionBuildContract,
  triggerSubscriptionBuilderCreateContract,
  triggerSubscriptionBuilderLogsContract,
  triggerSubscriptionBuilderUpdateContract,
  triggerSubscriptionBuilderVerifyUpdateContract,
  triggerSubscriptionDeleteContract,
  triggerSubscriptionsContract,
  triggerSubscriptionUpdateContract,
  triggerSubscriptionVerifyContract,
} from './console/trigger'
import { trialAppDatasetsContract, trialAppInfoContract, trialAppParametersContract, trialAppWorkflowsContract } from './console/try-app'
import { collectionPluginsContract, collectionsContract, searchAdvancedContract } from './marketplace'

@@ -24,6 +41,23 @@ export const consoleRouterContract = {
    invoices: invoicesContract,
    bindPartnerStack: bindPartnerStackContract,
  },
  triggers: {
    list: triggersContract,
    providerInfo: triggerProviderInfoContract,
    subscriptions: triggerSubscriptionsContract,
    subscriptionBuilderCreate: triggerSubscriptionBuilderCreateContract,
    subscriptionBuilderUpdate: triggerSubscriptionBuilderUpdateContract,
    subscriptionBuilderVerifyUpdate: triggerSubscriptionBuilderVerifyUpdateContract,
    subscriptionVerify: triggerSubscriptionVerifyContract,
    subscriptionBuild: triggerSubscriptionBuildContract,
    subscriptionDelete: triggerSubscriptionDeleteContract,
    subscriptionUpdate: triggerSubscriptionUpdateContract,
    subscriptionBuilderLogs: triggerSubscriptionBuilderLogsContract,
    oauthConfig: triggerOAuthConfigContract,
    oauthConfigure: triggerOAuthConfigureContract,
    oauthDelete: triggerOAuthDeleteContract,
    oauthInitiate: triggerOAuthInitiateContract,
  },
}

export type ConsoleInputs = InferContractRouterInputs<typeof consoleRouterContract>
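With the trigger contracts registered on `consoleRouterContract`, query keys, client calls, and input types all derive from a single definition. A small sketch of what `ConsoleInputs` buys call sites (the import path is illustrative):

```ts
import type { ConsoleInputs } from '@/contract' // illustrative path

// Name a route's input type straight off the router contract...
type TriggerListInput = ConsoleInputs['triggers']['list']

// ...so helpers stay type-checked against the contract definition.
const byTypeInput = (type: string): TriggerListInput => ({ query: { type } })
```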
@@ -1,7 +1,7 @@
{
  "name": "dify-web",
  "type": "module",
  "version": "1.12.0",
  "version": "1.12.1",
  "private": true,
  "packageManager": "pnpm@10.27.0+sha512.72d699da16b1179c14ba9e64dc71c9a40988cbdc65c264cb0e489db7de917f20dcf4d64d8723625f2969ba52d4b7e2a1170682d9ac2a5dcaeaab732b7e16f04a",
  "imports": {

@@ -46,7 +46,8 @@
    "uglify-embed": "node ./bin/uglify-embed",
    "i18n:check": "tsx ./scripts/check-i18n.js",
    "test": "vitest run",
    "test:coverage": "vitest run --coverage --reporter=dot --silent=passed-only",
    "test:coverage": "vitest run --coverage",
    "test:ci": "vitest run --coverage --reporter vitest-tiny-reporter --silent=passed-only",
    "test:watch": "vitest --watch",
    "analyze-component": "node ./scripts/analyze-component.js",
    "refactor-component": "node ./scripts/refactor-component.js",

@@ -234,7 +235,8 @@
    "vite": "7.3.1",
    "vite-tsconfig-paths": "6.0.4",
    "vitest": "4.0.17",
    "vitest-canvas-mock": "1.1.3"
    "vitest-canvas-mock": "1.1.3",
    "vitest-tiny-reporter": "1.3.1"
  },
  "pnpm": {
    "overrides": {
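The new `test:ci` script selects `vitest-tiny-reporter` plus coverage on the command line (this is what the CI workflow now invokes), while `test:coverage` stays verbose for local runs. The same split could be expressed in config instead; a sketch, assuming the reporter resolves by package name the way Vitest custom reporters normally do:

```ts
// vitest.config.ts - sketch only; the repo passes these flags via CLI scripts instead.
import { defineConfig } from 'vitest/config'

export default defineConfig({
  test: {
    // Terse reporter on CI, default reporter locally.
    reporters: process.env.CI ? ['vitest-tiny-reporter'] : ['default'],
    coverage: { enabled: Boolean(process.env.CI) },
  },
})
```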
15
web/pnpm-lock.yaml
generated
@@ -582,6 +582,9 @@ importers:
      vitest-canvas-mock:
        specifier: 1.1.3
        version: 1.1.3(vitest@4.0.17)
      vitest-tiny-reporter:
        specifier: 1.3.1
        version: 1.3.1(@vitest/runner@4.0.17)(vitest@4.0.17)

packages:

@@ -7230,6 +7233,12 @@ packages:
    peerDependencies:
      vitest: ^3.0.0 || ^4.0.0

  vitest-tiny-reporter@1.3.1:
    resolution: {integrity: sha512-9WfLruQBbxm4EqMIS0jDZmQjvMgsWgHUso9mHQWgjA6hM3tEVhjdG8wYo7ePFh1XbwEFzEo3XUQqkGoKZ/Td2Q==}
    peerDependencies:
      '@vitest/runner': ^2.0.0 || ^3.0.2 || ^4.0.0
      vitest: ^2.0.0 || ^3.0.2 || ^4.0.0

  vitest@4.0.17:
    resolution: {integrity: sha512-FQMeF0DJdWY0iOnbv466n/0BudNdKj1l5jYgl5JVTwjSsZSlqyXFt/9+1sEyhR6CLowbZpV7O1sCHrzBhucKKg==}
    engines: {node: ^20.0.0 || ^22.0.0 || >=24.0.0}

@@ -15228,6 +15237,12 @@ snapshots:
      moo-color: 1.0.3
      vitest: 4.0.17(@types/node@18.15.0)(@vitest/browser-playwright@4.0.17)(jiti@1.21.7)(jsdom@27.3.0(canvas@3.2.1))(sass@1.93.2)(terser@5.46.0)(tsx@4.21.0)(yaml@2.8.2)

  vitest-tiny-reporter@1.3.1(@vitest/runner@4.0.17)(vitest@4.0.17):
    dependencies:
      '@vitest/runner': 4.0.17
      tinyrainbow: 3.0.3
      vitest: 4.0.17(@types/node@18.15.0)(@vitest/browser-playwright@4.0.17)(jiti@1.21.7)(jsdom@27.3.0(canvas@3.2.1))(sass@1.93.2)(terser@5.46.0)(tsx@4.21.0)(yaml@2.8.2)

  vitest@4.0.17(@types/node@18.15.0)(@vitest/browser-playwright@4.0.17)(jiti@1.21.7)(jsdom@27.3.0(canvas@3.2.1))(sass@1.93.2)(terser@5.46.0)(tsx@4.21.0)(yaml@2.8.2):
    dependencies:
      '@vitest/expect': 4.0.17
@@ -10,17 +10,14 @@ import type {
} from '@/app/components/workflow/block-selector/types'
import { useMutation, useQuery, useQueryClient } from '@tanstack/react-query'
import { CollectionType } from '@/app/components/tools/types'
import { del, get, post } from './base'
import { consoleClient, consoleQuery } from '@/service/client'
import { get, post } from './base'
import { useInvalid } from './use-base'

const NAME_SPACE = 'triggers'

// Trigger Provider Service - Provider ID Format: plugin_id/provider_name

// Convert backend API response to frontend ToolWithProvider format
const convertToTriggerWithProvider = (provider: TriggerProviderApiEntity): TriggerWithProvider => {
  return {
    // Collection fields
    id: provider.plugin_id || provider.name,
    name: provider.name,
    author: provider.author,

@@ -58,12 +55,9 @@ const convertToTriggerWithProvider = (provider: TriggerProviderApiEntity): Trigg
      labels: provider.tags || [],
      output_schema: event.output_schema || {},
    })),

    // Trigger-specific schema fields
    subscription_constructor: provider.subscription_constructor,
    subscription_schema: provider.subscription_schema,
    supported_creation_methods: provider.supported_creation_methods,

    meta: {
      version: '1.0',
    },

@@ -72,22 +66,20 @@ const convertToTriggerWithProvider = (provider: TriggerProviderApiEntity): Trigg

export const useAllTriggerPlugins = (enabled = true) => {
  return useQuery<TriggerWithProvider[]>({
    queryKey: [NAME_SPACE, 'all'],
    queryKey: consoleQuery.triggers.list.queryKey({ input: {} }),
    queryFn: async () => {
      const response = await get<TriggerProviderApiEntity[]>('/workspaces/current/triggers')
      const response = await consoleClient.triggers.list({})
      return response.map(convertToTriggerWithProvider)
    },
    enabled,
    staleTime: 0,
    gcTime: 0,
  })
}

export const useTriggerPluginsByType = (triggerType: string, enabled = true) => {
  return useQuery<TriggerWithProvider[]>({
    queryKey: [NAME_SPACE, 'byType', triggerType],
    queryKey: consoleQuery.triggers.list.queryKey({ input: { query: { type: triggerType } } }),
    queryFn: async () => {
      const response = await get<TriggerProviderApiEntity[]>(`/workspaces/current/triggers?type=${triggerType}`)
      const response = await consoleClient.triggers.list({ query: { type: triggerType } })
      return response.map(convertToTriggerWithProvider)
    },
    enabled: enabled && !!triggerType,

@@ -95,25 +87,23 @@ export const useTriggerPluginsByType = (triggerType: string, enabled = true) =>
}

export const useInvalidateAllTriggerPlugins = () => {
  return useInvalid([NAME_SPACE, 'all'])
  return useInvalid(consoleQuery.triggers.list.queryKey({ input: {} }))
}

// ===== Trigger Subscriptions Management =====

export const useTriggerProviderInfo = (provider: string, enabled = true) => {
  return useQuery<TriggerProviderApiEntity>({
    queryKey: [NAME_SPACE, 'provider-info', provider],
    queryFn: () => get<TriggerProviderApiEntity>(`/workspaces/current/trigger-provider/${provider}/info`),
    queryKey: consoleQuery.triggers.providerInfo.queryKey({ input: { params: { provider } } }),
    queryFn: () => consoleClient.triggers.providerInfo({ params: { provider } }),
    enabled: enabled && !!provider,
    staleTime: 0,
    gcTime: 0,
  })
}

export const useTriggerSubscriptions = (provider: string, enabled = true) => {
  return useQuery<TriggerSubscription[]>({
    queryKey: [NAME_SPACE, 'list-subscriptions', provider],
    queryFn: () => get<TriggerSubscription[]>(`/workspaces/current/trigger-provider/${provider}/subscriptions/list`),
    queryKey: consoleQuery.triggers.subscriptions.queryKey({ input: { params: { provider } } }),
    queryFn: () => consoleClient.triggers.subscriptions({ params: { provider } }),
    enabled: enabled && !!provider,
  })
}

@@ -122,30 +112,30 @@ export const useInvalidateTriggerSubscriptions = () => {
  const queryClient = useQueryClient()
  return (provider: string) => {
    queryClient.invalidateQueries({
      queryKey: [NAME_SPACE, 'subscriptions', provider],
      queryKey: consoleQuery.triggers.subscriptions.queryKey({ input: { params: { provider } } }),
    })
  }
}

export const useCreateTriggerSubscriptionBuilder = () => {
  return useMutation({
    mutationKey: [NAME_SPACE, 'create-subscription-builder'],
    mutationKey: consoleQuery.triggers.subscriptionBuilderCreate.mutationKey(),
    mutationFn: (payload: {
      provider: string
      credential_type?: string
    }) => {
      const { provider, ...body } = payload
      return post<{ subscription_builder: TriggerSubscriptionBuilder }>(
        `/workspaces/current/trigger-provider/${provider}/subscriptions/builder/create`,
        { body },
      )
      return consoleClient.triggers.subscriptionBuilderCreate({
        params: { provider },
        body,
      })
    },
  })
}

export const useUpdateTriggerSubscriptionBuilder = () => {
  return useMutation({
    mutationKey: [NAME_SPACE, 'update-subscription-builder'],
    mutationKey: consoleQuery.triggers.subscriptionBuilderUpdate.mutationKey(),
    mutationFn: (payload: {
      provider: string
      subscriptionBuilderId: string

@@ -155,17 +145,17 @@ export const useUpdateTriggerSubscriptionBuilder = () => {
      credentials?: Record<string, unknown>
    }) => {
      const { provider, subscriptionBuilderId, ...body } = payload
      return post<TriggerSubscriptionBuilder>(
        `/workspaces/current/trigger-provider/${provider}/subscriptions/builder/update/${subscriptionBuilderId}`,
        { body },
      )
      return consoleClient.triggers.subscriptionBuilderUpdate({
        params: { provider, subscriptionBuilderId },
        body,
      })
    },
  })
}

export const useVerifyAndUpdateTriggerSubscriptionBuilder = () => {
  return useMutation({
    mutationKey: [NAME_SPACE, 'verify-and-update-subscription-builder'],
    mutationKey: consoleQuery.triggers.subscriptionBuilderVerifyUpdate.mutationKey(),
    mutationFn: (payload: {
      provider: string
      subscriptionBuilderId: string

@@ -183,7 +173,7 @@ export const useVerifyAndUpdateTriggerSubscriptionBuilder = () => {

export const useVerifyTriggerSubscription = () => {
  return useMutation({
    mutationKey: [NAME_SPACE, 'verify-subscription'],
    mutationKey: consoleQuery.triggers.subscriptionVerify.mutationKey(),
    mutationFn: (payload: {
      provider: string
      subscriptionId: string

@@ -208,24 +198,24 @@ export type BuildTriggerSubscriptionPayload = {

export const useBuildTriggerSubscription = () => {
  return useMutation({
    mutationKey: [NAME_SPACE, 'build-subscription'],
    mutationKey: consoleQuery.triggers.subscriptionBuild.mutationKey(),
    mutationFn: (payload: BuildTriggerSubscriptionPayload) => {
      const { provider, subscriptionBuilderId, ...body } = payload
      return post(
        `/workspaces/current/trigger-provider/${provider}/subscriptions/builder/build/${subscriptionBuilderId}`,
        { body },
      )
      return consoleClient.triggers.subscriptionBuild({
        params: { provider, subscriptionBuilderId },
        body,
      })
    },
  })
}

export const useDeleteTriggerSubscription = () => {
  return useMutation({
    mutationKey: [NAME_SPACE, 'delete-subscription'],
    mutationKey: consoleQuery.triggers.subscriptionDelete.mutationKey(),
    mutationFn: (subscriptionId: string) => {
      return post<{ result: string }>(
        `/workspaces/current/trigger-provider/${subscriptionId}/subscriptions/delete`,
      )
      return consoleClient.triggers.subscriptionDelete({
        params: { subscriptionId },
      })
    },
  })
}

@@ -240,13 +230,13 @@ export type UpdateTriggerSubscriptionPayload = {

export const useUpdateTriggerSubscription = () => {
  return useMutation({
    mutationKey: [NAME_SPACE, 'update-subscription'],
    mutationKey: consoleQuery.triggers.subscriptionUpdate.mutationKey(),
    mutationFn: (payload: UpdateTriggerSubscriptionPayload) => {
      const { subscriptionId, ...body } = payload
      return post<{ result: string, id: string }>(
        `/workspaces/current/trigger-provider/${subscriptionId}/subscriptions/update`,
        { body },
      )
      return consoleClient.triggers.subscriptionUpdate({
        params: { subscriptionId },
        body,
      })
    },
  })
}

@@ -262,10 +252,8 @@ export const useTriggerSubscriptionBuilderLogs = (
  const { enabled = true, refetchInterval = false } = options

  return useQuery<{ logs: TriggerLogEntity[] }>({
    queryKey: [NAME_SPACE, 'subscription-builder-logs', provider, subscriptionBuilderId],
    queryFn: () => get(
      `/workspaces/current/trigger-provider/${provider}/subscriptions/builder/logs/${subscriptionBuilderId}`,
    ),
    queryKey: consoleQuery.triggers.subscriptionBuilderLogs.queryKey({ input: { params: { provider, subscriptionBuilderId } } }),
    queryFn: () => consoleClient.triggers.subscriptionBuilderLogs({ params: { provider, subscriptionBuilderId } }),
    enabled: enabled && !!provider && !!subscriptionBuilderId,
    refetchInterval,
  })

@@ -274,8 +262,8 @@ export const useTriggerSubscriptionBuilderLogs = (
// ===== OAuth Management =====
export const useTriggerOAuthConfig = (provider: string, enabled = true) => {
  return useQuery<TriggerOAuthConfig>({
    queryKey: [NAME_SPACE, 'oauth-config', provider],
    queryFn: () => get<TriggerOAuthConfig>(`/workspaces/current/trigger-provider/${provider}/oauth/client`),
    queryKey: consoleQuery.triggers.oauthConfig.queryKey({ input: { params: { provider } } }),
    queryFn: () => consoleClient.triggers.oauthConfig({ params: { provider } }),
    enabled: enabled && !!provider,
  })
}

@@ -288,31 +276,31 @@ export type ConfigureTriggerOAuthPayload = {

export const useConfigureTriggerOAuth = () => {
  return useMutation({
    mutationKey: [NAME_SPACE, 'configure-oauth'],
    mutationKey: consoleQuery.triggers.oauthConfigure.mutationKey(),
    mutationFn: (payload: ConfigureTriggerOAuthPayload) => {
      const { provider, ...body } = payload
      return post<{ result: string }>(
        `/workspaces/current/trigger-provider/${provider}/oauth/client`,
        { body },
      )
      return consoleClient.triggers.oauthConfigure({
        params: { provider },
        body,
      })
    },
  })
}

export const useDeleteTriggerOAuth = () => {
  return useMutation({
    mutationKey: [NAME_SPACE, 'delete-oauth'],
    mutationKey: consoleQuery.triggers.oauthDelete.mutationKey(),
    mutationFn: (provider: string) => {
      return del<{ result: string }>(
        `/workspaces/current/trigger-provider/${provider}/oauth/client`,
      )
      return consoleClient.triggers.oauthDelete({
        params: { provider },
      })
    },
  })
}

export const useInitiateTriggerOAuth = () => {
  return useMutation({
    mutationKey: [NAME_SPACE, 'initiate-oauth'],
    mutationKey: consoleQuery.triggers.oauthInitiate.mutationKey(),
    mutationFn: (provider: string) => {
      return get<{ authorization_url: string, subscription_builder: TriggerSubscriptionBuilder }>(
        `/workspaces/current/trigger-provider/${provider}/subscriptions/oauth/authorize`,

@@ -336,7 +324,6 @@ export const useTriggerPluginDynamicOptions = (payload: {
  return useQuery<{ options: FormOption[] }>({
    queryKey: [NAME_SPACE, 'dynamic-options', payload.plugin_id, payload.provider, payload.action, payload.parameter, payload.credential_id, payload.credentials, payload.extra],
    queryFn: () => {
      // Use new endpoint with POST when credentials provided (for edit mode)
      if (payload.credentials) {
        return post<{ options: FormOption[] }>(
          '/workspaces/current/plugin/parameters/dynamic-options-with-credentials',

@@ -353,7 +340,6 @@ export const useTriggerPluginDynamicOptions = (payload: {
          { silent: true },
        )
      }
      // Use original GET endpoint for normal cases
      return get<{ options: FormOption[] }>(
        '/workspaces/current/plugin/parameters/dynamic-options',
        {

@@ -372,7 +358,6 @@ export const useTriggerPluginDynamicOptions = (payload: {
    enabled: enabled && !!payload.plugin_id && !!payload.provider && !!payload.action && !!payload.parameter && !!payload.credential_id,
    retry: 0,
    staleTime: 0,
    gcTime: 0,
  })
}

@@ -382,7 +367,7 @@ export const useInvalidateTriggerOAuthConfig = () => {
  const queryClient = useQueryClient()
  return (provider: string) => {
    queryClient.invalidateQueries({
      queryKey: [NAME_SPACE, 'oauth-config', provider],
      queryKey: consoleQuery.triggers.oauthConfig.queryKey({ input: { params: { provider } } }),
    })
  }
}
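Deriving keys from `consoleQuery` also fixes a latent bug visible in this diff: the old code fetched subscriptions under `[NAME_SPACE, 'list-subscriptions', provider]` but invalidated `[NAME_SPACE, 'subscriptions', provider]`, so the invalidation silently missed the cache entry. With both sides computing the key from the same contract entry, they cannot drift. A minimal sketch of the guarantee (the `consoleQuery` shape below is a stand-in for the generated helper):

```ts
import { QueryClient } from '@tanstack/react-query'

// Stand-in for the generated helper: one function produces the key everywhere.
declare const consoleQuery: {
  triggers: {
    subscriptions: {
      queryKey: (args: { input: { params: { provider: string } } }) => unknown[]
    }
  }
}

const queryClient = new QueryClient()

function invalidateSubscriptions(provider: string) {
  // Exactly the key the query was registered under - no hand-typed strings to mistype.
  queryClient.invalidateQueries({
    queryKey: consoleQuery.triggers.subscriptions.queryKey({ input: { params: { provider } } }),
  })
}
```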
2
web/types/i18n.d.ts
vendored
@@ -27,5 +27,3 @@ export type I18nKeysWithPrefix<
> = Prefix extends ''
  ? keyof Resources[NS]
  : Extract<keyof Resources[NS], `${Prefix}${string}`>

type A = I18nKeysWithPrefix<'billing'>
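The deleted `type A = ...` line was a leftover smoke test that had no business shipping in the declaration file; the utility is meant to be instantiated at call sites. For example (the two-parameter form follows from the `Prefix` branch shown above):

```ts
// All keys of the 'billing' namespace:
type BillingKeys = I18nKeysWithPrefix<'billing'>

// Only keys starting with a literal prefix, per the Extract branch:
type BillingPlanKeys = I18nKeysWithPrefix<'billing', 'plan.'>
```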