Mirror of https://github.com/langgenius/dify.git (synced 2026-02-09 09:44:00 +00:00)

Compare commits: fix/fix-te ... test/inpro

22 Commits
| SHA1 |
|---|
| 4971e11734 |
| a297b06aac |
| e988266f53 |
| d9530f7bb7 |
| b24e6edada |
| 59a9cbbf78 |
| 45164ce33e |
| 095b3ee234 |
| cb970e54da |
| e04f2a0786 |
| 7202a24bcf |
| be8f265e43 |
| 9e54f086dc |
| 8c31b69c8e |
| b886b3f6c8 |
| ef0d18bb61 |
| c56ad8e323 |
| 365f749ed5 |
| f686197589 |
| f584be9cf0 |
| 3bd228ddb7 |
| 0dfa59b1db |
@@ -1 +0,0 @@
-../../.agents/skills/component-refactoring
@@ -1 +0,0 @@
-../../.agents/skills/frontend-code-review
@@ -1 +0,0 @@
-../../.agents/skills/frontend-testing
@@ -1 +0,0 @@
-../../.agents/skills/orpc-contract-first
2	.github/CODEOWNERS	vendored

@@ -239,7 +239,7 @@
/web/app/components/base/ @iamjoel @zxhlyh

# Frontend - Base Components Tests
/web/app/components/base/**/__tests__/ @hyoban @CodingOnStar
/web/app/components/base/**/*.spec.tsx @hyoban @CodingOnStar

# Frontend - Utils and Hooks
/web/utils/classnames.ts @iamjoel @zxhlyh
23	.github/workflows/autofix.yml	vendored

@@ -79,29 +79,6 @@ jobs:
          find . -name "*.py" -type f -exec sed -i.bak -E 's/"([^"]+)" \| None/Optional["\1"]/g; s/'"'"'([^'"'"']+)'"'"' \| None/Optional['"'"'\1'"'"']/g' {} \;
          find . -name "*.py.bak" -type f -delete

-     - name: Install pnpm
-       uses: pnpm/action-setup@v4
-       with:
-         package_json_file: web/package.json
-         run_install: false
-
-     - name: Setup Node.js
-       uses: actions/setup-node@v6
-       with:
-         node-version: 24
-         cache: pnpm
-         cache-dependency-path: ./web/pnpm-lock.yaml
-
-     - name: Install web dependencies
-       run: |
-         cd web
-         pnpm install --frozen-lockfile
-
-     - name: ESLint autofix
-       run: |
-         cd web
-         pnpm lint:fix || true
-
      # mdformat breaks YAML front matter in markdown files. Add --exclude for directories containing YAML front matter.
      - name: mdformat
        run: |
8	.github/workflows/deploy-hitl.yml	vendored

@@ -4,8 +4,7 @@ on:
  workflow_run:
    workflows: ["Build and Push API & Web"]
    branches:
-     - "feat/hitl-frontend"
-     - "feat/hitl-backend"
+     - "feat/hitl"
    types:
      - completed

@@ -14,10 +13,7 @@ jobs:
    runs-on: ubuntu-latest
    if: |
      github.event.workflow_run.conclusion == 'success' &&
-     (
-       github.event.workflow_run.head_branch == 'feat/hitl-frontend' ||
-       github.event.workflow_run.head_branch == 'feat/hitl-backend'
-     )
+     github.event.workflow_run.head_branch == 'feat/hitl'
    steps:
      - name: Deploy to server
        uses: appleboy/ssh-action@v1
2	.github/workflows/web-tests.yml	vendored

@@ -39,7 +39,7 @@ jobs:
        run: pnpm install --frozen-lockfile

      - name: Run tests
-       run: pnpm test:coverage
+       run: pnpm test:ci

      - name: Coverage Summary
        if: always()
@@ -136,7 +136,6 @@ ignore_imports =
    core.workflow.nodes.llm.llm_utils -> models.provider
    core.workflow.nodes.llm.llm_utils -> services.credit_pool_service
    core.workflow.nodes.llm.node -> core.tools.signature
    core.workflow.nodes.template_transform.template_transform_node -> configs
    core.workflow.nodes.tool.tool_node -> core.callback_handler.workflow_tool_callback_handler
    core.workflow.nodes.tool.tool_node -> core.tools.tool_engine
    core.workflow.nodes.tool.tool_node -> core.tools.tool_manager
@@ -739,8 +739,10 @@ def upgrade_db():

            click.echo(click.style("Database migration successful!", fg="green"))

-       except Exception:
+       except Exception as e:
            logger.exception("Failed to execute database migration")
+           click.echo(click.style(f"Database migration failed: {e}", fg="red"))
+           raise SystemExit(1)
        finally:
            lock.release()
    else:
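This hunk makes `upgrade_db` surface the failure to the operator and exit non-zero instead of only logging it. A minimal sketch of the same pattern, assuming a lock object with a `release()` method and a hypothetical `migrate` callable standing in for the real Flask-Migrate upgrade:

```python
import logging

import click

logger = logging.getLogger(__name__)


def run_migration(lock, migrate) -> None:
    """Run a guarded migration; exit non-zero on failure so wrappers and CI notice."""
    try:
        migrate()  # hypothetical callable performing the schema upgrade
        click.echo(click.style("Database migration successful!", fg="green"))
    except Exception as e:
        # Keep the traceback in the logs, print a readable message, and fail the process.
        logger.exception("Failed to execute database migration")
        click.echo(click.style(f"Database migration failed: {e}", fg="red"))
        raise SystemExit(1)
    finally:
        lock.release()
```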
@@ -1,3 +1,4 @@
+import logging
 import uuid
 from datetime import datetime
 from typing import Any, Literal, TypeAlias
@@ -54,6 +55,8 @@ ALLOW_CREATE_APP_MODES = ["chat", "agent-chat", "advanced-chat", "workflow", "co

register_enum_models(console_ns, IconType)

+_logger = logging.getLogger(__name__)
+

class AppListQuery(BaseModel):
    page: int = Field(default=1, ge=1, le=99999, description="Page number (1-99999)")
@@ -499,6 +502,7 @@ class AppListApi(Resource):
                select(Workflow).where(
                    Workflow.version == Workflow.VERSION_DRAFT,
                    Workflow.app_id.in_(workflow_capable_app_ids),
+                   Workflow.tenant_id == current_tenant_id,
                )
            )
            .scalars()
@@ -510,12 +514,14 @@ class AppListApi(Resource):
            NodeType.TRIGGER_PLUGIN,
        }
        for workflow in draft_workflows:
+           node_id = None
+           try:
-               for _, node_data in workflow.walk_nodes():
+               for node_id, node_data in workflow.walk_nodes():
                    if node_data.get("type") in trigger_node_types:
                        draft_trigger_app_ids.add(str(workflow.app_id))
                        break
+           except Exception:
+               _logger.exception("error while walking nodes, workflow_id=%s, node_id=%s", workflow.id, node_id)
+               continue

        for app in app_pagination.items:
@@ -47,6 +47,7 @@ class DifyNodeFactory(NodeFactory):
        code_providers: Sequence[type[CodeNodeProvider]] | None = None,
        code_limits: CodeNodeLimits | None = None,
        template_renderer: Jinja2TemplateRenderer | None = None,
+       template_transform_max_output_length: int | None = None,
        http_request_http_client: HttpClientProtocol | None = None,
        http_request_tool_file_manager_factory: Callable[[], ToolFileManager] = ToolFileManager,
        http_request_file_manager: FileManagerProtocol | None = None,
@@ -68,6 +69,9 @@ class DifyNodeFactory(NodeFactory):
            max_object_array_length=dify_config.CODE_MAX_OBJECT_ARRAY_LENGTH,
        )
        self._template_renderer = template_renderer or CodeExecutorJinja2TemplateRenderer()
+       self._template_transform_max_output_length = (
+           template_transform_max_output_length or dify_config.TEMPLATE_TRANSFORM_MAX_LENGTH
+       )
        self._http_request_http_client = http_request_http_client or ssrf_proxy
        self._http_request_tool_file_manager_factory = http_request_tool_file_manager_factory
        self._http_request_file_manager = http_request_file_manager or file_manager
@@ -122,6 +126,7 @@ class DifyNodeFactory(NodeFactory):
            graph_init_params=self.graph_init_params,
            graph_runtime_state=self.graph_runtime_state,
            template_renderer=self._template_renderer,
+           max_output_length=self._template_transform_max_output_length,
        )

        if node_type == NodeType.HTTP_REQUEST:
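The factory now threads an optional `template_transform_max_output_length` through its constructor and falls back to the config value when the caller passes `None`. A minimal sketch of that injection-with-fallback pattern, with hypothetical names standing in for the Dify types:

```python
from dataclasses import dataclass


@dataclass
class Config:
    template_transform_max_length: int = 400_000  # stand-in for dify_config.TEMPLATE_TRANSFORM_MAX_LENGTH


class NodeFactory:
    def __init__(self, config: Config, max_output_length: int | None = None) -> None:
        # An explicit argument wins; otherwise fall back to the shared config default.
        self._max_output_length = max_output_length or config.template_transform_max_length

    def create_node(self) -> dict:
        # The limit is handed to each node instead of being read from a module-level constant.
        return {"max_output_length": self._max_output_length}


factory = NodeFactory(Config())              # production path: config default
test_factory = NodeFactory(Config(), 1_000)  # tests can inject a tiny limit
```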
@@ -6,7 +6,8 @@ from yarl import URL
|
||||
|
||||
from configs import dify_config
|
||||
from core.helper.download import download_with_size_limit
|
||||
from core.plugin.entities.marketplace import MarketplacePluginDeclaration
|
||||
from core.plugin.entities.marketplace import MarketplacePluginDeclaration, MarketplacePluginSnapshot
|
||||
from extensions.ext_redis import redis_client
|
||||
|
||||
marketplace_api_url = URL(str(dify_config.MARKETPLACE_API_URL))
|
||||
logger = logging.getLogger(__name__)
|
||||
@@ -43,28 +44,37 @@ def batch_fetch_plugin_by_ids(plugin_ids: list[str]) -> list[dict]:
|
||||
return data.get("data", {}).get("plugins", [])
|
||||
|
||||
|
||||
def batch_fetch_plugin_manifests_ignore_deserialization_error(
|
||||
plugin_ids: list[str],
|
||||
) -> Sequence[MarketplacePluginDeclaration]:
|
||||
if len(plugin_ids) == 0:
|
||||
return []
|
||||
|
||||
url = str(marketplace_api_url / "api/v1/plugins/batch")
|
||||
response = httpx.post(url, json={"plugin_ids": plugin_ids}, headers={"X-Dify-Version": dify_config.project.version})
|
||||
response.raise_for_status()
|
||||
result: list[MarketplacePluginDeclaration] = []
|
||||
for plugin in response.json()["data"]["plugins"]:
|
||||
try:
|
||||
result.append(MarketplacePluginDeclaration.model_validate(plugin))
|
||||
except Exception:
|
||||
logger.exception(
|
||||
"Failed to deserialize marketplace plugin manifest for %s", plugin.get("plugin_id", "unknown")
|
||||
)
|
||||
|
||||
return result
|
||||
|
||||
|
||||
def record_install_plugin_event(plugin_unique_identifier: str):
|
||||
url = str(marketplace_api_url / "api/v1/stats/plugins/install_count")
|
||||
response = httpx.post(url, json={"unique_identifier": plugin_unique_identifier})
|
||||
response.raise_for_status()
|
||||
|
||||
|
||||
def fetch_global_plugin_manifest(cache_key_prefix: str, cache_ttl: int) -> None:
|
||||
"""
|
||||
Fetch all plugin manifests from marketplace and cache them in Redis.
|
||||
This should be called once per check cycle to populate the instance-level cache.
|
||||
|
||||
Args:
|
||||
cache_key_prefix: Redis key prefix for caching plugin manifests
|
||||
cache_ttl: Cache TTL in seconds
|
||||
|
||||
Raises:
|
||||
httpx.HTTPError: If the HTTP request fails
|
||||
Exception: If any other error occurs during fetching or caching
|
||||
"""
|
||||
url = str(marketplace_api_url / "api/v1/dist/plugins/manifest.json")
|
||||
response = httpx.get(url, headers={"X-Dify-Version": dify_config.project.version}, timeout=30)
|
||||
response.raise_for_status()
|
||||
|
||||
raw_json = response.json()
|
||||
plugins_data = raw_json.get("plugins", [])
|
||||
|
||||
# Parse and cache all plugin snapshots
|
||||
for plugin_data in plugins_data:
|
||||
plugin_snapshot = MarketplacePluginSnapshot.model_validate(plugin_data)
|
||||
redis_client.setex(
|
||||
name=f"{cache_key_prefix}{plugin_snapshot.plugin_id}",
|
||||
time=cache_ttl,
|
||||
value=plugin_snapshot.model_dump_json(),
|
||||
)
|
||||
|
||||
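`fetch_global_plugin_manifest` writes one Redis key per plugin so the per-tenant check tasks can read from the cache instead of calling the marketplace. A minimal sketch of the read side under the same key scheme (the helper name here is illustrative, not part of the diff):

```python
from core.plugin.entities.marketplace import MarketplacePluginSnapshot
from extensions.ext_redis import redis_client


def read_cached_snapshot(cache_key_prefix: str, plugin_id: str) -> MarketplacePluginSnapshot | None:
    """Return the cached snapshot for one plugin, or None if the key expired or was never written."""
    raw = redis_client.get(f"{cache_key_prefix}{plugin_id}")
    if raw is None:
        return None
    # setex stored model_dump_json(), so the reverse is model_validate_json().
    return MarketplacePluginSnapshot.model_validate_json(raw)
```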
@@ -1,4 +1,4 @@
-from pydantic import BaseModel, Field, model_validator
+from pydantic import BaseModel, Field, computed_field, model_validator

from core.model_runtime.entities.provider_entities import ProviderEntity
from core.plugin.entities.endpoint import EndpointProviderDeclaration
@@ -48,3 +48,15 @@ class MarketplacePluginDeclaration(BaseModel):
        if "tool" in data and not data["tool"]:
            del data["tool"]
        return data


+class MarketplacePluginSnapshot(BaseModel):
+    org: str
+    name: str
+    latest_version: str
+    latest_package_identifier: str
+    latest_package_url: str
+
+    @computed_field
+    def plugin_id(self) -> str:
+        return f"{self.org}/{self.name}"
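`plugin_id` is a pydantic v2 `@computed_field`, so it is derived from `org` and `name` and is also included when the model is serialized, which is what lets the Redis cache be keyed by it. A small usage sketch (the values are made up):

```python
snapshot = MarketplacePluginSnapshot(
    org="langgenius",
    name="example-plugin",  # hypothetical plugin, for illustration only
    latest_version="0.0.1",
    latest_package_identifier="langgenius/example-plugin:0.0.1@abc123",
    latest_package_url="https://marketplace.example/download/abc123",
)

assert snapshot.plugin_id == "langgenius/example-plugin"
# Computed fields appear in serialization output as well.
assert "plugin_id" in snapshot.model_dump()
```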
@@ -112,7 +112,7 @@ class ArrayBooleanVariable(ArrayBooleanSegment, ArrayVariable):

class RAGPipelineVariable(BaseModel):
    belong_to_node_id: str = Field(description="belong to which node id, shared means public")
-   type: str = Field(description="variable type, text-input, paragraph, select, number, file, file-list")
+   type: str = Field(description="variable type, text-input, paragraph, select, number, file, file-list")
    label: str = Field(description="label")
    description: str | None = Field(description="description", default="")
    variable: str = Field(description="variable key", default="")
@@ -1,7 +1,6 @@
from collections.abc import Mapping, Sequence
from typing import TYPE_CHECKING, Any

-from configs import dify_config
from core.workflow.enums import NodeType, WorkflowNodeExecutionStatus
from core.workflow.node_events import NodeRunResult
from core.workflow.nodes.base.node import Node
@@ -16,12 +15,13 @@ if TYPE_CHECKING:
    from core.workflow.entities import GraphInitParams
    from core.workflow.runtime import GraphRuntimeState

-MAX_TEMPLATE_TRANSFORM_OUTPUT_LENGTH = dify_config.TEMPLATE_TRANSFORM_MAX_LENGTH
+DEFAULT_TEMPLATE_TRANSFORM_MAX_OUTPUT_LENGTH = 400_000


class TemplateTransformNode(Node[TemplateTransformNodeData]):
    node_type = NodeType.TEMPLATE_TRANSFORM
    _template_renderer: Jinja2TemplateRenderer
+   _max_output_length: int

    def __init__(
        self,
@@ -31,6 +31,7 @@ class TemplateTransformNode(Node[TemplateTransformNodeData]):
        graph_runtime_state: "GraphRuntimeState",
        *,
        template_renderer: Jinja2TemplateRenderer | None = None,
+       max_output_length: int | None = None,
    ) -> None:
        super().__init__(
            id=id,
@@ -40,6 +41,10 @@ class TemplateTransformNode(Node[TemplateTransformNodeData]):
        )
        self._template_renderer = template_renderer or CodeExecutorJinja2TemplateRenderer()

+       if max_output_length is not None and max_output_length <= 0:
+           raise ValueError("max_output_length must be a positive integer")
+       self._max_output_length = max_output_length or DEFAULT_TEMPLATE_TRANSFORM_MAX_OUTPUT_LENGTH

    @classmethod
    def get_default_config(cls, filters: Mapping[str, object] | None = None) -> Mapping[str, object]:
        """
@@ -69,11 +74,11 @@ class TemplateTransformNode(Node[TemplateTransformNodeData]):
        except TemplateRenderError as e:
            return NodeRunResult(inputs=variables, status=WorkflowNodeExecutionStatus.FAILED, error=str(e))

-       if len(rendered) > MAX_TEMPLATE_TRANSFORM_OUTPUT_LENGTH:
+       if len(rendered) > self._max_output_length:
            return NodeRunResult(
                inputs=variables,
                status=WorkflowNodeExecutionStatus.FAILED,
-               error=f"Output length exceeds {MAX_TEMPLATE_TRANSFORM_OUTPUT_LENGTH} characters",
+               error=f"Output length exceeds {self._max_output_length} characters",
            )

        return NodeRunResult(
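The module-level constant becomes an instance attribute, so the output limit can be injected per node (for example by the factory above) and validated once in the constructor. A condensed, self-contained sketch of the same check, with a plain dict standing in for `NodeRunResult`:

```python
DEFAULT_MAX_OUTPUT_LENGTH = 400_000  # mirrors DEFAULT_TEMPLATE_TRANSFORM_MAX_OUTPUT_LENGTH


class TemplateNode:
    def __init__(self, max_output_length: int | None = None) -> None:
        if max_output_length is not None and max_output_length <= 0:
            raise ValueError("max_output_length must be a positive integer")
        self._max_output_length = max_output_length or DEFAULT_MAX_OUTPUT_LENGTH

    def run(self, rendered: str) -> dict:
        # Fail the node instead of passing an oversized payload downstream.
        if len(rendered) > self._max_output_length:
            return {"status": "failed", "error": f"Output length exceeds {self._max_output_length} characters"}
        return {"status": "succeeded", "output": rendered}


assert TemplateNode(10).run("x" * 11)["status"] == "failed"
assert TemplateNode().run("short")["status"] == "succeeded"
```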
@@ -10,6 +10,10 @@ import models as models
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql


+def _is_pg(conn):
+    return conn.dialect.name == "postgresql"
+
+
# revision identifiers, used by Alembic.
revision = '7df29de0f6be'
down_revision = '03ea244985ce'
@@ -19,16 +23,31 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
-   op.create_table('tenant_credit_pools',
-       sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
-       sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
-       sa.Column('pool_type', sa.String(length=40), server_default='trial', nullable=False),
-       sa.Column('quota_limit', sa.BigInteger(), nullable=False),
-       sa.Column('quota_used', sa.BigInteger(), nullable=False),
-       sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
-       sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
-       sa.PrimaryKeyConstraint('id', name='tenant_credit_pool_pkey')
-   )
+   conn = op.get_bind()
+
+   if _is_pg(conn):
+       op.create_table('tenant_credit_pools',
+           sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
+           sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
+           sa.Column('pool_type', sa.String(length=40), server_default='trial', nullable=False),
+           sa.Column('quota_limit', sa.BigInteger(), nullable=False),
+           sa.Column('quota_used', sa.BigInteger(), nullable=False),
+           sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
+           sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
+           sa.PrimaryKeyConstraint('id', name='tenant_credit_pool_pkey')
+       )
+   else:
+       # For MySQL and other databases, UUID should be generated at application level
+       op.create_table('tenant_credit_pools',
+           sa.Column('id', models.types.StringUUID(), nullable=False),
+           sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
+           sa.Column('pool_type', sa.String(length=40), server_default='trial', nullable=False),
+           sa.Column('quota_limit', sa.BigInteger(), nullable=False),
+           sa.Column('quota_used', sa.BigInteger(), nullable=False),
+           sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
+           sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
+           sa.PrimaryKeyConstraint('id', name='tenant_credit_pool_pkey')
+       )
    with op.batch_alter_table('tenant_credit_pools', schema=None) as batch_op:
        batch_op.create_index('tenant_credit_pool_pool_type_idx', ['pool_type'], unique=False)
        batch_op.create_index('tenant_credit_pool_tenant_id_idx', ['tenant_id'], unique=False)
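The migration branches on the connected dialect because `uuid_generate_v4()` only exists on PostgreSQL; other backends get no server-side default and rely on the application. A compact sketch of that branching inside an Alembic `upgrade()`, using a hypothetical table name:

```python
import sqlalchemy as sa
from alembic import op


def upgrade():
    # Branch DDL on the live connection's dialect: Postgres can default UUIDs in
    # the database via uuid_generate_v4(); other backends leave it to the app.
    conn = op.get_bind()
    id_kwargs = (
        {"server_default": sa.text("uuid_generate_v4()")}
        if conn.dialect.name == "postgresql"
        else {}
    )
    op.create_table(
        "example_table",  # hypothetical table, for illustration only
        sa.Column("id", sa.String(36), nullable=False, **id_kwargs),
        sa.PrimaryKeyConstraint("id"),
    )
```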
@@ -2166,7 +2166,9 @@ class TenantCreditPool(TypeBase):
        sa.Index("tenant_credit_pool_pool_type_idx", "pool_type"),
    )

-   id: Mapped[str] = mapped_column(StringUUID, primary_key=True, server_default=text("uuid_generate_v4()"), init=False)
+   id: Mapped[str] = mapped_column(
+       StringUUID, insert_default=lambda: str(uuid4()), default_factory=lambda: str(uuid4()), init=False
+   )
    tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
    pool_type: Mapped[str] = mapped_column(String(40), nullable=False, default="trial", server_default="trial")
    quota_limit: Mapped[int] = mapped_column(BigInteger, nullable=False, default=0)
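Because the non-Postgres table has no database-side UUID default, the model now generates the primary key in Python: `insert_default` supplies the value when the ORM emits the INSERT, and `default_factory` covers the dataclass-style construction path. A minimal standalone sketch of the same idea, assuming a SQLAlchemy 2.0 `MappedAsDataclass` base:

```python
from uuid import uuid4

from sqlalchemy import String
from sqlalchemy.orm import DeclarativeBase, Mapped, MappedAsDataclass, mapped_column


class Base(MappedAsDataclass, DeclarativeBase):
    pass


class CreditPool(Base):  # simplified stand-in for TenantCreditPool
    __tablename__ = "credit_pools"

    id: Mapped[str] = mapped_column(
        String(36),
        primary_key=True,
        insert_default=lambda: str(uuid4()),   # used when the INSERT is emitted
        default_factory=lambda: str(uuid4()),  # used when the dataclass is instantiated
        init=False,                            # callers never pass an id explicitly
    )
```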
@@ -1,6 +1,6 @@
[project]
name = "dify-api"
-version = "1.12.0"
+version = "1.12.1"
requires-python = ">=3.11,<3.13"

dependencies = [
@@ -1,16 +1,24 @@
import logging
import math
import time

import click

import app
+from core.helper.marketplace import fetch_global_plugin_manifest
from extensions.ext_database import db
from models.account import TenantPluginAutoUpgradeStrategy
from tasks import process_tenant_plugin_autoupgrade_check_task as check_task

+logger = logging.getLogger(__name__)
+
AUTO_UPGRADE_MINIMAL_CHECKING_INTERVAL = 15 * 60  # 15 minutes
MAX_CONCURRENT_CHECK_TASKS = 20

+# Import cache constants from the task module
+CACHE_REDIS_KEY_PREFIX = check_task.CACHE_REDIS_KEY_PREFIX
+CACHE_REDIS_TTL = check_task.CACHE_REDIS_TTL
+

@app.celery.task(queue="plugin")
def check_upgradable_plugin_task():
@@ -40,6 +48,22 @@ def check_upgradable_plugin_task():
    )  # make sure all strategies are checked in this interval
    batch_interval_time = (AUTO_UPGRADE_MINIMAL_CHECKING_INTERVAL / batch_chunk_count) if batch_chunk_count > 0 else 0

+   if total_strategies == 0:
+       click.echo(click.style("no strategies to process, skipping plugin manifest fetch.", fg="green"))
+       return
+
+   # Fetch and cache all plugin manifests before processing tenants
+   # This reduces load on marketplace from 300k requests to 1 request per check cycle
+   logger.info("fetching global plugin manifest from marketplace")
+   try:
+       fetch_global_plugin_manifest(CACHE_REDIS_KEY_PREFIX, CACHE_REDIS_TTL)
+       logger.info("successfully fetched and cached global plugin manifest")
+   except Exception as e:
+       logger.exception("failed to fetch global plugin manifest")
+       click.echo(click.style(f"failed to fetch global plugin manifest: {e}", fg="red"))
+       click.echo(click.style("skipping plugin upgrade check for this cycle", fg="yellow"))
+       return
+
    for i in range(0, total_strategies, MAX_CONCURRENT_CHECK_TASKS):
        batch_strategies = strategies[i : i + MAX_CONCURRENT_CHECK_TASKS]
        for strategy in batch_strategies:
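The beat task now bails out on an empty strategy list, pre-warms the manifest cache once, and only then fans out per-tenant checks in chunks of `MAX_CONCURRENT_CHECK_TASKS`, pacing the chunks across the checking interval. A minimal sketch of that chunk-and-pace loop, with a stubbed dispatch function in place of the Celery task:

```python
import time

MAX_CONCURRENT = 20
CHECK_INTERVAL = 6  # seconds; shortened for the sketch, the real task spreads work over 15 minutes


def dispatch(strategy: str) -> None:
    print(f"would enqueue check for {strategy}")  # stand-in for task.delay(...)


def fan_out(strategies: list[str]) -> None:
    if not strategies:
        return
    chunks = (len(strategies) + MAX_CONCURRENT - 1) // MAX_CONCURRENT
    pause = CHECK_INTERVAL / chunks  # spread the chunks across the interval
    for i in range(0, len(strategies), MAX_CONCURRENT):
        for strategy in strategies[i : i + MAX_CONCURRENT]:
            dispatch(strategy)
        time.sleep(pause)  # pace batches so they all finish within the interval


fan_out([f"tenant-{n}" for n in range(45)])
```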
@@ -327,6 +327,17 @@ class AccountService:
    @staticmethod
    def delete_account(account: Account):
        """Delete account. This method only adds a task to the queue for deletion."""
+       # Queue account deletion sync tasks for all workspaces BEFORE account deletion (enterprise only)
+       from services.enterprise.account_deletion_sync import sync_account_deletion
+
+       sync_success = sync_account_deletion(account_id=account.id, source="account_deleted")
+       if not sync_success:
+           logger.warning(
+               "Enterprise account deletion sync failed for account %s; proceeding with local deletion.",
+               account.id,
+           )
+
        # Now proceed with async account deletion
        delete_account_task.delay(account.id)

    @staticmethod
@@ -1230,6 +1241,19 @@ class TenantService:
        if dify_config.BILLING_ENABLED:
            BillingService.clean_billing_info_cache(tenant.id)

+       # Queue account deletion sync task for enterprise backend to reassign resources (enterprise only)
+       from services.enterprise.account_deletion_sync import sync_workspace_member_removal
+
+       sync_success = sync_workspace_member_removal(
+           workspace_id=tenant.id, member_id=account.id, source="workspace_member_removed"
+       )
+       if not sync_success:
+           logger.warning(
+               "Enterprise workspace member removal sync failed: workspace_id=%s, member_id=%s",
+               tenant.id,
+               account.id,
+           )
+
    @staticmethod
    def update_member_role(tenant: Tenant, member: Account, new_role: str, operator: Account):
        """Update member role"""
115	api/services/enterprise/account_deletion_sync.py	Normal file

@@ -0,0 +1,115 @@
import json
import logging
import uuid
from datetime import UTC, datetime

from redis import RedisError

from configs import dify_config
from extensions.ext_database import db
from extensions.ext_redis import redis_client
from models.account import TenantAccountJoin

logger = logging.getLogger(__name__)

ACCOUNT_DELETION_SYNC_QUEUE = "enterprise:member:sync:queue"
ACCOUNT_DELETION_SYNC_TASK_TYPE = "sync_member_deletion_from_workspace"


def _queue_task(workspace_id: str, member_id: str, *, source: str) -> bool:
    """
    Queue an account deletion sync task to Redis.

    Internal helper function. Do not call directly - use the public functions instead.

    Args:
        workspace_id: The workspace/tenant ID to sync
        member_id: The member/account ID that was removed
        source: Source of the sync request (for debugging/tracking)

    Returns:
        bool: True if task was queued successfully, False otherwise
    """
    try:
        task = {
            "task_id": str(uuid.uuid4()),
            "workspace_id": workspace_id,
            "member_id": member_id,
            "retry_count": 0,
            "created_at": datetime.now(UTC).isoformat(),
            "source": source,
            "type": ACCOUNT_DELETION_SYNC_TASK_TYPE,
        }

        # Push to Redis list (queue) - LPUSH adds to the head, worker consumes from tail with RPOP
        redis_client.lpush(ACCOUNT_DELETION_SYNC_QUEUE, json.dumps(task))

        logger.info(
            "Queued account deletion sync task for workspace %s, member %s, task_id: %s, source: %s",
            workspace_id,
            member_id,
            task["task_id"],
            source,
        )
        return True

    except (RedisError, TypeError) as e:
        logger.error(
            "Failed to queue account deletion sync for workspace %s, member %s: %s",
            workspace_id,
            member_id,
            str(e),
            exc_info=True,
        )
        # Don't raise - we don't want to fail member deletion if queueing fails
        return False


def sync_workspace_member_removal(workspace_id: str, member_id: str, *, source: str) -> bool:
    """
    Sync a single workspace member removal (enterprise only).

    Queues a task for the enterprise backend to reassign resources from the removed member.
    Handles enterprise edition check internally. Safe to call in community edition (no-op).

    Args:
        workspace_id: The workspace/tenant ID
        member_id: The member/account ID that was removed
        source: Source of the sync request (e.g., "workspace_member_removed")

    Returns:
        bool: True if task was queued (or skipped in community), False if queueing failed
    """
    if not dify_config.ENTERPRISE_ENABLED:
        return True

    return _queue_task(workspace_id=workspace_id, member_id=member_id, source=source)


def sync_account_deletion(account_id: str, *, source: str) -> bool:
    """
    Sync full account deletion across all workspaces (enterprise only).

    Fetches all workspace memberships for the account and queues a sync task for each.
    Handles enterprise edition check internally. Safe to call in community edition (no-op).

    Args:
        account_id: The account ID being deleted
        source: Source of the sync request (e.g., "account_deleted")

    Returns:
        bool: True if all tasks were queued (or skipped in community), False if any queueing failed
    """
    if not dify_config.ENTERPRISE_ENABLED:
        return True

    # Fetch all workspaces the account belongs to
    workspace_joins = db.session.query(TenantAccountJoin).filter_by(account_id=account_id).all()

    # Queue sync task for each workspace
    success = True
    for join in workspace_joins:
        if not _queue_task(workspace_id=join.tenant_id, member_id=account_id, source=source):
            success = False

    return success
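The producer side only does an `LPUSH`; the enterprise backend that drains `enterprise:member:sync:queue` is not part of this diff. A hypothetical consumer loop, shown only to illustrate the intended LPUSH/RPOP handoff (the real worker lives in the enterprise codebase and may differ):

```python
import json

from extensions.ext_redis import redis_client

QUEUE = "enterprise:member:sync:queue"


def drain_queue_once() -> None:
    """Pop and handle queued sync tasks until the queue is empty (illustrative only)."""
    while True:
        raw = redis_client.rpop(QUEUE)  # producer LPUSHes to the head, so RPOP yields FIFO order
        if raw is None:
            break  # a long-running worker would instead block with BRPOP
        task = json.loads(raw)
        # A real worker would reassign the removed member's resources here and
        # re-queue the task with retry_count + 1 on failure.
        print(task["type"], task["workspace_id"], task["member_id"])


if __name__ == "__main__":
    drain_queue_once()
```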
@@ -14,6 +14,9 @@ from models.model import UploadFile

logger = logging.getLogger(__name__)

+# Batch size for database operations to keep transactions short
+BATCH_SIZE = 1000
+

@shared_task(queue="dataset")
def batch_clean_document_task(document_ids: list[str], dataset_id: str, doc_form: str | None, file_ids: list[str]):
@@ -31,63 +34,179 @@ def batch_clean_document_task(document_ids: list[str], dataset_id: str, doc_form
    if not doc_form:
        raise ValueError("doc_form is required")

-   with session_factory.create_session() as session:
-       try:
-           dataset = session.query(Dataset).where(Dataset.id == dataset_id).first()
-
-           if not dataset:
-               raise Exception("Document has no dataset")
-
-           session.query(DatasetMetadataBinding).where(
-               DatasetMetadataBinding.dataset_id == dataset_id,
-               DatasetMetadataBinding.document_id.in_(document_ids),
-           ).delete(synchronize_session=False)
+   storage_keys_to_delete: list[str] = []
+   index_node_ids: list[str] = []
+   segment_ids: list[str] = []
+   total_image_upload_file_ids: list[str] = []
+
+   try:
+       # ============ Step 1: Query segment and file data (short read-only transaction) ============
+       with session_factory.create_session() as session:
+           # Get segments info
+           segments = session.scalars(
+               select(DocumentSegment).where(DocumentSegment.document_id.in_(document_ids))
+           ).all()
            # check segment is exist
            if segments:
                index_node_ids = [segment.index_node_id for segment in segments]
-               index_processor = IndexProcessorFactory(doc_form).init_index_processor()
-               index_processor.clean(
-                   dataset, index_node_ids, with_keywords=True, delete_child_chunks=True, delete_summaries=True
-               )
                segment_ids = [segment.id for segment in segments]

                # Collect image file IDs from segment content
                for segment in segments:
                    image_upload_file_ids = get_image_upload_file_ids(segment.content)
-                   image_files = session.query(UploadFile).where(UploadFile.id.in_(image_upload_file_ids)).all()
-                   for image_file in image_files:
-                       try:
-                           if image_file and image_file.key:
-                               storage.delete(image_file.key)
-                       except Exception:
-                           logger.exception(
-                               "Delete image_files failed when storage deleted, \
-                               image_upload_file_is: %s",
-                               image_file.id,
-                           )
-                   stmt = delete(UploadFile).where(UploadFile.id.in_(image_upload_file_ids))
-                   session.execute(stmt)
-                   session.delete(segment)
+                   total_image_upload_file_ids.extend(image_upload_file_ids)

+           # Query storage keys for image files
+           if total_image_upload_file_ids:
+               image_files = session.scalars(
+                   select(UploadFile).where(UploadFile.id.in_(total_image_upload_file_ids))
+               ).all()
+               storage_keys_to_delete.extend([f.key for f in image_files if f and f.key])

+           # Query storage keys for document files
            if file_ids:
-               files = session.scalars(select(UploadFile).where(UploadFile.id.in_(file_ids))).all()
-               for file in files:
-                   try:
-                       storage.delete(file.key)
-                   except Exception:
-                       logger.exception("Delete file failed when document deleted, file_id: %s", file.id)
-               stmt = delete(UploadFile).where(UploadFile.id.in_(file_ids))
-               session.execute(stmt)
+               files = session.scalars(select(UploadFile).where(UploadFile.id.in_(file_ids))).all()
+               storage_keys_to_delete.extend([f.key for f in files if f and f.key])

-           session.commit()
-
-           end_at = time.perf_counter()
-           logger.info(
-               click.style(
-                   f"Cleaned documents when documents deleted latency: {end_at - start_at}",
-                   fg="green",
+       # ============ Step 2: Clean vector index (external service, fresh session for dataset) ============
+       if index_node_ids:
+           try:
+               # Fetch dataset in a fresh session to avoid DetachedInstanceError
+               with session_factory.create_session() as session:
+                   dataset = session.query(Dataset).where(Dataset.id == dataset_id).first()
+                   if not dataset:
+                       logger.warning("Dataset not found for vector index cleanup, dataset_id: %s", dataset_id)
+                   else:
+                       index_processor = IndexProcessorFactory(doc_form).init_index_processor()
+                       index_processor.clean(
+                           dataset, index_node_ids, with_keywords=True, delete_child_chunks=True, delete_summaries=True
+                       )
+           except Exception:
+               logger.exception(
+                   "Failed to clean vector index for dataset_id: %s, document_ids: %s, index_node_ids count: %d",
+                   dataset_id,
+                   document_ids,
+                   len(index_node_ids),
                )
-           )

+       # ============ Step 3: Delete metadata binding (separate short transaction) ============
+       try:
+           with session_factory.create_session() as session:
+               deleted_count = (
+                   session.query(DatasetMetadataBinding)
+                   .where(
+                       DatasetMetadataBinding.dataset_id == dataset_id,
+                       DatasetMetadataBinding.document_id.in_(document_ids),
+                   )
+                   .delete(synchronize_session=False)
+               )
+               session.commit()
+               logger.debug("Deleted %d metadata bindings for dataset_id: %s", deleted_count, dataset_id)
        except Exception:
-           logger.exception("Cleaned documents when documents deleted failed")
+           logger.exception(
+               "Failed to delete metadata bindings for dataset_id: %s, document_ids: %s",
+               dataset_id,
+               document_ids,
+           )

+       # ============ Step 4: Batch delete UploadFile records (multiple short transactions) ============
+       if total_image_upload_file_ids:
+           failed_batches = 0
+           total_batches = (len(total_image_upload_file_ids) + BATCH_SIZE - 1) // BATCH_SIZE
+           for i in range(0, len(total_image_upload_file_ids), BATCH_SIZE):
+               batch = total_image_upload_file_ids[i : i + BATCH_SIZE]
+               try:
+                   with session_factory.create_session() as session:
+                       stmt = delete(UploadFile).where(UploadFile.id.in_(batch))
+                       session.execute(stmt)
+                       session.commit()
+               except Exception:
+                   failed_batches += 1
+                   logger.exception(
+                       "Failed to delete image UploadFile batch %d-%d for dataset_id: %s",
+                       i,
+                       i + len(batch),
+                       dataset_id,
+                   )
+           if failed_batches > 0:
+               logger.warning(
+                   "Image UploadFile deletion: %d/%d batches failed for dataset_id: %s",
+                   failed_batches,
+                   total_batches,
+                   dataset_id,
+               )

+       # ============ Step 5: Batch delete DocumentSegment records (multiple short transactions) ============
+       if segment_ids:
+           failed_batches = 0
+           total_batches = (len(segment_ids) + BATCH_SIZE - 1) // BATCH_SIZE
+           for i in range(0, len(segment_ids), BATCH_SIZE):
+               batch = segment_ids[i : i + BATCH_SIZE]
+               try:
+                   with session_factory.create_session() as session:
+                       segment_delete_stmt = delete(DocumentSegment).where(DocumentSegment.id.in_(batch))
+                       session.execute(segment_delete_stmt)
+                       session.commit()
+               except Exception:
+                   failed_batches += 1
+                   logger.exception(
+                       "Failed to delete DocumentSegment batch %d-%d for dataset_id: %s, document_ids: %s",
+                       i,
+                       i + len(batch),
+                       dataset_id,
+                       document_ids,
+                   )
+           if failed_batches > 0:
+               logger.warning(
+                   "DocumentSegment deletion: %d/%d batches failed, document_ids: %s",
+                   failed_batches,
+                   total_batches,
+                   document_ids,
+               )

+       # ============ Step 6: Delete document-associated files (separate short transaction) ============
+       if file_ids:
+           try:
+               with session_factory.create_session() as session:
+                   stmt = delete(UploadFile).where(UploadFile.id.in_(file_ids))
+                   session.execute(stmt)
+                   session.commit()
+           except Exception:
+               logger.exception(
+                   "Failed to delete document UploadFile records for dataset_id: %s, file_ids: %s",
+                   dataset_id,
+                   file_ids,
+               )

+       # ============ Step 7: Delete storage files (I/O operations, no DB transaction) ============
+       storage_delete_failures = 0
+       for storage_key in storage_keys_to_delete:
+           try:
+               storage.delete(storage_key)
+           except Exception:
+               storage_delete_failures += 1
+               logger.exception("Failed to delete file from storage, key: %s", storage_key)
+       if storage_delete_failures > 0:
+           logger.warning(
+               "Storage file deletion completed with %d failures out of %d total files for dataset_id: %s",
+               storage_delete_failures,
+               len(storage_keys_to_delete),
+               dataset_id,
+           )

+       end_at = time.perf_counter()
+       logger.info(
+           click.style(
+               f"Cleaned documents when documents deleted latency: {end_at - start_at:.2f}s, "
+               f"dataset_id: {dataset_id}, document_ids: {document_ids}, "
+               f"segments: {len(segment_ids)}, image_files: {len(total_image_upload_file_ids)}, "
+               f"storage_files: {len(storage_keys_to_delete)}",
+               fg="green",
+           )
+       )
+   except Exception:
+       logger.exception(
+           "Batch clean documents failed for dataset_id: %s, document_ids: %s",
+           dataset_id,
+           document_ids,
+       )
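The refactor replaces one long transaction with several short ones and deletes rows in `BATCH_SIZE` chunks, so a failure in one batch does not roll back the rest. A minimal sketch of that chunked-delete pattern, assuming a `session_factory` like the one used above and any ORM model with an `id` column:

```python
from sqlalchemy import delete

BATCH_SIZE = 1000


def delete_in_batches(session_factory, model, ids: list[str]) -> int:
    """Delete rows in short, independent transactions; return how many batches failed."""
    failed = 0
    for i in range(0, len(ids), BATCH_SIZE):
        batch = ids[i : i + BATCH_SIZE]
        try:
            with session_factory.create_session() as session:
                session.execute(delete(model).where(model.id.in_(batch)))
                session.commit()  # committing per batch keeps each transaction short
        except Exception:
            failed += 1  # log and continue; remaining batches still run
    return failed
```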
@@ -3,6 +3,7 @@ import time

import click
from celery import shared_task
+from sqlalchemy import delete

from core.db.session_factory import session_factory
from core.rag.index_processor.index_processor_factory import IndexProcessorFactory
@@ -67,8 +68,14 @@ def delete_segment_from_index_task(
        if segment_attachment_bindings:
            attachment_ids = [binding.attachment_id for binding in segment_attachment_bindings]
            index_processor.clean(dataset=dataset, node_ids=attachment_ids, with_keywords=False)
-           for binding in segment_attachment_bindings:
-               session.delete(binding)
+           segment_attachment_bind_ids = [i.id for i in segment_attachment_bindings]
+
+           for i in range(0, len(segment_attachment_bind_ids), 1000):
+               segment_attachment_bind_delete_stmt = delete(SegmentAttachmentBinding).where(
+                   SegmentAttachmentBinding.id.in_(segment_attachment_bind_ids[i : i + 1000])
+               )
+               session.execute(segment_attachment_bind_delete_stmt)
+
            # delete upload file
            session.query(UploadFile).where(UploadFile.id.in_(attachment_ids)).delete(synchronize_session=False)
        session.commit()
@@ -28,7 +28,7 @@ def document_indexing_sync_task(dataset_id: str, document_id: str):
    logger.info(click.style(f"Start sync document: {document_id}", fg="green"))
    start_at = time.perf_counter()

-   with session_factory.create_session() as session:
+   with session_factory.create_session() as session, session.begin():
        document = session.query(Document).where(Document.id == document_id, Document.dataset_id == dataset_id).first()

        if not document:
@@ -68,7 +68,6 @@ def document_indexing_sync_task(dataset_id: str, document_id: str):
            document.indexing_status = "error"
            document.error = "Datasource credential not found. Please reconnect your Notion workspace."
            document.stopped_at = naive_utc_now()
-           session.commit()
            return

        loader = NotionExtractor(
@@ -85,7 +84,6 @@ def document_indexing_sync_task(dataset_id: str, document_id: str):
        if last_edited_time != page_edited_time:
            document.indexing_status = "parsing"
            document.processing_started_at = naive_utc_now()
-           session.commit()

            # delete all document segment and index
            try:
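Switching to `session.begin()` puts the task body in one transaction that commits automatically when the `with` block exits and rolls back if an exception escapes, which is why the explicit `session.commit()` calls inside the block are dropped. A small sketch of that behaviour using plain SQLAlchemy and a throwaway SQLite database:

```python
from sqlalchemy import create_engine, text
from sqlalchemy.orm import sessionmaker

engine = create_engine("sqlite:///:memory:")  # throwaway database for the sketch
factory = sessionmaker(engine)

with factory() as session, session.begin():
    # Everything here is one transaction: committed on normal exit,
    # rolled back automatically if an exception escapes the block.
    session.execute(text("CREATE TABLE docs (id INTEGER PRIMARY KEY, status TEXT)"))
    session.execute(text("INSERT INTO docs (status) VALUES ('parsing')"))

with factory() as session:
    assert session.execute(text("SELECT count(*) FROM docs")).scalar_one() == 1
```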
@@ -8,7 +8,6 @@ from sqlalchemy import delete, select
from core.db.session_factory import session_factory
from core.indexing_runner import DocumentIsPausedError, IndexingRunner
from core.rag.index_processor.index_processor_factory import IndexProcessorFactory
-from extensions.ext_database import db
from libs.datetime_utils import naive_utc_now
from models.dataset import Dataset, Document, DocumentSegment

@@ -27,7 +26,7 @@ def document_indexing_update_task(dataset_id: str, document_id: str):
    logger.info(click.style(f"Start update document: {document_id}", fg="green"))
    start_at = time.perf_counter()

-   with session_factory.create_session() as session:
+   with session_factory.create_session() as session, session.begin():
        document = session.query(Document).where(Document.id == document_id, Document.dataset_id == dataset_id).first()

        if not document:
@@ -36,7 +35,6 @@ def document_indexing_update_task(dataset_id: str, document_id: str):

        document.indexing_status = "parsing"
        document.processing_started_at = naive_utc_now()
-       session.commit()

        # delete all document segment and index
        try:
@@ -56,7 +54,7 @@ def document_indexing_update_task(dataset_id: str, document_id: str):
            segment_ids = [segment.id for segment in segments]
            segment_delete_stmt = delete(DocumentSegment).where(DocumentSegment.id.in_(segment_ids))
            session.execute(segment_delete_stmt)
            db.session.commit()

            end_at = time.perf_counter()
            logger.info(
                click.style(
@@ -6,8 +6,8 @@ import typing
import click
from celery import shared_task

-from core.helper import marketplace
-from core.helper.marketplace import MarketplacePluginDeclaration
+from core.helper.marketplace import record_install_plugin_event
+from core.plugin.entities.marketplace import MarketplacePluginSnapshot
from core.plugin.entities.plugin import PluginInstallationSource
from core.plugin.impl.plugin import PluginInstaller
from extensions.ext_redis import redis_client
@@ -16,7 +16,7 @@ from models.account import TenantPluginAutoUpgradeStrategy
logger = logging.getLogger(__name__)

RETRY_TIMES_OF_ONE_PLUGIN_IN_ONE_TENANT = 3
-CACHE_REDIS_KEY_PREFIX = "plugin_autoupgrade_check_task:cached_plugin_manifests:"
+CACHE_REDIS_KEY_PREFIX = "plugin_autoupgrade_check_task:cached_plugin_snapshot:"
CACHE_REDIS_TTL = 60 * 60  # 1 hour


@@ -25,11 +25,11 @@ def _get_redis_cache_key(plugin_id: str) -> str:
    return f"{CACHE_REDIS_KEY_PREFIX}{plugin_id}"


-def _get_cached_manifest(plugin_id: str) -> typing.Union[MarketplacePluginDeclaration, None, bool]:
+def _get_cached_manifest(plugin_id: str) -> typing.Union[MarketplacePluginSnapshot, None, bool]:
    """
    Get cached plugin manifest from Redis.
    Returns:
-   - MarketplacePluginDeclaration: if found in cache
+   - MarketplacePluginSnapshot: if found in cache
    - None: if cached as not found (marketplace returned no result)
    - False: if not in cache at all
    """
@@ -43,76 +43,31 @@ def _get_cached_manifest(plugin_id: str) -> typing.Union[MarketplacePluginDeclar
        if cached_json is None:
            return None

-       return MarketplacePluginDeclaration.model_validate(cached_json)
+       return MarketplacePluginSnapshot.model_validate(cached_json)
    except Exception:
        logger.exception("Failed to get cached manifest for plugin %s", plugin_id)
        return False


-def _set_cached_manifest(plugin_id: str, manifest: typing.Union[MarketplacePluginDeclaration, None]) -> None:
-    """
-    Cache plugin manifest in Redis.
-    Args:
-        plugin_id: The plugin ID
-        manifest: The manifest to cache, or None if not found in marketplace
-    """
-    try:
-        key = _get_redis_cache_key(plugin_id)
-        if manifest is None:
-            # Cache the fact that this plugin was not found
-            redis_client.setex(key, CACHE_REDIS_TTL, json.dumps(None))
-        else:
-            # Cache the manifest data
-            redis_client.setex(key, CACHE_REDIS_TTL, manifest.model_dump_json())
-    except Exception:
-        # If Redis fails, continue without caching
-        # traceback.print_exc()
-        logger.exception("Failed to set cached manifest for plugin %s", plugin_id)


def marketplace_batch_fetch_plugin_manifests(
    plugin_ids_plain_list: list[str],
-) -> list[MarketplacePluginDeclaration]:
-    """Fetch plugin manifests with Redis caching support."""
-    cached_manifests: dict[str, typing.Union[MarketplacePluginDeclaration, None]] = {}
-    not_cached_plugin_ids: list[str] = []
+) -> list[MarketplacePluginSnapshot]:
+    """
+    Fetch plugin manifests from Redis cache only.
+    This function assumes fetch_global_plugin_manifest() has been called
+    to pre-populate the cache with all marketplace plugins.
+    """
+    result: list[MarketplacePluginSnapshot] = []

    # Check Redis cache for each plugin
    for plugin_id in plugin_ids_plain_list:
        cached_result = _get_cached_manifest(plugin_id)
-       if cached_result is False:
-           # Not in cache, need to fetch
-           not_cached_plugin_ids.append(plugin_id)
-       else:
-           # Either found manifest or cached as None (not found in marketplace)
-           # At this point, cached_result is either MarketplacePluginDeclaration or None
-           if isinstance(cached_result, bool):
-               # This should never happen due to the if condition above, but for type safety
-               continue
-           cached_manifests[plugin_id] = cached_result
+       if not isinstance(cached_result, MarketplacePluginSnapshot):
+           # cached_result is False (not in cache) or None (cached as not found)
+           logger.warning("plugin %s not found in cache, skipping", plugin_id)
+           continue

-   # Fetch uncached plugins from marketplace
-   if not_cached_plugin_ids:
-       manifests = marketplace.batch_fetch_plugin_manifests_ignore_deserialization_error(not_cached_plugin_ids)
-
-       # Cache the fetched manifests
-       for manifest in manifests:
-           cached_manifests[manifest.plugin_id] = manifest
-           _set_cached_manifest(manifest.plugin_id, manifest)
-
-       # Cache plugins that were not found in marketplace
-       fetched_plugin_ids = {manifest.plugin_id for manifest in manifests}
-       for plugin_id in not_cached_plugin_ids:
-           if plugin_id not in fetched_plugin_ids:
-               cached_manifests[plugin_id] = None
-               _set_cached_manifest(plugin_id, None)
-
-   # Build result list from cached manifests
-   result: list[MarketplacePluginDeclaration] = []
-   for plugin_id in plugin_ids_plain_list:
-       cached_manifest: typing.Union[MarketplacePluginDeclaration, None] = cached_manifests.get(plugin_id)
-       if cached_manifest is not None:
-           result.append(cached_manifest)
+       result.append(cached_result)

    return result

@@ -211,7 +166,7 @@ def process_tenant_plugin_autoupgrade_check_task(
                        # execute upgrade
                        new_unique_identifier = manifest.latest_package_identifier

-                       marketplace.record_install_plugin_event(new_unique_identifier)
+                       record_install_plugin_event(new_unique_identifier)
                        click.echo(
                            click.style(
                                f"Upgrade plugin: {original_unique_identifier} -> {new_unique_identifier}",
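`_get_cached_manifest` distinguishes three cases with a slightly unusual return type: a snapshot (cache hit), `None` (cached as "not in the marketplace"), and `False` (no cache entry at all). A small sketch of how a caller is expected to branch on that, using a stand-in cache dict instead of Redis:

```python
import typing


class Snapshot:  # stand-in for MarketplacePluginSnapshot
    def __init__(self, plugin_id: str) -> None:
        self.plugin_id = plugin_id


_cache: dict[str, Snapshot | None] = {"org/known": Snapshot("org/known"), "org/missing": None}


def get_cached(plugin_id: str) -> typing.Union[Snapshot, None, bool]:
    if plugin_id not in _cache:
        return False          # never cached: caller may skip or fall back
    return _cache[plugin_id]  # Snapshot, or None meaning "marketplace has no such plugin"


for pid in ["org/known", "org/missing", "org/uncached"]:
    result = get_cached(pid)
    if not isinstance(result, Snapshot):
        # Covers both False (not cached) and None (cached as not found), as in the task code.
        continue
    print("would check upgrade for", result.plugin_id)
```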
@@ -259,8 +259,8 @@ def _delete_app_workflow_app_logs(tenant_id: str, app_id: str):


def _delete_app_workflow_archive_logs(tenant_id: str, app_id: str):
-   def del_workflow_archive_log(workflow_archive_log_id: str):
-       db.session.query(WorkflowArchiveLog).where(WorkflowArchiveLog.id == workflow_archive_log_id).delete(
+   def del_workflow_archive_log(session, workflow_archive_log_id: str):
+       session.query(WorkflowArchiveLog).where(WorkflowArchiveLog.id == workflow_archive_log_id).delete(
            synchronize_session=False
        )

@@ -420,7 +420,7 @@ def delete_draft_variables_batch(app_id: str, batch_size: int = 1000) -> int:
    total_files_deleted = 0

    while True:
-       with session_factory.create_session() as session:
+       with session_factory.create_session() as session, session.begin():
            # Get a batch of draft variable IDs along with their file_ids
            query_sql = """
                SELECT id, file_id FROM workflow_draft_variables
@@ -10,7 +10,10 @@ from models import Tenant
from models.enums import CreatorUserRole
from models.model import App, UploadFile
from models.workflow import WorkflowDraftVariable, WorkflowDraftVariableFile
-from tasks.remove_app_and_related_data_task import _delete_draft_variables, delete_draft_variables_batch
+from tasks.remove_app_and_related_data_task import (
+    _delete_draft_variables,
+    delete_draft_variables_batch,
+)


@pytest.fixture
@@ -297,12 +300,18 @@ class TestDeleteDraftVariablesWithOffloadIntegration:
    def test_delete_draft_variables_with_offload_data(self, mock_storage, setup_offload_test_data):
        data = setup_offload_test_data
        app_id = data["app"].id
+       upload_file_ids = [uf.id for uf in data["upload_files"]]
+       variable_file_ids = [vf.id for vf in data["variable_files"]]
        mock_storage.delete.return_value = None

        with session_factory.create_session() as session:
            draft_vars_before = session.query(WorkflowDraftVariable).filter_by(app_id=app_id).count()
-           var_files_before = session.query(WorkflowDraftVariableFile).count()
-           upload_files_before = session.query(UploadFile).count()
+           var_files_before = (
+               session.query(WorkflowDraftVariableFile)
+               .where(WorkflowDraftVariableFile.id.in_(variable_file_ids))
+               .count()
+           )
+           upload_files_before = session.query(UploadFile).where(UploadFile.id.in_(upload_file_ids)).count()
            assert draft_vars_before == 3
            assert var_files_before == 2
            assert upload_files_before == 2
@@ -315,8 +324,12 @@ class TestDeleteDraftVariablesWithOffloadIntegration:
        assert draft_vars_after == 0

        with session_factory.create_session() as session:
-           var_files_after = session.query(WorkflowDraftVariableFile).count()
-           upload_files_after = session.query(UploadFile).count()
+           var_files_after = (
+               session.query(WorkflowDraftVariableFile)
+               .where(WorkflowDraftVariableFile.id.in_(variable_file_ids))
+               .count()
+           )
+           upload_files_after = session.query(UploadFile).where(UploadFile.id.in_(upload_file_ids)).count()
            assert var_files_after == 0
            assert upload_files_after == 0

@@ -329,6 +342,8 @@ class TestDeleteDraftVariablesWithOffloadIntegration:
    def test_delete_draft_variables_storage_failure_continues_cleanup(self, mock_storage, setup_offload_test_data):
        data = setup_offload_test_data
        app_id = data["app"].id
+       upload_file_ids = [uf.id for uf in data["upload_files"]]
+       variable_file_ids = [vf.id for vf in data["variable_files"]]
        mock_storage.delete.side_effect = [Exception("Storage error"), None]

        deleted_count = delete_draft_variables_batch(app_id, batch_size=10)
@@ -339,8 +354,12 @@ class TestDeleteDraftVariablesWithOffloadIntegration:
        assert draft_vars_after == 0

        with session_factory.create_session() as session:
-           var_files_after = session.query(WorkflowDraftVariableFile).count()
-           upload_files_after = session.query(UploadFile).count()
+           var_files_after = (
+               session.query(WorkflowDraftVariableFile)
+               .where(WorkflowDraftVariableFile.id.in_(variable_file_ids))
+               .count()
+           )
+           upload_files_after = session.query(UploadFile).where(UploadFile.id.in_(upload_file_ids)).count()
            assert var_files_after == 0
            assert upload_files_after == 0
@@ -395,3 +414,275 @@ class TestDeleteDraftVariablesWithOffloadIntegration:
            if app2_obj:
                session.delete(app2_obj)
            session.commit()


class TestDeleteDraftVariablesSessionCommit:
    """Test suite to verify session commit behavior in delete_draft_variables_batch."""

    @pytest.fixture
    def setup_offload_test_data(self, app_and_tenant):
        """Create test data with offload files for session commit tests."""
        from core.variables.types import SegmentType
        from libs.datetime_utils import naive_utc_now

        tenant, app = app_and_tenant

        with session_factory.create_session() as session:
            upload_file1 = UploadFile(
                tenant_id=tenant.id,
                storage_type="local",
                key="test/file1.json",
                name="file1.json",
                size=1024,
                extension="json",
                mime_type="application/json",
                created_by_role=CreatorUserRole.ACCOUNT,
                created_by=str(uuid.uuid4()),
                created_at=naive_utc_now(),
                used=False,
            )
            upload_file2 = UploadFile(
                tenant_id=tenant.id,
                storage_type="local",
                key="test/file2.json",
                name="file2.json",
                size=2048,
                extension="json",
                mime_type="application/json",
                created_by_role=CreatorUserRole.ACCOUNT,
                created_by=str(uuid.uuid4()),
                created_at=naive_utc_now(),
                used=False,
            )
            session.add(upload_file1)
            session.add(upload_file2)
            session.flush()

            var_file1 = WorkflowDraftVariableFile(
                tenant_id=tenant.id,
                app_id=app.id,
                user_id=str(uuid.uuid4()),
                upload_file_id=upload_file1.id,
                size=1024,
                length=10,
                value_type=SegmentType.STRING,
            )
            var_file2 = WorkflowDraftVariableFile(
                tenant_id=tenant.id,
                app_id=app.id,
                user_id=str(uuid.uuid4()),
                upload_file_id=upload_file2.id,
                size=2048,
                length=20,
                value_type=SegmentType.OBJECT,
            )
            session.add(var_file1)
            session.add(var_file2)
            session.flush()

            draft_var1 = WorkflowDraftVariable.new_node_variable(
                app_id=app.id,
                node_id="node_1",
                name="large_var_1",
                value=StringSegment(value="truncated..."),
                node_execution_id=str(uuid.uuid4()),
                file_id=var_file1.id,
            )
            draft_var2 = WorkflowDraftVariable.new_node_variable(
                app_id=app.id,
                node_id="node_2",
                name="large_var_2",
                value=StringSegment(value="truncated..."),
                node_execution_id=str(uuid.uuid4()),
                file_id=var_file2.id,
            )
            draft_var3 = WorkflowDraftVariable.new_node_variable(
                app_id=app.id,
                node_id="node_3",
                name="regular_var",
                value=StringSegment(value="regular_value"),
                node_execution_id=str(uuid.uuid4()),
            )
            session.add(draft_var1)
            session.add(draft_var2)
            session.add(draft_var3)
            session.commit()

            data = {
                "app": app,
                "tenant": tenant,
                "upload_files": [upload_file1, upload_file2],
                "variable_files": [var_file1, var_file2],
                "draft_variables": [draft_var1, draft_var2, draft_var3],
            }

        yield data

        with session_factory.create_session() as session:
            for table, ids in [
                (WorkflowDraftVariable, [v.id for v in data["draft_variables"]]),
                (WorkflowDraftVariableFile, [vf.id for vf in data["variable_files"]]),
                (UploadFile, [uf.id for uf in data["upload_files"]]),
            ]:
                cleanup_query = delete(table).where(table.id.in_(ids)).execution_options(synchronize_session=False)
                session.execute(cleanup_query)
            session.commit()

    @pytest.fixture
    def setup_commit_test_data(self, app_and_tenant):
        """Create test data for session commit tests."""
        tenant, app = app_and_tenant
        variable_ids: list[str] = []

        with session_factory.create_session() as session:
            variables = []
            for i in range(10):
                var = WorkflowDraftVariable.new_node_variable(
                    app_id=app.id,
                    node_id=f"node_{i}",
                    name=f"var_{i}",
                    value=StringSegment(value="test_value"),
                    node_execution_id=str(uuid.uuid4()),
                )
                session.add(var)
                variables.append(var)
            session.commit()
            variable_ids = [v.id for v in variables]

        yield {
            "app": app,
            "tenant": tenant,
            "variable_ids": variable_ids,
        }

        with session_factory.create_session() as session:
            cleanup_query = (
                delete(WorkflowDraftVariable)
                .where(WorkflowDraftVariable.id.in_(variable_ids))
                .execution_options(synchronize_session=False)
            )
            session.execute(cleanup_query)
            session.commit()

    def test_session_commit_is_called_after_each_batch(self, setup_commit_test_data):
        """Test that session.begin() is used for automatic transaction management."""
        data = setup_commit_test_data
        app_id = data["app"].id

        # Since session.begin() is used, the transaction is automatically committed
        # when the with block exits successfully. We verify this by checking that
        # data is actually persisted.
        deleted_count = delete_draft_variables_batch(app_id, batch_size=3)

        # Verify all data was deleted (proves transaction was committed)
        with session_factory.create_session() as session:
            remaining_count = session.query(WorkflowDraftVariable).filter_by(app_id=app_id).count()

        assert deleted_count == 10
        assert remaining_count == 0

    def test_data_persisted_after_batch_deletion(self, setup_commit_test_data):
        """Test that data is actually persisted to database after batch deletion with commits."""
        data = setup_commit_test_data
        app_id = data["app"].id
        variable_ids = data["variable_ids"]

        # Verify initial state
        with session_factory.create_session() as session:
            initial_count = session.query(WorkflowDraftVariable).filter_by(app_id=app_id).count()
            assert initial_count == 10

        # Perform deletion with small batch size to force multiple commits
        deleted_count = delete_draft_variables_batch(app_id, batch_size=3)

        assert deleted_count == 10

        # Verify all data is deleted in a new session (proves commits worked)
        with session_factory.create_session() as session:
            final_count = session.query(WorkflowDraftVariable).filter_by(app_id=app_id).count()
            assert final_count == 0

        # Verify specific IDs are deleted
        with session_factory.create_session() as session:
            remaining_vars = (
                session.query(WorkflowDraftVariable).where(WorkflowDraftVariable.id.in_(variable_ids)).count()
            )
            assert remaining_vars == 0

    def test_session_commit_with_empty_dataset(self, setup_commit_test_data):
        """Test session behavior when deleting from an empty dataset."""
        nonexistent_app_id = str(uuid.uuid4())

        # Should not raise any errors and should return 0
        deleted_count = delete_draft_variables_batch(nonexistent_app_id, batch_size=10)
        assert deleted_count == 0

    def test_session_commit_with_single_batch(self, setup_commit_test_data):
        """Test that commit happens correctly when all data fits in a single batch."""
        data = setup_commit_test_data
        app_id = data["app"].id

        with session_factory.create_session() as session:
            initial_count = session.query(WorkflowDraftVariable).filter_by(app_id=app_id).count()
            assert initial_count == 10

        # Delete all in a single batch
        deleted_count = delete_draft_variables_batch(app_id, batch_size=100)
        assert deleted_count == 10

        # Verify data is persisted
        with session_factory.create_session() as session:
            final_count = session.query(WorkflowDraftVariable).filter_by(app_id=app_id).count()
            assert final_count == 0

    def test_invalid_batch_size_raises_error(self, setup_commit_test_data):
        """Test that invalid batch size raises ValueError."""
        data = setup_commit_test_data
        app_id = data["app"].id

        with pytest.raises(ValueError, match="batch_size must be positive"):
            delete_draft_variables_batch(app_id, batch_size=0)

        with pytest.raises(ValueError, match="batch_size must be positive"):
            delete_draft_variables_batch(app_id, batch_size=-1)

    @patch("extensions.ext_storage.storage")
    def test_session_commit_with_offload_data_cleanup(self, mock_storage, setup_offload_test_data):
        """Test that session commits correctly when cleaning up offload data."""
        data = setup_offload_test_data
        app_id = data["app"].id
        upload_file_ids = [uf.id for uf in data["upload_files"]]
        mock_storage.delete.return_value = None

        # Verify initial state
        with session_factory.create_session() as session:
            draft_vars_before = session.query(WorkflowDraftVariable).filter_by(app_id=app_id).count()
            var_files_before = (
                session.query(WorkflowDraftVariableFile)
                .where(WorkflowDraftVariableFile.id.in_([vf.id for vf in data["variable_files"]]))
                .count()
            )
            upload_files_before = session.query(UploadFile).where(UploadFile.id.in_(upload_file_ids)).count()
            assert draft_vars_before == 3
            assert var_files_before == 2
            assert upload_files_before == 2

        # Delete variables with offload data
        deleted_count = delete_draft_variables_batch(app_id, batch_size=10)
        assert deleted_count == 3

        # Verify all data is persisted (deleted) in new session
        with session_factory.create_session() as session:
            draft_vars_after = session.query(WorkflowDraftVariable).filter_by(app_id=app_id).count()
            var_files_after = (
                session.query(WorkflowDraftVariableFile)
                .where(WorkflowDraftVariableFile.id.in_([vf.id for vf in data["variable_files"]]))
|
||||
.count()
|
||||
)
|
||||
upload_files_after = session.query(UploadFile).where(UploadFile.id.in_(upload_file_ids)).count()
|
||||
assert draft_vars_after == 0
|
||||
assert var_files_after == 0
|
||||
assert upload_files_after == 0
|
||||
|
||||
# Verify storage cleanup was called
|
||||
assert mock_storage.delete.call_count == 2
|
||||
|
||||
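Taken together, these tests pin down the contract of delete_draft_variables_batch without showing its body: batch_size must be positive, rows are removed per-app in batches inside session.begin() blocks (so each batch commits on exit), offloaded values and their upload files are also cleaned up (including one storage.delete call per offloaded file), and the total number of deleted draft variables is returned. A minimal sketch consistent with those expectations follows; it is not the actual implementation, and the import paths and internals are assumptions.

# Illustrative sketch only: reconstructs the behaviour implied by the tests above.
from sqlalchemy import delete, select

from models import session_factory  # import path assumed; used the same way as in the tests
from models.workflow import WorkflowDraftVariable  # import path assumed


def delete_draft_variables_batch(app_id: str, batch_size: int = 1000) -> int:
    if batch_size <= 0:
        raise ValueError("batch_size must be positive")

    total_deleted = 0
    while True:
        with session_factory.create_session() as session:
            with session.begin():
                # Pick one batch of draft-variable ids for this app.
                ids = session.scalars(
                    select(WorkflowDraftVariable.id)
                    .where(WorkflowDraftVariable.app_id == app_id)
                    .limit(batch_size)
                ).all()
                if not ids:
                    return total_deleted
                # Offloaded values would be cleaned up here as well
                # (WorkflowDraftVariableFile / UploadFile rows plus storage.delete(...)),
                # as exercised by test_session_commit_with_offload_data_cleanup.
                session.execute(
                    delete(WorkflowDraftVariable)
                    .where(WorkflowDraftVariable.id.in_(ids))
                    .execution_options(synchronize_session=False)
                )
                total_deleted += len(ids)
            # Leaving session.begin() commits this batch before the next one starts.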
@@ -1016,7 +1016,7 @@ class TestAccountService:

def test_delete_account(self, db_session_with_containers, mock_external_service_dependencies):
"""
Test account deletion (should add task to queue).
Test account deletion (should add task to queue and sync to enterprise).
"""
fake = Faker()
email = fake.email()
@@ -1034,10 +1034,18 @@ class TestAccountService:
password=password,
)

with patch("services.account_service.delete_account_task") as mock_delete_task:
with (
patch("services.account_service.delete_account_task") as mock_delete_task,
patch("services.enterprise.account_deletion_sync.sync_account_deletion") as mock_sync,
):
mock_sync.return_value = True

# Delete account
AccountService.delete_account(account)

# Verify sync was called
mock_sync.assert_called_once_with(account_id=account.id, source="account_deleted")

# Verify task was added to queue
mock_delete_task.delay.assert_called_once_with(account.id)

@@ -1716,7 +1724,7 @@ class TestTenantService:

def test_remove_member_from_tenant_success(self, db_session_with_containers, mock_external_service_dependencies):
"""
Test successful member removal from tenant.
Test successful member removal from tenant (should sync to enterprise).
"""
fake = Faker()
tenant_name = fake.company()
@@ -1751,7 +1759,15 @@ class TestTenantService:
TenantService.create_tenant_member(tenant, member_account, role="normal")

# Remove member
TenantService.remove_member_from_tenant(tenant, member_account, owner_account)
with patch("services.enterprise.account_deletion_sync.sync_workspace_member_removal") as mock_sync:
mock_sync.return_value = True

TenantService.remove_member_from_tenant(tenant, member_account, owner_account)

# Verify sync was called
mock_sync.assert_called_once_with(
workspace_id=tenant.id, member_id=member_account.id, source="workspace_member_removed"
)

# Verify member was removed
from extensions.ext_database import db

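These hunks only touch the tests, but they imply that the services now notify the enterprise sync module alongside their existing work. A rough sketch of the call pattern the new assertions expect, hedged because the service code itself is not part of this diff and the import paths are assumptions:

# Hedged sketch of the call pattern asserted above; not the real service code.
from services.enterprise.account_deletion_sync import sync_account_deletion
from tasks.delete_account_task import delete_account_task  # import path assumed


def delete_account(account) -> None:
    # Notify the enterprise side first, then enqueue the existing deletion task.
    sync_account_deletion(account_id=account.id, source="account_deleted")
    delete_account_task.delay(account.id)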
@@ -0,0 +1,182 @@
|
||||
from unittest.mock import MagicMock, patch
|
||||
|
||||
import pytest
|
||||
from faker import Faker
|
||||
|
||||
from models import Account, Tenant, TenantAccountJoin, TenantAccountRole
|
||||
from models.dataset import Dataset, Document, DocumentSegment
|
||||
from tasks.document_indexing_update_task import document_indexing_update_task
|
||||
|
||||
|
||||
class TestDocumentIndexingUpdateTask:
|
||||
@pytest.fixture
|
||||
def mock_external_dependencies(self):
|
||||
"""Patch external collaborators used by the update task.
|
||||
- IndexProcessorFactory.init_index_processor().clean(...)
|
||||
- IndexingRunner.run([...])
|
||||
"""
|
||||
with (
|
||||
patch("tasks.document_indexing_update_task.IndexProcessorFactory") as mock_factory,
|
||||
patch("tasks.document_indexing_update_task.IndexingRunner") as mock_runner,
|
||||
):
|
||||
processor_instance = MagicMock()
|
||||
mock_factory.return_value.init_index_processor.return_value = processor_instance
|
||||
|
||||
runner_instance = MagicMock()
|
||||
mock_runner.return_value = runner_instance
|
||||
|
||||
yield {
|
||||
"factory": mock_factory,
|
||||
"processor": processor_instance,
|
||||
"runner": mock_runner,
|
||||
"runner_instance": runner_instance,
|
||||
}
|
||||
|
||||
def _create_dataset_document_with_segments(self, db_session_with_containers, *, segment_count: int = 2):
|
||||
fake = Faker()
|
||||
|
||||
# Account and tenant
|
||||
account = Account(
|
||||
email=fake.email(),
|
||||
name=fake.name(),
|
||||
interface_language="en-US",
|
||||
status="active",
|
||||
)
|
||||
db_session_with_containers.add(account)
|
||||
db_session_with_containers.commit()
|
||||
|
||||
tenant = Tenant(name=fake.company(), status="normal")
|
||||
db_session_with_containers.add(tenant)
|
||||
db_session_with_containers.commit()
|
||||
|
||||
join = TenantAccountJoin(
|
||||
tenant_id=tenant.id,
|
||||
account_id=account.id,
|
||||
role=TenantAccountRole.OWNER,
|
||||
current=True,
|
||||
)
|
||||
db_session_with_containers.add(join)
|
||||
db_session_with_containers.commit()
|
||||
|
||||
# Dataset and document
|
||||
dataset = Dataset(
|
||||
tenant_id=tenant.id,
|
||||
name=fake.company(),
|
||||
description=fake.text(max_nb_chars=64),
|
||||
data_source_type="upload_file",
|
||||
indexing_technique="high_quality",
|
||||
created_by=account.id,
|
||||
)
|
||||
db_session_with_containers.add(dataset)
|
||||
db_session_with_containers.commit()
|
||||
|
||||
document = Document(
|
||||
tenant_id=tenant.id,
|
||||
dataset_id=dataset.id,
|
||||
position=0,
|
||||
data_source_type="upload_file",
|
||||
batch="test_batch",
|
||||
name=fake.file_name(),
|
||||
created_from="upload_file",
|
||||
created_by=account.id,
|
||||
indexing_status="waiting",
|
||||
enabled=True,
|
||||
doc_form="text_model",
|
||||
)
|
||||
db_session_with_containers.add(document)
|
||||
db_session_with_containers.commit()
|
||||
|
||||
# Segments
|
||||
node_ids = []
|
||||
for i in range(segment_count):
|
||||
node_id = f"node-{i + 1}"
|
||||
seg = DocumentSegment(
|
||||
tenant_id=tenant.id,
|
||||
dataset_id=dataset.id,
|
||||
document_id=document.id,
|
||||
position=i,
|
||||
content=fake.text(max_nb_chars=32),
|
||||
answer=None,
|
||||
word_count=10,
|
||||
tokens=5,
|
||||
index_node_id=node_id,
|
||||
status="completed",
|
||||
created_by=account.id,
|
||||
)
|
||||
db_session_with_containers.add(seg)
|
||||
node_ids.append(node_id)
|
||||
db_session_with_containers.commit()
|
||||
|
||||
# Refresh to ensure ORM state
|
||||
db_session_with_containers.refresh(dataset)
|
||||
db_session_with_containers.refresh(document)
|
||||
|
||||
return dataset, document, node_ids
|
||||
|
||||
def test_cleans_segments_and_reindexes(self, db_session_with_containers, mock_external_dependencies):
|
||||
dataset, document, node_ids = self._create_dataset_document_with_segments(db_session_with_containers)
|
||||
|
||||
# Act
|
||||
document_indexing_update_task(dataset.id, document.id)
|
||||
|
||||
# Ensure we see committed changes from another session
|
||||
db_session_with_containers.expire_all()
|
||||
|
||||
# Assert document status updated before reindex
|
||||
updated = db_session_with_containers.query(Document).where(Document.id == document.id).first()
|
||||
assert updated.indexing_status == "parsing"
|
||||
assert updated.processing_started_at is not None
|
||||
|
||||
# Segments should be deleted
|
||||
remaining = (
|
||||
db_session_with_containers.query(DocumentSegment).where(DocumentSegment.document_id == document.id).count()
|
||||
)
|
||||
assert remaining == 0
|
||||
|
||||
# Assert index processor clean was called with expected args
|
||||
clean_call = mock_external_dependencies["processor"].clean.call_args
|
||||
assert clean_call is not None
|
||||
args, kwargs = clean_call
|
||||
# args[0] is a Dataset instance (from another session) — validate by id
|
||||
assert getattr(args[0], "id", None) == dataset.id
|
||||
# args[1] should contain our node_ids
|
||||
assert set(args[1]) == set(node_ids)
|
||||
assert kwargs.get("with_keywords") is True
|
||||
assert kwargs.get("delete_child_chunks") is True
|
||||
|
||||
# Assert indexing runner invoked with the updated document
|
||||
run_call = mock_external_dependencies["runner_instance"].run.call_args
|
||||
assert run_call is not None
|
||||
run_docs = run_call[0][0]
|
||||
assert len(run_docs) == 1
|
||||
first = run_docs[0]
|
||||
assert getattr(first, "id", None) == document.id
|
||||
|
||||
def test_clean_error_is_logged_and_indexing_continues(self, db_session_with_containers, mock_external_dependencies):
|
||||
dataset, document, node_ids = self._create_dataset_document_with_segments(db_session_with_containers)
|
||||
|
||||
# Force clean to raise; task should continue to indexing
|
||||
mock_external_dependencies["processor"].clean.side_effect = Exception("boom")
|
||||
|
||||
document_indexing_update_task(dataset.id, document.id)
|
||||
|
||||
# Ensure we see committed changes from another session
|
||||
db_session_with_containers.expire_all()
|
||||
|
||||
# Indexing should still be triggered
|
||||
mock_external_dependencies["runner_instance"].run.assert_called_once()
|
||||
|
||||
# Segments should remain (since clean failed before DB delete)
|
||||
remaining = (
|
||||
db_session_with_containers.query(DocumentSegment).where(DocumentSegment.document_id == document.id).count()
|
||||
)
|
||||
assert remaining > 0
|
||||
|
||||
def test_document_not_found_noop(self, db_session_with_containers, mock_external_dependencies):
|
||||
fake = Faker()
|
||||
# Act with non-existent document id
|
||||
document_indexing_update_task(dataset_id=fake.uuid4(), document_id=fake.uuid4())
|
||||
|
||||
# Neither processor nor runner should be called
|
||||
mock_external_dependencies["processor"].clean.assert_not_called()
|
||||
mock_external_dependencies["runner_instance"].run.assert_not_called()
|
||||
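The tests in this new file describe the expected flow of document_indexing_update_task end to end: a missing document is a no-op, the document is flipped to "parsing" with a processing start time, the old index is cleaned and the segments deleted, and re-indexing runs even when cleanup fails. A hedged reconstruction of that flow is sketched below; import paths, the factory argument, and the timestamp handling are assumptions, not the task's actual source.

# Hedged sketch of the task flow these tests pin down; not the actual task body.
import logging
from datetime import UTC, datetime

from core.indexing_runner import IndexingRunner  # import paths assumed
from core.rag.index_processor.index_processor_factory import IndexProcessorFactory  # import path assumed
from extensions.ext_database import db
from models.dataset import Dataset, Document, DocumentSegment

logger = logging.getLogger(__name__)


def document_indexing_update_task(dataset_id: str, document_id: str) -> None:
    document = db.session.query(Document).filter_by(id=document_id, dataset_id=dataset_id).first()
    if not document:
        return  # missing document is a no-op: nothing cleaned, nothing re-indexed

    document.indexing_status = "parsing"
    document.processing_started_at = datetime.now(UTC)
    db.session.commit()

    dataset = db.session.query(Dataset).filter_by(id=dataset_id).first()
    segments = db.session.query(DocumentSegment).filter_by(document_id=document_id).all()
    try:
        # Drop the old vectors/keywords first, then the segment rows themselves.
        index_processor = IndexProcessorFactory(document.doc_form).init_index_processor()
        index_processor.clean(
            dataset,
            [segment.index_node_id for segment in segments],
            with_keywords=True,
            delete_child_chunks=True,
        )
        for segment in segments:
            db.session.delete(segment)
        db.session.commit()
    except Exception:
        # A failed clean is only logged; re-indexing still runs and the segments remain.
        logger.exception("Cleanup failed for document %s", document_id)

    IndexingRunner().run([document])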
@@ -217,7 +217,6 @@ class TestTemplateTransformNode:
@patch(
"core.workflow.nodes.template_transform.template_transform_node.CodeExecutorJinja2TemplateRenderer.render_template"
)
@patch("core.workflow.nodes.template_transform.template_transform_node.MAX_TEMPLATE_TRANSFORM_OUTPUT_LENGTH", 10)
def test_run_output_length_exceeds_limit(
self, mock_execute, basic_node_data, mock_graph, mock_graph_runtime_state, graph_init_params
):
@@ -231,6 +230,7 @@ class TestTemplateTransformNode:
graph_init_params=graph_init_params,
graph=mock_graph,
graph_runtime_state=mock_graph_runtime_state,
max_output_length=10,
)

result = node._run()

@@ -0,0 +1,276 @@
|
||||
"""Unit tests for account deletion synchronization.
|
||||
|
||||
This test module verifies the enterprise account deletion sync functionality,
|
||||
including Redis queuing, error handling, and community vs enterprise behavior.
|
||||
"""
|
||||
|
||||
from unittest.mock import MagicMock, patch
|
||||
|
||||
import pytest
|
||||
from redis import RedisError
|
||||
|
||||
from services.enterprise.account_deletion_sync import (
|
||||
_queue_task,
|
||||
sync_account_deletion,
|
||||
sync_workspace_member_removal,
|
||||
)
|
||||
|
||||
|
||||
class TestQueueTask:
|
||||
"""Unit tests for the _queue_task helper function."""
|
||||
|
||||
@pytest.fixture
|
||||
def mock_redis_client(self):
|
||||
"""Mock redis_client for testing."""
|
||||
with patch("services.enterprise.account_deletion_sync.redis_client") as mock_redis:
|
||||
yield mock_redis
|
||||
|
||||
@pytest.fixture
|
||||
def mock_uuid(self):
|
||||
"""Mock UUID generation for predictable task IDs."""
|
||||
with patch("services.enterprise.account_deletion_sync.uuid.uuid4") as mock_uuid_gen:
|
||||
mock_uuid_gen.return_value = MagicMock(hex="test-task-id-1234")
|
||||
yield mock_uuid_gen
|
||||
|
||||
def test_queue_task_success(self, mock_redis_client, mock_uuid):
|
||||
"""Test successful task queueing to Redis."""
|
||||
# Arrange
|
||||
workspace_id = "ws-123"
|
||||
member_id = "member-456"
|
||||
source = "test_source"
|
||||
|
||||
# Act
|
||||
result = _queue_task(workspace_id=workspace_id, member_id=member_id, source=source)
|
||||
|
||||
# Assert
|
||||
assert result is True
|
||||
mock_redis_client.lpush.assert_called_once()
|
||||
|
||||
# Verify the task payload structure
|
||||
call_args = mock_redis_client.lpush.call_args[0]
|
||||
assert call_args[0] == "enterprise:member:sync:queue"
|
||||
|
||||
import json
|
||||
|
||||
task_data = json.loads(call_args[1])
|
||||
assert task_data["workspace_id"] == workspace_id
|
||||
assert task_data["member_id"] == member_id
|
||||
assert task_data["source"] == source
|
||||
assert task_data["type"] == "sync_member_deletion_from_workspace"
|
||||
assert task_data["retry_count"] == 0
|
||||
assert "task_id" in task_data
|
||||
assert "created_at" in task_data
|
||||
|
||||
def test_queue_task_redis_error(self, mock_redis_client, caplog):
|
||||
"""Test handling of Redis connection errors."""
|
||||
# Arrange
|
||||
mock_redis_client.lpush.side_effect = RedisError("Connection failed")
|
||||
|
||||
# Act
|
||||
result = _queue_task(workspace_id="ws-123", member_id="member-456", source="test_source")
|
||||
|
||||
# Assert
|
||||
assert result is False
|
||||
assert "Failed to queue account deletion sync" in caplog.text
|
||||
|
||||
def test_queue_task_type_error(self, mock_redis_client, caplog):
|
||||
"""Test handling of JSON serialization errors."""
|
||||
# Arrange
|
||||
mock_redis_client.lpush.side_effect = TypeError("Cannot serialize")
|
||||
|
||||
# Act
|
||||
result = _queue_task(workspace_id="ws-123", member_id="member-456", source="test_source")
|
||||
|
||||
# Assert
|
||||
assert result is False
|
||||
assert "Failed to queue account deletion sync" in caplog.text
|
||||
|
||||
|
||||
class TestSyncWorkspaceMemberRemoval:
|
||||
"""Unit tests for sync_workspace_member_removal function."""
|
||||
|
||||
@pytest.fixture
|
||||
def mock_queue_task(self):
|
||||
"""Mock _queue_task for testing."""
|
||||
with patch("services.enterprise.account_deletion_sync._queue_task") as mock_queue:
|
||||
mock_queue.return_value = True
|
||||
yield mock_queue
|
||||
|
||||
def test_sync_workspace_member_removal_enterprise_enabled(self, mock_queue_task):
|
||||
"""Test sync when ENTERPRISE_ENABLED is True."""
|
||||
# Arrange
|
||||
workspace_id = "ws-123"
|
||||
member_id = "member-456"
|
||||
source = "workspace_member_removed"
|
||||
|
||||
with patch("services.enterprise.account_deletion_sync.dify_config") as mock_config:
|
||||
mock_config.ENTERPRISE_ENABLED = True
|
||||
|
||||
# Act
|
||||
result = sync_workspace_member_removal(workspace_id=workspace_id, member_id=member_id, source=source)
|
||||
|
||||
# Assert
|
||||
assert result is True
|
||||
mock_queue_task.assert_called_once_with(workspace_id=workspace_id, member_id=member_id, source=source)
|
||||
|
||||
def test_sync_workspace_member_removal_enterprise_disabled(self, mock_queue_task):
|
||||
"""Test sync when ENTERPRISE_ENABLED is False (community edition)."""
|
||||
# Arrange
|
||||
with patch("services.enterprise.account_deletion_sync.dify_config") as mock_config:
|
||||
mock_config.ENTERPRISE_ENABLED = False
|
||||
|
||||
# Act
|
||||
result = sync_workspace_member_removal(workspace_id="ws-123", member_id="member-456", source="test_source")
|
||||
|
||||
# Assert
|
||||
assert result is True
|
||||
mock_queue_task.assert_not_called()
|
||||
|
||||
def test_sync_workspace_member_removal_queue_failure(self, mock_queue_task):
|
||||
"""Test handling of queue task failures."""
|
||||
# Arrange
|
||||
mock_queue_task.return_value = False
|
||||
|
||||
with patch("services.enterprise.account_deletion_sync.dify_config") as mock_config:
|
||||
mock_config.ENTERPRISE_ENABLED = True
|
||||
|
||||
# Act
|
||||
result = sync_workspace_member_removal(workspace_id="ws-123", member_id="member-456", source="test_source")
|
||||
|
||||
# Assert
|
||||
assert result is False
|
||||
|
||||
|
||||
class TestSyncAccountDeletion:
|
||||
"""Unit tests for sync_account_deletion function."""
|
||||
|
||||
@pytest.fixture
|
||||
def mock_db_session(self):
|
||||
"""Mock database session for testing."""
|
||||
with patch("services.enterprise.account_deletion_sync.db.session") as mock_session:
|
||||
yield mock_session
|
||||
|
||||
@pytest.fixture
|
||||
def mock_queue_task(self):
|
||||
"""Mock _queue_task for testing."""
|
||||
with patch("services.enterprise.account_deletion_sync._queue_task") as mock_queue:
|
||||
mock_queue.return_value = True
|
||||
yield mock_queue
|
||||
|
||||
def test_sync_account_deletion_enterprise_disabled(self, mock_db_session, mock_queue_task):
|
||||
"""Test sync when ENTERPRISE_ENABLED is False (community edition)."""
|
||||
# Arrange
|
||||
with patch("services.enterprise.account_deletion_sync.dify_config") as mock_config:
|
||||
mock_config.ENTERPRISE_ENABLED = False
|
||||
|
||||
# Act
|
||||
result = sync_account_deletion(account_id="acc-123", source="account_deleted")
|
||||
|
||||
# Assert
|
||||
assert result is True
|
||||
mock_db_session.query.assert_not_called()
|
||||
mock_queue_task.assert_not_called()
|
||||
|
||||
def test_sync_account_deletion_multiple_workspaces(self, mock_db_session, mock_queue_task):
|
||||
"""Test sync for account with multiple workspace memberships."""
|
||||
# Arrange
|
||||
account_id = "acc-123"
|
||||
|
||||
# Mock workspace joins
|
||||
mock_join1 = MagicMock()
|
||||
mock_join1.tenant_id = "tenant-1"
|
||||
mock_join2 = MagicMock()
|
||||
mock_join2.tenant_id = "tenant-2"
|
||||
mock_join3 = MagicMock()
|
||||
mock_join3.tenant_id = "tenant-3"
|
||||
|
||||
mock_query = MagicMock()
|
||||
mock_query.filter_by.return_value.all.return_value = [mock_join1, mock_join2, mock_join3]
|
||||
mock_db_session.query.return_value = mock_query
|
||||
|
||||
with patch("services.enterprise.account_deletion_sync.dify_config") as mock_config:
|
||||
mock_config.ENTERPRISE_ENABLED = True
|
||||
|
||||
# Act
|
||||
result = sync_account_deletion(account_id=account_id, source="account_deleted")
|
||||
|
||||
# Assert
|
||||
assert result is True
|
||||
assert mock_queue_task.call_count == 3
|
||||
|
||||
# Verify each workspace was queued
|
||||
mock_queue_task.assert_any_call(workspace_id="tenant-1", member_id=account_id, source="account_deleted")
|
||||
mock_queue_task.assert_any_call(workspace_id="tenant-2", member_id=account_id, source="account_deleted")
|
||||
mock_queue_task.assert_any_call(workspace_id="tenant-3", member_id=account_id, source="account_deleted")
|
||||
|
||||
def test_sync_account_deletion_no_workspaces(self, mock_db_session, mock_queue_task):
|
||||
"""Test sync for account with no workspace memberships."""
|
||||
# Arrange
|
||||
mock_query = MagicMock()
|
||||
mock_query.filter_by.return_value.all.return_value = []
|
||||
mock_db_session.query.return_value = mock_query
|
||||
|
||||
with patch("services.enterprise.account_deletion_sync.dify_config") as mock_config:
|
||||
mock_config.ENTERPRISE_ENABLED = True
|
||||
|
||||
# Act
|
||||
result = sync_account_deletion(account_id="acc-123", source="account_deleted")
|
||||
|
||||
# Assert
|
||||
assert result is True
|
||||
mock_queue_task.assert_not_called()
|
||||
|
||||
def test_sync_account_deletion_partial_failure(self, mock_db_session, mock_queue_task):
|
||||
"""Test sync when some tasks fail to queue."""
|
||||
# Arrange
|
||||
account_id = "acc-123"
|
||||
|
||||
# Mock workspace joins
|
||||
mock_join1 = MagicMock()
|
||||
mock_join1.tenant_id = "tenant-1"
|
||||
mock_join2 = MagicMock()
|
||||
mock_join2.tenant_id = "tenant-2"
|
||||
mock_join3 = MagicMock()
|
||||
mock_join3.tenant_id = "tenant-3"
|
||||
|
||||
mock_query = MagicMock()
|
||||
mock_query.filter_by.return_value.all.return_value = [mock_join1, mock_join2, mock_join3]
|
||||
mock_db_session.query.return_value = mock_query
|
||||
|
||||
# Mock queue_task to fail for second workspace
|
||||
def queue_side_effect(workspace_id, member_id, source):
|
||||
return workspace_id != "tenant-2"
|
||||
|
||||
mock_queue_task.side_effect = queue_side_effect
|
||||
|
||||
with patch("services.enterprise.account_deletion_sync.dify_config") as mock_config:
|
||||
mock_config.ENTERPRISE_ENABLED = True
|
||||
|
||||
# Act
|
||||
result = sync_account_deletion(account_id=account_id, source="account_deleted")
|
||||
|
||||
# Assert
|
||||
assert result is False # Should return False if any task fails
|
||||
assert mock_queue_task.call_count == 3
|
||||
|
||||
def test_sync_account_deletion_all_failures(self, mock_db_session, mock_queue_task):
|
||||
"""Test sync when all tasks fail to queue."""
|
||||
# Arrange
|
||||
mock_join = MagicMock()
|
||||
mock_join.tenant_id = "tenant-1"
|
||||
|
||||
mock_query = MagicMock()
|
||||
mock_query.filter_by.return_value.all.return_value = [mock_join]
|
||||
mock_db_session.query.return_value = mock_query
|
||||
|
||||
mock_queue_task.return_value = False
|
||||
|
||||
with patch("services.enterprise.account_deletion_sync.dify_config") as mock_config:
|
||||
mock_config.ENTERPRISE_ENABLED = True
|
||||
|
||||
# Act
|
||||
result = sync_account_deletion(account_id="acc-123", source="account_deleted")
|
||||
|
||||
# Assert
|
||||
assert result is False
|
||||
mock_queue_task.assert_called_once()
|
||||
@@ -114,6 +114,21 @@ def mock_db_session():
session = MagicMock()
# Ensure tests can observe session.close() via context manager teardown
session.close = MagicMock()
session.commit = MagicMock()

# Mock session.begin() context manager to auto-commit on exit
begin_cm = MagicMock()
begin_cm.__enter__.return_value = session

def _begin_exit_side_effect(*args, **kwargs):
# session.begin().__exit__() should commit if no exception
if args[0] is None: # No exception
session.commit()

begin_cm.__exit__.side_effect = _begin_exit_side_effect
session.begin.return_value = begin_cm

# Mock create_session() context manager
cm = MagicMock()
cm.__enter__.return_value = session

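This fixture mimics SQLAlchemy's Session.begin() context manager so the unit tests can observe the commit that the real code gets for free. The production-side pattern being emulated is roughly the following sketch, assuming the same session_factory helper used in the tests above:

# Sketch of the pattern the mocked session.begin() stands in for; not taken from the diff.
with session_factory.create_session() as session:
    with session.begin():
        # do work on `session` ...
        pass
    # Leaving session.begin() without an exception issues the COMMIT;
    # raising inside the block rolls the transaction back instead.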
@@ -350,7 +350,7 @@ class TestDeleteWorkflowArchiveLogs:
mock_query.where.return_value = mock_delete_query
mock_db.session.query.return_value = mock_query

delete_func("log-1")
delete_func(mock_db.session, "log-1")

mock_db.session.query.assert_called_once_with(WorkflowArchiveLog)
mock_query.where.assert_called_once()

api/uv.lock (generated file, 2 changed lines)
@@ -1368,7 +1368,7 @@ wheels = [

[[package]]
name = "dify-api"
version = "1.12.0"
version = "1.12.1"
source = { virtual = "." }
dependencies = [
{ name = "aliyun-log-python-sdk" },

@@ -21,7 +21,7 @@ services:
|
||||
|
||||
# API service
|
||||
api:
|
||||
image: langgenius/dify-api:1.12.0
|
||||
image: langgenius/dify-api:1.12.1
|
||||
restart: always
|
||||
environment:
|
||||
# Use the shared environment variables.
|
||||
@@ -63,7 +63,7 @@ services:
|
||||
# worker service
|
||||
# The Celery worker for processing all queues (dataset, workflow, mail, etc.)
|
||||
worker:
|
||||
image: langgenius/dify-api:1.12.0
|
||||
image: langgenius/dify-api:1.12.1
|
||||
restart: always
|
||||
environment:
|
||||
# Use the shared environment variables.
|
||||
@@ -102,7 +102,7 @@ services:
|
||||
# worker_beat service
|
||||
# Celery beat for scheduling periodic tasks.
|
||||
worker_beat:
|
||||
image: langgenius/dify-api:1.12.0
|
||||
image: langgenius/dify-api:1.12.1
|
||||
restart: always
|
||||
environment:
|
||||
# Use the shared environment variables.
|
||||
@@ -132,7 +132,7 @@ services:
|
||||
|
||||
# Frontend web application.
|
||||
web:
|
||||
image: langgenius/dify-web:1.12.0
|
||||
image: langgenius/dify-web:1.12.1
|
||||
restart: always
|
||||
environment:
|
||||
CONSOLE_API_URL: ${CONSOLE_API_URL:-}
|
||||
|
||||
@@ -707,7 +707,7 @@ services:
|
||||
|
||||
# API service
|
||||
api:
|
||||
image: langgenius/dify-api:1.12.0
|
||||
image: langgenius/dify-api:1.12.1
|
||||
restart: always
|
||||
environment:
|
||||
# Use the shared environment variables.
|
||||
@@ -749,7 +749,7 @@ services:
|
||||
# worker service
|
||||
# The Celery worker for processing all queues (dataset, workflow, mail, etc.)
|
||||
worker:
|
||||
image: langgenius/dify-api:1.12.0
|
||||
image: langgenius/dify-api:1.12.1
|
||||
restart: always
|
||||
environment:
|
||||
# Use the shared environment variables.
|
||||
@@ -788,7 +788,7 @@ services:
|
||||
# worker_beat service
|
||||
# Celery beat for scheduling periodic tasks.
|
||||
worker_beat:
|
||||
image: langgenius/dify-api:1.12.0
|
||||
image: langgenius/dify-api:1.12.1
|
||||
restart: always
|
||||
environment:
|
||||
# Use the shared environment variables.
|
||||
@@ -818,7 +818,7 @@ services:
|
||||
|
||||
# Frontend web application.
|
||||
web:
|
||||
image: langgenius/dify-web:1.12.0
|
||||
image: langgenius/dify-web:1.12.1
|
||||
restart: always
|
||||
environment:
|
||||
CONSOLE_API_URL: ${CONSOLE_API_URL:-}
|
||||
|
||||
@@ -109,6 +109,7 @@ const AgentTools: FC = () => {
tool_parameters: paramsWithDefaultValue,
notAuthor: !tool.is_team_authorization,
enabled: true,
type: tool.provider_type as CollectionType,
}
}
const handleSelectTool = (tool: ToolDefaultValue) => {

@@ -1,4 +1,3 @@
|
||||
/* eslint-disable tailwindcss/classnames-order */
|
||||
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
|
||||
import Effect from '.'
|
||||
|
||||
@@ -29,8 +28,8 @@ type Story = StoryObj<typeof meta>
|
||||
export const Playground: Story = {
|
||||
render: () => (
|
||||
<div className="relative h-40 w-72 overflow-hidden rounded-2xl border border-divider-subtle bg-background-default-subtle">
|
||||
<Effect className="top-6 left-8" />
|
||||
<Effect className="top-14 right-10 bg-util-colors-purple-brand-purple-brand-500" />
|
||||
<Effect className="left-8 top-6" />
|
||||
<Effect className="bg-util-colors-purple-brand-purple-brand-500 right-10 top-14" />
|
||||
<div className="absolute inset-x-0 bottom-4 flex justify-center text-xs text-text-secondary">
|
||||
Accent glow
|
||||
</div>
|
||||
|
||||
@@ -4,7 +4,7 @@ import type { FC } from 'react'
|
||||
import { RiQuestionLine } from '@remixicon/react'
|
||||
import { useBoolean } from 'ahooks'
|
||||
import * as React from 'react'
|
||||
import { useEffect, useRef, useState } from 'react'
|
||||
import { useCallback, useEffect, useRef, useState } from 'react'
|
||||
import { PortalToFollowElem, PortalToFollowElemContent, PortalToFollowElemTrigger } from '@/app/components/base/portal-to-follow-elem'
|
||||
import { cn } from '@/utils/classnames'
|
||||
import { tooltipManager } from './TooltipManager'
|
||||
@@ -61,6 +61,20 @@ const Tooltip: FC<TooltipProps> = ({
|
||||
isHoverTriggerRef.current = isHoverTrigger
|
||||
}, [isHoverTrigger])
|
||||
|
||||
const closeTimeoutRef = useRef<ReturnType<typeof setTimeout> | null>(null)
|
||||
const clearCloseTimeout = useCallback(() => {
|
||||
if (closeTimeoutRef.current) {
|
||||
clearTimeout(closeTimeoutRef.current)
|
||||
closeTimeoutRef.current = null
|
||||
}
|
||||
}, [])
|
||||
|
||||
useEffect(() => {
|
||||
return () => {
|
||||
clearCloseTimeout()
|
||||
}
|
||||
}, [clearCloseTimeout])
|
||||
|
||||
const close = () => setOpen(false)
|
||||
|
||||
const handleLeave = (isTrigger: boolean) => {
|
||||
@@ -71,7 +85,9 @@ const Tooltip: FC<TooltipProps> = ({
|
||||
|
||||
// give time to move to the popup
|
||||
if (needsDelay) {
|
||||
setTimeout(() => {
|
||||
clearCloseTimeout()
|
||||
closeTimeoutRef.current = setTimeout(() => {
|
||||
closeTimeoutRef.current = null
|
||||
if (!isHoverPopupRef.current && !isHoverTriggerRef.current) {
|
||||
setOpen(false)
|
||||
tooltipManager.clear(close)
|
||||
@@ -79,6 +95,7 @@ const Tooltip: FC<TooltipProps> = ({
|
||||
}, 300)
|
||||
}
|
||||
else {
|
||||
clearCloseTimeout()
|
||||
setOpen(false)
|
||||
tooltipManager.clear(close)
|
||||
}
|
||||
@@ -95,6 +112,7 @@ const Tooltip: FC<TooltipProps> = ({
|
||||
onClick={() => triggerMethod === 'click' && setOpen(v => !v)}
|
||||
onMouseEnter={() => {
|
||||
if (triggerMethod === 'hover') {
|
||||
clearCloseTimeout()
|
||||
setHoverTrigger()
|
||||
tooltipManager.register(close)
|
||||
setOpen(true)
|
||||
@@ -115,7 +133,12 @@ const Tooltip: FC<TooltipProps> = ({
|
||||
!noDecoration && 'system-xs-regular relative max-w-[300px] break-words rounded-md bg-components-panel-bg px-3 py-2 text-left text-text-tertiary shadow-lg',
|
||||
popupClassName,
|
||||
)}
|
||||
onMouseEnter={() => triggerMethod === 'hover' && setHoverPopup()}
|
||||
onMouseEnter={() => {
|
||||
if (triggerMethod === 'hover') {
|
||||
clearCloseTimeout()
|
||||
setHoverPopup()
|
||||
}
|
||||
}}
|
||||
onMouseLeave={() => triggerMethod === 'hover' && handleLeave(false)}
|
||||
>
|
||||
{popupContent}
|
||||
|
||||
@@ -14,7 +14,6 @@ const ErrorMessage = ({
errorMsg,
}: ErrorMessageProps) => {
return (
// eslint-disable-next-line tailwindcss/migration-from-tailwind-2
<div className={cn(
'flex gap-x-0.5 rounded-xl border-[0.5px] border-components-panel-border bg-opacity-40 bg-toast-error-bg p-2 shadow-xs shadow-shadow-shadow-3',
className,

@@ -159,69 +159,74 @@ const Apps = ({
|
||||
|
||||
return (
|
||||
<div className={cn(
|
||||
'flex h-full flex-col border-l-[0.5px] border-divider-regular',
|
||||
'flex h-full min-h-0 flex-col overflow-hidden border-l-[0.5px] border-divider-regular',
|
||||
)}
|
||||
>
|
||||
{systemFeatures.enable_explore_banner && (
|
||||
<div className="mt-4 px-12">
|
||||
<Banner />
|
||||
</div>
|
||||
)}
|
||||
<div className={cn(
|
||||
'mt-6 flex items-center justify-between px-12',
|
||||
)}
|
||||
>
|
||||
<div className="flex items-center">
|
||||
<div className="system-xl-semibold grow truncate text-text-primary">{!hasFilterCondition ? t('apps.title', { ns: 'explore' }) : t('apps.resultNum', { num: searchFilteredList.length, ns: 'explore' })}</div>
|
||||
{hasFilterCondition && (
|
||||
<>
|
||||
<div className="mx-3 h-4 w-px bg-divider-regular"></div>
|
||||
<Button size="medium" onClick={handleResetFilter}>{t('apps.resetFilter', { ns: 'explore' })}</Button>
|
||||
</>
|
||||
)}
|
||||
</div>
|
||||
<Input
|
||||
showLeftIcon
|
||||
showClearIcon
|
||||
wrapperClassName="w-[200px] self-start"
|
||||
value={keywords}
|
||||
onChange={e => handleKeywordsChange(e.target.value)}
|
||||
onClear={() => handleKeywordsChange('')}
|
||||
/>
|
||||
</div>
|
||||
<div className="flex flex-1 flex-col overflow-y-auto">
|
||||
{systemFeatures.enable_explore_banner && (
|
||||
<div className="mt-4 px-12">
|
||||
<Banner />
|
||||
</div>
|
||||
)}
|
||||
|
||||
<div className="mt-2 px-12">
|
||||
<Category
|
||||
list={categories}
|
||||
value={currCategory}
|
||||
onChange={setCurrCategory}
|
||||
allCategoriesEn={allCategoriesEn}
|
||||
/>
|
||||
</div>
|
||||
|
||||
<div className={cn(
|
||||
'relative mt-4 flex flex-1 shrink-0 grow flex-col overflow-auto pb-6',
|
||||
)}
|
||||
>
|
||||
<nav
|
||||
className={cn(
|
||||
s.appList,
|
||||
'grid shrink-0 content-start gap-4 px-6 sm:px-12',
|
||||
<div className="sticky top-0 z-10 bg-background-body">
|
||||
<div className={cn(
|
||||
'flex items-center justify-between px-12 pt-6',
|
||||
)}
|
||||
>
|
||||
{searchFilteredList.map(app => (
|
||||
<AppCard
|
||||
key={app.app_id}
|
||||
isExplore
|
||||
app={app}
|
||||
canCreate={hasEditPermission}
|
||||
onCreate={() => {
|
||||
setCurrApp(app)
|
||||
setIsShowCreateModal(true)
|
||||
}}
|
||||
>
|
||||
<div className="flex items-center">
|
||||
<div className="system-xl-semibold grow truncate text-text-primary">{!hasFilterCondition ? t('apps.title', { ns: 'explore' }) : t('apps.resultNum', { num: searchFilteredList.length, ns: 'explore' })}</div>
|
||||
{hasFilterCondition && (
|
||||
<>
|
||||
<div className="mx-3 h-4 w-px bg-divider-regular"></div>
|
||||
<Button size="medium" onClick={handleResetFilter}>{t('apps.resetFilter', { ns: 'explore' })}</Button>
|
||||
</>
|
||||
)}
|
||||
</div>
|
||||
<Input
|
||||
showLeftIcon
|
||||
showClearIcon
|
||||
wrapperClassName="w-[200px] self-start"
|
||||
value={keywords}
|
||||
onChange={e => handleKeywordsChange(e.target.value)}
|
||||
onClear={() => handleKeywordsChange('')}
|
||||
/>
|
||||
))}
|
||||
</nav>
|
||||
</div>
|
||||
|
||||
<div className="px-12 pb-4 pt-2">
|
||||
<Category
|
||||
list={categories}
|
||||
value={currCategory}
|
||||
onChange={setCurrCategory}
|
||||
allCategoriesEn={allCategoriesEn}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className={cn(
|
||||
'relative flex flex-1 shrink-0 grow flex-col pb-6',
|
||||
)}
|
||||
>
|
||||
<nav
|
||||
className={cn(
|
||||
s.appList,
|
||||
'grid shrink-0 content-start gap-4 px-6 sm:px-12',
|
||||
)}
|
||||
>
|
||||
{searchFilteredList.map(app => (
|
||||
<AppCard
|
||||
key={app.app_id}
|
||||
isExplore
|
||||
app={app}
|
||||
canCreate={hasEditPermission}
|
||||
onCreate={() => {
|
||||
setCurrApp(app)
|
||||
setIsShowCreateModal(true)
|
||||
}}
|
||||
/>
|
||||
))}
|
||||
</nav>
|
||||
</div>
|
||||
</div>
|
||||
{isShowCreateModal && (
|
||||
<CreateAppModal
|
||||
|
||||
@@ -71,7 +71,7 @@ const Explore: FC<IExploreProps> = ({
}
>
<Sidebar controlUpdateInstalledApps={controlUpdateInstalledApps} />
<div className="w-0 grow">
<div className="h-full min-h-0 w-0 grow">
{children}
</div>
</ExploreContext.Provider>

@@ -599,20 +599,30 @@ describe('CommonCreateModal', () => {
|
||||
},
|
||||
})
|
||||
mockUsePluginStore.mockReturnValue(detailWithCredentials)
|
||||
const existingBuilder = createMockSubscriptionBuilder()
|
||||
mockVerifyCredentials.mockImplementation((params, { onSuccess }) => {
|
||||
onSuccess()
|
||||
})
|
||||
|
||||
render(<CommonCreateModal {...defaultProps} />)
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockCreateBuilder).toHaveBeenCalled()
|
||||
})
|
||||
render(<CommonCreateModal {...defaultProps} builder={existingBuilder} />)
|
||||
|
||||
fireEvent.click(screen.getByTestId('modal-confirm'))
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockVerifyCredentials).toHaveBeenCalled()
|
||||
expect(mockVerifyCredentials).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
provider: 'test-provider',
|
||||
subscriptionBuilderId: existingBuilder.id,
|
||||
}),
|
||||
expect.objectContaining({
|
||||
onSuccess: expect.any(Function),
|
||||
onError: expect.any(Function),
|
||||
}),
|
||||
)
|
||||
})
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByTestId('modal-confirm')).toHaveTextContent('pluginTrigger.modal.common.create')
|
||||
})
|
||||
})
|
||||
|
||||
@@ -629,15 +639,12 @@ describe('CommonCreateModal', () => {
|
||||
},
|
||||
})
|
||||
mockUsePluginStore.mockReturnValue(detailWithCredentials)
|
||||
const existingBuilder = createMockSubscriptionBuilder()
|
||||
mockVerifyCredentials.mockImplementation((params, { onError }) => {
|
||||
onError(new Error('Verification failed'))
|
||||
})
|
||||
|
||||
render(<CommonCreateModal {...defaultProps} />)
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockCreateBuilder).toHaveBeenCalled()
|
||||
})
|
||||
render(<CommonCreateModal {...defaultProps} builder={existingBuilder} />)
|
||||
|
||||
fireEvent.click(screen.getByTestId('modal-confirm'))
|
||||
|
||||
|
||||
@@ -129,6 +129,7 @@ export const useToolSelectorState = ({
extra: {
description: tool.tool_description,
},
type: tool.provider_type,
}
}, [])

@@ -4,6 +4,17 @@ import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
import { DSLImportStatus } from '@/models/app'
|
||||
import UpdateDSLModal from './update-dsl-modal'
|
||||
|
||||
class MockFileReader {
|
||||
onload: ((this: FileReader, event: ProgressEvent<FileReader>) => void) | null = null
|
||||
|
||||
readAsText(_file: Blob) {
|
||||
const event = { target: { result: 'test content' } } as unknown as ProgressEvent<FileReader>
|
||||
this.onload?.call(this as unknown as FileReader, event)
|
||||
}
|
||||
}
|
||||
|
||||
vi.stubGlobal('FileReader', MockFileReader as unknown as typeof FileReader)
|
||||
|
||||
// Mock react-i18next
|
||||
vi.mock('react-i18next', () => ({
|
||||
useTranslation: () => ({
|
||||
|
||||
@@ -98,31 +98,46 @@ export const useNodesSyncDraft = () => {
|
||||
) => {
|
||||
if (getNodesReadOnly())
|
||||
return
|
||||
const postParams = getPostParams()
|
||||
|
||||
if (postParams) {
|
||||
const {
|
||||
setSyncWorkflowDraftHash,
|
||||
setDraftUpdatedAt,
|
||||
} = workflowStore.getState()
|
||||
try {
|
||||
const res = await syncWorkflowDraft(postParams)
|
||||
setSyncWorkflowDraftHash(res.hash)
|
||||
setDraftUpdatedAt(res.updated_at)
|
||||
callback?.onSuccess?.()
|
||||
// Get base params without hash
|
||||
const baseParams = getPostParams()
|
||||
if (!baseParams)
|
||||
return
|
||||
|
||||
const {
|
||||
setSyncWorkflowDraftHash,
|
||||
setDraftUpdatedAt,
|
||||
} = workflowStore.getState()
|
||||
|
||||
try {
|
||||
// IMPORTANT: Get the LATEST hash right before sending the request
|
||||
// This ensures that even if queued, each request uses the most recent hash
|
||||
const latestHash = workflowStore.getState().syncWorkflowDraftHash
|
||||
|
||||
const postParams = {
|
||||
...baseParams,
|
||||
params: {
|
||||
...baseParams.params,
|
||||
hash: latestHash || null, // null for first-time, otherwise use latest hash
|
||||
},
|
||||
}
|
||||
catch (error: any) {
|
||||
if (error && error.json && !error.bodyUsed) {
|
||||
error.json().then((err: any) => {
|
||||
if (err.code === 'draft_workflow_not_sync' && !notRefreshWhenSyncError)
|
||||
handleRefreshWorkflowDraft()
|
||||
})
|
||||
}
|
||||
callback?.onError?.()
|
||||
}
|
||||
finally {
|
||||
callback?.onSettled?.()
|
||||
|
||||
const res = await syncWorkflowDraft(postParams)
|
||||
setSyncWorkflowDraftHash(res.hash)
|
||||
setDraftUpdatedAt(res.updated_at)
|
||||
callback?.onSuccess?.()
|
||||
}
|
||||
catch (error: any) {
|
||||
if (error && error.json && !error.bodyUsed) {
|
||||
error.json().then((err: any) => {
|
||||
if (err.code === 'draft_workflow_not_sync' && !notRefreshWhenSyncError)
|
||||
handleRefreshWorkflowDraft()
|
||||
})
|
||||
}
|
||||
callback?.onError?.()
|
||||
}
|
||||
finally {
|
||||
callback?.onSettled?.()
|
||||
}
|
||||
}, [workflowStore, getPostParams, getNodesReadOnly, handleRefreshWorkflowDraft])
|
||||
|
||||
|
||||
@@ -87,6 +87,7 @@ export type ToolValue = {
enabled?: boolean
extra?: { description?: string } & Record<string, unknown>
credential_id?: string
type?: string
}

export type DataSourceItem = {

@@ -1,14 +1,11 @@
|
||||
import { spawnSync } from 'node:child_process'
|
||||
import { randomUUID } from 'node:crypto'
|
||||
import { createSerwistRoute } from '@serwist/turbopack'
|
||||
|
||||
const basePath = process.env.NEXT_PUBLIC_BASE_PATH || ''
|
||||
const revision = spawnSync('git', ['rev-parse', 'HEAD'], { encoding: 'utf-8' }).stdout?.trim() || randomUUID()
|
||||
|
||||
export const { dynamic, dynamicParams, revalidate, generateStaticParams, GET } = createSerwistRoute({
|
||||
additionalPrecacheEntries: [{ url: `${basePath}/_offline.html`, revision }],
|
||||
swSrc: 'app/sw.ts',
|
||||
nextConfig: {
|
||||
basePath,
|
||||
},
|
||||
useNativeEsbuild: true,
|
||||
})
|
||||
|
||||
@@ -3,7 +3,9 @@
|
||||
/// <reference lib="webworker" />
|
||||
|
||||
import type { PrecacheEntry, SerwistGlobalConfig } from 'serwist'
|
||||
import { CacheableResponsePlugin, CacheFirst, ExpirationPlugin, NetworkFirst, Serwist, StaleWhileRevalidate } from 'serwist'
|
||||
import { defaultCache } from '@serwist/turbopack/worker'
|
||||
import { Serwist } from 'serwist'
|
||||
import { withLeadingSlash } from 'ufo'
|
||||
|
||||
declare global {
|
||||
// eslint-disable-next-line ts/consistent-type-definitions
|
||||
@@ -18,78 +20,30 @@ const scopePathname = new URL(self.registration.scope).pathname
|
||||
const basePath = scopePathname.replace(/\/serwist\/$/, '').replace(/\/$/, '')
|
||||
const offlineUrl = `${basePath}/_offline.html`
|
||||
|
||||
const normalizeManifestUrl = (url: string): string => {
|
||||
if (url.startsWith('/serwist/'))
|
||||
return url.replace(/^\/serwist\//, '/')
|
||||
|
||||
return withLeadingSlash(url)
|
||||
}
|
||||
|
||||
const manifest = self.__SW_MANIFEST?.map((entry) => {
|
||||
if (typeof entry === 'string')
|
||||
return normalizeManifestUrl(entry)
|
||||
|
||||
return {
|
||||
...entry,
|
||||
url: normalizeManifestUrl(entry.url),
|
||||
}
|
||||
})
|
||||
|
||||
const serwist = new Serwist({
|
||||
precacheEntries: self.__SW_MANIFEST,
|
||||
precacheEntries: manifest,
|
||||
skipWaiting: true,
|
||||
disableDevLogs: true,
|
||||
clientsClaim: true,
|
||||
navigationPreload: true,
|
||||
runtimeCaching: [
|
||||
{
|
||||
matcher: ({ url }) => url.origin === 'https://fonts.googleapis.com',
|
||||
handler: new CacheFirst({
|
||||
cacheName: 'google-fonts',
|
||||
plugins: [
|
||||
new CacheableResponsePlugin({ statuses: [0, 200] }),
|
||||
new ExpirationPlugin({
|
||||
maxEntries: 4,
|
||||
maxAgeSeconds: 365 * 24 * 60 * 60,
|
||||
}),
|
||||
],
|
||||
}),
|
||||
},
|
||||
{
|
||||
matcher: ({ url }) => url.origin === 'https://fonts.gstatic.com',
|
||||
handler: new CacheFirst({
|
||||
cacheName: 'google-fonts-webfonts',
|
||||
plugins: [
|
||||
new CacheableResponsePlugin({ statuses: [0, 200] }),
|
||||
new ExpirationPlugin({
|
||||
maxEntries: 4,
|
||||
maxAgeSeconds: 365 * 24 * 60 * 60,
|
||||
}),
|
||||
],
|
||||
}),
|
||||
},
|
||||
{
|
||||
matcher: ({ request }) => request.destination === 'image',
|
||||
handler: new CacheFirst({
|
||||
cacheName: 'images',
|
||||
plugins: [
|
||||
new CacheableResponsePlugin({ statuses: [0, 200] }),
|
||||
new ExpirationPlugin({
|
||||
maxEntries: 64,
|
||||
maxAgeSeconds: 30 * 24 * 60 * 60,
|
||||
}),
|
||||
],
|
||||
}),
|
||||
},
|
||||
{
|
||||
matcher: ({ request }) => request.destination === 'script' || request.destination === 'style',
|
||||
handler: new StaleWhileRevalidate({
|
||||
cacheName: 'static-resources',
|
||||
plugins: [
|
||||
new ExpirationPlugin({
|
||||
maxEntries: 32,
|
||||
maxAgeSeconds: 24 * 60 * 60,
|
||||
}),
|
||||
],
|
||||
}),
|
||||
},
|
||||
{
|
||||
matcher: ({ url, sameOrigin }) => sameOrigin && url.pathname.startsWith('/api/'),
|
||||
handler: new NetworkFirst({
|
||||
cacheName: 'api-cache',
|
||||
networkTimeoutSeconds: 10,
|
||||
plugins: [
|
||||
new ExpirationPlugin({
|
||||
maxEntries: 16,
|
||||
maxAgeSeconds: 60 * 60,
|
||||
}),
|
||||
],
|
||||
}),
|
||||
},
|
||||
],
|
||||
runtimeCaching: defaultCache,
|
||||
fallbacks: {
|
||||
entries: [
|
||||
{
|
||||
|
||||
web/contract/console/trigger.ts (new file, 119 lines)
@@ -0,0 +1,119 @@
|
||||
import type {
|
||||
TriggerLogEntity,
|
||||
TriggerOAuthClientParams,
|
||||
TriggerOAuthConfig,
|
||||
TriggerProviderApiEntity,
|
||||
TriggerSubscription,
|
||||
TriggerSubscriptionBuilder,
|
||||
} from '@/app/components/workflow/block-selector/types'
|
||||
import { type } from '@orpc/contract'
|
||||
import { base } from '../base'
|
||||
|
||||
export const triggersContract = base
|
||||
.route({ path: '/workspaces/current/triggers', method: 'GET' })
|
||||
.input(type<{ query?: { type?: string } }>())
|
||||
.output(type<TriggerProviderApiEntity[]>())
|
||||
|
||||
export const triggerProviderInfoContract = base
|
||||
.route({ path: '/workspaces/current/trigger-provider/{provider}/info', method: 'GET' })
|
||||
.input(type<{ params: { provider: string } }>())
|
||||
.output(type<TriggerProviderApiEntity>())
|
||||
|
||||
export const triggerSubscriptionsContract = base
|
||||
.route({ path: '/workspaces/current/trigger-provider/{provider}/subscriptions/list', method: 'GET' })
|
||||
.input(type<{ params: { provider: string } }>())
|
||||
.output(type<TriggerSubscription[]>())
|
||||
|
||||
export const triggerSubscriptionBuilderCreateContract = base
|
||||
.route({ path: '/workspaces/current/trigger-provider/{provider}/subscriptions/builder/create', method: 'POST' })
|
||||
.input(type<{
|
||||
params: { provider: string }
|
||||
body?: { credential_type?: string }
|
||||
}>())
|
||||
.output(type<{ subscription_builder: TriggerSubscriptionBuilder }>())
|
||||
|
||||
export const triggerSubscriptionBuilderUpdateContract = base
|
||||
.route({ path: '/workspaces/current/trigger-provider/{provider}/subscriptions/builder/update/{subscriptionBuilderId}', method: 'POST' })
|
||||
.input(type<{
|
||||
params: { provider: string, subscriptionBuilderId: string }
|
||||
body?: {
|
||||
name?: string
|
||||
properties?: Record<string, unknown>
|
||||
parameters?: Record<string, unknown>
|
||||
credentials?: Record<string, unknown>
|
||||
}
|
||||
}>())
|
||||
.output(type<TriggerSubscriptionBuilder>())
|
||||
|
||||
export const triggerSubscriptionBuilderVerifyUpdateContract = base
|
||||
.route({ path: '/workspaces/current/trigger-provider/{provider}/subscriptions/builder/verify-and-update/{subscriptionBuilderId}', method: 'POST' })
|
||||
.input(type<{
|
||||
params: { provider: string, subscriptionBuilderId: string }
|
||||
body?: { credentials?: Record<string, unknown> }
|
||||
}>())
|
||||
.output(type<{ verified: boolean }>())
|
||||
|
||||
export const triggerSubscriptionVerifyContract = base
|
||||
.route({ path: '/workspaces/current/trigger-provider/{provider}/subscriptions/verify/{subscriptionId}', method: 'POST' })
|
||||
.input(type<{
|
||||
params: { provider: string, subscriptionId: string }
|
||||
body?: { credentials?: Record<string, unknown> }
|
||||
}>())
|
||||
.output(type<{ verified: boolean }>())
|
||||
|
||||
export const triggerSubscriptionBuildContract = base
|
||||
.route({ path: '/workspaces/current/trigger-provider/{provider}/subscriptions/builder/build/{subscriptionBuilderId}', method: 'POST' })
|
||||
.input(type<{
|
||||
params: { provider: string, subscriptionBuilderId: string }
|
||||
body?: {
|
||||
name?: string
|
||||
parameters?: Record<string, unknown>
|
||||
}
|
||||
}>())
|
||||
.output(type<unknown>())
|
||||
|
||||
export const triggerSubscriptionDeleteContract = base
|
||||
.route({ path: '/workspaces/current/trigger-provider/{subscriptionId}/subscriptions/delete', method: 'POST' })
|
||||
.input(type<{ params: { subscriptionId: string } }>())
|
||||
.output(type<{ result: string }>())
|
||||
|
||||
export const triggerSubscriptionUpdateContract = base
|
||||
.route({ path: '/workspaces/current/trigger-provider/{subscriptionId}/subscriptions/update', method: 'POST' })
|
||||
.input(type<{
|
||||
params: { subscriptionId: string }
|
||||
body?: {
|
||||
name?: string
|
||||
properties?: Record<string, unknown>
|
||||
parameters?: Record<string, unknown>
|
||||
credentials?: Record<string, unknown>
|
||||
}
|
||||
}>())
|
||||
.output(type<{ result: string, id: string }>())
|
||||
|
||||
export const triggerSubscriptionBuilderLogsContract = base
|
||||
.route({ path: '/workspaces/current/trigger-provider/{provider}/subscriptions/builder/logs/{subscriptionBuilderId}', method: 'GET' })
|
||||
.input(type<{ params: { provider: string, subscriptionBuilderId: string } }>())
|
||||
.output(type<{ logs: TriggerLogEntity[] }>())
|
||||
|
||||
export const triggerOAuthConfigContract = base
|
||||
.route({ path: '/workspaces/current/trigger-provider/{provider}/oauth/client', method: 'GET' })
|
||||
.input(type<{ params: { provider: string } }>())
|
||||
.output(type<TriggerOAuthConfig>())
|
||||
|
||||
export const triggerOAuthConfigureContract = base
|
||||
.route({ path: '/workspaces/current/trigger-provider/{provider}/oauth/client', method: 'POST' })
|
||||
.input(type<{
|
||||
params: { provider: string }
|
||||
body: { client_params?: TriggerOAuthClientParams, enabled: boolean }
|
||||
}>())
|
||||
.output(type<{ result: string }>())
|
||||
|
||||
export const triggerOAuthDeleteContract = base
|
||||
.route({ path: '/workspaces/current/trigger-provider/{provider}/oauth/client', method: 'DELETE' })
|
||||
.input(type<{ params: { provider: string } }>())
|
||||
.output(type<{ result: string }>())
|
||||
|
||||
export const triggerOAuthInitiateContract = base
|
||||
.route({ path: '/workspaces/current/trigger-provider/{provider}/subscriptions/oauth/authorize', method: 'GET' })
|
||||
.input(type<{ params: { provider: string } }>())
|
||||
.output(type<{ authorization_url: string, subscription_builder: TriggerSubscriptionBuilder }>())
|
||||
@@ -1,6 +1,23 @@
|
||||
import type { InferContractRouterInputs } from '@orpc/contract'
|
||||
import { bindPartnerStackContract, invoicesContract } from './console/billing'
|
||||
import { systemFeaturesContract } from './console/system'
|
||||
import {
|
||||
triggerOAuthConfigContract,
|
||||
triggerOAuthConfigureContract,
|
||||
triggerOAuthDeleteContract,
|
||||
triggerOAuthInitiateContract,
|
||||
triggerProviderInfoContract,
|
||||
triggersContract,
|
||||
triggerSubscriptionBuildContract,
|
||||
triggerSubscriptionBuilderCreateContract,
|
||||
triggerSubscriptionBuilderLogsContract,
|
||||
triggerSubscriptionBuilderUpdateContract,
|
||||
triggerSubscriptionBuilderVerifyUpdateContract,
|
||||
triggerSubscriptionDeleteContract,
|
||||
triggerSubscriptionsContract,
|
||||
triggerSubscriptionUpdateContract,
|
||||
triggerSubscriptionVerifyContract,
|
||||
} from './console/trigger'
|
||||
import { trialAppDatasetsContract, trialAppInfoContract, trialAppParametersContract, trialAppWorkflowsContract } from './console/try-app'
|
||||
import { collectionPluginsContract, collectionsContract, searchAdvancedContract } from './marketplace'
|
||||
|
||||
@@ -24,6 +41,23 @@ export const consoleRouterContract = {
|
||||
invoices: invoicesContract,
|
||||
bindPartnerStack: bindPartnerStackContract,
|
||||
},
|
||||
triggers: {
|
||||
list: triggersContract,
|
||||
providerInfo: triggerProviderInfoContract,
|
||||
subscriptions: triggerSubscriptionsContract,
|
||||
subscriptionBuilderCreate: triggerSubscriptionBuilderCreateContract,
|
||||
subscriptionBuilderUpdate: triggerSubscriptionBuilderUpdateContract,
|
||||
subscriptionBuilderVerifyUpdate: triggerSubscriptionBuilderVerifyUpdateContract,
|
||||
subscriptionVerify: triggerSubscriptionVerifyContract,
|
||||
subscriptionBuild: triggerSubscriptionBuildContract,
|
||||
subscriptionDelete: triggerSubscriptionDeleteContract,
|
||||
subscriptionUpdate: triggerSubscriptionUpdateContract,
|
||||
subscriptionBuilderLogs: triggerSubscriptionBuilderLogsContract,
|
||||
oauthConfig: triggerOAuthConfigContract,
|
||||
oauthConfigure: triggerOAuthConfigureContract,
|
||||
oauthDelete: triggerOAuthDeleteContract,
|
||||
oauthInitiate: triggerOAuthInitiateContract,
|
||||
},
|
||||
}
|
||||
|
||||
export type ConsoleInputs = InferContractRouterInputs<typeof consoleRouterContract>
|
||||
|
||||
@@ -38,6 +38,11 @@ pnpm lint:tss

This command lints the entire project and is intended for final verification before committing or pushing changes.

### Introducing New Plugins or Rules

If a new rule reports many errors in existing code, or its automatic fixes would produce an overly large diff, do not apply it with the `--fix` option.
Instead, introduce the rule first, use the `--suppress-all` option to temporarily suppress the existing errors, and fix them gradually in follow-up changes.

## Type Check

You should be able to see suggestions from TypeScript in your editor for all open files.

File diff suppressed because it is too large.
@@ -1,9 +1,9 @@
// @ts-check
import antfu from '@antfu/eslint-config'
import pluginQuery from '@tanstack/eslint-plugin-query'
import tailwindcss from 'eslint-plugin-better-tailwindcss'
import sonar from 'eslint-plugin-sonarjs'
import storybook from 'eslint-plugin-storybook'
import tailwind from 'eslint-plugin-tailwindcss'
import dify from './eslint-rules/index.js'

export default antfu(
@@ -23,7 +23,7 @@ export default antfu(
},
},
nextjs: true,
ignores: ['public', 'types/doc-paths.ts'],
ignores: ['public', 'types/doc-paths.ts', 'eslint-suppressions.json'],
typescript: {
overrides: {
'ts/consistent-type-definitions': ['error', 'type'],
@@ -66,42 +66,16 @@ export default antfu(
sonarjs: sonar,
},
},
tailwind.configs['flat/recommended'],
{
settings: {
tailwindcss: {
// These are the default values but feel free to customize
callees: ['classnames', 'clsx', 'ctl', 'cn', 'classNames'],
config: 'tailwind.config.js', // returned from `loadConfig()` utility if not provided
cssFiles: [
'**/*.css',
'!**/node_modules',
'!**/.*',
'!**/dist',
'!**/build',
'!**/.storybook',
'!**/.next',
'!**/.public',
],
cssFilesRefreshRate: 5_000,
removeDuplicates: true,
skipClassAttribute: false,
whitelist: [],
tags: [], // can be set to e.g. ['tw'] for use in tw`bg-blue`
classRegex: '^class(Name)?$', // can be modified to support custom attributes. E.g. "^tw$" for `twin.macro`
},
files: ['**/*.{ts,tsx}'],
plugins: {
tailwindcss,
},
rules: {
// due to 1k lines of tailwind config, these rule have performance issue
'tailwindcss/no-contradicting-classname': 'off',
'tailwindcss/enforces-shorthand': 'off',
'tailwindcss/no-custom-classname': 'off',
'tailwindcss/no-unnecessary-arbitrary-value': 'off',

'tailwindcss/no-arbitrary-value': 'off',
'tailwindcss/classnames-order': 'warn',
'tailwindcss/enforces-negative-arbitrary-values': 'warn',
'tailwindcss/migration-from-tailwind-2': 'warn',
'tailwindcss/enforce-consistent-class-order': 'error',
'tailwindcss/no-duplicate-classes': 'error',
'tailwindcss/no-unnecessary-whitespace': 'error',
'tailwindcss/no-unknown-classes': 'warn',
},
},
{

@@ -29,7 +29,7 @@ const remoteImageURLs = ([hasSetWebPrefix ? new URL(`${process.env.NEXT_PUBLIC_W

const nextConfig: NextConfig = {
basePath: process.env.NEXT_PUBLIC_BASE_PATH || '',
serverExternalPackages: ['esbuild-wasm'],
serverExternalPackages: ['esbuild'],
transpilePackages: ['echarts', 'zrender'],
turbopack: {
rules: codeInspectorPlugin({

@@ -1,7 +1,7 @@
{
"name": "dify-web",
"type": "module",
"version": "1.12.0",
"version": "1.12.1",
"private": true,
"packageManager": "pnpm@10.27.0+sha512.72d699da16b1179c14ba9e64dc71c9a40988cbdc65c264cb0e489db7de917f20dcf4d64d8723625f2969ba52d4b7e2a1170682d9ac2a5dcaeaab732b7e16f04a",
"imports": {
@@ -46,7 +46,8 @@
"uglify-embed": "node ./bin/uglify-embed",
"i18n:check": "tsx ./scripts/check-i18n.js",
"test": "vitest run",
"test:coverage": "vitest run --coverage --reporter=dot --silent=passed-only",
"test:coverage": "vitest run --coverage",
"test:ci": "vitest run --coverage --reporter vitest-tiny-reporter --silent=passed-only",
"test:watch": "vitest --watch",
"analyze-component": "node ./scripts/analyze-component.js",
"refactor-component": "node ./scripts/refactor-component.js",
@@ -153,8 +154,9 @@
"sharp": "0.33.5",
"sortablejs": "1.15.6",
"string-ts": "2.3.1",
"tailwind-merge": "2.6.0",
"tailwind-merge": "2.6.1",
"tldts": "7.0.17",
"ufo": "1.6.3",
"use-context-selector": "2.0.0",
"uuid": "10.0.0",
"zod": "3.25.76",
@@ -164,21 +166,21 @@
"devDependencies": {
"@antfu/eslint-config": "7.2.0",
"@chromatic-com/storybook": "5.0.0",
"@eslint-react/eslint-plugin": "2.8.1",
"@eslint-react/eslint-plugin": "2.9.4",
"@mdx-js/loader": "3.1.1",
"@mdx-js/react": "3.1.1",
"@next/bundle-analyzer": "16.1.5",
"@next/eslint-plugin-next": "16.1.6",
"@next/mdx": "16.1.5",
"@rgrove/parse-xml": "4.2.0",
"@serwist/turbopack": "9.5.0",
"@serwist/turbopack": "9.5.4",
"@storybook/addon-docs": "10.2.0",
"@storybook/addon-links": "10.2.0",
"@storybook/addon-onboarding": "10.2.0",
"@storybook/addon-themes": "10.2.0",
"@storybook/nextjs-vite": "10.2.0",
"@storybook/react": "10.2.0",
"@tanstack/eslint-plugin-query": "5.91.3",
"@tanstack/eslint-plugin-query": "5.91.4",
"@tanstack/react-devtools": "0.9.2",
"@tanstack/react-form-devtools": "0.2.12",
"@tanstack/react-query-devtools": "5.90.2",
@@ -209,13 +211,13 @@
"autoprefixer": "10.4.21",
"code-inspector-plugin": "1.3.6",
"cross-env": "10.1.0",
"esbuild-wasm": "0.27.2",
"esbuild": "0.27.2",
"eslint": "9.39.2",
"eslint-plugin-better-tailwindcss": "4.1.1",
"eslint-plugin-react-hooks": "7.0.1",
"eslint-plugin-react-refresh": "0.4.26",
"eslint-plugin-react-refresh": "0.5.0",
"eslint-plugin-sonarjs": "3.0.6",
"eslint-plugin-storybook": "10.2.1",
"eslint-plugin-tailwindcss": "3.18.2",
"eslint-plugin-storybook": "10.2.6",
"husky": "9.1.7",
"jsdom": "27.3.0",
"jsdom-testing-mocks": "1.16.0",
@@ -225,16 +227,17 @@
"postcss": "8.5.6",
"react-scan": "0.4.3",
"sass": "1.93.2",
"serwist": "9.5.0",
"serwist": "9.5.4",
"storybook": "10.2.0",
"tailwindcss": "3.4.18",
"tailwindcss": "3.4.19",
"tsx": "4.21.0",
"typescript": "5.9.3",
"uglify-js": "3.19.3",
"vite": "7.3.1",
"vite-tsconfig-paths": "6.0.4",
"vitest": "4.0.17",
"vitest-canvas-mock": "1.1.3"
"vitest-canvas-mock": "1.1.3",
"vitest-tiny-reporter": "1.3.1"
},
"pnpm": {
"overrides": {

555 web/pnpm-lock.yaml (generated)
File diff suppressed because it is too large
@@ -10,17 +10,14 @@ import type {
} from '@/app/components/workflow/block-selector/types'
import { useMutation, useQuery, useQueryClient } from '@tanstack/react-query'
import { CollectionType } from '@/app/components/tools/types'
import { del, get, post } from './base'
import { consoleClient, consoleQuery } from '@/service/client'
import { get, post } from './base'
import { useInvalid } from './use-base'

const NAME_SPACE = 'triggers'

// Trigger Provider Service - Provider ID Format: plugin_id/provider_name

// Convert backend API response to frontend ToolWithProvider format
const convertToTriggerWithProvider = (provider: TriggerProviderApiEntity): TriggerWithProvider => {
return {
// Collection fields
id: provider.plugin_id || provider.name,
name: provider.name,
author: provider.author,
@@ -58,12 +55,9 @@ const convertToTriggerWithProvider = (provider: TriggerProviderApiEntity): Trigg
labels: provider.tags || [],
output_schema: event.output_schema || {},
})),

// Trigger-specific schema fields
subscription_constructor: provider.subscription_constructor,
subscription_schema: provider.subscription_schema,
supported_creation_methods: provider.supported_creation_methods,

meta: {
version: '1.0',
},
@@ -72,22 +66,20 @@ const convertToTriggerWithProvider = (provider: TriggerProviderApiEntity): Trigg

export const useAllTriggerPlugins = (enabled = true) => {
return useQuery<TriggerWithProvider[]>({
queryKey: [NAME_SPACE, 'all'],
queryKey: consoleQuery.triggers.list.queryKey({ input: {} }),
queryFn: async () => {
const response = await get<TriggerProviderApiEntity[]>('/workspaces/current/triggers')
const response = await consoleClient.triggers.list({})
return response.map(convertToTriggerWithProvider)
},
enabled,
staleTime: 0,
gcTime: 0,
})
}

export const useTriggerPluginsByType = (triggerType: string, enabled = true) => {
return useQuery<TriggerWithProvider[]>({
queryKey: [NAME_SPACE, 'byType', triggerType],
queryKey: consoleQuery.triggers.list.queryKey({ input: { query: { type: triggerType } } }),
queryFn: async () => {
const response = await get<TriggerProviderApiEntity[]>(`/workspaces/current/triggers?type=${triggerType}`)
const response = await consoleClient.triggers.list({ query: { type: triggerType } })
return response.map(convertToTriggerWithProvider)
},
enabled: enabled && !!triggerType,
@@ -95,25 +87,23 @@ export const useTriggerPluginsByType = (triggerType: string, enabled = true) =>
}

export const useInvalidateAllTriggerPlugins = () => {
return useInvalid([NAME_SPACE, 'all'])
return useInvalid(consoleQuery.triggers.list.queryKey({ input: {} }))
}

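Because the hook and its invalidator now derive their keys from the same contract entry, fetching and invalidation can no longer drift apart the way the hand-written keys could (note that the subscriptions invalidator below used to target `[NAME_SPACE, 'subscriptions', provider]` while its query keyed on `[NAME_SPACE, 'list-subscriptions', provider]`). A hypothetical consumer sketch, assuming `useInvalid` returns an invalidation callback:

```tsx
import { useAllTriggerPlugins, useInvalidateAllTriggerPlugins } from '@/service/use-triggers' // path assumed

const TriggerPluginList = () => {
  const { data: plugins = [], isLoading } = useAllTriggerPlugins()
  const invalidateAll = useInvalidateAllTriggerPlugins()

  if (isLoading)
    return <div>Loading...</div>

  return (
    <div>
      {/* Refetches through the same contract-derived key the query was cached under */}
      <button onClick={() => invalidateAll()}>Refresh</button>
      <ul>
        {plugins.map(plugin => <li key={plugin.id}>{plugin.name}</li>)}
      </ul>
    </div>
  )
}

export default TriggerPluginList
```
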
// ===== Trigger Subscriptions Management =====

export const useTriggerProviderInfo = (provider: string, enabled = true) => {
return useQuery<TriggerProviderApiEntity>({
queryKey: [NAME_SPACE, 'provider-info', provider],
queryFn: () => get<TriggerProviderApiEntity>(`/workspaces/current/trigger-provider/${provider}/info`),
queryKey: consoleQuery.triggers.providerInfo.queryKey({ input: { params: { provider } } }),
queryFn: () => consoleClient.triggers.providerInfo({ params: { provider } }),
enabled: enabled && !!provider,
staleTime: 0,
gcTime: 0,
})
}

export const useTriggerSubscriptions = (provider: string, enabled = true) => {
return useQuery<TriggerSubscription[]>({
queryKey: [NAME_SPACE, 'list-subscriptions', provider],
queryFn: () => get<TriggerSubscription[]>(`/workspaces/current/trigger-provider/${provider}/subscriptions/list`),
queryKey: consoleQuery.triggers.subscriptions.queryKey({ input: { params: { provider } } }),
queryFn: () => consoleClient.triggers.subscriptions({ params: { provider } }),
enabled: enabled && !!provider,
})
}
@@ -122,30 +112,30 @@ export const useInvalidateTriggerSubscriptions = () => {
const queryClient = useQueryClient()
return (provider: string) => {
queryClient.invalidateQueries({
queryKey: [NAME_SPACE, 'subscriptions', provider],
queryKey: consoleQuery.triggers.subscriptions.queryKey({ input: { params: { provider } } }),
})
}
}

export const useCreateTriggerSubscriptionBuilder = () => {
return useMutation({
mutationKey: [NAME_SPACE, 'create-subscription-builder'],
mutationKey: consoleQuery.triggers.subscriptionBuilderCreate.mutationKey(),
mutationFn: (payload: {
provider: string
credential_type?: string
}) => {
const { provider, ...body } = payload
return post<{ subscription_builder: TriggerSubscriptionBuilder }>(
`/workspaces/current/trigger-provider/${provider}/subscriptions/builder/create`,
{ body },
)
return consoleClient.triggers.subscriptionBuilderCreate({
params: { provider },
body,
})
},
})
}

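A hypothetical call-site sketch for the migrated mutation. It assumes the contract keeps the `{ subscription_builder }` response shape of the endpoint it replaces; the credential type is illustrative only.

```ts
import { useCreateTriggerSubscriptionBuilder } from '@/service/use-triggers' // path assumed

export const useStartSubscriptionSetup = (provider: string) => {
  const { mutateAsync: createBuilder, isPending } = useCreateTriggerSubscriptionBuilder()

  const start = async () => {
    // `provider` travels as a path param per the contract; the remaining fields
    // become the request body, exactly as in the destructuring above.
    const { subscription_builder } = await createBuilder({
      provider,
      credential_type: 'oauth2', // illustrative
    })
    return subscription_builder
  }

  return { start, isPending }
}
```
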
export const useUpdateTriggerSubscriptionBuilder = () => {
return useMutation({
mutationKey: [NAME_SPACE, 'update-subscription-builder'],
mutationKey: consoleQuery.triggers.subscriptionBuilderUpdate.mutationKey(),
mutationFn: (payload: {
provider: string
subscriptionBuilderId: string
@@ -155,17 +145,17 @@ export const useUpdateTriggerSubscriptionBuilder = () => {
credentials?: Record<string, unknown>
}) => {
const { provider, subscriptionBuilderId, ...body } = payload
return post<TriggerSubscriptionBuilder>(
`/workspaces/current/trigger-provider/${provider}/subscriptions/builder/update/${subscriptionBuilderId}`,
{ body },
)
return consoleClient.triggers.subscriptionBuilderUpdate({
params: { provider, subscriptionBuilderId },
body,
})
},
})
}

export const useVerifyAndUpdateTriggerSubscriptionBuilder = () => {
return useMutation({
mutationKey: [NAME_SPACE, 'verify-and-update-subscription-builder'],
mutationKey: consoleQuery.triggers.subscriptionBuilderVerifyUpdate.mutationKey(),
mutationFn: (payload: {
provider: string
subscriptionBuilderId: string
@@ -183,7 +173,7 @@ export const useVerifyAndUpdateTriggerSubscriptionBuilder = () => {

export const useVerifyTriggerSubscription = () => {
return useMutation({
mutationKey: [NAME_SPACE, 'verify-subscription'],
mutationKey: consoleQuery.triggers.subscriptionVerify.mutationKey(),
mutationFn: (payload: {
provider: string
subscriptionId: string
@@ -208,24 +198,24 @@ export type BuildTriggerSubscriptionPayload = {

export const useBuildTriggerSubscription = () => {
return useMutation({
mutationKey: [NAME_SPACE, 'build-subscription'],
mutationKey: consoleQuery.triggers.subscriptionBuild.mutationKey(),
mutationFn: (payload: BuildTriggerSubscriptionPayload) => {
const { provider, subscriptionBuilderId, ...body } = payload
return post(
`/workspaces/current/trigger-provider/${provider}/subscriptions/builder/build/${subscriptionBuilderId}`,
{ body },
)
return consoleClient.triggers.subscriptionBuild({
params: { provider, subscriptionBuilderId },
body,
})
},
})
}

export const useDeleteTriggerSubscription = () => {
return useMutation({
mutationKey: [NAME_SPACE, 'delete-subscription'],
mutationKey: consoleQuery.triggers.subscriptionDelete.mutationKey(),
mutationFn: (subscriptionId: string) => {
return post<{ result: string }>(
`/workspaces/current/trigger-provider/${subscriptionId}/subscriptions/delete`,
)
return consoleClient.triggers.subscriptionDelete({
params: { subscriptionId },
})
},
})
}
@@ -240,13 +230,13 @@ export type UpdateTriggerSubscriptionPayload = {

export const useUpdateTriggerSubscription = () => {
return useMutation({
mutationKey: [NAME_SPACE, 'update-subscription'],
mutationKey: consoleQuery.triggers.subscriptionUpdate.mutationKey(),
mutationFn: (payload: UpdateTriggerSubscriptionPayload) => {
const { subscriptionId, ...body } = payload
return post<{ result: string, id: string }>(
`/workspaces/current/trigger-provider/${subscriptionId}/subscriptions/update`,
{ body },
)
return consoleClient.triggers.subscriptionUpdate({
params: { subscriptionId },
body,
})
},
})
}
@@ -262,10 +252,8 @@ export const useTriggerSubscriptionBuilderLogs = (
const { enabled = true, refetchInterval = false } = options

return useQuery<{ logs: TriggerLogEntity[] }>({
queryKey: [NAME_SPACE, 'subscription-builder-logs', provider, subscriptionBuilderId],
queryFn: () => get(
`/workspaces/current/trigger-provider/${provider}/subscriptions/builder/logs/${subscriptionBuilderId}`,
),
queryKey: consoleQuery.triggers.subscriptionBuilderLogs.queryKey({ input: { params: { provider, subscriptionBuilderId } } }),
queryFn: () => consoleClient.triggers.subscriptionBuilderLogs({ params: { provider, subscriptionBuilderId } }),
enabled: enabled && !!provider && !!subscriptionBuilderId,
refetchInterval,
})
@@ -274,8 +262,8 @@ export const useTriggerSubscriptionBuilderLogs = (
// ===== OAuth Management =====
export const useTriggerOAuthConfig = (provider: string, enabled = true) => {
return useQuery<TriggerOAuthConfig>({
queryKey: [NAME_SPACE, 'oauth-config', provider],
queryFn: () => get<TriggerOAuthConfig>(`/workspaces/current/trigger-provider/${provider}/oauth/client`),
queryKey: consoleQuery.triggers.oauthConfig.queryKey({ input: { params: { provider } } }),
queryFn: () => consoleClient.triggers.oauthConfig({ params: { provider } }),
enabled: enabled && !!provider,
})
}
@@ -288,31 +276,31 @@ export type ConfigureTriggerOAuthPayload = {

export const useConfigureTriggerOAuth = () => {
return useMutation({
mutationKey: [NAME_SPACE, 'configure-oauth'],
mutationKey: consoleQuery.triggers.oauthConfigure.mutationKey(),
mutationFn: (payload: ConfigureTriggerOAuthPayload) => {
const { provider, ...body } = payload
return post<{ result: string }>(
`/workspaces/current/trigger-provider/${provider}/oauth/client`,
{ body },
)
return consoleClient.triggers.oauthConfigure({
params: { provider },
body,
})
},
})
}

export const useDeleteTriggerOAuth = () => {
return useMutation({
mutationKey: [NAME_SPACE, 'delete-oauth'],
mutationKey: consoleQuery.triggers.oauthDelete.mutationKey(),
mutationFn: (provider: string) => {
return del<{ result: string }>(
`/workspaces/current/trigger-provider/${provider}/oauth/client`,
)
return consoleClient.triggers.oauthDelete({
params: { provider },
})
},
})
}

export const useInitiateTriggerOAuth = () => {
return useMutation({
mutationKey: [NAME_SPACE, 'initiate-oauth'],
mutationKey: consoleQuery.triggers.oauthInitiate.mutationKey(),
mutationFn: (provider: string) => {
return get<{ authorization_url: string, subscription_builder: TriggerSubscriptionBuilder }>(
`/workspaces/current/trigger-provider/${provider}/subscriptions/oauth/authorize`,
@@ -336,7 +324,6 @@ export const useTriggerPluginDynamicOptions = (payload: {
return useQuery<{ options: FormOption[] }>({
queryKey: [NAME_SPACE, 'dynamic-options', payload.plugin_id, payload.provider, payload.action, payload.parameter, payload.credential_id, payload.credentials, payload.extra],
queryFn: () => {
// Use new endpoint with POST when credentials provided (for edit mode)
if (payload.credentials) {
return post<{ options: FormOption[] }>(
'/workspaces/current/plugin/parameters/dynamic-options-with-credentials',
@@ -353,7 +340,6 @@ export const useTriggerPluginDynamicOptions = (payload: {
{ silent: true },
)
}
// Use original GET endpoint for normal cases
return get<{ options: FormOption[] }>(
'/workspaces/current/plugin/parameters/dynamic-options',
{
@@ -372,7 +358,6 @@ export const useTriggerPluginDynamicOptions = (payload: {
enabled: enabled && !!payload.plugin_id && !!payload.provider && !!payload.action && !!payload.parameter && !!payload.credential_id,
retry: 0,
staleTime: 0,
gcTime: 0,
})
}

@@ -382,7 +367,7 @@ export const useInvalidateTriggerOAuthConfig = () => {
const queryClient = useQueryClient()
return (provider: string) => {
queryClient.invalidateQueries({
queryKey: [NAME_SPACE, 'oauth-config', provider],
queryKey: consoleQuery.triggers.oauthConfig.queryKey({ input: { params: { provider } } }),
})
}
}

2 web/types/i18n.d.ts (vendored)
@@ -27,5 +27,3 @@ export type I18nKeysWithPrefix<
> = Prefix extends ''
? keyof Resources[NS]
: Extract<keyof Resources[NS], `${Prefix}${string}`>

type A = I18nKeysWithPrefix<'billing'>

@@ -1,157 +0,0 @@
/**
* Test suite for the classnames utility function
* This utility combines the classnames library with tailwind-merge
* to handle conditional CSS classes and merge conflicting Tailwind classes
*/
import { cn } from './classnames'

describe('classnames', () => {
/**
* Tests basic classnames library features:
* - String concatenation
* - Array handling
* - Falsy value filtering
* - Object-based conditional classes
*/
it('classnames libs feature', () => {
expect(cn('foo')).toBe('foo')
expect(cn('foo', 'bar')).toBe('foo bar')
expect(cn(['foo', 'bar'])).toBe('foo bar')

expect(cn(undefined)).toBe('')
expect(cn(null)).toBe('')
expect(cn(false)).toBe('')

expect(cn({
foo: true,
bar: false,
baz: true,
})).toBe('foo baz')
})

/**
* Tests tailwind-merge functionality:
* - Conflicting class resolution (last one wins)
* - Modifier handling (hover, focus, etc.)
* - Important prefix (!)
* - Custom color classes
* - Arbitrary values
*/
it('tailwind-merge', () => {
/* eslint-disable tailwindcss/classnames-order */
expect(cn('p-0')).toBe('p-0')
expect(cn('text-right text-center text-left')).toBe('text-left')
expect(cn('pl-4 p-8')).toBe('p-8')
expect(cn('m-[2px] m-[4px]')).toBe('m-[4px]')
expect(cn('m-1 m-[4px]')).toBe('m-[4px]')
expect(cn('overflow-x-auto hover:overflow-x-hidden overflow-x-scroll')).toBe(
'hover:overflow-x-hidden overflow-x-scroll',
)
expect(cn('h-10 h-min')).toBe('h-min')
expect(cn('bg-grey-5 bg-hotpink')).toBe('bg-hotpink')

expect(cn('hover:block hover:inline')).toBe('hover:inline')

expect(cn('font-medium !font-bold')).toBe('font-medium !font-bold')
expect(cn('!font-medium !font-bold')).toBe('!font-bold')

expect(cn('text-gray-100 text-primary-200')).toBe('text-primary-200')
expect(cn('text-some-unknown-color text-components-input-bg-disabled text-primary-200')).toBe('text-primary-200')
expect(cn('bg-some-unknown-color bg-components-input-bg-disabled bg-primary-200')).toBe('bg-primary-200')

expect(cn('border-t border-white/10')).toBe('border-t border-white/10')
expect(cn('border-t border-white')).toBe('border-t border-white')
expect(cn('text-3.5xl text-black')).toBe('text-3.5xl text-black')
})

/**
* Tests the integration of classnames and tailwind-merge:
* - Object-based conditional classes with Tailwind conflict resolution
*/
it('classnames combined with tailwind-merge', () => {
expect(cn('text-right', {
'text-center': true,
})).toBe('text-center')

expect(cn('text-right', {
'text-center': false,
})).toBe('text-right')
})

/**
* Tests handling of multiple mixed argument types:
* - Strings, arrays, and objects in a single call
* - Tailwind merge working across different argument types
*/
it('multiple mixed argument types', () => {
expect(cn('foo', ['bar', 'baz'], { qux: true, quux: false })).toBe('foo bar baz qux')
expect(cn('p-4', ['p-2', 'm-4'], { 'text-left': true, 'text-right': true })).toBe('p-2 m-4 text-right')
})

/**
* Tests nested array handling:
* - Deep array flattening
* - Tailwind merge with nested structures
*/
it('nested arrays', () => {
expect(cn(['foo', ['bar', 'baz']])).toBe('foo bar baz')
expect(cn(['p-4', ['p-2', 'text-center']])).toBe('p-2 text-center')
})

/**
* Tests empty input handling:
* - Empty strings, arrays, and objects
* - Mixed empty and non-empty values
*/
it('empty inputs', () => {
expect(cn('')).toBe('')
expect(cn([])).toBe('')
expect(cn({})).toBe('')
expect(cn('', [], {})).toBe('')
expect(cn('foo', '', 'bar')).toBe('foo bar')
})

/**
* Tests number input handling:
* - Truthy numbers converted to strings
* - Zero treated as falsy
*/
it('numbers as inputs', () => {
expect(cn(1)).toBe('1')
expect(cn(0)).toBe('')
expect(cn('foo', 1, 'bar')).toBe('foo 1 bar')
})

/**
* Tests multiple object arguments:
* - Object merging
* - Tailwind conflict resolution across objects
*/
it('multiple objects', () => {
expect(cn({ foo: true }, { bar: true })).toBe('foo bar')
expect(cn({ foo: true, bar: false }, { bar: true, baz: true })).toBe('foo bar baz')
expect(cn({ 'p-4': true }, { 'p-2': true })).toBe('p-2')
})

/**
* Tests complex edge cases:
* - Mixed falsy values
* - Nested arrays with falsy values
* - Multiple conflicting Tailwind classes
*/
it('complex edge cases', () => {
expect(cn('foo', null, undefined, false, 'bar', 0, 1, '')).toBe('foo bar 1')
expect(cn(['foo', null, ['bar', undefined, 'baz']])).toBe('foo bar baz')
expect(cn('text-sm', { 'text-lg': false, 'text-xl': true }, 'text-2xl')).toBe('text-2xl')
})

/**
* Tests important (!) modifier behavior:
* - Important modifiers in objects
* - Conflict resolution with important prefix
*/
it('important modifier with objects', () => {
expect(cn({ '!font-medium': true }, { '!font-bold': true })).toBe('!font-bold')
expect(cn('font-normal', { '!font-bold': true })).toBe('font-normal !font-bold')
})
})

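The removed suite's own header describes `cn` as the classnames library composed with tailwind-merge. A minimal sketch of such a utility, for orientation only (the actual `web/utils/classnames.ts` may differ in detail):

```ts
import classNames from 'classnames'
import { twMerge } from 'tailwind-merge'

// Let classnames flatten conditional/array/object inputs into one string, then let
// tailwind-merge drop conflicting Tailwind utilities, keeping the last occurrence.
export const cn = (...inputs: Parameters<typeof classNames>) => twMerge(classNames(...inputs))
```

Under that composition, `cn('pl-4 p-8')` collapses to `'p-8'`, which is exactly what the tailwind-merge assertions above exercised.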