Mirror of https://github.com/langgenius/dify.git (synced 2026-02-05 23:53:58 +00:00)

Compare commits (10 commits): copilot/fi ... refactor/t
| Author | SHA1 | Date |
|---|---|---|
| | 20ad46ed59 | |
| | cb970e54da | |
| | e04f2a0786 | |
| | 7202a24bcf | |
| | be8f265e43 | |
| | 9e54f086dc | |
| | 8c31b69c8e | |
| | b886b3f6c8 | |
| | ef0d18bb61 | |
| | c56ad8e323 | |
23  .github/workflows/autofix.yml (vendored)
@@ -79,29 +79,6 @@ jobs:
          find . -name "*.py" -type f -exec sed -i.bak -E 's/"([^"]+)" \| None/Optional["\1"]/g; s/'"'"'([^'"'"']+)'"'"' \| None/Optional['"'"'\1'"'"']/g' {} \;
          find . -name "*.py.bak" -type f -delete

-     - name: Install pnpm
-       uses: pnpm/action-setup@v4
-       with:
-         package_json_file: web/package.json
-         run_install: false
-
-     - name: Setup Node.js
-       uses: actions/setup-node@v6
-       with:
-         node-version: 24
-         cache: pnpm
-         cache-dependency-path: ./web/pnpm-lock.yaml
-
-     - name: Install web dependencies
-       run: |
-         cd web
-         pnpm install --frozen-lockfile
-
-     - name: ESLint autofix
-       run: |
-         cd web
-         pnpm lint:fix || true
-
      # mdformat breaks YAML front matter in markdown files. Add --exclude for directories containing YAML front matter.
      - name: mdformat
        run: |
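The retained sed step rewrites string-form PEP 604 annotations such as `"X" | None` back to `Optional["X"]`. As a rough illustration of what that substitution does (not part of the workflow; the sample line below is made up), the same rewrite expressed with Python's `re` module:

```python
import re

# Mirrors the two sed substitutions from the workflow, applied to an in-memory string.
line = 'def load(config: "Settings" | None, name: \'str\' | None) -> None: ...'

line = re.sub(r'"([^"]+)" \| None', r'Optional["\1"]', line)   # double-quoted names
line = re.sub(r"'([^']+)' \| None", r"Optional['\1']", line)   # single-quoted names

print(line)
# def load(config: Optional["Settings"], name: Optional['str']) -> None: ...
```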
2  .github/workflows/web-tests.yml (vendored)
@@ -39,7 +39,7 @@ jobs:
        run: pnpm install --frozen-lockfile

      - name: Run tests
-       run: pnpm test:coverage
+       run: pnpm test:ci

      - name: Coverage Summary
        if: always()
@@ -136,7 +136,6 @@ ignore_imports =
    core.workflow.nodes.llm.llm_utils -> models.provider
    core.workflow.nodes.llm.llm_utils -> services.credit_pool_service
    core.workflow.nodes.llm.node -> core.tools.signature
-   core.workflow.nodes.template_transform.template_transform_node -> configs
    core.workflow.nodes.tool.tool_node -> core.callback_handler.workflow_tool_callback_handler
    core.workflow.nodes.tool.tool_node -> core.tools.tool_engine
    core.workflow.nodes.tool.tool_node -> core.tools.tool_manager
@@ -1,3 +1,4 @@
import logging
import uuid
from datetime import datetime
from typing import Any, Literal, TypeAlias
@@ -54,6 +55,8 @@ ALLOW_CREATE_APP_MODES = ["chat", "agent-chat", "advanced-chat", "workflow", "co

register_enum_models(console_ns, IconType)

_logger = logging.getLogger(__name__)


class AppListQuery(BaseModel):
    page: int = Field(default=1, ge=1, le=99999, description="Page number (1-99999)")
@@ -499,6 +502,7 @@ class AppListApi(Resource):
            select(Workflow).where(
                Workflow.version == Workflow.VERSION_DRAFT,
                Workflow.app_id.in_(workflow_capable_app_ids),
                Workflow.tenant_id == current_tenant_id,
            )
        )
        .scalars()
@@ -510,12 +514,14 @@ class AppListApi(Resource):
            NodeType.TRIGGER_PLUGIN,
        }
        for workflow in draft_workflows:
            node_id = None
            try:
-               for _, node_data in workflow.walk_nodes():
+               for node_id, node_data in workflow.walk_nodes():
                    if node_data.get("type") in trigger_node_types:
                        draft_trigger_app_ids.add(str(workflow.app_id))
                        break
            except Exception:
                _logger.exception("error while walking nodes, workflow_id=%s, node_id=%s", workflow.id, node_id)
                continue

        for app in app_pagination.items:
@@ -47,6 +47,7 @@ class DifyNodeFactory(NodeFactory):
        code_providers: Sequence[type[CodeNodeProvider]] | None = None,
        code_limits: CodeNodeLimits | None = None,
        template_renderer: Jinja2TemplateRenderer | None = None,
        template_transform_max_output_length: int | None = None,
        http_request_http_client: HttpClientProtocol | None = None,
        http_request_tool_file_manager_factory: Callable[[], ToolFileManager] = ToolFileManager,
        http_request_file_manager: FileManagerProtocol | None = None,
@@ -68,6 +69,9 @@ class DifyNodeFactory(NodeFactory):
            max_object_array_length=dify_config.CODE_MAX_OBJECT_ARRAY_LENGTH,
        )
        self._template_renderer = template_renderer or CodeExecutorJinja2TemplateRenderer()
        self._template_transform_max_output_length = (
            template_transform_max_output_length or dify_config.TEMPLATE_TRANSFORM_MAX_LENGTH
        )
        self._http_request_http_client = http_request_http_client or ssrf_proxy
        self._http_request_tool_file_manager_factory = http_request_tool_file_manager_factory
        self._http_request_file_manager = http_request_file_manager or file_manager
@@ -122,6 +126,7 @@ class DifyNodeFactory(NodeFactory):
                graph_init_params=self.graph_init_params,
                graph_runtime_state=self.graph_runtime_state,
                template_renderer=self._template_renderer,
                max_output_length=self._template_transform_max_output_length,
            )

        if node_type == NodeType.HTTP_REQUEST:
@@ -6,7 +6,8 @@ from yarl import URL

from configs import dify_config
from core.helper.download import download_with_size_limit
-from core.plugin.entities.marketplace import MarketplacePluginDeclaration
+from core.plugin.entities.marketplace import MarketplacePluginDeclaration, MarketplacePluginSnapshot
from extensions.ext_redis import redis_client

marketplace_api_url = URL(str(dify_config.MARKETPLACE_API_URL))
logger = logging.getLogger(__name__)
@@ -43,28 +44,37 @@ def batch_fetch_plugin_by_ids(plugin_ids: list[str]) -> list[dict]:
    return data.get("data", {}).get("plugins", [])


def batch_fetch_plugin_manifests_ignore_deserialization_error(
    plugin_ids: list[str],
) -> Sequence[MarketplacePluginDeclaration]:
    if len(plugin_ids) == 0:
        return []

    url = str(marketplace_api_url / "api/v1/plugins/batch")
    response = httpx.post(url, json={"plugin_ids": plugin_ids}, headers={"X-Dify-Version": dify_config.project.version})
    response.raise_for_status()
    result: list[MarketplacePluginDeclaration] = []
    for plugin in response.json()["data"]["plugins"]:
        try:
            result.append(MarketplacePluginDeclaration.model_validate(plugin))
        except Exception:
            logger.exception(
                "Failed to deserialize marketplace plugin manifest for %s", plugin.get("plugin_id", "unknown")
            )

    return result


def record_install_plugin_event(plugin_unique_identifier: str):
    url = str(marketplace_api_url / "api/v1/stats/plugins/install_count")
    response = httpx.post(url, json={"unique_identifier": plugin_unique_identifier})
    response.raise_for_status()


def fetch_global_plugin_manifest(cache_key_prefix: str, cache_ttl: int) -> None:
    """
    Fetch all plugin manifests from marketplace and cache them in Redis.
    This should be called once per check cycle to populate the instance-level cache.

    Args:
        cache_key_prefix: Redis key prefix for caching plugin manifests
        cache_ttl: Cache TTL in seconds

    Raises:
        httpx.HTTPError: If the HTTP request fails
        Exception: If any other error occurs during fetching or caching
    """
    url = str(marketplace_api_url / "api/v1/dist/plugins/manifest.json")
    response = httpx.get(url, headers={"X-Dify-Version": dify_config.project.version}, timeout=30)
    response.raise_for_status()

    raw_json = response.json()
    plugins_data = raw_json.get("plugins", [])

    # Parse and cache all plugin snapshots
    for plugin_data in plugins_data:
        plugin_snapshot = MarketplacePluginSnapshot.model_validate(plugin_data)
        redis_client.setex(
            name=f"{cache_key_prefix}{plugin_snapshot.plugin_id}",
            time=cache_ttl,
            value=plugin_snapshot.model_dump_json(),
        )
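fetch_global_plugin_manifest writes one Redis key per plugin under the supplied prefix, so later lookups never hit the marketplace. A minimal sketch of how a consumer could read a snapshot back from that cache (the helper name here is illustrative; the real lookup in Dify lives in the auto-upgrade task further down, and the prefix matches the constant defined there):

```python
import json

from core.plugin.entities.marketplace import MarketplacePluginSnapshot
from extensions.ext_redis import redis_client

CACHE_KEY_PREFIX = "plugin_autoupgrade_check_task:cached_plugin_snapshot:"  # prefix used by the task below


def read_cached_snapshot(plugin_id: str) -> MarketplacePluginSnapshot | None:
    """Return the cached snapshot for plugin_id, or None if nothing is cached."""
    raw = redis_client.get(f"{CACHE_KEY_PREFIX}{plugin_id}")
    if raw is None:
        return None
    return MarketplacePluginSnapshot.model_validate(json.loads(raw))
```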
@@ -1,4 +1,4 @@
-from pydantic import BaseModel, Field, model_validator
+from pydantic import BaseModel, Field, computed_field, model_validator

from core.model_runtime.entities.provider_entities import ProviderEntity
from core.plugin.entities.endpoint import EndpointProviderDeclaration
@@ -48,3 +48,15 @@ class MarketplacePluginDeclaration(BaseModel):
        if "tool" in data and not data["tool"]:
            del data["tool"]
        return data


class MarketplacePluginSnapshot(BaseModel):
    org: str
    name: str
    latest_version: str
    latest_package_identifier: str
    latest_package_url: str

    @computed_field
    def plugin_id(self) -> str:
        return f"{self.org}/{self.name}"
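MarketplacePluginSnapshot carries only the fields needed to decide whether a newer package exists, plus a plugin_id computed from org/name. A quick sketch of the round trip (the field values below are made up for illustration):

```python
from core.plugin.entities.marketplace import MarketplacePluginSnapshot

# Hypothetical entry shaped like one item of manifest.json's "plugins" list.
snapshot = MarketplacePluginSnapshot.model_validate(
    {
        "org": "langgenius",
        "name": "openai",
        "latest_version": "1.2.3",
        "latest_package_identifier": "langgenius/openai:1.2.3@abc123",
        "latest_package_url": "https://example.com/openai-1.2.3.difypkg",
    }
)

assert snapshot.plugin_id == "langgenius/openai"  # derived from org/name
payload = snapshot.model_dump_json()              # this JSON is what gets cached in Redis
```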
@@ -1,7 +1,6 @@
from collections.abc import Mapping, Sequence
from typing import TYPE_CHECKING, Any

-from configs import dify_config
from core.workflow.enums import NodeType, WorkflowNodeExecutionStatus
from core.workflow.node_events import NodeRunResult
from core.workflow.nodes.base.node import Node
@@ -16,12 +15,13 @@ if TYPE_CHECKING:
    from core.workflow.entities import GraphInitParams
    from core.workflow.runtime import GraphRuntimeState

-MAX_TEMPLATE_TRANSFORM_OUTPUT_LENGTH = dify_config.TEMPLATE_TRANSFORM_MAX_LENGTH
+DEFAULT_TEMPLATE_TRANSFORM_MAX_OUTPUT_LENGTH = 400_000


class TemplateTransformNode(Node[TemplateTransformNodeData]):
    node_type = NodeType.TEMPLATE_TRANSFORM
    _template_renderer: Jinja2TemplateRenderer
    _max_output_length: int

    def __init__(
        self,
@@ -31,6 +31,7 @@ class TemplateTransformNode(Node[TemplateTransformNodeData]):
        graph_runtime_state: "GraphRuntimeState",
        *,
        template_renderer: Jinja2TemplateRenderer | None = None,
        max_output_length: int | None = None,
    ) -> None:
        super().__init__(
            id=id,
@@ -40,6 +41,10 @@ class TemplateTransformNode(Node[TemplateTransformNodeData]):
        )
        self._template_renderer = template_renderer or CodeExecutorJinja2TemplateRenderer()

        if max_output_length is not None and max_output_length <= 0:
            raise ValueError("max_output_length must be a positive integer")
        self._max_output_length = max_output_length or DEFAULT_TEMPLATE_TRANSFORM_MAX_OUTPUT_LENGTH

    @classmethod
    def get_default_config(cls, filters: Mapping[str, object] | None = None) -> Mapping[str, object]:
        """
@@ -69,11 +74,11 @@ class TemplateTransformNode(Node[TemplateTransformNodeData]):
        except TemplateRenderError as e:
            return NodeRunResult(inputs=variables, status=WorkflowNodeExecutionStatus.FAILED, error=str(e))

-       if len(rendered) > MAX_TEMPLATE_TRANSFORM_OUTPUT_LENGTH:
+       if len(rendered) > self._max_output_length:
            return NodeRunResult(
                inputs=variables,
                status=WorkflowNodeExecutionStatus.FAILED,
-               error=f"Output length exceeds {MAX_TEMPLATE_TRANSFORM_OUTPUT_LENGTH} characters",
+               error=f"Output length exceeds {self._max_output_length} characters",
            )

        return NodeRunResult(
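With this change the output limit is injected per node instead of read from dify_config at import time; DifyNodeFactory passes TEMPLATE_TRANSFORM_MAX_LENGTH through, and any falsy value falls back to the 400_000-character default. A small standalone sketch of just the validation rule introduced above (not using the real Node classes):

```python
DEFAULT_TEMPLATE_TRANSFORM_MAX_OUTPUT_LENGTH = 400_000


def resolve_max_output_length(max_output_length: int | None) -> int:
    """Mirror of the constructor check: reject non-positive values, fall back to the default."""
    if max_output_length is not None and max_output_length <= 0:
        raise ValueError("max_output_length must be a positive integer")
    return max_output_length or DEFAULT_TEMPLATE_TRANSFORM_MAX_OUTPUT_LENGTH


assert resolve_max_output_length(None) == 400_000  # not supplied -> default
assert resolve_max_output_length(10) == 10         # explicit limit wins
# resolve_max_output_length(0) would raise ValueError
```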
@@ -10,6 +10,10 @@ import models as models
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql


+def _is_pg(conn):
+    return conn.dialect.name == "postgresql"
+
# revision identifiers, used by Alembic.
revision = '7df29de0f6be'
down_revision = '03ea244985ce'
@@ -19,16 +23,31 @@ depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
-   op.create_table('tenant_credit_pools',
-       sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
-       sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
-       sa.Column('pool_type', sa.String(length=40), server_default='trial', nullable=False),
-       sa.Column('quota_limit', sa.BigInteger(), nullable=False),
-       sa.Column('quota_used', sa.BigInteger(), nullable=False),
-       sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
-       sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
-       sa.PrimaryKeyConstraint('id', name='tenant_credit_pool_pkey')
-   )
+   conn = op.get_bind()
+
+   if _is_pg(conn):
+       op.create_table('tenant_credit_pools',
+           sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
+           sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
+           sa.Column('pool_type', sa.String(length=40), server_default='trial', nullable=False),
+           sa.Column('quota_limit', sa.BigInteger(), nullable=False),
+           sa.Column('quota_used', sa.BigInteger(), nullable=False),
+           sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
+           sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
+           sa.PrimaryKeyConstraint('id', name='tenant_credit_pool_pkey')
+       )
+   else:
+       # For MySQL and other databases, UUID should be generated at application level
+       op.create_table('tenant_credit_pools',
+           sa.Column('id', models.types.StringUUID(), nullable=False),
+           sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
+           sa.Column('pool_type', sa.String(length=40), server_default='trial', nullable=False),
+           sa.Column('quota_limit', sa.BigInteger(), nullable=False),
+           sa.Column('quota_used', sa.BigInteger(), nullable=False),
+           sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
+           sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
+           sa.PrimaryKeyConstraint('id', name='tenant_credit_pool_pkey')
+       )
    with op.batch_alter_table('tenant_credit_pools', schema=None) as batch_op:
        batch_op.create_index('tenant_credit_pool_pool_type_idx', ['pool_type'], unique=False)
        batch_op.create_index('tenant_credit_pool_tenant_id_idx', ['tenant_id'], unique=False)
@@ -2166,7 +2166,9 @@ class TenantCreditPool(TypeBase):
        sa.Index("tenant_credit_pool_pool_type_idx", "pool_type"),
    )

-   id: Mapped[str] = mapped_column(StringUUID, primary_key=True, server_default=text("uuid_generate_v4()"), init=False)
+   id: Mapped[str] = mapped_column(
+       StringUUID, insert_default=lambda: str(uuid4()), default_factory=lambda: str(uuid4()), init=False
+   )
    tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
    pool_type: Mapped[str] = mapped_column(String(40), nullable=False, default="trial", server_default="trial")
    quota_limit: Mapped[int] = mapped_column(BigInteger, nullable=False, default=0)
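Moving the id default from a Postgres-only uuid_generate_v4() server default to insert_default/default_factory means the UUID is produced in Python, so the same model works on MySQL as well. A minimal sketch of the idea outside the Dify models (plain SQLAlchemy 2.0 types instead of StringUUID; the table name is made up):

```python
from uuid import uuid4

from sqlalchemy import String
from sqlalchemy.orm import DeclarativeBase, Mapped, MappedAsDataclass, mapped_column


class Base(MappedAsDataclass, DeclarativeBase):
    pass


class CreditPoolSketch(Base):
    __tablename__ = "credit_pools_sketch"

    # insert_default: value generated client-side at INSERT time (works on any database)
    # default_factory: value filled on the mapped dataclass when the object is constructed
    id: Mapped[str] = mapped_column(
        String(36),
        primary_key=True,
        insert_default=lambda: str(uuid4()),
        default_factory=lambda: str(uuid4()),
        init=False,
    )
    tenant_id: Mapped[str] = mapped_column(String(36))
```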
@@ -1,16 +1,24 @@
import logging
import math
import time

import click

import app
from core.helper.marketplace import fetch_global_plugin_manifest
from extensions.ext_database import db
from models.account import TenantPluginAutoUpgradeStrategy
from tasks import process_tenant_plugin_autoupgrade_check_task as check_task

logger = logging.getLogger(__name__)

AUTO_UPGRADE_MINIMAL_CHECKING_INTERVAL = 15 * 60  # 15 minutes
MAX_CONCURRENT_CHECK_TASKS = 20

# Import cache constants from the task module
CACHE_REDIS_KEY_PREFIX = check_task.CACHE_REDIS_KEY_PREFIX
CACHE_REDIS_TTL = check_task.CACHE_REDIS_TTL


@app.celery.task(queue="plugin")
def check_upgradable_plugin_task():
@@ -40,6 +48,22 @@ def check_upgradable_plugin_task():
    ) # make sure all strategies are checked in this interval
    batch_interval_time = (AUTO_UPGRADE_MINIMAL_CHECKING_INTERVAL / batch_chunk_count) if batch_chunk_count > 0 else 0

    if total_strategies == 0:
        click.echo(click.style("no strategies to process, skipping plugin manifest fetch.", fg="green"))
        return

    # Fetch and cache all plugin manifests before processing tenants
    # This reduces load on marketplace from 300k requests to 1 request per check cycle
    logger.info("fetching global plugin manifest from marketplace")
    try:
        fetch_global_plugin_manifest(CACHE_REDIS_KEY_PREFIX, CACHE_REDIS_TTL)
        logger.info("successfully fetched and cached global plugin manifest")
    except Exception as e:
        logger.exception("failed to fetch global plugin manifest")
        click.echo(click.style(f"failed to fetch global plugin manifest: {e}", fg="red"))
        click.echo(click.style("skipping plugin upgrade check for this cycle", fg="yellow"))
        return

    for i in range(0, total_strategies, MAX_CONCURRENT_CHECK_TASKS):
        batch_strategies = strategies[i : i + MAX_CONCURRENT_CHECK_TASKS]
        for strategy in batch_strategies:
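The batch pacing above spreads the per-tenant checks over the 15-minute window. A worked example of the arithmetic for a hypothetical 1,000 strategies (the interval expression is taken from the hunk; computing batch_chunk_count with math.ceil is an assumption, since that line falls outside the excerpt):

```python
import math

AUTO_UPGRADE_MINIMAL_CHECKING_INTERVAL = 15 * 60  # 900 seconds
MAX_CONCURRENT_CHECK_TASKS = 20

total_strategies = 1_000  # hypothetical number of tenant strategies
batch_chunk_count = math.ceil(total_strategies / MAX_CONCURRENT_CHECK_TASKS)  # 50 batches of 20
batch_interval_time = (
    AUTO_UPGRADE_MINIMAL_CHECKING_INTERVAL / batch_chunk_count if batch_chunk_count > 0 else 0
)

print(batch_chunk_count, batch_interval_time)  # 50 batches, 18.0 seconds between batches
```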
@@ -327,6 +327,17 @@ class AccountService:
    @staticmethod
    def delete_account(account: Account):
        """Delete account. This method only adds a task to the queue for deletion."""
        # Queue account deletion sync tasks for all workspaces BEFORE account deletion (enterprise only)
        from services.enterprise.account_deletion_sync import sync_account_deletion

        sync_success = sync_account_deletion(account_id=account.id, source="account_deleted")
        if not sync_success:
            logger.warning(
                "Enterprise account deletion sync failed for account %s; proceeding with local deletion.",
                account.id,
            )

        # Now proceed with async account deletion
        delete_account_task.delay(account.id)

    @staticmethod
@@ -1230,6 +1241,19 @@ class TenantService:
        if dify_config.BILLING_ENABLED:
            BillingService.clean_billing_info_cache(tenant.id)

        # Queue account deletion sync task for enterprise backend to reassign resources (enterprise only)
        from services.enterprise.account_deletion_sync import sync_workspace_member_removal

        sync_success = sync_workspace_member_removal(
            workspace_id=tenant.id, member_id=account.id, source="workspace_member_removed"
        )
        if not sync_success:
            logger.warning(
                "Enterprise workspace member removal sync failed: workspace_id=%s, member_id=%s",
                tenant.id,
                account.id,
            )

    @staticmethod
    def update_member_role(tenant: Tenant, member: Account, new_role: str, operator: Account):
        """Update member role"""
115  api/services/enterprise/account_deletion_sync.py (normal file)
@@ -0,0 +1,115 @@
import json
import logging
import uuid
from datetime import UTC, datetime

from redis import RedisError

from configs import dify_config
from extensions.ext_database import db
from extensions.ext_redis import redis_client
from models.account import TenantAccountJoin

logger = logging.getLogger(__name__)

ACCOUNT_DELETION_SYNC_QUEUE = "enterprise:member:sync:queue"
ACCOUNT_DELETION_SYNC_TASK_TYPE = "sync_member_deletion_from_workspace"


def _queue_task(workspace_id: str, member_id: str, *, source: str) -> bool:
    """
    Queue an account deletion sync task to Redis.

    Internal helper function. Do not call directly - use the public functions instead.

    Args:
        workspace_id: The workspace/tenant ID to sync
        member_id: The member/account ID that was removed
        source: Source of the sync request (for debugging/tracking)

    Returns:
        bool: True if task was queued successfully, False otherwise
    """
    try:
        task = {
            "task_id": str(uuid.uuid4()),
            "workspace_id": workspace_id,
            "member_id": member_id,
            "retry_count": 0,
            "created_at": datetime.now(UTC).isoformat(),
            "source": source,
            "type": ACCOUNT_DELETION_SYNC_TASK_TYPE,
        }

        # Push to Redis list (queue) - LPUSH adds to the head, worker consumes from tail with RPOP
        redis_client.lpush(ACCOUNT_DELETION_SYNC_QUEUE, json.dumps(task))

        logger.info(
            "Queued account deletion sync task for workspace %s, member %s, task_id: %s, source: %s",
            workspace_id,
            member_id,
            task["task_id"],
            source,
        )
        return True

    except (RedisError, TypeError) as e:
        logger.error(
            "Failed to queue account deletion sync for workspace %s, member %s: %s",
            workspace_id,
            member_id,
            str(e),
            exc_info=True,
        )
        # Don't raise - we don't want to fail member deletion if queueing fails
        return False


def sync_workspace_member_removal(workspace_id: str, member_id: str, *, source: str) -> bool:
    """
    Sync a single workspace member removal (enterprise only).

    Queues a task for the enterprise backend to reassign resources from the removed member.
    Handles enterprise edition check internally. Safe to call in community edition (no-op).

    Args:
        workspace_id: The workspace/tenant ID
        member_id: The member/account ID that was removed
        source: Source of the sync request (e.g., "workspace_member_removed")

    Returns:
        bool: True if task was queued (or skipped in community), False if queueing failed
    """
    if not dify_config.ENTERPRISE_ENABLED:
        return True

    return _queue_task(workspace_id=workspace_id, member_id=member_id, source=source)


def sync_account_deletion(account_id: str, *, source: str) -> bool:
    """
    Sync full account deletion across all workspaces (enterprise only).

    Fetches all workspace memberships for the account and queues a sync task for each.
    Handles enterprise edition check internally. Safe to call in community edition (no-op).

    Args:
        account_id: The account ID being deleted
        source: Source of the sync request (e.g., "account_deleted")

    Returns:
        bool: True if all tasks were queued (or skipped in community), False if any queueing failed
    """
    if not dify_config.ENTERPRISE_ENABLED:
        return True

    # Fetch all workspaces the account belongs to
    workspace_joins = db.session.query(TenantAccountJoin).filter_by(account_id=account_id).all()

    # Queue sync task for each workspace
    success = True
    for join in workspace_joins:
        if not _queue_task(workspace_id=join.tenant_id, member_id=account_id, source=source):
            success = False

    return success
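This module only produces tasks; per its comments, the enterprise backend drains the Redis list from the other end (LPUSH at the head, RPOP at the tail). A minimal sketch of what such a consumer could look like (the consumer is not part of this diff; the queue name and payload fields come from the module above, and drain_once is a hypothetical helper):

```python
import json

from extensions.ext_redis import redis_client

ACCOUNT_DELETION_SYNC_QUEUE = "enterprise:member:sync:queue"


def drain_once() -> dict | None:
    """Pop one sync task from the tail of the queue, or return None if the queue is empty."""
    raw = redis_client.rpop(ACCOUNT_DELETION_SYNC_QUEUE)
    if raw is None:
        return None
    # Payload fields: task_id, workspace_id, member_id, retry_count, created_at, source, type
    return json.loads(raw)


task = drain_once()
if task is not None:
    print("reassign resources for", task["workspace_id"], task["member_id"])
```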
@@ -8,7 +8,6 @@ from sqlalchemy import delete, select
from core.db.session_factory import session_factory
from core.indexing_runner import DocumentIsPausedError, IndexingRunner
from core.rag.index_processor.index_processor_factory import IndexProcessorFactory
-from extensions.ext_database import db
from libs.datetime_utils import naive_utc_now
from models.dataset import Dataset, Document, DocumentSegment
@@ -27,7 +26,7 @@ def document_indexing_update_task(dataset_id: str, document_id: str):
    logger.info(click.style(f"Start update document: {document_id}", fg="green"))
    start_at = time.perf_counter()

-   with session_factory.create_session() as session:
+   with session_factory.create_session() as session, session.begin():
        document = session.query(Document).where(Document.id == document_id, Document.dataset_id == dataset_id).first()

        if not document:
@@ -36,7 +35,6 @@ def document_indexing_update_task(dataset_id: str, document_id: str):

        document.indexing_status = "parsing"
        document.processing_started_at = naive_utc_now()
-       session.commit()

        # delete all document segment and index
        try:
@@ -56,7 +54,7 @@ def document_indexing_update_task(dataset_id: str, document_id: str):
            segment_ids = [segment.id for segment in segments]
            segment_delete_stmt = delete(DocumentSegment).where(DocumentSegment.id.in_(segment_ids))
            session.execute(segment_delete_stmt)
-           db.session.commit()

            end_at = time.perf_counter()
            logger.info(
                click.style(
@@ -6,8 +6,8 @@ import typing

import click
from celery import shared_task

-from core.helper import marketplace
-from core.helper.marketplace import MarketplacePluginDeclaration
+from core.helper.marketplace import record_install_plugin_event
+from core.plugin.entities.marketplace import MarketplacePluginSnapshot
from core.plugin.entities.plugin import PluginInstallationSource
from core.plugin.impl.plugin import PluginInstaller
from extensions.ext_redis import redis_client
@@ -16,7 +16,7 @@ from models.account import TenantPluginAutoUpgradeStrategy
logger = logging.getLogger(__name__)

RETRY_TIMES_OF_ONE_PLUGIN_IN_ONE_TENANT = 3
-CACHE_REDIS_KEY_PREFIX = "plugin_autoupgrade_check_task:cached_plugin_manifests:"
+CACHE_REDIS_KEY_PREFIX = "plugin_autoupgrade_check_task:cached_plugin_snapshot:"
CACHE_REDIS_TTL = 60 * 60  # 1 hour


@@ -25,11 +25,11 @@ def _get_redis_cache_key(plugin_id: str) -> str:
    return f"{CACHE_REDIS_KEY_PREFIX}{plugin_id}"


-def _get_cached_manifest(plugin_id: str) -> typing.Union[MarketplacePluginDeclaration, None, bool]:
+def _get_cached_manifest(plugin_id: str) -> typing.Union[MarketplacePluginSnapshot, None, bool]:
    """
    Get cached plugin manifest from Redis.
    Returns:
-       - MarketplacePluginDeclaration: if found in cache
+       - MarketplacePluginSnapshot: if found in cache
        - None: if cached as not found (marketplace returned no result)
        - False: if not in cache at all
    """
@@ -43,76 +43,31 @@ def _get_cached_manifest(plugin_id: str) -> typing.Union[MarketplacePluginDeclar
        if cached_json is None:
            return None

-       return MarketplacePluginDeclaration.model_validate(cached_json)
+       return MarketplacePluginSnapshot.model_validate(cached_json)
    except Exception:
        logger.exception("Failed to get cached manifest for plugin %s", plugin_id)
        return False


-def _set_cached_manifest(plugin_id: str, manifest: typing.Union[MarketplacePluginDeclaration, None]) -> None:
-   """
-   Cache plugin manifest in Redis.
-   Args:
-       plugin_id: The plugin ID
-       manifest: The manifest to cache, or None if not found in marketplace
-   """
-   try:
-       key = _get_redis_cache_key(plugin_id)
-       if manifest is None:
-           # Cache the fact that this plugin was not found
-           redis_client.setex(key, CACHE_REDIS_TTL, json.dumps(None))
-       else:
-           # Cache the manifest data
-           redis_client.setex(key, CACHE_REDIS_TTL, manifest.model_dump_json())
-   except Exception:
-       # If Redis fails, continue without caching
-       # traceback.print_exc()
-       logger.exception("Failed to set cached manifest for plugin %s", plugin_id)
-
-
def marketplace_batch_fetch_plugin_manifests(
    plugin_ids_plain_list: list[str],
-) -> list[MarketplacePluginDeclaration]:
-   """Fetch plugin manifests with Redis caching support."""
-   cached_manifests: dict[str, typing.Union[MarketplacePluginDeclaration, None]] = {}
-   not_cached_plugin_ids: list[str] = []
+) -> list[MarketplacePluginSnapshot]:
+   """
+   Fetch plugin manifests from Redis cache only.
+   This function assumes fetch_global_plugin_manifest() has been called
+   to pre-populate the cache with all marketplace plugins.
+   """
+   result: list[MarketplacePluginSnapshot] = []

    # Check Redis cache for each plugin
    for plugin_id in plugin_ids_plain_list:
        cached_result = _get_cached_manifest(plugin_id)
-       if cached_result is False:
-           # Not in cache, need to fetch
-           not_cached_plugin_ids.append(plugin_id)
-       else:
-           # Either found manifest or cached as None (not found in marketplace)
-           # At this point, cached_result is either MarketplacePluginDeclaration or None
-           if isinstance(cached_result, bool):
-               # This should never happen due to the if condition above, but for type safety
-               continue
-           cached_manifests[plugin_id] = cached_result
+       if not isinstance(cached_result, MarketplacePluginSnapshot):
+           # cached_result is False (not in cache) or None (cached as not found)
+           logger.warning("plugin %s not found in cache, skipping", plugin_id)
+           continue

-   # Fetch uncached plugins from marketplace
-   if not_cached_plugin_ids:
-       manifests = marketplace.batch_fetch_plugin_manifests_ignore_deserialization_error(not_cached_plugin_ids)
-
-       # Cache the fetched manifests
-       for manifest in manifests:
-           cached_manifests[manifest.plugin_id] = manifest
-           _set_cached_manifest(manifest.plugin_id, manifest)
-
-       # Cache plugins that were not found in marketplace
-       fetched_plugin_ids = {manifest.plugin_id for manifest in manifests}
-       for plugin_id in not_cached_plugin_ids:
-           if plugin_id not in fetched_plugin_ids:
-               cached_manifests[plugin_id] = None
-               _set_cached_manifest(plugin_id, None)
-
-   # Build result list from cached manifests
-   result: list[MarketplacePluginDeclaration] = []
-   for plugin_id in plugin_ids_plain_list:
-       cached_manifest: typing.Union[MarketplacePluginDeclaration, None] = cached_manifests.get(plugin_id)
-       if cached_manifest is not None:
-           result.append(cached_manifest)
+       result.append(cached_result)

    return result
@@ -211,7 +166,7 @@ def process_tenant_plugin_autoupgrade_check_task(
                    # execute upgrade
                    new_unique_identifier = manifest.latest_package_identifier

-                   marketplace.record_install_plugin_event(new_unique_identifier)
+                   record_install_plugin_event(new_unique_identifier)
                    click.echo(
                        click.style(
                            f"Upgrade plugin: {original_unique_identifier} -> {new_unique_identifier}",
@@ -1016,7 +1016,7 @@ class TestAccountService:

    def test_delete_account(self, db_session_with_containers, mock_external_service_dependencies):
        """
-       Test account deletion (should add task to queue).
+       Test account deletion (should add task to queue and sync to enterprise).
        """
        fake = Faker()
        email = fake.email()
@@ -1034,10 +1034,18 @@ class TestAccountService:
            password=password,
        )

-       with patch("services.account_service.delete_account_task") as mock_delete_task:
+       with (
+           patch("services.account_service.delete_account_task") as mock_delete_task,
+           patch("services.enterprise.account_deletion_sync.sync_account_deletion") as mock_sync,
+       ):
+           mock_sync.return_value = True
+
            # Delete account
            AccountService.delete_account(account)

+           # Verify sync was called
+           mock_sync.assert_called_once_with(account_id=account.id, source="account_deleted")
+
            # Verify task was added to queue
            mock_delete_task.delay.assert_called_once_with(account.id)
@@ -1716,7 +1724,7 @@ class TestTenantService:

    def test_remove_member_from_tenant_success(self, db_session_with_containers, mock_external_service_dependencies):
        """
-       Test successful member removal from tenant.
+       Test successful member removal from tenant (should sync to enterprise).
        """
        fake = Faker()
        tenant_name = fake.company()
@@ -1751,7 +1759,15 @@ class TestTenantService:
        TenantService.create_tenant_member(tenant, member_account, role="normal")

        # Remove member
-       TenantService.remove_member_from_tenant(tenant, member_account, owner_account)
+       with patch("services.enterprise.account_deletion_sync.sync_workspace_member_removal") as mock_sync:
+           mock_sync.return_value = True
+
+           TenantService.remove_member_from_tenant(tenant, member_account, owner_account)
+
+           # Verify sync was called
+           mock_sync.assert_called_once_with(
+               workspace_id=tenant.id, member_id=member_account.id, source="workspace_member_removed"
+           )

        # Verify member was removed
        from extensions.ext_database import db
@@ -0,0 +1,182 @@
from unittest.mock import MagicMock, patch

import pytest
from faker import Faker

from models import Account, Tenant, TenantAccountJoin, TenantAccountRole
from models.dataset import Dataset, Document, DocumentSegment
from tasks.document_indexing_update_task import document_indexing_update_task


class TestDocumentIndexingUpdateTask:
    @pytest.fixture
    def mock_external_dependencies(self):
        """Patch external collaborators used by the update task.
        - IndexProcessorFactory.init_index_processor().clean(...)
        - IndexingRunner.run([...])
        """
        with (
            patch("tasks.document_indexing_update_task.IndexProcessorFactory") as mock_factory,
            patch("tasks.document_indexing_update_task.IndexingRunner") as mock_runner,
        ):
            processor_instance = MagicMock()
            mock_factory.return_value.init_index_processor.return_value = processor_instance

            runner_instance = MagicMock()
            mock_runner.return_value = runner_instance

            yield {
                "factory": mock_factory,
                "processor": processor_instance,
                "runner": mock_runner,
                "runner_instance": runner_instance,
            }

    def _create_dataset_document_with_segments(self, db_session_with_containers, *, segment_count: int = 2):
        fake = Faker()

        # Account and tenant
        account = Account(
            email=fake.email(),
            name=fake.name(),
            interface_language="en-US",
            status="active",
        )
        db_session_with_containers.add(account)
        db_session_with_containers.commit()

        tenant = Tenant(name=fake.company(), status="normal")
        db_session_with_containers.add(tenant)
        db_session_with_containers.commit()

        join = TenantAccountJoin(
            tenant_id=tenant.id,
            account_id=account.id,
            role=TenantAccountRole.OWNER,
            current=True,
        )
        db_session_with_containers.add(join)
        db_session_with_containers.commit()

        # Dataset and document
        dataset = Dataset(
            tenant_id=tenant.id,
            name=fake.company(),
            description=fake.text(max_nb_chars=64),
            data_source_type="upload_file",
            indexing_technique="high_quality",
            created_by=account.id,
        )
        db_session_with_containers.add(dataset)
        db_session_with_containers.commit()

        document = Document(
            tenant_id=tenant.id,
            dataset_id=dataset.id,
            position=0,
            data_source_type="upload_file",
            batch="test_batch",
            name=fake.file_name(),
            created_from="upload_file",
            created_by=account.id,
            indexing_status="waiting",
            enabled=True,
            doc_form="text_model",
        )
        db_session_with_containers.add(document)
        db_session_with_containers.commit()

        # Segments
        node_ids = []
        for i in range(segment_count):
            node_id = f"node-{i + 1}"
            seg = DocumentSegment(
                tenant_id=tenant.id,
                dataset_id=dataset.id,
                document_id=document.id,
                position=i,
                content=fake.text(max_nb_chars=32),
                answer=None,
                word_count=10,
                tokens=5,
                index_node_id=node_id,
                status="completed",
                created_by=account.id,
            )
            db_session_with_containers.add(seg)
            node_ids.append(node_id)
        db_session_with_containers.commit()

        # Refresh to ensure ORM state
        db_session_with_containers.refresh(dataset)
        db_session_with_containers.refresh(document)

        return dataset, document, node_ids

    def test_cleans_segments_and_reindexes(self, db_session_with_containers, mock_external_dependencies):
        dataset, document, node_ids = self._create_dataset_document_with_segments(db_session_with_containers)

        # Act
        document_indexing_update_task(dataset.id, document.id)

        # Ensure we see committed changes from another session
        db_session_with_containers.expire_all()

        # Assert document status updated before reindex
        updated = db_session_with_containers.query(Document).where(Document.id == document.id).first()
        assert updated.indexing_status == "parsing"
        assert updated.processing_started_at is not None

        # Segments should be deleted
        remaining = (
            db_session_with_containers.query(DocumentSegment).where(DocumentSegment.document_id == document.id).count()
        )
        assert remaining == 0

        # Assert index processor clean was called with expected args
        clean_call = mock_external_dependencies["processor"].clean.call_args
        assert clean_call is not None
        args, kwargs = clean_call
        # args[0] is a Dataset instance (from another session) — validate by id
        assert getattr(args[0], "id", None) == dataset.id
        # args[1] should contain our node_ids
        assert set(args[1]) == set(node_ids)
        assert kwargs.get("with_keywords") is True
        assert kwargs.get("delete_child_chunks") is True

        # Assert indexing runner invoked with the updated document
        run_call = mock_external_dependencies["runner_instance"].run.call_args
        assert run_call is not None
        run_docs = run_call[0][0]
        assert len(run_docs) == 1
        first = run_docs[0]
        assert getattr(first, "id", None) == document.id

    def test_clean_error_is_logged_and_indexing_continues(self, db_session_with_containers, mock_external_dependencies):
        dataset, document, node_ids = self._create_dataset_document_with_segments(db_session_with_containers)

        # Force clean to raise; task should continue to indexing
        mock_external_dependencies["processor"].clean.side_effect = Exception("boom")

        document_indexing_update_task(dataset.id, document.id)

        # Ensure we see committed changes from another session
        db_session_with_containers.expire_all()

        # Indexing should still be triggered
        mock_external_dependencies["runner_instance"].run.assert_called_once()

        # Segments should remain (since clean failed before DB delete)
        remaining = (
            db_session_with_containers.query(DocumentSegment).where(DocumentSegment.document_id == document.id).count()
        )
        assert remaining > 0

    def test_document_not_found_noop(self, db_session_with_containers, mock_external_dependencies):
        fake = Faker()
        # Act with non-existent document id
        document_indexing_update_task(dataset_id=fake.uuid4(), document_id=fake.uuid4())

        # Neither processor nor runner should be called
        mock_external_dependencies["processor"].clean.assert_not_called()
        mock_external_dependencies["runner_instance"].run.assert_not_called()
@@ -217,7 +217,6 @@ class TestTemplateTransformNode:
    @patch(
        "core.workflow.nodes.template_transform.template_transform_node.CodeExecutorJinja2TemplateRenderer.render_template"
    )
-   @patch("core.workflow.nodes.template_transform.template_transform_node.MAX_TEMPLATE_TRANSFORM_OUTPUT_LENGTH", 10)
    def test_run_output_length_exceeds_limit(
        self, mock_execute, basic_node_data, mock_graph, mock_graph_runtime_state, graph_init_params
    ):
@@ -231,6 +230,7 @@ class TestTemplateTransformNode:
            graph_init_params=graph_init_params,
            graph=mock_graph,
            graph_runtime_state=mock_graph_runtime_state,
+           max_output_length=10,
        )

        result = node._run()
@@ -0,0 +1,276 @@
"""Unit tests for account deletion synchronization.

This test module verifies the enterprise account deletion sync functionality,
including Redis queuing, error handling, and community vs enterprise behavior.
"""

from unittest.mock import MagicMock, patch

import pytest
from redis import RedisError

from services.enterprise.account_deletion_sync import (
    _queue_task,
    sync_account_deletion,
    sync_workspace_member_removal,
)


class TestQueueTask:
    """Unit tests for the _queue_task helper function."""

    @pytest.fixture
    def mock_redis_client(self):
        """Mock redis_client for testing."""
        with patch("services.enterprise.account_deletion_sync.redis_client") as mock_redis:
            yield mock_redis

    @pytest.fixture
    def mock_uuid(self):
        """Mock UUID generation for predictable task IDs."""
        with patch("services.enterprise.account_deletion_sync.uuid.uuid4") as mock_uuid_gen:
            mock_uuid_gen.return_value = MagicMock(hex="test-task-id-1234")
            yield mock_uuid_gen

    def test_queue_task_success(self, mock_redis_client, mock_uuid):
        """Test successful task queueing to Redis."""
        # Arrange
        workspace_id = "ws-123"
        member_id = "member-456"
        source = "test_source"

        # Act
        result = _queue_task(workspace_id=workspace_id, member_id=member_id, source=source)

        # Assert
        assert result is True
        mock_redis_client.lpush.assert_called_once()

        # Verify the task payload structure
        call_args = mock_redis_client.lpush.call_args[0]
        assert call_args[0] == "enterprise:member:sync:queue"

        import json

        task_data = json.loads(call_args[1])
        assert task_data["workspace_id"] == workspace_id
        assert task_data["member_id"] == member_id
        assert task_data["source"] == source
        assert task_data["type"] == "sync_member_deletion_from_workspace"
        assert task_data["retry_count"] == 0
        assert "task_id" in task_data
        assert "created_at" in task_data

    def test_queue_task_redis_error(self, mock_redis_client, caplog):
        """Test handling of Redis connection errors."""
        # Arrange
        mock_redis_client.lpush.side_effect = RedisError("Connection failed")

        # Act
        result = _queue_task(workspace_id="ws-123", member_id="member-456", source="test_source")

        # Assert
        assert result is False
        assert "Failed to queue account deletion sync" in caplog.text

    def test_queue_task_type_error(self, mock_redis_client, caplog):
        """Test handling of JSON serialization errors."""
        # Arrange
        mock_redis_client.lpush.side_effect = TypeError("Cannot serialize")

        # Act
        result = _queue_task(workspace_id="ws-123", member_id="member-456", source="test_source")

        # Assert
        assert result is False
        assert "Failed to queue account deletion sync" in caplog.text


class TestSyncWorkspaceMemberRemoval:
    """Unit tests for sync_workspace_member_removal function."""

    @pytest.fixture
    def mock_queue_task(self):
        """Mock _queue_task for testing."""
        with patch("services.enterprise.account_deletion_sync._queue_task") as mock_queue:
            mock_queue.return_value = True
            yield mock_queue

    def test_sync_workspace_member_removal_enterprise_enabled(self, mock_queue_task):
        """Test sync when ENTERPRISE_ENABLED is True."""
        # Arrange
        workspace_id = "ws-123"
        member_id = "member-456"
        source = "workspace_member_removed"

        with patch("services.enterprise.account_deletion_sync.dify_config") as mock_config:
            mock_config.ENTERPRISE_ENABLED = True

            # Act
            result = sync_workspace_member_removal(workspace_id=workspace_id, member_id=member_id, source=source)

            # Assert
            assert result is True
            mock_queue_task.assert_called_once_with(workspace_id=workspace_id, member_id=member_id, source=source)

    def test_sync_workspace_member_removal_enterprise_disabled(self, mock_queue_task):
        """Test sync when ENTERPRISE_ENABLED is False (community edition)."""
        # Arrange
        with patch("services.enterprise.account_deletion_sync.dify_config") as mock_config:
            mock_config.ENTERPRISE_ENABLED = False

            # Act
            result = sync_workspace_member_removal(workspace_id="ws-123", member_id="member-456", source="test_source")

            # Assert
            assert result is True
            mock_queue_task.assert_not_called()

    def test_sync_workspace_member_removal_queue_failure(self, mock_queue_task):
        """Test handling of queue task failures."""
        # Arrange
        mock_queue_task.return_value = False

        with patch("services.enterprise.account_deletion_sync.dify_config") as mock_config:
            mock_config.ENTERPRISE_ENABLED = True

            # Act
            result = sync_workspace_member_removal(workspace_id="ws-123", member_id="member-456", source="test_source")

            # Assert
            assert result is False


class TestSyncAccountDeletion:
    """Unit tests for sync_account_deletion function."""

    @pytest.fixture
    def mock_db_session(self):
        """Mock database session for testing."""
        with patch("services.enterprise.account_deletion_sync.db.session") as mock_session:
            yield mock_session

    @pytest.fixture
    def mock_queue_task(self):
        """Mock _queue_task for testing."""
        with patch("services.enterprise.account_deletion_sync._queue_task") as mock_queue:
            mock_queue.return_value = True
            yield mock_queue

    def test_sync_account_deletion_enterprise_disabled(self, mock_db_session, mock_queue_task):
        """Test sync when ENTERPRISE_ENABLED is False (community edition)."""
        # Arrange
        with patch("services.enterprise.account_deletion_sync.dify_config") as mock_config:
            mock_config.ENTERPRISE_ENABLED = False

            # Act
            result = sync_account_deletion(account_id="acc-123", source="account_deleted")

            # Assert
            assert result is True
            mock_db_session.query.assert_not_called()
            mock_queue_task.assert_not_called()

    def test_sync_account_deletion_multiple_workspaces(self, mock_db_session, mock_queue_task):
        """Test sync for account with multiple workspace memberships."""
        # Arrange
        account_id = "acc-123"

        # Mock workspace joins
        mock_join1 = MagicMock()
        mock_join1.tenant_id = "tenant-1"
        mock_join2 = MagicMock()
        mock_join2.tenant_id = "tenant-2"
        mock_join3 = MagicMock()
        mock_join3.tenant_id = "tenant-3"

        mock_query = MagicMock()
        mock_query.filter_by.return_value.all.return_value = [mock_join1, mock_join2, mock_join3]
        mock_db_session.query.return_value = mock_query

        with patch("services.enterprise.account_deletion_sync.dify_config") as mock_config:
            mock_config.ENTERPRISE_ENABLED = True

            # Act
            result = sync_account_deletion(account_id=account_id, source="account_deleted")

            # Assert
            assert result is True
            assert mock_queue_task.call_count == 3

            # Verify each workspace was queued
            mock_queue_task.assert_any_call(workspace_id="tenant-1", member_id=account_id, source="account_deleted")
            mock_queue_task.assert_any_call(workspace_id="tenant-2", member_id=account_id, source="account_deleted")
            mock_queue_task.assert_any_call(workspace_id="tenant-3", member_id=account_id, source="account_deleted")

    def test_sync_account_deletion_no_workspaces(self, mock_db_session, mock_queue_task):
        """Test sync for account with no workspace memberships."""
        # Arrange
        mock_query = MagicMock()
        mock_query.filter_by.return_value.all.return_value = []
        mock_db_session.query.return_value = mock_query

        with patch("services.enterprise.account_deletion_sync.dify_config") as mock_config:
            mock_config.ENTERPRISE_ENABLED = True

            # Act
            result = sync_account_deletion(account_id="acc-123", source="account_deleted")

            # Assert
            assert result is True
            mock_queue_task.assert_not_called()

    def test_sync_account_deletion_partial_failure(self, mock_db_session, mock_queue_task):
        """Test sync when some tasks fail to queue."""
        # Arrange
        account_id = "acc-123"

        # Mock workspace joins
        mock_join1 = MagicMock()
        mock_join1.tenant_id = "tenant-1"
        mock_join2 = MagicMock()
        mock_join2.tenant_id = "tenant-2"
        mock_join3 = MagicMock()
        mock_join3.tenant_id = "tenant-3"

        mock_query = MagicMock()
        mock_query.filter_by.return_value.all.return_value = [mock_join1, mock_join2, mock_join3]
        mock_db_session.query.return_value = mock_query

        # Mock queue_task to fail for second workspace
        def queue_side_effect(workspace_id, member_id, source):
            return workspace_id != "tenant-2"

        mock_queue_task.side_effect = queue_side_effect

        with patch("services.enterprise.account_deletion_sync.dify_config") as mock_config:
            mock_config.ENTERPRISE_ENABLED = True

            # Act
            result = sync_account_deletion(account_id=account_id, source="account_deleted")

            # Assert
            assert result is False  # Should return False if any task fails
            assert mock_queue_task.call_count == 3

    def test_sync_account_deletion_all_failures(self, mock_db_session, mock_queue_task):
        """Test sync when all tasks fail to queue."""
        # Arrange
        mock_join = MagicMock()
        mock_join.tenant_id = "tenant-1"

        mock_query = MagicMock()
        mock_query.filter_by.return_value.all.return_value = [mock_join]
        mock_db_session.query.return_value = mock_query

        mock_queue_task.return_value = False

        with patch("services.enterprise.account_deletion_sync.dify_config") as mock_config:
            mock_config.ENTERPRISE_ENABLED = True

            # Act
            result = sync_account_deletion(account_id="acc-123", source="account_deleted")

            # Assert
            assert result is False
            mock_queue_task.assert_called_once()
@@ -1,4 +1,3 @@
-/* eslint-disable tailwindcss/classnames-order */
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import Effect from '.'

@@ -29,8 +28,8 @@ type Story = StoryObj<typeof meta>
export const Playground: Story = {
  render: () => (
    <div className="relative h-40 w-72 overflow-hidden rounded-2xl border border-divider-subtle bg-background-default-subtle">
-     <Effect className="top-6 left-8" />
-     <Effect className="top-14 right-10 bg-util-colors-purple-brand-purple-brand-500" />
+     <Effect className="left-8 top-6" />
+     <Effect className="bg-util-colors-purple-brand-purple-brand-500 right-10 top-14" />
      <div className="absolute inset-x-0 bottom-4 flex justify-center text-xs text-text-secondary">
        Accent glow
      </div>
@@ -4,7 +4,7 @@ import type { FC } from 'react'
import { RiQuestionLine } from '@remixicon/react'
import { useBoolean } from 'ahooks'
import * as React from 'react'
-import { useEffect, useRef, useState } from 'react'
+import { useCallback, useEffect, useRef, useState } from 'react'
import { PortalToFollowElem, PortalToFollowElemContent, PortalToFollowElemTrigger } from '@/app/components/base/portal-to-follow-elem'
import { cn } from '@/utils/classnames'
import { tooltipManager } from './TooltipManager'
@@ -61,6 +61,20 @@ const Tooltip: FC<TooltipProps> = ({
    isHoverTriggerRef.current = isHoverTrigger
  }, [isHoverTrigger])

+ const closeTimeoutRef = useRef<ReturnType<typeof setTimeout> | null>(null)
+ const clearCloseTimeout = useCallback(() => {
+   if (closeTimeoutRef.current) {
+     clearTimeout(closeTimeoutRef.current)
+     closeTimeoutRef.current = null
+   }
+ }, [])
+
+ useEffect(() => {
+   return () => {
+     clearCloseTimeout()
+   }
+ }, [clearCloseTimeout])

  const close = () => setOpen(false)

  const handleLeave = (isTrigger: boolean) => {
@@ -71,7 +85,9 @@ const Tooltip: FC<TooltipProps> = ({

    // give time to move to the popup
    if (needsDelay) {
-     setTimeout(() => {
+     clearCloseTimeout()
+     closeTimeoutRef.current = setTimeout(() => {
+       closeTimeoutRef.current = null
        if (!isHoverPopupRef.current && !isHoverTriggerRef.current) {
          setOpen(false)
          tooltipManager.clear(close)
@@ -79,6 +95,7 @@ const Tooltip: FC<TooltipProps> = ({
      }, 300)
    }
    else {
+     clearCloseTimeout()
      setOpen(false)
      tooltipManager.clear(close)
    }
@@ -95,6 +112,7 @@ const Tooltip: FC<TooltipProps> = ({
        onClick={() => triggerMethod === 'click' && setOpen(v => !v)}
        onMouseEnter={() => {
          if (triggerMethod === 'hover') {
+           clearCloseTimeout()
            setHoverTrigger()
            tooltipManager.register(close)
            setOpen(true)
@@ -115,7 +133,12 @@ const Tooltip: FC<TooltipProps> = ({
            !noDecoration && 'system-xs-regular relative max-w-[300px] break-words rounded-md bg-components-panel-bg px-3 py-2 text-left text-text-tertiary shadow-lg',
            popupClassName,
          )}
-         onMouseEnter={() => triggerMethod === 'hover' && setHoverPopup()}
+         onMouseEnter={() => {
+           if (triggerMethod === 'hover') {
+             clearCloseTimeout()
+             setHoverPopup()
+           }
+         }}
          onMouseLeave={() => triggerMethod === 'hover' && handleLeave(false)}
        >
          {popupContent}
@@ -14,7 +14,6 @@ const ErrorMessage = ({
  errorMsg,
}: ErrorMessageProps) => {
  return (
-   // eslint-disable-next-line tailwindcss/migration-from-tailwind-2
    <div className={cn(
      'flex gap-x-0.5 rounded-xl border-[0.5px] border-components-panel-border bg-opacity-40 bg-toast-error-bg p-2 shadow-xs shadow-shadow-shadow-3',
      className,
@@ -159,69 +159,74 @@ const Apps = ({

  return (
    <div className={cn(
-     'flex h-full flex-col border-l-[0.5px] border-divider-regular',
+     'flex h-full min-h-0 flex-col overflow-hidden border-l-[0.5px] border-divider-regular',
    )}
    >
      {systemFeatures.enable_explore_banner && (
        <div className="mt-4 px-12">
          <Banner />
        </div>
      )}
      <div className={cn(
        'mt-6 flex items-center justify-between px-12',
      )}
      >
        <div className="flex items-center">
          <div className="system-xl-semibold grow truncate text-text-primary">{!hasFilterCondition ? t('apps.title', { ns: 'explore' }) : t('apps.resultNum', { num: searchFilteredList.length, ns: 'explore' })}</div>
          {hasFilterCondition && (
            <>
              <div className="mx-3 h-4 w-px bg-divider-regular"></div>
              <Button size="medium" onClick={handleResetFilter}>{t('apps.resetFilter', { ns: 'explore' })}</Button>
            </>
          )}
        </div>
        <Input
          showLeftIcon
          showClearIcon
          wrapperClassName="w-[200px] self-start"
          value={keywords}
          onChange={e => handleKeywordsChange(e.target.value)}
          onClear={() => handleKeywordsChange('')}
        />
      </div>
      <div className="flex flex-1 flex-col overflow-y-auto">
        {systemFeatures.enable_explore_banner && (
          <div className="mt-4 px-12">
            <Banner />
          </div>
        )}

        <div className="mt-2 px-12">
          <Category
            list={categories}
            value={currCategory}
            onChange={setCurrCategory}
            allCategoriesEn={allCategoriesEn}
          />
        </div>

        <div className={cn(
          'relative mt-4 flex flex-1 shrink-0 grow flex-col overflow-auto pb-6',
        )}
        >
          <nav
            className={cn(
              s.appList,
              'grid shrink-0 content-start gap-4 px-6 sm:px-12',
        <div className="sticky top-0 z-10 bg-background-body">
          <div className={cn(
            'flex items-center justify-between px-12 pt-6',
          )}
          >
            {searchFilteredList.map(app => (
              <AppCard
                key={app.app_id}
                isExplore
                app={app}
                canCreate={hasEditPermission}
                onCreate={() => {
                  setCurrApp(app)
                  setIsShowCreateModal(true)
                }}
            >
            <div className="flex items-center">
              <div className="system-xl-semibold grow truncate text-text-primary">{!hasFilterCondition ? t('apps.title', { ns: 'explore' }) : t('apps.resultNum', { num: searchFilteredList.length, ns: 'explore' })}</div>
              {hasFilterCondition && (
                <>
                  <div className="mx-3 h-4 w-px bg-divider-regular"></div>
                  <Button size="medium" onClick={handleResetFilter}>{t('apps.resetFilter', { ns: 'explore' })}</Button>
                </>
              )}
            </div>
            <Input
              showLeftIcon
              showClearIcon
              wrapperClassName="w-[200px] self-start"
              value={keywords}
              onChange={e => handleKeywordsChange(e.target.value)}
              onClear={() => handleKeywordsChange('')}
            />
            ))}
          </nav>
        </div>

        <div className="px-12 pb-4 pt-2">
          <Category
            list={categories}
            value={currCategory}
            onChange={setCurrCategory}
            allCategoriesEn={allCategoriesEn}
          />
        </div>
      </div>

      <div className={cn(
        'relative flex flex-1 shrink-0 grow flex-col pb-6',
      )}
      >
        <nav
          className={cn(
            s.appList,
            'grid shrink-0 content-start gap-4 px-6 sm:px-12',
          )}
        >
          {searchFilteredList.map(app => (
            <AppCard
              key={app.app_id}
              isExplore
              app={app}
              canCreate={hasEditPermission}
              onCreate={() => {
                setCurrApp(app)
                setIsShowCreateModal(true)
              }}
            />
          ))}
        </nav>
      </div>
      </div>

      {isShowCreateModal && (
        <CreateAppModal
@@ -71,7 +71,7 @@ const Explore: FC<IExploreProps> = ({
}
>
<Sidebar controlUpdateInstalledApps={controlUpdateInstalledApps} />
<div className="w-0 grow">
<div className="h-full min-h-0 w-0 grow">
{children}
</div>
</ExploreContext.Provider>

@@ -599,20 +599,30 @@ describe('CommonCreateModal', () => {
},
})
mockUsePluginStore.mockReturnValue(detailWithCredentials)
const existingBuilder = createMockSubscriptionBuilder()
mockVerifyCredentials.mockImplementation((params, { onSuccess }) => {
onSuccess()
})

render(<CommonCreateModal {...defaultProps} />)

await waitFor(() => {
expect(mockCreateBuilder).toHaveBeenCalled()
})
render(<CommonCreateModal {...defaultProps} builder={existingBuilder} />)

fireEvent.click(screen.getByTestId('modal-confirm'))

await waitFor(() => {
expect(mockVerifyCredentials).toHaveBeenCalled()
expect(mockVerifyCredentials).toHaveBeenCalledWith(
expect.objectContaining({
provider: 'test-provider',
subscriptionBuilderId: existingBuilder.id,
}),
expect.objectContaining({
onSuccess: expect.any(Function),
onError: expect.any(Function),
}),
)
})

await waitFor(() => {
expect(screen.getByTestId('modal-confirm')).toHaveTextContent('pluginTrigger.modal.common.create')
})
})

@@ -629,15 +639,12 @@ describe('CommonCreateModal', () => {
},
})
mockUsePluginStore.mockReturnValue(detailWithCredentials)
const existingBuilder = createMockSubscriptionBuilder()
mockVerifyCredentials.mockImplementation((params, { onError }) => {
onError(new Error('Verification failed'))
})

render(<CommonCreateModal {...defaultProps} />)

await waitFor(() => {
expect(mockCreateBuilder).toHaveBeenCalled()
})
render(<CommonCreateModal {...defaultProps} builder={existingBuilder} />)

fireEvent.click(screen.getByTestId('modal-confirm'))

@@ -4,6 +4,17 @@ import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import { DSLImportStatus } from '@/models/app'
import UpdateDSLModal from './update-dsl-modal'

class MockFileReader {
onload: ((this: FileReader, event: ProgressEvent<FileReader>) => void) | null = null

readAsText(_file: Blob) {
const event = { target: { result: 'test content' } } as unknown as ProgressEvent<FileReader>
this.onload?.call(this as unknown as FileReader, event)
}
}

vi.stubGlobal('FileReader', MockFileReader as unknown as typeof FileReader)

// Mock react-i18next
vi.mock('react-i18next', () => ({
useTranslation: () => ({

@@ -1,14 +1,11 @@
import { spawnSync } from 'node:child_process'
import { randomUUID } from 'node:crypto'
import { createSerwistRoute } from '@serwist/turbopack'

const basePath = process.env.NEXT_PUBLIC_BASE_PATH || ''
const revision = spawnSync('git', ['rev-parse', 'HEAD'], { encoding: 'utf-8' }).stdout?.trim() || randomUUID()

export const { dynamic, dynamicParams, revalidate, generateStaticParams, GET } = createSerwistRoute({
additionalPrecacheEntries: [{ url: `${basePath}/_offline.html`, revision }],
swSrc: 'app/sw.ts',
nextConfig: {
basePath,
},
useNativeEsbuild: true,
})

@@ -3,7 +3,9 @@
/// <reference lib="webworker" />

import type { PrecacheEntry, SerwistGlobalConfig } from 'serwist'
import { CacheableResponsePlugin, CacheFirst, ExpirationPlugin, NetworkFirst, Serwist, StaleWhileRevalidate } from 'serwist'
import { defaultCache } from '@serwist/turbopack/worker'
import { Serwist } from 'serwist'
import { withLeadingSlash } from 'ufo'

declare global {
// eslint-disable-next-line ts/consistent-type-definitions
@@ -18,78 +20,30 @@ const scopePathname = new URL(self.registration.scope).pathname
const basePath = scopePathname.replace(/\/serwist\/$/, '').replace(/\/$/, '')
const offlineUrl = `${basePath}/_offline.html`

const normalizeManifestUrl = (url: string): string => {
if (url.startsWith('/serwist/'))
return url.replace(/^\/serwist\//, '/')

return withLeadingSlash(url)
}

const manifest = self.__SW_MANIFEST?.map((entry) => {
if (typeof entry === 'string')
return normalizeManifestUrl(entry)

return {
...entry,
url: normalizeManifestUrl(entry.url),
}
})

const serwist = new Serwist({
precacheEntries: self.__SW_MANIFEST,
precacheEntries: manifest,
skipWaiting: true,
disableDevLogs: true,
clientsClaim: true,
navigationPreload: true,
runtimeCaching: [
{
matcher: ({ url }) => url.origin === 'https://fonts.googleapis.com',
handler: new CacheFirst({
cacheName: 'google-fonts',
plugins: [
new CacheableResponsePlugin({ statuses: [0, 200] }),
new ExpirationPlugin({
maxEntries: 4,
maxAgeSeconds: 365 * 24 * 60 * 60,
}),
],
}),
},
{
matcher: ({ url }) => url.origin === 'https://fonts.gstatic.com',
handler: new CacheFirst({
cacheName: 'google-fonts-webfonts',
plugins: [
new CacheableResponsePlugin({ statuses: [0, 200] }),
new ExpirationPlugin({
maxEntries: 4,
maxAgeSeconds: 365 * 24 * 60 * 60,
}),
],
}),
},
{
matcher: ({ request }) => request.destination === 'image',
handler: new CacheFirst({
cacheName: 'images',
plugins: [
new CacheableResponsePlugin({ statuses: [0, 200] }),
new ExpirationPlugin({
maxEntries: 64,
maxAgeSeconds: 30 * 24 * 60 * 60,
}),
],
}),
},
{
matcher: ({ request }) => request.destination === 'script' || request.destination === 'style',
handler: new StaleWhileRevalidate({
cacheName: 'static-resources',
plugins: [
new ExpirationPlugin({
maxEntries: 32,
maxAgeSeconds: 24 * 60 * 60,
}),
],
}),
},
{
matcher: ({ url, sameOrigin }) => sameOrigin && url.pathname.startsWith('/api/'),
handler: new NetworkFirst({
cacheName: 'api-cache',
networkTimeoutSeconds: 10,
plugins: [
new ExpirationPlugin({
maxEntries: 16,
maxAgeSeconds: 60 * 60,
}),
],
}),
},
],
runtimeCaching: defaultCache,
fallbacks: {
entries: [
{

@@ -38,6 +38,11 @@ pnpm lint:tss

This command lints the entire project and is intended for final verification before committing or pushing changes.

### Introducing New Plugins or Rules

If a new rule flags many errors in existing code, or its automatic fixes would produce an overly large diff, do not apply it with the `--fix` option.
Instead, introduce the rule first, use the `--suppress-all` option to temporarily suppress the existing errors, and fix them gradually in follow-up changes, as sketched below.

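A rough sketch of that suppression workflow (the exact commands are assumptions, not taken from this repo — check `web/package.json` for the real scripts; the `--suppress-all` and `--prune-suppressions` flags require an ESLint version with bulk suppressions, which records entries in `eslint-suppressions.json` by default):

```bash
cd web
# Enable the new rule in eslint.config.mjs, then record the existing
# violations instead of fixing them all in one commit:
pnpm exec eslint . --suppress-all
# Later, as files are cleaned up, drop suppressions that no longer match:
pnpm exec eslint . --prune-suppressions
```
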
## Type Check

You should be able to see suggestions from TypeScript in your editor for all open files.

File diff suppressed because it is too large
@@ -1,9 +1,9 @@
// @ts-check
import antfu from '@antfu/eslint-config'
import pluginQuery from '@tanstack/eslint-plugin-query'
import tailwindcss from 'eslint-plugin-better-tailwindcss'
import sonar from 'eslint-plugin-sonarjs'
import storybook from 'eslint-plugin-storybook'
import tailwind from 'eslint-plugin-tailwindcss'
import dify from './eslint-rules/index.js'

export default antfu(
@@ -23,7 +23,7 @@ export default antfu(
},
},
nextjs: true,
ignores: ['public', 'types/doc-paths.ts'],
ignores: ['public', 'types/doc-paths.ts', 'eslint-suppressions.json'],
typescript: {
overrides: {
'ts/consistent-type-definitions': ['error', 'type'],
@@ -66,42 +66,16 @@ export default antfu(
sonarjs: sonar,
},
},
tailwind.configs['flat/recommended'],
{
settings: {
tailwindcss: {
// These are the default values but feel free to customize
callees: ['classnames', 'clsx', 'ctl', 'cn', 'classNames'],
config: 'tailwind.config.js', // returned from `loadConfig()` utility if not provided
cssFiles: [
'**/*.css',
'!**/node_modules',
'!**/.*',
'!**/dist',
'!**/build',
'!**/.storybook',
'!**/.next',
'!**/.public',
],
cssFilesRefreshRate: 5_000,
removeDuplicates: true,
skipClassAttribute: false,
whitelist: [],
tags: [], // can be set to e.g. ['tw'] for use in tw`bg-blue`
classRegex: '^class(Name)?$', // can be modified to support custom attributes. E.g. "^tw$" for `twin.macro`
},
files: ['**/*.{ts,tsx}'],
plugins: {
tailwindcss,
},
rules: {
// due to 1k lines of tailwind config, these rule have performance issue
'tailwindcss/no-contradicting-classname': 'off',
'tailwindcss/enforces-shorthand': 'off',
'tailwindcss/no-custom-classname': 'off',
'tailwindcss/no-unnecessary-arbitrary-value': 'off',

'tailwindcss/no-arbitrary-value': 'off',
'tailwindcss/classnames-order': 'warn',
'tailwindcss/enforces-negative-arbitrary-values': 'warn',
'tailwindcss/migration-from-tailwind-2': 'warn',
'tailwindcss/enforce-consistent-class-order': 'error',
'tailwindcss/no-duplicate-classes': 'error',
'tailwindcss/no-unnecessary-whitespace': 'error',
'tailwindcss/no-unknown-classes': 'warn',
},
},
{

@@ -29,7 +29,7 @@ const remoteImageURLs = ([hasSetWebPrefix ? new URL(`${process.env.NEXT_PUBLIC_W

const nextConfig: NextConfig = {
basePath: process.env.NEXT_PUBLIC_BASE_PATH || '',
serverExternalPackages: ['esbuild-wasm'],
serverExternalPackages: ['esbuild'],
transpilePackages: ['echarts', 'zrender'],
turbopack: {
rules: codeInspectorPlugin({

@@ -46,7 +46,8 @@
"uglify-embed": "node ./bin/uglify-embed",
"i18n:check": "tsx ./scripts/check-i18n.js",
"test": "vitest run",
"test:coverage": "vitest run --coverage --reporter=dot --silent=passed-only",
"test:coverage": "vitest run --coverage",
"test:ci": "vitest run --coverage --reporter vitest-tiny-reporter --silent=passed-only",
"test:watch": "vitest --watch",
"analyze-component": "node ./scripts/analyze-component.js",
"refactor-component": "node ./scripts/refactor-component.js",
@@ -153,8 +154,9 @@
"sharp": "0.33.5",
"sortablejs": "1.15.6",
"string-ts": "2.3.1",
"tailwind-merge": "2.6.0",
"tailwind-merge": "2.6.1",
"tldts": "7.0.17",
"ufo": "1.6.3",
"use-context-selector": "2.0.0",
"uuid": "10.0.0",
"zod": "3.25.76",
@@ -164,21 +166,21 @@
"devDependencies": {
"@antfu/eslint-config": "7.2.0",
"@chromatic-com/storybook": "5.0.0",
"@eslint-react/eslint-plugin": "2.8.1",
"@eslint-react/eslint-plugin": "2.9.4",
"@mdx-js/loader": "3.1.1",
"@mdx-js/react": "3.1.1",
"@next/bundle-analyzer": "16.1.5",
"@next/eslint-plugin-next": "16.1.6",
"@next/mdx": "16.1.5",
"@rgrove/parse-xml": "4.2.0",
"@serwist/turbopack": "9.5.0",
"@serwist/turbopack": "9.5.4",
"@storybook/addon-docs": "10.2.0",
"@storybook/addon-links": "10.2.0",
"@storybook/addon-onboarding": "10.2.0",
"@storybook/addon-themes": "10.2.0",
"@storybook/nextjs-vite": "10.2.0",
"@storybook/react": "10.2.0",
"@tanstack/eslint-plugin-query": "5.91.3",
"@tanstack/eslint-plugin-query": "5.91.4",
"@tanstack/react-devtools": "0.9.2",
"@tanstack/react-form-devtools": "0.2.12",
"@tanstack/react-query-devtools": "5.90.2",
@@ -209,13 +211,13 @@
"autoprefixer": "10.4.21",
"code-inspector-plugin": "1.3.6",
"cross-env": "10.1.0",
"esbuild-wasm": "0.27.2",
"esbuild": "0.27.2",
"eslint": "9.39.2",
"eslint-plugin-better-tailwindcss": "4.1.1",
"eslint-plugin-react-hooks": "7.0.1",
"eslint-plugin-react-refresh": "0.4.26",
"eslint-plugin-react-refresh": "0.5.0",
"eslint-plugin-sonarjs": "3.0.6",
"eslint-plugin-storybook": "10.2.1",
"eslint-plugin-tailwindcss": "3.18.2",
"eslint-plugin-storybook": "10.2.6",
"husky": "9.1.7",
"jsdom": "27.3.0",
"jsdom-testing-mocks": "1.16.0",
@@ -225,16 +227,17 @@
"postcss": "8.5.6",
"react-scan": "0.4.3",
"sass": "1.93.2",
"serwist": "9.5.0",
"serwist": "9.5.4",
"storybook": "10.2.0",
"tailwindcss": "3.4.18",
"tailwindcss": "3.4.19",
"tsx": "4.21.0",
"typescript": "5.9.3",
"uglify-js": "3.19.3",
"vite": "7.3.1",
"vite-tsconfig-paths": "6.0.4",
"vitest": "4.0.17",
"vitest-canvas-mock": "1.1.3"
"vitest-canvas-mock": "1.1.3",
"vitest-tiny-reporter": "1.3.1"
},
"pnpm": {
"overrides": {

555
web/pnpm-lock.yaml
generated
File diff suppressed because it is too large
@@ -1,157 +0,0 @@
/**
* Test suite for the classnames utility function
* This utility combines the classnames library with tailwind-merge
* to handle conditional CSS classes and merge conflicting Tailwind classes
*/
import { cn } from './classnames'

describe('classnames', () => {
/**
* Tests basic classnames library features:
* - String concatenation
* - Array handling
* - Falsy value filtering
* - Object-based conditional classes
*/
it('classnames libs feature', () => {
expect(cn('foo')).toBe('foo')
expect(cn('foo', 'bar')).toBe('foo bar')
expect(cn(['foo', 'bar'])).toBe('foo bar')

expect(cn(undefined)).toBe('')
expect(cn(null)).toBe('')
expect(cn(false)).toBe('')

expect(cn({
foo: true,
bar: false,
baz: true,
})).toBe('foo baz')
})

/**
* Tests tailwind-merge functionality:
* - Conflicting class resolution (last one wins)
* - Modifier handling (hover, focus, etc.)
* - Important prefix (!)
* - Custom color classes
* - Arbitrary values
*/
it('tailwind-merge', () => {
/* eslint-disable tailwindcss/classnames-order */
expect(cn('p-0')).toBe('p-0')
expect(cn('text-right text-center text-left')).toBe('text-left')
expect(cn('pl-4 p-8')).toBe('p-8')
expect(cn('m-[2px] m-[4px]')).toBe('m-[4px]')
expect(cn('m-1 m-[4px]')).toBe('m-[4px]')
expect(cn('overflow-x-auto hover:overflow-x-hidden overflow-x-scroll')).toBe(
'hover:overflow-x-hidden overflow-x-scroll',
)
expect(cn('h-10 h-min')).toBe('h-min')
expect(cn('bg-grey-5 bg-hotpink')).toBe('bg-hotpink')

expect(cn('hover:block hover:inline')).toBe('hover:inline')

expect(cn('font-medium !font-bold')).toBe('font-medium !font-bold')
expect(cn('!font-medium !font-bold')).toBe('!font-bold')

expect(cn('text-gray-100 text-primary-200')).toBe('text-primary-200')
expect(cn('text-some-unknown-color text-components-input-bg-disabled text-primary-200')).toBe('text-primary-200')
expect(cn('bg-some-unknown-color bg-components-input-bg-disabled bg-primary-200')).toBe('bg-primary-200')

expect(cn('border-t border-white/10')).toBe('border-t border-white/10')
expect(cn('border-t border-white')).toBe('border-t border-white')
expect(cn('text-3.5xl text-black')).toBe('text-3.5xl text-black')
})

/**
* Tests the integration of classnames and tailwind-merge:
* - Object-based conditional classes with Tailwind conflict resolution
*/
it('classnames combined with tailwind-merge', () => {
expect(cn('text-right', {
'text-center': true,
})).toBe('text-center')

expect(cn('text-right', {
'text-center': false,
})).toBe('text-right')
})

/**
* Tests handling of multiple mixed argument types:
* - Strings, arrays, and objects in a single call
* - Tailwind merge working across different argument types
*/
it('multiple mixed argument types', () => {
expect(cn('foo', ['bar', 'baz'], { qux: true, quux: false })).toBe('foo bar baz qux')
expect(cn('p-4', ['p-2', 'm-4'], { 'text-left': true, 'text-right': true })).toBe('p-2 m-4 text-right')
})

/**
* Tests nested array handling:
* - Deep array flattening
* - Tailwind merge with nested structures
*/
it('nested arrays', () => {
expect(cn(['foo', ['bar', 'baz']])).toBe('foo bar baz')
expect(cn(['p-4', ['p-2', 'text-center']])).toBe('p-2 text-center')
})

/**
* Tests empty input handling:
* - Empty strings, arrays, and objects
* - Mixed empty and non-empty values
*/
it('empty inputs', () => {
expect(cn('')).toBe('')
expect(cn([])).toBe('')
expect(cn({})).toBe('')
expect(cn('', [], {})).toBe('')
expect(cn('foo', '', 'bar')).toBe('foo bar')
})

/**
* Tests number input handling:
* - Truthy numbers converted to strings
* - Zero treated as falsy
*/
it('numbers as inputs', () => {
expect(cn(1)).toBe('1')
expect(cn(0)).toBe('')
expect(cn('foo', 1, 'bar')).toBe('foo 1 bar')
})

/**
* Tests multiple object arguments:
* - Object merging
* - Tailwind conflict resolution across objects
*/
it('multiple objects', () => {
expect(cn({ foo: true }, { bar: true })).toBe('foo bar')
expect(cn({ foo: true, bar: false }, { bar: true, baz: true })).toBe('foo bar baz')
expect(cn({ 'p-4': true }, { 'p-2': true })).toBe('p-2')
})

/**
* Tests complex edge cases:
* - Mixed falsy values
* - Nested arrays with falsy values
* - Multiple conflicting Tailwind classes
*/
it('complex edge cases', () => {
expect(cn('foo', null, undefined, false, 'bar', 0, 1, '')).toBe('foo bar 1')
expect(cn(['foo', null, ['bar', undefined, 'baz']])).toBe('foo bar baz')
expect(cn('text-sm', { 'text-lg': false, 'text-xl': true }, 'text-2xl')).toBe('text-2xl')
})

/**
* Tests important (!) modifier behavior:
* - Important modifiers in objects
* - Conflict resolution with important prefix
*/
it('important modifier with objects', () => {
expect(cn({ '!font-medium': true }, { '!font-bold': true })).toBe('!font-bold')
expect(cn('font-normal', { '!font-bold': true })).toBe('font-normal !font-bold')
})
})
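For context, a minimal sketch of the `cn` helper that this deleted spec exercised, assuming it simply composes the `classnames` library with `tailwind-merge` as the file's own comments describe (the export shape and import path are illustrative, not taken from the repo):

```ts
import classNames from 'classnames'
import { twMerge } from 'tailwind-merge'

// Resolve conditional class values first, then let tailwind-merge drop
// conflicting Tailwind utilities (e.g. cn('pl-4 p-8') -> 'p-8').
export const cn = (...args: Parameters<typeof classNames>): string =>
  twMerge(classNames(...args))
```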