Mirror of https://github.com/langgenius/dify.git (synced 2026-02-06 16:13:56 +00:00)

Compare commits: 3 commits (feat/add-u… ... test/refac…)
| Author | SHA1 | Date |
|---|---|---|
| | 33980bc9f4 | |
| | 04fa763c25 | |
| | 41c553d3af | |
.github/workflows/deploy-hitl.yml (vendored, 8 changes)
@@ -4,7 +4,8 @@ on:
  workflow_run:
    workflows: ["Build and Push API & Web"]
    branches:
      - "build/feat/hitl"
      - "feat/hitl-frontend"
      - "feat/hitl-backend"
    types:
      - completed

@@ -13,7 +14,10 @@ jobs:
    runs-on: ubuntu-latest
    if: |
      github.event.workflow_run.conclusion == 'success' &&
      github.event.workflow_run.head_branch == 'build/feat/hitl'
      (
        github.event.workflow_run.head_branch == 'feat/hitl-frontend' ||
        github.event.workflow_run.head_branch == 'feat/hitl-backend'
      )
    steps:
      - name: Deploy to server
        uses: appleboy/ssh-action@v1
@@ -102,6 +102,8 @@ forbidden_modules =
    core.trigger
    core.variables
ignore_imports =
    core.workflow.nodes.agent.agent_node -> core.db.session_factory
    core.workflow.nodes.agent.agent_node -> models.tools
    core.workflow.nodes.loop.loop_node -> core.app.workflow.node_factory
    core.workflow.graph_engine.command_channels.redis_channel -> extensions.ext_redis
    core.workflow.workflow_entry -> core.app.workflow.layers.observability
@@ -122,7 +122,7 @@ These commands assume you start from the repository root.

```bash
cd api
uv run celery -A app.celery worker -P threads -c 2 --loglevel INFO -Q api_token,dataset,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor,retention
uv run celery -A app.celery worker -P threads -c 2 --loglevel INFO -Q dataset,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor,retention
```

1. Optional: start Celery Beat (scheduled tasks, in a new terminal).
@@ -1155,16 +1155,6 @@ class CeleryScheduleTasksConfig(BaseSettings):
        default=0,
    )

    # API token last_used_at batch update
    ENABLE_API_TOKEN_LAST_USED_UPDATE_TASK: bool = Field(
        description="Enable periodic batch update of API token last_used_at timestamps",
        default=True,
    )
    API_TOKEN_LAST_USED_UPDATE_INTERVAL: int = Field(
        description="Interval in minutes for batch updating API token last_used_at (default 30)",
        default=30,
    )

    # Trigger provider refresh (simple version)
    ENABLE_TRIGGER_PROVIDER_REFRESH_TASK: bool = Field(
        description="Enable trigger provider refresh poller",
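The removed Field definitions lived in a pydantic-settings class, so their values could be overridden via environment variables of the same name. A minimal standalone sketch of that mechanism, assuming pydantic-settings v2 is installed (the DemoScheduleConfig class below is hypothetical, not the project's CeleryScheduleTasksConfig):

```python
from pydantic import Field
from pydantic_settings import BaseSettings


class DemoScheduleConfig(BaseSettings):
    # Hypothetical mirror of the removed fields, for illustration only.
    ENABLE_API_TOKEN_LAST_USED_UPDATE_TASK: bool = Field(default=True)
    API_TOKEN_LAST_USED_UPDATE_INTERVAL: int = Field(default=30)  # minutes


# Values come from the environment when set, otherwise the defaults apply, e.g.:
#   ENABLE_API_TOKEN_LAST_USED_UPDATE_TASK=false python demo.py
config = DemoScheduleConfig()
print(config.ENABLE_API_TOKEN_LAST_USED_UPDATE_TASK, config.API_TOKEN_LAST_USED_UPDATE_INTERVAL)
```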
@@ -10,7 +10,6 @@ from libs.helper import TimestampField
from libs.login import current_account_with_tenant, login_required
from models.dataset import Dataset
from models.model import ApiToken, App
from services.api_token_service import ApiTokenCache

from . import console_ns
from .wraps import account_initialization_required, edit_permission_required, setup_required

@@ -132,11 +131,6 @@ class BaseApiKeyResource(Resource):
        if key is None:
            flask_restx.abort(HTTPStatus.NOT_FOUND, message="API key not found")

        # Invalidate cache before deleting from database
        # Type assertion: key is guaranteed to be non-None here because abort() raises
        assert key is not None  # nosec - for type checker only
        ApiTokenCache.delete(key.token, key.type)

        db.session.query(ApiToken).where(ApiToken.id == api_key_id).delete()
        db.session.commit()
@@ -55,7 +55,6 @@ from libs.login import current_account_with_tenant, login_required
from models import ApiToken, Dataset, Document, DocumentSegment, UploadFile
from models.dataset import DatasetPermissionEnum
from models.provider_ids import ModelProviderID
from services.api_token_service import ApiTokenCache
from services.dataset_service import DatasetPermissionService, DatasetService, DocumentService

# Register models for flask_restx to avoid dict type issues in Swagger

@@ -821,11 +820,6 @@ class DatasetApiDeleteApi(Resource):
        if key is None:
            console_ns.abort(404, message="API key not found")

        # Invalidate cache before deleting from database
        # Type assertion: key is guaranteed to be non-None here because abort() raises
        assert key is not None  # nosec - for type checker only
        ApiTokenCache.delete(key.token, key.type)

        db.session.query(ApiToken).where(ApiToken.id == api_key_id).delete()
        db.session.commit()
@@ -120,7 +120,7 @@ class TagUpdateDeleteApi(Resource):

        TagService.delete_tag(tag_id)

        return "", 204
        return 204


@console_ns.route("/tag-bindings/create")
@@ -578,25 +578,6 @@ class PluginUpgradeFromGithubApi(Resource):
            raise ValueError(e)


@console_ns.route("/workspaces/current/plugin/upgrade/batch")
class PluginBatchUpgradeApi(Resource):
    @setup_required
    @login_required
    @account_initialization_required
    @plugin_permission_required(install_required=True)
    def post(self):
        """
        Batch upgrade all marketplace plugins that have updates available
        """
        _, tenant_id = current_account_with_tenant()

        try:
            result = PluginService.batch_upgrade_plugins_from_marketplace(tenant_id)
            return jsonable_encoder(result)
        except PluginDaemonClientSideError as e:
            raise ValueError(e)


@console_ns.route("/workspaces/current/plugin/uninstall")
class PluginUninstallApi(Resource):
    @console_ns.expect(console_ns.models[ParserUninstall.__name__])
@@ -396,7 +396,7 @@ class DatasetApi(DatasetApiResource):
        try:
            if DatasetService.delete_dataset(dataset_id_str, current_user):
                DatasetPermissionService.clear_partial_member_list(dataset_id_str)
                return "", 204
                return 204
            else:
                raise NotFound("Dataset not found.")
        except services.errors.dataset.DatasetInUseError:

@@ -557,7 +557,7 @@ class DatasetTagsApi(DatasetApiResource):
        payload = TagDeletePayload.model_validate(service_api_ns.payload or {})
        TagService.delete_tag(payload.tag_id)

        return "", 204
        return 204


@service_api_ns.route("/datasets/tags/binding")

@@ -581,7 +581,7 @@ class DatasetTagBindingApi(DatasetApiResource):
        payload = TagBindingPayload.model_validate(service_api_ns.payload or {})
        TagService.save_tag_binding({"tag_ids": payload.tag_ids, "target_id": payload.target_id, "type": "knowledge"})

        return "", 204
        return 204


@service_api_ns.route("/datasets/tags/unbinding")

@@ -605,7 +605,7 @@ class DatasetTagUnbindingApi(DatasetApiResource):
        payload = TagUnbindingPayload.model_validate(service_api_ns.payload or {})
        TagService.delete_tag_binding({"tag_id": payload.tag_id, "target_id": payload.target_id, "type": "knowledge"})

        return "", 204
        return 204


@service_api_ns.route("/datasets/<uuid:dataset_id>/tags")

@@ -746,4 +746,4 @@ class DocumentApi(DatasetApiResource):
        except services.errors.document.DocumentIndexingError:
            raise DocumentIndexingError("Cannot delete document during indexing.")

        return "", 204
        return 204

@@ -128,7 +128,7 @@ class DatasetMetadataServiceApi(DatasetApiResource):
        DatasetService.check_dataset_permission(dataset, current_user)

        MetadataService.delete_metadata(dataset_id_str, metadata_id_str)
        return "", 204
        return 204


@service_api_ns.route("/datasets/<uuid:dataset_id>/metadata/built-in")

@@ -233,7 +233,7 @@ class DatasetSegmentApi(DatasetApiResource):
        if not segment:
            raise NotFound("Segment not found.")
        SegmentService.delete_segment(segment, document, dataset)
        return "", 204
        return 204

    @service_api_ns.expect(service_api_ns.models[SegmentUpdatePayload.__name__])
    @service_api_ns.doc("update_segment")

@@ -499,7 +499,7 @@ class DatasetChildChunkApi(DatasetApiResource):
        except ChildChunkDeleteIndexServiceError as e:
            raise ChildChunkDeleteIndexError(str(e))

        return "", 204
        return 204

    @service_api_ns.expect(service_api_ns.models[ChildChunkUpdatePayload.__name__])
    @service_api_ns.doc("update_child_chunk")
@@ -1,24 +1,27 @@
import logging
import time
from collections.abc import Callable
from datetime import timedelta
from enum import StrEnum, auto
from functools import wraps
from typing import Concatenate, ParamSpec, TypeVar, cast
from typing import Concatenate, ParamSpec, TypeVar

from flask import current_app, request
from flask_login import user_logged_in
from flask_restx import Resource
from pydantic import BaseModel
from sqlalchemy import select, update
from sqlalchemy.orm import Session
from werkzeug.exceptions import Forbidden, NotFound, Unauthorized

from enums.cloud_plan import CloudPlan
from extensions.ext_database import db
from extensions.ext_redis import redis_client
from libs.datetime_utils import naive_utc_now
from libs.login import current_user
from models import Account, Tenant, TenantAccountJoin, TenantStatus
from models.dataset import Dataset, RateLimitLog
from models.model import ApiToken, App
from services.api_token_service import ApiTokenCache, fetch_token_with_single_flight, record_token_usage
from services.end_user_service import EndUserService
from services.feature_service import FeatureService

@@ -293,14 +296,7 @@ def validate_dataset_token(view: Callable[Concatenate[T, P], R] | None = None):

def validate_and_get_api_token(scope: str | None = None):
    """
    Validate and get API token with Redis caching.

    This function uses a two-tier approach:
    1. First checks Redis cache for the token
    2. If not cached, queries database and caches the result

    The last_used_at field is updated asynchronously via Celery task
    to avoid blocking the request.
    Validate and get API token.
    """
    auth_header = request.headers.get("Authorization")
    if auth_header is None or " " not in auth_header:

@@ -312,18 +308,29 @@ def validate_and_get_api_token(scope: str | None = None):
    if auth_scheme != "bearer":
        raise Unauthorized("Authorization scheme must be 'Bearer'")

    # Try to get token from cache first
    # Returns a CachedApiToken (plain Python object), not a SQLAlchemy model
    cached_token = ApiTokenCache.get(auth_token, scope)
    if cached_token is not None:
        logger.debug("Token validation served from cache for scope: %s", scope)
        # Record usage in Redis for later batch update (no Celery task per request)
        record_token_usage(auth_token, scope)
        return cast(ApiToken, cached_token)
    current_time = naive_utc_now()
    cutoff_time = current_time - timedelta(minutes=1)
    with Session(db.engine, expire_on_commit=False) as session:
        update_stmt = (
            update(ApiToken)
            .where(
                ApiToken.token == auth_token,
                (ApiToken.last_used_at.is_(None) | (ApiToken.last_used_at < cutoff_time)),
                ApiToken.type == scope,
            )
            .values(last_used_at=current_time)
        )
        stmt = select(ApiToken).where(ApiToken.token == auth_token, ApiToken.type == scope)
        result = session.execute(update_stmt)
        api_token = session.scalar(stmt)

    # Cache miss - use Redis lock for single-flight mode
    # This ensures only one request queries DB for the same token concurrently
    return fetch_token_with_single_flight(auth_token, scope)
        if hasattr(result, "rowcount") and result.rowcount > 0:
            session.commit()

    if not api_token:
        raise Unauthorized("Access token is invalid")

    return api_token


class DatasetApiResource(Resource):
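The replacement path above throttles last_used_at writes with a conditional UPDATE: the timestamp is only written when it is null or older than the one-minute cutoff. A minimal sketch of that idea, using a hypothetical in-memory stand-in instead of SQLAlchemy:

```python
from datetime import datetime, timedelta

# Hypothetical in-memory stand-in for the ApiToken row, for illustration only.
last_used_at: datetime | None = None


def touch_token() -> bool:
    """Mimics the conditional UPDATE: write only if stale by >= 1 minute."""
    global last_used_at
    now = datetime.now()
    cutoff = now - timedelta(minutes=1)
    if last_used_at is None or last_used_at < cutoff:
        last_used_at = now  # corresponds to UPDATE ... SET last_used_at = now
        return True         # rowcount > 0, so the caller commits
    return False            # no-op: no write, no commit


print(touch_token())  # True  (first request writes)
print(touch_token())  # False (a request within the window skips the write)
```

Repeated requests inside the window therefore cost one SELECT but no UPDATE, which is how the change avoids a DB write per request without the removed Redis cache.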
@@ -2,7 +2,7 @@ from __future__ import annotations

import json
from collections.abc import Generator, Mapping, Sequence
from typing import TYPE_CHECKING, Any, cast
from typing import TYPE_CHECKING, Any, Union, cast

from packaging.version import Version
from pydantic import ValidationError

@@ -11,6 +11,7 @@ from sqlalchemy.orm import Session

from core.agent.entities import AgentToolEntity
from core.agent.plugin_entities import AgentStrategyParameter
from core.db.session_factory import session_factory
from core.file import File, FileTransferMethod
from core.memory.token_buffer_memory import TokenBufferMemory
from core.model_manager import ModelInstance, ModelManager

@@ -49,6 +50,12 @@ from factories import file_factory
from factories.agent_factory import get_plugin_agent_strategy
from models import ToolFile
from models.model import Conversation
from models.tools import (
    ApiToolProvider,
    BuiltinToolProvider,
    MCPToolProvider,
    WorkflowToolProvider,
)
from services.tools.builtin_tools_manage_service import BuiltinToolManageService

from .exc import (

@@ -259,7 +266,7 @@ class AgentNode(Node[AgentNodeData]):
        value = cast(list[dict[str, Any]], value)
        tool_value = []
        for tool in value:
            provider_type = ToolProviderType(tool.get("type", ToolProviderType.BUILT_IN))
            provider_type = self._infer_tool_provider_type(tool, self.tenant_id)
            setting_params = tool.get("settings", {})
            parameters = tool.get("parameters", {})
            manual_input_params = [key for key, value in parameters.items() if value is not None]

@@ -748,3 +755,34 @@ class AgentNode(Node[AgentNodeData]):
                llm_usage=llm_usage,
            )
        )

    @staticmethod
    def _infer_tool_provider_type(tool_config: dict[str, Any], tenant_id: str) -> ToolProviderType:
        provider_type_str = tool_config.get("type")
        if provider_type_str:
            return ToolProviderType(provider_type_str)

        provider_id = tool_config.get("provider_name")
        if not provider_id:
            return ToolProviderType.BUILT_IN

        with session_factory.create_session() as session:
            provider_map: dict[
                type[Union[WorkflowToolProvider, MCPToolProvider, ApiToolProvider, BuiltinToolProvider]],
                ToolProviderType,
            ] = {
                WorkflowToolProvider: ToolProviderType.WORKFLOW,
                MCPToolProvider: ToolProviderType.MCP,
                ApiToolProvider: ToolProviderType.API,
                BuiltinToolProvider: ToolProviderType.BUILT_IN,
            }

            for provider_model, provider_type in provider_map.items():
                stmt = select(provider_model).where(
                    provider_model.id == provider_id,
                    provider_model.tenant_id == tenant_id,
                )
                if session.scalar(stmt):
                    return provider_type

            raise AgentNodeError(f"Tool provider with ID '{provider_id}' not found.")
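The new helper falls back through three cases: an explicit type in the config, a legacy config with no provider ID, and a per-table database probe. A minimal standalone sketch of that fallback order (the enum values and configs below are hypothetical, for illustration only):

```python
from enum import StrEnum


class ToolProviderType(StrEnum):
    # Simplified stand-in for the real enum; values are assumptions.
    BUILT_IN = "builtin"
    WORKFLOW = "workflow"


def infer(cfg: dict) -> ToolProviderType | str:
    if cfg.get("type"):                       # 1. explicit type wins
        return ToolProviderType(cfg["type"])
    if not cfg.get("provider_name"):          # 2. legacy config without an ID
        return ToolProviderType.BUILT_IN
    # 3. otherwise the real method probes each provider table by ID and tenant
    return "<lookup across Workflow/MCP/Api/BuiltinToolProvider tables>"


print(infer({"type": "workflow"}))        # workflow
print(infer({}))                          # builtin
print(infer({"provider_name": "abc"}))    # <lookup ...>
```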
@@ -35,10 +35,10 @@ if [[ "${MODE}" == "worker" ]]; then
  if [[ -z "${CELERY_QUEUES}" ]]; then
    if [[ "${EDITION}" == "CLOUD" ]]; then
      # Cloud edition: separate queues for dataset and trigger tasks
      DEFAULT_QUEUES="api_token,dataset,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow_professional,workflow_team,workflow_sandbox,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor,retention"
      DEFAULT_QUEUES="dataset,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow_professional,workflow_team,workflow_sandbox,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor,retention"
    else
      # Community edition (SELF_HOSTED): dataset, pipeline and workflow have separate queues
      DEFAULT_QUEUES="api_token,dataset,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor,retention"
      DEFAULT_QUEUES="dataset,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor,retention"
    fi
  else
    DEFAULT_QUEUES="${CELERY_QUEUES}"
@@ -184,14 +184,6 @@ def init_app(app: DifyApp) -> Celery:
            "task": "schedule.trigger_provider_refresh_task.trigger_provider_refresh",
            "schedule": timedelta(minutes=dify_config.TRIGGER_PROVIDER_REFRESH_INTERVAL),
        }

    if dify_config.ENABLE_API_TOKEN_LAST_USED_UPDATE_TASK:
        imports.append("schedule.update_api_token_last_used_task")
        beat_schedule["batch_update_api_token_last_used"] = {
            "task": "schedule.update_api_token_last_used_task.batch_update_api_token_last_used",
            "schedule": timedelta(minutes=dify_config.API_TOKEN_LAST_USED_UPDATE_INTERVAL),
        }

    celery_app.conf.update(beat_schedule=beat_schedule, imports=imports)

    return celery_app
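For context on the shape of the removed beat entry, here is a minimal self-contained sketch of wiring a Celery Beat schedule; the app name, broker URL, and task are hypothetical, only the entry structure mirrors the code above:

```python
from datetime import timedelta

from celery import Celery

app = Celery("demo", broker="redis://localhost:6379/0")


@app.task(name="demo.heartbeat")
def heartbeat() -> None:
    print("tick")


# Same shape as the removed entry: schedule name -> task path + interval.
app.conf.beat_schedule = {
    "demo-heartbeat": {
        "task": "demo.heartbeat",
        "schedule": timedelta(minutes=30),
    }
}
```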
@@ -1,114 +0,0 @@
"""
Scheduled task to batch-update API token last_used_at timestamps.

Instead of updating the database on every request, token usage is recorded
in Redis as lightweight SET keys (api_token_active:{scope}:{token}).
This task runs periodically (default every 30 minutes) to flush those
records into the database in a single batch operation.
"""

import logging
import time
from datetime import datetime

import click
from sqlalchemy import update
from sqlalchemy.orm import Session

import app
from extensions.ext_database import db
from extensions.ext_redis import redis_client
from models.model import ApiToken
from services.api_token_service import ACTIVE_TOKEN_KEY_PREFIX

logger = logging.getLogger(__name__)


@app.celery.task(queue="api_token")
def batch_update_api_token_last_used():
    """
    Batch update last_used_at for all recently active API tokens.

    Scans Redis for api_token_active:* keys, parses the token and scope
    from each key, and performs a batch database update.
    """
    click.echo(click.style("batch_update_api_token_last_used: start.", fg="green"))
    start_at = time.perf_counter()

    updated_count = 0
    scanned_count = 0

    try:
        # Collect all active token keys and their values (the actual usage timestamps)
        token_entries: list[tuple[str, str | None, datetime]] = []  # (token, scope, usage_time)
        keys_to_delete: list[str | bytes] = []

        for key in redis_client.scan_iter(match=f"{ACTIVE_TOKEN_KEY_PREFIX}*", count=200):
            if isinstance(key, bytes):
                key = key.decode("utf-8")
            scanned_count += 1

            # Read the value (ISO timestamp recorded at actual request time)
            value = redis_client.get(key)
            if not value:
                keys_to_delete.append(key)
                continue

            if isinstance(value, bytes):
                value = value.decode("utf-8")

            try:
                usage_time = datetime.fromisoformat(value)
            except (ValueError, TypeError):
                logger.warning("Invalid timestamp in key %s: %s", key, value)
                keys_to_delete.append(key)
                continue

            # Parse token info from key: api_token_active:{scope}:{token}
            suffix = key[len(ACTIVE_TOKEN_KEY_PREFIX) :]
            parts = suffix.split(":", 1)
            if len(parts) == 2:
                scope_str, token = parts
                scope = None if scope_str == "None" else scope_str
                token_entries.append((token, scope, usage_time))
            keys_to_delete.append(key)

        if not token_entries:
            click.echo(click.style("batch_update_api_token_last_used: no active tokens found.", fg="yellow"))
            # Still clean up any invalid keys
            if keys_to_delete:
                redis_client.delete(*keys_to_delete)
            return

        # Update each token in its own short transaction to avoid long transactions
        for token, scope, usage_time in token_entries:
            with Session(db.engine, expire_on_commit=False) as session, session.begin():
                stmt = (
                    update(ApiToken)
                    .where(
                        ApiToken.token == token,
                        ApiToken.type == scope,
                        (ApiToken.last_used_at.is_(None) | (ApiToken.last_used_at < usage_time)),
                    )
                    .values(last_used_at=usage_time)
                )
                result = session.execute(stmt)
                rowcount = getattr(result, "rowcount", 0)
                if rowcount > 0:
                    updated_count += 1

        # Delete processed keys from Redis
        if keys_to_delete:
            redis_client.delete(*keys_to_delete)

    except Exception:
        logger.exception("batch_update_api_token_last_used failed")

    elapsed = time.perf_counter() - start_at
    click.echo(
        click.style(
            f"batch_update_api_token_last_used: done. "
            f"scanned={scanned_count}, updated={updated_count}, elapsed={elapsed:.2f}s",
            fg="green",
        )
    )
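The deleted task hinges on a key round trip: building an `api_token_active:{scope}:{token}` key on the write side and parsing scope and token back out on the scan side. A minimal standalone sketch (prefix copied from the file above; the demo token and scope values are hypothetical):

```python
ACTIVE_TOKEN_KEY_PREFIX = "api_token_active:"


def make_active_key(token: str, scope: str | None) -> str:
    # Mirrors ApiTokenCache.make_active_key: api_token_active:{scope}:{token}
    return f"{ACTIVE_TOKEN_KEY_PREFIX}{scope}:{token}"


def parse_active_key(key: str) -> tuple[str, str | None]:
    suffix = key[len(ACTIVE_TOKEN_KEY_PREFIX):]
    # Split once from the left, matching suffix.split(":", 1) in the task,
    # so the token itself is kept intact even if it contains no colon.
    scope_str, token = suffix.split(":", 1)
    return token, (None if scope_str == "None" else scope_str)


key = make_active_key("app-abc123", "app")
print(key)                    # api_token_active:app:app-abc123
print(parse_active_key(key))  # ('app-abc123', 'app')
```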
@@ -1,330 +0,0 @@
"""
API Token Service

Handles all API token caching, validation, and usage recording.
Includes Redis cache operations, database queries, and single-flight concurrency control.
"""

import logging
from datetime import datetime
from typing import Any

from pydantic import BaseModel
from sqlalchemy import select
from sqlalchemy.orm import Session
from werkzeug.exceptions import Unauthorized

from extensions.ext_database import db
from extensions.ext_redis import redis_client, redis_fallback
from libs.datetime_utils import naive_utc_now
from models.model import ApiToken

logger = logging.getLogger(__name__)


# ---------------------------------------------------------------------
# Pydantic DTO
# ---------------------------------------------------------------------


class CachedApiToken(BaseModel):
    """
    Pydantic model for cached API token data.

    This is NOT a SQLAlchemy model instance, but a plain Pydantic model
    that mimics the ApiToken model interface for read-only access.
    """

    id: str
    app_id: str | None
    tenant_id: str | None
    type: str
    token: str
    last_used_at: datetime | None
    created_at: datetime | None

    def __repr__(self) -> str:
        return f"<CachedApiToken id={self.id} type={self.type}>"


# ---------------------------------------------------------------------
# Cache configuration
# ---------------------------------------------------------------------

CACHE_KEY_PREFIX = "api_token"
CACHE_TTL_SECONDS = 600  # 10 minutes
CACHE_NULL_TTL_SECONDS = 60  # 1 minute for non-existent tokens
ACTIVE_TOKEN_KEY_PREFIX = "api_token_active:"


# ---------------------------------------------------------------------
# Cache class
# ---------------------------------------------------------------------


class ApiTokenCache:
    """
    Redis cache wrapper for API tokens.
    Handles serialization, deserialization, and cache invalidation.
    """

    @staticmethod
    def make_active_key(token: str, scope: str | None = None) -> str:
        """Generate Redis key for recording token usage."""
        return f"{ACTIVE_TOKEN_KEY_PREFIX}{scope}:{token}"

    @staticmethod
    def _make_tenant_index_key(tenant_id: str) -> str:
        """Generate Redis key for tenant token index."""
        return f"tenant_tokens:{tenant_id}"

    @staticmethod
    def _make_cache_key(token: str, scope: str | None = None) -> str:
        """Generate cache key for the given token and scope."""
        scope_str = scope or "any"
        return f"{CACHE_KEY_PREFIX}:{scope_str}:{token}"

    @staticmethod
    def _serialize_token(api_token: Any) -> bytes:
        """Serialize ApiToken object to JSON bytes."""
        if isinstance(api_token, CachedApiToken):
            return api_token.model_dump_json().encode("utf-8")

        cached = CachedApiToken(
            id=str(api_token.id),
            app_id=str(api_token.app_id) if api_token.app_id else None,
            tenant_id=str(api_token.tenant_id) if api_token.tenant_id else None,
            type=api_token.type,
            token=api_token.token,
            last_used_at=api_token.last_used_at,
            created_at=api_token.created_at,
        )
        return cached.model_dump_json().encode("utf-8")

    @staticmethod
    def _deserialize_token(cached_data: bytes | str) -> Any:
        """Deserialize JSON bytes/string back to a CachedApiToken Pydantic model."""
        if cached_data in {b"null", "null"}:
            return None

        try:
            if isinstance(cached_data, bytes):
                cached_data = cached_data.decode("utf-8")
            return CachedApiToken.model_validate_json(cached_data)
        except (ValueError, Exception) as e:
            logger.warning("Failed to deserialize token from cache: %s", e)
            return None

    @staticmethod
    @redis_fallback(default_return=None)
    def get(token: str, scope: str | None) -> Any | None:
        """Get API token from cache."""
        cache_key = ApiTokenCache._make_cache_key(token, scope)
        cached_data = redis_client.get(cache_key)

        if cached_data is None:
            logger.debug("Cache miss for token key: %s", cache_key)
            return None

        logger.debug("Cache hit for token key: %s", cache_key)
        return ApiTokenCache._deserialize_token(cached_data)

    @staticmethod
    def _add_to_tenant_index(tenant_id: str | None, cache_key: str) -> None:
        """Add cache key to tenant index for efficient invalidation."""
        if not tenant_id:
            return

        try:
            index_key = ApiTokenCache._make_tenant_index_key(tenant_id)
            redis_client.sadd(index_key, cache_key)
            redis_client.expire(index_key, CACHE_TTL_SECONDS + 60)
        except Exception as e:
            logger.warning("Failed to update tenant index: %s", e)

    @staticmethod
    def _remove_from_tenant_index(tenant_id: str | None, cache_key: str) -> None:
        """Remove cache key from tenant index."""
        if not tenant_id:
            return

        try:
            index_key = ApiTokenCache._make_tenant_index_key(tenant_id)
            redis_client.srem(index_key, cache_key)
        except Exception as e:
            logger.warning("Failed to remove from tenant index: %s", e)

    @staticmethod
    @redis_fallback(default_return=False)
    def set(token: str, scope: str | None, api_token: Any | None, ttl: int = CACHE_TTL_SECONDS) -> bool:
        """Set API token in cache."""
        cache_key = ApiTokenCache._make_cache_key(token, scope)

        if api_token is None:
            cached_value = b"null"
            ttl = CACHE_NULL_TTL_SECONDS
        else:
            cached_value = ApiTokenCache._serialize_token(api_token)

        try:
            redis_client.setex(cache_key, ttl, cached_value)

            if api_token is not None and hasattr(api_token, "tenant_id"):
                ApiTokenCache._add_to_tenant_index(api_token.tenant_id, cache_key)

            logger.debug("Cached token with key: %s, ttl: %ss", cache_key, ttl)
            return True
        except Exception as e:
            logger.warning("Failed to cache token: %s", e)
            return False

    @staticmethod
    @redis_fallback(default_return=False)
    def delete(token: str, scope: str | None = None) -> bool:
        """Delete API token from cache."""
        if scope is None:
            pattern = f"{CACHE_KEY_PREFIX}:*:{token}"
            try:
                keys_to_delete = list(redis_client.scan_iter(match=pattern))
                if keys_to_delete:
                    redis_client.delete(*keys_to_delete)
                    logger.info("Deleted %d cache entries for token", len(keys_to_delete))
                return True
            except Exception as e:
                logger.warning("Failed to delete token cache with pattern: %s", e)
                return False
        else:
            cache_key = ApiTokenCache._make_cache_key(token, scope)
            try:
                tenant_id = None
                try:
                    cached_data = redis_client.get(cache_key)
                    if cached_data and cached_data != b"null":
                        cached_token = ApiTokenCache._deserialize_token(cached_data)
                        if cached_token:
                            tenant_id = cached_token.tenant_id
                except Exception as e:
                    logger.debug("Failed to get tenant_id for cache cleanup: %s", e)

                redis_client.delete(cache_key)

                if tenant_id:
                    ApiTokenCache._remove_from_tenant_index(tenant_id, cache_key)

                logger.info("Deleted cache for key: %s", cache_key)
                return True
            except Exception as e:
                logger.warning("Failed to delete token cache: %s", e)
                return False

    @staticmethod
    @redis_fallback(default_return=False)
    def invalidate_by_tenant(tenant_id: str) -> bool:
        """Invalidate all API token caches for a specific tenant via tenant index."""
        try:
            index_key = ApiTokenCache._make_tenant_index_key(tenant_id)
            cache_keys = redis_client.smembers(index_key)

            if cache_keys:
                deleted_count = 0
                for cache_key in cache_keys:
                    if isinstance(cache_key, bytes):
                        cache_key = cache_key.decode("utf-8")
                    redis_client.delete(cache_key)
                    deleted_count += 1

                redis_client.delete(index_key)

                logger.info(
                    "Invalidated %d token cache entries for tenant: %s",
                    deleted_count,
                    tenant_id,
                )
            else:
                logger.info(
                    "No tenant index found for %s, relying on TTL expiration",
                    tenant_id,
                )

            return True

        except Exception as e:
            logger.warning("Failed to invalidate tenant token cache: %s", e)
            return False


# ---------------------------------------------------------------------
# Token usage recording (for batch update)
# ---------------------------------------------------------------------


def record_token_usage(auth_token: str, scope: str | None) -> None:
    """
    Record token usage in Redis for later batch update by a scheduled job.

    Instead of dispatching a Celery task per request, we simply SET a key in Redis.
    A Celery Beat scheduled task will periodically scan these keys and batch-update
    last_used_at in the database.
    """
    try:
        key = ApiTokenCache.make_active_key(auth_token, scope)
        redis_client.set(key, naive_utc_now().isoformat(), ex=3600)
    except Exception as e:
        logger.warning("Failed to record token usage: %s", e)


# ---------------------------------------------------------------------
# Database query + single-flight
# ---------------------------------------------------------------------


def query_token_from_db(auth_token: str, scope: str | None) -> ApiToken:
    """
    Query API token from database and cache the result.

    Raises Unauthorized if token is invalid.
    """
    with Session(db.engine, expire_on_commit=False) as session:
        stmt = select(ApiToken).where(ApiToken.token == auth_token, ApiToken.type == scope)
        api_token = session.scalar(stmt)

    if not api_token:
        ApiTokenCache.set(auth_token, scope, None)
        raise Unauthorized("Access token is invalid")

    ApiTokenCache.set(auth_token, scope, api_token)
    record_token_usage(auth_token, scope)
    return api_token


def fetch_token_with_single_flight(auth_token: str, scope: str | None) -> ApiToken | Any:
    """
    Fetch token from DB with single-flight pattern using Redis lock.

    Ensures only one concurrent request queries the database for the same token.
    Falls back to direct query if lock acquisition fails.
    """
    logger.debug("Token cache miss, attempting to acquire query lock for scope: %s", scope)

    lock_key = f"api_token_query_lock:{scope}:{auth_token}"
    lock = redis_client.lock(lock_key, timeout=10, blocking_timeout=5)

    try:
        if lock.acquire(blocking=True):
            try:
                cached_token = ApiTokenCache.get(auth_token, scope)
                if cached_token is not None:
                    logger.debug("Token cached by concurrent request, using cached version")
                    return cached_token

                return query_token_from_db(auth_token, scope)
            finally:
                lock.release()
        else:
            logger.warning("Lock timeout for token: %s, proceeding with direct query", auth_token[:10])
            return query_token_from_db(auth_token, scope)
    except Unauthorized:
        raise
    except Exception as e:
        logger.warning("Redis lock failed for token query: %s, proceeding anyway", e)
        return query_token_from_db(auth_token, scope)
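The deleted fetch_token_with_single_flight combined a lock with a re-check of the cache after acquiring it. A minimal sketch of that pattern, with a plain threading.Lock standing in for the Redis lock and a dict standing in for the cache (hypothetical fetch value; illustration only):

```python
import threading

_lock = threading.Lock()     # stand-in for redis_client.lock(...)
_cache: dict[str, str] = {}  # stand-in for ApiTokenCache


def fetch_with_single_flight(token: str) -> str:
    cached = _cache.get(token)
    if cached is not None:
        return cached
    with _lock:  # only one caller queries the backend at a time
        # Re-check: a concurrent caller may have filled the cache while we waited.
        cached = _cache.get(token)
        if cached is not None:
            return cached
        value = f"db-row-for-{token}"  # stand-in for query_token_from_db
        _cache[token] = value
        return value


print(fetch_with_single_flight("abc"))  # db-row-for-abc
```

The post-acquire re-check is what turns many concurrent misses into a single database query; without it, every waiter would still hit the database once the lock was released.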
@@ -1696,18 +1696,13 @@ class DocumentService:
            for document in documents
            if document.data_source_type == "upload_file" and document.data_source_info_dict
        ]
        if dataset.doc_form is not None:
            batch_clean_document_task.delay(document_ids, dataset.id, dataset.doc_form, file_ids)

        # Delete documents first, then dispatch cleanup task after commit
        # to avoid deadlock between main transaction and async task
        for document in documents:
            db.session.delete(document)
        db.session.commit()

        # Dispatch cleanup task after commit to avoid lock contention
        # Task cleans up segments, files, and vector indexes
        if dataset.doc_form is not None:
            batch_clean_document_task.delay(document_ids, dataset.id, dataset.doc_form, file_ids)

    @staticmethod
    def rename_document(dataset_id: str, document_id: str, name: str) -> Document:
        assert isinstance(current_user, Account)
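The reordering above is the common dispatch-after-commit pattern: commit first so the worker never races the still-open transaction for the same rows. A minimal sketch with hypothetical names (session, documents, and dispatch are placeholders):

```python
# Before: task.delay(...) ran while the deleting transaction was still open,
# so the async worker could contend with it for the same rows (deadlock risk).
def delete_documents(session, documents, dispatch) -> None:
    ids = [d.id for d in documents]
    for d in documents:
        session.delete(d)
    session.commit()  # release row locks before any async work begins
    dispatch(ids)     # e.g. batch_clean_document_task.delay(ids, ...)
```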
@@ -1,10 +1,6 @@
from __future__ import annotations

import builtins
import logging
from collections.abc import Mapping, Sequence
from mimetypes import guess_type
from typing import Any

from pydantic import BaseModel
from sqlalchemy import select

@@ -341,91 +337,6 @@ class PluginService:
        },
    )

    @staticmethod
    def batch_upgrade_plugins_from_marketplace(tenant_id: str) -> dict[str, builtins.list[dict[str, Any]]]:
        """
        Batch upgrade all marketplace plugins that have updates available

        Returns a dict with:
        - success: list of successfully upgraded plugins
        - failed: list of failed upgrades with error messages
        - skipped: list of plugins skipped (no updates or errors)
        """
        if not dify_config.MARKETPLACE_ENABLED:
            raise ValueError("marketplace is not enabled")

        manager = PluginInstaller()
        result: dict[str, builtins.list[dict[str, Any]]] = {
            "success": [],
            "failed": [],
            "skipped": [],
        }

        # Get all installed plugins
        plugins = manager.list_plugins(tenant_id)

        # Filter marketplace plugins only
        marketplace_plugins = [plugin for plugin in plugins if plugin.source == PluginInstallationSource.Marketplace]

        if not marketplace_plugins:
            return result

        # Get latest versions for all marketplace plugins
        plugin_ids = [plugin.plugin_id for plugin in marketplace_plugins]
        latest_versions = PluginService.fetch_latest_plugin_version(plugin_ids)

        # Upgrade each plugin if newer version is available
        for plugin in marketplace_plugins:
            try:
                latest_info = latest_versions.get(plugin.plugin_id)
                if not latest_info:
                    result["skipped"].append(
                        {
                            "plugin_id": plugin.plugin_id,
                            "reason": "no_update_info",
                            "current_version": plugin.version,
                        }
                    )
                    continue

                # Check if update is needed
                if latest_info.version == plugin.version:
                    result["skipped"].append(
                        {
                            "plugin_id": plugin.plugin_id,
                            "reason": "already_latest",
                            "current_version": plugin.version,
                        }
                    )
                    continue

                # Perform upgrade
                PluginService.upgrade_plugin_with_marketplace(
                    tenant_id, plugin.plugin_unique_identifier, latest_info.unique_identifier
                )

                result["success"].append(
                    {
                        "plugin_id": plugin.plugin_id,
                        "from_version": plugin.version,
                        "to_version": latest_info.version,
                        "from_identifier": plugin.plugin_unique_identifier,
                        "to_identifier": latest_info.unique_identifier,
                    }
                )

            except Exception as e:
                logger.exception("Failed to upgrade plugin %s", plugin.plugin_id)
                result["failed"].append(
                    {
                        "plugin_id": plugin.plugin_id,
                        "current_version": plugin.version,
                        "error": str(e),
                    }
                )

        return result

    @staticmethod
    def upload_pkg(tenant_id: str, pkg: bytes, verify_signature: bool = False) -> PluginDecodeResponse:
        """
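The removed method's docstring documents a three-bucket result (success, failed, skipped). A minimal sketch of consuming such a result, using hypothetical data shaped like the entries the method built above:

```python
result = {
    "success": [{"plugin_id": "p1", "from_version": "1.0.0", "to_version": "1.1.0"}],
    "failed": [{"plugin_id": "p2", "current_version": "2.0.0", "error": "timeout"}],
    "skipped": [{"plugin_id": "p3", "reason": "already_latest", "current_version": "0.9.2"}],
}

# Per-plugin errors land in "failed" instead of aborting the whole batch,
# so a caller can report partial success.
for bucket, entries in result.items():
    for entry in entries:
        print(f"{bucket}: {entry['plugin_id']}")
```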
@@ -14,9 +14,6 @@ from models.model import UploadFile

logger = logging.getLogger(__name__)

# Batch size for database operations to keep transactions short
BATCH_SIZE = 1000


@shared_task(queue="dataset")
def batch_clean_document_task(document_ids: list[str], dataset_id: str, doc_form: str | None, file_ids: list[str]):

@@ -34,179 +31,63 @@ def batch_clean_document_task(document_ids: list[str], dataset_id: str, doc_form
    if not doc_form:
        raise ValueError("doc_form is required")

    storage_keys_to_delete: list[str] = []
    index_node_ids: list[str] = []
    segment_ids: list[str] = []
    total_image_upload_file_ids: list[str] = []
    with session_factory.create_session() as session:
        try:
            dataset = session.query(Dataset).where(Dataset.id == dataset_id).first()

            if not dataset:
                raise Exception("Document has no dataset")

            session.query(DatasetMetadataBinding).where(
                DatasetMetadataBinding.dataset_id == dataset_id,
                DatasetMetadataBinding.document_id.in_(document_ids),
            ).delete(synchronize_session=False)

    try:
        # ============ Step 1: Query segment and file data (short read-only transaction) ============
        with session_factory.create_session() as session:
            # Get segments info
            segments = session.scalars(
                select(DocumentSegment).where(DocumentSegment.document_id.in_(document_ids))
            ).all()

            # check segment is exist
            if segments:
                index_node_ids = [segment.index_node_id for segment in segments]
                segment_ids = [segment.id for segment in segments]
                index_processor = IndexProcessorFactory(doc_form).init_index_processor()
                index_processor.clean(
                    dataset, index_node_ids, with_keywords=True, delete_child_chunks=True, delete_summaries=True
                )

                # Collect image file IDs from segment content
                for segment in segments:
                    image_upload_file_ids = get_image_upload_file_ids(segment.content)
                    total_image_upload_file_ids.extend(image_upload_file_ids)

            # Query storage keys for image files
            if total_image_upload_file_ids:
                image_files = session.scalars(
                    select(UploadFile).where(UploadFile.id.in_(total_image_upload_file_ids))
                ).all()
                storage_keys_to_delete.extend([f.key for f in image_files if f and f.key])

            # Query storage keys for document files
                    image_files = session.query(UploadFile).where(UploadFile.id.in_(image_upload_file_ids)).all()
                    for image_file in image_files:
                        try:
                            if image_file and image_file.key:
                                storage.delete(image_file.key)
                        except Exception:
                            logger.exception(
                                "Delete image_files failed when storage deleted, \
                                image_upload_file_is: %s",
                                image_file.id,
                            )
                    stmt = delete(UploadFile).where(UploadFile.id.in_(image_upload_file_ids))
                    session.execute(stmt)
                    session.delete(segment)
            if file_ids:
                files = session.scalars(select(UploadFile).where(UploadFile.id.in_(file_ids))).all()
                storage_keys_to_delete.extend([f.key for f in files if f and f.key])
                for file in files:
                    try:
                        storage.delete(file.key)
                    except Exception:
                        logger.exception("Delete file failed when document deleted, file_id: %s", file.id)
                stmt = delete(UploadFile).where(UploadFile.id.in_(file_ids))
                session.execute(stmt)

        # ============ Step 2: Clean vector index (external service, fresh session for dataset) ============
        if index_node_ids:
            try:
                # Fetch dataset in a fresh session to avoid DetachedInstanceError
                with session_factory.create_session() as session:
                    dataset = session.query(Dataset).where(Dataset.id == dataset_id).first()
                    if not dataset:
                        logger.warning("Dataset not found for vector index cleanup, dataset_id: %s", dataset_id)
                    else:
                        index_processor = IndexProcessorFactory(doc_form).init_index_processor()
                        index_processor.clean(
                            dataset, index_node_ids, with_keywords=True, delete_child_chunks=True, delete_summaries=True
                        )
            except Exception:
                logger.exception(
                    "Failed to clean vector index for dataset_id: %s, document_ids: %s, index_node_ids count: %d",
                    dataset_id,
                    document_ids,
                    len(index_node_ids),
                )
            session.commit()

        # ============ Step 3: Delete metadata binding (separate short transaction) ============
        try:
            with session_factory.create_session() as session:
                deleted_count = (
                    session.query(DatasetMetadataBinding)
                    .where(
                        DatasetMetadataBinding.dataset_id == dataset_id,
                        DatasetMetadataBinding.document_id.in_(document_ids),
                    )
                    .delete(synchronize_session=False)
            end_at = time.perf_counter()
            logger.info(
                click.style(
                    f"Cleaned documents when documents deleted latency: {end_at - start_at}",
                    fg="green",
                )
                session.commit()
                logger.debug("Deleted %d metadata bindings for dataset_id: %s", deleted_count, dataset_id)
            )
        except Exception:
            logger.exception(
                "Failed to delete metadata bindings for dataset_id: %s, document_ids: %s",
                dataset_id,
                document_ids,
            )

        # ============ Step 4: Batch delete UploadFile records (multiple short transactions) ============
        if total_image_upload_file_ids:
            failed_batches = 0
            total_batches = (len(total_image_upload_file_ids) + BATCH_SIZE - 1) // BATCH_SIZE
            for i in range(0, len(total_image_upload_file_ids), BATCH_SIZE):
                batch = total_image_upload_file_ids[i : i + BATCH_SIZE]
                try:
                    with session_factory.create_session() as session:
                        stmt = delete(UploadFile).where(UploadFile.id.in_(batch))
                        session.execute(stmt)
                        session.commit()
                except Exception:
                    failed_batches += 1
                    logger.exception(
                        "Failed to delete image UploadFile batch %d-%d for dataset_id: %s",
                        i,
                        i + len(batch),
                        dataset_id,
                    )
            if failed_batches > 0:
                logger.warning(
                    "Image UploadFile deletion: %d/%d batches failed for dataset_id: %s",
                    failed_batches,
                    total_batches,
                    dataset_id,
                )

        # ============ Step 5: Batch delete DocumentSegment records (multiple short transactions) ============
        if segment_ids:
            failed_batches = 0
            total_batches = (len(segment_ids) + BATCH_SIZE - 1) // BATCH_SIZE
            for i in range(0, len(segment_ids), BATCH_SIZE):
                batch = segment_ids[i : i + BATCH_SIZE]
                try:
                    with session_factory.create_session() as session:
                        segment_delete_stmt = delete(DocumentSegment).where(DocumentSegment.id.in_(batch))
                        session.execute(segment_delete_stmt)
                        session.commit()
                except Exception:
                    failed_batches += 1
                    logger.exception(
                        "Failed to delete DocumentSegment batch %d-%d for dataset_id: %s, document_ids: %s",
                        i,
                        i + len(batch),
                        dataset_id,
                        document_ids,
                    )
            if failed_batches > 0:
                logger.warning(
                    "DocumentSegment deletion: %d/%d batches failed, document_ids: %s",
                    failed_batches,
                    total_batches,
                    document_ids,
                )

        # ============ Step 6: Delete document-associated files (separate short transaction) ============
        if file_ids:
            try:
                with session_factory.create_session() as session:
                    stmt = delete(UploadFile).where(UploadFile.id.in_(file_ids))
                    session.execute(stmt)
                    session.commit()
            except Exception:
                logger.exception(
                    "Failed to delete document UploadFile records for dataset_id: %s, file_ids: %s",
                    dataset_id,
                    file_ids,
                )

        # ============ Step 7: Delete storage files (I/O operations, no DB transaction) ============
        storage_delete_failures = 0
        for storage_key in storage_keys_to_delete:
            try:
                storage.delete(storage_key)
            except Exception:
                storage_delete_failures += 1
                logger.exception("Failed to delete file from storage, key: %s", storage_key)
        if storage_delete_failures > 0:
            logger.warning(
                "Storage file deletion completed with %d failures out of %d total files for dataset_id: %s",
                storage_delete_failures,
                len(storage_keys_to_delete),
                dataset_id,
            )

        end_at = time.perf_counter()
        logger.info(
            click.style(
                f"Cleaned documents when documents deleted latency: {end_at - start_at:.2f}s, "
                f"dataset_id: {dataset_id}, document_ids: {document_ids}, "
                f"segments: {len(segment_ids)}, image_files: {len(total_image_upload_file_ids)}, "
                f"storage_files: {len(storage_keys_to_delete)}",
                fg="green",
            )
        )
    except Exception:
        logger.exception(
            "Batch clean documents failed for dataset_id: %s, document_ids: %s",
            dataset_id,
            document_ids,
        )
        logger.exception("Cleaned documents when documents deleted failed")
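Steps 4 through 6 above repeat one batching idiom: slice the ID list into BATCH_SIZE chunks and run each chunk in its own short transaction, counting failed slices instead of aborting. A minimal generic sketch (the delete_batch callback is a hypothetical placeholder for one short DB transaction):

```python
BATCH_SIZE = 1000


def delete_in_batches(ids: list[str], delete_batch) -> int:
    """Run delete_batch over BATCH_SIZE-sized slices; return the failed-slice count."""
    failed = 0
    for i in range(0, len(ids), BATCH_SIZE):
        batch = ids[i : i + BATCH_SIZE]
        try:
            delete_batch(batch)  # e.g. one session + DELETE ... WHERE id IN (batch)
        except Exception:
            failed += 1  # log and continue: one bad slice doesn't abort the rest
    return failed


# 2500 ids -> three slices of at most 1000; a no-op callback fails zero slices.
print(delete_in_batches([str(i) for i in range(2500)], lambda b: None))  # 0
```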
@@ -3,7 +3,6 @@ import time

import click
from celery import shared_task
from sqlalchemy import delete

from core.db.session_factory import session_factory
from core.rag.index_processor.index_processor_factory import IndexProcessorFactory

@@ -68,14 +67,8 @@ def delete_segment_from_index_task(
        if segment_attachment_bindings:
            attachment_ids = [binding.attachment_id for binding in segment_attachment_bindings]
            index_processor.clean(dataset=dataset, node_ids=attachment_ids, with_keywords=False)
            segment_attachment_bind_ids = [i.id for i in segment_attachment_bindings]

            for i in range(0, len(segment_attachment_bind_ids), 1000):
                segment_attachment_bind_delete_stmt = delete(SegmentAttachmentBinding).where(
                    SegmentAttachmentBinding.id.in_(segment_attachment_bind_ids[i : i + 1000])
                )
                session.execute(segment_attachment_bind_delete_stmt)

            for binding in segment_attachment_bindings:
                session.delete(binding)
            # delete upload file
            session.query(UploadFile).where(UploadFile.id.in_(attachment_ids)).delete(synchronize_session=False)
        session.commit()
@@ -28,7 +28,7 @@ def document_indexing_sync_task(dataset_id: str, document_id: str):
    logger.info(click.style(f"Start sync document: {document_id}", fg="green"))
    start_at = time.perf_counter()

    with session_factory.create_session() as session, session.begin():
    with session_factory.create_session() as session:
        document = session.query(Document).where(Document.id == document_id, Document.dataset_id == dataset_id).first()

        if not document:

@@ -68,6 +68,7 @@ def document_indexing_sync_task(dataset_id: str, document_id: str):
            document.indexing_status = "error"
            document.error = "Datasource credential not found. Please reconnect your Notion workspace."
            document.stopped_at = naive_utc_now()
            session.commit()
            return

        loader = NotionExtractor(

@@ -84,6 +85,7 @@ def document_indexing_sync_task(dataset_id: str, document_id: str):
        if last_edited_time != page_edited_time:
            document.indexing_status = "parsing"
            document.processing_started_at = naive_utc_now()
            session.commit()

            # delete all document segment and index
            try:
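The switch from `session.begin()` to a bare session with explicit `session.commit()` calls makes each status checkpoint durable as soon as it is written, instead of deferring everything to one commit at block exit. A minimal sketch of the difference in commit timing, using a hypothetical toy object rather than a real SQLAlchemy Session:

```python
class ToySession:
    """Hypothetical stand-in that only tracks commit timing, not SQLAlchemy."""

    def __init__(self) -> None:
        self.pending: list[str] = []
        self.committed: list[str] = []

    def add(self, change: str) -> None:
        self.pending.append(change)

    def commit(self) -> None:
        self.committed += self.pending
        self.pending = []


# With explicit commits, checkpoint 1 survives even if a later step raises;
# under a single session.begin() block, nothing would be durable until exit.
session = ToySession()
session.add("indexing_status=error")
session.commit()  # checkpoint 1 is durable now
session.add("indexing_status=parsing")
session.commit()  # checkpoint 2
print(session.committed)  # ['indexing_status=error', 'indexing_status=parsing']
```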
@@ -48,7 +48,6 @@ from models.workflow import (
    WorkflowArchiveLog,
)
from repositories.factory import DifyAPIRepositoryFactory
from services.api_token_service import ApiTokenCache

logger = logging.getLogger(__name__)

@@ -135,12 +134,6 @@ def _delete_app_mcp_servers(tenant_id: str, app_id: str):

def _delete_app_api_tokens(tenant_id: str, app_id: str):
    def del_api_token(session, api_token_id: str):
        # Fetch token details for cache invalidation
        token_obj = session.query(ApiToken).where(ApiToken.id == api_token_id).first()
        if token_obj:
            # Invalidate cache before deletion
            ApiTokenCache.delete(token_obj.token, token_obj.type)

        session.query(ApiToken).where(ApiToken.id == api_token_id).delete(synchronize_session=False)

    _delete_records(
@@ -1,375 +0,0 @@
|
||||
"""
|
||||
Integration tests for API Token Cache with Redis.
|
||||
|
||||
These tests require:
|
||||
- Redis server running
|
||||
- Test database configured
|
||||
"""
|
||||
|
||||
import time
|
||||
from datetime import datetime, timedelta
|
||||
from unittest.mock import patch
|
||||
|
||||
import pytest
|
||||
|
||||
from extensions.ext_redis import redis_client
|
||||
from models.model import ApiToken
|
||||
from services.api_token_service import ApiTokenCache, CachedApiToken
|
||||
|
||||
|
||||
class TestApiTokenCacheRedisIntegration:
|
||||
"""Integration tests with real Redis."""
|
||||
|
||||
def setup_method(self):
|
||||
"""Setup test fixtures and clean Redis."""
|
||||
self.test_token = "test-integration-token-123"
|
||||
self.test_scope = "app"
|
||||
self.cache_key = f"api_token:{self.test_scope}:{self.test_token}"
|
||||
|
||||
# Clean up any existing test data
|
||||
self._cleanup()
|
||||
|
||||
def teardown_method(self):
|
||||
"""Cleanup test data from Redis."""
|
||||
self._cleanup()
|
||||
|
||||
def _cleanup(self):
|
||||
"""Remove test data from Redis."""
|
||||
try:
|
||||
redis_client.delete(self.cache_key)
|
||||
redis_client.delete(ApiTokenCache._make_tenant_index_key("test-tenant-id"))
|
||||
redis_client.delete(ApiTokenCache.make_active_key(self.test_token, self.test_scope))
|
||||
except Exception:
|
||||
pass # Ignore cleanup errors
|
||||
|
||||
def test_cache_set_and_get_with_real_redis(self):
|
||||
"""Test cache set and get operations with real Redis."""
|
||||
from unittest.mock import MagicMock
|
||||
|
||||
mock_token = MagicMock()
|
||||
mock_token.id = "test-id-123"
|
||||
mock_token.app_id = "test-app-456"
|
||||
mock_token.tenant_id = "test-tenant-789"
|
||||
mock_token.type = "app"
|
||||
mock_token.token = self.test_token
|
||||
mock_token.last_used_at = datetime.now()
|
||||
mock_token.created_at = datetime.now() - timedelta(days=30)
|
||||
|
||||
# Set in cache
|
||||
        result = ApiTokenCache.set(self.test_token, self.test_scope, mock_token)
        assert result is True

        # Verify in Redis
        cached_data = redis_client.get(self.cache_key)
        assert cached_data is not None

        # Get from cache
        cached_token = ApiTokenCache.get(self.test_token, self.test_scope)
        assert cached_token is not None
        assert isinstance(cached_token, CachedApiToken)
        assert cached_token.id == "test-id-123"
        assert cached_token.app_id == "test-app-456"
        assert cached_token.tenant_id == "test-tenant-789"
        assert cached_token.type == "app"
        assert cached_token.token == self.test_token

    def test_cache_ttl_with_real_redis(self):
        """Test cache TTL is set correctly."""
        from unittest.mock import MagicMock

        mock_token = MagicMock()
        mock_token.id = "test-id"
        mock_token.app_id = "test-app"
        mock_token.tenant_id = "test-tenant"
        mock_token.type = "app"
        mock_token.token = self.test_token
        mock_token.last_used_at = None
        mock_token.created_at = datetime.now()

        ApiTokenCache.set(self.test_token, self.test_scope, mock_token)

        ttl = redis_client.ttl(self.cache_key)
        assert 595 <= ttl <= 600  # Should be around 600 seconds (10 minutes)

    def test_cache_null_value_for_invalid_token(self):
        """Test caching null value for invalid tokens."""
        result = ApiTokenCache.set(self.test_token, self.test_scope, None)
        assert result is True

        cached_data = redis_client.get(self.cache_key)
        assert cached_data == b"null"

        cached_token = ApiTokenCache.get(self.test_token, self.test_scope)
        assert cached_token is None

        ttl = redis_client.ttl(self.cache_key)
        assert 55 <= ttl <= 60

    def test_cache_delete_with_real_redis(self):
        """Test cache deletion with real Redis."""
        from unittest.mock import MagicMock

        mock_token = MagicMock()
        mock_token.id = "test-id"
        mock_token.app_id = "test-app"
        mock_token.tenant_id = "test-tenant"
        mock_token.type = "app"
        mock_token.token = self.test_token
        mock_token.last_used_at = None
        mock_token.created_at = datetime.now()

        ApiTokenCache.set(self.test_token, self.test_scope, mock_token)
        assert redis_client.exists(self.cache_key) == 1

        result = ApiTokenCache.delete(self.test_token, self.test_scope)
        assert result is True
        assert redis_client.exists(self.cache_key) == 0

    def test_tenant_index_creation(self):
        """Test tenant index is created when caching token."""
        from unittest.mock import MagicMock

        tenant_id = "test-tenant-id"
        mock_token = MagicMock()
        mock_token.id = "test-id"
        mock_token.app_id = "test-app"
        mock_token.tenant_id = tenant_id
        mock_token.type = "app"
        mock_token.token = self.test_token
        mock_token.last_used_at = None
        mock_token.created_at = datetime.now()

        ApiTokenCache.set(self.test_token, self.test_scope, mock_token)

        index_key = ApiTokenCache._make_tenant_index_key(tenant_id)
        assert redis_client.exists(index_key) == 1

        members = redis_client.smembers(index_key)
        cache_keys = [m.decode("utf-8") if isinstance(m, bytes) else m for m in members]
        assert self.cache_key in cache_keys

    def test_invalidate_by_tenant_via_index(self):
        """Test tenant-wide cache invalidation using index (fast path)."""
        from unittest.mock import MagicMock

        tenant_id = "test-tenant-id"

        for i in range(3):
            token_value = f"test-token-{i}"
            mock_token = MagicMock()
            mock_token.id = f"test-id-{i}"
            mock_token.app_id = "test-app"
            mock_token.tenant_id = tenant_id
            mock_token.type = "app"
            mock_token.token = token_value
            mock_token.last_used_at = None
            mock_token.created_at = datetime.now()

            ApiTokenCache.set(token_value, "app", mock_token)

        for i in range(3):
            key = f"api_token:app:test-token-{i}"
            assert redis_client.exists(key) == 1

        result = ApiTokenCache.invalidate_by_tenant(tenant_id)
        assert result is True

        for i in range(3):
            key = f"api_token:app:test-token-{i}"
            assert redis_client.exists(key) == 0

        assert redis_client.exists(ApiTokenCache._make_tenant_index_key(tenant_id)) == 0

    def test_concurrent_cache_access(self):
        """Test concurrent cache access doesn't cause issues."""
        import concurrent.futures
        from unittest.mock import MagicMock

        mock_token = MagicMock()
        mock_token.id = "test-id"
        mock_token.app_id = "test-app"
        mock_token.tenant_id = "test-tenant"
        mock_token.type = "app"
        mock_token.token = self.test_token
        mock_token.last_used_at = None
        mock_token.created_at = datetime.now()

        ApiTokenCache.set(self.test_token, self.test_scope, mock_token)

        def get_from_cache():
            return ApiTokenCache.get(self.test_token, self.test_scope)

        with concurrent.futures.ThreadPoolExecutor(max_workers=10) as executor:
            futures = [executor.submit(get_from_cache) for _ in range(50)]
            results = [f.result() for f in concurrent.futures.as_completed(futures)]

        assert len(results) == 50
        assert all(r is not None for r in results)
        assert all(isinstance(r, CachedApiToken) for r in results)
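
The tenant-index tests above imply a fast-path shape for invalidation: every `set` also records the cache key in a per-tenant Redis set, so tenant-wide invalidation can walk that set instead of scanning the whole keyspace. A minimal sketch of the invalidation side, assuming the private `_make_tenant_index_key` helper exercised above; not the verbatim implementation:

```python
# Hedged sketch of index-based invalidation, mirroring the assertions in
# test_invalidate_by_tenant_via_index above.
from extensions.ext_redis import redis_client
from services.api_token_service import ApiTokenCache


def invalidate_by_tenant(tenant_id: str) -> bool:
    index_key = ApiTokenCache._make_tenant_index_key(tenant_id)
    members = redis_client.smembers(index_key)
    if members:
        # Drop every cached token key recorded for this tenant in one call
        redis_client.delete(*members)
    # Then drop the index itself so it cannot go stale
    redis_client.delete(index_key)
    return True
```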


class TestTokenUsageRecording:
    """Tests for recording token usage in Redis (batch update approach)."""

    def setup_method(self):
        self.test_token = "test-usage-token"
        self.test_scope = "app"
        self.active_key = ApiTokenCache.make_active_key(self.test_token, self.test_scope)

    def teardown_method(self):
        try:
            redis_client.delete(self.active_key)
        except Exception:
            pass

    def test_record_token_usage_sets_redis_key(self):
        """Test that record_token_usage writes an active key to Redis."""
        from services.api_token_service import record_token_usage

        record_token_usage(self.test_token, self.test_scope)

        # Key should exist
        assert redis_client.exists(self.active_key) == 1

        # Value should be an ISO timestamp
        value = redis_client.get(self.active_key)
        if isinstance(value, bytes):
            value = value.decode("utf-8")
        datetime.fromisoformat(value)  # Should not raise

    def test_record_token_usage_has_ttl(self):
        """Test that active keys have a TTL as safety net."""
        from services.api_token_service import record_token_usage

        record_token_usage(self.test_token, self.test_scope)

        ttl = redis_client.ttl(self.active_key)
        assert 3595 <= ttl <= 3600  # ~1 hour

    def test_record_token_usage_overwrites(self):
        """Test that repeated calls overwrite the same key (no accumulation)."""
        from services.api_token_service import record_token_usage

        record_token_usage(self.test_token, self.test_scope)
        first_value = redis_client.get(self.active_key)

        time.sleep(0.01)  # Tiny delay so timestamp differs

        record_token_usage(self.test_token, self.test_scope)
        second_value = redis_client.get(self.active_key)

        # Key count should still be 1 (overwritten, not accumulated)
        assert redis_client.exists(self.active_key) == 1
        # The stored timestamp should have been replaced by the second call
        assert second_value != first_value
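
Taken together, the three tests above pin down the write path for usage recording: one SETEX per call, an ISO-8601 timestamp as the payload, and a roughly one-hour TTL as a safety net. A minimal sketch of what `record_token_usage` could look like under those constraints (the real function lives in `services.api_token_service` and may differ):

```python
# Hedged sketch, not the verbatim implementation.
from datetime import datetime

from extensions.ext_redis import redis_client
from services.api_token_service import ApiTokenCache

ACTIVE_KEY_TTL_SECONDS = 3600  # safety-net TTL asserted by test_record_token_usage_has_ttl


def record_token_usage(token: str, scope: str) -> None:
    key = ApiTokenCache.make_active_key(token, scope)
    # SETEX overwrites any existing value, so repeated calls keep exactly one
    # key per token; a periodic batch job can later flush these into the DB.
    redis_client.setex(key, ACTIVE_KEY_TTL_SECONDS, datetime.now().isoformat())
```

Writing a single overwritable key per token keeps hot tokens from accumulating entries, which is what the overwrite test verifies.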


class TestEndToEndCacheFlow:
    """End-to-end integration test for complete cache flow."""

    @pytest.mark.usefixtures("db_session")
    def test_complete_flow_cache_miss_then_hit(self, db_session):
        """
        Test complete flow:
        1. First request (cache miss) -> query DB -> cache result
        2. Second request (cache hit) -> return from cache
        3. Verify Redis state
        """
        test_token_value = "test-e2e-token"
        test_scope = "app"

        test_token = ApiToken()
        test_token.id = "test-e2e-id"
        test_token.token = test_token_value
        test_token.type = test_scope
        test_token.app_id = "test-app"
        test_token.tenant_id = "test-tenant"
        test_token.last_used_at = None
        test_token.created_at = datetime.now()

        db_session.add(test_token)
        db_session.commit()

        try:
            # Step 1: Cache miss - set token in cache
            ApiTokenCache.set(test_token_value, test_scope, test_token)

            cache_key = f"api_token:{test_scope}:{test_token_value}"
            assert redis_client.exists(cache_key) == 1

            # Step 2: Cache hit - get from cache
            cached_token = ApiTokenCache.get(test_token_value, test_scope)
            assert cached_token is not None
            assert cached_token.id == test_token.id
            assert cached_token.token == test_token_value

            # Step 3: Verify tenant index
            index_key = ApiTokenCache._make_tenant_index_key(test_token.tenant_id)
            assert redis_client.exists(index_key) == 1
            assert cache_key.encode() in redis_client.smembers(index_key)

            # Step 4: Delete and verify cleanup
            ApiTokenCache.delete(test_token_value, test_scope)
            assert redis_client.exists(cache_key) == 0
            assert cache_key.encode() not in redis_client.smembers(index_key)

        finally:
            db_session.delete(test_token)
            db_session.commit()
            redis_client.delete(f"api_token:{test_scope}:{test_token_value}")
            redis_client.delete(ApiTokenCache._make_tenant_index_key(test_token.tenant_id))

    def test_high_concurrency_simulation(self):
        """Simulate high concurrency access to cache."""
        import concurrent.futures
        from unittest.mock import MagicMock

        test_token_value = "test-concurrent-token"
        test_scope = "app"

        mock_token = MagicMock()
        mock_token.id = "concurrent-id"
        mock_token.app_id = "test-app"
        mock_token.tenant_id = "test-tenant"
        mock_token.type = test_scope
        mock_token.token = test_token_value
        mock_token.last_used_at = datetime.now()
        mock_token.created_at = datetime.now()

        ApiTokenCache.set(test_token_value, test_scope, mock_token)

        try:

            def read_cache():
                return ApiTokenCache.get(test_token_value, test_scope)

            start_time = time.time()
            with concurrent.futures.ThreadPoolExecutor(max_workers=20) as executor:
                futures = [executor.submit(read_cache) for _ in range(100)]
                results = [f.result() for f in concurrent.futures.as_completed(futures)]
            elapsed = time.time() - start_time

            assert len(results) == 100
            assert all(r is not None for r in results)

            assert elapsed < 1.0, f"Too slow: {elapsed}s for 100 cache reads"

        finally:
            ApiTokenCache.delete(test_token_value, test_scope)
            redis_client.delete(ApiTokenCache._make_tenant_index_key(mock_token.tenant_id))


class TestRedisFailover:
    """Test behavior when Redis is unavailable."""

    @patch("services.api_token_service.redis_client")
    def test_graceful_degradation_when_redis_fails(self, mock_redis):
        """Test system degrades gracefully when Redis is unavailable."""
        from redis import RedisError

        mock_redis.get.side_effect = RedisError("Connection failed")
        mock_redis.setex.side_effect = RedisError("Connection failed")

        result_get = ApiTokenCache.get("test-token", "app")
        assert result_get is None

        result_set = ApiTokenCache.set("test-token", "app", None)
        assert result_set is False
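
The failover test expects reads to come back as `None` and writes to report `False` when Redis is down, instead of raising, so that callers can fall back to the database. A minimal sketch of that wrapping pattern, assuming every Redis call on `ApiTokenCache` is guarded this way:

```python
# Illustrative degradation pattern; the real guards live inside ApiTokenCache.
from redis import RedisError

from extensions.ext_redis import redis_client


def safe_cache_get(key: str) -> bytes | None:
    try:
        return redis_client.get(key)
    except RedisError:
        return None  # treated as a cache miss; the caller falls back to the DB


def safe_cache_set(key: str, ttl_seconds: int, payload: bytes) -> bool:
    try:
        redis_client.setex(key, ttl_seconds, payload)
        return True
    except RedisError:
        return False  # the cache is best-effort; failure must not break auth
```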

@@ -0,0 +1,197 @@
from unittest.mock import MagicMock, patch

import pytest

from core.tools.entities.tool_entities import ToolProviderType
from core.workflow.nodes.agent.agent_node import AgentNode


class TestInferToolProviderType:
    """Test cases for AgentNode._infer_tool_provider_type method."""

    def test_infer_type_from_config_workflow(self):
        """Test inferring workflow provider type from config."""
        tool_config = {
            "type": "workflow",
            "provider_name": "workflow-provider-id",
        }
        tenant_id = "test-tenant"

        result = AgentNode._infer_tool_provider_type(tool_config, tenant_id)

        assert result == ToolProviderType.WORKFLOW

    def test_infer_type_from_config_builtin(self):
        """Test inferring builtin provider type from config."""
        tool_config = {
            "type": "builtin",
            "provider_name": "builtin-provider-id",
        }
        tenant_id = "test-tenant"

        result = AgentNode._infer_tool_provider_type(tool_config, tenant_id)

        assert result == ToolProviderType.BUILT_IN

    def test_infer_type_from_config_api(self):
        """Test inferring API provider type from config."""
        tool_config = {
            "type": "api",
            "provider_name": "api-provider-id",
        }
        tenant_id = "test-tenant"

        result = AgentNode._infer_tool_provider_type(tool_config, tenant_id)

        assert result == ToolProviderType.API

    def test_infer_type_from_config_mcp(self):
        """Test inferring MCP provider type from config."""
        tool_config = {
            "type": "mcp",
            "provider_name": "mcp-provider-id",
        }
        tenant_id = "test-tenant"

        result = AgentNode._infer_tool_provider_type(tool_config, tenant_id)

        assert result == ToolProviderType.MCP

    def test_infer_type_invalid_config_value_raises_error(self):
        """Test that invalid type value in config raises ValueError."""
        tool_config = {
            "type": "invalid-type",
            "provider_name": "workflow-provider-id",
        }
        tenant_id = "test-tenant"

        with pytest.raises(ValueError):
            AgentNode._infer_tool_provider_type(tool_config, tenant_id)

    def test_infer_workflow_type_from_database(self):
        """Test inferring workflow provider type from database."""
        tool_config = {
            "provider_name": "workflow-provider-id",
        }
        tenant_id = "test-tenant"

        with patch("core.db.session_factory.session_factory.create_session") as mock_create_session:
            mock_session = MagicMock()
            mock_create_session.return_value.__enter__.return_value = mock_session

            # First query (WorkflowToolProvider) returns a result
            mock_session.scalar.return_value = True

            result = AgentNode._infer_tool_provider_type(tool_config, tenant_id)

            assert result == ToolProviderType.WORKFLOW
            # Should only query once (after finding WorkflowToolProvider)
            assert mock_session.scalar.call_count == 1

    def test_infer_mcp_type_from_database(self):
        """Test inferring MCP provider type from database."""
        tool_config = {
            "provider_name": "mcp-provider-id",
        }
        tenant_id = "test-tenant"

        with patch("core.db.session_factory.session_factory.create_session") as mock_create_session:
            mock_session = MagicMock()
            mock_create_session.return_value.__enter__.return_value = mock_session

            # First query (WorkflowToolProvider) returns None
            # Second query (MCPToolProvider) returns a result
            mock_session.scalar.side_effect = [None, True]

            result = AgentNode._infer_tool_provider_type(tool_config, tenant_id)

            assert result == ToolProviderType.MCP
            assert mock_session.scalar.call_count == 2

    def test_infer_api_type_from_database(self):
        """Test inferring API provider type from database."""
        tool_config = {
            "provider_name": "api-provider-id",
        }
        tenant_id = "test-tenant"

        with patch("core.db.session_factory.session_factory.create_session") as mock_create_session:
            mock_session = MagicMock()
            mock_create_session.return_value.__enter__.return_value = mock_session

            # First query (WorkflowToolProvider) returns None
            # Second query (MCPToolProvider) returns None
            # Third query (ApiToolProvider) returns a result
            mock_session.scalar.side_effect = [None, None, True]

            result = AgentNode._infer_tool_provider_type(tool_config, tenant_id)

            assert result == ToolProviderType.API
            assert mock_session.scalar.call_count == 3

    def test_infer_builtin_type_from_database(self):
        """Test inferring builtin provider type from database."""
        tool_config = {
            "provider_name": "builtin-provider-id",
        }
        tenant_id = "test-tenant"

        with patch("core.db.session_factory.session_factory.create_session") as mock_create_session:
            mock_session = MagicMock()
            mock_create_session.return_value.__enter__.return_value = mock_session

            # First three queries return None
            # Fourth query (BuiltinToolProvider) returns a result
            mock_session.scalar.side_effect = [None, None, None, True]

            result = AgentNode._infer_tool_provider_type(tool_config, tenant_id)

            assert result == ToolProviderType.BUILT_IN
            assert mock_session.scalar.call_count == 4

    def test_infer_type_default_when_not_found(self):
        """Test raising AgentNodeError when provider is not found in database."""
        tool_config = {
            "provider_name": "unknown-provider-id",
        }
        tenant_id = "test-tenant"

        with patch("core.db.session_factory.session_factory.create_session") as mock_create_session:
            mock_session = MagicMock()
            mock_create_session.return_value.__enter__.return_value = mock_session

            # All queries return None
            mock_session.scalar.return_value = None

            # Current implementation raises AgentNodeError when provider not found
            from core.workflow.nodes.agent.exc import AgentNodeError

            with pytest.raises(AgentNodeError, match="Tool provider with ID 'unknown-provider-id' not found"):
                AgentNode._infer_tool_provider_type(tool_config, tenant_id)

    def test_infer_type_default_when_no_provider_name(self):
        """Test defaulting to BUILT_IN when provider_name is missing."""
        tool_config = {}
        tenant_id = "test-tenant"

        result = AgentNode._infer_tool_provider_type(tool_config, tenant_id)

        assert result == ToolProviderType.BUILT_IN

    def test_infer_type_database_exception_propagates(self):
        """Test that database exception propagates (current implementation doesn't catch it)."""
        tool_config = {
            "provider_name": "provider-id",
        }
        tenant_id = "test-tenant"

        with patch("core.db.session_factory.session_factory.create_session") as mock_create_session:
            mock_session = MagicMock()
            mock_create_session.return_value.__enter__.return_value = mock_session

            # Database query raises exception
            mock_session.scalar.side_effect = Exception("Database error")

            # Current implementation doesn't catch exceptions, so it propagates
            with pytest.raises(Exception, match="Database error"):
                AgentNode._infer_tool_provider_type(tool_config, tenant_id)
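
The suite above fully constrains `_infer_tool_provider_type`: an explicit `type` in the config wins (unknown strings raise `ValueError`), a missing `provider_name` defaults to `BUILT_IN`, and otherwise the provider ID is probed in the order WorkflowToolProvider, MCPToolProvider, ApiToolProvider, BuiltinToolProvider, raising `AgentNodeError` when nothing matches. A sketch of that control flow; the model names come from the test comments, but the exact where-clauses are assumptions:

```python
# Sketch of the inference order the tests assert; not the verbatim method.
from sqlalchemy import select

from core.db.session_factory import session_factory
from core.tools.entities.tool_entities import ToolProviderType
from core.workflow.nodes.agent.exc import AgentNodeError
from models.tools import ApiToolProvider, BuiltinToolProvider, MCPToolProvider, WorkflowToolProvider

_CONFIG_TYPE_MAP = {
    "workflow": ToolProviderType.WORKFLOW,
    "builtin": ToolProviderType.BUILT_IN,
    "api": ToolProviderType.API,
    "mcp": ToolProviderType.MCP,
}


def infer_tool_provider_type(tool_config: dict, tenant_id: str) -> ToolProviderType:
    if "type" in tool_config:
        config_type = tool_config["type"]
        if config_type not in _CONFIG_TYPE_MAP:
            raise ValueError(f"Unknown tool provider type: {config_type}")
        return _CONFIG_TYPE_MAP[config_type]

    provider_id = tool_config.get("provider_name")
    if not provider_id:
        return ToolProviderType.BUILT_IN  # default when no provider_name is given

    probes = [
        (WorkflowToolProvider, ToolProviderType.WORKFLOW),
        (MCPToolProvider, ToolProviderType.MCP),
        (ApiToolProvider, ToolProviderType.API),
        (BuiltinToolProvider, ToolProviderType.BUILT_IN),
    ]
    with session_factory.create_session() as session:
        for model, provider_type in probes:
            # One scalar probe per table, stopping at the first hit.
            # Column choice (id/tenant_id) is an assumption for illustration.
            if session.scalar(select(model.id).where(model.id == provider_id, model.tenant_id == tenant_id)):
                return provider_type
    raise AgentNodeError(f"Tool provider with ID '{provider_id}' not found")
```

Ordering the probes from most to least specific keeps the common builtin case from shadowing workflow or MCP providers that happen to share an ID.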

@@ -132,8 +132,6 @@ class TestCelerySSLConfiguration:
        mock_config.WORKFLOW_SCHEDULE_MAX_DISPATCH_PER_TICK = 0
        mock_config.ENABLE_TRIGGER_PROVIDER_REFRESH_TASK = False
        mock_config.TRIGGER_PROVIDER_REFRESH_INTERVAL = 15
        mock_config.ENABLE_API_TOKEN_LAST_USED_UPDATE_TASK = False
        mock_config.API_TOKEN_LAST_USED_UPDATE_INTERVAL = 30

        with patch("extensions.ext_celery.dify_config", mock_config):
            from dify_app import DifyApp

@@ -1,250 +0,0 @@
"""
Unit tests for API Token Cache module.
"""

import json
from datetime import datetime
from unittest.mock import MagicMock, patch

from services.api_token_service import (
    CACHE_KEY_PREFIX,
    CACHE_NULL_TTL_SECONDS,
    CACHE_TTL_SECONDS,
    ApiTokenCache,
    CachedApiToken,
)


class TestApiTokenCache:
    """Test cases for ApiTokenCache class."""

    def setup_method(self):
        """Setup test fixtures."""
        self.mock_token = MagicMock()
        self.mock_token.id = "test-token-id-123"
        self.mock_token.app_id = "test-app-id-456"
        self.mock_token.tenant_id = "test-tenant-id-789"
        self.mock_token.type = "app"
        self.mock_token.token = "test-token-value-abc"
        self.mock_token.last_used_at = datetime(2026, 2, 3, 10, 0, 0)
        self.mock_token.created_at = datetime(2026, 1, 1, 0, 0, 0)

    def test_make_cache_key(self):
        """Test cache key generation."""
        # Test with scope
        key = ApiTokenCache._make_cache_key("my-token", "app")
        assert key == f"{CACHE_KEY_PREFIX}:app:my-token"

        # Test without scope
        key = ApiTokenCache._make_cache_key("my-token", None)
        assert key == f"{CACHE_KEY_PREFIX}:any:my-token"

    def test_serialize_token(self):
        """Test token serialization."""
        serialized = ApiTokenCache._serialize_token(self.mock_token)
        data = json.loads(serialized)

        assert data["id"] == "test-token-id-123"
        assert data["app_id"] == "test-app-id-456"
        assert data["tenant_id"] == "test-tenant-id-789"
        assert data["type"] == "app"
        assert data["token"] == "test-token-value-abc"
        assert data["last_used_at"] == "2026-02-03T10:00:00"
        assert data["created_at"] == "2026-01-01T00:00:00"

    def test_serialize_token_with_nulls(self):
        """Test token serialization with None values."""
        mock_token = MagicMock()
        mock_token.id = "test-id"
        mock_token.app_id = None
        mock_token.tenant_id = None
        mock_token.type = "dataset"
        mock_token.token = "test-token"
        mock_token.last_used_at = None
        mock_token.created_at = datetime(2026, 1, 1, 0, 0, 0)

        serialized = ApiTokenCache._serialize_token(mock_token)
        data = json.loads(serialized)

        assert data["app_id"] is None
        assert data["tenant_id"] is None
        assert data["last_used_at"] is None

    def test_deserialize_token(self):
        """Test token deserialization."""
        cached_data = json.dumps(
            {
                "id": "test-id",
                "app_id": "test-app",
                "tenant_id": "test-tenant",
                "type": "app",
                "token": "test-token",
                "last_used_at": "2026-02-03T10:00:00",
                "created_at": "2026-01-01T00:00:00",
            }
        )

        result = ApiTokenCache._deserialize_token(cached_data)

        assert isinstance(result, CachedApiToken)
        assert result.id == "test-id"
        assert result.app_id == "test-app"
        assert result.tenant_id == "test-tenant"
        assert result.type == "app"
        assert result.token == "test-token"
        assert result.last_used_at == datetime(2026, 2, 3, 10, 0, 0)
        assert result.created_at == datetime(2026, 1, 1, 0, 0, 0)

    def test_deserialize_null_token(self):
        """Test deserialization of null token (cached miss)."""
        result = ApiTokenCache._deserialize_token("null")
        assert result is None

    def test_deserialize_invalid_json(self):
        """Test deserialization with invalid JSON."""
        result = ApiTokenCache._deserialize_token("invalid-json{")
        assert result is None

    @patch("services.api_token_service.redis_client")
    def test_get_cache_hit(self, mock_redis):
        """Test cache hit scenario."""
        cached_data = json.dumps(
            {
                "id": "test-id",
                "app_id": "test-app",
                "tenant_id": "test-tenant",
                "type": "app",
                "token": "test-token",
                "last_used_at": "2026-02-03T10:00:00",
                "created_at": "2026-01-01T00:00:00",
            }
        ).encode("utf-8")
        mock_redis.get.return_value = cached_data

        result = ApiTokenCache.get("test-token", "app")

        assert result is not None
        assert isinstance(result, CachedApiToken)
        assert result.app_id == "test-app"
        mock_redis.get.assert_called_once_with(f"{CACHE_KEY_PREFIX}:app:test-token")

    @patch("services.api_token_service.redis_client")
    def test_get_cache_miss(self, mock_redis):
        """Test cache miss scenario."""
        mock_redis.get.return_value = None

        result = ApiTokenCache.get("test-token", "app")

        assert result is None
        mock_redis.get.assert_called_once()

    @patch("services.api_token_service.redis_client")
    def test_set_valid_token(self, mock_redis):
        """Test setting a valid token in cache."""
        result = ApiTokenCache.set("test-token", "app", self.mock_token)

        assert result is True
        mock_redis.setex.assert_called_once()
        args = mock_redis.setex.call_args[0]
        assert args[0] == f"{CACHE_KEY_PREFIX}:app:test-token"
        assert args[1] == CACHE_TTL_SECONDS

    @patch("services.api_token_service.redis_client")
    def test_set_null_token(self, mock_redis):
        """Test setting a null token (cache penetration prevention)."""
        result = ApiTokenCache.set("invalid-token", "app", None)

        assert result is True
        mock_redis.setex.assert_called_once()
        args = mock_redis.setex.call_args[0]
        assert args[0] == f"{CACHE_KEY_PREFIX}:app:invalid-token"
        assert args[1] == CACHE_NULL_TTL_SECONDS
        assert args[2] == b"null"

    @patch("services.api_token_service.redis_client")
    def test_delete_with_scope(self, mock_redis):
        """Test deleting token cache with specific scope."""
        result = ApiTokenCache.delete("test-token", "app")

        assert result is True
        mock_redis.delete.assert_called_once_with(f"{CACHE_KEY_PREFIX}:app:test-token")

    @patch("services.api_token_service.redis_client")
    def test_delete_without_scope(self, mock_redis):
        """Test deleting token cache without scope (delete all)."""
        # Mock scan_iter to return an iterator of keys
        mock_redis.scan_iter.return_value = iter(
            [
                b"api_token:app:test-token",
                b"api_token:dataset:test-token",
            ]
        )

        result = ApiTokenCache.delete("test-token", None)

        assert result is True
        # Verify scan_iter was called with the correct pattern
        mock_redis.scan_iter.assert_called_once()
        call_args = mock_redis.scan_iter.call_args
        assert call_args[1]["match"] == f"{CACHE_KEY_PREFIX}:*:test-token"

        # Verify delete was called with all matched keys
        mock_redis.delete.assert_called_once_with(
            b"api_token:app:test-token",
            b"api_token:dataset:test-token",
        )

    @patch("services.api_token_service.redis_client")
    def test_redis_fallback_on_exception(self, mock_redis):
        """Test Redis fallback when Redis is unavailable."""
        from redis import RedisError

        mock_redis.get.side_effect = RedisError("Connection failed")

        result = ApiTokenCache.get("test-token", "app")

        # Should return None (fallback) instead of raising exception
        assert result is None


class TestApiTokenCacheIntegration:
    """Integration test scenarios."""

    @patch("services.api_token_service.redis_client")
    def test_full_cache_lifecycle(self, mock_redis):
        """Test complete cache lifecycle: set -> get -> delete."""
        # Setup mock token
        mock_token = MagicMock()
        mock_token.id = "id-123"
        mock_token.app_id = "app-456"
        mock_token.tenant_id = "tenant-789"
        mock_token.type = "app"
        mock_token.token = "token-abc"
        mock_token.last_used_at = datetime(2026, 2, 3, 10, 0, 0)
        mock_token.created_at = datetime(2026, 1, 1, 0, 0, 0)

        # 1. Set token in cache
        ApiTokenCache.set("token-abc", "app", mock_token)
        assert mock_redis.setex.called

        # 2. Simulate cache hit
        cached_data = ApiTokenCache._serialize_token(mock_token)
        mock_redis.get.return_value = cached_data  # bytes from model_dump_json().encode()

        retrieved = ApiTokenCache.get("token-abc", "app")
        assert retrieved is not None
        assert isinstance(retrieved, CachedApiToken)

        # 3. Delete from cache
        ApiTokenCache.delete("token-abc", "app")
        assert mock_redis.delete.called

    @patch("services.api_token_service.redis_client")
    def test_cache_penetration_prevention(self, mock_redis):
        """Test that non-existent tokens are cached as null."""
        # Set null token (cache miss)
        ApiTokenCache.set("non-existent-token", "app", None)

        args = mock_redis.setex.call_args[0]
        assert args[2] == b"null"
        assert args[1] == CACHE_NULL_TTL_SECONDS  # Shorter TTL for null values
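
Both penetration tests rely on the same convention: an invalid token is cached as the literal bytes `b"null"` under the shorter `CACHE_NULL_TTL_SECONDS`, and the reader treats that sentinel as a verified miss. A compact sketch of the convention, assuming the constants imported at the top of this file:

```python
# Sketch of the null-caching convention; mirrors the assertions above.
import json

from extensions.ext_redis import redis_client
from services.api_token_service import CACHE_KEY_PREFIX, CACHE_NULL_TTL_SECONDS, CACHE_TTL_SECONDS


def make_cache_key(token: str, scope: str | None) -> str:
    # scope=None collapses to the literal "any" segment (see test_make_cache_key)
    return f"{CACHE_KEY_PREFIX}:{scope or 'any'}:{token}"


def cache_token(token: str, scope: str | None, serialized: str | None) -> None:
    key = make_cache_key(token, scope)
    if serialized is None:
        # Cache the miss briefly so repeated invalid tokens skip the DB
        redis_client.setex(key, CACHE_NULL_TTL_SECONDS, b"null")
    else:
        redis_client.setex(key, CACHE_TTL_SECONDS, serialized.encode("utf-8"))


def read_cached_token(token: str, scope: str | None) -> dict | None:
    raw = redis_client.get(make_cache_key(token, scope))
    if raw is None or raw == b"null":
        return None  # both a real miss and a cached miss surface as None here
    return json.loads(raw)
```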

@@ -114,21 +114,6 @@ def mock_db_session():
    session = MagicMock()
    # Ensure tests can observe session.close() via context manager teardown
    session.close = MagicMock()
    session.commit = MagicMock()

    # Mock session.begin() context manager to auto-commit on exit
    begin_cm = MagicMock()
    begin_cm.__enter__.return_value = session

    def _begin_exit_side_effect(*args, **kwargs):
        # session.begin().__exit__() should commit if no exception
        if args[0] is None:  # No exception
            session.commit()

    begin_cm.__exit__.side_effect = _begin_exit_side_effect
    session.begin.return_value = begin_cm

    # Mock create_session() context manager
    cm = MagicMock()
    cm.__enter__.return_value = session
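
For reference, the fixture mirrors this production shape (a sketch; actual call sites vary): `create_session()` yields a session whose `begin()` context manager commits on a clean exit, which is exactly what `_begin_exit_side_effect` emulates.

```python
# Production pattern the mock above emulates (illustrative only).
from core.db.session_factory import session_factory


def persist(entity) -> None:
    with session_factory.create_session() as session:
        with session.begin():  # __exit__ commits when no exception was raised
            session.add(entity)
```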

@@ -109,7 +109,6 @@ const AgentTools: FC = () => {
      tool_parameters: paramsWithDefaultValue,
      notAuthor: !tool.is_team_authorization,
      enabled: true,
      type: tool.provider_type as CollectionType,
    }
  }
  const handleSelectTool = (tool: ToolDefaultValue) => {

@@ -0,0 +1,129 @@
'use client'
import type { FC } from 'react'
import type { DocType } from '@/models/datasets'
import { useTranslation } from 'react-i18next'
import Button from '@/app/components/base/button'
import Radio from '@/app/components/base/radio'
import Tooltip from '@/app/components/base/tooltip'
import { useMetadataMap } from '@/hooks/use-metadata'
import { CUSTOMIZABLE_DOC_TYPES } from '@/models/datasets'
import { cn } from '@/utils/classnames'
import s from '../style.module.css'

const TypeIcon: FC<{ iconName: string, className?: string }> = ({ iconName, className = '' }) => {
  return <div className={cn(s.commonIcon, s[`${iconName}Icon`], className)} />
}

const IconButton: FC<{ type: DocType, isChecked: boolean }> = ({ type, isChecked = false }) => {
  const metadataMap = useMetadataMap()
  return (
    <Tooltip popupContent={metadataMap[type].text}>
      <button type="button" className={cn(s.iconWrapper, 'group', isChecked ? s.iconCheck : '')}>
        <TypeIcon
          iconName={metadataMap[type].iconName || ''}
          className={`group-hover:bg-primary-600 ${isChecked ? '!bg-primary-600' : ''}`}
        />
      </button>
    </Tooltip>
  )
}

type DocTypeSelectorProps = {
  docType: DocType | ''
  documentType?: DocType | ''
  tempDocType: DocType | ''
  onTempDocTypeChange: (type: DocType | '') => void
  onConfirm: () => void
  onCancel: () => void
}

const DocTypeSelector: FC<DocTypeSelectorProps> = ({
  docType,
  documentType,
  tempDocType,
  onTempDocTypeChange,
  onConfirm,
  onCancel,
}) => {
  const { t } = useTranslation()
  const isFirstTime = !docType && !documentType
  const currValue = tempDocType ?? documentType

  return (
    <>
      {isFirstTime && (
        <div className={s.desc}>{t('metadata.desc', { ns: 'datasetDocuments' })}</div>
      )}
      <div className={s.operationWrapper}>
        {isFirstTime && (
          <span className={s.title}>{t('metadata.docTypeSelectTitle', { ns: 'datasetDocuments' })}</span>
        )}
        {documentType && (
          <>
            <span className={s.title}>{t('metadata.docTypeChangeTitle', { ns: 'datasetDocuments' })}</span>
            <span className={s.changeTip}>{t('metadata.docTypeSelectWarning', { ns: 'datasetDocuments' })}</span>
          </>
        )}
        <Radio.Group value={currValue ?? ''} onChange={onTempDocTypeChange} className={s.radioGroup}>
          {CUSTOMIZABLE_DOC_TYPES.map(type => (
            <Radio key={type} value={type} className={`${s.radio} ${currValue === type ? 'shadow-none' : ''}`}>
              <IconButton type={type} isChecked={currValue === type} />
            </Radio>
          ))}
        </Radio.Group>
        {isFirstTime && (
          <Button variant="primary" onClick={onConfirm} disabled={!tempDocType}>
            {t('metadata.firstMetaAction', { ns: 'datasetDocuments' })}
          </Button>
        )}
        {documentType && (
          <div className={s.opBtnWrapper}>
            <Button onClick={onConfirm} className={`${s.opBtn} ${s.opSaveBtn}`} variant="primary">
              {t('operation.save', { ns: 'common' })}
            </Button>
            <Button onClick={onCancel} className={`${s.opBtn} ${s.opCancelBtn}`}>
              {t('operation.cancel', { ns: 'common' })}
            </Button>
          </div>
        )}
      </div>
    </>
  )
}

type DocumentTypeDisplayProps = {
  displayType: DocType | ''
  showChangeLink?: boolean
  onChangeClick?: () => void
}

export const DocumentTypeDisplay: FC<DocumentTypeDisplayProps> = ({
  displayType,
  showChangeLink = false,
  onChangeClick,
}) => {
  const { t } = useTranslation()
  const metadataMap = useMetadataMap()
  const effectiveType = displayType || 'book'

  return (
    <div className={s.documentTypeShow}>
      {(displayType || !showChangeLink) && (
        <>
          <TypeIcon iconName={metadataMap[effectiveType]?.iconName || ''} className={s.iconShow} />
          {metadataMap[effectiveType].text}
          {showChangeLink && (
            <div className="ml-1 inline-flex items-center gap-1">
              ·
              <div onClick={onChangeClick} className="cursor-pointer hover:text-text-accent">
                {t('operation.change', { ns: 'common' })}
              </div>
            </div>
          )}
        </>
      )}
    </div>
  )
}

export default DocTypeSelector

@@ -0,0 +1,89 @@
'use client'
import type { FC, ReactNode } from 'react'
import type { inputType } from '@/hooks/use-metadata'
import { useTranslation } from 'react-i18next'
import AutoHeightTextarea from '@/app/components/base/auto-height-textarea'
import Input from '@/app/components/base/input'
import { SimpleSelect } from '@/app/components/base/select'
import { getTextWidthWithCanvas } from '@/utils'
import { cn } from '@/utils/classnames'
import s from '../style.module.css'

type FieldInfoProps = {
  label: string
  value?: string
  valueIcon?: ReactNode
  displayedValue?: string
  defaultValue?: string
  showEdit?: boolean
  inputType?: inputType
  selectOptions?: Array<{ value: string, name: string }>
  onUpdate?: (v: string) => void
}

const FieldInfo: FC<FieldInfoProps> = ({
  label,
  value = '',
  valueIcon,
  displayedValue = '',
  defaultValue,
  showEdit = false,
  inputType = 'input',
  selectOptions = [],
  onUpdate,
}) => {
  const { t } = useTranslation()
  const textNeedWrap = getTextWidthWithCanvas(displayedValue) > 190
  const editAlignTop = showEdit && inputType === 'textarea'
  const readAlignTop = !showEdit && textNeedWrap

  const renderContent = () => {
    if (!showEdit)
      return displayedValue

    if (inputType === 'select') {
      return (
        <SimpleSelect
          onSelect={({ value }) => onUpdate?.(value as string)}
          items={selectOptions}
          defaultValue={value}
          className={s.select}
          wrapperClassName={s.selectWrapper}
          placeholder={`${t('metadata.placeholder.select', { ns: 'datasetDocuments' })}${label}`}
        />
      )
    }

    if (inputType === 'textarea') {
      return (
        <AutoHeightTextarea
          onChange={e => onUpdate?.(e.target.value)}
          value={value}
          className={s.textArea}
          placeholder={`${t('metadata.placeholder.add', { ns: 'datasetDocuments' })}${label}`}
        />
      )
    }

    return (
      <Input
        onChange={e => onUpdate?.(e.target.value)}
        value={value}
        defaultValue={defaultValue}
        placeholder={`${t('metadata.placeholder.add', { ns: 'datasetDocuments' })}${label}`}
      />
    )
  }

  return (
    <div className={cn('flex min-h-5 items-center gap-1 py-0.5 text-xs', editAlignTop && '!items-start', readAlignTop && '!items-start pt-1')}>
      <div className={cn('w-[200px] shrink-0 overflow-hidden text-ellipsis whitespace-nowrap text-text-tertiary', editAlignTop && 'pt-1')}>{label}</div>
      <div className="flex grow items-center gap-1 text-text-secondary">
        {valueIcon}
        {renderContent()}
      </div>
    </div>
  )
}

export default FieldInfo

@@ -0,0 +1,88 @@
'use client'
import type { FC } from 'react'
import type { metadataType } from '@/hooks/use-metadata'
import type { FullDocumentDetail } from '@/models/datasets'
import { get } from 'es-toolkit/compat'
import { useBookCategories, useBusinessDocCategories, useLanguages, useMetadataMap, usePersonalDocCategories } from '@/hooks/use-metadata'
import FieldInfo from './field-info'

const map2Options = (map: Record<string, string>) => {
  return Object.keys(map).map(key => ({ value: key, name: map[key] }))
}

function useCategoryMapResolver(mainField: metadataType | '') {
  const languageMap = useLanguages()
  const bookCategoryMap = useBookCategories()
  const personalDocCategoryMap = usePersonalDocCategories()
  const businessDocCategoryMap = useBusinessDocCategories()

  return (field: string): Record<string, string> => {
    if (field === 'language')
      return languageMap
    if (field === 'category' && mainField === 'book')
      return bookCategoryMap
    if (field === 'document_type') {
      if (mainField === 'personal_document')
        return personalDocCategoryMap
      if (mainField === 'business_document')
        return businessDocCategoryMap
    }
    return {}
  }
}

type MetadataFieldListProps = {
  mainField: metadataType | ''
  canEdit?: boolean
  metadata?: Record<string, string>
  docDetail?: FullDocumentDetail
  onFieldUpdate?: (field: string, value: string) => void
}

const MetadataFieldList: FC<MetadataFieldListProps> = ({
  mainField,
  canEdit = false,
  metadata,
  docDetail,
  onFieldUpdate,
}) => {
  const metadataMap = useMetadataMap()
  const getCategoryMap = useCategoryMapResolver(mainField)

  if (!mainField)
    return null

  const fieldMap = metadataMap[mainField]?.subFieldsMap
  const isFixedField = ['originInfo', 'technicalParameters'].includes(mainField)
  const sourceData = isFixedField ? docDetail : metadata

  const getDisplayValue = (field: string) => {
    const val = get(sourceData, field, '')
    if (!val && val !== 0)
      return '-'
    if (fieldMap[field]?.inputType === 'select')
      return getCategoryMap(field)[val]
    if (fieldMap[field]?.render)
      return fieldMap[field]?.render?.(val, field === 'hit_count' ? get(sourceData, 'segment_count', 0) as number : undefined)
    return val
  }

  return (
    <div className="flex flex-col gap-1">
      {Object.keys(fieldMap).map(field => (
        <FieldInfo
          key={fieldMap[field]?.label}
          label={fieldMap[field]?.label}
          displayedValue={getDisplayValue(field)}
          value={get(sourceData, field, '')}
          inputType={fieldMap[field]?.inputType || 'input'}
          showEdit={canEdit}
          onUpdate={val => onFieldUpdate?.(field, val)}
          selectOptions={map2Options(getCategoryMap(field))}
        />
      ))}
    </div>
  )
}

export default MetadataFieldList

@@ -0,0 +1,137 @@
'use client'
import type { CommonResponse } from '@/models/common'
import type { DocType, FullDocumentDetail } from '@/models/datasets'
import { useEffect, useState } from 'react'
import { useTranslation } from 'react-i18next'
import { useContext } from 'use-context-selector'
import { ToastContext } from '@/app/components/base/toast'
import { modifyDocMetadata } from '@/service/datasets'
import { asyncRunSafe } from '@/utils'
import { useDocumentContext } from '../../context'

type MetadataState = {
  documentType?: DocType | ''
  metadata: Record<string, string>
}

/**
 * Normalize raw doc_type: treat 'others' as empty string.
 */
const normalizeDocType = (rawDocType: string): DocType | '' => {
  return rawDocType === 'others' ? '' : rawDocType as DocType | ''
}

type UseMetadataStateOptions = {
  docDetail?: FullDocumentDetail
  onUpdate?: () => void
}

export function useMetadataState({ docDetail, onUpdate }: UseMetadataStateOptions) {
  const { doc_metadata = {} } = docDetail || {}
  const rawDocType = docDetail?.doc_type ?? ''
  const docType = normalizeDocType(rawDocType)

  const { t } = useTranslation()
  const { notify } = useContext(ToastContext)
  const datasetId = useDocumentContext(s => s.datasetId)
  const documentId = useDocumentContext(s => s.documentId)

  // If no documentType yet, start in editing + showDocTypes mode
  const [editStatus, setEditStatus] = useState(!docType)
  const [metadataParams, setMetadataParams] = useState<MetadataState>(
    docType
      ? { documentType: docType, metadata: (doc_metadata || {}) as Record<string, string> }
      : { metadata: {} },
  )
  const [showDocTypes, setShowDocTypes] = useState(!docType)
  const [tempDocType, setTempDocType] = useState<DocType | ''>('')
  const [saveLoading, setSaveLoading] = useState(false)

  // Sync local state when the upstream docDetail changes (e.g. after save or navigation).
  // These setters are intentionally called together to batch-reset multiple pieces
  // of derived editing state that cannot be expressed as pure derived values.
  useEffect(() => {
    if (docDetail?.doc_type) {
      // eslint-disable-next-line react-hooks-extra/no-direct-set-state-in-use-effect
      setEditStatus(false)
      // eslint-disable-next-line react-hooks-extra/no-direct-set-state-in-use-effect
      setShowDocTypes(false)
      // eslint-disable-next-line react-hooks-extra/no-direct-set-state-in-use-effect
      setTempDocType(docType)
      // eslint-disable-next-line react-hooks-extra/no-direct-set-state-in-use-effect
      setMetadataParams({
        documentType: docType,
        metadata: (docDetail?.doc_metadata || {}) as Record<string, string>,
      })
    }
  }, [docDetail?.doc_type, docDetail?.doc_metadata, docType])

  const confirmDocType = () => {
    if (!tempDocType)
      return
    setMetadataParams({
      documentType: tempDocType,
      // Clear metadata when switching to a different doc type
      metadata: tempDocType === metadataParams.documentType ? metadataParams.metadata : {},
    })
    setEditStatus(true)
    setShowDocTypes(false)
  }

  const cancelDocType = () => {
    setTempDocType(metadataParams.documentType ?? '')
    setEditStatus(true)
    setShowDocTypes(false)
  }

  const enableEdit = () => {
    setEditStatus(true)
  }

  const cancelEdit = () => {
    setMetadataParams({ documentType: docType || '', metadata: { ...(docDetail?.doc_metadata || {}) } })
    setEditStatus(!docType)
    if (!docType)
      setShowDocTypes(true)
  }

  const saveMetadata = async () => {
    setSaveLoading(true)
    const [e] = await asyncRunSafe<CommonResponse>(modifyDocMetadata({
      datasetId,
      documentId,
      body: {
        doc_type: metadataParams.documentType || docType || '',
        doc_metadata: metadataParams.metadata,
      },
    }) as Promise<CommonResponse>)
    if (!e)
      notify({ type: 'success', message: t('actionMsg.modifiedSuccessfully', { ns: 'common' }) })
    else
      notify({ type: 'error', message: t('actionMsg.modifiedUnsuccessfully', { ns: 'common' }) })
    onUpdate?.()
    setEditStatus(false)
    setSaveLoading(false)
  }

  const updateMetadataField = (field: string, value: string) => {
    setMetadataParams(prev => ({ ...prev, metadata: { ...prev.metadata, [field]: value } }))
  }

  return {
    docType,
    editStatus,
    showDocTypes,
    tempDocType,
    saveLoading,
    metadataParams,
    setTempDocType,
    setShowDocTypes,
    confirmDocType,
    cancelDocType,
    enableEdit,
    cancelEdit,
    saveMetadata,
    updateMetadataField,
  }
}

@@ -1,7 +1,6 @@
import type { FullDocumentDetail } from '@/models/datasets'
import { fireEvent, render, screen, waitFor } from '@testing-library/react'
import { beforeEach, describe, expect, it, vi } from 'vitest'

import Metadata, { FieldInfo } from './index'

// Mock document context
@@ -121,7 +120,6 @@ vi.mock('@/hooks/use-metadata', () => ({
  }),
}))

// Mock getTextWidthWithCanvas
vi.mock('@/utils', () => ({
  asyncRunSafe: async (promise: Promise<unknown>) => {
    try {
@@ -135,33 +133,32 @@ vi.mock('@/utils', () => ({
  getTextWidthWithCanvas: () => 100,
}))

const createMockDocDetail = (overrides = {}): FullDocumentDetail => ({
  id: 'doc-1',
  name: 'Test Document',
  doc_type: 'book',
  doc_metadata: {
    title: 'Test Book',
    author: 'Test Author',
    language: 'en',
  },
  data_source_type: 'upload_file',
  segment_count: 10,
  hit_count: 5,
  ...overrides,
} as FullDocumentDetail)

describe('Metadata', () => {
  beforeEach(() => {
    vi.clearAllMocks()
  })

  const createMockDocDetail = (overrides = {}): FullDocumentDetail => ({
    id: 'doc-1',
    name: 'Test Document',
    doc_type: 'book',
    doc_metadata: {
      title: 'Test Book',
      author: 'Test Author',
      language: 'en',
    },
    data_source_type: 'upload_file',
    segment_count: 10,
    hit_count: 5,
    ...overrides,
  } as FullDocumentDetail)

  const defaultProps = {
    docDetail: createMockDocDetail(),
    loading: false,
    onUpdate: vi.fn(),
  }

  // Rendering tests
  describe('Rendering', () => {
    it('should render without crashing', () => {
      // Arrange & Act
@@ -191,7 +188,7 @@ describe('Metadata', () => {
      // Arrange & Act
      render(<Metadata {...defaultProps} loading={true} />)

      // Assert - Loading component should be rendered
      // Assert - Loading component should be rendered, title should not
      expect(screen.queryByText(/metadata\.title/i)).not.toBeInTheDocument()
    })

@@ -204,7 +201,7 @@ describe('Metadata', () => {
    })
  })

  // Edit mode tests
  // Edit mode (tests useMetadataState hook integration)
  describe('Edit Mode', () => {
    it('should enter edit mode when edit button is clicked', () => {
      // Arrange
@@ -303,7 +300,7 @@ describe('Metadata', () => {
    })
  })

  // Document type selection
  // Document type selection (tests DocTypeSelector sub-component integration)
  describe('Document Type Selection', () => {
    it('should show doc type selection when no doc_type exists', () => {
      // Arrange
@@ -353,13 +350,13 @@ describe('Metadata', () => {
    })
  })

  // Origin info and technical parameters
  // Fixed fields (tests MetadataFieldList sub-component integration)
  describe('Fixed Fields', () => {
    it('should render origin info fields', () => {
      // Arrange & Act
      render(<Metadata {...defaultProps} />)

      // Assert - Origin info fields should be displayed
      // Assert
      expect(screen.getByText('Data Source Type')).toBeInTheDocument()
    })

@@ -382,7 +379,7 @@ describe('Metadata', () => {
      // Act
      const { container } = render(<Metadata {...defaultProps} docDetail={docDetail} />)

      // Assert - should render without crashing
      // Assert
      expect(container.firstChild).toBeInTheDocument()
    })

@@ -390,7 +387,7 @@ describe('Metadata', () => {
      // Arrange & Act
      const { container } = render(<Metadata {...defaultProps} docDetail={undefined} loading={false} />)

      // Assert - should render without crashing
      // Assert
      expect(container.firstChild).toBeInTheDocument()
    })

@@ -425,7 +422,6 @@ describe('Metadata', () => {
    })
  })

  // FieldInfo component tests
describe('FieldInfo', () => {
  beforeEach(() => {
    vi.clearAllMocks()
@@ -543,3 +539,149 @@ describe('FieldInfo', () => {
    })
  })
})

// --- useMetadataState hook coverage tests (via component interactions) ---
describe('useMetadataState coverage', () => {
  beforeEach(() => {
    vi.clearAllMocks()
  })

  const defaultProps = {
    docDetail: createMockDocDetail(),
    loading: false,
    onUpdate: vi.fn(),
  }

  describe('cancelDocType', () => {
    it('should cancel doc type change and return to edit mode', () => {
      // Arrange
      render(<Metadata {...defaultProps} />)

      // Enter edit mode → click change to open doc type selector
      fireEvent.click(screen.getByText(/operation\.edit/i))
      fireEvent.click(screen.getByText(/operation\.change/i))

      // Now in doc type selector mode — should show cancel button
      expect(screen.getByText(/operation\.cancel/i)).toBeInTheDocument()

      // Act — cancel the doc type change
      fireEvent.click(screen.getByText(/operation\.cancel/i))

      // Assert — should be back to edit mode (cancel + save buttons visible)
      expect(screen.getByText(/operation\.save/i)).toBeInTheDocument()
    })
  })

  describe('confirmDocType', () => {
    it('should confirm same doc type and return to edit mode keeping metadata', () => {
      // Arrange — useEffect syncs tempDocType='book' from docDetail
      render(<Metadata {...defaultProps} />)

      // Enter edit mode → click change to open doc type selector
      fireEvent.click(screen.getByText(/operation\.edit/i))
      fireEvent.click(screen.getByText(/operation\.change/i))

      // DocTypeSelector shows save/cancel buttons
      expect(screen.getByText(/metadata\.docTypeChangeTitle/i)).toBeInTheDocument()

      // Act — click save to confirm same doc type (tempDocType='book')
      fireEvent.click(screen.getByText(/operation\.save/i))

      // Assert — should return to edit mode with metadata fields visible
      expect(screen.getByText(/operation\.cancel/i)).toBeInTheDocument()
      expect(screen.getByText(/operation\.save/i)).toBeInTheDocument()
    })
  })

  describe('cancelEdit when no docType', () => {
    it('should show doc type selection when cancel is clicked with doc_type others', () => {
      // Arrange — doc with 'others' type normalizes to '' internally.
      // The useEffect sees doc_type='others' (truthy) and syncs state,
      // so the component initially shows view mode. Enter edit → cancel to trigger cancelEdit.
      const docDetail = createMockDocDetail({ doc_type: 'others' })
      render(<Metadata {...defaultProps} docDetail={docDetail} />)

      // 'others' is normalized to '' → useEffect fires (doc_type truthy) → view mode
      // The rendered type uses default 'book' fallback for display
      expect(screen.getByText(/operation\.edit/i)).toBeInTheDocument()

      // Enter edit mode
      fireEvent.click(screen.getByText(/operation\.edit/i))
      expect(screen.getByText(/operation\.cancel/i)).toBeInTheDocument()

      // Act — cancel edit; internally docType is '' so cancelEdit goes to showDocTypes
      fireEvent.click(screen.getByText(/operation\.cancel/i))

      // Assert — should show doc type selection since normalized docType was ''
      expect(screen.getByText(/metadata\.docTypeSelectTitle/i)).toBeInTheDocument()
    })
  })

  describe('updateMetadataField', () => {
    it('should update metadata field value via input', () => {
      // Arrange
      render(<Metadata {...defaultProps} />)

      // Enter edit mode
      fireEvent.click(screen.getByText(/operation\.edit/i))

      // Act — find an input and change its value (Title field)
      const inputs = screen.getAllByRole('textbox')
      expect(inputs.length).toBeGreaterThan(0)
      fireEvent.change(inputs[0], { target: { value: 'Updated Title' } })

      // Assert — the input should have the new value
      expect(inputs[0]).toHaveValue('Updated Title')
    })
  })

  describe('saveMetadata calls modifyDocMetadata with correct body', () => {
    it('should pass doc_type and doc_metadata in save request', async () => {
      // Arrange
      mockModifyDocMetadata.mockResolvedValueOnce({})
      render(<Metadata {...defaultProps} />)

      // Enter edit mode
      fireEvent.click(screen.getByText(/operation\.edit/i))

      // Act — save
      fireEvent.click(screen.getByText(/operation\.save/i))

      // Assert
      await waitFor(() => {
        expect(mockModifyDocMetadata).toHaveBeenCalledWith(
          expect.objectContaining({
            datasetId: 'test-dataset-id',
            documentId: 'test-document-id',
            body: expect.objectContaining({
              doc_type: 'book',
            }),
          }),
        )
      })
    })
  })

  describe('useEffect sync', () => {
    it('should handle doc_metadata being null in effect sync', () => {
      // Arrange — first render with null metadata
      const { rerender } = render(
        <Metadata
          {...defaultProps}
          docDetail={createMockDocDetail({ doc_metadata: null })}
        />,
      )

      // Act — rerender with a different doc_type to trigger useEffect sync
      rerender(
        <Metadata
          {...defaultProps}
          docDetail={createMockDocDetail({ doc_type: 'paper', doc_metadata: null })}
        />,
      )

      // Assert — should render without crashing, showing Paper type
      expect(screen.getByText('Paper')).toBeInTheDocument()
    })
  })
})
|
||||
|
||||
@@ -1,422 +1,124 @@
|
||||
'use client'
|
||||
import type { FC, ReactNode } from 'react'
|
||||
import type { inputType, metadataType } from '@/hooks/use-metadata'
|
||||
import type { CommonResponse } from '@/models/common'
|
||||
import type { DocType, FullDocumentDetail } from '@/models/datasets'
|
||||
import type { FC } from 'react'
|
||||
import type { FullDocumentDetail } from '@/models/datasets'
|
||||
import { PencilIcon } from '@heroicons/react/24/outline'
|
||||
import { get } from 'es-toolkit/compat'
|
||||
import * as React from 'react'
|
||||
import { useEffect, useState } from 'react'
|
||||
import { useTranslation } from 'react-i18next'
|
||||
import { useContext } from 'use-context-selector'
|
||||
import AutoHeightTextarea from '@/app/components/base/auto-height-textarea'
|
||||
import Button from '@/app/components/base/button'
|
||||
import Divider from '@/app/components/base/divider'
|
||||
import Input from '@/app/components/base/input'
|
||||
import Loading from '@/app/components/base/loading'
|
||||
import Radio from '@/app/components/base/radio'
|
||||
import { SimpleSelect } from '@/app/components/base/select'
|
||||
import { ToastContext } from '@/app/components/base/toast'
|
||||
import Tooltip from '@/app/components/base/tooltip'
|
||||
import { useBookCategories, useBusinessDocCategories, useLanguages, useMetadataMap, usePersonalDocCategories } from '@/hooks/use-metadata'
|
||||
import { CUSTOMIZABLE_DOC_TYPES } from '@/models/datasets'
|
||||
import { modifyDocMetadata } from '@/service/datasets'
|
||||
import { asyncRunSafe, getTextWidthWithCanvas } from '@/utils'
|
||||
import { cn } from '@/utils/classnames'
|
||||
import { useDocumentContext } from '../context'
|
||||
import { useMetadataMap } from '@/hooks/use-metadata'
|
||||
import DocTypeSelector, { DocumentTypeDisplay } from './components/doc-type-selector'
|
||||
import MetadataFieldList from './components/metadata-field-list'
|
||||
import { useMetadataState } from './hooks/use-metadata-state'
|
||||
import s from './style.module.css'
|
||||
|
||||
const map2Options = (map: { [key: string]: string }) => {
|
||||
return Object.keys(map).map(key => ({ value: key, name: map[key] }))
|
||||
}
|
||||
export { default as FieldInfo } from './components/field-info'
|
||||
|
||||
type IFieldInfoProps = {
  label: string
  value?: string
  valueIcon?: ReactNode
  displayedValue?: string
  defaultValue?: string
  showEdit?: boolean
  inputType?: inputType
  selectOptions?: Array<{ value: string, name: string }>
  onUpdate?: (v: any) => void
}

export const FieldInfo: FC<IFieldInfoProps> = ({
  label,
  value = '',
  valueIcon,
  displayedValue = '',
  defaultValue,
  showEdit = false,
  inputType = 'input',
  selectOptions = [],
  onUpdate,
}) => {
  const { t } = useTranslation()
  const textNeedWrap = getTextWidthWithCanvas(displayedValue) > 190
  const editAlignTop = showEdit && inputType === 'textarea'
  const readAlignTop = !showEdit && textNeedWrap

  const renderContent = () => {
    if (!showEdit)
      return displayedValue

    if (inputType === 'select') {
      return (
        <SimpleSelect
          onSelect={({ value }) => onUpdate?.(value as string)}
          items={selectOptions}
          defaultValue={value}
          className={s.select}
          wrapperClassName={s.selectWrapper}
          placeholder={`${t('metadata.placeholder.select', { ns: 'datasetDocuments' })}${label}`}
        />
      )
    }

    if (inputType === 'textarea') {
      return (
        <AutoHeightTextarea
          onChange={e => onUpdate?.(e.target.value)}
          value={value}
          className={s.textArea}
          placeholder={`${t('metadata.placeholder.add', { ns: 'datasetDocuments' })}${label}`}
        />
      )
    }

    return (
      <Input
        onChange={e => onUpdate?.(e.target.value)}
        value={value}
        defaultValue={defaultValue}
        placeholder={`${t('metadata.placeholder.add', { ns: 'datasetDocuments' })}${label}`}
      />
    )
  }

  return (
    <div className={cn('flex min-h-5 items-center gap-1 py-0.5 text-xs', editAlignTop && '!items-start', readAlignTop && '!items-start pt-1')}>
      <div className={cn('w-[200px] shrink-0 overflow-hidden text-ellipsis whitespace-nowrap text-text-tertiary', editAlignTop && 'pt-1')}>{label}</div>
      <div className="flex grow items-center gap-1 text-text-secondary">
        {valueIcon}
        {renderContent()}
      </div>
    </div>
  )
}
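// Usage sketch (the label, map and handler below are illustrative, not from this file):
//
//   <FieldInfo
//     label="Language"
//     value={metadata.language}
//     inputType="select"
//     showEdit
//     selectOptions={map2Options(languageMap)}
//     onUpdate={val => onFieldChange('language', val)}
//   />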

const TypeIcon: FC<{ iconName: string, className?: string }> = ({ iconName, className = '' }) => {
  return (
    <div className={cn(s.commonIcon, s[`${iconName}Icon`], className)} />
  )
}

const IconButton: FC<{
  type: DocType
  isChecked: boolean
}> = ({ type, isChecked = false }) => {
  const metadataMap = useMetadataMap()

  return (
    <Tooltip
      popupContent={metadataMap[type].text}
    >
      <button type="button" className={cn(s.iconWrapper, 'group', isChecked ? s.iconCheck : '')}>
        <TypeIcon
          iconName={metadataMap[type].iconName || ''}
          className={`group-hover:bg-primary-600 ${isChecked ? '!bg-primary-600' : ''}`}
        />
      </button>
    </Tooltip>
  )
}

type MetadataProps = {
  docDetail?: FullDocumentDetail
  loading: boolean
  onUpdate: () => void
}

type MetadataState = {
  documentType?: DocType | ''
  metadata: Record<string, string>
}

const Metadata: FC<MetadataProps> = ({ docDetail, loading, onUpdate }) => {
  const { t } = useTranslation()
  const metadataMap = useMetadataMap()
  const {
    docType,
    editStatus,
    showDocTypes,
    tempDocType,
    saveLoading,
    metadataParams,
    setTempDocType,
    setShowDocTypes,
    confirmDocType,
    cancelDocType,
    enableEdit,
    cancelEdit,
    saveMetadata,
    updateMetadataField,
  } = useMetadataState({ docDetail, onUpdate })

  if (loading) {
    return (
      <div className={`${s.main} bg-gray-25`}>
        <Loading type="app" />
      </div>
    )
  }

  return (
    <div className={`${s.main} ${editStatus ? 'bg-white' : 'bg-gray-25'}`}>
      {/* Header: title + action buttons */}
      <div className={s.titleWrapper}>
        <span className={s.title}>{t('metadata.title', { ns: 'datasetDocuments' })}</span>
        {!editStatus
          ? (
            <Button onClick={enableEdit} className={`${s.opBtn} ${s.opEditBtn}`}>
              <PencilIcon className={s.opIcon} />
              {t('operation.edit', { ns: 'common' })}
            </Button>
          )
          : !showDocTypes && (
            <div className={s.opBtnWrapper}>
              <Button onClick={cancelEdit} className={`${s.opBtn} ${s.opCancelBtn}`}>
                {t('operation.cancel', { ns: 'common' })}
              </Button>
              <Button onClick={saveMetadata} className={`${s.opBtn} ${s.opSaveBtn}`} variant="primary" loading={saveLoading}>
                {t('operation.save', { ns: 'common' })}
              </Button>
            </div>
          )}
      </div>

      {/* Document type display / selector */}
      {!editStatus
        ? <DocumentTypeDisplay displayType={docType} />
        : showDocTypes
          ? null
          : (
            <DocumentTypeDisplay
              displayType={metadataParams.documentType || ''}
              showChangeLink={editStatus}
              onChangeClick={() => setShowDocTypes(true)}
            />
          )}

      {/* Divider between type display and fields (skip when in first-time selection) */}
      {(!docType && showDocTypes) ? null : <Divider />}

      {/* Doc type selector or editable metadata fields */}
      {showDocTypes
        ? (
          <DocTypeSelector
            docType={docType}
            documentType={metadataParams.documentType}
            tempDocType={tempDocType}
            onTempDocTypeChange={setTempDocType}
            onConfirm={confirmDocType}
            onCancel={cancelDocType}
          />
        )
        : (
          <MetadataFieldList
            mainField={metadataParams.documentType || ''}
            canEdit={editStatus}
            metadata={metadataParams.metadata}
            docDetail={docDetail}
            onFieldUpdate={updateMetadataField}
          />
        )}

      {/* Fixed fields: origin info */}
      <Divider />
      <MetadataFieldList mainField="originInfo" docDetail={docDetail} />

      {/* Fixed fields: technical parameters */}
      <div className={`${s.title} mt-8`}>{metadataMap.technicalParameters.text}</div>
      <Divider />
      <MetadataFieldList mainField="technicalParameters" docDetail={docDetail} />
    </div>
  )
}

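The refactor moves all of the per-document state behind useMetadataState, whose implementation is not part of this diff. A minimal sketch of its return shape, inferred purely from the destructuring in the component (the function signatures are assumptions), would be:

type UseMetadataStateReturn = {
  docType: DocType | ''
  editStatus: boolean
  showDocTypes: boolean
  tempDocType: DocType | ''
  saveLoading: boolean
  metadataParams: {
    documentType?: DocType | ''
    metadata: Record<string, string>
  }
  setTempDocType: (type: DocType | '') => void
  setShowDocTypes: (show: boolean) => void
  confirmDocType: () => void
  cancelDocType: () => void
  enableEdit: () => void
  cancelEdit: () => void
  saveMetadata: () => void
  updateMetadataField: (field: string, value: string) => void
}
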
@@ -129,7 +129,6 @@ export const useToolSelectorState = ({
      extra: {
        description: tool.tool_description,
      },
      type: tool.provider_type,
    }
  }, [])

@@ -5,16 +5,14 @@ import {
  RiBookOpenLine,
  RiDragDropLine,
  RiEqualizer2Line,
  RiRefreshLine,
} from '@remixicon/react'
import { useBoolean } from 'ahooks'
import { noop } from 'es-toolkit/function'
import Link from 'next/link'
import { useCallback, useEffect, useMemo, useState } from 'react'
import { useEffect, useMemo, useState } from 'react'
import { useTranslation } from 'react-i18next'
import Button from '@/app/components/base/button'
import TabSlider from '@/app/components/base/tab-slider'
import { useToastContext } from '@/app/components/base/toast'
import Tooltip from '@/app/components/base/tooltip'
import ReferenceSettingModal from '@/app/components/plugins/reference-setting-modal'
import { MARKETPLACE_API_PREFIX, SUPPORT_INSTALL_LOCAL_FILE_EXTENSIONS } from '@/config'
@@ -22,8 +20,7 @@ import { useGlobalPublicStore } from '@/context/global-public-context'
import { useDocLink } from '@/context/i18n'
import useDocumentTitle from '@/hooks/use-document-title'
import { usePluginInstallation } from '@/hooks/use-query-params'
import { batchUpgradePlugins, fetchBundleInfoFromMarketPlace, fetchManifestFromMarketPlace } from '@/service/plugins'
import { useInvalidateInstalledPluginList } from '@/service/use-plugins'
import { fetchBundleInfoFromMarketPlace, fetchManifestFromMarketPlace } from '@/service/plugins'
import { sleep } from '@/utils'
import { cn } from '@/utils/classnames'
import { PLUGIN_PAGE_TABS_MAP } from '../hooks'
@@ -51,8 +48,6 @@ const PluginPage = ({
  const { t } = useTranslation()
  const docLink = useDocLink()
  useDocumentTitle(t('metadata.title', { ns: 'plugin' }))
  const { notify } = useToastContext()
  const invalidateInstalledPluginList = useInvalidateInstalledPluginList()

  // Use nuqs hook for installation state
  const [{ packageId, bundleInfo }, setInstallState] = usePluginInstallation()
@@ -65,9 +60,6 @@ const PluginPage = ({
    setFalse: doHideInstallFromMarketplace,
  }] = useBoolean(false)

  const [isBatchUpgrading, setIsBatchUpgrading] = useState(false)
  const [showBatchUpgradeTooltip, setShowBatchUpgradeTooltip] = useState(true)

  const hideInstallFromMarketplace = () => {
    doHideInstallFromMarketplace()
    setInstallState(null)
@@ -142,45 +134,6 @@ const PluginPage = ({
    enabled: isPluginsTab && canManagement,
  })

  const handleBatchUpgrade = useCallback(async () => {
    // Hide tooltip immediately when clicked
    setShowBatchUpgradeTooltip(false)
    setIsBatchUpgrading(true)
    try {
      const result = await batchUpgradePlugins()
      const { success, failed, skipped } = result

      // If there are updates (success or failed), show submitted message
      if (success.length > 0 || failed.length > 0) {
        notify({
          type: 'success',
          message: t('batchUpgrade.submittedMessage', { ns: 'plugin' }),
        })
      }
      // If all plugins are already up to date (only skipped)
      else if (skipped.length > 0) {
        notify({
          type: 'info',
          message: t('batchUpgrade.noUpdatesMessage', { ns: 'plugin' }),
        })
      }

      invalidateInstalledPluginList()
    }
    catch (error) {
      console.error('Failed to batch upgrade plugins:', error)
      notify({
        type: 'error',
        message: t('batchUpgrade.errorMessage', { ns: 'plugin' }),
      })
    }
    finally {
      setIsBatchUpgrading(false)
      // Re-enable tooltip after a short delay
      setTimeout(() => setShowBatchUpgradeTooltip(true), 500)
    }
  }, [t, notify, invalidateInstalledPluginList])
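  // For reference, the handler above only relies on three array fields of the
  // batchUpgradePlugins response; a plausible shape (element types assumed,
  // not confirmed by this diff) would be:
  //
  //   type BatchUpgradeResult = {
  //     success: string[] // upgrade task submitted
  //     failed: string[]  // submission failed
  //     skipped: string[] // already on the latest version
  //   }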

  const { dragging, fileUploader, fileChangeHandle, removeFile } = uploaderProps
  return (
    <div
@@ -236,27 +189,6 @@ const PluginPage = ({
          </>
        )
      }
      {
        isPluginsTab && canManagement && (
          <>
            <Tooltip
              popupContent={t('batchUpgrade.tooltip', { ns: 'plugin' })}
              disabled={!showBatchUpgradeTooltip}
            >
              <Button
                variant="secondary-accent"
                className="px-3"
                onClick={handleBatchUpgrade}
                disabled={isBatchUpgrading}
              >
                <RiRefreshLine className={cn('mr-1 h-4 w-4', isBatchUpgrading && 'animate-spin')} />
                {t('batchUpgrade.button', { ns: 'plugin' })}
              </Button>
            </Tooltip>
            <div className="mx-1 h-3.5 w-[1px] shrink-0 bg-divider-regular"></div>
          </>
        )
      }
      <PluginTasks />
      {canManagement && (
        <InstallPluginDropdown

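The removed block uses a small but reusable trick: disable the tooltip while the action runs and re-enable it shortly after, so the tooltip does not linger over a busy button. Extracted as a standalone hook, the pattern looks roughly like this (the hook name and default delay are illustrative, not from the source):

import { useCallback, useRef, useState } from 'react'

const useTransientTooltip = (delayMs = 500) => {
  const [enabled, setEnabled] = useState(true)
  const timerRef = useRef<ReturnType<typeof setTimeout> | undefined>(undefined)

  const suppress = useCallback(() => {
    clearTimeout(timerRef.current)
    setEnabled(false)
    // Re-enable once the action has had time to settle
    timerRef.current = setTimeout(() => setEnabled(true), delayMs)
  }, [delayMs])

  return { enabled, suppress }
}

A consumer would render the Tooltip with disabled={!enabled} and call suppress() at the top of the click handler, mirroring what the removed code does inline.
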
@@ -1,6 +1,6 @@
import type { PropsWithChildren } from 'react'
import { act, cleanup, fireEvent, render, screen, waitFor } from '@testing-library/react'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import { act, fireEvent, render, screen, waitFor } from '@testing-library/react'
import { beforeEach, describe, expect, it, vi } from 'vitest'
import { DSLImportStatus } from '@/models/app'
import UpdateDSLModal from './update-dsl-modal'

@@ -145,11 +145,6 @@ vi.mock('@/app/components/workflow/constants', () => ({
  WORKFLOW_DATA_UPDATE: 'WORKFLOW_DATA_UPDATE',
}))

afterEach(() => {
  cleanup()
  vi.clearAllMocks()
})

describe('UpdateDSLModal', () => {
  const mockOnCancel = vi.fn()
  const mockOnBackup = vi.fn()

@@ -1,40 +1,17 @@
'use client'

import {
  RiAlertFill,
  RiCloseLine,
  RiFileDownloadLine,
} from '@remixicon/react'
import { memo } from 'react'
import { useTranslation } from 'react-i18next'
import Uploader from '@/app/components/app/create-from-dsl-modal/uploader'
import Button from '@/app/components/base/button'
import Modal from '@/app/components/base/modal'
import { useUpdateDSLModal } from '../hooks/use-update-dsl-modal'
import VersionMismatchModal from './version-mismatch-modal'

type UpdateDSLModalProps = {
  onCancel: () => void
@@ -48,146 +25,17 @@ const UpdateDSLModal = ({
  onImport,
}: UpdateDSLModalProps) => {
  const { t } = useTranslation()
  const {
    currentFile,
    handleFile,
    show,
    showErrorModal,
    setShowErrorModal,
    loading,
    versions,
    handleImport,
    onUpdateDSLConfirm,
  } = useUpdateDSLModal({ onCancel, onImport })

  return (
    <>
@@ -250,32 +98,12 @@ const UpdateDSLModal = ({
          </Button>
        </div>
      </Modal>
      <VersionMismatchModal
        isShow={showErrorModal}
        versions={versions}
        onClose={() => setShowErrorModal(false)}
        onConfirm={onUpdateDSLConfirm}
      />
    </>
  )
}

@@ -0,0 +1,117 @@
import { fireEvent, render, screen } from '@testing-library/react'
import { beforeEach, describe, expect, it, vi } from 'vitest'
import VersionMismatchModal from './version-mismatch-modal'

describe('VersionMismatchModal', () => {
  const mockOnClose = vi.fn()
  const mockOnConfirm = vi.fn()

  const defaultVersions = {
    importedVersion: '0.8.0',
    systemVersion: '1.0.0',
  }

  const defaultProps = {
    isShow: true,
    versions: defaultVersions,
    onClose: mockOnClose,
    onConfirm: mockOnConfirm,
  }

  beforeEach(() => {
    vi.clearAllMocks()
  })

  describe('rendering', () => {
    it('should render dialog when isShow is true', () => {
      render(<VersionMismatchModal {...defaultProps} />)

      expect(screen.getByRole('dialog')).toBeInTheDocument()
    })

    it('should not render dialog when isShow is false', () => {
      render(<VersionMismatchModal {...defaultProps} isShow={false} />)

      expect(screen.queryByRole('dialog')).not.toBeInTheDocument()
    })

    it('should render error title', () => {
      render(<VersionMismatchModal {...defaultProps} />)

      expect(screen.getByText('app.newApp.appCreateDSLErrorTitle')).toBeInTheDocument()
    })

    it('should render all error description parts', () => {
      render(<VersionMismatchModal {...defaultProps} />)

      expect(screen.getByText('app.newApp.appCreateDSLErrorPart1')).toBeInTheDocument()
      expect(screen.getByText('app.newApp.appCreateDSLErrorPart2')).toBeInTheDocument()
      expect(screen.getByText('app.newApp.appCreateDSLErrorPart3')).toBeInTheDocument()
      expect(screen.getByText('app.newApp.appCreateDSLErrorPart4')).toBeInTheDocument()
    })

    it('should display imported and system version numbers', () => {
      render(<VersionMismatchModal {...defaultProps} />)

      expect(screen.getByText('0.8.0')).toBeInTheDocument()
      expect(screen.getByText('1.0.0')).toBeInTheDocument()
    })

    it('should render cancel and confirm buttons', () => {
      render(<VersionMismatchModal {...defaultProps} />)

      expect(screen.getByRole('button', { name: /app\.newApp\.Cancel/ })).toBeInTheDocument()
      expect(screen.getByRole('button', { name: /app\.newApp\.Confirm/ })).toBeInTheDocument()
    })
  })

  describe('user interactions', () => {
    it('should call onClose when cancel button is clicked', () => {
      render(<VersionMismatchModal {...defaultProps} />)

      fireEvent.click(screen.getByRole('button', { name: /app\.newApp\.Cancel/ }))

      expect(mockOnClose).toHaveBeenCalledTimes(1)
    })

    it('should call onConfirm when confirm button is clicked', () => {
      render(<VersionMismatchModal {...defaultProps} />)

      fireEvent.click(screen.getByRole('button', { name: /app\.newApp\.Confirm/ }))

      expect(mockOnConfirm).toHaveBeenCalledTimes(1)
    })
  })

  describe('button variants', () => {
    it('should render cancel button with secondary variant', () => {
      render(<VersionMismatchModal {...defaultProps} />)

      const cancelBtn = screen.getByRole('button', { name: /app\.newApp\.Cancel/ })
      expect(cancelBtn).toHaveClass('btn-secondary')
    })

    it('should render confirm button with primary destructive variant', () => {
      render(<VersionMismatchModal {...defaultProps} />)

      const confirmBtn = screen.getByRole('button', { name: /app\.newApp\.Confirm/ })
      expect(confirmBtn).toHaveClass('btn-primary')
      expect(confirmBtn).toHaveClass('btn-destructive')
    })
  })

  describe('edge cases', () => {
    it('should handle undefined versions gracefully', () => {
      render(<VersionMismatchModal {...defaultProps} versions={undefined} />)

      expect(screen.getByText('app.newApp.appCreateDSLErrorTitle')).toBeInTheDocument()
    })

    it('should handle empty version strings', () => {
      const emptyVersions = { importedVersion: '', systemVersion: '' }
      render(<VersionMismatchModal {...defaultProps} versions={emptyVersions} />)

      expect(screen.getByText('app.newApp.appCreateDSLErrorTitle')).toBeInTheDocument()
    })
  })
})
@@ -0,0 +1,54 @@
import type { MouseEventHandler } from 'react'
import { useTranslation } from 'react-i18next'
import Button from '@/app/components/base/button'
import Modal from '@/app/components/base/modal'

type VersionMismatchModalProps = {
  isShow: boolean
  versions?: {
    importedVersion: string
    systemVersion: string
  }
  onClose: () => void
  onConfirm: MouseEventHandler
}

const VersionMismatchModal = ({
  isShow,
  versions,
  onClose,
  onConfirm,
}: VersionMismatchModalProps) => {
  const { t } = useTranslation()

  return (
    <Modal
      isShow={isShow}
      onClose={onClose}
      className="w-[480px]"
    >
      <div className="flex flex-col items-start gap-2 self-stretch pb-4">
        <div className="title-2xl-semi-bold text-text-primary">{t('newApp.appCreateDSLErrorTitle', { ns: 'app' })}</div>
        <div className="system-md-regular flex grow flex-col text-text-secondary">
          <div>{t('newApp.appCreateDSLErrorPart1', { ns: 'app' })}</div>
          <div>{t('newApp.appCreateDSLErrorPart2', { ns: 'app' })}</div>
          <br />
          <div>
            {t('newApp.appCreateDSLErrorPart3', { ns: 'app' })}
            <span className="system-md-medium">{versions?.importedVersion}</span>
          </div>
          <div>
            {t('newApp.appCreateDSLErrorPart4', { ns: 'app' })}
            <span className="system-md-medium">{versions?.systemVersion}</span>
          </div>
        </div>
      </div>
      <div className="flex items-start justify-end gap-2 self-stretch pt-6">
        <Button variant="secondary" onClick={onClose}>{t('newApp.Cancel', { ns: 'app' })}</Button>
        <Button variant="primary" destructive onClick={onConfirm}>{t('newApp.Confirm', { ns: 'app' })}</Button>
      </div>
    </Modal>
  )
}

export default VersionMismatchModal
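For reference, the extracted modal is consumed exactly as at the call site shown in update-dsl-modal.tsx above:

<VersionMismatchModal
  isShow={showErrorModal}
  versions={versions}
  onClose={() => setShowErrorModal(false)}
  onConfirm={onUpdateDSLConfirm}
/>
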
@@ -0,0 +1,551 @@
import { act, renderHook } from '@testing-library/react'
import { beforeEach, describe, expect, it, vi } from 'vitest'
import { DSLImportMode, DSLImportStatus } from '@/models/app'
import { useUpdateDSLModal } from './use-update-dsl-modal'

// --- FileReader stub ---
class MockFileReader {
  onload: ((this: FileReader, event: ProgressEvent<FileReader>) => void) | null = null

  readAsText(_file: Blob) {
    const event = { target: { result: 'test content' } } as unknown as ProgressEvent<FileReader>
    this.onload?.call(this as unknown as FileReader, event)
  }
}
vi.stubGlobal('FileReader', MockFileReader as unknown as typeof FileReader)
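// vi.stubGlobal replaces the global for the whole file. If per-test isolation
// were needed instead, the stub could be installed in beforeEach and undone
// with vi.unstubAllGlobals() in afterEach (afterEach would then need importing):
//
//   beforeEach(() => vi.stubGlobal('FileReader', MockFileReader as unknown as typeof FileReader))
//   afterEach(() => vi.unstubAllGlobals())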

// --- Module-level mock functions ---
const mockNotify = vi.fn()
const mockEmit = vi.fn()
const mockImportDSL = vi.fn()
const mockImportDSLConfirm = vi.fn()
const mockHandleCheckPluginDependencies = vi.fn()

// --- Mocks ---
vi.mock('react-i18next', () => ({
  useTranslation: () => ({ t: (key: string) => key }),
}))

vi.mock('use-context-selector', () => ({
  useContext: () => ({ notify: mockNotify }),
}))

vi.mock('@/app/components/base/toast', () => ({
  ToastContext: {},
}))

vi.mock('@/context/event-emitter', () => ({
  useEventEmitterContextContext: () => ({
    eventEmitter: { emit: mockEmit },
  }),
}))

vi.mock('@/app/components/workflow/store', () => ({
  useWorkflowStore: () => ({
    getState: () => ({ pipelineId: 'test-pipeline-id' }),
  }),
}))

vi.mock('@/app/components/workflow/utils', () => ({
  initialNodes: (nodes: unknown[]) => nodes,
  initialEdges: (edges: unknown[]) => edges,
}))

vi.mock('@/app/components/workflow/constants', () => ({
  WORKFLOW_DATA_UPDATE: 'WORKFLOW_DATA_UPDATE',
}))

vi.mock('@/app/components/workflow/plugin-dependency/hooks', () => ({
  usePluginDependencies: () => ({
    handleCheckPluginDependencies: mockHandleCheckPluginDependencies,
  }),
}))

vi.mock('@/service/use-pipeline', () => ({
  useImportPipelineDSL: () => ({ mutateAsync: mockImportDSL }),
  useImportPipelineDSLConfirm: () => ({ mutateAsync: mockImportDSLConfirm }),
}))

vi.mock('@/service/workflow', () => ({
  fetchWorkflowDraft: vi.fn().mockResolvedValue({
    graph: { nodes: [], edges: [], viewport: { x: 0, y: 0, zoom: 1 } },
    hash: 'test-hash',
    rag_pipeline_variables: [],
  }),
}))

// --- Helpers ---
const createFile = () => new File(['test content'], 'test.pipeline', { type: 'text/yaml' })

// Cast MouseEventHandler to a plain callable for tests (event param is unused)
type AsyncFn = () => Promise<void>

describe('useUpdateDSLModal', () => {
  const mockOnCancel = vi.fn()
  const mockOnImport = vi.fn()

  const renderUpdateDSLModal = (overrides?: { onImport?: () => void }) =>
    renderHook(() =>
      useUpdateDSLModal({
        onCancel: mockOnCancel,
        onImport: overrides?.onImport ?? mockOnImport,
      }),
    )

  beforeEach(() => {
    vi.clearAllMocks()
    mockImportDSL.mockResolvedValue({
      id: 'import-id',
      status: DSLImportStatus.COMPLETED,
      pipeline_id: 'test-pipeline-id',
    })
    mockHandleCheckPluginDependencies.mockResolvedValue(undefined)
  })

  // Initial state values
  describe('initial state', () => {
    it('should return correct defaults', () => {
      const { result } = renderUpdateDSLModal()

      expect(result.current.currentFile).toBeUndefined()
      expect(result.current.show).toBe(true)
      expect(result.current.showErrorModal).toBe(false)
      expect(result.current.loading).toBe(false)
      expect(result.current.versions).toBeUndefined()
    })
  })

  // File handling
  describe('handleFile', () => {
    it('should set currentFile when file is provided', () => {
      const { result } = renderUpdateDSLModal()
      const file = createFile()

      act(() => {
        result.current.handleFile(file)
      })

      expect(result.current.currentFile).toBe(file)
    })

    it('should clear currentFile when called with undefined', () => {
      const { result } = renderUpdateDSLModal()

      act(() => {
        result.current.handleFile(createFile())
      })
      act(() => {
        result.current.handleFile(undefined)
      })

      expect(result.current.currentFile).toBeUndefined()
    })
  })

  // Modal state management
  describe('modal state', () => {
    it('should allow toggling showErrorModal', () => {
      const { result } = renderUpdateDSLModal()

      expect(result.current.showErrorModal).toBe(false)

      act(() => {
        result.current.setShowErrorModal(true)
      })
      expect(result.current.showErrorModal).toBe(true)

      act(() => {
        result.current.setShowErrorModal(false)
      })
      expect(result.current.showErrorModal).toBe(false)
    })
  })

  // Import flow
  describe('handleImport', () => {
    it('should call importDSL with correct parameters', async () => {
      const { result } = renderUpdateDSLModal()

      act(() => {
        result.current.handleFile(createFile())
      })

      await act(async () => {
        await (result.current.handleImport as unknown as AsyncFn)()
      })

      expect(mockImportDSL).toHaveBeenCalledWith({
        mode: DSLImportMode.YAML_CONTENT,
        yaml_content: 'test content',
        pipeline_id: 'test-pipeline-id',
      })
    })

    it('should not call importDSL when no file is selected', async () => {
      const { result } = renderUpdateDSLModal()

      await act(async () => {
        await (result.current.handleImport as unknown as AsyncFn)()
      })

      expect(mockImportDSL).not.toHaveBeenCalled()
    })

    // COMPLETED status
    it('should notify success on COMPLETED status', async () => {
      const { result } = renderUpdateDSLModal()
      act(() => {
        result.current.handleFile(createFile())
      })

      await act(async () => {
        await (result.current.handleImport as unknown as AsyncFn)()
      })

      expect(mockNotify).toHaveBeenCalledWith(expect.objectContaining({ type: 'success' }))
    })

    it('should call onImport on successful import', async () => {
      const { result } = renderUpdateDSLModal()
      act(() => {
        result.current.handleFile(createFile())
      })

      await act(async () => {
        await (result.current.handleImport as unknown as AsyncFn)()
      })

      expect(mockOnImport).toHaveBeenCalled()
    })

    it('should call onCancel on successful import', async () => {
      const { result } = renderUpdateDSLModal()
      act(() => {
        result.current.handleFile(createFile())
      })

      await act(async () => {
        await (result.current.handleImport as unknown as AsyncFn)()
      })

      expect(mockOnCancel).toHaveBeenCalled()
    })

    it('should emit workflow update event on success', async () => {
      const { result } = renderUpdateDSLModal()
      act(() => {
        result.current.handleFile(createFile())
      })

      await act(async () => {
        await (result.current.handleImport as unknown as AsyncFn)()
      })

      expect(mockEmit).toHaveBeenCalled()
    })

    it('should call handleCheckPluginDependencies on success', async () => {
      const { result } = renderUpdateDSLModal()
      act(() => {
        result.current.handleFile(createFile())
      })

      await act(async () => {
        await (result.current.handleImport as unknown as AsyncFn)()
      })

      expect(mockHandleCheckPluginDependencies).toHaveBeenCalledWith('test-pipeline-id', true)
    })

    // COMPLETED_WITH_WARNINGS status
    it('should notify warning on COMPLETED_WITH_WARNINGS status', async () => {
      mockImportDSL.mockResolvedValue({
        id: 'import-id',
        status: DSLImportStatus.COMPLETED_WITH_WARNINGS,
        pipeline_id: 'test-pipeline-id',
      })

      const { result } = renderUpdateDSLModal()
      act(() => {
        result.current.handleFile(createFile())
      })

      await act(async () => {
        await (result.current.handleImport as unknown as AsyncFn)()
      })

      expect(mockNotify).toHaveBeenCalledWith(expect.objectContaining({ type: 'warning' }))
    })

    // PENDING status (version mismatch)
    it('should switch to version mismatch modal on PENDING status', async () => {
      vi.useFakeTimers({ shouldAdvanceTime: true })

      mockImportDSL.mockResolvedValue({
        id: 'import-id',
        status: DSLImportStatus.PENDING,
        pipeline_id: 'test-pipeline-id',
        imported_dsl_version: '0.8.0',
        current_dsl_version: '1.0.0',
      })

      const { result } = renderUpdateDSLModal()
      act(() => {
        result.current.handleFile(createFile())
      })

      await act(async () => {
        await (result.current.handleImport as unknown as AsyncFn)()
        await vi.advanceTimersByTimeAsync(350)
      })

      expect(result.current.show).toBe(false)
      expect(result.current.showErrorModal).toBe(true)
      expect(result.current.versions).toEqual({
        importedVersion: '0.8.0',
        systemVersion: '1.0.0',
      })

      vi.useRealTimers()
    })
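    // The PENDING tests in this block need to step past the 300 ms setTimeout
    // that swaps the modals. With shouldAdvanceTime enabled, pending promises
    // still resolve, and advanceTimersByTimeAsync fires the scheduled callback
    // deterministically. The same recipe works for any timed state transition:
    //
    //   vi.useFakeTimers({ shouldAdvanceTime: true })
    //   // ...trigger the code path that schedules setTimeout(fn, 300)...
    //   await vi.advanceTimersByTimeAsync(350) // > 300 ms, so the callback runs
    //   vi.useRealTimers()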

    it('should default version strings to empty when undefined', async () => {
      vi.useFakeTimers({ shouldAdvanceTime: true })

      mockImportDSL.mockResolvedValue({
        id: 'import-id',
        status: DSLImportStatus.PENDING,
        pipeline_id: 'test-pipeline-id',
        imported_dsl_version: undefined,
        current_dsl_version: undefined,
      })

      const { result } = renderUpdateDSLModal()
      act(() => {
        result.current.handleFile(createFile())
      })

      await act(async () => {
        await (result.current.handleImport as unknown as AsyncFn)()
        await vi.advanceTimersByTimeAsync(350)
      })

      expect(result.current.versions).toEqual({
        importedVersion: '',
        systemVersion: '',
      })

      vi.useRealTimers()
    })

    // FAILED / unknown status
    it('should notify error on FAILED status', async () => {
      mockImportDSL.mockResolvedValue({
        id: 'import-id',
        status: DSLImportStatus.FAILED,
        pipeline_id: 'test-pipeline-id',
      })

      const { result } = renderUpdateDSLModal()
      act(() => {
        result.current.handleFile(createFile())
      })

      await act(async () => {
        await (result.current.handleImport as unknown as AsyncFn)()
      })

      expect(mockNotify).toHaveBeenCalledWith(expect.objectContaining({ type: 'error' }))
    })

    // Exception
    it('should notify error when importDSL throws', async () => {
      mockImportDSL.mockRejectedValue(new Error('Network error'))

      const { result } = renderUpdateDSLModal()
      act(() => {
        result.current.handleFile(createFile())
      })

      await act(async () => {
        await (result.current.handleImport as unknown as AsyncFn)()
      })

      expect(mockNotify).toHaveBeenCalledWith(expect.objectContaining({ type: 'error' }))
    })

    // Missing pipeline_id
    it('should notify error when pipeline_id is missing on success', async () => {
      mockImportDSL.mockResolvedValue({
        id: 'import-id',
        status: DSLImportStatus.COMPLETED,
        pipeline_id: undefined,
      })

      const { result } = renderUpdateDSLModal()
      act(() => {
        result.current.handleFile(createFile())
      })

      await act(async () => {
        await (result.current.handleImport as unknown as AsyncFn)()
      })

      expect(mockNotify).toHaveBeenCalledWith(expect.objectContaining({ type: 'error' }))
    })
  })

  // Confirm flow (after PENDING → version mismatch)
  describe('onUpdateDSLConfirm', () => {
    // Helper: drive the hook into PENDING state so importId is set
    const setupPendingState = async (result: { current: ReturnType<typeof useUpdateDSLModal> }) => {
      vi.useFakeTimers({ shouldAdvanceTime: true })

      mockImportDSL.mockResolvedValue({
        id: 'import-id',
        status: DSLImportStatus.PENDING,
        pipeline_id: 'test-pipeline-id',
        imported_dsl_version: '0.8.0',
        current_dsl_version: '1.0.0',
      })

      act(() => {
        result.current.handleFile(createFile())
      })

      await act(async () => {
        await (result.current.handleImport as unknown as AsyncFn)()
        await vi.advanceTimersByTimeAsync(350)
      })

      vi.useRealTimers()
      vi.clearAllMocks()
      mockHandleCheckPluginDependencies.mockResolvedValue(undefined)
    }
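    // Note: vi.clearAllMocks() at the end of the helper wipes the call history
    // recorded while driving the hook into PENDING, so each test below only
    // observes the confirm path; mocks that path still needs are re-primed.
    // A test body then reduces to:
    //
    //   const { result } = renderUpdateDSLModal()
    //   await setupPendingState(result)
    //   await act(async () => {
    //     await (result.current.onUpdateDSLConfirm as unknown as AsyncFn)()
    //   })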

    it('should call importDSLConfirm with the stored importId', async () => {
      mockImportDSLConfirm.mockResolvedValue({
        status: DSLImportStatus.COMPLETED,
        pipeline_id: 'test-pipeline-id',
      })

      const { result } = renderUpdateDSLModal()
      await setupPendingState(result)

      await act(async () => {
        await (result.current.onUpdateDSLConfirm as unknown as AsyncFn)()
      })

      expect(mockImportDSLConfirm).toHaveBeenCalledWith('import-id')
    })

    it('should notify success and call onCancel after successful confirm', async () => {
      mockImportDSLConfirm.mockResolvedValue({
        status: DSLImportStatus.COMPLETED,
        pipeline_id: 'test-pipeline-id',
      })

      const { result } = renderUpdateDSLModal()
      await setupPendingState(result)

      await act(async () => {
        await (result.current.onUpdateDSLConfirm as unknown as AsyncFn)()
      })

      expect(mockNotify).toHaveBeenCalledWith(expect.objectContaining({ type: 'success' }))
      expect(mockOnCancel).toHaveBeenCalled()
    })

    it('should call onImport after successful confirm', async () => {
      mockImportDSLConfirm.mockResolvedValue({
        status: DSLImportStatus.COMPLETED,
        pipeline_id: 'test-pipeline-id',
      })

      const { result } = renderUpdateDSLModal()
      await setupPendingState(result)

      await act(async () => {
        await (result.current.onUpdateDSLConfirm as unknown as AsyncFn)()
      })

      expect(mockOnImport).toHaveBeenCalled()
    })

    it('should notify error on FAILED confirm status', async () => {
      mockImportDSLConfirm.mockResolvedValue({
        status: DSLImportStatus.FAILED,
        pipeline_id: 'test-pipeline-id',
      })

      const { result } = renderUpdateDSLModal()
      await setupPendingState(result)

      await act(async () => {
        await (result.current.onUpdateDSLConfirm as unknown as AsyncFn)()
      })

      expect(mockNotify).toHaveBeenCalledWith(expect.objectContaining({ type: 'error' }))
    })

    it('should notify error when confirm throws exception', async () => {
      mockImportDSLConfirm.mockRejectedValue(new Error('Confirm failed'))

      const { result } = renderUpdateDSLModal()
      await setupPendingState(result)

      await act(async () => {
        await (result.current.onUpdateDSLConfirm as unknown as AsyncFn)()
      })

      expect(mockNotify).toHaveBeenCalledWith(expect.objectContaining({ type: 'error' }))
    })

    it('should notify error when confirm succeeds but pipeline_id is missing', async () => {
      mockImportDSLConfirm.mockResolvedValue({
        status: DSLImportStatus.COMPLETED,
        pipeline_id: undefined,
      })

      const { result } = renderUpdateDSLModal()
      await setupPendingState(result)

      await act(async () => {
        await (result.current.onUpdateDSLConfirm as unknown as AsyncFn)()
      })

      expect(mockNotify).toHaveBeenCalledWith(expect.objectContaining({ type: 'error' }))
    })

    it('should not call importDSLConfirm when importId is not set', async () => {
      const { result } = renderUpdateDSLModal()

      // No pending state → importId is undefined
      await act(async () => {
        await (result.current.onUpdateDSLConfirm as unknown as AsyncFn)()
      })

      expect(mockImportDSLConfirm).not.toHaveBeenCalled()
    })
  })

  // Optional onImport callback
  describe('optional onImport', () => {
    it('should work without onImport callback', async () => {
      const { result } = renderHook(() =>
        useUpdateDSLModal({ onCancel: mockOnCancel }),
      )

      act(() => {
        result.current.handleFile(createFile())
      })

      await act(async () => {
        await (result.current.handleImport as unknown as AsyncFn)()
      })

      // Should succeed without throwing
      expect(mockOnCancel).toHaveBeenCalled()
    })
  })
})
205 web/app/components/rag-pipeline/hooks/use-update-dsl-modal.ts Normal file
@@ -0,0 +1,205 @@
import type { MouseEventHandler } from 'react'
import {
  useCallback,
  useRef,
  useState,
} from 'react'
import { useTranslation } from 'react-i18next'
import { useContext } from 'use-context-selector'
import { ToastContext } from '@/app/components/base/toast'
import { WORKFLOW_DATA_UPDATE } from '@/app/components/workflow/constants'
import { usePluginDependencies } from '@/app/components/workflow/plugin-dependency/hooks'
import { useWorkflowStore } from '@/app/components/workflow/store'
import {
  initialEdges,
  initialNodes,
} from '@/app/components/workflow/utils'
import { useEventEmitterContextContext } from '@/context/event-emitter'
import {
  DSLImportMode,
  DSLImportStatus,
} from '@/models/app'
import {
  useImportPipelineDSL,
  useImportPipelineDSLConfirm,
} from '@/service/use-pipeline'
import { fetchWorkflowDraft } from '@/service/workflow'

type VersionInfo = {
  importedVersion: string
  systemVersion: string
}

type UseUpdateDSLModalParams = {
  onCancel: () => void
  onImport?: () => void
}

const isCompletedStatus = (status: DSLImportStatus): boolean =>
  status === DSLImportStatus.COMPLETED || status === DSLImportStatus.COMPLETED_WITH_WARNINGS

export const useUpdateDSLModal = ({ onCancel, onImport }: UseUpdateDSLModalParams) => {
  const { t } = useTranslation()
  const { notify } = useContext(ToastContext)
  const { eventEmitter } = useEventEmitterContextContext()
  const workflowStore = useWorkflowStore()
  const { handleCheckPluginDependencies } = usePluginDependencies()
  const { mutateAsync: importDSL } = useImportPipelineDSL()
  const { mutateAsync: importDSLConfirm } = useImportPipelineDSLConfirm()

  // File state
  const [currentFile, setDSLFile] = useState<File>()
  const [fileContent, setFileContent] = useState<string>()

  // Modal state
  const [show, setShow] = useState(true)
  const [showErrorModal, setShowErrorModal] = useState(false)

  // Import state
  const [loading, setLoading] = useState(false)
  const [versions, setVersions] = useState<VersionInfo>()
  const [importId, setImportId] = useState<string>()
  const isCreatingRef = useRef(false)

  const readFile = (file: File) => {
    const reader = new FileReader()
    reader.onload = (event) => {
      setFileContent(event.target?.result as string)
    }
    reader.readAsText(file)
  }

  const handleFile = (file?: File) => {
    setDSLFile(file)
    if (file)
      readFile(file)
    if (!file)
      setFileContent('')
  }

  const notifyError = useCallback(() => {
    setLoading(false)
    notify({ type: 'error', message: t('common.importFailure', { ns: 'workflow' }) })
  }, [notify, t])

  const updateWorkflow = useCallback(async (pipelineId: string) => {
    const { graph, hash, rag_pipeline_variables } = await fetchWorkflowDraft(
      `/rag/pipelines/${pipelineId}/workflows/draft`,
    )
    const { nodes, edges, viewport } = graph

    eventEmitter?.emit({
      type: WORKFLOW_DATA_UPDATE,
      payload: {
        nodes: initialNodes(nodes, edges),
        edges: initialEdges(edges, nodes),
        viewport,
        hash,
        rag_pipeline_variables: rag_pipeline_variables || [],
      },
    })
  }, [eventEmitter])

  const completeImport = useCallback(async (
    pipelineId: string | undefined,
    status: DSLImportStatus = DSLImportStatus.COMPLETED,
  ) => {
    if (!pipelineId) {
      notifyError()
      return
    }

    updateWorkflow(pipelineId)
    onImport?.()

    const isWarning = status === DSLImportStatus.COMPLETED_WITH_WARNINGS
    notify({
      type: isWarning ? 'warning' : 'success',
      message: t(isWarning ? 'common.importWarning' : 'common.importSuccess', { ns: 'workflow' }),
      children: isWarning && t('common.importWarningDetails', { ns: 'workflow' }),
    })

    await handleCheckPluginDependencies(pipelineId, true)
    setLoading(false)
    onCancel()
  }, [updateWorkflow, onImport, notify, t, handleCheckPluginDependencies, onCancel, notifyError])

  const showVersionMismatch = useCallback((
    id: string,
    importedVersion?: string,
    systemVersion?: string,
  ) => {
    setShow(false)
    setTimeout(() => setShowErrorModal(true), 300)
    setVersions({
      importedVersion: importedVersion ?? '',
      systemVersion: systemVersion ?? '',
    })
    setImportId(id)
  }, [])
const handleImport: MouseEventHandler = useCallback(async () => {
|
||||
const { pipelineId } = workflowStore.getState()
|
||||
if (isCreatingRef.current)
|
||||
return
|
||||
isCreatingRef.current = true
|
||||
if (!currentFile)
|
||||
return
|
||||
|
||||
try {
|
||||
if (!pipelineId || !fileContent)
|
||||
return
|
||||
|
||||
setLoading(true)
|
||||
const response = await importDSL({
|
||||
mode: DSLImportMode.YAML_CONTENT,
|
||||
yaml_content: fileContent,
|
||||
pipeline_id: pipelineId,
|
||||
})
|
||||
const { id, status, pipeline_id, imported_dsl_version, current_dsl_version } = response
|
||||
|
||||
if (isCompletedStatus(status))
|
||||
await completeImport(pipeline_id, status)
|
||||
else if (status === DSLImportStatus.PENDING)
|
||||
showVersionMismatch(id, imported_dsl_version, current_dsl_version)
|
||||
else
|
||||
notifyError()
|
||||
}
|
||||
catch {
|
||||
notifyError()
|
||||
}
|
||||
isCreatingRef.current = false
|
||||
}, [currentFile, fileContent, workflowStore, importDSL, completeImport, showVersionMismatch, notifyError])
|
||||
|
||||
const onUpdateDSLConfirm: MouseEventHandler = useCallback(async () => {
|
||||
if (!importId)
|
||||
return
|
||||
|
||||
try {
|
||||
const { status, pipeline_id } = await importDSLConfirm(importId)
|
||||
|
||||
if (status === DSLImportStatus.COMPLETED) {
|
||||
await completeImport(pipeline_id)
|
||||
return
|
||||
}
|
||||
|
||||
if (status === DSLImportStatus.FAILED)
|
||||
notifyError()
|
||||
}
|
||||
catch {
|
||||
notifyError()
|
||||
}
|
||||
}, [importId, importDSLConfirm, completeImport, notifyError])
|
||||
|
||||
return {
|
||||
currentFile,
|
||||
handleFile,
|
||||
show,
|
||||
showErrorModal,
|
||||
setShowErrorModal,
|
||||
loading,
|
||||
versions,
|
||||
handleImport,
|
||||
onUpdateDSLConfirm,
|
||||
}
|
||||
}
|
||||
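For context, not part of this diff: a minimal sketch of how a modal component might consume this hook. The component name UpdateDSLModalSketch and its markup are assumptions for illustration, not the repository's actual update-dsl-modal component.

// Illustration only: hypothetical consumer of useUpdateDSLModal.
import { useUpdateDSLModal } from '@/app/components/rag-pipeline/hooks/use-update-dsl-modal'

const UpdateDSLModalSketch = ({ onCancel }: { onCancel: () => void }) => {
  const { currentFile, handleFile, show, loading, handleImport } = useUpdateDSLModal({ onCancel })

  if (!show)
    return null

  return (
    <div>
      {/* handleFile reads the chosen file into fileContent; clearing the input resets it */}
      <input type="file" accept=".yml,.yaml" onChange={e => handleFile(e.target.files?.[0])} />
      {/* handleImport returns early until both a file and the pipeline id are available */}
      <button disabled={!currentFile || loading} onClick={handleImport}>Import</button>
    </div>
  )
}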
@@ -87,7 +87,6 @@ export type ToolValue = {
  enabled?: boolean
  extra?: { description?: string } & Record<string, unknown>
  credential_id?: string
  type?: string
}

export type DataSourceItem = {
@@ -4,7 +4,19 @@ import type { EventEmitter } from 'ahooks/lib/useEventEmitter'
import { useEventEmitter } from 'ahooks'
import { createContext, useContext } from 'use-context-selector'

const EventEmitterContext = createContext<{ eventEmitter: EventEmitter<string> | null }>({
/**
 * Typed event object emitted via the shared EventEmitter.
 * Covers workflow updates, prompt-editor commands, DSL export checks, etc.
 */
export type EventEmitterMessage = {
  type: string
  payload?: unknown
  instanceId?: string
}

export type EventEmitterValue = string | EventEmitterMessage

const EventEmitterContext = createContext<{ eventEmitter: EventEmitter<EventEmitterValue> | null }>({
  eventEmitter: null,
})

@@ -16,7 +28,7 @@ type EventEmitterContextProviderProps = {
export const EventEmitterContextProvider = ({
  children,
}: EventEmitterContextProviderProps) => {
  const eventEmitter = useEventEmitter<string>()
  const eventEmitter = useEventEmitter<EventEmitterValue>()

  return (
    <EventEmitterContext.Provider value={{ eventEmitter }}>
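For context, not part of this diff: widening the emitter from EventEmitter<string> to EventEmitter<EventEmitterValue> keeps existing string emits compiling while letting workflow code send structured messages, as the new hook above does with WORKFLOW_DATA_UPDATE. A minimal subscriber sketch, assuming a component rendered under EventEmitterContextProvider (ahooks event emitters expose useSubscription); the hook name useWorkflowDataUpdates is hypothetical:

// Illustration only: filtering typed messages from the shared emitter.
import { WORKFLOW_DATA_UPDATE } from '@/app/components/workflow/constants'
import type { EventEmitterValue } from '@/context/event-emitter'
import { useEventEmitterContextContext } from '@/context/event-emitter'

const useWorkflowDataUpdates = (onUpdate: (payload: unknown) => void) => {
  const { eventEmitter } = useEventEmitterContextContext()

  eventEmitter?.useSubscription((value: EventEmitterValue) => {
    // Plain string events fall through; only structured messages are matched.
    if (typeof value !== 'string' && value.type === WORKFLOW_DATA_UPDATE)
      onUpdate(value.payload)
  })
}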
@@ -2266,14 +2266,6 @@
      "count": 1
    }
  },
  "app/components/datasets/documents/detail/metadata/index.tsx": {
    "react-hooks-extra/no-direct-set-state-in-use-effect": {
      "count": 4
    },
    "ts/no-explicit-any": {
      "count": 2
    }
  },
  "app/components/datasets/documents/detail/new-segment.tsx": {
    "ts/no-explicit-any": {
      "count": 1

@@ -3091,11 +3083,6 @@
      "count": 2
    }
  },
  "app/components/rag-pipeline/components/update-dsl-modal.tsx": {
    "ts/no-explicit-any": {
      "count": 1
    }
  },
  "app/components/rag-pipeline/hooks/use-DSL.ts": {
    "ts/no-explicit-any": {
      "count": 1
@@ -63,11 +63,6 @@
  "autoUpdate.upgradeMode.partial": "المحدد فقط",
  "autoUpdate.upgradeModePlaceholder.exclude": "لن يتم تحديث الإضافات المحددة تلقائيًا",
  "autoUpdate.upgradeModePlaceholder.partial": "سيتم تحديث الإضافات المحددة فقط تلقائيًا. لم يتم تحديد أي إضافات حاليًا، لذلك لن يتم تحديث أي إضافات تلقائيًا.",
  "batchUpgrade.button": "تحديث الكل",
  "batchUpgrade.errorMessage": "فشل إرسال مهمة تحديث البرنامج المساعد. يرجى المحاولة مرة أخرى",
  "batchUpgrade.noUpdatesMessage": "جميع البرامج المساعدة محدثة",
  "batchUpgrade.submittedMessage": "تم إرسال مهمة تحديث البرنامج المساعد",
  "batchUpgrade.tooltip": "تحديث جميع البرامج المساعدة المثبتة من Marketplace إلى أحدث إصدار",
  "category.agents": "استراتيجيات الوكيل",
  "category.all": "الكل",
  "category.bundles": "حزم",

@@ -63,11 +63,6 @@
  "autoUpdate.upgradeMode.partial": "Nur ausgewählt",
  "autoUpdate.upgradeModePlaceholder.exclude": "Ausgewählte Plugins werden nicht automatisch aktualisiert",
  "autoUpdate.upgradeModePlaceholder.partial": "Nur ausgewählte Plugins werden automatisch aktualisiert. Derzeit sind keine Plugins ausgewählt, daher werden keine Plugins automatisch aktualisiert.",
  "batchUpgrade.button": "Alle aktualisieren",
  "batchUpgrade.errorMessage": "Fehler beim Senden der Plugin-Aktualisierungsaufgabe. Bitte erneut versuchen",
  "batchUpgrade.noUpdatesMessage": "Alle Plugins sind auf dem neuesten Stand",
  "batchUpgrade.submittedMessage": "Plugin-Aktualisierungsaufgabe eingereicht",
  "batchUpgrade.tooltip": "Alle vom Marketplace installierten Plugins auf die neueste Version aktualisieren",
  "category.agents": "Agenten-Strategien",
  "category.all": "Alle",
  "category.bundles": "Bündel",

@@ -63,11 +63,6 @@
  "autoUpdate.upgradeMode.partial": "Only selected",
  "autoUpdate.upgradeModePlaceholder.exclude": "Selected plugins will not auto-update",
  "autoUpdate.upgradeModePlaceholder.partial": "Only selected plugins will auto-update. No plugins are currently selected, so no plugins will auto-update.",
  "batchUpgrade.button": "Update All",
  "batchUpgrade.errorMessage": "Failed to submit plugin update task. Please try again.",
  "batchUpgrade.noUpdatesMessage": "All plugins are up to date",
  "batchUpgrade.submittedMessage": "Plugin update task submitted",
  "batchUpgrade.tooltip": "Update all plugins installed from Marketplace to the latest version",
  "category.agents": "Agent Strategies",
  "category.all": "All",
  "category.bundles": "Bundles",

@@ -63,11 +63,6 @@
  "autoUpdate.upgradeMode.partial": "Solo seleccionado",
  "autoUpdate.upgradeModePlaceholder.exclude": "Los plugins seleccionados no se actualizarán automáticamente",
  "autoUpdate.upgradeModePlaceholder.partial": "Solo los plugins seleccionados se actualizarán automáticamente. Actualmente no hay plugins seleccionados, por lo que no se actualizarán automáticamente.",
  "batchUpgrade.button": "Actualizar todo",
  "batchUpgrade.errorMessage": "Error al enviar la tarea de actualización del plugin. Por favor, inténtelo de nuevo",
  "batchUpgrade.noUpdatesMessage": "Todos los plugins están actualizados",
  "batchUpgrade.submittedMessage": "Tarea de actualización de plugin enviada",
  "batchUpgrade.tooltip": "Actualizar todos los plugins instalados desde Marketplace a la última versión",
  "category.agents": "Estrategias de los agentes",
  "category.all": "Todo",
  "category.bundles": "Paquetes",

@@ -63,11 +63,6 @@
  "autoUpdate.upgradeMode.partial": "فقط انتخاب شده",
  "autoUpdate.upgradeModePlaceholder.exclude": "افزونه‌های انتخاب شده به‌صورت خودکار به‌روزرسانی نخواهند شد",
  "autoUpdate.upgradeModePlaceholder.partial": "فقط پلاگین‌های انتخاب شده به‌روزرسانی خودکار خواهند داشت. در حال حاضر هیچ پلاگینی انتخاب نشده است، بنابراین هیچ پلاگینی به‌روزرسانی خودکار نخواهد شد.",
  "batchUpgrade.button": "به‌روزرسانی همه",
  "batchUpgrade.errorMessage": "ارسال وظیفه به‌روزرسانی افزونه ناموفق بود. لطفاً دوباره امتحان کنید",
  "batchUpgrade.noUpdatesMessage": "همه افزونه‌ها به‌روز هستند",
  "batchUpgrade.submittedMessage": "وظیفه به‌روزرسانی افزونه ارسال شد",
  "batchUpgrade.tooltip": "به‌روزرسانی همه افزونه‌های نصب شده از Marketplace به آخرین نسخه",
  "category.agents": "استراتژی های عامل",
  "category.all": "همه",
  "category.bundles": "بسته",

@@ -63,11 +63,6 @@
  "autoUpdate.upgradeMode.partial": "Seulement sélectionné",
  "autoUpdate.upgradeModePlaceholder.exclude": "Les plugins sélectionnés ne se mettront pas à jour automatiquement.",
  "autoUpdate.upgradeModePlaceholder.partial": "Seuls les plugins sélectionnés se mettront à jour automatiquement. Aucun plugin n'est actuellement sélectionné, donc aucun plugin ne se mettra à jour automatiquement.",
  "batchUpgrade.button": "Tout mettre à jour",
  "batchUpgrade.errorMessage": "Échec de la soumission de la tâche de mise à jour du plugin. Veuillez réessayer",
  "batchUpgrade.noUpdatesMessage": "Tous les plugins sont à jour",
  "batchUpgrade.submittedMessage": "Tâche de mise à jour du plugin soumise",
  "batchUpgrade.tooltip": "Mettre à jour tous les plugins installés depuis Marketplace vers la dernière version",
  "category.agents": "Stratégies des agents",
  "category.all": "Tout",
  "category.bundles": "Paquets",

@@ -63,11 +63,6 @@
  "autoUpdate.upgradeMode.partial": "केवल चयनित",
  "autoUpdate.upgradeModePlaceholder.exclude": "चुने हुए प्लगइन्स अपने आप अपडेट नहीं होंगे",
  "autoUpdate.upgradeModePlaceholder.partial": "केवल चयनित प्लगइन्स स्वतः अपडेट होंगे। वर्तमान में कोई प्लगइन चयनित नहीं है, इसलिए कोई प्लगइन स्वतः अपडेट नहीं होगा।",
  "batchUpgrade.button": "सभी को अपडेट करें",
  "batchUpgrade.errorMessage": "प्लगइन अपडेट कार्य सबमिट करने में विफल। कृपया पुनः प्रयास करें",
  "batchUpgrade.noUpdatesMessage": "सभी प्लगइन्स नवीनतम हैं",
  "batchUpgrade.submittedMessage": "प्लगइन अपडेट कार्य सबमिट किया गया",
  "batchUpgrade.tooltip": "Marketplace से इंस्टॉल किए गए सभी प्लगइन्स को नवीनतम संस्करण में अपडेट करें",
  "category.agents": "एजेंट रणनीतियाँ",
  "category.all": "सभी",
  "category.bundles": "बंडल",

@@ -63,11 +63,6 @@
  "autoUpdate.upgradeMode.partial": "Hanya dipilih",
  "autoUpdate.upgradeModePlaceholder.exclude": "Plugin yang dipilih tidak akan diperbarui secara otomatis",
  "autoUpdate.upgradeModePlaceholder.partial": "Hanya plugin yang dipilih yang akan diperbarui secara otomatis. Saat ini tidak ada plugin yang dipilih, jadi tidak ada plugin yang akan diperbarui secara otomatis.",
  "batchUpgrade.button": "Perbarui semua",
  "batchUpgrade.errorMessage": "Gagal mengirim tugas pembaruan plugin. Silakan coba lagi",
  "batchUpgrade.noUpdatesMessage": "Semua plugin sudah terbaru",
  "batchUpgrade.submittedMessage": "Tugas pembaruan plugin telah dikirim",
  "batchUpgrade.tooltip": "Perbarui semua plugin yang diinstal dari Marketplace ke versi terbaru",
  "category.agents": "Strategi Agen",
  "category.all": "Semua",
  "category.bundles": "Bundel",

@@ -63,11 +63,6 @@
  "autoUpdate.upgradeMode.partial": "Solo selezionati",
  "autoUpdate.upgradeModePlaceholder.exclude": "I plugin selezionati non verranno aggiornati automaticamente",
  "autoUpdate.upgradeModePlaceholder.partial": "Solo i plugin selezionati si aggiorneranno automaticamente. Attualmente non ci sono plugin selezionati, quindi nessun plugin si aggiornerà automaticamente.",
  "batchUpgrade.button": "Aggiorna tutto",
  "batchUpgrade.errorMessage": "Invio dell'attività di aggiornamento del plugin non riuscito. Riprova",
  "batchUpgrade.noUpdatesMessage": "Tutti i plugin sono aggiornati",
  "batchUpgrade.submittedMessage": "Attività di aggiornamento plugin inviata",
  "batchUpgrade.tooltip": "Aggiorna tutti i plugin installati da Marketplace all'ultima versione",
  "category.agents": "Strategie degli agenti",
  "category.all": "Tutto",
  "category.bundles": "Pacchetti",

@@ -63,11 +63,6 @@
  "autoUpdate.upgradeMode.partial": "選択されたもののみ",
  "autoUpdate.upgradeModePlaceholder.exclude": "選択されたプラグインは自動更新されません",
  "autoUpdate.upgradeModePlaceholder.partial": "選択されたプラグインのみが自動更新されます。現在選択されているプラグインはないため、プラグインは自動更新されません。",
  "batchUpgrade.button": "すべて更新",
  "batchUpgrade.errorMessage": "プラグイン更新タスクの送信に失敗しました。もう一度お試しください",
  "batchUpgrade.noUpdatesMessage": "すべてのプラグインは最新です",
  "batchUpgrade.submittedMessage": "プラグイン更新タスクを送信しました",
  "batchUpgrade.tooltip": "Marketplaceからインストールされたすべてのプラグインを最新バージョンに更新",
  "category.agents": "エージェント戦略",
  "category.all": "すべて",
  "category.bundles": "バンドル",

@@ -63,11 +63,6 @@
  "autoUpdate.upgradeMode.partial": "선택된 것만",
  "autoUpdate.upgradeModePlaceholder.exclude": "선택한 플러그인은 자동으로 업데이트되지 않습니다.",
  "autoUpdate.upgradeModePlaceholder.partial": "선택된 플러그인만 자동 업데이트됩니다. 현재 선택된 플러그인이 없으므로 자동 업데이트되는 플러그인은 없습니다.",
  "batchUpgrade.button": "모두 업데이트",
  "batchUpgrade.errorMessage": "플러그인 업데이트 작업 제출 실패. 다시 시도하세요",
  "batchUpgrade.noUpdatesMessage": "모든 플러그인이 최신 버전입니다",
  "batchUpgrade.submittedMessage": "플러그인 업데이트 작업을 제출했습니다",
  "batchUpgrade.tooltip": "Marketplace에서 설치된 모든 플러그인을 최신 버전으로 업데이트",
  "category.agents": "에이전트 전략",
  "category.all": "모두",
  "category.bundles": "번들",

@@ -63,11 +63,6 @@
  "autoUpdate.upgradeMode.partial": "Tylko wybrane",
  "autoUpdate.upgradeModePlaceholder.exclude": "Wybrane wtyczki nie będą aktualizować się automatycznie.",
  "autoUpdate.upgradeModePlaceholder.partial": "Tylko wybrane wtyczki będą się aktualizować automatycznie. Obecnie nie wybrano żadnych wtyczek, więc żadna wtyczka nie będzie się automatycznie aktualizować.",
  "batchUpgrade.button": "Aktualizuj wszystko",
  "batchUpgrade.errorMessage": "Nie udało się przesłać zadania aktualizacji wtyczki. Spróbuj ponownie",
  "batchUpgrade.noUpdatesMessage": "Wszystkie wtyczki są aktualne",
  "batchUpgrade.submittedMessage": "Przesłano zadanie aktualizacji wtyczki",
  "batchUpgrade.tooltip": "Zaktualizuj wszystkie wtyczki zainstalowane z Marketplace do najnowszej wersji",
  "category.agents": "Strategie agentów",
  "category.all": "Cały",
  "category.bundles": "Wiązki",

@@ -63,11 +63,6 @@
  "autoUpdate.upgradeMode.partial": "Somente selecionado",
  "autoUpdate.upgradeModePlaceholder.exclude": "Plugins selecionados não serão atualizados automaticamente",
  "autoUpdate.upgradeModePlaceholder.partial": "Apenas plugins selecionados serão atualizados automaticamente. Nenhum plugin está atualmente selecionado, então nenhum plugin será atualizado automaticamente.",
  "batchUpgrade.button": "Atualizar tudo",
  "batchUpgrade.errorMessage": "Falha ao enviar tarefa de atualização de plugin. Por favor, tente novamente",
  "batchUpgrade.noUpdatesMessage": "Todos os plugins estão atualizados",
  "batchUpgrade.submittedMessage": "Tarefa de atualização de plugin enviada",
  "batchUpgrade.tooltip": "Atualizar todos os plugins instalados do Marketplace para a versão mais recente",
  "category.agents": "Estratégias do agente",
  "category.all": "Todo",
  "category.bundles": "Pacotes",

@@ -63,11 +63,6 @@
  "autoUpdate.upgradeMode.partial": "Numai selectat",
  "autoUpdate.upgradeModePlaceholder.exclude": "Pluginurile selectate nu se vor actualiza automat.",
  "autoUpdate.upgradeModePlaceholder.partial": "Numai pluginurile selectate se vor actualiza automat. Nu există pluginuri selectate în prezent, așa că niciun plugin nu se va actualiza automat.",
  "batchUpgrade.button": "Actualizează tot",
  "batchUpgrade.errorMessage": "Trimiterea sarcinii de actualizare a pluginului a eșuat. Vă rugăm să încercați din nou",
  "batchUpgrade.noUpdatesMessage": "Toate pluginurile sunt actualizate",
  "batchUpgrade.submittedMessage": "Sarcina de actualizare a pluginului a fost trimisă",
  "batchUpgrade.tooltip": "Actualizați toate pluginurile instalate din Marketplace la cea mai recentă versiune",
  "category.agents": "Strategii pentru agenți",
  "category.all": "Tot",
  "category.bundles": "Pachete",

@@ -63,11 +63,6 @@
  "autoUpdate.upgradeMode.partial": "Только выбрано",
  "autoUpdate.upgradeModePlaceholder.exclude": "Выбранные плагины не будут обновляться автоматически",
  "autoUpdate.upgradeModePlaceholder.partial": "Только выбранные плагины будут автоматически обновляться. В данный момент плагины не выбраны, поэтому никакие плагины не будут автоматически обновляться.",
  "batchUpgrade.button": "Обновить все",
  "batchUpgrade.errorMessage": "Не удалось отправить задачу обновления плагина. Попробуйте снова",
  "batchUpgrade.noUpdatesMessage": "Все плагины обновлены",
  "batchUpgrade.submittedMessage": "Задача обновления плагина отправлена",
  "batchUpgrade.tooltip": "Обновить все плагины, установленные из Marketplace, до последней версии",
  "category.agents": "Агентские стратегии",
  "category.all": "Все",
  "category.bundles": "Пакеты",

@@ -63,11 +63,6 @@
  "autoUpdate.upgradeMode.partial": "Samo izbrano",
  "autoUpdate.upgradeModePlaceholder.exclude": "Izbrani vtičniki se ne bodo samodejno posodabljali.",
  "autoUpdate.upgradeModePlaceholder.partial": "Samo izbrani vtičniki se bodo samodejno posodabljali. Trenutno ni izbranih nobenih vtičnikov, zato se nobeni vtičniki ne bodo samodejno posodobili.",
  "batchUpgrade.button": "Posodobi vse",
  "batchUpgrade.errorMessage": "Pošiljanje naloge za posodobitev vtičnika ni uspelo. Prosimo, poskusite znova",
  "batchUpgrade.noUpdatesMessage": "Vsi vtičniki so posodobljeni",
  "batchUpgrade.submittedMessage": "Naloga za posodobitev vtičnika je bila poslana",
  "batchUpgrade.tooltip": "Posodobi vse vtičnike, nameščene iz Marketplace, na najnovejšo različico",
  "category.agents": "Strategije agenta",
  "category.all": "Vse",
  "category.bundles": "Paketi",

@@ -63,11 +63,6 @@
  "autoUpdate.upgradeMode.partial": "เฉพาะที่เลือกไว้",
  "autoUpdate.upgradeModePlaceholder.exclude": "ปลั๊กอินที่เลือกจะไม่อัปเดตอัตโนมัติ",
  "autoUpdate.upgradeModePlaceholder.partial": "เฉพาะปลั๊กอินที่เลือกจะอัปเดตโดยอัตโนมัติ ขณะนี้ไม่มีปลั๊กอินใดที่ถูกเลือก ดังนั้นจะไม่มีปลั๊กอินใดที่อัปเดตโดยอัตโนมัติ",
  "batchUpgrade.button": "อัปเดตทั้งหมด",
  "batchUpgrade.errorMessage": "ส่งงานอัปเดตปลั๊กอินล้มเหลว กรุณาลองอีกครั้ง",
  "batchUpgrade.noUpdatesMessage": "ปลั๊กอินทั้งหมดเป็นเวอร์ชันล่าสุดแล้ว",
  "batchUpgrade.submittedMessage": "ส่งงานอัปเดตปลั๊กอินแล้ว",
  "batchUpgrade.tooltip": "อัปเดตปลั๊กอินทั้งหมดที่ติดตั้งจาก Marketplace เป็นเวอร์ชันล่าสุด",
  "category.agents": "กลยุทธ์ตัวแทน",
  "category.all": "ทั้งหมด",
  "category.bundles": "ชุดรวม",

@@ -63,11 +63,6 @@
  "autoUpdate.upgradeMode.partial": "Sadece seçilen",
  "autoUpdate.upgradeModePlaceholder.exclude": "Seçilen eklentiler otomatik olarak güncellenmeyecek.",
  "autoUpdate.upgradeModePlaceholder.partial": "Sadece seçilen eklentiler otomatik olarak güncellenecek. Şu anda hiçbir eklenti seçilmedi, bu yüzden hiçbir eklenti otomatik olarak güncellenmeyecek.",
  "batchUpgrade.button": "Tümünü güncelle",
  "batchUpgrade.errorMessage": "Eklenti güncelleme görevi gönderimi başarısız. Lütfen tekrar deneyin",
  "batchUpgrade.noUpdatesMessage": "Tüm eklentiler güncel",
  "batchUpgrade.submittedMessage": "Eklenti güncelleme görevi gönderildi",
  "batchUpgrade.tooltip": "Marketplace'ten yüklenen tüm eklentileri en son sürüme güncelle",
  "category.agents": "Ajan Stratejileri",
  "category.all": "Tüm",
  "category.bundles": "Paketler",

@@ -63,11 +63,6 @@
  "autoUpdate.upgradeMode.partial": "Тільки вибрані",
  "autoUpdate.upgradeModePlaceholder.exclude": "Вибрані плагіни не будуть оновлюватися автоматично",
  "autoUpdate.upgradeModePlaceholder.partial": "Тільки вибрані плагіни будуть автоматично оновлюватись. Наразі жоден з плагінів не вибрано, тому жоден плагін не буде автоматично оновлений.",
  "batchUpgrade.button": "Оновити все",
  "batchUpgrade.errorMessage": "Не вдалося надіслати завдання оновлення плагіна. Спробуйте ще раз",
  "batchUpgrade.noUpdatesMessage": "Усі плагіни оновлені",
  "batchUpgrade.submittedMessage": "Завдання оновлення плагіна надіслано",
  "batchUpgrade.tooltip": "Оновити всі плагіни, встановлені з Marketplace, до останньої версії",
  "category.agents": "Стратегії агентів",
  "category.all": "Увесь",
  "category.bundles": "Пакети",

@@ -63,11 +63,6 @@
  "autoUpdate.upgradeMode.partial": "Chỉ được chọn",
  "autoUpdate.upgradeModePlaceholder.exclude": "Các plugin được chọn sẽ không tự động cập nhật",
  "autoUpdate.upgradeModePlaceholder.partial": "Chỉ những plugin được chọn mới tự động cập nhật. Hiện tại không có plugin nào được chọn, vì vậy sẽ không có plugin nào tự động cập nhật.",
  "batchUpgrade.button": "Cập nhật tất cả",
  "batchUpgrade.errorMessage": "Gửi tác vụ cập nhật plugin thất bại. Vui lòng thử lại",
  "batchUpgrade.noUpdatesMessage": "Tất cả các plugin đã được cập nhật",
  "batchUpgrade.submittedMessage": "Đã gửi tác vụ cập nhật plugin",
  "batchUpgrade.tooltip": "Cập nhật tất cả các plugin được cài đặt từ Marketplace lên phiên bản mới nhất",
  "category.agents": "Chiến lược đại lý",
  "category.all": "Tất cả",
  "category.bundles": "Bó",

@@ -63,11 +63,6 @@
  "autoUpdate.upgradeMode.partial": "仅选定",
  "autoUpdate.upgradeModePlaceholder.exclude": "选定的插件将不会自动更新",
  "autoUpdate.upgradeModePlaceholder.partial": "仅选定的插件将自动更新。目前未选择任何插件,因此不会自动更新任何插件。",
  "batchUpgrade.button": "全部更新",
  "batchUpgrade.errorMessage": "提交插件更新任务失败,请重试",
  "batchUpgrade.noUpdatesMessage": "所有插件已是最新版本",
  "batchUpgrade.submittedMessage": "已提交插件更新任务",
  "batchUpgrade.tooltip": "将所有从 Marketplace 安装的插件更新到最新版本",
  "category.agents": "Agent 策略",
  "category.all": "全部",
  "category.bundles": "插件集",

@@ -63,11 +63,6 @@
  "autoUpdate.upgradeMode.partial": "僅選擇",
  "autoUpdate.upgradeModePlaceholder.exclude": "選定的插件將不會自動更新",
  "autoUpdate.upgradeModePlaceholder.partial": "只有選定的插件會自動更新。目前未選定任何插件,因此不會自動更新任何插件。",
  "batchUpgrade.button": "全部更新",
  "batchUpgrade.errorMessage": "提交插件更新任務失敗,請重試",
  "batchUpgrade.noUpdatesMessage": "所有插件已是最新版本",
  "batchUpgrade.submittedMessage": "已提交插件更新任務",
  "batchUpgrade.tooltip": "將所有從 Marketplace 安裝的插件更新到最新版本",
  "category.agents": "代理策略",
  "category.all": "都",
  "category.bundles": "束",
@@ -47,7 +47,7 @@
    "i18n:check": "tsx ./scripts/check-i18n.js",
    "test": "vitest run",
    "test:coverage": "vitest run --coverage",
    "test:ci": "vitest run --coverage --silent=passed-only",
    "test:ci": "vitest run --coverage --reporter vitest-tiny-reporter --silent=passed-only",
    "test:watch": "vitest --watch",
    "analyze-component": "node ./scripts/analyze-component.js",
    "refactor-component": "node ./scripts/refactor-component.js",

@@ -236,7 +236,8 @@
    "vite": "7.3.1",
    "vite-tsconfig-paths": "6.0.4",
    "vitest": "4.0.17",
    "vitest-canvas-mock": "1.1.3"
    "vitest-canvas-mock": "1.1.3",
    "vitest-tiny-reporter": "1.3.1"
  },
  "pnpm": {
    "overrides": {
15
web/pnpm-lock.yaml
generated
@@ -585,6 +585,9 @@ importers:
      vitest-canvas-mock:
        specifier: 1.1.3
        version: 1.1.3(vitest@4.0.17)
      vitest-tiny-reporter:
        specifier: 1.3.1
        version: 1.3.1(@vitest/runner@4.0.17)(vitest@4.0.17)

packages:

@@ -7291,6 +7294,12 @@ packages:
    peerDependencies:
      vitest: ^3.0.0 || ^4.0.0

  vitest-tiny-reporter@1.3.1:
    resolution: {integrity: sha512-9WfLruQBbxm4EqMIS0jDZmQjvMgsWgHUso9mHQWgjA6hM3tEVhjdG8wYo7ePFh1XbwEFzEo3XUQqkGoKZ/Td2Q==}
    peerDependencies:
      '@vitest/runner': ^2.0.0 || ^3.0.2 || ^4.0.0
      vitest: ^2.0.0 || ^3.0.2 || ^4.0.0

  vitest@4.0.17:
    resolution: {integrity: sha512-FQMeF0DJdWY0iOnbv466n/0BudNdKj1l5jYgl5JVTwjSsZSlqyXFt/9+1sEyhR6CLowbZpV7O1sCHrzBhucKKg==}
    engines: {node: ^20.0.0 || ^22.0.0 || >=24.0.0}

@@ -15342,6 +15351,12 @@ snapshots:
      moo-color: 1.0.3
      vitest: 4.0.17(@types/node@18.15.0)(@vitest/browser-playwright@4.0.17)(jiti@1.21.7)(jsdom@27.3.0(canvas@3.2.1))(sass@1.93.2)(terser@5.46.0)(tsx@4.21.0)(yaml@2.8.2)

  vitest-tiny-reporter@1.3.1(@vitest/runner@4.0.17)(vitest@4.0.17):
    dependencies:
      '@vitest/runner': 4.0.17
      tinyrainbow: 3.0.3
      vitest: 4.0.17(@types/node@18.15.0)(@vitest/browser-playwright@4.0.17)(jiti@1.21.7)(jsdom@27.3.0(canvas@3.2.1))(sass@1.93.2)(terser@5.46.0)(tsx@4.21.0)(yaml@2.8.2)

  vitest@4.0.17(@types/node@18.15.0)(@vitest/browser-playwright@4.0.17)(jiti@1.21.7)(jsdom@27.3.0(canvas@3.2.1))(sass@1.93.2)(terser@5.46.0)(tsx@4.21.0)(yaml@2.8.2):
    dependencies:
      '@vitest/expect': 4.0.17
@@ -104,27 +104,3 @@ export const updatePermission = async (permissions: Permissions) => {
export const uninstallPlugin = async (pluginId: string) => {
  return post<UninstallPluginResponse>('/workspaces/current/plugin/uninstall', { body: { plugin_installation_id: pluginId } })
}

export const batchUpgradePlugins = async () => {
  return post<{
    success: Array<{
      plugin_id: string
      from_version: string
      to_version: string
      from_identifier: string
      to_identifier: string
    }>
    failed: Array<{
      plugin_id: string
      current_version: string
      error: string
    }>
    skipped: Array<{
      plugin_id: string
      reason: string
      current_version: string
    }>
  }>('/workspaces/current/plugin/upgrade/batch', {
    body: {},
  })
}
@@ -1,8 +1,6 @@
import { defineConfig, mergeConfig } from 'vitest/config'
import viteConfig from './vite.config'

const isCI = !!process.env.CI

export default mergeConfig(viteConfig, defineConfig({
  test: {
    environment: 'jsdom',

@@ -10,7 +8,7 @@ export default mergeConfig(viteConfig, defineConfig({
    setupFiles: ['./vitest.setup.ts'],
    coverage: {
      provider: 'v8',
      reporter: isCI ? ['json', 'json-summary'] : ['text', 'json', 'json-summary'],
      reporter: ['json', 'json-summary'],
    },
  },
}))