Compare commits


1 Commit

Author:  Yansong Zhang
SHA1:    ce644e1549
Message: add console_ns import
Date:    2026-02-04 16:03:48 +08:00
190 changed files with 4475 additions and 22659 deletions

View File

@@ -0,0 +1 @@
../../.agents/skills/component-refactoring

View File

@@ -0,0 +1 @@
../../.agents/skills/frontend-code-review

View File

@@ -0,0 +1 @@
../../.agents/skills/frontend-testing

View File

@@ -0,0 +1 @@
../../.agents/skills/orpc-contract-first

.github/CODEOWNERS (vendored, 7 lines changed)
View File

@@ -24,10 +24,6 @@
/api/services/tools/mcp_tools_manage_service.py @Nov1c444
/api/controllers/mcp/ @Nov1c444
/api/controllers/console/app/mcp_server.py @Nov1c444
# Backend - Tests
/api/tests/ @laipz8200 @QuantumGhost
/api/tests/**/*mcp* @Nov1c444
# Backend - Workflow - Engine (Core graph execution engine)
@@ -238,9 +234,6 @@
# Frontend - Base Components
/web/app/components/base/ @iamjoel @zxhlyh
# Frontend - Base Components Tests
/web/app/components/base/**/*.spec.tsx @hyoban @CodingOnStar
# Frontend - Utils and Hooks
/web/utils/classnames.ts @iamjoel @zxhlyh
/web/utils/time.ts @iamjoel @zxhlyh

View File

@@ -79,6 +79,29 @@ jobs:
find . -name "*.py" -type f -exec sed -i.bak -E 's/"([^"]+)" \| None/Optional["\1"]/g; s/'"'"'([^'"'"']+)'"'"' \| None/Optional['"'"'\1'"'"']/g' {} \;
find . -name "*.py.bak" -type f -delete
- name: Install pnpm
uses: pnpm/action-setup@v4
with:
package_json_file: web/package.json
run_install: false
- name: Setup Node.js
uses: actions/setup-node@v6
with:
node-version: 24
cache: pnpm
cache-dependency-path: ./web/pnpm-lock.yaml
- name: Install web dependencies
run: |
cd web
pnpm install --frozen-lockfile
- name: ESLint autofix
run: |
cd web
pnpm lint:fix || true
# mdformat breaks YAML front matter in markdown files. Add --exclude for directories containing YAML front matter.
- name: mdformat
run: |
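
Note: the sed step above rewrites quoted union annotations such as `"Account" | None` into `Optional["Account"]` form before processing. A minimal Python sketch of the same substitution, for illustration only (not part of the commit):

```python
import re

# Mirrors the sed expression in the workflow step above:
#   s/"([^"]+)" \| None/Optional["\1"]/g   (plus the single-quote variant)
def rewrite_optionals(source: str) -> str:
    source = re.sub(r'"([^"]+)" \| None', r'Optional["\1"]', source)
    source = re.sub(r"'([^']+)' \| None", r"Optional['\1']", source)
    return source

print(rewrite_optionals('x: "Account" | None = None'))
# -> x: Optional["Account"] = None
```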

View File

@@ -4,7 +4,8 @@ on:
workflow_run:
workflows: ["Build and Push API & Web"]
branches:
- "build/feat/hitl"
- "feat/hitl-frontend"
- "feat/hitl-backend"
types:
- completed
@@ -13,7 +14,10 @@ jobs:
runs-on: ubuntu-latest
if: |
github.event.workflow_run.conclusion == 'success' &&
github.event.workflow_run.head_branch == 'build/feat/hitl'
(
github.event.workflow_run.head_branch == 'feat/hitl-frontend' ||
github.event.workflow_run.head_branch == 'feat/hitl-backend'
)
steps:
- name: Deploy to server
uses: appleboy/ssh-action@v1

View File

@@ -39,7 +39,7 @@ jobs:
run: pnpm install --frozen-lockfile
- name: Run tests
run: pnpm test:ci
run: pnpm test:coverage
- name: Coverage Summary
if: always()

View File

@@ -136,6 +136,7 @@ ignore_imports =
core.workflow.nodes.llm.llm_utils -> models.provider
core.workflow.nodes.llm.llm_utils -> services.credit_pool_service
core.workflow.nodes.llm.node -> core.tools.signature
core.workflow.nodes.template_transform.template_transform_node -> configs
core.workflow.nodes.tool.tool_node -> core.callback_handler.workflow_tool_callback_handler
core.workflow.nodes.tool.tool_node -> core.tools.tool_engine
core.workflow.nodes.tool.tool_node -> core.tools.tool_manager

View File

@@ -122,7 +122,7 @@ These commands assume you start from the repository root.
```bash
cd api
uv run celery -A app.celery worker -P threads -c 2 --loglevel INFO -Q api_token,dataset,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor,retention
uv run celery -A app.celery worker -P threads -c 2 --loglevel INFO -Q dataset,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor,retention
```
1. Optional: start Celery Beat (scheduled tasks, in a new terminal).

View File

@@ -739,10 +739,8 @@ def upgrade_db():
click.echo(click.style("Database migration successful!", fg="green"))
except Exception as e:
except Exception:
logger.exception("Failed to execute database migration")
click.echo(click.style(f"Database migration failed: {e}", fg="red"))
raise SystemExit(1)
finally:
lock.release()
else:

View File

@@ -1155,16 +1155,6 @@ class CeleryScheduleTasksConfig(BaseSettings):
default=0,
)
# API token last_used_at batch update
ENABLE_API_TOKEN_LAST_USED_UPDATE_TASK: bool = Field(
description="Enable periodic batch update of API token last_used_at timestamps",
default=True,
)
API_TOKEN_LAST_USED_UPDATE_INTERVAL: int = Field(
description="Interval in minutes for batch updating API token last_used_at (default 30)",
default=30,
)
# Trigger provider refresh (simple version)
ENABLE_TRIGGER_PROVIDER_REFRESH_TASK: bool = Field(
description="Enable trigger provider refresh poller",

View File

@@ -10,7 +10,6 @@ from libs.helper import TimestampField
from libs.login import current_account_with_tenant, login_required
from models.dataset import Dataset
from models.model import ApiToken, App
from services.api_token_service import ApiTokenCache
from . import console_ns
from .wraps import account_initialization_required, edit_permission_required, setup_required
@@ -132,11 +131,6 @@ class BaseApiKeyResource(Resource):
if key is None:
flask_restx.abort(HTTPStatus.NOT_FOUND, message="API key not found")
# Invalidate cache before deleting from database
# Type assertion: key is guaranteed to be non-None here because abort() raises
assert key is not None # nosec - for type checker only
ApiTokenCache.delete(key.token, key.type)
db.session.query(ApiToken).where(ApiToken.id == api_key_id).delete()
db.session.commit()

View File

@@ -1,4 +1,3 @@
import logging
import uuid
from datetime import datetime
from typing import Any, Literal, TypeAlias
@@ -55,8 +54,6 @@ ALLOW_CREATE_APP_MODES = ["chat", "agent-chat", "advanced-chat", "workflow", "co
register_enum_models(console_ns, IconType)
_logger = logging.getLogger(__name__)
class AppListQuery(BaseModel):
page: int = Field(default=1, ge=1, le=99999, description="Page number (1-99999)")
@@ -502,7 +499,6 @@ class AppListApi(Resource):
select(Workflow).where(
Workflow.version == Workflow.VERSION_DRAFT,
Workflow.app_id.in_(workflow_capable_app_ids),
Workflow.tenant_id == current_tenant_id,
)
)
.scalars()
@@ -514,14 +510,12 @@ class AppListApi(Resource):
NodeType.TRIGGER_PLUGIN,
}
for workflow in draft_workflows:
node_id = None
try:
for node_id, node_data in workflow.walk_nodes():
for _, node_data in workflow.walk_nodes():
if node_data.get("type") in trigger_node_types:
draft_trigger_app_ids.add(str(workflow.app_id))
break
except Exception:
_logger.exception("error while walking nodes, workflow_id=%s, node_id=%s", workflow.id, node_id)
continue
for app in app_pagination.items:

View File

@@ -55,7 +55,6 @@ from libs.login import current_account_with_tenant, login_required
from models import ApiToken, Dataset, Document, DocumentSegment, UploadFile
from models.dataset import DatasetPermissionEnum
from models.provider_ids import ModelProviderID
from services.api_token_service import ApiTokenCache
from services.dataset_service import DatasetPermissionService, DatasetService, DocumentService
# Register models for flask_restx to avoid dict type issues in Swagger
@@ -821,11 +820,6 @@ class DatasetApiDeleteApi(Resource):
if key is None:
console_ns.abort(404, message="API key not found")
# Invalidate cache before deleting from database
# Type assertion: key is guaranteed to be non-None here because abort() raises
assert key is not None # nosec - for type checker only
ApiTokenCache.delete(key.token, key.type)
db.session.query(ApiToken).where(ApiToken.id == api_key_id).delete()
db.session.commit()

View File

@@ -120,7 +120,7 @@ class TagUpdateDeleteApi(Resource):
TagService.delete_tag(tag_id)
return "", 204
return 204
@console_ns.route("/tag-bindings/create")

View File

@@ -396,7 +396,7 @@ class DatasetApi(DatasetApiResource):
try:
if DatasetService.delete_dataset(dataset_id_str, current_user):
DatasetPermissionService.clear_partial_member_list(dataset_id_str)
return "", 204
return 204
else:
raise NotFound("Dataset not found.")
except services.errors.dataset.DatasetInUseError:
@@ -557,7 +557,7 @@ class DatasetTagsApi(DatasetApiResource):
payload = TagDeletePayload.model_validate(service_api_ns.payload or {})
TagService.delete_tag(payload.tag_id)
return "", 204
return 204
@service_api_ns.route("/datasets/tags/binding")
@@ -581,7 +581,7 @@ class DatasetTagBindingApi(DatasetApiResource):
payload = TagBindingPayload.model_validate(service_api_ns.payload or {})
TagService.save_tag_binding({"tag_ids": payload.tag_ids, "target_id": payload.target_id, "type": "knowledge"})
return "", 204
return 204
@service_api_ns.route("/datasets/tags/unbinding")
@@ -605,7 +605,7 @@ class DatasetTagUnbindingApi(DatasetApiResource):
payload = TagUnbindingPayload.model_validate(service_api_ns.payload or {})
TagService.delete_tag_binding({"tag_id": payload.tag_id, "target_id": payload.target_id, "type": "knowledge"})
return "", 204
return 204
@service_api_ns.route("/datasets/<uuid:dataset_id>/tags")

View File

@@ -746,4 +746,4 @@ class DocumentApi(DatasetApiResource):
except services.errors.document.DocumentIndexingError:
raise DocumentIndexingError("Cannot delete document during indexing.")
return "", 204
return 204

View File

@@ -128,7 +128,7 @@ class DatasetMetadataServiceApi(DatasetApiResource):
DatasetService.check_dataset_permission(dataset, current_user)
MetadataService.delete_metadata(dataset_id_str, metadata_id_str)
return "", 204
return 204
@service_api_ns.route("/datasets/<uuid:dataset_id>/metadata/built-in")

View File

@@ -233,7 +233,7 @@ class DatasetSegmentApi(DatasetApiResource):
if not segment:
raise NotFound("Segment not found.")
SegmentService.delete_segment(segment, document, dataset)
return "", 204
return 204
@service_api_ns.expect(service_api_ns.models[SegmentUpdatePayload.__name__])
@service_api_ns.doc("update_segment")
@@ -499,7 +499,7 @@ class DatasetChildChunkApi(DatasetApiResource):
except ChildChunkDeleteIndexServiceError as e:
raise ChildChunkDeleteIndexError(str(e))
return "", 204
return 204
@service_api_ns.expect(service_api_ns.models[ChildChunkUpdatePayload.__name__])
@service_api_ns.doc("update_child_chunk")

View File

@@ -1,24 +1,27 @@
import logging
import time
from collections.abc import Callable
from datetime import timedelta
from enum import StrEnum, auto
from functools import wraps
from typing import Concatenate, ParamSpec, TypeVar, cast
from typing import Concatenate, ParamSpec, TypeVar
from flask import current_app, request
from flask_login import user_logged_in
from flask_restx import Resource
from pydantic import BaseModel
from sqlalchemy import select, update
from sqlalchemy.orm import Session
from werkzeug.exceptions import Forbidden, NotFound, Unauthorized
from enums.cloud_plan import CloudPlan
from extensions.ext_database import db
from extensions.ext_redis import redis_client
from libs.datetime_utils import naive_utc_now
from libs.login import current_user
from models import Account, Tenant, TenantAccountJoin, TenantStatus
from models.dataset import Dataset, RateLimitLog
from models.model import ApiToken, App
from services.api_token_service import ApiTokenCache, fetch_token_with_single_flight, record_token_usage
from services.end_user_service import EndUserService
from services.feature_service import FeatureService
@@ -293,14 +296,7 @@ def validate_dataset_token(view: Callable[Concatenate[T, P], R] | None = None):
def validate_and_get_api_token(scope: str | None = None):
"""
Validate and get API token with Redis caching.
This function uses a two-tier approach:
1. First checks Redis cache for the token
2. If not cached, queries database and caches the result
The last_used_at field is updated asynchronously via Celery task
to avoid blocking the request.
Validate and get API token.
"""
auth_header = request.headers.get("Authorization")
if auth_header is None or " " not in auth_header:
@@ -312,18 +308,29 @@ def validate_and_get_api_token(scope: str | None = None):
if auth_scheme != "bearer":
raise Unauthorized("Authorization scheme must be 'Bearer'")
# Try to get token from cache first
# Returns a CachedApiToken (plain Python object), not a SQLAlchemy model
cached_token = ApiTokenCache.get(auth_token, scope)
if cached_token is not None:
logger.debug("Token validation served from cache for scope: %s", scope)
# Record usage in Redis for later batch update (no Celery task per request)
record_token_usage(auth_token, scope)
return cast(ApiToken, cached_token)
current_time = naive_utc_now()
cutoff_time = current_time - timedelta(minutes=1)
with Session(db.engine, expire_on_commit=False) as session:
update_stmt = (
update(ApiToken)
.where(
ApiToken.token == auth_token,
(ApiToken.last_used_at.is_(None) | (ApiToken.last_used_at < cutoff_time)),
ApiToken.type == scope,
)
.values(last_used_at=current_time)
)
stmt = select(ApiToken).where(ApiToken.token == auth_token, ApiToken.type == scope)
result = session.execute(update_stmt)
api_token = session.scalar(stmt)
# Cache miss - use Redis lock for single-flight mode
# This ensures only one request queries DB for the same token concurrently
return fetch_token_with_single_flight(auth_token, scope)
if hasattr(result, "rowcount") and result.rowcount > 0:
session.commit()
if not api_token:
raise Unauthorized("Access token is invalid")
return api_token
class DatasetApiResource(Resource):
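
Note: the replacement logic above folds the removed cache/batch pipeline into a single conditional UPDATE, so `last_used_at` is written at most once per minute per token. A condensed sketch of that pattern using the same models as the hunk (the function name is illustrative, not from the commit):

```python
from datetime import timedelta

from sqlalchemy import select, update
from sqlalchemy.orm import Session

from extensions.ext_database import db
from libs.datetime_utils import naive_utc_now
from models.model import ApiToken

def touch_and_fetch_token(auth_token: str, scope: str | None) -> ApiToken | None:
    """Update last_used_at at most once per minute, then return the token row."""
    current_time = naive_utc_now()
    cutoff_time = current_time - timedelta(minutes=1)
    with Session(db.engine, expire_on_commit=False) as session:
        # Conditional UPDATE: only rows whose last_used_at is NULL or stale are
        # written, so hot tokens do not generate a DB write on every request.
        result = session.execute(
            update(ApiToken)
            .where(
                ApiToken.token == auth_token,
                ApiToken.type == scope,
                ApiToken.last_used_at.is_(None) | (ApiToken.last_used_at < cutoff_time),
            )
            .values(last_used_at=current_time)
        )
        api_token = session.scalar(
            select(ApiToken).where(ApiToken.token == auth_token, ApiToken.type == scope)
        )
        if result.rowcount > 0:
            session.commit()
    return api_token
```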

View File

@@ -47,7 +47,6 @@ class DifyNodeFactory(NodeFactory):
code_providers: Sequence[type[CodeNodeProvider]] | None = None,
code_limits: CodeNodeLimits | None = None,
template_renderer: Jinja2TemplateRenderer | None = None,
template_transform_max_output_length: int | None = None,
http_request_http_client: HttpClientProtocol | None = None,
http_request_tool_file_manager_factory: Callable[[], ToolFileManager] = ToolFileManager,
http_request_file_manager: FileManagerProtocol | None = None,
@@ -69,9 +68,6 @@ class DifyNodeFactory(NodeFactory):
max_object_array_length=dify_config.CODE_MAX_OBJECT_ARRAY_LENGTH,
)
self._template_renderer = template_renderer or CodeExecutorJinja2TemplateRenderer()
self._template_transform_max_output_length = (
template_transform_max_output_length or dify_config.TEMPLATE_TRANSFORM_MAX_LENGTH
)
self._http_request_http_client = http_request_http_client or ssrf_proxy
self._http_request_tool_file_manager_factory = http_request_tool_file_manager_factory
self._http_request_file_manager = http_request_file_manager or file_manager
@@ -126,7 +122,6 @@ class DifyNodeFactory(NodeFactory):
graph_init_params=self.graph_init_params,
graph_runtime_state=self.graph_runtime_state,
template_renderer=self._template_renderer,
max_output_length=self._template_transform_max_output_length,
)
if node_type == NodeType.HTTP_REQUEST:

View File

@@ -6,8 +6,7 @@ from yarl import URL
from configs import dify_config
from core.helper.download import download_with_size_limit
from core.plugin.entities.marketplace import MarketplacePluginDeclaration, MarketplacePluginSnapshot
from extensions.ext_redis import redis_client
from core.plugin.entities.marketplace import MarketplacePluginDeclaration
marketplace_api_url = URL(str(dify_config.MARKETPLACE_API_URL))
logger = logging.getLogger(__name__)
@@ -44,37 +43,28 @@ def batch_fetch_plugin_by_ids(plugin_ids: list[str]) -> list[dict]:
return data.get("data", {}).get("plugins", [])
def batch_fetch_plugin_manifests_ignore_deserialization_error(
plugin_ids: list[str],
) -> Sequence[MarketplacePluginDeclaration]:
if len(plugin_ids) == 0:
return []
url = str(marketplace_api_url / "api/v1/plugins/batch")
response = httpx.post(url, json={"plugin_ids": plugin_ids}, headers={"X-Dify-Version": dify_config.project.version})
response.raise_for_status()
result: list[MarketplacePluginDeclaration] = []
for plugin in response.json()["data"]["plugins"]:
try:
result.append(MarketplacePluginDeclaration.model_validate(plugin))
except Exception:
logger.exception(
"Failed to deserialize marketplace plugin manifest for %s", plugin.get("plugin_id", "unknown")
)
return result
def record_install_plugin_event(plugin_unique_identifier: str):
url = str(marketplace_api_url / "api/v1/stats/plugins/install_count")
response = httpx.post(url, json={"unique_identifier": plugin_unique_identifier})
response.raise_for_status()
def fetch_global_plugin_manifest(cache_key_prefix: str, cache_ttl: int) -> None:
"""
Fetch all plugin manifests from marketplace and cache them in Redis.
This should be called once per check cycle to populate the instance-level cache.
Args:
cache_key_prefix: Redis key prefix for caching plugin manifests
cache_ttl: Cache TTL in seconds
Raises:
httpx.HTTPError: If the HTTP request fails
Exception: If any other error occurs during fetching or caching
"""
url = str(marketplace_api_url / "api/v1/dist/plugins/manifest.json")
response = httpx.get(url, headers={"X-Dify-Version": dify_config.project.version}, timeout=30)
response.raise_for_status()
raw_json = response.json()
plugins_data = raw_json.get("plugins", [])
# Parse and cache all plugin snapshots
for plugin_data in plugins_data:
plugin_snapshot = MarketplacePluginSnapshot.model_validate(plugin_data)
redis_client.setex(
name=f"{cache_key_prefix}{plugin_snapshot.plugin_id}",
time=cache_ttl,
value=plugin_snapshot.model_dump_json(),
)

View File

@@ -1,4 +1,4 @@
from pydantic import BaseModel, Field, computed_field, model_validator
from pydantic import BaseModel, Field, model_validator
from core.model_runtime.entities.provider_entities import ProviderEntity
from core.plugin.entities.endpoint import EndpointProviderDeclaration
@@ -48,15 +48,3 @@ class MarketplacePluginDeclaration(BaseModel):
if "tool" in data and not data["tool"]:
del data["tool"]
return data
class MarketplacePluginSnapshot(BaseModel):
org: str
name: str
latest_version: str
latest_package_identifier: str
latest_package_url: str
@computed_field
def plugin_id(self) -> str:
return f"{self.org}/{self.name}"

View File

@@ -112,7 +112,7 @@ class ArrayBooleanVariable(ArrayBooleanSegment, ArrayVariable):
class RAGPipelineVariable(BaseModel):
belong_to_node_id: str = Field(description="belong to which node id, shared means public")
type: str = Field(description="variable type, text-input, paragraph, select, number, file, file-list")
type: str = Field(description="variable type, text-input, paragraph, select, number, file, file-list")
label: str = Field(description="label")
description: str | None = Field(description="description", default="")
variable: str = Field(description="variable key", default="")

View File

@@ -1,6 +1,7 @@
from collections.abc import Mapping, Sequence
from typing import TYPE_CHECKING, Any
from configs import dify_config
from core.workflow.enums import NodeType, WorkflowNodeExecutionStatus
from core.workflow.node_events import NodeRunResult
from core.workflow.nodes.base.node import Node
@@ -15,13 +16,12 @@ if TYPE_CHECKING:
from core.workflow.entities import GraphInitParams
from core.workflow.runtime import GraphRuntimeState
DEFAULT_TEMPLATE_TRANSFORM_MAX_OUTPUT_LENGTH = 400_000
MAX_TEMPLATE_TRANSFORM_OUTPUT_LENGTH = dify_config.TEMPLATE_TRANSFORM_MAX_LENGTH
class TemplateTransformNode(Node[TemplateTransformNodeData]):
node_type = NodeType.TEMPLATE_TRANSFORM
_template_renderer: Jinja2TemplateRenderer
_max_output_length: int
def __init__(
self,
@@ -31,7 +31,6 @@ class TemplateTransformNode(Node[TemplateTransformNodeData]):
graph_runtime_state: "GraphRuntimeState",
*,
template_renderer: Jinja2TemplateRenderer | None = None,
max_output_length: int | None = None,
) -> None:
super().__init__(
id=id,
@@ -41,10 +40,6 @@ class TemplateTransformNode(Node[TemplateTransformNodeData]):
)
self._template_renderer = template_renderer or CodeExecutorJinja2TemplateRenderer()
if max_output_length is not None and max_output_length <= 0:
raise ValueError("max_output_length must be a positive integer")
self._max_output_length = max_output_length or DEFAULT_TEMPLATE_TRANSFORM_MAX_OUTPUT_LENGTH
@classmethod
def get_default_config(cls, filters: Mapping[str, object] | None = None) -> Mapping[str, object]:
"""
@@ -74,11 +69,11 @@ class TemplateTransformNode(Node[TemplateTransformNodeData]):
except TemplateRenderError as e:
return NodeRunResult(inputs=variables, status=WorkflowNodeExecutionStatus.FAILED, error=str(e))
if len(rendered) > self._max_output_length:
if len(rendered) > MAX_TEMPLATE_TRANSFORM_OUTPUT_LENGTH:
return NodeRunResult(
inputs=variables,
status=WorkflowNodeExecutionStatus.FAILED,
error=f"Output length exceeds {self._max_output_length} characters",
error=f"Output length exceeds {MAX_TEMPLATE_TRANSFORM_OUTPUT_LENGTH} characters",
)
return NodeRunResult(

View File

@@ -35,10 +35,10 @@ if [[ "${MODE}" == "worker" ]]; then
if [[ -z "${CELERY_QUEUES}" ]]; then
if [[ "${EDITION}" == "CLOUD" ]]; then
# Cloud edition: separate queues for dataset and trigger tasks
DEFAULT_QUEUES="api_token,dataset,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow_professional,workflow_team,workflow_sandbox,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor,retention"
DEFAULT_QUEUES="dataset,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow_professional,workflow_team,workflow_sandbox,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor,retention"
else
# Community edition (SELF_HOSTED): dataset, pipeline and workflow have separate queues
DEFAULT_QUEUES="api_token,dataset,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor,retention"
DEFAULT_QUEUES="dataset,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor,retention"
fi
else
DEFAULT_QUEUES="${CELERY_QUEUES}"

View File

@@ -184,14 +184,6 @@ def init_app(app: DifyApp) -> Celery:
"task": "schedule.trigger_provider_refresh_task.trigger_provider_refresh",
"schedule": timedelta(minutes=dify_config.TRIGGER_PROVIDER_REFRESH_INTERVAL),
}
if dify_config.ENABLE_API_TOKEN_LAST_USED_UPDATE_TASK:
imports.append("schedule.update_api_token_last_used_task")
beat_schedule["batch_update_api_token_last_used"] = {
"task": "schedule.update_api_token_last_used_task.batch_update_api_token_last_used",
"schedule": timedelta(minutes=dify_config.API_TOKEN_LAST_USED_UPDATE_INTERVAL),
}
celery_app.conf.update(beat_schedule=beat_schedule, imports=imports)
return celery_app

View File

@@ -10,10 +10,6 @@ import models as models
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '7df29de0f6be'
down_revision = '03ea244985ce'
@@ -23,31 +19,16 @@ depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
if _is_pg(conn):
op.create_table('tenant_credit_pools',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('pool_type', sa.String(length=40), server_default='trial', nullable=False),
sa.Column('quota_limit', sa.BigInteger(), nullable=False),
sa.Column('quota_used', sa.BigInteger(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.PrimaryKeyConstraint('id', name='tenant_credit_pool_pkey')
)
else:
# For MySQL and other databases, UUID should be generated at application level
op.create_table('tenant_credit_pools',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('pool_type', sa.String(length=40), server_default='trial', nullable=False),
sa.Column('quota_limit', sa.BigInteger(), nullable=False),
sa.Column('quota_used', sa.BigInteger(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.PrimaryKeyConstraint('id', name='tenant_credit_pool_pkey')
)
op.create_table('tenant_credit_pools',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('pool_type', sa.String(length=40), server_default='trial', nullable=False),
sa.Column('quota_limit', sa.BigInteger(), nullable=False),
sa.Column('quota_used', sa.BigInteger(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.PrimaryKeyConstraint('id', name='tenant_credit_pool_pkey')
)
with op.batch_alter_table('tenant_credit_pools', schema=None) as batch_op:
batch_op.create_index('tenant_credit_pool_pool_type_idx', ['pool_type'], unique=False)
batch_op.create_index('tenant_credit_pool_tenant_id_idx', ['tenant_id'], unique=False)

View File

@@ -2166,9 +2166,7 @@ class TenantCreditPool(TypeBase):
sa.Index("tenant_credit_pool_pool_type_idx", "pool_type"),
)
id: Mapped[str] = mapped_column(
StringUUID, insert_default=lambda: str(uuid4()), default_factory=lambda: str(uuid4()), init=False
)
id: Mapped[str] = mapped_column(StringUUID, primary_key=True, server_default=text("uuid_generate_v4()"), init=False)
tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
pool_type: Mapped[str] = mapped_column(String(40), nullable=False, default="trial", server_default="trial")
quota_limit: Mapped[int] = mapped_column(BigInteger, nullable=False, default=0)

View File

@@ -1,6 +1,6 @@
[project]
name = "dify-api"
version = "1.12.1"
version = "1.12.0"
requires-python = ">=3.11,<3.13"
dependencies = [

View File

@@ -1,24 +1,16 @@
import logging
import math
import time
import click
import app
from core.helper.marketplace import fetch_global_plugin_manifest
from extensions.ext_database import db
from models.account import TenantPluginAutoUpgradeStrategy
from tasks import process_tenant_plugin_autoupgrade_check_task as check_task
logger = logging.getLogger(__name__)
AUTO_UPGRADE_MINIMAL_CHECKING_INTERVAL = 15 * 60 # 15 minutes
MAX_CONCURRENT_CHECK_TASKS = 20
# Import cache constants from the task module
CACHE_REDIS_KEY_PREFIX = check_task.CACHE_REDIS_KEY_PREFIX
CACHE_REDIS_TTL = check_task.CACHE_REDIS_TTL
@app.celery.task(queue="plugin")
def check_upgradable_plugin_task():
@@ -48,22 +40,6 @@ def check_upgradable_plugin_task():
) # make sure all strategies are checked in this interval
batch_interval_time = (AUTO_UPGRADE_MINIMAL_CHECKING_INTERVAL / batch_chunk_count) if batch_chunk_count > 0 else 0
if total_strategies == 0:
click.echo(click.style("no strategies to process, skipping plugin manifest fetch.", fg="green"))
return
# Fetch and cache all plugin manifests before processing tenants
# This reduces load on marketplace from 300k requests to 1 request per check cycle
logger.info("fetching global plugin manifest from marketplace")
try:
fetch_global_plugin_manifest(CACHE_REDIS_KEY_PREFIX, CACHE_REDIS_TTL)
logger.info("successfully fetched and cached global plugin manifest")
except Exception as e:
logger.exception("failed to fetch global plugin manifest")
click.echo(click.style(f"failed to fetch global plugin manifest: {e}", fg="red"))
click.echo(click.style("skipping plugin upgrade check for this cycle", fg="yellow"))
return
for i in range(0, total_strategies, MAX_CONCURRENT_CHECK_TASKS):
batch_strategies = strategies[i : i + MAX_CONCURRENT_CHECK_TASKS]
for strategy in batch_strategies:

View File

@@ -1,114 +0,0 @@
"""
Scheduled task to batch-update API token last_used_at timestamps.
Instead of updating the database on every request, token usage is recorded
in Redis as lightweight SET keys (api_token_active:{scope}:{token}).
This task runs periodically (default every 30 minutes) to flush those
records into the database in a single batch operation.
"""
import logging
import time
from datetime import datetime
import click
from sqlalchemy import update
from sqlalchemy.orm import Session
import app
from extensions.ext_database import db
from extensions.ext_redis import redis_client
from models.model import ApiToken
from services.api_token_service import ACTIVE_TOKEN_KEY_PREFIX
logger = logging.getLogger(__name__)
@app.celery.task(queue="api_token")
def batch_update_api_token_last_used():
"""
Batch update last_used_at for all recently active API tokens.
Scans Redis for api_token_active:* keys, parses the token and scope
from each key, and performs a batch database update.
"""
click.echo(click.style("batch_update_api_token_last_used: start.", fg="green"))
start_at = time.perf_counter()
updated_count = 0
scanned_count = 0
try:
# Collect all active token keys and their values (the actual usage timestamps)
token_entries: list[tuple[str, str | None, datetime]] = [] # (token, scope, usage_time)
keys_to_delete: list[str | bytes] = []
for key in redis_client.scan_iter(match=f"{ACTIVE_TOKEN_KEY_PREFIX}*", count=200):
if isinstance(key, bytes):
key = key.decode("utf-8")
scanned_count += 1
# Read the value (ISO timestamp recorded at actual request time)
value = redis_client.get(key)
if not value:
keys_to_delete.append(key)
continue
if isinstance(value, bytes):
value = value.decode("utf-8")
try:
usage_time = datetime.fromisoformat(value)
except (ValueError, TypeError):
logger.warning("Invalid timestamp in key %s: %s", key, value)
keys_to_delete.append(key)
continue
# Parse token info from key: api_token_active:{scope}:{token}
suffix = key[len(ACTIVE_TOKEN_KEY_PREFIX) :]
parts = suffix.split(":", 1)
if len(parts) == 2:
scope_str, token = parts
scope = None if scope_str == "None" else scope_str
token_entries.append((token, scope, usage_time))
keys_to_delete.append(key)
if not token_entries:
click.echo(click.style("batch_update_api_token_last_used: no active tokens found.", fg="yellow"))
# Still clean up any invalid keys
if keys_to_delete:
redis_client.delete(*keys_to_delete)
return
# Update each token in its own short transaction to avoid long transactions
for token, scope, usage_time in token_entries:
with Session(db.engine, expire_on_commit=False) as session, session.begin():
stmt = (
update(ApiToken)
.where(
ApiToken.token == token,
ApiToken.type == scope,
(ApiToken.last_used_at.is_(None) | (ApiToken.last_used_at < usage_time)),
)
.values(last_used_at=usage_time)
)
result = session.execute(stmt)
rowcount = getattr(result, "rowcount", 0)
if rowcount > 0:
updated_count += 1
# Delete processed keys from Redis
if keys_to_delete:
redis_client.delete(*keys_to_delete)
except Exception:
logger.exception("batch_update_api_token_last_used failed")
elapsed = time.perf_counter() - start_at
click.echo(
click.style(
f"batch_update_api_token_last_used: done. "
f"scanned={scanned_count}, updated={updated_count}, elapsed={elapsed:.2f}s",
fg="green",
)
)

View File

@@ -327,17 +327,6 @@ class AccountService:
@staticmethod
def delete_account(account: Account):
"""Delete account. This method only adds a task to the queue for deletion."""
# Queue account deletion sync tasks for all workspaces BEFORE account deletion (enterprise only)
from services.enterprise.account_deletion_sync import sync_account_deletion
sync_success = sync_account_deletion(account_id=account.id, source="account_deleted")
if not sync_success:
logger.warning(
"Enterprise account deletion sync failed for account %s; proceeding with local deletion.",
account.id,
)
# Now proceed with async account deletion
delete_account_task.delay(account.id)
@staticmethod
@@ -1241,19 +1230,6 @@ class TenantService:
if dify_config.BILLING_ENABLED:
BillingService.clean_billing_info_cache(tenant.id)
# Queue account deletion sync task for enterprise backend to reassign resources (enterprise only)
from services.enterprise.account_deletion_sync import sync_workspace_member_removal
sync_success = sync_workspace_member_removal(
workspace_id=tenant.id, member_id=account.id, source="workspace_member_removed"
)
if not sync_success:
logger.warning(
"Enterprise workspace member removal sync failed: workspace_id=%s, member_id=%s",
tenant.id,
account.id,
)
@staticmethod
def update_member_role(tenant: Tenant, member: Account, new_role: str, operator: Account):
"""Update member role"""

View File

@@ -1,330 +0,0 @@
"""
API Token Service
Handles all API token caching, validation, and usage recording.
Includes Redis cache operations, database queries, and single-flight concurrency control.
"""
import logging
from datetime import datetime
from typing import Any
from pydantic import BaseModel
from sqlalchemy import select
from sqlalchemy.orm import Session
from werkzeug.exceptions import Unauthorized
from extensions.ext_database import db
from extensions.ext_redis import redis_client, redis_fallback
from libs.datetime_utils import naive_utc_now
from models.model import ApiToken
logger = logging.getLogger(__name__)
# ---------------------------------------------------------------------
# Pydantic DTO
# ---------------------------------------------------------------------
class CachedApiToken(BaseModel):
"""
Pydantic model for cached API token data.
This is NOT a SQLAlchemy model instance, but a plain Pydantic model
that mimics the ApiToken model interface for read-only access.
"""
id: str
app_id: str | None
tenant_id: str | None
type: str
token: str
last_used_at: datetime | None
created_at: datetime | None
def __repr__(self) -> str:
return f"<CachedApiToken id={self.id} type={self.type}>"
# ---------------------------------------------------------------------
# Cache configuration
# ---------------------------------------------------------------------
CACHE_KEY_PREFIX = "api_token"
CACHE_TTL_SECONDS = 600 # 10 minutes
CACHE_NULL_TTL_SECONDS = 60 # 1 minute for non-existent tokens
ACTIVE_TOKEN_KEY_PREFIX = "api_token_active:"
# ---------------------------------------------------------------------
# Cache class
# ---------------------------------------------------------------------
class ApiTokenCache:
"""
Redis cache wrapper for API tokens.
Handles serialization, deserialization, and cache invalidation.
"""
@staticmethod
def make_active_key(token: str, scope: str | None = None) -> str:
"""Generate Redis key for recording token usage."""
return f"{ACTIVE_TOKEN_KEY_PREFIX}{scope}:{token}"
@staticmethod
def _make_tenant_index_key(tenant_id: str) -> str:
"""Generate Redis key for tenant token index."""
return f"tenant_tokens:{tenant_id}"
@staticmethod
def _make_cache_key(token: str, scope: str | None = None) -> str:
"""Generate cache key for the given token and scope."""
scope_str = scope or "any"
return f"{CACHE_KEY_PREFIX}:{scope_str}:{token}"
@staticmethod
def _serialize_token(api_token: Any) -> bytes:
"""Serialize ApiToken object to JSON bytes."""
if isinstance(api_token, CachedApiToken):
return api_token.model_dump_json().encode("utf-8")
cached = CachedApiToken(
id=str(api_token.id),
app_id=str(api_token.app_id) if api_token.app_id else None,
tenant_id=str(api_token.tenant_id) if api_token.tenant_id else None,
type=api_token.type,
token=api_token.token,
last_used_at=api_token.last_used_at,
created_at=api_token.created_at,
)
return cached.model_dump_json().encode("utf-8")
@staticmethod
def _deserialize_token(cached_data: bytes | str) -> Any:
"""Deserialize JSON bytes/string back to a CachedApiToken Pydantic model."""
if cached_data in {b"null", "null"}:
return None
try:
if isinstance(cached_data, bytes):
cached_data = cached_data.decode("utf-8")
return CachedApiToken.model_validate_json(cached_data)
except (ValueError, Exception) as e:
logger.warning("Failed to deserialize token from cache: %s", e)
return None
@staticmethod
@redis_fallback(default_return=None)
def get(token: str, scope: str | None) -> Any | None:
"""Get API token from cache."""
cache_key = ApiTokenCache._make_cache_key(token, scope)
cached_data = redis_client.get(cache_key)
if cached_data is None:
logger.debug("Cache miss for token key: %s", cache_key)
return None
logger.debug("Cache hit for token key: %s", cache_key)
return ApiTokenCache._deserialize_token(cached_data)
@staticmethod
def _add_to_tenant_index(tenant_id: str | None, cache_key: str) -> None:
"""Add cache key to tenant index for efficient invalidation."""
if not tenant_id:
return
try:
index_key = ApiTokenCache._make_tenant_index_key(tenant_id)
redis_client.sadd(index_key, cache_key)
redis_client.expire(index_key, CACHE_TTL_SECONDS + 60)
except Exception as e:
logger.warning("Failed to update tenant index: %s", e)
@staticmethod
def _remove_from_tenant_index(tenant_id: str | None, cache_key: str) -> None:
"""Remove cache key from tenant index."""
if not tenant_id:
return
try:
index_key = ApiTokenCache._make_tenant_index_key(tenant_id)
redis_client.srem(index_key, cache_key)
except Exception as e:
logger.warning("Failed to remove from tenant index: %s", e)
@staticmethod
@redis_fallback(default_return=False)
def set(token: str, scope: str | None, api_token: Any | None, ttl: int = CACHE_TTL_SECONDS) -> bool:
"""Set API token in cache."""
cache_key = ApiTokenCache._make_cache_key(token, scope)
if api_token is None:
cached_value = b"null"
ttl = CACHE_NULL_TTL_SECONDS
else:
cached_value = ApiTokenCache._serialize_token(api_token)
try:
redis_client.setex(cache_key, ttl, cached_value)
if api_token is not None and hasattr(api_token, "tenant_id"):
ApiTokenCache._add_to_tenant_index(api_token.tenant_id, cache_key)
logger.debug("Cached token with key: %s, ttl: %ss", cache_key, ttl)
return True
except Exception as e:
logger.warning("Failed to cache token: %s", e)
return False
@staticmethod
@redis_fallback(default_return=False)
def delete(token: str, scope: str | None = None) -> bool:
"""Delete API token from cache."""
if scope is None:
pattern = f"{CACHE_KEY_PREFIX}:*:{token}"
try:
keys_to_delete = list(redis_client.scan_iter(match=pattern))
if keys_to_delete:
redis_client.delete(*keys_to_delete)
logger.info("Deleted %d cache entries for token", len(keys_to_delete))
return True
except Exception as e:
logger.warning("Failed to delete token cache with pattern: %s", e)
return False
else:
cache_key = ApiTokenCache._make_cache_key(token, scope)
try:
tenant_id = None
try:
cached_data = redis_client.get(cache_key)
if cached_data and cached_data != b"null":
cached_token = ApiTokenCache._deserialize_token(cached_data)
if cached_token:
tenant_id = cached_token.tenant_id
except Exception as e:
logger.debug("Failed to get tenant_id for cache cleanup: %s", e)
redis_client.delete(cache_key)
if tenant_id:
ApiTokenCache._remove_from_tenant_index(tenant_id, cache_key)
logger.info("Deleted cache for key: %s", cache_key)
return True
except Exception as e:
logger.warning("Failed to delete token cache: %s", e)
return False
@staticmethod
@redis_fallback(default_return=False)
def invalidate_by_tenant(tenant_id: str) -> bool:
"""Invalidate all API token caches for a specific tenant via tenant index."""
try:
index_key = ApiTokenCache._make_tenant_index_key(tenant_id)
cache_keys = redis_client.smembers(index_key)
if cache_keys:
deleted_count = 0
for cache_key in cache_keys:
if isinstance(cache_key, bytes):
cache_key = cache_key.decode("utf-8")
redis_client.delete(cache_key)
deleted_count += 1
redis_client.delete(index_key)
logger.info(
"Invalidated %d token cache entries for tenant: %s",
deleted_count,
tenant_id,
)
else:
logger.info(
"No tenant index found for %s, relying on TTL expiration",
tenant_id,
)
return True
except Exception as e:
logger.warning("Failed to invalidate tenant token cache: %s", e)
return False
# ---------------------------------------------------------------------
# Token usage recording (for batch update)
# ---------------------------------------------------------------------
def record_token_usage(auth_token: str, scope: str | None) -> None:
"""
Record token usage in Redis for later batch update by a scheduled job.
Instead of dispatching a Celery task per request, we simply SET a key in Redis.
A Celery Beat scheduled task will periodically scan these keys and batch-update
last_used_at in the database.
"""
try:
key = ApiTokenCache.make_active_key(auth_token, scope)
redis_client.set(key, naive_utc_now().isoformat(), ex=3600)
except Exception as e:
logger.warning("Failed to record token usage: %s", e)
# ---------------------------------------------------------------------
# Database query + single-flight
# ---------------------------------------------------------------------
def query_token_from_db(auth_token: str, scope: str | None) -> ApiToken:
"""
Query API token from database and cache the result.
Raises Unauthorized if token is invalid.
"""
with Session(db.engine, expire_on_commit=False) as session:
stmt = select(ApiToken).where(ApiToken.token == auth_token, ApiToken.type == scope)
api_token = session.scalar(stmt)
if not api_token:
ApiTokenCache.set(auth_token, scope, None)
raise Unauthorized("Access token is invalid")
ApiTokenCache.set(auth_token, scope, api_token)
record_token_usage(auth_token, scope)
return api_token
def fetch_token_with_single_flight(auth_token: str, scope: str | None) -> ApiToken | Any:
"""
Fetch token from DB with single-flight pattern using Redis lock.
Ensures only one concurrent request queries the database for the same token.
Falls back to direct query if lock acquisition fails.
"""
logger.debug("Token cache miss, attempting to acquire query lock for scope: %s", scope)
lock_key = f"api_token_query_lock:{scope}:{auth_token}"
lock = redis_client.lock(lock_key, timeout=10, blocking_timeout=5)
try:
if lock.acquire(blocking=True):
try:
cached_token = ApiTokenCache.get(auth_token, scope)
if cached_token is not None:
logger.debug("Token cached by concurrent request, using cached version")
return cached_token
return query_token_from_db(auth_token, scope)
finally:
lock.release()
else:
logger.warning("Lock timeout for token: %s, proceeding with direct query", auth_token[:10])
return query_token_from_db(auth_token, scope)
except Unauthorized:
raise
except Exception as e:
logger.warning("Redis lock failed for token query: %s, proceeding anyway", e)
return query_token_from_db(auth_token, scope)

View File

@@ -1,115 +0,0 @@
import json
import logging
import uuid
from datetime import UTC, datetime
from redis import RedisError
from configs import dify_config
from extensions.ext_database import db
from extensions.ext_redis import redis_client
from models.account import TenantAccountJoin
logger = logging.getLogger(__name__)
ACCOUNT_DELETION_SYNC_QUEUE = "enterprise:member:sync:queue"
ACCOUNT_DELETION_SYNC_TASK_TYPE = "sync_member_deletion_from_workspace"
def _queue_task(workspace_id: str, member_id: str, *, source: str) -> bool:
"""
Queue an account deletion sync task to Redis.
Internal helper function. Do not call directly - use the public functions instead.
Args:
workspace_id: The workspace/tenant ID to sync
member_id: The member/account ID that was removed
source: Source of the sync request (for debugging/tracking)
Returns:
bool: True if task was queued successfully, False otherwise
"""
try:
task = {
"task_id": str(uuid.uuid4()),
"workspace_id": workspace_id,
"member_id": member_id,
"retry_count": 0,
"created_at": datetime.now(UTC).isoformat(),
"source": source,
"type": ACCOUNT_DELETION_SYNC_TASK_TYPE,
}
# Push to Redis list (queue) - LPUSH adds to the head, worker consumes from tail with RPOP
redis_client.lpush(ACCOUNT_DELETION_SYNC_QUEUE, json.dumps(task))
logger.info(
"Queued account deletion sync task for workspace %s, member %s, task_id: %s, source: %s",
workspace_id,
member_id,
task["task_id"],
source,
)
return True
except (RedisError, TypeError) as e:
logger.error(
"Failed to queue account deletion sync for workspace %s, member %s: %s",
workspace_id,
member_id,
str(e),
exc_info=True,
)
# Don't raise - we don't want to fail member deletion if queueing fails
return False
def sync_workspace_member_removal(workspace_id: str, member_id: str, *, source: str) -> bool:
"""
Sync a single workspace member removal (enterprise only).
Queues a task for the enterprise backend to reassign resources from the removed member.
Handles enterprise edition check internally. Safe to call in community edition (no-op).
Args:
workspace_id: The workspace/tenant ID
member_id: The member/account ID that was removed
source: Source of the sync request (e.g., "workspace_member_removed")
Returns:
bool: True if task was queued (or skipped in community), False if queueing failed
"""
if not dify_config.ENTERPRISE_ENABLED:
return True
return _queue_task(workspace_id=workspace_id, member_id=member_id, source=source)
def sync_account_deletion(account_id: str, *, source: str) -> bool:
"""
Sync full account deletion across all workspaces (enterprise only).
Fetches all workspace memberships for the account and queues a sync task for each.
Handles enterprise edition check internally. Safe to call in community edition (no-op).
Args:
account_id: The account ID being deleted
source: Source of the sync request (e.g., "account_deleted")
Returns:
bool: True if all tasks were queued (or skipped in community), False if any queueing failed
"""
if not dify_config.ENTERPRISE_ENABLED:
return True
# Fetch all workspaces the account belongs to
workspace_joins = db.session.query(TenantAccountJoin).filter_by(account_id=account_id).all()
# Queue sync task for each workspace
success = True
for join in workspace_joins:
if not _queue_task(workspace_id=join.tenant_id, member_id=account_id, source=source):
success = False
return success
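
Note: the removed helper relies on a simple Redis list protocol: `_queue_task` LPUSHes JSON tasks at the head and an external worker consumes from the tail. A hypothetical consumer sketch (the actual enterprise-backend worker is not in this repository):

```python
import json

from extensions.ext_redis import redis_client

QUEUE = "enterprise:member:sync:queue"

def drain_one() -> dict | None:
    # Producer LPUSHes at the head; RPOP from the tail yields FIFO order.
    raw = redis_client.rpop(QUEUE)
    if raw is None:
        return None
    if isinstance(raw, bytes):
        raw = raw.decode("utf-8")
    # {"task_id": ..., "workspace_id": ..., "member_id": ..., "type": ...}
    return json.loads(raw)
```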

View File

@@ -14,9 +14,6 @@ from models.model import UploadFile
logger = logging.getLogger(__name__)
# Batch size for database operations to keep transactions short
BATCH_SIZE = 1000
@shared_task(queue="dataset")
def batch_clean_document_task(document_ids: list[str], dataset_id: str, doc_form: str | None, file_ids: list[str]):
@@ -34,179 +31,63 @@ def batch_clean_document_task(document_ids: list[str], dataset_id: str, doc_form
if not doc_form:
raise ValueError("doc_form is required")
storage_keys_to_delete: list[str] = []
index_node_ids: list[str] = []
segment_ids: list[str] = []
total_image_upload_file_ids: list[str] = []
with session_factory.create_session() as session:
try:
dataset = session.query(Dataset).where(Dataset.id == dataset_id).first()
if not dataset:
raise Exception("Document has no dataset")
session.query(DatasetMetadataBinding).where(
DatasetMetadataBinding.dataset_id == dataset_id,
DatasetMetadataBinding.document_id.in_(document_ids),
).delete(synchronize_session=False)
try:
# ============ Step 1: Query segment and file data (short read-only transaction) ============
with session_factory.create_session() as session:
# Get segments info
segments = session.scalars(
select(DocumentSegment).where(DocumentSegment.document_id.in_(document_ids))
).all()
# check segment is exist
if segments:
index_node_ids = [segment.index_node_id for segment in segments]
segment_ids = [segment.id for segment in segments]
index_processor = IndexProcessorFactory(doc_form).init_index_processor()
index_processor.clean(
dataset, index_node_ids, with_keywords=True, delete_child_chunks=True, delete_summaries=True
)
# Collect image file IDs from segment content
for segment in segments:
image_upload_file_ids = get_image_upload_file_ids(segment.content)
total_image_upload_file_ids.extend(image_upload_file_ids)
# Query storage keys for image files
if total_image_upload_file_ids:
image_files = session.scalars(
select(UploadFile).where(UploadFile.id.in_(total_image_upload_file_ids))
).all()
storage_keys_to_delete.extend([f.key for f in image_files if f and f.key])
# Query storage keys for document files
image_files = session.query(UploadFile).where(UploadFile.id.in_(image_upload_file_ids)).all()
for image_file in image_files:
try:
if image_file and image_file.key:
storage.delete(image_file.key)
except Exception:
logger.exception(
"Delete image_files failed when storage deleted, \
image_upload_file_is: %s",
image_file.id,
)
stmt = delete(UploadFile).where(UploadFile.id.in_(image_upload_file_ids))
session.execute(stmt)
session.delete(segment)
if file_ids:
files = session.scalars(select(UploadFile).where(UploadFile.id.in_(file_ids))).all()
storage_keys_to_delete.extend([f.key for f in files if f and f.key])
for file in files:
try:
storage.delete(file.key)
except Exception:
logger.exception("Delete file failed when document deleted, file_id: %s", file.id)
stmt = delete(UploadFile).where(UploadFile.id.in_(file_ids))
session.execute(stmt)
# ============ Step 2: Clean vector index (external service, fresh session for dataset) ============
if index_node_ids:
try:
# Fetch dataset in a fresh session to avoid DetachedInstanceError
with session_factory.create_session() as session:
dataset = session.query(Dataset).where(Dataset.id == dataset_id).first()
if not dataset:
logger.warning("Dataset not found for vector index cleanup, dataset_id: %s", dataset_id)
else:
index_processor = IndexProcessorFactory(doc_form).init_index_processor()
index_processor.clean(
dataset, index_node_ids, with_keywords=True, delete_child_chunks=True, delete_summaries=True
)
except Exception:
logger.exception(
"Failed to clean vector index for dataset_id: %s, document_ids: %s, index_node_ids count: %d",
dataset_id,
document_ids,
len(index_node_ids),
)
session.commit()
# ============ Step 3: Delete metadata binding (separate short transaction) ============
try:
with session_factory.create_session() as session:
deleted_count = (
session.query(DatasetMetadataBinding)
.where(
DatasetMetadataBinding.dataset_id == dataset_id,
DatasetMetadataBinding.document_id.in_(document_ids),
)
.delete(synchronize_session=False)
end_at = time.perf_counter()
logger.info(
click.style(
f"Cleaned documents when documents deleted latency: {end_at - start_at}",
fg="green",
)
session.commit()
logger.debug("Deleted %d metadata bindings for dataset_id: %s", deleted_count, dataset_id)
)
except Exception:
logger.exception(
"Failed to delete metadata bindings for dataset_id: %s, document_ids: %s",
dataset_id,
document_ids,
)
# ============ Step 4: Batch delete UploadFile records (multiple short transactions) ============
if total_image_upload_file_ids:
failed_batches = 0
total_batches = (len(total_image_upload_file_ids) + BATCH_SIZE - 1) // BATCH_SIZE
for i in range(0, len(total_image_upload_file_ids), BATCH_SIZE):
batch = total_image_upload_file_ids[i : i + BATCH_SIZE]
try:
with session_factory.create_session() as session:
stmt = delete(UploadFile).where(UploadFile.id.in_(batch))
session.execute(stmt)
session.commit()
except Exception:
failed_batches += 1
logger.exception(
"Failed to delete image UploadFile batch %d-%d for dataset_id: %s",
i,
i + len(batch),
dataset_id,
)
if failed_batches > 0:
logger.warning(
"Image UploadFile deletion: %d/%d batches failed for dataset_id: %s",
failed_batches,
total_batches,
dataset_id,
)
# ============ Step 5: Batch delete DocumentSegment records (multiple short transactions) ============
if segment_ids:
failed_batches = 0
total_batches = (len(segment_ids) + BATCH_SIZE - 1) // BATCH_SIZE
for i in range(0, len(segment_ids), BATCH_SIZE):
batch = segment_ids[i : i + BATCH_SIZE]
try:
with session_factory.create_session() as session:
segment_delete_stmt = delete(DocumentSegment).where(DocumentSegment.id.in_(batch))
session.execute(segment_delete_stmt)
session.commit()
except Exception:
failed_batches += 1
logger.exception(
"Failed to delete DocumentSegment batch %d-%d for dataset_id: %s, document_ids: %s",
i,
i + len(batch),
dataset_id,
document_ids,
)
if failed_batches > 0:
logger.warning(
"DocumentSegment deletion: %d/%d batches failed, document_ids: %s",
failed_batches,
total_batches,
document_ids,
)
# ============ Step 6: Delete document-associated files (separate short transaction) ============
if file_ids:
try:
with session_factory.create_session() as session:
stmt = delete(UploadFile).where(UploadFile.id.in_(file_ids))
session.execute(stmt)
session.commit()
except Exception:
logger.exception(
"Failed to delete document UploadFile records for dataset_id: %s, file_ids: %s",
dataset_id,
file_ids,
)
# ============ Step 7: Delete storage files (I/O operations, no DB transaction) ============
storage_delete_failures = 0
for storage_key in storage_keys_to_delete:
try:
storage.delete(storage_key)
except Exception:
storage_delete_failures += 1
logger.exception("Failed to delete file from storage, key: %s", storage_key)
if storage_delete_failures > 0:
logger.warning(
"Storage file deletion completed with %d failures out of %d total files for dataset_id: %s",
storage_delete_failures,
len(storage_keys_to_delete),
dataset_id,
)
end_at = time.perf_counter()
logger.info(
click.style(
f"Cleaned documents when documents deleted latency: {end_at - start_at:.2f}s, "
f"dataset_id: {dataset_id}, document_ids: {document_ids}, "
f"segments: {len(segment_ids)}, image_files: {len(total_image_upload_file_ids)}, "
f"storage_files: {len(storage_keys_to_delete)}",
fg="green",
)
)
except Exception:
logger.exception(
"Batch clean documents failed for dataset_id: %s, document_ids: %s",
dataset_id,
document_ids,
)
logger.exception("Cleaned documents when documents deleted failed")

View File

@@ -3,7 +3,6 @@ import time
import click
from celery import shared_task
from sqlalchemy import delete
from core.db.session_factory import session_factory
from core.rag.index_processor.index_processor_factory import IndexProcessorFactory
@@ -68,14 +67,8 @@ def delete_segment_from_index_task(
if segment_attachment_bindings:
attachment_ids = [binding.attachment_id for binding in segment_attachment_bindings]
index_processor.clean(dataset=dataset, node_ids=attachment_ids, with_keywords=False)
segment_attachment_bind_ids = [i.id for i in segment_attachment_bindings]
for i in range(0, len(segment_attachment_bind_ids), 1000):
segment_attachment_bind_delete_stmt = delete(SegmentAttachmentBinding).where(
SegmentAttachmentBinding.id.in_(segment_attachment_bind_ids[i : i + 1000])
)
session.execute(segment_attachment_bind_delete_stmt)
for binding in segment_attachment_bindings:
session.delete(binding)
# delete upload file
session.query(UploadFile).where(UploadFile.id.in_(attachment_ids)).delete(synchronize_session=False)
session.commit()

View File

@@ -28,7 +28,7 @@ def document_indexing_sync_task(dataset_id: str, document_id: str):
logger.info(click.style(f"Start sync document: {document_id}", fg="green"))
start_at = time.perf_counter()
with session_factory.create_session() as session, session.begin():
with session_factory.create_session() as session:
document = session.query(Document).where(Document.id == document_id, Document.dataset_id == dataset_id).first()
if not document:
@@ -68,6 +68,7 @@ def document_indexing_sync_task(dataset_id: str, document_id: str):
document.indexing_status = "error"
document.error = "Datasource credential not found. Please reconnect your Notion workspace."
document.stopped_at = naive_utc_now()
session.commit()
return
loader = NotionExtractor(
@@ -84,6 +85,7 @@ def document_indexing_sync_task(dataset_id: str, document_id: str):
if last_edited_time != page_edited_time:
document.indexing_status = "parsing"
document.processing_started_at = naive_utc_now()
session.commit()
# delete all document segment and index
try:
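
Note: the hunk above drops the `session.begin()` context manager, which held one transaction open and committed only at block exit, in favor of explicit `session.commit()` calls, so status changes such as `indexing_status = "parsing"` become visible before the long-running sync work finishes. A minimal sketch of the new shape, assuming the same session factory (the helper name is illustrative):

```python
from core.db.session_factory import session_factory
from libs.datetime_utils import naive_utc_now
from models.dataset import Document

def mark_parsing(document_id: str, dataset_id: str) -> None:
    # No session.begin(): each commit publishes state immediately instead of
    # deferring every write to the end of one long transaction.
    with session_factory.create_session() as session:
        document = (
            session.query(Document)
            .where(Document.id == document_id, Document.dataset_id == dataset_id)
            .first()
        )
        if not document:
            return
        document.indexing_status = "parsing"
        document.processing_started_at = naive_utc_now()
        session.commit()  # visible to other readers right away
```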

View File

@@ -8,6 +8,7 @@ from sqlalchemy import delete, select
from core.db.session_factory import session_factory
from core.indexing_runner import DocumentIsPausedError, IndexingRunner
from core.rag.index_processor.index_processor_factory import IndexProcessorFactory
from extensions.ext_database import db
from libs.datetime_utils import naive_utc_now
from models.dataset import Dataset, Document, DocumentSegment
@@ -26,7 +27,7 @@ def document_indexing_update_task(dataset_id: str, document_id: str):
logger.info(click.style(f"Start update document: {document_id}", fg="green"))
start_at = time.perf_counter()
with session_factory.create_session() as session, session.begin():
with session_factory.create_session() as session:
document = session.query(Document).where(Document.id == document_id, Document.dataset_id == dataset_id).first()
if not document:
@@ -35,6 +36,7 @@ def document_indexing_update_task(dataset_id: str, document_id: str):
document.indexing_status = "parsing"
document.processing_started_at = naive_utc_now()
session.commit()
# delete all document segment and index
try:
@@ -54,7 +56,7 @@ def document_indexing_update_task(dataset_id: str, document_id: str):
segment_ids = [segment.id for segment in segments]
segment_delete_stmt = delete(DocumentSegment).where(DocumentSegment.id.in_(segment_ids))
session.execute(segment_delete_stmt)
db.session.commit()
end_at = time.perf_counter()
logger.info(
click.style(

View File

@@ -6,8 +6,8 @@ import typing
import click
from celery import shared_task
from core.helper.marketplace import record_install_plugin_event
from core.plugin.entities.marketplace import MarketplacePluginSnapshot
from core.helper import marketplace
from core.helper.marketplace import MarketplacePluginDeclaration
from core.plugin.entities.plugin import PluginInstallationSource
from core.plugin.impl.plugin import PluginInstaller
from extensions.ext_redis import redis_client
@@ -16,7 +16,7 @@ from models.account import TenantPluginAutoUpgradeStrategy
logger = logging.getLogger(__name__)
RETRY_TIMES_OF_ONE_PLUGIN_IN_ONE_TENANT = 3
CACHE_REDIS_KEY_PREFIX = "plugin_autoupgrade_check_task:cached_plugin_snapshot:"
CACHE_REDIS_KEY_PREFIX = "plugin_autoupgrade_check_task:cached_plugin_manifests:"
CACHE_REDIS_TTL = 60 * 60 # 1 hour
@@ -25,11 +25,11 @@ def _get_redis_cache_key(plugin_id: str) -> str:
return f"{CACHE_REDIS_KEY_PREFIX}{plugin_id}"
def _get_cached_manifest(plugin_id: str) -> typing.Union[MarketplacePluginSnapshot, None, bool]:
def _get_cached_manifest(plugin_id: str) -> typing.Union[MarketplacePluginDeclaration, None, bool]:
"""
Get cached plugin manifest from Redis.
Returns:
- MarketplacePluginSnapshot: if found in cache
- MarketplacePluginDeclaration: if found in cache
- None: if cached as not found (marketplace returned no result)
- False: if not in cache at all
"""
@@ -43,31 +43,76 @@ def _get_cached_manifest(plugin_id: str) -> typing.Union[MarketplacePluginSnapsh
if cached_json is None:
return None
return MarketplacePluginSnapshot.model_validate(cached_json)
return MarketplacePluginDeclaration.model_validate(cached_json)
except Exception:
logger.exception("Failed to get cached manifest for plugin %s", plugin_id)
return False
def _set_cached_manifest(plugin_id: str, manifest: typing.Union[MarketplacePluginDeclaration, None]) -> None:
"""
Cache plugin manifest in Redis.
Args:
plugin_id: The plugin ID
manifest: The manifest to cache, or None if not found in marketplace
"""
try:
key = _get_redis_cache_key(plugin_id)
if manifest is None:
# Cache the fact that this plugin was not found
redis_client.setex(key, CACHE_REDIS_TTL, json.dumps(None))
else:
# Cache the manifest data
redis_client.setex(key, CACHE_REDIS_TTL, manifest.model_dump_json())
except Exception:
# If Redis fails, continue without caching
logger.exception("Failed to set cached manifest for plugin %s", plugin_id)
def marketplace_batch_fetch_plugin_manifests(
plugin_ids_plain_list: list[str],
) -> list[MarketplacePluginSnapshot]:
"""
Fetch plugin manifests from Redis cache only.
This function assumes fetch_global_plugin_manifest() has been called
to pre-populate the cache with all marketplace plugins.
"""
result: list[MarketplacePluginSnapshot] = []
) -> list[MarketplacePluginDeclaration]:
"""Fetch plugin manifests with Redis caching support."""
cached_manifests: dict[str, typing.Union[MarketplacePluginDeclaration, None]] = {}
not_cached_plugin_ids: list[str] = []
# Check Redis cache for each plugin
for plugin_id in plugin_ids_plain_list:
cached_result = _get_cached_manifest(plugin_id)
if not isinstance(cached_result, MarketplacePluginSnapshot):
# cached_result is False (not in cache) or None (cached as not found)
logger.warning("plugin %s not found in cache, skipping", plugin_id)
continue
if cached_result is False:
# Not in cache, need to fetch
not_cached_plugin_ids.append(plugin_id)
else:
# Either found manifest or cached as None (not found in marketplace)
# At this point, cached_result is either MarketplacePluginDeclaration or None
if isinstance(cached_result, bool):
# This should never happen due to the if condition above, but for type safety
continue
cached_manifests[plugin_id] = cached_result
result.append(cached_result)
# Fetch uncached plugins from marketplace
if not_cached_plugin_ids:
manifests = marketplace.batch_fetch_plugin_manifests_ignore_deserialization_error(not_cached_plugin_ids)
# Cache the fetched manifests
for manifest in manifests:
cached_manifests[manifest.plugin_id] = manifest
_set_cached_manifest(manifest.plugin_id, manifest)
# Cache plugins that were not found in marketplace
fetched_plugin_ids = {manifest.plugin_id for manifest in manifests}
for plugin_id in not_cached_plugin_ids:
if plugin_id not in fetched_plugin_ids:
cached_manifests[plugin_id] = None
_set_cached_manifest(plugin_id, None)
# Build result list from cached manifests
result: list[MarketplacePluginDeclaration] = []
for plugin_id in plugin_ids_plain_list:
cached_manifest: typing.Union[MarketplacePluginDeclaration, None] = cached_manifests.get(plugin_id)
if cached_manifest is not None:
result.append(cached_manifest)
return result
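
Stripped of the interleaved old/new lines, both versions of this function are variations on cache-aside with negative caching: check Redis, fall back to the marketplace, then cache hits and misses alike. A self-contained sketch of the pattern, where fetch_remote is a hypothetical stand-in for the marketplace call and the key prefix and TTL come from the constants above:

import json

import redis

CACHE_KEY_PREFIX = "plugin_autoupgrade_check_task:cached_plugin_manifests:"
CACHE_TTL = 60 * 60  # 1 hour, matching CACHE_REDIS_TTL

client = redis.Redis()

def get_manifest(plugin_id: str, fetch_remote) -> dict | None:
    key = f"{CACHE_KEY_PREFIX}{plugin_id}"
    raw = client.get(key)
    if raw is not None:
        return json.loads(raw)  # may be None: plugin cached as "not found"
    manifest = fetch_remote(plugin_id)  # None when the marketplace has no result
    # Cache misses too, so repeated lookups for unknown plugins stay cheap.
    client.setex(key, CACHE_TTL, json.dumps(manifest))
    return manifest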
@@ -166,7 +211,7 @@ def process_tenant_plugin_autoupgrade_check_task(
# execute upgrade
new_unique_identifier = manifest.latest_package_identifier
record_install_plugin_event(new_unique_identifier)
marketplace.record_install_plugin_event(new_unique_identifier)
click.echo(
click.style(
f"Upgrade plugin: {original_unique_identifier} -> {new_unique_identifier}",

View File

@@ -48,7 +48,6 @@ from models.workflow import (
WorkflowArchiveLog,
)
from repositories.factory import DifyAPIRepositoryFactory
from services.api_token_service import ApiTokenCache
logger = logging.getLogger(__name__)
@@ -135,12 +134,6 @@ def _delete_app_mcp_servers(tenant_id: str, app_id: str):
def _delete_app_api_tokens(tenant_id: str, app_id: str):
def del_api_token(session, api_token_id: str):
# Fetch token details for cache invalidation
token_obj = session.query(ApiToken).where(ApiToken.id == api_token_id).first()
if token_obj:
# Invalidate cache before deletion
ApiTokenCache.delete(token_obj.token, token_obj.type)
session.query(ApiToken).where(ApiToken.id == api_token_id).delete(synchronize_session=False)
_delete_records(
@@ -266,8 +259,8 @@ def _delete_app_workflow_app_logs(tenant_id: str, app_id: str):
def _delete_app_workflow_archive_logs(tenant_id: str, app_id: str):
def del_workflow_archive_log(session, workflow_archive_log_id: str):
session.query(WorkflowArchiveLog).where(WorkflowArchiveLog.id == workflow_archive_log_id).delete(
def del_workflow_archive_log(workflow_archive_log_id: str):
db.session.query(WorkflowArchiveLog).where(WorkflowArchiveLog.id == workflow_archive_log_id).delete(
synchronize_session=False
)
@@ -427,7 +420,7 @@ def delete_draft_variables_batch(app_id: str, batch_size: int = 1000) -> int:
total_files_deleted = 0
while True:
with session_factory.create_session() as session, session.begin():
with session_factory.create_session() as session:
# Get a batch of draft variable IDs along with their file_ids
query_sql = """
SELECT id, file_id FROM workflow_draft_variables

View File

@@ -1,375 +0,0 @@
"""
Integration tests for API Token Cache with Redis.
These tests require:
- Redis server running
- Test database configured
"""
import time
from datetime import datetime, timedelta
from unittest.mock import patch
import pytest
from extensions.ext_redis import redis_client
from models.model import ApiToken
from services.api_token_service import ApiTokenCache, CachedApiToken
class TestApiTokenCacheRedisIntegration:
"""Integration tests with real Redis."""
def setup_method(self):
"""Setup test fixtures and clean Redis."""
self.test_token = "test-integration-token-123"
self.test_scope = "app"
self.cache_key = f"api_token:{self.test_scope}:{self.test_token}"
# Clean up any existing test data
self._cleanup()
def teardown_method(self):
"""Cleanup test data from Redis."""
self._cleanup()
def _cleanup(self):
"""Remove test data from Redis."""
try:
redis_client.delete(self.cache_key)
redis_client.delete(ApiTokenCache._make_tenant_index_key("test-tenant-id"))
redis_client.delete(ApiTokenCache.make_active_key(self.test_token, self.test_scope))
except Exception:
pass # Ignore cleanup errors
def test_cache_set_and_get_with_real_redis(self):
"""Test cache set and get operations with real Redis."""
from unittest.mock import MagicMock
mock_token = MagicMock()
mock_token.id = "test-id-123"
mock_token.app_id = "test-app-456"
mock_token.tenant_id = "test-tenant-789"
mock_token.type = "app"
mock_token.token = self.test_token
mock_token.last_used_at = datetime.now()
mock_token.created_at = datetime.now() - timedelta(days=30)
# Set in cache
result = ApiTokenCache.set(self.test_token, self.test_scope, mock_token)
assert result is True
# Verify in Redis
cached_data = redis_client.get(self.cache_key)
assert cached_data is not None
# Get from cache
cached_token = ApiTokenCache.get(self.test_token, self.test_scope)
assert cached_token is not None
assert isinstance(cached_token, CachedApiToken)
assert cached_token.id == "test-id-123"
assert cached_token.app_id == "test-app-456"
assert cached_token.tenant_id == "test-tenant-789"
assert cached_token.type == "app"
assert cached_token.token == self.test_token
def test_cache_ttl_with_real_redis(self):
"""Test cache TTL is set correctly."""
from unittest.mock import MagicMock
mock_token = MagicMock()
mock_token.id = "test-id"
mock_token.app_id = "test-app"
mock_token.tenant_id = "test-tenant"
mock_token.type = "app"
mock_token.token = self.test_token
mock_token.last_used_at = None
mock_token.created_at = datetime.now()
ApiTokenCache.set(self.test_token, self.test_scope, mock_token)
ttl = redis_client.ttl(self.cache_key)
assert 595 <= ttl <= 600 # Should be around 600 seconds (10 minutes)
def test_cache_null_value_for_invalid_token(self):
"""Test caching null value for invalid tokens."""
result = ApiTokenCache.set(self.test_token, self.test_scope, None)
assert result is True
cached_data = redis_client.get(self.cache_key)
assert cached_data == b"null"
cached_token = ApiTokenCache.get(self.test_token, self.test_scope)
assert cached_token is None
ttl = redis_client.ttl(self.cache_key)
assert 55 <= ttl <= 60
def test_cache_delete_with_real_redis(self):
"""Test cache deletion with real Redis."""
from unittest.mock import MagicMock
mock_token = MagicMock()
mock_token.id = "test-id"
mock_token.app_id = "test-app"
mock_token.tenant_id = "test-tenant"
mock_token.type = "app"
mock_token.token = self.test_token
mock_token.last_used_at = None
mock_token.created_at = datetime.now()
ApiTokenCache.set(self.test_token, self.test_scope, mock_token)
assert redis_client.exists(self.cache_key) == 1
result = ApiTokenCache.delete(self.test_token, self.test_scope)
assert result is True
assert redis_client.exists(self.cache_key) == 0
def test_tenant_index_creation(self):
"""Test tenant index is created when caching token."""
from unittest.mock import MagicMock
tenant_id = "test-tenant-id"
mock_token = MagicMock()
mock_token.id = "test-id"
mock_token.app_id = "test-app"
mock_token.tenant_id = tenant_id
mock_token.type = "app"
mock_token.token = self.test_token
mock_token.last_used_at = None
mock_token.created_at = datetime.now()
ApiTokenCache.set(self.test_token, self.test_scope, mock_token)
index_key = ApiTokenCache._make_tenant_index_key(tenant_id)
assert redis_client.exists(index_key) == 1
members = redis_client.smembers(index_key)
cache_keys = [m.decode("utf-8") if isinstance(m, bytes) else m for m in members]
assert self.cache_key in cache_keys
def test_invalidate_by_tenant_via_index(self):
"""Test tenant-wide cache invalidation using index (fast path)."""
from unittest.mock import MagicMock
tenant_id = "test-tenant-id"
for i in range(3):
token_value = f"test-token-{i}"
mock_token = MagicMock()
mock_token.id = f"test-id-{i}"
mock_token.app_id = "test-app"
mock_token.tenant_id = tenant_id
mock_token.type = "app"
mock_token.token = token_value
mock_token.last_used_at = None
mock_token.created_at = datetime.now()
ApiTokenCache.set(token_value, "app", mock_token)
for i in range(3):
key = f"api_token:app:test-token-{i}"
assert redis_client.exists(key) == 1
result = ApiTokenCache.invalidate_by_tenant(tenant_id)
assert result is True
for i in range(3):
key = f"api_token:app:test-token-{i}"
assert redis_client.exists(key) == 0
assert redis_client.exists(ApiTokenCache._make_tenant_index_key(tenant_id)) == 0
def test_concurrent_cache_access(self):
"""Test concurrent cache access doesn't cause issues."""
import concurrent.futures
from unittest.mock import MagicMock
mock_token = MagicMock()
mock_token.id = "test-id"
mock_token.app_id = "test-app"
mock_token.tenant_id = "test-tenant"
mock_token.type = "app"
mock_token.token = self.test_token
mock_token.last_used_at = None
mock_token.created_at = datetime.now()
ApiTokenCache.set(self.test_token, self.test_scope, mock_token)
def get_from_cache():
return ApiTokenCache.get(self.test_token, self.test_scope)
with concurrent.futures.ThreadPoolExecutor(max_workers=10) as executor:
futures = [executor.submit(get_from_cache) for _ in range(50)]
results = [f.result() for f in concurrent.futures.as_completed(futures)]
assert len(results) == 50
assert all(r is not None for r in results)
assert all(isinstance(r, CachedApiToken) for r in results)
class TestTokenUsageRecording:
"""Tests for recording token usage in Redis (batch update approach)."""
def setup_method(self):
self.test_token = "test-usage-token"
self.test_scope = "app"
self.active_key = ApiTokenCache.make_active_key(self.test_token, self.test_scope)
def teardown_method(self):
try:
redis_client.delete(self.active_key)
except Exception:
pass
def test_record_token_usage_sets_redis_key(self):
"""Test that record_token_usage writes an active key to Redis."""
from services.api_token_service import record_token_usage
record_token_usage(self.test_token, self.test_scope)
# Key should exist
assert redis_client.exists(self.active_key) == 1
# Value should be an ISO timestamp
value = redis_client.get(self.active_key)
if isinstance(value, bytes):
value = value.decode("utf-8")
datetime.fromisoformat(value) # Should not raise
def test_record_token_usage_has_ttl(self):
"""Test that active keys have a TTL as safety net."""
from services.api_token_service import record_token_usage
record_token_usage(self.test_token, self.test_scope)
ttl = redis_client.ttl(self.active_key)
assert 3595 <= ttl <= 3600 # ~1 hour
def test_record_token_usage_overwrites(self):
"""Test that repeated calls overwrite the same key (no accumulation)."""
from services.api_token_service import record_token_usage
record_token_usage(self.test_token, self.test_scope)
first_value = redis_client.get(self.active_key)
time.sleep(0.01) # Tiny delay so timestamp differs
record_token_usage(self.test_token, self.test_scope)
second_value = redis_client.get(self.active_key)
# Key is overwritten in place: still exactly one key, with a fresher timestamp
assert redis_client.exists(self.active_key) == 1
assert second_value != first_value
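
These three tests pin down a very small write path: one overwritable key per token and scope, an ISO-timestamp value, and a one-hour TTL as a safety net. A sketch consistent with what they assert; the key layout is an assumption, since the real one comes from ApiTokenCache.make_active_key:

from datetime import datetime

from extensions.ext_redis import redis_client

ACTIVE_KEY_TTL_SECONDS = 60 * 60  # safety net so stale keys expire on their own

def record_token_usage_sketch(token: str, scope: str) -> None:
    key = f"api_token:active:{scope}:{token}"  # assumed layout
    # SETEX overwrites any previous value, so repeat calls never accumulate keys.
    redis_client.setex(key, ACTIVE_KEY_TTL_SECONDS, datetime.now().isoformat())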
class TestEndToEndCacheFlow:
"""End-to-end integration test for complete cache flow."""
@pytest.mark.usefixtures("db_session")
def test_complete_flow_cache_miss_then_hit(self, db_session):
"""
Test complete flow:
1. First request (cache miss) -> query DB -> cache result
2. Second request (cache hit) -> return from cache
3. Verify Redis state
"""
test_token_value = "test-e2e-token"
test_scope = "app"
test_token = ApiToken()
test_token.id = "test-e2e-id"
test_token.token = test_token_value
test_token.type = test_scope
test_token.app_id = "test-app"
test_token.tenant_id = "test-tenant"
test_token.last_used_at = None
test_token.created_at = datetime.now()
db_session.add(test_token)
db_session.commit()
try:
# Step 1: Cache miss - set token in cache
ApiTokenCache.set(test_token_value, test_scope, test_token)
cache_key = f"api_token:{test_scope}:{test_token_value}"
assert redis_client.exists(cache_key) == 1
# Step 2: Cache hit - get from cache
cached_token = ApiTokenCache.get(test_token_value, test_scope)
assert cached_token is not None
assert cached_token.id == test_token.id
assert cached_token.token == test_token_value
# Step 3: Verify tenant index
index_key = ApiTokenCache._make_tenant_index_key(test_token.tenant_id)
assert redis_client.exists(index_key) == 1
assert cache_key.encode() in redis_client.smembers(index_key)
# Step 4: Delete and verify cleanup
ApiTokenCache.delete(test_token_value, test_scope)
assert redis_client.exists(cache_key) == 0
assert cache_key.encode() not in redis_client.smembers(index_key)
finally:
db_session.delete(test_token)
db_session.commit()
redis_client.delete(f"api_token:{test_scope}:{test_token_value}")
redis_client.delete(ApiTokenCache._make_tenant_index_key(test_token.tenant_id))
def test_high_concurrency_simulation(self):
"""Simulate high concurrency access to cache."""
import concurrent.futures
from unittest.mock import MagicMock
test_token_value = "test-concurrent-token"
test_scope = "app"
mock_token = MagicMock()
mock_token.id = "concurrent-id"
mock_token.app_id = "test-app"
mock_token.tenant_id = "test-tenant"
mock_token.type = test_scope
mock_token.token = test_token_value
mock_token.last_used_at = datetime.now()
mock_token.created_at = datetime.now()
ApiTokenCache.set(test_token_value, test_scope, mock_token)
try:
def read_cache():
return ApiTokenCache.get(test_token_value, test_scope)
start_time = time.time()
with concurrent.futures.ThreadPoolExecutor(max_workers=20) as executor:
futures = [executor.submit(read_cache) for _ in range(100)]
results = [f.result() for f in concurrent.futures.as_completed(futures)]
elapsed = time.time() - start_time
assert len(results) == 100
assert all(r is not None for r in results)
assert elapsed < 1.0, f"Too slow: {elapsed}s for 100 cache reads"
finally:
ApiTokenCache.delete(test_token_value, test_scope)
redis_client.delete(ApiTokenCache._make_tenant_index_key(mock_token.tenant_id))
class TestRedisFailover:
"""Test behavior when Redis is unavailable."""
@patch("services.api_token_service.redis_client")
def test_graceful_degradation_when_redis_fails(self, mock_redis):
"""Test system degrades gracefully when Redis is unavailable."""
from redis import RedisError
mock_redis.get.side_effect = RedisError("Connection failed")
mock_redis.setex.side_effect = RedisError("Connection failed")
result_get = ApiTokenCache.get("test-token", "app")
assert result_get is None
result_set = ApiTokenCache.set("test-token", "app", None)
assert result_set is False
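
One detail worth pulling out of the tests above: tenant-wide invalidation never scans Redis. Every set() also records the cache key in a per-tenant SET, so invalidating a tenant is one SMEMBERS plus one DELETE. A sketch of that fast path, with the index key layout assumed:

from extensions.ext_redis import redis_client

def invalidate_by_tenant_sketch(tenant_id: str) -> bool:
    index_key = f"api_token:tenant_index:{tenant_id}"  # assumed layout
    try:
        members = redis_client.smembers(index_key)
        if members:
            redis_client.delete(*members)  # drop every cached token key at once
        redis_client.delete(index_key)
        return True
    except Exception:
        return False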

View File

@@ -10,10 +10,7 @@ from models import Tenant
from models.enums import CreatorUserRole
from models.model import App, UploadFile
from models.workflow import WorkflowDraftVariable, WorkflowDraftVariableFile
from tasks.remove_app_and_related_data_task import (
_delete_draft_variables,
delete_draft_variables_batch,
)
from tasks.remove_app_and_related_data_task import _delete_draft_variables, delete_draft_variables_batch
@pytest.fixture
@@ -300,18 +297,12 @@ class TestDeleteDraftVariablesWithOffloadIntegration:
def test_delete_draft_variables_with_offload_data(self, mock_storage, setup_offload_test_data):
data = setup_offload_test_data
app_id = data["app"].id
upload_file_ids = [uf.id for uf in data["upload_files"]]
variable_file_ids = [vf.id for vf in data["variable_files"]]
mock_storage.delete.return_value = None
with session_factory.create_session() as session:
draft_vars_before = session.query(WorkflowDraftVariable).filter_by(app_id=app_id).count()
var_files_before = (
session.query(WorkflowDraftVariableFile)
.where(WorkflowDraftVariableFile.id.in_(variable_file_ids))
.count()
)
upload_files_before = session.query(UploadFile).where(UploadFile.id.in_(upload_file_ids)).count()
var_files_before = session.query(WorkflowDraftVariableFile).count()
upload_files_before = session.query(UploadFile).count()
assert draft_vars_before == 3
assert var_files_before == 2
assert upload_files_before == 2
@@ -324,12 +315,8 @@ class TestDeleteDraftVariablesWithOffloadIntegration:
assert draft_vars_after == 0
with session_factory.create_session() as session:
var_files_after = (
session.query(WorkflowDraftVariableFile)
.where(WorkflowDraftVariableFile.id.in_(variable_file_ids))
.count()
)
upload_files_after = session.query(UploadFile).where(UploadFile.id.in_(upload_file_ids)).count()
var_files_after = session.query(WorkflowDraftVariableFile).count()
upload_files_after = session.query(UploadFile).count()
assert var_files_after == 0
assert upload_files_after == 0
@@ -342,8 +329,6 @@ class TestDeleteDraftVariablesWithOffloadIntegration:
def test_delete_draft_variables_storage_failure_continues_cleanup(self, mock_storage, setup_offload_test_data):
data = setup_offload_test_data
app_id = data["app"].id
upload_file_ids = [uf.id for uf in data["upload_files"]]
variable_file_ids = [vf.id for vf in data["variable_files"]]
mock_storage.delete.side_effect = [Exception("Storage error"), None]
deleted_count = delete_draft_variables_batch(app_id, batch_size=10)
@@ -354,12 +339,8 @@ class TestDeleteDraftVariablesWithOffloadIntegration:
assert draft_vars_after == 0
with session_factory.create_session() as session:
var_files_after = (
session.query(WorkflowDraftVariableFile)
.where(WorkflowDraftVariableFile.id.in_(variable_file_ids))
.count()
)
upload_files_after = session.query(UploadFile).where(UploadFile.id.in_(upload_file_ids)).count()
var_files_after = session.query(WorkflowDraftVariableFile).count()
upload_files_after = session.query(UploadFile).count()
assert var_files_after == 0
assert upload_files_after == 0
@@ -414,275 +395,3 @@ class TestDeleteDraftVariablesWithOffloadIntegration:
if app2_obj:
session.delete(app2_obj)
session.commit()
class TestDeleteDraftVariablesSessionCommit:
"""Test suite to verify session commit behavior in delete_draft_variables_batch."""
@pytest.fixture
def setup_offload_test_data(self, app_and_tenant):
"""Create test data with offload files for session commit tests."""
from core.variables.types import SegmentType
from libs.datetime_utils import naive_utc_now
tenant, app = app_and_tenant
with session_factory.create_session() as session:
upload_file1 = UploadFile(
tenant_id=tenant.id,
storage_type="local",
key="test/file1.json",
name="file1.json",
size=1024,
extension="json",
mime_type="application/json",
created_by_role=CreatorUserRole.ACCOUNT,
created_by=str(uuid.uuid4()),
created_at=naive_utc_now(),
used=False,
)
upload_file2 = UploadFile(
tenant_id=tenant.id,
storage_type="local",
key="test/file2.json",
name="file2.json",
size=2048,
extension="json",
mime_type="application/json",
created_by_role=CreatorUserRole.ACCOUNT,
created_by=str(uuid.uuid4()),
created_at=naive_utc_now(),
used=False,
)
session.add(upload_file1)
session.add(upload_file2)
session.flush()
var_file1 = WorkflowDraftVariableFile(
tenant_id=tenant.id,
app_id=app.id,
user_id=str(uuid.uuid4()),
upload_file_id=upload_file1.id,
size=1024,
length=10,
value_type=SegmentType.STRING,
)
var_file2 = WorkflowDraftVariableFile(
tenant_id=tenant.id,
app_id=app.id,
user_id=str(uuid.uuid4()),
upload_file_id=upload_file2.id,
size=2048,
length=20,
value_type=SegmentType.OBJECT,
)
session.add(var_file1)
session.add(var_file2)
session.flush()
draft_var1 = WorkflowDraftVariable.new_node_variable(
app_id=app.id,
node_id="node_1",
name="large_var_1",
value=StringSegment(value="truncated..."),
node_execution_id=str(uuid.uuid4()),
file_id=var_file1.id,
)
draft_var2 = WorkflowDraftVariable.new_node_variable(
app_id=app.id,
node_id="node_2",
name="large_var_2",
value=StringSegment(value="truncated..."),
node_execution_id=str(uuid.uuid4()),
file_id=var_file2.id,
)
draft_var3 = WorkflowDraftVariable.new_node_variable(
app_id=app.id,
node_id="node_3",
name="regular_var",
value=StringSegment(value="regular_value"),
node_execution_id=str(uuid.uuid4()),
)
session.add(draft_var1)
session.add(draft_var2)
session.add(draft_var3)
session.commit()
data = {
"app": app,
"tenant": tenant,
"upload_files": [upload_file1, upload_file2],
"variable_files": [var_file1, var_file2],
"draft_variables": [draft_var1, draft_var2, draft_var3],
}
yield data
with session_factory.create_session() as session:
for table, ids in [
(WorkflowDraftVariable, [v.id for v in data["draft_variables"]]),
(WorkflowDraftVariableFile, [vf.id for vf in data["variable_files"]]),
(UploadFile, [uf.id for uf in data["upload_files"]]),
]:
cleanup_query = delete(table).where(table.id.in_(ids)).execution_options(synchronize_session=False)
session.execute(cleanup_query)
session.commit()
@pytest.fixture
def setup_commit_test_data(self, app_and_tenant):
"""Create test data for session commit tests."""
tenant, app = app_and_tenant
variable_ids: list[str] = []
with session_factory.create_session() as session:
variables = []
for i in range(10):
var = WorkflowDraftVariable.new_node_variable(
app_id=app.id,
node_id=f"node_{i}",
name=f"var_{i}",
value=StringSegment(value="test_value"),
node_execution_id=str(uuid.uuid4()),
)
session.add(var)
variables.append(var)
session.commit()
variable_ids = [v.id for v in variables]
yield {
"app": app,
"tenant": tenant,
"variable_ids": variable_ids,
}
with session_factory.create_session() as session:
cleanup_query = (
delete(WorkflowDraftVariable)
.where(WorkflowDraftVariable.id.in_(variable_ids))
.execution_options(synchronize_session=False)
)
session.execute(cleanup_query)
session.commit()
def test_session_commit_is_called_after_each_batch(self, setup_commit_test_data):
"""Test that session.begin() is used for automatic transaction management."""
data = setup_commit_test_data
app_id = data["app"].id
# Since session.begin() is used, the transaction is automatically committed
# when the with block exits successfully. We verify this by checking that
# data is actually persisted.
deleted_count = delete_draft_variables_batch(app_id, batch_size=3)
# Verify all data was deleted (proves transaction was committed)
with session_factory.create_session() as session:
remaining_count = session.query(WorkflowDraftVariable).filter_by(app_id=app_id).count()
assert deleted_count == 10
assert remaining_count == 0
def test_data_persisted_after_batch_deletion(self, setup_commit_test_data):
"""Test that data is actually persisted to database after batch deletion with commits."""
data = setup_commit_test_data
app_id = data["app"].id
variable_ids = data["variable_ids"]
# Verify initial state
with session_factory.create_session() as session:
initial_count = session.query(WorkflowDraftVariable).filter_by(app_id=app_id).count()
assert initial_count == 10
# Perform deletion with small batch size to force multiple commits
deleted_count = delete_draft_variables_batch(app_id, batch_size=3)
assert deleted_count == 10
# Verify all data is deleted in a new session (proves commits worked)
with session_factory.create_session() as session:
final_count = session.query(WorkflowDraftVariable).filter_by(app_id=app_id).count()
assert final_count == 0
# Verify specific IDs are deleted
with session_factory.create_session() as session:
remaining_vars = (
session.query(WorkflowDraftVariable).where(WorkflowDraftVariable.id.in_(variable_ids)).count()
)
assert remaining_vars == 0
def test_session_commit_with_empty_dataset(self, setup_commit_test_data):
"""Test session behavior when deleting from an empty dataset."""
nonexistent_app_id = str(uuid.uuid4())
# Should not raise any errors and should return 0
deleted_count = delete_draft_variables_batch(nonexistent_app_id, batch_size=10)
assert deleted_count == 0
def test_session_commit_with_single_batch(self, setup_commit_test_data):
"""Test that commit happens correctly when all data fits in a single batch."""
data = setup_commit_test_data
app_id = data["app"].id
with session_factory.create_session() as session:
initial_count = session.query(WorkflowDraftVariable).filter_by(app_id=app_id).count()
assert initial_count == 10
# Delete all in a single batch
deleted_count = delete_draft_variables_batch(app_id, batch_size=100)
assert deleted_count == 10
# Verify data is persisted
with session_factory.create_session() as session:
final_count = session.query(WorkflowDraftVariable).filter_by(app_id=app_id).count()
assert final_count == 0
def test_invalid_batch_size_raises_error(self, setup_commit_test_data):
"""Test that invalid batch size raises ValueError."""
data = setup_commit_test_data
app_id = data["app"].id
with pytest.raises(ValueError, match="batch_size must be positive"):
delete_draft_variables_batch(app_id, batch_size=0)
with pytest.raises(ValueError, match="batch_size must be positive"):
delete_draft_variables_batch(app_id, batch_size=-1)
@patch("extensions.ext_storage.storage")
def test_session_commit_with_offload_data_cleanup(self, mock_storage, setup_offload_test_data):
"""Test that session commits correctly when cleaning up offload data."""
data = setup_offload_test_data
app_id = data["app"].id
upload_file_ids = [uf.id for uf in data["upload_files"]]
mock_storage.delete.return_value = None
# Verify initial state
with session_factory.create_session() as session:
draft_vars_before = session.query(WorkflowDraftVariable).filter_by(app_id=app_id).count()
var_files_before = (
session.query(WorkflowDraftVariableFile)
.where(WorkflowDraftVariableFile.id.in_([vf.id for vf in data["variable_files"]]))
.count()
)
upload_files_before = session.query(UploadFile).where(UploadFile.id.in_(upload_file_ids)).count()
assert draft_vars_before == 3
assert var_files_before == 2
assert upload_files_before == 2
# Delete variables with offload data
deleted_count = delete_draft_variables_batch(app_id, batch_size=10)
assert deleted_count == 3
# Verify all data is persisted (deleted) in new session
with session_factory.create_session() as session:
draft_vars_after = session.query(WorkflowDraftVariable).filter_by(app_id=app_id).count()
var_files_after = (
session.query(WorkflowDraftVariableFile)
.where(WorkflowDraftVariableFile.id.in_([vf.id for vf in data["variable_files"]]))
.count()
)
upload_files_after = session.query(UploadFile).where(UploadFile.id.in_(upload_file_ids)).count()
assert draft_vars_after == 0
assert var_files_after == 0
assert upload_files_after == 0
# Verify storage cleanup was called
assert mock_storage.delete.call_count == 2
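
What this removed suite verified is the shape of the loop itself: each batch runs in its own transaction, so progress is durable batch by batch and a crash midway loses at most one batch. A simplified sketch of such a loop; the real task also cleans up offloaded variable files and storage objects:

from sqlalchemy import delete, select

from core.db.session_factory import session_factory
from models.workflow import WorkflowDraftVariable

def delete_draft_variables_batch_sketch(app_id: str, batch_size: int = 1000) -> int:
    if batch_size <= 0:
        raise ValueError("batch_size must be positive")
    total = 0
    while True:
        # session.begin() commits on clean exit, so each batch persists
        # before the next one starts.
        with session_factory.create_session() as session, session.begin():
            ids = session.scalars(
                select(WorkflowDraftVariable.id)
                .where(WorkflowDraftVariable.app_id == app_id)
                .limit(batch_size)
            ).all()
            if not ids:
                return total
            session.execute(
                delete(WorkflowDraftVariable).where(WorkflowDraftVariable.id.in_(ids))
            )
            total += len(ids)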

View File

@@ -1016,7 +1016,7 @@ class TestAccountService:
def test_delete_account(self, db_session_with_containers, mock_external_service_dependencies):
"""
Test account deletion (should add task to queue and sync to enterprise).
Test account deletion (should add task to queue).
"""
fake = Faker()
email = fake.email()
@@ -1034,18 +1034,10 @@ class TestAccountService:
password=password,
)
with (
patch("services.account_service.delete_account_task") as mock_delete_task,
patch("services.enterprise.account_deletion_sync.sync_account_deletion") as mock_sync,
):
mock_sync.return_value = True
with patch("services.account_service.delete_account_task") as mock_delete_task:
# Delete account
AccountService.delete_account(account)
# Verify sync was called
mock_sync.assert_called_once_with(account_id=account.id, source="account_deleted")
# Verify task was added to queue
mock_delete_task.delay.assert_called_once_with(account.id)
@@ -1724,7 +1716,7 @@ class TestTenantService:
def test_remove_member_from_tenant_success(self, db_session_with_containers, mock_external_service_dependencies):
"""
Test successful member removal from tenant (should sync to enterprise).
Test successful member removal from tenant.
"""
fake = Faker()
tenant_name = fake.company()
@@ -1759,15 +1751,7 @@ class TestTenantService:
TenantService.create_tenant_member(tenant, member_account, role="normal")
# Remove member
with patch("services.enterprise.account_deletion_sync.sync_workspace_member_removal") as mock_sync:
mock_sync.return_value = True
TenantService.remove_member_from_tenant(tenant, member_account, owner_account)
# Verify sync was called
mock_sync.assert_called_once_with(
workspace_id=tenant.id, member_id=member_account.id, source="workspace_member_removed"
)
TenantService.remove_member_from_tenant(tenant, member_account, owner_account)
# Verify member was removed
from extensions.ext_database import db

View File

@@ -1,182 +0,0 @@
from unittest.mock import MagicMock, patch
import pytest
from faker import Faker
from models import Account, Tenant, TenantAccountJoin, TenantAccountRole
from models.dataset import Dataset, Document, DocumentSegment
from tasks.document_indexing_update_task import document_indexing_update_task
class TestDocumentIndexingUpdateTask:
@pytest.fixture
def mock_external_dependencies(self):
"""Patch external collaborators used by the update task.
- IndexProcessorFactory.init_index_processor().clean(...)
- IndexingRunner.run([...])
"""
with (
patch("tasks.document_indexing_update_task.IndexProcessorFactory") as mock_factory,
patch("tasks.document_indexing_update_task.IndexingRunner") as mock_runner,
):
processor_instance = MagicMock()
mock_factory.return_value.init_index_processor.return_value = processor_instance
runner_instance = MagicMock()
mock_runner.return_value = runner_instance
yield {
"factory": mock_factory,
"processor": processor_instance,
"runner": mock_runner,
"runner_instance": runner_instance,
}
def _create_dataset_document_with_segments(self, db_session_with_containers, *, segment_count: int = 2):
fake = Faker()
# Account and tenant
account = Account(
email=fake.email(),
name=fake.name(),
interface_language="en-US",
status="active",
)
db_session_with_containers.add(account)
db_session_with_containers.commit()
tenant = Tenant(name=fake.company(), status="normal")
db_session_with_containers.add(tenant)
db_session_with_containers.commit()
join = TenantAccountJoin(
tenant_id=tenant.id,
account_id=account.id,
role=TenantAccountRole.OWNER,
current=True,
)
db_session_with_containers.add(join)
db_session_with_containers.commit()
# Dataset and document
dataset = Dataset(
tenant_id=tenant.id,
name=fake.company(),
description=fake.text(max_nb_chars=64),
data_source_type="upload_file",
indexing_technique="high_quality",
created_by=account.id,
)
db_session_with_containers.add(dataset)
db_session_with_containers.commit()
document = Document(
tenant_id=tenant.id,
dataset_id=dataset.id,
position=0,
data_source_type="upload_file",
batch="test_batch",
name=fake.file_name(),
created_from="upload_file",
created_by=account.id,
indexing_status="waiting",
enabled=True,
doc_form="text_model",
)
db_session_with_containers.add(document)
db_session_with_containers.commit()
# Segments
node_ids = []
for i in range(segment_count):
node_id = f"node-{i + 1}"
seg = DocumentSegment(
tenant_id=tenant.id,
dataset_id=dataset.id,
document_id=document.id,
position=i,
content=fake.text(max_nb_chars=32),
answer=None,
word_count=10,
tokens=5,
index_node_id=node_id,
status="completed",
created_by=account.id,
)
db_session_with_containers.add(seg)
node_ids.append(node_id)
db_session_with_containers.commit()
# Refresh to ensure ORM state
db_session_with_containers.refresh(dataset)
db_session_with_containers.refresh(document)
return dataset, document, node_ids
def test_cleans_segments_and_reindexes(self, db_session_with_containers, mock_external_dependencies):
dataset, document, node_ids = self._create_dataset_document_with_segments(db_session_with_containers)
# Act
document_indexing_update_task(dataset.id, document.id)
# Ensure we see committed changes from another session
db_session_with_containers.expire_all()
# Assert document status updated before reindex
updated = db_session_with_containers.query(Document).where(Document.id == document.id).first()
assert updated.indexing_status == "parsing"
assert updated.processing_started_at is not None
# Segments should be deleted
remaining = (
db_session_with_containers.query(DocumentSegment).where(DocumentSegment.document_id == document.id).count()
)
assert remaining == 0
# Assert index processor clean was called with expected args
clean_call = mock_external_dependencies["processor"].clean.call_args
assert clean_call is not None
args, kwargs = clean_call
# args[0] is a Dataset instance (from another session) — validate by id
assert getattr(args[0], "id", None) == dataset.id
# args[1] should contain our node_ids
assert set(args[1]) == set(node_ids)
assert kwargs.get("with_keywords") is True
assert kwargs.get("delete_child_chunks") is True
# Assert indexing runner invoked with the updated document
run_call = mock_external_dependencies["runner_instance"].run.call_args
assert run_call is not None
run_docs = run_call[0][0]
assert len(run_docs) == 1
first = run_docs[0]
assert getattr(first, "id", None) == document.id
def test_clean_error_is_logged_and_indexing_continues(self, db_session_with_containers, mock_external_dependencies):
dataset, document, node_ids = self._create_dataset_document_with_segments(db_session_with_containers)
# Force clean to raise; task should continue to indexing
mock_external_dependencies["processor"].clean.side_effect = Exception("boom")
document_indexing_update_task(dataset.id, document.id)
# Ensure we see committed changes from another session
db_session_with_containers.expire_all()
# Indexing should still be triggered
mock_external_dependencies["runner_instance"].run.assert_called_once()
# Segments should remain (since clean failed before DB delete)
remaining = (
db_session_with_containers.query(DocumentSegment).where(DocumentSegment.document_id == document.id).count()
)
assert remaining > 0
def test_document_not_found_noop(self, db_session_with_containers, mock_external_dependencies):
fake = Faker()
# Act with non-existent document id
document_indexing_update_task(dataset_id=fake.uuid4(), document_id=fake.uuid4())
# Neither processor nor runner should be called
mock_external_dependencies["processor"].clean.assert_not_called()
mock_external_dependencies["runner_instance"].run.assert_not_called()

View File

@@ -217,6 +217,7 @@ class TestTemplateTransformNode:
@patch(
"core.workflow.nodes.template_transform.template_transform_node.CodeExecutorJinja2TemplateRenderer.render_template"
)
@patch("core.workflow.nodes.template_transform.template_transform_node.MAX_TEMPLATE_TRANSFORM_OUTPUT_LENGTH", 10)
def test_run_output_length_exceeds_limit(
self, mock_execute, basic_node_data, mock_graph, mock_graph_runtime_state, graph_init_params
):
@@ -230,7 +231,6 @@ class TestTemplateTransformNode:
graph_init_params=graph_init_params,
graph=mock_graph,
graph_runtime_state=mock_graph_runtime_state,
max_output_length=10,
)
result = node._run()

View File

@@ -132,8 +132,6 @@ class TestCelerySSLConfiguration:
mock_config.WORKFLOW_SCHEDULE_MAX_DISPATCH_PER_TICK = 0
mock_config.ENABLE_TRIGGER_PROVIDER_REFRESH_TASK = False
mock_config.TRIGGER_PROVIDER_REFRESH_INTERVAL = 15
mock_config.ENABLE_API_TOKEN_LAST_USED_UPDATE_TASK = False
mock_config.API_TOKEN_LAST_USED_UPDATE_INTERVAL = 30
with patch("extensions.ext_celery.dify_config", mock_config):
from dify_app import DifyApp

View File

@@ -1,250 +0,0 @@
"""
Unit tests for API Token Cache module.
"""
import json
from datetime import datetime
from unittest.mock import MagicMock, patch
from services.api_token_service import (
CACHE_KEY_PREFIX,
CACHE_NULL_TTL_SECONDS,
CACHE_TTL_SECONDS,
ApiTokenCache,
CachedApiToken,
)
class TestApiTokenCache:
"""Test cases for ApiTokenCache class."""
def setup_method(self):
"""Setup test fixtures."""
self.mock_token = MagicMock()
self.mock_token.id = "test-token-id-123"
self.mock_token.app_id = "test-app-id-456"
self.mock_token.tenant_id = "test-tenant-id-789"
self.mock_token.type = "app"
self.mock_token.token = "test-token-value-abc"
self.mock_token.last_used_at = datetime(2026, 2, 3, 10, 0, 0)
self.mock_token.created_at = datetime(2026, 1, 1, 0, 0, 0)
def test_make_cache_key(self):
"""Test cache key generation."""
# Test with scope
key = ApiTokenCache._make_cache_key("my-token", "app")
assert key == f"{CACHE_KEY_PREFIX}:app:my-token"
# Test without scope
key = ApiTokenCache._make_cache_key("my-token", None)
assert key == f"{CACHE_KEY_PREFIX}:any:my-token"
def test_serialize_token(self):
"""Test token serialization."""
serialized = ApiTokenCache._serialize_token(self.mock_token)
data = json.loads(serialized)
assert data["id"] == "test-token-id-123"
assert data["app_id"] == "test-app-id-456"
assert data["tenant_id"] == "test-tenant-id-789"
assert data["type"] == "app"
assert data["token"] == "test-token-value-abc"
assert data["last_used_at"] == "2026-02-03T10:00:00"
assert data["created_at"] == "2026-01-01T00:00:00"
def test_serialize_token_with_nulls(self):
"""Test token serialization with None values."""
mock_token = MagicMock()
mock_token.id = "test-id"
mock_token.app_id = None
mock_token.tenant_id = None
mock_token.type = "dataset"
mock_token.token = "test-token"
mock_token.last_used_at = None
mock_token.created_at = datetime(2026, 1, 1, 0, 0, 0)
serialized = ApiTokenCache._serialize_token(mock_token)
data = json.loads(serialized)
assert data["app_id"] is None
assert data["tenant_id"] is None
assert data["last_used_at"] is None
def test_deserialize_token(self):
"""Test token deserialization."""
cached_data = json.dumps(
{
"id": "test-id",
"app_id": "test-app",
"tenant_id": "test-tenant",
"type": "app",
"token": "test-token",
"last_used_at": "2026-02-03T10:00:00",
"created_at": "2026-01-01T00:00:00",
}
)
result = ApiTokenCache._deserialize_token(cached_data)
assert isinstance(result, CachedApiToken)
assert result.id == "test-id"
assert result.app_id == "test-app"
assert result.tenant_id == "test-tenant"
assert result.type == "app"
assert result.token == "test-token"
assert result.last_used_at == datetime(2026, 2, 3, 10, 0, 0)
assert result.created_at == datetime(2026, 1, 1, 0, 0, 0)
def test_deserialize_null_token(self):
"""Test deserialization of null token (cached miss)."""
result = ApiTokenCache._deserialize_token("null")
assert result is None
def test_deserialize_invalid_json(self):
"""Test deserialization with invalid JSON."""
result = ApiTokenCache._deserialize_token("invalid-json{")
assert result is None
@patch("services.api_token_service.redis_client")
def test_get_cache_hit(self, mock_redis):
"""Test cache hit scenario."""
cached_data = json.dumps(
{
"id": "test-id",
"app_id": "test-app",
"tenant_id": "test-tenant",
"type": "app",
"token": "test-token",
"last_used_at": "2026-02-03T10:00:00",
"created_at": "2026-01-01T00:00:00",
}
).encode("utf-8")
mock_redis.get.return_value = cached_data
result = ApiTokenCache.get("test-token", "app")
assert result is not None
assert isinstance(result, CachedApiToken)
assert result.app_id == "test-app"
mock_redis.get.assert_called_once_with(f"{CACHE_KEY_PREFIX}:app:test-token")
@patch("services.api_token_service.redis_client")
def test_get_cache_miss(self, mock_redis):
"""Test cache miss scenario."""
mock_redis.get.return_value = None
result = ApiTokenCache.get("test-token", "app")
assert result is None
mock_redis.get.assert_called_once()
@patch("services.api_token_service.redis_client")
def test_set_valid_token(self, mock_redis):
"""Test setting a valid token in cache."""
result = ApiTokenCache.set("test-token", "app", self.mock_token)
assert result is True
mock_redis.setex.assert_called_once()
args = mock_redis.setex.call_args[0]
assert args[0] == f"{CACHE_KEY_PREFIX}:app:test-token"
assert args[1] == CACHE_TTL_SECONDS
@patch("services.api_token_service.redis_client")
def test_set_null_token(self, mock_redis):
"""Test setting a null token (cache penetration prevention)."""
result = ApiTokenCache.set("invalid-token", "app", None)
assert result is True
mock_redis.setex.assert_called_once()
args = mock_redis.setex.call_args[0]
assert args[0] == f"{CACHE_KEY_PREFIX}:app:invalid-token"
assert args[1] == CACHE_NULL_TTL_SECONDS
assert args[2] == b"null"
@patch("services.api_token_service.redis_client")
def test_delete_with_scope(self, mock_redis):
"""Test deleting token cache with specific scope."""
result = ApiTokenCache.delete("test-token", "app")
assert result is True
mock_redis.delete.assert_called_once_with(f"{CACHE_KEY_PREFIX}:app:test-token")
@patch("services.api_token_service.redis_client")
def test_delete_without_scope(self, mock_redis):
"""Test deleting token cache without scope (delete all)."""
# Mock scan_iter to return an iterator of keys
mock_redis.scan_iter.return_value = iter(
[
b"api_token:app:test-token",
b"api_token:dataset:test-token",
]
)
result = ApiTokenCache.delete("test-token", None)
assert result is True
# Verify scan_iter was called with the correct pattern
mock_redis.scan_iter.assert_called_once()
call_args = mock_redis.scan_iter.call_args
assert call_args[1]["match"] == f"{CACHE_KEY_PREFIX}:*:test-token"
# Verify delete was called with all matched keys
mock_redis.delete.assert_called_once_with(
b"api_token:app:test-token",
b"api_token:dataset:test-token",
)
@patch("services.api_token_service.redis_client")
def test_redis_fallback_on_exception(self, mock_redis):
"""Test Redis fallback when Redis is unavailable."""
from redis import RedisError
mock_redis.get.side_effect = RedisError("Connection failed")
result = ApiTokenCache.get("test-token", "app")
# Should return None (fallback) instead of raising exception
assert result is None
class TestApiTokenCacheIntegration:
"""Integration test scenarios."""
@patch("services.api_token_service.redis_client")
def test_full_cache_lifecycle(self, mock_redis):
"""Test complete cache lifecycle: set -> get -> delete."""
# Setup mock token
mock_token = MagicMock()
mock_token.id = "id-123"
mock_token.app_id = "app-456"
mock_token.tenant_id = "tenant-789"
mock_token.type = "app"
mock_token.token = "token-abc"
mock_token.last_used_at = datetime(2026, 2, 3, 10, 0, 0)
mock_token.created_at = datetime(2026, 1, 1, 0, 0, 0)
# 1. Set token in cache
ApiTokenCache.set("token-abc", "app", mock_token)
assert mock_redis.setex.called
# 2. Simulate cache hit
cached_data = ApiTokenCache._serialize_token(mock_token)
mock_redis.get.return_value = cached_data  # serialized JSON, as redis_client.get would return
retrieved = ApiTokenCache.get("token-abc", "app")
assert retrieved is not None
assert isinstance(retrieved, CachedApiToken)
# 3. Delete from cache
ApiTokenCache.delete("token-abc", "app")
assert mock_redis.delete.called
@patch("services.api_token_service.redis_client")
def test_cache_penetration_prevention(self, mock_redis):
"""Test that non-existent tokens are cached as null."""
# Set null token (cache miss)
ApiTokenCache.set("non-existent-token", "app", None)
args = mock_redis.setex.call_args[0]
assert args[2] == b"null"
assert args[1] == CACHE_NULL_TTL_SECONDS # Shorter TTL for null values
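
The null-sentinel behavior asserted here, in isolation: a database miss is cached as the literal JSON null with a much shorter TTL, so a burst of lookups for a bogus token is absorbed by Redis instead of hammering the database. TTL values mirror what these tests check:

import json

CACHE_TTL_SECONDS = 600  # ~10 minutes for valid tokens
CACHE_NULL_TTL_SECONDS = 60  # short TTL for cached misses

def cache_token_sketch(client, key: str, token_json: str | None) -> None:
    if token_json is None:
        client.setex(key, CACHE_NULL_TTL_SECONDS, json.dumps(None))  # stores b"null"
    else:
        client.setex(key, CACHE_TTL_SECONDS, token_json)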

View File

@@ -1,276 +0,0 @@
"""Unit tests for account deletion synchronization.
This test module verifies the enterprise account deletion sync functionality,
including Redis queuing, error handling, and community vs enterprise behavior.
"""
from unittest.mock import MagicMock, patch
import pytest
from redis import RedisError
from services.enterprise.account_deletion_sync import (
_queue_task,
sync_account_deletion,
sync_workspace_member_removal,
)
class TestQueueTask:
"""Unit tests for the _queue_task helper function."""
@pytest.fixture
def mock_redis_client(self):
"""Mock redis_client for testing."""
with patch("services.enterprise.account_deletion_sync.redis_client") as mock_redis:
yield mock_redis
@pytest.fixture
def mock_uuid(self):
"""Mock UUID generation for predictable task IDs."""
with patch("services.enterprise.account_deletion_sync.uuid.uuid4") as mock_uuid_gen:
mock_uuid_gen.return_value = MagicMock(hex="test-task-id-1234")
yield mock_uuid_gen
def test_queue_task_success(self, mock_redis_client, mock_uuid):
"""Test successful task queueing to Redis."""
# Arrange
workspace_id = "ws-123"
member_id = "member-456"
source = "test_source"
# Act
result = _queue_task(workspace_id=workspace_id, member_id=member_id, source=source)
# Assert
assert result is True
mock_redis_client.lpush.assert_called_once()
# Verify the task payload structure
call_args = mock_redis_client.lpush.call_args[0]
assert call_args[0] == "enterprise:member:sync:queue"
import json
task_data = json.loads(call_args[1])
assert task_data["workspace_id"] == workspace_id
assert task_data["member_id"] == member_id
assert task_data["source"] == source
assert task_data["type"] == "sync_member_deletion_from_workspace"
assert task_data["retry_count"] == 0
assert "task_id" in task_data
assert "created_at" in task_data
def test_queue_task_redis_error(self, mock_redis_client, caplog):
"""Test handling of Redis connection errors."""
# Arrange
mock_redis_client.lpush.side_effect = RedisError("Connection failed")
# Act
result = _queue_task(workspace_id="ws-123", member_id="member-456", source="test_source")
# Assert
assert result is False
assert "Failed to queue account deletion sync" in caplog.text
def test_queue_task_type_error(self, mock_redis_client, caplog):
"""Test handling of JSON serialization errors."""
# Arrange
mock_redis_client.lpush.side_effect = TypeError("Cannot serialize")
# Act
result = _queue_task(workspace_id="ws-123", member_id="member-456", source="test_source")
# Assert
assert result is False
assert "Failed to queue account deletion sync" in caplog.text
class TestSyncWorkspaceMemberRemoval:
"""Unit tests for sync_workspace_member_removal function."""
@pytest.fixture
def mock_queue_task(self):
"""Mock _queue_task for testing."""
with patch("services.enterprise.account_deletion_sync._queue_task") as mock_queue:
mock_queue.return_value = True
yield mock_queue
def test_sync_workspace_member_removal_enterprise_enabled(self, mock_queue_task):
"""Test sync when ENTERPRISE_ENABLED is True."""
# Arrange
workspace_id = "ws-123"
member_id = "member-456"
source = "workspace_member_removed"
with patch("services.enterprise.account_deletion_sync.dify_config") as mock_config:
mock_config.ENTERPRISE_ENABLED = True
# Act
result = sync_workspace_member_removal(workspace_id=workspace_id, member_id=member_id, source=source)
# Assert
assert result is True
mock_queue_task.assert_called_once_with(workspace_id=workspace_id, member_id=member_id, source=source)
def test_sync_workspace_member_removal_enterprise_disabled(self, mock_queue_task):
"""Test sync when ENTERPRISE_ENABLED is False (community edition)."""
# Arrange
with patch("services.enterprise.account_deletion_sync.dify_config") as mock_config:
mock_config.ENTERPRISE_ENABLED = False
# Act
result = sync_workspace_member_removal(workspace_id="ws-123", member_id="member-456", source="test_source")
# Assert
assert result is True
mock_queue_task.assert_not_called()
def test_sync_workspace_member_removal_queue_failure(self, mock_queue_task):
"""Test handling of queue task failures."""
# Arrange
mock_queue_task.return_value = False
with patch("services.enterprise.account_deletion_sync.dify_config") as mock_config:
mock_config.ENTERPRISE_ENABLED = True
# Act
result = sync_workspace_member_removal(workspace_id="ws-123", member_id="member-456", source="test_source")
# Assert
assert result is False
class TestSyncAccountDeletion:
"""Unit tests for sync_account_deletion function."""
@pytest.fixture
def mock_db_session(self):
"""Mock database session for testing."""
with patch("services.enterprise.account_deletion_sync.db.session") as mock_session:
yield mock_session
@pytest.fixture
def mock_queue_task(self):
"""Mock _queue_task for testing."""
with patch("services.enterprise.account_deletion_sync._queue_task") as mock_queue:
mock_queue.return_value = True
yield mock_queue
def test_sync_account_deletion_enterprise_disabled(self, mock_db_session, mock_queue_task):
"""Test sync when ENTERPRISE_ENABLED is False (community edition)."""
# Arrange
with patch("services.enterprise.account_deletion_sync.dify_config") as mock_config:
mock_config.ENTERPRISE_ENABLED = False
# Act
result = sync_account_deletion(account_id="acc-123", source="account_deleted")
# Assert
assert result is True
mock_db_session.query.assert_not_called()
mock_queue_task.assert_not_called()
def test_sync_account_deletion_multiple_workspaces(self, mock_db_session, mock_queue_task):
"""Test sync for account with multiple workspace memberships."""
# Arrange
account_id = "acc-123"
# Mock workspace joins
mock_join1 = MagicMock()
mock_join1.tenant_id = "tenant-1"
mock_join2 = MagicMock()
mock_join2.tenant_id = "tenant-2"
mock_join3 = MagicMock()
mock_join3.tenant_id = "tenant-3"
mock_query = MagicMock()
mock_query.filter_by.return_value.all.return_value = [mock_join1, mock_join2, mock_join3]
mock_db_session.query.return_value = mock_query
with patch("services.enterprise.account_deletion_sync.dify_config") as mock_config:
mock_config.ENTERPRISE_ENABLED = True
# Act
result = sync_account_deletion(account_id=account_id, source="account_deleted")
# Assert
assert result is True
assert mock_queue_task.call_count == 3
# Verify each workspace was queued
mock_queue_task.assert_any_call(workspace_id="tenant-1", member_id=account_id, source="account_deleted")
mock_queue_task.assert_any_call(workspace_id="tenant-2", member_id=account_id, source="account_deleted")
mock_queue_task.assert_any_call(workspace_id="tenant-3", member_id=account_id, source="account_deleted")
def test_sync_account_deletion_no_workspaces(self, mock_db_session, mock_queue_task):
"""Test sync for account with no workspace memberships."""
# Arrange
mock_query = MagicMock()
mock_query.filter_by.return_value.all.return_value = []
mock_db_session.query.return_value = mock_query
with patch("services.enterprise.account_deletion_sync.dify_config") as mock_config:
mock_config.ENTERPRISE_ENABLED = True
# Act
result = sync_account_deletion(account_id="acc-123", source="account_deleted")
# Assert
assert result is True
mock_queue_task.assert_not_called()
def test_sync_account_deletion_partial_failure(self, mock_db_session, mock_queue_task):
"""Test sync when some tasks fail to queue."""
# Arrange
account_id = "acc-123"
# Mock workspace joins
mock_join1 = MagicMock()
mock_join1.tenant_id = "tenant-1"
mock_join2 = MagicMock()
mock_join2.tenant_id = "tenant-2"
mock_join3 = MagicMock()
mock_join3.tenant_id = "tenant-3"
mock_query = MagicMock()
mock_query.filter_by.return_value.all.return_value = [mock_join1, mock_join2, mock_join3]
mock_db_session.query.return_value = mock_query
# Mock queue_task to fail for second workspace
def queue_side_effect(workspace_id, member_id, source):
return workspace_id != "tenant-2"
mock_queue_task.side_effect = queue_side_effect
with patch("services.enterprise.account_deletion_sync.dify_config") as mock_config:
mock_config.ENTERPRISE_ENABLED = True
# Act
result = sync_account_deletion(account_id=account_id, source="account_deleted")
# Assert
assert result is False # Should return False if any task fails
assert mock_queue_task.call_count == 3
def test_sync_account_deletion_all_failures(self, mock_db_session, mock_queue_task):
"""Test sync when all tasks fail to queue."""
# Arrange
mock_join = MagicMock()
mock_join.tenant_id = "tenant-1"
mock_query = MagicMock()
mock_query.filter_by.return_value.all.return_value = [mock_join]
mock_db_session.query.return_value = mock_query
mock_queue_task.return_value = False
with patch("services.enterprise.account_deletion_sync.dify_config") as mock_config:
mock_config.ENTERPRISE_ENABLED = True
# Act
result = sync_account_deletion(account_id="acc-123", source="account_deleted")
# Assert
assert result is False
mock_queue_task.assert_called_once()
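
The payload these tests decode fully constrains the queueing helper. A sketch of _queue_task consistent with them: a JSON task pushed onto the enterprise:member:sync:queue list, with Redis or serialization failures logged and reported as False (the real implementation lives in services.enterprise.account_deletion_sync):

import json
import logging
import uuid
from datetime import datetime, timezone

from redis import RedisError

from extensions.ext_redis import redis_client

logger = logging.getLogger(__name__)

def _queue_task_sketch(workspace_id: str, member_id: str, source: str) -> bool:
    task = {
        "task_id": uuid.uuid4().hex,
        "type": "sync_member_deletion_from_workspace",
        "workspace_id": workspace_id,
        "member_id": member_id,
        "source": source,
        "retry_count": 0,
        "created_at": datetime.now(timezone.utc).isoformat(),
    }
    try:
        redis_client.lpush("enterprise:member:sync:queue", json.dumps(task))
        return True
    except (RedisError, TypeError):
        logger.exception("Failed to queue account deletion sync")
        return False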

View File

@@ -114,21 +114,6 @@ def mock_db_session():
session = MagicMock()
# Ensure tests can observe session.close() via context manager teardown
session.close = MagicMock()
session.commit = MagicMock()
# Mock session.begin() context manager to auto-commit on exit
begin_cm = MagicMock()
begin_cm.__enter__.return_value = session
def _begin_exit_side_effect(*args, **kwargs):
# session.begin().__exit__() should commit if no exception
if args[0] is None: # No exception
session.commit()
begin_cm.__exit__.side_effect = _begin_exit_side_effect
session.begin.return_value = begin_cm
# Mock create_session() context manager
cm = MagicMock()
cm.__enter__.return_value = session

View File

@@ -350,7 +350,7 @@ class TestDeleteWorkflowArchiveLogs:
mock_query.where.return_value = mock_delete_query
mock_db.session.query.return_value = mock_query
delete_func(mock_db.session, "log-1")
delete_func("log-1")
mock_db.session.query.assert_called_once_with(WorkflowArchiveLog)
mock_query.where.assert_called_once()

api/uv.lock (generated)
View File

@@ -1368,7 +1368,7 @@ wheels = [
[[package]]
name = "dify-api"
version = "1.12.1"
version = "1.12.0"
source = { virtual = "." }
dependencies = [
{ name = "aliyun-log-python-sdk" },

View File

@@ -21,7 +21,7 @@ services:
# API service
api:
image: langgenius/dify-api:1.12.1
image: langgenius/dify-api:1.12.0
restart: always
environment:
# Use the shared environment variables.
@@ -63,7 +63,7 @@ services:
# worker service
# The Celery worker for processing all queues (dataset, workflow, mail, etc.)
worker:
image: langgenius/dify-api:1.12.1
image: langgenius/dify-api:1.12.0
restart: always
environment:
# Use the shared environment variables.
@@ -102,7 +102,7 @@ services:
# worker_beat service
# Celery beat for scheduling periodic tasks.
worker_beat:
image: langgenius/dify-api:1.12.1
image: langgenius/dify-api:1.12.0
restart: always
environment:
# Use the shared environment variables.
@@ -132,7 +132,7 @@ services:
# Frontend web application.
web:
image: langgenius/dify-web:1.12.1
image: langgenius/dify-web:1.12.0
restart: always
environment:
CONSOLE_API_URL: ${CONSOLE_API_URL:-}

View File

@@ -707,7 +707,7 @@ services:
# API service
api:
image: langgenius/dify-api:1.12.1
image: langgenius/dify-api:1.12.0
restart: always
environment:
# Use the shared environment variables.
@@ -749,7 +749,7 @@ services:
# worker service
# The Celery worker for processing all queues (dataset, workflow, mail, etc.)
worker:
image: langgenius/dify-api:1.12.1
image: langgenius/dify-api:1.12.0
restart: always
environment:
# Use the shared environment variables.
@@ -788,7 +788,7 @@ services:
# worker_beat service
# Celery beat for scheduling periodic tasks.
worker_beat:
image: langgenius/dify-api:1.12.1
image: langgenius/dify-api:1.12.0
restart: always
environment:
# Use the shared environment variables.
@@ -818,7 +818,7 @@ services:
# Frontend web application.
web:
image: langgenius/dify-web:1.12.1
image: langgenius/dify-web:1.12.0
restart: always
environment:
CONSOLE_API_URL: ${CONSOLE_API_URL:-}

View File

@@ -109,7 +109,6 @@ const AgentTools: FC = () => {
tool_parameters: paramsWithDefaultValue,
notAuthor: !tool.is_team_authorization,
enabled: true,
type: tool.provider_type as CollectionType,
}
}
const handleSelectTool = (tool: ToolDefaultValue) => {

View File

@@ -1,4 +1,3 @@
import type { App } from '@/types/app'
import { fireEvent, render, screen, waitFor } from '@testing-library/react'
import { useRouter } from 'next/navigation'
import { afterAll, beforeEach, describe, expect, it, vi } from 'vitest'
@@ -14,8 +13,8 @@ import { getRedirection } from '@/utils/app-redirection'
import CreateAppModal from './index'
vi.mock('ahooks', () => ({
useDebounceFn: <T extends (...args: unknown[]) => unknown>(fn: T) => {
const run = (...args: Parameters<T>) => fn(...args)
useDebounceFn: (fn: (...args: any[]) => any) => {
const run = (...args: any[]) => fn(...args)
const cancel = vi.fn()
const flush = vi.fn()
return { run, cancel, flush }
@@ -84,7 +83,7 @@ describe('CreateAppModal', () => {
beforeEach(() => {
vi.clearAllMocks()
mockUseRouter.mockReturnValue({ push: mockPush } as unknown as ReturnType<typeof useRouter>)
mockUseRouter.mockReturnValue({ push: mockPush } as any)
mockUseProviderContext.mockReturnValue({
plan: {
type: AppModeEnum.ADVANCED_CHAT,
@@ -93,10 +92,10 @@ describe('CreateAppModal', () => {
reset: {},
},
enableBilling: true,
} as unknown as ReturnType<typeof useProviderContext>)
} as any)
mockUseAppContext.mockReturnValue({
isCurrentWorkspaceEditor: true,
} as unknown as ReturnType<typeof useAppContext>)
} as any)
mockSetItem.mockClear()
Object.defineProperty(window, 'localStorage', {
value: {
@@ -119,8 +118,8 @@ describe('CreateAppModal', () => {
})
it('creates an app, notifies success, and fires callbacks', async () => {
const mockApp: Partial<App> = { id: 'app-1', mode: AppModeEnum.ADVANCED_CHAT }
mockCreateApp.mockResolvedValue(mockApp as App)
const mockApp = { id: 'app-1', mode: AppModeEnum.ADVANCED_CHAT }
mockCreateApp.mockResolvedValue(mockApp as any)
const { onClose, onSuccess } = renderModal()
const nameInput = screen.getByPlaceholderText('app.newApp.appNamePlaceholder')

View File

@@ -1,3 +1,4 @@
/* eslint-disable tailwindcss/classnames-order */
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import Effect from '.'
@@ -28,8 +29,8 @@ type Story = StoryObj<typeof meta>
export const Playground: Story = {
render: () => (
<div className="relative h-40 w-72 overflow-hidden rounded-2xl border border-divider-subtle bg-background-default-subtle">
<Effect className="left-8 top-6" />
<Effect className="bg-util-colors-purple-brand-purple-brand-500 right-10 top-14" />
<Effect className="top-6 left-8" />
<Effect className="top-14 right-10 bg-util-colors-purple-brand-purple-brand-500" />
<div className="absolute inset-x-0 bottom-4 flex justify-center text-xs text-text-secondary">
Accent glow
</div>

View File

@@ -4,7 +4,7 @@ import type { FC } from 'react'
import { RiQuestionLine } from '@remixicon/react'
import { useBoolean } from 'ahooks'
import * as React from 'react'
import { useCallback, useEffect, useRef, useState } from 'react'
import { useEffect, useRef, useState } from 'react'
import { PortalToFollowElem, PortalToFollowElemContent, PortalToFollowElemTrigger } from '@/app/components/base/portal-to-follow-elem'
import { cn } from '@/utils/classnames'
import { tooltipManager } from './TooltipManager'
@@ -61,20 +61,6 @@ const Tooltip: FC<TooltipProps> = ({
isHoverTriggerRef.current = isHoverTrigger
}, [isHoverTrigger])
const closeTimeoutRef = useRef<ReturnType<typeof setTimeout> | null>(null)
const clearCloseTimeout = useCallback(() => {
if (closeTimeoutRef.current) {
clearTimeout(closeTimeoutRef.current)
closeTimeoutRef.current = null
}
}, [])
useEffect(() => {
return () => {
clearCloseTimeout()
}
}, [clearCloseTimeout])
const close = () => setOpen(false)
const handleLeave = (isTrigger: boolean) => {
@@ -85,9 +71,7 @@ const Tooltip: FC<TooltipProps> = ({
// give time to move to the popup
if (needsDelay) {
clearCloseTimeout()
closeTimeoutRef.current = setTimeout(() => {
closeTimeoutRef.current = null
setTimeout(() => {
if (!isHoverPopupRef.current && !isHoverTriggerRef.current) {
setOpen(false)
tooltipManager.clear(close)
@@ -95,7 +79,6 @@ const Tooltip: FC<TooltipProps> = ({
}, 300)
}
else {
clearCloseTimeout()
setOpen(false)
tooltipManager.clear(close)
}
@@ -112,7 +95,6 @@ const Tooltip: FC<TooltipProps> = ({
onClick={() => triggerMethod === 'click' && setOpen(v => !v)}
onMouseEnter={() => {
if (triggerMethod === 'hover') {
clearCloseTimeout()
setHoverTrigger()
tooltipManager.register(close)
setOpen(true)
@@ -133,12 +115,7 @@ const Tooltip: FC<TooltipProps> = ({
!noDecoration && 'system-xs-regular relative max-w-[300px] break-words rounded-md bg-components-panel-bg px-3 py-2 text-left text-text-tertiary shadow-lg',
popupClassName,
)}
onMouseEnter={() => {
if (triggerMethod === 'hover') {
clearCloseTimeout()
setHoverPopup()
}
}}
onMouseEnter={() => triggerMethod === 'hover' && setHoverPopup()}
onMouseLeave={() => triggerMethod === 'hover' && handleLeave(false)}
>
{popupContent}

View File

@@ -216,22 +216,13 @@ describe('image-uploader utils', () => {
type FileCallback = (file: MockFile) => void
type EntriesCallback = (entries: FileSystemEntry[]) => void
// Helper to create mock FileSystemEntry with required properties
const createMockEntry = (props: {
isFile: boolean
isDirectory: boolean
name?: string
file?: (callback: FileCallback) => void
createReader?: () => { readEntries: (callback: EntriesCallback) => void }
}): FileSystemEntry => props as unknown as FileSystemEntry
it('should resolve with file array for file entry', async () => {
const mockFile: MockFile = { name: 'test.png' }
const mockEntry = createMockEntry({
const mockEntry = {
isFile: true,
isDirectory: false,
file: (callback: FileCallback) => callback(mockFile),
})
}
const result = await traverseFileEntry(mockEntry)
expect(result).toHaveLength(1)
@@ -241,11 +232,11 @@ describe('image-uploader utils', () => {
it('should resolve with file array with prefix for nested file', async () => {
const mockFile: MockFile = { name: 'test.png' }
const mockEntry = createMockEntry({
const mockEntry = {
isFile: true,
isDirectory: false,
file: (callback: FileCallback) => callback(mockFile),
})
}
const result = await traverseFileEntry(mockEntry, 'folder/')
expect(result).toHaveLength(1)
@@ -253,24 +244,24 @@ describe('image-uploader utils', () => {
})
it('should resolve empty array for unknown entry type', async () => {
const mockEntry = createMockEntry({
const mockEntry = {
isFile: false,
isDirectory: false,
})
}
const result = await traverseFileEntry(mockEntry)
expect(result).toEqual([])
})
it('should handle directory with no files', async () => {
const mockEntry = createMockEntry({
const mockEntry = {
isFile: false,
isDirectory: true,
name: 'empty-folder',
createReader: () => ({
readEntries: (callback: EntriesCallback) => callback([]),
}),
})
}
const result = await traverseFileEntry(mockEntry)
expect(result).toEqual([])
@@ -280,20 +271,20 @@ describe('image-uploader utils', () => {
const mockFile1: MockFile = { name: 'file1.png' }
const mockFile2: MockFile = { name: 'file2.png' }
const mockFileEntry1 = createMockEntry({
const mockFileEntry1 = {
isFile: true,
isDirectory: false,
file: (callback: FileCallback) => callback(mockFile1),
})
}
const mockFileEntry2 = createMockEntry({
const mockFileEntry2 = {
isFile: true,
isDirectory: false,
file: (callback: FileCallback) => callback(mockFile2),
})
}
let readCount = 0
const mockEntry = createMockEntry({
const mockEntry = {
isFile: false,
isDirectory: true,
name: 'folder',
@@ -301,14 +292,14 @@ describe('image-uploader utils', () => {
readEntries: (callback: EntriesCallback) => {
if (readCount === 0) {
readCount++
callback([mockFileEntry1, mockFileEntry2])
callback([mockFileEntry1, mockFileEntry2] as unknown as FileSystemEntry[])
}
else {
callback([])
}
},
}),
})
}
const result = await traverseFileEntry(mockEntry)
expect(result).toHaveLength(2)

View File

@@ -18,17 +18,17 @@ type FileWithPath = {
relativePath?: string
} & File
export const traverseFileEntry = (entry: FileSystemEntry, prefix = ''): Promise<FileWithPath[]> => {
export const traverseFileEntry = (entry: any, prefix = ''): Promise<FileWithPath[]> => {
return new Promise((resolve) => {
if (entry.isFile) {
(entry as FileSystemFileEntry).file((file: FileWithPath) => {
entry.file((file: FileWithPath) => {
file.relativePath = `${prefix}${file.name}`
resolve([file])
})
}
else if (entry.isDirectory) {
const reader = (entry as FileSystemDirectoryEntry).createReader()
const entries: FileSystemEntry[] = []
const reader = entry.createReader()
const entries: any[] = []
const read = () => {
reader.readEntries(async (results: FileSystemEntry[]) => {
if (!results.length) {

View File

@@ -1,218 +0,0 @@
'use client'
import { useDebounceFn } from 'ahooks'
import { useRouter } from 'next/navigation'
import { useCallback, useMemo, useRef, useState } from 'react'
import { useTranslation } from 'react-i18next'
import { useContext } from 'use-context-selector'
import { ToastContext } from '@/app/components/base/toast'
import { usePluginDependencies } from '@/app/components/workflow/plugin-dependency/hooks'
import {
DSLImportMode,
DSLImportStatus,
} from '@/models/app'
import { useImportPipelineDSL, useImportPipelineDSLConfirm } from '@/service/use-pipeline'
export enum CreateFromDSLModalTab {
FROM_FILE = 'from-file',
FROM_URL = 'from-url',
}
export type UseDSLImportOptions = {
activeTab?: CreateFromDSLModalTab
dslUrl?: string
onSuccess?: () => void
onClose?: () => void
}
export type DSLVersions = {
importedVersion: string
systemVersion: string
}
export const useDSLImport = ({
activeTab = CreateFromDSLModalTab.FROM_FILE,
dslUrl = '',
onSuccess,
onClose,
}: UseDSLImportOptions) => {
const { push } = useRouter()
const { t } = useTranslation()
const { notify } = useContext(ToastContext)
const [currentFile, setDSLFile] = useState<File>()
const [fileContent, setFileContent] = useState<string>()
const [currentTab, setCurrentTab] = useState(activeTab)
const [dslUrlValue, setDslUrlValue] = useState(dslUrl)
const [showConfirmModal, setShowConfirmModal] = useState(false)
const [versions, setVersions] = useState<DSLVersions>()
const [importId, setImportId] = useState<string>()
const [isConfirming, setIsConfirming] = useState(false)
const { handleCheckPluginDependencies } = usePluginDependencies()
const isCreatingRef = useRef(false)
const { mutateAsync: importDSL } = useImportPipelineDSL()
const { mutateAsync: importDSLConfirm } = useImportPipelineDSLConfirm()
const readFile = useCallback((file: File) => {
const reader = new FileReader()
reader.onload = (event) => {
const content = event.target?.result
setFileContent(content as string)
}
reader.readAsText(file)
}, [])
const handleFile = useCallback((file?: File) => {
setDSLFile(file)
if (file)
readFile(file)
if (!file)
setFileContent('')
}, [readFile])
const onCreate = useCallback(async () => {
if (currentTab === CreateFromDSLModalTab.FROM_FILE && !currentFile)
return
if (currentTab === CreateFromDSLModalTab.FROM_URL && !dslUrlValue)
return
if (isCreatingRef.current)
return
isCreatingRef.current = true
let response
if (currentTab === CreateFromDSLModalTab.FROM_FILE) {
response = await importDSL({
mode: DSLImportMode.YAML_CONTENT,
yaml_content: fileContent || '',
})
}
if (currentTab === CreateFromDSLModalTab.FROM_URL) {
response = await importDSL({
mode: DSLImportMode.YAML_URL,
yaml_url: dslUrlValue || '',
})
}
if (!response) {
notify({ type: 'error', message: t('creation.errorTip', { ns: 'datasetPipeline' }) })
isCreatingRef.current = false
return
}
const { id, status, pipeline_id, dataset_id, imported_dsl_version, current_dsl_version } = response
if (status === DSLImportStatus.COMPLETED || status === DSLImportStatus.COMPLETED_WITH_WARNINGS) {
onSuccess?.()
onClose?.()
notify({
type: status === DSLImportStatus.COMPLETED ? 'success' : 'warning',
message: t(status === DSLImportStatus.COMPLETED ? 'creation.successTip' : 'creation.caution', { ns: 'datasetPipeline' }),
children: status === DSLImportStatus.COMPLETED_WITH_WARNINGS && t('newApp.appCreateDSLWarning', { ns: 'app' }),
})
if (pipeline_id)
await handleCheckPluginDependencies(pipeline_id, true)
push(`/datasets/${dataset_id}/pipeline`)
isCreatingRef.current = false
}
else if (status === DSLImportStatus.PENDING) {
setVersions({
importedVersion: imported_dsl_version ?? '',
systemVersion: current_dsl_version ?? '',
})
onClose?.()
setTimeout(() => {
setShowConfirmModal(true)
}, 300)
setImportId(id)
isCreatingRef.current = false
}
else {
notify({ type: 'error', message: t('creation.errorTip', { ns: 'datasetPipeline' }) })
isCreatingRef.current = false
}
}, [
currentTab,
currentFile,
dslUrlValue,
fileContent,
importDSL,
notify,
t,
onSuccess,
onClose,
handleCheckPluginDependencies,
push,
])
const { run: handleCreateApp } = useDebounceFn(onCreate, { wait: 300 })
const onDSLConfirm = useCallback(async () => {
if (!importId)
return
setIsConfirming(true)
const response = await importDSLConfirm(importId)
setIsConfirming(false)
if (!response) {
notify({ type: 'error', message: t('creation.errorTip', { ns: 'datasetPipeline' }) })
return
}
const { status, pipeline_id, dataset_id } = response
if (status === DSLImportStatus.COMPLETED) {
onSuccess?.()
setShowConfirmModal(false)
notify({
type: 'success',
message: t('creation.successTip', { ns: 'datasetPipeline' }),
})
if (pipeline_id)
await handleCheckPluginDependencies(pipeline_id, true)
push(`/datasets/${dataset_id}/pipeline`)
}
else if (status === DSLImportStatus.FAILED) {
notify({ type: 'error', message: t('creation.errorTip', { ns: 'datasetPipeline' }) })
}
}, [importId, importDSLConfirm, notify, t, onSuccess, handleCheckPluginDependencies, push])
const handleCancelConfirm = useCallback(() => {
setShowConfirmModal(false)
}, [])
const buttonDisabled = useMemo(() => {
if (currentTab === CreateFromDSLModalTab.FROM_FILE)
return !currentFile
if (currentTab === CreateFromDSLModalTab.FROM_URL)
return !dslUrlValue
return false
}, [currentTab, currentFile, dslUrlValue])
return {
// State
currentFile,
currentTab,
dslUrlValue,
showConfirmModal,
versions,
buttonDisabled,
isConfirming,
// Actions
setCurrentTab,
setDslUrlValue,
handleFile,
handleCreateApp,
onDSLConfirm,
handleCancelConfirm,
}
}

View File

@@ -1,18 +1,24 @@
'use client'
import { useKeyPress } from 'ahooks'
import { useDebounceFn, useKeyPress } from 'ahooks'
import { noop } from 'es-toolkit/function'
import { useRouter } from 'next/navigation'
import { useMemo, useRef, useState } from 'react'
import { useTranslation } from 'react-i18next'
import { useContext } from 'use-context-selector'
import Button from '@/app/components/base/button'
import Input from '@/app/components/base/input'
import Modal from '@/app/components/base/modal'
import DSLConfirmModal from './dsl-confirm-modal'
import { ToastContext } from '@/app/components/base/toast'
import { usePluginDependencies } from '@/app/components/workflow/plugin-dependency/hooks'
import {
DSLImportMode,
DSLImportStatus,
} from '@/models/app'
import { useImportPipelineDSL, useImportPipelineDSLConfirm } from '@/service/use-pipeline'
import Header from './header'
import { CreateFromDSLModalTab, useDSLImport } from './hooks/use-dsl-import'
import Tab from './tab'
import Uploader from './uploader'
export { CreateFromDSLModalTab }
type CreateFromDSLModalProps = {
show: boolean
onSuccess?: () => void
@@ -21,6 +27,11 @@ type CreateFromDSLModalProps = {
dslUrl?: string
}
export enum CreateFromDSLModalTab {
FROM_FILE = 'from-file',
FROM_URL = 'from-url',
}
const CreateFromDSLModal = ({
show,
onSuccess,
@@ -28,34 +39,150 @@ const CreateFromDSLModal = ({
activeTab = CreateFromDSLModalTab.FROM_FILE,
dslUrl = '',
}: CreateFromDSLModalProps) => {
const { push } = useRouter()
const { t } = useTranslation()
const { notify } = useContext(ToastContext)
const [currentFile, setDSLFile] = useState<File>()
const [fileContent, setFileContent] = useState<string>()
const [currentTab, setCurrentTab] = useState(activeTab)
const [dslUrlValue, setDslUrlValue] = useState(dslUrl)
const [showErrorModal, setShowErrorModal] = useState(false)
const [versions, setVersions] = useState<{ importedVersion: string, systemVersion: string }>()
const [importId, setImportId] = useState<string>()
const { handleCheckPluginDependencies } = usePluginDependencies()
const {
currentFile,
currentTab,
dslUrlValue,
showConfirmModal,
versions,
buttonDisabled,
isConfirming,
setCurrentTab,
setDslUrlValue,
handleFile,
handleCreateApp,
onDSLConfirm,
handleCancelConfirm,
} = useDSLImport({
activeTab,
dslUrl,
onSuccess,
onClose,
})
const readFile = (file: File) => {
const reader = new FileReader()
reader.onload = function (event) {
const content = event.target?.result
setFileContent(content as string)
}
reader.readAsText(file)
}
const handleFile = (file?: File) => {
setDSLFile(file)
if (file)
readFile(file)
if (!file)
setFileContent('')
}
const isCreatingRef = useRef(false)
const { mutateAsync: importDSL } = useImportPipelineDSL()
const onCreate = async () => {
if (currentTab === CreateFromDSLModalTab.FROM_FILE && !currentFile)
return
if (currentTab === CreateFromDSLModalTab.FROM_URL && !dslUrlValue)
return
if (isCreatingRef.current)
return
isCreatingRef.current = true
let response
if (currentTab === CreateFromDSLModalTab.FROM_FILE) {
response = await importDSL({
mode: DSLImportMode.YAML_CONTENT,
yaml_content: fileContent || '',
})
}
if (currentTab === CreateFromDSLModalTab.FROM_URL) {
response = await importDSL({
mode: DSLImportMode.YAML_URL,
yaml_url: dslUrlValue || '',
})
}
if (!response) {
notify({ type: 'error', message: t('creation.errorTip', { ns: 'datasetPipeline' }) })
isCreatingRef.current = false
return
}
const { id, status, pipeline_id, dataset_id, imported_dsl_version, current_dsl_version } = response
if (status === DSLImportStatus.COMPLETED || status === DSLImportStatus.COMPLETED_WITH_WARNINGS) {
if (onSuccess)
onSuccess()
if (onClose)
onClose()
notify({
type: status === DSLImportStatus.COMPLETED ? 'success' : 'warning',
message: t(status === DSLImportStatus.COMPLETED ? 'creation.successTip' : 'creation.caution', { ns: 'datasetPipeline' }),
children: status === DSLImportStatus.COMPLETED_WITH_WARNINGS && t('newApp.appCreateDSLWarning', { ns: 'app' }),
})
if (pipeline_id)
await handleCheckPluginDependencies(pipeline_id, true)
push(`/datasets/${dataset_id}/pipeline`)
isCreatingRef.current = false
}
else if (status === DSLImportStatus.PENDING) {
setVersions({
importedVersion: imported_dsl_version ?? '',
systemVersion: current_dsl_version ?? '',
})
if (onClose)
onClose()
setTimeout(() => {
setShowErrorModal(true)
}, 300)
setImportId(id)
isCreatingRef.current = false
}
else {
notify({ type: 'error', message: t('creation.errorTip', { ns: 'datasetPipeline' }) })
isCreatingRef.current = false
}
}
const { run: handleCreateApp } = useDebounceFn(onCreate, { wait: 300 })
useKeyPress('esc', () => {
if (show && !showConfirmModal)
if (show && !showErrorModal)
onClose()
})
const { mutateAsync: importDSLConfirm } = useImportPipelineDSLConfirm()
const onDSLConfirm = async () => {
if (!importId)
return
const response = await importDSLConfirm(importId)
if (!response) {
notify({ type: 'error', message: t('creation.errorTip', { ns: 'datasetPipeline' }) })
return
}
const { status, pipeline_id, dataset_id } = response
if (status === DSLImportStatus.COMPLETED) {
if (onSuccess)
onSuccess()
if (onClose)
onClose()
notify({
type: 'success',
message: t('creation.successTip', { ns: 'datasetPipeline' }),
})
if (pipeline_id)
await handleCheckPluginDependencies(pipeline_id, true)
push(`/datasets/${dataset_id}/pipeline`)
}
else if (status === DSLImportStatus.FAILED) {
notify({ type: 'error', message: t('creation.errorTip', { ns: 'datasetPipeline' }) })
}
}
const buttonDisabled = useMemo(() => {
if (currentTab === CreateFromDSLModalTab.FROM_FILE)
return !currentFile
if (currentTab === CreateFromDSLModalTab.FROM_URL)
return !dslUrlValue
return false
}, [currentTab, currentFile, dslUrlValue])
return (
<>
<Modal
@@ -69,25 +196,29 @@ const CreateFromDSLModal = ({
setCurrentTab={setCurrentTab}
/>
<div className="px-6 py-4">
{currentTab === CreateFromDSLModalTab.FROM_FILE && (
<Uploader
className="mt-0"
file={currentFile}
updateFile={handleFile}
/>
)}
{currentTab === CreateFromDSLModalTab.FROM_URL && (
<div>
<div className="system-md-semibold leading6 mb-1 text-text-secondary">
DSL URL
</div>
<Input
placeholder={t('importFromDSLUrlPlaceholder', { ns: 'app' }) || ''}
value={dslUrlValue}
onChange={e => setDslUrlValue(e.target.value)}
{
currentTab === CreateFromDSLModalTab.FROM_FILE && (
<Uploader
className="mt-0"
file={currentFile}
updateFile={handleFile}
/>
</div>
)}
)
}
{
currentTab === CreateFromDSLModalTab.FROM_URL && (
<div>
<div className="system-md-semibold leading6 mb-1 text-text-secondary">
DSL URL
</div>
<Input
placeholder={t('importFromDSLUrlPlaceholder', { ns: 'app' }) || ''}
value={dslUrlValue}
onChange={e => setDslUrlValue(e.target.value)}
/>
</div>
)
}
</div>
<div className="flex justify-end gap-x-2 p-6 pt-5">
<Button onClick={onClose}>
@@ -103,14 +234,32 @@ const CreateFromDSLModal = ({
</Button>
</div>
</Modal>
{showConfirmModal && (
<DSLConfirmModal
versions={versions}
onCancel={handleCancelConfirm}
onConfirm={onDSLConfirm}
confirmDisabled={isConfirming}
/>
)}
<Modal
isShow={showErrorModal}
onClose={() => setShowErrorModal(false)}
className="w-[480px]"
>
<div className="flex flex-col items-start gap-2 self-stretch pb-4">
<div className="title-2xl-semi-bold text-text-primary">{t('newApp.appCreateDSLErrorTitle', { ns: 'app' })}</div>
<div className="system-md-regular flex grow flex-col text-text-secondary">
<div>{t('newApp.appCreateDSLErrorPart1', { ns: 'app' })}</div>
<div>{t('newApp.appCreateDSLErrorPart2', { ns: 'app' })}</div>
<br />
<div>
{t('newApp.appCreateDSLErrorPart3', { ns: 'app' })}
<span className="system-md-medium">{versions?.importedVersion}</span>
</div>
<div>
{t('newApp.appCreateDSLErrorPart4', { ns: 'app' })}
<span className="system-md-medium">{versions?.systemVersion}</span>
</div>
</div>
</div>
<div className="flex items-start justify-end gap-2 self-stretch pt-6">
<Button variant="secondary" onClick={() => setShowErrorModal(false)}>{t('newApp.Cancel', { ns: 'app' })}</Button>
<Button variant="primary" destructive onClick={onDSLConfirm}>{t('newApp.Confirm', { ns: 'app' })}</Button>
</div>
</Modal>
</>
)
}

View File

@@ -1,334 +0,0 @@
import type { FileListItemProps } from './file-list-item'
import type { CustomFile as File, FileItem } from '@/models/datasets'
import { fireEvent, render, screen } from '@testing-library/react'
import { beforeEach, describe, expect, it, vi } from 'vitest'
import { PROGRESS_COMPLETE, PROGRESS_ERROR, PROGRESS_NOT_STARTED } from '../constants'
import FileListItem from './file-list-item'
// Mock theme hook - can be changed per test
let mockTheme = 'light'
vi.mock('@/hooks/use-theme', () => ({
default: () => ({ theme: mockTheme }),
}))
// Mock theme types
vi.mock('@/types/app', () => ({
Theme: { dark: 'dark', light: 'light' },
}))
// Mock SimplePieChart with dynamic import handling
vi.mock('next/dynamic', () => ({
default: () => {
const DynamicComponent = ({ percentage, stroke, fill }: { percentage: number, stroke: string, fill: string }) => (
<div data-testid="pie-chart" data-percentage={percentage} data-stroke={stroke} data-fill={fill}>
Pie Chart:
{' '}
{percentage}
%
</div>
)
DynamicComponent.displayName = 'SimplePieChart'
return DynamicComponent
},
}))
// Mock DocumentFileIcon
vi.mock('@/app/components/datasets/common/document-file-icon', () => ({
default: ({ name, extension, size }: { name: string, extension: string, size: string }) => (
<div data-testid="document-icon" data-name={name} data-extension={extension} data-size={size}>
Document Icon
</div>
),
}))
describe('FileListItem', () => {
const createMockFile = (overrides: Partial<File> = {}): File => ({
name: 'test-document.pdf',
size: 1024 * 100, // 100KB
type: 'application/pdf',
lastModified: Date.now(),
...overrides,
} as File)
const createMockFileItem = (overrides: Partial<FileItem> = {}): FileItem => ({
fileID: 'file-123',
file: createMockFile(overrides.file as Partial<File>),
progress: PROGRESS_NOT_STARTED,
...overrides,
})
const defaultProps: FileListItemProps = {
fileItem: createMockFileItem(),
onPreview: vi.fn(),
onRemove: vi.fn(),
}
beforeEach(() => {
vi.clearAllMocks()
mockTheme = 'light'
})
describe('rendering', () => {
it('should render the file item container', () => {
const { container } = render(<FileListItem {...defaultProps} />)
const item = container.firstChild as HTMLElement
expect(item).toHaveClass('flex', 'h-12', 'items-center', 'rounded-lg')
})
it('should render document icon with correct props', () => {
render(<FileListItem {...defaultProps} />)
const icon = screen.getByTestId('document-icon')
expect(icon).toBeInTheDocument()
expect(icon).toHaveAttribute('data-name', 'test-document.pdf')
expect(icon).toHaveAttribute('data-extension', 'pdf')
expect(icon).toHaveAttribute('data-size', 'xl')
})
it('should render file name', () => {
render(<FileListItem {...defaultProps} />)
expect(screen.getByText('test-document.pdf')).toBeInTheDocument()
})
it('should render file extension in uppercase via CSS class', () => {
render(<FileListItem {...defaultProps} />)
const extensionSpan = screen.getByText('pdf')
expect(extensionSpan).toBeInTheDocument()
expect(extensionSpan).toHaveClass('uppercase')
})
it('should render file size', () => {
render(<FileListItem {...defaultProps} />)
// Default mock file is 100KB (1024 * 100 bytes)
expect(screen.getByText('100.00 KB')).toBeInTheDocument()
})
it('should render delete button', () => {
const { container } = render(<FileListItem {...defaultProps} />)
const deleteButton = container.querySelector('.cursor-pointer')
expect(deleteButton).toBeInTheDocument()
})
})
describe('progress states', () => {
it('should show progress chart when uploading (0-99)', () => {
const fileItem = createMockFileItem({ progress: 50 })
render(<FileListItem {...defaultProps} fileItem={fileItem} />)
const pieChart = screen.getByTestId('pie-chart')
expect(pieChart).toBeInTheDocument()
expect(pieChart).toHaveAttribute('data-percentage', '50')
})
it('should show progress chart at 0%', () => {
const fileItem = createMockFileItem({ progress: 0 })
render(<FileListItem {...defaultProps} fileItem={fileItem} />)
const pieChart = screen.getByTestId('pie-chart')
expect(pieChart).toHaveAttribute('data-percentage', '0')
})
it('should not show progress chart when complete (100)', () => {
const fileItem = createMockFileItem({ progress: PROGRESS_COMPLETE })
render(<FileListItem {...defaultProps} fileItem={fileItem} />)
expect(screen.queryByTestId('pie-chart')).not.toBeInTheDocument()
})
it('should not show progress chart when not started (-1)', () => {
const fileItem = createMockFileItem({ progress: PROGRESS_NOT_STARTED })
render(<FileListItem {...defaultProps} fileItem={fileItem} />)
expect(screen.queryByTestId('pie-chart')).not.toBeInTheDocument()
})
})
describe('error state', () => {
it('should show error indicator when progress is PROGRESS_ERROR', () => {
const fileItem = createMockFileItem({ progress: PROGRESS_ERROR })
const { container } = render(<FileListItem {...defaultProps} fileItem={fileItem} />)
const errorIndicator = container.querySelector('.text-text-destructive')
expect(errorIndicator).toBeInTheDocument()
})
it('should not show error indicator when not in error state', () => {
const { container } = render(<FileListItem {...defaultProps} />)
const errorIndicator = container.querySelector('.text-text-destructive')
expect(errorIndicator).not.toBeInTheDocument()
})
})
describe('theme handling', () => {
it('should use correct chart color for light theme', () => {
mockTheme = 'light'
const fileItem = createMockFileItem({ progress: 50 })
render(<FileListItem {...defaultProps} fileItem={fileItem} />)
const pieChart = screen.getByTestId('pie-chart')
expect(pieChart).toHaveAttribute('data-stroke', '#296dff')
expect(pieChart).toHaveAttribute('data-fill', '#296dff')
})
it('should use correct chart color for dark theme', () => {
mockTheme = 'dark'
const fileItem = createMockFileItem({ progress: 50 })
render(<FileListItem {...defaultProps} fileItem={fileItem} />)
const pieChart = screen.getByTestId('pie-chart')
expect(pieChart).toHaveAttribute('data-stroke', '#5289ff')
expect(pieChart).toHaveAttribute('data-fill', '#5289ff')
})
})
describe('event handlers', () => {
it('should call onPreview when item is clicked with file id', () => {
const onPreview = vi.fn()
const fileItem = createMockFileItem({
file: createMockFile({ id: 'uploaded-id' } as Partial<File>),
})
render(<FileListItem {...defaultProps} fileItem={fileItem} onPreview={onPreview} />)
const item = screen.getByText('test-document.pdf').closest('[class*="flex h-12"]')!
fireEvent.click(item)
expect(onPreview).toHaveBeenCalledTimes(1)
expect(onPreview).toHaveBeenCalledWith(fileItem.file)
})
it('should not call onPreview when file has no id', () => {
const onPreview = vi.fn()
const fileItem = createMockFileItem()
render(<FileListItem {...defaultProps} fileItem={fileItem} onPreview={onPreview} />)
const item = screen.getByText('test-document.pdf').closest('[class*="flex h-12"]')!
fireEvent.click(item)
expect(onPreview).not.toHaveBeenCalled()
})
it('should call onRemove when delete button is clicked', () => {
const onRemove = vi.fn()
const fileItem = createMockFileItem()
const { container } = render(<FileListItem {...defaultProps} fileItem={fileItem} onRemove={onRemove} />)
const deleteButton = container.querySelector('.cursor-pointer')!
fireEvent.click(deleteButton)
expect(onRemove).toHaveBeenCalledTimes(1)
expect(onRemove).toHaveBeenCalledWith('file-123')
})
it('should stop propagation when delete button is clicked', () => {
const onPreview = vi.fn()
const onRemove = vi.fn()
const fileItem = createMockFileItem({
file: createMockFile({ id: 'uploaded-id' } as Partial<File>),
})
const { container } = render(<FileListItem {...defaultProps} fileItem={fileItem} onPreview={onPreview} onRemove={onRemove} />)
const deleteButton = container.querySelector('.cursor-pointer')!
fireEvent.click(deleteButton)
expect(onRemove).toHaveBeenCalledTimes(1)
expect(onPreview).not.toHaveBeenCalled()
})
})
describe('file type handling', () => {
it('should handle files with multiple dots in name', () => {
const fileItem = createMockFileItem({
file: createMockFile({ name: 'my.document.file.docx' }),
})
render(<FileListItem {...defaultProps} fileItem={fileItem} />)
expect(screen.getByText('my.document.file.docx')).toBeInTheDocument()
expect(screen.getByText('docx')).toBeInTheDocument()
})
it('should handle files without extension', () => {
const fileItem = createMockFileItem({
file: createMockFile({ name: 'README' }),
})
render(<FileListItem {...defaultProps} fileItem={fileItem} />)
// File name appears once, and extension area shows empty string
expect(screen.getByText('README')).toBeInTheDocument()
})
it('should handle various file extensions', () => {
const extensions = ['txt', 'md', 'json', 'csv', 'xlsx']
extensions.forEach((ext) => {
const fileItem = createMockFileItem({
file: createMockFile({ name: `file.${ext}` }),
})
const { unmount } = render(<FileListItem {...defaultProps} fileItem={fileItem} />)
expect(screen.getByText(ext)).toBeInTheDocument()
unmount()
})
})
})
describe('file size display', () => {
it('should display size in KB for small files', () => {
const fileItem = createMockFileItem({
file: createMockFile({ size: 5 * 1024 }),
})
render(<FileListItem {...defaultProps} fileItem={fileItem} />)
expect(screen.getByText('5.00 KB')).toBeInTheDocument()
})
it('should display size in MB for larger files', () => {
const fileItem = createMockFileItem({
file: createMockFile({ size: 5 * 1024 * 1024 }),
})
render(<FileListItem {...defaultProps} fileItem={fileItem} />)
expect(screen.getByText('5.00 MB')).toBeInTheDocument()
})
})
describe('upload progress values', () => {
it('should show chart at progress 1', () => {
const fileItem = createMockFileItem({ progress: 1 })
render(<FileListItem {...defaultProps} fileItem={fileItem} />)
expect(screen.getByTestId('pie-chart')).toBeInTheDocument()
})
it('should show chart at progress 99', () => {
const fileItem = createMockFileItem({ progress: 99 })
render(<FileListItem {...defaultProps} fileItem={fileItem} />)
expect(screen.getByTestId('pie-chart')).toHaveAttribute('data-percentage', '99')
})
it('should not show chart at progress 100', () => {
const fileItem = createMockFileItem({ progress: 100 })
render(<FileListItem {...defaultProps} fileItem={fileItem} />)
expect(screen.queryByTestId('pie-chart')).not.toBeInTheDocument()
})
})
describe('styling', () => {
it('should have proper shadow styling', () => {
const { container } = render(<FileListItem {...defaultProps} />)
const item = container.firstChild as HTMLElement
expect(item).toHaveClass('shadow-xs')
})
it('should have proper border styling', () => {
const { container } = render(<FileListItem {...defaultProps} />)
const item = container.firstChild as HTMLElement
expect(item).toHaveClass('border', 'border-components-panel-border')
})
it('should truncate long file names', () => {
const longFileName = 'this-is-a-very-long-file-name-that-should-be-truncated.pdf'
const fileItem = createMockFileItem({
file: createMockFile({ name: longFileName }),
})
render(<FileListItem {...defaultProps} fileItem={fileItem} />)
const nameElement = screen.getByText(longFileName)
expect(nameElement).toHaveClass('truncate')
})
})
})

View File

@@ -1,89 +0,0 @@
'use client'
import type { CustomFile as File, FileItem } from '@/models/datasets'
import { RiDeleteBinLine, RiErrorWarningFill } from '@remixicon/react'
import dynamic from 'next/dynamic'
import { useMemo } from 'react'
import DocumentFileIcon from '@/app/components/datasets/common/document-file-icon'
import useTheme from '@/hooks/use-theme'
import { Theme } from '@/types/app'
import { formatFileSize, getFileExtension } from '@/utils/format'
import { PROGRESS_COMPLETE, PROGRESS_ERROR } from '../constants'
const SimplePieChart = dynamic(() => import('@/app/components/base/simple-pie-chart'), { ssr: false })
export type FileListItemProps = {
fileItem: FileItem
onPreview: (file: File) => void
onRemove: (fileID: string) => void
}
const FileListItem = ({
fileItem,
onPreview,
onRemove,
}: FileListItemProps) => {
const { theme } = useTheme()
const chartColor = useMemo(() => theme === Theme.dark ? '#5289ff' : '#296dff', [theme])
const isUploading = fileItem.progress >= 0 && fileItem.progress < PROGRESS_COMPLETE
const isError = fileItem.progress === PROGRESS_ERROR
const handleClick = () => {
if (fileItem.file?.id)
onPreview(fileItem.file)
}
const handleRemove = (e: React.MouseEvent) => {
e.stopPropagation()
onRemove(fileItem.fileID)
}
return (
<div
onClick={handleClick}
className="flex h-12 max-w-[640px] items-center rounded-lg border border-components-panel-border bg-components-panel-on-panel-item-bg text-xs leading-3 text-text-tertiary shadow-xs"
>
<div className="flex w-12 shrink-0 items-center justify-center">
<DocumentFileIcon
size="xl"
className="shrink-0"
name={fileItem.file.name}
extension={getFileExtension(fileItem.file.name)}
/>
</div>
<div className="flex shrink grow flex-col gap-0.5">
<div className="flex w-full">
<div className="w-0 grow truncate text-sm leading-4 text-text-secondary">
{fileItem.file.name}
</div>
</div>
<div className="w-full truncate leading-3 text-text-tertiary">
<span className="uppercase">{getFileExtension(fileItem.file.name)}</span>
<span className="px-1 text-text-quaternary">·</span>
<span>{formatFileSize(fileItem.file.size)}</span>
</div>
</div>
<div className="flex w-16 shrink-0 items-center justify-end gap-1 pr-3">
{isUploading && (
<SimplePieChart
percentage={fileItem.progress}
stroke={chartColor}
fill={chartColor}
animationDuration={0}
/>
)}
{isError && (
<RiErrorWarningFill className="size-4 text-text-destructive" />
)}
<span
className="flex h-6 w-6 cursor-pointer items-center justify-center"
onClick={handleRemove}
>
<RiDeleteBinLine className="size-4 text-text-tertiary" />
</span>
</div>
</div>
)
}
export default FileListItem

View File

@@ -1,210 +0,0 @@
import type { RefObject } from 'react'
import type { UploadDropzoneProps } from './upload-dropzone'
import { fireEvent, render, screen } from '@testing-library/react'
import { beforeEach, describe, expect, it, vi } from 'vitest'
import UploadDropzone from './upload-dropzone'
// Helper to create mock ref objects for testing
const createMockRef = <T,>(value: T | null = null): RefObject<T | null> => ({ current: value })
// Mock react-i18next
vi.mock('react-i18next', () => ({
useTranslation: () => ({
t: (key: string, options?: Record<string, unknown>) => {
const translations: Record<string, string> = {
'stepOne.uploader.button': 'Drag and drop files, or',
'stepOne.uploader.buttonSingleFile': 'Drag and drop file, or',
'stepOne.uploader.browse': 'Browse',
'stepOne.uploader.tip': 'Supports {{supportTypes}}, Max {{size}}MB each, up to {{batchCount}} files at a time, {{totalCount}} files total',
}
let result = translations[key] || key
if (options && typeof options === 'object') {
Object.entries(options).forEach(([k, v]) => {
result = result.replace(`{{${k}}}`, String(v))
})
}
return result
},
}),
}))
describe('UploadDropzone', () => {
const defaultProps: UploadDropzoneProps = {
dropRef: createMockRef<HTMLDivElement>() as RefObject<HTMLDivElement | null>,
dragRef: createMockRef<HTMLDivElement>() as RefObject<HTMLDivElement | null>,
fileUploaderRef: createMockRef<HTMLInputElement>() as RefObject<HTMLInputElement | null>,
dragging: false,
supportBatchUpload: true,
supportTypesShowNames: 'PDF, DOCX, TXT',
fileUploadConfig: {
file_size_limit: 15,
batch_count_limit: 5,
file_upload_limit: 10,
},
acceptTypes: ['.pdf', '.docx', '.txt'],
onSelectFile: vi.fn(),
onFileChange: vi.fn(),
}
beforeEach(() => {
vi.clearAllMocks()
})
describe('rendering', () => {
it('should render the dropzone container', () => {
const { container } = render(<UploadDropzone {...defaultProps} />)
const dropzone = container.querySelector('[class*="border-dashed"]')
expect(dropzone).toBeInTheDocument()
})
it('should render hidden file input', () => {
render(<UploadDropzone {...defaultProps} />)
const input = document.getElementById('fileUploader') as HTMLInputElement
expect(input).toBeInTheDocument()
expect(input).toHaveClass('hidden')
expect(input).toHaveAttribute('type', 'file')
})
it('should render upload icon', () => {
render(<UploadDropzone {...defaultProps} />)
const icon = document.querySelector('svg')
expect(icon).toBeInTheDocument()
})
it('should render browse label when extensions are allowed', () => {
render(<UploadDropzone {...defaultProps} />)
expect(screen.getByText('Browse')).toBeInTheDocument()
})
it('should not render browse label when no extensions allowed', () => {
render(<UploadDropzone {...defaultProps} acceptTypes={[]} />)
expect(screen.queryByText('Browse')).not.toBeInTheDocument()
})
it('should render file size and count limits', () => {
render(<UploadDropzone {...defaultProps} />)
const tipText = screen.getByText(/Supports.*Max.*15MB/i)
expect(tipText).toBeInTheDocument()
})
})
describe('file input configuration', () => {
it('should allow multiple files when supportBatchUpload is true', () => {
render(<UploadDropzone {...defaultProps} supportBatchUpload={true} />)
const input = document.getElementById('fileUploader') as HTMLInputElement
expect(input).toHaveAttribute('multiple')
})
it('should not allow multiple files when supportBatchUpload is false', () => {
render(<UploadDropzone {...defaultProps} supportBatchUpload={false} />)
const input = document.getElementById('fileUploader') as HTMLInputElement
expect(input).not.toHaveAttribute('multiple')
})
it('should set accept attribute with correct types', () => {
render(<UploadDropzone {...defaultProps} acceptTypes={['.pdf', '.docx']} />)
const input = document.getElementById('fileUploader') as HTMLInputElement
expect(input).toHaveAttribute('accept', '.pdf,.docx')
})
})
describe('text content', () => {
it('should show batch upload text when supportBatchUpload is true', () => {
render(<UploadDropzone {...defaultProps} supportBatchUpload={true} />)
expect(screen.getByText(/Drag and drop files/i)).toBeInTheDocument()
})
it('should show single file text when supportBatchUpload is false', () => {
render(<UploadDropzone {...defaultProps} supportBatchUpload={false} />)
expect(screen.getByText(/Drag and drop file/i)).toBeInTheDocument()
})
})
describe('dragging state', () => {
it('should apply dragging styles when dragging is true', () => {
const { container } = render(<UploadDropzone {...defaultProps} dragging={true} />)
const dropzone = container.querySelector('[class*="border-components-dropzone-border-accent"]')
expect(dropzone).toBeInTheDocument()
})
it('should render drag overlay when dragging', () => {
const dragRef = createMockRef<HTMLDivElement>()
render(<UploadDropzone {...defaultProps} dragging={true} dragRef={dragRef as RefObject<HTMLDivElement | null>} />)
const overlay = document.querySelector('.absolute.left-0.top-0')
expect(overlay).toBeInTheDocument()
})
it('should not render drag overlay when not dragging', () => {
render(<UploadDropzone {...defaultProps} dragging={false} />)
const overlay = document.querySelector('.absolute.left-0.top-0')
expect(overlay).not.toBeInTheDocument()
})
})
describe('event handlers', () => {
it('should call onSelectFile when browse label is clicked', () => {
const onSelectFile = vi.fn()
render(<UploadDropzone {...defaultProps} onSelectFile={onSelectFile} />)
const browseLabel = screen.getByText('Browse')
fireEvent.click(browseLabel)
expect(onSelectFile).toHaveBeenCalledTimes(1)
})
it('should call onFileChange when files are selected', () => {
const onFileChange = vi.fn()
render(<UploadDropzone {...defaultProps} onFileChange={onFileChange} />)
const input = document.getElementById('fileUploader') as HTMLInputElement
const file = new File(['content'], 'test.pdf', { type: 'application/pdf' })
fireEvent.change(input, { target: { files: [file] } })
expect(onFileChange).toHaveBeenCalledTimes(1)
})
})
describe('refs', () => {
it('should attach dropRef to drop container', () => {
const dropRef = createMockRef<HTMLDivElement>()
render(<UploadDropzone {...defaultProps} dropRef={dropRef as RefObject<HTMLDivElement | null>} />)
expect(dropRef.current).toBeInstanceOf(HTMLDivElement)
})
it('should attach fileUploaderRef to input element', () => {
const fileUploaderRef = createMockRef<HTMLInputElement>()
render(<UploadDropzone {...defaultProps} fileUploaderRef={fileUploaderRef as RefObject<HTMLInputElement | null>} />)
expect(fileUploaderRef.current).toBeInstanceOf(HTMLInputElement)
})
it('should attach dragRef to overlay when dragging', () => {
const dragRef = createMockRef<HTMLDivElement>()
render(<UploadDropzone {...defaultProps} dragging={true} dragRef={dragRef as RefObject<HTMLDivElement | null>} />)
expect(dragRef.current).toBeInstanceOf(HTMLDivElement)
})
})
describe('styling', () => {
it('should have base dropzone styling', () => {
const { container } = render(<UploadDropzone {...defaultProps} />)
const dropzone = container.querySelector('[class*="border-dashed"]')
expect(dropzone).toBeInTheDocument()
expect(dropzone).toHaveClass('rounded-xl')
})
it('should have cursor-pointer on browse label', () => {
render(<UploadDropzone {...defaultProps} />)
const browseLabel = screen.getByText('Browse')
expect(browseLabel).toHaveClass('cursor-pointer')
})
})
describe('accessibility', () => {
it('should have an accessible file input', () => {
render(<UploadDropzone {...defaultProps} />)
const input = document.getElementById('fileUploader') as HTMLInputElement
expect(input).toHaveAttribute('id', 'fileUploader')
})
})
})

View File

@@ -1,84 +0,0 @@
'use client'
import type { RefObject } from 'react'
import type { FileUploadConfig } from '../hooks/use-file-upload'
import { RiUploadCloud2Line } from '@remixicon/react'
import { useTranslation } from 'react-i18next'
import { cn } from '@/utils/classnames'
export type UploadDropzoneProps = {
dropRef: RefObject<HTMLDivElement | null>
dragRef: RefObject<HTMLDivElement | null>
fileUploaderRef: RefObject<HTMLInputElement | null>
dragging: boolean
supportBatchUpload: boolean
supportTypesShowNames: string
fileUploadConfig: FileUploadConfig
acceptTypes: string[]
onSelectFile: () => void
onFileChange: (e: React.ChangeEvent<HTMLInputElement>) => void
}
const UploadDropzone = ({
dropRef,
dragRef,
fileUploaderRef,
dragging,
supportBatchUpload,
supportTypesShowNames,
fileUploadConfig,
acceptTypes,
onSelectFile,
onFileChange,
}: UploadDropzoneProps) => {
const { t } = useTranslation()
return (
<>
<input
ref={fileUploaderRef}
id="fileUploader"
className="hidden"
type="file"
multiple={supportBatchUpload}
accept={acceptTypes.join(',')}
onChange={onFileChange}
/>
<div
ref={dropRef}
className={cn(
'relative mb-2 box-border flex min-h-20 max-w-[640px] flex-col items-center justify-center gap-1 rounded-xl border border-dashed border-components-dropzone-border bg-components-dropzone-bg px-4 py-3 text-xs leading-4 text-text-tertiary',
dragging && 'border-components-dropzone-border-accent bg-components-dropzone-bg-accent',
)}
>
<div className="flex min-h-5 items-center justify-center text-sm leading-4 text-text-secondary">
<RiUploadCloud2Line className="mr-2 size-5" />
<span>
{supportBatchUpload
? t('stepOne.uploader.button', { ns: 'datasetCreation' })
: t('stepOne.uploader.buttonSingleFile', { ns: 'datasetCreation' })}
{acceptTypes.length > 0 && (
<label
className="ml-1 cursor-pointer text-text-accent"
onClick={onSelectFile}
>
{t('stepOne.uploader.browse', { ns: 'datasetCreation' })}
</label>
)}
</span>
</div>
<div>
{t('stepOne.uploader.tip', {
ns: 'datasetCreation',
size: fileUploadConfig.file_size_limit,
supportTypes: supportTypesShowNames,
batchCount: fileUploadConfig.batch_count_limit,
totalCount: fileUploadConfig.file_upload_limit,
})}
</div>
{dragging && <div ref={dragRef} className="absolute left-0 top-0 h-full w-full" />}
</div>
</>
)
}
export default UploadDropzone

View File

@@ -1,3 +0,0 @@
export const PROGRESS_NOT_STARTED = -1
export const PROGRESS_ERROR = -2
export const PROGRESS_COMPLETE = 100

View File

@@ -1,921 +0,0 @@
import type { ReactNode } from 'react'
import type { CustomFile, FileItem } from '@/models/datasets'
import { act, render, renderHook, waitFor } from '@testing-library/react'
import { beforeEach, describe, expect, it, vi } from 'vitest'
import { ToastContext } from '@/app/components/base/toast'
import { PROGRESS_COMPLETE, PROGRESS_ERROR, PROGRESS_NOT_STARTED } from '../constants'
// Import after mocks
import { useFileUpload } from './use-file-upload'
// Mock notify function
const mockNotify = vi.fn()
const mockClose = vi.fn()
// Mock ToastContext
vi.mock('use-context-selector', async () => {
const actual = await vi.importActual<typeof import('use-context-selector')>('use-context-selector')
return {
...actual,
useContext: vi.fn(() => ({ notify: mockNotify, close: mockClose })),
}
})
// Mock upload service
const mockUpload = vi.fn()
vi.mock('@/service/base', () => ({
upload: (...args: unknown[]) => mockUpload(...args),
}))
// Mock file upload config
const mockFileUploadConfig = {
file_size_limit: 15,
batch_count_limit: 5,
file_upload_limit: 10,
}
const mockSupportTypes = {
allowed_extensions: ['pdf', 'docx', 'txt', 'md'],
}
vi.mock('@/service/use-common', () => ({
useFileUploadConfig: () => ({ data: mockFileUploadConfig }),
useFileSupportTypes: () => ({ data: mockSupportTypes }),
}))
// Mock i18n
vi.mock('react-i18next', () => ({
useTranslation: () => ({
t: (key: string) => key,
}),
}))
// Mock locale
vi.mock('@/context/i18n', () => ({
useLocale: () => 'en-US',
}))
vi.mock('@/i18n-config/language', () => ({
LanguagesSupported: ['en-US', 'zh-Hans'],
}))
// Mock config
vi.mock('@/config', () => ({
IS_CE_EDITION: false,
}))
// Mock file upload error message
vi.mock('@/app/components/base/file-uploader/utils', () => ({
getFileUploadErrorMessage: (_e: unknown, defaultMsg: string) => defaultMsg,
}))
const createWrapper = () => {
return ({ children }: { children: ReactNode }) => (
<ToastContext.Provider value={{ notify: mockNotify, close: mockClose }}>
{children}
</ToastContext.Provider>
)
}
describe('useFileUpload', () => {
const defaultOptions = {
fileList: [] as FileItem[],
prepareFileList: vi.fn(),
onFileUpdate: vi.fn(),
onFileListUpdate: vi.fn(),
onPreview: vi.fn(),
supportBatchUpload: true,
}
beforeEach(() => {
vi.clearAllMocks()
mockUpload.mockReset()
// Default mock to return a resolved promise to avoid unhandled rejections
mockUpload.mockResolvedValue({ id: 'default-id' })
mockNotify.mockReset()
})
describe('initialization', () => {
it('should initialize with default values', () => {
const { result } = renderHook(
() => useFileUpload(defaultOptions),
{ wrapper: createWrapper() },
)
expect(result.current.dragging).toBe(false)
expect(result.current.hideUpload).toBe(false)
expect(result.current.dropRef.current).toBeNull()
expect(result.current.dragRef.current).toBeNull()
expect(result.current.fileUploaderRef.current).toBeNull()
})
it('should set hideUpload true when not batch upload and has files', () => {
const { result } = renderHook(
() => useFileUpload({
...defaultOptions,
supportBatchUpload: false,
fileList: [{ fileID: 'file-1', file: {} as CustomFile, progress: 100 }],
}),
{ wrapper: createWrapper() },
)
expect(result.current.hideUpload).toBe(true)
})
it('should compute acceptTypes correctly', () => {
const { result } = renderHook(
() => useFileUpload(defaultOptions),
{ wrapper: createWrapper() },
)
expect(result.current.acceptTypes).toEqual(['.pdf', '.docx', '.txt', '.md'])
})
it('should compute supportTypesShowNames correctly', () => {
const { result } = renderHook(
() => useFileUpload(defaultOptions),
{ wrapper: createWrapper() },
)
expect(result.current.supportTypesShowNames).toContain('PDF')
expect(result.current.supportTypesShowNames).toContain('DOCX')
expect(result.current.supportTypesShowNames).toContain('TXT')
// 'md' is mapped to 'markdown' in the extensionMap
expect(result.current.supportTypesShowNames).toContain('MARKDOWN')
})
it('should set batch limit to 1 when not batch upload', () => {
const { result } = renderHook(
() => useFileUpload({
...defaultOptions,
supportBatchUpload: false,
}),
{ wrapper: createWrapper() },
)
expect(result.current.fileUploadConfig.batch_count_limit).toBe(1)
expect(result.current.fileUploadConfig.file_upload_limit).toBe(1)
})
})
describe('selectHandle', () => {
it('should trigger click on file input', () => {
const { result } = renderHook(
() => useFileUpload(defaultOptions),
{ wrapper: createWrapper() },
)
const mockClick = vi.fn()
const mockInput = { click: mockClick } as unknown as HTMLInputElement
Object.defineProperty(result.current.fileUploaderRef, 'current', {
value: mockInput,
writable: true,
})
act(() => {
result.current.selectHandle()
})
expect(mockClick).toHaveBeenCalled()
})
it('should do nothing when file input ref is null', () => {
const { result } = renderHook(
() => useFileUpload(defaultOptions),
{ wrapper: createWrapper() },
)
expect(() => {
act(() => {
result.current.selectHandle()
})
}).not.toThrow()
})
})
describe('handlePreview', () => {
it('should call onPreview when file has id', () => {
const onPreview = vi.fn()
const { result } = renderHook(
() => useFileUpload({ ...defaultOptions, onPreview }),
{ wrapper: createWrapper() },
)
const mockFile = { id: 'file-123', name: 'test.pdf', size: 1024 } as CustomFile
act(() => {
result.current.handlePreview(mockFile)
})
expect(onPreview).toHaveBeenCalledWith(mockFile)
})
it('should not call onPreview when file has no id', () => {
const onPreview = vi.fn()
const { result } = renderHook(
() => useFileUpload({ ...defaultOptions, onPreview }),
{ wrapper: createWrapper() },
)
const mockFile = { name: 'test.pdf', size: 1024 } as CustomFile
act(() => {
result.current.handlePreview(mockFile)
})
expect(onPreview).not.toHaveBeenCalled()
})
})
describe('removeFile', () => {
it('should call onFileListUpdate with filtered list', () => {
const onFileListUpdate = vi.fn()
const { result } = renderHook(
() => useFileUpload({ ...defaultOptions, onFileListUpdate }),
{ wrapper: createWrapper() },
)
act(() => {
result.current.removeFile('file-to-remove')
})
expect(onFileListUpdate).toHaveBeenCalled()
})
it('should clear file input value', () => {
const { result } = renderHook(
() => useFileUpload(defaultOptions),
{ wrapper: createWrapper() },
)
const mockInput = { value: 'some-file' } as HTMLInputElement
Object.defineProperty(result.current.fileUploaderRef, 'current', {
value: mockInput,
writable: true,
})
act(() => {
result.current.removeFile('file-123')
})
expect(mockInput.value).toBe('')
})
})
describe('fileChangeHandle', () => {
it('should handle valid files', async () => {
mockUpload.mockResolvedValue({ id: 'uploaded-id' })
const prepareFileList = vi.fn()
const { result } = renderHook(
() => useFileUpload({ ...defaultOptions, prepareFileList }),
{ wrapper: createWrapper() },
)
const mockFile = new File(['content'], 'test.pdf', { type: 'application/pdf' })
const event = {
target: { files: [mockFile] },
} as unknown as React.ChangeEvent<HTMLInputElement>
act(() => {
result.current.fileChangeHandle(event)
})
await waitFor(() => {
expect(prepareFileList).toHaveBeenCalled()
})
})
it('should limit files to batch count', () => {
const prepareFileList = vi.fn()
const { result } = renderHook(
() => useFileUpload({ ...defaultOptions, prepareFileList }),
{ wrapper: createWrapper() },
)
const files = Array.from({ length: 10 }, (_, i) =>
new File(['content'], `file${i}.pdf`, { type: 'application/pdf' }))
const event = {
target: { files },
} as unknown as React.ChangeEvent<HTMLInputElement>
act(() => {
result.current.fileChangeHandle(event)
})
// Should be called with at most batch_count_limit files
if (prepareFileList.mock.calls.length > 0) {
const calledFiles = prepareFileList.mock.calls[0][0]
expect(calledFiles.length).toBeLessThanOrEqual(mockFileUploadConfig.batch_count_limit)
}
})
it('should reject invalid file types', () => {
const { result } = renderHook(
() => useFileUpload(defaultOptions),
{ wrapper: createWrapper() },
)
const mockFile = new File(['content'], 'test.exe', { type: 'application/x-msdownload' })
const event = {
target: { files: [mockFile] },
} as unknown as React.ChangeEvent<HTMLInputElement>
act(() => {
result.current.fileChangeHandle(event)
})
expect(mockNotify).toHaveBeenCalledWith(
expect.objectContaining({ type: 'error' }),
)
})
it('should reject files exceeding size limit', () => {
const { result } = renderHook(
() => useFileUpload(defaultOptions),
{ wrapper: createWrapper() },
)
// Create a file larger than the limit (15MB)
const largeFile = new File([new ArrayBuffer(20 * 1024 * 1024)], 'large.pdf', { type: 'application/pdf' })
const event = {
target: { files: [largeFile] },
} as unknown as React.ChangeEvent<HTMLInputElement>
act(() => {
result.current.fileChangeHandle(event)
})
expect(mockNotify).toHaveBeenCalledWith(
expect.objectContaining({ type: 'error' }),
)
})
it('should handle null files', () => {
const prepareFileList = vi.fn()
const { result } = renderHook(
() => useFileUpload({ ...defaultOptions, prepareFileList }),
{ wrapper: createWrapper() },
)
const event = {
target: { files: null },
} as unknown as React.ChangeEvent<HTMLInputElement>
act(() => {
result.current.fileChangeHandle(event)
})
expect(prepareFileList).not.toHaveBeenCalled()
})
})
describe('drag and drop handlers', () => {
const TestDropzone = ({ options }: { options: typeof defaultOptions }) => {
const {
dropRef,
dragRef,
dragging,
} = useFileUpload(options)
return (
<div>
<div ref={dropRef} data-testid="dropzone">
{dragging && <div ref={dragRef} data-testid="drag-overlay" />}
</div>
<span data-testid="dragging">{String(dragging)}</span>
</div>
)
}
it('should set dragging true on dragenter', async () => {
const { getByTestId } = await act(async () =>
render(
<ToastContext.Provider value={{ notify: mockNotify, close: mockClose }}>
<TestDropzone options={defaultOptions} />
</ToastContext.Provider>,
),
)
const dropzone = getByTestId('dropzone')
await act(async () => {
const dragEnterEvent = new Event('dragenter', { bubbles: true, cancelable: true })
dropzone.dispatchEvent(dragEnterEvent)
})
expect(getByTestId('dragging').textContent).toBe('true')
})
it('should handle dragover event', async () => {
const { getByTestId } = await act(async () =>
render(
<ToastContext.Provider value={{ notify: mockNotify, close: mockClose }}>
<TestDropzone options={defaultOptions} />
</ToastContext.Provider>,
),
)
const dropzone = getByTestId('dropzone')
await act(async () => {
const dragOverEvent = new Event('dragover', { bubbles: true, cancelable: true })
dropzone.dispatchEvent(dragOverEvent)
})
expect(dropzone).toBeInTheDocument()
})
it('should set dragging false on dragleave from drag overlay', async () => {
const { getByTestId, queryByTestId } = await act(async () =>
render(
<ToastContext.Provider value={{ notify: mockNotify, close: mockClose }}>
<TestDropzone options={defaultOptions} />
</ToastContext.Provider>,
),
)
const dropzone = getByTestId('dropzone')
await act(async () => {
const dragEnterEvent = new Event('dragenter', { bubbles: true, cancelable: true })
dropzone.dispatchEvent(dragEnterEvent)
})
expect(getByTestId('dragging').textContent).toBe('true')
const dragOverlay = queryByTestId('drag-overlay')
if (dragOverlay) {
await act(async () => {
const dragLeaveEvent = new Event('dragleave', { bubbles: true, cancelable: true })
// Override target so the handler sees the leave as originating from the overlay (dragRef)
Object.defineProperty(dragLeaveEvent, 'target', { value: dragOverlay })
dropzone.dispatchEvent(dragLeaveEvent)
})
expect(getByTestId('dragging').textContent).toBe('false')
}
})
it('should handle drop with files', async () => {
mockUpload.mockResolvedValue({ id: 'uploaded-id' })
const prepareFileList = vi.fn()
const { getByTestId } = await act(async () =>
render(
<ToastContext.Provider value={{ notify: mockNotify, close: mockClose }}>
<TestDropzone options={{ ...defaultOptions, prepareFileList }} />
</ToastContext.Provider>,
),
)
const dropzone = getByTestId('dropzone')
const mockFile = new File(['content'], 'test.pdf', { type: 'application/pdf' })
await act(async () => {
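// jsdom has no DataTransfer implementation, so stub it onto a plain Event via defineProperty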
const dropEvent = new Event('drop', { bubbles: true, cancelable: true }) as Event & { dataTransfer: DataTransfer | null }
Object.defineProperty(dropEvent, 'dataTransfer', {
value: {
items: [{
getAsFile: () => mockFile,
webkitGetAsEntry: () => null,
}],
},
})
dropzone.dispatchEvent(dropEvent)
})
await waitFor(() => {
expect(prepareFileList).toHaveBeenCalled()
})
})
it('should handle drop without dataTransfer', async () => {
const prepareFileList = vi.fn()
const { getByTestId } = await act(async () =>
render(
<ToastContext.Provider value={{ notify: mockNotify, close: mockClose }}>
<TestDropzone options={{ ...defaultOptions, prepareFileList }} />
</ToastContext.Provider>,
),
)
const dropzone = getByTestId('dropzone')
await act(async () => {
const dropEvent = new Event('drop', { bubbles: true, cancelable: true }) as Event & { dataTransfer: DataTransfer | null }
Object.defineProperty(dropEvent, 'dataTransfer', { value: null })
dropzone.dispatchEvent(dropEvent)
})
expect(prepareFileList).not.toHaveBeenCalled()
})
it('should limit to single file on drop when supportBatchUpload is false', async () => {
mockUpload.mockResolvedValue({ id: 'uploaded-id' })
const prepareFileList = vi.fn()
const { getByTestId } = await act(async () =>
render(
<ToastContext.Provider value={{ notify: mockNotify, close: mockClose }}>
<TestDropzone options={{ ...defaultOptions, supportBatchUpload: false, prepareFileList }} />
</ToastContext.Provider>,
),
)
const dropzone = getByTestId('dropzone')
const files = [
new File(['content1'], 'test1.pdf', { type: 'application/pdf' }),
new File(['content2'], 'test2.pdf', { type: 'application/pdf' }),
]
await act(async () => {
const dropEvent = new Event('drop', { bubbles: true, cancelable: true }) as Event & { dataTransfer: DataTransfer | null }
Object.defineProperty(dropEvent, 'dataTransfer', {
value: {
items: files.map(f => ({
getAsFile: () => f,
webkitGetAsEntry: () => null,
})),
},
})
dropzone.dispatchEvent(dropEvent)
})
await waitFor(() => {
if (prepareFileList.mock.calls.length > 0) {
const calledFiles = prepareFileList.mock.calls[0][0]
expect(calledFiles.length).toBe(1)
}
})
})
it('should handle drop with FileSystemFileEntry', async () => {
mockUpload.mockResolvedValue({ id: 'uploaded-id' })
const prepareFileList = vi.fn()
const mockFile = new File(['content'], 'test.pdf', { type: 'application/pdf' })
const { getByTestId } = await act(async () =>
render(
<ToastContext.Provider value={{ notify: mockNotify, close: mockClose }}>
<TestDropzone options={{ ...defaultOptions, prepareFileList }} />
</ToastContext.Provider>,
),
)
const dropzone = getByTestId('dropzone')
await act(async () => {
const dropEvent = new Event('drop', { bubbles: true, cancelable: true }) as Event & { dataTransfer: DataTransfer | null }
Object.defineProperty(dropEvent, 'dataTransfer', {
value: {
items: [{
getAsFile: () => mockFile,
webkitGetAsEntry: () => ({
isFile: true,
isDirectory: false,
file: (callback: (file: File) => void) => callback(mockFile),
}),
}],
},
})
dropzone.dispatchEvent(dropEvent)
})
await waitFor(() => {
expect(prepareFileList).toHaveBeenCalled()
})
})
it('should handle drop with FileSystemDirectoryEntry', async () => {
mockUpload.mockResolvedValue({ id: 'uploaded-id' })
const prepareFileList = vi.fn()
const mockFile = new File(['content'], 'nested.pdf', { type: 'application/pdf' })
const { getByTestId } = await act(async () =>
render(
<ToastContext.Provider value={{ notify: mockNotify, close: mockClose }}>
<TestDropzone options={{ ...defaultOptions, prepareFileList }} />
</ToastContext.Provider>,
),
)
const dropzone = getByTestId('dropzone')
await act(async () => {
let callCount = 0
const dropEvent = new Event('drop', { bubbles: true, cancelable: true }) as Event & { dataTransfer: DataTransfer | null }
Object.defineProperty(dropEvent, 'dataTransfer', {
value: {
items: [{
getAsFile: () => null,
webkitGetAsEntry: () => ({
isFile: false,
isDirectory: true,
name: 'folder',
createReader: () => ({
readEntries: (callback: (entries: Array<{ isFile: boolean, isDirectory: boolean, name?: string, file?: (cb: (f: File) => void) => void }>) => void) => {
// First call returns file entry, second call returns empty (signals end)
if (callCount === 0) {
callCount++
callback([{
isFile: true,
isDirectory: false,
name: 'nested.pdf',
file: (cb: (f: File) => void) => cb(mockFile),
}])
}
else {
callback([])
}
},
}),
}),
}],
},
})
dropzone.dispatchEvent(dropEvent)
})
await waitFor(() => {
expect(prepareFileList).toHaveBeenCalled()
})
})
it('should handle drop with empty directory', async () => {
const prepareFileList = vi.fn()
const { getByTestId } = await act(async () =>
render(
<ToastContext.Provider value={{ notify: mockNotify, close: mockClose }}>
<TestDropzone options={{ ...defaultOptions, prepareFileList }} />
</ToastContext.Provider>,
),
)
const dropzone = getByTestId('dropzone')
await act(async () => {
const dropEvent = new Event('drop', { bubbles: true, cancelable: true }) as Event & { dataTransfer: DataTransfer | null }
Object.defineProperty(dropEvent, 'dataTransfer', {
value: {
items: [{
getAsFile: () => null,
webkitGetAsEntry: () => ({
isFile: false,
isDirectory: true,
name: 'empty-folder',
createReader: () => ({
readEntries: (callback: (entries: never[]) => void) => {
callback([])
},
}),
}),
}],
},
})
dropzone.dispatchEvent(dropEvent)
})
// An empty directory yields no files, so the file list should never be prepared
await new Promise(resolve => setTimeout(resolve, 100))
expect(prepareFileList).not.toHaveBeenCalled()
})
it('should handle entry that is neither file nor directory', async () => {
const prepareFileList = vi.fn()
const { getByTestId } = await act(async () =>
render(
<ToastContext.Provider value={{ notify: mockNotify, close: mockClose }}>
<TestDropzone options={{ ...defaultOptions, prepareFileList }} />
</ToastContext.Provider>,
),
)
const dropzone = getByTestId('dropzone')
await act(async () => {
const dropEvent = new Event('drop', { bubbles: true, cancelable: true }) as Event & { dataTransfer: DataTransfer | null }
Object.defineProperty(dropEvent, 'dataTransfer', {
value: {
items: [{
getAsFile: () => null,
webkitGetAsEntry: () => ({
isFile: false,
isDirectory: false,
}),
}],
},
})
dropzone.dispatchEvent(dropEvent)
})
// An entry that is neither file nor directory resolves to no files; handling should be graceful
await new Promise(resolve => setTimeout(resolve, 100))
expect(prepareFileList).not.toHaveBeenCalled()
})
})
describe('file upload', () => {
it('should call upload with correct parameters', async () => {
mockUpload.mockResolvedValue({ id: 'uploaded-id', name: 'test.pdf' })
const onFileUpdate = vi.fn()
const { result } = renderHook(
() => useFileUpload({ ...defaultOptions, onFileUpdate }),
{ wrapper: createWrapper() },
)
const mockFile = new File(['content'], 'test.pdf', { type: 'application/pdf' })
const event = {
target: { files: [mockFile] },
} as unknown as React.ChangeEvent<HTMLInputElement>
act(() => {
result.current.fileChangeHandle(event)
})
await waitFor(() => {
expect(mockUpload).toHaveBeenCalled()
})
})
it('should update progress during upload', async () => {
let progressCallback: ((e: ProgressEvent) => void) | undefined
mockUpload.mockImplementation(async (options: { onprogress: (e: ProgressEvent) => void }) => {
progressCallback = options.onprogress
return { id: 'uploaded-id' }
})
const onFileUpdate = vi.fn()
const { result } = renderHook(
() => useFileUpload({ ...defaultOptions, onFileUpdate }),
{ wrapper: createWrapper() },
)
const mockFile = new File(['content'], 'test.pdf', { type: 'application/pdf' })
const event = {
target: { files: [mockFile] },
} as unknown as React.ChangeEvent<HTMLInputElement>
act(() => {
result.current.fileChangeHandle(event)
})
await waitFor(() => {
expect(mockUpload).toHaveBeenCalled()
})
if (progressCallback) {
act(() => {
progressCallback!({
lengthComputable: true,
loaded: 50,
total: 100,
} as ProgressEvent)
})
expect(onFileUpdate).toHaveBeenCalled()
}
})
it('should handle upload error', async () => {
mockUpload.mockRejectedValue(new Error('Upload failed'))
const onFileUpdate = vi.fn()
const { result } = renderHook(
() => useFileUpload({ ...defaultOptions, onFileUpdate }),
{ wrapper: createWrapper() },
)
const mockFile = new File(['content'], 'test.pdf', { type: 'application/pdf' })
const event = {
target: { files: [mockFile] },
} as unknown as React.ChangeEvent<HTMLInputElement>
act(() => {
result.current.fileChangeHandle(event)
})
await waitFor(() => {
expect(mockNotify).toHaveBeenCalledWith(
expect.objectContaining({ type: 'error' }),
)
})
})
it('should update file with PROGRESS_COMPLETE on success', async () => {
mockUpload.mockResolvedValue({ id: 'uploaded-id', name: 'test.pdf' })
const onFileUpdate = vi.fn()
const { result } = renderHook(
() => useFileUpload({ ...defaultOptions, onFileUpdate }),
{ wrapper: createWrapper() },
)
const mockFile = new File(['content'], 'test.pdf', { type: 'application/pdf' })
const event = {
target: { files: [mockFile] },
} as unknown as React.ChangeEvent<HTMLInputElement>
act(() => {
result.current.fileChangeHandle(event)
})
await waitFor(() => {
const completeCalls = onFileUpdate.mock.calls.filter(
([, progress]) => progress === PROGRESS_COMPLETE,
)
expect(completeCalls.length).toBeGreaterThan(0)
})
})
it('should update file with PROGRESS_ERROR on failure', async () => {
mockUpload.mockRejectedValue(new Error('Upload failed'))
const onFileUpdate = vi.fn()
const { result } = renderHook(
() => useFileUpload({ ...defaultOptions, onFileUpdate }),
{ wrapper: createWrapper() },
)
const mockFile = new File(['content'], 'test.pdf', { type: 'application/pdf' })
const event = {
target: { files: [mockFile] },
} as unknown as React.ChangeEvent<HTMLInputElement>
act(() => {
result.current.fileChangeHandle(event)
})
await waitFor(() => {
const errorCalls = onFileUpdate.mock.calls.filter(
([, progress]) => progress === PROGRESS_ERROR,
)
expect(errorCalls.length).toBeGreaterThan(0)
})
})
})
describe('file count validation', () => {
it('should reject when total files exceed limit', () => {
const existingFiles: FileItem[] = Array.from({ length: 8 }, (_, i) => ({
fileID: `existing-${i}`,
file: { name: `existing-${i}.pdf`, size: 1024 } as CustomFile,
progress: 100,
}))
const { result } = renderHook(
() => useFileUpload({
...defaultOptions,
fileList: existingFiles,
}),
{ wrapper: createWrapper() },
)
const files = Array.from({ length: 5 }, (_, i) =>
new File(['content'], `new-${i}.pdf`, { type: 'application/pdf' }))
const event = {
target: { files },
} as unknown as React.ChangeEvent<HTMLInputElement>
act(() => {
result.current.fileChangeHandle(event)
})
expect(mockNotify).toHaveBeenCalledWith(
expect.objectContaining({ type: 'error' }),
)
})
})
describe('progress constants', () => {
it('should use PROGRESS_NOT_STARTED for new files', async () => {
mockUpload.mockResolvedValue({ id: 'file-id' })
const prepareFileList = vi.fn()
const { result } = renderHook(
() => useFileUpload({ ...defaultOptions, prepareFileList }),
{ wrapper: createWrapper() },
)
const mockFile = new File(['content'], 'test.pdf', { type: 'application/pdf' })
const event = {
target: { files: [mockFile] },
} as unknown as React.ChangeEvent<HTMLInputElement>
act(() => {
result.current.fileChangeHandle(event)
})
await waitFor(() => {
if (prepareFileList.mock.calls.length > 0) {
const files = prepareFileList.mock.calls[0][0]
expect(files[0].progress).toBe(PROGRESS_NOT_STARTED)
}
})
})
})
})

View File

@@ -1,351 +0,0 @@
'use client'
import type { RefObject } from 'react'
import type { CustomFile as File, FileItem } from '@/models/datasets'
import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
import { useTranslation } from 'react-i18next'
import { useContext } from 'use-context-selector'
import { getFileUploadErrorMessage } from '@/app/components/base/file-uploader/utils'
import { ToastContext } from '@/app/components/base/toast'
import { IS_CE_EDITION } from '@/config'
import { useLocale } from '@/context/i18n'
import { LanguagesSupported } from '@/i18n-config/language'
import { upload } from '@/service/base'
import { useFileSupportTypes, useFileUploadConfig } from '@/service/use-common'
import { getFileExtension } from '@/utils/format'
import { PROGRESS_COMPLETE, PROGRESS_ERROR, PROGRESS_NOT_STARTED } from '../constants'
export type FileUploadConfig = {
file_size_limit: number
batch_count_limit: number
file_upload_limit: number
}
export type UseFileUploadOptions = {
fileList: FileItem[]
prepareFileList: (files: FileItem[]) => void
onFileUpdate: (fileItem: FileItem, progress: number, list: FileItem[]) => void
onFileListUpdate?: (files: FileItem[]) => void
onPreview: (file: File) => void
supportBatchUpload?: boolean
/**
* Optional list of allowed file extensions. If not provided, fetches from API.
* Pass this when you need custom extension filtering instead of using the global config.
*/
allowedExtensions?: string[]
}
export type UseFileUploadReturn = {
// Refs
dropRef: RefObject<HTMLDivElement | null>
dragRef: RefObject<HTMLDivElement | null>
fileUploaderRef: RefObject<HTMLInputElement | null>
// State
dragging: boolean
// Config
fileUploadConfig: FileUploadConfig
acceptTypes: string[]
supportTypesShowNames: string
hideUpload: boolean
// Handlers
selectHandle: () => void
fileChangeHandle: (e: React.ChangeEvent<HTMLInputElement>) => void
removeFile: (fileID: string) => void
handlePreview: (file: File) => void
}
type FileWithPath = {
relativePath?: string
} & File
export const useFileUpload = ({
fileList,
prepareFileList,
onFileUpdate,
onFileListUpdate,
onPreview,
supportBatchUpload = false,
allowedExtensions,
}: UseFileUploadOptions): UseFileUploadReturn => {
const { t } = useTranslation()
const { notify } = useContext(ToastContext)
const locale = useLocale()
const [dragging, setDragging] = useState(false)
const dropRef = useRef<HTMLDivElement>(null)
const dragRef = useRef<HTMLDivElement>(null)
const fileUploaderRef = useRef<HTMLInputElement>(null)
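// Ref mirror of the file list so async upload callbacks read the latest state instead of a stale closure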
const fileListRef = useRef<FileItem[]>([])
const hideUpload = !supportBatchUpload && fileList.length > 0
const { data: fileUploadConfigResponse } = useFileUploadConfig()
const { data: supportFileTypesResponse } = useFileSupportTypes()
// Use provided allowedExtensions or fetch from API
const supportTypes = useMemo(
() => allowedExtensions ?? supportFileTypesResponse?.allowed_extensions ?? [],
[allowedExtensions, supportFileTypesResponse?.allowed_extensions],
)
const supportTypesShowNames = useMemo(() => {
const extensionMap: { [key: string]: string } = {
md: 'markdown',
pptx: 'pptx',
htm: 'html',
xlsx: 'xlsx',
docx: 'docx',
}
return [...supportTypes]
.map(item => extensionMap[item] || item)
.map(item => item.toLowerCase())
.filter((item, index, self) => self.indexOf(item) === index)
.map(item => item.toUpperCase())
.join(locale !== LanguagesSupported[1] ? ', ' : '、 ')
}, [supportTypes, locale])
const acceptTypes = useMemo(() => supportTypes.map((ext: string) => `.${ext}`), [supportTypes])
const fileUploadConfig = useMemo(() => ({
file_size_limit: fileUploadConfigResponse?.file_size_limit ?? 15,
batch_count_limit: supportBatchUpload ? (fileUploadConfigResponse?.batch_count_limit ?? 5) : 1,
file_upload_limit: supportBatchUpload ? (fileUploadConfigResponse?.file_upload_limit ?? 5) : 1,
}), [fileUploadConfigResponse, supportBatchUpload])
const isValid = useCallback((file: File) => {
const { size } = file
const ext = `.${getFileExtension(file.name)}`
const isValidType = acceptTypes.includes(ext.toLowerCase())
if (!isValidType)
notify({ type: 'error', message: t('stepOne.uploader.validation.typeError', { ns: 'datasetCreation' }) })
const isValidSize = size <= fileUploadConfig.file_size_limit * 1024 * 1024
if (!isValidSize)
notify({ type: 'error', message: t('stepOne.uploader.validation.size', { ns: 'datasetCreation', size: fileUploadConfig.file_size_limit }) })
return isValidType && isValidSize
}, [fileUploadConfig, notify, t, acceptTypes])
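// Upload a single file via XHR; progress flows through onFileUpdate, and on success the server response replaces the local File object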
const fileUpload = useCallback(async (fileItem: FileItem): Promise<FileItem> => {
const formData = new FormData()
formData.append('file', fileItem.file)
const onProgress = (e: ProgressEvent) => {
if (e.lengthComputable) {
const percent = Math.floor(e.loaded / e.total * 100)
onFileUpdate(fileItem, percent, fileListRef.current)
}
}
return upload({
xhr: new XMLHttpRequest(),
data: formData,
onprogress: onProgress,
}, false, undefined, '?source=datasets')
.then((res) => {
const completeFile = {
fileID: fileItem.fileID,
file: res as unknown as File,
progress: PROGRESS_NOT_STARTED,
}
const index = fileListRef.current.findIndex(item => item.fileID === fileItem.fileID)
fileListRef.current[index] = completeFile
onFileUpdate(completeFile, PROGRESS_COMPLETE, fileListRef.current)
return Promise.resolve({ ...completeFile })
})
.catch((e) => {
const errorMessage = getFileUploadErrorMessage(e, t('stepOne.uploader.failed', { ns: 'datasetCreation' }), t)
notify({ type: 'error', message: errorMessage })
onFileUpdate(fileItem, PROGRESS_ERROR, fileListRef.current)
return Promise.resolve({ ...fileItem })
})
.finally()
}, [notify, onFileUpdate, t])
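// Reset progress to 0 up front so the UI shows each file as in-flight, then upload the whole batch in parallel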
const uploadBatchFiles = useCallback((bFiles: FileItem[]) => {
bFiles.forEach(bf => (bf.progress = 0))
return Promise.all(bFiles.map(fileUpload))
}, [fileUpload])
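// Process batches sequentially so at most batch_count_limit uploads run concurrently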
const uploadMultipleFiles = useCallback(async (files: FileItem[]) => {
const batchCountLimit = fileUploadConfig.batch_count_limit
const length = files.length
let start = 0
let end = 0
while (start < length) {
if (start + batchCountLimit > length)
end = length
else
end = start + batchCountLimit
const bFiles = files.slice(start, end)
await uploadBatchFiles(bFiles)
start = end
}
}, [fileUploadConfig, uploadBatchFiles])
const initialUpload = useCallback((files: File[]) => {
const filesCountLimit = fileUploadConfig.file_upload_limit
if (!files.length)
return false
if (files.length + fileList.length > filesCountLimit && !IS_CE_EDITION) {
notify({ type: 'error', message: t('stepOne.uploader.validation.filesNumber', { ns: 'datasetCreation', filesNumber: filesCountLimit }) })
return false
}
const preparedFiles = files.map((file, index) => ({
fileID: `file${index}-${Date.now()}`,
file,
progress: PROGRESS_NOT_STARTED,
}))
const newFiles = [...fileListRef.current, ...preparedFiles]
prepareFileList(newFiles)
fileListRef.current = newFiles
uploadMultipleFiles(preparedFiles)
}, [prepareFileList, uploadMultipleFiles, notify, t, fileList, fileUploadConfig])
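// Recursively flatten a dropped FileSystemEntry; readEntries returns directory contents in chunks, and an empty result signals the end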
const traverseFileEntry = useCallback(
(entry: FileSystemEntry, prefix = ''): Promise<FileWithPath[]> => {
return new Promise((resolve) => {
if (entry.isFile) {
(entry as FileSystemFileEntry).file((file: FileWithPath) => {
file.relativePath = `${prefix}${file.name}`
resolve([file])
})
}
else if (entry.isDirectory) {
const reader = (entry as FileSystemDirectoryEntry).createReader()
const entries: FileSystemEntry[] = []
const read = () => {
reader.readEntries(async (results: FileSystemEntry[]) => {
if (!results.length) {
const files = await Promise.all(
entries.map(ent =>
traverseFileEntry(ent, `${prefix}${entry.name}/`),
),
)
resolve(files.flat())
}
else {
entries.push(...results)
read()
}
})
}
read()
}
else {
resolve([])
}
})
},
[],
)
const handleDragEnter = useCallback((e: DragEvent) => {
e.preventDefault()
e.stopPropagation()
if (e.target !== dragRef.current)
setDragging(true)
}, [])
const handleDragOver = useCallback((e: DragEvent) => {
e.preventDefault()
e.stopPropagation()
}, [])
const handleDragLeave = useCallback((e: DragEvent) => {
e.preventDefault()
e.stopPropagation()
if (e.target === dragRef.current)
setDragging(false)
}, [])
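// Resolve dropped DataTransfer items (plain files or whole directory trees) into a flat list, then apply single-file and batch limits before validation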
const handleDrop = useCallback(
async (e: DragEvent) => {
e.preventDefault()
e.stopPropagation()
setDragging(false)
if (!e.dataTransfer)
return
const nested = await Promise.all(
Array.from(e.dataTransfer.items).map((it) => {
const entry = (it as DataTransferItem & { webkitGetAsEntry?: () => FileSystemEntry | null }).webkitGetAsEntry?.()
if (entry)
return traverseFileEntry(entry)
const f = it.getAsFile?.()
return f ? Promise.resolve([f as FileWithPath]) : Promise.resolve([])
}),
)
let files = nested.flat()
if (!supportBatchUpload)
files = files.slice(0, 1)
files = files.slice(0, fileUploadConfig.batch_count_limit)
const valid = files.filter(isValid)
initialUpload(valid)
},
[initialUpload, isValid, supportBatchUpload, traverseFileEntry, fileUploadConfig],
)
const selectHandle = useCallback(() => {
if (fileUploaderRef.current)
fileUploaderRef.current.click()
}, [])
const removeFile = useCallback((fileID: string) => {
if (fileUploaderRef.current)
fileUploaderRef.current.value = ''
fileListRef.current = fileListRef.current.filter(item => item.fileID !== fileID)
onFileListUpdate?.([...fileListRef.current])
}, [onFileListUpdate])
const fileChangeHandle = useCallback((e: React.ChangeEvent<HTMLInputElement>) => {
let files = Array.from(e.target.files ?? []) as File[]
files = files.slice(0, fileUploadConfig.batch_count_limit)
initialUpload(files.filter(isValid))
}, [isValid, initialUpload, fileUploadConfig])
const handlePreview = useCallback((file: File) => {
if (file?.id)
onPreview(file)
}, [onPreview])
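// Capture the node once so cleanup unbinds listeners from the same element the effect bound them to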
useEffect(() => {
const dropArea = dropRef.current
dropArea?.addEventListener('dragenter', handleDragEnter)
dropArea?.addEventListener('dragover', handleDragOver)
dropArea?.addEventListener('dragleave', handleDragLeave)
dropArea?.addEventListener('drop', handleDrop)
return () => {
dropArea?.removeEventListener('dragenter', handleDragEnter)
dropArea?.removeEventListener('dragover', handleDragOver)
dropArea?.removeEventListener('dragleave', handleDragLeave)
dropArea?.removeEventListener('drop', handleDrop)
}
}, [handleDragEnter, handleDragOver, handleDragLeave, handleDrop])
return {
// Refs
dropRef,
dragRef,
fileUploaderRef,
// State
dragging,
// Config
fileUploadConfig,
acceptTypes,
supportTypesShowNames,
hideUpload,
// Handlers
selectHandle,
fileChangeHandle,
removeFile,
handlePreview,
}
}

View File

@@ -1,278 +0,0 @@
import type { CustomFile as File, FileItem } from '@/models/datasets'
import { fireEvent, render, screen } from '@testing-library/react'
import { beforeEach, describe, expect, it, vi } from 'vitest'
import { PROGRESS_NOT_STARTED } from './constants'
import FileUploader from './index'
// Mock react-i18next
vi.mock('react-i18next', () => ({
useTranslation: () => ({
t: (key: string) => {
const translations: Record<string, string> = {
'stepOne.uploader.title': 'Upload Files',
'stepOne.uploader.button': 'Drag and drop files, or',
'stepOne.uploader.buttonSingleFile': 'Drag and drop file, or',
'stepOne.uploader.browse': 'Browse',
'stepOne.uploader.tip': 'Supports various file types',
}
return translations[key] || key
},
}),
}))
// Mock ToastContext
const mockNotify = vi.fn()
vi.mock('use-context-selector', async () => {
const actual = await vi.importActual<typeof import('use-context-selector')>('use-context-selector')
return {
...actual,
useContext: vi.fn(() => ({ notify: mockNotify })),
}
})
// Mock services
vi.mock('@/service/base', () => ({
upload: vi.fn().mockResolvedValue({ id: 'uploaded-id' }),
}))
vi.mock('@/service/use-common', () => ({
useFileUploadConfig: () => ({
data: { file_size_limit: 15, batch_count_limit: 5, file_upload_limit: 10 },
}),
useFileSupportTypes: () => ({
data: { allowed_extensions: ['pdf', 'docx', 'txt'] },
}),
}))
vi.mock('@/context/i18n', () => ({
useLocale: () => 'en-US',
}))
vi.mock('@/i18n-config/language', () => ({
LanguagesSupported: ['en-US', 'zh-Hans'],
}))
vi.mock('@/config', () => ({
IS_CE_EDITION: false,
}))
vi.mock('@/app/components/base/file-uploader/utils', () => ({
getFileUploadErrorMessage: () => 'Upload error',
}))
// Mock theme
vi.mock('@/hooks/use-theme', () => ({
default: () => ({ theme: 'light' }),
}))
vi.mock('@/types/app', () => ({
Theme: { dark: 'dark', light: 'light' },
}))
// Mock DocumentFileIcon; the alias resolves to the same module file-list-item.tsx imports via a relative path
vi.mock('@/app/components/datasets/common/document-file-icon', () => ({
default: ({ extension }: { extension: string }) => <div data-testid="document-icon">{extension}</div>,
}))
// Mock SimplePieChart
vi.mock('next/dynamic', () => ({
default: () => {
const Component = ({ percentage }: { percentage: number }) => (
<div data-testid="pie-chart">
{percentage}
%
</div>
)
return Component
},
}))
describe('FileUploader', () => {
const createMockFile = (overrides: Partial<File> = {}): File => ({
name: 'test.pdf',
size: 1024,
type: 'application/pdf',
...overrides,
} as File)
const createMockFileItem = (overrides: Partial<FileItem> = {}): FileItem => ({
fileID: `file-${Date.now()}`,
file: createMockFile(overrides.file as Partial<File>),
progress: PROGRESS_NOT_STARTED,
...overrides,
})
const defaultProps = {
fileList: [] as FileItem[],
prepareFileList: vi.fn(),
onFileUpdate: vi.fn(),
onFileListUpdate: vi.fn(),
onPreview: vi.fn(),
supportBatchUpload: true,
}
beforeEach(() => {
vi.clearAllMocks()
})
describe('rendering', () => {
it('should render the component', () => {
render(<FileUploader {...defaultProps} />)
expect(screen.getByText('Upload Files')).toBeInTheDocument()
})
it('should render dropzone when no files', () => {
render(<FileUploader {...defaultProps} />)
expect(screen.getByText(/Drag and drop files/i)).toBeInTheDocument()
})
it('should render browse button', () => {
render(<FileUploader {...defaultProps} />)
expect(screen.getByText('Browse')).toBeInTheDocument()
})
it('should apply custom title className', () => {
render(<FileUploader {...defaultProps} titleClassName="custom-class" />)
const title = screen.getByText('Upload Files')
expect(title).toHaveClass('custom-class')
})
})
describe('file list rendering', () => {
it('should render file items when fileList has items', () => {
const fileList = [
createMockFileItem({ file: createMockFile({ name: 'file1.pdf' }) }),
createMockFileItem({ file: createMockFile({ name: 'file2.pdf' }) }),
]
render(<FileUploader {...defaultProps} fileList={fileList} />)
expect(screen.getByText('file1.pdf')).toBeInTheDocument()
expect(screen.getByText('file2.pdf')).toBeInTheDocument()
})
it('should render document icons for files', () => {
const fileList = [createMockFileItem()]
render(<FileUploader {...defaultProps} fileList={fileList} />)
expect(screen.getByTestId('document-icon')).toBeInTheDocument()
})
})
describe('batch upload mode', () => {
it('should show dropzone with batch upload enabled', () => {
render(<FileUploader {...defaultProps} supportBatchUpload={true} />)
expect(screen.getByText(/Drag and drop files/i)).toBeInTheDocument()
})
it('should show single file text when batch upload disabled', () => {
render(<FileUploader {...defaultProps} supportBatchUpload={false} />)
expect(screen.getByText(/Drag and drop file/i)).toBeInTheDocument()
})
it('should hide dropzone when not batch upload and has files', () => {
const fileList = [createMockFileItem()]
render(<FileUploader {...defaultProps} supportBatchUpload={false} fileList={fileList} />)
expect(screen.queryByText(/Drag and drop/i)).not.toBeInTheDocument()
})
})
describe('event handlers', () => {
it('should handle file preview click', () => {
const onPreview = vi.fn()
const fileItem = createMockFileItem({
file: createMockFile({ id: 'file-id' } as Partial<File>),
})
const { container } = render(<FileUploader {...defaultProps} fileList={[fileItem]} onPreview={onPreview} />)
// Find the file list item container by its class pattern
const fileElement = container.querySelector('[class*="flex h-12"]')
if (fileElement)
fireEvent.click(fileElement)
expect(onPreview).toHaveBeenCalledWith(fileItem.file)
})
it('should handle file remove click', () => {
const onFileListUpdate = vi.fn()
const fileItem = createMockFileItem()
const { container } = render(
<FileUploader {...defaultProps} fileList={[fileItem]} onFileListUpdate={onFileListUpdate} />,
)
// Find the delete button (the span with cursor-pointer containing the icon)
const deleteButtons = container.querySelectorAll('[class*="cursor-pointer"]')
// Get the last one which should be the delete button (not the browse label)
const deleteButton = deleteButtons[deleteButtons.length - 1]
if (deleteButton)
fireEvent.click(deleteButton)
expect(onFileListUpdate).toHaveBeenCalled()
})
it('should handle browse button click', () => {
render(<FileUploader {...defaultProps} />)
// The browse label should trigger file input click
const browseLabel = screen.getByText('Browse')
expect(browseLabel).toHaveClass('cursor-pointer')
})
})
describe('upload progress', () => {
it('should show progress chart for uploading files', () => {
const fileItem = createMockFileItem({ progress: 50 })
render(<FileUploader {...defaultProps} fileList={[fileItem]} />)
expect(screen.getByTestId('pie-chart')).toBeInTheDocument()
expect(screen.getByText('50%')).toBeInTheDocument()
})
it('should not show progress chart for completed files', () => {
const fileItem = createMockFileItem({ progress: 100 })
render(<FileUploader {...defaultProps} fileList={[fileItem]} />)
expect(screen.queryByTestId('pie-chart')).not.toBeInTheDocument()
})
it('should not show progress chart for not started files', () => {
const fileItem = createMockFileItem({ progress: PROGRESS_NOT_STARTED })
render(<FileUploader {...defaultProps} fileList={[fileItem]} />)
expect(screen.queryByTestId('pie-chart')).not.toBeInTheDocument()
})
})
describe('multiple files', () => {
it('should render all files in the list', () => {
const fileList = [
createMockFileItem({ fileID: 'f1', file: createMockFile({ name: 'doc1.pdf' }) }),
createMockFileItem({ fileID: 'f2', file: createMockFile({ name: 'doc2.docx' }) }),
createMockFileItem({ fileID: 'f3', file: createMockFile({ name: 'doc3.txt' }) }),
]
render(<FileUploader {...defaultProps} fileList={fileList} />)
expect(screen.getByText('doc1.pdf')).toBeInTheDocument()
expect(screen.getByText('doc2.docx')).toBeInTheDocument()
expect(screen.getByText('doc3.txt')).toBeInTheDocument()
})
})
describe('styling', () => {
it('should have correct container width', () => {
const { container } = render(<FileUploader {...defaultProps} />)
const wrapper = container.firstChild as HTMLElement
expect(wrapper).toHaveClass('w-[640px]')
})
it('should have proper spacing', () => {
const { container } = render(<FileUploader {...defaultProps} />)
const wrapper = container.firstChild as HTMLElement
expect(wrapper).toHaveClass('mb-5')
})
})
})

View File

@@ -1,10 +1,23 @@
'use client'
import type { CustomFile as File, FileItem } from '@/models/datasets'
import { RiDeleteBinLine, RiUploadCloud2Line } from '@remixicon/react'
import * as React from 'react'
import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
import { useTranslation } from 'react-i18next'
import { useContext } from 'use-context-selector'
import { getFileUploadErrorMessage } from '@/app/components/base/file-uploader/utils'
import SimplePieChart from '@/app/components/base/simple-pie-chart'
import { ToastContext } from '@/app/components/base/toast'
import { IS_CE_EDITION } from '@/config'
import { useLocale } from '@/context/i18n'
import useTheme from '@/hooks/use-theme'
import { LanguagesSupported } from '@/i18n-config/language'
import { upload } from '@/service/base'
import { useFileSupportTypes, useFileUploadConfig } from '@/service/use-common'
import { Theme } from '@/types/app'
import { cn } from '@/utils/classnames'
import FileListItem from './components/file-list-item'
import UploadDropzone from './components/upload-dropzone'
import { useFileUpload } from './hooks/use-file-upload'
import DocumentFileIcon from '../../common/document-file-icon'
type IFileUploaderProps = {
fileList: FileItem[]
@@ -26,62 +39,358 @@ const FileUploader = ({
supportBatchUpload = false,
}: IFileUploaderProps) => {
const { t } = useTranslation()
const { notify } = useContext(ToastContext)
const locale = useLocale()
const [dragging, setDragging] = useState(false)
const dropRef = useRef<HTMLDivElement>(null)
const dragRef = useRef<HTMLDivElement>(null)
const fileUploader = useRef<HTMLInputElement>(null)
const hideUpload = !supportBatchUpload && fileList.length > 0
const {
dropRef,
dragRef,
fileUploaderRef,
dragging,
fileUploadConfig,
acceptTypes,
supportTypesShowNames,
hideUpload,
selectHandle,
fileChangeHandle,
removeFile,
handlePreview,
} = useFileUpload({
fileList,
prepareFileList,
onFileUpdate,
onFileListUpdate,
onPreview,
supportBatchUpload,
})
const { data: fileUploadConfigResponse } = useFileUploadConfig()
const { data: supportFileTypesResponse } = useFileSupportTypes()
const supportTypes = supportFileTypesResponse?.allowed_extensions || []
const supportTypesShowNames = (() => {
const extensionMap: { [key: string]: string } = {
md: 'markdown',
pptx: 'pptx',
htm: 'html',
xlsx: 'xlsx',
docx: 'docx',
}
return [...supportTypes]
.map(item => extensionMap[item] || item) // map to standardized extension
.map(item => item.toLowerCase()) // convert to lower case
.filter((item, index, self) => self.indexOf(item) === index) // remove duplicates
.map(item => item.toUpperCase()) // convert to upper case
.join(locale !== LanguagesSupported[1] ? ', ' : '、 ')
})()
const ACCEPTS = supportTypes.map((ext: string) => `.${ext}`)
const fileUploadConfig = useMemo(() => ({
file_size_limit: fileUploadConfigResponse?.file_size_limit ?? 15,
batch_count_limit: supportBatchUpload ? (fileUploadConfigResponse?.batch_count_limit ?? 5) : 1,
file_upload_limit: supportBatchUpload ? (fileUploadConfigResponse?.file_upload_limit ?? 5) : 1,
}), [fileUploadConfigResponse, supportBatchUpload])
const fileListRef = useRef<FileItem[]>([])
// utils
const getFileType = (currentFile: File) => {
if (!currentFile)
return ''
const arr = currentFile.name.split('.')
return arr[arr.length - 1]
}
const getFileSize = (size: number) => {
if (size / 1024 < 10)
return `${(size / 1024).toFixed(2)}KB`
return `${(size / 1024 / 1024).toFixed(2)}MB`
}
const isValid = useCallback((file: File) => {
const { size } = file
const ext = `.${getFileType(file)}`
const isValidType = ACCEPTS.includes(ext.toLowerCase())
if (!isValidType)
notify({ type: 'error', message: t('stepOne.uploader.validation.typeError', { ns: 'datasetCreation' }) })
const isValidSize = size <= fileUploadConfig.file_size_limit * 1024 * 1024
if (!isValidSize)
notify({ type: 'error', message: t('stepOne.uploader.validation.size', { ns: 'datasetCreation', size: fileUploadConfig.file_size_limit }) })
return isValidType && isValidSize
}, [fileUploadConfig, notify, t, ACCEPTS])
const fileUpload = useCallback(async (fileItem: FileItem): Promise<FileItem> => {
const formData = new FormData()
formData.append('file', fileItem.file)
const onProgress = (e: ProgressEvent) => {
if (e.lengthComputable) {
const percent = Math.floor(e.loaded / e.total * 100)
onFileUpdate(fileItem, percent, fileListRef.current)
}
}
return upload({
xhr: new XMLHttpRequest(),
data: formData,
onprogress: onProgress,
}, false, undefined, '?source=datasets')
.then((res) => {
const completeFile = {
fileID: fileItem.fileID,
file: res as unknown as File,
progress: -1,
}
const index = fileListRef.current.findIndex(item => item.fileID === fileItem.fileID)
fileListRef.current[index] = completeFile
onFileUpdate(completeFile, 100, fileListRef.current)
return Promise.resolve({ ...completeFile })
})
.catch((e) => {
const errorMessage = getFileUploadErrorMessage(e, t('stepOne.uploader.failed', { ns: 'datasetCreation' }), t)
notify({ type: 'error', message: errorMessage })
onFileUpdate(fileItem, -2, fileListRef.current)
return Promise.resolve({ ...fileItem })
})
.finally()
}, [fileListRef, notify, onFileUpdate, t])
const uploadBatchFiles = useCallback((bFiles: FileItem[]) => {
bFiles.forEach(bf => (bf.progress = 0))
return Promise.all(bFiles.map(fileUpload))
}, [fileUpload])
const uploadMultipleFiles = useCallback(async (files: FileItem[]) => {
const batchCountLimit = fileUploadConfig.batch_count_limit
const length = files.length
let start = 0
let end = 0
while (start < length) {
if (start + batchCountLimit > length)
end = length
else
end = start + batchCountLimit
const bFiles = files.slice(start, end)
await uploadBatchFiles(bFiles)
start = end
}
}, [fileUploadConfig, uploadBatchFiles])
const initialUpload = useCallback((files: File[]) => {
const filesCountLimit = fileUploadConfig.file_upload_limit
if (!files.length)
return false
if (files.length + fileList.length > filesCountLimit && !IS_CE_EDITION) {
notify({ type: 'error', message: t('stepOne.uploader.validation.filesNumber', { ns: 'datasetCreation', filesNumber: filesCountLimit }) })
return false
}
const preparedFiles = files.map((file, index) => ({
fileID: `file${index}-${Date.now()}`,
file,
progress: -1,
}))
const newFiles = [...fileListRef.current, ...preparedFiles]
prepareFileList(newFiles)
fileListRef.current = newFiles
uploadMultipleFiles(preparedFiles)
}, [prepareFileList, uploadMultipleFiles, notify, t, fileList, fileUploadConfig])
const handleDragEnter = (e: DragEvent) => {
e.preventDefault()
e.stopPropagation()
if (e.target !== dragRef.current)
setDragging(true)
}
const handleDragOver = (e: DragEvent) => {
e.preventDefault()
e.stopPropagation()
}
const handleDragLeave = (e: DragEvent) => {
e.preventDefault()
e.stopPropagation()
if (e.target === dragRef.current)
setDragging(false)
}
type FileWithPath = {
relativePath?: string
} & File
const traverseFileEntry = useCallback(
(entry: any, prefix = ''): Promise<FileWithPath[]> => {
return new Promise((resolve) => {
if (entry.isFile) {
entry.file((file: FileWithPath) => {
file.relativePath = `${prefix}${file.name}`
resolve([file])
})
}
else if (entry.isDirectory) {
const reader = entry.createReader()
const entries: any[] = []
const read = () => {
reader.readEntries(async (results: FileSystemEntry[]) => {
if (!results.length) {
const files = await Promise.all(
entries.map(ent =>
traverseFileEntry(ent, `${prefix}${entry.name}/`),
),
)
resolve(files.flat())
}
else {
entries.push(...results)
read()
}
})
}
read()
}
else {
resolve([])
}
})
},
[],
)
const handleDrop = useCallback(
async (e: DragEvent) => {
e.preventDefault()
e.stopPropagation()
setDragging(false)
if (!e.dataTransfer)
return
const nested = await Promise.all(
Array.from(e.dataTransfer.items).map((it) => {
const entry = (it as any).webkitGetAsEntry?.()
if (entry)
return traverseFileEntry(entry)
const f = it.getAsFile?.()
return f ? Promise.resolve([f]) : Promise.resolve([])
}),
)
let files = nested.flat()
if (!supportBatchUpload)
files = files.slice(0, 1)
files = files.slice(0, fileUploadConfig.batch_count_limit)
const valid = files.filter(isValid)
initialUpload(valid)
},
[initialUpload, isValid, supportBatchUpload, traverseFileEntry, fileUploadConfig],
)
const selectHandle = () => {
if (fileUploader.current)
fileUploader.current.click()
}
const removeFile = (fileID: string) => {
if (fileUploader.current)
fileUploader.current.value = ''
fileListRef.current = fileListRef.current.filter(item => item.fileID !== fileID)
onFileListUpdate?.([...fileListRef.current])
}
const fileChangeHandle = useCallback((e: React.ChangeEvent<HTMLInputElement>) => {
let files = Array.from(e.target.files ?? []) as File[]
files = files.slice(0, fileUploadConfig.batch_count_limit)
initialUpload(files.filter(isValid))
}, [isValid, initialUpload, fileUploadConfig])
const { theme } = useTheme()
const chartColor = useMemo(() => theme === Theme.dark ? '#5289ff' : '#296dff', [theme])
useEffect(() => {
dropRef.current?.addEventListener('dragenter', handleDragEnter)
dropRef.current?.addEventListener('dragover', handleDragOver)
dropRef.current?.addEventListener('dragleave', handleDragLeave)
dropRef.current?.addEventListener('drop', handleDrop)
return () => {
dropRef.current?.removeEventListener('dragenter', handleDragEnter)
dropRef.current?.removeEventListener('dragover', handleDragOver)
dropRef.current?.removeEventListener('dragleave', handleDragLeave)
dropRef.current?.removeEventListener('drop', handleDrop)
}
}, [handleDrop])
return (
<div className="mb-5 w-[640px]">
<div className={cn('mb-1 text-sm font-semibold leading-6 text-text-secondary', titleClassName)}>
{t('stepOne.uploader.title', { ns: 'datasetCreation' })}
</div>
{!hideUpload && (
<UploadDropzone
dropRef={dropRef}
dragRef={dragRef}
fileUploaderRef={fileUploaderRef}
dragging={dragging}
supportBatchUpload={supportBatchUpload}
supportTypesShowNames={supportTypesShowNames}
fileUploadConfig={fileUploadConfig}
acceptTypes={acceptTypes}
onSelectFile={selectHandle}
onFileChange={fileChangeHandle}
<input
ref={fileUploader}
id="fileUploader"
className="hidden"
type="file"
multiple={supportBatchUpload}
accept={ACCEPTS.join(',')}
onChange={fileChangeHandle}
/>
)}
{fileList.length > 0 && (
<div className="max-w-[640px] cursor-default space-y-1">
{fileList.map(fileItem => (
<FileListItem
key={fileItem.fileID}
fileItem={fileItem}
onPreview={handlePreview}
onRemove={removeFile}
/>
))}
<div className={cn('mb-1 text-sm font-semibold leading-6 text-text-secondary', titleClassName)}>{t('stepOne.uploader.title', { ns: 'datasetCreation' })}</div>
{!hideUpload && (
<div ref={dropRef} className={cn('relative mb-2 box-border flex min-h-20 max-w-[640px] flex-col items-center justify-center gap-1 rounded-xl border border-dashed border-components-dropzone-border bg-components-dropzone-bg px-4 py-3 text-xs leading-4 text-text-tertiary', dragging && 'border-components-dropzone-border-accent bg-components-dropzone-bg-accent')}>
<div className="flex min-h-5 items-center justify-center text-sm leading-4 text-text-secondary">
<RiUploadCloud2Line className="mr-2 size-5" />
<span>
{supportBatchUpload ? t('stepOne.uploader.button', { ns: 'datasetCreation' }) : t('stepOne.uploader.buttonSingleFile', { ns: 'datasetCreation' })}
{supportTypes.length > 0 && (
<label className="ml-1 cursor-pointer text-text-accent" onClick={selectHandle}>{t('stepOne.uploader.browse', { ns: 'datasetCreation' })}</label>
)}
</span>
</div>
<div>
{t('stepOne.uploader.tip', {
ns: 'datasetCreation',
size: fileUploadConfig.file_size_limit,
supportTypes: supportTypesShowNames,
batchCount: fileUploadConfig.batch_count_limit,
totalCount: fileUploadConfig.file_upload_limit,
})}
</div>
{dragging && <div ref={dragRef} className="absolute left-0 top-0 h-full w-full" />}
</div>
)}
<div className="max-w-[640px] cursor-default space-y-1">
{fileList.map((fileItem, index) => (
<div
key={`${fileItem.fileID}-${index}`}
onClick={() => fileItem.file?.id && onPreview(fileItem.file)}
className={cn(
'flex h-12 max-w-[640px] items-center rounded-lg border border-components-panel-border bg-components-panel-on-panel-item-bg text-xs leading-3 text-text-tertiary shadow-xs',
// 'border-state-destructive-border bg-state-destructive-hover',
)}
>
<div className="flex w-12 shrink-0 items-center justify-center">
<DocumentFileIcon
size="xl"
className="shrink-0"
name={fileItem.file.name}
extension={getFileType(fileItem.file)}
/>
</div>
<div className="flex shrink grow flex-col gap-0.5">
<div className="flex w-full">
<div className="w-0 grow truncate text-sm leading-4 text-text-secondary">{fileItem.file.name}</div>
</div>
<div className="w-full truncate leading-3 text-text-tertiary">
<span className="uppercase">{getFileType(fileItem.file)}</span>
<span className="px-1 text-text-quaternary">·</span>
<span>{getFileSize(fileItem.file.size)}</span>
{/* <span className='px-1 text-text-quaternary'>·</span>
<span>10k characters</span> */}
</div>
</div>
<div className="flex w-16 shrink-0 items-center justify-end gap-1 pr-3">
{/* <span className="flex justify-center items-center w-6 h-6 cursor-pointer">
<RiErrorWarningFill className='size-4 text-text-warning' />
</span> */}
{(fileItem.progress < 100 && fileItem.progress >= 0) && (
// <div className={s.percent}>{`${fileItem.progress}%`}</div>
<SimplePieChart percentage={fileItem.progress} stroke={chartColor} fill={chartColor} animationDuration={0} />
)}
<span
className="flex h-6 w-6 cursor-pointer items-center justify-center"
onClick={(e) => {
e.stopPropagation()
removeFile(fileItem.fileID)
}}
>
<RiDeleteBinLine className="size-4 text-text-tertiary" />
</span>
</div>
</div>
))}
</div>
</div>
)
}

View File

@@ -154,7 +154,7 @@ export const GeneralChunkingOptions: FC<GeneralChunkingOptionsProps> = ({
</div>
))}
{
showSummaryIndexSetting && IS_CE_EDITION && (
showSummaryIndexSetting && (
<div className="mt-3">
<SummaryIndexSetting
entry="create-document"

View File

@@ -12,7 +12,6 @@ import Divider from '@/app/components/base/divider'
import { ParentChildChunk } from '@/app/components/base/icons/src/vender/knowledge'
import RadioCard from '@/app/components/base/radio-card'
import SummaryIndexSetting from '@/app/components/datasets/settings/summary-index-setting'
import { IS_CE_EDITION } from '@/config'
import { ChunkingMode } from '@/models/datasets'
import FileList from '../../assets/file-list-3-fill.svg'
import Note from '../../assets/note-mod.svg'
@@ -192,7 +191,7 @@ export const ParentChildOptions: FC<ParentChildOptionsProps> = ({
</div>
))}
{
showSummaryIndexSetting && IS_CE_EDITION && (
showSummaryIndexSetting && (
<div className="mt-3">
<SummaryIndexSetting
entry="create-document"

View File

@@ -1,262 +0,0 @@
import type { SimpleDocumentDetail } from '@/models/datasets'
import { render } from '@testing-library/react'
import { describe, expect, it } from 'vitest'
import { DataSourceType } from '@/models/datasets'
import { DatasourceType } from '@/models/pipeline'
import DocumentSourceIcon from './document-source-icon'
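// Factory for a fully-populated document; each test overrides only the fields it asserts on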
const createMockDoc = (overrides: Record<string, unknown> = {}): SimpleDocumentDetail => ({
id: 'doc-1',
position: 1,
data_source_type: DataSourceType.FILE,
data_source_info: {},
data_source_detail_dict: {},
dataset_process_rule_id: 'rule-1',
dataset_id: 'dataset-1',
batch: 'batch-1',
name: 'test-document.txt',
created_from: 'web',
created_by: 'user-1',
created_at: Date.now(),
tokens: 100,
indexing_status: 'completed',
error: null,
enabled: true,
disabled_at: null,
disabled_by: null,
archived: false,
archived_reason: null,
archived_by: null,
archived_at: null,
updated_at: Date.now(),
doc_type: null,
doc_metadata: undefined,
doc_language: 'en',
display_status: 'available',
word_count: 100,
hit_count: 10,
doc_form: 'text_model',
...overrides,
}) as unknown as SimpleDocumentDetail
describe('DocumentSourceIcon', () => {
describe('Rendering', () => {
it('should render without crashing', () => {
const doc = createMockDoc()
const { container } = render(<DocumentSourceIcon doc={doc} />)
expect(container.firstChild).toBeInTheDocument()
})
})
describe('Local File Icon', () => {
it('should render FileTypeIcon for FILE data source type', () => {
const doc = createMockDoc({
data_source_type: DataSourceType.FILE,
data_source_info: {
upload_file: { extension: 'pdf' },
},
})
const { container } = render(<DocumentSourceIcon doc={doc} fileType="pdf" />)
const icon = container.querySelector('svg, img')
expect(icon).toBeInTheDocument()
})
it('should render FileTypeIcon for localFile data source type', () => {
const doc = createMockDoc({
data_source_type: DatasourceType.localFile,
created_from: 'rag-pipeline',
data_source_info: {
extension: 'docx',
},
})
const { container } = render(<DocumentSourceIcon doc={doc} />)
const icon = container.querySelector('svg, img')
expect(icon).toBeInTheDocument()
})
it('should use extension from upload_file for legacy data source', () => {
const doc = createMockDoc({
data_source_type: DataSourceType.FILE,
created_from: 'web',
data_source_info: {
upload_file: { extension: 'txt' },
},
})
const { container } = render(<DocumentSourceIcon doc={doc} />)
expect(container.firstChild).toBeInTheDocument()
})
it('should use fileType prop as fallback for extension', () => {
const doc = createMockDoc({
data_source_type: DataSourceType.FILE,
created_from: 'web',
data_source_info: {},
})
const { container } = render(<DocumentSourceIcon doc={doc} fileType="csv" />)
expect(container.firstChild).toBeInTheDocument()
})
})
describe('Notion Icon', () => {
it('should render NotionIcon for NOTION data source type', () => {
const doc = createMockDoc({
data_source_type: DataSourceType.NOTION,
created_from: 'web',
data_source_info: {
notion_page_icon: 'https://notion.so/icon.png',
},
})
const { container } = render(<DocumentSourceIcon doc={doc} />)
expect(container.firstChild).toBeInTheDocument()
})
it('should render NotionIcon for onlineDocument data source type', () => {
const doc = createMockDoc({
data_source_type: DatasourceType.onlineDocument,
created_from: 'rag-pipeline',
data_source_info: {
page: { page_icon: 'https://notion.so/icon.png' },
},
})
const { container } = render(<DocumentSourceIcon doc={doc} />)
expect(container.firstChild).toBeInTheDocument()
})
it('should use page_icon for rag-pipeline created documents', () => {
const doc = createMockDoc({
data_source_type: DataSourceType.NOTION,
created_from: 'rag-pipeline',
data_source_info: {
page: { page_icon: 'https://notion.so/custom-icon.png' },
},
})
const { container } = render(<DocumentSourceIcon doc={doc} />)
expect(container.firstChild).toBeInTheDocument()
})
})
describe('Web Crawl Icon', () => {
it('should render globe icon for WEB data source type', () => {
const doc = createMockDoc({
data_source_type: DataSourceType.WEB,
})
const { container } = render(<DocumentSourceIcon doc={doc} />)
const icon = container.querySelector('svg')
expect(icon).toBeInTheDocument()
expect(icon).toHaveClass('mr-1.5')
expect(icon).toHaveClass('size-4')
})
it('should render globe icon for websiteCrawl data source type', () => {
const doc = createMockDoc({
data_source_type: DatasourceType.websiteCrawl,
})
const { container } = render(<DocumentSourceIcon doc={doc} />)
const icon = container.querySelector('svg')
expect(icon).toBeInTheDocument()
})
})
describe('Online Drive Icon', () => {
it('should render FileTypeIcon for onlineDrive data source type', () => {
const doc = createMockDoc({
data_source_type: DatasourceType.onlineDrive,
data_source_info: {
name: 'document.xlsx',
},
})
const { container } = render(<DocumentSourceIcon doc={doc} />)
expect(container.firstChild).toBeInTheDocument()
})
it('should extract extension from file name', () => {
const doc = createMockDoc({
data_source_type: DatasourceType.onlineDrive,
data_source_info: {
name: 'spreadsheet.xlsx',
},
})
const { container } = render(<DocumentSourceIcon doc={doc} />)
expect(container.firstChild).toBeInTheDocument()
})
it('should handle file name without extension', () => {
const doc = createMockDoc({
data_source_type: DatasourceType.onlineDrive,
data_source_info: {
name: 'noextension',
},
})
const { container } = render(<DocumentSourceIcon doc={doc} />)
expect(container.firstChild).toBeInTheDocument()
})
it('should handle empty file name', () => {
const doc = createMockDoc({
data_source_type: DatasourceType.onlineDrive,
data_source_info: {
name: '',
},
})
const { container } = render(<DocumentSourceIcon doc={doc} />)
expect(container.firstChild).toBeInTheDocument()
})
it('should handle hidden files (starting with dot)', () => {
const doc = createMockDoc({
data_source_type: DatasourceType.onlineDrive,
data_source_info: {
name: '.gitignore',
},
})
const { container } = render(<DocumentSourceIcon doc={doc} />)
expect(container.firstChild).toBeInTheDocument()
})
})
describe('Unknown Data Source Type', () => {
it('should return null for unknown data source type', () => {
const doc = createMockDoc({
data_source_type: 'unknown',
})
const { container } = render(<DocumentSourceIcon doc={doc} />)
expect(container.firstChild).toBeNull()
})
})
describe('Edge Cases', () => {
it('should handle undefined data_source_info', () => {
const doc = createMockDoc({
data_source_type: DataSourceType.FILE,
data_source_info: undefined,
})
const { container } = render(<DocumentSourceIcon doc={doc} />)
expect(container.firstChild).toBeInTheDocument()
})
it('should memoize the component', () => {
const doc = createMockDoc()
const { rerender, container } = render(<DocumentSourceIcon doc={doc} />)
const firstRender = container.innerHTML
rerender(<DocumentSourceIcon doc={doc} />)
expect(container.innerHTML).toBe(firstRender)
})
})
})

View File

@@ -1,100 +0,0 @@
import type { FC } from 'react'
import type { LegacyDataSourceInfo, LocalFileInfo, OnlineDocumentInfo, OnlineDriveInfo, SimpleDocumentDetail } from '@/models/datasets'
import { RiGlobalLine } from '@remixicon/react'
import * as React from 'react'
import FileTypeIcon from '@/app/components/base/file-uploader/file-type-icon'
import NotionIcon from '@/app/components/base/notion-icon'
import { extensionToFileType } from '@/app/components/datasets/hit-testing/utils/extension-to-file-type'
import { DataSourceType } from '@/models/datasets'
import { DatasourceType } from '@/models/pipeline'
type DocumentSourceIconProps = {
doc: SimpleDocumentDetail
fileType?: string
}
const isLocalFile = (dataSourceType: DataSourceType | DatasourceType) => {
return dataSourceType === DatasourceType.localFile || dataSourceType === DataSourceType.FILE
}
const isOnlineDocument = (dataSourceType: DataSourceType | DatasourceType) => {
return dataSourceType === DatasourceType.onlineDocument || dataSourceType === DataSourceType.NOTION
}
const isWebsiteCrawl = (dataSourceType: DataSourceType | DatasourceType) => {
return dataSourceType === DatasourceType.websiteCrawl || dataSourceType === DataSourceType.WEB
}
const isOnlineDrive = (dataSourceType: DataSourceType | DatasourceType) => {
return dataSourceType === DatasourceType.onlineDrive
}
const isCreateFromRAGPipeline = (createdFrom: string) => {
return createdFrom === 'rag-pipeline'
}
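// Names without a dot, and dot-files like '.gitignore', are treated as having no extension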
const getFileExtension = (fileName: string): string => {
if (!fileName)
return ''
const parts = fileName.split('.')
if (parts.length <= 1 || (parts[0] === '' && parts.length === 2))
return ''
return parts[parts.length - 1].toLowerCase()
}
const DocumentSourceIcon: FC<DocumentSourceIconProps> = React.memo(({
doc,
fileType,
}) => {
if (isOnlineDocument(doc.data_source_type)) {
return (
<NotionIcon
className="mr-1.5"
type="page"
src={
isCreateFromRAGPipeline(doc.created_from)
? (doc.data_source_info as OnlineDocumentInfo).page.page_icon
: (doc.data_source_info as LegacyDataSourceInfo).notion_page_icon
}
/>
)
}
if (isLocalFile(doc.data_source_type)) {
return (
<FileTypeIcon
type={
extensionToFileType(
isCreateFromRAGPipeline(doc.created_from)
? (doc?.data_source_info as LocalFileInfo)?.extension
: ((doc?.data_source_info as LegacyDataSourceInfo)?.upload_file?.extension ?? fileType),
)
}
className="mr-1.5"
/>
)
}
if (isOnlineDrive(doc.data_source_type)) {
return (
<FileTypeIcon
type={
extensionToFileType(
getFileExtension((doc?.data_source_info as unknown as OnlineDriveInfo)?.name),
)
}
className="mr-1.5"
/>
)
}
if (isWebsiteCrawl(doc.data_source_type)) {
return <RiGlobalLine className="mr-1.5 size-4" />
}
return null
})
DocumentSourceIcon.displayName = 'DocumentSourceIcon'
export default DocumentSourceIcon

View File

@@ -1,342 +0,0 @@
import type { ReactNode } from 'react'
import type { SimpleDocumentDetail } from '@/models/datasets'
import { QueryClient, QueryClientProvider } from '@tanstack/react-query'
import { fireEvent, render, screen } from '@testing-library/react'
import { beforeEach, describe, expect, it, vi } from 'vitest'
import { DataSourceType } from '@/models/datasets'
import DocumentTableRow from './document-table-row'
const mockPush = vi.fn()
vi.mock('next/navigation', () => ({
useRouter: () => ({
push: mockPush,
}),
}))
const createTestQueryClient = () => new QueryClient({
defaultOptions: {
queries: { retry: false, gcTime: 0 },
mutations: { retry: false },
},
})
const createWrapper = () => {
const queryClient = createTestQueryClient()
return ({ children }: { children: ReactNode }) => (
<QueryClientProvider client={queryClient}>
<table>
<tbody>
{children}
</tbody>
</table>
</QueryClientProvider>
)
}
type LocalDoc = SimpleDocumentDetail & { percent?: number }
const createMockDoc = (overrides: Record<string, unknown> = {}): LocalDoc => ({
id: 'doc-1',
position: 1,
data_source_type: DataSourceType.FILE,
data_source_info: {},
data_source_detail_dict: {
upload_file: { name: 'test.txt', extension: 'txt' },
},
dataset_process_rule_id: 'rule-1',
dataset_id: 'dataset-1',
batch: 'batch-1',
name: 'test-document.txt',
created_from: 'web',
created_by: 'user-1',
created_at: Date.now(),
tokens: 100,
indexing_status: 'completed',
error: null,
enabled: true,
disabled_at: null,
disabled_by: null,
archived: false,
archived_reason: null,
archived_by: null,
archived_at: null,
updated_at: Date.now(),
doc_type: null,
doc_metadata: undefined,
doc_language: 'en',
display_status: 'available',
word_count: 500,
hit_count: 10,
doc_form: 'text_model',
...overrides,
}) as unknown as LocalDoc
// Helper to find the custom checkbox div (Checkbox component renders as a div, not a native checkbox)
const findCheckbox = (container: HTMLElement): HTMLElement | null => {
return container.querySelector('[class*="shadow-xs"]')
}
describe('DocumentTableRow', () => {
const defaultProps = {
doc: createMockDoc(),
index: 0,
datasetId: 'dataset-1',
isSelected: false,
isGeneralMode: true,
isQAMode: false,
embeddingAvailable: true,
selectedIds: [],
onSelectOne: vi.fn(),
onSelectedIdChange: vi.fn(),
onShowRenameModal: vi.fn(),
onUpdate: vi.fn(),
}
beforeEach(() => {
vi.clearAllMocks()
})
describe('Rendering', () => {
it('should render without crashing', () => {
render(<DocumentTableRow {...defaultProps} />, { wrapper: createWrapper() })
expect(screen.getByText('test-document.txt')).toBeInTheDocument()
})
it('should render index number correctly', () => {
render(<DocumentTableRow {...defaultProps} index={5} />, { wrapper: createWrapper() })
expect(screen.getByText('6')).toBeInTheDocument()
})
it('should render document name with tooltip', () => {
render(<DocumentTableRow {...defaultProps} />, { wrapper: createWrapper() })
expect(screen.getByText('test-document.txt')).toBeInTheDocument()
})
it('should render checkbox element', () => {
const { container } = render(<DocumentTableRow {...defaultProps} />, { wrapper: createWrapper() })
const checkbox = findCheckbox(container)
expect(checkbox).toBeInTheDocument()
})
})
describe('Selection', () => {
it('should show check icon when isSelected is true', () => {
const { container } = render(<DocumentTableRow {...defaultProps} isSelected />, { wrapper: createWrapper() })
// When selected, the checkbox should have a check icon (RiCheckLine svg)
const checkbox = findCheckbox(container)
expect(checkbox).toBeInTheDocument()
const checkIcon = checkbox?.querySelector('svg')
expect(checkIcon).toBeInTheDocument()
})
it('should not show check icon when isSelected is false', () => {
const { container } = render(<DocumentTableRow {...defaultProps} isSelected={false} />, { wrapper: createWrapper() })
const checkbox = findCheckbox(container)
expect(checkbox).toBeInTheDocument()
// When not selected, there should be no check icon inside the checkbox
const checkIcon = checkbox?.querySelector('svg')
expect(checkIcon).not.toBeInTheDocument()
})
it('should call onSelectOne when checkbox is clicked', () => {
const onSelectOne = vi.fn()
const { container } = render(<DocumentTableRow {...defaultProps} onSelectOne={onSelectOne} />, { wrapper: createWrapper() })
const checkbox = findCheckbox(container)
if (checkbox) {
fireEvent.click(checkbox)
expect(onSelectOne).toHaveBeenCalledWith('doc-1')
}
})
it('should stop propagation when checkbox container is clicked', () => {
const { container } = render(<DocumentTableRow {...defaultProps} />, { wrapper: createWrapper() })
// Click the div containing the checkbox (which has stopPropagation)
const checkboxContainer = container.querySelector('td')?.querySelector('div')
if (checkboxContainer) {
fireEvent.click(checkboxContainer)
expect(mockPush).not.toHaveBeenCalled()
}
})
})
describe('Row Navigation', () => {
it('should navigate to document detail on row click', () => {
render(<DocumentTableRow {...defaultProps} />, { wrapper: createWrapper() })
const row = screen.getByRole('row')
fireEvent.click(row)
expect(mockPush).toHaveBeenCalledWith('/datasets/dataset-1/documents/doc-1')
})
it('should navigate with correct datasetId and documentId', () => {
render(
<DocumentTableRow
{...defaultProps}
datasetId="custom-dataset"
doc={createMockDoc({ id: 'custom-doc' })}
/>,
{ wrapper: createWrapper() },
)
const row = screen.getByRole('row')
fireEvent.click(row)
expect(mockPush).toHaveBeenCalledWith('/datasets/custom-dataset/documents/custom-doc')
})
})
describe('Word Count Display', () => {
it('should display word count less than 1000 as is', () => {
const doc = createMockDoc({ word_count: 500 })
render(<DocumentTableRow {...defaultProps} doc={doc} />, { wrapper: createWrapper() })
expect(screen.getByText('500')).toBeInTheDocument()
})
it('should display word count 1000 or more in k format', () => {
const doc = createMockDoc({ word_count: 1500 })
render(<DocumentTableRow {...defaultProps} doc={doc} />, { wrapper: createWrapper() })
expect(screen.getByText('1.5k')).toBeInTheDocument()
})
it('should display 0 with empty style when word_count is 0', () => {
const doc = createMockDoc({ word_count: 0 })
const { container } = render(<DocumentTableRow {...defaultProps} doc={doc} />, { wrapper: createWrapper() })
const zeroCells = container.querySelectorAll('.text-text-tertiary')
expect(zeroCells.length).toBeGreaterThan(0)
})
it('should handle undefined word_count', () => {
const doc = createMockDoc({ word_count: undefined as unknown as number })
const { container } = render(<DocumentTableRow {...defaultProps} doc={doc} />, { wrapper: createWrapper() })
expect(container).toBeInTheDocument()
})
})
describe('Hit Count Display', () => {
it('should display hit count less than 1000 as is', () => {
const doc = createMockDoc({ hit_count: 100 })
render(<DocumentTableRow {...defaultProps} doc={doc} />, { wrapper: createWrapper() })
expect(screen.getByText('100')).toBeInTheDocument()
})
it('should display hit count 1000 or more in k format', () => {
const doc = createMockDoc({ hit_count: 2500 })
render(<DocumentTableRow {...defaultProps} doc={doc} />, { wrapper: createWrapper() })
expect(screen.getByText('2.5k')).toBeInTheDocument()
})
it('should display 0 with empty style when hit_count is 0', () => {
const doc = createMockDoc({ hit_count: 0 })
const { container } = render(<DocumentTableRow {...defaultProps} doc={doc} />, { wrapper: createWrapper() })
const zeroCells = container.querySelectorAll('.text-text-tertiary')
expect(zeroCells.length).toBeGreaterThan(0)
})
})
describe('Chunking Mode', () => {
it('should render ChunkingModeLabel with general mode', () => {
render(<DocumentTableRow {...defaultProps} isGeneralMode isQAMode={false} />, { wrapper: createWrapper() })
// ChunkingModeLabel should be rendered
expect(screen.getByRole('row')).toBeInTheDocument()
})
it('should render ChunkingModeLabel with QA mode', () => {
render(<DocumentTableRow {...defaultProps} isGeneralMode={false} isQAMode />, { wrapper: createWrapper() })
expect(screen.getByRole('row')).toBeInTheDocument()
})
})
describe('Summary Status', () => {
it('should render SummaryStatus when summary_index_status is present', () => {
const doc = createMockDoc({ summary_index_status: 'completed' })
render(<DocumentTableRow {...defaultProps} doc={doc} />, { wrapper: createWrapper() })
expect(screen.getByRole('row')).toBeInTheDocument()
})
it('should not render SummaryStatus when summary_index_status is absent', () => {
const doc = createMockDoc({ summary_index_status: undefined })
render(<DocumentTableRow {...defaultProps} doc={doc} />, { wrapper: createWrapper() })
expect(screen.getByRole('row')).toBeInTheDocument()
})
})
describe('Rename Action', () => {
it('should call onShowRenameModal when rename button is clicked', () => {
const onShowRenameModal = vi.fn()
const { container } = render(
<DocumentTableRow {...defaultProps} onShowRenameModal={onShowRenameModal} />,
{ wrapper: createWrapper() },
)
// Find the rename button by finding the RiEditLine icon's parent
const renameButtons = container.querySelectorAll('.cursor-pointer.rounded-md')
if (renameButtons.length > 0) {
fireEvent.click(renameButtons[0])
expect(onShowRenameModal).toHaveBeenCalledWith(defaultProps.doc)
expect(mockPush).not.toHaveBeenCalled()
}
})
})
describe('Operations', () => {
it('should pass selectedIds to Operations component', () => {
render(<DocumentTableRow {...defaultProps} selectedIds={['doc-1', 'doc-2']} />, { wrapper: createWrapper() })
expect(screen.getByRole('row')).toBeInTheDocument()
})
it('should pass onSelectedIdChange to Operations component', () => {
const onSelectedIdChange = vi.fn()
render(<DocumentTableRow {...defaultProps} onSelectedIdChange={onSelectedIdChange} />, { wrapper: createWrapper() })
expect(screen.getByRole('row')).toBeInTheDocument()
})
})
describe('Document Source Icon', () => {
it('should render with FILE data source type', () => {
const doc = createMockDoc({ data_source_type: DataSourceType.FILE })
render(<DocumentTableRow {...defaultProps} doc={doc} />, { wrapper: createWrapper() })
expect(screen.getByRole('row')).toBeInTheDocument()
})
it('should render with NOTION data source type', () => {
const doc = createMockDoc({
data_source_type: DataSourceType.NOTION,
data_source_info: { notion_page_icon: 'icon.png' },
})
render(<DocumentTableRow {...defaultProps} doc={doc} />, { wrapper: createWrapper() })
expect(screen.getByRole('row')).toBeInTheDocument()
})
it('should render with WEB data source type', () => {
const doc = createMockDoc({ data_source_type: DataSourceType.WEB })
render(<DocumentTableRow {...defaultProps} doc={doc} />, { wrapper: createWrapper() })
expect(screen.getByRole('row')).toBeInTheDocument()
})
})
describe('Edge Cases', () => {
it('should handle document with very long name', () => {
const doc = createMockDoc({ name: `${'a'.repeat(500)}.txt` })
render(<DocumentTableRow {...defaultProps} doc={doc} />, { wrapper: createWrapper() })
expect(screen.getByRole('row')).toBeInTheDocument()
})
it('should handle document with special characters in name', () => {
const doc = createMockDoc({ name: '<script>test</script>.txt' })
render(<DocumentTableRow {...defaultProps} doc={doc} />, { wrapper: createWrapper() })
expect(screen.getByText('<script>test</script>.txt')).toBeInTheDocument()
})
it('should memoize the component', () => {
const wrapper = createWrapper()
const { rerender } = render(<DocumentTableRow {...defaultProps} />, { wrapper })
rerender(<DocumentTableRow {...defaultProps} />)
expect(screen.getByRole('row')).toBeInTheDocument()
})
})
})

View File

@@ -1,152 +0,0 @@
import type { FC } from 'react'
import type { SimpleDocumentDetail } from '@/models/datasets'
import { RiEditLine } from '@remixicon/react'
import { pick } from 'es-toolkit/object'
import { useRouter } from 'next/navigation'
import * as React from 'react'
import { useCallback } from 'react'
import { useTranslation } from 'react-i18next'
import Checkbox from '@/app/components/base/checkbox'
import Tooltip from '@/app/components/base/tooltip'
import ChunkingModeLabel from '@/app/components/datasets/common/chunking-mode-label'
import Operations from '@/app/components/datasets/documents/components/operations'
import SummaryStatus from '@/app/components/datasets/documents/detail/completed/common/summary-status'
import StatusItem from '@/app/components/datasets/documents/status-item'
import useTimestamp from '@/hooks/use-timestamp'
import { DataSourceType } from '@/models/datasets'
import { formatNumber } from '@/utils/format'
import DocumentSourceIcon from './document-source-icon'
import { renderTdValue } from './utils'
type LocalDoc = SimpleDocumentDetail & { percent?: number }
type DocumentTableRowProps = {
doc: LocalDoc
index: number
datasetId: string
isSelected: boolean
isGeneralMode: boolean
isQAMode: boolean
embeddingAvailable: boolean
selectedIds: string[]
onSelectOne: (docId: string) => void
onSelectedIdChange: (ids: string[]) => void
onShowRenameModal: (doc: LocalDoc) => void
onUpdate: () => void
}
const renderCount = (count: number | undefined) => {
if (!count)
return renderTdValue(0, true)
if (count < 1000)
return count
return `${formatNumber((count / 1000).toFixed(1))}k`
}
const DocumentTableRow: FC<DocumentTableRowProps> = React.memo(({
doc,
index,
datasetId,
isSelected,
isGeneralMode,
isQAMode,
embeddingAvailable,
selectedIds,
onSelectOne,
onSelectedIdChange,
onShowRenameModal,
onUpdate,
}) => {
const { t } = useTranslation()
const { formatTime } = useTimestamp()
const router = useRouter()
const isFile = doc.data_source_type === DataSourceType.FILE
const fileType = isFile ? doc.data_source_detail_dict?.upload_file?.extension : ''
const handleRowClick = useCallback(() => {
router.push(`/datasets/${datasetId}/documents/${doc.id}`)
}, [router, datasetId, doc.id])
const handleCheckboxClick = useCallback((e: React.MouseEvent) => {
e.stopPropagation()
}, [])
const handleRenameClick = useCallback((e: React.MouseEvent) => {
e.stopPropagation()
onShowRenameModal(doc)
}, [doc, onShowRenameModal])
return (
<tr
className="h-8 cursor-pointer border-b border-divider-subtle hover:bg-background-default-hover"
onClick={handleRowClick}
>
<td className="text-left align-middle text-xs text-text-tertiary">
<div className="flex items-center" onClick={handleCheckboxClick}>
<Checkbox
className="mr-2 shrink-0"
checked={isSelected}
onCheck={() => onSelectOne(doc.id)}
/>
{index + 1}
</div>
</td>
<td>
<div className="group mr-6 flex max-w-[460px] items-center hover:mr-0">
<div className="flex shrink-0 items-center">
<DocumentSourceIcon doc={doc} fileType={fileType} />
</div>
<Tooltip popupContent={doc.name}>
<span className="grow-1 truncate text-sm">{doc.name}</span>
</Tooltip>
{doc.summary_index_status && (
<div className="ml-1 hidden shrink-0 group-hover:flex">
<SummaryStatus status={doc.summary_index_status} />
</div>
)}
<div className="hidden shrink-0 group-hover:ml-auto group-hover:flex">
<Tooltip popupContent={t('list.table.rename', { ns: 'datasetDocuments' })}>
<div
className="cursor-pointer rounded-md p-1 hover:bg-state-base-hover"
onClick={handleRenameClick}
>
<RiEditLine className="h-4 w-4 text-text-tertiary" />
</div>
</Tooltip>
</div>
</div>
</td>
<td>
<ChunkingModeLabel
isGeneralMode={isGeneralMode}
isQAMode={isQAMode}
/>
</td>
<td>{renderCount(doc.word_count)}</td>
<td>{renderCount(doc.hit_count)}</td>
<td className="text-[13px] text-text-secondary">
{formatTime(doc.created_at, t('dateTimeFormat', { ns: 'datasetHitTesting' }) as string)}
</td>
<td>
<StatusItem status={doc.display_status} />
</td>
<td>
<Operations
selectedIds={selectedIds}
onSelectedIdChange={onSelectedIdChange}
embeddingAvailable={embeddingAvailable}
datasetId={datasetId}
detail={pick(doc, ['name', 'enabled', 'archived', 'id', 'data_source_type', 'doc_form', 'display_status'])}
onUpdate={onUpdate}
/>
</td>
</tr>
)
})
DocumentTableRow.displayName = 'DocumentTableRow'
export default DocumentTableRow
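For reference, how the renderCount helper above formats the word/hit count cells; the values are illustrative and the results are read off its branches:
// renderCount(undefined) -> renderTdValue(0, true)  (muted zero style)
// renderCount(0)         -> renderTdValue(0, true)  (0 is falsy, same branch)
// renderCount(999)       -> 999                     (below 1000, rendered as-is)
// renderCount(1500)      -> '1.5k'                  (formatNumber((1500 / 1000).toFixed(1)) + 'k')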

View File

@@ -1,4 +0,0 @@
export { default as DocumentSourceIcon } from './document-source-icon'
export { default as DocumentTableRow } from './document-table-row'
export { default as SortHeader } from './sort-header'
export { renderTdValue } from './utils'

View File

@@ -1,124 +0,0 @@
import { fireEvent, render, screen } from '@testing-library/react'
import { describe, expect, it, vi } from 'vitest'
import SortHeader from './sort-header'
describe('SortHeader', () => {
const defaultProps = {
field: 'name' as const,
label: 'File Name',
currentSortField: null,
sortOrder: 'desc' as const,
onSort: vi.fn(),
}
describe('rendering', () => {
it('should render the label', () => {
render(<SortHeader {...defaultProps} />)
expect(screen.getByText('File Name')).toBeInTheDocument()
})
it('should render the sort icon', () => {
const { container } = render(<SortHeader {...defaultProps} />)
const icon = container.querySelector('svg')
expect(icon).toBeInTheDocument()
})
})
describe('inactive state', () => {
it('should have disabled text color when not active', () => {
const { container } = render(<SortHeader {...defaultProps} />)
const icon = container.querySelector('svg')
expect(icon).toHaveClass('text-text-disabled')
})
it('should not be rotated when not active', () => {
const { container } = render(<SortHeader {...defaultProps} />)
const icon = container.querySelector('svg')
expect(icon).not.toHaveClass('rotate-180')
})
})
describe('active state', () => {
it('should have tertiary text color when active', () => {
const { container } = render(
<SortHeader {...defaultProps} currentSortField="name" />,
)
const icon = container.querySelector('svg')
expect(icon).toHaveClass('text-text-tertiary')
})
it('should not be rotated when active and desc', () => {
const { container } = render(
<SortHeader {...defaultProps} currentSortField="name" sortOrder="desc" />,
)
const icon = container.querySelector('svg')
expect(icon).not.toHaveClass('rotate-180')
})
it('should be rotated when active and asc', () => {
const { container } = render(
<SortHeader {...defaultProps} currentSortField="name" sortOrder="asc" />,
)
const icon = container.querySelector('svg')
expect(icon).toHaveClass('rotate-180')
})
})
describe('interaction', () => {
it('should call onSort when clicked', () => {
const onSort = vi.fn()
render(<SortHeader {...defaultProps} onSort={onSort} />)
fireEvent.click(screen.getByText('File Name'))
expect(onSort).toHaveBeenCalledWith('name')
})
it('should call onSort with correct field', () => {
const onSort = vi.fn()
render(<SortHeader {...defaultProps} field="word_count" onSort={onSort} />)
fireEvent.click(screen.getByText('File Name'))
expect(onSort).toHaveBeenCalledWith('word_count')
})
})
describe('different fields', () => {
it('should work with word_count field', () => {
render(
<SortHeader
{...defaultProps}
field="word_count"
label="Words"
currentSortField="word_count"
/>,
)
expect(screen.getByText('Words')).toBeInTheDocument()
})
it('should work with hit_count field', () => {
render(
<SortHeader
{...defaultProps}
field="hit_count"
label="Hit Count"
currentSortField="hit_count"
/>,
)
expect(screen.getByText('Hit Count')).toBeInTheDocument()
})
it('should work with created_at field', () => {
render(
<SortHeader
{...defaultProps}
field="created_at"
label="Upload Time"
currentSortField="created_at"
/>,
)
expect(screen.getByText('Upload Time')).toBeInTheDocument()
})
})
})

View File

@@ -1,44 +0,0 @@
import type { FC } from 'react'
import type { SortField, SortOrder } from '../hooks'
import { RiArrowDownLine } from '@remixicon/react'
import * as React from 'react'
import { cn } from '@/utils/classnames'
type SortHeaderProps = {
field: Exclude<SortField, null>
label: string
currentSortField: SortField
sortOrder: SortOrder
onSort: (field: SortField) => void
}
const SortHeader: FC<SortHeaderProps> = React.memo(({
field,
label,
currentSortField,
sortOrder,
onSort,
}) => {
const isActive = currentSortField === field
const isDesc = isActive && sortOrder === 'desc'
return (
<div
className="flex cursor-pointer items-center hover:text-text-secondary"
onClick={() => onSort(field)}
>
{label}
<RiArrowDownLine
className={cn(
'ml-0.5 h-3 w-3 transition-all',
isActive ? 'text-text-tertiary' : 'text-text-disabled',
isActive && !isDesc ? 'rotate-180' : '',
)}
/>
</div>
)
})
SortHeader.displayName = 'SortHeader'
export default SortHeader
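A minimal usage sketch for SortHeader, assuming it sits in the documents table header and receives its state from useDocumentSort; the label string and surrounding markup are hypothetical:
// <td>
//   <SortHeader
//     field="word_count"
//     label="Words"
//     currentSortField={sortField} // from useDocumentSort
//     sortOrder={sortOrder}        // from useDocumentSort
//     onSort={handleSort}          // toggles asc/desc on repeated clicks
//   />
// </td>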

View File

@@ -1,90 +0,0 @@
import { render, screen } from '@testing-library/react'
import { describe, expect, it } from 'vitest'
import { renderTdValue } from './utils'
describe('renderTdValue', () => {
describe('Rendering', () => {
it('should render string value correctly', () => {
const { container } = render(<>{renderTdValue('test value')}</>)
expect(screen.getByText('test value')).toBeInTheDocument()
expect(container.querySelector('div')).toHaveClass('text-text-secondary')
})
it('should render number value correctly', () => {
const { container } = render(<>{renderTdValue(42)}</>)
expect(screen.getByText('42')).toBeInTheDocument()
expect(container.querySelector('div')).toHaveClass('text-text-secondary')
})
it('should render zero correctly', () => {
const { container } = render(<>{renderTdValue(0)}</>)
expect(screen.getByText('0')).toBeInTheDocument()
expect(container.querySelector('div')).toHaveClass('text-text-secondary')
})
})
describe('Null and undefined handling', () => {
it('should render dash for null value', () => {
render(<>{renderTdValue(null)}</>)
expect(screen.getByText('-')).toBeInTheDocument()
})
it('should render dash for null value with empty style', () => {
const { container } = render(<>{renderTdValue(null, true)}</>)
expect(screen.getByText('-')).toBeInTheDocument()
expect(container.querySelector('div')).toHaveClass('text-text-tertiary')
})
})
describe('Empty style', () => {
it('should apply text-text-tertiary class when isEmptyStyle is true', () => {
const { container } = render(<>{renderTdValue('value', true)}</>)
expect(container.querySelector('div')).toHaveClass('text-text-tertiary')
})
it('should apply text-text-secondary class when isEmptyStyle is false', () => {
const { container } = render(<>{renderTdValue('value', false)}</>)
expect(container.querySelector('div')).toHaveClass('text-text-secondary')
})
it('should apply text-text-secondary class when isEmptyStyle is not provided', () => {
const { container } = render(<>{renderTdValue('value')}</>)
expect(container.querySelector('div')).toHaveClass('text-text-secondary')
})
})
describe('Edge Cases', () => {
it('should handle empty string', () => {
const { container } = render(<>{renderTdValue('')}</>)
// Empty string still renders the wrapper div, just with no visible text;
// query the render container rather than the global document to avoid false matches
expect(container.querySelector('div')).toBeInTheDocument()
})
it('should handle large numbers', () => {
render(<>{renderTdValue(1234567890)}</>)
expect(screen.getByText('1234567890')).toBeInTheDocument()
})
it('should handle negative numbers', () => {
render(<>{renderTdValue(-42)}</>)
expect(screen.getByText('-42')).toBeInTheDocument()
})
it('should handle special characters in string', () => {
render(<>{renderTdValue('<script>alert("xss")</script>')}</>)
expect(screen.getByText('<script>alert("xss")</script>')).toBeInTheDocument()
})
it('should handle unicode characters', () => {
render(<>{renderTdValue('Test Unicode: \u4E2D\u6587')}</>)
expect(screen.getByText('Test Unicode: \u4E2D\u6587')).toBeInTheDocument()
})
it('should handle very long strings', () => {
const longString = 'a'.repeat(1000)
render(<>{renderTdValue(longString)}</>)
expect(screen.getByText(longString)).toBeInTheDocument()
})
})
})

View File

@@ -1,16 +0,0 @@
import type { ReactNode } from 'react'
import { cn } from '@/utils/classnames'
import s from '../../../style.module.css'
export const renderTdValue = (value: string | number | null, isEmptyStyle = false): ReactNode => {
const className = cn(
isEmptyStyle ? 'text-text-tertiary' : 'text-text-secondary',
s.tdValue,
)
return (
<div className={className}>
{value ?? '-'}
</div>
)
}
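Worked examples of renderTdValue; the outputs are described informally, and the tdValue class comes from the CSS module, so its runtime name is hashed:
// renderTdValue('test.txt') -> <div class="text-text-secondary ...">test.txt</div>
// renderTdValue(null)       -> <div class="text-text-secondary ...">-</div>  (value ?? '-')
// renderTdValue(0, true)    -> <div class="text-text-tertiary ...">0</div>   (0 is nullish-safe, muted style)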

View File

@@ -1,4 +0,0 @@
export { useDocumentActions } from './use-document-actions'
export { useDocumentSelection } from './use-document-selection'
export { useDocumentSort } from './use-document-sort'
export type { SortField, SortOrder } from './use-document-sort'

View File

@@ -1,438 +0,0 @@
import type { ReactNode } from 'react'
import { QueryClient, QueryClientProvider } from '@tanstack/react-query'
import { act, renderHook, waitFor } from '@testing-library/react'
import { beforeEach, describe, expect, it, vi } from 'vitest'
import { DocumentActionType } from '@/models/datasets'
import * as useDocument from '@/service/knowledge/use-document'
import { useDocumentActions } from './use-document-actions'
vi.mock('@/service/knowledge/use-document')
const mockUseDocumentArchive = vi.mocked(useDocument.useDocumentArchive)
const mockUseDocumentSummary = vi.mocked(useDocument.useDocumentSummary)
const mockUseDocumentEnable = vi.mocked(useDocument.useDocumentEnable)
const mockUseDocumentDisable = vi.mocked(useDocument.useDocumentDisable)
const mockUseDocumentDelete = vi.mocked(useDocument.useDocumentDelete)
const mockUseDocumentBatchRetryIndex = vi.mocked(useDocument.useDocumentBatchRetryIndex)
const mockUseDocumentDownloadZip = vi.mocked(useDocument.useDocumentDownloadZip)
const createTestQueryClient = () => new QueryClient({
defaultOptions: {
queries: { retry: false },
mutations: { retry: false },
},
})
const createWrapper = () => {
const queryClient = createTestQueryClient()
return ({ children }: { children: ReactNode }) => (
<QueryClientProvider client={queryClient}>
{children}
</QueryClientProvider>
)
}
describe('useDocumentActions', () => {
const mockMutateAsync = vi.fn()
beforeEach(() => {
vi.clearAllMocks()
// Set up all mocks with default values
const createMockMutation = () => ({
mutateAsync: mockMutateAsync,
isPending: false,
isError: false,
isSuccess: false,
isIdle: true,
data: undefined,
error: null,
mutate: vi.fn(),
reset: vi.fn(),
status: 'idle' as const,
variables: undefined,
context: undefined,
failureCount: 0,
failureReason: null,
submittedAt: 0,
})
mockUseDocumentArchive.mockReturnValue(createMockMutation() as unknown as ReturnType<typeof useDocument.useDocumentArchive>)
mockUseDocumentSummary.mockReturnValue(createMockMutation() as unknown as ReturnType<typeof useDocument.useDocumentSummary>)
mockUseDocumentEnable.mockReturnValue(createMockMutation() as unknown as ReturnType<typeof useDocument.useDocumentEnable>)
mockUseDocumentDisable.mockReturnValue(createMockMutation() as unknown as ReturnType<typeof useDocument.useDocumentDisable>)
mockUseDocumentDelete.mockReturnValue(createMockMutation() as unknown as ReturnType<typeof useDocument.useDocumentDelete>)
mockUseDocumentBatchRetryIndex.mockReturnValue(createMockMutation() as unknown as ReturnType<typeof useDocument.useDocumentBatchRetryIndex>)
mockUseDocumentDownloadZip.mockReturnValue({
...createMockMutation(),
isPending: false,
} as unknown as ReturnType<typeof useDocument.useDocumentDownloadZip>)
})
describe('handleAction', () => {
it('should call archive mutation when archive action is triggered', async () => {
mockMutateAsync.mockResolvedValue({ result: 'success' })
const onUpdate = vi.fn()
const onClearSelection = vi.fn()
const { result } = renderHook(
() => useDocumentActions({
datasetId: 'ds1',
selectedIds: ['doc1'],
downloadableSelectedIds: [],
onUpdate,
onClearSelection,
}),
{ wrapper: createWrapper() },
)
await act(async () => {
await result.current.handleAction(DocumentActionType.archive)()
})
expect(mockMutateAsync).toHaveBeenCalledWith({
datasetId: 'ds1',
documentIds: ['doc1'],
})
})
it('should call onUpdate on successful action', async () => {
mockMutateAsync.mockResolvedValue({ result: 'success' })
const onUpdate = vi.fn()
const onClearSelection = vi.fn()
const { result } = renderHook(
() => useDocumentActions({
datasetId: 'ds1',
selectedIds: ['doc1'],
downloadableSelectedIds: [],
onUpdate,
onClearSelection,
}),
{ wrapper: createWrapper() },
)
await act(async () => {
await result.current.handleAction(DocumentActionType.enable)()
})
await waitFor(() => {
expect(onUpdate).toHaveBeenCalled()
})
})
it('should call onClearSelection on delete action', async () => {
mockMutateAsync.mockResolvedValue({ result: 'success' })
const onUpdate = vi.fn()
const onClearSelection = vi.fn()
const { result } = renderHook(
() => useDocumentActions({
datasetId: 'ds1',
selectedIds: ['doc1'],
downloadableSelectedIds: [],
onUpdate,
onClearSelection,
}),
{ wrapper: createWrapper() },
)
await act(async () => {
await result.current.handleAction(DocumentActionType.delete)()
})
await waitFor(() => {
expect(onClearSelection).toHaveBeenCalled()
})
})
})
describe('handleBatchReIndex', () => {
it('should call retry index mutation', async () => {
mockMutateAsync.mockResolvedValue({ result: 'success' })
const onUpdate = vi.fn()
const onClearSelection = vi.fn()
const { result } = renderHook(
() => useDocumentActions({
datasetId: 'ds1',
selectedIds: ['doc1', 'doc2'],
downloadableSelectedIds: [],
onUpdate,
onClearSelection,
}),
{ wrapper: createWrapper() },
)
await act(async () => {
await result.current.handleBatchReIndex()
})
expect(mockMutateAsync).toHaveBeenCalledWith({
datasetId: 'ds1',
documentIds: ['doc1', 'doc2'],
})
})
it('should call onClearSelection on success', async () => {
mockMutateAsync.mockResolvedValue({ result: 'success' })
const onUpdate = vi.fn()
const onClearSelection = vi.fn()
const { result } = renderHook(
() => useDocumentActions({
datasetId: 'ds1',
selectedIds: ['doc1'],
downloadableSelectedIds: [],
onUpdate,
onClearSelection,
}),
{ wrapper: createWrapper() },
)
await act(async () => {
await result.current.handleBatchReIndex()
})
await waitFor(() => {
expect(onClearSelection).toHaveBeenCalled()
expect(onUpdate).toHaveBeenCalled()
})
})
})
describe('handleBatchDownload', () => {
it('should not proceed when already downloading', async () => {
mockUseDocumentDownloadZip.mockReturnValue({
mutateAsync: mockMutateAsync,
isPending: true,
} as unknown as ReturnType<typeof useDocument.useDocumentDownloadZip>)
const { result } = renderHook(
() => useDocumentActions({
datasetId: 'ds1',
selectedIds: ['doc1'],
downloadableSelectedIds: ['doc1'],
onUpdate: vi.fn(),
onClearSelection: vi.fn(),
}),
{ wrapper: createWrapper() },
)
await act(async () => {
await result.current.handleBatchDownload()
})
expect(mockMutateAsync).not.toHaveBeenCalled()
})
it('should call download mutation with downloadable ids', async () => {
const mockBlob = new Blob(['test'])
mockMutateAsync.mockResolvedValue(mockBlob)
mockUseDocumentDownloadZip.mockReturnValue({
mutateAsync: mockMutateAsync,
isPending: false,
} as unknown as ReturnType<typeof useDocument.useDocumentDownloadZip>)
const { result } = renderHook(
() => useDocumentActions({
datasetId: 'ds1',
selectedIds: ['doc1', 'doc2'],
downloadableSelectedIds: ['doc1'],
onUpdate: vi.fn(),
onClearSelection: vi.fn(),
}),
{ wrapper: createWrapper() },
)
await act(async () => {
await result.current.handleBatchDownload()
})
expect(mockMutateAsync).toHaveBeenCalledWith({
datasetId: 'ds1',
documentIds: ['doc1'],
})
})
})
describe('isDownloadingZip', () => {
it('should reflect isPending state from mutation', () => {
mockUseDocumentDownloadZip.mockReturnValue({
mutateAsync: mockMutateAsync,
isPending: true,
} as unknown as ReturnType<typeof useDocument.useDocumentDownloadZip>)
const { result } = renderHook(
() => useDocumentActions({
datasetId: 'ds1',
selectedIds: [],
downloadableSelectedIds: [],
onUpdate: vi.fn(),
onClearSelection: vi.fn(),
}),
{ wrapper: createWrapper() },
)
expect(result.current.isDownloadingZip).toBe(true)
})
})
describe('error handling', () => {
it('should show error toast when handleAction fails', async () => {
mockMutateAsync.mockRejectedValue(new Error('Action failed'))
const onUpdate = vi.fn()
const onClearSelection = vi.fn()
const { result } = renderHook(
() => useDocumentActions({
datasetId: 'ds1',
selectedIds: ['doc1'],
downloadableSelectedIds: [],
onUpdate,
onClearSelection,
}),
{ wrapper: createWrapper() },
)
await act(async () => {
await result.current.handleAction(DocumentActionType.archive)()
})
// onUpdate should not be called on error
expect(onUpdate).not.toHaveBeenCalled()
})
it('should show error toast when handleBatchReIndex fails', async () => {
mockMutateAsync.mockRejectedValue(new Error('Re-index failed'))
const onUpdate = vi.fn()
const onClearSelection = vi.fn()
const { result } = renderHook(
() => useDocumentActions({
datasetId: 'ds1',
selectedIds: ['doc1'],
downloadableSelectedIds: [],
onUpdate,
onClearSelection,
}),
{ wrapper: createWrapper() },
)
await act(async () => {
await result.current.handleBatchReIndex()
})
// onUpdate and onClearSelection should not be called on error
expect(onUpdate).not.toHaveBeenCalled()
expect(onClearSelection).not.toHaveBeenCalled()
})
it('should show error toast when handleBatchDownload fails', async () => {
mockMutateAsync.mockRejectedValue(new Error('Download failed'))
mockUseDocumentDownloadZip.mockReturnValue({
mutateAsync: mockMutateAsync,
isPending: false,
} as unknown as ReturnType<typeof useDocument.useDocumentDownloadZip>)
const { result } = renderHook(
() => useDocumentActions({
datasetId: 'ds1',
selectedIds: ['doc1'],
downloadableSelectedIds: ['doc1'],
onUpdate: vi.fn(),
onClearSelection: vi.fn(),
}),
{ wrapper: createWrapper() },
)
await act(async () => {
await result.current.handleBatchDownload()
})
// Mutation was called but failed
expect(mockMutateAsync).toHaveBeenCalled()
})
it('should show error toast when handleBatchDownload returns null blob', async () => {
mockMutateAsync.mockResolvedValue(null)
mockUseDocumentDownloadZip.mockReturnValue({
mutateAsync: mockMutateAsync,
isPending: false,
} as unknown as ReturnType<typeof useDocument.useDocumentDownloadZip>)
const { result } = renderHook(
() => useDocumentActions({
datasetId: 'ds1',
selectedIds: ['doc1'],
downloadableSelectedIds: ['doc1'],
onUpdate: vi.fn(),
onClearSelection: vi.fn(),
}),
{ wrapper: createWrapper() },
)
await act(async () => {
await result.current.handleBatchDownload()
})
// Mutation was called but returned null
expect(mockMutateAsync).toHaveBeenCalled()
})
})
describe('all action types', () => {
it('should handle summary action', async () => {
mockMutateAsync.mockResolvedValue({ result: 'success' })
const onUpdate = vi.fn()
const { result } = renderHook(
() => useDocumentActions({
datasetId: 'ds1',
selectedIds: ['doc1'],
downloadableSelectedIds: [],
onUpdate,
onClearSelection: vi.fn(),
}),
{ wrapper: createWrapper() },
)
await act(async () => {
await result.current.handleAction(DocumentActionType.summary)()
})
expect(mockMutateAsync).toHaveBeenCalled()
await waitFor(() => {
expect(onUpdate).toHaveBeenCalled()
})
})
it('should handle disable action', async () => {
mockMutateAsync.mockResolvedValue({ result: 'success' })
const onUpdate = vi.fn()
const { result } = renderHook(
() => useDocumentActions({
datasetId: 'ds1',
selectedIds: ['doc1'],
downloadableSelectedIds: [],
onUpdate,
onClearSelection: vi.fn(),
}),
{ wrapper: createWrapper() },
)
await act(async () => {
await result.current.handleAction(DocumentActionType.disable)()
})
expect(mockMutateAsync).toHaveBeenCalled()
await waitFor(() => {
expect(onUpdate).toHaveBeenCalled()
})
})
})
})

View File

@@ -1,126 +0,0 @@
import type { CommonResponse } from '@/models/common'
import { useCallback, useMemo } from 'react'
import { useTranslation } from 'react-i18next'
import Toast from '@/app/components/base/toast'
import { DocumentActionType } from '@/models/datasets'
import {
useDocumentArchive,
useDocumentBatchRetryIndex,
useDocumentDelete,
useDocumentDisable,
useDocumentDownloadZip,
useDocumentEnable,
useDocumentSummary,
} from '@/service/knowledge/use-document'
import { asyncRunSafe } from '@/utils'
import { downloadBlob } from '@/utils/download'
type UseDocumentActionsOptions = {
datasetId: string
selectedIds: string[]
downloadableSelectedIds: string[]
onUpdate: () => void
onClearSelection: () => void
}
/**
* Generate a random ZIP filename for bulk document downloads.
* We intentionally avoid leaking dataset info in the exported archive name.
*/
const generateDocsZipFileName = (): string => {
const randomPart = (typeof crypto !== 'undefined' && typeof crypto.randomUUID === 'function')
? crypto.randomUUID()
: `${Date.now().toString(36)}${Math.random().toString(36).slice(2, 10)}`
return `${randomPart}-docs.zip`
}
export const useDocumentActions = ({
datasetId,
selectedIds,
downloadableSelectedIds,
onUpdate,
onClearSelection,
}: UseDocumentActionsOptions) => {
const { t } = useTranslation()
const { mutateAsync: archiveDocument } = useDocumentArchive()
const { mutateAsync: generateSummary } = useDocumentSummary()
const { mutateAsync: enableDocument } = useDocumentEnable()
const { mutateAsync: disableDocument } = useDocumentDisable()
const { mutateAsync: deleteDocument } = useDocumentDelete()
const { mutateAsync: retryIndexDocument } = useDocumentBatchRetryIndex()
const { mutateAsync: requestDocumentsZip, isPending: isDownloadingZip } = useDocumentDownloadZip()
type SupportedActionType
= | typeof DocumentActionType.archive
| typeof DocumentActionType.summary
| typeof DocumentActionType.enable
| typeof DocumentActionType.disable
| typeof DocumentActionType.delete
const actionMutationMap = useMemo(() => ({
[DocumentActionType.archive]: archiveDocument,
[DocumentActionType.summary]: generateSummary,
[DocumentActionType.enable]: enableDocument,
[DocumentActionType.disable]: disableDocument,
[DocumentActionType.delete]: deleteDocument,
} as const), [archiveDocument, generateSummary, enableDocument, disableDocument, deleteDocument])
const handleAction = useCallback((actionName: SupportedActionType) => {
return async () => {
const opApi = actionMutationMap[actionName]
if (!opApi)
return
const [e] = await asyncRunSafe<CommonResponse>(
opApi({ datasetId, documentIds: selectedIds }),
)
if (!e) {
if (actionName === DocumentActionType.delete)
onClearSelection()
Toast.notify({ type: 'success', message: t('actionMsg.modifiedSuccessfully', { ns: 'common' }) })
onUpdate()
}
else {
Toast.notify({ type: 'error', message: t('actionMsg.modifiedUnsuccessfully', { ns: 'common' }) })
}
}
}, [actionMutationMap, datasetId, selectedIds, onClearSelection, onUpdate, t])
const handleBatchReIndex = useCallback(async () => {
const [e] = await asyncRunSafe<CommonResponse>(
retryIndexDocument({ datasetId, documentIds: selectedIds }),
)
if (!e) {
onClearSelection()
Toast.notify({ type: 'success', message: t('actionMsg.modifiedSuccessfully', { ns: 'common' }) })
onUpdate()
}
else {
Toast.notify({ type: 'error', message: t('actionMsg.modifiedUnsuccessfully', { ns: 'common' }) })
}
}, [retryIndexDocument, datasetId, selectedIds, onClearSelection, onUpdate, t])
const handleBatchDownload = useCallback(async () => {
if (isDownloadingZip)
return
const [e, blob] = await asyncRunSafe(
requestDocumentsZip({ datasetId, documentIds: downloadableSelectedIds }),
)
if (e || !blob) {
Toast.notify({ type: 'error', message: t('actionMsg.downloadUnsuccessfully', { ns: 'common' }) })
return
}
downloadBlob({ data: blob, fileName: generateDocsZipFileName() })
}, [datasetId, downloadableSelectedIds, isDownloadingZip, requestDocumentsZip, t])
return {
handleAction,
handleBatchReIndex,
handleBatchDownload,
isDownloadingZip,
}
}
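A hedged wiring sketch for useDocumentActions; refetchDocuments and the button markup are hypothetical, while the option and return names match the hook above:
// const {
//   handleAction,
//   handleBatchReIndex,
//   handleBatchDownload,
//   isDownloadingZip,
// } = useDocumentActions({
//   datasetId,
//   selectedIds,
//   downloadableSelectedIds,          // e.g. from useDocumentSelection
//   onUpdate: refetchDocuments,       // hypothetical list refetch
//   onClearSelection: clearSelection, // e.g. from useDocumentSelection
// })
// handleAction(actionName) returns an async thunk, so it wires straight into onClick:
// <button onClick={handleAction(DocumentActionType.archive)} />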

View File

@@ -1,317 +0,0 @@
import type { SimpleDocumentDetail } from '@/models/datasets'
import { act, renderHook } from '@testing-library/react'
import { describe, expect, it, vi } from 'vitest'
import { DataSourceType } from '@/models/datasets'
import { useDocumentSelection } from './use-document-selection'
type LocalDoc = SimpleDocumentDetail & { percent?: number }
const createMockDocument = (overrides: Partial<LocalDoc> = {}): LocalDoc => ({
id: 'doc1',
name: 'Test Document',
data_source_type: DataSourceType.FILE,
data_source_info: {},
data_source_detail_dict: {},
word_count: 100,
hit_count: 10,
created_at: 1000000,
position: 1,
doc_form: 'text_model',
enabled: true,
archived: false,
display_status: 'available',
created_from: 'api',
...overrides,
} as LocalDoc)
describe('useDocumentSelection', () => {
describe('isAllSelected', () => {
it('should return false when documents is empty', () => {
const onSelectedIdChange = vi.fn()
const { result } = renderHook(() =>
useDocumentSelection({
documents: [],
selectedIds: [],
onSelectedIdChange,
}),
)
expect(result.current.isAllSelected).toBe(false)
})
it('should return true when all documents are selected', () => {
const docs = [
createMockDocument({ id: 'doc1' }),
createMockDocument({ id: 'doc2' }),
]
const onSelectedIdChange = vi.fn()
const { result } = renderHook(() =>
useDocumentSelection({
documents: docs,
selectedIds: ['doc1', 'doc2'],
onSelectedIdChange,
}),
)
expect(result.current.isAllSelected).toBe(true)
})
it('should return false when not all documents are selected', () => {
const docs = [
createMockDocument({ id: 'doc1' }),
createMockDocument({ id: 'doc2' }),
]
const onSelectedIdChange = vi.fn()
const { result } = renderHook(() =>
useDocumentSelection({
documents: docs,
selectedIds: ['doc1'],
onSelectedIdChange,
}),
)
expect(result.current.isAllSelected).toBe(false)
})
})
describe('isSomeSelected', () => {
it('should return false when no documents are selected', () => {
const docs = [createMockDocument({ id: 'doc1' })]
const onSelectedIdChange = vi.fn()
const { result } = renderHook(() =>
useDocumentSelection({
documents: docs,
selectedIds: [],
onSelectedIdChange,
}),
)
expect(result.current.isSomeSelected).toBe(false)
})
it('should return true when some documents are selected', () => {
const docs = [
createMockDocument({ id: 'doc1' }),
createMockDocument({ id: 'doc2' }),
]
const onSelectedIdChange = vi.fn()
const { result } = renderHook(() =>
useDocumentSelection({
documents: docs,
selectedIds: ['doc1'],
onSelectedIdChange,
}),
)
expect(result.current.isSomeSelected).toBe(true)
})
})
describe('onSelectAll', () => {
it('should select all documents when none are selected', () => {
const docs = [
createMockDocument({ id: 'doc1' }),
createMockDocument({ id: 'doc2' }),
]
const onSelectedIdChange = vi.fn()
const { result } = renderHook(() =>
useDocumentSelection({
documents: docs,
selectedIds: [],
onSelectedIdChange,
}),
)
act(() => {
result.current.onSelectAll()
})
expect(onSelectedIdChange).toHaveBeenCalledWith(['doc1', 'doc2'])
})
it('should deselect all when all are selected', () => {
const docs = [
createMockDocument({ id: 'doc1' }),
createMockDocument({ id: 'doc2' }),
]
const onSelectedIdChange = vi.fn()
const { result } = renderHook(() =>
useDocumentSelection({
documents: docs,
selectedIds: ['doc1', 'doc2'],
onSelectedIdChange,
}),
)
act(() => {
result.current.onSelectAll()
})
expect(onSelectedIdChange).toHaveBeenCalledWith([])
})
it('should add to existing selection when some are selected', () => {
const docs = [
createMockDocument({ id: 'doc1' }),
createMockDocument({ id: 'doc2' }),
createMockDocument({ id: 'doc3' }),
]
const onSelectedIdChange = vi.fn()
const { result } = renderHook(() =>
useDocumentSelection({
documents: docs,
selectedIds: ['doc1'],
onSelectedIdChange,
}),
)
act(() => {
result.current.onSelectAll()
})
expect(onSelectedIdChange).toHaveBeenCalledWith(['doc1', 'doc2', 'doc3'])
})
})
describe('onSelectOne', () => {
it('should add document to selection when not selected', () => {
const onSelectedIdChange = vi.fn()
const { result } = renderHook(() =>
useDocumentSelection({
documents: [],
selectedIds: [],
onSelectedIdChange,
}),
)
act(() => {
result.current.onSelectOne('doc1')
})
expect(onSelectedIdChange).toHaveBeenCalledWith(['doc1'])
})
it('should remove document from selection when already selected', () => {
const onSelectedIdChange = vi.fn()
const { result } = renderHook(() =>
useDocumentSelection({
documents: [],
selectedIds: ['doc1', 'doc2'],
onSelectedIdChange,
}),
)
act(() => {
result.current.onSelectOne('doc1')
})
expect(onSelectedIdChange).toHaveBeenCalledWith(['doc2'])
})
})
describe('hasErrorDocumentsSelected', () => {
it('should return false when no error documents are selected', () => {
const docs = [
createMockDocument({ id: 'doc1', display_status: 'available' }),
createMockDocument({ id: 'doc2', display_status: 'error' }),
]
const onSelectedIdChange = vi.fn()
const { result } = renderHook(() =>
useDocumentSelection({
documents: docs,
selectedIds: ['doc1'],
onSelectedIdChange,
}),
)
expect(result.current.hasErrorDocumentsSelected).toBe(false)
})
it('should return true when an error document is selected', () => {
const docs = [
createMockDocument({ id: 'doc1', display_status: 'available' }),
createMockDocument({ id: 'doc2', display_status: 'error' }),
]
const onSelectedIdChange = vi.fn()
const { result } = renderHook(() =>
useDocumentSelection({
documents: docs,
selectedIds: ['doc2'],
onSelectedIdChange,
}),
)
expect(result.current.hasErrorDocumentsSelected).toBe(true)
})
})
describe('downloadableSelectedIds', () => {
it('should return only FILE type documents from selection', () => {
const docs = [
createMockDocument({ id: 'doc1', data_source_type: DataSourceType.FILE }),
createMockDocument({ id: 'doc2', data_source_type: DataSourceType.NOTION }),
createMockDocument({ id: 'doc3', data_source_type: DataSourceType.FILE }),
]
const onSelectedIdChange = vi.fn()
const { result } = renderHook(() =>
useDocumentSelection({
documents: docs,
selectedIds: ['doc1', 'doc2', 'doc3'],
onSelectedIdChange,
}),
)
expect(result.current.downloadableSelectedIds).toEqual(['doc1', 'doc3'])
})
it('should return empty array when no FILE documents selected', () => {
const docs = [
createMockDocument({ id: 'doc1', data_source_type: DataSourceType.NOTION }),
createMockDocument({ id: 'doc2', data_source_type: DataSourceType.WEB }),
]
const onSelectedIdChange = vi.fn()
const { result } = renderHook(() =>
useDocumentSelection({
documents: docs,
selectedIds: ['doc1', 'doc2'],
onSelectedIdChange,
}),
)
expect(result.current.downloadableSelectedIds).toEqual([])
})
})
describe('clearSelection', () => {
it('should call onSelectedIdChange with empty array', () => {
const onSelectedIdChange = vi.fn()
const { result } = renderHook(() =>
useDocumentSelection({
documents: [],
selectedIds: ['doc1', 'doc2'],
onSelectedIdChange,
}),
)
act(() => {
result.current.clearSelection()
})
expect(onSelectedIdChange).toHaveBeenCalledWith([])
})
})
})

View File

@@ -1,66 +0,0 @@
import type { SimpleDocumentDetail } from '@/models/datasets'
import { uniq } from 'es-toolkit/array'
import { useCallback, useMemo } from 'react'
import { DataSourceType } from '@/models/datasets'
type LocalDoc = SimpleDocumentDetail & { percent?: number }
type UseDocumentSelectionOptions = {
documents: LocalDoc[]
selectedIds: string[]
onSelectedIdChange: (selectedIds: string[]) => void
}
export const useDocumentSelection = ({
documents,
selectedIds,
onSelectedIdChange,
}: UseDocumentSelectionOptions) => {
const isAllSelected = useMemo(() => {
return documents.length > 0 && documents.every(doc => selectedIds.includes(doc.id))
}, [documents, selectedIds])
const isSomeSelected = useMemo(() => {
return documents.some(doc => selectedIds.includes(doc.id))
}, [documents, selectedIds])
const onSelectAll = useCallback(() => {
if (isAllSelected)
onSelectedIdChange([])
else
onSelectedIdChange(uniq([...selectedIds, ...documents.map(doc => doc.id)]))
}, [isAllSelected, documents, onSelectedIdChange, selectedIds])
const onSelectOne = useCallback((docId: string) => {
onSelectedIdChange(
selectedIds.includes(docId)
? selectedIds.filter(id => id !== docId)
: [...selectedIds, docId],
)
}, [selectedIds, onSelectedIdChange])
const hasErrorDocumentsSelected = useMemo(() => {
return documents.some(doc => selectedIds.includes(doc.id) && doc.display_status === 'error')
}, [documents, selectedIds])
const downloadableSelectedIds = useMemo(() => {
const selectedSet = new Set(selectedIds)
return documents
.filter(doc => selectedSet.has(doc.id) && doc.data_source_type === DataSourceType.FILE)
.map(doc => doc.id)
}, [documents, selectedIds])
const clearSelection = useCallback(() => {
onSelectedIdChange([])
}, [onSelectedIdChange])
return {
isAllSelected,
isSomeSelected,
onSelectAll,
onSelectOne,
hasErrorDocumentsSelected,
downloadableSelectedIds,
clearSelection,
}
}
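A minimal sketch of how useDocumentSelection plugs into local state; the useState holder is an assumption, while the option and return names are from the hook:
// const [selectedIds, setSelectedIds] = useState<string[]>([])
// const {
//   isAllSelected,
//   isSomeSelected,
//   onSelectAll,              // all selected -> clear; otherwise union-select the page
//   onSelectOne,
//   hasErrorDocumentsSelected,
//   downloadableSelectedIds,  // only DataSourceType.FILE docs are downloadable
//   clearSelection,
// } = useDocumentSelection({ documents, selectedIds, onSelectedIdChange: setSelectedIds })
// isSomeSelected && !isAllSelected maps to the header checkbox's indeterminate state.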

View File

@@ -1,340 +0,0 @@
import type { SimpleDocumentDetail } from '@/models/datasets'
import { act, renderHook } from '@testing-library/react'
import { describe, expect, it } from 'vitest'
import { useDocumentSort } from './use-document-sort'
type LocalDoc = SimpleDocumentDetail & { percent?: number }
const createMockDocument = (overrides: Partial<LocalDoc> = {}): LocalDoc => ({
id: 'doc1',
name: 'Test Document',
data_source_type: 'upload_file',
data_source_info: {},
data_source_detail_dict: {},
word_count: 100,
hit_count: 10,
created_at: 1000000,
position: 1,
doc_form: 'text_model',
enabled: true,
archived: false,
display_status: 'available',
created_from: 'api',
...overrides,
} as LocalDoc)
describe('useDocumentSort', () => {
describe('initial state', () => {
it('should return null sortField initially', () => {
const { result } = renderHook(() =>
useDocumentSort({
documents: [],
statusFilterValue: '',
remoteSortValue: '',
}),
)
expect(result.current.sortField).toBeNull()
expect(result.current.sortOrder).toBe('desc')
})
it('should return documents unchanged when no sort is applied', () => {
const docs = [
createMockDocument({ id: 'doc1', name: 'B' }),
createMockDocument({ id: 'doc2', name: 'A' }),
]
const { result } = renderHook(() =>
useDocumentSort({
documents: docs,
statusFilterValue: '',
remoteSortValue: '',
}),
)
expect(result.current.sortedDocuments).toEqual(docs)
})
})
describe('handleSort', () => {
it('should set sort field when called', () => {
const { result } = renderHook(() =>
useDocumentSort({
documents: [],
statusFilterValue: '',
remoteSortValue: '',
}),
)
act(() => {
result.current.handleSort('name')
})
expect(result.current.sortField).toBe('name')
expect(result.current.sortOrder).toBe('desc')
})
it('should toggle sort order when same field is clicked twice', () => {
const { result } = renderHook(() =>
useDocumentSort({
documents: [],
statusFilterValue: '',
remoteSortValue: '',
}),
)
act(() => {
result.current.handleSort('name')
})
expect(result.current.sortOrder).toBe('desc')
act(() => {
result.current.handleSort('name')
})
expect(result.current.sortOrder).toBe('asc')
act(() => {
result.current.handleSort('name')
})
expect(result.current.sortOrder).toBe('desc')
})
it('should reset to desc when different field is selected', () => {
const { result } = renderHook(() =>
useDocumentSort({
documents: [],
statusFilterValue: '',
remoteSortValue: '',
}),
)
act(() => {
result.current.handleSort('name')
})
act(() => {
result.current.handleSort('name')
})
expect(result.current.sortOrder).toBe('asc')
act(() => {
result.current.handleSort('word_count')
})
expect(result.current.sortField).toBe('word_count')
expect(result.current.sortOrder).toBe('desc')
})
it('should not change state when null is passed', () => {
const { result } = renderHook(() =>
useDocumentSort({
documents: [],
statusFilterValue: '',
remoteSortValue: '',
}),
)
act(() => {
result.current.handleSort(null)
})
expect(result.current.sortField).toBeNull()
})
})
describe('sorting documents', () => {
const docs = [
createMockDocument({ id: 'doc1', name: 'Banana', word_count: 200, hit_count: 5, created_at: 3000 }),
createMockDocument({ id: 'doc2', name: 'Apple', word_count: 100, hit_count: 10, created_at: 1000 }),
createMockDocument({ id: 'doc3', name: 'Cherry', word_count: 300, hit_count: 1, created_at: 2000 }),
]
it('should sort by name descending', () => {
const { result } = renderHook(() =>
useDocumentSort({
documents: docs,
statusFilterValue: '',
remoteSortValue: '',
}),
)
act(() => {
result.current.handleSort('name')
})
const names = result.current.sortedDocuments.map(d => d.name)
expect(names).toEqual(['Cherry', 'Banana', 'Apple'])
})
it('should sort by name ascending', () => {
const { result } = renderHook(() =>
useDocumentSort({
documents: docs,
statusFilterValue: '',
remoteSortValue: '',
}),
)
act(() => {
result.current.handleSort('name')
})
act(() => {
result.current.handleSort('name')
})
const names = result.current.sortedDocuments.map(d => d.name)
expect(names).toEqual(['Apple', 'Banana', 'Cherry'])
})
it('should sort by word_count descending', () => {
const { result } = renderHook(() =>
useDocumentSort({
documents: docs,
statusFilterValue: '',
remoteSortValue: '',
}),
)
act(() => {
result.current.handleSort('word_count')
})
const counts = result.current.sortedDocuments.map(d => d.word_count)
expect(counts).toEqual([300, 200, 100])
})
it('should sort by hit_count ascending', () => {
const { result } = renderHook(() =>
useDocumentSort({
documents: docs,
statusFilterValue: '',
remoteSortValue: '',
}),
)
act(() => {
result.current.handleSort('hit_count')
})
act(() => {
result.current.handleSort('hit_count')
})
const counts = result.current.sortedDocuments.map(d => d.hit_count)
expect(counts).toEqual([1, 5, 10])
})
it('should sort by created_at descending', () => {
const { result } = renderHook(() =>
useDocumentSort({
documents: docs,
statusFilterValue: '',
remoteSortValue: '',
}),
)
act(() => {
result.current.handleSort('created_at')
})
const times = result.current.sortedDocuments.map(d => d.created_at)
expect(times).toEqual([3000, 2000, 1000])
})
})
describe('status filtering', () => {
const docs = [
createMockDocument({ id: 'doc1', display_status: 'available' }),
createMockDocument({ id: 'doc2', display_status: 'error' }),
createMockDocument({ id: 'doc3', display_status: 'available' }),
]
it('should not filter when statusFilterValue is empty', () => {
const { result } = renderHook(() =>
useDocumentSort({
documents: docs,
statusFilterValue: '',
remoteSortValue: '',
}),
)
expect(result.current.sortedDocuments.length).toBe(3)
})
it('should not filter when statusFilterValue is all', () => {
const { result } = renderHook(() =>
useDocumentSort({
documents: docs,
statusFilterValue: 'all',
remoteSortValue: '',
}),
)
expect(result.current.sortedDocuments.length).toBe(3)
})
})
describe('remoteSortValue reset', () => {
it('should reset sort state when remoteSortValue changes', () => {
const { result, rerender } = renderHook(
({ remoteSortValue }) =>
useDocumentSort({
documents: [],
statusFilterValue: '',
remoteSortValue,
}),
{ initialProps: { remoteSortValue: 'initial' } },
)
act(() => {
result.current.handleSort('name')
})
act(() => {
result.current.handleSort('name')
})
expect(result.current.sortField).toBe('name')
expect(result.current.sortOrder).toBe('asc')
rerender({ remoteSortValue: 'changed' })
expect(result.current.sortField).toBeNull()
expect(result.current.sortOrder).toBe('desc')
})
})
describe('edge cases', () => {
it('should handle documents with missing values', () => {
const docs = [
createMockDocument({ id: 'doc1', name: undefined as unknown as string, word_count: undefined }),
createMockDocument({ id: 'doc2', name: 'Test', word_count: 100 }),
]
const { result } = renderHook(() =>
useDocumentSort({
documents: docs,
statusFilterValue: '',
remoteSortValue: '',
}),
)
act(() => {
result.current.handleSort('name')
})
expect(result.current.sortedDocuments.length).toBe(2)
})
it('should handle empty documents array', () => {
const { result } = renderHook(() =>
useDocumentSort({
documents: [],
statusFilterValue: '',
remoteSortValue: '',
}),
)
act(() => {
result.current.handleSort('name')
})
expect(result.current.sortedDocuments).toEqual([])
})
})
})

View File

@@ -1,102 +0,0 @@
import type { SimpleDocumentDetail } from '@/models/datasets'
import { useCallback, useMemo, useRef, useState } from 'react'
import { normalizeStatusForQuery } from '@/app/components/datasets/documents/status-filter'
export type SortField = 'name' | 'word_count' | 'hit_count' | 'created_at' | null
export type SortOrder = 'asc' | 'desc'
type LocalDoc = SimpleDocumentDetail & { percent?: number }
type UseDocumentSortOptions = {
documents: LocalDoc[]
statusFilterValue: string
remoteSortValue: string
}
export const useDocumentSort = ({
documents,
statusFilterValue,
remoteSortValue,
}: UseDocumentSortOptions) => {
const [sortField, setSortField] = useState<SortField>(null)
const [sortOrder, setSortOrder] = useState<SortOrder>('desc')
const prevRemoteSortValueRef = useRef(remoteSortValue)
// Reset sort when remote sort changes. The state is adjusted during render
// (rather than in an effect) so the reset applies before sortedDocuments is computed.
if (prevRemoteSortValueRef.current !== remoteSortValue) {
prevRemoteSortValueRef.current = remoteSortValue
setSortField(null)
setSortOrder('desc')
}
const handleSort = useCallback((field: SortField) => {
if (field === null)
return
if (sortField === field) {
setSortOrder(prev => prev === 'asc' ? 'desc' : 'asc')
}
else {
setSortField(field)
setSortOrder('desc')
}
}, [sortField])
const sortedDocuments = useMemo(() => {
let filteredDocs = documents
if (statusFilterValue && statusFilterValue !== 'all') {
filteredDocs = filteredDocs.filter(doc =>
typeof doc.display_status === 'string'
&& normalizeStatusForQuery(doc.display_status) === statusFilterValue,
)
}
if (!sortField)
return filteredDocs
const sortedDocs = [...filteredDocs].sort((a, b) => {
let aValue: string | number
let bValue: string | number
switch (sortField) {
case 'name':
aValue = a.name?.toLowerCase() || ''
bValue = b.name?.toLowerCase() || ''
break
case 'word_count':
aValue = a.word_count || 0
bValue = b.word_count || 0
break
case 'hit_count':
aValue = a.hit_count || 0
bValue = b.hit_count || 0
break
case 'created_at':
aValue = a.created_at
bValue = b.created_at
break
default:
return 0
}
if (sortField === 'name') {
const result = (aValue as string).localeCompare(bValue as string)
return sortOrder === 'asc' ? result : -result
}
else {
const result = (aValue as number) - (bValue as number)
return sortOrder === 'asc' ? result : -result
}
})
return sortedDocs
}, [documents, sortField, sortOrder, statusFilterValue])
return {
sortField,
sortOrder,
handleSort,
sortedDocuments,
}
}
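A minimal consumption sketch for the hook above (an editor's illustration, not part of this diff; the DocumentTable component and its markup are assumptions):
import type { SimpleDocumentDetail } from '@/models/datasets'
import { useDocumentSort } from './document-list/hooks'
type Props = {
  documents: (SimpleDocumentDetail & { percent?: number })[]
  statusFilterValue: string
  remoteSortValue: string
}
const DocumentTable = ({ documents, statusFilterValue, remoteSortValue }: Props) => {
  const { sortField, sortOrder, handleSort, sortedDocuments } = useDocumentSort({
    documents,
    statusFilterValue,
    remoteSortValue,
  })
  return (
    <table>
      <thead>
        <tr>
          {/* First click on a field sorts desc; clicking the same field again toggles asc */}
          <th onClick={() => handleSort('name')}>
            Name{sortField === 'name' ? ` (${sortOrder})` : ''}
          </th>
        </tr>
      </thead>
      <tbody>
        {sortedDocuments.map(doc => (
          <tr key={doc.id}><td>{doc.name}</td></tr>
        ))}
      </tbody>
    </table>
  )
}
export default DocumentTable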

View File

@@ -1,487 +0,0 @@
import type { ReactNode } from 'react'
import type { Props as PaginationProps } from '@/app/components/base/pagination'
import type { SimpleDocumentDetail } from '@/models/datasets'
import { QueryClient, QueryClientProvider } from '@tanstack/react-query'
import { fireEvent, render, screen } from '@testing-library/react'
import { beforeEach, describe, expect, it, vi } from 'vitest'
import { ChunkingMode, DataSourceType } from '@/models/datasets'
import DocumentList from '../list'
const mockPush = vi.fn()
vi.mock('next/navigation', () => ({
useRouter: () => ({
push: mockPush,
}),
}))
vi.mock('@/context/dataset-detail', () => ({
useDatasetDetailContextWithSelector: (selector: (state: { dataset: { doc_form: string } }) => unknown) =>
selector({ dataset: { doc_form: ChunkingMode.text } }),
}))
const createTestQueryClient = () => new QueryClient({
defaultOptions: {
queries: { retry: false, gcTime: 0 },
mutations: { retry: false },
},
})
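// retry and gcTime are zeroed so failed requests surface immediately and no query
// state is cached across tests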
const createWrapper = () => {
const queryClient = createTestQueryClient()
return ({ children }: { children: ReactNode }) => (
<QueryClientProvider client={queryClient}>
{children}
</QueryClientProvider>
)
}
const createMockDoc = (overrides: Partial<SimpleDocumentDetail> = {}): SimpleDocumentDetail => ({
id: `doc-${Math.random().toString(36).slice(2, 11)}`,
position: 1,
data_source_type: DataSourceType.FILE,
data_source_info: {},
data_source_detail_dict: {
upload_file: { name: 'test.txt', extension: 'txt' },
},
dataset_process_rule_id: 'rule-1',
batch: 'batch-1',
name: 'test-document.txt',
created_from: 'web',
created_by: 'user-1',
created_at: Date.now(),
tokens: 100,
indexing_status: 'completed',
error: null,
enabled: true,
disabled_at: null,
disabled_by: null,
archived: false,
archived_reason: null,
archived_by: null,
archived_at: null,
updated_at: Date.now(),
doc_type: null,
doc_metadata: undefined,
display_status: 'available',
word_count: 500,
hit_count: 10,
doc_form: 'text_model',
...overrides,
} as SimpleDocumentDetail)
const defaultPagination: PaginationProps = {
current: 1,
onChange: vi.fn(),
total: 100,
}
describe('DocumentList', () => {
const defaultProps = {
embeddingAvailable: true,
documents: [
createMockDoc({ id: 'doc-1', name: 'Document 1.txt', word_count: 100, hit_count: 5 }),
createMockDoc({ id: 'doc-2', name: 'Document 2.txt', word_count: 200, hit_count: 10 }),
createMockDoc({ id: 'doc-3', name: 'Document 3.txt', word_count: 300, hit_count: 15 }),
],
selectedIds: [] as string[],
onSelectedIdChange: vi.fn(),
datasetId: 'dataset-1',
pagination: defaultPagination,
onUpdate: vi.fn(),
onManageMetadata: vi.fn(),
statusFilterValue: '',
remoteSortValue: '',
}
beforeEach(() => {
vi.clearAllMocks()
})
describe('Rendering', () => {
it('should render without crashing', () => {
render(<DocumentList {...defaultProps} />, { wrapper: createWrapper() })
expect(screen.getByRole('table')).toBeInTheDocument()
})
it('should render all documents', () => {
render(<DocumentList {...defaultProps} />, { wrapper: createWrapper() })
expect(screen.getByText('Document 1.txt')).toBeInTheDocument()
expect(screen.getByText('Document 2.txt')).toBeInTheDocument()
expect(screen.getByText('Document 3.txt')).toBeInTheDocument()
})
it('should render table headers', () => {
render(<DocumentList {...defaultProps} />, { wrapper: createWrapper() })
expect(screen.getByText('#')).toBeInTheDocument()
})
it('should render pagination when total is provided', () => {
render(<DocumentList {...defaultProps} />, { wrapper: createWrapper() })
// Pagination component should be present
expect(screen.getByRole('table')).toBeInTheDocument()
})
it('should not render pagination when total is 0', () => {
const props = {
...defaultProps,
pagination: { ...defaultPagination, total: 0 },
}
render(<DocumentList {...props} />, { wrapper: createWrapper() })
expect(screen.getByRole('table')).toBeInTheDocument()
})
it('should render empty table when no documents', () => {
const props = { ...defaultProps, documents: [] }
render(<DocumentList {...props} />, { wrapper: createWrapper() })
expect(screen.getByRole('table')).toBeInTheDocument()
})
})
describe('Selection', () => {
// Helper to find checkboxes (custom div components, not native checkboxes)
const findCheckboxes = (container: HTMLElement): NodeListOf<Element> => {
return container.querySelectorAll('[class*="shadow-xs"]')
}
it('should render header checkbox when embeddingAvailable', () => {
const { container } = render(<DocumentList {...defaultProps} />, { wrapper: createWrapper() })
const checkboxes = findCheckboxes(container)
expect(checkboxes.length).toBeGreaterThan(0)
})
it('should not render header checkbox when embedding not available', () => {
const props = { ...defaultProps, embeddingAvailable: false }
render(<DocumentList {...props} />, { wrapper: createWrapper() })
// Row checkboxes should still be there, but header checkbox should be hidden
expect(screen.getByRole('table')).toBeInTheDocument()
})
it('should call onSelectedIdChange when select all is clicked', () => {
const onSelectedIdChange = vi.fn()
const props = { ...defaultProps, onSelectedIdChange }
const { container } = render(<DocumentList {...props} />, { wrapper: createWrapper() })
const checkboxes = findCheckboxes(container)
if (checkboxes.length > 0) {
fireEvent.click(checkboxes[0])
expect(onSelectedIdChange).toHaveBeenCalled()
}
})
it('should show all checkboxes as checked when all are selected', () => {
const props = {
...defaultProps,
selectedIds: ['doc-1', 'doc-2', 'doc-3'],
}
const { container } = render(<DocumentList {...props} />, { wrapper: createWrapper() })
const checkboxes = findCheckboxes(container)
// When checked, checkbox should have a check icon (svg) inside
checkboxes.forEach((checkbox) => {
const checkIcon = checkbox.querySelector('svg')
expect(checkIcon).toBeInTheDocument()
})
})
it('should show indeterminate state when some are selected', () => {
const props = {
...defaultProps,
selectedIds: ['doc-1'],
}
const { container } = render(<DocumentList {...props} />, { wrapper: createWrapper() })
// First checkbox is the header checkbox which should be indeterminate
const checkboxes = findCheckboxes(container)
expect(checkboxes.length).toBeGreaterThan(0)
// Header checkbox should show indeterminate icon, not check icon
// Just verify it's rendered
expect(checkboxes[0]).toBeInTheDocument()
})
it('should call onSelectedIdChange with single document when row checkbox is clicked', () => {
const onSelectedIdChange = vi.fn()
const props = { ...defaultProps, onSelectedIdChange }
const { container } = render(<DocumentList {...props} />, { wrapper: createWrapper() })
// Click the second checkbox (first row checkbox)
const checkboxes = findCheckboxes(container)
if (checkboxes.length > 1) {
fireEvent.click(checkboxes[1])
expect(onSelectedIdChange).toHaveBeenCalled()
}
})
})
describe('Sorting', () => {
it('should render sort headers for sortable columns', () => {
render(<DocumentList {...defaultProps} />, { wrapper: createWrapper() })
// Find svg icons which indicate sortable columns
const sortIcons = document.querySelectorAll('svg')
expect(sortIcons.length).toBeGreaterThan(0)
})
it('should update sort order when sort header is clicked', () => {
render(<DocumentList {...defaultProps} />, { wrapper: createWrapper() })
// Find and click a sort header by its parent div containing the label text
const sortableHeaders = document.querySelectorAll('[class*="cursor-pointer"]')
if (sortableHeaders.length > 0) {
fireEvent.click(sortableHeaders[0])
}
expect(screen.getByRole('table')).toBeInTheDocument()
})
})
describe('Batch Actions', () => {
it('should show batch action bar when documents are selected', () => {
const props = {
...defaultProps,
selectedIds: ['doc-1', 'doc-2'],
}
render(<DocumentList {...props} />, { wrapper: createWrapper() })
// BatchAction component should be visible
expect(screen.getByRole('table')).toBeInTheDocument()
})
it('should not show batch action bar when no documents selected', () => {
render(<DocumentList {...defaultProps} />, { wrapper: createWrapper() })
// BatchAction should not be present
expect(screen.getByRole('table')).toBeInTheDocument()
})
it('should render batch action bar with archive option', () => {
const props = {
...defaultProps,
selectedIds: ['doc-1'],
}
render(<DocumentList {...props} />, { wrapper: createWrapper() })
// BatchAction component should be visible when documents are selected
expect(screen.getByRole('table')).toBeInTheDocument()
})
it('should render batch action bar with enable option', () => {
const props = {
...defaultProps,
selectedIds: ['doc-1'],
}
render(<DocumentList {...props} />, { wrapper: createWrapper() })
expect(screen.getByRole('table')).toBeInTheDocument()
})
it('should render batch action bar with disable option', () => {
const props = {
...defaultProps,
selectedIds: ['doc-1'],
}
render(<DocumentList {...props} />, { wrapper: createWrapper() })
expect(screen.getByRole('table')).toBeInTheDocument()
})
it('should render batch action bar with delete option', () => {
const props = {
...defaultProps,
selectedIds: ['doc-1'],
}
render(<DocumentList {...props} />, { wrapper: createWrapper() })
expect(screen.getByRole('table')).toBeInTheDocument()
})
it('should clear selection when cancel is clicked', () => {
const onSelectedIdChange = vi.fn()
const props = {
...defaultProps,
selectedIds: ['doc-1'],
onSelectedIdChange,
}
render(<DocumentList {...props} />, { wrapper: createWrapper() })
const cancelButton = screen.queryByRole('button', { name: /cancel/i })
if (cancelButton) {
fireEvent.click(cancelButton)
expect(onSelectedIdChange).toHaveBeenCalledWith([])
}
})
it('should show download option for downloadable documents', () => {
const props = {
...defaultProps,
selectedIds: ['doc-1'],
documents: [
createMockDoc({ id: 'doc-1', data_source_type: DataSourceType.FILE }),
],
}
render(<DocumentList {...props} />, { wrapper: createWrapper() })
// BatchAction should be visible
expect(screen.getByRole('table')).toBeInTheDocument()
})
it('should show re-index option for error documents', () => {
const props = {
...defaultProps,
selectedIds: ['doc-1'],
documents: [
createMockDoc({ id: 'doc-1', display_status: 'error' }),
],
}
render(<DocumentList {...props} />, { wrapper: createWrapper() })
// BatchAction with re-index should be present for error documents
expect(screen.getByRole('table')).toBeInTheDocument()
})
})
describe('Row Click Navigation', () => {
it('should navigate to document detail when row is clicked', () => {
render(<DocumentList {...defaultProps} />, { wrapper: createWrapper() })
const rows = screen.getAllByRole('row')
// First row is header, second row is first document
if (rows.length > 1) {
fireEvent.click(rows[1])
expect(mockPush).toHaveBeenCalledWith('/datasets/dataset-1/documents/doc-1')
}
})
})
describe('Rename Modal', () => {
it('should not show rename modal initially', () => {
render(<DocumentList {...defaultProps} />, { wrapper: createWrapper() })
// RenameModal should not be visible initially
const modal = screen.queryByRole('dialog')
expect(modal).not.toBeInTheDocument()
})
it('should show rename modal when rename button is clicked', () => {
const { container } = render(<DocumentList {...defaultProps} />, { wrapper: createWrapper() })
// Find and click the rename button in the first row
const renameButtons = container.querySelectorAll('.cursor-pointer.rounded-md')
if (renameButtons.length > 0) {
fireEvent.click(renameButtons[0])
}
// After clicking rename, the modal should potentially be visible
expect(screen.getByRole('table')).toBeInTheDocument()
})
it('should call onUpdate when document is renamed', () => {
const onUpdate = vi.fn()
const props = { ...defaultProps, onUpdate }
render(<DocumentList {...props} />, { wrapper: createWrapper() })
// The handleRenamed callback wraps onUpdate
expect(screen.getByRole('table')).toBeInTheDocument()
})
})
describe('Edit Metadata Modal', () => {
it('should handle edit metadata action', () => {
const props = {
...defaultProps,
selectedIds: ['doc-1'],
}
render(<DocumentList {...props} />, { wrapper: createWrapper() })
const editButton = screen.queryByRole('button', { name: /metadata/i })
if (editButton) {
fireEvent.click(editButton)
}
expect(screen.getByRole('table')).toBeInTheDocument()
})
it('should call onManageMetadata when manage metadata is triggered', () => {
const onManageMetadata = vi.fn()
const props = {
...defaultProps,
selectedIds: ['doc-1'],
onManageMetadata,
}
render(<DocumentList {...props} />, { wrapper: createWrapper() })
// The onShowManage callback in EditMetadataBatchModal should call hideEditModal then onManageMetadata
expect(screen.getByRole('table')).toBeInTheDocument()
})
})
describe('Chunking Mode', () => {
it('should render with general mode', () => {
render(<DocumentList {...defaultProps} />, { wrapper: createWrapper() })
expect(screen.getByRole('table')).toBeInTheDocument()
})
it('should render with QA mode', () => {
// This test uses the default mock which returns ChunkingMode.text
// The component will compute isQAMode based on doc_form
render(<DocumentList {...defaultProps} />, { wrapper: createWrapper() })
expect(screen.getByRole('table')).toBeInTheDocument()
})
it('should render with parent-child mode', () => {
render(<DocumentList {...defaultProps} />, { wrapper: createWrapper() })
expect(screen.getByRole('table')).toBeInTheDocument()
})
})
describe('Edge Cases', () => {
it('should handle empty documents array', () => {
const props = { ...defaultProps, documents: [] }
render(<DocumentList {...props} />, { wrapper: createWrapper() })
expect(screen.getByRole('table')).toBeInTheDocument()
})
it('should handle documents with missing optional fields', () => {
const docWithMissingFields = createMockDoc({
word_count: undefined as unknown as number,
hit_count: undefined as unknown as number,
})
const props = {
...defaultProps,
documents: [docWithMissingFields],
}
render(<DocumentList {...props} />, { wrapper: createWrapper() })
expect(screen.getByRole('table')).toBeInTheDocument()
})
it('should handle status filter value', () => {
const props = {
...defaultProps,
statusFilterValue: 'completed',
}
render(<DocumentList {...props} />, { wrapper: createWrapper() })
expect(screen.getByRole('table')).toBeInTheDocument()
})
it('should handle remote sort value', () => {
const props = {
...defaultProps,
remoteSortValue: 'created_at',
}
render(<DocumentList {...props} />, { wrapper: createWrapper() })
expect(screen.getByRole('table')).toBeInTheDocument()
})
it('should handle large number of documents', () => {
const manyDocs = Array.from({ length: 20 }, (_, i) =>
createMockDoc({ id: `doc-${i}`, name: `Document ${i}.txt` }))
const props = { ...defaultProps, documents: manyDocs }
render(<DocumentList {...props} />, { wrapper: createWrapper() })
expect(screen.getByRole('table')).toBeInTheDocument()
}, 10000)
})
})

View File

@@ -1,3 +0,0 @@
// Re-export from parent for backwards compatibility
export { default } from '../list'
export { renderTdValue } from './components'

View File

@@ -1,26 +1,67 @@
'use client'
import type { FC } from 'react'
import type { Props as PaginationProps } from '@/app/components/base/pagination'
import type { SimpleDocumentDetail } from '@/models/datasets'
import type { CommonResponse } from '@/models/common'
import type { LegacyDataSourceInfo, LocalFileInfo, OnlineDocumentInfo, OnlineDriveInfo, SimpleDocumentDetail } from '@/models/datasets'
import {
RiArrowDownLine,
RiEditLine,
RiGlobalLine,
} from '@remixicon/react'
import { useBoolean } from 'ahooks'
import { uniq } from 'es-toolkit/array'
import { pick } from 'es-toolkit/object'
import { useRouter } from 'next/navigation'
import * as React from 'react'
import { useCallback, useState } from 'react'
import { useCallback, useEffect, useMemo, useState } from 'react'
import { useTranslation } from 'react-i18next'
import Checkbox from '@/app/components/base/checkbox'
import FileTypeIcon from '@/app/components/base/file-uploader/file-type-icon'
import NotionIcon from '@/app/components/base/notion-icon'
import Pagination from '@/app/components/base/pagination'
import Toast from '@/app/components/base/toast'
import Tooltip from '@/app/components/base/tooltip'
import ChunkingModeLabel from '@/app/components/datasets/common/chunking-mode-label'
import { normalizeStatusForQuery } from '@/app/components/datasets/documents/status-filter'
import { extensionToFileType } from '@/app/components/datasets/hit-testing/utils/extension-to-file-type'
import EditMetadataBatchModal from '@/app/components/datasets/metadata/edit-metadata-batch/modal'
import useBatchEditDocumentMetadata from '@/app/components/datasets/metadata/hooks/use-batch-edit-document-metadata'
import { useDatasetDetailContextWithSelector as useDatasetDetailContext } from '@/context/dataset-detail'
import { ChunkingMode, DocumentActionType } from '@/models/datasets'
import useTimestamp from '@/hooks/use-timestamp'
import { ChunkingMode, DataSourceType, DocumentActionType } from '@/models/datasets'
import { DatasourceType } from '@/models/pipeline'
import { useDocumentArchive, useDocumentBatchRetryIndex, useDocumentDelete, useDocumentDisable, useDocumentDownloadZip, useDocumentEnable, useDocumentSummary } from '@/service/knowledge/use-document'
import { asyncRunSafe } from '@/utils'
import { cn } from '@/utils/classnames'
import { downloadBlob } from '@/utils/download'
import { formatNumber } from '@/utils/format'
import BatchAction from '../detail/completed/common/batch-action'
import SummaryStatus from '../detail/completed/common/summary-status'
import StatusItem from '../status-item'
import s from '../style.module.css'
import { DocumentTableRow, renderTdValue, SortHeader } from './document-list/components'
import { useDocumentActions, useDocumentSelection, useDocumentSort } from './document-list/hooks'
import Operations from './operations'
import RenameModal from './rename-modal'
type LocalDoc = SimpleDocumentDetail & { percent?: number }
export const renderTdValue = (value: string | number | null, isEmptyStyle = false) => {
return (
<div className={cn(isEmptyStyle ? 'text-text-tertiary' : 'text-text-secondary', s.tdValue)}>
{value ?? '-'}
</div>
)
}
type DocumentListProps = {
const renderCount = (count: number | undefined) => {
if (!count)
return renderTdValue(0, true)
if (count < 1000)
return count
return `${formatNumber((count / 1000).toFixed(1))}k`
}
type LocalDoc = SimpleDocumentDetail & { percent?: number }
type IDocumentListProps = {
embeddingAvailable: boolean
documents: LocalDoc[]
selectedIds: string[]
@@ -36,7 +77,7 @@ type DocumentListProps = {
/**
* Document list component including basic information
*/
const DocumentList: FC<DocumentListProps> = ({
const DocumentList: FC<IDocumentListProps> = ({
embeddingAvailable,
documents = [],
selectedIds,
@@ -49,43 +90,20 @@ const DocumentList: FC<DocumentListProps> = ({
remoteSortValue,
}) => {
const { t } = useTranslation()
const { formatTime } = useTimestamp()
const router = useRouter()
const datasetConfig = useDatasetDetailContext(s => s.dataset)
const chunkingMode = datasetConfig?.doc_form
const isGeneralMode = chunkingMode !== ChunkingMode.parentChild
const isQAMode = chunkingMode === ChunkingMode.qa
const [sortField, setSortField] = useState<'name' | 'word_count' | 'hit_count' | 'created_at' | null>(null)
const [sortOrder, setSortOrder] = useState<'asc' | 'desc'>('desc')
// Sorting
const { sortField, sortOrder, handleSort, sortedDocuments } = useDocumentSort({
documents,
statusFilterValue,
remoteSortValue,
})
useEffect(() => {
setSortField(null)
setSortOrder('desc')
}, [remoteSortValue])
// Selection
const {
isAllSelected,
isSomeSelected,
onSelectAll,
onSelectOne,
hasErrorDocumentsSelected,
downloadableSelectedIds,
clearSelection,
} = useDocumentSelection({
documents: sortedDocuments,
selectedIds,
onSelectedIdChange,
})
// Actions
const { handleAction, handleBatchReIndex, handleBatchDownload } = useDocumentActions({
datasetId,
selectedIds,
downloadableSelectedIds,
onUpdate,
onClearSelection: clearSelection,
})
// Batch edit metadata
const {
isShowEditModal,
showEditModal,
@@ -95,26 +113,233 @@ const DocumentList: FC<DocumentListProps> = ({
} = useBatchEditDocumentMetadata({
datasetId,
docList: documents.filter(doc => selectedIds.includes(doc.id)),
selectedDocumentIds: selectedIds,
selectedDocumentIds: selectedIds, // Pass all selected IDs separately
onUpdate,
})
// Rename modal
const localDocs = useMemo(() => {
let filteredDocs = documents
if (statusFilterValue && statusFilterValue !== 'all') {
filteredDocs = filteredDocs.filter(doc =>
typeof doc.display_status === 'string'
&& normalizeStatusForQuery(doc.display_status) === statusFilterValue,
)
}
if (!sortField)
return filteredDocs
const sortedDocs = [...filteredDocs].sort((a, b) => {
let aValue: any
let bValue: any
switch (sortField) {
case 'name':
aValue = a.name?.toLowerCase() || ''
bValue = b.name?.toLowerCase() || ''
break
case 'word_count':
aValue = a.word_count || 0
bValue = b.word_count || 0
break
case 'hit_count':
aValue = a.hit_count || 0
bValue = b.hit_count || 0
break
case 'created_at':
aValue = a.created_at
bValue = b.created_at
break
default:
return 0
}
if (sortField === 'name') {
const result = aValue.localeCompare(bValue)
return sortOrder === 'asc' ? result : -result
}
else {
const result = aValue - bValue
return sortOrder === 'asc' ? result : -result
}
})
return sortedDocs
}, [documents, sortField, sortOrder, statusFilterValue])
const handleSort = (field: 'name' | 'word_count' | 'hit_count' | 'created_at') => {
if (sortField === field) {
setSortOrder(sortOrder === 'asc' ? 'desc' : 'asc')
}
else {
setSortField(field)
setSortOrder('desc')
}
}
const renderSortHeader = (field: 'name' | 'word_count' | 'hit_count' | 'created_at', label: string) => {
const isActive = sortField === field
const isDesc = isActive && sortOrder === 'desc'
return (
<div className="flex cursor-pointer items-center hover:text-text-secondary" onClick={() => handleSort(field)}>
{label}
<RiArrowDownLine
className={cn('ml-0.5 h-3 w-3 transition-all', isActive ? 'text-text-tertiary' : 'text-text-disabled', isActive && !isDesc ? 'rotate-180' : '')}
/>
</div>
)
}
const [currDocument, setCurrDocument] = useState<LocalDoc | null>(null)
const [isShowRenameModal, {
setTrue: setShowRenameModalTrue,
setFalse: setShowRenameModalFalse,
}] = useBoolean(false)
const handleShowRenameModal = useCallback((doc: LocalDoc) => {
setCurrDocument(doc)
setShowRenameModalTrue()
}, [setShowRenameModalTrue])
const handleRenamed = useCallback(() => {
onUpdate()
}, [onUpdate])
const isAllSelected = useMemo(() => {
return localDocs.length > 0 && localDocs.every(doc => selectedIds.includes(doc.id))
}, [localDocs, selectedIds])
const isSomeSelected = useMemo(() => {
return localDocs.some(doc => selectedIds.includes(doc.id))
}, [localDocs, selectedIds])
const onSelectedAll = useCallback(() => {
if (isAllSelected)
onSelectedIdChange([])
else
onSelectedIdChange(uniq([...selectedIds, ...localDocs.map(doc => doc.id)]))
}, [isAllSelected, localDocs, onSelectedIdChange, selectedIds])
const { mutateAsync: archiveDocument } = useDocumentArchive()
const { mutateAsync: generateSummary } = useDocumentSummary()
const { mutateAsync: enableDocument } = useDocumentEnable()
const { mutateAsync: disableDocument } = useDocumentDisable()
const { mutateAsync: deleteDocument } = useDocumentDelete()
const { mutateAsync: retryIndexDocument } = useDocumentBatchRetryIndex()
const { mutateAsync: requestDocumentsZip, isPending: isDownloadingZip } = useDocumentDownloadZip()
const handleAction = (actionName: DocumentActionType) => {
return async () => {
let opApi
switch (actionName) {
case DocumentActionType.archive:
opApi = archiveDocument
break
case DocumentActionType.summary:
opApi = generateSummary
break
case DocumentActionType.enable:
opApi = enableDocument
break
case DocumentActionType.disable:
opApi = disableDocument
break
default:
opApi = deleteDocument
break
}
const [e] = await asyncRunSafe<CommonResponse>(opApi({ datasetId, documentIds: selectedIds }) as Promise<CommonResponse>)
if (!e) {
if (actionName === DocumentActionType.delete)
onSelectedIdChange([])
Toast.notify({ type: 'success', message: t('actionMsg.modifiedSuccessfully', { ns: 'common' }) })
onUpdate()
}
else { Toast.notify({ type: 'error', message: t('actionMsg.modifiedUnsuccessfully', { ns: 'common' }) }) }
}
}
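// handleAction is a factory: it is used below as, e.g.,
// onBatchDelete={handleAction(DocumentActionType.delete)}, so each button gets an
// async click handler already bound to its action.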
const handleBatchReIndex = async () => {
const [e] = await asyncRunSafe<CommonResponse>(retryIndexDocument({ datasetId, documentIds: selectedIds }))
if (!e) {
onSelectedIdChange([])
Toast.notify({ type: 'success', message: t('actionMsg.modifiedSuccessfully', { ns: 'common' }) })
onUpdate()
}
else {
Toast.notify({ type: 'error', message: t('actionMsg.modifiedUnsuccessfully', { ns: 'common' }) })
}
}
const hasErrorDocumentsSelected = useMemo(() => {
return localDocs.some(doc => selectedIds.includes(doc.id) && doc.display_status === 'error')
}, [localDocs, selectedIds])
const getFileExtension = useCallback((fileName: string): string => {
if (!fileName)
return ''
const parts = fileName.split('.')
if (parts.length <= 1 || (parts[0] === '' && parts.length === 2))
return ''
return parts[parts.length - 1].toLowerCase()
}, [])
const isCreateFromRAGPipeline = useCallback((createdFrom: string) => {
return createdFrom === 'rag-pipeline'
}, [])
/**
* Calculate the data source type
* DataSourceType: FILE, NOTION, WEB (legacy)
* DatasourceType: localFile, onlineDocument, websiteCrawl, onlineDrive (new)
*/
const isLocalFile = useCallback((dataSourceType: DataSourceType | DatasourceType) => {
return dataSourceType === DatasourceType.localFile || dataSourceType === DataSourceType.FILE
}, [])
const isOnlineDocument = useCallback((dataSourceType: DataSourceType | DatasourceType) => {
return dataSourceType === DatasourceType.onlineDocument || dataSourceType === DataSourceType.NOTION
}, [])
const isWebsiteCrawl = useCallback((dataSourceType: DataSourceType | DatasourceType) => {
return dataSourceType === DatasourceType.websiteCrawl || dataSourceType === DataSourceType.WEB
}, [])
const isOnlineDrive = useCallback((dataSourceType: DataSourceType | DatasourceType) => {
return dataSourceType === DatasourceType.onlineDrive
}, [])
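// Example (an editor's illustration, not in the original): each predicate accepts
// values from either enum generation, so legacy documents and pipeline-created ones
// share a single code path:
//   isLocalFile(DataSourceType.FILE)        // true (legacy)
//   isLocalFile(DatasourceType.localFile)   // true (new)
//   isLocalFile(DatasourceType.onlineDrive) // false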
const downloadableSelectedIds = useMemo(() => {
const selectedSet = new Set(selectedIds)
return localDocs
.filter(doc => selectedSet.has(doc.id) && doc.data_source_type === DataSourceType.FILE)
.map(doc => doc.id)
}, [localDocs, selectedIds])
/**
* Generate a random ZIP filename for bulk document downloads.
* We intentionally avoid leaking dataset info in the exported archive name.
*/
const generateDocsZipFileName = useCallback((): string => {
// Prefer UUID for uniqueness; fall back to time+random when unavailable.
const randomPart = (typeof crypto !== 'undefined' && typeof crypto.randomUUID === 'function')
? crypto.randomUUID()
: `${Date.now().toString(36)}${Math.random().toString(36).slice(2, 10)}`
return `${randomPart}-docs.zip`
}, [])
const handleBatchDownload = useCallback(async () => {
if (isDownloadingZip)
return
// Download as a single ZIP to avoid browser caps on multiple automatic downloads.
const [e, blob] = await asyncRunSafe(requestDocumentsZip({ datasetId, documentIds: downloadableSelectedIds }))
if (e || !blob) {
Toast.notify({ type: 'error', message: t('actionMsg.downloadUnsuccessfully', { ns: 'common' }) })
return
}
downloadBlob({ data: blob, fileName: generateDocsZipFileName() })
}, [datasetId, downloadableSelectedIds, generateDocsZipFileName, isDownloadingZip, requestDocumentsZip, t])
return (
<div className="relative mt-3 flex h-full w-full flex-col">
<div className="relative h-0 grow overflow-x-auto">
@@ -128,76 +353,157 @@ const DocumentList: FC<DocumentListProps> = ({
className="mr-2 shrink-0"
checked={isAllSelected}
indeterminate={!isAllSelected && isSomeSelected}
onCheck={onSelectAll}
onCheck={onSelectedAll}
/>
)}
#
</div>
</td>
<td>
<SortHeader
field="name"
label={t('list.table.header.fileName', { ns: 'datasetDocuments' })}
currentSortField={sortField}
sortOrder={sortOrder}
onSort={handleSort}
/>
{renderSortHeader('name', t('list.table.header.fileName', { ns: 'datasetDocuments' }))}
</td>
<td className="w-[130px]">{t('list.table.header.chunkingMode', { ns: 'datasetDocuments' })}</td>
<td className="w-24">
<SortHeader
field="word_count"
label={t('list.table.header.words', { ns: 'datasetDocuments' })}
currentSortField={sortField}
sortOrder={sortOrder}
onSort={handleSort}
/>
{renderSortHeader('word_count', t('list.table.header.words', { ns: 'datasetDocuments' }))}
</td>
<td className="w-44">
<SortHeader
field="hit_count"
label={t('list.table.header.hitCount', { ns: 'datasetDocuments' })}
currentSortField={sortField}
sortOrder={sortOrder}
onSort={handleSort}
/>
{renderSortHeader('hit_count', t('list.table.header.hitCount', { ns: 'datasetDocuments' }))}
</td>
<td className="w-44">
<SortHeader
field="created_at"
label={t('list.table.header.uploadTime', { ns: 'datasetDocuments' })}
currentSortField={sortField}
sortOrder={sortOrder}
onSort={handleSort}
/>
{renderSortHeader('created_at', t('list.table.header.uploadTime', { ns: 'datasetDocuments' }))}
</td>
<td className="w-40">{t('list.table.header.status', { ns: 'datasetDocuments' })}</td>
<td className="w-20">{t('list.table.header.action', { ns: 'datasetDocuments' })}</td>
</tr>
</thead>
<tbody className="text-text-secondary">
{sortedDocuments.map((doc, index) => (
<DocumentTableRow
key={doc.id}
doc={doc}
index={index}
datasetId={datasetId}
isSelected={selectedIds.includes(doc.id)}
isGeneralMode={isGeneralMode}
isQAMode={isQAMode}
embeddingAvailable={embeddingAvailable}
selectedIds={selectedIds}
onSelectOne={onSelectOne}
onSelectedIdChange={onSelectedIdChange}
onShowRenameModal={handleShowRenameModal}
onUpdate={onUpdate}
/>
))}
{localDocs.map((doc, index) => {
const isFile = isLocalFile(doc.data_source_type)
const fileType = isFile ? doc.data_source_detail_dict?.upload_file?.extension : ''
return (
<tr
key={doc.id}
className="h-8 cursor-pointer border-b border-divider-subtle hover:bg-background-default-hover"
onClick={() => {
router.push(`/datasets/${datasetId}/documents/${doc.id}`)
}}
>
<td className="text-left align-middle text-xs text-text-tertiary">
<div className="flex items-center" onClick={e => e.stopPropagation()}>
<Checkbox
className="mr-2 shrink-0"
checked={selectedIds.includes(doc.id)}
onCheck={() => {
onSelectedIdChange(
selectedIds.includes(doc.id)
? selectedIds.filter(id => id !== doc.id)
: [...selectedIds, doc.id],
)
}}
/>
{index + 1}
</div>
</td>
<td>
<div className="group mr-6 flex max-w-[460px] items-center hover:mr-0">
<div className="flex shrink-0 items-center">
{isOnlineDocument(doc.data_source_type) && (
<NotionIcon
className="mr-1.5"
type="page"
src={
isCreateFromRAGPipeline(doc.created_from)
? (doc.data_source_info as OnlineDocumentInfo).page.page_icon
: (doc.data_source_info as LegacyDataSourceInfo).notion_page_icon
}
/>
)}
{isLocalFile(doc.data_source_type) && (
<FileTypeIcon
type={
extensionToFileType(
isCreateFromRAGPipeline(doc.created_from)
? (doc?.data_source_info as LocalFileInfo)?.extension
: ((doc?.data_source_info as LegacyDataSourceInfo)?.upload_file?.extension ?? fileType),
)
}
className="mr-1.5"
/>
)}
{isOnlineDrive(doc.data_source_type) && (
<FileTypeIcon
type={
extensionToFileType(
getFileExtension((doc?.data_source_info as unknown as OnlineDriveInfo)?.name),
)
}
className="mr-1.5"
/>
)}
{isWebsiteCrawl(doc.data_source_type) && (
<RiGlobalLine className="mr-1.5 size-4" />
)}
</div>
<Tooltip
popupContent={doc.name}
>
<span className="grow-1 truncate text-sm">{doc.name}</span>
</Tooltip>
{
doc.summary_index_status && (
<div className="ml-1 hidden shrink-0 group-hover:flex">
<SummaryStatus status={doc.summary_index_status} />
</div>
)
}
<div className="hidden shrink-0 group-hover:ml-auto group-hover:flex">
<Tooltip
popupContent={t('list.table.rename', { ns: 'datasetDocuments' })}
>
<div
className="cursor-pointer rounded-md p-1 hover:bg-state-base-hover"
onClick={(e) => {
e.stopPropagation()
handleShowRenameModal(doc)
}}
>
<RiEditLine className="h-4 w-4 text-text-tertiary" />
</div>
</Tooltip>
</div>
</div>
</td>
<td>
<ChunkingModeLabel
isGeneralMode={isGeneralMode}
isQAMode={isQAMode}
/>
</td>
<td>{renderCount(doc.word_count)}</td>
<td>{renderCount(doc.hit_count)}</td>
<td className="text-[13px] text-text-secondary">
{formatTime(doc.created_at, t('dateTimeFormat', { ns: 'datasetHitTesting' }) as string)}
</td>
<td>
<StatusItem status={doc.display_status} />
</td>
<td>
<Operations
selectedIds={selectedIds}
onSelectedIdChange={onSelectedIdChange}
embeddingAvailable={embeddingAvailable}
datasetId={datasetId}
detail={pick(doc, ['name', 'enabled', 'archived', 'id', 'data_source_type', 'doc_form', 'display_status'])}
onUpdate={onUpdate}
/>
</td>
</tr>
)
})}
</tbody>
</table>
</div>
{selectedIds.length > 0 && (
{(selectedIds.length > 0) && (
<BatchAction
className="absolute bottom-16 left-0 z-20"
selectedIds={selectedIds}
@@ -209,10 +515,12 @@ const DocumentList: FC<DocumentListProps> = ({
onBatchDelete={handleAction(DocumentActionType.delete)}
onEditMetadata={showEditModal}
onBatchReIndex={hasErrorDocumentsSelected ? handleBatchReIndex : undefined}
onCancel={clearSelection}
onCancel={() => {
onSelectedIdChange([])
}}
/>
)}
{/* Show Pagination only when total is non-zero */}
{!!pagination.total && (
<Pagination
{...pagination}
@@ -248,5 +556,3 @@ const DocumentList: FC<DocumentListProps> = ({
}
export default DocumentList
export { renderTdValue }

View File

@@ -26,7 +26,6 @@ import CustomPopover from '@/app/components/base/popover'
import Switch from '@/app/components/base/switch'
import { ToastContext } from '@/app/components/base/toast'
import Tooltip from '@/app/components/base/tooltip'
import { IS_CE_EDITION } from '@/config'
import { DataSourceType, DocumentActionType } from '@/models/datasets'
import {
useDocumentArchive,
@@ -264,14 +263,10 @@ const Operations = ({
<span className={s.actionName}>{t('list.action.sync', { ns: 'datasetDocuments' })}</span>
</div>
)}
{
IS_CE_EDITION && (
<div className={s.actionItem} onClick={() => onOperate('summary')}>
<SearchLinesSparkle className="h-4 w-4 text-text-tertiary" />
<span className={s.actionName}>{t('list.action.summary', { ns: 'datasetDocuments' })}</span>
</div>
)
}
<div className={s.actionItem} onClick={() => onOperate('summary')}>
<SearchLinesSparkle className="h-4 w-4 text-text-tertiary" />
<span className={s.actionName}>{t('list.action.summary', { ns: 'datasetDocuments' })}</span>
</div>
<Divider className="my-1" />
</>
)}

View File

@@ -1,351 +0,0 @@
import type { FileListItemProps } from './file-list-item'
import type { CustomFile as File, FileItem } from '@/models/datasets'
import { fireEvent, render, screen } from '@testing-library/react'
import { beforeEach, describe, expect, it, vi } from 'vitest'
import { PROGRESS_ERROR, PROGRESS_NOT_STARTED } from '../constants'
import FileListItem from './file-list-item'
// Mock theme hook - can be changed per test
let mockTheme = 'light'
vi.mock('@/hooks/use-theme', () => ({
default: () => ({ theme: mockTheme }),
}))
// Mock theme types
vi.mock('@/types/app', () => ({
Theme: { dark: 'dark', light: 'light' },
}))
// Mock SimplePieChart with dynamic import handling
vi.mock('next/dynamic', () => ({
default: () => {
const DynamicComponent = ({ percentage, stroke, fill }: { percentage: number, stroke: string, fill: string }) => (
<div data-testid="pie-chart" data-percentage={percentage} data-stroke={stroke} data-fill={fill}>
Pie Chart:
{' '}
{percentage}
%
</div>
)
DynamicComponent.displayName = 'SimplePieChart'
return DynamicComponent
},
}))
// Mock DocumentFileIcon
vi.mock('@/app/components/datasets/common/document-file-icon', () => ({
default: ({ name, extension, size }: { name: string, extension: string, size: string }) => (
<div data-testid="document-icon" data-name={name} data-extension={extension} data-size={size}>
Document Icon
</div>
),
}))
describe('FileListItem', () => {
const createMockFile = (overrides: Partial<File> = {}): File => ({
name: 'test-document.pdf',
size: 1024 * 100, // 100KB
type: 'application/pdf',
lastModified: Date.now(),
...overrides,
} as File)
const createMockFileItem = (overrides: Partial<FileItem> = {}): FileItem => ({
fileID: 'file-123',
file: createMockFile(overrides.file as Partial<File>),
progress: PROGRESS_NOT_STARTED,
...overrides,
})
const defaultProps: FileListItemProps = {
fileItem: createMockFileItem(),
onPreview: vi.fn(),
onRemove: vi.fn(),
}
beforeEach(() => {
vi.clearAllMocks()
})
describe('rendering', () => {
it('should render the file item container', () => {
const { container } = render(<FileListItem {...defaultProps} />)
const item = container.firstChild as HTMLElement
expect(item).toHaveClass('flex', 'h-12', 'items-center', 'rounded-lg')
})
it('should render document icon with correct props', () => {
render(<FileListItem {...defaultProps} />)
const icon = screen.getByTestId('document-icon')
expect(icon).toBeInTheDocument()
expect(icon).toHaveAttribute('data-name', 'test-document.pdf')
expect(icon).toHaveAttribute('data-extension', 'pdf')
expect(icon).toHaveAttribute('data-size', 'lg')
})
it('should render file name', () => {
render(<FileListItem {...defaultProps} />)
expect(screen.getByText('test-document.pdf')).toBeInTheDocument()
})
it('should render file extension in uppercase via CSS class', () => {
render(<FileListItem {...defaultProps} />)
// Extension is rendered in lowercase but styled with uppercase CSS
const extensionSpan = screen.getByText('pdf')
expect(extensionSpan).toBeInTheDocument()
expect(extensionSpan).toHaveClass('uppercase')
})
it('should render file size', () => {
render(<FileListItem {...defaultProps} />)
// 100KB (102400 bytes) formatted with formatFileSize
expect(screen.getByText('100.00 KB')).toBeInTheDocument()
})
it('should render delete button', () => {
const { container } = render(<FileListItem {...defaultProps} />)
const deleteButton = container.querySelector('.cursor-pointer')
expect(deleteButton).toBeInTheDocument()
})
})
describe('progress states', () => {
it('should show progress chart when uploading (0-99)', () => {
const fileItem = createMockFileItem({ progress: 50 })
render(<FileListItem {...defaultProps} fileItem={fileItem} />)
const pieChart = screen.getByTestId('pie-chart')
expect(pieChart).toBeInTheDocument()
expect(pieChart).toHaveAttribute('data-percentage', '50')
})
it('should show progress chart at 0%', () => {
const fileItem = createMockFileItem({ progress: 0 })
render(<FileListItem {...defaultProps} fileItem={fileItem} />)
const pieChart = screen.getByTestId('pie-chart')
expect(pieChart).toHaveAttribute('data-percentage', '0')
})
it('should not show progress chart when complete (100)', () => {
const fileItem = createMockFileItem({ progress: 100 })
render(<FileListItem {...defaultProps} fileItem={fileItem} />)
expect(screen.queryByTestId('pie-chart')).not.toBeInTheDocument()
})
it('should not show progress chart when not started (-1)', () => {
const fileItem = createMockFileItem({ progress: PROGRESS_NOT_STARTED })
render(<FileListItem {...defaultProps} fileItem={fileItem} />)
expect(screen.queryByTestId('pie-chart')).not.toBeInTheDocument()
})
})
describe('error state', () => {
it('should show error icon when progress is PROGRESS_ERROR', () => {
const fileItem = createMockFileItem({ progress: PROGRESS_ERROR })
const { container } = render(<FileListItem {...defaultProps} fileItem={fileItem} />)
const errorIcon = container.querySelector('.text-text-destructive')
expect(errorIcon).toBeInTheDocument()
})
it('should apply error styling to container', () => {
const fileItem = createMockFileItem({ progress: PROGRESS_ERROR })
const { container } = render(<FileListItem {...defaultProps} fileItem={fileItem} />)
const item = container.firstChild as HTMLElement
expect(item).toHaveClass('border-state-destructive-border', 'bg-state-destructive-hover')
})
it('should not show error styling when not in error state', () => {
const { container } = render(<FileListItem {...defaultProps} />)
const item = container.firstChild as HTMLElement
expect(item).not.toHaveClass('border-state-destructive-border')
})
})
describe('theme handling', () => {
it('should use correct chart color for light theme', () => {
mockTheme = 'light'
const fileItem = createMockFileItem({ progress: 50 })
render(<FileListItem {...defaultProps} fileItem={fileItem} />)
const pieChart = screen.getByTestId('pie-chart')
expect(pieChart).toHaveAttribute('data-stroke', '#296dff')
expect(pieChart).toHaveAttribute('data-fill', '#296dff')
})
it('should use correct chart color for dark theme', () => {
mockTheme = 'dark'
const fileItem = createMockFileItem({ progress: 50 })
render(<FileListItem {...defaultProps} fileItem={fileItem} />)
const pieChart = screen.getByTestId('pie-chart')
expect(pieChart).toHaveAttribute('data-stroke', '#5289ff')
expect(pieChart).toHaveAttribute('data-fill', '#5289ff')
})
})
describe('event handlers', () => {
it('should call onPreview when item is clicked', () => {
const onPreview = vi.fn()
const fileItem = createMockFileItem()
render(<FileListItem {...defaultProps} fileItem={fileItem} onPreview={onPreview} />)
const item = screen.getByText('test-document.pdf').closest('[class*="flex h-12"]')!
fireEvent.click(item)
expect(onPreview).toHaveBeenCalledTimes(1)
expect(onPreview).toHaveBeenCalledWith(fileItem.file)
})
it('should call onRemove when delete button is clicked', () => {
const onRemove = vi.fn()
const fileItem = createMockFileItem()
const { container } = render(<FileListItem {...defaultProps} fileItem={fileItem} onRemove={onRemove} />)
const deleteButton = container.querySelector('.cursor-pointer')!
fireEvent.click(deleteButton)
expect(onRemove).toHaveBeenCalledTimes(1)
expect(onRemove).toHaveBeenCalledWith('file-123')
})
it('should stop propagation when delete button is clicked', () => {
const onPreview = vi.fn()
const onRemove = vi.fn()
const { container } = render(<FileListItem {...defaultProps} onPreview={onPreview} onRemove={onRemove} />)
const deleteButton = container.querySelector('.cursor-pointer')!
fireEvent.click(deleteButton)
expect(onRemove).toHaveBeenCalledTimes(1)
expect(onPreview).not.toHaveBeenCalled()
})
})
describe('file type handling', () => {
it('should handle files with multiple dots in name', () => {
const fileItem = createMockFileItem({
file: createMockFile({ name: 'my.document.file.docx' }),
})
render(<FileListItem {...defaultProps} fileItem={fileItem} />)
expect(screen.getByText('my.document.file.docx')).toBeInTheDocument()
// Extension is lowercase with uppercase CSS class
expect(screen.getByText('docx')).toBeInTheDocument()
})
it('should handle files without extension', () => {
const fileItem = createMockFileItem({
file: createMockFile({ name: 'README' }),
})
render(<FileListItem {...defaultProps} fileItem={fileItem} />)
// getFileType returns 'README' when there's no extension (last part after split)
expect(screen.getAllByText('README')).toHaveLength(2) // filename and extension
})
it('should handle various file extensions', () => {
const extensions = ['txt', 'md', 'json', 'csv', 'xlsx']
extensions.forEach((ext) => {
const fileItem = createMockFileItem({
file: createMockFile({ name: `file.${ext}` }),
})
const { unmount } = render(<FileListItem {...defaultProps} fileItem={fileItem} />)
// Extension is rendered in lowercase with uppercase CSS class
expect(screen.getByText(ext)).toBeInTheDocument()
unmount()
})
})
})
describe('file size display', () => {
it('should display size in KB for small files', () => {
const fileItem = createMockFileItem({
file: createMockFile({ size: 5 * 1024 }), // 5KB
})
render(<FileListItem {...defaultProps} fileItem={fileItem} />)
expect(screen.getByText('5.00 KB')).toBeInTheDocument()
})
it('should display size in MB for larger files', () => {
const fileItem = createMockFileItem({
file: createMockFile({ size: 5 * 1024 * 1024 }), // 5MB
})
render(<FileListItem {...defaultProps} fileItem={fileItem} />)
expect(screen.getByText('5.00 MB')).toBeInTheDocument()
})
it('should display size at threshold (10KB)', () => {
const fileItem = createMockFileItem({
file: createMockFile({ size: 10 * 1024 }), // 10KB
})
render(<FileListItem {...defaultProps} fileItem={fileItem} />)
expect(screen.getByText('10.00 KB')).toBeInTheDocument()
})
})
describe('upload progress values', () => {
it('should show chart at progress 1', () => {
const fileItem = createMockFileItem({ progress: 1 })
render(<FileListItem {...defaultProps} fileItem={fileItem} />)
expect(screen.getByTestId('pie-chart')).toBeInTheDocument()
})
it('should show chart at progress 99', () => {
const fileItem = createMockFileItem({ progress: 99 })
render(<FileListItem {...defaultProps} fileItem={fileItem} />)
expect(screen.getByTestId('pie-chart')).toHaveAttribute('data-percentage', '99')
})
it('should not show chart at progress 100', () => {
const fileItem = createMockFileItem({ progress: 100 })
render(<FileListItem {...defaultProps} fileItem={fileItem} />)
expect(screen.queryByTestId('pie-chart')).not.toBeInTheDocument()
})
})
describe('styling', () => {
it('should have proper shadow styling', () => {
const { container } = render(<FileListItem {...defaultProps} />)
const item = container.firstChild as HTMLElement
expect(item).toHaveClass('shadow-xs')
})
it('should have proper border styling', () => {
const { container } = render(<FileListItem {...defaultProps} />)
const item = container.firstChild as HTMLElement
expect(item).toHaveClass('border', 'border-components-panel-border')
})
it('should truncate long file names', () => {
const longFileName = 'this-is-a-very-long-file-name-that-should-be-truncated.pdf'
const fileItem = createMockFileItem({
file: createMockFile({ name: longFileName }),
})
render(<FileListItem {...defaultProps} fileItem={fileItem} />)
const nameElement = screen.getByText(longFileName)
expect(nameElement).toHaveClass('truncate')
})
})
})

View File

@@ -1,85 +0,0 @@
import type { CustomFile as File, FileItem } from '@/models/datasets'
import { RiDeleteBinLine, RiErrorWarningFill } from '@remixicon/react'
import dynamic from 'next/dynamic'
import { useMemo } from 'react'
import DocumentFileIcon from '@/app/components/datasets/common/document-file-icon'
import { getFileType } from '@/app/components/datasets/common/image-uploader/utils'
import useTheme from '@/hooks/use-theme'
import { Theme } from '@/types/app'
import { cn } from '@/utils/classnames'
import { formatFileSize } from '@/utils/format'
import { PROGRESS_ERROR } from '../constants'
const SimplePieChart = dynamic(() => import('@/app/components/base/simple-pie-chart'), { ssr: false })
export type FileListItemProps = {
fileItem: FileItem
onPreview: (file: File) => void
onRemove: (fileID: string) => void
}
const FileListItem = ({
fileItem,
onPreview,
onRemove,
}: FileListItemProps) => {
const { theme } = useTheme()
const chartColor = useMemo(() => theme === Theme.dark ? '#5289ff' : '#296dff', [theme])
const isUploading = fileItem.progress >= 0 && fileItem.progress < 100
const isError = fileItem.progress === PROGRESS_ERROR
const handleClick = () => {
onPreview(fileItem.file)
}
const handleRemove = (e: React.MouseEvent) => {
e.stopPropagation()
onRemove(fileItem.fileID)
}
return (
<div
onClick={handleClick}
className={cn(
'flex h-12 items-center rounded-lg border border-components-panel-border bg-components-panel-on-panel-item-bg shadow-xs shadow-shadow-shadow-4',
isError && 'border-state-destructive-border bg-state-destructive-hover',
)}
>
<div className="flex w-12 shrink-0 items-center justify-center">
<DocumentFileIcon
size="lg"
className="shrink-0"
name={fileItem.file.name}
extension={getFileType(fileItem.file)}
/>
</div>
<div className="flex shrink grow flex-col gap-0.5">
<div className="flex w-full">
<div className="w-0 grow truncate text-xs text-text-secondary">{fileItem.file.name}</div>
</div>
<div className="w-full truncate text-2xs leading-3 text-text-tertiary">
<span className="uppercase">{getFileType(fileItem.file)}</span>
<span className="px-1 text-text-quaternary">·</span>
<span>{formatFileSize(fileItem.file.size)}</span>
</div>
</div>
<div className="flex w-16 shrink-0 items-center justify-end gap-1 pr-3">
{isUploading && (
<SimplePieChart percentage={fileItem.progress} stroke={chartColor} fill={chartColor} animationDuration={0} />
)}
{isError && (
<RiErrorWarningFill className="size-4 text-text-destructive" />
)}
<span
className="flex h-6 w-6 cursor-pointer items-center justify-center"
onClick={handleRemove}
>
<RiDeleteBinLine className="size-4 text-text-tertiary" />
</span>
</div>
</div>
)
}
export default FileListItem
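A minimal usage sketch for FileListItem (an editor's illustration, not part of this diff; the wrapping FileList component and its state handling are assumptions):
import { useState } from 'react'
import type { FileItem } from '@/models/datasets'
import FileListItem from './file-list-item'
const FileList = ({ initialItems }: { initialItems: FileItem[] }) => {
  const [items, setItems] = useState(initialItems)
  return (
    <div className="flex flex-col gap-1">
      {items.map(item => (
        <FileListItem
          key={item.fileID}
          // progress drives the UI: 0-99 shows the pie chart, 100 hides it,
          // and PROGRESS_ERROR switches to the destructive styling
          fileItem={item}
          onPreview={file => console.log('preview', file.name)}
          onRemove={fileID => setItems(prev => prev.filter(f => f.fileID !== fileID))}
        />
      ))}
    </div>
  )
}
export default FileList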

View File

@@ -1,231 +0,0 @@
import type { RefObject } from 'react'
import type { UploadDropzoneProps } from './upload-dropzone'
import { fireEvent, render, screen } from '@testing-library/react'
import { beforeEach, describe, expect, it, vi } from 'vitest'
import UploadDropzone from './upload-dropzone'
// Helper to create mock ref objects for testing
const createMockRef = <T,>(value: T | null = null): RefObject<T | null> => ({ current: value })
// Mock react-i18next
vi.mock('react-i18next', () => ({
useTranslation: () => ({
t: (key: string, options?: { ns?: string }) => {
const translations: Record<string, string> = {
'stepOne.uploader.button': 'Drag and drop files, or',
'stepOne.uploader.buttonSingleFile': 'Drag and drop file, or',
'stepOne.uploader.browse': 'Browse',
'stepOne.uploader.tip': 'Supports {{supportTypes}}, Max {{size}}MB each, up to {{batchCount}} files at a time, {{totalCount}} files total',
}
let result = translations[key] || key
if (options && typeof options === 'object') {
Object.entries(options).forEach(([k, v]) => {
result = result.replace(`{{${k}}}`, String(v))
})
}
return result
},
}),
}))
describe('UploadDropzone', () => {
const defaultProps: UploadDropzoneProps = {
dropRef: createMockRef<HTMLDivElement>() as RefObject<HTMLDivElement | null>,
dragRef: createMockRef<HTMLDivElement>() as RefObject<HTMLDivElement | null>,
fileUploaderRef: createMockRef<HTMLInputElement>() as RefObject<HTMLInputElement | null>,
dragging: false,
supportBatchUpload: true,
supportTypesShowNames: 'PDF, DOCX, TXT',
fileUploadConfig: {
file_size_limit: 15,
batch_count_limit: 5,
file_upload_limit: 10,
},
acceptTypes: ['.pdf', '.docx', '.txt'],
onSelectFile: vi.fn(),
onFileChange: vi.fn(),
allowedExtensions: ['pdf', 'docx', 'txt'],
}
beforeEach(() => {
vi.clearAllMocks()
})
describe('rendering', () => {
it('should render the dropzone container', () => {
const { container } = render(<UploadDropzone {...defaultProps} />)
const dropzone = container.querySelector('[class*="border-dashed"]')
expect(dropzone).toBeInTheDocument()
})
it('should render hidden file input', () => {
render(<UploadDropzone {...defaultProps} />)
const input = document.getElementById('fileUploader') as HTMLInputElement
expect(input).toBeInTheDocument()
expect(input).toHaveClass('hidden')
expect(input).toHaveAttribute('type', 'file')
})
it('should render upload icon', () => {
render(<UploadDropzone {...defaultProps} />)
const icon = document.querySelector('svg')
expect(icon).toBeInTheDocument()
})
it('should render browse label when extensions are allowed', () => {
render(<UploadDropzone {...defaultProps} />)
expect(screen.getByText('Browse')).toBeInTheDocument()
})
it('should not render browse label when no extensions allowed', () => {
render(<UploadDropzone {...defaultProps} allowedExtensions={[]} />)
expect(screen.queryByText('Browse')).not.toBeInTheDocument()
})
it('should render file size and count limits', () => {
render(<UploadDropzone {...defaultProps} />)
const tipText = screen.getByText(/Supports.*Max.*15MB/i)
expect(tipText).toBeInTheDocument()
})
})
describe('file input configuration', () => {
it('should allow multiple files when supportBatchUpload is true', () => {
render(<UploadDropzone {...defaultProps} supportBatchUpload={true} />)
const input = document.getElementById('fileUploader') as HTMLInputElement
expect(input).toHaveAttribute('multiple')
})
it('should not allow multiple files when supportBatchUpload is false', () => {
render(<UploadDropzone {...defaultProps} supportBatchUpload={false} />)
const input = document.getElementById('fileUploader') as HTMLInputElement
expect(input).not.toHaveAttribute('multiple')
})
it('should set accept attribute with correct types', () => {
render(<UploadDropzone {...defaultProps} acceptTypes={['.pdf', '.docx']} />)
const input = document.getElementById('fileUploader') as HTMLInputElement
expect(input).toHaveAttribute('accept', '.pdf,.docx')
})
})
describe('text content', () => {
it('should show batch upload text when supportBatchUpload is true', () => {
render(<UploadDropzone {...defaultProps} supportBatchUpload={true} />)
expect(screen.getByText(/Drag and drop files/i)).toBeInTheDocument()
})
it('should show single file text when supportBatchUpload is false', () => {
render(<UploadDropzone {...defaultProps} supportBatchUpload={false} />)
expect(screen.getByText(/Drag and drop file/i)).toBeInTheDocument()
})
})
describe('dragging state', () => {
it('should apply dragging styles when dragging is true', () => {
const { container } = render(<UploadDropzone {...defaultProps} dragging={true} />)
const dropzone = container.querySelector('[class*="border-components-dropzone-border-accent"]')
expect(dropzone).toBeInTheDocument()
})
it('should render drag overlay when dragging', () => {
const dragRef = createMockRef<HTMLDivElement>()
render(<UploadDropzone {...defaultProps} dragging={true} dragRef={dragRef as RefObject<HTMLDivElement | null>} />)
const overlay = document.querySelector('.absolute.left-0.top-0')
expect(overlay).toBeInTheDocument()
})
it('should not render drag overlay when not dragging', () => {
render(<UploadDropzone {...defaultProps} dragging={false} />)
const overlay = document.querySelector('.absolute.left-0.top-0')
expect(overlay).not.toBeInTheDocument()
})
})
describe('event handlers', () => {
it('should call onSelectFile when browse label is clicked', () => {
const onSelectFile = vi.fn()
render(<UploadDropzone {...defaultProps} onSelectFile={onSelectFile} />)
const browseLabel = screen.getByText('Browse')
fireEvent.click(browseLabel)
expect(onSelectFile).toHaveBeenCalledTimes(1)
})
it('should call onFileChange when files are selected', () => {
const onFileChange = vi.fn()
render(<UploadDropzone {...defaultProps} onFileChange={onFileChange} />)
const input = document.getElementById('fileUploader') as HTMLInputElement
const file = new File(['content'], 'test.pdf', { type: 'application/pdf' })
fireEvent.change(input, { target: { files: [file] } })
expect(onFileChange).toHaveBeenCalledTimes(1)
})
})
describe('refs', () => {
it('should attach dropRef to drop container', () => {
const dropRef = createMockRef<HTMLDivElement>()
render(<UploadDropzone {...defaultProps} dropRef={dropRef as RefObject<HTMLDivElement | null>} />)
expect(dropRef.current).toBeInstanceOf(HTMLDivElement)
})
it('should attach fileUploaderRef to input element', () => {
const fileUploaderRef = createMockRef<HTMLInputElement>()
render(<UploadDropzone {...defaultProps} fileUploaderRef={fileUploaderRef as RefObject<HTMLInputElement | null>} />)
expect(fileUploaderRef.current).toBeInstanceOf(HTMLInputElement)
})
it('should attach dragRef to overlay when dragging', () => {
const dragRef = createMockRef<HTMLDivElement>()
render(<UploadDropzone {...defaultProps} dragging={true} dragRef={dragRef as RefObject<HTMLDivElement | null>} />)
expect(dragRef.current).toBeInstanceOf(HTMLDivElement)
})
})
describe('styling', () => {
it('should have base dropzone styling', () => {
const { container } = render(<UploadDropzone {...defaultProps} />)
const dropzone = container.querySelector('[class*="border-dashed"]')
expect(dropzone).toBeInTheDocument()
expect(dropzone).toHaveClass('rounded-xl')
})
it('should have cursor-pointer on browse label', () => {
render(<UploadDropzone {...defaultProps} />)
const browseLabel = screen.getByText('Browse')
expect(browseLabel).toHaveClass('cursor-pointer')
})
})
describe('accessibility', () => {
it('should have an accessible file input', () => {
render(<UploadDropzone {...defaultProps} />)
const input = document.getElementById('fileUploader') as HTMLInputElement
expect(input).toHaveAttribute('id', 'fileUploader')
})
})
})

View File

@@ -1,83 +0,0 @@
import type { ChangeEvent, RefObject } from 'react'
import { RiUploadCloud2Line } from '@remixicon/react'
import { useTranslation } from 'react-i18next'
import { cn } from '@/utils/classnames'
type FileUploadConfig = {
file_size_limit: number
batch_count_limit: number
file_upload_limit: number
}
export type UploadDropzoneProps = {
dropRef: RefObject<HTMLDivElement | null>
dragRef: RefObject<HTMLDivElement | null>
fileUploaderRef: RefObject<HTMLInputElement | null>
dragging: boolean
supportBatchUpload: boolean
supportTypesShowNames: string
fileUploadConfig: FileUploadConfig
acceptTypes: string[]
onSelectFile: () => void
onFileChange: (e: ChangeEvent<HTMLInputElement>) => void
allowedExtensions: string[]
}
const UploadDropzone = ({
dropRef,
dragRef,
fileUploaderRef,
dragging,
supportBatchUpload,
supportTypesShowNames,
fileUploadConfig,
acceptTypes,
onSelectFile,
onFileChange,
allowedExtensions,
}: UploadDropzoneProps) => {
const { t } = useTranslation()
return (
<>
<input
ref={fileUploaderRef}
id="fileUploader"
className="hidden"
type="file"
multiple={supportBatchUpload}
accept={acceptTypes.join(',')}
onChange={onFileChange}
/>
<div
ref={dropRef}
className={cn(
'relative box-border flex min-h-20 flex-col items-center justify-center gap-1 rounded-xl border border-dashed border-components-dropzone-border bg-components-dropzone-bg px-4 py-3 text-xs leading-4 text-text-tertiary',
dragging && 'border-components-dropzone-border-accent bg-components-dropzone-bg-accent',
)}
>
<div className="flex min-h-5 items-center justify-center text-sm leading-4 text-text-secondary">
<RiUploadCloud2Line className="mr-2 size-5" />
<span>
{supportBatchUpload ? t('stepOne.uploader.button', { ns: 'datasetCreation' }) : t('stepOne.uploader.buttonSingleFile', { ns: 'datasetCreation' })}
{allowedExtensions.length > 0 && (
<label className="ml-1 cursor-pointer text-text-accent" onClick={onSelectFile}>{t('stepOne.uploader.browse', { ns: 'datasetCreation' })}</label>
)}
</span>
</div>
<div>
{t('stepOne.uploader.tip', {
ns: 'datasetCreation',
size: fileUploadConfig.file_size_limit,
supportTypes: supportTypesShowNames,
batchCount: fileUploadConfig.batch_count_limit,
totalCount: fileUploadConfig.file_upload_limit,
})}
</div>
{dragging && <div ref={dragRef} className="absolute left-0 top-0 h-full w-full" />}
</div>
</>
)
}
export default UploadDropzone
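A minimal wiring sketch for UploadDropzone (an editor's illustration, not part of this diff; the drag listeners are omitted and the handlers shown are assumptions):
import type { ChangeEvent } from 'react'
import { useRef, useState } from 'react'
import UploadDropzone from './upload-dropzone'
const Uploader = () => {
  const dropRef = useRef<HTMLDivElement>(null)
  const dragRef = useRef<HTMLDivElement>(null)
  const fileUploaderRef = useRef<HTMLInputElement>(null)
  // Real code would flip this from dragenter/dragleave listeners attached to dropRef
  const [dragging] = useState(false)
  return (
    <UploadDropzone
      dropRef={dropRef}
      dragRef={dragRef}
      fileUploaderRef={fileUploaderRef}
      dragging={dragging}
      supportBatchUpload
      supportTypesShowNames="PDF, DOCX, TXT"
      fileUploadConfig={{ file_size_limit: 15, batch_count_limit: 5, file_upload_limit: 10 }}
      acceptTypes={['.pdf', '.docx', '.txt']}
      allowedExtensions={['pdf', 'docx', 'txt']}
      // "Browse" forwards the click to the hidden input
      onSelectFile={() => fileUploaderRef.current?.click()}
      onFileChange={(e: ChangeEvent<HTMLInputElement>) => console.log(e.target.files)}
    />
  )
}
export default Uploader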

Some files were not shown because too many files have changed in this diff.