Mirror of https://github.com/langgenius/dify.git (synced 2026-02-06 16:13:56 +00:00)

Compare commits: fix/workfl...hotfix/1.1 (19 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 840a8f3fc2 | |
| | b4a5296fd1 | |
| | fcb53383df | |
| | 540e1db83c | |
| | 2f75e38c08 | |
| | cd03e0a9ef | |
| | df2421d187 | |
| | 0ba321d840 | |
| | d8402f686e | |
| | 8bd8dee767 | |
| | 05f2764d7c | |
| | f5d6c250ed | |
| | 45daec7541 | |
| | c14a8bb437 | |
| | b76c8fa853 | |
| | 8c3e77cd0c | |
| | 476946f122 | |
| | 62a698a883 | |
| | ebca36ffbb | |
@@ -1,3 +1,4 @@
import logging
import uuid
from datetime import datetime
from typing import Any, Literal, TypeAlias

@@ -54,6 +55,8 @@ ALLOW_CREATE_APP_MODES = ["chat", "agent-chat", "advanced-chat", "workflow", "co

register_enum_models(console_ns, IconType)

_logger = logging.getLogger(__name__)


class AppListQuery(BaseModel):
    page: int = Field(default=1, ge=1, le=99999, description="Page number (1-99999)")

@@ -499,6 +502,7 @@ class AppListApi(Resource):
select(Workflow).where(
    Workflow.version == Workflow.VERSION_DRAFT,
    Workflow.app_id.in_(workflow_capable_app_ids),
    Workflow.tenant_id == current_tenant_id,
)
)
.scalars()

@@ -510,12 +514,14 @@ class AppListApi(Resource):
    NodeType.TRIGGER_PLUGIN,
}
for workflow in draft_workflows:
    node_id = None
    try:
        for _, node_data in workflow.walk_nodes():
        for node_id, node_data in workflow.walk_nodes():
            if node_data.get("type") in trigger_node_types:
                draft_trigger_app_ids.add(str(workflow.app_id))
                break
    except Exception:
        _logger.exception("error while walking nodes, workflow_id=%s, node_id=%s", workflow.id, node_id)
        continue

for app in app_pagination.items:
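The hunk above rebinds the loop variable so the node identifier is captured before any exception can be raised, which lets the except block log the failing node. A minimal standalone sketch of the same pattern; here `walk_nodes` is a hypothetical stand-in for the workflow API, not the real method:

```python
import logging

logger = logging.getLogger(__name__)


def walk_nodes():
    """Hypothetical stand-in for Workflow.walk_nodes(): yields (node_id, node_data)."""
    yield "start", {"type": "trigger"}
    yield "llm-1", {"type": "llm"}


def find_trigger_nodes() -> set[str]:
    trigger_types = {"trigger"}
    found: set[str] = set()
    node_id = None  # defined before the loop so the except block can always log it
    try:
        for node_id, node_data in walk_nodes():
            if node_data.get("type") in trigger_types:
                found.add(node_id)
                break
    except Exception:
        # node_id now points at the last node reached, instead of a throwaway "_"
        logger.exception("error while walking nodes, node_id=%s", node_id)
    return found
```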
@@ -1,14 +1,27 @@
|
||||
from typing import Literal
|
||||
from uuid import UUID
|
||||
|
||||
from flask import request
|
||||
from flask_restx import Namespace, Resource, fields, marshal_with
|
||||
from pydantic import BaseModel, Field
|
||||
from werkzeug.exceptions import Forbidden
|
||||
|
||||
from controllers.common.schema import register_schema_models
|
||||
from controllers.console import console_ns
|
||||
from controllers.console.wraps import account_initialization_required, edit_permission_required, setup_required
|
||||
from controllers.fastopenapi import console_router
|
||||
from libs.login import current_account_with_tenant, login_required
|
||||
from services.tag_service import TagService
|
||||
|
||||
dataset_tag_fields = {
|
||||
"id": fields.String,
|
||||
"name": fields.String,
|
||||
"type": fields.String,
|
||||
"binding_count": fields.String,
|
||||
}
|
||||
|
||||
|
||||
def build_dataset_tag_fields(api_or_ns: Namespace):
|
||||
return api_or_ns.model("DataSetTag", dataset_tag_fields)
|
||||
|
||||
|
||||
class TagBasePayload(BaseModel):
|
||||
name: str = Field(description="Tag name", min_length=1, max_length=50)
|
||||
@@ -32,129 +45,115 @@ class TagListQueryParam(BaseModel):
|
||||
keyword: str | None = Field(None, description="Search keyword")
|
||||
|
||||
|
||||
class TagResponse(BaseModel):
|
||||
id: str = Field(description="Tag ID")
|
||||
name: str = Field(description="Tag name")
|
||||
type: str = Field(description="Tag type")
|
||||
binding_count: int = Field(description="Number of bindings")
|
||||
|
||||
|
||||
class TagBindingResult(BaseModel):
|
||||
result: Literal["success"] = Field(description="Operation result", examples=["success"])
|
||||
|
||||
|
||||
@console_router.get(
|
||||
"/tags",
|
||||
response_model=list[TagResponse],
|
||||
tags=["console"],
|
||||
register_schema_models(
|
||||
console_ns,
|
||||
TagBasePayload,
|
||||
TagBindingPayload,
|
||||
TagBindingRemovePayload,
|
||||
TagListQueryParam,
|
||||
)
|
||||
@setup_required
|
||||
@login_required
|
||||
@account_initialization_required
|
||||
def list_tags(query: TagListQueryParam) -> list[TagResponse]:
|
||||
_, current_tenant_id = current_account_with_tenant()
|
||||
tags = TagService.get_tags(query.type, current_tenant_id, query.keyword)
|
||||
|
||||
return [
|
||||
TagResponse(
|
||||
id=tag.id,
|
||||
name=tag.name,
|
||||
type=tag.type,
|
||||
binding_count=int(tag.binding_count),
|
||||
)
|
||||
for tag in tags
|
||||
]
|
||||
|
||||
|
||||
@console_router.post(
|
||||
"/tags",
|
||||
response_model=TagResponse,
|
||||
tags=["console"],
|
||||
)
|
||||
@setup_required
|
||||
@login_required
|
||||
@account_initialization_required
|
||||
def create_tag(payload: TagBasePayload) -> TagResponse:
|
||||
current_user, _ = current_account_with_tenant()
|
||||
# The role of the current user in the tag table must be admin, owner, or editor
|
||||
if not (current_user.has_edit_permission or current_user.is_dataset_editor):
|
||||
raise Forbidden()
|
||||
@console_ns.route("/tags")
|
||||
class TagListApi(Resource):
|
||||
@setup_required
|
||||
@login_required
|
||||
@account_initialization_required
|
||||
@console_ns.doc(
|
||||
params={"type": 'Tag type filter. Can be "knowledge" or "app".', "keyword": "Search keyword for tag name."}
|
||||
)
|
||||
@marshal_with(dataset_tag_fields)
|
||||
def get(self):
|
||||
_, current_tenant_id = current_account_with_tenant()
|
||||
raw_args = request.args.to_dict()
|
||||
param = TagListQueryParam.model_validate(raw_args)
|
||||
tags = TagService.get_tags(param.type, current_tenant_id, param.keyword)
|
||||
|
||||
tag = TagService.save_tags(payload.model_dump())
|
||||
return tags, 200
|
||||
|
||||
return TagResponse(id=tag.id, name=tag.name, type=tag.type, binding_count=0)
|
||||
@console_ns.expect(console_ns.models[TagBasePayload.__name__])
|
||||
@setup_required
|
||||
@login_required
|
||||
@account_initialization_required
|
||||
def post(self):
|
||||
current_user, _ = current_account_with_tenant()
|
||||
# The role of the current user in the tag table must be admin, owner, or editor
|
||||
if not (current_user.has_edit_permission or current_user.is_dataset_editor):
|
||||
raise Forbidden()
|
||||
|
||||
payload = TagBasePayload.model_validate(console_ns.payload or {})
|
||||
tag = TagService.save_tags(payload.model_dump())
|
||||
|
||||
response = {"id": tag.id, "name": tag.name, "type": tag.type, "binding_count": 0}
|
||||
|
||||
return response, 200
|
||||
|
||||
|
||||
@console_router.patch(
|
||||
"/tags/<uuid:tag_id>",
|
||||
response_model=TagResponse,
|
||||
tags=["console"],
|
||||
)
|
||||
@setup_required
|
||||
@login_required
|
||||
@account_initialization_required
|
||||
def update_tag(tag_id: UUID, payload: TagBasePayload) -> TagResponse:
|
||||
current_user, _ = current_account_with_tenant()
|
||||
tag_id_str = str(tag_id)
|
||||
# The role of the current user in the tag table must be admin, owner, or editor
|
||||
if not (current_user.has_edit_permission or current_user.is_dataset_editor):
|
||||
raise Forbidden()
|
||||
@console_ns.route("/tags/<uuid:tag_id>")
|
||||
class TagUpdateDeleteApi(Resource):
|
||||
@console_ns.expect(console_ns.models[TagBasePayload.__name__])
|
||||
@setup_required
|
||||
@login_required
|
||||
@account_initialization_required
|
||||
def patch(self, tag_id):
|
||||
current_user, _ = current_account_with_tenant()
|
||||
tag_id = str(tag_id)
|
||||
# The role of the current user in the tag table must be admin, owner, or editor
|
||||
if not (current_user.has_edit_permission or current_user.is_dataset_editor):
|
||||
raise Forbidden()
|
||||
|
||||
tag = TagService.update_tags(payload.model_dump(), tag_id_str)
|
||||
payload = TagBasePayload.model_validate(console_ns.payload or {})
|
||||
tag = TagService.update_tags(payload.model_dump(), tag_id)
|
||||
|
||||
binding_count = TagService.get_tag_binding_count(tag_id_str)
|
||||
binding_count = TagService.get_tag_binding_count(tag_id)
|
||||
|
||||
return TagResponse(id=tag.id, name=tag.name, type=tag.type, binding_count=binding_count)
|
||||
response = {"id": tag.id, "name": tag.name, "type": tag.type, "binding_count": binding_count}
|
||||
|
||||
return response, 200
|
||||
|
||||
@setup_required
|
||||
@login_required
|
||||
@account_initialization_required
|
||||
@edit_permission_required
|
||||
def delete(self, tag_id):
|
||||
tag_id = str(tag_id)
|
||||
|
||||
TagService.delete_tag(tag_id)
|
||||
|
||||
return 204
|
||||
|
||||
|
||||
@console_router.delete(
|
||||
"/tags/<uuid:tag_id>",
|
||||
tags=["console"],
|
||||
status_code=204,
|
||||
)
|
||||
@setup_required
|
||||
@login_required
|
||||
@account_initialization_required
|
||||
@edit_permission_required
|
||||
def delete_tag(tag_id: UUID) -> None:
|
||||
tag_id_str = str(tag_id)
|
||||
@console_ns.route("/tag-bindings/create")
|
||||
class TagBindingCreateApi(Resource):
|
||||
@console_ns.expect(console_ns.models[TagBindingPayload.__name__])
|
||||
@setup_required
|
||||
@login_required
|
||||
@account_initialization_required
|
||||
def post(self):
|
||||
current_user, _ = current_account_with_tenant()
|
||||
# The role of the current user in the tag table must be admin, owner, editor, or dataset_operator
|
||||
if not (current_user.has_edit_permission or current_user.is_dataset_editor):
|
||||
raise Forbidden()
|
||||
|
||||
TagService.delete_tag(tag_id_str)
|
||||
payload = TagBindingPayload.model_validate(console_ns.payload or {})
|
||||
TagService.save_tag_binding(payload.model_dump())
|
||||
|
||||
return {"result": "success"}, 200
|
||||
|
||||
|
||||
@console_router.post(
|
||||
"/tag-bindings/create",
|
||||
response_model=TagBindingResult,
|
||||
tags=["console"],
|
||||
)
|
||||
@setup_required
|
||||
@login_required
|
||||
@account_initialization_required
|
||||
def create_tag_binding(payload: TagBindingPayload) -> TagBindingResult:
|
||||
current_user, _ = current_account_with_tenant()
|
||||
# The role of the current user in the tag table must be admin, owner, editor, or dataset_operator
|
||||
if not (current_user.has_edit_permission or current_user.is_dataset_editor):
|
||||
raise Forbidden()
|
||||
@console_ns.route("/tag-bindings/remove")
|
||||
class TagBindingDeleteApi(Resource):
|
||||
@console_ns.expect(console_ns.models[TagBindingRemovePayload.__name__])
|
||||
@setup_required
|
||||
@login_required
|
||||
@account_initialization_required
|
||||
def post(self):
|
||||
current_user, _ = current_account_with_tenant()
|
||||
# The role of the current user in the tag table must be admin, owner, editor, or dataset_operator
|
||||
if not (current_user.has_edit_permission or current_user.is_dataset_editor):
|
||||
raise Forbidden()
|
||||
|
||||
TagService.save_tag_binding(payload.model_dump())
|
||||
payload = TagBindingRemovePayload.model_validate(console_ns.payload or {})
|
||||
TagService.delete_tag_binding(payload.model_dump())
|
||||
|
||||
return TagBindingResult(result="success")
|
||||
|
||||
|
||||
@console_router.post(
|
||||
"/tag-bindings/remove",
|
||||
response_model=TagBindingResult,
|
||||
tags=["console"],
|
||||
)
|
||||
@setup_required
|
||||
@login_required
|
||||
@account_initialization_required
|
||||
def delete_tag_binding(payload: TagBindingRemovePayload) -> TagBindingResult:
|
||||
current_user, _ = current_account_with_tenant()
|
||||
# The role of the current user in the tag table must be admin, owner, editor, or dataset_operator
|
||||
if not (current_user.has_edit_permission or current_user.is_dataset_editor):
|
||||
raise Forbidden()
|
||||
|
||||
TagService.delete_tag_binding(payload.model_dump())
|
||||
|
||||
return TagBindingResult(result="success")
|
||||
return {"result": "success"}, 200
|
||||
|
||||
@@ -1,6 +1,6 @@
[project]
name = "dify-api"
version = "1.11.4"
version = "1.12.1"
requires-python = ">=3.11,<3.13"

dependencies = [

@@ -24,7 +24,7 @@ class TagService:
escaped_keyword = escape_like_pattern(keyword)
query = query.where(sa.and_(Tag.name.ilike(f"%{escaped_keyword}%", escape="\\")))
query = query.group_by(Tag.id, Tag.type, Tag.name, Tag.created_at)
results = query.order_by(Tag.created_at.desc()).all()
results: list = query.order_by(Tag.created_at.desc()).all()
return results

@staticmethod
@@ -14,6 +14,9 @@ from models.model import UploadFile
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Batch size for database operations to keep transactions short
|
||||
BATCH_SIZE = 1000
|
||||
|
||||
|
||||
@shared_task(queue="dataset")
|
||||
def batch_clean_document_task(document_ids: list[str], dataset_id: str, doc_form: str | None, file_ids: list[str]):
|
||||
@@ -31,63 +34,179 @@ def batch_clean_document_task(document_ids: list[str], dataset_id: str, doc_form
|
||||
if not doc_form:
|
||||
raise ValueError("doc_form is required")
|
||||
|
||||
with session_factory.create_session() as session:
|
||||
try:
|
||||
dataset = session.query(Dataset).where(Dataset.id == dataset_id).first()
|
||||
|
||||
if not dataset:
|
||||
raise Exception("Document has no dataset")
|
||||
|
||||
session.query(DatasetMetadataBinding).where(
|
||||
DatasetMetadataBinding.dataset_id == dataset_id,
|
||||
DatasetMetadataBinding.document_id.in_(document_ids),
|
||||
).delete(synchronize_session=False)
|
||||
storage_keys_to_delete: list[str] = []
|
||||
index_node_ids: list[str] = []
|
||||
segment_ids: list[str] = []
|
||||
total_image_upload_file_ids: list[str] = []
|
||||
|
||||
try:
|
||||
# ============ Step 1: Query segment and file data (short read-only transaction) ============
|
||||
with session_factory.create_session() as session:
|
||||
# Get segments info
|
||||
segments = session.scalars(
|
||||
select(DocumentSegment).where(DocumentSegment.document_id.in_(document_ids))
|
||||
).all()
|
||||
# check that segments exist
|
||||
|
||||
if segments:
|
||||
index_node_ids = [segment.index_node_id for segment in segments]
|
||||
index_processor = IndexProcessorFactory(doc_form).init_index_processor()
|
||||
index_processor.clean(
|
||||
dataset, index_node_ids, with_keywords=True, delete_child_chunks=True, delete_summaries=True
|
||||
)
|
||||
segment_ids = [segment.id for segment in segments]
|
||||
|
||||
# Collect image file IDs from segment content
|
||||
for segment in segments:
|
||||
image_upload_file_ids = get_image_upload_file_ids(segment.content)
|
||||
image_files = session.query(UploadFile).where(UploadFile.id.in_(image_upload_file_ids)).all()
|
||||
for image_file in image_files:
|
||||
try:
|
||||
if image_file and image_file.key:
|
||||
storage.delete(image_file.key)
|
||||
except Exception:
|
||||
logger.exception(
|
||||
"Delete image_files failed when storage deleted, \
|
||||
image_upload_file_is: %s",
|
||||
image_file.id,
|
||||
)
|
||||
stmt = delete(UploadFile).where(UploadFile.id.in_(image_upload_file_ids))
|
||||
session.execute(stmt)
|
||||
session.delete(segment)
|
||||
total_image_upload_file_ids.extend(image_upload_file_ids)
|
||||
|
||||
# Query storage keys for image files
|
||||
if total_image_upload_file_ids:
|
||||
image_files = session.scalars(
|
||||
select(UploadFile).where(UploadFile.id.in_(total_image_upload_file_ids))
|
||||
).all()
|
||||
storage_keys_to_delete.extend([f.key for f in image_files if f and f.key])
|
||||
|
||||
# Query storage keys for document files
|
||||
if file_ids:
|
||||
files = session.scalars(select(UploadFile).where(UploadFile.id.in_(file_ids))).all()
|
||||
for file in files:
|
||||
try:
|
||||
storage.delete(file.key)
|
||||
except Exception:
|
||||
logger.exception("Delete file failed when document deleted, file_id: %s", file.id)
|
||||
stmt = delete(UploadFile).where(UploadFile.id.in_(file_ids))
|
||||
session.execute(stmt)
|
||||
storage_keys_to_delete.extend([f.key for f in files if f and f.key])
|
||||
|
||||
session.commit()
|
||||
|
||||
end_at = time.perf_counter()
|
||||
logger.info(
|
||||
click.style(
|
||||
f"Cleaned documents when documents deleted latency: {end_at - start_at}",
|
||||
fg="green",
|
||||
# ============ Step 2: Clean vector index (external service, fresh session for dataset) ============
|
||||
if index_node_ids:
|
||||
try:
|
||||
# Fetch dataset in a fresh session to avoid DetachedInstanceError
|
||||
with session_factory.create_session() as session:
|
||||
dataset = session.query(Dataset).where(Dataset.id == dataset_id).first()
|
||||
if not dataset:
|
||||
logger.warning("Dataset not found for vector index cleanup, dataset_id: %s", dataset_id)
|
||||
else:
|
||||
index_processor = IndexProcessorFactory(doc_form).init_index_processor()
|
||||
index_processor.clean(
|
||||
dataset, index_node_ids, with_keywords=True, delete_child_chunks=True, delete_summaries=True
|
||||
)
|
||||
except Exception:
|
||||
logger.exception(
|
||||
"Failed to clean vector index for dataset_id: %s, document_ids: %s, index_node_ids count: %d",
|
||||
dataset_id,
|
||||
document_ids,
|
||||
len(index_node_ids),
|
||||
)
|
||||
)
|
||||
|
||||
# ============ Step 3: Delete metadata binding (separate short transaction) ============
|
||||
try:
|
||||
with session_factory.create_session() as session:
|
||||
deleted_count = (
|
||||
session.query(DatasetMetadataBinding)
|
||||
.where(
|
||||
DatasetMetadataBinding.dataset_id == dataset_id,
|
||||
DatasetMetadataBinding.document_id.in_(document_ids),
|
||||
)
|
||||
.delete(synchronize_session=False)
|
||||
)
|
||||
session.commit()
|
||||
logger.debug("Deleted %d metadata bindings for dataset_id: %s", deleted_count, dataset_id)
|
||||
except Exception:
|
||||
logger.exception("Cleaned documents when documents deleted failed")
|
||||
logger.exception(
|
||||
"Failed to delete metadata bindings for dataset_id: %s, document_ids: %s",
|
||||
dataset_id,
|
||||
document_ids,
|
||||
)
|
||||
|
||||
# ============ Step 4: Batch delete UploadFile records (multiple short transactions) ============
|
||||
if total_image_upload_file_ids:
|
||||
failed_batches = 0
|
||||
total_batches = (len(total_image_upload_file_ids) + BATCH_SIZE - 1) // BATCH_SIZE
|
||||
for i in range(0, len(total_image_upload_file_ids), BATCH_SIZE):
|
||||
batch = total_image_upload_file_ids[i : i + BATCH_SIZE]
|
||||
try:
|
||||
with session_factory.create_session() as session:
|
||||
stmt = delete(UploadFile).where(UploadFile.id.in_(batch))
|
||||
session.execute(stmt)
|
||||
session.commit()
|
||||
except Exception:
|
||||
failed_batches += 1
|
||||
logger.exception(
|
||||
"Failed to delete image UploadFile batch %d-%d for dataset_id: %s",
|
||||
i,
|
||||
i + len(batch),
|
||||
dataset_id,
|
||||
)
|
||||
if failed_batches > 0:
|
||||
logger.warning(
|
||||
"Image UploadFile deletion: %d/%d batches failed for dataset_id: %s",
|
||||
failed_batches,
|
||||
total_batches,
|
||||
dataset_id,
|
||||
)
|
||||
|
||||
# ============ Step 5: Batch delete DocumentSegment records (multiple short transactions) ============
|
||||
if segment_ids:
|
||||
failed_batches = 0
|
||||
total_batches = (len(segment_ids) + BATCH_SIZE - 1) // BATCH_SIZE
|
||||
for i in range(0, len(segment_ids), BATCH_SIZE):
|
||||
batch = segment_ids[i : i + BATCH_SIZE]
|
||||
try:
|
||||
with session_factory.create_session() as session:
|
||||
segment_delete_stmt = delete(DocumentSegment).where(DocumentSegment.id.in_(batch))
|
||||
session.execute(segment_delete_stmt)
|
||||
session.commit()
|
||||
except Exception:
|
||||
failed_batches += 1
|
||||
logger.exception(
|
||||
"Failed to delete DocumentSegment batch %d-%d for dataset_id: %s, document_ids: %s",
|
||||
i,
|
||||
i + len(batch),
|
||||
dataset_id,
|
||||
document_ids,
|
||||
)
|
||||
if failed_batches > 0:
|
||||
logger.warning(
|
||||
"DocumentSegment deletion: %d/%d batches failed, document_ids: %s",
|
||||
failed_batches,
|
||||
total_batches,
|
||||
document_ids,
|
||||
)
|
||||
|
||||
# ============ Step 6: Delete document-associated files (separate short transaction) ============
|
||||
if file_ids:
|
||||
try:
|
||||
with session_factory.create_session() as session:
|
||||
stmt = delete(UploadFile).where(UploadFile.id.in_(file_ids))
|
||||
session.execute(stmt)
|
||||
session.commit()
|
||||
except Exception:
|
||||
logger.exception(
|
||||
"Failed to delete document UploadFile records for dataset_id: %s, file_ids: %s",
|
||||
dataset_id,
|
||||
file_ids,
|
||||
)
|
||||
|
||||
# ============ Step 7: Delete storage files (I/O operations, no DB transaction) ============
|
||||
storage_delete_failures = 0
|
||||
for storage_key in storage_keys_to_delete:
|
||||
try:
|
||||
storage.delete(storage_key)
|
||||
except Exception:
|
||||
storage_delete_failures += 1
|
||||
logger.exception("Failed to delete file from storage, key: %s", storage_key)
|
||||
if storage_delete_failures > 0:
|
||||
logger.warning(
|
||||
"Storage file deletion completed with %d failures out of %d total files for dataset_id: %s",
|
||||
storage_delete_failures,
|
||||
len(storage_keys_to_delete),
|
||||
dataset_id,
|
||||
)
|
||||
|
||||
end_at = time.perf_counter()
|
||||
logger.info(
|
||||
click.style(
|
||||
f"Cleaned documents when documents deleted latency: {end_at - start_at:.2f}s, "
|
||||
f"dataset_id: {dataset_id}, document_ids: {document_ids}, "
|
||||
f"segments: {len(segment_ids)}, image_files: {len(total_image_upload_file_ids)}, "
|
||||
f"storage_files: {len(storage_keys_to_delete)}",
|
||||
fg="green",
|
||||
)
|
||||
)
|
||||
except Exception:
|
||||
logger.exception(
|
||||
"Batch clean documents failed for dataset_id: %s, document_ids: %s",
|
||||
dataset_id,
|
||||
document_ids,
|
||||
)
|
||||
|
||||
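The batch_clean_document_task refactor above replaces one long transaction with several short ones, deleting rows in chunks of BATCH_SIZE so each commit stays small. A minimal sketch of that chunked-delete pattern; the session factory and model usage follow the diff, while the helper itself is illustrative only:

```python
from sqlalchemy import delete

BATCH_SIZE = 1000  # keep each transaction short, as in the task above


def delete_in_batches(session_factory, model, ids: list[str]) -> int:
    """Delete rows by primary key in BATCH_SIZE chunks, one transaction per chunk."""
    failed_batches = 0
    for i in range(0, len(ids), BATCH_SIZE):
        batch = ids[i : i + BATCH_SIZE]
        try:
            with session_factory.create_session() as session:
                session.execute(delete(model).where(model.id.in_(batch)))
                session.commit()
        except Exception:
            # a failed chunk does not roll back chunks that already committed
            failed_batches += 1
    return failed_batches
```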
@@ -3,6 +3,7 @@ import time

import click
from celery import shared_task
from sqlalchemy import delete

from core.db.session_factory import session_factory
from core.rag.index_processor.index_processor_factory import IndexProcessorFactory

@@ -67,8 +68,14 @@ def delete_segment_from_index_task(
if segment_attachment_bindings:
    attachment_ids = [binding.attachment_id for binding in segment_attachment_bindings]
    index_processor.clean(dataset=dataset, node_ids=attachment_ids, with_keywords=False)
    for binding in segment_attachment_bindings:
        session.delete(binding)
    segment_attachment_bind_ids = [i.id for i in segment_attachment_bindings]

    for i in range(0, len(segment_attachment_bind_ids), 1000):
        segment_attachment_bind_delete_stmt = delete(SegmentAttachmentBinding).where(
            SegmentAttachmentBinding.id.in_(segment_attachment_bind_ids[i : i + 1000])
        )
        session.execute(segment_attachment_bind_delete_stmt)

    # delete upload file
    session.query(UploadFile).where(UploadFile.id.in_(attachment_ids)).delete(synchronize_session=False)
session.commit()
@@ -28,7 +28,7 @@ def document_indexing_sync_task(dataset_id: str, document_id: str):
logger.info(click.style(f"Start sync document: {document_id}", fg="green"))
start_at = time.perf_counter()

with session_factory.create_session() as session:
with session_factory.create_session() as session, session.begin():
    document = session.query(Document).where(Document.id == document_id, Document.dataset_id == dataset_id).first()

    if not document:

@@ -68,7 +68,6 @@ def document_indexing_sync_task(dataset_id: str, document_id: str):
document.indexing_status = "error"
document.error = "Datasource credential not found. Please reconnect your Notion workspace."
document.stopped_at = naive_utc_now()
session.commit()
return

loader = NotionExtractor(

@@ -85,7 +84,6 @@ def document_indexing_sync_task(dataset_id: str, document_id: str):
if last_edited_time != page_edited_time:
    document.indexing_status = "parsing"
    document.processing_started_at = naive_utc_now()
    session.commit()

# delete all document segment and index
try:
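Several of these tasks switch from explicit session.commit() calls to SQLAlchemy's session.begin() context manager, which commits automatically when the block exits cleanly and rolls back on an exception. A small illustration of that pattern; the models and session factory mirror the diff, but the helper function and its name are hypothetical and the update is simplified:

```python
from core.db.session_factory import session_factory  # as used in the tasks above
from models.dataset import Document


def mark_parsing(document_id: str, dataset_id: str) -> bool:
    # begin() opens a transaction that commits on normal exit and rolls back on error,
    # so no explicit session.commit() is needed inside the block.
    with session_factory.create_session() as session, session.begin():
        document = (
            session.query(Document)
            .where(Document.id == document_id, Document.dataset_id == dataset_id)
            .first()
        )
        if not document:
            return False
        document.indexing_status = "parsing"
        return True
```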
@@ -8,7 +8,6 @@ from sqlalchemy import delete, select
from core.db.session_factory import session_factory
from core.indexing_runner import DocumentIsPausedError, IndexingRunner
from core.rag.index_processor.index_processor_factory import IndexProcessorFactory
from extensions.ext_database import db
from libs.datetime_utils import naive_utc_now
from models.dataset import Dataset, Document, DocumentSegment

@@ -27,7 +26,7 @@ def document_indexing_update_task(dataset_id: str, document_id: str):
logger.info(click.style(f"Start update document: {document_id}", fg="green"))
start_at = time.perf_counter()

with session_factory.create_session() as session:
with session_factory.create_session() as session, session.begin():
    document = session.query(Document).where(Document.id == document_id, Document.dataset_id == dataset_id).first()

    if not document:

@@ -36,7 +35,6 @@ def document_indexing_update_task(dataset_id: str, document_id: str):

document.indexing_status = "parsing"
document.processing_started_at = naive_utc_now()
session.commit()

# delete all document segment and index
try:

@@ -56,7 +54,7 @@ def document_indexing_update_task(dataset_id: str, document_id: str):
segment_ids = [segment.id for segment in segments]
segment_delete_stmt = delete(DocumentSegment).where(DocumentSegment.id.in_(segment_ids))
session.execute(segment_delete_stmt)
db.session.commit()

end_at = time.perf_counter()
logger.info(
    click.style(
@@ -259,8 +259,8 @@ def _delete_app_workflow_app_logs(tenant_id: str, app_id: str):


def _delete_app_workflow_archive_logs(tenant_id: str, app_id: str):
    def del_workflow_archive_log(workflow_archive_log_id: str):
        db.session.query(WorkflowArchiveLog).where(WorkflowArchiveLog.id == workflow_archive_log_id).delete(
    def del_workflow_archive_log(session, workflow_archive_log_id: str):
        session.query(WorkflowArchiveLog).where(WorkflowArchiveLog.id == workflow_archive_log_id).delete(
            synchronize_session=False
        )

@@ -420,7 +420,7 @@ def delete_draft_variables_batch(app_id: str, batch_size: int = 1000) -> int:
    total_files_deleted = 0

    while True:
        with session_factory.create_session() as session:
        with session_factory.create_session() as session, session.begin():
            # Get a batch of draft variable IDs along with their file_ids
            query_sql = """
                SELECT id, file_id FROM workflow_draft_variables
@@ -10,7 +10,10 @@ from models import Tenant
|
||||
from models.enums import CreatorUserRole
|
||||
from models.model import App, UploadFile
|
||||
from models.workflow import WorkflowDraftVariable, WorkflowDraftVariableFile
|
||||
from tasks.remove_app_and_related_data_task import _delete_draft_variables, delete_draft_variables_batch
|
||||
from tasks.remove_app_and_related_data_task import (
|
||||
_delete_draft_variables,
|
||||
delete_draft_variables_batch,
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
@@ -297,12 +300,18 @@ class TestDeleteDraftVariablesWithOffloadIntegration:
|
||||
def test_delete_draft_variables_with_offload_data(self, mock_storage, setup_offload_test_data):
|
||||
data = setup_offload_test_data
|
||||
app_id = data["app"].id
|
||||
upload_file_ids = [uf.id for uf in data["upload_files"]]
|
||||
variable_file_ids = [vf.id for vf in data["variable_files"]]
|
||||
mock_storage.delete.return_value = None
|
||||
|
||||
with session_factory.create_session() as session:
|
||||
draft_vars_before = session.query(WorkflowDraftVariable).filter_by(app_id=app_id).count()
|
||||
var_files_before = session.query(WorkflowDraftVariableFile).count()
|
||||
upload_files_before = session.query(UploadFile).count()
|
||||
var_files_before = (
|
||||
session.query(WorkflowDraftVariableFile)
|
||||
.where(WorkflowDraftVariableFile.id.in_(variable_file_ids))
|
||||
.count()
|
||||
)
|
||||
upload_files_before = session.query(UploadFile).where(UploadFile.id.in_(upload_file_ids)).count()
|
||||
assert draft_vars_before == 3
|
||||
assert var_files_before == 2
|
||||
assert upload_files_before == 2
|
||||
@@ -315,8 +324,12 @@ class TestDeleteDraftVariablesWithOffloadIntegration:
|
||||
assert draft_vars_after == 0
|
||||
|
||||
with session_factory.create_session() as session:
|
||||
var_files_after = session.query(WorkflowDraftVariableFile).count()
|
||||
upload_files_after = session.query(UploadFile).count()
|
||||
var_files_after = (
|
||||
session.query(WorkflowDraftVariableFile)
|
||||
.where(WorkflowDraftVariableFile.id.in_(variable_file_ids))
|
||||
.count()
|
||||
)
|
||||
upload_files_after = session.query(UploadFile).where(UploadFile.id.in_(upload_file_ids)).count()
|
||||
assert var_files_after == 0
|
||||
assert upload_files_after == 0
|
||||
|
||||
@@ -329,6 +342,8 @@ class TestDeleteDraftVariablesWithOffloadIntegration:
|
||||
def test_delete_draft_variables_storage_failure_continues_cleanup(self, mock_storage, setup_offload_test_data):
|
||||
data = setup_offload_test_data
|
||||
app_id = data["app"].id
|
||||
upload_file_ids = [uf.id for uf in data["upload_files"]]
|
||||
variable_file_ids = [vf.id for vf in data["variable_files"]]
|
||||
mock_storage.delete.side_effect = [Exception("Storage error"), None]
|
||||
|
||||
deleted_count = delete_draft_variables_batch(app_id, batch_size=10)
|
||||
@@ -339,8 +354,12 @@ class TestDeleteDraftVariablesWithOffloadIntegration:
|
||||
assert draft_vars_after == 0
|
||||
|
||||
with session_factory.create_session() as session:
|
||||
var_files_after = session.query(WorkflowDraftVariableFile).count()
|
||||
upload_files_after = session.query(UploadFile).count()
|
||||
var_files_after = (
|
||||
session.query(WorkflowDraftVariableFile)
|
||||
.where(WorkflowDraftVariableFile.id.in_(variable_file_ids))
|
||||
.count()
|
||||
)
|
||||
upload_files_after = session.query(UploadFile).where(UploadFile.id.in_(upload_file_ids)).count()
|
||||
assert var_files_after == 0
|
||||
assert upload_files_after == 0
|
||||
|
||||
@@ -395,3 +414,275 @@ class TestDeleteDraftVariablesWithOffloadIntegration:
|
||||
if app2_obj:
|
||||
session.delete(app2_obj)
|
||||
session.commit()
|
||||
|
||||
|
||||
class TestDeleteDraftVariablesSessionCommit:
|
||||
"""Test suite to verify session commit behavior in delete_draft_variables_batch."""
|
||||
|
||||
@pytest.fixture
|
||||
def setup_offload_test_data(self, app_and_tenant):
|
||||
"""Create test data with offload files for session commit tests."""
|
||||
from core.variables.types import SegmentType
|
||||
from libs.datetime_utils import naive_utc_now
|
||||
|
||||
tenant, app = app_and_tenant
|
||||
|
||||
with session_factory.create_session() as session:
|
||||
upload_file1 = UploadFile(
|
||||
tenant_id=tenant.id,
|
||||
storage_type="local",
|
||||
key="test/file1.json",
|
||||
name="file1.json",
|
||||
size=1024,
|
||||
extension="json",
|
||||
mime_type="application/json",
|
||||
created_by_role=CreatorUserRole.ACCOUNT,
|
||||
created_by=str(uuid.uuid4()),
|
||||
created_at=naive_utc_now(),
|
||||
used=False,
|
||||
)
|
||||
upload_file2 = UploadFile(
|
||||
tenant_id=tenant.id,
|
||||
storage_type="local",
|
||||
key="test/file2.json",
|
||||
name="file2.json",
|
||||
size=2048,
|
||||
extension="json",
|
||||
mime_type="application/json",
|
||||
created_by_role=CreatorUserRole.ACCOUNT,
|
||||
created_by=str(uuid.uuid4()),
|
||||
created_at=naive_utc_now(),
|
||||
used=False,
|
||||
)
|
||||
session.add(upload_file1)
|
||||
session.add(upload_file2)
|
||||
session.flush()
|
||||
|
||||
var_file1 = WorkflowDraftVariableFile(
|
||||
tenant_id=tenant.id,
|
||||
app_id=app.id,
|
||||
user_id=str(uuid.uuid4()),
|
||||
upload_file_id=upload_file1.id,
|
||||
size=1024,
|
||||
length=10,
|
||||
value_type=SegmentType.STRING,
|
||||
)
|
||||
var_file2 = WorkflowDraftVariableFile(
|
||||
tenant_id=tenant.id,
|
||||
app_id=app.id,
|
||||
user_id=str(uuid.uuid4()),
|
||||
upload_file_id=upload_file2.id,
|
||||
size=2048,
|
||||
length=20,
|
||||
value_type=SegmentType.OBJECT,
|
||||
)
|
||||
session.add(var_file1)
|
||||
session.add(var_file2)
|
||||
session.flush()
|
||||
|
||||
draft_var1 = WorkflowDraftVariable.new_node_variable(
|
||||
app_id=app.id,
|
||||
node_id="node_1",
|
||||
name="large_var_1",
|
||||
value=StringSegment(value="truncated..."),
|
||||
node_execution_id=str(uuid.uuid4()),
|
||||
file_id=var_file1.id,
|
||||
)
|
||||
draft_var2 = WorkflowDraftVariable.new_node_variable(
|
||||
app_id=app.id,
|
||||
node_id="node_2",
|
||||
name="large_var_2",
|
||||
value=StringSegment(value="truncated..."),
|
||||
node_execution_id=str(uuid.uuid4()),
|
||||
file_id=var_file2.id,
|
||||
)
|
||||
draft_var3 = WorkflowDraftVariable.new_node_variable(
|
||||
app_id=app.id,
|
||||
node_id="node_3",
|
||||
name="regular_var",
|
||||
value=StringSegment(value="regular_value"),
|
||||
node_execution_id=str(uuid.uuid4()),
|
||||
)
|
||||
session.add(draft_var1)
|
||||
session.add(draft_var2)
|
||||
session.add(draft_var3)
|
||||
session.commit()
|
||||
|
||||
data = {
|
||||
"app": app,
|
||||
"tenant": tenant,
|
||||
"upload_files": [upload_file1, upload_file2],
|
||||
"variable_files": [var_file1, var_file2],
|
||||
"draft_variables": [draft_var1, draft_var2, draft_var3],
|
||||
}
|
||||
|
||||
yield data
|
||||
|
||||
with session_factory.create_session() as session:
|
||||
for table, ids in [
|
||||
(WorkflowDraftVariable, [v.id for v in data["draft_variables"]]),
|
||||
(WorkflowDraftVariableFile, [vf.id for vf in data["variable_files"]]),
|
||||
(UploadFile, [uf.id for uf in data["upload_files"]]),
|
||||
]:
|
||||
cleanup_query = delete(table).where(table.id.in_(ids)).execution_options(synchronize_session=False)
|
||||
session.execute(cleanup_query)
|
||||
session.commit()
|
||||
|
||||
@pytest.fixture
|
||||
def setup_commit_test_data(self, app_and_tenant):
|
||||
"""Create test data for session commit tests."""
|
||||
tenant, app = app_and_tenant
|
||||
variable_ids: list[str] = []
|
||||
|
||||
with session_factory.create_session() as session:
|
||||
variables = []
|
||||
for i in range(10):
|
||||
var = WorkflowDraftVariable.new_node_variable(
|
||||
app_id=app.id,
|
||||
node_id=f"node_{i}",
|
||||
name=f"var_{i}",
|
||||
value=StringSegment(value="test_value"),
|
||||
node_execution_id=str(uuid.uuid4()),
|
||||
)
|
||||
session.add(var)
|
||||
variables.append(var)
|
||||
session.commit()
|
||||
variable_ids = [v.id for v in variables]
|
||||
|
||||
yield {
|
||||
"app": app,
|
||||
"tenant": tenant,
|
||||
"variable_ids": variable_ids,
|
||||
}
|
||||
|
||||
with session_factory.create_session() as session:
|
||||
cleanup_query = (
|
||||
delete(WorkflowDraftVariable)
|
||||
.where(WorkflowDraftVariable.id.in_(variable_ids))
|
||||
.execution_options(synchronize_session=False)
|
||||
)
|
||||
session.execute(cleanup_query)
|
||||
session.commit()
|
||||
|
||||
def test_session_commit_is_called_after_each_batch(self, setup_commit_test_data):
|
||||
"""Test that session.begin() is used for automatic transaction management."""
|
||||
data = setup_commit_test_data
|
||||
app_id = data["app"].id
|
||||
|
||||
# Since session.begin() is used, the transaction is automatically committed
|
||||
# when the with block exits successfully. We verify this by checking that
|
||||
# data is actually persisted.
|
||||
deleted_count = delete_draft_variables_batch(app_id, batch_size=3)
|
||||
|
||||
# Verify all data was deleted (proves transaction was committed)
|
||||
with session_factory.create_session() as session:
|
||||
remaining_count = session.query(WorkflowDraftVariable).filter_by(app_id=app_id).count()
|
||||
|
||||
assert deleted_count == 10
|
||||
assert remaining_count == 0
|
||||
|
||||
def test_data_persisted_after_batch_deletion(self, setup_commit_test_data):
|
||||
"""Test that data is actually persisted to database after batch deletion with commits."""
|
||||
data = setup_commit_test_data
|
||||
app_id = data["app"].id
|
||||
variable_ids = data["variable_ids"]
|
||||
|
||||
# Verify initial state
|
||||
with session_factory.create_session() as session:
|
||||
initial_count = session.query(WorkflowDraftVariable).filter_by(app_id=app_id).count()
|
||||
assert initial_count == 10
|
||||
|
||||
# Perform deletion with small batch size to force multiple commits
|
||||
deleted_count = delete_draft_variables_batch(app_id, batch_size=3)
|
||||
|
||||
assert deleted_count == 10
|
||||
|
||||
# Verify all data is deleted in a new session (proves commits worked)
|
||||
with session_factory.create_session() as session:
|
||||
final_count = session.query(WorkflowDraftVariable).filter_by(app_id=app_id).count()
|
||||
assert final_count == 0
|
||||
|
||||
# Verify specific IDs are deleted
|
||||
with session_factory.create_session() as session:
|
||||
remaining_vars = (
|
||||
session.query(WorkflowDraftVariable).where(WorkflowDraftVariable.id.in_(variable_ids)).count()
|
||||
)
|
||||
assert remaining_vars == 0
|
||||
|
||||
def test_session_commit_with_empty_dataset(self, setup_commit_test_data):
|
||||
"""Test session behavior when deleting from an empty dataset."""
|
||||
nonexistent_app_id = str(uuid.uuid4())
|
||||
|
||||
# Should not raise any errors and should return 0
|
||||
deleted_count = delete_draft_variables_batch(nonexistent_app_id, batch_size=10)
|
||||
assert deleted_count == 0
|
||||
|
||||
def test_session_commit_with_single_batch(self, setup_commit_test_data):
|
||||
"""Test that commit happens correctly when all data fits in a single batch."""
|
||||
data = setup_commit_test_data
|
||||
app_id = data["app"].id
|
||||
|
||||
with session_factory.create_session() as session:
|
||||
initial_count = session.query(WorkflowDraftVariable).filter_by(app_id=app_id).count()
|
||||
assert initial_count == 10
|
||||
|
||||
# Delete all in a single batch
|
||||
deleted_count = delete_draft_variables_batch(app_id, batch_size=100)
|
||||
assert deleted_count == 10
|
||||
|
||||
# Verify data is persisted
|
||||
with session_factory.create_session() as session:
|
||||
final_count = session.query(WorkflowDraftVariable).filter_by(app_id=app_id).count()
|
||||
assert final_count == 0
|
||||
|
||||
def test_invalid_batch_size_raises_error(self, setup_commit_test_data):
|
||||
"""Test that invalid batch size raises ValueError."""
|
||||
data = setup_commit_test_data
|
||||
app_id = data["app"].id
|
||||
|
||||
with pytest.raises(ValueError, match="batch_size must be positive"):
|
||||
delete_draft_variables_batch(app_id, batch_size=0)
|
||||
|
||||
with pytest.raises(ValueError, match="batch_size must be positive"):
|
||||
delete_draft_variables_batch(app_id, batch_size=-1)
|
||||
|
||||
@patch("extensions.ext_storage.storage")
|
||||
def test_session_commit_with_offload_data_cleanup(self, mock_storage, setup_offload_test_data):
|
||||
"""Test that session commits correctly when cleaning up offload data."""
|
||||
data = setup_offload_test_data
|
||||
app_id = data["app"].id
|
||||
upload_file_ids = [uf.id for uf in data["upload_files"]]
|
||||
mock_storage.delete.return_value = None
|
||||
|
||||
# Verify initial state
|
||||
with session_factory.create_session() as session:
|
||||
draft_vars_before = session.query(WorkflowDraftVariable).filter_by(app_id=app_id).count()
|
||||
var_files_before = (
|
||||
session.query(WorkflowDraftVariableFile)
|
||||
.where(WorkflowDraftVariableFile.id.in_([vf.id for vf in data["variable_files"]]))
|
||||
.count()
|
||||
)
|
||||
upload_files_before = session.query(UploadFile).where(UploadFile.id.in_(upload_file_ids)).count()
|
||||
assert draft_vars_before == 3
|
||||
assert var_files_before == 2
|
||||
assert upload_files_before == 2
|
||||
|
||||
# Delete variables with offload data
|
||||
deleted_count = delete_draft_variables_batch(app_id, batch_size=10)
|
||||
assert deleted_count == 3
|
||||
|
||||
# Verify all data is persisted (deleted) in new session
|
||||
with session_factory.create_session() as session:
|
||||
draft_vars_after = session.query(WorkflowDraftVariable).filter_by(app_id=app_id).count()
|
||||
var_files_after = (
|
||||
session.query(WorkflowDraftVariableFile)
|
||||
.where(WorkflowDraftVariableFile.id.in_([vf.id for vf in data["variable_files"]]))
|
||||
.count()
|
||||
)
|
||||
upload_files_after = session.query(UploadFile).where(UploadFile.id.in_(upload_file_ids)).count()
|
||||
assert draft_vars_after == 0
|
||||
assert var_files_after == 0
|
||||
assert upload_files_after == 0
|
||||
|
||||
# Verify storage cleanup was called
|
||||
assert mock_storage.delete.call_count == 2
|
||||
|
||||
@@ -0,0 +1,182 @@
|
||||
from unittest.mock import MagicMock, patch
|
||||
|
||||
import pytest
|
||||
from faker import Faker
|
||||
|
||||
from models import Account, Tenant, TenantAccountJoin, TenantAccountRole
|
||||
from models.dataset import Dataset, Document, DocumentSegment
|
||||
from tasks.document_indexing_update_task import document_indexing_update_task
|
||||
|
||||
|
||||
class TestDocumentIndexingUpdateTask:
|
||||
@pytest.fixture
|
||||
def mock_external_dependencies(self):
|
||||
"""Patch external collaborators used by the update task.
|
||||
- IndexProcessorFactory.init_index_processor().clean(...)
|
||||
- IndexingRunner.run([...])
|
||||
"""
|
||||
with (
|
||||
patch("tasks.document_indexing_update_task.IndexProcessorFactory") as mock_factory,
|
||||
patch("tasks.document_indexing_update_task.IndexingRunner") as mock_runner,
|
||||
):
|
||||
processor_instance = MagicMock()
|
||||
mock_factory.return_value.init_index_processor.return_value = processor_instance
|
||||
|
||||
runner_instance = MagicMock()
|
||||
mock_runner.return_value = runner_instance
|
||||
|
||||
yield {
|
||||
"factory": mock_factory,
|
||||
"processor": processor_instance,
|
||||
"runner": mock_runner,
|
||||
"runner_instance": runner_instance,
|
||||
}
|
||||
|
||||
def _create_dataset_document_with_segments(self, db_session_with_containers, *, segment_count: int = 2):
|
||||
fake = Faker()
|
||||
|
||||
# Account and tenant
|
||||
account = Account(
|
||||
email=fake.email(),
|
||||
name=fake.name(),
|
||||
interface_language="en-US",
|
||||
status="active",
|
||||
)
|
||||
db_session_with_containers.add(account)
|
||||
db_session_with_containers.commit()
|
||||
|
||||
tenant = Tenant(name=fake.company(), status="normal")
|
||||
db_session_with_containers.add(tenant)
|
||||
db_session_with_containers.commit()
|
||||
|
||||
join = TenantAccountJoin(
|
||||
tenant_id=tenant.id,
|
||||
account_id=account.id,
|
||||
role=TenantAccountRole.OWNER,
|
||||
current=True,
|
||||
)
|
||||
db_session_with_containers.add(join)
|
||||
db_session_with_containers.commit()
|
||||
|
||||
# Dataset and document
|
||||
dataset = Dataset(
|
||||
tenant_id=tenant.id,
|
||||
name=fake.company(),
|
||||
description=fake.text(max_nb_chars=64),
|
||||
data_source_type="upload_file",
|
||||
indexing_technique="high_quality",
|
||||
created_by=account.id,
|
||||
)
|
||||
db_session_with_containers.add(dataset)
|
||||
db_session_with_containers.commit()
|
||||
|
||||
document = Document(
|
||||
tenant_id=tenant.id,
|
||||
dataset_id=dataset.id,
|
||||
position=0,
|
||||
data_source_type="upload_file",
|
||||
batch="test_batch",
|
||||
name=fake.file_name(),
|
||||
created_from="upload_file",
|
||||
created_by=account.id,
|
||||
indexing_status="waiting",
|
||||
enabled=True,
|
||||
doc_form="text_model",
|
||||
)
|
||||
db_session_with_containers.add(document)
|
||||
db_session_with_containers.commit()
|
||||
|
||||
# Segments
|
||||
node_ids = []
|
||||
for i in range(segment_count):
|
||||
node_id = f"node-{i + 1}"
|
||||
seg = DocumentSegment(
|
||||
tenant_id=tenant.id,
|
||||
dataset_id=dataset.id,
|
||||
document_id=document.id,
|
||||
position=i,
|
||||
content=fake.text(max_nb_chars=32),
|
||||
answer=None,
|
||||
word_count=10,
|
||||
tokens=5,
|
||||
index_node_id=node_id,
|
||||
status="completed",
|
||||
created_by=account.id,
|
||||
)
|
||||
db_session_with_containers.add(seg)
|
||||
node_ids.append(node_id)
|
||||
db_session_with_containers.commit()
|
||||
|
||||
# Refresh to ensure ORM state
|
||||
db_session_with_containers.refresh(dataset)
|
||||
db_session_with_containers.refresh(document)
|
||||
|
||||
return dataset, document, node_ids
|
||||
|
||||
def test_cleans_segments_and_reindexes(self, db_session_with_containers, mock_external_dependencies):
|
||||
dataset, document, node_ids = self._create_dataset_document_with_segments(db_session_with_containers)
|
||||
|
||||
# Act
|
||||
document_indexing_update_task(dataset.id, document.id)
|
||||
|
||||
# Ensure we see committed changes from another session
|
||||
db_session_with_containers.expire_all()
|
||||
|
||||
# Assert document status updated before reindex
|
||||
updated = db_session_with_containers.query(Document).where(Document.id == document.id).first()
|
||||
assert updated.indexing_status == "parsing"
|
||||
assert updated.processing_started_at is not None
|
||||
|
||||
# Segments should be deleted
|
||||
remaining = (
|
||||
db_session_with_containers.query(DocumentSegment).where(DocumentSegment.document_id == document.id).count()
|
||||
)
|
||||
assert remaining == 0
|
||||
|
||||
# Assert index processor clean was called with expected args
|
||||
clean_call = mock_external_dependencies["processor"].clean.call_args
|
||||
assert clean_call is not None
|
||||
args, kwargs = clean_call
|
||||
# args[0] is a Dataset instance (from another session) — validate by id
|
||||
assert getattr(args[0], "id", None) == dataset.id
|
||||
# args[1] should contain our node_ids
|
||||
assert set(args[1]) == set(node_ids)
|
||||
assert kwargs.get("with_keywords") is True
|
||||
assert kwargs.get("delete_child_chunks") is True
|
||||
|
||||
# Assert indexing runner invoked with the updated document
|
||||
run_call = mock_external_dependencies["runner_instance"].run.call_args
|
||||
assert run_call is not None
|
||||
run_docs = run_call[0][0]
|
||||
assert len(run_docs) == 1
|
||||
first = run_docs[0]
|
||||
assert getattr(first, "id", None) == document.id
|
||||
|
||||
def test_clean_error_is_logged_and_indexing_continues(self, db_session_with_containers, mock_external_dependencies):
|
||||
dataset, document, node_ids = self._create_dataset_document_with_segments(db_session_with_containers)
|
||||
|
||||
# Force clean to raise; task should continue to indexing
|
||||
mock_external_dependencies["processor"].clean.side_effect = Exception("boom")
|
||||
|
||||
document_indexing_update_task(dataset.id, document.id)
|
||||
|
||||
# Ensure we see committed changes from another session
|
||||
db_session_with_containers.expire_all()
|
||||
|
||||
# Indexing should still be triggered
|
||||
mock_external_dependencies["runner_instance"].run.assert_called_once()
|
||||
|
||||
# Segments should remain (since clean failed before DB delete)
|
||||
remaining = (
|
||||
db_session_with_containers.query(DocumentSegment).where(DocumentSegment.document_id == document.id).count()
|
||||
)
|
||||
assert remaining > 0
|
||||
|
||||
def test_document_not_found_noop(self, db_session_with_containers, mock_external_dependencies):
|
||||
fake = Faker()
|
||||
# Act with non-existent document id
|
||||
document_indexing_update_task(dataset_id=fake.uuid4(), document_id=fake.uuid4())
|
||||
|
||||
# Neither processor nor runner should be called
|
||||
mock_external_dependencies["processor"].clean.assert_not_called()
|
||||
mock_external_dependencies["runner_instance"].run.assert_not_called()
|
||||
@@ -1,222 +0,0 @@
|
||||
import builtins
|
||||
import contextlib
|
||||
import importlib
|
||||
import sys
|
||||
from types import SimpleNamespace
|
||||
from unittest.mock import MagicMock, patch
|
||||
|
||||
import pytest
|
||||
from flask import Flask
|
||||
from flask.views import MethodView
|
||||
|
||||
from extensions import ext_fastopenapi
|
||||
from extensions.ext_database import db
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def app():
|
||||
app = Flask(__name__)
|
||||
app.config["TESTING"] = True
|
||||
app.config["SECRET_KEY"] = "test-secret"
|
||||
app.config["SQLALCHEMY_DATABASE_URI"] = "sqlite:///:memory:"
|
||||
|
||||
db.init_app(app)
|
||||
|
||||
return app
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
def fix_method_view_issue(monkeypatch):
|
||||
if not hasattr(builtins, "MethodView"):
|
||||
monkeypatch.setattr(builtins, "MethodView", MethodView, raising=False)
|
||||
|
||||
|
||||
def _create_isolated_router():
|
||||
import controllers.fastopenapi
|
||||
|
||||
router_class = type(controllers.fastopenapi.console_router)
|
||||
return router_class()
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def _patch_auth_and_router(temp_router):
|
||||
def noop(func):
|
||||
return func
|
||||
|
||||
default_user = MagicMock(has_edit_permission=True, is_dataset_editor=False)
|
||||
|
||||
with (
|
||||
patch("controllers.fastopenapi.console_router", temp_router),
|
||||
patch("extensions.ext_fastopenapi.console_router", temp_router),
|
||||
patch("controllers.console.wraps.setup_required", side_effect=noop),
|
||||
patch("libs.login.login_required", side_effect=noop),
|
||||
patch("controllers.console.wraps.account_initialization_required", side_effect=noop),
|
||||
patch("controllers.console.wraps.edit_permission_required", side_effect=noop),
|
||||
patch("libs.login.current_account_with_tenant", return_value=(default_user, "tenant-id")),
|
||||
patch("configs.dify_config.EDITION", "CLOUD"),
|
||||
):
|
||||
import extensions.ext_fastopenapi
|
||||
|
||||
importlib.reload(extensions.ext_fastopenapi)
|
||||
|
||||
yield
|
||||
|
||||
|
||||
def _force_reload_module(target_module: str, alias_module: str):
|
||||
if target_module in sys.modules:
|
||||
del sys.modules[target_module]
|
||||
if alias_module in sys.modules:
|
||||
del sys.modules[alias_module]
|
||||
|
||||
module = importlib.import_module(target_module)
|
||||
sys.modules[alias_module] = sys.modules[target_module]
|
||||
|
||||
return module
|
||||
|
||||
|
||||
def _dedupe_routes(router):
|
||||
seen = set()
|
||||
unique_routes = []
|
||||
for path, method, endpoint in reversed(router.get_routes()):
|
||||
key = (path, method, endpoint.__name__)
|
||||
if key in seen:
|
||||
continue
|
||||
seen.add(key)
|
||||
unique_routes.append((path, method, endpoint))
|
||||
router._routes = list(reversed(unique_routes))
|
||||
|
||||
|
||||
def _cleanup_modules(target_module: str, alias_module: str):
|
||||
if target_module in sys.modules:
|
||||
del sys.modules[target_module]
|
||||
if alias_module in sys.modules:
|
||||
del sys.modules[alias_module]
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_tags_module_env():
|
||||
target_module = "controllers.console.tag.tags"
|
||||
alias_module = "api.controllers.console.tag.tags"
|
||||
temp_router = _create_isolated_router()
|
||||
|
||||
try:
|
||||
with _patch_auth_and_router(temp_router):
|
||||
tags_module = _force_reload_module(target_module, alias_module)
|
||||
_dedupe_routes(temp_router)
|
||||
yield tags_module
|
||||
finally:
|
||||
_cleanup_modules(target_module, alias_module)
|
||||
|
||||
|
||||
def test_list_tags_success(app: Flask, mock_tags_module_env):
|
||||
# Arrange
|
||||
tag = SimpleNamespace(id="tag-1", name="Alpha", type="app", binding_count=2)
|
||||
with patch("controllers.console.tag.tags.TagService.get_tags", return_value=[tag]):
|
||||
ext_fastopenapi.init_app(app)
|
||||
client = app.test_client()
|
||||
|
||||
# Act
|
||||
response = client.get("/console/api/tags?type=app&keyword=Alpha")
|
||||
|
||||
# Assert
|
||||
assert response.status_code == 200
|
||||
assert response.get_json() == [
|
||||
{"id": "tag-1", "name": "Alpha", "type": "app", "binding_count": 2},
|
||||
]
|
||||
|
||||
|
||||
def test_create_tag_success(app: Flask, mock_tags_module_env):
|
||||
# Arrange
|
||||
tag = SimpleNamespace(id="tag-2", name="Beta", type="app")
|
||||
with patch("controllers.console.tag.tags.TagService.save_tags", return_value=tag) as mock_save:
|
||||
ext_fastopenapi.init_app(app)
|
||||
client = app.test_client()
|
||||
|
||||
# Act
|
||||
response = client.post("/console/api/tags", json={"name": "Beta", "type": "app"})
|
||||
|
||||
# Assert
|
||||
assert response.status_code == 200
|
||||
assert response.get_json() == {
|
||||
"id": "tag-2",
|
||||
"name": "Beta",
|
||||
"type": "app",
|
||||
"binding_count": 0,
|
||||
}
|
||||
mock_save.assert_called_once_with({"name": "Beta", "type": "app"})
|
||||
|
||||
|
||||
def test_update_tag_success(app: Flask, mock_tags_module_env):
|
||||
# Arrange
|
||||
tag = SimpleNamespace(id="tag-3", name="Gamma", type="app")
|
||||
with (
|
||||
patch("controllers.console.tag.tags.TagService.update_tags", return_value=tag) as mock_update,
|
||||
patch("controllers.console.tag.tags.TagService.get_tag_binding_count", return_value=4),
|
||||
):
|
||||
ext_fastopenapi.init_app(app)
|
||||
client = app.test_client()
|
||||
|
||||
# Act
|
||||
response = client.patch(
|
||||
"/console/api/tags/11111111-1111-1111-1111-111111111111",
|
||||
json={"name": "Gamma", "type": "app"},
|
||||
)
|
||||
|
||||
# Assert
|
||||
assert response.status_code == 200
|
||||
assert response.get_json() == {
|
||||
"id": "tag-3",
|
||||
"name": "Gamma",
|
||||
"type": "app",
|
||||
"binding_count": 4,
|
||||
}
|
||||
mock_update.assert_called_once_with(
|
||||
{"name": "Gamma", "type": "app"},
|
||||
"11111111-1111-1111-1111-111111111111",
|
||||
)
|
||||
|
||||
|
||||
def test_delete_tag_success(app: Flask, mock_tags_module_env):
|
||||
# Arrange
|
||||
with patch("controllers.console.tag.tags.TagService.delete_tag") as mock_delete:
|
||||
ext_fastopenapi.init_app(app)
|
||||
client = app.test_client()
|
||||
|
||||
# Act
|
||||
response = client.delete("/console/api/tags/11111111-1111-1111-1111-111111111111")
|
||||
|
||||
# Assert
|
||||
assert response.status_code == 204
|
||||
mock_delete.assert_called_once_with("11111111-1111-1111-1111-111111111111")
|
||||
|
||||
|
||||
def test_create_tag_binding_success(app: Flask, mock_tags_module_env):
|
||||
# Arrange
|
||||
payload = {"tag_ids": ["tag-1", "tag-2"], "target_id": "target-1", "type": "app"}
|
||||
with patch("controllers.console.tag.tags.TagService.save_tag_binding") as mock_bind:
|
||||
ext_fastopenapi.init_app(app)
|
||||
client = app.test_client()
|
||||
|
||||
# Act
|
||||
response = client.post("/console/api/tag-bindings/create", json=payload)
|
||||
|
||||
# Assert
|
||||
assert response.status_code == 200
|
||||
assert response.get_json() == {"result": "success"}
|
||||
mock_bind.assert_called_once_with(payload)
|
||||
|
||||
|
||||
def test_delete_tag_binding_success(app: Flask, mock_tags_module_env):
|
||||
# Arrange
|
||||
payload = {"tag_id": "tag-1", "target_id": "target-1", "type": "app"}
|
||||
with patch("controllers.console.tag.tags.TagService.delete_tag_binding") as mock_unbind:
|
||||
ext_fastopenapi.init_app(app)
|
||||
client = app.test_client()
|
||||
|
||||
# Act
|
||||
response = client.post("/console/api/tag-bindings/remove", json=payload)
|
||||
|
||||
# Assert
|
||||
assert response.status_code == 200
|
||||
assert response.get_json() == {"result": "success"}
|
||||
mock_unbind.assert_called_once_with(payload)
|
||||
@@ -114,6 +114,21 @@ def mock_db_session():
    session = MagicMock()
    # Ensure tests can observe session.close() via context manager teardown
    session.close = MagicMock()
    session.commit = MagicMock()

    # Mock session.begin() context manager to auto-commit on exit
    begin_cm = MagicMock()
    begin_cm.__enter__.return_value = session

    def _begin_exit_side_effect(*args, **kwargs):
        # session.begin().__exit__() should commit if no exception
        if args[0] is None:  # No exception
            session.commit()

    begin_cm.__exit__.side_effect = _begin_exit_side_effect
    session.begin.return_value = begin_cm

    # Mock create_session() context manager
    cm = MagicMock()
    cm.__enter__.return_value = session
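For reference, a standalone sketch (plain MagicMock wiring, not the real fixture) of the behaviour the mocked session.begin() above gives tests: commit fires only when the with-block exits cleanly.

```python
from unittest.mock import MagicMock

session = MagicMock()
begin_cm = MagicMock()
begin_cm.__enter__.return_value = session


def _exit(exc_type, exc, tb):
    # commit only on a clean exit, mirroring the fixture's _begin_exit_side_effect
    if exc_type is None:
        session.commit()


begin_cm.__exit__.side_effect = _exit
session.begin.return_value = begin_cm

with session.begin():
    pass  # clean exit

session.commit.assert_called_once()
```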
@@ -350,7 +350,7 @@ class TestDeleteWorkflowArchiveLogs:
        mock_query.where.return_value = mock_delete_query
        mock_db.session.query.return_value = mock_query

        delete_func("log-1")
        delete_func(mock_db.session, "log-1")

        mock_db.session.query.assert_called_once_with(WorkflowArchiveLog)
        mock_query.where.assert_called_once()
2 api/uv.lock (generated)
@@ -1368,7 +1368,7 @@ wheels = [

[[package]]
name = "dify-api"
version = "1.11.4"
version = "1.12.1"
source = { virtual = "." }
dependencies = [
{ name = "aliyun-log-python-sdk" },

@@ -1,5 +1,5 @@
#!/bin/bash
set -x
set -euxo pipefail

SCRIPT_DIR="$(dirname "$(realpath "$0")")"
cd "$SCRIPT_DIR/../.."

@@ -21,7 +21,7 @@ services:

# API service
api:
image: langgenius/dify-api:1.11.4
image: langgenius/dify-api:1.12.1
restart: always
environment:
# Use the shared environment variables.
@@ -63,7 +63,7 @@ services:
# worker service
# The Celery worker for processing all queues (dataset, workflow, mail, etc.)
worker:
image: langgenius/dify-api:1.11.4
image: langgenius/dify-api:1.12.1
restart: always
environment:
# Use the shared environment variables.
@@ -102,7 +102,7 @@ services:
# worker_beat service
# Celery beat for scheduling periodic tasks.
worker_beat:
image: langgenius/dify-api:1.11.4
image: langgenius/dify-api:1.12.1
restart: always
environment:
# Use the shared environment variables.
@@ -132,7 +132,7 @@ services:

# Frontend web application.
web:
image: langgenius/dify-web:1.11.4
image: langgenius/dify-web:1.12.1
restart: always
environment:
CONSOLE_API_URL: ${CONSOLE_API_URL:-}
@@ -662,13 +662,14 @@ services:
- "${IRIS_SUPER_SERVER_PORT:-1972}:1972"
- "${IRIS_WEB_SERVER_PORT:-52773}:52773"
volumes:
- ./volumes/iris:/opt/iris
- ./volumes/iris:/durable
- ./iris/iris-init.script:/iris-init.script
- ./iris/docker-entrypoint.sh:/custom-entrypoint.sh
entrypoint: ["/custom-entrypoint.sh"]
tty: true
environment:
TZ: ${IRIS_TIMEZONE:-UTC}
ISC_DATA_DIRECTORY: /durable/iris

# Oracle vector database
oracle:

@@ -707,7 +707,7 @@ services:

# API service
api:
image: langgenius/dify-api:1.11.4
image: langgenius/dify-api:1.12.1
restart: always
environment:
# Use the shared environment variables.
@@ -749,7 +749,7 @@ services:
# worker service
# The Celery worker for processing all queues (dataset, workflow, mail, etc.)
worker:
image: langgenius/dify-api:1.11.4
image: langgenius/dify-api:1.12.1
restart: always
environment:
# Use the shared environment variables.
@@ -788,7 +788,7 @@ services:
# worker_beat service
# Celery beat for scheduling periodic tasks.
worker_beat:
image: langgenius/dify-api:1.11.4
image: langgenius/dify-api:1.12.1
restart: always
environment:
# Use the shared environment variables.
@@ -818,7 +818,7 @@ services:

# Frontend web application.
web:
image: langgenius/dify-web:1.11.4
image: langgenius/dify-web:1.12.1
restart: always
environment:
CONSOLE_API_URL: ${CONSOLE_API_URL:-}
@@ -1348,13 +1348,14 @@ services:
- "${IRIS_SUPER_SERVER_PORT:-1972}:1972"
- "${IRIS_WEB_SERVER_PORT:-52773}:52773"
volumes:
- ./volumes/iris:/opt/iris
- ./volumes/iris:/durable
- ./iris/iris-init.script:/iris-init.script
- ./iris/docker-entrypoint.sh:/custom-entrypoint.sh
entrypoint: ["/custom-entrypoint.sh"]
tty: true
environment:
TZ: ${IRIS_TIMEZONE:-UTC}
ISC_DATA_DIRECTORY: /durable/iris

# Oracle vector database
oracle:

@@ -1,15 +1,33 @@
#!/bin/bash
set -e

# IRIS configuration flag file
IRIS_CONFIG_DONE="/opt/iris/.iris-configured"
# IRIS configuration flag file (stored in durable directory to persist with data)
IRIS_CONFIG_DONE="/durable/.iris-configured"

# Function to wait for IRIS to be ready
wait_for_iris() {
echo "Waiting for IRIS to be ready..."
local max_attempts=30
local attempt=1
while [ "$attempt" -le "$max_attempts" ]; do
if iris qlist IRIS 2>/dev/null | grep -q "running"; then
echo "IRIS is ready."
return 0
fi
echo "Attempt $attempt/$max_attempts: IRIS not ready yet, waiting..."
sleep 2
attempt=$((attempt + 1))
done
echo "ERROR: IRIS failed to start within expected time." >&2
return 1
}

# Function to configure IRIS
configure_iris() {
echo "Configuring IRIS for first-time setup..."

# Wait for IRIS to be fully started
sleep 5
wait_for_iris

# Execute the initialization script
iris session IRIS < /iris-init.script

@@ -3,7 +3,7 @@
import type { ReactNode } from 'react'
import Cookies from 'js-cookie'
import { usePathname, useRouter, useSearchParams } from 'next/navigation'
import { parseAsString, useQueryState } from 'nuqs'
import { parseAsBoolean, useQueryState } from 'nuqs'
import { useCallback, useEffect, useState } from 'react'
import {
EDUCATION_VERIFY_URL_SEARCHPARAMS_ACTION,
@@ -28,7 +28,7 @@ export const AppInitializer = ({
const [init, setInit] = useState(false)
const [oauthNewUser, setOauthNewUser] = useQueryState(
'oauth_new_user',
parseAsString.withOptions({ history: 'replace' }),
parseAsBoolean.withOptions({ history: 'replace' }),
)

const isSetupFinished = useCallback(async () => {
@@ -46,7 +46,7 @@
(async () => {
const action = searchParams.get('action')

if (oauthNewUser === 'true') {
if (oauthNewUser) {
let utmInfo = null
const utmInfoStr = Cookies.get('utm_info')
if (utmInfoStr) {

@@ -109,6 +109,7 @@ const AgentTools: FC = () => {
tool_parameters: paramsWithDefaultValue,
notAuthor: !tool.is_team_authorization,
enabled: true,
type: tool.provider_type as CollectionType,
}
}
const handleSelectTool = (tool: ToolDefaultValue) => {

@@ -62,19 +62,19 @@ const AppCard = ({
{app.description}
</div>
</div>
{canCreate && (
{(canCreate || isTrialApp) && (
<div className={cn('absolute bottom-0 left-0 right-0 hidden bg-gradient-to-t from-components-panel-gradient-2 from-[60.27%] to-transparent p-4 pt-8 group-hover:flex')}>
<div className={cn('grid h-8 w-full grid-cols-1 items-center space-x-2', isTrialApp && 'grid-cols-2')}>
<Button variant="primary" onClick={() => onCreate()}>
<PlusIcon className="mr-1 h-4 w-4" />
<span className="text-xs">{t('newApp.useTemplate', { ns: 'app' })}</span>
</Button>
{isTrialApp && (
<Button onClick={showTryAPPPanel(app.app_id)}>
<RiInformation2Line className="mr-1 size-4" />
<span>{t('appCard.try', { ns: 'explore' })}</span>
<div className={cn('grid h-8 w-full grid-cols-1 items-center space-x-2', canCreate && 'grid-cols-2')}>
{canCreate && (
<Button variant="primary" onClick={() => onCreate()}>
<PlusIcon className="mr-1 h-4 w-4" />
<span className="text-xs">{t('newApp.useTemplate', { ns: 'app' })}</span>
</Button>
)}
<Button onClick={showTryAPPPanel(app.app_id)}>
<RiInformation2Line className="mr-1 size-4" />
<span>{t('appCard.try', { ns: 'explore' })}</span>
</Button>
</div>
</div>
)}

@@ -154,7 +154,7 @@ export const GeneralChunkingOptions: FC<GeneralChunkingOptionsProps> = ({
</div>
))}
{
showSummaryIndexSetting && (
showSummaryIndexSetting && IS_CE_EDITION && (
<div className="mt-3">
<SummaryIndexSetting
entry="create-document"

@@ -12,6 +12,7 @@ import Divider from '@/app/components/base/divider'
import { ParentChildChunk } from '@/app/components/base/icons/src/vender/knowledge'
import RadioCard from '@/app/components/base/radio-card'
import SummaryIndexSetting from '@/app/components/datasets/settings/summary-index-setting'
import { IS_CE_EDITION } from '@/config'
import { ChunkingMode } from '@/models/datasets'
import FileList from '../../assets/file-list-3-fill.svg'
import Note from '../../assets/note-mod.svg'
@@ -191,7 +192,7 @@ export const ParentChildOptions: FC<ParentChildOptionsProps> = ({
</div>
))}
{
showSummaryIndexSetting && (
showSummaryIndexSetting && IS_CE_EDITION && (
<div className="mt-3">
<SummaryIndexSetting
entry="create-document"

@@ -26,6 +26,7 @@ import CustomPopover from '@/app/components/base/popover'
import Switch from '@/app/components/base/switch'
import { ToastContext } from '@/app/components/base/toast'
import Tooltip from '@/app/components/base/tooltip'
import { IS_CE_EDITION } from '@/config'
import { DataSourceType, DocumentActionType } from '@/models/datasets'
import {
useDocumentArchive,
@@ -263,10 +264,14 @@ const Operations = ({
<span className={s.actionName}>{t('list.action.sync', { ns: 'datasetDocuments' })}</span>
</div>
)}
<div className={s.actionItem} onClick={() => onOperate('summary')}>
<SearchLinesSparkle className="h-4 w-4 text-text-tertiary" />
<span className={s.actionName}>{t('list.action.summary', { ns: 'datasetDocuments' })}</span>
</div>
{
IS_CE_EDITION && (
<div className={s.actionItem} onClick={() => onOperate('summary')}>
<SearchLinesSparkle className="h-4 w-4 text-text-tertiary" />
<span className={s.actionName}>{t('list.action.summary', { ns: 'datasetDocuments' })}</span>
</div>
)
}
<Divider className="my-1" />
</>
)}

@@ -7,6 +7,7 @@ import Button from '@/app/components/base/button'
import Confirm from '@/app/components/base/confirm'
import Divider from '@/app/components/base/divider'
import { SearchLinesSparkle } from '@/app/components/base/icons/src/vender/knowledge'
import { IS_CE_EDITION } from '@/config'
import { cn } from '@/utils/classnames'

const i18nPrefix = 'batchAction'
@@ -87,7 +88,7 @@ const BatchAction: FC<IBatchActionProps> = ({
<span className="px-0.5">{t('metadata.metadata', { ns: 'dataset' })}</span>
</Button>
)}
{onBatchSummary && (
{onBatchSummary && IS_CE_EDITION && (
<Button
variant="ghost"
className="gap-x-0.5 px-3"

@@ -3,8 +3,6 @@ import type { FC, ReactNode } from 'react'
import type { SliceProps } from './type'
import { autoUpdate, flip, FloatingFocusManager, offset, shift, useDismiss, useFloating, useHover, useInteractions, useRole } from '@floating-ui/react'
import { RiDeleteBinLine } from '@remixicon/react'
// @ts-expect-error no types available
import lineClamp from 'line-clamp'
import { useState } from 'react'
import ActionButton, { ActionButtonState } from '@/app/components/base/action-button'
import { cn } from '@/utils/classnames'
@@ -58,12 +56,8 @@ export const EditSlice: FC<EditSliceProps> = (props) => {
<>
<SliceContainer
{...rest}
className={cn('mr-0 block', className)}
ref={(ref) => {
refs.setReference(ref)
if (ref)
lineClamp(ref, 4)
}}
className={cn('mr-0 line-clamp-4 block', className)}
ref={refs.setReference}
{...getReferenceProps()}
>
<SliceLabel

@@ -21,6 +21,7 @@ import RetrievalMethodConfig from '@/app/components/datasets/common/retrieval-me
import { ModelTypeEnum } from '@/app/components/header/account-setting/model-provider-page/declarations'
import { useModelList } from '@/app/components/header/account-setting/model-provider-page/hooks'
import ModelSelector from '@/app/components/header/account-setting/model-provider-page/model-selector'
import { IS_CE_EDITION } from '@/config'
import { useSelector as useAppContextWithSelector } from '@/context/app-context'
import { useDatasetDetailContextWithSelector } from '@/context/dataset-detail'
import { useDocLink } from '@/context/i18n'
@@ -359,7 +360,7 @@ const Form = () => {
{
indexMethod === IndexingType.QUALIFIED
&& [ChunkingMode.text, ChunkingMode.parentChild].includes(currentDataset?.doc_form as ChunkingMode)
&& (
&& IS_CE_EDITION && (
<>
<Divider
type="horizontal"

@@ -74,11 +74,15 @@ const AppCard = ({
</div>
{isExplore && (canCreate || isTrialApp) && (
<div className={cn('absolute bottom-0 left-0 right-0 hidden bg-gradient-to-t from-components-panel-gradient-2 from-[60.27%] to-transparent p-4 pt-8 group-hover:flex')}>
<div className={cn('grid h-8 w-full grid-cols-2 space-x-2')}>
<Button variant="primary" className="h-7" onClick={() => onCreate()}>
<PlusIcon className="mr-1 h-4 w-4" />
<span className="text-xs">{t('appCard.addToWorkspace', { ns: 'explore' })}</span>
</Button>
<div className={cn('grid h-8 w-full grid-cols-1 space-x-2', canCreate && 'grid-cols-2')}>
{
canCreate && (
<Button variant="primary" className="h-7" onClick={() => onCreate()}>
<PlusIcon className="mr-1 h-4 w-4" />
<span className="text-xs">{t('appCard.addToWorkspace', { ns: 'explore' })}</span>
</Button>
)
}
<Button className="h-7" onClick={showTryAPPPanel(app.app_id)}>
<RiInformation2Line className="mr-1 size-4" />
<span>{t('appCard.try', { ns: 'explore' })}</span>

@@ -16,6 +16,14 @@ vi.mock('react-i18next', () => ({
}),
}))

vi.mock('@/config', async (importOriginal) => {
const actual = await importOriginal() as object
return {
...actual,
IS_CLOUD_EDITION: true,
}
})

const mockUseGetTryAppInfo = vi.fn()

vi.mock('@/service/use-try-app', () => ({

@@ -14,6 +14,14 @@ vi.mock('react-i18next', () => ({
}),
}))

vi.mock('@/config', async (importOriginal) => {
const actual = await importOriginal() as object
return {
...actual,
IS_CLOUD_EDITION: true,
}
})

describe('Tab', () => {
afterEach(() => {
cleanup()

@@ -1,5 +1,5 @@
import type { TriggerSubscriptionBuilder } from '@/app/components/workflow/block-selector/types'
import { fireEvent, render, screen, waitFor } from '@testing-library/react'
import { act, fireEvent, render, screen, waitFor } from '@testing-library/react'
import * as React from 'react'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
// Import after mocks
@@ -821,6 +821,9 @@ describe('CommonCreateModal', () => {
expect(mockCreateBuilder).toHaveBeenCalled()
})

// Flush pending state updates from createBuilder promise resolution
await act(async () => {})

const input = screen.getByTestId('form-field-webhook_url')
fireEvent.change(input, { target: { value: 'https://example.com/webhook' } })


@@ -129,6 +129,7 @@ export const useToolSelectorState = ({
extra: {
description: tool.tool_description,
},
type: tool.provider_type,
}
}, [])

@@ -1,5 +1,5 @@
import type { PropsWithChildren } from 'react'
import { cleanup, fireEvent, render, screen, waitFor } from '@testing-library/react'
import { act, cleanup, fireEvent, render, screen, waitFor } from '@testing-library/react'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import { DSLImportStatus } from '@/models/app'
import UpdateDSLModal from './update-dsl-modal'
@@ -140,13 +140,13 @@ class MockFileReader {
onload: ((e: { target: { result: string | null } }) => void) | null = null

readAsText(_file: File) {
// Simulate async file reading
setTimeout(() => {
// Simulate async file reading using queueMicrotask for more reliable async behavior
queueMicrotask(() => {
this.result = 'test file content'
if (this.onload) {
this.onload({ target: { result: this.result } })
}
}, 0)
})
}
}

@@ -174,6 +174,7 @@ describe('UpdateDSLModal', () => {
status: DSLImportStatus.COMPLETED,
pipeline_id: 'test-pipeline-id',
})
mockHandleCheckPluginDependencies.mockResolvedValue(undefined)

// Mock FileReader
originalFileReader = globalThis.FileReader
@@ -472,14 +473,14 @@ describe('UpdateDSLModal', () => {
await waitFor(() => {
const importButton = screen.getByText('common.overwriteAndImport')
expect(importButton).not.toBeDisabled()
})
}, { timeout: 1000 })

const importButton = screen.getByText('common.overwriteAndImport')
fireEvent.click(importButton)

await waitFor(() => {
expect(mockOnImport).toHaveBeenCalled()
})
}, { timeout: 1000 })
})

it('should show warning notification on import with warnings', async () => {
@@ -647,6 +648,8 @@ describe('UpdateDSLModal', () => {
})

it('should show error modal when import status is PENDING', async () => {
vi.useFakeTimers({ shouldAdvanceTime: true })

mockImportDSL.mockResolvedValue({
id: 'import-id',
status: DSLImportStatus.PENDING,
@@ -659,20 +662,29 @@ describe('UpdateDSLModal', () => {

const fileInput = screen.getByTestId('file-input')
const file = new File(['test content'], 'test.pipeline', { type: 'text/yaml' })
fireEvent.change(fileInput, { target: { files: [file] } })

await waitFor(() => {
const importButton = screen.getByText('common.overwriteAndImport')
expect(importButton).not.toBeDisabled()
await act(async () => {
fireEvent.change(fileInput, { target: { files: [file] } })
// Flush microtasks scheduled by the FileReader mock (which uses queueMicrotask)
await new Promise<void>(resolve => queueMicrotask(resolve))
})

const importButton = screen.getByText('common.overwriteAndImport')
fireEvent.click(importButton)
expect(importButton).not.toBeDisabled()

await act(async () => {
fireEvent.click(importButton)
// Flush the promise resolution from mockImportDSL
await Promise.resolve()
// Advance past the 300ms setTimeout in the component
await vi.advanceTimersByTimeAsync(350)
})

// Wait for the error modal to be shown after setTimeout
await waitFor(() => {
expect(screen.getByText('newApp.appCreateDSLErrorTitle')).toBeInTheDocument()
}, { timeout: 500 })
})

vi.useRealTimers()
})

it('should show version info in error modal', async () => {

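Why the MockFileReader above switched from setTimeout to queueMicrotask: a microtask runs before any timer fires, so the tests can flush the fake read with a plain awaited act() callback or an explicit queueMicrotask promise instead of advancing fake timers. Below is a minimal standalone sketch of that pattern, not code from the repo; the names fakeRead and flushMicrotasks are illustrative only.

// A callback scheduled as a microtask resolves before any timer callback runs.
const fakeRead = (onload: (text: string) => void) => {
  queueMicrotask(() => onload('test file content'))
}

// Awaiting a promise resolved via queueMicrotask drains the microtasks queued
// before it, so the fakeRead callback has already run on the next line.
const flushMicrotasks = () => new Promise<void>(resolve => queueMicrotask(resolve))

const demo = async () => {
  let result = ''
  fakeRead((text) => { result = text })
  await flushMicrotasks()
  console.log(result) // 'test file content'
}

void demo()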
@@ -61,6 +61,12 @@ vi.mock('@/service/use-pipeline', () => ({
}),
}))

// Mock download utility
const mockDownloadBlob = vi.fn()
vi.mock('@/utils/download', () => ({
downloadBlob: (...args: unknown[]) => mockDownloadBlob(...args),
}))

// Mock workflow service
const mockFetchWorkflowDraft = vi.fn()
vi.mock('@/service/workflow', () => ({
@@ -77,33 +83,9 @@ vi.mock('@/app/components/workflow/constants', () => ({
// ============================================================================

describe('useDSL', () => {
let mockLink: { href: string, download: string, click: ReturnType<typeof vi.fn> }
let originalCreateElement: typeof document.createElement
let mockCreateObjectURL: ReturnType<typeof vi.spyOn>
let mockRevokeObjectURL: ReturnType<typeof vi.spyOn>

beforeEach(() => {
vi.clearAllMocks()

// Create a proper mock link element
mockLink = {
href: '',
download: '',
click: vi.fn(),
}

// Save original and mock selectively - only intercept 'a' elements
originalCreateElement = document.createElement.bind(document)
document.createElement = vi.fn((tagName: string) => {
if (tagName === 'a') {
return mockLink as unknown as HTMLElement
}
return originalCreateElement(tagName)
}) as typeof document.createElement

mockCreateObjectURL = vi.spyOn(URL, 'createObjectURL').mockReturnValue('blob:test-url')
mockRevokeObjectURL = vi.spyOn(URL, 'revokeObjectURL').mockImplementation(() => {})

// Default store state
mockWorkflowStoreGetState.mockReturnValue({
pipelineId: 'test-pipeline-id',
@@ -118,9 +100,6 @@ describe('useDSL', () => {
})

afterEach(() => {
document.createElement = originalCreateElement
mockCreateObjectURL.mockRestore()
mockRevokeObjectURL.mockRestore()
vi.clearAllMocks()
})

@@ -187,9 +166,7 @@ describe('useDSL', () => {
await result.current.handleExportDSL()
})

expect(document.createElement).toHaveBeenCalledWith('a')
expect(mockCreateObjectURL).toHaveBeenCalled()
expect(mockRevokeObjectURL).toHaveBeenCalledWith('blob:test-url')
expect(mockDownloadBlob).toHaveBeenCalled()
})

it('should use correct file extension for download', async () => {
@@ -199,17 +176,25 @@ describe('useDSL', () => {
await result.current.handleExportDSL()
})

expect(mockLink.download).toBe('Test Knowledge Base.pipeline')
expect(mockDownloadBlob).toHaveBeenCalledWith(
expect.objectContaining({
fileName: 'Test Knowledge Base.pipeline',
}),
)
})

it('should trigger download click', async () => {
it('should pass blob data to downloadBlob', async () => {
const { result } = renderHook(() => useDSL())

await act(async () => {
await result.current.handleExportDSL()
})

expect(mockLink.click).toHaveBeenCalled()
expect(mockDownloadBlob).toHaveBeenCalledWith(
expect.objectContaining({
data: expect.any(Blob),
}),
)
})

it('should show error notification on export failure', async () => {

@@ -172,6 +172,9 @@ describe('EditCustomCollectionModal', () => {
expect(parseParamsSchemaMock).toHaveBeenCalledWith('{}')
})

// Flush pending state updates from parseParamsSchema promise resolution
await act(async () => {})

await act(async () => {
fireEvent.click(screen.getByText('common.operation.save'))
})
@@ -184,6 +187,10 @@ describe('EditCustomCollectionModal', () => {
credentials: {
auth_type: 'none',
},
icon: {
content: '🕵️',
background: '#FEF7C3',
},
labels: [],
}))
expect(toastNotifySpy).not.toHaveBeenCalled()

@@ -87,6 +87,7 @@ export type ToolValue = {
enabled?: boolean
extra?: { description?: string } & Record<string, unknown>
credential_id?: string
type?: string
}

export type DataSourceItem = {

@@ -18,6 +18,7 @@ import {
Group,
} from '@/app/components/workflow/nodes/_base/components/layout'
import VarReferencePicker from '@/app/components/workflow/nodes/_base/components/variable/var-reference-picker'
import { IS_CE_EDITION } from '@/config'
import Split from '../_base/components/split'
import ChunkStructure from './components/chunk-structure'
import EmbeddingModel from './components/embedding-model'
@@ -172,7 +173,7 @@ const Panel: FC<NodePanelProps<KnowledgeBaseNodeType>> = ({
{
data.indexing_technique === IndexMethodEnum.QUALIFIED
&& [ChunkStructureEnum.general, ChunkStructureEnum.parent_child].includes(data.chunk_structure)
&& (
&& IS_CE_EDITION && (
<>
<SummaryIndexSetting
summaryIndexSetting={data.summary_index_setting}

@@ -1,7 +1,7 @@
{
"name": "dify-web",
"type": "module",
"version": "1.11.4",
"version": "1.12.1",
"private": true,
"packageManager": "pnpm@10.27.0+sha512.72d699da16b1179c14ba9e64dc71c9a40988cbdc65c264cb0e489db7de917f20dcf4d64d8723625f2969ba52d4b7e2a1170682d9ac2a5dcaeaab732b7e16f04a",
"imports": {
@@ -117,7 +117,6 @@
"ky": "1.12.0",
"lamejs": "1.2.1",
"lexical": "0.38.2",
"line-clamp": "1.0.0",
"mermaid": "11.11.0",
"mime": "4.1.0",
"mitt": "3.0.1",

8 web/pnpm-lock.yaml generated
@@ -233,9 +233,6 @@ importers:
lexical:
specifier: 0.38.2
version: 0.38.2
line-clamp:
specifier: 1.0.0
version: 1.0.0
mermaid:
specifier: 11.11.0
version: 11.11.0
@@ -5403,9 +5400,6 @@ packages:
resolution: {integrity: sha512-/vlFKAoH5Cgt3Ie+JLhRbwOsCQePABiU3tJ1egGvyQ+33R/vcwM2Zl2QR/LzjsBeItPt3oSVXapn+m4nQDvpzw==}
engines: {node: '>=14'}

line-clamp@1.0.0:
resolution: {integrity: sha512-dCDlvMj572RIRBQ3x9aIX0DTdt2St1bMdpi64jVTAi5vqBck7wf+J97//+J7+pS80rFJaYa8HiyXCTp0flpnBA==}

lines-and-columns@1.2.4:
resolution: {integrity: sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==}

@@ -12913,8 +12907,6 @@ snapshots:

lilconfig@3.1.3: {}

line-clamp@1.0.0: {}

lines-and-columns@1.2.4: {}

lint-staged@15.5.2:

80 web/service/client.spec.ts Normal file
@@ -0,0 +1,80 @@
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'

const loadGetBaseURL = async (isClientValue: boolean) => {
vi.resetModules()
vi.doMock('@/utils/client', () => ({ isClient: isClientValue }))
const warnSpy = vi.spyOn(console, 'warn').mockImplementation(() => {})
// eslint-disable-next-line next/no-assign-module-variable
const module = await import('./client')
warnSpy.mockClear()
return { getBaseURL: module.getBaseURL, warnSpy }
}

// Scenario: base URL selection and warnings.
describe('getBaseURL', () => {
beforeEach(() => {
vi.clearAllMocks()
})

afterEach(() => {
vi.restoreAllMocks()
})

// Scenario: client environment uses window origin.
it('should use window origin when running on the client', async () => {
// Arrange
const { origin } = window.location
const { getBaseURL, warnSpy } = await loadGetBaseURL(true)

// Act
const url = getBaseURL('/api')

// Assert
expect(url.href).toBe(`${origin}/api`)
expect(warnSpy).not.toHaveBeenCalled()
})

// Scenario: server environment falls back to localhost with warning.
it('should fall back to localhost and warn on the server', async () => {
// Arrange
const { getBaseURL, warnSpy } = await loadGetBaseURL(false)

// Act
const url = getBaseURL('/api')

// Assert
expect(url.href).toBe('http://localhost/api')
expect(warnSpy).toHaveBeenCalledTimes(1)
expect(warnSpy).toHaveBeenCalledWith('Using localhost as base URL in server environment, please configure accordingly.')
})

// Scenario: non-http protocols surface warnings.
it('should warn when protocol is not http or https', async () => {
// Arrange
const { getBaseURL, warnSpy } = await loadGetBaseURL(true)

// Act
const url = getBaseURL('localhost:5001/console/api')

// Assert
expect(url.protocol).toBe('localhost:')
expect(url.href).toBe('localhost:5001/console/api')
expect(warnSpy).toHaveBeenCalledTimes(1)
expect(warnSpy).toHaveBeenCalledWith(
'Unexpected protocol for API requests, expected http or https. Current protocol: localhost:. Please configure accordingly.',
)
})

// Scenario: absolute http URLs are preserved.
it('should keep absolute http URLs intact', async () => {
// Arrange
const { getBaseURL, warnSpy } = await loadGetBaseURL(true)

// Act
const url = getBaseURL('https://api.example.com/console/api')

// Assert
expect(url.href).toBe('https://api.example.com/console/api')
expect(warnSpy).not.toHaveBeenCalled()
})
})
@@ -13,12 +13,38 @@
consoleRouterContract,
marketplaceRouterContract,
} from '@/contract/router'
import { isClient } from '@/utils/client'
import { request } from './base'

const getMarketplaceHeaders = () => new Headers({
'X-Dify-Version': !IS_MARKETPLACE ? APP_VERSION : '999.0.0',
})

function isURL(path: string) {
try {
// eslint-disable-next-line no-new
new URL(path)
return true
}
catch {
return false
}
}

export function getBaseURL(path: string) {
const url = new URL(path, isURL(path) ? undefined : isClient ? window.location.origin : 'http://localhost')

if (!isClient && !isURL(path)) {
console.warn('Using localhost as base URL in server environment, please configure accordingly.')
}

if (url.protocol !== 'http:' && url.protocol !== 'https:') {
console.warn(`Unexpected protocol for API requests, expected http or https. Current protocol: ${url.protocol}. Please configure accordingly.`)
}

return url
}

const marketplaceLink = new OpenAPILink(marketplaceRouterContract, {
url: MARKETPLACE_API_PREFIX,
headers: () => (getMarketplaceHeaders()),
@@ -39,7 +65,7 @@ export const marketplaceClient: JsonifiedClient<ContractRouterClient<typeof mark
export const marketplaceQuery = createTanstackQueryUtils(marketplaceClient, { path: ['marketplace'] })

const consoleLink = new OpenAPILink(consoleRouterContract, {
url: API_PREFIX,
url: getBaseURL(API_PREFIX),
fetch: (input, init) => {
return request(
input.url,

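For reference, a minimal usage sketch of the getBaseURL helper introduced above, assuming it is imported from web/service/client.ts as in this diff; the expected results mirror the cases exercised by web/service/client.spec.ts.

import { getBaseURL } from './client'

// Relative path on the client: resolved against window.location.origin.
const consoleBase = getBaseURL('/console/api')

// Absolute http(s) URL: parsed as-is, no warning emitted.
const absolute = getBaseURL('https://api.example.com/console/api')

// A scheme-less host:port string parses with protocol 'localhost:',
// which triggers the unexpected-protocol warning.
const suspicious = getBaseURL('localhost:5001/console/api')

console.log(consoleBase.href, absolute.href, suspicious.protocol)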