Compare commits


3 Commits

Author   SHA1        Message                                                                                                     Date
GareArc  cc0cf10d77  fix: update request parameters in EnterpriseService to use 'params' instead of 'json' for DELETE requests   2025-12-25 22:28:25 -08:00
jyong    2f3be1a32a  fix: does not save segment vector when there is no attachment_ids                                           2025-12-15 15:57:56 +08:00
Jyong    1fcbfac7e9  fix: delete knowledge pipeline but pipeline and workflow don't delete (#29591)                              2025-12-15 15:52:39 +08:00
                     Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
5 changed files with 1261 additions and 16 deletions

View File

@@ -15,4 +15,5 @@ def handle(sender: Dataset, **kwargs):
         dataset.index_struct,
         dataset.collection_binding_id,
         dataset.doc_form,
+        dataset.pipeline_id,
     )
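
This hunk shows only the tail of the call inside the dataset-deletion event handler. A hedged reconstruction of the surrounding wiring follows; the Dataset stand-in, the indexing_technique field, and the enqueue stub are assumptions made so the sketch runs standalone, and only the added dataset.pipeline_id argument is confirmed by the hunk above:

from dataclasses import dataclass

@dataclass
class Dataset:  # stand-in carrying just the fields the handler forwards
    id: str
    tenant_id: str
    indexing_technique: str
    index_struct: str
    collection_binding_id: str
    doc_form: str
    pipeline_id: str | None

def enqueue_clean_dataset_task(*args):  # stand-in for the real async task dispatch
    print("enqueue clean_dataset_task", args)

def handle(sender: Dataset, **kwargs):
    dataset = sender
    enqueue_clean_dataset_task(
        dataset.id,
        dataset.tenant_id,
        dataset.indexing_technique,
        dataset.index_struct,
        dataset.collection_binding_id,
        dataset.doc_form,
        dataset.pipeline_id,  # newly forwarded so cleanup can remove the pipeline
    )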

View File

@@ -2817,20 +2817,20 @@ class SegmentService:
                         db.session.add(binding)
                     db.session.commit()
-                    # save vector index
-                    try:
-                        VectorService.create_segments_vector(
-                            [args["keywords"]], [segment_document], dataset, document.doc_form
-                        )
-                    except Exception as e:
-                        logger.exception("create segment index failed")
-                        segment_document.enabled = False
-                        segment_document.disabled_at = naive_utc_now()
-                        segment_document.status = "error"
-                        segment_document.error = str(e)
-                    db.session.commit()
-                    segment = db.session.query(DocumentSegment).where(DocumentSegment.id == segment_document.id).first()
-                    return segment
+                # save vector index
+                try:
+                    VectorService.create_segments_vector(
+                        [args["keywords"]], [segment_document], dataset, document.doc_form
+                    )
+                except Exception as e:
+                    logger.exception("create segment index failed")
+                    segment_document.enabled = False
+                    segment_document.disabled_at = naive_utc_now()
+                    segment_document.status = "error"
+                    segment_document.error = str(e)
+                db.session.commit()
+                segment = db.session.query(DocumentSegment).where(DocumentSegment.id == segment_document.id).first()
+                return segment
         except LockNotOwnedError:
             pass
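
With the diff markers restored, the hunk removes and re-adds an identical block with only its indentation changed: the vector-index save moves out of the attachment-binding branch so that it also runs when the segment has no attachment_ids, which is the bug named in commit 2f3be1a32a. A minimal self-contained sketch of that control-flow change, with hypothetical stand-in functions:

def bind_attachments(segment: str, attachment_ids: list[str]) -> None:
    print(f"bound {attachment_ids} to {segment}")

def index_vector(segment: str) -> None:
    print(f"vector index saved for {segment}")

def create_segment_buggy(segment: str, attachment_ids: list[str]) -> None:
    if attachment_ids:
        bind_attachments(segment, attachment_ids)
        index_vector(segment)  # BUG: never runs when attachment_ids is empty

def create_segment_fixed(segment: str, attachment_ids: list[str]) -> None:
    if attachment_ids:
        bind_attachments(segment, attachment_ids)
    index_vector(segment)  # de-indented: runs with or without attachments

create_segment_buggy("seg-1", [])  # prints nothing: the vector was silently skipped
create_segment_fixed("seg-1", [])  # vector index saved for seg-1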

View File

@@ -110,5 +110,5 @@ class EnterpriseService:
         if not app_id:
             raise ValueError("app_id must be provided.")
-        body = {"appId": app_id}
-        EnterpriseRequest.send_request("DELETE", "/webapp/clean", json=body)
+        params = {"appId": app_id}
+        EnterpriseRequest.send_request("DELETE", "/webapp/clean", params=params)
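
The fix suggests the enterprise endpoint reads appId from the query string, and DELETE request bodies are unreliable in practice (servers, proxies, and some clients ignore or drop them). A small illustration with the stock requests library; the URL is a placeholder, and EnterpriseRequest.send_request is assumed to forward these kwargs to a requests-style call:

import requests

URL = "https://enterprise.example.com/webapp/clean"  # placeholder host

# Before: the app id travels in a JSON body, which DELETE handlers may never parse.
before = requests.Request("DELETE", URL, json={"appId": "app-123"}).prepare()
# After: the app id travels in the query string, visible in the URL itself.
after = requests.Request("DELETE", URL, params={"appId": "app-123"}).prepare()

print(before.url)   # https://enterprise.example.com/webapp/clean
print(before.body)  # b'{"appId": "app-123"}'
print(after.url)    # https://enterprise.example.com/webapp/clean?appId=app-123
print(after.body)   # None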

View File

@@ -9,6 +9,7 @@ from core.rag.index_processor.index_processor_factory import IndexProcessorFactory
 from core.tools.utils.web_reader_tool import get_image_upload_file_ids
 from extensions.ext_database import db
 from extensions.ext_storage import storage
+from models import WorkflowType
 from models.dataset import (
     AppDatasetJoin,
     Dataset,
@@ -18,9 +19,11 @@ from models.dataset import (
     DatasetQuery,
     Document,
     DocumentSegment,
+    Pipeline,
     SegmentAttachmentBinding,
 )
 from models.model import UploadFile
+from models.workflow import Workflow

 logger = logging.getLogger(__name__)
@@ -34,6 +37,7 @@ def clean_dataset_task(
     index_struct: str,
     collection_binding_id: str,
     doc_form: str,
+    pipeline_id: str | None = None,
 ):
     """
     Clean dataset when dataset deleted.
@@ -135,6 +139,14 @@ def clean_dataset_task(
         # delete dataset metadata
         db.session.query(DatasetMetadata).where(DatasetMetadata.dataset_id == dataset_id).delete()
         db.session.query(DatasetMetadataBinding).where(DatasetMetadataBinding.dataset_id == dataset_id).delete()
+        # delete pipeline and workflow
+        if pipeline_id:
+            db.session.query(Pipeline).where(Pipeline.id == pipeline_id).delete()
+            db.session.query(Workflow).where(
+                Workflow.tenant_id == tenant_id,
+                Workflow.app_id == pipeline_id,
+                Workflow.type == WorkflowType.RAG_PIPELINE,
+            ).delete()
         # delete files
         if documents:
             for document in documents:
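
The new branch uses SQLAlchemy's query-level bulk delete, which emits a single DELETE ... WHERE statement without loading ORM objects first. A self-contained sketch of the same pattern against in-memory SQLite; PipelineDemo is a stand-in model, not Dify's:

from sqlalchemy import Column, String, create_engine
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()

class PipelineDemo(Base):  # stand-in, not Dify's Pipeline model
    __tablename__ = "pipelines"
    id = Column(String, primary_key=True)
    tenant_id = Column(String)

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add_all([
        PipelineDemo(id="p1", tenant_id="t1"),
        PipelineDemo(id="p2", tenant_id="t1"),
    ])
    session.commit()

    # Query-level bulk delete: one SQL DELETE, no objects materialized.
    session.query(PipelineDemo).where(PipelineDemo.id == "p1").delete()
    session.commit()

    print(session.query(PipelineDemo).count())  # 1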

File diff suppressed because it is too large.