Mirror of https://github.com/langgenius/dify.git (synced 2026-01-02 20:47:20 +00:00)
Compare commits: refactor/q ... release/e- (3 commits)
| Author | SHA1 | Date |
|---|---|---|
| | cc0cf10d77 | |
| | 2f3be1a32a | |
| | 1fcbfac7e9 | |
```diff
@@ -15,4 +15,5 @@ def handle(sender: Dataset, **kwargs):
         dataset.index_struct,
         dataset.collection_binding_id,
         dataset.doc_form,
+        dataset.pipeline_id,
     )
```
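This hunk is from the dataset-deleted signal handler, which now also forwards `dataset.pipeline_id` to the cleanup task. A minimal sketch of the surrounding call, assuming the handler dispatches `clean_dataset_task` through Celery's `.delay()`; only the four trailing arguments are visible in the diff, the leading ones are assumptions reconstructed from the task signature shown further below:

```python
# Hedged sketch of the handler around the hunk above; the argument list
# before index_struct and the .delay() dispatch are assumptions.
def handle(sender: Dataset, **kwargs):
    dataset = sender
    clean_dataset_task.delay(  # async Celery dispatch (assumed)
        dataset.id,
        dataset.tenant_id,
        dataset.indexing_technique,
        dataset.index_struct,
        dataset.collection_binding_id,
        dataset.doc_form,
        dataset.pipeline_id,  # newly forwarded by this commit
    )
```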
```diff
@@ -2817,20 +2817,20 @@ class SegmentService:
                 db.session.add(binding)
                 db.session.commit()
 
-            # save vector index
-            try:
-                VectorService.create_segments_vector(
-                    [args["keywords"]], [segment_document], dataset, document.doc_form
-                )
-            except Exception as e:
-                logger.exception("create segment index failed")
-                segment_document.enabled = False
-                segment_document.disabled_at = naive_utc_now()
-                segment_document.status = "error"
-                segment_document.error = str(e)
-            db.session.commit()
-            segment = db.session.query(DocumentSegment).where(DocumentSegment.id == segment_document.id).first()
-            return segment
+                # save vector index
+                try:
+                    VectorService.create_segments_vector(
+                        [args["keywords"]], [segment_document], dataset, document.doc_form
+                    )
+                except Exception as e:
+                    logger.exception("create segment index failed")
+                    segment_document.enabled = False
+                    segment_document.disabled_at = naive_utc_now()
+                    segment_document.status = "error"
+                    segment_document.error = str(e)
+                db.session.commit()
+                segment = db.session.query(DocumentSegment).where(DocumentSegment.id == segment_document.id).first()
+                return segment
         except LockNotOwnedError:
             pass
 
```
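The trailing context (`except LockNotOwnedError: pass`) shows that segment creation runs under a Redis lock, and the re-indentation moves the vector-index save inside the locked block so indexing completes before the lock is released. A minimal sketch of that pattern, assuming redis-py's lock API; the lock name and timeout are illustrative:

```python
from redis.exceptions import LockNotOwnedError

# redis_client is assumed to be a redis.Redis instance (as in dify's
# extensions.ext_redis); lock name and timeout are illustrative.
lock_name = f"add_segment_lock_document_id_{document.id}"
try:
    with redis_client.lock(lock_name, timeout=600):
        # ... create the segment, its bindings, and (after this commit)
        # the vector index, all while the lock is still held ...
        pass
except LockNotOwnedError:
    # Raised on release when the lock expired and was re-acquired by
    # another holder; the service deliberately swallows it.
    pass
```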
```diff
@@ -110,5 +110,5 @@ class EnterpriseService:
         if not app_id:
             raise ValueError("app_id must be provided.")
 
-        body = {"appId": app_id}
-        EnterpriseRequest.send_request("DELETE", "/webapp/clean", json=body)
+        params = {"appId": app_id}
+        EnterpriseRequest.send_request("DELETE", "/webapp/clean", params=params)
```
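This fix moves the `appId` from a JSON body to a query-string parameter on the DELETE request. Assuming `EnterpriseRequest.send_request` passes its keyword arguments through to something like `requests.request`, the before/after difference looks as follows; the host is illustrative:

```python
import requests

app_id = "app-123"  # illustrative
base = "https://enterprise.example.internal"  # illustrative host

# Before: id sent as a JSON body. Bodies on DELETE are legal but are
# dropped or rejected by some servers and intermediaries.
requests.request("DELETE", f"{base}/webapp/clean", json={"appId": app_id})

# After: id sent in the query string, i.e.
#   DELETE /webapp/clean?appId=app-123
requests.request("DELETE", f"{base}/webapp/clean", params={"appId": app_id})
```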
```diff
@@ -9,6 +9,7 @@ from core.rag.index_processor.index_processor_factory import IndexProcessorFactory
 from core.tools.utils.web_reader_tool import get_image_upload_file_ids
 from extensions.ext_database import db
 from extensions.ext_storage import storage
+from models import WorkflowType
 from models.dataset import (
     AppDatasetJoin,
     Dataset,
```
```diff
@@ -18,9 +19,11 @@ from models.dataset import (
     DatasetQuery,
     Document,
     DocumentSegment,
+    Pipeline,
+    SegmentAttachmentBinding,
 )
 from models.model import UploadFile
+from models.workflow import Workflow
 
 logger = logging.getLogger(__name__)
```
```diff
@@ -34,6 +37,7 @@ def clean_dataset_task(
     index_struct: str,
     collection_binding_id: str,
     doc_form: str,
+    pipeline_id: str | None = None,
 ):
     """
     Clean dataset when dataset deleted.
```
```diff
@@ -135,6 +139,14 @@ def clean_dataset_task(
         # delete dataset metadata
         db.session.query(DatasetMetadata).where(DatasetMetadata.dataset_id == dataset_id).delete()
         db.session.query(DatasetMetadataBinding).where(DatasetMetadataBinding.dataset_id == dataset_id).delete()
+        # delete pipeline and workflow
+        if pipeline_id:
+            db.session.query(Pipeline).where(Pipeline.id == pipeline_id).delete()
+            db.session.query(Workflow).where(
+                Workflow.tenant_id == tenant_id,
+                Workflow.app_id == pipeline_id,
+                Workflow.type == WorkflowType.RAG_PIPELINE,
+            ).delete()
         # delete files
         if documents:
             for document in documents:
```
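The new branch runs only when a `pipeline_id` is supplied, removing the `Pipeline` row and any RAG-pipeline `Workflow` rows scoped to the tenant via SQLAlchemy bulk deletes. A self-contained sketch of that query-then-delete pattern against a toy model and an in-memory SQLite database; all names here are illustrative stand-ins, not dify's real models:

```python
from sqlalchemy import String, create_engine
from sqlalchemy.orm import DeclarativeBase, Mapped, Session, mapped_column


class Base(DeclarativeBase):
    pass


class Workflow(Base):  # toy stand-in for models.workflow.Workflow
    __tablename__ = "workflows"
    id: Mapped[str] = mapped_column(String, primary_key=True)
    tenant_id: Mapped[str] = mapped_column(String)
    app_id: Mapped[str] = mapped_column(String)
    type: Mapped[str] = mapped_column(String)


engine = create_engine("sqlite://")  # in-memory database
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add_all(
        [
            Workflow(id="w1", tenant_id="t1", app_id="p1", type="rag-pipeline"),
            Workflow(id="w2", tenant_id="t1", app_id="p1", type="workflow"),
        ]
    )
    session.commit()

    # Query.delete() emits a single DELETE ... WHERE statement without
    # loading rows, which is what the task's cleanup queries rely on.
    deleted = (
        session.query(Workflow)
        .where(
            Workflow.tenant_id == "t1",
            Workflow.app_id == "p1",
            Workflow.type == "rag-pipeline",
        )
        .delete()
    )
    session.commit()
    assert deleted == 1  # only the RAG-pipeline workflow row was removed
```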
api/tests/unit_tests/tasks/test_clean_dataset_task.py: new file, +1232 lines (file diff suppressed because it is too large).