feat: enable tenant isolation on duplicate document indexing tasks (#29080)

Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
Author: hj24
Date: 2025-12-08 17:54:57 +08:00
Committed by: GitHub
Parent: e6d504558a
Commit: 3cb944f318
18 changed files with 2097 additions and 158 deletions


@@ -114,7 +114,13 @@ def _document_indexing_with_tenant_queue(
     try:
         _document_indexing(dataset_id, document_ids)
     except Exception:
-        logger.exception("Error processing document indexing %s for tenant %s: %s", dataset_id, tenant_id)
+        logger.exception(
+            "Error processing document indexing %s for tenant %s: %s",
+            dataset_id,
+            tenant_id,
+            document_ids,
+            exc_info=True,
+        )
     finally:
         tenant_isolated_task_queue = TenantIsolatedTaskQueue(tenant_id, "document_indexing")
@@ -122,7 +128,7 @@ def _document_indexing_with_tenant_queue(
         # Use rpop to get the next task from the queue (FIFO order)
         next_tasks = tenant_isolated_task_queue.pull_tasks(count=dify_config.TENANT_ISOLATED_TASK_CONCURRENCY)
-        logger.info("document indexing tenant isolation queue next tasks: %s", next_tasks)
+        logger.info("document indexing tenant isolation queue %s next tasks: %s", tenant_id, next_tasks)
         if next_tasks:
             for next_task in next_tasks:

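Note: the hunks above and the duplicate-indexing hunks below share one hand-off pattern: when a task finishes, its finally block pulls up to TENANT_ISOLATED_TASK_CONCURRENCY waiting payloads for the tenant (FIFO) and re-dispatches them, or clears the in-flight flag when nothing is queued. A minimal sketch of that pattern, using only the queue methods visible in this commit; the helper name and the redispatch callback are illustrative and not part of the diff:

def _drain_tenant_queue(tenant_id: str, queue_name: str, redispatch) -> None:
    # Illustrative helper mirroring the finally-block logic above (not in this commit).
    queue = TenantIsolatedTaskQueue(tenant_id, queue_name)
    # FIFO pull of up to TENANT_ISOLATED_TASK_CONCURRENCY waiting payloads
    next_tasks = queue.pull_tasks(count=dify_config.TENANT_ISOLATED_TASK_CONCURRENCY)
    if next_tasks:
        for payload in next_tasks:
            # Keep the "task running" flag alive while the next task is dispatched
            queue.set_task_waiting_time()
            redispatch(payload)  # e.g. task_func.delay(**payload)
    else:
        # Nothing waiting: clear the flag so future work can start immediately
        queue.delete_task_key()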

@@ -1,13 +1,16 @@
 import logging
 import time
+from collections.abc import Callable, Sequence
 
 import click
 from celery import shared_task
 from sqlalchemy import select
 
 from configs import dify_config
+from core.entities.document_task import DocumentTask
 from core.indexing_runner import DocumentIsPausedError, IndexingRunner
 from core.rag.index_processor.index_processor_factory import IndexProcessorFactory
+from core.rag.pipeline.queue import TenantIsolatedTaskQueue
 from enums.cloud_plan import CloudPlan
 from extensions.ext_database import db
 from libs.datetime_utils import naive_utc_now
@@ -24,8 +27,55 @@ def duplicate_document_indexing_task(dataset_id: str, document_ids: list):
     :param dataset_id:
     :param document_ids:
 
+    .. warning:: TO BE DEPRECATED
+        This function will be deprecated and removed in a future version.
+        Use normal_duplicate_document_indexing_task or priority_duplicate_document_indexing_task instead.
+
     Usage: duplicate_document_indexing_task.delay(dataset_id, document_ids)
     """
+    logger.warning("duplicate document indexing task received: %s - %s", dataset_id, document_ids)
+    _duplicate_document_indexing_task(dataset_id, document_ids)
+
+
+def _duplicate_document_indexing_task_with_tenant_queue(
+    tenant_id: str, dataset_id: str, document_ids: Sequence[str], task_func: Callable[[str, str, Sequence[str]], None]
+):
+    try:
+        _duplicate_document_indexing_task(dataset_id, document_ids)
+    except Exception:
+        logger.exception(
+            "Error processing duplicate document indexing %s for tenant %s: %s",
+            dataset_id,
+            tenant_id,
+            document_ids,
+            exc_info=True,
+        )
+    finally:
+        tenant_isolated_task_queue = TenantIsolatedTaskQueue(tenant_id, "duplicate_document_indexing")
+        # Check if there are waiting tasks in the queue
+        # Use rpop to get the next task from the queue (FIFO order)
+        next_tasks = tenant_isolated_task_queue.pull_tasks(count=dify_config.TENANT_ISOLATED_TASK_CONCURRENCY)
+        logger.info("duplicate document indexing tenant isolation queue %s next tasks: %s", tenant_id, next_tasks)
+        if next_tasks:
+            for next_task in next_tasks:
+                document_task = DocumentTask(**next_task)
+                # Process the next waiting task
+                # Keep the flag set to indicate a task is running
+                tenant_isolated_task_queue.set_task_waiting_time()
+                task_func.delay(  # type: ignore
+                    tenant_id=document_task.tenant_id,
+                    dataset_id=document_task.dataset_id,
+                    document_ids=document_task.document_ids,
+                )
+        else:
+            # No more waiting tasks, clear the flag
+            tenant_isolated_task_queue.delete_task_key()
+
+
+def _duplicate_document_indexing_task(dataset_id: str, document_ids: Sequence[str]):
     documents = []
     start_at = time.perf_counter()
@@ -110,3 +160,35 @@ def duplicate_document_indexing_task(dataset_id: str, document_ids: list):
         logger.exception("duplicate_document_indexing_task failed, dataset_id: %s", dataset_id)
     finally:
         db.session.close()
+
+
+@shared_task(queue="dataset")
+def normal_duplicate_document_indexing_task(tenant_id: str, dataset_id: str, document_ids: Sequence[str]):
+    """
+    Async process duplicate documents
+    :param tenant_id:
+    :param dataset_id:
+    :param document_ids:
+
+    Usage: normal_duplicate_document_indexing_task.delay(tenant_id, dataset_id, document_ids)
+    """
+    logger.info("normal duplicate document indexing task received: %s - %s - %s", tenant_id, dataset_id, document_ids)
+    _duplicate_document_indexing_task_with_tenant_queue(
+        tenant_id, dataset_id, document_ids, normal_duplicate_document_indexing_task
+    )
+
+
+@shared_task(queue="priority_dataset")
+def priority_duplicate_document_indexing_task(tenant_id: str, dataset_id: str, document_ids: Sequence[str]):
+    """
+    Async process duplicate documents
+    :param tenant_id:
+    :param dataset_id:
+    :param document_ids:
+
+    Usage: priority_duplicate_document_indexing_task.delay(tenant_id, dataset_id, document_ids)
+    """
+    logger.info("priority duplicate document indexing task received: %s - %s - %s", tenant_id, dataset_id, document_ids)
+    _duplicate_document_indexing_task_with_tenant_queue(
+        tenant_id, dataset_id, document_ids, priority_duplicate_document_indexing_task
+    )

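The docstrings above give the intended call shape for the two new entry points. A hedged sketch of a caller follows; the routing condition is an assumption for illustration (this diff does not show how callers pick the normal vs. priority queue), and only the .delay(tenant_id, dataset_id, document_ids) signatures are taken from the diff. The re-dispatch path builds a DocumentTask from each queued payload and reads tenant_id, dataset_id and document_ids back off it, so queued payloads are evidently mappings with those three keys.

def dispatch_duplicate_indexing(
    tenant_id: str, dataset_id: str, document_ids: list[str], high_priority: bool
) -> None:
    # Hypothetical caller, not part of this commit.
    if high_priority:
        priority_duplicate_document_indexing_task.delay(
            tenant_id=tenant_id, dataset_id=dataset_id, document_ids=document_ids
        )
    else:
        normal_duplicate_document_indexing_task.delay(
            tenant_id=tenant_id, dataset_id=dataset_id, document_ids=document_ids
        )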

@@ -47,6 +47,8 @@ def priority_rag_pipeline_run_task(
         )
         rag_pipeline_invoke_entities = json.loads(rag_pipeline_invoke_entities_content)
+        logger.info("tenant %s received %d rag pipeline invoke entities", tenant_id, len(rag_pipeline_invoke_entities))
+
         # Get Flask app object for thread context
         flask_app = current_app._get_current_object()  # type: ignore
@@ -66,7 +68,7 @@ def priority_rag_pipeline_run_task(
         end_at = time.perf_counter()
         logging.info(
             click.style(
-                f"tenant_id: {tenant_id} , Rag pipeline run completed. Latency: {end_at - start_at}s", fg="green"
+                f"tenant_id: {tenant_id}, Rag pipeline run completed. Latency: {end_at - start_at}s", fg="green"
             )
         )
     except Exception:
@@ -78,7 +80,7 @@ def priority_rag_pipeline_run_task(
         # Check if there are waiting tasks in the queue
         # Use rpop to get the next task from the queue (FIFO order)
         next_file_ids = tenant_isolated_task_queue.pull_tasks(count=dify_config.TENANT_ISOLATED_TASK_CONCURRENCY)
-        logger.info("priority rag pipeline tenant isolation queue next files: %s", next_file_ids)
+        logger.info("priority rag pipeline tenant isolation queue %s next files: %s", tenant_id, next_file_ids)
         if next_file_ids:
             for next_file_id in next_file_ids:


@@ -47,6 +47,8 @@ def rag_pipeline_run_task(
         )
         rag_pipeline_invoke_entities = json.loads(rag_pipeline_invoke_entities_content)
+        logger.info("tenant %s received %d rag pipeline invoke entities", tenant_id, len(rag_pipeline_invoke_entities))
+
         # Get Flask app object for thread context
         flask_app = current_app._get_current_object()  # type: ignore
@@ -66,7 +68,7 @@ def rag_pipeline_run_task(
         end_at = time.perf_counter()
         logging.info(
             click.style(
-                f"tenant_id: {tenant_id} , Rag pipeline run completed. Latency: {end_at - start_at}s", fg="green"
+                f"tenant_id: {tenant_id}, Rag pipeline run completed. Latency: {end_at - start_at}s", fg="green"
             )
         )
     except Exception:
@@ -78,7 +80,7 @@ def rag_pipeline_run_task(
         # Check if there are waiting tasks in the queue
         # Use rpop to get the next task from the queue (FIFO order)
         next_file_ids = tenant_isolated_task_queue.pull_tasks(count=dify_config.TENANT_ISOLATED_TASK_CONCURRENCY)
-        logger.info("rag pipeline tenant isolation queue next files: %s", next_file_ids)
+        logger.info("rag pipeline tenant isolation queue %s next files: %s", tenant_id, next_file_ids)
         if next_file_ids:
             for next_file_id in next_file_ids: