Mirror of https://github.com/langgenius/dify.git (synced 2026-02-06 08:08:57 +00:00)

Compare commits: feat/hitl ... refactor/s (3 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 4971e11734 | |
| | a297b06aac | |
| | e988266f53 | |
.github/workflows/deploy-hitl.yml (vendored, 8 changed lines)
@@ -4,8 +4,7 @@ on:
workflow_run:
workflows: ["Build and Push API & Web"]
branches:
- "feat/hitl-frontend"
- "feat/hitl-backend"
- "feat/hitl"
types:
- completed

@@ -14,10 +13,7 @@ jobs:
runs-on: ubuntu-latest
if: |
github.event.workflow_run.conclusion == 'success' &&
(
github.event.workflow_run.head_branch == 'feat/hitl-frontend' ||
github.event.workflow_run.head_branch == 'feat/hitl-backend'
)
github.event.workflow_run.head_branch == 'feat/hitl'
steps:
- name: Deploy to server
uses: appleboy/ssh-action@v1

@@ -102,8 +102,6 @@ forbidden_modules =
core.trigger
core.variables
ignore_imports =
core.workflow.nodes.agent.agent_node -> core.db.session_factory
core.workflow.nodes.agent.agent_node -> models.tools
core.workflow.nodes.loop.loop_node -> core.app.workflow.node_factory
core.workflow.graph_engine.command_channels.redis_channel -> extensions.ext_redis
core.workflow.workflow_entry -> core.app.workflow.layers.observability

@@ -2,7 +2,7 @@ from __future__ import annotations

import json
from collections.abc import Generator, Mapping, Sequence
from typing import TYPE_CHECKING, Any, Union, cast
from typing import TYPE_CHECKING, Any, cast

from packaging.version import Version
from pydantic import ValidationError

@@ -11,7 +11,6 @@ from sqlalchemy.orm import Session

from core.agent.entities import AgentToolEntity
from core.agent.plugin_entities import AgentStrategyParameter
from core.db.session_factory import session_factory
from core.file import File, FileTransferMethod
from core.memory.token_buffer_memory import TokenBufferMemory
from core.model_manager import ModelInstance, ModelManager

@@ -50,12 +49,6 @@ from factories import file_factory
from factories.agent_factory import get_plugin_agent_strategy
from models import ToolFile
from models.model import Conversation
from models.tools import (
ApiToolProvider,
BuiltinToolProvider,
MCPToolProvider,
WorkflowToolProvider,
)
from services.tools.builtin_tools_manage_service import BuiltinToolManageService

from .exc import (

@@ -266,7 +259,7 @@ class AgentNode(Node[AgentNodeData]):
value = cast(list[dict[str, Any]], value)
tool_value = []
for tool in value:
provider_type = self._infer_tool_provider_type(tool, self.tenant_id)
provider_type = ToolProviderType(tool.get("type", ToolProviderType.BUILT_IN))
setting_params = tool.get("settings", {})
parameters = tool.get("parameters", {})
manual_input_params = [key for key, value in parameters.items() if value is not None]

@@ -755,34 +748,3 @@ class AgentNode(Node[AgentNodeData]):
llm_usage=llm_usage,
)
)

@staticmethod
def _infer_tool_provider_type(tool_config: dict[str, Any], tenant_id: str) -> ToolProviderType:
provider_type_str = tool_config.get("type")
if provider_type_str:
return ToolProviderType(provider_type_str)

provider_id = tool_config.get("provider_name")
if not provider_id:
return ToolProviderType.BUILT_IN

with session_factory.create_session() as session:
provider_map: dict[
type[Union[WorkflowToolProvider, MCPToolProvider, ApiToolProvider, BuiltinToolProvider]],
ToolProviderType,
] = {
WorkflowToolProvider: ToolProviderType.WORKFLOW,
MCPToolProvider: ToolProviderType.MCP,
ApiToolProvider: ToolProviderType.API,
BuiltinToolProvider: ToolProviderType.BUILT_IN,
}

for provider_model, provider_type in provider_map.items():
stmt = select(provider_model).where(
provider_model.id == provider_id,
provider_model.tenant_id == tenant_id,
)
if session.scalar(stmt):
return provider_type

raise AgentNodeError(f"Tool provider with ID '{provider_id}' not found.")

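For orientation, here is a minimal sketch of the behavioural change in this diff, under the assumption that ToolProviderType keeps the string values the deleted tests imply ("builtin", "api", "workflow", "mcp"): the removed `_infer_tool_provider_type` probed four provider tables when the tool config carried no `type`, while the new line trusts the config's `type` field and defaults to the built-in provider. The standalone enum and helper below are illustrative stand-ins, not the repository's modules.

```python
from enum import Enum
from typing import Any


class ToolProviderType(str, Enum):
    # Mirrors the members visible in the diff; the real enum may have more values.
    BUILT_IN = "builtin"
    API = "api"
    WORKFLOW = "workflow"
    MCP = "mcp"


def resolve_provider_type(tool_config: dict[str, Any]) -> ToolProviderType:
    """Resolve the provider type the way the new agent_node code does:
    read the 'type' key directly and fall back to BUILT_IN, with no database probing."""
    return ToolProviderType(tool_config.get("type", ToolProviderType.BUILT_IN))


# Usage: a known string maps to its member, a missing key falls back to BUILT_IN,
# and an unknown string still raises ValueError, as the old config path did.
assert resolve_provider_type({"type": "mcp"}) is ToolProviderType.MCP
assert resolve_provider_type({}) is ToolProviderType.BUILT_IN
```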
@@ -14,6 +14,9 @@ from models.model import UploadFile

logger = logging.getLogger(__name__)

# Batch size for database operations to keep transactions short
BATCH_SIZE = 1000


@shared_task(queue="dataset")
def batch_clean_document_task(document_ids: list[str], dataset_id: str, doc_form: str | None, file_ids: list[str]):

@@ -31,63 +34,179 @@ def batch_clean_document_task(document_ids: list[str], dataset_id: str, doc_form
if not doc_form:
raise ValueError("doc_form is required")

with session_factory.create_session() as session:
try:
dataset = session.query(Dataset).where(Dataset.id == dataset_id).first()

if not dataset:
raise Exception("Document has no dataset")

session.query(DatasetMetadataBinding).where(
DatasetMetadataBinding.dataset_id == dataset_id,
DatasetMetadataBinding.document_id.in_(document_ids),
).delete(synchronize_session=False)
storage_keys_to_delete: list[str] = []
index_node_ids: list[str] = []
segment_ids: list[str] = []
total_image_upload_file_ids: list[str] = []

try:
# ============ Step 1: Query segment and file data (short read-only transaction) ============
with session_factory.create_session() as session:
# Get segments info
segments = session.scalars(
select(DocumentSegment).where(DocumentSegment.document_id.in_(document_ids))
).all()
# check segment is exist

if segments:
index_node_ids = [segment.index_node_id for segment in segments]
index_processor = IndexProcessorFactory(doc_form).init_index_processor()
index_processor.clean(
dataset, index_node_ids, with_keywords=True, delete_child_chunks=True, delete_summaries=True
)
segment_ids = [segment.id for segment in segments]

# Collect image file IDs from segment content
for segment in segments:
image_upload_file_ids = get_image_upload_file_ids(segment.content)
image_files = session.query(UploadFile).where(UploadFile.id.in_(image_upload_file_ids)).all()
for image_file in image_files:
try:
if image_file and image_file.key:
storage.delete(image_file.key)
except Exception:
logger.exception(
"Delete image_files failed when storage deleted, \
image_upload_file_is: %s",
image_file.id,
)
stmt = delete(UploadFile).where(UploadFile.id.in_(image_upload_file_ids))
session.execute(stmt)
session.delete(segment)
total_image_upload_file_ids.extend(image_upload_file_ids)

# Query storage keys for image files
if total_image_upload_file_ids:
image_files = session.scalars(
select(UploadFile).where(UploadFile.id.in_(total_image_upload_file_ids))
).all()
storage_keys_to_delete.extend([f.key for f in image_files if f and f.key])

# Query storage keys for document files
if file_ids:
files = session.scalars(select(UploadFile).where(UploadFile.id.in_(file_ids))).all()
for file in files:
try:
storage.delete(file.key)
except Exception:
logger.exception("Delete file failed when document deleted, file_id: %s", file.id)
stmt = delete(UploadFile).where(UploadFile.id.in_(file_ids))
session.execute(stmt)
storage_keys_to_delete.extend([f.key for f in files if f and f.key])

session.commit()

end_at = time.perf_counter()
logger.info(
click.style(
f"Cleaned documents when documents deleted latency: {end_at - start_at}",
fg="green",
# ============ Step 2: Clean vector index (external service, fresh session for dataset) ============
if index_node_ids:
try:
# Fetch dataset in a fresh session to avoid DetachedInstanceError
with session_factory.create_session() as session:
dataset = session.query(Dataset).where(Dataset.id == dataset_id).first()
if not dataset:
logger.warning("Dataset not found for vector index cleanup, dataset_id: %s", dataset_id)
else:
index_processor = IndexProcessorFactory(doc_form).init_index_processor()
index_processor.clean(
dataset, index_node_ids, with_keywords=True, delete_child_chunks=True, delete_summaries=True
)
except Exception:
logger.exception(
"Failed to clean vector index for dataset_id: %s, document_ids: %s, index_node_ids count: %d",
dataset_id,
document_ids,
len(index_node_ids),
)
)

# ============ Step 3: Delete metadata binding (separate short transaction) ============
try:
with session_factory.create_session() as session:
deleted_count = (
session.query(DatasetMetadataBinding)
.where(
DatasetMetadataBinding.dataset_id == dataset_id,
DatasetMetadataBinding.document_id.in_(document_ids),
)
.delete(synchronize_session=False)
)
session.commit()
logger.debug("Deleted %d metadata bindings for dataset_id: %s", deleted_count, dataset_id)
except Exception:
logger.exception("Cleaned documents when documents deleted failed")
logger.exception(
"Failed to delete metadata bindings for dataset_id: %s, document_ids: %s",
dataset_id,
document_ids,
)

# ============ Step 4: Batch delete UploadFile records (multiple short transactions) ============
if total_image_upload_file_ids:
failed_batches = 0
total_batches = (len(total_image_upload_file_ids) + BATCH_SIZE - 1) // BATCH_SIZE
for i in range(0, len(total_image_upload_file_ids), BATCH_SIZE):
batch = total_image_upload_file_ids[i : i + BATCH_SIZE]
try:
with session_factory.create_session() as session:
stmt = delete(UploadFile).where(UploadFile.id.in_(batch))
session.execute(stmt)
session.commit()
except Exception:
failed_batches += 1
logger.exception(
"Failed to delete image UploadFile batch %d-%d for dataset_id: %s",
i,
i + len(batch),
dataset_id,
)
if failed_batches > 0:
logger.warning(
"Image UploadFile deletion: %d/%d batches failed for dataset_id: %s",
failed_batches,
total_batches,
dataset_id,
)

# ============ Step 5: Batch delete DocumentSegment records (multiple short transactions) ============
if segment_ids:
failed_batches = 0
total_batches = (len(segment_ids) + BATCH_SIZE - 1) // BATCH_SIZE
for i in range(0, len(segment_ids), BATCH_SIZE):
batch = segment_ids[i : i + BATCH_SIZE]
try:
with session_factory.create_session() as session:
segment_delete_stmt = delete(DocumentSegment).where(DocumentSegment.id.in_(batch))
session.execute(segment_delete_stmt)
session.commit()
except Exception:
failed_batches += 1
logger.exception(
"Failed to delete DocumentSegment batch %d-%d for dataset_id: %s, document_ids: %s",
i,
i + len(batch),
dataset_id,
document_ids,
)
if failed_batches > 0:
logger.warning(
"DocumentSegment deletion: %d/%d batches failed, document_ids: %s",
failed_batches,
total_batches,
document_ids,
)

# ============ Step 6: Delete document-associated files (separate short transaction) ============
if file_ids:
try:
with session_factory.create_session() as session:
stmt = delete(UploadFile).where(UploadFile.id.in_(file_ids))
session.execute(stmt)
session.commit()
except Exception:
logger.exception(
"Failed to delete document UploadFile records for dataset_id: %s, file_ids: %s",
dataset_id,
file_ids,
)

# ============ Step 7: Delete storage files (I/O operations, no DB transaction) ============
storage_delete_failures = 0
for storage_key in storage_keys_to_delete:
try:
storage.delete(storage_key)
except Exception:
storage_delete_failures += 1
logger.exception("Failed to delete file from storage, key: %s", storage_key)
if storage_delete_failures > 0:
logger.warning(
"Storage file deletion completed with %d failures out of %d total files for dataset_id: %s",
storage_delete_failures,
len(storage_keys_to_delete),
dataset_id,
)

end_at = time.perf_counter()
logger.info(
click.style(
f"Cleaned documents when documents deleted latency: {end_at - start_at:.2f}s, "
f"dataset_id: {dataset_id}, document_ids: {document_ids}, "
f"segments: {len(segment_ids)}, image_files: {len(total_image_upload_file_ids)}, "
f"storage_files: {len(storage_keys_to_delete)}",
fg="green",
)
)
except Exception:
logger.exception(
"Batch clean documents failed for dataset_id: %s, document_ids: %s",
dataset_id,
document_ids,
)

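The pattern Steps 4 to 6 apply is worth isolating: slice the id list into BATCH_SIZE chunks and run each chunk in its own session and commit, so a failed chunk neither holds a long transaction open nor aborts the remaining chunks. Below is a minimal, self-contained sketch of that pattern; the `UploadFile` stand-in model, the table name, and the helper names are illustrative, not the repository's code.

```python
import logging
from collections.abc import Iterator, Sequence

from sqlalchemy import Column, String, create_engine, delete
from sqlalchemy.orm import declarative_base, sessionmaker

logger = logging.getLogger(__name__)
BATCH_SIZE = 1000  # same idea as the constant the task introduces: keep each DELETE short

Base = declarative_base()


class UploadFile(Base):  # stand-in model; the real one lives in models.model
    __tablename__ = "upload_files"
    id = Column(String, primary_key=True)


def chunks(ids: Sequence[str], size: int) -> Iterator[Sequence[str]]:
    for i in range(0, len(ids), size):
        yield ids[i : i + size]


def delete_upload_files_in_batches(make_session: sessionmaker, ids: Sequence[str], size: int = BATCH_SIZE) -> int:
    """Delete rows chunk by chunk, one short transaction per chunk.

    Mirrors the task's log-and-continue behaviour: a failed chunk is counted
    and logged, but the remaining chunks still run."""
    failed = 0
    for batch in chunks(ids, size):
        try:
            with make_session() as session:
                session.execute(delete(UploadFile).where(UploadFile.id.in_(batch)))
                session.commit()
        except Exception:
            failed += 1
            logger.exception("Failed to delete a batch of %d upload files", len(batch))
    return failed


if __name__ == "__main__":
    engine = create_engine("sqlite://")  # in-memory database, demo only
    Base.metadata.create_all(engine)
    make_session = sessionmaker(bind=engine)
    ids = [f"file-{n}" for n in range(250)]
    with make_session() as s:
        s.add_all(UploadFile(id=i) for i in ids)
        s.commit()
    assert delete_upload_files_in_batches(make_session, ids, size=100) == 0
```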
@@ -3,6 +3,7 @@ import time

import click
from celery import shared_task
from sqlalchemy import delete

from core.db.session_factory import session_factory
from core.rag.index_processor.index_processor_factory import IndexProcessorFactory

@@ -67,8 +68,14 @@ def delete_segment_from_index_task(
if segment_attachment_bindings:
attachment_ids = [binding.attachment_id for binding in segment_attachment_bindings]
index_processor.clean(dataset=dataset, node_ids=attachment_ids, with_keywords=False)
for binding in segment_attachment_bindings:
session.delete(binding)
segment_attachment_bind_ids = [i.id for i in segment_attachment_bindings]

for i in range(0, len(segment_attachment_bind_ids), 1000):
segment_attachment_bind_delete_stmt = delete(SegmentAttachmentBinding).where(
SegmentAttachmentBinding.id.in_(segment_attachment_bind_ids[i : i + 1000])
)
session.execute(segment_attachment_bind_delete_stmt)

# delete upload file
session.query(UploadFile).where(UploadFile.id.in_(attachment_ids)).delete(synchronize_session=False)
session.commit()

@@ -28,7 +28,7 @@ def document_indexing_sync_task(dataset_id: str, document_id: str):
logger.info(click.style(f"Start sync document: {document_id}", fg="green"))
start_at = time.perf_counter()

with session_factory.create_session() as session:
with session_factory.create_session() as session, session.begin():
document = session.query(Document).where(Document.id == document_id, Document.dataset_id == dataset_id).first()

if not document:

@@ -68,7 +68,6 @@ def document_indexing_sync_task(dataset_id: str, document_id: str):
document.indexing_status = "error"
document.error = "Datasource credential not found. Please reconnect your Notion workspace."
document.stopped_at = naive_utc_now()
session.commit()
return

loader = NotionExtractor(

@@ -85,7 +84,6 @@ def document_indexing_sync_task(dataset_id: str, document_id: str):
if last_edited_time != page_edited_time:
document.indexing_status = "parsing"
document.processing_started_at = naive_utc_now()
session.commit()

# delete all document segment and index
try:

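The one-line change in this task swaps explicit `session.commit()` calls for a transaction scoped by `session.begin()`: the block commits once on a clean exit and rolls back on an exception. A small sketch of that contract follows, using a plain SQLAlchemy sessionmaker and a stand-in `Document` model rather than the repository's `session_factory` and models.

```python
from sqlalchemy import Column, Integer, String, create_engine, select
from sqlalchemy.orm import declarative_base, sessionmaker

Base = declarative_base()


class Document(Base):  # stand-in for the real Document model
    __tablename__ = "documents"
    id = Column(Integer, primary_key=True)
    indexing_status = Column(String, default="waiting")


engine = create_engine("sqlite://")  # in-memory database, demo only
Base.metadata.create_all(engine)
create_session = sessionmaker(bind=engine)

with create_session() as session:
    session.add(Document(id=1))
    session.commit()

# Equivalent shape to `with session_factory.create_session() as session, session.begin():`
# every change made inside the block is committed together when the block exits
# without an exception; an exception inside the block rolls everything back.
with create_session() as session, session.begin():
    doc = session.get(Document, 1)
    doc.indexing_status = "parsing"  # no explicit session.commit() needed anymore

with create_session() as session:
    assert session.scalar(select(Document.indexing_status).where(Document.id == 1)) == "parsing"
```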
@@ -1,197 +0,0 @@
from unittest.mock import MagicMock, patch

import pytest

from core.tools.entities.tool_entities import ToolProviderType
from core.workflow.nodes.agent.agent_node import AgentNode


class TestInferToolProviderType:
"""Test cases for AgentNode._infer_tool_provider_type method."""

def test_infer_type_from_config_workflow(self):
"""Test inferring workflow provider type from config."""
tool_config = {
"type": "workflow",
"provider_name": "workflow-provider-id",
}
tenant_id = "test-tenant"

result = AgentNode._infer_tool_provider_type(tool_config, tenant_id)

assert result == ToolProviderType.WORKFLOW

def test_infer_type_from_config_builtin(self):
"""Test inferring builtin provider type from config."""
tool_config = {
"type": "builtin",
"provider_name": "builtin-provider-id",
}
tenant_id = "test-tenant"

result = AgentNode._infer_tool_provider_type(tool_config, tenant_id)

assert result == ToolProviderType.BUILT_IN

def test_infer_type_from_config_api(self):
"""Test inferring API provider type from config."""
tool_config = {
"type": "api",
"provider_name": "api-provider-id",
}
tenant_id = "test-tenant"

result = AgentNode._infer_tool_provider_type(tool_config, tenant_id)

assert result == ToolProviderType.API

def test_infer_type_from_config_mcp(self):
"""Test inferring MCP provider type from config."""
tool_config = {
"type": "mcp",
"provider_name": "mcp-provider-id",
}
tenant_id = "test-tenant"

result = AgentNode._infer_tool_provider_type(tool_config, tenant_id)

assert result == ToolProviderType.MCP

def test_infer_type_invalid_config_value_raises_error(self):
"""Test that invalid type value in config raises ValueError."""
tool_config = {
"type": "invalid-type",
"provider_name": "workflow-provider-id",
}
tenant_id = "test-tenant"

with pytest.raises(ValueError):
AgentNode._infer_tool_provider_type(tool_config, tenant_id)

def test_infer_workflow_type_from_database(self):
"""Test inferring workflow provider type from database."""
tool_config = {
"provider_name": "workflow-provider-id",
}
tenant_id = "test-tenant"

with patch("core.db.session_factory.session_factory.create_session") as mock_create_session:
mock_session = MagicMock()
mock_create_session.return_value.__enter__.return_value = mock_session

# First query (WorkflowToolProvider) returns a result
mock_session.scalar.return_value = True

result = AgentNode._infer_tool_provider_type(tool_config, tenant_id)

assert result == ToolProviderType.WORKFLOW
# Should only query once (after finding WorkflowToolProvider)
assert mock_session.scalar.call_count == 1

def test_infer_mcp_type_from_database(self):
"""Test inferring MCP provider type from database."""
tool_config = {
"provider_name": "mcp-provider-id",
}
tenant_id = "test-tenant"

with patch("core.db.session_factory.session_factory.create_session") as mock_create_session:
mock_session = MagicMock()
mock_create_session.return_value.__enter__.return_value = mock_session

# First query (WorkflowToolProvider) returns None
# Second query (MCPToolProvider) returns a result
mock_session.scalar.side_effect = [None, True]

result = AgentNode._infer_tool_provider_type(tool_config, tenant_id)

assert result == ToolProviderType.MCP
assert mock_session.scalar.call_count == 2

def test_infer_api_type_from_database(self):
"""Test inferring API provider type from database."""
tool_config = {
"provider_name": "api-provider-id",
}
tenant_id = "test-tenant"

with patch("core.db.session_factory.session_factory.create_session") as mock_create_session:
mock_session = MagicMock()
mock_create_session.return_value.__enter__.return_value = mock_session

# First query (WorkflowToolProvider) returns None
# Second query (MCPToolProvider) returns None
# Third query (ApiToolProvider) returns a result
mock_session.scalar.side_effect = [None, None, True]

result = AgentNode._infer_tool_provider_type(tool_config, tenant_id)

assert result == ToolProviderType.API
assert mock_session.scalar.call_count == 3

def test_infer_builtin_type_from_database(self):
"""Test inferring builtin provider type from database."""
tool_config = {
"provider_name": "builtin-provider-id",
}
tenant_id = "test-tenant"

with patch("core.db.session_factory.session_factory.create_session") as mock_create_session:
mock_session = MagicMock()
mock_create_session.return_value.__enter__.return_value = mock_session

# First three queries return None
# Fourth query (BuiltinToolProvider) returns a result
mock_session.scalar.side_effect = [None, None, None, True]

result = AgentNode._infer_tool_provider_type(tool_config, tenant_id)

assert result == ToolProviderType.BUILT_IN
assert mock_session.scalar.call_count == 4

def test_infer_type_default_when_not_found(self):
"""Test raising AgentNodeError when provider is not found in database."""
tool_config = {
"provider_name": "unknown-provider-id",
}
tenant_id = "test-tenant"

with patch("core.db.session_factory.session_factory.create_session") as mock_create_session:
mock_session = MagicMock()
mock_create_session.return_value.__enter__.return_value = mock_session

# All queries return None
mock_session.scalar.return_value = None

# Current implementation raises AgentNodeError when provider not found
from core.workflow.nodes.agent.exc import AgentNodeError

with pytest.raises(AgentNodeError, match="Tool provider with ID 'unknown-provider-id' not found"):
AgentNode._infer_tool_provider_type(tool_config, tenant_id)

def test_infer_type_default_when_no_provider_name(self):
"""Test defaulting to BUILT_IN when provider_name is missing."""
tool_config = {}
tenant_id = "test-tenant"

result = AgentNode._infer_tool_provider_type(tool_config, tenant_id)

assert result == ToolProviderType.BUILT_IN

def test_infer_type_database_exception_propagates(self):
"""Test that database exception propagates (current implementation doesn't catch it)."""
tool_config = {
"provider_name": "provider-id",
}
tenant_id = "test-tenant"

with patch("core.db.session_factory.session_factory.create_session") as mock_create_session:
mock_session = MagicMock()
mock_create_session.return_value.__enter__.return_value = mock_session

# Database query raises exception
mock_session.scalar.side_effect = Exception("Database error")

# Current implementation doesn't catch exceptions, so it propagates
with pytest.raises(Exception, match="Database error"):
AgentNode._infer_tool_provider_type(tool_config, tenant_id)

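The deleted module only exercised the database fallback that no longer exists. A hedged sketch of what equivalent coverage for the new, config-only resolution could look like is below; it tests the expression from the diff in isolation rather than through AgentNode, so the local helper and test names are illustrative, not the repository's, while the import path and enum members come from the deleted tests above.

```python
import pytest

from core.tools.entities.tool_entities import ToolProviderType


def resolve_provider_type(tool: dict) -> ToolProviderType:
    # The expression the diff now uses inside AgentNode's tool loop.
    return ToolProviderType(tool.get("type", ToolProviderType.BUILT_IN))


@pytest.mark.parametrize(
    ("tool", "expected"),
    [
        ({"type": "workflow"}, ToolProviderType.WORKFLOW),
        ({"type": "mcp"}, ToolProviderType.MCP),
        ({"type": "api"}, ToolProviderType.API),
        ({"type": "builtin"}, ToolProviderType.BUILT_IN),
        ({}, ToolProviderType.BUILT_IN),  # missing type falls back to builtin
    ],
)
def test_resolve_provider_type(tool, expected):
    assert resolve_provider_type(tool) == expected


def test_unknown_type_raises_value_error():
    with pytest.raises(ValueError):
        resolve_provider_type({"type": "invalid-type"})
```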
@@ -114,6 +114,21 @@ def mock_db_session():
session = MagicMock()
# Ensure tests can observe session.close() via context manager teardown
session.close = MagicMock()
session.commit = MagicMock()

# Mock session.begin() context manager to auto-commit on exit
begin_cm = MagicMock()
begin_cm.__enter__.return_value = session

def _begin_exit_side_effect(*args, **kwargs):
# session.begin().__exit__() should commit if no exception
if args[0] is None:  # No exception
session.commit()

begin_cm.__exit__.side_effect = _begin_exit_side_effect
session.begin.return_value = begin_cm

# Mock create_session() context manager
cm = MagicMock()
cm.__enter__.return_value = session

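To see how those pieces fit together, here is a minimal self-contained sketch of such a fixture: the MagicMock session works both through `with create_session() as session:` and through `session.begin()`, and the `begin()` context manager records a commit when the block exits cleanly. The fixture and test names here are illustrative; only the mocking pattern mirrors the diff.

```python
from unittest.mock import MagicMock

import pytest


@pytest.fixture
def mock_db_session():
    """A MagicMock session usable as create_session() and session.begin() context managers."""
    session = MagicMock()
    session.close = MagicMock()
    session.commit = MagicMock()

    # session.begin() returns a context manager that commits on a clean exit.
    begin_cm = MagicMock()
    begin_cm.__enter__.return_value = session

    def _begin_exit(*args, **kwargs):
        if args and args[0] is None:  # no exception raised inside the block
            session.commit()

    begin_cm.__exit__.side_effect = _begin_exit
    session.begin.return_value = begin_cm

    # create_session() returns a context manager that yields the session and closes it.
    cm = MagicMock()
    cm.__enter__.return_value = session

    def _cm_exit(*args, **kwargs):
        session.close()
        return None  # do not suppress exceptions

    cm.__exit__.side_effect = _cm_exit
    create_session = MagicMock(return_value=cm)
    return create_session, session


def test_begin_commits_on_clean_exit(mock_db_session):
    create_session, session = mock_db_session
    with create_session() as s, s.begin():
        pass  # task body would go here
    session.commit.assert_called_once()
    session.close.assert_called_once()
```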
@@ -109,6 +109,7 @@ const AgentTools: FC = () => {
tool_parameters: paramsWithDefaultValue,
notAuthor: !tool.is_team_authorization,
enabled: true,
type: tool.provider_type as CollectionType,
}
}
const handleSelectTool = (tool: ToolDefaultValue) => {

@@ -129,6 +129,7 @@ export const useToolSelectorState = ({
extra: {
description: tool.tool_description,
},
type: tool.provider_type,
}
}, [])

@@ -87,6 +87,7 @@ export type ToolValue = {
enabled?: boolean
extra?: { description?: string } & Record<string, unknown>
credential_id?: string
type?: string
}

export type DataSourceItem = {