Compare commits

..

22 Commits

Author SHA1 Message Date
twwu
3f63d3aa45 fix: add invalidCustomizedTemplateList to dependencies in Popup component 2025-09-18 18:17:34 +08:00
twwu
2c9c246052 refactor: replace useInvalid with useInvalidCustomizedTemplateList in pipeline components 2025-09-18 18:10:57 +08:00
quicksand
680eb7a9f6 fix(datasets): retrieval_model null issue when updating dataset info (#25907) 2025-09-18 17:58:06 +08:00
crazywoola
878420463c fix: Message => str (#25876) 2025-09-18 17:57:57 +08:00
zxhlyh
4692e20daf fix: workflow header style (#25922) 2025-09-18 17:53:40 +08:00
QuantumGhost
13fe2ca8fe fix(api): fix single stepping variable loading (#25908) 2025-09-18 17:30:02 +08:00
zxhlyh
1264e7d4f6 fix: use invalid last run (#25911) 2025-09-18 16:52:27 +08:00
Yunlu Wen
4f45978cd9 fix: remote code execution in email endpoints (#25753)
Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com>
2025-09-18 16:45:34 +08:00
Saurabh Singh
5a0bf8e028 feat: make SQLALCHEMY_POOL_TIMEOUT configurable (#25468)
Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com>
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-09-18 16:13:56 +08:00
Wu Tianwei
ffa163a8a8 refactor: simplify portal interactions and manage state in Configure component (#25906)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-09-18 15:57:33 +08:00
Novice
8f86f5749d chore: Update the value of sys.dialogue_count to start from 1. (#25905) 2025-09-18 15:52:52 +08:00
17hz
00d3bf15f3 perf(web): optimize ESLint performance with concurrency flag and remove oxlint (#25899)
Co-authored-by: Claude <noreply@anthropic.com>
2025-09-18 15:50:42 +08:00
17hz
7196c09e9d chore(workflows): remove redundant eslint command from style workflow (#25900) 2025-09-18 15:50:09 +08:00
zxhlyh
fadd9e0bf4 fix: workflow logs list (#25903) 2025-09-18 15:45:37 +08:00
zxhlyh
d8b4bbe067 fix: datasource pinned list (#25896) 2025-09-18 14:52:33 +08:00
GuanMu
24611e375a fix: update Python base image to use bullseye variant (#25895) 2025-09-18 14:38:56 +08:00
lyzno1
ccec582cea chore: add missing template translations in ja-JP (#25892) 2025-09-18 14:37:26 +08:00
Bowen Liang
b2e4107c17 chore: improve opendal storage and ensure closing file after reading files in load_stream method (#25874) 2025-09-18 14:09:19 +08:00
quicksand
87aa070486 feat(api/commands): add migrate-oss to migrate from Local/OpenDAL to … (#25828) 2025-09-18 14:09:00 +08:00
Novice
21230a8eb2 fix: handle None description in MCP tool transformation (#25872)
Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com>
2025-09-18 13:11:38 +08:00
-LAN-
85cda47c70 feat: knowledge pipeline (#25360)
Signed-off-by: -LAN- <laipz8200@outlook.com>
Co-authored-by: twwu <twwu@dify.ai>
Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com>
Co-authored-by: jyong <718720800@qq.com>
Co-authored-by: Wu Tianwei <30284043+WTW0313@users.noreply.github.com>
Co-authored-by: QuantumGhost <obelisk.reg+git@gmail.com>
Co-authored-by: lyzno1 <yuanyouhuilyz@gmail.com>
Co-authored-by: quicksand <quicksandzn@gmail.com>
Co-authored-by: Jyong <76649700+JohnJyong@users.noreply.github.com>
Co-authored-by: lyzno1 <92089059+lyzno1@users.noreply.github.com>
Co-authored-by: zxhlyh <jasonapring2015@outlook.com>
Co-authored-by: Yongtao Huang <yongtaoh2022@gmail.com>
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
Co-authored-by: Joel <iamjoel007@gmail.com>
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
Co-authored-by: nite-knite <nkCoding@gmail.com>
Co-authored-by: Hanqing Zhao <sherry9277@gmail.com>
Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com>
Co-authored-by: Harry <xh001x@hotmail.com>
2025-09-18 12:49:10 +08:00
zyssyz123
7dadb33003 fix: remove billing cache when add or delete app or member (#25885)
Some checks are pending
Build and Push API & Web / build (api, DIFY_API_IMAGE_NAME, linux/amd64, build-api-amd64) (push) Waiting to run
Build and Push API & Web / build (api, DIFY_API_IMAGE_NAME, linux/arm64, build-api-arm64) (push) Waiting to run
Build and Push API & Web / build (web, DIFY_WEB_IMAGE_NAME, linux/amd64, build-web-amd64) (push) Waiting to run
Build and Push API & Web / build (web, DIFY_WEB_IMAGE_NAME, linux/arm64, build-web-arm64) (push) Waiting to run
Build and Push API & Web / create-manifest (api, DIFY_API_IMAGE_NAME, merge-api-images) (push) Blocked by required conditions
Build and Push API & Web / create-manifest (web, DIFY_WEB_IMAGE_NAME, merge-web-images) (push) Blocked by required conditions
Main CI Pipeline / Check Changed Files (push) Waiting to run
Main CI Pipeline / API Tests (push) Blocked by required conditions
Main CI Pipeline / Web Tests (push) Blocked by required conditions
Main CI Pipeline / Style Check (push) Waiting to run
Main CI Pipeline / VDB Tests (push) Blocked by required conditions
Main CI Pipeline / DB Migration Test (push) Blocked by required conditions
2025-09-18 12:18:07 +08:00
60 changed files with 1146 additions and 1925 deletions

View File

@@ -1,4 +1,4 @@
FROM mcr.microsoft.com/devcontainers/python:3.12
FROM mcr.microsoft.com/devcontainers/python:3.12-bullseye
RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
&& apt-get -y install libgmp-dev libmpfr-dev libmpc-dev

View File

@@ -102,7 +102,6 @@ jobs:
working-directory: ./web
run: |
pnpm run lint
pnpm run eslint
docker-compose-template:
name: Docker Compose Template

View File

@@ -76,6 +76,7 @@ DB_HOST=localhost
DB_PORT=5432
DB_DATABASE=dify
SQLALCHEMY_POOL_PRE_PING=true
SQLALCHEMY_POOL_TIMEOUT=30
# Storage configuration
# use for store upload files, private keys...

View File

@@ -25,13 +25,15 @@ from events.app_event import app_was_created
from extensions.ext_database import db
from extensions.ext_redis import redis_client
from extensions.ext_storage import storage
from extensions.storage.opendal_storage import OpenDALStorage
from extensions.storage.storage_type import StorageType
from libs.helper import email as email_validate
from libs.password import hash_password, password_pattern, valid_password
from libs.rsa import generate_key_pair
from models import Tenant
from models.dataset import Dataset, DatasetCollectionBinding, DatasetMetadata, DatasetMetadataBinding, DocumentSegment
from models.dataset import Document as DatasetDocument
from models.model import Account, App, AppAnnotationSetting, AppMode, Conversation, MessageAnnotation
from models.model import Account, App, AppAnnotationSetting, AppMode, Conversation, MessageAnnotation, UploadFile
from models.oauth import DatasourceOauthParamConfig, DatasourceProvider
from models.provider import Provider, ProviderModel
from models.provider_ids import DatasourceProviderID, ToolProviderID
@@ -1597,3 +1599,197 @@ def install_rag_pipeline_plugins(input_file, output_file, workers):
workers,
)
click.echo(click.style("Installing rag pipeline plugins successfully", fg="green"))
@click.command(
"migrate-oss",
help="Migrate files from Local or OpenDAL source to a cloud OSS storage (destination must NOT be local/opendal).",
)
@click.option(
"--path",
"paths",
multiple=True,
help="Storage path prefixes to migrate (repeatable). Defaults: privkeys, upload_files, image_files,"
" tools, website_files, keyword_files, ops_trace",
)
@click.option(
"--source",
type=click.Choice(["local", "opendal"], case_sensitive=False),
default="opendal",
show_default=True,
help="Source storage type to read from",
)
@click.option("--overwrite", is_flag=True, default=False, help="Overwrite destination if file already exists")
@click.option("--dry-run", is_flag=True, default=False, help="Show what would be migrated without uploading")
@click.option("-f", "--force", is_flag=True, help="Skip confirmation and run without prompts")
@click.option(
"--update-db/--no-update-db",
default=True,
help="Update upload_files.storage_type from source type to current storage after migration",
)
def migrate_oss(
paths: tuple[str, ...],
source: str,
overwrite: bool,
dry_run: bool,
force: bool,
update_db: bool,
) -> None:
"""
Copy all files under selected prefixes from a source storage
(Local filesystem or OpenDAL-backed) into the currently configured
destination storage backend, then optionally update DB records.
Per-key read/write failures are counted and reported in the summary
but do not abort the run.
Expected usage: set STORAGE_TYPE (and its credentials) to your target backend.
"""
# Refuse to run when the destination itself is local/opendal: this command
# only migrates *away* from those backends.
if dify_config.STORAGE_TYPE in (StorageType.LOCAL, StorageType.OPENDAL):
click.echo(
click.style(
"Target STORAGE_TYPE must be a cloud OSS (not 'local' or 'opendal').\n"
"Please set STORAGE_TYPE to one of: s3, aliyun-oss, azure-blob, google-storage, tencent-cos, \n"
"volcengine-tos, supabase, oci-storage, huawei-obs, baidu-obs, clickzetta-volume.",
fg="red",
)
)
return
# Default path prefixes if none were specified on the command line
default_paths = ("privkeys", "upload_files", "image_files", "tools", "website_files", "keyword_files", "ops_trace")
path_list = list(paths) if paths else list(default_paths)
is_source_local = source.lower() == "local"
# Echo the migration plan so the operator can sanity-check before confirming.
click.echo(click.style("Preparing migration to target storage.", fg="yellow"))
click.echo(click.style(f"Target storage type: {dify_config.STORAGE_TYPE}", fg="white"))
if is_source_local:
src_root = dify_config.STORAGE_LOCAL_PATH
click.echo(click.style(f"Source: local fs, root: {src_root}", fg="white"))
else:
click.echo(click.style(f"Source: opendal scheme={dify_config.OPENDAL_SCHEME}", fg="white"))
click.echo(click.style(f"Paths to migrate: {', '.join(path_list)}", fg="white"))
click.echo("")
# Interactive confirmation unless -f/--force was given.
if not force:
click.confirm("Proceed with migration?", abort=True)
# Instantiate source storage. A local-filesystem source is wrapped in
# OpenDAL's "fs" scheme so both source kinds share one read interface.
try:
if is_source_local:
src_root = dify_config.STORAGE_LOCAL_PATH
source_storage = OpenDALStorage(scheme="fs", root=src_root)
else:
source_storage = OpenDALStorage(scheme=dify_config.OPENDAL_SCHEME)
except Exception as e:
click.echo(click.style(f"Failed to initialize source storage: {str(e)}", fg="red"))
return
# Per-run counters for the final summary.
total_files = 0
copied_files = 0
skipped_files = 0
errored_files = 0
# upload_files keys actually copied; drives the optional DB update below.
copied_upload_file_keys: list[str] = []
for prefix in path_list:
click.echo(click.style(f"Scanning source path: {prefix}", fg="white"))
try:
keys = source_storage.scan(path=prefix, files=True, directories=False)
except FileNotFoundError:
# A missing prefix is not fatal; move on to the next one.
click.echo(click.style(f" -> Skipping missing path: {prefix}", fg="yellow"))
continue
except NotImplementedError:
click.echo(click.style(" -> Source storage does not support scanning.", fg="red"))
return
except Exception as e:
click.echo(click.style(f" -> Error scanning '{prefix}': {str(e)}", fg="red"))
continue
click.echo(click.style(f"Found {len(keys)} files under {prefix}", fg="white"))
for key in keys:
total_files += 1
# check destination existence; skip already-present keys unless --overwrite
if not overwrite:
try:
if storage.exists(key):
skipped_files += 1
continue
except Exception as e:
# existence check failures should not block migration attempt
# but should be surfaced to user as a warning for visibility
click.echo(
click.style(
f" -> Warning: failed target existence check for {key}: {str(e)}",
fg="yellow",
)
)
# In dry-run mode, count the file as would-be-copied without touching storage.
if dry_run:
copied_files += 1
continue
# read from source and write to destination
try:
data = source_storage.load_once(key)
except FileNotFoundError:
errored_files += 1
click.echo(click.style(f" -> Missing on source: {key}", fg="yellow"))
continue
except Exception as e:
errored_files += 1
click.echo(click.style(f" -> Error reading {key}: {str(e)}", fg="red"))
continue
try:
storage.save(key, data)
copied_files += 1
if prefix == "upload_files":
copied_upload_file_keys.append(key)
except Exception as e:
errored_files += 1
click.echo(click.style(f" -> Error writing {key} to target: {str(e)}", fg="red"))
continue
# Final summary for the operator.
click.echo("")
click.echo(click.style("Migration summary:", fg="yellow"))
click.echo(click.style(f" Total: {total_files}", fg="white"))
click.echo(click.style(f" Copied: {copied_files}", fg="green"))
click.echo(click.style(f" Skipped: {skipped_files}", fg="white"))
if errored_files:
click.echo(click.style(f" Errors: {errored_files}", fg="red"))
if dry_run:
click.echo(click.style("Dry-run complete. No changes were made.", fg="green"))
return
if errored_files:
click.echo(
click.style(
"Some files failed to migrate. Review errors above before updating DB records.",
fg="yellow",
)
)
# With errors present, require an explicit confirmation before touching the DB.
if update_db and not force:
if not click.confirm("Proceed to update DB storage_type despite errors?", default=False):
update_db = False
# Optionally update DB records for upload_files.storage_type (only for successfully copied upload_files)
if update_db:
if not copied_upload_file_keys:
click.echo(click.style("No upload_files copied. Skipping DB storage_type update.", fg="yellow"))
else:
try:
source_storage_type = StorageType.LOCAL if is_source_local else StorageType.OPENDAL
updated = (
db.session.query(UploadFile)
.where(
UploadFile.storage_type == source_storage_type,
UploadFile.key.in_(copied_upload_file_keys),
)
.update({UploadFile.storage_type: dify_config.STORAGE_TYPE}, synchronize_session=False)
)
db.session.commit()
click.echo(click.style(f"Updated storage_type for {updated} upload_files records.", fg="green"))
except Exception as e:
# Roll back so a failed bulk update leaves no partial change behind.
db.session.rollback()
click.echo(click.style(f"Failed to update DB storage_type: {str(e)}", fg="red"))

View File

@@ -1,3 +1,4 @@
from enum import StrEnum
from typing import Literal
from pydantic import (
@@ -711,11 +712,35 @@ class ToolConfig(BaseSettings):
)
class TemplateMode(StrEnum):
"""Rendering strategy for templated email bodies (used by MAIL_TEMPLATING_MODE)."""
# unsafe mode allows flexible operations in templates, but may cause security vulnerabilities
UNSAFE = "unsafe"
# sandbox mode restricts some unsafe operations like accessing __class__.
# however, it is still not 100% safe; for example, CPU exhaustion can still happen.
SANDBOX = "sandbox"
# templating is disabled; bodies are used as-is without substitution
DISABLED = "disabled"
class MailConfig(BaseSettings):
"""
Configuration for email services
"""
MAIL_TEMPLATING_MODE: TemplateMode = Field(
description="Template mode for email services",
default=TemplateMode.SANDBOX,
)
MAIL_TEMPLATING_TIMEOUT: int = Field(
description="""
Timeout for email templating in seconds. Used to prevent infinite loops in malicious templates.
Only available in sandbox mode.""",
default=3,
)
MAIL_TYPE: str | None = Field(
description="Email service provider type ('smtp' or 'resend' or 'sendGrid), default to None.",
default=None,

View File

@@ -187,6 +187,11 @@ class DatabaseConfig(BaseSettings):
default=False,
)
SQLALCHEMY_POOL_TIMEOUT: NonNegativeInt = Field(
description="Number of seconds to wait for a connection from the pool before raising a timeout error.",
default=30,
)
RETRIEVAL_SERVICE_EXECUTORS: NonNegativeInt = Field(
description="Number of processes for the retrieval service, default to CPU cores.",
default=os.cpu_count() or 1,
@@ -216,6 +221,7 @@ class DatabaseConfig(BaseSettings):
"connect_args": connect_args,
"pool_use_lifo": self.SQLALCHEMY_POOL_USE_LIFO,
"pool_reset_on_return": None,
"pool_timeout": self.SQLALCHEMY_POOL_TIMEOUT,
}

View File

@@ -261,6 +261,8 @@ class MessageSuggestedQuestionApi(WebApiResource):
questions = MessageService.get_suggested_questions_after_answer(
app_model=app_model, user=end_user, message_id=message_id, invoke_from=InvokeFrom.WEB_APP
)
# questions is a list of strings, not a list of Message objects
# so we can directly return it
except MessageNotExistsError:
raise NotFound("Message not found")
except ConversationNotExistsError:

View File

@@ -420,7 +420,9 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
db.session.refresh(conversation)
# get conversation dialogue count
self._dialogue_count = get_thread_messages_length(conversation.id)
# NOTE: dialogue_count should not start from 0,
# because during the first conversation, dialogue_count should be 1.
self._dialogue_count = get_thread_messages_length(conversation.id) + 1
# init queue manager
queue_manager = MessageBasedAppQueueManager(

View File

@@ -178,6 +178,7 @@ class PluginDependency(BaseModel):
class Marketplace(BaseModel):
marketplace_plugin_unique_identifier: str
version: str | None = None
@property
def plugin_unique_identifier(self) -> str:
@@ -185,6 +186,7 @@ class PluginDependency(BaseModel):
class Package(BaseModel):
plugin_unique_identifier: str
version: str | None = None
type: Type
value: Github | Marketplace | Package

View File

@@ -219,7 +219,7 @@ class LLMNode(Node):
model_instance=model_instance,
)
query = None
query: str | None = None
if self._node_data.memory:
query = self._node_data.memory.query_prompt_template
if not query and (

View File

@@ -402,6 +402,8 @@ class WorkflowEntry:
input_value = user_inputs.get(node_variable)
if not input_value:
input_value = user_inputs.get(node_variable_key)
if input_value is None:
continue
if isinstance(input_value, dict) and "type" in input_value and "transfer_method" in input_value:
input_value = file_factory.build_from_mapping(mapping=input_value, tenant_id=tenant_id)

View File

@@ -15,6 +15,7 @@ def init_app(app: DifyApp):
install_plugins,
install_rag_pipeline_plugins,
migrate_data_for_plugin,
migrate_oss,
old_metadata_migration,
remove_orphaned_files_on_storage,
reset_email,
@@ -47,6 +48,7 @@ def init_app(app: DifyApp):
remove_orphaned_files_on_storage,
setup_system_tool_oauth_client,
cleanup_orphaned_draft_variables,
migrate_oss,
setup_datasource_oauth_client,
transform_datasource_credentials,
install_rag_pipeline_plugins,

View File

@@ -3,8 +3,9 @@ import os
from collections.abc import Generator
from pathlib import Path
import opendal # type: ignore[import]
from dotenv import dotenv_values
from opendal import Operator
from opendal.layers import RetryLayer
from extensions.storage.base_storage import BaseStorage
@@ -34,10 +35,9 @@ class OpenDALStorage(BaseStorage):
root = kwargs.get("root", "storage")
Path(root).mkdir(parents=True, exist_ok=True)
self.op = opendal.Operator(scheme=scheme, **kwargs) # type: ignore
retry_layer = RetryLayer(max_times=3, factor=2.0, jitter=True)
self.op = Operator(scheme=scheme, **kwargs).layer(retry_layer)
logger.debug("opendal operator created with scheme %s", scheme)
retry_layer = opendal.layers.RetryLayer(max_times=3, factor=2.0, jitter=True)
self.op = self.op.layer(retry_layer)
logger.debug("added retry layer to opendal operator")
def save(self, filename: str, data: bytes):
@@ -57,22 +57,24 @@ class OpenDALStorage(BaseStorage):
raise FileNotFoundError("File not found")
batch_size = 4096
file = self.op.open(path=filename, mode="rb")
while chunk := file.read(batch_size):
yield chunk
with self.op.open(
path=filename,
mode="rb",
chunck=batch_size,
) as file:
while chunk := file.read(batch_size):
yield chunk
logger.debug("file %s loaded as stream", filename)
def download(self, filename: str, target_filepath: str):
if not self.exists(filename):
raise FileNotFoundError("File not found")
with Path(target_filepath).open("wb") as f:
f.write(self.op.read(path=filename))
Path(target_filepath).write_bytes(self.op.read(path=filename))
logger.debug("file %s downloaded to %s", filename, target_filepath)
def exists(self, filename: str) -> bool:
res: bool = self.op.exists(path=filename)
return res
return self.op.exists(path=filename)
def delete(self, filename: str):
if self.exists(filename):
@@ -85,7 +87,7 @@ class OpenDALStorage(BaseStorage):
if not self.exists(path):
raise FileNotFoundError("Path not found")
all_files = self.op.scan(path=path)
all_files = self.op.list(path=path)
if files and directories:
logger.debug("files and directories on %s scanned", path)
return [f.path for f in all_files]

View File

@@ -156,7 +156,7 @@ def upgrade():
sa.Column('type', sa.String(20), nullable=False),
sa.Column('file_id', models.types.StringUUID(), nullable=False),
sa.PrimaryKeyConstraint('id', name=op.f('workflow_node_execution_offload_pkey')),
sa.UniqueConstraint('node_execution_id', 'type', name=op.f('workflow_node_execution_offload_node_execution_id_key'), postgresql_nulls_not_distinct=False)
sa.UniqueConstraint('node_execution_id', 'type', name=op.f('workflow_node_execution_offload_node_execution_id_key'))
)
with op.batch_alter_table('datasets', schema=None) as batch_op:
batch_op.add_column(sa.Column('keyword_number', sa.Integer(), server_default=sa.text('10'), nullable=True))

View File

@@ -890,12 +890,18 @@ class WorkflowNodeExecutionModel(Base): # This model is expected to have `offlo
class WorkflowNodeExecutionOffload(Base):
__tablename__ = "workflow_node_execution_offload"
__table_args__ = (
# PostgreSQL 14 treats NULL values as distinct in unique constraints by default,
# allowing multiple records with NULL values for the same column combination.
#
# This behavior allows us to have multiple records with NULL node_execution_id,
# simplifying garbage collection process.
UniqueConstraint(
"node_execution_id",
"type",
# Treat `NULL` as distinct for this unique index, so
# we can have multiple records with `NULL` node_execution_id, simplifying the garbage collection process.
postgresql_nulls_not_distinct=False,
# Note: PostgreSQL 15+ supports explicit `nulls distinct` behavior through
# `postgresql_nulls_not_distinct=False`, which would make our intention clearer.
# We rely on PostgreSQL's default behavior of treating NULLs as distinct values.
# postgresql_nulls_not_distinct=False,
),
)
_HASH_COL_SIZE = 64

View File

@@ -183,7 +183,7 @@ storage = [
"cos-python-sdk-v5==1.9.30",
"esdk-obs-python==3.24.6.1",
"google-cloud-storage==2.16.0",
"opendal~=0.45.16",
"opendal~=0.46.0",
"oss2==2.18.5",
"supabase~=2.18.1",
"tos~=2.7.1",

View File

@@ -1041,6 +1041,8 @@ class TenantService:
db.session.add(ta)
db.session.commit()
if dify_config.BILLING_ENABLED:
BillingService.clean_billing_info_cache(tenant.id)
return ta
@staticmethod
@@ -1199,6 +1201,9 @@ class TenantService:
db.session.delete(ta)
db.session.commit()
if dify_config.BILLING_ENABLED:
BillingService.clean_billing_info_cache(tenant.id)
@staticmethod
def update_member_role(tenant: Tenant, member: Account, new_role: str, operator: Account):
"""Update member role"""

View File

@@ -20,6 +20,7 @@ from libs.login import current_user
from models.account import Account
from models.model import App, AppMode, AppModelConfig, Site
from models.tools import ApiToolProvider
from services.billing_service import BillingService
from services.enterprise.enterprise_service import EnterpriseService
from services.feature_service import FeatureService
from services.tag_service import TagService
@@ -162,6 +163,9 @@ class AppService:
# update web app setting as private
EnterpriseService.WebAppAuth.update_app_access_mode(app.id, "private")
if dify_config.BILLING_ENABLED:
BillingService.clean_billing_info_cache(app.tenant_id)
return app
def get_app(self, app: App) -> App:
@@ -337,6 +341,9 @@ class AppService:
if FeatureService.get_system_features().webapp_auth.enabled:
EnterpriseService.WebAppAuth.cleanup_webapp(app.id)
if dify_config.BILLING_ENABLED:
BillingService.clean_billing_info_cache(app.tenant_id)
# Trigger asynchronous deletion of app and related data
remove_app_and_related_data_task.delay(tenant_id=app.tenant_id, app_id=app.id)

View File

@@ -5,6 +5,7 @@ import httpx
from tenacity import retry, retry_if_exception_type, stop_before_delay, wait_fixed
from extensions.ext_database import db
from extensions.ext_redis import redis_client
from libs.helper import RateLimiter
from models.account import Account, TenantAccountJoin, TenantAccountRole
@@ -173,3 +174,7 @@ class BillingService:
res = cls._send_request("POST", "/compliance/download", json=json)
cls.compliance_download_rate_limiter.increment_rate_limit(limiter_key)
return res
@classmethod
def clean_billing_info_cache(cls, tenant_id: str):
    """Drop the cached billing info for a tenant so the next read refetches it."""
    cache_key = f"tenant:{tenant_id}:billing_info"
    redis_client.delete(cache_key)

View File

@@ -532,7 +532,8 @@ class DatasetService:
filtered_data["updated_by"] = user.id
filtered_data["updated_at"] = naive_utc_now()
# update Retrieval model
filtered_data["retrieval_model"] = data["retrieval_model"]
if data.get("retrieval_model"):
filtered_data["retrieval_model"] = data["retrieval_model"]
# update icon info
if data.get("icon_info"):
filtered_data["icon_info"] = data.get("icon_info")

View File

@@ -217,7 +217,7 @@ class MessageService:
@classmethod
def get_suggested_questions_after_answer(
cls, app_model: App, user: Union[Account, EndUser] | None, message_id: str, invoke_from: InvokeFrom
) -> list[Message]:
) -> list[str]:
if not user:
raise ValueError("user cannot be None")
@@ -288,7 +288,7 @@ class MessageService:
)
with measure_time() as timer:
questions: list[Message] = LLMGenerator.generate_suggested_questions_after_answer(
questions: list[str] = LLMGenerator.generate_suggested_questions_after_answer(
tenant_id=app_model.tenant_id, histories=histories
)

View File

@@ -1,9 +1,14 @@
import re
from configs import dify_config
from core.helper import marketplace
from core.plugin.entities.plugin import PluginDependency, PluginInstallationSource
from core.plugin.impl.plugin import PluginInstaller
from models.provider_ids import ModelProviderID, ToolProviderID
# Compile regex pattern for version extraction at module level for better performance
_VERSION_REGEX = re.compile(r":(?P<version>[0-9]+(?:\.[0-9]+){2}(?:[+-][0-9A-Za-z.-]+)?)(?:@|$)")
class DependenciesAnalysisService:
@classmethod
@@ -49,6 +54,13 @@ class DependenciesAnalysisService:
for dependency in dependencies:
unique_identifier = dependency.value.plugin_unique_identifier
if unique_identifier in missing_plugin_unique_identifiers:
# Extract version for Marketplace dependencies
if dependency.type == PluginDependency.Type.Marketplace:
version_match = _VERSION_REGEX.search(unique_identifier)
if version_match:
dependency.value.version = version_match.group("version")
# Create and append the dependency (same for all types)
leaked_dependencies.append(
PluginDependency(
type=dependency.type,

View File

@@ -262,7 +262,7 @@ class ToolTransformService:
author=user.name if user else "Anonymous",
name=tool.name,
label=I18nObject(en_US=tool.name, zh_Hans=tool.name),
description=I18nObject(en_US=tool.description, zh_Hans=tool.description),
description=I18nObject(en_US=tool.description or "", zh_Hans=tool.description or ""),
parameters=ToolTransformService.convert_mcp_schema_to_parameter(tool.inputSchema),
labels=[],
)

View File

@@ -1008,7 +1008,7 @@ def _setup_variable_pool(
if workflow.type != WorkflowType.WORKFLOW.value:
system_variable.query = query
system_variable.conversation_id = conversation_id
system_variable.dialogue_count = 0
system_variable.dialogue_count = 1
else:
system_variable = SystemVariable.empty()

View File

@@ -1,17 +1,46 @@
import logging
import time
from collections.abc import Mapping
from typing import Any
import click
from celery import shared_task
from flask import render_template_string
from jinja2.runtime import Context
from jinja2.sandbox import ImmutableSandboxedEnvironment
from configs import dify_config
from configs.feature import TemplateMode
from extensions.ext_mail import mail
from libs.email_i18n import get_email_i18n_service
logger = logging.getLogger(__name__)
class SandboxedEnvironment(ImmutableSandboxedEnvironment):
"""Jinja2 immutable sandbox with a wall-clock rendering deadline.

The deadline is fixed at construction time (`time.time() + timeout`), so an
instance should be created immediately before rendering. The deadline is
re-checked on every template-level call, which bounds templates that loop by
invoking callables; loops that never call anything are not interrupted by
this check.
"""
def __init__(self, timeout: int, *args: Any, **kwargs: Any):
self._timeout_time = time.time() + timeout
super().__init__(*args, **kwargs)
def call(self, context: Context, obj: Any, *args: Any, **kwargs: Any) -> Any:
# Raise (rather than return) so rendering aborts promptly once the
# deadline has passed.
if time.time() > self._timeout_time:
raise TimeoutError("Template rendering timeout")
return super().call(context, obj, *args, **kwargs)
def _render_template_with_strategy(body: str, substitutions: Mapping[str, str]) -> str:
"""Render an email body according to the configured templating mode.

- UNSAFE: full Flask/Jinja rendering via render_template_string.
- SANDBOX: render inside SandboxedEnvironment with the configured timeout.
- DISABLED: return the body unchanged, with no substitution at all.

Raises ValueError for any unrecognized mode value.
"""
mode = dify_config.MAIL_TEMPLATING_MODE
timeout = dify_config.MAIL_TEMPLATING_TIMEOUT
if mode == TemplateMode.UNSAFE:
return render_template_string(body, **substitutions)
if mode == TemplateMode.SANDBOX:
tmpl = SandboxedEnvironment(timeout=timeout).from_string(body)
return tmpl.render(substitutions)
if mode == TemplateMode.DISABLED:
return body
raise ValueError(f"Unsupported mail templating mode: {mode}")
@shared_task(queue="mail")
def send_inner_email_task(to: list[str], subject: str, body: str, substitutions: Mapping[str, str]):
if not mail.is_inited():
@@ -21,7 +50,7 @@ def send_inner_email_task(to: list[str], subject: str, body: str, substitutions:
start_at = time.perf_counter()
try:
html_content = render_template_string(body, **substitutions)
html_content = _render_template_with_strategy(body, substitutions)
email_service = get_email_i18n_service()
email_service.send_raw_email(to=to, subject=subject, html_content=html_content)

View File

@@ -4,6 +4,7 @@ from unittest.mock import create_autospec, patch
import pytest
from faker import Faker
from sqlalchemy import Engine
from werkzeug.exceptions import NotFound
from configs import dify_config
@@ -17,6 +18,12 @@ from services.file_service import FileService
class TestFileService:
"""Integration tests for FileService using testcontainers."""
@pytest.fixture
def engine(self, db_session_with_containers):
bind = db_session_with_containers.get_bind()
assert isinstance(bind, Engine)
return bind
@pytest.fixture
def mock_external_service_dependencies(self):
"""Mock setup for external service dependencies."""
@@ -156,7 +163,7 @@ class TestFileService:
return upload_file
# Test upload_file method
def test_upload_file_success(self, db_session_with_containers, mock_external_service_dependencies):
def test_upload_file_success(self, db_session_with_containers, engine, mock_external_service_dependencies):
"""
Test successful file upload with valid parameters.
"""
@@ -167,7 +174,7 @@ class TestFileService:
content = b"test file content"
mimetype = "application/pdf"
upload_file = FileService.upload_file(
upload_file = FileService(engine).upload_file(
filename=filename,
content=content,
mimetype=mimetype,
@@ -187,13 +194,9 @@ class TestFileService:
# Verify storage was called
mock_external_service_dependencies["storage"].save.assert_called_once()
# Verify database state
from extensions.ext_database import db
db.session.refresh(upload_file)
assert upload_file.id is not None
def test_upload_file_with_end_user(self, db_session_with_containers, mock_external_service_dependencies):
def test_upload_file_with_end_user(self, db_session_with_containers, engine, mock_external_service_dependencies):
"""
Test file upload with end user instead of account.
"""
@@ -204,7 +207,7 @@ class TestFileService:
content = b"test image content"
mimetype = "image/jpeg"
upload_file = FileService.upload_file(
upload_file = FileService(engine).upload_file(
filename=filename,
content=content,
mimetype=mimetype,
@@ -215,7 +218,9 @@ class TestFileService:
assert upload_file.created_by == end_user.id
assert upload_file.created_by_role == CreatorUserRole.END_USER.value
def test_upload_file_with_datasets_source(self, db_session_with_containers, mock_external_service_dependencies):
def test_upload_file_with_datasets_source(
self, db_session_with_containers, engine, mock_external_service_dependencies
):
"""
Test file upload with datasets source parameter.
"""
@@ -226,7 +231,7 @@ class TestFileService:
content = b"test file content"
mimetype = "application/pdf"
upload_file = FileService.upload_file(
upload_file = FileService(engine).upload_file(
filename=filename,
content=content,
mimetype=mimetype,
@@ -239,7 +244,7 @@ class TestFileService:
assert upload_file.source_url == "https://example.com/source"
def test_upload_file_invalid_filename_characters(
self, db_session_with_containers, mock_external_service_dependencies
self, db_session_with_containers, engine, mock_external_service_dependencies
):
"""
Test file upload with invalid filename characters.
@@ -252,14 +257,16 @@ class TestFileService:
mimetype = "text/plain"
with pytest.raises(ValueError, match="Filename contains invalid characters"):
FileService.upload_file(
FileService(engine).upload_file(
filename=filename,
content=content,
mimetype=mimetype,
user=account,
)
def test_upload_file_filename_too_long(self, db_session_with_containers, mock_external_service_dependencies):
def test_upload_file_filename_too_long(
self, db_session_with_containers, engine, mock_external_service_dependencies
):
"""
Test file upload with filename that exceeds length limit.
"""
@@ -272,7 +279,7 @@ class TestFileService:
content = b"test content"
mimetype = "text/plain"
upload_file = FileService.upload_file(
upload_file = FileService(engine).upload_file(
filename=filename,
content=content,
mimetype=mimetype,
@@ -288,7 +295,7 @@ class TestFileService:
assert len(base_name) <= 200
def test_upload_file_datasets_unsupported_type(
self, db_session_with_containers, mock_external_service_dependencies
self, db_session_with_containers, engine, mock_external_service_dependencies
):
"""
Test file upload for datasets with unsupported file type.
@@ -301,7 +308,7 @@ class TestFileService:
mimetype = "image/jpeg"
with pytest.raises(UnsupportedFileTypeError):
FileService.upload_file(
FileService(engine).upload_file(
filename=filename,
content=content,
mimetype=mimetype,
@@ -309,7 +316,7 @@ class TestFileService:
source="datasets",
)
def test_upload_file_too_large(self, db_session_with_containers, mock_external_service_dependencies):
def test_upload_file_too_large(self, db_session_with_containers, engine, mock_external_service_dependencies):
"""
Test file upload with file size exceeding limit.
"""
@@ -322,7 +329,7 @@ class TestFileService:
mimetype = "image/jpeg"
with pytest.raises(FileTooLargeError):
FileService.upload_file(
FileService(engine).upload_file(
filename=filename,
content=content,
mimetype=mimetype,
@@ -331,7 +338,7 @@ class TestFileService:
# Test is_file_size_within_limit method
def test_is_file_size_within_limit_image_success(
self, db_session_with_containers, mock_external_service_dependencies
self, db_session_with_containers, engine, mock_external_service_dependencies
):
"""
Test file size check for image files within limit.
@@ -339,12 +346,12 @@ class TestFileService:
extension = "jpg"
file_size = dify_config.UPLOAD_IMAGE_FILE_SIZE_LIMIT * 1024 * 1024 # Exactly at limit
result = FileService.is_file_size_within_limit(extension=extension, file_size=file_size)
result = FileService(engine).is_file_size_within_limit(extension=extension, file_size=file_size)
assert result is True
def test_is_file_size_within_limit_video_success(
self, db_session_with_containers, mock_external_service_dependencies
self, db_session_with_containers, engine, mock_external_service_dependencies
):
"""
Test file size check for video files within limit.
@@ -352,12 +359,12 @@ class TestFileService:
extension = "mp4"
file_size = dify_config.UPLOAD_VIDEO_FILE_SIZE_LIMIT * 1024 * 1024 # Exactly at limit
result = FileService.is_file_size_within_limit(extension=extension, file_size=file_size)
result = FileService(engine).is_file_size_within_limit(extension=extension, file_size=file_size)
assert result is True
def test_is_file_size_within_limit_audio_success(
self, db_session_with_containers, mock_external_service_dependencies
self, db_session_with_containers, engine, mock_external_service_dependencies
):
"""
Test file size check for audio files within limit.
@@ -365,12 +372,12 @@ class TestFileService:
extension = "mp3"
file_size = dify_config.UPLOAD_AUDIO_FILE_SIZE_LIMIT * 1024 * 1024 # Exactly at limit
result = FileService.is_file_size_within_limit(extension=extension, file_size=file_size)
result = FileService(engine).is_file_size_within_limit(extension=extension, file_size=file_size)
assert result is True
def test_is_file_size_within_limit_document_success(
self, db_session_with_containers, mock_external_service_dependencies
self, db_session_with_containers, engine, mock_external_service_dependencies
):
"""
Test file size check for document files within limit.
@@ -378,12 +385,12 @@ class TestFileService:
extension = "pdf"
file_size = dify_config.UPLOAD_FILE_SIZE_LIMIT * 1024 * 1024 # Exactly at limit
result = FileService.is_file_size_within_limit(extension=extension, file_size=file_size)
result = FileService(engine).is_file_size_within_limit(extension=extension, file_size=file_size)
assert result is True
def test_is_file_size_within_limit_image_exceeded(
self, db_session_with_containers, mock_external_service_dependencies
self, db_session_with_containers, engine, mock_external_service_dependencies
):
"""
Test file size check for image files exceeding limit.
@@ -391,12 +398,12 @@ class TestFileService:
extension = "jpg"
file_size = dify_config.UPLOAD_IMAGE_FILE_SIZE_LIMIT * 1024 * 1024 + 1 # Exceeds limit
result = FileService.is_file_size_within_limit(extension=extension, file_size=file_size)
result = FileService(engine).is_file_size_within_limit(extension=extension, file_size=file_size)
assert result is False
def test_is_file_size_within_limit_unknown_extension(
self, db_session_with_containers, mock_external_service_dependencies
self, db_session_with_containers, engine, mock_external_service_dependencies
):
"""
Test file size check for unknown file extension.
@@ -404,12 +411,12 @@ class TestFileService:
extension = "xyz"
file_size = dify_config.UPLOAD_FILE_SIZE_LIMIT * 1024 * 1024 # Uses default limit
result = FileService.is_file_size_within_limit(extension=extension, file_size=file_size)
result = FileService(engine).is_file_size_within_limit(extension=extension, file_size=file_size)
assert result is True
# Test upload_text method
def test_upload_text_success(self, db_session_with_containers, mock_external_service_dependencies):
def test_upload_text_success(self, db_session_with_containers, engine, mock_external_service_dependencies):
"""
Test successful text upload.
"""
@@ -422,21 +429,25 @@ class TestFileService:
mock_current_user.current_tenant_id = str(fake.uuid4())
mock_current_user.id = str(fake.uuid4())
with patch("services.file_service.current_user", mock_current_user):
upload_file = FileService.upload_text(text=text, text_name=text_name)
upload_file = FileService(engine).upload_text(
text=text,
text_name=text_name,
user_id=mock_current_user.id,
tenant_id=mock_current_user.current_tenant_id,
)
assert upload_file is not None
assert upload_file.name == text_name
assert upload_file.size == len(text)
assert upload_file.extension == "txt"
assert upload_file.mime_type == "text/plain"
assert upload_file.used is True
assert upload_file.used_by == mock_current_user.id
assert upload_file is not None
assert upload_file.name == text_name
assert upload_file.size == len(text)
assert upload_file.extension == "txt"
assert upload_file.mime_type == "text/plain"
assert upload_file.used is True
assert upload_file.used_by == mock_current_user.id
# Verify storage was called
mock_external_service_dependencies["storage"].save.assert_called_once()
# Verify storage was called
mock_external_service_dependencies["storage"].save.assert_called_once()
def test_upload_text_name_too_long(self, db_session_with_containers, mock_external_service_dependencies):
def test_upload_text_name_too_long(self, db_session_with_containers, engine, mock_external_service_dependencies):
"""
Test text upload with name that exceeds length limit.
"""
@@ -449,15 +460,19 @@ class TestFileService:
mock_current_user.current_tenant_id = str(fake.uuid4())
mock_current_user.id = str(fake.uuid4())
with patch("services.file_service.current_user", mock_current_user):
upload_file = FileService.upload_text(text=text, text_name=long_name)
upload_file = FileService(engine).upload_text(
text=text,
text_name=long_name,
user_id=mock_current_user.id,
tenant_id=mock_current_user.current_tenant_id,
)
# Verify name was truncated
assert len(upload_file.name) <= 200
assert upload_file.name == "a" * 200
# Verify name was truncated
assert len(upload_file.name) <= 200
assert upload_file.name == "a" * 200
# Test get_file_preview method
def test_get_file_preview_success(self, db_session_with_containers, mock_external_service_dependencies):
def test_get_file_preview_success(self, db_session_with_containers, engine, mock_external_service_dependencies):
"""
Test successful file preview generation.
"""
@@ -473,12 +488,14 @@ class TestFileService:
db.session.commit()
result = FileService.get_file_preview(file_id=upload_file.id)
result = FileService(engine).get_file_preview(file_id=upload_file.id)
assert result == "extracted text content"
mock_external_service_dependencies["extract_processor"].load_from_upload_file.assert_called_once()
def test_get_file_preview_file_not_found(self, db_session_with_containers, mock_external_service_dependencies):
def test_get_file_preview_file_not_found(
self, db_session_with_containers, engine, mock_external_service_dependencies
):
"""
Test file preview with non-existent file.
"""
@@ -486,10 +503,10 @@ class TestFileService:
non_existent_id = str(fake.uuid4())
with pytest.raises(NotFound, match="File not found"):
FileService.get_file_preview(file_id=non_existent_id)
FileService(engine).get_file_preview(file_id=non_existent_id)
def test_get_file_preview_unsupported_file_type(
self, db_session_with_containers, mock_external_service_dependencies
self, db_session_with_containers, engine, mock_external_service_dependencies
):
"""
Test file preview with unsupported file type.
@@ -507,9 +524,11 @@ class TestFileService:
db.session.commit()
with pytest.raises(UnsupportedFileTypeError):
FileService.get_file_preview(file_id=upload_file.id)
FileService(engine).get_file_preview(file_id=upload_file.id)
def test_get_file_preview_text_truncation(self, db_session_with_containers, mock_external_service_dependencies):
def test_get_file_preview_text_truncation(
self, db_session_with_containers, engine, mock_external_service_dependencies
):
"""
Test file preview with text that exceeds preview limit.
"""
@@ -529,13 +548,13 @@ class TestFileService:
long_text = "x" * 5000 # Longer than PREVIEW_WORDS_LIMIT
mock_external_service_dependencies["extract_processor"].load_from_upload_file.return_value = long_text
result = FileService.get_file_preview(file_id=upload_file.id)
result = FileService(engine).get_file_preview(file_id=upload_file.id)
assert len(result) == 3000 # PREVIEW_WORDS_LIMIT
assert result == "x" * 3000
# Test get_image_preview method
def test_get_image_preview_success(self, db_session_with_containers, mock_external_service_dependencies):
def test_get_image_preview_success(self, db_session_with_containers, engine, mock_external_service_dependencies):
"""
Test successful image preview generation.
"""
@@ -555,7 +574,7 @@ class TestFileService:
nonce = "test_nonce"
sign = "test_signature"
generator, mime_type = FileService.get_image_preview(
generator, mime_type = FileService(engine).get_image_preview(
file_id=upload_file.id,
timestamp=timestamp,
nonce=nonce,
@@ -566,7 +585,9 @@ class TestFileService:
assert mime_type == upload_file.mime_type
mock_external_service_dependencies["file_helpers"].verify_image_signature.assert_called_once()
def test_get_image_preview_invalid_signature(self, db_session_with_containers, mock_external_service_dependencies):
def test_get_image_preview_invalid_signature(
self, db_session_with_containers, engine, mock_external_service_dependencies
):
"""
Test image preview with invalid signature.
"""
@@ -584,14 +605,16 @@ class TestFileService:
sign = "invalid_signature"
with pytest.raises(NotFound, match="File not found or signature is invalid"):
FileService.get_image_preview(
FileService(engine).get_image_preview(
file_id=upload_file.id,
timestamp=timestamp,
nonce=nonce,
sign=sign,
)
def test_get_image_preview_file_not_found(self, db_session_with_containers, mock_external_service_dependencies):
def test_get_image_preview_file_not_found(
self, db_session_with_containers, engine, mock_external_service_dependencies
):
"""
Test image preview with non-existent file.
"""
@@ -603,7 +626,7 @@ class TestFileService:
sign = "test_signature"
with pytest.raises(NotFound, match="File not found or signature is invalid"):
FileService.get_image_preview(
FileService(engine).get_image_preview(
file_id=non_existent_id,
timestamp=timestamp,
nonce=nonce,
@@ -611,7 +634,7 @@ class TestFileService:
)
def test_get_image_preview_unsupported_file_type(
self, db_session_with_containers, mock_external_service_dependencies
self, db_session_with_containers, engine, mock_external_service_dependencies
):
"""
Test image preview with non-image file type.
@@ -633,7 +656,7 @@ class TestFileService:
sign = "test_signature"
with pytest.raises(UnsupportedFileTypeError):
FileService.get_image_preview(
FileService(engine).get_image_preview(
file_id=upload_file.id,
timestamp=timestamp,
nonce=nonce,
@@ -642,7 +665,7 @@ class TestFileService:
# Test get_file_generator_by_file_id method
def test_get_file_generator_by_file_id_success(
self, db_session_with_containers, mock_external_service_dependencies
self, db_session_with_containers, engine, mock_external_service_dependencies
):
"""
Test successful file generator retrieval.
@@ -657,7 +680,7 @@ class TestFileService:
nonce = "test_nonce"
sign = "test_signature"
generator, file_obj = FileService.get_file_generator_by_file_id(
generator, file_obj = FileService(engine).get_file_generator_by_file_id(
file_id=upload_file.id,
timestamp=timestamp,
nonce=nonce,
@@ -665,11 +688,11 @@ class TestFileService:
)
assert generator is not None
assert file_obj == upload_file
assert file_obj.id == upload_file.id
mock_external_service_dependencies["file_helpers"].verify_file_signature.assert_called_once()
def test_get_file_generator_by_file_id_invalid_signature(
self, db_session_with_containers, mock_external_service_dependencies
self, db_session_with_containers, engine, mock_external_service_dependencies
):
"""
Test file generator retrieval with invalid signature.
@@ -688,7 +711,7 @@ class TestFileService:
sign = "invalid_signature"
with pytest.raises(NotFound, match="File not found or signature is invalid"):
FileService.get_file_generator_by_file_id(
FileService(engine).get_file_generator_by_file_id(
file_id=upload_file.id,
timestamp=timestamp,
nonce=nonce,
@@ -696,7 +719,7 @@ class TestFileService:
)
def test_get_file_generator_by_file_id_file_not_found(
self, db_session_with_containers, mock_external_service_dependencies
self, db_session_with_containers, engine, mock_external_service_dependencies
):
"""
Test file generator retrieval with non-existent file.
@@ -709,7 +732,7 @@ class TestFileService:
sign = "test_signature"
with pytest.raises(NotFound, match="File not found or signature is invalid"):
FileService.get_file_generator_by_file_id(
FileService(engine).get_file_generator_by_file_id(
file_id=non_existent_id,
timestamp=timestamp,
nonce=nonce,
@@ -717,7 +740,9 @@ class TestFileService:
)
# Test get_public_image_preview method
def test_get_public_image_preview_success(self, db_session_with_containers, mock_external_service_dependencies):
def test_get_public_image_preview_success(
self, db_session_with_containers, engine, mock_external_service_dependencies
):
"""
Test successful public image preview generation.
"""
@@ -733,14 +758,14 @@ class TestFileService:
db.session.commit()
generator, mime_type = FileService.get_public_image_preview(file_id=upload_file.id)
generator, mime_type = FileService(engine).get_public_image_preview(file_id=upload_file.id)
assert generator is not None
assert mime_type == upload_file.mime_type
mock_external_service_dependencies["storage"].load.assert_called_once()
def test_get_public_image_preview_file_not_found(
self, db_session_with_containers, mock_external_service_dependencies
self, db_session_with_containers, engine, mock_external_service_dependencies
):
"""
Test public image preview with non-existent file.
@@ -749,10 +774,10 @@ class TestFileService:
non_existent_id = str(fake.uuid4())
with pytest.raises(NotFound, match="File not found or signature is invalid"):
FileService.get_public_image_preview(file_id=non_existent_id)
FileService(engine).get_public_image_preview(file_id=non_existent_id)
def test_get_public_image_preview_unsupported_file_type(
self, db_session_with_containers, mock_external_service_dependencies
self, db_session_with_containers, engine, mock_external_service_dependencies
):
"""
Test public image preview with non-image file type.
@@ -770,10 +795,10 @@ class TestFileService:
db.session.commit()
with pytest.raises(UnsupportedFileTypeError):
FileService.get_public_image_preview(file_id=upload_file.id)
FileService(engine).get_public_image_preview(file_id=upload_file.id)
# Test edge cases and boundary conditions
def test_upload_file_empty_content(self, db_session_with_containers, mock_external_service_dependencies):
def test_upload_file_empty_content(self, db_session_with_containers, engine, mock_external_service_dependencies):
"""
Test file upload with empty content.
"""
@@ -784,7 +809,7 @@ class TestFileService:
content = b""
mimetype = "text/plain"
upload_file = FileService.upload_file(
upload_file = FileService(engine).upload_file(
filename=filename,
content=content,
mimetype=mimetype,
@@ -795,7 +820,7 @@ class TestFileService:
assert upload_file.size == 0
def test_upload_file_special_characters_in_name(
self, db_session_with_containers, mock_external_service_dependencies
self, db_session_with_containers, engine, mock_external_service_dependencies
):
"""
Test file upload with special characters in filename (but valid ones).
@@ -807,7 +832,7 @@ class TestFileService:
content = b"test content"
mimetype = "text/plain"
upload_file = FileService.upload_file(
upload_file = FileService(engine).upload_file(
filename=filename,
content=content,
mimetype=mimetype,
@@ -818,7 +843,7 @@ class TestFileService:
assert upload_file.name == filename
def test_upload_file_different_case_extensions(
self, db_session_with_containers, mock_external_service_dependencies
self, db_session_with_containers, engine, mock_external_service_dependencies
):
"""
Test file upload with different case extensions.
@@ -830,7 +855,7 @@ class TestFileService:
content = b"test content"
mimetype = "application/pdf"
upload_file = FileService.upload_file(
upload_file = FileService(engine).upload_file(
filename=filename,
content=content,
mimetype=mimetype,
@@ -840,7 +865,7 @@ class TestFileService:
assert upload_file is not None
assert upload_file.extension == "pdf" # Should be converted to lowercase
def test_upload_text_empty_text(self, db_session_with_containers, mock_external_service_dependencies):
def test_upload_text_empty_text(self, db_session_with_containers, engine, mock_external_service_dependencies):
"""
Test text upload with empty text.
"""
@@ -853,13 +878,17 @@ class TestFileService:
mock_current_user.current_tenant_id = str(fake.uuid4())
mock_current_user.id = str(fake.uuid4())
with patch("services.file_service.current_user", mock_current_user):
upload_file = FileService.upload_text(text=text, text_name=text_name)
upload_file = FileService(engine).upload_text(
text=text,
text_name=text_name,
user_id=mock_current_user.id,
tenant_id=mock_current_user.current_tenant_id,
)
assert upload_file is not None
assert upload_file.size == 0
assert upload_file is not None
assert upload_file.size == 0
def test_file_size_limits_edge_cases(self, db_session_with_containers, mock_external_service_dependencies):
def test_file_size_limits_edge_cases(self, db_session_with_containers, engine, mock_external_service_dependencies):
"""
Test file size limits with edge case values.
"""
@@ -871,15 +900,15 @@ class TestFileService:
("pdf", dify_config.UPLOAD_FILE_SIZE_LIMIT),
]:
file_size = limit_config * 1024 * 1024
result = FileService.is_file_size_within_limit(extension=extension, file_size=file_size)
result = FileService(engine).is_file_size_within_limit(extension=extension, file_size=file_size)
assert result is True
# Test one byte over limit
file_size = limit_config * 1024 * 1024 + 1
result = FileService.is_file_size_within_limit(extension=extension, file_size=file_size)
result = FileService(engine).is_file_size_within_limit(extension=extension, file_size=file_size)
assert result is False
def test_upload_file_with_source_url(self, db_session_with_containers, mock_external_service_dependencies):
def test_upload_file_with_source_url(self, db_session_with_containers, engine, mock_external_service_dependencies):
"""
Test file upload with source URL that gets overridden by signed URL.
"""
@@ -891,7 +920,7 @@ class TestFileService:
mimetype = "application/pdf"
source_url = "https://original-source.com/file.pdf"
upload_file = FileService.upload_file(
upload_file = FileService(engine).upload_file(
filename=filename,
content=content,
mimetype=mimetype,
@@ -904,7 +933,7 @@ class TestFileService:
# The signed URL should only be set when source_url is empty
# Let's test that scenario
upload_file2 = FileService.upload_file(
upload_file2 = FileService(engine).upload_file(
filename="test2.pdf",
content=b"test content 2",
mimetype="application/pdf",

View File

@@ -108,6 +108,7 @@ class TestWorkflowDraftVariableService:
created_by=app.created_by,
environment_variables=[],
conversation_variables=[],
rag_pipeline_variables=[],
)
from extensions.ext_database import db

View File

@@ -91,6 +91,7 @@ def test_flask_configs(monkeypatch: pytest.MonkeyPatch):
"pool_size": 30,
"pool_use_lifo": False,
"pool_reset_on_return": None,
"pool_timeout": 30,
}
assert config["CONSOLE_WEB_URL"] == "https://example.com"

View File

@@ -21,8 +21,11 @@ def get_example_filename() -> str:
return "test.txt"
def get_example_data() -> bytes:
return b"test"
def get_example_data(length: int = 4) -> bytes:
chars = "test"
result = "".join(chars[i % len(chars)] for i in range(length)).encode()
assert len(result) == length
return result
def get_example_filepath() -> str:

View File

@@ -57,12 +57,19 @@ class TestOpenDAL:
def test_load_stream(self):
"""Test loading data as a stream."""
filename = get_example_filename()
data = get_example_data()
chunks = 5
chunk_size = 4096
data = get_example_data(length=chunk_size * chunks)
self.storage.save(filename, data)
generator = self.storage.load_stream(filename)
assert isinstance(generator, Generator)
assert next(generator) == data
for i in range(chunks):
fetched = next(generator)
assert len(fetched) == chunk_size
assert fetched == data[i * chunk_size : (i + 1) * chunk_size]
with pytest.raises(StopIteration):
next(generator)
def test_download(self):
"""Test downloading data to a file."""

View File

@@ -0,0 +1,212 @@
"""Test cases for MCP tool transformation functionality."""
from unittest.mock import Mock
import pytest
from core.mcp.types import Tool as MCPTool
from core.tools.entities.api_entities import ToolApiEntity, ToolProviderApiEntity
from core.tools.entities.common_entities import I18nObject
from core.tools.entities.tool_entities import ToolProviderType
from models.tools import MCPToolProvider
from services.tools.tools_transform_service import ToolTransformService
@pytest.fixture
def mock_user():
"""Provides a mock user object."""
user = Mock()
user.name = "Test User"
return user
@pytest.fixture
def mock_provider(mock_user):
"""Provides a mock MCPToolProvider with a loaded user."""
provider = Mock(spec=MCPToolProvider)
provider.load_user.return_value = mock_user
return provider
@pytest.fixture
def mock_provider_no_user():
"""Provides a mock MCPToolProvider with no user."""
provider = Mock(spec=MCPToolProvider)
provider.load_user.return_value = None
return provider
@pytest.fixture
def mock_provider_full(mock_user):
"""Provides a fully configured mock MCPToolProvider for detailed tests."""
provider = Mock(spec=MCPToolProvider)
provider.id = "provider-id-123"
provider.server_identifier = "server-identifier-456"
provider.name = "Test MCP Provider"
provider.provider_icon = "icon.png"
provider.authed = True
provider.masked_server_url = "https://*****.com/mcp"
provider.timeout = 30
provider.sse_read_timeout = 300
provider.masked_headers = {"Authorization": "Bearer *****"}
provider.decrypted_headers = {"Authorization": "Bearer secret-token"}
# Mock timestamp
mock_updated_at = Mock()
mock_updated_at.timestamp.return_value = 1234567890
provider.updated_at = mock_updated_at
provider.load_user.return_value = mock_user
return provider
@pytest.fixture
def sample_mcp_tools():
"""Provides sample MCP tools for testing."""
return {
"simple": MCPTool(
name="simple_tool", description="A simple test tool", inputSchema={"type": "object", "properties": {}}
),
"none_desc": MCPTool(name="tool_none_desc", description=None, inputSchema={"type": "object", "properties": {}}),
"complex": MCPTool(
name="complex_tool",
description="A tool with complex parameters",
inputSchema={
"type": "object",
"properties": {
"text": {"type": "string", "description": "Input text"},
"count": {"type": "integer", "description": "Number of items", "minimum": 1, "maximum": 100},
"options": {"type": "array", "items": {"type": "string"}, "description": "List of options"},
},
"required": ["text"],
},
),
}
class TestMCPToolTransform:
"""Test cases for MCP tool transformation methods."""
def test_mcp_tool_to_user_tool_with_none_description(self, mock_provider):
"""Test that mcp_tool_to_user_tool handles None description correctly."""
# Create MCP tools with None description
tools = [
MCPTool(
name="tool1",
description=None, # This is the case that caused the error
inputSchema={"type": "object", "properties": {}},
),
MCPTool(
name="tool2",
description=None,
inputSchema={
"type": "object",
"properties": {"param1": {"type": "string", "description": "A parameter"}},
},
),
]
# Call the method
result = ToolTransformService.mcp_tool_to_user_tool(mock_provider, tools)
# Verify the result
assert len(result) == 2
assert all(isinstance(tool, ToolApiEntity) for tool in result)
# Check first tool
assert result[0].name == "tool1"
assert result[0].author == "Test User"
assert isinstance(result[0].label, I18nObject)
assert result[0].label.en_US == "tool1"
assert isinstance(result[0].description, I18nObject)
assert result[0].description.en_US == "" # Should be empty string, not None
assert result[0].description.zh_Hans == ""
# Check second tool
assert result[1].name == "tool2"
assert result[1].description.en_US == ""
assert result[1].description.zh_Hans == ""
def test_mcp_tool_to_user_tool_with_description(self, mock_provider):
"""Test that mcp_tool_to_user_tool handles normal description correctly."""
# Create MCP tools with description
tools = [
MCPTool(
name="tool_with_desc",
description="This is a test tool that does something useful",
inputSchema={"type": "object", "properties": {}},
)
]
# Call the method
result = ToolTransformService.mcp_tool_to_user_tool(mock_provider, tools)
# Verify the result
assert len(result) == 1
assert isinstance(result[0], ToolApiEntity)
assert result[0].name == "tool_with_desc"
assert result[0].description.en_US == "This is a test tool that does something useful"
assert result[0].description.zh_Hans == "This is a test tool that does something useful"
def test_mcp_tool_to_user_tool_with_no_user(self, mock_provider_no_user):
"""Test that mcp_tool_to_user_tool handles None user correctly."""
# Create MCP tool
tools = [MCPTool(name="tool1", description="Test tool", inputSchema={"type": "object", "properties": {}})]
# Call the method
result = ToolTransformService.mcp_tool_to_user_tool(mock_provider_no_user, tools)
# Verify the result
assert len(result) == 1
assert result[0].author == "Anonymous"
def test_mcp_tool_to_user_tool_with_complex_schema(self, mock_provider, sample_mcp_tools):
"""Test that mcp_tool_to_user_tool correctly converts complex input schemas."""
# Use complex tool from fixtures
tools = [sample_mcp_tools["complex"]]
# Call the method
result = ToolTransformService.mcp_tool_to_user_tool(mock_provider, tools)
# Verify the result
assert len(result) == 1
assert result[0].name == "complex_tool"
assert result[0].parameters is not None
# The actual parameter conversion is handled by convert_mcp_schema_to_parameter
# which should be tested separately
def test_mcp_provider_to_user_provider_for_list(self, mock_provider_full):
"""Test mcp_provider_to_user_provider with for_list=True."""
# Set tools data with null description
mock_provider_full.tools = '[{"name": "tool1", "description": null, "inputSchema": {}}]'
# Call the method with for_list=True
result = ToolTransformService.mcp_provider_to_user_provider(mock_provider_full, for_list=True)
# Verify the result
assert isinstance(result, ToolProviderApiEntity)
assert result.id == "provider-id-123" # Should use provider.id when for_list=True
assert result.name == "Test MCP Provider"
assert result.type == ToolProviderType.MCP
assert result.is_team_authorization is True
assert result.server_url == "https://*****.com/mcp"
assert len(result.tools) == 1
assert result.tools[0].description.en_US == "" # Should handle None description
def test_mcp_provider_to_user_provider_not_for_list(self, mock_provider_full):
"""Test mcp_provider_to_user_provider with for_list=False."""
# Set tools data with description
mock_provider_full.tools = '[{"name": "tool1", "description": "Tool description", "inputSchema": {}}]'
# Call the method with for_list=False
result = ToolTransformService.mcp_provider_to_user_provider(mock_provider_full, for_list=False)
# Verify the result
assert isinstance(result, ToolProviderApiEntity)
assert result.id == "server-identifier-456" # Should use server_identifier when for_list=False
assert result.server_identifier == "server-identifier-456"
assert result.timeout == 30
assert result.sse_read_timeout == 300
assert result.original_headers == {"Authorization": "Bearer secret-token"}
assert len(result.tools) == 1
assert result.tools[0].description.en_US == "Tool description"

22
api/uv.lock generated
View File

@@ -1647,7 +1647,7 @@ storage = [
{ name = "cos-python-sdk-v5", specifier = "==1.9.30" },
{ name = "esdk-obs-python", specifier = "==3.24.6.1" },
{ name = "google-cloud-storage", specifier = "==2.16.0" },
{ name = "opendal", specifier = "~=0.45.16" },
{ name = "opendal", specifier = "~=0.46.0" },
{ name = "oss2", specifier = "==2.18.5" },
{ name = "supabase", specifier = "~=2.18.1" },
{ name = "tos", specifier = "~=2.7.1" },
@@ -3825,18 +3825,18 @@ wheels = [
[[package]]
name = "opendal"
version = "0.45.20"
version = "0.46.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/2f/3f/927dfe1349ae58b9238b8eafba747af648d660a9425f486dda01a10f0b78/opendal-0.45.20.tar.gz", hash = "sha256:9f6f90d9e9f9d6e9e5a34aa7729169ef34d2f1869ad1e01ddc39b1c0ce0c9405", size = 990267, upload-time = "2025-05-26T07:02:11.819Z" }
sdist = { url = "https://files.pythonhosted.org/packages/33/db/9c37efe16afe6371d66a0be94fa701c281108820198f18443dc997fbf3d8/opendal-0.46.0.tar.gz", hash = "sha256:334aa4c5b3cc0776598ef8d3c154f074f6a9d87981b951d70db1407efed3b06c", size = 989391, upload-time = "2025-07-17T06:58:52.913Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/84/77/6427e16b8630f0cc71f4a1b01648ed3264f1e04f1f6d9b5d09e5c6a4dd2f/opendal-0.45.20-cp311-abi3-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:35acdd8001e4a741532834fdbff3020ffb10b40028bb49fbe93c4f8197d66d8c", size = 26910966, upload-time = "2025-05-26T07:01:24.987Z" },
{ url = "https://files.pythonhosted.org/packages/12/1f/83e415334739f1ab4dba55cdd349abf0b66612249055afb422a354b96ac8/opendal-0.45.20-cp311-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:629bfe8d384364bced6cbeb01f49b99779fa5151c68048a1869ff645ddcfcb25", size = 13002770, upload-time = "2025-05-26T07:01:30.385Z" },
{ url = "https://files.pythonhosted.org/packages/49/94/c5de6ed54a02d7413636c2ccefa71d8dd09c2ada1cd6ecab202feb1fdeda/opendal-0.45.20-cp311-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d12cc5ac7e441fb93d86d1673112d9fb08580fc3226f864434f4a56a72efec53", size = 14387218, upload-time = "2025-05-26T07:01:33.017Z" },
{ url = "https://files.pythonhosted.org/packages/c6/83/713a1e1de8cbbd69af50e26644bbdeef3c1068b89f442417376fa3c0f591/opendal-0.45.20-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:45a3adae1f473052234fc4054a6f210df3ded9aff10db8d545d0a37eff3b13cc", size = 13424302, upload-time = "2025-05-26T07:01:36.417Z" },
{ url = "https://files.pythonhosted.org/packages/c7/78/c9651e753aaf6eb61887ca372a3f9c2ae57dae03c3159d24deaf018c26dc/opendal-0.45.20-cp311-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:d8947857052c85a4b0e251d50e23f5f68f0cdd9e509e32e614a5e4b2fc7424c4", size = 13622483, upload-time = "2025-05-26T07:01:38.886Z" },
{ url = "https://files.pythonhosted.org/packages/3c/9d/5d8c20c0fc93df5e349e5694167de30afdc54c5755704cc64764a6cbb309/opendal-0.45.20-cp311-abi3-musllinux_1_1_armv7l.whl", hash = "sha256:891d2f9114efeef648973049ed15e56477e8feb9e48b540bd8d6105ea22a253c", size = 13320229, upload-time = "2025-05-26T07:01:41.965Z" },
{ url = "https://files.pythonhosted.org/packages/21/39/05262f748a2085522e0c85f03eab945589313dc9caedc002872c39162776/opendal-0.45.20-cp311-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:539de9b825f6783d6289d88c0c9ac5415daa4d892d761e3540c565bda51e8997", size = 14574280, upload-time = "2025-05-26T07:01:44.413Z" },
{ url = "https://files.pythonhosted.org/packages/74/83/cc7c6de29b0a7585cd445258d174ca204d37729c3874ad08e515b0bf331c/opendal-0.45.20-cp311-abi3-win_amd64.whl", hash = "sha256:145efd56aa33b493d5b652c3e4f5ae5097ab69d38c132d80f108e9f5c1e4d863", size = 14929888, upload-time = "2025-05-26T07:01:46.929Z" },
{ url = "https://files.pythonhosted.org/packages/6c/05/a8d9c6a935a181d38b55c2cb7121394a6bdd819909ff453a17e78f45672a/opendal-0.46.0-cp311-abi3-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:8cd4db71694c93e99055349714c7f7c7177e4767428e9e4bc592e4055edb6dba", size = 26502380, upload-time = "2025-07-17T06:58:16.173Z" },
{ url = "https://files.pythonhosted.org/packages/57/8d/cf684b246fa38ab946f3d11671230d07b5b14d2aeb152b68bd51f4b2210b/opendal-0.46.0-cp311-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3019f923a7e1c5db86a36cee95d0c899ca7379e355bda9eb37e16d076c1f42f3", size = 12684482, upload-time = "2025-07-17T06:58:18.462Z" },
{ url = "https://files.pythonhosted.org/packages/ad/71/36a97a8258cd0f0dd902561d0329a339f5a39a9896f0380763f526e9af89/opendal-0.46.0-cp311-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e202ded0be5410546193f563258e9a78a57337f5c2bb553b8802a420c2ef683", size = 14114685, upload-time = "2025-07-17T06:58:20.728Z" },
{ url = "https://files.pythonhosted.org/packages/b7/fa/9a30c17428a12246c6ae17b406e7214a9a3caecec37af6860d27e99f9b66/opendal-0.46.0-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:7db426ba8171d665953836653a596ef1bad3732a1c4dd2e3fa68bc20beee7afc", size = 13191783, upload-time = "2025-07-17T06:58:23.181Z" },
{ url = "https://files.pythonhosted.org/packages/f8/32/4f7351ee242b63c817896afb373e5d5f28e1d9ca4e51b69a7b2e934694cf/opendal-0.46.0-cp311-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:898444dc072201044ed8c1dcce0929ebda8b10b92ba9c95248cf7fcbbc9dc1d7", size = 13358943, upload-time = "2025-07-17T06:58:25.281Z" },
{ url = "https://files.pythonhosted.org/packages/77/e5/f650cf79ffbf7c7c8d7466fe9b4fa04cda97d950f915b8b3e2ced29f0f3e/opendal-0.46.0-cp311-abi3-musllinux_1_1_armv7l.whl", hash = "sha256:998e7a80a3468fd3f8604873aec6777fd25d3101fdbb1b63a4dc5fef14797086", size = 13015627, upload-time = "2025-07-17T06:58:27.28Z" },
{ url = "https://files.pythonhosted.org/packages/c4/d1/77b731016edd494514447322d6b02a2a49c41ad6deeaa824dd2958479574/opendal-0.46.0-cp311-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:093098658482e7b87d16bf2931b5ef0ee22ed6a695f945874c696da72a6d057a", size = 14314675, upload-time = "2025-07-17T06:58:29.622Z" },
{ url = "https://files.pythonhosted.org/packages/1e/93/328f7c72ccf04b915ab88802342d8f79322b7fba5509513b509681651224/opendal-0.46.0-cp311-abi3-win_amd64.whl", hash = "sha256:f5e58abc86db005879340a9187372a8c105c456c762943139a48dde63aad790d", size = 14904045, upload-time = "2025-07-17T06:58:31.692Z" },
]
[[package]]

View File

@@ -225,6 +225,9 @@ SQLALCHEMY_ECHO=false
SQLALCHEMY_POOL_PRE_PING=false
# Whether to enable the Last in first out option or use default FIFO queue if is false
SQLALCHEMY_POOL_USE_LIFO=false
# Number of seconds to wait for a connection from the pool before raising a timeout error.
# Default is 30
SQLALCHEMY_POOL_TIMEOUT=30
# Maximum number of connections to the database
# Default is 100

View File

@@ -62,6 +62,7 @@ x-shared-env: &shared-api-worker-env
SQLALCHEMY_ECHO: ${SQLALCHEMY_ECHO:-false}
SQLALCHEMY_POOL_PRE_PING: ${SQLALCHEMY_POOL_PRE_PING:-false}
SQLALCHEMY_POOL_USE_LIFO: ${SQLALCHEMY_POOL_USE_LIFO:-false}
SQLALCHEMY_POOL_TIMEOUT: ${SQLALCHEMY_POOL_TIMEOUT:-30}
POSTGRES_MAX_CONNECTIONS: ${POSTGRES_MAX_CONNECTIONS:-100}
POSTGRES_SHARED_BUFFERS: ${POSTGRES_SHARED_BUFFERS:-128MB}
POSTGRES_WORK_MEM: ${POSTGRES_WORK_MEM:-4MB}

View File

@@ -21,8 +21,8 @@ const DetailPanel: FC<ILogDetail> = ({ runID, onClose }) => {
</span>
<h1 className='system-xl-semibold shrink-0 px-4 py-1 text-text-primary'>{t('appLog.runDetail.workflowTitle')}</h1>
<Run
runDetailUrl={`/apps/${appDetail?.id}/workflow-runs/${runID}`}
tracingListUrl={`/apps/${appDetail?.id}/workflow-runs/${runID}/node-executions`}
runDetailUrl={runID ? `/apps/${appDetail?.id}/workflow-runs/${runID}` : ''}
tracingListUrl={runID ? `/apps/${appDetail?.id}/workflow-runs/${runID}/node-executions` : ''}
/>
</div>
)

View File

@@ -1,5 +1,5 @@
'use client'
import React, { useCallback, useMemo, useState } from 'react'
import React, { useCallback, useState } from 'react'
import {
FloatingPortal,
autoUpdate,
@@ -7,7 +7,6 @@ import {
offset,
shift,
size,
useClick,
useDismiss,
useFloating,
useFocus,
@@ -41,7 +40,6 @@ export function usePortalToFollowElem({
}: PortalToFollowElemOptions = {}) {
const [localOpen, setLocalOpen] = useState(false)
const open = controlledOpen ?? localOpen
const isControlled = controlledOpen !== undefined
const handleOpenChange = useCallback((newOpen: boolean) => {
setLocalOpen(newOpen)
setControlledOpen?.(newOpen)
@@ -73,24 +71,15 @@ export function usePortalToFollowElem({
const hover = useHover(context, {
move: false,
enabled: !isControlled,
enabled: controlledOpen === undefined,
})
const focus = useFocus(context, {
enabled: !isControlled,
enabled: controlledOpen === undefined,
})
const dismiss = useDismiss(context)
const role = useRole(context, { role: 'tooltip' })
const click = useClick(context)
const interactionsArray = useMemo(() => {
const result = [hover, focus, dismiss, role]
if (!isControlled)
result.push(click)
return result
}, [isControlled, hover, focus, dismiss, role, click])
const interactions = useInteractions(interactionsArray)
const interactions = useInteractions([hover, focus, dismiss, role])
return React.useMemo(
() => ({
@@ -149,7 +138,7 @@ export const PortalToFollowElemTrigger = (
context.getReferenceProps({
ref,
...props,
...(children.props || {}),
...children.props,
'data-state': context.open ? 'open' : 'closed',
} as React.HTMLProps<HTMLElement>),
)

View File

@@ -9,8 +9,7 @@ import Button from '@/app/components/base/button'
import { useTranslation } from 'react-i18next'
import Toast from '@/app/components/base/toast'
import type { PipelineTemplate } from '@/models/pipeline'
import { PipelineTemplateListQueryKeyPrefix, useUpdateTemplateInfo } from '@/service/use-pipeline'
import { useInvalid } from '@/service/use-base'
import { useInvalidCustomizedTemplateList, useUpdateTemplateInfo } from '@/service/use-pipeline'
type EditPipelineInfoProps = {
onClose: () => void
@@ -63,7 +62,7 @@ const EditPipelineInfo = ({
}, [])
const { mutateAsync: updatePipeline } = useUpdateTemplateInfo()
const invalidCustomizedTemplateList = useInvalid([...PipelineTemplateListQueryKeyPrefix, 'customized'])
const invalidCustomizedTemplateList = useInvalidCustomizedTemplateList()
const handleSave = useCallback(async () => {
if (!name) {

View File

@@ -5,9 +5,9 @@ import EditPipelineInfo from './edit-pipeline-info'
import type { PipelineTemplate } from '@/models/pipeline'
import Confirm from '@/app/components/base/confirm'
import {
PipelineTemplateListQueryKeyPrefix,
useDeleteTemplate,
useExportTemplateDSL,
useInvalidCustomizedTemplateList,
usePipelineTemplateById,
} from '@/service/use-pipeline'
import { downloadFile } from '@/utils/format'
@@ -18,7 +18,6 @@ import Details from './details'
import Content from './content'
import Actions from './actions'
import { useCreatePipelineDatasetFromCustomized } from '@/service/knowledge/use-create-dataset'
import { useInvalid } from '@/service/use-base'
import { useInvalidDatasetList } from '@/service/knowledge/use-dataset'
type TemplateCardProps = {
@@ -128,7 +127,7 @@ const TemplateCard = ({
}, [])
const { mutateAsync: deletePipeline } = useDeleteTemplate()
const invalidCustomizedTemplateList = useInvalid([...PipelineTemplateListQueryKeyPrefix, 'customized'])
const invalidCustomizedTemplateList = useInvalidCustomizedTemplateList()
const onConfirmDelete = useCallback(async () => {
await deletePipeline(pipeline.id, {

View File

@@ -91,7 +91,7 @@ const PageSelector = ({
if (current.expand) {
current.expand = false
newDataList = [...dataList.filter(item => !descendantsIds.includes(item.page_id))]
newDataList = dataList.filter(item => !descendantsIds.includes(item.page_id))
}
else {
current.expand = true
@@ -110,7 +110,7 @@ const PageSelector = ({
}, [dataList, listMapWithChildrenAndDescendants, pagesMap])
const handleCheck = useCallback((index: number) => {
const copyValue = new Set([...checkedIds])
const copyValue = new Set(checkedIds)
const current = currentDataList[index]
const pageId = current.page_id
const currentWithChildrenAndDescendants = listMapWithChildrenAndDescendants[pageId]
@@ -138,7 +138,7 @@ const PageSelector = ({
}
}
onSelect(new Set([...copyValue]))
onSelect(new Set(copyValue))
}, [currentDataList, isMultipleChoice, listMapWithChildrenAndDescendants, onSelect, searchValue, checkedIds])
const handlePreview = useCallback((index: number) => {

View File

@@ -1,6 +1,8 @@
import {
memo,
useCallback,
useMemo,
useState,
} from 'react'
import {
RiAddLine,
@@ -36,6 +38,7 @@ const Configure = ({
disabled,
}: ConfigureProps) => {
const { t } = useTranslation()
const [open, setOpen] = useState(false)
const canApiKey = item.credential_schema?.length
const oAuthData = item.oauth_schema || {}
const canOAuth = oAuthData.client_schema?.length
@@ -53,16 +56,27 @@ const Configure = ({
}
}, [pluginPayload, t])
const handleToggle = useCallback(() => {
setOpen(v => !v)
}, [])
const handleUpdate = useCallback(() => {
setOpen(false)
onUpdate?.()
}, [onUpdate])
return (
<>
<PortalToFollowElem
open={open}
onOpenChange={setOpen}
placement='bottom-end'
offset={{
mainAxis: 4,
crossAxis: -4,
}}
>
<PortalToFollowElemTrigger>
<PortalToFollowElemTrigger onClick={handleToggle}>
<Button
variant='secondary-accent'
>
@@ -76,7 +90,7 @@ const Configure = ({
!!canOAuth && (
<AddOAuthButton
{...oAuthButtonProps}
onUpdate={onUpdate}
onUpdate={handleUpdate}
oAuthData={{
schema: oAuthData.client_schema || [],
is_oauth_custom_client_enabled: oAuthData.is_oauth_custom_client_enabled,
@@ -102,7 +116,7 @@ const Configure = ({
<AddApiKeyButton
{...apiKeyButtonProps}
formSchemas={item.credential_schema}
onUpdate={onUpdate}
onUpdate={handleUpdate}
disabled={disabled}
/>
)

View File

@@ -25,7 +25,7 @@ export const useMarketplaceAllPlugins = (providers: any[], searchText: string) =
} = useMarketplacePlugins()
const getCollectionPlugins = useCallback(async () => {
const collectionPlugins = await getMarketplacePluginsByCollectionId('__model-settings-pinned-models')
const collectionPlugins = await getMarketplacePluginsByCollectionId('__datasource-settings-pinned-datasources')
setCollectionPlugins(collectionPlugins)
}, [])
@@ -59,7 +59,7 @@ export const useMarketplaceAllPlugins = (providers: any[], searchText: string) =
}, [queryPlugins, queryPluginsWithDebounced, searchText, exclude])
const allPlugins = useMemo(() => {
const allPlugins = [...collectionPlugins.filter(plugin => !exclude.includes(plugin.plugin_id))]
const allPlugins = collectionPlugins.filter(plugin => !exclude.includes(plugin.plugin_id))
if (plugins?.length) {
for (let i = 0; i < plugins.length; i++) {

View File

@@ -43,7 +43,7 @@ const StrategyDetail: FC<Props> = ({
const outputSchema = useMemo(() => {
const res: any[] = []
if (!detail.output_schema)
if (!detail.output_schema || !detail.output_schema.properties)
return []
Object.keys(detail.output_schema.properties).forEach((outputKey) => {
const output = detail.output_schema.properties[outputKey]

View File

@@ -33,6 +33,7 @@ import { useDatasetDetailContextWithSelector } from '@/context/dataset-detail'
import { useInvalid } from '@/service/use-base'
import {
publishedPipelineInfoQueryKeyPrefix,
useInvalidCustomizedTemplateList,
usePublishAsCustomizedPipeline,
} from '@/service/use-pipeline'
import Confirm from '@/app/components/base/confirm'
@@ -158,6 +159,8 @@ const Popup = () => {
push(`/datasets/${datasetId}/documents/create-from-pipeline`)
}, [datasetId, push])
const invalidCustomizedTemplateList = useInvalidCustomizedTemplateList()
const handlePublishAsKnowledgePipeline = useCallback(async (
name: string,
icon: IconInfo,
@@ -189,6 +192,7 @@ const Popup = () => {
</div>
),
})
invalidCustomizedTemplateList()
}
catch {
notify({ type: 'error', message: t('datasetPipeline.publishTemplate.error.message') })
@@ -205,6 +209,7 @@ const Popup = () => {
hidePublishAsKnowledgePipelineModal,
notify,
t,
invalidCustomizedTemplateList,
])
const handleClickPublishAsKnowledgePipeline = useCallback(() => {

View File

@@ -38,7 +38,7 @@ const Header = ({
return (
<div
className='absolute left-0 top-0 z-10 flex h-14 w-full items-center justify-between bg-mask-top2bottom-gray-50-to-transparent px-3'
className='absolute left-0 top-7 z-10 flex h-0 w-full items-center justify-between bg-mask-top2bottom-gray-50-to-transparent px-3'
>
{(inWorkflowCanvas || isPipelineCanvas) && maximizeCanvas && <div className='h-14 w-[52px]' />}
{

View File

@@ -765,7 +765,7 @@ export const useNodesInteractions = () => {
nodesWithSameType.length > 0
? `${defaultValue.title} ${nodesWithSameType.length + 1}`
: defaultValue.title,
...(toolDefaultValue || {}),
...toolDefaultValue,
selected: true,
_showAddVariablePopup:
(nodeType === BlockEnum.VariableAssigner
@@ -866,7 +866,7 @@ export const useNodesInteractions = () => {
const nodesConnectedSourceOrTargetHandleIdsMap
= getNodesConnectedSourceOrTargetHandleIdsMap(
[...(newEdge ? [{ type: 'add', edge: newEdge }] : [])],
(newEdge ? [{ type: 'add', edge: newEdge }] : []),
nodes,
)
const newNodes = produce(nodes, (draft: Node[]) => {
@@ -1331,7 +1331,7 @@ export const useNodesInteractions = () => {
nodesWithSameType.length > 0
? `${defaultValue.title} ${nodesWithSameType.length + 1}`
: defaultValue.title,
...(toolDefaultValue || {}),
...toolDefaultValue,
_connectedSourceHandleIds: [],
_connectedTargetHandleIds: [],
selected: currentNode.data.selected,

View File

@@ -181,7 +181,7 @@ const useConfig = (id: string, payload: AgentNodeType) => {
const outputSchema = useMemo(() => {
const res: any[] = []
if (!inputs.output_schema)
if (!inputs.output_schema || !inputs.output_schema.properties)
return []
Object.keys(inputs.output_schema.properties).forEach((outputKey) => {
const output = inputs.output_schema.properties[outputKey]

View File

@@ -28,7 +28,7 @@ export const useReplaceDataSourceNode = (id: string) => {
const { newNode } = generateNewNode({
data: {
...(defaultValue as any),
...(toolDefaultValue || {}),
...toolDefaultValue,
},
position: {
x: emptyNode.position.x,

View File

@@ -89,7 +89,7 @@ const nodeDefault: NodeDefault<ToolNodeType> = {
const currTool = currCollection?.tools.find(tool => tool.name === payload.tool_name)
const output_schema = currTool?.output_schema
let res: any[] = []
if (!output_schema) {
if (!output_schema || !output_schema.properties) {
res = TOOL_OUTPUT_STRUCT
}
else {

View File

@@ -65,7 +65,7 @@ const AddBlock = ({
data: {
...(defaultValue as any),
title: nodesWithSameType.length > 0 ? `${defaultValue.title} ${nodesWithSameType.length + 1}` : defaultValue.title,
...(toolDefaultValue || {}),
...toolDefaultValue,
_isCandidate: true,
},
position: {

View File

@@ -34,6 +34,7 @@ const translation = {
publishPipeline: {
success: {
message: 'ナレッジパイプラインが公開されました',
tip: '<CustomLink>ドキュメントに移動</CustomLink>して、ドキュメントを追加または管理してください。',
},
error: {
message: '知識パイプラインの公開に失敗しました',
@@ -54,6 +55,7 @@ const translation = {
errorTip: 'パイプラインDSLのエクスポートに失敗しました',
},
details: {
createdBy: '{{author}}により作成',
structure: '構造',
structureTooltip: 'チャンク構造は、ドキュメントがどのように分割され、インデックスされるかを決定します。一般、親子、Q&Aモードを提供し、各ナレッジベースにユニークです。',
},
@@ -94,11 +96,13 @@ const translation = {
description: 'ユーザー入力フィールドは、パイプライン実行プロセス中に必要な変数を定義および収集するために使用されます。ユーザーは、フィールドタイプをカスタマイズし、異なるデータソースやドキュメント処理ステップのニーズに応じて入力値を柔軟に構成できます。',
},
addDocuments: {
title: 'ドキュメントを追加する',
steps: {
chooseDatasource: 'データソースを選択する',
processDocuments: 'ドキュメントを処理する',
processingDocuments: '文書の処理',
chooseDatasource: 'データソースを選択する',
},
backToDataSource: 'データソース',
stepOne: {
preview: 'プレビュー',
},
@@ -110,25 +114,33 @@ const translation = {
learnMore: 'もっと学ぶ',
},
characters: 'キャラクター',
backToDataSource: 'データソース',
title: 'ドキュメントを追加する',
selectOnlineDocumentTip: '最大{{count}}ページまで処理',
selectOnlineDriveTip: '最大{{count}}ファイルまで処理、各ファイル最大{{fileSize}}MB',
},
documentSettings: {
title: 'ドキュメント設定',
},
onlineDocument: {},
onlineDocument: {
pageSelectorTitle: '{{name}}ページ',
},
onlineDrive: {
notConnected: '{{name}}が接続されていません',
notConnectedTip: '{{name}}と同期するには、まず{{name}}への接続を確立する必要があります。',
breadcrumbs: {
allFiles: 'すべてのファイル',
searchPlaceholder: 'ファイルを検索...',
allBuckets: 'すべてのクラウドストレージバケット',
allFiles: 'すべてのファイル',
searchResult: '"{{folderName}}"フォルダ内で{{searchResultsLength}}件のアイテムを見つけました',
searchPlaceholder: 'ファイルを検索...',
},
notSupportedFileType: 'このファイルタイプはサポートされていません',
emptyFolder: 'このフォルダーは空です',
emptySearchResult: 'アイテムは見つかりませんでした',
notSupportedFileType: 'このファイルタイプはサポートされていません',
resetKeywords: 'キーワードをリセットする',
},
credentialSelector: {},
credentialSelector: {
name: '{{credentialName}}の{{pluginName}}',
},
configurationTip: '{{pluginName}}を設定',
conversion: {
confirm: {
title: '確認',

View File

@@ -24,6 +24,10 @@ const translation = {
externalAPIPanelDocumentation: '外部ナレッジベース連携 API の作成方法',
localDocs: 'ローカルドキュメント',
documentCount: ' ドキュメント',
docAllEnabled_one: '{{count}}ドキュメントが有効',
docAllEnabled_other: 'すべての{{count}}ドキュメントが有効',
partialEnabled_one: '合計{{count}}ドキュメント、{{num}}利用可能',
partialEnabled_other: '合計{{count}}ドキュメント、{{num}}利用可能',
wordCount: ' k 単語',
appCount: ' リンクされたアプリ',
createDataset: 'ナレッジベースを作成',
@@ -119,10 +123,6 @@ const translation = {
description: '全文検索とベクトル検索を同時に実行し、ユーザーのクエリに最適なマッチを選択するために Rerank 付けを行います。Rerank モデル API の設定が必要です。',
recommend: '推奨',
},
invertedIndex: {
title: '転置インデックス',
description: '効率的な検索に使用される構造です。各用語が含まれるドキュメントまたは Web ページを指すように、用語ごとに整理されています。',
},
change: '変更',
changeRetrievalMethod: '検索方法の変更',
keyword_search: {
@@ -226,6 +226,17 @@ const translation = {
updated: '更新された',
createFromPipeline: '知識パイプラインから作成する',
externalKnowledgeBase: '外部知識ベース',
serviceApi: {
title: 'サービスAPI',
enabled: 'サービス中',
disabled: '無効',
card: {
title: 'バックエンドサービスAPI',
endpoint: 'サービスAPIエンドポイント',
apiKey: 'APIキー',
apiReference: 'APIリファレンス',
},
},
}
export default translation

View File

@@ -25,13 +25,15 @@ const translation = {
},
result: {
resultPreview: {
viewDetails: '詳細を見る',
loading: '処理中です...お待ちください',
error: '実行中にエラーが発生しました',
viewDetails: '詳細を見る',
footerTip: 'テスト実行モードでは、最大{{count}}チャンクまでプレビュー',
},
},
ragToolSuggestions: {
title: 'RAGのための提案',
noRecommendationPluginsInstalled: '推奨プラグインがインストールされていません。<CustomLink>マーケットプレイス</CustomLink>で詳細をご確認ください',
},
}

View File

@@ -46,6 +46,7 @@ const translation = {
setVarValuePlaceholder: '変数値を設定',
needConnectTip: '接続されていないステップがあります',
maxTreeDepth: '1 ブランチあたりの最大ノード数:{{depth}}',
needAdd: '{{node}}ノードを追加する必要があります',
needEndNode: '終了ブロックを追加する必要があります',
needAnswerNode: '回答ブロックを追加する必要があります',
workflowProcess: 'ワークフロー処理',

View File

@@ -23,13 +23,13 @@
"build": "next build",
"build:docker": "next build && node scripts/optimize-standalone.js",
"start": "cp -r .next/static .next/standalone/.next/static && cp -r public .next/standalone/public && cross-env PORT=$npm_config_port HOSTNAME=$npm_config_host node .next/standalone/server.js",
"lint": "npx oxlint && pnpm eslint --cache --cache-location node_modules/.cache/eslint/.eslint-cache",
"lint-only-show-error": "npx oxlint && pnpm eslint --cache --cache-location node_modules/.cache/eslint/.eslint-cache --quiet",
"fix": "eslint --fix .",
"eslint": "eslint --cache --cache-location node_modules/.cache/eslint/.eslint-cache",
"eslint-fix": "eslint --cache --cache-location node_modules/.cache/eslint/.eslint-cache --fix",
"eslint-fix-only-show-error": "eslint --cache --cache-location node_modules/.cache/eslint/.eslint-cache --fix --quiet",
"eslint-complexity": "eslint --rule 'complexity: [error, {max: 15}]' --quiet",
"lint": "eslint --concurrency=auto --cache --cache-location node_modules/.cache/eslint/.eslint-cache",
"lint-only-show-error": "eslint --concurrency=auto --cache --cache-location node_modules/.cache/eslint/.eslint-cache --quiet",
"fix": "eslint --concurrency=auto --fix .",
"eslint": "eslint --concurrency=auto --cache --cache-location node_modules/.cache/eslint/.eslint-cache",
"eslint-fix": "eslint --concurrency=auto --cache --cache-location node_modules/.cache/eslint/.eslint-cache --fix",
"eslint-fix-only-show-error": "eslint --concurrency=auto --cache --cache-location node_modules/.cache/eslint/.eslint-cache --fix --quiet",
"eslint-complexity": "eslint --concurrency=auto --rule 'complexity: [error, {max: 15}]' --quiet",
"prepare": "cd ../ && node -e \"if (process.env.NODE_ENV !== 'production'){process.exit(1)} \" || husky ./web/.husky",
"gen-icons": "node ./app/components/base/icons/script.mjs",
"uglify-embed": "node ./bin/uglify-embed",
@@ -205,7 +205,7 @@
"bing-translate-api": "^4.0.2",
"code-inspector-plugin": "^0.18.1",
"cross-env": "^7.0.3",
"eslint": "^9.32.0",
"eslint": "^9.35.0",
"eslint-config-next": "15.5.0",
"eslint-plugin-oxlint": "^1.6.0",
"eslint-plugin-react-hooks": "^5.1.0",
@@ -235,10 +235,10 @@
},
"lint-staged": {
"**/*.js?(x)": [
"eslint --fix"
"eslint --concurrency=auto --fix"
],
"**/*.ts?(x)": [
"eslint --fix"
"eslint --concurrency=auto --fix"
]
},
"pnpm": {

612
web/pnpm-lock.yaml generated

File diff suppressed because it is too large Load Diff

View File

@@ -23,7 +23,7 @@ import type {
} from '@/types/workflow'
import { removeAccessToken } from '@/app/components/share/utils'
import type { FetchOptionType, ResponseError } from './fetch'
import { ContentType, base, baseOptions, getAccessToken } from './fetch'
import { ContentType, base, getAccessToken, getBaseOptions } from './fetch'
import { asyncRunSafe } from '@/utils'
import type {
DataSourceNodeCompletedResponse,
@@ -400,6 +400,7 @@ export const ssePost = async (
const token = localStorage.getItem('console_token')
const baseOptions = getBaseOptions()
const options = Object.assign({}, baseOptions, {
method: 'POST',
signal: abortController.signal,

View File

@@ -111,7 +111,7 @@ const baseClient = ky.create({
timeout: TIME_OUT,
})
export const baseOptions: RequestInit = {
export const getBaseOptions = (): RequestInit => ({
method: 'GET',
mode: 'cors',
credentials: 'include', // always send cookies、HTTP Basic authentication.
@@ -119,9 +119,10 @@ export const baseOptions: RequestInit = {
'Content-Type': ContentType.json,
}),
redirect: 'follow',
}
})
async function base<T>(url: string, options: FetchOptionType = {}, otherOptions: IOtherOptions = {}): Promise<T> {
const baseOptions = getBaseOptions()
const { params, body, headers, ...init } = Object.assign({}, baseOptions, options)
const {
isPublicAPI = false,

View File

@@ -48,6 +48,10 @@ export const usePipelineTemplateList = (params: PipelineTemplateListParams) => {
})
}
/**
 * Hook returning a callback that invalidates the cached "customized"
 * pipeline-template list query, forcing a refetch on next access.
 */
export const useInvalidCustomizedTemplateList = () =>
  useInvalid([...PipelineTemplateListQueryKeyPrefix, 'customized'])
export const usePipelineTemplateById = (params: PipelineTemplateByIdRequest, enabled: boolean) => {
const { template_id, type } = params
return useQuery<PipelineTemplateByIdResponse>({

View File

@@ -118,7 +118,7 @@ export const useLastRun = (flowType: FlowType, flowId: string, nodeId: string, e
}
export const useInvalidLastRun = (flowType: FlowType, flowId: string, nodeId: string) => {
return useInvalid([NAME_SPACE, flowType, 'last-run', flowId, nodeId])
return useInvalid([...useLastRunKey, flowType, flowId, nodeId])
}
// Rerun workflow or change the version of workflow