Compare commits

...

5 Commits

Author SHA1 Message Date
QuantumGhost
c8560bacb3 chore: bump version to 1.13.2 (#33681) 2026-03-18 21:54:17 +08:00
wangxiaolei
0f1b8bf5f9 fix: fix max_retries is hardcode (#33619) 2026-03-18 20:25:02 +08:00
QuantumGhost
652211ad96 fix(api): Preserving the content transform logic in fetch_prompt_messages (#33666)
Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com>
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2026-03-18 20:20:17 +08:00
wangxiaolei
c049249bc1 feat: remove weaviate client __del__ method (#33593)
Co-authored-by: QuantumGhost <obelisk.reg+git@gmail.com>
Co-authored-by: Copilot Autofix powered by AI <175728472+Copilot@users.noreply.github.com>
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2026-03-18 20:20:10 +08:00
wangxiaolei
138083dfc8 fix(api): make CreatorUserRole accept both end-user and end_user (#33638)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2026-03-18 20:19:57 +08:00
12 changed files with 187 additions and 27 deletions

View File

@@ -1,4 +1,4 @@
from pydantic import Field, NonNegativeInt, PositiveFloat, PositiveInt
from pydantic import Field, NonNegativeInt, PositiveFloat, PositiveInt, field_validator
from pydantic_settings import BaseSettings
@@ -116,3 +116,13 @@ class RedisConfig(BaseSettings):
description="Maximum connections in the Redis connection pool (unset for library default)",
default=None,
)
@field_validator("REDIS_MAX_CONNECTIONS", mode="before")
@classmethod
def _empty_string_to_none_for_max_conns(cls, v):
    """Allow empty string in env/.env to mean 'unset' (None).

    Runs in ``mode="before"`` so the coercion happens before pydantic
    tries to parse the raw env value as an integer.
    """
    # A missing value and a blank/whitespace-only string both mean
    # "fall back to the library default" (i.e. None).
    is_blank_str = isinstance(v, str) and not v.strip()
    return None if v is None or is_blank_str else v

View File

@@ -5,6 +5,7 @@ This module provides integration with Weaviate vector database for storing and r
document embeddings used in retrieval-augmented generation workflows.
"""
import atexit
import datetime
import json
import logging
@@ -37,6 +38,32 @@ _weaviate_client: weaviate.WeaviateClient | None = None
_weaviate_client_lock = threading.Lock()
def _shutdown_weaviate_client() -> None:
    """
    Best-effort atexit hook that closes the module-level Weaviate client.

    Registered with atexit so HTTP/gRPC resources are released when the
    Python interpreter exits. Errors during close are logged at debug
    level and ignored — cleanup at shutdown is strictly best-effort.
    """
    global _weaviate_client
    # Detach the shared instance under the lock so no other thread can
    # observe (or re-close) a client that is being shut down.
    with _weaviate_client_lock:
        client, _weaviate_client = _weaviate_client, None
        if client is None:
            return
        try:
            client.close()
        except Exception:
            # Best-effort cleanup; log at debug level and ignore errors.
            logger.debug("Failed to close Weaviate client during shutdown", exc_info=True)


# Register the shutdown hook once per process.
atexit.register(_shutdown_weaviate_client)
class WeaviateConfig(BaseModel):
"""
Configuration model for Weaviate connection settings.
@@ -85,18 +112,6 @@ class WeaviateVector(BaseVector):
self._client = self._init_client(config)
self._attributes = attributes
def __del__(self):
    """
    Destructor to properly close the Weaviate client connection.
    Prevents connection leaks and resource warnings.

    NOTE(review): __del__ timing is interpreter-dependent and may run
    during interpreter teardown when module globals (e.g. ``logger``)
    can already be torn down — the surrounding diff removes this method
    in favor of an explicit atexit hook for that reason.
    """
    # hasattr guard: __del__ can fire even if __init__ raised before
    # self._client was ever assigned.
    if hasattr(self, "_client") and self._client is not None:
        try:
            self._client.close()
        except Exception as e:
            # Ignore errors during cleanup as object is being destroyed
            logger.warning("Error closing Weaviate client %s", e, exc_info=True)
def _init_client(self, config: WeaviateConfig) -> weaviate.WeaviateClient:
"""
Initializes and returns a connected Weaviate client.

View File

@@ -101,7 +101,6 @@ class HttpRequestNode(Node[HttpRequestNodeData]):
timeout=self._get_request_timeout(self.node_data),
variable_pool=self.graph_runtime_state.variable_pool,
http_request_config=self._http_request_config,
max_retries=0,
ssl_verify=self.node_data.ssl_verify,
http_client=self._http_client,
file_manager=self._file_manager,

View File

@@ -256,9 +256,13 @@ def fetch_prompt_messages(
):
continue
prompt_message_content.append(content_item)
if prompt_message_content:
if not prompt_message_content:
continue
if len(prompt_message_content) == 1 and prompt_message_content[0].type == PromptMessageContentType.TEXT:
prompt_message.content = prompt_message_content[0].data
else:
prompt_message.content = prompt_message_content
filtered_prompt_messages.append(prompt_message)
filtered_prompt_messages.append(prompt_message)
elif not prompt_message.is_empty():
filtered_prompt_messages.append(prompt_message)

View File

@@ -11,6 +11,13 @@ class CreatorUserRole(StrEnum):
ACCOUNT = "account"
END_USER = "end_user"
@classmethod
def _missing_(cls, value):
    """Accept the legacy hyphenated spelling ``"end-user"`` as END_USER.

    Any other unknown value is delegated to the default enum lookup,
    which raises ValueError as usual.
    """
    return cls.END_USER if value == "end-user" else super()._missing_(value)
class WorkflowRunTriggeredFrom(StrEnum):
DEBUGGING = "debugging"

View File

@@ -1,6 +1,6 @@
[project]
name = "dify-api"
version = "1.13.1"
version = "1.13.2"
requires-python = ">=3.11,<3.13"
dependencies = [

View File

@@ -0,0 +1,106 @@
from unittest import mock
import pytest
from core.model_manager import ModelInstance
from dify_graph.model_runtime.entities import ImagePromptMessageContent, PromptMessageRole, TextPromptMessageContent
from dify_graph.model_runtime.entities.message_entities import SystemPromptMessage
from dify_graph.nodes.llm import llm_utils
from dify_graph.nodes.llm.entities import LLMNodeChatModelMessage
from dify_graph.nodes.llm.exc import NoPromptFoundError
from dify_graph.runtime import VariablePool
def _fetch_prompt_messages_with_mocked_content(content):
    """
    Drive llm_utils.fetch_prompt_messages with a single SYSTEM template
    message whose rendered content is forced to *content*.

    Model-schema lookup, template rendering and memory handling are all
    patched out, so only the content filtering/flattening logic under
    test actually executes.
    """
    pool = VariablePool.empty()
    mocked_model = mock.MagicMock(spec=ModelInstance)
    template = [
        LLMNodeChatModelMessage(
            text="You are a classifier.",
            role=PromptMessageRole.SYSTEM,
            edition_type="basic",
        )
    ]
    # No model features -> every multimodal content item is unsupported.
    schema_patch = mock.patch(
        "dify_graph.nodes.llm.llm_utils.fetch_model_schema",
        return_value=mock.MagicMock(features=[]),
    )
    # Force the rendered template to a single system message carrying *content*.
    render_patch = mock.patch(
        "dify_graph.nodes.llm.llm_utils.handle_list_messages",
        return_value=[SystemPromptMessage(content=content)],
    )
    # No chat history contributes to the result.
    memory_patch = mock.patch(
        "dify_graph.nodes.llm.llm_utils.handle_memory_chat_mode",
        return_value=[],
    )
    with schema_patch, render_patch, memory_patch:
        return llm_utils.fetch_prompt_messages(
            sys_query=None,
            sys_files=[],
            context=None,
            memory=None,
            model_instance=mocked_model,
            prompt_template=template,
            stop=["END"],
            memory_config=None,
            vision_enabled=False,
            vision_detail=ImagePromptMessageContent.DETAIL.HIGH,
            variable_pool=pool,
            jinja2_variables=[],
            template_renderer=None,
        )
def test_fetch_prompt_messages_skips_messages_when_all_contents_are_filtered_out():
    """A message whose only item is an unsupported image is dropped entirely;
    with no messages left, NoPromptFoundError must be raised."""
    image_only_content = [
        ImagePromptMessageContent(
            format="url",
            url="https://example.com/image.png",
            mime_type="image/png",
        ),
    ]
    with pytest.raises(NoPromptFoundError):
        _fetch_prompt_messages_with_mocked_content(image_only_content)
def test_fetch_prompt_messages_flattens_single_text_content_after_filtering_unsupported_multimodal_items():
    """When filtering leaves exactly one text item, the message content is
    flattened to the plain string rather than kept as a one-element list."""
    mixed_content = [
        TextPromptMessageContent(data="You are a classifier."),
        ImagePromptMessageContent(
            format="url",
            url="https://example.com/image.png",
            mime_type="image/png",
        ),
    ]
    prompt_messages, stop = _fetch_prompt_messages_with_mocked_content(mixed_content)
    assert stop == ["END"]
    assert prompt_messages == [SystemPromptMessage(content="You are a classifier.")]
def test_fetch_prompt_messages_keeps_list_content_when_multiple_supported_items_remain():
    """When more than one supported item survives filtering, the message
    content stays a list (no flattening to a string)."""
    mixed_content = [
        TextPromptMessageContent(data="You are"),
        TextPromptMessageContent(data=" a classifier."),
        ImagePromptMessageContent(
            format="url",
            url="https://example.com/image.png",
            mime_type="image/png",
        ),
    ]
    prompt_messages, stop = _fetch_prompt_messages_with_mocked_content(mixed_content)
    assert stop == ["END"]
    expected = SystemPromptMessage(
        content=[
            TextPromptMessageContent(data="You are"),
            TextPromptMessageContent(data=" a classifier."),
        ]
    )
    assert prompt_messages == [expected]

View File

@@ -0,0 +1,19 @@
import pytest
from models.enums import CreatorUserRole
def test_creator_user_role_missing_maps_hyphen_to_enum():
    """The legacy hyphenated alias resolves via the _missing_ hook."""
    # given an alias with hyphen, when converting to enum
    # (invokes StrEnum._missing_ override), then it maps to END_USER
    role = CreatorUserRole("end-user")
    assert role is CreatorUserRole.END_USER
def test_creator_user_role_missing_raises_for_unknown():
    """Values outside the enum and its known aliases still raise ValueError."""
    unknown_value = "unknown"
    with pytest.raises(ValueError):
        CreatorUserRole(unknown_value)

2
api/uv.lock generated
View File

@@ -1533,7 +1533,7 @@ wheels = [
[[package]]
name = "dify-api"
version = "1.13.1"
version = "1.13.2"
source = { virtual = "." }
dependencies = [
{ name = "aliyun-log-python-sdk" },

View File

@@ -21,7 +21,7 @@ services:
# API service
api:
image: langgenius/dify-api:1.13.1
image: langgenius/dify-api:1.13.2
restart: always
environment:
# Use the shared environment variables.
@@ -63,7 +63,7 @@ services:
# worker service
# The Celery worker for processing all queues (dataset, workflow, mail, etc.)
worker:
image: langgenius/dify-api:1.13.1
image: langgenius/dify-api:1.13.2
restart: always
environment:
# Use the shared environment variables.
@@ -102,7 +102,7 @@ services:
# worker_beat service
# Celery beat for scheduling periodic tasks.
worker_beat:
image: langgenius/dify-api:1.13.1
image: langgenius/dify-api:1.13.2
restart: always
environment:
# Use the shared environment variables.
@@ -132,7 +132,7 @@ services:
# Frontend web application.
web:
image: langgenius/dify-web:1.13.1
image: langgenius/dify-web:1.13.2
restart: always
environment:
CONSOLE_API_URL: ${CONSOLE_API_URL:-}

View File

@@ -728,7 +728,7 @@ services:
# API service
api:
image: langgenius/dify-api:1.13.1
image: langgenius/dify-api:1.13.2
restart: always
environment:
# Use the shared environment variables.
@@ -770,7 +770,7 @@ services:
# worker service
# The Celery worker for processing all queues (dataset, workflow, mail, etc.)
worker:
image: langgenius/dify-api:1.13.1
image: langgenius/dify-api:1.13.2
restart: always
environment:
# Use the shared environment variables.
@@ -809,7 +809,7 @@ services:
# worker_beat service
# Celery beat for scheduling periodic tasks.
worker_beat:
image: langgenius/dify-api:1.13.1
image: langgenius/dify-api:1.13.2
restart: always
environment:
# Use the shared environment variables.
@@ -839,7 +839,7 @@ services:
# Frontend web application.
web:
image: langgenius/dify-web:1.13.1
image: langgenius/dify-web:1.13.2
restart: always
environment:
CONSOLE_API_URL: ${CONSOLE_API_URL:-}

View File

@@ -1,7 +1,7 @@
{
"name": "dify-web",
"type": "module",
"version": "1.13.1",
"version": "1.13.2",
"private": true,
"packageManager": "pnpm@10.32.1",
"imports": {