mirror of
https://github.com/langgenius/dify.git
synced 2026-04-02 14:16:51 +00:00
Compare commits
2 Commits
fix-partne
...
codex-add-
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
50156f822b | ||
|
|
4e1d060439 |
@@ -3,6 +3,7 @@ from typing import Union
|
||||
|
||||
from graphon.model_runtime.entities.model_entities import ModelType
|
||||
from pydantic import TypeAdapter
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.orm import sessionmaker
|
||||
|
||||
from core.app.apps.advanced_chat.app_config_manager import AdvancedChatAppConfigManager
|
||||
@@ -75,17 +76,15 @@ class MessageService:
|
||||
fetch_limit = limit + 1
|
||||
|
||||
if first_id:
|
||||
first_message = (
|
||||
db.session.query(Message)
|
||||
.where(Message.conversation_id == conversation.id, Message.id == first_id)
|
||||
.first()
|
||||
first_message = db.session.scalar(
|
||||
select(Message).where(Message.conversation_id == conversation.id, Message.id == first_id).limit(1)
|
||||
)
|
||||
|
||||
if not first_message:
|
||||
raise FirstMessageNotExistsError()
|
||||
|
||||
history_messages = (
|
||||
db.session.query(Message)
|
||||
history_messages = db.session.scalars(
|
||||
select(Message)
|
||||
.where(
|
||||
Message.conversation_id == conversation.id,
|
||||
Message.created_at < first_message.created_at,
|
||||
@@ -93,16 +92,14 @@ class MessageService:
|
||||
)
|
||||
.order_by(Message.created_at.desc())
|
||||
.limit(fetch_limit)
|
||||
.all()
|
||||
)
|
||||
).all()
|
||||
else:
|
||||
history_messages = (
|
||||
db.session.query(Message)
|
||||
history_messages = db.session.scalars(
|
||||
select(Message)
|
||||
.where(Message.conversation_id == conversation.id)
|
||||
.order_by(Message.created_at.desc())
|
||||
.limit(fetch_limit)
|
||||
.all()
|
||||
)
|
||||
).all()
|
||||
|
||||
has_more = False
|
||||
if len(history_messages) > limit:
|
||||
@@ -129,7 +126,7 @@ class MessageService:
|
||||
if not user:
|
||||
return InfiniteScrollPagination(data=[], limit=limit, has_more=False)
|
||||
|
||||
base_query = db.session.query(Message)
|
||||
stmt = select(Message)
|
||||
|
||||
fetch_limit = limit + 1
|
||||
|
||||
@@ -138,28 +135,27 @@ class MessageService:
|
||||
app_model=app_model, user=user, conversation_id=conversation_id
|
||||
)
|
||||
|
||||
base_query = base_query.where(Message.conversation_id == conversation.id)
|
||||
stmt = stmt.where(Message.conversation_id == conversation.id)
|
||||
|
||||
# Check if include_ids is not None and not empty to avoid WHERE false condition
|
||||
if include_ids is not None:
|
||||
if len(include_ids) == 0:
|
||||
return InfiniteScrollPagination(data=[], limit=limit, has_more=False)
|
||||
base_query = base_query.where(Message.id.in_(include_ids))
|
||||
stmt = stmt.where(Message.id.in_(include_ids))
|
||||
|
||||
if last_id:
|
||||
last_message = base_query.where(Message.id == last_id).first()
|
||||
last_message = db.session.scalar(stmt.where(Message.id == last_id).limit(1))
|
||||
|
||||
if not last_message:
|
||||
raise LastMessageNotExistsError()
|
||||
|
||||
history_messages = (
|
||||
base_query.where(Message.created_at < last_message.created_at, Message.id != last_message.id)
|
||||
history_messages = db.session.scalars(
|
||||
stmt.where(Message.created_at < last_message.created_at, Message.id != last_message.id)
|
||||
.order_by(Message.created_at.desc())
|
||||
.limit(fetch_limit)
|
||||
.all()
|
||||
)
|
||||
).all()
|
||||
else:
|
||||
history_messages = base_query.order_by(Message.created_at.desc()).limit(fetch_limit).all()
|
||||
history_messages = db.session.scalars(stmt.order_by(Message.created_at.desc()).limit(fetch_limit)).all()
|
||||
|
||||
has_more = False
|
||||
if len(history_messages) > limit:
|
||||
@@ -214,21 +210,20 @@ class MessageService:
|
||||
def get_all_messages_feedbacks(cls, app_model: App, page: int, limit: int):
|
||||
"""Get all feedbacks of an app"""
|
||||
offset = (page - 1) * limit
|
||||
feedbacks = (
|
||||
db.session.query(MessageFeedback)
|
||||
feedbacks = db.session.scalars(
|
||||
select(MessageFeedback)
|
||||
.where(MessageFeedback.app_id == app_model.id)
|
||||
.order_by(MessageFeedback.created_at.desc(), MessageFeedback.id.desc())
|
||||
.limit(limit)
|
||||
.offset(offset)
|
||||
.all()
|
||||
)
|
||||
).all()
|
||||
|
||||
return [record.to_dict() for record in feedbacks]
|
||||
|
||||
@classmethod
|
||||
def get_message(cls, app_model: App, user: Union[Account, EndUser] | None, message_id: str):
|
||||
message = (
|
||||
db.session.query(Message)
|
||||
message = db.session.scalar(
|
||||
select(Message)
|
||||
.where(
|
||||
Message.id == message_id,
|
||||
Message.app_id == app_model.id,
|
||||
@@ -236,7 +231,7 @@ class MessageService:
|
||||
Message.from_end_user_id == (user.id if isinstance(user, EndUser) else None),
|
||||
Message.from_account_id == (user.id if isinstance(user, Account) else None),
|
||||
)
|
||||
.first()
|
||||
.limit(1)
|
||||
)
|
||||
|
||||
if not message:
|
||||
@@ -282,10 +277,10 @@ class MessageService:
|
||||
)
|
||||
else:
|
||||
if not conversation.override_model_configs:
|
||||
app_model_config = (
|
||||
db.session.query(AppModelConfig)
|
||||
app_model_config = db.session.scalar(
|
||||
select(AppModelConfig)
|
||||
.where(AppModelConfig.id == conversation.app_model_config_id, AppModelConfig.app_id == app_model.id)
|
||||
.first()
|
||||
.limit(1)
|
||||
)
|
||||
else:
|
||||
conversation_override_model_configs = _app_model_config_adapter.validate_json(
|
||||
|
||||
@@ -1,5 +1,7 @@
|
||||
from typing import Any
|
||||
|
||||
from sqlalchemy import select
|
||||
|
||||
from core.ops.entities.config_entity import BaseTracingConfig
|
||||
from core.ops.ops_trace_manager import OpsTraceManager, provider_config_map
|
||||
from extensions.ext_database import db
|
||||
@@ -15,17 +17,17 @@ class OpsService:
|
||||
:param tracing_provider: tracing provider
|
||||
:return:
|
||||
"""
|
||||
trace_config_data: TraceAppConfig | None = (
|
||||
db.session.query(TraceAppConfig)
|
||||
trace_config_data: TraceAppConfig | None = db.session.scalar(
|
||||
select(TraceAppConfig)
|
||||
.where(TraceAppConfig.app_id == app_id, TraceAppConfig.tracing_provider == tracing_provider)
|
||||
.first()
|
||||
.limit(1)
|
||||
)
|
||||
|
||||
if not trace_config_data:
|
||||
return None
|
||||
|
||||
# decrypt_token and obfuscated_token
|
||||
app = db.session.query(App).where(App.id == app_id).first()
|
||||
app = db.session.get(App, app_id)
|
||||
if not app:
|
||||
return None
|
||||
tenant_id = app.tenant_id
|
||||
@@ -182,17 +184,17 @@ class OpsService:
|
||||
project_url = None
|
||||
|
||||
# check if trace config already exists
|
||||
trace_config_data: TraceAppConfig | None = (
|
||||
db.session.query(TraceAppConfig)
|
||||
trace_config_data: TraceAppConfig | None = db.session.scalar(
|
||||
select(TraceAppConfig)
|
||||
.where(TraceAppConfig.app_id == app_id, TraceAppConfig.tracing_provider == tracing_provider)
|
||||
.first()
|
||||
.limit(1)
|
||||
)
|
||||
|
||||
if trace_config_data:
|
||||
return None
|
||||
|
||||
# get tenant id
|
||||
app = db.session.query(App).where(App.id == app_id).first()
|
||||
app = db.session.get(App, app_id)
|
||||
if not app:
|
||||
return None
|
||||
tenant_id = app.tenant_id
|
||||
@@ -224,17 +226,17 @@ class OpsService:
|
||||
raise ValueError(f"Invalid tracing provider: {tracing_provider}")
|
||||
|
||||
# check if trace config already exists
|
||||
current_trace_config = (
|
||||
db.session.query(TraceAppConfig)
|
||||
current_trace_config = db.session.scalar(
|
||||
select(TraceAppConfig)
|
||||
.where(TraceAppConfig.app_id == app_id, TraceAppConfig.tracing_provider == tracing_provider)
|
||||
.first()
|
||||
.limit(1)
|
||||
)
|
||||
|
||||
if not current_trace_config:
|
||||
return None
|
||||
|
||||
# get tenant id
|
||||
app = db.session.query(App).where(App.id == app_id).first()
|
||||
app = db.session.get(App, app_id)
|
||||
if not app:
|
||||
return None
|
||||
tenant_id = app.tenant_id
|
||||
@@ -261,10 +263,10 @@ class OpsService:
|
||||
:param tracing_provider: tracing provider
|
||||
:return:
|
||||
"""
|
||||
trace_config = (
|
||||
db.session.query(TraceAppConfig)
|
||||
trace_config = db.session.scalar(
|
||||
select(TraceAppConfig)
|
||||
.where(TraceAppConfig.app_id == app_id, TraceAppConfig.tracing_provider == tracing_provider)
|
||||
.first()
|
||||
.limit(1)
|
||||
)
|
||||
|
||||
if not trace_config:
|
||||
|
||||
@@ -151,12 +151,7 @@ class TestMessageServicePaginationByFirstId:
|
||||
for i in range(5)
|
||||
]
|
||||
|
||||
mock_query = MagicMock()
|
||||
mock_db.session.query.return_value = mock_query
|
||||
mock_query.where.return_value = mock_query
|
||||
mock_query.order_by.return_value = mock_query
|
||||
mock_query.limit.return_value = mock_query
|
||||
mock_query.all.return_value = messages
|
||||
mock_db.session.scalars.return_value.all.return_value = messages
|
||||
|
||||
# Act
|
||||
result = MessageService.pagination_by_first_id(
|
||||
@@ -196,12 +191,7 @@ class TestMessageServicePaginationByFirstId:
|
||||
for i in range(5)
|
||||
]
|
||||
|
||||
mock_query = MagicMock()
|
||||
mock_db.session.query.return_value = mock_query
|
||||
mock_query.where.return_value = mock_query
|
||||
mock_query.order_by.return_value = mock_query
|
||||
mock_query.limit.return_value = mock_query
|
||||
mock_query.all.return_value = messages
|
||||
mock_db.session.scalars.return_value.all.return_value = messages
|
||||
|
||||
# Act
|
||||
result = MessageService.pagination_by_first_id(
|
||||
@@ -246,31 +236,8 @@ class TestMessageServicePaginationByFirstId:
|
||||
for i in range(5)
|
||||
]
|
||||
|
||||
# Setup query mocks
|
||||
mock_query_first = MagicMock()
|
||||
mock_query_history = MagicMock()
|
||||
|
||||
query_calls = []
|
||||
|
||||
def query_side_effect(*args):
|
||||
if args[0] == Message:
|
||||
query_calls.append(args)
|
||||
if len(query_calls) == 1:
|
||||
return mock_query_first
|
||||
else:
|
||||
return mock_query_history
|
||||
|
||||
mock_db.session.query.side_effect = [mock_query_first, mock_query_history]
|
||||
|
||||
# Setup first message query
|
||||
mock_query_first.where.return_value = mock_query_first
|
||||
mock_query_first.first.return_value = first_message
|
||||
|
||||
# Setup history messages query
|
||||
mock_query_history.where.return_value = mock_query_history
|
||||
mock_query_history.order_by.return_value = mock_query_history
|
||||
mock_query_history.limit.return_value = mock_query_history
|
||||
mock_query_history.all.return_value = history_messages
|
||||
mock_db.session.scalar.return_value = first_message
|
||||
mock_db.session.scalars.return_value.all.return_value = history_messages
|
||||
|
||||
# Act
|
||||
result = MessageService.pagination_by_first_id(
|
||||
@@ -285,8 +252,6 @@ class TestMessageServicePaginationByFirstId:
|
||||
# Assert
|
||||
assert len(result.data) == 5
|
||||
assert result.has_more is False
|
||||
mock_query_first.where.assert_called_once()
|
||||
mock_query_history.where.assert_called_once()
|
||||
|
||||
# Test 06: First message not found
|
||||
@patch("services.message_service.db")
|
||||
@@ -300,10 +265,7 @@ class TestMessageServicePaginationByFirstId:
|
||||
|
||||
mock_conversation_service.get_conversation.return_value = conversation
|
||||
|
||||
mock_query = MagicMock()
|
||||
mock_db.session.query.return_value = mock_query
|
||||
mock_query.where.return_value = mock_query
|
||||
mock_query.first.return_value = None # Message not found
|
||||
mock_db.session.scalar.return_value = None # Message not found
|
||||
|
||||
# Act & Assert
|
||||
with pytest.raises(FirstMessageNotExistsError):
|
||||
@@ -336,12 +298,7 @@ class TestMessageServicePaginationByFirstId:
|
||||
for i in range(11)
|
||||
]
|
||||
|
||||
mock_query = MagicMock()
|
||||
mock_db.session.query.return_value = mock_query
|
||||
mock_query.where.return_value = mock_query
|
||||
mock_query.order_by.return_value = mock_query
|
||||
mock_query.limit.return_value = mock_query
|
||||
mock_query.all.return_value = messages
|
||||
mock_db.session.scalars.return_value.all.return_value = messages
|
||||
|
||||
# Act
|
||||
result = MessageService.pagination_by_first_id(
|
||||
@@ -369,12 +326,7 @@ class TestMessageServicePaginationByFirstId:
|
||||
|
||||
mock_conversation_service.get_conversation.return_value = conversation
|
||||
|
||||
mock_query = MagicMock()
|
||||
mock_db.session.query.return_value = mock_query
|
||||
mock_query.where.return_value = mock_query
|
||||
mock_query.order_by.return_value = mock_query
|
||||
mock_query.limit.return_value = mock_query
|
||||
mock_query.all.return_value = []
|
||||
mock_db.session.scalars.return_value.all.return_value = []
|
||||
|
||||
# Act
|
||||
result = MessageService.pagination_by_first_id(
|
||||
@@ -443,12 +395,7 @@ class TestMessageServicePaginationByLastId:
|
||||
for i in range(5)
|
||||
]
|
||||
|
||||
mock_query = MagicMock()
|
||||
mock_db.session.query.return_value = mock_query
|
||||
mock_query.where.return_value = mock_query
|
||||
mock_query.order_by.return_value = mock_query
|
||||
mock_query.limit.return_value = mock_query
|
||||
mock_query.all.return_value = messages
|
||||
mock_db.session.scalars.return_value.all.return_value = messages
|
||||
|
||||
# Act
|
||||
result = MessageService.pagination_by_last_id(
|
||||
@@ -485,22 +432,8 @@ class TestMessageServicePaginationByLastId:
|
||||
for i in range(6, 10)
|
||||
]
|
||||
|
||||
# Setup base query mock that returns itself for chaining
|
||||
mock_base_query = MagicMock()
|
||||
mock_db.session.query.return_value = mock_base_query
|
||||
|
||||
# First where() call for last_id lookup
|
||||
mock_query_last = MagicMock()
|
||||
mock_query_last.first.return_value = last_message
|
||||
|
||||
# Second where() call for history messages
|
||||
mock_query_history = MagicMock()
|
||||
mock_query_history.order_by.return_value = mock_query_history
|
||||
mock_query_history.limit.return_value = mock_query_history
|
||||
mock_query_history.all.return_value = new_messages
|
||||
|
||||
# Setup where() to return different mocks on consecutive calls
|
||||
mock_base_query.where.side_effect = [mock_query_last, mock_query_history]
|
||||
mock_db.session.scalar.return_value = last_message
|
||||
mock_db.session.scalars.return_value.all.return_value = new_messages
|
||||
|
||||
# Act
|
||||
result = MessageService.pagination_by_last_id(
|
||||
@@ -522,10 +455,7 @@ class TestMessageServicePaginationByLastId:
|
||||
app = factory.create_app_mock()
|
||||
user = factory.create_end_user_mock()
|
||||
|
||||
mock_query = MagicMock()
|
||||
mock_db.session.query.return_value = mock_query
|
||||
mock_query.where.return_value = mock_query
|
||||
mock_query.first.return_value = None # Message not found
|
||||
mock_db.session.scalar.return_value = None # Message not found
|
||||
|
||||
# Act & Assert
|
||||
with pytest.raises(LastMessageNotExistsError):
|
||||
@@ -557,12 +487,7 @@ class TestMessageServicePaginationByLastId:
|
||||
for i in range(5)
|
||||
]
|
||||
|
||||
mock_query = MagicMock()
|
||||
mock_db.session.query.return_value = mock_query
|
||||
mock_query.where.return_value = mock_query
|
||||
mock_query.order_by.return_value = mock_query
|
||||
mock_query.limit.return_value = mock_query
|
||||
mock_query.all.return_value = messages
|
||||
mock_db.session.scalars.return_value.all.return_value = messages
|
||||
|
||||
# Act
|
||||
result = MessageService.pagination_by_last_id(
|
||||
@@ -576,8 +501,6 @@ class TestMessageServicePaginationByLastId:
|
||||
# Assert
|
||||
assert len(result.data) == 5
|
||||
assert result.has_more is False
|
||||
# Verify conversation_id was used in query
|
||||
mock_query.where.assert_called()
|
||||
mock_conversation_service.get_conversation.assert_called_once()
|
||||
|
||||
# Test 14: Pagination with include_ids filter
|
||||
@@ -594,12 +517,7 @@ class TestMessageServicePaginationByLastId:
|
||||
factory.create_message_mock(message_id="msg-003"),
|
||||
]
|
||||
|
||||
mock_query = MagicMock()
|
||||
mock_db.session.query.return_value = mock_query
|
||||
mock_query.where.return_value = mock_query
|
||||
mock_query.order_by.return_value = mock_query
|
||||
mock_query.limit.return_value = mock_query
|
||||
mock_query.all.return_value = messages
|
||||
mock_db.session.scalars.return_value.all.return_value = messages
|
||||
|
||||
# Act
|
||||
result = MessageService.pagination_by_last_id(
|
||||
@@ -632,12 +550,7 @@ class TestMessageServicePaginationByLastId:
|
||||
for i in range(11)
|
||||
]
|
||||
|
||||
mock_query = MagicMock()
|
||||
mock_db.session.query.return_value = mock_query
|
||||
mock_query.where.return_value = mock_query
|
||||
mock_query.order_by.return_value = mock_query
|
||||
mock_query.limit.return_value = mock_query
|
||||
mock_query.all.return_value = messages
|
||||
mock_db.session.scalars.return_value.all.return_value = messages
|
||||
|
||||
# Act
|
||||
result = MessageService.pagination_by_last_id(
|
||||
@@ -743,17 +656,13 @@ class TestMessageServiceGetMessage:
|
||||
user = factory.create_end_user_mock(user_id="end-user-123")
|
||||
message = factory.create_message_mock()
|
||||
|
||||
mock_query = MagicMock()
|
||||
mock_db.session.query.return_value = mock_query
|
||||
mock_query.where.return_value = mock_query
|
||||
mock_query.first.return_value = message
|
||||
mock_db.session.scalar.return_value = message
|
||||
|
||||
# Act
|
||||
result = MessageService.get_message(app_model=app, user=user, message_id="msg-123")
|
||||
|
||||
# Assert
|
||||
assert result == message
|
||||
mock_query.where.assert_called_once()
|
||||
|
||||
# Test 21: get_message success for Account (Admin)
|
||||
@patch("services.message_service.db")
|
||||
@@ -767,10 +676,7 @@ class TestMessageServiceGetMessage:
|
||||
user.id = "account-123"
|
||||
message = factory.create_message_mock()
|
||||
|
||||
mock_query = MagicMock()
|
||||
mock_db.session.query.return_value = mock_query
|
||||
mock_query.where.return_value = mock_query
|
||||
mock_query.first.return_value = message
|
||||
mock_db.session.scalar.return_value = message
|
||||
|
||||
# Act
|
||||
result = MessageService.get_message(app_model=app, user=user, message_id="msg-123")
|
||||
@@ -786,10 +692,7 @@ class TestMessageServiceGetMessage:
|
||||
app = factory.create_app_mock()
|
||||
user = factory.create_end_user_mock()
|
||||
|
||||
mock_query = MagicMock()
|
||||
mock_db.session.query.return_value = mock_query
|
||||
mock_query.where.return_value = mock_query
|
||||
mock_query.first.return_value = None
|
||||
mock_db.session.scalar.return_value = None
|
||||
|
||||
# Act & Assert
|
||||
with pytest.raises(MessageNotExistsError):
|
||||
@@ -899,21 +802,13 @@ class TestMessageServiceFeedback:
|
||||
feedback = MagicMock()
|
||||
feedback.to_dict.return_value = {"id": "fb-1"}
|
||||
|
||||
mock_query = MagicMock()
|
||||
mock_db.session.query.return_value = mock_query
|
||||
mock_query.where.return_value = mock_query
|
||||
mock_query.order_by.return_value = mock_query
|
||||
mock_query.limit.return_value = mock_query
|
||||
mock_query.offset.return_value = mock_query
|
||||
mock_query.all.return_value = [feedback]
|
||||
mock_db.session.scalars.return_value.all.return_value = [feedback]
|
||||
|
||||
# Act
|
||||
result = MessageService.get_all_messages_feedbacks(app_model=app, page=1, limit=10)
|
||||
|
||||
# Assert
|
||||
assert result == [{"id": "fb-1"}]
|
||||
mock_query.limit.assert_called_with(10)
|
||||
mock_query.offset.assert_called_with(0)
|
||||
|
||||
|
||||
class TestMessageServiceSuggestedQuestions:
|
||||
@@ -1015,10 +910,7 @@ class TestMessageServiceSuggestedQuestions:
|
||||
app_model_config.suggested_questions_after_answer_dict = {"enabled": True}
|
||||
app_model_config.model_dict = {"provider": "openai", "name": "gpt-4"}
|
||||
|
||||
mock_query = MagicMock()
|
||||
mock_db.session.query.return_value = mock_query
|
||||
mock_query.where.return_value = mock_query
|
||||
mock_query.first.return_value = app_model_config
|
||||
mock_db.session.scalar.return_value = app_model_config
|
||||
|
||||
mock_llm_gen.generate_suggested_questions_after_answer.return_value = ["Q1?"]
|
||||
|
||||
@@ -1029,7 +921,6 @@ class TestMessageServiceSuggestedQuestions:
|
||||
|
||||
# Assert
|
||||
assert result == ["Q1?"]
|
||||
mock_query.first.assert_called_once()
|
||||
mock_llm_gen.generate_suggested_questions_after_answer.assert_called_once()
|
||||
|
||||
# Test 30: get_suggested_questions_after_answer - Disabled Error
|
||||
|
||||
@@ -12,28 +12,27 @@ class TestOpsService:
|
||||
@patch("services.ops_service.OpsTraceManager")
|
||||
def test_get_tracing_app_config_no_config(self, mock_ops_trace_manager, mock_db):
|
||||
# Arrange
|
||||
mock_db.session.query.return_value.where.return_value.first.return_value = None
|
||||
mock_db.session.scalar.return_value = None
|
||||
|
||||
# Act
|
||||
result = OpsService.get_tracing_app_config("app_id", "arize")
|
||||
|
||||
# Assert
|
||||
assert result is None
|
||||
mock_db.session.query.assert_called_with(TraceAppConfig)
|
||||
|
||||
@patch("services.ops_service.db")
|
||||
@patch("services.ops_service.OpsTraceManager")
|
||||
def test_get_tracing_app_config_no_app(self, mock_ops_trace_manager, mock_db):
|
||||
# Arrange
|
||||
trace_config = MagicMock(spec=TraceAppConfig)
|
||||
mock_db.session.query.return_value.where.return_value.first.side_effect = [trace_config, None]
|
||||
mock_db.session.scalar.return_value = trace_config
|
||||
mock_db.session.get.return_value = None
|
||||
|
||||
# Act
|
||||
result = OpsService.get_tracing_app_config("app_id", "arize")
|
||||
|
||||
# Assert
|
||||
assert result is None
|
||||
assert mock_db.session.query.call_count == 2
|
||||
|
||||
@patch("services.ops_service.db")
|
||||
@patch("services.ops_service.OpsTraceManager")
|
||||
@@ -43,7 +42,8 @@ class TestOpsService:
|
||||
trace_config.tracing_config = None
|
||||
app = MagicMock(spec=App)
|
||||
app.tenant_id = "tenant_id"
|
||||
mock_db.session.query.return_value.where.return_value.first.side_effect = [trace_config, app]
|
||||
mock_db.session.scalar.return_value = trace_config
|
||||
mock_db.session.get.return_value = app
|
||||
|
||||
# Act & Assert
|
||||
with pytest.raises(ValueError, match="Tracing config cannot be None."):
|
||||
@@ -72,7 +72,8 @@ class TestOpsService:
|
||||
trace_config.to_dict.return_value = {"tracing_config": {"project_url": default_url}}
|
||||
app = MagicMock(spec=App)
|
||||
app.tenant_id = "tenant_id"
|
||||
mock_db.session.query.return_value.where.return_value.first.side_effect = [trace_config, app]
|
||||
mock_db.session.scalar.return_value = trace_config
|
||||
mock_db.session.get.return_value = app
|
||||
|
||||
mock_ops_trace_manager.decrypt_tracing_config.return_value = {}
|
||||
mock_ops_trace_manager.obfuscated_decrypt_token.return_value = {}
|
||||
@@ -97,7 +98,8 @@ class TestOpsService:
|
||||
trace_config.to_dict.return_value = {"tracing_config": {"project_url": "success_url"}}
|
||||
app = MagicMock(spec=App)
|
||||
app.tenant_id = "tenant_id"
|
||||
mock_db.session.query.return_value.where.return_value.first.side_effect = [trace_config, app]
|
||||
mock_db.session.scalar.return_value = trace_config
|
||||
mock_db.session.get.return_value = app
|
||||
|
||||
mock_ops_trace_manager.decrypt_tracing_config.return_value = {}
|
||||
mock_ops_trace_manager.obfuscated_decrypt_token.return_value = {}
|
||||
@@ -118,7 +120,8 @@ class TestOpsService:
|
||||
trace_config.to_dict.return_value = {"tracing_config": {"project_url": "https://api.langfuse.com/project/key"}}
|
||||
app = MagicMock(spec=App)
|
||||
app.tenant_id = "tenant_id"
|
||||
mock_db.session.query.return_value.where.return_value.first.side_effect = [trace_config, app]
|
||||
mock_db.session.scalar.return_value = trace_config
|
||||
mock_db.session.get.return_value = app
|
||||
|
||||
mock_ops_trace_manager.decrypt_tracing_config.return_value = {"host": "https://api.langfuse.com"}
|
||||
mock_ops_trace_manager.obfuscated_decrypt_token.return_value = {"host": "https://api.langfuse.com"}
|
||||
@@ -139,7 +142,8 @@ class TestOpsService:
|
||||
trace_config.to_dict.return_value = {"tracing_config": {"project_url": "https://api.langfuse.com/"}}
|
||||
app = MagicMock(spec=App)
|
||||
app.tenant_id = "tenant_id"
|
||||
mock_db.session.query.return_value.where.return_value.first.side_effect = [trace_config, app]
|
||||
mock_db.session.scalar.return_value = trace_config
|
||||
mock_db.session.get.return_value = app
|
||||
|
||||
mock_ops_trace_manager.decrypt_tracing_config.return_value = {"host": "https://api.langfuse.com"}
|
||||
mock_ops_trace_manager.obfuscated_decrypt_token.return_value = {"host": "https://api.langfuse.com"}
|
||||
@@ -189,7 +193,7 @@ class TestOpsService:
|
||||
mock_ops_trace_manager.check_trace_config_is_effective.return_value = True
|
||||
mock_ops_trace_manager.get_trace_config_project_url.side_effect = Exception("error")
|
||||
mock_ops_trace_manager.get_trace_config_project_key.side_effect = Exception("error")
|
||||
mock_db.session.query.return_value.where.return_value.first.return_value = MagicMock(spec=TraceAppConfig)
|
||||
mock_db.session.scalar.return_value = MagicMock(spec=TraceAppConfig)
|
||||
|
||||
# Act
|
||||
result = OpsService.create_tracing_app_config("app_id", provider, config)
|
||||
@@ -206,7 +210,8 @@ class TestOpsService:
|
||||
mock_ops_trace_manager.get_trace_config_project_key.return_value = "key"
|
||||
app = MagicMock(spec=App)
|
||||
app.tenant_id = "tenant_id"
|
||||
mock_db.session.query.return_value.where.return_value.first.side_effect = [None, app]
|
||||
mock_db.session.scalar.return_value = None
|
||||
mock_db.session.get.return_value = app
|
||||
mock_ops_trace_manager.encrypt_tracing_config.return_value = {}
|
||||
|
||||
# Act
|
||||
@@ -223,7 +228,7 @@ class TestOpsService:
|
||||
# Arrange
|
||||
provider = TracingProviderEnum.ARIZE
|
||||
mock_ops_trace_manager.check_trace_config_is_effective.return_value = True
|
||||
mock_db.session.query.return_value.where.return_value.first.return_value = MagicMock(spec=TraceAppConfig)
|
||||
mock_db.session.scalar.return_value = MagicMock(spec=TraceAppConfig)
|
||||
|
||||
# Act
|
||||
result = OpsService.create_tracing_app_config("app_id", provider, {})
|
||||
@@ -237,7 +242,8 @@ class TestOpsService:
|
||||
# Arrange
|
||||
provider = TracingProviderEnum.ARIZE
|
||||
mock_ops_trace_manager.check_trace_config_is_effective.return_value = True
|
||||
mock_db.session.query.return_value.where.return_value.first.side_effect = [None, None]
|
||||
mock_db.session.scalar.return_value = None
|
||||
mock_db.session.get.return_value = None
|
||||
|
||||
# Act
|
||||
result = OpsService.create_tracing_app_config("app_id", provider, {})
|
||||
@@ -253,7 +259,8 @@ class TestOpsService:
|
||||
mock_ops_trace_manager.check_trace_config_is_effective.return_value = True
|
||||
app = MagicMock(spec=App)
|
||||
app.tenant_id = "tenant_id"
|
||||
mock_db.session.query.return_value.where.return_value.first.side_effect = [None, app]
|
||||
mock_db.session.scalar.return_value = None
|
||||
mock_db.session.get.return_value = app
|
||||
mock_ops_trace_manager.encrypt_tracing_config.return_value = {}
|
||||
|
||||
# Act
|
||||
@@ -274,7 +281,8 @@ class TestOpsService:
|
||||
mock_ops_trace_manager.get_trace_config_project_url.return_value = "http://project_url"
|
||||
app = MagicMock(spec=App)
|
||||
app.tenant_id = "tenant_id"
|
||||
mock_db.session.query.return_value.where.return_value.first.side_effect = [None, app]
|
||||
mock_db.session.scalar.return_value = None
|
||||
mock_db.session.get.return_value = app
|
||||
mock_ops_trace_manager.encrypt_tracing_config.return_value = {"encrypted": "config"}
|
||||
|
||||
# Act
|
||||
@@ -297,7 +305,7 @@ class TestOpsService:
|
||||
def test_update_tracing_app_config_no_config(self, mock_ops_trace_manager, mock_db):
|
||||
# Arrange
|
||||
provider = TracingProviderEnum.ARIZE
|
||||
mock_db.session.query.return_value.where.return_value.first.return_value = None
|
||||
mock_db.session.scalar.return_value = None
|
||||
|
||||
# Act
|
||||
result = OpsService.update_tracing_app_config("app_id", provider, {})
|
||||
@@ -311,7 +319,8 @@ class TestOpsService:
|
||||
# Arrange
|
||||
provider = TracingProviderEnum.ARIZE
|
||||
current_config = MagicMock(spec=TraceAppConfig)
|
||||
mock_db.session.query.return_value.where.return_value.first.side_effect = [current_config, None]
|
||||
mock_db.session.scalar.return_value = current_config
|
||||
mock_db.session.get.return_value = None
|
||||
|
||||
# Act
|
||||
result = OpsService.update_tracing_app_config("app_id", provider, {})
|
||||
@@ -327,7 +336,8 @@ class TestOpsService:
|
||||
current_config = MagicMock(spec=TraceAppConfig)
|
||||
app = MagicMock(spec=App)
|
||||
app.tenant_id = "tenant_id"
|
||||
mock_db.session.query.return_value.where.return_value.first.side_effect = [current_config, app]
|
||||
mock_db.session.scalar.return_value = current_config
|
||||
mock_db.session.get.return_value = app
|
||||
mock_ops_trace_manager.decrypt_tracing_config.return_value = {}
|
||||
mock_ops_trace_manager.check_trace_config_is_effective.return_value = False
|
||||
|
||||
@@ -344,7 +354,8 @@ class TestOpsService:
|
||||
current_config.to_dict.return_value = {"some": "data"}
|
||||
app = MagicMock(spec=App)
|
||||
app.tenant_id = "tenant_id"
|
||||
mock_db.session.query.return_value.where.return_value.first.side_effect = [current_config, app]
|
||||
mock_db.session.scalar.return_value = current_config
|
||||
mock_db.session.get.return_value = app
|
||||
mock_ops_trace_manager.decrypt_tracing_config.return_value = {}
|
||||
mock_ops_trace_manager.check_trace_config_is_effective.return_value = True
|
||||
|
||||
@@ -358,7 +369,7 @@ class TestOpsService:
|
||||
@patch("services.ops_service.db")
|
||||
def test_delete_tracing_app_config_no_config(self, mock_db):
|
||||
# Arrange
|
||||
mock_db.session.query.return_value.where.return_value.first.return_value = None
|
||||
mock_db.session.scalar.return_value = None
|
||||
|
||||
# Act
|
||||
result = OpsService.delete_tracing_app_config("app_id", "arize")
|
||||
@@ -370,7 +381,7 @@ class TestOpsService:
|
||||
def test_delete_tracing_app_config_success(self, mock_db):
|
||||
# Arrange
|
||||
trace_config = MagicMock(spec=TraceAppConfig)
|
||||
mock_db.session.query.return_value.where.return_value.first.return_value = trace_config
|
||||
mock_db.session.scalar.return_value = trace_config
|
||||
|
||||
# Act
|
||||
result = OpsService.delete_tracing_app_config("app_id", "arize")
|
||||
|
||||
19
dev/start-podman-compose
Executable file
19
dev/start-podman-compose
Executable file
@@ -0,0 +1,19 @@
|
||||
#!/usr/bin/env bash
|
||||
set -euo pipefail
|
||||
|
||||
SCRIPT_DIR="$(dirname "$(realpath "$0")")"
|
||||
ROOT="$(dirname "$SCRIPT_DIR")"
|
||||
|
||||
PROJECT_NAME="${PODMAN_PROJECT_NAME:-dify}"
|
||||
COMPOSE_FILE="${PODMAN_COMPOSE_FILE:-$ROOT/docker/podman-compose.middleware.yaml}"
|
||||
NETWORK_NAME="${PODMAN_NETWORK_NAME:-dify}"
|
||||
|
||||
cd "$ROOT/docker"
|
||||
|
||||
podman compose -f "$COMPOSE_FILE" -p "$PROJECT_NAME" down -v --remove-orphans || true
|
||||
|
||||
if ! podman network inspect "$NETWORK_NAME" >/dev/null 2>&1; then
|
||||
podman network create "$NETWORK_NAME"
|
||||
fi
|
||||
|
||||
podman compose -f "$COMPOSE_FILE" --profile postgresql --profile weaviate -p "$PROJECT_NAME" up -d
|
||||
20
dev/start-web-image
Executable file
20
dev/start-web-image
Executable file
@@ -0,0 +1,20 @@
|
||||
#!/usr/bin/env bash
# Run the Dify web frontend container image with podman on the host network.
set -euo pipefail

# Externally overridable settings; env var names and defaults are the script's interface.
BACKEND_URL="${DIFY_WEB_BACKEND_URL:-http://localhost:5001}"
IMAGE="${DIFY_WEB_IMAGE:-docker.io/langgenius/dify-web:1.13.2}"
HOST_BIND="${DIFY_WEB_HOSTNAME:-localhost}"
WEB_URL="${DIFY_WEB_FRONTEND_URL:-}"
CONTAINER_NAME="${DIFY_WEB_CONTAINER_NAME:-dify-web}"
NEXT_PUBLIC_COOKIE_DOMAIN="${DIFY_WEB_NEXT_PUBLIC_COOKIE_DOMAIN:-}"

# Drop a stale container left over from a previous run, if any.
podman rm -f "$CONTAINER_NAME" >/dev/null 2>&1 || true

# Assemble the run invocation; optional variables are forwarded only when non-empty.
run_args=(
  --rm --name "$CONTAINER_NAME" --network host
  -e HOSTNAME="${HOST_BIND}"
  -e CONSOLE_API_URL="${BACKEND_URL}"
  -e APP_API_URL="${BACKEND_URL}"
  -e SERVICE_API_URL="${BACKEND_URL}"
)
if [ -n "$WEB_URL" ]; then
  run_args+=(-e CONSOLE_WEB_URL="${WEB_URL}")
fi
if [ -n "$NEXT_PUBLIC_COOKIE_DOMAIN" ]; then
  run_args+=(-e NEXT_PUBLIC_COOKIE_DOMAIN="${NEXT_PUBLIC_COOKIE_DOMAIN}")
fi

exec podman run "${run_args[@]}" "${IMAGE}"
|
||||
10
dev/stop-podman-compose
Executable file
10
dev/stop-podman-compose
Executable file
@@ -0,0 +1,10 @@
|
||||
#!/usr/bin/env bash
# Stop the Dify middleware stack started via podman compose and remove its volumes.
set -euo pipefail

# Repository root, derived from this script's path.
here="$(dirname "$(realpath "$0")")"
repo_root="$(dirname "$here")"

# Same env var overrides as the start script, so both target the same stack.
COMPOSE_FILE="${PODMAN_COMPOSE_FILE:-$repo_root/docker/podman-compose.middleware.yaml}"
PROJECT_NAME="${PODMAN_PROJECT_NAME:-dify}"

cd "$repo_root/docker"
podman compose -f "$COMPOSE_FILE" -p "$PROJECT_NAME" down -v
|
||||
267
docker/podman-compose.middleware.yaml
Normal file
267
docker/podman-compose.middleware.yaml
Normal file
@@ -0,0 +1,267 @@
|
||||
services:
|
||||
# The postgres database.
|
||||
db_postgres:
|
||||
image: docker.io/postgres:15-alpine
|
||||
profiles:
|
||||
- ""
|
||||
- postgresql
|
||||
restart: always
|
||||
env_file:
|
||||
- ./middleware.env
|
||||
environment:
|
||||
POSTGRES_PASSWORD: ${DB_PASSWORD:-difyai123456}
|
||||
POSTGRES_DB: ${DB_DATABASE:-dify}
|
||||
PGDATA: ${PGDATA:-/var/lib/postgresql/data/pgdata}
|
||||
command: >
|
||||
postgres -c 'max_connections=${POSTGRES_MAX_CONNECTIONS:-100}'
|
||||
-c 'shared_buffers=${POSTGRES_SHARED_BUFFERS:-128MB}'
|
||||
-c 'work_mem=${POSTGRES_WORK_MEM:-4MB}'
|
||||
-c 'maintenance_work_mem=${POSTGRES_MAINTENANCE_WORK_MEM:-64MB}'
|
||||
-c 'effective_cache_size=${POSTGRES_EFFECTIVE_CACHE_SIZE:-4096MB}'
|
||||
-c 'statement_timeout=${POSTGRES_STATEMENT_TIMEOUT:-0}'
|
||||
-c 'idle_in_transaction_session_timeout=${POSTGRES_IDLE_IN_TRANSACTION_SESSION_TIMEOUT:-0}'
|
||||
volumes:
|
||||
- ${PGDATA_HOST_VOLUME:-./volumes/db/data}:/var/lib/postgresql/data
|
||||
ports:
|
||||
- "${EXPOSE_POSTGRES_PORT:-5432}:5432"
|
||||
healthcheck:
|
||||
test:
|
||||
[
|
||||
"CMD",
|
||||
"pg_isready",
|
||||
"-h",
|
||||
"db_postgres",
|
||||
"-U",
|
||||
"${DB_USERNAME:-postgres}",
|
||||
"-d",
|
||||
"${DB_DATABASE:-dify}",
|
||||
]
|
||||
interval: 1s
|
||||
timeout: 3s
|
||||
retries: 30
|
||||
networks:
|
||||
- dify
|
||||
|
||||
db_mysql:
|
||||
image: docker.io/mysql:8.0
|
||||
profiles:
|
||||
- mysql
|
||||
restart: always
|
||||
env_file:
|
||||
- ./middleware.env
|
||||
environment:
|
||||
MYSQL_ROOT_PASSWORD: ${DB_PASSWORD:-difyai123456}
|
||||
MYSQL_DATABASE: ${DB_DATABASE:-dify}
|
||||
command: >
|
||||
--max_connections=1000
|
||||
--innodb_buffer_pool_size=${MYSQL_INNODB_BUFFER_POOL_SIZE:-512M}
|
||||
--innodb_log_file_size=${MYSQL_INNODB_LOG_FILE_SIZE:-128M}
|
||||
--innodb_flush_log_at_trx_commit=${MYSQL_INNODB_FLUSH_LOG_AT_TRX_COMMIT:-2}
|
||||
volumes:
|
||||
- ${MYSQL_HOST_VOLUME:-./volumes/mysql/data}:/var/lib/mysql
|
||||
ports:
|
||||
- "${EXPOSE_MYSQL_PORT:-3306}:3306"
|
||||
healthcheck:
|
||||
test:
|
||||
[
|
||||
"CMD",
|
||||
"mysqladmin",
|
||||
"ping",
|
||||
"-u",
|
||||
"root",
|
||||
"-p${DB_PASSWORD:-difyai123456}",
|
||||
]
|
||||
interval: 1s
|
||||
timeout: 3s
|
||||
retries: 30
|
||||
networks:
|
||||
- dify
|
||||
|
||||
# The redis cache.
|
||||
redis:
|
||||
image: docker.io/redis:6-alpine
|
||||
restart: always
|
||||
env_file:
|
||||
- ./middleware.env
|
||||
environment:
|
||||
REDISCLI_AUTH: ${REDIS_PASSWORD:-difyai123456}
|
||||
volumes:
|
||||
# Mount the redis data directory to the container.
|
||||
- ${REDIS_HOST_VOLUME:-./volumes/redis/data}:/data
|
||||
# Set the redis password when startup redis server.
|
||||
command: redis-server --requirepass ${REDIS_PASSWORD:-difyai123456}
|
||||
ports:
|
||||
- "${EXPOSE_REDIS_PORT:-6379}:6379"
|
||||
healthcheck:
|
||||
test:
|
||||
[
|
||||
"CMD-SHELL",
|
||||
"redis-cli -a ${REDIS_PASSWORD:-difyai123456} ping | grep -q PONG",
|
||||
]
|
||||
networks:
|
||||
- dify
|
||||
|
||||
# The DifySandbox
|
||||
sandbox:
|
||||
image: docker.io/langgenius/dify-sandbox:0.2.12
|
||||
restart: always
|
||||
env_file:
|
||||
- ./middleware.env
|
||||
environment:
|
||||
# The DifySandbox configurations
|
||||
# Make sure you are changing this key for your deployment with a strong key.
|
||||
# You can generate a strong key using `openssl rand -base64 42`.
|
||||
API_KEY: ${SANDBOX_API_KEY:-dify-sandbox}
|
||||
GIN_MODE: ${SANDBOX_GIN_MODE:-release}
|
||||
WORKER_TIMEOUT: ${SANDBOX_WORKER_TIMEOUT:-15}
|
||||
ENABLE_NETWORK: ${SANDBOX_ENABLE_NETWORK:-true}
|
||||
HTTP_PROXY: ${SANDBOX_HTTP_PROXY:-http://ssrf_proxy:3128}
|
||||
HTTPS_PROXY: ${SANDBOX_HTTPS_PROXY:-http://ssrf_proxy:3128}
|
||||
SANDBOX_PORT: ${SANDBOX_PORT:-8194}
|
||||
PIP_MIRROR_URL: ${PIP_MIRROR_URL:-}
|
||||
volumes:
|
||||
- ./volumes/sandbox/dependencies:/dependencies
|
||||
- ./volumes/sandbox/conf:/conf
|
||||
healthcheck:
|
||||
test: ["CMD", "curl", "-f", "http://localhost:8194/health"]
|
||||
networks:
|
||||
- ssrf_proxy_network
|
||||
|
||||
# plugin daemon
|
||||
plugin_daemon:
|
||||
image: docker.io/langgenius/dify-plugin-daemon:0.5.4-local
|
||||
restart: always
|
||||
env_file:
|
||||
- ./middleware.env
|
||||
environment:
|
||||
# Use the shared environment variables.
|
||||
LOG_OUTPUT_FORMAT: ${LOG_OUTPUT_FORMAT:-text}
|
||||
DB_DATABASE: ${DB_PLUGIN_DATABASE:-dify_plugin}
|
||||
REDIS_HOST: ${REDIS_HOST:-redis}
|
||||
REDIS_PORT: ${REDIS_PORT:-6379}
|
||||
REDIS_PASSWORD: ${REDIS_PASSWORD:-difyai123456}
|
||||
SERVER_PORT: ${PLUGIN_DAEMON_PORT:-5002}
|
||||
SERVER_KEY: ${PLUGIN_DAEMON_KEY:-lYkiYYT6owG+71oLerGzA7GXCgOT++6ovaezWAjpCjf+Sjc3ZtU+qUEi}
|
||||
MAX_PLUGIN_PACKAGE_SIZE: ${PLUGIN_MAX_PACKAGE_SIZE:-52428800}
|
||||
PPROF_ENABLED: ${PLUGIN_PPROF_ENABLED:-false}
|
||||
DIFY_INNER_API_URL: ${PLUGIN_DIFY_INNER_API_URL:-http://host.containers.internal:5001}
|
||||
DIFY_INNER_API_KEY: ${PLUGIN_DIFY_INNER_API_KEY:-QaHbTe77CtuXmsfyhR7+vRjI/+XbV1AaFy691iy+kGDv2Jvy0/eAh8Y1}
|
||||
PLUGIN_REMOTE_INSTALLING_HOST: ${PLUGIN_DEBUGGING_HOST:-0.0.0.0}
|
||||
PLUGIN_REMOTE_INSTALLING_PORT: ${PLUGIN_DEBUGGING_PORT:-5003}
|
||||
PLUGIN_WORKING_PATH: ${PLUGIN_WORKING_PATH:-/app/storage/cwd}
|
||||
PYTHON_ENV_INIT_TIMEOUT: ${PLUGIN_PYTHON_ENV_INIT_TIMEOUT:-120}
|
||||
PLUGIN_MAX_EXECUTION_TIMEOUT: ${PLUGIN_MAX_EXECUTION_TIMEOUT:-600}
|
||||
PIP_MIRROR_URL: ${PIP_MIRROR_URL:-}
|
||||
PLUGIN_STORAGE_TYPE: ${PLUGIN_STORAGE_TYPE:-local}
|
||||
PLUGIN_STORAGE_LOCAL_ROOT: ${PLUGIN_STORAGE_LOCAL_ROOT:-/app/storage}
|
||||
PLUGIN_INSTALLED_PATH: ${PLUGIN_INSTALLED_PATH:-plugin}
|
||||
PLUGIN_PACKAGE_CACHE_PATH: ${PLUGIN_PACKAGE_CACHE_PATH:-plugin_packages}
|
||||
PLUGIN_MEDIA_CACHE_PATH: ${PLUGIN_MEDIA_CACHE_PATH:-assets}
|
||||
PLUGIN_STORAGE_OSS_BUCKET: ${PLUGIN_STORAGE_OSS_BUCKET:-}
|
||||
S3_USE_AWS: ${PLUGIN_S3_USE_AWS:-false}
|
||||
S3_USE_AWS_MANAGED_IAM: ${PLUGIN_S3_USE_AWS_MANAGED_IAM:-false}
|
||||
S3_ENDPOINT: ${PLUGIN_S3_ENDPOINT:-}
|
||||
S3_USE_PATH_STYLE: ${PLUGIN_S3_USE_PATH_STYLE:-false}
|
||||
AWS_ACCESS_KEY: ${PLUGIN_AWS_ACCESS_KEY:-}
|
||||
AWS_SECRET_KEY: ${PLUGIN_AWS_SECRET_KEY:-}
|
||||
AWS_REGION: ${PLUGIN_AWS_REGION:-}
|
||||
AZURE_BLOB_STORAGE_CONNECTION_STRING: ${PLUGIN_AZURE_BLOB_STORAGE_CONNECTION_STRING:-}
|
||||
AZURE_BLOB_STORAGE_CONTAINER_NAME: ${PLUGIN_AZURE_BLOB_STORAGE_CONTAINER_NAME:-}
|
||||
TENCENT_COS_SECRET_KEY: ${PLUGIN_TENCENT_COS_SECRET_KEY:-}
|
||||
TENCENT_COS_SECRET_ID: ${PLUGIN_TENCENT_COS_SECRET_ID:-}
|
||||
TENCENT_COS_REGION: ${PLUGIN_TENCENT_COS_REGION:-}
|
||||
ALIYUN_OSS_REGION: ${PLUGIN_ALIYUN_OSS_REGION:-}
|
||||
ALIYUN_OSS_ENDPOINT: ${PLUGIN_ALIYUN_OSS_ENDPOINT:-}
|
||||
ALIYUN_OSS_ACCESS_KEY_ID: ${PLUGIN_ALIYUN_OSS_ACCESS_KEY_ID:-}
|
||||
ALIYUN_OSS_ACCESS_KEY_SECRET: ${PLUGIN_ALIYUN_OSS_ACCESS_KEY_SECRET:-}
|
||||
ALIYUN_OSS_AUTH_VERSION: ${PLUGIN_ALIYUN_OSS_AUTH_VERSION:-v4}
|
||||
ALIYUN_OSS_PATH: ${PLUGIN_ALIYUN_OSS_PATH:-}
|
||||
VOLCENGINE_TOS_ENDPOINT: ${PLUGIN_VOLCENGINE_TOS_ENDPOINT:-}
|
||||
VOLCENGINE_TOS_ACCESS_KEY: ${PLUGIN_VOLCENGINE_TOS_ACCESS_KEY:-}
|
||||
VOLCENGINE_TOS_SECRET_KEY: ${PLUGIN_VOLCENGINE_TOS_SECRET_KEY:-}
|
||||
VOLCENGINE_TOS_REGION: ${PLUGIN_VOLCENGINE_TOS_REGION:-}
|
||||
THIRD_PARTY_SIGNATURE_VERIFICATION_ENABLED: true
|
||||
THIRD_PARTY_SIGNATURE_VERIFICATION_PUBLIC_KEYS: /app/keys/publickey.pem
|
||||
FORCE_VERIFYING_SIGNATURE: false
|
||||
depends_on:
|
||||
db_postgres:
|
||||
condition: service_healthy
|
||||
redis:
|
||||
condition: service_started
|
||||
ports:
|
||||
- "${EXPOSE_PLUGIN_DAEMON_PORT:-5002}:${PLUGIN_DAEMON_PORT:-5002}"
|
||||
- "${EXPOSE_PLUGIN_DEBUGGING_PORT:-5003}:${PLUGIN_DEBUGGING_PORT:-5003}"
|
||||
volumes:
|
||||
- ./volumes/plugin_daemon:/app/storage
|
||||
networks:
|
||||
- dify
|
||||
|
||||
# ssrf_proxy server
|
||||
# for more information, please refer to
|
||||
# https://docs.dify.ai/learn-more/faq/install-faq#18-why-is-ssrf-proxy-needed%3F
|
||||
ssrf_proxy:
|
||||
image: docker.io/ubuntu/squid:latest
|
||||
restart: always
|
||||
volumes:
|
||||
- ./ssrf_proxy/squid.conf.template:/etc/squid/squid.conf.template
|
||||
- ./ssrf_proxy/docker-entrypoint.sh:/docker-entrypoint-mount.sh
|
||||
entrypoint:
|
||||
[
|
||||
"sh",
|
||||
"-c",
|
||||
"cp /docker-entrypoint-mount.sh /docker-entrypoint.sh && sed -i 's/\r$$//' /docker-entrypoint.sh && chmod +x /docker-entrypoint.sh && /docker-entrypoint.sh",
|
||||
]
|
||||
env_file:
|
||||
- ./middleware.env
|
||||
environment:
|
||||
# pls clearly modify the squid env vars to fit your network environment.
|
||||
HTTP_PORT: ${SSRF_HTTP_PORT:-3128}
|
||||
COREDUMP_DIR: ${SSRF_COREDUMP_DIR:-/var/spool/squid}
|
||||
REVERSE_PROXY_PORT: ${SSRF_REVERSE_PROXY_PORT:-8194}
|
||||
SANDBOX_HOST: ${SSRF_SANDBOX_HOST:-sandbox}
|
||||
SANDBOX_PORT: ${SSRF_SANDBOX_PORT:-8194}
|
||||
ports:
|
||||
- "${EXPOSE_SSRF_PROXY_PORT:-3128}:${SSRF_HTTP_PORT:-3128}"
|
||||
- "${EXPOSE_SANDBOX_PORT:-8194}:${SANDBOX_PORT:-8194}"
|
||||
networks:
|
||||
- ssrf_proxy_network
|
||||
- dify
|
||||
|
||||
# The Weaviate vector store.
|
||||
weaviate:
|
||||
image: docker.io/semitechnologies/weaviate:1.27.0
|
||||
profiles:
|
||||
- ""
|
||||
- weaviate
|
||||
restart: always
|
||||
volumes:
|
||||
# Mount the Weaviate data directory to the container.
|
||||
- ${WEAVIATE_HOST_VOLUME:-./volumes/weaviate}:/var/lib/weaviate
|
||||
env_file:
|
||||
- ./middleware.env
|
||||
environment:
|
||||
# The Weaviate configurations
|
||||
# You can refer to the [Weaviate](https://weaviate.io/developers/weaviate/config-refs/env-vars) documentation for more information.
|
||||
PERSISTENCE_DATA_PATH: ${WEAVIATE_PERSISTENCE_DATA_PATH:-/var/lib/weaviate}
|
||||
QUERY_DEFAULTS_LIMIT: ${WEAVIATE_QUERY_DEFAULTS_LIMIT:-25}
|
||||
AUTHENTICATION_ANONYMOUS_ACCESS_ENABLED: ${WEAVIATE_AUTHENTICATION_ANONYMOUS_ACCESS_ENABLED:-false}
|
||||
DEFAULT_VECTORIZER_MODULE: ${WEAVIATE_DEFAULT_VECTORIZER_MODULE:-none}
|
||||
CLUSTER_HOSTNAME: ${WEAVIATE_CLUSTER_HOSTNAME:-node1}
|
||||
AUTHENTICATION_APIKEY_ENABLED: ${WEAVIATE_AUTHENTICATION_APIKEY_ENABLED:-true}
|
||||
AUTHENTICATION_APIKEY_ALLOWED_KEYS: ${WEAVIATE_AUTHENTICATION_APIKEY_ALLOWED_KEYS:-WVF5YThaHlkYwhGUSmCRgsX3tD5ngdN8pkih}
|
||||
AUTHENTICATION_APIKEY_USERS: ${WEAVIATE_AUTHENTICATION_APIKEY_USERS:-hello@dify.ai}
|
||||
AUTHORIZATION_ADMINLIST_ENABLED: ${WEAVIATE_AUTHORIZATION_ADMINLIST_ENABLED:-true}
|
||||
AUTHORIZATION_ADMINLIST_USERS: ${WEAVIATE_AUTHORIZATION_ADMINLIST_USERS:-hello@dify.ai}
|
||||
DISABLE_TELEMETRY: ${WEAVIATE_DISABLE_TELEMETRY:-false}
|
||||
ports:
|
||||
- "${EXPOSE_WEAVIATE_PORT:-8080}:8080"
|
||||
- "${EXPOSE_WEAVIATE_GRPC_PORT:-50051}:50051"
|
||||
|
||||
networks:
|
||||
# create a network between sandbox, api and ssrf_proxy, and can not access outside.
|
||||
ssrf_proxy_network:
|
||||
driver: bridge
|
||||
internal: true
|
||||
dify:
|
||||
name: dify
|
||||
driver: bridge
|
||||
Reference in New Issue
Block a user