Compare commits

...

2 Commits

Author SHA1 Message Date
Harry
48e694eafe fix: agentbox using nginx for traffic proxy
2026-02-15 11:31:22 +08:00
Harry
cb6445a977 feat: fallback handle for skill exception when draft not sync yet 2026-02-15 02:00:27 +08:00
12 changed files with 114 additions and 26 deletions

View File

@@ -744,9 +744,10 @@ CLI_API_URL=http://localhost:5001
# Base URL for storage file ticket API endpoints (upload/download).
# Used by sandbox containers (internal or external like e2b) that need an absolute,
# routable address to reach the Dify API file endpoints.
# Falls back to FILES_URL if not specified.
# Required for sandbox runtime file access.
# For local development: http://localhost:5001
# For Docker deployment: http://api:5001
# For all-in-one Docker deployment with nginx: http://localhost
# For public/remote sandbox environments (e.g., e2b): use a public domain or IP
FILES_API_URL=http://localhost:5001
# Optional defaults for SSH sandbox provider setup (for manual config/CLI usage).
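For reference, ticket URLs handed to sandbox runtimes are composed as {FILES_API_URL}/files/storage-files/{token} (see the StorageTicketService change below), so the value chosen here determines exactly which URL the runtime must be able to reach. A minimal sketch of how each deployment option surfaces in the final URL; the token value is hypothetical:

# Illustrative only: how deployment-specific FILES_API_URL values appear in the
# ticket URLs given to sandbox runtimes. The token below is a hypothetical UUID.
token = "3f2a1c9e-0000-0000-0000-000000000000"
for files_api_url in (
    "http://localhost:5001",      # local development
    "http://api:5001",            # Docker deployment, API container reached directly
    "http://localhost",           # all-in-one Docker deployment via nginx + agentbox socat loopback
    "https://files.example.com",  # public/remote sandboxes (e.g. e2b); example domain
):
    print(f"{files_api_url.rstrip('/')}/files/storage-files/{token}")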

View File

@@ -377,8 +377,8 @@ class FileAccessConfig(BaseSettings):
description="Base URL for storage file ticket API endpoints."
" Used by sandbox containers (internal or external like e2b) that need"
" an absolute, routable address to upload/download files via the API."
" Falls back to FILES_URL if not specified."
" For Docker deployments, set to http://api:5001.",
" For all-in-one Docker deployments, set to http://localhost."
" For public sandbox environments, set to a public domain or IP.",
default="",
)
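A minimal sketch of how a setting like this is declared and read, assuming pydantic-settings; the field name is not visible in this hunk and is inferred from dify_config.FILES_API_URL used elsewhere in this change set:

# Illustrative sketch only, assuming pydantic-settings; the FILES_API_URL field
# name is inferred from dify_config.FILES_API_URL in the hunks below.
from pydantic import Field
from pydantic_settings import BaseSettings

class FileAccessConfig(BaseSettings):
    FILES_API_URL: str = Field(
        description="Base URL for storage file ticket API endpoints.",
        default="",
    )

config = FileAccessConfig()  # picks up FILES_API_URL from the environment if set
print(config.FILES_API_URL or "<unset: ticket URL generation will fail>")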

View File

@@ -4,8 +4,10 @@ from controllers.console import console_ns
from controllers.console.app.error import DraftWorkflowNotExist
from controllers.console.app.wraps import get_app_model
from controllers.console.wraps import account_initialization_required, current_account_with_tenant, setup_required
from core.skill.entities.api_entities import NodeSkillInfo
from libs.login import login_required
from models import App
from models._workflow_exc import NodeNotFoundError
from models.model import AppMode
from services.skill_service import SkillService
from services.workflow_service import WorkflowService
@@ -40,12 +42,15 @@ class NodeSkillsApi(Resource):
if not workflow:
raise DraftWorkflowNotExist()
skill_info = SkillService.get_node_skill_info(
app=app_model,
workflow=workflow,
node_id=node_id,
user_id=current_user.id,
)
try:
skill_info = SkillService.get_node_skill_info(
app=app_model,
workflow=workflow,
node_id=node_id,
user_id=current_user.id,
)
except NodeNotFoundError:
return NodeSkillInfo.empty(node_id=node_id).model_dump()
return skill_info.model_dump()
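With this fallback, asking for a node that is not yet present in an unsynced draft no longer bubbles NodeNotFoundError up as an error response; the endpoint returns an empty payload instead. A minimal sketch of the resulting shape, assuming NodeSkillInfo declares only the node_id and tool_dependencies fields shown in this change; the node id is hypothetical:

# Illustrative only; mirrors the except NodeNotFoundError branch above.
# Assumes NodeSkillInfo has only the node_id and tool_dependencies fields.
from core.skill.entities.api_entities import NodeSkillInfo

info = NodeSkillInfo.empty(node_id="llm-node-1")  # hypothetical node id
assert info.model_dump() == {"node_id": "llm-node-1", "tool_dependencies": []}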

View File

@@ -10,3 +10,8 @@ class NodeSkillInfo(BaseModel):
tool_dependencies: list[ToolDependency] = Field(
default_factory=list, description="Tool dependencies extracted from skill prompts"
)
@staticmethod
def empty(node_id: str = "") -> "NodeSkillInfo":
"""Create an empty NodeSkillInfo with no tool dependencies."""
return NodeSkillInfo(node_id=node_id, tool_dependencies=[])

View File

@@ -7,7 +7,9 @@ from core.skill.entities.skill_document import SkillDocument
from core.skill.entities.tool_dependencies import ToolDependencies, ToolDependency
from core.skill.skill_compiler import SkillCompiler
from core.skill.skill_manager import SkillManager
from core.workflow.entities.graph_config import NodeConfigData, NodeConfigDict
from core.workflow.enums import NodeType
from models._workflow_exc import NodeNotFoundError
from models.model import App
from models.workflow import Workflow
from services.app_asset_service import AppAssetService
@@ -34,8 +36,10 @@ class SkillService:
Returns:
NodeSkillInfo containing tool dependencies for the node
"""
node_config = workflow.get_node_config_by_id(node_id)
node_data = node_config.get("data", {})
node_config: NodeConfigDict = workflow.get_node_config_by_id(node_id)
if not node_config:
raise NodeNotFoundError(f"Node with ID {node_id} not found in workflow {workflow.id}")
node_data: NodeConfigData = node_config["data"]
node_type = node_data.get("type", "")
# Only LLM nodes support skills currently
@@ -84,7 +88,7 @@ class SkillService:
return result
@staticmethod
def _has_skill(node_data: dict[str, Any]) -> bool:
def _has_skill(node_data: NodeConfigData) -> bool:
"""Check if node has any skill prompts."""
prompt_template = node_data.get("prompt_template", [])
if isinstance(prompt_template, list):

View File

@@ -14,7 +14,7 @@ Usage:
url = StorageTicketService.create_upload_url("path/to/file.txt", expires_in=300, max_bytes=10*1024*1024)
URL format:
{FILES_API_URL}/files/storage-files/{token} (falls back to FILES_URL)
{FILES_API_URL}/files/storage-files/{token}
The token is validated by looking up the Redis key, which contains:
- op: "download" or "upload"
@@ -137,6 +137,17 @@ class StorageTicketService:
@classmethod
def _build_url(cls, token: str) -> str:
"""Build the full URL for a token."""
base_url = dify_config.FILES_API_URL
"""Build the full URL for a token.
FILES_API_URL is dedicated to sandbox runtime file access (agentbox/e2b/etc.).
This endpoint must be routable from the runtime environment.
"""
base_url = dify_config.FILES_API_URL.strip()
if not base_url:
raise ValueError(
"FILES_API_URL is required for sandbox runtime file access. "
"Set FILES_API_URL to a URL reachable by your sandbox runtime. "
"For public sandbox environments (e.g. e2b), use a public domain or IP."
)
base_url = base_url.rstrip("/")
return f"{base_url}/files/storage-files/{token}"
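Because the FILES_URL fallback is gone, a runtime consuming one of these ticket URLs needs nothing beyond plain HTTP against the configured base. A minimal sketch of the download side, assuming download tickets are served with a simple HTTP GET (the upload method and payload format are not shown in this hunk); the URL is hypothetical:

# Illustrative sketch only: fetching a download-ticket URL from inside a sandbox
# runtime. The URL is hypothetical; GET-for-download is an assumption, not
# something this hunk confirms.
import urllib.request

ticket_url = "http://localhost/files/storage-files/3f2a1c9e-0000-0000-0000-000000000000"
with urllib.request.urlopen(ticket_url, timeout=10) as resp:
    data = resp.read()
print(f"downloaded {len(data)} bytes")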

View File

@@ -156,6 +156,7 @@ def test_bundle_import_zip_storage_key():
def test_storage_ticket_service(monkeypatch: pytest.MonkeyPatch):
"""Test StorageTicketService creates and retrieves tickets."""
monkeypatch.setattr(dify_config, "FILES_URL", "http://files.local", raising=False)
monkeypatch.setattr(dify_config, "FILES_API_URL", "http://files-api.local", raising=False)
mock_redis = MagicMock()
stored_data = {}
@@ -172,7 +173,7 @@ def test_storage_ticket_service(monkeypatch: pytest.MonkeyPatch):
with patch("services.storage_ticket_service.redis_client", mock_redis):
url = StorageTicketService.create_download_url("test/path/file.txt", expires_in=300, filename="file.txt")
assert url.startswith("http://files.local/files/storage-files/")
assert url.startswith("http://files-api.local/files/storage-files/")
token = url.split("/")[-1]
ticket = StorageTicketService.get_ticket(token)
@@ -207,6 +208,7 @@ def test_ticket_url_generation(monkeypatch: pytest.MonkeyPatch):
key = AssetPaths.draft(tenant_id, app_id, resource_id)
monkeypatch.setattr(dify_config, "FILES_URL", "http://files.local", raising=False)
monkeypatch.setattr(dify_config, "FILES_API_URL", "http://files-api.local", raising=False)
mock_redis = MagicMock()
mock_redis.setex = MagicMock()
@@ -222,7 +224,7 @@ def test_ticket_url_generation(monkeypatch: pytest.MonkeyPatch):
)
url = storage.get_download_url(key, expires_in=120)
assert url.startswith("http://files.local/files/storage-files/")
assert url.startswith("http://files-api.local/files/storage-files/")
token = url.split("/")[-1]
assert len(token) == 36 # UUID format
@@ -235,6 +237,7 @@ def test_upload_ticket_url_generation(monkeypatch: pytest.MonkeyPatch):
key = AssetPaths.draft(tenant_id, app_id, resource_id)
monkeypatch.setattr(dify_config, "FILES_URL", "http://files.local", raising=False)
monkeypatch.setattr(dify_config, "FILES_API_URL", "http://files-api.local", raising=False)
mock_redis = MagicMock()
mock_redis.setex = MagicMock()
@@ -249,7 +252,7 @@ def test_upload_ticket_url_generation(monkeypatch: pytest.MonkeyPatch):
)
url = storage.get_upload_url(key, expires_in=120)
assert url.startswith("http://files.local/files/storage-files/")
assert url.startswith("http://files-api.local/files/storage-files/")
token = url.split("/")[-1]
assert len(token) == 36 # UUID format
@@ -289,3 +292,32 @@ def test_storage_ticket_pydantic():
upload_json = upload_ticket.model_dump_json()
restored_upload = StorageTicket.model_validate_json(upload_json)
assert restored_upload.max_bytes == 1024
def test_storage_ticket_uses_files_api_url_when_set(monkeypatch: pytest.MonkeyPatch):
"""Test that FILES_API_URL is used for runtime ticket URLs."""
monkeypatch.setattr(dify_config, "FILES_URL", "http://files.local", raising=False)
monkeypatch.setattr(dify_config, "FILES_API_URL", "https://runtime.example.com", raising=False)
mock_redis = MagicMock()
mock_redis.setex = MagicMock()
with patch("services.storage_ticket_service.redis_client", mock_redis):
url = StorageTicketService.create_download_url("test/path/file.txt", expires_in=300, filename="file.txt")
assert url.startswith("https://runtime.example.com/files/storage-files/")
def test_storage_ticket_requires_files_api_url(monkeypatch: pytest.MonkeyPatch):
"""Test that ticket generation fails when FILES_API_URL is empty."""
monkeypatch.setattr(dify_config, "FILES_URL", "http://files.local", raising=False)
monkeypatch.setattr(dify_config, "FILES_API_URL", "", raising=False)
mock_redis = MagicMock()
mock_redis.setex = MagicMock()
with (
patch("services.storage_ticket_service.redis_client", mock_redis),
pytest.raises(ValueError, match="FILES_API_URL is required"),
):
StorageTicketService.create_download_url("test/path/file.txt", expires_in=300, filename="file.txt")

View File

@@ -1015,10 +1015,15 @@ SANDBOX_DIFY_CLI_ROOT=
CLI_API_URL=http://api:5001
# Base URL for storage file ticket API endpoints (upload/download).
# Used by sandbox containers (internal or external like e2b) that need an absolute,
# routable address to reach the Dify API file endpoints.
# Falls back to FILES_URL if not specified.
FILES_API_URL=http://api:5001
# Used by sandbox runtimes (agentbox/e2b/etc.) to upload/download files.
# This URL must be routable from the sandbox runtime environment.
#
# For all-in-one Docker deployment:
# - Use http://localhost (agentbox has localhost:80 -> nginx:80 socat forwarding)
#
# For public/remote sandbox environments (e.g., e2b, remote agentbox, middleware stack):
# - Use a publicly reachable domain or IP (e.g., https://files.example.com)
FILES_API_URL=http://localhost
# The sandbox service endpoint.
CODE_EXECUTION_ENDPOINT=http://sandbox:8194
@@ -1218,10 +1223,14 @@ AGENTBOX_SSH_USERNAME=agentbox
AGENTBOX_SSH_PASSWORD=agentbox
# SSH port exposed inside the docker network
AGENTBOX_SSH_PORT=22
# socat target host for localhost forwarding inside agentbox
# socat target host for localhost:5001 forwarding inside agentbox
AGENTBOX_SOCAT_TARGET_HOST=api
# socat target port for localhost forwarding inside agentbox
# socat target port for localhost:5001 forwarding inside agentbox
AGENTBOX_SOCAT_TARGET_PORT=5001
# socat target host for localhost:80 forwarding inside agentbox
AGENTBOX_NGINX_HOST=nginx
# socat target port for localhost:80 forwarding inside agentbox
AGENTBOX_NGINX_PORT=80
# ------------------------------
# Environment Variables for weaviate Service

View File

@@ -45,6 +45,7 @@ Welcome to the new `docker` directory for deploying Dify using Docker Compose. T
1. **Running Middleware Services**:
- Navigate to the `docker` directory.
- Execute `docker compose --env-file middleware.env -f docker-compose.middleware.yaml -p dify up -d` to start PostgreSQL/MySQL (per `DB_TYPE`) plus the bundled Weaviate instance.
- If you use a public/remote sandbox runtime (e.g., e2b or remote SSH sandbox), set `FILES_API_URL` in `api/.env` to a publicly reachable domain/IP.
> Compose automatically loads `COMPOSE_PROFILES=${DB_TYPE:-postgresql},weaviate` from `middleware.env`, so no extra `--profile` flags are needed. Adjust variables in `middleware.env` if you want a different combination of services.

View File

@@ -278,8 +278,12 @@ services:
AGENTBOX_SSH_USERNAME: ${AGENTBOX_SSH_USERNAME:-agentbox}
AGENTBOX_SSH_PASSWORD: ${AGENTBOX_SSH_PASSWORD:-agentbox}
AGENTBOX_SSH_PORT: ${AGENTBOX_SSH_PORT:-22}
# localhost:5001 -> api:5001 (API direct access)
AGENTBOX_SOCAT_TARGET_HOST: ${AGENTBOX_SOCAT_TARGET_HOST:-api}
AGENTBOX_SOCAT_TARGET_PORT: ${AGENTBOX_SOCAT_TARGET_PORT:-5001}
# localhost:80 -> nginx:80 (for FILES_API_URL=http://localhost)
AGENTBOX_NGINX_HOST: ${AGENTBOX_NGINX_HOST:-nginx}
AGENTBOX_NGINX_PORT: ${AGENTBOX_NGINX_PORT:-80}
command: >
sh -c "
set -e;
@@ -299,10 +303,12 @@ services:
fi;
grep -q '^PasswordAuthentication' /etc/ssh/sshd_config && sed -i 's/^PasswordAuthentication.*/PasswordAuthentication yes/' /etc/ssh/sshd_config || echo 'PasswordAuthentication yes' >> /etc/ssh/sshd_config;
nohup socat TCP-LISTEN:$${AGENTBOX_SOCAT_TARGET_PORT},bind=127.0.0.1,fork,reuseaddr TCP:$${AGENTBOX_SOCAT_TARGET_HOST}:$${AGENTBOX_SOCAT_TARGET_PORT} >/tmp/socat.log 2>&1 &
nohup socat TCP-LISTEN:$${AGENTBOX_NGINX_PORT},bind=127.0.0.1,fork,reuseaddr TCP:$${AGENTBOX_NGINX_HOST}:$${AGENTBOX_NGINX_PORT} >/tmp/socat_nginx.log 2>&1 &
exec /usr/sbin/sshd -D -p $${AGENTBOX_SSH_PORT}
"
depends_on:
- api
- nginx
# plugin daemon
plugin_daemon:
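The two socat processes above give the sandbox user a stable loopback view: 127.0.0.1:5001 proxies to api:5001 and 127.0.0.1:80 proxies to nginx:80, which is what makes FILES_API_URL=http://localhost resolvable from inside agentbox. A minimal probe one could run inside the container to confirm both forwards are up; a sketch, not part of this change:

# Illustrative probe only; ports mirror the defaults configured above.
import socket

for port in (5001, 80):  # api via socat, nginx via socat
    try:
        with socket.create_connection(("127.0.0.1", port), timeout=3):
            print(f"127.0.0.1:{port} reachable")
    except OSError as exc:
        print(f"127.0.0.1:{port} NOT reachable: {exc}")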

View File

@@ -439,7 +439,7 @@ x-shared-env: &shared-api-worker-env
OWNER_TRANSFER_TOKEN_EXPIRY_MINUTES: ${OWNER_TRANSFER_TOKEN_EXPIRY_MINUTES:-5}
SANDBOX_DIFY_CLI_ROOT: ${SANDBOX_DIFY_CLI_ROOT:-}
CLI_API_URL: ${CLI_API_URL:-http://api:5001}
FILES_API_URL: ${FILES_API_URL:-http://api:5001}
FILES_API_URL: ${FILES_API_URL:-http://localhost}
CODE_EXECUTION_ENDPOINT: ${CODE_EXECUTION_ENDPOINT:-http://sandbox:8194}
CODE_EXECUTION_API_KEY: ${CODE_EXECUTION_API_KEY:-dify-sandbox}
CODE_EXECUTION_SSL_VERIFY: ${CODE_EXECUTION_SSL_VERIFY:-True}
@@ -516,6 +516,8 @@ x-shared-env: &shared-api-worker-env
AGENTBOX_SSH_PORT: ${AGENTBOX_SSH_PORT:-22}
AGENTBOX_SOCAT_TARGET_HOST: ${AGENTBOX_SOCAT_TARGET_HOST:-api}
AGENTBOX_SOCAT_TARGET_PORT: ${AGENTBOX_SOCAT_TARGET_PORT:-5001}
AGENTBOX_NGINX_HOST: ${AGENTBOX_NGINX_HOST:-nginx}
AGENTBOX_NGINX_PORT: ${AGENTBOX_NGINX_PORT:-80}
WEAVIATE_PERSISTENCE_DATA_PATH: ${WEAVIATE_PERSISTENCE_DATA_PATH:-/var/lib/weaviate}
WEAVIATE_QUERY_DEFAULTS_LIMIT: ${WEAVIATE_QUERY_DEFAULTS_LIMIT:-25}
WEAVIATE_AUTHENTICATION_ANONYMOUS_ACCESS_ENABLED: ${WEAVIATE_AUTHENTICATION_ANONYMOUS_ACCESS_ENABLED:-true}
@@ -983,8 +985,12 @@ services:
AGENTBOX_SSH_USERNAME: ${AGENTBOX_SSH_USERNAME:-agentbox}
AGENTBOX_SSH_PASSWORD: ${AGENTBOX_SSH_PASSWORD:-agentbox}
AGENTBOX_SSH_PORT: ${AGENTBOX_SSH_PORT:-22}
# localhost:5001 -> api:5001 (API direct access)
AGENTBOX_SOCAT_TARGET_HOST: ${AGENTBOX_SOCAT_TARGET_HOST:-api}
AGENTBOX_SOCAT_TARGET_PORT: ${AGENTBOX_SOCAT_TARGET_PORT:-5001}
# localhost:80 -> nginx:80 (for FILES_API_URL=http://localhost)
AGENTBOX_NGINX_HOST: ${AGENTBOX_NGINX_HOST:-nginx}
AGENTBOX_NGINX_PORT: ${AGENTBOX_NGINX_PORT:-80}
command: >
sh -c "
set -e;
@@ -1004,10 +1010,12 @@ services:
fi;
grep -q '^PasswordAuthentication' /etc/ssh/sshd_config && sed -i 's/^PasswordAuthentication.*/PasswordAuthentication yes/' /etc/ssh/sshd_config || echo 'PasswordAuthentication yes' >> /etc/ssh/sshd_config;
nohup socat TCP-LISTEN:$${AGENTBOX_SOCAT_TARGET_PORT},bind=127.0.0.1,fork,reuseaddr TCP:$${AGENTBOX_SOCAT_TARGET_HOST}:$${AGENTBOX_SOCAT_TARGET_PORT} >/tmp/socat.log 2>&1 &
nohup socat TCP-LISTEN:$${AGENTBOX_NGINX_PORT},bind=127.0.0.1,fork,reuseaddr TCP:$${AGENTBOX_NGINX_HOST}:$${AGENTBOX_NGINX_PORT} >/tmp/socat_nginx.log 2>&1 &
exec /usr/sbin/sshd -D -p $${AGENTBOX_SSH_PORT}
"
depends_on:
- api
- nginx
# plugin daemon
plugin_daemon:

View File

@@ -106,6 +106,12 @@ SANDBOX_PORT=8194
# ------------------------------
# Environment Variables for agentbox Service
# ------------------------------
# IMPORTANT:
# This middleware stack does not include the nginx localhost loopback for file URLs.
# If you use a public/remote sandbox runtime (for example e2b or a remote SSH sandbox),
# configure FILES_API_URL in api/.env to a publicly reachable domain/IP so sandbox
# runtimes can download/upload files.
# Localhost/127.0.0.1 is only suitable when the sandbox can reach the same host network.
AGENTBOX_SSH_USERNAME=agentbox
AGENTBOX_SSH_PASSWORD=agentbox
AGENTBOX_SSH_PORT=22