Compare commits

...

6 Commits

Author SHA1 Message Date
Stephen Zhou
b3902374ac chore: drop slow lint rules (#31205)
2026-01-19 12:45:02 +08:00
wangxiaolei
3b225c01da refactor: refactor workflow context (#30607) 2026-01-19 12:18:51 +08:00
Xiyuan Chen
72ce6ca437 feat: implement workspace permission checks for member invitations an… (#31202) 2026-01-18 19:35:50 -08:00
NFish
269c85d5a3 feat: ee workspace permission control (#30841) 2026-01-19 11:06:04 +08:00
非法操作
b0545635b8 chore: improve clear workflow_run task (#31124)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com>
Co-authored-by: hj24 <mambahj24@gmail.com>
2026-01-19 10:58:57 +08:00
Stephen Zhou
13d648cf7b chore: no custom lint cache location (#31195) 2026-01-19 10:37:49 +08:00
36 changed files with 1640 additions and 801 deletions

View File

@@ -71,6 +71,8 @@ def create_app() -> DifyApp:
def initialize_extensions(app: DifyApp):
# Initialize Flask context capture for workflow execution
from context.flask_app_context import init_flask_context
from extensions import (
ext_app_metrics,
ext_blueprints,
@@ -100,6 +102,8 @@ def initialize_extensions(app: DifyApp):
ext_warnings,
)
init_flask_context()
extensions = [
ext_timezone,
ext_logging,

View File

@@ -862,8 +862,27 @@ def clear_free_plan_tenant_expired_logs(days: int, batch: int, tenant_ids: list[
@click.command("clean-workflow-runs", help="Clean expired workflow runs and related data for free tenants.")
@click.option("--days", default=30, show_default=True, help="Delete workflow runs created before N days ago.")
@click.option(
"--before-days",
"--days",
default=30,
show_default=True,
type=click.IntRange(min=0),
help="Delete workflow runs created before N days ago.",
)
@click.option("--batch-size", default=200, show_default=True, help="Batch size for selecting workflow runs.")
@click.option(
"--from-days-ago",
default=None,
type=click.IntRange(min=0),
help="Lower bound in days ago (older). Must be paired with --to-days-ago.",
)
@click.option(
"--to-days-ago",
default=None,
type=click.IntRange(min=0),
help="Upper bound in days ago (newer). Must be paired with --from-days-ago.",
)
@click.option(
"--start-from",
type=click.DateTime(formats=["%Y-%m-%d", "%Y-%m-%dT%H:%M:%S"]),
@@ -882,8 +901,10 @@ def clear_free_plan_tenant_expired_logs(days: int, batch: int, tenant_ids: list[
help="Preview cleanup results without deleting any workflow run data.",
)
def clean_workflow_runs(
days: int,
before_days: int,
batch_size: int,
from_days_ago: int | None,
to_days_ago: int | None,
start_from: datetime.datetime | None,
end_before: datetime.datetime | None,
dry_run: bool,
@@ -894,11 +915,24 @@ def clean_workflow_runs(
if (start_from is None) ^ (end_before is None):
raise click.UsageError("--start-from and --end-before must be provided together.")
if (from_days_ago is None) ^ (to_days_ago is None):
raise click.UsageError("--from-days-ago and --to-days-ago must be provided together.")
if from_days_ago is not None and to_days_ago is not None:
if start_from or end_before:
raise click.UsageError("Choose either day offsets or explicit dates, not both.")
if from_days_ago <= to_days_ago:
raise click.UsageError("--from-days-ago must be greater than --to-days-ago.")
now = datetime.datetime.now()
start_from = now - datetime.timedelta(days=from_days_ago)
end_before = now - datetime.timedelta(days=to_days_ago)
before_days = 0
start_time = datetime.datetime.now(datetime.UTC)
click.echo(click.style(f"Starting workflow run cleanup at {start_time.isoformat()}.", fg="white"))
WorkflowRunCleanup(
days=days,
days=before_days,
batch_size=batch_size,
start_from=start_from,
end_before=end_before,
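Illustrative only (not part of the diff): a minimal sketch of exercising the new day-offset window through Click's test runner. The import path and the --dry-run flag spelling are assumptions; only the flag's help text is visible in the hunk above. Note that --from-days-ago must be strictly greater than --to-days-ago.

from click.testing import CliRunner

from commands import clean_workflow_runs  # import path assumed

runner = CliRunner()
# Preview workflow runs created between 90 and 30 days ago without deleting them.
result = runner.invoke(
    clean_workflow_runs,
    ["--from-days-ago", "90", "--to-days-ago", "30", "--batch-size", "500", "--dry-run"],
)
print(result.output)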

api/context/__init__.py Normal file
View File

@@ -0,0 +1,74 @@
"""
Core Context - Framework-agnostic context management.
This module provides context management that is independent of any specific
web framework. Framework-specific implementations register their context
capture functions at application initialization time.
This ensures the workflow layer remains completely decoupled from Flask
or any other web framework.
"""
import contextvars
from collections.abc import Callable
from core.workflow.context.execution_context import (
ExecutionContext,
IExecutionContext,
NullAppContext,
)
# Global capturer function - set by framework-specific modules
_capturer: Callable[[], IExecutionContext] | None = None
def register_context_capturer(capturer: Callable[[], IExecutionContext]) -> None:
"""
Register a context capture function.
This should be called by framework-specific modules (e.g., Flask)
during application initialization.
Args:
capturer: Function that captures current context and returns IExecutionContext
"""
global _capturer
_capturer = capturer
def capture_current_context() -> IExecutionContext:
"""
Capture current execution context.
This function uses the registered context capturer. If no capturer
is registered, it returns a minimal context with only contextvars
(suitable for non-framework environments like tests or standalone scripts).
Returns:
IExecutionContext with captured context
"""
if _capturer is None:
# No framework registered - return minimal context
return ExecutionContext(
app_context=NullAppContext(),
context_vars=contextvars.copy_context(),
)
return _capturer()
def reset_context_provider() -> None:
"""
Reset the context capturer.
This is primarily useful for testing to ensure a clean state.
"""
global _capturer
_capturer = None
__all__ = [
"capture_current_context",
"register_context_capturer",
"reset_context_provider",
]
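Illustrative only: a short usage sketch for this new module, registering a custom capturer (here just mirroring the framework-less fallback) and then capturing and entering the resulting context. The config value is made up.

import contextvars

from context import capture_current_context, register_context_capturer, reset_context_provider
from core.workflow.context.execution_context import ExecutionContext, NullAppContext


def my_capturer() -> ExecutionContext:
    # A real capturer would attach the framework's app context here; this one
    # simply mirrors the fallback path used when no capturer is registered.
    return ExecutionContext(
        app_context=NullAppContext(config={"DEBUG": True}),
        context_vars=contextvars.copy_context(),
    )


register_context_capturer(my_capturer)
ctx = capture_current_context()  # now delegates to my_capturer
with ctx:
    ...  # runs with the captured contextvars and the (null) app context
reset_context_provider()  # restore the framework-less default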

View File

@@ -0,0 +1,198 @@
"""
Flask App Context - Flask implementation of AppContext interface.
"""
import contextvars
from collections.abc import Generator
from contextlib import contextmanager
from typing import Any, final
from flask import Flask, current_app, g
from context import register_context_capturer
from core.workflow.context.execution_context import (
AppContext,
IExecutionContext,
)
@final
class FlaskAppContext(AppContext):
"""
Flask implementation of AppContext.
This adapts Flask's app context to the AppContext interface.
"""
def __init__(self, flask_app: Flask) -> None:
"""
Initialize Flask app context.
Args:
flask_app: The Flask application instance
"""
self._flask_app = flask_app
def get_config(self, key: str, default: Any = None) -> Any:
"""Get configuration value from Flask app config."""
return self._flask_app.config.get(key, default)
def get_extension(self, name: str) -> Any:
"""Get Flask extension by name."""
return self._flask_app.extensions.get(name)
@contextmanager
def enter(self) -> Generator[None, None, None]:
"""Enter Flask app context."""
with self._flask_app.app_context():
yield
@property
def flask_app(self) -> Flask:
"""Get the underlying Flask app instance."""
return self._flask_app
def capture_flask_context(user: Any = None) -> IExecutionContext:
"""
Capture current Flask execution context.
This function captures the Flask app context and contextvars from the
current environment. It should be called from within a Flask request or
app context.
Args:
user: Optional user object to include in context
Returns:
IExecutionContext with captured Flask context
Raises:
RuntimeError: If called outside Flask context
"""
# Get Flask app instance
flask_app = current_app._get_current_object() # type: ignore
# Save current user if available
saved_user = user
if saved_user is None:
# Check for user in g (flask-login)
if hasattr(g, "_login_user"):
saved_user = g._login_user
# Capture contextvars
context_vars = contextvars.copy_context()
return FlaskExecutionContext(
flask_app=flask_app,
context_vars=context_vars,
user=saved_user,
)
@final
class FlaskExecutionContext:
"""
Flask-specific execution context.
This is a specialized version of ExecutionContext that includes Flask app
context. It provides the same interface as ExecutionContext but with
Flask-specific implementation.
"""
def __init__(
self,
flask_app: Flask,
context_vars: contextvars.Context,
user: Any = None,
) -> None:
"""
Initialize Flask execution context.
Args:
flask_app: Flask application instance
context_vars: Python contextvars
user: Optional user object
"""
self._app_context = FlaskAppContext(flask_app)
self._context_vars = context_vars
self._user = user
self._flask_app = flask_app
@property
def app_context(self) -> FlaskAppContext:
"""Get Flask app context."""
return self._app_context
@property
def context_vars(self) -> contextvars.Context:
"""Get context variables."""
return self._context_vars
@property
def user(self) -> Any:
"""Get user object."""
return self._user
def __enter__(self) -> "FlaskExecutionContext":
"""Enter the Flask execution context."""
# Restore context variables
for var, val in self._context_vars.items():
var.set(val)
# Save current user from g if available
saved_user = None
if hasattr(g, "_login_user"):
saved_user = g._login_user
# Enter Flask app context
self._cm = self._app_context.enter()
self._cm.__enter__()
# Restore user in new app context
if saved_user is not None:
g._login_user = saved_user
return self
def __exit__(self, *args: Any) -> None:
"""Exit the Flask execution context."""
if hasattr(self, "_cm"):
self._cm.__exit__(*args)
@contextmanager
def enter(self) -> Generator[None, None, None]:
"""Enter Flask execution context as context manager."""
# Restore context variables
for var, val in self._context_vars.items():
var.set(val)
# Save current user from g if available
saved_user = None
if hasattr(g, "_login_user"):
saved_user = g._login_user
# Enter Flask app context
with self._flask_app.app_context():
# Restore user in new app context
if saved_user is not None:
g._login_user = saved_user
yield
def init_flask_context() -> None:
"""
Initialize Flask context capture by registering the capturer.
This function should be called during Flask application initialization
to register the Flask-specific context capturer with the core context module.
Example:
app = Flask(__name__)
init_flask_context() # Register Flask context capturer
Note:
This function does not need the app instance as it uses Flask's
`current_app` to get the app when capturing context.
"""
register_context_capturer(capture_flask_context)
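Illustrative only: the hand-off this module enables, capturing the Flask context in the request thread and re-entering it inside a worker thread (the same pattern the Worker and GraphEngine changes below adopt via IExecutionContext). Function and thread names are made up.

import threading

from context.flask_app_context import capture_flask_context


def handle_request() -> None:
    # Must run inside a Flask app/request context.
    ctx = capture_flask_context()

    def work() -> None:
        # Restores contextvars, re-enters the Flask app context, and carries
        # g._login_user over into the new app context.
        with ctx:
            ...  # e.g. node execution or DB access via the app's extensions

    threading.Thread(target=work, daemon=True).start()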

View File

@@ -69,6 +69,13 @@ class ActivateCheckApi(Resource):
if invitation:
data = invitation.get("data", {})
tenant = invitation.get("tenant", None)
# Check workspace permission
if tenant:
from libs.workspace_permission import check_workspace_member_invite_permission
check_workspace_member_invite_permission(tenant.id)
workspace_name = tenant.name if tenant else None
workspace_id = tenant.id if tenant else None
invitee_email = data.get("email") if data else None

View File

@@ -107,6 +107,12 @@ class MemberInviteEmailApi(Resource):
inviter = current_user
if not inviter.current_tenant:
raise ValueError("No current tenant")
# Check workspace permission for member invitations
from libs.workspace_permission import check_workspace_member_invite_permission
check_workspace_member_invite_permission(inviter.current_tenant.id)
invitation_results = []
console_web_url = dify_config.CONSOLE_WEB_URL

View File

@@ -20,6 +20,7 @@ from controllers.console.error import AccountNotLinkTenantError
from controllers.console.wraps import (
account_initialization_required,
cloud_edition_billing_resource_check,
only_edition_enterprise,
setup_required,
)
from enums.cloud_plan import CloudPlan
@@ -28,6 +29,7 @@ from libs.helper import TimestampField
from libs.login import current_account_with_tenant, login_required
from models.account import Tenant, TenantStatus
from services.account_service import TenantService
from services.enterprise.enterprise_service import EnterpriseService
from services.feature_service import FeatureService
from services.file_service import FileService
from services.workspace_service import WorkspaceService
@@ -288,3 +290,31 @@ class WorkspaceInfoApi(Resource):
db.session.commit()
return {"result": "success", "tenant": marshal(WorkspaceService.get_tenant_info(tenant), tenant_fields)}
@console_ns.route("/workspaces/current/permission")
class WorkspacePermissionApi(Resource):
"""Get workspace permissions for the current workspace."""
@setup_required
@login_required
@account_initialization_required
@only_edition_enterprise
def get(self):
"""
Get workspace permission settings.
Returns permission flags that control workspace features like member invitations and owner transfer.
"""
_, current_tenant_id = current_account_with_tenant()
if not current_tenant_id:
raise ValueError("No current tenant")
# Get workspace permissions from enterprise service
permission = EnterpriseService.WorkspacePermissionService.get_permission(current_tenant_id)
return {
"workspace_id": permission.workspace_id,
"allow_member_invite": permission.allow_member_invite,
"allow_owner_transfer": permission.allow_owner_transfer,
}, 200

View File

@@ -286,13 +286,12 @@ def enable_change_email(view: Callable[P, R]):
def is_allow_transfer_owner(view: Callable[P, R]):
@wraps(view)
def decorated(*args: P.args, **kwargs: P.kwargs):
_, current_tenant_id = current_account_with_tenant()
features = FeatureService.get_features(current_tenant_id)
if features.is_allow_transfer_workspace:
return view(*args, **kwargs)
from libs.workspace_permission import check_workspace_owner_transfer_permission
# otherwise, return 403
abort(403)
_, current_tenant_id = current_account_with_tenant()
# Check both billing/plan level and workspace policy level permissions
check_workspace_owner_transfer_permission(current_tenant_id)
return view(*args, **kwargs)
return decorated

View File

@@ -8,7 +8,7 @@ from typing import Any, Literal, Union, overload
from flask import Flask, current_app
from pydantic import ValidationError
from sqlalchemy import select
from sqlalchemy.orm import Session, sessionmaker
from sqlalchemy.orm import sessionmaker
import contexts
from configs import dify_config
@@ -23,6 +23,7 @@ from core.app.apps.workflow.generate_response_converter import WorkflowAppGenera
from core.app.apps.workflow.generate_task_pipeline import WorkflowAppGenerateTaskPipeline
from core.app.entities.app_invoke_entities import InvokeFrom, WorkflowAppGenerateEntity
from core.app.entities.task_entities import WorkflowAppBlockingResponse, WorkflowAppStreamResponse
from core.db.session_factory import session_factory
from core.helper.trace_id_helper import extract_external_trace_id_from_args
from core.model_runtime.errors.invoke import InvokeAuthorizationError
from core.ops.ops_trace_manager import TraceQueueManager
@@ -476,7 +477,7 @@ class WorkflowAppGenerator(BaseAppGenerator):
:return:
"""
with preserve_flask_contexts(flask_app, context_vars=context):
with Session(db.engine, expire_on_commit=False) as session:
with session_factory.create_session() as session:
workflow = session.scalar(
select(Workflow).where(
Workflow.tenant_id == application_generate_entity.app_config.tenant_id,

View File

@@ -5,7 +5,6 @@ import logging
from collections.abc import Generator, Mapping, Sequence
from typing import Any, cast
from flask import has_request_context
from sqlalchemy import select
from core.db.session_factory import session_factory
@@ -29,6 +28,21 @@ from models.workflow import Workflow
logger = logging.getLogger(__name__)
def _try_resolve_user_from_request() -> Account | EndUser | None:
"""
Try to resolve user from Flask request context.
Returns None if not in a request context or if user is not available.
"""
# Note: `current_user` is a LocalProxy. Never compare it with None directly.
# Use _get_current_object() to dereference the proxy
user = getattr(current_user, "_get_current_object", lambda: current_user)()
# Check if we got a valid user object
if user is not None and hasattr(user, "id"):
return user
return None
class WorkflowTool(Tool):
"""
Workflow tool.
@@ -209,21 +223,13 @@ class WorkflowTool(Tool):
Returns:
Account | EndUser | None: The resolved user object, or None if resolution fails.
"""
if has_request_context():
return self._resolve_user_from_request()
else:
return self._resolve_user_from_database(user_id=user_id)
# Try to resolve user from request context first
user = _try_resolve_user_from_request()
if user is not None:
return user
def _resolve_user_from_request(self) -> Account | EndUser | None:
"""
Resolve user from Flask request context.
"""
try:
# Note: `current_user` is a LocalProxy. Never compare it with None directly.
return getattr(current_user, "_get_current_object", lambda: current_user)()
except Exception as e:
logger.warning("Failed to resolve user from request context: %s", e)
return None
# Fall back to database resolution
return self._resolve_user_from_database(user_id=user_id)
def _resolve_user_from_database(self, user_id: str) -> Account | EndUser | None:
"""

View File

@@ -0,0 +1,22 @@
"""
Execution Context - Context management for workflow execution.
This package provides Flask-independent context management for workflow
execution in multi-threaded environments.
"""
from core.workflow.context.execution_context import (
AppContext,
ExecutionContext,
IExecutionContext,
NullAppContext,
capture_current_context,
)
__all__ = [
"AppContext",
"ExecutionContext",
"IExecutionContext",
"NullAppContext",
"capture_current_context",
]

View File

@@ -0,0 +1,216 @@
"""
Execution Context - Abstracted context management for workflow execution.
"""
import contextvars
from abc import ABC, abstractmethod
from collections.abc import Generator
from contextlib import AbstractContextManager, contextmanager
from typing import Any, Protocol, final, runtime_checkable
class AppContext(ABC):
"""
Abstract application context interface.
This abstraction allows workflow execution to work with or without Flask
by providing a common interface for application context management.
"""
@abstractmethod
def get_config(self, key: str, default: Any = None) -> Any:
"""Get configuration value by key."""
pass
@abstractmethod
def get_extension(self, name: str) -> Any:
"""Get Flask extension by name (e.g., 'db', 'cache')."""
pass
@abstractmethod
def enter(self) -> AbstractContextManager[None]:
"""Enter the application context."""
pass
@runtime_checkable
class IExecutionContext(Protocol):
"""
Protocol for execution context.
This protocol defines the interface that all execution contexts must implement,
allowing both ExecutionContext and FlaskExecutionContext to be used interchangeably.
"""
def __enter__(self) -> "IExecutionContext":
"""Enter the execution context."""
...
def __exit__(self, *args: Any) -> None:
"""Exit the execution context."""
...
@property
def user(self) -> Any:
"""Get user object."""
...
@final
class ExecutionContext:
"""
Execution context for workflow execution in worker threads.
This class encapsulates all context needed for workflow execution:
- Application context (Flask app or standalone)
- Context variables for Python contextvars
- User information (optional)
It is designed to be serializable and passable to worker threads.
"""
def __init__(
self,
app_context: AppContext | None = None,
context_vars: contextvars.Context | None = None,
user: Any = None,
) -> None:
"""
Initialize execution context.
Args:
app_context: Application context (Flask or standalone)
context_vars: Python contextvars to preserve
user: User object (optional)
"""
self._app_context = app_context
self._context_vars = context_vars
self._user = user
@property
def app_context(self) -> AppContext | None:
"""Get application context."""
return self._app_context
@property
def context_vars(self) -> contextvars.Context | None:
"""Get context variables."""
return self._context_vars
@property
def user(self) -> Any:
"""Get user object."""
return self._user
@contextmanager
def enter(self) -> Generator[None, None, None]:
"""
Enter this execution context.
This is a convenience method that creates a context manager.
"""
# Restore context variables if provided
if self._context_vars:
for var, val in self._context_vars.items():
var.set(val)
# Enter app context if available
if self._app_context is not None:
with self._app_context.enter():
yield
else:
yield
def __enter__(self) -> "ExecutionContext":
"""Enter the execution context."""
self._cm = self.enter()
self._cm.__enter__()
return self
def __exit__(self, *args: Any) -> None:
"""Exit the execution context."""
if hasattr(self, "_cm"):
self._cm.__exit__(*args)
class NullAppContext(AppContext):
"""
Null implementation of AppContext for non-Flask environments.
This is used when running without Flask (e.g., in tests or standalone mode).
"""
def __init__(self, config: dict[str, Any] | None = None) -> None:
"""
Initialize null app context.
Args:
config: Optional configuration dictionary
"""
self._config = config or {}
self._extensions: dict[str, Any] = {}
def get_config(self, key: str, default: Any = None) -> Any:
"""Get configuration value by key."""
return self._config.get(key, default)
def get_extension(self, name: str) -> Any:
"""Get extension by name."""
return self._extensions.get(name)
def set_extension(self, name: str, extension: Any) -> None:
"""Set extension by name."""
self._extensions[name] = extension
@contextmanager
def enter(self) -> Generator[None, None, None]:
"""Enter null context (no-op)."""
yield
class ExecutionContextBuilder:
"""
Builder for creating ExecutionContext instances.
This provides a fluent API for building execution contexts.
"""
def __init__(self) -> None:
self._app_context: AppContext | None = None
self._context_vars: contextvars.Context | None = None
self._user: Any = None
def with_app_context(self, app_context: AppContext) -> "ExecutionContextBuilder":
"""Set application context."""
self._app_context = app_context
return self
def with_context_vars(self, context_vars: contextvars.Context) -> "ExecutionContextBuilder":
"""Set context variables."""
self._context_vars = context_vars
return self
def with_user(self, user: Any) -> "ExecutionContextBuilder":
"""Set user."""
self._user = user
return self
def build(self) -> ExecutionContext:
"""Build the execution context."""
return ExecutionContext(
app_context=self._app_context,
context_vars=self._context_vars,
user=self._user,
)
def capture_current_context() -> IExecutionContext:
"""
Capture current execution context from the calling environment.
Returns:
IExecutionContext with captured context
"""
from context import capture_current_context
return capture_current_context()
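Illustrative only: the builder's fluent API in a non-Flask setting (the unit tests later in this diff cover the same path); the config key is made up.

import contextvars

from core.workflow.context.execution_context import ExecutionContextBuilder, NullAppContext

ctx = (
    ExecutionContextBuilder()
    .with_app_context(NullAppContext(config={"TESTING": True}))
    .with_context_vars(contextvars.copy_context())
    .with_user(None)
    .build()
)
with ctx:
    ...  # worker-side code sees the captured contextvars and app config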

View File

@@ -7,15 +7,13 @@ Domain-Driven Design principles for improved maintainability and testability.
from __future__ import annotations
import contextvars
import logging
import queue
import threading
from collections.abc import Generator
from typing import TYPE_CHECKING, cast, final
from flask import Flask, current_app
from core.workflow.context import capture_current_context
from core.workflow.enums import NodeExecutionType
from core.workflow.graph import Graph
from core.workflow.graph_events import (
@@ -159,17 +157,8 @@ class GraphEngine:
self._layers: list[GraphEngineLayer] = []
# === Worker Pool Setup ===
# Capture Flask app context for worker threads
flask_app: Flask | None = None
try:
app = current_app._get_current_object() # type: ignore
if isinstance(app, Flask):
flask_app = app
except RuntimeError:
pass
# Capture context variables for worker threads
context_vars = contextvars.copy_context()
# Capture execution context for worker threads
execution_context = capture_current_context()
# Create worker pool for parallel node execution
self._worker_pool = WorkerPool(
@@ -177,8 +166,7 @@ class GraphEngine:
event_queue=self._event_queue,
graph=self._graph,
layers=self._layers,
flask_app=flask_app,
context_vars=context_vars,
execution_context=execution_context,
min_workers=self._min_workers,
max_workers=self._max_workers,
scale_up_threshold=self._scale_up_threshold,

View File

@@ -5,26 +5,27 @@ Workers pull node IDs from the ready_queue, execute nodes, and push events
to the event_queue for the dispatcher to process.
"""
import contextvars
import queue
import threading
import time
from collections.abc import Sequence
from datetime import datetime
from typing import final
from typing import TYPE_CHECKING, final
from uuid import uuid4
from flask import Flask
from typing_extensions import override
from core.workflow.context import IExecutionContext
from core.workflow.graph import Graph
from core.workflow.graph_engine.layers.base import GraphEngineLayer
from core.workflow.graph_events import GraphNodeEventBase, NodeRunFailedEvent
from core.workflow.nodes.base.node import Node
from libs.flask_utils import preserve_flask_contexts
from .ready_queue import ReadyQueue
if TYPE_CHECKING:
pass
@final
class Worker(threading.Thread):
@@ -44,8 +45,7 @@ class Worker(threading.Thread):
layers: Sequence[GraphEngineLayer],
stop_event: threading.Event,
worker_id: int = 0,
flask_app: Flask | None = None,
context_vars: contextvars.Context | None = None,
execution_context: IExecutionContext | None = None,
) -> None:
"""
Initialize worker thread.
@@ -56,19 +56,17 @@ class Worker(threading.Thread):
graph: Graph containing nodes to execute
layers: Graph engine layers for node execution hooks
worker_id: Unique identifier for this worker
flask_app: Optional Flask application for context preservation
context_vars: Optional context variables to preserve in worker thread
execution_context: Optional execution context for context preservation
"""
super().__init__(name=f"GraphWorker-{worker_id}", daemon=True)
self._ready_queue = ready_queue
self._event_queue = event_queue
self._graph = graph
self._worker_id = worker_id
self._flask_app = flask_app
self._context_vars = context_vars
self._last_task_time = time.time()
self._execution_context = execution_context
self._stop_event = stop_event
self._layers = layers if layers is not None else []
self._last_task_time = time.time()
def stop(self) -> None:
"""Worker is controlled via shared stop_event from GraphEngine.
@@ -135,11 +133,9 @@ class Worker(threading.Thread):
error: Exception | None = None
if self._flask_app and self._context_vars:
with preserve_flask_contexts(
flask_app=self._flask_app,
context_vars=self._context_vars,
):
# Execute the node with preserved context if execution context is provided
if self._execution_context is not None:
with self._execution_context:
self._invoke_node_run_start_hooks(node)
try:
node_events = node.run()

View File

@@ -8,9 +8,10 @@ DynamicScaler, and WorkerFactory into a single class.
import logging
import queue
import threading
from typing import TYPE_CHECKING, final
from typing import final
from configs import dify_config
from core.workflow.context import IExecutionContext
from core.workflow.graph import Graph
from core.workflow.graph_events import GraphNodeEventBase
@@ -20,11 +21,6 @@ from ..worker import Worker
logger = logging.getLogger(__name__)
if TYPE_CHECKING:
from contextvars import Context
from flask import Flask
@final
class WorkerPool:
@@ -42,8 +38,7 @@ class WorkerPool:
graph: Graph,
layers: list[GraphEngineLayer],
stop_event: threading.Event,
flask_app: "Flask | None" = None,
context_vars: "Context | None" = None,
execution_context: IExecutionContext | None = None,
min_workers: int | None = None,
max_workers: int | None = None,
scale_up_threshold: int | None = None,
@@ -57,8 +52,7 @@ class WorkerPool:
event_queue: Queue for worker events
graph: The workflow graph
layers: Graph engine layers for node execution hooks
flask_app: Optional Flask app for context preservation
context_vars: Optional context variables
execution_context: Optional execution context for context preservation
min_workers: Minimum number of workers
max_workers: Maximum number of workers
scale_up_threshold: Queue depth to trigger scale up
@@ -67,8 +61,7 @@ class WorkerPool:
self._ready_queue = ready_queue
self._event_queue = event_queue
self._graph = graph
self._flask_app = flask_app
self._context_vars = context_vars
self._execution_context = execution_context
self._layers = layers
# Scaling parameters with defaults
@@ -152,8 +145,7 @@ class WorkerPool:
graph=self._graph,
layers=self._layers,
worker_id=worker_id,
flask_app=self._flask_app,
context_vars=self._context_vars,
execution_context=self._execution_context,
stop_event=self._stop_event,
)

View File

@@ -1,11 +1,9 @@
import contextvars
import logging
from collections.abc import Generator, Mapping, Sequence
from concurrent.futures import Future, ThreadPoolExecutor, as_completed
from datetime import UTC, datetime
from typing import TYPE_CHECKING, Any, NewType, cast
from flask import Flask, current_app
from typing_extensions import TypeIs
from core.model_runtime.entities.llm_entities import LLMUsage
@@ -39,7 +37,6 @@ from core.workflow.nodes.base.node import Node
from core.workflow.nodes.iteration.entities import ErrorHandleMode, IterationNodeData
from core.workflow.runtime import VariablePool
from libs.datetime_utils import naive_utc_now
from libs.flask_utils import preserve_flask_contexts
from .exc import (
InvalidIteratorValueError,
@@ -51,6 +48,7 @@ from .exc import (
)
if TYPE_CHECKING:
from core.workflow.context import IExecutionContext
from core.workflow.graph_engine import GraphEngine
logger = logging.getLogger(__name__)
@@ -252,8 +250,7 @@ class IterationNode(LLMUsageTrackingMixin, Node[IterationNodeData]):
self._execute_single_iteration_parallel,
index=index,
item=item,
flask_app=current_app._get_current_object(), # type: ignore
context_vars=contextvars.copy_context(),
execution_context=self._capture_execution_context(),
)
future_to_index[future] = index
@@ -306,11 +303,10 @@ class IterationNode(LLMUsageTrackingMixin, Node[IterationNodeData]):
self,
index: int,
item: object,
flask_app: Flask,
context_vars: contextvars.Context,
execution_context: "IExecutionContext",
) -> tuple[datetime, list[GraphNodeEventBase], object | None, dict[str, Variable], LLMUsage]:
"""Execute a single iteration in parallel mode and return results."""
with preserve_flask_contexts(flask_app=flask_app, context_vars=context_vars):
with execution_context:
iter_start_at = datetime.now(UTC).replace(tzinfo=None)
events: list[GraphNodeEventBase] = []
outputs_temp: list[object] = []
@@ -339,6 +335,12 @@ class IterationNode(LLMUsageTrackingMixin, Node[IterationNodeData]):
graph_engine.graph_runtime_state.llm_usage,
)
def _capture_execution_context(self) -> "IExecutionContext":
"""Capture current execution context for parallel iterations."""
from core.workflow.context import capture_current_context
return capture_current_context()
def _handle_iteration_success(
self,
started_at: datetime,

View File

@@ -0,0 +1,74 @@
"""
Workspace permission helper functions.
These helpers check both billing/plan level and workspace-specific policy level permissions.
Checks are performed at two levels:
1. Billing/plan level - via FeatureService (e.g., SANDBOX plan restrictions)
2. Workspace policy level - via EnterpriseService (admin-configured per workspace)
"""
import logging
from werkzeug.exceptions import Forbidden
from configs import dify_config
from services.enterprise.enterprise_service import EnterpriseService
from services.feature_service import FeatureService
logger = logging.getLogger(__name__)
def check_workspace_member_invite_permission(workspace_id: str) -> None:
"""
Check if workspace allows member invitations at both billing and policy levels.
Checks performed:
1. Billing/plan level - For future expansion (currently no plan-level restriction)
2. Enterprise policy level - Admin-configured workspace permission
Args:
workspace_id: The workspace ID to check permissions for
Raises:
Forbidden: If either billing plan or workspace policy prohibits member invitations
"""
# Check enterprise workspace policy level (only if enterprise enabled)
if dify_config.ENTERPRISE_ENABLED:
try:
permission = EnterpriseService.WorkspacePermissionService.get_permission(workspace_id)
if not permission.allow_member_invite:
raise Forbidden("Workspace policy prohibits member invitations")
except Forbidden:
raise
except Exception:
logger.exception("Failed to check workspace invite permission for %s", workspace_id)
def check_workspace_owner_transfer_permission(workspace_id: str) -> None:
"""
Check if workspace allows owner transfer at both billing and policy levels.
Checks performed:
1. Billing/plan level - SANDBOX plan blocks owner transfer
2. Enterprise policy level - Admin-configured workspace permission
Args:
workspace_id: The workspace ID to check permissions for
Raises:
Forbidden: If either billing plan or workspace policy prohibits ownership transfer
"""
features = FeatureService.get_features(workspace_id)
if not features.is_allow_transfer_workspace:
raise Forbidden("Your current plan does not allow workspace ownership transfer")
# Check enterprise workspace policy level (only if enterprise enabled)
if dify_config.ENTERPRISE_ENABLED:
try:
permission = EnterpriseService.WorkspacePermissionService.get_permission(workspace_id)
if not permission.allow_owner_transfer:
raise Forbidden("Workspace policy prohibits ownership transfer")
except Forbidden:
raise
except Exception:
logger.exception("Failed to check workspace transfer permission for %s", workspace_id)
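Illustrative only: how a caller uses the invite-permission helper. The wrapper function is hypothetical; the helper's behaviour matches the file above (a no-op check when ENTERPRISE_ENABLED is off, a werkzeug Forbidden when the policy denies invitations).

from libs.workspace_permission import check_workspace_member_invite_permission


def invite_member(tenant_id: str, email: str) -> None:  # hypothetical wrapper
    # Raises Forbidden (rendered as HTTP 403 by the console controllers above)
    # when the enterprise workspace policy disallows member invitations.
    check_workspace_member_invite_permission(tenant_id)
    ...  # build and send the invitation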

View File

@@ -0,0 +1,35 @@
"""change workflow node execution workflow_run index
Revision ID: 288345cd01d1
Revises: 3334862ee907
Create Date: 2026-01-16 17:15:00.000000
"""
from alembic import op
# revision identifiers, used by Alembic.
revision = "288345cd01d1"
down_revision = "3334862ee907"
branch_labels = None
depends_on = None
def upgrade():
with op.batch_alter_table("workflow_node_executions", schema=None) as batch_op:
batch_op.drop_index("workflow_node_execution_workflow_run_idx")
batch_op.create_index(
"workflow_node_execution_workflow_run_id_idx",
["workflow_run_id"],
unique=False,
)
def downgrade():
with op.batch_alter_table("workflow_node_executions", schema=None) as batch_op:
batch_op.drop_index("workflow_node_execution_workflow_run_id_idx")
batch_op.create_index(
"workflow_node_execution_workflow_run_idx",
["tenant_id", "app_id", "workflow_id", "triggered_from", "workflow_run_id"],
unique=False,
)

View File

@@ -781,11 +781,7 @@ class WorkflowNodeExecutionModel(Base): # This model is expected to have `offlo
return (
PrimaryKeyConstraint("id", name="workflow_node_execution_pkey"),
Index(
"workflow_node_execution_workflow_run_idx",
"tenant_id",
"app_id",
"workflow_id",
"triggered_from",
"workflow_node_execution_workflow_run_id_idx",
"workflow_run_id",
),
Index(

View File

@@ -13,6 +13,8 @@ from collections.abc import Sequence
from datetime import datetime
from typing import Protocol
from sqlalchemy.orm import Session
from core.workflow.repositories.workflow_node_execution_repository import WorkflowNodeExecutionRepository
from models.workflow import WorkflowNodeExecutionModel
@@ -130,6 +132,18 @@ class DifyAPIWorkflowNodeExecutionRepository(WorkflowNodeExecutionRepository, Pr
"""
...
def count_by_runs(self, session: Session, run_ids: Sequence[str]) -> tuple[int, int]:
"""
Count node executions and offloads for the given workflow run ids.
"""
...
def delete_by_runs(self, session: Session, run_ids: Sequence[str]) -> tuple[int, int]:
"""
Delete node executions and offloads for the given workflow run ids.
"""
...
def delete_executions_by_app(
self,
tenant_id: str,

View File

@@ -7,17 +7,15 @@ using SQLAlchemy 2.0 style queries for WorkflowNodeExecutionModel operations.
from collections.abc import Sequence
from datetime import datetime
from typing import TypedDict, cast
from typing import cast
from sqlalchemy import asc, delete, desc, func, select, tuple_
from sqlalchemy import asc, delete, desc, func, select
from sqlalchemy.engine import CursorResult
from sqlalchemy.orm import Session, sessionmaker
from models.enums import WorkflowRunTriggeredFrom
from models.workflow import (
WorkflowNodeExecutionModel,
WorkflowNodeExecutionOffload,
WorkflowNodeExecutionTriggeredFrom,
)
from repositories.api_workflow_node_execution_repository import DifyAPIWorkflowNodeExecutionRepository
@@ -49,26 +47,6 @@ class DifyAPISQLAlchemyWorkflowNodeExecutionRepository(DifyAPIWorkflowNodeExecut
"""
self._session_maker = session_maker
@staticmethod
def _map_run_triggered_from_to_node_triggered_from(triggered_from: str) -> str:
"""
Map workflow run triggered_from values to workflow node execution triggered_from values.
"""
if triggered_from in {
WorkflowRunTriggeredFrom.APP_RUN.value,
WorkflowRunTriggeredFrom.DEBUGGING.value,
WorkflowRunTriggeredFrom.SCHEDULE.value,
WorkflowRunTriggeredFrom.PLUGIN.value,
WorkflowRunTriggeredFrom.WEBHOOK.value,
}:
return WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN.value
if triggered_from in {
WorkflowRunTriggeredFrom.RAG_PIPELINE_RUN.value,
WorkflowRunTriggeredFrom.RAG_PIPELINE_DEBUGGING.value,
}:
return WorkflowNodeExecutionTriggeredFrom.RAG_PIPELINE_RUN.value
return ""
def get_node_last_execution(
self,
tenant_id: str,
@@ -316,51 +294,16 @@ class DifyAPISQLAlchemyWorkflowNodeExecutionRepository(DifyAPIWorkflowNodeExecut
session.commit()
return result.rowcount
class RunContext(TypedDict):
run_id: str
tenant_id: str
app_id: str
workflow_id: str
triggered_from: str
@staticmethod
def delete_by_runs(session: Session, runs: Sequence[RunContext]) -> tuple[int, int]:
def delete_by_runs(self, session: Session, run_ids: Sequence[str]) -> tuple[int, int]:
"""
Delete node executions (and offloads) for the given workflow runs using indexed columns.
Uses the composite index on (tenant_id, app_id, workflow_id, triggered_from, workflow_run_id)
by filtering on those columns with tuple IN.
Delete node executions (and offloads) for the given workflow runs using workflow_run_id.
"""
if not runs:
if not run_ids:
return 0, 0
tuple_values = [
(
run["tenant_id"],
run["app_id"],
run["workflow_id"],
DifyAPISQLAlchemyWorkflowNodeExecutionRepository._map_run_triggered_from_to_node_triggered_from(
run["triggered_from"]
),
run["run_id"],
)
for run in runs
]
node_execution_ids = session.scalars(
select(WorkflowNodeExecutionModel.id).where(
tuple_(
WorkflowNodeExecutionModel.tenant_id,
WorkflowNodeExecutionModel.app_id,
WorkflowNodeExecutionModel.workflow_id,
WorkflowNodeExecutionModel.triggered_from,
WorkflowNodeExecutionModel.workflow_run_id,
).in_(tuple_values)
)
).all()
if not node_execution_ids:
return 0, 0
run_ids = list(run_ids)
run_id_filter = WorkflowNodeExecutionModel.workflow_run_id.in_(run_ids)
node_execution_ids = select(WorkflowNodeExecutionModel.id).where(run_id_filter)
offloads_deleted = (
cast(
@@ -377,55 +320,32 @@ class DifyAPISQLAlchemyWorkflowNodeExecutionRepository(DifyAPIWorkflowNodeExecut
node_executions_deleted = (
cast(
CursorResult,
session.execute(
delete(WorkflowNodeExecutionModel).where(WorkflowNodeExecutionModel.id.in_(node_execution_ids))
),
session.execute(delete(WorkflowNodeExecutionModel).where(run_id_filter)),
).rowcount
or 0
)
return node_executions_deleted, offloads_deleted
@staticmethod
def count_by_runs(session: Session, runs: Sequence[RunContext]) -> tuple[int, int]:
def count_by_runs(self, session: Session, run_ids: Sequence[str]) -> tuple[int, int]:
"""
Count node executions (and offloads) for the given workflow runs using indexed columns.
Count node executions (and offloads) for the given workflow runs using workflow_run_id.
"""
if not runs:
if not run_ids:
return 0, 0
tuple_values = [
(
run["tenant_id"],
run["app_id"],
run["workflow_id"],
DifyAPISQLAlchemyWorkflowNodeExecutionRepository._map_run_triggered_from_to_node_triggered_from(
run["triggered_from"]
),
run["run_id"],
)
for run in runs
]
tuple_filter = tuple_(
WorkflowNodeExecutionModel.tenant_id,
WorkflowNodeExecutionModel.app_id,
WorkflowNodeExecutionModel.workflow_id,
WorkflowNodeExecutionModel.triggered_from,
WorkflowNodeExecutionModel.workflow_run_id,
).in_(tuple_values)
run_ids = list(run_ids)
run_id_filter = WorkflowNodeExecutionModel.workflow_run_id.in_(run_ids)
node_executions_count = (
session.scalar(select(func.count()).select_from(WorkflowNodeExecutionModel).where(tuple_filter)) or 0
session.scalar(select(func.count()).select_from(WorkflowNodeExecutionModel).where(run_id_filter)) or 0
)
node_execution_ids = select(WorkflowNodeExecutionModel.id).where(run_id_filter)
offloads_count = (
session.scalar(
select(func.count())
.select_from(WorkflowNodeExecutionOffload)
.join(
WorkflowNodeExecutionModel,
WorkflowNodeExecutionOffload.node_execution_id == WorkflowNodeExecutionModel.id,
)
.where(tuple_filter)
.where(WorkflowNodeExecutionOffload.node_execution_id.in_(node_execution_ids))
)
or 0
)

View File

@@ -1381,6 +1381,11 @@ class RegisterService:
normalized_email = email.lower()
"""Invite new member"""
# Check workspace permission for member invitations
from libs.workspace_permission import check_workspace_member_invite_permission
check_workspace_member_invite_permission(tenant.id)
with Session(db.engine) as session:
account = AccountService.get_account_by_email_with_case_fallback(email, session=session)

View File

@@ -13,6 +13,23 @@ class WebAppSettings(BaseModel):
)
class WorkspacePermission(BaseModel):
workspace_id: str = Field(
description="The ID of the workspace.",
alias="workspaceId",
)
allow_member_invite: bool = Field(
description="Whether to allow members to invite new members to the workspace.",
default=False,
alias="allowMemberInvite",
)
allow_owner_transfer: bool = Field(
description="Whether to allow owners to transfer ownership of the workspace.",
default=False,
alias="allowOwnerTransfer",
)
class EnterpriseService:
@classmethod
def get_info(cls):
@@ -44,6 +61,16 @@ class EnterpriseService:
except ValueError as e:
raise ValueError(f"Invalid date format: {data}") from e
class WorkspacePermissionService:
@classmethod
def get_permission(cls, workspace_id: str):
if not workspace_id:
raise ValueError("workspace_id must be provided.")
data = EnterpriseRequest.send_request("GET", f"/workspaces/{workspace_id}/permission")
if not data or "permission" not in data:
raise ValueError("No data found.")
return WorkspacePermission.model_validate(data["permission"])
class WebAppAuth:
@classmethod
def is_user_allowed_to_access_webapp(cls, user_id: str, app_id: str):
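Illustrative only: the response shape the new WorkspacePermissionService expects from GET /workspaces/{workspace_id}/permission and how the pydantic model consumes it via its camelCase aliases. Sample values are made up and the module path is assumed from the imports elsewhere in this diff.

from services.enterprise.enterprise_service import WorkspacePermission

payload = {
    "permission": {
        "workspaceId": "ws_123",
        "allowMemberInvite": True,
        "allowOwnerTransfer": False,
    }
}
permission = WorkspacePermission.model_validate(payload["permission"])
assert permission.allow_member_invite is True
assert permission.allow_owner_transfer is False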

View File

@@ -10,9 +10,7 @@ from enums.cloud_plan import CloudPlan
from extensions.ext_database import db
from models.workflow import WorkflowRun
from repositories.api_workflow_run_repository import APIWorkflowRunRepository
from repositories.sqlalchemy_api_workflow_node_execution_repository import (
DifyAPISQLAlchemyWorkflowNodeExecutionRepository,
)
from repositories.factory import DifyAPIRepositoryFactory
from repositories.sqlalchemy_workflow_trigger_log_repository import SQLAlchemyWorkflowTriggerLogRepository
from services.billing_service import BillingService, SubscriptionPlan
@@ -92,9 +90,12 @@ class WorkflowRunCleanup:
paid_or_skipped = len(run_rows) - len(free_runs)
if not free_runs:
skipped_message = (
f"[batch #{batch_index}] skipped (no sandbox runs in batch, {paid_or_skipped} paid/unknown)"
)
click.echo(
click.style(
f"[batch #{batch_index}] skipped (no sandbox runs in batch, {paid_or_skipped} paid/unknown)",
skipped_message,
fg="yellow",
)
)
@@ -255,21 +256,6 @@ class WorkflowRunCleanup:
trigger_repo = SQLAlchemyWorkflowTriggerLogRepository(session)
return trigger_repo.count_by_run_ids(run_ids)
@staticmethod
def _build_run_contexts(
runs: Sequence[WorkflowRun],
) -> list[DifyAPISQLAlchemyWorkflowNodeExecutionRepository.RunContext]:
return [
{
"run_id": run.id,
"tenant_id": run.tenant_id,
"app_id": run.app_id,
"workflow_id": run.workflow_id,
"triggered_from": run.triggered_from,
}
for run in runs
]
@staticmethod
def _empty_related_counts() -> dict[str, int]:
return {
@@ -293,9 +279,15 @@ class WorkflowRunCleanup:
)
def _count_node_executions(self, session: Session, runs: Sequence[WorkflowRun]) -> tuple[int, int]:
run_contexts = self._build_run_contexts(runs)
return DifyAPISQLAlchemyWorkflowNodeExecutionRepository.count_by_runs(session, run_contexts)
run_ids = [run.id for run in runs]
repo = DifyAPIRepositoryFactory.create_api_workflow_node_execution_repository(
session_maker=sessionmaker(bind=session.get_bind(), expire_on_commit=False)
)
return repo.count_by_runs(session, run_ids)
def _delete_node_executions(self, session: Session, runs: Sequence[WorkflowRun]) -> tuple[int, int]:
run_contexts = self._build_run_contexts(runs)
return DifyAPISQLAlchemyWorkflowNodeExecutionRepository.delete_by_runs(session, run_contexts)
run_ids = [run.id for run in runs]
repo = DifyAPIRepositoryFactory.create_api_workflow_node_execution_repository(
session_maker=sessionmaker(bind=session.get_bind(), expire_on_commit=False)
)
return repo.delete_by_runs(session, run_ids)

View File

@@ -0,0 +1 @@
"""Tests for workflow context management."""

View File

@@ -0,0 +1,258 @@
"""Tests for execution context module."""
import contextvars
from typing import Any
from unittest.mock import MagicMock
import pytest
from core.workflow.context.execution_context import (
AppContext,
ExecutionContext,
ExecutionContextBuilder,
IExecutionContext,
NullAppContext,
)
class TestAppContext:
"""Test AppContext abstract base class."""
def test_app_context_is_abstract(self):
"""Test that AppContext cannot be instantiated directly."""
with pytest.raises(TypeError):
AppContext() # type: ignore
class TestNullAppContext:
"""Test NullAppContext implementation."""
def test_null_app_context_get_config(self):
"""Test get_config returns value from config dict."""
config = {"key1": "value1", "key2": "value2"}
ctx = NullAppContext(config=config)
assert ctx.get_config("key1") == "value1"
assert ctx.get_config("key2") == "value2"
def test_null_app_context_get_config_default(self):
"""Test get_config returns default when key not found."""
ctx = NullAppContext()
assert ctx.get_config("nonexistent", "default") == "default"
assert ctx.get_config("nonexistent") is None
def test_null_app_context_get_extension(self):
"""Test get_extension returns stored extension."""
ctx = NullAppContext()
extension = MagicMock()
ctx.set_extension("db", extension)
assert ctx.get_extension("db") == extension
def test_null_app_context_get_extension_not_found(self):
"""Test get_extension returns None when extension not found."""
ctx = NullAppContext()
assert ctx.get_extension("nonexistent") is None
def test_null_app_context_enter_yield(self):
"""Test enter method yields without any side effects."""
ctx = NullAppContext()
with ctx.enter():
# Should not raise any exception
pass
class TestExecutionContext:
"""Test ExecutionContext class."""
def test_initialization_with_all_params(self):
"""Test ExecutionContext initialization with all parameters."""
app_ctx = NullAppContext()
context_vars = contextvars.copy_context()
user = MagicMock()
ctx = ExecutionContext(
app_context=app_ctx,
context_vars=context_vars,
user=user,
)
assert ctx.app_context == app_ctx
assert ctx.context_vars == context_vars
assert ctx.user == user
def test_initialization_with_minimal_params(self):
"""Test ExecutionContext initialization with minimal parameters."""
ctx = ExecutionContext()
assert ctx.app_context is None
assert ctx.context_vars is None
assert ctx.user is None
def test_enter_with_context_vars(self):
"""Test enter restores context variables."""
test_var = contextvars.ContextVar("test_var")
test_var.set("original_value")
# Copy context with the variable
context_vars = contextvars.copy_context()
# Change the variable
test_var.set("new_value")
# Create execution context and enter it
ctx = ExecutionContext(context_vars=context_vars)
with ctx.enter():
# Variable should be restored to original value
assert test_var.get() == "original_value"
# After exiting, variable stays at the value from within the context
# (this is expected Python contextvars behavior)
assert test_var.get() == "original_value"
def test_enter_with_app_context(self):
"""Test enter enters app context if available."""
app_ctx = NullAppContext()
ctx = ExecutionContext(app_context=app_ctx)
# Should not raise any exception
with ctx.enter():
pass
def test_enter_without_app_context(self):
"""Test enter works without app context."""
ctx = ExecutionContext(app_context=None)
# Should not raise any exception
with ctx.enter():
pass
def test_context_manager_protocol(self):
"""Test ExecutionContext supports context manager protocol."""
ctx = ExecutionContext()
with ctx:
# Should not raise any exception
pass
def test_user_property(self):
"""Test user property returns set user."""
user = MagicMock()
ctx = ExecutionContext(user=user)
assert ctx.user == user
class TestIExecutionContextProtocol:
"""Test IExecutionContext protocol."""
def test_execution_context_implements_protocol(self):
"""Test that ExecutionContext implements IExecutionContext protocol."""
ctx = ExecutionContext()
# Should have __enter__ and __exit__ methods
assert hasattr(ctx, "__enter__")
assert hasattr(ctx, "__exit__")
assert hasattr(ctx, "user")
def test_protocol_compatibility(self):
"""Test that ExecutionContext can be used where IExecutionContext is expected."""
def accept_context(context: IExecutionContext) -> Any:
"""Function that accepts IExecutionContext protocol."""
# Just verify it has the required protocol attributes
assert hasattr(context, "__enter__")
assert hasattr(context, "__exit__")
assert hasattr(context, "user")
return context.user
ctx = ExecutionContext(user="test_user")
result = accept_context(ctx)
assert result == "test_user"
def test_protocol_with_flask_execution_context(self):
"""Test that IExecutionContext protocol is compatible with different implementations."""
# Verify the protocol works with ExecutionContext
ctx = ExecutionContext(user="test_user")
# Should have the required protocol attributes
assert hasattr(ctx, "__enter__")
assert hasattr(ctx, "__exit__")
assert hasattr(ctx, "user")
assert ctx.user == "test_user"
# Should work as context manager
with ctx:
assert ctx.user == "test_user"
class TestExecutionContextBuilder:
"""Test ExecutionContextBuilder class."""
def test_builder_with_all_params(self):
"""Test builder with all parameters set."""
app_ctx = NullAppContext()
context_vars = contextvars.copy_context()
user = MagicMock()
ctx = (
ExecutionContextBuilder().with_app_context(app_ctx).with_context_vars(context_vars).with_user(user).build()
)
assert ctx.app_context == app_ctx
assert ctx.context_vars == context_vars
assert ctx.user == user
def test_builder_with_partial_params(self):
"""Test builder with only some parameters set."""
app_ctx = NullAppContext()
ctx = ExecutionContextBuilder().with_app_context(app_ctx).build()
assert ctx.app_context == app_ctx
assert ctx.context_vars is None
assert ctx.user is None
def test_builder_fluent_interface(self):
"""Test builder provides fluent interface."""
builder = ExecutionContextBuilder()
# Each method should return the builder
assert isinstance(builder.with_app_context(NullAppContext()), ExecutionContextBuilder)
assert isinstance(builder.with_context_vars(contextvars.copy_context()), ExecutionContextBuilder)
assert isinstance(builder.with_user(None), ExecutionContextBuilder)
class TestCaptureCurrentContext:
"""Test capture_current_context function."""
def test_capture_current_context_returns_context(self):
"""Test that capture_current_context returns a valid context."""
from core.workflow.context.execution_context import capture_current_context
result = capture_current_context()
# Should return an object that implements IExecutionContext
assert hasattr(result, "__enter__")
assert hasattr(result, "__exit__")
assert hasattr(result, "user")
def test_capture_current_context_captures_contextvars(self):
"""Test that capture_current_context captures context variables."""
# Set a context variable before capturing
import contextvars
test_var = contextvars.ContextVar("capture_test_var")
test_var.set("test_value_123")
from core.workflow.context.execution_context import capture_current_context
result = capture_current_context()
# Context variables should be captured
assert result.context_vars is not None

View File

@@ -0,0 +1,316 @@
"""Tests for Flask app context module."""
import contextvars
from unittest.mock import MagicMock, patch
import pytest
class TestFlaskAppContext:
"""Test FlaskAppContext implementation."""
@pytest.fixture
def mock_flask_app(self):
"""Create a mock Flask app."""
app = MagicMock()
app.config = {"TEST_KEY": "test_value"}
app.extensions = {"db": MagicMock(), "cache": MagicMock()}
app.app_context = MagicMock()
app.app_context.return_value.__enter__ = MagicMock(return_value=None)
app.app_context.return_value.__exit__ = MagicMock(return_value=None)
return app
def test_flask_app_context_initialization(self, mock_flask_app):
"""Test FlaskAppContext initialization."""
# Import here to avoid Flask dependency in test environment
from context.flask_app_context import FlaskAppContext
ctx = FlaskAppContext(mock_flask_app)
assert ctx.flask_app == mock_flask_app
def test_flask_app_context_get_config(self, mock_flask_app):
"""Test get_config returns Flask app config value."""
from context.flask_app_context import FlaskAppContext
ctx = FlaskAppContext(mock_flask_app)
assert ctx.get_config("TEST_KEY") == "test_value"
def test_flask_app_context_get_config_default(self, mock_flask_app):
"""Test get_config returns default when key not found."""
from context.flask_app_context import FlaskAppContext
ctx = FlaskAppContext(mock_flask_app)
assert ctx.get_config("NONEXISTENT", "default") == "default"
def test_flask_app_context_get_extension(self, mock_flask_app):
"""Test get_extension returns Flask extension."""
from context.flask_app_context import FlaskAppContext
ctx = FlaskAppContext(mock_flask_app)
db_ext = mock_flask_app.extensions["db"]
assert ctx.get_extension("db") == db_ext
def test_flask_app_context_get_extension_not_found(self, mock_flask_app):
"""Test get_extension returns None when extension not found."""
from context.flask_app_context import FlaskAppContext
ctx = FlaskAppContext(mock_flask_app)
assert ctx.get_extension("nonexistent") is None
def test_flask_app_context_enter(self, mock_flask_app):
"""Test enter method enters Flask app context."""
from context.flask_app_context import FlaskAppContext
ctx = FlaskAppContext(mock_flask_app)
with ctx.enter():
# Should not raise any exception
pass
# Verify app_context was called
mock_flask_app.app_context.assert_called_once()
class TestFlaskExecutionContext:
"""Test FlaskExecutionContext class."""
@pytest.fixture
def mock_flask_app(self):
"""Create a mock Flask app."""
app = MagicMock()
app.config = {}
app.app_context = MagicMock()
app.app_context.return_value.__enter__ = MagicMock(return_value=None)
app.app_context.return_value.__exit__ = MagicMock(return_value=None)
return app
def test_initialization(self, mock_flask_app):
"""Test FlaskExecutionContext initialization."""
from context.flask_app_context import FlaskExecutionContext
context_vars = contextvars.copy_context()
user = MagicMock()
ctx = FlaskExecutionContext(
flask_app=mock_flask_app,
context_vars=context_vars,
user=user,
)
assert ctx.context_vars == context_vars
assert ctx.user == user
def test_app_context_property(self, mock_flask_app):
"""Test app_context property returns FlaskAppContext."""
from context.flask_app_context import FlaskAppContext, FlaskExecutionContext
ctx = FlaskExecutionContext(
flask_app=mock_flask_app,
context_vars=contextvars.copy_context(),
)
assert isinstance(ctx.app_context, FlaskAppContext)
assert ctx.app_context.flask_app == mock_flask_app
def test_context_manager_protocol(self, mock_flask_app):
"""Test FlaskExecutionContext supports context manager protocol."""
from context.flask_app_context import FlaskExecutionContext
ctx = FlaskExecutionContext(
flask_app=mock_flask_app,
context_vars=contextvars.copy_context(),
)
# Should have __enter__ and __exit__ methods
assert hasattr(ctx, "__enter__")
assert hasattr(ctx, "__exit__")
# Should work as context manager
with ctx:
pass
class TestCaptureFlaskContext:
"""Test capture_flask_context function."""
@patch("context.flask_app_context.current_app")
@patch("context.flask_app_context.g")
def test_capture_flask_context_captures_app(self, mock_g, mock_current_app):
"""Test capture_flask_context captures Flask app."""
mock_app = MagicMock()
mock_app._get_current_object = MagicMock(return_value=mock_app)
mock_current_app._get_current_object = MagicMock(return_value=mock_app)
from context.flask_app_context import capture_flask_context
ctx = capture_flask_context()
assert ctx._flask_app == mock_app
@patch("context.flask_app_context.current_app")
@patch("context.flask_app_context.g")
def test_capture_flask_context_captures_user_from_g(self, mock_g, mock_current_app):
"""Test capture_flask_context captures user from Flask g object."""
mock_app = MagicMock()
mock_app._get_current_object = MagicMock(return_value=mock_app)
mock_current_app._get_current_object = MagicMock(return_value=mock_app)
mock_user = MagicMock()
mock_user.id = "user_123"
mock_g._login_user = mock_user
from context.flask_app_context import capture_flask_context
ctx = capture_flask_context()
assert ctx.user == mock_user
@patch("context.flask_app_context.current_app")
def test_capture_flask_context_with_explicit_user(self, mock_current_app):
"""Test capture_flask_context uses explicit user parameter."""
mock_app = MagicMock()
mock_app._get_current_object = MagicMock(return_value=mock_app)
mock_current_app._get_current_object = MagicMock(return_value=mock_app)
explicit_user = MagicMock()
explicit_user.id = "user_456"
from context.flask_app_context import capture_flask_context
ctx = capture_flask_context(user=explicit_user)
assert ctx.user == explicit_user
@patch("context.flask_app_context.current_app")
def test_capture_flask_context_captures_contextvars(self, mock_current_app):
"""Test capture_flask_context captures context variables."""
mock_app = MagicMock()
mock_app._get_current_object = MagicMock(return_value=mock_app)
mock_current_app._get_current_object = MagicMock(return_value=mock_app)
# Set a context variable
test_var = contextvars.ContextVar("test_var")
test_var.set("test_value")
from context.flask_app_context import capture_flask_context
ctx = capture_flask_context()
# Context variables should be captured
assert ctx.context_vars is not None
# Verify the variable is in the captured context
captured_value = ctx.context_vars[test_var]
assert captured_value == "test_value"
class TestFlaskExecutionContextIntegration:
"""Integration tests for FlaskExecutionContext."""
@pytest.fixture
def mock_flask_app(self):
"""Create a mock Flask app with proper app context."""
app = MagicMock()
app.config = {"TEST": "value"}
app.extensions = {"db": MagicMock()}
# Mock app context
mock_app_context = MagicMock()
mock_app_context.__enter__ = MagicMock(return_value=None)
mock_app_context.__exit__ = MagicMock(return_value=None)
app.app_context.return_value = mock_app_context
return app
def test_enter_restores_context_vars(self, mock_flask_app):
"""Test that enter restores captured context variables."""
# Create a context variable and set a value
test_var = contextvars.ContextVar("integration_test_var")
test_var.set("original_value")
# Capture the context
context_vars = contextvars.copy_context()
# Change the value
test_var.set("new_value")
# Create FlaskExecutionContext and enter it
from context.flask_app_context import FlaskExecutionContext
ctx = FlaskExecutionContext(
flask_app=mock_flask_app,
context_vars=context_vars,
)
with ctx:
# Value should be restored to original
assert test_var.get() == "original_value"
# After exiting, variable stays at the value from within the context
# (this is expected Python contextvars behavior)
assert test_var.get() == "original_value"
def test_enter_enters_flask_app_context(self, mock_flask_app):
"""Test that enter enters Flask app context."""
from context.flask_app_context import FlaskExecutionContext
ctx = FlaskExecutionContext(
flask_app=mock_flask_app,
context_vars=contextvars.copy_context(),
)
with ctx:
# Verify app context was entered
assert mock_flask_app.app_context.called
@patch("context.flask_app_context.g")
def test_enter_restores_user_in_g(self, mock_g, mock_flask_app):
"""Test that enter restores user in Flask g object."""
mock_user = MagicMock()
mock_user.id = "test_user"
# Note: FlaskExecutionContext saves user from g before entering context,
# then restores it after entering the app context.
# The user passed to constructor is NOT restored to g.
# So we need to test the actual behavior.
# Create FlaskExecutionContext with user in constructor
from context.flask_app_context import FlaskExecutionContext
ctx = FlaskExecutionContext(
flask_app=mock_flask_app,
context_vars=contextvars.copy_context(),
user=mock_user,
)
# Set user in g before entering (simulating existing user in g)
mock_g._login_user = mock_user
with ctx:
# After entering, the user from g before entry should be restored
assert mock_g._login_user == mock_user
# The user in constructor is stored but not automatically restored to g
# (it's available via ctx.user property)
assert ctx.user == mock_user
def test_enter_method_as_context_manager(self, mock_flask_app):
"""Test enter method returns a proper context manager."""
from context.flask_app_context import FlaskExecutionContext
ctx = FlaskExecutionContext(
flask_app=mock_flask_app,
context_vars=contextvars.copy_context(),
)
# enter() should return a generator/context manager
with ctx.enter():
# Should work without issues
pass
# Verify app context was called
assert mock_flask_app.app_context.called
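
The `context/flask_app_context.py` module exercised by these tests is not shown in this excerpt. A minimal sketch that would satisfy the assertions above is given below; the class names, method signatures, and the `_login_user` attribute on `flask.g` come from the tests themselves, while the method bodies and the error handling around `flask.g` are illustrative assumptions, not the implementation added in this pull request.

# Sketch inferred from the tests above; not the module added by this pull request.
import contextvars
from contextlib import contextmanager
from typing import Any

from flask import Flask, current_app, g


class FlaskAppContext:
    """Expose a Flask app's config and extensions to workflow code."""

    def __init__(self, flask_app: Flask):
        self.flask_app = flask_app

    def get_config(self, key: str, default: Any = None) -> Any:
        return self.flask_app.config.get(key, default)

    def get_extension(self, name: str) -> Any:
        return self.flask_app.extensions.get(name)

    @contextmanager
    def enter(self):
        # Push the application context of the wrapped Flask app.
        with self.flask_app.app_context():
            yield


class FlaskExecutionContext:
    """Captured Flask app, contextvars snapshot and login user for re-entry on worker threads."""

    def __init__(self, flask_app: Flask, context_vars: contextvars.Context, user: Any = None):
        self._flask_app = flask_app
        self.context_vars = context_vars
        self.user = user

    @property
    def app_context(self) -> FlaskAppContext:
        return FlaskAppContext(self._flask_app)

    @contextmanager
    def enter(self):
        # Replay the captured context variables into the current context.
        for var, value in self.context_vars.items():
            var.set(value)
        # Keep any login user already attached to flask.g across the app-context switch.
        try:
            saved_user = getattr(g, "_login_user", None)
        except RuntimeError:  # no app context on this thread yet
            saved_user = None
        with self._flask_app.app_context():
            if saved_user is not None:
                g._login_user = saved_user
            yield

    def __enter__(self):
        self._cm = self.enter()
        return self._cm.__enter__()

    def __exit__(self, exc_type, exc_val, exc_tb):
        return self._cm.__exit__(exc_type, exc_val, exc_tb)


def capture_flask_context(user: Any = None) -> FlaskExecutionContext:
    """Capture the current Flask app, contextvars and login user from the request thread."""
    flask_app = current_app._get_current_object()
    if user is None:
        try:
            user = getattr(g, "_login_user", None)
        except RuntimeError:  # called outside an app context
            user = None
    return FlaskExecutionContext(
        flask_app=flask_app,
        context_vars=contextvars.copy_context(),
        user=user,
    )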

View File

@@ -0,0 +1,142 @@
from unittest.mock import Mock, patch
import pytest
from werkzeug.exceptions import Forbidden
from libs.workspace_permission import (
check_workspace_member_invite_permission,
check_workspace_owner_transfer_permission,
)
class TestWorkspacePermissionHelper:
"""Test workspace permission helper functions."""
@patch("libs.workspace_permission.dify_config")
@patch("libs.workspace_permission.EnterpriseService")
def test_community_edition_allows_invite(self, mock_enterprise_service, mock_config):
"""Community edition should always allow invitations without calling any service."""
mock_config.ENTERPRISE_ENABLED = False
# Should not raise
check_workspace_member_invite_permission("test-workspace-id")
# EnterpriseService should NOT be called in community edition
mock_enterprise_service.WorkspacePermissionService.get_permission.assert_not_called()
@patch("libs.workspace_permission.dify_config")
@patch("libs.workspace_permission.FeatureService")
def test_community_edition_allows_transfer(self, mock_feature_service, mock_config):
"""Community edition should check billing plan but not call enterprise service."""
mock_config.ENTERPRISE_ENABLED = False
mock_features = Mock()
mock_features.is_allow_transfer_workspace = True
mock_feature_service.get_features.return_value = mock_features
# Should not raise
check_workspace_owner_transfer_permission("test-workspace-id")
mock_feature_service.get_features.assert_called_once_with("test-workspace-id")
@patch("libs.workspace_permission.EnterpriseService")
@patch("libs.workspace_permission.dify_config")
def test_enterprise_blocks_invite_when_disabled(self, mock_config, mock_enterprise_service):
"""Enterprise edition should block invitations when workspace policy is False."""
mock_config.ENTERPRISE_ENABLED = True
mock_permission = Mock()
mock_permission.allow_member_invite = False
mock_enterprise_service.WorkspacePermissionService.get_permission.return_value = mock_permission
with pytest.raises(Forbidden, match="Workspace policy prohibits member invitations"):
check_workspace_member_invite_permission("test-workspace-id")
mock_enterprise_service.WorkspacePermissionService.get_permission.assert_called_once_with("test-workspace-id")
@patch("libs.workspace_permission.EnterpriseService")
@patch("libs.workspace_permission.dify_config")
def test_enterprise_allows_invite_when_enabled(self, mock_config, mock_enterprise_service):
"""Enterprise edition should allow invitations when workspace policy is True."""
mock_config.ENTERPRISE_ENABLED = True
mock_permission = Mock()
mock_permission.allow_member_invite = True
mock_enterprise_service.WorkspacePermissionService.get_permission.return_value = mock_permission
# Should not raise
check_workspace_member_invite_permission("test-workspace-id")
mock_enterprise_service.WorkspacePermissionService.get_permission.assert_called_once_with("test-workspace-id")
@patch("libs.workspace_permission.EnterpriseService")
@patch("libs.workspace_permission.dify_config")
@patch("libs.workspace_permission.FeatureService")
def test_billing_plan_blocks_transfer(self, mock_feature_service, mock_config, mock_enterprise_service):
"""SANDBOX billing plan should block owner transfer before checking enterprise policy."""
mock_config.ENTERPRISE_ENABLED = True
mock_features = Mock()
mock_features.is_allow_transfer_workspace = False # SANDBOX plan
mock_feature_service.get_features.return_value = mock_features
with pytest.raises(Forbidden, match="Your current plan does not allow workspace ownership transfer"):
check_workspace_owner_transfer_permission("test-workspace-id")
# Enterprise service should NOT be called since billing plan already blocks
mock_enterprise_service.WorkspacePermissionService.get_permission.assert_not_called()
@patch("libs.workspace_permission.EnterpriseService")
@patch("libs.workspace_permission.dify_config")
@patch("libs.workspace_permission.FeatureService")
def test_enterprise_blocks_transfer_when_disabled(self, mock_feature_service, mock_config, mock_enterprise_service):
"""Enterprise edition should block transfer when workspace policy is False."""
mock_config.ENTERPRISE_ENABLED = True
mock_features = Mock()
mock_features.is_allow_transfer_workspace = True # Billing plan allows
mock_feature_service.get_features.return_value = mock_features
mock_permission = Mock()
mock_permission.allow_owner_transfer = False # Workspace policy blocks
mock_enterprise_service.WorkspacePermissionService.get_permission.return_value = mock_permission
with pytest.raises(Forbidden, match="Workspace policy prohibits ownership transfer"):
check_workspace_owner_transfer_permission("test-workspace-id")
mock_enterprise_service.WorkspacePermissionService.get_permission.assert_called_once_with("test-workspace-id")
@patch("libs.workspace_permission.EnterpriseService")
@patch("libs.workspace_permission.dify_config")
@patch("libs.workspace_permission.FeatureService")
def test_enterprise_allows_transfer_when_both_enabled(
self, mock_feature_service, mock_config, mock_enterprise_service
):
"""Enterprise edition should allow transfer when both billing and workspace policy allow."""
mock_config.ENTERPRISE_ENABLED = True
mock_features = Mock()
mock_features.is_allow_transfer_workspace = True # Billing plan allows
mock_feature_service.get_features.return_value = mock_features
mock_permission = Mock()
mock_permission.allow_owner_transfer = True # Workspace policy allows
mock_enterprise_service.WorkspacePermissionService.get_permission.return_value = mock_permission
# Should not raise
check_workspace_owner_transfer_permission("test-workspace-id")
mock_enterprise_service.WorkspacePermissionService.get_permission.assert_called_once_with("test-workspace-id")
@patch("libs.workspace_permission.logger")
@patch("libs.workspace_permission.EnterpriseService")
@patch("libs.workspace_permission.dify_config")
def test_enterprise_service_error_fails_open(self, mock_config, mock_enterprise_service, mock_logger):
"""On enterprise service error, should fail-open (allow) and log error."""
mock_config.ENTERPRISE_ENABLED = True
# Simulate enterprise service error
mock_enterprise_service.WorkspacePermissionService.get_permission.side_effect = Exception("Service unavailable")
# Should not raise (fail-open)
check_workspace_member_invite_permission("test-workspace-id")
# Should log the error
mock_logger.exception.assert_called_once()
assert "Failed to check workspace invite permission" in str(mock_logger.exception.call_args)

web/.gitignore vendored (2 changes)
View File

@@ -64,3 +64,5 @@ public/fallback-*.js
.vscode/settings.json
.vscode/mcp.json
.eslintcache

View File

@@ -1,10 +1,9 @@
'use client'
import type { InvitationResult } from '@/models/common'
import { RiPencilLine, RiUserAddLine } from '@remixicon/react'
import { RiPencilLine } from '@remixicon/react'
import { useState } from 'react'
import { useTranslation } from 'react-i18next'
import Avatar from '@/app/components/base/avatar'
import Button from '@/app/components/base/button'
import Tooltip from '@/app/components/base/tooltip'
import { NUM_INFINITE } from '@/app/components/billing/config'
import { Plan } from '@/app/components/billing/type'
@@ -16,8 +15,8 @@ import { useProviderContext } from '@/context/provider-context'
import { useFormatTimeFromNow } from '@/hooks/use-format-time-from-now'
import { LanguagesSupported } from '@/i18n-config/language'
import { useMembers } from '@/service/use-common'
import { cn } from '@/utils/classnames'
import EditWorkspaceModal from './edit-workspace-modal'
import InviteButton from './invite-button'
import InviteModal from './invite-modal'
import InvitedModal from './invited-modal'
import Operation from './operation'
@@ -37,7 +36,7 @@ const MembersPage = () => {
const { userProfile, currentWorkspace, isCurrentWorkspaceOwner, isCurrentWorkspaceManager } = useAppContext()
const { data, refetch } = useMembers()
const { systemFeatures } = useGlobalPublicStore()
const systemFeatures = useGlobalPublicStore(s => s.systemFeatures)
const { formatTimeFromNow } = useFormatTimeFromNow()
const [inviteModalVisible, setInviteModalVisible] = useState(false)
const [invitationResults, setInvitationResults] = useState<InvitationResult[]>([])
@@ -104,10 +103,9 @@ const MembersPage = () => {
{isMemberFull && (
<UpgradeBtn className="mr-2" loc="member-invite" />
)}
<Button variant="primary" className={cn('shrink-0')} disabled={!isCurrentWorkspaceManager || isMemberFull} onClick={() => setInviteModalVisible(true)}>
<RiUserAddLine className="mr-1 h-4 w-4" />
{t('members.invite', { ns: 'common' })}
</Button>
<div className="shrink-0">
<InviteButton disabled={!isCurrentWorkspaceManager || isMemberFull} onClick={() => setInviteModalVisible(true)} />
</div>
</div>
<div className="overflow-visible lg:overflow-visible">
<div className="flex min-w-[480px] items-center border-b border-divider-regular py-[7px]">

View File

@@ -0,0 +1,34 @@
import { RiUserAddLine } from '@remixicon/react'
import { useTranslation } from 'react-i18next'
import Button from '@/app/components/base/button'
import Loading from '@/app/components/base/loading'
import { useAppContext } from '@/context/app-context'
import { useGlobalPublicStore } from '@/context/global-public-context'
import { useWorkspacePermissions } from '@/service/use-workspace'
type InviteButtonProps = {
disabled?: boolean
onClick?: () => void
}
const InviteButton = (props: InviteButtonProps) => {
const { t } = useTranslation()
const { currentWorkspace } = useAppContext()
const systemFeatures = useGlobalPublicStore(s => s.systemFeatures)
const { data: workspacePermissions, isFetching: isFetchingWorkspacePermissions } = useWorkspacePermissions(currentWorkspace!.id, systemFeatures.branding.enabled)
if (systemFeatures.branding.enabled) {
if (isFetchingWorkspacePermissions) {
return <Loading />
}
if (!workspacePermissions || workspacePermissions.allow_member_invite !== true) {
return null
}
}
return (
<Button variant="primary" {...props}>
<RiUserAddLine className="mr-1 h-4 w-4" />
{t('members.invite', { ns: 'common' })}
</Button>
)
}
export default InviteButton

View File

@@ -5,6 +5,10 @@ import {
} from '@remixicon/react'
import { Fragment } from 'react'
import { useTranslation } from 'react-i18next'
import Loading from '@/app/components/base/loading'
import { useAppContext } from '@/context/app-context'
import { useGlobalPublicStore } from '@/context/global-public-context'
import { useWorkspacePermissions } from '@/service/use-workspace'
import { cn } from '@/utils/classnames'
type Props = {
@@ -13,6 +17,17 @@ type Props = {
const TransferOwnership = ({ onOperate }: Props) => {
const { t } = useTranslation()
const { currentWorkspace } = useAppContext()
const systemFeatures = useGlobalPublicStore(s => s.systemFeatures)
const { data: workspacePermissions, isFetching: isFetchingWorkspacePermissions } = useWorkspacePermissions(currentWorkspace!.id, systemFeatures.branding.enabled)
if (systemFeatures.branding.enabled) {
if (isFetchingWorkspacePermissions) {
return <Loading />
}
if (!workspacePermissions || workspacePermissions.allow_owner_transfer !== true) {
return <span className="system-sm-regular px-3 text-text-secondary">{t('members.owner', { ns: 'common' })}</span>
}
}
return (
<Menu as="div" className="relative h-full w-full">

File diff suppressed because it is too large

View File

@@ -9,7 +9,9 @@ import difyI18n from './eslint-rules/index.js'
export default antfu(
{
react: {
reactCompiler: true,
// The react compiler rules are pretty slow
// We can wait for https://github.com/Rel1cx/eslint-react/issues/1237
reactCompiler: false,
overrides: {
'react/no-context-provider': 'off',
'react/no-forward-ref': 'off',
@@ -57,47 +59,8 @@ export default antfu(
// sonar
{
rules: {
...sonar.configs.recommended.rules,
// code complexity
'sonarjs/cognitive-complexity': 'off',
'sonarjs/no-nested-functions': 'warn',
'sonarjs/no-nested-conditional': 'warn',
'sonarjs/nested-control-flow': 'warn', // 3 levels of nesting
'sonarjs/no-small-switch': 'off',
'sonarjs/no-nested-template-literals': 'warn',
'sonarjs/redundant-type-aliases': 'off',
'sonarjs/regex-complexity': 'warn',
// maintainability
'sonarjs/no-ignored-exceptions': 'off',
'sonarjs/no-commented-code': 'warn',
'sonarjs/no-unused-vars': 'warn',
'sonarjs/prefer-single-boolean-return': 'warn',
'sonarjs/duplicates-in-character-class': 'off',
'sonarjs/single-char-in-character-classes': 'off',
'sonarjs/anchor-precedence': 'warn',
'sonarjs/updated-loop-counter': 'off',
'sonarjs/no-dead-store': 'error',
'sonarjs/no-duplicated-branches': 'warn',
'sonarjs/max-lines': 'warn', // max 1000 lines
'sonarjs/no-variable-usage-before-declaration': 'error',
// security
'sonarjs/no-hardcoded-passwords': 'off', // detect the wrong code that is not password.
'sonarjs/no-hardcoded-secrets': 'off',
'sonarjs/pseudo-random': 'off',
// performance
'sonarjs/slow-regex': 'warn',
// others
'sonarjs/todo-tag': 'warn',
'sonarjs/table-header': 'off',
// new from this update
'sonarjs/unused-import': 'off',
'sonarjs/use-type-alias': 'warn',
'sonarjs/single-character-alternation': 'warn',
'sonarjs/no-os-command-from-path': 'warn',
'sonarjs/class-name': 'off',
'sonarjs/no-redundant-jump': 'warn',
// Manually pick rules that are actually useful and not slow.
// Or we can just drop the plugin entirely.
},
plugins: {
sonarjs: sonar,

View File

@@ -27,7 +27,7 @@
"build": "next build",
"build:docker": "next build && node scripts/optimize-standalone.js",
"start": "node ./scripts/copy-and-start.mjs",
"lint": "eslint --cache --cache-location node_modules/.cache/eslint/.eslint-cache",
"lint": "eslint --cache",
"lint:ci": "pnpm lint --concurrency 3",
"lint:fix": "pnpm lint --fix",
"lint:quiet": "pnpm lint --quiet",

View File

@@ -0,0 +1,17 @@
import type { ICurrentWorkspace } from '@/models/common'
import { useQuery } from '@tanstack/react-query'
import { get } from './base'
type WorkspacePermissions = {
workspace_id: ICurrentWorkspace['id']
allow_member_invite: boolean
allow_owner_transfer: boolean
}
export function useWorkspacePermissions(workspaceId: ICurrentWorkspace['id'], enabled: boolean) {
return useQuery({
queryKey: ['workspace-permissions', workspaceId],
queryFn: () => get<WorkspacePermissions>('/workspaces/current/permission'),
enabled: enabled && !!workspaceId,
})
}
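
The hook queries `GET /workspaces/current/permission`; the controller behind it is added elsewhere in this changeset and is not reproduced here. Purely to illustrate the response shape the `WorkspacePermissions` type expects, a hypothetical Flask endpoint might look like the sketch below; the blueprint, the `current_user.current_tenant_id` accessor, and the permissive defaults are assumptions, not the actual API controller.

# Hypothetical endpoint sketch matching the WorkspacePermissions type above; not the real controller.
from flask import Blueprint, jsonify
from flask_login import current_user

from configs import dify_config
from services.enterprise.enterprise_service import EnterpriseService

workspace_permission_bp = Blueprint("workspace_permission_example", __name__)


@workspace_permission_bp.get("/workspaces/current/permission")
def get_current_workspace_permission():
    tenant_id = current_user.current_tenant_id  # assumed accessor for the active workspace
    # Default to permissive values outside the enterprise edition.
    allow_member_invite = True
    allow_owner_transfer = True
    if dify_config.ENTERPRISE_ENABLED:
        permission = EnterpriseService.WorkspacePermissionService.get_permission(tenant_id)
        allow_member_invite = permission.allow_member_invite
        allow_owner_transfer = permission.allow_owner_transfer
    return jsonify(
        {
            "workspace_id": tenant_id,
            "allow_member_invite": allow_member_invite,
            "allow_owner_transfer": allow_owner_transfer,
        }
    )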