Compare commits

24 Commits

Author SHA1 Message Date
CodingOnStar
48cba768b1 test: enhance text generation and workflow callbacks tests
- Added comprehensive tests for the useTextGeneration hook, covering handleSend and handleStop functionalities, including file processing and error handling.
- Improved workflow callbacks tests to handle edge cases and ensure graceful handling of missing nodes and undefined tracing.
- Introduced new test cases for validating prompt variables and processing files in the text generation flow.
2026-02-06 18:03:41 +08:00
CodingOnStar
a985eb8725 Merge remote-tracking branch 'origin/main' into refactor/share 2026-02-06 17:32:21 +08:00
CodingOnStar
15d25f8876 feat: enhance text generation result components and add tests
- Removed unused ESLint suppressions from the configuration.
- Introduced new Result and Header components for better organization of text generation results.
- Added comprehensive tests for the new components and hooks to ensure functionality and reliability.
- Updated the useTextGeneration hook to streamline state management and improve performance.
2026-02-06 17:31:42 +08:00
CodingOnStar
cd4a4ed770 refactor: restructure text generation component and introduce new hooks
- Simplified the TextGeneration component by removing unused imports and state management.
- Introduced new hooks for managing app configuration and batch tasks.
- Added HeaderSection and ResultPanel components for better UI organization.
- Implemented tests for new components and hooks to ensure functionality.
- Updated types for improved type safety and clarity.
2026-02-06 16:51:01 +08:00
zyssyz123
2c9430313d fix: redis for api token (#31861)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com>
Co-authored-by: hj24 <mambahj24@gmail.com>
2026-02-06 16:25:27 +08:00
QuantumGhost
552ee369b2 chore: update deploy branches for deploy-hitl.yaml (#32051) 2026-02-06 16:14:05 +08:00
Stephen Zhou
d5b9a7b2f8 test: only remove text coverage in CI (#32043) 2026-02-06 16:12:28 +08:00
NeatGuyCoding
c2a3f459c7 fix(api): return proper HTTP 204 status code in DELETE endpoints (#32012)
Signed-off-by: NeatGuyCoding <15627489+NeatGuyCoding@users.noreply.github.com>
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2026-02-06 15:32:52 +08:00
QuantumGhost
4971e11734 perf: use batch delete method instead of single delete (#32036)
Co-authored-by: fatelei <fatelei@gmail.com>
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
Co-authored-by: FFXN <lizy@dify.ai>
2026-02-06 15:12:32 +08:00
wangxiaolei
a297b06aac fix: fix missing tool type (#32042) 2026-02-06 14:38:15 +08:00
QuantumGhost
e988266f53 chore: update HITL auto deploy workflow (#32040) 2026-02-06 14:15:32 +08:00
longbingljw
d9530f7bb7 fix: make flask upgrade-db fail on error (#32024) 2026-02-06 12:01:31 +08:00
wangxiaolei
b24e6edada fix: fix incorrect agent node tool type (#32008)
Infer the real tool type by querying the relevant database tables.

The root cause of the incorrect `type` field is still unclear.
2026-02-06 11:24:39 +08:00
Ryan
59a9cbbf78 chore: remove .codex/skills directory (#32022)
Co-authored-by: Longwei Liu <longweiliu@LongweideMacBook-Air.local>
2026-02-06 10:46:50 +08:00
99
45164ce33e refactor: strip external imports in workflow template transform (#32017) 2026-02-06 10:37:26 +08:00
99
095b3ee234 chore: Remove redundant double space in variable type description (core/variables/variables.py) (#32002)
2026-02-05 21:44:31 +08:00
QuantumGhost
cb970e54da perf(api): Optimize the response time of AppListApi endpoint (#31999) 2026-02-05 19:05:09 +08:00
Stream
e04f2a0786 feat: use static manifest for pre-caching all plugin manifests before checking updates (#31942)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
Co-authored-by: Junyan Qin <rockchinq@gmail.com>
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2026-02-05 18:58:17 +08:00
Stephen Zhou
7202a24bcf chore: migrate to eslint-better-tailwind (#31969) 2026-02-05 18:36:08 +09:00
wangxiaolei
be8f265e43 fix: use uuid_generate_v4 only on PostgreSQL (#31304) 2026-02-05 17:32:33 +08:00
lif
9e54f086dc fix(web): add rewrite rule to fix Serwist precaching 404 errors (#31770)
Signed-off-by: majiayu000 <1835304752@qq.com>
Co-authored-by: Stephen Zhou <38493346+hyoban@users.noreply.github.com>
2026-02-05 15:42:18 +08:00
Joel
8c31b69c8e chore: make the app list header sticky on the explore page (#31967)
2026-02-05 14:44:51 +08:00
wangxiaolei
b886b3f6c8 fix: fix misuse of db.session (#31971) 2026-02-05 14:42:34 +08:00
Stephen Zhou
ef0d18bb61 test: fix test (#31975) 2026-02-05 14:31:21 +08:00
281 changed files with 8499 additions and 12664 deletions

View File

@@ -1 +0,0 @@
../../.agents/skills/component-refactoring

View File

@@ -1 +0,0 @@
../../.agents/skills/frontend-code-review

View File

@@ -1 +0,0 @@
../../.agents/skills/frontend-testing

View File

@@ -1 +0,0 @@
../../.agents/skills/orpc-contract-first

View File

@@ -79,29 +79,6 @@ jobs:
find . -name "*.py" -type f -exec sed -i.bak -E 's/"([^"]+)" \| None/Optional["\1"]/g; s/'"'"'([^'"'"']+)'"'"' \| None/Optional['"'"'\1'"'"']/g' {} \;
find . -name "*.py.bak" -type f -delete
- name: Install pnpm
uses: pnpm/action-setup@v4
with:
package_json_file: web/package.json
run_install: false
- name: Setup Node.js
uses: actions/setup-node@v6
with:
node-version: 24
cache: pnpm
cache-dependency-path: ./web/pnpm-lock.yaml
- name: Install web dependencies
run: |
cd web
pnpm install --frozen-lockfile
- name: ESLint autofix
run: |
cd web
pnpm lint:fix || true
# mdformat breaks YAML front matter in markdown files. Add --exclude for directories containing YAML front matter.
- name: mdformat
run: |

View File

@@ -8,7 +8,6 @@ on:
- "build/**"
- "release/e-*"
- "hotfix/**"
- "feat/hitl-frontend"
tags:
- "*"

View File

@@ -4,8 +4,7 @@ on:
workflow_run:
workflows: ["Build and Push API & Web"]
branches:
- "feat/hitl-frontend"
- "feat/hitl-backend"
- "build/feat/hitl"
types:
- completed
@@ -14,10 +13,7 @@ jobs:
runs-on: ubuntu-latest
if: |
github.event.workflow_run.conclusion == 'success' &&
(
github.event.workflow_run.head_branch == 'feat/hitl-frontend' ||
github.event.workflow_run.head_branch == 'feat/hitl-backend'
)
github.event.workflow_run.head_branch == 'build/feat/hitl'
steps:
- name: Deploy to server
uses: appleboy/ssh-action@v1

View File

@@ -39,7 +39,7 @@ jobs:
run: pnpm install --frozen-lockfile
- name: Run tests
- run: pnpm test:coverage
+ run: pnpm test:ci
- name: Coverage Summary
if: always()

View File

@@ -136,7 +136,6 @@ ignore_imports =
core.workflow.nodes.llm.llm_utils -> models.provider
core.workflow.nodes.llm.llm_utils -> services.credit_pool_service
core.workflow.nodes.llm.node -> core.tools.signature
- core.workflow.nodes.template_transform.template_transform_node -> configs
core.workflow.nodes.tool.tool_node -> core.callback_handler.workflow_tool_callback_handler
core.workflow.nodes.tool.tool_node -> core.tools.tool_engine
core.workflow.nodes.tool.tool_node -> core.tools.tool_manager

View File

@@ -122,7 +122,7 @@ These commands assume you start from the repository root.
```bash
cd api
- uv run celery -A app.celery worker -P threads -c 2 --loglevel INFO -Q dataset,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor,retention
+ uv run celery -A app.celery worker -P threads -c 2 --loglevel INFO -Q api_token,dataset,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor,retention
```
1. Optional: start Celery Beat (scheduled tasks, in a new terminal).

View File

@@ -739,8 +739,10 @@ def upgrade_db():
click.echo(click.style("Database migration successful!", fg="green"))
- except Exception:
+ except Exception as e:
logger.exception("Failed to execute database migration")
+ click.echo(click.style(f"Database migration failed: {e}", fg="red"))
+ raise SystemExit(1)
finally:
lock.release()
else:

View File

@@ -1155,6 +1155,16 @@ class CeleryScheduleTasksConfig(BaseSettings):
default=0,
)
+ # API token last_used_at batch update
+ ENABLE_API_TOKEN_LAST_USED_UPDATE_TASK: bool = Field(
+ description="Enable periodic batch update of API token last_used_at timestamps",
+ default=True,
+ )
+ API_TOKEN_LAST_USED_UPDATE_INTERVAL: int = Field(
+ description="Interval in minutes for batch updating API token last_used_at (default 30)",
+ default=30,
+ )
# Trigger provider refresh (simple version)
ENABLE_TRIGGER_PROVIDER_REFRESH_TASK: bool = Field(
description="Enable trigger provider refresh poller",

View File

@@ -10,6 +10,7 @@ from libs.helper import TimestampField
from libs.login import current_account_with_tenant, login_required
from models.dataset import Dataset
from models.model import ApiToken, App
+ from services.api_token_service import ApiTokenCache
from . import console_ns
from .wraps import account_initialization_required, edit_permission_required, setup_required
@@ -131,6 +132,11 @@ class BaseApiKeyResource(Resource):
if key is None:
flask_restx.abort(HTTPStatus.NOT_FOUND, message="API key not found")
+ # Invalidate cache before deleting from database
+ # Type assertion: key is guaranteed to be non-None here because abort() raises
+ assert key is not None  # nosec - for type checker only
+ ApiTokenCache.delete(key.token, key.type)
db.session.query(ApiToken).where(ApiToken.id == api_key_id).delete()
db.session.commit()

View File

@@ -1,3 +1,4 @@
+ import logging
import uuid
from datetime import datetime
from typing import Any, Literal, TypeAlias
@@ -54,6 +55,8 @@ ALLOW_CREATE_APP_MODES = ["chat", "agent-chat", "advanced-chat", "workflow", "co
register_enum_models(console_ns, IconType)
+ _logger = logging.getLogger(__name__)
class AppListQuery(BaseModel):
page: int = Field(default=1, ge=1, le=99999, description="Page number (1-99999)")
@@ -499,6 +502,7 @@ class AppListApi(Resource):
select(Workflow).where(
Workflow.version == Workflow.VERSION_DRAFT,
Workflow.app_id.in_(workflow_capable_app_ids),
+ Workflow.tenant_id == current_tenant_id,
)
)
.scalars()
@@ -510,12 +514,14 @@ class AppListApi(Resource):
NodeType.TRIGGER_PLUGIN,
}
for workflow in draft_workflows:
+ node_id = None
try:
- for _, node_data in workflow.walk_nodes():
+ for node_id, node_data in workflow.walk_nodes():
if node_data.get("type") in trigger_node_types:
draft_trigger_app_ids.add(str(workflow.app_id))
break
except Exception:
_logger.exception("error while walking nodes, workflow_id=%s, node_id=%s", workflow.id, node_id)
continue
for app in app_pagination.items:

View File

@@ -55,6 +55,7 @@ from libs.login import current_account_with_tenant, login_required
from models import ApiToken, Dataset, Document, DocumentSegment, UploadFile
from models.dataset import DatasetPermissionEnum
from models.provider_ids import ModelProviderID
+ from services.api_token_service import ApiTokenCache
from services.dataset_service import DatasetPermissionService, DatasetService, DocumentService
# Register models for flask_restx to avoid dict type issues in Swagger
@@ -820,6 +821,11 @@ class DatasetApiDeleteApi(Resource):
if key is None:
console_ns.abort(404, message="API key not found")
+ # Invalidate cache before deleting from database
+ # Type assertion: key is guaranteed to be non-None here because abort() raises
+ assert key is not None  # nosec - for type checker only
+ ApiTokenCache.delete(key.token, key.type)
db.session.query(ApiToken).where(ApiToken.id == api_key_id).delete()
db.session.commit()

View File

@@ -120,7 +120,7 @@ class TagUpdateDeleteApi(Resource):
TagService.delete_tag(tag_id)
- return 204
+ return "", 204
@console_ns.route("/tag-bindings/create")

View File

@@ -396,7 +396,7 @@ class DatasetApi(DatasetApiResource):
try:
if DatasetService.delete_dataset(dataset_id_str, current_user):
DatasetPermissionService.clear_partial_member_list(dataset_id_str)
- return 204
+ return "", 204
else:
raise NotFound("Dataset not found.")
except services.errors.dataset.DatasetInUseError:
@@ -557,7 +557,7 @@ class DatasetTagsApi(DatasetApiResource):
payload = TagDeletePayload.model_validate(service_api_ns.payload or {})
TagService.delete_tag(payload.tag_id)
- return 204
+ return "", 204
@service_api_ns.route("/datasets/tags/binding")
@@ -581,7 +581,7 @@ class DatasetTagBindingApi(DatasetApiResource):
payload = TagBindingPayload.model_validate(service_api_ns.payload or {})
TagService.save_tag_binding({"tag_ids": payload.tag_ids, "target_id": payload.target_id, "type": "knowledge"})
- return 204
+ return "", 204
@service_api_ns.route("/datasets/tags/unbinding")
@@ -605,7 +605,7 @@ class DatasetTagUnbindingApi(DatasetApiResource):
payload = TagUnbindingPayload.model_validate(service_api_ns.payload or {})
TagService.delete_tag_binding({"tag_id": payload.tag_id, "target_id": payload.target_id, "type": "knowledge"})
- return 204
+ return "", 204
@service_api_ns.route("/datasets/<uuid:dataset_id>/tags")

View File

@@ -746,4 +746,4 @@ class DocumentApi(DatasetApiResource):
except services.errors.document.DocumentIndexingError:
raise DocumentIndexingError("Cannot delete document during indexing.")
- return 204
+ return "", 204

View File

@@ -128,7 +128,7 @@ class DatasetMetadataServiceApi(DatasetApiResource):
DatasetService.check_dataset_permission(dataset, current_user)
MetadataService.delete_metadata(dataset_id_str, metadata_id_str)
- return 204
+ return "", 204
@service_api_ns.route("/datasets/<uuid:dataset_id>/metadata/built-in")

View File

@@ -233,7 +233,7 @@ class DatasetSegmentApi(DatasetApiResource):
if not segment:
raise NotFound("Segment not found.")
SegmentService.delete_segment(segment, document, dataset)
- return 204
+ return "", 204
@service_api_ns.expect(service_api_ns.models[SegmentUpdatePayload.__name__])
@service_api_ns.doc("update_segment")
@@ -499,7 +499,7 @@ class DatasetChildChunkApi(DatasetApiResource):
except ChildChunkDeleteIndexServiceError as e:
raise ChildChunkDeleteIndexError(str(e))
- return 204
+ return "", 204
@service_api_ns.expect(service_api_ns.models[ChildChunkUpdatePayload.__name__])
@service_api_ns.doc("update_child_chunk")

View File

@@ -1,27 +1,24 @@
import logging
import time
from collections.abc import Callable
from datetime import timedelta
from enum import StrEnum, auto
from functools import wraps
from typing import Concatenate, ParamSpec, TypeVar
from typing import Concatenate, ParamSpec, TypeVar, cast
from flask import current_app, request
from flask_login import user_logged_in
from flask_restx import Resource
from pydantic import BaseModel
from sqlalchemy import select, update
from sqlalchemy.orm import Session
from werkzeug.exceptions import Forbidden, NotFound, Unauthorized
from enums.cloud_plan import CloudPlan
from extensions.ext_database import db
from extensions.ext_redis import redis_client
from libs.datetime_utils import naive_utc_now
from libs.login import current_user
from models import Account, Tenant, TenantAccountJoin, TenantStatus
from models.dataset import Dataset, RateLimitLog
from models.model import ApiToken, App
from services.api_token_service import ApiTokenCache, fetch_token_with_single_flight, record_token_usage
from services.end_user_service import EndUserService
from services.feature_service import FeatureService
@@ -296,7 +293,14 @@ def validate_dataset_token(view: Callable[Concatenate[T, P], R] | None = None):
def validate_and_get_api_token(scope: str | None = None):
"""
Validate and get API token.
Validate and get API token with Redis caching.
This function uses a two-tier approach:
1. First checks Redis cache for the token
2. If not cached, queries database and caches the result
The last_used_at field is updated asynchronously via Celery task
to avoid blocking the request.
"""
auth_header = request.headers.get("Authorization")
if auth_header is None or " " not in auth_header:
@@ -308,29 +312,18 @@ def validate_and_get_api_token(scope: str | None = None):
if auth_scheme != "bearer":
raise Unauthorized("Authorization scheme must be 'Bearer'")
current_time = naive_utc_now()
cutoff_time = current_time - timedelta(minutes=1)
with Session(db.engine, expire_on_commit=False) as session:
update_stmt = (
update(ApiToken)
.where(
ApiToken.token == auth_token,
(ApiToken.last_used_at.is_(None) | (ApiToken.last_used_at < cutoff_time)),
ApiToken.type == scope,
)
.values(last_used_at=current_time)
)
stmt = select(ApiToken).where(ApiToken.token == auth_token, ApiToken.type == scope)
result = session.execute(update_stmt)
api_token = session.scalar(stmt)
# Try to get token from cache first
# Returns a CachedApiToken (plain Python object), not a SQLAlchemy model
cached_token = ApiTokenCache.get(auth_token, scope)
if cached_token is not None:
logger.debug("Token validation served from cache for scope: %s", scope)
# Record usage in Redis for later batch update (no Celery task per request)
record_token_usage(auth_token, scope)
return cast(ApiToken, cached_token)
if hasattr(result, "rowcount") and result.rowcount > 0:
session.commit()
if not api_token:
raise Unauthorized("Access token is invalid")
return api_token
# Cache miss - use Redis lock for single-flight mode
# This ensures only one request queries DB for the same token concurrently
return fetch_token_with_single_flight(auth_token, scope)
class DatasetApiResource(Resource):
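The single-flight step mentioned above relies on a Redis lock so that, on a cache miss, only one concurrent request hits the database while the others wait and re-read the cache. Stripped of the token specifics, the pattern looks roughly like this (a sketch using redis-py; names are illustrative):

```python
import redis

r = redis.Redis()


def single_flight_get(key: str, load_from_db):
    lock = r.lock(f"lock:{key}", timeout=10, blocking_timeout=5)
    if lock.acquire(blocking=True):
        try:
            cached = r.get(key)  # a concurrent lock holder may have filled it
            if cached is not None:
                return cached
            value = load_from_db()
            r.setex(key, 600, value)
            return value
        finally:
            lock.release()
    # lock wait timed out: degrade gracefully to a direct query
    return load_from_db()
```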

View File

@@ -47,6 +47,7 @@ class DifyNodeFactory(NodeFactory):
code_providers: Sequence[type[CodeNodeProvider]] | None = None,
code_limits: CodeNodeLimits | None = None,
template_renderer: Jinja2TemplateRenderer | None = None,
+ template_transform_max_output_length: int | None = None,
http_request_http_client: HttpClientProtocol | None = None,
http_request_tool_file_manager_factory: Callable[[], ToolFileManager] = ToolFileManager,
http_request_file_manager: FileManagerProtocol | None = None,
@@ -68,6 +69,9 @@ class DifyNodeFactory(NodeFactory):
max_object_array_length=dify_config.CODE_MAX_OBJECT_ARRAY_LENGTH,
)
self._template_renderer = template_renderer or CodeExecutorJinja2TemplateRenderer()
+ self._template_transform_max_output_length = (
+ template_transform_max_output_length or dify_config.TEMPLATE_TRANSFORM_MAX_LENGTH
+ )
self._http_request_http_client = http_request_http_client or ssrf_proxy
self._http_request_tool_file_manager_factory = http_request_tool_file_manager_factory
self._http_request_file_manager = http_request_file_manager or file_manager
@@ -122,6 +126,7 @@ class DifyNodeFactory(NodeFactory):
graph_init_params=self.graph_init_params,
graph_runtime_state=self.graph_runtime_state,
template_renderer=self._template_renderer,
+ max_output_length=self._template_transform_max_output_length,
)
if node_type == NodeType.HTTP_REQUEST:

View File

@@ -6,7 +6,8 @@ from yarl import URL
from configs import dify_config
from core.helper.download import download_with_size_limit
- from core.plugin.entities.marketplace import MarketplacePluginDeclaration
+ from core.plugin.entities.marketplace import MarketplacePluginDeclaration, MarketplacePluginSnapshot
from extensions.ext_redis import redis_client
marketplace_api_url = URL(str(dify_config.MARKETPLACE_API_URL))
logger = logging.getLogger(__name__)
@@ -43,28 +44,37 @@ def batch_fetch_plugin_by_ids(plugin_ids: list[str]) -> list[dict]:
return data.get("data", {}).get("plugins", [])
def batch_fetch_plugin_manifests_ignore_deserialization_error(
plugin_ids: list[str],
) -> Sequence[MarketplacePluginDeclaration]:
if len(plugin_ids) == 0:
return []
url = str(marketplace_api_url / "api/v1/plugins/batch")
response = httpx.post(url, json={"plugin_ids": plugin_ids}, headers={"X-Dify-Version": dify_config.project.version})
response.raise_for_status()
result: list[MarketplacePluginDeclaration] = []
for plugin in response.json()["data"]["plugins"]:
try:
result.append(MarketplacePluginDeclaration.model_validate(plugin))
except Exception:
logger.exception(
"Failed to deserialize marketplace plugin manifest for %s", plugin.get("plugin_id", "unknown")
)
return result
def record_install_plugin_event(plugin_unique_identifier: str):
url = str(marketplace_api_url / "api/v1/stats/plugins/install_count")
response = httpx.post(url, json={"unique_identifier": plugin_unique_identifier})
response.raise_for_status()
def fetch_global_plugin_manifest(cache_key_prefix: str, cache_ttl: int) -> None:
"""
Fetch all plugin manifests from marketplace and cache them in Redis.
This should be called once per check cycle to populate the instance-level cache.
Args:
cache_key_prefix: Redis key prefix for caching plugin manifests
cache_ttl: Cache TTL in seconds
Raises:
httpx.HTTPError: If the HTTP request fails
Exception: If any other error occurs during fetching or caching
"""
url = str(marketplace_api_url / "api/v1/dist/plugins/manifest.json")
response = httpx.get(url, headers={"X-Dify-Version": dify_config.project.version}, timeout=30)
response.raise_for_status()
raw_json = response.json()
plugins_data = raw_json.get("plugins", [])
# Parse and cache all plugin snapshots
for plugin_data in plugins_data:
plugin_snapshot = MarketplacePluginSnapshot.model_validate(plugin_data)
redis_client.setex(
name=f"{cache_key_prefix}{plugin_snapshot.plugin_id}",
time=cache_ttl,
value=plugin_snapshot.model_dump_json(),
)
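A consumer would presumably read one snapshot back by plugin id and deserialize it; a hypothetical sketch of that read path (the function name is illustrative):

```python
from core.plugin.entities.marketplace import MarketplacePluginSnapshot
from extensions.ext_redis import redis_client


def get_cached_snapshot(cache_key_prefix: str, plugin_id: str) -> MarketplacePluginSnapshot | None:
    raw = redis_client.get(f"{cache_key_prefix}{plugin_id}")
    if raw is None:
        return None  # not fetched this cycle, or the TTL expired
    return MarketplacePluginSnapshot.model_validate_json(raw)
```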

View File

@@ -1,4 +1,4 @@
- from pydantic import BaseModel, Field, model_validator
+ from pydantic import BaseModel, Field, computed_field, model_validator
from core.model_runtime.entities.provider_entities import ProviderEntity
from core.plugin.entities.endpoint import EndpointProviderDeclaration
@@ -48,3 +48,15 @@ class MarketplacePluginDeclaration(BaseModel):
if "tool" in data and not data["tool"]:
del data["tool"]
return data
class MarketplacePluginSnapshot(BaseModel):
org: str
name: str
latest_version: str
latest_package_identifier: str
latest_package_url: str
@computed_field
def plugin_id(self) -> str:
return f"{self.org}/{self.name}"

View File

@@ -112,7 +112,7 @@ class ArrayBooleanVariable(ArrayBooleanSegment, ArrayVariable):
class RAGPipelineVariable(BaseModel):
belong_to_node_id: str = Field(description="belong to which node id, shared means public")
- type: str = Field(description="variable type,  text-input, paragraph, select, number, file, file-list")
+ type: str = Field(description="variable type, text-input, paragraph, select, number, file, file-list")
label: str = Field(description="label")
description: str | None = Field(description="description", default="")
variable: str = Field(description="variable key", default="")

View File

@@ -1,7 +1,6 @@
from collections.abc import Mapping, Sequence
from typing import TYPE_CHECKING, Any
- from configs import dify_config
from core.workflow.enums import NodeType, WorkflowNodeExecutionStatus
from core.workflow.node_events import NodeRunResult
from core.workflow.nodes.base.node import Node
@@ -16,12 +15,13 @@ if TYPE_CHECKING:
from core.workflow.entities import GraphInitParams
from core.workflow.runtime import GraphRuntimeState
- MAX_TEMPLATE_TRANSFORM_OUTPUT_LENGTH = dify_config.TEMPLATE_TRANSFORM_MAX_LENGTH
+ DEFAULT_TEMPLATE_TRANSFORM_MAX_OUTPUT_LENGTH = 400_000
class TemplateTransformNode(Node[TemplateTransformNodeData]):
node_type = NodeType.TEMPLATE_TRANSFORM
_template_renderer: Jinja2TemplateRenderer
+ _max_output_length: int
def __init__(
self,
@@ -31,6 +31,7 @@ class TemplateTransformNode(Node[TemplateTransformNodeData]):
graph_runtime_state: "GraphRuntimeState",
*,
template_renderer: Jinja2TemplateRenderer | None = None,
+ max_output_length: int | None = None,
) -> None:
super().__init__(
id=id,
@@ -40,6 +41,10 @@ class TemplateTransformNode(Node[TemplateTransformNodeData]):
)
self._template_renderer = template_renderer or CodeExecutorJinja2TemplateRenderer()
+ if max_output_length is not None and max_output_length <= 0:
+ raise ValueError("max_output_length must be a positive integer")
+ self._max_output_length = max_output_length or DEFAULT_TEMPLATE_TRANSFORM_MAX_OUTPUT_LENGTH
@classmethod
def get_default_config(cls, filters: Mapping[str, object] | None = None) -> Mapping[str, object]:
"""
@@ -69,11 +74,11 @@ class TemplateTransformNode(Node[TemplateTransformNodeData]):
except TemplateRenderError as e:
return NodeRunResult(inputs=variables, status=WorkflowNodeExecutionStatus.FAILED, error=str(e))
- if len(rendered) > MAX_TEMPLATE_TRANSFORM_OUTPUT_LENGTH:
+ if len(rendered) > self._max_output_length:
return NodeRunResult(
inputs=variables,
status=WorkflowNodeExecutionStatus.FAILED,
error=f"Output length exceeds {MAX_TEMPLATE_TRANSFORM_OUTPUT_LENGTH} characters",
error=f"Output length exceeds {self._max_output_length} characters",
)
return NodeRunResult(
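One subtlety in this constructor: `max_output_length or DEFAULT_TEMPLATE_TRANSFORM_MAX_OUTPUT_LENGTH` would silently swallow a value of `0` (falsy), which is presumably why the explicit `<= 0` check raises first. Reduced to its essence:

```python
DEFAULT_MAX_OUTPUT_LENGTH = 400_000


def resolve_max_output_length(value: int | None) -> int:
    # Reject zero and negatives explicitly; `or` alone would turn 0 into the default.
    if value is not None and value <= 0:
        raise ValueError("max_output_length must be a positive integer")
    return value or DEFAULT_MAX_OUTPUT_LENGTH  # None falls back to the default
```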

View File

@@ -35,10 +35,10 @@ if [[ "${MODE}" == "worker" ]]; then
if [[ -z "${CELERY_QUEUES}" ]]; then
if [[ "${EDITION}" == "CLOUD" ]]; then
# Cloud edition: separate queues for dataset and trigger tasks
DEFAULT_QUEUES="dataset,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow_professional,workflow_team,workflow_sandbox,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor,retention"
DEFAULT_QUEUES="api_token,dataset,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow_professional,workflow_team,workflow_sandbox,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor,retention"
else
# Community edition (SELF_HOSTED): dataset, pipeline and workflow have separate queues
DEFAULT_QUEUES="dataset,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor,retention"
DEFAULT_QUEUES="api_token,dataset,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor,retention"
fi
else
DEFAULT_QUEUES="${CELERY_QUEUES}"

View File

@@ -184,6 +184,14 @@ def init_app(app: DifyApp) -> Celery:
"task": "schedule.trigger_provider_refresh_task.trigger_provider_refresh",
"schedule": timedelta(minutes=dify_config.TRIGGER_PROVIDER_REFRESH_INTERVAL),
}
+ if dify_config.ENABLE_API_TOKEN_LAST_USED_UPDATE_TASK:
+ imports.append("schedule.update_api_token_last_used_task")
+ beat_schedule["batch_update_api_token_last_used"] = {
+ "task": "schedule.update_api_token_last_used_task.batch_update_api_token_last_used",
+ "schedule": timedelta(minutes=dify_config.API_TOKEN_LAST_USED_UPDATE_INTERVAL),
+ }
celery_app.conf.update(beat_schedule=beat_schedule, imports=imports)
return celery_app

View File

@@ -10,6 +10,10 @@ import models as models
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '7df29de0f6be'
down_revision = '03ea244985ce'
@@ -19,16 +23,31 @@ depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('tenant_credit_pools',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('pool_type', sa.String(length=40), server_default='trial', nullable=False),
sa.Column('quota_limit', sa.BigInteger(), nullable=False),
sa.Column('quota_used', sa.BigInteger(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.PrimaryKeyConstraint('id', name='tenant_credit_pool_pkey')
)
conn = op.get_bind()
if _is_pg(conn):
op.create_table('tenant_credit_pools',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('pool_type', sa.String(length=40), server_default='trial', nullable=False),
sa.Column('quota_limit', sa.BigInteger(), nullable=False),
sa.Column('quota_used', sa.BigInteger(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.PrimaryKeyConstraint('id', name='tenant_credit_pool_pkey')
)
else:
# For MySQL and other databases, UUID should be generated at application level
op.create_table('tenant_credit_pools',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('pool_type', sa.String(length=40), server_default='trial', nullable=False),
sa.Column('quota_limit', sa.BigInteger(), nullable=False),
sa.Column('quota_used', sa.BigInteger(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.PrimaryKeyConstraint('id', name='tenant_credit_pool_pkey')
)
with op.batch_alter_table('tenant_credit_pools', schema=None) as batch_op:
batch_op.create_index('tenant_credit_pool_pool_type_idx', ['pool_type'], unique=False)
batch_op.create_index('tenant_credit_pool_tenant_id_idx', ['tenant_id'], unique=False)

View File

@@ -2166,7 +2166,9 @@ class TenantCreditPool(TypeBase):
sa.Index("tenant_credit_pool_pool_type_idx", "pool_type"),
)
id: Mapped[str] = mapped_column(StringUUID, primary_key=True, server_default=text("uuid_generate_v4()"), init=False)
id: Mapped[str] = mapped_column(
StringUUID, insert_default=lambda: str(uuid4()), default_factory=lambda: str(uuid4()), init=False
)
tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
pool_type: Mapped[str] = mapped_column(String(40), nullable=False, default="trial", server_default="trial")
quota_limit: Mapped[int] = mapped_column(BigInteger, nullable=False, default=0)
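This trades the PostgreSQL-only `uuid_generate_v4()` server default for UUIDs generated in Python, matching the dialect-aware migration above. A self-contained sketch of the same column shape under SQLAlchemy 2.0 dataclass mapping (a generic `String(36)` stands in for the repo's `StringUUID`):

```python
from uuid import uuid4

from sqlalchemy import String
from sqlalchemy.orm import DeclarativeBase, Mapped, MappedAsDataclass, mapped_column


class Base(MappedAsDataclass, DeclarativeBase):
    pass


class TenantCreditPool(Base):
    __tablename__ = "tenant_credit_pools"

    # UUID generated at the application level, so inserts work on MySQL too.
    id: Mapped[str] = mapped_column(
        String(36),
        primary_key=True,
        insert_default=lambda: str(uuid4()),
        default_factory=lambda: str(uuid4()),
        init=False,
    )
    pool_type: Mapped[str] = mapped_column(String(40), default="trial")
```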

View File

@@ -1,16 +1,24 @@
import logging
import math
import time
import click
import app
from core.helper.marketplace import fetch_global_plugin_manifest
from extensions.ext_database import db
from models.account import TenantPluginAutoUpgradeStrategy
from tasks import process_tenant_plugin_autoupgrade_check_task as check_task
logger = logging.getLogger(__name__)
AUTO_UPGRADE_MINIMAL_CHECKING_INTERVAL = 15 * 60 # 15 minutes
MAX_CONCURRENT_CHECK_TASKS = 20
# Import cache constants from the task module
CACHE_REDIS_KEY_PREFIX = check_task.CACHE_REDIS_KEY_PREFIX
CACHE_REDIS_TTL = check_task.CACHE_REDIS_TTL
@app.celery.task(queue="plugin")
def check_upgradable_plugin_task():
@@ -40,6 +48,22 @@ def check_upgradable_plugin_task():
) # make sure all strategies are checked in this interval
batch_interval_time = (AUTO_UPGRADE_MINIMAL_CHECKING_INTERVAL / batch_chunk_count) if batch_chunk_count > 0 else 0
if total_strategies == 0:
click.echo(click.style("no strategies to process, skipping plugin manifest fetch.", fg="green"))
return
# Fetch and cache all plugin manifests before processing tenants
# This reduces load on marketplace from 300k requests to 1 request per check cycle
logger.info("fetching global plugin manifest from marketplace")
try:
fetch_global_plugin_manifest(CACHE_REDIS_KEY_PREFIX, CACHE_REDIS_TTL)
logger.info("successfully fetched and cached global plugin manifest")
except Exception as e:
logger.exception("failed to fetch global plugin manifest")
click.echo(click.style(f"failed to fetch global plugin manifest: {e}", fg="red"))
click.echo(click.style("skipping plugin upgrade check for this cycle", fg="yellow"))
return
for i in range(0, total_strategies, MAX_CONCURRENT_CHECK_TASKS):
batch_strategies = strategies[i : i + MAX_CONCURRENT_CHECK_TASKS]
for strategy in batch_strategies:

View File

@@ -0,0 +1,114 @@
"""
Scheduled task to batch-update API token last_used_at timestamps.
Instead of updating the database on every request, token usage is recorded
in Redis as lightweight SET keys (api_token_active:{scope}:{token}).
This task runs periodically (default every 30 minutes) to flush those
records into the database in a single batch operation.
"""
import logging
import time
from datetime import datetime
import click
from sqlalchemy import update
from sqlalchemy.orm import Session
import app
from extensions.ext_database import db
from extensions.ext_redis import redis_client
from models.model import ApiToken
from services.api_token_service import ACTIVE_TOKEN_KEY_PREFIX
logger = logging.getLogger(__name__)
@app.celery.task(queue="api_token")
def batch_update_api_token_last_used():
"""
Batch update last_used_at for all recently active API tokens.
Scans Redis for api_token_active:* keys, parses the token and scope
from each key, and performs a batch database update.
"""
click.echo(click.style("batch_update_api_token_last_used: start.", fg="green"))
start_at = time.perf_counter()
updated_count = 0
scanned_count = 0
try:
# Collect all active token keys and their values (the actual usage timestamps)
token_entries: list[tuple[str, str | None, datetime]] = [] # (token, scope, usage_time)
keys_to_delete: list[str | bytes] = []
for key in redis_client.scan_iter(match=f"{ACTIVE_TOKEN_KEY_PREFIX}*", count=200):
if isinstance(key, bytes):
key = key.decode("utf-8")
scanned_count += 1
# Read the value (ISO timestamp recorded at actual request time)
value = redis_client.get(key)
if not value:
keys_to_delete.append(key)
continue
if isinstance(value, bytes):
value = value.decode("utf-8")
try:
usage_time = datetime.fromisoformat(value)
except (ValueError, TypeError):
logger.warning("Invalid timestamp in key %s: %s", key, value)
keys_to_delete.append(key)
continue
# Parse token info from key: api_token_active:{scope}:{token}
suffix = key[len(ACTIVE_TOKEN_KEY_PREFIX) :]
parts = suffix.split(":", 1)
if len(parts) == 2:
scope_str, token = parts
scope = None if scope_str == "None" else scope_str
token_entries.append((token, scope, usage_time))
keys_to_delete.append(key)
if not token_entries:
click.echo(click.style("batch_update_api_token_last_used: no active tokens found.", fg="yellow"))
# Still clean up any invalid keys
if keys_to_delete:
redis_client.delete(*keys_to_delete)
return
# Update each token in its own short transaction to avoid long transactions
for token, scope, usage_time in token_entries:
with Session(db.engine, expire_on_commit=False) as session, session.begin():
stmt = (
update(ApiToken)
.where(
ApiToken.token == token,
ApiToken.type == scope,
(ApiToken.last_used_at.is_(None) | (ApiToken.last_used_at < usage_time)),
)
.values(last_used_at=usage_time)
)
result = session.execute(stmt)
rowcount = getattr(result, "rowcount", 0)
if rowcount > 0:
updated_count += 1
# Delete processed keys from Redis
if keys_to_delete:
redis_client.delete(*keys_to_delete)
except Exception:
logger.exception("batch_update_api_token_last_used failed")
elapsed = time.perf_counter() - start_at
click.echo(
click.style(
f"batch_update_api_token_last_used: done. "
f"scanned={scanned_count}, updated={updated_count}, elapsed={elapsed:.2f}s",
fg="green",
)
)

View File

@@ -0,0 +1,330 @@
"""
API Token Service
Handles all API token caching, validation, and usage recording.
Includes Redis cache operations, database queries, and single-flight concurrency control.
"""
import logging
from datetime import datetime
from typing import Any
from pydantic import BaseModel
from sqlalchemy import select
from sqlalchemy.orm import Session
from werkzeug.exceptions import Unauthorized
from extensions.ext_database import db
from extensions.ext_redis import redis_client, redis_fallback
from libs.datetime_utils import naive_utc_now
from models.model import ApiToken
logger = logging.getLogger(__name__)
# ---------------------------------------------------------------------
# Pydantic DTO
# ---------------------------------------------------------------------
class CachedApiToken(BaseModel):
"""
Pydantic model for cached API token data.
This is NOT a SQLAlchemy model instance, but a plain Pydantic model
that mimics the ApiToken model interface for read-only access.
"""
id: str
app_id: str | None
tenant_id: str | None
type: str
token: str
last_used_at: datetime | None
created_at: datetime | None
def __repr__(self) -> str:
return f"<CachedApiToken id={self.id} type={self.type}>"
# ---------------------------------------------------------------------
# Cache configuration
# ---------------------------------------------------------------------
CACHE_KEY_PREFIX = "api_token"
CACHE_TTL_SECONDS = 600 # 10 minutes
CACHE_NULL_TTL_SECONDS = 60 # 1 minute for non-existent tokens
ACTIVE_TOKEN_KEY_PREFIX = "api_token_active:"
# ---------------------------------------------------------------------
# Cache class
# ---------------------------------------------------------------------
class ApiTokenCache:
"""
Redis cache wrapper for API tokens.
Handles serialization, deserialization, and cache invalidation.
"""
@staticmethod
def make_active_key(token: str, scope: str | None = None) -> str:
"""Generate Redis key for recording token usage."""
return f"{ACTIVE_TOKEN_KEY_PREFIX}{scope}:{token}"
@staticmethod
def _make_tenant_index_key(tenant_id: str) -> str:
"""Generate Redis key for tenant token index."""
return f"tenant_tokens:{tenant_id}"
@staticmethod
def _make_cache_key(token: str, scope: str | None = None) -> str:
"""Generate cache key for the given token and scope."""
scope_str = scope or "any"
return f"{CACHE_KEY_PREFIX}:{scope_str}:{token}"
@staticmethod
def _serialize_token(api_token: Any) -> bytes:
"""Serialize ApiToken object to JSON bytes."""
if isinstance(api_token, CachedApiToken):
return api_token.model_dump_json().encode("utf-8")
cached = CachedApiToken(
id=str(api_token.id),
app_id=str(api_token.app_id) if api_token.app_id else None,
tenant_id=str(api_token.tenant_id) if api_token.tenant_id else None,
type=api_token.type,
token=api_token.token,
last_used_at=api_token.last_used_at,
created_at=api_token.created_at,
)
return cached.model_dump_json().encode("utf-8")
@staticmethod
def _deserialize_token(cached_data: bytes | str) -> Any:
"""Deserialize JSON bytes/string back to a CachedApiToken Pydantic model."""
if cached_data in {b"null", "null"}:
return None
try:
if isinstance(cached_data, bytes):
cached_data = cached_data.decode("utf-8")
return CachedApiToken.model_validate_json(cached_data)
except (ValueError, Exception) as e:
logger.warning("Failed to deserialize token from cache: %s", e)
return None
@staticmethod
@redis_fallback(default_return=None)
def get(token: str, scope: str | None) -> Any | None:
"""Get API token from cache."""
cache_key = ApiTokenCache._make_cache_key(token, scope)
cached_data = redis_client.get(cache_key)
if cached_data is None:
logger.debug("Cache miss for token key: %s", cache_key)
return None
logger.debug("Cache hit for token key: %s", cache_key)
return ApiTokenCache._deserialize_token(cached_data)
@staticmethod
def _add_to_tenant_index(tenant_id: str | None, cache_key: str) -> None:
"""Add cache key to tenant index for efficient invalidation."""
if not tenant_id:
return
try:
index_key = ApiTokenCache._make_tenant_index_key(tenant_id)
redis_client.sadd(index_key, cache_key)
redis_client.expire(index_key, CACHE_TTL_SECONDS + 60)
except Exception as e:
logger.warning("Failed to update tenant index: %s", e)
@staticmethod
def _remove_from_tenant_index(tenant_id: str | None, cache_key: str) -> None:
"""Remove cache key from tenant index."""
if not tenant_id:
return
try:
index_key = ApiTokenCache._make_tenant_index_key(tenant_id)
redis_client.srem(index_key, cache_key)
except Exception as e:
logger.warning("Failed to remove from tenant index: %s", e)
@staticmethod
@redis_fallback(default_return=False)
def set(token: str, scope: str | None, api_token: Any | None, ttl: int = CACHE_TTL_SECONDS) -> bool:
"""Set API token in cache."""
cache_key = ApiTokenCache._make_cache_key(token, scope)
if api_token is None:
cached_value = b"null"
ttl = CACHE_NULL_TTL_SECONDS
else:
cached_value = ApiTokenCache._serialize_token(api_token)
try:
redis_client.setex(cache_key, ttl, cached_value)
if api_token is not None and hasattr(api_token, "tenant_id"):
ApiTokenCache._add_to_tenant_index(api_token.tenant_id, cache_key)
logger.debug("Cached token with key: %s, ttl: %ss", cache_key, ttl)
return True
except Exception as e:
logger.warning("Failed to cache token: %s", e)
return False
@staticmethod
@redis_fallback(default_return=False)
def delete(token: str, scope: str | None = None) -> bool:
"""Delete API token from cache."""
if scope is None:
pattern = f"{CACHE_KEY_PREFIX}:*:{token}"
try:
keys_to_delete = list(redis_client.scan_iter(match=pattern))
if keys_to_delete:
redis_client.delete(*keys_to_delete)
logger.info("Deleted %d cache entries for token", len(keys_to_delete))
return True
except Exception as e:
logger.warning("Failed to delete token cache with pattern: %s", e)
return False
else:
cache_key = ApiTokenCache._make_cache_key(token, scope)
try:
tenant_id = None
try:
cached_data = redis_client.get(cache_key)
if cached_data and cached_data != b"null":
cached_token = ApiTokenCache._deserialize_token(cached_data)
if cached_token:
tenant_id = cached_token.tenant_id
except Exception as e:
logger.debug("Failed to get tenant_id for cache cleanup: %s", e)
redis_client.delete(cache_key)
if tenant_id:
ApiTokenCache._remove_from_tenant_index(tenant_id, cache_key)
logger.info("Deleted cache for key: %s", cache_key)
return True
except Exception as e:
logger.warning("Failed to delete token cache: %s", e)
return False
@staticmethod
@redis_fallback(default_return=False)
def invalidate_by_tenant(tenant_id: str) -> bool:
"""Invalidate all API token caches for a specific tenant via tenant index."""
try:
index_key = ApiTokenCache._make_tenant_index_key(tenant_id)
cache_keys = redis_client.smembers(index_key)
if cache_keys:
deleted_count = 0
for cache_key in cache_keys:
if isinstance(cache_key, bytes):
cache_key = cache_key.decode("utf-8")
redis_client.delete(cache_key)
deleted_count += 1
redis_client.delete(index_key)
logger.info(
"Invalidated %d token cache entries for tenant: %s",
deleted_count,
tenant_id,
)
else:
logger.info(
"No tenant index found for %s, relying on TTL expiration",
tenant_id,
)
return True
except Exception as e:
logger.warning("Failed to invalidate tenant token cache: %s", e)
return False
# ---------------------------------------------------------------------
# Token usage recording (for batch update)
# ---------------------------------------------------------------------
def record_token_usage(auth_token: str, scope: str | None) -> None:
"""
Record token usage in Redis for later batch update by a scheduled job.
Instead of dispatching a Celery task per request, we simply SET a key in Redis.
A Celery Beat scheduled task will periodically scan these keys and batch-update
last_used_at in the database.
"""
try:
key = ApiTokenCache.make_active_key(auth_token, scope)
redis_client.set(key, naive_utc_now().isoformat(), ex=3600)
except Exception as e:
logger.warning("Failed to record token usage: %s", e)
# ---------------------------------------------------------------------
# Database query + single-flight
# ---------------------------------------------------------------------
def query_token_from_db(auth_token: str, scope: str | None) -> ApiToken:
"""
Query API token from database and cache the result.
Raises Unauthorized if token is invalid.
"""
with Session(db.engine, expire_on_commit=False) as session:
stmt = select(ApiToken).where(ApiToken.token == auth_token, ApiToken.type == scope)
api_token = session.scalar(stmt)
if not api_token:
ApiTokenCache.set(auth_token, scope, None)
raise Unauthorized("Access token is invalid")
ApiTokenCache.set(auth_token, scope, api_token)
record_token_usage(auth_token, scope)
return api_token
def fetch_token_with_single_flight(auth_token: str, scope: str | None) -> ApiToken | Any:
"""
Fetch token from DB with single-flight pattern using Redis lock.
Ensures only one concurrent request queries the database for the same token.
Falls back to direct query if lock acquisition fails.
"""
logger.debug("Token cache miss, attempting to acquire query lock for scope: %s", scope)
lock_key = f"api_token_query_lock:{scope}:{auth_token}"
lock = redis_client.lock(lock_key, timeout=10, blocking_timeout=5)
try:
if lock.acquire(blocking=True):
try:
cached_token = ApiTokenCache.get(auth_token, scope)
if cached_token is not None:
logger.debug("Token cached by concurrent request, using cached version")
return cached_token
return query_token_from_db(auth_token, scope)
finally:
lock.release()
else:
logger.warning("Lock timeout for token: %s, proceeding with direct query", auth_token[:10])
return query_token_from_db(auth_token, scope)
except Unauthorized:
raise
except Exception as e:
logger.warning("Redis lock failed for token query: %s, proceeding anyway", e)
return query_token_from_db(auth_token, scope)
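Putting the pieces together, the request path never writes to the database; it only leaves a marker key that the scheduled task later flushes. Roughly (the token value is made up):

```python
# Request path: a cache hit costs one Redis GET plus one Redis SET.
token = ApiTokenCache.get("app-xxxx", "app")
if token is not None:
    record_token_usage("app-xxxx", "app")
    # -> SET api_token_active:app:app-xxxx "<ISO timestamp>" EX 3600
else:
    # Cache miss: one request fills the cache; concurrent ones wait on the lock.
    token = fetch_token_with_single_flight("app-xxxx", "app")

# Later, batch_update_api_token_last_used scans api_token_active:*, applies a
# newest-wins UPDATE per token, and deletes the processed keys.
```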

View File

@@ -14,6 +14,9 @@ from models.model import UploadFile
logger = logging.getLogger(__name__)
# Batch size for database operations to keep transactions short
BATCH_SIZE = 1000
@shared_task(queue="dataset")
def batch_clean_document_task(document_ids: list[str], dataset_id: str, doc_form: str | None, file_ids: list[str]):
@@ -31,63 +34,179 @@ def batch_clean_document_task(document_ids: list[str], dataset_id: str, doc_form
if not doc_form:
raise ValueError("doc_form is required")
with session_factory.create_session() as session:
try:
dataset = session.query(Dataset).where(Dataset.id == dataset_id).first()
if not dataset:
raise Exception("Document has no dataset")
session.query(DatasetMetadataBinding).where(
DatasetMetadataBinding.dataset_id == dataset_id,
DatasetMetadataBinding.document_id.in_(document_ids),
).delete(synchronize_session=False)
storage_keys_to_delete: list[str] = []
index_node_ids: list[str] = []
segment_ids: list[str] = []
total_image_upload_file_ids: list[str] = []
try:
# ============ Step 1: Query segment and file data (short read-only transaction) ============
with session_factory.create_session() as session:
# Get segments info
segments = session.scalars(
select(DocumentSegment).where(DocumentSegment.document_id.in_(document_ids))
).all()
# check segment is exist
if segments:
index_node_ids = [segment.index_node_id for segment in segments]
index_processor = IndexProcessorFactory(doc_form).init_index_processor()
index_processor.clean(
dataset, index_node_ids, with_keywords=True, delete_child_chunks=True, delete_summaries=True
)
segment_ids = [segment.id for segment in segments]
# Collect image file IDs from segment content
for segment in segments:
image_upload_file_ids = get_image_upload_file_ids(segment.content)
image_files = session.query(UploadFile).where(UploadFile.id.in_(image_upload_file_ids)).all()
for image_file in image_files:
try:
if image_file and image_file.key:
storage.delete(image_file.key)
except Exception:
logger.exception(
"Delete image_files failed when storage deleted, \
image_upload_file_is: %s",
image_file.id,
)
stmt = delete(UploadFile).where(UploadFile.id.in_(image_upload_file_ids))
session.execute(stmt)
session.delete(segment)
total_image_upload_file_ids.extend(image_upload_file_ids)
# Query storage keys for image files
if total_image_upload_file_ids:
image_files = session.scalars(
select(UploadFile).where(UploadFile.id.in_(total_image_upload_file_ids))
).all()
storage_keys_to_delete.extend([f.key for f in image_files if f and f.key])
# Query storage keys for document files
if file_ids:
files = session.scalars(select(UploadFile).where(UploadFile.id.in_(file_ids))).all()
for file in files:
try:
storage.delete(file.key)
except Exception:
logger.exception("Delete file failed when document deleted, file_id: %s", file.id)
stmt = delete(UploadFile).where(UploadFile.id.in_(file_ids))
session.execute(stmt)
storage_keys_to_delete.extend([f.key for f in files if f and f.key])
session.commit()
end_at = time.perf_counter()
logger.info(
click.style(
f"Cleaned documents when documents deleted latency: {end_at - start_at}",
fg="green",
# ============ Step 2: Clean vector index (external service, fresh session for dataset) ============
if index_node_ids:
try:
# Fetch dataset in a fresh session to avoid DetachedInstanceError
with session_factory.create_session() as session:
dataset = session.query(Dataset).where(Dataset.id == dataset_id).first()
if not dataset:
logger.warning("Dataset not found for vector index cleanup, dataset_id: %s", dataset_id)
else:
index_processor = IndexProcessorFactory(doc_form).init_index_processor()
index_processor.clean(
dataset, index_node_ids, with_keywords=True, delete_child_chunks=True, delete_summaries=True
)
except Exception:
logger.exception(
"Failed to clean vector index for dataset_id: %s, document_ids: %s, index_node_ids count: %d",
dataset_id,
document_ids,
len(index_node_ids),
)
)
# ============ Step 3: Delete metadata binding (separate short transaction) ============
try:
with session_factory.create_session() as session:
deleted_count = (
session.query(DatasetMetadataBinding)
.where(
DatasetMetadataBinding.dataset_id == dataset_id,
DatasetMetadataBinding.document_id.in_(document_ids),
)
.delete(synchronize_session=False)
)
session.commit()
logger.debug("Deleted %d metadata bindings for dataset_id: %s", deleted_count, dataset_id)
except Exception:
logger.exception("Cleaned documents when documents deleted failed")
logger.exception(
"Failed to delete metadata bindings for dataset_id: %s, document_ids: %s",
dataset_id,
document_ids,
)
# ============ Step 4: Batch delete UploadFile records (multiple short transactions) ============
if total_image_upload_file_ids:
failed_batches = 0
total_batches = (len(total_image_upload_file_ids) + BATCH_SIZE - 1) // BATCH_SIZE
for i in range(0, len(total_image_upload_file_ids), BATCH_SIZE):
batch = total_image_upload_file_ids[i : i + BATCH_SIZE]
try:
with session_factory.create_session() as session:
stmt = delete(UploadFile).where(UploadFile.id.in_(batch))
session.execute(stmt)
session.commit()
except Exception:
failed_batches += 1
logger.exception(
"Failed to delete image UploadFile batch %d-%d for dataset_id: %s",
i,
i + len(batch),
dataset_id,
)
if failed_batches > 0:
logger.warning(
"Image UploadFile deletion: %d/%d batches failed for dataset_id: %s",
failed_batches,
total_batches,
dataset_id,
)
# ============ Step 5: Batch delete DocumentSegment records (multiple short transactions) ============
if segment_ids:
failed_batches = 0
total_batches = (len(segment_ids) + BATCH_SIZE - 1) // BATCH_SIZE
for i in range(0, len(segment_ids), BATCH_SIZE):
batch = segment_ids[i : i + BATCH_SIZE]
try:
with session_factory.create_session() as session:
segment_delete_stmt = delete(DocumentSegment).where(DocumentSegment.id.in_(batch))
session.execute(segment_delete_stmt)
session.commit()
except Exception:
failed_batches += 1
logger.exception(
"Failed to delete DocumentSegment batch %d-%d for dataset_id: %s, document_ids: %s",
i,
i + len(batch),
dataset_id,
document_ids,
)
if failed_batches > 0:
logger.warning(
"DocumentSegment deletion: %d/%d batches failed, document_ids: %s",
failed_batches,
total_batches,
document_ids,
)
# ============ Step 6: Delete document-associated files (separate short transaction) ============
if file_ids:
try:
with session_factory.create_session() as session:
stmt = delete(UploadFile).where(UploadFile.id.in_(file_ids))
session.execute(stmt)
session.commit()
except Exception:
logger.exception(
"Failed to delete document UploadFile records for dataset_id: %s, file_ids: %s",
dataset_id,
file_ids,
)
# ============ Step 7: Delete storage files (I/O operations, no DB transaction) ============
storage_delete_failures = 0
for storage_key in storage_keys_to_delete:
try:
storage.delete(storage_key)
except Exception:
storage_delete_failures += 1
logger.exception("Failed to delete file from storage, key: %s", storage_key)
if storage_delete_failures > 0:
logger.warning(
"Storage file deletion completed with %d failures out of %d total files for dataset_id: %s",
storage_delete_failures,
len(storage_keys_to_delete),
dataset_id,
)
end_at = time.perf_counter()
logger.info(
click.style(
f"Cleaned documents when documents deleted latency: {end_at - start_at:.2f}s, "
f"dataset_id: {dataset_id}, document_ids: {document_ids}, "
f"segments: {len(segment_ids)}, image_files: {len(total_image_upload_file_ids)}, "
f"storage_files: {len(storage_keys_to_delete)}",
fg="green",
)
)
except Exception:
logger.exception(
"Batch clean documents failed for dataset_id: %s, document_ids: %s",
dataset_id,
document_ids,
)

View File

@@ -3,6 +3,7 @@ import time
import click
from celery import shared_task
+ from sqlalchemy import delete
from core.db.session_factory import session_factory
from core.rag.index_processor.index_processor_factory import IndexProcessorFactory
@@ -67,8 +68,14 @@ def delete_segment_from_index_task(
if segment_attachment_bindings:
attachment_ids = [binding.attachment_id for binding in segment_attachment_bindings]
index_processor.clean(dataset=dataset, node_ids=attachment_ids, with_keywords=False)
- for binding in segment_attachment_bindings:
- session.delete(binding)
+ segment_attachment_bind_ids = [i.id for i in segment_attachment_bindings]
+ for i in range(0, len(segment_attachment_bind_ids), 1000):
+ segment_attachment_bind_delete_stmt = delete(SegmentAttachmentBinding).where(
+ SegmentAttachmentBinding.id.in_(segment_attachment_bind_ids[i : i + 1000])
+ )
+ session.execute(segment_attachment_bind_delete_stmt)
# delete upload file
session.query(UploadFile).where(UploadFile.id.in_(attachment_ids)).delete(synchronize_session=False)
session.commit()
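Note the contrast with the dataset cleanup above: here every chunked delete runs in the same session and a single commit() closes the lot, so binding rows and their upload files disappear atomically. A sketch of that shape, with placeholder model names:

from sqlalchemy import delete

def delete_bindings_atomically(session, binding_model, file_model, binding_ids, attachment_ids):
    """One transaction for all chunks: either every binding and
    attachment row is deleted, or none are."""
    for i in range(0, len(binding_ids), 1000):
        stmt = delete(binding_model).where(binding_model.id.in_(binding_ids[i : i + 1000]))
        session.execute(stmt)
    session.query(file_model).where(file_model.id.in_(attachment_ids)).delete(
        synchronize_session=False
    )
    session.commit()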

View File

@@ -28,7 +28,7 @@ def document_indexing_sync_task(dataset_id: str, document_id: str):
logger.info(click.style(f"Start sync document: {document_id}", fg="green"))
start_at = time.perf_counter()
with session_factory.create_session() as session:
with session_factory.create_session() as session, session.begin():
document = session.query(Document).where(Document.id == document_id, Document.dataset_id == dataset_id).first()
if not document:
@@ -68,7 +68,6 @@ def document_indexing_sync_task(dataset_id: str, document_id: str):
document.indexing_status = "error"
document.error = "Datasource credential not found. Please reconnect your Notion workspace."
document.stopped_at = naive_utc_now()
session.commit()
return
loader = NotionExtractor(
@@ -85,7 +84,6 @@ def document_indexing_sync_task(dataset_id: str, document_id: str):
if last_edited_time != page_edited_time:
document.indexing_status = "parsing"
document.processing_started_at = naive_utc_now()
session.commit()
# delete all document segment and index
try:
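The recurring change in this and the following task is the added `session.begin()` context manager: it commits once on clean exit and rolls back if the block raises, which is why the interleaved `session.commit()` calls can simply be deleted. A minimal sketch of the idiom, assuming an ordinary SQLAlchemy engine rather than Dify's session_factory:

from sqlalchemy import create_engine, text
from sqlalchemy.orm import sessionmaker

engine = create_engine("sqlite://")  # in-memory placeholder engine
SessionLocal = sessionmaker(bind=engine)

def run_in_transaction():
    # begin() commits automatically when the block exits normally and
    # rolls back if it raises, so no explicit session.commit() is needed.
    with SessionLocal() as session, session.begin():
        session.execute(text("SELECT 1"))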

View File

@@ -8,7 +8,6 @@ from sqlalchemy import delete, select
from core.db.session_factory import session_factory
from core.indexing_runner import DocumentIsPausedError, IndexingRunner
from core.rag.index_processor.index_processor_factory import IndexProcessorFactory
from extensions.ext_database import db
from libs.datetime_utils import naive_utc_now
from models.dataset import Dataset, Document, DocumentSegment
@@ -27,7 +26,7 @@ def document_indexing_update_task(dataset_id: str, document_id: str):
logger.info(click.style(f"Start update document: {document_id}", fg="green"))
start_at = time.perf_counter()
with session_factory.create_session() as session:
with session_factory.create_session() as session, session.begin():
document = session.query(Document).where(Document.id == document_id, Document.dataset_id == dataset_id).first()
if not document:
@@ -36,7 +35,6 @@ def document_indexing_update_task(dataset_id: str, document_id: str):
document.indexing_status = "parsing"
document.processing_started_at = naive_utc_now()
session.commit()
# delete all document segment and index
try:
@@ -56,7 +54,7 @@ def document_indexing_update_task(dataset_id: str, document_id: str):
segment_ids = [segment.id for segment in segments]
segment_delete_stmt = delete(DocumentSegment).where(DocumentSegment.id.in_(segment_ids))
session.execute(segment_delete_stmt)
db.session.commit()
end_at = time.perf_counter()
logger.info(
click.style(

View File

@@ -6,8 +6,8 @@ import typing
import click
from celery import shared_task
from core.helper import marketplace
from core.helper.marketplace import MarketplacePluginDeclaration
from core.helper.marketplace import record_install_plugin_event
from core.plugin.entities.marketplace import MarketplacePluginSnapshot
from core.plugin.entities.plugin import PluginInstallationSource
from core.plugin.impl.plugin import PluginInstaller
from extensions.ext_redis import redis_client
@@ -16,7 +16,7 @@ from models.account import TenantPluginAutoUpgradeStrategy
logger = logging.getLogger(__name__)
RETRY_TIMES_OF_ONE_PLUGIN_IN_ONE_TENANT = 3
CACHE_REDIS_KEY_PREFIX = "plugin_autoupgrade_check_task:cached_plugin_manifests:"
CACHE_REDIS_KEY_PREFIX = "plugin_autoupgrade_check_task:cached_plugin_snapshot:"
CACHE_REDIS_TTL = 60 * 60 # 1 hour
@@ -25,11 +25,11 @@ def _get_redis_cache_key(plugin_id: str) -> str:
return f"{CACHE_REDIS_KEY_PREFIX}{plugin_id}"
def _get_cached_manifest(plugin_id: str) -> typing.Union[MarketplacePluginDeclaration, None, bool]:
def _get_cached_manifest(plugin_id: str) -> typing.Union[MarketplacePluginSnapshot, None, bool]:
"""
Get cached plugin manifest from Redis.
Returns:
- MarketplacePluginDeclaration: if found in cache
- MarketplacePluginSnapshot: if found in cache
- None: if cached as not found (marketplace returned no result)
- False: if not in cache at all
"""
@@ -43,76 +43,31 @@ def _get_cached_manifest(plugin_id: str) -> typing.Union[MarketplacePluginDeclar
if cached_json is None:
return None
return MarketplacePluginDeclaration.model_validate(cached_json)
return MarketplacePluginSnapshot.model_validate(cached_json)
except Exception:
logger.exception("Failed to get cached manifest for plugin %s", plugin_id)
return False
def _set_cached_manifest(plugin_id: str, manifest: typing.Union[MarketplacePluginDeclaration, None]) -> None:
"""
Cache plugin manifest in Redis.
Args:
plugin_id: The plugin ID
manifest: The manifest to cache, or None if not found in marketplace
"""
try:
key = _get_redis_cache_key(plugin_id)
if manifest is None:
# Cache the fact that this plugin was not found
redis_client.setex(key, CACHE_REDIS_TTL, json.dumps(None))
else:
# Cache the manifest data
redis_client.setex(key, CACHE_REDIS_TTL, manifest.model_dump_json())
except Exception:
# If Redis fails, continue without caching
# traceback.print_exc()
logger.exception("Failed to set cached manifest for plugin %s", plugin_id)
def marketplace_batch_fetch_plugin_manifests(
plugin_ids_plain_list: list[str],
) -> list[MarketplacePluginDeclaration]:
"""Fetch plugin manifests with Redis caching support."""
cached_manifests: dict[str, typing.Union[MarketplacePluginDeclaration, None]] = {}
not_cached_plugin_ids: list[str] = []
) -> list[MarketplacePluginSnapshot]:
"""
Fetch plugin manifests from Redis cache only.
This function assumes fetch_global_plugin_manifest() has been called
to pre-populate the cache with all marketplace plugins.
"""
result: list[MarketplacePluginSnapshot] = []
# Check Redis cache for each plugin
for plugin_id in plugin_ids_plain_list:
cached_result = _get_cached_manifest(plugin_id)
if cached_result is False:
# Not in cache, need to fetch
not_cached_plugin_ids.append(plugin_id)
else:
# Either found manifest or cached as None (not found in marketplace)
# At this point, cached_result is either MarketplacePluginDeclaration or None
if isinstance(cached_result, bool):
# This should never happen due to the if condition above, but for type safety
continue
cached_manifests[plugin_id] = cached_result
if not isinstance(cached_result, MarketplacePluginSnapshot):
# cached_result is False (not in cache) or None (cached as not found)
logger.warning("plugin %s not found in cache, skipping", plugin_id)
continue
# Fetch uncached plugins from marketplace
if not_cached_plugin_ids:
manifests = marketplace.batch_fetch_plugin_manifests_ignore_deserialization_error(not_cached_plugin_ids)
# Cache the fetched manifests
for manifest in manifests:
cached_manifests[manifest.plugin_id] = manifest
_set_cached_manifest(manifest.plugin_id, manifest)
# Cache plugins that were not found in marketplace
fetched_plugin_ids = {manifest.plugin_id for manifest in manifests}
for plugin_id in not_cached_plugin_ids:
if plugin_id not in fetched_plugin_ids:
cached_manifests[plugin_id] = None
_set_cached_manifest(plugin_id, None)
# Build result list from cached manifests
result: list[MarketplacePluginDeclaration] = []
for plugin_id in plugin_ids_plain_list:
cached_manifest: typing.Union[MarketplacePluginDeclaration, None] = cached_manifests.get(plugin_id)
if cached_manifest is not None:
result.append(cached_manifest)
result.append(cached_result)
return result
@@ -211,7 +166,7 @@ def process_tenant_plugin_autoupgrade_check_task(
# execute upgrade
new_unique_identifier = manifest.latest_package_identifier
marketplace.record_install_plugin_event(new_unique_identifier)
record_install_plugin_event(new_unique_identifier)
click.echo(
click.style(
f"Upgrade plugin: {original_unique_identifier} -> {new_unique_identifier}",

View File

@@ -48,6 +48,7 @@ from models.workflow import (
WorkflowArchiveLog,
)
from repositories.factory import DifyAPIRepositoryFactory
from services.api_token_service import ApiTokenCache
logger = logging.getLogger(__name__)
@@ -134,6 +135,12 @@ def _delete_app_mcp_servers(tenant_id: str, app_id: str):
def _delete_app_api_tokens(tenant_id: str, app_id: str):
def del_api_token(session, api_token_id: str):
# Fetch token details for cache invalidation
token_obj = session.query(ApiToken).where(ApiToken.id == api_token_id).first()
if token_obj:
# Invalidate cache before deletion
ApiTokenCache.delete(token_obj.token, token_obj.type)
session.query(ApiToken).where(ApiToken.id == api_token_id).delete(synchronize_session=False)
_delete_records(
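The helper reads the ApiToken row before deleting it because the cache key is derived from the token value and type, which are unrecoverable once the row is gone; deleting first would leave a stale cache entry authenticating requests until its TTL lapses. A sketch of the ordering, with placeholder names:

def delete_token_with_cache(session, token_model, cache, api_token_id):
    # Fetch first: the token value and type are needed for the cache
    # key, and they are gone once the row is deleted.
    row = session.query(token_model).where(token_model.id == api_token_id).first()
    if row:
        cache.delete(row.token, row.type)  # invalidate before the DB delete
    session.query(token_model).where(token_model.id == api_token_id).delete(
        synchronize_session=False
    )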

View File

@@ -0,0 +1,375 @@
"""
Integration tests for API Token Cache with Redis.
These tests require:
- Redis server running
- Test database configured
"""
import time
from datetime import datetime, timedelta
from unittest.mock import patch
import pytest
from extensions.ext_redis import redis_client
from models.model import ApiToken
from services.api_token_service import ApiTokenCache, CachedApiToken
class TestApiTokenCacheRedisIntegration:
"""Integration tests with real Redis."""
def setup_method(self):
"""Setup test fixtures and clean Redis."""
self.test_token = "test-integration-token-123"
self.test_scope = "app"
self.cache_key = f"api_token:{self.test_scope}:{self.test_token}"
# Clean up any existing test data
self._cleanup()
def teardown_method(self):
"""Cleanup test data from Redis."""
self._cleanup()
def _cleanup(self):
"""Remove test data from Redis."""
try:
redis_client.delete(self.cache_key)
redis_client.delete(ApiTokenCache._make_tenant_index_key("test-tenant-id"))
redis_client.delete(ApiTokenCache.make_active_key(self.test_token, self.test_scope))
except Exception:
pass # Ignore cleanup errors
def test_cache_set_and_get_with_real_redis(self):
"""Test cache set and get operations with real Redis."""
from unittest.mock import MagicMock
mock_token = MagicMock()
mock_token.id = "test-id-123"
mock_token.app_id = "test-app-456"
mock_token.tenant_id = "test-tenant-789"
mock_token.type = "app"
mock_token.token = self.test_token
mock_token.last_used_at = datetime.now()
mock_token.created_at = datetime.now() - timedelta(days=30)
# Set in cache
result = ApiTokenCache.set(self.test_token, self.test_scope, mock_token)
assert result is True
# Verify in Redis
cached_data = redis_client.get(self.cache_key)
assert cached_data is not None
# Get from cache
cached_token = ApiTokenCache.get(self.test_token, self.test_scope)
assert cached_token is not None
assert isinstance(cached_token, CachedApiToken)
assert cached_token.id == "test-id-123"
assert cached_token.app_id == "test-app-456"
assert cached_token.tenant_id == "test-tenant-789"
assert cached_token.type == "app"
assert cached_token.token == self.test_token
def test_cache_ttl_with_real_redis(self):
"""Test cache TTL is set correctly."""
from unittest.mock import MagicMock
mock_token = MagicMock()
mock_token.id = "test-id"
mock_token.app_id = "test-app"
mock_token.tenant_id = "test-tenant"
mock_token.type = "app"
mock_token.token = self.test_token
mock_token.last_used_at = None
mock_token.created_at = datetime.now()
ApiTokenCache.set(self.test_token, self.test_scope, mock_token)
ttl = redis_client.ttl(self.cache_key)
assert 595 <= ttl <= 600 # Should be around 600 seconds (10 minutes)
def test_cache_null_value_for_invalid_token(self):
"""Test caching null value for invalid tokens."""
result = ApiTokenCache.set(self.test_token, self.test_scope, None)
assert result is True
cached_data = redis_client.get(self.cache_key)
assert cached_data == b"null"
cached_token = ApiTokenCache.get(self.test_token, self.test_scope)
assert cached_token is None
ttl = redis_client.ttl(self.cache_key)
assert 55 <= ttl <= 60
def test_cache_delete_with_real_redis(self):
"""Test cache deletion with real Redis."""
from unittest.mock import MagicMock
mock_token = MagicMock()
mock_token.id = "test-id"
mock_token.app_id = "test-app"
mock_token.tenant_id = "test-tenant"
mock_token.type = "app"
mock_token.token = self.test_token
mock_token.last_used_at = None
mock_token.created_at = datetime.now()
ApiTokenCache.set(self.test_token, self.test_scope, mock_token)
assert redis_client.exists(self.cache_key) == 1
result = ApiTokenCache.delete(self.test_token, self.test_scope)
assert result is True
assert redis_client.exists(self.cache_key) == 0
def test_tenant_index_creation(self):
"""Test tenant index is created when caching token."""
from unittest.mock import MagicMock
tenant_id = "test-tenant-id"
mock_token = MagicMock()
mock_token.id = "test-id"
mock_token.app_id = "test-app"
mock_token.tenant_id = tenant_id
mock_token.type = "app"
mock_token.token = self.test_token
mock_token.last_used_at = None
mock_token.created_at = datetime.now()
ApiTokenCache.set(self.test_token, self.test_scope, mock_token)
index_key = ApiTokenCache._make_tenant_index_key(tenant_id)
assert redis_client.exists(index_key) == 1
members = redis_client.smembers(index_key)
cache_keys = [m.decode("utf-8") if isinstance(m, bytes) else m for m in members]
assert self.cache_key in cache_keys
def test_invalidate_by_tenant_via_index(self):
"""Test tenant-wide cache invalidation using index (fast path)."""
from unittest.mock import MagicMock
tenant_id = "test-tenant-id"
for i in range(3):
token_value = f"test-token-{i}"
mock_token = MagicMock()
mock_token.id = f"test-id-{i}"
mock_token.app_id = "test-app"
mock_token.tenant_id = tenant_id
mock_token.type = "app"
mock_token.token = token_value
mock_token.last_used_at = None
mock_token.created_at = datetime.now()
ApiTokenCache.set(token_value, "app", mock_token)
for i in range(3):
key = f"api_token:app:test-token-{i}"
assert redis_client.exists(key) == 1
result = ApiTokenCache.invalidate_by_tenant(tenant_id)
assert result is True
for i in range(3):
key = f"api_token:app:test-token-{i}"
assert redis_client.exists(key) == 0
assert redis_client.exists(ApiTokenCache._make_tenant_index_key(tenant_id)) == 0
def test_concurrent_cache_access(self):
"""Test concurrent cache access doesn't cause issues."""
import concurrent.futures
from unittest.mock import MagicMock
mock_token = MagicMock()
mock_token.id = "test-id"
mock_token.app_id = "test-app"
mock_token.tenant_id = "test-tenant"
mock_token.type = "app"
mock_token.token = self.test_token
mock_token.last_used_at = None
mock_token.created_at = datetime.now()
ApiTokenCache.set(self.test_token, self.test_scope, mock_token)
def get_from_cache():
return ApiTokenCache.get(self.test_token, self.test_scope)
with concurrent.futures.ThreadPoolExecutor(max_workers=10) as executor:
futures = [executor.submit(get_from_cache) for _ in range(50)]
results = [f.result() for f in concurrent.futures.as_completed(futures)]
assert len(results) == 50
assert all(r is not None for r in results)
assert all(isinstance(r, CachedApiToken) for r in results)
class TestTokenUsageRecording:
"""Tests for recording token usage in Redis (batch update approach)."""
def setup_method(self):
self.test_token = "test-usage-token"
self.test_scope = "app"
self.active_key = ApiTokenCache.make_active_key(self.test_token, self.test_scope)
def teardown_method(self):
try:
redis_client.delete(self.active_key)
except Exception:
pass
def test_record_token_usage_sets_redis_key(self):
"""Test that record_token_usage writes an active key to Redis."""
from services.api_token_service import record_token_usage
record_token_usage(self.test_token, self.test_scope)
# Key should exist
assert redis_client.exists(self.active_key) == 1
# Value should be an ISO timestamp
value = redis_client.get(self.active_key)
if isinstance(value, bytes):
value = value.decode("utf-8")
datetime.fromisoformat(value) # Should not raise
def test_record_token_usage_has_ttl(self):
"""Test that active keys have a TTL as safety net."""
from services.api_token_service import record_token_usage
record_token_usage(self.test_token, self.test_scope)
ttl = redis_client.ttl(self.active_key)
assert 3595 <= ttl <= 3600 # ~1 hour
def test_record_token_usage_overwrites(self):
"""Test that repeated calls overwrite the same key (no accumulation)."""
from services.api_token_service import record_token_usage
record_token_usage(self.test_token, self.test_scope)
first_value = redis_client.get(self.active_key)
time.sleep(0.01) # Tiny delay so timestamp differs
record_token_usage(self.test_token, self.test_scope)
second_value = redis_client.get(self.active_key)
# Overwritten, not accumulated: still one key, with a newer timestamp
assert redis_client.exists(self.active_key) == 1
assert second_value != first_value
class TestEndToEndCacheFlow:
"""End-to-end integration test for complete cache flow."""
@pytest.mark.usefixtures("db_session")
def test_complete_flow_cache_miss_then_hit(self, db_session):
"""
Test complete flow:
1. First request (cache miss) -> query DB -> cache result
2. Second request (cache hit) -> return from cache
3. Verify Redis state
"""
test_token_value = "test-e2e-token"
test_scope = "app"
test_token = ApiToken()
test_token.id = "test-e2e-id"
test_token.token = test_token_value
test_token.type = test_scope
test_token.app_id = "test-app"
test_token.tenant_id = "test-tenant"
test_token.last_used_at = None
test_token.created_at = datetime.now()
db_session.add(test_token)
db_session.commit()
try:
# Step 1: Cache miss - set token in cache
ApiTokenCache.set(test_token_value, test_scope, test_token)
cache_key = f"api_token:{test_scope}:{test_token_value}"
assert redis_client.exists(cache_key) == 1
# Step 2: Cache hit - get from cache
cached_token = ApiTokenCache.get(test_token_value, test_scope)
assert cached_token is not None
assert cached_token.id == test_token.id
assert cached_token.token == test_token_value
# Step 3: Verify tenant index
index_key = ApiTokenCache._make_tenant_index_key(test_token.tenant_id)
assert redis_client.exists(index_key) == 1
assert cache_key.encode() in redis_client.smembers(index_key)
# Step 4: Delete and verify cleanup
ApiTokenCache.delete(test_token_value, test_scope)
assert redis_client.exists(cache_key) == 0
assert cache_key.encode() not in redis_client.smembers(index_key)
finally:
db_session.delete(test_token)
db_session.commit()
redis_client.delete(f"api_token:{test_scope}:{test_token_value}")
redis_client.delete(ApiTokenCache._make_tenant_index_key(test_token.tenant_id))
def test_high_concurrency_simulation(self):
"""Simulate high concurrency access to cache."""
import concurrent.futures
from unittest.mock import MagicMock
test_token_value = "test-concurrent-token"
test_scope = "app"
mock_token = MagicMock()
mock_token.id = "concurrent-id"
mock_token.app_id = "test-app"
mock_token.tenant_id = "test-tenant"
mock_token.type = test_scope
mock_token.token = test_token_value
mock_token.last_used_at = datetime.now()
mock_token.created_at = datetime.now()
ApiTokenCache.set(test_token_value, test_scope, mock_token)
try:
def read_cache():
return ApiTokenCache.get(test_token_value, test_scope)
start_time = time.time()
with concurrent.futures.ThreadPoolExecutor(max_workers=20) as executor:
futures = [executor.submit(read_cache) for _ in range(100)]
results = [f.result() for f in concurrent.futures.as_completed(futures)]
elapsed = time.time() - start_time
assert len(results) == 100
assert all(r is not None for r in results)
assert elapsed < 1.0, f"Too slow: {elapsed}s for 100 cache reads"
finally:
ApiTokenCache.delete(test_token_value, test_scope)
redis_client.delete(ApiTokenCache._make_tenant_index_key(mock_token.tenant_id))
class TestRedisFailover:
"""Test behavior when Redis is unavailable."""
@patch("services.api_token_service.redis_client")
def test_graceful_degradation_when_redis_fails(self, mock_redis):
"""Test system degrades gracefully when Redis is unavailable."""
from redis import RedisError
mock_redis.get.side_effect = RedisError("Connection failed")
mock_redis.setex.side_effect = RedisError("Connection failed")
result_get = ApiTokenCache.get("test-token", "app")
assert result_get is None
result_set = ApiTokenCache.set("test-token", "app", None)
assert result_set is False
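The failover test pins down the cache's degradation contract: a Redis failure must read as a miss and never propagate to the caller, which then falls back to the database. A minimal sketch of that pattern, assuming a redis-py client rather than Dify's actual implementation:

import logging
from redis import Redis, RedisError

logger = logging.getLogger(__name__)

def cache_get(redis_client: Redis, key: str):
    """Return the cached value, or None (treated as a miss) when Redis
    is unreachable, so callers transparently fall back to the database."""
    try:
        return redis_client.get(key)
    except RedisError:
        logger.exception("cache read failed for key %s", key)
        return None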

View File

@@ -0,0 +1,182 @@
from unittest.mock import MagicMock, patch
import pytest
from faker import Faker
from models import Account, Tenant, TenantAccountJoin, TenantAccountRole
from models.dataset import Dataset, Document, DocumentSegment
from tasks.document_indexing_update_task import document_indexing_update_task
class TestDocumentIndexingUpdateTask:
@pytest.fixture
def mock_external_dependencies(self):
"""Patch external collaborators used by the update task.
- IndexProcessorFactory.init_index_processor().clean(...)
- IndexingRunner.run([...])
"""
with (
patch("tasks.document_indexing_update_task.IndexProcessorFactory") as mock_factory,
patch("tasks.document_indexing_update_task.IndexingRunner") as mock_runner,
):
processor_instance = MagicMock()
mock_factory.return_value.init_index_processor.return_value = processor_instance
runner_instance = MagicMock()
mock_runner.return_value = runner_instance
yield {
"factory": mock_factory,
"processor": processor_instance,
"runner": mock_runner,
"runner_instance": runner_instance,
}
def _create_dataset_document_with_segments(self, db_session_with_containers, *, segment_count: int = 2):
fake = Faker()
# Account and tenant
account = Account(
email=fake.email(),
name=fake.name(),
interface_language="en-US",
status="active",
)
db_session_with_containers.add(account)
db_session_with_containers.commit()
tenant = Tenant(name=fake.company(), status="normal")
db_session_with_containers.add(tenant)
db_session_with_containers.commit()
join = TenantAccountJoin(
tenant_id=tenant.id,
account_id=account.id,
role=TenantAccountRole.OWNER,
current=True,
)
db_session_with_containers.add(join)
db_session_with_containers.commit()
# Dataset and document
dataset = Dataset(
tenant_id=tenant.id,
name=fake.company(),
description=fake.text(max_nb_chars=64),
data_source_type="upload_file",
indexing_technique="high_quality",
created_by=account.id,
)
db_session_with_containers.add(dataset)
db_session_with_containers.commit()
document = Document(
tenant_id=tenant.id,
dataset_id=dataset.id,
position=0,
data_source_type="upload_file",
batch="test_batch",
name=fake.file_name(),
created_from="upload_file",
created_by=account.id,
indexing_status="waiting",
enabled=True,
doc_form="text_model",
)
db_session_with_containers.add(document)
db_session_with_containers.commit()
# Segments
node_ids = []
for i in range(segment_count):
node_id = f"node-{i + 1}"
seg = DocumentSegment(
tenant_id=tenant.id,
dataset_id=dataset.id,
document_id=document.id,
position=i,
content=fake.text(max_nb_chars=32),
answer=None,
word_count=10,
tokens=5,
index_node_id=node_id,
status="completed",
created_by=account.id,
)
db_session_with_containers.add(seg)
node_ids.append(node_id)
db_session_with_containers.commit()
# Refresh to ensure ORM state
db_session_with_containers.refresh(dataset)
db_session_with_containers.refresh(document)
return dataset, document, node_ids
def test_cleans_segments_and_reindexes(self, db_session_with_containers, mock_external_dependencies):
dataset, document, node_ids = self._create_dataset_document_with_segments(db_session_with_containers)
# Act
document_indexing_update_task(dataset.id, document.id)
# Ensure we see committed changes from another session
db_session_with_containers.expire_all()
# Assert document status updated before reindex
updated = db_session_with_containers.query(Document).where(Document.id == document.id).first()
assert updated.indexing_status == "parsing"
assert updated.processing_started_at is not None
# Segments should be deleted
remaining = (
db_session_with_containers.query(DocumentSegment).where(DocumentSegment.document_id == document.id).count()
)
assert remaining == 0
# Assert index processor clean was called with expected args
clean_call = mock_external_dependencies["processor"].clean.call_args
assert clean_call is not None
args, kwargs = clean_call
# args[0] is a Dataset instance (from another session) — validate by id
assert getattr(args[0], "id", None) == dataset.id
# args[1] should contain our node_ids
assert set(args[1]) == set(node_ids)
assert kwargs.get("with_keywords") is True
assert kwargs.get("delete_child_chunks") is True
# Assert indexing runner invoked with the updated document
run_call = mock_external_dependencies["runner_instance"].run.call_args
assert run_call is not None
run_docs = run_call[0][0]
assert len(run_docs) == 1
first = run_docs[0]
assert getattr(first, "id", None) == document.id
def test_clean_error_is_logged_and_indexing_continues(self, db_session_with_containers, mock_external_dependencies):
dataset, document, node_ids = self._create_dataset_document_with_segments(db_session_with_containers)
# Force clean to raise; task should continue to indexing
mock_external_dependencies["processor"].clean.side_effect = Exception("boom")
document_indexing_update_task(dataset.id, document.id)
# Ensure we see committed changes from another session
db_session_with_containers.expire_all()
# Indexing should still be triggered
mock_external_dependencies["runner_instance"].run.assert_called_once()
# Segments should remain (since clean failed before DB delete)
remaining = (
db_session_with_containers.query(DocumentSegment).where(DocumentSegment.document_id == document.id).count()
)
assert remaining > 0
def test_document_not_found_noop(self, db_session_with_containers, mock_external_dependencies):
fake = Faker()
# Act with non-existent document id
document_indexing_update_task(dataset_id=fake.uuid4(), document_id=fake.uuid4())
# Neither processor nor runner should be called
mock_external_dependencies["processor"].clean.assert_not_called()
mock_external_dependencies["runner_instance"].run.assert_not_called()

View File

@@ -217,7 +217,6 @@ class TestTemplateTransformNode:
@patch(
"core.workflow.nodes.template_transform.template_transform_node.CodeExecutorJinja2TemplateRenderer.render_template"
)
@patch("core.workflow.nodes.template_transform.template_transform_node.MAX_TEMPLATE_TRANSFORM_OUTPUT_LENGTH", 10)
def test_run_output_length_exceeds_limit(
self, mock_execute, basic_node_data, mock_graph, mock_graph_runtime_state, graph_init_params
):
@@ -231,6 +230,7 @@ class TestTemplateTransformNode:
graph_init_params=graph_init_params,
graph=mock_graph,
graph_runtime_state=mock_graph_runtime_state,
max_output_length=10,
)
result = node._run()

View File

@@ -132,6 +132,8 @@ class TestCelerySSLConfiguration:
mock_config.WORKFLOW_SCHEDULE_MAX_DISPATCH_PER_TICK = 0
mock_config.ENABLE_TRIGGER_PROVIDER_REFRESH_TASK = False
mock_config.TRIGGER_PROVIDER_REFRESH_INTERVAL = 15
mock_config.ENABLE_API_TOKEN_LAST_USED_UPDATE_TASK = False
mock_config.API_TOKEN_LAST_USED_UPDATE_INTERVAL = 30
with patch("extensions.ext_celery.dify_config", mock_config):
from dify_app import DifyApp

View File

@@ -0,0 +1,250 @@
"""
Unit tests for API Token Cache module.
"""
import json
from datetime import datetime
from unittest.mock import MagicMock, patch
from services.api_token_service import (
CACHE_KEY_PREFIX,
CACHE_NULL_TTL_SECONDS,
CACHE_TTL_SECONDS,
ApiTokenCache,
CachedApiToken,
)
class TestApiTokenCache:
"""Test cases for ApiTokenCache class."""
def setup_method(self):
"""Setup test fixtures."""
self.mock_token = MagicMock()
self.mock_token.id = "test-token-id-123"
self.mock_token.app_id = "test-app-id-456"
self.mock_token.tenant_id = "test-tenant-id-789"
self.mock_token.type = "app"
self.mock_token.token = "test-token-value-abc"
self.mock_token.last_used_at = datetime(2026, 2, 3, 10, 0, 0)
self.mock_token.created_at = datetime(2026, 1, 1, 0, 0, 0)
def test_make_cache_key(self):
"""Test cache key generation."""
# Test with scope
key = ApiTokenCache._make_cache_key("my-token", "app")
assert key == f"{CACHE_KEY_PREFIX}:app:my-token"
# Test without scope
key = ApiTokenCache._make_cache_key("my-token", None)
assert key == f"{CACHE_KEY_PREFIX}:any:my-token"
def test_serialize_token(self):
"""Test token serialization."""
serialized = ApiTokenCache._serialize_token(self.mock_token)
data = json.loads(serialized)
assert data["id"] == "test-token-id-123"
assert data["app_id"] == "test-app-id-456"
assert data["tenant_id"] == "test-tenant-id-789"
assert data["type"] == "app"
assert data["token"] == "test-token-value-abc"
assert data["last_used_at"] == "2026-02-03T10:00:00"
assert data["created_at"] == "2026-01-01T00:00:00"
def test_serialize_token_with_nulls(self):
"""Test token serialization with None values."""
mock_token = MagicMock()
mock_token.id = "test-id"
mock_token.app_id = None
mock_token.tenant_id = None
mock_token.type = "dataset"
mock_token.token = "test-token"
mock_token.last_used_at = None
mock_token.created_at = datetime(2026, 1, 1, 0, 0, 0)
serialized = ApiTokenCache._serialize_token(mock_token)
data = json.loads(serialized)
assert data["app_id"] is None
assert data["tenant_id"] is None
assert data["last_used_at"] is None
def test_deserialize_token(self):
"""Test token deserialization."""
cached_data = json.dumps(
{
"id": "test-id",
"app_id": "test-app",
"tenant_id": "test-tenant",
"type": "app",
"token": "test-token",
"last_used_at": "2026-02-03T10:00:00",
"created_at": "2026-01-01T00:00:00",
}
)
result = ApiTokenCache._deserialize_token(cached_data)
assert isinstance(result, CachedApiToken)
assert result.id == "test-id"
assert result.app_id == "test-app"
assert result.tenant_id == "test-tenant"
assert result.type == "app"
assert result.token == "test-token"
assert result.last_used_at == datetime(2026, 2, 3, 10, 0, 0)
assert result.created_at == datetime(2026, 1, 1, 0, 0, 0)
def test_deserialize_null_token(self):
"""Test deserialization of null token (cached miss)."""
result = ApiTokenCache._deserialize_token("null")
assert result is None
def test_deserialize_invalid_json(self):
"""Test deserialization with invalid JSON."""
result = ApiTokenCache._deserialize_token("invalid-json{")
assert result is None
@patch("services.api_token_service.redis_client")
def test_get_cache_hit(self, mock_redis):
"""Test cache hit scenario."""
cached_data = json.dumps(
{
"id": "test-id",
"app_id": "test-app",
"tenant_id": "test-tenant",
"type": "app",
"token": "test-token",
"last_used_at": "2026-02-03T10:00:00",
"created_at": "2026-01-01T00:00:00",
}
).encode("utf-8")
mock_redis.get.return_value = cached_data
result = ApiTokenCache.get("test-token", "app")
assert result is not None
assert isinstance(result, CachedApiToken)
assert result.app_id == "test-app"
mock_redis.get.assert_called_once_with(f"{CACHE_KEY_PREFIX}:app:test-token")
@patch("services.api_token_service.redis_client")
def test_get_cache_miss(self, mock_redis):
"""Test cache miss scenario."""
mock_redis.get.return_value = None
result = ApiTokenCache.get("test-token", "app")
assert result is None
mock_redis.get.assert_called_once()
@patch("services.api_token_service.redis_client")
def test_set_valid_token(self, mock_redis):
"""Test setting a valid token in cache."""
result = ApiTokenCache.set("test-token", "app", self.mock_token)
assert result is True
mock_redis.setex.assert_called_once()
args = mock_redis.setex.call_args[0]
assert args[0] == f"{CACHE_KEY_PREFIX}:app:test-token"
assert args[1] == CACHE_TTL_SECONDS
@patch("services.api_token_service.redis_client")
def test_set_null_token(self, mock_redis):
"""Test setting a null token (cache penetration prevention)."""
result = ApiTokenCache.set("invalid-token", "app", None)
assert result is True
mock_redis.setex.assert_called_once()
args = mock_redis.setex.call_args[0]
assert args[0] == f"{CACHE_KEY_PREFIX}:app:invalid-token"
assert args[1] == CACHE_NULL_TTL_SECONDS
assert args[2] == b"null"
@patch("services.api_token_service.redis_client")
def test_delete_with_scope(self, mock_redis):
"""Test deleting token cache with specific scope."""
result = ApiTokenCache.delete("test-token", "app")
assert result is True
mock_redis.delete.assert_called_once_with(f"{CACHE_KEY_PREFIX}:app:test-token")
@patch("services.api_token_service.redis_client")
def test_delete_without_scope(self, mock_redis):
"""Test deleting token cache without scope (delete all)."""
# Mock scan_iter to return an iterator of keys
mock_redis.scan_iter.return_value = iter(
[
b"api_token:app:test-token",
b"api_token:dataset:test-token",
]
)
result = ApiTokenCache.delete("test-token", None)
assert result is True
# Verify scan_iter was called with the correct pattern
mock_redis.scan_iter.assert_called_once()
call_args = mock_redis.scan_iter.call_args
assert call_args[1]["match"] == f"{CACHE_KEY_PREFIX}:*:test-token"
# Verify delete was called with all matched keys
mock_redis.delete.assert_called_once_with(
b"api_token:app:test-token",
b"api_token:dataset:test-token",
)
@patch("services.api_token_service.redis_client")
def test_redis_fallback_on_exception(self, mock_redis):
"""Test Redis fallback when Redis is unavailable."""
from redis import RedisError
mock_redis.get.side_effect = RedisError("Connection failed")
result = ApiTokenCache.get("test-token", "app")
# Should return None (fallback) instead of raising exception
assert result is None
class TestApiTokenCacheIntegration:
"""Integration test scenarios."""
@patch("services.api_token_service.redis_client")
def test_full_cache_lifecycle(self, mock_redis):
"""Test complete cache lifecycle: set -> get -> delete."""
# Setup mock token
mock_token = MagicMock()
mock_token.id = "id-123"
mock_token.app_id = "app-456"
mock_token.tenant_id = "tenant-789"
mock_token.type = "app"
mock_token.token = "token-abc"
mock_token.last_used_at = datetime(2026, 2, 3, 10, 0, 0)
mock_token.created_at = datetime(2026, 1, 1, 0, 0, 0)
# 1. Set token in cache
ApiTokenCache.set("token-abc", "app", mock_token)
assert mock_redis.setex.called
# 2. Simulate cache hit
cached_data = ApiTokenCache._serialize_token(mock_token)
mock_redis.get.return_value = cached_data # str here; real Redis returns bytes, both deserialize the same way
retrieved = ApiTokenCache.get("token-abc", "app")
assert retrieved is not None
assert isinstance(retrieved, CachedApiToken)
# 3. Delete from cache
ApiTokenCache.delete("token-abc", "app")
assert mock_redis.delete.called
@patch("services.api_token_service.redis_client")
def test_cache_penetration_prevention(self, mock_redis):
"""Test that non-existent tokens are cached as null."""
# Set null token (cache miss)
ApiTokenCache.set("non-existent-token", "app", None)
args = mock_redis.setex.call_args[0]
assert args[2] == b"null"
assert args[1] == CACHE_NULL_TTL_SECONDS # Shorter TTL for null values

View File

@@ -114,6 +114,21 @@ def mock_db_session():
session = MagicMock()
# Ensure tests can observe session.close() via context manager teardown
session.close = MagicMock()
session.commit = MagicMock()
# Mock session.begin() context manager to auto-commit on exit
begin_cm = MagicMock()
begin_cm.__enter__.return_value = session
def _begin_exit_side_effect(*args, **kwargs):
# session.begin().__exit__() should commit if no exception
if args[0] is None: # No exception
session.commit()
begin_cm.__exit__.side_effect = _begin_exit_side_effect
session.begin.return_value = begin_cm
# Mock create_session() context manager
cm = MagicMock()
cm.__enter__.return_value = session
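This fixture wiring makes the mocked `session.begin()` behave like SQLAlchemy's: `__exit__` commits when no exception reached the block, so production code using the `with session, session.begin():` idiom stays observable through `session.commit`. A self-contained sketch of the same wiring outside any fixture:

from unittest.mock import MagicMock

session = MagicMock()
begin_cm = MagicMock()
begin_cm.__enter__.return_value = session

def _exit(exc_type, exc, tb):
    # Mirror SQLAlchemy: commit on clean exit, roll back on error.
    if exc_type is None:
        session.commit()
    else:
        session.rollback()

begin_cm.__exit__.side_effect = _exit
session.begin.return_value = begin_cm

with session.begin():
    pass  # clean exit triggers the auto-commit

assert session.commit.called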

View File

@@ -35,7 +35,6 @@ export const baseProviderContextValue: ProviderContextState = {
refreshLicenseLimit: noop,
isAllowTransferWorkspace: false,
isAllowPublishAsCustomKnowledgePipelineTemplate: false,
humanInputEmailDeliveryEnabled: false,
}
export const createMockProviderContextValue = (overrides: Partial<ProviderContextState> = {}): ProviderContextState => {

View File

@@ -8,6 +8,7 @@ describe('SVG Attribute Error Reproduction', () => {
// Capture console errors
const originalError = console.error
let errorMessages: string[] = []
beforeEach(() => {
errorMessages = []
console.error = vi.fn((message) => {

View File

@@ -1,289 +0,0 @@
'use client'
import type { ButtonProps } from '@/app/components/base/button'
import type { FormInputItem, UserAction } from '@/app/components/workflow/nodes/human-input/types'
import type { SiteInfo } from '@/models/share'
import type { HumanInputFormError } from '@/service/use-share'
import {
RiCheckboxCircleFill,
RiErrorWarningFill,
RiInformation2Fill,
} from '@remixicon/react'
import { produce } from 'immer'
import { useParams } from 'next/navigation'
import * as React from 'react'
import { useEffect, useMemo, useState } from 'react'
import { useTranslation } from 'react-i18next'
import AppIcon from '@/app/components/base/app-icon'
import Button from '@/app/components/base/button'
import ContentItem from '@/app/components/base/chat/chat/answer/human-input-content/content-item'
import ExpirationTime from '@/app/components/base/chat/chat/answer/human-input-content/expiration-time'
import { getButtonStyle } from '@/app/components/base/chat/chat/answer/human-input-content/utils'
import Loading from '@/app/components/base/loading'
import DifyLogo from '@/app/components/base/logo/dify-logo'
import useDocumentTitle from '@/hooks/use-document-title'
import { useGetHumanInputForm, useSubmitHumanInputForm } from '@/service/use-share'
import { cn } from '@/utils/classnames'
export type FormData = {
site: { site: SiteInfo }
form_content: string
inputs: FormInputItem[]
resolved_default_values: Record<string, string>
user_actions: UserAction[]
expiration_time: number
}
const FormContent = () => {
const { t } = useTranslation()
const { token } = useParams<{ token: string }>()
useDocumentTitle('')
const [inputs, setInputs] = useState<Record<string, string>>({})
const [success, setSuccess] = useState(false)
const { mutate: submitForm, isPending: isSubmitting } = useSubmitHumanInputForm()
const { data: formData, isLoading, error } = useGetHumanInputForm(token)
const expired = (error as HumanInputFormError | null)?.code === 'human_input_form_expired'
const submitted = (error as HumanInputFormError | null)?.code === 'human_input_form_submitted'
const rateLimitExceeded = (error as HumanInputFormError | null)?.code === 'web_form_rate_limit_exceeded'
const splitByOutputVar = (content: string): string[] => {
const outputVarRegex = /(\{\{#\$output\.[^#]+#\}\})/g
const parts = content.split(outputVarRegex)
return parts.filter(part => part.length > 0)
}
const contentList = useMemo(() => {
if (!formData?.form_content)
return []
return splitByOutputVar(formData.form_content)
}, [formData?.form_content])
useEffect(() => {
if (!formData?.inputs)
return
const initialInputs: Record<string, string> = {}
formData.inputs.forEach((item) => {
initialInputs[item.output_variable_name] = item.default.type === 'variable' ? formData.resolved_default_values[item.output_variable_name] || '' : item.default.value
})
setInputs(initialInputs)
}, [formData?.inputs, formData?.resolved_default_values])
// use immer
const handleInputsChange = (name: string, value: string) => {
const newInputs = produce(inputs, (draft) => {
draft[name] = value
})
setInputs(newInputs)
}
const submit = (actionID: string) => {
submitForm(
{ token, data: { inputs, action: actionID } },
{
onSuccess: () => {
setSuccess(true)
},
},
)
}
if (isLoading) {
return (
<Loading type="app" />
)
}
if (success) {
return (
<div className={cn('flex h-full w-full flex-col items-center justify-center')}>
<div className="min-w-[480px] max-w-[640px]">
<div className="border-components-divider-subtle flex h-[320px] flex-col gap-4 rounded-[20px] border bg-chat-bubble-bg p-10 pb-9 shadow-lg backdrop-blur-sm">
<div className="h-[56px] w-[56px] shrink-0 rounded-2xl border border-components-panel-border-subtle bg-background-default-dodge p-3">
<RiCheckboxCircleFill className="h-8 w-8 text-text-success" />
</div>
<div className="grow">
<div className="title-4xl-semi-bold text-text-primary">{t('humanInput.thanks', { ns: 'share' })}</div>
<div className="title-4xl-semi-bold text-text-primary">{t('humanInput.recorded', { ns: 'share' })}</div>
</div>
<div className="system-2xs-regular-uppercase shrink-0 text-text-tertiary">{t('humanInput.submissionID', { id: token, ns: 'share' })}</div>
</div>
<div className="flex flex-row-reverse px-2 py-3">
<div className={cn(
'flex shrink-0 items-center gap-1.5 px-1',
)}
>
<div className="system-2xs-medium-uppercase text-text-tertiary">{t('chat.poweredBy', { ns: 'share' })}</div>
<DifyLogo size="small" />
</div>
</div>
</div>
</div>
)
}
if (expired) {
return (
<div className={cn('flex h-full w-full flex-col items-center justify-center')}>
<div className="min-w-[480px] max-w-[640px]">
<div className="border-components-divider-subtle flex h-[320px] flex-col gap-4 rounded-[20px] border bg-chat-bubble-bg p-10 pb-9 shadow-lg backdrop-blur-sm">
<div className="flex h-14 w-14 shrink-0 items-center justify-center rounded-2xl border border-components-panel-border-subtle bg-background-default-dodge p-3">
<RiInformation2Fill className="h-8 w-8 text-text-accent" />
</div>
<div className="grow">
<div className="title-4xl-semi-bold text-text-primary">{t('humanInput.sorry', { ns: 'share' })}</div>
<div className="title-4xl-semi-bold text-text-primary">{t('humanInput.expired', { ns: 'share' })}</div>
</div>
<div className="system-2xs-regular-uppercase shrink-0 text-text-tertiary">{t('humanInput.submissionID', { id: token, ns: 'share' })}</div>
</div>
<div className="flex flex-row-reverse px-2 py-3">
<div className={cn(
'flex shrink-0 items-center gap-1.5 px-1',
)}
>
<div className="system-2xs-medium-uppercase text-text-tertiary">{t('chat.poweredBy', { ns: 'share' })}</div>
<DifyLogo size="small" />
</div>
</div>
</div>
</div>
)
}
if (submitted) {
return (
<div className={cn('flex h-full w-full flex-col items-center justify-center')}>
<div className="min-w-[480px] max-w-[640px]">
<div className="border-components-divider-subtle flex h-[320px] flex-col gap-4 rounded-[20px] border bg-chat-bubble-bg p-10 pb-9 shadow-lg backdrop-blur-sm">
<div className="flex h-14 w-14 shrink-0 items-center justify-center rounded-2xl border border-components-panel-border-subtle bg-background-default-dodge p-3">
<RiInformation2Fill className="h-8 w-8 text-text-accent" />
</div>
<div className="grow">
<div className="title-4xl-semi-bold text-text-primary">{t('humanInput.sorry', { ns: 'share' })}</div>
<div className="title-4xl-semi-bold text-text-primary">{t('humanInput.completed', { ns: 'share' })}</div>
</div>
<div className="system-2xs-regular-uppercase shrink-0 text-text-tertiary">{t('humanInput.submissionID', { id: token, ns: 'share' })}</div>
</div>
<div className="flex flex-row-reverse px-2 py-3">
<div className={cn(
'flex shrink-0 items-center gap-1.5 px-1',
)}
>
<div className="system-2xs-medium-uppercase text-text-tertiary">{t('chat.poweredBy', { ns: 'share' })}</div>
<DifyLogo size="small" />
</div>
</div>
</div>
</div>
)
}
if (rateLimitExceeded) {
return (
<div className={cn('flex h-full w-full flex-col items-center justify-center')}>
<div className="min-w-[480px] max-w-[640px]">
<div className="border-components-divider-subtle flex h-[320px] flex-col gap-4 rounded-[20px] border bg-chat-bubble-bg p-10 pb-9 shadow-lg backdrop-blur-sm">
<div className="flex h-14 w-14 shrink-0 items-center justify-center rounded-2xl border border-components-panel-border-subtle bg-background-default-dodge p-3">
<RiErrorWarningFill className="h-8 w-8 text-text-destructive" />
</div>
<div className="grow">
<div className="title-4xl-semi-bold text-text-primary">{t('humanInput.rateLimitExceeded', { ns: 'share' })}</div>
</div>
</div>
<div className="flex flex-row-reverse px-2 py-3">
<div className={cn(
'flex shrink-0 items-center gap-1.5 px-1',
)}
>
<div className="system-2xs-medium-uppercase text-text-tertiary">{t('chat.poweredBy', { ns: 'share' })}</div>
<DifyLogo size="small" />
</div>
</div>
</div>
</div>
)
}
if (!formData) {
return (
<div className={cn('flex h-full w-full flex-col items-center justify-center')}>
<div className="min-w-[480px] max-w-[640px]">
<div className="border-components-divider-subtle flex h-[320px] flex-col gap-4 rounded-[20px] border bg-chat-bubble-bg p-10 pb-9 shadow-lg backdrop-blur-sm">
<div className="flex h-14 w-14 shrink-0 items-center justify-center rounded-2xl border border-components-panel-border-subtle bg-background-default-dodge p-3">
<RiErrorWarningFill className="h-8 w-8 text-text-destructive" />
</div>
<div className="grow">
<div className="title-4xl-semi-bold text-text-primary">{t('humanInput.formNotFound', { ns: 'share' })}</div>
</div>
</div>
<div className="flex flex-row-reverse px-2 py-3">
<div className={cn(
'flex shrink-0 items-center gap-1.5 px-1',
)}
>
<div className="system-2xs-medium-uppercase text-text-tertiary">{t('chat.poweredBy', { ns: 'share' })}</div>
<DifyLogo size="small" />
</div>
</div>
</div>
</div>
)
}
const site = formData.site.site
return (
<div className={cn('mx-auto flex h-full w-full max-w-[720px] flex-col items-center')}>
<div className="mt-4 flex w-full shrink-0 items-center gap-3 py-3">
<AppIcon
size="large"
iconType={site.icon_type}
icon={site.icon}
background={site.icon_background}
imageUrl={site.icon_url}
/>
<div className="system-xl-semibold grow text-text-primary">{site.title}</div>
</div>
<div className="h-0 w-full grow overflow-y-auto">
<div className="border-components-divider-subtle rounded-[20px] border bg-chat-bubble-bg p-4 shadow-lg backdrop-blur-sm">
{contentList.map((content, index) => (
<ContentItem
key={index}
content={content}
formInputFields={formData.inputs}
inputs={inputs}
onInputChange={handleInputsChange}
/>
))}
<div className="flex flex-wrap gap-1 py-1">
{formData.user_actions.map((action: UserAction) => (
<Button
key={action.id}
disabled={isSubmitting}
variant={getButtonStyle(action.button_style) as ButtonProps['variant']}
onClick={() => submit(action.id)}
>
{action.title}
</Button>
))}
</div>
<ExpirationTime expirationTime={formData.expiration_time * 1000} />
</div>
<div className="flex flex-row-reverse px-2 py-3">
<div className={cn(
'flex shrink-0 items-center gap-1.5 px-1',
)}
>
<div className="system-2xs-medium-uppercase text-text-tertiary">{t('chat.poweredBy', { ns: 'share' })}</div>
<DifyLogo size="small" />
</div>
</div>
</div>
</div>
)
}
export default React.memo(FormContent)

View File

@@ -1,13 +0,0 @@
'use client'
import * as React from 'react'
import FormContent from './form'
const FormPage = () => {
return (
<div className="h-full min-w-[300px] bg-chatbot-bg pb-[env(safe-area-inset-bottom)]">
<FormContent />
</div>
)
}
export default React.memo(FormPage)

View File

@@ -47,7 +47,7 @@ const AuthenticatedLayout = ({ children }: { children: React.ReactNode }) => {
await webAppLogout(shareCode!)
const url = getSigninUrl()
router.replace(url)
}, [getSigninUrl, router, shareCode])
}, [getSigninUrl, router, webAppLogout, shareCode])
if (appInfoError) {
return (

View File

@@ -31,7 +31,7 @@ const Splash: FC<PropsWithChildren> = ({ children }) => {
await webAppLogout(shareCode!)
const url = getSigninUrl()
router.replace(url)
}, [getSigninUrl, router, shareCode])
}, [getSigninUrl, router, webAppLogout, shareCode])
const [isLoading, setIsLoading] = useState(true)
useEffect(() => {

View File

@@ -115,7 +115,6 @@ export type AppPublisherProps = {
missingStartNode?: boolean
hasTriggerNode?: boolean // Whether workflow currently contains any trigger nodes (used to hide missing-start CTA when triggers exist).
startNodeLimitExceeded?: boolean
hasHumanInputNode?: boolean
}
const PUBLISH_SHORTCUT = ['ctrl', '⇧', 'P']
@@ -139,14 +138,13 @@ const AppPublisher = ({
missingStartNode = false,
hasTriggerNode = false,
startNodeLimitExceeded = false,
hasHumanInputNode = false,
}: AppPublisherProps) => {
const { t } = useTranslation()
const [published, setPublished] = useState(false)
const [open, setOpen] = useState(false)
const [showAppAccessControl, setShowAppAccessControl] = useState(false)
const [isAppAccessSet, setIsAppAccessSet] = useState(true)
const [embeddingModalOpen, setEmbeddingModalOpen] = useState(false)
const appDetail = useAppStore(state => state.appDetail)
@@ -163,13 +161,6 @@ const AppPublisher = ({
const { data: appAccessSubjects, isLoading: isGettingAppWhiteListSubjects } = useAppWhiteListSubjects(appDetail?.id, open && systemFeatures.webapp_auth.enabled && appDetail?.access_mode === AccessMode.SPECIFIC_GROUPS_MEMBERS)
const openAsyncWindow = useAsyncWindowOpen()
const isAppAccessSet = useMemo(() => {
if (appDetail && appAccessSubjects) {
return !(appDetail.access_mode === AccessMode.SPECIFIC_GROUPS_MEMBERS && appAccessSubjects.groups?.length === 0 && appAccessSubjects.members?.length === 0)
}
return true
}, [appAccessSubjects, appDetail])
const noAccessPermission = useMemo(() => systemFeatures.webapp_auth.enabled && appDetail && appDetail.access_mode !== AccessMode.EXTERNAL_MEMBERS && !userCanAccessApp?.result, [systemFeatures, appDetail, userCanAccessApp])
const disabledFunctionButton = useMemo(() => (!publishedAt || missingStartNode || noAccessPermission), [publishedAt, missingStartNode, noAccessPermission])
@@ -180,13 +171,25 @@ const AppPublisher = ({
return t('noUserInputNode', { ns: 'app' })
if (noAccessPermission)
return t('noAccessPermission', { ns: 'app' })
}, [missingStartNode, noAccessPermission, publishedAt, t])
}, [missingStartNode, noAccessPermission, publishedAt])
useEffect(() => {
if (systemFeatures.webapp_auth.enabled && open && appDetail)
refetch()
}, [open, appDetail, refetch, systemFeatures])
useEffect(() => {
if (appDetail && appAccessSubjects) {
if (appDetail.access_mode === AccessMode.SPECIFIC_GROUPS_MEMBERS && appAccessSubjects.groups?.length === 0 && appAccessSubjects.members?.length === 0)
setIsAppAccessSet(false)
else
setIsAppAccessSet(true)
}
else {
setIsAppAccessSet(true)
}
}, [appAccessSubjects, appDetail])
const handlePublish = useCallback(async (params?: ModelAndParameter | PublishWorkflowParams) => {
try {
await onPublish?.(params)
@@ -458,7 +461,7 @@ const AppPublisher = ({
{t('common.accessAPIReference', { ns: 'workflow' })}
</SuggestedAction>
</Tooltip>
{appDetail?.mode === AppModeEnum.WORKFLOW && !hasHumanInputNode && (
{appDetail?.mode === AppModeEnum.WORKFLOW && (
<WorkflowToolConfigureButton
disabled={workflowToolDisabled}
published={!!toolPublished}

View File

@@ -109,6 +109,7 @@ const AgentTools: FC = () => {
tool_parameters: paramsWithDefaultValue,
notAuthor: !tool.is_team_authorization,
enabled: true,
type: tool.provider_type as CollectionType,
}
}
const handleSelectTool = (tool: ToolDefaultValue) => {

View File

@@ -68,7 +68,6 @@ type IDrawerContext = {
}
type StatusCount = {
paused: number
success: number
failed: number
partial_success: number
@@ -94,15 +93,7 @@ const statusTdRender = (statusCount: StatusCount) => {
if (!statusCount)
return null
if (statusCount.paused > 0) {
return (
<div className="system-xs-semibold-uppercase inline-flex items-center gap-1">
<Indicator color="yellow" />
<span className="text-util-colors-warning-warning-600">Pending</span>
</div>
)
}
else if (statusCount.partial_success + statusCount.failed === 0) {
if (statusCount.partial_success + statusCount.failed === 0) {
return (
<div className="system-xs-semibold-uppercase inline-flex items-center gap-1">
<Indicator color="green" />
@@ -305,7 +296,7 @@ function DetailPanel({ detail, onFeedback }: IDetailPanel) {
if (abortControllerRef.current === controller)
abortControllerRef.current = null
}
}, [detail.id, hasMore, timezone, t, appDetail])
}, [detail.id, hasMore, timezone, t, appDetail, detail?.model_config?.configs?.introduction])
// Derive chatItemTree, threadChatItems, and oldestAnswerIdRef from allChatItems
useEffect(() => {
@@ -420,7 +411,7 @@ function DetailPanel({ detail, onFeedback }: IDetailPanel) {
notify({ type: 'error', message: t('actionMsg.modifiedUnsuccessfully', { ns: 'common' }) })
return false
}
}, [allChatItems, appDetail?.id, notify, t])
}, [allChatItems, appDetail?.id, t])
const fetchInitiated = useRef(false)
@@ -513,7 +504,7 @@ function DetailPanel({ detail, onFeedback }: IDetailPanel) {
finally {
setIsLoading(false)
}
}, [detail.id, hasMore, isLoading, timezone, t, appDetail])
}, [detail.id, hasMore, isLoading, timezone, t, appDetail, detail?.model_config?.configs?.introduction])
const handleScroll = useCallback(() => {
const scrollableDiv = document.getElementById('scrollableDiv')

View File

@@ -53,7 +53,6 @@ const defaultProviderContext = {
refreshLicenseLimit: noop,
isAllowTransferWorkspace: false,
isAllowPublishAsCustomKnowledgePipelineTemplate: false,
humanInputEmailDeliveryEnabled: false,
}
const defaultModalContext: ModalContextState = {

View File

@@ -8,7 +8,7 @@ import {
RiClipboardLine,
RiFileList3Line,
RiPlayList2Line,
RiResetLeftLine,
RiReplay15Line,
RiSparklingFill,
RiSparklingLine,
RiThumbDownLine,
@@ -18,12 +18,10 @@ import { useBoolean } from 'ahooks'
import copy from 'copy-to-clipboard'
import { useParams } from 'next/navigation'
import * as React from 'react'
import { useCallback, useEffect, useState } from 'react'
import { useEffect, useState } from 'react'
import { useTranslation } from 'react-i18next'
import { useStore as useAppStore } from '@/app/components/app/store'
import ActionButton, { ActionButtonState } from '@/app/components/base/action-button'
import HumanInputFilledFormList from '@/app/components/base/chat/chat/answer/human-input-filled-form-list'
import HumanInputFormList from '@/app/components/base/chat/chat/answer/human-input-form-list'
import WorkflowProcessItem from '@/app/components/base/chat/chat/answer/workflow-process'
import { useChatContext } from '@/app/components/base/chat/chat/context'
import Loading from '@/app/components/base/loading'
@@ -31,8 +29,7 @@ import { Markdown } from '@/app/components/base/markdown'
import NewAudioButton from '@/app/components/base/new-audio-button'
import Toast from '@/app/components/base/toast'
import { fetchTextGenerationMessage } from '@/service/debug'
import { AppSourceType, fetchMoreLikeThis, submitHumanInputForm, updateFeedback } from '@/service/share'
import { submitHumanInputForm as submitHumanInputFormService } from '@/service/workflow'
import { AppSourceType, fetchMoreLikeThis, updateFeedback } from '@/service/share'
import { cn } from '@/utils/classnames'
import ResultTab from './result-tab'
@@ -124,7 +121,7 @@ const GenerationItem: FC<IGenerationItemProps> = ({
const [isQuerying, { setTrue: startQuerying, setFalse: stopQuerying }] = useBoolean(false)
const childProps = {
isInWebApp,
isInWebApp: true,
content: completionRes,
messageId: childMessageId,
depth: depth + 1,
@@ -205,22 +202,16 @@ const GenerationItem: FC<IGenerationItemProps> = ({
}
const [currentTab, setCurrentTab] = useState<string>('DETAIL')
const showResultTabs = !!workflowProcessData?.resultText || !!workflowProcessData?.files?.length || (workflowProcessData?.humanInputFormDataList && workflowProcessData?.humanInputFormDataList.length > 0) || (workflowProcessData?.humanInputFilledFormDataList && workflowProcessData?.humanInputFilledFormDataList.length > 0)
const showResultTabs = !!workflowProcessData?.resultText || !!workflowProcessData?.files?.length
const switchTab = async (tab: string) => {
setCurrentTab(tab)
}
useEffect(() => {
if (workflowProcessData?.resultText || !!workflowProcessData?.files?.length || (workflowProcessData?.humanInputFormDataList && workflowProcessData?.humanInputFormDataList.length > 0) || (workflowProcessData?.humanInputFilledFormDataList && workflowProcessData?.humanInputFilledFormDataList.length > 0))
if (workflowProcessData?.resultText || !!workflowProcessData?.files?.length)
switchTab('RESULT')
else
switchTab('DETAIL')
}, [workflowProcessData?.files?.length, workflowProcessData?.resultText, workflowProcessData?.humanInputFormDataList, workflowProcessData?.humanInputFilledFormDataList])
const handleSubmitHumanInputForm = useCallback(async (formToken: string, formData: { inputs: Record<string, string>, action: string }) => {
if (appSourceType === AppSourceType.installedApp)
await submitHumanInputFormService(formToken, formData)
else
await submitHumanInputForm(formToken, formData)
}, [appSourceType])
}, [workflowProcessData?.files?.length, workflowProcessData?.resultText])
return (
<>
@@ -284,24 +275,7 @@ const GenerationItem: FC<IGenerationItemProps> = ({
)}
</div>
{!isError && (
<>
{currentTab === 'RESULT' && workflowProcessData.humanInputFormDataList && workflowProcessData.humanInputFormDataList.length > 0 && (
<div className="px-4 pt-4">
<HumanInputFormList
humanInputFormDataList={workflowProcessData.humanInputFormDataList}
onHumanInputFormSubmit={handleSubmitHumanInputForm}
/>
</div>
)}
{currentTab === 'RESULT' && workflowProcessData.humanInputFilledFormDataList && workflowProcessData.humanInputFilledFormDataList.length > 0 && (
<div className="px-4 pt-4">
<HumanInputFilledFormList
humanInputFilledFormDataList={workflowProcessData.humanInputFilledFormDataList}
/>
</div>
)}
<ResultTab data={workflowProcessData} content={content} currentTab={currentTab} />
</>
<ResultTab data={workflowProcessData} content={content} currentTab={currentTab} />
)}
</>
)}
@@ -374,7 +348,7 @@ const GenerationItem: FC<IGenerationItemProps> = ({
)}
{isInWebApp && isError && (
<ActionButton onClick={onRetry}>
<RiResetLeftLine className="h-4 w-4" />
<RiReplay15Line className="h-4 w-4" />
</ActionButton>
)}
{isInWebApp && !isWorkflow && !isTryApp && (

View File

@@ -81,14 +81,6 @@ const WorkflowAppLogList: FC<ILogs> = ({ logs, appDetail, onRefresh }) => {
</div>
)
}
if (status === 'paused') {
return (
<div className="system-xs-semibold-uppercase inline-flex items-center gap-1">
<Indicator color="yellow" />
<span className="text-util-colors-warning-warning-600">Pending</span>
</div>
)
}
if (status === 'running') {
return (
<div className="system-xs-semibold-uppercase inline-flex items-center gap-1">

View File

@@ -26,10 +26,6 @@
@apply p-0.5 w-6 h-6 rounded-lg
}
.action-btn-s {
@apply w-5 h-5 rounded-[6px]
}
.action-btn-xs {
@apply p-0 w-4 h-4 rounded
}

View File

@@ -18,7 +18,6 @@ const actionButtonVariants = cva(
variants: {
size: {
xs: 'action-btn-xs',
s: 'action-btn-s',
m: 'action-btn-m',
l: 'action-btn-l',
xl: 'action-btn-xl',

View File

@@ -2,7 +2,6 @@ import type { FileEntity } from '../../file-uploader/types'
import type {
ChatConfig,
ChatItem,
ChatItemInTree,
OnSend,
} from '../types'
import { useCallback, useEffect, useMemo, useState } from 'react'
@@ -17,9 +16,7 @@ import {
fetchSuggestedQuestions,
getUrl,
stopChatMessageResponding,
submitHumanInputForm,
} from '@/service/share'
import { submitHumanInputForm as submitHumanInputFormService } from '@/service/workflow'
import { TransferMethod } from '@/types/app'
import { cn } from '@/utils/classnames'
import { formatBooleanInputs } from '@/utils/model-config'
@@ -76,9 +73,9 @@ const ChatWrapper = () => {
}, [appParams, currentConversationItem?.introduction])
const {
chatList,
setTargetMessageId,
handleSend,
handleStop,
handleSwitchSibling,
isResponding: respondingState,
suggestedQuestions,
} = useChat(
@@ -125,11 +122,8 @@ const ChatWrapper = () => {
if (fileIsUploading)
return true
if (chatList.some(item => item.isAnswer && item.humanInputFormDataList && item.humanInputFormDataList.length > 0))
return true
return false
}, [allInputsHidden, inputsForms, chatList, inputsFormValue])
}, [inputsFormValue, inputsForms, allInputsHidden])
useEffect(() => {
if (currentChatInstanceRef.current)
@@ -140,40 +134,6 @@ const ChatWrapper = () => {
setIsResponding(respondingState)
}, [respondingState, setIsResponding])
// Resume paused workflows when chat history is loaded
useEffect(() => {
if (!appPrevChatTree || appPrevChatTree.length === 0)
return
// Find the last answer item with workflow_run_id that needs resumption (DFS - find deepest first)
let lastPausedNode: ChatItemInTree | undefined
const findLastPausedWorkflow = (nodes: ChatItemInTree[]) => {
nodes.forEach((node) => {
// DFS: recurse to children first
if (node.children && node.children.length > 0)
findLastPausedWorkflow(node.children)
// Track the last node with humanInputFormDataList
if (node.isAnswer && node.workflow_run_id && node.humanInputFormDataList && node.humanInputFormDataList.length > 0)
lastPausedNode = node
})
}
findLastPausedWorkflow(appPrevChatTree)
// Only resume the last paused workflow
if (lastPausedNode) {
handleSwitchSibling(
lastPausedNode.id,
{
onGetSuggestedQuestions: responseItemId => fetchSuggestedQuestions(responseItemId, appSourceType, appId),
onConversationComplete: currentConversationId ? undefined : handleNewConversationCompleted,
isPublicAPI: appSourceType === AppSourceType.webApp,
},
)
}
}, [])
const doSend: OnSend = useCallback((message, files, isRegenerate = false, parentAnswer: ChatItem | null = null) => {
const data: any = {
query: message,
@@ -189,10 +149,10 @@ const ChatWrapper = () => {
{
onGetSuggestedQuestions: responseItemId => fetchSuggestedQuestions(responseItemId, appSourceType, appId),
onConversationComplete: isHistoryConversation ? undefined : handleNewConversationCompleted,
isPublicAPI: appSourceType === AppSourceType.webApp,
isPublicAPI: !isInstalledApp,
},
)
}, [inputsForms, currentConversationId, currentConversationInputs, newConversationInputs, chatList, handleSend, appSourceType, appId, isHistoryConversation, handleNewConversationCompleted])
}, [chatList, handleNewConversationCompleted, handleSend, currentConversationId, currentConversationInputs, newConversationInputs, isInstalledApp, appId])
const doRegenerate = useCallback((chatItem: ChatItem, editedQuestion?: { message: string, files?: FileEntity[] }) => {
const question = editedQuestion ? chatItem : chatList.find(item => item.id === chatItem.parentMessageId)!
@@ -200,27 +160,12 @@ const ChatWrapper = () => {
doSend(editedQuestion ? editedQuestion.message : question.content, editedQuestion ? editedQuestion.files : question.message_files, true, isValidGeneratedAnswer(parentAnswer) ? parentAnswer : null)
}, [chatList, doSend])
const doSwitchSibling = useCallback((siblingMessageId: string) => {
handleSwitchSibling(siblingMessageId, {
onGetSuggestedQuestions: responseItemId => fetchSuggestedQuestions(responseItemId, appSourceType, appId),
onConversationComplete: currentConversationId ? undefined : handleNewConversationCompleted,
isPublicAPI: appSourceType === AppSourceType.webApp,
})
}, [handleSwitchSibling, currentConversationId, handleNewConversationCompleted, appSourceType, appId])
const messageList = useMemo(() => {
if (currentConversationId || chatList.length > 1)
return chatList
// Without messages we are in the welcome screen, so hide the opening statement from chatlist
return chatList.filter(item => !item.isOpeningStatement)
}, [chatList, currentConversationId])
const handleSubmitHumanInputForm = useCallback(async (formToken: string, formData: any) => {
if (isInstalledApp)
await submitHumanInputFormService(formToken, formData)
else
await submitHumanInputForm(formToken, formData)
}, [isInstalledApp])
}, [chatList])
const [collapsed, setCollapsed] = useState(!!currentConversationId)
@@ -329,7 +274,6 @@ const ChatWrapper = () => {
inputsForm={inputsForms}
onRegenerate={doRegenerate}
onStopResponding={handleStop}
onHumanInputFormSubmit={handleSubmitHumanInputForm}
chatNode={(
<>
{chatNode}
@@ -342,7 +286,7 @@ const ChatWrapper = () => {
answerIcon={answerIcon}
hideProcessDetail
themeBuilder={themeBuilder}
switchSibling={doSwitchSibling}
switchSibling={siblingMessageId => setTargetMessageId(siblingMessageId)}
inputDisabled={inputDisabled}
sidebarCollapseState={sidebarCollapseState}
questionIcon={
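
The resume-on-load effect removed above (the block under "Resume paused workflows when chat history is loaded") walks the chat tree depth-first and keeps overwriting a lastPausedNode variable, so the final assignment wins. A minimal standalone sketch of that traversal, where ChatNode is a pared-down stand-in for the ChatItemInTree type the hunk actually uses:

type ChatNode = {
  id: string
  isAnswer?: boolean
  workflow_run_id?: string
  humanInputFormDataList?: unknown[]
  children?: ChatNode[]
}

// Children are visited before the node itself, and every match overwrites the
// previous one, so the last qualifying answer node assigned is the one returned.
function findLastPausedNode(tree: ChatNode[]): ChatNode | undefined {
  let lastPausedNode: ChatNode | undefined
  const walk = (nodes: ChatNode[]) => {
    nodes.forEach((node) => {
      if (node.children?.length)
        walk(node.children)
      if (node.isAnswer && node.workflow_run_id && node.humanInputFormDataList?.length)
        lastPausedNode = node
    })
  }
  walk(tree)
  return lastPausedNode
}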

View File

@@ -1,4 +1,3 @@
import type { ExtraContent } from '../chat/type'
import type {
Callback,
ChatConfig,
@@ -10,7 +9,6 @@ import type {
AppData,
ConversationItem,
} from '@/models/share'
import type { HumanInputFilledFormData, HumanInputFormData } from '@/types/workflow'
import { useLocalStorageState } from 'ahooks'
import { noop } from 'es-toolkit/function'
import { produce } from 'immer'
@@ -59,24 +57,6 @@ function getFormattedChatList(messages: any[]) {
parentMessageId: item.parent_message_id || undefined,
})
const answerFiles = item.message_files?.filter((file: any) => file.belongs_to === 'assistant') || []
const humanInputFormDataList: HumanInputFormData[] = []
const humanInputFilledFormDataList: HumanInputFilledFormData[] = []
let workflowRunId = ''
if (item.status === 'paused') {
item.extra_contents?.forEach((content: ExtraContent) => {
if (content.type === 'human_input' && !content.submitted) {
humanInputFormDataList.push(content.form_definition)
workflowRunId = content.workflow_run_id
}
})
}
else if (item.status === 'normal') {
item.extra_contents?.forEach((content: ExtraContent) => {
if (content.type === 'human_input' && content.submitted) {
humanInputFilledFormDataList.push(content.form_submission_data)
}
})
}
newChatList.push({
id: item.id,
content: item.answer,
@@ -86,9 +66,6 @@ function getFormattedChatList(messages: any[]) {
citation: item.retriever_resources,
message_files: getProcessedFilesFromResponse(answerFiles.map((item: any) => ({ ...item, related_id: item.id, upload_file_id: item.upload_file_id }))),
parentMessageId: `question-${item.id}`,
humanInputFormDataList,
humanInputFilledFormDataList,
workflow_run_id: workflowRunId,
})
})
return newChatList

View File

@@ -1,54 +0,0 @@
import type { ContentItemProps } from './type'
import * as React from 'react'
import { useMemo } from 'react'
import { Markdown } from '@/app/components/base/markdown'
import Textarea from '@/app/components/base/textarea'
const ContentItem = ({
content,
formInputFields,
inputs,
onInputChange,
}: ContentItemProps) => {
const isInputField = (field: string) => {
const outputVarRegex = /\{\{#\$output\.[^#]+#\}\}/
return outputVarRegex.test(field)
}
const extractFieldName = (str: string): string => {
const outputVarRegex = /\{\{#\$output\.([^#]+)#\}\}/
const match = str.match(outputVarRegex)
return match ? match[1] : ''
}
const fieldName = useMemo(() => {
return extractFieldName(content)
}, [content])
const formInputField = useMemo(() => {
return formInputFields.find(field => field.output_variable_name === fieldName)
}, [formInputFields, fieldName])
if (!isInputField(content)) {
return (
<Markdown content={content} />
)
}
if (!formInputField)
return null
return (
<div className="py-3">
{formInputField.type === 'paragraph' && (
<Textarea
className="h-[104px] sm:text-xs"
value={inputs[fieldName]}
onChange={(e) => { onInputChange(fieldName, e.target.value) }}
/>
)}
</div>
)
}
export default React.memo(ContentItem)
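
The deleted ContentItem above decides between rendering Markdown and rendering a form control by testing each content chunk against the {{#$output.<name>#}} placeholder pattern. A small illustration using the same regex (the sample strings are invented):

const OUTPUT_VAR = /\{\{#\$output\.([^#]+)#\}\}/

// Returns the captured output variable name, or '' for a plain text chunk.
const extractFieldName = (chunk: string): string => {
  const match = chunk.match(OUTPUT_VAR)
  return match ? match[1] : ''
}

extractFieldName('{{#$output.feedback#}}') // 'feedback' -> rendered as an input field
extractFieldName('Please review:') // '' -> rendered as Markdown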

View File

@@ -1,64 +0,0 @@
import { RiArrowDownSLine, RiArrowRightSLine } from '@remixicon/react'
import { useCallback, useState } from 'react'
import BlockIcon from '@/app/components/workflow/block-icon'
import { BlockEnum } from '@/app/components/workflow/types'
import { cn } from '@/utils/classnames'
type ContentWrapperProps = {
nodeTitle: string
children: React.ReactNode
showExpandIcon?: boolean
className?: string
expanded?: boolean
}
const ContentWrapper = ({
nodeTitle,
children,
showExpandIcon = false,
className,
expanded = false,
}: ContentWrapperProps) => {
const [isExpanded, setIsExpanded] = useState(expanded)
const handleToggleExpand = useCallback(() => {
setIsExpanded(!isExpanded)
}, [isExpanded])
return (
<div className={cn('rounded-2xl border-[0.5px] border-components-panel-border bg-background-section p-2 shadow-md', className)}>
<div className="flex items-center gap-2 p-2">
{/* node icon */}
<BlockIcon type={BlockEnum.HumanInput} className="shrink-0" />
{/* node name */}
<div
className="system-sm-semibold-uppercase grow truncate text-text-primary"
title={nodeTitle}
>
{nodeTitle}
</div>
{showExpandIcon && (
<div className="shrink-0 cursor-pointer" onClick={handleToggleExpand}>
{
isExpanded
? (
<RiArrowDownSLine className="size-4" />
)
: (
<RiArrowRightSLine className="size-4" />
)
}
</div>
)}
</div>
{(!showExpandIcon || isExpanded) && (
<div className="px-2 py-1">
{/* human input form content */}
{children}
</div>
)}
</div>
)
}
export default ContentWrapper

View File

@@ -1,30 +0,0 @@
import type { ExecutedAction as ExecutedActionType } from './type'
import { memo } from 'react'
import { Trans } from 'react-i18next'
import Divider from '@/app/components/base/divider'
import { TriggerAll } from '@/app/components/base/icons/src/vender/workflow'
type ExecutedActionProps = {
executedAction: ExecutedActionType
}
const ExecutedAction = ({
executedAction,
}: ExecutedActionProps) => {
return (
<div className="flex flex-col gap-y-1 py-1">
<Divider className="mb-2 mt-1 w-[30px]" />
<div className="system-xs-regular flex items-center gap-x-1 text-text-tertiary">
<TriggerAll className="size-3.5 shrink-0" />
<Trans
i18nKey="nodes.humanInput.userActions.triggered"
ns="workflow"
components={{ strong: <span className="system-xs-medium text-text-secondary"></span> }}
values={{ actionName: executedAction.id }}
/>
</div>
</div>
)
}
export default memo(ExecutedAction)
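
The Trans element above relies on react-i18next component interpolation: the translation string carries a named tag that is swapped for the element supplied in components, while values fills the {{actionName}} placeholder. A hypothetical translation entry showing the mechanism (the real string lives in the workflow namespace and is not part of this diff):

// en translation (hypothetical): "Triggered <strong>{{actionName}}</strong>"
// With components={{ strong: <span className="system-xs-medium ..." /> }} and
// values={{ actionName: 'approve' }}, the rendered output is:
// Triggered <span class="system-xs-medium ...">approve</span>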

View File

@@ -1,46 +0,0 @@
'use client'
import { RiAlertFill, RiTimeLine } from '@remixicon/react'
import { useTranslation } from 'react-i18next'
import { useLocale } from '@/context/i18n'
import { cn } from '@/utils/classnames'
import { getRelativeTime, isRelativeTimeSameOrAfter } from './utils'
type ExpirationTimeProps = {
expirationTime: number
}
const ExpirationTime = ({
expirationTime,
}: ExpirationTimeProps) => {
const { t } = useTranslation()
const locale = useLocale()
const relativeTime = getRelativeTime(expirationTime, locale)
const isSameOrAfter = isRelativeTimeSameOrAfter(expirationTime)
return (
<div
className={cn(
'system-xs-regular mt-1 flex items-center gap-x-1 text-text-tertiary',
!isSameOrAfter && 'text-text-warning',
)}
>
{
isSameOrAfter
? (
<>
<RiTimeLine className="size-3.5" />
<span>{t('humanInput.expirationTimeNowOrFuture', { relativeTime, ns: 'share' })}</span>
</>
)
: (
<>
<RiAlertFill className="size-3.5" />
<span>{t('humanInput.expiredTip', { ns: 'share' })}</span>
</>
)
}
</div>
)
}
export default ExpirationTime

View File

@@ -1,61 +0,0 @@
'use client'
import type { HumanInputFormProps } from './type'
import type { ButtonProps } from '@/app/components/base/button'
import type { UserAction } from '@/app/components/workflow/nodes/human-input/types'
import * as React from 'react'
import { useCallback, useState } from 'react'
import Button from '@/app/components/base/button'
import ContentItem from './content-item'
import { getButtonStyle, initializeInputs, splitByOutputVar } from './utils'
const HumanInputForm = ({
formData,
onSubmit,
}: HumanInputFormProps) => {
const formToken = formData.form_token
const defaultInputs = initializeInputs(formData.inputs, formData.resolved_default_values || {})
const contentList = splitByOutputVar(formData.form_content)
const [inputs, setInputs] = useState(defaultInputs)
const [isSubmitting, setIsSubmitting] = useState(false)
const handleInputsChange = useCallback((name: string, value: string) => {
setInputs(prev => ({
...prev,
[name]: value,
}))
}, [])
const submit = async (formToken: string, actionID: string, inputs: Record<string, string>) => {
setIsSubmitting(true)
await onSubmit?.(formToken, { inputs, action: actionID })
setIsSubmitting(false)
}
return (
<>
{contentList.map((content, index) => (
<ContentItem
key={index}
content={content}
formInputFields={formData.inputs}
inputs={inputs}
onInputChange={handleInputsChange}
/>
))}
<div className="flex flex-wrap gap-1 py-1">
{formData.actions.map((action: UserAction) => (
<Button
key={action.id}
disabled={isSubmitting}
variant={getButtonStyle(action.button_style) as ButtonProps['variant']}
onClick={() => submit(formToken, action.id, inputs)}
>
{action.title}
</Button>
))}
</div>
</>
)
}
export default React.memo(HumanInputForm)
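
Each action button here submits the shared inputs map together with its own action id under the form token. A sketch of the resulting call, following the onSubmit signature declared alongside these components in type.ts (the values are illustrative):

// Per HumanInputFormProps: onSubmit?(formToken, { inputs, action })
await onSubmit?.('form-token-123', {
  inputs: { feedback: 'Looks good to me' }, // keyed by output_variable_name
  action: 'approve', // id of the clicked UserAction
})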

View File

@@ -1,16 +0,0 @@
import * as React from 'react'
import { Markdown } from '@/app/components/base/markdown'
type SubmittedContentProps = {
content: string
}
const SubmittedContent = ({
content,
}: SubmittedContentProps) => {
return (
<Markdown content={content} />
)
}
export default React.memo(SubmittedContent)

View File

@@ -1,25 +0,0 @@
import type { SubmittedHumanInputContentProps } from './type'
import { useMemo } from 'react'
import ExecutedAction from './executed-action'
import SubmittedContent from './submitted-content'
export const SubmittedHumanInputContent = ({
formData,
}: SubmittedHumanInputContentProps) => {
const { rendered_content, action_id, action_text } = formData
const executedAction = useMemo(() => {
return {
id: action_id,
title: action_text,
}
}, [action_id, action_text])
return (
<>
<SubmittedContent content={rendered_content} />
{/* Executed Action */}
<ExecutedAction executedAction={executedAction} />
</>
)
}

View File

@@ -1,43 +0,0 @@
import { memo } from 'react'
import { Trans, useTranslation } from 'react-i18next'
import Divider from '@/app/components/base/divider'
import { useSelector as useAppSelector } from '@/context/app-context'
type TipsProps = {
showEmailTip: boolean
isEmailDebugMode: boolean
showDebugModeTip: boolean
}
const Tips = ({
showEmailTip,
isEmailDebugMode,
showDebugModeTip,
}: TipsProps) => {
const { t } = useTranslation()
const email = useAppSelector(s => s.userProfile.email)
return (
<>
<Divider className="!my-2 w-[30px]" />
<div className="space-y-1 pt-1">
{showEmailTip && !isEmailDebugMode && (
<div className="system-xs-regular text-text-secondary">{t('common.humanInputEmailTip', { ns: 'workflow' })}</div>
)}
{showEmailTip && isEmailDebugMode && (
<div className="system-xs-regular text-text-secondary">
<Trans
i18nKey="common.humanInputEmailTipInDebugMode"
ns="workflow"
components={{ email: <span className="system-xs-semibold"></span> }}
values={{ email }}
/>
</div>
)}
{showDebugModeTip && <div className="system-xs-medium text-text-warning">{t('common.humanInputWebappTip', { ns: 'workflow' })}</div>}
</div>
</>
)
}
export default memo(Tips)

View File

@@ -1,31 +0,0 @@
import type { FormInputItem } from '@/app/components/workflow/nodes/human-input/types'
import type { HumanInputFilledFormData, HumanInputFormData } from '@/types/workflow'
export type ExecutedAction = {
id: string
title: string
}
export type UnsubmittedHumanInputContentProps = {
formData: HumanInputFormData
showEmailTip?: boolean
isEmailDebugMode?: boolean
showDebugModeTip?: boolean
onSubmit?: (formToken: string, data: { inputs: Record<string, string>, action: string }) => Promise<void>
}
export type SubmittedHumanInputContentProps = {
formData: HumanInputFilledFormData
}
export type HumanInputFormProps = {
formData: HumanInputFormData
onSubmit?: (formToken: string, data: { inputs: Record<string, string>, action: string }) => Promise<void>
}
export type ContentItemProps = {
content: string
formInputFields: FormInputItem[]
inputs: Record<string, string>
onInputChange: (name: string, value: string) => void
}

View File

@@ -1,36 +0,0 @@
import type { UnsubmittedHumanInputContentProps } from './type'
import ExpirationTime from './expiration-time'
import HumanInputForm from './human-input-form'
import Tips from './tips'
export const UnsubmittedHumanInputContent = ({
formData,
showEmailTip = false,
isEmailDebugMode = false,
showDebugModeTip = false,
onSubmit,
}: UnsubmittedHumanInputContentProps) => {
const { expiration_time } = formData
return (
<>
{/* Form */}
<HumanInputForm
formData={formData}
onSubmit={onSubmit}
/>
{/* Tips */}
{(showEmailTip || showDebugModeTip) && (
<Tips
showEmailTip={showEmailTip}
isEmailDebugMode={isEmailDebugMode}
showDebugModeTip={showDebugModeTip}
/>
)}
{/* Expiration Time */}
{typeof expiration_time === 'number' && (
<ExpirationTime expirationTime={expiration_time * 1000} />
)}
</>
)
}

View File

@@ -1,64 +0,0 @@
import type { FormInputItem } from '@/app/components/workflow/nodes/human-input/types'
import type { Locale } from '@/i18n-config'
import dayjs from 'dayjs'
import isSameOrAfter from 'dayjs/plugin/isSameOrAfter'
import relativeTime from 'dayjs/plugin/relativeTime'
import utc from 'dayjs/plugin/utc'
import { UserActionButtonType } from '@/app/components/workflow/nodes/human-input/types'
import 'dayjs/locale/en'
import 'dayjs/locale/zh-cn'
import 'dayjs/locale/ja'
dayjs.extend(utc)
dayjs.extend(relativeTime)
dayjs.extend(isSameOrAfter)
export const getButtonStyle = (style: UserActionButtonType) => {
if (style === UserActionButtonType.Primary)
return 'primary'
if (style === UserActionButtonType.Default)
return 'secondary'
if (style === UserActionButtonType.Accent)
return 'secondary-accent'
if (style === UserActionButtonType.Ghost)
return 'ghost'
}
export const splitByOutputVar = (content: string): string[] => {
const outputVarRegex = /(\{\{#\$output\.[^#]+#\}\})/g
const parts = content.split(outputVarRegex)
return parts.filter(part => part.length > 0)
}
export const initializeInputs = (formInputs: FormInputItem[], defaultValues: Record<string, string> = {}) => {
const initialInputs: Record<string, any> = {}
formInputs.forEach((item) => {
if (item.type === 'text-input' || item.type === 'paragraph')
initialInputs[item.output_variable_name] = item.default.type === 'variable' ? defaultValues[item.output_variable_name] || '' : item.default.value
else
initialInputs[item.output_variable_name] = undefined
})
return initialInputs
}
const localeMap: Record<string, string> = {
'en-US': 'en',
'zh-Hans': 'zh-cn',
'ja-JP': 'ja',
}
export const getRelativeTime = (
utcTimestamp: string | number,
locale: Locale = 'en-US',
) => {
const dayjsLocale = localeMap[locale] ?? 'en'
return dayjs
.utc(utcTimestamp)
.locale(dayjsLocale)
.fromNow()
}
export const isRelativeTimeSameOrAfter = (utcTimestamp: string | number) => {
return dayjs.utc(utcTimestamp).isSameOrAfter(dayjs())
}
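
splitByOutputVar depends on String.prototype.split keeping capture-group matches in its result, which is how each placeholder survives as its own chunk for ContentItem to render; for example:

splitByOutputVar('Review: {{#$output.feedback#}} Thanks!')
// -> ['Review: ', '{{#$output.feedback#}}', ' Thanks!']
// The capturing group in the regex is what makes split() retain the
// delimiter chunks instead of discarding them.

On the time helpers: callers pass millisecond timestamps (the unsubmitted form view multiplies the backend's expiration_time, given in seconds, by 1000), since dayjs.utc(number) interprets a numeric argument as milliseconds.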

View File

@@ -1,32 +0,0 @@
import type { HumanInputFilledFormData } from '@/types/workflow'
import ContentWrapper from './human-input-content/content-wrapper'
import { SubmittedHumanInputContent } from './human-input-content/submitted'
type HumanInputFilledFormListProps = {
humanInputFilledFormDataList: HumanInputFilledFormData[]
}
const HumanInputFilledFormList = ({
humanInputFilledFormDataList,
}: HumanInputFilledFormListProps) => {
return (
<div className="mt-2 flex flex-col gap-y-2">
{
humanInputFilledFormDataList.map(formData => (
<ContentWrapper
key={formData.node_id}
nodeTitle={formData.node_title}
showExpandIcon
>
<SubmittedHumanInputContent
key={formData.node_id}
formData={formData}
/>
</ContentWrapper>
))
}
</div>
)
}
export default HumanInputFilledFormList

View File

@@ -1,70 +0,0 @@
import type { DeliveryMethod, HumanInputNodeType } from '@/app/components/workflow/nodes/human-input/types'
import type { Node } from '@/app/components/workflow/types'
import type { HumanInputFormData } from '@/types/workflow'
import { useMemo } from 'react'
import { DeliveryMethodType } from '@/app/components/workflow/nodes/human-input/types'
import ContentWrapper from './human-input-content/content-wrapper'
import { UnsubmittedHumanInputContent } from './human-input-content/unsubmitted'
type HumanInputFormListProps = {
humanInputFormDataList: HumanInputFormData[]
onHumanInputFormSubmit?: (formToken: string, formData: { inputs: Record<string, string>, action: string }) => Promise<void>
getHumanInputNodeData?: (nodeID: string) => Node<HumanInputNodeType> | undefined
}
const HumanInputFormList = ({
humanInputFormDataList,
onHumanInputFormSubmit,
getHumanInputNodeData,
}: HumanInputFormListProps) => {
const deliveryMethodsConfig = useMemo((): Record<string, { showEmailTip: boolean, isEmailDebugMode: boolean, showDebugModeTip: boolean }> => {
if (!humanInputFormDataList.length)
return {}
return humanInputFormDataList.reduce((acc, formData) => {
const deliveryMethodsConfig = getHumanInputNodeData?.(formData.node_id)?.data.delivery_methods || []
if (!deliveryMethodsConfig.length) {
acc[formData.node_id] = {
showEmailTip: false,
isEmailDebugMode: false,
showDebugModeTip: false,
}
return acc
}
const isWebappEnabled = deliveryMethodsConfig.some((method: DeliveryMethod) => method.type === DeliveryMethodType.WebApp && method.enabled)
const isEmailEnabled = deliveryMethodsConfig.some((method: DeliveryMethod) => method.type === DeliveryMethodType.Email && method.enabled)
const isEmailDebugMode = deliveryMethodsConfig.some((method: DeliveryMethod) => method.type === DeliveryMethodType.Email && method.config?.debug_mode)
acc[formData.node_id] = {
showEmailTip: isEmailEnabled,
isEmailDebugMode,
showDebugModeTip: !isWebappEnabled,
}
return acc
}, {} as Record<string, { showEmailTip: boolean, isEmailDebugMode: boolean, showDebugModeTip: boolean }>)
}, [getHumanInputNodeData, humanInputFormDataList])
const filteredHumanInputFormDataList = humanInputFormDataList.filter(formData => formData.display_in_ui)
return (
<div className="mt-2 flex flex-col gap-y-2">
{
filteredHumanInputFormDataList.map(formData => (
<ContentWrapper
key={formData.form_id}
nodeTitle={formData.node_title}
>
<UnsubmittedHumanInputContent
key={formData.form_id}
formData={formData}
showEmailTip={!!deliveryMethodsConfig[formData.node_id]?.showEmailTip}
isEmailDebugMode={!!deliveryMethodsConfig[formData.node_id]?.isEmailDebugMode}
showDebugModeTip={!!deliveryMethodsConfig[formData.node_id]?.showDebugModeTip}
onSubmit={onHumanInputFormSubmit}
/>
</ContentWrapper>
))
}
</div>
)
}
export default HumanInputFormList

View File

@@ -16,11 +16,8 @@ import LoadingAnim from '@/app/components/base/chat/chat/loading-anim'
import { FileList } from '@/app/components/base/file-uploader'
import { cn } from '@/utils/classnames'
import ContentSwitch from '../content-switch'
import { useChatContext } from '../context'
import AgentContent from './agent-content'
import BasicContent from './basic-content'
import HumanInputFilledFormList from './human-input-filled-form-list'
import HumanInputFormList from './human-input-form-list'
import More from './more'
import Operation from './operation'
import SuggestedQuestions from './suggested-questions'
@@ -39,8 +36,6 @@ type AnswerProps = {
appData?: AppData
noChatInput?: boolean
switchSibling?: (siblingMessageId: string) => void
hideAvatar?: boolean
onHumanInputFormSubmit?: (formToken: string, formData: any) => Promise<void>
}
const Answer: FC<AnswerProps> = ({
item,
@@ -55,8 +50,6 @@ const Answer: FC<AnswerProps> = ({
appData,
noChatInput,
switchSibling,
hideAvatar,
onHumanInputFormSubmit,
}) => {
const { t } = useTranslation()
const {
@@ -68,22 +61,13 @@ const Answer: FC<AnswerProps> = ({
workflowProcess,
allFiles,
message_files,
humanInputFormDataList,
humanInputFilledFormDataList,
} = item
const hasAgentThoughts = !!agent_thoughts?.length
const hasHumanInputs = !!humanInputFormDataList?.length || !!humanInputFilledFormDataList?.length
const [containerWidth, setContainerWidth] = useState(0)
const [contentWidth, setContentWidth] = useState(0)
const [humanInputFormContainerWidth, setHumanInputFormContainerWidth] = useState(0)
const containerRef = useRef<HTMLDivElement>(null)
const contentRef = useRef<HTMLDivElement>(null)
const humanInputFormContainerRef = useRef<HTMLDivElement>(null)
const {
getHumanInputNodeData,
} = useChatContext()
const getContainerWidth = () => {
if (containerRef.current)
@@ -103,23 +87,12 @@ const Answer: FC<AnswerProps> = ({
getContentWidth()
}, [responding])
const getHumanInputFormContainerWidth = () => {
if (humanInputFormContainerRef.current)
setHumanInputFormContainerWidth(humanInputFormContainerRef.current?.clientWidth)
}
useEffect(() => {
if (hasHumanInputs)
getHumanInputFormContainerWidth()
}, [hasHumanInputs])
// Recalculate contentWidth when content changes (e.g., SVG preview/source toggle)
useEffect(() => {
if (!containerRef.current)
return
const resizeObserver = new ResizeObserver(() => {
getContentWidth()
getHumanInputFormContainerWidth()
})
resizeObserver.observe(containerRef.current)
return () => {
@@ -142,285 +115,115 @@ const Answer: FC<AnswerProps> = ({
return (
<div className="mb-2 flex last:mb-0">
{!hideAvatar && (
<div className="relative h-10 w-10 shrink-0">
{answerIcon || <AnswerIcon />}
{responding && (
<div className="absolute left-[-3px] top-[-3px] flex h-4 w-4 items-center rounded-full border-[0.5px] border-divider-subtle bg-background-section-burn pl-[6px] shadow-xs">
<LoadingAnim type="avatar" />
</div>
)}
</div>
)}
<div className="relative h-10 w-10 shrink-0">
{answerIcon || <AnswerIcon />}
{responding && (
<div className="absolute left-[-3px] top-[-3px] flex h-4 w-4 items-center rounded-full border-[0.5px] border-divider-subtle bg-background-section-burn pl-[6px] shadow-xs">
<LoadingAnim type="avatar" />
</div>
)}
</div>
<div className="chat-answer-container group ml-4 w-0 grow pb-4" ref={containerRef}>
{/* Block 1: Workflow Process + Human Input Forms */}
{hasHumanInputs && (
<div className={cn('group relative pr-10', chatAnswerContainerInner)}>
<div
ref={humanInputFormContainerRef}
className={cn('body-lg-regular relative inline-block w-full max-w-full rounded-2xl bg-chat-bubble-bg px-4 py-3 text-text-primary')}
>
{
!responding && contentIsEmpty && !hasAgentThoughts && (
<Operation
hasWorkflowProcess={!!workflowProcess}
maxSize={containerWidth - humanInputFormContainerWidth - 4}
contentWidth={humanInputFormContainerWidth}
item={item}
question={question}
index={index}
showPromptLog={showPromptLog}
noChatInput={noChatInput}
/>
)
}
{/** Render workflow process */}
{
workflowProcess && (
<WorkflowProcessItem
data={workflowProcess}
item={item}
hideProcessDetail={hideProcessDetail}
readonly={hideProcessDetail && appData ? !appData.site.show_workflow_steps : undefined}
/>
)
}
{
humanInputFormDataList && humanInputFormDataList.length > 0 && (
<HumanInputFormList
humanInputFormDataList={humanInputFormDataList}
onHumanInputFormSubmit={onHumanInputFormSubmit}
getHumanInputNodeData={getHumanInputNodeData}
/>
)
}
{
humanInputFilledFormDataList && humanInputFilledFormDataList.length > 0 && (
<HumanInputFilledFormList
humanInputFilledFormDataList={humanInputFilledFormDataList}
/>
)
}
{
typeof item.siblingCount === 'number'
&& item.siblingCount > 1
&& !responding
&& contentIsEmpty
&& !hasAgentThoughts
&& (
<ContentSwitch
count={item.siblingCount}
currentIndex={item.siblingIndex}
prevDisabled={!item.prevSibling}
nextDisabled={!item.nextSibling}
switchSibling={handleSwitchSibling}
/>
)
}
</div>
<div className={cn('group relative pr-10', chatAnswerContainerInner)}>
<div
ref={contentRef}
className={cn('body-lg-regular relative inline-block max-w-full rounded-2xl bg-chat-bubble-bg px-4 py-3 text-text-primary', workflowProcess && 'w-full')}
>
{
!responding && (
<Operation
hasWorkflowProcess={!!workflowProcess}
maxSize={containerWidth - contentWidth - 4}
contentWidth={contentWidth}
item={item}
question={question}
index={index}
showPromptLog={showPromptLog}
noChatInput={noChatInput}
/>
)
}
{/** Render workflow process */}
{
workflowProcess && (
<WorkflowProcessItem
data={workflowProcess}
item={item}
hideProcessDetail={hideProcessDetail}
readonly={hideProcessDetail && appData ? !appData.site?.show_workflow_steps : undefined}
/>
)
}
{
responding && contentIsEmpty && !hasAgentThoughts && (
<div className="flex h-5 w-6 items-center justify-center">
<LoadingAnim type="text" />
</div>
)
}
{
!contentIsEmpty && !hasAgentThoughts && (
<BasicContent item={item} />
)
}
{
(hasAgentThoughts) && (
<AgentContent
item={item}
responding={responding}
content={content}
/>
)
}
{
!!allFiles?.length && (
<FileList
className="my-1"
files={allFiles}
showDeleteAction={false}
showDownloadAction
canPreview
/>
)
}
{
!!message_files?.length && (
<FileList
className="my-1"
files={message_files}
showDeleteAction={false}
showDownloadAction
canPreview
/>
)
}
{
annotation?.id && annotation.authorName && (
<EditTitle
className="mt-1"
title={t('editBy', { ns: 'appAnnotation', author: annotation.authorName })}
/>
)
}
<SuggestedQuestions item={item} />
{
!!citation?.length && !responding && (
<Citation data={citation} showHitInfo={config?.supportCitationHitInfo} />
)
}
{
!!(item.siblingCount && item.siblingCount > 1 && item.siblingIndex !== undefined) && (
<ContentSwitch
count={item.siblingCount}
currentIndex={item.siblingIndex}
prevDisabled={!item.prevSibling}
nextDisabled={!item.nextSibling}
switchSibling={handleSwitchSibling}
/>
)
}
</div>
)}
{/* Block 2: Response Content (when human inputs exist) */}
{hasHumanInputs && (responding || !contentIsEmpty || hasAgentThoughts) && (
<div className={cn('group relative mt-2 pr-10', chatAnswerContainerInner)}>
<div className="absolute -top-2 left-6 h-3 w-0.5 bg-chat-answer-human-input-form-divider-bg" />
<div
ref={contentRef}
className="body-lg-regular relative inline-block w-full max-w-full rounded-2xl bg-chat-bubble-bg px-4 py-3 text-text-primary"
>
{
!responding && (
<Operation
hasWorkflowProcess={!!workflowProcess}
maxSize={containerWidth - contentWidth - 4}
contentWidth={contentWidth}
item={item}
question={question}
index={index}
showPromptLog={showPromptLog}
noChatInput={noChatInput}
/>
)
}
{
responding && contentIsEmpty && !hasAgentThoughts && (
<div className="flex h-5 w-6 items-center justify-center">
<LoadingAnim type="text" />
</div>
)
}
{
!contentIsEmpty && !hasAgentThoughts && (
<BasicContent item={item} />
)
}
{
hasAgentThoughts && (
<AgentContent
item={item}
responding={responding}
content={content}
/>
)
}
{
!!allFiles?.length && (
<FileList
className="my-1"
files={allFiles}
showDeleteAction={false}
showDownloadAction
canPreview
/>
)
}
{
!!message_files?.length && (
<FileList
className="my-1"
files={message_files}
showDeleteAction={false}
showDownloadAction
canPreview
/>
)
}
{
annotation?.id && annotation.authorName && (
<EditTitle
className="mt-1"
title={t('editBy', { ns: 'appAnnotation', author: annotation.authorName })}
/>
)
}
<SuggestedQuestions item={item} />
{
!!citation?.length && !responding && (
<Citation data={citation} showHitInfo={config?.supportCitationHitInfo} />
)
}
{
typeof item.siblingCount === 'number'
&& item.siblingCount > 1
&& (
<ContentSwitch
count={item.siblingCount}
currentIndex={item.siblingIndex}
prevDisabled={!item.prevSibling}
nextDisabled={!item.nextSibling}
switchSibling={handleSwitchSibling}
/>
)
}
</div>
</div>
)}
{/* Original single block layout (when no human inputs) */}
{!hasHumanInputs && (
<div className={cn('group relative pr-10', chatAnswerContainerInner)}>
<div
ref={contentRef}
className={cn('body-lg-regular relative inline-block max-w-full rounded-2xl bg-chat-bubble-bg px-4 py-3 text-text-primary', workflowProcess && 'w-full')}
>
{
!responding && (
<Operation
hasWorkflowProcess={!!workflowProcess}
maxSize={containerWidth - contentWidth - 4}
contentWidth={contentWidth}
item={item}
question={question}
index={index}
showPromptLog={showPromptLog}
noChatInput={noChatInput}
/>
)
}
{/** Render workflow process */}
{
workflowProcess && (
<WorkflowProcessItem
data={workflowProcess}
item={item}
hideProcessDetail={hideProcessDetail}
readonly={hideProcessDetail && appData ? !appData.site?.show_workflow_steps : undefined}
/>
)
}
{
responding && contentIsEmpty && !hasAgentThoughts && (
<div className="flex h-5 w-6 items-center justify-center">
<LoadingAnim type="text" />
</div>
)
}
{
!contentIsEmpty && !hasAgentThoughts && (
<BasicContent item={item} />
)
}
{
hasAgentThoughts && (
<AgentContent
item={item}
responding={responding}
content={content}
/>
)
}
{
!!allFiles?.length && (
<FileList
className="my-1"
files={allFiles}
showDeleteAction={false}
showDownloadAction
canPreview
/>
)
}
{
!!message_files?.length && (
<FileList
className="my-1"
files={message_files}
showDeleteAction={false}
showDownloadAction
canPreview
/>
)
}
{
annotation?.id && annotation.authorName && (
<EditTitle
className="mt-1"
title={t('editBy', { ns: 'appAnnotation', author: annotation.authorName })}
/>
)
}
<SuggestedQuestions item={item} />
{
!!citation?.length && !responding && (
<Citation data={citation} showHitInfo={config?.supportCitationHitInfo} />
)
}
{
typeof item.siblingCount === 'number'
&& item.siblingCount > 1 && (
<ContentSwitch
count={item.siblingCount}
currentIndex={item.siblingIndex}
prevDisabled={!item.prevSibling}
nextDisabled={!item.nextSibling}
switchSibling={handleSwitchSibling}
/>
)
}
</div>
</div>
)}
</div>
<More more={more} />
</div>
</div>

View File

@@ -69,7 +69,6 @@ const Operation: FC<OperationProps> = ({
feedback,
adminFeedback,
agent_thoughts,
humanInputFormDataList,
} = item
const [userLocalFeedback, setUserLocalFeedback] = useState(feedback)
const [adminLocalFeedback, setAdminLocalFeedback] = useState(adminFeedback)
@@ -187,7 +186,7 @@ const Operation: FC<OperationProps> = ({
)}
style={(!hasWorkflowProcess && positionRight) ? { left: contentWidth + 8 } : {}}
>
{shouldShowUserFeedbackBar && !humanInputFormDataList?.length && (
{shouldShowUserFeedbackBar && (
<div className={cn(
'ml-1 items-center gap-0.5 rounded-[10px] border-[0.5px] border-components-actionbar-border bg-components-actionbar-bg p-0.5 shadow-md backdrop-blur-sm',
hasUserFeedback ? 'flex' : 'hidden group-hover:flex',
@@ -227,7 +226,7 @@ const Operation: FC<OperationProps> = ({
)}
</div>
)}
{shouldShowAdminFeedbackBar && !humanInputFormDataList?.length && (
{shouldShowAdminFeedbackBar && (
<div className={cn(
'ml-1 items-center gap-0.5 rounded-[10px] border-[0.5px] border-components-actionbar-border bg-components-actionbar-bg p-0.5 shadow-md backdrop-blur-sm',
(hasAdminFeedback || hasUserFeedback) ? 'flex' : 'hidden group-hover:flex',
@@ -306,28 +305,26 @@ const Operation: FC<OperationProps> = ({
)}
{!isOpeningStatement && (
<div className="ml-1 hidden items-center gap-0.5 rounded-[10px] border-[0.5px] border-components-actionbar-border bg-components-actionbar-bg p-0.5 shadow-md backdrop-blur-sm group-hover:flex">
{(config?.text_to_speech?.enabled && !humanInputFormDataList?.length) && (
{(config?.text_to_speech?.enabled) && (
<NewAudioButton
id={id}
value={content}
voice={config?.text_to_speech?.voice}
/>
)}
{!humanInputFormDataList?.length && (
<ActionButton onClick={() => {
copy(content)
Toast.notify({ type: 'success', message: t('actionMsg.copySuccessfully', { ns: 'common' }) })
}}
>
<RiClipboardLine className="h-4 w-4" />
</ActionButton>
)}
<ActionButton onClick={() => {
copy(content)
Toast.notify({ type: 'success', message: t('actionMsg.copySuccessfully', { ns: 'common' }) })
}}
>
<RiClipboardLine className="h-4 w-4" />
</ActionButton>
{!noChatInput && (
<ActionButton onClick={() => onRegenerate?.(item)}>
<RiResetLeftLine className="h-4 w-4" />
</ActionButton>
)}
{config?.supportAnnotation && config.annotation_reply?.enabled && !humanInputFormDataList?.length && (
{(config?.supportAnnotation && config.annotation_reply?.enabled) && (
<AnnotationCtrlButton
appId={config?.appId || ''}
messageId={id}

View File

@@ -3,7 +3,6 @@ import {
RiArrowRightSLine,
RiErrorWarningFill,
RiLoader2Line,
RiPauseCircleFill,
} from '@remixicon/react'
import {
useEffect,
@@ -35,8 +34,6 @@ const WorkflowProcessItem = ({
const running = data.status === WorkflowRunningStatus.Running
const succeeded = data.status === WorkflowRunningStatus.Succeeded
const failed = data.status === WorkflowRunningStatus.Failed || data.status === WorkflowRunningStatus.Stopped
const paused = data.status === WorkflowRunningStatus.Paused
const latestNode = data.tracing[data.tracing.length - 1]
useEffect(() => {
setCollapse(!expand)
@@ -53,10 +50,7 @@ const WorkflowProcessItem = ({
running && !collapse && 'bg-background-section-burn',
succeeded && !collapse && 'bg-state-success-hover',
failed && !collapse && 'bg-state-destructive-hover',
paused && !collapse && 'bg-state-warning-hover',
collapse && !failed && !paused && 'bg-workflow-process-bg',
collapse && paused && 'bg-workflow-process-paused-bg',
collapse && failed && 'bg-workflow-process-failed-bg',
collapse && 'bg-workflow-process-bg',
)}
>
<div
@@ -78,13 +72,8 @@ const WorkflowProcessItem = ({
<RiErrorWarningFill className="mr-1 h-3.5 w-3.5 shrink-0 text-text-destructive" />
)
}
{
paused && (
<RiPauseCircleFill className="mr-1 h-3.5 w-3.5 shrink-0 text-text-warning-secondary" />
)
}
<div className={cn('system-xs-medium text-text-secondary', !collapse && 'grow')}>
{!collapse ? t('common.workflowProcess', { ns: 'workflow' }) : latestNode?.title}
{t('common.workflowProcess', { ns: 'workflow' })}
</div>
<RiArrowRightSLine className={cn('ml-1 h-4 w-4 text-text-tertiary', !collapse && 'rotate-90')} />
</div>

View File

@@ -16,8 +16,7 @@ export type ChatContextValue = Pick<ChatProps, 'config'
| 'onAnnotationAdded'
| 'onAnnotationRemoved'
| 'disableFeedback'
| 'onFeedback'
| 'getHumanInputNodeData'> & {
| 'onFeedback'> & {
readonly?: boolean
}
@@ -46,7 +45,6 @@ export const ChatContextProvider = ({
onAnnotationRemoved,
disableFeedback,
onFeedback,
getHumanInputNodeData,
}: ChatContextProviderProps) => {
return (
<ChatContext.Provider value={{
@@ -64,7 +62,6 @@ export const ChatContextProvider = ({
onAnnotationRemoved,
disableFeedback,
onFeedback,
getHumanInputNodeData,
}}
>
{children}

File diff suppressed because it is too large

View File

@@ -75,9 +75,6 @@ export type ChatProps = {
noSpacing?: boolean
inputDisabled?: boolean
sidebarCollapseState?: boolean
hideAvatar?: boolean
onHumanInputFormSubmit?: (formToken: string, formData: any) => Promise<void>
getHumanInputNodeData?: (nodeID: string) => any
}
const Chat: FC<ChatProps> = ({
@@ -119,9 +116,6 @@ const Chat: FC<ChatProps> = ({
noSpacing,
inputDisabled,
sidebarCollapseState,
hideAvatar,
onHumanInputFormSubmit,
getHumanInputNodeData,
}) => {
const { t } = useTranslation()
const { currentLogItem, setCurrentLogItem, showPromptLogModal, setShowPromptLogModal, showAgentLogModal, setShowAgentLogModal } = useAppStore(useShallow(state => ({
@@ -271,7 +265,6 @@ const Chat: FC<ChatProps> = ({
onAnnotationRemoved={onAnnotationRemoved}
disableFeedback={disableFeedback}
onFeedback={onFeedback}
getHumanInputNodeData={getHumanInputNodeData}
>
<div className={cn('relative h-full', isTryApp && 'flex flex-col')}>
<div
@@ -302,8 +295,6 @@ const Chat: FC<ChatProps> = ({
hideProcessDetail={hideProcessDetail}
noChatInput={noChatInput}
switchSibling={switchSibling}
hideAvatar={hideAvatar}
onHumanInputFormSubmit={onHumanInputFormSubmit}
/>
)
}
@@ -315,7 +306,6 @@ const Chat: FC<ChatProps> = ({
theme={themeBuilder?.theme}
enableEdit={config?.questionEditEnable}
switchSibling={switchSibling}
hideAvatar={hideAvatar}
/>
)
})

View File

@@ -32,7 +32,6 @@ type QuestionProps = {
theme: Theme | null | undefined
enableEdit?: boolean
switchSibling?: (siblingMessageId: string) => void
hideAvatar?: boolean
}
const Question: FC<QuestionProps> = ({
@@ -41,7 +40,6 @@ const Question: FC<QuestionProps> = ({
theme,
enableEdit = true,
switchSibling,
hideAvatar,
}) => {
const { t } = useTranslation()
@@ -176,17 +174,15 @@ const Question: FC<QuestionProps> = ({
</div>
<div className="mt-1 h-[18px]" />
</div>
{!hideAvatar && (
<div className="h-10 w-10 shrink-0">
{
questionIcon || (
<div className="h-full w-full rounded-full border-[0.5px] border-black/5">
<User className="h-full w-full" />
</div>
)
}
</div>
)}
<div className="h-10 w-10 shrink-0">
{
questionIcon || (
<div className="h-full w-full rounded-full border-[0.5px] border-black/5">
<User className="h-full w-full" />
</div>
)
}
</div>
</div>
)
}

View File

@@ -2,11 +2,7 @@ import type { FileEntity } from '@/app/components/base/file-uploader/types'
import type { TypeWithI18N } from '@/app/components/header/account-setting/model-provider-page/declarations'
import type { InputVarType } from '@/app/components/workflow/types'
import type { Annotation, MessageRating } from '@/models/log'
import type {
FileResponse,
HumanInputFilledFormData,
HumanInputFormData,
} from '@/types/workflow'
import type { FileResponse } from '@/types/workflow'
export type MessageMore = {
time: string
@@ -68,19 +64,6 @@ export type CitationItem = {
word_count: number
}
export type ExtraContent
= {
type: 'human_input'
submitted: false
form_definition: HumanInputFormData
workflow_run_id: string
}
| {
type: 'human_input'
submitted: true
form_submission_data: HumanInputFilledFormData
}
export type IChatItem = {
id: string
content: string
@@ -121,10 +104,6 @@ export type IChatItem = {
siblingIndex?: number
prevSibling?: string
nextSibling?: string
// for human input
humanInputFormDataList?: HumanInputFormData[]
humanInputFilledFormDataList?: HumanInputFilledFormData[]
extra_contents?: ExtraContent[]
}
export type Metadata = {

View File

@@ -2,7 +2,6 @@ import type { FileEntity } from '../../file-uploader/types'
import type {
ChatConfig,
ChatItem,
ChatItemInTree,
OnSend,
} from '../types'
import { useCallback, useEffect, useMemo, useState } from 'react'
@@ -18,9 +17,7 @@ import {
fetchSuggestedQuestions,
getUrl,
stopChatMessageResponding,
submitHumanInputForm,
} from '@/service/share'
import { submitHumanInputForm as submitHumanInputFormService } from '@/service/workflow'
import { TransferMethod } from '@/types/app'
import { cn } from '@/utils/classnames'
import Avatar from '../../avatar'
@@ -73,9 +70,9 @@ const ChatWrapper = () => {
}, [appParams, currentConversationItem?.introduction])
const {
chatList,
setTargetMessageId,
handleSend,
handleStop,
handleSwitchSibling,
isResponding: respondingState,
suggestedQuestions,
} = useChat(
@@ -133,40 +130,6 @@ const ChatWrapper = () => {
setIsResponding(respondingState)
}, [respondingState, setIsResponding])
// Resume paused workflows when chat history is loaded
useEffect(() => {
if (!appPrevChatList || appPrevChatList.length === 0)
return
// Find the last answer item with workflow_run_id that needs resumption (DFS - find deepest first)
let lastPausedNode: ChatItemInTree | undefined
const findLastPausedWorkflow = (nodes: ChatItemInTree[]) => {
nodes.forEach((node) => {
// DFS: recurse to children first
if (node.children && node.children.length > 0)
findLastPausedWorkflow(node.children)
// Track the last node with humanInputFormDataList
if (node.isAnswer && node.workflow_run_id && node.humanInputFormDataList && node.humanInputFormDataList.length > 0)
lastPausedNode = node
})
}
findLastPausedWorkflow(appPrevChatList)
// Only resume the last paused workflow
if (lastPausedNode) {
handleSwitchSibling(
lastPausedNode.id,
{
onGetSuggestedQuestions: responseItemId => fetchSuggestedQuestions(responseItemId, appSourceType, appId),
onConversationComplete: currentConversationId ? undefined : handleNewConversationCompleted,
isPublicAPI: appSourceType === AppSourceType.webApp,
},
)
}
}, [])
const doSend: OnSend = useCallback((message, files, isRegenerate = false, parentAnswer: ChatItem | null = null) => {
const data: any = {
query: message,
@@ -184,7 +147,7 @@ const ChatWrapper = () => {
isPublicAPI: appSourceType === AppSourceType.webApp,
},
)
}, [currentConversationId, currentConversationInputs, newConversationInputs, chatList, handleSend, appSourceType, appId, handleNewConversationCompleted])
}, [currentConversationId, currentConversationInputs, newConversationInputs, chatList, handleSend, isInstalledApp, appId, handleNewConversationCompleted])
const doRegenerate = useCallback((chatItem: ChatItem, editedQuestion?: { message: string, files?: FileEntity[] }) => {
const question = editedQuestion ? chatItem : chatList.find(item => item.id === chatItem.parentMessageId)!
@@ -192,14 +155,6 @@ const ChatWrapper = () => {
doSend(editedQuestion ? editedQuestion.message : question.content, editedQuestion ? editedQuestion.files : question.message_files, true, isValidGeneratedAnswer(parentAnswer) ? parentAnswer : null)
}, [chatList, doSend])
const doSwitchSibling = useCallback((siblingMessageId: string) => {
handleSwitchSibling(siblingMessageId, {
onGetSuggestedQuestions: responseItemId => fetchSuggestedQuestions(responseItemId, appSourceType, appId),
onConversationComplete: currentConversationId ? undefined : handleNewConversationCompleted,
isPublicAPI: appSourceType === AppSourceType.webApp,
})
}, [handleSwitchSibling, appSourceType, appId, currentConversationId, handleNewConversationCompleted])
const messageList = useMemo(() => {
if (currentConversationId || chatList.length > 1)
return chatList
@@ -223,13 +178,6 @@ const ChatWrapper = () => {
}
}, [inputsForms.length, isMobile, currentConversationId, collapsed, allInputsHidden])
const handleSubmitHumanInputForm = useCallback(async (formToken: string, formData: any) => {
if (isInstalledApp)
await submitHumanInputFormService(formToken, formData)
else
await submitHumanInputForm(formToken, formData)
}, [isInstalledApp])
const welcome = useMemo(() => {
const welcomeMessage = chatList.find(item => item.isOpeningStatement)
if (respondingState)
@@ -275,7 +223,7 @@ const ChatWrapper = () => {
</div>
</div>
)
}, [chatList, respondingState, currentConversationId, collapsed, inputsForms.length, allInputsHidden, appData?.site, isMobile])
}, [appData?.site, chatList, collapsed, currentConversationId, inputsForms.length, respondingState, allInputsHidden])
const answerIcon = isDify()
? <LogoAvatar className="relative shrink-0" />
@@ -305,7 +253,6 @@ const ChatWrapper = () => {
inputsForm={inputsForms}
onRegenerate={doRegenerate}
onStopResponding={handleStop}
onHumanInputFormSubmit={handleSubmitHumanInputForm}
chatNode={(
<>
{chatNode}
@@ -319,7 +266,7 @@ const ChatWrapper = () => {
answerIcon={answerIcon}
hideProcessDetail
themeBuilder={themeBuilder}
switchSibling={doSwitchSibling}
switchSibling={siblingMessageId => setTargetMessageId(siblingMessageId)}
inputDisabled={inputDisabled}
questionIcon={
initUserVariables?.avatar_url

View File

@@ -5,7 +5,7 @@ import type {
ModelConfig,
VisionSettings,
} from '@/types/app'
import type { HumanInputFilledFormData, HumanInputFormData, NodeTracing } from '@/types/workflow'
import type { NodeTracing } from '@/types/workflow'
export type {
Inputs,
@@ -67,8 +67,6 @@ export type WorkflowProcess = {
expand?: boolean // for UI
resultText?: string
files?: FileEntity[]
humanInputFormDataList?: HumanInputFormData[]
humanInputFilledFormDataList?: HumanInputFilledFormData[]
}
export type ChatItem = IChatItem & {

View File

@@ -1,4 +1,3 @@
/* eslint-disable tailwindcss/classnames-order */
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import Effect from '.'
@@ -29,8 +28,8 @@ type Story = StoryObj<typeof meta>
export const Playground: Story = {
render: () => (
<div className="relative h-40 w-72 overflow-hidden rounded-2xl border border-divider-subtle bg-background-default-subtle">
<Effect className="top-6 left-8" />
<Effect className="top-14 right-10 bg-util-colors-purple-brand-purple-brand-500" />
<Effect className="left-8 top-6" />
<Effect className="bg-util-colors-purple-brand-purple-brand-500 right-10 top-14" />
<div className="absolute inset-x-0 bottom-4 flex justify-center text-xs text-text-secondary">
Accent glow
</div>

View File

@@ -1,6 +0,0 @@
<svg width="27" height="27" viewBox="0 0 27 27" fill="none" xmlns="http://www.w3.org/2000/svg">
<path fill-rule="evenodd" clip-rule="evenodd" d="M9.625 0C9.27992 0.000320434 8.93828 0.0686147 8.6196 0.200983C8.30091 0.333351 8.01142 0.527199 7.76766 0.771458C7.5239 1.01572 7.33064 1.3056 7.19893 1.62456C7.06721 1.94351 6.99962 2.28529 7 2.63037C6.99968 2.97541 7.06732 3.31714 7.19907 3.63603C7.33081 3.95493 7.52408 4.24476 7.76783 4.48897C8.01159 4.73317 8.30105 4.92698 8.61971 5.05932C8.93836 5.19165 9.27996 5.25993 9.625 5.26025H12.25V2.63037C12.2504 2.28529 12.1828 1.94351 12.0511 1.62456C11.9194 1.3056 11.7261 1.01572 11.4823 0.771458C11.2386 0.527199 10.9491 0.333351 10.6304 0.200983C10.3117 0.0686147 9.97008 0.000320434 9.625 0ZM9.625 7.01416H2.625C2.27996 7.01448 1.93836 7.08276 1.61971 7.21509C1.30106 7.34743 1.01159 7.54124 0.767835 7.78544C0.524081 8.02965 0.330814 8.31948 0.199069 8.63838C0.0673241 8.95728 -0.000319124 9.299 1.63476e-06 9.64404C-0.000383292 9.98912 0.0672126 10.3309 0.198929 10.6499C0.330645 10.9688 0.523902 11.2587 0.767662 11.503C1.01142 11.7472 1.30091 11.9411 1.6196 12.0734C1.93828 12.2058 2.27992 12.2741 2.625 12.2744H9.625C9.97008 12.2741 10.3117 12.2058 10.6304 12.0734C10.9491 11.9411 11.2386 11.7472 11.4823 11.503C11.7261 11.2587 11.9194 10.9688 12.0511 10.6499C12.1828 10.3309 12.2504 9.98912 12.25 9.64404C12.2503 9.299 12.1827 8.95728 12.0509 8.63838C11.9192 8.31948 11.7259 8.02965 11.4822 7.78544C11.2384 7.54124 10.9489 7.34743 10.6303 7.21509C10.3116 7.08276 9.97004 7.01448 9.625 7.01416Z" fill="#44BEDF"/>
<path fill-rule="evenodd" clip-rule="evenodd" d="M26.25 9.64404C26.2503 9.299 26.1827 8.95728 26.0509 8.63838C25.9192 8.31948 25.7259 8.02965 25.4822 7.78544C25.2384 7.54124 24.9489 7.34743 24.6303 7.21509C24.3116 7.08276 23.97 7.01448 23.625 7.01416C23.28 7.01448 22.9384 7.08276 22.6197 7.21509C22.3011 7.34743 22.0116 7.54124 21.7678 7.78544C21.5241 8.02965 21.3308 8.31948 21.1991 8.63838C21.0673 8.95728 20.9997 9.299 21 9.64404V12.2744H23.625C23.9701 12.2741 24.3117 12.2058 24.6304 12.0734C24.9491 11.9411 25.2386 11.7472 25.4823 11.503C25.7261 11.2587 25.9194 10.9688 26.0511 10.6499C26.1828 10.3309 26.2504 9.98912 26.25 9.64404ZM19.25 9.64404V2.63037C19.2504 2.28529 19.1828 1.94351 19.0511 1.62456C18.9194 1.3056 18.7261 1.01572 18.4823 0.771458C18.2386 0.527199 17.9491 0.333351 17.6304 0.200983C17.3117 0.0686147 16.9701 0.000320434 16.625 0C16.2799 0.000320434 15.9383 0.0686147 15.6196 0.200983C15.3009 0.333351 15.0114 0.527199 14.7677 0.771458C14.5239 1.01572 14.3306 1.3056 14.1989 1.62456C14.0672 1.94351 13.9996 2.28529 14 2.63037V9.64404C13.9996 9.98912 14.0672 10.3309 14.1989 10.6499C14.3306 10.9688 14.5239 11.2587 14.7677 11.503C15.0114 11.7472 15.3009 11.9411 15.6196 12.0734C15.9383 12.2058 16.2799 12.2741 16.625 12.2744C16.9701 12.2741 17.3117 12.2058 17.6304 12.0734C17.9491 11.9411 18.2386 11.7472 18.4823 11.503C18.7261 11.2587 18.9194 10.9688 19.0511 10.6499C19.1828 10.3309 19.2504 9.98912 19.25 9.64404Z" fill="#2EB67D"/>
<path fill-rule="evenodd" clip-rule="evenodd" d="M16.625 26.302C16.9701 26.3017 17.3117 26.2334 17.6304 26.101C17.9491 25.9686 18.2386 25.7748 18.4823 25.5305C18.7261 25.2863 18.9194 24.9964 19.0511 24.6774C19.1828 24.3585 19.2504 24.0167 19.25 23.6716C19.2503 23.3266 19.1827 22.9849 19.0509 22.666C18.9192 22.3471 18.7259 22.0572 18.4822 21.813C18.2384 21.5688 17.9489 21.375 17.6303 21.2427C17.3116 21.1103 16.97 21.0421 16.625 21.0417H14V23.6716C13.9996 24.0167 14.0672 24.3585 14.1989 24.6774C14.3306 24.9964 14.5239 25.2863 14.7677 25.5305C15.0114 25.7748 15.3009 25.9686 15.6196 26.101C15.9383 26.2334 16.2799 26.3017 16.625 26.302ZM16.625 19.2878H23.625C23.97 19.2875 24.3116 19.2192 24.6303 19.0869C24.9489 18.9546 25.2384 18.7608 25.4822 18.5166C25.7259 18.2723 25.9192 17.9825 26.0509 17.6636C26.1827 17.3447 26.2503 17.003 26.25 16.658C26.2504 16.3129 26.1828 15.9711 26.0511 15.6521C25.9194 15.3332 25.7261 15.0433 25.4823 14.799C25.2386 14.5548 24.9491 14.3609 24.6304 14.2286C24.3117 14.0962 23.9701 14.0279 23.625 14.0276H16.625C16.2799 14.0279 15.9383 14.0962 15.6196 14.2286C15.3009 14.3609 15.0114 14.5548 14.7677 14.799C14.5239 15.0433 14.3306 15.3332 14.1989 15.6521C14.0672 15.9711 13.9996 16.3129 14 16.658C13.9997 17.003 14.0673 17.3447 14.1991 17.6636C14.3308 17.9825 14.5241 18.2723 14.7678 18.5166C15.0116 18.7608 15.3011 18.9546 15.6197 19.0869C15.9384 19.2192 16.28 19.2875 16.625 19.2878Z" fill="#ECB22E"/>
<path fill-rule="evenodd" clip-rule="evenodd" d="M2.22485e-06 16.658C-0.000318534 17.003 0.0673247 17.3447 0.19907 17.6636C0.330815 17.9825 0.524081 18.2723 0.767835 18.5166C1.01159 18.7608 1.30106 18.9546 1.61971 19.0869C1.93836 19.2192 2.27996 19.2875 2.625 19.2878C2.97004 19.2875 3.31164 19.2192 3.63029 19.0869C3.94895 18.9546 4.23841 18.7608 4.48217 18.5166C4.72592 18.2723 4.91919 17.9825 5.05093 17.6636C5.18268 17.3447 5.25032 17.003 5.25 16.658V14.0276H2.625C2.27988 14.0279 1.9382 14.0962 1.61948 14.2286C1.30077 14.361 1.01126 14.5549 0.76749 14.7992C0.523723 15.0435 0.330477 15.3335 0.198789 15.6525C0.0671016 15.9715 -0.000446885 16.3128 2.22485e-06 16.658ZM7 16.658V23.6716C6.99962 24.0167 7.06721 24.3585 7.19893 24.6774C7.33064 24.9964 7.5239 25.2863 7.76766 25.5305C8.01142 25.7748 8.30091 25.9686 8.6196 26.101C8.93828 26.2334 9.27992 26.3017 9.625 26.302C9.97008 26.3017 10.3117 26.2334 10.6304 26.101C10.9491 25.9686 11.2386 25.7748 11.4823 25.5305C11.7261 25.2863 11.9194 24.9964 12.0511 24.6774C12.1828 24.3585 12.2504 24.0167 12.25 23.6716V16.6584C12.2504 16.3134 12.1828 15.9716 12.0511 15.6526C11.9194 15.3337 11.7261 15.0438 11.4823 14.7995C11.2386 14.5553 10.9491 14.3614 10.6304 14.2291C10.3117 14.0967 9.97008 14.0284 9.625 14.0281C9.27992 14.0284 8.93828 14.0967 8.6196 14.2291C8.30091 14.3614 8.01142 14.5553 7.76766 14.7995C7.5239 15.0438 7.33064 15.3337 7.19893 15.6526C7.06721 15.9716 6.99962 16.3129 7 16.658Z" fill="#E01E5A"/>
</svg>


View File

@@ -1,19 +0,0 @@
<svg width="28" height="28" viewBox="0 0 28 28" fill="none" xmlns="http://www.w3.org/2000/svg">
<g clip-path="url(#clip0_20307_29670)">
<path opacity="0.1" d="M9.33332 11.8067V20.4167C9.33204 21.432 9.57619 22.4327 10.045 23.3333H15.8783C16.2912 23.305 16.6807 23.1312 16.9776 22.8428C17.2745 22.5545 17.4596 22.1702 17.5 21.7583V10.5H17.1733H10.64C10.2934 10.5 9.96108 10.6377 9.71603 10.8827C9.47098 11.1278 9.33332 11.4601 9.33332 11.8067Z" fill="black"/>
<path opacity="0.1" d="M16.135 7H12.635C12.8805 7.78672 13.3726 8.47355 14.0386 8.9589C14.7047 9.44425 15.5093 9.70234 16.3333 9.695C16.7301 9.68885 17.1235 9.62197 17.5 9.49667V8.33C17.488 7.97504 17.3392 7.63847 17.0849 7.39061C16.8305 7.14276 16.4902 7.00281 16.135 7Z" fill="black"/>
<path d="M26.6817 10.5H20.8484L19.2267 11.8183V18.34C19.2846 19.4628 19.7715 20.5204 20.5867 21.2946C21.4019 22.0689 22.4833 22.5005 23.6075 22.5005C24.7318 22.5005 25.8131 22.0689 26.6283 21.2946C27.4436 20.5204 27.9304 19.4628 27.9884 18.34V11.8183C27.9899 11.6458 27.9572 11.4746 27.8923 11.3147C27.8273 11.1548 27.7313 11.0094 27.6098 10.8868C27.4883 10.7643 27.3437 10.667 27.1844 10.6006C27.0251 10.5342 26.8543 10.5 26.6817 10.5Z" fill="#5059C9"/>
<path d="M23.9167 9.33333C25.5275 9.33333 26.8333 8.0275 26.8333 6.41667C26.8333 4.80584 25.5275 3.5 23.9167 3.5C22.3058 3.5 21 4.80584 21 6.41667C21 8.0275 22.3058 9.33333 23.9167 9.33333Z" fill="#5059C9"/>
<path d="M10.64 10.5H20.86C21.2065 10.5 21.5389 10.6377 21.7839 10.8827C22.029 11.1278 22.1666 11.4601 22.1666 11.8067V20.4167C22.1666 22.1185 21.4906 23.7506 20.2872 24.9539C19.0839 26.1573 17.4518 26.8333 15.75 26.8333C14.0482 26.8333 12.4161 26.1573 11.2127 24.9539C10.0094 23.7506 9.33331 22.1185 9.33331 20.4167V11.8067C9.33331 11.4601 9.47098 11.1278 9.71603 10.8827C9.96107 10.6377 10.2934 10.5 10.64 10.5Z" fill="#7B83EB"/>
<path d="M16.3333 9.69477C18.4661 9.69477 20.195 7.96584 20.195 5.8331C20.195 3.70036 18.4661 1.97144 16.3333 1.97144C14.2006 1.97144 12.4717 3.70036 12.4717 5.8331C12.4717 7.96584 14.2006 9.69477 16.3333 9.69477Z" fill="#7B83EB"/>
<path opacity="0.5" d="M9.33332 11.8067V20.4167C9.33204 21.432 9.57619 22.4327 10.045 23.3333H15.8783C16.2912 23.305 16.6807 23.1312 16.9776 22.8428C17.2745 22.5545 17.4596 22.1702 17.5 21.7583V10.5H17.1733H10.64C10.2934 10.5 9.96108 10.6377 9.71603 10.8827C9.47098 11.1278 9.33332 11.4601 9.33332 11.8067Z" fill="black"/>
<path opacity="0.5" d="M16.135 7H12.635C12.8805 7.78672 13.3726 8.47355 14.0386 8.9589C14.7047 9.44425 15.5093 9.70234 16.3333 9.695C16.7301 9.68885 17.1235 9.62197 17.5 9.49667V8.33C17.488 7.97504 17.3392 7.63847 17.0849 7.39061C16.8305 7.14276 16.4902 7.00281 16.135 7Z" fill="black"/>
<path d="M14.9683 5.83325H1.365C0.611131 5.83325 0 6.44438 0 7.19825V20.8016C0 21.5555 0.611131 22.1666 1.365 22.1666H14.9683C15.7222 22.1666 16.3333 21.5555 16.3333 20.8016V7.19825C16.3333 6.44438 15.7222 5.83325 14.9683 5.83325Z" fill="#4B53BC"/>
<path d="M11.8767 11.1766H9.08833V18.6666H7.25666V11.1766H4.45667V9.33325H11.8767V11.1766Z" fill="white"/>
</g>
<defs>
<clipPath id="clip0_20307_29670">
<rect width="28" height="28" fill="white"/>
</clipPath>
</defs>
</svg>


@@ -1,5 +0,0 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M3.66634 2.66675C3.66634 2.29856 3.36787 2.00008 2.99967 2.00008C2.63149 2.00008 2.33301 2.29855 2.33301 2.66675L3.66634 2.66675ZM2.99967 5.33341H2.33301C2.33301 5.51023 2.40325 5.6798 2.52827 5.80482C2.65329 5.92984 2.82286 6.00008 2.99967 6.00008V5.33341ZM5.66641 6.00008C6.03459 6.00008 6.33307 5.7016 6.33307 5.33341C6.33307 4.96523 6.03459 4.66675 5.66641 4.66675L5.66641 6.00008ZM2.41183 5.01659C2.23816 5.34125 2.36056 5.74523 2.68522 5.91889C3.00988 6.09256 3.41385 5.97016 3.58752 5.6455L2.41183 5.01659ZM3.03395 8.58915C2.99109 8.22348 2.6599 7.96175 2.29421 8.00461C1.92853 8.04748 1.66682 8.37868 1.70967 8.74435L3.03395 8.58915ZM12.0439 5.05931C12.2607 5.3569 12.6777 5.42238 12.9753 5.20557C13.2729 4.98876 13.3383 4.57176 13.1215 4.27417L12.0439 5.05931ZM5.02145 13.5907C5.34627 13.7641 5.75013 13.6413 5.92349 13.3165C6.09685 12.9917 5.97407 12.5878 5.64925 12.4145L5.02145 13.5907ZM2.33301 2.66675L2.33301 5.33341H3.66634L3.66634 2.66675L2.33301 2.66675ZM2.99967 6.00008L5.66641 6.00008L5.66641 4.66675H2.99967L2.99967 6.00008ZM3.58752 5.6455C4.43045 4.06972 6.09066 3.00008 7.99968 3.00008V1.66675C5.57951 1.66675 3.47747 3.02445 2.41183 5.01659L3.58752 5.6455ZM7.99968 3.00008C9.66128 3.00008 11.1336 3.80991 12.0439 5.05931L13.1215 4.27417C11.9711 2.69513 10.1055 1.66675 7.99968 1.66675V3.00008ZM5.64925 12.4145C4.23557 11.6599 3.22828 10.2474 3.03395 8.58915L1.70967 8.74435C1.95639 10.8495 3.23403 12.6367 5.02145 13.5907L5.64925 12.4145Z" fill="white"/>
<path fill-rule="evenodd" clip-rule="evenodd" d="M13.1946 8.13826C12.9027 7.84637 12.4294 7.84638 12.1375 8.13829L9.11842 11.1574C9.01642 11.2594 8.95025 11.3917 8.92985 11.5345C8.92985 11.5345 8.92985 11.5345 8.92985 11.5345L8.78508 12.5479L9.79846 12.4031C9.94127 12.3827 10.0736 12.3165 10.1756 12.2145L13.1947 9.19548C13.4866 8.90359 13.4866 8.43027 13.1946 8.13826C13.1947 8.13827 13.1946 8.13825 13.1946 8.13826ZM11.1947 7.19548C12.0073 6.38286 13.3249 6.38286 14.1375 7.19548C14.95 8.00814 14.9501 9.32565 14.1375 10.1383L11.1184 13.1574C10.8124 13.4633 10.4154 13.6618 9.98703 13.723L8.09369 13.9935C7.88596 14.0232 7.67639 13.9533 7.52801 13.805C7.37963 13.6566 7.30977 13.447 7.33945 13.2393L7.60991 11.3459C7.67111 10.9175 7.86961 10.5205 8.17561 10.2145L11.1947 7.19548Z" fill="white"/>
<path fill-rule="evenodd" clip-rule="evenodd" d="M12.528 10.8048L10.528 8.80482L11.4708 7.86201L13.4708 9.86201L12.528 10.8048Z" fill="white"/>
</svg>


@@ -1,61 +0,0 @@
{
"icon": {
"type": "element",
"isRootNode": true,
"name": "svg",
"attributes": {
"width": "27",
"height": "27",
"viewBox": "0 0 27 27",
"fill": "none",
"xmlns": "http://www.w3.org/2000/svg"
},
"children": [
{
"type": "element",
"name": "path",
"attributes": {
"fill-rule": "evenodd",
"clip-rule": "evenodd",
"d": "M9.625 0C9.27992 0.000320434 8.93828 0.0686147 8.6196 0.200983C8.30091 0.333351 8.01142 0.527199 7.76766 0.771458C7.5239 1.01572 7.33064 1.3056 7.19893 1.62456C7.06721 1.94351 6.99962 2.28529 7 2.63037C6.99968 2.97541 7.06732 3.31714 7.19907 3.63603C7.33081 3.95493 7.52408 4.24476 7.76783 4.48897C8.01159 4.73317 8.30105 4.92698 8.61971 5.05932C8.93836 5.19165 9.27996 5.25993 9.625 5.26025H12.25V2.63037C12.2504 2.28529 12.1828 1.94351 12.0511 1.62456C11.9194 1.3056 11.7261 1.01572 11.4823 0.771458C11.2386 0.527199 10.9491 0.333351 10.6304 0.200983C10.3117 0.0686147 9.97008 0.000320434 9.625 0ZM9.625 7.01416H2.625C2.27996 7.01448 1.93836 7.08276 1.61971 7.21509C1.30106 7.34743 1.01159 7.54124 0.767835 7.78544C0.524081 8.02965 0.330814 8.31948 0.199069 8.63838C0.0673241 8.95728 -0.000319124 9.299 1.63476e-06 9.64404C-0.000383292 9.98912 0.0672126 10.3309 0.198929 10.6499C0.330645 10.9688 0.523902 11.2587 0.767662 11.503C1.01142 11.7472 1.30091 11.9411 1.6196 12.0734C1.93828 12.2058 2.27992 12.2741 2.625 12.2744H9.625C9.97008 12.2741 10.3117 12.2058 10.6304 12.0734C10.9491 11.9411 11.2386 11.7472 11.4823 11.503C11.7261 11.2587 11.9194 10.9688 12.0511 10.6499C12.1828 10.3309 12.2504 9.98912 12.25 9.64404C12.2503 9.299 12.1827 8.95728 12.0509 8.63838C11.9192 8.31948 11.7259 8.02965 11.4822 7.78544C11.2384 7.54124 10.9489 7.34743 10.6303 7.21509C10.3116 7.08276 9.97004 7.01448 9.625 7.01416Z",
"fill": "#44BEDF"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"fill-rule": "evenodd",
"clip-rule": "evenodd",
"d": "M26.25 9.64404C26.2503 9.299 26.1827 8.95728 26.0509 8.63838C25.9192 8.31948 25.7259 8.02965 25.4822 7.78544C25.2384 7.54124 24.9489 7.34743 24.6303 7.21509C24.3116 7.08276 23.97 7.01448 23.625 7.01416C23.28 7.01448 22.9384 7.08276 22.6197 7.21509C22.3011 7.34743 22.0116 7.54124 21.7678 7.78544C21.5241 8.02965 21.3308 8.31948 21.1991 8.63838C21.0673 8.95728 20.9997 9.299 21 9.64404V12.2744H23.625C23.9701 12.2741 24.3117 12.2058 24.6304 12.0734C24.9491 11.9411 25.2386 11.7472 25.4823 11.503C25.7261 11.2587 25.9194 10.9688 26.0511 10.6499C26.1828 10.3309 26.2504 9.98912 26.25 9.64404ZM19.25 9.64404V2.63037C19.2504 2.28529 19.1828 1.94351 19.0511 1.62456C18.9194 1.3056 18.7261 1.01572 18.4823 0.771458C18.2386 0.527199 17.9491 0.333351 17.6304 0.200983C17.3117 0.0686147 16.9701 0.000320434 16.625 0C16.2799 0.000320434 15.9383 0.0686147 15.6196 0.200983C15.3009 0.333351 15.0114 0.527199 14.7677 0.771458C14.5239 1.01572 14.3306 1.3056 14.1989 1.62456C14.0672 1.94351 13.9996 2.28529 14 2.63037V9.64404C13.9996 9.98912 14.0672 10.3309 14.1989 10.6499C14.3306 10.9688 14.5239 11.2587 14.7677 11.503C15.0114 11.7472 15.3009 11.9411 15.6196 12.0734C15.9383 12.2058 16.2799 12.2741 16.625 12.2744C16.9701 12.2741 17.3117 12.2058 17.6304 12.0734C17.9491 11.9411 18.2386 11.7472 18.4823 11.503C18.7261 11.2587 18.9194 10.9688 19.0511 10.6499C19.1828 10.3309 19.2504 9.98912 19.25 9.64404Z",
"fill": "#2EB67D"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"fill-rule": "evenodd",
"clip-rule": "evenodd",
"d": "M16.625 26.302C16.9701 26.3017 17.3117 26.2334 17.6304 26.101C17.9491 25.9686 18.2386 25.7748 18.4823 25.5305C18.7261 25.2863 18.9194 24.9964 19.0511 24.6774C19.1828 24.3585 19.2504 24.0167 19.25 23.6716C19.2503 23.3266 19.1827 22.9849 19.0509 22.666C18.9192 22.3471 18.7259 22.0572 18.4822 21.813C18.2384 21.5688 17.9489 21.375 17.6303 21.2427C17.3116 21.1103 16.97 21.0421 16.625 21.0417H14V23.6716C13.9996 24.0167 14.0672 24.3585 14.1989 24.6774C14.3306 24.9964 14.5239 25.2863 14.7677 25.5305C15.0114 25.7748 15.3009 25.9686 15.6196 26.101C15.9383 26.2334 16.2799 26.3017 16.625 26.302ZM16.625 19.2878H23.625C23.97 19.2875 24.3116 19.2192 24.6303 19.0869C24.9489 18.9546 25.2384 18.7608 25.4822 18.5166C25.7259 18.2723 25.9192 17.9825 26.0509 17.6636C26.1827 17.3447 26.2503 17.003 26.25 16.658C26.2504 16.3129 26.1828 15.9711 26.0511 15.6521C25.9194 15.3332 25.7261 15.0433 25.4823 14.799C25.2386 14.5548 24.9491 14.3609 24.6304 14.2286C24.3117 14.0962 23.9701 14.0279 23.625 14.0276H16.625C16.2799 14.0279 15.9383 14.0962 15.6196 14.2286C15.3009 14.3609 15.0114 14.5548 14.7677 14.799C14.5239 15.0433 14.3306 15.3332 14.1989 15.6521C14.0672 15.9711 13.9996 16.3129 14 16.658C13.9997 17.003 14.0673 17.3447 14.1991 17.6636C14.3308 17.9825 14.5241 18.2723 14.7678 18.5166C15.0116 18.7608 15.3011 18.9546 15.6197 19.0869C15.9384 19.2192 16.28 19.2875 16.625 19.2878Z",
"fill": "#ECB22E"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"fill-rule": "evenodd",
"clip-rule": "evenodd",
"d": "M2.22485e-06 16.658C-0.000318534 17.003 0.0673247 17.3447 0.19907 17.6636C0.330815 17.9825 0.524081 18.2723 0.767835 18.5166C1.01159 18.7608 1.30106 18.9546 1.61971 19.0869C1.93836 19.2192 2.27996 19.2875 2.625 19.2878C2.97004 19.2875 3.31164 19.2192 3.63029 19.0869C3.94895 18.9546 4.23841 18.7608 4.48217 18.5166C4.72592 18.2723 4.91919 17.9825 5.05093 17.6636C5.18268 17.3447 5.25032 17.003 5.25 16.658V14.0276H2.625C2.27988 14.0279 1.9382 14.0962 1.61948 14.2286C1.30077 14.361 1.01126 14.5549 0.76749 14.7992C0.523723 15.0435 0.330477 15.3335 0.198789 15.6525C0.0671016 15.9715 -0.000446885 16.3128 2.22485e-06 16.658ZM7 16.658V23.6716C6.99962 24.0167 7.06721 24.3585 7.19893 24.6774C7.33064 24.9964 7.5239 25.2863 7.76766 25.5305C8.01142 25.7748 8.30091 25.9686 8.6196 26.101C8.93828 26.2334 9.27992 26.3017 9.625 26.302C9.97008 26.3017 10.3117 26.2334 10.6304 26.101C10.9491 25.9686 11.2386 25.7748 11.4823 25.5305C11.7261 25.2863 11.9194 24.9964 12.0511 24.6774C12.1828 24.3585 12.2504 24.0167 12.25 23.6716V16.6584C12.2504 16.3134 12.1828 15.9716 12.0511 15.6526C11.9194 15.3337 11.7261 15.0438 11.4823 14.7995C11.2386 14.5553 10.9491 14.3614 10.6304 14.2291C10.3117 14.0967 9.97008 14.0284 9.625 14.0281C9.27992 14.0284 8.93828 14.0967 8.6196 14.2291C8.30091 14.3614 8.01142 14.5553 7.76766 14.7995C7.5239 15.0438 7.33064 15.3337 7.19893 15.6526C7.06721 15.9716 6.99962 16.3129 7 16.658Z",
"fill": "#E01E5A"
},
"children": []
}
]
},
"name": "Slack"
}
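
The JSON above follows a small recursive node schema. A hedged sketch of the shape these icon files appear to satisfy — the field names are taken from the JSON itself, and the IconData type name from the import in the generated component below:

// Recursive element node mirroring the JSON above; isRootNode is only
// set on the top-level svg element.
type IconNode = {
  type: 'element'
  isRootNode?: boolean
  name: string
  attributes: Record<string, string>
  children: IconNode[]
}

// Whole-file shape: the parsed element tree plus a display name.
type IconData = {
  icon: IconNode
  name: string
}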


@@ -1,20 +0,0 @@
// GENERATED BY script
// DO NOT EDIT IT MANUALLY
import type { IconData } from '@/app/components/base/icons/IconBase'
import * as React from 'react'
import IconBase from '@/app/components/base/icons/IconBase'
import data from './Slack.json'
const Icon = (
{
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
  ref?: React.RefObject<HTMLOrSVGElement>
},
) => <IconBase {...props} ref={ref} data={data as IconData} />
Icon.displayName = 'Slack'
export default Icon
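
A minimal usage sketch for one of these generated components, assuming only what the file itself shows — a default export that spreads SVG props (and a ref) onto IconBase; the SlackBadge wrapper is illustrative:

import Slack from './Slack'

// Standard SVG props pass through IconBase to the underlying <svg>,
// so sizing and accessibility attributes work as on a plain <svg>.
const SlackBadge = () => (
  <Slack width={28} height={28} aria-label="Slack" />
)

export default SlackBadge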


@@ -1,146 +0,0 @@
{
"icon": {
"type": "element",
"isRootNode": true,
"name": "svg",
"attributes": {
"width": "28",
"height": "28",
"viewBox": "0 0 28 28",
"fill": "none",
"xmlns": "http://www.w3.org/2000/svg"
},
"children": [
{
"type": "element",
"name": "g",
"attributes": {
"clip-path": "url(#clip0_20307_29670)"
},
"children": [
{
"type": "element",
"name": "path",
"attributes": {
"opacity": "0.1",
"d": "M9.33332 11.8067V20.4167C9.33204 21.432 9.57619 22.4327 10.045 23.3333H15.8783C16.2912 23.305 16.6807 23.1312 16.9776 22.8428C17.2745 22.5545 17.4596 22.1702 17.5 21.7583V10.5H17.1733H10.64C10.2934 10.5 9.96108 10.6377 9.71603 10.8827C9.47098 11.1278 9.33332 11.4601 9.33332 11.8067Z",
"fill": "black"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"opacity": "0.1",
"d": "M16.135 7H12.635C12.8805 7.78672 13.3726 8.47355 14.0386 8.9589C14.7047 9.44425 15.5093 9.70234 16.3333 9.695C16.7301 9.68885 17.1235 9.62197 17.5 9.49667V8.33C17.488 7.97504 17.3392 7.63847 17.0849 7.39061C16.8305 7.14276 16.4902 7.00281 16.135 7Z",
"fill": "black"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"d": "M26.6817 10.5H20.8484L19.2267 11.8183V18.34C19.2846 19.4628 19.7715 20.5204 20.5867 21.2946C21.4019 22.0689 22.4833 22.5005 23.6075 22.5005C24.7318 22.5005 25.8131 22.0689 26.6283 21.2946C27.4436 20.5204 27.9304 19.4628 27.9884 18.34V11.8183C27.9899 11.6458 27.9572 11.4746 27.8923 11.3147C27.8273 11.1548 27.7313 11.0094 27.6098 10.8868C27.4883 10.7643 27.3437 10.667 27.1844 10.6006C27.0251 10.5342 26.8543 10.5 26.6817 10.5Z",
"fill": "#5059C9"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"d": "M23.9167 9.33333C25.5275 9.33333 26.8333 8.0275 26.8333 6.41667C26.8333 4.80584 25.5275 3.5 23.9167 3.5C22.3058 3.5 21 4.80584 21 6.41667C21 8.0275 22.3058 9.33333 23.9167 9.33333Z",
"fill": "#5059C9"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"d": "M10.64 10.5H20.86C21.2065 10.5 21.5389 10.6377 21.7839 10.8827C22.029 11.1278 22.1666 11.4601 22.1666 11.8067V20.4167C22.1666 22.1185 21.4906 23.7506 20.2872 24.9539C19.0839 26.1573 17.4518 26.8333 15.75 26.8333C14.0482 26.8333 12.4161 26.1573 11.2127 24.9539C10.0094 23.7506 9.33331 22.1185 9.33331 20.4167V11.8067C9.33331 11.4601 9.47098 11.1278 9.71603 10.8827C9.96107 10.6377 10.2934 10.5 10.64 10.5Z",
"fill": "#7B83EB"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"d": "M16.3333 9.69477C18.4661 9.69477 20.195 7.96584 20.195 5.8331C20.195 3.70036 18.4661 1.97144 16.3333 1.97144C14.2006 1.97144 12.4717 3.70036 12.4717 5.8331C12.4717 7.96584 14.2006 9.69477 16.3333 9.69477Z",
"fill": "#7B83EB"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"opacity": "0.5",
"d": "M9.33332 11.8067V20.4167C9.33204 21.432 9.57619 22.4327 10.045 23.3333H15.8783C16.2912 23.305 16.6807 23.1312 16.9776 22.8428C17.2745 22.5545 17.4596 22.1702 17.5 21.7583V10.5H17.1733H10.64C10.2934 10.5 9.96108 10.6377 9.71603 10.8827C9.47098 11.1278 9.33332 11.4601 9.33332 11.8067Z",
"fill": "black"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"opacity": "0.5",
"d": "M16.135 7H12.635C12.8805 7.78672 13.3726 8.47355 14.0386 8.9589C14.7047 9.44425 15.5093 9.70234 16.3333 9.695C16.7301 9.68885 17.1235 9.62197 17.5 9.49667V8.33C17.488 7.97504 17.3392 7.63847 17.0849 7.39061C16.8305 7.14276 16.4902 7.00281 16.135 7Z",
"fill": "black"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"d": "M14.9683 5.83325H1.365C0.611131 5.83325 0 6.44438 0 7.19825V20.8016C0 21.5555 0.611131 22.1666 1.365 22.1666H14.9683C15.7222 22.1666 16.3333 21.5555 16.3333 20.8016V7.19825C16.3333 6.44438 15.7222 5.83325 14.9683 5.83325Z",
"fill": "#4B53BC"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"d": "M11.8767 11.1766H9.08833V18.6666H7.25666V11.1766H4.45667V9.33325H11.8767V11.1766Z",
"fill": "white"
},
"children": []
}
]
},
{
"type": "element",
"name": "defs",
"attributes": {},
"children": [
{
"type": "element",
"name": "clipPath",
"attributes": {
"id": "clip0_20307_29670"
},
"children": [
{
"type": "element",
"name": "rect",
"attributes": {
"width": "28",
"height": "28",
"fill": "white"
},
"children": []
}
]
}
]
}
]
},
"name": "Teams"
}


@@ -1,20 +0,0 @@
// GENERATED BY script
// DO NOT EDIT IT MANUALLY
import type { IconData } from '@/app/components/base/icons/IconBase'
import * as React from 'react'
import IconBase from '@/app/components/base/icons/IconBase'
import data from './Teams.json'
const Icon = (
{
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
  ref?: React.RefObject<HTMLOrSVGElement>
},
) => <IconBase {...props} ref={ref} data={data as IconData} />
Icon.displayName = 'Teams'
export default Icon


@@ -2,5 +2,3 @@ export { default as DefaultToolIcon } from './DefaultToolIcon'
export { default as Icon3Dots } from './Icon3Dots'
export { default as Message3Fill } from './Message3Fill'
export { default as RowStruct } from './RowStruct'
export { default as Slack } from './Slack'
export { default as Teams } from './Teams'


@@ -1,48 +0,0 @@
{
"icon": {
"type": "element",
"isRootNode": true,
"name": "svg",
"attributes": {
"width": "16",
"height": "16",
"viewBox": "0 0 16 16",
"fill": "none",
"xmlns": "http://www.w3.org/2000/svg"
},
"children": [
{
"type": "element",
"name": "path",
"attributes": {
"d": "M3.66634 2.66675C3.66634 2.29856 3.36787 2.00008 2.99967 2.00008C2.63149 2.00008 2.33301 2.29855 2.33301 2.66675L3.66634 2.66675ZM2.99967 5.33341H2.33301C2.33301 5.51023 2.40325 5.6798 2.52827 5.80482C2.65329 5.92984 2.82286 6.00008 2.99967 6.00008V5.33341ZM5.66641 6.00008C6.03459 6.00008 6.33307 5.7016 6.33307 5.33341C6.33307 4.96523 6.03459 4.66675 5.66641 4.66675L5.66641 6.00008ZM2.41183 5.01659C2.23816 5.34125 2.36056 5.74523 2.68522 5.91889C3.00988 6.09256 3.41385 5.97016 3.58752 5.6455L2.41183 5.01659ZM3.03395 8.58915C2.99109 8.22348 2.6599 7.96175 2.29421 8.00461C1.92853 8.04748 1.66682 8.37868 1.70967 8.74435L3.03395 8.58915ZM12.0439 5.05931C12.2607 5.3569 12.6777 5.42238 12.9753 5.20557C13.2729 4.98876 13.3383 4.57176 13.1215 4.27417L12.0439 5.05931ZM5.02145 13.5907C5.34627 13.7641 5.75013 13.6413 5.92349 13.3165C6.09685 12.9917 5.97407 12.5878 5.64925 12.4145L5.02145 13.5907ZM2.33301 2.66675L2.33301 5.33341H3.66634L3.66634 2.66675L2.33301 2.66675ZM2.99967 6.00008L5.66641 6.00008L5.66641 4.66675H2.99967L2.99967 6.00008ZM3.58752 5.6455C4.43045 4.06972 6.09066 3.00008 7.99968 3.00008V1.66675C5.57951 1.66675 3.47747 3.02445 2.41183 5.01659L3.58752 5.6455ZM7.99968 3.00008C9.66128 3.00008 11.1336 3.80991 12.0439 5.05931L13.1215 4.27417C11.9711 2.69513 10.1055 1.66675 7.99968 1.66675V3.00008ZM5.64925 12.4145C4.23557 11.6599 3.22828 10.2474 3.03395 8.58915L1.70967 8.74435C1.95639 10.8495 3.23403 12.6367 5.02145 13.5907L5.64925 12.4145Z",
"fill": "currentColor"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"fill-rule": "evenodd",
"clip-rule": "evenodd",
"d": "M13.1946 8.13826C12.9027 7.84637 12.4294 7.84638 12.1375 8.13829L9.11842 11.1574C9.01642 11.2594 8.95025 11.3917 8.92985 11.5345C8.92985 11.5345 8.92985 11.5345 8.92985 11.5345L8.78508 12.5479L9.79846 12.4031C9.94127 12.3827 10.0736 12.3165 10.1756 12.2145L13.1947 9.19548C13.4866 8.90359 13.4866 8.43027 13.1946 8.13826C13.1947 8.13827 13.1946 8.13825 13.1946 8.13826ZM11.1947 7.19548C12.0073 6.38286 13.3249 6.38286 14.1375 7.19548C14.95 8.00814 14.9501 9.32565 14.1375 10.1383L11.1184 13.1574C10.8124 13.4633 10.4154 13.6618 9.98703 13.723L8.09369 13.9935C7.88596 14.0232 7.67639 13.9533 7.52801 13.805C7.37963 13.6566 7.30977 13.447 7.33945 13.2393L7.60991 11.3459C7.67111 10.9175 7.86961 10.5205 8.17561 10.2145L11.1947 7.19548Z",
"fill": "currentColor"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"fill-rule": "evenodd",
"clip-rule": "evenodd",
"d": "M12.528 10.8048L10.528 8.80482L11.4708 7.86201L13.4708 9.86201L12.528 10.8048Z",
"fill": "currentColor"
},
"children": []
}
]
},
"name": "HumanInLoop"
}
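
Unlike the brand-colored Slack and Teams data above, every path here uses fill: currentColor, so the rendered icon inherits whatever CSS color its parent sets. An illustrative sketch (the inline style stands in for any theme class):

import HumanInLoop from './HumanInLoop'

// currentColor resolves against the inherited CSS color, so the same
// JSON data renders in any theme color without editing the file.
const ThemedIcon = () => (
  <span style={{ color: '#155EEF' }}>
    <HumanInLoop width={16} height={16} />
  </span>
)

export default ThemedIcon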


@@ -1,20 +0,0 @@
// GENERATED BY script
// DO NOT EDIT IT MANUALLY
import type { IconData } from '@/app/components/base/icons/IconBase'
import * as React from 'react'
import IconBase from '@/app/components/base/icons/IconBase'
import data from './HumanInLoop.json'
const Icon = (
{
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
  ref?: React.RefObject<HTMLOrSVGElement>
},
) => <IconBase {...props} ref={ref} data={data as IconData} />
Icon.displayName = 'HumanInLoop'
export default Icon


@@ -10,7 +10,6 @@ export { default as DocsExtractor } from './DocsExtractor'
export { default as End } from './End'
export { default as Home } from './Home'
export { default as Http } from './Http'
export { default as HumanInLoop } from './HumanInLoop'
export { default as IfElse } from './IfElse'
export { default as Iteration } from './Iteration'
export { default as IterationStart } from './IterationStart'


@@ -18,7 +18,7 @@ export type MarkdownProps = {
content: string
className?: string
pluginInfo?: SimplePluginInfo
} & Pick<ReactMarkdownWrapperProps, 'customComponents' | 'customDisallowedElements' | 'rehypePlugins'>
} & Pick<ReactMarkdownWrapperProps, 'customComponents' | 'customDisallowedElements'>
export const Markdown = (props: MarkdownProps) => {
const { customComponents = {}, pluginInfo } = props
@@ -29,13 +29,7 @@ export const Markdown = (props: MarkdownProps) => {
return (
<div className={cn('markdown-body', '!text-text-primary', props.className)}>
<ReactMarkdown
pluginInfo={pluginInfo}
latexContent={latexContent}
customComponents={customComponents}
customDisallowedElements={props.customDisallowedElements}
rehypePlugins={props.rehypePlugins}
/>
<ReactMarkdown pluginInfo={pluginInfo} latexContent={latexContent} customComponents={customComponents} customDisallowedElements={props.customDisallowedElements} />
</div>
)
}
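
Together with the wrapper hunk below, this change removes the rehypePlugins escape hatch: the prop was typed as a PluggableList and spread after the wrapper's built-in rehype plugins. A sketch of the kind of call site it retires — the import path is illustrative, and rehype-katex stands in for any rehype plugin:

import rehypeKatex from 'rehype-katex'
import { Markdown } from '@/app/components/base/markdown' // path illustrative

// Valid before this change: the extra plugin ran after the built-ins.
// Afterwards, only the wrapper's own rehype pipeline applies.
const Answer = ({ content }: { content: string }) => (
  <Markdown content={content} rehypePlugins={[rehypeKatex]} />
)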


@@ -22,7 +22,6 @@ export type ReactMarkdownWrapperProps = {
customDisallowedElements?: string[]
customComponents?: Record<string, React.ComponentType<any>>
pluginInfo?: SimplePluginInfo
rehypePlugins?: any // PluggableList
}
export const ReactMarkdownWrapper: FC<ReactMarkdownWrapperProps> = (props) => {
@@ -56,7 +55,6 @@ export const ReactMarkdownWrapper: FC<ReactMarkdownWrapperProps> = (props) => {
tree.children.forEach(iterate)
}
},
...(props.rehypePlugins || []),
]}
urlTransform={customUrlTransform}
disallowedElements={['iframe', 'head', 'html', 'meta', 'link', 'style', 'body', ...(props.customDisallowedElements || [])]}


@@ -4,7 +4,6 @@ import { SupportUploadFileTypes } from '../../workflow/types'
export const CONTEXT_PLACEHOLDER_TEXT = '{{#context#}}'
export const HISTORY_PLACEHOLDER_TEXT = '{{#histories#}}'
export const QUERY_PLACEHOLDER_TEXT = '{{#query#}}'
export const REQUEST_URL_PLACEHOLDER_TEXT = '{{#url#}}'
export const CURRENT_PLACEHOLDER_TEXT = '{{#current#}}'
export const ERROR_MESSAGE_PLACEHOLDER_TEXT = '{{#error_message#}}'
export const LAST_RUN_PLACEHOLDER_TEXT = '{{#last_run#}}'
@@ -31,12 +30,6 @@ export const checkHasQueryBlock = (text: string) => {
return text.includes(QUERY_PLACEHOLDER_TEXT)
}
export const checkHasRequestURLBlock = (text: string) => {
if (!text)
return false
return text.includes(REQUEST_URL_PLACEHOLDER_TEXT)
}
/*
* {{#1711617514996.name#}} => [1711617514996, name]
* {{#1711617514996.sys.query#}} => [sys, query]
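
The deleted checkHasRequestURLBlock followed the same pattern as the checks that remain: an empty-text guard, then a plain substring test against the placeholder constant; and per the comment above, variable placeholders decompose by stripping the {{# and #}} delimiters and splitting on '.'. A hedged sketch of both (helper names are illustrative):

const QUERY_PLACEHOLDER_TEXT = '{{#query#}}'

// Same shape as the deleted checkHasRequestURLBlock: guard empty
// input, then a substring test for the constant.
const hasQueryBlock = (text: string): boolean =>
  !!text && text.includes(QUERY_PLACEHOLDER_TEXT)

// '{{#1711617514996.name#}}' => ['1711617514996', 'name']. The comment
// above implies sys placeholders reduce to ['sys', 'query'] instead,
// which would need an extra special case.
const parseVarPlaceholder = (p: string): string[] =>
  p.slice(3, -3).split('.')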

Some files were not shown because too many files have changed in this diff.