Compare commits

..

29 Commits

Author SHA1 Message Date
Yanli 盐粒
b485850b76 fix: stabilize vector service tests in ci 2026-03-25 20:11:36 +08:00
Yanli 盐粒
b8d170cc60 fix: align backend tests with merged auth and vector behavior 2026-03-25 19:17:44 +08:00
autofix-ci[bot]
087e9c7d44 [autofix.ci] apply automated fixes 2026-03-25 10:50:27 +00:00
Yanli 盐粒
51c5be41af merge origin/main into yanli/pyrefly-fix-plan-v2 2026-03-25 18:48:11 +08:00
autofix-ci[bot]
e5e5fe14ff [autofix.ci] apply automated fixes 2026-03-18 17:40:49 +00:00
Yanli 盐粒
aa30eeaf27 🐛 fix: clarify invalid auth config errors 2026-03-19 01:38:45 +08:00
Yanli 盐粒
0745f573e6 fix: add missing word extractor type annotation 2026-03-19 01:06:18 +08:00
Yanli 盐粒
94b05b2ca1 refactor: remove resolved pyrefly excludes 2026-03-19 00:55:35 +08:00
Yanli 盐粒
c3b17fc833 🐛 fix: preserve URL-based PDF and DOCX extraction 2026-03-19 00:43:23 +08:00
Yanli 盐粒
8f99dc1ac1 refactor(api): remove unused vector payload alias 2026-03-19 00:10:25 +08:00
Yanli 盐粒
2028e2c3b8 Default attachment provider during normalization 2026-03-18 23:40:17 +08:00
Yanli 盐粒
7ff470d8a0 Preserve auth service interface compatibility 2026-03-18 23:34:24 +08:00
Yanli 盐粒
a4dbb76d3a Keep typing changes scoped in auth and extractor tests 2026-03-18 23:13:31 +08:00
Yanli 盐粒
134ae75a9b chore(api): keep auth api key validation aligned with main 2026-03-18 22:22:52 +08:00
Yanli 盐粒
2bbb45e97f fix(api): require api key in auth create validation 2026-03-18 20:05:50 +08:00
Yanli 盐粒
a0017183b6 Address API review follow-ups 2026-03-18 18:31:09 +08:00
Yanli 盐粒
db7d5e30cb Merge origin/main into yanli/pyrefly-fix-plan-v2 2026-03-18 18:16:42 +08:00
Yanli 盐粒
295587718d Stabilize API key auth validation tests 2026-03-18 18:13:14 +08:00
Yanli 盐粒
b85d010e42 Handle legacy website crawl mode values 2026-03-18 18:02:41 +08:00
Yanli 盐粒
c71e407c39 Fix OAuth payload validation regressions 2026-03-18 17:56:38 +08:00
Yanli 盐粒
1e5e65326e refactor(api): keep extractor typing changes behavior-neutral 2026-03-17 20:34:34 +08:00
Yanli 盐粒
fe18405f1d refactor(api): remove dead metadata guard 2026-03-17 20:11:10 +08:00
Yanli 盐粒
7ccc736929 refactor(api): preserve pydantic auth validation errors 2026-03-17 20:10:05 +08:00
Yanli 盐粒
bcac77c212 refactor(api): relax vector metadata id lookup contract 2026-03-17 20:07:07 +08:00
Yanli 盐粒
2b53f1bfea fix(api): annotate splitter variable for mypy 2026-03-17 20:06:07 +08:00
Yanli 盐粒
eec9c76b7b chore(api): clarify deferred pyrefly exclude comments 2026-03-17 19:59:01 +08:00
autofix-ci[bot]
98a94019c4 [autofix.ci] apply automated fixes 2026-03-17 11:55:31 +00:00
Yanli 盐粒
e8f120d87b fix(api): use typing_extensions TypedDict in auth 2026-03-17 19:53:19 +08:00
Yanli 盐粒
7572db15ff refactor(api): tighten shared adapter typing contracts 2026-03-17 19:47:16 +08:00
65 changed files with 1327 additions and 5799 deletions

View File

@@ -88,7 +88,7 @@ class LindormVectorStore(BaseVector):
batch_size: int = 64,
timeout: int = 60,
**kwargs,
):
) -> list[str]:
logger.info("Total documents to add: %s", len(documents))
uuids = self._get_uuids(documents)
@@ -130,8 +130,11 @@ class LindormVectorStore(BaseVector):
Field.METADATA_KEY: documents[i].metadata,
}
if self._using_ugc:
action_header["index"]["routing"] = self._routing
action_values[ROUTING_FIELD] = self._routing
routing = self._routing
if routing is None:
raise ValueError("UGC index should init vector with valid 'routing_value' parameter value")
action_header["index"]["routing"] = routing
action_values[ROUTING_FIELD] = routing
actions.append(action_header)
actions.append(action_values)
@@ -147,6 +150,8 @@ class LindormVectorStore(BaseVector):
logger.exception("Failed to process batch %s", batch_num + 1)
raise
return uuids
def get_ids_by_metadata_field(self, key: str, value: str):
query: dict[str, Any] = {
"query": {"bool": {"must": [{"term": {f"{Field.METADATA_KEY}.{key}.keyword": value}}]}}
@@ -378,18 +383,21 @@ class LindormVectorStoreFactory(AbstractVectorFactory):
raise ValueError("LINDORM_USING_UGC is not set")
routing_value = None
if dataset.index_struct:
index_struct_dict = dataset.index_struct_dict
if index_struct_dict is None:
raise ValueError("dataset.index_struct_dict is missing")
# if an existed record's index_struct_dict doesn't contain using_ugc field,
# it actually stores in the normal index format
stored_in_ugc: bool = dataset.index_struct_dict.get("using_ugc", False)
stored_in_ugc: bool = index_struct_dict.get("using_ugc", False)
using_ugc = stored_in_ugc
if stored_in_ugc:
dimension = dataset.index_struct_dict["dimension"]
index_type = dataset.index_struct_dict["index_type"]
distance_type = dataset.index_struct_dict["distance_type"]
routing_value = dataset.index_struct_dict["vector_store"]["class_prefix"]
dimension = index_struct_dict["dimension"]
index_type = index_struct_dict["index_type"]
distance_type = index_struct_dict["distance_type"]
routing_value = index_struct_dict["vector_store"]["class_prefix"]
index_name = f"{UGC_INDEX_PREFIX}_{dimension}_{index_type}_{distance_type}".lower()
else:
index_name = dataset.index_struct_dict["vector_store"]["class_prefix"].lower()
index_name = index_struct_dict["vector_store"]["class_prefix"].lower()
else:
embedding_vector = embeddings.embed_query("hello word")
dimension = len(embedding_vector)

View File

@@ -7,7 +7,9 @@ from core.rag.models.document import Document
class BaseVector(ABC):
def __init__(self, collection_name: str):
_collection_name: str
def __init__(self, collection_name: str) -> None:
self._collection_name = collection_name
@abstractmethod
@@ -30,7 +32,7 @@ class BaseVector(ABC):
def delete_by_ids(self, ids: list[str]) -> None:
raise NotImplementedError
def get_ids_by_metadata_field(self, key: str, value: str):
def get_ids_by_metadata_field(self, key: str, value: str) -> list[str] | None:
raise NotImplementedError
@abstractmethod
@@ -63,5 +65,5 @@ class BaseVector(ABC):
return [text.metadata["doc_id"] for text in texts if text.metadata and "doc_id" in text.metadata]
@property
def collection_name(self):
def collection_name(self) -> str:
return self._collection_name

View File

@@ -2,7 +2,8 @@ import base64
import logging
import time
from abc import ABC, abstractmethod
from typing import Any
from collections.abc import Sequence
from typing import Any, TypedDict
from sqlalchemy import select
@@ -13,7 +14,7 @@ from core.rag.datasource.vdb.vector_type import VectorType
from core.rag.embedding.cached_embedding import CacheEmbedding
from core.rag.embedding.embedding_base import Embeddings
from core.rag.index_processor.constant.doc_type import DocType
from core.rag.models.document import Document
from core.rag.models.document import AttachmentDocument, ChildDocument, Document
from dify_graph.model_runtime.entities.model_entities import ModelType
from extensions.ext_database import db
from extensions.ext_redis import redis_client
@@ -24,19 +25,34 @@ from models.model import UploadFile
logger = logging.getLogger(__name__)
class VectorStoreIndexConfig(TypedDict):
class_prefix: str
class VectorIndexStructDict(TypedDict):
type: VectorType
vector_store: VectorStoreIndexConfig
VectorDocumentInput = Document | ChildDocument | AttachmentDocument
class AbstractVectorFactory(ABC):
@abstractmethod
def init_vector(self, dataset: Dataset, attributes: list, embeddings: Embeddings) -> BaseVector:
def init_vector(self, dataset: Dataset, attributes: list[str], embeddings: Embeddings) -> BaseVector:
raise NotImplementedError
@staticmethod
def gen_index_struct_dict(vector_type: VectorType, collection_name: str):
index_struct_dict = {"type": vector_type, "vector_store": {"class_prefix": collection_name}}
def gen_index_struct_dict(vector_type: VectorType, collection_name: str) -> VectorIndexStructDict:
index_struct_dict: VectorIndexStructDict = {
"type": vector_type,
"vector_store": {"class_prefix": collection_name},
}
return index_struct_dict
class Vector:
def __init__(self, dataset: Dataset, attributes: list | None = None):
def __init__(self, dataset: Dataset, attributes: list[str] | None = None) -> None:
if attributes is None:
attributes = ["doc_id", "dataset_id", "document_id", "doc_hash", "doc_type"]
self._dataset = dataset
@@ -198,12 +214,12 @@ class Vector:
case _:
raise ValueError(f"Vector store {vector_type} is not supported.")
def create(self, texts: list | None = None, **kwargs):
def create(self, texts: Sequence[Document | ChildDocument] | None = None, **kwargs: Any) -> None:
if texts:
start = time.time()
logger.info("start embedding %s texts %s", len(texts), start)
batch_size = 1000
total_batches = len(texts) + batch_size - 1
total_batches = (len(texts) + batch_size - 1) // batch_size
for i in range(0, len(texts), batch_size):
batch = texts[i : i + batch_size]
batch_start = time.time()
@@ -212,29 +228,33 @@ class Vector:
logger.info(
"Embedding batch %s/%s took %s s", i // batch_size + 1, total_batches, time.time() - batch_start
)
self._vector_processor.create(texts=batch, embeddings=batch_embeddings, **kwargs)
self._vector_processor.create(
texts=self._normalize_documents(batch), embeddings=batch_embeddings, **kwargs
)
logger.info("Embedding %s texts took %s s", len(texts), time.time() - start)
def create_multimodal(self, file_documents: list | None = None, **kwargs):
def create_multimodal(self, file_documents: list[AttachmentDocument] | None = None, **kwargs: Any) -> None:
if file_documents:
start = time.time()
logger.info("start embedding %s files %s", len(file_documents), start)
batch_size = 1000
total_batches = len(file_documents) + batch_size - 1
total_batches = (len(file_documents) + batch_size - 1) // batch_size
for i in range(0, len(file_documents), batch_size):
batch = file_documents[i : i + batch_size]
batch_start = time.time()
logger.info("Processing batch %s/%s (%s files)", i // batch_size + 1, total_batches, len(batch))
# Batch query all upload files to avoid N+1 queries
attachment_ids = [doc.metadata["doc_id"] for doc in batch]
attachment_ids = [doc.metadata["doc_id"] for doc in batch if doc.metadata is not None]
stmt = select(UploadFile).where(UploadFile.id.in_(attachment_ids))
upload_files = db.session.scalars(stmt).all()
upload_file_map = {str(f.id): f for f in upload_files}
file_base64_list = []
real_batch = []
file_base64_list: list[dict[str, str]] = []
real_batch: list[AttachmentDocument] = []
for document in batch:
if document.metadata is None:
continue
attachment_id = document.metadata["doc_id"]
doc_type = document.metadata["doc_type"]
upload_file = upload_file_map.get(attachment_id)
@@ -249,14 +269,20 @@ class Vector:
}
)
real_batch.append(document)
if not real_batch:
continue
batch_embeddings = self._embeddings.embed_multimodal_documents(file_base64_list)
logger.info(
"Embedding batch %s/%s took %s s", i // batch_size + 1, total_batches, time.time() - batch_start
)
self._vector_processor.create(texts=real_batch, embeddings=batch_embeddings, **kwargs)
self._vector_processor.create(
texts=self._normalize_documents(real_batch),
embeddings=batch_embeddings,
**kwargs,
)
logger.info("Embedding %s files took %s s", len(file_documents), time.time() - start)
def add_texts(self, documents: list[Document], **kwargs):
def add_texts(self, documents: list[Document], **kwargs: Any) -> None:
if kwargs.get("duplicate_check", False):
documents = self._filter_duplicate_texts(documents)
@@ -266,10 +292,10 @@ class Vector:
def text_exists(self, id: str) -> bool:
return self._vector_processor.text_exists(id)
def delete_by_ids(self, ids: list[str]):
def delete_by_ids(self, ids: list[str]) -> None:
self._vector_processor.delete_by_ids(ids)
def delete_by_metadata_field(self, key: str, value: str):
def delete_by_metadata_field(self, key: str, value: str) -> None:
self._vector_processor.delete_by_metadata_field(key, value)
def search_by_vector(self, query: str, **kwargs: Any) -> list[Document]:
@@ -295,7 +321,7 @@ class Vector:
def search_by_full_text(self, query: str, **kwargs: Any) -> list[Document]:
return self._vector_processor.search_by_full_text(query, **kwargs)
def delete(self):
def delete(self) -> None:
self._vector_processor.delete()
# delete collection redis cache
if self._vector_processor.collection_name:
@@ -325,7 +351,26 @@ class Vector:
return texts
def __getattr__(self, name):
@staticmethod
def _normalize_documents(documents: Sequence[VectorDocumentInput]) -> list[Document]:
normalized_documents: list[Document] = []
for document in documents:
if isinstance(document, Document):
normalized_documents.append(document)
continue
normalized_documents.append(
Document(
page_content=document.page_content,
vector=document.vector,
metadata=document.metadata,
provider=(document.provider or "dify") if isinstance(document, AttachmentDocument) else "dify",
)
)
return normalized_documents
def __getattr__(self, name: str) -> Any:
if self._vector_processor is not None:
method = getattr(self._vector_processor, name)
if callable(method):

View File

@@ -1,7 +1,11 @@
from pydantic import BaseModel, ConfigDict
from typing import Literal
from pydantic import BaseModel, ConfigDict, field_validator
from core.rag.extractor.entity.datasource_type import DatasourceType
from models.dataset import Document
from models.model import UploadFile
from services.auth.auth_type import AuthType
class NotionInfo(BaseModel):
@@ -12,7 +16,7 @@ class NotionInfo(BaseModel):
credential_id: str | None = None
notion_workspace_id: str | None = ""
notion_obj_id: str
notion_page_type: str
notion_page_type: Literal["database", "page"]
document: Document | None = None
tenant_id: str
model_config = ConfigDict(arbitrary_types_allowed=True)
@@ -25,20 +29,27 @@ class WebsiteInfo(BaseModel):
model_config = ConfigDict(arbitrary_types_allowed=True)
provider: str
provider: AuthType
job_id: str
url: str
mode: str
mode: Literal["crawl", "crawl_return_urls", "scrape"]
tenant_id: str
only_main_content: bool = False
@field_validator("mode", mode="before")
@classmethod
def _normalize_legacy_mode(cls, value: str) -> str:
if value == "single":
return "crawl"
return value
class ExtractSetting(BaseModel):
"""
Model class for provider response.
"""
datasource_type: str
datasource_type: DatasourceType
upload_file: UploadFile | None = None
notion_info: NotionInfo | None = None
website_info: WebsiteInfo | None = None

View File

@@ -1,7 +1,8 @@
import os
import re
import tempfile
from pathlib import Path
from typing import Union
from typing import TypeAlias
from urllib.parse import unquote
from configs import dify_config
@@ -31,19 +32,27 @@ from core.rag.extractor.word_extractor import WordExtractor
from core.rag.models.document import Document
from extensions.ext_storage import storage
from models.model import UploadFile
from services.auth.auth_type import AuthType
SUPPORT_URL_CONTENT_TYPES = ["application/pdf", "text/plain", "application/json"]
USER_AGENT = (
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124"
" Safari/537.36"
)
ExtractProcessorOutput: TypeAlias = list[Document] | str
class ExtractProcessor:
@staticmethod
def _build_temp_file_path(temp_dir: str, suffix: str) -> str:
file_descriptor, file_path = tempfile.mkstemp(dir=temp_dir, suffix=suffix)
os.close(file_descriptor)
return file_path
@classmethod
def load_from_upload_file(
cls, upload_file: UploadFile, return_text: bool = False, is_automatic: bool = False
) -> Union[list[Document], str]:
) -> ExtractProcessorOutput:
extract_setting = ExtractSetting(
datasource_type=DatasourceType.FILE, upload_file=upload_file, document_model="text_model"
)
@@ -54,7 +63,7 @@ class ExtractProcessor:
return cls.extract(extract_setting, is_automatic)
@classmethod
def load_from_url(cls, url: str, return_text: bool = False) -> Union[list[Document], str]:
def load_from_url(cls, url: str, return_text: bool = False) -> ExtractProcessorOutput:
response = ssrf_proxy.get(url, headers={"User-Agent": USER_AGENT})
with tempfile.TemporaryDirectory() as temp_dir:
@@ -65,17 +74,16 @@ class ExtractProcessor:
suffix = "." + response.headers.get("Content-Type").split("/")[-1]
else:
content_disposition = response.headers.get("Content-Disposition")
filename_match = re.search(r'filename="([^"]+)"', content_disposition)
if filename_match:
filename = unquote(filename_match.group(1))
match = re.search(r"\.(\w+)$", filename)
if match:
suffix = "." + match.group(1)
else:
suffix = ""
# https://stackoverflow.com/questions/26541416/generate-temporary-file-names-without-creating-actual-file-in-python#comment90414256_26541521
# Generate a temporary filename under the created temp_dir and ensure the directory exists
file_path = f"{temp_dir}/{next(tempfile._get_candidate_names())}{suffix}" # type: ignore
if content_disposition:
filename_match = re.search(r'filename="([^"]+)"', content_disposition)
if filename_match:
filename = unquote(filename_match.group(1))
match = re.search(r"\.(\w+)$", filename)
if match:
suffix = "." + match.group(1)
else:
suffix = ""
file_path = cls._build_temp_file_path(temp_dir, suffix)
Path(file_path).write_bytes(response.content)
extract_setting = ExtractSetting(datasource_type=DatasourceType.FILE, document_model="text_model")
if return_text:
@@ -94,13 +102,13 @@ class ExtractProcessor:
cls, extract_setting: ExtractSetting, is_automatic: bool = False, file_path: str | None = None
) -> list[Document]:
if extract_setting.datasource_type == DatasourceType.FILE:
upload_file = extract_setting.upload_file
with tempfile.TemporaryDirectory() as temp_dir:
if not file_path:
assert extract_setting.upload_file is not None, "upload_file is required"
upload_file: UploadFile = extract_setting.upload_file
upload_file = extract_setting.upload_file
suffix = Path(upload_file.key).suffix
# FIXME mypy: Cannot determine type of 'tempfile._get_candidate_names' better not use it here
file_path = f"{temp_dir}/{next(tempfile._get_candidate_names())}{suffix}" # type: ignore
file_path = cls._build_temp_file_path(temp_dir, suffix)
storage.download(upload_file.key, file_path)
input_file = Path(file_path)
file_extension = input_file.suffix.lower()
@@ -113,7 +121,11 @@ class ExtractProcessor:
if file_extension in {".xlsx", ".xls"}:
extractor = ExcelExtractor(file_path)
elif file_extension == ".pdf":
extractor = PdfExtractor(file_path, upload_file.tenant_id, upload_file.created_by)
extractor = PdfExtractor(
file_path,
upload_file.tenant_id if upload_file else None,
upload_file.created_by if upload_file else None,
)
elif file_extension in {".md", ".markdown", ".mdx"}:
extractor = (
UnstructuredMarkdownExtractor(file_path, unstructured_api_url, unstructured_api_key)
@@ -123,7 +135,11 @@ class ExtractProcessor:
elif file_extension in {".htm", ".html"}:
extractor = HtmlExtractor(file_path)
elif file_extension == ".docx":
extractor = WordExtractor(file_path, upload_file.tenant_id, upload_file.created_by)
extractor = WordExtractor(
file_path,
upload_file.tenant_id if upload_file else None,
upload_file.created_by if upload_file else None,
)
elif file_extension == ".doc":
extractor = UnstructuredWordExtractor(file_path, unstructured_api_url, unstructured_api_key)
elif file_extension == ".csv":
@@ -149,13 +165,21 @@ class ExtractProcessor:
if file_extension in {".xlsx", ".xls"}:
extractor = ExcelExtractor(file_path)
elif file_extension == ".pdf":
extractor = PdfExtractor(file_path, upload_file.tenant_id, upload_file.created_by)
extractor = PdfExtractor(
file_path,
upload_file.tenant_id if upload_file else None,
upload_file.created_by if upload_file else None,
)
elif file_extension in {".md", ".markdown", ".mdx"}:
extractor = MarkdownExtractor(file_path, autodetect_encoding=True)
elif file_extension in {".htm", ".html"}:
extractor = HtmlExtractor(file_path)
elif file_extension == ".docx":
extractor = WordExtractor(file_path, upload_file.tenant_id, upload_file.created_by)
extractor = WordExtractor(
file_path,
upload_file.tenant_id if upload_file else None,
upload_file.created_by if upload_file else None,
)
elif file_extension == ".csv":
extractor = CSVExtractor(file_path, autodetect_encoding=True)
elif file_extension == ".epub":
@@ -177,7 +201,7 @@ class ExtractProcessor:
return extractor.extract()
elif extract_setting.datasource_type == DatasourceType.WEBSITE:
assert extract_setting.website_info is not None, "website_info is required"
if extract_setting.website_info.provider == "firecrawl":
if extract_setting.website_info.provider == AuthType.FIRECRAWL:
extractor = FirecrawlWebExtractor(
url=extract_setting.website_info.url,
job_id=extract_setting.website_info.job_id,
@@ -186,7 +210,7 @@ class ExtractProcessor:
only_main_content=extract_setting.website_info.only_main_content,
)
return extractor.extract()
elif extract_setting.website_info.provider == "watercrawl":
elif extract_setting.website_info.provider == AuthType.WATERCRAWL:
extractor = WaterCrawlWebExtractor(
url=extract_setting.website_info.url,
job_id=extract_setting.website_info.job_id,
@@ -195,7 +219,7 @@ class ExtractProcessor:
only_main_content=extract_setting.website_info.only_main_content,
)
return extractor.extract()
elif extract_setting.website_info.provider == "jinareader":
elif extract_setting.website_info.provider == AuthType.JINA:
extractor = JinaReaderWebExtractor(
url=extract_setting.website_info.url,
job_id=extract_setting.website_info.job_id,

View File

@@ -2,10 +2,12 @@
from abc import ABC, abstractmethod
from core.rag.models.document import Document
class BaseExtractor(ABC):
"""Interface for extract files."""
@abstractmethod
def extract(self):
def extract(self) -> list[Document]:
raise NotImplementedError

View File

@@ -30,7 +30,7 @@ def detect_file_encodings(file_path: str, timeout: int = 5, sample_size: int = 1
For large files, reading only a sample is sufficient and prevents timeout.
"""
def read_and_detect(filename: str):
def read_and_detect(filename: str) -> list[FileEncoding]:
rst = charset_normalizer.from_path(filename)
best = rst.best()
if best is None:

View File

@@ -29,8 +29,8 @@ class PdfExtractor(BaseExtractor):
Args:
file_path: Path to the PDF file.
tenant_id: Workspace ID.
user_id: ID of the user performing the extraction.
tenant_id: Workspace ID used for extracted image persistence when available.
user_id: ID of the user performing the extraction when available.
file_cache_key: Optional cache key for the extracted text.
"""
@@ -48,7 +48,13 @@ class PdfExtractor(BaseExtractor):
]
MAX_MAGIC_LEN = max(len(m) for m, _, _ in IMAGE_FORMATS)
def __init__(self, file_path: str, tenant_id: str, user_id: str, file_cache_key: str | None = None):
def __init__(
self,
file_path: str,
tenant_id: str | None,
user_id: str | None,
file_cache_key: str | None = None,
):
"""Initialize PdfExtractor."""
self._file_path = file_path
self._tenant_id = tenant_id
@@ -117,6 +123,9 @@ class PdfExtractor(BaseExtractor):
upload_files = []
base_url = dify_config.INTERNAL_FILES_URL or dify_config.FILES_URL
if self._tenant_id is None or self._user_id is None:
return ""
try:
image_objects = page.get_objects(filter=(pdfium_c.FPDF_PAGEOBJ_IMAGE,))
for obj in image_objects:

View File

@@ -9,6 +9,8 @@ import os
import re
import tempfile
import uuid
from collections.abc import Iterable
from typing import cast
from urllib.parse import urlparse
from docx import Document as DocxDocument
@@ -36,7 +38,7 @@ class WordExtractor(BaseExtractor):
file_path: Path to the file to load.
"""
def __init__(self, file_path: str, tenant_id: str, user_id: str):
def __init__(self, file_path: str, tenant_id: str | None, user_id: str | None):
"""Initialize with file path."""
self.file_path = file_path
self.tenant_id = tenant_id
@@ -87,9 +89,12 @@ class WordExtractor(BaseExtractor):
def _extract_images_from_docx(self, doc):
image_count = 0
image_map = {}
image_map: dict[object, str] = {}
base_url = dify_config.INTERNAL_FILES_URL or dify_config.FILES_URL
if self.tenant_id is None or self.user_id is None:
return image_map
for r_id, rel in doc.part.rels.items():
if "image" in rel.target_ref:
image_count += 1
@@ -265,7 +270,7 @@ class WordExtractor(BaseExtractor):
def parse_docx(self, docx_path):
doc = DocxDocument(docx_path)
content = []
content: list[str] = []
image_map = self._extract_images_from_docx(doc)
@@ -363,7 +368,7 @@ class WordExtractor(BaseExtractor):
if link_text:
target_buffer.append(link_text)
paragraph_content = []
paragraph_content: list[str] = []
# State for legacy HYPERLINK fields
hyperlink_field_url = None
hyperlink_field_text_parts: list[str] = []
@@ -423,7 +428,8 @@ class WordExtractor(BaseExtractor):
paragraphs = doc.paragraphs.copy()
tables = doc.tables.copy()
for element in doc.element.body:
body_elements = cast(Iterable[object], getattr(doc.element, "body", []))
for element in body_elements:
if hasattr(element, "tag"):
if isinstance(element.tag, str) and element.tag.endswith("p"): # paragraph
para = paragraphs.pop(0)

View File

@@ -3,7 +3,7 @@ import datetime
import logging
import time
from collections.abc import Mapping
from typing import Any
from typing import Any, Literal, TypedDict
from flask import current_app
from sqlalchemy import delete, func, select
@@ -21,6 +21,16 @@ from .processor.paragraph_index_processor import ParagraphIndexProcessor
logger = logging.getLogger(__name__)
class IndexAndCleanResult(TypedDict):
dataset_id: str
dataset_name: str
batch: str
document_id: str
document_name: str
created_at: float
display_status: Literal["completed"]
class IndexProcessor:
def format_preview(self, chunk_structure: str, chunks: Any) -> Preview:
index_processor = IndexProcessorFactory(chunk_structure).init_index_processor()
@@ -52,9 +62,9 @@ class IndexProcessor:
document_id: str,
original_document_id: str,
chunks: Mapping[str, Any],
batch: Any,
batch: str,
summary_index_setting: SummaryIndexSettingDict | None = None,
):
) -> IndexAndCleanResult:
with session_factory.create_session() as session:
document = session.query(Document).filter_by(id=document_id).first()
if not document:

View File

@@ -122,6 +122,7 @@ class BaseIndexProcessor(ABC):
"""
Get the NodeParser object according to the processing rule.
"""
character_splitter: TextSplitter
if processing_rule_mode in ["custom", "hierarchical"]:
# The user-defined segmentation rule
max_segmentation_tokens_length = dify_config.INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH
@@ -147,7 +148,7 @@ class BaseIndexProcessor(ABC):
embedding_model_instance=embedding_model_instance,
)
return character_splitter # type: ignore
return character_splitter
def _get_content_files(self, document: Document, current_user: Account | None = None) -> list[AttachmentDocument]:
"""
@@ -158,7 +159,7 @@ class BaseIndexProcessor(ABC):
images = self._extract_markdown_images(text)
if not images:
return multi_model_documents
upload_file_id_list = []
upload_file_id_list: list[str] = []
for image in images:
# Collect all upload_file_ids including duplicates to preserve occurrence count

View File

@@ -10,7 +10,7 @@ from core.rag.index_processor.processor.qa_index_processor import QAIndexProcess
class IndexProcessorFactory:
"""IndexProcessorInit."""
def __init__(self, index_type: str | None):
def __init__(self, index_type: str | None) -> None:
self._index_type = index_type
def init_index_processor(self) -> BaseIndexProcessor:
@@ -19,11 +19,12 @@ class IndexProcessorFactory:
if not self._index_type:
raise ValueError("Index type must be specified.")
if self._index_type == IndexStructureType.PARAGRAPH_INDEX:
return ParagraphIndexProcessor()
elif self._index_type == IndexStructureType.QA_INDEX:
return QAIndexProcessor()
elif self._index_type == IndexStructureType.PARENT_CHILD_INDEX:
return ParentChildIndexProcessor()
else:
raise ValueError(f"Index type {self._index_type} is not supported.")
match self._index_type:
case IndexStructureType.PARAGRAPH_INDEX:
return ParagraphIndexProcessor()
case IndexStructureType.QA_INDEX:
return QAIndexProcessor()
case IndexStructureType.PARENT_CHILD_INDEX:
return ParentChildIndexProcessor()
case _:
raise ValueError(f"Index type {self._index_type} is not supported.")

View File

@@ -30,7 +30,7 @@ class EnhanceRecursiveCharacterTextSplitter(RecursiveCharacterTextSplitter):
allowed_special: Union[Literal["all"], Set[str]] = set(), # noqa: UP037
disallowed_special: Union[Literal["all"], Collection[str]] = "all", # noqa: UP037
**kwargs: Any,
):
) -> TS:
def _token_encoder(texts: list[str]) -> list[int]:
if not texts:
return []

View File

@@ -8,7 +8,7 @@ class BaseStorage(ABC):
"""Interface for file storage."""
@abstractmethod
def save(self, filename: str, data: bytes):
def save(self, filename: str, data: bytes) -> None:
raise NotImplementedError
@abstractmethod
@@ -16,7 +16,7 @@ class BaseStorage(ABC):
raise NotImplementedError
@abstractmethod
def load_stream(self, filename: str) -> Generator:
def load_stream(self, filename: str) -> Generator[bytes, None, None]:
raise NotImplementedError
@abstractmethod
@@ -28,10 +28,10 @@ class BaseStorage(ABC):
raise NotImplementedError
@abstractmethod
def delete(self, filename: str):
def delete(self, filename: str) -> None:
raise NotImplementedError
def scan(self, path, files=True, directories=False) -> list[str]:
def scan(self, path: str, files: bool = True, directories: bool = False) -> list[str]:
"""
Scan files and directories in the given path.
This method is implemented only in some storage backends.

View File

@@ -20,7 +20,7 @@ else:
class NotionPageSummary(TypedDict):
page_id: str
page_name: str
page_icon: dict[str, str] | None
page_icon: dict[str, object] | None
parent_id: str
type: Literal["page", "database"]

View File

@@ -43,58 +43,6 @@ core/ops/tencent_trace/utils.py
core/plugin/backwards_invocation/base.py
core/plugin/backwards_invocation/model.py
core/prompt/utils/extract_thread_messages.py
core/rag/datasource/keyword/jieba/jieba.py
core/rag/datasource/keyword/jieba/jieba_keyword_table_handler.py
core/rag/datasource/vdb/analyticdb/analyticdb_vector.py
core/rag/datasource/vdb/analyticdb/analyticdb_vector_openapi.py
core/rag/datasource/vdb/baidu/baidu_vector.py
core/rag/datasource/vdb/chroma/chroma_vector.py
core/rag/datasource/vdb/clickzetta/clickzetta_vector.py
core/rag/datasource/vdb/couchbase/couchbase_vector.py
core/rag/datasource/vdb/elasticsearch/elasticsearch_vector.py
core/rag/datasource/vdb/huawei/huawei_cloud_vector.py
core/rag/datasource/vdb/lindorm/lindorm_vector.py
core/rag/datasource/vdb/matrixone/matrixone_vector.py
core/rag/datasource/vdb/milvus/milvus_vector.py
core/rag/datasource/vdb/myscale/myscale_vector.py
core/rag/datasource/vdb/oceanbase/oceanbase_vector.py
core/rag/datasource/vdb/opensearch/opensearch_vector.py
core/rag/datasource/vdb/oracle/oraclevector.py
core/rag/datasource/vdb/pgvecto_rs/pgvecto_rs.py
core/rag/datasource/vdb/relyt/relyt_vector.py
core/rag/datasource/vdb/tablestore/tablestore_vector.py
core/rag/datasource/vdb/tencent/tencent_vector.py
core/rag/datasource/vdb/tidb_on_qdrant/tidb_on_qdrant_vector.py
core/rag/datasource/vdb/tidb_on_qdrant/tidb_service.py
core/rag/datasource/vdb/tidb_vector/tidb_vector.py
core/rag/datasource/vdb/upstash/upstash_vector.py
core/rag/datasource/vdb/vikingdb/vikingdb_vector.py
core/rag/datasource/vdb/weaviate/weaviate_vector.py
core/rag/extractor/csv_extractor.py
core/rag/extractor/excel_extractor.py
core/rag/extractor/firecrawl/firecrawl_app.py
core/rag/extractor/firecrawl/firecrawl_web_extractor.py
core/rag/extractor/html_extractor.py
core/rag/extractor/jina_reader_extractor.py
core/rag/extractor/markdown_extractor.py
core/rag/extractor/notion_extractor.py
core/rag/extractor/pdf_extractor.py
core/rag/extractor/text_extractor.py
core/rag/extractor/unstructured/unstructured_doc_extractor.py
core/rag/extractor/unstructured/unstructured_eml_extractor.py
core/rag/extractor/unstructured/unstructured_epub_extractor.py
core/rag/extractor/unstructured/unstructured_markdown_extractor.py
core/rag/extractor/unstructured/unstructured_msg_extractor.py
core/rag/extractor/unstructured/unstructured_ppt_extractor.py
core/rag/extractor/unstructured/unstructured_pptx_extractor.py
core/rag/extractor/unstructured/unstructured_xml_extractor.py
core/rag/extractor/watercrawl/client.py
core/rag/extractor/watercrawl/extractor.py
core/rag/extractor/watercrawl/provider.py
core/rag/extractor/word_extractor.py
core/rag/index_processor/processor/paragraph_index_processor.py
core/rag/index_processor/processor/parent_child_index_processor.py
core/rag/index_processor/processor/qa_index_processor.py
core/rag/retrieval/router/multi_dataset_function_call_router.py
core/rag/summary_index/summary_index.py
core/repositories/sqlalchemy_workflow_execution_repository.py
@@ -140,27 +88,10 @@ dify_graph/nodes/variable_assigner/v2/node.py
extensions/logstore/repositories/logstore_api_workflow_run_repository.py
extensions/otel/instrumentation.py
extensions/otel/runtime.py
extensions/storage/aliyun_oss_storage.py
extensions/storage/aws_s3_storage.py
extensions/storage/azure_blob_storage.py
extensions/storage/baidu_obs_storage.py
extensions/storage/clickzetta_volume/clickzetta_volume_storage.py
extensions/storage/clickzetta_volume/file_lifecycle.py
extensions/storage/google_cloud_storage.py
extensions/storage/huawei_obs_storage.py
extensions/storage/opendal_storage.py
extensions/storage/oracle_oci_storage.py
extensions/storage/supabase_storage.py
extensions/storage/tencent_cos_storage.py
extensions/storage/volcengine_tos_storage.py
libs/gmpy2_pkcs10aep_cipher.py
schedule/queue_monitor_task.py
services/account_service.py
services/audio_service.py
services/auth/firecrawl/firecrawl.py
services/auth/jina.py
services/auth/jina/jina.py
services/auth/watercrawl/watercrawl.py
services/conversation_service.py
services/dataset_service.py
services/document_indexing_proxy/document_indexing_task_proxy.py
@@ -188,3 +119,75 @@ tasks/disable_segment_from_index_task.py
tasks/enable_segment_to_index_task.py
tasks/remove_document_from_index_task.py
tasks/workflow_execution_tasks.py
# no need to fix for now: storage adapters
extensions/storage/aliyun_oss_storage.py
extensions/storage/aws_s3_storage.py
extensions/storage/azure_blob_storage.py
extensions/storage/baidu_obs_storage.py
extensions/storage/clickzetta_volume/clickzetta_volume_storage.py
extensions/storage/clickzetta_volume/file_lifecycle.py
extensions/storage/google_cloud_storage.py
extensions/storage/huawei_obs_storage.py
extensions/storage/opendal_storage.py
extensions/storage/oracle_oci_storage.py
extensions/storage/supabase_storage.py
extensions/storage/tencent_cos_storage.py
extensions/storage/volcengine_tos_storage.py
# no need to fix for now: keyword adapters
core/rag/datasource/keyword/jieba/jieba.py
core/rag/datasource/keyword/jieba/jieba_keyword_table_handler.py
# no need to fix for now: vector db adapters
core/rag/datasource/vdb/analyticdb/analyticdb_vector.py
core/rag/datasource/vdb/analyticdb/analyticdb_vector_openapi.py
core/rag/datasource/vdb/baidu/baidu_vector.py
core/rag/datasource/vdb/chroma/chroma_vector.py
core/rag/datasource/vdb/clickzetta/clickzetta_vector.py
core/rag/datasource/vdb/couchbase/couchbase_vector.py
core/rag/datasource/vdb/elasticsearch/elasticsearch_vector.py
core/rag/datasource/vdb/huawei/huawei_cloud_vector.py
core/rag/datasource/vdb/matrixone/matrixone_vector.py
core/rag/datasource/vdb/milvus/milvus_vector.py
core/rag/datasource/vdb/myscale/myscale_vector.py
core/rag/datasource/vdb/oceanbase/oceanbase_vector.py
core/rag/datasource/vdb/opensearch/opensearch_vector.py
core/rag/datasource/vdb/oracle/oraclevector.py
core/rag/datasource/vdb/pgvecto_rs/pgvecto_rs.py
core/rag/datasource/vdb/relyt/relyt_vector.py
core/rag/datasource/vdb/tablestore/tablestore_vector.py
core/rag/datasource/vdb/tencent/tencent_vector.py
core/rag/datasource/vdb/tidb_on_qdrant/tidb_on_qdrant_vector.py
core/rag/datasource/vdb/tidb_on_qdrant/tidb_service.py
core/rag/datasource/vdb/tidb_vector/tidb_vector.py
core/rag/datasource/vdb/upstash/upstash_vector.py
core/rag/datasource/vdb/vikingdb/vikingdb_vector.py
core/rag/datasource/vdb/weaviate/weaviate_vector.py
# no need to fix for now: extractors
core/rag/extractor/csv_extractor.py
core/rag/extractor/excel_extractor.py
core/rag/extractor/firecrawl/firecrawl_app.py
core/rag/extractor/firecrawl/firecrawl_web_extractor.py
core/rag/extractor/html_extractor.py
core/rag/extractor/jina_reader_extractor.py
core/rag/extractor/markdown_extractor.py
core/rag/extractor/notion_extractor.py
core/rag/extractor/text_extractor.py
core/rag/extractor/unstructured/unstructured_doc_extractor.py
core/rag/extractor/unstructured/unstructured_eml_extractor.py
core/rag/extractor/unstructured/unstructured_epub_extractor.py
core/rag/extractor/unstructured/unstructured_markdown_extractor.py
core/rag/extractor/unstructured/unstructured_msg_extractor.py
core/rag/extractor/unstructured/unstructured_ppt_extractor.py
core/rag/extractor/unstructured/unstructured_pptx_extractor.py
core/rag/extractor/unstructured/unstructured_xml_extractor.py
core/rag/extractor/watercrawl/client.py
core/rag/extractor/watercrawl/extractor.py
core/rag/extractor/watercrawl/provider.py
# no need to fix for now: index processors
core/rag/index_processor/processor/paragraph_index_processor.py
core/rag/index_processor/processor/parent_child_index_processor.py
core/rag/index_processor/processor/qa_index_processor.py

View File

@@ -14,5 +14,5 @@ class ApiKeyAuthBase(ABC):
self.credentials = credentials
@abstractmethod
def validate_credentials(self):
def validate_credentials(self) -> bool:
raise NotImplementedError

View File

@@ -7,7 +7,7 @@ class ApiKeyAuthFactory:
auth_factory = self.get_apikey_auth_factory(provider)
self.auth = auth_factory(credentials)
def validate_credentials(self):
def validate_credentials(self) -> bool:
return self.auth.validate_credentials()
@staticmethod

View File

@@ -1,40 +1,63 @@
import json
from typing import cast
from pydantic import TypeAdapter
from sqlalchemy import select
from typing_extensions import TypedDict
from core.helper import encrypter
from extensions.ext_database import db
from models.source import DataSourceApiKeyAuthBinding
from services.auth.api_key_auth_base import AuthCredentials
from services.auth.api_key_auth_factory import ApiKeyAuthFactory
class ApiKeyAuthCreateArgs(TypedDict):
category: str
provider: str
credentials: AuthCredentials
AUTH_CREATE_ARGS_ADAPTER = TypeAdapter(ApiKeyAuthCreateArgs)
AUTH_CREDENTIALS_ADAPTER = TypeAdapter(dict[str, object])
class ApiKeyAuthService:
@staticmethod
def get_provider_auth_list(tenant_id: str):
def get_provider_auth_list(tenant_id: str) -> list[DataSourceApiKeyAuthBinding]:
data_source_api_key_bindings = db.session.scalars(
select(DataSourceApiKeyAuthBinding).where(
DataSourceApiKeyAuthBinding.tenant_id == tenant_id, DataSourceApiKeyAuthBinding.disabled.is_(False)
)
).all()
return data_source_api_key_bindings
return list(data_source_api_key_bindings)
@staticmethod
def create_provider_auth(tenant_id: str, args: dict):
auth_result = ApiKeyAuthFactory(args["provider"], args["credentials"]).validate_credentials()
def create_provider_auth(tenant_id: str, args: dict[str, object]) -> None:
validated_args = ApiKeyAuthService.validate_api_key_auth_args(args)
raw_credentials = ApiKeyAuthService._get_credentials_dict(args)
auth_result = ApiKeyAuthFactory(
validated_args["provider"], validated_args["credentials"]
).validate_credentials()
if auth_result:
# Encrypt the api key
api_key = encrypter.encrypt_token(tenant_id, args["credentials"]["config"]["api_key"])
args["credentials"]["config"]["api_key"] = api_key
api_key_value = validated_args["credentials"]["config"].get("api_key")
if api_key_value is None:
raise KeyError("api_key")
api_key = encrypter.encrypt_token(tenant_id, api_key_value)
raw_config = ApiKeyAuthService._get_config_dict(raw_credentials)
raw_config["api_key"] = api_key
data_source_api_key_binding = DataSourceApiKeyAuthBinding(
tenant_id=tenant_id, category=args["category"], provider=args["provider"]
tenant_id=tenant_id,
category=validated_args["category"],
provider=validated_args["provider"],
)
data_source_api_key_binding.credentials = json.dumps(args["credentials"], ensure_ascii=False)
data_source_api_key_binding.credentials = json.dumps(raw_credentials, ensure_ascii=False)
db.session.add(data_source_api_key_binding)
db.session.commit()
@staticmethod
def get_auth_credentials(tenant_id: str, category: str, provider: str):
def get_auth_credentials(tenant_id: str, category: str, provider: str) -> dict[str, object] | None:
data_source_api_key_bindings = (
db.session.query(DataSourceApiKeyAuthBinding)
.where(
@@ -50,10 +73,10 @@ class ApiKeyAuthService:
if not data_source_api_key_bindings.credentials:
return None
credentials = json.loads(data_source_api_key_bindings.credentials)
return credentials
return AUTH_CREDENTIALS_ADAPTER.validate_python(credentials)
@staticmethod
def delete_provider_auth(tenant_id: str, binding_id: str):
def delete_provider_auth(tenant_id: str, binding_id: str) -> None:
data_source_api_key_binding = (
db.session.query(DataSourceApiKeyAuthBinding)
.where(DataSourceApiKeyAuthBinding.tenant_id == tenant_id, DataSourceApiKeyAuthBinding.id == binding_id)
@@ -63,8 +86,10 @@ class ApiKeyAuthService:
db.session.delete(data_source_api_key_binding)
db.session.commit()
@classmethod
def validate_api_key_auth_args(cls, args):
@staticmethod
def validate_api_key_auth_args(args: dict[str, object] | None) -> ApiKeyAuthCreateArgs:
if args is None:
raise TypeError("argument of type 'NoneType' is not iterable")
if "category" not in args or not args["category"]:
raise ValueError("category is required")
if "provider" not in args or not args["provider"]:
@@ -75,3 +100,18 @@ class ApiKeyAuthService:
raise ValueError("credentials must be a dictionary")
if "auth_type" not in args["credentials"] or not args["credentials"]["auth_type"]:
raise ValueError("auth_type is required")
return AUTH_CREATE_ARGS_ADAPTER.validate_python(args)
@staticmethod
def _get_credentials_dict(args: dict[str, object]) -> dict[str, object]:
credentials = args["credentials"]
if not isinstance(credentials, dict):
raise ValueError("credentials must be a dictionary")
return cast(dict[str, object], credentials)
@staticmethod
def _get_config_dict(credentials: dict[str, object]) -> dict[str, object]:
config = credentials["config"]
if not isinstance(config, dict):
raise TypeError(f"credentials['config'] must be a dictionary, got {type(config).__name__}")
return cast(dict[str, object], config)

View File

@@ -5,3 +5,6 @@ class AuthType(StrEnum):
FIRECRAWL = "firecrawl"
WATERCRAWL = "watercrawl"
JINA = "jinareader"
AuthProvider = AuthType | str

View File

@@ -200,6 +200,29 @@ class TestExtractProcessorFileRouting:
with pytest.raises(AssertionError, match="upload_file is required"):
ExtractProcessor.extract(setting)
@pytest.mark.parametrize(
("extension", "etl_type", "expected_extractor"),
[
(".pdf", "Unstructured", "PdfExtractor"),
(".docx", "Unstructured", "WordExtractor"),
(".pdf", "SelfHosted", "PdfExtractor"),
(".docx", "SelfHosted", "WordExtractor"),
],
)
def test_extract_allows_url_file_paths_without_upload_context(
self, monkeypatch, extension: str, etl_type: str, expected_extractor: str
):
factory = _patch_all_extractors(monkeypatch)
monkeypatch.setattr(processor_module.dify_config, "ETL_TYPE", etl_type)
setting = SimpleNamespace(datasource_type=DatasourceType.FILE, upload_file=None)
docs = ExtractProcessor.extract(setting, file_path=f"/tmp/example{extension}")
assert docs[0].page_content == f"extracted-by-{expected_extractor}"
assert factory.calls[-1][0] == expected_extractor
assert factory.calls[-1][1] == (f"/tmp/example{extension}", None, None)
class TestExtractProcessorDatasourceRouting:
def test_extract_routes_notion_datasource(self, monkeypatch):

View File

@@ -184,3 +184,21 @@ def test_extract_images_failures(mock_dependencies):
assert len(saves) == 1
assert saves[0][1] == jpeg_bytes
assert db_stub.session.committed is True
def test_extract_images_skips_persistence_without_upload_context(mock_dependencies):
mock_page = MagicMock()
mock_image_obj = MagicMock()
mock_image_obj.extract.side_effect = lambda buf, fb_format=None: buf.write(b"\xff\xd8\xff image")
mock_page.get_objects.return_value = [mock_image_obj]
extractor = pe.PdfExtractor(file_path="test.pdf", tenant_id=None, user_id=None)
with patch("pypdfium2.raw", autospec=True) as mock_raw:
mock_raw.FPDF_PAGEOBJ_IMAGE = 1
result = extractor._extract_images(mock_page)
assert result == ""
assert mock_dependencies.saves == []
assert mock_dependencies.db.session.added == []
assert mock_dependencies.db.session.committed is False

View File

@@ -179,6 +179,27 @@ def test_extract_images_from_docx(monkeypatch):
assert db_stub.session.committed is True
def test_extract_images_from_docx_skips_persistence_without_upload_context(monkeypatch):
saves: list[tuple[str, bytes]] = []
monkeypatch.setattr(we, "storage", SimpleNamespace(save=lambda key, data: saves.append((key, data))))
db_stub = SimpleNamespace(session=SimpleNamespace(add=lambda obj: None, commit=lambda: None))
monkeypatch.setattr(we, "db", db_stub)
rel_ext = SimpleNamespace(is_external=True, target_ref="https://example.com/image.png")
doc = SimpleNamespace(part=SimpleNamespace(rels={"rId1": rel_ext}))
extractor = object.__new__(WordExtractor)
extractor.tenant_id = None
extractor.user_id = None
image_map = extractor._extract_images_from_docx(doc)
assert image_map == {}
assert saves == []
def test_extract_images_from_docx_uses_internal_files_url():
"""Test that INTERNAL_FILES_URL takes precedence over FILES_URL for plugin access."""
# Test the URL generation logic directly

View File

@@ -1,7 +1,9 @@
import json
from copy import deepcopy
from unittest.mock import Mock, patch
import pytest
from pydantic import ValidationError
from models.source import DataSourceApiKeyAuthBinding
from services.auth.api_key_auth_service import ApiKeyAuthService
@@ -68,7 +70,16 @@ class TestApiKeyAuthService:
# Mock successful auth validation
mock_auth_instance = Mock()
mock_auth_instance.validate_credentials.return_value = True
mock_factory.return_value = mock_auth_instance
captured_provider = None
captured_credentials = None
def factory_side_effect(provider, credentials):
nonlocal captured_provider, captured_credentials
captured_provider = provider
captured_credentials = deepcopy(credentials)
return mock_auth_instance
mock_factory.side_effect = factory_side_effect
# Mock encryption
encrypted_key = "encrypted_test_key_123"
@@ -77,11 +88,14 @@ class TestApiKeyAuthService:
# Mock database operations
mock_session.add = Mock()
mock_session.commit = Mock()
expected_credentials = deepcopy(self.mock_credentials)
ApiKeyAuthService.create_provider_auth(self.tenant_id, self.mock_args)
# Verify factory class calls
mock_factory.assert_called_once_with(self.provider, self.mock_credentials)
assert mock_factory.call_count == 1
assert captured_provider == self.provider
assert captured_credentials == expected_credentials
mock_auth_instance.validate_credentials.assert_called_once()
# Verify encryption calls
@@ -378,10 +392,9 @@ class TestApiKeyAuthService:
ApiKeyAuthService.validate_api_key_auth_args(None)
def test_validate_api_key_auth_args_dict_credentials_with_list_auth_type(self):
"""Test API key auth args validation - dict credentials with list auth_type"""
"""Test API key auth args validation - list auth_type is rejected by the typed payload contract"""
args = self.mock_args.copy()
args["credentials"]["auth_type"] = ["api_key"]
# Current implementation checks if auth_type exists and is truthy, list ["api_key"] is truthy
# So this should not raise exception, this test should pass
ApiKeyAuthService.validate_api_key_auth_args(args)
with pytest.raises(ValidationError):
ApiKeyAuthService.validate_api_key_auth_args(args)

View File

@@ -4,12 +4,14 @@ from __future__ import annotations
from dataclasses import dataclass
from typing import Any
from unittest.mock import MagicMock
from unittest.mock import MagicMock, Mock, patch
import pytest
import services.vector_service as vector_service_module
from core.rag.datasource.vdb.vector_factory import Vector
from core.rag.index_processor.constant.index_type import IndexStructureType, IndexTechniqueType
from core.rag.models.document import AttachmentDocument, ChildDocument, Document
from services.vector_service import VectorService
@@ -30,6 +32,10 @@ class _ParentDocStub:
children: list[_ChildDocStub]
def _identity_kwargs(**kwargs: Any) -> dict[str, Any]:
return kwargs
def _make_dataset(
*,
indexing_technique: str = IndexTechniqueType.HIGH_QUALITY,
@@ -414,7 +420,7 @@ def test_generate_child_chunks_regenerate_cleans_then_saves_children(monkeypatch
factory_instance.init_index_processor.return_value = index_processor
monkeypatch.setattr(vector_service_module, "IndexProcessorFactory", MagicMock(return_value=factory_instance))
child_chunk_ctor = MagicMock(side_effect=lambda **kwargs: kwargs)
child_chunk_ctor = MagicMock(side_effect=_identity_kwargs)
monkeypatch.setattr(vector_service_module, "ChildChunk", child_chunk_ctor)
db_mock = MagicMock()
@@ -638,7 +644,7 @@ def test_update_multimodel_vector_adds_bindings_and_vectors_and_skips_missing_up
db_mock = _mock_db_session_for_update_multimodel(upload_files=[_UploadFileStub(id="file-1", name="img.png")])
monkeypatch.setattr(vector_service_module, "db", db_mock)
binding_ctor = MagicMock(side_effect=lambda **kwargs: kwargs)
binding_ctor = MagicMock(side_effect=_identity_kwargs)
monkeypatch.setattr(vector_service_module, "SegmentAttachmentBinding", binding_ctor)
logger_mock = MagicMock()
@@ -670,9 +676,7 @@ def test_update_multimodel_vector_updates_bindings_without_multimodal_vector_ops
monkeypatch.setattr(vector_service_module, "Vector", MagicMock(return_value=vector_instance))
db_mock = _mock_db_session_for_update_multimodel(upload_files=[_UploadFileStub(id="file-1", name="img.png")])
monkeypatch.setattr(vector_service_module, "db", db_mock)
monkeypatch.setattr(
vector_service_module, "SegmentAttachmentBinding", MagicMock(side_effect=lambda **kwargs: kwargs)
)
monkeypatch.setattr(vector_service_module, "SegmentAttachmentBinding", MagicMock(side_effect=_identity_kwargs))
VectorService.update_multimodel_vector(segment=segment, attachment_ids=["file-1"], dataset=dataset)
@@ -691,9 +695,7 @@ def test_update_multimodel_vector_rolls_back_and_reraises_on_error(monkeypatch:
db_mock = _mock_db_session_for_update_multimodel(upload_files=[_UploadFileStub(id="file-1", name="img.png")])
db_mock.session.commit.side_effect = RuntimeError("boom")
monkeypatch.setattr(vector_service_module, "db", db_mock)
monkeypatch.setattr(
vector_service_module, "SegmentAttachmentBinding", MagicMock(side_effect=lambda **kwargs: kwargs)
)
monkeypatch.setattr(vector_service_module, "SegmentAttachmentBinding", MagicMock(side_effect=_identity_kwargs))
logger_mock = MagicMock()
monkeypatch.setattr(vector_service_module, "logger", logger_mock)
@@ -703,3 +705,101 @@ def test_update_multimodel_vector_rolls_back_and_reraises_on_error(monkeypatch:
logger_mock.exception.assert_called_once()
db_mock.session.rollback.assert_called_once()
def test_vector_create_normalizes_child_documents() -> None:
dataset = _make_dataset()
documents = [ChildDocument(page_content="Child content", metadata={"doc_id": "child-1", "dataset_id": "dataset-1"})]
mock_embeddings = Mock()
mock_embeddings.embed_documents.return_value = [[0.1] * 1536]
mock_vector_processor = Mock()
with (
patch.object(Vector, "_get_embeddings", return_value=mock_embeddings),
patch.object(Vector, "_init_vector", return_value=mock_vector_processor),
):
vector = Vector(dataset=dataset)
vector.create(texts=documents)
normalized_document = mock_vector_processor.create.call_args.kwargs["texts"][0]
assert isinstance(normalized_document, Document)
assert normalized_document.page_content == "Child content"
assert normalized_document.metadata["doc_id"] == "child-1"
@patch("core.rag.datasource.vdb.vector_factory.storage")
@patch("core.rag.datasource.vdb.vector_factory.db")
def test_vector_create_multimodal_normalizes_attachment_documents(
mock_db: Mock,
mock_storage: Mock,
) -> None:
dataset = _make_dataset()
file_document = AttachmentDocument(
page_content="Attachment content",
provider="custom-provider",
metadata={"doc_id": "file-1", "doc_type": "image/png"},
)
upload_file = Mock(id="file-1", key="upload-key")
mock_scalars = Mock()
mock_scalars.all.return_value = [upload_file]
mock_db.session.scalars.return_value = mock_scalars
mock_storage.load_once.return_value = b"binary-content"
mock_embeddings = Mock()
mock_embeddings.embed_multimodal_documents.return_value = [[0.2] * 1536]
mock_vector_processor = Mock()
with (
patch.object(Vector, "_get_embeddings", return_value=mock_embeddings),
patch.object(Vector, "_init_vector", return_value=mock_vector_processor),
):
vector = Vector(dataset=dataset)
vector.create_multimodal(file_documents=[file_document])
normalized_document = mock_vector_processor.create.call_args.kwargs["texts"][0]
assert isinstance(normalized_document, Document)
assert normalized_document.provider == "custom-provider"
assert normalized_document.metadata["doc_id"] == "file-1"
@patch("core.rag.datasource.vdb.vector_factory.storage")
@patch("core.rag.datasource.vdb.vector_factory.db")
def test_vector_create_multimodal_falls_back_to_dify_provider_when_attachment_provider_is_none(
mock_db: Mock,
mock_storage: Mock,
) -> None:
dataset = _make_dataset()
file_document = AttachmentDocument(
page_content="Attachment content",
provider=None,
metadata={"doc_id": "file-1", "doc_type": "image/png"},
)
upload_file = Mock(id="file-1", key="upload-key")
mock_scalars = Mock()
mock_scalars.all.return_value = [upload_file]
mock_db.session.scalars.return_value = mock_scalars
mock_storage.load_once.return_value = b"binary-content"
mock_embeddings = Mock()
mock_embeddings.embed_multimodal_documents.return_value = [[0.2] * 1536]
mock_vector_processor = Mock()
with (
patch.object(Vector, "_get_embeddings", return_value=mock_embeddings),
patch.object(Vector, "_init_vector", return_value=mock_vector_processor),
):
vector = Vector(dataset=dataset)
vector.create_multimodal(file_documents=[file_document])
normalized_document = mock_vector_processor.create.call_args.kwargs["texts"][0]
assert isinstance(normalized_document, Document)
assert normalized_document.provider == "dify"

View File

@@ -1,350 +0,0 @@
import type { ReactNode } from 'react'
import { render, screen, waitFor } from '@testing-library/react'
import WorkflowApp from '../index'
const mockSetTriggerStatuses = vi.fn()
const mockSetInputs = vi.fn()
const mockSetShowInputsPanel = vi.fn()
const mockSetShowDebugAndPreviewPanel = vi.fn()
const mockWorkflowStoreSetState = vi.fn()
const mockDebouncedCancel = vi.fn()
const mockFetchRunDetail = vi.fn()
const mockInitialNodes = vi.fn()
const mockInitialEdges = vi.fn()
const mockGetWorkflowRunAndTraceUrl = vi.fn()
let appStoreState: {
appDetail?: {
id: string
mode: string
}
}
let workflowInitState: {
data: {
graph: {
nodes: Array<Record<string, unknown>>
edges: Array<Record<string, unknown>>
viewport: { x: number, y: number, zoom: number }
}
features: Record<string, unknown>
} | null
isLoading: boolean
fileUploadConfigResponse: Record<string, unknown> | null
}
let appContextState: {
isLoadingCurrentWorkspace: boolean
currentWorkspace: {
id?: string
}
}
let appTriggersState: {
data?: {
data: Array<{
node_id: string
status: string
}>
}
}
let searchParamsValue: string | null = null
const mockWorkflowStore = {
setState: mockWorkflowStoreSetState,
getState: () => ({
setInputs: mockSetInputs,
setShowInputsPanel: mockSetShowInputsPanel,
setShowDebugAndPreviewPanel: mockSetShowDebugAndPreviewPanel,
debouncedSyncWorkflowDraft: {
cancel: mockDebouncedCancel,
},
}),
}
vi.mock('@/app/components/app/store', () => ({
useStore: <T,>(selector: (state: typeof appStoreState) => T) => selector(appStoreState),
}))
vi.mock('@/app/components/workflow/store', () => ({
useWorkflowStore: () => mockWorkflowStore,
}))
vi.mock('@/app/components/workflow/store/trigger-status', () => ({
useTriggerStatusStore: () => ({
setTriggerStatuses: mockSetTriggerStatuses,
}),
}))
vi.mock('@/context/app-context', () => ({
useAppContext: () => appContextState,
}))
vi.mock('@/next/navigation', () => ({
useSearchParams: () => ({
get: (key: string) => (key === 'replayRunId' ? searchParamsValue : null),
}),
}))
vi.mock('@/service/log', () => ({
fetchRunDetail: (...args: unknown[]) => mockFetchRunDetail(...args),
}))
vi.mock('@/service/use-tools', () => ({
useAppTriggers: () => appTriggersState,
}))
vi.mock('@/app/components/workflow-app/hooks/use-workflow-init', () => ({
useWorkflowInit: () => workflowInitState,
}))
vi.mock('@/app/components/workflow-app/hooks/use-get-run-and-trace-url', () => ({
useGetRunAndTraceUrl: () => ({
getWorkflowRunAndTraceUrl: mockGetWorkflowRunAndTraceUrl,
}),
}))
vi.mock('@/app/components/workflow/utils', async (importOriginal) => {
const actual = await importOriginal<typeof import('@/app/components/workflow/utils')>()
return {
...actual,
initialNodes: (...args: unknown[]) => mockInitialNodes(...args),
initialEdges: (...args: unknown[]) => mockInitialEdges(...args),
}
})
vi.mock('@/app/components/base/loading', () => ({
default: () => <div data-testid="loading">loading</div>,
}))
vi.mock('@/app/components/base/features', () => ({
FeaturesProvider: ({
features,
children,
}: {
features: Record<string, unknown>
children: ReactNode
}) => (
<div data-testid="features-provider" data-features={JSON.stringify(features)}>
{children}
</div>
),
}))
vi.mock('@/app/components/workflow', () => ({
default: ({
nodes,
edges,
children,
}: {
nodes: Array<Record<string, unknown>>
edges: Array<Record<string, unknown>>
children: ReactNode
}) => (
<div data-testid="workflow-default-context" data-nodes={JSON.stringify(nodes)} data-edges={JSON.stringify(edges)}>
{children}
</div>
),
}))
vi.mock('@/app/components/workflow/context', () => ({
WorkflowContextProvider: ({
children,
}: {
injectWorkflowStoreSliceFn: unknown
children: ReactNode
}) => (
<div data-testid="workflow-context-provider">{children}</div>
),
}))
vi.mock('@/app/components/workflow-app/components/workflow-main', () => ({
default: ({
nodes,
edges,
viewport,
}: {
nodes: Array<Record<string, unknown>>
edges: Array<Record<string, unknown>>
viewport: Record<string, unknown>
}) => (
<div
data-testid="workflow-app-main"
data-nodes={JSON.stringify(nodes)}
data-edges={JSON.stringify(edges)}
data-viewport={JSON.stringify(viewport)}
/>
),
}))
describe('WorkflowApp', () => {
beforeEach(() => {
vi.clearAllMocks()
appStoreState = {
appDetail: {
id: 'app-1',
mode: 'workflow',
},
}
workflowInitState = {
data: {
graph: {
nodes: [{ id: 'raw-node' }],
edges: [{ id: 'raw-edge' }],
viewport: { x: 1, y: 2, zoom: 3 },
},
features: {
file_upload: {
enabled: true,
},
},
},
isLoading: false,
fileUploadConfigResponse: { enabled: true },
}
appContextState = {
isLoadingCurrentWorkspace: false,
currentWorkspace: { id: 'workspace-1' },
}
appTriggersState = {}
searchParamsValue = null
mockFetchRunDetail.mockResolvedValue({ inputs: null })
mockInitialNodes.mockReturnValue([{ id: 'node-1' }])
mockInitialEdges.mockReturnValue([{ id: 'edge-1' }])
mockGetWorkflowRunAndTraceUrl.mockReturnValue({ runUrl: '/runs/run-1' })
})
it('should render the loading shell while workflow data is still loading', () => {
workflowInitState = {
data: null,
isLoading: true,
fileUploadConfigResponse: null,
}
render(<WorkflowApp />)
expect(screen.getByTestId('loading')).toBeInTheDocument()
expect(screen.queryByTestId('workflow-app-main')).not.toBeInTheDocument()
})
it('should render the workflow app shell and sync trigger statuses when data is ready', () => {
appTriggersState = {
data: {
data: [
{ node_id: 'trigger-enabled', status: 'enabled' },
{ node_id: 'trigger-disabled', status: 'paused' },
],
},
}
render(<WorkflowApp />)
expect(screen.getByTestId('workflow-context-provider')).toBeInTheDocument()
expect(screen.getByTestId('workflow-default-context')).toHaveAttribute('data-nodes', JSON.stringify([{ id: 'node-1' }]))
expect(screen.getByTestId('workflow-default-context')).toHaveAttribute('data-edges', JSON.stringify([{ id: 'edge-1' }]))
expect(screen.getByTestId('workflow-app-main')).toHaveAttribute('data-viewport', JSON.stringify({ x: 1, y: 2, zoom: 3 }))
expect(screen.getByTestId('features-provider')).toBeInTheDocument()
expect(mockSetTriggerStatuses).toHaveBeenCalledWith({
'trigger-enabled': 'enabled',
'trigger-disabled': 'disabled',
})
})
it('should not sync trigger statuses when trigger data is unavailable', () => {
render(<WorkflowApp />)
expect(screen.getByTestId('workflow-app-main')).toBeInTheDocument()
expect(mockSetTriggerStatuses).not.toHaveBeenCalled()
})
it('should replay workflow inputs from replayRunId and clean up workflow state on unmount', async () => {
searchParamsValue = 'run-1'
mockFetchRunDetail.mockResolvedValue({
inputs: '{"sys.query":"hidden","foo":"bar","count":2,"flag":true,"obj":{"nested":true},"nil":null}',
})
const { unmount } = render(<WorkflowApp />)
await waitFor(() => {
expect(mockFetchRunDetail).toHaveBeenCalledWith('/runs/run-1')
expect(mockSetInputs).toHaveBeenCalledWith({
foo: 'bar',
count: 2,
flag: true,
obj: '{"nested":true}',
nil: '',
})
expect(mockSetShowInputsPanel).toHaveBeenCalledWith(true)
expect(mockSetShowDebugAndPreviewPanel).toHaveBeenCalledWith(true)
})
unmount()
expect(mockWorkflowStoreSetState).toHaveBeenCalledWith({ isWorkflowDataLoaded: false })
expect(mockDebouncedCancel).toHaveBeenCalled()
})
it('should skip replay lookups when replayRunId is missing', () => {
render(<WorkflowApp />)
expect(mockGetWorkflowRunAndTraceUrl).not.toHaveBeenCalled()
expect(mockFetchRunDetail).not.toHaveBeenCalled()
expect(mockSetInputs).not.toHaveBeenCalled()
})
it('should skip replay fetches when the resolved run url is empty', async () => {
searchParamsValue = 'run-1'
mockGetWorkflowRunAndTraceUrl.mockReturnValue({ runUrl: '' })
render(<WorkflowApp />)
await waitFor(() => {
expect(mockGetWorkflowRunAndTraceUrl).toHaveBeenCalledWith('run-1')
})
expect(mockFetchRunDetail).not.toHaveBeenCalled()
expect(mockSetInputs).not.toHaveBeenCalled()
})
it('should stop replay recovery when workflow run inputs cannot be parsed', async () => {
const consoleErrorSpy = vi.spyOn(console, 'error').mockImplementation(() => {})
searchParamsValue = 'run-1'
mockFetchRunDetail.mockResolvedValue({
inputs: '{invalid-json}',
})
render(<WorkflowApp />)
await waitFor(() => {
expect(mockFetchRunDetail).toHaveBeenCalledWith('/runs/run-1')
})
expect(consoleErrorSpy).toHaveBeenCalledWith(
'Failed to parse workflow run inputs',
expect.any(Error),
)
expect(mockSetInputs).not.toHaveBeenCalled()
expect(mockSetShowInputsPanel).not.toHaveBeenCalled()
expect(mockSetShowDebugAndPreviewPanel).not.toHaveBeenCalled()
consoleErrorSpy.mockRestore()
})
it('should ignore replay inputs when they only contain sys variables', async () => {
searchParamsValue = 'run-1'
mockFetchRunDetail.mockResolvedValue({
inputs: '{"sys.query":"hidden","sys.user_id":"u-1"}',
})
render(<WorkflowApp />)
await waitFor(() => {
expect(mockFetchRunDetail).toHaveBeenCalledWith('/runs/run-1')
})
expect(mockSetInputs).not.toHaveBeenCalled()
expect(mockSetShowInputsPanel).not.toHaveBeenCalled()
expect(mockSetShowDebugAndPreviewPanel).not.toHaveBeenCalled()
})
})

View File

@@ -1,90 +0,0 @@
import { SupportUploadFileTypes } from '@/app/components/workflow/types'
import { TransferMethod } from '@/types/app'
import {
buildInitialFeatures,
buildTriggerStatusMap,
coerceReplayUserInputs,
} from '../utils'
// Unit tests for the pure helpers in workflow-app utils; no React rendering involved.
describe('workflow-app utils', () => {
// Any status other than the literal 'enabled' must normalize to 'disabled'.
it('should map trigger statuses to enabled and disabled states', () => {
expect(buildTriggerStatusMap([
{ node_id: 'node-1', status: 'enabled' },
{ node_id: 'node-2', status: 'disabled' },
{ node_id: 'node-3', status: 'paused' },
])).toEqual({
'node-1': 'enabled',
'node-2': 'disabled',
'node-3': 'disabled',
})
})
// 'sys.*' keys are dropped, null is coerced to '', object values are
// JSON-stringified, and non-object inputs are rejected with null.
it('should coerce replay run inputs, omit sys keys, and stringify complex values', () => {
expect(coerceReplayUserInputs({
'sys.query': 'hidden',
'query': 'hello',
'count': 3,
'enabled': true,
'nullable': null,
'metadata': { nested: true },
})).toEqual({
query: 'hello',
count: 3,
enabled: true,
nullable: '',
metadata: '{"nested":true}',
})
expect(coerceReplayUserInputs('invalid')).toBeNull()
expect(coerceReplayUserInputs(null)).toBeNull()
})
// Verifies the API-shaped feature payload (speech_to_text, retriever_resource, ...)
// is remapped onto the store's internal feature keys (speech2text, citation, ...).
it('should build initial features with file-upload and feature fallbacks', () => {
const result = buildInitialFeatures({
file_upload: {
enabled: true,
allowed_file_types: [SupportUploadFileTypes.image],
allowed_file_extensions: ['.png'],
allowed_file_upload_methods: [TransferMethod.local_file],
number_limits: 2,
image: {
enabled: true,
number_limits: 5,
transfer_methods: [TransferMethod.remote_url],
},
},
opening_statement: 'hello',
suggested_questions: ['Q1'],
suggested_questions_after_answer: { enabled: true },
speech_to_text: { enabled: true },
text_to_speech: { enabled: true },
retriever_resource: { enabled: true },
sensitive_word_avoidance: { enabled: true },
}, { enabled: true } as never) // second arg becomes file.fileUploadConfig below
expect(result).toMatchObject({
file: {
enabled: true,
allowed_file_types: [SupportUploadFileTypes.image],
allowed_file_extensions: ['.png'],
allowed_file_upload_methods: [TransferMethod.local_file],
number_limits: 2,
fileUploadConfig: { enabled: true },
image: {
enabled: true,
number_limits: 5,
transfer_methods: [TransferMethod.remote_url],
},
},
opening: {
enabled: true,
opening_statement: 'hello',
suggested_questions: ['Q1'],
},
suggested: { enabled: true },
speech2text: { enabled: true },
text2speech: { enabled: true },
citation: { enabled: true },
moderation: { enabled: true },
})
})
})

View File

@@ -1,494 +0,0 @@
import { act, render, screen } from '@testing-library/react'
import userEvent from '@testing-library/user-event'
import * as React from 'react'
import { DSL_EXPORT_CHECK } from '@/app/components/workflow/constants'
import { BlockEnum } from '@/app/components/workflow/types'
import WorkflowChildren from '../workflow-children'
// ---- Test doubles and shared mutable state for the WorkflowChildren suite ----
// Shape of the workflow store slice the component reads/writes in these tests.
type WorkflowStoreState = {
showFeaturesPanel: boolean
showImportDSLModal: boolean
setShowImportDSLModal: (show: boolean) => void
showOnboarding: boolean
setShowOnboarding: (show: boolean) => void
setHasSelectedStartNode: (selected: boolean) => void
setShouldAutoOpenStartNodeSelector: (open: boolean) => void
}
// Payload handed to onSelectStartNode when a trigger plugin is chosen.
type TriggerPluginConfig = {
plugin_id: string
provider_name: string
provider_type: string
event_name: string
event_label: string
event_description: string
output_schema: Record<string, unknown>
paramSchemas: Array<Record<string, unknown>>
params: Record<string, unknown>
subscription_id: string
plugin_unique_identifier: string
is_team_authorization: boolean
meta?: Record<string, unknown>
}
// Spies shared across mocks; reset in beforeEach via vi.clearAllMocks().
const mockSetShowImportDSLModal = vi.fn()
const mockSetShowOnboarding = vi.fn()
const mockSetHasSelectedStartNode = vi.fn()
const mockSetShouldAutoOpenStartNodeSelector = vi.fn()
const mockSetNodes = vi.fn()
const mockSetEdges = vi.fn()
const mockHandleSyncWorkflowDraft = vi.fn()
const mockHandleOnboardingClose = vi.fn()
const mockHandlePaneContextmenuCancel = vi.fn()
const mockHandleExportDSL = vi.fn()
const mockExportCheck = vi.fn()
const mockAutoGenerateWebhookUrl = vi.fn()
// Mutable state the mocks below read; re-initialised per test in beforeEach.
let workflowStoreState: WorkflowStoreState
// Captured event-emitter subscription callback so tests can fire events manually.
let eventSubscription: ((value: { type: string, payload: { data: Array<Record<string, unknown>> } }) => void) | null = null
// Captures the last argument passed to the mocked generateNewNode.
let lastGenerateNodeInput: Record<string, unknown> | null = null
// reactflow store: only setNodes/setEdges are exercised by the component.
vi.mock('reactflow', () => ({
useStoreApi: () => ({
getState: () => ({
setNodes: mockSetNodes,
setEdges: mockSetEdges,
}),
}),
}))
// Route store selectors through the mutable workflowStoreState above.
vi.mock('@/app/components/workflow/store', () => ({
useStore: <T,>(selector: (state: WorkflowStoreState) => T) => selector(workflowStoreState),
}))
// Expose the subscription callback so tests can emit synthetic events.
vi.mock('@/context/event-emitter', () => ({
useEventEmitterContextContext: () => ({
eventEmitter: {
useSubscription: (callback: typeof eventSubscription) => {
eventSubscription = callback
},
},
}),
}))
vi.mock('@/app/components/workflow/hooks', () => ({
useAutoGenerateWebhookUrl: () => mockAutoGenerateWebhookUrl,
useDSL: () => ({
exportCheck: mockExportCheck,
handleExportDSL: mockHandleExportDSL,
}),
usePanelInteractions: () => ({
handlePaneContextmenuCancel: mockHandlePaneContextmenuCancel,
}),
}))
vi.mock('@/app/components/workflow/hooks/use-nodes-sync-draft', () => ({
useNodesSyncDraft: () => ({
handleSyncWorkflowDraft: mockHandleSyncWorkflowDraft,
}),
}))
// Keep the real utils module but intercept generateNewNode to record its input
// and return a deterministic node id.
vi.mock('@/app/components/workflow/utils', async (importOriginal) => {
const actual = await importOriginal<typeof import('@/app/components/workflow/utils')>()
return {
...actual,
generateNewNode: (args: Record<string, unknown>) => {
lastGenerateNodeInput = args
return {
newNode: {
id: 'new-node-id',
position: args.position,
data: args.data,
},
}
},
}
})
// Default node data used when a Start or TriggerPlugin node is created.
vi.mock('@/app/components/workflow-app/hooks', () => ({
useAvailableNodesMetaData: () => ({
nodesMap: {
[BlockEnum.Start]: {
defaultValue: {
title: 'Start Title',
desc: 'Start description',
config: {
image: false,
},
},
},
[BlockEnum.TriggerPlugin]: {
defaultValue: {
title: 'Plugin title',
desc: 'Plugin description',
config: {
baseConfig: 'base',
},
},
},
},
}),
}))
vi.mock('@/app/components/workflow-app/hooks/use-auto-onboarding', () => ({
useAutoOnboarding: () => ({
handleOnboardingClose: mockHandleOnboardingClose,
}),
}))
// Presentational children replaced with testid stubs.
vi.mock('@/app/components/workflow/plugin-dependency', () => ({
default: () => <div data-testid="plugin-dependency">plugin-dependency</div>,
}))
vi.mock('@/app/components/workflow-app/components/workflow-header', () => ({
default: () => <div data-testid="workflow-header">workflow-header</div>,
}))
vi.mock('@/app/components/workflow-app/components/workflow-panel', () => ({
default: () => <div data-testid="workflow-panel">workflow-panel</div>,
}))
// next/dynamic shim: resolves the loader in an effect and renders the loaded
// component, so dynamically-imported panels appear asynchronously (findBy*).
vi.mock('@/next/dynamic', async () => {
const ReactModule = await import('react')
return {
default: (
loader: () => Promise<{ default: React.ComponentType<Record<string, unknown>> }>,
) => {
const DynamicComponent = (props: Record<string, unknown>) => {
const [Loaded, setLoaded] = ReactModule.useState<React.ComponentType<Record<string, unknown>> | null>(null)
ReactModule.useEffect(() => {
let mounted = true
loader().then((mod) => {
if (mounted)
setLoaded(() => mod.default)
})
return () => {
mounted = false
}
}, [])
return Loaded ? <Loaded {...props} /> : null
}
return DynamicComponent
},
}
})
vi.mock('@/app/components/workflow/features', () => ({
default: () => <div data-testid="workflow-features">features</div>,
}))
// Modal stubs expose their callbacks as buttons so tests can drive them.
vi.mock('@/app/components/workflow/update-dsl-modal', () => ({
default: ({
onCancel,
onBackup,
onImport,
}: {
onCancel: () => void
onBackup: () => void
onImport: () => void
}) => (
<div data-testid="update-dsl-modal">
<button type="button" onClick={onCancel}>cancel-import-dsl</button>
<button type="button" onClick={onBackup}>backup-dsl</button>
<button type="button" onClick={onImport}>import-dsl</button>
</div>
),
}))
vi.mock('@/app/components/workflow/dsl-export-confirm-modal', () => ({
default: ({
envList,
onConfirm,
onClose,
}: {
envList: Array<Record<string, unknown>>
onConfirm: () => void
onClose: () => void
}) => (
<div data-testid="dsl-export-confirm-modal" data-env-count={String(envList.length)}>
<button type="button" onClick={onConfirm}>confirm-export-dsl</button>
<button type="button" onClick={onClose}>close-export-dsl</button>
</div>
),
}))
// Onboarding modal stub: each button invokes onSelectStartNode with a
// different fixture (plain start, configured start, full trigger plugin,
// and a trigger plugin missing labels to exercise fallbacks).
vi.mock('@/app/components/workflow-app/components/workflow-onboarding-modal', () => ({
default: ({
onClose,
onSelectStartNode,
}: {
isShow: boolean
onClose: () => void
onSelectStartNode: (nodeType: BlockEnum, config?: TriggerPluginConfig) => void
}) => (
<div data-testid="workflow-onboarding-modal">
<button type="button" onClick={onClose}>close-onboarding</button>
<button type="button" onClick={() => onSelectStartNode(BlockEnum.Start)}>select-start-node</button>
<button
type="button"
onClick={() => onSelectStartNode(BlockEnum.Start, {
title: 'Configured Start Title',
desc: 'Configured Start Description',
config: { image: true, custom: 'config' },
extra: 'field',
} as never)}
>
select-start-node-with-config
</button>
<button
type="button"
onClick={() => onSelectStartNode(BlockEnum.TriggerPlugin, {
plugin_id: 'plugin-id',
provider_name: 'provider-name',
provider_type: 'tool',
event_name: 'event-name',
event_label: 'Event Label',
event_description: 'Event Description',
output_schema: { output: true },
paramSchemas: [{ name: 'api_key' }],
params: { token: 'abc' },
subscription_id: 'subscription-id',
plugin_unique_identifier: 'plugin-unique',
is_team_authorization: true,
meta: { source: 'plugin' },
})}
>
select-trigger-plugin
</button>
<button
type="button"
onClick={() => onSelectStartNode(BlockEnum.TriggerPlugin, {
plugin_id: 'plugin-id-2',
provider_name: 'provider-name-2',
provider_type: 'tool',
event_name: 'event-name-2',
event_label: '',
event_description: '',
output_schema: {},
paramSchemas: undefined,
params: {},
subscription_id: 'subscription-id-2',
plugin_unique_identifier: 'plugin-unique-2',
is_team_authorization: false,
} as never)}
>
select-trigger-plugin-fallback
</button>
</div>
),
}))
describe('WorkflowChildren', () => {
beforeEach(() => {
vi.clearAllMocks()
// Fresh store state per test; individual tests override flags as needed.
workflowStoreState = {
showFeaturesPanel: false,
showImportDSLModal: false,
setShowImportDSLModal: mockSetShowImportDSLModal,
showOnboarding: false,
setShowOnboarding: mockSetShowOnboarding,
setHasSelectedStartNode: mockSetHasSelectedStartNode,
setShouldAutoOpenStartNodeSelector: mockSetShouldAutoOpenStartNodeSelector,
}
eventSubscription = null
lastGenerateNodeInput = null
// Simulate a successful draft sync so onSuccess callbacks run synchronously.
mockHandleSyncWorkflowDraft.mockImplementation((_force?: boolean, _notRefresh?: boolean, callback?: { onSuccess?: () => void }) => {
callback?.onSuccess?.()
})
})
it('should render feature panel, import modal actions, and default workflow chrome', async () => {
const user = userEvent.setup()
workflowStoreState = {
...workflowStoreState,
showFeaturesPanel: true,
showImportDSLModal: true,
}
render(<WorkflowChildren />)
expect(screen.getByTestId('plugin-dependency')).toBeInTheDocument()
expect(screen.getByTestId('workflow-header')).toBeInTheDocument()
expect(screen.getByTestId('workflow-panel')).toBeInTheDocument()
// Features panel is dynamically imported, so it appears asynchronously.
expect(await screen.findByTestId('workflow-features')).toBeInTheDocument()
expect(screen.getByTestId('update-dsl-modal')).toBeInTheDocument()
await user.click(screen.getByRole('button', { name: /cancel-import-dsl/i }))
await user.click(screen.getByRole('button', { name: /backup-dsl/i }))
await user.click(screen.getByRole('button', { name: /^import-dsl$/i }))
expect(mockSetShowImportDSLModal).toHaveBeenCalledWith(false)
expect(mockExportCheck).toHaveBeenCalled()
expect(mockHandlePaneContextmenuCancel).toHaveBeenCalled()
})
it('should react to DSL export check events by showing the confirm modal and closing it', async () => {
const user = userEvent.setup()
render(<WorkflowChildren />)
// Fire the captured subscription callback as if the emitter dispatched it.
await act(async () => {
eventSubscription?.({
type: DSL_EXPORT_CHECK,
payload: {
data: [{ id: 'env-1' }, { id: 'env-2' }],
},
})
})
expect(await screen.findByTestId('dsl-export-confirm-modal')).toHaveAttribute('data-env-count', '2')
await user.click(screen.getByRole('button', { name: /confirm-export-dsl/i }))
await user.click(screen.getByRole('button', { name: /close-export-dsl/i }))
expect(mockHandleExportDSL).toHaveBeenCalled()
expect(screen.queryByTestId('dsl-export-confirm-modal')).not.toBeInTheDocument()
})
it('should ignore unrelated workflow events when listening for DSL export checks', async () => {
render(<WorkflowChildren />)
await act(async () => {
eventSubscription?.({
type: 'UNRELATED_EVENT',
payload: {
data: [{ id: 'env-1' }],
},
})
})
expect(screen.queryByTestId('dsl-export-confirm-modal')).not.toBeInTheDocument()
})
it('should close onboarding through the onboarding hook callback', async () => {
const user = userEvent.setup()
workflowStoreState = {
...workflowStoreState,
showOnboarding: true,
}
render(<WorkflowChildren />)
expect(await screen.findByTestId('workflow-onboarding-modal')).toBeInTheDocument()
await user.click(screen.getByRole('button', { name: /close-onboarding/i }))
expect(mockHandleOnboardingClose).toHaveBeenCalled()
})
it('should create a start node, sync draft, and auto-generate webhook url after selecting a start node', async () => {
const user = userEvent.setup()
workflowStoreState = {
...workflowStoreState,
showOnboarding: true,
}
render(<WorkflowChildren />)
await user.click(await screen.findByRole('button', { name: /^select-start-node$/i }))
// Node data comes from the mocked availableNodesMetaData Start defaults.
expect(lastGenerateNodeInput).toMatchObject({
data: {
title: 'Start Title',
desc: 'Start description',
config: {
image: false,
},
},
})
expect(mockSetNodes).toHaveBeenCalledWith([expect.objectContaining({ id: 'new-node-id' })])
expect(mockSetEdges).toHaveBeenCalledWith([])
expect(mockSetShowOnboarding).toHaveBeenCalledWith(false)
expect(mockSetHasSelectedStartNode).toHaveBeenCalledWith(true)
expect(mockSetShouldAutoOpenStartNodeSelector).toHaveBeenCalledWith(true)
expect(mockHandleSyncWorkflowDraft).toHaveBeenCalledWith(true, false, expect.any(Object))
// Webhook URL generation happens in the sync success callback (see beforeEach).
expect(mockAutoGenerateWebhookUrl).toHaveBeenCalledWith('new-node-id')
})
it('should merge non-trigger start node config directly into the default node data', async () => {
const user = userEvent.setup()
workflowStoreState = {
...workflowStoreState,
showOnboarding: true,
}
render(<WorkflowChildren />)
await user.click(await screen.findByRole('button', { name: /select-start-node-with-config/i }))
expect(lastGenerateNodeInput).toMatchObject({
data: {
title: 'Configured Start Title',
desc: 'Configured Start Description',
config: {
image: true,
custom: 'config',
},
extra: 'field',
},
})
})
it('should merge trigger plugin defaults and config before creating the node', async () => {
const user = userEvent.setup()
workflowStoreState = {
...workflowStoreState,
showOnboarding: true,
}
render(<WorkflowChildren />)
await user.click(await screen.findByRole('button', { name: /^select-trigger-plugin$/i }))
// Trigger plugin data: fixture fields merged over TriggerPlugin defaults;
// event_label/description double as title/desc, params merge into config.
expect(lastGenerateNodeInput).toMatchObject({
data: {
plugin_id: 'plugin-id',
provider_id: 'provider-name',
provider_name: 'provider-name',
provider_type: 'tool',
event_name: 'event-name',
event_label: 'Event Label',
event_description: 'Event Description',
title: 'Event Label',
desc: 'Event Description',
output_schema: { output: true },
parameters_schema: [{ name: 'api_key' }],
config: {
baseConfig: 'base',
token: 'abc',
},
subscription_id: 'subscription-id',
plugin_unique_identifier: 'plugin-unique',
is_team_authorization: true,
meta: { source: 'plugin' },
},
})
})
it('should fall back to plugin default title and description when trigger labels are missing', async () => {
const user = userEvent.setup()
workflowStoreState = {
...workflowStoreState,
showOnboarding: true,
}
render(<WorkflowChildren />)
await user.click(await screen.findByRole('button', { name: /select-trigger-plugin-fallback/i }))
// Empty labels and undefined paramSchemas fall back to defaults / [].
expect(lastGenerateNodeInput).toMatchObject({
data: {
title: 'Plugin title',
desc: 'Plugin description',
parameters_schema: [],
config: {
baseConfig: 'base',
},
},
})
})
})

View File

@@ -1,277 +0,0 @@
import type { ReactNode } from 'react'
import type { WorkflowProps } from '@/app/components/workflow'
import { fireEvent, render, screen } from '@testing-library/react'
import WorkflowMain from '../workflow-main'
// ---- Test doubles for the WorkflowMain suite ----
const mockSetFeatures = vi.fn()
const mockSetConversationVariables = vi.fn()
const mockSetEnvironmentVariables = vi.fn()
// One named spy per hook function so hooksStore wiring can be asserted by identity.
const hookFns = {
doSyncWorkflowDraft: vi.fn(),
syncWorkflowDraftWhenPageClose: vi.fn(),
handleRefreshWorkflowDraft: vi.fn(),
handleBackupDraft: vi.fn(),
handleLoadBackupDraft: vi.fn(),
handleRestoreFromPublishedWorkflow: vi.fn(),
handleRun: vi.fn(),
handleStopRun: vi.fn(),
handleStartWorkflowRun: vi.fn(),
handleWorkflowStartRunInChatflow: vi.fn(),
handleWorkflowStartRunInWorkflow: vi.fn(),
handleWorkflowTriggerScheduleRunInWorkflow: vi.fn(),
handleWorkflowTriggerWebhookRunInWorkflow: vi.fn(),
handleWorkflowTriggerPluginRunInWorkflow: vi.fn(),
handleWorkflowRunAllTriggersInWorkflow: vi.fn(),
getWorkflowRunAndTraceUrl: vi.fn(),
exportCheck: vi.fn(),
handleExportDSL: vi.fn(),
fetchInspectVars: vi.fn(),
hasNodeInspectVars: vi.fn(),
hasSetInspectVar: vi.fn(),
fetchInspectVarValue: vi.fn(),
editInspectVarValue: vi.fn(),
renameInspectVarName: vi.fn(),
appendNodeInspectVars: vi.fn(),
deleteInspectVar: vi.fn(),
deleteNodeInspectorVars: vi.fn(),
deleteAllInspectorVars: vi.fn(),
isInspectVarEdited: vi.fn(),
resetToLastRunVar: vi.fn(),
invalidateSysVarValues: vi.fn(),
resetConversationVar: vi.fn(),
invalidateConversationVarValues: vi.fn(),
}
// Props captured from the mocked WorkflowWithInnerContext render.
let capturedContextProps: Record<string, unknown> | null = null
type MockWorkflowWithInnerContextProps = Pick<WorkflowProps, 'nodes' | 'edges' | 'viewport' | 'onWorkflowDataUpdate'> & {
hooksStore?: Record<string, unknown>
children?: ReactNode
}
vi.mock('@/app/components/base/features/hooks', () => ({
useFeaturesStore: () => ({
getState: () => ({
setFeatures: mockSetFeatures,
}),
}),
}))
vi.mock('@/app/components/workflow/store', () => ({
useWorkflowStore: () => ({
getState: () => ({
setConversationVariables: mockSetConversationVariables,
setEnvironmentVariables: mockSetEnvironmentVariables,
}),
}),
}))
// Inner-context mock captures forwarded props and exposes buttons that drive
// onWorkflowDataUpdate with full / partial / empty payloads.
vi.mock('@/app/components/workflow', () => ({
WorkflowWithInnerContext: ({
nodes,
edges,
viewport,
onWorkflowDataUpdate,
hooksStore,
children,
}: MockWorkflowWithInnerContextProps) => {
capturedContextProps = {
nodes,
edges,
viewport,
hooksStore,
}
return (
<div data-testid="workflow-inner-context">
<button
type="button"
onClick={() => onWorkflowDataUpdate?.({
features: { file: { enabled: true } },
conversation_variables: [{ id: 'conversation-1' }],
environment_variables: [{ id: 'env-1' }],
})}
>
update-workflow-data
</button>
<button
type="button"
onClick={() => onWorkflowDataUpdate?.({
conversation_variables: [{ id: 'conversation-only' }],
})}
>
update-conversation-only
</button>
<button
type="button"
onClick={() => onWorkflowDataUpdate?.({})}
>
update-empty-payload
</button>
{children}
</div>
)
},
}))
// workflow-app hooks return the hookFns spies so hooksStore identity checks work.
vi.mock('@/app/components/workflow-app/hooks', () => ({
useAvailableNodesMetaData: () => ({ nodes: [{ id: 'start' }], nodesMap: { start: { id: 'start' } } }),
useConfigsMap: () => ({ flowId: 'app-1', flowType: 'app-flow', fileSettings: { enabled: true } }),
useDSL: () => ({ exportCheck: hookFns.exportCheck, handleExportDSL: hookFns.handleExportDSL }),
useGetRunAndTraceUrl: () => ({ getWorkflowRunAndTraceUrl: hookFns.getWorkflowRunAndTraceUrl }),
useInspectVarsCrud: () => ({
hasNodeInspectVars: hookFns.hasNodeInspectVars,
hasSetInspectVar: hookFns.hasSetInspectVar,
fetchInspectVarValue: hookFns.fetchInspectVarValue,
editInspectVarValue: hookFns.editInspectVarValue,
renameInspectVarName: hookFns.renameInspectVarName,
appendNodeInspectVars: hookFns.appendNodeInspectVars,
deleteInspectVar: hookFns.deleteInspectVar,
deleteNodeInspectorVars: hookFns.deleteNodeInspectorVars,
deleteAllInspectorVars: hookFns.deleteAllInspectorVars,
isInspectVarEdited: hookFns.isInspectVarEdited,
resetToLastRunVar: hookFns.resetToLastRunVar,
invalidateSysVarValues: hookFns.invalidateSysVarValues,
resetConversationVar: hookFns.resetConversationVar,
invalidateConversationVarValues: hookFns.invalidateConversationVarValues,
}),
useNodesSyncDraft: () => ({
doSyncWorkflowDraft: hookFns.doSyncWorkflowDraft,
syncWorkflowDraftWhenPageClose: hookFns.syncWorkflowDraftWhenPageClose,
}),
useSetWorkflowVarsWithValue: () => ({
fetchInspectVars: hookFns.fetchInspectVars,
}),
useWorkflowRefreshDraft: () => ({ handleRefreshWorkflowDraft: hookFns.handleRefreshWorkflowDraft }),
useWorkflowRun: () => ({
handleBackupDraft: hookFns.handleBackupDraft,
handleLoadBackupDraft: hookFns.handleLoadBackupDraft,
handleRestoreFromPublishedWorkflow: hookFns.handleRestoreFromPublishedWorkflow,
handleRun: hookFns.handleRun,
handleStopRun: hookFns.handleStopRun,
}),
useWorkflowStartRun: () => ({
handleStartWorkflowRun: hookFns.handleStartWorkflowRun,
handleWorkflowStartRunInChatflow: hookFns.handleWorkflowStartRunInChatflow,
handleWorkflowStartRunInWorkflow: hookFns.handleWorkflowStartRunInWorkflow,
handleWorkflowTriggerScheduleRunInWorkflow: hookFns.handleWorkflowTriggerScheduleRunInWorkflow,
handleWorkflowTriggerWebhookRunInWorkflow: hookFns.handleWorkflowTriggerWebhookRunInWorkflow,
handleWorkflowTriggerPluginRunInWorkflow: hookFns.handleWorkflowTriggerPluginRunInWorkflow,
handleWorkflowRunAllTriggersInWorkflow: hookFns.handleWorkflowRunAllTriggersInWorkflow,
}),
}))
vi.mock('../workflow-children', () => ({
default: () => <div data-testid="workflow-children">workflow-children</div>,
}))
describe('WorkflowMain', () => {
beforeEach(() => {
vi.clearAllMocks()
capturedContextProps = null
})
it('should render the inner workflow context with children and forwarded graph props', () => {
const nodes = [{ id: 'node-1' }]
const edges = [{ id: 'edge-1' }]
const viewport = { x: 1, y: 2, zoom: 1.5 }
render(
<WorkflowMain
nodes={nodes as never}
edges={edges as never}
viewport={viewport}
/>,
)
expect(screen.getByTestId('workflow-inner-context')).toBeInTheDocument()
expect(screen.getByTestId('workflow-children')).toBeInTheDocument()
// Graph props must be forwarded unchanged to WorkflowWithInnerContext.
expect(capturedContextProps).toMatchObject({
nodes,
edges,
viewport,
})
})
it('should update features and workflow variables when workflow data changes', () => {
render(
<WorkflowMain
nodes={[]}
edges={[]}
viewport={{ x: 0, y: 0, zoom: 1 }}
/>,
)
// Full payload: all three store setters receive their slice.
fireEvent.click(screen.getByRole('button', { name: /update-workflow-data/i }))
expect(mockSetFeatures).toHaveBeenCalledWith({ file: { enabled: true } })
expect(mockSetConversationVariables).toHaveBeenCalledWith([{ id: 'conversation-1' }])
expect(mockSetEnvironmentVariables).toHaveBeenCalledWith([{ id: 'env-1' }])
})
it('should only update the workflow store slices present in the payload', () => {
render(
<WorkflowMain
nodes={[]}
edges={[]}
viewport={{ x: 0, y: 0, zoom: 1 }}
/>,
)
fireEvent.click(screen.getByRole('button', { name: /update-conversation-only/i }))
expect(mockSetConversationVariables).toHaveBeenCalledWith([{ id: 'conversation-only' }])
expect(mockSetFeatures).not.toHaveBeenCalled()
expect(mockSetEnvironmentVariables).not.toHaveBeenCalled()
})
it('should ignore empty workflow data updates', () => {
render(
<WorkflowMain
nodes={[]}
edges={[]}
viewport={{ x: 0, y: 0, zoom: 1 }}
/>,
)
fireEvent.click(screen.getByRole('button', { name: /update-empty-payload/i }))
expect(mockSetFeatures).not.toHaveBeenCalled()
expect(mockSetConversationVariables).not.toHaveBeenCalled()
expect(mockSetEnvironmentVariables).not.toHaveBeenCalled()
})
it('should expose the composed workflow action hooks through hooksStore', () => {
render(
<WorkflowMain
nodes={[]}
edges={[]}
viewport={{ x: 0, y: 0, zoom: 1 }}
/>,
)
// Identity check: hooksStore must forward the exact spy functions from hookFns.
expect(capturedContextProps?.hooksStore).toMatchObject({
syncWorkflowDraftWhenPageClose: hookFns.syncWorkflowDraftWhenPageClose,
doSyncWorkflowDraft: hookFns.doSyncWorkflowDraft,
handleRefreshWorkflowDraft: hookFns.handleRefreshWorkflowDraft,
handleBackupDraft: hookFns.handleBackupDraft,
handleLoadBackupDraft: hookFns.handleLoadBackupDraft,
handleRestoreFromPublishedWorkflow: hookFns.handleRestoreFromPublishedWorkflow,
handleRun: hookFns.handleRun,
handleStopRun: hookFns.handleStopRun,
handleStartWorkflowRun: hookFns.handleStartWorkflowRun,
handleWorkflowStartRunInChatflow: hookFns.handleWorkflowStartRunInChatflow,
handleWorkflowStartRunInWorkflow: hookFns.handleWorkflowStartRunInWorkflow,
handleWorkflowTriggerScheduleRunInWorkflow: hookFns.handleWorkflowTriggerScheduleRunInWorkflow,
handleWorkflowTriggerWebhookRunInWorkflow: hookFns.handleWorkflowTriggerWebhookRunInWorkflow,
handleWorkflowTriggerPluginRunInWorkflow: hookFns.handleWorkflowTriggerPluginRunInWorkflow,
handleWorkflowRunAllTriggersInWorkflow: hookFns.handleWorkflowRunAllTriggersInWorkflow,
availableNodesMetaData: { nodes: [{ id: 'start' }], nodesMap: { start: { id: 'start' } } },
getWorkflowRunAndTraceUrl: hookFns.getWorkflowRunAndTraceUrl,
exportCheck: hookFns.exportCheck,
handleExportDSL: hookFns.handleExportDSL,
fetchInspectVars: hookFns.fetchInspectVars,
configsMap: { flowId: 'app-1', flowType: 'app-flow', fileSettings: { enabled: true } },
})
})
})

View File

@@ -1,214 +0,0 @@
import type { ReactNode } from 'react'
import { render, screen } from '@testing-library/react'
import userEvent from '@testing-library/user-event'
import * as React from 'react'
import WorkflowPanel from '../workflow-panel'
// ---- Test doubles and shared state for the WorkflowPanel suite ----
// Slice of the app store consumed by the panel (log modal state + app detail).
type AppStoreState = {
appDetail?: {
id?: string
workflow?: {
id?: string
}
}
currentLogItem?: { id: string }
setCurrentLogItem: (item?: { id: string }) => void
showMessageLogModal: boolean
setShowMessageLogModal: (show: boolean) => void
currentLogModalActiveTab?: string
}
// Slice of the workflow store that decides which right-hand panels render.
type WorkflowStoreState = {
historyWorkflowData?: Record<string, unknown>
showDebugAndPreviewPanel: boolean
showChatVariablePanel: boolean
showGlobalVariablePanel: boolean
}
const mockUseIsChatMode = vi.fn()
const mockSetCurrentLogItem = vi.fn()
const mockSetShowMessageLogModal = vi.fn()
// Mutable store states; re-initialised per test in beforeEach.
let appStoreState: AppStoreState
let workflowStoreState: WorkflowStoreState
vi.mock('@/app/components/app/store', () => ({
useStore: <T,>(selector: (state: AppStoreState) => T) => selector(appStoreState),
}))
vi.mock('@/app/components/workflow/store', () => ({
useStore: <T,>(selector: (state: WorkflowStoreState) => T) => selector(workflowStoreState),
}))
// Panel shell mock: surfaces the version-history URL builders and the left/right
// component slots as data attributes / testids so the suite can assert them.
vi.mock('@/app/components/workflow/panel', () => ({
default: ({
components,
versionHistoryPanelProps,
}: {
components?: {
left?: ReactNode
right?: ReactNode
}
versionHistoryPanelProps?: {
getVersionListUrl: string
deleteVersionUrl: (versionId: string) => string
restoreVersionUrl: (versionId: string) => string
updateVersionUrl: (versionId: string) => string
latestVersionId?: string
}
}) => (
<div
data-testid="panel"
data-version-list-url={versionHistoryPanelProps?.getVersionListUrl ?? ''}
data-delete-version-url={versionHistoryPanelProps?.deleteVersionUrl('version-1') ?? ''}
data-restore-version-url={versionHistoryPanelProps?.restoreVersionUrl('version-1') ?? ''}
data-update-version-url={versionHistoryPanelProps?.updateVersionUrl('version-1') ?? ''}
data-latest-version-id={versionHistoryPanelProps?.latestVersionId ?? ''}
>
<div data-testid="panel-left">{components?.left}</div>
<div data-testid="panel-right">{components?.right}</div>
</div>
),
}))
// next/dynamic shim via React.lazy + Suspense, so dynamic panels resolve async.
vi.mock('@/next/dynamic', () => ({
default: (loader: () => Promise<{ default: React.ComponentType<Record<string, unknown>> }>) => {
const LazyComp = React.lazy(loader)
return function DynamicWrapper(props: Record<string, unknown>) {
return React.createElement(
React.Suspense,
{ fallback: null },
React.createElement(LazyComp, props),
)
}
},
}))
vi.mock('@/app/components/base/message-log-modal', () => ({
default: ({
currentLogItem,
defaultTab,
onCancel,
}: {
currentLogItem?: { id: string }
defaultTab?: string
onCancel: () => void
}) => (
<div data-testid="message-log-modal" data-current-log-id={currentLogItem?.id ?? ''} data-default-tab={defaultTab ?? ''}>
<button type="button" onClick={onCancel}>close-message-log</button>
</div>
),
}))
// Right-hand panel variants replaced with testid stubs.
vi.mock('@/app/components/workflow/panel/record', () => ({
default: () => <div data-testid="record-panel">record</div>,
}))
vi.mock('@/app/components/workflow/panel/chat-record', () => ({
default: () => <div data-testid="chat-record-panel">chat-record</div>,
}))
vi.mock('@/app/components/workflow/panel/debug-and-preview', () => ({
default: () => <div data-testid="debug-and-preview-panel">debug</div>,
}))
vi.mock('@/app/components/workflow/panel/workflow-preview', () => ({
default: () => <div data-testid="workflow-preview-panel">preview</div>,
}))
vi.mock('@/app/components/workflow/panel/chat-variable-panel', () => ({
default: () => <div data-testid="chat-variable-panel">chat-variable</div>,
}))
vi.mock('@/app/components/workflow/panel/global-variable-panel', () => ({
default: () => <div data-testid="global-variable-panel">global-variable</div>,
}))
vi.mock('@/app/components/workflow-app/hooks', () => ({
useIsChatMode: () => mockUseIsChatMode(),
}))
describe('WorkflowPanel', () => {
beforeEach(() => {
vi.clearAllMocks()
// Baseline: workflow mode (not chat), no panels open, a log item selected.
appStoreState = {
appDetail: {
id: 'app-123',
workflow: {
id: 'workflow-version-id',
},
},
currentLogItem: { id: 'log-1' },
setCurrentLogItem: mockSetCurrentLogItem,
showMessageLogModal: false,
setShowMessageLogModal: mockSetShowMessageLogModal,
currentLogModalActiveTab: 'detail',
}
workflowStoreState = {
historyWorkflowData: undefined,
showDebugAndPreviewPanel: false,
showChatVariablePanel: false,
showGlobalVariablePanel: false,
}
mockUseIsChatMode.mockReturnValue(false)
})
it('should configure workflow version history urls and latest version id for the panel shell', async () => {
render(<WorkflowPanel />)
const panel = await screen.findByTestId('panel')
// URL builders must embed the app id and the requested version id.
expect(panel).toHaveAttribute('data-version-list-url', '/apps/app-123/workflows')
expect(panel).toHaveAttribute('data-delete-version-url', '/apps/app-123/workflows/version-1')
expect(panel).toHaveAttribute('data-restore-version-url', '/apps/app-123/workflows/version-1/restore')
expect(panel).toHaveAttribute('data-update-version-url', '/apps/app-123/workflows/version-1')
expect(panel).toHaveAttribute('data-latest-version-id', 'workflow-version-id')
})
it('should render and close the message log modal from the left panel slot', async () => {
const user = userEvent.setup()
appStoreState = {
...appStoreState,
showMessageLogModal: true,
}
render(<WorkflowPanel />)
expect(await screen.findByTestId('message-log-modal')).toHaveAttribute('data-current-log-id', 'log-1')
expect(screen.getByTestId('message-log-modal')).toHaveAttribute('data-default-tab', 'detail')
await user.click(screen.getByRole('button', { name: /close-message-log/i }))
// Closing clears the selected log item (called with no args) and hides the modal.
expect(mockSetCurrentLogItem).toHaveBeenCalledWith()
expect(mockSetShowMessageLogModal).toHaveBeenCalledWith(false)
})
it('should switch right-side workflow panels based on chat mode and workflow state', async () => {
// Chat mode with all flags on: chat-record + debug + chat/global variable panels.
workflowStoreState = {
historyWorkflowData: { id: 'history-1' },
showDebugAndPreviewPanel: true,
showChatVariablePanel: true,
showGlobalVariablePanel: true,
}
mockUseIsChatMode.mockReturnValue(true)
const { unmount } = render(<WorkflowPanel />)
expect(await screen.findByTestId('chat-record-panel')).toBeInTheDocument()
expect(screen.getByTestId('debug-and-preview-panel')).toBeInTheDocument()
expect(screen.getByTestId('chat-variable-panel')).toBeInTheDocument()
expect(screen.getByTestId('global-variable-panel')).toBeInTheDocument()
expect(screen.queryByTestId('record-panel')).not.toBeInTheDocument()
expect(screen.queryByTestId('workflow-preview-panel')).not.toBeInTheDocument()
unmount()
// Workflow mode with the same store state: record + preview panels instead.
mockUseIsChatMode.mockReturnValue(false)
render(<WorkflowPanel />)
expect(await screen.findByTestId('record-panel')).toBeInTheDocument()
expect(screen.getByTestId('workflow-preview-panel')).toBeInTheDocument()
expect(screen.getByTestId('global-variable-panel')).toBeInTheDocument()
expect(screen.queryByTestId('chat-record-panel')).not.toBeInTheDocument()
expect(screen.queryByTestId('debug-and-preview-panel')).not.toBeInTheDocument()
expect(screen.queryByTestId('chat-variable-panel')).not.toBeInTheDocument()
})
})

View File

@@ -149,7 +149,6 @@ const createProviderContext = ({
const renderWithToast = (ui: ReactElement) => {
return render(
// eslint-disable-next-line react/no-context-provider
<ToastContext.Provider value={{ notify: mockNotify, close: vi.fn() }}>
{ui}
</ToastContext.Provider>,
@@ -446,27 +445,6 @@ describe('FeaturesTrigger', () => {
})
})
// When the publish mutation resolves to null (no workflow version returned),
// none of the success side effects (toast, store updates, cache invalidation,
// version-history reset) may fire. NOTE(review): enclosing describe is outside
// this excerpt; mocks referenced here are defined at module scope of that file.
it('should skip success side effects when publish mutation returns no workflow version', async () => {
// Arrange
const user = userEvent.setup()
mockPublishWorkflow.mockResolvedValue(null)
renderWithToast(<FeaturesTrigger />)
// Act
await user.click(screen.getByRole('button', { name: 'publisher-publish' }))
// Assert
await waitFor(() => {
expect(mockPublishWorkflow).toHaveBeenCalled()
})
expect(mockNotify).not.toHaveBeenCalledWith({ type: 'success', message: 'common.api.actionSuccess' })
expect(mockUpdatePublishedWorkflow).not.toHaveBeenCalled()
expect(mockInvalidateAppTriggers).not.toHaveBeenCalled()
expect(mockSetPublishedAt).not.toHaveBeenCalled()
expect(mockSetLastPublishedHasUserInput).not.toHaveBeenCalled()
expect(mockResetWorkflowVersionHistory).not.toHaveBeenCalled()
})
it('should log error when app detail refresh fails after publish', async () => {
// Arrange
const user = userEvent.setup()

View File

@@ -1,18 +0,0 @@
import * as hooks from '../index'
describe('workflow-app hooks index', () => {
it('should re-export workflow-app hooks', () => {
expect(hooks.useAvailableNodesMetaData).toBeTypeOf('function')
expect(hooks.useConfigsMap).toBeTypeOf('function')
expect(hooks.useDSL).toBeTypeOf('function')
expect(hooks.useGetRunAndTraceUrl).toBeTypeOf('function')
expect(hooks.useInspectVarsCrud).toBeTypeOf('function')
expect(hooks.useIsChatMode).toBeTypeOf('function')
expect(hooks.useNodesSyncDraft).toBeTypeOf('function')
expect(hooks.useWorkflowInit).toBeTypeOf('function')
expect(hooks.useWorkflowRefreshDraft).toBeTypeOf('function')
expect(hooks.useWorkflowRun).toBeTypeOf('function')
expect(hooks.useWorkflowStartRun).toBeTypeOf('function')
expect(hooks.useWorkflowTemplate).toBeTypeOf('function')
})
})

View File

@@ -1,206 +0,0 @@
import { act, renderHook, waitFor } from '@testing-library/react'
import { DSL_EXPORT_CHECK } from '@/app/components/workflow/constants'
import { useDSL } from '../use-DSL'
const mockNotify = vi.fn()
const mockEmit = vi.fn()
const mockDoSyncWorkflowDraft = vi.fn()
const mockExportAppConfig = vi.fn()
const mockFetchWorkflowDraft = vi.fn()
const mockDownloadBlob = vi.fn()
let appStoreState: {
appDetail?: {
id: string
name: string
}
}
vi.mock('@/app/components/base/toast/context', () => ({
useToastContext: () => ({ notify: mockNotify }),
}))
vi.mock('@/context/event-emitter', () => ({
useEventEmitterContextContext: () => ({
eventEmitter: {
emit: mockEmit,
},
}),
}))
vi.mock('@/app/components/app/store', () => ({
useStore: <T>(selector: (state: typeof appStoreState) => T) => selector(appStoreState),
}))
vi.mock('../use-nodes-sync-draft', () => ({
useNodesSyncDraft: () => ({
doSyncWorkflowDraft: mockDoSyncWorkflowDraft,
}),
}))
vi.mock('@/service/apps', () => ({
exportAppConfig: (...args: unknown[]) => mockExportAppConfig(...args),
}))
vi.mock('@/service/workflow', () => ({
fetchWorkflowDraft: (...args: unknown[]) => mockFetchWorkflowDraft(...args),
}))
vi.mock('@/utils/download', () => ({
downloadBlob: (...args: unknown[]) => mockDownloadBlob(...args),
}))
const createDeferred = <T>() => {
let resolve!: (value: T) => void
const promise = new Promise<T>((res) => {
resolve = res
})
return { promise, resolve }
}
describe('useDSL', () => {
beforeEach(() => {
vi.clearAllMocks()
appStoreState = {
appDetail: {
id: 'app-1',
name: 'Workflow App',
},
}
mockDoSyncWorkflowDraft.mockResolvedValue(undefined)
mockExportAppConfig.mockResolvedValue({ data: 'yaml-content' })
mockFetchWorkflowDraft.mockResolvedValue({ environment_variables: [] })
})
it('should export workflow dsl and download the yaml blob when no secret env is present', async () => {
const { result } = renderHook(() => useDSL())
await act(async () => {
await result.current.exportCheck()
})
expect(mockFetchWorkflowDraft).toHaveBeenCalledWith('/apps/app-1/workflows/draft')
expect(mockDoSyncWorkflowDraft).toHaveBeenCalled()
expect(mockExportAppConfig).toHaveBeenCalledWith({
appID: 'app-1',
include: false,
workflowID: undefined,
})
expect(mockDownloadBlob).toHaveBeenCalledWith(expect.objectContaining({
data: expect.any(Blob),
fileName: 'Workflow App.yml',
}))
})
it('should forward include and workflow id arguments when exporting dsl directly', async () => {
const { result } = renderHook(() => useDSL())
await act(async () => {
await result.current.handleExportDSL(true, 'workflow-1')
})
expect(mockExportAppConfig).toHaveBeenCalledWith({
appID: 'app-1',
include: true,
workflowID: 'workflow-1',
})
})
it('should emit DSL_EXPORT_CHECK when secret environment variables exist', async () => {
const secretVars = [{ id: 'env-1', value_type: 'secret', value: 'secret-token' }]
mockFetchWorkflowDraft.mockResolvedValue({ environment_variables: secretVars })
const { result } = renderHook(() => useDSL())
await act(async () => {
await result.current.exportCheck()
})
expect(mockEmit).toHaveBeenCalledWith({
type: DSL_EXPORT_CHECK,
payload: {
data: secretVars,
},
})
expect(mockExportAppConfig).not.toHaveBeenCalled()
})
it('should return early when app detail is unavailable', async () => {
appStoreState = {}
const { result } = renderHook(() => useDSL())
await act(async () => {
await result.current.exportCheck()
await result.current.handleExportDSL()
})
expect(mockFetchWorkflowDraft).not.toHaveBeenCalled()
expect(mockDoSyncWorkflowDraft).not.toHaveBeenCalled()
expect(mockExportAppConfig).not.toHaveBeenCalled()
expect(mockEmit).not.toHaveBeenCalled()
})
it('should notify when export fails', async () => {
mockExportAppConfig.mockRejectedValue(new Error('export failed'))
const { result } = renderHook(() => useDSL())
await act(async () => {
await result.current.handleExportDSL()
})
await waitFor(() => {
expect(mockNotify).toHaveBeenCalledWith({
type: 'error',
message: 'app.exportFailed',
})
})
})
it('should notify when exportCheck cannot load the workflow draft', async () => {
mockFetchWorkflowDraft.mockRejectedValue(new Error('draft fetch failed'))
const { result } = renderHook(() => useDSL())
await act(async () => {
await result.current.exportCheck()
})
await waitFor(() => {
expect(mockNotify).toHaveBeenCalledWith({
type: 'error',
message: 'app.exportFailed',
})
})
expect(mockExportAppConfig).not.toHaveBeenCalled()
})
it('should ignore repeated export attempts while an export is already in progress', async () => {
const deferred = createDeferred<{ data: string }>()
mockExportAppConfig.mockReturnValue(deferred.promise)
const { result } = renderHook(() => useDSL())
let firstExportPromise!: Promise<void>
act(() => {
firstExportPromise = result.current.handleExportDSL()
})
await waitFor(() => {
expect(mockDoSyncWorkflowDraft).toHaveBeenCalledTimes(1)
expect(mockExportAppConfig).toHaveBeenCalledTimes(1)
})
act(() => {
void result.current.handleExportDSL()
})
expect(mockExportAppConfig).toHaveBeenCalledTimes(1)
await act(async () => {
deferred.resolve({ data: 'yaml-content' })
await firstExportPromise
})
})
})

View File

@@ -1,118 +0,0 @@
import { act, renderHook } from '@testing-library/react'
import { useAutoOnboarding } from '../use-auto-onboarding'
const mockGetNodes = vi.fn()
const mockWorkflowStore = {
getState: vi.fn(),
}
const mockSetShowOnboarding = vi.fn()
const mockSetHasShownOnboarding = vi.fn()
const mockSetShouldAutoOpenStartNodeSelector = vi.fn()
const mockSetHasSelectedStartNode = vi.fn()
vi.mock('reactflow', () => ({
useStoreApi: () => ({
getState: () => ({
getNodes: mockGetNodes,
}),
}),
}))
vi.mock('@/app/components/workflow/store', () => ({
useWorkflowStore: () => mockWorkflowStore,
}))
describe('useAutoOnboarding', () => {
beforeEach(() => {
vi.clearAllMocks()
vi.useFakeTimers()
mockGetNodes.mockReturnValue([])
mockWorkflowStore.getState.mockReturnValue({
showOnboarding: false,
hasShownOnboarding: false,
notInitialWorkflow: false,
setShowOnboarding: mockSetShowOnboarding,
setHasShownOnboarding: mockSetHasShownOnboarding,
setShouldAutoOpenStartNodeSelector: mockSetShouldAutoOpenStartNodeSelector,
hasSelectedStartNode: false,
setHasSelectedStartNode: mockSetHasSelectedStartNode,
})
})
afterEach(() => {
vi.useRealTimers()
})
it('should open onboarding after the delayed empty-canvas check on mount', () => {
renderHook(() => useAutoOnboarding())
act(() => {
vi.advanceTimersByTime(500)
})
expect(mockSetShowOnboarding).toHaveBeenCalledWith(true)
expect(mockSetHasShownOnboarding).toHaveBeenCalledWith(true)
expect(mockSetShouldAutoOpenStartNodeSelector).toHaveBeenCalledWith(true)
})
it('should skip auto onboarding when it is already visible or the workflow is not initial', () => {
mockWorkflowStore.getState.mockReturnValue({
showOnboarding: true,
hasShownOnboarding: false,
notInitialWorkflow: true,
setShowOnboarding: mockSetShowOnboarding,
setHasShownOnboarding: mockSetHasShownOnboarding,
setShouldAutoOpenStartNodeSelector: mockSetShouldAutoOpenStartNodeSelector,
hasSelectedStartNode: false,
setHasSelectedStartNode: mockSetHasSelectedStartNode,
})
renderHook(() => useAutoOnboarding())
act(() => {
vi.advanceTimersByTime(500)
})
expect(mockSetShowOnboarding).not.toHaveBeenCalled()
expect(mockSetHasShownOnboarding).not.toHaveBeenCalled()
expect(mockSetShouldAutoOpenStartNodeSelector).not.toHaveBeenCalled()
})
it('should close onboarding and reset selected start node state when one was chosen', () => {
mockWorkflowStore.getState.mockReturnValue({
showOnboarding: false,
hasShownOnboarding: true,
notInitialWorkflow: false,
setShowOnboarding: mockSetShowOnboarding,
setHasShownOnboarding: mockSetHasShownOnboarding,
setShouldAutoOpenStartNodeSelector: mockSetShouldAutoOpenStartNodeSelector,
hasSelectedStartNode: true,
setHasSelectedStartNode: mockSetHasSelectedStartNode,
})
const { result } = renderHook(() => useAutoOnboarding())
act(() => {
result.current.handleOnboardingClose()
})
expect(mockSetShowOnboarding).toHaveBeenCalledWith(false)
expect(mockSetHasShownOnboarding).toHaveBeenCalledWith(true)
expect(mockSetHasSelectedStartNode).toHaveBeenCalledWith(false)
expect(mockSetShouldAutoOpenStartNodeSelector).not.toHaveBeenCalled()
})
it('should close onboarding and disable auto-open when no start node was selected', () => {
const { result } = renderHook(() => useAutoOnboarding())
act(() => {
result.current.handleOnboardingClose()
})
expect(mockSetShowOnboarding).toHaveBeenCalledWith(false)
expect(mockSetHasShownOnboarding).toHaveBeenCalledWith(true)
expect(mockSetShouldAutoOpenStartNodeSelector).toHaveBeenCalledWith(false)
expect(mockSetHasSelectedStartNode).not.toHaveBeenCalled()
})
})

View File

@@ -1,49 +0,0 @@
import { renderHook } from '@testing-library/react'
import { BlockEnum } from '@/app/components/workflow/types'
import { useAvailableNodesMetaData } from '../use-available-nodes-meta-data'
const mockUseIsChatMode = vi.fn()
vi.mock('@/app/components/workflow-app/hooks/use-is-chat-mode', () => ({
useIsChatMode: () => mockUseIsChatMode(),
}))
vi.mock('@/context/i18n', () => ({
useDocLink: () => (path: string) => `/docs${path}`,
}))
describe('useAvailableNodesMetaData', () => {
beforeEach(() => {
vi.clearAllMocks()
})
it('should include chat-specific nodes and make the start node undeletable in chat mode', () => {
mockUseIsChatMode.mockReturnValue(true)
const { result } = renderHook(() => useAvailableNodesMetaData())
expect(result.current.nodesMap?.[BlockEnum.Start]?.metaData.isUndeletable).toBe(true)
expect(result.current.nodesMap?.[BlockEnum.Answer]).toBeDefined()
expect(result.current.nodesMap?.[BlockEnum.End]).toBeUndefined()
expect(result.current.nodesMap?.[BlockEnum.TriggerWebhook]).toBeUndefined()
expect(result.current.nodesMap?.[BlockEnum.VariableAssigner]).toBe(result.current.nodesMap?.[BlockEnum.VariableAggregator])
expect(result.current.nodesMap?.[BlockEnum.Start]?.metaData.helpLinkUri).toContain('/docs/use-dify/nodes/')
})
it('should include workflow-specific trigger and end nodes outside chat mode', () => {
mockUseIsChatMode.mockReturnValue(false)
const { result } = renderHook(() => useAvailableNodesMetaData())
expect(result.current.nodesMap?.[BlockEnum.Start]?.metaData.isUndeletable).toBe(false)
expect(result.current.nodesMap?.[BlockEnum.End]).toBeDefined()
expect(result.current.nodesMap?.[BlockEnum.TriggerWebhook]).toBeDefined()
expect(result.current.nodesMap?.[BlockEnum.TriggerSchedule]).toBeDefined()
expect(result.current.nodesMap?.[BlockEnum.TriggerPlugin]).toBeDefined()
expect(result.current.nodesMap?.[BlockEnum.Answer]).toBeUndefined()
expect(result.current.nodesMap?.[BlockEnum.Start]?.defaultValue).toMatchObject({
type: BlockEnum.Start,
title: 'workflow.blocks.start',
})
})
})

View File

@@ -1,40 +0,0 @@
import { renderHook } from '@testing-library/react'
import { FlowType } from '@/types/common'
import { useConfigsMap } from '../use-configs-map'
const mockUseFeatures = vi.fn()
vi.mock('@/app/components/base/features/hooks', () => ({
useFeatures: (selector: (state: { features: { file: Record<string, unknown> } }) => unknown) => mockUseFeatures(selector),
}))
vi.mock('@/app/components/workflow/store', () => ({
useStore: <T>(selector: (state: { appId: string }) => T) => selector({ appId: 'app-1' }),
}))
describe('useConfigsMap', () => {
beforeEach(() => {
vi.clearAllMocks()
mockUseFeatures.mockImplementation((selector: (state: { features: { file: Record<string, unknown> } }) => unknown) => selector({
features: {
file: {
enabled: true,
number_limits: 3,
},
},
}))
})
it('should map workflow app id and feature file settings into inspect-var configs', () => {
const { result } = renderHook(() => useConfigsMap())
expect(result.current).toEqual({
flowId: 'app-1',
flowType: FlowType.appFlow,
fileSettings: {
enabled: true,
number_limits: 3,
},
})
})
})

View File

@@ -1,28 +0,0 @@
import { renderHook } from '@testing-library/react'
import { useGetRunAndTraceUrl } from '../use-get-run-and-trace-url'
const mockWorkflowStore = {
getState: vi.fn(),
}
vi.mock('@/app/components/workflow/store', () => ({
useWorkflowStore: () => mockWorkflowStore,
}))
describe('useGetRunAndTraceUrl', () => {
beforeEach(() => {
vi.clearAllMocks()
mockWorkflowStore.getState.mockReturnValue({
appId: 'app-123',
})
})
it('should build workflow run and trace urls from the current app id', () => {
const { result } = renderHook(() => useGetRunAndTraceUrl())
expect(result.current.getWorkflowRunAndTraceUrl('run-1')).toEqual({
runUrl: '/apps/app-123/workflow-runs/run-1',
traceUrl: '/apps/app-123/workflow-runs/run-1/node-executions',
})
})
})

View File

@@ -1,44 +0,0 @@
import { renderHook } from '@testing-library/react'
import { useInspectVarsCrud } from '../use-inspect-vars-crud'
const mockUseInspectVarsCrudCommon = vi.fn()
const mockUseConfigsMap = vi.fn()
vi.mock('@/app/components/workflow/hooks/use-inspect-vars-crud-common', () => ({
useInspectVarsCrudCommon: (...args: unknown[]) => mockUseInspectVarsCrudCommon(...args),
}))
vi.mock('@/app/components/workflow-app/hooks/use-configs-map', () => ({
useConfigsMap: () => mockUseConfigsMap(),
}))
describe('useInspectVarsCrud', () => {
beforeEach(() => {
vi.clearAllMocks()
mockUseConfigsMap.mockReturnValue({
flowId: 'app-1',
flowType: 'app-flow',
fileSettings: { enabled: true },
})
mockUseInspectVarsCrudCommon.mockReturnValue({
fetchInspectVarValue: vi.fn(),
editInspectVarValue: vi.fn(),
deleteInspectVar: vi.fn(),
})
})
it('should call the shared inspect vars hook with workflow-app configs and return its api', () => {
const { result } = renderHook(() => useInspectVarsCrud())
expect(mockUseInspectVarsCrudCommon).toHaveBeenCalledWith({
flowId: 'app-1',
flowType: 'app-flow',
fileSettings: { enabled: true },
})
expect(result.current).toEqual({
fetchInspectVarValue: expect.any(Function),
editInspectVarValue: expect.any(Function),
deleteInspectVar: expect.any(Function),
})
})
})

View File

@@ -4,57 +4,42 @@ import { beforeEach, describe, expect, it, vi } from 'vitest'
import { useNodesSyncDraft } from '../use-nodes-sync-draft'
const mockGetNodes = vi.fn()
const mockPostWithKeepalive = vi.fn()
const mockSetSyncWorkflowDraftHash = vi.fn()
const mockSetDraftUpdatedAt = vi.fn()
const mockGetNodesReadOnly = vi.fn()
let reactFlowState: {
getNodes: typeof mockGetNodes
edges: Array<Record<string, unknown>>
transform: [number, number, number]
}
let workflowStoreState: {
appId: string
isWorkflowDataLoaded: boolean
syncWorkflowDraftHash: string | null
environmentVariables: Array<Record<string, unknown>>
conversationVariables: Array<Record<string, unknown>>
setSyncWorkflowDraftHash: typeof mockSetSyncWorkflowDraftHash
setDraftUpdatedAt: typeof mockSetDraftUpdatedAt
}
let featuresState: {
features: {
opening: { enabled: boolean, opening_statement: string, suggested_questions: string[] }
suggested: Record<string, unknown>
text2speech: Record<string, unknown>
speech2text: Record<string, unknown>
citation: Record<string, unknown>
moderation: Record<string, unknown>
file: Record<string, unknown>
}
}
vi.mock('reactflow', () => ({
useStoreApi: () => ({ getState: () => reactFlowState }),
useStoreApi: () => ({ getState: () => ({ getNodes: mockGetNodes, edges: [], transform: [0, 0, 1] }) }),
}))
vi.mock('@/app/components/workflow/store', () => ({
useWorkflowStore: () => ({
getState: () => workflowStoreState,
getState: () => ({
appId: 'app-1',
isWorkflowDataLoaded: true,
syncWorkflowDraftHash: 'hash-123',
environmentVariables: [],
conversationVariables: [],
setSyncWorkflowDraftHash: vi.fn(),
setDraftUpdatedAt: vi.fn(),
}),
}),
}))
vi.mock('@/app/components/base/features/hooks', () => ({
useFeaturesStore: () => ({
getState: () => featuresState,
getState: () => ({
features: {
opening: { enabled: false, opening_statement: '', suggested_questions: [] },
suggested: {},
text2speech: {},
speech2text: {},
citation: {},
moderation: {},
file: {},
},
}),
}),
}))
vi.mock('@/app/components/workflow/hooks/use-workflow', () => ({
useNodesReadOnly: () => ({ getNodesReadOnly: mockGetNodesReadOnly }),
useNodesReadOnly: () => ({ getNodesReadOnly: () => false }),
}))
vi.mock('@/app/components/workflow/hooks/use-serial-async-callback', () => ({
@@ -70,7 +55,7 @@ vi.mock('@/service/workflow', () => ({
syncWorkflowDraft: (p: unknown) => mockSyncWorkflowDraft(p),
}))
vi.mock('@/service/fetch', () => ({ postWithKeepalive: (...args: unknown[]) => mockPostWithKeepalive(...args) }))
vi.mock('@/service/fetch', () => ({ postWithKeepalive: vi.fn() }))
vi.mock('@/config', () => ({ API_PREFIX: '/api' }))
const mockHandleRefreshWorkflowDraft = vi.fn()
@@ -81,32 +66,6 @@ vi.mock('@/app/components/workflow-app/hooks', () => ({
describe('useNodesSyncDraft — handleRefreshWorkflowDraft(true) on 409', () => {
beforeEach(() => {
vi.clearAllMocks()
reactFlowState = {
getNodes: mockGetNodes,
edges: [],
transform: [0, 0, 1],
}
workflowStoreState = {
appId: 'app-1',
isWorkflowDataLoaded: true,
syncWorkflowDraftHash: 'hash-123',
environmentVariables: [],
conversationVariables: [],
setSyncWorkflowDraftHash: mockSetSyncWorkflowDraftHash,
setDraftUpdatedAt: mockSetDraftUpdatedAt,
}
featuresState = {
features: {
opening: { enabled: false, opening_statement: '', suggested_questions: [] },
suggested: {},
text2speech: {},
speech2text: {},
citation: {},
moderation: {},
file: {},
},
}
mockGetNodesReadOnly.mockReturnValue(false)
mockGetNodes.mockReturnValue([{ id: 'n1', position: { x: 0, y: 0 }, data: { type: 'start' } }])
mockSyncWorkflowDraft.mockResolvedValue({ hash: 'new', updated_at: 1 })
})
@@ -163,102 +122,4 @@ describe('useNodesSyncDraft — handleRefreshWorkflowDraft(true) on 409', () =>
}),
}))
})
it('should strip temp entities and private data, use the latest hash, and invoke success callbacks', async () => {
reactFlowState = {
...reactFlowState,
edges: [
{ id: 'edge-1', source: 'n1', target: 'n2', data: { _isTemp: false, _private: 'drop', stable: 'keep' } },
{ id: 'temp-edge', source: 'n2', target: 'n3', data: { _isTemp: true } },
],
transform: [10, 20, 1.5],
}
mockGetNodes.mockReturnValue([
{ id: 'n1', position: { x: 0, y: 0 }, data: { type: 'start', _tempField: 'drop', label: 'Start' } },
{ id: 'temp-node', position: { x: 1, y: 1 }, data: { type: 'answer', _isTempNode: true } },
])
workflowStoreState = {
...workflowStoreState,
syncWorkflowDraftHash: 'latest-hash',
environmentVariables: [{ id: 'env-1', value: 'env' }],
conversationVariables: [{ id: 'conversation-1', value: 'conversation' }],
}
featuresState = {
features: {
opening: { enabled: true, opening_statement: 'Hello', suggested_questions: ['Q1'] },
suggested: { enabled: true },
text2speech: { enabled: true },
speech2text: { enabled: true },
citation: { enabled: true },
moderation: { enabled: false },
file: { enabled: true },
},
}
const callbacks = {
onSuccess: vi.fn(),
onError: vi.fn(),
onSettled: vi.fn(),
}
const { result } = renderHook(() => useNodesSyncDraft())
await act(async () => {
await result.current.doSyncWorkflowDraft(false, callbacks)
})
expect(mockSyncWorkflowDraft).toHaveBeenCalledWith({
url: '/apps/app-1/workflows/draft',
params: {
graph: {
nodes: [{ id: 'n1', position: { x: 0, y: 0 }, data: { type: 'start', label: 'Start' } }],
edges: [{ id: 'edge-1', source: 'n1', target: 'n2', data: { stable: 'keep' } }],
viewport: { x: 10, y: 20, zoom: 1.5 },
},
features: {
opening_statement: 'Hello',
suggested_questions: ['Q1'],
suggested_questions_after_answer: { enabled: true },
text_to_speech: { enabled: true },
speech_to_text: { enabled: true },
retriever_resource: { enabled: true },
sensitive_word_avoidance: { enabled: false },
file_upload: { enabled: true },
},
environment_variables: [{ id: 'env-1', value: 'env' }],
conversation_variables: [{ id: 'conversation-1', value: 'conversation' }],
hash: 'latest-hash',
},
})
expect(mockSetSyncWorkflowDraftHash).toHaveBeenCalledWith('new')
expect(mockSetDraftUpdatedAt).toHaveBeenCalledWith(1)
expect(callbacks.onSuccess).toHaveBeenCalled()
expect(callbacks.onError).not.toHaveBeenCalled()
expect(callbacks.onSettled).toHaveBeenCalled()
})
it('should post workflow draft with keepalive when the page closes', () => {
reactFlowState = {
...reactFlowState,
transform: [1, 2, 3],
}
workflowStoreState = {
...workflowStoreState,
environmentVariables: [{ id: 'env-1' }],
conversationVariables: [{ id: 'conversation-1' }],
}
const { result } = renderHook(() => useNodesSyncDraft())
act(() => {
result.current.syncWorkflowDraftWhenPageClose()
})
expect(mockPostWithKeepalive).toHaveBeenCalledWith('/api/apps/app-1/workflows/draft', expect.objectContaining({
graph: expect.objectContaining({
viewport: { x: 1, y: 2, zoom: 3 },
}),
hash: 'hash-123',
}))
})
})

View File

@@ -1,6 +1,5 @@
import { renderHook, waitFor } from '@testing-library/react'
import { beforeEach, describe, expect, it, vi } from 'vitest'
import { BlockEnum } from '@/app/components/workflow/types'
import { useWorkflowInit } from '../use-workflow-init'
@@ -12,21 +11,6 @@ const mockSetLastPublishedHasUserInput = vi.fn()
const mockSetFileUploadConfig = vi.fn()
const mockWorkflowStoreSetState = vi.fn()
const mockWorkflowStoreGetState = vi.fn()
const mockFetchNodesDefaultConfigs = vi.fn()
const mockFetchPublishedWorkflow = vi.fn()
let appStoreState: {
appDetail: {
id: string
name: string
mode: string
}
}
let workflowConfigState: {
data: Record<string, unknown> | null
isLoading: boolean
}
vi.mock('@/app/components/workflow/store', () => ({
useStore: <T>(selector: (state: { setSyncWorkflowDraftHash: ReturnType<typeof vi.fn> }) => T): T =>
@@ -38,8 +22,8 @@ vi.mock('@/app/components/workflow/store', () => ({
}))
vi.mock('@/app/components/app/store', () => ({
useStore: <T>(selector: (state: typeof appStoreState) => T): T =>
selector(appStoreState),
useStore: <T>(selector: (state: { appDetail: { id: string, name: string, mode: string } }) => T): T =>
selector({ appDetail: { id: 'app-1', name: 'Test', mode: 'workflow' } }),
}))
vi.mock('../use-workflow-template', () => ({
@@ -47,11 +31,7 @@ vi.mock('../use-workflow-template', () => ({
}))
vi.mock('@/service/use-workflow', () => ({
useWorkflowConfig: (_url: string, onSuccess: (config: Record<string, unknown>) => void) => {
if (workflowConfigState.data)
onSuccess(workflowConfigState.data)
return workflowConfigState
},
useWorkflowConfig: () => ({ data: null, isLoading: false }),
}))
const mockFetchWorkflowDraft = vi.fn()
@@ -60,8 +40,8 @@ const mockSyncWorkflowDraft = vi.fn()
vi.mock('@/service/workflow', () => ({
fetchWorkflowDraft: (...args: unknown[]) => mockFetchWorkflowDraft(...args),
syncWorkflowDraft: (...args: unknown[]) => mockSyncWorkflowDraft(...args),
fetchNodesDefaultConfigs: (...args: unknown[]) => mockFetchNodesDefaultConfigs(...args),
fetchPublishedWorkflow: (...args: unknown[]) => mockFetchPublishedWorkflow(...args),
fetchNodesDefaultConfigs: () => Promise.resolve([]),
fetchPublishedWorkflow: () => Promise.resolve({ created_at: 0, graph: { nodes: [], edges: [] } }),
}))
const notExistError = () => ({
@@ -88,10 +68,6 @@ const draftResponse = {
describe('useWorkflowInit — hash fix (draft_workflow_not_exist)', () => {
beforeEach(() => {
vi.clearAllMocks()
appStoreState = {
appDetail: { id: 'app-1', name: 'Test', mode: 'workflow' },
}
workflowConfigState = { data: null, isLoading: false }
mockWorkflowStoreGetState.mockReturnValue({
setDraftUpdatedAt: mockSetDraftUpdatedAt,
setToolPublished: mockSetToolPublished,
@@ -99,8 +75,6 @@ describe('useWorkflowInit — hash fix (draft_workflow_not_exist)', () => {
setLastPublishedHasUserInput: mockSetLastPublishedHasUserInput,
setFileUploadConfig: mockSetFileUploadConfig,
})
mockFetchNodesDefaultConfigs.mockResolvedValue([])
mockFetchPublishedWorkflow.mockResolvedValue({ created_at: 0, graph: { nodes: [], edges: [] } })
mockFetchWorkflowDraft
.mockRejectedValueOnce(notExistError())
.mockResolvedValueOnce(draftResponse)
@@ -130,77 +104,4 @@ describe('useWorkflowInit — hash fix (draft_workflow_not_exist)', () => {
expect(order).toContain('hash:new-hash')
expect(order.indexOf('hash:new-hash')).toBeLessThan(order.indexOf('fetch:2'))
})
it('should hydrate draft state, preload defaults, and derive published workflow metadata on success', async () => {
workflowConfigState = {
data: { enabled: true, sizeLimit: 20 },
isLoading: false,
}
mockFetchWorkflowDraft.mockReset().mockResolvedValue({
...draftResponse,
updated_at: 9,
tool_published: true,
environment_variables: [
{ id: 'env-secret', value_type: 'secret', value: 'top-secret', name: 'SECRET' },
{ id: 'env-plain', value_type: 'text', value: 'visible', name: 'PLAIN' },
],
conversation_variables: [{ id: 'conversation-1' }],
})
mockFetchNodesDefaultConfigs.mockResolvedValue([
{ type: 'start', config: { title: 'Start Config' } },
{ type: 'start', config: { title: 'Ignored Duplicate' } },
])
mockFetchPublishedWorkflow.mockResolvedValue({
created_at: 99,
graph: {
nodes: [{ id: 'start', data: { type: BlockEnum.Start } }],
edges: [{ source: 'start', target: 'end' }],
},
})
const { result } = renderHook(() => useWorkflowInit())
await waitFor(() => {
expect(result.current.data?.hash).toBe('server-hash')
})
expect(mockWorkflowStoreSetState).toHaveBeenCalledWith({ appId: 'app-1', appName: 'Test' })
expect(mockWorkflowStoreSetState).toHaveBeenCalledWith(expect.objectContaining({
envSecrets: { 'env-secret': 'top-secret' },
environmentVariables: [
{ id: 'env-secret', value_type: 'secret', value: '[__HIDDEN__]', name: 'SECRET' },
{ id: 'env-plain', value_type: 'text', value: 'visible', name: 'PLAIN' },
],
conversationVariables: [{ id: 'conversation-1' }],
isWorkflowDataLoaded: true,
}))
expect(mockWorkflowStoreSetState).toHaveBeenCalledWith({
nodesDefaultConfigs: {
start: { title: 'Start Config' },
},
})
expect(mockSetSyncWorkflowDraftHash).toHaveBeenCalledWith('server-hash')
expect(mockSetDraftUpdatedAt).toHaveBeenCalledWith(9)
expect(mockSetToolPublished).toHaveBeenCalledWith(true)
expect(mockSetPublishedAt).toHaveBeenCalledWith(99)
expect(mockSetLastPublishedHasUserInput).toHaveBeenCalledWith(true)
expect(mockSetFileUploadConfig).toHaveBeenCalledWith({ enabled: true, sizeLimit: 20 })
expect(result.current.fileUploadConfigResponse).toEqual({ enabled: true, sizeLimit: 20 })
expect(result.current.isLoading).toBe(false)
})
it('should fall back to no published user input when preload requests fail', async () => {
const consoleErrorSpy = vi.spyOn(console, 'error').mockImplementation(() => undefined)
mockFetchWorkflowDraft.mockReset().mockResolvedValue(draftResponse)
mockFetchNodesDefaultConfigs.mockRejectedValue(new Error('preload failed'))
renderHook(() => useWorkflowInit())
await waitFor(() => {
expect(mockSetLastPublishedHasUserInput).toHaveBeenCalledWith(false)
})
expect(consoleErrorSpy).toHaveBeenCalled()
consoleErrorSpy.mockRestore()
})
})

View File

@@ -1,32 +1,24 @@
import { act, renderHook, waitFor } from '@testing-library/react'
import { act, renderHook } from '@testing-library/react'
import { beforeEach, describe, expect, it, vi } from 'vitest'
import { useWorkflowRefreshDraft } from '../use-workflow-refresh-draft'
const mockHandleUpdateWorkflowCanvas = vi.fn()
const mockSetSyncWorkflowDraftHash = vi.fn()
const mockSetIsSyncingWorkflowDraft = vi.fn()
const mockSetEnvironmentVariables = vi.fn()
const mockSetEnvSecrets = vi.fn()
const mockSetConversationVariables = vi.fn()
const mockSetIsWorkflowDataLoaded = vi.fn()
const mockCancel = vi.fn()
let workflowStoreState: {
appId: string
isWorkflowDataLoaded: boolean
debouncedSyncWorkflowDraft?: { cancel: () => void }
setSyncWorkflowDraftHash: typeof mockSetSyncWorkflowDraftHash
setIsSyncingWorkflowDraft: typeof mockSetIsSyncingWorkflowDraft
setEnvironmentVariables: typeof mockSetEnvironmentVariables
setEnvSecrets: typeof mockSetEnvSecrets
setConversationVariables: typeof mockSetConversationVariables
setIsWorkflowDataLoaded: typeof mockSetIsWorkflowDataLoaded
}
vi.mock('@/app/components/workflow/store', () => ({
useWorkflowStore: () => ({
getState: () => workflowStoreState,
getState: () => ({
appId: 'app-1',
isWorkflowDataLoaded: true,
debouncedSyncWorkflowDraft: undefined,
setSyncWorkflowDraftHash: mockSetSyncWorkflowDraftHash,
setIsSyncingWorkflowDraft: vi.fn(),
setEnvironmentVariables: vi.fn(),
setEnvSecrets: vi.fn(),
setConversationVariables: vi.fn(),
setIsWorkflowDataLoaded: vi.fn(),
}),
}),
}))
@@ -49,17 +41,6 @@ const draftResponse = {
describe('useWorkflowRefreshDraft — notUpdateCanvas parameter', () => {
beforeEach(() => {
vi.clearAllMocks()
workflowStoreState = {
appId: 'app-1',
isWorkflowDataLoaded: true,
debouncedSyncWorkflowDraft: undefined,
setSyncWorkflowDraftHash: mockSetSyncWorkflowDraftHash,
setIsSyncingWorkflowDraft: mockSetIsSyncingWorkflowDraft,
setEnvironmentVariables: mockSetEnvironmentVariables,
setEnvSecrets: mockSetEnvSecrets,
setConversationVariables: mockSetConversationVariables,
setIsWorkflowDataLoaded: mockSetIsWorkflowDataLoaded,
}
mockFetchWorkflowDraft.mockResolvedValue(draftResponse)
})
@@ -94,67 +75,6 @@ describe('useWorkflowRefreshDraft — notUpdateCanvas parameter', () => {
await act(async () => {
result.current.handleRefreshWorkflowDraft(true)
})
await waitFor(() => {
expect(mockSetSyncWorkflowDraftHash).toHaveBeenCalledWith('server-hash')
})
})
it('should cancel pending draft sync, use fallback viewport, and persist masked secrets', async () => {
workflowStoreState = {
...workflowStoreState,
debouncedSyncWorkflowDraft: { cancel: mockCancel },
}
mockFetchWorkflowDraft.mockResolvedValue({
hash: 'server-hash',
graph: {
nodes: [{ id: 'n1' }],
edges: [],
},
environment_variables: [
{ id: 'env-secret', value_type: 'secret', value: 'top-secret', name: 'SECRET' },
{ id: 'env-plain', value_type: 'text', value: 'visible', name: 'PLAIN' },
],
conversation_variables: [{ id: 'conversation-1' }],
})
const { result } = renderHook(() => useWorkflowRefreshDraft())
act(() => {
result.current.handleRefreshWorkflowDraft()
})
await waitFor(() => {
expect(mockCancel).toHaveBeenCalled()
expect(mockHandleUpdateWorkflowCanvas).toHaveBeenCalledWith({
nodes: [{ id: 'n1' }],
edges: [],
viewport: { x: 0, y: 0, zoom: 1 },
})
expect(mockSetEnvSecrets).toHaveBeenCalledWith({
'env-secret': 'top-secret',
})
expect(mockSetEnvironmentVariables).toHaveBeenCalledWith([
{ id: 'env-secret', value_type: 'secret', value: '[__HIDDEN__]', name: 'SECRET' },
{ id: 'env-plain', value_type: 'text', value: 'visible', name: 'PLAIN' },
])
expect(mockSetConversationVariables).toHaveBeenCalledWith([{ id: 'conversation-1' }])
})
})
it('should restore loaded state when refresh fails after workflow data was already loaded', async () => {
mockFetchWorkflowDraft.mockRejectedValue(new Error('refresh failed'))
const { result } = renderHook(() => useWorkflowRefreshDraft())
act(() => {
result.current.handleRefreshWorkflowDraft()
})
await waitFor(() => {
expect(mockSetIsWorkflowDataLoaded).toHaveBeenNthCalledWith(1, false)
expect(mockSetIsWorkflowDataLoaded).toHaveBeenNthCalledWith(2, true)
expect(mockSetIsSyncingWorkflowDraft).toHaveBeenCalledWith(true)
expect(mockSetIsSyncingWorkflowDraft).toHaveBeenLastCalledWith(false)
})
expect(mockSetSyncWorkflowDraftHash).toHaveBeenCalledWith('server-hash')
})
})

View File

@@ -1,451 +0,0 @@
import type AudioPlayer from '@/app/components/base/audio-btn/audio'
import { createBaseWorkflowRunCallbacks, createFinalWorkflowRunCallbacks } from '../use-workflow-run-callbacks'
// Hoisted mock fns so the vi.mock factories below (which vitest hoists to the
// top of the module) can reference them without a TDZ error.
const {
  mockSseGet,
  mockResetMsgId,
} = vi.hoisted(() => ({
  mockSseGet: vi.fn(),
  mockResetMsgId: vi.fn(),
}))
// Stub the SSE transport and the singleton audio-player manager so the
// callbacks under test never touch the network or real audio playback.
vi.mock('@/service/base', () => ({
  sseGet: mockSseGet,
}))
vi.mock('@/app/components/base/audio-btn/audio.player.manager', () => ({
  AudioPlayerManager: {
    getInstance: () => ({
      resetMsgId: mockResetMsgId,
    }),
  },
}))
/**
 * Build the full bundle of store-level workflow event handlers, every entry a
 * fresh vi.fn(), matching the `handlers` shape the callback factories expect.
 */
function createHandlers() {
  const handlerNames = [
    'handleWorkflowStarted',
    'handleWorkflowFinished',
    'handleWorkflowFailed',
    'handleWorkflowNodeStarted',
    'handleWorkflowNodeFinished',
    'handleWorkflowNodeHumanInputRequired',
    'handleWorkflowNodeHumanInputFormFilled',
    'handleWorkflowNodeHumanInputFormTimeout',
    'handleWorkflowNodeIterationStarted',
    'handleWorkflowNodeIterationNext',
    'handleWorkflowNodeIterationFinished',
    'handleWorkflowNodeLoopStarted',
    'handleWorkflowNodeLoopNext',
    'handleWorkflowNodeLoopFinished',
    'handleWorkflowNodeRetry',
    'handleWorkflowAgentLog',
    'handleWorkflowTextChunk',
    'handleWorkflowTextReplace',
    'handleWorkflowPaused',
  ] as const
  return Object.fromEntries(
    handlerNames.map(name => [name, vi.fn()]),
  ) as Record<(typeof handlerNames)[number], ReturnType<typeof vi.fn>>
}
/**
 * Build the user-facing callback bundle (onWorkflowStarted, onError, …),
 * every entry a fresh vi.fn(), matching the shape passed via `callbacks`.
 */
function createUserCallbacks() {
  const callbackNames = [
    'onWorkflowStarted',
    'onWorkflowFinished',
    'onNodeStarted',
    'onNodeFinished',
    'onIterationStart',
    'onIterationNext',
    'onIterationFinish',
    'onLoopStart',
    'onLoopNext',
    'onLoopFinish',
    'onNodeRetry',
    'onAgentLog',
    'onError',
    'onWorkflowPaused',
    'onHumanInputRequired',
    'onHumanInputFormFilled',
    'onHumanInputFormTimeout',
    'onCompleted',
  ] as const
  return Object.fromEntries(
    callbackNames.map(name => [name, vi.fn()]),
  ) as Record<(typeof callbackNames)[number], ReturnType<typeof vi.fn>>
}
describe('useWorkflowRun callbacks helpers', () => {
beforeEach(() => {
vi.clearAllMocks()
})
// Verifies the base callback factory: events fan out to store handlers AND
// user callbacks, TTS playback is lazy, and a pause re-subscribes via SSE.
it('should create base callbacks that wrap workflow events, errors, pause continuation, and lazy tts playback', () => {
  const handlers = createHandlers()
  const clearAbortController = vi.fn()
  const clearListeningState = vi.fn()
  const invalidateRunHistory = vi.fn()
  const fetchInspectVars = vi.fn()
  const invalidAllLastRun = vi.fn()
  const trackWorkflowRunFailed = vi.fn()
  const userOnWorkflowFinished = vi.fn()
  const userOnError = vi.fn()
  const userOnWorkflowPaused = vi.fn()
  const player = {
    playAudioWithAudio: vi.fn(),
  } as unknown as AudioPlayer
  // Player is created on demand, only when audio actually arrives.
  const getOrCreatePlayer = vi.fn<() => AudioPlayer | null>(() => player)
  const callbacks = createBaseWorkflowRunCallbacks({
    clientWidth: 320,
    clientHeight: 240,
    runHistoryUrl: '/apps/app-1/workflow-runs',
    isInWorkflowDebug: true,
    fetchInspectVars,
    invalidAllLastRun,
    invalidateRunHistory,
    clearAbortController,
    clearListeningState,
    trackWorkflowRunFailed,
    handlers,
    callbacks: {
      onWorkflowFinished: userOnWorkflowFinished,
      onError: userOnError,
      onWorkflowPaused: userOnWorkflowPaused,
    },
    restCallback: {},
    getOrCreatePlayer,
  })
  // Finish: clears listening state, refreshes history, and (debug mode on
  // here) re-fetches inspect vars, before invoking the user callback.
  callbacks.onWorkflowFinished?.({ workflow_run_id: 'run-1' } as never)
  expect(clearListeningState).toHaveBeenCalled()
  expect(handlers.handleWorkflowFinished).toHaveBeenCalled()
  expect(invalidateRunHistory).toHaveBeenCalledWith('/apps/app-1/workflow-runs')
  expect(userOnWorkflowFinished).toHaveBeenCalled()
  expect(fetchInspectVars).toHaveBeenCalledWith({})
  expect(invalidAllLastRun).toHaveBeenCalled()
  // Error: releases the abort controller and reports the failure for tracking.
  callbacks.onError?.({ error: 'failed', node_type: 'llm' } as never)
  expect(clearAbortController).toHaveBeenCalled()
  expect(handlers.handleWorkflowFailed).toHaveBeenCalled()
  expect(userOnError).toHaveBeenCalled()
  expect(trackWorkflowRunFailed).toHaveBeenCalledWith({ error: 'failed', node_type: 'llm' })
  // TTS chunk: lazily creates the player and resets the active message id.
  callbacks.onTTSChunk?.('message-1', 'audio-chunk')
  expect(getOrCreatePlayer).toHaveBeenCalled()
  expect(player.playAudioWithAudio).toHaveBeenCalledWith('audio-chunk', true)
  expect(mockResetMsgId).toHaveBeenCalledWith('message-1')
  // Pause: continues the run by re-subscribing with the SAME callback object.
  callbacks.onWorkflowPaused?.({ workflow_run_id: 'run-2' } as never)
  expect(handlers.handleWorkflowPaused).toHaveBeenCalled()
  expect(userOnWorkflowPaused).toHaveBeenCalled()
  expect(mockSseGet).toHaveBeenCalledWith('/workflow/run-2/events', {}, callbacks)
})
// Verifies the final callback factory: entries from restCallback win over the
// default handlers, and the abort controller is wired eagerly on creation.
it('should create final callbacks that preserve rest callback override order and eager abort-controller wiring', () => {
  const handlers = createHandlers()
  const restOnNodeStarted = vi.fn()
  const setAbortController = vi.fn()
  const player = {
    playAudioWithAudio: vi.fn(),
  } as unknown as AudioPlayer
  // Base options are built first; the final callbacks layer on top of them.
  const baseSseOptions = createBaseWorkflowRunCallbacks({
    clientWidth: 320,
    clientHeight: 240,
    runHistoryUrl: '/apps/app-1/workflow-runs',
    isInWorkflowDebug: false,
    fetchInspectVars: vi.fn(),
    invalidAllLastRun: vi.fn(),
    invalidateRunHistory: vi.fn(),
    clearAbortController: vi.fn(),
    clearListeningState: vi.fn(),
    trackWorkflowRunFailed: vi.fn(),
    handlers,
    callbacks: {},
    restCallback: {},
    getOrCreatePlayer: vi.fn<() => AudioPlayer | null>(() => player),
  })
  const finalCallbacks = createFinalWorkflowRunCallbacks({
    clientWidth: 320,
    clientHeight: 240,
    runHistoryUrl: '/apps/app-1/workflow-runs',
    isInWorkflowDebug: false,
    fetchInspectVars: vi.fn(),
    invalidAllLastRun: vi.fn(),
    invalidateRunHistory: vi.fn(),
    clearAbortController: vi.fn(),
    clearListeningState: vi.fn(),
    trackWorkflowRunFailed: vi.fn(),
    handlers,
    callbacks: {},
    restCallback: {
      onNodeStarted: restOnNodeStarted,
    },
    baseSseOptions,
    player,
    setAbortController,
  })
  // The controller is forwarded immediately, not lazily.
  const controller = new AbortController()
  finalCallbacks.getAbortController?.(controller)
  expect(setAbortController).toHaveBeenCalledWith(controller)
  // restCallback.onNodeStarted fully REPLACES the default handler path.
  finalCallbacks.onNodeStarted?.({ node_id: 'node-1' } as never)
  expect(restOnNodeStarted).toHaveBeenCalled()
  expect(handlers.handleWorkflowNodeStarted).not.toHaveBeenCalled()
  // TTS uses the eagerly supplied player (no lazy creation here).
  finalCallbacks.onTTSChunk?.('message-2', 'audio-chunk')
  expect(player.playAudioWithAudio).toHaveBeenCalledWith('audio-chunk', true)
  expect(mockResetMsgId).toHaveBeenCalledWith('message-2')
})
// Fires every supported base event exactly once and verifies each fans out to
// its store handler and its user callback, including TTS and pause re-subscribe.
it('should route base workflow events through handlers, user callbacks, and pause continuation with the same callback object', async () => {
  const handlers = createHandlers()
  const userCallbacks = createUserCallbacks()
  const clearAbortController = vi.fn()
  const clearListeningState = vi.fn()
  const invalidateRunHistory = vi.fn()
  const fetchInspectVars = vi.fn()
  const invalidAllLastRun = vi.fn()
  const trackWorkflowRunFailed = vi.fn()
  const player = {
    playAudioWithAudio: vi.fn(),
  } as unknown as AudioPlayer
  const callbacks = createBaseWorkflowRunCallbacks({
    clientWidth: 640,
    clientHeight: 360,
    runHistoryUrl: '/apps/app-1/workflow-runs',
    isInWorkflowDebug: true,
    fetchInspectVars,
    invalidAllLastRun,
    invalidateRunHistory,
    clearAbortController,
    clearListeningState,
    trackWorkflowRunFailed,
    handlers,
    callbacks: userCallbacks,
    restCallback: {},
    getOrCreatePlayer: vi.fn<() => AudioPlayer | null>(() => player),
  })
  // Act: one invocation of every event the callback object exposes.
  callbacks.onWorkflowStarted?.({ workflow_run_id: 'run-1' } as never)
  callbacks.onNodeStarted?.({ node_id: 'node-1' } as never)
  callbacks.onNodeFinished?.({ node_id: 'node-1' } as never)
  callbacks.onIterationStart?.({ node_id: 'node-1' } as never)
  callbacks.onIterationNext?.({ node_id: 'node-1' } as never)
  callbacks.onIterationFinish?.({ node_id: 'node-1' } as never)
  callbacks.onLoopStart?.({ node_id: 'node-1' } as never)
  callbacks.onLoopNext?.({ node_id: 'node-1' } as never)
  callbacks.onLoopFinish?.({ node_id: 'node-1' } as never)
  callbacks.onNodeRetry?.({ node_id: 'node-1' } as never)
  callbacks.onAgentLog?.({ node_id: 'node-1' } as never)
  callbacks.onTextChunk?.({ data: 'chunk' } as never)
  callbacks.onTextReplace?.({ text: 'replacement' } as never)
  callbacks.onHumanInputRequired?.({ node_id: 'node-1' } as never)
  callbacks.onHumanInputFormFilled?.({ node_id: 'node-1' } as never)
  callbacks.onHumanInputFormTimeout?.({ node_id: 'node-1' } as never)
  callbacks.onWorkflowFinished?.({ workflow_run_id: 'run-1' } as never)
  await callbacks.onCompleted?.(false, '')
  callbacks.onTTSChunk?.('message-1', 'audio-chunk')
  callbacks.onTTSEnd?.('message-1', 'audio-finished')
  callbacks.onWorkflowPaused?.({ workflow_run_id: 'run-2' } as never)
  callbacks.onError?.({ error: 'failed', node_type: 'llm' } as never, '500')
  // Assert: "started"-style node events also receive the canvas dimensions.
  expect(handlers.handleWorkflowStarted).toHaveBeenCalled()
  expect(userCallbacks.onWorkflowStarted).toHaveBeenCalled()
  expect(handlers.handleWorkflowNodeStarted).toHaveBeenCalledWith(
    { node_id: 'node-1' },
    { clientWidth: 640, clientHeight: 360 },
  )
  expect(userCallbacks.onNodeStarted).toHaveBeenCalled()
  expect(handlers.handleWorkflowNodeFinished).toHaveBeenCalled()
  expect(userCallbacks.onNodeFinished).toHaveBeenCalled()
  expect(handlers.handleWorkflowNodeIterationStarted).toHaveBeenCalledWith(
    { node_id: 'node-1' },
    { clientWidth: 640, clientHeight: 360 },
  )
  expect(userCallbacks.onIterationStart).toHaveBeenCalled()
  expect(handlers.handleWorkflowNodeIterationNext).toHaveBeenCalled()
  expect(userCallbacks.onIterationNext).toHaveBeenCalled()
  expect(handlers.handleWorkflowNodeIterationFinished).toHaveBeenCalled()
  expect(userCallbacks.onIterationFinish).toHaveBeenCalled()
  expect(handlers.handleWorkflowNodeLoopStarted).toHaveBeenCalledWith(
    { node_id: 'node-1' },
    { clientWidth: 640, clientHeight: 360 },
  )
  expect(userCallbacks.onLoopStart).toHaveBeenCalled()
  expect(handlers.handleWorkflowNodeLoopNext).toHaveBeenCalled()
  expect(userCallbacks.onLoopNext).toHaveBeenCalled()
  expect(handlers.handleWorkflowNodeLoopFinished).toHaveBeenCalled()
  expect(userCallbacks.onLoopFinish).toHaveBeenCalled()
  expect(handlers.handleWorkflowNodeRetry).toHaveBeenCalled()
  expect(userCallbacks.onNodeRetry).toHaveBeenCalled()
  expect(handlers.handleWorkflowAgentLog).toHaveBeenCalled()
  expect(userCallbacks.onAgentLog).toHaveBeenCalled()
  expect(handlers.handleWorkflowTextChunk).toHaveBeenCalled()
  expect(handlers.handleWorkflowTextReplace).toHaveBeenCalled()
  expect(handlers.handleWorkflowNodeHumanInputRequired).toHaveBeenCalled()
  expect(userCallbacks.onHumanInputRequired).toHaveBeenCalled()
  expect(handlers.handleWorkflowNodeHumanInputFormFilled).toHaveBeenCalled()
  expect(userCallbacks.onHumanInputFormFilled).toHaveBeenCalled()
  expect(handlers.handleWorkflowNodeHumanInputFormTimeout).toHaveBeenCalled()
  expect(userCallbacks.onHumanInputFormTimeout).toHaveBeenCalled()
  expect(clearListeningState).toHaveBeenCalled()
  expect(handlers.handleWorkflowFinished).toHaveBeenCalled()
  expect(userCallbacks.onWorkflowFinished).toHaveBeenCalled()
  // Debug mode was on, so inspect vars are refreshed after the run ends.
  expect(fetchInspectVars).toHaveBeenCalledWith({})
  expect(invalidAllLastRun).toHaveBeenCalled()
  expect(userCallbacks.onCompleted).toHaveBeenCalledWith(false, '')
  // TTS: chunks stream (second arg true), the final piece does not (false).
  expect(player.playAudioWithAudio).toHaveBeenCalledWith('audio-chunk', true)
  expect(player.playAudioWithAudio).toHaveBeenCalledWith('audio-finished', false)
  expect(mockResetMsgId).toHaveBeenCalledWith('message-1')
  expect(handlers.handleWorkflowPaused).toHaveBeenCalled()
  expect(userCallbacks.onWorkflowPaused).toHaveBeenCalled()
  // Pause continuation re-subscribes with the SAME callbacks object.
  expect(mockSseGet).toHaveBeenCalledWith('/workflow/run-2/events', {}, callbacks)
  expect(clearAbortController).toHaveBeenCalled()
  expect(handlers.handleWorkflowFailed).toHaveBeenCalled()
  expect(userCallbacks.onError).toHaveBeenCalledWith({ error: 'failed', node_type: 'llm' }, '500')
  expect(trackWorkflowRunFailed).toHaveBeenCalledWith({ error: 'failed', node_type: 'llm' })
  expect(invalidateRunHistory).toHaveBeenCalledWith('/apps/app-1/workflow-runs')
})
// Negative path: with debug off, finishing does NOT refresh inspect vars;
// an empty TTS chunk does not reset the message id.
it('should skip base debug-only side effects and audio playback when debug mode is off or audio is empty', () => {
  const handlers = createHandlers()
  const fetchInspectVars = vi.fn()
  const invalidAllLastRun = vi.fn()
  // Player factory deliberately returns null to exercise the no-player path.
  const getOrCreatePlayer = vi.fn<() => AudioPlayer | null>(() => null)
  const callbacks = createBaseWorkflowRunCallbacks({
    clientWidth: 320,
    clientHeight: 240,
    runHistoryUrl: '/apps/app-1/workflow-runs',
    isInWorkflowDebug: false,
    fetchInspectVars,
    invalidAllLastRun,
    invalidateRunHistory: vi.fn(),
    clearAbortController: vi.fn(),
    clearListeningState: vi.fn(),
    trackWorkflowRunFailed: vi.fn(),
    handlers,
    callbacks: {},
    restCallback: {},
    getOrCreatePlayer,
  })
  callbacks.onWorkflowFinished?.({ workflow_run_id: 'run-1' } as never)
  callbacks.onTTSChunk?.('message-1', '')
  callbacks.onTTSEnd?.('message-1', 'audio-finished')
  expect(fetchInspectVars).not.toHaveBeenCalled()
  expect(invalidAllLastRun).not.toHaveBeenCalled()
  // Only one of the two TTS events attempted to obtain a player.
  expect(getOrCreatePlayer).toHaveBeenCalledTimes(1)
  expect(mockResetMsgId).not.toHaveBeenCalled()
})
// Mirrors the base-event routing test but for the FINAL callbacks object,
// which also wires the abort controller eagerly and uses a supplied player.
it('should route final workflow events through handlers and continue paused runs with final callbacks', async () => {
  const handlers = createHandlers()
  const userCallbacks = createUserCallbacks()
  const fetchInspectVars = vi.fn()
  const invalidAllLastRun = vi.fn()
  const invalidateRunHistory = vi.fn()
  const setAbortController = vi.fn()
  const player = {
    playAudioWithAudio: vi.fn(),
  } as unknown as AudioPlayer
  // Base options (debug off) that the final factory wraps.
  const baseSseOptions = createBaseWorkflowRunCallbacks({
    clientWidth: 480,
    clientHeight: 320,
    runHistoryUrl: '/apps/app-1/workflow-runs',
    isInWorkflowDebug: false,
    fetchInspectVars: vi.fn(),
    invalidAllLastRun: vi.fn(),
    invalidateRunHistory: vi.fn(),
    clearAbortController: vi.fn(),
    clearListeningState: vi.fn(),
    trackWorkflowRunFailed: vi.fn(),
    handlers,
    callbacks: {},
    restCallback: {},
    getOrCreatePlayer: vi.fn<() => AudioPlayer | null>(() => player),
  })
  // Final options flip debug ON, so inspect vars should refresh on finish.
  const finalCallbacks = createFinalWorkflowRunCallbacks({
    clientWidth: 480,
    clientHeight: 320,
    runHistoryUrl: '/apps/app-1/workflow-runs',
    isInWorkflowDebug: true,
    fetchInspectVars,
    invalidAllLastRun,
    invalidateRunHistory,
    clearAbortController: vi.fn(),
    clearListeningState: vi.fn(),
    trackWorkflowRunFailed: vi.fn(),
    handlers,
    callbacks: userCallbacks,
    restCallback: {},
    baseSseOptions,
    player,
    setAbortController,
  })
  finalCallbacks.getAbortController?.(new AbortController())
  finalCallbacks.onWorkflowFinished?.({ workflow_run_id: 'run-1' } as never)
  finalCallbacks.onNodeStarted?.({ node_id: 'node-1' } as never)
  finalCallbacks.onNodeFinished?.({ node_id: 'node-1' } as never)
  finalCallbacks.onIterationStart?.({ node_id: 'node-1' } as never)
  finalCallbacks.onIterationNext?.({ node_id: 'node-1' } as never)
  finalCallbacks.onIterationFinish?.({ node_id: 'node-1' } as never)
  finalCallbacks.onLoopStart?.({ node_id: 'node-1' } as never)
  finalCallbacks.onLoopNext?.({ node_id: 'node-1' } as never)
  finalCallbacks.onLoopFinish?.({ node_id: 'node-1' } as never)
  finalCallbacks.onNodeRetry?.({ node_id: 'node-1' } as never)
  finalCallbacks.onAgentLog?.({ node_id: 'node-1' } as never)
  finalCallbacks.onTextChunk?.({ data: 'chunk' } as never)
  finalCallbacks.onTextReplace?.({ text: 'replacement' } as never)
  finalCallbacks.onHumanInputRequired?.({ node_id: 'node-1' } as never)
  finalCallbacks.onHumanInputFormFilled?.({ node_id: 'node-1' } as never)
  finalCallbacks.onHumanInputFormTimeout?.({ node_id: 'node-1' } as never)
  finalCallbacks.onWorkflowPaused?.({ workflow_run_id: 'run-2' } as never)
  finalCallbacks.onTTSChunk?.('message-2', 'audio-chunk')
  finalCallbacks.onTTSEnd?.('message-2', 'audio-finished')
  await finalCallbacks.onCompleted?.(true, 'done')
  finalCallbacks.onError?.({ error: 'failed' } as never, '500')
  expect(setAbortController).toHaveBeenCalled()
  expect(handlers.handleWorkflowFinished).toHaveBeenCalled()
  expect(userCallbacks.onWorkflowFinished).toHaveBeenCalled()
  expect(fetchInspectVars).toHaveBeenCalledWith({})
  expect(invalidAllLastRun).toHaveBeenCalled()
  // "Started"-style node events receive the canvas dimensions.
  expect(handlers.handleWorkflowNodeStarted).toHaveBeenCalledWith(
    { node_id: 'node-1' },
    { clientWidth: 480, clientHeight: 320 },
  )
  expect(handlers.handleWorkflowNodeIterationStarted).toHaveBeenCalledWith(
    { node_id: 'node-1' },
    { clientWidth: 480, clientHeight: 320 },
  )
  expect(handlers.handleWorkflowNodeLoopStarted).toHaveBeenCalledWith(
    { node_id: 'node-1' },
    { clientWidth: 480, clientHeight: 320 },
  )
  expect(userCallbacks.onNodeStarted).toHaveBeenCalled()
  expect(userCallbacks.onNodeFinished).toHaveBeenCalled()
  expect(userCallbacks.onIterationStart).toHaveBeenCalled()
  expect(userCallbacks.onIterationNext).toHaveBeenCalled()
  expect(userCallbacks.onIterationFinish).toHaveBeenCalled()
  expect(userCallbacks.onLoopStart).toHaveBeenCalled()
  expect(userCallbacks.onLoopNext).toHaveBeenCalled()
  expect(userCallbacks.onLoopFinish).toHaveBeenCalled()
  expect(userCallbacks.onNodeRetry).toHaveBeenCalled()
  expect(userCallbacks.onAgentLog).toHaveBeenCalled()
  expect(handlers.handleWorkflowTextChunk).toHaveBeenCalled()
  expect(handlers.handleWorkflowTextReplace).toHaveBeenCalled()
  expect(handlers.handleWorkflowNodeHumanInputRequired).toHaveBeenCalled()
  expect(userCallbacks.onHumanInputRequired).toHaveBeenCalled()
  expect(handlers.handleWorkflowNodeHumanInputFormFilled).toHaveBeenCalled()
  expect(userCallbacks.onHumanInputFormFilled).toHaveBeenCalled()
  expect(handlers.handleWorkflowNodeHumanInputFormTimeout).toHaveBeenCalled()
  expect(userCallbacks.onHumanInputFormTimeout).toHaveBeenCalled()
  expect(handlers.handleWorkflowPaused).toHaveBeenCalled()
  expect(userCallbacks.onWorkflowPaused).toHaveBeenCalled()
  // Pause continuation re-subscribes with the FINAL callbacks object.
  expect(mockSseGet).toHaveBeenCalledWith('/workflow/run-2/events', {}, finalCallbacks)
  expect(player.playAudioWithAudio).toHaveBeenCalledWith('audio-chunk', true)
  expect(player.playAudioWithAudio).toHaveBeenCalledWith('audio-finished', false)
  expect(handlers.handleWorkflowFailed).toHaveBeenCalled()
  expect(userCallbacks.onError).toHaveBeenCalledWith({ error: 'failed' }, '500')
  expect(invalidateRunHistory).toHaveBeenCalledWith('/apps/app-1/workflow-runs')
})
})

View File

@@ -1,431 +0,0 @@
import { TriggerType } from '@/app/components/workflow/header/test-run-menu'
import { WorkflowRunningStatus } from '@/app/components/workflow/types'
import { AppModeEnum } from '@/types/app'
import {
applyRunningStateForMode,
applyStoppedState,
buildListeningTriggerNodeIds,
buildRunHistoryUrl,
buildTTSConfig,
buildWorkflowRunRequestBody,
clearListeningState,
clearWindowDebugControllers,
createFailedWorkflowState,
createRunningWorkflowState,
createStoppedWorkflowState,
mapPublishedWorkflowFeatures,
normalizePublishedWorkflowNodes,
resolveWorkflowRunUrl,
runTriggerDebug,
validateWorkflowRunRequest,
} from '../use-workflow-run-utils'
// Hoisted mock fns so the vi.mock factories below (hoisted by vitest) can
// reference them without a TDZ error.
const {
  mockPost,
  mockHandleStream,
  mockToastError,
} = vi.hoisted(() => ({
  mockPost: vi.fn(),
  mockHandleStream: vi.fn(),
  mockToastError: vi.fn(),
}))
// Stub the HTTP/stream layer and the toast UI so no real I/O happens.
vi.mock('@/service/base', () => ({
  post: mockPost,
  handleStream: mockHandleStream,
}))
vi.mock('@/app/components/base/ui/toast', () => ({
  toast: {
    error: mockToastError,
  },
}))
/**
 * Build the listening-state store-action bundle, every entry a fresh vi.fn().
 */
function createListeningActions() {
  const actionNames = [
    'setWorkflowRunningData',
    'setIsListening',
    'setShowVariableInspectPanel',
    'setListeningTriggerType',
    'setListeningTriggerNodeIds',
    'setListeningTriggerIsAll',
    'setListeningTriggerNodeId',
  ] as const
  return Object.fromEntries(
    actionNames.map(name => [name, vi.fn()]),
  ) as Record<(typeof actionNames)[number], ReturnType<typeof vi.fn>>
}
describe('useWorkflowRun utils', () => {
beforeEach(() => {
vi.clearAllMocks()
})
it('should resolve run history urls and run endpoints for workflow modes', () => {
expect(buildRunHistoryUrl({ id: 'app-1', mode: AppModeEnum.WORKFLOW })).toBe('/apps/app-1/workflow-runs')
expect(buildRunHistoryUrl({ id: 'app-1', mode: AppModeEnum.ADVANCED_CHAT })).toBe('/apps/app-1/advanced-chat/workflow-runs')
expect(resolveWorkflowRunUrl({ id: 'app-1', mode: AppModeEnum.WORKFLOW }, TriggerType.UserInput, true)).toBe('/apps/app-1/workflows/draft/run')
expect(resolveWorkflowRunUrl({ id: 'app-1', mode: AppModeEnum.ADVANCED_CHAT }, TriggerType.UserInput, false)).toBe('/apps/app-1/advanced-chat/workflows/draft/run')
expect(resolveWorkflowRunUrl({ id: 'app-1', mode: AppModeEnum.WORKFLOW }, TriggerType.Schedule, true)).toBe('/apps/app-1/workflows/draft/trigger/run')
expect(resolveWorkflowRunUrl({ id: 'app-1', mode: AppModeEnum.WORKFLOW }, TriggerType.All, true)).toBe('/apps/app-1/workflows/draft/trigger/run-all')
})
// Each trigger type maps its node id(s) into the request body; validation
// returns a human-readable error string ('' means valid).
it('should build request bodies and validation errors for trigger runs', () => {
  expect(buildWorkflowRunRequestBody(TriggerType.Schedule, {}, { scheduleNodeId: 'schedule-1' })).toEqual({ node_id: 'schedule-1' })
  expect(buildWorkflowRunRequestBody(TriggerType.Webhook, {}, { webhookNodeId: 'webhook-1' })).toEqual({ node_id: 'webhook-1' })
  expect(buildWorkflowRunRequestBody(TriggerType.Plugin, {}, { pluginNodeId: 'plugin-1' })).toEqual({ node_id: 'plugin-1' })
  expect(buildWorkflowRunRequestBody(TriggerType.All, {}, { allNodeIds: ['trigger-1', 'trigger-2'] })).toEqual({ node_ids: ['trigger-1', 'trigger-2'] })
  // UserInput passes the params object straight through.
  expect(buildWorkflowRunRequestBody(TriggerType.UserInput, { inputs: { query: 'hello' } })).toEqual({ inputs: { query: 'hello' } })
  expect(validateWorkflowRunRequest(TriggerType.Schedule)).toBe('handleRun: schedule trigger run requires node id')
  expect(validateWorkflowRunRequest(TriggerType.Webhook)).toBe('handleRun: webhook trigger run requires node id')
  expect(validateWorkflowRunRequest(TriggerType.Plugin)).toBe('handleRun: plugin trigger run requires node id')
  // "All" is valid even with no ids (or an explicitly empty list).
  expect(validateWorkflowRunRequest(TriggerType.All)).toBe('')
  expect(validateWorkflowRunRequest(TriggerType.All, { allNodeIds: [] })).toBe('')
})
// Missing app id (or user-input outside debug mode) yields '' and logs the
// reason instead of throwing.
it('should return empty trigger urls when app id is missing and keep user-input urls empty outside workflow debug', () => {
  const consoleErrorSpy = vi.spyOn(console, 'error').mockImplementation(() => {})
  expect(resolveWorkflowRunUrl(undefined, TriggerType.Plugin, true)).toBe('')
  expect(resolveWorkflowRunUrl(undefined, TriggerType.All, true)).toBe('')
  expect(resolveWorkflowRunUrl({ id: 'app-1', mode: AppModeEnum.WORKFLOW }, TriggerType.UserInput, false)).toBe('')
  expect(consoleErrorSpy).toHaveBeenCalledWith('handleRun: missing app id for trigger plugin run')
  expect(consoleErrorSpy).toHaveBeenCalledWith('handleRun: missing app id for trigger run all')
  consoleErrorSpy.mockRestore()
})
// Trigger-based runs enter listening mode with node ids populated; a plain
// user-input run clears all listening flags but both set the running state.
it('should configure listening state for trigger and non-trigger modes', () => {
  const triggerActions = createListeningActions()
  applyRunningStateForMode(triggerActions, TriggerType.All, { allNodeIds: ['trigger-1', 'trigger-2'] })
  expect(triggerActions.setIsListening).toHaveBeenCalledWith(true)
  expect(triggerActions.setShowVariableInspectPanel).toHaveBeenCalledWith(true)
  expect(triggerActions.setListeningTriggerIsAll).toHaveBeenCalledWith(true)
  expect(triggerActions.setListeningTriggerNodeIds).toHaveBeenCalledWith(['trigger-1', 'trigger-2'])
  expect(triggerActions.setWorkflowRunningData).toHaveBeenCalledWith(createRunningWorkflowState())
  const normalActions = createListeningActions()
  applyRunningStateForMode(normalActions, TriggerType.UserInput)
  expect(normalActions.setIsListening).toHaveBeenCalledWith(false)
  expect(normalActions.setListeningTriggerType).toHaveBeenCalledWith(null)
  expect(normalActions.setListeningTriggerNodeId).toHaveBeenCalledWith(null)
  expect(normalActions.setListeningTriggerNodeIds).toHaveBeenCalledWith([])
  expect(normalActions.setListeningTriggerIsAll).toHaveBeenCalledWith(false)
  expect(normalActions.setWorkflowRunningData).toHaveBeenCalledWith(createRunningWorkflowState())
})
// Covers three cleanup helpers: clearing listening flags, applying the
// stopped state, and removing the window-level debug abort controllers.
it('should clear listening state, stop state, and remove debug controllers', () => {
  const listeningActions = createListeningActions()
  clearListeningState(listeningActions)
  expect(listeningActions.setIsListening).toHaveBeenCalledWith(false)
  expect(listeningActions.setListeningTriggerType).toHaveBeenCalledWith(null)
  expect(listeningActions.setListeningTriggerNodeId).toHaveBeenCalledWith(null)
  expect(listeningActions.setListeningTriggerNodeIds).toHaveBeenCalledWith([])
  expect(listeningActions.setListeningTriggerIsAll).toHaveBeenCalledWith(false)
  const stoppedActions = createListeningActions()
  applyStoppedState(stoppedActions)
  expect(stoppedActions.setWorkflowRunningData).toHaveBeenCalledWith(createStoppedWorkflowState())
  expect(stoppedActions.setShowVariableInspectPanel).toHaveBeenCalledWith(true)
  // All four known debug controllers must be deleted from the target object.
  const controllerTarget = {
    __webhookDebugAbortController: { abort: vi.fn() },
    __pluginDebugAbortController: { abort: vi.fn() },
    __scheduleDebugAbortController: { abort: vi.fn() },
    __allTriggersDebugAbortController: { abort: vi.fn() },
  }
  clearWindowDebugControllers(controllerTarget)
  expect(controllerTarget).toEqual({})
})
// Pure mapping helpers: listening node ids per trigger type, TTS URL/public
// flag derived from the current path, and published-workflow normalization.
it('should derive listening node ids, tts config, and published workflow mappings', () => {
  expect(buildListeningTriggerNodeIds(TriggerType.Webhook, { webhookNodeId: 'webhook-1' })).toEqual(['webhook-1'])
  expect(buildListeningTriggerNodeIds(TriggerType.Schedule, { scheduleNodeId: 'schedule-1' })).toEqual(['schedule-1'])
  expect(buildListeningTriggerNodeIds(TriggerType.Plugin, { pluginNodeId: 'plugin-1' })).toEqual(['plugin-1'])
  expect(buildListeningTriggerNodeIds(TriggerType.All, { allNodeIds: ['trigger-1', 'trigger-2'] })).toEqual(['trigger-1', 'trigger-2'])
  // A public token means the shared public TTS endpoint is used.
  expect(buildTTSConfig({ token: 'public-token' }, '/apps/app-1')).toEqual({
    ttsUrl: '/text-to-audio',
    ttsIsPublic: true,
  })
  expect(buildTTSConfig({ appId: 'app-1' }, '/explore/installed/app-1')).toEqual({
    ttsUrl: '/installed-apps/app-1/text-to-audio',
    ttsIsPublic: false,
  })
  expect(buildTTSConfig({ appId: 'app-1' }, '/apps/app-1/workflow')).toEqual({
    ttsUrl: '/apps/app-1/text-to-audio',
    ttsIsPublic: false,
  })
  const publishedWorkflow = {
    graph: {
      nodes: [{ id: 'node-1', selected: true, data: { selected: true, title: 'Start' } }],
      edges: [],
      viewport: { x: 0, y: 0, zoom: 1 },
    },
    features: {
      opening_statement: 'hello',
      suggested_questions: ['Q1'],
      suggested_questions_after_answer: { enabled: true },
      text_to_speech: { enabled: true },
      speech_to_text: { enabled: true },
      retriever_resource: { enabled: true },
      sensitive_word_avoidance: { enabled: true },
      file_upload: { enabled: true },
    },
  } as never
  // Normalization clears any persisted selection on nodes.
  expect(normalizePublishedWorkflowNodes(publishedWorkflow)).toEqual([
    { id: 'node-1', selected: false, data: { selected: false, title: 'Start' } },
  ])
  expect(mapPublishedWorkflowFeatures(publishedWorkflow)).toMatchObject({
    opening: {
      enabled: true,
      opening_statement: 'hello',
      suggested_questions: ['Q1'],
    },
    suggested: { enabled: true },
    text2speech: { enabled: true },
    speech2text: { enabled: true },
    citation: { enabled: true },
    moderation: { enabled: true },
    file: { enabled: true },
  })
})
// A null response or an unparseable JSON body is surfaced as a generic
// "<Type> debug request failed" toast rather than a crash.
it('should handle trigger debug null and invalid json responses as request failures', async () => {
  const clearAbortController = vi.fn()
  const clearListeningStateSpy = vi.fn()
  const setAbortController = vi.fn()
  const setWorkflowRunningData = vi.fn()
  const controllerTarget: Record<string, unknown> = {}
  const consoleErrorSpy = vi.spyOn(console, 'error').mockImplementation(() => {})
  mockPost.mockResolvedValueOnce(null)
  await runTriggerDebug({
    debugType: TriggerType.Webhook,
    url: '/apps/app-1/workflows/draft/trigger/run',
    requestBody: { node_id: 'webhook-1' },
    baseSseOptions: {},
    controllerTarget,
    setAbortController,
    clearAbortController,
    clearListeningState: clearListeningStateSpy,
    setWorkflowRunningData,
  })
  expect(mockToastError).toHaveBeenCalledWith('Webhook debug request failed')
  expect(clearAbortController).toHaveBeenCalledTimes(1)
  // Null response: listening state is NOT cleared.
  expect(clearListeningStateSpy).not.toHaveBeenCalled()
  // Invalid JSON body: the parse error is logged, then treated as a failure.
  mockPost.mockResolvedValueOnce(new Response('{invalid-json}', {
    headers: { 'content-type': 'application/json' },
  }))
  await runTriggerDebug({
    debugType: TriggerType.Schedule,
    url: '/apps/app-1/workflows/draft/trigger/run',
    requestBody: { node_id: 'schedule-1' },
    baseSseOptions: {},
    controllerTarget,
    setAbortController,
    clearAbortController,
    clearListeningState: clearListeningStateSpy,
    setWorkflowRunningData,
  })
  expect(consoleErrorSpy).toHaveBeenCalledWith(
    'handleRun: schedule debug response parse error',
    expect.any(Error),
  )
  expect(mockToastError).toHaveBeenCalledWith('Schedule debug request failed')
  expect(clearAbortController).toHaveBeenCalledTimes(2)
  expect(clearListeningStateSpy).toHaveBeenCalledTimes(1)
  expect(setWorkflowRunningData).not.toHaveBeenCalled()
  consoleErrorSpy.mockRestore()
})
// A JSON error payload toasts its message and records a failed run state;
// an event-stream response is handed off to the stream handler.
it('should handle trigger debug json failures and stream responses', async () => {
  const clearAbortController = vi.fn()
  const clearListeningStateSpy = vi.fn()
  const setAbortController = vi.fn()
  const setWorkflowRunningData = vi.fn()
  const controllerTarget: Record<string, unknown> = {}
  const baseSseOptions = {
    onData: vi.fn(),
    onCompleted: vi.fn(),
  }
  mockPost.mockResolvedValueOnce(new Response(JSON.stringify({ message: 'Webhook failed' }), {
    headers: { 'content-type': 'application/json' },
  }))
  await runTriggerDebug({
    debugType: TriggerType.Webhook,
    url: '/apps/app-1/workflows/draft/trigger/run',
    requestBody: { node_id: 'webhook-1' },
    baseSseOptions,
    controllerTarget,
    setAbortController,
    clearAbortController,
    clearListeningState: clearListeningStateSpy,
    setWorkflowRunningData,
  })
  expect(setAbortController).toHaveBeenCalledTimes(1)
  // The server-provided message is surfaced verbatim.
  expect(mockToastError).toHaveBeenCalledWith('Webhook failed')
  expect(clearAbortController).toHaveBeenCalled()
  expect(clearListeningStateSpy).toHaveBeenCalled()
  expect(setWorkflowRunningData).toHaveBeenCalledWith(createFailedWorkflowState('Webhook failed'))
  // Stream content-type: delegate to handleStream instead of toasting.
  mockPost.mockResolvedValueOnce(new Response('data: ok', {
    headers: { 'content-type': 'text/event-stream' },
  }))
  await runTriggerDebug({
    debugType: TriggerType.Plugin,
    url: '/apps/app-1/workflows/draft/trigger/run',
    requestBody: { node_id: 'plugin-1' },
    baseSseOptions,
    controllerTarget,
    setAbortController,
    clearAbortController,
    clearListeningState: clearListeningStateSpy,
    setWorkflowRunningData,
  })
  expect(clearListeningStateSpy).toHaveBeenCalledTimes(2)
  expect(mockHandleStream).toHaveBeenCalledTimes(1)
})
// A JSON { status: 'waiting', retry_in } response re-polls after the given
// delay (fake timers) until an event stream is finally returned.
it('should retry waiting trigger debug responses until a stream is returned', async () => {
  vi.useFakeTimers()
  const clearAbortController = vi.fn()
  const clearListeningStateSpy = vi.fn()
  const setAbortController = vi.fn()
  const setWorkflowRunningData = vi.fn()
  const controllerTarget: Record<string, unknown> = {}
  const baseSseOptions = {
    onData: vi.fn(),
    onCompleted: vi.fn(),
  }
  mockPost
    .mockResolvedValueOnce(new Response(JSON.stringify({ status: 'waiting', retry_in: 1 }), {
      headers: { 'content-type': 'application/json' },
    }))
    .mockResolvedValueOnce(new Response('data: ok', {
      headers: { 'content-type': 'text/event-stream' },
    }))
  const runPromise = runTriggerDebug({
    debugType: TriggerType.All,
    url: '/apps/app-1/workflows/draft/trigger/run-all',
    requestBody: { node_ids: ['trigger-1'] },
    baseSseOptions,
    controllerTarget,
    setAbortController,
    clearAbortController,
    clearListeningState: clearListeningStateSpy,
    setWorkflowRunningData,
  })
  // Advance past retry_in so the second (stream) response is consumed.
  await vi.advanceTimersByTimeAsync(1)
  await runPromise
  expect(mockPost).toHaveBeenCalledTimes(2)
  expect(clearListeningStateSpy).toHaveBeenCalledTimes(1)
  expect(mockHandleStream).toHaveBeenCalledTimes(1)
  vi.useRealTimers()
})
// If the abort controller fires before the response is handled, processing
// stops entirely: no stream handling, no toast, no state changes.
it('should stop trigger debug processing when the controller aborts before handling the response', async () => {
  const clearAbortController = vi.fn()
  const clearListeningStateSpy = vi.fn()
  const setWorkflowRunningData = vi.fn()
  const controllerTarget: Record<string, unknown> = {}
  mockPost.mockResolvedValueOnce(new Response('data: ok', {
    headers: { 'content-type': 'text/event-stream' },
  }))
  await runTriggerDebug({
    debugType: TriggerType.Plugin,
    url: '/apps/app-1/workflows/draft/trigger/run',
    requestBody: { node_id: 'plugin-1' },
    baseSseOptions: {},
    controllerTarget,
    // Abort immediately, as soon as the controller is handed out.
    setAbortController: (controller) => {
      controller?.abort()
    },
    clearAbortController,
    clearListeningState: clearListeningStateSpy,
    setWorkflowRunningData,
  })
  expect(mockHandleStream).not.toHaveBeenCalled()
  expect(mockToastError).not.toHaveBeenCalled()
  expect(clearAbortController).not.toHaveBeenCalled()
  expect(clearListeningStateSpy).not.toHaveBeenCalled()
  expect(setWorkflowRunningData).not.toHaveBeenCalled()
})
// Rejections carrying a Response get their JSON error toasted and a failed
// state recorded; plain Errors clear listening state without either.
it('should handle Response and non-Response trigger debug exceptions correctly', async () => {
  const clearAbortController = vi.fn()
  const clearListeningStateSpy = vi.fn()
  const setAbortController = vi.fn()
  const setWorkflowRunningData = vi.fn()
  const controllerTarget: Record<string, unknown> = {}
  mockPost.mockRejectedValueOnce(new Response(JSON.stringify({ error: 'Plugin failed' }), {
    headers: { 'content-type': 'application/json' },
  }))
  await runTriggerDebug({
    debugType: TriggerType.Plugin,
    url: '/apps/app-1/workflows/draft/trigger/run',
    requestBody: { node_id: 'plugin-1' },
    baseSseOptions: {},
    controllerTarget,
    setAbortController,
    clearAbortController,
    clearListeningState: clearListeningStateSpy,
    setWorkflowRunningData,
  })
  expect(mockToastError).toHaveBeenCalledWith('Plugin failed')
  expect(clearAbortController).toHaveBeenCalledTimes(1)
  expect(setWorkflowRunningData).toHaveBeenCalledWith(createFailedWorkflowState('Plugin failed'))
  expect(clearListeningStateSpy).toHaveBeenCalledTimes(1)
  // Non-Response error: listening state is cleared, but the abort controller
  // and running-data writes stay at their previous counts.
  mockPost.mockRejectedValueOnce(new Error('network failed'))
  await runTriggerDebug({
    debugType: TriggerType.Plugin,
    url: '/apps/app-1/workflows/draft/trigger/run',
    requestBody: { node_id: 'plugin-1' },
    baseSseOptions: {},
    controllerTarget,
    setAbortController,
    clearAbortController,
    clearListeningState: clearListeningStateSpy,
    setWorkflowRunningData,
  })
  expect(clearAbortController).toHaveBeenCalledTimes(1)
  expect(setWorkflowRunningData).toHaveBeenCalledTimes(1)
  expect(clearListeningStateSpy).toHaveBeenCalledTimes(2)
})
it('should expose the canonical workflow state factories', () => {
  // Each factory must stamp the matching run status on its result payload.
  const running = createRunningWorkflowState()
  const stopped = createStoppedWorkflowState()
  const failed = createFailedWorkflowState('failed')
  expect(running.result.status).toBe(WorkflowRunningStatus.Running)
  expect(stopped.result.status).toBe(WorkflowRunningStatus.Stopped)
  expect(failed.result.status).toBe(WorkflowRunningStatus.Failed)
})
})

View File

@@ -1,592 +0,0 @@
import { act, renderHook } from '@testing-library/react'
import { TriggerType } from '@/app/components/workflow/header/test-run-menu'
import { WorkflowRunningStatus } from '@/app/components/workflow/types'
import { useWorkflowRun } from '../use-workflow-run'
// Minimal shape of an AbortController-like handle stored on window for debug runs.
type DebugAbortControllerRef = {
abort: () => void
}
// window augmented with the per-trigger-type debug abort controllers that the
// hook under test installs/reads; used by the stop-run test below.
type DebugControllerWindow = Window & {
__webhookDebugAbortController?: DebugAbortControllerRef
__pluginDebugAbortController?: DebugAbortControllerRef
__scheduleDebugAbortController?: DebugAbortControllerRef
__allTriggersDebugAbortController?: DebugAbortControllerRef
}
// Partial workflow-store slice these tests touch; all fields optional because
// the mutable mock state starts empty and is filled in beforeEach.
type WorkflowStoreState = {
backupDraft?: unknown
environmentVariables?: unknown
setBackupDraft?: (value: unknown) => void
setEnvironmentVariables?: (value: unknown) => void
setWorkflowRunningData?: (value: unknown) => void
setIsListening?: (value: boolean) => void
setShowVariableInspectPanel?: (value: boolean) => void
setListeningTriggerType?: (value: unknown) => void
setListeningTriggerNodeIds?: (value: string[]) => void
setListeningTriggerIsAll?: (value: boolean) => void
setListeningTriggerNodeId?: (value: string | null) => void
}
// All shared mock state and spies for this suite, created via vi.hoisted so
// they are in scope inside the vi.mock factories below (vitest hoists those
// factories above ordinary module-level code).
const mocks = vi.hoisted(() => {
// App-store snapshot: the workflow app the hook operates on.
const appStoreState = {
appDetail: {
id: 'app-1',
mode: 'workflow',
name: 'Workflow App',
},
}
// React Flow store stub: one edge plus spied node accessors.
const reactFlowStoreState = {
edges: [{ id: 'edge-1' }],
getNodes: vi.fn(),
setNodes: vi.fn(),
}
// Mutable workflow-store slice; setState merges partial updates into it.
const workflowStoreState: WorkflowStoreState = {}
const workflowStoreSetState = vi.fn((partial: Record<string, unknown>) => {
Object.assign(workflowStoreState, partial)
})
// Features store with file upload enabled by default.
const featuresStoreState = {
features: {
file: {
enabled: true,
},
},
}
const featuresStoreSetState = vi.fn((partial: Record<string, unknown>) => {
Object.assign(featuresStoreState, partial)
})
return {
appStoreState,
reactFlowStoreState,
workflowStoreState,
workflowStoreSetState,
featuresStoreState,
featuresStoreSetState,
mockGetViewport: vi.fn(),
mockDoSyncWorkflowDraft: vi.fn(),
mockHandleUpdateWorkflowCanvas: vi.fn(),
mockFetchInspectVars: vi.fn(),
mockInvalidateAllLastRun: vi.fn(),
mockInvalidateRunHistory: vi.fn(),
mockSsePost: vi.fn(),
mockSseGet: vi.fn(),
mockHandleStream: vi.fn(),
mockPost: vi.fn(),
mockStopWorkflowRun: vi.fn(),
mockTrackEvent: vi.fn(),
mockGetAudioPlayer: vi.fn(),
mockResetMsgId: vi.fn(),
mockCreateBaseWorkflowRunCallbacks: vi.fn(),
mockCreateFinalWorkflowRunCallbacks: vi.fn(),
// One spy per SSE run-event handler returned by useWorkflowRunEvent.
runEventHandlers: {
handleWorkflowStarted: vi.fn(),
handleWorkflowFinished: vi.fn(),
handleWorkflowFailed: vi.fn(),
handleWorkflowNodeStarted: vi.fn(),
handleWorkflowNodeFinished: vi.fn(),
handleWorkflowNodeHumanInputRequired: vi.fn(),
handleWorkflowNodeHumanInputFormFilled: vi.fn(),
handleWorkflowNodeHumanInputFormTimeout: vi.fn(),
handleWorkflowNodeIterationStarted: vi.fn(),
handleWorkflowNodeIterationNext: vi.fn(),
handleWorkflowNodeIterationFinished: vi.fn(),
handleWorkflowNodeLoopStarted: vi.fn(),
handleWorkflowNodeLoopNext: vi.fn(),
handleWorkflowNodeLoopFinished: vi.fn(),
handleWorkflowNodeRetry: vi.fn(),
handleWorkflowAgentLog: vi.fn(),
handleWorkflowTextChunk: vi.fn(),
handleWorkflowTextReplace: vi.fn(),
handleWorkflowPaused: vi.fn(),
},
}
})
// React Flow: route store/viewport access through the shared mock state.
vi.mock('reactflow', () => ({
useStoreApi: () => ({
getState: () => mocks.reactFlowStoreState,
}),
useReactFlow: () => ({
getViewport: mocks.mockGetViewport,
}),
}))
// App store: expose both the hook form and zustand's static getState.
vi.mock('@/app/components/app/store', () => {
const useStore = Object.assign(vi.fn(), {
getState: () => mocks.appStoreState,
})
return {
useStore,
}
})
// Analytics: capture tracked events.
vi.mock('@/app/components/base/amplitude', () => ({
trackEvent: mocks.mockTrackEvent,
}))
// Audio: singleton manager returning spied player accessors.
vi.mock('@/app/components/base/audio-btn/audio.player.manager', () => ({
AudioPlayerManager: {
getInstance: () => ({
getAudioPlayer: mocks.mockGetAudioPlayer,
resetMsgId: mocks.mockResetMsgId,
}),
},
}))
// Features store: mutable state plus a setState spy.
vi.mock('@/app/components/base/features/hooks', () => ({
useFeaturesStore: () => ({
getState: () => mocks.featuresStoreState,
setState: mocks.featuresStoreSetState,
}),
}))
// Canvas updates performed by the hook are observed via this spy.
vi.mock('@/app/components/workflow/hooks/use-workflow-interactions', () => ({
useWorkflowUpdate: () => ({
handleUpdateWorkflowCanvas: mocks.mockHandleUpdateWorkflowCanvas,
}),
}))
// SSE run-event handlers: one spy each (see mocks.runEventHandlers).
vi.mock('@/app/components/workflow/hooks/use-workflow-run-event/use-workflow-run-event', () => ({
useWorkflowRunEvent: () => mocks.runEventHandlers,
}))
// Workflow store: backed by the mutable mock slice above.
vi.mock('@/app/components/workflow/store', () => ({
useWorkflowStore: () => ({
getState: () => mocks.workflowStoreState,
setState: mocks.workflowStoreSetState,
}),
}))
// Router: the hook derives API URLs from the current pathname.
vi.mock('@/next/navigation', () => ({
usePathname: () => '/apps/app-1/workflow',
}))
// HTTP/SSE transport layer.
vi.mock('@/service/base', () => ({
ssePost: mocks.mockSsePost,
sseGet: mocks.mockSseGet,
post: mocks.mockPost,
handleStream: mocks.mockHandleStream,
}))
// Query invalidators used after a run completes.
vi.mock('@/service/use-workflow', () => ({
useInvalidAllLastRun: () => mocks.mockInvalidateAllLastRun,
useInvalidateWorkflowRunHistory: () => mocks.mockInvalidateRunHistory,
useInvalidateConversationVarValues: () => vi.fn(),
useInvalidateSysVarValues: () => vi.fn(),
}))
vi.mock('@/service/workflow', () => ({
stopWorkflowRun: mocks.mockStopWorkflowRun,
}))
vi.mock('@/app/components/workflow/hooks/use-fetch-workflow-inspect-vars', () => ({
useSetWorkflowVarsWithValue: () => ({
fetchInspectVars: mocks.mockFetchInspectVars,
}),
}))
vi.mock('../use-configs-map', () => ({
useConfigsMap: () => ({
flowId: 'flow-1',
flowType: 'workflow',
}),
}))
vi.mock('../use-nodes-sync-draft', () => ({
useNodesSyncDraft: () => ({
doSyncWorkflowDraft: mocks.mockDoSyncWorkflowDraft,
}),
}))
// Callback factories: pass through to the real implementation while also
// recording the params each factory was invoked with, so tests can inspect
// the factory context (tracking, audio player creation, ...).
vi.mock('../use-workflow-run-callbacks', async (importOriginal) => {
const actual = await importOriginal<typeof import('../use-workflow-run-callbacks')>()
return {
...actual,
createBaseWorkflowRunCallbacks: vi.fn((params) => {
mocks.mockCreateBaseWorkflowRunCallbacks(params)
return actual.createBaseWorkflowRunCallbacks(params)
}),
createFinalWorkflowRunCallbacks: vi.fn((params) => {
mocks.mockCreateFinalWorkflowRunCallbacks(params)
return actual.createFinalWorkflowRunCallbacks(params)
}),
}
})
// Builds a pristine workflow-store slice whose setters are spies. The
// backup-draft and environment-variable setters mirror their writes into the
// shared `mocks.workflowStoreState` so later assertions can observe them.
const createWorkflowStoreState = () => {
  const trackBackupDraft = vi.fn((value: unknown) => {
    mocks.workflowStoreState.backupDraft = value
  })
  const trackEnvironmentVariables = vi.fn((value: unknown) => {
    mocks.workflowStoreState.environmentVariables = value
  })
  return {
    backupDraft: undefined,
    environmentVariables: [{ id: 'env-current', value: 'secret' }],
    setBackupDraft: trackBackupDraft,
    setEnvironmentVariables: trackEnvironmentVariables,
    setWorkflowRunningData: vi.fn(),
    setIsListening: vi.fn(),
    setShowVariableInspectPanel: vi.fn(),
    setListeningTriggerType: vi.fn(),
    setListeningTriggerNodeIds: vi.fn(),
    setListeningTriggerIsAll: vi.fn(),
    setListeningTriggerNodeId: vi.fn(),
  }
}
describe('useWorkflowRun', () => {
  // Resets DOM, store, and mock state so every test starts from one baseline.
  beforeEach(() => {
    vi.clearAllMocks()
    document.body.innerHTML = '<div id="workflow-container"></div>'
    const workflowContainer = document.getElementById('workflow-container')!
    // jsdom reports 0x0 by default; give the container a real size so any
    // viewport math in the hook has non-zero dimensions to work with.
    Object.defineProperty(workflowContainer, 'clientWidth', { value: 960, configurable: true })
    Object.defineProperty(workflowContainer, 'clientHeight', { value: 540, configurable: true })
    mocks.reactFlowStoreState.getNodes.mockReturnValue([
      { id: 'node-1', data: { selected: true, _runningStatus: 'running' } },
    ])
    mocks.mockGetViewport.mockReturnValue({ x: 1, y: 2, zoom: 1.5 })
    mocks.mockDoSyncWorkflowDraft.mockResolvedValue(undefined)
    // Default POST result: a successful SSE stream response.
    mocks.mockPost.mockResolvedValue(new Response('data: ok', {
      headers: { 'content-type': 'text/event-stream' },
    }))
    mocks.mockGetAudioPlayer.mockReturnValue({
      playAudioWithAudio: vi.fn(),
    })
    // createWorkflowStoreState() already includes `backupDraft: undefined`,
    // so Object.assign fully resets that field; no separate reset is needed.
    Object.assign(mocks.workflowStoreState, createWorkflowStoreState())
    mocks.workflowStoreSetState.mockImplementation((partial: Record<string, unknown>) => {
      Object.assign(mocks.workflowStoreState, partial)
    })
    mocks.featuresStoreState.features = {
      file: {
        enabled: true,
      },
    }
  })
// Backups are one-shot: a second handleBackupDraft is a no-op while a backup
// is already held.
it('should backup the current draft once and skip subsequent backups until it is cleared', () => {
const { result } = renderHook(() => useWorkflowRun())
act(() => {
result.current.handleBackupDraft()
result.current.handleBackupDraft()
})
expect(mocks.workflowStoreState.setBackupDraft).toHaveBeenCalledTimes(1)
// The backup snapshot captures nodes, edges, viewport, features, and env vars.
expect(mocks.workflowStoreState.setBackupDraft).toHaveBeenCalledWith({
nodes: [{ id: 'node-1', data: { selected: true, _runningStatus: 'running' } }],
edges: [{ id: 'edge-1' }],
viewport: { x: 1, y: 2, zoom: 1.5 },
features: { file: { enabled: true } },
environmentVariables: [{ id: 'env-current', value: 'secret' }],
})
expect(mocks.mockDoSyncWorkflowDraft).toHaveBeenCalledTimes(1)
})
// Restoring a backup updates the canvas, env vars, and features, then clears
// the stored backup.
it('should load a backup draft into canvas, environment variables, and features state', () => {
mocks.workflowStoreState.backupDraft = {
nodes: [{ id: 'backup-node' }],
edges: [{ id: 'backup-edge' }],
viewport: { x: 0, y: 0, zoom: 2 },
features: { opening: { enabled: true } },
environmentVariables: [{ id: 'env-backup', value: 'value' }],
}
const { result } = renderHook(() => useWorkflowRun())
act(() => {
result.current.handleLoadBackupDraft()
})
expect(mocks.mockHandleUpdateWorkflowCanvas).toHaveBeenCalledWith({
nodes: [{ id: 'backup-node' }],
edges: [{ id: 'backup-edge' }],
viewport: { x: 0, y: 0, zoom: 2 },
})
expect(mocks.workflowStoreState.setEnvironmentVariables).toHaveBeenCalledWith([{ id: 'env-backup', value: 'value' }])
expect(mocks.featuresStoreSetState).toHaveBeenCalledWith({
features: { opening: { enabled: true } },
})
expect(mocks.workflowStoreState.setBackupDraft).toHaveBeenCalledWith(undefined)
})
// Plain (non-trigger) runs go through ssePost after the graph is prepared:
// selection/running flags are cleared and listening state is reset.
it('should prepare the graph and dispatch a workflow run through ssePost for user-input mode', async () => {
const { result } = renderHook(() => useWorkflowRun())
await act(async () => {
await result.current.handleRun({ inputs: { query: 'hello' } })
})
expect(mocks.reactFlowStoreState.setNodes).toHaveBeenCalledWith([
{ id: 'node-1', data: { selected: false, _runningStatus: undefined } },
])
expect(mocks.mockDoSyncWorkflowDraft).toHaveBeenCalled()
expect(mocks.workflowStoreSetState).toHaveBeenCalledWith({ historyWorkflowData: undefined })
expect(mocks.workflowStoreState.setIsListening).toHaveBeenCalledWith(false)
expect(mocks.workflowStoreState.setListeningTriggerType).toHaveBeenCalledWith(null)
expect(mocks.workflowStoreState.setListeningTriggerNodeId).toHaveBeenCalledWith(null)
expect(mocks.workflowStoreState.setListeningTriggerNodeIds).toHaveBeenCalledWith([])
expect(mocks.workflowStoreState.setListeningTriggerIsAll).toHaveBeenCalledWith(false)
expect(mocks.workflowStoreState.setWorkflowRunningData).toHaveBeenCalledWith(expect.objectContaining({
result: expect.objectContaining({
status: WorkflowRunningStatus.Running,
}),
}))
expect(mocks.mockSsePost).toHaveBeenCalledWith(
'/apps/app-1/workflows/draft/run',
{ body: { inputs: { query: 'hello' } } },
expect.objectContaining({
getAbortController: expect.any(Function),
}),
)
})
// Trigger-mode runs bypass ssePost and instead POST to the trigger debug
// endpoints; each mode sets the matching listening state.
it.each([
{
title: 'schedule',
params: {},
options: { mode: TriggerType.Schedule, scheduleNodeId: 'schedule-1' },
expectedUrl: '/apps/app-1/workflows/draft/trigger/run',
expectedBody: { node_id: 'schedule-1' },
expectedNodeIds: ['schedule-1'],
expectedIsAll: false,
},
{
title: 'webhook',
params: { node_id: 'webhook-1' },
options: { mode: TriggerType.Webhook, webhookNodeId: 'webhook-1' },
expectedUrl: '/apps/app-1/workflows/draft/trigger/run',
expectedBody: { node_id: 'webhook-1' },
expectedNodeIds: ['webhook-1'],
expectedIsAll: false,
},
{
title: 'plugin',
params: { node_id: 'plugin-1' },
options: { mode: TriggerType.Plugin, pluginNodeId: 'plugin-1' },
expectedUrl: '/apps/app-1/workflows/draft/trigger/run',
expectedBody: { node_id: 'plugin-1' },
expectedNodeIds: ['plugin-1'],
expectedIsAll: false,
},
{
title: 'all',
params: { node_ids: ['trigger-1', 'trigger-2'] },
options: { mode: TriggerType.All, allNodeIds: ['trigger-1', 'trigger-2'] },
expectedUrl: '/apps/app-1/workflows/draft/trigger/run-all',
expectedBody: { node_ids: ['trigger-1', 'trigger-2'] },
expectedNodeIds: ['trigger-1', 'trigger-2'],
expectedIsAll: true,
},
])('should dispatch $title trigger runs through the debug runner integration', async ({
params,
options,
expectedUrl,
expectedBody,
expectedNodeIds,
expectedIsAll,
}) => {
const { result } = renderHook(() => useWorkflowRun())
await act(async () => {
await result.current.handleRun(params, undefined, options)
})
expect(mocks.mockPost).toHaveBeenCalledWith(
expectedUrl,
expect.objectContaining({
body: expectedBody,
signal: expect.any(AbortSignal),
}),
{ needAllResponseContent: true },
)
expect(mocks.workflowStoreState.setIsListening).toHaveBeenCalledWith(true)
expect(mocks.workflowStoreState.setListeningTriggerNodeIds).toHaveBeenCalledWith(expectedNodeIds)
expect(mocks.workflowStoreState.setListeningTriggerIsAll).toHaveBeenCalledWith(expectedIsAll)
expect(mocks.mockSsePost).not.toHaveBeenCalled()
})
// The callback-factory context passed to createBaseWorkflowRunCallbacks must
// carry a failure tracker that reports to analytics with the flow id.
it('should expose the workflow-failed tracker through the callback factory context', async () => {
const { result } = renderHook(() => useWorkflowRun())
await act(async () => {
await result.current.handleRun({ inputs: { query: 'hello' } })
})
const baseCallbackFactoryContext = mocks.mockCreateBaseWorkflowRunCallbacks.mock.calls.at(-1)?.[0] as {
trackWorkflowRunFailed: (params: { error?: string, node_type?: string }) => void
}
baseCallbackFactoryContext.trackWorkflowRunFailed({ error: 'failed', node_type: 'llm' })
expect(mocks.mockTrackEvent).toHaveBeenCalledWith('workflow_run_failed', {
workflow_id: 'flow-1',
reason: 'failed',
node_type: 'llm',
})
})
// TTS players are created lazily by the callback-factory context. A run with
// a token uses the public /text-to-audio endpoint; a run with an appId uses
// the app-scoped private endpoint.
it('should lazily create audio players with the correct public and private tts urls', async () => {
const { result } = renderHook(() => useWorkflowRun())
await act(async () => {
await result.current.handleRun({ token: 'public-token' })
})
const publicBaseCallbackFactoryContext = mocks.mockCreateBaseWorkflowRunCallbacks.mock.calls.at(-1)?.[0] as {
getOrCreatePlayer: () => unknown
}
publicBaseCallbackFactoryContext.getOrCreatePlayer()
expect(mocks.mockGetAudioPlayer).toHaveBeenCalledWith(
'/text-to-audio',
true,
expect.any(String),
'none',
'none',
expect.any(Function),
)
mocks.mockSsePost.mockClear()
mocks.mockGetAudioPlayer.mockClear()
await act(async () => {
await result.current.handleRun({ appId: 'app-2' })
})
const privateBaseCallbackFactoryContext = mocks.mockCreateBaseWorkflowRunCallbacks.mock.calls.at(-1)?.[0] as {
getOrCreatePlayer: () => unknown
}
privateBaseCallbackFactoryContext.getOrCreatePlayer()
expect(mocks.mockGetAudioPlayer).toHaveBeenCalledWith(
'/apps/app-2/text-to-audio',
false,
expect.any(String),
'none',
'none',
expect.any(Function),
)
})
// Stopping with a task id calls the stop endpoint and marks the run stopped;
// stopping with an empty id aborts every window-level debug controller plus
// the controller captured from the last ssePost call.
it('should stop workflow runs by task id or by aborting active debug controllers', async () => {
const { result } = renderHook(() => useWorkflowRun())
await act(async () => {
await result.current.handleRun({ inputs: { query: 'hello' } })
})
act(() => {
result.current.handleStopRun('task-1')
})
expect(mocks.mockStopWorkflowRun).toHaveBeenCalledWith('/apps/app-1/workflow-runs/tasks/task-1/stop')
expect(mocks.workflowStoreState.setWorkflowRunningData).toHaveBeenCalledWith(expect.objectContaining({
result: expect.objectContaining({
status: WorkflowRunningStatus.Stopped,
}),
}))
const webhookAbort = vi.fn()
const pluginAbort = vi.fn()
const scheduleAbort = vi.fn()
const allTriggersAbort = vi.fn()
const windowWithDebugControllers = window as DebugControllerWindow
windowWithDebugControllers.__webhookDebugAbortController = { abort: webhookAbort }
windowWithDebugControllers.__pluginDebugAbortController = { abort: pluginAbort }
windowWithDebugControllers.__scheduleDebugAbortController = { abort: scheduleAbort }
windowWithDebugControllers.__allTriggersDebugAbortController = { abort: allTriggersAbort }
const refController = new AbortController()
const refAbortSpy = vi.spyOn(refController, 'abort')
// Hand the hook a controller the same way ssePost would.
const { getAbortController } = mocks.mockSsePost.mock.calls.at(-1)?.[2] as {
getAbortController?: (controller: AbortController) => void
}
getAbortController?.(refController)
act(() => {
result.current.handleStopRun('')
})
expect(webhookAbort).toHaveBeenCalled()
expect(pluginAbort).toHaveBeenCalled()
expect(scheduleAbort).toHaveBeenCalled()
expect(allTriggersAbort).toHaveBeenCalled()
expect(refAbortSpy).toHaveBeenCalled()
})
// Restoring a published workflow deselects all nodes, maps the flat feature
// payload into the features-store shape, and restores env vars.
it('should restore published workflow graph, features, and environment variables', () => {
const { result } = renderHook(() => useWorkflowRun())
act(() => {
result.current.handleRestoreFromPublishedWorkflow({
graph: {
nodes: [{ id: 'published-node', selected: true, data: { selected: true, label: 'Published' } }],
edges: [{ id: 'published-edge' }],
viewport: { x: 10, y: 20, zoom: 0.8 },
},
features: {
opening_statement: 'hello',
suggested_questions: ['Q1'],
suggested_questions_after_answer: { enabled: true },
text_to_speech: { enabled: true },
speech_to_text: { enabled: true },
retriever_resource: { enabled: true },
sensitive_word_avoidance: { enabled: true },
file_upload: { enabled: true },
},
environment_variables: [{ id: 'env-published', value: 'value' }],
} as never)
})
// Both the node and its data lose their selected flag on restore.
expect(mocks.mockHandleUpdateWorkflowCanvas).toHaveBeenCalledWith({
nodes: [{ id: 'published-node', selected: false, data: { selected: false, label: 'Published' } }],
edges: [{ id: 'published-edge' }],
viewport: { x: 10, y: 20, zoom: 0.8 },
})
expect(mocks.featuresStoreSetState).toHaveBeenCalledWith({
features: expect.objectContaining({
opening: expect.objectContaining({
enabled: true,
opening_statement: 'hello',
}),
file: { enabled: true },
}),
})
expect(mocks.workflowStoreState.setEnvironmentVariables).toHaveBeenCalledWith([{ id: 'env-published', value: 'value' }])
})
// A published payload without environment_variables restores as an empty list
// rather than leaving stale values in place.
it('should restore published workflows with empty environment variables as an empty list', () => {
const { result } = renderHook(() => useWorkflowRun())
act(() => {
result.current.handleRestoreFromPublishedWorkflow({
graph: {
nodes: [{ id: 'published-node', selected: true, data: { selected: true, label: 'Published' } }],
edges: [],
viewport: { x: 0, y: 0, zoom: 1 },
},
features: {
opening_statement: '',
suggested_questions: [],
suggested_questions_after_answer: { enabled: false },
text_to_speech: { enabled: false },
speech_to_text: { enabled: false },
retriever_resource: { enabled: false },
sensitive_word_avoidance: { enabled: false },
file_upload: { enabled: false },
},
} as never)
})
expect(mocks.featuresStoreSetState).toHaveBeenCalledWith({
features: expect.objectContaining({
opening: expect.objectContaining({ enabled: false }),
file: { enabled: false },
}),
})
expect(mocks.workflowStoreState.setEnvironmentVariables).toHaveBeenCalledWith([])
})
})

View File

@@ -1,391 +0,0 @@
import { act, renderHook } from '@testing-library/react'
import { TriggerType } from '@/app/components/workflow/header/test-run-menu'
import {
BlockEnum,
WorkflowRunningStatus,
} from '@/app/components/workflow/types'
import { useWorkflowStartRun } from '../use-workflow-start-run'
// Spies shared by the vi.mock factories and assertions in this suite.
const mockGetNodes = vi.fn()
const mockGetFeaturesState = vi.fn()
const mockHandleCancelDebugAndPreviewPanel = vi.fn()
const mockHandleRun = vi.fn()
const mockDoSyncWorkflowDraft = vi.fn()
const mockUseIsChatMode = vi.fn()
const mockSetShowDebugAndPreviewPanel = vi.fn()
const mockSetShowInputsPanel = vi.fn()
const mockSetShowEnvPanel = vi.fn()
const mockSetShowGlobalVariablePanel = vi.fn()
const mockSetShowChatVariablePanel = vi.fn()
const mockSetListeningTriggerType = vi.fn()
const mockSetListeningTriggerNodeId = vi.fn()
const mockSetListeningTriggerNodeIds = vi.fn()
const mockSetListeningTriggerIsAll = vi.fn()
const mockSetHistoryWorkflowData = vi.fn()
// Reassigned per test (see beforeEach / individual tests) to vary store state.
let workflowStoreState: Record<string, unknown>
// React Flow store: only getNodes is needed by the hook under test.
vi.mock('reactflow', () => ({
useStoreApi: () => ({
getState: () => ({
getNodes: mockGetNodes,
}),
}),
}))
// Features store: state is supplied per test via mockGetFeaturesState.
vi.mock('@/app/components/base/features/hooks', () => ({
useFeaturesStore: () => ({
getState: mockGetFeaturesState,
}),
}))
vi.mock('@/app/components/workflow/hooks', () => ({
useWorkflowInteractions: () => ({
handleCancelDebugAndPreviewPanel: mockHandleCancelDebugAndPreviewPanel,
}),
}))
// Workflow store: reads the mutable workflowStoreState defined above.
vi.mock('@/app/components/workflow/store', () => ({
useWorkflowStore: () => ({
getState: () => workflowStoreState,
}),
}))
// App-level hooks: chat-mode flag, draft sync, and the run dispatcher.
vi.mock('@/app/components/workflow-app/hooks', () => ({
useIsChatMode: () => mockUseIsChatMode(),
useNodesSyncDraft: () => ({
doSyncWorkflowDraft: mockDoSyncWorkflowDraft,
}),
useWorkflowRun: () => ({
handleRun: mockHandleRun,
}),
}))
// Returns the default workflow-store slice for these tests; individual fields
// (e.g. an in-flight run or an open debug panel) can be swapped via overrides.
const createWorkflowStoreState = (overrides: Record<string, unknown> = {}) => {
  const baseState = {
    workflowRunningData: undefined,
    showDebugAndPreviewPanel: false,
    setShowDebugAndPreviewPanel: mockSetShowDebugAndPreviewPanel,
    setShowInputsPanel: mockSetShowInputsPanel,
    setShowEnvPanel: mockSetShowEnvPanel,
    setShowGlobalVariablePanel: mockSetShowGlobalVariablePanel,
    setShowChatVariablePanel: mockSetShowChatVariablePanel,
    setListeningTriggerType: mockSetListeningTriggerType,
    setListeningTriggerNodeId: mockSetListeningTriggerNodeId,
    setListeningTriggerNodeIds: mockSetListeningTriggerNodeIds,
    setListeningTriggerIsAll: mockSetListeningTriggerIsAll,
    setHistoryWorkflowData: mockSetHistoryWorkflowData,
  }
  return { ...baseState, ...overrides }
}
describe('useWorkflowStartRun', () => {
beforeEach(() => {
vi.clearAllMocks()
workflowStoreState = createWorkflowStoreState()
mockGetNodes.mockReturnValue([
{ id: 'start-1', data: { type: BlockEnum.Start, variables: [] } },
])
mockGetFeaturesState.mockReturnValue({
features: {
file: {
image: {
enabled: false,
},
},
},
})
mockDoSyncWorkflowDraft.mockResolvedValue(undefined)
mockUseIsChatMode.mockReturnValue(false)
})
it('should run the workflow immediately when there are no start variables and no image upload input', async () => {
const { result } = renderHook(() => useWorkflowStartRun())
await act(async () => {
await result.current.handleWorkflowStartRunInWorkflow()
})
expect(mockSetShowEnvPanel).toHaveBeenCalledWith(false)
expect(mockSetShowGlobalVariablePanel).toHaveBeenCalledWith(false)
expect(mockDoSyncWorkflowDraft).toHaveBeenCalled()
expect(mockHandleRun).toHaveBeenCalledWith({ inputs: {}, files: [] })
expect(mockSetShowDebugAndPreviewPanel).toHaveBeenCalledWith(true)
expect(mockSetShowInputsPanel).toHaveBeenCalledWith(false)
})
it('should open the input panel instead of running immediately when start inputs are required', async () => {
mockGetNodes.mockReturnValue([
{ id: 'start-1', data: { type: BlockEnum.Start, variables: [{ name: 'query' }] } },
])
const { result } = renderHook(() => useWorkflowStartRun())
await act(async () => {
await result.current.handleWorkflowStartRunInWorkflow()
})
expect(mockDoSyncWorkflowDraft).not.toHaveBeenCalled()
expect(mockHandleRun).not.toHaveBeenCalled()
expect(mockSetShowDebugAndPreviewPanel).toHaveBeenCalledWith(true)
expect(mockSetShowInputsPanel).toHaveBeenCalledWith(true)
})
it('should open the input panel when image upload is enabled even without start variables', async () => {
mockGetFeaturesState.mockReturnValue({
features: {
file: {
image: {
enabled: true,
},
},
},
})
const { result } = renderHook(() => useWorkflowStartRun())
await act(async () => {
await result.current.handleWorkflowStartRunInWorkflow()
})
expect(mockDoSyncWorkflowDraft).not.toHaveBeenCalled()
expect(mockHandleRun).not.toHaveBeenCalled()
expect(mockSetShowDebugAndPreviewPanel).toHaveBeenCalledWith(true)
expect(mockSetShowInputsPanel).toHaveBeenCalledWith(true)
})
it('should cancel the current debug panel instead of starting another workflow when one is already open', async () => {
workflowStoreState = createWorkflowStoreState({
showDebugAndPreviewPanel: true,
})
const { result } = renderHook(() => useWorkflowStartRun())
await act(async () => {
await result.current.handleWorkflowStartRunInWorkflow()
})
expect(mockHandleCancelDebugAndPreviewPanel).toHaveBeenCalled()
expect(mockDoSyncWorkflowDraft).not.toHaveBeenCalled()
expect(mockHandleRun).not.toHaveBeenCalled()
})
it('should short-circuit workflow start when a run is already in progress', async () => {
workflowStoreState = createWorkflowStoreState({
workflowRunningData: {
result: {
status: WorkflowRunningStatus.Running,
},
},
})
const { result } = renderHook(() => useWorkflowStartRun())
await act(async () => {
await result.current.handleWorkflowStartRunInWorkflow()
})
expect(mockSetShowEnvPanel).not.toHaveBeenCalled()
expect(mockDoSyncWorkflowDraft).not.toHaveBeenCalled()
expect(mockHandleRun).not.toHaveBeenCalled()
})
it('should configure schedule trigger runs and execute the workflow with schedule options', async () => {
mockGetNodes.mockReturnValue([
{ id: 'schedule-1', data: { type: BlockEnum.TriggerSchedule } },
])
const { result } = renderHook(() => useWorkflowStartRun())
await act(async () => {
await result.current.handleWorkflowTriggerScheduleRunInWorkflow('schedule-1')
})
expect(mockSetShowEnvPanel).toHaveBeenCalledWith(false)
expect(mockSetShowGlobalVariablePanel).toHaveBeenCalledWith(false)
expect(mockSetListeningTriggerType).toHaveBeenCalledWith(BlockEnum.TriggerSchedule)
expect(mockSetListeningTriggerNodeId).toHaveBeenCalledWith('schedule-1')
expect(mockSetListeningTriggerNodeIds).toHaveBeenCalledWith(['schedule-1'])
expect(mockSetListeningTriggerIsAll).toHaveBeenCalledWith(false)
expect(mockDoSyncWorkflowDraft).toHaveBeenCalled()
expect(mockHandleRun).toHaveBeenCalledWith(
{},
undefined,
{
mode: TriggerType.Schedule,
scheduleNodeId: 'schedule-1',
},
)
expect(mockSetShowDebugAndPreviewPanel).toHaveBeenCalledWith(true)
expect(mockSetShowInputsPanel).toHaveBeenCalledWith(false)
})
it('should cancel schedule trigger execution when the debug panel is already open', async () => {
workflowStoreState = createWorkflowStoreState({
showDebugAndPreviewPanel: true,
})
mockGetNodes.mockReturnValue([
{ id: 'schedule-1', data: { type: BlockEnum.TriggerSchedule } },
])
const { result } = renderHook(() => useWorkflowStartRun())
await act(async () => {
await result.current.handleWorkflowTriggerScheduleRunInWorkflow('schedule-1')
})
expect(mockHandleCancelDebugAndPreviewPanel).toHaveBeenCalled()
expect(mockDoSyncWorkflowDraft).not.toHaveBeenCalled()
expect(mockHandleRun).not.toHaveBeenCalled()
})
it.each([
{
title: 'schedule',
invoke: (hook: ReturnType<typeof useWorkflowStartRun>) => hook.handleWorkflowTriggerScheduleRunInWorkflow(undefined),
},
{
title: 'webhook',
invoke: (hook: ReturnType<typeof useWorkflowStartRun>) => hook.handleWorkflowTriggerWebhookRunInWorkflow({ nodeId: '' }),
},
{
title: 'plugin',
invoke: (hook: ReturnType<typeof useWorkflowStartRun>) => hook.handleWorkflowTriggerPluginRunInWorkflow(''),
},
])('should ignore $title trigger execution when the node id is empty', async ({ invoke }) => {
const { result } = renderHook(() => useWorkflowStartRun())
await act(async () => {
await invoke(result.current)
})
expect(mockDoSyncWorkflowDraft).not.toHaveBeenCalled()
expect(mockHandleRun).not.toHaveBeenCalled()
})
it.each([
{
title: 'schedule',
warnMessage: 'handleWorkflowTriggerScheduleRunInWorkflow: schedule node not found',
invoke: (hook: ReturnType<typeof useWorkflowStartRun>) => hook.handleWorkflowTriggerScheduleRunInWorkflow('schedule-missing'),
},
{
title: 'webhook',
warnMessage: 'handleWorkflowTriggerWebhookRunInWorkflow: webhook node not found',
invoke: (hook: ReturnType<typeof useWorkflowStartRun>) => hook.handleWorkflowTriggerWebhookRunInWorkflow({ nodeId: 'webhook-missing' }),
},
{
title: 'plugin',
warnMessage: 'handleWorkflowTriggerPluginRunInWorkflow: plugin node not found',
invoke: (hook: ReturnType<typeof useWorkflowStartRun>) => hook.handleWorkflowTriggerPluginRunInWorkflow('plugin-missing'),
},
])('should warn when the $title trigger node cannot be found', async ({ warnMessage, invoke }) => {
const consoleWarnSpy = vi.spyOn(console, 'warn').mockImplementation(() => {})
mockGetNodes.mockReturnValue([{ id: 'other-node', data: { type: BlockEnum.Start } }])
const { result } = renderHook(() => useWorkflowStartRun())
await act(async () => {
await invoke(result.current)
})
expect(consoleWarnSpy).toHaveBeenCalledWith(warnMessage, expect.stringContaining('missing'))
expect(mockDoSyncWorkflowDraft).not.toHaveBeenCalled()
expect(mockHandleRun).not.toHaveBeenCalled()
consoleWarnSpy.mockRestore()
})
it.each([
{
title: 'webhook',
nodeId: 'webhook-1',
nodeType: BlockEnum.TriggerWebhook,
invoke: (hook: ReturnType<typeof useWorkflowStartRun>) => hook.handleWorkflowTriggerWebhookRunInWorkflow({ nodeId: 'webhook-1' }),
expectedParams: { node_id: 'webhook-1' },
expectedOptions: { mode: TriggerType.Webhook, webhookNodeId: 'webhook-1' },
},
{
title: 'plugin',
nodeId: 'plugin-1',
nodeType: BlockEnum.TriggerPlugin,
invoke: (hook: ReturnType<typeof useWorkflowStartRun>) => hook.handleWorkflowTriggerPluginRunInWorkflow('plugin-1'),
expectedParams: { node_id: 'plugin-1' },
expectedOptions: { mode: TriggerType.Plugin, pluginNodeId: 'plugin-1' },
},
])('should configure $title trigger runs with node-specific options', async ({ nodeId, nodeType, invoke, expectedParams, expectedOptions }) => {
mockGetNodes.mockReturnValue([
{ id: nodeId, data: { type: nodeType } },
])
const { result } = renderHook(() => useWorkflowStartRun())
await act(async () => {
await invoke(result.current)
})
expect(mockSetShowEnvPanel).toHaveBeenCalledWith(false)
expect(mockSetShowGlobalVariablePanel).toHaveBeenCalledWith(false)
expect(mockSetShowDebugAndPreviewPanel).toHaveBeenCalledWith(true)
expect(mockSetShowInputsPanel).toHaveBeenCalledWith(false)
expect(mockSetListeningTriggerType).toHaveBeenCalledWith(nodeType)
expect(mockSetListeningTriggerNodeId).toHaveBeenCalledWith(nodeId)
expect(mockSetListeningTriggerNodeIds).toHaveBeenCalledWith([nodeId])
expect(mockSetListeningTriggerIsAll).toHaveBeenCalledWith(false)
expect(mockDoSyncWorkflowDraft).toHaveBeenCalled()
expect(mockHandleRun).toHaveBeenCalledWith(expectedParams, undefined, expectedOptions)
})
it('should run all triggers and mark the listener state as global', async () => {
const { result } = renderHook(() => useWorkflowStartRun())
await act(async () => {
await result.current.handleWorkflowRunAllTriggersInWorkflow(['trigger-1', 'trigger-2'])
})
expect(mockSetShowEnvPanel).toHaveBeenCalledWith(false)
expect(mockSetShowGlobalVariablePanel).toHaveBeenCalledWith(false)
expect(mockSetShowInputsPanel).toHaveBeenCalledWith(false)
expect(mockSetListeningTriggerIsAll).toHaveBeenCalledWith(true)
expect(mockSetListeningTriggerNodeIds).toHaveBeenCalledWith(['trigger-1', 'trigger-2'])
expect(mockSetListeningTriggerNodeId).toHaveBeenCalledWith(null)
expect(mockSetShowDebugAndPreviewPanel).toHaveBeenCalledWith(true)
expect(mockDoSyncWorkflowDraft).toHaveBeenCalled()
expect(mockHandleRun).toHaveBeenCalledWith(
{ node_ids: ['trigger-1', 'trigger-2'] },
undefined,
{
mode: TriggerType.All,
allNodeIds: ['trigger-1', 'trigger-2'],
},
)
})
it('should ignore run-all requests when there are no trigger nodes', async () => {
  const { result } = renderHook(() => useWorkflowStartRun())
  await act(async () => {
    await result.current.handleWorkflowRunAllTriggersInWorkflow([])
  })
  // An empty id list is a no-op: no listener state, no draft sync, no run.
  for (const untouched of [mockSetListeningTriggerIsAll, mockDoSyncWorkflowDraft, mockHandleRun])
    expect(untouched).not.toHaveBeenCalled()
})
it('should route handleStartWorkflowRun to the chatflow path when chat mode is enabled', async () => {
  mockUseIsChatMode.mockReturnValue(true)
  const { result } = renderHook(() => useWorkflowStartRun())
  await act(async () => {
    result.current.handleStartWorkflowRun()
  })
  // Chat mode closes the side panels, opens debug/preview, and resets
  // history — without issuing a workflow run.
  for (const hidePanel of [mockSetShowEnvPanel, mockSetShowChatVariablePanel, mockSetShowGlobalVariablePanel])
    expect(hidePanel).toHaveBeenCalledWith(false)
  expect(mockSetShowDebugAndPreviewPanel).toHaveBeenCalledWith(true)
  expect(mockSetHistoryWorkflowData).toHaveBeenCalledWith(undefined)
  expect(mockHandleRun).not.toHaveBeenCalled()
})
})

View File

@@ -1,82 +0,0 @@
import { renderHook } from '@testing-library/react'
import { useWorkflowTemplate } from '../use-workflow-template'
// Controls the chat-mode flag returned by the mocked useIsChatMode hook.
const mockUseIsChatMode = vi.fn()
// Records every argument object passed to the mocked generateNewNode,
// in call order; reset in beforeEach.
let generateNewNodeCalls: Array<Record<string, unknown>> = []
vi.mock('@/app/components/workflow-app/hooks/use-is-chat-mode', () => ({
  useIsChatMode: () => mockUseIsChatMode(),
}))
// Replace generateNewNode with a recorder that returns a predictable node:
// the caller-supplied id, or `generated-<n>` when no id is given. All other
// exports of the utils module are passed through untouched.
vi.mock('@/app/components/workflow/utils', async (importOriginal) => {
  const actual = await importOriginal<typeof import('@/app/components/workflow/utils')>()
  return {
    ...actual,
    generateNewNode: (args: { id?: string, data: Record<string, unknown>, position: Record<string, unknown> }) => {
      generateNewNodeCalls.push(args)
      return {
        newNode: {
          id: args.id ?? `generated-${generateNewNodeCalls.length}`,
          data: args.data,
          position: args.position,
        },
      }
    },
  }
})
// Tests for useWorkflowTemplate: the hook that seeds the initial node/edge
// templates for a fresh workflow or chatflow canvas.
describe('useWorkflowTemplate', () => {
  beforeEach(() => {
    vi.clearAllMocks()
    // Reset the recorder so generated ids restart at `generated-1` each test.
    generateNewNodeCalls = []
  })
  it('should return only the start node template in workflow mode', () => {
    mockUseIsChatMode.mockReturnValue(false)
    const { result } = renderHook(() => useWorkflowTemplate())
    // Workflow mode seeds a single start node and no edges.
    expect(result.current.nodes).toHaveLength(1)
    expect(result.current.edges).toEqual([])
    expect(generateNewNodeCalls).toHaveLength(1)
  })
  it('should build start, llm, and answer templates with linked edges in chat mode', () => {
    mockUseIsChatMode.mockReturnValue(true)
    const { result } = renderHook(() => useWorkflowTemplate())
    // Chat mode seeds start -> llm -> answer, chained by two edges.
    expect(result.current.nodes).toHaveLength(3)
    expect(result.current.nodes.map(node => node.id)).toEqual(['generated-1', 'llm', 'answer'])
    expect(result.current.edges).toEqual([
      {
        id: 'generated-1-llm',
        source: 'generated-1',
        sourceHandle: 'source',
        target: 'llm',
        targetHandle: 'target',
      },
      {
        id: 'llm-answer',
        source: 'llm',
        sourceHandle: 'source',
        target: 'answer',
        targetHandle: 'target',
      },
    ])
    // The answer node's template wires its output to the llm node's text.
    expect(generateNewNodeCalls).toHaveLength(3)
    expect(generateNewNodeCalls[0].data).toMatchObject({
      type: 'start',
      title: 'workflow.blocks.start',
    })
    expect(generateNewNodeCalls[1].data).toMatchObject({
      type: 'llm',
      title: 'workflow.blocks.llm',
    })
    expect(generateNewNodeCalls[2].data).toMatchObject({
      type: 'answer',
      title: 'workflow.blocks.answer',
      answer: '{{#llm.text#}}',
    })
  })
})

View File

@@ -1,470 +0,0 @@
import type AudioPlayer from '@/app/components/base/audio-btn/audio'
import type { IOtherOptions } from '@/service/base'
import { AudioPlayerManager } from '@/app/components/base/audio-btn/audio.player.manager'
import { sseGet } from '@/service/base'
// Pixel dimensions of the workflow canvas container, used by node/iteration/
// loop "started" handlers to position panels relative to the viewport.
type ContainerSize = {
  clientWidth: number
  clientHeight: number
}
// Store-backed handlers for every workflow-run stream event. Most mirror an
// IOtherOptions callback; the node/iteration/loop "started" variants also
// receive the container size.
type WorkflowRunEventHandlers = {
  handleWorkflowStarted: NonNullable<IOtherOptions['onWorkflowStarted']>
  handleWorkflowFinished: NonNullable<IOtherOptions['onWorkflowFinished']>
  handleWorkflowFailed: () => void
  handleWorkflowNodeStarted: (params: Parameters<NonNullable<IOtherOptions['onNodeStarted']>>[0], containerParams: ContainerSize) => void
  handleWorkflowNodeFinished: NonNullable<IOtherOptions['onNodeFinished']>
  handleWorkflowNodeHumanInputRequired: NonNullable<IOtherOptions['onHumanInputRequired']>
  handleWorkflowNodeHumanInputFormFilled: NonNullable<IOtherOptions['onHumanInputFormFilled']>
  handleWorkflowNodeHumanInputFormTimeout: NonNullable<IOtherOptions['onHumanInputFormTimeout']>
  handleWorkflowNodeIterationStarted: (params: Parameters<NonNullable<IOtherOptions['onIterationStart']>>[0], containerParams: ContainerSize) => void
  handleWorkflowNodeIterationNext: NonNullable<IOtherOptions['onIterationNext']>
  handleWorkflowNodeIterationFinished: NonNullable<IOtherOptions['onIterationFinish']>
  handleWorkflowNodeLoopStarted: (params: Parameters<NonNullable<IOtherOptions['onLoopStart']>>[0], containerParams: ContainerSize) => void
  handleWorkflowNodeLoopNext: NonNullable<IOtherOptions['onLoopNext']>
  handleWorkflowNodeLoopFinished: NonNullable<IOtherOptions['onLoopFinish']>
  handleWorkflowNodeRetry: NonNullable<IOtherOptions['onNodeRetry']>
  handleWorkflowAgentLog: NonNullable<IOtherOptions['onAgentLog']>
  handleWorkflowTextChunk: NonNullable<IOtherOptions['onTextChunk']>
  handleWorkflowTextReplace: NonNullable<IOtherOptions['onTextReplace']>
  handleWorkflowPaused: () => void
}
// Optional caller-supplied callbacks, forwarded after the store handler runs.
type UserCallbackHandlers = {
  onWorkflowStarted?: IOtherOptions['onWorkflowStarted']
  onWorkflowFinished?: IOtherOptions['onWorkflowFinished']
  onNodeStarted?: IOtherOptions['onNodeStarted']
  onNodeFinished?: IOtherOptions['onNodeFinished']
  onIterationStart?: IOtherOptions['onIterationStart']
  onIterationNext?: IOtherOptions['onIterationNext']
  onIterationFinish?: IOtherOptions['onIterationFinish']
  onLoopStart?: IOtherOptions['onLoopStart']
  onLoopNext?: IOtherOptions['onLoopNext']
  onLoopFinish?: IOtherOptions['onLoopFinish']
  onNodeRetry?: IOtherOptions['onNodeRetry']
  onAgentLog?: IOtherOptions['onAgentLog']
  onError?: IOtherOptions['onError']
  onWorkflowPaused?: IOtherOptions['onWorkflowPaused']
  onHumanInputRequired?: IOtherOptions['onHumanInputRequired']
  onHumanInputFormFilled?: IOtherOptions['onHumanInputFormFilled']
  onHumanInputFormTimeout?: IOtherOptions['onHumanInputFormTimeout']
  onCompleted?: IOtherOptions['onCompleted']
}
// Shared context for both callback builders: canvas size, run-history cache
// invalidation, debug-state plumbing, and the handler/callback bundles.
type CallbackContext = {
  clientWidth: number
  clientHeight: number
  runHistoryUrl: string
  isInWorkflowDebug: boolean
  fetchInspectVars: (params: Record<string, never>) => void
  invalidAllLastRun: () => void
  invalidateRunHistory: (url: string) => void
  clearAbortController: () => void
  clearListeningState: () => void
  trackWorkflowRunFailed: (params: unknown) => void
  handlers: WorkflowRunEventHandlers
  callbacks: UserCallbackHandlers
  restCallback: IOtherOptions
}
// Base builder lazily creates the TTS audio player on first audio chunk.
type BaseCallbacksContext = CallbackContext & {
  getOrCreatePlayer: () => AudioPlayer | null
}
// Final builder receives the already-built base options and a concrete player.
type FinalCallbacksContext = CallbackContext & {
  baseSseOptions: IOtherOptions
  player: AudioPlayer | null
  setAbortController: (controller: AbortController) => void
}
/**
 * Builds the base IOtherOptions bundle for a workflow-run SSE stream.
 * Each stream event is routed first to its store-backed handler (updating
 * canvas/panel state) and then forwarded to the matching optional user
 * callback. Error/completion paths additionally release the abort controller
 * and clear the trigger-listening state.
 */
export const createBaseWorkflowRunCallbacks = ({
  clientWidth,
  clientHeight,
  runHistoryUrl,
  isInWorkflowDebug,
  fetchInspectVars,
  invalidAllLastRun,
  invalidateRunHistory,
  clearAbortController,
  clearListeningState,
  trackWorkflowRunFailed,
  handlers,
  callbacks,
  restCallback,
  getOrCreatePlayer,
}: BaseCallbacksContext): IOtherOptions => {
  const {
    handleWorkflowStarted,
    handleWorkflowFinished,
    handleWorkflowFailed,
    handleWorkflowNodeStarted,
    handleWorkflowNodeFinished,
    handleWorkflowNodeHumanInputRequired,
    handleWorkflowNodeHumanInputFormFilled,
    handleWorkflowNodeHumanInputFormTimeout,
    handleWorkflowNodeIterationStarted,
    handleWorkflowNodeIterationNext,
    handleWorkflowNodeIterationFinished,
    handleWorkflowNodeLoopStarted,
    handleWorkflowNodeLoopNext,
    handleWorkflowNodeLoopFinished,
    handleWorkflowNodeRetry,
    handleWorkflowAgentLog,
    handleWorkflowTextChunk,
    handleWorkflowTextReplace,
    handleWorkflowPaused,
  } = handlers
  const {
    onWorkflowStarted,
    onWorkflowFinished,
    onNodeStarted,
    onNodeFinished,
    onIterationStart,
    onIterationNext,
    onIterationFinish,
    onLoopStart,
    onLoopNext,
    onLoopFinish,
    onNodeRetry,
    onAgentLog,
    onError,
    onWorkflowPaused,
    onHumanInputRequired,
    onHumanInputFormFilled,
    onHumanInputFormTimeout,
    onCompleted,
  } = callbacks
  // Failure path: release the abort controller, mark the run failed, refresh
  // the run-history list, stop trigger listening, then forward to the user's
  // onError and to analytics.
  const wrappedOnError: IOtherOptions['onError'] = (params, code) => {
    clearAbortController()
    handleWorkflowFailed()
    invalidateRunHistory(runHistoryUrl)
    clearListeningState()
    if (onError)
      onError(params, code)
    trackWorkflowRunFailed(params)
  }
  // Completion always releases the controller and listening state, even on
  // success, before forwarding to the user's onCompleted.
  const wrappedOnCompleted: IOtherOptions['onCompleted'] = async (hasError, errorMessage) => {
    clearAbortController()
    clearListeningState()
    if (onCompleted)
      onCompleted(hasError, errorMessage)
  }
  const baseSseOptions: IOtherOptions = {
    // NOTE: restCallback is spread first here, so the wrapped handlers below
    // take precedence over any same-named entries in restCallback.
    ...restCallback,
    onWorkflowStarted: (params) => {
      handleWorkflowStarted(params)
      invalidateRunHistory(runHistoryUrl)
      if (onWorkflowStarted)
        onWorkflowStarted(params)
    },
    onWorkflowFinished: (params) => {
      clearListeningState()
      handleWorkflowFinished(params)
      invalidateRunHistory(runHistoryUrl)
      if (onWorkflowFinished)
        onWorkflowFinished(params)
      // Debug mode refreshes the variable-inspect data after each finished run.
      if (isInWorkflowDebug) {
        fetchInspectVars({})
        invalidAllLastRun()
      }
    },
    onNodeStarted: (params) => {
      handleWorkflowNodeStarted(params, { clientWidth, clientHeight })
      if (onNodeStarted)
        onNodeStarted(params)
    },
    onNodeFinished: (params) => {
      handleWorkflowNodeFinished(params)
      if (onNodeFinished)
        onNodeFinished(params)
    },
    onIterationStart: (params) => {
      handleWorkflowNodeIterationStarted(params, { clientWidth, clientHeight })
      if (onIterationStart)
        onIterationStart(params)
    },
    onIterationNext: (params) => {
      handleWorkflowNodeIterationNext(params)
      if (onIterationNext)
        onIterationNext(params)
    },
    onIterationFinish: (params) => {
      handleWorkflowNodeIterationFinished(params)
      if (onIterationFinish)
        onIterationFinish(params)
    },
    onLoopStart: (params) => {
      handleWorkflowNodeLoopStarted(params, { clientWidth, clientHeight })
      if (onLoopStart)
        onLoopStart(params)
    },
    onLoopNext: (params) => {
      handleWorkflowNodeLoopNext(params)
      if (onLoopNext)
        onLoopNext(params)
    },
    onLoopFinish: (params) => {
      handleWorkflowNodeLoopFinished(params)
      if (onLoopFinish)
        onLoopFinish(params)
    },
    onNodeRetry: (params) => {
      handleWorkflowNodeRetry(params)
      if (onNodeRetry)
        onNodeRetry(params)
    },
    onAgentLog: (params) => {
      handleWorkflowAgentLog(params)
      if (onAgentLog)
        onAgentLog(params)
    },
    onTextChunk: (params) => {
      handleWorkflowTextChunk(params)
    },
    onTextReplace: (params) => {
      handleWorkflowTextReplace(params)
    },
    // TTS: lazily create the audio player on the first non-empty chunk.
    onTTSChunk: (messageId: string, audio: string) => {
      if (!audio || audio === '')
        return
      const audioPlayer = getOrCreatePlayer()
      if (audioPlayer) {
        audioPlayer.playAudioWithAudio(audio, true)
        AudioPlayerManager.getInstance().resetMsgId(messageId)
      }
    },
    onTTSEnd: (_messageId: string, audio: string) => {
      const audioPlayer = getOrCreatePlayer()
      if (audioPlayer)
        audioPlayer.playAudioWithAudio(audio, false)
    },
    // A paused run keeps emitting events later: re-subscribe to the run's
    // event feed with these same base options (self-referential by design).
    onWorkflowPaused: (params) => {
      handleWorkflowPaused()
      invalidateRunHistory(runHistoryUrl)
      if (onWorkflowPaused)
        onWorkflowPaused(params)
      const url = `/workflow/${params.workflow_run_id}/events`
      sseGet(url, {}, baseSseOptions)
    },
    onHumanInputRequired: (params) => {
      handleWorkflowNodeHumanInputRequired(params)
      if (onHumanInputRequired)
        onHumanInputRequired(params)
    },
    onHumanInputFormFilled: (params) => {
      handleWorkflowNodeHumanInputFormFilled(params)
      if (onHumanInputFormFilled)
        onHumanInputFormFilled(params)
    },
    onHumanInputFormTimeout: (params) => {
      handleWorkflowNodeHumanInputFormTimeout(params)
      if (onHumanInputFormTimeout)
        onHumanInputFormTimeout(params)
    },
    onError: wrappedOnError,
    onCompleted: wrappedOnCompleted,
  }
  return baseSseOptions
}
/**
 * Builds the final IOtherOptions bundle used for the initial run request.
 * Starts from the base options, rebinds the per-event wrappers, and captures
 * the AbortController via getAbortController. The abort/listening cleanup
 * fields of the context are intentionally unused here (underscore-renamed):
 * that bookkeeping lives in the base options' onError/onCompleted.
 */
export const createFinalWorkflowRunCallbacks = ({
  clientWidth,
  clientHeight,
  runHistoryUrl,
  isInWorkflowDebug,
  fetchInspectVars,
  invalidAllLastRun,
  invalidateRunHistory,
  clearAbortController: _clearAbortController,
  clearListeningState: _clearListeningState,
  trackWorkflowRunFailed: _trackWorkflowRunFailed,
  handlers,
  callbacks,
  restCallback,
  baseSseOptions,
  player,
  setAbortController,
}: FinalCallbacksContext): IOtherOptions => {
  const {
    handleWorkflowFinished,
    handleWorkflowFailed,
    handleWorkflowNodeStarted,
    handleWorkflowNodeFinished,
    handleWorkflowNodeHumanInputRequired,
    handleWorkflowNodeHumanInputFormFilled,
    handleWorkflowNodeHumanInputFormTimeout,
    handleWorkflowNodeIterationStarted,
    handleWorkflowNodeIterationNext,
    handleWorkflowNodeIterationFinished,
    handleWorkflowNodeLoopStarted,
    handleWorkflowNodeLoopNext,
    handleWorkflowNodeLoopFinished,
    handleWorkflowNodeRetry,
    handleWorkflowAgentLog,
    handleWorkflowTextChunk,
    handleWorkflowTextReplace,
    handleWorkflowPaused,
  } = handlers
  const {
    onWorkflowFinished,
    onNodeStarted,
    onNodeFinished,
    onIterationStart,
    onIterationNext,
    onIterationFinish,
    onLoopStart,
    onLoopNext,
    onLoopFinish,
    onNodeRetry,
    onAgentLog,
    onError,
    onWorkflowPaused,
    onHumanInputRequired,
    onHumanInputFormFilled,
    onHumanInputFormTimeout,
  } = callbacks
  const finalCallbacks: IOtherOptions = {
    ...baseSseOptions,
    getAbortController: (controller: AbortController) => {
      setAbortController(controller)
    },
    onWorkflowFinished: (params) => {
      handleWorkflowFinished(params)
      invalidateRunHistory(runHistoryUrl)
      if (onWorkflowFinished)
        onWorkflowFinished(params)
      // Debug mode refreshes the variable-inspect data after each finished run.
      if (isInWorkflowDebug) {
        fetchInspectVars({})
        invalidAllLastRun()
      }
    },
    onError: (params, code) => {
      handleWorkflowFailed()
      invalidateRunHistory(runHistoryUrl)
      if (onError)
        onError(params, code)
    },
    onNodeStarted: (params) => {
      handleWorkflowNodeStarted(params, { clientWidth, clientHeight })
      if (onNodeStarted)
        onNodeStarted(params)
    },
    onNodeFinished: (params) => {
      handleWorkflowNodeFinished(params)
      if (onNodeFinished)
        onNodeFinished(params)
    },
    onIterationStart: (params) => {
      handleWorkflowNodeIterationStarted(params, { clientWidth, clientHeight })
      if (onIterationStart)
        onIterationStart(params)
    },
    onIterationNext: (params) => {
      handleWorkflowNodeIterationNext(params)
      if (onIterationNext)
        onIterationNext(params)
    },
    onIterationFinish: (params) => {
      handleWorkflowNodeIterationFinished(params)
      if (onIterationFinish)
        onIterationFinish(params)
    },
    onLoopStart: (params) => {
      handleWorkflowNodeLoopStarted(params, { clientWidth, clientHeight })
      if (onLoopStart)
        onLoopStart(params)
    },
    onLoopNext: (params) => {
      handleWorkflowNodeLoopNext(params)
      if (onLoopNext)
        onLoopNext(params)
    },
    onLoopFinish: (params) => {
      handleWorkflowNodeLoopFinished(params)
      if (onLoopFinish)
        onLoopFinish(params)
    },
    onNodeRetry: (params) => {
      handleWorkflowNodeRetry(params)
      if (onNodeRetry)
        onNodeRetry(params)
    },
    onAgentLog: (params) => {
      handleWorkflowAgentLog(params)
      if (onAgentLog)
        onAgentLog(params)
    },
    onTextChunk: (params) => {
      handleWorkflowTextChunk(params)
    },
    onTextReplace: (params) => {
      handleWorkflowTextReplace(params)
    },
    // Here the player is already constructed (unlike the base options, which
    // create it lazily).
    onTTSChunk: (messageId: string, audio: string) => {
      if (!audio || audio === '')
        return
      player?.playAudioWithAudio(audio, true)
      AudioPlayerManager.getInstance().resetMsgId(messageId)
    },
    onTTSEnd: (_messageId: string, audio: string) => {
      player?.playAudioWithAudio(audio, false)
    },
    // A paused run re-subscribes to its event feed with these final callbacks.
    onWorkflowPaused: (params) => {
      handleWorkflowPaused()
      invalidateRunHistory(runHistoryUrl)
      if (onWorkflowPaused)
        onWorkflowPaused(params)
      const url = `/workflow/${params.workflow_run_id}/events`
      sseGet(url, {}, finalCallbacks)
    },
    onHumanInputRequired: (params) => {
      handleWorkflowNodeHumanInputRequired(params)
      if (onHumanInputRequired)
        onHumanInputRequired(params)
    },
    onHumanInputFormFilled: (params) => {
      handleWorkflowNodeHumanInputFormFilled(params)
      if (onHumanInputFormFilled)
        onHumanInputFormFilled(params)
    },
    onHumanInputFormTimeout: (params) => {
      handleWorkflowNodeHumanInputFormTimeout(params)
      if (onHumanInputFormTimeout)
        onHumanInputFormTimeout(params)
    },
    // NOTE(review): restCallback is spread LAST here, so any same-named entry
    // in restCallback overrides the wrapped handlers above — the opposite of
    // createBaseWorkflowRunCallbacks, which spreads restCallback first.
    // Confirm this precedence difference is intentional.
    ...restCallback,
  }
  return finalCallbacks
}

View File

@@ -1,443 +0,0 @@
import type { Features as FeaturesData } from '@/app/components/base/features/types'
import type { TriggerNodeType } from '@/app/components/workflow/types'
import type { IOtherOptions } from '@/service/base'
import type { VersionHistory } from '@/types/workflow'
import { noop } from 'es-toolkit/function'
import { toast } from '@/app/components/base/ui/toast'
import { TriggerType } from '@/app/components/workflow/header/test-run-menu'
import { WorkflowRunningStatus } from '@/app/components/workflow/types'
import { handleStream, post } from '@/service/base'
import { ContentType } from '@/service/fetch'
import { AppModeEnum } from '@/types/app'
// The mode a run is issued in; a plain user-input run or one of the trigger
// debug modes.
export type HandleRunMode = TriggerType
// Per-mode options for handleRun: each trigger mode supplies the node id(s)
// it listens on.
export type HandleRunOptions = {
  mode?: HandleRunMode
  scheduleNodeId?: string
  webhookNodeId?: string
  pluginNodeId?: string
  allNodeIds?: string[]
}
// Every trigger mode except a plain user-input run supports debug listening.
export type DebuggableTriggerType = Exclude<TriggerType, TriggerType.UserInput>
// Minimal shape of the app detail read from the app store.
type AppDetailLike = {
  id?: string
  mode?: AppModeEnum
}
// Minimal shape of the run params consulted when deriving the TTS endpoint.
type TTSParamsLike = {
  token?: string
  appId?: string
}
// Store setters driven while a trigger-debug session is listening.
type ListeningStateActions = {
  setWorkflowRunningData: (data: ReturnType<typeof createRunningWorkflowState> | ReturnType<typeof createFailedWorkflowState> | ReturnType<typeof createStoppedWorkflowState>) => void
  setIsListening: (value: boolean) => void
  setShowVariableInspectPanel: (value: boolean) => void
  setListeningTriggerType: (value: TriggerNodeType | null) => void
  setListeningTriggerNodeIds: (value: string[]) => void
  setListeningTriggerIsAll: (value: boolean) => void
  setListeningTriggerNodeId: (value: string | null) => void
}
// Everything runTriggerDebug needs to poll a trigger-debug endpoint and feed
// the resulting SSE stream.
type TriggerDebugRunnerOptions = {
  debugType: DebuggableTriggerType
  url: string
  requestBody: unknown
  baseSseOptions: IOtherOptions
  controllerTarget: Record<string, unknown>
  setAbortController: (controller: AbortController | null) => void
  clearAbortController: () => void
  clearListeningState: () => void
  setWorkflowRunningData: ListeningStateActions['setWorkflowRunningData']
}
// Window-scoped key under which each trigger mode stashes its debug
// AbortController (see clearWindowDebugControllers).
export const controllerKeyMap: Record<DebuggableTriggerType, string> = {
  [TriggerType.Webhook]: '__webhookDebugAbortController',
  [TriggerType.Plugin]: '__pluginDebugAbortController',
  [TriggerType.All]: '__allTriggersDebugAbortController',
  [TriggerType.Schedule]: '__scheduleDebugAbortController',
}
// Human-readable label per trigger mode, used in toast/error messages.
export const debugLabelMap: Record<DebuggableTriggerType, string> = {
  [TriggerType.Webhook]: 'Webhook',
  [TriggerType.Plugin]: 'Plugin',
  [TriggerType.All]: 'All',
  [TriggerType.Schedule]: 'Schedule',
}
/** Fresh "running" run snapshot: no tracing, no streamed text, nothing truncated. */
export const createRunningWorkflowState = () => {
  const result = {
    status: WorkflowRunningStatus.Running,
    inputs_truncated: false,
    process_data_truncated: false,
    outputs_truncated: false,
  }
  return { result, tracing: [], resultText: '' }
}
/** Run snapshot for a manually stopped run — same shape as the running state. */
export const createStoppedWorkflowState = () => {
  const result = {
    status: WorkflowRunningStatus.Stopped,
    inputs_truncated: false,
    process_data_truncated: false,
    outputs_truncated: false,
  }
  return { result, tracing: [], resultText: '' }
}
/**
 * Builds a "failed" run snapshot carrying the failure message.
 * @param error - human-readable failure reason surfaced in the run panel
 */
export const createFailedWorkflowState = (error: string) => {
  return {
    result: {
      status: WorkflowRunningStatus.Failed,
      error,
      inputs_truncated: false,
      process_data_truncated: false,
      outputs_truncated: false,
    },
    tracing: [],
    // Consistency fix: the running/stopped factories reset resultText; this
    // one previously omitted the key, leaving consumers with `undefined`.
    resultText: '',
  }
}
/** Run-history endpoint for the app; advanced-chat apps use a dedicated path. */
export const buildRunHistoryUrl = (appDetail?: AppDetailLike) => {
  if (appDetail?.mode === AppModeEnum.ADVANCED_CHAT)
    return `/apps/${appDetail.id}/advanced-chat/workflow-runs`
  return `/apps/${appDetail?.id}/workflow-runs`
}
/**
 * Resolves the draft-run endpoint for the given run mode.
 * Returns '' (caller aborts) when no endpoint applies or the app id is
 * missing for a trigger run.
 */
export const resolveWorkflowRunUrl = (
  appDetail: AppDetailLike | undefined,
  runMode: HandleRunMode,
  isInWorkflowDebug: boolean,
) => {
  const isSingleTriggerRun = runMode === TriggerType.Plugin
    || runMode === TriggerType.Webhook
    || runMode === TriggerType.Schedule
  if (isSingleTriggerRun || runMode === TriggerType.All) {
    // Trigger runs cannot be issued without a concrete app id.
    if (!appDetail?.id) {
      console.error(isSingleTriggerRun
        ? 'handleRun: missing app id for trigger plugin run'
        : 'handleRun: missing app id for trigger run all')
      return ''
    }
    return isSingleTriggerRun
      ? `/apps/${appDetail.id}/workflows/draft/trigger/run`
      : `/apps/${appDetail.id}/workflows/draft/trigger/run-all`
  }
  if (appDetail?.mode === AppModeEnum.ADVANCED_CHAT)
    return `/apps/${appDetail.id}/advanced-chat/workflows/draft/run`
  if (isInWorkflowDebug && appDetail?.id)
    return `/apps/${appDetail.id}/workflows/draft/run`
  return ''
}
/**
 * Builds the request body for a draft run. Trigger modes send only the node
 * reference(s); a plain user-input run sends the resolved input params.
 */
export const buildWorkflowRunRequestBody = (
  runMode: HandleRunMode,
  resolvedParams: Record<string, unknown>,
  options?: HandleRunOptions,
) => {
  switch (runMode) {
    case TriggerType.Schedule:
      return { node_id: options?.scheduleNodeId }
    case TriggerType.Webhook:
      return { node_id: options?.webhookNodeId }
    case TriggerType.Plugin:
      return { node_id: options?.pluginNodeId }
    case TriggerType.All:
      return { node_ids: options?.allNodeIds }
    default:
      return resolvedParams
  }
}
/**
 * Validates that a run request carries the node id(s) its mode requires.
 * @returns an error message on failure, or '' when the request is valid.
 */
export const validateWorkflowRunRequest = (
  runMode: HandleRunMode,
  options?: HandleRunOptions,
) => {
  if (runMode === TriggerType.Schedule && !options?.scheduleNodeId)
    return 'handleRun: schedule trigger run requires node id'
  if (runMode === TriggerType.Webhook && !options?.webhookNodeId)
    return 'handleRun: webhook trigger run requires node id'
  if (runMode === TriggerType.Plugin && !options?.pluginNodeId)
    return 'handleRun: plugin trigger run requires node id'
  // Bug fix: the original condition `!allNodeIds && allNodeIds?.length === 0`
  // could never be true (a missing list has undefined length), so run-all
  // requests were never validated. Invalid when the list is missing OR empty.
  if (runMode === TriggerType.All && (options?.allNodeIds?.length ?? 0) === 0)
    return 'handleRun: all trigger run requires node ids'
  return ''
}
/** Narrowing guard: debug-listen works for every mode except a user-input run. */
export const isDebuggableTriggerType = (
  runMode: HandleRunMode,
): runMode is DebuggableTriggerType => {
  const debuggableModes: HandleRunMode[] = [
    TriggerType.Schedule,
    TriggerType.Webhook,
    TriggerType.Plugin,
    TriggerType.All,
  ]
  return debuggableModes.includes(runMode)
}
/**
 * Node ids a debug session listens on: run-all listens on every supplied id;
 * a single-trigger mode listens on its one node when provided, else nothing.
 */
export const buildListeningTriggerNodeIds = (
  runMode: DebuggableTriggerType,
  options?: HandleRunOptions,
) => {
  switch (runMode) {
    case TriggerType.All:
      return options?.allNodeIds ?? []
    case TriggerType.Webhook:
      return options?.webhookNodeId ? [options.webhookNodeId] : []
    case TriggerType.Schedule:
      return options?.scheduleNodeId ? [options.scheduleNodeId] : []
    case TriggerType.Plugin:
      return options?.pluginNodeId ? [options.pluginNodeId] : []
    default:
      return []
  }
}
/**
 * Puts the store into the correct "running" shape for the given mode:
 * debuggable trigger modes enter listening state (inspect panel open),
 * plain runs reset every listener-related field.
 */
export const applyRunningStateForMode = (
  actions: ListeningStateActions,
  runMode: HandleRunMode,
  options?: HandleRunOptions,
) => {
  if (isDebuggableTriggerType(runMode)) {
    actions.setIsListening(true)
    actions.setShowVariableInspectPanel(true)
    actions.setListeningTriggerIsAll(runMode === TriggerType.All)
    actions.setListeningTriggerNodeIds(buildListeningTriggerNodeIds(runMode, options))
  }
  else {
    actions.setIsListening(false)
    actions.setListeningTriggerType(null)
    actions.setListeningTriggerNodeId(null)
    actions.setListeningTriggerNodeIds([])
    actions.setListeningTriggerIsAll(false)
  }
  // Both paths end with a fresh "running" snapshot.
  actions.setWorkflowRunningData(createRunningWorkflowState())
}
/** Resets every trigger-listening field back to its idle value. */
export const clearListeningState = (actions: Pick<ListeningStateActions, 'setIsListening' | 'setListeningTriggerType' | 'setListeningTriggerNodeId' | 'setListeningTriggerNodeIds' | 'setListeningTriggerIsAll'>) => {
  const {
    setIsListening,
    setListeningTriggerType,
    setListeningTriggerNodeId,
    setListeningTriggerNodeIds,
    setListeningTriggerIsAll,
  } = actions
  setIsListening(false)
  setListeningTriggerType(null)
  setListeningTriggerNodeId(null)
  setListeningTriggerNodeIds([])
  setListeningTriggerIsAll(false)
}
/** Marks the run stopped, drops listener state, and opens the inspect panel. */
export const applyStoppedState = (actions: Pick<ListeningStateActions, 'setWorkflowRunningData' | 'setIsListening' | 'setShowVariableInspectPanel' | 'setListeningTriggerType' | 'setListeningTriggerNodeId'>) => {
  const {
    setWorkflowRunningData,
    setIsListening,
    setListeningTriggerType,
    setListeningTriggerNodeId,
    setShowVariableInspectPanel,
  } = actions
  setWorkflowRunningData(createStoppedWorkflowState())
  setIsListening(false)
  setListeningTriggerType(null)
  setListeningTriggerNodeId(null)
  setShowVariableInspectPanel(true)
}
/** Drops every per-trigger debug AbortController stashed on the target. */
export const clearWindowDebugControllers = (controllerTarget: Record<string, unknown>) => {
  const debugControllerKeys = [
    '__webhookDebugAbortController',
    '__pluginDebugAbortController',
    '__scheduleDebugAbortController',
    '__allTriggersDebugAbortController',
  ]
  for (const key of debugControllerKeys)
    delete controllerTarget[key]
}
/**
 * Derives the text-to-audio endpoint. Public (token-based) access uses the
 * shared endpoint; otherwise the URL is built from the app id, using the
 * installed-apps path when browsing inside "explore/installed".
 * Returns an empty URL when neither a token nor an app id is available.
 */
export const buildTTSConfig = (resolvedParams: TTSParamsLike, pathname: string) => {
  if (resolvedParams.token)
    return { ttsUrl: '/text-to-audio', ttsIsPublic: true }
  if (resolvedParams.appId) {
    const inExplore = pathname.search('explore/installed') > -1
    const ttsUrl = inExplore
      ? `/installed-apps/${resolvedParams.appId}/text-to-audio`
      : `/apps/${resolvedParams.appId}/text-to-audio`
    return { ttsUrl, ttsIsPublic: false }
  }
  return { ttsUrl: '', ttsIsPublic: false }
}
/**
 * Maps a published workflow version's raw feature flags into the FeaturesData
 * shape used by the features store. The opening section is enabled when
 * either an opening statement or at least one suggested question exists.
 */
export const mapPublishedWorkflowFeatures = (publishedWorkflow: VersionHistory): FeaturesData => {
  const { features } = publishedWorkflow
  const openingEnabled = Boolean(features.opening_statement) || features.suggested_questions.length > 0
  return {
    opening: {
      enabled: openingEnabled,
      opening_statement: features.opening_statement,
      suggested_questions: features.suggested_questions,
    },
    suggested: features.suggested_questions_after_answer,
    text2speech: features.text_to_speech,
    speech2text: features.speech_to_text,
    citation: features.retriever_resource,
    moderation: features.sensitive_word_avoidance,
    file: features.file_upload,
  }
}
/**
 * Returns copies of the published graph's nodes with every node (and its
 * data payload) deselected, ready to be applied to the canvas.
 * The source nodes are not mutated.
 */
export const normalizePublishedWorkflowNodes = (publishedWorkflow: VersionHistory) => {
  const deselect = (node: VersionHistory['graph']['nodes'][number]) => ({
    ...node,
    selected: false,
    data: {
      ...node.data,
      selected: false,
    },
  })
  return publishedWorkflow.graph.nodes.map(deselect)
}
/**
 * Resolves after `delay` ms, or immediately when `signal` aborts first.
 * Never rejects — abort simply ends the wait early.
 */
export const waitWithAbort = (signal: AbortSignal, delay: number) => new Promise<void>((resolve) => {
  // Bug fix: an already-aborted signal never fires 'abort' again, so the
  // original implementation slept for the full delay in that case.
  if (signal.aborted) {
    resolve()
    return
  }
  const onAbort = () => {
    clearTimeout(timer)
    resolve()
  }
  const timer = window.setTimeout(() => {
    // Detach the abort listener so it cannot accumulate on a long-lived
    // signal across repeated polls.
    signal.removeEventListener('abort', onAbort)
    resolve()
  }, delay)
  signal.addEventListener('abort', onAbort, { once: true })
})
/**
 * Polls a trigger-debug endpoint until the trigger fires, then feeds the
 * resulting SSE stream through handleStream with the supplied callbacks.
 * The AbortController is published both via setAbortController and on
 * controllerTarget (window) under the mode-specific key, so either side can
 * cancel the poll.
 */
export const runTriggerDebug = async ({
  debugType,
  url,
  requestBody,
  baseSseOptions,
  controllerTarget,
  setAbortController,
  clearAbortController,
  clearListeningState,
  setWorkflowRunningData,
}: TriggerDebugRunnerOptions) => {
  const controller = new AbortController()
  setAbortController(controller)
  const controllerKey = controllerKeyMap[debugType]
  controllerTarget[controllerKey] = controller
  const debugLabel = debugLabelMap[debugType]
  const poll = async (): Promise<void> => {
    try {
      // needAllResponseContent: the raw Response is needed to inspect the
      // content type and (possibly) stream its body.
      const response = await post<Response>(url, {
        body: requestBody,
        signal: controller.signal,
      }, {
        needAllResponseContent: true,
      })
      if (controller.signal.aborted)
        return
      if (!response) {
        const message = `${debugLabel} debug request failed`
        toast.error(message)
        clearAbortController()
        return
      }
      const contentType = response.headers.get('content-type') || ''
      // JSON responses carry either a "keep waiting" status or an error.
      if (contentType.includes(ContentType.json)) {
        let data: Record<string, unknown> | null = null
        try {
          data = await response.json() as Record<string, unknown>
        }
        catch (jsonError) {
          console.error(`handleRun: ${debugLabel.toLowerCase()} debug response parse error`, jsonError)
          toast.error(`${debugLabel} debug request failed`)
          clearAbortController()
          clearListeningState()
          return
        }
        if (controller.signal.aborted)
          return
        // "waiting": the trigger has not fired yet — back off (server-hinted
        // retry_in ms, default 2s) and poll again, unless aborted meanwhile.
        if (data?.status === 'waiting') {
          const delay = Number(data.retry_in) || 2000
          await waitWithAbort(controller.signal, delay)
          if (controller.signal.aborted)
            return
          await poll()
          return
        }
        // Any other JSON payload is treated as a failure.
        const errorMessage = typeof data?.message === 'string' ? data.message : `${debugLabel} debug failed`
        toast.error(errorMessage)
        clearAbortController()
        setWorkflowRunningData(createFailedWorkflowState(errorMessage))
        clearListeningState()
        return
      }
      // NOTE(review): a non-JSON response is assumed to be the run's SSE
      // stream — confirm against the server contract.
      clearListeningState()
      handleStream(
        response,
        baseSseOptions.onData ?? noop,
        baseSseOptions.onCompleted,
        baseSseOptions.onThought,
        baseSseOptions.onMessageEnd,
        baseSseOptions.onMessageReplace,
        baseSseOptions.onFile,
        baseSseOptions.onWorkflowStarted,
        baseSseOptions.onWorkflowFinished,
        baseSseOptions.onNodeStarted,
        baseSseOptions.onNodeFinished,
        baseSseOptions.onIterationStart,
        baseSseOptions.onIterationNext,
        baseSseOptions.onIterationFinish,
        baseSseOptions.onLoopStart,
        baseSseOptions.onLoopNext,
        baseSseOptions.onLoopFinish,
        baseSseOptions.onNodeRetry,
        baseSseOptions.onParallelBranchStarted,
        baseSseOptions.onParallelBranchFinished,
        baseSseOptions.onTextChunk,
        baseSseOptions.onTTSChunk,
        baseSseOptions.onTTSEnd,
        baseSseOptions.onTextReplace,
        baseSseOptions.onAgentLog,
        baseSseOptions.onHumanInputRequired,
        baseSseOptions.onHumanInputFormFilled,
        baseSseOptions.onHumanInputFormTimeout,
        baseSseOptions.onWorkflowPaused,
        baseSseOptions.onDataSourceNodeProcessing,
        baseSseOptions.onDataSourceNodeCompleted,
        baseSseOptions.onDataSourceNodeError,
      )
    }
    catch (error) {
      if (controller.signal.aborted)
        return
      // The HTTP layer may throw the Response itself on error statuses.
      if (error instanceof Response) {
        const data = await error.clone().json() as Record<string, unknown>
        // NOTE(review): when the payload lacks a string `error`, this toasts
        // an empty message — confirm whether a fallback label is wanted.
        const errorMessage = typeof data?.error === 'string' ? data.error : ''
        toast.error(errorMessage)
        clearAbortController()
        setWorkflowRunningData(createFailedWorkflowState(errorMessage))
      }
      clearListeningState()
    }
  }
  await poll()
}

View File

@@ -1,4 +1,3 @@
import type { HandleRunOptions } from './use-workflow-run-utils'
import type AudioPlayer from '@/app/components/base/audio-btn/audio'
import type { Node } from '@/app/components/workflow/types'
import type { IOtherOptions } from '@/service/base'
@@ -15,38 +14,46 @@ import { useStore as useAppStore } from '@/app/components/app/store'
import { trackEvent } from '@/app/components/base/amplitude'
import { AudioPlayerManager } from '@/app/components/base/audio-btn/audio.player.manager'
import { useFeaturesStore } from '@/app/components/base/features/hooks'
import Toast from '@/app/components/base/toast'
import { TriggerType } from '@/app/components/workflow/header/test-run-menu'
import { useWorkflowUpdate } from '@/app/components/workflow/hooks/use-workflow-interactions'
import { useWorkflowRunEvent } from '@/app/components/workflow/hooks/use-workflow-run-event/use-workflow-run-event'
import { useWorkflowStore } from '@/app/components/workflow/store'
import { WorkflowRunningStatus } from '@/app/components/workflow/types'
import { usePathname } from '@/next/navigation'
import { ssePost } from '@/service/base'
import { handleStream, post, sseGet, ssePost } from '@/service/base'
import { ContentType } from '@/service/fetch'
import { useInvalidAllLastRun, useInvalidateWorkflowRunHistory } from '@/service/use-workflow'
import { stopWorkflowRun } from '@/service/workflow'
import { AppModeEnum } from '@/types/app'
import { useSetWorkflowVarsWithValue } from '../../workflow/hooks/use-fetch-workflow-inspect-vars'
import { useConfigsMap } from './use-configs-map'
import { useNodesSyncDraft } from './use-nodes-sync-draft'
import {
createBaseWorkflowRunCallbacks,
createFinalWorkflowRunCallbacks,
} from './use-workflow-run-callbacks'
import {
applyRunningStateForMode,
applyStoppedState,
buildRunHistoryUrl,
buildTTSConfig,
buildWorkflowRunRequestBody,
clearListeningState,
clearWindowDebugControllers,
isDebuggableTriggerType,
mapPublishedWorkflowFeatures,
normalizePublishedWorkflowNodes,
resolveWorkflowRunUrl,
runTriggerDebug,
validateWorkflowRunRequest,
} from './use-workflow-run-utils'
type HandleRunMode = TriggerType
type HandleRunOptions = {
mode?: HandleRunMode
scheduleNodeId?: string
webhookNodeId?: string
pluginNodeId?: string
allNodeIds?: string[]
}
type DebuggableTriggerType = Exclude<TriggerType, TriggerType.UserInput>
const controllerKeyMap: Record<DebuggableTriggerType, string> = {
[TriggerType.Webhook]: '__webhookDebugAbortController',
[TriggerType.Plugin]: '__pluginDebugAbortController',
[TriggerType.All]: '__allTriggersDebugAbortController',
[TriggerType.Schedule]: '__scheduleDebugAbortController',
}
const debugLabelMap: Record<DebuggableTriggerType, string> = {
[TriggerType.Webhook]: 'Webhook',
[TriggerType.Plugin]: 'Plugin',
[TriggerType.All]: 'All',
[TriggerType.Schedule]: 'Schedule',
}
export const useWorkflowRun = () => {
const store = useStoreApi()
@@ -145,7 +152,7 @@ export const useWorkflowRun = () => {
callback?: IOtherOptions,
options?: HandleRunOptions,
) => {
const runMode = options?.mode ?? TriggerType.UserInput
const runMode: HandleRunMode = options?.mode ?? TriggerType.UserInput
const resolvedParams = params ?? {}
const {
getNodes,
@@ -183,7 +190,9 @@ export const useWorkflowRun = () => {
} = callback || {}
workflowStore.setState({ historyWorkflowData: undefined })
const appDetail = useAppStore.getState().appDetail
const runHistoryUrl = buildRunHistoryUrl(appDetail)
const runHistoryUrl = appDetail?.mode === AppModeEnum.ADVANCED_CHAT
? `/apps/${appDetail.id}/advanced-chat/workflow-runs`
: `/apps/${appDetail?.id}/workflow-runs`
const workflowContainer = document.getElementById('workflow-container')
const {
@@ -193,15 +202,65 @@ export const useWorkflowRun = () => {
const isInWorkflowDebug = appDetail?.mode === AppModeEnum.WORKFLOW
const url = resolveWorkflowRunUrl(appDetail, runMode, isInWorkflowDebug)
const requestBody = buildWorkflowRunRequestBody(runMode, resolvedParams, options)
let url = ''
if (runMode === TriggerType.Plugin || runMode === TriggerType.Webhook || runMode === TriggerType.Schedule) {
if (!appDetail?.id) {
console.error('handleRun: missing app id for trigger plugin run')
return
}
url = `/apps/${appDetail.id}/workflows/draft/trigger/run`
}
else if (runMode === TriggerType.All) {
if (!appDetail?.id) {
console.error('handleRun: missing app id for trigger run all')
return
}
url = `/apps/${appDetail.id}/workflows/draft/trigger/run-all`
}
else if (appDetail?.mode === AppModeEnum.ADVANCED_CHAT) {
url = `/apps/${appDetail.id}/advanced-chat/workflows/draft/run`
}
else if (isInWorkflowDebug && appDetail?.id) {
url = `/apps/${appDetail.id}/workflows/draft/run`
}
let requestBody = {}
if (runMode === TriggerType.Schedule)
requestBody = { node_id: options?.scheduleNodeId }
else if (runMode === TriggerType.Webhook)
requestBody = { node_id: options?.webhookNodeId }
else if (runMode === TriggerType.Plugin)
requestBody = { node_id: options?.pluginNodeId }
else if (runMode === TriggerType.All)
requestBody = { node_ids: options?.allNodeIds }
else
requestBody = resolvedParams
if (!url)
return
const validationMessage = validateWorkflowRunRequest(runMode, options)
if (validationMessage) {
console.error(validationMessage)
if (runMode === TriggerType.Schedule && !options?.scheduleNodeId) {
console.error('handleRun: schedule trigger run requires node id')
return
}
if (runMode === TriggerType.Webhook && !options?.webhookNodeId) {
console.error('handleRun: webhook trigger run requires node id')
return
}
if (runMode === TriggerType.Plugin && !options?.pluginNodeId) {
console.error('handleRun: plugin trigger run requires node id')
return
}
if (runMode === TriggerType.All && !options?.allNodeIds && options?.allNodeIds?.length === 0) {
console.error('handleRun: all trigger run requires node ids')
return
}
@@ -218,17 +277,66 @@ export const useWorkflowRun = () => {
setListeningTriggerNodeId,
} = workflowStore.getState()
applyRunningStateForMode({
setWorkflowRunningData,
setIsListening,
setShowVariableInspectPanel,
setListeningTriggerType,
setListeningTriggerNodeIds,
setListeningTriggerIsAll,
setListeningTriggerNodeId,
}, runMode, options)
if (
runMode === TriggerType.Webhook
|| runMode === TriggerType.Plugin
|| runMode === TriggerType.All
|| runMode === TriggerType.Schedule
) {
setIsListening(true)
setShowVariableInspectPanel(true)
setListeningTriggerIsAll(runMode === TriggerType.All)
if (runMode === TriggerType.All)
setListeningTriggerNodeIds(options?.allNodeIds ?? [])
else if (runMode === TriggerType.Webhook && options?.webhookNodeId)
setListeningTriggerNodeIds([options.webhookNodeId])
else if (runMode === TriggerType.Schedule && options?.scheduleNodeId)
setListeningTriggerNodeIds([options.scheduleNodeId])
else if (runMode === TriggerType.Plugin && options?.pluginNodeId)
setListeningTriggerNodeIds([options.pluginNodeId])
else
setListeningTriggerNodeIds([])
setWorkflowRunningData({
result: {
status: WorkflowRunningStatus.Running,
inputs_truncated: false,
process_data_truncated: false,
outputs_truncated: false,
},
tracing: [],
resultText: '',
})
}
else {
setIsListening(false)
setListeningTriggerType(null)
setListeningTriggerNodeId(null)
setListeningTriggerNodeIds([])
setListeningTriggerIsAll(false)
setWorkflowRunningData({
result: {
status: WorkflowRunningStatus.Running,
inputs_truncated: false,
process_data_truncated: false,
outputs_truncated: false,
},
tracing: [],
resultText: '',
})
}
const { ttsUrl, ttsIsPublic } = buildTTSConfig(resolvedParams, pathname)
let ttsUrl = ''
let ttsIsPublic = false
if (resolvedParams.token) {
ttsUrl = '/text-to-audio'
ttsIsPublic = true
}
else if (resolvedParams.appId) {
if (pathname.search('explore/installed') > -1)
ttsUrl = `/installed-apps/${resolvedParams.appId}/text-to-audio`
else
ttsUrl = `/apps/${resolvedParams.appId}/text-to-audio`
}
// Lazy initialization: Only create AudioPlayer when TTS is actually needed
// This prevents opening audio channel unnecessarily
let player: AudioPlayer | null = null
@@ -241,121 +349,497 @@ export const useWorkflowRun = () => {
const clearAbortController = () => {
abortControllerRef.current = null
clearWindowDebugControllers(window as unknown as Record<string, unknown>)
delete (window as any).__webhookDebugAbortController
delete (window as any).__pluginDebugAbortController
delete (window as any).__scheduleDebugAbortController
delete (window as any).__allTriggersDebugAbortController
}
const clearListeningStateInStore = () => {
const clearListeningState = () => {
const state = workflowStore.getState()
clearListeningState({
setIsListening: state.setIsListening,
setListeningTriggerType: state.setListeningTriggerType,
setListeningTriggerNodeId: state.setListeningTriggerNodeId,
setListeningTriggerNodeIds: state.setListeningTriggerNodeIds,
setListeningTriggerIsAll: state.setListeningTriggerIsAll,
})
state.setIsListening(false)
state.setListeningTriggerType(null)
state.setListeningTriggerNodeId(null)
state.setListeningTriggerNodeIds([])
state.setListeningTriggerIsAll(false)
}
const workflowRunEventHandlers = {
handleWorkflowStarted,
handleWorkflowFinished,
handleWorkflowFailed,
handleWorkflowNodeStarted,
handleWorkflowNodeFinished,
handleWorkflowNodeHumanInputRequired,
handleWorkflowNodeHumanInputFormFilled,
handleWorkflowNodeHumanInputFormTimeout,
handleWorkflowNodeIterationStarted,
handleWorkflowNodeIterationNext,
handleWorkflowNodeIterationFinished,
handleWorkflowNodeLoopStarted,
handleWorkflowNodeLoopNext,
handleWorkflowNodeLoopFinished,
handleWorkflowNodeRetry,
handleWorkflowAgentLog,
handleWorkflowTextChunk,
handleWorkflowTextReplace,
handleWorkflowPaused,
}
const userCallbacks = {
onWorkflowStarted,
onWorkflowFinished,
onNodeStarted,
onNodeFinished,
onIterationStart,
onIterationNext,
onIterationFinish,
onLoopStart,
onLoopNext,
onLoopFinish,
onNodeRetry,
onAgentLog,
onError,
onWorkflowPaused,
onHumanInputRequired,
onHumanInputFormFilled,
onHumanInputFormTimeout,
onCompleted,
const wrappedOnError = (params: any) => {
clearAbortController()
handleWorkflowFailed()
invalidateRunHistory(runHistoryUrl)
clearListeningState()
if (onError)
onError(params)
trackEvent('workflow_run_failed', { workflow_id: flowId, reason: params.error, node_type: params.node_type })
}
const trackWorkflowRunFailed = (eventParams: unknown) => {
const payload = eventParams as { error?: string, node_type?: string }
trackEvent('workflow_run_failed', { workflow_id: flowId, reason: payload?.error, node_type: payload?.node_type })
const wrappedOnCompleted: IOtherOptions['onCompleted'] = async (hasError?: boolean, errorMessage?: string) => {
clearAbortController()
clearListeningState()
if (onCompleted)
onCompleted(hasError, errorMessage)
}
const baseSseOptions = createBaseWorkflowRunCallbacks({
clientWidth,
clientHeight,
runHistoryUrl,
isInWorkflowDebug,
fetchInspectVars,
invalidAllLastRun,
invalidateRunHistory,
clearAbortController,
clearListeningState: clearListeningStateInStore,
trackWorkflowRunFailed,
handlers: workflowRunEventHandlers,
callbacks: userCallbacks,
restCallback,
getOrCreatePlayer,
const baseSseOptions: IOtherOptions = {
...restCallback,
onWorkflowStarted: (params) => {
handleWorkflowStarted(params)
invalidateRunHistory(runHistoryUrl)
if (onWorkflowStarted)
onWorkflowStarted(params)
},
onWorkflowFinished: (params) => {
clearListeningState()
handleWorkflowFinished(params)
invalidateRunHistory(runHistoryUrl)
if (onWorkflowFinished)
onWorkflowFinished(params)
if (isInWorkflowDebug) {
fetchInspectVars({})
invalidAllLastRun()
}
},
onNodeStarted: (params) => {
handleWorkflowNodeStarted(
params,
{
clientWidth,
clientHeight,
},
)
if (onNodeStarted)
onNodeStarted(params)
},
onNodeFinished: (params) => {
handleWorkflowNodeFinished(params)
if (onNodeFinished)
onNodeFinished(params)
},
onIterationStart: (params) => {
handleWorkflowNodeIterationStarted(
params,
{
clientWidth,
clientHeight,
},
)
if (onIterationStart)
onIterationStart(params)
},
onIterationNext: (params) => {
handleWorkflowNodeIterationNext(params)
if (onIterationNext)
onIterationNext(params)
},
onIterationFinish: (params) => {
handleWorkflowNodeIterationFinished(params)
if (onIterationFinish)
onIterationFinish(params)
},
onLoopStart: (params) => {
handleWorkflowNodeLoopStarted(
params,
{
clientWidth,
clientHeight,
},
)
if (onLoopStart)
onLoopStart(params)
},
onLoopNext: (params) => {
handleWorkflowNodeLoopNext(params)
if (onLoopNext)
onLoopNext(params)
},
onLoopFinish: (params) => {
handleWorkflowNodeLoopFinished(params)
if (onLoopFinish)
onLoopFinish(params)
},
onNodeRetry: (params) => {
handleWorkflowNodeRetry(params)
if (onNodeRetry)
onNodeRetry(params)
},
onAgentLog: (params) => {
handleWorkflowAgentLog(params)
if (onAgentLog)
onAgentLog(params)
},
onTextChunk: (params) => {
handleWorkflowTextChunk(params)
},
onTextReplace: (params) => {
handleWorkflowTextReplace(params)
},
onTTSChunk: (messageId: string, audio: string) => {
if (!audio || audio === '')
return
const audioPlayer = getOrCreatePlayer()
if (audioPlayer) {
audioPlayer.playAudioWithAudio(audio, true)
AudioPlayerManager.getInstance().resetMsgId(messageId)
}
},
onTTSEnd: (messageId: string, audio: string) => {
const audioPlayer = getOrCreatePlayer()
if (audioPlayer)
audioPlayer.playAudioWithAudio(audio, false)
},
onWorkflowPaused: (params) => {
handleWorkflowPaused()
invalidateRunHistory(runHistoryUrl)
if (onWorkflowPaused)
onWorkflowPaused(params)
const url = `/workflow/${params.workflow_run_id}/events`
sseGet(
url,
{},
baseSseOptions,
)
},
onHumanInputRequired: (params) => {
handleWorkflowNodeHumanInputRequired(params)
if (onHumanInputRequired)
onHumanInputRequired(params)
},
onHumanInputFormFilled: (params) => {
handleWorkflowNodeHumanInputFormFilled(params)
if (onHumanInputFormFilled)
onHumanInputFormFilled(params)
},
onHumanInputFormTimeout: (params) => {
handleWorkflowNodeHumanInputFormTimeout(params)
if (onHumanInputFormTimeout)
onHumanInputFormTimeout(params)
},
onError: wrappedOnError,
onCompleted: wrappedOnCompleted,
}
// Resolve after `delay` ms, or immediately when `signal` aborts; never rejects,
// and the pending timer is cancelled on abort so it cannot fire later.
const waitWithAbort = (signal: AbortSignal, delay: number) => {
  return new Promise<void>((resolve) => {
    const timerId = window.setTimeout(resolve, delay)
    const onAbort = () => {
      clearTimeout(timerId)
      resolve()
    }
    signal.addEventListener('abort', onAbort, { once: true })
  })
}
if (isDebuggableTriggerType(runMode)) {
await runTriggerDebug({
debugType: runMode,
url,
requestBody,
baseSseOptions,
controllerTarget: window as unknown as Record<string, unknown>,
setAbortController: (controller) => {
abortControllerRef.current = controller
},
clearAbortController,
clearListeningState: clearListeningStateInStore,
setWorkflowRunningData,
})
// Polls the draft trigger-run endpoint for the given trigger type until the
// backend either starts streaming events or reports an error. Stores the
// AbortController both on the ref and on a well-known window property so the
// run can be cancelled from outside this hook.
// NOTE(review): relies on closure variables of handleRun (url, requestBody,
// baseSseOptions, clearAbortController, clearListeningState, ...) — it must
// only be invoked after those are initialized for the current run.
const runTriggerDebug = async (debugType: DebuggableTriggerType) => {
  const controller = new AbortController()
  abortControllerRef.current = controller
  const controllerKey = controllerKeyMap[debugType]
  ; (window as any)[controllerKey] = controller
  const debugLabel = debugLabelMap[debugType]
  // One polling round; recurses (via await) while the backend says "waiting".
  const poll = async (): Promise<void> => {
    try {
      const response = await post<Response>(url, {
        body: requestBody,
        signal: controller.signal,
      }, {
        needAllResponseContent: true,
      })
      // Caller aborted while the request was in flight — stop silently.
      if (controller.signal.aborted)
        return
      if (!response) {
        const message = `${debugLabel} debug request failed`
        Toast.notify({ type: 'error', message })
        clearAbortController()
        return
      }
      const contentType = response.headers.get('content-type') || ''
      // A JSON body means "not streaming yet": either keep waiting or surface an error.
      if (contentType.includes(ContentType.json)) {
        let data: any = null
        try {
          data = await response.json()
        }
        catch (jsonError) {
          console.error(`handleRun: ${debugLabel.toLowerCase()} debug response parse error`, jsonError)
          Toast.notify({ type: 'error', message: `${debugLabel} debug request failed` })
          clearAbortController()
          clearListeningState()
          return
        }
        if (controller.signal.aborted)
          return
        // Backend asks us to retry after `retry_in` ms (default 2s).
        if (data?.status === 'waiting') {
          const delay = Number(data.retry_in) || 2000
          await waitWithAbort(controller.signal, delay)
          if (controller.signal.aborted)
            return
          await poll()
          return
        }
        // Any other JSON payload is treated as a failure for this run.
        const errorMessage = data?.message || `${debugLabel} debug failed`
        Toast.notify({ type: 'error', message: errorMessage })
        clearAbortController()
        setWorkflowRunningData({
          result: {
            status: WorkflowRunningStatus.Failed,
            error: errorMessage,
            inputs_truncated: false,
            process_data_truncated: false,
            outputs_truncated: false,
          },
          tracing: [],
        })
        clearListeningState()
        return
      }
      // Non-JSON response: the run started — hand the stream to the SSE handlers.
      clearListeningState()
      handleStream(
        response,
        baseSseOptions.onData ?? noop,
        baseSseOptions.onCompleted,
        baseSseOptions.onThought,
        baseSseOptions.onMessageEnd,
        baseSseOptions.onMessageReplace,
        baseSseOptions.onFile,
        baseSseOptions.onWorkflowStarted,
        baseSseOptions.onWorkflowFinished,
        baseSseOptions.onNodeStarted,
        baseSseOptions.onNodeFinished,
        baseSseOptions.onIterationStart,
        baseSseOptions.onIterationNext,
        baseSseOptions.onIterationFinish,
        baseSseOptions.onLoopStart,
        baseSseOptions.onLoopNext,
        baseSseOptions.onLoopFinish,
        baseSseOptions.onNodeRetry,
        baseSseOptions.onParallelBranchStarted,
        baseSseOptions.onParallelBranchFinished,
        baseSseOptions.onTextChunk,
        baseSseOptions.onTTSChunk,
        baseSseOptions.onTTSEnd,
        baseSseOptions.onTextReplace,
        baseSseOptions.onAgentLog,
        baseSseOptions.onHumanInputRequired,
        baseSseOptions.onHumanInputFormFilled,
        baseSseOptions.onHumanInputFormTimeout,
        baseSseOptions.onWorkflowPaused,
        baseSseOptions.onDataSourceNodeProcessing,
        baseSseOptions.onDataSourceNodeCompleted,
        baseSseOptions.onDataSourceNodeError,
      )
    }
    catch (error) {
      if (controller.signal.aborted)
        return
      // The request helper throws the raw Response on HTTP errors; extract its message.
      if (error instanceof Response) {
        const data = await error.clone().json() as Record<string, any>
        const { error: respError } = data || {}
        Toast.notify({ type: 'error', message: respError })
        clearAbortController()
        setWorkflowRunningData({
          result: {
            status: WorkflowRunningStatus.Failed,
            error: respError,
            inputs_truncated: false,
            process_data_truncated: false,
            outputs_truncated: false,
          },
          tracing: [],
        })
      }
      clearListeningState()
    }
  }
  await poll()
}
if (runMode === TriggerType.Schedule) {
await runTriggerDebug(TriggerType.Schedule)
return
}
const finalCallbacks = createFinalWorkflowRunCallbacks({
clientWidth,
clientHeight,
runHistoryUrl,
isInWorkflowDebug,
fetchInspectVars,
invalidAllLastRun,
invalidateRunHistory,
clearAbortController,
clearListeningState: clearListeningStateInStore,
trackWorkflowRunFailed,
handlers: workflowRunEventHandlers,
callbacks: userCallbacks,
restCallback,
baseSseOptions,
player,
setAbortController: (controller) => {
if (runMode === TriggerType.Webhook) {
await runTriggerDebug(TriggerType.Webhook)
return
}
if (runMode === TriggerType.Plugin) {
await runTriggerDebug(TriggerType.Plugin)
return
}
if (runMode === TriggerType.All) {
await runTriggerDebug(TriggerType.All)
return
}
const finalCallbacks: IOtherOptions = {
...baseSseOptions,
getAbortController: (controller: AbortController) => {
abortControllerRef.current = controller
},
})
onWorkflowFinished: (params) => {
handleWorkflowFinished(params)
invalidateRunHistory(runHistoryUrl)
if (onWorkflowFinished)
onWorkflowFinished(params)
if (isInWorkflowDebug) {
fetchInspectVars({})
invalidAllLastRun()
}
},
onError: (params) => {
handleWorkflowFailed()
invalidateRunHistory(runHistoryUrl)
if (onError)
onError(params)
},
onNodeStarted: (params) => {
handleWorkflowNodeStarted(
params,
{
clientWidth,
clientHeight,
},
)
if (onNodeStarted)
onNodeStarted(params)
},
onNodeFinished: (params) => {
handleWorkflowNodeFinished(params)
if (onNodeFinished)
onNodeFinished(params)
},
onIterationStart: (params) => {
handleWorkflowNodeIterationStarted(
params,
{
clientWidth,
clientHeight,
},
)
if (onIterationStart)
onIterationStart(params)
},
onIterationNext: (params) => {
handleWorkflowNodeIterationNext(params)
if (onIterationNext)
onIterationNext(params)
},
onIterationFinish: (params) => {
handleWorkflowNodeIterationFinished(params)
if (onIterationFinish)
onIterationFinish(params)
},
onLoopStart: (params) => {
handleWorkflowNodeLoopStarted(
params,
{
clientWidth,
clientHeight,
},
)
if (onLoopStart)
onLoopStart(params)
},
onLoopNext: (params) => {
handleWorkflowNodeLoopNext(params)
if (onLoopNext)
onLoopNext(params)
},
onLoopFinish: (params) => {
handleWorkflowNodeLoopFinished(params)
if (onLoopFinish)
onLoopFinish(params)
},
onNodeRetry: (params) => {
handleWorkflowNodeRetry(params)
if (onNodeRetry)
onNodeRetry(params)
},
onAgentLog: (params) => {
handleWorkflowAgentLog(params)
if (onAgentLog)
onAgentLog(params)
},
onTextChunk: (params) => {
handleWorkflowTextChunk(params)
},
onTextReplace: (params) => {
handleWorkflowTextReplace(params)
},
onTTSChunk: (messageId: string, audio: string) => {
if (!audio || audio === '')
return
player?.playAudioWithAudio(audio, true)
AudioPlayerManager.getInstance().resetMsgId(messageId)
},
onTTSEnd: (messageId: string, audio: string) => {
player?.playAudioWithAudio(audio, false)
},
onWorkflowPaused: (params) => {
handleWorkflowPaused()
invalidateRunHistory(runHistoryUrl)
if (onWorkflowPaused)
onWorkflowPaused(params)
const url = `/workflow/${params.workflow_run_id}/events`
sseGet(
url,
{},
finalCallbacks,
)
},
onHumanInputRequired: (params) => {
handleWorkflowNodeHumanInputRequired(params)
if (onHumanInputRequired)
onHumanInputRequired(params)
},
onHumanInputFormFilled: (params) => {
handleWorkflowNodeHumanInputFormFilled(params)
if (onHumanInputFormFilled)
onHumanInputFormFilled(params)
},
onHumanInputFormTimeout: (params) => {
handleWorkflowNodeHumanInputFormTimeout(params)
if (onHumanInputFormTimeout)
onHumanInputFormTimeout(params)
},
...restCallback,
}
ssePost(
url,
@@ -376,13 +860,20 @@ export const useWorkflowRun = () => {
setListeningTriggerNodeId,
} = workflowStore.getState()
applyStoppedState({
setWorkflowRunningData,
setIsListening,
setShowVariableInspectPanel,
setListeningTriggerType,
setListeningTriggerNodeId,
setWorkflowRunningData({
result: {
status: WorkflowRunningStatus.Stopped,
inputs_truncated: false,
process_data_truncated: false,
outputs_truncated: false,
},
tracing: [],
resultText: '',
})
setIsListening(false)
setListeningTriggerType(null)
setListeningTriggerNodeId(null)
setShowVariableInspectPanel(true)
}
if (taskId) {
@@ -418,7 +909,7 @@ export const useWorkflowRun = () => {
}, [workflowStore])
const handleRestoreFromPublishedWorkflow = useCallback((publishedWorkflow: VersionHistory) => {
const nodes = normalizePublishedWorkflowNodes(publishedWorkflow)
const nodes = publishedWorkflow.graph.nodes.map(node => ({ ...node, selected: false, data: { ...node.data, selected: false } }))
const edges = publishedWorkflow.graph.edges
const viewport = publishedWorkflow.graph.viewport!
handleUpdateWorkflowCanvas({
@@ -426,7 +917,21 @@ export const useWorkflowRun = () => {
edges,
viewport,
})
featuresStore?.setState({ features: mapPublishedWorkflowFeatures(publishedWorkflow) })
const mappedFeatures = {
opening: {
enabled: !!publishedWorkflow.features.opening_statement || !!publishedWorkflow.features.suggested_questions.length,
opening_statement: publishedWorkflow.features.opening_statement,
suggested_questions: publishedWorkflow.features.suggested_questions,
},
suggested: publishedWorkflow.features.suggested_questions_after_answer,
text2speech: publishedWorkflow.features.text_to_speech,
speech2text: publishedWorkflow.features.speech_to_text,
citation: publishedWorkflow.features.retriever_resource,
moderation: publishedWorkflow.features.sensitive_word_avoidance,
file: publishedWorkflow.features.file_upload,
}
featuresStore?.setState({ features: mappedFeatures })
workflowStore.getState().setEnvironmentVariables(publishedWorkflow.environment_variables || [])
}, [featuresStore, handleUpdateWorkflowCanvas, workflowStore])

View File

@@ -9,12 +9,16 @@ import {
import { useStore as useAppStore } from '@/app/components/app/store'
import { FeaturesProvider } from '@/app/components/base/features'
import Loading from '@/app/components/base/loading'
import { FILE_EXTS } from '@/app/components/base/prompt-editor/constants'
import WorkflowWithDefaultContext from '@/app/components/workflow'
import {
WorkflowContextProvider,
} from '@/app/components/workflow/context'
import { useWorkflowStore } from '@/app/components/workflow/store'
import { useTriggerStatusStore } from '@/app/components/workflow/store/trigger-status'
import {
SupportUploadFileTypes,
} from '@/app/components/workflow/types'
import {
initialEdges,
initialNodes,
@@ -31,11 +35,6 @@ import {
useWorkflowInit,
} from './hooks/use-workflow-init'
import { createWorkflowSlice } from './store/workflow/workflow-slice'
import {
buildInitialFeatures,
buildTriggerStatusMap,
coerceReplayUserInputs,
} from './utils'
const WorkflowAppWithAdditionalContext = () => {
const {
@@ -59,7 +58,13 @@ const WorkflowAppWithAdditionalContext = () => {
// Sync trigger statuses to store when data loads
useEffect(() => {
if (triggersResponse?.data) {
setTriggerStatuses(buildTriggerStatusMap(triggersResponse.data))
// Map API status to EntryNodeStatus: 'enabled' stays 'enabled', all others become 'disabled'
const statusMap = triggersResponse.data.reduce((acc, trigger) => {
acc[trigger.node_id] = trigger.status === 'enabled' ? 'enabled' : 'disabled'
return acc
}, {} as Record<string, 'enabled' | 'disabled'>)
setTriggerStatuses(statusMap)
}
}, [triggersResponse?.data, setTriggerStatuses])
@@ -103,21 +108,49 @@ const WorkflowAppWithAdditionalContext = () => {
fetchRunDetail(runUrl).then((res) => {
const { setInputs, setShowInputsPanel, setShowDebugAndPreviewPanel } = workflowStore.getState()
const rawInputs = res.inputs
let parsedInputs: unknown = rawInputs
let parsedInputs: Record<string, unknown> | null = null
if (typeof rawInputs === 'string') {
try {
parsedInputs = JSON.parse(rawInputs) as unknown
const maybeParsed = JSON.parse(rawInputs) as unknown
if (maybeParsed && typeof maybeParsed === 'object' && !Array.isArray(maybeParsed))
parsedInputs = maybeParsed as Record<string, unknown>
}
catch (error) {
console.error('Failed to parse workflow run inputs', error)
return
}
}
else if (rawInputs && typeof rawInputs === 'object' && !Array.isArray(rawInputs)) {
parsedInputs = rawInputs as Record<string, unknown>
}
const userInputs = coerceReplayUserInputs(parsedInputs)
if (!parsedInputs)
return
if (!userInputs || !Object.keys(userInputs).length)
const userInputs: Record<string, string | number | boolean> = {}
Object.entries(parsedInputs).forEach(([key, value]) => {
if (key.startsWith('sys.'))
return
if (value == null) {
userInputs[key] = ''
return
}
if (typeof value === 'string' || typeof value === 'number' || typeof value === 'boolean') {
userInputs[key] = value
return
}
try {
userInputs[key] = JSON.stringify(value)
}
catch {
userInputs[key] = String(value)
}
})
if (!Object.keys(userInputs).length)
return
setInputs(userInputs)
@@ -134,7 +167,32 @@ const WorkflowAppWithAdditionalContext = () => {
)
}
const initialFeatures: FeaturesData = buildInitialFeatures(data.features, fileUploadConfigResponse)
const features = data.features || {}
const initialFeatures: FeaturesData = {
file: {
image: {
enabled: !!features.file_upload?.image?.enabled,
number_limits: features.file_upload?.image?.number_limits || 3,
transfer_methods: features.file_upload?.image?.transfer_methods || ['local_file', 'remote_url'],
},
enabled: !!(features.file_upload?.enabled || features.file_upload?.image?.enabled),
allowed_file_types: features.file_upload?.allowed_file_types || [SupportUploadFileTypes.image],
allowed_file_extensions: features.file_upload?.allowed_file_extensions || FILE_EXTS[SupportUploadFileTypes.image].map(ext => `.${ext}`),
allowed_file_upload_methods: features.file_upload?.allowed_file_upload_methods || features.file_upload?.image?.transfer_methods || ['local_file', 'remote_url'],
number_limits: features.file_upload?.number_limits || features.file_upload?.image?.number_limits || 3,
fileUploadConfig: fileUploadConfigResponse,
},
opening: {
enabled: !!features.opening_statement,
opening_statement: features.opening_statement,
suggested_questions: features.suggested_questions,
},
suggested: features.suggested_questions_after_answer || { enabled: false },
speech2text: features.speech_to_text || { enabled: false },
text2speech: features.text_to_speech || { enabled: false },
citation: features.retriever_resource || { enabled: false },
moderation: features.sensitive_word_avoidance || { enabled: false },
}
return (
<WorkflowWithDefaultContext

View File

@@ -1,44 +0,0 @@
import { createStore } from 'zustand/vanilla'
import { createWorkflowSlice } from '../workflow-slice'
// Unit tests for the workflow zustand slice: verifies the factory's default
// state and that every setter mutates its corresponding field.
describe('createWorkflowSlice', () => {
  it('should initialize workflow slice state with expected defaults', () => {
    const store = createStore(createWorkflowSlice)
    const state = store.getState()
    expect(state.appId).toBe('')
    expect(state.appName).toBe('')
    expect(state.notInitialWorkflow).toBe(false)
    expect(state.shouldAutoOpenStartNodeSelector).toBe(false)
    expect(state.nodesDefaultConfigs).toEqual({})
    expect(state.showOnboarding).toBe(false)
    expect(state.hasSelectedStartNode).toBe(false)
    expect(state.hasShownOnboarding).toBe(false)
  })
  it('should update every workflow slice field through its setters', () => {
    const store = createStore(createWorkflowSlice)
    // appId/appName have no dedicated setters; set them via setState directly.
    store.setState({
      appId: 'app-1',
      appName: 'Workflow App',
    })
    store.getState().setNotInitialWorkflow(true)
    store.getState().setShouldAutoOpenStartNodeSelector(true)
    store.getState().setNodesDefaultConfigs({ start: { title: 'Start' } })
    store.getState().setShowOnboarding(true)
    store.getState().setHasSelectedStartNode(true)
    store.getState().setHasShownOnboarding(true)
    expect(store.getState()).toMatchObject({
      appId: 'app-1',
      appName: 'Workflow App',
      notInitialWorkflow: true,
      shouldAutoOpenStartNodeSelector: true,
      nodesDefaultConfigs: { start: { title: 'Start' } },
      showOnboarding: true,
      hasSelectedStartNode: true,
      hasShownOnboarding: true,
    })
  })
})

View File

@@ -1,107 +0,0 @@
import type { Features as FeaturesData } from '@/app/components/base/features/types'
import type { FileUploadConfigResponse } from '@/models/common'
import { FILE_EXTS } from '@/app/components/base/prompt-editor/constants'
import { SupportUploadFileTypes } from '@/app/components/workflow/types'
import { TransferMethod } from '@/types/app'
// Minimal structural type for a trigger status record coming from the API.
type TriggerStatusLike = {
  node_id: string
  status: string
}
// Loose shape of the file-upload feature config, including the legacy
// image-only sub-config used as a fallback source of defaults.
type FileUploadFeatureLike = {
  enabled?: boolean
  allowed_file_types?: SupportUploadFileTypes[]
  allowed_file_extensions?: string[]
  allowed_file_upload_methods?: TransferMethod[]
  number_limits?: number
  image?: {
    enabled?: boolean
    number_limits?: number
    transfer_methods?: TransferMethod[]
  }
}
// Loose shape of the raw workflow features payload consumed by buildInitialFeatures.
type WorkflowFeaturesLike = {
  file_upload?: FileUploadFeatureLike
  opening_statement?: string
  suggested_questions?: string[]
  suggested_questions_after_answer?: { enabled?: boolean }
  speech_to_text?: { enabled?: boolean }
  text_to_speech?: { enabled?: boolean }
  retriever_resource?: { enabled?: boolean }
  sensitive_word_avoidance?: { enabled?: boolean }
}
// Collapse a trigger status list into a node_id -> status lookup.
// Anything other than the literal 'enabled' is treated as 'disabled'.
export const buildTriggerStatusMap = (triggers: TriggerStatusLike[]) => {
  const statusMap: Record<string, 'enabled' | 'disabled'> = {}
  for (const trigger of triggers)
    statusMap[trigger.node_id] = trigger.status === 'enabled' ? 'enabled' : 'disabled'
  return statusMap
}
// Normalize a replayed workflow-run `inputs` payload into a flat map of
// user-facing form values. Returns null unless the payload is a plain object.
// System keys ('sys.*') are dropped, nullish values become '', primitives pass
// through, and anything else is JSON-stringified (falling back to String()).
export const coerceReplayUserInputs = (rawInputs: unknown): Record<string, string | number | boolean> | null => {
  const isPlainObject = rawInputs !== null && typeof rawInputs === 'object' && !Array.isArray(rawInputs)
  if (!isPlainObject)
    return null
  const userInputs: Record<string, string | number | boolean> = {}
  for (const [key, value] of Object.entries(rawInputs as Record<string, unknown>)) {
    if (key.startsWith('sys.'))
      continue
    if (value == null) {
      userInputs[key] = ''
      continue
    }
    switch (typeof value) {
      case 'string':
      case 'number':
      case 'boolean':
        userInputs[key] = value
        break
      default:
        try {
          userInputs[key] = JSON.stringify(value)
        }
        catch {
          // e.g. BigInt values make JSON.stringify throw.
          userInputs[key] = String(value)
        }
    }
  }
  return userInputs
}
// Translate the raw workflow feature config into the FeaturesData shape the
// features store expects, supplying defaults for anything unset. The legacy
// image-only upload config acts as a secondary fallback for the file feature.
export const buildInitialFeatures = (
  featuresSource: WorkflowFeaturesLike | null | undefined,
  fileUploadConfigResponse: FileUploadConfigResponse | undefined,
): FeaturesData => {
  const features = featuresSource ?? {}
  const fileUpload = features.file_upload
  const imageUpload = fileUpload?.image
  // Default extension list is derived from the supported image extensions.
  const imageExtensions = FILE_EXTS[SupportUploadFileTypes.image].map(ext => `.${ext}`)
  return {
    file: {
      image: {
        enabled: Boolean(imageUpload?.enabled),
        number_limits: imageUpload?.number_limits || 3,
        transfer_methods: imageUpload?.transfer_methods || [TransferMethod.local_file, TransferMethod.remote_url],
      },
      enabled: Boolean(fileUpload?.enabled || imageUpload?.enabled),
      allowed_file_types: fileUpload?.allowed_file_types || [SupportUploadFileTypes.image],
      allowed_file_extensions: fileUpload?.allowed_file_extensions || imageExtensions,
      allowed_file_upload_methods: fileUpload?.allowed_file_upload_methods || imageUpload?.transfer_methods || [TransferMethod.local_file, TransferMethod.remote_url],
      number_limits: fileUpload?.number_limits || imageUpload?.number_limits || 3,
      fileUploadConfig: fileUploadConfigResponse,
    },
    opening: {
      enabled: Boolean(features.opening_statement),
      opening_statement: features.opening_statement,
      suggested_questions: features.suggested_questions,
    },
    suggested: features.suggested_questions_after_answer ?? { enabled: false },
    speech2text: features.speech_to_text ?? { enabled: false },
    text2speech: features.text_to_speech ?? { enabled: false },
    citation: features.retriever_resource ?? { enabled: false },
    moderation: features.sensitive_word_avoidance ?? { enabled: false },
  }
}

View File

@@ -2,9 +2,6 @@ import { renderHook } from '@testing-library/react'
import useNodeResizeObserver from '../use-node-resize-observer'
describe('useNodeResizeObserver', () => {
afterEach(() => {
vi.unstubAllGlobals()
})
it('should observe and disconnect when enabled with a mounted node ref', () => {
const observe = vi.fn()
const disconnect = vi.fn()

View File

@@ -57,16 +57,6 @@ describe('before-run-form helpers', () => {
values: createValues({ query: '' }),
})], [{}], t)).toContain('errorMsg.fieldRequired')
expect(getFormErrorMessage([createForm({
inputs: [createInput({ variable: 'file', label: 'File', type: InputVarType.singleFile, required: true })],
values: createValues({ file: [] }),
})], [{}], t)).toContain('errorMsg.fieldRequired')
expect(getFormErrorMessage([createForm({
inputs: [createInput({ variable: 'files', label: 'Files', type: InputVarType.multiFiles, required: true })],
values: createValues({ files: [] }),
})], [{}], t)).toContain('errorMsg.fieldRequired')
expect(getFormErrorMessage([createForm({
inputs: [createInput({ variable: 'file', label: 'File', type: InputVarType.singleFile })],
values: createValues({ file: { transferMethod: TransferMethod.local_file } }),

View File

@@ -56,16 +56,7 @@ export const getFormErrorMessage = (
const missingRequired = input.required
&& input.type !== InputVarType.checkbox
&& !(input.variable in existVarValuesInForm)
&& (
value === '' || value === undefined || value === null
|| (
(input.type === InputVarType.files
|| input.type === InputVarType.multiFiles
|| input.type === InputVarType.singleFile)
&& Array.isArray(value)
&& value.length === 0
)
)
&& (value === '' || value === undefined || value === null || (input.type === InputVarType.files && Array.isArray(value) && value.length === 0))
if (!errMsg && missingRequired) {
errMsg = t('errorMsg.fieldRequired', { ns: 'workflow', field: typeof input.label === 'object' ? input.label.variable : input.label })

View File

@@ -75,12 +75,16 @@ describe('workflow-panel helpers', () => {
})
describe('custom run form fallback', () => {
it('should return null for unsupported custom run form nodes', () => {
it('should return a fallback message for unsupported custom run form nodes', () => {
const form = getCustomRunForm({
...createCustomRunFormProps({ type: BlockEnum.Tool }),
})
expect(form).toBeNull()
expect(form).toMatchObject({
props: {
children: expect.arrayContaining(['Custom Run Form:', ' ', 'not found']),
},
})
})
})
})

View File

@@ -39,7 +39,14 @@ export const getCustomRunForm = (params: CustomRunFormProps): ReactNode => {
case BlockEnum.DataSource:
return <DataSourceBeforeRunForm {...params} />
default:
return null
return (
<div>
Custom Run Form:
{nodeType}
{' '}
not found
</div>
)
}
}

View File

@@ -1,4 +1,4 @@
import { act, fireEvent, render, screen, waitFor } from '@testing-library/react'
import { fireEvent, render, screen, waitFor } from '@testing-library/react'
import userEvent from '@testing-library/user-event'
import { useState } from 'react'
import GenericTable from '../generic-table'
@@ -50,19 +50,8 @@ const advancedColumns = [
describe('GenericTable', () => {
beforeEach(() => {
vi.clearAllMocks()
vi.useRealTimers()
})
const selectOption = async (triggerName: string, optionName: string) => {
await act(async () => {
fireEvent.click(screen.getByRole('button', { name: triggerName }))
})
await act(async () => {
fireEvent.click(await screen.findByRole('option', { name: optionName }))
})
}
it('should render an empty editable row and append a configured row when typing into the virtual row', async () => {
const onChange = vi.fn()
@@ -154,11 +143,11 @@ describe('GenericTable', () => {
<ControlledTable />,
)
await selectOption('Choose method', 'POST')
await user.click(screen.getByRole('button', { name: 'Choose method' }))
await user.click(await screen.findByRole('option', { name: 'POST' }))
await waitFor(() => {
expect(onChange).toHaveBeenCalledWith([{ method: 'post', preview: '' }])
expect(screen.getByRole('button', { name: 'POST' })).toBeInTheDocument()
})
onChange.mockClear()

View File

@@ -90,22 +90,6 @@ describe('useVariableModalState', () => {
])
})
it('should keep valid object rows when switching to json mode from form mode', () => {
const { result } = renderHook(() => useVariableModalState(createOptions()))
act(() => {
result.current.handleTypeChange(ChatVarType.Object)
result.current.setObjectValue([
{ key: '', type: ChatVarType.String, value: undefined },
{ key: 'timeout', type: ChatVarType.Number, value: 30 },
])
result.current.handleEditorChange(true)
})
expect(result.current.editInJSON).toBe(true)
expect(result.current.value).toEqual({ timeout: 30 })
expect(result.current.editorContent).toBe(JSON.stringify({ timeout: 30 }))
})
it('should reset object form values when leaving empty json mode', () => {
const { result } = renderHook(() => useVariableModalState(createOptions({
chatVar: {
@@ -157,19 +141,6 @@ describe('useVariableModalState', () => {
expect(result.current.editorContent).toBe(JSON.stringify(['True', 'False']))
})
it('should preserve zero values when switching number arrays into json mode', () => {
const { result } = renderHook(() => useVariableModalState(createOptions()))
act(() => {
result.current.handleTypeChange(ChatVarType.ArrayNumber)
result.current.setValue([0, 2, undefined])
result.current.handleEditorChange(true)
})
expect(result.current.editInJSON).toBe(true)
expect(result.current.value).toEqual([0, 2])
expect(result.current.editorContent).toBe(JSON.stringify([0, 2]))
})
it('should notify and stop saving when object keys are invalid', () => {
const notify = vi.fn()
const onSave = vi.fn()
@@ -190,7 +161,7 @@ describe('useVariableModalState', () => {
result.current.handleSave()
})
expect(notify).toHaveBeenCalledWith({ type: 'error', message: 'chatVariable.modal.objectKeyRequired' })
expect(notify).toHaveBeenCalledWith({ type: 'error', message: 'object key can not be empty' })
expect(onSave).not.toHaveBeenCalled()
expect(onClose).not.toHaveBeenCalled()
})

View File

@@ -33,10 +33,6 @@ describe('variable-modal helpers', () => {
{ key: '', type: ChatVarType.Number, value: 1 },
])).toEqual({ apiKey: 'secret' })
expect(formatObjectValueFromList([
{ key: 'count', type: ChatVarType.Number, value: 0 },
{ key: 'label', type: ChatVarType.String, value: '' },
])).toEqual({ count: 0, label: null })
expect(formatChatVariableValue({
editInJSON: false,
objectValue: [{ key: 'enabled', type: ChatVarType.String, value: 'true' }],
@@ -58,13 +54,6 @@ describe('variable-modal helpers', () => {
value: ['a', '', 'b'],
})).toEqual(['a', 'b'])
expect(formatChatVariableValue({
editInJSON: false,
objectValue: [],
type: ChatVarType.ArrayNumber,
value: [0, 1, undefined, null, ''] as unknown as Array<number | undefined>,
})).toEqual([0, 1])
expect(formatChatVariableValue({
editInJSON: false,
objectValue: [],
@@ -105,10 +94,6 @@ describe('variable-modal helpers', () => {
type: ChatVarType.ArrayBoolean,
})).toEqual([true, false, true, false])
expect(() => parseEditorContent({
content: '{"enabled":true}',
type: ChatVarType.ArrayBoolean,
})).toThrow('JSON array')
expect(parseEditorContent({
content: '{"enabled":true}',
type: ChatVarType.Object,

View File

@@ -80,7 +80,7 @@ describe('variable-modal', () => {
await user.type(screen.getByPlaceholderText('workflow.chatVariable.modal.namePlaceholder'), 'existing_name')
await user.click(screen.getByText('common.operation.save'))
expect(mockToastError.mock.calls.at(-1)?.[0]).toBe('appDebug.varKeyError.keyAlreadyExists:{"key":"workflow.chatVariable.modal.name"}')
expect(mockToastError.mock.calls.at(-1)?.[0]).toBe('name is existed')
expect(onSave).not.toHaveBeenCalled()
})
@@ -100,10 +100,8 @@ describe('variable-modal', () => {
expect(screen.getByDisplayValue('secret')).toBeInTheDocument()
expect(screen.getByDisplayValue('30')).toBeInTheDocument()
const timeoutInput = screen.getByDisplayValue('30') as HTMLInputElement
await user.clear(screen.getByDisplayValue('secret'))
await user.clear(timeoutInput)
await user.type(timeoutInput, '5')
await user.type(screen.getByDisplayValue('30'), '5')
await user.click(screen.getByText('common.operation.save'))
expect(onSave).toHaveBeenCalledWith({
@@ -112,7 +110,7 @@ describe('variable-modal', () => {
value_type: ChatVarType.Object,
value: {
apiKey: null,
timeout: 5,
timeout: 305,
},
description: 'settings',
})
@@ -197,22 +195,4 @@ describe('variable-modal', () => {
description: '',
})
})
it('should keep the number input empty while editing after the user clears it', async () => {
const user = userEvent.setup()
renderVariableModal({
chatVar: {
id: 'var-4',
name: 'timeout',
description: '',
value_type: ChatVarType.Number,
value: 3,
},
})
const input = screen.getByDisplayValue('3') as HTMLInputElement
await user.clear(input)
expect(input.value).toBe('')
})
})

View File

@@ -108,7 +108,7 @@ export const useVariableModalState = ({
if (prev.type === ChatVarType.Object) {
if (nextEditInJSON) {
const nextValue = prev.objectValue.some(item => item.key) ? formatObjectValueFromList(prev.objectValue) : undefined
const nextValue = !prev.objectValue[0].key ? undefined : formatObjectValueFromList(prev.objectValue)
nextState.value = nextValue
nextState.editorContent = JSON.stringify(nextValue)
return nextState
@@ -133,11 +133,8 @@ export const useVariableModalState = ({
if (prev.type === ChatVarType.ArrayString || prev.type === ChatVarType.ArrayNumber) {
if (nextEditInJSON) {
const compactValues = Array.isArray(prev.value)
? prev.value.filter(item => item !== null && item !== undefined && item !== '')
: []
const nextValue = compactValues.length
? compactValues
const nextValue = (Array.isArray(prev.value) && prev.value.length && prev.value.filter(Boolean).length)
? prev.value.filter(Boolean)
: undefined
nextState.value = nextValue
if (!prev.editorContent)
@@ -184,15 +181,12 @@ export const useVariableModalState = ({
return
if (!chatVar && conversationVariables.some(item => item.name === state.name)) {
notify({
type: 'error',
message: t('varKeyError.keyAlreadyExists', { ns: 'appDebug', key: t('chatVariable.modal.name', { ns: 'workflow' }) }),
})
notify({ type: 'error', message: 'name is existed' })
return
}
if (state.type === ChatVarType.Object && state.objectValue.some(item => !item.key && item.value !== undefined && item.value !== '')) {
notify({ type: 'error', message: t('chatVariable.modal.objectKeyRequired', { ns: 'workflow' }) })
if (state.type === ChatVarType.Object && state.objectValue.some(item => !item.key && !!item.value)) {
notify({ type: 'error', message: 'object key can not be empty' })
return
}

View File

@@ -72,7 +72,7 @@ export const buildObjectValueItems = (chatVar?: ConversationVariable): ObjectVal
export const formatObjectValueFromList = (list: ObjectValueItem[]) => {
return list.reduce<Record<string, string | number | null>>((acc, curr) => {
if (curr.key)
acc[curr.key] = curr.value === '' || curr.value === undefined ? null : curr.value
acc[curr.key] = curr.value || null
return acc
}, {})
}
@@ -88,8 +88,6 @@ export const formatChatVariableValue = ({
type: ChatVarType
value: unknown
}) => {
const compactArrayValue = (items: unknown[]) =>
items.filter(item => item !== null && item !== undefined && item !== '')
switch (type) {
case ChatVarTypeEnum.String:
return value || ''
@@ -102,7 +100,7 @@ export const formatChatVariableValue = ({
case ChatVarTypeEnum.ArrayString:
case ChatVarTypeEnum.ArrayNumber:
case ChatVarTypeEnum.ArrayObject:
return Array.isArray(value) ? compactArrayValue(value) : []
return Array.isArray(value) ? value.filter(Boolean) : []
case ChatVarTypeEnum.ArrayBoolean:
return value || []
}
@@ -153,8 +151,6 @@ export const parseEditorContent = ({
if (type !== ChatVarTypeEnum.ArrayBoolean)
return parsed
if (!Array.isArray(parsed))
throw new TypeError('ArrayBoolean editor content must be a JSON array')
return parsed
.map((item: string | boolean) => {
if (item === 'True' || item === 'true' || item === true)

View File

@@ -138,10 +138,7 @@ export const ValueSection = ({
<Input
placeholder={t('chatVariable.modal.valuePlaceholder', { ns: 'workflow' }) || ''}
value={value as number | undefined}
onChange={(e) => {
const rawValue = e.target.value
onArrayChange([rawValue === '' ? undefined : Number(rawValue)])
}}
onChange={e => onArrayChange([Number(e.target.value)])}
type="number"
/>
)}

View File

@@ -6416,8 +6416,11 @@
}
},
"app/components/workflow-app/hooks/use-workflow-run.ts": {
"no-restricted-imports": {
"count": 1
},
"ts/no-explicit-any": {
"count": 5
"count": 13
}
},
"app/components/workflow-app/hooks/use-workflow-template.ts": {