Compare commits

..

1 Commit

Author: Stephen Zhou
SHA1: e535a1def2
Message: test: stable test
Date: 2026-02-09 16:40:29 +08:00
36 changed files with 991 additions and 5353 deletions

View File

@@ -34,7 +34,6 @@ from .dataset import (
metadata,
segment,
)
from .dataset.rag_pipeline import rag_pipeline_workflow
from .end_user import end_user
from .workspace import models
@@ -54,7 +53,6 @@ __all__ = [
"message",
"metadata",
"models",
"rag_pipeline_workflow",
"segment",
"site",
"workflow",

View File

@@ -1,3 +1,5 @@
import string
import uuid
from collections.abc import Generator
from typing import Any
@@ -10,7 +12,6 @@ from controllers.common.errors import FilenameNotExistsError, NoFileUploadedErro
from controllers.common.schema import register_schema_model
from controllers.service_api import service_api_ns
from controllers.service_api.dataset.error import PipelineRunError
from controllers.service_api.dataset.rag_pipeline.serializers import serialize_upload_file
from controllers.service_api.wraps import DatasetApiResource
from core.app.apps.pipeline.pipeline_generator import PipelineGenerator
from core.app.entities.app_invoke_entities import InvokeFrom
@@ -40,7 +41,7 @@ register_schema_model(service_api_ns, DatasourceNodeRunPayload)
register_schema_model(service_api_ns, PipelineRunApiEntity)
@service_api_ns.route("/datasets/<uuid:dataset_id>/pipeline/datasource-plugins")
@service_api_ns.route(f"/datasets/{uuid:dataset_id}/pipeline/datasource-plugins")
class DatasourcePluginsApi(DatasetApiResource):
"""Resource for datasource plugins."""
@@ -75,7 +76,7 @@ class DatasourcePluginsApi(DatasetApiResource):
return datasource_plugins, 200
@service_api_ns.route("/datasets/<uuid:dataset_id>/pipeline/datasource/nodes/<string:node_id>/run")
@service_api_ns.route(f"/datasets/{uuid:dataset_id}/pipeline/datasource/nodes/{string:node_id}/run")
class DatasourceNodeRunApi(DatasetApiResource):
"""Resource for datasource node run."""
@@ -130,7 +131,7 @@ class DatasourceNodeRunApi(DatasetApiResource):
)
@service_api_ns.route("/datasets/<uuid:dataset_id>/pipeline/run")
@service_api_ns.route(f"/datasets/{uuid:dataset_id}/pipeline/run")
class PipelineRunApi(DatasetApiResource):
"""Resource for datasource node run."""
@@ -231,4 +232,12 @@ class KnowledgebasePipelineFileUploadApi(DatasetApiResource):
except services.errors.file.UnsupportedFileTypeError:
raise UnsupportedFileTypeError()
return serialize_upload_file(upload_file), 201
return {
"id": upload_file.id,
"name": upload_file.name,
"size": upload_file.size,
"extension": upload_file.extension,
"mime_type": upload_file.mime_type,
"created_by": upload_file.created_by,
"created_at": upload_file.created_at,
}, 201

View File

@@ -1,22 +0,0 @@
"""
Serialization helpers for Service API knowledge pipeline endpoints.
"""
from __future__ import annotations
from typing import TYPE_CHECKING, Any
if TYPE_CHECKING:
from models.model import UploadFile
def serialize_upload_file(upload_file: UploadFile) -> dict[str, Any]:
return {
"id": upload_file.id,
"name": upload_file.name,
"size": upload_file.size,
"extension": upload_file.extension,
"mime_type": upload_file.mime_type,
"created_by": upload_file.created_by,
"created_at": upload_file.created_at.isoformat() if upload_file.created_at else None,
}

View File

@@ -217,8 +217,6 @@ def validate_dataset_token(view: Callable[Concatenate[T, P], R] | None = None):
def decorator(view: Callable[Concatenate[T, P], R]):
@wraps(view)
def decorated(*args: P.args, **kwargs: P.kwargs):
api_token = validate_and_get_api_token("dataset")
# get url path dataset_id from positional args or kwargs
# Flask passes URL path parameters as positional arguments
dataset_id = None
@@ -255,18 +253,12 @@ def validate_dataset_token(view: Callable[Concatenate[T, P], R] | None = None):
# Validate dataset if dataset_id is provided
if dataset_id:
dataset_id = str(dataset_id)
dataset = (
db.session.query(Dataset)
.where(
Dataset.id == dataset_id,
Dataset.tenant_id == api_token.tenant_id,
)
.first()
)
dataset = db.session.query(Dataset).where(Dataset.id == dataset_id).first()
if not dataset:
raise NotFound("Dataset not found.")
if not dataset.enable_api:
raise Forbidden("Dataset api access is not enabled.")
api_token = validate_and_get_api_token("dataset")
tenant_account_join = (
db.session.query(Tenant, TenantAccountJoin)
.where(Tenant.id == api_token.tenant_id)

View File

@@ -1,6 +1,6 @@
import functools
from collections.abc import Callable
from typing import ParamSpec, TypeVar, cast
from typing import Any, TypeVar, cast
from opentelemetry.trace import get_tracer
@@ -8,8 +8,7 @@ from configs import dify_config
from extensions.otel.decorators.handler import SpanHandler
from extensions.otel.runtime import is_instrument_flag_enabled
P = ParamSpec("P")
R = TypeVar("R")
T = TypeVar("T", bound=Callable[..., Any])
_HANDLER_INSTANCES: dict[type[SpanHandler], SpanHandler] = {SpanHandler: SpanHandler()}
@@ -21,7 +20,7 @@ def _get_handler_instance(handler_class: type[SpanHandler]) -> SpanHandler:
return _HANDLER_INSTANCES[handler_class]
def trace_span(handler_class: type[SpanHandler] | None = None) -> Callable[[Callable[P, R]], Callable[P, R]]:
def trace_span(handler_class: type[SpanHandler] | None = None) -> Callable[[T], T]:
"""
Decorator that traces a function with an OpenTelemetry span.
@@ -31,9 +30,9 @@ def trace_span(handler_class: type[SpanHandler] | None = None) -> Callable[[Call
:param handler_class: Optional handler class to use for this span. If None, uses the default SpanHandler.
"""
def decorator(func: Callable[P, R]) -> Callable[P, R]:
def decorator(func: T) -> T:
@functools.wraps(func)
def wrapper(*args: P.args, **kwargs: P.kwargs) -> R:
def wrapper(*args: Any, **kwargs: Any) -> Any:
if not (dify_config.ENABLE_OTEL or is_instrument_flag_enabled()):
return func(*args, **kwargs)
@@ -47,6 +46,6 @@ def trace_span(handler_class: type[SpanHandler] | None = None) -> Callable[[Call
kwargs=kwargs,
)
return cast(Callable[P, R], wrapper)
return cast(T, wrapper)
return decorator
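
For readers skimming the retyping above, a minimal usage sketch of the trace_span decorator follows, assuming only the signature visible in this hunk (an optional handler_class defaulting to SpanHandler). The import path and the decorated function are illustrative, not taken from the change.

# hypothetical module path; the diff only shows the decorator's own file
from extensions.otel.decorators.trace import trace_span

@trace_span()  # default SpanHandler; a subclass can be passed as handler_class
def answer_question(prompt: str) -> str:
    # per the guard in the hunk above, the wrapper is a no-op unless
    # ENABLE_OTEL or the instrument flag is enabled
    return prompt.strip().lower()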

View File

@@ -1,11 +1,9 @@
import inspect
from collections.abc import Callable, Mapping
from typing import Any, TypeVar
from typing import Any
from opentelemetry.trace import SpanKind, Status, StatusCode
R = TypeVar("R")
class SpanHandler:
"""
@@ -33,9 +31,9 @@ class SpanHandler:
def _extract_arguments(
self,
wrapped: Callable[..., R],
args: tuple[object, ...],
kwargs: Mapping[str, object],
wrapped: Callable[..., Any],
args: tuple[Any, ...],
kwargs: Mapping[str, Any],
) -> dict[str, Any] | None:
"""
Extract function arguments using inspect.signature.
@@ -64,10 +62,10 @@ class SpanHandler:
def wrapper(
self,
tracer: Any,
wrapped: Callable[..., R],
args: tuple[object, ...],
kwargs: Mapping[str, object],
) -> R:
wrapped: Callable[..., Any],
args: tuple[Any, ...],
kwargs: Mapping[str, Any],
) -> Any:
"""
Fully control the wrapper behavior.
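
The wrapper signature above is the extension point; below is a minimal sketch of a custom handler, assuming only that signature and the standard OpenTelemetry tracer API. The class name, span name, and status handling are illustrative.

from collections.abc import Callable, Mapping
from typing import Any

from opentelemetry.trace import Status, StatusCode

from extensions.otel.decorators.handler import SpanHandler

class AuditSpanHandler(SpanHandler):  # hypothetical subclass for illustration
    def wrapper(
        self,
        tracer: Any,
        wrapped: Callable[..., Any],
        args: tuple[Any, ...],
        kwargs: Mapping[str, Any],
    ) -> Any:
        # open a span named after the wrapped callable and record success or failure
        with tracer.start_as_current_span(wrapped.__qualname__) as span:
            try:
                result = wrapped(*args, **kwargs)
                span.set_status(Status(StatusCode.OK))
                return result
            except Exception as exc:
                span.record_exception(exc)
                span.set_status(Status(StatusCode.ERROR))
                raise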

View File

@@ -1,6 +1,6 @@
import logging
from collections.abc import Callable, Mapping
from typing import Any, TypeVar
from typing import Any
from opentelemetry.trace import SpanKind, Status, StatusCode
from opentelemetry.util.types import AttributeValue
@@ -12,19 +12,16 @@ from models.model import Account
logger = logging.getLogger(__name__)
R = TypeVar("R")
class AppGenerateHandler(SpanHandler):
"""Span handler for ``AppGenerateService.generate``."""
def wrapper(
self,
tracer: Any,
wrapped: Callable[..., R],
args: tuple[object, ...],
kwargs: Mapping[str, object],
) -> R:
wrapped: Callable[..., Any],
args: tuple[Any, ...],
kwargs: Mapping[str, Any],
) -> Any:
try:
arguments = self._extract_arguments(wrapped, args, kwargs)
if not arguments:

View File

@@ -1329,24 +1329,10 @@ class RagPipelineService:
"""
Get datasource plugins
"""
dataset: Dataset | None = (
db.session.query(Dataset)
.where(
Dataset.id == dataset_id,
Dataset.tenant_id == tenant_id,
)
.first()
)
dataset: Dataset | None = db.session.query(Dataset).where(Dataset.id == dataset_id).first()
if not dataset:
raise ValueError("Dataset not found")
pipeline: Pipeline | None = (
db.session.query(Pipeline)
.where(
Pipeline.id == dataset.pipeline_id,
Pipeline.tenant_id == tenant_id,
)
.first()
)
pipeline: Pipeline | None = db.session.query(Pipeline).where(Pipeline.id == dataset.pipeline_id).first()
if not pipeline:
raise ValueError("Pipeline not found")
@@ -1427,24 +1413,10 @@ class RagPipelineService:
"""
Get pipeline
"""
dataset: Dataset | None = (
db.session.query(Dataset)
.where(
Dataset.id == dataset_id,
Dataset.tenant_id == tenant_id,
)
.first()
)
dataset: Dataset | None = db.session.query(Dataset).where(Dataset.id == dataset_id).first()
if not dataset:
raise ValueError("Dataset not found")
pipeline: Pipeline | None = (
db.session.query(Pipeline)
.where(
Pipeline.id == dataset.pipeline_id,
Pipeline.tenant_id == tenant_id,
)
.first()
)
pipeline: Pipeline | None = db.session.query(Pipeline).where(Pipeline.id == dataset.pipeline_id).first()
if not pipeline:
raise ValueError("Pipeline not found")
return pipeline

View File

@@ -23,40 +23,40 @@ def clean_notion_document_task(document_ids: list[str], dataset_id: str):
"""
logger.info(click.style(f"Start clean document when import form notion document deleted: {dataset_id}", fg="green"))
start_at = time.perf_counter()
total_index_node_ids = []
with session_factory.create_session() as session:
dataset = session.query(Dataset).where(Dataset.id == dataset_id).first()
try:
dataset = session.query(Dataset).where(Dataset.id == dataset_id).first()
if not dataset:
raise Exception("Document has no dataset")
index_type = dataset.doc_form
index_processor = IndexProcessorFactory(index_type).init_index_processor()
if not dataset:
raise Exception("Document has no dataset")
index_type = dataset.doc_form
index_processor = IndexProcessorFactory(index_type).init_index_processor()
document_delete_stmt = delete(Document).where(Document.id.in_(document_ids))
session.execute(document_delete_stmt)
document_delete_stmt = delete(Document).where(Document.id.in_(document_ids))
session.execute(document_delete_stmt)
for document_id in document_ids:
segments = session.scalars(select(DocumentSegment).where(DocumentSegment.document_id == document_id)).all()
total_index_node_ids.extend([segment.index_node_id for segment in segments])
for document_id in document_ids:
segments = session.scalars(
select(DocumentSegment).where(DocumentSegment.document_id == document_id)
).all()
index_node_ids = [segment.index_node_id for segment in segments]
with session_factory.create_session() as session:
dataset = session.query(Dataset).where(Dataset.id == dataset_id).first()
if dataset:
index_processor.clean(
dataset, total_index_node_ids, with_keywords=True, delete_child_chunks=True, delete_summaries=True
index_processor.clean(
dataset, index_node_ids, with_keywords=True, delete_child_chunks=True, delete_summaries=True
)
segment_ids = [segment.id for segment in segments]
segment_delete_stmt = delete(DocumentSegment).where(DocumentSegment.id.in_(segment_ids))
session.execute(segment_delete_stmt)
session.commit()
end_at = time.perf_counter()
logger.info(
click.style(
"Clean document when import form notion document deleted end :: {} latency: {}".format(
dataset_id, end_at - start_at
),
fg="green",
)
)
with session_factory.create_session() as session, session.begin():
segment_delete_stmt = delete(DocumentSegment).where(DocumentSegment.document_id.in_(document_ids))
session.execute(segment_delete_stmt)
end_at = time.perf_counter()
logger.info(
click.style(
"Clean document when import form notion document deleted end :: {} latency: {}".format(
dataset_id, end_at - start_at
),
fg="green",
)
)
except Exception:
logger.exception("Cleaned document when import form notion document deleted failed")

View File

@@ -27,7 +27,6 @@ def document_indexing_sync_task(dataset_id: str, document_id: str):
"""
logger.info(click.style(f"Start sync document: {document_id}", fg="green"))
start_at = time.perf_counter()
tenant_id = None
with session_factory.create_session() as session, session.begin():
document = session.query(Document).where(Document.id == document_id, Document.dataset_id == dataset_id).first()
@@ -36,120 +35,94 @@ def document_indexing_sync_task(dataset_id: str, document_id: str):
logger.info(click.style(f"Document not found: {document_id}", fg="red"))
return
if document.indexing_status == "parsing":
logger.info(click.style(f"Document {document_id} is already being processed, skipping", fg="yellow"))
return
dataset = session.query(Dataset).where(Dataset.id == dataset_id).first()
if not dataset:
raise Exception("Dataset not found")
data_source_info = document.data_source_info_dict
if document.data_source_type != "notion_import":
logger.info(click.style(f"Document {document_id} is not a notion_import, skipping", fg="yellow"))
return
if document.data_source_type == "notion_import":
if (
not data_source_info
or "notion_page_id" not in data_source_info
or "notion_workspace_id" not in data_source_info
):
raise ValueError("no notion page found")
workspace_id = data_source_info["notion_workspace_id"]
page_id = data_source_info["notion_page_id"]
page_type = data_source_info["type"]
page_edited_time = data_source_info["last_edited_time"]
credential_id = data_source_info.get("credential_id")
if (
not data_source_info
or "notion_page_id" not in data_source_info
or "notion_workspace_id" not in data_source_info
):
raise ValueError("no notion page found")
# Get credentials from datasource provider
datasource_provider_service = DatasourceProviderService()
credential = datasource_provider_service.get_datasource_credentials(
tenant_id=document.tenant_id,
credential_id=credential_id,
provider="notion_datasource",
plugin_id="langgenius/notion_datasource",
)
workspace_id = data_source_info["notion_workspace_id"]
page_id = data_source_info["notion_page_id"]
page_type = data_source_info["type"]
page_edited_time = data_source_info["last_edited_time"]
credential_id = data_source_info.get("credential_id")
tenant_id = document.tenant_id
index_type = document.doc_form
segments = session.scalars(select(DocumentSegment).where(DocumentSegment.document_id == document_id)).all()
index_node_ids = [segment.index_node_id for segment in segments]
# Get credentials from datasource provider
datasource_provider_service = DatasourceProviderService()
credential = datasource_provider_service.get_datasource_credentials(
tenant_id=tenant_id,
credential_id=credential_id,
provider="notion_datasource",
plugin_id="langgenius/notion_datasource",
)
if not credential:
logger.error(
"Datasource credential not found for document %s, tenant_id: %s, credential_id: %s",
document_id,
tenant_id,
credential_id,
)
with session_factory.create_session() as session, session.begin():
document = session.query(Document).filter_by(id=document_id).first()
if document:
if not credential:
logger.error(
"Datasource credential not found for document %s, tenant_id: %s, credential_id: %s",
document_id,
document.tenant_id,
credential_id,
)
document.indexing_status = "error"
document.error = "Datasource credential not found. Please reconnect your Notion workspace."
document.stopped_at = naive_utc_now()
return
return
loader = NotionExtractor(
notion_workspace_id=workspace_id,
notion_obj_id=page_id,
notion_page_type=page_type,
notion_access_token=credential.get("integration_secret"),
tenant_id=tenant_id,
)
loader = NotionExtractor(
notion_workspace_id=workspace_id,
notion_obj_id=page_id,
notion_page_type=page_type,
notion_access_token=credential.get("integration_secret"),
tenant_id=document.tenant_id,
)
last_edited_time = loader.get_notion_last_edited_time()
if last_edited_time == page_edited_time:
logger.info(click.style(f"Document {document_id} content unchanged, skipping sync", fg="yellow"))
return
last_edited_time = loader.get_notion_last_edited_time()
logger.info(click.style(f"Document {document_id} content changed, starting sync", fg="green"))
# check the page is updated
if last_edited_time != page_edited_time:
document.indexing_status = "parsing"
document.processing_started_at = naive_utc_now()
try:
index_processor = IndexProcessorFactory(index_type).init_index_processor()
with session_factory.create_session() as session:
dataset = session.query(Dataset).where(Dataset.id == dataset_id).first()
if dataset:
index_processor.clean(dataset, index_node_ids, with_keywords=True, delete_child_chunks=True)
logger.info(click.style(f"Cleaned vector index for document {document_id}", fg="green"))
except Exception:
logger.exception("Failed to clean vector index for document %s", document_id)
# delete all document segment and index
try:
dataset = session.query(Dataset).where(Dataset.id == dataset_id).first()
if not dataset:
raise Exception("Dataset not found")
index_type = document.doc_form
index_processor = IndexProcessorFactory(index_type).init_index_processor()
with session_factory.create_session() as session, session.begin():
document = session.query(Document).filter_by(id=document_id).first()
if not document:
logger.warning(click.style(f"Document {document_id} not found during sync", fg="yellow"))
return
segments = session.scalars(
select(DocumentSegment).where(DocumentSegment.document_id == document_id)
).all()
index_node_ids = [segment.index_node_id for segment in segments]
data_source_info = document.data_source_info_dict
data_source_info["last_edited_time"] = last_edited_time
document.data_source_info = data_source_info
# delete from vector index
index_processor.clean(dataset, index_node_ids, with_keywords=True, delete_child_chunks=True)
document.indexing_status = "parsing"
document.processing_started_at = naive_utc_now()
segment_ids = [segment.id for segment in segments]
segment_delete_stmt = delete(DocumentSegment).where(DocumentSegment.id.in_(segment_ids))
session.execute(segment_delete_stmt)
segment_delete_stmt = delete(DocumentSegment).where(DocumentSegment.document_id == document_id)
session.execute(segment_delete_stmt)
end_at = time.perf_counter()
logger.info(
click.style(
"Cleaned document when document update data source or process rule: {} latency: {}".format(
document_id, end_at - start_at
),
fg="green",
)
)
except Exception:
logger.exception("Cleaned document when document update data source or process rule failed")
logger.info(click.style(f"Deleted segments for document {document_id}", fg="green"))
try:
indexing_runner = IndexingRunner()
with session_factory.create_session() as session:
document = session.query(Document).filter_by(id=document_id).first()
if document:
indexing_runner.run([document])
end_at = time.perf_counter()
logger.info(click.style(f"Sync completed for document {document_id} latency: {end_at - start_at}", fg="green"))
except DocumentIsPausedError as ex:
logger.info(click.style(str(ex), fg="yellow"))
except Exception as e:
logger.exception("document_indexing_sync_task failed for document_id: %s", document_id)
with session_factory.create_session() as session, session.begin():
document = session.query(Document).filter_by(id=document_id).first()
if document:
document.indexing_status = "error"
document.error = str(e)
document.stopped_at = naive_utc_now()
try:
indexing_runner = IndexingRunner()
indexing_runner.run([document])
end_at = time.perf_counter()
logger.info(click.style(f"update document: {document.id} latency: {end_at - start_at}", fg="green"))
except DocumentIsPausedError as ex:
logger.info(click.style(str(ex), fg="yellow"))
except Exception:
logger.exception("document_indexing_sync_task failed, document_id: %s", document_id)

View File

@@ -153,7 +153,8 @@ class TestCleanNotionDocumentTask:
# Execute cleanup task
clean_notion_document_task(document_ids, dataset.id)
# Verify segments are deleted
# Verify documents and segments are deleted
assert db_session_with_containers.query(Document).filter(Document.id.in_(document_ids)).count() == 0
assert (
db_session_with_containers.query(DocumentSegment)
.filter(DocumentSegment.document_id.in_(document_ids))
@@ -161,9 +162,9 @@ class TestCleanNotionDocumentTask:
== 0
)
# Verify index processor was called
# Verify index processor was called for each document
mock_processor = mock_index_processor_factory.return_value.init_index_processor.return_value
mock_processor.clean.assert_called_once()
assert mock_processor.clean.call_count == len(document_ids)
# This test successfully verifies:
# 1. Document records are properly deleted from the database
@@ -185,12 +186,12 @@ class TestCleanNotionDocumentTask:
non_existent_dataset_id = str(uuid.uuid4())
document_ids = [str(uuid.uuid4()), str(uuid.uuid4())]
# Execute cleanup task with non-existent dataset - expect exception
with pytest.raises(Exception, match="Document has no dataset"):
clean_notion_document_task(document_ids, non_existent_dataset_id)
# Execute cleanup task with non-existent dataset
clean_notion_document_task(document_ids, non_existent_dataset_id)
# Verify that the index processor factory was not used
mock_index_processor_factory.return_value.init_index_processor.assert_not_called()
# Verify that the index processor was not called
mock_processor = mock_index_processor_factory.return_value.init_index_processor.return_value
mock_processor.clean.assert_not_called()
def test_clean_notion_document_task_empty_document_list(
self, db_session_with_containers, mock_index_processor_factory, mock_external_service_dependencies
@@ -228,13 +229,9 @@ class TestCleanNotionDocumentTask:
# Execute cleanup task with empty document list
clean_notion_document_task([], dataset.id)
# Verify that the index processor was called once with empty node list
# Verify that the index processor was not called
mock_processor = mock_index_processor_factory.return_value.init_index_processor.return_value
assert mock_processor.clean.call_count == 1
args, kwargs = mock_processor.clean.call_args
# args: (dataset, total_index_node_ids)
assert isinstance(args[0], Dataset)
assert args[1] == []
mock_processor.clean.assert_not_called()
def test_clean_notion_document_task_with_different_index_types(
self, db_session_with_containers, mock_index_processor_factory, mock_external_service_dependencies
@@ -318,7 +315,8 @@ class TestCleanNotionDocumentTask:
# Note: This test successfully verifies cleanup with different document types.
# The task properly handles various index types and document configurations.
# Verify segments are deleted
# Verify documents and segments are deleted
assert db_session_with_containers.query(Document).filter(Document.id == document.id).count() == 0
assert (
db_session_with_containers.query(DocumentSegment)
.filter(DocumentSegment.document_id == document.id)
@@ -406,7 +404,8 @@ class TestCleanNotionDocumentTask:
# Execute cleanup task
clean_notion_document_task([document.id], dataset.id)
# Verify segments are deleted
# Verify documents and segments are deleted
assert db_session_with_containers.query(Document).filter(Document.id == document.id).count() == 0
assert (
db_session_with_containers.query(DocumentSegment).filter(DocumentSegment.document_id == document.id).count()
== 0
@@ -509,7 +508,8 @@ class TestCleanNotionDocumentTask:
clean_notion_document_task(documents_to_clean, dataset.id)
# Verify only specified documents' segments are deleted
# Verify only specified documents and segments are deleted
assert db_session_with_containers.query(Document).filter(Document.id.in_(documents_to_clean)).count() == 0
assert (
db_session_with_containers.query(DocumentSegment)
.filter(DocumentSegment.document_id.in_(documents_to_clean))
@@ -697,12 +697,11 @@ class TestCleanNotionDocumentTask:
db_session_with_containers.commit()
# Mock index processor to raise an exception
mock_index_processor = mock_index_processor_factory.return_value.init_index_processor.return_value
mock_index_processor = mock_index_processor_factory.init_index_processor.return_value
mock_index_processor.clean.side_effect = Exception("Index processor error")
# Execute cleanup task - current implementation propagates the exception
with pytest.raises(Exception, match="Index processor error"):
clean_notion_document_task([document.id], dataset.id)
# Execute cleanup task - it should handle the exception gracefully
clean_notion_document_task([document.id], dataset.id)
# Note: This test demonstrates the task's error handling capability.
# Even with external service errors, the database operations complete successfully.
@@ -804,7 +803,8 @@ class TestCleanNotionDocumentTask:
all_document_ids = [doc.id for doc in documents]
clean_notion_document_task(all_document_ids, dataset.id)
# Verify all segments are deleted
# Verify all documents and segments are deleted
assert db_session_with_containers.query(Document).filter(Document.dataset_id == dataset.id).count() == 0
assert (
db_session_with_containers.query(DocumentSegment).filter(DocumentSegment.dataset_id == dataset.id).count()
== 0
@@ -914,7 +914,8 @@ class TestCleanNotionDocumentTask:
clean_notion_document_task([target_document.id], target_dataset.id)
# Verify only documents' segments from target dataset are deleted
# Verify only documents from target dataset are deleted
assert db_session_with_containers.query(Document).filter(Document.id == target_document.id).count() == 0
assert (
db_session_with_containers.query(DocumentSegment)
.filter(DocumentSegment.document_id == target_document.id)
@@ -1029,7 +1030,8 @@ class TestCleanNotionDocumentTask:
all_document_ids = [doc.id for doc in documents]
clean_notion_document_task(all_document_ids, dataset.id)
# Verify all segments are deleted regardless of status
# Verify all documents and segments are deleted regardless of status
assert db_session_with_containers.query(Document).filter(Document.dataset_id == dataset.id).count() == 0
assert (
db_session_with_containers.query(DocumentSegment).filter(DocumentSegment.dataset_id == dataset.id).count()
== 0
@@ -1140,7 +1142,8 @@ class TestCleanNotionDocumentTask:
# Execute cleanup task
clean_notion_document_task([document.id], dataset.id)
# Verify segments are deleted
# Verify documents and segments are deleted
assert db_session_with_containers.query(Document).filter(Document.id == document.id).count() == 0
assert (
db_session_with_containers.query(DocumentSegment).filter(DocumentSegment.document_id == document.id).count()
== 0

View File

@@ -1,62 +0,0 @@
"""
Unit tests for Service API knowledge pipeline file-upload serialization.
"""
import importlib.util
from datetime import UTC, datetime
from pathlib import Path
class FakeUploadFile:
id: str
name: str
size: int
extension: str
mime_type: str
created_by: str
created_at: datetime | None
def _load_serialize_upload_file():
api_dir = Path(__file__).resolve().parents[5]
serializers_path = api_dir / "controllers" / "service_api" / "dataset" / "rag_pipeline" / "serializers.py"
spec = importlib.util.spec_from_file_location("rag_pipeline_serializers", serializers_path)
assert spec
assert spec.loader
module = importlib.util.module_from_spec(spec)
spec.loader.exec_module(module) # type: ignore[attr-defined]
return module.serialize_upload_file
def test_file_upload_created_at_is_isoformat_string():
serialize_upload_file = _load_serialize_upload_file()
created_at = datetime(2026, 2, 8, 12, 0, 0, tzinfo=UTC)
upload_file = FakeUploadFile()
upload_file.id = "file-1"
upload_file.name = "test.pdf"
upload_file.size = 123
upload_file.extension = "pdf"
upload_file.mime_type = "application/pdf"
upload_file.created_by = "account-1"
upload_file.created_at = created_at
result = serialize_upload_file(upload_file)
assert result["created_at"] == created_at.isoformat()
def test_file_upload_created_at_none_serializes_to_null():
serialize_upload_file = _load_serialize_upload_file()
upload_file = FakeUploadFile()
upload_file.id = "file-1"
upload_file.name = "test.pdf"
upload_file.size = 123
upload_file.extension = "pdf"
upload_file.mime_type = "application/pdf"
upload_file.created_by = "account-1"
upload_file.created_at = None
result = serialize_upload_file(upload_file)
assert result["created_at"] is None

View File

@@ -1,54 +0,0 @@
"""
Unit tests for Service API knowledge pipeline route registration.
"""
import ast
from pathlib import Path
def test_rag_pipeline_routes_registered():
api_dir = Path(__file__).resolve().parents[5]
service_api_init = api_dir / "controllers" / "service_api" / "__init__.py"
rag_pipeline_workflow = (
api_dir / "controllers" / "service_api" / "dataset" / "rag_pipeline" / "rag_pipeline_workflow.py"
)
assert service_api_init.exists()
assert rag_pipeline_workflow.exists()
init_tree = ast.parse(service_api_init.read_text(encoding="utf-8"))
import_found = False
for node in ast.walk(init_tree):
if not isinstance(node, ast.ImportFrom):
continue
if node.module != "dataset.rag_pipeline" or node.level != 1:
continue
if any(alias.name == "rag_pipeline_workflow" for alias in node.names):
import_found = True
break
assert import_found, "from .dataset.rag_pipeline import rag_pipeline_workflow not found in service_api/__init__.py"
workflow_tree = ast.parse(rag_pipeline_workflow.read_text(encoding="utf-8"))
route_paths: set[str] = set()
for node in ast.walk(workflow_tree):
if not isinstance(node, ast.ClassDef):
continue
for decorator in node.decorator_list:
if not isinstance(decorator, ast.Call):
continue
if not isinstance(decorator.func, ast.Attribute):
continue
if decorator.func.attr != "route":
continue
if not decorator.args:
continue
first_arg = decorator.args[0]
if isinstance(first_arg, ast.Constant) and isinstance(first_arg.value, str):
route_paths.add(first_arg.value)
assert "/datasets/<uuid:dataset_id>/pipeline/datasource-plugins" in route_paths
assert "/datasets/<uuid:dataset_id>/pipeline/datasource/nodes/<string:node_id>/run" in route_paths
assert "/datasets/<uuid:dataset_id>/pipeline/run" in route_paths
assert "/datasets/pipeline/file-upload" in route_paths

View File

@@ -4,7 +4,7 @@ from typing import Any
from uuid import uuid4
import pytest
from hypothesis import HealthCheck, given, settings
from hypothesis import given, settings
from hypothesis import strategies as st
from core.file import File, FileTransferMethod, FileType
@@ -493,7 +493,7 @@ def _scalar_value() -> st.SearchStrategy[int | float | str | File | None]:
)
@settings(max_examples=30, suppress_health_check=[HealthCheck.too_slow, HealthCheck.filter_too_much], deadline=None)
@settings(max_examples=50)
@given(_scalar_value())
def test_build_segment_and_extract_values_for_scalar_types(value):
seg = variable_factory.build_segment(value)
@@ -504,7 +504,7 @@ def test_build_segment_and_extract_values_for_scalar_types(value):
assert seg.value == value
@settings(max_examples=30, suppress_health_check=[HealthCheck.too_slow, HealthCheck.filter_too_much], deadline=None)
@settings(max_examples=50)
@given(values=st.lists(_scalar_value(), max_size=20))
def test_build_segment_and_extract_values_for_array_types(values):
seg = variable_factory.build_segment(values)

View File

@@ -109,87 +109,40 @@ def mock_document_segments(document_id):
@pytest.fixture
def mock_db_session():
"""Mock database session via session_factory.create_session().
After session split refactor, the code calls create_session() multiple times.
This fixture creates shared query mocks so all sessions use the same
query configuration, simulating database persistence across sessions.
The fixture automatically converts side_effect to cycle to prevent StopIteration.
Tests configure mocks the same way as before, but behind the scenes the values
are cycled infinitely for all sessions.
"""
from itertools import cycle
"""Mock database session via session_factory.create_session()."""
with patch("tasks.document_indexing_sync_task.session_factory") as mock_sf:
sessions = []
session = MagicMock()
# Ensure tests can observe session.close() via context manager teardown
session.close = MagicMock()
session.commit = MagicMock()
# Shared query mocks - all sessions use these
shared_query = MagicMock()
shared_filter_by = MagicMock()
shared_scalars_result = MagicMock()
# Mock session.begin() context manager to auto-commit on exit
begin_cm = MagicMock()
begin_cm.__enter__.return_value = session
# Create custom first mock that auto-cycles side_effect
class CyclicMock(MagicMock):
def __setattr__(self, name, value):
if name == "side_effect" and value is not None:
# Convert list/tuple to infinite cycle
if isinstance(value, (list, tuple)):
value = cycle(value)
super().__setattr__(name, value)
def _begin_exit_side_effect(*args, **kwargs):
# session.begin().__exit__() should commit if no exception
if args[0] is None: # No exception
session.commit()
shared_query.where.return_value.first = CyclicMock()
shared_filter_by.first = CyclicMock()
begin_cm.__exit__.side_effect = _begin_exit_side_effect
session.begin.return_value = begin_cm
def _create_session():
"""Create a new mock session for each create_session() call."""
session = MagicMock()
session.close = MagicMock()
session.commit = MagicMock()
# Mock create_session() context manager
cm = MagicMock()
cm.__enter__.return_value = session
# Mock session.begin() context manager
begin_cm = MagicMock()
begin_cm.__enter__.return_value = session
def _exit_side_effect(*args, **kwargs):
session.close()
def _begin_exit_side_effect(exc_type, exc, tb):
# commit on success
if exc_type is None:
session.commit()
# return False to propagate exceptions
return False
cm.__exit__.side_effect = _exit_side_effect
mock_sf.create_session.return_value = cm
begin_cm.__exit__.side_effect = _begin_exit_side_effect
session.begin.return_value = begin_cm
# Mock create_session() context manager
cm = MagicMock()
cm.__enter__.return_value = session
def _exit_side_effect(exc_type, exc, tb):
session.close()
return False
cm.__exit__.side_effect = _exit_side_effect
# All sessions use the same shared query mocks
session.query.return_value = shared_query
shared_query.where.return_value = shared_query
shared_query.filter_by.return_value = shared_filter_by
session.scalars.return_value = shared_scalars_result
sessions.append(session)
# Attach helpers on the first created session for assertions across all sessions
if len(sessions) == 1:
session.get_all_sessions = lambda: sessions
session.any_close_called = lambda: any(s.close.called for s in sessions)
session.any_commit_called = lambda: any(s.commit.called for s in sessions)
return cm
mock_sf.create_session.side_effect = _create_session
# Create first session and return it
_create_session()
yield sessions[0]
query = MagicMock()
session.query.return_value = query
query.where.return_value = query
session.scalars.return_value = MagicMock()
yield session
@pytest.fixture
@@ -248,8 +201,8 @@ class TestDocumentIndexingSyncTask:
# Act
document_indexing_sync_task(dataset_id, document_id)
# Assert - at least one session should have been closed
assert mock_db_session.any_close_called()
# Assert
mock_db_session.close.assert_called_once()
def test_missing_notion_workspace_id(self, mock_db_session, mock_document, dataset_id, document_id):
"""Test that task raises error when notion_workspace_id is missing."""
@@ -292,7 +245,6 @@ class TestDocumentIndexingSyncTask:
"""Test that task handles missing credentials by updating document status."""
# Arrange
mock_db_session.query.return_value.where.return_value.first.return_value = mock_document
mock_db_session.query.return_value.filter_by.return_value.first.return_value = mock_document
mock_datasource_provider_service.get_datasource_credentials.return_value = None
# Act
@@ -302,8 +254,8 @@ class TestDocumentIndexingSyncTask:
assert mock_document.indexing_status == "error"
assert "Datasource credential not found" in mock_document.error
assert mock_document.stopped_at is not None
assert mock_db_session.any_commit_called()
assert mock_db_session.any_close_called()
mock_db_session.commit.assert_called()
mock_db_session.close.assert_called()
def test_page_not_updated(
self,
@@ -317,7 +269,6 @@ class TestDocumentIndexingSyncTask:
"""Test that task does nothing when page has not been updated."""
# Arrange
mock_db_session.query.return_value.where.return_value.first.return_value = mock_document
mock_db_session.query.return_value.filter_by.return_value.first.return_value = mock_document
# Return same time as stored in document
mock_notion_extractor.get_notion_last_edited_time.return_value = "2024-01-01T00:00:00Z"
@@ -327,8 +278,8 @@ class TestDocumentIndexingSyncTask:
# Assert
# Document status should remain unchanged
assert mock_document.indexing_status == "completed"
# At least one session should have been closed via context manager teardown
assert mock_db_session.any_close_called()
# Session should still be closed via context manager teardown
assert mock_db_session.close.called
def test_successful_sync_when_page_updated(
self,
@@ -345,20 +296,7 @@ class TestDocumentIndexingSyncTask:
):
"""Test successful sync flow when Notion page has been updated."""
# Arrange
# Set exact sequence of returns across calls to `.first()`:
# 1) document (initial fetch)
# 2) dataset (pre-check)
# 3) dataset (cleaning phase)
# 4) document (pre-indexing update)
# 5) document (indexing runner fetch)
mock_db_session.query.return_value.where.return_value.first.side_effect = [
mock_document,
mock_dataset,
mock_dataset,
mock_document,
mock_document,
]
mock_db_session.query.return_value.filter_by.return_value.first.return_value = mock_document
mock_db_session.query.return_value.where.return_value.first.side_effect = [mock_document, mock_dataset]
mock_db_session.scalars.return_value.all.return_value = mock_document_segments
# NotionExtractor returns updated time
mock_notion_extractor.get_notion_last_edited_time.return_value = "2024-01-02T00:00:00Z"
@@ -376,40 +314,28 @@ class TestDocumentIndexingSyncTask:
mock_processor.clean.assert_called_once()
# Verify segments were deleted from database in batch (DELETE FROM document_segments)
# Aggregate execute calls across all created sessions
execute_sqls = []
for s in mock_db_session.get_all_sessions():
execute_sqls.extend([" ".join(str(c[0][0]).split()) for c in s.execute.call_args_list])
execute_sqls = [" ".join(str(c[0][0]).split()) for c in mock_db_session.execute.call_args_list]
assert any("DELETE FROM document_segments" in sql for sql in execute_sqls)
# Verify indexing runner was called
mock_indexing_runner.run.assert_called_once_with([mock_document])
# Verify session operations (across any created session)
assert mock_db_session.any_commit_called()
assert mock_db_session.any_close_called()
# Verify session operations
assert mock_db_session.commit.called
mock_db_session.close.assert_called_once()
def test_dataset_not_found_during_cleaning(
self,
mock_db_session,
mock_datasource_provider_service,
mock_notion_extractor,
mock_indexing_runner,
mock_document,
dataset_id,
document_id,
):
"""Test that task handles dataset not found during cleaning phase."""
# Arrange
# Sequence: document (initial), dataset (pre-check), None (cleaning), document (update), document (indexing)
mock_db_session.query.return_value.where.return_value.first.side_effect = [
mock_document,
mock_dataset,
None,
mock_document,
mock_document,
]
mock_db_session.query.return_value.filter_by.return_value.first.return_value = mock_document
mock_db_session.query.return_value.where.return_value.first.side_effect = [mock_document, None]
mock_notion_extractor.get_notion_last_edited_time.return_value = "2024-01-02T00:00:00Z"
# Act
@@ -418,8 +344,8 @@ class TestDocumentIndexingSyncTask:
# Assert
# Document should still be set to parsing
assert mock_document.indexing_status == "parsing"
# At least one session should be closed after error
assert mock_db_session.any_close_called()
# Session should be closed after error
mock_db_session.close.assert_called_once()
def test_cleaning_error_continues_to_indexing(
self,
@@ -435,14 +361,8 @@ class TestDocumentIndexingSyncTask:
):
"""Test that indexing continues even if cleaning fails."""
# Arrange
from itertools import cycle
mock_db_session.query.return_value.where.return_value.first.side_effect = cycle([mock_document, mock_dataset])
mock_db_session.query.return_value.filter_by.return_value.first.return_value = mock_document
# Make the cleaning step fail but not the segment fetch
processor = mock_index_processor_factory.return_value.init_index_processor.return_value
processor.clean.side_effect = Exception("Cleaning error")
mock_db_session.scalars.return_value.all.return_value = []
mock_db_session.query.return_value.where.return_value.first.side_effect = [mock_document, mock_dataset]
mock_db_session.scalars.return_value.all.side_effect = Exception("Cleaning error")
mock_notion_extractor.get_notion_last_edited_time.return_value = "2024-01-02T00:00:00Z"
# Act
@@ -451,7 +371,7 @@ class TestDocumentIndexingSyncTask:
# Assert
# Indexing should still be attempted despite cleaning error
mock_indexing_runner.run.assert_called_once_with([mock_document])
assert mock_db_session.any_close_called()
mock_db_session.close.assert_called_once()
def test_indexing_runner_document_paused_error(
self,
@@ -468,10 +388,7 @@ class TestDocumentIndexingSyncTask:
):
"""Test that DocumentIsPausedError is handled gracefully."""
# Arrange
from itertools import cycle
mock_db_session.query.return_value.where.return_value.first.side_effect = cycle([mock_document, mock_dataset])
mock_db_session.query.return_value.filter_by.return_value.first.return_value = mock_document
mock_db_session.query.return_value.where.return_value.first.side_effect = [mock_document, mock_dataset]
mock_db_session.scalars.return_value.all.return_value = mock_document_segments
mock_notion_extractor.get_notion_last_edited_time.return_value = "2024-01-02T00:00:00Z"
mock_indexing_runner.run.side_effect = DocumentIsPausedError("Document paused")
@@ -481,7 +398,7 @@ class TestDocumentIndexingSyncTask:
# Assert
# Session should be closed after handling error
assert mock_db_session.any_close_called()
mock_db_session.close.assert_called_once()
def test_indexing_runner_general_error(
self,
@@ -498,10 +415,7 @@ class TestDocumentIndexingSyncTask:
):
"""Test that general exceptions during indexing are handled."""
# Arrange
from itertools import cycle
mock_db_session.query.return_value.where.return_value.first.side_effect = cycle([mock_document, mock_dataset])
mock_db_session.query.return_value.filter_by.return_value.first.return_value = mock_document
mock_db_session.query.return_value.where.return_value.first.side_effect = [mock_document, mock_dataset]
mock_db_session.scalars.return_value.all.return_value = mock_document_segments
mock_notion_extractor.get_notion_last_edited_time.return_value = "2024-01-02T00:00:00Z"
mock_indexing_runner.run.side_effect = Exception("Indexing error")
@@ -511,7 +425,7 @@ class TestDocumentIndexingSyncTask:
# Assert
# Session should be closed after error
assert mock_db_session.any_close_called()
mock_db_session.close.assert_called_once()
def test_notion_extractor_initialized_with_correct_params(
self,
@@ -618,14 +532,7 @@ class TestDocumentIndexingSyncTask:
):
"""Test that index processor clean is called with correct parameters."""
# Arrange
# Sequence: document (initial), dataset (pre-check), dataset (cleaning), document (update), document (indexing)
mock_db_session.query.return_value.where.return_value.first.side_effect = [
mock_document,
mock_dataset,
mock_dataset,
mock_document,
mock_document,
]
mock_db_session.query.return_value.where.return_value.first.side_effect = [mock_document, mock_dataset]
mock_db_session.scalars.return_value.all.return_value = mock_document_segments
mock_notion_extractor.get_notion_last_edited_time.return_value = "2024-01-02T00:00:00Z"
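
Two pieces of the mock_db_session fixture above are worth isolating: a MagicMock standing in for the create_session() context manager, and itertools.cycle feeding side_effect so repeated .first() calls never exhaust and raise StopIteration. A minimal, self-contained sketch of both, with illustrative names:

from itertools import cycle
from unittest.mock import MagicMock

def make_session_factory_mock(first_results):
    """Return (factory, session) where factory.create_session() acts as a context manager."""
    session = MagicMock()
    # cycle the configured results so any number of .first() calls is served
    session.query.return_value.where.return_value.first.side_effect = cycle(first_results)
    cm = MagicMock()
    cm.__enter__.return_value = session
    cm.__exit__.return_value = False  # do not swallow exceptions
    factory = MagicMock()
    factory.create_session.return_value = cm
    return factory, session

# in a test, patch the task module's session_factory with the mock, e.g.:
# with unittest.mock.patch("tasks.document_indexing_sync_task.session_factory", factory): ...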

View File

@@ -3,8 +3,7 @@ import type { MockedFunction } from 'vitest'
import type { IndexingType } from '@/app/components/datasets/create/step-two'
import type { DataSet } from '@/models/datasets'
import type { RetrievalConfig } from '@/types/app'
import { fireEvent, render, screen, waitFor, within } from '@testing-library/react'
import userEvent from '@testing-library/user-event'
import { fireEvent, render, screen, waitFor } from '@testing-library/react'
import useBreakpoints, { MediaType } from '@/hooks/use-breakpoints'
import { ChunkingMode, DatasetPermission, DataSourceType } from '@/models/datasets'
import { RETRIEVE_METHOD } from '@/types/app'
@@ -137,6 +136,12 @@ const renderItem = (config: DataSet, props?: Partial<React.ComponentProps<typeof
return { onSave, onRemove }
}
const getActionButtons = (card: HTMLElement) => {
const actionButtons = Array.from(card.querySelectorAll<HTMLButtonElement>('button.action-btn'))
expect(actionButtons).toHaveLength(2)
return actionButtons
}
describe('dataset-config/card-item', () => {
beforeEach(() => {
vi.clearAllMocks()
@@ -155,7 +160,7 @@ describe('dataset-config/card-item', () => {
renderItem(dataset)
const card = screen.getByText(dataset.name).closest('.group') as HTMLElement
const actionButtons = within(card).getAllByRole('button', { hidden: true })
const actionButtons = getActionButtons(card)
expect(screen.getByText(dataset.name)).toBeInTheDocument()
expect(screen.getByText('dataset.indexingTechnique.high_quality · dataset.indexingMethod.semantic_search')).toBeInTheDocument()
@@ -164,20 +169,19 @@ describe('dataset-config/card-item', () => {
})
it('should open settings drawer from edit action and close after saving', async () => {
const user = userEvent.setup()
const dataset = createDataset()
const { onSave } = renderItem(dataset)
const card = screen.getByText(dataset.name).closest('.group') as HTMLElement
const [editButton] = within(card).getAllByRole('button', { hidden: true })
await user.click(editButton)
const [editButton] = getActionButtons(card)
fireEvent.click(editButton)
expect(screen.getByText('Mock settings modal')).toBeInTheDocument()
await waitFor(() => {
expect(screen.getByRole('dialog')).toBeVisible()
})
await user.click(screen.getByText('Save changes'))
fireEvent.click(screen.getByRole('button', { name: 'Save changes' }))
await waitFor(() => {
expect(onSave).toHaveBeenCalledWith(expect.objectContaining({ name: 'Updated dataset' }))
@@ -188,13 +192,11 @@ describe('dataset-config/card-item', () => {
})
it('should call onRemove and toggle destructive state on hover', async () => {
const user = userEvent.setup()
const dataset = createDataset()
const { onRemove } = renderItem(dataset)
const card = screen.getByText(dataset.name).closest('.group') as HTMLElement
const buttons = within(card).getAllByRole('button', { hidden: true })
const deleteButton = buttons[buttons.length - 1]
const [, deleteButton] = getActionButtons(card)
expect(deleteButton.className).not.toContain('action-btn-destructive')
@@ -205,7 +207,7 @@ describe('dataset-config/card-item', () => {
fireEvent.mouseLeave(deleteButton)
expect(deleteButton.className).not.toContain('action-btn-destructive')
await user.click(deleteButton)
fireEvent.click(deleteButton)
expect(onRemove).toHaveBeenCalledWith(dataset.id)
})
@@ -223,14 +225,13 @@ describe('dataset-config/card-item', () => {
it('should apply mask overlay on mobile when drawer is open', async () => {
mockedUseBreakpoints.mockReturnValue(MediaType.mobile)
const user = userEvent.setup()
const dataset = createDataset()
renderItem(dataset)
const card = screen.getByText(dataset.name).closest('.group') as HTMLElement
const [editButton] = within(card).getAllByRole('button', { hidden: true })
await user.click(editButton)
const [editButton] = getActionButtons(card)
fireEvent.click(editButton)
expect(screen.getByText('Mock settings modal')).toBeInTheDocument()
const overlay = Array.from(document.querySelectorAll('[class]'))

View File

@@ -38,7 +38,7 @@ const DeprecationNotice: FC<DeprecationNoticeProps> = ({
iconWrapperClassName,
textClassName,
}) => {
const { t } = useTranslation('plugin')
const { t } = useTranslation()
const deprecatedReasonKey = useMemo(() => {
if (!deprecatedReason)

View File

@@ -18,25 +18,25 @@ const StatusContainer: FC<Props> = ({
return (
<div
className={cn(
'relative break-all rounded-lg border px-3 py-2.5 system-xs-regular',
'system-xs-regular relative break-all rounded-lg border px-3 py-2.5',
status === 'succeeded' && 'border-[rgba(23,178,106,0.8)] bg-workflow-display-success-bg bg-[url(~@/app/components/workflow/run/assets/bg-line-success.svg)] text-text-success',
status === 'succeeded' && theme === Theme.light && 'shadow-[inset_2px_2px_0_0_rgba(255,255,255,0.5),inset_0_1px_3px_0_rgba(0,0,0,0.12),inset_0_2px_24px_0_rgba(23,178,106,0.2),0_1px_2px_0_rgba(9,9,11,0.05),0_0_0_1px_rgba(0,0,0,0.05)]',
status === 'succeeded' && theme === Theme.dark && 'shadow-[inset_2px_2px_0_0_rgba(255,255,255,0.12),inset_0_1px_3px_0_rgba(0,0,0,0.4),inset_0_2px_24px_0_rgba(23,178,106,0.25),0_1px_2px_0_rgba(0,0,0,0.1),0_0_0_1px_rgba(24,24,27,0.95)]',
status === 'succeeded' && theme === Theme.dark && 'shadow-[inset_2px_2px_0_0_rgba(255,255,255,0.12),inset_0_1px_3px_0_rgba(0,0,0,0.4),inset_0_2px_24px_0_rgba(23,178,106,0.25),0_1px_2px_0_rgba(0,0,0,0.1),0_0_0_1px_rgba(24, 24, 27, 0.95)]',
status === 'partial-succeeded' && 'border-[rgba(23,178,106,0.8)] bg-workflow-display-success-bg bg-[url(~@/app/components/workflow/run/assets/bg-line-success.svg)] text-text-success',
status === 'partial-succeeded' && theme === Theme.light && 'shadow-[inset_2px_2px_0_0_rgba(255,255,255,0.5),inset_0_1px_3px_0_rgba(0,0,0,0.12),inset_0_2px_24px_0_rgba(23,178,106,0.2),0_1px_2px_0_rgba(9,9,11,0.05),0_0_0_1px_rgba(0,0,0,0.05)]',
status === 'partial-succeeded' && theme === Theme.dark && 'shadow-[inset_2px_2px_0_0_rgba(255,255,255,0.12),inset_0_1px_3px_0_rgba(0,0,0,0.4),inset_0_2px_24px_0_rgba(23,178,106,0.25),0_1px_2px_0_rgba(0,0,0,0.1),0_0_0_1px_rgba(24,24,27,0.95)]',
status === 'partial-succeeded' && theme === Theme.dark && 'shadow-[inset_2px_2px_0_0_rgba(255,255,255,0.12),inset_0_1px_3px_0_rgba(0,0,0,0.4),inset_0_2px_24px_0_rgba(23,178,106,0.25),0_1px_2px_0_rgba(0,0,0,0.1),0_0_0_1px_rgba(24, 24, 27, 0.95)]',
status === 'failed' && 'border-[rgba(240,68,56,0.8)] bg-workflow-display-error-bg bg-[url(~@/app/components/workflow/run/assets/bg-line-error.svg)] text-text-warning',
status === 'failed' && theme === Theme.light && 'shadow-[inset_2px_2px_0_0_rgba(255,255,255,0.5),inset_0_1px_3px_0_rgba(0,0,0,0.12),inset_0_2px_24px_0_rgba(240,68,56,0.2),0_1px_2px_0_rgba(9,9,11,0.05),0_0_0_1px_rgba(0,0,0,0.05)]',
status === 'failed' && theme === Theme.dark && 'shadow-[inset_2px_2px_0_0_rgba(255,255,255,0.12),inset_0_1px_3px_0_rgba(0,0,0,0.4),inset_0_2px_24px_0_rgba(240,68,56,0.25),0_1px_2px_0_rgba(0,0,0,0.1),0_0_0_1px_rgba(24,24,27,0.95)]',
status === 'failed' && theme === Theme.dark && 'shadow-[inset_2px_2px_0_0_rgba(255,255,255,0.12),inset_0_1px_3px_0_rgba(0,0,0,0.4),inset_0_2px_24px_0_rgba(240,68,56,0.25),0_1px_2px_0_rgba(0,0,0,0.1),0_0_0_1px_rgba(24, 24, 27, 0.95)]',
(status === 'stopped' || status === 'paused') && 'border-[rgba(247,144,9,0.8)] bg-workflow-display-warning-bg bg-[url(~@/app/components/workflow/run/assets/bg-line-warning.svg)] text-text-destructive',
(status === 'stopped' || status === 'paused') && theme === Theme.light && 'shadow-[inset_2px_2px_0_0_rgba(255,255,255,0.5),inset_0_1px_3px_0_rgba(0,0,0,0.12),inset_0_2px_24px_0_rgba(247,144,9,0.2),0_1px_2px_0_rgba(9,9,11,0.05),0_0_0_1px_rgba(0,0,0,0.05)]',
(status === 'stopped' || status === 'paused') && theme === Theme.dark && 'shadow-[inset_2px_2px_0_0_rgba(255,255,255,0.12),inset_0_1px_3px_0_rgba(0,0,0,0.4),inset_0_2px_24px_0_rgba(247,144,9,0.25),0_1px_2px_0_rgba(0,0,0,0.1),0_0_0_1px_rgba(24,24,27,0.95)]',
(status === 'stopped' || status === 'paused') && theme === Theme.dark && 'shadow-[inset_2px_2px_0_0_rgba(255,255,255,0.12),inset_0_1px_3px_0_rgba(0,0,0,0.4),inset_0_2px_24px_0_rgba(247,144,9,0.25),0_1px_2px_0_rgba(0,0,0,0.1),0_0_0_1px_rgba(24, 24, 27, 0.95)]',
status === 'exception' && 'border-[rgba(247,144,9,0.8)] bg-workflow-display-warning-bg bg-[url(~@/app/components/workflow/run/assets/bg-line-warning.svg)] text-text-destructive',
status === 'exception' && theme === Theme.light && 'shadow-[inset_2px_2px_0_0_rgba(255,255,255,0.5),inset_0_1px_3px_0_rgba(0,0,0,0.12),inset_0_2px_24px_0_rgba(247,144,9,0.2),0_1px_2px_0_rgba(9,9,11,0.05),0_0_0_1px_rgba(0,0,0,0.05)]',
status === 'exception' && theme === Theme.dark && 'shadow-[inset_2px_2px_0_0_rgba(255,255,255,0.12),inset_0_1px_3px_0_rgba(0,0,0,0.4),inset_0_2px_24px_0_rgba(247,144,9,0.25),0_1px_2px_0_rgba(0,0,0,0.1),0_0_0_1px_rgba(24,24,27,0.95)]',
status === 'exception' && theme === Theme.dark && 'shadow-[inset_2px_2px_0_0_rgba(255,255,255,0.12),inset_0_1px_3px_0_rgba(0,0,0,0.4),inset_0_2px_24px_0_rgba(247,144,9,0.25),0_1px_2px_0_rgba(0,0,0,0.1),0_0_0_1px_rgba(24, 24, 27, 0.95)]',
status === 'running' && 'border-[rgba(11,165,236,0.8)] bg-workflow-display-normal-bg bg-[url(~@/app/components/workflow/run/assets/bg-line-running.svg)] text-util-colors-blue-light-blue-light-600',
status === 'running' && theme === Theme.light && 'shadow-[inset_2px_2px_0_0_rgba(255,255,255,0.5),inset_0_1px_3px_0_rgba(0,0,0,0.12),inset_0_2px_24px_0_rgba(11,165,236,0.2),0_1px_2px_0_rgba(9,9,11,0.05),0_0_0_1px_rgba(0,0,0,0.05)]',
status === 'running' && theme === Theme.dark && 'shadow-[inset_2px_2px_0_0_rgba(255,255,255,0.12),inset_0_1px_3px_0_rgba(0,0,0,0.4),inset_0_2px_24px_0_rgba(11,165,236,0.25),0_1px_2px_0_rgba(0,0,0,0.1),0_0_0_1px_rgba(24,24,27,0.95)]',
status === 'running' && theme === Theme.dark && 'shadow-[inset_2px_2px_0_0_rgba(255,255,255,0.12),inset_0_1px_3px_0_rgba(0,0,0,0.4),inset_0_2px_24px_0_rgba(11,165,236,0.25),0_1px_2px_0_rgba(0,0,0,0.1),0_0_0_1px_rgba(24, 24, 27, 0.95)]',
)}
>
<div className={cn(

File diff suppressed because it is too large.

View File

@@ -2,7 +2,9 @@ import consistentPlaceholders from './rules/consistent-placeholders.js'
import noAsAnyInT from './rules/no-as-any-in-t.js'
import noExtraKeys from './rules/no-extra-keys.js'
import noLegacyNamespacePrefix from './rules/no-legacy-namespace-prefix.js'
import noVersionPrefix from './rules/no-version-prefix.js'
import requireNsOption from './rules/require-ns-option.js'
import validI18nKeys from './rules/valid-i18n-keys.js'
/** @type {import('eslint').ESLint.Plugin} */
const plugin = {
@@ -15,7 +17,9 @@ const plugin = {
'no-as-any-in-t': noAsAnyInT,
'no-extra-keys': noExtraKeys,
'no-legacy-namespace-prefix': noLegacyNamespacePrefix,
'no-version-prefix': noVersionPrefix,
'require-ns-option': requireNsOption,
'valid-i18n-keys': validI18nKeys,
},
}

View File

@@ -0,0 +1,45 @@
const DEPENDENCY_KEYS = ['dependencies', 'devDependencies', 'peerDependencies', 'optionalDependencies']
const VERSION_PREFIXES = ['^', '~']
/** @type {import('eslint').Rule.RuleModule} */
export default {
meta: {
type: 'problem',
docs: {
description: `Ensure package.json dependencies do not use version prefixes (${VERSION_PREFIXES.join(' or ')})`,
},
fixable: 'code',
},
create(context) {
const { filename } = context
if (!filename.endsWith('package.json'))
return {}
const selector = `JSONProperty:matches(${DEPENDENCY_KEYS.map(k => `[key.value="${k}"]`).join(', ')}) > JSONObjectExpression > JSONProperty`
return {
[selector](node) {
const versionNode = node.value
if (versionNode && versionNode.type === 'JSONLiteral' && typeof versionNode.value === 'string') {
const version = versionNode.value
const foundPrefix = VERSION_PREFIXES.find(prefix => version.startsWith(prefix))
if (foundPrefix) {
const packageName = node.key.value || node.key.name
const cleanVersion = version.substring(1)
const canAutoFix = /^\d+\.\d+\.\d+$/.test(cleanVersion)
context.report({
node: versionNode,
message: `Dependency "${packageName}" has version prefix "${foundPrefix}" that should be removed (found: "${version}", expected: "${cleanVersion}")`,
fix: canAutoFix
? fixer => fixer.replaceText(versionNode, `"${cleanVersion}"`)
: undefined,
})
}
}
},
}
},
}

View File

@@ -0,0 +1,61 @@
import { cleanJsonText } from '../utils.js'
/** @type {import('eslint').Rule.RuleModule} */
export default {
meta: {
type: 'problem',
docs: {
description: 'Ensure i18n JSON keys are flat and valid as object paths',
},
},
create(context) {
return {
Program(node) {
const { filename, sourceCode } = context
if (!filename.endsWith('.json'))
return
let json
try {
json = JSON.parse(cleanJsonText(sourceCode.text))
}
catch {
context.report({
node,
message: 'Invalid JSON format',
})
return
}
const keys = Object.keys(json)
const keyPrefixes = new Set()
for (const key of keys) {
if (key.includes('.')) {
const parts = key.split('.')
for (let i = 1; i < parts.length; i++) {
const prefix = parts.slice(0, i).join('.')
if (keys.includes(prefix)) {
context.report({
node,
message: `Invalid key structure: '${key}' conflicts with '${prefix}'`,
})
}
keyPrefixes.add(prefix)
}
}
}
for (const key of keys) {
if (keyPrefixes.has(key)) {
context.report({
node,
message: `Invalid key structure: '${key}' is a prefix of another key`,
})
}
}
},
}
},
}

File diff suppressed because it is too large.

View File

@@ -1,8 +1,7 @@
// @ts-check
import antfu, { GLOB_TESTS, GLOB_TS, GLOB_TSX } from '@antfu/eslint-config'
import antfu from '@antfu/eslint-config'
import pluginQuery from '@tanstack/eslint-plugin-query'
import tailwindcss from 'eslint-plugin-better-tailwindcss'
import hyoban from 'eslint-plugin-hyoban'
import sonar from 'eslint-plugin-sonarjs'
import storybook from 'eslint-plugin-storybook'
import dify from './eslint-rules/index.js'
@@ -68,8 +67,7 @@ export default antfu(
},
},
{
files: [GLOB_TS, GLOB_TSX],
ignores: GLOB_TESTS,
files: ['**/*.{ts,tsx}'],
plugins: {
tailwindcss,
},
@@ -81,47 +79,7 @@ export default antfu(
},
},
{
name: 'dify/custom/setup',
plugins: {
dify,
hyoban,
},
},
{
files: ['**/*.tsx'],
rules: {
'hyoban/prefer-tailwind-icons': ['warn', {
prefix: 'i-',
propMappings: {
size: 'size',
width: 'w',
height: 'h',
},
libraries: [
{
prefix: 'i-custom-',
source: '^@/app/components/base/icons/src/(?<set>(?:public|vender)(?:/.*)?)$',
name: '^(?<name>.*)$',
},
{
source: '^@remixicon/react$',
name: '^(?<set>Ri)(?<name>.+)$',
},
{
source: '^@(?<set>heroicons)/react/24/outline$',
name: '^(?<name>.*)Icon$',
},
{
source: '^@(?<set>heroicons)/react/24/(?<variant>solid)$',
name: '^(?<name>.*)Icon$',
},
{
source: '^@(?<set>heroicons)/react/(?<variant>\\d+/(?:solid|outline))$',
name: '^(?<name>.*)Icon$',
},
],
}],
},
plugins: { dify },
},
{
files: ['i18n/**/*.json'],
@@ -130,7 +88,7 @@ export default antfu(
'max-lines': 'off',
'jsonc/sort-keys': 'error',
'hyoban/i18n-flat-key': 'error',
'dify/valid-i18n-keys': 'error',
'dify/no-extra-keys': 'error',
'dify/consistent-placeholders': 'error',
},
@@ -138,7 +96,7 @@ export default antfu(
{
files: ['**/package.json'],
rules: {
'hyoban/no-dependency-version-prefix': 'error',
'dify/no-version-prefix': 'error',
},
},
)

View File

@@ -1,7 +1,7 @@
'use client'
import type { Resource } from 'i18next'
import type { Locale } from '.'
import type { Namespace, NamespaceInFileName } from './resources'
import type { NamespaceCamelCase, NamespaceKebabCase } from './resources'
import { kebabCase } from 'es-toolkit/string'
import { createInstance } from 'i18next'
import resourcesToBackend from 'i18next-resources-to-backend'
@@ -14,7 +14,7 @@ export function createI18nextInstance(lng: Locale, resources: Resource) {
.use(initReactI18next)
.use(resourcesToBackend((
language: Locale,
namespace: NamespaceInFileName | Namespace,
namespace: NamespaceKebabCase | NamespaceCamelCase,
) => {
const namespaceKebab = kebabCase(namespace)
return import(`../i18n/${language}/${namespaceKebab}.json`)

View File

@@ -1,9 +1,9 @@
'use client'
import type { Namespace } from './resources'
import type { NamespaceCamelCase } from './resources'
import { useTranslation as useTranslationOriginal } from 'react-i18next'
export function useTranslation<T extends Namespace | undefined = undefined>(ns?: T) {
export function useTranslation(ns?: NamespaceCamelCase) {
return useTranslationOriginal(ns)
}
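
Call sites keep their shape after dropping the generic; a sketch of typical client-side usage (the file path, namespace choice, and key are assumptions for illustration only):

'use client'
import { useTranslation } from '@/i18n-config/use-translation' // path is an assumption

export function ExampleTitle() {
  // 'appDebug' is the camelCase namespace; resources are loaded from
  // i18n/<locale>/app-debug.json via the kebabCase mapping shown earlier in this diff.
  const { t } = useTranslation('appDebug')
  return <h1>{t('pageTitle')}</h1> // 'pageTitle' is a placeholder key
}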

View File

@@ -1,13 +1,13 @@
import type { Namespace } from './resources'
import type { NamespaceCamelCase } from './resources'
import { use } from 'react'
import { getLocaleOnServer, getTranslation } from './server'
async function getI18nConfig<T extends Namespace | undefined = undefined>(ns?: T) {
async function getI18nConfig(ns?: NamespaceCamelCase) {
const lang = await getLocaleOnServer()
return getTranslation(lang, ns)
}
export function useTranslation<T extends Namespace | undefined = undefined>(ns?: T) {
export function useTranslation(ns?: NamespaceCamelCase) {
return use(getI18nConfig(ns))
}

View File

@@ -1,5 +1,4 @@
import { kebabCase } from 'string-ts'
import { ObjectKeys } from '@/utils/object'
import { kebabCase } from 'es-toolkit/string'
import appAnnotation from '../i18n/en-US/app-annotation.json'
import appApi from '../i18n/en-US/app-api.json'
import appDebug from '../i18n/en-US/app-debug.json'
@@ -65,10 +64,19 @@ const resources = {
workflow,
}
export type KebabCase<S extends string> = S extends `${infer T}${infer U}`
? T extends Lowercase<T>
? `${T}${KebabCase<U>}`
: `-${Lowercase<T>}${KebabCase<U>}`
: S
export type CamelCase<S extends string> = S extends `${infer T}-${infer U}`
? `${T}${Capitalize<CamelCase<U>>}`
: S
export type Resources = typeof resources
export type NamespaceCamelCase = keyof Resources
export type NamespaceKebabCase = KebabCase<NamespaceCamelCase>
export const namespaces = ObjectKeys(resources)
export type Namespace = typeof namespaces[number]
export const namespacesInFileName = namespaces.map(ns => kebabCase(ns))
export type NamespaceInFileName = typeof namespacesInFileName[number]
export const namespacesCamelCase = Object.keys(resources) as NamespaceCamelCase[]
export const namespacesKebabCase = namespacesCamelCase.map(ns => kebabCase(ns)) as NamespaceKebabCase[]
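
The two helper types are pure type-level transforms; a quick sketch of what they produce for the appDebug/app-debug pair visible in the imports above (the alias names and the relative import path are illustrative):

import type { CamelCase, KebabCase } from './resources'

// KebabCase lowercases and dash-prefixes each uppercase letter;
// CamelCase reverses the transform.
type _Kebab = KebabCase<'appDebug'>   // resolves to 'app-debug'
type _Camel = CamelCase<'app-debug'>  // resolves to 'appDebug'

// namespacesCamelCase therefore contains e.g. 'appDebug', while
// namespacesKebabCase contains the matching file name 'app-debug'.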

View File

@@ -1,6 +1,6 @@
import type { i18n as I18nInstance, Resource, ResourceLanguage } from 'i18next'
import type { Locale } from '.'
import type { Namespace, NamespaceInFileName } from './resources'
import type { NamespaceCamelCase, NamespaceKebabCase } from './resources'
import { match } from '@formatjs/intl-localematcher'
import { kebabCase } from 'es-toolkit/compat'
import { camelCase } from 'es-toolkit/string'
@@ -12,7 +12,7 @@ import { cache } from 'react'
import { initReactI18next } from 'react-i18next/initReactI18next'
import { serverOnlyContext } from '@/utils/server-only-context'
import { i18n } from '.'
import { namespacesInFileName } from './resources'
import { namespacesKebabCase } from './resources'
import { getInitOptions } from './settings'
const [getLocaleCache, setLocaleCache] = serverOnlyContext<Locale | null>(null)
@@ -26,8 +26,8 @@ const getOrCreateI18next = async (lng: Locale) => {
instance = createInstance()
await instance
.use(initReactI18next)
.use(resourcesToBackend((language: Locale, namespace: Namespace | NamespaceInFileName) => {
const fileNamespace = kebabCase(namespace)
.use(resourcesToBackend((language: Locale, namespace: NamespaceCamelCase | NamespaceKebabCase) => {
const fileNamespace = kebabCase(namespace) as NamespaceKebabCase
return import(`../i18n/${language}/${fileNamespace}.json`)
}))
.init({
@@ -38,7 +38,7 @@ const getOrCreateI18next = async (lng: Locale) => {
return instance
}
export async function getTranslation<T extends Namespace>(lng: Locale, ns?: T) {
export async function getTranslation(lng: Locale, ns?: NamespaceCamelCase) {
const i18nextInstance = await getOrCreateI18next(lng)
if (ns && !i18nextInstance.hasLoadedNamespace(ns))
@@ -84,7 +84,7 @@ export const getResources = cache(async (lng: Locale): Promise<Resource> => {
const messages = {} as ResourceLanguage
await Promise.all(
(namespacesInFileName).map(async (ns) => {
(namespacesKebabCase).map(async (ns) => {
const mod = await import(`../i18n/${lng}/${ns}.json`)
messages[camelCase(ns)] = mod.default
}),
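
A tiny sketch of the namespace round-trip getResources performs for each entry (the locale value is illustrative; camelCase is the es-toolkit import used above):

import { camelCase } from 'es-toolkit/string'

// For one entry of namespacesKebabCase:
const ns = 'app-debug'
// dynamic import path: `../i18n/en-US/app-debug.json` (locale illustrative)
const key = camelCase(ns) // 'appDebug' — the key written into `messages`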

View File

@@ -1,5 +1,5 @@
import type { InitOptions } from 'i18next'
import { namespaces } from './resources'
import { namespacesCamelCase } from './resources'
export function getInitOptions(): InitOptions {
return {
@@ -8,7 +8,7 @@ export function getInitOptions(): InitOptions {
fallbackLng: 'en-US',
partialBundledLanguages: true,
keySeparator: false,
ns: namespaces,
ns: namespacesCamelCase,
interpolation: {
escapeValue: false,
},

View File

@@ -31,8 +31,8 @@
"build": "next build",
"build:docker": "next build && node scripts/optimize-standalone.js",
"start": "node ./scripts/copy-and-start.mjs",
"lint": "eslint --cache --concurrency=auto",
"lint:ci": "eslint --cache --concurrency 2",
"lint": "eslint --cache --concurrency=\"auto\"",
"lint:ci": "eslint --cache --concurrency 3",
"lint:fix": "pnpm lint --fix",
"lint:quiet": "pnpm lint --quiet",
"lint:complexity": "pnpm lint --rule 'complexity: [error, {max: 15}]' --quiet",
@@ -166,10 +166,7 @@
"devDependencies": {
"@antfu/eslint-config": "7.2.0",
"@chromatic-com/storybook": "5.0.0",
"@egoist/tailwindcss-icons": "1.9.2",
"@eslint-react/eslint-plugin": "2.9.4",
"@iconify-json/heroicons": "1.2.3",
"@iconify-json/ri": "1.2.9",
"@mdx-js/loader": "3.1.1",
"@mdx-js/react": "3.1.1",
"@next/bundle-analyzer": "16.1.5",
@@ -197,8 +194,7 @@
"@types/js-cookie": "3.0.6",
"@types/js-yaml": "4.0.9",
"@types/negotiator": "0.6.4",
"@types/node": "24.10.12",
"@types/postcss-js": "4.1.0",
"@types/node": "18.15.0",
"@types/qs": "6.14.0",
"@types/react": "19.2.9",
"@types/react-dom": "19.2.3",
@@ -217,21 +213,18 @@
"cross-env": "10.1.0",
"esbuild": "0.27.2",
"eslint": "9.39.2",
"eslint-plugin-better-tailwindcss": "https://pkg.pr.new/hyoban/eslint-plugin-better-tailwindcss@c0161c7",
"eslint-plugin-hyoban": "0.11.1",
"eslint-plugin-better-tailwindcss": "4.1.1",
"eslint-plugin-react-hooks": "7.0.1",
"eslint-plugin-react-refresh": "0.5.0",
"eslint-plugin-sonarjs": "3.0.6",
"eslint-plugin-storybook": "10.2.6",
"husky": "9.1.7",
"iconify-import-svg": "0.1.1",
"jsdom": "27.3.0",
"jsdom-testing-mocks": "1.16.0",
"knip": "5.78.0",
"lint-staged": "15.5.2",
"nock": "14.0.10",
"postcss": "8.5.6",
"postcss-js": "5.0.3",
"react-scan": "0.4.3",
"sass": "1.93.2",
"serwist": "9.5.4",

560 web/pnpm-lock.yaml generated

File diff suppressed because it is too large

View File

@@ -1,18 +1,8 @@
import path from 'node:path'
import { fileURLToPath } from 'node:url'
import { getIconCollections, iconsPlugin } from '@egoist/tailwindcss-icons'
import tailwindTypography from '@tailwindcss/typography'
import { importSvgCollections } from 'iconify-import-svg'
// @ts-expect-error workaround for turbopack issue
import { cssAsPlugin } from './tailwind-css-plugin.ts'
// @ts-expect-error workaround for turbopack issue
import tailwindThemeVarDefine from './themes/tailwind-theme-var-define.ts'
import typography from './typography.js'
const _dirname = typeof __dirname !== 'undefined'
? __dirname
: path.dirname(fileURLToPath(import.meta.url))
const config = {
theme: {
typography,
@@ -158,32 +148,7 @@ const config = {
},
},
},
plugins: [
tailwindTypography,
iconsPlugin({
collections: {
...getIconCollections(['heroicons', 'ri']),
...importSvgCollections({
source: path.resolve(_dirname, 'app/components/base/icons/assets/public'),
prefix: 'custom-public',
ignoreImportErrors: true,
}),
...importSvgCollections({
source: path.resolve(_dirname, 'app/components/base/icons/assets/vender'),
prefix: 'custom-vender',
ignoreImportErrors: true,
}),
},
extraProperties: {
width: '1rem',
height: '1rem',
display: 'block',
},
}),
cssAsPlugin([
path.resolve(_dirname, './app/styles/globals.css'),
]),
],
plugins: [tailwindTypography],
// https://github.com/tailwindlabs/tailwindcss/discussions/5969
corePlugins: {
preflight: false,

View File

@@ -1,25 +0,0 @@
// Credits:
// https://github.com/tailwindlabs/tailwindcss-intellisense/issues/227
import type { PluginCreator } from 'tailwindcss/types/config'
import { readFileSync } from 'node:fs'
import { parse } from 'postcss'
import { objectify } from 'postcss-js'
export const cssAsPlugin: (cssPath: string[]) => PluginCreator = (cssPath: string[]) => {
if (process.env.NODE_ENV === 'production') {
return () => {}
}
return ({ addUtilities, addComponents, addBase }) => {
const jssList = cssPath.map(p => objectify(parse(readFileSync(p, 'utf8'))))
for (const jss of jssList) {
if (jss['@layer utilities'])
addUtilities(jss['@layer utilities'])
if (jss['@layer components'])
addComponents(jss['@layer components'])
if (jss['@layer base'])
addBase(jss['@layer base'])
}
}
}

7 web/types/i18n.d.ts vendored
View File

@@ -1,16 +1,17 @@
import type { Namespace, Resources } from '../i18n-config/resources'
import type { NamespaceCamelCase, Resources } from '../i18n-config/resources'
import 'i18next'
declare module 'i18next' {
// eslint-disable-next-line ts/consistent-type-definitions
interface CustomTypeOptions {
defaultNS: 'common'
resources: Resources
keySeparator: false
}
}
export type I18nKeysByPrefix<
NS extends Namespace,
NS extends NamespaceCamelCase,
Prefix extends string = '',
> = Prefix extends ''
? keyof Resources[NS]
@@ -21,7 +22,7 @@ export type I18nKeysByPrefix<
: never
export type I18nKeysWithPrefix<
NS extends Namespace,
NS extends NamespaceCamelCase,
Prefix extends string = '',
> = Prefix extends ''
? keyof Resources[NS]
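
A sketch of the only branch fully visible in this hunk, the empty prefix, where both helpers collapse to every key of the namespace (the import path and alias name are assumptions):

import type { I18nKeysByPrefix } from '@/types/i18n' // path is an assumption

// With the default empty prefix this is simply keyof Resources['common'],
// i.e. every flat key declared for the common namespace.
type AllCommonKeys = I18nKeysByPrefix<'common'>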

View File

@@ -1,7 +0,0 @@
export function ObjectFromEntries<const T extends ReadonlyArray<readonly [PropertyKey, unknown]>>(entries: T): { [K in T[number]as K[0]]: K[1] } {
return Object.fromEntries(entries) as { [K in T[number]as K[0]]: K[1] }
}
export function ObjectKeys<const T extends Record<string, unknown>>(obj: T): (keyof T)[] {
return Object.keys(obj) as (keyof T)[]
}