Mirror of https://github.com/langgenius/dify.git (synced 2026-02-13 20:24:24 +00:00)

Compare commits: 2-9-stable...feat/notif (12 commits)
| SHA1 |
|---|
| c243e91668 |
| 004fbbe52b |
| 63fb0ddde5 |
| 7dabc03a08 |
| 1a050c9f86 |
| 7fb6e0cdfe |
| e0fcf33979 |
| 898e09264b |
| 4ac461d882 |
| fa763216d0 |
| d546210040 |
| 4e0a7a7f9e |
@@ -39,6 +39,7 @@ from . import (
    feature,
    human_input_form,
    init_validate,
    notification,
    ping,
    setup,
    spec,
@@ -184,6 +185,7 @@ __all__ = [
    "model_config",
    "model_providers",
    "models",
    "notification",
    "oauth",
    "oauth_server",
    "ops_trace",
@@ -1,3 +1,5 @@
import csv
import io
from collections.abc import Callable
from functools import wraps
from typing import ParamSpec, TypeVar
@@ -6,7 +8,7 @@ from flask import request
from flask_restx import Resource
from pydantic import BaseModel, Field, field_validator
from sqlalchemy import select
from werkzeug.exceptions import NotFound, Unauthorized
from werkzeug.exceptions import BadRequest, NotFound, Unauthorized

from configs import dify_config
from constants.languages import supported_language
@@ -16,6 +18,7 @@ from core.db.session_factory import session_factory
from extensions.ext_database import db
from libs.token import extract_access_token
from models.model import App, ExporleBanner, InstalledApp, RecommendedApp, TrialApp
from services.billing_service import BillingService

P = ParamSpec("P")
R = TypeVar("R")
@@ -277,3 +280,115 @@ class DeleteExploreBannerApi(Resource):
        db.session.commit()

        return {"result": "success"}, 204


class SaveNotificationContentPayload(BaseModel):
    content: str = Field(...)


class SaveNotificationUserPayload(BaseModel):
    user_email: list[str] = Field(...)


console_ns.schema_model(
    SaveNotificationContentPayload.__name__,
    SaveNotificationContentPayload.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0),
)

console_ns.schema_model(
    SaveNotificationUserPayload.__name__,
    SaveNotificationUserPayload.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0),
)


@console_ns.route("/admin/save_notification_content")
class SaveNotificationContentApi(Resource):
    @console_ns.doc("save_notification_content")
    @console_ns.doc(description="Save a notification content")
    @console_ns.expect(console_ns.models[SaveNotificationContentPayload.__name__])
    @console_ns.response(200, "Notification content saved successfully")
    @only_edition_cloud
    @admin_required
    def post(self):
        payload = SaveNotificationContentPayload.model_validate(console_ns.payload)
        BillingService.save_notification_content(payload.content)
        return {"result": "success"}, 200


@console_ns.route("/admin/save_notification_user")
class SaveNotificationUserApi(Resource):
    @console_ns.doc("save_notification_user")
    @console_ns.doc(
        description="Save notification users via JSON body or file upload. "
        'JSON: {"user_email": ["a@example.com", ...]}. '
        "File: multipart/form-data with a 'file' field (CSV or TXT, one email per line)."
    )
    @console_ns.response(200, "Notification users saved successfully")
    @only_edition_cloud
    @admin_required
    def post(self):
        # Determine input mode: file upload or JSON body
        if "file" in request.files:
            emails = self._parse_emails_from_file()
        else:
            payload = SaveNotificationUserPayload.model_validate(console_ns.payload)
            emails = payload.user_email

        if not emails:
            raise BadRequest("No valid email addresses provided.")

        # Use batch API for bulk insert (chunks of 1000 per request to billing service)
        result = BillingService.save_notification_users_batch(emails)

        return {
            "result": "success",
            "total": len(emails),
            "succeeded": result["succeeded"],
            "failed_chunks": result["failed_chunks"],
        }, 200

    @staticmethod
    def _parse_emails_from_file() -> list[str]:
        """Parse email addresses from an uploaded CSV or TXT file."""
        file = request.files["file"]

        if not file.filename:
            raise BadRequest("Uploaded file has no filename.")

        filename_lower = file.filename.lower()
        if not filename_lower.endswith((".csv", ".txt")):
            raise BadRequest("Invalid file type. Only CSV (.csv) and TXT (.txt) files are allowed.")

        # Read file content
        try:
            content = file.read().decode("utf-8")
        except UnicodeDecodeError:
            try:
                file.seek(0)
                content = file.read().decode("gbk")
            except UnicodeDecodeError:
                raise BadRequest("Unable to decode the file. Please use UTF-8 or GBK encoding.")

        emails: list[str] = []
        if filename_lower.endswith(".csv"):
            reader = csv.reader(io.StringIO(content))
            for row in reader:
                for cell in row:
                    cell = cell.strip()
                    emails.append(cell)
        else:
            # TXT file: one email per line
            for line in content.splitlines():
                line = line.strip()
                emails.append(line)

        # Deduplicate while preserving order
        seen: set[str] = set()
        unique_emails: list[str] = []
        for email in emails:
            email_lower = email.lower()
            if email_lower not in seen:
                seen.add(email_lower)
                unique_emails.append(email)

        return unique_emails
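For reference, a minimal client-side sketch of how the file-upload mode of the new endpoint could be exercised. The `/console/api` URL prefix, the placeholder host, and the bearer-token admin auth are assumptions for illustration; they are not confirmed by this diff.

```python
# Hypothetical usage sketch for SaveNotificationUserApi's file branch.
# BASE_URL, the /console/api prefix, and ADMIN_TOKEN are placeholders.
import requests

BASE_URL = "https://cloud.example.com/console/api"  # assumed mount point
ADMIN_TOKEN = "YOUR_ADMIN_TOKEN"                     # placeholder credential

with open("recipients.csv", "rb") as f:
    resp = requests.post(
        f"{BASE_URL}/admin/save_notification_user",
        headers={"Authorization": f"Bearer {ADMIN_TOKEN}"},
        # Sending a 'file' field triggers the CSV/TXT parsing branch above.
        files={"file": ("recipients.csv", f, "text/csv")},
    )

# Expected response shape per the handler: result, total, succeeded, failed_chunks.
print(resp.status_code, resp.json())
```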
api/controllers/console/notification.py (new file, 26 lines)
@@ -0,0 +1,26 @@
from flask_restx import Resource

from controllers.console import console_ns
from controllers.console.wraps import account_initialization_required, only_edition_cloud, setup_required
from libs.login import current_account_with_tenant, login_required
from services.billing_service import BillingService


@console_ns.route("/notification")
class NotificationApi(Resource):
    @console_ns.doc("get_notification")
    @console_ns.doc(description="Get notification for the current user")
    @console_ns.doc(
        responses={
            200: "Success",
            401: "Unauthorized",
        }
    )
    @setup_required
    @login_required
    @account_initialization_required
    @only_edition_cloud
    def get(self):
        current_user, _ = current_account_with_tenant()
        notification = BillingService.read_notification(current_user.email)
        return notification
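A matching read-side sketch: how a logged-in console client might fetch the current user's notification. The URL prefix and the bearer-token auth are assumptions; the endpoint simply proxies whatever `BillingService.read_notification()` returns.

```python
# Hypothetical client sketch for NotificationApi.get; the /console/api prefix
# and token-based console auth are assumptions, not part of this diff.
import requests

session = requests.Session()
session.headers["Authorization"] = "Bearer YOUR_CONSOLE_TOKEN"  # placeholder

resp = session.get("https://cloud.example.com/console/api/notification")
resp.raise_for_status()

# The body is the billing service's JSON payload for this user's email.
print(resp.json())
```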
@@ -34,6 +34,7 @@ from .dataset import (
    metadata,
    segment,
)
from .dataset.rag_pipeline import rag_pipeline_workflow
from .end_user import end_user
from .workspace import models

@@ -53,6 +54,7 @@ __all__ = [
    "message",
    "metadata",
    "models",
    "rag_pipeline_workflow",
    "segment",
    "site",
    "workflow",
@@ -1,5 +1,3 @@
import string
import uuid
from collections.abc import Generator
from typing import Any

@@ -12,6 +10,7 @@ from controllers.common.errors import FilenameNotExistsError, NoFileUploadedErro
from controllers.common.schema import register_schema_model
from controllers.service_api import service_api_ns
from controllers.service_api.dataset.error import PipelineRunError
from controllers.service_api.dataset.rag_pipeline.serializers import serialize_upload_file
from controllers.service_api.wraps import DatasetApiResource
from core.app.apps.pipeline.pipeline_generator import PipelineGenerator
from core.app.entities.app_invoke_entities import InvokeFrom
@@ -41,7 +40,7 @@ register_schema_model(service_api_ns, DatasourceNodeRunPayload)
register_schema_model(service_api_ns, PipelineRunApiEntity)


@service_api_ns.route(f"/datasets/{uuid:dataset_id}/pipeline/datasource-plugins")
@service_api_ns.route("/datasets/<uuid:dataset_id>/pipeline/datasource-plugins")
class DatasourcePluginsApi(DatasetApiResource):
    """Resource for datasource plugins."""

@@ -76,7 +75,7 @@ class DatasourcePluginsApi(DatasetApiResource):
        return datasource_plugins, 200


@service_api_ns.route(f"/datasets/{uuid:dataset_id}/pipeline/datasource/nodes/{string:node_id}/run")
@service_api_ns.route("/datasets/<uuid:dataset_id>/pipeline/datasource/nodes/<string:node_id>/run")
class DatasourceNodeRunApi(DatasetApiResource):
    """Resource for datasource node run."""

@@ -131,7 +130,7 @@ class DatasourceNodeRunApi(DatasetApiResource):
    )


@service_api_ns.route(f"/datasets/{uuid:dataset_id}/pipeline/run")
@service_api_ns.route("/datasets/<uuid:dataset_id>/pipeline/run")
class PipelineRunApi(DatasetApiResource):
    """Resource for datasource node run."""

@@ -232,12 +231,4 @@ class KnowledgebasePipelineFileUploadApi(DatasetApiResource):
        except services.errors.file.UnsupportedFileTypeError:
            raise UnsupportedFileTypeError()

        return {
            "id": upload_file.id,
            "name": upload_file.name,
            "size": upload_file.size,
            "extension": upload_file.extension,
            "mime_type": upload_file.mime_type,
            "created_by": upload_file.created_by,
            "created_at": upload_file.created_at,
        }, 201
        return serialize_upload_file(upload_file), 201
@@ -0,0 +1,22 @@
"""
Serialization helpers for Service API knowledge pipeline endpoints.
"""

from __future__ import annotations

from typing import TYPE_CHECKING, Any

if TYPE_CHECKING:
    from models.model import UploadFile


def serialize_upload_file(upload_file: UploadFile) -> dict[str, Any]:
    return {
        "id": upload_file.id,
        "name": upload_file.name,
        "size": upload_file.size,
        "extension": upload_file.extension,
        "mime_type": upload_file.mime_type,
        "created_by": upload_file.created_by,
        "created_at": upload_file.created_at.isoformat() if upload_file.created_at else None,
    }
@@ -217,6 +217,8 @@ def validate_dataset_token(view: Callable[Concatenate[T, P], R] | None = None):
    def decorator(view: Callable[Concatenate[T, P], R]):
        @wraps(view)
        def decorated(*args: P.args, **kwargs: P.kwargs):
            api_token = validate_and_get_api_token("dataset")

            # get url path dataset_id from positional args or kwargs
            # Flask passes URL path parameters as positional arguments
            dataset_id = None
@@ -253,12 +255,18 @@ def validate_dataset_token(view: Callable[Concatenate[T, P], R] | None = None):
            # Validate dataset if dataset_id is provided
            if dataset_id:
                dataset_id = str(dataset_id)
                dataset = db.session.query(Dataset).where(Dataset.id == dataset_id).first()
                dataset = (
                    db.session.query(Dataset)
                    .where(
                        Dataset.id == dataset_id,
                        Dataset.tenant_id == api_token.tenant_id,
                    )
                    .first()
                )
                if not dataset:
                    raise NotFound("Dataset not found.")
                if not dataset.enable_api:
                    raise Forbidden("Dataset api access is not enabled.")
            api_token = validate_and_get_api_token("dataset")
            tenant_account_join = (
                db.session.query(Tenant, TenantAccountJoin)
                .where(Tenant.id == api_token.tenant_id)
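The change above narrows the dataset lookup so an API token can only resolve datasets inside its own tenant; a dataset id belonging to another tenant now resolves to nothing instead of leaking across tenants. A standalone sketch of the same "scope by id and tenant" pattern with plain SQLAlchemy; the model and table names here are illustrative, not Dify's.

```python
# Illustrative only: generic tenant-scoped lookup mirroring the check added
# to validate_dataset_token. Model, table, and engine are made up for the demo.
from sqlalchemy import Column, String, create_engine, select
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()


class Dataset(Base):
    __tablename__ = "datasets"
    id = Column(String, primary_key=True)
    tenant_id = Column(String, nullable=False)


engine = create_engine("sqlite:///:memory:")
Base.metadata.create_all(engine)


def get_dataset_scoped(session: Session, dataset_id: str, tenant_id: str) -> Dataset | None:
    # Both predicates live in one WHERE clause, so a foreign tenant's id
    # simply returns None rather than another tenant's row.
    stmt = select(Dataset).where(Dataset.id == dataset_id, Dataset.tenant_id == tenant_id)
    return session.scalars(stmt).first()


with Session(engine) as s:
    s.add(Dataset(id="d1", tenant_id="t1"))
    s.commit()
    assert get_dataset_scoped(s, "d1", "t1") is not None
    assert get_dataset_scoped(s, "d1", "other-tenant") is None
```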
@@ -1,6 +1,6 @@
import functools
from collections.abc import Callable
from typing import Any, TypeVar, cast
from typing import ParamSpec, TypeVar, cast

from opentelemetry.trace import get_tracer

@@ -8,7 +8,8 @@ from configs import dify_config
from extensions.otel.decorators.handler import SpanHandler
from extensions.otel.runtime import is_instrument_flag_enabled

T = TypeVar("T", bound=Callable[..., Any])
P = ParamSpec("P")
R = TypeVar("R")

_HANDLER_INSTANCES: dict[type[SpanHandler], SpanHandler] = {SpanHandler: SpanHandler()}

@@ -20,7 +21,7 @@ def _get_handler_instance(handler_class: type[SpanHandler]) -> SpanHandler:
    return _HANDLER_INSTANCES[handler_class]


def trace_span(handler_class: type[SpanHandler] | None = None) -> Callable[[T], T]:
def trace_span(handler_class: type[SpanHandler] | None = None) -> Callable[[Callable[P, R]], Callable[P, R]]:
    """
    Decorator that traces a function with an OpenTelemetry span.

@@ -30,9 +31,9 @@ def trace_span(handler_class: type[SpanHandler] | None = None) -> Callable[[T],
    :param handler_class: Optional handler class to use for this span. If None, uses the default SpanHandler.
    """

    def decorator(func: T) -> T:
    def decorator(func: Callable[P, R]) -> Callable[P, R]:
        @functools.wraps(func)
        def wrapper(*args: Any, **kwargs: Any) -> Any:
        def wrapper(*args: P.args, **kwargs: P.kwargs) -> R:
            if not (dify_config.ENABLE_OTEL or is_instrument_flag_enabled()):
                return func(*args, **kwargs)

@@ -46,6 +47,6 @@ def trace_span(handler_class: type[SpanHandler] | None = None) -> Callable[[T],
                kwargs=kwargs,
            )

        return cast(T, wrapper)
        return cast(Callable[P, R], wrapper)

    return decorator
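The typing change above swaps a single `TypeVar` bound to `Callable` for a `ParamSpec`/`TypeVar` pair, so static checkers keep the decorated function's exact parameter list and return type. A minimal standalone sketch of the same shape (a generic illustration, not the Dify implementation):

```python
# Minimal ParamSpec-preserving decorator, the same shape trace_span now uses.
import functools
from collections.abc import Callable
from typing import ParamSpec, TypeVar

P = ParamSpec("P")
R = TypeVar("R")


def logged(func: Callable[P, R]) -> Callable[P, R]:
    @functools.wraps(func)
    def wrapper(*args: P.args, **kwargs: P.kwargs) -> R:
        print(f"calling {func.__name__}")
        return func(*args, **kwargs)

    return wrapper


@logged
def add(a: int, b: int) -> int:
    return a + b


# Type checkers now see add as (a: int, b: int) -> int, so add("x", 1) is flagged,
# whereas a Callable-bound TypeVar would erase the signature to (...) -> Any.
assert add(2, 3) == 5
```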
@@ -1,9 +1,11 @@
import inspect
from collections.abc import Callable, Mapping
from typing import Any
from typing import Any, TypeVar

from opentelemetry.trace import SpanKind, Status, StatusCode

R = TypeVar("R")


class SpanHandler:
    """
@@ -31,9 +33,9 @@ class SpanHandler:

    def _extract_arguments(
        self,
        wrapped: Callable[..., Any],
        args: tuple[Any, ...],
        kwargs: Mapping[str, Any],
        wrapped: Callable[..., R],
        args: tuple[object, ...],
        kwargs: Mapping[str, object],
    ) -> dict[str, Any] | None:
        """
        Extract function arguments using inspect.signature.
@@ -62,10 +64,10 @@ class SpanHandler:
    def wrapper(
        self,
        tracer: Any,
        wrapped: Callable[..., Any],
        args: tuple[Any, ...],
        kwargs: Mapping[str, Any],
    ) -> Any:
        wrapped: Callable[..., R],
        args: tuple[object, ...],
        kwargs: Mapping[str, object],
    ) -> R:
        """
        Fully control the wrapper behavior.
@@ -1,6 +1,6 @@
import logging
from collections.abc import Callable, Mapping
from typing import Any
from typing import Any, TypeVar

from opentelemetry.trace import SpanKind, Status, StatusCode
from opentelemetry.util.types import AttributeValue
@@ -12,16 +12,19 @@ from models.model import Account

logger = logging.getLogger(__name__)


R = TypeVar("R")


class AppGenerateHandler(SpanHandler):
    """Span handler for ``AppGenerateService.generate``."""

    def wrapper(
        self,
        tracer: Any,
        wrapped: Callable[..., Any],
        args: tuple[Any, ...],
        kwargs: Mapping[str, Any],
    ) -> Any:
        wrapped: Callable[..., R],
        args: tuple[object, ...],
        kwargs: Mapping[str, object],
    ) -> R:
        try:
            arguments = self._extract_arguments(wrapped, args, kwargs)
            if not arguments:
@@ -1225,7 +1225,12 @@ class TenantService:

    @staticmethod
    def remove_member_from_tenant(tenant: Tenant, account: Account, operator: Account):
        """Remove member from tenant"""
        """Remove member from tenant.

        If the removed member has ``AccountStatus.PENDING`` (invited but never
        activated) and no remaining workspace memberships, the orphaned account
        record is deleted as well.
        """
        if operator.id == account.id:
            raise CannotOperateSelfError("Cannot operate self.")

@@ -1235,9 +1240,31 @@ class TenantService:
        if not ta:
            raise MemberNotInTenantError("Member not in tenant.")

        # Capture identifiers before any deletions; attribute access on the ORM
        # object may fail after commit() expires the instance.
        account_id = account.id
        account_email = account.email

        db.session.delete(ta)

        # Clean up orphaned pending accounts (invited but never activated)
        should_delete_account = False
        if account.status == AccountStatus.PENDING:
            # autoflush flushes ta deletion before this query, so 0 means no remaining joins
            remaining_joins = db.session.query(TenantAccountJoin).filter_by(account_id=account_id).count()
            if remaining_joins == 0:
                db.session.delete(account)
                should_delete_account = True

        db.session.commit()

        if should_delete_account:
            logger.info(
                "Deleted orphaned pending account: account_id=%s, email=%s",
                account_id,
                account_email,
            )

        if dify_config.BILLING_ENABLED:
            BillingService.clean_billing_info_cache(tenant.id)

@@ -1245,13 +1272,13 @@ class TenantService:
            from services.enterprise.account_deletion_sync import sync_workspace_member_removal

            sync_success = sync_workspace_member_removal(
                workspace_id=tenant.id, member_id=account.id, source="workspace_member_removed"
                workspace_id=tenant.id, member_id=account_id, source="workspace_member_removed"
            )
            if not sync_success:
                logger.warning(
                    "Enterprise workspace member removal sync failed: workspace_id=%s, member_id=%s",
                    tenant.id,
                    account.id,
                    account_id,
                )

    @staticmethod
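The new branch deletes the account only when two conditions line up: the member is still PENDING, and no tenant joins remain once the current membership row is removed. A small pure-function sketch of that decision, with illustrative names rather than the service code:

```python
# Illustrative decision helper mirroring the orphan-cleanup rule added above.
from enum import Enum


class AccountStatus(str, Enum):
    PENDING = "pending"
    ACTIVE = "active"


def should_delete_orphaned_account(status: AccountStatus, remaining_joins: int) -> bool:
    # Delete only invited-but-never-activated accounts that no longer belong
    # to any workspace after the current membership row is gone.
    return status == AccountStatus.PENDING and remaining_joins == 0


assert should_delete_orphaned_account(AccountStatus.PENDING, 0) is True
assert should_delete_orphaned_account(AccountStatus.PENDING, 1) is False
assert should_delete_orphaned_account(AccountStatus.ACTIVE, 0) is False
```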
@@ -393,3 +393,35 @@ class BillingService:
        for item in data:
            tenant_whitelist.append(item["tenant_id"])
        return tenant_whitelist

    @classmethod
    def read_notification(cls, user_email: str):
        params = {"user_email": user_email}
        return cls._send_request("GET", "/notification/read", params=params)

    @classmethod
    def save_notification_user(cls, user_email: str):
        json = {"user_email": user_email}
        return cls._send_request("POST", "/notification/new-notification-user", json=json)

    @classmethod
    def save_notification_users_batch(cls, user_emails: list[str]) -> dict:
        """Batch save notification users in chunks of 1000."""
        chunk_size = 1000
        total_succeeded = 0
        failed_chunks: list[dict] = []

        for i in range(0, len(user_emails), chunk_size):
            chunk = user_emails[i : i + chunk_size]
            try:
                resp = cls._send_request("POST", "/notification/batch-notification-users", json={"user_emails": chunk})
                total_succeeded += resp.get("count", len(chunk))
            except Exception as e:
                failed_chunks.append({"offset": i, "count": len(chunk), "error": str(e)})

        return {"succeeded": total_succeeded, "failed_chunks": failed_chunks}

    @classmethod
    def save_notification_content(cls, content: str):
        json = {"content": content}
        return cls._send_request("POST", "/notification/new-notification", json=json)
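The batch helper above sends at most 1000 emails per request and records failures by chunk offset rather than aborting the whole upload. The chunking itself is plain range stepping; a standalone sketch of the arithmetic:

```python
# Standalone illustration of the chunking used by save_notification_users_batch.
def chunked(items: list[str], size: int = 1000) -> list[list[str]]:
    # range(0, len(items), size) yields offsets 0, size, 2*size, ...;
    # the final slice is simply shorter when len(items) is not a multiple of size.
    return [items[i : i + size] for i in range(0, len(items), size)]


emails = [f"user{i}@example.com" for i in range(2500)]
chunks = chunked(emails)
assert [len(c) for c in chunks] == [1000, 1000, 500]
# A failed third request would be reported as {"offset": 2000, "count": 500, "error": "..."}.
```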
@@ -1329,10 +1329,24 @@ class RagPipelineService:
        """
        Get datasource plugins
        """
        dataset: Dataset | None = db.session.query(Dataset).where(Dataset.id == dataset_id).first()
        dataset: Dataset | None = (
            db.session.query(Dataset)
            .where(
                Dataset.id == dataset_id,
                Dataset.tenant_id == tenant_id,
            )
            .first()
        )
        if not dataset:
            raise ValueError("Dataset not found")
        pipeline: Pipeline | None = db.session.query(Pipeline).where(Pipeline.id == dataset.pipeline_id).first()
        pipeline: Pipeline | None = (
            db.session.query(Pipeline)
            .where(
                Pipeline.id == dataset.pipeline_id,
                Pipeline.tenant_id == tenant_id,
            )
            .first()
        )
        if not pipeline:
            raise ValueError("Pipeline not found")

@@ -1413,10 +1427,24 @@ class RagPipelineService:
        """
        Get pipeline
        """
        dataset: Dataset | None = db.session.query(Dataset).where(Dataset.id == dataset_id).first()
        dataset: Dataset | None = (
            db.session.query(Dataset)
            .where(
                Dataset.id == dataset_id,
                Dataset.tenant_id == tenant_id,
            )
            .first()
        )
        if not dataset:
            raise ValueError("Dataset not found")
        pipeline: Pipeline | None = db.session.query(Pipeline).where(Pipeline.id == dataset.pipeline_id).first()
        pipeline: Pipeline | None = (
            db.session.query(Pipeline)
            .where(
                Pipeline.id == dataset.pipeline_id,
                Pipeline.tenant_id == tenant_id,
            )
            .first()
        )
        if not pipeline:
            raise ValueError("Pipeline not found")
        return pipeline
@@ -1,6 +1,7 @@
from flask_login import current_user

from configs import dify_config
from enums.cloud_plan import CloudPlan
from extensions.ext_database import db
from models.account import Tenant, TenantAccountJoin, TenantAccountRole
from services.account_service import TenantService
@@ -53,7 +54,12 @@ class WorkspaceService:
        from services.credit_pool_service import CreditPoolService

        paid_pool = CreditPoolService.get_pool(tenant_id=tenant.id, pool_type="paid")
        if paid_pool:
        # if the tenant is not on the sandbox plan and the paid pool is not full, use the paid pool
        if (
            feature.billing.subscription.plan != CloudPlan.SANDBOX
            and paid_pool is not None
            and (paid_pool.quota_limit == -1 or paid_pool.quota_limit > paid_pool.quota_used)
        ):
            tenant_info["trial_credits"] = paid_pool.quota_limit
            tenant_info["trial_credits_used"] = paid_pool.quota_used
        else:
@@ -23,40 +23,40 @@ def clean_notion_document_task(document_ids: list[str], dataset_id: str):
    """
    logger.info(click.style(f"Start clean document when import form notion document deleted: {dataset_id}", fg="green"))
    start_at = time.perf_counter()
    total_index_node_ids = []

    with session_factory.create_session() as session:
        try:
            dataset = session.query(Dataset).where(Dataset.id == dataset_id).first()
        dataset = session.query(Dataset).where(Dataset.id == dataset_id).first()

            if not dataset:
                raise Exception("Document has no dataset")
            index_type = dataset.doc_form
            index_processor = IndexProcessorFactory(index_type).init_index_processor()
        if not dataset:
            raise Exception("Document has no dataset")
        index_type = dataset.doc_form
        index_processor = IndexProcessorFactory(index_type).init_index_processor()

            document_delete_stmt = delete(Document).where(Document.id.in_(document_ids))
            session.execute(document_delete_stmt)
        document_delete_stmt = delete(Document).where(Document.id.in_(document_ids))
        session.execute(document_delete_stmt)

            for document_id in document_ids:
                segments = session.scalars(
                    select(DocumentSegment).where(DocumentSegment.document_id == document_id)
                ).all()
                index_node_ids = [segment.index_node_id for segment in segments]
        for document_id in document_ids:
            segments = session.scalars(select(DocumentSegment).where(DocumentSegment.document_id == document_id)).all()
            total_index_node_ids.extend([segment.index_node_id for segment in segments])

                index_processor.clean(
                    dataset, index_node_ids, with_keywords=True, delete_child_chunks=True, delete_summaries=True
                )
                segment_ids = [segment.id for segment in segments]
                segment_delete_stmt = delete(DocumentSegment).where(DocumentSegment.id.in_(segment_ids))
                session.execute(segment_delete_stmt)
            session.commit()
            end_at = time.perf_counter()
            logger.info(
                click.style(
                    "Clean document when import form notion document deleted end :: {} latency: {}".format(
                        dataset_id, end_at - start_at
                    ),
                    fg="green",
                )
    with session_factory.create_session() as session:
        dataset = session.query(Dataset).where(Dataset.id == dataset_id).first()
        if dataset:
            index_processor.clean(
                dataset, total_index_node_ids, with_keywords=True, delete_child_chunks=True, delete_summaries=True
            )
        except Exception:
            logger.exception("Cleaned document when import form notion document deleted failed")

    with session_factory.create_session() as session, session.begin():
        segment_delete_stmt = delete(DocumentSegment).where(DocumentSegment.document_id.in_(document_ids))
        session.execute(segment_delete_stmt)

    end_at = time.perf_counter()
    logger.info(
        click.style(
            "Clean document when import form notion document deleted end :: {} latency: {}".format(
                dataset_id, end_at - start_at
            ),
            fg="green",
        )
    )
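The reworked task splits its work across short-lived sessions instead of holding one session and one try block for everything: one session collects node ids and deletes document rows, a separate one is opened for the external index cleanup, and a fresh transactional one deletes segments. A schematic, self-contained sketch of that shape; `SessionLocal` and the SQLite table stand in for Dify's `session_factory` and models, and the index "cleanup" is a stub.

```python
# Schematic only: the "one short-lived session per phase" shape used by the
# reworked cleanup task. Not the task's real code.
import logging

from sqlalchemy import create_engine, text
from sqlalchemy.orm import sessionmaker

logger = logging.getLogger(__name__)
engine = create_engine("sqlite:///:memory:")
SessionLocal = sessionmaker(bind=engine)

with SessionLocal() as session, session.begin():
    session.execute(text("CREATE TABLE segments (id INTEGER, doc_id INTEGER)"))
    session.execute(text("INSERT INTO segments VALUES (1, 10), (2, 10)"))


def cleanup(doc_id: int) -> None:
    # Phase 1: read what needs cleaning in its own session, then let it close.
    with SessionLocal() as session:
        ids = session.execute(
            text("SELECT id FROM segments WHERE doc_id = :d"), {"d": doc_id}
        ).scalars().all()

    # Phase 2: external index cleanup; a failure here is logged, not fatal,
    # and cannot poison a long-lived database transaction.
    try:
        print("cleaning index nodes", ids)  # stub for index_processor.clean(...)
    except Exception:
        logger.exception("index cleanup failed")

    # Phase 3: row deletion in a fresh transactional session.
    with SessionLocal() as session, session.begin():
        session.execute(text("DELETE FROM segments WHERE doc_id = :d"), {"d": doc_id})


cleanup(10)
```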
@@ -27,6 +27,7 @@ def document_indexing_sync_task(dataset_id: str, document_id: str):
    """
    logger.info(click.style(f"Start sync document: {document_id}", fg="green"))
    start_at = time.perf_counter()
    tenant_id = None

    with session_factory.create_session() as session, session.begin():
        document = session.query(Document).where(Document.id == document_id, Document.dataset_id == dataset_id).first()
@@ -35,94 +36,120 @@ def document_indexing_sync_task(dataset_id: str, document_id: str):
            logger.info(click.style(f"Document not found: {document_id}", fg="red"))
            return

        if document.indexing_status == "parsing":
            logger.info(click.style(f"Document {document_id} is already being processed, skipping", fg="yellow"))
            return

        dataset = session.query(Dataset).where(Dataset.id == dataset_id).first()
        if not dataset:
            raise Exception("Dataset not found")

        data_source_info = document.data_source_info_dict
        if document.data_source_type == "notion_import":
            if (
                not data_source_info
                or "notion_page_id" not in data_source_info
                or "notion_workspace_id" not in data_source_info
            ):
                raise ValueError("no notion page found")
            workspace_id = data_source_info["notion_workspace_id"]
            page_id = data_source_info["notion_page_id"]
            page_type = data_source_info["type"]
            page_edited_time = data_source_info["last_edited_time"]
            credential_id = data_source_info.get("credential_id")
        if document.data_source_type != "notion_import":
            logger.info(click.style(f"Document {document_id} is not a notion_import, skipping", fg="yellow"))
            return

            # Get credentials from datasource provider
            datasource_provider_service = DatasourceProviderService()
            credential = datasource_provider_service.get_datasource_credentials(
                tenant_id=document.tenant_id,
                credential_id=credential_id,
                provider="notion_datasource",
                plugin_id="langgenius/notion_datasource",
            )
        if (
            not data_source_info
            or "notion_page_id" not in data_source_info
            or "notion_workspace_id" not in data_source_info
        ):
            raise ValueError("no notion page found")

            if not credential:
                logger.error(
                    "Datasource credential not found for document %s, tenant_id: %s, credential_id: %s",
                    document_id,
                    document.tenant_id,
                    credential_id,
                )
        workspace_id = data_source_info["notion_workspace_id"]
        page_id = data_source_info["notion_page_id"]
        page_type = data_source_info["type"]
        page_edited_time = data_source_info["last_edited_time"]
        credential_id = data_source_info.get("credential_id")
        tenant_id = document.tenant_id
        index_type = document.doc_form

        segments = session.scalars(select(DocumentSegment).where(DocumentSegment.document_id == document_id)).all()
        index_node_ids = [segment.index_node_id for segment in segments]

    # Get credentials from datasource provider
    datasource_provider_service = DatasourceProviderService()
    credential = datasource_provider_service.get_datasource_credentials(
        tenant_id=tenant_id,
        credential_id=credential_id,
        provider="notion_datasource",
        plugin_id="langgenius/notion_datasource",
    )

    if not credential:
        logger.error(
            "Datasource credential not found for document %s, tenant_id: %s, credential_id: %s",
            document_id,
            tenant_id,
            credential_id,
        )

        with session_factory.create_session() as session, session.begin():
            document = session.query(Document).filter_by(id=document_id).first()
            if document:
                document.indexing_status = "error"
                document.error = "Datasource credential not found. Please reconnect your Notion workspace."
                document.stopped_at = naive_utc_now()
            return
        return

            loader = NotionExtractor(
                notion_workspace_id=workspace_id,
                notion_obj_id=page_id,
                notion_page_type=page_type,
                notion_access_token=credential.get("integration_secret"),
                tenant_id=document.tenant_id,
            )
    loader = NotionExtractor(
        notion_workspace_id=workspace_id,
        notion_obj_id=page_id,
        notion_page_type=page_type,
        notion_access_token=credential.get("integration_secret"),
        tenant_id=tenant_id,
    )

            last_edited_time = loader.get_notion_last_edited_time()
    last_edited_time = loader.get_notion_last_edited_time()
    if last_edited_time == page_edited_time:
        logger.info(click.style(f"Document {document_id} content unchanged, skipping sync", fg="yellow"))
        return

            # check the page is updated
            if last_edited_time != page_edited_time:
                document.indexing_status = "parsing"
                document.processing_started_at = naive_utc_now()
    logger.info(click.style(f"Document {document_id} content changed, starting sync", fg="green"))

                # delete all document segment and index
                try:
                    dataset = session.query(Dataset).where(Dataset.id == dataset_id).first()
                    if not dataset:
                        raise Exception("Dataset not found")
                    index_type = document.doc_form
                    index_processor = IndexProcessorFactory(index_type).init_index_processor()
    try:
        index_processor = IndexProcessorFactory(index_type).init_index_processor()
        with session_factory.create_session() as session:
            dataset = session.query(Dataset).where(Dataset.id == dataset_id).first()
            if dataset:
                index_processor.clean(dataset, index_node_ids, with_keywords=True, delete_child_chunks=True)
        logger.info(click.style(f"Cleaned vector index for document {document_id}", fg="green"))
    except Exception:
        logger.exception("Failed to clean vector index for document %s", document_id)

                    segments = session.scalars(
                        select(DocumentSegment).where(DocumentSegment.document_id == document_id)
                    ).all()
                    index_node_ids = [segment.index_node_id for segment in segments]
    with session_factory.create_session() as session, session.begin():
        document = session.query(Document).filter_by(id=document_id).first()
        if not document:
            logger.warning(click.style(f"Document {document_id} not found during sync", fg="yellow"))
            return

                    # delete from vector index
                    index_processor.clean(dataset, index_node_ids, with_keywords=True, delete_child_chunks=True)
        data_source_info = document.data_source_info_dict
        data_source_info["last_edited_time"] = last_edited_time
        document.data_source_info = data_source_info

                    segment_ids = [segment.id for segment in segments]
                    segment_delete_stmt = delete(DocumentSegment).where(DocumentSegment.id.in_(segment_ids))
                    session.execute(segment_delete_stmt)
        document.indexing_status = "parsing"
        document.processing_started_at = naive_utc_now()

                    end_at = time.perf_counter()
                    logger.info(
                        click.style(
                            "Cleaned document when document update data source or process rule: {} latency: {}".format(
                                document_id, end_at - start_at
                            ),
                            fg="green",
                        )
                    )
                except Exception:
                    logger.exception("Cleaned document when document update data source or process rule failed")
        segment_delete_stmt = delete(DocumentSegment).where(DocumentSegment.document_id == document_id)
        session.execute(segment_delete_stmt)

                try:
                    indexing_runner = IndexingRunner()
                    indexing_runner.run([document])
                    end_at = time.perf_counter()
                    logger.info(click.style(f"update document: {document.id} latency: {end_at - start_at}", fg="green"))
                except DocumentIsPausedError as ex:
                    logger.info(click.style(str(ex), fg="yellow"))
                except Exception:
                    logger.exception("document_indexing_sync_task failed, document_id: %s", document_id)
        logger.info(click.style(f"Deleted segments for document {document_id}", fg="green"))

    try:
        indexing_runner = IndexingRunner()
        with session_factory.create_session() as session:
            document = session.query(Document).filter_by(id=document_id).first()
            if document:
                indexing_runner.run([document])
        end_at = time.perf_counter()
        logger.info(click.style(f"Sync completed for document {document_id} latency: {end_at - start_at}", fg="green"))
    except DocumentIsPausedError as ex:
        logger.info(click.style(str(ex), fg="yellow"))
    except Exception as e:
        logger.exception("document_indexing_sync_task failed for document_id: %s", document_id)
        with session_factory.create_session() as session, session.begin():
            document = session.query(Document).filter_by(id=document_id).first()
            if document:
                document.indexing_status = "error"
                document.error = str(e)
                document.stopped_at = naive_utc_now()
@@ -153,8 +153,7 @@ class TestCleanNotionDocumentTask:
        # Execute cleanup task
        clean_notion_document_task(document_ids, dataset.id)

        # Verify documents and segments are deleted
        assert db_session_with_containers.query(Document).filter(Document.id.in_(document_ids)).count() == 0
        # Verify segments are deleted
        assert (
            db_session_with_containers.query(DocumentSegment)
            .filter(DocumentSegment.document_id.in_(document_ids))
@@ -162,9 +161,9 @@ class TestCleanNotionDocumentTask:
            == 0
        )

        # Verify index processor was called for each document
        # Verify index processor was called
        mock_processor = mock_index_processor_factory.return_value.init_index_processor.return_value
        assert mock_processor.clean.call_count == len(document_ids)
        mock_processor.clean.assert_called_once()

        # This test successfully verifies:
        # 1. Document records are properly deleted from the database
@@ -186,12 +185,12 @@ class TestCleanNotionDocumentTask:
        non_existent_dataset_id = str(uuid.uuid4())
        document_ids = [str(uuid.uuid4()), str(uuid.uuid4())]

        # Execute cleanup task with non-existent dataset
        clean_notion_document_task(document_ids, non_existent_dataset_id)
        # Execute cleanup task with non-existent dataset - expect exception
        with pytest.raises(Exception, match="Document has no dataset"):
            clean_notion_document_task(document_ids, non_existent_dataset_id)

        # Verify that the index processor was not called
        mock_processor = mock_index_processor_factory.return_value.init_index_processor.return_value
        mock_processor.clean.assert_not_called()
        # Verify that the index processor factory was not used
        mock_index_processor_factory.return_value.init_index_processor.assert_not_called()

    def test_clean_notion_document_task_empty_document_list(
        self, db_session_with_containers, mock_index_processor_factory, mock_external_service_dependencies
@@ -229,9 +228,13 @@ class TestCleanNotionDocumentTask:
        # Execute cleanup task with empty document list
        clean_notion_document_task([], dataset.id)

        # Verify that the index processor was not called
        # Verify that the index processor was called once with empty node list
        mock_processor = mock_index_processor_factory.return_value.init_index_processor.return_value
        mock_processor.clean.assert_not_called()
        assert mock_processor.clean.call_count == 1
        args, kwargs = mock_processor.clean.call_args
        # args: (dataset, total_index_node_ids)
        assert isinstance(args[0], Dataset)
        assert args[1] == []

    def test_clean_notion_document_task_with_different_index_types(
        self, db_session_with_containers, mock_index_processor_factory, mock_external_service_dependencies
@@ -315,8 +318,7 @@ class TestCleanNotionDocumentTask:
        # Note: This test successfully verifies cleanup with different document types.
        # The task properly handles various index types and document configurations.

        # Verify documents and segments are deleted
        assert db_session_with_containers.query(Document).filter(Document.id == document.id).count() == 0
        # Verify segments are deleted
        assert (
            db_session_with_containers.query(DocumentSegment)
            .filter(DocumentSegment.document_id == document.id)
@@ -404,8 +406,7 @@ class TestCleanNotionDocumentTask:
        # Execute cleanup task
        clean_notion_document_task([document.id], dataset.id)

        # Verify documents and segments are deleted
        assert db_session_with_containers.query(Document).filter(Document.id == document.id).count() == 0
        # Verify segments are deleted
        assert (
            db_session_with_containers.query(DocumentSegment).filter(DocumentSegment.document_id == document.id).count()
            == 0
@@ -508,8 +509,7 @@ class TestCleanNotionDocumentTask:

        clean_notion_document_task(documents_to_clean, dataset.id)

        # Verify only specified documents and segments are deleted
        assert db_session_with_containers.query(Document).filter(Document.id.in_(documents_to_clean)).count() == 0
        # Verify only specified documents' segments are deleted
        assert (
            db_session_with_containers.query(DocumentSegment)
            .filter(DocumentSegment.document_id.in_(documents_to_clean))
@@ -697,11 +697,12 @@ class TestCleanNotionDocumentTask:
        db_session_with_containers.commit()

        # Mock index processor to raise an exception
        mock_index_processor = mock_index_processor_factory.init_index_processor.return_value
        mock_index_processor = mock_index_processor_factory.return_value.init_index_processor.return_value
        mock_index_processor.clean.side_effect = Exception("Index processor error")

        # Execute cleanup task - it should handle the exception gracefully
        clean_notion_document_task([document.id], dataset.id)
        # Execute cleanup task - current implementation propagates the exception
        with pytest.raises(Exception, match="Index processor error"):
            clean_notion_document_task([document.id], dataset.id)

        # Note: This test demonstrates the task's error handling capability.
        # Even with external service errors, the database operations complete successfully.
@@ -803,8 +804,7 @@ class TestCleanNotionDocumentTask:
        all_document_ids = [doc.id for doc in documents]
        clean_notion_document_task(all_document_ids, dataset.id)

        # Verify all documents and segments are deleted
        assert db_session_with_containers.query(Document).filter(Document.dataset_id == dataset.id).count() == 0
        # Verify all segments are deleted
        assert (
            db_session_with_containers.query(DocumentSegment).filter(DocumentSegment.dataset_id == dataset.id).count()
            == 0
@@ -914,8 +914,7 @@ class TestCleanNotionDocumentTask:

        clean_notion_document_task([target_document.id], target_dataset.id)

        # Verify only documents from target dataset are deleted
        assert db_session_with_containers.query(Document).filter(Document.id == target_document.id).count() == 0
        # Verify only documents' segments from target dataset are deleted
        assert (
            db_session_with_containers.query(DocumentSegment)
            .filter(DocumentSegment.document_id == target_document.id)
@@ -1030,8 +1029,7 @@ class TestCleanNotionDocumentTask:
        all_document_ids = [doc.id for doc in documents]
        clean_notion_document_task(all_document_ids, dataset.id)

        # Verify all documents and segments are deleted regardless of status
        assert db_session_with_containers.query(Document).filter(Document.dataset_id == dataset.id).count() == 0
        # Verify all segments are deleted regardless of status
        assert (
            db_session_with_containers.query(DocumentSegment).filter(DocumentSegment.dataset_id == dataset.id).count()
            == 0
@@ -1142,8 +1140,7 @@ class TestCleanNotionDocumentTask:
        # Execute cleanup task
        clean_notion_document_task([document.id], dataset.id)

        # Verify documents and segments are deleted
        assert db_session_with_containers.query(Document).filter(Document.id == document.id).count() == 0
        # Verify segments are deleted
        assert (
            db_session_with_containers.query(DocumentSegment).filter(DocumentSegment.document_id == document.id).count()
            == 0
@@ -0,0 +1,62 @@
"""
Unit tests for Service API knowledge pipeline file-upload serialization.
"""

import importlib.util
from datetime import UTC, datetime
from pathlib import Path


class FakeUploadFile:
    id: str
    name: str
    size: int
    extension: str
    mime_type: str
    created_by: str
    created_at: datetime | None


def _load_serialize_upload_file():
    api_dir = Path(__file__).resolve().parents[5]
    serializers_path = api_dir / "controllers" / "service_api" / "dataset" / "rag_pipeline" / "serializers.py"

    spec = importlib.util.spec_from_file_location("rag_pipeline_serializers", serializers_path)
    assert spec
    assert spec.loader
    module = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(module)  # type: ignore[attr-defined]
    return module.serialize_upload_file


def test_file_upload_created_at_is_isoformat_string():
    serialize_upload_file = _load_serialize_upload_file()

    created_at = datetime(2026, 2, 8, 12, 0, 0, tzinfo=UTC)
    upload_file = FakeUploadFile()
    upload_file.id = "file-1"
    upload_file.name = "test.pdf"
    upload_file.size = 123
    upload_file.extension = "pdf"
    upload_file.mime_type = "application/pdf"
    upload_file.created_by = "account-1"
    upload_file.created_at = created_at

    result = serialize_upload_file(upload_file)
    assert result["created_at"] == created_at.isoformat()


def test_file_upload_created_at_none_serializes_to_null():
    serialize_upload_file = _load_serialize_upload_file()

    upload_file = FakeUploadFile()
    upload_file.id = "file-1"
    upload_file.name = "test.pdf"
    upload_file.size = 123
    upload_file.extension = "pdf"
    upload_file.mime_type = "application/pdf"
    upload_file.created_by = "account-1"
    upload_file.created_at = None

    result = serialize_upload_file(upload_file)
    assert result["created_at"] is None
@@ -0,0 +1,54 @@
"""
Unit tests for Service API knowledge pipeline route registration.
"""

import ast
from pathlib import Path


def test_rag_pipeline_routes_registered():
    api_dir = Path(__file__).resolve().parents[5]

    service_api_init = api_dir / "controllers" / "service_api" / "__init__.py"
    rag_pipeline_workflow = (
        api_dir / "controllers" / "service_api" / "dataset" / "rag_pipeline" / "rag_pipeline_workflow.py"
    )

    assert service_api_init.exists()
    assert rag_pipeline_workflow.exists()

    init_tree = ast.parse(service_api_init.read_text(encoding="utf-8"))
    import_found = False
    for node in ast.walk(init_tree):
        if not isinstance(node, ast.ImportFrom):
            continue
        if node.module != "dataset.rag_pipeline" or node.level != 1:
            continue
        if any(alias.name == "rag_pipeline_workflow" for alias in node.names):
            import_found = True
            break
    assert import_found, "from .dataset.rag_pipeline import rag_pipeline_workflow not found in service_api/__init__.py"

    workflow_tree = ast.parse(rag_pipeline_workflow.read_text(encoding="utf-8"))
    route_paths: set[str] = set()

    for node in ast.walk(workflow_tree):
        if not isinstance(node, ast.ClassDef):
            continue
        for decorator in node.decorator_list:
            if not isinstance(decorator, ast.Call):
                continue
            if not isinstance(decorator.func, ast.Attribute):
                continue
            if decorator.func.attr != "route":
                continue
            if not decorator.args:
                continue
            first_arg = decorator.args[0]
            if isinstance(first_arg, ast.Constant) and isinstance(first_arg.value, str):
                route_paths.add(first_arg.value)

    assert "/datasets/<uuid:dataset_id>/pipeline/datasource-plugins" in route_paths
    assert "/datasets/<uuid:dataset_id>/pipeline/datasource/nodes/<string:node_id>/run" in route_paths
    assert "/datasets/<uuid:dataset_id>/pipeline/run" in route_paths
    assert "/datasets/pipeline/file-upload" in route_paths
@@ -4,7 +4,7 @@ from typing import Any
from uuid import uuid4

import pytest
from hypothesis import given, settings
from hypothesis import HealthCheck, given, settings
from hypothesis import strategies as st

from core.file import File, FileTransferMethod, FileType
@@ -493,7 +493,7 @@ def _scalar_value() -> st.SearchStrategy[int | float | str | File | None]:
    )


@settings(max_examples=50)
@settings(max_examples=30, suppress_health_check=[HealthCheck.too_slow, HealthCheck.filter_too_much], deadline=None)
@given(_scalar_value())
def test_build_segment_and_extract_values_for_scalar_types(value):
    seg = variable_factory.build_segment(value)
@@ -504,7 +504,7 @@ def test_build_segment_and_extract_values_for_scalar_types(value):
    assert seg.value == value


@settings(max_examples=50)
@settings(max_examples=30, suppress_health_check=[HealthCheck.too_slow, HealthCheck.filter_too_much], deadline=None)
@given(values=st.lists(_scalar_value(), max_size=20))
def test_build_segment_and_extract_values_for_array_types(values):
    seg = variable_factory.build_segment(values)
@@ -698,6 +698,132 @@ class TestTenantService:

        self._assert_database_operations_called(mock_db_dependencies["db"])

    # ==================== Member Removal Tests ====================

    def test_remove_pending_member_deletes_orphaned_account(self):
        """Test that removing a pending member with no other workspaces deletes the account."""
        # Arrange
        mock_tenant = MagicMock()
        mock_tenant.id = "tenant-456"
        mock_operator = TestAccountAssociatedDataFactory.create_account_mock(account_id="operator-123", role="owner")
        mock_pending_member = TestAccountAssociatedDataFactory.create_account_mock(
            account_id="pending-user-789", email="pending@example.com", status=AccountStatus.PENDING
        )

        mock_ta = TestAccountAssociatedDataFactory.create_tenant_join_mock(
            tenant_id="tenant-456", account_id="pending-user-789", role="normal"
        )

        with patch("services.account_service.db") as mock_db:
            mock_operator_join = TestAccountAssociatedDataFactory.create_tenant_join_mock(
                tenant_id="tenant-456", account_id="operator-123", role="owner"
            )

            query_mock_permission = MagicMock()
            query_mock_permission.filter_by.return_value.first.return_value = mock_operator_join

            query_mock_ta = MagicMock()
            query_mock_ta.filter_by.return_value.first.return_value = mock_ta

            query_mock_count = MagicMock()
            query_mock_count.filter_by.return_value.count.return_value = 0

            mock_db.session.query.side_effect = [query_mock_permission, query_mock_ta, query_mock_count]

            with patch("services.enterprise.account_deletion_sync.sync_workspace_member_removal") as mock_sync:
                mock_sync.return_value = True

                # Act
                TenantService.remove_member_from_tenant(mock_tenant, mock_pending_member, mock_operator)

                # Assert: enterprise sync still receives the correct member ID
                mock_sync.assert_called_once_with(
                    workspace_id="tenant-456",
                    member_id="pending-user-789",
                    source="workspace_member_removed",
                )

            # Assert: both join record and account should be deleted
            mock_db.session.delete.assert_any_call(mock_ta)
            mock_db.session.delete.assert_any_call(mock_pending_member)
            assert mock_db.session.delete.call_count == 2

    def test_remove_pending_member_keeps_account_with_other_workspaces(self):
        """Test that removing a pending member who belongs to other workspaces preserves the account."""
        # Arrange
        mock_tenant = MagicMock()
        mock_tenant.id = "tenant-456"
        mock_operator = TestAccountAssociatedDataFactory.create_account_mock(account_id="operator-123", role="owner")
        mock_pending_member = TestAccountAssociatedDataFactory.create_account_mock(
            account_id="pending-user-789", email="pending@example.com", status=AccountStatus.PENDING
        )

        mock_ta = TestAccountAssociatedDataFactory.create_tenant_join_mock(
            tenant_id="tenant-456", account_id="pending-user-789", role="normal"
        )

        with patch("services.account_service.db") as mock_db:
            mock_operator_join = TestAccountAssociatedDataFactory.create_tenant_join_mock(
                tenant_id="tenant-456", account_id="operator-123", role="owner"
            )

            query_mock_permission = MagicMock()
            query_mock_permission.filter_by.return_value.first.return_value = mock_operator_join

            query_mock_ta = MagicMock()
            query_mock_ta.filter_by.return_value.first.return_value = mock_ta

            # Remaining join count = 1 (still in another workspace)
            query_mock_count = MagicMock()
            query_mock_count.filter_by.return_value.count.return_value = 1

            mock_db.session.query.side_effect = [query_mock_permission, query_mock_ta, query_mock_count]

            with patch("services.enterprise.account_deletion_sync.sync_workspace_member_removal") as mock_sync:
                mock_sync.return_value = True

                # Act
                TenantService.remove_member_from_tenant(mock_tenant, mock_pending_member, mock_operator)

            # Assert: only the join record should be deleted, not the account
            mock_db.session.delete.assert_called_once_with(mock_ta)

    def test_remove_active_member_preserves_account(self):
        """Test that removing an active member never deletes the account, even with no other workspaces."""
        # Arrange
        mock_tenant = MagicMock()
        mock_tenant.id = "tenant-456"
        mock_operator = TestAccountAssociatedDataFactory.create_account_mock(account_id="operator-123", role="owner")
        mock_active_member = TestAccountAssociatedDataFactory.create_account_mock(
            account_id="active-user-789", email="active@example.com", status=AccountStatus.ACTIVE
        )

        mock_ta = TestAccountAssociatedDataFactory.create_tenant_join_mock(
            tenant_id="tenant-456", account_id="active-user-789", role="normal"
        )

        with patch("services.account_service.db") as mock_db:
            mock_operator_join = TestAccountAssociatedDataFactory.create_tenant_join_mock(
                tenant_id="tenant-456", account_id="operator-123", role="owner"
            )

            query_mock_permission = MagicMock()
            query_mock_permission.filter_by.return_value.first.return_value = mock_operator_join

            query_mock_ta = MagicMock()
            query_mock_ta.filter_by.return_value.first.return_value = mock_ta

            mock_db.session.query.side_effect = [query_mock_permission, query_mock_ta]

            with patch("services.enterprise.account_deletion_sync.sync_workspace_member_removal") as mock_sync:
                mock_sync.return_value = True

                # Act
                TenantService.remove_member_from_tenant(mock_tenant, mock_active_member, mock_operator)

            # Assert: only the join record should be deleted
            mock_db.session.delete.assert_called_once_with(mock_ta)

    # ==================== Tenant Switching Tests ====================

    def test_switch_tenant_success(self):
@@ -109,40 +109,87 @@ def mock_document_segments(document_id):

@pytest.fixture
def mock_db_session():
"""Mock database session via session_factory.create_session()."""
"""Mock database session via session_factory.create_session().

After session split refactor, the code calls create_session() multiple times.
This fixture creates shared query mocks so all sessions use the same
query configuration, simulating database persistence across sessions.

The fixture automatically converts side_effect to cycle to prevent StopIteration.
Tests configure mocks the same way as before, but behind the scenes the values
are cycled infinitely for all sessions.
"""
from itertools import cycle

with patch("tasks.document_indexing_sync_task.session_factory") as mock_sf:
session = MagicMock()
# Ensure tests can observe session.close() via context manager teardown
session.close = MagicMock()
session.commit = MagicMock()
sessions = []

# Mock session.begin() context manager to auto-commit on exit
begin_cm = MagicMock()
begin_cm.__enter__.return_value = session
# Shared query mocks - all sessions use these
shared_query = MagicMock()
shared_filter_by = MagicMock()
shared_scalars_result = MagicMock()

def _begin_exit_side_effect(*args, **kwargs):
# session.begin().__exit__() should commit if no exception
if args[0] is None: # No exception
session.commit()
# Create custom first mock that auto-cycles side_effect
class CyclicMock(MagicMock):
def __setattr__(self, name, value):
if name == "side_effect" and value is not None:
# Convert list/tuple to infinite cycle
if isinstance(value, (list, tuple)):
value = cycle(value)
super().__setattr__(name, value)

begin_cm.__exit__.side_effect = _begin_exit_side_effect
session.begin.return_value = begin_cm
shared_query.where.return_value.first = CyclicMock()
shared_filter_by.first = CyclicMock()

# Mock create_session() context manager
cm = MagicMock()
cm.__enter__.return_value = session
def _create_session():
"""Create a new mock session for each create_session() call."""
session = MagicMock()
session.close = MagicMock()
session.commit = MagicMock()

def _exit_side_effect(*args, **kwargs):
session.close()
# Mock session.begin() context manager
begin_cm = MagicMock()
begin_cm.__enter__.return_value = session

cm.__exit__.side_effect = _exit_side_effect
mock_sf.create_session.return_value = cm
def _begin_exit_side_effect(exc_type, exc, tb):
# commit on success
if exc_type is None:
session.commit()
# return False to propagate exceptions
return False

query = MagicMock()
session.query.return_value = query
query.where.return_value = query
session.scalars.return_value = MagicMock()
yield session
begin_cm.__exit__.side_effect = _begin_exit_side_effect
session.begin.return_value = begin_cm

# Mock create_session() context manager
cm = MagicMock()
cm.__enter__.return_value = session

def _exit_side_effect(exc_type, exc, tb):
session.close()
return False

cm.__exit__.side_effect = _exit_side_effect

# All sessions use the same shared query mocks
session.query.return_value = shared_query
shared_query.where.return_value = shared_query
shared_query.filter_by.return_value = shared_filter_by
session.scalars.return_value = shared_scalars_result

sessions.append(session)
# Attach helpers on the first created session for assertions across all sessions
if len(sessions) == 1:
session.get_all_sessions = lambda: sessions
session.any_close_called = lambda: any(s.close.called for s in sessions)
session.any_commit_called = lambda: any(s.commit.called for s in sessions)
return cm

mock_sf.create_session.side_effect = _create_session

# Create first session and return it
_create_session()
yield sessions[0]

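The docstring of this fixture describes its key trick: any list or tuple assigned to side_effect is silently wrapped in itertools.cycle, so repeated queries across all mocked sessions keep yielding the same values and never raise StopIteration. A minimal, self-contained sketch of that idea, mirroring the CyclicMock class in the fixture (the CyclingMock name and the sample values below are illustrative, not taken from the repository):

from itertools import cycle
from unittest.mock import MagicMock

class CyclingMock(MagicMock):
    """A mock whose list/tuple side_effect is wrapped in an infinite cycle."""

    def __setattr__(self, name, value):
        if name == "side_effect" and isinstance(value, (list, tuple)):
            value = cycle(value)  # repeated calls keep yielding doc, dataset, doc, ...
        super().__setattr__(name, value)

first = CyclingMock()
first.side_effect = ["doc", "dataset"]
assert [first() for _ in range(5)] == ["doc", "dataset", "doc", "dataset", "doc"]

In the fixture above, the cycled mock is installed as the shared query's .first attribute, which is what lets every created session answer the same lookups without exhausting the configured returns.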

@pytest.fixture
@@ -201,8 +248,8 @@ class TestDocumentIndexingSyncTask:
# Act
document_indexing_sync_task(dataset_id, document_id)

# Assert
mock_db_session.close.assert_called_once()
# Assert - at least one session should have been closed
assert mock_db_session.any_close_called()

def test_missing_notion_workspace_id(self, mock_db_session, mock_document, dataset_id, document_id):
"""Test that task raises error when notion_workspace_id is missing."""
@@ -245,6 +292,7 @@ class TestDocumentIndexingSyncTask:
"""Test that task handles missing credentials by updating document status."""
# Arrange
mock_db_session.query.return_value.where.return_value.first.return_value = mock_document
mock_db_session.query.return_value.filter_by.return_value.first.return_value = mock_document
mock_datasource_provider_service.get_datasource_credentials.return_value = None

# Act
@@ -254,8 +302,8 @@ class TestDocumentIndexingSyncTask:
assert mock_document.indexing_status == "error"
assert "Datasource credential not found" in mock_document.error
assert mock_document.stopped_at is not None
mock_db_session.commit.assert_called()
mock_db_session.close.assert_called()
assert mock_db_session.any_commit_called()
assert mock_db_session.any_close_called()

def test_page_not_updated(
self,
@@ -269,6 +317,7 @@ class TestDocumentIndexingSyncTask:
"""Test that task does nothing when page has not been updated."""
# Arrange
mock_db_session.query.return_value.where.return_value.first.return_value = mock_document
mock_db_session.query.return_value.filter_by.return_value.first.return_value = mock_document
# Return same time as stored in document
mock_notion_extractor.get_notion_last_edited_time.return_value = "2024-01-01T00:00:00Z"

@@ -278,8 +327,8 @@ class TestDocumentIndexingSyncTask:
# Assert
# Document status should remain unchanged
assert mock_document.indexing_status == "completed"
# Session should still be closed via context manager teardown
assert mock_db_session.close.called
# At least one session should have been closed via context manager teardown
assert mock_db_session.any_close_called()

def test_successful_sync_when_page_updated(
self,
@@ -296,7 +345,20 @@ class TestDocumentIndexingSyncTask:
):
"""Test successful sync flow when Notion page has been updated."""
# Arrange
mock_db_session.query.return_value.where.return_value.first.side_effect = [mock_document, mock_dataset]
# Set exact sequence of returns across calls to `.first()`:
# 1) document (initial fetch)
# 2) dataset (pre-check)
# 3) dataset (cleaning phase)
# 4) document (pre-indexing update)
# 5) document (indexing runner fetch)
mock_db_session.query.return_value.where.return_value.first.side_effect = [
mock_document,
mock_dataset,
mock_dataset,
mock_document,
mock_document,
]
mock_db_session.query.return_value.filter_by.return_value.first.return_value = mock_document
mock_db_session.scalars.return_value.all.return_value = mock_document_segments
# NotionExtractor returns updated time
mock_notion_extractor.get_notion_last_edited_time.return_value = "2024-01-02T00:00:00Z"
@@ -314,28 +376,40 @@ class TestDocumentIndexingSyncTask:
mock_processor.clean.assert_called_once()

# Verify segments were deleted from database in batch (DELETE FROM document_segments)
execute_sqls = [" ".join(str(c[0][0]).split()) for c in mock_db_session.execute.call_args_list]
# Aggregate execute calls across all created sessions
execute_sqls = []
for s in mock_db_session.get_all_sessions():
execute_sqls.extend([" ".join(str(c[0][0]).split()) for c in s.execute.call_args_list])
assert any("DELETE FROM document_segments" in sql for sql in execute_sqls)

# Verify indexing runner was called
mock_indexing_runner.run.assert_called_once_with([mock_document])

# Verify session operations
assert mock_db_session.commit.called
mock_db_session.close.assert_called_once()
# Verify session operations (across any created session)
assert mock_db_session.any_commit_called()
assert mock_db_session.any_close_called()

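The assertion above collapses whitespace in every SQL statement recorded by the mocked sessions before matching on it, so the test does not depend on how the DELETE statement happens to be indented. A tiny standalone sketch of that normalization (the SQL text here is only an example, not the statement issued by the task):

from unittest.mock import MagicMock

session = MagicMock()
session.execute(
    """
    DELETE FROM document_segments
    WHERE document_id = :doc_id
    """
)

# call.args[0] is the first positional argument of each recorded execute() call;
# splitting and re-joining collapses newlines and indentation into single spaces.
executed = [" ".join(str(call.args[0]).split()) for call in session.execute.call_args_list]
assert any("DELETE FROM document_segments" in sql for sql in executed)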
def test_dataset_not_found_during_cleaning(
self,
mock_db_session,
mock_datasource_provider_service,
mock_notion_extractor,
mock_indexing_runner,
mock_document,
dataset_id,
document_id,
):
"""Test that task handles dataset not found during cleaning phase."""
# Arrange
mock_db_session.query.return_value.where.return_value.first.side_effect = [mock_document, None]
# Sequence: document (initial), dataset (pre-check), None (cleaning), document (update), document (indexing)
mock_db_session.query.return_value.where.return_value.first.side_effect = [
mock_document,
mock_dataset,
None,
mock_document,
mock_document,
]
mock_db_session.query.return_value.filter_by.return_value.first.return_value = mock_document
mock_notion_extractor.get_notion_last_edited_time.return_value = "2024-01-02T00:00:00Z"

# Act
@@ -344,8 +418,8 @@ class TestDocumentIndexingSyncTask:
# Assert
# Document should still be set to parsing
assert mock_document.indexing_status == "parsing"
# Session should be closed after error
mock_db_session.close.assert_called_once()
# At least one session should be closed after error
assert mock_db_session.any_close_called()

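The # Sequence comment above spells out the order in which the task is expected to call .first() on the chained query mock; configuring side_effect with a list makes the mock hand those values back one call at a time. A short illustrative sketch of that pattern (the string values stand in for the real document/dataset mocks):

from unittest.mock import MagicMock

session = MagicMock()
# Every .first() call pops the next value from the configured sequence.
session.query.return_value.where.return_value.first.side_effect = ["document", "dataset", None]

query = session.query()
assert query.where().first() == "document"
assert query.where().first() == "dataset"
assert query.where().first() is None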
def test_cleaning_error_continues_to_indexing(
self,
@@ -361,8 +435,14 @@ class TestDocumentIndexingSyncTask:
):
"""Test that indexing continues even if cleaning fails."""
# Arrange
mock_db_session.query.return_value.where.return_value.first.side_effect = [mock_document, mock_dataset]
mock_db_session.scalars.return_value.all.side_effect = Exception("Cleaning error")
from itertools import cycle

mock_db_session.query.return_value.where.return_value.first.side_effect = cycle([mock_document, mock_dataset])
mock_db_session.query.return_value.filter_by.return_value.first.return_value = mock_document
# Make the cleaning step fail but not the segment fetch
processor = mock_index_processor_factory.return_value.init_index_processor.return_value
processor.clean.side_effect = Exception("Cleaning error")
mock_db_session.scalars.return_value.all.return_value = []
mock_notion_extractor.get_notion_last_edited_time.return_value = "2024-01-02T00:00:00Z"

# Act
@@ -371,7 +451,7 @@ class TestDocumentIndexingSyncTask:
# Assert
# Indexing should still be attempted despite cleaning error
mock_indexing_runner.run.assert_called_once_with([mock_document])
mock_db_session.close.assert_called_once()
assert mock_db_session.any_close_called()

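The test above simulates a failing cleanup step by assigning an exception instance to the processor mock's side_effect; calling that mock then raises instead of returning a value, while the rest of the task keeps running. In isolation (illustrative names only, not code from the repository):

from unittest.mock import MagicMock

import pytest

processor = MagicMock()
processor.clean.side_effect = Exception("Cleaning error")

# Any call to the mocked clean() now raises the configured exception.
with pytest.raises(Exception, match="Cleaning error"):
    processor.clean(dataset=None, node_ids=[])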
def test_indexing_runner_document_paused_error(
self,
@@ -388,7 +468,10 @@ class TestDocumentIndexingSyncTask:
):
"""Test that DocumentIsPausedError is handled gracefully."""
# Arrange
mock_db_session.query.return_value.where.return_value.first.side_effect = [mock_document, mock_dataset]
from itertools import cycle

mock_db_session.query.return_value.where.return_value.first.side_effect = cycle([mock_document, mock_dataset])
mock_db_session.query.return_value.filter_by.return_value.first.return_value = mock_document
mock_db_session.scalars.return_value.all.return_value = mock_document_segments
mock_notion_extractor.get_notion_last_edited_time.return_value = "2024-01-02T00:00:00Z"
mock_indexing_runner.run.side_effect = DocumentIsPausedError("Document paused")
@@ -398,7 +481,7 @@ class TestDocumentIndexingSyncTask:

# Assert
# Session should be closed after handling error
mock_db_session.close.assert_called_once()
assert mock_db_session.any_close_called()

def test_indexing_runner_general_error(
self,
@@ -415,7 +498,10 @@ class TestDocumentIndexingSyncTask:
):
"""Test that general exceptions during indexing are handled."""
# Arrange
mock_db_session.query.return_value.where.return_value.first.side_effect = [mock_document, mock_dataset]
from itertools import cycle

mock_db_session.query.return_value.where.return_value.first.side_effect = cycle([mock_document, mock_dataset])
mock_db_session.query.return_value.filter_by.return_value.first.return_value = mock_document
mock_db_session.scalars.return_value.all.return_value = mock_document_segments
mock_notion_extractor.get_notion_last_edited_time.return_value = "2024-01-02T00:00:00Z"
mock_indexing_runner.run.side_effect = Exception("Indexing error")
@@ -425,7 +511,7 @@ class TestDocumentIndexingSyncTask:

# Assert
# Session should be closed after error
mock_db_session.close.assert_called_once()
assert mock_db_session.any_close_called()

def test_notion_extractor_initialized_with_correct_params(
self,
@@ -532,7 +618,14 @@ class TestDocumentIndexingSyncTask:
):
"""Test that index processor clean is called with correct parameters."""
# Arrange
mock_db_session.query.return_value.where.return_value.first.side_effect = [mock_document, mock_dataset]
# Sequence: document (initial), dataset (pre-check), dataset (cleaning), document (update), document (indexing)
mock_db_session.query.return_value.where.return_value.first.side_effect = [
mock_document,
mock_dataset,
mock_dataset,
mock_document,
mock_document,
]
mock_db_session.scalars.return_value.all.return_value = mock_document_segments
mock_notion_extractor.get_notion_last_edited_time.return_value = "2024-01-02T00:00:00Z"

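Several of these tests rely on the fixture's mocked session.begin() block committing automatically on a clean exit and letting exceptions propagate. That pattern can be reproduced in isolation like this (a minimal sketch of the same wiring, not code from the repository):

from unittest.mock import MagicMock

session = MagicMock()
begin_cm = MagicMock()
begin_cm.__enter__.return_value = session

def _begin_exit(exc_type, exc, tb):
    # Commit only when the with-block finished without an exception,
    # and return False so any exception keeps propagating to the caller.
    if exc_type is None:
        session.commit()
    return False

begin_cm.__exit__.side_effect = _begin_exit
session.begin.return_value = begin_cm

with session.begin():
    pass

session.commit.assert_called_once()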
@@ -38,7 +38,7 @@ const DeprecationNotice: FC<DeprecationNoticeProps> = ({
iconWrapperClassName,
textClassName,
}) => {
const { t } = useTranslation()
const { t } = useTranslation('plugin')

const deprecatedReasonKey = useMemo(() => {
if (!deprecatedReason)

@@ -18,25 +18,25 @@ const StatusContainer: FC<Props> = ({
return (
<div
className={cn(
'system-xs-regular relative break-all rounded-lg border px-3 py-2.5',
'relative break-all rounded-lg border px-3 py-2.5 system-xs-regular',
status === 'succeeded' && 'border-[rgba(23,178,106,0.8)] bg-workflow-display-success-bg bg-[url(~@/app/components/workflow/run/assets/bg-line-success.svg)] text-text-success',
status === 'succeeded' && theme === Theme.light && 'shadow-[inset_2px_2px_0_0_rgba(255,255,255,0.5),inset_0_1px_3px_0_rgba(0,0,0,0.12),inset_0_2px_24px_0_rgba(23,178,106,0.2),0_1px_2px_0_rgba(9,9,11,0.05),0_0_0_1px_rgba(0,0,0,0.05)]',
status === 'succeeded' && theme === Theme.dark && 'shadow-[inset_2px_2px_0_0_rgba(255,255,255,0.12),inset_0_1px_3px_0_rgba(0,0,0,0.4),inset_0_2px_24px_0_rgba(23,178,106,0.25),0_1px_2px_0_rgba(0,0,0,0.1),0_0_0_1px_rgba(24, 24, 27, 0.95)]',
status === 'succeeded' && theme === Theme.dark && 'shadow-[inset_2px_2px_0_0_rgba(255,255,255,0.12),inset_0_1px_3px_0_rgba(0,0,0,0.4),inset_0_2px_24px_0_rgba(23,178,106,0.25),0_1px_2px_0_rgba(0,0,0,0.1),0_0_0_1px_rgba(24,24,27,0.95)]',
status === 'partial-succeeded' && 'border-[rgba(23,178,106,0.8)] bg-workflow-display-success-bg bg-[url(~@/app/components/workflow/run/assets/bg-line-success.svg)] text-text-success',
status === 'partial-succeeded' && theme === Theme.light && 'shadow-[inset_2px_2px_0_0_rgba(255,255,255,0.5),inset_0_1px_3px_0_rgba(0,0,0,0.12),inset_0_2px_24px_0_rgba(23,178,106,0.2),0_1px_2px_0_rgba(9,9,11,0.05),0_0_0_1px_rgba(0,0,0,0.05)]',
status === 'partial-succeeded' && theme === Theme.dark && 'shadow-[inset_2px_2px_0_0_rgba(255,255,255,0.12),inset_0_1px_3px_0_rgba(0,0,0,0.4),inset_0_2px_24px_0_rgba(23,178,106,0.25),0_1px_2px_0_rgba(0,0,0,0.1),0_0_0_1px_rgba(24, 24, 27, 0.95)]',
status === 'partial-succeeded' && theme === Theme.dark && 'shadow-[inset_2px_2px_0_0_rgba(255,255,255,0.12),inset_0_1px_3px_0_rgba(0,0,0,0.4),inset_0_2px_24px_0_rgba(23,178,106,0.25),0_1px_2px_0_rgba(0,0,0,0.1),0_0_0_1px_rgba(24,24,27,0.95)]',
status === 'failed' && 'border-[rgba(240,68,56,0.8)] bg-workflow-display-error-bg bg-[url(~@/app/components/workflow/run/assets/bg-line-error.svg)] text-text-warning',
status === 'failed' && theme === Theme.light && 'shadow-[inset_2px_2px_0_0_rgba(255,255,255,0.5),inset_0_1px_3px_0_rgba(0,0,0,0.12),inset_0_2px_24px_0_rgba(240,68,56,0.2),0_1px_2px_0_rgba(9,9,11,0.05),0_0_0_1px_rgba(0,0,0,0.05)]',
status === 'failed' && theme === Theme.dark && 'shadow-[inset_2px_2px_0_0_rgba(255,255,255,0.12),inset_0_1px_3px_0_rgba(0,0,0,0.4),inset_0_2px_24px_0_rgba(240,68,56,0.25),0_1px_2px_0_rgba(0,0,0,0.1),0_0_0_1px_rgba(24, 24, 27, 0.95)]',
status === 'failed' && theme === Theme.dark && 'shadow-[inset_2px_2px_0_0_rgba(255,255,255,0.12),inset_0_1px_3px_0_rgba(0,0,0,0.4),inset_0_2px_24px_0_rgba(240,68,56,0.25),0_1px_2px_0_rgba(0,0,0,0.1),0_0_0_1px_rgba(24,24,27,0.95)]',
(status === 'stopped' || status === 'paused') && 'border-[rgba(247,144,9,0.8)] bg-workflow-display-warning-bg bg-[url(~@/app/components/workflow/run/assets/bg-line-warning.svg)] text-text-destructive',
(status === 'stopped' || status === 'paused') && theme === Theme.light && 'shadow-[inset_2px_2px_0_0_rgba(255,255,255,0.5),inset_0_1px_3px_0_rgba(0,0,0,0.12),inset_0_2px_24px_0_rgba(247,144,9,0.2),0_1px_2px_0_rgba(9,9,11,0.05),0_0_0_1px_rgba(0,0,0,0.05)]',
(status === 'stopped' || status === 'paused') && theme === Theme.dark && 'shadow-[inset_2px_2px_0_0_rgba(255,255,255,0.12),inset_0_1px_3px_0_rgba(0,0,0,0.4),inset_0_2px_24px_0_rgba(247,144,9,0.25),0_1px_2px_0_rgba(0,0,0,0.1),0_0_0_1px_rgba(24, 24, 27, 0.95)]',
(status === 'stopped' || status === 'paused') && theme === Theme.dark && 'shadow-[inset_2px_2px_0_0_rgba(255,255,255,0.12),inset_0_1px_3px_0_rgba(0,0,0,0.4),inset_0_2px_24px_0_rgba(247,144,9,0.25),0_1px_2px_0_rgba(0,0,0,0.1),0_0_0_1px_rgba(24,24,27,0.95)]',
status === 'exception' && 'border-[rgba(247,144,9,0.8)] bg-workflow-display-warning-bg bg-[url(~@/app/components/workflow/run/assets/bg-line-warning.svg)] text-text-destructive',
status === 'exception' && theme === Theme.light && 'shadow-[inset_2px_2px_0_0_rgba(255,255,255,0.5),inset_0_1px_3px_0_rgba(0,0,0,0.12),inset_0_2px_24px_0_rgba(247,144,9,0.2),0_1px_2px_0_rgba(9,9,11,0.05),0_0_0_1px_rgba(0,0,0,0.05)]',
status === 'exception' && theme === Theme.dark && 'shadow-[inset_2px_2px_0_0_rgba(255,255,255,0.12),inset_0_1px_3px_0_rgba(0,0,0,0.4),inset_0_2px_24px_0_rgba(247,144,9,0.25),0_1px_2px_0_rgba(0,0,0,0.1),0_0_0_1px_rgba(24, 24, 27, 0.95)]',
status === 'exception' && theme === Theme.dark && 'shadow-[inset_2px_2px_0_0_rgba(255,255,255,0.12),inset_0_1px_3px_0_rgba(0,0,0,0.4),inset_0_2px_24px_0_rgba(247,144,9,0.25),0_1px_2px_0_rgba(0,0,0,0.1),0_0_0_1px_rgba(24,24,27,0.95)]',
status === 'running' && 'border-[rgba(11,165,236,0.8)] bg-workflow-display-normal-bg bg-[url(~@/app/components/workflow/run/assets/bg-line-running.svg)] text-util-colors-blue-light-blue-light-600',
status === 'running' && theme === Theme.light && 'shadow-[inset_2px_2px_0_0_rgba(255,255,255,0.5),inset_0_1px_3px_0_rgba(0,0,0,0.12),inset_0_2px_24px_0_rgba(11,165,236,0.2),0_1px_2px_0_rgba(9,9,11,0.05),0_0_0_1px_rgba(0,0,0,0.05)]',
status === 'running' && theme === Theme.dark && 'shadow-[inset_2px_2px_0_0_rgba(255,255,255,0.12),inset_0_1px_3px_0_rgba(0,0,0,0.4),inset_0_2px_24px_0_rgba(11,165,236,0.25),0_1px_2px_0_rgba(0,0,0,0.1),0_0_0_1px_rgba(24, 24, 27, 0.95)]',
status === 'running' && theme === Theme.dark && 'shadow-[inset_2px_2px_0_0_rgba(255,255,255,0.12),inset_0_1px_3px_0_rgba(0,0,0,0.4),inset_0_2px_24px_0_rgba(11,165,236,0.25),0_1px_2px_0_rgba(0,0,0,0.1),0_0_0_1px_rgba(24,24,27,0.95)]',
)}
>
<div className={cn(

File diff suppressed because it is too large
@@ -2,9 +2,7 @@ import consistentPlaceholders from './rules/consistent-placeholders.js'
import noAsAnyInT from './rules/no-as-any-in-t.js'
import noExtraKeys from './rules/no-extra-keys.js'
import noLegacyNamespacePrefix from './rules/no-legacy-namespace-prefix.js'
import noVersionPrefix from './rules/no-version-prefix.js'
import requireNsOption from './rules/require-ns-option.js'
import validI18nKeys from './rules/valid-i18n-keys.js'

/** @type {import('eslint').ESLint.Plugin} */
const plugin = {
@@ -17,9 +15,7 @@ const plugin = {
'no-as-any-in-t': noAsAnyInT,
'no-extra-keys': noExtraKeys,
'no-legacy-namespace-prefix': noLegacyNamespacePrefix,
'no-version-prefix': noVersionPrefix,
'require-ns-option': requireNsOption,
'valid-i18n-keys': validI18nKeys,
},
}


@@ -1,45 +0,0 @@
const DEPENDENCY_KEYS = ['dependencies', 'devDependencies', 'peerDependencies', 'optionalDependencies']
const VERSION_PREFIXES = ['^', '~']

/** @type {import('eslint').Rule.RuleModule} */
export default {
meta: {
type: 'problem',
docs: {
description: `Ensure package.json dependencies do not use version prefixes (${VERSION_PREFIXES.join(' or ')})`,
},
fixable: 'code',
},
create(context) {
const { filename } = context

if (!filename.endsWith('package.json'))
return {}

const selector = `JSONProperty:matches(${DEPENDENCY_KEYS.map(k => `[key.value="${k}"]`).join(', ')}) > JSONObjectExpression > JSONProperty`

return {
[selector](node) {
const versionNode = node.value

if (versionNode && versionNode.type === 'JSONLiteral' && typeof versionNode.value === 'string') {
const version = versionNode.value
const foundPrefix = VERSION_PREFIXES.find(prefix => version.startsWith(prefix))

if (foundPrefix) {
const packageName = node.key.value || node.key.name
const cleanVersion = version.substring(1)
const canAutoFix = /^\d+\.\d+\.\d+$/.test(cleanVersion)
context.report({
node: versionNode,
message: `Dependency "${packageName}" has version prefix "${foundPrefix}" that should be removed (found: "${version}", expected: "${cleanVersion}")`,
fix: canAutoFix
? fixer => fixer.replaceText(versionNode, `"${cleanVersion}"`)
: undefined,
})
}
}
},
}
},
}
@@ -1,61 +0,0 @@
import { cleanJsonText } from '../utils.js'

/** @type {import('eslint').Rule.RuleModule} */
export default {
meta: {
type: 'problem',
docs: {
description: 'Ensure i18n JSON keys are flat and valid as object paths',
},
},
create(context) {
return {
Program(node) {
const { filename, sourceCode } = context

if (!filename.endsWith('.json'))
return

let json
try {
json = JSON.parse(cleanJsonText(sourceCode.text))
}
catch {
context.report({
node,
message: 'Invalid JSON format',
})
return
}

const keys = Object.keys(json)
const keyPrefixes = new Set()

for (const key of keys) {
if (key.includes('.')) {
const parts = key.split('.')
for (let i = 1; i < parts.length; i++) {
const prefix = parts.slice(0, i).join('.')
if (keys.includes(prefix)) {
context.report({
node,
message: `Invalid key structure: '${key}' conflicts with '${prefix}'`,
})
}
keyPrefixes.add(prefix)
}
}
}

for (const key of keys) {
if (keyPrefixes.has(key)) {
context.report({
node,
message: `Invalid key structure: '${key}' is a prefix of another key`,
})
}
}
},
}
},
}

File diff suppressed because it is too large
@@ -1,7 +1,8 @@
// @ts-check
import antfu from '@antfu/eslint-config'
import antfu, { GLOB_TESTS, GLOB_TS, GLOB_TSX } from '@antfu/eslint-config'
import pluginQuery from '@tanstack/eslint-plugin-query'
import tailwindcss from 'eslint-plugin-better-tailwindcss'
import hyoban from 'eslint-plugin-hyoban'
import sonar from 'eslint-plugin-sonarjs'
import storybook from 'eslint-plugin-storybook'
import dify from './eslint-rules/index.js'
@@ -67,7 +68,8 @@ export default antfu(
},
},
{
files: ['**/*.{ts,tsx}'],
files: [GLOB_TS, GLOB_TSX],
ignores: GLOB_TESTS,
plugins: {
tailwindcss,
},
@@ -79,7 +81,47 @@ export default antfu(
},
},
{
plugins: { dify },
name: 'dify/custom/setup',
plugins: {
dify,
hyoban,
},
},
{
files: ['**/*.tsx'],
rules: {
'hyoban/prefer-tailwind-icons': ['warn', {
prefix: 'i-',
propMappings: {
size: 'size',
width: 'w',
height: 'h',
},
libraries: [
{
prefix: 'i-custom-',
source: '^@/app/components/base/icons/src/(?<set>(?:public|vender)(?:/.*)?)$',
name: '^(?<name>.*)$',
},
{
source: '^@remixicon/react$',
name: '^(?<set>Ri)(?<name>.+)$',
},
{
source: '^@(?<set>heroicons)/react/24/outline$',
name: '^(?<name>.*)Icon$',
},
{
source: '^@(?<set>heroicons)/react/24/(?<variant>solid)$',
name: '^(?<name>.*)Icon$',
},
{
source: '^@(?<set>heroicons)/react/(?<variant>\\d+/(?:solid|outline))$',
name: '^(?<name>.*)Icon$',
},
],
}],
},
},
{
files: ['i18n/**/*.json'],
@@ -88,7 +130,7 @@ export default antfu(
'max-lines': 'off',
'jsonc/sort-keys': 'error',

'dify/valid-i18n-keys': 'error',
'hyoban/i18n-flat-key': 'error',
'dify/no-extra-keys': 'error',
'dify/consistent-placeholders': 'error',
},
@@ -96,7 +138,7 @@ export default antfu(
{
files: ['**/package.json'],
rules: {
'dify/no-version-prefix': 'error',
'hyoban/no-dependency-version-prefix': 'error',
},
},
)

@@ -1,7 +1,7 @@
'use client'
import type { Resource } from 'i18next'
import type { Locale } from '.'
import type { NamespaceCamelCase, NamespaceKebabCase } from './resources'
import type { Namespace, NamespaceInFileName } from './resources'
import { kebabCase } from 'es-toolkit/string'
import { createInstance } from 'i18next'
import resourcesToBackend from 'i18next-resources-to-backend'
@@ -14,7 +14,7 @@ export function createI18nextInstance(lng: Locale, resources: Resource) {
.use(initReactI18next)
.use(resourcesToBackend((
language: Locale,
namespace: NamespaceKebabCase | NamespaceCamelCase,
namespace: NamespaceInFileName | Namespace,
) => {
const namespaceKebab = kebabCase(namespace)
return import(`../i18n/${language}/${namespaceKebab}.json`)

@@ -1,9 +1,9 @@
'use client'

import type { NamespaceCamelCase } from './resources'
import type { Namespace } from './resources'
import { useTranslation as useTranslationOriginal } from 'react-i18next'

export function useTranslation(ns?: NamespaceCamelCase) {
export function useTranslation<T extends Namespace | undefined = undefined>(ns?: T) {
return useTranslationOriginal(ns)
}


@@ -1,13 +1,13 @@
import type { NamespaceCamelCase } from './resources'
import type { Namespace } from './resources'
import { use } from 'react'
import { getLocaleOnServer, getTranslation } from './server'

async function getI18nConfig(ns?: NamespaceCamelCase) {
async function getI18nConfig<T extends Namespace | undefined = undefined>(ns?: T) {
const lang = await getLocaleOnServer()
return getTranslation(lang, ns)
}

export function useTranslation(ns?: NamespaceCamelCase) {
export function useTranslation<T extends Namespace | undefined = undefined>(ns?: T) {
return use(getI18nConfig(ns))
}


@@ -1,4 +1,5 @@
import { kebabCase } from 'es-toolkit/string'
import { kebabCase } from 'string-ts'
import { ObjectKeys } from '@/utils/object'
import appAnnotation from '../i18n/en-US/app-annotation.json'
import appApi from '../i18n/en-US/app-api.json'
import appDebug from '../i18n/en-US/app-debug.json'
@@ -64,19 +65,10 @@ const resources = {
workflow,
}

export type KebabCase<S extends string> = S extends `${infer T}${infer U}`
? T extends Lowercase<T>
? `${T}${KebabCase<U>}`
: `-${Lowercase<T>}${KebabCase<U>}`
: S

export type CamelCase<S extends string> = S extends `${infer T}-${infer U}`
? `${T}${Capitalize<CamelCase<U>>}`
: S

export type Resources = typeof resources
export type NamespaceCamelCase = keyof Resources
export type NamespaceKebabCase = KebabCase<NamespaceCamelCase>

export const namespacesCamelCase = Object.keys(resources) as NamespaceCamelCase[]
export const namespacesKebabCase = namespacesCamelCase.map(ns => kebabCase(ns)) as NamespaceKebabCase[]
export const namespaces = ObjectKeys(resources)
export type Namespace = typeof namespaces[number]

export const namespacesInFileName = namespaces.map(ns => kebabCase(ns))
export type NamespaceInFileName = typeof namespacesInFileName[number]

@@ -1,6 +1,6 @@
import type { i18n as I18nInstance, Resource, ResourceLanguage } from 'i18next'
import type { Locale } from '.'
import type { NamespaceCamelCase, NamespaceKebabCase } from './resources'
import type { Namespace, NamespaceInFileName } from './resources'
import { match } from '@formatjs/intl-localematcher'
import { kebabCase } from 'es-toolkit/compat'
import { camelCase } from 'es-toolkit/string'
@@ -12,7 +12,7 @@ import { cache } from 'react'
import { initReactI18next } from 'react-i18next/initReactI18next'
import { serverOnlyContext } from '@/utils/server-only-context'
import { i18n } from '.'
import { namespacesKebabCase } from './resources'
import { namespacesInFileName } from './resources'
import { getInitOptions } from './settings'

const [getLocaleCache, setLocaleCache] = serverOnlyContext<Locale | null>(null)
@@ -26,8 +26,8 @@ const getOrCreateI18next = async (lng: Locale) => {
instance = createInstance()
await instance
.use(initReactI18next)
.use(resourcesToBackend((language: Locale, namespace: NamespaceCamelCase | NamespaceKebabCase) => {
const fileNamespace = kebabCase(namespace) as NamespaceKebabCase
.use(resourcesToBackend((language: Locale, namespace: Namespace | NamespaceInFileName) => {
const fileNamespace = kebabCase(namespace)
return import(`../i18n/${language}/${fileNamespace}.json`)
}))
.init({
@@ -38,7 +38,7 @@ const getOrCreateI18next = async (lng: Locale) => {
return instance
}

export async function getTranslation(lng: Locale, ns?: NamespaceCamelCase) {
export async function getTranslation<T extends Namespace>(lng: Locale, ns?: T) {
const i18nextInstance = await getOrCreateI18next(lng)

if (ns && !i18nextInstance.hasLoadedNamespace(ns))
@@ -84,7 +84,7 @@ export const getResources = cache(async (lng: Locale): Promise<Resource> => {
const messages = {} as ResourceLanguage

await Promise.all(
(namespacesKebabCase).map(async (ns) => {
(namespacesInFileName).map(async (ns) => {
const mod = await import(`../i18n/${lng}/${ns}.json`)
messages[camelCase(ns)] = mod.default
}),

@@ -1,5 +1,5 @@
import type { InitOptions } from 'i18next'
import { namespacesCamelCase } from './resources'
import { namespaces } from './resources'

export function getInitOptions(): InitOptions {
return {
@@ -8,7 +8,7 @@ export function getInitOptions(): InitOptions {
fallbackLng: 'en-US',
partialBundledLanguages: true,
keySeparator: false,
ns: namespacesCamelCase,
ns: namespaces,
interpolation: {
escapeValue: false,
},

@@ -31,8 +31,8 @@
"build": "next build",
"build:docker": "next build && node scripts/optimize-standalone.js",
"start": "node ./scripts/copy-and-start.mjs",
"lint": "eslint --cache --concurrency=\"auto\"",
"lint:ci": "eslint --cache --concurrency 3",
"lint": "eslint --cache --concurrency=auto",
"lint:ci": "eslint --cache --concurrency 2",
"lint:fix": "pnpm lint --fix",
"lint:quiet": "pnpm lint --quiet",
"lint:complexity": "pnpm lint --rule 'complexity: [error, {max: 15}]' --quiet",
@@ -166,7 +166,10 @@
"devDependencies": {
"@antfu/eslint-config": "7.2.0",
"@chromatic-com/storybook": "5.0.0",
"@egoist/tailwindcss-icons": "1.9.2",
"@eslint-react/eslint-plugin": "2.9.4",
"@iconify-json/heroicons": "1.2.3",
"@iconify-json/ri": "1.2.9",
"@mdx-js/loader": "3.1.1",
"@mdx-js/react": "3.1.1",
"@next/bundle-analyzer": "16.1.5",
@@ -194,7 +197,8 @@
"@types/js-cookie": "3.0.6",
"@types/js-yaml": "4.0.9",
"@types/negotiator": "0.6.4",
"@types/node": "18.15.0",
"@types/node": "24.10.12",
"@types/postcss-js": "4.1.0",
"@types/qs": "6.14.0",
"@types/react": "19.2.9",
"@types/react-dom": "19.2.3",
@@ -213,18 +217,21 @@
"cross-env": "10.1.0",
"esbuild": "0.27.2",
"eslint": "9.39.2",
"eslint-plugin-better-tailwindcss": "4.1.1",
"eslint-plugin-better-tailwindcss": "https://pkg.pr.new/hyoban/eslint-plugin-better-tailwindcss@c0161c7",
"eslint-plugin-hyoban": "0.11.1",
"eslint-plugin-react-hooks": "7.0.1",
"eslint-plugin-react-refresh": "0.5.0",
"eslint-plugin-sonarjs": "3.0.6",
"eslint-plugin-storybook": "10.2.6",
"husky": "9.1.7",
"iconify-import-svg": "0.1.1",
"jsdom": "27.3.0",
"jsdom-testing-mocks": "1.16.0",
"knip": "5.78.0",
"lint-staged": "15.5.2",
"nock": "14.0.10",
"postcss": "8.5.6",
"postcss-js": "5.0.3",
"react-scan": "0.4.3",
"sass": "1.93.2",
"serwist": "9.5.4",

560  web/pnpm-lock.yaml (generated)
File diff suppressed because it is too large
@@ -1,8 +1,18 @@
import path from 'node:path'
import { fileURLToPath } from 'node:url'
import { getIconCollections, iconsPlugin } from '@egoist/tailwindcss-icons'
import tailwindTypography from '@tailwindcss/typography'
import { importSvgCollections } from 'iconify-import-svg'
// @ts-expect-error workaround for turbopack issue
import { cssAsPlugin } from './tailwind-css-plugin.ts'
// @ts-expect-error workaround for turbopack issue
import tailwindThemeVarDefine from './themes/tailwind-theme-var-define.ts'
import typography from './typography.js'

const _dirname = typeof __dirname !== 'undefined'
? __dirname
: path.dirname(fileURLToPath(import.meta.url))

const config = {
theme: {
typography,
@@ -148,7 +158,32 @@ const config = {
},
},
},
plugins: [tailwindTypography],
plugins: [
tailwindTypography,
iconsPlugin({
collections: {
...getIconCollections(['heroicons', 'ri']),
...importSvgCollections({
source: path.resolve(_dirname, 'app/components/base/icons/assets/public'),
prefix: 'custom-public',
ignoreImportErrors: true,
}),
...importSvgCollections({
source: path.resolve(_dirname, 'app/components/base/icons/assets/vender'),
prefix: 'custom-vender',
ignoreImportErrors: true,
}),
},
extraProperties: {
width: '1rem',
height: '1rem',
display: 'block',
},
}),
cssAsPlugin([
path.resolve(_dirname, './app/styles/globals.css'),
]),
],
// https://github.com/tailwindlabs/tailwindcss/discussions/5969
corePlugins: {
preflight: false,

25  web/tailwind-css-plugin.ts (new file)
@@ -0,0 +1,25 @@
// Credits:
// https://github.com/tailwindlabs/tailwindcss-intellisense/issues/227

import type { PluginCreator } from 'tailwindcss/types/config'
import { readFileSync } from 'node:fs'
import { parse } from 'postcss'
import { objectify } from 'postcss-js'

export const cssAsPlugin: (cssPath: string[]) => PluginCreator = (cssPath: string[]) => {
if (process.env.NODE_ENV === 'production') {
return () => {}
}
return ({ addUtilities, addComponents, addBase }) => {
const jssList = cssPath.map(p => objectify(parse(readFileSync(p, 'utf8'))))

for (const jss of jssList) {
if (jss['@layer utilities'])
addUtilities(jss['@layer utilities'])
if (jss['@layer components'])
addComponents(jss['@layer components'])
if (jss['@layer base'])
addBase(jss['@layer base'])
}
}
}
7  web/types/i18n.d.ts (vendored)
@@ -1,17 +1,16 @@
import type { NamespaceCamelCase, Resources } from '../i18n-config/resources'
import type { Namespace, Resources } from '../i18n-config/resources'
import 'i18next'

declare module 'i18next' {
// eslint-disable-next-line ts/consistent-type-definitions
interface CustomTypeOptions {
defaultNS: 'common'
resources: Resources
keySeparator: false
}
}

export type I18nKeysByPrefix<
NS extends NamespaceCamelCase,
NS extends Namespace,
Prefix extends string = '',
> = Prefix extends ''
? keyof Resources[NS]
@@ -22,7 +21,7 @@ export type I18nKeysByPrefix<
: never

export type I18nKeysWithPrefix<
NS extends NamespaceCamelCase,
NS extends Namespace,
Prefix extends string = '',
> = Prefix extends ''
? keyof Resources[NS]

7  web/utils/object.ts (new file)
@@ -0,0 +1,7 @@
export function ObjectFromEntries<const T extends ReadonlyArray<readonly [PropertyKey, unknown]>>(entries: T): { [K in T[number]as K[0]]: K[1] } {
return Object.fromEntries(entries) as { [K in T[number]as K[0]]: K[1] }
}

export function ObjectKeys<const T extends Record<string, unknown>>(obj: T): (keyof T)[] {
return Object.keys(obj) as (keyof T)[]
}